diff --git a/.cursor/rules/pr-comments.mdc b/.cursor/rules/pr-comments.mdc deleted file mode 120000 index 4b5e57d6..00000000 --- a/.cursor/rules/pr-comments.mdc +++ /dev/null @@ -1 +0,0 @@ -../../.ai/rules/pr-comments.mdc \ No newline at end of file diff --git a/.gitignore b/.gitignore index 62171e1d..8ec81799 100644 --- a/.gitignore +++ b/.gitignore @@ -28,3 +28,5 @@ packages/transloadit/README.md packages/transloadit/CHANGELOG.md packages/transloadit/LICENSE package.tgz +packages/mcp-server/.mcpregistry_github_token +packages/mcp-server/.mcpregistry_registry_token diff --git a/docs/fingerprint/transloadit-after.json b/docs/fingerprint/transloadit-after.json index cca93ce0..d4e5197d 100644 --- a/docs/fingerprint/transloadit-after.json +++ b/docs/fingerprint/transloadit-after.json @@ -1,5 +1,5 @@ { - "packageDir": "/home/kvz/code/node-sdk/packages/transloadit", + "packageDir": "packages/transloadit", "tarball": { "filename": "transloadit-4.1.2.tgz", "sizeBytes": 1110470, diff --git a/docs/fingerprint/transloadit-baseline.json b/docs/fingerprint/transloadit-baseline.json index 521052f5..cba9f986 100644 --- a/docs/fingerprint/transloadit-baseline.json +++ b/docs/fingerprint/transloadit-baseline.json @@ -1,9 +1,9 @@ { - "packageDir": "/home/kvz/code/node-sdk/packages/transloadit", + "packageDir": "packages/transloadit", "tarball": { "filename": "transloadit-4.7.5.tgz", - "sizeBytes": 1250742, - "sha256": "195c48c7b93e44360d29e3c74d3dbb720503242123a7a57c3387000c71b72c1a" + "sizeBytes": 1338690, + "sha256": "82b176af124eca81eec440520d3ca4b68525b257b1dd59e3f1a4333e62e26e9e" }, "packageJson": { "name": "transloadit", @@ -48,8 +48,8 @@ }, { "path": "dist/cli/commands/assemblies.js", - "sizeBytes": 51217, - "sha256": "c368505ba2086dfbcc6148c5ac656a9ac228093cf8cebe95ba650f4dfe21592d" + "sizeBytes": 50297, + "sha256": "c88802ee5f259357a626addd8e8602c39672bbb1e658515e956db8ad09934fb5" }, { "path": "dist/alphalib/types/assembliesGet.js", @@ -316,6 +316,11 @@ "sizeBytes": 1228, 
"sha256": "474e8f93000f842761a1cebe9282c17eeba8c809f1d8ef25db026796edacbf89" }, + { + "path": "dist/cli/fileProcessingOptions.js", + "sizeBytes": 1907, + "sha256": "dcc0a2470ca0003901ab4fc24f033f27f3b3e2fe7db131a166b20efe29568b59" + }, { "path": "dist/alphalib/types/robots/ftp-import.js", "sizeBytes": 2406, @@ -326,6 +331,11 @@ "sizeBytes": 3534, "sha256": "c4bd648bb097acadbc349406192105367b9d94c516700b99c9f4d7a4b6c7a6f0" }, + { + "path": "dist/cli/commands/generated-intents.js", + "sizeBytes": 86298, + "sha256": "f64a7238d2954d1ff71ab02be0d2f18d1dd048e3543cd9a79950794fdbdcd365" + }, { "path": "dist/alphalib/types/robots/google-import.js", "sizeBytes": 3748, @@ -398,14 +408,44 @@ }, { "path": "dist/cli/commands/index.js", - "sizeBytes": 2145, - "sha256": "b44764be9d6a803669bbc1a937f553566ce91993ed283c7f6d5ef65cbff6b263" + "sizeBytes": 2312, + "sha256": "a11ca4773963c91d8d03123b9e2e7a2a5d268880e1bae18f0419df9a36adfb26" }, { "path": "dist/inputFiles.js", "sizeBytes": 7836, "sha256": "1d77d129abc1b11be894d1cf6c34afc93370165e39871d6d5b672c058d1a0489" }, + { + "path": "dist/cli/intentCommandSpecs.js", + "sizeBytes": 6595, + "sha256": "19fc06131e457c60d77d46fcfbf970855849b08b16f76ee76fa65f2188dc9c4c" + }, + { + "path": "dist/cli/intentFields.js", + "sizeBytes": 3431, + "sha256": "dd72c1bbbb64be5b3f346803935060707b203d364060d7fc10a44b063eb6110c" + }, + { + "path": "dist/cli/intentInputPolicy.js", + "sizeBytes": 56, + "sha256": "f2dfdc05ddec25bf8ae63448d8e562ff7ba6ec3b17b4ea4be0adb151017c5991" + }, + { + "path": "dist/cli/intentResolvedDefinitions.js", + "sizeBytes": 12204, + "sha256": "1caadb7700937def4eb86539f8e8f12f4bc532f9ab944368ffd2ffcef527ce6b" + }, + { + "path": "dist/cli/intentRuntime.js", + "sizeBytes": 10990, + "sha256": "e2e2ef1c92038c176922a69db8c1637fb97228a4cbb08057d27b31a33121a074" + }, + { + "path": "dist/cli/intentSmokeCases.js", + "sizeBytes": 3072, + "sha256": "01e0f5f7d57c1fbb697b9ce1cd599b375cbe2b0414c565f2e6e7a957d470df9d" + }, { "path": 
"dist/lintAssemblyInput.js", "sizeBytes": 2335, @@ -599,7 +639,7 @@ { "path": "dist/Transloadit.js", "sizeBytes": 37922, - "sha256": "500d82f5b654da175e301294540522718b2a81e15d87c3cd365f074fe961a769" + "sha256": "da28e944dd0a9cadb5a2cecdb2d859a639a53abd4d45489fab02069115918b6a" }, { "path": "dist/alphalib/tryCatch.js", @@ -698,8 +738,8 @@ }, { "path": "package.json", - "sizeBytes": 2730, - "sha256": "313dd2ac13d3e4857b71bd889b2c9fa7f2458cf2bf5be2dd5a1996eb3d23199d" + "sizeBytes": 2734, + "sha256": "154923aac42eb65b220c74a778fddb5c74eef07d0024fbd325100f82993ce6b2" }, { "path": "dist/alphalib/types/robots/_index.d.ts.map", @@ -753,13 +793,13 @@ }, { "path": "dist/cli/commands/assemblies.d.ts.map", - "sizeBytes": 3737, - "sha256": "e659be90cee8252d9fa4a5db72cf3d48d2548d0f5a716368cc024f7ed1e4b222" + "sizeBytes": 3889, + "sha256": "fdb9b7ad5f7d7ceae62c5bc690c823697b0316a460de39fa5243d5b89dcd6fb4" }, { "path": "dist/cli/commands/assemblies.js.map", - "sizeBytes": 44866, - "sha256": "8bc2496707790b60dfde07065b6df6adc7152d04e999ed4c84f1993eaeadc28f" + "sizeBytes": 46414, + "sha256": "7e3bc37a39d0d3a320a10a8a0dc65169cf10064e9e744700a1837bb22ccdb1f4" }, { "path": "dist/alphalib/types/assembliesGet.d.ts.map", @@ -1291,6 +1331,16 @@ "sizeBytes": 1017, "sha256": "6583f0e6b3a04b39758bc60bbd77383f00715365ac714be95b871ba6797050b9" }, + { + "path": "dist/cli/fileProcessingOptions.d.ts.map", + "sizeBytes": 911, + "sha256": "c2a4f82001dc780feba5894d66f72f1977d23e4ace574c0eda2c751946d11827" + }, + { + "path": "dist/cli/fileProcessingOptions.js.map", + "sizeBytes": 1588, + "sha256": "3635f9b2407ba7bb4a82884b7c284aa651a54f3ba4b2b2df3cfb450ce179c76c" + }, { "path": "dist/alphalib/types/robots/ftp-import.d.ts.map", "sizeBytes": 976, @@ -1311,6 +1361,16 @@ "sizeBytes": 2145, "sha256": "ce1bf48c1cc713ae843061cba3c3b119475baa5cb6b62ac4b575e50b297bcf71" }, + { + "path": "dist/cli/commands/generated-intents.d.ts.map", + "sizeBytes": 9296, + "sha256": 
"3ce6b15ecd331d084554df1d694418252ca852fe7a3dba793a6c14a11db917f5" + }, + { + "path": "dist/cli/commands/generated-intents.js.map", + "sizeBytes": 39425, + "sha256": "bcbe46850689d5ac0ee546d7904623f41ddfe312f3bc4ad6ae71f39d69c23067" + }, { "path": "dist/alphalib/types/robots/google-import.d.ts.map", "sizeBytes": 960, @@ -1454,12 +1514,12 @@ { "path": "dist/cli/commands/index.d.ts.map", "sizeBytes": 198, - "sha256": "3f955192e7d7832d6fd0c8ee0244b153e42c947686425750c7c8c58d6657f2a7" + "sha256": "6a459d827f048c87854b1570a2215cd69dc696ebe809a695a4d633e9dd4541ca" }, { "path": "dist/cli/commands/index.js.map", - "sizeBytes": 1940, - "sha256": "1cad8333ee5fd6c34071a6d8528a7b55399be0626baf1754e28453d714836868" + "sizeBytes": 2088, + "sha256": "5e514ba662ee52294dc9b50a7744fa3c8d89f60d0f49eaa118d30e9b16bfcb39" }, { "path": "dist/inputFiles.d.ts.map", @@ -1471,6 +1531,66 @@ "sizeBytes": 8595, "sha256": "fa96090c58247759bef9b7767bd4b4f474bba332ee5a6edf0429e89e99a0c25c" }, + { + "path": "dist/cli/intentCommandSpecs.d.ts.map", + "sizeBytes": 1195, + "sha256": "1621122696872464f8e01e51236beb7cccf3479149b3257141d16346f585622e" + }, + { + "path": "dist/cli/intentCommandSpecs.js.map", + "sizeBytes": 4862, + "sha256": "2e8c6e2ad7ca01caaad39a3404aa5bf0062569e37d30d27de5a473416127bbb4" + }, + { + "path": "dist/cli/intentFields.d.ts.map", + "sizeBytes": 492, + "sha256": "64be986a13e9b21e1c7bc047c01df4d06105eba0cb14da660791dd26a07b2090" + }, + { + "path": "dist/cli/intentFields.js.map", + "sizeBytes": 3606, + "sha256": "812807a35eb785d9415db2b134f766b25130f63a143955b07348ca52dbb608de" + }, + { + "path": "dist/cli/intentInputPolicy.d.ts.map", + "sizeBytes": 346, + "sha256": "a4d49f03eba0c6811f065f0048f3f3efa454f32eee70050ce598e180d50827db" + }, + { + "path": "dist/cli/intentInputPolicy.js.map", + "sizeBytes": 133, + "sha256": "3f85c00a0565c65820326f2e6c694648153782cce52bb6b806dd4a68896669b1" + }, + { + "path": "dist/cli/intentResolvedDefinitions.d.ts.map", + "sizeBytes": 1873, + "sha256": 
"e7c166c13d834a2f5b2316dbbe26f24bc17247314b15223255e435face5631ce" + }, + { + "path": "dist/cli/intentResolvedDefinitions.js.map", + "sizeBytes": 10243, + "sha256": "6005cb3491d92ff86da8d52b49cb9c1aad07f8e395bcdf4b02aee974a8bbbfdb" + }, + { + "path": "dist/cli/intentRuntime.d.ts.map", + "sizeBytes": 3272, + "sha256": "54e3e9404ce47f1450005a50b49a185469b5bb8f1afb1367c340d4b1b73f5951" + }, + { + "path": "dist/cli/intentRuntime.js.map", + "sizeBytes": 9520, + "sha256": "804c1df6a3285d08b606b02b392271f4b64eded2dae29659eeef587d477c2ef2" + }, + { + "path": "dist/cli/intentSmokeCases.d.ts.map", + "sizeBytes": 369, + "sha256": "5fbbb3c25c53e55cc34ba0be87dcacd00373a6cc2ef774dedf58d1354998fbf3" + }, + { + "path": "dist/cli/intentSmokeCases.js.map", + "sizeBytes": 2362, + "sha256": "79eba061880bea4639cf758199a81f9ba4be20276b463f5d488d5a8054a0659f" + }, { "path": "dist/lintAssemblyInput.d.ts.map", "sizeBytes": 522, @@ -1854,12 +1974,12 @@ { "path": "dist/Transloadit.d.ts.map", "sizeBytes": 6679, - "sha256": "ee51b85a546a35f49fd8512705d9bd090d704edd94757ed6f457b882e9bc2396" + "sha256": "319e3cf611757159752a324d59ca0f6fa02a8218e32e61c8ffb103764812a9e0" }, { "path": "dist/Transloadit.js.map", "sizeBytes": 27586, - "sha256": "9fd1ee82626e9e2452ec799d3a8ae775f4a7c1fd9b99d9703f7e3e2bd0b3d191" + "sha256": "409d5759a0e57719a00e5ab6314a89a49aa083e6bc335078e4e12fb9c046a41c" }, { "path": "dist/alphalib/tryCatch.d.ts.map", @@ -2053,8 +2173,8 @@ }, { "path": "README.md", - "sizeBytes": 36476, - "sha256": "62cf02f92243b72419d266b5e94adc7f06cbf55fc6155c5ecf67115afdc47635" + "sizeBytes": 37376, + "sha256": "71e16691f95885bbd342ed8f02a8c447c968b6034fb8f16b35911ab7462abff9" }, { "path": "dist/alphalib/types/robots/_index.d.ts", @@ -2108,13 +2228,13 @@ }, { "path": "dist/cli/commands/assemblies.d.ts", - "sizeBytes": 4342, - "sha256": "df6486047bbd89862b7cb433d05f63a128c1fad4520df978842adcecd4f17503" + "sizeBytes": 4488, + "sha256": 
"7dfbf42f5da3cb819883856d0c18166719149511509de1a2ad9eab8bf50e8d58" }, { "path": "src/cli/commands/assemblies.ts", - "sizeBytes": 50948, - "sha256": "d2a9de8dbd22233785a9880537ece31c0123b1959a24048b50b87c8a759db10e" + "sizeBytes": 52099, + "sha256": "4d41d313c6722cb601fa451c9d67a07c65f81659c6bd168205e061b023090bb1" }, { "path": "dist/alphalib/types/assembliesGet.d.ts", @@ -2646,6 +2766,16 @@ "sizeBytes": 2068, "sha256": "08af2039f3e568d27b91508b8002ce2ee19714817d69360a4e942cf27f820657" }, + { + "path": "dist/cli/fileProcessingOptions.d.ts", + "sizeBytes": 1095, + "sha256": "1faaca480253919fde59880952643428df7e387b4837040c77d18874255c0e81" + }, + { + "path": "src/cli/fileProcessingOptions.ts", + "sizeBytes": 2331, + "sha256": "c9fbc2dc5bc2593f298f8ca47091643951bd22c6f08bd138d8ef8ade9c1f9357" + }, { "path": "dist/alphalib/types/robots/ftp-import.d.ts", "sizeBytes": 10382, @@ -2666,6 +2796,16 @@ "sizeBytes": 4197, "sha256": "1bbaa2361cc3675a29178cbd0f4fcecaad1033032f154a6da36c5c677a9c9447" }, + { + "path": "dist/cli/commands/generated-intents.d.ts", + "sizeBytes": 265589, + "sha256": "714397e265f3d3c87a085c6e60c1eae9b9ea5b1d9bff344e2172857a0f882d86" + }, + { + "path": "src/cli/commands/generated-intents.ts", + "sizeBytes": 83511, + "sha256": "ae240c3978168433d4dab3ebf24fc230eed59faa50c9288855421e3c04bd2ca8" + }, { "path": "dist/alphalib/types/robots/google-import.d.ts", "sizeBytes": 9781, @@ -2813,8 +2953,8 @@ }, { "path": "src/cli/commands/index.ts", - "sizeBytes": 2044, - "sha256": "b6752fa800c6a91e662b75a0c0973f0ba513f263d4a96d5e46a0d3e1f1a9f828" + "sizeBytes": 2200, + "sha256": "dcf03b6ac54bf0793a6be2cc945d8b8e3173d5de69366b19d78d960e4e1e8d2f" }, { "path": "dist/inputFiles.d.ts", @@ -2826,6 +2966,66 @@ "sizeBytes": 8411, "sha256": "0df54cb83ac5c718f3d3f78ffb77a31d485e2ab5f0a9d91b4f64852e72d1a589" }, + { + "path": "dist/cli/intentCommandSpecs.d.ts", + "sizeBytes": 1439, + "sha256": "6cc613798ca129ddae21c32e9f41ff1100ec1062b7a692ad407598a047dc5c50" + }, + { + "path": 
"src/cli/intentCommandSpecs.ts", + "sizeBytes": 7289, + "sha256": "6361d5878bbc63b57abd1eadead9b9627dec0c75054b5c77efbb7f3ac61d75cd" + }, + { + "path": "dist/cli/intentFields.d.ts", + "sizeBytes": 436, + "sha256": "c57fc802ff7528fbb9546869294aeeec3e066e06cadf6856c1bde04a3dc2fcb7" + }, + { + "path": "src/cli/intentFields.ts", + "sizeBytes": 3285, + "sha256": "112fa2f6772eef50f2e7b528c8ba7eb349570e01e84011b30279ada2c34f3009" + }, + { + "path": "dist/cli/intentInputPolicy.d.ts", + "sizeBytes": 333, + "sha256": "d44f15f350569ae0cce2ab042d52a086870d9cdfac36ddc8b10fa64f1c20ec3b" + }, + { + "path": "src/cli/intentInputPolicy.ts", + "sizeBytes": 275, + "sha256": "915772425ea5a963f79b42c13d95077733ea173910e0156a3b93964714c52ead" + }, + { + "path": "dist/cli/intentResolvedDefinitions.d.ts", + "sizeBytes": 2118, + "sha256": "074d9091a432bc7131b45199417343b987a5965d5c465d66f51136cf70684ddd" + }, + { + "path": "src/cli/intentResolvedDefinitions.ts", + "sizeBytes": 14794, + "sha256": "d17db6ffe07012976b8fef6f206f4f50ffb8c01696169cc54f41a87685e2ff10" + }, + { + "path": "dist/cli/intentRuntime.d.ts", + "sizeBytes": 4257, + "sha256": "5e205d60b47eaab7562af41bbbff7596345caf84b24debe4fae5d4e9323cbfe9" + }, + { + "path": "src/cli/intentRuntime.ts", + "sizeBytes": 13958, + "sha256": "be3abc271b0983b12e6c70b9c4943eda93205199d145f68a29c337b662700242" + }, + { + "path": "dist/cli/intentSmokeCases.d.ts", + "sizeBytes": 337, + "sha256": "d3a0809ad489635cb567005d0e29b024acfc4b480474d81e379c0e98b1b2ba48" + }, + { + "path": "src/cli/intentSmokeCases.ts", + "sizeBytes": 2939, + "sha256": "b6939c7182cf90b73da1fa279c1d44aee97086deb5280ade2e47c57e197fef44" + }, { "path": "src/alphalib/typings/json-to-ast.d.ts", "sizeBytes": 760, @@ -3214,12 +3414,12 @@ { "path": "dist/Transloadit.d.ts", "sizeBytes": 12397, - "sha256": "b1e9233014c13c47832c7fb8b2c82bc75e1b3519f259b3ce71f9bd6d8150f36d" + "sha256": "b5d21acd74ea575bc5c9820ba48d736cd0f44a025f4981aa22d4085007fdf736" }, { "path": "src/Transloadit.ts", 
"sizeBytes": 42665, - "sha256": "d8a3d50a5f245e79258bada7ca39cc9aaedbe430b521145c819b0d46d3fcb1bf" + "sha256": "c6fc410d37595c38306b6e73ca5ff7aa3ea56a2571f23f6800c4f46875df87e4" }, { "path": "dist/alphalib/tryCatch.d.ts", diff --git a/docs/fingerprint/transloadit-baseline.package.json b/docs/fingerprint/transloadit-baseline.package.json index b1621636..99acf0ed 100644 --- a/docs/fingerprint/transloadit-baseline.package.json +++ b/docs/fingerprint/transloadit-baseline.package.json @@ -70,19 +70,19 @@ "src": "./src" }, "scripts": { - "check": "yarn lint:ts && yarn fix && yarn test:unit", + "check": "yarn lint:ts && yarn test:unit", "fix:js": "biome check --write .", "lint:ts": "yarn --cwd ../.. tsc:node", "fix:js:unsafe": "biome check --write . --unsafe", "lint:js": "biome check .", - "lint": "npm-run-all --parallel 'lint:js'", - "fix": "npm-run-all --serial 'fix:js'", + "lint": "yarn lint:js", + "fix": "yarn fix:js", "lint:deps": "knip --dependencies --no-progress", "fix:deps": "knip --dependencies --no-progress --fix", "prepack": "node ../../scripts/prepare-transloadit.ts", - "test:unit": "yarn --cwd ../.. tsc:utils && ../../node_modules/.bin/vitest run --coverage ./test/unit", - "test:e2e": "yarn --cwd ../.. tsc:utils && ../../node_modules/.bin/vitest run ./test/e2e", - "test": "yarn --cwd ../.. tsc:utils && ../../node_modules/.bin/vitest run --coverage" + "test:unit": "yarn --cwd ../.. tsc:utils && ../../node_modules/.bin/vitest run --coverage --passWithNoTests ./test/unit", + "test:e2e": "yarn --cwd ../.. tsc:utils && ../../node_modules/.bin/vitest run --passWithNoTests ./test/e2e", + "test": "yarn --cwd ../.. 
tsc:utils && ../../node_modules/.bin/vitest run --coverage --passWithNoTests" }, "license": "MIT", "main": "./dist/Transloadit.js", diff --git a/packages/node/README.md b/packages/node/README.md index 1540e3f3..d84c3443 100644 --- a/packages/node/README.md +++ b/packages/node/README.md @@ -84,7 +84,28 @@ npx -y transloadit auth token --aud mcp --scope assemblies:write,templates:read ### Processing Media -Create Assemblies to process files using Assembly Instructions (steps) or Templates: +For common one-off tasks, prefer the intent-first commands: + +```bash +# Generate an image from a text prompt +npx transloadit image generate --prompt "A red bicycle in a studio" --out bicycle.png + +# Generate a preview for any input path or URL +npx transloadit preview generate --input https://example.com/file.pdf --out preview.png + +# Paste base64 input directly into an intent command +npx transloadit document convert --input-base64 "$(base64 -i input.txt)" --format pdf --out output.pdf + +# Encode a video into an HLS package +npx transloadit video encode-hls --input input.mp4 --out dist/hls +``` + +The generated intent catalog also includes commands such as `image remove-background`, +`image optimize`, `image resize`, `document convert`, `document optimize`, +`document auto-rotate`, `document thumbs`, `audio waveform`, `text speak`, +`video thumbs`, `file compress`, and `file decompress`. + +For full control, create Assemblies directly using Assembly Instructions (steps) or Templates: ```bash # Process a file using a steps file diff --git a/packages/node/package.json b/packages/node/package.json index b2da1b2c..6d90e59c 100644 --- a/packages/node/package.json +++ b/packages/node/package.json @@ -87,8 +87,8 @@ "lint:ts": "yarn --cwd ../.. tsc:node", "fix:js:unsafe": "biome check --write . 
--unsafe", "lint:js": "biome check .", - "lint": "npm-run-all --parallel 'lint:js'", - "fix": "npm-run-all --serial 'fix:js'", + "lint": "yarn lint:js", + "fix": "yarn fix:js", "lint:deps": "knip --dependencies --no-progress", "fix:deps": "knip --dependencies --no-progress --fix", "prepack": "node -e \"require('node:fs').rmSync('dist',{recursive:true,force:true})\" && rm -f tsconfig.tsbuildinfo tsconfig.build.tsbuildinfo && yarn --cwd ../.. tsc:node", diff --git a/packages/node/scripts/test-intents-e2e.sh b/packages/node/scripts/test-intents-e2e.sh new file mode 100755 index 00000000..f32c69f0 --- /dev/null +++ b/packages/node/scripts/test-intents-e2e.sh @@ -0,0 +1,284 @@ +#!/usr/bin/env bash + +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)" +WORKDIR="${1:-/tmp/node-sdk-intent-e2e}" +OUTDIR="$WORKDIR/out" +LOGDIR="$WORKDIR/logs" +FIXTUREDIR="$WORKDIR/fixtures" +CLI=(node "$REPO_ROOT/packages/node/src/cli.ts") +PREVIEW_URL='https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf' + +if [[ -f "$REPO_ROOT/.env" ]]; then + set -a + # shellcheck disable=SC1090 + source "$REPO_ROOT/.env" + set +a +fi + +if [[ -z "${TRANSLOADIT_KEY:-}" || -z "${TRANSLOADIT_SECRET:-}" ]]; then + echo "Missing TRANSLOADIT_KEY / TRANSLOADIT_SECRET. Expected them in $REPO_ROOT/.env or the environment." >&2 + exit 1 +fi + +require_command() { + local command_name="$1" + if ! 
command -v "$command_name" >/dev/null 2>&1; then + echo "Missing required command: $command_name" >&2 + exit 1 + fi +} + +prepare_fixtures() { + require_command curl + require_command ffmpeg + require_command zip + + rm -rf "$WORKDIR" + mkdir -p "$OUTDIR" "$LOGDIR" "$FIXTUREDIR" + + cp "$REPO_ROOT/packages/node/examples/fixtures/berkley.jpg" "$FIXTUREDIR/input.jpg" + cp "$REPO_ROOT/packages/node/test/e2e/fixtures/testsrc.mp4" "$FIXTUREDIR/input.mp4" + printf 'Hello from Transloadit CLI intents\n' >"$FIXTUREDIR/input.txt" + zip -j "$FIXTUREDIR/input.zip" "$FIXTUREDIR/input.txt" >/dev/null + ffmpeg -f lavfi -i sine=frequency=1000:duration=1 -q:a 9 -acodec libmp3lame -y "$FIXTUREDIR/input.mp3" >/dev/null 2>&1 + curl -L --fail --silent --show-error -o "$FIXTUREDIR/input.pdf" "$PREVIEW_URL" +} + +verify_file_type() { + local path="$1" + local expected="$2" + + [[ -s "$path" ]] || return 1 + file "$path" | grep -F "$expected" >/dev/null +} + +verify_png() { + verify_file_type "$1" 'PNG image data' +} + +verify_jpeg() { + verify_file_type "$1" 'JPEG image data' +} + +verify_pdf() { + verify_file_type "$1" 'PDF document' +} + +verify_mp3() { + verify_file_type "$1" 'Audio file' +} + +verify_zip() { + verify_file_type "$1" 'Zip archive data' +} + +verify_document_thumbs() { + [[ -f "$1/in.png" ]] || return 1 + verify_png "$1/in.png" +} + +verify_video_thumbs() { + [[ -f "$1/in_0.jpg" ]] || return 1 + verify_jpeg "$1/in_0.jpg" +} + +verify_video_encode_hls() { + [[ -f "$1/high/in.mp4" ]] || return 1 + [[ -f "$1/low/in.mp4" ]] || return 1 + [[ -f "$1/mid/in.mp4" ]] || return 1 + [[ -f "$1/adaptive/my_playlist.m3u8" ]] || return 1 +} + +verify_file_decompress() { + [[ -f "$1/input.txt" ]] || return 1 + grep -F 'Hello from Transloadit CLI intents' "$1/input.txt" >/dev/null +} + +verify_json() { + node --input-type=module <<'NODE' "$1" +import { readFileSync } from 'node:fs' + +const value = JSON.parse(readFileSync(process.argv[1], 'utf8')) +const ok = + value != null && + 
(!Array.isArray(value) || value.length > 0) && + (typeof value !== 'object' || Object.keys(value).length > 0) + +process.exit(ok ? 0 : 1) +NODE +} + +verify_image_describe_labels() { + node --input-type=module <<'NODE' "$1" +import { readFileSync } from 'node:fs' + +const value = JSON.parse(readFileSync(process.argv[1], 'utf8')) +const ok = + Array.isArray(value) && + value.length > 0 && + value.every((item) => typeof item === 'string' || (item && typeof item.name === 'string')) + +process.exit(ok ? 0 : 1) +NODE +} + +verify_image_describe_wordpress() { + node --input-type=module <<'NODE' "$1" +import { readFileSync } from 'node:fs' + +const value = JSON.parse(readFileSync(process.argv[1], 'utf8')) +const required = ['altText', 'title', 'caption', 'description'] +const ok = + value && + typeof value === 'object' && + required.every((key) => typeof value[key] === 'string' && value[key].trim().length > 0) + +process.exit(ok ? 0 : 1) +NODE +} + +verify_output() { + local verifier="$1" + local path="$2" + + case "$verifier" in + json) verify_json "$path" ;; + png) verify_png "$path" ;; + jpeg) verify_jpeg "$path" ;; + pdf) verify_pdf "$path" ;; + mp3) verify_mp3 "$path" ;; + zip) verify_zip "$path" ;; + document-thumbs) verify_document_thumbs "$path" ;; + video-thumbs) verify_video_thumbs "$path" ;; + video-encode-hls) verify_video_encode_hls "$path" ;; + file-decompress) verify_file_decompress "$path" ;; + image-describe-labels) verify_image_describe_labels "$path" ;; + image-describe-wordpress) verify_image_describe_wordpress "$path" ;; + *) + echo "Unknown verifier: $verifier" >&2 + return 1 + ;; + esac +} + +resolve_placeholder() { + local arg="$1" + + case "$arg" in + @preview-url) printf '%s\n' "$PREVIEW_URL" ;; + @fixture/*) printf '%s\n' "$FIXTUREDIR/${arg#@fixture/}" ;; + *) printf '%s\n' "$arg" ;; + esac +} + +run_case() { + local name="$1" + local output_path="$2" + local verifier="$3" + shift 3 + + local logfile="$LOGDIR/${name}.log" + rm -rf "$output_path" 
+ mkdir -p "$(dirname "$output_path")" + + set +e + "${CLI[@]}" "$@" >"$logfile" 2>&1 + local exit_code=$? + set -e + + local verdict='FAIL' + local detail='' + + if [[ $exit_code -eq 0 ]] && verify_output "$verifier" "$output_path"; then + verdict='OK' + if [[ -f "$output_path" ]]; then + detail="$(file "$output_path" | sed 's#^.*: ##' | tr '\n' ' ' | awk '{$1=$1; print}')" + else + detail="$(find "$output_path" -type f | sed "s#^$output_path/##" | sort | tr '\n' ',' | sed 's/,$//')" + fi + else + if [[ -s "$logfile" ]]; then + detail="$(tail -n 8 "$logfile" | tr '\n' ' ' | awk '{$1=$1; print}' | cut -c1-220)" + else + detail='No output captured' + fi + fi + + printf '%s\t%s\t%s\t%s\n' "$name" "$exit_code" "$verdict" "$detail" +} + +prepare_fixtures + +RESULTS_TSV="$WORKDIR/results.tsv" +printf 'command\texit\tverdict\tdetail\n' >"$RESULTS_TSV" + +while IFS=$'\t' read -r name path_string args_string output_rel verifier; do + [[ -n "$name" ]] || continue + + read -r -a path_parts <<<"$path_string" + IFS=$'\x1f' read -r -a raw_args <<<"$args_string" + + resolved_args=() + for arg in "${raw_args[@]}"; do + resolved_args+=("$(resolve_placeholder "$arg")") + done + + run_case "$name" "$OUTDIR/$output_rel" "$verifier" \ + "${path_parts[@]}" \ + "${resolved_args[@]}" \ + --out "$OUTDIR/$output_rel" \ + >>"$RESULTS_TSV" +done < <( + node --input-type=module <<'NODE' +import { intentSmokeCases } from './packages/node/test/support/intentSmokeCases.ts' + +for (const smokeCase of intentSmokeCases) { + console.log([ + smokeCase.paths.join('-'), + smokeCase.paths.join(' '), + smokeCase.args.join('\x1f'), + smokeCase.outputPath, + smokeCase.verifier, + ].join('\t')) +} + +for (const smokeCase of [ + { + name: 'image-describe-labels', + paths: ['image', 'describe'], + args: ['--input', '@fixture/input.jpg', '--fields', 'labels'], + outputPath: 'image-describe-labels.json', + verifier: 'image-describe-labels', + }, + { + name: 'image-describe-wordpress', + paths: ['image', 
'describe'], + args: ['--input', '@fixture/input.jpg', '--for', 'wordpress'], + outputPath: 'image-describe-wordpress.json', + verifier: 'image-describe-wordpress', + }, +]) { + console.log([ + smokeCase.name, + smokeCase.paths.join(' '), + smokeCase.args.join('\x1f'), + smokeCase.outputPath, + smokeCase.verifier, + ].join('\t')) +} +NODE +) + +column -t -s $'\t' "$RESULTS_TSV" + +if awk -F '\t' 'NR > 1 && $3 != "OK" { exit 1 }' "$RESULTS_TSV"; then + echo + echo "All intent commands passed. Fixtures, outputs, and logs are in $WORKDIR" +else + echo + echo "One or more intent commands failed. Inspect $LOGDIR for details." >&2 + exit 1 +fi diff --git a/packages/node/src/Transloadit.ts b/packages/node/src/Transloadit.ts index 5878b93a..18ad3ef8 100644 --- a/packages/node/src/Transloadit.ts +++ b/packages/node/src/Transloadit.ts @@ -68,12 +68,11 @@ export { TimeoutError, UploadError, } from 'got' -export type { AssemblyStatus } from './alphalib/types/assemblyStatus.ts' -export * from './apiTypes.ts' -export { InconsistentResponseError, ApiError } export { extractFieldNamesFromTemplate } from './alphalib/stepParsing.ts' // Builtin templates replace the legacy golden template helpers. 
export { mergeTemplateContent } from './alphalib/templateMerge.ts' +export type { AssemblyStatus } from './alphalib/types/assemblyStatus.ts' +export * from './apiTypes.ts' export type { Base64Strategy, InputFile, @@ -93,6 +92,7 @@ export type { RobotParamHelp, } from './robots.ts' export { getRobotHelp, isKnownRobot, listRobots } from './robots.ts' +export { ApiError, InconsistentResponseError } const log = debug('transloadit') const logWarn = debug('transloadit:warn') diff --git a/packages/node/src/alphalib/types/robots/ai-chat.ts b/packages/node/src/alphalib/types/robots/ai-chat.ts index af2bc783..7a92b061 100644 --- a/packages/node/src/alphalib/types/robots/ai-chat.ts +++ b/packages/node/src/alphalib/types/robots/ai-chat.ts @@ -148,6 +148,7 @@ export const meta: RobotMetaInput = { export const MODEL_CAPABILITIES: Record = { 'anthropic/claude-4-sonnet-20250514': { pdf: true, image: true }, 'anthropic/claude-4-opus-20250514': { pdf: true, image: true }, + 'anthropic/claude-sonnet-4-6': { pdf: true, image: true }, 'anthropic/claude-sonnet-4-5': { pdf: true, image: true }, 'anthropic/claude-opus-4-5': { pdf: true, image: true }, 'anthropic/claude-opus-4-6': { pdf: true, image: true }, diff --git a/packages/node/src/cli.ts b/packages/node/src/cli.ts index bdcd0b93..bf62dd17 100644 --- a/packages/node/src/cli.ts +++ b/packages/node/src/cli.ts @@ -32,13 +32,13 @@ export async function main(args = process.argv.slice(2)): Promise { } } -export function runCliWhenExecuted(): void { +export async function runCliWhenExecuted(): Promise { if (!shouldRunCli(process.argv[1])) return - void main().catch((error) => { + await main().catch((error) => { console.error((error as Error).message) process.exitCode = 1 }) } -runCliWhenExecuted() +await runCliWhenExecuted() diff --git a/packages/node/src/cli/commands/assemblies.ts b/packages/node/src/cli/commands/assemblies.ts index a3def35b..01542755 100644 --- a/packages/node/src/cli/commands/assemblies.ts +++ 
b/packages/node/src/cli/commands/assemblies.ts @@ -1,12 +1,13 @@ +import { randomUUID } from 'node:crypto' import EventEmitter from 'node:events' import fs from 'node:fs' import fsp from 'node:fs/promises' import path from 'node:path' import process from 'node:process' -import type { Readable, Writable } from 'node:stream' +import type { Readable } from 'node:stream' +import { Writable } from 'node:stream' import { pipeline } from 'node:stream/promises' import { setTimeout as delay } from 'node:timers/promises' -import tty from 'node:tty' import { promisify } from 'node:util' import { Command, Option } from 'clipanion' import got from 'got' @@ -15,15 +16,30 @@ import * as t from 'typanion' import { z } from 'zod' import { formatLintIssue } from '../../alphalib/assembly-linter.lang.en.ts' import { tryCatch } from '../../alphalib/tryCatch.ts' -import type { Steps, StepsInput } from '../../alphalib/types/template.ts' -import { stepsSchema } from '../../alphalib/types/template.ts' +import type { StepsInput } from '../../alphalib/types/template.ts' import type { CreateAssemblyParams, ReplayAssemblyParams } from '../../apiTypes.ts' +import { ensureUniqueCounterValue } from '../../ensureUniqueCounter.ts' import type { LintFatalLevel } from '../../lintAssemblyInstructions.ts' import { lintAssemblyInstructions } from '../../lintAssemblyInstructions.ts' import type { CreateAssemblyOptions, Transloadit } from '../../Transloadit.ts' import { lintingExamples } from '../docs/assemblyLintingExamples.ts' -import { createReadStream, formatAPIError, readCliInput, streamToBuffer } from '../helpers.ts' +import { + concurrencyOption, + deleteAfterProcessingOption, + inputPathsOption, + recursiveOption, + reprocessStaleOption, + singleAssemblyOption, + validateSharedFileProcessingOptions, + watchOption, +} from '../fileProcessingOptions.ts' +import { formatAPIError, readCliInput } from '../helpers.ts' import type { IOutputCtl } from '../OutputCtl.ts' +import type { 
AssemblyResultEntryLike, NormalizedAssemblyResultFile } from '../resultFiles.ts' +import { flattenAssemblyResultFiles } from '../resultFiles.ts' +import type { ResultUrlRow } from '../resultUrls.ts' +import { collectResultUrlRows, printResultUrls } from '../resultUrls.ts' +import { readStepsInputFile } from '../stepsInput.ts' import { ensureError, isErrnoException } from '../types.ts' import { AuthenticatedCommand, UnauthenticatedCommand } from './BaseCommand.ts' @@ -148,13 +164,7 @@ export async function replay( ): Promise { if (steps) { try { - const buf = await streamToBuffer(createReadStream(steps)) - const parsed: unknown = JSON.parse(buf.toString()) - const validated = stepsSchema.safeParse(parsed) - if (!validated.success) { - throw new Error(`Invalid steps format: ${validated.error.message}`) - } - await apiCall(validated.data) + await apiCall(await readStepsInputFile(steps)) } catch (err) { const error = ensureError(err) output.error(error.message) @@ -163,14 +173,13 @@ export async function replay( await apiCall() } - async function apiCall(stepsOverride?: Steps): Promise { + async function apiCall(stepsOverride?: StepsInput): Promise { const promises = assemblies.map(async (assembly) => { const [err] = await tryCatch( client.replayAssembly(assembly, { reparse_template: reparse ? 
1 : 0, fields, notify_url, - // Steps (validated) is assignable to StepsInput at runtime; cast for TS steps: stepsOverride as ReplayAssemblyParams['steps'], }), ) @@ -298,49 +307,43 @@ async function getNodeWatch(): Promise { const stdinWithPath = process.stdin as unknown as { path: string } stdinWithPath.path = '/dev/stdin' -interface OutStream extends Writable { +interface OutputPlan { + mtime: Date path?: string - mtime?: Date } interface Job { - in: Readable | null - out: OutStream | null + inputPath: string | null + out: OutputPlan | null } -type OutstreamProvider = (inpath: string | null, indir?: string) => Promise - -interface StreamRegistry { - [key: string]: OutStream | undefined -} +type OutputPlanProvider = (inpath: string | null, indir?: string) => Promise interface JobEmitterOptions { + allowOutputCollisions?: boolean recursive?: boolean - outstreamProvider: OutstreamProvider - streamRegistry: StreamRegistry + outputPlanProvider: OutputPlanProvider + singleAssembly?: boolean watch?: boolean reprocessStale?: boolean } interface ReaddirJobEmitterOptions { dir: string - streamRegistry: StreamRegistry recursive?: boolean - outstreamProvider: OutstreamProvider + outputPlanProvider: OutputPlanProvider topdir?: string } interface SingleJobEmitterOptions { file: string - streamRegistry: StreamRegistry - outstreamProvider: OutstreamProvider + outputPlanProvider: OutputPlanProvider } interface WatchJobEmitterOptions { file: string - streamRegistry: StreamRegistry recursive?: boolean - outstreamProvider: OutstreamProvider + outputPlanProvider: OutputPlanProvider } interface StatLike { @@ -360,7 +363,46 @@ async function myStat( return await fsp.stat(filepath) } -function dirProvider(output: string): OutstreamProvider { +function getJobInputPath(filepath: string): string { + const normalizedFile = path.normalize(filepath) + if (normalizedFile === '-') { + return stdinWithPath.path + } + + return normalizedFile +} + +function createInputUploadStream(filepath: 
string): Readable { + const instream = fs.createReadStream(filepath) + // Attach a no-op error handler to prevent unhandled errors if stream is destroyed + // before being consumed (e.g., due to output collision detection) + instream.on('error', () => {}) + return instream +} + +function createOutputPlan(pathname: string | undefined, mtime: Date): OutputPlan { + if (pathname == null) { + return { + mtime, + } + } + + return { + mtime, + path: pathname, + } +} + +async function createExistingPathOutputPlan(outputPath: string | undefined): Promise { + if (outputPath == null) { + return createOutputPlan(undefined, new Date(0)) + } + + const [, stats] = await tryCatch(fsp.stat(outputPath)) + return createOutputPlan(outputPath, stats?.mtime ?? new Date(0)) +} + +function dirProvider(output: string): OutputPlanProvider { return async (inpath, indir = process.cwd()) => { // Inputless assemblies can still write into a directory, but output paths are derived from // assembly results rather than an input file path (handled later). @@ -374,41 +416,375 @@ function dirProvider(output: string): OutstreamProvider { let relpath = path.relative(indir, inpath) relpath = relpath.replace(/^(\.\.\/)+/, '') const outpath = path.join(output, relpath) - const outdir = path.dirname(outpath) - - await fsp.mkdir(outdir, { recursive: true }) - const [, stats] = await tryCatch(fsp.stat(outpath)) - const mtime = stats?.mtime ?? 
new Date(0) - const outstream = fs.createWriteStream(outpath) as OutStream - // Attach a no-op error handler to prevent unhandled errors if stream is destroyed - // before being consumed (e.g., due to output collision detection) - outstream.on('error', () => {}) - outstream.mtime = mtime - return outstream + return await createExistingPathOutputPlan(outpath) } } -function fileProvider(output: string): OutstreamProvider { - const dirExistsP = fsp.mkdir(path.dirname(output), { recursive: true }) +function fileProvider(output: string): OutputPlanProvider { return async (_inpath) => { - await dirExistsP - if (output === '-') return process.stdout as OutStream - - const [, stats] = await tryCatch(fsp.stat(output)) - const mtime = stats?.mtime ?? new Date(0) - const outstream = fs.createWriteStream(output) as OutStream - // Attach a no-op error handler to prevent unhandled errors if stream is destroyed - // before being consumed (e.g., due to output collision detection) - outstream.on('error', () => {}) - outstream.mtime = mtime - return outstream + if (output === '-') { + return await createExistingPathOutputPlan(undefined) + } + + return await createExistingPathOutputPlan(output) } } -function nullProvider(): OutstreamProvider { +function nullProvider(): OutputPlanProvider { return async (_inpath) => null } +async function downloadResultToFile( + resultUrl: string, + outPath: string, + signal: AbortSignal, +): Promise { + await fsp.mkdir(path.dirname(outPath), { recursive: true }) + + const tempPath = path.join( + path.dirname(outPath), + `.${path.basename(outPath)}.${randomUUID()}.tmp`, + ) + const outStream = fs.createWriteStream(tempPath) + outStream.on('error', () => {}) + + const [dlErr] = await tryCatch(pipeline(got.stream(resultUrl, { signal }), outStream)) + if (dlErr) { + await fsp.rm(tempPath, { force: true }) + throw dlErr + } + + await fsp.rename(tempPath, outPath) +} + +async function downloadResultToStdout(resultUrl: string, signal: AbortSignal): Promise 
{ + const stdoutStream = new Writable({ + write(chunk, _encoding, callback) { + let settled = false + + const finish = (err?: Error | null) => { + if (settled) return + settled = true + process.stdout.off('drain', onDrain) + process.stdout.off('error', onError) + callback(err ?? undefined) + } + + const onDrain = () => finish() + const onError = (err: Error) => finish(err) + + process.stdout.once('error', onError) + + try { + if (process.stdout.write(chunk)) { + finish() + return + } + + process.stdout.once('drain', onDrain) + } catch (err) { + finish(ensureError(err)) + } + }, + final(callback) { + callback() + }, + }) + + await pipeline(got.stream(resultUrl, { signal }), stdoutStream) +} + +function sanitizeResultName(value: string): string { + const base = path.basename(value) + return base.replaceAll('\\', '_').replaceAll('/', '_').replaceAll('\u0000', '') +} + +async function ensureUniquePath(targetPath: string, reservedPaths: Set): Promise { + const parsed = path.parse(targetPath) + return await ensureUniqueCounterValue({ + initialValue: targetPath, + isTaken: async (candidate) => { + if (reservedPaths.has(candidate)) { + return true + } + + const [statErr] = await tryCatch(fsp.stat(candidate)) + return statErr == null + }, + reserve: (candidate) => { + reservedPaths.add(candidate) + }, + nextValue: (counter) => path.join(parsed.dir, `${parsed.name}__${counter}${parsed.ext}`), + }) +} + +function flattenAssemblyResults(results: Record>): { + allFiles: NormalizedAssemblyResultFile[] + entries: Array<[string, Array]> +} { + return { + allFiles: flattenAssemblyResultFiles(results), + entries: Object.entries(results), + } +} + +function getResultFileName(file: NormalizedAssemblyResultFile): string { + return sanitizeResultName(file.name) +} + +interface AssemblyDownloadTarget { + resultUrl: string + targetPath: string | null +} + +const STALE_OUTPUT_GRACE_MS = 1000 + +function isMeaningfullyNewer(newer: Date, older: Date): boolean { + return newer.getTime() - 
older.getTime() > STALE_OUTPUT_GRACE_MS +} + +async function buildDirectoryDownloadTargets({ + allFiles, + baseDir, + groupByStep, +}: { + allFiles: NormalizedAssemblyResultFile[] + baseDir: string + groupByStep: boolean +}): Promise { + await fsp.mkdir(baseDir, { recursive: true }) + + const targets: AssemblyDownloadTarget[] = [] + const reservedPaths = new Set() + for (const resultFile of allFiles) { + const targetDir = groupByStep ? path.join(baseDir, resultFile.stepName) : baseDir + await fsp.mkdir(targetDir, { recursive: true }) + + targets.push({ + resultUrl: resultFile.url, + targetPath: await ensureUniquePath( + path.join(targetDir, getResultFileName(resultFile)), + reservedPaths, + ), + }) + } + + return targets +} + +function getSingleResultDownloadTarget( + allFiles: NormalizedAssemblyResultFile[], + targetPath: string | null, +): AssemblyDownloadTarget[] { + const first = allFiles[0] + const resultUrl = first?.url ?? null + if (resultUrl == null) { + return [] + } + + return [{ resultUrl, targetPath }] +} + +async function resolveResultDownloadTargets({ + allFiles, + entries, + hasDirectoryInput, + inPath, + inputs, + outputMode, + outputPath, + outputRoot, + outputRootIsDirectory, + singleAssembly, +}: { + allFiles: NormalizedAssemblyResultFile[] + entries: Array<[string, Array]> + hasDirectoryInput: boolean + inPath: string | null + inputs: string[] + outputMode?: 'directory' | 'file' + outputPath: string | null + outputRoot: string + outputRootIsDirectory: boolean + singleAssembly?: boolean +}): Promise { + const shouldGroupByInput = + !singleAssembly && inPath != null && (hasDirectoryInput || inputs.length > 1) + + const resolveDirectoryBaseDir = (): string => { + if (!shouldGroupByInput || inPath == null) { + return outputRoot + } + + if (hasDirectoryInput && outputPath != null) { + const mappedRelative = path.relative(outputRoot, outputPath) + const mappedDir = path.dirname(mappedRelative) + const mappedStem = path.parse(mappedRelative).name + 
return path.join(outputRoot, mappedDir === '.' ? '' : mappedDir, mappedStem) + } + + return path.join(outputRoot, path.parse(path.basename(inPath)).name) + } + + if (!outputRootIsDirectory) { + if (allFiles.length > 1) { + if (outputPath == null) { + throw new Error('stdout can only receive a single result file') + } + + throw new Error('file outputs can only receive a single result file') + } + + return getSingleResultDownloadTarget(allFiles, outputPath) + } + + if (singleAssembly) { + return await buildDirectoryDownloadTargets({ + allFiles, + baseDir: outputRoot, + groupByStep: false, + }) + } + + if (outputMode === 'directory' || outputPath == null) { + return await buildDirectoryDownloadTargets({ + allFiles, + baseDir: resolveDirectoryBaseDir(), + groupByStep: entries.length > 1, + }) + } + + if (allFiles.length === 1) { + return getSingleResultDownloadTarget(allFiles, outputPath) + } + + return await buildDirectoryDownloadTargets({ + allFiles, + baseDir: path.join(path.dirname(outputPath), path.parse(outputPath).name), + groupByStep: true, + }) +} + +async function shouldSkipStaleOutput({ + inputPaths, + outputPath, + outputPlanMtime, + outputRootIsDirectory, + reprocessStale, + singleInputReference = 'output-plan', +}: { + inputPaths: string[] + outputPath: string | null + outputPlanMtime: Date + outputRootIsDirectory: boolean + reprocessStale?: boolean + singleInputReference?: 'input' | 'output-plan' +}): Promise { + if (reprocessStale || outputPath == null || outputRootIsDirectory) { + return false + } + + if (inputPaths.length === 0 || inputPaths.some((inputPath) => inputPath === stdinWithPath.path)) { + return false + } + + const [outputErr, outputStat] = await tryCatch(fsp.stat(outputPath)) + if (outputErr != null || outputStat == null) { + return false + } + + if (inputPaths.length === 1) { + if (singleInputReference === 'output-plan') { + return isMeaningfullyNewer(outputStat.mtime, outputPlanMtime) + } + + const [inputErr, inputStat] = await 
tryCatch(fsp.stat(inputPaths[0])) + if (inputErr != null || inputStat == null) { + return false + } + + return isMeaningfullyNewer(outputStat.mtime, inputStat.mtime) + } + + const inputStats = await Promise.all( + inputPaths.map(async (inputPath) => { + const [inputErr, inputStat] = await tryCatch(fsp.stat(inputPath)) + if (inputErr != null || inputStat == null) { + return null + } + return inputStat + }), + ) + + if (inputStats.some((inputStat) => inputStat == null)) { + return false + } + + return inputStats.every((inputStat) => { + return inputStat != null && isMeaningfullyNewer(outputStat.mtime, inputStat.mtime) + }) +} + +async function materializeAssemblyResults({ + abortSignal, + hasDirectoryInput, + inPath, + inputs, + outputMode, + outputPath, + outputRoot, + outputRootIsDirectory, + outputctl, + results, + singleAssembly, +}: { + abortSignal: AbortSignal + hasDirectoryInput: boolean + inPath: string | null + inputs: string[] + outputMode?: 'directory' | 'file' + outputPath: string | null + outputRoot: string | null + outputRootIsDirectory: boolean + outputctl: IOutputCtl + results: Record> + singleAssembly?: boolean +}): Promise { + if (outputRoot == null) { + return + } + + const { allFiles, entries } = flattenAssemblyResults(results) + const targets = await resolveResultDownloadTargets({ + allFiles, + entries, + hasDirectoryInput, + inPath, + inputs, + outputMode, + outputPath, + outputRoot, + outputRootIsDirectory, + singleAssembly, + }) + + for (const { resultUrl, targetPath } of targets) { + outputctl.debug('DOWNLOADING') + const [dlErr] = await tryCatch( + targetPath == null + ? 
downloadResultToStdout(resultUrl, abortSignal) + : downloadResultToFile(resultUrl, targetPath, abortSignal), + ) + if (dlErr) { + if (dlErr.name === 'AbortError') { + continue + } + outputctl.error(dlErr.message) + throw dlErr + } + } +} + class MyEventEmitter extends EventEmitter { protected hasEnded: boolean @@ -428,29 +804,25 @@ class MyEventEmitter extends EventEmitter { } class ReaddirJobEmitter extends MyEventEmitter { - constructor({ - dir, - streamRegistry, - recursive, - outstreamProvider, - topdir = dir, - }: ReaddirJobEmitterOptions) { + constructor({ dir, recursive, outputPlanProvider, topdir = dir }: ReaddirJobEmitterOptions) { super() process.nextTick(() => { - this.processDirectory({ dir, streamRegistry, recursive, outstreamProvider, topdir }).catch( - (err) => { - this.emit('error', err) - }, - ) + this.processDirectory({ + dir, + recursive, + outputPlanProvider, + topdir, + }).catch((err) => { + this.emit('error', err) + }) }) } private async processDirectory({ dir, - streamRegistry, recursive, - outstreamProvider, + outputPlanProvider, topdir, }: ReaddirJobEmitterOptions & { topdir: string }): Promise { const files = await fsp.readdir(dir) @@ -459,9 +831,7 @@ class ReaddirJobEmitter extends MyEventEmitter { for (const filename of files) { const file = path.normalize(path.join(dir, filename)) - pendingOperations.push( - this.processFile({ file, streamRegistry, recursive, outstreamProvider, topdir }), - ) + pendingOperations.push(this.processFile({ file, recursive, outputPlanProvider, topdir })) } await Promise.all(pendingOperations) @@ -470,15 +840,13 @@ class ReaddirJobEmitter extends MyEventEmitter { private async processFile({ file, - streamRegistry, recursive = false, - outstreamProvider, + outputPlanProvider, topdir, }: { file: string - streamRegistry: StreamRegistry recursive?: boolean - outstreamProvider: OutstreamProvider + outputPlanProvider: OutputPlanProvider topdir: string }): Promise { const stats = await fsp.stat(file) @@ -488,9 
+856,8 @@ class ReaddirJobEmitter extends MyEventEmitter { await new Promise((resolve, reject) => { const subdirEmitter = new ReaddirJobEmitter({ dir: file, - streamRegistry, recursive, - outstreamProvider, + outputPlanProvider, topdir, }) subdirEmitter.on('job', (job: Job) => this.emit('job', job)) @@ -499,67 +866,51 @@ class ReaddirJobEmitter extends MyEventEmitter { }) } } else { - const existing = streamRegistry[file] - if (existing) existing.end() - const outstream = await outstreamProvider(file, topdir) - streamRegistry[file] = outstream ?? undefined - const instream = fs.createReadStream(file) - // Attach a no-op error handler to prevent unhandled errors if stream is destroyed - // before being consumed (e.g., due to output collision detection) - instream.on('error', () => {}) - this.emit('job', { in: instream, out: outstream }) + const outputPlan = await outputPlanProvider(file, topdir) + this.emit('job', { inputPath: getJobInputPath(file), out: outputPlan }) } } } class SingleJobEmitter extends MyEventEmitter { - constructor({ file, streamRegistry, outstreamProvider }: SingleJobEmitterOptions) { + constructor({ file, outputPlanProvider }: SingleJobEmitterOptions) { super() const normalizedFile = path.normalize(file) - const existing = streamRegistry[normalizedFile] - if (existing) existing.end() - outstreamProvider(normalizedFile).then((outstream) => { - streamRegistry[normalizedFile] = outstream ?? 
undefined - - let instream: Readable | null - if (normalizedFile === '-') { - if (tty.isatty(process.stdin.fd)) { - instream = null - } else { - instream = process.stdin - } - } else { - instream = fs.createReadStream(normalizedFile) - // Attach a no-op error handler to prevent unhandled errors if stream is destroyed - // before being consumed (e.g., due to output collision detection) - instream.on('error', () => {}) - } - - process.nextTick(() => { - this.emit('job', { in: instream, out: outstream }) - this.emit('end') + outputPlanProvider(normalizedFile) + .then((outputPlan) => { + process.nextTick(() => { + this.emit('job', { inputPath: getJobInputPath(normalizedFile), out: outputPlan }) + this.emit('end') + }) + }) + .catch((err: unknown) => { + process.nextTick(() => { + this.emit('error', ensureError(err)) + }) }) - }) } } class InputlessJobEmitter extends MyEventEmitter { - constructor({ - outstreamProvider, - }: { streamRegistry: StreamRegistry; outstreamProvider: OutstreamProvider }) { + constructor({ outputPlanProvider }: { outputPlanProvider: OutputPlanProvider }) { super() process.nextTick(() => { - outstreamProvider(null).then((outstream) => { - try { - this.emit('job', { in: null, out: outstream }) - } catch (err) { - this.emit('error', err) - } + outputPlanProvider(null) + .then((outputPlan) => { + try { + this.emit('job', { inputPath: null, out: outputPlan }) + } catch (err) { + this.emit('error', ensureError(err)) + return + } - this.emit('end') - }) + this.emit('end') + }) + .catch((err: unknown) => { + this.emit('error', ensureError(err)) + }) }) } } @@ -574,10 +925,10 @@ class NullJobEmitter extends MyEventEmitter { class WatchJobEmitter extends MyEventEmitter { private watcher: NodeWatcher | null = null - constructor({ file, streamRegistry, recursive, outstreamProvider }: WatchJobEmitterOptions) { + constructor({ file, recursive, outputPlanProvider }: WatchJobEmitterOptions) { super() - this.init({ file, streamRegistry, recursive, 
outstreamProvider }).catch((err) => { + this.init({ file, recursive, outputPlanProvider }).catch((err) => { this.emit('error', err) }) @@ -597,9 +948,8 @@ class WatchJobEmitter extends MyEventEmitter { private async init({ file, - streamRegistry, recursive, - outstreamProvider, + outputPlanProvider, }: WatchJobEmitterOptions): Promise { const stats = await fsp.stat(file) const topdir = stats.isDirectory() ? file : undefined @@ -614,7 +964,7 @@ class WatchJobEmitter extends MyEventEmitter { this.watcher.on('close', () => this.emit('end')) this.watcher.on('change', (_evt: string, filename: string) => { const normalizedFile = path.normalize(filename) - this.handleChange(normalizedFile, topdir, streamRegistry, outstreamProvider).catch((err) => { + this.handleChange(normalizedFile, topdir, outputPlanProvider).catch((err) => { this.emit('error', err) }) }) @@ -623,23 +973,13 @@ class WatchJobEmitter extends MyEventEmitter { private async handleChange( normalizedFile: string, topdir: string | undefined, - streamRegistry: StreamRegistry, - outstreamProvider: OutstreamProvider, + outputPlanProvider: OutputPlanProvider, ): Promise { const stats = await fsp.stat(normalizedFile) if (stats.isDirectory()) return - const existing = streamRegistry[normalizedFile] - if (existing) existing.end() - - const outstream = await outstreamProvider(normalizedFile, topdir) - streamRegistry[normalizedFile] = outstream ?? 
undefined - - const instream = fs.createReadStream(normalizedFile) - // Attach a no-op error handler to prevent unhandled errors if stream is destroyed - // before being consumed (e.g., due to output collision detection) - instream.on('error', () => {}) - this.emit('job', { in: instream, out: outstream }) + const outputPlan = await outputPlanProvider(normalizedFile, topdir) + this.emit('job', { inputPath: getJobInputPath(normalizedFile), out: outputPlan }) } } @@ -697,12 +1037,16 @@ function detectConflicts(jobEmitter: EventEmitter): MyEventEmitter { jobEmitter.on('end', () => emitter.emit('end')) jobEmitter.on('error', (err: Error) => emitter.emit('error', err)) jobEmitter.on('job', (job: Job) => { - if (job.in == null || job.out == null) { + if (job.inputPath == null || job.out == null) { + emitter.emit('job', job) + return + } + const inPath = job.inputPath + const outPath = job.out.path + if (outPath == null) { emitter.emit('job', job) return } - const inPath = (job.in as fs.ReadStream).path as string - const outPath = job.out.path as string if (Object.hasOwn(outfileAssociations, outPath) && outfileAssociations[outPath] !== inPath) { emitter.emit( 'error', @@ -724,12 +1068,12 @@ function dismissStaleJobs(jobEmitter: EventEmitter): MyEventEmitter { jobEmitter.on('end', () => Promise.all(pendingChecks).then(() => emitter.emit('end'))) jobEmitter.on('error', (err: Error) => emitter.emit('error', err)) jobEmitter.on('job', (job: Job) => { - if (job.in == null || job.out == null) { + if (job.inputPath == null || job.out == null) { emitter.emit('job', job) return } - const inPath = (job.in as fs.ReadStream).path as string + const inPath = job.inputPath const checkPromise = fsp .stat(inPath) .then((stats) => { @@ -747,12 +1091,23 @@ function dismissStaleJobs(jobEmitter: EventEmitter): MyEventEmitter { return emitter } +function passthroughJobs(jobEmitter: EventEmitter): MyEventEmitter { + const emitter = new MyEventEmitter() + + jobEmitter.on('end', () => 
emitter.emit('end')) + jobEmitter.on('error', (err: Error) => emitter.emit('error', err)) + jobEmitter.on('job', (job: Job) => emitter.emit('job', job)) + + return emitter +} + function makeJobEmitter( inputs: string[], { + allowOutputCollisions, recursive, - outstreamProvider, - streamRegistry, + outputPlanProvider, + singleAssembly, watch: watchOption, reprocessStale, }: JobEmitterOptions, @@ -765,35 +1120,43 @@ function makeJobEmitter( async function processInputs(): Promise { for (const input of inputs) { if (input === '-') { - emitterFns.push( - () => new SingleJobEmitter({ file: input, outstreamProvider, streamRegistry }), - ) + emitterFns.push(() => new SingleJobEmitter({ file: input, outputPlanProvider })) watcherFns.push(() => new NullJobEmitter()) } else { const stats = await fsp.stat(input) if (stats.isDirectory()) { emitterFns.push( () => - new ReaddirJobEmitter({ dir: input, recursive, outstreamProvider, streamRegistry }), + new ReaddirJobEmitter({ + dir: input, + recursive, + outputPlanProvider, + }), ) watcherFns.push( () => - new WatchJobEmitter({ file: input, recursive, outstreamProvider, streamRegistry }), + new WatchJobEmitter({ + file: input, + recursive, + outputPlanProvider, + }), ) } else { - emitterFns.push( - () => new SingleJobEmitter({ file: input, outstreamProvider, streamRegistry }), - ) + emitterFns.push(() => new SingleJobEmitter({ file: input, outputPlanProvider })) watcherFns.push( () => - new WatchJobEmitter({ file: input, recursive, outstreamProvider, streamRegistry }), + new WatchJobEmitter({ + file: input, + recursive, + outputPlanProvider, + }), ) } } } if (inputs.length === 0) { - emitterFns.push(() => new InputlessJobEmitter({ outstreamProvider, streamRegistry })) + emitterFns.push(() => new InputlessJobEmitter({ outputPlanProvider })) } startEmitting() @@ -818,14 +1181,18 @@ function makeJobEmitter( emitter.emit('error', err) }) - const stalefilter = reprocessStale ? 
(x: EventEmitter) => x as MyEventEmitter : dismissStaleJobs - return stalefilter(detectConflicts(emitter)) + const conflictFilter = allowOutputCollisions ? passthroughJobs : detectConflicts + const staleFilter = reprocessStale || singleAssembly ? passthroughJobs : dismissStaleJobs + + return staleFilter(conflictFilter(emitter)) } export interface AssembliesCreateOptions { steps?: string + stepsData?: StepsInput template?: string fields?: Record + outputMode?: 'directory' | 'file' watch?: boolean recursive?: boolean inputs: string[] @@ -844,8 +1211,10 @@ export async function create( client: Transloadit, { steps, + stepsData, template, fields, + outputMode, watch: watchOption, recursive, inputs, @@ -855,35 +1224,18 @@ export async function create( singleAssembly, concurrency = DEFAULT_CONCURRENCY, }: AssembliesCreateOptions, -): Promise<{ results: unknown[]; hasFailures: boolean }> { +): Promise<{ resultUrls: ResultUrlRow[]; results: unknown[]; hasFailures: boolean }> { // Quick fix for https://github.com/transloadit/transloadify/issues/13 // Only default to stdout when output is undefined (not provided), not when explicitly null let resolvedOutput = output if (resolvedOutput === undefined && !process.stdout.isTTY) resolvedOutput = '-' // Read steps file async before entering the Promise constructor - // We use StepsInput (the input type) rather than Steps (the transformed output type) + // We use StepsInput (the input type) rather than the transformed output type // to avoid zod adding default values that the API may reject - let stepsData: StepsInput | undefined + let effectiveStepsData = stepsData if (steps) { - const stepsContent = await fsp.readFile(steps, 'utf8') - const parsed: unknown = JSON.parse(stepsContent) - // Basic structural validation: must be an object with step names as keys - if (parsed == null || typeof parsed !== 'object' || Array.isArray(parsed)) { - throw new Error('Invalid steps format: expected an object with step names as keys') - } - // 
Validate each step has a robot field - for (const [stepName, step] of Object.entries(parsed)) { - if (step == null || typeof step !== 'object' || Array.isArray(step)) { - throw new Error(`Invalid steps format: step '${stepName}' must be an object`) - } - if (!('robot' in step) || typeof (step as Record).robot !== 'string') { - throw new Error( - `Invalid steps format: step '${stepName}' must have a 'robot' string property`, - ) - } - } - stepsData = parsed as StepsInput + effectiveStepsData = await readStepsInputFile(steps) } // Determine output stat async before entering the Promise constructor @@ -891,9 +1243,19 @@ export async function create( if (resolvedOutput != null) { const [err, stat] = await tryCatch(myStat(process.stdout, resolvedOutput)) if (err && (!isErrnoException(err) || err.code !== 'ENOENT')) throw err - outstat = stat ?? { isDirectory: () => false } + outstat = + stat ?? + ({ + isDirectory: () => outputMode === 'directory', + } satisfies StatLike) + + if (outputMode === 'directory' && stat != null && !stat.isDirectory()) { + const msg = 'Output must be a directory for this command' + outputctl.error(msg) + throw new Error(msg) + } - if (!outstat.isDirectory() && inputs.length !== 0) { + if (!outstat.isDirectory() && inputs.length !== 0 && !singleAssembly) { const firstInput = inputs[0] if (firstInput) { const firstInputStat = await myStat(process.stdin, firstInput) @@ -906,248 +1268,196 @@ export async function create( } } + const inputStats = await Promise.all( + inputs.map(async (input) => { + if (input === '-') return null + return await myStat(process.stdin, input) + }), + ) + const hasDirectoryInput = inputStats.some((stat) => stat?.isDirectory() === true) + return new Promise((resolve, reject) => { const params: CreateAssemblyParams = ( - stepsData ? { steps: stepsData as CreateAssemblyParams['steps'] } : { template_id: template } + effectiveStepsData + ? 
{ steps: effectiveStepsData as CreateAssemblyParams['steps'] } + : { template_id: template } ) as CreateAssemblyParams if (fields) { params.fields = fields } - const outstreamProvider: OutstreamProvider = + const outputPlanProvider: OutputPlanProvider = resolvedOutput == null ? nullProvider() : outstat?.isDirectory() ? dirProvider(resolvedOutput) : fileProvider(resolvedOutput) - const streamRegistry: StreamRegistry = {} const emitter = makeJobEmitter(inputs, { + allowOutputCollisions: singleAssembly, + outputPlanProvider, recursive, watch: watchOption, - outstreamProvider, - streamRegistry, + singleAssembly, reprocessStale, }) // Use p-queue for concurrency management const queue = new PQueue({ concurrency }) const results: unknown[] = [] + const resultUrls: ResultUrlRow[] = [] let hasFailures = false // AbortController to cancel all in-flight createAssembly calls when an error occurs const abortController = new AbortController() + const outputRootIsDirectory = Boolean(resolvedOutput != null && outstat?.isDirectory()) - // Helper to process a single assembly job - async function processAssemblyJob( - inPath: string | null, - outPath: string | null, - outMtime: Date | undefined, - ): Promise { - outputctl.debug(`PROCESSING JOB ${inPath ?? 'null'} ${outPath ?? 'null'}`) - - // Create fresh streams for this job - const inStream = inPath ? fs.createReadStream(inPath) : null - inStream?.on('error', () => {}) - - let superceded = false - // When writing to a file path (non-directory output), we treat finish as a supersede signal. - // Directory-output multi-download mode does not use a single shared outstream. 
- const markSupersededOnFinish = (stream: OutStream) => { - stream.on('finish', () => { - superceded = true - }) - } - + function createAssemblyOptions(uploads?: Record): CreateAssemblyOptions { const createOptions: CreateAssemblyOptions = { params, signal: abortController.signal, } - if (inStream != null) { - createOptions.uploads = { in: inStream } + if (uploads != null && Object.keys(uploads).length > 0) { + createOptions.uploads = uploads } + return createOptions + } + async function awaitCompletedAssembly(createOptions: CreateAssemblyOptions): Promise<{ + assembly: Awaited> + assemblyId: string + }> { const result = await client.createAssembly(createOptions) - if (superceded) return undefined - const assemblyId = result.assembly_id if (!assemblyId) throw new Error('No assembly_id in result') const assembly = await client.awaitAssemblyCompletion(assemblyId, { signal: abortController.signal, - onPoll: () => { - if (superceded) return false - return true - }, + onPoll: () => true, onAssemblyProgress: (status) => { outputctl.debug(`Assembly status: ${status.ok}`) }, }) - if (superceded) return undefined - if (assembly.error || (assembly.ok && assembly.ok !== 'ASSEMBLY_COMPLETED')) { const msg = `Assembly failed: ${assembly.error || assembly.message} (Status: ${assembly.ok})` outputctl.error(msg) throw new Error(msg) } - if (!assembly.results) throw new Error('No results in assembly') + return { assembly, assemblyId } + } - const outIsDirectory = Boolean(resolvedOutput != null && outstat?.isDirectory()) - const entries = Object.entries(assembly.results) - const allFiles: Array<{ - stepName: string - file: { name?: string; basename?: string; ext?: string; ssl_url?: string; url?: string } - }> = [] - for (const [stepName, stepResults] of entries) { - for (const file of stepResults as Array<{ - name?: string - basename?: string - ext?: string - ssl_url?: string - url?: string - }>) { - allFiles.push({ stepName, file }) - } + async function executeAssemblyLifecycle({ + 
createOptions, + inPath, + inputPaths, + outputPlan, + singleAssemblyMode, + }: { + createOptions: CreateAssemblyOptions + inPath: string | null + inputPaths: string[] + outputPlan: OutputPlan | null + singleAssemblyMode?: boolean + }): Promise { + outputctl.debug(`PROCESSING JOB ${inPath ?? 'null'} ${outputPlan?.path ?? 'null'}`) + + const { assembly, assemblyId } = await awaitCompletedAssembly(createOptions) + if (!assembly.results) throw new Error('No results in assembly') + resultUrls.push(...collectResultUrlRows({ assemblyId, results: assembly.results })) + + if ( + !singleAssemblyMode && + (await shouldSkipStaleOutput({ + inputPaths, + outputPath: outputPlan?.path ?? null, + outputPlanMtime: outputPlan?.mtime ?? new Date(0), + outputRootIsDirectory, + reprocessStale, + })) + ) { + outputctl.debug(`SKIPPED STALE RESULT ${inPath ?? 'null'} ${outputPlan?.path ?? 'null'}`) + return assembly } - const getFileUrl = (file: { ssl_url?: string; url?: string }): string | null => - file.ssl_url ?? file.url ?? null + await materializeAssemblyResults({ + abortSignal: abortController.signal, + hasDirectoryInput: singleAssemblyMode ? false : hasDirectoryInput, + inPath, + inputs: inputPaths, + outputMode, + outputPath: outputPlan?.path ?? null, + outputRoot: resolvedOutput ?? null, + outputRootIsDirectory, + outputctl, + results: assembly.results, + singleAssembly: singleAssemblyMode, + }) - const sanitizeName = (value: string): string => { - const base = path.basename(value) - return base.replaceAll('\\', '_').replaceAll('/', '_').replaceAll('\u0000', '') - } + outputctl.debug(`COMPLETED ${inPath ?? 'null'} ${outputPlan?.path ?? 
'null'}`) - const ensureUniquePath = async (targetPath: string): Promise => { - const parsed = path.parse(targetPath) - let candidate = targetPath - let counter = 1 - while (true) { - const [statErr] = await tryCatch(fsp.stat(candidate)) - if (statErr) return candidate - candidate = path.join(parsed.dir, `${parsed.name}__${counter}${parsed.ext}`) - counter += 1 - } - } - - if (resolvedOutput != null && !superceded) { - // Directory output: - // - For single-result, input-backed jobs, preserve existing behavior (write to mapped file path). - // - Otherwise (multi-result or inputless), download all results into a directory structure. - if (outIsDirectory && (inPath == null || allFiles.length !== 1 || outPath == null)) { - let baseDir = resolvedOutput - if (inPath != null) { - let relpath = path.relative(process.cwd(), inPath) - relpath = relpath.replace(/^(\.\.\/)+/, '') - baseDir = path.join(resolvedOutput, path.dirname(relpath), path.parse(relpath).name) - } - await fsp.mkdir(baseDir, { recursive: true }) - - for (const { stepName, file } of allFiles) { - const resultUrl = getFileUrl(file) - if (!resultUrl) continue - - const stepDir = path.join(baseDir, stepName) - await fsp.mkdir(stepDir, { recursive: true }) - - const rawName = - file.name ?? - (file.basename && file.ext ? `${file.basename}.${file.ext}` : undefined) ?? - `${stepName}_result` - const safeName = sanitizeName(rawName) - const targetPath = await ensureUniquePath(path.join(stepDir, safeName)) - - outputctl.debug('DOWNLOADING') - const outStream = fs.createWriteStream(targetPath) as OutStream - outStream.on('error', () => {}) - const [dlErr] = await tryCatch( - pipeline(got.stream(resultUrl, { signal: abortController.signal }), outStream), - ) - if (dlErr) { - if (dlErr.name === 'AbortError') continue - outputctl.error(dlErr.message) - throw dlErr - } - } - } else if (!outIsDirectory && outPath != null) { - const first = allFiles[0] - const resultUrl = first ? 
getFileUrl(first.file) : null - if (resultUrl) { - outputctl.debug('DOWNLOADING') - const outStream = fs.createWriteStream(outPath) as OutStream - outStream.on('error', () => {}) - outStream.mtime = outMtime - markSupersededOnFinish(outStream) - - const [dlErr] = await tryCatch( - pipeline(got.stream(resultUrl, { signal: abortController.signal }), outStream), - ) - if (dlErr) { - if (dlErr.name !== 'AbortError') { - outputctl.error(dlErr.message) - throw dlErr - } - } - } - } else if (outIsDirectory && outPath != null) { - // Single-result, input-backed job: preserve existing file mapping in outdir. - const first = allFiles[0] - const resultUrl = first ? getFileUrl(first.file) : null - if (resultUrl) { - outputctl.debug('DOWNLOADING') - const outStream = fs.createWriteStream(outPath) as OutStream - outStream.on('error', () => {}) - outStream.mtime = outMtime - markSupersededOnFinish(outStream) - - const [dlErr] = await tryCatch( - pipeline(got.stream(resultUrl, { signal: abortController.signal }), outStream), - ) - if (dlErr) { - if (dlErr.name !== 'AbortError') { - outputctl.error(dlErr.message) - throw dlErr - } - } + if (del) { + for (const inputPath of inputPaths) { + if (inputPath === stdinWithPath.path) { + continue } + await fsp.unlink(inputPath) } } + return assembly + } - outputctl.debug(`COMPLETED ${inPath ?? 'null'} ${outPath ?? 'null'}`) + // Helper to process a single assembly job + async function processAssemblyJob( + inPath: string | null, + outputPlan: OutputPlan | null, + ): Promise { + const inStream = inPath ? createInputUploadStream(inPath) : null - if (del && inPath) { - await fsp.unlink(inPath) - } - return assembly + return await executeAssemblyLifecycle({ + createOptions: createAssemblyOptions(inStream == null ? undefined : { in: inStream }), + inPath, + inputPaths: inPath == null ? 
[] : [inPath], + outputPlan, + }) } - if (singleAssembly) { - // Single-assembly mode: collect file paths, then create one assembly with all inputs - // We close streams immediately to avoid exhausting file descriptors with many files + function handleEmitterError(err: Error): void { + abortController.abort() + queue.clear() + outputctl.error(err) + reject(err) + } + + function runSingleAssemblyEmitter(): void { const collectedPaths: string[] = [] emitter.on('job', (job: Job) => { - if (job.in != null) { - const inPath = (job.in as fs.ReadStream).path as string + if (job.inputPath != null) { + const inPath = job.inputPath outputctl.debug(`COLLECTING JOB ${inPath}`) collectedPaths.push(inPath) - // Close the stream immediately to avoid file descriptor exhaustion - ;(job.in as fs.ReadStream).destroy() - outputctl.debug(`STREAM CLOSED ${inPath}`) } }) - emitter.on('error', (err: Error) => { - abortController.abort() - queue.clear() - outputctl.error(err) - reject(err) - }) - emitter.on('end', async () => { if (collectedPaths.length === 0) { - resolve({ results: [], hasFailures: false }) + resolve({ resultUrls, results: [], hasFailures: false }) + return + } + + if ( + await shouldSkipStaleOutput({ + inputPaths: collectedPaths, + outputPath: resolvedOutput ?? null, + outputPlanMtime: new Date(0), + outputRootIsDirectory, + reprocessStale, + singleInputReference: 'input', + }) + ) { + outputctl.debug(`SKIPPED STALE SINGLE ASSEMBLY ${resolvedOutput ?? 
'null'}`) + resolve({ resultUrls, results: [], hasFailures: false }) return } @@ -1156,13 +1466,14 @@ export async function create( const inputPaths: string[] = [] for (const inPath of collectedPaths) { const basename = path.basename(inPath) - let key = basename - let counter = 1 - while (key in uploads) { - key = `${path.parse(basename).name}_${counter}${path.parse(basename).ext}` - counter++ - } - uploads[key] = fs.createReadStream(inPath) + const key = await ensureUniqueCounterValue({ + initialValue: basename, + isTaken: (candidate) => candidate in uploads, + nextValue: (counter) => + `${path.parse(basename).name}_${counter}${path.parse(basename).ext}`, + reserve: () => {}, + }) + uploads[key] = createInputUploadStream(inPath) inputPaths.push(inPath) } @@ -1170,69 +1481,14 @@ export async function create( try { const assembly = await queue.add(async () => { - const createOptions: CreateAssemblyOptions = { - params, - signal: abortController.signal, - } - if (Object.keys(uploads).length > 0) { - createOptions.uploads = uploads - } - - const result = await client.createAssembly(createOptions) - const assemblyId = result.assembly_id - if (!assemblyId) throw new Error('No assembly_id in result') - - const asm = await client.awaitAssemblyCompletion(assemblyId, { - signal: abortController.signal, - onAssemblyProgress: (status) => { - outputctl.debug(`Assembly status: ${status.ok}`) - }, + return await executeAssemblyLifecycle({ + createOptions: createAssemblyOptions(uploads), + inPath: null, + inputPaths, + outputPlan: + resolvedOutput == null ? 
null : createOutputPlan(resolvedOutput, new Date(0)), + singleAssemblyMode: true, }) - - if (asm.error || (asm.ok && asm.ok !== 'ASSEMBLY_COMPLETED')) { - const msg = `Assembly failed: ${asm.error || asm.message} (Status: ${asm.ok})` - outputctl.error(msg) - throw new Error(msg) - } - - // Download all results - if (asm.results && resolvedOutput != null) { - for (const [stepName, stepResults] of Object.entries(asm.results)) { - for (const stepResult of stepResults) { - const resultUrl = - (stepResult as { ssl_url?: string; url?: string }).ssl_url ?? stepResult.url - if (!resultUrl) continue - - let outPath: string - if (outstat?.isDirectory()) { - outPath = path.join(resolvedOutput, stepResult.name || `${stepName}_result`) - } else { - outPath = resolvedOutput - } - - outputctl.debug(`DOWNLOADING ${stepResult.name} to ${outPath}`) - const [dlErr] = await tryCatch( - pipeline( - got.stream(resultUrl, { signal: abortController.signal }), - fs.createWriteStream(outPath), - ), - ) - if (dlErr) { - if (dlErr.name === 'AbortError') continue - outputctl.error(dlErr.message) - throw dlErr - } - } - } - } - - // Delete input files if requested - if (del) { - for (const inPath of inputPaths) { - await fsp.unlink(inPath) - } - } - return asm }) results.push(assembly) } catch (err) { @@ -1240,30 +1496,18 @@ export async function create( outputctl.error(err as Error) } - resolve({ results, hasFailures }) + resolve({ resultUrls, results, hasFailures }) }) - } else { - // Default mode: one assembly per file with p-queue concurrency limiting - emitter.on('job', (job: Job) => { - const inPath = job.in - ? (((job.in as fs.ReadStream).path as string | undefined) ?? null) - : null - const outPath = job.out?.path ?? null - const outMtime = job.out?.mtime - outputctl.debug(`GOT JOB ${inPath ?? 'null'} ${outPath ?? 
'null'}`) - - // Close the original streams immediately - we'll create fresh ones when processing - if (job.in != null) { - ;(job.in as fs.ReadStream).destroy() - } - if (job.out != null) { - job.out.destroy() - } + } - // Add job to queue - p-queue handles concurrency automatically + function runPerFileEmitter(): void { + emitter.on('job', (job: Job) => { + const inPath = job.inputPath + const outputPlan = job.out + outputctl.debug(`GOT JOB ${inPath ?? 'null'} ${outputPlan?.path ?? 'null'}`) queue .add(async () => { - const result = await processAssemblyJob(inPath, outPath, outMtime) + const result = await processAssemblyJob(inPath, outputPlan) if (result !== undefined) { results.push(result) } @@ -1274,19 +1518,19 @@ export async function create( }) }) - emitter.on('error', (err: Error) => { - abortController.abort() - queue.clear() - outputctl.error(err) - reject(err) - }) - emitter.on('end', async () => { - // Wait for all queued jobs to complete await queue.onIdle() - resolve({ results, hasFailures }) + resolve({ resultUrls, results, hasFailures }) }) } + + emitter.on('error', handleEmitterError) + + if (singleAssembly) { + runSingleAssemblyEmitter() + } else { + runPerFileEmitter() + } }) } @@ -1330,9 +1574,7 @@ export class AssembliesCreateCommand extends AuthenticatedCommand { description: 'Specify a template to use for these assemblies', }) - inputs = Option.Array('--input,-i', { - description: 'Provide an input file or a directory', - }) + inputs = inputPathsOption() outputPath = Option.String('--output,-o', { description: 'Specify an output file or directory', @@ -1342,29 +1584,20 @@ export class AssembliesCreateCommand extends AuthenticatedCommand { description: 'Set a template field (KEY=VAL)', }) - watch = Option.Boolean('--watch,-w', false, { - description: 'Watch inputs for changes', - }) + watch = watchOption() - recursive = Option.Boolean('--recursive,-r', false, { - description: 'Enumerate input directories recursively', - }) + recursive = 
recursiveOption() - deleteAfterProcessing = Option.Boolean('--delete-after-processing,-d', false, { - description: 'Delete input files after they are processed', - }) + deleteAfterProcessing = deleteAfterProcessingOption() - reprocessStale = Option.Boolean('--reprocess-stale', false, { - description: 'Process inputs even if output is newer', - }) + reprocessStale = reprocessStaleOption() - singleAssembly = Option.Boolean('--single-assembly', false, { - description: 'Pass all input files to a single assembly instead of one assembly per file', - }) + singleAssembly = singleAssemblyOption() - concurrency = Option.String('--concurrency,-c', { - description: 'Maximum number of concurrent assemblies (default: 5)', - validator: t.isNumber(), + concurrency = concurrencyOption() + + printUrls = Option.Boolean('--print-urls', { + description: 'Print temporary result URLs after completion', }) protected async run(): Promise { @@ -1378,10 +1611,6 @@ export class AssembliesCreateCommand extends AuthenticatedCommand { } const inputList = this.inputs ?? [] - if (inputList.length === 0 && this.watch) { - this.output.error('assemblies create --watch requires at least one input') - return 1 - } // Default to stdin only for `--steps` mode (common "pipe a file into a one-off assembly" use case). // For `--template` mode, templates may be inputless or use /http/import, so stdin should be explicit (`--input -`). @@ -1401,12 +1630,18 @@ export class AssembliesCreateCommand extends AuthenticatedCommand { fieldsMap[key] = value } - if (this.singleAssembly && this.watch) { - this.output.error('--single-assembly cannot be used with --watch') + const sharedValidationError = validateSharedFileProcessingOptions({ + explicitInputCount: this.inputs?.length ?? 
0, + singleAssembly: this.singleAssembly, + watch: this.watch, + watchRequiresInputsMessage: 'assemblies create --watch requires at least one input', + }) + if (sharedValidationError != null) { + this.output.error(sharedValidationError) return 1 } - const { hasFailures } = await create(this.output, this.client, { + const { hasFailures, resultUrls } = await create(this.output, this.client, { steps: this.steps, template: this.template, fields: fieldsMap, @@ -1417,8 +1652,11 @@ export class AssembliesCreateCommand extends AuthenticatedCommand { del: this.deleteAfterProcessing, reprocessStale: this.reprocessStale, singleAssembly: this.singleAssembly, - concurrency: this.concurrency, + concurrency: this.concurrency == null ? undefined : Number(this.concurrency), }) + if (this.printUrls) { + printResultUrls(this.output, resultUrls) + } return hasFailures ? 1 : undefined } } diff --git a/packages/node/src/cli/commands/index.ts b/packages/node/src/cli/commands/index.ts index 8f048784..b76456ee 100644 --- a/packages/node/src/cli/commands/index.ts +++ b/packages/node/src/cli/commands/index.ts @@ -1,7 +1,7 @@ import { Builtins, Cli } from 'clipanion' import packageJson from '../../../package.json' with { type: 'json' } - +import { intentCommands } from '../intentCommands.ts' import { AssembliesCreateCommand, AssembliesDeleteCommand, @@ -10,9 +10,7 @@ import { AssembliesListCommand, AssembliesReplayCommand, } from './assemblies.ts' - import { SignatureCommand, SmartCdnSignatureCommand, TokenCommand } from './auth.ts' - import { BillsGetCommand } from './bills.ts' import { DocsRobotsGetCommand, DocsRobotsListCommand } from './docs.ts' import { NotificationsReplayCommand } from './notifications.ts' @@ -71,5 +69,10 @@ export function createCli(): Cli { cli.register(DocsRobotsListCommand) cli.register(DocsRobotsGetCommand) + // Intent-first commands + for (const command of intentCommands) { + cli.register(command) + } + return cli } diff --git 
a/packages/node/src/cli/commands/templates.ts b/packages/node/src/cli/commands/templates.ts index a2f2bffe..031649b1 100644 --- a/packages/node/src/cli/commands/templates.ts +++ b/packages/node/src/cli/commands/templates.ts @@ -5,12 +5,11 @@ import { Command, Option } from 'clipanion' import rreaddir from 'recursive-readdir' import { z } from 'zod' import { tryCatch } from '../../alphalib/tryCatch.ts' -import type { Steps } from '../../alphalib/types/template.ts' -import { stepsSchema } from '../../alphalib/types/template.ts' import type { TemplateContent } from '../../apiTypes.ts' import type { Transloadit } from '../../Transloadit.ts' import { createReadStream, formatAPIError, streamToBuffer } from '../helpers.ts' import type { IOutputCtl } from '../OutputCtl.ts' +import { parseStepsInputJson } from '../stepsInput.ts' import ModifiedLookup from '../template-last-modified.ts' import type { TemplateFile } from '../types.ts' import { ensureError, isTransloaditAPIError, TemplateFileDataSchema } from '../types.ts' @@ -60,16 +59,11 @@ export async function create( try { const buf = await streamToBuffer(createReadStream(file)) - const parsed: unknown = JSON.parse(buf.toString()) - const validated = stepsSchema.safeParse(parsed) - if (!validated.success) { - throw new Error(`Invalid template steps format: ${validated.error.message}`) - } + const steps = parseStepsInputJson(buf.toString()) const result = await client.createTemplate({ name, - // Steps (validated) is assignable to StepsInput at runtime; cast for TS - template: { steps: validated.data } as TemplateContent, + template: { steps } as TemplateContent, }) output.print(result.id, result) return result @@ -106,23 +100,18 @@ export async function modify( try { const buf = await streamToBuffer(createReadStream(file)) - let steps: Steps | null = null + let steps: TemplateContent['steps'] | null = null let newName = name if (buf.length > 0) { - const parsed: unknown = JSON.parse(buf.toString()) - const validated = 
stepsSchema.safeParse(parsed) - if (!validated.success) { - throw new Error(`Invalid template steps format: ${validated.error.message}`) - } - steps = validated.data + steps = parseStepsInputJson(buf.toString()) as TemplateContent['steps'] } if (!name || buf.length === 0) { const tpl = await client.getTemplate(template) if (!name) newName = tpl.name if (buf.length === 0 && tpl.content.steps) { - steps = tpl.content.steps + steps = tpl.content.steps as TemplateContent['steps'] } } diff --git a/packages/node/src/cli/fileProcessingOptions.ts b/packages/node/src/cli/fileProcessingOptions.ts new file mode 100644 index 00000000..6ccc4de0 --- /dev/null +++ b/packages/node/src/cli/fileProcessingOptions.ts @@ -0,0 +1,87 @@ +import { Option } from 'clipanion' +import * as t from 'typanion' + +export interface SharedFileProcessingValidationInput { + explicitInputCount: number + singleAssembly: boolean + watch: boolean + watchRequiresInputsMessage: string +} + +export function inputPathsOption(description = 'Provide an input file or a directory'): string[] { + return Option.Array('--input,-i', { + description, + }) as unknown as string[] +} + +export function recursiveOption(description = 'Enumerate input directories recursively'): boolean { + return Option.Boolean('--recursive,-r', false, { + description, + }) as unknown as boolean +} + +export function deleteAfterProcessingOption( + description = 'Delete input files after they are processed', +): boolean { + return Option.Boolean('--delete-after-processing,-d', false, { + description, + }) as unknown as boolean +} + +export function reprocessStaleOption( + description = 'Process inputs even if output is newer', +): boolean { + return Option.Boolean('--reprocess-stale', false, { + description, + }) as unknown as boolean +} + +export function watchOption(description = 'Watch inputs for changes'): boolean { + return Option.Boolean('--watch,-w', false, { + description, + }) as unknown as boolean +} + +export function 
singleAssemblyOption( + description = 'Pass all input files to a single assembly instead of one assembly per file', +): boolean { + return Option.Boolean('--single-assembly', false, { + description, + }) as unknown as boolean +} + +export function concurrencyOption( + description = 'Maximum number of concurrent assemblies (default: 5)', +): number | undefined { + return Option.String('--concurrency,-c', { + description, + validator: t.applyCascade(t.isNumber(), [t.isAtLeast(1)]), + }) as unknown as number | undefined +} + +export function countProvidedInputs({ + inputBase64, + inputs, +}: { + inputBase64?: string[] + inputs?: string[] +}): number { + return (inputs ?? []).length + (inputBase64 ?? []).length +} + +export function validateSharedFileProcessingOptions({ + explicitInputCount, + singleAssembly, + watch, + watchRequiresInputsMessage, +}: SharedFileProcessingValidationInput): string | undefined { + if (watch && explicitInputCount === 0) { + return watchRequiresInputsMessage + } + + if (watch && singleAssembly) { + return '--single-assembly cannot be used with --watch' + } + + return undefined +} diff --git a/packages/node/src/cli/intentCommandSpecs.ts b/packages/node/src/cli/intentCommandSpecs.ts new file mode 100644 index 00000000..9990c61f --- /dev/null +++ b/packages/node/src/cli/intentCommandSpecs.ts @@ -0,0 +1,276 @@ +import type { z } from 'zod' + +import type { RobotMetaInput } from '../alphalib/types/robots/_instructions-primitives.ts' +import { + robotAudioWaveformInstructionsSchema, + meta as robotAudioWaveformMeta, +} from '../alphalib/types/robots/audio-waveform.ts' +import { + robotDocumentAutorotateInstructionsSchema, + meta as robotDocumentAutorotateMeta, +} from '../alphalib/types/robots/document-autorotate.ts' +import { + robotDocumentConvertInstructionsSchema, + meta as robotDocumentConvertMeta, +} from '../alphalib/types/robots/document-convert.ts' +import { + robotDocumentOptimizeInstructionsSchema, + meta as robotDocumentOptimizeMeta, 
+} from '../alphalib/types/robots/document-optimize.ts' +import { + robotDocumentThumbsInstructionsSchema, + meta as robotDocumentThumbsMeta, +} from '../alphalib/types/robots/document-thumbs.ts' +import { + robotFileCompressInstructionsSchema, + meta as robotFileCompressMeta, +} from '../alphalib/types/robots/file-compress.ts' +import { + robotFileDecompressInstructionsSchema, + meta as robotFileDecompressMeta, +} from '../alphalib/types/robots/file-decompress.ts' +import { + robotFilePreviewInstructionsSchema, + meta as robotFilePreviewMeta, +} from '../alphalib/types/robots/file-preview.ts' +import { + robotImageBgremoveInstructionsSchema, + meta as robotImageBgremoveMeta, +} from '../alphalib/types/robots/image-bgremove.ts' +import { + robotImageGenerateInstructionsSchema, + meta as robotImageGenerateMeta, +} from '../alphalib/types/robots/image-generate.ts' +import { + robotImageOptimizeInstructionsSchema, + meta as robotImageOptimizeMeta, +} from '../alphalib/types/robots/image-optimize.ts' +import { + robotImageResizeInstructionsSchema, + meta as robotImageResizeMeta, +} from '../alphalib/types/robots/image-resize.ts' +import { + robotTextSpeakInstructionsSchema, + meta as robotTextSpeakMeta, +} from '../alphalib/types/robots/text-speak.ts' +import { + robotVideoThumbsInstructionsSchema, + meta as robotVideoThumbsMeta, +} from '../alphalib/types/robots/video-thumbs.ts' + +export type IntentInputMode = 'local-files' | 'none' +export type IntentOutputMode = 'directory' | 'file' + +interface IntentSchemaDefinition { + meta: RobotMetaInput + schema: z.AnyZodObject +} + +interface IntentBaseDefinition { + outputMode?: IntentOutputMode + paths?: string[] +} + +export interface RobotIntentDefinition extends IntentBaseDefinition, IntentSchemaDefinition { + defaultSingleAssembly?: boolean + inputMode?: IntentInputMode + kind: 'robot' + robot: string +} + +export interface TemplateIntentDefinition extends IntentBaseDefinition { + kind: 'template' + paths: string[] + 
templateId: string +} + +export interface SemanticIntentDefinition extends IntentBaseDefinition { + kind: 'semantic' + paths: string[] + semantic: string +} + +export type IntentDefinition = + | RobotIntentDefinition + | TemplateIntentDefinition + | SemanticIntentDefinition + +const commandPathAliases = new Map([ + ['autorotate', 'auto-rotate'], + ['bgremove', 'remove-background'], +]) + +function defineRobotIntent(definition: RobotIntentDefinition): RobotIntentDefinition { + return definition +} + +function defineTemplateIntent(definition: TemplateIntentDefinition): TemplateIntentDefinition { + return definition +} + +function defineSemanticIntent(definition: SemanticIntentDefinition): SemanticIntentDefinition { + return definition +} + +export function getIntentCatalogKey(definition: IntentDefinition): string { + if (definition.kind === 'robot') { + return definition.robot + } + + if (definition.kind === 'template') { + return definition.templateId + } + + return `${definition.semantic}:${definition.paths.join('/')}` +} + +export function getIntentPaths(definition: IntentDefinition): string[] { + if (definition.paths != null) { + return definition.paths + } + + if (definition.kind !== 'robot') { + throw new Error(`Intent definition ${getIntentCatalogKey(definition)} is missing paths`) + } + + const segments = definition.robot.split('/').filter(Boolean) + const [group, action] = segments + if (group == null || action == null) { + throw new Error(`Could not infer command path from robot "${definition.robot}"`) + } + + return [group, commandPathAliases.get(action) ?? 
action] +} + +export function getIntentCommandLabel(definition: IntentDefinition): string { + return getIntentPaths(definition).join(' ') +} + +export function getIntentResultStepName(definition: IntentDefinition): string | null { + if (definition.kind !== 'robot') { + return null + } + + const paths = getIntentPaths(definition) + const action = paths[paths.length - 1] + if (action == null) { + throw new Error(`Intent definition ${definition.robot} has no action path`) + } + + return action.replaceAll('-', '_') +} + +export function findIntentDefinitionByPaths( + paths: readonly string[], +): IntentDefinition | undefined { + return intentCatalog.find((definition) => { + const definitionPaths = getIntentPaths(definition) + return ( + definitionPaths.length === paths.length && + definitionPaths.every((part, index) => part === paths[index]) + ) + }) +} + +export const intentCatalog = [ + defineRobotIntent({ + kind: 'robot', + robot: '/image/generate', + meta: robotImageGenerateMeta, + schema: robotImageGenerateInstructionsSchema, + }), + defineRobotIntent({ + kind: 'robot', + robot: '/file/preview', + paths: ['preview', 'generate'], + meta: robotFilePreviewMeta, + schema: robotFilePreviewInstructionsSchema, + }), + defineRobotIntent({ + kind: 'robot', + robot: '/image/bgremove', + meta: robotImageBgremoveMeta, + schema: robotImageBgremoveInstructionsSchema, + }), + defineRobotIntent({ + kind: 'robot', + robot: '/image/optimize', + meta: robotImageOptimizeMeta, + schema: robotImageOptimizeInstructionsSchema, + }), + defineRobotIntent({ + kind: 'robot', + robot: '/image/resize', + meta: robotImageResizeMeta, + schema: robotImageResizeInstructionsSchema, + }), + defineRobotIntent({ + kind: 'robot', + robot: '/document/convert', + meta: robotDocumentConvertMeta, + schema: robotDocumentConvertInstructionsSchema, + }), + defineRobotIntent({ + kind: 'robot', + robot: '/document/optimize', + meta: robotDocumentOptimizeMeta, + schema: robotDocumentOptimizeInstructionsSchema, + 
}), + defineRobotIntent({ + kind: 'robot', + robot: '/document/autorotate', + meta: robotDocumentAutorotateMeta, + schema: robotDocumentAutorotateInstructionsSchema, + }), + defineRobotIntent({ + kind: 'robot', + robot: '/document/thumbs', + outputMode: 'directory', + meta: robotDocumentThumbsMeta, + schema: robotDocumentThumbsInstructionsSchema, + }), + defineRobotIntent({ + kind: 'robot', + robot: '/audio/waveform', + meta: robotAudioWaveformMeta, + schema: robotAudioWaveformInstructionsSchema, + }), + defineRobotIntent({ + kind: 'robot', + robot: '/text/speak', + meta: robotTextSpeakMeta, + schema: robotTextSpeakInstructionsSchema, + }), + defineRobotIntent({ + kind: 'robot', + robot: '/video/thumbs', + outputMode: 'directory', + meta: robotVideoThumbsMeta, + schema: robotVideoThumbsInstructionsSchema, + }), + defineTemplateIntent({ + kind: 'template', + templateId: 'builtin/encode-hls-video@latest', + paths: ['video', 'encode-hls'], + outputMode: 'directory', + }), + defineSemanticIntent({ + kind: 'semantic', + semantic: 'image-describe', + paths: ['image', 'describe'], + }), + defineRobotIntent({ + kind: 'robot', + robot: '/file/compress', + defaultSingleAssembly: true, + meta: robotFileCompressMeta, + schema: robotFileCompressInstructionsSchema, + }), + defineRobotIntent({ + kind: 'robot', + robot: '/file/decompress', + outputMode: 'directory', + meta: robotFileDecompressMeta, + schema: robotFileDecompressInstructionsSchema, + }), +] satisfies IntentDefinition[] diff --git a/packages/node/src/cli/intentCommands.ts b/packages/node/src/cli/intentCommands.ts new file mode 100644 index 00000000..aca341f4 --- /dev/null +++ b/packages/node/src/cli/intentCommands.ts @@ -0,0 +1,490 @@ +import type { CommandClass } from 'clipanion' +import { Command } from 'clipanion' +import type { ZodObject, ZodRawShape, ZodTypeAny } from 'zod' + +import type { RobotMetaInput } from '../alphalib/types/robots/_instructions-primitives.ts' +import type { + IntentDefinition, + 
IntentInputMode, + IntentOutputMode, + RobotIntentDefinition, + SemanticIntentDefinition, +} from './intentCommandSpecs.ts' +import { getIntentPaths, getIntentResultStepName, intentCatalog } from './intentCommandSpecs.ts' +import type { IntentFieldKind, IntentFieldSpec } from './intentFields.ts' +import { + createIntentOption, + inferIntentExampleValue, + inferIntentFieldKind, + unwrapIntentSchema, +} from './intentFields.ts' +import type { IntentInputPolicy } from './intentInputPolicy.ts' +import type { + IntentCommandDefinition, + IntentFileCommandDefinition, + IntentNoInputCommandDefinition, + IntentSingleStepExecutionDefinition, +} from './intentRuntime.ts' +import { + GeneratedBundledFileIntentCommand, + GeneratedNoInputIntentCommand, + GeneratedStandardFileIntentCommand, + GeneratedWatchableFileIntentCommand, + getIntentOptionDefinitions, +} from './intentRuntime.ts' +import { getSemanticIntentDescriptor } from './semanticIntents/index.ts' + +interface GeneratedSchemaField extends IntentFieldSpec { + description?: string + exampleValue: string + optionFlags: string + propertyName: string + required: boolean +} + +interface ResolvedIntentLocalFilesInput { + defaultSingleAssembly?: boolean + inputPolicy: IntentInputPolicy + kind: 'local-files' +} + +interface ResolvedIntentNoneInput { + kind: 'none' +} + +type ResolvedIntentInput = ResolvedIntentLocalFilesInput | ResolvedIntentNoneInput + +type IntentBaseClass = + | typeof GeneratedBundledFileIntentCommand + | typeof GeneratedNoInputIntentCommand + | typeof GeneratedStandardFileIntentCommand + | typeof GeneratedWatchableFileIntentCommand + +type BuiltIntentCommandDefinition = IntentCommandDefinition & { + intentDefinition: IntentFileCommandDefinition | IntentNoInputCommandDefinition +} + +const hiddenFieldNames = new Set([ + 'ffmpeg_stack', + 'force_accept', + 'ignore_errors', + 'imagemagick_stack', + 'output_meta', + 'queue', + 'result', + 'robot', + 'stack', + 'use', +]) + +function toCamelCase(value: 
string): string { + return value.replace(/_([a-z])/g, (_match, letter: string) => letter.toUpperCase()) +} + +function toKebabCase(value: string): string { + return value.replaceAll('_', '-') +} + +function toPascalCase(parts: string[]): string { + return parts + .flatMap((part) => part.split('-')) + .map((part) => `${part[0]?.toUpperCase() ?? ''}${part.slice(1)}`) + .join('') +} + +function stripTrailingPunctuation(value: string): string { + return value.replace(/[.:]+$/, '').trim() +} + +function getTypicalInputFile(meta: RobotMetaInput): string { + switch (meta.typical_file_type) { + case 'audio file': + return 'input.mp3' + case 'document': + return 'input.pdf' + case 'image': + return 'input.png' + case 'video': + return 'input.mp4' + default: + return 'input.file' + } +} + +function getDefaultOutputPath(paths: string[], outputMode: IntentOutputMode): string { + if (outputMode === 'directory') { + return 'output/' + } + + const [group] = paths + if (group === 'audio') return 'output.png' + if (group === 'document') return 'output.pdf' + if (group === 'image') return 'output.png' + if (group === 'text') return 'output.mp3' + return 'output.file' +} + +function inferOutputPath( + paths: string[], + outputMode: IntentOutputMode, + fieldSpecs: readonly GeneratedSchemaField[], +): string { + if (outputMode === 'directory') { + return 'output/' + } + + const formatExample = fieldSpecs + .map((fieldSpec) => + fieldSpec.required && fieldSpec.name === 'format' ? 
fieldSpec.exampleValue : null, + ) + .find((value) => value != null) + + if (fieldSpecs.some((fieldSpec) => fieldSpec.name === 'format') && formatExample != null) { + if (fieldSpecs.some((fieldSpec) => fieldSpec.name === 'relative_pathname')) { + return `archive.${formatExample}` + } + } + + if (formatExample != null && /^[-\w]+$/.test(formatExample)) { + return `output.${formatExample}` + } + + return getDefaultOutputPath(paths, outputMode) +} + +function inferInputModeFromShape(shape: Record): IntentInputMode { + if ('prompt' in shape) { + return unwrapIntentSchema(shape.prompt).required ? 'none' : 'local-files' + } + + return 'local-files' +} + +function inferIntentInput( + definition: RobotIntentDefinition, + shape: Record, +): ResolvedIntentInput { + const inputMode = definition.inputMode ?? inferInputModeFromShape(shape) + if (inputMode === 'none') { + return { kind: 'none' } + } + + const promptIsOptional = 'prompt' in shape && !unwrapIntentSchema(shape.prompt).required + const inputPolicy = promptIsOptional + ? 
({ + kind: 'optional', + field: 'prompt', + attachUseWhenInputsProvided: true, + } satisfies IntentInputPolicy) + : ({ kind: 'required' } satisfies IntentInputPolicy) + + if (definition.defaultSingleAssembly) { + return { + kind: 'local-files', + defaultSingleAssembly: true, + inputPolicy, + } + } + + return { + kind: 'local-files', + inputPolicy, + } +} + +function inferFixedValues( + definition: RobotIntentDefinition, + input: ResolvedIntentInput, + inputMode: IntentInputMode, +): Record { + if (definition.defaultSingleAssembly) { + return { + robot: definition.robot, + result: true, + use: { + steps: [':original'], + bundle_steps: true, + }, + } + } + + if (inputMode === 'none') { + return { + robot: definition.robot, + result: true, + } + } + + if (input.kind === 'local-files' && input.inputPolicy.kind === 'required') { + return { + robot: definition.robot, + result: true, + use: ':original', + } + } + + return { + robot: definition.robot, + result: true, + } +} + +function collectSchemaFields( + schemaShape: Record, + fixedValues: Record, + input: ResolvedIntentInput, +): GeneratedSchemaField[] { + return Object.entries(schemaShape) + .filter(([key]) => !hiddenFieldNames.has(key) && !Object.hasOwn(fixedValues, key)) + .flatMap(([key, fieldSchema]) => { + const { required: schemaRequired, schema: unwrappedSchema } = unwrapIntentSchema(fieldSchema) + + let kind: IntentFieldKind + try { + kind = inferIntentFieldKind(unwrappedSchema) + } catch { + return [] + } + + return [ + { + name: key, + propertyName: toCamelCase(key), + optionFlags: `--${toKebabCase(key)}`, + required: (input.kind === 'none' && key === 'prompt') || schemaRequired, + description: fieldSchema.description, + exampleValue: inferIntentExampleValue({ + kind, + name: key, + schema: unwrappedSchema as ZodTypeAny, + }), + kind, + }, + ] + }) +} + +function inferExamples( + spec: BuiltIntentCommandDefinition, + definition?: RobotIntentDefinition, +): Array<[string, string]> { + if (definition == null) 
{ + if (spec.intentDefinition.execution.kind === 'dynamic-step') { + return spec.examples + } + + return [ + ['Run the command', `transloadit ${spec.paths.join(' ')} --input input.mp4 --out output/`], + ] + } + + const parts = ['transloadit', ...spec.paths] + const schemaShape = (definition.schema as ZodObject).shape as Record< + string, + ZodTypeAny + > + const inputMode = definition.inputMode ?? inferInputModeFromShape(schemaShape) + const fieldSpecs = + spec.intentDefinition.execution.kind === 'single-step' + ? (spec.intentDefinition.execution.fields as readonly GeneratedSchemaField[]) + : [] + + if (inputMode === 'local-files') { + parts.push('--input', getTypicalInputFile(definition.meta)) + } + + if (inputMode === 'none') { + const promptField = fieldSpecs.find((fieldSpec) => fieldSpec.name === 'prompt') + parts.push('--prompt', promptField?.exampleValue ?? JSON.stringify('A red bicycle in a studio')) + } + + for (const fieldSpec of fieldSpecs) { + if (!fieldSpec.required) continue + if (fieldSpec.name === 'prompt' && inputMode === 'none') continue + + parts.push(fieldSpec.optionFlags, fieldSpec.exampleValue) + } + + const outputMode = spec.intentDefinition.outputMode ?? 'file' + parts.push('--out', inferOutputPath(spec.paths, outputMode, fieldSpecs)) + + return [['Run the command', parts.join(' ')]] +} + +function resolveRobotIntent(definition: RobotIntentDefinition): BuiltIntentCommandDefinition { + const paths = getIntentPaths(definition) + const className = `${toPascalCase(paths)}Command` + const commandLabel = paths.join(' ') + const schema = definition.schema as ZodObject + const schemaShape = schema.shape as Record + const inputMode = definition.inputMode ?? inferInputModeFromShape(schemaShape) + const input = inferIntentInput(definition, schemaShape) + const fixedValues = inferFixedValues(definition, input, inputMode) + const fieldSpecs = collectSchemaFields(schemaShape, fixedValues, input) + const outputMode = definition.outputMode ?? 
'file' + const execution: IntentSingleStepExecutionDefinition = { + kind: 'single-step', + schema, + fields: fieldSpecs, + fixedValues, + resultStepName: + getIntentResultStepName(definition) ?? + (() => { + throw new Error(`Could not infer result step name for "${definition.robot}"`) + })(), + } + + const spec: BuiltIntentCommandDefinition = { + className, + description: stripTrailingPunctuation(definition.meta.title), + details: + inputMode === 'none' + ? `Runs \`${definition.robot}\` and writes the result to \`--out\`.` + : definition.defaultSingleAssembly === true + ? `Runs \`${definition.robot}\` for the provided inputs and writes the result to \`--out\`.` + : outputMode === 'directory' + ? `Runs \`${definition.robot}\` on each input file and writes the results to \`--out\`.` + : `Runs \`${definition.robot}\` on each input file and writes the result to \`--out\`.`, + examples: [], + paths, + runnerKind: + input.kind === 'none' ? 'no-input' : input.defaultSingleAssembly ? 'bundled' : 'standard', + intentDefinition: + input.kind === 'none' + ? { + execution, + outputDescription: 'Write the result to this path', + outputMode, + } + : { + commandLabel, + execution, + inputPolicy: input.inputPolicy, + outputDescription: + outputMode === 'directory' + ? 
'Write the results to this directory' + : 'Write the result to this path or directory', + outputMode, + }, + } + + return { + ...spec, + examples: inferExamples(spec, definition), + } +} + +function resolveSemanticIntent(definition: SemanticIntentDefinition): BuiltIntentCommandDefinition { + const paths = getIntentPaths(definition) + const descriptor = getSemanticIntentDescriptor(definition.semantic) + + return { + className: `${toPascalCase(paths)}Command`, + description: descriptor.presentation.description, + details: descriptor.presentation.details, + examples: [...descriptor.presentation.examples], + paths, + runnerKind: descriptor.runnerKind, + intentDefinition: { + commandLabel: paths.join(' '), + execution: descriptor.execution, + inputPolicy: descriptor.inputPolicy, + outputDescription: descriptor.outputDescription, + }, + } +} + +function resolveTemplateIntent( + definition: IntentDefinition & { kind: 'template' }, +): BuiltIntentCommandDefinition { + const outputMode = definition.outputMode ?? 'file' + const paths = getIntentPaths(definition) + const spec: BuiltIntentCommandDefinition = { + className: `${toPascalCase(paths)}Command`, + description: `Run ${stripTrailingPunctuation(definition.templateId)}`, + details: `Runs the \`${definition.templateId}\` template and writes the outputs to \`--out\`.`, + examples: [], + paths, + runnerKind: 'standard', + intentDefinition: { + commandLabel: paths.join(' '), + execution: { + kind: 'template', + templateId: definition.templateId, + }, + inputPolicy: { kind: 'required' }, + outputDescription: + outputMode === 'directory' + ? 
'Write the results to this directory' + : 'Write the result to this path or directory', + outputMode, + }, + } + + return { + ...spec, + examples: inferExamples(spec), + } +} + +function resolveIntent(definition: IntentDefinition): BuiltIntentCommandDefinition { + if (definition.kind === 'robot') { + return resolveRobotIntent(definition) + } + + if (definition.kind === 'semantic') { + return resolveSemanticIntent(definition) + } + + return resolveTemplateIntent(definition) +} + +function getBaseClass(spec: BuiltIntentCommandDefinition): IntentBaseClass { + if (spec.runnerKind === 'no-input') { + return GeneratedNoInputIntentCommand + } + + if (spec.runnerKind === 'bundled') { + return GeneratedBundledFileIntentCommand + } + + if (spec.runnerKind === 'watchable') { + return GeneratedWatchableFileIntentCommand + } + + return GeneratedStandardFileIntentCommand +} + +function createIntentCommandClass(spec: BuiltIntentCommandDefinition): CommandClass { + const BaseClass = getBaseClass(spec) + + class RuntimeIntentCommand extends BaseClass {} + + Object.defineProperty(RuntimeIntentCommand, 'name', { + value: spec.className, + }) + + Object.assign(RuntimeIntentCommand, { + paths: [spec.paths], + intentDefinition: spec.intentDefinition, + usage: Command.Usage({ + category: 'Intent Commands', + description: spec.description, + details: spec.details, + examples: spec.examples, + }), + }) + + for (const field of getIntentOptionDefinitions(spec.intentDefinition)) { + Object.defineProperty(RuntimeIntentCommand.prototype, field.propertyName, { + configurable: true, + enumerable: true, + writable: true, + value: createIntentOption(field), + }) + } + + return RuntimeIntentCommand as unknown as CommandClass +} + +export const intentCommands = intentCatalog.map(resolveIntent).map(createIntentCommandClass) diff --git a/packages/node/src/cli/intentFields.ts b/packages/node/src/cli/intentFields.ts new file mode 100644 index 00000000..2bb5574a --- /dev/null +++ 
b/packages/node/src/cli/intentFields.ts @@ -0,0 +1,408 @@ +import { Option } from 'clipanion' +import * as t from 'typanion' +import type { z } from 'zod' +import { + ZodArray, + ZodBoolean, + ZodDefault, + ZodEffects, + ZodEnum, + ZodLiteral, + ZodNullable, + ZodNumber, + ZodObject, + ZodOptional, + ZodString, + ZodUnion, +} from 'zod' + +export type IntentFieldKind = 'auto' | 'boolean' | 'json' | 'number' | 'string' | 'string-array' + +export interface IntentFieldSpec { + kind: IntentFieldKind + name: string +} + +export interface IntentOptionLike extends IntentFieldSpec { + description?: string + optionFlags: string + required?: boolean +} + +export function unwrapIntentSchema(input: unknown): { required: boolean; schema: unknown } { + let schema = input + let required = true + + while (true) { + if (schema instanceof ZodEffects) { + schema = schema._def.schema + continue + } + + if (schema instanceof ZodOptional) { + required = false + schema = schema.unwrap() + continue + } + + if (schema instanceof ZodDefault) { + required = false + schema = schema.removeDefault() + continue + } + + if (schema instanceof ZodNullable) { + required = false + schema = schema.unwrap() + continue + } + + return { required, schema } + } +} + +export function inferIntentFieldKind(schema: unknown): IntentFieldKind { + const unwrappedSchema = unwrapIntentSchema(schema).schema + + if (unwrappedSchema instanceof ZodString || unwrappedSchema instanceof ZodEnum) { + return 'string' + } + + if (unwrappedSchema instanceof ZodNumber) { + return 'number' + } + + if (unwrappedSchema instanceof ZodBoolean) { + return 'boolean' + } + + if (unwrappedSchema instanceof ZodLiteral) { + if (typeof unwrappedSchema.value === 'number') return 'number' + if (typeof unwrappedSchema.value === 'boolean') return 'boolean' + return 'string' + } + + if (unwrappedSchema instanceof ZodArray) { + const elementKind = inferIntentFieldKind(unwrappedSchema.element) + if (elementKind === 'string') { + return 
'string-array' + } + + return 'json' + } + + if (unwrappedSchema instanceof ZodObject) { + return 'json' + } + + if (unwrappedSchema instanceof ZodUnion) { + const optionKinds = Array.from( + new Set(unwrappedSchema._def.options.map((option: unknown) => inferIntentFieldKind(option))), + ) as IntentFieldKind[] + if ( + optionKinds.length === 2 && + optionKinds.includes('string') && + optionKinds.includes('string-array') + ) { + return 'string-array' + } + if (optionKinds.length === 1) { + const [kind] = optionKinds + if (kind != null) return kind + } + return 'auto' + } + + throw new Error('Unsupported schema type') +} + +export function createIntentOption(fieldDefinition: IntentOptionLike): unknown { + const { description, kind, optionFlags, required } = fieldDefinition + + if (kind === 'boolean') { + return Option.Boolean(optionFlags, { + description, + required, + }) + } + + if (kind === 'number') { + return Option.String(optionFlags, { + description, + required, + validator: t.isNumber(), + }) + } + + if (kind === 'string-array') { + return Option.Array(optionFlags, { + description, + required, + }) + } + + return Option.String(optionFlags, { + description, + required, + }) +} + +function inferSchemaExampleValue(schema: unknown): string | null { + const unwrappedSchema = unwrapIntentSchema(schema).schema + + if (unwrappedSchema instanceof ZodLiteral) { + return String(unwrappedSchema.value) + } + + if (unwrappedSchema instanceof ZodEnum) { + return unwrappedSchema.options[0] ?? 
null + } + + if (unwrappedSchema instanceof ZodUnion) { + for (const option of unwrappedSchema._def.options) { + const exampleValue = inferSchemaExampleValue(option) + if (exampleValue != null) { + return exampleValue + } + } + } + + return null +} + +export function parseStringArrayValue(raw: unknown): string[] { + const addNormalizedValues = (source: string[], value: string): void => { + source.push( + ...value + .split(',') + .map((part) => part.trim()) + .filter(Boolean), + ) + } + + const normalizeJsonArray = (value: string): string[] | null => { + const trimmed = value.trim() + if (!trimmed.startsWith('[')) { + return null + } + + let parsedJson: unknown + try { + parsedJson = JSON.parse(trimmed) + } catch { + throw new Error(`Expected valid JSON but received "${value}"`) + } + + if (!Array.isArray(parsedJson) || !parsedJson.every((item) => typeof item === 'string')) { + throw new Error(`Expected an array of strings but received "${value}"`) + } + + return parsedJson + } + + const values = Array.isArray(raw) ? raw : [raw] + const normalizedValues: string[] = [] + for (const value of values) { + if (typeof value !== 'string') { + normalizedValues.push(String(value)) + continue + } + + const parsedJson = normalizeJsonArray(value) + if (parsedJson != null) { + normalizedValues.push(...parsedJson) + continue + } + + addNormalizedValues(normalizedValues, value) + } + + return normalizedValues +} + +function pickPreferredExampleValue(name: string, candidates: readonly string[]): string | null { + if (candidates.length === 0) { + return null + } + + if (name === 'format') { + const preferredFormats = ['pdf', 'zip', 'jpg', 'png', 'mp3'] + for (const preferredFormat of preferredFormats) { + if (candidates.includes(preferredFormat)) { + return preferredFormat + } + } + } + + return candidates[0] ?? 
null +} + +export function inferIntentExampleValue({ + kind, + name, + schema, +}: { + kind: IntentFieldKind + name: string + schema?: z.ZodTypeAny +}): string { + if (name === 'prompt') { + return JSON.stringify('A red bicycle in a studio') + } + + if (name === 'provider') { + return 'aws' + } + + if (name === 'target_language') { + return 'en-US' + } + + if (name === 'voice') { + return 'female-1' + } + + const schemaExample = + schema instanceof ZodEnum + ? pickPreferredExampleValue(name, schema.options) + : schema instanceof ZodUnion + ? pickPreferredExampleValue( + name, + schema._def.options + .map((option: unknown) => inferSchemaExampleValue(option)) + .filter((value: string | null): value is string => value != null), + ) + : schema == null + ? null + : inferSchemaExampleValue(schema) + if (schemaExample != null) { + return schemaExample + } + + if (kind === 'boolean') { + return 'true' + } + + if (kind === 'number') { + return '1' + } + + return 'value' +} + +export function coerceIntentFieldValue( + kind: IntentFieldKind, + raw: unknown, + fieldSchema?: z.ZodTypeAny, +): unknown { + if (kind === 'number' && typeof raw === 'number') { + return raw + } + + if (kind === 'boolean' && typeof raw === 'boolean') { + return raw + } + + if (kind === 'auto') { + if (fieldSchema == null) { + return raw + } + + const candidates: unknown[] = [] + + if (typeof raw !== 'string') { + candidates.push(raw) + } + + const trimmed = typeof raw === 'string' ? 
raw.trim() : '' + + if (typeof raw === 'string' && (trimmed.startsWith('{') || trimmed.startsWith('['))) { + try { + candidates.push(JSON.parse(trimmed)) + } catch {} + } + + candidates.push(raw) + + if ( + typeof raw === 'string' && + trimmed !== '' && + !trimmed.startsWith('{') && + !trimmed.startsWith('[') + ) { + try { + candidates.push(JSON.parse(trimmed)) + } catch {} + } + + if (raw === 'true' || raw === 'false') { + candidates.push(raw === 'true') + } + + const numericValue = Number(raw) + if ((typeof raw === 'number' || trimmed !== '') && !Number.isNaN(numericValue)) { + candidates.push(numericValue) + } + + for (const candidate of candidates) { + const parsed = fieldSchema.safeParse(candidate) + if (parsed.success) { + return parsed.data as boolean | number | string + } + } + + return raw + } + + if (kind === 'number') { + if (typeof raw !== 'string') { + throw new Error(`Expected a number but received "${String(raw)}"`) + } + if (raw.trim() === '') { + throw new Error(`Expected a number but received "${raw}"`) + } + const value = Number(raw) + if (Number.isNaN(value)) { + throw new Error(`Expected a number but received "${raw}"`) + } + return value + } + + if (kind === 'json') { + if (typeof raw !== 'string') { + return raw + } + let parsedJson: unknown + try { + parsedJson = JSON.parse(raw) + } catch { + throw new Error(`Expected valid JSON but received "${raw}"`) + } + + if (fieldSchema == null) { + return parsedJson + } + + const parsed = fieldSchema.safeParse(parsedJson) + if (!parsed.success) { + throw new Error(parsed.error.message) + } + + return parsed.data + } + + if (kind === 'boolean') { + if (typeof raw !== 'string') { + throw new Error(`Expected "true" or "false" but received "${String(raw)}"`) + } + if (raw === 'true') return true + if (raw === 'false') return false + throw new Error(`Expected "true" or "false" but received "${raw}"`) + } + + if (kind === 'string-array') { + return parseStringArrayValue(raw) + } + + return raw +} diff --git 
a/packages/node/src/cli/intentInputPolicy.ts b/packages/node/src/cli/intentInputPolicy.ts new file mode 100644 index 00000000..c72dc576 --- /dev/null +++ b/packages/node/src/cli/intentInputPolicy.ts @@ -0,0 +1,11 @@ +export interface RequiredIntentInputPolicy { + kind: 'required' +} + +export interface OptionalIntentInputPolicy { + attachUseWhenInputsProvided: boolean + field: string + kind: 'optional' +} + +export type IntentInputPolicy = OptionalIntentInputPolicy | RequiredIntentInputPolicy diff --git a/packages/node/src/cli/intentRuntime.ts b/packages/node/src/cli/intentRuntime.ts new file mode 100644 index 00000000..06433234 --- /dev/null +++ b/packages/node/src/cli/intentRuntime.ts @@ -0,0 +1,660 @@ +import { statSync } from 'node:fs' +import { basename } from 'node:path' +import { Option } from 'clipanion' +import type { z } from 'zod' + +import { prepareInputFiles } from '../inputFiles.ts' +import type { AssembliesCreateOptions } from './commands/assemblies.ts' +import * as assembliesCommands from './commands/assemblies.ts' +import { AuthenticatedCommand } from './commands/BaseCommand.ts' +import { + concurrencyOption, + countProvidedInputs, + deleteAfterProcessingOption, + inputPathsOption, + recursiveOption, + reprocessStaleOption, + singleAssemblyOption, + validateSharedFileProcessingOptions, + watchOption, +} from './fileProcessingOptions.ts' +import type { IntentFieldSpec } from './intentFields.ts' +import { coerceIntentFieldValue } from './intentFields.ts' +import type { IntentInputPolicy } from './intentInputPolicy.ts' +import { printResultUrls } from './resultUrls.ts' +import { getSemanticIntentDescriptor } from './semanticIntents/index.ts' + +export interface PreparedIntentInputs { + cleanup: Array<() => Promise> + hasTransientInputs: boolean + inputs: string[] +} + +export interface IntentSingleStepExecutionDefinition { + fields: readonly IntentOptionDefinition[] + fixedValues: Record + kind: 'single-step' + resultStepName: string + schema: 
z.AnyZodObject +} + +export interface IntentDynamicStepExecutionDefinition { + fields: readonly IntentOptionDefinition[] + handler: string + kind: 'dynamic-step' + resultStepName: string +} + +export interface IntentTemplateExecutionDefinition { + kind: 'template' + templateId: string +} + +export type IntentFileExecutionDefinition = + | IntentDynamicStepExecutionDefinition + | IntentSingleStepExecutionDefinition + | IntentTemplateExecutionDefinition + +export interface IntentFileCommandDefinition { + commandLabel: string + execution: IntentFileExecutionDefinition + inputPolicy: IntentInputPolicy + outputDescription: string + outputMode?: 'directory' | 'file' +} + +export interface IntentNoInputCommandDefinition { + execution: IntentSingleStepExecutionDefinition + outputDescription: string + outputMode?: 'directory' | 'file' +} + +export type IntentRunnerKind = 'bundled' | 'no-input' | 'standard' | 'watchable' + +export interface IntentCommandDefinition { + className: string + description: string + details: string + examples: Array<[string, string]> + intentDefinition: IntentFileCommandDefinition | IntentNoInputCommandDefinition + paths: string[] + runnerKind: IntentRunnerKind +} + +export interface IntentOptionDefinition extends IntentFieldSpec { + description?: string + optionFlags: string + propertyName: string + required?: boolean +} + +function isHttpUrl(value: string): boolean { + try { + const url = new URL(value) + return url.protocol === 'http:' || url.protocol === 'https:' + } catch { + return false + } +} + +function normalizeBase64Value(value: string): string { + const trimmed = value.trim() + const marker = ';base64,' + const markerIndex = trimmed.indexOf(marker) + if (!trimmed.startsWith('data:') || markerIndex === -1) { + return trimmed + } + + return trimmed.slice(markerIndex + marker.length) +} + +export async function prepareIntentInputs({ + inputBase64Values, + inputValues, +}: { + inputBase64Values: string[] + inputValues: string[] +}): Promise 
{ + const preparedOrder: string[] = [] + const syntheticInputs: Array< + | { + base64: string + field: string + filename: string + kind: 'base64' + } + | { + field: string + kind: 'url' + url: string + } + > = [] + + for (const value of inputValues) { + if (!isHttpUrl(value)) { + preparedOrder.push(value) + continue + } + + const field = `input_url_${syntheticInputs.length + 1}` + syntheticInputs.push({ + kind: 'url', + field, + url: value, + }) + preparedOrder.push(field) + } + + for (const [index, value] of inputBase64Values.entries()) { + const field = `input_base64_${index + 1}` + const filename = `input-base64-${index + 1}.bin` + syntheticInputs.push({ + kind: 'base64', + field, + filename, + base64: normalizeBase64Value(value), + }) + preparedOrder.push(field) + } + + if (syntheticInputs.length === 0) { + return { + cleanup: [], + hasTransientInputs: false, + inputs: preparedOrder, + } + } + + const prepared = await prepareInputFiles({ + inputFiles: syntheticInputs.map((input) => { + if (input.kind === 'url') { + return { + kind: 'url' as const, + field: input.field, + url: input.url, + filename: basename(new URL(input.url).pathname) || undefined, + } + } + + return { + kind: 'base64' as const, + field: input.field, + base64: input.base64, + filename: input.filename, + } + }), + base64Strategy: 'tempfile', + allowPrivateUrls: false, + urlStrategy: 'download', + }) + + const inputs = preparedOrder.map((value) => prepared.files[value] ?? 
value) + + return { + cleanup: prepared.cleanup, + hasTransientInputs: true, + inputs, + } +} + +export function parseIntentStep({ + fields, + fixedValues, + rawValues, + schema, +}: { + fields: readonly IntentFieldSpec[] + fixedValues: Record + rawValues: Record + schema: TSchema +}): z.input { + const input: Record = { ...fixedValues } + + for (const fieldSpec of fields) { + const rawValue = rawValues[fieldSpec.name] + if (rawValue == null) continue + const fieldSchema = schema.shape[fieldSpec.name] + input[fieldSpec.name] = coerceIntentFieldValue(fieldSpec.kind, rawValue, fieldSchema) + } + + const parsed = schema.parse(input) as Record + const normalizedInput: Record = { ...fixedValues } + + for (const fieldSpec of fields) { + const rawValue = rawValues[fieldSpec.name] + if (rawValue == null) continue + normalizedInput[fieldSpec.name] = parsed[fieldSpec.name] + } + + return normalizedInput as z.input +} + +function resolveSingleStepFixedValues( + execution: IntentSingleStepExecutionDefinition, + inputPolicy: IntentInputPolicy, + hasInputs: boolean, +): Record { + if (!hasInputs) { + return execution.fixedValues + } + + if (inputPolicy.kind !== 'optional' || inputPolicy.attachUseWhenInputsProvided !== true) { + return execution.fixedValues + } + + return { + ...execution.fixedValues, + use: ':original', + } +} + +function createSingleStep( + execution: IntentSingleStepExecutionDefinition, + inputPolicy: IntentInputPolicy, + rawValues: Record, + hasInputs: boolean, +): z.input { + return parseIntentStep({ + schema: execution.schema, + fixedValues: resolveSingleStepFixedValues(execution, inputPolicy, hasInputs), + fields: execution.fields, + rawValues, + }) +} + +function createDynamicIntentStep( + execution: IntentDynamicStepExecutionDefinition, + rawValues: Record, +): Record { + return getSemanticIntentDescriptor(execution.handler).createStep(rawValues) +} + +function requiresLocalInput( + inputPolicy: IntentInputPolicy, + rawValues: Record, +): boolean { + if 
(inputPolicy.kind === 'required') { + return true + } + + return rawValues[inputPolicy.field] == null +} + +async function executeIntentCommand({ + client, + definition, + output, + outputPath, + printUrls, + rawValues, + createOptions, +}: { + client: AuthenticatedCommand['client'] + createOptions: Omit + definition: IntentFileCommandDefinition | IntentNoInputCommandDefinition + output: AuthenticatedCommand['output'] + outputPath?: string + printUrls: boolean + rawValues: Record +}): Promise { + const inputPolicy: IntentInputPolicy = + 'inputPolicy' in definition ? definition.inputPolicy : { kind: 'required' } + const executionOptions = + definition.execution.kind === 'template' + ? { + template: definition.execution.templateId, + } + : { + stepsData: { + [definition.execution.resultStepName]: + definition.execution.kind === 'single-step' + ? createSingleStep( + definition.execution, + inputPolicy, + rawValues, + createOptions.inputs.length > 0, + ) + : createDynamicIntentStep(definition.execution, rawValues), + } as AssembliesCreateOptions['stepsData'], + } + + const { hasFailures, resultUrls } = await assembliesCommands.create(output, client, { + ...createOptions, + output: outputPath ?? null, + outputMode: definition.outputMode, + ...executionOptions, + }) + if (printUrls) { + printResultUrls(output, resultUrls) + } + return hasFailures ? 
1 : undefined +} + +abstract class GeneratedIntentCommandBase extends AuthenticatedCommand { + declare static intentDefinition: IntentFileCommandDefinition | IntentNoInputCommandDefinition + + outputPath = Option.String('--out,-o', { + description: this.getOutputDescription(), + }) + + printUrls = Option.Boolean('--print-urls', { + description: 'Print temporary result URLs after completion', + }) + + protected getIntentDefinition(): IntentFileCommandDefinition | IntentNoInputCommandDefinition { + const commandClass = this.constructor as unknown as typeof GeneratedIntentCommandBase + return commandClass.intentDefinition + } + + protected getIntentRawValues(): Record { + return readIntentRawValues(this, getIntentOptionDefinitions(this.getIntentDefinition())) + } + + private getOutputDescription(): string { + return this.getIntentDefinition().outputDescription + } + + protected validateOutputChoice(): number | undefined { + if (this.outputPath == null && !this.printUrls) { + this.output.error('Specify at least one of --out or --print-urls') + return 1 + } + + return undefined + } +} + +export abstract class GeneratedNoInputIntentCommand extends GeneratedIntentCommandBase { + protected override async run(): Promise { + const outputValidationError = this.validateOutputChoice() + if (outputValidationError != null) { + return outputValidationError + } + + return await executeIntentCommand({ + client: this.client, + createOptions: { + inputs: [], + }, + definition: this.getIntentDefinition() as IntentNoInputCommandDefinition, + output: this.output, + outputPath: this.outputPath, + printUrls: this.printUrls ?? 
false, + rawValues: this.getIntentRawValues(), + }) + } +} + +export function getIntentOptionDefinitions( + definition: IntentFileCommandDefinition | IntentNoInputCommandDefinition, +): readonly IntentOptionDefinition[] { + if (definition.execution.kind !== 'single-step' && definition.execution.kind !== 'dynamic-step') { + return [] + } + + return definition.execution.fields +} + +export function readIntentRawValues( + command: object, + fieldDefinitions: readonly IntentOptionDefinition[], +): Record { + const rawValues: Record = {} + + for (const fieldDefinition of fieldDefinitions) { + rawValues[fieldDefinition.name] = (command as Record)[ + fieldDefinition.propertyName + ] + } + + return rawValues +} + +export abstract class GeneratedFileIntentCommandBase extends GeneratedIntentCommandBase { + inputs = inputPathsOption('Provide an input path, directory, URL, or - for stdin') + + inputBase64 = Option.Array('--input-base64', { + description: 'Provide base64-encoded input content directly', + }) + + recursive = recursiveOption() + + deleteAfterProcessing = deleteAfterProcessingOption() + + reprocessStale = reprocessStaleOption() + + protected override getIntentDefinition(): IntentFileCommandDefinition { + return super.getIntentDefinition() as IntentFileCommandDefinition + } + + protected async prepareInputs(): Promise { + return await prepareIntentInputs({ + inputValues: this.inputs ?? [], + inputBase64Values: this.inputBase64 ?? [], + }) + } + + protected getCreateOptions( + inputs: string[], + ): Omit { + return { + del: this.deleteAfterProcessing, + inputs, + reprocessStale: this.reprocessStale, + recursive: this.recursive, + } + } + + protected getProvidedInputCount(): number { + return countProvidedInputs({ + inputs: this.inputs, + inputBase64: this.inputBase64, + }) + } + + protected hasTransientInputSources(): boolean { + return ( + (this.inputs?.some((input) => isHttpUrl(input)) ?? false) || + (this.inputBase64?.length ?? 
0) > 0 + ) + } + + protected resolveOutputMode(): 'directory' | 'file' | undefined { + if (this.getIntentDefinition().outputMode != null) { + return this.getIntentDefinition().outputMode + } + + if (this.outputPath == null) { + return undefined + } + + try { + return statSync(this.outputPath).isDirectory() ? 'directory' : 'file' + } catch { + return 'file' + } + } + + protected isDirectoryOutputTarget(): boolean { + return this.resolveOutputMode() === 'directory' + } + + protected validateInputPresence(rawValues: Record): number | undefined { + const intentDefinition = this.getIntentDefinition() + const inputCount = this.getProvidedInputCount() + if (inputCount !== 0) { + return undefined + } + + if (!requiresLocalInput(intentDefinition.inputPolicy, rawValues)) { + return undefined + } + + if (intentDefinition.inputPolicy.kind === 'required') { + this.output.error(`${intentDefinition.commandLabel} requires --input or --input-base64`) + return 1 + } + + this.output.error( + `${intentDefinition.commandLabel} requires --input or --${intentDefinition.inputPolicy.field.replaceAll('_', '-')}`, + ) + return 1 + } + + protected validateBeforePreparingInputs(rawValues: Record): number | undefined { + const outputValidationError = this.validateOutputChoice() + if (outputValidationError != null) { + return outputValidationError + } + + const validationError = this.validateInputPresence(rawValues) + if (validationError != null) { + return validationError + } + + const execution = this.getIntentDefinition().execution + if (execution.kind === 'dynamic-step') { + createDynamicIntentStep(execution, rawValues) + } + + return undefined + } + + protected validatePreparedInputs(_preparedInputs: PreparedIntentInputs): number | undefined { + return undefined + } + + protected async executePreparedInputs( + rawValues: Record, + preparedInputs: PreparedIntentInputs, + ): Promise { + return await executeIntentCommand({ + client: this.client, + createOptions: 
this.getCreateOptions(preparedInputs.inputs), + definition: this.getIntentDefinition(), + output: this.output, + outputPath: this.outputPath, + printUrls: this.printUrls ?? false, + rawValues, + }) + } + + protected override async run(): Promise { + const rawValues = this.getIntentRawValues() + const validationError = this.validateBeforePreparingInputs(rawValues) + if (validationError != null) { + return validationError + } + + const preparedInputs = await this.prepareInputs() + try { + const preparedInputError = this.validatePreparedInputs(preparedInputs) + if (preparedInputError != null) { + return preparedInputError + } + + return await this.executePreparedInputs(rawValues, preparedInputs) + } finally { + await Promise.all(preparedInputs.cleanup.map((cleanup) => cleanup())) + } + } +} + +export abstract class GeneratedWatchableFileIntentCommand extends GeneratedFileIntentCommandBase { + watch = watchOption() + + concurrency = concurrencyOption() + + protected override getCreateOptions( + inputs: string[], + ): Omit { + return { + ...super.getCreateOptions(inputs), + concurrency: this.concurrency, + watch: this.watch, + } + } + + protected override validateBeforePreparingInputs( + rawValues: Record, + ): number | undefined { + const validationError = super.validateBeforePreparingInputs(rawValues) + if (validationError != null) { + return validationError + } + + const sharedValidationError = validateSharedFileProcessingOptions({ + explicitInputCount: this.getProvidedInputCount(), + singleAssembly: false, + watch: this.watch, + watchRequiresInputsMessage: `${this.getIntentDefinition().commandLabel} --watch requires --input or --input-base64`, + }) + if (sharedValidationError != null) { + this.output.error(sharedValidationError) + return 1 + } + + if (this.watch && this.hasTransientInputSources()) { + this.output.error('--watch is only supported for filesystem inputs') + return 1 + } + + return undefined + } + + protected override validatePreparedInputs( + 
preparedInputs: PreparedIntentInputs, + ): number | undefined { + if (this.watch && preparedInputs.hasTransientInputs) { + this.output.error('--watch is only supported for filesystem inputs') + return 1 + } + return undefined + } +} + +export abstract class GeneratedStandardFileIntentCommand extends GeneratedWatchableFileIntentCommand { + singleAssembly = singleAssemblyOption() + + protected override getCreateOptions( + inputs: string[], + ): Omit { + return { + ...super.getCreateOptions(inputs), + singleAssembly: this.singleAssembly, + } + } + + protected override validateBeforePreparingInputs( + rawValues: Record, + ): number | undefined { + const validationError = super.validateBeforePreparingInputs(rawValues) + if (validationError != null) { + return validationError + } + + if ( + this.singleAssembly && + this.getProvidedInputCount() > 1 && + !this.isDirectoryOutputTarget() + ) { + this.output.error( + 'Output must be a directory when using --single-assembly with multiple inputs', + ) + return 1 + } + + return undefined + } +} + +export abstract class GeneratedBundledFileIntentCommand extends GeneratedFileIntentCommandBase { + protected override getCreateOptions( + inputs: string[], + ): Omit { + return { + ...super.getCreateOptions(inputs), + singleAssembly: true, + } + } +} diff --git a/packages/node/src/cli/resultFiles.ts b/packages/node/src/cli/resultFiles.ts new file mode 100644 index 00000000..ed6d1a7d --- /dev/null +++ b/packages/node/src/cli/resultFiles.ts @@ -0,0 +1,93 @@ +export interface AssemblyResultEntryLike { + basename?: unknown + ext?: unknown + name?: unknown + ssl_url?: unknown + url?: unknown +} + +export interface NormalizedAssemblyResultFile { + file: AssemblyResultEntryLike + name: string + stepName: string + url: string +} + +function isAssemblyResultEntryLike(value: unknown): value is AssemblyResultEntryLike { + return value != null && typeof value === 'object' +} + +function normalizeAssemblyResultName( + stepName: string, + file: 
AssemblyResultEntryLike, +): string | null { + if (typeof file.name === 'string') { + return file.name + } + + if (typeof file.basename === 'string') { + if (typeof file.ext === 'string' && file.ext.length > 0) { + return `${file.basename}.${file.ext}` + } + + return file.basename + } + + return `${stepName}_result` +} + +function normalizeAssemblyResultUrl(file: AssemblyResultEntryLike): string | null { + if (typeof file.ssl_url === 'string') { + return file.ssl_url + } + + if (typeof file.url === 'string') { + return file.url + } + + return null +} + +export function normalizeAssemblyResultFile( + stepName: string, + value: unknown, +): NormalizedAssemblyResultFile | null { + if (!isAssemblyResultEntryLike(value)) { + return null + } + + const url = normalizeAssemblyResultUrl(value) + const name = normalizeAssemblyResultName(stepName, value) + if (url == null || name == null) { + return null + } + + return { + file: value, + name, + stepName, + url, + } +} + +export function flattenAssemblyResultFiles(results: unknown): NormalizedAssemblyResultFile[] { + if (results == null || typeof results !== 'object' || Array.isArray(results)) { + return [] + } + + const files: NormalizedAssemblyResultFile[] = [] + for (const [stepName, stepResults] of Object.entries(results)) { + if (!Array.isArray(stepResults)) { + continue + } + + for (const stepResult of stepResults) { + const normalized = normalizeAssemblyResultFile(stepName, stepResult) + if (normalized != null) { + files.push(normalized) + } + } + } + + return files +} diff --git a/packages/node/src/cli/resultUrls.ts b/packages/node/src/cli/resultUrls.ts new file mode 100644 index 00000000..b500a666 --- /dev/null +++ b/packages/node/src/cli/resultUrls.ts @@ -0,0 +1,58 @@ +import type { IOutputCtl } from './OutputCtl.ts' +import { flattenAssemblyResultFiles } from './resultFiles.ts' + +export interface ResultUrlRow { + assemblyId: string + name: string + step: string + url: string +} + +export function 
collectResultUrlRows({ + assemblyId, + results, +}: { + assemblyId: string + results: unknown +}): ResultUrlRow[] { + return flattenAssemblyResultFiles(results).map((file) => ({ + assemblyId, + step: file.stepName, + name: file.name, + url: file.url, + })) +} + +export function formatResultUrlRows(rows: readonly ResultUrlRow[]): string { + if (rows.length === 0) { + return '' + } + + const includeAssembly = new Set(rows.map((row) => row.assemblyId)).size > 1 + const headers = includeAssembly ? ['ASSEMBLY', 'STEP', 'NAME', 'URL'] : ['STEP', 'NAME', 'URL'] + const tableRows = rows.map((row) => + includeAssembly ? [row.assemblyId, row.step, row.name, row.url] : [row.step, row.name, row.url], + ) + + const widths = headers.map((header, index) => + Math.max(header.length, ...tableRows.map((row) => row[index]?.length ?? 0)), + ) + + return [headers, ...tableRows] + .map((row) => + row + .map((value, index) => + index === row.length - 1 ? value : value.padEnd(widths[index] ?? value.length), + ) + .join(' '), + ) + .join('\n') +} + +export function printResultUrls(output: IOutputCtl, rows: readonly ResultUrlRow[]): void { + if (rows.length === 0) { + return + } + + output.print(formatResultUrlRows(rows), { urls: rows }) +} diff --git a/packages/node/src/cli/semanticIntents/imageDescribe.ts b/packages/node/src/cli/semanticIntents/imageDescribe.ts new file mode 100644 index 00000000..f63191dc --- /dev/null +++ b/packages/node/src/cli/semanticIntents/imageDescribe.ts @@ -0,0 +1,271 @@ +import { parseStringArrayValue } from '../intentFields.ts' +import type { + IntentDynamicStepExecutionDefinition, + IntentOptionDefinition, +} from '../intentRuntime.ts' + +const imageDescribeFields = ['labels', 'altText', 'title', 'caption', 'description'] as const + +type ImageDescribeField = (typeof imageDescribeFields)[number] + +const wordpressDescribeFields = [ + 'altText', + 'title', + 'caption', + 'description', +] as const satisfies readonly ImageDescribeField[] + +const 
defaultDescribeModel = 'anthropic/claude-sonnet-4-6' + +export const imageDescribeExecutionDefinition = { + kind: 'dynamic-step', + handler: 'image-describe', + resultStepName: 'describe', + fields: [ + { + name: 'fields', + kind: 'string-array', + propertyName: 'fields', + optionFlags: '--fields', + description: + 'Describe output fields to generate, for example labels or altText,title,caption,description', + required: false, + }, + { + name: 'forProfile', + kind: 'string', + propertyName: 'forProfile', + optionFlags: '--for', + description: 'Use a named output profile, currently: wordpress', + required: false, + }, + { + name: 'model', + kind: 'string', + propertyName: 'model', + optionFlags: '--model', + description: 'Model to use for generated text fields (default: anthropic/claude-sonnet-4-6)', + required: false, + }, + ] as const satisfies readonly IntentOptionDefinition[], +} satisfies IntentDynamicStepExecutionDefinition + +export const imageDescribeCommandPresentation = { + description: 'Describe images as labels or publishable text fields', + details: + 'Generates image labels through `/image/describe`, or structured altText/title/caption/description through `/ai/chat`, then writes the JSON result to `--out`.', + examples: [ + [ + 'Describe an image as labels', + 'transloadit image describe --input hero.jpg --out labels.json', + ], + [ + 'Generate WordPress-ready fields', + 'transloadit image describe --input hero.jpg --for wordpress --out fields.json', + ], + [ + 'Request a custom field set', + 'transloadit image describe --input hero.jpg --fields altText,title,caption --out fields.json', + ], + ] as Array<[string, string]>, +} as const + +function parseDescribeFields(value: string[] | undefined): ImageDescribeField[] { + const rawFields = parseStringArrayValue(value ?? 
[]) + + if (rawFields.length === 0) { + return [] + } + + const fields: ImageDescribeField[] = [] + const seen = new Set() + + for (const rawField of rawFields) { + if (!imageDescribeFields.includes(rawField as ImageDescribeField)) { + throw new Error( + `Unsupported --fields value "${rawField}". Supported values: ${imageDescribeFields.join(', ')}`, + ) + } + + const field = rawField as ImageDescribeField + if (seen.has(field)) { + continue + } + + seen.add(field) + fields.push(field) + } + + return fields +} + +function resolveDescribeProfile(profile: string | undefined): 'wordpress' | null { + if (profile == null) { + return null + } + + if (profile === 'wordpress') { + return 'wordpress' + } + + throw new Error(`Unsupported --for value "${profile}". Supported values: wordpress`) +} + +function resolveRequestedDescribeFields({ + explicitFields, + profile, +}: { + explicitFields: ImageDescribeField[] + profile: 'wordpress' | null +}): ImageDescribeField[] { + if (explicitFields.length > 0) { + return explicitFields + } + + if (profile === 'wordpress') { + return [...wordpressDescribeFields] + } + + return explicitFields.length === 0 ? 
['labels'] : explicitFields +} + +function validateDescribeFields({ + fields, + model, + profile, +}: { + fields: ImageDescribeField[] + model: string + profile: 'wordpress' | null +}): void { + const includesLabels = fields.includes('labels') + + if (includesLabels && fields.length > 1) { + throw new Error( + 'The labels field cannot be combined with altText, title, caption, or description', + ) + } + + if (includesLabels && profile != null) { + throw new Error('--for cannot be combined with --fields labels') + } + + if (includesLabels && model !== defaultDescribeModel) { + throw new Error( + '--model is only supported when generating altText, title, caption, or description', + ) + } +} + +function resolveImageDescribeRequest(rawValues: Record): { + fields: ImageDescribeField[] + profile: 'wordpress' | null +} { + const explicitFields = parseDescribeFields(rawValues.fields as string[] | undefined) + const profile = resolveDescribeProfile(rawValues.forProfile as string | undefined) + const fields = resolveRequestedDescribeFields({ explicitFields, profile }) + validateDescribeFields({ + fields, + model: String(rawValues.model ?? defaultDescribeModel), + profile, + }) + + return { fields, profile } +} + +function buildDescribeAiChatSchema(fields: readonly ImageDescribeField[]): Record { + const properties = Object.fromEntries( + fields.map((field) => { + const description = + field === 'altText' + ? 'A concise accessibility-focused alt text that objectively describes the image' + : field === 'title' + ? 'A concise publishable title for the image' + : field === 'caption' + ? 
'A short caption suitable for displaying below the image' + : 'A richer description of the image suitable for CMS usage' + + return [ + field, + { + type: 'string', + description, + }, + ] + }), + ) + + return { + type: 'object', + additionalProperties: false, + required: [...fields], + properties, + } +} + +function buildDescribeAiChatMessages({ + fields, + profile, +}: { + fields: readonly ImageDescribeField[] + profile: 'wordpress' | null +}): { + messages: string + systemMessage: string +} { + const requestedFields = fields.join(', ') + const profileHint = + profile === 'wordpress' + ? 'The output is for the WordPress media library.' + : 'The output is for a publishing workflow.' + + return { + systemMessage: [ + 'You generate accurate image copy for publishing workflows.', + profileHint, + 'Return only the schema fields requested.', + 'Be concrete, concise, and faithful to what is visibly present in the image.', + 'Do not invent facts, brands, locations, or identities that are not clearly visible.', + 'Avoid keyword stuffing, hype, and mentions of SEO or accessibility in the output itself.', + 'For altText, write one objective sentence focused on what matters to someone who cannot see the image.', + 'For title, keep it short and natural.', + 'For caption, write one short sentence suitable for publication.', + 'For description, write one or two sentences with slightly more context than the caption.', + ].join(' '), + messages: `Analyze the attached image and fill these fields: ${requestedFields}.`, + } +} + +export function createImageDescribeStep( + rawValues: Record, +): Record { + const { fields, profile } = resolveImageDescribeRequest(rawValues) + if (fields.length === 1 && fields[0] === 'labels') { + return { + robot: '/image/describe', + use: ':original', + result: true, + provider: 'aws', + format: 'json', + granularity: 'list', + explicit_descriptions: false, + } + } + + const { messages, systemMessage } = buildDescribeAiChatMessages({ fields, profile 
}) + + return { + robot: '/ai/chat', + use: ':original', + result: true, + model: String(rawValues.model ?? defaultDescribeModel), + format: 'json', + return_messages: 'last', + test_credentials: true, + schema: JSON.stringify(buildDescribeAiChatSchema(fields)), + messages, + system_message: systemMessage, + // @TODO Move these inline /ai/chat instructions into a builtin template in api2 and + // switch this command to call that builtin instead of shipping prompt logic in the CLI. + } +} diff --git a/packages/node/src/cli/semanticIntents/index.ts b/packages/node/src/cli/semanticIntents/index.ts new file mode 100644 index 00000000..76b5adcd --- /dev/null +++ b/packages/node/src/cli/semanticIntents/index.ts @@ -0,0 +1,39 @@ +import type { IntentInputPolicy } from '../intentInputPolicy.ts' +import type { IntentDynamicStepExecutionDefinition, IntentRunnerKind } from '../intentRuntime.ts' +import { + createImageDescribeStep, + imageDescribeCommandPresentation, + imageDescribeExecutionDefinition, +} from './imageDescribe.ts' + +export interface SemanticIntentDescriptor { + createStep: (rawValues: Record) => Record + execution: IntentDynamicStepExecutionDefinition + inputPolicy: IntentInputPolicy + outputDescription: string + presentation: { + description: string + details: string + examples: Array<[string, string]> + } + runnerKind: IntentRunnerKind +} + +export const semanticIntentDescriptors: Record = { + 'image-describe': { + createStep: createImageDescribeStep, + execution: imageDescribeExecutionDefinition, + inputPolicy: { kind: 'required' }, + outputDescription: 'Write the JSON result to this path or directory', + presentation: imageDescribeCommandPresentation, + runnerKind: 'watchable', + }, +} + +export function getSemanticIntentDescriptor(name: string): SemanticIntentDescriptor { + if (!(name in semanticIntentDescriptors)) { + throw new Error(`Semantic intent descriptor does not exist for "${name}"`) + } + + return semanticIntentDescriptors[name] +} diff --git 
a/packages/node/src/cli/stepsInput.ts b/packages/node/src/cli/stepsInput.ts new file mode 100644 index 00000000..392006a2 --- /dev/null +++ b/packages/node/src/cli/stepsInput.ts @@ -0,0 +1,20 @@ +import fsp from 'node:fs/promises' + +import type { StepsInput } from '../alphalib/types/template.ts' +import { stepsSchema } from '../alphalib/types/template.ts' + +export function parseStepsInputJson(content: string): StepsInput { + const parsed: unknown = JSON.parse(content) + const validated = stepsSchema.safeParse(parsed) + if (!validated.success) { + throw new Error(`Invalid steps format: ${validated.error.message}`) + } + + // Preserve the original input shape so we do not leak zod defaults into API payloads. + return parsed as StepsInput +} + +export async function readStepsInputFile(filePath: string): Promise { + const content = await fsp.readFile(filePath, 'utf8') + return parseStepsInputJson(content) +} diff --git a/packages/node/src/ensureUniqueCounter.ts b/packages/node/src/ensureUniqueCounter.ts new file mode 100644 index 00000000..43ff4f7b --- /dev/null +++ b/packages/node/src/ensureUniqueCounter.ts @@ -0,0 +1,22 @@ +export async function ensureUniqueCounterValue({ + initialValue, + isTaken, + reserve, + nextValue, +}: { + initialValue: T + isTaken: (candidate: T) => Promise | boolean + reserve: (candidate: T) => void + nextValue: (counter: number) => T +}): Promise { + let candidate = initialValue + let counter = 1 + + while (await isTaken(candidate)) { + candidate = nextValue(counter) + counter += 1 + } + + reserve(candidate) + return candidate +} diff --git a/packages/node/src/inputFiles.ts b/packages/node/src/inputFiles.ts index 00f7acdf..fd635aff 100644 --- a/packages/node/src/inputFiles.ts +++ b/packages/node/src/inputFiles.ts @@ -1,3 +1,4 @@ +import * as dnsPromises from 'node:dns/promises' import { createWriteStream } from 'node:fs' import { mkdtemp, rm, writeFile } from 'node:fs/promises' import { isIP } from 'node:net' @@ -5,9 +6,12 @@ import { 
tmpdir } from 'node:os' import { basename, join } from 'node:path' import type { Readable } from 'node:stream' import { pipeline } from 'node:stream/promises' +import type CacheableLookup from 'cacheable-lookup' +import type { EntryObject, IPFamily } from 'cacheable-lookup' import got from 'got' import type { Input as IntoStreamInput } from 'into-stream' import type { CreateAssemblyParams } from './apiTypes.ts' +import { ensureUniqueCounterValue } from './ensureUniqueCounter.ts' export type InputFile = | { @@ -63,15 +67,28 @@ const ensureUnique = (field: string, used: Set): void => { used.add(field) } -const ensureUniqueStepName = (baseName: string, used: Set): string => { - let name = baseName - let counter = 1 - while (used.has(name)) { - name = `${baseName}_${counter}` - counter += 1 - } - used.add(name) - return name +const ensureUniqueStepName = async (baseName: string, used: Set): Promise => + await ensureUniqueCounterValue({ + initialValue: baseName, + isTaken: (candidate) => used.has(candidate), + reserve: (candidate) => used.add(candidate), + nextValue: (counter) => `${baseName}_${counter}`, + }) + +const ensureUniqueTempFilePath = async ( + root: string, + filename: string, + used: Set, +): Promise => { + const parsed = basename(filename) + const extension = parsed.includes('.') ? `.${parsed.split('.').slice(1).join('.')}` : '' + const stem = extension === '' ? 
parsed : parsed.slice(0, -extension.length) + return await ensureUniqueCounterValue({ + initialValue: join(root, parsed), + isTaken: (candidate) => used.has(candidate), + reserve: (candidate) => used.add(candidate), + nextValue: (counter) => join(root, `${stem}-${counter}${extension}`), + }) } const decodeBase64 = (value: string): Buffer => Buffer.from(value, 'base64') @@ -106,9 +123,14 @@ const findImportStepName = (field: string, steps: Record): stri return null } -const downloadUrlToFile = async (url: string, filePath: string): Promise => { - await pipeline(got.stream(url), createWriteStream(filePath)) -} +const MAX_URL_REDIRECTS = 10 + +const isRedirectStatusCode = (statusCode: number): boolean => + statusCode === 301 || + statusCode === 302 || + statusCode === 303 || + statusCode === 307 || + statusCode === 308 const isPrivateIp = (address: string): boolean => { if (address === 'localhost') return true @@ -134,7 +156,9 @@ const isPrivateIp = (address: string): boolean => { return false } -const assertPublicDownloadUrl = (value: string): void => { +const resolvePublicDownloadAddress = async ( + value: string, +): Promise<{ address: string; family: 4 | 6 }> => { const parsed = new URL(value) if (!['http:', 'https:'].includes(parsed.protocol)) { throw new Error(`URL downloads are limited to http/https: ${value}`) @@ -142,6 +166,150 @@ const assertPublicDownloadUrl = (value: string): void => { if (isPrivateIp(parsed.hostname)) { throw new Error(`URL downloads are limited to public hosts: ${value}`) } + + const resolvedAddresses = await dnsPromises.lookup(parsed.hostname, { + all: true, + verbatim: true, + }) + if (resolvedAddresses.some((address) => isPrivateIp(address.address))) { + throw new Error(`URL downloads are limited to public hosts: ${value}`) + } + + const firstAddress = resolvedAddresses[0] + if (firstAddress == null) { + throw new Error(`Unable to resolve URL hostname: ${value}`) + } + + return { + address: firstAddress.address, + family: 
firstAddress.family as 4 | 6, + } +} + +const downloadUrlToFile = async ({ + allowPrivateUrls, + filePath, + url, +}: { + allowPrivateUrls: boolean + filePath: string + url: string +}): Promise => { + let currentUrl = url + + for (let redirectCount = 0; redirectCount <= MAX_URL_REDIRECTS; redirectCount += 1) { + let validatedAddress: { address: string; family: 4 | 6 } | null = null + if (!allowPrivateUrls) { + validatedAddress = await resolvePublicDownloadAddress(currentUrl) + } + + const dnsLookup: CacheableLookup['lookup'] | undefined = + validatedAddress == null ? undefined : createPinnedDnsLookup(validatedAddress) + + const responseStream = got.stream(currentUrl, { + dnsLookup, + followRedirect: false, + retry: { limit: 0 }, + throwHttpErrors: false, + }) + + const response = await new Promise< + Readable & { headers: Record; statusCode?: number } + >((resolvePromise, reject) => { + responseStream.once('response', (incomingResponse) => { + resolvePromise( + incomingResponse as Readable & { + headers: Record + statusCode?: number + }, + ) + }) + responseStream.once('error', reject) + }) + + const statusCode = response.statusCode ?? 
0 + if (isRedirectStatusCode(statusCode)) { + responseStream.destroy() + const location = response.headers.location + if (location == null) { + throw new Error(`Redirect response missing Location header: ${currentUrl}`) + } + currentUrl = new URL(location, currentUrl).toString() + continue + } + + if (statusCode >= 400) { + responseStream.destroy() + throw new Error(`Failed to download URL: ${currentUrl} (${statusCode})`) + } + + await pipeline(responseStream, createWriteStream(filePath)) + return + } + + throw new Error(`Too many redirects while downloading URL input: ${url}`) +} + +function createPinnedDnsLookup(validatedAddress: { + address: string + family: 4 | 6 +}): CacheableLookup['lookup'] { + function pinnedDnsLookup( + _hostname: string, + family: IPFamily, + callback: (error: NodeJS.ErrnoException | null, address: string, family: IPFamily) => void, + ): void + function pinnedDnsLookup( + _hostname: string, + callback: (error: NodeJS.ErrnoException | null, address: string, family: IPFamily) => void, + ): void + function pinnedDnsLookup( + _hostname: string, + options: { all: true }, + callback: (error: NodeJS.ErrnoException | null, result: ReadonlyArray) => void, + ): void + function pinnedDnsLookup( + _hostname: string, + options: object, + callback: (error: NodeJS.ErrnoException | null, address: string, family: IPFamily) => void, + ): void + function pinnedDnsLookup( + _hostname: string, + familyOrCallback: + | IPFamily + | object + | ((error: NodeJS.ErrnoException | null, address: string, family: IPFamily) => void), + callback?: + | ((error: NodeJS.ErrnoException | null, address: string, family: IPFamily) => void) + | ((error: NodeJS.ErrnoException | null, result: ReadonlyArray) => void), + ): void { + if (typeof familyOrCallback === 'function') { + familyOrCallback(null, validatedAddress.address, validatedAddress.family) + return + } + + if ( + typeof familyOrCallback === 'object' && + familyOrCallback != null && + 'all' in familyOrCallback + ) { + ;( 
+ callback as ( + error: NodeJS.ErrnoException | null, + result: ReadonlyArray, + ) => void + )(null, [{ address: validatedAddress.address, family: validatedAddress.family, expires: 0 }]) + return + } + + ;(callback as (error: NodeJS.ErrnoException | null, address: string, family: IPFamily) => void)( + null, + validatedAddress.address, + validatedAddress.family, + ) + } + + return pinnedDnsLookup } export const prepareInputFiles = async ( @@ -176,6 +344,7 @@ export const prepareInputFiles = async ( const steps = isRecord(nextParams.steps) ? { ...nextParams.steps } : {} const usedSteps = new Set(Object.keys(steps)) const usedFields = new Set() + const usedTempPaths = new Set() const importUrlsByStep = new Map() const importStepNames = Object.keys(steps).filter((name) => isHttpImportStep(steps[name])) const sharedImportStep = importStepNames.length === 1 ? importStepNames[0] : null @@ -211,7 +380,7 @@ export const prepareInputFiles = async ( if (base64Strategy === 'tempfile') { const root = await ensureTempRoot() const filename = file.filename ? basename(file.filename) : `${file.field}.bin` - const filePath = join(root, filename) + const filePath = await ensureUniqueTempFilePath(root, filename, usedTempPaths) await writeFile(filePath, buffer) files[file.field] = filePath } else { @@ -226,7 +395,7 @@ export const prepareInputFiles = async ( urlStrategy === 'import' || (urlStrategy === 'import-if-present' && targetStep) if (shouldImport) { - const stepName = targetStep ?? ensureUniqueStepName(file.field, usedSteps) + const stepName = targetStep ?? (await ensureUniqueStepName(file.field, usedSteps)) const urls = importUrlsByStep.get(stepName) ?? [] urls.push(file.url) importUrlsByStep.set(stepName, urls) @@ -238,11 +407,12 @@ export const prepareInputFiles = async ( (file.filename ? basename(file.filename) : null) ?? getFilenameFromUrl(file.url) ?? 
`${file.field}.bin` - const filePath = join(root, filename) - if (!allowPrivateUrls) { - assertPublicDownloadUrl(file.url) - } - await downloadUrlToFile(file.url, filePath) + const filePath = await ensureUniqueTempFilePath(root, filename, usedTempPaths) + await downloadUrlToFile({ + allowPrivateUrls, + filePath, + url: file.url, + }) files[file.field] = filePath } } diff --git a/packages/node/test/support/intentSmokeCases.ts b/packages/node/test/support/intentSmokeCases.ts new file mode 100644 index 00000000..c66b787c --- /dev/null +++ b/packages/node/test/support/intentSmokeCases.ts @@ -0,0 +1,115 @@ +import { + getIntentCatalogKey, + getIntentPaths, + intentCatalog, +} from '../../src/cli/intentCommandSpecs.ts' + +export interface IntentSmokeCase { + args: string[] + key: string + outputPath: string + paths: string[] + verifier: string +} + +const intentSmokeOverrides: Record> = { + '/audio/waveform': { + args: ['--input', '@fixture/input.mp3'], + outputPath: 'audio-waveform.png', + verifier: 'png', + }, + '/document/autorotate': { + args: ['--input', '@fixture/input.pdf'], + outputPath: 'document-auto-rotate.pdf', + verifier: 'pdf', + }, + '/document/convert': { + args: ['--input', '@fixture/input.txt', '--format', 'pdf'], + outputPath: 'document-convert.pdf', + verifier: 'pdf', + }, + '/document/optimize': { + args: ['--input', '@fixture/input.pdf'], + outputPath: 'document-optimize.pdf', + verifier: 'pdf', + }, + '/document/thumbs': { + args: ['--input', '@fixture/input.pdf'], + outputPath: 'document-thumbs', + verifier: 'document-thumbs', + }, + '/file/compress': { + args: ['--input', '@fixture/input.txt', '--format', 'zip'], + outputPath: 'file-compress.zip', + verifier: 'zip', + }, + '/file/decompress': { + args: ['--input', '@fixture/input.zip'], + outputPath: 'file-decompress', + verifier: 'file-decompress', + }, + '/file/preview': { + args: ['--input', '@preview-url', '--width', '300'], + outputPath: 'preview-generate.png', + verifier: 'png', + }, + 
'/image/bgremove': { + args: ['--input', '@fixture/input.jpg'], + outputPath: 'image-remove-background.png', + verifier: 'png', + }, + '/image/generate': { + args: [ + '--prompt', + 'A small red bicycle on a cream background, studio lighting', + '--model', + 'google/nano-banana', + ], + outputPath: 'image-generate.png', + verifier: 'png', + }, + 'image-describe:image/describe': { + args: ['--input', '@fixture/input.jpg'], + outputPath: 'image-describe.json', + verifier: 'json', + }, + '/image/optimize': { + args: ['--input', '@fixture/input.jpg'], + outputPath: 'image-optimize.jpg', + verifier: 'jpeg', + }, + '/image/resize': { + args: ['--input', '@fixture/input.jpg', '--width', '200'], + outputPath: 'image-resize.jpg', + verifier: 'jpeg', + }, + '/text/speak': { + args: ['--prompt', 'Hello from the Transloadit Node CLI intents test.', '--provider', 'aws'], + outputPath: 'text-speak.mp3', + verifier: 'mp3', + }, + '/video/thumbs': { + args: ['--input', '@fixture/input.mp4'], + outputPath: 'video-thumbs', + verifier: 'video-thumbs', + }, + 'builtin/encode-hls-video@latest': { + args: ['--input', '@fixture/input.mp4'], + outputPath: 'video-encode-hls', + verifier: 'video-encode-hls', + }, +} + +export const intentSmokeCases = intentCatalog.map((intent) => { + const key = getIntentCatalogKey(intent) + const smokeCase = intentSmokeOverrides[key] + if (smokeCase == null) { + throw new Error(`Missing smoke-case definition for ${key}`) + } + + return { + ...smokeCase, + key, + paths: getIntentPaths(intent), + } +}) satisfies IntentSmokeCase[] diff --git a/packages/node/test/unit/cli/assemblies-create.test.ts b/packages/node/test/unit/cli/assemblies-create.test.ts new file mode 100644 index 00000000..89209b52 --- /dev/null +++ b/packages/node/test/unit/cli/assemblies-create.test.ts @@ -0,0 +1,1009 @@ +import { EventEmitter } from 'node:events' +import { mkdir, mkdtemp, readdir, readFile, rm, stat, utimes, writeFile } from 'node:fs/promises' +import { tmpdir } from 
'node:os' +import path from 'node:path' +import { setTimeout as delay } from 'node:timers/promises' +import tty from 'node:tty' +import nock from 'nock' +import { afterEach, describe, expect, it, vi } from 'vitest' + +import { create } from '../../../src/cli/commands/assemblies.ts' +import OutputCtl from '../../../src/cli/OutputCtl.ts' + +const tempDirs: string[] = [] + +async function createTempDir(prefix: string): Promise { + const tempDir = await mkdtemp(path.join(tmpdir(), prefix)) + tempDirs.push(tempDir) + return tempDir +} + +function getLegacyRelativeInputPath(inputPath: string): string { + return path.relative(process.cwd(), inputPath).replace(/^(\.\.\/)+/, '') +} + +async function collectRelativeFiles(rootDir: string, currentDir = rootDir): Promise { + const entries = await readdir(currentDir, { withFileTypes: true }) + const files: string[] = [] + + for (const entry of entries) { + const fullPath = path.join(currentDir, entry.name) + if (entry.isDirectory()) { + files.push(...(await collectRelativeFiles(rootDir, fullPath))) + continue + } + + files.push(path.relative(rootDir, fullPath)) + } + + return files.sort() +} + +afterEach(async () => { + vi.restoreAllMocks() + vi.resetModules() + nock.cleanAll() + nock.abortPendingRequests() + + await Promise.all( + tempDirs.splice(0).map((tempDir) => rm(tempDir, { recursive: true, force: true })), + ) +}) + +describe('assemblies create', () => { + it('writes result bytes to stdout when output is -', async () => { + vi.spyOn(console, 'error').mockImplementation(() => {}) + + const output = new OutputCtl() + const stdoutWrite = vi.spyOn(process.stdout, 'write').mockImplementation(() => true) + const client = { + createAssembly: vi.fn().mockResolvedValue({ assembly_id: 'assembly-stdout' }), + awaitAssemblyCompletion: vi.fn().mockResolvedValue({ + ok: 'ASSEMBLY_COMPLETED', + results: { + generated: [{ url: 'http://downloads.test/stdout.txt', name: 'stdout.txt' }], + }, + }), + } + + 
nock('http://downloads.test').get('/stdout.txt').reply(200, 'stdout-contents') + + await expect( + create(output, client as never, { + inputs: [], + output: '-', + stepsData: { + generated: { + robot: '/image/generate', + result: true, + prompt: 'hello', + model: 'flux-schnell', + }, + }, + }), + ).resolves.toEqual( + expect.objectContaining({ + hasFailures: false, + }), + ) + + expect(stdoutWrite).toHaveBeenCalled() + expect(stdoutWrite.mock.calls.map(([chunk]) => String(chunk)).join('')).toContain( + 'stdout-contents', + ) + }) + + it('waits for stdout drain before finishing stdout downloads', async () => { + vi.spyOn(console, 'error').mockImplementation(() => {}) + + const output = new OutputCtl() + let resolved = false + const stdoutWrite = vi.spyOn(process.stdout, 'write').mockImplementation(() => false) + const client = { + createAssembly: vi.fn().mockResolvedValue({ assembly_id: 'assembly-stdout-drain' }), + awaitAssemblyCompletion: vi.fn().mockResolvedValue({ + ok: 'ASSEMBLY_COMPLETED', + results: { + generated: [{ url: 'http://downloads.test/stdout-drain.txt', name: 'stdout-drain.txt' }], + }, + }), + } + + nock('http://downloads.test').get('/stdout-drain.txt').reply(200, 'stdout-drain') + + const createPromise = create(output, client as never, { + inputs: [], + output: '-', + stepsData: { + generated: { + robot: '/image/generate', + result: true, + prompt: 'hello', + model: 'flux-schnell', + }, + }, + }).then(() => { + resolved = true + }) + + await delay(20) + expect(resolved).toBe(false) + expect(stdoutWrite).toHaveBeenCalled() + + process.stdout.emit('drain') + + await createPromise + expect(resolved).toBe(true) + }) + + it('returns result URLs for completed assemblies without local output', async () => { + vi.spyOn(console, 'error').mockImplementation(() => {}) + + const output = new OutputCtl() + const client = { + createAssembly: vi.fn().mockResolvedValue({ assembly_id: 'assembly-urls' }), + awaitAssemblyCompletion: vi.fn().mockResolvedValue({ + ok: 
'ASSEMBLY_COMPLETED', + results: { + generated: [{ url: 'http://downloads.test/result.png', name: 'result.png' }], + }, + }), + } + + await expect( + create(output, client as never, { + inputs: [], + output: null, + stepsData: { + generated: { + robot: '/image/generate', + result: true, + prompt: 'hello', + model: 'flux-schnell', + }, + }, + }), + ).resolves.toEqual( + expect.objectContaining({ + hasFailures: false, + resultUrls: [ + { + assemblyId: 'assembly-urls', + step: 'generated', + name: 'result.png', + url: 'http://downloads.test/result.png', + }, + ], + }), + ) + }) + + it('rejects stdout output when an assembly returns multiple files', async () => { + vi.spyOn(console, 'error').mockImplementation(() => {}) + const stdoutWrite = vi.spyOn(process.stdout, 'write').mockImplementation(() => true) + + const output = new OutputCtl() + const client = { + createAssembly: vi.fn().mockResolvedValue({ assembly_id: 'assembly-stdout-multi' }), + awaitAssemblyCompletion: vi.fn().mockResolvedValue({ + ok: 'ASSEMBLY_COMPLETED', + results: { + generated: [ + { url: 'http://downloads.test/stdout-a.txt', name: 'a.txt' }, + { url: 'http://downloads.test/stdout-b.txt', name: 'b.txt' }, + ], + }, + }), + } + + nock('http://downloads.test').get('/stdout-a.txt').reply(200, 'stdout-a') + nock('http://downloads.test').get('/stdout-b.txt').reply(200, 'stdout-b') + + await expect( + create(output, client as never, { + inputs: [], + output: '-', + stepsData: { + generated: { + robot: '/image/generate', + result: true, + prompt: 'hello', + model: 'flux-schnell', + }, + }, + }), + ).resolves.toEqual( + expect.objectContaining({ + hasFailures: true, + }), + ) + + expect(stdoutWrite).not.toHaveBeenCalled() + }) + + it('rejects file outputs when an assembly returns multiple files', async () => { + vi.spyOn(console, 'error').mockImplementation(() => {}) + + const tempDir = await createTempDir('transloadit-file-output-multi-') + const outputPath = path.join(tempDir, 'result.txt') + + const 
output = new OutputCtl() + const client = { + createAssembly: vi.fn().mockResolvedValue({ assembly_id: 'assembly-file-multi' }), + awaitAssemblyCompletion: vi.fn().mockResolvedValue({ + ok: 'ASSEMBLY_COMPLETED', + results: { + generated: [ + { url: 'http://downloads.test/result-a.txt', name: 'a.txt' }, + { url: 'http://downloads.test/result-b.txt', name: 'b.txt' }, + ], + }, + }), + } + + nock('http://downloads.test').get('/result-a.txt').reply(200, 'result-a') + nock('http://downloads.test').get('/result-b.txt').reply(200, 'result-b') + + await expect( + create(output, client as never, { + inputs: [], + output: outputPath, + stepsData: { + generated: { + robot: '/image/generate', + result: true, + prompt: 'hello', + model: 'flux-schnell', + }, + }, + }), + ).resolves.toEqual( + expect.objectContaining({ + hasFailures: true, + }), + ) + + await expect(stat(outputPath)).rejects.toThrow() + }) + + it('supports bundled single-assembly outputs written to a file path', async () => { + vi.spyOn(console, 'error').mockImplementation(() => {}) + + const tempDir = await createTempDir('transloadit-bundle-') + const inputA = path.join(tempDir, 'a.txt') + const inputB = path.join(tempDir, 'b.txt') + const outputPath = path.join(tempDir, 'bundle.zip') + + await writeFile(inputA, 'a') + await writeFile(inputB, 'b') + + const output = new OutputCtl() + const client = { + createAssembly: vi.fn().mockResolvedValue({ assembly_id: 'assembly-1' }), + awaitAssemblyCompletion: vi.fn().mockResolvedValue({ + ok: 'ASSEMBLY_COMPLETED', + results: { + compressed: [{ url: 'http://downloads.test/bundle.zip', name: 'bundle.zip' }], + }, + }), + } + + nock('http://downloads.test').get('/bundle.zip').reply(200, 'bundle-contents') + + await expect( + create(output, client as never, { + inputs: [inputA, inputB], + output: outputPath, + singleAssembly: true, + stepsData: { + compressed: { + robot: '/file/compress', + result: true, + use: { + steps: [':original'], + bundle_steps: true, + }, + }, + }, 
+ }), + ).resolves.toEqual( + expect.objectContaining({ + hasFailures: false, + }), + ) + + expect(client.createAssembly).toHaveBeenCalledTimes(1) + expect(await readFile(outputPath, 'utf8')).toBe('bundle-contents') + }) + + it('rejects invalid steps files before calling the API', async () => { + vi.spyOn(console, 'error').mockImplementation(() => {}) + + const tempDir = await createTempDir('transloadit-invalid-steps-') + const stepsPath = path.join(tempDir, 'steps.json') + + await writeFile( + stepsPath, + JSON.stringify({ + generated: { + robot: '/image/generate', + prompt: 123, + model: 'google/nano-banana', + }, + }), + ) + + const output = new OutputCtl() + const client = { + createAssembly: vi.fn(), + awaitAssemblyCompletion: vi.fn(), + } + + await expect( + create(output, client as never, { + inputs: [], + output: path.join(tempDir, 'result.png'), + steps: stepsPath, + }), + ).rejects.toThrow(/Invalid steps format/) + + expect(client.createAssembly).not.toHaveBeenCalled() + }) + + it('keeps unchanged inputs in single-assembly rebuilds when one input is stale', async () => { + vi.spyOn(console, 'error').mockImplementation(() => {}) + + const tempDir = await createTempDir('transloadit-bundle-stale-') + const inputA = path.join(tempDir, 'a.txt') + const inputB = path.join(tempDir, 'b.txt') + const outputPath = path.join(tempDir, 'bundle.zip') + + await writeFile(inputA, 'a') + await writeFile(inputB, 'b') + await writeFile(outputPath, 'old-bundle') + + const baseTime = new Date('2026-01-01T00:00:00.000Z') + const outputTime = new Date('2026-01-01T00:00:10.000Z') + const changedInputTime = new Date('2026-01-01T00:00:20.000Z') + + await utimes(inputA, changedInputTime, changedInputTime) + await utimes(inputB, baseTime, baseTime) + await utimes(outputPath, outputTime, outputTime) + + const output = new OutputCtl() + const client = { + createAssembly: vi.fn().mockResolvedValue({ assembly_id: 'assembly-stale-bundle' }), + awaitAssemblyCompletion: 
vi.fn().mockResolvedValue({ + ok: 'ASSEMBLY_COMPLETED', + results: { + compressed: [{ url: 'http://downloads.test/bundle.zip', name: 'bundle.zip' }], + }, + }), + } + + nock('http://downloads.test').get('/bundle.zip').reply(200, 'bundle-contents') + + await create(output, client as never, { + inputs: [inputA, inputB], + output: outputPath, + singleAssembly: true, + stepsData: { + compressed: { + robot: '/file/compress', + result: true, + use: { + steps: [':original'], + bundle_steps: true, + }, + }, + }, + }) + + expect(client.createAssembly).toHaveBeenCalledTimes(1) + const uploads = client.createAssembly.mock.calls[0]?.[0]?.uploads + expect(Object.keys(uploads ?? {}).sort()).toEqual(['a.txt', 'b.txt']) + }) + + it('skips bundled single-assembly runs when the output is newer than every input', async () => { + vi.spyOn(console, 'error').mockImplementation(() => {}) + + const tempDir = await createTempDir('transloadit-bundle-skip-stale-') + const inputA = path.join(tempDir, 'a.txt') + const inputB = path.join(tempDir, 'b.txt') + const outputPath = path.join(tempDir, 'bundle.zip') + + await writeFile(inputA, 'a') + await writeFile(inputB, 'b') + await writeFile(outputPath, 'existing-bundle') + + const inputTime = new Date('2026-01-01T00:00:00.000Z') + const outputTime = new Date('2026-01-01T00:00:10.000Z') + + await utimes(inputA, inputTime, inputTime) + await utimes(inputB, inputTime, inputTime) + await utimes(outputPath, outputTime, outputTime) + + const output = new OutputCtl() + const client = { + createAssembly: vi.fn(), + awaitAssemblyCompletion: vi.fn(), + } + + await expect( + create(output, client as never, { + inputs: [inputA, inputB], + output: outputPath, + singleAssembly: true, + stepsData: { + compressed: { + robot: '/file/compress', + result: true, + use: { + steps: [':original'], + bundle_steps: true, + }, + }, + }, + }), + ).resolves.toEqual( + expect.objectContaining({ + hasFailures: false, + results: [], + }), + ) + + 
expect(client.createAssembly).not.toHaveBeenCalled() + expect(await readFile(outputPath, 'utf8')).toBe('existing-bundle') + }) + + it('reruns single-input bundled assemblies when the input is newer than the output', async () => { + vi.spyOn(console, 'error').mockImplementation(() => {}) + + const tempDir = await createTempDir('transloadit-bundle-single-input-stale-') + const inputPath = path.join(tempDir, 'a.txt') + const outputPath = path.join(tempDir, 'bundle.zip') + + await writeFile(inputPath, 'a') + await writeFile(outputPath, 'existing-bundle') + + const outputTime = new Date('2026-01-01T00:00:10.000Z') + const inputTime = new Date('2026-01-01T00:00:20.000Z') + + await utimes(inputPath, inputTime, inputTime) + await utimes(outputPath, outputTime, outputTime) + + const output = new OutputCtl() + const client = { + createAssembly: vi.fn().mockResolvedValue({ assembly_id: 'assembly-single-input-stale' }), + awaitAssemblyCompletion: vi.fn().mockResolvedValue({ + ok: 'ASSEMBLY_COMPLETED', + results: { + compressed: [{ url: 'http://downloads.test/bundle-single.zip', name: 'bundle.zip' }], + }, + }), + } + + nock('http://downloads.test').get('/bundle-single.zip').reply(200, 'fresh-bundle') + + await create(output, client as never, { + inputs: [inputPath], + output: outputPath, + singleAssembly: true, + stepsData: { + compressed: { + robot: '/file/compress', + result: true, + use: { + steps: [':original'], + bundle_steps: true, + }, + }, + }, + }) + + expect(client.createAssembly).toHaveBeenCalledTimes(1) + expect(await readFile(outputPath, 'utf8')).toBe('fresh-bundle') + }) + + it('rewrites existing bundled outputs on single-assembly reruns', async () => { + vi.spyOn(console, 'error').mockImplementation(() => {}) + + const tempDir = await createTempDir('transloadit-bundle-rerun-') + const inputA = path.join(tempDir, 'a.txt') + const inputB = path.join(tempDir, 'b.txt') + const outputPath = path.join(tempDir, 'bundle.zip') + + await writeFile(inputA, 'a') + await 
writeFile(inputB, 'b') + await writeFile(outputPath, 'old-bundle') + + const output = new OutputCtl() + const client = { + createAssembly: vi.fn().mockResolvedValue({ assembly_id: 'assembly-rerun-bundle' }), + awaitAssemblyCompletion: vi.fn().mockResolvedValue({ + ok: 'ASSEMBLY_COMPLETED', + results: { + compressed: [{ url: 'http://downloads.test/bundle-rerun.zip', name: 'bundle.zip' }], + }, + }), + } + + nock('http://downloads.test').get('/bundle-rerun.zip').reply(200, 'fresh-bundle') + + await expect( + create(output, client as never, { + inputs: [inputA, inputB], + output: outputPath, + singleAssembly: true, + stepsData: { + compressed: { + robot: '/file/compress', + result: true, + use: { + steps: [':original'], + bundle_steps: true, + }, + }, + }, + }), + ).resolves.toEqual( + expect.objectContaining({ + hasFailures: false, + }), + ) + + expect(await readFile(outputPath, 'utf8')).toBe('fresh-bundle') + }) + + it('does not let older watch assemblies overwrite newer results', async () => { + vi.spyOn(console, 'error').mockImplementation(() => {}) + vi.resetModules() + + class FakeWatcher extends EventEmitter { + close(): void { + this.emit('close') + } + } + + const fakeWatcher = new FakeWatcher() + vi.doMock('node-watch', () => { + return { + default: vi.fn(() => fakeWatcher), + } + }) + + const { create: createWithWatch } = await import('../../../src/cli/commands/assemblies.ts') + + const tempDir = await createTempDir('transloadit-watch-') + const inputPath = path.join(tempDir, 'clip.mp4') + const outputPath = path.join(tempDir, 'thumb.jpg') + + await writeFile(inputPath, 'video-v1') + await writeFile(outputPath, 'existing-thumb') + + const baseTime = new Date('2026-01-01T00:00:00.000Z') + const outputTime = new Date('2026-01-01T00:00:10.000Z') + const firstChangeTime = new Date('2026-01-01T00:00:20.000Z') + const secondChangeTime = new Date('2026-01-01T00:00:30.000Z') + + await utimes(inputPath, baseTime, baseTime) + await utimes(outputPath, outputTime, 
outputTime) + + const output = new OutputCtl() + const client = { + createAssembly: vi + .fn() + .mockResolvedValueOnce({ assembly_id: 'assembly-old' }) + .mockResolvedValueOnce({ assembly_id: 'assembly-new' }), + awaitAssemblyCompletion: vi.fn(async (assemblyId: string) => { + if (assemblyId === 'assembly-old') { + await delay(80) + return { + ok: 'ASSEMBLY_COMPLETED', + results: { + thumbs: [{ url: 'http://downloads.test/old.jpg', name: 'old.jpg' }], + }, + } + } + + await delay(10) + return { + ok: 'ASSEMBLY_COMPLETED', + results: { + thumbs: [{ url: 'http://downloads.test/new.jpg', name: 'new.jpg' }], + }, + } + }), + } + + nock('http://downloads.test').get('/old.jpg').reply(200, 'old-result') + nock('http://downloads.test').get('/new.jpg').reply(200, 'new-result') + + const createPromise = createWithWatch(output, client as never, { + inputs: [inputPath], + output: outputPath, + watch: true, + concurrency: 2, + stepsData: { + thumbs: { + robot: '/video/thumbs', + result: true, + use: ':original', + }, + }, + }) + + await delay(20) + await writeFile(inputPath, 'video-v2') + await utimes(inputPath, firstChangeTime, firstChangeTime) + fakeWatcher.emit('change', 'update', inputPath) + + await delay(5) + await writeFile(inputPath, 'video-v3') + await utimes(inputPath, secondChangeTime, secondChangeTime) + fakeWatcher.emit('change', 'update', inputPath) + + await delay(20) + fakeWatcher.close() + + await expect(createPromise).resolves.toEqual( + expect.objectContaining({ + hasFailures: false, + }), + ) + + expect(await readFile(outputPath, 'utf8')).toBe('new-result') + }) + + it('does not try to delete /dev/stdin after stdin processing', async () => { + vi.spyOn(console, 'error').mockImplementation(() => {}) + vi.spyOn(process.stdout, 'write').mockImplementation(() => true) + vi.spyOn(tty, 'isatty').mockReturnValue(false) + + const tempDir = await createTempDir('transloadit-stdin-') + const outputPath = path.join(tempDir, 'waveform.png') + + const output = new 
OutputCtl() + const client = { + createAssembly: vi.fn().mockResolvedValue({ assembly_id: 'assembly-stdin' }), + awaitAssemblyCompletion: vi.fn().mockResolvedValue({ + ok: 'ASSEMBLY_COMPLETED', + results: { + waveform: [{ url: 'http://downloads.test/stdin-waveform.png', name: 'waveform.png' }], + }, + }), + } + + nock('http://downloads.test').get('/stdin-waveform.png').reply(200, 'waveform') + + await expect( + create(output, client as never, { + inputs: ['-'], + output: outputPath, + del: true, + stepsData: { + waveform: { + robot: '/audio/waveform', + result: true, + use: ':original', + }, + }, + }), + ).resolves.toEqual( + expect.objectContaining({ + hasFailures: false, + }), + ) + + expect(await readFile(outputPath, 'utf8')).toBe('waveform') + }) + + it('surfaces output plan failures through the normal error path', async () => { + vi.spyOn(console, 'error').mockImplementation(() => {}) + vi.spyOn(process.stdout, 'write').mockImplementation(() => true) + + const tempDir = await createTempDir('transloadit-output-plan-failure-') + const outputDir = path.join(tempDir, 'out') + await mkdir(outputDir, { recursive: true }) + + const output = new OutputCtl() + const client = { + createAssembly: vi.fn(), + awaitAssemblyCompletion: vi.fn(), + } + + await expect( + create(output, client as never, { + inputs: ['-'], + output: outputDir, + outputMode: 'directory', + stepsData: { + waveform: { + robot: '/audio/waveform', + result: true, + use: ':original', + }, + }, + }), + ).rejects.toThrow('You must provide an input to output to a directory') + + expect(client.createAssembly).not.toHaveBeenCalled() + }) + + it('writes single-input directory outputs using result filenames', async () => { + vi.spyOn(console, 'error').mockImplementation(() => {}) + + const tempDir = await createTempDir('transloadit-outdir-') + const inputPath = path.join(tempDir, 'clip.mp4') + const outputDir = path.join(tempDir, 'thumbs') + + await writeFile(inputPath, 'video') + await mkdir(outputDir, { 
recursive: true }) + + const output = new OutputCtl() + const client = { + createAssembly: vi.fn().mockResolvedValue({ assembly_id: 'assembly-2' }), + awaitAssemblyCompletion: vi.fn().mockResolvedValue({ + ok: 'ASSEMBLY_COMPLETED', + results: { + thumbs: [ + { url: 'http://downloads.test/one.jpg', name: 'one.jpg' }, + { url: 'http://downloads.test/two.jpg', name: 'two.jpg' }, + ], + }, + }), + } + + nock('http://downloads.test').get('/one.jpg').reply(200, 'one') + nock('http://downloads.test').get('/two.jpg').reply(200, 'two') + + await expect( + create( + output, + client as never, + { + inputs: [inputPath], + output: outputDir, + stepsData: { + thumbs: { + robot: '/video/thumbs', + result: true, + use: ':original', + }, + }, + outputMode: 'directory', + } as never, + ), + ).resolves.toEqual( + expect.objectContaining({ + hasFailures: false, + }), + ) + + expect(await readFile(path.join(outputDir, 'one.jpg'), 'utf8')).toBe('one') + expect(await readFile(path.join(outputDir, 'two.jpg'), 'utf8')).toBe('two') + }) + + it('keeps duplicate sanitized result filenames from overwriting each other', async () => { + vi.spyOn(console, 'error').mockImplementation(() => {}) + + const tempDir = await createTempDir('transloadit-dupe-results-') + const inputPath = path.join(tempDir, 'clip.mp4') + const outputDir = path.join(tempDir, 'thumbs') + + await writeFile(inputPath, 'video') + await mkdir(outputDir, { recursive: true }) + + const output = new OutputCtl() + const client = { + createAssembly: vi.fn().mockResolvedValue({ assembly_id: 'assembly-dupe-results' }), + awaitAssemblyCompletion: vi.fn().mockResolvedValue({ + ok: 'ASSEMBLY_COMPLETED', + results: { + thumbs: [ + { url: 'http://downloads.test/dupe-a.jpg', name: 'thumb.jpg' }, + { url: 'http://downloads.test/dupe-b.jpg', name: 'thumb.jpg' }, + ], + }, + }), + } + + nock('http://downloads.test').get('/dupe-a.jpg').reply(200, 'first-thumb') + nock('http://downloads.test').get('/dupe-b.jpg').reply(200, 'second-thumb') + + 
await expect( + create(output, client as never, { + inputs: [inputPath], + output: outputDir, + outputMode: 'directory', + stepsData: { + thumbs: { + robot: '/video/thumbs', + result: true, + use: ':original', + }, + }, + }), + ).resolves.toEqual( + expect.objectContaining({ + hasFailures: false, + }), + ) + + expect(await readFile(path.join(outputDir, 'thumb.jpg'), 'utf8')).toBe('first-thumb') + expect(await readFile(path.join(outputDir, 'thumb__1.jpg'), 'utf8')).toBe('second-thumb') + }) + + it('preserves legacy step-directory layout for generic directory outputs', async () => { + vi.spyOn(console, 'error').mockImplementation(() => {}) + + const tempDir = await createTempDir('transloadit-legacy-outdir-') + const inputPath = path.join(tempDir, 'clip.mp4') + const outputDir = path.join(tempDir, 'thumbs') + + await writeFile(inputPath, 'video') + await mkdir(outputDir, { recursive: true }) + + const output = new OutputCtl() + const client = { + createAssembly: vi.fn().mockResolvedValue({ assembly_id: 'assembly-legacy-dir' }), + awaitAssemblyCompletion: vi.fn().mockResolvedValue({ + ok: 'ASSEMBLY_COMPLETED', + results: { + thumbs: [ + { url: 'http://downloads.test/one.jpg', name: 'one.jpg' }, + { url: 'http://downloads.test/two.jpg', name: 'two.jpg' }, + ], + }, + }), + } + + nock('http://downloads.test').get('/one.jpg').reply(200, 'one') + nock('http://downloads.test').get('/two.jpg').reply(200, 'two') + + await create( + output, + client as never, + { + inputs: [inputPath], + output: outputDir, + stepsData: { + thumbs: { + robot: '/video/thumbs', + result: true, + use: ':original', + }, + }, + } as never, + ) + + const legacyRelative = getLegacyRelativeInputPath(inputPath) + const legacyBaseDir = path.join(path.dirname(legacyRelative), path.parse(legacyRelative).name) + + expect(await collectRelativeFiles(outputDir)).toEqual([ + path.join(legacyBaseDir, 'thumbs', 'one.jpg'), + path.join(legacyBaseDir, 'thumbs', 'two.jpg'), + ]) + }) + + it('uses the actual result 
filename for single-result directory outputs', async () => { + vi.spyOn(console, 'error').mockImplementation(() => {}) + + const tempDir = await createTempDir('transloadit-single-result-outdir-') + const inputPath = path.join(tempDir, 'archive.zip') + const outputDir = path.join(tempDir, 'extracted') + + await writeFile(inputPath, 'zip-data') + await mkdir(outputDir, { recursive: true }) + + const output = new OutputCtl() + const client = { + createAssembly: vi.fn().mockResolvedValue({ assembly_id: 'assembly-3' }), + awaitAssemblyCompletion: vi.fn().mockResolvedValue({ + ok: 'ASSEMBLY_COMPLETED', + results: { + decompressed: [{ url: 'http://downloads.test/input.txt', name: 'input.txt' }], + }, + }), + } + + nock('http://downloads.test').get('/input.txt').reply(200, 'hello') + + await expect( + create(output, client as never, { + inputs: [inputPath], + output: outputDir, + stepsData: { + decompressed: { + robot: '/file/decompress', + result: true, + use: ':original', + }, + }, + outputMode: 'directory', + }), + ).resolves.toEqual( + expect.objectContaining({ + hasFailures: false, + }), + ) + + expect(await readFile(path.join(outputDir, 'input.txt'), 'utf8')).toBe('hello') + }) + + it('preserves mapped out paths for legacy single-result directory outputs', async () => { + vi.spyOn(console, 'error').mockImplementation(() => {}) + + const tempDir = await createTempDir('transloadit-legacy-single-result-') + const inputPath = path.join(tempDir, 'archive.zip') + const outputDir = path.join(tempDir, 'extracted') + + await writeFile(inputPath, 'zip-data') + await mkdir(outputDir, { recursive: true }) + + const output = new OutputCtl() + const client = { + createAssembly: vi.fn().mockResolvedValue({ assembly_id: 'assembly-legacy-single-result' }), + awaitAssemblyCompletion: vi.fn().mockResolvedValue({ + ok: 'ASSEMBLY_COMPLETED', + results: { + decompressed: [{ url: 'http://downloads.test/input.txt', name: 'input.txt' }], + }, + }), + } + + 
nock('http://downloads.test').get('/input.txt').reply(200, 'hello') + + await create(output, client as never, { + inputs: [inputPath], + output: outputDir, + stepsData: { + decompressed: { + robot: '/file/decompress', + result: true, + use: ':original', + }, + }, + }) + + expect(await collectRelativeFiles(outputDir)).toEqual([getLegacyRelativeInputPath(inputPath)]) + }) + + it('does not create an empty output file when assembly creation fails', async () => { + vi.spyOn(console, 'error').mockImplementation(() => {}) + + const tempDir = await createTempDir('transloadit-failed-create-') + const inputPath = path.join(tempDir, 'image.jpg') + const outputPath = path.join(tempDir, 'resized.jpg') + + await writeFile(inputPath, 'image-data') + + const output = new OutputCtl() + const client = { + createAssembly: vi.fn().mockRejectedValue(new Error('boom')), + } + + await expect( + create(output, client as never, { + inputs: [inputPath], + output: outputPath, + stepsData: { + resized: { + robot: '/image/resize', + result: true, + use: ':original', + width: 200, + }, + }, + }), + ).resolves.toEqual( + expect.objectContaining({ + hasFailures: true, + }), + ) + + await expect(stat(outputPath)).rejects.toMatchObject({ + code: 'ENOENT', + }) + }) +}) diff --git a/packages/node/test/unit/cli/intents.test.ts b/packages/node/test/unit/cli/intents.test.ts new file mode 100644 index 00000000..c648bd24 --- /dev/null +++ b/packages/node/test/unit/cli/intents.test.ts @@ -0,0 +1,1083 @@ +import { mkdtemp, readFile, rm, writeFile } from 'node:fs/promises' +import { tmpdir } from 'node:os' +import path from 'node:path' +import nock from 'nock' +import { afterEach, describe, expect, it, vi } from 'vitest' +import { z } from 'zod' + +import * as assembliesCommands from '../../../src/cli/commands/assemblies.ts' +import { + findIntentDefinitionByPaths, + getIntentPaths, + getIntentResultStepName, + intentCatalog, +} from '../../../src/cli/intentCommandSpecs.ts' +import { intentCommands } from 
'../../../src/cli/intentCommands.ts' +import { + coerceIntentFieldValue, + inferIntentFieldKind, + parseStringArrayValue, +} from '../../../src/cli/intentFields.ts' +import { prepareIntentInputs } from '../../../src/cli/intentRuntime.ts' +import OutputCtl from '../../../src/cli/OutputCtl.ts' +import { main } from '../../../src/cli.ts' +import { intentSmokeCases } from '../../support/intentSmokeCases.ts' + +const noopWrite = () => true +const tempDirs: string[] = [] + +const resetExitCode = () => { + process.exitCode = undefined +} + +async function createTempDir(prefix: string): Promise { + const tempDir = await mkdtemp(path.join(tmpdir(), prefix)) + tempDirs.push(tempDir) + return tempDir +} + +async function runIntentCommand( + args: string[], + createResult: Awaited> = { + resultUrls: [], + results: [], + hasFailures: false, + }, +): Promise<{ + createSpy: ReturnType> +}> { + vi.stubEnv('TRANSLOADIT_KEY', 'key') + vi.stubEnv('TRANSLOADIT_SECRET', 'secret') + + const createSpy = vi.spyOn(assembliesCommands, 'create').mockResolvedValue(createResult) + vi.spyOn(process.stdout, 'write').mockImplementation(noopWrite) + + await main(args) + + return { createSpy } +} + +function getIntentCommand(paths: string[]): (typeof intentCommands)[number] { + const command = intentCommands.find((candidate) => { + const candidatePaths = candidate.paths[0] + return candidatePaths != null && candidatePaths.join(' ') === paths.join(' ') + }) + + if (command == null) { + throw new Error(`No intent command found for ${paths.join(' ')}`) + } + + return command +} + +function getIntentStepName(paths: string[]): string { + const definition = findIntentDefinitionByPaths(paths) + if (definition == null || definition.kind !== 'robot') { + throw new Error(`No robot intent definition found for ${paths.join(' ')}`) + } + + const stepName = getIntentResultStepName(definition) + if (stepName == null) { + throw new Error(`No intent result step name found for ${paths.join(' ')}`) + } + + return 
stepName +} + +afterEach(() => { + vi.restoreAllMocks() + vi.unstubAllEnvs() + nock.cleanAll() + resetExitCode() + return Promise.all( + tempDirs.splice(0).map((tempDir) => rm(tempDir, { recursive: true, force: true })), + ) +}) + +describe('intent commands', () => { + it('routes image describe labels through /image/describe', async () => { + const { createSpy } = await runIntentCommand([ + 'image', + 'describe', + '--input', + 'hero.jpg', + '--fields', + 'labels', + '--out', + 'labels.json', + ]) + + expect(process.exitCode).toBeUndefined() + expect(createSpy).toHaveBeenCalledWith( + expect.any(OutputCtl), + expect.anything(), + expect.objectContaining({ + inputs: ['hero.jpg'], + output: 'labels.json', + stepsData: { + describe: expect.objectContaining({ + robot: '/image/describe', + use: ':original', + result: true, + provider: 'aws', + format: 'json', + granularity: 'list', + explicit_descriptions: false, + }), + }, + }), + ) + }) + + it('prints aligned result URLs without requiring --out', async () => { + const logSpy = vi.spyOn(console, 'log').mockImplementation(() => {}) + + const { createSpy } = await runIntentCommand( + ['image', 'describe', '--input', 'hero.jpg', '--fields', 'labels', '--print-urls'], + { + results: [], + hasFailures: false, + resultUrls: [ + { + assemblyId: 'assembly-1', + step: 'describe', + name: 'hero.json', + url: 'https://example.com/hero.json', + }, + ], + }, + ) + + expect(process.exitCode).toBeUndefined() + expect(createSpy).toHaveBeenCalledWith( + expect.any(OutputCtl), + expect.anything(), + expect.objectContaining({ + inputs: ['hero.jpg'], + output: null, + }), + ) + expect(logSpy).toHaveBeenCalledWith(expect.stringContaining('STEP')) + expect(logSpy).toHaveBeenCalledWith(expect.stringContaining('https://example.com/hero.json')) + }) + + it('prints machine-readable result URLs with --json', async () => { + const logSpy = vi.spyOn(console, 'log').mockImplementation(() => {}) + + await runIntentCommand( + ['--json', 'image', 
'describe', '--input', 'hero.jpg', '--fields', 'labels', '--print-urls'], + { + results: [], + hasFailures: false, + resultUrls: [ + { + assemblyId: 'assembly-1', + step: 'describe', + name: 'hero.json', + url: 'https://example.com/hero.json', + }, + ], + }, + ) + + expect(logSpy).toHaveBeenCalledWith( + JSON.stringify({ + urls: [ + { + assemblyId: 'assembly-1', + step: 'describe', + name: 'hero.json', + url: 'https://example.com/hero.json', + }, + ], + }), + ) + }) + + it('routes image describe --for wordpress through /ai/chat with a schema', async () => { + const { createSpy } = await runIntentCommand([ + 'image', + 'describe', + '--input', + 'hero.jpg', + '--for', + 'wordpress', + '--out', + 'fields.json', + ]) + + expect(process.exitCode).toBeUndefined() + expect(createSpy).toHaveBeenCalledWith( + expect.any(OutputCtl), + expect.anything(), + expect.objectContaining({ + inputs: ['hero.jpg'], + output: 'fields.json', + stepsData: { + describe: expect.objectContaining({ + robot: '/ai/chat', + use: ':original', + result: true, + model: 'anthropic/claude-sonnet-4-6', + format: 'json', + return_messages: 'last', + test_credentials: true, + messages: expect.stringContaining('altText, title, caption, description'), + }), + }, + }), + ) + + const describeStep = createSpy.mock.calls[0]?.[2].stepsData?.describe + expect(describeStep).toBeDefined() + if (describeStep == null || typeof describeStep !== 'object') { + throw new Error('Missing describe step') + } + + const schema = JSON.parse(String((describeStep as Record).schema)) + expect(schema).toEqual({ + type: 'object', + additionalProperties: false, + required: ['altText', 'title', 'caption', 'description'], + properties: expect.objectContaining({ + altText: expect.objectContaining({ type: 'string' }), + title: expect.objectContaining({ type: 'string' }), + caption: expect.objectContaining({ type: 'string' }), + description: expect.objectContaining({ type: 'string' }), + }), + }) + }) + + it('rejects combining labels 
with authored image describe fields', async () => { + vi.stubEnv('TRANSLOADIT_KEY', 'key') + vi.stubEnv('TRANSLOADIT_SECRET', 'secret') + + const createSpy = vi.spyOn(assembliesCommands, 'create').mockResolvedValue({ + results: [], + hasFailures: false, + }) + vi.spyOn(process.stdout, 'write').mockImplementation(noopWrite) + + await main([ + 'image', + 'describe', + '--input', + 'hero.jpg', + '--fields', + 'labels,caption', + '--out', + 'fields.json', + ]) + + expect(process.exitCode).toBe(1) + expect(createSpy).not.toHaveBeenCalled() + }) + + it('rejects combining --fields labels with --for wordpress', async () => { + const { createSpy } = await runIntentCommand([ + 'image', + 'describe', + '--input', + 'hero.jpg', + '--fields', + 'labels', + '--for', + 'wordpress', + '--out', + 'fields.json', + ]) + + expect(process.exitCode).toBe(1) + expect(createSpy).not.toHaveBeenCalled() + }) + + it('maps image generate flags to /image/generate step parameters', async () => { + const { createSpy } = await runIntentCommand([ + 'image', + 'generate', + '--prompt', + 'A red bicycle in a studio', + '--model', + 'flux-schnell', + '--aspect-ratio', + '2:3', + '--out', + 'generated.png', + ]) + + expect(process.exitCode).toBeUndefined() + expect(createSpy).toHaveBeenCalledWith( + expect.any(OutputCtl), + expect.anything(), + expect.objectContaining({ + inputs: [], + output: 'generated.png', + stepsData: { + [getIntentStepName(['image', 'generate'])]: expect.objectContaining({ + robot: '/image/generate', + result: true, + prompt: 'A red bicycle in a studio', + model: 'flux-schnell', + aspect_ratio: '2:3', + }), + }, + }), + ) + }) + + it('maps preview generate flags to /file/preview step parameters', async () => { + const { createSpy } = await runIntentCommand([ + 'preview', + 'generate', + '--input', + 'document.pdf', + '--width', + '320', + '--height', + '200', + '--format', + 'jpg', + '--out', + 'preview.jpg', + ]) + + expect(process.exitCode).toBeUndefined() + 
expect(createSpy).toHaveBeenCalledWith( + expect.any(OutputCtl), + expect.anything(), + expect.objectContaining({ + inputs: ['document.pdf'], + output: 'preview.jpg', + stepsData: { + [getIntentStepName(['preview', 'generate'])]: expect.objectContaining({ + robot: '/file/preview', + result: true, + use: ':original', + width: 320, + height: 200, + format: 'jpg', + }), + }, + }), + ) + }) + + it('downloads URL inputs for preview generate before calling assemblies create', async () => { + nock('https://example.com').get('/file.pdf').reply(200, 'pdf-data') + const { createSpy } = await runIntentCommand([ + 'preview', + 'generate', + '--input', + 'https://example.com/file.pdf', + '--out', + 'preview.png', + ]) + + expect(process.exitCode).toBeUndefined() + expect(createSpy).toHaveBeenCalledWith( + expect.any(OutputCtl), + expect.anything(), + expect.objectContaining({ + inputs: [expect.stringContaining('transloadit-input-')], + stepsData: { + [getIntentStepName(['preview', 'generate'])]: expect.objectContaining({ + robot: '/file/preview', + use: ':original', + }), + }, + }), + ) + }) + + it('rejects private-host URL inputs for intent commands', async () => { + await expect( + prepareIntentInputs({ + inputValues: ['http://127.0.0.1/secret'], + inputBase64Values: [], + }), + ).rejects.toThrow('URL downloads are limited to public hosts') + }) + + it('keeps duplicate remote basenames as distinct temp inputs', async () => { + nock('http://198.51.100.10').get('/nested/file.pdf').reply(200, 'first-file') + nock('http://198.51.100.11').get('/other/file.pdf').reply(200, 'second-file') + + const prepared = await prepareIntentInputs({ + inputValues: ['http://198.51.100.10/nested/file.pdf', 'http://198.51.100.11/other/file.pdf'], + inputBase64Values: [], + }) + + try { + expect(prepared.inputs).toHaveLength(2) + const firstPath = prepared.inputs[0] + const secondPath = prepared.inputs[1] + expect(firstPath).toBeDefined() + expect(secondPath).toBeDefined() + 
expect(firstPath).not.toBe(secondPath) + if (firstPath == null || secondPath == null) { + throw new Error('Expected prepared input paths') + } + + expect(await readFile(firstPath, 'utf8')).toBe('first-file') + expect(await readFile(secondPath, 'utf8')).toBe('second-file') + } finally { + await Promise.all(prepared.cleanup.map((cleanup) => cleanup())) + } + }) + + it('supports base64 inputs for intent commands', async () => { + const { createSpy } = await runIntentCommand([ + 'document', + 'convert', + '--input-base64', + Buffer.from('hello world').toString('base64'), + '--format', + 'pdf', + '--out', + 'output.pdf', + ]) + + expect(process.exitCode).toBeUndefined() + expect(createSpy).toHaveBeenCalledWith( + expect.any(OutputCtl), + expect.anything(), + expect.objectContaining({ + inputs: [expect.stringContaining('transloadit-input-')], + stepsData: { + [getIntentStepName(['document', 'convert'])]: expect.objectContaining({ + robot: '/document/convert', + use: ':original', + format: 'pdf', + }), + }, + }), + ) + }) + + it('rejects --watch URL inputs before downloading them', async () => { + vi.stubEnv('TRANSLOADIT_KEY', 'key') + vi.stubEnv('TRANSLOADIT_SECRET', 'secret') + + const createSpy = vi.spyOn(assembliesCommands, 'create').mockResolvedValue({ + results: [], + hasFailures: false, + }) + const downloadScope = nock('https://example.test').get('/file.pdf').reply(200, 'pdf') + + vi.spyOn(process.stdout, 'write').mockImplementation(noopWrite) + + await main([ + 'preview', + 'generate', + '--watch', + '--input', + 'https://example.test/file.pdf', + '--out', + 'preview.png', + ]) + + expect(process.exitCode).toBe(1) + expect(createSpy).not.toHaveBeenCalled() + expect(downloadScope.isDone()).toBe(false) + }) + + it('accepts native boolean flags for generated intent options', async () => { + const { createSpy } = await runIntentCommand([ + 'image', + 'optimize', + '--input', + 'input.jpg', + '--progressive', + '--out', + 'optimized.jpg', + ]) + + 
expect(process.exitCode).toBeUndefined() + expect(createSpy).toHaveBeenCalledWith( + expect.any(OutputCtl), + expect.anything(), + expect.objectContaining({ + inputs: ['input.jpg'], + stepsData: { + [getIntentStepName(['image', 'optimize'])]: expect.objectContaining({ + robot: '/image/optimize', + use: ':original', + progressive: true, + }), + }, + }), + ) + }) + + it('rejects multi-input standard single-assembly runs with a file output before processing', async () => { + vi.stubEnv('TRANSLOADIT_KEY', 'key') + vi.stubEnv('TRANSLOADIT_SECRET', 'secret') + + const tempDir = await createTempDir('transloadit-intent-single-assembly-') + const inputA = path.join(tempDir, 'a.jpg') + const inputB = path.join(tempDir, 'b.jpg') + await writeFile(inputA, 'a') + await writeFile(inputB, 'b') + + const createSpy = vi.spyOn(assembliesCommands, 'create').mockResolvedValue({ + results: [], + hasFailures: false, + }) + const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}) + + vi.spyOn(process.stdout, 'write').mockImplementation(noopWrite) + + await main([ + 'image', + 'optimize', + '--single-assembly', + '--input', + inputA, + '--input', + inputB, + '--out', + path.join(tempDir, 'optimized.jpg'), + ]) + + expect(process.exitCode).toBe(1) + expect(createSpy).not.toHaveBeenCalled() + const loggedError = errorSpy.mock.calls.flatMap((call) => call.map(String)).join(' ') + expect(loggedError).toContain( + 'Output must be a directory when using --single-assembly with multiple inputs', + ) + }) + + it('maps video encode-hls to the builtin template', async () => { + const { createSpy } = await runIntentCommand([ + 'video', + 'encode-hls', + '--input', + 'input.mp4', + '--out', + 'dist/hls', + '--recursive', + ]) + + expect(process.exitCode).toBeUndefined() + expect(createSpy).toHaveBeenCalledWith( + expect.any(OutputCtl), + expect.anything(), + expect.objectContaining({ + template: 'builtin/encode-hls-video@latest', + inputs: ['input.mp4'], + output: 'dist/hls', + 
recursive: true, + }), + ) + }) + + it('maps text speak flags to /text/speak step parameters', async () => { + const { createSpy } = await runIntentCommand([ + 'text', + 'speak', + '--prompt', + 'Hello world', + '--provider', + 'aws', + '--target-language', + 'en-US', + '--voice', + 'female-1', + '--out', + 'hello.mp3', + ]) + + expect(process.exitCode).toBeUndefined() + expect(createSpy).toHaveBeenCalledWith( + expect.any(OutputCtl), + expect.anything(), + expect.objectContaining({ + inputs: [], + output: 'hello.mp3', + stepsData: { + [getIntentStepName(['text', 'speak'])]: expect.objectContaining({ + robot: '/text/speak', + result: true, + prompt: 'Hello world', + provider: 'aws', + target_language: 'en-US', + voice: 'female-1', + }), + }, + }), + ) + }) + + it('supports prompt-only text speak runs without an input file', async () => { + const { createSpy } = await runIntentCommand([ + 'text', + 'speak', + '--prompt', + 'Hello from a prompt', + '--provider', + 'aws', + '--out', + 'hello.mp3', + ]) + + expect(process.exitCode).toBeUndefined() + expect(createSpy).toHaveBeenCalledWith( + expect.any(OutputCtl), + expect.anything(), + expect.objectContaining({ + inputs: [], + output: 'hello.mp3', + stepsData: { + [getIntentStepName(['text', 'speak'])]: { + robot: '/text/speak', + result: true, + prompt: 'Hello from a prompt', + provider: 'aws', + }, + }, + }), + ) + }) + + it('supports file-backed text speak runs without a prompt', async () => { + const { createSpy } = await runIntentCommand([ + 'text', + 'speak', + '--input', + 'article.txt', + '--provider', + 'aws', + '--out', + 'hello.mp3', + ]) + + expect(process.exitCode).toBeUndefined() + expect(createSpy).toHaveBeenCalledWith( + expect.any(OutputCtl), + expect.anything(), + expect.objectContaining({ + inputs: ['article.txt'], + output: 'hello.mp3', + stepsData: { + [getIntentStepName(['text', 'speak'])]: { + robot: '/text/speak', + result: true, + use: ':original', + provider: 'aws', + }, + }, + }), + ) + }) + 
+ it('omits schema defaults from generated intent steps', async () => { + const { createSpy } = await runIntentCommand([ + 'audio', + 'waveform', + '--input', + 'podcast.mp3', + '--out', + 'waveform.png', + ]) + + expect(process.exitCode).toBeUndefined() + expect(createSpy).toHaveBeenCalledWith( + expect.any(OutputCtl), + expect.anything(), + expect.objectContaining({ + inputs: ['podcast.mp3'], + output: 'waveform.png', + stepsData: { + [getIntentStepName(['audio', 'waveform'])]: { + robot: '/audio/waveform', + result: true, + use: ':original', + }, + }, + }), + ) + }) + + it('applies schema normalization before submitting generated steps', async () => { + const { createSpy } = await runIntentCommand([ + 'audio', + 'waveform', + '--input', + 'song.mp3', + '--style', + '1', + '--out', + 'waveform.png', + ]) + + expect(process.exitCode).toBeUndefined() + expect(createSpy).toHaveBeenCalledWith( + expect.any(OutputCtl), + expect.anything(), + expect.objectContaining({ + inputs: ['song.mp3'], + output: 'waveform.png', + stepsData: { + [getIntentStepName(['audio', 'waveform'])]: expect.objectContaining({ + robot: '/audio/waveform', + result: true, + use: ':original', + style: 'v1', + }), + }, + }), + ) + }) + + it('passes directory output intent for multi-file commands', async () => { + const { createSpy } = await runIntentCommand([ + 'video', + 'thumbs', + '--input', + 'demo.mp4', + '--out', + 'thumbs', + ]) + + expect(process.exitCode).toBeUndefined() + expect(createSpy).toHaveBeenCalledWith( + expect.any(OutputCtl), + expect.anything(), + expect.objectContaining({ + inputs: ['demo.mp4'], + output: 'thumbs', + outputMode: 'directory', + }), + ) + }) + + it('coerces numeric literal union options like video thumbs --rotate', async () => { + const { createSpy } = await runIntentCommand([ + 'video', + 'thumbs', + '--input', + 'demo.mp4', + '--rotate', + '90', + '--out', + 'thumbs', + ]) + + expect(process.exitCode).toBeUndefined() + expect(createSpy).toHaveBeenCalledWith( 
+ expect.any(OutputCtl), + expect.anything(), + expect.objectContaining({ + stepsData: { + [getIntentStepName(['video', 'thumbs'])]: expect.objectContaining({ + robot: '/video/thumbs', + rotate: 90, + }), + }, + }), + ) + }) + + it('maps array-valued robot parameters from JSON flags', async () => { + const { createSpy } = await runIntentCommand([ + 'video', + 'thumbs', + '--input', + 'demo.mp4', + '--offsets', + '[1,2,3]', + '--out', + 'thumbs', + ]) + + expect(process.exitCode).toBeUndefined() + expect(createSpy).toHaveBeenCalledWith( + expect.any(OutputCtl), + expect.anything(), + expect.objectContaining({ + stepsData: { + [getIntentStepName(['video', 'thumbs'])]: expect.objectContaining({ + robot: '/video/thumbs', + offsets: [1, 2, 3], + }), + }, + }), + ) + }) + + it('maps object-valued robot parameters from JSON flags', async () => { + const { createSpy } = await runIntentCommand([ + 'preview', + 'generate', + '--input', + 'document.pdf', + '--strategy', + '{"document":["page","icon"],"unknown":["icon"]}', + '--out', + 'preview.png', + ]) + + expect(process.exitCode).toBeUndefined() + expect(createSpy).toHaveBeenCalledWith( + expect.any(OutputCtl), + expect.anything(), + expect.objectContaining({ + stepsData: { + [getIntentStepName(['preview', 'generate'])]: expect.objectContaining({ + robot: '/file/preview', + strategy: expect.objectContaining({ + document: ['page', 'icon'], + unknown: ['icon'], + }), + }), + }, + }), + ) + }) + + it('rejects blank numeric values instead of coercing them to zero', () => { + expect(() => coerceIntentFieldValue('number', ' ')).toThrow('Expected a number') + }) + + it('classifies string array schemas as string-array intent fields', () => { + expect(inferIntentFieldKind(z.array(z.string()))).toBe('string-array') + expect(inferIntentFieldKind(z.union([z.string(), z.array(z.string())]))).toBe('string-array') + }) + + it('parses shared string-array values from csv, repeated flags, and JSON arrays', () => { + 
expect(parseStringArrayValue('altText,title')).toEqual(['altText', 'title']) + expect(parseStringArrayValue(['altText,title', 'caption'])).toEqual([ + 'altText', + 'title', + 'caption', + ]) + expect(parseStringArrayValue(['["altText","title"]'])).toEqual(['altText', 'title']) + }) + + it('parses JSON objects for auto-typed flags like image resize --crop', async () => { + const { createSpy } = await runIntentCommand([ + 'image', + 'resize', + '--input', + 'demo.jpg', + '--crop', + '{"x1":80,"y1":100,"x2":"60%","y2":"80%"}', + '--out', + 'resized.jpg', + ]) + + expect(process.exitCode).toBeUndefined() + expect(createSpy).toHaveBeenCalledWith( + expect.any(OutputCtl), + expect.anything(), + expect.objectContaining({ + stepsData: { + [getIntentStepName(['image', 'resize'])]: expect.objectContaining({ + crop: { + x1: 80, + y1: 100, + x2: '60%', + y2: '80%', + }, + }), + }, + }), + ) + }) + + it('parses JSON arrays for auto-typed flags like image resize --watermark-position', async () => { + const { createSpy } = await runIntentCommand([ + 'image', + 'resize', + '--input', + 'demo.jpg', + '--watermark-position', + '["center","left"]', + '--out', + 'resized.jpg', + ]) + + expect(process.exitCode).toBeUndefined() + expect(createSpy).toHaveBeenCalledWith( + expect.any(OutputCtl), + expect.anything(), + expect.objectContaining({ + stepsData: { + [getIntentStepName(['image', 'resize'])]: expect.objectContaining({ + watermark_position: ['center', 'left'], + }), + }, + }), + ) + }) + + it('coerces mixed rotation flags like image resize --rotation 90', async () => { + const { createSpy } = await runIntentCommand([ + 'image', + 'resize', + '--input', + 'demo.jpg', + '--rotation', + '90', + '--out', + 'resized.jpg', + ]) + + expect(process.exitCode).toBeUndefined() + expect(createSpy).toHaveBeenCalledWith( + expect.any(OutputCtl), + expect.anything(), + expect.objectContaining({ + stepsData: { + [getIntentStepName(['image', 'resize'])]: expect.objectContaining({ + robot: 
'/image/resize', + rotation: 90, + }), + }, + }), + ) + }) + + it('coerces mixed boolean-or-number flags like audio waveform --antialiasing 1', async () => { + const { createSpy } = await runIntentCommand([ + 'audio', + 'waveform', + '--input', + 'song.mp3', + '--antialiasing', + '1', + '--out', + 'waveform.png', + ]) + + expect(process.exitCode).toBeUndefined() + expect(createSpy).toHaveBeenCalledWith( + expect.any(OutputCtl), + expect.anything(), + expect.objectContaining({ + stepsData: { + [getIntentStepName(['audio', 'waveform'])]: expect.objectContaining({ + robot: '/audio/waveform', + antialiasing: 1, + }), + }, + }), + ) + }) + + it('maps file compress to a bundled single assembly by default', async () => { + const { createSpy } = await runIntentCommand([ + 'file', + 'compress', + '--input', + 'assets', + '--format', + 'zip', + '--gzip', + '--out', + 'assets.zip', + ]) + + expect(process.exitCode).toBeUndefined() + expect(createSpy).toHaveBeenCalledWith( + expect.any(OutputCtl), + expect.anything(), + expect.objectContaining({ + inputs: ['assets'], + output: 'assets.zip', + singleAssembly: true, + stepsData: { + [getIntentStepName(['file', 'compress'])]: expect.objectContaining({ + robot: '/file/compress', + result: true, + format: 'zip', + gzip: true, + use: { + steps: [':original'], + bundle_steps: true, + }, + }), + }, + }), + ) + }) + + it('omits nullable defaults like file compress password when not provided', async () => { + const { createSpy } = await runIntentCommand([ + 'file', + 'compress', + '--input', + 'assets', + '--format', + 'zip', + '--out', + 'assets.zip', + ]) + + expect(process.exitCode).toBeUndefined() + expect(createSpy).toHaveBeenCalledWith( + expect.any(OutputCtl), + expect.anything(), + expect.objectContaining({ + stepsData: { + [getIntentStepName(['file', 'compress'])]: { + robot: '/file/compress', + result: true, + format: 'zip', + use: { + steps: [':original'], + bundle_steps: true, + }, + }, + }, + }), + ) + }) + + it('omits 
numeric defaults like video thumbs rotate when not provided', async () => { + const { createSpy } = await runIntentCommand([ + 'video', + 'thumbs', + '--input', + 'demo.mp4', + '--out', + 'thumbs', + ]) + + expect(process.exitCode).toBeUndefined() + expect(createSpy).toHaveBeenCalledWith( + expect.any(OutputCtl), + expect.anything(), + expect.objectContaining({ + stepsData: { + [getIntentStepName(['video', 'thumbs'])]: { + robot: '/video/thumbs', + result: true, + use: ':original', + }, + }, + }), + ) + }) + + it('includes required schema flags in generated usage examples', () => { + expect(getIntentCommand(['document', 'convert']).usage.examples).toEqual([ + ['Run the command', expect.stringContaining('--format')], + ]) + expect(getIntentCommand(['text', 'speak']).usage.examples).toEqual([ + ['Run the command', expect.stringContaining('--provider')], + ]) + expect(getIntentCommand(['document', 'convert']).usage.examples).toEqual([ + ['Run the command', expect.stringContaining('output.pdf')], + ]) + }) + + it('keeps the catalog, generated commands, and smoke cases in sync', () => { + const catalogPaths = intentCatalog.map((definition) => getIntentPaths(definition).join(' ')) + const generatedPaths = intentCommands.map((command) => command.paths[0]?.join(' ')) + const smokePaths = intentSmokeCases.map((smokeCase) => smokeCase.paths.join(' ')) + + expect([...catalogPaths].sort()).toEqual([...generatedPaths].sort()) + expect([...catalogPaths].sort()).toEqual([...smokePaths].sort()) + }) +}) diff --git a/packages/node/test/unit/cli/result-urls.test.ts b/packages/node/test/unit/cli/result-urls.test.ts new file mode 100644 index 00000000..ed25432a --- /dev/null +++ b/packages/node/test/unit/cli/result-urls.test.ts @@ -0,0 +1,47 @@ +import { describe, expect, it } from 'vitest' + +import { collectResultUrlRows, formatResultUrlRows } from '../../../src/cli/resultUrls.ts' + +describe('result url helpers', () => { + it('prefers ssl_url and falls back to basename/name 
fields', () => { + const rows = collectResultUrlRows({ + assemblyId: 'assembly-1', + results: { + generated: [ + { + basename: 'fallback-name.png', + name: null, + ssl_url: 'https://secure.example.com/file.png', + url: 'http://insecure.example.com/file.png', + }, + ], + }, + }) + + expect(rows).toEqual([ + { + assemblyId: 'assembly-1', + step: 'generated', + name: 'fallback-name.png', + url: 'https://secure.example.com/file.png', + }, + ]) + }) + + it('formats aligned human-readable tables', () => { + const table = formatResultUrlRows([ + { + assemblyId: 'assembly-1', + step: 'describe', + name: 'hero.json', + url: 'https://example.com/hero.json', + }, + ]) + + expect(table).toContain('STEP') + expect(table).toContain('NAME') + expect(table).toContain('URL') + expect(table).toContain('describe') + expect(table).toContain('hero.json') + }) +}) diff --git a/packages/node/test/unit/input-files.test.ts b/packages/node/test/unit/input-files.test.ts index 01179a54..a498fc45 100644 --- a/packages/node/test/unit/input-files.test.ts +++ b/packages/node/test/unit/input-files.test.ts @@ -1,9 +1,24 @@ import { mkdtemp, rm } from 'node:fs/promises' import { tmpdir } from 'node:os' import { join } from 'node:path' -import { describe, expect, it } from 'vitest' +import nock from 'nock' +import { afterEach, describe, expect, it, vi } from 'vitest' import { prepareInputFiles } from '../../src/inputFiles.ts' +const { lookupMock } = vi.hoisted(() => ({ + lookupMock: vi.fn(), +})) + +vi.mock('node:dns/promises', () => ({ + lookup: lookupMock, +})) + +afterEach(() => { + vi.restoreAllMocks() + lookupMock.mockReset() + nock.cleanAll() +}) + describe('prepareInputFiles', () => { it('splits files, uploads, and url imports', async () => { const base64 = Buffer.from('hello').toString('base64') @@ -93,4 +108,75 @@ describe('prepareInputFiles', () => { }), ).rejects.toThrow('URL downloads are limited') }) + + it('rejects hostnames that resolve to private IPs', async () => { + 
lookupMock.mockResolvedValue([{ address: '127.0.0.1', family: 4 }]) + const downloadScope = nock('http://rebind.test').get('/secret').reply(200, 'secret') + + await expect( + prepareInputFiles({ + inputFiles: [ + { + kind: 'url', + field: 'remote', + url: 'http://rebind.test/secret', + }, + ], + urlStrategy: 'download', + allowPrivateUrls: false, + }), + ).rejects.toThrow('URL downloads are limited') + + expect(downloadScope.isDone()).toBe(false) + }) + + it('rejects redirects to private URL downloads', async () => { + lookupMock.mockResolvedValue([{ address: '198.51.100.10', family: 4 }]) + const publicScope = nock('http://198.51.100.10') + .get('/public') + .reply(302, undefined, { Location: 'http://127.0.0.1/secret' }) + const privateScope = nock('http://127.0.0.1').get('/secret').reply(200, 'secret') + + await expect( + prepareInputFiles({ + inputFiles: [ + { + kind: 'url', + field: 'remote', + url: 'http://198.51.100.10/public', + }, + ], + urlStrategy: 'download', + allowPrivateUrls: false, + }), + ).rejects.toThrow('URL downloads are limited') + + expect(publicScope.isDone()).toBe(true) + expect(privateScope.isDone()).toBe(false) + }) + + it('pins URL downloads to the validated DNS answer', async () => { + lookupMock.mockResolvedValue([{ address: '198.51.100.10', family: 4 }]) + const downloadScope = nock('http://rebind.test').get('/public').reply(200, 'public-data') + + const result = await prepareInputFiles({ + inputFiles: [ + { + kind: 'url', + field: 'remote', + url: 'http://rebind.test/public', + }, + ], + urlStrategy: 'download', + allowPrivateUrls: false, + }) + + try { + const downloadedPath = result.files.remote + expect(downloadedPath).toBeDefined() + expect(downloadScope.isDone()).toBe(true) + } finally { + await Promise.all(result.cleanup.map((cleanup) => cleanup())) + } + }) }) diff --git a/packages/transloadit/package.json b/packages/transloadit/package.json index 63814af2..99acf0ed 100644 --- a/packages/transloadit/package.json +++ 
b/packages/transloadit/package.json @@ -1,6 +1,6 @@ { "name": "transloadit", - "version": "4.7.4", + "version": "4.7.5", "description": "Node.js SDK for Transloadit", "homepage": "https://github.com/transloadit/node-sdk/tree/main/packages/node", "bugs": { @@ -70,19 +70,19 @@ "src": "./src" }, "scripts": { - "check": "yarn lint:ts && yarn fix && yarn test:unit", + "check": "yarn lint:ts && yarn test:unit", "fix:js": "biome check --write .", "lint:ts": "yarn --cwd ../.. tsc:node", "fix:js:unsafe": "biome check --write . --unsafe", "lint:js": "biome check .", - "lint": "npm-run-all --parallel 'lint:js'", - "fix": "npm-run-all --serial 'fix:js'", + "lint": "yarn lint:js", + "fix": "yarn fix:js", "lint:deps": "knip --dependencies --no-progress", "fix:deps": "knip --dependencies --no-progress --fix", "prepack": "node ../../scripts/prepare-transloadit.ts", - "test:unit": "yarn --cwd ../.. tsc:utils && ../../node_modules/.bin/vitest run --coverage ./test/unit", - "test:e2e": "yarn --cwd ../.. tsc:utils && ../../node_modules/.bin/vitest run ./test/e2e", - "test": "yarn --cwd ../.. tsc:utils && ../../node_modules/.bin/vitest run --coverage" + "test:unit": "yarn --cwd ../.. tsc:utils && ../../node_modules/.bin/vitest run --coverage --passWithNoTests ./test/unit", + "test:e2e": "yarn --cwd ../.. tsc:utils && ../../node_modules/.bin/vitest run --passWithNoTests ./test/e2e", + "test": "yarn --cwd ../.. 
tsc:utils && ../../node_modules/.bin/vitest run --coverage --passWithNoTests" }, "license": "MIT", "main": "./dist/Transloadit.js", diff --git a/scripts/fingerprint-pack.ts b/scripts/fingerprint-pack.ts index beef9e2c..cad2f1a0 100644 --- a/scripts/fingerprint-pack.ts +++ b/scripts/fingerprint-pack.ts @@ -3,7 +3,7 @@ import { createHash } from 'node:crypto' import { createReadStream } from 'node:fs' import { mkdir, mkdtemp, readFile, rm, stat, writeFile } from 'node:fs/promises' import { tmpdir } from 'node:os' -import { resolve } from 'node:path' +import { relative, resolve, sep } from 'node:path' import { promisify } from 'node:util' const execFileAsync = promisify(execFile) @@ -112,6 +112,11 @@ const runWithConcurrency = async ( return results } +const normalizePackageDir = (cwd: string): string => { + const normalized = relative(process.cwd(), cwd).split(sep).join('/') + return normalized === '' ? '.' : normalized +} + const main = async (): Promise<void> => { + const { target, out, keep, ignoreScripts, quiet } = parseArgs() + const cwd = resolve(process.cwd(), target) @@ -153,7 +158,7 @@ const main = async (): Promise<void> => { const packageJson = JSON.parse(packageJsonRaw) const summary = { - packageDir: cwd, + packageDir: normalizePackageDir(cwd), tarball: { filename: info.filename, sizeBytes: tarballStat.size, diff --git a/scripts/prepare-transloadit.ts b/scripts/prepare-transloadit.ts index d0f298c1..64ecb8c7 100644 --- a/scripts/prepare-transloadit.ts +++ b/scripts/prepare-transloadit.ts @@ -28,13 +28,62 @@ const formatPackageJson = (data: Record<string, unknown>): string => { type PackageJson = Record<string, unknown> & { scripts?: Record<string, string> } +function replaceRequired( + value: string, + searchValue: string, + replaceValue: string, + label: string, +): string { + if (!value.includes(searchValue)) { + throw new Error(`Expected ${label} to include ${JSON.stringify(searchValue)}`) + } + + return value.replace(searchValue, replaceValue) +} + +function deriveLegacyScripts(nodeScripts: Record<string, string>): Record<string, string> { + 
const scripts = { ...nodeScripts } + if (scripts.check != null) { + scripts.check = replaceRequired(scripts.check, ' && yarn fix', '', 'scripts.check') + } + + if (scripts['test:unit'] != null) { + scripts['test:unit'] = replaceRequired( + scripts['test:unit'], + 'vitest run --coverage ./test/unit', + 'vitest run --coverage --passWithNoTests ./test/unit', + 'scripts.test:unit', + ) + } + + if (scripts['test:e2e'] != null) { + scripts['test:e2e'] = replaceRequired( + scripts['test:e2e'], + 'vitest run ./test/e2e', + 'vitest run --passWithNoTests ./test/e2e', + 'scripts.test:e2e', + ) + } + + if (scripts.test != null) { + scripts.test = replaceRequired( + scripts.test, + 'vitest run --coverage', + 'vitest run --coverage --passWithNoTests', + 'scripts.test', + ) + } + + scripts.prepack = 'node ../../scripts/prepare-transloadit.ts' + return scripts +} + const writeLegacyPackageJson = async (): Promise<void> => { const nodePackageJson = await readJson(resolve(nodePackage, 'package.json')) const legacyExisting = await readJson(resolve(legacyPackage, 'package.json')).catch( () => null, ) - const scripts = { ...(nodePackageJson.scripts ?? {}) } - scripts.prepack = 'node ../../scripts/prepare-transloadit.ts' + const scripts = deriveLegacyScripts(nodePackageJson.scripts ?? {}) const legacyPackageJson: PackageJson = { ...nodePackageJson, name: 'transloadit',