diff --git a/.ai/skills/council-refactor-ideas/SKILL.md b/.ai/skills/council-refactor-ideas/SKILL.md
new file mode 100644
index 00000000..51b79acb
--- /dev/null
+++ b/.ai/skills/council-refactor-ideas/SKILL.md
@@ -0,0 +1,18 @@
+---
+name: council-refactor-ideas
+description:
+  Run `~/code/dotfiles/bin/council.ts refactor` to get multi-agent refactoring ideas, evaluate
+  proposals, and bring them to the human.
+---
+
+# Council Refactoring Ideas
+
+## Overview
+
+Run the local `council.ts refactor` script to get multi-agent refactoring brainstorms, then triage
+and address valid issues before final validation and PR creation.
+
+## Workflow
+
+Run `~/code/dotfiles/bin/council.ts refactor` to gather additional refactoring ideas, then evaluate
+which ones make sense.
diff --git a/.ai/skills/council-review/SKILL.md b/.ai/skills/council-review/SKILL.md
new file mode 100644
index 00000000..4bfb4a82
--- /dev/null
+++ b/.ai/skills/council-review/SKILL.md
@@ -0,0 +1,53 @@
+---
+name: council-review
+description:
+  Run `~/code/dotfiles/bin/council.ts review` to perform a multi-model review, evaluate findings,
+  fix valid issues (with tests when possible), add clarifying comments for invalid findings, then
+  run required checks. Use before PR creation or when the user asks for a deep review.
+---
+
+# Council Review
+
+## Overview
+
+Run the local `council.ts review` script to get multi-model review findings, then triage and address
+valid issues before final validation and PR creation.
+
+## Workflow
+
+### 1) Preflight
+
+- Confirm the script exists and is executable: `~/code/dotfiles/bin/council.ts`.
+- Do not use `--post` unless explicitly asked.
+- Expect a long runtime (30+ minutes), so set any command timeout on your side to >= 45 minutes.
+
+### 2) Run the review
+
+- Run: `~/code/dotfiles/bin/council.ts review` from inside the repository you are working on.
+- Capture the consolidated output (P0-P3 items or "No issues found.").
+
+### 3) Triage findings
+
+For each item:
+
+- **Valid + worth fixing**
+  - If tests exist for the area, add or extend coverage for the issue.
+  - Run the test and confirm it fails.
+  - Apply the fix and re-run the test to confirm it passes.
+- **Invalid or not worth fixing**
+  - Add a brief code comment explaining why it is safe or intentional.
+
+### 4) Re-run review if needed
+
+- Re-run `council.ts review` only if you made substantive changes that affect earlier findings.
+
+### 5) Final validation
+
+- Run repo-required checks (for this repo: `yarn check`).
+- Note any warnings but avoid unrelated changes unless required.
+
+## Notes
+
+- Avoid making modifications while the review is running; only edit after it finishes.
+- If `council.ts review` fails, review the errors and the script's source code, then propose a fix
+  for the review script to the human; do not change the script yourself.
diff --git a/.changeset/mcp-logger-redaction.md b/.changeset/mcp-logger-redaction.md
new file mode 100644
index 00000000..e2c89c1d
--- /dev/null
+++ b/.changeset/mcp-logger-redaction.md
@@ -0,0 +1,6 @@
+---
+"@transloadit/mcp-server": patch
+"@transloadit/node": patch
+---
+
+Add sev-logger-based logging with redaction for the MCP server, and improve input handling with trusted assembly URLs and configurable URL download restrictions.
diff --git a/README.md b/README.md
index c3494d8e..c5843e85 100644
--- a/README.md
+++ b/README.md
@@ -11,18 +11,51 @@ # Transloadit JavaScript/TypeScript SDKs
-Monorepo for Transloadit SDKs and shared packages.
+Monorepo for Transloadit SDKs, shared packages, and the MCP server.
-For SDK usage docs, see `packages/node/README.md`. +For full SDK usage docs, see `packages/node/README.md`. ## Packages -- `@transloadit/node` — Node.js SDK + CLI (experimental). See `packages/node/README.md`. +- `@transloadit/node` — Node.js SDK + CLI. See `packages/node/README.md`. - `transloadit` — Stable unscoped package (built from `@transloadit/node`). +- `@transloadit/mcp-server` — MCP server (Streamable HTTP + stdio). - `@transloadit/types` — Shared TypeScript types. - `@transloadit/utils` — Shared utilities. - `@transloadit/zod` — Zod schemas for Transloadit APIs. +## Quick start + +### Node SDK + +```ts +import { Transloadit } from '@transloadit/node' + +const client = new Transloadit({ + authKey: process.env.TRANSLOADIT_KEY as string, + authSecret: process.env.TRANSLOADIT_SECRET as string, +}) + +const result = await client.createAssembly({ + params: { + steps: { + ':original': { robot: '/upload/handle' }, + }, + }, + files: { file: '/path/to/file.jpg' }, + waitForCompletion: true, +}) +``` + +### MCP server (local) + +```bash +corepack yarn workspace @transloadit/mcp-server build +node packages/mcp-server/dist/cli.js http --host 127.0.0.1 --port 5723 +``` + +See `docs/mcp-spec.md` for the MCP design and `docs/mcp-todo.md` for the remaining work. + ## Development - Install: `corepack yarn` @@ -35,4 +68,3 @@ See `CONTRIBUTING.md` for full guidelines. - Docs live under `docs/`. - The `transloadit` package is prepared via `scripts/prepare-transloadit.ts`. - diff --git a/docs/fingerprint/transloadit-baseline.json b/docs/fingerprint/transloadit-baseline.json index a0e6e14c..264fe0d8 100644 --- a/docs/fingerprint/transloadit-baseline.json +++ b/docs/fingerprint/transloadit-baseline.json @@ -2,8 +2,8 @@ "packageDir": "/home/kvz/code/node-sdk/packages/transloadit", "tarball": { "filename": "transloadit-4.3.0.tgz", - "sizeBytes": 1208994, - "sha256": "f608600142ce700cc7a4f4d71eed502fecd9d20c3ce5832cd81c7a066f64a7b4" + "sizeBytes": 1229255, + "sha256": "0b1adb20b160254719eedb2dbcd6f28c7561b2258840643f20867fa664090531" }, "packageJson": { "name": "transloadit", @@ -311,6 +311,11 @@ "sizeBytes": 3534, "sha256": "c4bd648bb097acadbc349406192105367b9d94c516700b99c9f4d7a4b6c7a6f0" }, + { + "path": "dist/alphalib/goldenTemplates.js", + "sizeBytes": 1462, + "sha256": "73213f340a82273eefd58585345af6b4237a136ec97fe2946893ab933feab659" + }, { "path": "dist/alphalib/types/robots/google-import.js", "sizeBytes": 3748, @@ -386,6 +391,11 @@ "sizeBytes": 1794, "sha256": "cea0e51dbb809beef425325c681fc3ce087a082f02ff66b6474001a11b2fbd37" }, + { + "path": "dist/inputFiles.js", + "sizeBytes": 7836, + "sha256": "1d77d129abc1b11be894d1cf6c34afc93370165e39871d6d5b672c058d1a0489" + }, { "path": "dist/lintAssemblyInput.js", "sizeBytes": 2335, @@ -456,6 +466,11 @@ "sizeBytes": 935, "sha256": "e01935073eab55214d9e37fa2d25e5615368efb8e9e2aedfa7a765e0d6e2bd84" }, + { + "path": "dist/robots.js", + "sizeBytes": 8031, + "sha256": "2e0f9d3136a5244287c77acf529b71294408d4e77b9ac8f8368bd42f60f04645" + }, { "path": "dist/alphalib/types/robots/s3-import.js", "sizeBytes": 8446, @@ -568,8 +583,8 @@ }, { "path": "dist/Transloadit.js", - "sizeBytes": 33909, - "sha256": "81ebf8c22c7b0afee5b7a739f839d780efbf71565168c03c8a37656cc166ebc7" + "sizeBytes": 36726, + "sha256": "1b3ded5575fb9e02032831df6f5ca10b6c33b0181b59cf44a195248f78bd68ef" }, { "path": "dist/alphalib/tryCatch.js", @@ -583,8 +598,8 @@ }, { "path": "dist/tus.js", - "sizeBytes": 5118, - "sha256": "755dbda7676d8f50a52cfe6278d1e2f65b7fbfb388636ad33b37bd14df9f98f1" + 
"sizeBytes": 7775, + "sha256": "9e009aad8928e1417f312de5fe6d7300dd1bfd671458fab57607217aeda6f8c6" }, { "path": "dist/cli/types.js", @@ -663,8 +678,8 @@ }, { "path": "package.json", - "sizeBytes": 2417, - "sha256": "1427cfc8713e21dc96e87cdb75f42517ac9b68fce11873682950a77b39393d56" + "sizeBytes": 2392, + "sha256": "da426af5fcb55e65975b94d1e31b01b3e046ee561db0742e0e2a621d6a837b8b" }, { "path": "dist/alphalib/types/robots/_index.d.ts.map", @@ -1246,6 +1261,16 @@ "sizeBytes": 2145, "sha256": "ce1bf48c1cc713ae843061cba3c3b119475baa5cb6b62ac4b575e50b297bcf71" }, + { + "path": "dist/alphalib/goldenTemplates.d.ts.map", + "sizeBytes": 372, + "sha256": "9d03bb8544e479187f7cc025e510299f7c8c4a7605f6387799c3d6e0d677e205" + }, + { + "path": "dist/alphalib/goldenTemplates.js.map", + "sizeBytes": 965, + "sha256": "4c179dfd32215c7ecc51885928c5449486ae90b0653485624466390401384e5c" + }, { "path": "dist/alphalib/types/robots/google-import.d.ts.map", "sizeBytes": 960, @@ -1396,6 +1421,16 @@ "sizeBytes": 1640, "sha256": "52cc8c7351fa5905ce7541db198cef4fc55f504a868ca673e843ee8dcb988d16" }, + { + "path": "dist/inputFiles.d.ts.map", + "sizeBytes": 1438, + "sha256": "ac8a1b3b69cfd346810bd841eb66bc8b61788a56ba75c1149dc7fba5757009b0" + }, + { + "path": "dist/inputFiles.js.map", + "sizeBytes": 8595, + "sha256": "fa96090c58247759bef9b7767bd4b4f474bba332ee5a6edf0429e89e99a0c25c" + }, { "path": "dist/lintAssemblyInput.d.ts.map", "sizeBytes": 522, @@ -1536,6 +1571,16 @@ "sizeBytes": 854, "sha256": "c743fb4ea5217d34ff665926bd14ecbb259dec99c2de862abfe787ece58817a0" }, + { + "path": "dist/robots.d.ts.map", + "sizeBytes": 1120, + "sha256": "f9efcf3e84bb7f29193f068541a9f60b6f7ddd8cdffffa7e673dc25471217be8" + }, + { + "path": "dist/robots.js.map", + "sizeBytes": 9019, + "sha256": "e783d0ae0670710daaa702b8b8d37aeb494e9de30c9854da4add78fe73eb4748" + }, { "path": "dist/alphalib/types/robots/s3-import.d.ts.map", "sizeBytes": 1023, @@ -1758,13 +1803,13 @@ }, { "path": "dist/Transloadit.d.ts.map", - "sizeBytes": 5489, - "sha256": "b7868cdad601dae6744c0430f32d51dbbf43da547f8eae92878c909e59fbc77a" + "sizeBytes": 6364, + "sha256": "d04fe4e23e6f9c46f828838b60d0e3999a0d3f33f7e7ff0e193d280e5d6e8da5" }, { "path": "dist/Transloadit.js.map", - "sizeBytes": 24358, - "sha256": "6126b3624bd35033aaac508ccd1dcc572fdacc6a5298a43d6274ea23325349f0" + "sizeBytes": 26804, + "sha256": "42b0aada7680ba8686ce130b06b70fff5a0c75f2f81aa28f834eaea49fd58a4a" }, { "path": "dist/alphalib/tryCatch.d.ts.map", @@ -1788,13 +1833,13 @@ }, { "path": "dist/tus.d.ts.map", - "sizeBytes": 741, - "sha256": "01a29f00b0d447ba7cfdd25c86900d5d31be3662f1575866716be6d742385bf3" + "sizeBytes": 840, + "sha256": "c741ce723a2028dea0ed5765f8872bd4697b973f1c89156694ac907ee8a29d11" }, { "path": "dist/tus.js.map", - "sizeBytes": 4055, - "sha256": "92a26fe06403b13ba34271a80fdb23781bf30da4ca7ced0bc378b4a6231cbdd6" + "sizeBytes": 6459, + "sha256": "76c76629a1424f4aa3cd8952403cd06b8055557b4b70648d43b74064d1183b47" }, { "path": "dist/cli/types.d.ts.map", @@ -1948,8 +1993,8 @@ }, { "path": "README.md", - "sizeBytes": 33341, - "sha256": "556700c6ab771e25f501ab992baa19c9816331e55be577636c36bee87c652891" + "sizeBytes": 35551, + "sha256": "442743aa79f063ee5da4e50601debb28b492377b3359aa4c2596f4adefbd372a" }, { "path": "dist/alphalib/types/robots/_index.d.ts", @@ -2531,6 +2576,16 @@ "sizeBytes": 4197, "sha256": "1bbaa2361cc3675a29178cbd0f4fcecaad1033032f154a6da36c5c677a9c9447" }, + { + "path": "dist/alphalib/goldenTemplates.d.ts", + "sizeBytes": 1405, + "sha256": 
"119113a122aff76eb1940fd1965ae543f882bee68c558a408a8ee2879666ccf8" + }, + { + "path": "src/alphalib/goldenTemplates.ts", + "sizeBytes": 1291, + "sha256": "8aee08c17888c3ebb7dd83a096f75d3517ce565c2d2ae518ca229e6e9e0d84e1" + }, { "path": "dist/alphalib/types/robots/google-import.d.ts", "sizeBytes": 9781, @@ -2681,6 +2736,16 @@ "sizeBytes": 1711, "sha256": "a4646e7d078b97e32d7a3c0c0f61aeb32898d1b25bda89ba20703a23b302f6f2" }, + { + "path": "dist/inputFiles.d.ts", + "sizeBytes": 1294, + "sha256": "dd490923c8af01790b1a7c72cd6578312a0af78ee035cc5fca55e24738d87fc1" + }, + { + "path": "src/inputFiles.ts", + "sizeBytes": 8411, + "sha256": "0df54cb83ac5c718f3d3f78ffb77a31d485e2ab5f0a9d91b4f64852e72d1a589" + }, { "path": "src/alphalib/typings/json-to-ast.d.ts", "sizeBytes": 760, @@ -2826,6 +2891,16 @@ "sizeBytes": 1325, "sha256": "0591686d6c3787e0af4821649506d88034d3f302b021969dc91d612f7e9b3e8b" }, + { + "path": "dist/robots.d.ts", + "sizeBytes": 974, + "sha256": "8ed3cad4ab59561f16313dad21429f85e188480ffd6a6ee6bdf6f19392429a83" + }, + { + "path": "src/robots.ts", + "sizeBytes": 9285, + "sha256": "dac695b754f892dc4fae4dcf4b39881599964ac4affe319c5046636a569b0c2c" + }, { "path": "dist/alphalib/types/robots/s3-import.d.ts", "sizeBytes": 13045, @@ -3048,13 +3123,13 @@ }, { "path": "dist/Transloadit.d.ts", - "sizeBytes": 10725, - "sha256": "17ef13b3851676c7b028a4822f496fa6c275b9e33bf9e52dc4f392af3ff659e6" + "sizeBytes": 11723, + "sha256": "dee5f012aaf6faef6ca2154f3566c97aeaaf95ff07433e2573628e215dbbf9d3" }, { "path": "src/Transloadit.ts", - "sizeBytes": 37819, - "sha256": "f359a6d1054a0d727225c8c13437ede5da105e0d56bf9efec026c7be4a856da8" + "sizeBytes": 41153, + "sha256": "198560ba943a5c33862e8b735b66a2bb7483d76d29e43efdc7354283217202f1" }, { "path": "dist/alphalib/tryCatch.d.ts", @@ -3078,13 +3153,13 @@ }, { "path": "dist/tus.d.ts", - "sizeBytes": 751, - "sha256": "1d507688120e43a395a409f34f916e908799824a241fdc880fd5e11bf97ac4c9" + "sizeBytes": 905, + "sha256": "29ecc74f6570ffbf3e567a1c8d6b25fc0c767b26858c75bc998d252e26b04b2c" }, { "path": "src/tus.ts", - "sizeBytes": 5132, - "sha256": "24969d1add5f85fea69f9c705b54a6257b2aa4188cd107e1c13b976086a1f2fe" + "sizeBytes": 7554, + "sha256": "31f2245fdab12daedb7bc82b07e9ecc8ba6cfeaf5ef6a2a2a4c2b32789c5288b" }, { "path": "dist/cli/types.d.ts", diff --git a/docs/fingerprint/transloadit-baseline.package.json b/docs/fingerprint/transloadit-baseline.package.json index 78458d70..78ea9f72 100644 --- a/docs/fingerprint/transloadit-baseline.package.json +++ b/docs/fingerprint/transloadit-baseline.package.json @@ -79,7 +79,5 @@ "dist", "src" ], - "bin": { - "transloadit": "./dist/cli.js" - } + "bin": "./dist/cli.js" } diff --git a/docs/mcp-spec.md b/docs/mcp-spec.md index 5717668b..4564c6a8 100644 --- a/docs/mcp-spec.md +++ b/docs/mcp-spec.md @@ -37,7 +37,7 @@ Build a delightful, agent-native interface to Transloadit Assemblies. The MCP se The MCP server should delegate as much work as possible to existing packages: - `@transloadit/node` for API calls, tus uploads, polling, and future resume support. -- `@transloadit/zod/v3` for schemas and robot metadata. +- `@transloadit/zod/v3` for schemas and robot metadata (single source of truth). - Shared alphalib for golden templates. This means we should add missing functionality to `@transloadit/node` first (see todo list). @@ -72,6 +72,10 @@ preferred path is to accept MCP tokens directly as `Authorization: Bearer`. - stdio and localhost HTTP: no MCP auth required by default. 
- non-localhost HTTP: must be configured with a static bearer token, otherwise refuse to start. - Transloadit API calls use `TRANSLOADIT_KEY` + `TRANSLOADIT_SECRET`. +- **Signature auth note:** if the account enforces mandatory signature auth, bearer tokens do **not** + bypass it. The MCP server must be able to sign requests, so provide `TRANSLOADIT_KEY` + + `TRANSLOADIT_SECRET` alongside bearer tokens or expect API2 to reject requests with + `NO_SIGNATURE_FIELD` / `NO_AUTH_EXPIRES_PARAMETER`. ## 5. CORS and network safety @@ -171,6 +175,7 @@ Create or resume an Assembly, optionally uploading files. wait_timeout_ms?: number upload_concurrency?: number upload_chunk_size?: number + upload_behavior?: 'await' | 'background' | 'none' assembly_url?: string } ``` @@ -182,9 +187,17 @@ Create or resume an Assembly, optionally uploading files. - Resume is driven by Assembly status (`tus_uploads` + `uploads`) and the provided files. - This requires stable, **unique** `field` names and file metadata (`filename` + `size`) to match local files to remote uploads. -- URL files are imported via `/http/import` steps injected into the instructions (derived from - `field` names if those steps are not already present). +- URL files are **downloaded and uploaded via tus** by default (no instruction mutation). +- If instructions (including template + overrides) already contain an `/http/import` step, the + server sets/overrides its `url` instead of downloading: + - It first looks for a step named after the file `field`. + - If none match and there is exactly one `/http/import` step, it uses that and supplies a + `url` array when multiple URL inputs are provided. - `wait_for_completion` is opt-in. Default is non-blocking. +- `upload_behavior` controls how uploads run: + - `await`: block until uploads finish (default when `wait_for_completion=true`) + - `background`: start uploads and return once upload URLs exist (default) + - `none`: create upload URLs only; no bytes uploaded **Resume mapping rules** @@ -407,7 +420,22 @@ Defaults: - Host: `127.0.0.1` - Port: `5723` -- Warn and require explicit `--host` when binding to non-localhost. +- Bind to localhost by default (no MCP auth required). +- When binding to a non-localhost host, require `TRANSLOADIT_MCP_TOKEN`. + +Example `mcp.json`: + +```json +{ + "authKey": "your_key", + "authSecret": "your_secret", + "mcpToken": "local-dev-token", + "path": "/mcp", + "allowedOrigins": ["https://example.com"], + "allowedHosts": ["127.0.0.1:5723"], + "enableDnsRebindingProtection": true +} +``` ## 11. Implementation notes @@ -416,14 +444,14 @@ Defaults: - Prefer named exports everywhere. - Keep tool responses short; avoid dumping massive schemas into MCP responses. -## 12. Error codes (standardized) +## 12. Error codes (current) -- `BAD_REQUEST` -- `AUTH_REQUIRED` -- `AUTH_INVALID` -- `TRANSLOADIT_ERROR` -- `VALIDATION_ERROR` -- `BASE64_TOO_LARGE` -- `INTERNAL_ERROR` +These are the `code` values currently used inside `errors`/`warnings` arrays: -These are the `code` values used inside `errors`/`warnings` arrays. +- `mcp_invalid_args` +- `mcp_missing_args` +- `mcp_missing_auth` +- `mcp_duplicate_field` +- `mcp_base64_too_large` +- `mcp_unknown_template` +- `mcp_signature_auth_required` diff --git a/docs/mcp-todo.md b/docs/mcp-todo.md index 491cd800..abf2ade0 100644 --- a/docs/mcp-todo.md +++ b/docs/mcp-todo.md @@ -17,101 +17,110 @@ This list is ordered. 
The top section is intentionally focused on other packages - Resume only for path-based inputs; non-file inputs always start a new tus upload. - Ensure `createAssembly()` can return `upload_urls` for best UX (optional, not required for resume). -- Allow non-blocking uploads: - - Add `uploadBehavior` option to `createAssembly()`: - - `await` (current behavior) - - `background` (return immediately after starting uploads) - - `none` (return assembly + upload URLs without starting uploads) - - Return `upload_urls` for each file field when `background`/`none` is selected. -- Add a small helper that converts `InputFile[]` to: - - `uploads` streams (base64) - - `files` paths - - `/http/import` step injections (URL files) -- Export robot catalog helpers: - - `listRobots()` - - `getRobotHelp()` +- ✅ Allow non-blocking uploads: + - ✅ Add `uploadBehavior` option to `createAssembly()`: + - ✅ `await` (current behavior) + - ✅ `background` (return immediately after starting uploads) + - ✅ `none` (return assembly + upload URLs without starting uploads) + - ✅ Return `upload_urls` for each file field when `background`/`none` is selected. +- ✅ Add a small helper that converts `InputFile[]` to: + - ✅ `uploads` streams (base64) + - ✅ `files` paths + - ✅ `/http/import` step injections (URL files) +- ✅ Export robot catalog helpers: + - ✅ `listRobots()` + - ✅ `getRobotHelp()` ### 1.2 Shared alphalib -- Add golden template definitions under alphalib: +- ✅ Add golden template definitions under alphalib: - `~transloadit/encode-hls-video@0.0.1` - Export as a plain object map for SDK + MCP server. ### 1.3 `@transloadit/zod/v3` -- Export a minimal robot metadata registry (name, summary, category, param schema). -- Ensure `AssemblyStatus` schema and `AssemblyInstructionsInput` are publicly exported. +- ✅ Export a minimal robot metadata registry (name, summary, category, param schema). +- ✅ Ensure `AssemblyStatus` schema and `AssemblyInstructionsInput` are publicly exported. ## 2. API2 groundwork -- Implement `POST /token`: - - Accept key/secret auth. - - Return opaque bearer token (TTL 6 hours) with scopes. -- Accept MCP bearer tokens for Assembly endpoints used by the MCP server +- ✅ Implement `POST /token`: + - ✅ Accept key/secret auth. + - ✅ Return opaque bearer token (TTL 6 hours) with scopes. +- ✅ Accept MCP bearer tokens for Assembly endpoints used by the MCP server (create assembly, get assembly status, replay notification, etc.). -- Scope enforcement and audit logging for MCP tokens. +- ✅ Scope enforcement and audit logging for MCP tokens. ## 3. `@transloadit/mcp-server` package ### 3.1 Package scaffold -- Add `packages/mcp-server` with ESM-only build, Node ≥ 22. +- ✅ Add `packages/mcp-server` with ESM-only build, Node ≥ 22. - Provide exports: - - `createTransloaditMcpServer()` - - `createTransloaditMcpHttpHandler()` - - `createTransloaditMcpExpressRouter()` -- Add CLI entrypoint `transloadit-mcp`. + - ✅ `createTransloaditMcpServer()` + - ✅ `createTransloaditMcpHttpHandler()` + - ✅ `createTransloaditMcpExpressRouter()` +- ✅ Add CLI entrypoint `transloadit-mcp`. ### 3.2 Transports -- Streamable HTTP handler at `/mcp`. -- stdio transport for local execution. +- ✅ Streamable HTTP handler at `/mcp`. +- ✅ stdio transport for local execution. - No SSE. ### 3.3 Tool implementations -- `transloadit_create_assembly`: +- ✅ `transloadit_create_assembly`: - New Assembly creation and resume logic. - Optional background uploads via `@transloadit/node`. - URL imports via injected `/http/import` steps. 
-- `transloadit_get_assembly_status` -- `transloadit_wait_for_assembly` -- `transloadit_validate_assembly` -- `transloadit_list_robots` -- `transloadit_get_robot_help` -- `transloadit_list_golden_templates` +- ✅ `transloadit_get_assembly_status` +- ✅ `transloadit_wait_for_assembly` +- ✅ `transloadit_validate_assembly` +- ✅ `transloadit_list_robots` +- ✅ `transloadit_get_robot_help` +- ✅ `transloadit_list_golden_templates` ### 3.4 Auth & security -- Hosted bearer token validation (pass-through to API2). -- Self-hosted auth: - - No auth on localhost. - - Required static bearer token on non-localhost. -- Configurable CORS allowlist with 403 on disallowed origins. +- ✅ Hosted bearer token validation (pass-through to API2). +- ✅ Self-hosted auth: + - ✅ No auth on localhost. + - ✅ Required static bearer token on non-localhost. +- ✅ Configurable CORS allowlist with 403 on disallowed origins. ### 3.5 Config surface -- Env support: `TRANSLOADIT_KEY`, `TRANSLOADIT_SECRET`, `TRANSLOADIT_MCP_TOKEN`. -- CLI flags: `--host`, `--port`, `--config`. -- Defaults: host `127.0.0.1`, port `5723`. +- ✅ Env support: `TRANSLOADIT_KEY`, `TRANSLOADIT_SECRET`, `TRANSLOADIT_MCP_TOKEN`. +- ✅ CLI flags: `--host`, `--port`, `--config`. +- ✅ Defaults: host `127.0.0.1`, port `5723`. ## 4. Tests ### 4.1 Unit tests -- Linting output formatting and error mapping. -- URL import injection logic. -- Base64 size limit enforcement. -- Robot catalog and help utilities. +- ✅ Linting output formatting and error mapping. +- ✅ URL import injection logic. +- ✅ Base64 size limit enforcement. +- ✅ Robot catalog and help utilities. ### 4.2 E2E tests -- Full flow: create → upload → wait → results. -- Resume flow: interrupt upload, resume using Assembly status and the same input files. -- Gate live tests behind env vars (e.g., `TRANSLOADIT_E2E=1`). +- ✅ Stdio smoke test (official MCP client). +- ✅ Robot list + help smoke test. +- ✅ Full flow: create → upload → wait → results. +- ✅ Golden templates list smoke test. +- ✅ Resume flow: interrupt upload, resume using Assembly status and the same input files. +- ✅ Gate live tests behind env vars (credentials present). ## 5. Docs - `docs/mcp-spec.md` (this design doc). - `docs/mcp-todo.md` (this task list). -- README snippet for local usage (CLI + Claude Desktop example). +- ✅ README overview + local usage snippet. + +## 6. Claude Web integration (Next.js) + +- Build a Next.js UI flow to mint and pass bearer tokens for Claude Web. +- Add MCP client connection steps and UX guidance for the token handoff. +- Document the Claude Web flow in the MCP spec and repo README once implemented. 
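+
+As a rough sketch of that token handoff (the `/token` request mirrors the bearer-auth e2e test in
+this repo; the Next.js route handler shape and file path are illustrative assumptions):
+
+```ts
+// app/api/mcp-token/route.ts (hypothetical path): mint a short-lived MCP bearer token server-side
+export async function POST(): Promise<Response> {
+  const basic = Buffer.from(
+    `${process.env.TRANSLOADIT_KEY}:${process.env.TRANSLOADIT_SECRET}`,
+  ).toString('base64')
+
+  const body = new URLSearchParams({
+    grant_type: 'client_credentials',
+    scope: 'assemblies:write assemblies:read',
+    aud: 'mcp',
+  })
+
+  const response = await fetch('https://api2.transloadit.com/token', {
+    method: 'POST',
+    headers: {
+      Authorization: `Basic ${basic}`,
+      'Content-Type': 'application/x-www-form-urlencoded',
+    },
+    body: body.toString(),
+  })
+
+  if (!response.ok) {
+    return new Response('Failed to mint MCP token', { status: 502 })
+  }
+
+  // Claude Web then passes this token to the MCP server as `Authorization: Bearer <token>`.
+  const { access_token } = (await response.json()) as { access_token?: string }
+  return Response.json({ token: access_token })
+}
+```
+
+The UI hands the returned token to the MCP client configuration, and the MCP server validates it as
+described in section 3.4 above.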
diff --git a/knip.ts b/knip.ts index f9e04f18..eb344c49 100644 --- a/knip.ts +++ b/knip.ts @@ -41,6 +41,12 @@ const config: KnipConfig = { 'vitest/config', ], }, + 'packages/mcp-server': { + entry: ['src/**/*.ts', 'test/**/*.{ts,tsx,js,jsx}'], + project: ['{src,test}/**/*.{ts,tsx,js,jsx}'], + ignore: [...sharedIgnore], + ignoreDependencies: ['@types/express', '@types/node', 'vitest', 'vitest/config'], + }, 'packages/transloadit': { entry: [ 'src/Transloadit.ts', diff --git a/packages/mcp-server/package.json b/packages/mcp-server/package.json new file mode 100644 index 00000000..3afa3ae1 --- /dev/null +++ b/packages/mcp-server/package.json @@ -0,0 +1,53 @@ +{ + "name": "@transloadit/mcp-server", + "version": "0.0.0", + "description": "Transloadit MCP server", + "type": "module", + "license": "MIT", + "packageManager": "yarn@4.12.0", + "engines": { + "node": ">= 22" + }, + "repository": { + "type": "git", + "url": "https://github.com/transloadit/node-sdk", + "directory": "packages/mcp-server" + }, + "files": [ + "dist" + ], + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "exports": { + ".": { + "types": "./dist/index.d.ts", + "default": "./dist/index.js" + }, + "./package.json": "./package.json" + }, + "bin": { + "transloadit-mcp": "./dist/cli.js" + }, + "scripts": { + "lint:ts": "yarn --cwd ../.. tsc:utils && yarn --cwd ../.. tsc:node && ../../node_modules/.bin/tsc --build tsconfig.build.json", + "build": "yarn lint:ts", + "test:unit": "yarn --cwd ../.. tsc:utils && yarn --cwd ../.. tsc:node && ../../node_modules/.bin/vitest run ./test/unit", + "test:e2e": "yarn --cwd ../.. tsc:utils && yarn --cwd ../.. tsc:node && ../../node_modules/.bin/vitest run ./test/e2e", + "check": "yarn lint:ts && yarn test:unit", + "prepack": "yarn build" + }, + "dependencies": { + "@modelcontextprotocol/sdk": "^1.25.3", + "@transloadit/node": "^4.3.0", + "@transloadit/sev-logger": "^0.0.15", + "express": "^4.21.2", + "zod": "^4.0.0" + }, + "devDependencies": { + "@types/express": "^4.17.23", + "@types/node": "^24.10.3" + }, + "publishConfig": { + "tag": "experimental" + } +} diff --git a/packages/mcp-server/src/cli.ts b/packages/mcp-server/src/cli.ts new file mode 100644 index 00000000..95cc9b9e --- /dev/null +++ b/packages/mcp-server/src/cli.ts @@ -0,0 +1,150 @@ +import { readFile } from 'node:fs/promises' +import { createServer } from 'node:http' +import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js' +import { createTransloaditMcpHttpHandler, createTransloaditMcpServer } from './index.ts' +import { buildRedactor, getLogger } from './logger.ts' + +const printHelp = (): void => { + process.stdout.write(`transloadit-mcp + +Usage: + transloadit-mcp stdio + transloadit-mcp http [--host 127.0.0.1] [--port 5723] [--config path] + +Environment: + TRANSLOADIT_KEY + TRANSLOADIT_SECRET + TRANSLOADIT_MCP_TOKEN +`) +} + +type CliConfig = { + host?: string + port?: number + configPath?: string +} + +const parseArgs = (args: string[]): { command: string; config: CliConfig } => { + const command = args[0] ?? 'stdio' + const config: CliConfig = {} + + for (let i = 1; i < args.length; i += 1) { + const arg = args[i] + if (arg === '--host') { + config.host = args[i + 1] + i += 1 + continue + } + if (arg.startsWith('--host=')) { + config.host = arg.slice('--host='.length) + continue + } + if (arg === '--port') { + const value = Number(args[i + 1]) + config.port = Number.isFinite(value) ? 
value : undefined
+      i += 1
+      continue
+    }
+    if (arg.startsWith('--port=')) {
+      const value = Number(arg.slice('--port='.length))
+      config.port = Number.isFinite(value) ? value : undefined
+      continue
+    }
+    if (arg === '--config') {
+      config.configPath = args[i + 1]
+      i += 1
+      continue
+    }
+    if (arg.startsWith('--config=')) {
+      config.configPath = arg.slice('--config='.length)
+    }
+  }
+
+  return { command, config }
+}
+
+const isLocalHost = (host: string | undefined): boolean =>
+  host === '127.0.0.1' || host === 'localhost' || host === '::1'
+
+const loadConfig = async (configPath?: string): Promise<Record<string, unknown>> => {
+  if (!configPath) {
+    return {}
+  }
+  const contents = await readFile(configPath, 'utf8')
+  return JSON.parse(contents) as Record<string, unknown>
+}
+
+const main = async (): Promise<void> => {
+  const logger = getLogger().nest('cli')
+  const args = process.argv.slice(2)
+  const { command, config } = parseArgs(args)
+
+  if (command === '--help' || command === '-h') {
+    printHelp()
+    return
+  }
+
+  if (command && command !== 'stdio' && command !== 'http') {
+    throw new Error(`Unknown command: ${command}`)
+  }
+
+  if (command === 'http') {
+    const fileConfig = await loadConfig(config.configPath)
+    const host = (config.host ?? fileConfig.host ?? '127.0.0.1') as string
+    const port = Number(config.port ?? fileConfig.port ?? 5723)
+    const path = (fileConfig.path as string | undefined) ?? '/mcp'
+    const mcpToken = (fileConfig.mcpToken ?? process.env.TRANSLOADIT_MCP_TOKEN) as
+      | string
+      | undefined
+
+    if (!isLocalHost(host) && !mcpToken) {
+      throw new Error('TRANSLOADIT_MCP_TOKEN is required when binding to non-localhost host.')
+    }
+
+    const handler = await createTransloaditMcpHttpHandler({
+      authKey: (fileConfig.authKey ?? process.env.TRANSLOADIT_KEY) as string | undefined,
+      authSecret: (fileConfig.authSecret ?? 
process.env.TRANSLOADIT_SECRET) as string | undefined, + mcpToken, + allowedOrigins: fileConfig.allowedOrigins as string[] | undefined, + allowedHosts: fileConfig.allowedHosts as string[] | undefined, + enableDnsRebindingProtection: fileConfig.enableDnsRebindingProtection as boolean | undefined, + path, + logger, + }) + + const server = createServer(handler) + + await new Promise((resolve) => { + server.listen(port, host, resolve) + }) + + logger.notice(`Transloadit MCP server listening on http://${host}:${port}${path}`) + + const shutdown = async () => { + await handler.close() + server.close() + } + + process.once('SIGINT', shutdown) + process.once('SIGTERM', shutdown) + return + } + + const server = createTransloaditMcpServer({ + authKey: process.env.TRANSLOADIT_KEY, + authSecret: process.env.TRANSLOADIT_SECRET, + }) + const transport = new StdioServerTransport() + await server.connect(transport) +} + +main().catch((err) => { + const logger = getLogger().nest('cli') + const redact = buildRedactor([ + process.env.TRANSLOADIT_KEY, + process.env.TRANSLOADIT_SECRET, + process.env.TRANSLOADIT_MCP_TOKEN, + ]) + logger.err('MCP server failed: %s', redact(err)) + process.exit(1) +}) diff --git a/packages/mcp-server/src/express.ts b/packages/mcp-server/src/express.ts new file mode 100644 index 00000000..69da3464 --- /dev/null +++ b/packages/mcp-server/src/express.ts @@ -0,0 +1,40 @@ +import { randomUUID } from 'node:crypto' +import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js' +import express from 'express' +import type { TransloaditMcpHttpOptions } from './http.ts' +import { createMcpRequestHandler } from './http-request-handler.ts' +import { createTransloaditMcpServer } from './server.ts' + +export type TransloaditMcpExpressOptions = TransloaditMcpHttpOptions & { + path?: string +} + +export const createTransloaditMcpExpressRouter = async ( + options: TransloaditMcpExpressOptions = {}, +) => { + const server = createTransloaditMcpServer(options) + const transport = new StreamableHTTPServerTransport({ + sessionIdGenerator: options.sessionIdGenerator ?? (() => randomUUID()), + allowedOrigins: options.allowedOrigins, + allowedHosts: options.allowedHosts, + enableDnsRebindingProtection: options.enableDnsRebindingProtection, + }) + + await server.connect(transport) + + const router = express.Router() + const routePath = options.path ?? '/mcp' + const handler = createMcpRequestHandler(transport, { + allowedOrigins: options.allowedOrigins, + mcpToken: options.mcpToken, + path: { expectedPath: routePath, allowRoot: true }, + logger: options.logger, + redactSecrets: [options.mcpToken, options.authKey, options.authSecret], + }) + + router.all(routePath, (req, res) => { + void handler(req, res) + }) + + return router +} diff --git a/packages/mcp-server/src/http-helpers.ts b/packages/mcp-server/src/http-helpers.ts new file mode 100644 index 00000000..681d9f87 --- /dev/null +++ b/packages/mcp-server/src/http-helpers.ts @@ -0,0 +1,61 @@ +import { timingSafeEqual } from 'node:crypto' +import type { IncomingMessage, ServerResponse } from 'node:http' + +export const parsePathname = (url: string | undefined, fallback: string): string => { + try { + return new URL(url ?? fallback, 'http://localhost').pathname + } catch { + return fallback + } +} + +export const normalizePath = (path: string): string => + path.length > 1 && path.endsWith('/') ? 
path.slice(0, -1) : path + +export const extractBearerToken = (header: string | undefined): string | undefined => { + if (!header) return undefined + const match = header.trim().match(/^Bearer\s+(.+)$/i) + const token = match?.[1]?.trim() + return token ? token : undefined +} + +export const isAuthorized = (req: IncomingMessage, token: string): boolean => { + const provided = extractBearerToken(req.headers.authorization) + if (!provided) return false + const a = Buffer.from(provided) + const b = Buffer.from(token) + if (a.length !== b.length) return false + return timingSafeEqual(a, b) +} + +export const applyCorsHeaders = ( + req: IncomingMessage, + res: ServerResponse, + allowedOrigins?: string[], +): boolean => { + const origin = req.headers.origin + if (!origin) { + return true + } + + if (allowedOrigins && allowedOrigins.length > 0) { + if (!allowedOrigins.includes(origin)) { + res.statusCode = 403 + res.end('Forbidden') + return false + } + res.setHeader('Access-Control-Allow-Origin', origin) + res.setHeader('Vary', 'Origin') + } else { + res.setHeader('Access-Control-Allow-Origin', '*') + } + + res.setHeader('Access-Control-Allow-Methods', 'GET,POST,DELETE,OPTIONS') + res.setHeader( + 'Access-Control-Allow-Headers', + 'Authorization,Content-Type,Mcp-Session-Id,Last-Event-ID', + ) + res.setHeader('Access-Control-Expose-Headers', 'Mcp-Session-Id') + + return true +} diff --git a/packages/mcp-server/src/http-request-handler.ts b/packages/mcp-server/src/http-request-handler.ts new file mode 100644 index 00000000..c6729299 --- /dev/null +++ b/packages/mcp-server/src/http-request-handler.ts @@ -0,0 +1,63 @@ +import type { IncomingMessage, ServerResponse } from 'node:http' +import type { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js' +import type { SevLogger } from '@transloadit/sev-logger' +import { applyCorsHeaders, isAuthorized, normalizePath, parsePathname } from './http-helpers.ts' +import { buildRedactor, getLogger } from './logger.ts' + +type PathPolicy = { + expectedPath: string + allowRoot?: boolean +} + +type RequestHandlerOptions = { + allowedOrigins?: string[] + mcpToken?: string + path: PathPolicy + logger?: SevLogger + redactSecrets?: Array +} + +export const createMcpRequestHandler = ( + transport: StreamableHTTPServerTransport, + options: RequestHandlerOptions, +) => { + const expectedPath = normalizePath(options.path.expectedPath) + const allowRoot = options.path.allowRoot ?? false + const logger = options.logger ?? getLogger().nest('http') + const redact = buildRedactor(options.redactSecrets ?? 
[])
+
+  return async (req: IncomingMessage, res: ServerResponse) => {
+    const pathname = normalizePath(parsePathname(req.url, expectedPath))
+    if (pathname !== expectedPath && (!allowRoot || pathname !== '/')) {
+      res.statusCode = 404
+      res.end('Not Found')
+      return
+    }
+
+    if (!applyCorsHeaders(req, res, options.allowedOrigins)) {
+      return
+    }
+
+    if (req.method === 'OPTIONS') {
+      res.statusCode = 204
+      res.end()
+      return
+    }
+
+    if (options.mcpToken && !isAuthorized(req, options.mcpToken)) {
+      res.statusCode = 401
+      res.setHeader('WWW-Authenticate', 'Bearer')
+      res.end('Unauthorized')
+      return
+    }
+
+    try {
+      const parsedBody = (req as { body?: unknown }).body
+      await transport.handleRequest(req, res, parsedBody)
+    } catch (error) {
+      logger.err('Request failed: %s', redact({ url: req.url, method: req.method, error }))
+      res.statusCode = 500
+      res.end('Internal Server Error')
+    }
+  }
+}
diff --git a/packages/mcp-server/src/http.ts b/packages/mcp-server/src/http.ts
new file mode 100644
index 00000000..54479104
--- /dev/null
+++ b/packages/mcp-server/src/http.ts
@@ -0,0 +1,54 @@
+import { randomUUID } from 'node:crypto'
+import type { IncomingMessage, ServerResponse } from 'node:http'
+import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js'
+import type { SevLogger } from '@transloadit/sev-logger'
+import { createMcpRequestHandler } from './http-request-handler.ts'
+import type { TransloaditMcpServerOptions } from './server.ts'
+import { createTransloaditMcpServer } from './server.ts'
+
+export type TransloaditMcpHttpOptions = TransloaditMcpServerOptions & {
+  allowedOrigins?: string[]
+  allowedHosts?: string[]
+  enableDnsRebindingProtection?: boolean
+  mcpToken?: string
+  path?: string
+  sessionIdGenerator?: (() => string) | undefined
+  logger?: SevLogger
+}
+
+export type TransloaditMcpHttpHandler = ((
+  req: IncomingMessage,
+  res: ServerResponse,
+) => Promise<void>) & {
+  close: () => Promise<void>
+}
+
+const defaultPath = '/mcp'
+
+export const createTransloaditMcpHttpHandler = async (
+  options: TransloaditMcpHttpOptions = {},
+): Promise<TransloaditMcpHttpHandler> => {
+  const server = createTransloaditMcpServer(options)
+  const transport = new StreamableHTTPServerTransport({
+    sessionIdGenerator: options.sessionIdGenerator ?? (() => randomUUID()),
+    allowedOrigins: options.allowedOrigins,
+    allowedHosts: options.allowedHosts,
+    enableDnsRebindingProtection: options.enableDnsRebindingProtection,
+  })
+
+  await server.connect(transport)
+
+  const handler = createMcpRequestHandler(transport, {
+    allowedOrigins: options.allowedOrigins,
+    mcpToken: options.mcpToken,
+    path: { expectedPath: options.path ?? 
defaultPath }, + logger: options.logger, + redactSecrets: [options.mcpToken, options.authKey, options.authSecret], + }) as TransloaditMcpHttpHandler + + handler.close = async () => { + await transport.close() + } + + return handler +} diff --git a/packages/mcp-server/src/index.ts b/packages/mcp-server/src/index.ts new file mode 100644 index 00000000..e4448564 --- /dev/null +++ b/packages/mcp-server/src/index.ts @@ -0,0 +1,6 @@ +export type { TransloaditMcpExpressOptions } from './express.ts' +export { createTransloaditMcpExpressRouter } from './express.ts' +export type { TransloaditMcpHttpHandler, TransloaditMcpHttpOptions } from './http.ts' +export { createTransloaditMcpHttpHandler } from './http.ts' +export type { TransloaditMcpServerOptions } from './server.ts' +export { createTransloaditMcpServer } from './server.ts' diff --git a/packages/mcp-server/src/logger.ts b/packages/mcp-server/src/logger.ts new file mode 100644 index 00000000..0f37f86a --- /dev/null +++ b/packages/mcp-server/src/logger.ts @@ -0,0 +1,35 @@ +import { SevLogger } from '@transloadit/sev-logger' + +const baseLogger = new SevLogger({ breadcrumbs: ['mcp-server'] }) + +const redactString = (value: string, secrets: string[]): string => { + let output = value.replace(/Bearer\s+[^\s]+/gi, 'Bearer [redacted]') + for (const secret of secrets) { + if (!secret) continue + output = output.split(secret).join('[redacted]') + } + return output +} + +export const redactForLog = (value: unknown, secrets: string[]): string => { + if (typeof value === 'string') return redactString(value, secrets) + if (value instanceof Error) { + const message = redactString(value.message, secrets) + const stack = value.stack ? redactString(value.stack, secrets) : undefined + return stack ? `${message}\n${stack}` : message + } + try { + const serialized = JSON.stringify(value) + if (serialized) return redactString(serialized, secrets) + } catch { + // ignore + } + return redactString(String(value), secrets) +} + +export const buildRedactor = (secrets: Array): ((value: unknown) => string) => { + const normalized = secrets.filter((secret): secret is string => Boolean(secret)) + return (value) => redactForLog(value, normalized) +} + +export const getLogger = (): SevLogger => baseLogger diff --git a/packages/mcp-server/src/server.ts b/packages/mcp-server/src/server.ts new file mode 100644 index 00000000..be42493d --- /dev/null +++ b/packages/mcp-server/src/server.ts @@ -0,0 +1,730 @@ +import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js' +import type { LintAssemblyInstructionsResult } from '@transloadit/node' +import { + getRobotHelp, + goldenTemplates, + listRobots, + mergeTemplateContent, + prepareInputFiles, + Transloadit, +} from '@transloadit/node' +import { z } from 'zod' +import packageJson from '../package.json' with { type: 'json' } +import { extractBearerToken } from './http-helpers.ts' + +export type TransloaditMcpServerOptions = { + authKey?: string + authSecret?: string + mcpToken?: string + serverName?: string + serverVersion?: string +} + +type LintIssueOutput = { + path: string + message: string + severity: 'error' | 'warning' + hint?: string +} + +type ToolMessage = { + code: string + message: string + hint?: string + path?: string +} + +type UploadSummary = { + status: 'none' | 'uploading' | 'complete' + total_files: number + resumed?: boolean + upload_urls?: Record +} + +type HeaderMap = Record + +type ToolExtra = { + requestInfo?: { + headers?: HeaderMap + } +} + +const maxBase64Bytes = 512_000 + +const lintIssueSchema = 
z.object({ + path: z.string(), + message: z.string(), + severity: z.enum(['error', 'warning']), + hint: z.string().optional(), +}) + +const toolMessageSchema = z.object({ + code: z.string(), + message: z.string(), + hint: z.string().optional(), + path: z.string().optional(), +}) + +const listRobotsInputSchema = z.object({ + category: z.string().optional(), + search: z.string().optional(), + limit: z.number().int().positive().optional(), + cursor: z.string().optional(), +}) + +const listRobotsOutputSchema = z.object({ + status: z.literal('ok'), + robots: z.array( + z.object({ + name: z.string(), + title: z.string().optional(), + summary: z.string(), + category: z.string().optional(), + }), + ), + next_cursor: z.string().optional(), +}) + +const robotParamSchema = z.object({ + name: z.string(), + type: z.string(), + description: z.string().optional(), +}) + +const getRobotHelpInputSchema = z.object({ + robot_name: z.string(), + detail_level: z.enum(['summary', 'params', 'examples']).optional(), +}) + +const getRobotHelpOutputSchema = z.object({ + status: z.enum(['ok', 'error']), + robot: z.object({ + name: z.string(), + summary: z.string(), + required_params: z.array(robotParamSchema), + optional_params: z.array(robotParamSchema), + examples: z + .array( + z.object({ + description: z.string(), + snippet: z.record(z.string(), z.unknown()), + }), + ) + .optional(), + }), +}) + +const inputFileSchema = z.discriminatedUnion('kind', [ + z.object({ + kind: z.literal('path'), + field: z.string(), + path: z.string(), + }), + z.object({ + kind: z.literal('base64'), + field: z.string(), + base64: z.string(), + filename: z.string(), + contentType: z.string().optional(), + }), + z.object({ + kind: z.literal('url'), + field: z.string(), + url: z.string(), + filename: z.string().optional(), + contentType: z.string().optional(), + }), +]) + +const createAssemblyInputSchema = z.object({ + instructions: z.unknown().optional(), + golden_template: z + .object({ + slug: z.string(), + version: z.string().optional(), + overrides: z.record(z.string(), z.unknown()).optional(), + }) + .optional(), + files: z.array(inputFileSchema).optional(), + fields: z.record(z.string(), z.unknown()).optional(), + wait_for_completion: z.boolean().optional(), + wait_timeout_ms: z.number().int().positive().optional(), + upload_concurrency: z.number().int().positive().optional(), + upload_chunk_size: z.number().int().positive().optional(), + upload_behavior: z.enum(['await', 'background', 'none']).optional(), + assembly_url: z.string().optional(), +}) + +const createAssemblyOutputSchema = z.object({ + status: z.enum(['ok', 'error']), + assembly: z.unknown().optional(), + upload: z + .object({ + status: z.enum(['none', 'uploading', 'complete']), + total_files: z.number().int().nonnegative(), + resumed: z.boolean().optional(), + upload_urls: z.record(z.string(), z.string()).optional(), + }) + .optional(), + next_steps: z.array(z.string()).optional(), + errors: z.array(toolMessageSchema).optional(), + warnings: z.array(toolMessageSchema).optional(), +}) + +const getAssemblyStatusInputSchema = z.object({ + assembly_url: z.string().optional(), + assembly_id: z.string().optional(), +}) + +const getAssemblyStatusOutputSchema = z.object({ + status: z.enum(['ok', 'error']), + assembly: z.unknown().optional(), + errors: z.array(toolMessageSchema).optional(), + warnings: z.array(toolMessageSchema).optional(), +}) + +const waitForAssemblyInputSchema = z.object({ + assembly_url: z.string().optional(), + assembly_id: z.string().optional(), + 
timeout_ms: z.number().int().positive().optional(), + poll_interval_ms: z.number().int().positive().optional(), +}) + +const waitForAssemblyOutputSchema = z.object({ + status: z.enum(['ok', 'error']), + assembly: z.unknown().optional(), + waited_ms: z.number().int().nonnegative().optional(), + errors: z.array(toolMessageSchema).optional(), + warnings: z.array(toolMessageSchema).optional(), +}) + +const listGoldenTemplatesInputSchema = z.object({}) + +const listGoldenTemplatesOutputSchema = z.object({ + status: z.enum(['ok', 'error']), + templates: z.array( + z.object({ + slug: z.string(), + version: z.string(), + description: z.string(), + steps: z.record(z.string(), z.unknown()), + }), + ), +}) + +const validateAssemblyInputSchema = z.object({ + instructions: z.unknown(), + strict: z.boolean().optional(), + return_fixed: z.boolean().optional(), +}) + +const validateAssemblyOutputSchema = z.object({ + status: z.enum(['ok', 'error']), + linting_issues: z.array(lintIssueSchema), + normalized_instructions: z.unknown().optional(), +}) + +const toLintIssues = (issues: LintAssemblyInstructionsResult['issues']): LintIssueOutput[] => + issues.map((issue) => ({ + path: issue.stepName ? `steps.${issue.stepName}` : 'instructions', + message: issue.summary, + severity: issue.type, + hint: issue.desc && issue.desc !== issue.summary ? issue.desc : undefined, + })) + +const safeJsonParse = (value: string): unknown => { + try { + return JSON.parse(value) + } catch { + return value + } +} + +const buildToolResponse = (payload: Record) => ({ + content: [ + { + type: 'text', + text: JSON.stringify(payload), + }, + ], + structuredContent: payload, +}) + +const buildToolError = ( + code: string, + message: string, + options: { hint?: string; path?: string } = {}, +) => + buildToolResponse({ + status: 'error', + errors: [ + { + code, + message, + hint: options.hint, + path: options.path, + }, + ], + }) + +const signatureAuthWarning: ToolMessage = { + code: 'mcp_signature_auth_required', + message: + 'Bearer tokens still require signature auth if your account enforces it. Configure TRANSLOADIT_KEY/TRANSLOADIT_SECRET so MCP can sign requests.', + hint: 'If you see NO_SIGNATURE_FIELD or NO_AUTH_EXPIRES_PARAMETER, provide key+secret or disable signature auth for the account.', +} + +const createLintClient = (options: TransloaditMcpServerOptions): Transloadit => + new Transloadit({ + authKey: options.authKey ?? 'mcp', + authSecret: options.authSecret ?? 
'mcp', + }) + +const getHeaderValue = (headers: HeaderMap | undefined, name: string): string | undefined => { + if (!headers) return undefined + const normalized = name.toLowerCase() + for (const [key, value] of Object.entries(headers)) { + if (key.toLowerCase() !== normalized) continue + if (Array.isArray(value)) return value[0] + return value + } + return undefined +} + +const getBearerToken = (headers: HeaderMap | undefined): string | undefined => + extractBearerToken(getHeaderValue(headers, 'authorization')) + +const getSignatureAuthWarnings = ( + options: TransloaditMcpServerOptions, + extra: ToolExtra, +): ToolMessage[] => { + const token = getBearerToken(extra.requestInfo?.headers) + if (!token) return [] + if (token === options.mcpToken) return [] + if (options.authKey && options.authSecret) return [] + return [signatureAuthWarning] +} + +type LiveClientResult = { client: Transloadit } | { error: ReturnType } + +const createLiveClient = ( + options: TransloaditMcpServerOptions, + extra: ToolExtra, +): LiveClientResult => { + const token = getBearerToken(extra.requestInfo?.headers) + const authToken = token && token !== options.mcpToken ? token : undefined + + if (authToken) { + return { + client: new Transloadit({ + authToken, + authKey: options.authKey, + authSecret: options.authSecret, + }), + } + } + + if (!options.authKey || !options.authSecret) { + return { + error: buildToolError( + 'mcp_missing_auth', + 'Missing TRANSLOADIT_KEY/TRANSLOADIT_SECRET or Authorization: Bearer token for live API calls.', + ), + } + } + + return { + client: new Transloadit({ + authKey: options.authKey, + authSecret: options.authSecret, + }), + } +} + +const isRecord = (value: unknown): value is Record => + typeof value === 'object' && value !== null + +const getAssemblyIdFromUrl = (assemblyUrl: string): string => { + const match = assemblyUrl.match(/\/assemblies\/([^/?#]+)/) + if (!match) { + throw new Error(`Invalid assembly URL: ${assemblyUrl}`) + } + return match[1] ?? '' +} + +type AssemblyAccessResult = + | { + client: Transloadit + warnings: ToolMessage[] + assemblyId: string + assemblyUrl?: string + } + | { error: ReturnType } + +const resolveAssemblyAccess = ( + options: TransloaditMcpServerOptions, + extra: ToolExtra, + args: { assembly_url?: string; assembly_id?: string }, +): AssemblyAccessResult => { + const liveClient = createLiveClient(options, extra) + if ('error' in liveClient) return liveClient + + if (!args.assembly_url && !args.assembly_id) { + return { error: buildToolError('mcp_missing_args', 'Provide assembly_url or assembly_id.') } + } + + const assemblyId = args.assembly_url + ? getAssemblyIdFromUrl(args.assembly_url) + : (args.assembly_id as string) + + return { + client: liveClient.client, + warnings: getSignatureAuthWarnings(options, extra), + assemblyId, + assemblyUrl: args.assembly_url, + } +} + +const resolveGoldenTemplate = ( + slug: string, + version?: string, +): (typeof goldenTemplates)[string] | undefined => { + if (slug.includes('@')) { + return goldenTemplates[slug] + } + + if (version) { + return goldenTemplates[`${slug}@${version}`] + } + + const matches = Object.keys(goldenTemplates).filter((key) => key.startsWith(`${slug}@`)) + if (matches.length === 0) return undefined + const latest = matches.sort().at(-1) + return latest ? 
goldenTemplates[latest] : undefined +} + +const parseInstructions = (input: unknown): Record | undefined => { + if (input == null) return undefined + if (typeof input === 'string') { + const parsed = safeJsonParse(input) + return isRecord(parsed) ? parsed : undefined + } + if (isRecord(input)) { + if ('steps' in input) { + return input as Record + } + return { steps: input } + } + return undefined +} + +export const createTransloaditMcpServer = ( + options: TransloaditMcpServerOptions = {}, +): McpServer => { + const server = new McpServer({ + name: options.serverName ?? 'Transloadit MCP', + version: options.serverVersion ?? packageJson.version, + }) + + server.registerTool( + 'transloadit_validate_assembly', + { + title: 'Validate Assembly Instructions', + description: + 'Lint Assembly Instructions without creating an Assembly. Returns structured issues.', + inputSchema: validateAssemblyInputSchema, + outputSchema: validateAssemblyOutputSchema, + }, + async ({ instructions, strict, return_fixed }) => { + const client = createLintClient(options) + const result = await client.lintAssemblyInstructions({ + assemblyInstructions: instructions, + fix: return_fixed ?? false, + fatal: strict ? 'warning' : 'error', + }) + + const payload: Record = { + status: result.success ? 'ok' : 'error', + linting_issues: toLintIssues(result.issues), + } + + if (return_fixed && result.fixedInstructions) { + payload.normalized_instructions = safeJsonParse(result.fixedInstructions) + } + + return buildToolResponse(payload) + }, + ) + + server.registerTool( + 'transloadit_create_assembly', + { + title: 'Create or resume an Assembly', + description: + 'Create or resume an Assembly, optionally uploading files and waiting for completion.', + inputSchema: createAssemblyInputSchema, + outputSchema: createAssemblyOutputSchema, + }, + async ( + { + instructions, + golden_template, + files, + fields, + wait_for_completion, + wait_timeout_ms, + upload_concurrency, + upload_chunk_size, + upload_behavior, + assembly_url, + }, + extra, + ) => { + if (instructions && golden_template) { + return buildToolError( + 'mcp_invalid_args', + 'Provide either instructions or golden_template, not both.', + { path: 'instructions' }, + ) + } + + const liveClient = createLiveClient(options, extra) + if ('error' in liveClient) return liveClient.error + const { client } = liveClient + const warnings = getSignatureAuthWarnings(options, extra) + + const tempCleanups: Array<() => Promise> = [] + + try { + const fileInputs = files ?? [] + let params = parseInstructions(instructions) ?? {} + + if (golden_template) { + const template = resolveGoldenTemplate(golden_template.slug, golden_template.version) + + if (!template) { + return buildToolError( + 'mcp_unknown_template', + `Unknown golden template: ${golden_template.slug}`, + { path: 'golden_template.slug' }, + ) + } + + const overrides = golden_template.overrides + const templateContent = { + steps: template.steps, + } + params = mergeTemplateContent( + templateContent, + overrides && isRecord(overrides) ? (overrides as Record) : undefined, + ) as Record + } + const prep = await prepareInputFiles({ + inputFiles: fileInputs, + params, + fields, + base64Strategy: 'tempfile', + urlStrategy: 'import-if-present', + maxBase64Bytes, + }).catch((error) => { + const message = error instanceof Error ? error.message : 'Invalid file input.' 
+ if (message.startsWith('Duplicate file field')) { + return buildToolError('mcp_duplicate_field', message, { path: 'files' }) + } + if (message.startsWith('Base64 payload exceeds')) { + return buildToolError('mcp_base64_too_large', message, { + hint: 'Use a URL import or path upload instead.', + }) + } + return buildToolError('mcp_invalid_args', message) + }) + if ('content' in prep) { + return prep + } + params = prep.params + const filesMap = prep.files + const uploadsMap = prep.uploads + tempCleanups.push(...prep.cleanup) + + const totalFiles = Object.keys(filesMap).length + Object.keys(uploadsMap).length + const uploadSummary: UploadSummary = { + status: totalFiles > 0 ? 'complete' : 'none', + total_files: totalFiles, + } + + const timeout = wait_timeout_ms + const waitForCompletion = wait_for_completion ?? false + const uploadBehavior = upload_behavior ?? (waitForCompletion ? 'await' : 'background') + const uploadConcurrency = upload_concurrency + const chunkSize = upload_chunk_size + + const assembly = assembly_url + ? await client.resumeAssemblyUploads({ + assemblyUrl: assembly_url, + files: filesMap, + uploads: uploadsMap, + waitForCompletion, + timeout, + uploadConcurrency, + chunkSize, + uploadBehavior, + }) + : await client.createAssembly({ + params, + files: filesMap, + uploads: uploadsMap, + waitForCompletion, + timeout, + uploadConcurrency, + chunkSize, + uploadBehavior, + }) + + if (assembly_url) { + uploadSummary.resumed = true + } + + if (totalFiles === 0) { + uploadSummary.status = 'none' + } else if (uploadBehavior === 'none') { + uploadSummary.status = 'none' + } else if (uploadBehavior === 'background') { + uploadSummary.status = 'uploading' + } + + if (isRecord(assembly.upload_urls)) { + uploadSummary.upload_urls = assembly.upload_urls as Record + } + + const nextSteps = waitForCompletion + ? [] + : ['transloadit_wait_for_assembly', 'transloadit_get_assembly_status'] + + return buildToolResponse({ + status: 'ok', + assembly, + upload: uploadSummary, + next_steps: nextSteps, + ...(warnings.length > 0 ? { warnings } : {}), + }) + } finally { + await Promise.all(tempCleanups.map((cleanup) => cleanup())) + } + }, + ) + + server.registerTool( + 'transloadit_get_assembly_status', + { + title: 'Get Assembly status', + description: 'Fetch the latest Assembly status by URL or ID.', + inputSchema: getAssemblyStatusInputSchema, + outputSchema: getAssemblyStatusOutputSchema, + }, + async ({ assembly_url, assembly_id }, extra) => { + const access = resolveAssemblyAccess(options, extra, { assembly_url, assembly_id }) + if ('error' in access) return access.error + + const assembly = await access.client.getAssembly(access.assemblyId) + + return buildToolResponse({ + status: 'ok', + assembly, + ...(access.warnings.length > 0 ? 
{ warnings: access.warnings } : {}), + }) + }, + ) + + server.registerTool( + 'transloadit_wait_for_assembly', + { + title: 'Wait for Assembly completion', + description: 'Polls until the Assembly completes or timeout is reached.', + inputSchema: waitForAssemblyInputSchema, + outputSchema: waitForAssemblyOutputSchema, + }, + async ({ assembly_url, assembly_id, timeout_ms, poll_interval_ms }, extra) => { + const access = resolveAssemblyAccess(options, extra, { assembly_url, assembly_id }) + if ('error' in access) return access.error + + const start = Date.now() + const assembly = await access.client.awaitAssemblyCompletion(access.assemblyId, { + timeout: timeout_ms, + interval: poll_interval_ms, + assemblyUrl: access.assemblyUrl, + }) + const waited_ms = Date.now() - start + + return buildToolResponse({ + status: 'ok', + assembly, + waited_ms, + ...(access.warnings.length > 0 ? { warnings: access.warnings } : {}), + }) + }, + ) + + server.registerTool( + 'transloadit_list_robots', + { + title: 'List Transloadit robots', + description: 'Returns a filtered list of robots with short summaries.', + inputSchema: listRobotsInputSchema, + outputSchema: listRobotsOutputSchema, + }, + ({ category, search, limit, cursor }) => { + const result = listRobots({ category, search, limit, cursor }) + + return buildToolResponse({ + status: 'ok', + robots: result.robots, + next_cursor: result.nextCursor, + }) + }, + ) + + server.registerTool( + 'transloadit_get_robot_help', + { + title: 'Get robot parameter help', + description: 'Returns a robot summary and parameter details.', + inputSchema: getRobotHelpInputSchema, + outputSchema: getRobotHelpOutputSchema, + }, + ({ robot_name, detail_level }) => { + const help = getRobotHelp({ + robotName: robot_name, + detailLevel: detail_level ?? 'summary', + }) + + return buildToolResponse({ + status: 'ok', + robot: { + name: help.name, + summary: help.summary, + required_params: help.requiredParams, + optional_params: help.optionalParams, + examples: help.examples, + }, + }) + }, + ) + + server.registerTool( + 'transloadit_list_golden_templates', + { + title: 'List golden templates', + description: 'Returns curated starter templates with ready-to-run steps.', + inputSchema: listGoldenTemplatesInputSchema, + outputSchema: listGoldenTemplatesOutputSchema, + }, + () => { + return buildToolResponse({ + status: 'ok', + templates: Object.values(goldenTemplates), + }) + }, + ) + + return server +} diff --git a/packages/mcp-server/test/e2e/base64-limit.test.ts b/packages/mcp-server/test/e2e/base64-limit.test.ts new file mode 100644 index 00000000..21ed24a0 --- /dev/null +++ b/packages/mcp-server/test/e2e/base64-limit.test.ts @@ -0,0 +1,51 @@ +import type { Client } from '@modelcontextprotocol/sdk/client' +import { afterAll, beforeAll, describe, expect, it } from 'vitest' +import { createMcpClient, parseToolPayload } from './mcp-client.ts' + +const shouldRun = process.env.TRANSLOADIT_KEY != null && process.env.TRANSLOADIT_SECRET != null +const maybeDescribe = shouldRun ? 
describe : describe.skip + +maybeDescribe('mcp-server base64 limit (stdio)', { timeout: 30000 }, () => { + let client: Client + + beforeAll(async () => { + client = await createMcpClient() + }) + + afterAll(async () => { + await client?.close() + }) + + it('fails fast with a helpful error when base64 exceeds the limit', async () => { + const tooLarge = Buffer.alloc(600_000).toString('base64') + + const result = await client.callTool({ + name: 'transloadit_create_assembly', + arguments: { + instructions: { + steps: { + ':original': { + robot: '/upload/handle', + }, + }, + }, + files: [ + { + kind: 'base64', + field: 'file', + filename: 'too-large.bin', + base64: tooLarge, + }, + ], + }, + }) + + const payload = parseToolPayload(result) + expect(payload.status).toBe('error') + expect(Array.isArray(payload.errors)).toBe(true) + const error = payload.errors?.[0] as { code?: string; hint?: string; message?: string } + expect(error?.code).toBe('mcp_base64_too_large') + expect(error?.hint).toContain('Use a URL import or path upload instead') + expect(error?.message).toContain('Base64 payload exceeds') + }) +}) diff --git a/packages/mcp-server/test/e2e/bearer-auth.test.ts b/packages/mcp-server/test/e2e/bearer-auth.test.ts new file mode 100644 index 00000000..8fb0970d --- /dev/null +++ b/packages/mcp-server/test/e2e/bearer-auth.test.ts @@ -0,0 +1,98 @@ +import type { Client } from '@modelcontextprotocol/sdk/client' +import { afterAll, beforeAll, describe, expect, it } from 'vitest' +import { createHttpClient, startHttpServer } from './http-server.ts' +import { parseToolPayload } from './mcp-client.ts' + +const shouldRun = process.env.TRANSLOADIT_KEY != null && process.env.TRANSLOADIT_SECRET != null +const maybeDescribe = shouldRun ? describe : describe.skip + +const fetchBearerToken = async (): Promise => { + const authKey = process.env.TRANSLOADIT_KEY as string + const authSecret = process.env.TRANSLOADIT_SECRET as string + const basic = Buffer.from(`${authKey}:${authSecret}`).toString('base64') + const body = new URLSearchParams({ + grant_type: 'client_credentials', + scope: 'assemblies:write assemblies:read', + aud: 'mcp', + }) + + const response = await fetch('https://api2.transloadit.com/token', { + method: 'POST', + headers: { + Authorization: `Basic ${basic}`, + 'Content-Type': 'application/x-www-form-urlencoded', + }, + body: body.toString(), + }) + + if (!response.ok) { + const text = await response.text() + throw new Error(`Failed to mint bearer token: ${response.status} ${text}`) + } + + const payload = (await response.json()) as { access_token?: string } + if (!payload.access_token) { + throw new Error('Bearer token response missing access_token.') + } + + return payload.access_token +} + +maybeDescribe('mcp-server bearer auth (http)', { timeout: 60000 }, () => { + let client: Client + let closeServer: (() => Promise) | undefined + + beforeAll(async () => { + const authKey = process.env.TRANSLOADIT_KEY as string + const authSecret = process.env.TRANSLOADIT_SECRET as string + const { url, close } = await startHttpServer({ authKey, authSecret }) + closeServer = close + const token = await fetchBearerToken() + const clientInfo = await createHttpClient(url, { + Authorization: `Bearer ${token}`, + }) + client = clientInfo.client + }) + + afterAll(async () => { + await client?.close() + await closeServer?.() + }) + + it('creates an assembly using a bearer token', async () => { + const pixelPng = + 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR4nGNgYAAAAAMAASsJTYQAAAAASUVORK5CYII=' + 
+ const result = await client.callTool({ + name: 'transloadit_create_assembly', + arguments: { + instructions: { + steps: { + ':original': { + robot: '/upload/handle', + }, + resized: { + robot: '/image/resize', + use: ':original', + width: 1, + height: 1, + result: true, + }, + }, + }, + files: [ + { + kind: 'base64', + field: 'file', + filename: 'pixel.png', + base64: pixelPng, + }, + ], + wait_for_completion: true, + }, + }) + + const payload = parseToolPayload(result) + expect(payload.status).toBe('ok') + }) +}) diff --git a/packages/mcp-server/test/e2e/create-assembly.test.ts b/packages/mcp-server/test/e2e/create-assembly.test.ts new file mode 100644 index 00000000..b6db3e78 --- /dev/null +++ b/packages/mcp-server/test/e2e/create-assembly.test.ts @@ -0,0 +1,64 @@ +import type { Client } from '@modelcontextprotocol/sdk/client' +import { afterAll, beforeAll, describe, expect, it } from 'vitest' +import { createMcpClient, isRecord, parseToolPayload } from './mcp-client.ts' + +const shouldRun = process.env.TRANSLOADIT_KEY != null && process.env.TRANSLOADIT_SECRET != null +const maybeDescribe = shouldRun ? describe : describe.skip + +maybeDescribe('mcp-server create assembly (stdio)', { timeout: 30000 }, () => { + let client: Client + + beforeAll(async () => { + client = await createMcpClient() + }) + + afterAll(async () => { + await client?.close() + }) + + it('creates an assembly, uploads a file, and returns results', async () => { + const pixelPng = + 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR4nGNgYAAAAAMAASsJTYQAAAAASUVORK5CYII=' + + const result = await client.callTool({ + name: 'transloadit_create_assembly', + arguments: { + instructions: { + steps: { + ':original': { + robot: '/upload/handle', + }, + resized: { + robot: '/image/resize', + use: ':original', + width: 1, + height: 1, + result: true, + }, + }, + }, + files: [ + { + kind: 'base64', + field: 'file', + filename: 'pixel.png', + base64: pixelPng, + }, + ], + wait_for_completion: true, + }, + }) + + const payload = parseToolPayload(result) + + expect(payload.status).toBe('ok') + expect(isRecord(payload.assembly)).toBe(true) + const assembly = payload.assembly as Record + expect(assembly.assembly_id).toBeDefined() + const results = isRecord(assembly.results) ? 
assembly.results : {} + const resized = (results as Record).resized + expect(Array.isArray(resized)).toBe(true) + expect((payload.upload as Record | undefined)?.status).toBe('complete') + expect(Array.isArray(payload.next_steps)).toBe(true) + }) +}) diff --git a/packages/mcp-server/test/e2e/golden-templates.test.ts b/packages/mcp-server/test/e2e/golden-templates.test.ts new file mode 100644 index 00000000..834387ec --- /dev/null +++ b/packages/mcp-server/test/e2e/golden-templates.test.ts @@ -0,0 +1,41 @@ +import type { Client } from '@modelcontextprotocol/sdk/client' +import { afterAll, beforeAll, describe, expect, it } from 'vitest' +import { createMcpClient, isRecord, parseToolPayload } from './mcp-client.ts' + +describe('mcp-server golden templates', { timeout: 20000 }, () => { + let client: Client + + beforeAll(async () => { + client = await createMcpClient() + }) + + afterAll(async () => { + await client?.close() + }) + + it('lists golden templates with steps', async () => { + const result = await client.callTool({ + name: 'transloadit_list_golden_templates', + arguments: {}, + }) + + const payload = parseToolPayload(result) + + expect(payload.status).toBe('ok') + expect(Array.isArray(payload.templates)).toBe(true) + + const templates = payload.templates as Array> + const hls = templates.find( + (template) => template.slug === '~transloadit/encode-hls-video@0.0.1', + ) + + expect(isRecord(hls)).toBe(true) + const steps = isRecord(hls?.steps) ? hls?.steps : {} + const original = isRecord((steps as Record)[':original']) + ? (steps as Record)[':original'] + : undefined + + expect(isRecord(original)).toBe(true) + expect((original as Record).robot).toBe('/upload/handle') + }) +}) diff --git a/packages/mcp-server/test/e2e/http-server.ts b/packages/mcp-server/test/e2e/http-server.ts new file mode 100644 index 00000000..777a8131 --- /dev/null +++ b/packages/mcp-server/test/e2e/http-server.ts @@ -0,0 +1,50 @@ +import { createServer } from 'node:http' +import type { AddressInfo } from 'node:net' +import { Client } from '@modelcontextprotocol/sdk/client' +import { StreamableHTTPClientTransport } from '@modelcontextprotocol/sdk/client/streamableHttp.js' +import { createTransloaditMcpHttpHandler } from '../../src/index.ts' + +type HeadersInit = Record + +export const startHttpServer = async ( + options: Parameters[0] = {}, +) => { + const handler = await createTransloaditMcpHttpHandler(options) + const server = createServer(handler) + + await new Promise((resolve) => { + server.listen(0, '127.0.0.1', resolve) + }) + + const { port } = server.address() as AddressInfo + const url = new URL(`http://127.0.0.1:${port}${options?.path ?? '/mcp'}`) + + return { + url, + close: async () => { + await new Promise((resolve, reject) => { + server.close((err) => (err ? 
reject(err) : resolve())) + }) + await handler.close() + }, + } +} + +export const createHttpClient = async (url: URL, headers: HeadersInit = {}) => { + const transport = new StreamableHTTPClientTransport(url, { + requestInit: { headers }, + }) + const client = new Client( + { + name: 'transloadit-mcp-http-e2e', + version: '0.1.0', + }, + { + capabilities: {}, + }, + ) + + await client.connect(transport) + + return { client, transport } +} diff --git a/packages/mcp-server/test/e2e/mcp-client.ts b/packages/mcp-server/test/e2e/mcp-client.ts new file mode 100644 index 00000000..a1e1ddcb --- /dev/null +++ b/packages/mcp-server/test/e2e/mcp-client.ts @@ -0,0 +1,61 @@ +import { fileURLToPath } from 'node:url' +import { Client } from '@modelcontextprotocol/sdk/client' +import { StdioClientTransport } from '@modelcontextprotocol/sdk/client/stdio.js' + +type JsonRecord = Record + +type ToolTextContent = { + type: 'text' + text: string +} + +export const cliPath = fileURLToPath(new URL('../../src/cli.ts', import.meta.url)) + +export const isRecord = (value: unknown): value is Record => + typeof value === 'object' && value !== null + +const isTextContent = (value: unknown): value is ToolTextContent => + isRecord(value) && value.type === 'text' && typeof value.text === 'string' + +export const parseToolPayload = (result: { + structuredContent?: Record + content?: Array +}): JsonRecord => { + if (isRecord(result.structuredContent)) { + return result.structuredContent + } + + const content = result.content?.[0] + if (!isTextContent(content)) { + throw new Error('Expected tool response content to be text JSON.') + } + + const parsed = JSON.parse(content.text) + if (!isRecord(parsed)) { + throw new Error('Expected tool response to be a JSON object.') + } + + return parsed +} + +export const createMcpClient = async (): Promise => { + const transport = new StdioClientTransport({ + command: process.execPath, + args: [cliPath, 'stdio'], + env: process.env, + }) + + const client = new Client( + { + name: 'transloadit-mcp-e2e', + version: '0.1.0', + }, + { + capabilities: {}, + }, + ) + + await client.connect(transport) + + return client +} diff --git a/packages/mcp-server/test/e2e/resume-assembly.test.ts b/packages/mcp-server/test/e2e/resume-assembly.test.ts new file mode 100644 index 00000000..85d0553b --- /dev/null +++ b/packages/mcp-server/test/e2e/resume-assembly.test.ts @@ -0,0 +1,97 @@ +import type { Client } from '@modelcontextprotocol/sdk/client' +import { afterAll, beforeAll, describe, expect, it } from 'vitest' +import { createMcpClient, isRecord, parseToolPayload } from './mcp-client.ts' + +const shouldRun = process.env.TRANSLOADIT_KEY != null && process.env.TRANSLOADIT_SECRET != null +const maybeDescribe = shouldRun ? 
describe : describe.skip + +maybeDescribe('mcp-server resume assembly (stdio)', { timeout: 60000 }, () => { + let client: Client + + beforeAll(async () => { + client = await createMcpClient() + }) + + afterAll(async () => { + await client?.close() + }) + + it('returns upload URLs and lets us resume with the same input', async () => { + const pixelPng = + 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR4nGNgYAAAAAMAASsJTYQAAAAASUVORK5CYII=' + + const createResult = await client.callTool({ + name: 'transloadit_create_assembly', + arguments: { + instructions: { + steps: { + ':original': { + robot: '/upload/handle', + }, + resized: { + robot: '/image/resize', + use: ':original', + width: 1, + height: 1, + result: true, + }, + }, + }, + files: [ + { + kind: 'base64', + field: 'file', + filename: 'pixel.png', + base64: pixelPng, + }, + ], + upload_behavior: 'none', + }, + }) + + const createPayload = parseToolPayload(createResult) + expect(createPayload.status).toBe('ok') + + const createUpload = isRecord(createPayload.upload) ? createPayload.upload : {} + expect(createUpload.status).toBe('none') + expect(createUpload.total_files).toBe(1) + + const uploadUrls = isRecord(createUpload.upload_urls) ? createUpload.upload_urls : {} + expect(typeof uploadUrls.file).toBe('string') + + const createAssembly = isRecord(createPayload.assembly) ? createPayload.assembly : {} + const assemblyUrl = + (createAssembly.assembly_ssl_url as string | undefined) ?? + (createAssembly.assembly_url as string | undefined) + + expect(assemblyUrl).toBeDefined() + + const resumeResult = await client.callTool({ + name: 'transloadit_create_assembly', + arguments: { + assembly_url: assemblyUrl, + files: [ + { + kind: 'base64', + field: 'file', + filename: 'pixel.png', + base64: pixelPng, + }, + ], + wait_for_completion: true, + }, + }) + + const resumePayload = parseToolPayload(resumeResult) + expect(resumePayload.status).toBe('ok') + + const resumeUpload = isRecord(resumePayload.upload) ? resumePayload.upload : {} + expect(resumeUpload.resumed).toBe(true) + expect(resumeUpload.status).toBe('complete') + + const finalAssembly = isRecord(resumePayload.assembly) ? resumePayload.assembly : {} + const results = isRecord(finalAssembly.results) ? finalAssembly.results : {} + const resized = (results as Record).resized + expect(Array.isArray(resized)).toBe(true) + }) +}) diff --git a/packages/mcp-server/test/e2e/robots.test.ts b/packages/mcp-server/test/e2e/robots.test.ts new file mode 100644 index 00000000..26cb6ce0 --- /dev/null +++ b/packages/mcp-server/test/e2e/robots.test.ts @@ -0,0 +1,54 @@ +import type { Client } from '@modelcontextprotocol/sdk/client' +import { afterAll, beforeAll, describe, expect, it } from 'vitest' +import { createMcpClient, isRecord, parseToolPayload } from './mcp-client.ts' + +describe('mcp-server robots (stdio)', { timeout: 20000 }, () => { + let client: Client + + beforeAll(async () => { + client = await createMcpClient() + }) + + afterAll(async () => { + await client?.close() + }) + + it('lists robots and returns example snippets', async () => { + const listResult = await client.callTool({ + name: 'transloadit_list_robots', + arguments: { + category: 'file-importing', + search: 'import', + limit: 5, + }, + }) + + const listPayload = parseToolPayload(listResult) + expect(listPayload.status).toBe('ok') + const robots = Array.isArray(listPayload.robots) ? 
listPayload.robots : [] + expect(robots.length).toBeGreaterThan(0) + + const importRobot = (robots as Array<{ name: string }>).find( + (robot) => robot.name === '/http/import', + ) + + const helpResult = await client.callTool({ + name: 'transloadit_get_robot_help', + arguments: { + robot_name: importRobot?.name ?? '/http/import', + detail_level: 'examples', + }, + }) + + const helpPayload = parseToolPayload(helpResult) + expect(helpPayload.status).toBe('ok') + + const robot = isRecord(helpPayload.robot) ? helpPayload.robot : {} + expect(robot.name).toBe('/http/import') + expect(typeof robot.summary).toBe('string') + expect(Array.isArray(robot.examples)).toBe(true) + + const example = (robot.examples as Array<{ snippet?: unknown }>)[0] + expect(isRecord(example?.snippet)).toBe(true) + }) +}) diff --git a/packages/mcp-server/test/e2e/stdio.test.ts b/packages/mcp-server/test/e2e/stdio.test.ts new file mode 100644 index 00000000..2f477e7b --- /dev/null +++ b/packages/mcp-server/test/e2e/stdio.test.ts @@ -0,0 +1,79 @@ +import type { Client } from '@modelcontextprotocol/sdk/client' +import { afterAll, beforeAll, describe, expect, it } from 'vitest' +import { createMcpClient, parseToolPayload } from './mcp-client.ts' + +describe('mcp-server stdio', { timeout: 20000 }, () => { + let client: Client + + beforeAll(async () => { + client = await createMcpClient() + }) + + afterAll(async () => { + await client?.close() + }) + + it('lists tools and validates instructions', async () => { + const toolsResult = await client.listTools() + const toolNames = toolsResult.tools.map((tool) => tool.name) + + expect(toolNames).toContain('transloadit_validate_assembly') + + const result = await client.callTool({ + name: 'transloadit_validate_assembly', + arguments: { + instructions: { + steps: { + ':original': { + robot: '/upload/handle', + }, + }, + }, + }, + }) + + const payload = parseToolPayload(result) + + expect(payload.status).toBe('ok') + expect(payload.linting_issues.length).toBeGreaterThan(0) + expect(payload.linting_issues[0]?.severity).toBe('warning') + }) + + it('lists robots and fetches parameter help', async () => { + const listResult = await client.callTool({ + name: 'transloadit_list_robots', + arguments: { + category: 'content-delivery', + search: 'serve', + limit: 10, + }, + }) + + const listPayload = parseToolPayload(listResult) + + expect(listPayload.status).toBe('ok') + expect(Array.isArray(listPayload.robots)).toBe(true) + + const robots = listPayload.robots as Array<{ name: string }> + const serveRobot = robots.find((robot) => robot.name === '/file/serve') + expect(serveRobot).toBeDefined() + + const helpResult = await client.callTool({ + name: 'transloadit_get_robot_help', + arguments: { + robot_name: '/file/serve', + detail_level: 'params', + }, + }) + + const helpPayload = parseToolPayload(helpResult) + + expect(helpPayload.status).toBe('ok') + expect(helpPayload.robot).toBeDefined() + expect(Array.isArray(helpPayload.robot?.required_params)).toBe(true) + expect(Array.isArray(helpPayload.robot?.optional_params)).toBe(true) + + const optional = helpPayload.robot?.optional_params as Array<{ name: string }> + expect(optional.some((param) => param.name === 'headers')).toBe(true) + }) +}) diff --git a/packages/mcp-server/test/e2e/streamable-http-auth.test.ts b/packages/mcp-server/test/e2e/streamable-http-auth.test.ts new file mode 100644 index 00000000..4b396e6f --- /dev/null +++ b/packages/mcp-server/test/e2e/streamable-http-auth.test.ts @@ -0,0 +1,67 @@ +import { expect, test } from 'vitest' 
+import { createHttpClient, startHttpServer } from './http-server.ts' +import { parseToolPayload } from './mcp-client.ts' + +test('streamable http: requires bearer token when configured', async () => { + const server = await startHttpServer({ mcpToken: 'secret-token' }) + + try { + const response = await fetch(server.url, { + method: 'GET', + headers: { Accept: 'text/event-stream' }, + }) + + expect(response.status).toBe(401) + } finally { + await server.close() + } +}) + +test('streamable http: allows authenticated client', async () => { + const server = await startHttpServer({ mcpToken: 'secret-token' }) + + try { + const { client, transport } = await createHttpClient(server.url, { + Authorization: 'Bearer secret-token', + }) + + try { + const tools = await client.listTools() + expect(tools.tools.length).toBeGreaterThan(0) + + const robots = await client.callTool({ + name: 'transloadit_list_robots', + arguments: { limit: 1 }, + }) + + const payload = parseToolPayload(robots) + expect(payload.status).toBe('ok') + } finally { + await transport.close() + await client.close() + } + } finally { + await server.close() + } +}) + +test('streamable http: rejects disallowed origins', async () => { + const server = await startHttpServer({ + allowedOrigins: ['https://allowed.example'], + }) + + try { + const response = await fetch(server.url, { + method: 'POST', + headers: { + Origin: 'https://blocked.example', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ jsonrpc: '2.0', id: 1, method: 'initialize', params: {} }), + }) + + expect(response.status).toBe(403) + } finally { + await server.close() + } +}) diff --git a/packages/mcp-server/test/e2e/streamable-http.test.ts b/packages/mcp-server/test/e2e/streamable-http.test.ts new file mode 100644 index 00000000..d07d5170 --- /dev/null +++ b/packages/mcp-server/test/e2e/streamable-http.test.ts @@ -0,0 +1,39 @@ +import { expect, test } from 'vitest' +import { createHttpClient, startHttpServer } from './http-server.ts' +import { parseToolPayload } from './mcp-client.ts' + +test('streamable http: lists robots and provides param help', async () => { + const server = await startHttpServer() + + try { + const { client, transport } = await createHttpClient(server.url) + + try { + const robots = await client.callTool({ + name: 'transloadit_list_robots', + arguments: { search: 'image' }, + }) + + const robotsPayload = parseToolPayload(robots) + expect(robotsPayload.status).toBe('ok') + expect(Array.isArray(robotsPayload.robots)).toBe(true) + expect(robotsPayload.robots.length).toBeGreaterThan(0) + + const firstRobot = robotsPayload.robots[0] as { name: string } + const help = await client.callTool({ + name: 'transloadit_get_robot_help', + arguments: { robot_name: firstRobot.name, detail_level: 'params' }, + }) + + const helpPayload = parseToolPayload(help) + expect(helpPayload.status).toBe('ok') + expect(helpPayload.robot?.name).toBe(firstRobot.name) + expect(Array.isArray(helpPayload.robot?.optional_params)).toBe(true) + } finally { + await transport.close() + await client.close() + } + } finally { + await server.close() + } +}) diff --git a/packages/mcp-server/test/e2e/url-import.test.ts b/packages/mcp-server/test/e2e/url-import.test.ts new file mode 100644 index 00000000..38042e06 --- /dev/null +++ b/packages/mcp-server/test/e2e/url-import.test.ts @@ -0,0 +1,106 @@ +import type { Client } from '@modelcontextprotocol/sdk/client' +import { afterAll, beforeAll, describe, expect, it } from 'vitest' +import { createMcpClient, isRecord, 
parseToolPayload } from './mcp-client.ts' + +const shouldRun = process.env.TRANSLOADIT_KEY != null && process.env.TRANSLOADIT_SECRET != null +const maybeDescribe = shouldRun ? describe : describe.skip + +const demoImage = 'https://demos.transloadit.com/66/01604e7d0248109df8c7cc0f8daef8/snowflake.jpg' + +maybeDescribe('mcp-server URL inputs (stdio)', { timeout: 60000 }, () => { + let client: Client + + beforeAll(async () => { + client = await createMcpClient() + }) + + afterAll(async () => { + await client?.close() + }) + + it('downloads the URL when no import step is present', async () => { + const result = await client.callTool({ + name: 'transloadit_create_assembly', + arguments: { + instructions: { + steps: { + ':original': { + robot: '/upload/handle', + }, + resize: { + robot: '/image/resize', + use: ':original', + width: 1, + height: 1, + result: true, + }, + }, + }, + files: [ + { + kind: 'url', + field: 'remote', + url: demoImage, + }, + ], + wait_for_completion: true, + }, + }) + + const payload = parseToolPayload(result) + expect(payload.status).toBe('ok') + const upload = isRecord(payload.upload) ? payload.upload : {} + expect(upload.status).toBe('complete') + + const assembly = isRecord(payload.assembly) ? payload.assembly : {} + const results = isRecord(assembly.results) ? assembly.results : {} + const resized = (results as Record).resize + expect(Array.isArray(resized)).toBe(true) + }) + + it('uses the existing /http/import step when provided', async () => { + const result = await client.callTool({ + name: 'transloadit_create_assembly', + arguments: { + instructions: { + steps: { + remote: { + robot: '/http/import', + }, + resize: { + robot: '/image/resize', + use: 'remote', + width: 1, + height: 1, + result: true, + }, + }, + }, + files: [ + { + kind: 'url', + field: 'remote_1', + url: demoImage, + }, + { + kind: 'url', + field: 'remote_2', + url: demoImage, + }, + ], + wait_for_completion: true, + }, + }) + + const payload = parseToolPayload(result) + expect(payload.status).toBe('ok') + const upload = isRecord(payload.upload) ? payload.upload : {} + expect(upload.status).toBe('none') + + const assembly = isRecord(payload.assembly) ? payload.assembly : {} + const results = isRecord(assembly.results) ? assembly.results : {} + const resized = (results as Record).resize + expect(Array.isArray(resized)).toBe(true) + expect(resized).toHaveLength(2) + }) +}) diff --git a/packages/mcp-server/test/e2e/validate-assembly.test.ts b/packages/mcp-server/test/e2e/validate-assembly.test.ts new file mode 100644 index 00000000..48dd61cc --- /dev/null +++ b/packages/mcp-server/test/e2e/validate-assembly.test.ts @@ -0,0 +1,89 @@ +import type { Client } from '@modelcontextprotocol/sdk/client' +import { afterAll, beforeAll, describe, expect, it } from 'vitest' +import { lintAssemblyInstructions } from '../../../node/src/lintAssemblyInstructions.ts' +import { createMcpClient, isRecord, parseToolPayload } from './mcp-client.ts' + +const shouldRun = process.env.TRANSLOADIT_KEY != null && process.env.TRANSLOADIT_SECRET != null +const maybeDescribe = shouldRun ? describe : describe.skip + +const toExpectedLintIssue = (issue: { + summary: string + desc?: string + type: string + stepName?: string +}) => ({ + path: issue.stepName ? `steps.${issue.stepName}` : 'instructions', + message: issue.summary, + severity: issue.type, + hint: issue.desc && issue.desc !== issue.summary ? 
issue.desc : undefined, +}) + +maybeDescribe('mcp-server validate assembly (stdio)', { timeout: 30000 }, () => { + let client: Client + + beforeAll(async () => { + client = await createMcpClient() + }) + + afterAll(async () => { + await client?.close() + }) + + it('returns lint issues with consistent formatting', async () => { + const instructions = { + steps: { + ':original': { + robot: '/upload/handle', + }, + resize: { + use: ':original', + width: 100, + height: 100, + }, + }, + } + + const lintResult = await lintAssemblyInstructions({ + assemblyInstructions: instructions, + }) + + const expectedIssues = lintResult.issues.map(toExpectedLintIssue) + + const result = await client.callTool({ + name: 'transloadit_validate_assembly', + arguments: { + instructions, + }, + }) + + const payload = parseToolPayload(result) + expect(payload.status).toBe('error') + const lintingIssues = Array.isArray(payload.linting_issues) ? payload.linting_issues : [] + + for (const expected of expectedIssues) { + expect(lintingIssues).toContainEqual(expected) + } + }) + + it('treats warnings as fatal in strict mode', async () => { + const result = await client.callTool({ + name: 'transloadit_validate_assembly', + arguments: { + instructions: { + steps: { + ':original': { + robot: '/upload/handle', + }, + }, + }, + strict: true, + }, + }) + + const payload = parseToolPayload(result) + expect(payload.status).toBe('error') + expect(Array.isArray(payload.linting_issues)).toBe(true) + const firstIssue = (payload.linting_issues as Array>)[0] + expect(isRecord(firstIssue)).toBe(true) + }) +}) diff --git a/packages/mcp-server/test/e2e/wait-assembly.test.ts b/packages/mcp-server/test/e2e/wait-assembly.test.ts new file mode 100644 index 00000000..309a36c6 --- /dev/null +++ b/packages/mcp-server/test/e2e/wait-assembly.test.ts @@ -0,0 +1,90 @@ +import type { Client } from '@modelcontextprotocol/sdk/client' +import { afterAll, beforeAll, describe, expect, it } from 'vitest' +import { createMcpClient, isRecord, parseToolPayload } from './mcp-client.ts' + +const shouldRun = process.env.TRANSLOADIT_KEY != null && process.env.TRANSLOADIT_SECRET != null +const maybeDescribe = shouldRun ? describe : describe.skip + +maybeDescribe('mcp-server wait for assembly (stdio)', { timeout: 30000 }, () => { + let client: Client + + beforeAll(async () => { + client = await createMcpClient() + }) + + afterAll(async () => { + await client?.close() + }) + + it('waits for completion and returns results', async () => { + const pixelPng = + 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR4nGNgYAAAAAMAASsJTYQAAAAASUVORK5CYII=' + + const createResult = await client.callTool({ + name: 'transloadit_create_assembly', + arguments: { + instructions: { + steps: { + ':original': { + robot: '/upload/handle', + }, + resized: { + robot: '/image/resize', + use: ':original', + width: 1, + height: 1, + result: true, + }, + }, + }, + files: [ + { + kind: 'base64', + field: 'file', + filename: 'pixel.png', + base64: pixelPng, + }, + ], + }, + }) + + const createPayload = parseToolPayload(createResult) + expect(createPayload.status).toBe('ok') + expect(isRecord(createPayload.assembly)).toBe(true) + + const assembly = createPayload.assembly as Record + const assemblyUrl = + (assembly.assembly_ssl_url as string | undefined) ?? 
+ (assembly.assembly_url as string | undefined) + + expect(assemblyUrl).toBeDefined() + + const statusResult = await client.callTool({ + name: 'transloadit_get_assembly_status', + arguments: { + assembly_url: assemblyUrl, + }, + }) + + const statusPayload = parseToolPayload(statusResult) + expect(statusPayload.status).toBe('ok') + expect(statusPayload.assembly).toBeDefined() + + const waitResult = await client.callTool({ + name: 'transloadit_wait_for_assembly', + arguments: { + assembly_url: assemblyUrl, + timeout_ms: 60000, + }, + }) + + const waitPayload = parseToolPayload(waitResult) + expect(waitPayload.status).toBe('ok') + expect(isRecord(waitPayload.assembly)).toBe(true) + + const finalAssembly = waitPayload.assembly as Record + const results = isRecord(finalAssembly.results) ? finalAssembly.results : {} + const resized = (results as Record).resized + expect(Array.isArray(resized)).toBe(true) + }) +}) diff --git a/packages/mcp-server/tsconfig.build.json b/packages/mcp-server/tsconfig.build.json new file mode 100644 index 00000000..6b07e33b --- /dev/null +++ b/packages/mcp-server/tsconfig.build.json @@ -0,0 +1,21 @@ +{ + "include": ["src"], + "exclude": ["test", "coverage", "dist"], + "compilerOptions": { + "composite": true, + "declaration": true, + "declarationMap": true, + "erasableSyntaxOnly": true, + "isolatedModules": true, + "module": "NodeNext", + "allowImportingTsExtensions": true, + "target": "ES2022", + "noImplicitOverride": true, + "rewriteRelativeImportExtensions": true, + "outDir": "dist", + "resolveJsonModule": true, + "rootDir": "src", + "sourceMap": true, + "strict": true + } +} diff --git a/packages/mcp-server/tsconfig.json b/packages/mcp-server/tsconfig.json new file mode 100644 index 00000000..59824675 --- /dev/null +++ b/packages/mcp-server/tsconfig.json @@ -0,0 +1,16 @@ +{ + "exclude": ["dist", "src", "coverage"], + "references": [{ "path": "./tsconfig.build.json" }], + "compilerOptions": { + "checkJs": true, + "erasableSyntaxOnly": true, + "isolatedModules": true, + "module": "NodeNext", + "allowImportingTsExtensions": true, + "noImplicitOverride": true, + "noEmit": true, + "resolveJsonModule": true, + "strict": true, + "types": ["vitest/globals"] + } +} diff --git a/packages/node/README.md b/packages/node/README.md index 52f78695..7abe6e00 100644 --- a/packages/node/README.md +++ b/packages/node/README.md @@ -131,6 +131,54 @@ When both `--template` and steps input are provided, Transloadit merges the temp the provided steps before linting, matching the API's runtime behavior. If the template sets `allow_steps_override=false`, providing steps will fail with `TEMPLATE_DENIES_STEPS_OVERRIDE`. +## SDK Helpers + +### prepareInputFiles + +`prepareInputFiles()` converts mixed file inputs into `files`, `uploads`, and optional +`/http/import` steps so you can pass them directly into `createAssembly()` or +`resumeAssemblyUploads()`. + +```ts +import { prepareInputFiles } from '@transloadit/node' + +const prepared = await prepareInputFiles({ + inputFiles: [ + { kind: 'path', field: 'video', path: '/tmp/video.mp4' }, + { kind: 'base64', field: 'logo', filename: 'logo.png', base64: '...' 
}, + { kind: 'url', field: 'remote', url: 'https://example.com/file.jpg' }, + ], + params: { + steps: { + ':original': { robot: '/upload/handle' }, + encode: { robot: '/video/encode', use: ':original' }, + }, + }, + base64Strategy: 'tempfile', + urlStrategy: 'import-if-present', + maxBase64Bytes: 512_000, + allowPrivateUrls: true, +}) + +await client.createAssembly({ + params: prepared.params, + files: prepared.files, + uploads: prepared.uploads, +}) +``` + +Options: + +- `inputFiles` – Array of `{ kind, field, ... }` entries for `path`, `base64`, or `url` inputs. +- `params` – Assembly instructions; steps will be extended when URL imports are injected. +- `fields` – Extra form fields to merge into `params.fields`. +- `base64Strategy` – `'buffer'` (default) or `'tempfile'` for base64 inputs. +- `urlStrategy` – `'import'`, `'download'`, or `'import-if-present'` (default `'import'`). +- `maxBase64Bytes` – Optional size cap (decoded bytes). Overages throw before decoding. +- `allowPrivateUrls` – Allow downloading private/loopback URLs when using `urlStrategy: 'download'` + (default `true`). Hosted deployments should disable this. +- `tempDir` – Optional temp directory base when `base64Strategy: 'tempfile'`. + ### Managing Templates ```bash @@ -287,7 +335,8 @@ names stable and pass the same files. Only path-based inputs resume; Buffer/stri start a new tus upload automatically. You can pass the same upload and progress options as `createAssembly` (such as `chunkSize`, -`uploadConcurrency`, `waitForCompletion`, `timeout`, `onUploadProgress`, and `onAssemblyProgress`). +`uploadConcurrency`, `uploadBehavior`, `waitForCompletion`, `timeout`, `onUploadProgress`, and +`onAssemblyProgress`). When `waitForCompletion` is `true`, the SDK will poll and resolve once the Assembly is finished. ```javascript @@ -373,9 +422,17 @@ You can provide the following keys inside the `options` object: - `onAssemblyProgress` - Once the Assembly has started processing this will be periodically called with the _Assembly Execution Status_ (result of `getAssembly`) **only if `waitForCompletion` is `true`**. - `chunkSize` - (for uploads) a number indicating the maximum size of a tus `PATCH` request body in bytes. Default to `Infinity` for file uploads and 50MB for streams of unknown length. See [tus-js-client](https://github.com/tus/tus-js-client/blob/master/docs/api.md#chunksize). - `uploadConcurrency` - Maximum number of concurrent tus file uploads to occur at any given time (default 10.) +- `uploadBehavior` - Controls how uploads are handled: + - `await` (default) waits for all uploads to finish. + - `background` starts uploads and returns once upload URLs are created. + - `none` returns upload URLs without uploading any bytes. + - When `uploadBehavior` is not `await`, `waitForCompletion` is ignored. **NOTE**: Make sure the key in `files` and `uploads` is not one of `signature`, `params` or `max_size`. +When `uploadBehavior` is `background` or `none`, the resolved Assembly object includes +`upload_urls` with a map of field names to tus upload URLs. 
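+
+A minimal sketch of `uploadBehavior: 'background'` (assuming a configured `client` as in the
+examples above and a local `./video.mp4`):
+
+```ts
+const assembly = await client.createAssembly({
+  params: {
+    steps: { ':original': { robot: '/upload/handle' } },
+  },
+  files: { file: './video.mp4' },
+  // Resolve once the tus upload URLs exist; bytes keep uploading in the background.
+  uploadBehavior: 'background',
+})
+
+// Map of `files` field names (here `file`) to tus upload URLs.
+console.log(assembly.upload_urls)
+```
+
+With `uploadBehavior: 'none'`, no bytes are sent at all and the returned tus URLs can be used to
+upload from another process.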
+ Example code showing all options: ```js diff --git a/packages/node/src/Transloadit.ts b/packages/node/src/Transloadit.ts index f8cab234..4af45509 100644 --- a/packages/node/src/Transloadit.ts +++ b/packages/node/src/Transloadit.ts @@ -52,7 +52,7 @@ import type { import { lintAssemblyInstructions as lintAssemblyInstructionsInternal } from './lintAssemblyInstructions.ts' import PaginationStream from './PaginationStream.ts' import PollingTimeoutError from './PollingTimeoutError.ts' -import type { Stream } from './tus.ts' +import type { Stream, UploadBehavior } from './tus.ts' import { sendTusRequest } from './tus.ts' // See https://github.com/sindresorhus/got/tree/v11.8.6?tab=readme-ov-file#errors @@ -66,11 +66,30 @@ export { TimeoutError, UploadError, } from 'got' - +export { goldenTemplates } from './alphalib/goldenTemplates.ts' export type { AssemblyStatus } from './alphalib/types/assemblyStatus.ts' export * from './apiTypes.ts' export { InconsistentResponseError, ApiError } +export { mergeTemplateContent } from './alphalib/templateMerge.ts' +export type { + Base64Strategy, + InputFile, + PrepareInputFilesOptions, + PrepareInputFilesResult, + UploadInput, + UrlStrategy, +} from './inputFiles.ts' +export { prepareInputFiles } from './inputFiles.ts' export type { LintAssemblyInstructionsResult, LintFatalLevel } from './lintAssemblyInstructions.ts' +export type { + RobotHelp, + RobotHelpOptions, + RobotListItem, + RobotListOptions, + RobotListResult, + RobotParamHelp, +} from './robots.ts' +export { getRobotHelp, listRobots } from './robots.ts' const log = debug('transloadit') const logWarn = debug('transloadit:warn') @@ -80,6 +99,12 @@ export interface UploadProgress { totalBytes?: number | undefined } +export type { UploadBehavior } + +export type AssemblyStatusWithUploadUrls = AssemblyStatus & { + upload_urls?: Record +} + const { version } = packageJson export type AssemblyProgress = (assembly: AssemblyStatus) => void @@ -157,6 +182,7 @@ interface AssemblyUploadOptions { uploads?: { [name: string]: Readable | IntoStreamInput } + uploadBehavior?: UploadBehavior waitForCompletion?: boolean chunkSize?: number uploadConcurrency?: number @@ -237,7 +263,7 @@ export interface SmartCDNUrlOptions { export type Fields = Record // A special promise that lets the user immediately get the assembly ID (synchronously before the request is sent) -interface CreateAssemblyPromise extends Promise { +interface CreateAssemblyPromise extends Promise { assemblyId: string } @@ -273,9 +299,19 @@ function checkResult(result: T | { error: string }): asserts result is T { } } -export interface Options { +type AuthKeySecret = { authKey: string authSecret: string + authToken?: undefined +} + +type AuthToken = { + authToken: string + authKey?: string + authSecret?: string +} + +type BaseOptions = { endpoint?: string maxRetries?: number timeout?: number @@ -283,11 +319,15 @@ export interface Options { validateResponses?: boolean } +export type Options = BaseOptions & (AuthKeySecret | AuthToken) + export class Transloadit { private _authKey: string private _authSecret: string + private _authToken: string | null + private _endpoint: string private _maxRetries: number @@ -301,20 +341,26 @@ export class Transloadit { private _validateResponses = false constructor(opts: Options) { - if (opts?.authKey == null) { - throw new Error('Please provide an authKey') - } - - if (opts.authSecret == null) { - throw new Error('Please provide an authSecret') - } + const rawToken = typeof opts?.authToken === 'string' ? 
opts.authToken.trim() : '' + const hasToken = rawToken.length > 0 if (opts.endpoint?.endsWith('/')) { throw new Error('Trailing slash in endpoint is not allowed') } - this._authKey = opts.authKey - this._authSecret = opts.authSecret + if (!hasToken) { + if (opts?.authKey == null) { + throw new Error('Please provide an authKey') + } + + if (opts.authSecret == null) { + throw new Error('Please provide an authSecret') + } + } + + this._authKey = opts.authKey ?? '' + this._authSecret = opts.authSecret ?? '' + this._authToken = hasToken ? rawToken : null this._endpoint = opts.endpoint || 'https://api2.transloadit.com' this._maxRetries = opts.maxRetries != null ? opts.maxRetries : 5 this._defaultTimeout = opts.timeout != null ? opts.timeout : 60000 @@ -351,6 +397,7 @@ export class Transloadit { uploads = {}, assemblyId, signal, + uploadBehavior = 'await', } = opts // Keep track of how long the request took @@ -406,7 +453,7 @@ export class Transloadit { const streamErrorPromise = createStreamErrorPromise(allStreamsMap) const createAssemblyAndUpload = async () => { - const result: AssemblyStatus = await this._remoteJson({ + const result: AssemblyStatusWithUploadUrls = await this._remoteJson({ urlSuffix, method: 'post', timeout: { request: timeout }, @@ -419,17 +466,22 @@ export class Transloadit { checkResult(result) if (Object.keys(allStreamsMap).length > 0) { - await sendTusRequest({ + const { uploadUrls } = await sendTusRequest({ streamsMap: allStreamsMap, assembly: result, onProgress: onUploadProgress, requestedChunkSize, uploadConcurrency, signal, + uploadBehavior, }) + if (uploadBehavior !== 'await' && Object.keys(uploadUrls).length > 0) { + result.upload_urls = uploadUrls + } } - if (!waitForCompletion) return result + const shouldWaitForCompletion = waitForCompletion && uploadBehavior === 'await' + if (!shouldWaitForCompletion) return result if (result.assembly_id == null) { throw new InconsistentResponseError( @@ -478,7 +530,9 @@ export class Transloadit { }) } - async resumeAssemblyUploads(opts: ResumeAssemblyUploadsOptions): Promise { + async resumeAssemblyUploads( + opts: ResumeAssemblyUploadsOptions, + ): Promise { const { assemblyUrl, files = {}, @@ -490,12 +544,16 @@ export class Transloadit { onUploadProgress = () => {}, onAssemblyProgress = () => {}, signal, + uploadBehavior = 'await', } = opts const startTimeMs = getHrTimeMs() getAssemblyIdFromUrl(assemblyUrl) - const assembly = await this._fetchAssemblyStatus({ url: assemblyUrl, signal }) + const assembly: AssemblyStatusWithUploadUrls = await this._fetchAssemblyStatus({ + url: assemblyUrl, + signal, + }) const statusUrl = assembly.assembly_ssl_url ?? assembly.assembly_url ?? 
assemblyUrl const finishedKeys = new Set() @@ -571,13 +629,25 @@ export class Transloadit { onProgress: onUploadProgress, signal, uploadUrls: uploadUrlsByLabel, + uploadBehavior, }) await Promise.race([uploadPromise, streamErrorPromise]) + const { uploadUrls } = await uploadPromise + if (uploadBehavior !== 'await' && Object.keys(uploadUrls).length > 0) { + assembly.upload_urls = uploadUrls + } } - const latestAssembly = await this._fetchAssemblyStatus({ url: statusUrl, signal }) - if (!waitForCompletion) return latestAssembly + const latestAssembly: AssemblyStatusWithUploadUrls = await this._fetchAssemblyStatus({ + url: statusUrl, + signal, + }) + if (uploadBehavior !== 'await' && assembly.upload_urls) { + latestAssembly.upload_urls = assembly.upload_urls + } + const shouldWaitForCompletion = waitForCompletion && uploadBehavior === 'await' + if (!shouldWaitForCompletion) return latestAssembly if (latestAssembly.assembly_id == null) { throw new InconsistentResponseError( @@ -701,6 +771,7 @@ export class Transloadit { const { assembly_ssl_url: url } = await this.getAssembly(assemblyId) const rawResult = await this._remoteJson, OptionalAuthParams>({ url, + isTrustedUrl: true, method: 'delete', }) @@ -829,6 +900,7 @@ export class Transloadit { const rawResult = await this._remoteJson, OptionalAuthParams>({ url, urlSuffix: url ? undefined : `/assemblies/${assemblyId}`, + isTrustedUrl: Boolean(url), signal, }) @@ -1021,6 +1093,9 @@ export class Transloadit { params: OptionalAuthParams, algorithm?: string, ): { signature: string; params: string } { + if (!this._authKey || !this._authSecret) { + throw new Error('Cannot sign params without authKey and authSecret.') + } const jsonParams = this._prepareParams(params) const signature = this._calcSignature(jsonParams, algorithm) @@ -1031,6 +1106,9 @@ export class Transloadit { * Construct a signed Smart CDN URL. See https://transloadit.com/docs/topics/signature-authentication/#smart-cdn. */ getSignedSmartCDNUrl(opts: SmartCDNUrlOptions): string { + if (!this._authKey || !this._authSecret) { + throw new Error('authKey and authSecret are required to sign Smart CDN URLs.') + } return getSignedSmartCdnUrl({ ...opts, authKey: this._authKey, @@ -1039,15 +1117,24 @@ export class Transloadit { } private _calcSignature(toSign: string, algorithm = 'sha384'): string { + if (!this._authSecret) { + throw new Error('Cannot sign params without authSecret.') + } return signParamsSync(toSign, this._authSecret, algorithm) } // Sets the multipart/form-data for POST, PUT and DELETE requests, including // the streams, the signed params, and any additional fields. private _appendForm(form: FormData, params: OptionalAuthParams, fields?: Fields): void { - const sigData = this.calcSignature(params) - const jsonParams = sigData.params - const { signature } = sigData + const shouldSign = Boolean(this._authKey && this._authSecret) + let jsonParams = JSON.stringify(params ?? {}) + let signature: string | undefined + + if (shouldSign) { + const sigData = this.calcSignature(params) + jsonParams = sigData.params + signature = sigData.signature + } form.append('params', jsonParams) @@ -1057,16 +1144,24 @@ export class Transloadit { } } - form.append('signature', signature) + if (signature) { + form.append('signature', signature) + } } // Implements HTTP GET query params, handling the case where the url already // has params. 
private _appendParamsToUrl(url: string, params: OptionalAuthParams): string { - const { signature, params: jsonParams } = this.calcSignature(params) - const prefix = url.indexOf('?') === -1 ? '?' : '&' + const shouldSign = Boolean(this._authKey && this._authSecret) + if (!shouldSign) { + const jsonParams = JSON.stringify(params ?? {}) + return `${url}${prefix}params=${encodeURIComponent(jsonParams)}` + } + + const { signature, params: jsonParams } = this.calcSignature(params) + return `${url}${prefix}signature=${signature}¶ms=${encodeURIComponent(jsonParams)}` } @@ -1102,6 +1197,7 @@ export class Transloadit { private async _remoteJson(opts: { urlSuffix?: string url?: string + isTrustedUrl?: boolean timeout?: Delays method?: 'delete' | 'get' | 'post' | 'put' params?: TParams @@ -1112,6 +1208,7 @@ export class Transloadit { const { urlSuffix, url: urlInput, + isTrustedUrl = false, timeout = { request: this._defaultTimeout }, method = 'get', params = {}, @@ -1123,6 +1220,13 @@ export class Transloadit { // Allow providing either a `urlSuffix` or a full `url` if (!urlSuffix && !urlInput) throw new Error('No URL provided') let url = urlInput || `${this._endpoint}${urlSuffix}` + if (urlInput && !isTrustedUrl) { + const allowed = new URL(this._endpoint) + const candidate = new URL(urlInput) + if (allowed.origin !== candidate.origin) { + throw new Error(`Untrusted URL: ${candidate.origin}`) + } + } if (method === 'get') { url = this._appendParamsToUrl(url, params) @@ -1147,6 +1251,7 @@ export class Transloadit { headers: { 'Transloadit-Client': `node-sdk:${version}`, 'User-Agent': undefined, // Remove got's user-agent + ...(this._authToken ? { Authorization: `Bearer ${this._authToken}` } : {}), ...headers, }, responseType: 'json', diff --git a/packages/node/src/alphalib/goldenTemplates.ts b/packages/node/src/alphalib/goldenTemplates.ts new file mode 100644 index 00000000..1a968bea --- /dev/null +++ b/packages/node/src/alphalib/goldenTemplates.ts @@ -0,0 +1,53 @@ +export type GoldenTemplate = { + slug: string + version: string + description: string + steps: Record +} + +export const goldenTemplates = { + '~transloadit/encode-hls-video@0.0.1': { + slug: '~transloadit/encode-hls-video@0.0.1', + version: '0.0.1', + description: + 'Encode an input video into HLS renditions (270p, 360p, 540p) with an adaptive playlist.', + steps: { + ':original': { + robot: '/upload/handle', + }, + low: { + robot: '/video/encode', + use: ':original', + ffmpeg_stack: 'v7.0.0', + preset: 'hls-270p', + result: true, + turbo: true, + }, + mid: { + robot: '/video/encode', + use: ':original', + ffmpeg_stack: 'v7.0.0', + preset: 'hls-360p', + result: true, + turbo: true, + }, + high: { + robot: '/video/encode', + use: ':original', + ffmpeg_stack: 'v7.0.0', + preset: 'hls-540p', + result: true, + turbo: true, + }, + adaptive: { + robot: '/video/adaptive', + use: { + steps: ['low', 'mid', 'high'], + bundle_steps: true, + }, + technique: 'hls', + playlist_name: 'my_playlist.m3u8', + }, + }, + }, +} satisfies Record diff --git a/packages/node/src/inputFiles.ts b/packages/node/src/inputFiles.ts new file mode 100644 index 00000000..00f7acdf --- /dev/null +++ b/packages/node/src/inputFiles.ts @@ -0,0 +1,278 @@ +import { createWriteStream } from 'node:fs' +import { mkdtemp, rm, writeFile } from 'node:fs/promises' +import { isIP } from 'node:net' +import { tmpdir } from 'node:os' +import { basename, join } from 'node:path' +import type { Readable } from 'node:stream' +import { pipeline } from 'node:stream/promises' +import got from 
'got' +import type { Input as IntoStreamInput } from 'into-stream' +import type { CreateAssemblyParams } from './apiTypes.ts' + +export type InputFile = + | { + kind: 'path' + field: string + path: string + } + | { + kind: 'base64' + field: string + base64: string + filename: string + contentType?: string + } + | { + kind: 'url' + field: string + url: string + filename?: string + contentType?: string + } + +export type UploadInput = Readable | IntoStreamInput + +export type Base64Strategy = 'buffer' | 'tempfile' +export type UrlStrategy = 'import' | 'download' | 'import-if-present' + +export type PrepareInputFilesOptions = { + inputFiles?: InputFile[] + params?: CreateAssemblyParams + fields?: Record + base64Strategy?: Base64Strategy + urlStrategy?: UrlStrategy + maxBase64Bytes?: number + allowPrivateUrls?: boolean + tempDir?: string +} + +export type PrepareInputFilesResult = { + params: CreateAssemblyParams + files: Record + uploads: Record + cleanup: Array<() => Promise> +} + +const isRecord = (value: unknown): value is Record => + typeof value === 'object' && value !== null + +const ensureUnique = (field: string, used: Set): void => { + if (used.has(field)) { + throw new Error(`Duplicate file field: ${field}`) + } + used.add(field) +} + +const ensureUniqueStepName = (baseName: string, used: Set): string => { + let name = baseName + let counter = 1 + while (used.has(name)) { + name = `${baseName}_${counter}` + counter += 1 + } + used.add(name) + return name +} + +const decodeBase64 = (value: string): Buffer => Buffer.from(value, 'base64') + +const estimateBase64DecodedBytes = (value: string): number => { + const trimmed = value.trim() + if (!trimmed) return 0 + let padding = 0 + if (trimmed.endsWith('==')) padding = 2 + else if (trimmed.endsWith('=')) padding = 1 + return Math.floor((trimmed.length * 3) / 4) - padding +} + +const getFilenameFromUrl = (value: string): string | null => { + try { + const pathname = new URL(value).pathname + const base = basename(pathname) + if (base && base !== '/' && base !== '.') return base + } catch { + return null + } + return null +} + +const isHttpImportStep = (value: unknown): value is Record => + isRecord(value) && value.robot === '/http/import' + +const findImportStepName = (field: string, steps: Record): string | null => { + if (isHttpImportStep(steps[field])) return field + const matches = Object.entries(steps).filter(([, step]) => isHttpImportStep(step)) + if (matches.length === 1) return matches[0]?.[0] ?? 
null + return null +} + +const downloadUrlToFile = async (url: string, filePath: string): Promise => { + await pipeline(got.stream(url), createWriteStream(filePath)) +} + +const isPrivateIp = (address: string): boolean => { + if (address === 'localhost') return true + const family = isIP(address) + if (family === 4) { + const parts = address.split('.').map((chunk) => Number(chunk)) + const [a, b] = parts + if (a === 10) return true + if (a === 127) return true + if (a === 0) return true + if (a === 169 && b === 254) return true + if (a === 172 && b >= 16 && b <= 31) return true + if (a === 192 && b === 168) return true + return false + } + if (family === 6) { + const normalized = address.toLowerCase() + if (normalized === '::1') return true + if (normalized.startsWith('fe80:')) return true + if (normalized.startsWith('fc') || normalized.startsWith('fd')) return true + return false + } + return false +} + +const assertPublicDownloadUrl = (value: string): void => { + const parsed = new URL(value) + if (!['http:', 'https:'].includes(parsed.protocol)) { + throw new Error(`URL downloads are limited to http/https: ${value}`) + } + if (isPrivateIp(parsed.hostname)) { + throw new Error(`URL downloads are limited to public hosts: ${value}`) + } +} + +export const prepareInputFiles = async ( + options: PrepareInputFilesOptions = {}, +): Promise => { + const { + inputFiles = [], + params = {}, + fields, + base64Strategy = 'buffer', + urlStrategy = 'import', + maxBase64Bytes, + allowPrivateUrls = true, + tempDir, + } = options + + let nextParams: CreateAssemblyParams = { ...params } + const files: Record = {} + const uploads: Record = {} + const cleanup: Array<() => Promise> = [] + + if (fields && Object.keys(fields).length > 0) { + nextParams = { + ...nextParams, + fields: { + ...(isRecord(nextParams.fields) ? nextParams.fields : {}), + ...fields, + }, + } + } + + const steps = isRecord(nextParams.steps) ? { ...nextParams.steps } : {} + const usedSteps = new Set(Object.keys(steps)) + const usedFields = new Set() + const importUrlsByStep = new Map() + const importStepNames = Object.keys(steps).filter((name) => isHttpImportStep(steps[name])) + const sharedImportStep = importStepNames.length === 1 ? importStepNames[0] : null + + let tempRoot: string | null = null + const ensureTempRoot = async (): Promise => { + if (!tempRoot) { + const root = await mkdtemp(join(tempDir ?? tmpdir(), 'transloadit-input-')) + tempRoot = root + cleanup.push(() => rm(root, { recursive: true, force: true })) + } + return tempRoot + } + + try { + for (const file of inputFiles) { + ensureUnique(file.field, usedFields) + if (file.kind === 'path') { + files[file.field] = file.path + continue + } + if (file.kind === 'base64') { + if (maxBase64Bytes) { + const estimated = estimateBase64DecodedBytes(file.base64) + if (estimated > maxBase64Bytes) { + throw new Error(`Base64 payload exceeds ${maxBase64Bytes} bytes.`) + } + } + const buffer = decodeBase64(file.base64) + if (maxBase64Bytes && buffer.length > maxBase64Bytes) { + throw new Error(`Base64 payload exceeds ${maxBase64Bytes} bytes.`) + } + if (base64Strategy === 'tempfile') { + const root = await ensureTempRoot() + const filename = file.filename ? 
basename(file.filename) : `${file.field}.bin` + const filePath = join(root, filename) + await writeFile(filePath, buffer) + files[file.field] = filePath + } else { + uploads[file.field] = buffer + } + continue + } + if (file.kind === 'url') { + const matchedStep = findImportStepName(file.field, steps) + const targetStep = matchedStep ?? sharedImportStep + const shouldImport = + urlStrategy === 'import' || (urlStrategy === 'import-if-present' && targetStep) + + if (shouldImport) { + const stepName = targetStep ?? ensureUniqueStepName(file.field, usedSteps) + const urls = importUrlsByStep.get(stepName) ?? [] + urls.push(file.url) + importUrlsByStep.set(stepName, urls) + continue + } + + const root = await ensureTempRoot() + const filename = + (file.filename ? basename(file.filename) : null) ?? + getFilenameFromUrl(file.url) ?? + `${file.field}.bin` + const filePath = join(root, filename) + if (!allowPrivateUrls) { + assertPublicDownloadUrl(file.url) + } + await downloadUrlToFile(file.url, filePath) + files[file.field] = filePath + } + } + } catch (error) { + await Promise.all(cleanup.map((fn) => fn())) + throw error + } + + if (Object.keys(steps).length > 0 || importUrlsByStep.size > 0) { + if (importUrlsByStep.size > 0) { + for (const [stepName, urls] of importUrlsByStep.entries()) { + const existing = isRecord(steps[stepName]) ? steps[stepName] : {} + steps[stepName] = { + ...existing, + robot: '/http/import', + url: urls.length === 1 ? urls[0] : urls, + } + } + } + + nextParams = { + ...nextParams, + steps, + } + } + + return { + params: nextParams, + files, + uploads, + cleanup, + } +} diff --git a/packages/node/src/robots.ts b/packages/node/src/robots.ts new file mode 100644 index 00000000..c7874d87 --- /dev/null +++ b/packages/node/src/robots.ts @@ -0,0 +1,317 @@ +import type { z } from 'zod' +import { robotsMeta, robotsSchema } from './alphalib/types/robots/_index.ts' + +export type RobotListOptions = { + category?: string + search?: string + limit?: number + cursor?: string +} + +export type RobotListItem = { + name: string + title?: string + summary: string + category?: string +} + +export type RobotListResult = { + robots: RobotListItem[] + nextCursor?: string +} + +export type RobotParamHelp = { + name: string + type: string + description?: string +} + +export type RobotHelp = { + name: string + summary: string + requiredParams: RobotParamHelp[] + optionalParams: RobotParamHelp[] + examples?: Array<{ description: string; snippet: Record }> +} + +export type RobotHelpOptions = { + robotName: string + detailLevel?: 'summary' | 'params' | 'examples' +} + +type RobotsMetaMap = typeof robotsMeta +type RobotMeta = RobotsMetaMap[keyof RobotsMetaMap] + +const isRecord = (value: unknown): value is Record => + typeof value === 'object' && value !== null + +const getDef = (schema: z.ZodTypeAny): Record => + (schema as unknown as { _def?: Record; def?: Record })._def ?? + (schema as unknown as { def?: Record }).def ?? + {} + +const getDefType = (def: Record): string | undefined => + (def.type as string | undefined) ?? (def.typeName as string | undefined) + +const robotNameToPath = (name: string): string => { + const base = name.replace(/Robot$/, '') + const spaced = base + .replace(/([a-z0-9])([A-Z])/g, '$1 $2') + .replace(/([A-Z]+)([A-Z][a-z0-9])/g, '$1 $2') + const parts = spaced.split(/\s+/).filter(Boolean) + return `/${parts.map((part) => part.toLowerCase()).join('/')}` +} + +const selectSummary = (meta: RobotMeta): string => + meta.purpose_sentence ?? meta.purpose_words ?? 
meta.purpose_word ?? meta.title ?? meta.name + +const resolveRobotPath = (robotName: string): string => + robotName.startsWith('/') ? robotName : robotNameToPath(robotName) + +const unwrapSchema = (schema: z.ZodTypeAny): { base: z.ZodTypeAny; optional: boolean } => { + let base = schema + let optional = typeof base.isOptional === 'function' ? base.isOptional() : false + + while (true) { + const def = getDef(base) + const defType = getDefType(def) + if ( + defType === 'optional' || + defType === 'default' || + defType === 'nullable' || + defType === 'catch' || + defType === 'ZodOptional' || + defType === 'ZodDefault' || + defType === 'ZodNullable' || + defType === 'ZodCatch' + ) { + const inner = def.innerType as z.ZodTypeAny | undefined + if (inner) { + base = inner + if (defType !== 'nullable' && defType !== 'ZodNullable') { + optional = true + } + continue + } + } + break + } + + return { base, optional } +} + +const describeSchemaType = (schema: z.ZodTypeAny): string => { + const { base } = unwrapSchema(schema) + const def = getDef(base) + const defType = getDefType(def) + + switch (defType) { + case 'string': + case 'ZodString': + return 'string' + case 'number': + case 'ZodNumber': + return 'number' + case 'boolean': + case 'ZodBoolean': + return 'boolean' + case 'bigint': + case 'ZodBigInt': + return 'bigint' + case 'literal': + case 'ZodLiteral': { + const value = (def.values as unknown[] | undefined)?.[0] ?? def.value + return value === undefined ? 'literal' : JSON.stringify(value) + } + case 'enum': + case 'ZodEnum': { + const values = Array.isArray(def.values) ? def.values : [] + return values.length ? `enum(${values.join(' | ')})` : 'enum' + } + case 'array': + case 'ZodArray': { + const element = def.element as z.ZodTypeAny | undefined + const inner = element ? describeSchemaType(element) : 'unknown' + return `array<${inner}>` + } + case 'object': + case 'ZodObject': + return 'object' + case 'record': + case 'ZodRecord': + return 'record' + case 'union': + case 'ZodUnion': { + const options = Array.isArray(def.options) ? def.options : [] + const rendered = options + .map((option) => describeSchemaType(option as z.ZodTypeAny)) + .join(' | ') + return rendered ? `union<${rendered}>` : 'union' + } + case 'ZodDiscriminatedUnion': + return 'object' + default: + return defType ?? 'unknown' + } +} + +const getParamDescription = (schema: z.ZodTypeAny): string | undefined => { + if (schema.description?.trim()) { + return schema.description.trim() + } + const inner = unwrapSchema(schema).base + return inner.description?.trim() +} + +const getShape = (schema: z.ZodTypeAny): Record => { + const { base } = unwrapSchema(schema) + const def = getDef(base) + const shape = def.shape as + | Record + | (() => Record) + | undefined + if (typeof shape === 'function') { + return shape() + } + return shape ?? 
{} +} + +const getRobotParams = ( + schema: z.ZodTypeAny, +): { required: RobotParamHelp[]; optional: RobotParamHelp[] } => { + const shape = getShape(schema) + const required: RobotParamHelp[] = [] + const optional: RobotParamHelp[] = [] + + for (const [key, value] of Object.entries(shape)) { + if (key === 'robot') continue + const { optional: isOptional } = unwrapSchema(value) + const param: RobotParamHelp = { + name: key, + type: describeSchemaType(value), + description: getParamDescription(value), + } + + if (isOptional) { + optional.push(param) + } else { + required.push(param) + } + } + + return { required, optional } +} + +const getRobotsMetaIndex = (): { + byName: Map + byPath: Map +} => { + const byName = new Map() + const byPath = new Map() + + for (const meta of Object.values(robotsMeta)) { + byName.set(meta.name, meta) + byPath.set(robotNameToPath(meta.name), meta) + } + + return { byName, byPath } +} + +const getRobotSchemaIndex = (): Map => { + const index = new Map() + for (const option of robotsSchema.options) { + const shape = getShape(option) + const robotSchema = shape.robot + if (!robotSchema) continue + const robotDef = getDef(robotSchema) + const robotLiteral = (robotDef.values as unknown[] | undefined)?.[0] ?? robotDef.value + if (typeof robotLiteral === 'string') { + index.set(robotLiteral, option) + } + } + return index +} + +let cachedMetaIndex: ReturnType | null = null +let cachedSchemaIndex: ReturnType | null = null + +const getMetaIndex = (): ReturnType => { + if (!cachedMetaIndex) { + cachedMetaIndex = getRobotsMetaIndex() + } + return cachedMetaIndex +} + +const getSchemaIndex = (): ReturnType => { + if (!cachedSchemaIndex) { + cachedSchemaIndex = getRobotSchemaIndex() + } + return cachedSchemaIndex +} + +export const listRobots = (options: RobotListOptions = {}): RobotListResult => { + const normalizedSearch = options.search?.toLowerCase() + const normalizedCategory = options.category?.toLowerCase() + const { byPath } = getMetaIndex() + + const allRobots: RobotListItem[] = Array.from(byPath.entries()).map(([path, meta]) => ({ + name: path, + title: meta.title, + summary: selectSummary(meta), + category: meta.service_slug, + })) + + const filtered = allRobots + .filter((robot) => { + if (normalizedCategory && robot.category?.toLowerCase() !== normalizedCategory) { + return false + } + if (!normalizedSearch) return true + const haystack = `${robot.name} ${robot.title ?? ''} ${robot.summary}`.toLowerCase() + return haystack.includes(normalizedSearch) + }) + .sort((a, b) => a.name.localeCompare(b.name)) + + const start = options.cursor ? Number.parseInt(options.cursor, 10) : 0 + const safeStart = Number.isFinite(start) && start > 0 ? start : 0 + const safeLimit = options.limit && options.limit > 0 ? options.limit : 20 + const page = filtered.slice(safeStart, safeStart + safeLimit) + const nextCursor = + safeStart + safeLimit < filtered.length ? String(safeStart + safeLimit) : undefined + + return { + robots: page, + nextCursor, + } +} + +export const getRobotHelp = (options: RobotHelpOptions): RobotHelp => { + const detailLevel = options.detailLevel ?? 'summary' + const { byPath, byName } = getMetaIndex() + const schemaIndex = getSchemaIndex() + + const path = resolveRobotPath(options.robotName) + const meta = byPath.get(path) ?? byName.get(options.robotName) ?? null + const summary = meta ? selectSummary(meta) : `Robot ${path}` + const schema = schemaIndex.get(path) + const params = schema ? 
getRobotParams(schema) : { required: [], optional: [] } + + const help: RobotHelp = { + name: path, + summary, + requiredParams: detailLevel === 'params' ? params.required : [], + optionalParams: detailLevel === 'params' ? params.optional : [], + } + + if (detailLevel === 'examples' && meta?.example_code) { + const snippet = isRecord(meta.example_code) ? meta.example_code : {} + help.examples = [ + { + description: meta.example_code_description ?? 'Example', + snippet, + }, + ] + } + + return help +} diff --git a/packages/node/src/tus.ts b/packages/node/src/tus.ts index 60754d0e..172913f0 100644 --- a/packages/node/src/tus.ts +++ b/packages/node/src/tus.ts @@ -9,6 +9,9 @@ import type { AssemblyStatus } from './alphalib/types/assemblyStatus.ts' import type { UploadProgress } from './Transloadit.ts' const log = debug('transloadit') +const logWarn = debug('transloadit:warn') + +export type UploadBehavior = 'await' | 'background' | 'none' export interface Stream { path?: string @@ -23,6 +26,7 @@ interface SendTusRequestOptions { onProgress: (options: UploadProgress) => void signal?: AbortSignal uploadUrls?: Record + uploadBehavior?: UploadBehavior } export async function sendTusRequest({ @@ -33,6 +37,7 @@ export async function sendTusRequest({ onProgress, signal, uploadUrls, + uploadBehavior = 'await', }: SendTusRequestOptions) { const streamLabels = Object.keys(streamsMap) @@ -40,6 +45,7 @@ export async function sendTusRequest({ let lastEmittedProgress = 0 const sizes: Record = {} + const uploadUrlsResult: Record = { ...(uploadUrls ?? {}) } const haveUnknownLengthStreams = streamLabels.some((label) => !streamsMap[label]?.path) @@ -67,6 +73,9 @@ export async function sendTusRequest({ const uploadProgresses: Record = {} + const completionPromises: Array> = [] + const uploadUrlPromises: Array> = [] + async function uploadSingleStream(label: string) { uploadProgresses[label] = 0 @@ -110,7 +119,45 @@ export async function sendTusRequest({ const filename = path ? 
basename(path) : label - await new Promise((resolvePromise, rejectPromise) => { + if (uploadBehavior === 'none' && uploadUrls?.[label]) { + uploadUrlsResult[label] = uploadUrls[label] + uploadUrlPromises.push(Promise.resolve()) + completionPromises.push(Promise.resolve()) + return + } + + let urlResolved = false + let resolveUrl: () => void = () => {} + let rejectUrl: (err: Error) => void = () => {} + const uploadUrlPromise = new Promise((resolve, reject) => { + resolveUrl = () => { + if (urlResolved) return + urlResolved = true + resolve() + } + rejectUrl = (err) => { + if (urlResolved) return + urlResolved = true + reject(err) + } + }) + + let resolveCompletion: () => void = () => {} + let rejectCompletion: (err: Error) => void = () => {} + const completionPromise = new Promise((resolve, reject) => { + resolveCompletion = resolve + rejectCompletion = reject + }) + + uploadUrlPromises.push(uploadUrlPromise) + completionPromises.push(completionPromise) + + if (uploadUrls?.[label]) { + uploadUrlsResult[label] = uploadUrls[label] + resolveUrl() + } + + const startPromise = new Promise((resolvePromise, rejectPromise) => { if (!assembly.assembly_ssl_url) { rejectPromise(new Error('assembly_ssl_url is not present in the assembly status')) return @@ -124,15 +171,20 @@ export async function sendTusRequest({ // Wrap resolve/reject to clean up abort listener let abortHandler: (() => void) | undefined - const resolve = (payload: OnSuccessPayload) => { + const resolve = (_payload: OnSuccessPayload) => { if (abortHandler) signal?.removeEventListener('abort', abortHandler) - resolvePromise(payload) + resolveCompletion() + resolveUrl() + resolvePromise() } const reject = (err: unknown) => { if (abortHandler) signal?.removeEventListener('abort', abortHandler) + rejectCompletion(err as Error) + rejectUrl(err as Error) rejectPromise(err) } + let tusUpload: Upload const tusOptions: UploadOptions = { endpoint: assembly.tus_url, uploadUrl: uploadUrls?.[label], @@ -144,13 +196,24 @@ export async function sendTusRequest({ onError: reject, onProgress: onTusProgress, onSuccess: resolve, + onUploadUrlAvailable: () => { + const url = tusUpload?.url + if (url) { + uploadUrlsResult[label] = url + } + resolveUrl() + if (uploadBehavior === 'none') { + tusUpload.abort() + resolveCompletion() + } + }, } // tus-js-client doesn't like undefined/null if (size != null) tusOptions.uploadSize = size if (chunkSize) tusOptions.chunkSize = chunkSize if (uploadLengthDeferred) tusOptions.uploadLengthDeferred = uploadLengthDeferred - const tusUpload = new Upload(stream, tusOptions) + tusUpload = new Upload(stream, tusOptions) // Handle abort signal if (signal) { @@ -164,8 +227,31 @@ export async function sendTusRequest({ tusUpload.start() }) - log(label, 'upload done') + if (uploadBehavior === 'await') { + await startPromise + log(label, 'upload done') + return + } + + startPromise.catch((err) => { + logWarn('Background upload failed', err) + }) + + await uploadUrlPromise + log(label, 'upload started') } await pMap(streamLabels, uploadSingleStream, { concurrency: uploadConcurrency, signal }) + + await Promise.all(uploadUrlPromises) + + if (uploadBehavior === 'await') { + await Promise.all(completionPromises) + } else { + Promise.allSettled(completionPromises).catch((err) => { + logWarn('Background upload failed', err) + }) + } + + return { uploadUrls: uploadUrlsResult } } diff --git a/packages/node/test/unit/input-files.test.ts b/packages/node/test/unit/input-files.test.ts new file mode 100644 index 00000000..01179a54 --- /dev/null 
+++ b/packages/node/test/unit/input-files.test.ts @@ -0,0 +1,96 @@ +import { mkdtemp, rm } from 'node:fs/promises' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import { describe, expect, it } from 'vitest' +import { prepareInputFiles } from '../../src/inputFiles.ts' + +describe('prepareInputFiles', () => { + it('splits files, uploads, and url imports', async () => { + const base64 = Buffer.from('hello').toString('base64') + + const result = await prepareInputFiles({ + inputFiles: [ + { kind: 'path', field: 'video', path: '/tmp/video.mp4' }, + { kind: 'base64', field: 'logo', base64, filename: 'logo.png' }, + { kind: 'url', field: 'remote', url: 'https://example.com/remote.jpg' }, + ], + params: { + steps: { + resize: { robot: '/image/resize', use: ':original' }, + }, + fields: { a: 1 }, + }, + fields: { b: 2 }, + }) + + expect(result.files).toEqual({ video: '/tmp/video.mp4' }) + expect(result.uploads.logo).toBeInstanceOf(Buffer) + expect(result.cleanup).toHaveLength(0) + + expect(result.params.fields).toEqual({ a: 1, b: 2 }) + expect(result.params.steps?.resize).toEqual({ robot: '/image/resize', use: ':original' }) + expect(result.params.steps?.remote).toEqual({ + robot: '/http/import', + url: 'https://example.com/remote.jpg', + }) + }) + + it('keeps base64 tempfiles inside the temp directory', async () => { + const tempDir = await mkdtemp(join(tmpdir(), 'transloadit-test-')) + + try { + const base64 = Buffer.from('hello').toString('base64') + + const result = await prepareInputFiles({ + inputFiles: [ + { + kind: 'base64', + field: 'logo', + base64, + filename: '../escape.txt', + }, + ], + base64Strategy: 'tempfile', + tempDir, + }) + + expect(result.files.logo.startsWith(tempDir)).toBe(true) + } finally { + await rm(tempDir, { recursive: true, force: true }) + } + }) + + it('rejects oversized base64 payloads before decoding', async () => { + const oversized = '!'.repeat(128) + + await expect( + prepareInputFiles({ + inputFiles: [ + { + kind: 'base64', + field: 'logo', + base64: oversized, + filename: 'logo.png', + }, + ], + maxBase64Bytes: 4, + }), + ).rejects.toThrow('Base64 payload exceeds') + }) + + it('rejects private URL downloads', async () => { + await expect( + prepareInputFiles({ + inputFiles: [ + { + kind: 'url', + field: 'remote', + url: 'http://127.0.0.1/secret', + }, + ], + urlStrategy: 'download', + allowPrivateUrls: false, + }), + ).rejects.toThrow('URL downloads are limited') + }) +}) diff --git a/packages/node/test/unit/resume-assembly.test.ts b/packages/node/test/unit/resume-assembly.test.ts index e2a84f19..d8517ec6 100644 --- a/packages/node/test/unit/resume-assembly.test.ts +++ b/packages/node/test/unit/resume-assembly.test.ts @@ -64,8 +64,8 @@ describe('resumeAssemblyUploads', () => { sendTusRequestMock.mockImplementation( () => - new Promise((resolve) => { - setTimeout(resolve, 50) + new Promise<{ uploadUrls: Record }>((resolve) => { + setTimeout(() => resolve({ uploadUrls: {} }), 50) }), ) @@ -197,7 +197,7 @@ describe('resumeAssemblyUploads', () => { '_fetchAssemblyStatus', ).mockResolvedValue(assembly) - sendTusRequestMock.mockResolvedValue(undefined) + sendTusRequestMock.mockResolvedValue({ uploadUrls: {} }) await client.resumeAssemblyUploads({ assemblyUrl: assembly.assembly_url, @@ -257,6 +257,7 @@ describe('resumeAssemblyUploads', () => { for (const stream of Object.values(opts.streamsMap)) { stream.stream.destroy() } + return Promise.resolve({ uploadUrls: {} }) }) await client.resumeAssemblyUploads({ @@ -305,6 +306,7 @@ 
describe('resumeAssemblyUploads', () => { for (const stream of Object.values(opts.streamsMap)) { stream.stream.destroy() } + return Promise.resolve({ uploadUrls: {} }) }) await client.resumeAssemblyUploads({ @@ -342,6 +344,7 @@ describe('resumeAssemblyUploads', () => { await new Promise((resolve) => { setTimeout(resolve, 50) }) + return { uploadUrls: {} } }) await expect( diff --git a/packages/node/test/unit/robots.test.ts b/packages/node/test/unit/robots.test.ts new file mode 100644 index 00000000..b40c3942 --- /dev/null +++ b/packages/node/test/unit/robots.test.ts @@ -0,0 +1,27 @@ +import { describe, expect, it } from 'vitest' +import { getRobotHelp, listRobots } from '../../src/Transloadit.ts' + +describe('robot catalog helpers', () => { + it('lists robots with searchable summaries', () => { + const { robots, nextCursor } = listRobots({ search: 'image', limit: 3 }) + + expect(robots.length).toBeGreaterThan(0) + expect(robots[0]?.summary.length).toBeGreaterThan(0) + for (const robot of robots) { + const haystack = `${robot.name} ${robot.title ?? ''} ${robot.summary}`.toLowerCase() + expect(haystack).toContain('image') + } + + if (nextCursor) { + expect(Number.parseInt(nextCursor, 10)).toBeGreaterThan(0) + } + }) + + it('returns robot help and resolves class names', () => { + const help = getRobotHelp({ robotName: 'ImageResizeRobot', detailLevel: 'params' }) + + expect(help.name).toBe('/image/resize') + expect(help.summary.length).toBeGreaterThan(0) + expect(help.requiredParams.length + help.optionalParams.length).toBeGreaterThan(0) + }) +}) diff --git a/packages/node/test/unit/test-transloadit-client.test.ts b/packages/node/test/unit/test-transloadit-client.test.ts index 0fc8f228..1b5f83c2 100644 --- a/packages/node/test/unit/test-transloadit-client.test.ts +++ b/packages/node/test/unit/test-transloadit-client.test.ts @@ -132,7 +132,7 @@ describe('Transloadit', () => { describe('add stream', () => { it('should pause streams', async () => { - vi.spyOn(tus, 'sendTusRequest').mockImplementation(() => Promise.resolve()) + vi.spyOn(tus, 'sendTusRequest').mockResolvedValue({ uploadUrls: {} }) const client = new Transloadit({ authKey: 'foo_key', authSecret: 'foo_secret' }) const name = 'foo_name' @@ -199,6 +199,63 @@ describe('Transloadit', () => { }) }) + describe('upload behavior', () => { + it('returns upload urls without waiting for completion (background)', async () => { + const client = new Transloadit({ authKey: 'foo_key', authSecret: 'foo_secret' }) + const assembly = { + assembly_id: 'assembly', + assembly_url: 'http://localhost/assemblies/assembly', + assembly_ssl_url: 'https://localhost/assemblies/assembly', + tus_url: 'https://localhost/tus', + } + + vi.spyOn( + client as unknown as Record unknown>, + '_remoteJson', + ).mockResolvedValue(assembly) + const uploadUrls = { file: 'https://localhost/tus/1' } + const sendTusSpy = vi.spyOn(tus, 'sendTusRequest').mockResolvedValue({ uploadUrls }) + const awaitSpy = vi.spyOn(client, 'awaitAssemblyCompletion').mockResolvedValue(assembly) + + const result = await client.createAssembly({ + uploads: { file: Buffer.from('hi') }, + waitForCompletion: true, + uploadBehavior: 'background', + }) + + expect(sendTusSpy).toHaveBeenCalledWith( + expect.objectContaining({ uploadBehavior: 'background' }), + ) + expect(awaitSpy).not.toHaveBeenCalled() + expect(result.upload_urls).toEqual(uploadUrls) + }) + + it('returns upload urls without uploading (none)', async () => { + const client = new Transloadit({ authKey: 'foo_key', authSecret: 'foo_secret' }) + 
const assembly = { + assembly_id: 'assembly', + assembly_url: 'http://localhost/assemblies/assembly', + assembly_ssl_url: 'https://localhost/assemblies/assembly', + tus_url: 'https://localhost/tus', + } + + vi.spyOn( + client as unknown as Record<string, (...args: unknown[]) => unknown>, + '_remoteJson', + ).mockResolvedValue(assembly) + const uploadUrls = { file: 'https://localhost/tus/2' } + const sendTusSpy = vi.spyOn(tus, 'sendTusRequest').mockResolvedValue({ uploadUrls }) + + const result = await client.createAssembly({ + uploads: { file: Buffer.from('hi') }, + uploadBehavior: 'none', + }) + + expect(sendTusSpy).toHaveBeenCalledWith(expect.objectContaining({ uploadBehavior: 'none' })) + expect(result.upload_urls).toEqual(uploadUrls) + }) + }) + + describe('_prepareParams', () => { it('should add the auth key, secret and expires parameters', () => { let client = new Transloadit({ authKey: 'foo_key', authSecret: 'foo_secret' }) @@ -324,7 +381,7 @@ describe('Transloadit', () => { const url = '/some-url' // @ts-expect-error This tests private internals - await client._remoteJson({ url, method: 'get' }) + await client._remoteJson({ url, method: 'get', isTrustedUrl: true }) expect(get).toHaveBeenCalledWith( expect.any(String), diff --git a/packages/transloadit/package.json b/packages/transloadit/package.json index 78458d70..78ea9f72 100644 --- a/packages/transloadit/package.json +++ b/packages/transloadit/package.json @@ -79,7 +79,5 @@ "dist", "src" ], - "bin": { - "transloadit": "./dist/cli.js" - } + "bin": "./dist/cli.js" } diff --git a/scripts/prepare-transloadit.ts b/scripts/prepare-transloadit.ts index a3a76721..d0f298c1 100644 --- a/scripts/prepare-transloadit.ts +++ b/scripts/prepare-transloadit.ts @@ -44,6 +44,16 @@ const writeLegacyPackageJson = async (): Promise<void> => { if ('publishConfig' in legacyPackageJson) { delete legacyPackageJson.publishConfig } + // Normalize bin shape the same way npm does to avoid churn.
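+  // For a package named `transloadit`, npm reads a string `bin` as shorthand for
+  // `{ "transloadit": "./dist/cli.js" }`. Collapsing a single-entry object keyed by the
+  // package name therefore yields the same executable mapping and keeps the generated
+  // manifest identical to what npm would normalize it to.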
+ const legacyBin = legacyPackageJson.bin + if ( + legacyBin && + typeof legacyBin === 'object' && + 'transloadit' in legacyBin && + Object.keys(legacyBin).length === 1 + ) { + legacyPackageJson.bin = legacyBin.transloadit as string + } const formatted = formatPackageJson(legacyPackageJson) await writeFile(resolve(legacyPackage, 'package.json'), formatted) diff --git a/yarn.lock b/yarn.lock index ed9fe176..7b7e3b03 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1281,6 +1281,15 @@ __metadata: languageName: node linkType: hard +"@hono/node-server@npm:^1.19.9": + version: 1.19.9 + resolution: "@hono/node-server@npm:1.19.9" + peerDependencies: + hono: ^4 + checksum: 10c0/de18c06b6b266dc45fe55fb82053bd1da8fe84939c49b6fbab4d2448b679d54ab5affbf8b15de9bead26f29b1755284d770aafb5ad14a8e4b3cfb4f79334554e + languageName: node + linkType: hard + "@inquirer/external-editor@npm:^1.0.2": version: 1.0.3 resolution: "@inquirer/external-editor@npm:1.0.3" @@ -1433,6 +1442,38 @@ __metadata: languageName: node linkType: hard +"@modelcontextprotocol/sdk@npm:^1.25.3": + version: 1.25.3 + resolution: "@modelcontextprotocol/sdk@npm:1.25.3" + dependencies: + "@hono/node-server": "npm:^1.19.9" + ajv: "npm:^8.17.1" + ajv-formats: "npm:^3.0.1" + content-type: "npm:^1.0.5" + cors: "npm:^2.8.5" + cross-spawn: "npm:^7.0.5" + eventsource: "npm:^3.0.2" + eventsource-parser: "npm:^3.0.0" + express: "npm:^5.0.1" + express-rate-limit: "npm:^7.5.0" + jose: "npm:^6.1.1" + json-schema-typed: "npm:^8.0.2" + pkce-challenge: "npm:^5.0.0" + raw-body: "npm:^3.0.0" + zod: "npm:^3.25 || ^4.0" + zod-to-json-schema: "npm:^3.25.0" + peerDependencies: + "@cfworker/json-schema": ^4.1.1 + zod: ^3.25 || ^4.0 + peerDependenciesMeta: + "@cfworker/json-schema": + optional: true + zod: + optional: false + checksum: 10c0/0d2943450cab4c352376e5c0d888afe6129fcbf13d7b3b0c86c02094143e30b5f3f9380a27ecaa8f7007364793f9907494d8675bb0671727cf67eefa4e63c2df + languageName: node + linkType: hard + "@mswjs/interceptors@npm:^0.39.5": version: 0.39.6 resolution: "@mswjs/interceptors@npm:0.39.6" @@ -2465,7 +2506,23 @@ __metadata: languageName: node linkType: hard -"@transloadit/node@workspace:packages/node": +"@transloadit/mcp-server@workspace:packages/mcp-server": + version: 0.0.0-use.local + resolution: "@transloadit/mcp-server@workspace:packages/mcp-server" + dependencies: + "@modelcontextprotocol/sdk": "npm:^1.25.3" + "@transloadit/node": "npm:^4.3.0" + "@transloadit/sev-logger": "npm:^0.0.15" + "@types/express": "npm:^4.17.23" + "@types/node": "npm:^24.10.3" + express: "npm:^4.21.2" + zod: "npm:^4.0.0" + bin: + transloadit-mcp: ./dist/cli.js + languageName: unknown + linkType: soft + +"@transloadit/node@npm:^4.3.0, @transloadit/node@workspace:packages/node": version: 0.0.0-use.local resolution: "@transloadit/node@workspace:packages/node" dependencies: @@ -2549,6 +2606,16 @@ __metadata: languageName: node linkType: hard +"@types/body-parser@npm:*": + version: 1.19.6 + resolution: "@types/body-parser@npm:1.19.6" + dependencies: + "@types/connect": "npm:*" + "@types/node": "npm:*" + checksum: 10c0/542da05c924dce58ee23f50a8b981fee36921850c82222e384931fda3e106f750f7880c47be665217d72dbe445129049db6eb1f44e7a06b09d62af8f3cca8ea7 + languageName: node + linkType: hard + "@types/chai@npm:^5.2.2": version: 5.2.2 resolution: "@types/chai@npm:5.2.2" @@ -2558,6 +2625,15 @@ __metadata: languageName: node linkType: hard +"@types/connect@npm:*": + version: 3.4.38 + resolution: "@types/connect@npm:3.4.38" + dependencies: + "@types/node": "npm:*" + checksum: 
10c0/2e1cdba2c410f25649e77856505cd60223250fa12dff7a503e492208dbfdd25f62859918f28aba95315251fd1f5e1ffbfca1e25e73037189ab85dd3f8d0a148c + languageName: node + linkType: hard + "@types/debug@npm:^4.1.12": version: 4.1.12 resolution: "@types/debug@npm:4.1.12" @@ -2581,6 +2657,30 @@ __metadata: languageName: node linkType: hard +"@types/express-serve-static-core@npm:^4.17.33": + version: 4.19.8 + resolution: "@types/express-serve-static-core@npm:4.19.8" + dependencies: + "@types/node": "npm:*" + "@types/qs": "npm:*" + "@types/range-parser": "npm:*" + "@types/send": "npm:*" + checksum: 10c0/6fb58a85b209e0e421b29c52e0a51dbf7c039b711c604cf45d46470937a5c7c16b30aa5ce9bf7da0bd8a2e9361c95b5055599c0500a96bf4414d26c81f02d7fe + languageName: node + linkType: hard + +"@types/express@npm:^4.17.23": + version: 4.17.25 + resolution: "@types/express@npm:4.17.25" + dependencies: + "@types/body-parser": "npm:*" + "@types/express-serve-static-core": "npm:^4.17.33" + "@types/qs": "npm:*" + "@types/serve-static": "npm:^1" + checksum: 10c0/f42b616d2c9dbc50352c820db7de182f64ebbfa8dba6fb6c98e5f8f0e2ef3edde0131719d9dc6874803d25ad9ca2d53471d0fec2fbc60a6003a43d015bab72c4 + languageName: node + linkType: hard + "@types/http-cache-semantics@npm:^4.0.4": version: 4.0.4 resolution: "@types/http-cache-semantics@npm:4.0.4" @@ -2588,6 +2688,13 @@ __metadata: languageName: node linkType: hard +"@types/http-errors@npm:*": + version: 2.0.5 + resolution: "@types/http-errors@npm:2.0.5" + checksum: 10c0/00f8140fbc504f47356512bd88e1910c2f07e04233d99c88c854b3600ce0523c8cd0ba7d1897667243282eb44c59abb9245959e2428b9de004f93937f52f7c15 + languageName: node + linkType: hard + "@types/lodash-es@npm:^4.17.12": version: 4.17.12 resolution: "@types/lodash-es@npm:4.17.12" @@ -2604,6 +2711,13 @@ __metadata: languageName: node linkType: hard +"@types/mime@npm:^1": + version: 1.3.5 + resolution: "@types/mime@npm:1.3.5" + checksum: 10c0/c2ee31cd9b993804df33a694d5aa3fa536511a49f2e06eeab0b484fef59b4483777dbb9e42a4198a0809ffbf698081fdbca1e5c2218b82b91603dfab10a10fbc + languageName: node + linkType: hard + "@types/minimist@npm:^1.2.5": version: 1.2.5 resolution: "@types/minimist@npm:1.2.5" @@ -2634,6 +2748,20 @@ __metadata: languageName: node linkType: hard +"@types/qs@npm:*": + version: 6.14.0 + resolution: "@types/qs@npm:6.14.0" + checksum: 10c0/5b3036df6e507483869cdb3858201b2e0b64b4793dc4974f188caa5b5732f2333ab9db45c08157975054d3b070788b35088b4bc60257ae263885016ee2131310 + languageName: node + linkType: hard + +"@types/range-parser@npm:*": + version: 1.2.7 + resolution: "@types/range-parser@npm:1.2.7" + checksum: 10c0/361bb3e964ec5133fa40644a0b942279ed5df1949f21321d77de79f48b728d39253e5ce0408c9c17e4e0fd95ca7899da36841686393b9f7a1e209916e9381a3c + languageName: node + linkType: hard + "@types/recursive-readdir@npm:^2.2.4": version: 2.2.4 resolution: "@types/recursive-readdir@npm:2.2.4" @@ -2643,6 +2771,36 @@ __metadata: languageName: node linkType: hard +"@types/send@npm:*": + version: 1.2.1 + resolution: "@types/send@npm:1.2.1" + dependencies: + "@types/node": "npm:*" + checksum: 10c0/7673747f8c2d8e67f3b1b3b57e9d4d681801a4f7b526ecf09987bb9a84a61cf94aa411c736183884dc762c1c402a61681eb1ef200d8d45d7e5ec0ab67ea5f6c1 + languageName: node + linkType: hard + +"@types/send@npm:<1": + version: 0.17.6 + resolution: "@types/send@npm:0.17.6" + dependencies: + "@types/mime": "npm:^1" + "@types/node": "npm:*" + checksum: 10c0/a9d76797f0637738062f1b974e0fcf3d396a28c5dc18c3f95ecec5dabda82e223afbc2d56a0bca46b6326fd7bb229979916cea40de2270a98128fd94441b87c2 + 
languageName: node + linkType: hard + +"@types/serve-static@npm:^1": + version: 1.15.10 + resolution: "@types/serve-static@npm:1.15.10" + dependencies: + "@types/http-errors": "npm:*" + "@types/node": "npm:*" + "@types/send": "npm:<1" + checksum: 10c0/842fca14c9e80468f89b6cea361773f2dcd685d4616a9f59013b55e1e83f536e4c93d6d8e3ba5072d40c4e7e64085210edd6646b15d538ded94512940a23021f + languageName: node + linkType: hard + "@types/temp@npm:^0.9.4": version: 0.9.4 resolution: "@types/temp@npm:0.9.4" @@ -2776,6 +2934,26 @@ __metadata: languageName: node linkType: hard +"accepts@npm:^2.0.0": + version: 2.0.0 + resolution: "accepts@npm:2.0.0" + dependencies: + mime-types: "npm:^3.0.0" + negotiator: "npm:^1.0.0" + checksum: 10c0/98374742097e140891546076215f90c32644feacf652db48412329de4c2a529178a81aa500fbb13dd3e6cbf6e68d829037b123ac037fc9a08bcec4b87b358eef + languageName: node + linkType: hard + +"accepts@npm:~1.3.8": + version: 1.3.8 + resolution: "accepts@npm:1.3.8" + dependencies: + mime-types: "npm:~2.1.34" + negotiator: "npm:0.6.3" + checksum: 10c0/3a35c5f5586cfb9a21163ca47a5f77ac34fa8ceb5d17d2fa2c0d81f41cbd7f8c6fa52c77e2c039acc0f4d09e71abdc51144246900f6bef5e3c4b333f77d89362 + languageName: node + linkType: hard + "agent-base@npm:^7.1.0, agent-base@npm:^7.1.2": version: 7.1.3 resolution: "agent-base@npm:7.1.3" @@ -2783,6 +2961,32 @@ __metadata: languageName: node linkType: hard +"ajv-formats@npm:^3.0.1": + version: 3.0.1 + resolution: "ajv-formats@npm:3.0.1" + dependencies: + ajv: "npm:^8.0.0" + peerDependencies: + ajv: ^8.0.0 + peerDependenciesMeta: + ajv: + optional: true + checksum: 10c0/168d6bca1ea9f163b41c8147bae537e67bd963357a5488a1eaf3abe8baa8eec806d4e45f15b10767e6020679315c7e1e5e6803088dfb84efa2b4e9353b83dd0a + languageName: node + linkType: hard + +"ajv@npm:^8.0.0, ajv@npm:^8.17.1": + version: 8.17.1 + resolution: "ajv@npm:8.17.1" + dependencies: + fast-deep-equal: "npm:^3.1.3" + fast-uri: "npm:^3.0.1" + json-schema-traverse: "npm:^1.0.0" + require-from-string: "npm:^2.0.2" + checksum: 10c0/ec3ba10a573c6b60f94639ffc53526275917a2df6810e4ab5a6b959d87459f9ef3f00d5e7865b82677cb7d21590355b34da14d1d0b9c32d75f95a187e76fff35 + languageName: node + linkType: hard + "anafanafo@npm:2.0.0": version: 2.0.0 resolution: "anafanafo@npm:2.0.0" @@ -2871,6 +3075,13 @@ __metadata: languageName: node linkType: hard +"array-flatten@npm:1.1.1": + version: 1.1.1 + resolution: "array-flatten@npm:1.1.1" + checksum: 10c0/806966c8abb2f858b08f5324d9d18d7737480610f3bd5d3498aaae6eb5efdc501a884ba019c9b4a8f02ff67002058749d05548fd42fa8643f02c9c7f22198b91 + languageName: node + linkType: hard + "array-union@npm:^2.1.0": version: 2.1.0 resolution: "array-union@npm:2.1.0" @@ -2969,6 +3180,43 @@ __metadata: languageName: node linkType: hard +"body-parser@npm:^2.2.1": + version: 2.2.2 + resolution: "body-parser@npm:2.2.2" + dependencies: + bytes: "npm:^3.1.2" + content-type: "npm:^1.0.5" + debug: "npm:^4.4.3" + http-errors: "npm:^2.0.0" + iconv-lite: "npm:^0.7.0" + on-finished: "npm:^2.4.1" + qs: "npm:^6.14.1" + raw-body: "npm:^3.0.1" + type-is: "npm:^2.0.1" + checksum: 10c0/95a830a003b38654b75166ca765358aa92ee3d561bf0e41d6ccdde0e1a0c9783cab6b90b20eb635d23172c010b59d3563a137a738e74da4ba714463510d05137 + languageName: node + linkType: hard + +"body-parser@npm:~1.20.3": + version: 1.20.4 + resolution: "body-parser@npm:1.20.4" + dependencies: + bytes: "npm:~3.1.2" + content-type: "npm:~1.0.5" + debug: "npm:2.6.9" + depd: "npm:2.0.0" + destroy: "npm:~1.2.0" + http-errors: "npm:~2.0.1" + iconv-lite: "npm:~0.4.24" + on-finished: 
"npm:~2.4.1" + qs: "npm:~6.14.0" + raw-body: "npm:~2.5.3" + type-is: "npm:~1.6.18" + unpipe: "npm:~1.0.0" + checksum: 10c0/569c1e896297d1fcd8f34026c8d0ab70b90d45343c15c5d8dff5de2bad08125fc1e2f8c2f3f4c1ac6c0caaad115218202594d37dcb8d89d9b5dcae1c2b736aa9 + languageName: node + linkType: hard + "bowser@npm:^2.11.0": version: 2.11.0 resolution: "bowser@npm:2.11.0" @@ -3011,6 +3259,13 @@ __metadata: languageName: node linkType: hard +"bytes@npm:^3.1.2, bytes@npm:~3.1.2": + version: 3.1.2 + resolution: "bytes@npm:3.1.2" + checksum: 10c0/76d1c43cbd602794ad8ad2ae94095cddeb1de78c5dddaa7005c51af10b0176c69971a6d88e805a90c2b6550d76636e43c40d8427a808b8645ede885de4a0358e + languageName: node + linkType: hard + "cac@npm:^6.7.14": version: 6.7.14 resolution: "cac@npm:6.7.14" @@ -3246,6 +3501,60 @@ __metadata: languageName: node linkType: hard +"content-disposition@npm:^1.0.0": + version: 1.0.1 + resolution: "content-disposition@npm:1.0.1" + checksum: 10c0/bd7ff1fe8d2542d3a2b9a29428cc3591f6ac27bb5595bba2c69664408a68f9538b14cbd92479796ea835b317a09a527c8c7209c4200381dedb0c34d3b658849e + languageName: node + linkType: hard + +"content-disposition@npm:~0.5.4": + version: 0.5.4 + resolution: "content-disposition@npm:0.5.4" + dependencies: + safe-buffer: "npm:5.2.1" + checksum: 10c0/bac0316ebfeacb8f381b38285dc691c9939bf0a78b0b7c2d5758acadad242d04783cee5337ba7d12a565a19075af1b3c11c728e1e4946de73c6ff7ce45f3f1bb + languageName: node + linkType: hard + +"content-type@npm:^1.0.5, content-type@npm:~1.0.4, content-type@npm:~1.0.5": + version: 1.0.5 + resolution: "content-type@npm:1.0.5" + checksum: 10c0/b76ebed15c000aee4678c3707e0860cb6abd4e680a598c0a26e17f0bfae723ec9cc2802f0ff1bc6e4d80603719010431d2231018373d4dde10f9ccff9dadf5af + languageName: node + linkType: hard + +"cookie-signature@npm:^1.2.1": + version: 1.2.2 + resolution: "cookie-signature@npm:1.2.2" + checksum: 10c0/54e05df1a293b3ce81589b27dddc445f462f6fa6812147c033350cd3561a42bc14481674e05ed14c7bd0ce1e8bb3dc0e40851bad75415733711294ddce0b7bc6 + languageName: node + linkType: hard + +"cookie-signature@npm:~1.0.6": + version: 1.0.7 + resolution: "cookie-signature@npm:1.0.7" + checksum: 10c0/e7731ad2995ae2efeed6435ec1e22cdd21afef29d300c27281438b1eab2bae04ef0d1a203928c0afec2cee72aa36540b8747406ebe308ad23c8e8cc3c26c9c51 + languageName: node + linkType: hard + +"cookie@npm:^0.7.1, cookie@npm:~0.7.1": + version: 0.7.2 + resolution: "cookie@npm:0.7.2" + checksum: 10c0/9596e8ccdbf1a3a88ae02cf5ee80c1c50959423e1022e4e60b91dd87c622af1da309253d8abdb258fb5e3eacb4f08e579dc58b4897b8087574eee0fd35dfa5d2 + languageName: node + linkType: hard + +"cors@npm:^2.8.5": + version: 2.8.6 + resolution: "cors@npm:2.8.6" + dependencies: + object-assign: "npm:^4" + vary: "npm:^1" + checksum: 10c0/ab2bc57b8af8ef8476682a59647f7c55c1a7d406b559ac06119aa1c5f70b96d35036864d197b24cf86e228e4547231088f1f94ca05061dbb14d89cc0bc9d4cab + languageName: node + linkType: hard + "cross-spawn@npm:^6.0.5": version: 6.0.6 resolution: "cross-spawn@npm:6.0.6" @@ -3328,7 +3637,16 @@ __metadata: languageName: node linkType: hard -"debug@npm:4, debug@npm:^4.1.1, debug@npm:^4.3.4, debug@npm:^4.4.1, debug@npm:^4.4.3": +"debug@npm:2.6.9": + version: 2.6.9 + resolution: "debug@npm:2.6.9" + dependencies: + ms: "npm:2.0.0" + checksum: 10c0/121908fb839f7801180b69a7e218a40b5a0b718813b886b7d6bdb82001b931c938e2941d1e4450f33a1b1df1da653f5f7a0440c197f29fbf8a6e9d45ff6ef589 + languageName: node + linkType: hard + +"debug@npm:4, debug@npm:^4.1.1, debug@npm:^4.3.4, debug@npm:^4.4.0, debug@npm:^4.4.1, debug@npm:^4.4.3": version: 
4.4.3 resolution: "debug@npm:4.4.3" dependencies: @@ -3392,6 +3710,20 @@ __metadata: languageName: node linkType: hard +"depd@npm:2.0.0, depd@npm:^2.0.0, depd@npm:~2.0.0": + version: 2.0.0 + resolution: "depd@npm:2.0.0" + checksum: 10c0/58bd06ec20e19529b06f7ad07ddab60e504d9e0faca4bd23079fac2d279c3594334d736508dc350e06e510aba5e22e4594483b3a6562ce7c17dd797f4cc4ad2c + languageName: node + linkType: hard + +"destroy@npm:1.2.0, destroy@npm:~1.2.0": + version: 1.2.0 + resolution: "destroy@npm:1.2.0" + checksum: 10c0/bd7633942f57418f5a3b80d5cb53898127bcf53e24cdf5d5f4396be471417671f0fee48a4ebe9a1e9defbde2a31280011af58a57e090ff822f589b443ed4e643 + languageName: node + linkType: hard + "detect-indent@npm:^6.0.0": version: 6.1.0 resolution: "detect-indent@npm:6.1.0" @@ -3433,6 +3765,13 @@ __metadata: languageName: node linkType: hard +"ee-first@npm:1.1.1": + version: 1.1.1 + resolution: "ee-first@npm:1.1.1" + checksum: 10c0/b5bb125ee93161bc16bfe6e56c6b04de5ad2aa44234d8f644813cc95d861a6910903132b05093706de2b706599367c4130eb6d170f6b46895686b95f87d017b7 + languageName: node + linkType: hard + "emoji-regex@npm:^8.0.0": version: 8.0.0 resolution: "emoji-regex@npm:8.0.0" @@ -3447,6 +3786,13 @@ __metadata: languageName: node linkType: hard +"encodeurl@npm:^2.0.0, encodeurl@npm:~2.0.0": + version: 2.0.0 + resolution: "encodeurl@npm:2.0.0" + checksum: 10c0/5d317306acb13e6590e28e27924c754163946a2480de11865c991a3a7eed4315cd3fba378b543ca145829569eefe9b899f3d84bb09870f675ae60bc924b01ceb + languageName: node + linkType: hard + "encoding@npm:^0.1.13": version: 0.1.13 resolution: "encoding@npm:0.1.13" @@ -3690,6 +4036,13 @@ __metadata: languageName: node linkType: hard +"escape-html@npm:^1.0.3, escape-html@npm:~1.0.3": + version: 1.0.3 + resolution: "escape-html@npm:1.0.3" + checksum: 10c0/524c739d776b36c3d29fa08a22e03e8824e3b2fd57500e5e44ecf3cc4707c34c60f9ca0781c0e33d191f2991161504c295e98f68c78fe7baa6e57081ec6ac0a3 + languageName: node + linkType: hard + "escape-string-regexp@npm:^1.0.5": version: 1.0.5 resolution: "escape-string-regexp@npm:1.0.5" @@ -3716,6 +4069,13 @@ __metadata: languageName: node linkType: hard +"etag@npm:^1.8.1, etag@npm:~1.8.1": + version: 1.8.1 + resolution: "etag@npm:1.8.1" + checksum: 10c0/12be11ef62fb9817314d790089a0a49fae4e1b50594135dcb8076312b7d7e470884b5100d249b28c18581b7fd52f8b485689ffae22a11ed9ec17377a33a08f84 + languageName: node + linkType: hard + "eventemitter3@npm:^5.0.1": version: 5.0.1 resolution: "eventemitter3@npm:5.0.1" @@ -3723,6 +4083,22 @@ __metadata: languageName: node linkType: hard +"eventsource-parser@npm:^3.0.0, eventsource-parser@npm:^3.0.1": + version: 3.0.6 + resolution: "eventsource-parser@npm:3.0.6" + checksum: 10c0/70b8ccec7dac767ef2eca43f355e0979e70415701691382a042a2df8d6a68da6c2fca35363669821f3da876d29c02abe9b232964637c1b6635c940df05ada78a + languageName: node + linkType: hard + +"eventsource@npm:^3.0.2": + version: 3.0.7 + resolution: "eventsource@npm:3.0.7" + dependencies: + eventsource-parser: "npm:^3.0.1" + checksum: 10c0/c48a73c38f300e33e9f11375d4ee969f25cbb0519608a12378a38068055ae8b55b6e0e8a49c3f91c784068434efe1d9f01eb49b6315b04b0da9157879ce2f67d + languageName: node + linkType: hard + "execa@npm:9.6.0": version: 9.6.0 resolution: "execa@npm:9.6.0" @@ -3757,6 +4133,90 @@ __metadata: languageName: node linkType: hard +"express-rate-limit@npm:^7.5.0": + version: 7.5.1 + resolution: "express-rate-limit@npm:7.5.1" + peerDependencies: + express: ">= 4.11" + checksum: 
10c0/b07de84d700a2c07c4bf2f040e7558ed5a1f660f03ed5f30bf8ff7b51e98ba7a85215640e70fc48cbbb9151066ea51239d9a1b41febc9b84d98c7915b0186161 + languageName: node + linkType: hard + +"express@npm:^4.21.2": + version: 4.22.1 + resolution: "express@npm:4.22.1" + dependencies: + accepts: "npm:~1.3.8" + array-flatten: "npm:1.1.1" + body-parser: "npm:~1.20.3" + content-disposition: "npm:~0.5.4" + content-type: "npm:~1.0.4" + cookie: "npm:~0.7.1" + cookie-signature: "npm:~1.0.6" + debug: "npm:2.6.9" + depd: "npm:2.0.0" + encodeurl: "npm:~2.0.0" + escape-html: "npm:~1.0.3" + etag: "npm:~1.8.1" + finalhandler: "npm:~1.3.1" + fresh: "npm:~0.5.2" + http-errors: "npm:~2.0.0" + merge-descriptors: "npm:1.0.3" + methods: "npm:~1.1.2" + on-finished: "npm:~2.4.1" + parseurl: "npm:~1.3.3" + path-to-regexp: "npm:~0.1.12" + proxy-addr: "npm:~2.0.7" + qs: "npm:~6.14.0" + range-parser: "npm:~1.2.1" + safe-buffer: "npm:5.2.1" + send: "npm:~0.19.0" + serve-static: "npm:~1.16.2" + setprototypeof: "npm:1.2.0" + statuses: "npm:~2.0.1" + type-is: "npm:~1.6.18" + utils-merge: "npm:1.0.1" + vary: "npm:~1.1.2" + checksum: 10c0/ea57f512ab1e05e26b53a14fd432f65a10ec735ece342b37d0b63a7bcb8d337ffbb830ecb8ca15bcdfe423fbff88cea09786277baff200e8cde3ab40faa665cd + languageName: node + linkType: hard + +"express@npm:^5.0.1": + version: 5.2.1 + resolution: "express@npm:5.2.1" + dependencies: + accepts: "npm:^2.0.0" + body-parser: "npm:^2.2.1" + content-disposition: "npm:^1.0.0" + content-type: "npm:^1.0.5" + cookie: "npm:^0.7.1" + cookie-signature: "npm:^1.2.1" + debug: "npm:^4.4.0" + depd: "npm:^2.0.0" + encodeurl: "npm:^2.0.0" + escape-html: "npm:^1.0.3" + etag: "npm:^1.8.1" + finalhandler: "npm:^2.1.0" + fresh: "npm:^2.0.0" + http-errors: "npm:^2.0.0" + merge-descriptors: "npm:^2.0.0" + mime-types: "npm:^3.0.0" + on-finished: "npm:^2.4.1" + once: "npm:^1.4.0" + parseurl: "npm:^1.3.3" + proxy-addr: "npm:^2.0.7" + qs: "npm:^6.14.0" + range-parser: "npm:^1.2.1" + router: "npm:^2.2.0" + send: "npm:^1.1.0" + serve-static: "npm:^2.2.0" + statuses: "npm:^2.0.1" + type-is: "npm:^2.0.1" + vary: "npm:^1.1.2" + checksum: 10c0/45e8c841ad188a41402ddcd1294901e861ee0819f632fb494f2ed344ef9c43315d294d443fb48d594e6586a3b779785120f43321417adaef8567316a55072949 + languageName: node + linkType: hard + "extendable-error@npm:^0.1.5": version: 0.1.7 resolution: "extendable-error@npm:0.1.7" @@ -3764,6 +4224,13 @@ __metadata: languageName: node linkType: hard +"fast-deep-equal@npm:^3.1.3": + version: 3.1.3 + resolution: "fast-deep-equal@npm:3.1.3" + checksum: 10c0/40dedc862eb8992c54579c66d914635afbec43350afbbe991235fdcb4e3a8d5af1b23ae7e79bef7d4882d0ecee06c3197488026998fb19f72dc95acff1d1b1d0 + languageName: node + linkType: hard + "fast-glob@npm:^3.2.9, fast-glob@npm:^3.3.3": version: 3.3.3 resolution: "fast-glob@npm:3.3.3" @@ -3777,6 +4244,13 @@ __metadata: languageName: node linkType: hard +"fast-uri@npm:^3.0.1": + version: 3.1.0 + resolution: "fast-uri@npm:3.1.0" + checksum: 10c0/44364adca566f70f40d1e9b772c923138d47efeac2ae9732a872baafd77061f26b097ba2f68f0892885ad177becd065520412b8ffeec34b16c99433c5b9e2de7 + languageName: node + linkType: hard + "fast-xml-parser@npm:5.2.5": version: 5.2.5 resolution: "fast-xml-parser@npm:5.2.5" @@ -3836,6 +4310,35 @@ __metadata: languageName: node linkType: hard +"finalhandler@npm:^2.1.0": + version: 2.1.1 + resolution: "finalhandler@npm:2.1.1" + dependencies: + debug: "npm:^4.4.0" + encodeurl: "npm:^2.0.0" + escape-html: "npm:^1.0.3" + on-finished: "npm:^2.4.1" + parseurl: "npm:^1.3.3" + statuses: "npm:^2.0.1" + checksum: 
10c0/6bd664e21b7b2e79efcaace7d1a427169f61cce048fae68eb56290e6934e676b78e55d89f5998c5508871345bc59a61f47002dc505dc7288be68cceac1b701e2 + languageName: node + linkType: hard + +"finalhandler@npm:~1.3.1": + version: 1.3.2 + resolution: "finalhandler@npm:1.3.2" + dependencies: + debug: "npm:2.6.9" + encodeurl: "npm:~2.0.0" + escape-html: "npm:~1.0.3" + on-finished: "npm:~2.4.1" + parseurl: "npm:~1.3.3" + statuses: "npm:~2.0.2" + unpipe: "npm:~1.0.0" + checksum: 10c0/435a4fd65e4e4e4c71bb5474980090b73c353a123dd415583f67836bdd6516e528cf07298e219a82b94631dee7830eae5eece38d3c178073cf7df4e8c182f413 + languageName: node + linkType: hard + "find-up@npm:^4.1.0": version: 4.1.0 resolution: "find-up@npm:4.1.0" @@ -3896,6 +4399,27 @@ __metadata: languageName: node linkType: hard +"forwarded@npm:0.2.0": + version: 0.2.0 + resolution: "forwarded@npm:0.2.0" + checksum: 10c0/9b67c3fac86acdbc9ae47ba1ddd5f2f81526fa4c8226863ede5600a3f7c7416ef451f6f1e240a3cc32d0fd79fcfe6beb08fd0da454f360032bde70bf80afbb33 + languageName: node + linkType: hard + +"fresh@npm:^2.0.0": + version: 2.0.0 + resolution: "fresh@npm:2.0.0" + checksum: 10c0/0557548194cb9a809a435bf92bcfbc20c89e8b5eb38861b73ced36750437251e39a111fc3a18b98531be9dd91fe1411e4969f229dc579ec0251ce6c5d4900bbc + languageName: node + linkType: hard + +"fresh@npm:~0.5.2": + version: 0.5.2 + resolution: "fresh@npm:0.5.2" + checksum: 10c0/c6d27f3ed86cc5b601404822f31c900dd165ba63fff8152a3ef714e2012e7535027063bc67ded4cb5b3a49fa596495d46cacd9f47d6328459cf570f08b7d9e5a + languageName: node + linkType: hard + "fs-extra@npm:^7.0.1": version: 7.0.1 resolution: "fs-extra@npm:7.0.1" @@ -4229,6 +4753,19 @@ __metadata: languageName: node linkType: hard +"http-errors@npm:^2.0.0, http-errors@npm:^2.0.1, http-errors@npm:~2.0.0, http-errors@npm:~2.0.1": + version: 2.0.1 + resolution: "http-errors@npm:2.0.1" + dependencies: + depd: "npm:~2.0.0" + inherits: "npm:~2.0.4" + setprototypeof: "npm:~1.2.0" + statuses: "npm:~2.0.2" + toidentifier: "npm:~1.0.1" + checksum: 10c0/fb38906cef4f5c83952d97661fe14dc156cb59fe54812a42cd448fa57b5c5dfcb38a40a916957737bd6b87aab257c0648d63eb5b6a9ca9f548e105b6072712d4 + languageName: node + linkType: hard + "http-proxy-agent@npm:^7.0.0": version: 7.0.2 resolution: "http-proxy-agent@npm:7.0.2" @@ -4284,7 +4821,7 @@ __metadata: languageName: node linkType: hard -"iconv-lite@npm:^0.7.0": +"iconv-lite@npm:^0.7.0, iconv-lite@npm:~0.7.0": version: 0.7.2 resolution: "iconv-lite@npm:0.7.2" dependencies: @@ -4293,6 +4830,15 @@ __metadata: languageName: node linkType: hard +"iconv-lite@npm:~0.4.24": + version: 0.4.24 + resolution: "iconv-lite@npm:0.4.24" + dependencies: + safer-buffer: "npm:>= 2.1.2 < 3" + checksum: 10c0/c6886a24cc00f2a059767440ec1bc00d334a89f250db8e0f7feb4961c8727118457e27c495ba94d082e51d3baca378726cd110aaf7ded8b9bbfd6a44760cf1d4 + languageName: node + linkType: hard + "ignore@npm:^5.2.0": version: 5.3.2 resolution: "ignore@npm:5.3.2" @@ -4326,7 +4872,7 @@ __metadata: languageName: node linkType: hard -"inherits@npm:2": +"inherits@npm:2, inherits@npm:~2.0.4": version: 2.0.4 resolution: "inherits@npm:2.0.4" checksum: 10c0/4e531f648b29039fb7426fb94075e6545faa1eb9fe83c29f0b6d9e7263aceb4289d2d4557db0d428188eeb449cc7c5e77b0a0b2c4e248ff2a65933a0dee49ef2 @@ -4361,6 +4907,13 @@ __metadata: languageName: node linkType: hard +"ipaddr.js@npm:1.9.1": + version: 1.9.1 + resolution: "ipaddr.js@npm:1.9.1" + checksum: 10c0/0486e775047971d3fdb5fb4f063829bac45af299ae0b82dcf3afa2145338e08290563a2a70f34b732d795ecc8311902e541a8530eeb30d75860a78ff4e94ce2a + languageName: 
node + linkType: hard + "is-array-buffer@npm:^3.0.4, is-array-buffer@npm:^3.0.5": version: 3.0.5 resolution: "is-array-buffer@npm:3.0.5" @@ -4537,6 +5090,13 @@ __metadata: languageName: node linkType: hard +"is-promise@npm:^4.0.0": + version: 4.0.0 + resolution: "is-promise@npm:4.0.0" + checksum: 10c0/ebd5c672d73db781ab33ccb155fb9969d6028e37414d609b115cc534654c91ccd061821d5b987eefaa97cf4c62f0b909bb2f04db88306de26e91bfe8ddc01503 + languageName: node + linkType: hard + "is-regex@npm:^1.2.1": version: 1.2.1 resolution: "is-regex@npm:1.2.1" @@ -4752,6 +5312,13 @@ __metadata: languageName: node linkType: hard +"jose@npm:^6.1.1": + version: 6.1.3 + resolution: "jose@npm:6.1.3" + checksum: 10c0/b9577b4a7a5e84131011c23823db9f5951eae3ba796771a6a2401ae5dd50daf71104febc8ded9c38146aa5ebe94a92ac09c725e699e613ef26949b9f5a8bc30f + languageName: node + linkType: hard + "js-base64@npm:^3.7.2": version: 3.7.7 resolution: "js-base64@npm:3.7.7" @@ -4810,6 +5377,20 @@ __metadata: languageName: node linkType: hard +"json-schema-traverse@npm:^1.0.0": + version: 1.0.0 + resolution: "json-schema-traverse@npm:1.0.0" + checksum: 10c0/71e30015d7f3d6dc1c316d6298047c8ef98a06d31ad064919976583eb61e1018a60a0067338f0f79cabc00d84af3fcc489bd48ce8a46ea165d9541ba17fb30c6 + languageName: node + linkType: hard + +"json-schema-typed@npm:^8.0.2": + version: 8.0.2 + resolution: "json-schema-typed@npm:8.0.2" + checksum: 10c0/89f5e2fb1495483b705c027203c07277ee6bf2665165ad25a9cb55de5af7f72570326d13d32565180781e4083ad5c9688102f222baed7b353c2f39c1e02b0428 + languageName: node + linkType: hard + "json-stringify-safe@npm:^5.0.1": version: 5.0.1 resolution: "json-stringify-safe@npm:5.0.1" @@ -5058,6 +5639,20 @@ __metadata: languageName: node linkType: hard +"media-typer@npm:0.3.0": + version: 0.3.0 + resolution: "media-typer@npm:0.3.0" + checksum: 10c0/d160f31246907e79fed398470285f21bafb45a62869dc469b1c8877f3f064f5eabc4bcc122f9479b8b605bc5c76187d7871cf84c4ee3ecd3e487da1993279928 + languageName: node + linkType: hard + +"media-typer@npm:^1.1.0": + version: 1.1.0 + resolution: "media-typer@npm:1.1.0" + checksum: 10c0/7b4baa40b25964bb90e2121ee489ec38642127e48d0cc2b6baa442688d3fde6262bfdca86d6bbf6ba708784afcac168c06840c71facac70e390f5f759ac121b9 + languageName: node + linkType: hard + "memorystream@npm:^0.3.1": version: 0.3.1 resolution: "memorystream@npm:0.3.1" @@ -5065,6 +5660,20 @@ __metadata: languageName: node linkType: hard +"merge-descriptors@npm:1.0.3": + version: 1.0.3 + resolution: "merge-descriptors@npm:1.0.3" + checksum: 10c0/866b7094afd9293b5ea5dcd82d71f80e51514bed33b4c4e9f516795dc366612a4cbb4dc94356e943a8a6914889a914530badff27f397191b9b75cda20b6bae93 + languageName: node + linkType: hard + +"merge-descriptors@npm:^2.0.0": + version: 2.0.0 + resolution: "merge-descriptors@npm:2.0.0" + checksum: 10c0/95389b7ced3f9b36fbdcf32eb946dc3dd1774c2fdf164609e55b18d03aa499b12bd3aae3a76c1c7185b96279e9803525550d3eb292b5224866060a288f335cb3 + languageName: node + linkType: hard + "merge2@npm:^1.3.0, merge2@npm:^1.4.1": version: 1.4.1 resolution: "merge2@npm:1.4.1" @@ -5072,6 +5681,13 @@ __metadata: languageName: node linkType: hard +"methods@npm:~1.1.2": + version: 1.1.2 + resolution: "methods@npm:1.1.2" + checksum: 10c0/bdf7cc72ff0a33e3eede03708c08983c4d7a173f91348b4b1e4f47d4cdbf734433ad971e7d1e8c77247d9e5cd8adb81ea4c67b0a2db526b758b2233d7814b8b2 + languageName: node + linkType: hard + "micromatch@npm:^4.0.8": version: 4.0.8 resolution: "micromatch@npm:4.0.8" @@ -5089,7 +5705,14 @@ __metadata: languageName: node linkType: hard 
-"mime-types@npm:^2.1.12": +"mime-db@npm:^1.54.0": + version: 1.54.0 + resolution: "mime-db@npm:1.54.0" + checksum: 10c0/8d907917bc2a90fa2df842cdf5dfeaf509adc15fe0531e07bb2f6ab15992416479015828d6a74200041c492e42cce3ebf78e5ce714388a0a538ea9c53eece284 + languageName: node + linkType: hard + +"mime-types@npm:^2.1.12, mime-types@npm:~2.1.24, mime-types@npm:~2.1.34": version: 2.1.35 resolution: "mime-types@npm:2.1.35" dependencies: @@ -5098,6 +5721,24 @@ __metadata: languageName: node linkType: hard +"mime-types@npm:^3.0.0, mime-types@npm:^3.0.2": + version: 3.0.2 + resolution: "mime-types@npm:3.0.2" + dependencies: + mime-db: "npm:^1.54.0" + checksum: 10c0/35a0dd1035d14d185664f346efcdb72e93ef7a9b6e9ae808bd1f6358227010267fab52657b37562c80fc888ff76becb2b2938deb5e730818b7983bf8bd359767 + languageName: node + linkType: hard + +"mime@npm:1.6.0": + version: 1.6.0 + resolution: "mime@npm:1.6.0" + bin: + mime: cli.js + checksum: 10c0/b92cd0adc44888c7135a185bfd0dddc42c32606401c72896a842ae15da71eb88858f17669af41e498b463cd7eb998f7b48939a25b08374c7924a9c8a6f8a81b0 + languageName: node + linkType: hard + "mimic-response@npm:^3.1.0": version: 3.1.0 resolution: "mimic-response@npm:3.1.0" @@ -5249,7 +5890,14 @@ __metadata: languageName: node linkType: hard -"ms@npm:^2.1.3": +"ms@npm:2.0.0": + version: 2.0.0 + resolution: "ms@npm:2.0.0" + checksum: 10c0/f8fda810b39fd7255bbdc451c46286e549794fcc700dc9cd1d25658bbc4dc2563a5de6fe7c60f798a16a60c6ceb53f033cb353f493f0cf63e5199b702943159d + languageName: node + linkType: hard + +"ms@npm:2.1.3, ms@npm:^2.1.3": version: 2.1.3 resolution: "ms@npm:2.1.3" checksum: 10c0/d924b57e7312b3b63ad21fc5b3dc0af5e78d61a1fc7cfb5457edaf26326bf62be5307cc87ffb6862ef1c2b33b0233cdb5d4f01c4c958cc0d660948b65a287a48 @@ -5265,6 +5913,13 @@ __metadata: languageName: node linkType: hard +"negotiator@npm:0.6.3": + version: 0.6.3 + resolution: "negotiator@npm:0.6.3" + checksum: 10c0/3ec9fd413e7bf071c937ae60d572bc67155262068ed522cf4b3be5edbe6ddf67d095ec03a3a14ebf8fc8e95f8e1d61be4869db0dbb0de696f6b837358bd43fc2 + languageName: node + linkType: hard + "negotiator@npm:^1.0.0": version: 1.0.0 resolution: "negotiator@npm:1.0.0" @@ -5378,6 +6033,13 @@ __metadata: languageName: node linkType: hard +"object-assign@npm:^4": + version: 4.1.1 + resolution: "object-assign@npm:4.1.1" + checksum: 10c0/1f4df9945120325d041ccf7b86f31e8bcc14e73d29171e37a7903050e96b81323784ec59f93f102ec635bcf6fa8034ba3ea0a8c7e69fa202b87ae3b6cec5a414 + languageName: node + linkType: hard + "object-inspect@npm:^1.13.3, object-inspect@npm:^1.13.4": version: 1.13.4 resolution: "object-inspect@npm:1.13.4" @@ -5406,7 +6068,16 @@ __metadata: languageName: node linkType: hard -"once@npm:^1.3.0": +"on-finished@npm:^2.4.1, on-finished@npm:~2.4.1": + version: 2.4.1 + resolution: "on-finished@npm:2.4.1" + dependencies: + ee-first: "npm:1.1.1" + checksum: 10c0/46fb11b9063782f2d9968863d9cbba33d77aa13c17f895f56129c274318b86500b22af3a160fe9995aa41317efcd22941b6eba747f718ced08d9a73afdb087b4 + languageName: node + linkType: hard + +"once@npm:^1.3.0, once@npm:^1.4.0": version: 1.4.0 resolution: "once@npm:1.4.0" dependencies: @@ -5623,6 +6294,13 @@ __metadata: languageName: node linkType: hard +"parseurl@npm:^1.3.3, parseurl@npm:~1.3.3": + version: 1.3.3 + resolution: "parseurl@npm:1.3.3" + checksum: 10c0/90dd4760d6f6174adb9f20cf0965ae12e23879b5f5464f38e92fce8073354341e4b3b76fa3d878351efe7d01e617121955284cfd002ab087fba1a0726ec0b4f5 + languageName: node + linkType: hard + "path-exists@npm:^4.0.0": version: 4.0.0 resolution: "path-exists@npm:4.0.0" @@ 
-5685,6 +6363,20 @@ __metadata: languageName: node linkType: hard +"path-to-regexp@npm:^8.0.0": + version: 8.3.0 + resolution: "path-to-regexp@npm:8.3.0" + checksum: 10c0/ee1544a73a3f294a97a4c663b0ce71bbf1621d732d80c9c9ed201b3e911a86cb628ebad691b9d40f40a3742fe22011e5a059d8eed2cf63ec2cb94f6fb4efe67c + languageName: node + linkType: hard + +"path-to-regexp@npm:~0.1.12": + version: 0.1.12 + resolution: "path-to-regexp@npm:0.1.12" + checksum: 10c0/1c6ff10ca169b773f3bba943bbc6a07182e332464704572962d277b900aeee81ac6aa5d060ff9e01149636c30b1f63af6e69dd7786ba6e0ddb39d4dee1f0645b + languageName: node + linkType: hard + "path-type@npm:^3.0.0": version: 3.0.0 resolution: "path-type@npm:3.0.0" @@ -5759,6 +6451,13 @@ __metadata: languageName: node linkType: hard +"pkce-challenge@npm:^5.0.0": + version: 5.0.1 + resolution: "pkce-challenge@npm:5.0.1" + checksum: 10c0/207f4cb976682f27e8324eb49cf71937c98fbb8341a0b8f6142bc6f664825b30e049a54a21b5c034e823ee3c3d412f10d74bd21de78e17452a6a496c2991f57c + languageName: node + linkType: hard + "possible-typed-array-names@npm:^1.0.0": version: 1.1.0 resolution: "possible-typed-array-names@npm:1.1.0" @@ -5841,6 +6540,25 @@ __metadata: languageName: node linkType: hard +"proxy-addr@npm:^2.0.7, proxy-addr@npm:~2.0.7": + version: 2.0.7 + resolution: "proxy-addr@npm:2.0.7" + dependencies: + forwarded: "npm:0.2.0" + ipaddr.js: "npm:1.9.1" + checksum: 10c0/c3eed999781a35f7fd935f398b6d8920b6fb00bbc14287bc6de78128ccc1a02c89b95b56742bf7cf0362cc333c61d138532049c7dedc7a328ef13343eff81210 + languageName: node + linkType: hard + +"qs@npm:^6.14.0, qs@npm:^6.14.1, qs@npm:~6.14.0": + version: 6.14.1 + resolution: "qs@npm:6.14.1" + dependencies: + side-channel: "npm:^1.1.0" + checksum: 10c0/0e3b22dc451f48ce5940cbbc7c7d9068d895074f8c969c0801ac15c1313d1859c4d738e46dc4da2f498f41a9ffd8c201bd9fb12df67799b827db94cc373d2613 + languageName: node + linkType: hard + "quansync@npm:^0.2.7": version: 0.2.11 resolution: "quansync@npm:0.2.11" @@ -5869,6 +6587,37 @@ __metadata: languageName: node linkType: hard +"range-parser@npm:^1.2.1, range-parser@npm:~1.2.1": + version: 1.2.1 + resolution: "range-parser@npm:1.2.1" + checksum: 10c0/96c032ac2475c8027b7a4e9fe22dc0dfe0f6d90b85e496e0f016fbdb99d6d066de0112e680805075bd989905e2123b3b3d002765149294dce0c1f7f01fcc2ea0 + languageName: node + linkType: hard + +"raw-body@npm:^3.0.0, raw-body@npm:^3.0.1": + version: 3.0.2 + resolution: "raw-body@npm:3.0.2" + dependencies: + bytes: "npm:~3.1.2" + http-errors: "npm:~2.0.1" + iconv-lite: "npm:~0.7.0" + unpipe: "npm:~1.0.0" + checksum: 10c0/d266678d08e1e7abea62c0ce5864344e980fa81c64f6b481e9842c5beaed2cdcf975f658a3ccd67ad35fc919c1f6664ccc106067801850286a6cbe101de89f29 + languageName: node + linkType: hard + +"raw-body@npm:~2.5.3": + version: 2.5.3 + resolution: "raw-body@npm:2.5.3" + dependencies: + bytes: "npm:~3.1.2" + http-errors: "npm:~2.0.1" + iconv-lite: "npm:~0.4.24" + unpipe: "npm:~1.0.0" + checksum: 10c0/449844344fc90547fb994383a494b83300e4f22199f146a79f68d78a199a8f2a923ea9fd29c3be979bfd50291a3884733619ffc15ba02a32e703b612f8d3f74a + languageName: node + linkType: hard + "react-is@npm:^18.3.1": version: 18.3.1 resolution: "react-is@npm:18.3.1" @@ -5938,6 +6687,13 @@ __metadata: languageName: node linkType: hard +"require-from-string@npm:^2.0.2": + version: 2.0.2 + resolution: "require-from-string@npm:2.0.2" + checksum: 10c0/aaa267e0c5b022fc5fd4eef49d8285086b15f2a1c54b28240fdf03599cbd9c26049fee3eab894f2e1f6ca65e513b030a7c264201e3f005601e80c49fb2937ce2 + languageName: node + linkType: hard + 
"requires-port@npm:^1.0.0": version: 1.0.0 resolution: "requires-port@npm:1.0.0" @@ -6109,6 +6865,19 @@ __metadata: languageName: node linkType: hard +"router@npm:^2.2.0": + version: 2.2.0 + resolution: "router@npm:2.2.0" + dependencies: + debug: "npm:^4.4.0" + depd: "npm:^2.0.0" + is-promise: "npm:^4.0.0" + parseurl: "npm:^1.3.3" + path-to-regexp: "npm:^8.0.0" + checksum: 10c0/3279de7450c8eae2f6e095e9edacbdeec0abb5cb7249c6e719faa0db2dba43574b4fff5892d9220631c9abaff52dd3cad648cfea2aaace845e1a071915ac8867 + languageName: node + linkType: hard + "run-parallel@npm:^1.1.9": version: 1.2.0 resolution: "run-parallel@npm:1.2.0" @@ -6131,6 +6900,13 @@ __metadata: languageName: node linkType: hard +"safe-buffer@npm:5.2.1": + version: 5.2.1 + resolution: "safe-buffer@npm:5.2.1" + checksum: 10c0/6501914237c0a86e9675d4e51d89ca3c21ffd6a31642efeba25ad65720bce6921c9e7e974e5be91a786b25aa058b5303285d3c15dbabf983a919f5f630d349f3 + languageName: node + linkType: hard + "safe-push-apply@npm:^1.0.0": version: 1.0.0 resolution: "safe-push-apply@npm:1.0.0" @@ -6152,7 +6928,7 @@ __metadata: languageName: node linkType: hard -"safer-buffer@npm:>= 2.1.2 < 3.0.0": +"safer-buffer@npm:>= 2.1.2 < 3, safer-buffer@npm:>= 2.1.2 < 3.0.0": version: 2.1.2 resolution: "safer-buffer@npm:2.1.2" checksum: 10c0/7e3c8b2e88a1841c9671094bbaeebd94448111dd90a81a1f606f3f67708a6ec57763b3b47f06da09fc6054193e0e6709e77325415dc8422b04497a8070fa02d4 @@ -6177,6 +6953,70 @@ __metadata: languageName: node linkType: hard +"send@npm:^1.1.0, send@npm:^1.2.0": + version: 1.2.1 + resolution: "send@npm:1.2.1" + dependencies: + debug: "npm:^4.4.3" + encodeurl: "npm:^2.0.0" + escape-html: "npm:^1.0.3" + etag: "npm:^1.8.1" + fresh: "npm:^2.0.0" + http-errors: "npm:^2.0.1" + mime-types: "npm:^3.0.2" + ms: "npm:^2.1.3" + on-finished: "npm:^2.4.1" + range-parser: "npm:^1.2.1" + statuses: "npm:^2.0.2" + checksum: 10c0/fbbbbdc902a913d65605274be23f3d604065cfc3ee3d78bf9fc8af1dc9fc82667c50d3d657f5e601ac657bac9b396b50ee97bd29cd55436320cf1cddebdcec72 + languageName: node + linkType: hard + +"send@npm:~0.19.0, send@npm:~0.19.1": + version: 0.19.2 + resolution: "send@npm:0.19.2" + dependencies: + debug: "npm:2.6.9" + depd: "npm:2.0.0" + destroy: "npm:1.2.0" + encodeurl: "npm:~2.0.0" + escape-html: "npm:~1.0.3" + etag: "npm:~1.8.1" + fresh: "npm:~0.5.2" + http-errors: "npm:~2.0.1" + mime: "npm:1.6.0" + ms: "npm:2.1.3" + on-finished: "npm:~2.4.1" + range-parser: "npm:~1.2.1" + statuses: "npm:~2.0.2" + checksum: 10c0/20c2389fe0fdf3fc499938cac598bc32272287e993c4960717381a10de8550028feadfb9076f959a3a3ebdea42e1f690e116f0d16468fa56b9fd41866d3dc267 + languageName: node + linkType: hard + +"serve-static@npm:^2.2.0": + version: 2.2.1 + resolution: "serve-static@npm:2.2.1" + dependencies: + encodeurl: "npm:^2.0.0" + escape-html: "npm:^1.0.3" + parseurl: "npm:^1.3.3" + send: "npm:^1.2.0" + checksum: 10c0/37986096e8572e2dfaad35a3925fa8da0c0969f8814fd7788e84d4d388bc068cf0c06d1658509788e55bed942a6b6d040a8a267fa92bb9ffb1179f8bacde5fd7 + languageName: node + linkType: hard + +"serve-static@npm:~1.16.2": + version: 1.16.3 + resolution: "serve-static@npm:1.16.3" + dependencies: + encodeurl: "npm:~2.0.0" + escape-html: "npm:~1.0.3" + parseurl: "npm:~1.3.3" + send: "npm:~0.19.1" + checksum: 10c0/36320397a073c71bedf58af48a4a100fe6d93f07459af4d6f08b9a7217c04ce2a4939e0effd842dc7bece93ffcd59eb52f58c4fff2a8e002dc29ae6b219cd42b + languageName: node + linkType: hard + "set-function-length@npm:^1.2.2": version: 1.2.2 resolution: "set-function-length@npm:1.2.2" @@ -6214,6 +7054,13 @@ 
__metadata: languageName: node linkType: hard +"setprototypeof@npm:1.2.0, setprototypeof@npm:~1.2.0": + version: 1.2.0 + resolution: "setprototypeof@npm:1.2.0" + checksum: 10c0/68733173026766fa0d9ecaeb07f0483f4c2dc70ca376b3b7c40b7cda909f94b0918f6c5ad5ce27a9160bdfb475efaa9d5e705a11d8eaae18f9835d20976028bc + languageName: node + linkType: hard + "shebang-command@npm:^1.2.0": version: 1.2.0 resolution: "shebang-command@npm:1.2.0" @@ -6445,6 +7292,13 @@ __metadata: languageName: node linkType: hard +"statuses@npm:^2.0.1, statuses@npm:^2.0.2, statuses@npm:~2.0.1, statuses@npm:~2.0.2": + version: 2.0.2 + resolution: "statuses@npm:2.0.2" + checksum: 10c0/a9947d98ad60d01f6b26727570f3bcceb6c8fa789da64fe6889908fe2e294d57503b14bf2b5af7605c2d36647259e856635cd4c49eab41667658ec9d0080ec3f + languageName: node + linkType: hard + "std-env@npm:^3.9.0": version: 3.9.0 resolution: "std-env@npm:3.9.0" @@ -6707,6 +7561,13 @@ __metadata: languageName: node linkType: hard +"toidentifier@npm:~1.0.1": + version: 1.0.1 + resolution: "toidentifier@npm:1.0.1" + checksum: 10c0/93937279934bd66cc3270016dd8d0afec14fb7c94a05c72dc57321f8bd1fa97e5bea6d1f7c89e728d077ca31ea125b78320a616a6c6cd0e6b9cb94cb864381c1 + languageName: node + linkType: hard + "transloadit-node-sdk@workspace:.": version: 0.0.0-use.local resolution: "transloadit-node-sdk@workspace:." @@ -6794,6 +7655,27 @@ __metadata: languageName: node linkType: hard +"type-is@npm:^2.0.1": + version: 2.0.1 + resolution: "type-is@npm:2.0.1" + dependencies: + content-type: "npm:^1.0.5" + media-typer: "npm:^1.1.0" + mime-types: "npm:^3.0.0" + checksum: 10c0/7f7ec0a060b16880bdad36824ab37c26019454b67d73e8a465ed5a3587440fbe158bc765f0da68344498235c877e7dbbb1600beccc94628ed05599d667951b99 + languageName: node + linkType: hard + +"type-is@npm:~1.6.18": + version: 1.6.18 + resolution: "type-is@npm:1.6.18" + dependencies: + media-typer: "npm:0.3.0" + mime-types: "npm:~2.1.24" + checksum: 10c0/a23daeb538591b7efbd61ecf06b6feb2501b683ffdc9a19c74ef5baba362b4347e42f1b4ed81f5882a8c96a3bfff7f93ce3ffaf0cbbc879b532b04c97a55db9d + languageName: node + linkType: hard + "typed-array-buffer@npm:^1.0.3": version: 1.0.3 resolution: "typed-array-buffer@npm:1.0.3" @@ -6918,6 +7800,13 @@ __metadata: languageName: node linkType: hard +"unpipe@npm:~1.0.0": + version: 1.0.0 + resolution: "unpipe@npm:1.0.0" + checksum: 10c0/193400255bd48968e5c5383730344fbb4fa114cdedfab26e329e50dd2d81b134244bb8a72c6ac1b10ab0281a58b363d06405632c9d49ca9dfd5e90cbd7d0f32c + languageName: node + linkType: hard + "url-parse@npm:^1.5.7": version: 1.5.10 resolution: "url-parse@npm:1.5.10" @@ -6928,6 +7817,13 @@ __metadata: languageName: node linkType: hard +"utils-merge@npm:1.0.1": + version: 1.0.1 + resolution: "utils-merge@npm:1.0.1" + checksum: 10c0/02ba649de1b7ca8854bfe20a82f1dfbdda3fb57a22ab4a8972a63a34553cf7aa51bc9081cf7e001b035b88186d23689d69e71b510e610a09a4c66f68aa95b672 + languageName: node + linkType: hard + "uuid@npm:^9.0.1": version: 9.0.1 resolution: "uuid@npm:9.0.1" @@ -6947,6 +7843,13 @@ __metadata: languageName: node linkType: hard +"vary@npm:^1, vary@npm:^1.1.2, vary@npm:~1.1.2": + version: 1.1.2 + resolution: "vary@npm:1.1.2" + checksum: 10c0/f15d588d79f3675135ba783c91a4083dcd290a2a5be9fcb6514220a1634e23df116847b1cc51f66bfb0644cf9353b2abb7815ae499bab06e46dd33c1a6bf1f4f + languageName: node + linkType: hard + "vite-node@npm:3.2.4": version: 3.2.4 resolution: "vite-node@npm:3.2.4" @@ -7236,6 +8139,15 @@ __metadata: languageName: node linkType: hard +"zod-to-json-schema@npm:^3.25.0": + version: 3.25.1 + 
resolution: "zod-to-json-schema@npm:3.25.1" + peerDependencies: + zod: ^3.25 || ^4 + checksum: 10c0/711b30e34d1f1211f1afe64bf457f0d799234199dc005cca720b236ea808804c03164039c232f5df33c46f462023874015a8a0b3aab1585eca14124c324db7e2 + languageName: node + linkType: hard + "zod@npm:3.25.76": version: 3.25.76 resolution: "zod@npm:3.25.76" @@ -7243,6 +8155,13 @@ __metadata: languageName: node linkType: hard +"zod@npm:^3.25 || ^4.0": + version: 4.3.6 + resolution: "zod@npm:4.3.6" + checksum: 10c0/860d25a81ab41d33aa25f8d0d07b091a04acb426e605f396227a796e9e800c44723ed96d0f53a512b57be3d1520f45bf69c0cb3b378a232a00787a2609625307 + languageName: node + linkType: hard + "zod@npm:^4.0.0, zod@npm:^4.1.11": version: 4.3.5 resolution: "zod@npm:4.3.5"