diff --git a/.github/workflows/services-build.yaml b/.github/workflows/services-build.yaml index 87635990a..14d5299f9 100644 --- a/.github/workflows/services-build.yaml +++ b/.github/workflows/services-build.yaml @@ -124,7 +124,7 @@ jobs: BRANCH: ${{ github.ref_name }} COMMIT_SHA: ${{ github.sha }} run: | - TARGETS="console rotor" + TARGETS="console rotor functions-server" REGISTRY="${{ secrets.DOCKERHUB_USERNAME }}" SHORT_SHA=$(git rev-parse --short=7 HEAD) diff --git a/all.Dockerfile b/all.Dockerfile index e2fc5f95e..a9d6c2f42 100644 --- a/all.Dockerfile +++ b/all.Dockerfile @@ -200,3 +200,50 @@ ENV JITSU_VERSION_STRING=${JITSU_BUILD_VERSION} ENTRYPOINT ["/app/entrypoint.sh"] + +# ============================================================================ +# FUNCTIONS-SERVER STAGE - Deno-based UDF execution with Web Worker isolation +# ============================================================================ +# Sandboxed functions execution for free-tier workspaces +FROM denoland/deno:debian AS functions-server + +ARG JITSU_BUILD_VERSION=dev +ARG JITSU_BUILD_DOCKER_TAG=dev +ARG JITSU_BUILD_COMMIT_SHA=unknown + +WORKDIR /app + +# Install curl for healthchecks +RUN apt-get update && \ + apt-get install -y --no-install-recommends ca-certificates curl && \ + rm -rf /var/lib/apt/lists/* + +EXPOSE 3401 + +# Copy Deno-specific build artifacts from builder +COPY --from=builder /app/services/rotor/dist/functions-server.mjs ./functions-server.mjs +COPY --from=builder /app/services/rotor/dist/workspace-worker.mjs ./workspace-worker.mjs +# Copy node_modules with native deps (installed by build.mts) +# Workspace packages and pure JS deps are bundled into functions-server.mjs by esbuild +COPY --from=builder /app/services/rotor/dist/node_modules ./node_modules +COPY --from=builder /app/services/rotor/dist/package.json ./package.json + +ENV JITSU_VERSION_COMMIT_SHA=${JITSU_BUILD_COMMIT_SHA} +ENV JITSU_VERSION_DOCKER_TAG=${JITSU_BUILD_DOCKER_TAG} +ENV 
JITSU_VERSION_STRING=${JITSU_BUILD_VERSION} +ENV NODE_ENV=production + +HEALTHCHECK CMD curl --fail http://localhost:3401/health || exit 1 + +ENTRYPOINT ["deno", "run", \ + "--allow-net", \ + "--allow-read", \ + "--allow-write=/tmp/jitsu-udf,/data", \ + "--allow-env", \ + "--allow-sys", \ + "--allow-ffi", \ + "--allow-run=/app/node_modules/@esbuild/linux-arm64/bin/esbuild,/app/node_modules/@esbuild/linux-x64/bin/esbuild,/app/node_modules/esbuild/bin/esbuild", \ + "--unstable-worker-options", \ + "--no-check", \ + "--v8-flags=--max-old-space-size=2048", \ + "functions-server.mjs"] diff --git a/build-fs.sh b/build-fs.sh new file mode 100755 index 000000000..456527309 --- /dev/null +++ b/build-fs.sh @@ -0,0 +1,19 @@ +#!/bin/bash +set -e + +DATE_TAG=$(date +"%Y%m%d%H%M") +IMAGE="jitsucom/fs:dev-${DATE_TAG}" + +echo "Building functions-server image..." +docker buildx build \ + --target functions-server \ + --progress=plain \ + --load \ + -t "$IMAGE" \ + -f all.Dockerfile \ + . + +echo "Loading image into minikube..." 
+minikube image load --overwrite=true "$IMAGE" + +echo "Done: $IMAGE" diff --git a/builder.Dockerfile b/builder.Dockerfile index 0c963595b..fb0ebad41 100644 --- a/builder.Dockerfile +++ b/builder.Dockerfile @@ -3,10 +3,15 @@ FROM node:24-bookworm-slim # Install Node.js 24 manually from NodeSource + all runtime dependencies # This includes everything needed for building AND running the final images RUN apt-get update && \ - apt-get install -y ca-certificates gnupg git curl telnet python3 g++ make jq nano cron bash netcat-traditional procps && \ + apt-get install -y ca-certificates gnupg git curl telnet python3 g++ make jq nano cron bash netcat-traditional procps unzip && \ rm -rf /var/lib/apt/lists/* && \ npm -g install pnpm@10 && \ - npm cache clean --force + npm cache clean --force && \ + ARCH=$(uname -m) && \ + curl -fsSL "https://github.com/denoland/deno/releases/latest/download/deno-${ARCH}-unknown-linux-gnu.zip" -o /tmp/deno.zip && \ + unzip -o /tmp/deno.zip -d /usr/local/bin && \ + chmod +x /usr/local/bin/deno && \ + rm /tmp/deno.zip #print current user RUN whoami && echo "Current user is $(whoami)" diff --git a/bulker/operator/operator.go b/bulker/operator/operator.go index 4bf3c167c..eda8940a1 100644 --- a/bulker/operator/operator.go +++ b/bulker/operator/operator.go @@ -1119,54 +1119,70 @@ func (o *Operator) buildDeploymentFromData(data *DeploymentData) *appsv1.Deploym } volumes := make([]corev1.Volume, 0) volumeMounts := make([]corev1.VolumeMount, 0) - - // Mount connections ConfigMaps as parts - // Files are stored with keys like ${workspaceId}__connections.json.gz - for i := 0; i < data.ConnectionsConfigMapCount; i++ { - volName := fmt.Sprintf("connections-%d", i) - cmName := fmt.Sprintf("%s%s-%d", data.DeploymentID, connectionsCMSuffix, i) - - volumes = append(volumes, corev1.Volume{ - Name: volName, - VolumeSource: corev1.VolumeSource{ - ConfigMap: &corev1.ConfigMapVolumeSource{ - LocalObjectReference: corev1.LocalObjectReference{ - Name: cmName, + 
initVolumeMounts := make([]corev1.VolumeMount, 0) + // Free tier: init container copies ConfigMap parts into writable emptyDir (multiple workspaces merged). + // Dedicated/premium: ConfigMaps mounted directly into /data (no init container needed). + useCopyInit := data.FunctionsClass == FunctionsClassFree + + // Add ConfigMap volumes for connections and functions + type cmVolConfig struct { + count int + prefix string // volume name prefix ("connections" or "functions") + suffix string // ConfigMap name suffix + subdir string // subdirectory under mount base ("connections" or "functions") + } + for _, cfg := range []cmVolConfig{ + {data.ConnectionsConfigMapCount, "connections", connectionsCMSuffix, "connections"}, + {data.FunctionsConfigMapCount, "functions", functionsCMSuffix, "functions"}, + } { + for i := 0; i < cfg.count; i++ { + volName := fmt.Sprintf("%s-%d", cfg.prefix, i) + cmName := fmt.Sprintf("%s%s-%d", data.DeploymentID, cfg.suffix, i) + + volumes = append(volumes, corev1.Volume{ + Name: volName, + VolumeSource: corev1.VolumeSource{ + ConfigMap: &corev1.ConfigMapVolumeSource{ + LocalObjectReference: corev1.LocalObjectReference{ + Name: cmName, + }, }, }, - }, - }) - - // Mount connections ConfigMaps to /data/connections/part-{n} - volumeMounts = append(volumeMounts, corev1.VolumeMount{ - Name: volName, - MountPath: fmt.Sprintf("/data/connections/part-%d", i), - ReadOnly: true, - }) + }) + + if useCopyInit { + // Init container reads from /config-src, copies to writable /data + initVolumeMounts = append(initVolumeMounts, corev1.VolumeMount{ + Name: volName, + MountPath: fmt.Sprintf("/config-src/%s/part-%d", cfg.subdir, i), + ReadOnly: true, + }) + } else { + // Mount directly into /data for the main container + volumeMounts = append(volumeMounts, corev1.VolumeMount{ + Name: volName, + MountPath: fmt.Sprintf("/data/%s/part-%d", cfg.subdir, i), + ReadOnly: true, + }) + } + } } - // Add volumes for functions ConfigMaps - // Functions are stored with keys 
like ${workspaceId}__${functionId}.json.gz - for i := 0; i < data.FunctionsConfigMapCount; i++ { - volName := fmt.Sprintf("functions-%d", i) - cmName := fmt.Sprintf("%s%s-%d", data.DeploymentID, functionsCMSuffix, i) - + if useCopyInit { + // Writable emptyDir for merged config data volumes = append(volumes, corev1.Volume{ - Name: volName, + Name: "config-data", VolumeSource: corev1.VolumeSource{ - ConfigMap: &corev1.ConfigMapVolumeSource{ - LocalObjectReference: corev1.LocalObjectReference{ - Name: cmName, - }, - }, + EmptyDir: &corev1.EmptyDirVolumeSource{}, }, }) - - // Mount functions ConfigMaps to /data/functions/part-{n} + initVolumeMounts = append(initVolumeMounts, corev1.VolumeMount{ + Name: "config-data", + MountPath: "/data", + }) volumeMounts = append(volumeMounts, corev1.VolumeMount{ - Name: volName, - MountPath: fmt.Sprintf("/data/functions/part-%d", i), - ReadOnly: true, + Name: "config-data", + MountPath: "/data", }) } @@ -1216,6 +1232,31 @@ func (o *Operator) buildDeploymentFromData(data *DeploymentData) *appsv1.Deploym }, } + // Init container: only needed for free tier to copy/merge ConfigMap parts into writable emptyDir + initContainers := []corev1.Container{} + if useCopyInit { + initCopyScript := `#!/bin/sh +set -e +mkdir -p /data/connections /data/functions +# Copy connections from all parts +for dir in /config-src/connections/part-*; do + [ -d "$dir" ] && cp "$dir"/* /data/connections/ 2>/dev/null || true +done +# Copy functions from all parts +for dir in /config-src/functions/part-*; do + [ -d "$dir" ] && cp "$dir"/* /data/functions/ 2>/dev/null || true +done +echo "Config data copied to /data" +ls -la /data/connections/ /data/functions/ 2>/dev/null || true +` + initContainers = append(initContainers, corev1.Container{ + Name: "copy-config", + Image: "busybox:1.37", + Command: []string{"sh", "-c", initCopyScript}, + VolumeMounts: initVolumeMounts, + }) + } + // Build containers list containers := []corev1.Container{} @@ -1351,6 +1392,7 @@ 
func (o *Operator) buildDeploymentFromData(data *DeploymentData) *appsv1.Deploym sec60 := int64(60) podSpec := corev1.PodSpec{ TerminationGracePeriodSeconds: &sec60, + InitContainers: initContainers, Containers: containers, Volumes: volumes, NodeSelector: nodeSelector, diff --git a/libs/destination-functions/src/functions/bulker-destination.ts b/libs/destination-functions/src/functions/bulker-destination.ts index 055a955da..c7b7c085b 100644 --- a/libs/destination-functions/src/functions/bulker-destination.ts +++ b/libs/destination-functions/src/functions/bulker-destination.ts @@ -63,7 +63,7 @@ export type DataLayoutImpl = ( ) => MappedEvent[] | MappedEvent; export function jitsuLegacy(event: AnalyticsServerEvent, ctx: FullContext): MappedEvent { - const flat = toJitsuClassic(event, ctx); + const flat = toJitsuClassic(event, ctx, true); return { event: omit(flat, JitsuInternalProperties), table: event[TableNameParameter] ?? "events" }; } diff --git a/libs/functions/__tests__/classic-mapping.test.ts b/libs/functions/__tests__/classic-mapping.test.ts index 24b830764..f4f81da8a 100644 --- a/libs/functions/__tests__/classic-mapping.test.ts +++ b/libs/functions/__tests__/classic-mapping.test.ts @@ -1,7 +1,7 @@ import { AnalyticsServerEvent } from "@jitsu/protocols/analytics"; import type { Event as JitsuLegacyEvent } from "@jitsu/sdk-js"; -import { FullContext, UserAgent } from "@jitsu/protocols/functions"; -import { fromJitsuClassic, removeUndefined, TableNameParameter, toJitsuClassic, toSnakeCase } from "../src"; +import { FullContext } from "@jitsu/protocols/functions"; +import { fromJitsuClassic, toJitsuClassic } from "../src"; import { classicEvents } from "./data/classic-events"; const identify: AnalyticsServerEvent = { @@ -264,14 +264,22 @@ const legacyPageExpectedWarehouse = { }; test("legacy event s3", () => { - const identifyLegacyResult = toJitsuClassic(identify, { - props: { keepOriginalNames: true }, - destination: { type: "s3" }, - } as unknown as 
FullContext); - const pageLegacyResult = toJitsuClassic(page, { - props: { keepOriginalNames: true }, - destination: { type: "s3" }, - } as unknown as FullContext); + const identifyLegacyResult = toJitsuClassic( + identify, + { + props: { keepOriginalNames: true }, + destination: { type: "s3" }, + } as unknown as FullContext, + true + ); + const pageLegacyResult = toJitsuClassic( + page, + { + props: { keepOriginalNames: true }, + destination: { type: "s3" }, + } as unknown as FullContext, + true + ); console.log(JSON.stringify(identifyLegacyResult, null, 2)); expect(identifyLegacyResult).toStrictEqual(legacyIdentifyExpectedS3); @@ -280,14 +288,22 @@ test("legacy event s3", () => { }); test("legacy event warehouse", () => { - const identifyLegacyResult = toJitsuClassic(identify, { - props: { keepOriginalNames: true }, - destination: { type: "postgres" }, - } as unknown as FullContext); - const pageLegacyResult = toJitsuClassic(page, { - props: { keepOriginalNames: true }, - destination: { type: "postgres" }, - } as unknown as FullContext); + const identifyLegacyResult = toJitsuClassic( + identify, + { + props: { keepOriginalNames: true }, + destination: { type: "postgres" }, + } as unknown as FullContext, + true + ); + const pageLegacyResult = toJitsuClassic( + page, + { + props: { keepOriginalNames: true }, + destination: { type: "postgres" }, + } as unknown as FullContext, + true + ); console.log(JSON.stringify(identifyLegacyResult, null, 2)); expect(identifyLegacyResult).toStrictEqual(legacyIdentifyExpectedWarehouse); @@ -307,7 +323,8 @@ test("classic events mapping", () => { const restored = fromJitsuClassic(event); const mapped = toJitsuClassic( restored as AnalyticsServerEvent, - { props: { keepOriginalNames: true }, destination: { type: "s3" } } as unknown as FullContext + { props: { keepOriginalNames: true }, destination: { type: "s3" } } as unknown as FullContext, + true ); delete mapped.anon_ip; expect(mapped).toStrictEqual(event); diff --git 
a/libs/functions/src/lib/functions.ts b/libs/functions/src/lib/functions.ts index bbfe53235..5e0a2d504 100644 --- a/libs/functions/src/lib/functions.ts +++ b/libs/functions/src/lib/functions.ts @@ -96,15 +96,18 @@ function anonymizeIp(ip: string | undefined) { } } -export function toJitsuClassic(event: AnalyticsServerEvent, ctx: FullContext): AnyEvent { - const keepOriginalNames = !!ctx.props.keepOriginalNames; - const fileStorage = ctx.destination.type === "s3" || ctx.destination.type === "gcs"; - let transferFunc = transferAsSnakeCase; - if (keepOriginalNames) { - if (fileStorage) { - transferFunc = transfer; - } else { - transferFunc = transferAsClassic; +export function toJitsuClassic(event: AnalyticsServerEvent, ctx: FullContext, bulker: boolean = false): AnyEvent { + let transferFunc = transfer; + if (bulker) { + const keepOriginalNames = !!ctx?.props?.keepOriginalNames; + const fileStorage = ctx?.destination?.type === "s3" || ctx?.destination?.type === "gcs"; + transferFunc = transferAsSnakeCase; + if (keepOriginalNames) { + if (fileStorage) { + transferFunc = transfer; + } else { + transferFunc = transferAsClassic; + } } } let url: URL | undefined = undefined; diff --git a/libs/jitsu-js/package.json b/libs/jitsu-js/package.json index 5169f9fa5..a686aea33 100644 --- a/libs/jitsu-js/package.json +++ b/libs/jitsu-js/package.json @@ -28,7 +28,7 @@ "devDependencies": { "@jitsu/common-config": "workspace:*", "tslib": "catalog:", - "@playwright/test": "^1.57.0", + "@playwright/test": "1.58.2", "esbuild": "catalog:", "tsx": "catalog:", "@segment/analytics-next": "^1.75.0", diff --git a/libs/jitsu-js/src/analytics-plugin.ts b/libs/jitsu-js/src/analytics-plugin.ts index 18aff31c0..8cfb09886 100644 --- a/libs/jitsu-js/src/analytics-plugin.ts +++ b/libs/jitsu-js/src/analytics-plugin.ts @@ -473,7 +473,9 @@ export function ensureAnonymousId(opts: JitsuOptions): string | undefined { secure: window.location.protocol === "https:", }); if (opts.debug) { - console.log(`[JITSU 
DEBUG] preInitAnonymousId: created anonymous ID cookie '${cookieName}'=${id} on domain '${domain}'`); + console.log( + `[JITSU DEBUG] preInitAnonymousId: created anonymous ID cookie '${cookieName}'=${id} on domain '${domain}'` + ); } return id; } diff --git a/package.json b/package.json index 8853533a6..9b1bf4d36 100644 --- a/package.json +++ b/package.json @@ -38,7 +38,7 @@ "release:canary": "monorel --filter ./types/protocols --filter ./cli/jitsu-cli --filter ./libs/functions --filter ./libs/jitsu-js --filter ./libs/jitsu-react --version '1.10.5-canary.{rev}.{time}' --npm-tag canary --git-tag 'jitsu-js-libs-canary-v{version}' --push-tag" }, "devDependencies": { - "@playwright/test": "1.57.0", + "@playwright/test": "1.58.2", "@semantic-release/changelog": "^6.0.3", "@semantic-release/exec": "^6.0.3", "@semantic-release/git": "^10.0.1", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index a9cd72bf5..40bc538ff 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -66,8 +66,8 @@ importers: .: devDependencies: '@playwright/test': - specifier: 1.57.0 - version: 1.57.0 + specifier: 1.58.2 + version: 1.58.2 '@semantic-release/changelog': specifier: ^6.0.3 version: 6.0.3(semantic-release@23.1.1(typescript@5.6.3)) @@ -182,7 +182,7 @@ importers: version: 1.30.0 semver: specifier: ^7.5.4 - version: 7.7.2 + version: 7.7.3 tsx: specifier: 'catalog:' version: 4.21.0 @@ -266,7 +266,7 @@ importers: version: 2.8.0 zod: specifier: ^3.23.8 - version: 3.24.3 + version: 3.25.76 devDependencies: '@jitsu/common-config': specifier: workspace:* @@ -330,7 +330,7 @@ importers: version: 7.22.0 zod: specifier: ^3.23.8 - version: 3.24.3 + version: 3.25.76 devDependencies: '@jitsu/common-config': specifier: workspace:* @@ -403,8 +403,8 @@ importers: specifier: workspace:* version: link:../common-config '@playwright/test': - specifier: ^1.57.0 - version: 1.57.0 + specifier: 1.58.2 + version: 1.58.2 '@segment/analytics-next': specifier: ^1.75.0 version: 1.75.0(encoding@0.1.13) @@ -643,7 +643,7 @@ 
importers: version: 15.1.3 semver: specifier: ^7.6.3 - version: 7.7.2 + version: 7.7.3 tar: specifier: ^7.4.3 version: 7.4.3 @@ -655,7 +655,7 @@ importers: version: 7.22.0 zod: specifier: ^3.23.8 - version: 3.24.3 + version: 3.25.76 devDependencies: '@jitsu/common-config': specifier: workspace:* @@ -680,10 +680,10 @@ importers: version: 0.0.152 '@typescript-eslint/eslint-plugin': specifier: ^8.20.0 - version: 8.47.0(@typescript-eslint/parser@8.47.0(eslint@9.39.1(jiti@2.4.2))(typescript@5.6.3))(eslint@9.39.1(jiti@2.4.2))(typescript@5.6.3) + version: 8.49.0(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3) '@typescript-eslint/parser': specifier: ^8.20.0 - version: 8.47.0(eslint@9.39.1(jiti@2.4.2))(typescript@5.6.3) + version: 8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3) '@vitest/ui': specifier: 'catalog:' version: 2.1.9(vitest@2.1.9) @@ -692,7 +692,7 @@ importers: version: 7.4.2 eslint: specifier: ^9.18.0 - version: 9.39.1(jiti@2.4.2) + version: 9.39.2(jiti@2.4.2) lodash: specifier: 'catalog:' version: 4.17.21 @@ -878,10 +878,10 @@ importers: version: 0.52.0 next: specifier: ^16.1.4 - version: 16.1.4(@babel/core@7.26.10)(@opentelemetry/api@1.9.0)(@playwright/test@1.57.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 16.1.4(@babel/core@7.26.10)(@opentelemetry/api@1.9.0)(@playwright/test@1.58.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) next-auth: specifier: ^4.24.13 - version: 4.24.13(next@16.1.4(@babel/core@7.26.10)(@opentelemetry/api@1.9.0)(@playwright/test@1.57.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(nodemailer@7.0.11)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 4.24.13(next@16.1.4(@babel/core@7.26.10)(@opentelemetry/api@1.9.0)(@playwright/test@1.58.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(nodemailer@7.0.11)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) node-cache: specifier: ^5.1.2 version: 5.1.2 @@ -938,10 +938,10 @@ importers: version: 
10.0.5(react@18.3.1) zod: specifier: ^3.23.8 - version: 3.24.3 + version: 3.25.76 zod-to-json-schema: specifier: ^3.23.2 - version: 3.23.5(zod@3.24.3) + version: 3.23.5(zod@3.25.76) devDependencies: '@ant-design/icons': specifier: ^5.5.1 @@ -1050,7 +1050,7 @@ importers: version: 2.1.9(@types/node@18.19.61)(@vitest/ui@2.1.9)(jsdom@16.7.0)(less@4.2.0)(terser@5.36.0) zod-prisma: specifier: ^0.5.4 - version: 0.5.4(decimal.js@10.4.3)(prisma@6.5.0(typescript@5.6.3))(zod@3.24.3) + version: 0.5.4(decimal.js@10.4.3)(prisma@6.5.0(typescript@5.6.3))(zod@3.25.76) webapps/ee-api: dependencies: @@ -1101,7 +1101,7 @@ importers: version: 4.17.21 next: specifier: ^16.1.4 - version: 16.1.4(@babel/core@7.26.10)(@opentelemetry/api@1.9.0)(@playwright/test@1.57.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 16.1.4(@babel/core@7.26.10)(@opentelemetry/api@1.9.0)(@playwright/test@1.58.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) pg: specifier: ^8.18.0 version: 8.18.0 @@ -1122,14 +1122,14 @@ importers: version: 2.8.0 zod: specifier: ^3.23.8 - version: 3.24.3 + version: 3.25.76 devDependencies: '@jitsu/common-config': specifier: workspace:* version: link:../../libs/common-config '@react-email/preview-server': specifier: ^5.0.6 - version: 5.0.6(@opentelemetry/api@1.9.0)(@playwright/test@1.57.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 5.0.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) '@types/crypto-js': specifier: ^4.1.1 version: 4.2.2 @@ -1210,7 +1210,7 @@ importers: version: 2.8.0 zod: specifier: ^3.23.8 - version: 3.24.3 + version: 3.25.76 devDependencies: '@jitsu/common-config': specifier: workspace:* @@ -2242,10 +2242,6 @@ packages: resolution: {integrity: sha512-Kr+LPIUVKz2qkx1HAMH8q1q6azbqBAsXJUxBl/ODDuVPX45Z9DfwB8tPjTi6nNZ8BuM3nbJxC5zCAg5elnBUTQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/js@9.39.1': - resolution: {integrity: 
sha512-S26Stp4zCy88tH94QbBv3XCuzRQiZ9yXofEILmglYTh/Ug/a9/umqvgFtYBAo3Lp0nsI/5/qH1CCrbdK3AP1Tw==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/js@9.39.2': resolution: {integrity: sha512-q1mjIoW1VX4IvSocvM/vbTiveKC4k9eLrajNEuSsmjymSDEbpGddtpfOoN7YGAqBK3NG+uqo8ia4PDTt8buCYA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -3035,8 +3031,8 @@ packages: resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} engines: {node: '>=14'} - '@playwright/test@1.57.0': - resolution: {integrity: sha512-6TyEnHgd6SArQO8UO2OMTxshln3QMWBtPGrOCgs3wVEmQmwyuNtB10IZMfmYDE0riwNR1cu4q+pPcxMVtaG3TA==} + '@playwright/test@1.58.2': + resolution: {integrity: sha512-akea+6bHYBBfA9uQqSYmlJXn61cTa+jbO87xVLCWbTqbWadRVmhxlXATaOjOgcBaWU4ePo0wB41KMFv3o35IXA==} engines: {node: '>=18'} hasBin: true @@ -4407,14 +4403,6 @@ packages: '@types/yargs@17.0.33': resolution: {integrity: sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==} - '@typescript-eslint/eslint-plugin@8.47.0': - resolution: {integrity: sha512-fe0rz9WJQ5t2iaLfdbDc9T80GJy0AeO453q8C3YCilnGozvOyCG5t+EZtg7j7D88+c3FipfP/x+wzGnh1xp8ZA==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - peerDependencies: - '@typescript-eslint/parser': ^8.47.0 - eslint: ^8.57.0 || ^9.0.0 - typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/eslint-plugin@8.49.0': resolution: {integrity: sha512-JXij0vzIaTtCwu6SxTh8qBc66kmf1xs7pI4UOiMDFVct6q86G0Zs7KRcEoJgY3Cav3x5Tq0MF5jwgpgLqgKG3A==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -4423,13 +4411,6 @@ packages: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/parser@8.47.0': - resolution: {integrity: sha512-lJi3PfxVmo0AkEY93ecfN+r8SofEqZNGByvHAI3GBLrvt1Cw6H5k1IM02nSzu0RfUafr2EvFSw0wAsZgubNplQ==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - peerDependencies: - eslint: ^8.57.0 || ^9.0.0 - typescript: '>=4.8.4 <6.0.0' - 
'@typescript-eslint/parser@8.49.0': resolution: {integrity: sha512-N9lBGA9o9aqb1hVMc9hzySbhKibHmB+N3IpoShyV6HyQYRGIhlrO5rQgttypi+yEeKsKI4idxC8Jw6gXKD4THA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -4437,45 +4418,22 @@ packages: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/project-service@8.47.0': - resolution: {integrity: sha512-2X4BX8hUeB5JcA1TQJ7GjcgulXQ+5UkNb0DL8gHsHUHdFoiCTJoYLTpib3LtSDPZsRET5ygN4qqIWrHyYIKERA==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - peerDependencies: - typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/project-service@8.49.0': resolution: {integrity: sha512-/wJN0/DKkmRUMXjZUXYZpD1NEQzQAAn9QWfGwo+Ai8gnzqH7tvqS7oNVdTjKqOcPyVIdZdyCMoqN66Ia789e7g==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/scope-manager@8.47.0': - resolution: {integrity: sha512-a0TTJk4HXMkfpFkL9/WaGTNuv7JWfFTQFJd6zS9dVAjKsojmv9HT55xzbEpnZoY+VUb+YXLMp+ihMLz/UlZfDg==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/scope-manager@8.49.0': resolution: {integrity: sha512-npgS3zi+/30KSOkXNs0LQXtsg9ekZ8OISAOLGWA/ZOEn0ZH74Ginfl7foziV8DT+D98WfQ5Kopwqb/PZOaIJGg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/tsconfig-utils@8.47.0': - resolution: {integrity: sha512-ybUAvjy4ZCL11uryalkKxuT3w3sXJAuWhOoGS3T/Wu+iUu1tGJmk5ytSY8gbdACNARmcYEB0COksD2j6hfGK2g==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - peerDependencies: - typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/tsconfig-utils@8.49.0': resolution: {integrity: sha512-8prixNi1/6nawsRYxet4YOhnbW+W9FK/bQPxsGB1D3ZrDzbJ5FXw5XmzxZv82X3B+ZccuSxo/X8q9nQ+mFecWA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/type-utils@8.47.0': - resolution: {integrity: sha512-QC9RiCmZ2HmIdCEvhd1aJELBlD93ErziOXXlHEZyuBo3tBiAZieya0HLIxp+DoDWlsQqDawyKuNEhORyku+P8A==} - engines: {node: ^18.18.0 || 
^20.9.0 || >=21.1.0} - peerDependencies: - eslint: ^8.57.0 || ^9.0.0 - typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/type-utils@8.49.0': resolution: {integrity: sha512-KTExJfQ+svY8I10P4HdxKzWsvtVnsuCifU5MvXrRwoP2KOlNZ9ADNEWWsQTJgMxLzS5VLQKDjkCT/YzgsnqmZg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -4483,33 +4441,16 @@ packages: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/types@8.47.0': - resolution: {integrity: sha512-nHAE6bMKsizhA2uuYZbEbmp5z2UpffNrPEqiKIeN7VsV6UY/roxanWfoRrf6x/k9+Obf+GQdkm0nPU+vnMXo9A==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/types@8.49.0': resolution: {integrity: sha512-e9k/fneezorUo6WShlQpMxXh8/8wfyc+biu6tnAqA81oWrEic0k21RHzP9uqqpyBBeBKu4T+Bsjy9/b8u7obXQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/typescript-estree@8.47.0': - resolution: {integrity: sha512-k6ti9UepJf5NpzCjH31hQNLHQWupTRPhZ+KFF8WtTuTpy7uHPfeg2NM7cP27aCGajoEplxJDFVCEm9TGPYyiVg==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - peerDependencies: - typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/typescript-estree@8.49.0': resolution: {integrity: sha512-jrLdRuAbPfPIdYNppHJ/D0wN+wwNfJ32YTAm10eJVsFmrVpXQnDWBn8niCSMlWjvml8jsce5E/O+86IQtTbJWA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/utils@8.47.0': - resolution: {integrity: sha512-g7XrNf25iL4TJOiPqatNuaChyqt49a/onq5YsJ9+hXeugK+41LVg7AxikMfM02PC6jbNtZLCJj6AUcQXJS/jGQ==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - peerDependencies: - eslint: ^8.57.0 || ^9.0.0 - typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/utils@8.49.0': resolution: {integrity: sha512-N3W7rJw7Rw+z1tRsHZbK395TWSYvufBXumYtEGzypgMUthlg0/hmCImeA8hgO2d2G4pd7ftpxxul2J8OdtdaFA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -4517,10 +4458,6 @@ packages: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/visitor-keys@8.47.0': - 
resolution: {integrity: sha512-SIV3/6eftCy1bNzCQoPmbWsRLujS8t5iDIZ4spZOBHqrM+yfX2ogg8Tt3PDTAVKw3sSCiUgg30uOAvK2r9zGjQ==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/visitor-keys@8.49.0': resolution: {integrity: sha512-LlKaciDe3GmZFphXIc79THF/YYBugZ7FS1pO581E/edlVVNbZKDy93evqmrfQ9/Y4uN0vVhX4iuchq26mK/iiA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -6210,16 +6147,6 @@ packages: resolution: {integrity: sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - eslint@9.39.1: - resolution: {integrity: sha512-BhHmn2yNOFA9H9JmmIVKJmd288g9hrVRDkdoIgRCRuSySRUHH7r/DI6aAXW9T1WwUuY3DFgrcaqB+deURBLR5g==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - hasBin: true - peerDependencies: - jiti: '*' - peerDependenciesMeta: - jiti: - optional: true - eslint@9.39.2: resolution: {integrity: sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -6728,9 +6655,6 @@ packages: graceful-fs@4.2.11: resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} - graphemer@1.4.0: - resolution: {integrity: sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==} - gtoken@5.3.2: resolution: {integrity: sha512-gkvEKREW7dXWF8NV8pVrKfW7WqReAmjjkMBh6lNCCGOM4ucS0r0YyXXl0r/9Yj8wcW/32ISkfc8h5mPTDbtifQ==} engines: {node: '>=10'} @@ -8899,13 +8823,13 @@ packages: pkg-types@2.2.0: resolution: {integrity: sha512-2SM/GZGAEkPp3KWORxQZns4M+WSeXbC2HEvmOIJe3Cmiv6ieAJvdVhDldtHqM5J1Y7MrR1XhkBT/rMlhh9FdqQ==} - playwright-core@1.57.0: - resolution: {integrity: sha512-agTcKlMw/mjBWOnD6kFZttAAGHgi/Nw0CZ2o6JqWSbMlI219lAFLZZCyqByTsvVAJq5XA5H8cA6PrvBRpBWEuQ==} + playwright-core@1.58.2: + resolution: {integrity: 
sha512-yZkEtftgwS8CsfYo7nm0KE8jsvm6i/PTgVtB8DL726wNf6H2IMsDuxCpJj59KDaxCtSnrWan2AeDqM7JBaultg==} engines: {node: '>=18'} hasBin: true - playwright@1.57.0: - resolution: {integrity: sha512-ilYQj1s8sr2ppEJ2YVadYBN0Mb3mdo9J0wQ+UuDhzYqURwSoW4n1Xs5vs7ORwgDGmyEh33tRMeS8KhdkMoLXQw==} + playwright@1.58.2: + resolution: {integrity: sha512-vA30H8Nvkq/cPBnNw4Q8TWz1EJyqgpuinBcHET0YVJVFldr8JDNiU9LaWAE1KqSkRYazuaBhTpB5ZzShOezQ6A==} engines: {node: '>=18'} hasBin: true @@ -9681,16 +9605,6 @@ packages: resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} hasBin: true - semver@7.6.3: - resolution: {integrity: sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==} - engines: {node: '>=10'} - hasBin: true - - semver@7.7.2: - resolution: {integrity: sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==} - engines: {node: '>=10'} - hasBin: true - semver@7.7.3: resolution: {integrity: sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==} engines: {node: '>=10'} @@ -10148,10 +10062,12 @@ packages: tar@6.2.1: resolution: {integrity: sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==} engines: {node: '>=10'} + deprecated: Old versions of tar are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me tar@7.4.3: resolution: {integrity: sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==} engines: {node: '>=18'} + deprecated: Old versions of tar are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. 
Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me tdigest@0.1.2: resolution: {integrity: sha512-+G0LLgjjo9BZX2MfdvPfH+MKLCrxlXSYec5DaPYP1fe6Iyhf0/fSmJ0bFiZ1F8BT6cGXl2LpltQptzjXKWEkKA==} @@ -10830,6 +10746,7 @@ packages: whatwg-encoding@3.1.1: resolution: {integrity: sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==} engines: {node: '>=18'} + deprecated: Use @exodus/bytes instead for a more spec-conformant and faster implementation whatwg-mimetype@2.3.0: resolution: {integrity: sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g==} @@ -11030,9 +10947,6 @@ packages: peerDependencies: zod: ^3.25.0 || ^4.0.0 - zod@3.24.3: - resolution: {integrity: sha512-HhY1oqzWCQWuUqvBFnsyrtZRhyPeR7SUGv+C4+MsisMuVfSPx8HpwWqH8tRahSlt6M3PiFAcoeFhZAqIXTxoSg==} - zod@3.25.76: resolution: {integrity: sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==} @@ -12315,11 +12229,6 @@ snapshots: '@esbuild/win32-x64@0.27.0': optional: true - '@eslint-community/eslint-utils@4.9.0(eslint@9.39.1(jiti@2.4.2))': - dependencies: - eslint: 9.39.1(jiti@2.4.2) - eslint-visitor-keys: 3.4.3 - '@eslint-community/eslint-utils@4.9.0(eslint@9.39.2(jiti@2.4.2))': dependencies: eslint: 9.39.2(jiti@2.4.2) @@ -12357,8 +12266,6 @@ snapshots: transitivePeerDependencies: - supports-color - '@eslint/js@9.39.1': {} - '@eslint/js@9.39.2': {} '@eslint/object-schema@2.1.7': {} @@ -13168,7 +13075,7 @@ snapshots: nopt: 5.0.0 npmlog: 5.0.1 rimraf: 3.0.2 - semver: 7.7.2 + semver: 7.7.3 tar: 6.2.1 transitivePeerDependencies: - encoding @@ -13353,9 +13260,9 @@ snapshots: '@pkgjs/parseargs@0.11.0': optional: true - '@playwright/test@1.57.0': + '@playwright/test@1.58.2': dependencies: - playwright: 1.57.0 + playwright: 1.58.2 '@pnpm/config.env-replace@1.1.0': {} @@ -13872,9 +13779,9 @@ snapshots: marked: 15.0.12 react: 18.3.1 - 
'@react-email/preview-server@5.0.6(@opentelemetry/api@1.9.0)(@playwright/test@1.57.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@react-email/preview-server@5.0.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': dependencies: - next: 16.0.7(@opentelemetry/api@1.9.0)(@playwright/test@1.57.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + next: 16.0.7(@opentelemetry/api@1.9.0)(@playwright/test@1.58.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) transitivePeerDependencies: - '@babel/core' - '@opentelemetry/api' @@ -14993,23 +14900,6 @@ snapshots: dependencies: '@types/yargs-parser': 21.0.3 - '@typescript-eslint/eslint-plugin@8.47.0(@typescript-eslint/parser@8.47.0(eslint@9.39.1(jiti@2.4.2))(typescript@5.6.3))(eslint@9.39.1(jiti@2.4.2))(typescript@5.6.3)': - dependencies: - '@eslint-community/regexpp': 4.12.1 - '@typescript-eslint/parser': 8.47.0(eslint@9.39.1(jiti@2.4.2))(typescript@5.6.3) - '@typescript-eslint/scope-manager': 8.47.0 - '@typescript-eslint/type-utils': 8.47.0(eslint@9.39.1(jiti@2.4.2))(typescript@5.6.3) - '@typescript-eslint/utils': 8.47.0(eslint@9.39.1(jiti@2.4.2))(typescript@5.6.3) - '@typescript-eslint/visitor-keys': 8.47.0 - eslint: 9.39.1(jiti@2.4.2) - graphemer: 1.4.0 - ignore: 7.0.5 - natural-compare: 1.4.0 - ts-api-utils: 2.1.0(typescript@5.6.3) - typescript: 5.6.3 - transitivePeerDependencies: - - supports-color - '@typescript-eslint/eslint-plugin@8.49.0(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3)': dependencies: '@eslint-community/regexpp': 4.12.1 @@ -15026,18 +14916,6 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/parser@8.47.0(eslint@9.39.1(jiti@2.4.2))(typescript@5.6.3)': - dependencies: - '@typescript-eslint/scope-manager': 8.47.0 - '@typescript-eslint/types': 8.47.0 - '@typescript-eslint/typescript-estree': 8.47.0(typescript@5.6.3) - 
'@typescript-eslint/visitor-keys': 8.47.0 - debug: 4.3.7(supports-color@5.5.0) - eslint: 9.39.1(jiti@2.4.2) - typescript: 5.6.3 - transitivePeerDependencies: - - supports-color - '@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3)': dependencies: '@typescript-eslint/scope-manager': 8.49.0 @@ -15050,15 +14928,6 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/project-service@8.47.0(typescript@5.6.3)': - dependencies: - '@typescript-eslint/tsconfig-utils': 8.49.0(typescript@5.6.3) - '@typescript-eslint/types': 8.49.0 - debug: 4.4.3 - typescript: 5.6.3 - transitivePeerDependencies: - - supports-color - '@typescript-eslint/project-service@8.49.0(typescript@5.6.3)': dependencies: '@typescript-eslint/tsconfig-utils': 8.49.0(typescript@5.6.3) @@ -15068,36 +14937,15 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/scope-manager@8.47.0': - dependencies: - '@typescript-eslint/types': 8.47.0 - '@typescript-eslint/visitor-keys': 8.47.0 - '@typescript-eslint/scope-manager@8.49.0': dependencies: '@typescript-eslint/types': 8.49.0 '@typescript-eslint/visitor-keys': 8.49.0 - '@typescript-eslint/tsconfig-utils@8.47.0(typescript@5.6.3)': - dependencies: - typescript: 5.6.3 - '@typescript-eslint/tsconfig-utils@8.49.0(typescript@5.6.3)': dependencies: typescript: 5.6.3 - '@typescript-eslint/type-utils@8.47.0(eslint@9.39.1(jiti@2.4.2))(typescript@5.6.3)': - dependencies: - '@typescript-eslint/types': 8.47.0 - '@typescript-eslint/typescript-estree': 8.47.0(typescript@5.6.3) - '@typescript-eslint/utils': 8.47.0(eslint@9.39.1(jiti@2.4.2))(typescript@5.6.3) - debug: 4.4.3 - eslint: 9.39.1(jiti@2.4.2) - ts-api-utils: 2.1.0(typescript@5.6.3) - typescript: 5.6.3 - transitivePeerDependencies: - - supports-color - '@typescript-eslint/type-utils@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3)': dependencies: '@typescript-eslint/types': 8.49.0 @@ -15110,26 +14958,8 @@ snapshots: 
transitivePeerDependencies: - supports-color - '@typescript-eslint/types@8.47.0': {} - '@typescript-eslint/types@8.49.0': {} - '@typescript-eslint/typescript-estree@8.47.0(typescript@5.6.3)': - dependencies: - '@typescript-eslint/project-service': 8.47.0(typescript@5.6.3) - '@typescript-eslint/tsconfig-utils': 8.47.0(typescript@5.6.3) - '@typescript-eslint/types': 8.47.0 - '@typescript-eslint/visitor-keys': 8.47.0 - debug: 4.4.3 - fast-glob: 3.3.2 - is-glob: 4.0.3 - minimatch: 9.0.5 - semver: 7.7.2 - ts-api-utils: 2.1.0(typescript@5.6.3) - typescript: 5.6.3 - transitivePeerDependencies: - - supports-color - '@typescript-eslint/typescript-estree@8.49.0(typescript@5.6.3)': dependencies: '@typescript-eslint/project-service': 8.49.0(typescript@5.6.3) @@ -15145,17 +14975,6 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/utils@8.47.0(eslint@9.39.1(jiti@2.4.2))(typescript@5.6.3)': - dependencies: - '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.1(jiti@2.4.2)) - '@typescript-eslint/scope-manager': 8.47.0 - '@typescript-eslint/types': 8.47.0 - '@typescript-eslint/typescript-estree': 8.47.0(typescript@5.6.3) - eslint: 9.39.1(jiti@2.4.2) - typescript: 5.6.3 - transitivePeerDependencies: - - supports-color - '@typescript-eslint/utils@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3)': dependencies: '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@2.4.2)) @@ -15167,11 +14986,6 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/visitor-keys@8.47.0': - dependencies: - '@typescript-eslint/types': 8.47.0 - eslint-visitor-keys: 4.2.1 - '@typescript-eslint/visitor-keys@8.49.0': dependencies: '@typescript-eslint/types': 8.49.0 @@ -16241,7 +16055,7 @@ snapshots: dot-prop: 10.1.0 env-paths: 3.0.0 json-schema-typed: 8.0.2 - semver: 7.7.2 + semver: 7.7.3 uint8array-extras: 1.5.0 confbox@0.2.2: {} @@ -17181,7 +16995,7 @@ snapshots: eslint: 9.39.2(jiti@2.4.2) eslint-import-resolver-node: 0.3.9 
eslint-import-resolver-typescript: 3.6.3(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint@9.39.2(jiti@2.4.2)))(eslint@9.39.2(jiti@2.4.2)) - eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint-import-resolver-typescript@3.6.3)(eslint@9.39.2(jiti@2.4.2)) + eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint-import-resolver-typescript@3.6.3(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint@9.39.2(jiti@2.4.2)))(eslint@9.39.2(jiti@2.4.2)))(eslint@9.39.2(jiti@2.4.2)) eslint-plugin-jsx-a11y: 6.10.2(eslint@9.39.2(jiti@2.4.2)) eslint-plugin-react: 7.37.2(eslint@9.39.2(jiti@2.4.2)) eslint-plugin-react-hooks: 7.0.1(eslint@9.39.2(jiti@2.4.2)) @@ -17233,13 +17047,13 @@ snapshots: debug: 4.4.3 enhanced-resolve: 5.17.1 eslint: 9.39.2(jiti@2.4.2) - eslint-module-utils: 2.12.1(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.3)(eslint@9.39.2(jiti@2.4.2)) + eslint-module-utils: 2.12.1(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.3(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint@9.39.2(jiti@2.4.2)))(eslint@9.39.2(jiti@2.4.2)))(eslint@9.39.2(jiti@2.4.2)) fast-glob: 3.3.2 get-tsconfig: 4.8.1 is-bun-module: 1.2.1 is-glob: 4.0.3 
optionalDependencies: - eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint-import-resolver-typescript@3.6.3)(eslint@9.39.2(jiti@2.4.2)) + eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint-import-resolver-typescript@3.6.3(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint@9.39.2(jiti@2.4.2)))(eslint@9.39.2(jiti@2.4.2)))(eslint@9.39.2(jiti@2.4.2)) transitivePeerDependencies: - '@typescript-eslint/parser' - eslint-import-resolver-node @@ -17265,7 +17079,7 @@ snapshots: - eslint-import-resolver-webpack - supports-color - eslint-module-utils@2.12.1(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.3)(eslint@9.39.2(jiti@2.4.2)): + eslint-module-utils@2.12.1(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.3(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint@9.39.2(jiti@2.4.2)))(eslint@9.39.2(jiti@2.4.2)))(eslint@9.39.2(jiti@2.4.2)): dependencies: debug: 3.2.7 optionalDependencies: @@ -17286,7 +17100,7 @@ snapshots: transitivePeerDependencies: - supports-color - eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint-import-resolver-typescript@3.6.3)(eslint@9.39.2(jiti@2.4.2)): + 
eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint-import-resolver-typescript@3.6.3(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint@9.39.2(jiti@2.4.2)))(eslint@9.39.2(jiti@2.4.2)))(eslint@9.39.2(jiti@2.4.2)): dependencies: '@rtsao/scc': 1.1.0 array-includes: 3.1.9 @@ -17297,7 +17111,7 @@ snapshots: doctrine: 2.1.0 eslint: 9.39.2(jiti@2.4.2) eslint-import-resolver-node: 0.3.9 - eslint-module-utils: 2.12.1(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.3)(eslint@9.39.2(jiti@2.4.2)) + eslint-module-utils: 2.12.1(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.3(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.49.0(eslint@9.39.2(jiti@2.4.2))(typescript@5.6.3))(eslint@9.39.2(jiti@2.4.2)))(eslint@9.39.2(jiti@2.4.2)))(eslint@9.39.2(jiti@2.4.2)) hasown: 2.0.2 is-core-module: 2.16.1 is-glob: 4.0.3 @@ -17414,47 +17228,6 @@ snapshots: eslint-visitor-keys@4.2.1: {} - eslint@9.39.1(jiti@2.4.2): - dependencies: - '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.1(jiti@2.4.2)) - '@eslint-community/regexpp': 4.12.1 - '@eslint/config-array': 0.21.1 - '@eslint/config-helpers': 0.4.2 - '@eslint/core': 0.17.0 - '@eslint/eslintrc': 3.3.3 - '@eslint/js': 9.39.1 - '@eslint/plugin-kit': 0.4.1 - '@humanfs/node': 0.16.7 - '@humanwhocodes/module-importer': 1.0.1 - '@humanwhocodes/retry': 0.4.3 - '@types/estree': 1.0.6 - ajv: 6.12.6 - chalk: 4.1.2 - cross-spawn: 7.0.6 - debug: 4.4.3 - escape-string-regexp: 4.0.0 - eslint-scope: 
8.4.0 - eslint-visitor-keys: 4.2.1 - espree: 10.4.0 - esquery: 1.6.0 - esutils: 2.0.3 - fast-deep-equal: 3.1.3 - file-entry-cache: 8.0.0 - find-up: 5.0.0 - glob-parent: 6.0.2 - ignore: 5.3.2 - imurmurhash: 0.1.4 - is-glob: 4.0.3 - json-stable-stringify-without-jsonify: 1.0.1 - lodash.merge: 4.6.2 - minimatch: 3.1.2 - natural-compare: 1.4.0 - optionator: 0.9.4 - optionalDependencies: - jiti: 2.4.2 - transitivePeerDependencies: - - supports-color - eslint@9.39.2(jiti@2.4.2): dependencies: '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@2.4.2)) @@ -18195,8 +17968,6 @@ snapshots: graceful-fs@4.2.11: {} - graphemer@1.4.0: {} - gtoken@5.3.2(encoding@0.1.13): dependencies: gaxios: 4.3.3(encoding@0.1.13) @@ -19366,7 +19137,7 @@ snapshots: lodash.isstring: 4.0.1 lodash.once: 4.1.1 ms: 2.1.3 - semver: 7.6.3 + semver: 7.7.3 jsprim@2.0.2: dependencies: @@ -20314,13 +20085,13 @@ snapshots: dependencies: '@segment/isodate': 1.0.3 - next-auth@4.24.13(next@16.1.4(@babel/core@7.26.10)(@opentelemetry/api@1.9.0)(@playwright/test@1.57.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(nodemailer@7.0.11)(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + next-auth@4.24.13(next@16.1.4(@babel/core@7.26.10)(@opentelemetry/api@1.9.0)(@playwright/test@1.58.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(nodemailer@7.0.11)(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: '@babel/runtime': 7.26.0 '@panva/hkdf': 1.2.1 cookie: 0.7.2 jose: 4.15.9 - next: 16.1.4(@babel/core@7.26.10)(@opentelemetry/api@1.9.0)(@playwright/test@1.57.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + next: 16.1.4(@babel/core@7.26.10)(@opentelemetry/api@1.9.0)(@playwright/test@1.58.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) oauth: 0.9.15 openid-client: 5.7.0 preact: 10.24.3 @@ -20331,7 +20102,7 @@ snapshots: optionalDependencies: nodemailer: 7.0.11 - next@16.0.7(@opentelemetry/api@1.9.0)(@playwright/test@1.57.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + 
next@16.0.7(@opentelemetry/api@1.9.0)(@playwright/test@1.58.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: '@next/env': 16.0.7 '@swc/helpers': 0.5.15 @@ -20350,13 +20121,13 @@ snapshots: '@next/swc-win32-arm64-msvc': 16.0.7 '@next/swc-win32-x64-msvc': 16.0.7 '@opentelemetry/api': 1.9.0 - '@playwright/test': 1.57.0 + '@playwright/test': 1.58.2 sharp: 0.34.5 transitivePeerDependencies: - '@babel/core' - babel-plugin-macros - next@16.1.4(@babel/core@7.26.10)(@opentelemetry/api@1.9.0)(@playwright/test@1.57.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + next@16.1.4(@babel/core@7.26.10)(@opentelemetry/api@1.9.0)(@playwright/test@1.58.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: '@next/env': 16.1.4 '@swc/helpers': 0.5.15 @@ -20376,7 +20147,7 @@ snapshots: '@next/swc-win32-arm64-msvc': 16.1.4 '@next/swc-win32-x64-msvc': 16.1.4 '@opentelemetry/api': 1.9.0 - '@playwright/test': 1.57.0 + '@playwright/test': 1.58.2 sharp: 0.34.5 transitivePeerDependencies: - '@babel/core' @@ -20429,7 +20200,7 @@ snapshots: ignore-by-default: 1.0.1 minimatch: 3.1.2 pstree.remy: 1.1.8 - semver: 7.7.2 + semver: 7.7.3 simple-update-notifier: 2.0.0 supports-color: 5.5.0 touch: 3.1.1 @@ -20893,11 +20664,11 @@ snapshots: exsolve: 1.0.7 pathe: 2.0.3 - playwright-core@1.57.0: {} + playwright-core@1.58.2: {} - playwright@1.57.0: + playwright@1.58.2: dependencies: - playwright-core: 1.57.0 + playwright-core: 1.58.2 optionalDependencies: fsevents: 2.3.2 @@ -21815,7 +21586,7 @@ snapshots: p-reduce: 3.0.0 read-package-up: 11.0.0 resolve-from: 5.0.0 - semver: 7.7.2 + semver: 7.7.3 semver-diff: 4.0.0 signale: 1.4.0 yargs: 17.7.2 @@ -21834,10 +21605,6 @@ snapshots: semver@6.3.1: {} - semver@7.6.3: {} - - semver@7.7.2: {} - semver@7.7.3: {} send@0.19.0: @@ -22002,7 +21769,7 @@ snapshots: simple-update-notifier@2.0.0: dependencies: - semver: 7.7.2 + semver: 7.7.3 sirv@2.0.4: dependencies: @@ -22638,7 +22405,7 @@ snapshots: json5: 2.2.3 lodash.memoize: 4.1.2 make-error: 
1.3.6 - semver: 7.7.2 + semver: 7.7.3 typescript: 5.6.3 yargs-parser: 21.1.1 optionalDependencies: @@ -22656,7 +22423,7 @@ snapshots: json5: 2.2.3 lodash.memoize: 4.1.2 make-error: 1.3.6 - semver: 7.6.3 + semver: 7.7.3 typescript: 5.6.3 yargs-parser: 21.1.1 optionalDependencies: @@ -23491,24 +23258,22 @@ snapshots: compress-commons: 4.1.2 readable-stream: 3.6.2 - zod-prisma@0.5.4(decimal.js@10.4.3)(prisma@6.5.0(typescript@5.6.3))(zod@3.24.3): + zod-prisma@0.5.4(decimal.js@10.4.3)(prisma@6.5.0(typescript@5.6.3))(zod@3.25.76): dependencies: '@prisma/generator-helper': 3.8.1 parenthesis: 3.1.8 prisma: 6.5.0(typescript@5.6.3) ts-morph: 13.0.3 - zod: 3.24.3 + zod: 3.25.76 optionalDependencies: decimal.js: 10.4.3 - zod-to-json-schema@3.23.5(zod@3.24.3): + zod-to-json-schema@3.23.5(zod@3.25.76): dependencies: - zod: 3.24.3 + zod: 3.25.76 zod-validation-error@4.0.2(zod@3.25.76): dependencies: zod: 3.25.76 - zod@3.24.3: {} - zod@3.25.76: {} diff --git a/services/rotor/__tests__/functions-chain.test.ts b/services/rotor/__tests__/functions-chain.test.ts index 9fb1371dc..801b1faf4 100644 --- a/services/rotor/__tests__/functions-chain.test.ts +++ b/services/rotor/__tests__/functions-chain.test.ts @@ -345,6 +345,7 @@ const messageId = "message1"; const testModes: Array<{ name: string; functionsClass: string }> = [ { name: "legacy", functionsClass: "legacy" }, { name: "free", functionsClass: "free" }, + { name: "dedicated", functionsClass: "dedicated" }, ]; describe.each(testModes)("Test Functions Chain ($name mode)", ({ name: modeName, functionsClass }) => { @@ -353,6 +354,7 @@ describe.each(testModes)("Test Functions Chain ($name mode)", ({ name: modeName, let lastError: any; const counters: Record = {}; let originalEnv: string | undefined; + const webhookServerPort = 3089 + (functionsClass === "free" ? 100 : functionsClass === "dedicated" ? 
200 : 0); function testName() { const currentTestName = expect.getState().currentTestName as string; @@ -366,7 +368,7 @@ describe.each(testModes)("Test Functions Chain ($name mode)", ({ name: modeName, originalEnv = process.env.FUNCTIONS_SERVER_URL_TEMPLATE; // Set up functions server for "free" mode - if (functionsClass === "free") { + if (functionsClass === "free" || functionsClass === "dedicated") { const configDir = path.join(os.tmpdir(), `rotor-test-${Date.now()}`); // Write test configs @@ -376,6 +378,8 @@ describe.each(testModes)("Test Functions Chain ($name mode)", ({ name: modeName, functions as unknown as Record ); + process.env.FUNCTIONS_CLASS = functionsClass; + // Start functions server const fsPort = 3457 + Math.floor(Math.random() * 100); functionsServer = await startTestFunctionsServer(configDir, fsPort); @@ -413,7 +417,7 @@ describe.each(testModes)("Test Functions Chain ($name mode)", ({ name: modeName, }; webhookServer = await createServer({ - port: 3089 + (functionsClass === "free" ? 100 : 0), // Use different port for each mode + port: webhookServerPort, https: false, handlers: { "/simple": handlerF("simple"), @@ -440,6 +444,7 @@ describe.each(testModes)("Test Functions Chain ($name mode)", ({ name: modeName, } else { delete process.env.FUNCTIONS_SERVER_URL_TEMPLATE; } + delete process.env.FUNCTIONS_CLASS; // Reset cache so original env value is restored resetServerEnvCache(); @@ -462,7 +467,6 @@ describe.each(testModes)("Test Functions Chain ($name mode)", ({ name: modeName, // Update connection URLs to use the correct webhook server port and add functionsClasses function getConnectionStoreForMode(): EntityStore { - const portOffset = functionsClass === "free" ? 
100 : 0; return { getObject: (id: string) => { const conn = connections[id]; @@ -472,7 +476,7 @@ describe.each(testModes)("Test Functions Chain ($name mode)", ({ name: modeName, ...conn, credentials: { ...conn.credentials, - url: conn.credentials.url.replace(":3089", `:${3089 + portOffset}`), + url: conn.credentials.url.replace(":3089", `:${webhookServerPort}`), }, options: { ...conn.options, @@ -489,7 +493,7 @@ describe.each(testModes)("Test Functions Chain ($name mode)", ({ name: modeName, ...conn, credentials: { ...conn.credentials, - url: conn.credentials.url.replace(":3089", `:${3089 + portOffset}`), + url: conn.credentials.url.replace(":3089", `:${webhookServerPort}`), }, options: { ...conn.options, diff --git a/services/rotor/__tests__/functions-server-helper.ts b/services/rotor/__tests__/functions-server-helper.ts index db671122f..b126f2c5e 100644 --- a/services/rotor/__tests__/functions-server-helper.ts +++ b/services/rotor/__tests__/functions-server-helper.ts @@ -78,16 +78,32 @@ export async function startTestFunctionsServer(configDir: string, port: number = const env = { ...process.env, PORT: String(port), + ROTOR_METRICS_PORT: String(port + 1), // Metrics on different port CONFIG_DIR: configDir, ROTOR_MODE: "functions", LOG_FORMAT: "text", }; - const serverProcess = spawn("npx", ["tsx", "src/functions-server.ts"], { - cwd: rotorDir, - env, - stdio: ["ignore", "pipe", "pipe"], - }); + const serverProcess = spawn( + "deno", + [ + "run", + "--allow-net", + "--allow-read", + "--allow-write", + "--allow-env", + "--allow-sys", + "--allow-ffi", + "--allow-run", + "--unstable-worker-options", + "dist/functions-server.mjs", + ], + { + cwd: rotorDir, + env, + stdio: ["ignore", "pipe", "pipe"], + } + ); // Collect output for debugging const stderrOutput: string[] = []; diff --git a/services/rotor/build.mts b/services/rotor/build.mts index 79e2afa10..bd004fc05 100644 --- a/services/rotor/build.mts +++ b/services/rotor/build.mts @@ -11,11 +11,98 @@ const nativeDeps 
= { esbuild: "0.27.0", "@jitsu/functions-lib": "2.14.0-beta.19", mongodb: "6.12.0", + "prom-client": "15.1.3", }; +// External deps for Deno functions-server. +// Only runtime-compiled code and native binaries stay external. +// Everything else (mongodb, prom-client, workspace packages, etc.) is bundled by esbuild. +const denoExternalDeps: Record = { + esbuild: "0.27.0", // Native binary — used at runtime for UDF compilation + "@jitsu/functions-lib": "2.14.0-beta.19", // Needs to match version used by UDF IIFE builds +}; + +// MongoDB's optional peer deps — loaded via try/catch require() in deps.js. +// Must be external so esbuild doesn't try to resolve them at build time. +const mongoOptionalPeers = [ + "@mongodb-js/zstd", + "kerberos", + "@aws-sdk/credential-providers", + "gcp-metadata", + "snappy", + "socks", + "aws4", + "mongodb-client-encryption", +]; + +const denoExternalModules = [...Object.keys(denoExternalDeps), ...mongoOptionalPeers]; + // pg-native is optional for pg package, mark as external but don't install const externalModules = [...Object.keys(nativeDeps), "pg-native"]; +// Node built-in modules that must use "node:" prefix for Deno compatibility. +// esbuild's platform: "node" normally bundles these as bare require("fs") etc., +// but Deno requires the "node:" prefix. This plugin rewrites them to external "node:*" imports. 
+const nodeBuiltins = [ + "assert", + "buffer", + "child_process", + "cluster", + "console", + "constants", + "crypto", + "dgram", + "dns", + "domain", + "events", + "fs", + "fs/promises", + "http", + "http2", + "https", + "inspector", + "module", + "net", + "os", + "path", + "perf_hooks", + "process", + "punycode", + "querystring", + "readline", + "repl", + "stream", + "string_decoder", + "sys", + "timers", + "tls", + "tty", + "url", + "util", + "v8", + "vm", + "wasi", + "worker_threads", + "zlib", +]; + +function denoNodePrefixPlugin(): esbuild.Plugin { + return { + name: "deno-node-prefix", + setup(build) { + // Match bare Node built-in imports (without node: prefix) + const filter = new RegExp(`^(${nodeBuiltins.map(m => m.replace("/", "\\/")).join("|")})$`); + build.onResolve({ filter }, args => { + return { path: `node:${args.path}`, external: true }; + }); + // Also pass through already-prefixed imports + build.onResolve({ filter: /^node:/ }, args => { + return { path: args.path, external: true }; + }); + }, + }; +} + // Bundle the app esbuild .build({ @@ -31,30 +118,44 @@ esbuild logLevel: "info", }) .then(() => { + // Deno functions-server (ESM format). + // Only native deps are externalized. + // Everything else (workspace packages, pure JS/ESM, prom-client) is bundled by esbuild. + // The banner polyfills require() via createRequire so that CJS packages bundled + // into ESM (which esbuild converts to __require() calls) work under Deno. 
return esbuild.build({ entryPoints: ["./src/functions-server.ts"], bundle: true, platform: "node", - target: "node20", - format: "cjs", - outfile: "./dist/functions-server.js", + target: "es2022", + format: "esm", + outfile: "./dist/functions-server.mjs", sourcemap: false, minify: false, - external: externalModules, + external: denoExternalModules, + plugins: [denoNodePrefixPlugin()], + banner: { + js: 'import { createRequire } from "node:module"; const require = createRequire(import.meta.url);', + }, logLevel: "info", }); }) .then(() => { + // Deno workspace worker (ESM – runs in Web Worker sandbox with permissions: "none") return esbuild.build({ - entryPoints: ["./src/lib/udf-worker.ts"], + entryPoints: ["./src/lib/workspace-worker.ts"], bundle: true, platform: "node", - target: "node20", - format: "cjs", - outfile: "./dist/udf-worker.js", + target: "es2022", + format: "esm", + outfile: "./dist/workspace-worker.mjs", sourcemap: false, - minify: true, - external: externalModules, + minify: false, + external: [], + plugins: [denoNodePrefixPlugin()], + banner: { + js: 'import { createRequire } from "node:module"; const require = createRequire(import.meta.url);', + }, logLevel: "info", }); }) diff --git a/services/rotor/entrypoint.sh b/services/rotor/entrypoint.sh index 1f493f797..2e4ee12db 100755 --- a/services/rotor/entrypoint.sh +++ b/services/rotor/entrypoint.sh @@ -1,8 +1,17 @@ #!/bin/sh if [ "$ROTOR_MODE" = "functions" ]; then - echo "Running in function-server mode" - exec node --no-node-snapshot --max-old-space-size=2048 functions-server.js + echo "Running in function-server mode (Deno)" + exec deno run \ + --allow-net \ + --allow-read \ + --allow-write=/tmp/jitsu-udf \ + --allow-env \ + --allow-sys \ + --allow-ffi \ + --allow-run=/app/node_modules/@esbuild/linux-arm64/bin/esbuild,/app/node_modules/@esbuild/linux-x64/bin/esbuild,/app/node_modules/esbuild/bin/esbuild \ + --unstable-worker-options \ + functions-server.mjs else echo "Running in default mode" 
exec node --no-node-snapshot --max-old-space-size=2048 main.js diff --git a/services/rotor/package.json b/services/rotor/package.json index f01819a5c..d4e10d354 100644 --- a/services/rotor/package.json +++ b/services/rotor/package.json @@ -15,7 +15,7 @@ "start": "dotenv -e ../../.env.local -- node dist/main.js", "rotor:dev": "tsx --env-file-if-exists=../../.env --env-file-if-exists=../../.env.local --watch src/index.ts", "rotor:profile": "tsx --env-file-if-exists=../../.env --env-file-if-exists=../../.env.local --watch --inspect src/index.ts", - "test": "vitest run", + "test": "pnpm build && vitest run", "lint": "eslint src/**/*.ts", "lint:fix": "eslint src/**/*.ts --fix" }, diff --git a/services/rotor/src/functions-server.ts b/services/rotor/src/functions-server.ts index fda56b67f..3cc680a7a 100644 --- a/services/rotor/src/functions-server.ts +++ b/services/rotor/src/functions-server.ts @@ -1,4 +1,3 @@ -import http from "http"; import path from "path"; import fs from "fs"; import os from "os"; @@ -7,47 +6,62 @@ import { promisify } from "util"; import { AnyEvent, EventContext, - FuncReturn, FullContext, + FuncReturn, + FunctionMetrics, JitsuFunction, TTLStore, - FunctionMetrics, } from "@jitsu/protocols/functions"; import Prometheus from "prom-client"; - -const gunzip = promisify(zlib.gunzip); import { disableService, getLog, isTruish, LogLevel, parseNumber, setServerJsonFormat, stopwatch } from "juava"; import { + createMemoryStore, EnrichedConnectionConfig, - FunctionConfig, - isDropResult, + EntityStore, FuncChainResult, - FunctionExecRes, + FunctionConfig, FunctionExecLog, + FunctionExecRes, + isDropResult, makeFetch, - EntityStore, - createMemoryStore, + parseUserAgent, StoreMetrics, } from "@jitsu/core-functions-lib"; import { getServerEnv } from "./serverEnv"; -import { DropRetryErrorName, RetryErrorName, NoRetryErrorName, NoRetryError, RetryError } from "@jitsu/functions-lib"; -import { mongodb, createMongoStore } from "./lib/mongodb"; +import { 
DropRetryErrorName, NoRetryErrorName, RetryError, RetryErrorName } from "@jitsu/functions-lib"; +import { createMongoStore, mongodb } from "./lib/mongodb"; import { warehouseQuery } from "./lib/warehouse-store"; import { parse as semverParse } from "semver"; import * as jsondiffpatch from "jsondiffpatch"; import isEqual from "lodash/isEqual"; import { IngestMessage } from "@jitsu/protocols/async-request"; -import { parseUserAgent } from "@jitsu/core-functions-lib"; import type { MongoClient } from "mongodb"; -import { Agent, setGlobalDispatcher } from "undici"; -import { runUdfInWorker } from "./lib/udf-worker-runner"; -import { compileUdfFunction } from "./lib/udf-shared"; +import { compileUdfFunction, compileUdfToIIFE } from "./lib/udf-shared"; +import type { + ExecMessage, + InitMessage, + ProxyResponseMessage, + ResultMessage, + StrippedConnectionConfig, + WorkerConnectionInit, + WorkerFunctionInit, + WorkerToMainMessage, +} from "./lib/worker-protocol"; +import { runUdfInWorker } from "./lib/worker-udf-runner"; -setGlobalDispatcher( - new Agent({ - connections: 500, // per origin - }) -); +const gunzip = promisify(zlib.gunzip); + +// Configure Deno's HTTP client connection pool for proxied UDF fetch calls +// @ts-ignore +if (typeof Deno !== "undefined") { + // @ts-ignore + const httpClient = (Deno as any).createHttpClient({ + poolMaxIdlePerHost: 100, + poolIdleTimeout: 120_000, + }); + const originalFetch = globalThis.fetch; + globalThis.fetch = (input: any, init?: any) => originalFetch(input, { ...init, client: httpClient }); +} const env = getServerEnv(); const deploymentId = env.DEPLOYMENT_ID || os.hostname(); @@ -160,7 +174,7 @@ function setupMongoPoolMetrics(client: MongoClient) { } catch (_) { // ignore - topology may not be ready } - }, 5000).unref(); + }, 5000); } const metricsPort = parseInt(env.ROTOR_METRICS_PORT || "9091"); @@ -173,12 +187,6 @@ type LoadedFunction = { }; type FunctionChainContext = { - // log: { - // info: (ctx: FunctionContext, 
message: string, ...args: any[]) => void | Promise; - // warn: (ctx: FunctionContext, message: string, ...args: any[]) => void | Promise; - // debug: (ctx: FunctionContext, message: string, ...args: any[]) => void | Promise; - // error: (ctx: FunctionContext, message: string, ...args: any[]) => void | Promise; - // }; store: TTLStore; query: (conId: string, query: string, params?: any) => Promise; metrics?: FunctionMetrics; @@ -188,6 +196,7 @@ type FunctionChainContext = { type FunctionChain = { context: FunctionChainContext; connectionId: string; + connection: StrippedConnectionConfig; functions: LoadedFunction[]; }; @@ -281,7 +290,6 @@ async function loadFunctionsFromDir(dir: string, functions: Map): Promise { if (!fs.existsSync(dir)) return; @@ -355,7 +363,34 @@ async function loadConfigsFromFiles(configDir: string): Promise<{ return { connections, functions }; } -// Build function chain for a connection (UDF functions only) +// Clear all contents of a directory (files and subdirectories) +async function clearDirectory(dir: string, label: string): Promise { + try { + if (!fs.existsSync(dir)) return; + const entries = fs.readdirSync(dir, { withFileTypes: true }); + for (const entry of entries) { + const fullPath = path.join(dir, entry.name); + if (entry.isDirectory()) { + fs.rmSync(fullPath, { recursive: true, force: true }); + } else { + fs.unlinkSync(fullPath); + } + } + log.atInfo().log(`Cleared ${label}: ${dir} (${entries.length} entries removed)`); + } catch (e: any) { + log.atWarn().log(`Failed to clear ${label} (${dir}): ${e.message}`); + } +} + +// Strip credentials from connection config (safe to store in chain / send to worker) +function stripConnection(connection: EnrichedConnectionConfig): StrippedConnectionConfig { + const { credentials, credentialsHash, ...connWithoutCreds } = connection; + const strippedOptions = { ...connWithoutCreds.options }; + delete strippedOptions.functionsEnv; + return { ...connWithoutCreds, options: strippedOptions }; +} + 
+// Build function chain for a connection (UDF functions only) — runs in main process async function buildFunctionChain( conEntityStore: EntityStore, connection: EnrichedConnectionConfig, @@ -424,17 +459,23 @@ async function buildFunctionChain( store = createMemoryStore({}); } + const isFreeClass = env.FUNCTIONS_CLASS === "free"; const chainCtx: FunctionChainContext = { store, - query: async (conId: string, query: string, params: any) => { - return warehouseQuery(connection.workspaceId, conEntityStore, conId, query, params, storeMetrics); - }, + query: isFreeClass + ? async () => { + throw new Error("Warehouse queries are not available on the free plan. Please upgrade to use this feature."); + } + : async (conId: string, query: string, params: any) => { + return warehouseQuery(connection.workspaceId, conEntityStore, conId, query, params, storeMetrics); + }, connectionOptions: connectionData, }; return { context: chainCtx, connectionId: connection.id, + connection: stripConnection(connection), functions: funcs, }; } @@ -444,6 +485,18 @@ type FuncChainResultWithLogs = FuncChainResult & { logs: LogEntry[]; }; +// Unified runtime interface — both in-process chains and worker-backed execution implement this +interface FunctionRuntime { + runChain( + connectionId: string, + event: AnyEvent, + eventContext: EventContext, + fetchTimeoutMs: number + ): Promise>; + /** Returns the stripped connection config (used for actorId/streamId lookup) */ + getConnection(): StrippedConnectionConfig; +} + // Deep copy helper (same as legacy udf-wrapper) function deepCopy(o: T): T { if (typeof o !== "object") { @@ -613,13 +666,48 @@ async function runChain( return { connectionId: chain.connectionId, events, execLog, logs }; } -function safeCloseResponse(res: Response) { - try { - if (res?.body && !res.bodyUsed) { - res.body.cancel?.(); - } - } catch (_) { - // ignore +// ── FunctionRuntime implementations ────────────────────────────────── + +class InProcessRuntime implements 
FunctionRuntime { + constructor(private chain: FunctionChain) {} + + async runChain( + connectionId: string, + event: AnyEvent, + eventContext: EventContext, + fetchTimeoutMs: number + ): Promise> { + const result = await runChain(this.chain, event, eventContext, fetchTimeoutMs); + const totalMs = result.execLog.reduce((sum, e) => sum + (e.ms || 0), 0); + log.atDebug().log(`← ${connectionId} (${this.chain.functions.length} functions) completed in ${totalMs}ms`); + return result; + } + + getConnection(): StrippedConnectionConfig { + return this.chain.connection; + } +} + +class WorkerRuntime implements FunctionRuntime { + constructor(private ws: WorkspaceWorker, private connection: StrippedConnectionConfig) {} + + async runChain( + connectionId: string, + event: AnyEvent, + eventContext: EventContext, + fetchTimeoutMs: number + ): Promise> { + const resultMsg = await execInWorker(this.ws, connectionId, event, eventContext, fetchTimeoutMs); + return { + connectionId: resultMsg.connectionId, + events: resultMsg.events, + execLog: resultMsg.execLog, + logs: resultMsg.logs.map((l: any) => ({ ...l, timestamp: new Date(l.timestamp) })), + }; + } + + getConnection(): StrippedConnectionConfig { + return this.connection; } } @@ -659,28 +747,22 @@ function mapDiff(originalEvent: AnyEvent, newEvents?: AnyEvent[]) { }); } -// Parse request body -async function parseBody(req: http.IncomingMessage): Promise { - return new Promise((resolve, reject) => { - let body = ""; - req.on("data", chunk => { - body += chunk.toString(); - }); - req.on("end", () => { - try { - resolve(body ? 
JSON.parse(body) : {}); - } catch (e) { - reject(new Error("Invalid JSON body")); - } - }); - req.on("error", reject); - }); +// Parse request body (plain JSON only; gzip decompression is currently disabled — see commented-out branch below) +async function parseBody(req: Request): Promise { + // const encoding = req.headers.get("content-encoding"); + // if (encoding === "gzip") { + // const buffer = await req.arrayBuffer(); + // const decompressed = await gunzip(Buffer.from(buffer)); + // return JSON.parse(decompressed.toString()); + // } + const text = await req.text(); + return text ? JSON.parse(text) : {}; } // Create event context from IngestMessage and connection (compatible with FunctionsHandlerMulti) function createEventContextFromMessage( message: IngestMessage, - connection: EnrichedConnectionConfig, + connection: StrippedConnectionConfig, retries: number = 0 ): EventContext { return { @@ -711,6 +793,144 @@ function createEventContextFromMessage( }; } +// ── Workspace Worker management (Deno Web Workers with permissions: "none") ── + +type WorkspaceWorker = { + worker: Worker; + pending: Map void; reject: (e: Error) => void }>; + ready: Promise; +}; + +function getWorkerUrl(): string { + return new URL("./workspace-worker.mjs", import.meta.url).href; +} + +function createWorkspaceWorker( + workspaceId: string, + connections: WorkerConnectionInit[], + store: TTLStore, + conEntityStore: EntityStore +): WorkspaceWorker { + const worker = new Worker(getWorkerUrl(), { + type: "module", + // @ts-ignore Deno-specific option for sandboxing + deno: { permissions: "none" }, + }); + + const pendingExec = new Map void; reject: (e: Error) => void }>(); + let readyResolve: () => void; + const readyPromise = new Promise(resolve => { + readyResolve = resolve; + }); + + worker.onmessage = async (e: MessageEvent) => { + const msg = e.data; + + if (msg.type === "ready") { + readyResolve!(); + return; + } + + if (msg.type === "result") { + const p = pendingExec.get(msg.requestId); + if (p) { + pendingExec.delete(msg.requestId); 
+ p.resolve(msg); + } + return; + } + + if (msg.type === "log") { + return; // fire-and-forget + } + + if (msg.type === "debug") { + log.atInfo().log(`[Worker ${workspaceId} DEBUG] ${JSON.stringify(msg.value)}`); + return; // fire-and-forget + } + + if (msg.type === "proxyRequest") { + const { callId, method, args } = msg; + try { + let result: any; + if (method.startsWith("store.")) { + const op = method.split(".")[1]; + result = await (store as any)[op](...args); + } else if (method === "fetch") { + const [connectionId, url, init] = args; + const fetchImpl = makeFetch( + connectionId, + { log() {}, close() {}, deadLetter() {} }, + "debug", + parseNumber(env.FETCH_TIMEOUT_MS, 2000) + ); + const res = await fetchImpl(url, init); + const headers: Record = {}; + res.headers.forEach((v: string, k: string) => { + headers[k] = v; + }); + result = { + status: res.status, + statusText: res.statusText, + ok: res.ok, + url: res.url, + type: res.type, + redirected: res.redirected, + headers, + body: await res.text(), + }; + } else if (method === "warehouse.query") { + const [destinationId, sql, params] = args; + const storeMetrics: StoreMetrics = { + storeStatus: (ns, op, st) => promStoreStatuses.labels(deploymentId, ns, op, st).inc(), + warehouseStatus: (id, tbl, st, ms) => + promWarehouseStatuses.labels(deploymentId, id, tbl, st).observe(ms / 1000), + }; + result = await warehouseQuery(workspaceId, conEntityStore, destinationId, sql, params, storeMetrics); + } + const response: ProxyResponseMessage = { type: "proxyResponse", callId, result }; + worker.postMessage(response); + } catch (err: any) { + const response: ProxyResponseMessage = { type: "proxyResponse", callId, error: err.message }; + worker.postMessage(response); + } + } + }; + + worker.onerror = e => { + log.atError().log(`Worker error for workspace ${workspaceId}: ${e.message}`); + }; + + // Send init message + const initMsg: InitMessage = { type: "init", connections }; + worker.postMessage(initMsg); + + return 
{ worker, pending: pendingExec, ready: readyPromise }; +} + +async function execInWorker( + ws: WorkspaceWorker, + connectionId: string, + event: AnyEvent, + eventContext: EventContext, + fetchTimeoutMs: number +): Promise { + await ws.ready; + const requestId = crypto.randomUUID(); + return new Promise((resolve, reject) => { + ws.pending.set(requestId, { resolve, reject }); + const execMsg: ExecMessage = { + type: "exec", + requestId, + connectionId, + event, + eventContext: JSON.parse(JSON.stringify(eventContext)), + fetchTimeoutMs, + }; + ws.worker.postMessage(execMsg); + }); +} + async function main() { if (env.MONGODB_URL) { const mongoClient = await mongodb.waitInit(); @@ -740,77 +960,135 @@ async function main() { log.atWarn().log("No connections found"); } - // Function chains cache - stores promises to avoid parallel builds for the same connection - let chains = new Map>(); + const runtimes = new Map(); + const activeWorkers: { id: string; worker: Worker }[] = []; // for graceful shutdown + const isFreeClass = env.FUNCTIONS_CLASS === "free"; - // Prebuild function chains for all connections at startup (for non-free tier servers) - // This ensures UDF compilation happens during startup rather than on first request - const functionsClass = env.FUNCTIONS_CLASS; - if (functionsClass && functionsClass !== "free" && connections.size > 0) { - log - .atInfo() - .log(`Prebuilding function chains for ${connections.size} connections (functions class: ${functionsClass})...`); - const prebuildStart = Date.now(); + if (isFreeClass) { + // Free tier: compile UDFs to IIFE strings and spawn one Deno Web Worker per workspace. + // Workers run with permissions: "none" — all I/O is proxied back to the main process. 
+ const workspaceConnections = new Map(); for (const [connectionId, connection] of connections) { - await buildFunctionChain(conEntityStore, connection, functions) - .then(chain => { - log.atInfo().log(`✓ Prebuilt chain for connection: ${connectionId} (${chain.functions.length} functions)`); - chains.set(connectionId, Promise.resolve(chain)); - }) - .catch(e => { - log.atError().log(`✗ Failed to prebuild chain for ${connectionId}: ${e.message}`); - chains.set(connectionId, Promise.resolve(undefined)); - }); + const wsId = connection.workspaceId; + if (!workspaceConnections.has(wsId)) { + workspaceConnections.set(wsId, []); + } + + const connectionData = connection.options as any; + const udfs = (connectionData?.functions || []).filter((f: any) => f.functionId.startsWith("udf.")); + const workerFuncs: WorkerFunctionInit[] = []; + + for (const f of udfs) { + const functionId = f.functionId.substring(4); + const funcConfig = functions.get(functionId); + if (funcConfig && funcConfig.code) { + try { + const iifeCode = await compileUdfToIIFE(funcConfig.code, functionId, connectionData.functionsEnv); + workerFuncs.push({ id: f.functionId, iifeCode }); + log.atDebug().log(` ✓ Compiled UDF to IIFE: ${functionId}`); + } catch (e: any) { + log.atError().log(` ✗ Failed to compile UDF ${functionId}: ${e.message}`); + const errorIife = `var __udf = { default: async function() { throw new Error(${JSON.stringify( + e.message + )}); } };`; + workerFuncs.push({ id: f.functionId, iifeCode: errorIife }); + } + } else { + const msg = `Function ${functionId} not found or has no code`; + log.atWarn().log(msg); + const errorIife = `var __udf = { default: async function() { throw new Error(${JSON.stringify(msg)}); } };`; + workerFuncs.push({ id: f.functionId, iifeCode: errorIife }); + } + } + + workspaceConnections.get(wsId)!.push({ + connectionId, + connection: stripConnection(connection), + functions: workerFuncs, + warehouseEnabled: false, + debugTill: connectionData?.debugTill, + 
fetchLogLevel: connectionData?.fetchLogLevel, + props: connectionData?.functionsEnv || {}, + }); } - const prebuildMs = Date.now() - prebuildStart; - log.atInfo().log(`Prebuilt ${chains.size} function chains in ${prebuildMs}ms`); - } + // Spawn one worker per workspace, register each connection to a WorkerRuntime + for (const [wsId, conns] of workspaceConnections) { + log.atInfo().log(`Spawning worker for workspace ${wsId} (${conns.length} connections)`); + const storeMetrics: StoreMetrics = { + storeStatus: (ns, op, st) => promStoreStatuses.labels(deploymentId, ns, op, st).inc(), + warehouseStatus: (id, tbl, st, ms) => + promWarehouseStatuses.labels(deploymentId, id, tbl, st).observe(ms / 1000), + }; + const store = env.MONGODB_URL + ? createMongoStore(wsId, mongodb, false, isTruish(env.FAST_STORE), storeMetrics) + : createMemoryStore({}); - // Get or build chain for a connection (lazy loading with single-flight pattern) - async function getOrBuildChain(connectionId: string): Promise { - const connection = connections.get(connectionId); - if (!connection) { - return undefined; + try { + const ws = createWorkspaceWorker(wsId, conns, store, conEntityStore); + activeWorkers.push({ id: wsId, worker: ws.worker }); + for (const conn of conns) { + const connectionConfig = connections.get(conn.connectionId); + runtimes.set(conn.connectionId, new WorkerRuntime(ws, stripConnection(connectionConfig!))); + } + } catch (e: any) { + log.atError().log(`Failed to spawn worker for workspace ${wsId}: ${e.message}`); + } } - const cached = chains.get(connectionId); - if (cached) { - return cached; + log.atInfo().log(`Spawned ${activeWorkers.length} workspace workers`); + } else { + // Non-free: prebuild function chains in main process + if (connections.size > 0) { + log.atInfo().log(`Prebuilding function chains for ${connections.size} connections...`); + const prebuildStart = Date.now(); + + for (const [connectionId, connection] of connections) { + try { + const chain = await 
buildFunctionChain(conEntityStore, connection, functions); + runtimes.set(connectionId, new InProcessRuntime(chain)); + log.atInfo().log(`✓ Prebuilt chain for connection: ${connectionId} (${chain.functions.length} functions)`); + } catch (e: any) { + log.atError().log(`✗ Failed to prebuild chain for ${connectionId}: ${e.message}`); + } + } + + const prebuildMs = Date.now() - prebuildStart; + log.atInfo().log(`Prebuilt ${runtimes.size} function chains in ${prebuildMs}ms`); } + } - const buildPromise = buildFunctionChain(conEntityStore, connection, functions) - .then(chain => { - log.atInfo().log(`✓ Built chain for connection: ${connectionId} (${chain.functions.length} functions)`); - return chain; - }) - .catch(e => { - log.atError().log(`✗ Failed to build chain for ${connectionId}: ${e.message}`); - return undefined; - }); + // Functions map is no longer needed after prebuilding (code is compiled into chains/workers) + functions.clear(); + log.atInfo().log(`Cleared functions map`); - chains.set(connectionId, buildPromise); - return buildPromise; + if (isFreeClass) { + // await clearDirectory(configDir, "CONFIG_DIR"); + // await clearDirectory(UDF_TEMP_DIR, "UDF_TEMP_DIR"); + connections.clear(); + log.atInfo().log(`Cleared connections map (free deployment)`); } // HTTP response helpers - function sendJson(res: http.ServerResponse, status: number, data: any): void { - res.writeHead(status, { "Content-Type": "application/json" }); - res.end(JSON.stringify(data)); + function jsonResponse(status: number, data: any, headers?: Record): Response { + return new Response(JSON.stringify(data), { + status, + headers: { "Content-Type": "application/json", ...headers }, + }); } - function sendError(res: http.ServerResponse, status: number, error: string): void { - sendJson(res, status, { error }); + function errorResponse(status: number, error: string, headers?: Record): Response { + return jsonResponse(status, { error }, headers); } // Health check handler: GET /health or GET 
/ - function handleHealth(res: http.ServerResponse): void { - sendJson(res, 200, { + function handleHealth(): Response { + return jsonResponse(200, { status: "ok", configDir, connections: Array.from(connections.keys()), - cachedChains: Array.from(chains.keys()), + runtimes: Array.from(runtimes.keys()), }); } @@ -820,83 +1098,56 @@ async function main() { // Query params: // - ids: comma-separated connection IDs (required) // - fullEvents: if "true", return full events instead of diffs - async function handleMulti(req: http.IncomingMessage, res: http.ServerResponse, url: URL): Promise { + async function handleMulti(req: Request, url: URL): Promise<{ response: Response; actorId: string }> { if (req.method !== "POST") { - sendError(res, 405, "Method not allowed. Use POST."); - return ""; + return { response: errorResponse(405, "Method not allowed. Use POST."), actorId: "" }; } const connectionIds = (url.searchParams.get("ids") ?? "").split(",").filter(id => !!id); const fullEvents = url.searchParams.get("fullEvents") === "true"; if (connectionIds.length === 0) { - sendError(res, 400, "No connection IDs provided. Use ?ids=conn1,conn2,..."); - return ""; + return { response: errorResponse(400, "No connection IDs provided. 
Use ?ids=conn1,conn2,..."), actorId: "" }; } // actorId = streamId of first connection (for metrics) - const firstConnection = connections.get(connectionIds[0]); - const actorId = firstConnection?.streamId || connectionIds[0] || ""; + const firstRuntime = runtimes.get(connectionIds[0]); + const actorId = firstRuntime?.getConnection()?.streamId || connectionIds[0] || ""; const message = (await parseBody(req)) as IngestMessage; - - // Extract event from IngestMessage (handle classic format conversion) const event = message.httpPayload; - - // Ensure event has context if (!event.context) { event.context = {}; } - type StrictFuncChainResult = Required; - // Process all connections in parallel - const promises = connectionIds.map(async connectionId => { - const connection = connections.get(connectionId); - if (!connection) { - log.atError().log(`[multi] Connection '${connectionId}' not found`); - return { - connectionId, - execLog: [ - { - error: { message: `Connection '${connectionId}' not found`, name: NoRetryErrorName }, - ms: 0, - eventIndex: 0, - functionId: "", - }, - ], - logs: [], - events: [], - } as StrictFuncChainResult; - } + type StrictFuncChainResult = Required; - const chain = await getOrBuildChain(connectionId); - if (!chain) { - log.atError().log(`[multi] Failed to build chain for connection '${connectionId}'`); - return { - connectionId, - execLog: [ - { - error: { message: "Internal Functions Error: please contact support", name: NoRetryErrorName }, - ms: 0, - eventIndex: 0, - functionId: "", - }, - ], - events: [], - logs: [], - } as StrictFuncChainResult; - } + const timeoutHeader = req.headers.get("x-request-timeout-ms"); + const functionsFetchTimeout = timeoutHeader + ? 
parseNumber(timeoutHeader, 2000) + : parseNumber(env.FETCH_TIMEOUT_MS, 2000); - // Create EventContext from IngestMessage (same as message-handler.ts) - const eventContext = createEventContextFromMessage(message, connection, 0); - const functionsFetchTimeout = req.headers["x-request-timeout-ms"] - ? parseNumber(req.headers["x-request-timeout-ms"] as string, 2000) - : parseNumber(env.FETCH_TIMEOUT_MS, 2000); + // Process all connections in parallel + const promises = connectionIds.map(async (connectionId): Promise => { try { - const result = await runChain(chain, event, eventContext, functionsFetchTimeout); - const totalMs = result.execLog.reduce((sum, e) => sum + (e.ms || 0), 0); - log.atDebug().log(`← ${connectionId} (${chain.functions.length} functions) completed in ${totalMs}ms`); - return result; + const runtime = runtimes.get(connectionId); + if (!runtime) { + return { + connectionId, + events: [], + execLog: [ + { + error: { message: `Connection '${connectionId}' not found`, name: NoRetryErrorName }, + ms: 0, + eventIndex: 0, + functionId: "", + }, + ], + logs: [], + } as StrictFuncChainResult; + } + const eventContext = createEventContextFromMessage(message, runtime.getConnection()); + return await runtime.runChain(connectionId, event, eventContext, functionsFetchTimeout); } catch (e: any) { const errorMessage = `${e.name}: ${e.message}`; log.atError().log(`[multi] Error processing connection ${connectionId}: ${errorMessage}`); @@ -913,7 +1164,7 @@ async function main() { // Build response with events and execLog // Map connectionId -> { events, execLog } - const response = Object.fromEntries( + const responseBody = Object.fromEntries( results.map(result => { recordChainResultMetrics(result); return [ @@ -927,39 +1178,22 @@ async function main() { }) ); - sendJson(res, 200, response); - return actorId; + return { response: jsonResponse(200, responseBody), actorId }; } // Single connection handler: POST /connection/ async function handleConnection( - req: 
http.IncomingMessage, - res: http.ServerResponse, + req: Request, connectionId: string - ): Promise { - const connection = connections.get(connectionId); - if (!connection) { - sendError(res, 404, `Connection '${connectionId}' not found`); - return connectionId; - } - - const chain = await getOrBuildChain(connectionId); - if (!chain) { - sendError(res, 500, `Failed to build chain for connection '${connectionId}'`); - return connectionId; - } - + ): Promise<{ response: Response; actorId: string }> { if (req.method !== "POST") { - sendError(res, 405, "Method not allowed. Use POST."); - return connectionId; + return { response: errorResponse(405, "Method not allowed. Use POST."), actorId: connectionId }; } const body = await parseBody(req); - const event = body.event as AnyEvent; const eventContext = body.context as EventContext; - // Parse receivedAt from string if needed (JSON serialization converts Date to string) if (eventContext?.receivedAt && typeof eventContext.receivedAt === "string") { eventContext.receivedAt = new Date(eventContext.receivedAt); } @@ -967,59 +1201,57 @@ async function main() { eventContext.destination.updatedAt = new Date(eventContext.destination.updatedAt); } - const functionsFetchTimeout = req.headers["x-request-timeout-ms"] - ? parseNumber(req.headers["x-request-timeout-ms"] as string, 2000) + const timeoutHeader = req.headers.get("x-request-timeout-ms"); + const functionsFetchTimeout = timeoutHeader + ? 
parseNumber(timeoutHeader, 2000) : parseNumber(env.FETCH_TIMEOUT_MS, 2000); - const result = await runChain(chain, event, eventContext, functionsFetchTimeout); + const runtime = runtimes.get(connectionId); + if (!runtime) { + return { response: errorResponse(404, `Connection '${connectionId}' not found`), actorId: connectionId }; + } + const result = await runtime.runChain(connectionId, event, eventContext, functionsFetchTimeout); recordChainResultMetrics(result); - - const totalMs = result.execLog.reduce((sum, e) => sum + (e.ms || 0), 0); - log.atDebug().log(`← ${connectionId} (${chain.functions.length} functions) completed in ${totalMs}ms`); - - sendJson(res, 200, result); - return connectionId; + return { response: jsonResponse(200, result), actorId: connectionId }; } - // Create HTTP server + // Create HTTP server using Deno.serve let isShuttingDown = false; - const server = http.createServer(async (req, res) => { - res.setHeader("Access-Control-Allow-Origin", "*"); - res.setHeader("Access-Control-Allow-Methods", "GET, POST, OPTIONS"); - res.setHeader("Access-Control-Allow-Headers", "Content-Type"); - // During shutdown, tell clients to close connections so they reconnect to healthy pods + // @ts-ignore + const server = (Deno as any).serve({ port, hostname: "0.0.0.0" }, async (req: Request): Promise => { + let extraHeaders: Record = {}; + if (isShuttingDown) { - res.setHeader("Connection", "close"); + extraHeaders = { Connection: "close" }; } if (req.method === "OPTIONS") { - res.writeHead(204); - res.end(); - return; + return new Response(null, { status: 204, headers: extraHeaders }); } - const url = new URL(req.url || "/", `http://localhost:${port}`); + const url = new URL(req.url); const pathname = url.pathname; try { // Health check if (pathname === "/health" || pathname === "/") { - handleHealth(res); - return; + return handleHealth(); } - // Determine endpoint label for metrics const endpoint = pathname === "/multi" ? 
"multi" : pathname.startsWith("/connection/") ? "connection" : "other"; const sw = stopwatch(); promConcurrentRequests.labels(deploymentId, endpoint).inc(); let actorId = ""; + let status = 200; try { // Multi connection handler if (pathname === "/multi") { - actorId = await handleMulti(req, res, url); - return; + const result = await handleMulti(req, url); + actorId = result.actorId; + status = result.response.status; + return result.response; } // UDF test runner @@ -1038,49 +1270,52 @@ async function main() { ); } result.backend = "functions-server"; - sendJson(res, 200, result); - return; + return jsonResponse(200, result); } // Single connection handler const match = pathname.match(/^\/connection\/([^\/]+)$/); if (match) { - actorId = await handleConnection(req, res, match[1]); - return; + const result = await handleConnection(req, match[1]); + actorId = result.actorId; + status = result.response.status; + return result.response; } // Not found - sendError(res, 404, "Not found. Use /connection/, /multi?ids=conn1,conn2,..., or /udfrun"); + status = 404; + return errorResponse(404, "Not found. Use /connection/, /multi?ids=conn1,conn2,..., or /udfrun"); } finally { promConcurrentRequests.labels(deploymentId, endpoint).dec(); promRequestDuration.labels(deploymentId, endpoint, actorId).observe(sw.elapsedMs()); - promRequestCount.labels(deploymentId, endpoint, actorId, String(res.statusCode || 200)).inc(); + promRequestCount.labels(deploymentId, endpoint, actorId, String(status)).inc(); } } catch (e: any) { log.atError().log(`Error processing request:`, e); - sendError(res, 500, e.message); + return errorResponse(500, e.message); } }); - server.listen(port, () => { - log.atInfo().log(`Server running at http://localhost:${port}`); - log.atInfo().log(`Available connections: ${connections.size}`); - }); + log.atInfo().log(`Server running at http://localhost:${port}`); + log.atInfo().log(`Runtimes: ${runtimes.size} (mode: ${isFreeClass ? 
"worker" : "in-process"})`); // Metrics HTTP server (separate port, same as rotor) - const metricsServer = http.createServer(async (req, res) => { - if (req.url === "/metrics") { - res.writeHead(200, { "Content-Type": Prometheus.register.contentType }); - const result = await Prometheus.register.metrics(); - res.end(result); - } else { - res.writeHead(404); - res.end(); + // @ts-ignore + const metricsServer = (Deno as any).serve( + { port: metricsPort, hostname: "0.0.0.0" }, + async (req: Request): Promise => { + if (req.url.endsWith("/metrics")) { + const result = await Prometheus.register.metrics(); + return new Response(result, { + status: 200, + headers: { "Content-Type": Prometheus.register.contentType }, + }); + } + return new Response(null, { status: 404 }); } - }); - metricsServer.listen(metricsPort, () => { - log.atInfo().log(`Metrics server running at http://localhost:${metricsPort}/metrics`); - }); + ); + + log.atInfo().log(`Metrics server running at http://localhost:${metricsPort}/metrics`); // Graceful shutdown handler const shutdown = (signal: string) => { @@ -1096,17 +1331,18 @@ async function main() { setTimeout(() => { log.atWarn().log(`Forcing exit after ${forceExitTimeout}ms timeout`); process.exit(1); - }, forceExitTimeout).unref(); + }, forceExitTimeout); // wait some seconds for connections to drain before shutting down metrics server const extraDelay = env.SHUTDOWN_EXTRA_DELAY_SEC ? 
1000 * parseInt(env.SHUTDOWN_EXTRA_DELAY_SEC) : 5000; setTimeout(() => { - // Stop accepting new connections - server.close(err => { - if (err) { - log.atError().log(`Error during server close:`, err); - process.exit(1); - } + // Terminate all workspace workers + for (const { id, worker } of activeWorkers) { + worker.terminate(); + log.atInfo().log(`Terminated worker for workspace ${id}`); + } + + server.shutdown().then(() => { log.atInfo().log(`Server closed, all connections drained`); process.exit(0); }); @@ -1118,6 +1354,6 @@ async function main() { } main().catch(e => { - log.atError().log("Fatal error:", e); + log.atError().withCause(e).log("Fatal error"); process.exit(1); }); diff --git a/services/rotor/src/lib/rotor.ts b/services/rotor/src/lib/rotor.ts index a7084cbeb..83cfe5c71 100644 --- a/services/rotor/src/lib/rotor.ts +++ b/services/rotor/src/lib/rotor.ts @@ -263,6 +263,7 @@ export function kafkaRotor(cfg: KafkaRotorConfig): KafkaRotor { if (metrics) { metrics.close(); } + await producer?.flush({ timeout: 20000 }); await producer?.disconnect(); if (interval) { clearInterval(interval); diff --git a/services/rotor/src/lib/udf-shared.ts b/services/rotor/src/lib/udf-shared.ts index e3daf63d7..31c619b08 100644 --- a/services/rotor/src/lib/udf-shared.ts +++ b/services/rotor/src/lib/udf-shared.ts @@ -1,6 +1,6 @@ -import path from "path"; -import os from "os"; -import fsp from "fs/promises"; +import path from "node:path"; +import os from "node:os"; +import fsp from "node:fs/promises"; import * as esbuild from "esbuild"; // Whitelist of packages that UDF code is allowed to import (will be bundled) @@ -26,9 +26,9 @@ export function createWhitelistPlugin(allowedPackages: string[]): esbuild.Plugin return null; } - // Node built-ins - mark as external (available at runtime) + // Node built-ins - mark as external with node: prefix (required by Deno) if (NODE_BUILTINS.includes(packageName)) { - return { path: args.path, external: true }; + return { path: 
`node:${args.path}`, external: true }; } // Everything else - error @@ -157,3 +157,71 @@ export async function compileUdfToFile( return tempFile; } + +// Virtual module that provides @jitsu/functions-lib exports from globalThis. +// Used in IIFE builds where the real package can't be resolved (platform: "neutral"). +// Classes (RetryError, NoRetryError) are set on globalThis by the worker before UDF evaluation. +// toJitsuClassic/fromJitsuClassic are rarely used by UDFs; stub with clear error. +const FUNCTIONS_LIB_SHIM = ` +export const RetryError = globalThis.RetryError; +export const NoRetryError = globalThis.NoRetryError; +export const TableNameParameter = globalThis.TableNameParameter; +export const DropRetryErrorName = "Drop & RetryError"; +export const RetryErrorName = "RetryError"; +export const NoRetryErrorName = "NoRetryError"; +export const toJitsuClassic = globalThis.toJitsuClassic; +export const fromJitsuClassic = globalThis.fromJitsuClassic; +`; + +// esbuild plugin that resolves @jitsu/functions-lib to a virtual module +// providing exports from globalThis (set by the worker before UDF evaluation). +function functionsLibShimPlugin(): esbuild.Plugin { + return { + name: "functions-lib-shim", + setup(build) { + build.onResolve({ filter: /^@jitsu\/functions-lib$/ }, () => ({ + path: "@jitsu/functions-lib", + namespace: "functions-lib-shim", + })); + build.onLoad({ filter: /.*/, namespace: "functions-lib-shim" }, () => ({ + contents: FUNCTIONS_LIB_SHIM, + loader: "js", + })); + }, + }; +} + +// Compile UDF to an IIFE code string for use inside Deno Web Workers. +// The result is a self-contained string that, when evaluated via +// `new Function(iifeCode + "\nreturn __udf;")()`, +// returns an object with { default: , config?: ... }. +// +// Unlike compileUdfToFile, this does NOT write to disk – the code string +// is sent to the worker via postMessage. 
+export async function compileUdfToIIFE(code: string, functionId: string, env: any): Promise { + const envs = `var process = { env: ${JSON.stringify(env || {})} };\n`; + const fullCode = envs + code; + + const result = await esbuild.build({ + stdin: { + contents: fullCode, + loader: "js", + resolveDir: process.cwd(), + }, + bundle: true, + write: false, + format: "iife", + globalName: "__udf", + platform: "node", + target: "es2022", + plugins: [functionsLibShimPlugin(), createWhitelistPlugin(ALLOWED_PACKAGES)], + logLevel: "silent", + }); + + if (result.errors.length > 0) { + const errorMessages = result.errors.map(e => e.text).join("\n"); + throw new Error(`Failed to compile function ${functionId}:\n${errorMessages}`); + } + + return result.outputFiles[0].text; +} diff --git a/services/rotor/src/lib/udf-worker-runner.ts b/services/rotor/src/lib/udf-worker-runner.ts deleted file mode 100644 index 239d977c4..000000000 --- a/services/rotor/src/lib/udf-worker-runner.ts +++ /dev/null @@ -1,326 +0,0 @@ -import { Worker } from "worker_threads"; -import path from "path"; -import fsp from "fs/promises"; -import { getLog, LogLevel, parseNumber, stopwatch } from "juava"; -import { makeFetch, isDropResult, EntityStore, EnrichedConnectionConfig, logType } from "@jitsu/core-functions-lib"; -import { EventContext, TTLStore } from "@jitsu/protocols/functions"; -import { parseUserAgent } from "@jitsu/core-functions-lib"; -import { warehouseQuery } from "./warehouse-store"; -import { compileUdfToFile } from "./udf-shared"; -import { getServerEnv } from "../serverEnv"; -import { randomUUID } from "node:crypto"; - -const log = getLog("udf-worker-runner"); -const serverEnv = getServerEnv(); - -export type UDFTestRequest = { - functionId: string; - functionName: string; - code: string; - event: any; - variables: any; - workspaceId: string; - userAgent?: string; -}; - -export type UDFTestResponse = { - error?: { - message: string; - stack?: string; - name: string; - retryPolicy?: any; - 
}; - dropped?: boolean; - result: any; - store: any; - logs: logType[]; - backend?: "functions-server" | "rotor"; -}; - -// Resolve the worker script path. -// In production (built), the worker is at dist/udf-worker.js alongside the main bundle. -// During development (tsx), use the .ts source directly. -function getWorkerPath(): string { - // Check if we're running from dist/ (production build) - const distWorker = path.join(__dirname, "udf-worker.js"); - try { - require.resolve(distWorker); - return distWorker; - } catch { - // Fallback: dev mode — use the TS source via tsx - return path.join(__dirname, "udf-worker.ts"); - } -} - -export async function runUdfInWorker( - request: UDFTestRequest, - store: TTLStore, - conEntityStore?: EntityStore -): Promise { - const logs: logType[] = []; - const udfTimeoutMs = parseNumber(serverEnv.UDF_TIMEOUT_MS, 5000); - const dumpStore = () => (typeof (store as any).dump === "function" ? (store as any).dump() : {}); - let compiledCodePath: string | undefined; - let worker: Worker | undefined; - - try { - // 1. Compile using shared compilation utility - compiledCodePath = await compileUdfToFile(randomUUID(), request.code, request.functionId, request.variables); - // 3. Create fetch with minimal eventsStore that collects fetch logs - const fetchImpl = makeFetch( - "functionsDebugger", - { - log(connectionId: string, level: LogLevel, msg: Record) { - let statusText; - if (msg.error) { - statusText = `${msg.error}`; - } else { - statusText = `${msg.statusText ?? ""}${msg.status ? `(${msg.status})` : ""}`; - } - logs.push({ - message: `${msg.method} ${msg.url} :: ${statusText}`, - level: msg.error ? "error" : "debug", - timestamp: new Date(), - type: "http", - }); - }, - close() {}, - deadLetter() {}, - }, - "info" - ); - - // 4. 
Build eventContext - const eventContext: EventContext = { - receivedAt: new Date(), - geo: { - country: { code: "US", name: "United States", isEU: false }, - city: { name: "New York" }, - region: { code: "NY", name: "New York" }, - location: { latitude: 40.6808, longitude: -73.9701 }, - postalCode: { code: "11238" }, - }, - ua: parseUserAgent( - request.event?.context?.userAgent || - request.userAgent || - "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36" - ), - headers: { - host: "example.com", - "user-agent": - request.event?.context?.userAgent || - request.userAgent || - "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36", - accept: "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - "accept-language": "en-US,en;q=0.9", - "accept-encoding": "gzip, deflate, br", - connection: "keep-alive", - referer: "https://example.com/", - origin: "https://example.com", - }, - source: { - id: "functionsDebugger-streamId", - name: "Functions Debugger Stream", - type: "browser", - }, - destination: { - id: "functionsDebugger-destinationId", - type: "clickhouse", - updatedAt: new Date(), - hash: "hash", - }, - connection: { - id: "functionsDebugger", - }, - workspace: { - id: request.workspaceId, - }, - }; - // 5. Spawn Worker - const workerPath = getWorkerPath(); - worker = new Worker(workerPath); - - // 6. 
Execute with timeout - const result = await new Promise((resolve, reject) => { - const timer = setTimeout(() => { - worker?.terminate(); - resolve({ - error: { - message: `Function execution timed out after ${udfTimeoutMs}ms`, - name: "TimeoutError", - }, - result: {}, - store: dumpStore(), - logs, - }); - }, udfTimeoutMs); - - worker!.on("message", async (msg: any) => { - try { - if (msg.type === "inited") { - } - if (msg.type === "log") { - logs.push({ - message: msg.message + (Array.isArray(msg.args) && msg.args.length > 0 ? `, ${msg.args.join(",")}` : ""), - level: msg.level, - timestamp: new Date(msg.timestamp), - type: "log", - }); - return; - } - - if (msg.type === "result") { - clearTimeout(timer); - if (msg.error) { - resolve({ - error: { - message: msg.error.message, - stack: msg.error.stack, - name: msg.error.name, - retryPolicy: msg.error.retryPolicy, - }, - result: {}, - store: dumpStore(), - logs, - }); - } else { - resolve({ - dropped: isDropResult(msg.result), - result: msg.result, - store: dumpStore(), - logs, - }); - } - return; - } - - // Proxy requests from worker - if (msg.type.startsWith("store.")) { - const op = msg.type.split(".")[1]; // get, set, del, ttl, getOrSet, getWithTTL - try { - const result = await (store as any)[op](...msg.args); - worker!.postMessage({ type: "response", id: msg.id, result }); - } catch (e: any) { - worker!.postMessage({ type: "response", id: msg.id, error: e.message }); - } - return; - } - - if (msg.type === "fetch") { - try { - const [url, init] = msg.args; - const res = await fetchImpl(url, init); - const headers: Record = {}; - res.headers.forEach((v: string, k: string) => { - headers[k] = v; - }); - const text = await res.text(); - worker!.postMessage({ - type: "response", - id: msg.id, - result: { - status: res.status, - statusText: res.statusText, - ok: res.ok, - url: res.url, - type: res.type, - redirected: res.redirected, - headers, - body: text, - }, - }); - } catch (e: any) { - worker!.postMessage({ 
type: "response", id: msg.id, error: e.message }); - } - return; - } - - if (msg.type === "warehouse.query") { - try { - const [destinationId, sql, params] = msg.args; - if (!conEntityStore) { - throw new Error("Connection store is not provided"); - } - const result = await warehouseQuery(request.workspaceId, conEntityStore, destinationId, sql, params); - worker!.postMessage({ type: "response", id: msg.id, result }); - } catch (e: any) { - worker!.postMessage({ type: "response", id: msg.id, error: e.message }); - } - return; - } - } catch (e: any) { - log.atError().log(`Error handling worker message: ${e.message}`); - } - }); - - worker!.on("error", (err: Error) => { - clearTimeout(timer); - resolve({ - error: { message: err.message, name: err.name, stack: err.stack }, - result: {}, - store: dumpStore(), - logs, - }); - }); - - worker!.on("exit", (code: number) => { - clearTimeout(timer); - if (code !== 0 && code !== 1) { - // code 1 is normal termination via worker.terminate() - resolve({ - error: { message: `Worker exited with code ${code}`, name: "WorkerError" }, - result: {}, - store: dumpStore(), - logs, - }); - } - }); - - // Send init message - // Serialize eventContext dates to strings for worker transfer - const serializableContext = JSON.parse(JSON.stringify(eventContext)); - worker!.postMessage({ - type: "init", - compiledCodePath, - event: request.event, - eventContext: serializableContext, - variables: request.variables || {}, - workspaceId: request.workspaceId, - }); - }); - return result; - } catch (e: any) { - // Handle compilation errors or other setup failures - if (e.errors && Array.isArray(e.errors)) { - const errorMessages = e.errors.map((err: any) => err.text).join("\n"); - return { - error: { - message: `Failed to compile function ${request.functionId}:\n${errorMessages}`, - name: "CompilationError", - }, - result: {}, - store: dumpStore(), - logs, - }; - } - return { - error: { message: e.message, name: e.name || "Error", stack: e.stack }, - 
result: {}, - store: dumpStore(), - logs, - }; - } finally { - // Cleanup: terminate worker - if (worker) { - try { - await worker.terminate(); - } catch { - // Worker may already be terminated - } - } - // Cleanup: remove compiled temp file - if (compiledCodePath) { - fsp.unlink(compiledCodePath).catch(() => {}); - } - } -} diff --git a/services/rotor/src/lib/udf-worker.ts b/services/rotor/src/lib/udf-worker.ts deleted file mode 100644 index 792128302..000000000 --- a/services/rotor/src/lib/udf-worker.ts +++ /dev/null @@ -1,165 +0,0 @@ -import { parentPort } from "worker_threads"; - -if (!parentPort) { - throw new Error("udf-worker must be run as a worker thread"); -} - -// Pending proxy requests awaiting response from main thread -const pending = new Map void; reject: (e: Error) => void }>(); -let requestIdCounter = 0; - -function nextId(): string { - return String(++requestIdCounter); -} - -// Send a proxy request to the main thread and await the response -function callMain(type: string, args: any[]): Promise { - return new Promise((resolve, reject) => { - const id = nextId(); - pending.set(id, { resolve, reject }); - parentPort!.postMessage({ type, id, args }); - }); -} - -function deepCopy(o: T): T { - if (typeof o !== "object") { - return o; - } - if (!o) { - return o; - } - - if (Array.isArray(o)) { - const newO: any[] = []; - for (let i = 0; i < o.length; i += 1) { - const v = o[i]; - newO[i] = !v || typeof v !== "object" ? v : deepCopy(v); - } - return newO as T; - } - - const newO: Record = {}; - for (const [k, v] of Object.entries(o)) { - newO[k] = !v || typeof v !== "object" ? 
v : deepCopy(v); - } - return newO as T; -} - -function isDropResult(result: any): boolean { - return result === "drop" || (Array.isArray(result) && result.length === 0) || result === null || result === false; -} - -// Handle response messages from main thread -parentPort.on("message", async (msg: any) => { - if (msg.type === "response") { - const p = pending.get(msg.id); - if (p) { - pending.delete(msg.id); - if (msg.error) { - p.reject(new Error(msg.error)); - } else { - p.resolve(msg.result); - } - } - return; - } - - if (msg.type === "init") { - const { compiledCodePath, event, eventContext, variables, workspaceId } = msg; - - // Build proxied store - const store = { - get: (key: string) => callMain("store.get", [key]), - set: (key: string, obj: any, opts?: any) => callMain("store.set", [key, obj, opts]), - del: (key: string) => callMain("store.del", [key]), - ttl: (key: string) => callMain("store.ttl", [key]), - getOrSet: (key: string, value: any, opts?: any) => callMain("store.getOrSet", [key, value, opts]), - getWithTTL: (key: string) => callMain("store.getWithTTL", [key]), - }; - - // Build proxied logger (fire-and-forget — no response needed) - const log = { - info: (message: string, ...args: any[]) => { - parentPort!.postMessage({ type: "log", level: "info", message, args, timestamp: new Date().toISOString() }); - }, - warn: (message: string, ...args: any[]) => { - parentPort!.postMessage({ type: "log", level: "warn", message, args, timestamp: new Date().toISOString() }); - }, - debug: (message: string, ...args: any[]) => { - parentPort!.postMessage({ type: "log", level: "debug", message, args, timestamp: new Date().toISOString() }); - }, - error: (message: string, ...args: any[]) => { - parentPort!.postMessage({ type: "log", level: "error", message, args, timestamp: new Date().toISOString() }); - }, - }; - - // Build proxied fetch - const proxiedFetch = async (url: string, init?: any) => { - const serialized = await callMain("fetch", [url, init]); - // 
Reconstruct a Response-like object from the serialized data - return { - status: serialized.status, - statusText: serialized.statusText, - ok: serialized.ok, - url: serialized.url, - type: serialized.type, - redirected: serialized.redirected, - headers: serialized.headers, - bodyUsed: true, - body: serialized.body, - text: () => Promise.resolve(serialized.body), - json: () => Promise.resolve(JSON.parse(serialized.body)), - }; - }; - - // Build proxied warehouse - const getWarehouse = (destinationId: string) => ({ - query: (sql: string, params?: Record) => callMain("warehouse.query", [destinationId, sql, params]), - }); - - // Build full context - const ctx = { - ...eventContext, - log, - fetch: proxiedFetch, - store, - props: variables || {}, - retries: 0, - getWarehouse, - }; - - let module: any; - try { - // Dynamic import of the compiled .mjs UDF file - module = await import(compiledCodePath); - const func = module.default; - if (typeof func !== "function") { - parentPort!.postMessage({ - type: "result", - error: { message: `Default export is not a function: ${typeof func}`, name: "CompilationError" }, - }); - return; - } - - const result = await func(deepCopy(event), ctx); - - // Check for "drop" result - const dropped = isDropResult(result); - parentPort!.postMessage({ - type: "result", - result: typeof result === "undefined" ? 
event : result, - dropped, - }); - } catch (e: any) { - parentPort!.postMessage({ - type: "result", - error: { - message: e.message || String(e), - name: e.name || "Error", - stack: e.stack, - retryPolicy: module.config?.retryPolicy, - }, - }); - } - } -}); diff --git a/services/rotor/src/lib/worker-protocol.ts b/services/rotor/src/lib/worker-protocol.ts new file mode 100644 index 000000000..e9560801e --- /dev/null +++ b/services/rotor/src/lib/worker-protocol.ts @@ -0,0 +1,129 @@ +// Shared types for main↔worker communication (Deno Web Workers) + +import type { AnyEvent, EventContext } from "@jitsu/protocols/functions"; +import type { EnrichedConnectionConfig, FunctionExecLog } from "@jitsu/core-functions-lib"; + +// ── Messages: Main → Worker ───────────────────────────────────────── + +/** Sent once after worker creation to bootstrap it with compiled UDF code and connection configs */ +export type InitMessage = { + type: "init"; + /** One entry per connection in the workspace */ + connections: WorkerConnectionInit[]; +}; + +export type WorkerConnectionInit = { + connectionId: string; + /** Stripped connection config (no credentials / functionsEnv) */ + connection: StrippedConnectionConfig; + /** Each UDF compiled to an IIFE string by esbuild */ + functions: WorkerFunctionInit[]; + /** Whether warehouse queries are allowed */ + warehouseEnabled: boolean; + /** debugTill ISO string (if set) */ + debugTill?: string; + /** fetchLogLevel from connection options */ + fetchLogLevel?: string; + /** functionsEnv / props */ + props: Record; +}; + +export type WorkerFunctionInit = { + id: string; + /** IIFE code string – evaluated via new Function() inside the worker */ + iifeCode: string; +}; + +/** Ask the worker to execute a chain for a given connection */ +export type ExecMessage = { + type: "exec"; + requestId: string; + connectionId: string; + event: AnyEvent; + eventContext: EventContext; + fetchTimeoutMs: number; +}; + +/** Cancel a pending execution */ +export type 
CancelMessage = {
+  type: "cancel";
+  requestId: string;
+};
+
+/** Response to a proxy request from the worker */
+export type ProxyResponseMessage = {
+  type: "proxyResponse";
+  callId: string;
+  result?: any;
+  error?: string;
+};
+
+export type MainToWorkerMessage = InitMessage | ExecMessage | CancelMessage | ProxyResponseMessage;
+
+// ── Messages: Worker → Main ─────────────────────────────────────────
+
+/** Worker is ready to accept exec messages */
+export type ReadyMessage = {
+  type: "ready";
+};
+
+export type DebugMessage = {
+  type: "debug";
+  value: any;
+};
+
+/** Result of a chain execution */
+export type ResultMessage = {
+  type: "result";
+  requestId: string;
+  connectionId: string;
+  events: AnyEvent[];
+  execLog: FunctionExecLog;
+  logs: SerializedLogEntry[];
+};
+
+/** Proxy request for I/O that the sandboxed worker cannot perform */
+export type ProxyRequestMessage = {
+  type: "proxyRequest";
+  callId: string;
+  method: ProxyMethod;
+  args: any[];
+};
+
+export type ProxyMethod =
+  | "store.get"
+  | "store.set"
+  | "store.del"
+  | "store.ttl"
+  | "store.getOrSet"
+  | "store.getWithTTL"
+  | "fetch"
+  | "warehouse.query";
+
+/** Fire-and-forget log from the worker */
+export type LogMessage = {
+  type: "log";
+  level: "info" | "warn" | "debug" | "error";
+  functionId: string;
+  functionType: string;
+  message: any;
+  args?: any[];
+  timestamp: string;
+};
+
+export type WorkerToMainMessage = ReadyMessage | ResultMessage | ProxyRequestMessage | LogMessage | DebugMessage;
+
+// ── Shared helper types ─────────────────────────────────────────────
+
+/** Serialized log entry (Date → ISO string for postMessage transfer) */
+export type SerializedLogEntry = {
+  level: "info" | "warn" | "debug" | "error";
+  functionId: string;
+  functionType: string;
+  message: any;
+  args?: any[];
+  timestamp: string;
+};
+
+/** Connection config without credentials (safe to send to worker) */
+export type StrippedConnectionConfig = Omit<EnrichedConnectionConfig, "credentials" | "functionsEnv">;
diff --git 
a/services/rotor/src/lib/worker-udf-runner.ts b/services/rotor/src/lib/worker-udf-runner.ts
new file mode 100644
index 000000000..554d545a7
--- /dev/null
+++ b/services/rotor/src/lib/worker-udf-runner.ts
@@ -0,0 +1,256 @@
+// ── /udfrun: run a single UDF in a temporary Deno Web Worker ──
+import { EventContext, TTLStore } from "@jitsu/protocols/functions";
+import { EnrichedConnectionConfig, EntityStore, makeFetch, parseUserAgent } from "@jitsu/core-functions-lib";
+import { LogLevel, parseNumber } from "juava";
+import { compileUdfToIIFE } from "./udf-shared";
+import type { ExecMessage, InitMessage, ProxyResponseMessage, WorkerConnectionInit } from "./worker-protocol";
+import { warehouseQuery } from "./warehouse-store";
+import { getServerEnv } from "../serverEnv";
+
+const env = getServerEnv();
+
+function getWorkerUrl(): string {
+  return new URL("./workspace-worker.mjs", import.meta.url).href;
+}
+
+export async function runUdfInWorker(
+  request: any,
+  store: TTLStore,
+  conEntityStore: EntityStore<EnrichedConnectionConfig>
+): Promise<any> {
+  const logs: any[] = [];
+  const udfTimeoutMs = parseNumber(env.UDF_TIMEOUT_MS, 5000);
+  const dumpStore = () => (typeof (store as any).dump === "function" ? 
(store as any).dump() : {}); + + try { + const iifeCode = await compileUdfToIIFE(request.code, request.functionId, request.variables); + + const eventContext: EventContext = { + receivedAt: new Date(), + geo: { + country: { code: "US", name: "United States", isEU: false }, + city: { name: "New York" }, + region: { code: "NY", name: "New York" }, + location: { latitude: 40.6808, longitude: -73.9701 }, + postalCode: { code: "11238" }, + }, + ua: parseUserAgent( + request.event?.context?.userAgent || + request.userAgent || + "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36" + ), + headers: { + host: "example.com", + "user-agent": + request.event?.context?.userAgent || + request.userAgent || + "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36", + accept: "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", + "accept-language": "en-US,en;q=0.9", + "accept-encoding": "gzip, deflate, br", + connection: "keep-alive", + referer: "https://example.com/", + origin: "https://example.com", + }, + source: { + id: "functionsDebugger-streamId", + name: "Functions Debugger Stream", + type: "browser", + }, + destination: { + id: "functionsDebugger-destinationId", + type: "clickhouse", + updatedAt: new Date(), + hash: "hash", + }, + connection: { + id: "functionsDebugger", + }, + workspace: { + id: request.workspaceId, + }, + }; + + const connectionInit: WorkerConnectionInit = { + connectionId: "udfrun", + connection: { + id: "udfrun", + workspaceId: request.workspaceId, + streamId: "udfrun-stream", + streamName: "UDF Runner", + destinationId: "udfrun-dest", + type: "clickhouse", + updatedAt: new Date(), + usesBulker: false, + metricsKeyPrefix: "udfrun", + options: {}, + optionsHash: "", + }, + functions: [{ id: `udf.${request.functionId}`, iifeCode }], + warehouseEnabled: env.FUNCTIONS_CLASS !== "free", + props: request.variables || 
{}, + }; + + const worker = new Worker(getWorkerUrl(), { + type: "module", + // @ts-ignore Deno-specific + deno: { permissions: "none" }, + }); + + const fetchImpl = makeFetch( + "functionsDebugger", + { + log(connectionId: string, level: LogLevel, msg: Record) { + let statusText; + if (msg.error) { + statusText = `${msg.error}`; + } else { + statusText = `${msg.statusText ?? ""}${msg.status ? `(${msg.status})` : ""}`; + } + logs.push({ + message: `${msg.method} ${msg.url} :: ${statusText}`, + level: msg.error ? "error" : "debug", + timestamp: new Date(), + type: "http", + }); + }, + close() {}, + deadLetter() {}, + }, + "info" + ); + + const result = await new Promise((resolve, reject) => { + const timer = setTimeout(() => { + worker.terminate(); + resolve({ + error: { message: `Function execution timed out after ${udfTimeoutMs}ms`, name: "TimeoutError" }, + result: {}, + store: dumpStore(), + logs, + }); + }, udfTimeoutMs); + + worker.onmessage = async (e: MessageEvent) => { + const msg = e.data; + + if (msg.type === "ready") { + worker.postMessage({ + type: "exec", + requestId: "udfrun-1", + connectionId: "udfrun", + event: request.event, + eventContext: JSON.parse(JSON.stringify(eventContext)), + fetchTimeoutMs: parseNumber(env.FETCH_TIMEOUT_MS, 2000), + } as ExecMessage); + return; + } + + if (msg.type === "log") { + logs.push({ + message: msg.message + (Array.isArray(msg.args) && msg.args.length > 0 ? 
`, ${msg.args.join(",")}` : ""), + level: msg.level, + timestamp: new Date(msg.timestamp), + type: "log", + }); + return; + } + + if (msg.type === "result") { + clearTimeout(timer); + worker.terminate(); + const hasError = msg.execLog?.some((e: any) => e.error); + if (hasError) { + const err = msg.execLog.find((e: any) => e.error)?.error; + resolve({ + error: { message: err.message, stack: err.stack, name: err.name, retryPolicy: err.retryPolicy }, + result: {}, + store: dumpStore(), + logs, + }); + } else { + const dropped = msg.events.length === 0; + resolve({ + dropped, + result: dropped ? {} : msg.events.length === 1 ? msg.events[0] : msg.events, + store: dumpStore(), + logs, + }); + } + return; + } + + if (msg.type === "proxyRequest") { + const { callId, method, args } = msg; + try { + let result: any; + if (method.startsWith("store.")) { + const op = method.split(".")[1]; + result = await (store as any)[op](...args); + } else if (method === "fetch") { + const [url, init] = args; + const res = await fetchImpl(url, init); + const responseHeaders: Record = {}; + res.headers.forEach((v: string, k: string) => { + responseHeaders[k] = v; + }); + result = { + status: res.status, + statusText: res.statusText, + ok: res.ok, + url: res.url, + type: res.type, + redirected: res.redirected, + headers: responseHeaders, + body: await res.text(), + }; + } else if (method === "warehouse.query") { + if (env.FUNCTIONS_CLASS === "free") { + throw new Error("Warehouse queries are not available on the free plan."); + } + const [destinationId, sql, params] = args; + result = await warehouseQuery(request.workspaceId, conEntityStore, destinationId, sql, params); + } + worker.postMessage({ type: "proxyResponse", callId, result } as ProxyResponseMessage); + } catch (err: any) { + worker.postMessage({ type: "proxyResponse", callId, error: err.message } as ProxyResponseMessage); + } + return; + } + }; + + worker.onerror = (err: ErrorEvent) => { + clearTimeout(timer); + resolve({ + error: 
{ message: err.message, name: "WorkerError" }, + result: {}, + store: dumpStore(), + logs, + }); + }; + + worker.postMessage({ type: "init", connections: [connectionInit] } as InitMessage); + }); + + return result; + } catch (e: any) { + if (e.errors && Array.isArray(e.errors)) { + const errorMessages = e.errors.map((err: any) => err.text).join("\n"); + return { + error: { + message: `Failed to compile function ${request.functionId}:\n${errorMessages}`, + name: "CompilationError", + }, + result: {}, + store: dumpStore(), + logs, + }; + } + return { + error: { message: e.message, name: e.name || "Error", stack: e.stack }, + result: {}, + store: dumpStore(), + logs, + }; + } +} diff --git a/services/rotor/src/lib/workspace-worker.ts b/services/rotor/src/lib/workspace-worker.ts new file mode 100644 index 000000000..85f43b647 --- /dev/null +++ b/services/rotor/src/lib/workspace-worker.ts @@ -0,0 +1,478 @@ +// Deno Web Worker script – runs with permissions: "none". +// All I/O (store, fetch, warehouse) is proxied to the main process via postMessage. +// +// Used for: +// 1. Long-lived per-workspace workers (free tier) +// 2. 
Temporary workers for /udfrun endpoint + +import type { + InitMessage, + ExecMessage, + ProxyResponseMessage, + MainToWorkerMessage, + WorkerConnectionInit, + WorkerFunctionInit, + ResultMessage, + SerializedLogEntry, + ProxyMethod, + StrippedConnectionConfig, +} from "./worker-protocol"; +import type { AnyEvent, EventContext, FuncReturn, FullContext } from "@jitsu/protocols/functions"; +import { FunctionExecLog, FunctionExecRes } from "@jitsu/core-functions-lib"; + +import { + DropRetryErrorName, + NoRetryErrorName, + RetryError, + NoRetryError, + TableNameParameter, + toJitsuClassic, + fromJitsuClassic, +} from "@jitsu/functions-lib"; + +// Set globals so UDF IIFE code (compiled via functionsLibShimPlugin) can access them +(globalThis as any).RetryError = RetryError; +(globalThis as any).NoRetryError = NoRetryError; +(globalThis as any).TableNameParameter = TableNameParameter; +(globalThis as any).toJitsuClassic = toJitsuClassic; +(globalThis as any).fromJitsuClassic = fromJitsuClassic; + +// Pre-imported Node built-in modules for UDF code. +// UDF IIFE bundles use __require("node:crypto") etc. which needs a sync require(). +// We pre-import them at worker startup and serve them from a map. 
+const nodeBuiltinModules: Record<string, any> = {};
+
+async function preloadNodeBuiltins() {
+  const builtins = ["node:crypto"];
+  for (const mod of builtins) {
+    try {
+      nodeBuiltinModules[mod] = await import(mod);
+      // Also register without prefix
+      nodeBuiltinModules[mod.replace("node:", "")] = nodeBuiltinModules[mod];
+    } catch (_) {
+      // Not available in sandbox — UDFs using this module will get a clear error
+    }
+  }
+  // Set up global require for IIFE bundles
+  (globalThis as any).require = (specifier: string) => {
+    const m = nodeBuiltinModules[specifier];
+    if (m) return m;
+    throw new Error(`Module "${specifier}" is not available in the sandboxed worker`);
+  };
+}
+
+// ── Proxy helpers ───────────────────────────────────────────────────
+
+const pending = new Map<string, { resolve: (v: any) => void; reject: (e: Error) => void }>();
+let callIdCounter = 0;
+
+function nextCallId(): string {
+  return String(++callIdCounter);
+}
+
+function callMain(method: ProxyMethod, args: any[]): Promise<any> {
+  return new Promise((resolve, reject) => {
+    const callId = nextCallId();
+    pending.set(callId, { resolve, reject });
+    self.postMessage({ type: "proxyRequest", callId, method, args });
+  });
+}
+
+// ── Deep copy (same as chain-runner, inlined to avoid import issues in sandbox) ──
+
+function deepCopy<T>(o: T): T {
+  if (typeof o !== "object") return o;
+  if (!o) return o;
+  if (Array.isArray(o)) {
+    const newO: any[] = [];
+    for (let i = 0; i < o.length; i++) {
+      const v = o[i];
+      newO[i] = !v || typeof v !== "object" ? v : deepCopy(v);
+    }
+    return newO as T;
+  }
+  const newO: Record<string, any> = {};
+  for (const [k, v] of Object.entries(o)) {
+    newO[k] = !v || typeof v !== "object" ? 
v : deepCopy(v); + } + return newO as T; +} + +function isDropResult(result: any): boolean { + return result === "drop" || (Array.isArray(result) && result.length === 0) || result === null || result === false; +} + +// ── Loaded function type ──────────────────────────────────────────── + +type LoadedFunc = { + id: string; + exec: (event: AnyEvent, ctx: FullContext) => FuncReturn; + config?: any; +}; + +type ConnectionChain = { + connection: StrippedConnectionConfig; + functions: LoadedFunc[]; + props: Record; + debugTill?: string; + fetchLogLevel?: string; + warehouseEnabled: boolean; +}; + +const chains = new Map(); + +// ── UDF instantiation from IIFE code ──────────────────────────────── + +function instantiateUdf(funcInit: WorkerFunctionInit): LoadedFunc { + // The IIFE code defines `var __udf = (()=>{ ... })();` + // We wrap it in a Function that returns __udf. + const factory = new Function(funcInit.iifeCode + "\nreturn __udf;"); + const mod = factory(); + const func = mod.default; + if (typeof func !== "function") { + throw new Error(`UDF ${funcInit.id}: default export is not a function (got ${typeof func})`); + } + return { + id: funcInit.id, + exec: func, + config: mod.config, + }; +} + +// ── Build proxied context ─────────────────────────────────────────── + +function buildContext( + chain: ConnectionChain, + eventContext: EventContext, + functionId: string, + functionType: string, + logs: SerializedLogEntry[] +): FullContext { + const debugTill = chain.debugTill ? 
new Date(chain.debugTill) : undefined; + + // Proxied store + const store = { + get: (key: string) => callMain("store.get", [key]), + set: (key: string, obj: any, opts?: any) => callMain("store.set", [key, obj, opts]), + del: (key: string) => callMain("store.del", [key]), + ttl: (key: string) => callMain("store.ttl", [key]), + getOrSet: (key: string, value: any, opts?: any) => callMain("store.getOrSet", [key, value, opts]), + getWithTTL: (key: string) => callMain("store.getWithTTL", [key]), + }; + + // Collecting logger (fire-and-forget via postMessage) + const addLogEntry = (level: SerializedLogEntry["level"], message: string, args: any[]) => { + if (level === "debug" && !(debugTill && debugTill.getTime() > Date.now())) { + return; + } + const entry: SerializedLogEntry = { + level, + functionId, + functionType, + message, + args: args.length > 0 ? args : undefined, + timestamp: new Date().toISOString(), + }; + logs.push(entry); + //self.postMessage({ type: "log", ...entry }); + }; + + const log = { + info: (message: string, ...args: any[]) => addLogEntry("info", message, args), + warn: (message: string, ...args: any[]) => addLogEntry("warn", message, args), + debug: (message: string, ...args: any[]) => addLogEntry("debug", message, args), + error: (message: string, ...args: any[]) => addLogEntry("error", message, args), + }; + + // Proxied fetch – delegates to main process, logs request/response like makeFetch + const proxiedFetch = async (url: string, init?: any) => { + const startTime = Date.now(); + + const baseInfo = { + functionId, + functionType, + type: "http-request" as const, + url, + method: init?.method || "GET", + body: init?.body, + event: {}, + }; + + try { + const serialized = await callMain("fetch", [chain.connection.id, url, init]); + const elapsedMs = Date.now() - startTime; + + if (baseInfo) { + logs.push({ + level: "info", + functionId, + functionType, + message: { + ...baseInfo, + status: serialized.status, + statusText: serialized.statusText, 
+ elapsedMs, + }, + timestamp: new Date().toISOString(), + }); + } + + return { + status: serialized.status, + statusText: serialized.statusText, + ok: serialized.ok, + url: serialized.url, + type: serialized.type, + redirected: serialized.redirected, + headers: serialized.headers, + bodyUsed: true, + body: serialized.body, + text: () => Promise.resolve(serialized.body), + json: () => Promise.resolve(JSON.parse(serialized.body)), + }; + } catch (err: any) { + const elapsedMs = Date.now() - startTime; + if (baseInfo) { + logs.push({ + level: "error", + functionId, + functionType, + message: { + ...baseInfo, + error: err.message || String(err), + elapsedMs, + }, + timestamp: new Date().toISOString(), + }); + } + throw err; + } + }; + + // Proxied warehouse + const getWarehouse = (destinationId: string) => ({ + query: (sql: string, params?: Record) => { + if (!chain.warehouseEnabled) { + return Promise.reject( + new Error("Warehouse queries are not available on the free plan. Please upgrade to use this feature.") + ); + } + return callMain("warehouse.query", [destinationId, sql, params]); + }, + }); + + const retries = (eventContext as EventContext & { retries?: number }).retries ?? 
0; + + return { + ...eventContext, + log, + fetch: proxiedFetch as any, + store, + props: chain.props, + retries, + getWarehouse, + }; +} + +// ── Chain execution (inline – mirrors chain-runner.ts logic) ──────── + +async function runChainInWorker( + chain: ConnectionChain, + event: AnyEvent, + eventContext: EventContext, + fetchTimeoutMs: number +): Promise<{ events: AnyEvent[]; execLog: FunctionExecLog; logs: SerializedLogEntry[] }> { + const execLog: FunctionExecLog = []; + const logs: SerializedLogEntry[] = []; + let events: AnyEvent[] = [event]; + + for (let k = 0; k < chain.functions.length; k++) { + const func = chain.functions[k]; + const newEvents: AnyEvent[] = []; + + for (let i = 0; i < events.length; i++) { + const currentEvent = events[i]; + const startMs = Date.now(); + let result: FuncReturn = undefined; + + const ar = func.id.split("."); + const id = ar.pop() as string; + const functionType = ar.join("."); + const execLogEntry: Partial & { functionType?: string } = { + eventIndex: i, + receivedAt: eventContext.receivedAt, + functionId: id, + functionType, + }; + + try { + const ctx = buildContext(chain, eventContext, id, functionType, logs); + result = await func.exec(deepCopy(currentEvent), ctx); + + if (k < chain.functions.length - 1 && Array.isArray(result) && result.length > 1) { + const l = result.length; + result = undefined; + const err = new Error( + `Got ${l} events as result of function #${k + 1} of ${ + chain.functions.length + }. 
Only the last function in a chain is allowed to multiply events.` + ); + err.name = NoRetryErrorName; + throw err; + } + } catch (err: any) { + if (err?.name === DropRetryErrorName || err?.name === NoRetryErrorName) { + result = "drop"; + } + if (func?.config?.retryPolicy) { + err.retryPolicy = func.config.retryPolicy; + } + execLogEntry.error = { + name: err.name, + message: err.message, + stack: err.stack, + retryPolicy: err.retryPolicy, + functionId: id, + }; + } + + execLogEntry.ms = Date.now() - startMs; + execLogEntry.dropped = isDropResult(result); + execLog.push(execLogEntry as FunctionExecRes); + + if (!isDropResult(result)) { + if (result) { + if (Array.isArray(result)) { + newEvents.push(...result); + } else { + newEvents.push(result as AnyEvent); + } + } else { + newEvents.push(currentEvent); + } + } + } + + events = newEvents; + if (events.length === 0) break; + } + + return { events, execLog, logs }; +} + +// ── Message handler ───────────────────────────────────────────────── + +self.onmessage = async (e: MessageEvent) => { + const msg = e.data; + + // Handle proxy responses + if (msg.type === "proxyResponse") { + const p = pending.get(msg.callId); + if (p) { + pending.delete(msg.callId); + if (msg.error) { + p.reject(new Error(msg.error)); + } else { + p.resolve(msg.result); + } + } + return; + } + + // Handle init + if (msg.type === "init") { + // Pre-import Node built-ins so UDF require() calls work + await preloadNodeBuiltins(); + + for (const conn of msg.connections) { + const funcs: LoadedFunc[] = []; + for (const funcInit of conn.functions) { + try { + funcs.push(instantiateUdf(funcInit)); + } catch (err: any) { + // Create error-throwing placeholder + const errorMessage = err.message; + funcs.push({ + id: funcInit.id, + exec: async () => { + throw new Error(errorMessage); + }, + }); + } + } + chains.set(conn.connectionId, { + connection: conn.connection, + functions: funcs, + props: conn.props, + debugTill: conn.debugTill, + fetchLogLevel: 
conn.fetchLogLevel, + warehouseEnabled: conn.warehouseEnabled, + }); + } + self.postMessage({ type: "ready" }); + return; + } + + // Handle exec + if (msg.type === "exec") { + const chain = chains.get(msg.connectionId); + if (!chain) { + const result: ResultMessage = { + type: "result", + requestId: msg.requestId, + connectionId: msg.connectionId, + events: [], + execLog: [ + { + error: { message: `Connection '${msg.connectionId}' not found in worker`, name: "NoRetryError" }, + ms: 0, + eventIndex: 0, + functionId: "", + } as any, + ], + logs: [], + }; + self.postMessage(result); + return; + } + + // Parse dates back from serialization + if (msg.eventContext?.receivedAt && typeof msg.eventContext.receivedAt === "string") { + msg.eventContext.receivedAt = new Date(msg.eventContext.receivedAt); + } + if (msg.eventContext?.destination?.updatedAt && typeof msg.eventContext.destination.updatedAt === "string") { + msg.eventContext.destination.updatedAt = new Date(msg.eventContext.destination.updatedAt); + } + + try { + const { events, execLog, logs } = await runChainInWorker(chain, msg.event, msg.eventContext, msg.fetchTimeoutMs); + const result: ResultMessage = { + type: "result", + requestId: msg.requestId, + connectionId: msg.connectionId, + events, + execLog, + logs, + }; + self.postMessage(result); + } catch (err: any) { + const result: ResultMessage = { + type: "result", + requestId: msg.requestId, + connectionId: msg.connectionId, + events: [], + execLog: [ + { + error: { message: err.message, name: err.name || "Error" }, + ms: 0, + eventIndex: 0, + functionId: "", + } as any, + ], + logs: [], + }; + self.postMessage(result); + } + return; + } +}; diff --git a/webapps/console/lib/server/serverEnv.ts b/webapps/console/lib/server/serverEnv.ts index d1f42e824..603c315d4 100644 --- a/webapps/console/lib/server/serverEnv.ts +++ b/webapps/console/lib/server/serverEnv.ts @@ -180,6 +180,12 @@ const ServerEnvSchema = ClientEnvSchema.extend({ // Authentication key for rotor 
API ROTOR_AUTH_KEY: z.string().optional(), + // Functions server URL template (use ${workspaceId} as placeholder) + FUNCTIONS_SERVER_URL_TEMPLATE: z.string().optional(), + + // Default functions class when workspace has no explicit setting + DEFAULT_FUNCTIONS_CLASS: z.string().optional().default("legacy"), + // ============================================ // Email Configuration // ============================================ diff --git a/webapps/console/pages/api/[workspaceId]/function/run.ts b/webapps/console/pages/api/[workspaceId]/function/run.ts index 0c867ce66..8c0088973 100644 --- a/webapps/console/pages/api/[workspaceId]/function/run.ts +++ b/webapps/console/pages/api/[workspaceId]/function/run.ts @@ -3,6 +3,7 @@ import { z } from "zod"; import { Api, inferUrl, nextJsApiHandler, verifyAccessWithRole } from "../../../../lib/api"; import { requireDefined, rpc } from "juava"; import { getServerEnv } from "../../../../lib/server/serverEnv"; +import { db } from "../../../../lib/server/db"; const log = getServerLog("function-run"); @@ -23,10 +24,43 @@ const resultType = z.object({ store: z.record(z.any()), logs: z.array(z.any()), meta: z.any().nullish(), + backend: z.string().optional(), }); export type FunctionRunType = z.infer; +function extractFunctionsClasses(featuresEnabled: string[], defaultClass: string): string[] { + const prefix = "functionsClasses="; + for (const feature of featuresEnabled) { + if (feature.startsWith(prefix)) { + return feature + .substring(prefix.length) + .split(",") + .map(f => f.trim()); + } + } + return [defaultClass]; +} + +function getUdfRunUrl( + workspaceId: string, + functionsClasses: string[], + serverEnv: ReturnType +): string { + const template = serverEnv.FUNCTIONS_SERVER_URL_TEMPLATE; + const isLegacy = functionsClasses.includes("legacy") || functionsClasses.includes(""); + if (!template || isLegacy) { + const rotorURL = requireDefined( + serverEnv.ROTOR_URL, + `env ROTOR_URL is not set. 
Rotor is required to run functions` + ); + return rotorURL + "/udfrun"; + } + const functionsClass = functionsClasses[0]; + const baseUrl = template.replace("${workspaceId}", functionsClass === "free" ? "free" : workspaceId); + return baseUrl + "/udfrun"; +} + export const api: Api = { url: inferUrl(__filename), POST: { @@ -50,10 +84,25 @@ export const api: Api = { const { workspaceId } = query; await verifyAccessWithRole(user, workspaceId, "editEntities"); const serverEnv = getServerEnv(); - const rotorURL = requireDefined( - serverEnv.ROTOR_URL, - `env ROTOR_URL is not set. Rotor is required to run functions` + + const workspace = await db.prisma().workspace.findFirst({ + where: { id: workspaceId }, + select: { featuresEnabled: true }, + }); + const functionsClasses = extractFunctionsClasses( + workspace?.featuresEnabled ?? [], + serverEnv.DEFAULT_FUNCTIONS_CLASS ); + const url = getUdfRunUrl(workspaceId, functionsClasses, serverEnv); + + log + .atInfo() + .log( + `Running function ${ + body.functionId + } for workspace ${workspaceId} via ${url} (classes: ${functionsClasses.join(",")})` + ); + const rotorAuthKey = serverEnv.ROTOR_AUTH_KEY; const headers: Record = { "Content-Type": "application/json", @@ -62,7 +111,7 @@ export const api: Api = { headers["Authorization"] = `Bearer ${rotorAuthKey}`; } - const res = await rpc(rotorURL + "/udfrun", { + const res = await rpc(url, { method: "POST", body: { ...body,