From 60a8227b255e6ed5cefc4f573db2ee9945a8e0ed Mon Sep 17 00:00:00 2001 From: KeKs0r Date: Sun, 29 Mar 2026 11:16:49 +0200 Subject: [PATCH 01/12] refactor(plugin-obsessiondb): replace hand-rolled API client with ORPC Replace the manual fetch-based api-client.ts with a typed ORPC client backed by contracts copied from the platform repo. Narrows remote commands to status/cancel/list and adds --job-id/--service-id flags. Co-Authored-By: Claude Opus 4.6 (1M context) --- bun.lock | 39 +++++- package.json | 6 +- packages/plugin-obsessiondb/package.json | 5 +- .../src/backfill/api-client.ts | 113 ------------------ .../plugin-obsessiondb/src/backfill/client.ts | 36 ++++++ .../src/backfill/contract.ts | 70 +++++++++++ .../src/backfill/handler.test.ts | 87 ++++++++++++-- .../src/backfill/handler.ts | 63 +++++----- .../plugin-obsessiondb/src/backfill/index.ts | 24 ++++ packages/plugin-obsessiondb/src/index.test.ts | 31 ++++- 10 files changed, 314 insertions(+), 160 deletions(-) delete mode 100644 packages/plugin-obsessiondb/src/backfill/api-client.ts create mode 100644 packages/plugin-obsessiondb/src/backfill/client.ts create mode 100644 packages/plugin-obsessiondb/src/backfill/contract.ts diff --git a/bun.lock b/bun.lock index 902efff..d66ca9d 100644 --- a/bun.lock +++ b/bun.lock @@ -11,10 +11,14 @@ "@biomejs/biome": "^2.3.14", "@changesets/cli": "^2.29.8", "@chkit/plugin-backfill": "workspace:*", + "@chkit/plugin-obsessiondb": "workspace:*", + "@orpc/client": "1.13.4", + "@orpc/contract": "1.13.4", "@types/node": "^24.0.0", "p-map": "^7.0.4", "turbo": "^2.8.20", "typescript": "^5.8.0", + "zod": "3.24.4", }, }, "apps/docs": { @@ -90,6 +94,9 @@ "version": "0.1.0-beta.19", "dependencies": { "@chkit/core": "workspace:*", + "@orpc/client": "1.13.4", + "@orpc/contract": "1.13.4", + "zod": "3.24.4", }, }, "packages/plugin-pull": { @@ -353,6 +360,18 @@ "@nodelib/fs.walk": ["@nodelib/fs.walk@1.2.8", "", { "dependencies": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" } }, 
"sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg=="], + "@orpc/client": ["@orpc/client@1.13.4", "", { "dependencies": { "@orpc/shared": "1.13.4", "@orpc/standard-server": "1.13.4", "@orpc/standard-server-fetch": "1.13.4", "@orpc/standard-server-peer": "1.13.4" } }, "sha512-s13GPMeoooJc5Th2EaYT5HMFtWG8S03DUVytYfJv8pIhP87RYKl94w52A36denH6r/B4LaAgBeC9nTAOslK+Og=="], + + "@orpc/contract": ["@orpc/contract@1.13.4", "", { "dependencies": { "@orpc/client": "1.13.4", "@orpc/shared": "1.13.4", "@standard-schema/spec": "^1.1.0", "openapi-types": "^12.1.3" } }, "sha512-TIxyaF67uOlihCRcasjHZxguZpbqfNK7aMrDLnhoufmQBE4OKvguNzmrOFHgsuM0OXoopX0Nuhun1ccaxKP10A=="], + + "@orpc/shared": ["@orpc/shared@1.13.4", "", { "dependencies": { "radash": "^12.1.1", "type-fest": "^5.3.1" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0" }, "optionalPeers": ["@opentelemetry/api"] }, "sha512-TYt9rLG/BUkNQBeQ6C1tEiHS/Seb8OojHgj9GlvqyjHJhMZx5qjsIyTW6RqLPZJ4U2vgK6x4Her36+tlFCKJug=="], + + "@orpc/standard-server": ["@orpc/standard-server@1.13.4", "", { "dependencies": { "@orpc/shared": "1.13.4" } }, "sha512-ZOzgfVp6XUg+wVYw+gqesfRfGPtQbnBIrIiSnFMtZF+6ncmFJeF2Shc4RI2Guqc0Qz25juy8Ogo4tX3YqysOcg=="], + + "@orpc/standard-server-fetch": ["@orpc/standard-server-fetch@1.13.4", "", { "dependencies": { "@orpc/shared": "1.13.4", "@orpc/standard-server": "1.13.4" } }, "sha512-/zmKwnuxfAXbppJpgr1CMnQX3ptPlYcDzLz1TaVzz9VG/Xg58Ov3YhabS2Oi1utLVhy5t4kaCppUducAvoKN+A=="], + + "@orpc/standard-server-peer": ["@orpc/standard-server-peer@1.13.4", "", { "dependencies": { "@orpc/shared": "1.13.4", "@orpc/standard-server": "1.13.4" } }, "sha512-UfqnTLqevjCKUk4cmImOG8cQUwANpV1dp9e9u2O1ki6BRBsg/zlXFg6G2N6wP0zr9ayIiO1d2qJdH55yl/1BNw=="], + "@oslojs/encoding": ["@oslojs/encoding@1.1.0", "", {}, "sha512-70wQhgYmndg4GCPxPPxPGevRKqTIJ2Nh4OkiMWmDAVYsTQ+Ta7Sq+rPevXyXGdzr30/qZBnyOalCszoMxlyldQ=="], "@pagefind/darwin-arm64": ["@pagefind/darwin-arm64@1.4.0", "", { "os": "darwin", 
"cpu": "arm64" }, "sha512-2vMqkbv3lbx1Awea90gTaBsvpzgRs7MuSgKDxW0m9oV1GPZCZbZBJg/qL83GIUEN2BFlY46dtUZi54pwH+/pTQ=="], @@ -445,6 +464,8 @@ "@speed-highlight/core": ["@speed-highlight/core@1.2.14", "", {}, "sha512-G4ewlBNhUtlLvrJTb88d2mdy2KRijzs4UhnlrOSRT4bmjh/IqNElZa3zkrZ+TC47TwtlDWzVLFADljF1Ijp5hA=="], + "@standard-schema/spec": ["@standard-schema/spec@1.1.0", "", {}, "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w=="], + "@turbo/darwin-64": ["@turbo/darwin-64@2.8.20", "", { "os": "darwin", "cpu": "x64" }, "sha512-FQ9EX1xMU5nbwjxXxM3yU88AQQ6Sqc6S44exPRroMcx9XZHqqppl5ymJF0Ig/z3nvQNwDmz1Gsnvxubo+nXWjQ=="], "@turbo/darwin-arm64": ["@turbo/darwin-arm64@2.8.20", "", { "os": "darwin", "cpu": "arm64" }, "sha512-Gpyh9ATFGThD6/s9L95YWY54cizg/VRWl2B67h0yofG8BpHf67DFAh9nuJVKG7bY0+SBJDAo5cMur+wOl9YOYw=="], @@ -961,6 +982,8 @@ "oniguruma-to-es": ["oniguruma-to-es@4.3.4", "", { "dependencies": { "oniguruma-parser": "^0.12.1", "regex": "^6.0.1", "regex-recursion": "^6.0.2" } }, "sha512-3VhUGN3w2eYxnTzHn+ikMI+fp/96KoRSVK9/kMTcFqj1NRDh2IhQCKvYxDnWePKRXY/AqH+Fuiyb7VHSzBjHfA=="], + "openapi-types": ["openapi-types@12.1.3", "", {}, "sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw=="], + "outdent": ["outdent@0.5.0", "", {}, "sha512-/jHxFIzoMXdqPzTaCpFzAAWhpkSjZPF4Vsn6jAfNpmbH/ymsmd7Qc6VE9BGn0L6YMj6uwpQLxCECpus4ukKS9Q=="], "p-filter": ["p-filter@2.1.0", "", { "dependencies": { "p-map": "^2.0.0" } }, "sha512-ZBxxZ5sL2HghephhpGAQdoskxplTwr7ICaehZwLIlfL6acuVgZPm8yBNuRAFBGEqtD/hmUeq9eqLg2ys9Xr/yw=="], @@ -1025,6 +1048,8 @@ "queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="], + "radash": ["radash@12.1.1", "", {}, "sha512-h36JMxKRqrAxVD8201FrCpyeNuUY9Y5zZwujr20fFO77tpUtGa6EZzfKw/3WaiBX95fq7+MpsuMLNdSnORAwSA=="], + "radix3": ["radix3@1.1.2", "", {}, 
"sha512-b484I/7b8rDEdSDKckSSBA8knMpcdsXudlE/LNL639wFoHKwLbEkQFZHWEYwDC0wa0FKUcCY+GAF73Z7wxNVFA=="], "read-yaml-file": ["read-yaml-file@1.1.0", "", { "dependencies": { "graceful-fs": "^4.1.5", "js-yaml": "^3.6.1", "pify": "^4.0.1", "strip-bom": "^3.0.0" } }, "sha512-VIMnQi/Z4HT2Fxuwg5KrY174U1VdUIASQVWXXyqtNRtxSr9IYkn1rsI6Tb6HsrHCmB7gVpNwX6JxPTHcH6IoTA=="], @@ -1141,6 +1166,8 @@ "svgo": ["svgo@4.0.0", "", { "dependencies": { "commander": "^11.1.0", "css-select": "^5.1.0", "css-tree": "^3.0.1", "css-what": "^6.1.0", "csso": "^5.0.5", "picocolors": "^1.1.1", "sax": "^1.4.1" }, "bin": "./bin/svgo.js" }, "sha512-VvrHQ+9uniE+Mvx3+C9IEe/lWasXCU0nXMY2kZeLrHNICuRiC8uMPyM14UEaMOFA5mhyQqEkB02VoQ16n3DLaw=="], + "tagged-tag": ["tagged-tag@1.0.0", "", {}, "sha512-yEFYrVhod+hdNyx7g5Bnkkb0G6si8HJurOoOEgC8B/O0uXLHlaey/65KRv6cuWBNhBgHKAROVpc7QyYqE5gFng=="], + "term-size": ["term-size@2.2.1", "", {}, "sha512-wK0Ri4fOGjv/XPy8SBHZChl8CM7uMc5VML7SqiQ0zG7+J5Vr+RMQDoHa2CNT6KHUnTGIXH34UDMkPzAUyapBZg=="], "tiny-inflate": ["tiny-inflate@1.0.3", "", {}, "sha512-pkY1fj1cKHb2seWDy0B16HeWyczlJA9/WW3u3c4z/NiWDsO3DOU5D7nhTLE9CF0yXv/QZFY7sEJmj24dK+Rrqw=="], @@ -1161,7 +1188,7 @@ "turbo": ["turbo@2.8.20", "", { "optionalDependencies": { "@turbo/darwin-64": "2.8.20", "@turbo/darwin-arm64": "2.8.20", "@turbo/linux-64": "2.8.20", "@turbo/linux-arm64": "2.8.20", "@turbo/windows-64": "2.8.20", "@turbo/windows-arm64": "2.8.20" }, "bin": { "turbo": "bin/turbo" } }, "sha512-Rb4qk5YT8RUwwdXtkLpkVhNEe/lor6+WV7S5tTlLpxSz6MjV5Qi8jGNn4gS6NAvrYGA/rNrE6YUQM85sCZUDbQ=="], - "type-fest": ["type-fest@4.41.0", "", {}, "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA=="], + "type-fest": ["type-fest@5.5.0", "", { "dependencies": { "tagged-tag": "^1.0.0" } }, "sha512-PlBfpQwiUvGViBNX84Yxwjsdhd1TUlXr6zjX7eoirtCPIr08NAmxwa+fcYBTeRQxHo9YC9wwF3m9i700sHma8g=="], "typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, 
"sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="], @@ -1247,7 +1274,7 @@ "youch-core": ["youch-core@0.3.3", "", { "dependencies": { "@poppinss/exception": "^1.2.2", "error-stack-parser-es": "^1.0.5" } }, "sha512-ho7XuGjLaJ2hWHoK8yFnsUGy2Y5uDpqSTq1FkHLK4/oqKtyUU1AFbOOxY4IpC9f0fTLjwYbslUz0Po5BpD1wrA=="], - "zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="], + "zod": ["zod@3.24.4", "", {}, "sha512-OdqJE9UDRPwWsrHjLN2F8bPxvwJBK22EHLWtanu0LSYr5YqzsaaW3RMgmjwr8Rypg5k+meEJdSPXJZXE/yqOMg=="], "zod-to-json-schema": ["zod-to-json-schema@3.25.1", "", { "peerDependencies": { "zod": "^3.25 || ^4" } }, "sha512-pM/SU9d3YAggzi6MtR4h7ruuQlqKtad8e9S0fmxcMi+ueAK5Korys/aWcV9LIIHTVbj01NdzxcnXSN+O74ZIVA=="], @@ -1255,8 +1282,12 @@ "zwitch": ["zwitch@2.0.4", "", {}, "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A=="], + "@astrojs/sitemap/zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="], + "@astrojs/telemetry/ci-info": ["ci-info@4.4.0", "", {}, "sha512-77PSwercCZU2Fc4sX94eF8k8Pxte6JAwL4/ICZLFjJLqegs7kCuAsqqj/70NQF6TvDpgFjkubQB2FW2ZZddvQg=="], + "@chkit/docs/zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="], + "@chkit/plugin-backfill/zod": ["zod@4.3.6", "", {}, "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg=="], "@chkit/plugin-codegen/zod": ["zod@4.3.6", "", {}, "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg=="], @@ -1285,6 +1316,10 @@ "astro/package-manager-detector": ["package-manager-detector@1.6.0", "", {}, "sha512-61A5ThoTiDG/C8s8UMZwSorAGwMJ0ERVGj2OjoW5pAalsNOg15+iQiPzrLJ4jhZ1HJzmC2PIHT2oEiH3R5fzNA=="], + "astro/zod": ["zod@3.25.76", "", {}, 
"sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="], + + "boxen/type-fest": ["type-fest@4.41.0", "", {}, "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA=="], + "csso/css-tree": ["css-tree@2.2.1", "", { "dependencies": { "mdn-data": "2.0.28", "source-map-js": "^1.0.1" } }, "sha512-OA0mILzGc1kCOCSJerOeqDxDQ4HOh+G8NbOJFOTgOCzpw7fCBubk0fEyxp8AgOL/jvLgYA/uV0cMbe43ElF1JA=="], "dom-serializer/entities": ["entities@4.5.0", "", {}, "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw=="], diff --git a/package.json b/package.json index 4f8b9d9..f9f9716 100644 --- a/package.json +++ b/package.json @@ -37,10 +37,14 @@ "@biomejs/biome": "^2.3.14", "@changesets/cli": "^2.29.8", "@chkit/plugin-backfill": "workspace:*", + "@chkit/plugin-obsessiondb": "workspace:*", + "@orpc/client": "1.13.4", + "@orpc/contract": "1.13.4", "@types/node": "^24.0.0", "p-map": "^7.0.4", "turbo": "^2.8.20", - "typescript": "^5.8.0" + "typescript": "^5.8.0", + "zod": "3.24.4" }, "dependencies": { "wrangler": "^4.65.0" diff --git a/packages/plugin-obsessiondb/package.json b/packages/plugin-obsessiondb/package.json index fe7eaf1..adf2527 100644 --- a/packages/plugin-obsessiondb/package.json +++ b/packages/plugin-obsessiondb/package.json @@ -41,6 +41,9 @@ "clean": "rm -rf dist" }, "dependencies": { - "@chkit/core": "workspace:*" + "@chkit/core": "workspace:*", + "@orpc/client": "1.13.4", + "@orpc/contract": "1.13.4", + "zod": "3.24.4" } } diff --git a/packages/plugin-obsessiondb/src/backfill/api-client.ts b/packages/plugin-obsessiondb/src/backfill/api-client.ts deleted file mode 100644 index 308a48c..0000000 --- a/packages/plugin-obsessiondb/src/backfill/api-client.ts +++ /dev/null @@ -1,113 +0,0 @@ -import type { Credentials } from '../auth/index.js' - -export interface RemotePlanResponse { - ok: boolean - plan_id?: string - error?: string - [key: string]: unknown -} - 
-export interface RemoteRunResponse { - ok: boolean - run_id?: string - error?: string - [key: string]: unknown -} - -export interface RemoteStatusResponse { - ok: boolean - status?: string - error?: string - [key: string]: unknown -} - -export interface RemoteCancelResponse { - ok: boolean - error?: string - [key: string]: unknown -} - -export interface RemoteDoctorResponse { - ok: boolean - error?: string - [key: string]: unknown -} - -class SessionExpiredError extends Error { - constructor() { - super('Session expired. Run `chkit obsessiondb login` to re-authenticate.') - } -} - -async function apiRequest( - path: string, - creds: Credentials, - body?: unknown -): Promise { - const res = await fetch(`${creds.base_url}${path}`, { - method: body !== undefined ? 'POST' : 'GET', - headers: { - Authorization: `Bearer ${creds.access_token}`, - 'Content-Type': 'application/json', - 'User-Agent': 'chkit-cli', - }, - ...(body !== undefined ? { body: JSON.stringify(body) } : {}), - }) - - if (res.status === 401) { - throw new SessionExpiredError() - } - - if (!res.ok) { - const text = await res.text() - throw new Error(`Remote backfill API error: ${res.status} ${text}`) - } - - return (await res.json()) as T -} - -export function isSessionExpiredError(error: unknown): boolean { - return error instanceof SessionExpiredError -} - -export async function submitBackfillPlan( - input: Record, - creds: Credentials -): Promise { - return apiRequest('/api/v1/backfill/plan', creds, input) -} - -export async function runRemoteBackfill( - input: Record, - creds: Credentials -): Promise { - return apiRequest('/api/v1/backfill/run', creds, input) -} - -export async function resumeRemoteBackfill( - input: Record, - creds: Credentials -): Promise { - return apiRequest('/api/v1/backfill/resume', creds, input) -} - -export async function getRemoteBackfillStatus( - input: Record, - creds: Credentials -): Promise { - return apiRequest('/api/v1/backfill/status', creds, input) -} - -export 
async function cancelRemoteBackfill( - input: Record, - creds: Credentials -): Promise { - return apiRequest('/api/v1/backfill/cancel', creds, input) -} - -export async function getRemoteBackfillDoctor( - input: Record, - creds: Credentials -): Promise { - return apiRequest('/api/v1/backfill/doctor', creds, input) -} diff --git a/packages/plugin-obsessiondb/src/backfill/client.ts b/packages/plugin-obsessiondb/src/backfill/client.ts new file mode 100644 index 0000000..1930741 --- /dev/null +++ b/packages/plugin-obsessiondb/src/backfill/client.ts @@ -0,0 +1,36 @@ +import { createORPCClient } from '@orpc/client' +import { RPCLink } from '@orpc/client/fetch' +import type { ContractRouterClient } from '@orpc/contract' +import type { Credentials } from '../auth/index.js' +import { jobsContract } from './contract.js' + +export type JobsClient = ContractRouterClient + +export class SessionExpiredError extends Error { + constructor() { + super('Session expired. Run `chkit obsessiondb login` to re-authenticate.') + } +} + +export function isSessionExpiredError(error: unknown): boolean { + return error instanceof SessionExpiredError +} + +export function createJobsClient(creds: Credentials): JobsClient { + const link = new RPCLink({ + url: `${creds.base_url}/rpc/jobs`, + headers: () => ({ + Authorization: `Bearer ${creds.access_token}`, + 'User-Agent': 'chkit-cli', + }), + fetch: async (input, init) => { + const res = await globalThis.fetch(input, init) + if (res.status === 401) { + throw new SessionExpiredError() + } + return res + }, + }) + + return createORPCClient(link) +} diff --git a/packages/plugin-obsessiondb/src/backfill/contract.ts b/packages/plugin-obsessiondb/src/backfill/contract.ts new file mode 100644 index 0000000..1d1423d --- /dev/null +++ b/packages/plugin-obsessiondb/src/backfill/contract.ts @@ -0,0 +1,70 @@ +import { oc } from '@orpc/contract' +import { z } from 'zod' + +export const jobStatusSchema = z.enum(['pending', 'running', 'completed', 'failed', 
'cancelled']) + +export const taskStatusSchema = z.enum(['pending', 'running', 'done', 'failed']) + +export const jobTaskSchema = z.object({ + id: z.string(), + taskIndex: z.number().int(), + status: taskStatusSchema, + sql: z.string(), + queryId: z.string().nullable(), + estimatedBytes: z.number().nullable(), + writtenRows: z.number().nullable(), + writtenBytes: z.number().nullable(), + durationMs: z.number().nullable(), + error: z.string().nullable(), + startedAt: z.string().datetime().nullable(), + finishedAt: z.string().datetime().nullable(), +}) + +export const jobSummarySchema = z.object({ + id: z.string(), + serviceId: z.string(), + type: z.string(), + target: z.string(), + status: jobStatusSchema, + concurrency: z.number().int(), + totalTasks: z.number().int(), + completedTasks: z.number().int(), + failedTasks: z.number().int(), + createdAt: z.string().datetime(), + updatedAt: z.string().datetime(), +}) + +export const jobDetailSchema = jobSummarySchema.extend({ + workflowId: z.string().nullable(), + metadata: z.record(z.unknown()).nullable(), + tasks: z.array(jobTaskSchema), +}) + +export const jobsContract = { + submit: oc + .input( + z.object({ + serviceId: z.string(), + type: z.enum(['backfill']), + target: z.string(), + concurrency: z.number().int().min(1).max(12).optional(), + tasks: z.array( + z.object({ + id: z.string(), + sql: z.string(), + estimatedBytes: z.number().optional(), + }), + ), + metadata: z.record(z.unknown()).optional(), + }), + ) + .output(z.object({ jobId: z.string() })), + + get: oc.input(z.object({ jobId: z.string() })).output(jobDetailSchema), + + list: oc + .input(z.object({ serviceId: z.string() })) + .output(z.object({ jobs: z.array(jobSummarySchema) })), + + cancel: oc.input(z.object({ jobId: z.string() })).output(z.object({})), +} diff --git a/packages/plugin-obsessiondb/src/backfill/handler.test.ts b/packages/plugin-obsessiondb/src/backfill/handler.test.ts index 2af8ff6..a477c8d 100644 --- 
a/packages/plugin-obsessiondb/src/backfill/handler.test.ts +++ b/packages/plugin-obsessiondb/src/backfill/handler.test.ts @@ -11,12 +11,12 @@ function makeContext(overrides: Partial return { context: { targetPlugin: 'backfill', - command: 'run', + command: 'status', config: {}, configPath: '/fake/clickhouse.config.ts', jsonMode: false, args: [], - flags: {}, + flags: { '--job-id': 'job-123' }, options: {}, print: (v: unknown) => printed.push(v), ...overrides, @@ -25,6 +25,13 @@ function makeContext(overrides: Partial } } +function orpcResponse(data: unknown): Response { + return new Response(JSON.stringify({ json: data }), { + status: 200, + headers: { 'content-type': 'application/json' }, + }) +} + describe('handleBackfillCommand', () => { let tempDir: string let originalXdg: string | undefined @@ -73,19 +80,34 @@ describe('handleBackfillCommand', () => { expect(printed[0]).toContain('chkit obsessiondb login') }) - test('routes to remote API when authenticated', async () => { + test('routes status to remote ORPC get when authenticated', async () => { await setupAuth() - globalThis.fetch = mock(async () => - new Response(JSON.stringify({ ok: true, run_id: 'r-123' }), { status: 200 }) - ) as typeof fetch + const jobDetail = { + id: 'job-123', + serviceId: 'svc-1', + type: 'backfill', + target: 'my_table', + status: 'running', + concurrency: 4, + totalTasks: 10, + completedTasks: 3, + failedTasks: 0, + createdAt: '2026-03-29T00:00:00Z', + updatedAt: '2026-03-29T01:00:00Z', + workflowId: null, + metadata: null, + tasks: [], + } + + globalThis.fetch = mock(async () => orpcResponse(jobDetail)) as typeof fetch const { context, printed } = makeContext() const result = await handleBackfillCommand(context) expect(result).toEqual({ handled: true, exitCode: 0 }) expect(printed).toHaveLength(1) - expect((printed[0] as Record).ok).toBe(true) + expect((printed[0] as Record).id).toBe('job-123') }) test('handles 401 with session expired message', async () => { @@ -109,4 +131,55 @@ 
describe('handleBackfillCommand', () => { const result = await handleBackfillCommand(context) expect(result).toEqual({ handled: false }) }) + + test('routes cancel to remote ORPC cancel', async () => { + await setupAuth() + + globalThis.fetch = mock(async () => orpcResponse({})) as typeof fetch + + const { context, printed } = makeContext({ command: 'cancel', flags: { '--job-id': 'job-456' } }) + const result = await handleBackfillCommand(context) + + expect(result).toEqual({ handled: true, exitCode: 0 }) + expect(printed).toHaveLength(1) + }) + + test('routes list to remote ORPC list', async () => { + await setupAuth() + + const listResponse = { + jobs: [ + { + id: 'job-1', + serviceId: 'svc-1', + type: 'backfill', + target: 'table_a', + status: 'completed', + concurrency: 2, + totalTasks: 5, + completedTasks: 5, + failedTasks: 0, + createdAt: '2026-03-28T00:00:00Z', + updatedAt: '2026-03-28T01:00:00Z', + }, + ], + } + + globalThis.fetch = mock(async () => orpcResponse(listResponse)) as typeof fetch + + const { context, printed } = makeContext({ command: 'list', flags: { '--service-id': 'svc-1' } }) + const result = await handleBackfillCommand(context) + + expect(result).toEqual({ handled: true, exitCode: 0 }) + expect(printed).toHaveLength(1) + expect((printed[0] as { jobs: unknown[] }).jobs).toHaveLength(1) + }) + + test('returns handled: false for non-remote commands like run', async () => { + await setupAuth() + + const { context } = makeContext({ command: 'run' }) + const result = await handleBackfillCommand(context) + expect(result).toEqual({ handled: false }) + }) }) diff --git a/packages/plugin-obsessiondb/src/backfill/handler.ts b/packages/plugin-obsessiondb/src/backfill/handler.ts index f413f00..61812b8 100644 --- a/packages/plugin-obsessiondb/src/backfill/handler.ts +++ b/packages/plugin-obsessiondb/src/backfill/handler.ts @@ -1,13 +1,5 @@ import { loadCredentials, resolveBaseUrl } from '../auth/index.js' -import { - cancelRemoteBackfill, - 
getRemoteBackfillDoctor, - getRemoteBackfillStatus, - isSessionExpiredError, - resumeRemoteBackfill, - runRemoteBackfill, - submitBackfillPlan, -} from './api-client.js' +import { createJobsClient, isSessionExpiredError, type JobsClient } from './client.js' interface BeforePluginCommandContext { targetPlugin: string @@ -25,17 +17,7 @@ type HandlerResult = | { handled: true; exitCode: number } | { handled: false } -const BACKFILL_SUBCOMMANDS: Record< - string, - (input: Record, creds: { access_token: string; base_url: string }) => Promise -> = { - plan: submitBackfillPlan, - run: runRemoteBackfill, - resume: resumeRemoteBackfill, - status: getRemoteBackfillStatus, - cancel: cancelRemoteBackfill, - doctor: getRemoteBackfillDoctor, -} +const REMOTE_SUBCOMMANDS = new Set(['status', 'cancel', 'list']) export async function handleBackfillCommand(context: BeforePluginCommandContext): Promise { if (context.targetPlugin !== 'backfill') return { handled: false } @@ -43,8 +25,7 @@ export async function handleBackfillCommand(context: BeforePluginCommandContext) // --local flag bypasses remote execution if (context.flags['--local'] === true) return { handled: false } - const handler = BACKFILL_SUBCOMMANDS[context.command] - if (!handler) return { handled: false } + if (!REMOTE_SUBCOMMANDS.has(context.command)) return { handled: false } const creds = await loadCredentials() if (!creds) { @@ -54,18 +35,11 @@ export async function handleBackfillCommand(context: BeforePluginCommandContext) // Allow OBSESSIONDB_API_URL env var to override the stored base_url const effectiveCreds = { ...creds, base_url: resolveBaseUrl(creds.base_url) } + const client = createJobsClient(effectiveCreds) try { - const input = { - command: context.command, - args: context.args, - flags: context.flags, - } - - const result = await handler(input, effectiveCreds) - + const result = await dispatchCommand(client, context.command, context.flags) context.print(result) - return { handled: true, exitCode: 0 } } 
catch (error) { if (isSessionExpiredError(error)) { @@ -75,3 +49,30 @@ export async function handleBackfillCommand(context: BeforePluginCommandContext) throw error } } + +async function dispatchCommand( + client: JobsClient, + command: string, + flags: Record, +): Promise { + const jobId = typeof flags['--job-id'] === 'string' ? flags['--job-id'] : undefined + const serviceId = typeof flags['--service-id'] === 'string' ? flags['--service-id'] : undefined + + switch (command) { + case 'status': { + if (jobId) return client.get({ jobId }) + if (serviceId) return client.list({ serviceId }) + throw new Error('Either --job-id or --service-id is required for remote status') + } + case 'cancel': { + if (!jobId) throw new Error('--job-id is required for remote cancel') + return client.cancel({ jobId }) + } + case 'list': { + if (!serviceId) throw new Error('--service-id is required for remote list') + return client.list({ serviceId }) + } + default: + throw new Error(`Unsupported remote command: ${command}`) + } +} diff --git a/packages/plugin-obsessiondb/src/backfill/index.ts b/packages/plugin-obsessiondb/src/backfill/index.ts index db72220..8d32ff9 100644 --- a/packages/plugin-obsessiondb/src/backfill/index.ts +++ b/packages/plugin-obsessiondb/src/backfill/index.ts @@ -1,4 +1,13 @@ export { handleBackfillCommand } from './handler.js' +export { createJobsClient, type JobsClient } from './client.js' +export { + jobsContract, + jobStatusSchema, + taskStatusSchema, + jobTaskSchema, + jobSummarySchema, + jobDetailSchema, +} from './contract.js' export const BACKFILL_EXTEND_COMMANDS = [ { @@ -11,4 +20,19 @@ export const BACKFILL_EXTEND_COMMANDS = [ }, ], }, + { + command: ['backfill status', 'backfill cancel', 'backfill list'], + flags: [ + { + name: '--job-id', + type: 'string' as const, + description: 'Remote job ID for status/cancel', + }, + { + name: '--service-id', + type: 'string' as const, + description: 'ObsessionDB service ID for listing jobs', + }, + ], + }, ] diff 
--git a/packages/plugin-obsessiondb/src/index.test.ts b/packages/plugin-obsessiondb/src/index.test.ts index 81dd1c5..28d1b2b 100644 --- a/packages/plugin-obsessiondb/src/index.test.ts +++ b/packages/plugin-obsessiondb/src/index.test.ts @@ -328,12 +328,12 @@ describe('onBeforePluginCommand — backfill interception', () => { return { context: { targetPlugin: 'backfill', - command: 'run', + command: 'status', config: {}, configPath: '/fake/clickhouse.config.ts', jsonMode: false, args: [], - flags: {}, + flags: { '--job-id': 'job-123' }, options: {}, print: (v: unknown) => printed.push(v), ...overrides, @@ -351,8 +351,29 @@ describe('onBeforePluginCommand — backfill interception', () => { test('intercepts backfill commands when authenticated', async () => { await setupAuth() + + const jobDetail = { + id: 'job-123', + serviceId: 'svc-1', + type: 'backfill', + target: 'my_table', + status: 'running', + concurrency: 4, + totalTasks: 10, + completedTasks: 3, + failedTasks: 0, + createdAt: '2026-03-29T00:00:00Z', + updatedAt: '2026-03-29T01:00:00Z', + workflowId: null, + metadata: null, + tasks: [], + } + globalThis.fetch = mock(async () => - new Response(JSON.stringify({ ok: true, run_id: 'r-abc' }), { status: 200 }) + new Response(JSON.stringify({ json: jobDetail }), { + status: 200, + headers: { 'content-type': 'application/json' }, + }) ) as typeof fetch const { context, printed } = makeHookContext() @@ -361,7 +382,7 @@ describe('onBeforePluginCommand — backfill interception', () => { expect(result.handled).toBe(true) expect(result.exitCode).toBe(0) - expect((printed[0] as Record).run_id).toBe('r-abc') + expect((printed[0] as Record).id).toBe('job-123') }) test('requires login when not authenticated', async () => { @@ -369,7 +390,7 @@ describe('onBeforePluginCommand — backfill interception', () => { originalXdg = process.env.XDG_CONFIG_HOME process.env.XDG_CONFIG_HOME = tempDir - const { context, printed } = makeHookContext() + const { context, printed } = 
makeHookContext({ command: 'status', flags: { '--job-id': 'job-1' } }) const plugin = obsessiondb().plugin const result = await plugin.hooks.onBeforePluginCommand(context as Parameters[0]) From 5797c9dd4652a21b49b91e544cbd24cd6fd083e6 Mon Sep 17 00:00:00 2001 From: KeKs0r Date: Sun, 29 Mar 2026 11:27:08 +0200 Subject: [PATCH 02/12] fix(plugin-obsessiondb): add grant_type to device token poll request RFC 8628 requires the grant_type field in the device authorization token request, which better-auth's device plugin enforces. Co-Authored-By: Claude Opus 4.6 (1M context) --- packages/plugin-obsessiondb/src/auth/api-client.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/plugin-obsessiondb/src/auth/api-client.ts b/packages/plugin-obsessiondb/src/auth/api-client.ts index 4fb1746..2449976 100644 --- a/packages/plugin-obsessiondb/src/auth/api-client.ts +++ b/packages/plugin-obsessiondb/src/auth/api-client.ts @@ -63,7 +63,7 @@ export async function pollDeviceToken( 'Content-Type': 'application/json', 'User-Agent': userAgent(), }, - body: JSON.stringify({ client_id: CLIENT_ID, device_code: deviceCode }), + body: JSON.stringify({ client_id: CLIENT_ID, device_code: deviceCode, grant_type: 'urn:ietf:params:oauth:grant-type:device_code' }), }) if (!res.ok) { From ea4527269da4810f3fa1d958881afa4a1bc8ea8c Mon Sep 17 00:00:00 2001 From: KeKs0r Date: Sun, 29 Mar 2026 11:58:17 +0200 Subject: [PATCH 03/12] fix(plugin-obsessiondb): handle 400 responses in device token polling RFC 8628 device flow returns authorization_pending and slow_down as 400 responses. The !res.ok guard was throwing before the body could be parsed, preventing the existing switch/case from handling these expected states. 
Co-Authored-By: Claude Opus 4.6 (1M context) --- packages/plugin-obsessiondb/src/auth/api-client.ts | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/packages/plugin-obsessiondb/src/auth/api-client.ts b/packages/plugin-obsessiondb/src/auth/api-client.ts index 2449976..662d2b2 100644 --- a/packages/plugin-obsessiondb/src/auth/api-client.ts +++ b/packages/plugin-obsessiondb/src/auth/api-client.ts @@ -66,13 +66,12 @@ export async function pollDeviceToken( body: JSON.stringify({ client_id: CLIENT_ID, device_code: deviceCode, grant_type: 'urn:ietf:params:oauth:grant-type:device_code' }), }) - if (!res.ok) { - const text = await res.text() - throw new Error(`Token poll failed: ${res.status} ${text}`) - } - const body = (await res.json()) as TokenPollResponse + if (!body.access_token && !body.error) { + throw new Error(`Token poll failed: ${res.status} ${JSON.stringify(body)}`) + } + if (body.access_token) return body.access_token switch (body.error) { From 6c5044835a2a5f38dc3191cfc60f370aa0d23fec Mon Sep 17 00:00:00 2001 From: KeKs0r Date: Sun, 29 Mar 2026 16:41:21 +0200 Subject: [PATCH 04/12] Commit currnet Progress --- bun.lock | 12 +++--------- package.json | 2 +- packages/plugin-backfill/src/index.ts | 3 +++ packages/plugin-obsessiondb/package.json | 2 +- packages/plugin-obsessiondb/src/index.ts | 3 +++ 5 files changed, 11 insertions(+), 11 deletions(-) diff --git a/bun.lock b/bun.lock index d66ca9d..82f2106 100644 --- a/bun.lock +++ b/bun.lock @@ -18,7 +18,7 @@ "p-map": "^7.0.4", "turbo": "^2.8.20", "typescript": "^5.8.0", - "zod": "3.24.4", + "zod": "3.25.76", }, }, "apps/docs": { @@ -96,7 +96,7 @@ "@chkit/core": "workspace:*", "@orpc/client": "1.13.4", "@orpc/contract": "1.13.4", - "zod": "3.24.4", + "zod": "3.25.76", }, }, "packages/plugin-pull": { @@ -1274,7 +1274,7 @@ "youch-core": ["youch-core@0.3.3", "", { "dependencies": { "@poppinss/exception": "^1.2.2", "error-stack-parser-es": "^1.0.5" } }, 
"sha512-ho7XuGjLaJ2hWHoK8yFnsUGy2Y5uDpqSTq1FkHLK4/oqKtyUU1AFbOOxY4IpC9f0fTLjwYbslUz0Po5BpD1wrA=="], - "zod": ["zod@3.24.4", "", {}, "sha512-OdqJE9UDRPwWsrHjLN2F8bPxvwJBK22EHLWtanu0LSYr5YqzsaaW3RMgmjwr8Rypg5k+meEJdSPXJZXE/yqOMg=="], + "zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="], "zod-to-json-schema": ["zod-to-json-schema@3.25.1", "", { "peerDependencies": { "zod": "^3.25 || ^4" } }, "sha512-pM/SU9d3YAggzi6MtR4h7ruuQlqKtad8e9S0fmxcMi+ueAK5Korys/aWcV9LIIHTVbj01NdzxcnXSN+O74ZIVA=="], @@ -1282,12 +1282,8 @@ "zwitch": ["zwitch@2.0.4", "", {}, "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A=="], - "@astrojs/sitemap/zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="], - "@astrojs/telemetry/ci-info": ["ci-info@4.4.0", "", {}, "sha512-77PSwercCZU2Fc4sX94eF8k8Pxte6JAwL4/ICZLFjJLqegs7kCuAsqqj/70NQF6TvDpgFjkubQB2FW2ZZddvQg=="], - "@chkit/docs/zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="], - "@chkit/plugin-backfill/zod": ["zod@4.3.6", "", {}, "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg=="], "@chkit/plugin-codegen/zod": ["zod@4.3.6", "", {}, "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg=="], @@ -1316,8 +1312,6 @@ "astro/package-manager-detector": ["package-manager-detector@1.6.0", "", {}, "sha512-61A5ThoTiDG/C8s8UMZwSorAGwMJ0ERVGj2OjoW5pAalsNOg15+iQiPzrLJ4jhZ1HJzmC2PIHT2oEiH3R5fzNA=="], - "astro/zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="], - "boxen/type-fest": ["type-fest@4.41.0", "", {}, "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA=="], "csso/css-tree": 
["css-tree@2.2.1", "", { "dependencies": { "mdn-data": "2.0.28", "source-map-js": "^1.0.1" } }, "sha512-OA0mILzGc1kCOCSJerOeqDxDQ4HOh+G8NbOJFOTgOCzpw7fCBubk0fEyxp8AgOL/jvLgYA/uV0cMbe43ElF1JA=="], diff --git a/package.json b/package.json index f9f9716..c09af5c 100644 --- a/package.json +++ b/package.json @@ -44,7 +44,7 @@ "p-map": "^7.0.4", "turbo": "^2.8.20", "typescript": "^5.8.0", - "zod": "3.24.4" + "zod": "3.25.76" }, "dependencies": { "wrangler": "^4.65.0" diff --git a/packages/plugin-backfill/src/index.ts b/packages/plugin-backfill/src/index.ts index a781305..3420da7 100644 --- a/packages/plugin-backfill/src/index.ts +++ b/packages/plugin-backfill/src/index.ts @@ -2,6 +2,7 @@ import './table-config.js' export { backfill, createBackfillPlugin } from './plugin.js' export { executeBackfill, syncProgress } from './async-backfill.js' +export { analyzeAndChunk } from './chunking/analyze.js' export type { BackfillOptions, BackfillChunkState, @@ -11,3 +12,5 @@ export type { export type { BackfillPlugin, BackfillPluginOptions, BackfillPluginRegistration } from './types.js' export type { PluginConfig } from './options.js' export type { BackfillTableConfig } from './table-config.js' +export type { AnalyzeAndChunkInput, AnalyzeAndChunkResult } from './chunking/analyze.js' +export type { PlannedChunk, PartitionInfo, SortKeyInfo } from './chunking/types.js' diff --git a/packages/plugin-obsessiondb/package.json b/packages/plugin-obsessiondb/package.json index adf2527..77a28c6 100644 --- a/packages/plugin-obsessiondb/package.json +++ b/packages/plugin-obsessiondb/package.json @@ -44,6 +44,6 @@ "@chkit/core": "workspace:*", "@orpc/client": "1.13.4", "@orpc/contract": "1.13.4", - "zod": "3.24.4" + "zod": "3.25.76" } } diff --git a/packages/plugin-obsessiondb/src/index.ts b/packages/plugin-obsessiondb/src/index.ts index 15077db..e2717a7 100644 --- a/packages/plugin-obsessiondb/src/index.ts +++ b/packages/plugin-obsessiondb/src/index.ts @@ -7,6 +7,9 @@ import type { import { 
AUTH_COMMANDS, loadCredentials } from './auth/index.js' import { BACKFILL_EXTEND_COMMANDS, handleBackfillCommand } from './backfill/index.js' +export { loadCredentials, resolveBaseUrl, type Credentials } from './auth/index.js' +export { createJobsClient, type JobsClient } from './backfill/index.js' + export type ObsessionDBPluginOptions = Record interface PluginCommand { From 5f612bae60451593d53b7b1a6c548554bc2cd126 Mon Sep 17 00:00:00 2001 From: KeKs0r Date: Sun, 29 Mar 2026 19:13:54 +0200 Subject: [PATCH 05/12] refactor(plugin-obsessiondb): migrate remote executor and services to oRPC Replace REST API calls with oRPC client for all remote operations. The remote executor now routes SQL through workbench.query.execute instead of the removed /api/v1/ endpoints. Extract shared introspection helpers (buildIntrospectedTables, normalize functions) from @chkit/clickhouse so both local and remote executors reuse the same logic. Co-Authored-By: Claude Opus 4.6 (1M context) --- bun.lock | 16 +- package.json | 2 +- packages/clickhouse/src/index.ts | 112 +++++++------- .../src/backfill/api-client.ts | 79 ---------- .../plugin-obsessiondb/src/backfill/client.ts | 35 +---- .../plugin-obsessiondb/src/backfill/index.ts | 4 +- packages/plugin-obsessiondb/src/client.ts | 33 ++++ .../plugin-obsessiondb/src/contract/index.ts | 3 + .../contract.ts => contract/jobs.ts} | 12 +- .../src/contract/services.ts | 56 +++++++ .../src/contract/workbench.ts | 36 +++++ .../src/query/api-client.ts | 88 ----------- .../src/query/remote-executor.ts | 142 +++++++++++++++--- .../plugin-obsessiondb/src/service/api.ts | 7 +- .../plugin-obsessiondb/src/service/select.ts | 4 +- .../plugin-obsessiondb/src/service/types.ts | 9 +- 16 files changed, 337 insertions(+), 301 deletions(-) delete mode 100644 packages/plugin-obsessiondb/src/backfill/api-client.ts create mode 100644 packages/plugin-obsessiondb/src/client.ts create mode 100644 packages/plugin-obsessiondb/src/contract/index.ts rename 
packages/plugin-obsessiondb/src/{backfill/contract.ts => contract/jobs.ts} (85%) create mode 100644 packages/plugin-obsessiondb/src/contract/services.ts create mode 100644 packages/plugin-obsessiondb/src/contract/workbench.ts delete mode 100644 packages/plugin-obsessiondb/src/query/api-client.ts diff --git a/bun.lock b/bun.lock index 82f2106..1191f8c 100644 --- a/bun.lock +++ b/bun.lock @@ -16,7 +16,7 @@ "@orpc/contract": "1.13.4", "@types/node": "^24.0.0", "p-map": "^7.0.4", - "turbo": "^2.8.20", + "turbo": "^2.8.21", "typescript": "^5.8.0", "zod": "3.25.76", }, @@ -466,17 +466,17 @@ "@standard-schema/spec": ["@standard-schema/spec@1.1.0", "", {}, "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w=="], - "@turbo/darwin-64": ["@turbo/darwin-64@2.8.20", "", { "os": "darwin", "cpu": "x64" }, "sha512-FQ9EX1xMU5nbwjxXxM3yU88AQQ6Sqc6S44exPRroMcx9XZHqqppl5ymJF0Ig/z3nvQNwDmz1Gsnvxubo+nXWjQ=="], + "@turbo/darwin-64": ["@turbo/darwin-64@2.8.21", "", { "os": "darwin", "cpu": "x64" }, "sha512-kfGoM0Iw8ZNZpbds+4IzOe0hjvHldqJwUPRAjXJi3KBxg/QOZL95N893SRoMtf2aJ+jJ3dk32yPkp8rvcIjP9g=="], - "@turbo/darwin-arm64": ["@turbo/darwin-arm64@2.8.20", "", { "os": "darwin", "cpu": "arm64" }, "sha512-Gpyh9ATFGThD6/s9L95YWY54cizg/VRWl2B67h0yofG8BpHf67DFAh9nuJVKG7bY0+SBJDAo5cMur+wOl9YOYw=="], + "@turbo/darwin-arm64": ["@turbo/darwin-arm64@2.8.21", "", { "os": "darwin", "cpu": "arm64" }, "sha512-o9HEflxUEyr987x0cTUzZBhDOyL6u95JmdmlkH2VyxAw7zq2sdtM5e72y9ufv2N5SIoOBw1fVn9UES5VY5H6vQ=="], - "@turbo/linux-64": ["@turbo/linux-64@2.8.20", "", { "os": "linux", "cpu": "x64" }, "sha512-p2QxWUYyYUgUFG0b0kR+pPi8t7c9uaVlRtjTTI1AbCvVqkpjUfCcReBn6DgG/Hu8xrWdKLuyQFaLYFzQskZbcA=="], + "@turbo/linux-64": ["@turbo/linux-64@2.8.21", "", { "os": "linux", "cpu": "x64" }, "sha512-uTxlCcXWy5h1fSSymP8XSJ+AudzEHMDV3IDfKX7+DGB8kgJ+SLoTUAH7z4OFA7I/l2sznz0upPdbNNZs91YMag=="], - "@turbo/linux-arm64": ["@turbo/linux-arm64@2.8.20", "", { "os": "linux", "cpu": "arm64" }, 
"sha512-Gn5yjlZGLRZWarLWqdQzv0wMqyBNIdq1QLi48F1oY5Lo9kiohuf7BPQWtWxeNVS2NgJ1+nb/DzK1JduYC4AWOA=="], + "@turbo/linux-arm64": ["@turbo/linux-arm64@2.8.21", "", { "os": "linux", "cpu": "arm64" }, "sha512-cdHIcxNcihHHkCHp0Y4Zb60K4Qz+CK4xw1gb6s/t/9o4SMeMj+hTBCtoW6QpPnl9xPYmxuTou8Zw6+cylTnREg=="], - "@turbo/windows-64": ["@turbo/windows-64@2.8.20", "", { "os": "win32", "cpu": "x64" }, "sha512-vyaDpYk/8T6Qz5V/X+ihKvKFEZFUoC0oxYpC1sZanK6gaESJlmV3cMRT3Qhcg4D2VxvtC2Jjs9IRkrZGL+exLw=="], + "@turbo/windows-64": ["@turbo/windows-64@2.8.21", "", { "os": "win32", "cpu": "x64" }, "sha512-/iBj4OzbqEY8CX+eaeKbBTMZv2CLXNrt0692F7HnK7LcyYwyDecaAiSET6ZzL4opT7sbwkKvzAC/fhqT3Quu1A=="], - "@turbo/windows-arm64": ["@turbo/windows-arm64@2.8.20", "", { "os": "win32", "cpu": "arm64" }, "sha512-voicVULvUV5yaGXo0Iue13BcHGYW3u0VgqSbfQwBaHbpj1zLjYV4KIe+7fYIo6DO8FVUJzxFps3ODCQG/Wy2Qw=="], + "@turbo/windows-arm64": ["@turbo/windows-arm64@2.8.21", "", { "os": "win32", "cpu": "arm64" }, "sha512-95tMA/ZbIidJFUUtkmqioQ1gf3n3I1YbRP3ZgVdWTVn2qVbkodcIdGXBKRHHrIbRsLRl99SiHi/L7IxhpZDagQ=="], "@types/debug": ["@types/debug@4.1.12", "", { "dependencies": { "@types/ms": "*" } }, "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ=="], @@ -1186,7 +1186,7 @@ "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], - "turbo": ["turbo@2.8.20", "", { "optionalDependencies": { "@turbo/darwin-64": "2.8.20", "@turbo/darwin-arm64": "2.8.20", "@turbo/linux-64": "2.8.20", "@turbo/linux-arm64": "2.8.20", "@turbo/windows-64": "2.8.20", "@turbo/windows-arm64": "2.8.20" }, "bin": { "turbo": "bin/turbo" } }, "sha512-Rb4qk5YT8RUwwdXtkLpkVhNEe/lor6+WV7S5tTlLpxSz6MjV5Qi8jGNn4gS6NAvrYGA/rNrE6YUQM85sCZUDbQ=="], + "turbo": ["turbo@2.8.21", "", { "optionalDependencies": { "@turbo/darwin-64": "2.8.21", "@turbo/darwin-arm64": "2.8.21", "@turbo/linux-64": "2.8.21", "@turbo/linux-arm64": "2.8.21", "@turbo/windows-64": 
"2.8.21", "@turbo/windows-arm64": "2.8.21" }, "bin": { "turbo": "bin/turbo" } }, "sha512-FlJ8OD5Qcp0jTAM7E4a/RhUzRNds2GzKlyxHKA6N247VLy628rrxAGlMpIXSz6VB430+TiQDJ/SMl6PL1lu6wQ=="], "type-fest": ["type-fest@5.5.0", "", { "dependencies": { "tagged-tag": "^1.0.0" } }, "sha512-PlBfpQwiUvGViBNX84Yxwjsdhd1TUlXr6zjX7eoirtCPIr08NAmxwa+fcYBTeRQxHo9YC9wwF3m9i700sHma8g=="], diff --git a/package.json b/package.json index c09af5c..d51b17f 100644 --- a/package.json +++ b/package.json @@ -42,7 +42,7 @@ "@orpc/contract": "1.13.4", "@types/node": "^24.0.0", "p-map": "^7.0.4", - "turbo": "^2.8.20", + "turbo": "^2.8.21", "typescript": "^5.8.0", "zod": "3.25.76" }, diff --git a/packages/clickhouse/src/index.ts b/packages/clickhouse/src/index.ts index 2626b24..6081976 100644 --- a/packages/clickhouse/src/index.ts +++ b/packages/clickhouse/src/index.ts @@ -58,14 +58,14 @@ export interface SchemaObjectRef { name: string } -interface SystemTableRow { +export interface SystemTableRow { database: string name: string engine: string create_table_query?: string } -interface SystemColumnRow { +export interface SystemColumnRow { database: string table: string name: string @@ -76,7 +76,7 @@ interface SystemColumnRow { position: number } -interface SystemSkippingIndexRow { +export interface SystemSkippingIndexRow { database: string table: string name: string @@ -119,7 +119,7 @@ export function inferSchemaKindFromEngine(engine: string): SchemaObjectRef['kind } -function normalizeColumnFromSystemRow(row: SystemColumnRow): ColumnDefinition { +export function normalizeColumnFromSystemRow(row: SystemColumnRow): ColumnDefinition { const nullableMatch = row.type.match(/^Nullable\((.+)\)$/) const type = nullableMatch?.[1] ? 
nullableMatch[1] : row.type const nullable = Boolean(nullableMatch?.[1]) @@ -155,7 +155,7 @@ function parseIndexType(value: string): Pick inferSchemaKindFromEngine(row.engine) === 'table') + if (tableRows.length === 0) return [] + + const columnsByTable = new Map() + for (const row of columns) { + const key = `${row.database}.${row.table}` + const rows = columnsByTable.get(key) + if (rows) rows.push(row) + else columnsByTable.set(key, [row]) + } + + const indexesByTable = new Map() + for (const row of indexes) { + const key = `${row.database}.${row.table}` + const rows = indexesByTable.get(key) + if (rows) rows.push(row) + else indexesByTable.set(key, [row]) + } + + return tableRows + .map((row) => { + const key = `${row.database}.${row.name}` + const columnRows = (columnsByTable.get(key) ?? []).sort((a, b) => a.position - b.position) + const indexRows = indexesByTable.get(key) ?? [] + return { + database: row.database, + name: row.name, + engine: parseEngineFromCreateTableQuery(row.create_table_query), + primaryKey: parsePrimaryKeyFromCreateTableQuery(row.create_table_query), + orderBy: parseOrderByFromCreateTableQuery(row.create_table_query), + uniqueKey: parseUniqueKeyFromCreateTableQuery(row.create_table_query), + partitionBy: parsePartitionByFromCreateTableQuery(row.create_table_query), + columns: columnRows.map(normalizeColumnFromSystemRow), + settings: parseSettingsFromCreateTableQuery(row.create_table_query), + indexes: indexRows.map(normalizeIndexFromSystemRow), + projections: parseProjectionsFromCreateTableQuery(row.create_table_query), + ttl: parseTTLFromCreateTableQuery(row.create_table_query), + } + }) + .sort((a, b) => { + const dbOrder = a.database.localeCompare(b.database) + if (dbOrder !== 0) return dbOrder + return a.name.localeCompare(b.name) + }) +} + const NETWORK_ERROR_LABELS: Record = { ECONNREFUSED: 'connection refused', ENOTFOUND: 'host not found', @@ -378,9 +429,6 @@ FROM system.tables WHERE is_temporary = 0 AND database IN 
(${quotedDatabases})` ) - const tableRows = tables.filter((row) => inferSchemaKindFromEngine(row.engine) === 'table') - if (tableRows.length === 0) return [] - const columns = await this.query( `SELECT database, table, name, type, default_kind, default_expression, comment, position FROM system.columns @@ -392,53 +440,7 @@ FROM system.data_skipping_indices WHERE database IN (${quotedDatabases})` ) - const columnsByTable = new Map() - for (const row of columns) { - const key = `${row.database}.${row.table}` - const rows = columnsByTable.get(key) - if (rows) { - rows.push(row) - } else { - columnsByTable.set(key, [row]) - } - } - - const indexesByTable = new Map() - for (const row of indexes) { - const key = `${row.database}.${row.table}` - const rows = indexesByTable.get(key) - if (rows) { - rows.push(row) - } else { - indexesByTable.set(key, [row]) - } - } - - return tableRows - .map((row) => { - const key = `${row.database}.${row.name}` - const columnRows = (columnsByTable.get(key) ?? []).sort((a, b) => a.position - b.position) - const indexRows = indexesByTable.get(key) ?? 
[] - return { - database: row.database, - name: row.name, - engine: parseEngineFromCreateTableQuery(row.create_table_query), - primaryKey: parsePrimaryKeyFromCreateTableQuery(row.create_table_query), - orderBy: parseOrderByFromCreateTableQuery(row.create_table_query), - uniqueKey: parseUniqueKeyFromCreateTableQuery(row.create_table_query), - partitionBy: parsePartitionByFromCreateTableQuery(row.create_table_query), - columns: columnRows.map(normalizeColumnFromSystemRow), - settings: parseSettingsFromCreateTableQuery(row.create_table_query), - indexes: indexRows.map(normalizeIndexFromSystemRow), - projections: parseProjectionsFromCreateTableQuery(row.create_table_query), - ttl: parseTTLFromCreateTableQuery(row.create_table_query), - } - }) - .sort((a, b) => { - const dbOrder = a.database.localeCompare(b.database) - if (dbOrder !== 0) return dbOrder - return a.name.localeCompare(b.name) - }) + return buildIntrospectedTables(tables, columns, indexes) }, } } diff --git a/packages/plugin-obsessiondb/src/backfill/api-client.ts b/packages/plugin-obsessiondb/src/backfill/api-client.ts deleted file mode 100644 index 12e6e9a..0000000 --- a/packages/plugin-obsessiondb/src/backfill/api-client.ts +++ /dev/null @@ -1,79 +0,0 @@ -import type { Credentials } from '../auth/index.js' -import { apiRequest, isSessionExpiredError } from '../api-request.js' - -export { isSessionExpiredError } - -export interface RemotePlanResponse { - ok: boolean - plan_id?: string - error?: string - [key: string]: unknown -} - -export interface RemoteRunResponse { - ok: boolean - run_id?: string - error?: string - [key: string]: unknown -} - -export interface RemoteStatusResponse { - ok: boolean - status?: string - error?: string - [key: string]: unknown -} - -export interface RemoteCancelResponse { - ok: boolean - error?: string - [key: string]: unknown -} - -export interface RemoteDoctorResponse { - ok: boolean - error?: string - [key: string]: unknown -} - -export async function submitBackfillPlan( 
- input: Record, - creds: Credentials -): Promise { - return apiRequest('/api/v1/backfill/plan', creds, input) -} - -export async function runRemoteBackfill( - input: Record, - creds: Credentials -): Promise { - return apiRequest('/api/v1/backfill/run', creds, input) -} - -export async function resumeRemoteBackfill( - input: Record, - creds: Credentials -): Promise { - return apiRequest('/api/v1/backfill/resume', creds, input) -} - -export async function getRemoteBackfillStatus( - input: Record, - creds: Credentials -): Promise { - return apiRequest('/api/v1/backfill/status', creds, input) -} - -export async function cancelRemoteBackfill( - input: Record, - creds: Credentials -): Promise { - return apiRequest('/api/v1/backfill/cancel', creds, input) -} - -export async function getRemoteBackfillDoctor( - input: Record, - creds: Credentials -): Promise { - return apiRequest('/api/v1/backfill/doctor', creds, input) -} diff --git a/packages/plugin-obsessiondb/src/backfill/client.ts b/packages/plugin-obsessiondb/src/backfill/client.ts index 1930741..96642aa 100644 --- a/packages/plugin-obsessiondb/src/backfill/client.ts +++ b/packages/plugin-obsessiondb/src/backfill/client.ts @@ -1,36 +1,9 @@ -import { createORPCClient } from '@orpc/client' -import { RPCLink } from '@orpc/client/fetch' -import type { ContractRouterClient } from '@orpc/contract' import type { Credentials } from '../auth/index.js' -import { jobsContract } from './contract.js' +import { createApiClient, type ApiClient } from '../client.js' +export { SessionExpiredError, isSessionExpiredError } from '../api-request.js' -export type JobsClient = ContractRouterClient - -export class SessionExpiredError extends Error { - constructor() { - super('Session expired. 
Run `chkit obsessiondb login` to re-authenticate.') - } -} - -export function isSessionExpiredError(error: unknown): boolean { - return error instanceof SessionExpiredError -} +export type JobsClient = ApiClient['jobs'] export function createJobsClient(creds: Credentials): JobsClient { - const link = new RPCLink({ - url: `${creds.base_url}/rpc/jobs`, - headers: () => ({ - Authorization: `Bearer ${creds.access_token}`, - 'User-Agent': 'chkit-cli', - }), - fetch: async (input, init) => { - const res = await globalThis.fetch(input, init) - if (res.status === 401) { - throw new SessionExpiredError() - } - return res - }, - }) - - return createORPCClient(link) + return createApiClient(creds).jobs } diff --git a/packages/plugin-obsessiondb/src/backfill/index.ts b/packages/plugin-obsessiondb/src/backfill/index.ts index 8d32ff9..4098174 100644 --- a/packages/plugin-obsessiondb/src/backfill/index.ts +++ b/packages/plugin-obsessiondb/src/backfill/index.ts @@ -3,11 +3,9 @@ export { createJobsClient, type JobsClient } from './client.js' export { jobsContract, jobStatusSchema, - taskStatusSchema, - jobTaskSchema, jobSummarySchema, jobDetailSchema, -} from './contract.js' +} from '../contract/jobs.js' export const BACKFILL_EXTEND_COMMANDS = [ { diff --git a/packages/plugin-obsessiondb/src/client.ts b/packages/plugin-obsessiondb/src/client.ts new file mode 100644 index 0000000..ca50c4c --- /dev/null +++ b/packages/plugin-obsessiondb/src/client.ts @@ -0,0 +1,33 @@ +import { createORPCClient } from '@orpc/client' +import { RPCLink } from '@orpc/client/fetch' +import type { ContractRouterClient } from '@orpc/contract' +import { SessionExpiredError } from './api-request.js' +import type { Credentials } from './auth/index.js' +import { servicesContract, jobsContract, workbenchContract } from './contract/index.js' + +const contract = { + services: servicesContract, + jobs: jobsContract, + workbench: workbenchContract, +} + +export type ApiClient = ContractRouterClient + +export 
function createApiClient(creds: Credentials): ApiClient { + const link = new RPCLink({ + url: `${creds.base_url}/rpc`, + headers: () => ({ + Authorization: `Bearer ${creds.access_token}`, + 'User-Agent': 'chkit-cli', + }), + fetch: async (input, init) => { + const res = await globalThis.fetch(input, init) + if (res.status === 401) { + throw new SessionExpiredError() + } + return res + }, + }) + + return createORPCClient(link) +} diff --git a/packages/plugin-obsessiondb/src/contract/index.ts b/packages/plugin-obsessiondb/src/contract/index.ts new file mode 100644 index 0000000..1e22b0a --- /dev/null +++ b/packages/plugin-obsessiondb/src/contract/index.ts @@ -0,0 +1,3 @@ +export { serviceSchema, serviceStatusSchema, servicesContract } from './services.js' +export { jobsContract, jobDetailSchema, jobSummarySchema, jobStatusSchema } from './jobs.js' +export { workbenchContract } from './workbench.js' diff --git a/packages/plugin-obsessiondb/src/backfill/contract.ts b/packages/plugin-obsessiondb/src/contract/jobs.ts similarity index 85% rename from packages/plugin-obsessiondb/src/backfill/contract.ts rename to packages/plugin-obsessiondb/src/contract/jobs.ts index 1d1423d..6b8364d 100644 --- a/packages/plugin-obsessiondb/src/backfill/contract.ts +++ b/packages/plugin-obsessiondb/src/contract/jobs.ts @@ -1,3 +1,7 @@ +/** + * Copied from @obsessiondb/feature-jobs-contract — will be replaced + * by a direct dependency once the contract package is published. 
+ */ import { oc } from '@orpc/contract' import { z } from 'zod' @@ -60,11 +64,15 @@ export const jobsContract = { ) .output(z.object({ jobId: z.string() })), - get: oc.input(z.object({ jobId: z.string() })).output(jobDetailSchema), + get: oc + .input(z.object({ jobId: z.string() })) + .output(jobDetailSchema), list: oc .input(z.object({ serviceId: z.string() })) .output(z.object({ jobs: z.array(jobSummarySchema) })), - cancel: oc.input(z.object({ jobId: z.string() })).output(z.object({})), + cancel: oc + .input(z.object({ jobId: z.string() })) + .output(z.object({})), } diff --git a/packages/plugin-obsessiondb/src/contract/services.ts b/packages/plugin-obsessiondb/src/contract/services.ts new file mode 100644 index 0000000..3782fb1 --- /dev/null +++ b/packages/plugin-obsessiondb/src/contract/services.ts @@ -0,0 +1,56 @@ +/** + * Copied from @obsessiondb/contract-console — will be replaced + * by a direct dependency once the contract package is published. + */ +import { oc } from '@orpc/contract' +import { z } from 'zod' + +export const serviceStatusSchema = z.enum([ + 'provisioning', + 'running', + 'scaling', + 'stopping', + 'stopped', + 'starting', + 'terminating', + 'terminated', + 'error', +]) + +export const serviceSchema = z.object({ + id: z.string(), + name: z.string(), + status: serviceStatusSchema, + tier: z.number().int(), + nodes: z.number().int(), + connectionUrl: z.string().nullable(), + connectionUsername: z.string().nullable(), + desiredStatus: z.enum(['running', 'stopped', 'terminated']), + desiredTier: z.number().int(), + desiredNodes: z.number().int(), + createdAt: z.string().datetime(), + managed: z.boolean(), +}) + +export const servicesContract = { + list: oc + .input(z.object({})) + .output(z.object({ services: z.array(serviceSchema) })), + + listAll: oc.input(z.object({})).output( + z.object({ + organizations: z.array( + z.object({ + id: z.string(), + name: z.string(), + slug: z.string(), + services: z.array(serviceSchema), + }), + ), + }), + 
), + + get: oc + .input(z.object({ serviceId: z.string() })) + .output(serviceSchema), +} diff --git a/packages/plugin-obsessiondb/src/contract/workbench.ts b/packages/plugin-obsessiondb/src/contract/workbench.ts new file mode 100644 index 0000000..c6d7843 --- /dev/null +++ b/packages/plugin-obsessiondb/src/contract/workbench.ts @@ -0,0 +1,36 @@ +/** + * Copied from @obsessiondb/feature-workbench-contract — will be replaced + * by a direct dependency once the contract package is published. + */ +import { oc } from '@orpc/contract' +import { z } from 'zod' + +const queryResultSchema = z.object({ + data: z.array(z.record(z.unknown())), + meta: z.array(z.object({ name: z.string(), type: z.string() })), + rows: z.number().int(), + statistics: z + .object({ + bytes_read: z.number().int().optional(), + rows_read: z.number().int().optional(), + elapsed: z.number().optional(), + }) + .optional(), + message: z.string().optional(), + error: z.string().optional(), +}) + +export const workbenchContract = { + query: { + execute: oc + .input( + z.object({ + serviceId: z.string(), + query: z.string().min(1), + settings: z.record(z.union([z.string(), z.number()])).optional(), + database: z.string().optional(), + }), + ) + .output(queryResultSchema), + }, +} diff --git a/packages/plugin-obsessiondb/src/query/api-client.ts b/packages/plugin-obsessiondb/src/query/api-client.ts deleted file mode 100644 index 24cecfe..0000000 --- a/packages/plugin-obsessiondb/src/query/api-client.ts +++ /dev/null @@ -1,88 +0,0 @@ -import type { Credentials } from '../auth/index.js' -import { apiRequest } from '../api-request.js' - -function queryPath(serviceId: string, action: string): string { - return `/api/v1/services/${serviceId}/query/${action}` -} - -export async function remoteCommand( - serviceId: string, - sql: string, - creds: Credentials -): Promise { - await apiRequest<{ ok: boolean }>(queryPath(serviceId, 'command'), creds, { sql }) -} - -export async function remoteQuery( - serviceId: 
string, - sql: string, - creds: Credentials -): Promise { - const res = await apiRequest<{ rows: T[] }>(queryPath(serviceId, 'query'), creds, { sql }) - return res.rows -} - -export async function remoteInsert>( - serviceId: string, - params: { table: string; values: T[] }, - creds: Credentials -): Promise { - await apiRequest<{ ok: boolean }>(queryPath(serviceId, 'insert'), creds, params) -} - -export async function remoteSubmit( - serviceId: string, - sql: string, - creds: Credentials, - queryId?: string -): Promise { - const res = await apiRequest<{ query_id: string }>(queryPath(serviceId, 'submit'), creds, { - sql, - query_id: queryId, - }) - return res.query_id -} - -export async function remoteQueryStatus( - serviceId: string, - queryId: string, - creds: Credentials, - options?: { afterTime?: string } -): Promise<{ - status: 'running' | 'finished' | 'failed' | 'unknown' - readRows?: number - readBytes?: number - writtenRows?: number - writtenBytes?: number - elapsedMs?: number - durationMs?: number - error?: string -}> { - return apiRequest(queryPath(serviceId, 'status'), creds, { - query_id: queryId, - ...options, - }) -} - -export async function remoteListSchemaObjects( - serviceId: string, - creds: Credentials -): Promise> { - const res = await apiRequest<{ - objects: Array<{ kind: 'table' | 'view' | 'materialized_view'; database: string; name: string }> - }>(queryPath(serviceId, 'schema-objects'), creds) - return res.objects -} - -export async function remoteListTableDetails( - serviceId: string, - databases: string[], - creds: Credentials -): Promise { - const res = await apiRequest<{ tables: unknown[] }>( - queryPath(serviceId, 'table-details'), - creds, - { databases } - ) - return res.tables -} diff --git a/packages/plugin-obsessiondb/src/query/remote-executor.ts b/packages/plugin-obsessiondb/src/query/remote-executor.ts index 678c18e..ef240ce 100644 --- a/packages/plugin-obsessiondb/src/query/remote-executor.ts +++ 
b/packages/plugin-obsessiondb/src/query/remote-executor.ts @@ -1,47 +1,141 @@ -import type { ClickHouseExecutor } from '@chkit/clickhouse' -import type { Credentials } from '../auth/index.js' +import type { ClickHouseExecutor, QueryStatus, SchemaObjectRef } from '@chkit/clickhouse' import { - remoteCommand, - remoteInsert, - remoteListSchemaObjects, - remoteListTableDetails, - remoteQuery, - remoteQueryStatus, - remoteSubmit, -} from './api-client.js' + buildIntrospectedTables, + inferSchemaKindFromEngine, + type SystemColumnRow, + type SystemSkippingIndexRow, + type SystemTableRow, +} from '@chkit/clickhouse' +import type { Credentials } from '../auth/index.js' +import { createApiClient } from '../client.js' export function createRemoteExecutor(deps: { credentials: Credentials serviceId: string }): ClickHouseExecutor { const { credentials, serviceId } = deps + const client = createApiClient(credentials) - return { + const executor: ClickHouseExecutor = { async command(sql) { - await remoteCommand(serviceId, sql, credentials) + await client.workbench.query.execute({ serviceId, query: sql }) }, - async query(sql: string) { - return remoteQuery(serviceId, sql, credentials) + + async query(sql: string): Promise { + const res = await client.workbench.query.execute({ serviceId, query: sql }) + return res.data as T[] }, + async insert>(params: { table: string; values: T[] }) { - await remoteInsert(serviceId, params, credentials) + if (params.values.length === 0) return + const columns = Object.keys(params.values[0]!) + const rows = params.values + .map( + (row) => + `(${columns.map((col) => { + const val = row[col] + if (val === null || val === undefined) return 'NULL' + if (typeof val === 'number') return String(val) + return `'${String(val).replace(/'/g, "\\'")}'` + }).join(', ')})`, + ) + .join(', ') + await executor.command(`INSERT INTO ${params.table} (${columns.join(', ')}) VALUES ${rows}`) }, + async submit(sql, queryId?) 
{ - return remoteSubmit(serviceId, sql, credentials, queryId) + await client.workbench.query.execute({ + serviceId, + query: sql, + settings: queryId ? { query_id: queryId } : undefined, + }) + return queryId ?? 'submitted' }, + async queryStatus(queryId, options?) { - return remoteQueryStatus(serviceId, queryId, credentials, options) + const afterFilter = options?.afterTime + ? `AND event_time >= '${options.afterTime}'` + : '' + + const running = await executor.query<{ query_id: string }>( + `SELECT query_id FROM system.processes WHERE query_id = '${queryId}' LIMIT 1`, + ) + if (running.length > 0) return { status: 'running' as const } + + const log = await executor.query<{ + type: string + written_rows: string + written_bytes: string + query_duration_ms: string + exception: string + }>( + `SELECT type, written_rows, written_bytes, query_duration_ms, exception +FROM system.query_log +WHERE query_id = '${queryId}' + AND type IN ('QueryFinish', 'ExceptionWhileProcessing') + ${afterFilter} +ORDER BY event_time DESC +LIMIT 1`, + ) + + if (log.length === 0) return { status: 'unknown' as const } + const row = log[0]! 
+ + if (row.type === 'QueryFinish') { + return { + status: 'finished' as const, + writtenRows: Number(row.written_rows), + writtenBytes: Number(row.written_bytes), + durationMs: Number(row.query_duration_ms), + } + } + + return { + status: 'failed' as const, + durationMs: Number(row.query_duration_ms), + error: row.exception, + } satisfies QueryStatus }, + async listSchemaObjects() { - return remoteListSchemaObjects(serviceId, credentials) + const rows = await executor.query<{ database: string; name: string; engine: string }>( + `SELECT database, name, engine +FROM system.tables +WHERE is_temporary = 0 + AND database NOT IN ('system', 'information_schema', 'INFORMATION_SCHEMA') + AND name NOT LIKE '_chkit_%'`, + ) + + const out: SchemaObjectRef[] = [] + for (const row of rows) { + const kind = inferSchemaKindFromEngine(row.engine) + if (!kind) continue + out.push({ kind, database: row.database, name: row.name }) + } + return out }, + async listTableDetails(databases) { - return remoteListTableDetails(serviceId, databases, credentials) as ReturnType< - ClickHouseExecutor['listTableDetails'] - > - }, - async close() { - // No-op — remote executor has no persistent connection + if (databases.length === 0) return [] + const quoted = databases.map((db) => `'${db.replace(/'/g, "''")}'`).join(', ') + + const [tables, columns, indexes] = await Promise.all([ + executor.query( + `SELECT database, name, engine, create_table_query FROM system.tables WHERE is_temporary = 0 AND database IN (${quoted})`, + ), + executor.query( + `SELECT database, \`table\`, name, type, default_kind, default_expression, comment, position FROM system.columns WHERE database IN (${quoted})`, + ), + executor.query( + `SELECT database, \`table\`, name, expr, type, granularity FROM system.data_skipping_indices WHERE database IN (${quoted})`, + ), + ]) + + return buildIntrospectedTables(tables, columns, indexes) }, + + async close() {}, } + + return executor } diff --git 
a/packages/plugin-obsessiondb/src/service/api.ts b/packages/plugin-obsessiondb/src/service/api.ts index 15e5803..33f3e77 100644 --- a/packages/plugin-obsessiondb/src/service/api.ts +++ b/packages/plugin-obsessiondb/src/service/api.ts @@ -1,8 +1,9 @@ import type { Credentials } from '../auth/index.js' -import { apiRequest } from '../api-request.js' +import { createApiClient } from '../client.js' import type { Service } from './types.js' export async function listServices(creds: Credentials): Promise { - const res = await apiRequest<{ services: Service[] }>('/api/v1/services', creds) - return res.services + const client = createApiClient(creds) + const res = await client.services.listAll({}) + return res.organizations.flatMap((org) => org.services) } diff --git a/packages/plugin-obsessiondb/src/service/select.ts b/packages/plugin-obsessiondb/src/service/select.ts index 1f1588d..5898a39 100644 --- a/packages/plugin-obsessiondb/src/service/select.ts +++ b/packages/plugin-obsessiondb/src/service/select.ts @@ -13,13 +13,13 @@ export async function selectServiceInteractive( if (services.length === 1) { const service = services[0]! - print(`Auto-selected service: ${service.name}${service.region ? ` (${service.region})` : ''}`) + print(`Auto-selected service: ${service.name} (${service.status})`) return service } print('\nAvailable services:') for (const [i, service] of services.entries()) { - print(` ${i + 1}. ${service.name}${service.region ? ` (${service.region})` : ''}`) + print(` ${i + 1}. 
${service.name} (${service.status})`) } const rl = createInterface({ input: process.stdin, output: process.stdout }) diff --git a/packages/plugin-obsessiondb/src/service/types.ts b/packages/plugin-obsessiondb/src/service/types.ts index c7d30e0..69072be 100644 --- a/packages/plugin-obsessiondb/src/service/types.ts +++ b/packages/plugin-obsessiondb/src/service/types.ts @@ -1,8 +1,7 @@ -export interface Service { - id: string - name: string - region?: string -} +import type { z } from 'zod' +import type { serviceSchema } from '../contract/index.js' + +export type Service = z.infer export interface SelectedService { service_id: string From 2f1767f66f4457ebb83a6680e3ee08d977ff4ebe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marc=20H=C3=B6ffl?= Date: Sun, 29 Mar 2026 22:25:43 +0200 Subject: [PATCH 06/12] feat(cli): add debug logging via CHKIT_DEBUG env variable (#105) Adds a lightweight stderr debug logger gated by CHKIT_DEBUG=1. Instruments config loading, command dispatch, all plugin lifecycle hooks, ClickHouse executor (queries, timing, errors), journal operations, schema loading, and per-command details (migrate, generate, status, drift, check). 
Co-authored-by: Claude Opus 4.6 --- .changeset/add-debug-logging.md | 5 + packages/cli/src/bin/chkit.ts | 4 + packages/cli/src/bin/command-dispatch.ts | 3 + packages/cli/src/bin/commands/check.ts | 3 + packages/cli/src/bin/commands/drift.ts | 4 + packages/cli/src/bin/commands/generate.ts | 7 ++ packages/cli/src/bin/commands/migrate.ts | 8 ++ packages/cli/src/bin/commands/status.ts | 3 + packages/cli/src/bin/config.ts | 20 +++- packages/cli/src/bin/debug.ts | 22 ++++ packages/cli/src/bin/journal-store.ts | 23 ++-- packages/cli/src/bin/plugin-runtime.ts | 135 +++++++++++++++++++++- packages/cli/src/bin/schema-loader.ts | 7 +- 13 files changed, 223 insertions(+), 21 deletions(-) create mode 100644 .changeset/add-debug-logging.md create mode 100644 packages/cli/src/bin/debug.ts diff --git a/.changeset/add-debug-logging.md b/.changeset/add-debug-logging.md new file mode 100644 index 0000000..a4058fb --- /dev/null +++ b/.changeset/add-debug-logging.md @@ -0,0 +1,5 @@ +--- +"chkit": patch +--- + +Add debug logging via `CHKIT_DEBUG=1` environment variable. Logs config loading, command dispatch, plugin lifecycle hooks, ClickHouse queries with timing, journal operations, and per-command details to stderr. 
diff --git a/packages/cli/src/bin/chkit.ts b/packages/cli/src/bin/chkit.ts index cc3ec5b..49e300c 100644 --- a/packages/cli/src/bin/chkit.ts +++ b/packages/cli/src/bin/chkit.ts @@ -16,6 +16,7 @@ import { formatGlobalHelp, formatCommandHelp } from './help.js' import { loadPluginRuntime } from './plugin-runtime.js' import { getInternalPlugins } from './internal-plugins/index.js' import { CLI_VERSION } from './version.js' +import { debug } from './debug.js' const WELL_KNOWN_PLUGIN_COMMANDS: Record = { codegen: 'Codegen', @@ -74,6 +75,7 @@ function collectPluginCommands(runtime: Awaited { const argv = process.argv.slice(2) const commandName = argv[0] + debug('cli', `chkit ${CLI_VERSION} — argv: [${argv.join(', ')}]`) if (!commandName || commandName === '-h' || commandName === '--help') { const configPathArg = extractConfigPath(argv) @@ -135,6 +137,7 @@ async function main(): Promise { }) const resolved = registry.get(commandName) + debug('cli', `command "${commandName}" resolved: ${resolved ? (resolved.isPlugin ? 'plugin' : 'core') : 'not found'}`) if (!resolved) { const wellKnown = WELL_KNOWN_PLUGIN_COMMANDS[commandName] @@ -188,6 +191,7 @@ main() } catch { // onComplete errors must not mask the original error } + debug('cli', 'fatal error', error instanceof Error ? { message: error.message, stack: error.stack, ...(('code' in error) ? 
{ code: (error as NodeJS.ErrnoException).code } : {}) } : error) console.error(formatFatalError(error)) process.exit(1) }) diff --git a/packages/cli/src/bin/command-dispatch.ts b/packages/cli/src/bin/command-dispatch.ts index 73d9831..757ae8d 100644 --- a/packages/cli/src/bin/command-dispatch.ts +++ b/packages/cli/src/bin/command-dispatch.ts @@ -3,6 +3,7 @@ import { parseFlags, UnknownFlagError, MissingFlagValueError, type ParsedFlags } import { typedFlags } from '../plugins.js' import type { CommandRegistry, RegisteredCommand } from './command-registry.js' import { resolveDirs } from './config.js' +import { debug } from './debug.js' import { GLOBAL_FLAGS } from './global-flags.js' import { printOutput } from './json-output.js' import type { PluginRuntime } from './plugin-runtime.js' @@ -214,8 +215,10 @@ export async function runResolvedCommand(input: { onAmbiguousPluginSubcommand?: () => void }): Promise { if (input.resolved.isPlugin && !input.resolved.run) { + debug('dispatch', `routing to plugin command "${input.commandName}"`) await runPluginCommand(input) return } + debug('dispatch', `routing to core command "${input.commandName}"`) await runCoreOrBuiltinCommand(input) } diff --git a/packages/cli/src/bin/commands/check.ts b/packages/cli/src/bin/commands/check.ts index 5e0e94d..20a7e3a 100644 --- a/packages/cli/src/bin/commands/check.ts +++ b/packages/cli/src/bin/commands/check.ts @@ -2,6 +2,7 @@ import { mkdir } from 'node:fs/promises' import { summarizeDriftReasons } from '../../drift.js' import { typedFlags, type CommandDef, type CommandRunContext } from '../../plugins.js' +import { debug } from '../debug.js' import { GLOBAL_FLAGS } from '../global-flags.js' import { emitJson } from '../json-output.js' import { createJournalStore } from '../journal-store.js' @@ -25,6 +26,7 @@ async function cmdCheck(runCtx: CommandRunContext): Promise { const jsonMode = f['--json'] === true const tableSelector = f['--table'] const { migrationsDir, metaDir } = dirs + 
debug('check', `flags: strict=${strict}, json=${jsonMode}`) await mkdir(migrationsDir, { recursive: true }) if (!ctx.hasExecutor) { @@ -100,6 +102,7 @@ async function cmdCheck(runCtx: CommandRunContext): Promise { failedChecks.push(`plugin:${result.plugin}`) } } + debug('check', `results: pending=${pending.length}, checksumMismatches=${checksumMismatches.length}, drift=${drift?.drifted ?? 'n/a'}, pluginChecks=${pluginResults.length}, failedChecks=[${failedChecks.join(', ')}]`) const ok = failedChecks.length === 0 const driftReasonSummary = drift ? summarizeDriftReasons({ diff --git a/packages/cli/src/bin/commands/drift.ts b/packages/cli/src/bin/commands/drift.ts index f1cad00..81392c9 100644 --- a/packages/cli/src/bin/commands/drift.ts +++ b/packages/cli/src/bin/commands/drift.ts @@ -11,6 +11,7 @@ import { type ObjectDriftDetail, type TableDriftDetail, } from '../../drift.js' +import { debug } from '../debug.js' import { emitJson } from '../json-output.js' import { readSnapshot } from '../migration-store.js' import { resolveTableScope, tableKeysFromDefinitions, type TableScope } from '../table-scope.js' @@ -49,6 +50,7 @@ export async function buildDriftPayload( if (!executor) throw new Error('clickhouse config is required for drift checks') const db = executor + debug('drift', `building drift payload — expected: ${snapshot.definitions.length} definitions`) let actualObjects: SchemaObjectRef[] try { actualObjects = await db.listSchemaObjects() @@ -115,6 +117,8 @@ export async function buildDriftPayload( .filter((item): item is NonNullable => item !== null) .sort((a, b) => a.table.localeCompare(b.table)) + debug('drift', `comparison: missing=${missing.length}, extra=${extra.length}, kindMismatches=${kindMismatches.length}, objectDrift=${objectDrift.length}, tableDrift=${tableDrift.length}`) + const drifted = missing.length > 0 || extra.length > 0 || diff --git a/packages/cli/src/bin/commands/generate.ts b/packages/cli/src/bin/commands/generate.ts index 
ad1c5cf..50484ec 100644 --- a/packages/cli/src/bin/commands/generate.ts +++ b/packages/cli/src/bin/commands/generate.ts @@ -35,6 +35,7 @@ import { resolveActiveTableMappings, } from './generate/rename-mappings.js' import { emitGenerateApplyOutput, emitGeneratePlanOutput } from './generate/output.js' +import { debug } from '../debug.js' const GENERATE_FLAGS = defineFlags([ { name: '--name', type: 'string', description: 'Migration name', placeholder: '' }, @@ -60,6 +61,8 @@ async function cmdGenerate(ctx: CommandRunContext): Promise { const planMode = f['--dryrun'] === true const jsonMode = f['--json'] === true + debug('generate', `flags: name=${migrationName ?? '(auto)'}, dryrun=${planMode}, json=${jsonMode}`) + await pluginRuntime.runOnConfigLoaded({ command: 'generate', config, @@ -119,6 +122,8 @@ async function cmdGenerate(ctx: CommandRunContext): Promise { activeTableMappings ) + debug('generate', `previous snapshot: ${previousDefinitions.length} definitions, current: ${definitions.length} definitions`) + let plan: ReturnType try { plan = planDiff(remappedPreviousDefinitions, definitions) @@ -173,6 +178,8 @@ async function cmdGenerate(ctx: CommandRunContext): Promise { }) : definitions + debug('generate', `plan: ${plan.operations.length} operations — writing artifacts`) + const result = await generateArtifacts({ definitions: artifactDefinitions, migrationsDir, diff --git a/packages/cli/src/bin/commands/migrate.ts b/packages/cli/src/bin/commands/migrate.ts index 4aba62b..0444aa4 100644 --- a/packages/cli/src/bin/commands/migrate.ts +++ b/packages/cli/src/bin/commands/migrate.ts @@ -23,6 +23,7 @@ import { type DestructiveOperationMarker, } from '../safety-markers.js' import { databaseKeyFromOperationKey, resolveTableScope, tableKeyFromOperationKey, tableKeysFromDefinitions } from '../table-scope.js' +import { debug } from '../debug.js' const MIGRATE_FLAGS = defineFlags([ { name: '--apply', type: 'boolean', description: 'Apply pending migrations on ClickHouse (no 
prompt)' }, @@ -103,6 +104,7 @@ async function cmdMigrate(runCtx: CommandRunContext): Promise { const jsonMode = f['--json'] === true const { migrationsDir, metaDir } = dirs + debug('migrate', `flags: execute=${executeRequested}, allowDestructive=${allowDestructive}, json=${jsonMode}`) if (!ctx.hasExecutor) { throw new Error('clickhouse config is required for migrate (journal is stored in ClickHouse)') @@ -127,7 +129,11 @@ async function cmdMigrate(runCtx: CommandRunContext): Promise { const journal = await journalStore.readJournal() const appliedNames = new Set(journal.applied.map((entry) => entry.name)) const pendingAll = files.filter((f) => !appliedNames.has(f)) + debug('migrate', `migrations: total=${files.length}, applied=${journal.applied.length}, pending=${pendingAll.length}`) const checksumMismatches = await findChecksumMismatches(migrationsDir, journal) + if (checksumMismatches.length > 0) { + debug('migrate', `checksum mismatches: ${checksumMismatches.map((m) => m.name).join(', ')}`) + } if (checksumMismatches.length > 0) { if (jsonMode) { @@ -275,10 +281,12 @@ async function cmdMigrate(runCtx: CommandRunContext): Promise { const appliedNow: MigrationJournalEntry[] = [] for (const file of pending) { + debug('migrate', `applying ${file}`) const fullPath = join(migrationsDir, file) const sql = await readFile(fullPath, 'utf8') const parsedStatements = extractExecutableStatements(sql) const operationSummaries = extractMigrationOperationSummaries(sql) + debug('migrate', `${file}: ${parsedStatements.length} statements, ${operationSummaries.length} operations`) const statements = await pluginRuntime.runOnBeforeApply({ command: 'migrate', config, diff --git a/packages/cli/src/bin/commands/status.ts b/packages/cli/src/bin/commands/status.ts index c258299..826a190 100644 --- a/packages/cli/src/bin/commands/status.ts +++ b/packages/cli/src/bin/commands/status.ts @@ -1,6 +1,7 @@ import { mkdir } from 'node:fs/promises' import type { CommandDef, CommandRunContext } 
from '../../plugins.js' +import { debug } from '../debug.js' import { emitJson } from '../json-output.js' import { createJournalStore } from '../journal-store.js' import { findChecksumMismatches, listMigrations } from '../migration-store.js' @@ -31,6 +32,8 @@ async function cmdStatus(runCtx: CommandRunContext): Promise { const pending = files.filter((f) => !appliedNames.has(f)) const checksumMismatches = await findChecksumMismatches(migrationsDir, journal) + debug('status', `files=${files.length}, applied=${journal.applied.length}, pending=${pending.length}, checksumMismatches=${checksumMismatches.length}`) + const databaseMissing = journalStore.databaseMissing const payload = { migrationsDir, diff --git a/packages/cli/src/bin/config.ts b/packages/cli/src/bin/config.ts index 3ebec57..c3e1b86 100644 --- a/packages/cli/src/bin/config.ts +++ b/packages/cli/src/bin/config.ts @@ -12,6 +12,7 @@ import { type ChxConfigInput, type ResolvedChxConfig, } from '@chkit/core' +import { debug } from './debug.js' export const DEFAULT_CONFIG_FILE = 'clickhouse.config.ts' @@ -24,6 +25,7 @@ export async function loadConfig( env: ChxConfigEnv = {} ): Promise<{ config: ResolvedChxConfig; path: string }> { const configPath = resolve(process.cwd(), configPathArg ?? DEFAULT_CONFIG_FILE) + debug('config', `resolving config at ${configPath}`) if (!existsSync(configPath)) { throw new Error(`Config not found at ${configPath}. Run 'chkit init' first.`) } @@ -36,12 +38,20 @@ export async function loadConfig( ) } - const userConfig = isConfigFunction(candidate) ? await candidate(env) : (candidate as ChxConfig) + const isFn = isConfigFunction(candidate) + debug('config', `config export is ${isFn ? 'function' : 'object'}`) + const userConfig = isFn ? 
await candidate(env) : (candidate as ChxConfig) + const config = resolveConfig(userConfig) - return { - config: resolveConfig(userConfig), - path: configPath, - } + debug('config', `loaded`, { + schema: config.schema, + outDir: config.outDir, + migrationsDir: config.migrationsDir, + clickhouse: config.clickhouse ? `${config.clickhouse.url} (db: ${config.clickhouse.database ?? 'default'})` : 'not configured', + plugins: (config.plugins ?? []).length, + }) + + return { config, path: configPath } } export async function writeIfMissing(filePath: string, content: string): Promise { diff --git a/packages/cli/src/bin/debug.ts b/packages/cli/src/bin/debug.ts new file mode 100644 index 0000000..5893a18 --- /dev/null +++ b/packages/cli/src/bin/debug.ts @@ -0,0 +1,22 @@ +import process from 'node:process' + +const enabled = process.env.CHKIT_DEBUG === '1' || process.env.CHKIT_DEBUG === 'true' + +function timestamp(): string { + const now = new Date() + return now.toISOString().slice(11, 23) // HH:mm:ss.SSS +} + +export function debug(category: string, message: string, detail?: unknown): void { + if (!enabled) return + const prefix = `[chkit:${category}]` + if (detail !== undefined) { + console.error(`${timestamp()} ${prefix} ${message}`, detail) + } else { + console.error(`${timestamp()} ${prefix} ${message}`) + } +} + +export function isDebugEnabled(): boolean { + return enabled +} diff --git a/packages/cli/src/bin/journal-store.ts b/packages/cli/src/bin/journal-store.ts index acca90d..204c8ee 100644 --- a/packages/cli/src/bin/journal-store.ts +++ b/packages/cli/src/bin/journal-store.ts @@ -3,6 +3,7 @@ import type { ChxConfig } from '@chkit/core' import type { MigrationJournal, MigrationJournalEntry } from './migration-store.js' import { CLI_VERSION } from './version.js' +import { debug } from './debug.js' export interface JournalStore { readJournal(): Promise @@ -38,6 +39,7 @@ function isRetryableInsertRace(error: unknown): boolean { export function createJournalStore(db: 
ClickHouseExecutor): JournalStore { const journalTable = resolveJournalTableName() + debug('journal', `journal table: ${journalTable}${process.env.CHKIT_JOURNAL_TABLE ? ' (from CHKIT_JOURNAL_TABLE)' : ''}`) const createTableSql = `CREATE TABLE IF NOT EXISTS ${journalTable} ( name String, applied_at DateTime64(3, 'UTC'), @@ -51,38 +53,36 @@ SETTINGS index_granularity = 1` async function ensureTable(): Promise { if (bootstrapped) return - // Probe whether the table already exists before issuing CREATE TABLE. - // On ClickHouse Cloud, repeated CREATE TABLE IF NOT EXISTS can fail with - // "already exists in metadata backend with different schema" because the - // engine normalisation (SharedMergeTree vs SharedMergeTree()) differs - // between the stored metadata and the new DDL statement. + debug('journal', `probing journal table "${journalTable}"`) try { await db.query(`SELECT name FROM ${journalTable} LIMIT 0`) + debug('journal', 'journal table exists') bootstrapped = true return } catch (error) { if (isUnknownDatabaseError(error)) { + debug('journal', 'database does not exist') _databaseMissing = true bootstrapped = true return } - // Table does not exist yet – create it below. } + debug('journal', 'creating journal table') try { await db.command(createTableSql) } catch (error) { if (isUnknownDatabaseError(error)) { + debug('journal', 'database missing on CREATE — deferring') _databaseMissing = true bootstrapped = true return } throw error } - // On ClickHouse Cloud, DDL propagation across nodes may lag behind the - // CREATE TABLE acknowledgment. Wait until the table is queryable. 
for (let attempt = 0; attempt < 10; attempt++) { try { await db.query(`SELECT name FROM ${journalTable} LIMIT 0`) + debug('journal', `DDL propagation confirmed (attempt ${attempt + 1})`) break } catch { await new Promise((r) => setTimeout(r, 250)) @@ -96,8 +96,10 @@ SETTINGS index_granularity = 1` return _databaseMissing }, async readJournal(): Promise { + debug('journal', 'reading journal') await ensureTable() if (_databaseMissing) { + debug('journal', 'database missing — returning empty journal') return { version: 1, applied: [] } } try { @@ -108,6 +110,7 @@ SETTINGS index_granularity = 1` const rows = await db.query( `SELECT name, applied_at, checksum, chkit_version FROM ${journalTable} ORDER BY name SETTINGS select_sequential_consistency = 1` ) + debug('journal', `journal has ${rows.length} applied entries`) return { version: 1, applied: rows.map((row) => ({ @@ -119,8 +122,9 @@ SETTINGS index_granularity = 1` }, async appendEntry(entry: MigrationJournalEntry): Promise { + debug('journal', `appending entry: ${entry.name} (checksum: ${entry.checksum})`) if (_databaseMissing) { - // A migration may have created the database — reset and retry. 
+ debug('journal', 'resetting databaseMissing flag — migration may have created the database') _databaseMissing = false bootstrapped = false } @@ -136,6 +140,7 @@ SETTINGS index_granularity = 1` if (!isRetryableInsertRace(error) || attempt === maxAttempts) { throw error } + debug('journal', `insert race detected — retrying (attempt ${attempt}/${maxAttempts})`) await new Promise((r) => setTimeout(r, attempt * 150)) } } diff --git a/packages/cli/src/bin/plugin-runtime.ts b/packages/cli/src/bin/plugin-runtime.ts index 275df4e..d0c5386 100644 --- a/packages/cli/src/bin/plugin-runtime.ts +++ b/packages/cli/src/bin/plugin-runtime.ts @@ -31,6 +31,7 @@ import type { } from '../plugins.js' import { isInlinePluginRegistration } from '../plugins.js' import type { TableScope } from './table-scope.js' +import { debug, isDebugEnabled } from './debug.js' interface LoadedPlugin { options: Record @@ -126,6 +127,99 @@ function normalizePluginRegistration( } } +function wrapExecutorWithDebug(executor: ClickHouseExecutor): ClickHouseExecutor { + if (!isDebugEnabled()) return executor + + return { + async command(sql: string): Promise { + debug('clickhouse', `command: ${sql.slice(0, 200)}${sql.length > 200 ? '...' : ''}`) + const start = performance.now() + try { + await executor.command(sql) + debug('clickhouse', `command OK (${Math.round(performance.now() - start)}ms)`) + } catch (error) { + debug('clickhouse', `command FAILED (${Math.round(performance.now() - start)}ms)`, error instanceof Error ? error.message : error) + throw error + } + }, + async query(sql: string): Promise { + debug('clickhouse', `query: ${sql.slice(0, 200)}${sql.length > 200 ? '...' : ''}`) + const start = performance.now() + try { + const rows = await executor.query(sql) + debug('clickhouse', `query OK — ${rows.length} rows (${Math.round(performance.now() - start)}ms)`) + return rows + } catch (error) { + debug('clickhouse', `query FAILED (${Math.round(performance.now() - start)}ms)`, error instanceof Error ? 
error.message : error) + throw error + } + }, + async insert>(params: { table: string; values: T[] }): Promise { + debug('clickhouse', `insert into ${params.table} — ${params.values.length} rows`) + const start = performance.now() + try { + await executor.insert(params) + debug('clickhouse', `insert OK (${Math.round(performance.now() - start)}ms)`) + } catch (error) { + debug('clickhouse', `insert FAILED (${Math.round(performance.now() - start)}ms)`, error instanceof Error ? error.message : error) + throw error + } + }, + async submit(sql: string, queryId?: string): Promise { + debug('clickhouse', `submit${queryId ? ` (id: ${queryId})` : ''}: ${sql.slice(0, 200)}${sql.length > 200 ? '...' : ''}`) + const start = performance.now() + try { + const id = await executor.submit(sql, queryId) + debug('clickhouse', `submit OK — id: ${id} (${Math.round(performance.now() - start)}ms)`) + return id + } catch (error) { + debug('clickhouse', `submit FAILED (${Math.round(performance.now() - start)}ms)`, error instanceof Error ? error.message : error) + throw error + } + }, + async queryStatus(queryId: string, options?: { afterTime?: string }) { + debug('clickhouse', `queryStatus for ${queryId}`) + const start = performance.now() + try { + const status = await executor.queryStatus(queryId, options) + debug('clickhouse', `queryStatus: ${status.status} (${Math.round(performance.now() - start)}ms)`) + return status + } catch (error) { + debug('clickhouse', `queryStatus FAILED (${Math.round(performance.now() - start)}ms)`, error instanceof Error ? 
error.message : error) + throw error + } + }, + async listSchemaObjects() { + debug('clickhouse', 'listSchemaObjects') + const start = performance.now() + try { + const objects = await executor.listSchemaObjects() + debug('clickhouse', `listSchemaObjects OK — ${objects.length} objects (${Math.round(performance.now() - start)}ms)`) + return objects + } catch (error) { + debug('clickhouse', `listSchemaObjects FAILED (${Math.round(performance.now() - start)}ms)`, error instanceof Error ? error.message : error) + throw error + } + }, + async listTableDetails(databases: string[]) { + debug('clickhouse', `listTableDetails for databases: [${databases.join(', ')}]`) + const start = performance.now() + try { + const tables = await executor.listTableDetails(databases) + debug('clickhouse', `listTableDetails OK — ${tables.length} tables (${Math.round(performance.now() - start)}ms)`) + return tables + } catch (error) { + debug('clickhouse', `listTableDetails FAILED (${Math.round(performance.now() - start)}ms)`, error instanceof Error ? error.message : error) + throw error + } + }, + async close(): Promise { + debug('clickhouse', 'closing connections') + await executor.close() + }, + } +} + function formatPluginError(pluginName: string, hook: string, error: unknown): Error { const message = error instanceof Error ? error.message : String(error) return new Error(`Plugin "${pluginName}" failed in ${hook}: ${message}`) @@ -207,6 +301,11 @@ export async function loadPluginRuntime(input: { throw new Error(`Duplicate plugin name "${plugin.manifest.name}" in config.plugins.`) } + debug('plugin', `loaded "${plugin.manifest.name}" v${plugin.manifest.version ?? '?'}`, { + hooks: Object.keys(plugin.hooks ?? {}), + commands: (plugin.commands ?? 
[]).map((c) => c.name), + }) + const item: LoadedPlugin = { plugin, options: normalized.options, @@ -231,6 +330,7 @@ export async function loadPluginRuntime(input: { if (item.plugin.manifest.name === pluginName) continue const hook = item.plugin.hooks?.onBeforePluginCommand if (!hook) continue + debug('hook', `onBeforePluginCommand → ${item.plugin.manifest.name} (target: ${pluginName}:${commandName})`) try { const result = await hook({ ...context, @@ -238,7 +338,10 @@ export async function loadPluginRuntime(input: { command: commandName, options: item.options, }) - if (result.handled) return result + if (result.handled) { + debug('hook', `onBeforePluginCommand ← ${item.plugin.manifest.name} handled command (exitCode: ${result.exitCode})`) + return result + } } catch (error) { throw formatPluginError(item.plugin.manifest.name, 'onBeforePluginCommand', error) } @@ -266,10 +369,12 @@ export async function loadPluginRuntime(input: { plugins: loaded, async resolveContext(input) { const hasClickhouseConfig = !!input.config.clickhouse + debug('context', `resolving executor — clickhouse config: ${hasClickhouseConfig ? 'yes' : 'no'}`) + const rawExecutor = hasClickhouseConfig + ? createClickHouseExecutor(input.config.clickhouse!) + : NULL_EXECUTOR const defaults: PluginContext = { - executor: hasClickhouseConfig - ? createClickHouseExecutor(input.config.clickhouse!) 
- : NULL_EXECUTOR, + executor: wrapExecutorWithDebug(rawExecutor), hasExecutor: hasClickhouseConfig, } let ctx = defaults @@ -279,6 +384,7 @@ export async function loadPluginRuntime(input: { try { const result = await hook({ ...input, defaults }) if (result && typeof result === 'object' && 'executor' in result && result.executor) { + debug('context', `plugin "${item.plugin.manifest.name}" provided executor override`) // Plugin returned an executor override — close the default one if (ctx.executor !== defaults.executor) { // A previous plugin already overrode — close that one @@ -310,6 +416,7 @@ export async function loadPluginRuntime(input: { for (const item of loaded) { const hook = item.plugin.hooks?.onInit if (!hook) continue + debug('hook', `onInit → ${item.plugin.manifest.name}`) try { await hook({ ...context, options: item.options }) } catch (error) { @@ -322,6 +429,7 @@ export async function loadPluginRuntime(input: { for (const item of loaded) { const hook = item.plugin.hooks?.onComplete if (!hook) continue + debug('hook', `onComplete → ${item.plugin.manifest.name} (exitCode: ${exitCode})`) try { await hook({ ...context, exitCode, options: item.options }) } catch (error) { @@ -334,6 +442,7 @@ export async function loadPluginRuntime(input: { for (const item of loaded) { const hook = item.plugin.hooks?.onConfigLoaded if (!hook) continue + debug('hook', `onConfigLoaded → ${item.plugin.manifest.name}`) try { await hook({ ...context, options: item.options, tableScope }) } catch (error) { @@ -346,9 +455,11 @@ export async function loadPluginRuntime(input: { for (const item of loaded) { const hook = item.plugin.hooks?.onSchemaLoaded if (!hook) continue + debug('hook', `onSchemaLoaded → ${item.plugin.manifest.name} (${definitions.length} definitions)`) try { const next = await hook({ ...context, definitions }) if (Array.isArray(next)) { + debug('hook', `onSchemaLoaded ← ${item.plugin.manifest.name} returned ${next.length} definitions`) definitions = 
canonicalizeDefinitions(next) } } catch (error) { @@ -363,9 +474,13 @@ export async function loadPluginRuntime(input: { for (const item of loaded) { const hook = item.plugin.hooks?.onPlanCreated if (!hook) continue + debug('hook', `onPlanCreated → ${item.plugin.manifest.name} (${plan.operations.length} operations)`) try { const next = await hook({ ...context, tableScope, plan }) - if (next) plan = next + if (next) { + debug('hook', `onPlanCreated ← ${item.plugin.manifest.name} modified plan (${next.operations.length} operations)`) + plan = next + } } catch (error) { throw formatPluginError(item.plugin.manifest.name, 'onPlanCreated', error) } @@ -377,9 +492,13 @@ export async function loadPluginRuntime(input: { for (const item of loaded) { const hook = item.plugin.hooks?.onBeforeApply if (!hook) continue + debug('hook', `onBeforeApply → ${item.plugin.manifest.name} (${context.migration}, ${statements.length} statements)`) try { const result = await hook({ ...context, statements }) - if (result?.statements) statements = result.statements + if (result?.statements) { + debug('hook', `onBeforeApply ← ${item.plugin.manifest.name} modified statements (${result.statements.length})`) + statements = result.statements + } } catch (error) { throw formatPluginError(item.plugin.manifest.name, 'onBeforeApply', error) } @@ -390,6 +509,7 @@ export async function loadPluginRuntime(input: { for (const item of loaded) { const hook = item.plugin.hooks?.onAfterApply if (!hook) continue + debug('hook', `onAfterApply → ${item.plugin.manifest.name} (${context.migration})`) try { await hook(context) } catch (error) { @@ -403,9 +523,11 @@ export async function loadPluginRuntime(input: { for (const item of loaded) { const hook = item.plugin.hooks?.onCheck if (!hook) continue + debug('hook', `onCheck → ${item.plugin.manifest.name}`) try { const result = await hook({ ...context, options: item.options, tableScope }) if (!result) continue + debug('hook', `onCheck ← ${item.plugin.manifest.name}: 
ok=${result.ok}, findings=${result.findings.length}`) results.push({ plugin: result.plugin || item.plugin.manifest.name, evaluated: result.evaluated, @@ -434,6 +556,7 @@ export async function loadPluginRuntime(input: { }, runOnBeforePluginCommand: runBeforePluginCommandHooks, async runPluginCommand(pluginName, commandName, context) { + debug('command', `running plugin command "${pluginName}:${commandName}"`) const item = byName.get(pluginName) if (!item) return 1 const command = (item.plugin.commands ?? []).find((entry) => entry.name === commandName) diff --git a/packages/cli/src/bin/schema-loader.ts b/packages/cli/src/bin/schema-loader.ts index ad7e5ef..76b73c6 100644 --- a/packages/cli/src/bin/schema-loader.ts +++ b/packages/cli/src/bin/schema-loader.ts @@ -2,7 +2,12 @@ import process from 'node:process' import { loadSchemaDefinitions as loadSchemaDefinitionsFromCore } from '@chkit/core/schema-loader' import type { SchemaDefinition } from '@chkit/core' +import { debug } from './debug.js' export async function loadSchemaDefinitions(schemaGlobs: string | string[]): Promise { - return loadSchemaDefinitionsFromCore(schemaGlobs, { cwd: process.cwd() }) + const globs = Array.isArray(schemaGlobs) ? schemaGlobs : [schemaGlobs] + debug('schema', `loading definitions from globs: [${globs.join(', ')}] (cwd: ${process.cwd()})`) + const definitions = await loadSchemaDefinitionsFromCore(schemaGlobs, { cwd: process.cwd() }) + debug('schema', `loaded ${definitions.length} schema definitions`) + return definitions } From 9754d4b79fc5636407bf7542ae02b865cf80a7ff Mon Sep 17 00:00:00 2001 From: KeKs0r Date: Sun, 29 Mar 2026 11:16:49 +0200 Subject: [PATCH 07/12] refactor(plugin-obsessiondb): replace hand-rolled API client with ORPC Replace the manual fetch-based api-client.ts with a typed ORPC client backed by contracts copied from the platform repo. Narrows remote commands to status/cancel/list and adds --job-id/--service-id flags. 
Co-Authored-By: Claude Opus 4.6 (1M context) --- bun.lock | 39 ++++++++- package.json | 6 +- packages/plugin-obsessiondb/package.json | 5 +- .../src/backfill/api-client.ts | 79 ----------------- .../plugin-obsessiondb/src/backfill/client.ts | 36 ++++++++ .../src/backfill/contract.ts | 70 +++++++++++++++ .../src/backfill/handler.test.ts | 87 +++++++++++++++++-- .../src/backfill/handler.ts | 63 +++++++------- .../plugin-obsessiondb/src/backfill/index.ts | 24 +++++ packages/plugin-obsessiondb/src/index.test.ts | 31 +++++-- 10 files changed, 314 insertions(+), 126 deletions(-) delete mode 100644 packages/plugin-obsessiondb/src/backfill/api-client.ts create mode 100644 packages/plugin-obsessiondb/src/backfill/client.ts create mode 100644 packages/plugin-obsessiondb/src/backfill/contract.ts diff --git a/bun.lock b/bun.lock index 902efff..d66ca9d 100644 --- a/bun.lock +++ b/bun.lock @@ -11,10 +11,14 @@ "@biomejs/biome": "^2.3.14", "@changesets/cli": "^2.29.8", "@chkit/plugin-backfill": "workspace:*", + "@chkit/plugin-obsessiondb": "workspace:*", + "@orpc/client": "1.13.4", + "@orpc/contract": "1.13.4", "@types/node": "^24.0.0", "p-map": "^7.0.4", "turbo": "^2.8.20", "typescript": "^5.8.0", + "zod": "3.24.4", }, }, "apps/docs": { @@ -90,6 +94,9 @@ "version": "0.1.0-beta.19", "dependencies": { "@chkit/core": "workspace:*", + "@orpc/client": "1.13.4", + "@orpc/contract": "1.13.4", + "zod": "3.24.4", }, }, "packages/plugin-pull": { @@ -353,6 +360,18 @@ "@nodelib/fs.walk": ["@nodelib/fs.walk@1.2.8", "", { "dependencies": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" } }, "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg=="], + "@orpc/client": ["@orpc/client@1.13.4", "", { "dependencies": { "@orpc/shared": "1.13.4", "@orpc/standard-server": "1.13.4", "@orpc/standard-server-fetch": "1.13.4", "@orpc/standard-server-peer": "1.13.4" } }, 
"sha512-s13GPMeoooJc5Th2EaYT5HMFtWG8S03DUVytYfJv8pIhP87RYKl94w52A36denH6r/B4LaAgBeC9nTAOslK+Og=="], + + "@orpc/contract": ["@orpc/contract@1.13.4", "", { "dependencies": { "@orpc/client": "1.13.4", "@orpc/shared": "1.13.4", "@standard-schema/spec": "^1.1.0", "openapi-types": "^12.1.3" } }, "sha512-TIxyaF67uOlihCRcasjHZxguZpbqfNK7aMrDLnhoufmQBE4OKvguNzmrOFHgsuM0OXoopX0Nuhun1ccaxKP10A=="], + + "@orpc/shared": ["@orpc/shared@1.13.4", "", { "dependencies": { "radash": "^12.1.1", "type-fest": "^5.3.1" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0" }, "optionalPeers": ["@opentelemetry/api"] }, "sha512-TYt9rLG/BUkNQBeQ6C1tEiHS/Seb8OojHgj9GlvqyjHJhMZx5qjsIyTW6RqLPZJ4U2vgK6x4Her36+tlFCKJug=="], + + "@orpc/standard-server": ["@orpc/standard-server@1.13.4", "", { "dependencies": { "@orpc/shared": "1.13.4" } }, "sha512-ZOzgfVp6XUg+wVYw+gqesfRfGPtQbnBIrIiSnFMtZF+6ncmFJeF2Shc4RI2Guqc0Qz25juy8Ogo4tX3YqysOcg=="], + + "@orpc/standard-server-fetch": ["@orpc/standard-server-fetch@1.13.4", "", { "dependencies": { "@orpc/shared": "1.13.4", "@orpc/standard-server": "1.13.4" } }, "sha512-/zmKwnuxfAXbppJpgr1CMnQX3ptPlYcDzLz1TaVzz9VG/Xg58Ov3YhabS2Oi1utLVhy5t4kaCppUducAvoKN+A=="], + + "@orpc/standard-server-peer": ["@orpc/standard-server-peer@1.13.4", "", { "dependencies": { "@orpc/shared": "1.13.4", "@orpc/standard-server": "1.13.4" } }, "sha512-UfqnTLqevjCKUk4cmImOG8cQUwANpV1dp9e9u2O1ki6BRBsg/zlXFg6G2N6wP0zr9ayIiO1d2qJdH55yl/1BNw=="], + "@oslojs/encoding": ["@oslojs/encoding@1.1.0", "", {}, "sha512-70wQhgYmndg4GCPxPPxPGevRKqTIJ2Nh4OkiMWmDAVYsTQ+Ta7Sq+rPevXyXGdzr30/qZBnyOalCszoMxlyldQ=="], "@pagefind/darwin-arm64": ["@pagefind/darwin-arm64@1.4.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-2vMqkbv3lbx1Awea90gTaBsvpzgRs7MuSgKDxW0m9oV1GPZCZbZBJg/qL83GIUEN2BFlY46dtUZi54pwH+/pTQ=="], @@ -445,6 +464,8 @@ "@speed-highlight/core": ["@speed-highlight/core@1.2.14", "", {}, "sha512-G4ewlBNhUtlLvrJTb88d2mdy2KRijzs4UhnlrOSRT4bmjh/IqNElZa3zkrZ+TC47TwtlDWzVLFADljF1Ijp5hA=="], + 
"@standard-schema/spec": ["@standard-schema/spec@1.1.0", "", {}, "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w=="], + "@turbo/darwin-64": ["@turbo/darwin-64@2.8.20", "", { "os": "darwin", "cpu": "x64" }, "sha512-FQ9EX1xMU5nbwjxXxM3yU88AQQ6Sqc6S44exPRroMcx9XZHqqppl5ymJF0Ig/z3nvQNwDmz1Gsnvxubo+nXWjQ=="], "@turbo/darwin-arm64": ["@turbo/darwin-arm64@2.8.20", "", { "os": "darwin", "cpu": "arm64" }, "sha512-Gpyh9ATFGThD6/s9L95YWY54cizg/VRWl2B67h0yofG8BpHf67DFAh9nuJVKG7bY0+SBJDAo5cMur+wOl9YOYw=="], @@ -961,6 +982,8 @@ "oniguruma-to-es": ["oniguruma-to-es@4.3.4", "", { "dependencies": { "oniguruma-parser": "^0.12.1", "regex": "^6.0.1", "regex-recursion": "^6.0.2" } }, "sha512-3VhUGN3w2eYxnTzHn+ikMI+fp/96KoRSVK9/kMTcFqj1NRDh2IhQCKvYxDnWePKRXY/AqH+Fuiyb7VHSzBjHfA=="], + "openapi-types": ["openapi-types@12.1.3", "", {}, "sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw=="], + "outdent": ["outdent@0.5.0", "", {}, "sha512-/jHxFIzoMXdqPzTaCpFzAAWhpkSjZPF4Vsn6jAfNpmbH/ymsmd7Qc6VE9BGn0L6YMj6uwpQLxCECpus4ukKS9Q=="], "p-filter": ["p-filter@2.1.0", "", { "dependencies": { "p-map": "^2.0.0" } }, "sha512-ZBxxZ5sL2HghephhpGAQdoskxplTwr7ICaehZwLIlfL6acuVgZPm8yBNuRAFBGEqtD/hmUeq9eqLg2ys9Xr/yw=="], @@ -1025,6 +1048,8 @@ "queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="], + "radash": ["radash@12.1.1", "", {}, "sha512-h36JMxKRqrAxVD8201FrCpyeNuUY9Y5zZwujr20fFO77tpUtGa6EZzfKw/3WaiBX95fq7+MpsuMLNdSnORAwSA=="], + "radix3": ["radix3@1.1.2", "", {}, "sha512-b484I/7b8rDEdSDKckSSBA8knMpcdsXudlE/LNL639wFoHKwLbEkQFZHWEYwDC0wa0FKUcCY+GAF73Z7wxNVFA=="], "read-yaml-file": ["read-yaml-file@1.1.0", "", { "dependencies": { "graceful-fs": "^4.1.5", "js-yaml": "^3.6.1", "pify": "^4.0.1", "strip-bom": "^3.0.0" } }, "sha512-VIMnQi/Z4HT2Fxuwg5KrY174U1VdUIASQVWXXyqtNRtxSr9IYkn1rsI6Tb6HsrHCmB7gVpNwX6JxPTHcH6IoTA=="], @@ 
-1141,6 +1166,8 @@ "svgo": ["svgo@4.0.0", "", { "dependencies": { "commander": "^11.1.0", "css-select": "^5.1.0", "css-tree": "^3.0.1", "css-what": "^6.1.0", "csso": "^5.0.5", "picocolors": "^1.1.1", "sax": "^1.4.1" }, "bin": "./bin/svgo.js" }, "sha512-VvrHQ+9uniE+Mvx3+C9IEe/lWasXCU0nXMY2kZeLrHNICuRiC8uMPyM14UEaMOFA5mhyQqEkB02VoQ16n3DLaw=="], + "tagged-tag": ["tagged-tag@1.0.0", "", {}, "sha512-yEFYrVhod+hdNyx7g5Bnkkb0G6si8HJurOoOEgC8B/O0uXLHlaey/65KRv6cuWBNhBgHKAROVpc7QyYqE5gFng=="], + "term-size": ["term-size@2.2.1", "", {}, "sha512-wK0Ri4fOGjv/XPy8SBHZChl8CM7uMc5VML7SqiQ0zG7+J5Vr+RMQDoHa2CNT6KHUnTGIXH34UDMkPzAUyapBZg=="], "tiny-inflate": ["tiny-inflate@1.0.3", "", {}, "sha512-pkY1fj1cKHb2seWDy0B16HeWyczlJA9/WW3u3c4z/NiWDsO3DOU5D7nhTLE9CF0yXv/QZFY7sEJmj24dK+Rrqw=="], @@ -1161,7 +1188,7 @@ "turbo": ["turbo@2.8.20", "", { "optionalDependencies": { "@turbo/darwin-64": "2.8.20", "@turbo/darwin-arm64": "2.8.20", "@turbo/linux-64": "2.8.20", "@turbo/linux-arm64": "2.8.20", "@turbo/windows-64": "2.8.20", "@turbo/windows-arm64": "2.8.20" }, "bin": { "turbo": "bin/turbo" } }, "sha512-Rb4qk5YT8RUwwdXtkLpkVhNEe/lor6+WV7S5tTlLpxSz6MjV5Qi8jGNn4gS6NAvrYGA/rNrE6YUQM85sCZUDbQ=="], - "type-fest": ["type-fest@4.41.0", "", {}, "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA=="], + "type-fest": ["type-fest@5.5.0", "", { "dependencies": { "tagged-tag": "^1.0.0" } }, "sha512-PlBfpQwiUvGViBNX84Yxwjsdhd1TUlXr6zjX7eoirtCPIr08NAmxwa+fcYBTeRQxHo9YC9wwF3m9i700sHma8g=="], "typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="], @@ -1247,7 +1274,7 @@ "youch-core": ["youch-core@0.3.3", "", { "dependencies": { "@poppinss/exception": "^1.2.2", "error-stack-parser-es": "^1.0.5" } }, "sha512-ho7XuGjLaJ2hWHoK8yFnsUGy2Y5uDpqSTq1FkHLK4/oqKtyUU1AFbOOxY4IpC9f0fTLjwYbslUz0Po5BpD1wrA=="], - "zod": ["zod@3.25.76", "", 
{}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="], + "zod": ["zod@3.24.4", "", {}, "sha512-OdqJE9UDRPwWsrHjLN2F8bPxvwJBK22EHLWtanu0LSYr5YqzsaaW3RMgmjwr8Rypg5k+meEJdSPXJZXE/yqOMg=="], "zod-to-json-schema": ["zod-to-json-schema@3.25.1", "", { "peerDependencies": { "zod": "^3.25 || ^4" } }, "sha512-pM/SU9d3YAggzi6MtR4h7ruuQlqKtad8e9S0fmxcMi+ueAK5Korys/aWcV9LIIHTVbj01NdzxcnXSN+O74ZIVA=="], @@ -1255,8 +1282,12 @@ "zwitch": ["zwitch@2.0.4", "", {}, "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A=="], + "@astrojs/sitemap/zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="], + "@astrojs/telemetry/ci-info": ["ci-info@4.4.0", "", {}, "sha512-77PSwercCZU2Fc4sX94eF8k8Pxte6JAwL4/ICZLFjJLqegs7kCuAsqqj/70NQF6TvDpgFjkubQB2FW2ZZddvQg=="], + "@chkit/docs/zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="], + "@chkit/plugin-backfill/zod": ["zod@4.3.6", "", {}, "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg=="], "@chkit/plugin-codegen/zod": ["zod@4.3.6", "", {}, "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg=="], @@ -1285,6 +1316,10 @@ "astro/package-manager-detector": ["package-manager-detector@1.6.0", "", {}, "sha512-61A5ThoTiDG/C8s8UMZwSorAGwMJ0ERVGj2OjoW5pAalsNOg15+iQiPzrLJ4jhZ1HJzmC2PIHT2oEiH3R5fzNA=="], + "astro/zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="], + + "boxen/type-fest": ["type-fest@4.41.0", "", {}, "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA=="], + "csso/css-tree": ["css-tree@2.2.1", "", { "dependencies": { "mdn-data": "2.0.28", "source-map-js": "^1.0.1" } }, 
"sha512-OA0mILzGc1kCOCSJerOeqDxDQ4HOh+G8NbOJFOTgOCzpw7fCBubk0fEyxp8AgOL/jvLgYA/uV0cMbe43ElF1JA=="], "dom-serializer/entities": ["entities@4.5.0", "", {}, "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw=="], diff --git a/package.json b/package.json index 4f8b9d9..f9f9716 100644 --- a/package.json +++ b/package.json @@ -37,10 +37,14 @@ "@biomejs/biome": "^2.3.14", "@changesets/cli": "^2.29.8", "@chkit/plugin-backfill": "workspace:*", + "@chkit/plugin-obsessiondb": "workspace:*", + "@orpc/client": "1.13.4", + "@orpc/contract": "1.13.4", "@types/node": "^24.0.0", "p-map": "^7.0.4", "turbo": "^2.8.20", - "typescript": "^5.8.0" + "typescript": "^5.8.0", + "zod": "3.24.4" }, "dependencies": { "wrangler": "^4.65.0" diff --git a/packages/plugin-obsessiondb/package.json b/packages/plugin-obsessiondb/package.json index fe7eaf1..adf2527 100644 --- a/packages/plugin-obsessiondb/package.json +++ b/packages/plugin-obsessiondb/package.json @@ -41,6 +41,9 @@ "clean": "rm -rf dist" }, "dependencies": { - "@chkit/core": "workspace:*" + "@chkit/core": "workspace:*", + "@orpc/client": "1.13.4", + "@orpc/contract": "1.13.4", + "zod": "3.24.4" } } diff --git a/packages/plugin-obsessiondb/src/backfill/api-client.ts b/packages/plugin-obsessiondb/src/backfill/api-client.ts deleted file mode 100644 index 12e6e9a..0000000 --- a/packages/plugin-obsessiondb/src/backfill/api-client.ts +++ /dev/null @@ -1,79 +0,0 @@ -import type { Credentials } from '../auth/index.js' -import { apiRequest, isSessionExpiredError } from '../api-request.js' - -export { isSessionExpiredError } - -export interface RemotePlanResponse { - ok: boolean - plan_id?: string - error?: string - [key: string]: unknown -} - -export interface RemoteRunResponse { - ok: boolean - run_id?: string - error?: string - [key: string]: unknown -} - -export interface RemoteStatusResponse { - ok: boolean - status?: string - error?: string - [key: string]: unknown -} - -export interface 
RemoteCancelResponse { - ok: boolean - error?: string - [key: string]: unknown -} - -export interface RemoteDoctorResponse { - ok: boolean - error?: string - [key: string]: unknown -} - -export async function submitBackfillPlan( - input: Record, - creds: Credentials -): Promise { - return apiRequest('/api/v1/backfill/plan', creds, input) -} - -export async function runRemoteBackfill( - input: Record, - creds: Credentials -): Promise { - return apiRequest('/api/v1/backfill/run', creds, input) -} - -export async function resumeRemoteBackfill( - input: Record, - creds: Credentials -): Promise { - return apiRequest('/api/v1/backfill/resume', creds, input) -} - -export async function getRemoteBackfillStatus( - input: Record, - creds: Credentials -): Promise { - return apiRequest('/api/v1/backfill/status', creds, input) -} - -export async function cancelRemoteBackfill( - input: Record, - creds: Credentials -): Promise { - return apiRequest('/api/v1/backfill/cancel', creds, input) -} - -export async function getRemoteBackfillDoctor( - input: Record, - creds: Credentials -): Promise { - return apiRequest('/api/v1/backfill/doctor', creds, input) -} diff --git a/packages/plugin-obsessiondb/src/backfill/client.ts b/packages/plugin-obsessiondb/src/backfill/client.ts new file mode 100644 index 0000000..1930741 --- /dev/null +++ b/packages/plugin-obsessiondb/src/backfill/client.ts @@ -0,0 +1,36 @@ +import { createORPCClient } from '@orpc/client' +import { RPCLink } from '@orpc/client/fetch' +import type { ContractRouterClient } from '@orpc/contract' +import type { Credentials } from '../auth/index.js' +import { jobsContract } from './contract.js' + +export type JobsClient = ContractRouterClient + +export class SessionExpiredError extends Error { + constructor() { + super('Session expired. 
Run `chkit obsessiondb login` to re-authenticate.') + } +} + +export function isSessionExpiredError(error: unknown): boolean { + return error instanceof SessionExpiredError +} + +export function createJobsClient(creds: Credentials): JobsClient { + const link = new RPCLink({ + url: `${creds.base_url}/rpc/jobs`, + headers: () => ({ + Authorization: `Bearer ${creds.access_token}`, + 'User-Agent': 'chkit-cli', + }), + fetch: async (input, init) => { + const res = await globalThis.fetch(input, init) + if (res.status === 401) { + throw new SessionExpiredError() + } + return res + }, + }) + + return createORPCClient(link) +} diff --git a/packages/plugin-obsessiondb/src/backfill/contract.ts b/packages/plugin-obsessiondb/src/backfill/contract.ts new file mode 100644 index 0000000..1d1423d --- /dev/null +++ b/packages/plugin-obsessiondb/src/backfill/contract.ts @@ -0,0 +1,70 @@ +import { oc } from '@orpc/contract' +import { z } from 'zod' + +export const jobStatusSchema = z.enum(['pending', 'running', 'completed', 'failed', 'cancelled']) + +export const taskStatusSchema = z.enum(['pending', 'running', 'done', 'failed']) + +export const jobTaskSchema = z.object({ + id: z.string(), + taskIndex: z.number().int(), + status: taskStatusSchema, + sql: z.string(), + queryId: z.string().nullable(), + estimatedBytes: z.number().nullable(), + writtenRows: z.number().nullable(), + writtenBytes: z.number().nullable(), + durationMs: z.number().nullable(), + error: z.string().nullable(), + startedAt: z.string().datetime().nullable(), + finishedAt: z.string().datetime().nullable(), +}) + +export const jobSummarySchema = z.object({ + id: z.string(), + serviceId: z.string(), + type: z.string(), + target: z.string(), + status: jobStatusSchema, + concurrency: z.number().int(), + totalTasks: z.number().int(), + completedTasks: z.number().int(), + failedTasks: z.number().int(), + createdAt: z.string().datetime(), + updatedAt: z.string().datetime(), +}) + +export const jobDetailSchema = 
jobSummarySchema.extend({ + workflowId: z.string().nullable(), + metadata: z.record(z.unknown()).nullable(), + tasks: z.array(jobTaskSchema), +}) + +export const jobsContract = { + submit: oc + .input( + z.object({ + serviceId: z.string(), + type: z.enum(['backfill']), + target: z.string(), + concurrency: z.number().int().min(1).max(12).optional(), + tasks: z.array( + z.object({ + id: z.string(), + sql: z.string(), + estimatedBytes: z.number().optional(), + }), + ), + metadata: z.record(z.unknown()).optional(), + }), + ) + .output(z.object({ jobId: z.string() })), + + get: oc.input(z.object({ jobId: z.string() })).output(jobDetailSchema), + + list: oc + .input(z.object({ serviceId: z.string() })) + .output(z.object({ jobs: z.array(jobSummarySchema) })), + + cancel: oc.input(z.object({ jobId: z.string() })).output(z.object({})), +} diff --git a/packages/plugin-obsessiondb/src/backfill/handler.test.ts b/packages/plugin-obsessiondb/src/backfill/handler.test.ts index 2af8ff6..a477c8d 100644 --- a/packages/plugin-obsessiondb/src/backfill/handler.test.ts +++ b/packages/plugin-obsessiondb/src/backfill/handler.test.ts @@ -11,12 +11,12 @@ function makeContext(overrides: Partial return { context: { targetPlugin: 'backfill', - command: 'run', + command: 'status', config: {}, configPath: '/fake/clickhouse.config.ts', jsonMode: false, args: [], - flags: {}, + flags: { '--job-id': 'job-123' }, options: {}, print: (v: unknown) => printed.push(v), ...overrides, @@ -25,6 +25,13 @@ function makeContext(overrides: Partial } } +function orpcResponse(data: unknown): Response { + return new Response(JSON.stringify({ json: data }), { + status: 200, + headers: { 'content-type': 'application/json' }, + }) +} + describe('handleBackfillCommand', () => { let tempDir: string let originalXdg: string | undefined @@ -73,19 +80,34 @@ describe('handleBackfillCommand', () => { expect(printed[0]).toContain('chkit obsessiondb login') }) - test('routes to remote API when authenticated', async () => { + 
test('routes status to remote ORPC get when authenticated', async () => { await setupAuth() - globalThis.fetch = mock(async () => - new Response(JSON.stringify({ ok: true, run_id: 'r-123' }), { status: 200 }) - ) as typeof fetch + const jobDetail = { + id: 'job-123', + serviceId: 'svc-1', + type: 'backfill', + target: 'my_table', + status: 'running', + concurrency: 4, + totalTasks: 10, + completedTasks: 3, + failedTasks: 0, + createdAt: '2026-03-29T00:00:00Z', + updatedAt: '2026-03-29T01:00:00Z', + workflowId: null, + metadata: null, + tasks: [], + } + + globalThis.fetch = mock(async () => orpcResponse(jobDetail)) as typeof fetch const { context, printed } = makeContext() const result = await handleBackfillCommand(context) expect(result).toEqual({ handled: true, exitCode: 0 }) expect(printed).toHaveLength(1) - expect((printed[0] as Record).ok).toBe(true) + expect((printed[0] as Record).id).toBe('job-123') }) test('handles 401 with session expired message', async () => { @@ -109,4 +131,55 @@ describe('handleBackfillCommand', () => { const result = await handleBackfillCommand(context) expect(result).toEqual({ handled: false }) }) + + test('routes cancel to remote ORPC cancel', async () => { + await setupAuth() + + globalThis.fetch = mock(async () => orpcResponse({})) as typeof fetch + + const { context, printed } = makeContext({ command: 'cancel', flags: { '--job-id': 'job-456' } }) + const result = await handleBackfillCommand(context) + + expect(result).toEqual({ handled: true, exitCode: 0 }) + expect(printed).toHaveLength(1) + }) + + test('routes list to remote ORPC list', async () => { + await setupAuth() + + const listResponse = { + jobs: [ + { + id: 'job-1', + serviceId: 'svc-1', + type: 'backfill', + target: 'table_a', + status: 'completed', + concurrency: 2, + totalTasks: 5, + completedTasks: 5, + failedTasks: 0, + createdAt: '2026-03-28T00:00:00Z', + updatedAt: '2026-03-28T01:00:00Z', + }, + ], + } + + globalThis.fetch = mock(async () => 
orpcResponse(listResponse)) as typeof fetch + + const { context, printed } = makeContext({ command: 'list', flags: { '--service-id': 'svc-1' } }) + const result = await handleBackfillCommand(context) + + expect(result).toEqual({ handled: true, exitCode: 0 }) + expect(printed).toHaveLength(1) + expect((printed[0] as { jobs: unknown[] }).jobs).toHaveLength(1) + }) + + test('returns handled: false for non-remote commands like run', async () => { + await setupAuth() + + const { context } = makeContext({ command: 'run' }) + const result = await handleBackfillCommand(context) + expect(result).toEqual({ handled: false }) + }) }) diff --git a/packages/plugin-obsessiondb/src/backfill/handler.ts b/packages/plugin-obsessiondb/src/backfill/handler.ts index f413f00..61812b8 100644 --- a/packages/plugin-obsessiondb/src/backfill/handler.ts +++ b/packages/plugin-obsessiondb/src/backfill/handler.ts @@ -1,13 +1,5 @@ import { loadCredentials, resolveBaseUrl } from '../auth/index.js' -import { - cancelRemoteBackfill, - getRemoteBackfillDoctor, - getRemoteBackfillStatus, - isSessionExpiredError, - resumeRemoteBackfill, - runRemoteBackfill, - submitBackfillPlan, -} from './api-client.js' +import { createJobsClient, isSessionExpiredError, type JobsClient } from './client.js' interface BeforePluginCommandContext { targetPlugin: string @@ -25,17 +17,7 @@ type HandlerResult = | { handled: true; exitCode: number } | { handled: false } -const BACKFILL_SUBCOMMANDS: Record< - string, - (input: Record, creds: { access_token: string; base_url: string }) => Promise -> = { - plan: submitBackfillPlan, - run: runRemoteBackfill, - resume: resumeRemoteBackfill, - status: getRemoteBackfillStatus, - cancel: cancelRemoteBackfill, - doctor: getRemoteBackfillDoctor, -} +const REMOTE_SUBCOMMANDS = new Set(['status', 'cancel', 'list']) export async function handleBackfillCommand(context: BeforePluginCommandContext): Promise { if (context.targetPlugin !== 'backfill') return { handled: false } @@ -43,8 +25,7 @@ 
export async function handleBackfillCommand(context: BeforePluginCommandContext) // --local flag bypasses remote execution if (context.flags['--local'] === true) return { handled: false } - const handler = BACKFILL_SUBCOMMANDS[context.command] - if (!handler) return { handled: false } + if (!REMOTE_SUBCOMMANDS.has(context.command)) return { handled: false } const creds = await loadCredentials() if (!creds) { @@ -54,18 +35,11 @@ export async function handleBackfillCommand(context: BeforePluginCommandContext) // Allow OBSESSIONDB_API_URL env var to override the stored base_url const effectiveCreds = { ...creds, base_url: resolveBaseUrl(creds.base_url) } + const client = createJobsClient(effectiveCreds) try { - const input = { - command: context.command, - args: context.args, - flags: context.flags, - } - - const result = await handler(input, effectiveCreds) - + const result = await dispatchCommand(client, context.command, context.flags) context.print(result) - return { handled: true, exitCode: 0 } } catch (error) { if (isSessionExpiredError(error)) { @@ -75,3 +49,30 @@ export async function handleBackfillCommand(context: BeforePluginCommandContext) throw error } } + +async function dispatchCommand( + client: JobsClient, + command: string, + flags: Record, +): Promise { + const jobId = typeof flags['--job-id'] === 'string' ? flags['--job-id'] : undefined + const serviceId = typeof flags['--service-id'] === 'string' ? 
flags['--service-id'] : undefined + + switch (command) { + case 'status': { + if (jobId) return client.get({ jobId }) + if (serviceId) return client.list({ serviceId }) + throw new Error('Either --job-id or --service-id is required for remote status') + } + case 'cancel': { + if (!jobId) throw new Error('--job-id is required for remote cancel') + return client.cancel({ jobId }) + } + case 'list': { + if (!serviceId) throw new Error('--service-id is required for remote list') + return client.list({ serviceId }) + } + default: + throw new Error(`Unsupported remote command: ${command}`) + } +} diff --git a/packages/plugin-obsessiondb/src/backfill/index.ts b/packages/plugin-obsessiondb/src/backfill/index.ts index db72220..8d32ff9 100644 --- a/packages/plugin-obsessiondb/src/backfill/index.ts +++ b/packages/plugin-obsessiondb/src/backfill/index.ts @@ -1,4 +1,13 @@ export { handleBackfillCommand } from './handler.js' +export { createJobsClient, type JobsClient } from './client.js' +export { + jobsContract, + jobStatusSchema, + taskStatusSchema, + jobTaskSchema, + jobSummarySchema, + jobDetailSchema, +} from './contract.js' export const BACKFILL_EXTEND_COMMANDS = [ { @@ -11,4 +20,19 @@ export const BACKFILL_EXTEND_COMMANDS = [ }, ], }, + { + command: ['backfill status', 'backfill cancel', 'backfill list'], + flags: [ + { + name: '--job-id', + type: 'string' as const, + description: 'Remote job ID for status/cancel', + }, + { + name: '--service-id', + type: 'string' as const, + description: 'ObsessionDB service ID for listing jobs', + }, + ], + }, ] diff --git a/packages/plugin-obsessiondb/src/index.test.ts b/packages/plugin-obsessiondb/src/index.test.ts index 81dd1c5..28d1b2b 100644 --- a/packages/plugin-obsessiondb/src/index.test.ts +++ b/packages/plugin-obsessiondb/src/index.test.ts @@ -328,12 +328,12 @@ describe('onBeforePluginCommand — backfill interception', () => { return { context: { targetPlugin: 'backfill', - command: 'run', + command: 'status', config: {}, 
configPath: '/fake/clickhouse.config.ts', jsonMode: false, args: [], - flags: {}, + flags: { '--job-id': 'job-123' }, options: {}, print: (v: unknown) => printed.push(v), ...overrides, @@ -351,8 +351,29 @@ describe('onBeforePluginCommand — backfill interception', () => { test('intercepts backfill commands when authenticated', async () => { await setupAuth() + + const jobDetail = { + id: 'job-123', + serviceId: 'svc-1', + type: 'backfill', + target: 'my_table', + status: 'running', + concurrency: 4, + totalTasks: 10, + completedTasks: 3, + failedTasks: 0, + createdAt: '2026-03-29T00:00:00Z', + updatedAt: '2026-03-29T01:00:00Z', + workflowId: null, + metadata: null, + tasks: [], + } + globalThis.fetch = mock(async () => - new Response(JSON.stringify({ ok: true, run_id: 'r-abc' }), { status: 200 }) + new Response(JSON.stringify({ json: jobDetail }), { + status: 200, + headers: { 'content-type': 'application/json' }, + }) ) as typeof fetch const { context, printed } = makeHookContext() @@ -361,7 +382,7 @@ describe('onBeforePluginCommand — backfill interception', () => { expect(result.handled).toBe(true) expect(result.exitCode).toBe(0) - expect((printed[0] as Record).run_id).toBe('r-abc') + expect((printed[0] as Record).id).toBe('job-123') }) test('requires login when not authenticated', async () => { @@ -369,7 +390,7 @@ describe('onBeforePluginCommand — backfill interception', () => { originalXdg = process.env.XDG_CONFIG_HOME process.env.XDG_CONFIG_HOME = tempDir - const { context, printed } = makeHookContext() + const { context, printed } = makeHookContext({ command: 'status', flags: { '--job-id': 'job-1' } }) const plugin = obsessiondb().plugin const result = await plugin.hooks.onBeforePluginCommand(context as Parameters[0]) From 6c3d8e3f8cce67207639019c1ede82370a712390 Mon Sep 17 00:00:00 2001 From: KeKs0r Date: Sun, 29 Mar 2026 11:27:08 +0200 Subject: [PATCH 08/12] fix(plugin-obsessiondb): add grant_type to device token poll request RFC 8628 requires the grant_type 
field in the device authorization token request, which better-auth's device plugin enforces. Co-Authored-By: Claude Opus 4.6 (1M context) --- packages/plugin-obsessiondb/src/auth/api-client.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/plugin-obsessiondb/src/auth/api-client.ts b/packages/plugin-obsessiondb/src/auth/api-client.ts index 4fb1746..2449976 100644 --- a/packages/plugin-obsessiondb/src/auth/api-client.ts +++ b/packages/plugin-obsessiondb/src/auth/api-client.ts @@ -63,7 +63,7 @@ export async function pollDeviceToken( 'Content-Type': 'application/json', 'User-Agent': userAgent(), }, - body: JSON.stringify({ client_id: CLIENT_ID, device_code: deviceCode }), + body: JSON.stringify({ client_id: CLIENT_ID, device_code: deviceCode, grant_type: 'urn:ietf:params:oauth:grant-type:device_code' }), }) if (!res.ok) { From 05b12e54294a21983ca1cc4ed5f66d4e246a144a Mon Sep 17 00:00:00 2001 From: KeKs0r Date: Sun, 29 Mar 2026 11:58:17 +0200 Subject: [PATCH 09/12] fix(plugin-obsessiondb): handle 400 responses in device token polling RFC 8628 device flow returns authorization_pending and slow_down as 400 responses. The !res.ok guard was throwing before the body could be parsed, preventing the existing switch/case from handling these expected states. 
Co-Authored-By: Claude Opus 4.6 (1M context) --- packages/plugin-obsessiondb/src/auth/api-client.ts | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/packages/plugin-obsessiondb/src/auth/api-client.ts b/packages/plugin-obsessiondb/src/auth/api-client.ts index 2449976..662d2b2 100644 --- a/packages/plugin-obsessiondb/src/auth/api-client.ts +++ b/packages/plugin-obsessiondb/src/auth/api-client.ts @@ -66,13 +66,12 @@ export async function pollDeviceToken( body: JSON.stringify({ client_id: CLIENT_ID, device_code: deviceCode, grant_type: 'urn:ietf:params:oauth:grant-type:device_code' }), }) - if (!res.ok) { - const text = await res.text() - throw new Error(`Token poll failed: ${res.status} ${text}`) - } - const body = (await res.json()) as TokenPollResponse + if (!body.access_token && !body.error) { + throw new Error(`Token poll failed: ${res.status} ${JSON.stringify(body)}`) + } + if (body.access_token) return body.access_token switch (body.error) { From 0da33e0f27bc7dbfdf1e168613d26bbda1814412 Mon Sep 17 00:00:00 2001 From: KeKs0r Date: Sun, 29 Mar 2026 16:41:21 +0200 Subject: [PATCH 10/12] Commit current Progress --- bun.lock | 12 +++--------- package.json | 2 +- packages/plugin-backfill/src/index.ts | 3 +++ packages/plugin-obsessiondb/package.json | 2 +- packages/plugin-obsessiondb/src/index.ts | 3 +++ 5 files changed, 11 insertions(+), 11 deletions(-) diff --git a/bun.lock b/bun.lock index d66ca9d..82f2106 100644 --- a/bun.lock +++ b/bun.lock @@ -18,7 +18,7 @@ "p-map": "^7.0.4", "turbo": "^2.8.20", "typescript": "^5.8.0", - "zod": "3.24.4", + "zod": "3.25.76", }, }, "apps/docs": { @@ -96,7 +96,7 @@ "@chkit/core": "workspace:*", "@orpc/client": "1.13.4", "@orpc/contract": "1.13.4", - "zod": "3.24.4", + "zod": "3.25.76", }, }, "packages/plugin-pull": { @@ -1274,7 +1274,7 @@ "youch-core": ["youch-core@0.3.3", "", { "dependencies": { "@poppinss/exception": "^1.2.2", "error-stack-parser-es": "^1.0.5" } }, "
"sha512-ho7XuGjLaJ2hWHoK8yFnsUGy2Y5uDpqSTq1FkHLK4/oqKtyUU1AFbOOxY4IpC9f0fTLjwYbslUz0Po5BpD1wrA=="], - "zod": ["zod@3.24.4", "", {}, "sha512-OdqJE9UDRPwWsrHjLN2F8bPxvwJBK22EHLWtanu0LSYr5YqzsaaW3RMgmjwr8Rypg5k+meEJdSPXJZXE/yqOMg=="], + "zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="], "zod-to-json-schema": ["zod-to-json-schema@3.25.1", "", { "peerDependencies": { "zod": "^3.25 || ^4" } }, "sha512-pM/SU9d3YAggzi6MtR4h7ruuQlqKtad8e9S0fmxcMi+ueAK5Korys/aWcV9LIIHTVbj01NdzxcnXSN+O74ZIVA=="], @@ -1282,12 +1282,8 @@ "zwitch": ["zwitch@2.0.4", "", {}, "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A=="], - "@astrojs/sitemap/zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="], - "@astrojs/telemetry/ci-info": ["ci-info@4.4.0", "", {}, "sha512-77PSwercCZU2Fc4sX94eF8k8Pxte6JAwL4/ICZLFjJLqegs7kCuAsqqj/70NQF6TvDpgFjkubQB2FW2ZZddvQg=="], - "@chkit/docs/zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="], - "@chkit/plugin-backfill/zod": ["zod@4.3.6", "", {}, "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg=="], "@chkit/plugin-codegen/zod": ["zod@4.3.6", "", {}, "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg=="], @@ -1316,8 +1312,6 @@ "astro/package-manager-detector": ["package-manager-detector@1.6.0", "", {}, "sha512-61A5ThoTiDG/C8s8UMZwSorAGwMJ0ERVGj2OjoW5pAalsNOg15+iQiPzrLJ4jhZ1HJzmC2PIHT2oEiH3R5fzNA=="], - "astro/zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="], - "boxen/type-fest": ["type-fest@4.41.0", "", {}, "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA=="], "csso/css-tree": 
["css-tree@2.2.1", "", { "dependencies": { "mdn-data": "2.0.28", "source-map-js": "^1.0.1" } }, "sha512-OA0mILzGc1kCOCSJerOeqDxDQ4HOh+G8NbOJFOTgOCzpw7fCBubk0fEyxp8AgOL/jvLgYA/uV0cMbe43ElF1JA=="], diff --git a/package.json b/package.json index f9f9716..c09af5c 100644 --- a/package.json +++ b/package.json @@ -44,7 +44,7 @@ "p-map": "^7.0.4", "turbo": "^2.8.20", "typescript": "^5.8.0", - "zod": "3.24.4" + "zod": "3.25.76" }, "dependencies": { "wrangler": "^4.65.0" diff --git a/packages/plugin-backfill/src/index.ts b/packages/plugin-backfill/src/index.ts index a781305..3420da7 100644 --- a/packages/plugin-backfill/src/index.ts +++ b/packages/plugin-backfill/src/index.ts @@ -2,6 +2,7 @@ import './table-config.js' export { backfill, createBackfillPlugin } from './plugin.js' export { executeBackfill, syncProgress } from './async-backfill.js' +export { analyzeAndChunk } from './chunking/analyze.js' export type { BackfillOptions, BackfillChunkState, @@ -11,3 +12,5 @@ export type { export type { BackfillPlugin, BackfillPluginOptions, BackfillPluginRegistration } from './types.js' export type { PluginConfig } from './options.js' export type { BackfillTableConfig } from './table-config.js' +export type { AnalyzeAndChunkInput, AnalyzeAndChunkResult } from './chunking/analyze.js' +export type { PlannedChunk, PartitionInfo, SortKeyInfo } from './chunking/types.js' diff --git a/packages/plugin-obsessiondb/package.json b/packages/plugin-obsessiondb/package.json index adf2527..77a28c6 100644 --- a/packages/plugin-obsessiondb/package.json +++ b/packages/plugin-obsessiondb/package.json @@ -44,6 +44,6 @@ "@chkit/core": "workspace:*", "@orpc/client": "1.13.4", "@orpc/contract": "1.13.4", - "zod": "3.24.4" + "zod": "3.25.76" } } diff --git a/packages/plugin-obsessiondb/src/index.ts b/packages/plugin-obsessiondb/src/index.ts index c25122c..77efd28 100644 --- a/packages/plugin-obsessiondb/src/index.ts +++ b/packages/plugin-obsessiondb/src/index.ts @@ -10,6 +10,9 @@ import { 
createRemoteExecutor } from './query/remote-executor.js' import { SELECT_SERVICE_COMMAND } from './service/commands.js' import { loadSelectedService } from './service/storage.js' +export { loadCredentials, resolveBaseUrl, type Credentials } from './auth/index.js' +export { createJobsClient, type JobsClient } from './backfill/index.js' + export type ObsessionDBPluginOptions = Record interface PluginCommand { From 31e99272dbdfd2ea454ee27524fcbc510b98fa28 Mon Sep 17 00:00:00 2001 From: KeKs0r Date: Sun, 29 Mar 2026 19:13:54 +0200 Subject: [PATCH 11/12] refactor(plugin-obsessiondb): migrate remote executor and services to oRPC Replace REST API calls with oRPC client for all remote operations. The remote executor now routes SQL through workbench.query.execute instead of the removed /api/v1/ endpoints. Extract shared introspection helpers (buildIntrospectedTables, normalize functions) from @chkit/clickhouse so both local and remote executors reuse the same logic. Co-Authored-By: Claude Opus 4.6 (1M context) --- bun.lock | 16 +- package.json | 2 +- packages/clickhouse/src/index.ts | 112 +++++++------- .../plugin-obsessiondb/src/backfill/client.ts | 35 +---- .../plugin-obsessiondb/src/backfill/index.ts | 4 +- packages/plugin-obsessiondb/src/client.ts | 33 ++++ .../plugin-obsessiondb/src/contract/index.ts | 3 + .../contract.ts => contract/jobs.ts} | 12 +- .../src/contract/services.ts | 56 +++++++ .../src/contract/workbench.ts | 36 +++++ .../src/query/api-client.ts | 88 ----------- .../src/query/remote-executor.ts | 142 +++++++++++++++--- .../plugin-obsessiondb/src/service/api.ts | 7 +- .../plugin-obsessiondb/src/service/select.ts | 4 +- .../plugin-obsessiondb/src/service/types.ts | 9 +- 15 files changed, 337 insertions(+), 222 deletions(-) create mode 100644 packages/plugin-obsessiondb/src/client.ts create mode 100644 packages/plugin-obsessiondb/src/contract/index.ts rename packages/plugin-obsessiondb/src/{backfill/contract.ts => contract/jobs.ts} (85%) create mode 100644 
packages/plugin-obsessiondb/src/contract/services.ts create mode 100644 packages/plugin-obsessiondb/src/contract/workbench.ts delete mode 100644 packages/plugin-obsessiondb/src/query/api-client.ts diff --git a/bun.lock b/bun.lock index 82f2106..1191f8c 100644 --- a/bun.lock +++ b/bun.lock @@ -16,7 +16,7 @@ "@orpc/contract": "1.13.4", "@types/node": "^24.0.0", "p-map": "^7.0.4", - "turbo": "^2.8.20", + "turbo": "^2.8.21", "typescript": "^5.8.0", "zod": "3.25.76", }, @@ -466,17 +466,17 @@ "@standard-schema/spec": ["@standard-schema/spec@1.1.0", "", {}, "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w=="], - "@turbo/darwin-64": ["@turbo/darwin-64@2.8.20", "", { "os": "darwin", "cpu": "x64" }, "sha512-FQ9EX1xMU5nbwjxXxM3yU88AQQ6Sqc6S44exPRroMcx9XZHqqppl5ymJF0Ig/z3nvQNwDmz1Gsnvxubo+nXWjQ=="], + "@turbo/darwin-64": ["@turbo/darwin-64@2.8.21", "", { "os": "darwin", "cpu": "x64" }, "sha512-kfGoM0Iw8ZNZpbds+4IzOe0hjvHldqJwUPRAjXJi3KBxg/QOZL95N893SRoMtf2aJ+jJ3dk32yPkp8rvcIjP9g=="], - "@turbo/darwin-arm64": ["@turbo/darwin-arm64@2.8.20", "", { "os": "darwin", "cpu": "arm64" }, "sha512-Gpyh9ATFGThD6/s9L95YWY54cizg/VRWl2B67h0yofG8BpHf67DFAh9nuJVKG7bY0+SBJDAo5cMur+wOl9YOYw=="], + "@turbo/darwin-arm64": ["@turbo/darwin-arm64@2.8.21", "", { "os": "darwin", "cpu": "arm64" }, "sha512-o9HEflxUEyr987x0cTUzZBhDOyL6u95JmdmlkH2VyxAw7zq2sdtM5e72y9ufv2N5SIoOBw1fVn9UES5VY5H6vQ=="], - "@turbo/linux-64": ["@turbo/linux-64@2.8.20", "", { "os": "linux", "cpu": "x64" }, "sha512-p2QxWUYyYUgUFG0b0kR+pPi8t7c9uaVlRtjTTI1AbCvVqkpjUfCcReBn6DgG/Hu8xrWdKLuyQFaLYFzQskZbcA=="], + "@turbo/linux-64": ["@turbo/linux-64@2.8.21", "", { "os": "linux", "cpu": "x64" }, "sha512-uTxlCcXWy5h1fSSymP8XSJ+AudzEHMDV3IDfKX7+DGB8kgJ+SLoTUAH7z4OFA7I/l2sznz0upPdbNNZs91YMag=="], - "@turbo/linux-arm64": ["@turbo/linux-arm64@2.8.20", "", { "os": "linux", "cpu": "arm64" }, "sha512-Gn5yjlZGLRZWarLWqdQzv0wMqyBNIdq1QLi48F1oY5Lo9kiohuf7BPQWtWxeNVS2NgJ1+nb/DzK1JduYC4AWOA=="], + 
"@turbo/linux-arm64": ["@turbo/linux-arm64@2.8.21", "", { "os": "linux", "cpu": "arm64" }, "sha512-cdHIcxNcihHHkCHp0Y4Zb60K4Qz+CK4xw1gb6s/t/9o4SMeMj+hTBCtoW6QpPnl9xPYmxuTou8Zw6+cylTnREg=="], - "@turbo/windows-64": ["@turbo/windows-64@2.8.20", "", { "os": "win32", "cpu": "x64" }, "sha512-vyaDpYk/8T6Qz5V/X+ihKvKFEZFUoC0oxYpC1sZanK6gaESJlmV3cMRT3Qhcg4D2VxvtC2Jjs9IRkrZGL+exLw=="], + "@turbo/windows-64": ["@turbo/windows-64@2.8.21", "", { "os": "win32", "cpu": "x64" }, "sha512-/iBj4OzbqEY8CX+eaeKbBTMZv2CLXNrt0692F7HnK7LcyYwyDecaAiSET6ZzL4opT7sbwkKvzAC/fhqT3Quu1A=="], - "@turbo/windows-arm64": ["@turbo/windows-arm64@2.8.20", "", { "os": "win32", "cpu": "arm64" }, "sha512-voicVULvUV5yaGXo0Iue13BcHGYW3u0VgqSbfQwBaHbpj1zLjYV4KIe+7fYIo6DO8FVUJzxFps3ODCQG/Wy2Qw=="], + "@turbo/windows-arm64": ["@turbo/windows-arm64@2.8.21", "", { "os": "win32", "cpu": "arm64" }, "sha512-95tMA/ZbIidJFUUtkmqioQ1gf3n3I1YbRP3ZgVdWTVn2qVbkodcIdGXBKRHHrIbRsLRl99SiHi/L7IxhpZDagQ=="], "@types/debug": ["@types/debug@4.1.12", "", { "dependencies": { "@types/ms": "*" } }, "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ=="], @@ -1186,7 +1186,7 @@ "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], - "turbo": ["turbo@2.8.20", "", { "optionalDependencies": { "@turbo/darwin-64": "2.8.20", "@turbo/darwin-arm64": "2.8.20", "@turbo/linux-64": "2.8.20", "@turbo/linux-arm64": "2.8.20", "@turbo/windows-64": "2.8.20", "@turbo/windows-arm64": "2.8.20" }, "bin": { "turbo": "bin/turbo" } }, "sha512-Rb4qk5YT8RUwwdXtkLpkVhNEe/lor6+WV7S5tTlLpxSz6MjV5Qi8jGNn4gS6NAvrYGA/rNrE6YUQM85sCZUDbQ=="], + "turbo": ["turbo@2.8.21", "", { "optionalDependencies": { "@turbo/darwin-64": "2.8.21", "@turbo/darwin-arm64": "2.8.21", "@turbo/linux-64": "2.8.21", "@turbo/linux-arm64": "2.8.21", "@turbo/windows-64": "2.8.21", "@turbo/windows-arm64": "2.8.21" }, "bin": { "turbo": "bin/turbo" } }, 
"sha512-FlJ8OD5Qcp0jTAM7E4a/RhUzRNds2GzKlyxHKA6N247VLy628rrxAGlMpIXSz6VB430+TiQDJ/SMl6PL1lu6wQ=="], "type-fest": ["type-fest@5.5.0", "", { "dependencies": { "tagged-tag": "^1.0.0" } }, "sha512-PlBfpQwiUvGViBNX84Yxwjsdhd1TUlXr6zjX7eoirtCPIr08NAmxwa+fcYBTeRQxHo9YC9wwF3m9i700sHma8g=="], diff --git a/package.json b/package.json index c09af5c..d51b17f 100644 --- a/package.json +++ b/package.json @@ -42,7 +42,7 @@ "@orpc/contract": "1.13.4", "@types/node": "^24.0.0", "p-map": "^7.0.4", - "turbo": "^2.8.20", + "turbo": "^2.8.21", "typescript": "^5.8.0", "zod": "3.25.76" }, diff --git a/packages/clickhouse/src/index.ts b/packages/clickhouse/src/index.ts index 2626b24..6081976 100644 --- a/packages/clickhouse/src/index.ts +++ b/packages/clickhouse/src/index.ts @@ -58,14 +58,14 @@ export interface SchemaObjectRef { name: string } -interface SystemTableRow { +export interface SystemTableRow { database: string name: string engine: string create_table_query?: string } -interface SystemColumnRow { +export interface SystemColumnRow { database: string table: string name: string @@ -76,7 +76,7 @@ interface SystemColumnRow { position: number } -interface SystemSkippingIndexRow { +export interface SystemSkippingIndexRow { database: string table: string name: string @@ -119,7 +119,7 @@ export function inferSchemaKindFromEngine(engine: string): SchemaObjectRef['kind } -function normalizeColumnFromSystemRow(row: SystemColumnRow): ColumnDefinition { +export function normalizeColumnFromSystemRow(row: SystemColumnRow): ColumnDefinition { const nullableMatch = row.type.match(/^Nullable\((.+)\)$/) const type = nullableMatch?.[1] ? 
nullableMatch[1] : row.type const nullable = Boolean(nullableMatch?.[1]) @@ -155,7 +155,7 @@ function parseIndexType(value: string): Pick inferSchemaKindFromEngine(row.engine) === 'table') + if (tableRows.length === 0) return [] + + const columnsByTable = new Map() + for (const row of columns) { + const key = `${row.database}.${row.table}` + const rows = columnsByTable.get(key) + if (rows) rows.push(row) + else columnsByTable.set(key, [row]) + } + + const indexesByTable = new Map() + for (const row of indexes) { + const key = `${row.database}.${row.table}` + const rows = indexesByTable.get(key) + if (rows) rows.push(row) + else indexesByTable.set(key, [row]) + } + + return tableRows + .map((row) => { + const key = `${row.database}.${row.name}` + const columnRows = (columnsByTable.get(key) ?? []).sort((a, b) => a.position - b.position) + const indexRows = indexesByTable.get(key) ?? [] + return { + database: row.database, + name: row.name, + engine: parseEngineFromCreateTableQuery(row.create_table_query), + primaryKey: parsePrimaryKeyFromCreateTableQuery(row.create_table_query), + orderBy: parseOrderByFromCreateTableQuery(row.create_table_query), + uniqueKey: parseUniqueKeyFromCreateTableQuery(row.create_table_query), + partitionBy: parsePartitionByFromCreateTableQuery(row.create_table_query), + columns: columnRows.map(normalizeColumnFromSystemRow), + settings: parseSettingsFromCreateTableQuery(row.create_table_query), + indexes: indexRows.map(normalizeIndexFromSystemRow), + projections: parseProjectionsFromCreateTableQuery(row.create_table_query), + ttl: parseTTLFromCreateTableQuery(row.create_table_query), + } + }) + .sort((a, b) => { + const dbOrder = a.database.localeCompare(b.database) + if (dbOrder !== 0) return dbOrder + return a.name.localeCompare(b.name) + }) +} + const NETWORK_ERROR_LABELS: Record = { ECONNREFUSED: 'connection refused', ENOTFOUND: 'host not found', @@ -378,9 +429,6 @@ FROM system.tables WHERE is_temporary = 0 AND database IN 
(${quotedDatabases})` ) - const tableRows = tables.filter((row) => inferSchemaKindFromEngine(row.engine) === 'table') - if (tableRows.length === 0) return [] - const columns = await this.query( `SELECT database, table, name, type, default_kind, default_expression, comment, position FROM system.columns @@ -392,53 +440,7 @@ FROM system.data_skipping_indices WHERE database IN (${quotedDatabases})` ) - const columnsByTable = new Map() - for (const row of columns) { - const key = `${row.database}.${row.table}` - const rows = columnsByTable.get(key) - if (rows) { - rows.push(row) - } else { - columnsByTable.set(key, [row]) - } - } - - const indexesByTable = new Map() - for (const row of indexes) { - const key = `${row.database}.${row.table}` - const rows = indexesByTable.get(key) - if (rows) { - rows.push(row) - } else { - indexesByTable.set(key, [row]) - } - } - - return tableRows - .map((row) => { - const key = `${row.database}.${row.name}` - const columnRows = (columnsByTable.get(key) ?? []).sort((a, b) => a.position - b.position) - const indexRows = indexesByTable.get(key) ?? 
[] - return { - database: row.database, - name: row.name, - engine: parseEngineFromCreateTableQuery(row.create_table_query), - primaryKey: parsePrimaryKeyFromCreateTableQuery(row.create_table_query), - orderBy: parseOrderByFromCreateTableQuery(row.create_table_query), - uniqueKey: parseUniqueKeyFromCreateTableQuery(row.create_table_query), - partitionBy: parsePartitionByFromCreateTableQuery(row.create_table_query), - columns: columnRows.map(normalizeColumnFromSystemRow), - settings: parseSettingsFromCreateTableQuery(row.create_table_query), - indexes: indexRows.map(normalizeIndexFromSystemRow), - projections: parseProjectionsFromCreateTableQuery(row.create_table_query), - ttl: parseTTLFromCreateTableQuery(row.create_table_query), - } - }) - .sort((a, b) => { - const dbOrder = a.database.localeCompare(b.database) - if (dbOrder !== 0) return dbOrder - return a.name.localeCompare(b.name) - }) + return buildIntrospectedTables(tables, columns, indexes) }, } } diff --git a/packages/plugin-obsessiondb/src/backfill/client.ts b/packages/plugin-obsessiondb/src/backfill/client.ts index 1930741..96642aa 100644 --- a/packages/plugin-obsessiondb/src/backfill/client.ts +++ b/packages/plugin-obsessiondb/src/backfill/client.ts @@ -1,36 +1,9 @@ -import { createORPCClient } from '@orpc/client' -import { RPCLink } from '@orpc/client/fetch' -import type { ContractRouterClient } from '@orpc/contract' import type { Credentials } from '../auth/index.js' -import { jobsContract } from './contract.js' +import { createApiClient, type ApiClient } from '../client.js' +export { SessionExpiredError, isSessionExpiredError } from '../api-request.js' -export type JobsClient = ContractRouterClient - -export class SessionExpiredError extends Error { - constructor() { - super('Session expired. 
Run `chkit obsessiondb login` to re-authenticate.') - } -} - -export function isSessionExpiredError(error: unknown): boolean { - return error instanceof SessionExpiredError -} +export type JobsClient = ApiClient['jobs'] export function createJobsClient(creds: Credentials): JobsClient { - const link = new RPCLink({ - url: `${creds.base_url}/rpc/jobs`, - headers: () => ({ - Authorization: `Bearer ${creds.access_token}`, - 'User-Agent': 'chkit-cli', - }), - fetch: async (input, init) => { - const res = await globalThis.fetch(input, init) - if (res.status === 401) { - throw new SessionExpiredError() - } - return res - }, - }) - - return createORPCClient(link) + return createApiClient(creds).jobs } diff --git a/packages/plugin-obsessiondb/src/backfill/index.ts b/packages/plugin-obsessiondb/src/backfill/index.ts index 8d32ff9..4098174 100644 --- a/packages/plugin-obsessiondb/src/backfill/index.ts +++ b/packages/plugin-obsessiondb/src/backfill/index.ts @@ -3,11 +3,9 @@ export { createJobsClient, type JobsClient } from './client.js' export { jobsContract, jobStatusSchema, - taskStatusSchema, - jobTaskSchema, jobSummarySchema, jobDetailSchema, -} from './contract.js' +} from '../contract/jobs.js' export const BACKFILL_EXTEND_COMMANDS = [ { diff --git a/packages/plugin-obsessiondb/src/client.ts b/packages/plugin-obsessiondb/src/client.ts new file mode 100644 index 0000000..ca50c4c --- /dev/null +++ b/packages/plugin-obsessiondb/src/client.ts @@ -0,0 +1,33 @@ +import { createORPCClient } from '@orpc/client' +import { RPCLink } from '@orpc/client/fetch' +import type { ContractRouterClient } from '@orpc/contract' +import { SessionExpiredError } from './api-request.js' +import type { Credentials } from './auth/index.js' +import { servicesContract, jobsContract, workbenchContract } from './contract/index.js' + +const contract = { + services: servicesContract, + jobs: jobsContract, + workbench: workbenchContract, +} + +export type ApiClient = ContractRouterClient + +export 
function createApiClient(creds: Credentials): ApiClient { + const link = new RPCLink({ + url: `${creds.base_url}/rpc`, + headers: () => ({ + Authorization: `Bearer ${creds.access_token}`, + 'User-Agent': 'chkit-cli', + }), + fetch: async (input, init) => { + const res = await globalThis.fetch(input, init) + if (res.status === 401) { + throw new SessionExpiredError() + } + return res + }, + }) + + return createORPCClient(link) +} diff --git a/packages/plugin-obsessiondb/src/contract/index.ts b/packages/plugin-obsessiondb/src/contract/index.ts new file mode 100644 index 0000000..1e22b0a --- /dev/null +++ b/packages/plugin-obsessiondb/src/contract/index.ts @@ -0,0 +1,3 @@ +export { serviceSchema, serviceStatusSchema, servicesContract } from './services.js' +export { jobsContract, jobDetailSchema, jobSummarySchema, jobStatusSchema } from './jobs.js' +export { workbenchContract } from './workbench.js' diff --git a/packages/plugin-obsessiondb/src/backfill/contract.ts b/packages/plugin-obsessiondb/src/contract/jobs.ts similarity index 85% rename from packages/plugin-obsessiondb/src/backfill/contract.ts rename to packages/plugin-obsessiondb/src/contract/jobs.ts index 1d1423d..6b8364d 100644 --- a/packages/plugin-obsessiondb/src/backfill/contract.ts +++ b/packages/plugin-obsessiondb/src/contract/jobs.ts @@ -1,3 +1,7 @@ +/** + * Copied from @obsessiondb/feature-jobs-contract — will be replaced + * by a direct dependency once the contract package is published. 
+ */ import { oc } from '@orpc/contract' import { z } from 'zod' @@ -60,11 +64,15 @@ export const jobsContract = { ) .output(z.object({ jobId: z.string() })), - get: oc.input(z.object({ jobId: z.string() })).output(jobDetailSchema), + get: oc + .input(z.object({ jobId: z.string() })) + .output(jobDetailSchema), list: oc .input(z.object({ serviceId: z.string() })) .output(z.object({ jobs: z.array(jobSummarySchema) })), - cancel: oc.input(z.object({ jobId: z.string() })).output(z.object({})), + cancel: oc + .input(z.object({ jobId: z.string() })) + .output(z.object({})), } diff --git a/packages/plugin-obsessiondb/src/contract/services.ts b/packages/plugin-obsessiondb/src/contract/services.ts new file mode 100644 index 0000000..3782fb1 --- /dev/null +++ b/packages/plugin-obsessiondb/src/contract/services.ts @@ -0,0 +1,56 @@ +/** + * Copied from @obsessiondb/contract-console — will be replaced + * by a direct dependency once the contract package is published. + */ +import { oc } from '@orpc/contract' +import { z } from 'zod' + +export const serviceStatusSchema = z.enum([ + 'provisioning', + 'running', + 'scaling', + 'stopping', + 'stopped', + 'starting', + 'terminating', + 'terminated', + 'error', +]) + +export const serviceSchema = z.object({ + id: z.string(), + name: z.string(), + status: serviceStatusSchema, + tier: z.number().int(), + nodes: z.number().int(), + connectionUrl: z.string().nullable(), + connectionUsername: z.string().nullable(), + desiredStatus: z.enum(['running', 'stopped', 'terminated']), + desiredTier: z.number().int(), + desiredNodes: z.number().int(), + createdAt: z.string().datetime(), + managed: z.boolean(), +}) + +export const servicesContract = { + list: oc + .input(z.object({})) + .output(z.object({ services: z.array(serviceSchema) })), + + listAll: oc.input(z.object({})).output( + z.object({ + organizations: z.array( + z.object({ + id: z.string(), + name: z.string(), + slug: z.string(), + services: z.array(serviceSchema), + }), + ), + }), + 
), + + get: oc + .input(z.object({ serviceId: z.string() })) + .output(serviceSchema), +} diff --git a/packages/plugin-obsessiondb/src/contract/workbench.ts b/packages/plugin-obsessiondb/src/contract/workbench.ts new file mode 100644 index 0000000..c6d7843 --- /dev/null +++ b/packages/plugin-obsessiondb/src/contract/workbench.ts @@ -0,0 +1,36 @@ +/** + * Copied from @obsessiondb/feature-workbench-contract — will be replaced + * by a direct dependency once the contract package is published. + */ +import { oc } from '@orpc/contract' +import { z } from 'zod' + +const queryResultSchema = z.object({ + data: z.array(z.record(z.unknown())), + meta: z.array(z.object({ name: z.string(), type: z.string() })), + rows: z.number().int(), + statistics: z + .object({ + bytes_read: z.number().int().optional(), + rows_read: z.number().int().optional(), + elapsed: z.number().optional(), + }) + .optional(), + message: z.string().optional(), + error: z.string().optional(), +}) + +export const workbenchContract = { + query: { + execute: oc + .input( + z.object({ + serviceId: z.string(), + query: z.string().min(1), + settings: z.record(z.union([z.string(), z.number()])).optional(), + database: z.string().optional(), + }), + ) + .output(queryResultSchema), + }, +} diff --git a/packages/plugin-obsessiondb/src/query/api-client.ts b/packages/plugin-obsessiondb/src/query/api-client.ts deleted file mode 100644 index 24cecfe..0000000 --- a/packages/plugin-obsessiondb/src/query/api-client.ts +++ /dev/null @@ -1,88 +0,0 @@ -import type { Credentials } from '../auth/index.js' -import { apiRequest } from '../api-request.js' - -function queryPath(serviceId: string, action: string): string { - return `/api/v1/services/${serviceId}/query/${action}` -} - -export async function remoteCommand( - serviceId: string, - sql: string, - creds: Credentials -): Promise { - await apiRequest<{ ok: boolean }>(queryPath(serviceId, 'command'), creds, { sql }) -} - -export async function remoteQuery( - serviceId: 
string, - sql: string, - creds: Credentials -): Promise { - const res = await apiRequest<{ rows: T[] }>(queryPath(serviceId, 'query'), creds, { sql }) - return res.rows -} - -export async function remoteInsert>( - serviceId: string, - params: { table: string; values: T[] }, - creds: Credentials -): Promise { - await apiRequest<{ ok: boolean }>(queryPath(serviceId, 'insert'), creds, params) -} - -export async function remoteSubmit( - serviceId: string, - sql: string, - creds: Credentials, - queryId?: string -): Promise { - const res = await apiRequest<{ query_id: string }>(queryPath(serviceId, 'submit'), creds, { - sql, - query_id: queryId, - }) - return res.query_id -} - -export async function remoteQueryStatus( - serviceId: string, - queryId: string, - creds: Credentials, - options?: { afterTime?: string } -): Promise<{ - status: 'running' | 'finished' | 'failed' | 'unknown' - readRows?: number - readBytes?: number - writtenRows?: number - writtenBytes?: number - elapsedMs?: number - durationMs?: number - error?: string -}> { - return apiRequest(queryPath(serviceId, 'status'), creds, { - query_id: queryId, - ...options, - }) -} - -export async function remoteListSchemaObjects( - serviceId: string, - creds: Credentials -): Promise> { - const res = await apiRequest<{ - objects: Array<{ kind: 'table' | 'view' | 'materialized_view'; database: string; name: string }> - }>(queryPath(serviceId, 'schema-objects'), creds) - return res.objects -} - -export async function remoteListTableDetails( - serviceId: string, - databases: string[], - creds: Credentials -): Promise { - const res = await apiRequest<{ tables: unknown[] }>( - queryPath(serviceId, 'table-details'), - creds, - { databases } - ) - return res.tables -} diff --git a/packages/plugin-obsessiondb/src/query/remote-executor.ts b/packages/plugin-obsessiondb/src/query/remote-executor.ts index 678c18e..ef240ce 100644 --- a/packages/plugin-obsessiondb/src/query/remote-executor.ts +++ 
b/packages/plugin-obsessiondb/src/query/remote-executor.ts @@ -1,47 +1,141 @@ -import type { ClickHouseExecutor } from '@chkit/clickhouse' -import type { Credentials } from '../auth/index.js' +import type { ClickHouseExecutor, QueryStatus, SchemaObjectRef } from '@chkit/clickhouse' import { - remoteCommand, - remoteInsert, - remoteListSchemaObjects, - remoteListTableDetails, - remoteQuery, - remoteQueryStatus, - remoteSubmit, -} from './api-client.js' + buildIntrospectedTables, + inferSchemaKindFromEngine, + type SystemColumnRow, + type SystemSkippingIndexRow, + type SystemTableRow, +} from '@chkit/clickhouse' +import type { Credentials } from '../auth/index.js' +import { createApiClient } from '../client.js' export function createRemoteExecutor(deps: { credentials: Credentials serviceId: string }): ClickHouseExecutor { const { credentials, serviceId } = deps + const client = createApiClient(credentials) - return { + const executor: ClickHouseExecutor = { async command(sql) { - await remoteCommand(serviceId, sql, credentials) + await client.workbench.query.execute({ serviceId, query: sql }) }, - async query(sql: string) { - return remoteQuery(serviceId, sql, credentials) + + async query(sql: string): Promise { + const res = await client.workbench.query.execute({ serviceId, query: sql }) + return res.data as T[] }, + async insert>(params: { table: string; values: T[] }) { - await remoteInsert(serviceId, params, credentials) + if (params.values.length === 0) return + const columns = Object.keys(params.values[0]!) + const rows = params.values + .map( + (row) => + `(${columns.map((col) => { + const val = row[col] + if (val === null || val === undefined) return 'NULL' + if (typeof val === 'number') return String(val) + return `'${String(val).replace(/'/g, "\\'")}'` + }).join(', ')})`, + ) + .join(', ') + await executor.command(`INSERT INTO ${params.table} (${columns.join(', ')}) VALUES ${rows}`) }, + async submit(sql, queryId?) 
{ - return remoteSubmit(serviceId, sql, credentials, queryId) + await client.workbench.query.execute({ + serviceId, + query: sql, + settings: queryId ? { query_id: queryId } : undefined, + }) + return queryId ?? 'submitted' }, + async queryStatus(queryId, options?) { - return remoteQueryStatus(serviceId, queryId, credentials, options) + const afterFilter = options?.afterTime + ? `AND event_time >= '${options.afterTime}'` + : '' + + const running = await executor.query<{ query_id: string }>( + `SELECT query_id FROM system.processes WHERE query_id = '${queryId}' LIMIT 1`, + ) + if (running.length > 0) return { status: 'running' as const } + + const log = await executor.query<{ + type: string + written_rows: string + written_bytes: string + query_duration_ms: string + exception: string + }>( + `SELECT type, written_rows, written_bytes, query_duration_ms, exception +FROM system.query_log +WHERE query_id = '${queryId}' + AND type IN ('QueryFinish', 'ExceptionWhileProcessing') + ${afterFilter} +ORDER BY event_time DESC +LIMIT 1`, + ) + + if (log.length === 0) return { status: 'unknown' as const } + const row = log[0]! 
+ + if (row.type === 'QueryFinish') { + return { + status: 'finished' as const, + writtenRows: Number(row.written_rows), + writtenBytes: Number(row.written_bytes), + durationMs: Number(row.query_duration_ms), + } + } + + return { + status: 'failed' as const, + durationMs: Number(row.query_duration_ms), + error: row.exception, + } satisfies QueryStatus }, + async listSchemaObjects() { - return remoteListSchemaObjects(serviceId, credentials) + const rows = await executor.query<{ database: string; name: string; engine: string }>( + `SELECT database, name, engine +FROM system.tables +WHERE is_temporary = 0 + AND database NOT IN ('system', 'information_schema', 'INFORMATION_SCHEMA') + AND name NOT LIKE '_chkit_%'`, + ) + + const out: SchemaObjectRef[] = [] + for (const row of rows) { + const kind = inferSchemaKindFromEngine(row.engine) + if (!kind) continue + out.push({ kind, database: row.database, name: row.name }) + } + return out }, + async listTableDetails(databases) { - return remoteListTableDetails(serviceId, databases, credentials) as ReturnType< - ClickHouseExecutor['listTableDetails'] - > - }, - async close() { - // No-op — remote executor has no persistent connection + if (databases.length === 0) return [] + const quoted = databases.map((db) => `'${db.replace(/'/g, "''")}'`).join(', ') + + const [tables, columns, indexes] = await Promise.all([ + executor.query( + `SELECT database, name, engine, create_table_query FROM system.tables WHERE is_temporary = 0 AND database IN (${quoted})`, + ), + executor.query( + `SELECT database, \`table\`, name, type, default_kind, default_expression, comment, position FROM system.columns WHERE database IN (${quoted})`, + ), + executor.query( + `SELECT database, \`table\`, name, expr, type, granularity FROM system.data_skipping_indices WHERE database IN (${quoted})`, + ), + ]) + + return buildIntrospectedTables(tables, columns, indexes) }, + + async close() {}, } + + return executor } diff --git 
a/packages/plugin-obsessiondb/src/service/api.ts b/packages/plugin-obsessiondb/src/service/api.ts index 15e5803..33f3e77 100644 --- a/packages/plugin-obsessiondb/src/service/api.ts +++ b/packages/plugin-obsessiondb/src/service/api.ts @@ -1,8 +1,9 @@ import type { Credentials } from '../auth/index.js' -import { apiRequest } from '../api-request.js' +import { createApiClient } from '../client.js' import type { Service } from './types.js' export async function listServices(creds: Credentials): Promise { - const res = await apiRequest<{ services: Service[] }>('/api/v1/services', creds) - return res.services + const client = createApiClient(creds) + const res = await client.services.listAll({}) + return res.organizations.flatMap((org) => org.services) } diff --git a/packages/plugin-obsessiondb/src/service/select.ts b/packages/plugin-obsessiondb/src/service/select.ts index 1f1588d..5898a39 100644 --- a/packages/plugin-obsessiondb/src/service/select.ts +++ b/packages/plugin-obsessiondb/src/service/select.ts @@ -13,13 +13,13 @@ export async function selectServiceInteractive( if (services.length === 1) { const service = services[0]! - print(`Auto-selected service: ${service.name}${service.region ? ` (${service.region})` : ''}`) + print(`Auto-selected service: ${service.name} (${service.status})`) return service } print('\nAvailable services:') for (const [i, service] of services.entries()) { - print(` ${i + 1}. ${service.name}${service.region ? ` (${service.region})` : ''}`) + print(` ${i + 1}. 
${service.name} (${service.status})`) } const rl = createInterface({ input: process.stdin, output: process.stdout }) diff --git a/packages/plugin-obsessiondb/src/service/types.ts b/packages/plugin-obsessiondb/src/service/types.ts index c7d30e0..69072be 100644 --- a/packages/plugin-obsessiondb/src/service/types.ts +++ b/packages/plugin-obsessiondb/src/service/types.ts @@ -1,8 +1,7 @@ -export interface Service { - id: string - name: string - region?: string -} +import type { z } from 'zod' +import type { serviceSchema } from '../contract/index.js' + +export type Service = z.infer export interface SelectedService { service_id: string From 512c48c66a998dc13a5e81a9a9115c386697dd74 Mon Sep 17 00:00:00 2001 From: KeKs0r Date: Sun, 29 Mar 2026 22:38:06 +0200 Subject: [PATCH 12/12] =?UTF-8?q?=F0=9F=90=9B=20Throw=20error=20on=20remot?= =?UTF-8?q?e=20query=20failure?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/query/remote-executor.ts | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/packages/plugin-obsessiondb/src/query/remote-executor.ts b/packages/plugin-obsessiondb/src/query/remote-executor.ts index ef240ce..4ec54e4 100644 --- a/packages/plugin-obsessiondb/src/query/remote-executor.ts +++ b/packages/plugin-obsessiondb/src/query/remote-executor.ts @@ -7,7 +7,15 @@ import { type SystemTableRow, } from '@chkit/clickhouse' import type { Credentials } from '../auth/index.js' -import { createApiClient } from '../client.js' +import { createApiClient, type ApiClient } from '../client.js' + +function throwIfError( + res: Awaited>, +): void { + if (res.error) { + throw new Error(res.error) + } +} export function createRemoteExecutor(deps: { credentials: Credentials @@ -18,11 +26,13 @@ export function createRemoteExecutor(deps: { const executor: ClickHouseExecutor = { async command(sql) { - await client.workbench.query.execute({ serviceId, query: sql }) + const res = await 
client.workbench.query.execute({ serviceId, query: sql }) + throwIfError(res) }, async query(sql: string): Promise { const res = await client.workbench.query.execute({ serviceId, query: sql }) + throwIfError(res) return res.data as T[] }, @@ -44,11 +54,12 @@ export function createRemoteExecutor(deps: { }, async submit(sql, queryId?) { - await client.workbench.query.execute({ + const res = await client.workbench.query.execute({ serviceId, query: sql, settings: queryId ? { query_id: queryId } : undefined, }) + throwIfError(res) return queryId ?? 'submitted' },