diff --git a/.github/workflows/ci-superdoc.yml b/.github/workflows/ci-superdoc.yml index 1fd91d203a..c5e5f5fb73 100644 --- a/.github/workflows/ci-superdoc.yml +++ b/.github/workflows/ci-superdoc.yml @@ -33,6 +33,10 @@ jobs: node-version-file: .nvmrc cache: pnpm + - uses: oven-sh/setup-bun@v2 + with: + bun-version: 1.3.8 + - name: Install canvas system dependencies run: | sudo apt-get update diff --git a/.github/workflows/release-cli.yml b/.github/workflows/release-cli.yml deleted file mode 100644 index 49a21fc290..0000000000 --- a/.github/workflows/release-cli.yml +++ /dev/null @@ -1,61 +0,0 @@ -# Auto-releases on push to main (@next channel) -# For stable (@latest): cherry-pick commits to stable branch, then manually dispatch this workflow -name: 📦 Release cli - -on: - push: - branches: - - main - paths: - - 'apps/cli/**' - - '!**/*.md' - workflow_dispatch: - -permissions: - contents: write - packages: write - -concurrency: - group: release-cli-${{ github.ref }} - cancel-in-progress: true - -jobs: - release: - runs-on: ubuntu-24.04 - steps: - - name: Generate token - id: generate_token - uses: actions/create-github-app-token@v2 - with: - app-id: ${{ secrets.APP_ID }} - private-key: ${{ secrets.APP_PRIVATE_KEY }} - - - uses: actions/checkout@v6 - with: - fetch-depth: 0 - token: ${{ steps.generate_token.outputs.token }} - - - uses: pnpm/action-setup@v4 - - - uses: actions/setup-node@v6 - with: - node-version-file: .nvmrc - cache: pnpm - registry-url: 'https://registry.npmjs.org' - - - uses: oven-sh/setup-bun@v2 - - - name: Install dependencies - run: pnpm install - - - name: Build packages - run: pnpm run build - - - name: Release - env: - GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }} - NPM_TOKEN: ${{ secrets.NPM_TOKEN }} - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} - LINEAR_TOKEN: ${{ secrets.LINEAR_TOKEN }} - working-directory: apps/cli - run: pnpx semantic-release diff --git a/.gitignore b/.gitignore index 8e3a6765f6..67f41ef92c 100644 --- a/.gitignore 
+++ b/.gitignore @@ -76,3 +76,5 @@ test-corpus/ .pnpm-store devtools/visual-testing/pnpm-lock.yaml +.bun-cache/ + diff --git a/apps/cli/.gitignore b/apps/cli/.gitignore index b22604b141..2c22e7f9d5 100644 --- a/apps/cli/.gitignore +++ b/apps/cli/.gitignore @@ -12,3 +12,12 @@ dist # Finder (MacOS) folder config .DS_Store + +# local debug fixture captures +src/__tests__/fixtures-cli-debug*/ + +# native build artifacts +artifacts/ + +# staged platform binaries +platforms/*/bin/ diff --git a/apps/cli/.releaserc.cjs b/apps/cli/.releaserc.cjs index cbfdc9ed8f..9b930c6572 100644 --- a/apps/cli/.releaserc.cjs +++ b/apps/cli/.releaserc.cjs @@ -11,7 +11,7 @@ const config = { 'semantic-release-commit-filter', '@semantic-release/commit-analyzer', '@semantic-release/release-notes-generator', - ['@semantic-release/npm', { npmPublish: true }], + ['@semantic-release/npm', { npmPublish: false }], ], }; @@ -31,14 +31,14 @@ if (!isPrerelease) { config.plugins.push(['semantic-release-linear-app', { teamKeys: ['SD'], addComment: true, - packageName: 'cli', + packageName: 'superdoc-cli', commentTemplate: 'shipped in {package} {releaseLink} {channel}' }]); config.plugins.push([ '@semantic-release/github', { - successComment: ':tada: This ${issue.pull_request ? "PR" : "issue"} is included in **cli** v${nextRelease.version}\n\nThe release is available on [GitHub release](https://github.com/superdoc-dev/superdoc/releases/tag/${nextRelease.gitTag})', + successComment: ':tada: This ${issue.pull_request ? "PR" : "issue"} is included in **superdoc-cli** v${nextRelease.version}\n\nThe release is available on [GitHub release](${releases.find(release => release.pluginName === "@semantic-release/github").url})', } ]); diff --git a/apps/cli/AGENTS.md b/apps/cli/AGENTS.md new file mode 100644 index 0000000000..97a24f5746 --- /dev/null +++ b/apps/cli/AGENTS.md @@ -0,0 +1,78 @@ +# CLI Guardrails + +This package is the LLM-first CLI surface for SuperDoc. 
It should stay thin, predictable, and machine-safe. + +## Engine Agnosticism (Non-Negotiable) + +**The canonical v1 command surface must be engine-agnostic.** Command metadata, parsing, dispatch, and non-legacy command paths in `apps/cli/` must not import from, reference, or depend on ProseMirror (or any other editor engine) in any way. This includes: + +- No `editor.state`, `editor.view`, `editor.commands`, or `editor.storage` access. +- No ProseMirror types (`Node`, `EditorState`, `Transaction`, `Schema`, `Plugin`, etc.). +- No direct document traversal (`doc.descendants`, `doc.content.size`, `doc.textContent`, `doc.nodeAt`, etc.). +- No PM position arithmetic (`pos`, `nodeSize`, `resolve()`, `from`/`to` ranges from the engine). +- No PM schema name knowledge (e.g., mapping `'paragraph'`/`'tableHeader'` type strings). +- No PM attribute shape knowledge (e.g., reading `attrs.paraId`, `attrs.sdBlockId`, `attrs.paragraphProperties`). + +The canonical interface to document behavior is `editor.doc.*` (the Document API), typically reached via the shared operation dispatcher. If a capability is missing from the Document API, the fix is to add it there (or in adapters), never to work around it in CLI code. + +**Flag any violation immediately** — engine-aware code in the CLI is a structural bug, not a shortcut. + +`src/commands/legacy-compat.ts` is a temporary v0.x compatibility bridge (`search`, `read`, `replace-legacy`) and the only accepted exception. Do not copy this pattern into any v1 command path. + +## Core Design + +- Command handlers are orchestration only. +- Business logic belongs in Document API (`editor.doc.*`) and adapters, not CLI command files. +- JSON envelope output is the stable contract; pretty mode is a human helper. +- CLI metadata lives in `apps/cli/src/cli/*` and is derived from `@superdoc/document-api`. +- Runtime command registry/help/parser spec is built from `apps/cli/src/cli/commands.ts`. 
+- Wrapper/call execution funnels through `apps/cli/src/lib/operation-executor.ts`. +- Doc-backed operations dispatch through `apps/cli/src/lib/generic-dispatch.ts` (`read-orchestrator` / `mutation-orchestrator`). +- Runtime self-description must stay aligned with that same metadata source (`describe`, `describe command`, `host.describe`, `host.describe.command`). +- Contract/version truth comes from `@superdoc/document-api` (`CONTRACT_VERSION`, operation metadata, schemas). + +## Command Implementation Rules + +For each new command/operation, follow this flow: + +1. Define/extend metadata in `src/cli/operation-set.ts`, `src/cli/operation-params.ts`, and derived command specs. +2. Parse wrapper inputs with shared argument helpers (`src/lib/args.ts`, `src/lib/operation-wrapper-input.ts`). +3. Build canonical payloads (`Query`, `NodeAddress`, `TextAddress`) via shared libs (`find-query`, `payload`, `create-paragraph-input`). +4. Validate operation input and output through `src/lib/operation-args.ts` (plus structural validators in `src/lib/validate.ts`). +5. Dispatch through `src/lib/operation-executor.ts` (doc-backed, introspection, or allowed manual lifecycle/session path). +6. Return a stable `CommandExecution`; keep envelope formatting centralized in `src/index.ts`. + +## Non-Negotiables + +- **No engine internals in v1 CLI paths** — see "Engine Agnosticism" above. This is the highest-priority guardrail. +- Do not reintroduce address translation/mapping layers in CLI. +- Do not duplicate validation logic inside command files. +- Do not add PM node traversal, position resolution, or attribute inspection — these belong in Document API adapters. +- Keep `find` query-first: + - `--query-json` / `--query-file` are canonical. + - Flat flags are convenience syntax normalized in `src/lib/find-query.ts`. +- Use shared validators/constants from Document API types; do not fork enum lists in CLI. +- Keep command output deterministic and structured for SDK/agent use. 
+ +## Session + Stateless Behavior + +- Explicit ``/`--doc` means stateless execution. +- No `` means session-context execution (`--session` or active default session). +- Mutating commands: + - Stateless mode requires explicit output path. + - Stateful mode updates working doc and revision metadata. + +## Error and Output Rules + +- Emit stable error codes from `src/lib/errors.ts`. +- Preserve envelope shape from `src/lib/envelope.ts`. +- `--output json` is default; `--pretty` must not hide machine data. + +## Testing Rules + +- Add/extend tests in `src/__tests__/` for every new command path. +- Cover: + - stateless and session modes, + - JSON and pretty outputs, + - validation failures and error codes. +- Keep `src/__tests__/lib/cli-import-boundaries.test.ts` passing to prevent engine/internal import regressions. diff --git a/apps/cli/README.md b/apps/cli/README.md index 2e03a4273b..5bacc2606e 100644 --- a/apps/cli/README.md +++ b/apps/cli/README.md @@ -1,71 +1,262 @@ # @superdoc-dev/cli -The command-line interface for [SuperDoc](https://superdoc.dev) — DOCX editing in your terminal. +LLM-first CLI for deterministic DOCX operations through SuperDoc's Document API. + +## Install ```bash -npx @superdoc-dev/cli search "CONFIDENTIAL" ./legal/*.docx +npm install -g @superdoc-dev/cli ``` -## Commands +The package automatically installs a native binary for your platform via optionalDependencies. 
Supported platforms: -| Command | Status | Description | -|---------|--------|-------------| -| `search` | Available | Find text across documents | -| `replace` | Available | Find and replace text | -| `replace --track` | Coming soon | Replace with track changes | -| `read` | Available | Extract plain text | -| `diff` | Coming soon | Compare two documents | -| `convert` | Coming soon | DOCX ↔ HTML ↔ Markdown | -| `comments` | Coming soon | List, add, resolve comments | -| `accept` | Coming soon | Accept/reject track changes | +| Platform | Package | +|----------|---------| +| macOS (Apple Silicon) | `@superdoc-dev/cli-darwin-arm64` | +| macOS (Intel) | `@superdoc-dev/cli-darwin-x64` | +| Linux (x64) | `@superdoc-dev/cli-linux-x64` | +| Linux (ARM64) | `@superdoc-dev/cli-linux-arm64` | +| Windows (x64) | `@superdoc-dev/cli-windows-x64` | -Powered by the SuperDoc document engine. Bulk operations, glob patterns, JSON output. - -## Install +## Usage ```bash -npm install -g @superdoc-dev/cli +superdoc [options] ``` -Or run directly: +## Getting Started + +Stateful editing flow (recommended for multi-step edits): ```bash -npx @superdoc-dev/cli +superdoc open ./contract.docx +superdoc find --type text --pattern "termination" +superdoc replace --target-json '{"kind":"text","blockId":"p1","range":{"start":0,"end":11}}' --text "expiration" +superdoc save --in-place +superdoc close ``` -## Usage +Legacy compatibility commands (v0.x behavior): ```bash -# Search across documents superdoc search "indemnification" ./contracts/*.docx +superdoc replace-legacy "ACME Corp" "Globex Inc" ./merger/*.docx +superdoc read ./proposal.docx +``` -# Find and replace -superdoc replace "ACME Corp" "Globex Inc" ./merger/*.docx +## Command Index -# Extract text -superdoc read ./proposal.docx +| Category | Commands | +|----------|----------| +| query | `find`, `get-node`, `get-node-by-id`, `info` | +| mutation | `insert`, `replace`, `delete` | +| format | `format bold` | +| create | `create 
paragraph` | +| lists | `lists list`, `lists get`, `lists insert`, `lists set-type`, `lists indent`, `lists outdent`, `lists restart`, `lists exit` | +| comments | `comments add`, `comments edit`, `comments reply`, `comments move`, `comments resolve`, `comments remove`, `comments set-internal`, `comments set-active`, `comments go-to`, `comments get`, `comments list` | +| trackChanges | `track-changes list`, `track-changes get`, `track-changes accept`, `track-changes reject`, `track-changes accept-all`, `track-changes reject-all` | +| lifecycle | `open`, `save`, `close` | +| session | `session list`, `session save`, `session close`, `session set-default`, `session use` | +| introspection | `status`, `describe`, `describe command` | +| low-level | `call ` | +| legacy compat | `search`, `replace-legacy `, `read` | + +For full command help and examples, run: + +```bash +superdoc --help +``` + +## v1 Breaking Changes + +This CLI replaces the previous `@superdoc-dev/cli` package surface with the v1 contract-driven command set. + +| Legacy command | v1 status | Migration | +|---------------|-----------|-----------| +| `superdoc replace ` | Renamed to `replace-legacy` | Use `replace-legacy`, or use `find` + `replace --target-json` for the v1 workflow. | + +Legacy compatibility is retained for `search`, `read`, and `replace-legacy`. + +## Normative Policy + +- Canonical contract/version metadata comes from `@superdoc/document-api` (`CONTRACT_VERSION`, operation metadata, and schemas). +- This README is usage guidance for CLI consumers. +- If guidance here conflicts with `superdoc describe`/`describe command` output or document-api contract exports, those are authoritative. + +## Host mode (stdio JSON-RPC) + +```bash +superdoc host --stdio +``` + +- Starts a persistent JSON-RPC 2.0 host over newline-delimited stdio frames. +- Intended for SDK/runtime integrations that need long-lived command execution in a single process. 
+- Supported methods: + - `host.ping` + - `host.capabilities` + - `host.describe` + - `host.describe.command` (requires `params.operationId`) + - `host.shutdown` + - `cli.invoke` (executes canonical CLI command semantics) + +## API introspection commands + +```bash +superdoc describe +superdoc describe command doc.find +superdoc status +``` + +- `describe` returns contract + protocol metadata and the operation catalog. +- `describe command ` returns one operation definition (inputs, response schema, errors, examples). +- `status` shows current session status and document metadata. + +## Stateful session commands + +```bash +superdoc open ./contract.docx +superdoc status +superdoc find --type text --pattern "termination" +superdoc replace --target-json '{...}' --text "Updated clause" +superdoc save --in-place +superdoc close +``` + +- `open` creates a new session id automatically unless `--session ` is provided. +- If `` is omitted, commands run against the active default session. +- Explicit `` (or `--doc`) always runs in stateless mode and does not use session state. + +## Session management + +```bash +superdoc session list +superdoc session save [--in-place] [--out ] [--force] +superdoc session set-default +superdoc session use +superdoc session close [--discard] +``` + +## Read / locate commands + +```bash +superdoc info [] +superdoc find [] --type text --pattern "termination" +superdoc find [] --type run +superdoc get-node [] --address-json '{"kind":"block","nodeType":"paragraph","nodeId":"p1"}' +superdoc get-node-by-id [] --id p1 --node-type paragraph +``` + +- Flat `find` flags are convenience syntax and are normalized into the canonical query object used by `editor.doc.find`. +- Use `--query-json` / `--query-file` for complex or programmatic queries. +- For text queries, use `result.context[*].textRanges[*]` as targets for `replace`, `comments add`, and formatting commands. 
+ +## Mutating commands + +```bash +superdoc comments add [] --target-json '{...}' --text "Please revise" [--out ./with-comment.docx] +superdoc replace [] --target-json '{...}' --text "Updated text" [--out ./updated.docx] +superdoc format bold [] --target-json '{...}' [--out ./bolded.docx] +``` + +- In stateless mode (`` provided), mutating commands require `--out`. +- In stateful mode (after `open`), mutating commands update the active working document and `--out` is optional. +- Use `--expected-revision ` with stateful mutating commands for optimistic concurrency checks. + +## Low-level invocation -# JSON output for scripting -superdoc search "Article 7" ./**/*.docx --json +```bash +superdoc call --input-json '{...}' ``` -## Options +- Invokes any document-api operation directly with a JSON payload. + +## Save command modes -| Flag | Description | -|------|-------------| -| `--json` | Machine-readable output | -| `--help` | Show help | +```bash +superdoc save --in-place +superdoc save --out ./final.docx +``` -## AI Integration +- `save` persists the active session but keeps it open for more edits. +- If no source path exists (for example stdin-opened docs), `save` requires `--out `. +- `save --in-place` checks for source-file drift and refuses overwrite unless `--force` is passed. -Works with AI coding assistants. Copy the skill file so Claude Code, Cursor, etc. know to use `superdoc` for DOCX operations instead of python-docx. +## Close command modes ```bash -# Copy skill to Claude Code -cp -r skills/superdoc ~/.claude/skills/ +superdoc close +superdoc close --discard ``` -See [`skills/superdoc/SKILL.md`](../../skills/superdoc/SKILL.md) for the skill definition. +- Dirty contexts require explicit `--discard` (or run `save` first, then `close`). 
+ +## Output modes + +- Default: `--output json` (machine-oriented envelope) +- Human mode: `--output pretty` (or `--pretty`) + +```bash +superdoc info ./contract.docx --output json +superdoc info ./contract.docx --pretty +``` + +## Global flags + +- `--output ` +- `--json` +- `--pretty` +- `--session ` +- `--timeout-ms ` +- `--help` + +## Input payload flags + +- `--query-json`, `--query-file` +- `--address-json`, `--address-file` +- `--target-json`, `--target-file` + +## Stdin support + +Use `-` as `` to read DOCX bytes from stdin: + +```bash +cat ./contract.docx | superdoc open - +cat ./contract.docx | superdoc info - +``` + +## JSON envelope contract + +Normative operation/version metadata comes from `@superdoc/document-api`; use `superdoc describe` for the runtime contract surface. + +Success: + +```json +{ + "ok": true, + "command": "find", + "data": {}, + "meta": { + "version": "1.0.0", + "elapsedMs": 42 + } +} +``` + +Error: + +```json +{ + "ok": false, + "error": { + "code": "VALIDATION_ERROR", + "message": "..." 
+ }, + "meta": { + "version": "1.0.0", + "elapsedMs": 8 + } +} +``` ## Part of SuperDoc diff --git a/apps/cli/bunfig.toml b/apps/cli/bunfig.toml new file mode 100644 index 0000000000..37ff1f9482 --- /dev/null +++ b/apps/cli/bunfig.toml @@ -0,0 +1,2 @@ +[test] +preload = ["./src/__tests__/setup.ts"] diff --git a/apps/cli/package.json b/apps/cli/package.json index b5f58a9386..0e98cb4582 100644 --- a/apps/cli/package.json +++ b/apps/cli/package.json @@ -1,33 +1,56 @@ { "name": "@superdoc-dev/cli", - "version": "0.1.0", + "version": "1.0.0", "type": "module", "bin": { "superdoc": "./dist/index.js" }, "files": [ - "dist" + "dist", + "skill" ], "scripts": { "dev": "bun run src/index.ts", "build": "bun build src/index.ts --outdir dist --target node --format esm", - "test": "bun test", + "build:native": "bun build src/index.ts --compile --outfile dist/superdoc", + "build:native:all": "node scripts/build-native-cli.js --all", + "build:native:host": "node scripts/build-native-cli.js", + "build:stage": "node scripts/stage-artifacts.js", + "build:sync-version": "node scripts/sync-version.js", + "build:prepublish": "node scripts/build-and-stage.js", + "publish:platforms": "node scripts/publish.js --tag latest", + "publish:platforms:dry": "node scripts/publish.js --tag latest --dry-run", + "test": "NODE_ENV=test bun test", "lint": "eslint .", "lint:fix": "eslint --fix .", "format": "prettier --write .", - "typecheck": "tsc --noEmit", + "typecheck": "tsc --noEmit -p tsconfig.check.json", "prepublishOnly": "pnpm run build", "release": "pnpx semantic-release", "release:dry-run": "pnpx semantic-release --dry-run" }, + "dependencies": { + "@hocuspocus/provider": "catalog:", + "@superdoc/document-api": "workspace:*", + "fast-glob": "catalog:", + "y-websocket": "catalog:", + "yjs": "catalog:" + }, "devDependencies": { + "@superdoc/super-editor": "workspace:*", "@types/bun": "catalog:", "@types/node": "catalog:", - "fast-glob": "catalog:", "superdoc": "workspace:*", "typescript": 
"catalog:" }, "module": "src/index.ts", + "optionalDependencies": { + "@superdoc-dev/cli-darwin-arm64": "workspace:*", + "@superdoc-dev/cli-darwin-x64": "workspace:*", + "@superdoc-dev/cli-linux-x64": "workspace:*", + "@superdoc-dev/cli-linux-arm64": "workspace:*", + "@superdoc-dev/cli-windows-x64": "workspace:*" + }, "publishConfig": { "access": "public" } diff --git a/apps/cli/platforms/cli-darwin-arm64/package.json b/apps/cli/platforms/cli-darwin-arm64/package.json new file mode 100644 index 0000000000..1aa9c3879f --- /dev/null +++ b/apps/cli/platforms/cli-darwin-arm64/package.json @@ -0,0 +1,19 @@ +{ + "name": "@superdoc-dev/cli-darwin-arm64", + "version": "1.0.0", + "os": [ + "darwin" + ], + "cpu": [ + "arm64" + ], + "bin": { + "superdoc": "bin/superdoc" + }, + "files": [ + "bin" + ], + "publishConfig": { + "access": "public" + } +} diff --git a/apps/cli/platforms/cli-darwin-x64/package.json b/apps/cli/platforms/cli-darwin-x64/package.json new file mode 100644 index 0000000000..d965dbfabc --- /dev/null +++ b/apps/cli/platforms/cli-darwin-x64/package.json @@ -0,0 +1,19 @@ +{ + "name": "@superdoc-dev/cli-darwin-x64", + "version": "1.0.0", + "os": [ + "darwin" + ], + "cpu": [ + "x64" + ], + "bin": { + "superdoc": "bin/superdoc" + }, + "files": [ + "bin" + ], + "publishConfig": { + "access": "public" + } +} diff --git a/apps/cli/platforms/cli-linux-arm64/package.json b/apps/cli/platforms/cli-linux-arm64/package.json new file mode 100644 index 0000000000..f403c6eebd --- /dev/null +++ b/apps/cli/platforms/cli-linux-arm64/package.json @@ -0,0 +1,19 @@ +{ + "name": "@superdoc-dev/cli-linux-arm64", + "version": "1.0.0", + "os": [ + "linux" + ], + "cpu": [ + "arm64" + ], + "bin": { + "superdoc": "bin/superdoc" + }, + "files": [ + "bin" + ], + "publishConfig": { + "access": "public" + } +} diff --git a/apps/cli/platforms/cli-linux-x64/package.json b/apps/cli/platforms/cli-linux-x64/package.json new file mode 100644 index 0000000000..32adba8b65 --- /dev/null +++ 
b/apps/cli/platforms/cli-linux-x64/package.json @@ -0,0 +1,19 @@ +{ + "name": "@superdoc-dev/cli-linux-x64", + "version": "1.0.0", + "os": [ + "linux" + ], + "cpu": [ + "x64" + ], + "bin": { + "superdoc": "bin/superdoc" + }, + "files": [ + "bin" + ], + "publishConfig": { + "access": "public" + } +} diff --git a/apps/cli/platforms/cli-windows-x64/package.json b/apps/cli/platforms/cli-windows-x64/package.json new file mode 100644 index 0000000000..fdc9528987 --- /dev/null +++ b/apps/cli/platforms/cli-windows-x64/package.json @@ -0,0 +1,19 @@ +{ + "name": "@superdoc-dev/cli-windows-x64", + "version": "1.0.0", + "os": [ + "win32" + ], + "cpu": [ + "x64" + ], + "bin": { + "superdoc": "bin/superdoc.exe" + }, + "files": [ + "bin" + ], + "publishConfig": { + "access": "public" + } +} diff --git a/apps/cli/scripts/__tests__/build-and-stage.test.ts b/apps/cli/scripts/__tests__/build-and-stage.test.ts new file mode 100644 index 0000000000..97e038b675 --- /dev/null +++ b/apps/cli/scripts/__tests__/build-and-stage.test.ts @@ -0,0 +1,85 @@ +import { describe, expect, test } from 'bun:test'; +import { main, resolveBuildAndStageOptions } from '../build-and-stage.js'; + +type ScriptCall = { + scriptPath: string; + args: string[]; + label: string | undefined; +}; + +describe('resolveBuildAndStageOptions', () => { + test('defaults to all-platform native build', () => { + expect(resolveBuildAndStageOptions([])).toEqual({ + allPlatforms: true, + nativeBuildArgs: ['--all'], + skipCliBuild: false, + }); + }); + + test('supports single-platform mode', () => { + expect(resolveBuildAndStageOptions(['--single-platform'])).toEqual({ + allPlatforms: false, + nativeBuildArgs: [], + skipCliBuild: false, + }); + }); + + test('rejects conflicting target selection flags', () => { + expect(() => resolveBuildAndStageOptions(['--all', '--single-platform'])).toThrow( + 'Use either --all or --single-platform, not both.', + ); + }); + + test('rejects unknown flags', () => { + expect(() => 
resolveBuildAndStageOptions(['--bogus'])).toThrow('Unknown flag(s): --bogus'); + }); +}); + +describe('main', () => { + test('runs sync, native build, and stage by default', () => { + const calls: ScriptCall[] = []; + + main([], (scriptPath, args = [], label) => { + calls.push({ scriptPath, args, label }); + }); + + expect(calls).toEqual([ + { + scriptPath: './apps/cli/scripts/sync-version.js', + args: [], + label: 'Sync CLI versions', + }, + { + scriptPath: './apps/cli/scripts/build-native-cli.js', + args: ['--all'], + label: 'Build native CLI artifacts (all)', + }, + { + scriptPath: './apps/cli/scripts/stage-artifacts.js', + args: [], + label: 'Stage CLI artifacts', + }, + ]); + }); + + test('skips native build step when requested', () => { + const calls: ScriptCall[] = []; + + main(['--skip-cli-build'], (scriptPath, args = [], label) => { + calls.push({ scriptPath, args, label }); + }); + + expect(calls).toEqual([ + { + scriptPath: './apps/cli/scripts/sync-version.js', + args: [], + label: 'Sync CLI versions', + }, + { + scriptPath: './apps/cli/scripts/stage-artifacts.js', + args: [], + label: 'Stage CLI artifacts', + }, + ]); + }); +}); diff --git a/apps/cli/scripts/__tests__/build-native-cli.test.ts b/apps/cli/scripts/__tests__/build-native-cli.test.ts new file mode 100644 index 0000000000..b509ed25a8 --- /dev/null +++ b/apps/cli/scripts/__tests__/build-native-cli.test.ts @@ -0,0 +1,38 @@ +import { describe, expect, test } from 'bun:test'; +import { TARGETS, resolveHostTargetId, resolveRequestedTargets } from '../build-native-cli.js'; + +describe('resolveHostTargetId', () => { + test('maps known platform/arch pairs', () => { + expect(resolveHostTargetId('darwin', 'arm64')).toBe('darwin-arm64'); + expect(resolveHostTargetId('darwin', 'x64')).toBe('darwin-x64'); + expect(resolveHostTargetId('linux', 'x64')).toBe('linux-x64'); + expect(resolveHostTargetId('linux', 'arm64')).toBe('linux-arm64'); + expect(resolveHostTargetId('win32', 'x64')).toBe('windows-x64'); + 
}); + + test('throws for unsupported host combinations', () => { + expect(() => resolveHostTargetId('linux', 'ppc64')).toThrow('Unsupported host platform'); + }); +}); + +describe('resolveRequestedTargets', () => { + test('returns all configured targets for --all', () => { + expect(resolveRequestedTargets(['--all'])).toEqual(Object.keys(TARGETS)); + }); + + test('throws when --all and --targets are both provided', () => { + expect(() => resolveRequestedTargets(['--all', '--targets', 'linux-x64'])).toThrow( + 'Use either --all or --targets, not both.', + ); + }); + + test('throws when --targets has no value', () => { + expect(() => resolveRequestedTargets(['--targets'])).toThrow('Flag --targets requires a value.'); + }); + + test('throws on unsupported targets', () => { + expect(() => resolveRequestedTargets(['--targets', 'linux-x64,bogus-target'])).toThrow( + 'Unknown target "bogus-target"', + ); + }); +}); diff --git a/apps/cli/scripts/__tests__/publish.test.ts b/apps/cli/scripts/__tests__/publish.test.ts new file mode 100644 index 0000000000..ce124db339 --- /dev/null +++ b/apps/cli/scripts/__tests__/publish.test.ts @@ -0,0 +1,65 @@ +import { describe, expect, test } from 'bun:test'; +import { isAlreadyPublished, resolvePublishOptions } from '../publish.js'; + +describe('resolvePublishOptions', () => { + test('allows dry-run without npm auth token', () => { + const options = resolvePublishOptions(['--dry-run'], {}); + expect(options.dryRun).toBe(true); + expect(options.tag).toBe('latest'); + expect(options.authToken).toBe(''); + }); + + test('requires auth token when not dry-run', () => { + expect(() => resolvePublishOptions([], {})).toThrow( + 'Missing npm auth token. 
Set NPM_TOKEN or NODE_AUTH_TOKEN in your environment.', + ); + }); + + test('throws when --tag is provided without value', () => { + expect(() => resolvePublishOptions(['--tag'], { NODE_AUTH_TOKEN: 'token' })).toThrow( + 'Flag --tag requires a value.', + ); + }); + + test('parses equals-form --tag=value', () => { + const options = resolvePublishOptions(['--tag=beta', '--dry-run'], {}); + expect(options.tag).toBe('beta'); + }); +}); + +describe('isAlreadyPublished', () => { + test('returns true when npm view succeeds', () => { + const fakeSpawn = () => ({ + status: 0, + stdout: '1.0.0-alpha.1', + stderr: '', + error: undefined, + }); + + expect(isAlreadyPublished('@superdoc-dev/cli', '1.0.0-alpha.1', 'token', {}, fakeSpawn)).toBe(true); + }); + + test('returns false for not-found responses', () => { + const fakeSpawn = () => ({ + status: 1, + stdout: '', + stderr: 'npm ERR! code E404', + error: undefined, + }); + + expect(isAlreadyPublished('@superdoc-dev/cli', '1.0.0-alpha.1', 'token', {}, fakeSpawn)).toBe(false); + }); + + test('throws for unknown npm view failures', () => { + const fakeSpawn = () => ({ + status: 1, + stdout: '', + stderr: 'npm ERR! 
code ECONNRESET', + error: undefined, + }); + + expect(() => isAlreadyPublished('@superdoc-dev/cli', '1.0.0-alpha.1', 'token', {}, fakeSpawn)).toThrow( + 'Failed to check published version', + ); + }); +}); diff --git a/apps/cli/scripts/__tests__/stage-artifacts.test.ts b/apps/cli/scripts/__tests__/stage-artifacts.test.ts new file mode 100644 index 0000000000..1403bcdc4b --- /dev/null +++ b/apps/cli/scripts/__tests__/stage-artifacts.test.ts @@ -0,0 +1,32 @@ +import { describe, expect, test } from 'bun:test'; +import { resolveArtifactSourcePath, validateBinaryName } from '../stage-artifacts.js'; + +describe('resolveArtifactSourcePath', () => { + test('resolves valid artifact-relative paths', () => { + const resolvedPath = resolveArtifactSourcePath('linux-x64/superdoc'); + expect(resolvedPath.includes('apps/cli/artifacts')).toBe(true); + expect(resolvedPath.includes('linux-x64')).toBe(true); + expect(resolvedPath.endsWith('superdoc')).toBe(true); + }); + + test('rejects absolute paths', () => { + expect(() => resolveArtifactSourcePath('/tmp/superdoc')).toThrow('Artifact path must be relative'); + }); + + test('rejects traversal outside artifacts root', () => { + expect(() => resolveArtifactSourcePath('../package.json')).toThrow('Artifact path escapes artifacts root'); + }); +}); + +describe('validateBinaryName', () => { + test('accepts simple binary file names', () => { + expect(validateBinaryName('superdoc', 'linux-x64')).toBe('superdoc'); + expect(validateBinaryName('superdoc.exe', 'windows-x64')).toBe('superdoc.exe'); + }); + + test('rejects path traversal or path segments', () => { + expect(() => validateBinaryName('../superdoc', 'linux-x64')).toThrow('Invalid binaryName'); + expect(() => validateBinaryName('nested/superdoc', 'linux-x64')).toThrow('Invalid binaryName'); + expect(() => validateBinaryName('nested\\superdoc.exe', 'windows-x64')).toThrow('Invalid binaryName'); + }); +}); diff --git a/apps/cli/scripts/__tests__/sync-version.test.ts 
b/apps/cli/scripts/__tests__/sync-version.test.ts new file mode 100644 index 0000000000..259c41dc76 --- /dev/null +++ b/apps/cli/scripts/__tests__/sync-version.test.ts @@ -0,0 +1,22 @@ +import { describe, expect, test } from 'bun:test'; +import { syncOptionalDependencyVersions } from '../sync-version.js'; + +describe('syncOptionalDependencyVersions', () => { + test('updates all platform package specs while preserving other dependencies', () => { + const optionalDependencies = { + '@superdoc-dev/cli-darwin-arm64': '0.0.1', + '@superdoc-dev/unrelated': '9.9.9', + }; + + const next = syncOptionalDependencyVersions(optionalDependencies, [ + '@superdoc-dev/cli-darwin-arm64', + '@superdoc-dev/cli-linux-x64', + ]); + + expect(next).toEqual({ + '@superdoc-dev/cli-darwin-arm64': 'workspace:*', + '@superdoc-dev/cli-linux-x64': 'workspace:*', + '@superdoc-dev/unrelated': '9.9.9', + }); + }); +}); diff --git a/apps/cli/scripts/__tests__/utils.test.ts b/apps/cli/scripts/__tests__/utils.test.ts new file mode 100644 index 0000000000..4a9fe69a2a --- /dev/null +++ b/apps/cli/scripts/__tests__/utils.test.ts @@ -0,0 +1,34 @@ +import { describe, expect, test } from 'bun:test'; +import { ensureNoUnknownFlags, getOptionalFlagValue } from '../utils.js'; + +describe('ensureNoUnknownFlags', () => { + test('accepts long flags and equals-form flags in allowlist', () => { + const argv = ['--tag=alpha', '--dry-run']; + expect(() => ensureNoUnknownFlags(argv, new Set(['--tag', '--dry-run']))).not.toThrow(); + }); + + test('throws when unknown flags are provided', () => { + const argv = ['--tag=alpha', '--unknown']; + expect(() => ensureNoUnknownFlags(argv, new Set(['--tag']))).toThrow('Unknown flag(s): --unknown'); + }); +}); + +describe('getOptionalFlagValue', () => { + test('reads space-separated flag values', () => { + expect(getOptionalFlagValue(['--tag', 'beta'], '--tag')).toBe('beta'); + }); + + test('reads equals-form flag values', () => { + expect(getOptionalFlagValue(['--tag=beta'], 
import { ensureNoUnknownFlags, isDirectExecution, runNodeScript } from './utils.js';

// Flags understood by the prepublish pipeline entry point.
const allowedFlags = new Set(['--all', '--single-platform', '--skip-cli-build']);

/**
 * Parses options for the prepublish build pipeline.
 *
 * @param {string[]} argv - CLI args.
 * @returns {{ allPlatforms: boolean; nativeBuildArgs: string[]; skipCliBuild: boolean }}
 * @throws {Error} If unsupported flag combinations are provided.
 */
export function resolveBuildAndStageOptions(argv) {
  ensureNoUnknownFlags(argv, allowedFlags);

  const wantsAll = argv.includes('--all');
  const wantsSingle = argv.includes('--single-platform');

  if (wantsAll && wantsSingle) {
    throw new Error('Use either --all or --single-platform, not both.');
  }

  // Building every platform is the default; --single-platform opts out.
  const allPlatforms = wantsAll || !wantsSingle;

  return {
    allPlatforms,
    nativeBuildArgs: allPlatforms ? ['--all'] : [],
    skipCliBuild: argv.includes('--skip-cli-build'),
  };
}
+ * @returns {void} + */ +export function main(argv = process.argv.slice(2), runScript = runNodeScript) { + const { allPlatforms, nativeBuildArgs, skipCliBuild } = resolveBuildAndStageOptions(argv); + + runScript('./apps/cli/scripts/sync-version.js', [], 'Sync CLI versions'); + + if (!skipCliBuild) { + runScript( + './apps/cli/scripts/build-native-cli.js', + nativeBuildArgs, + allPlatforms ? 'Build native CLI artifacts (all)' : 'Build native CLI artifacts', + ); + } + + runScript('./apps/cli/scripts/stage-artifacts.js', [], 'Stage CLI artifacts'); + + console.log('\n[cli] Build pipeline complete.'); +} + +if (isDirectExecution(import.meta.url)) { + try { + main(); + } catch (error) { + console.error(error instanceof Error ? error.message : error); + process.exitCode = 1; + } +} diff --git a/apps/cli/scripts/build-native-cli.js b/apps/cli/scripts/build-native-cli.js new file mode 100644 index 0000000000..2175a40bd0 --- /dev/null +++ b/apps/cli/scripts/build-native-cli.js @@ -0,0 +1,175 @@ +import { mkdir, writeFile } from 'node:fs/promises'; +import { createHash } from 'node:crypto'; +import { existsSync, mkdirSync, readFileSync } from 'node:fs'; +import path from 'node:path'; +import { spawnSync } from 'node:child_process'; +import { cliRoot, ensureNoUnknownFlags, getOptionalFlagValue, isDirectExecution, repoRoot } from './utils.js'; + +const cliEntry = path.join(cliRoot, 'src/index.ts'); +const cliPackagePath = path.join(cliRoot, 'package.json'); +const artifactsRoot = path.join(cliRoot, 'artifacts'); +const manifestPath = path.join(artifactsRoot, 'manifest.json'); +const allowedFlags = new Set(['--all', '--targets']); + +/** + * Supported Bun native build targets and output metadata. 
/**
 * Supported Bun native build targets and output metadata.
 *
 * @type {Record<string, { bunTarget: string; binaryName: string }>}
 */
export const TARGETS = {
  'darwin-arm64': { bunTarget: 'bun-darwin-arm64', binaryName: 'superdoc' },
  'darwin-x64': { bunTarget: 'bun-darwin-x64', binaryName: 'superdoc' },
  'linux-x64': { bunTarget: 'bun-linux-x64', binaryName: 'superdoc' },
  'linux-arm64': { bunTarget: 'bun-linux-arm64', binaryName: 'superdoc' },
  'windows-x64': { bunTarget: 'bun-windows-x64', binaryName: 'superdoc.exe' },
};

/**
 * Resolves a runtime platform/arch pair to a supported target id.
 *
 * @param {NodeJS.Platform} [platform=process.platform] - Host platform.
 * @param {string} [arch=process.arch] - Host architecture.
 * @returns {string} Target id for the host.
 * @throws {Error} If the host combination is unsupported.
 */
export function resolveHostTargetId(platform = process.platform, arch = process.arch) {
  if (platform === 'darwin' && arch === 'arm64') return 'darwin-arm64';
  if (platform === 'darwin' && arch === 'x64') return 'darwin-x64';
  if (platform === 'linux' && arch === 'x64') return 'linux-x64';
  if (platform === 'linux' && arch === 'arm64') return 'linux-arm64';
  if (platform === 'win32' && arch === 'x64') return 'windows-x64';

  throw new Error(`Unsupported host platform for default target selection: ${platform}/${arch}`);
}

/**
 * Resolves requested build targets from CLI args.
 *
 * @param {string[]} argv - CLI args.
 * @returns {string[]} Target ids to build.
 * @throws {Error} If provided targets are invalid.
 */
export function resolveRequestedTargets(argv) {
  const all = argv.includes('--all');
  const targetArg = getOptionalFlagValue(argv, '--targets');

  if (all && targetArg) {
    throw new Error('Use either --all or --targets, not both.');
  }

  if (all) {
    return Object.keys(TARGETS);
  }

  if (targetArg) {
    const requested = targetArg
      .split(',')
      .map((item) => item.trim())
      .filter(Boolean);

    if (!requested.length) {
      throw new Error('--targets was provided but no targets were parsed.');
    }

    for (const target of requested) {
      if (!TARGETS[target]) {
        throw new Error(`Unknown target "${target}". Supported: ${Object.keys(TARGETS).join(', ')}`);
      }
    }

    return requested;
  }

  // No selection flags: build only for the machine we are running on.
  return [resolveHostTargetId()];
}

// Hashes a file's full contents; the manifest carries this so consumers can
// verify staged binaries match what was built.
function computeSha256(filePath) {
  const hash = createHash('sha256');
  hash.update(readFileSync(filePath));
  return hash.digest('hex');
}

// Compiles the CLI entry into a single native executable for `targetId`.
// Returns metadata used later by writeManifest().
function runBunBuild(targetId, hostTarget) {
  const config = TARGETS[targetId];
  const targetDir = path.join(artifactsRoot, targetId);
  const outPath = path.join(targetDir, config.binaryName);

  mkdirSync(targetDir, { recursive: true });

  const args = ['build', cliEntry, '--compile', '--outfile', outPath];

  // Cross-target builds require --target, host target can use native compile.
  if (targetId !== hostTarget) {
    args.push('--target', config.bunTarget);
  }

  const result = spawnSync('bun', args, {
    cwd: repoRoot,
    stdio: 'inherit',
    env: {
      ...process.env,
      BUN_INSTALL_CACHE_DIR: process.env.BUN_INSTALL_CACHE_DIR ?? path.join(repoRoot, '.bun-cache'),
    },
  });

  // spawnSync reports launch failures (e.g. `bun` missing from PATH) via
  // `result.error` with a null status; surface the real cause instead of the
  // generic build-failed message below. Matches utils.runCommand behavior.
  if (result.error) {
    throw result.error;
  }

  if (result.status !== 0) {
    throw new Error(`bun build failed for target ${targetId}`);
  }

  if (!existsSync(outPath)) {
    throw new Error(`Expected build output is missing for ${targetId}: ${outPath}`);
  }

  return {
    targetId,
    binaryName: config.binaryName,
    outputPath: outPath,
  };
}

// Writes artifacts/manifest.json describing every binary produced in this run,
// including a sha256 per artifact for downstream verification.
async function writeManifest(entries) {
  const cliPackageRaw = readFileSync(cliPackagePath, 'utf8');
  const cliPackage = JSON.parse(cliPackageRaw);

  const manifest = {
    createdAt: new Date().toISOString(),
    cliVersion: cliPackage.version,
    targets: entries.map((entry) => ({
      target: entry.targetId,
      binaryName: entry.binaryName,
      relativePath: path.relative(artifactsRoot, entry.outputPath),
      sha256: computeSha256(entry.outputPath),
    })),
  };

  await mkdir(path.dirname(manifestPath), { recursive: true });
  await writeFile(manifestPath, `${JSON.stringify(manifest, null, 2)}\n`, 'utf8');
  console.log(`Wrote ${path.relative(repoRoot, manifestPath)}`);
}

/**
 * Builds native artifacts and writes the build manifest.
 *
 * @param {string[]} [argv=process.argv.slice(2)] - CLI args.
 * @returns {Promise<void>}
 */
export async function main(argv = process.argv.slice(2)) {
  ensureNoUnknownFlags(argv, allowedFlags);
  const targets = resolveRequestedTargets(argv);
  const hostTarget = resolveHostTargetId();

  // Builds run sequentially on purpose: bun writes build output to the shared
  // terminal (stdio: 'inherit') and interleaved output would be unreadable.
  const entries = [];
  for (const targetId of targets) {
    console.log(`Building native CLI for ${targetId}...`);
    entries.push(runBunBuild(targetId, hostTarget));
  }

  await writeManifest(entries);
}

if (isDirectExecution(import.meta.url)) {
  main().catch((error) => {
    console.error(error instanceof Error ? error.message : error);
    process.exitCode = 1;
  });
}
import { spawnSync } from 'node:child_process';
import { mkdirSync, readFileSync } from 'node:fs';
import path from 'node:path';
import { cliRoot, ensureNoUnknownFlags, getOptionalFlagValue, isDirectExecution, repoRoot } from './utils.js';

// Dedicated npm cache inside the repo keeps CI runs hermetic.
const npmCacheDir = path.join(repoRoot, '.cache', 'npm');
const allowedFlags = new Set(['--tag', '--dry-run']);

// Platform packages are published first so the main package's
// optionalDependencies always resolve at install time.
const PLATFORM_PACKAGES = [
  '@superdoc-dev/cli-darwin-arm64',
  '@superdoc-dev/cli-darwin-x64',
  '@superdoc-dev/cli-linux-x64',
  '@superdoc-dev/cli-linux-arm64',
  '@superdoc-dev/cli-windows-x64',
];
const MAIN_PACKAGE = '@superdoc-dev/cli';

const PACKAGE_DIR_BY_NAME = {
  '@superdoc-dev/cli-darwin-arm64': path.join(cliRoot, 'platforms/cli-darwin-arm64'),
  '@superdoc-dev/cli-darwin-x64': path.join(cliRoot, 'platforms/cli-darwin-x64'),
  '@superdoc-dev/cli-linux-x64': path.join(cliRoot, 'platforms/cli-linux-x64'),
  '@superdoc-dev/cli-linux-arm64': path.join(cliRoot, 'platforms/cli-linux-arm64'),
  '@superdoc-dev/cli-windows-x64': path.join(cliRoot, 'platforms/cli-windows-x64'),
  '@superdoc-dev/cli': cliRoot,
};

// Reads the version field from a package's package.json.
function getPackageVersion(packageName) {
  const pkgDir = PACKAGE_DIR_BY_NAME[packageName];
  if (!pkgDir) {
    throw new Error(`No package directory mapping found for ${packageName}`);
  }

  const pkg = JSON.parse(readFileSync(path.join(pkgDir, 'package.json'), 'utf8'));
  if (!pkg.version) {
    throw new Error(`Failed to read version for ${packageName}`);
  }
  return pkg.version;
}

// Builds the environment for npm/pnpm invocations: repo-local cache plus the
// auth token (when provided) under the name npm expects.
function createNpmEnv(baseEnv, authToken) {
  return {
    ...baseEnv,
    npm_config_cache: npmCacheDir,
    ...(authToken ? { NODE_AUTH_TOKEN: authToken } : {}),
  };
}

/**
 * Checks whether a package version is already published to npm.
 *
 * @param {string} packageName - Package name.
 * @param {string} version - Version to check.
 * @param {string} authToken - npm token.
 * @param {NodeJS.ProcessEnv} [baseEnv=process.env] - Base environment for the command.
 * @param {typeof spawnSync} [spawn=spawnSync] - Command runner (injectable for tests).
 * @returns {boolean} `true` if published, otherwise `false`.
 * @throws {Error} If the check fails for reasons other than a not-found response.
 */
export function isAlreadyPublished(packageName, version, authToken, baseEnv = process.env, spawn = spawnSync) {
  const result = spawn('npm', ['view', `${packageName}@${version}`, 'version'], {
    cwd: repoRoot,
    encoding: 'utf8',
    env: createNpmEnv(baseEnv, authToken),
  });

  if (result.error) {
    throw result.error;
  }

  if (result.status === 0) {
    return true;
  }

  // npm prints E404 / "Not found" when the version does not exist yet;
  // that is the expected "safe to publish" signal, not a failure.
  const stderr = (result.stderr ?? '').toString();
  if (stderr.includes('E404') || stderr.includes('Not found') || stderr.includes('not found')) {
    return false;
  }

  const stdout = (result.stdout ?? '').toString();
  const details = (stderr || stdout).trim() || `exit status ${result.status ?? 'unknown'}`;
  throw new Error(`Failed to check published version for ${packageName}@${version}: ${details}`);
}

// Publishes one package with pnpm, skipping versions already on the registry
// so re-runs of a partially failed release are idempotent.
function runPnpmPublish(packageName, tag, dryRun, authToken, baseEnv = process.env) {
  const pkgDir = PACKAGE_DIR_BY_NAME[packageName];
  if (!pkgDir) {
    throw new Error(`No package directory mapping found for ${packageName}`);
  }

  const version = getPackageVersion(packageName);
  if (!dryRun && isAlreadyPublished(packageName, version, authToken, baseEnv)) {
    console.log(`Skipping ${packageName}@${version} (already published).`);
    return;
  }

  const args = ['publish', '--access', 'public', '--tag', tag, '--no-git-checks'];
  if (dryRun) args.push('--dry-run');

  console.log(`Publishing ${packageName} (${tag})${dryRun ? ' [dry-run]' : ''}...`);
  const result = spawnSync('pnpm', args, {
    cwd: pkgDir,
    stdio: 'inherit',
    env: createNpmEnv(baseEnv, authToken),
  });

  // Launch failures (pnpm missing from PATH, EACCES, ...) set `result.error`
  // and leave `status` null; report the real cause rather than the generic
  // message below. Matches the check already done in isAlreadyPublished.
  if (result.error) {
    throw result.error;
  }

  if (result.status !== 0) {
    throw new Error(`Publish failed for ${packageName}`);
  }
}

/**
 * Parses and validates publish-script CLI options.
 *
 * @param {string[]} argv - CLI args.
 * @param {NodeJS.ProcessEnv} [env=process.env] - Environment used to resolve auth tokens.
 * @returns {{ tag: string; dryRun: boolean; authToken: string }}
 * @throws {Error} If options are invalid or auth is missing for non-dry runs.
 */
export function resolvePublishOptions(argv, env = process.env) {
  ensureNoUnknownFlags(argv, allowedFlags);
  const tag = getOptionalFlagValue(argv, '--tag') ?? 'latest';
  const dryRun = argv.includes('--dry-run');
  const authToken = env.NODE_AUTH_TOKEN ?? env.NPM_TOKEN ?? '';

  if (!dryRun && !authToken) {
    throw new Error('Missing npm auth token. Set NPM_TOKEN or NODE_AUTH_TOKEN in your environment.');
  }

  return { tag, dryRun, authToken };
}

/**
 * Publishes platform packages and the root CLI package.
 *
 * @param {string[]} [argv=process.argv.slice(2)] - CLI args.
 * @param {NodeJS.ProcessEnv} [env=process.env] - Environment for publish commands.
 * @returns {void}
 */
export function main(argv = process.argv.slice(2), env = process.env) {
  const { tag, dryRun, authToken } = resolvePublishOptions(argv, env);
  mkdirSync(npmCacheDir, { recursive: true });

  // Platform binaries go first; the main package references them and must
  // never be installable before its optional dependencies exist.
  for (const packageName of PLATFORM_PACKAGES) {
    runPnpmPublish(packageName, tag, dryRun, authToken, env);
  }
  runPnpmPublish(MAIN_PACKAGE, tag, dryRun, authToken, env);
}

if (isDirectExecution(import.meta.url)) {
  try {
    main();
  } catch (error) {
    console.error(error instanceof Error ? error.message : error);
    process.exitCode = 1;
  }
}
import { copyFile, mkdir, readFile } from 'node:fs/promises';
import { existsSync } from 'node:fs';
import path from 'node:path';
import { cliRoot, isDirectExecution } from './utils.js';

const artifactsRoot = path.join(cliRoot, 'artifacts');
const manifestPath = path.join(artifactsRoot, 'manifest.json');

// Target id -> platform package directory name (under apps/cli/platforms/).
const PLATFORM_DIRS = {
  'darwin-arm64': 'cli-darwin-arm64',
  'darwin-x64': 'cli-darwin-x64',
  'linux-x64': 'cli-linux-x64',
  'linux-arm64': 'cli-linux-arm64',
  'windows-x64': 'cli-windows-x64',
};

// Throws unless `filePath` exists on disk.
function requireFile(filePath) {
  if (existsSync(filePath)) return;
  throw new Error(`Required file is missing: ${filePath}`);
}

/**
 * Resolves an artifact manifest path and ensures it stays under artifacts root.
 *
 * @param {string} relativePath - Relative artifact path from manifest.
 * @returns {string} Absolute source path.
 * @throws {Error} If the path is invalid or escapes the artifacts directory.
 */
export function resolveArtifactSourcePath(relativePath) {
  if (!relativePath || typeof relativePath !== 'string') {
    throw new Error('Invalid target relativePath in artifacts manifest.');
  }
  if (path.isAbsolute(relativePath)) {
    throw new Error(`Artifact path must be relative: ${relativePath}`);
  }

  const root = path.resolve(artifactsRoot);
  const candidate = path.resolve(artifactsRoot, relativePath);

  // The resolved path must be a strict descendant of the artifacts root;
  // equality or an outside prefix means a traversal attempt.
  const isInsideRoot = candidate !== root && candidate.startsWith(`${root}${path.sep}`);
  if (!isInsideRoot) {
    throw new Error(`Artifact path escapes artifacts root: ${relativePath}`);
  }

  return candidate;
}

/**
 * Validates a manifest-provided binary name before staging it to a package `bin/` directory.
 *
 * @param {unknown} binaryName - Candidate binary name from manifest.
 * @param {string} targetId - Target id for error context.
 * @returns {string} Safe binary filename.
 * @throws {Error} If the name is empty or contains path traversal/path separators.
 */
export function validateBinaryName(binaryName, targetId) {
  if (typeof binaryName !== 'string' || !binaryName) {
    throw new Error(`Invalid binaryName for target ${targetId}`);
  }

  // A valid name is a bare filename: no dot-directories, no absolute paths,
  // and no separator of either platform flavor.
  const looksLikePathSegment =
    binaryName === '.' ||
    binaryName === '..' ||
    path.isAbsolute(binaryName) ||
    binaryName.includes('/') ||
    binaryName.includes('\\');

  if (looksLikePathSegment) {
    throw new Error(`Invalid binaryName for target ${targetId}: ${binaryName}`);
  }

  return binaryName;
}

/**
 * Copies built artifacts into platform package `bin/` directories.
 *
 * @returns {Promise<void>}
 */
export async function main() {
  requireFile(manifestPath);

  const manifest = JSON.parse(await readFile(manifestPath, 'utf8'));
  if (!Array.isArray(manifest.targets) || manifest.targets.length === 0) {
    throw new Error('Artifacts manifest contains no targets. Run build:native:all first.');
  }

  for (const entry of manifest.targets) {
    const { target: targetId, relativePath } = entry;

    if (typeof targetId !== 'string' || typeof relativePath !== 'string') {
      throw new Error('Invalid target entry in artifacts manifest.');
    }

    const binaryName = validateBinaryName(entry.binaryName, targetId);

    const sourcePath = resolveArtifactSourcePath(relativePath);
    requireFile(sourcePath);

    const platformDirName = PLATFORM_DIRS[targetId];
    if (!platformDirName) {
      throw new Error(`No platform package mapping for target ${targetId}`);
    }

    const destPath = path.join(cliRoot, 'platforms', platformDirName, 'bin', binaryName);
    await mkdir(path.dirname(destPath), { recursive: true });
    await copyFile(sourcePath, destPath);
    console.log(`Staged binary for ${targetId}`);
  }
}

if (isDirectExecution(import.meta.url)) {
  main().catch((error) => {
    console.error(error instanceof Error ? error.message : error);
    process.exitCode = 1;
  });
}
Run build:native:all first.'); + } + + for (const target of manifest.targets) { + const targetId = target.target; + const relativePath = target.relativePath; + + if (typeof targetId !== 'string' || typeof relativePath !== 'string') { + throw new Error('Invalid target entry in artifacts manifest.'); + } + + const binaryName = validateBinaryName(target.binaryName, targetId); + + const sourcePath = resolveArtifactSourcePath(relativePath); + requireFile(sourcePath); + + const platformDirName = PLATFORM_DIRS[targetId]; + if (!platformDirName) { + throw new Error(`No platform package mapping for target ${targetId}`); + } + + const destPath = path.join(cliRoot, 'platforms', platformDirName, 'bin', binaryName); + await mkdir(path.dirname(destPath), { recursive: true }); + await copyFile(sourcePath, destPath); + console.log(`Staged binary for ${targetId}`); + } +} + +if (isDirectExecution(import.meta.url)) { + main().catch((error) => { + console.error(error instanceof Error ? error.message : error); + process.exitCode = 1; + }); +} diff --git a/apps/cli/scripts/sync-version.js b/apps/cli/scripts/sync-version.js new file mode 100644 index 0000000000..53913f2d15 --- /dev/null +++ b/apps/cli/scripts/sync-version.js @@ -0,0 +1,78 @@ +import { readFile, writeFile } from 'node:fs/promises'; +import path from 'node:path'; +import { cliRoot, isDirectExecution } from './utils.js'; + +const CLI_PACKAGE = path.join(cliRoot, 'package.json'); + +const PLATFORM_PACKAGES = [ + path.join(cliRoot, 'platforms/cli-darwin-arm64/package.json'), + path.join(cliRoot, 'platforms/cli-darwin-x64/package.json'), + path.join(cliRoot, 'platforms/cli-linux-x64/package.json'), + path.join(cliRoot, 'platforms/cli-linux-arm64/package.json'), + path.join(cliRoot, 'platforms/cli-windows-x64/package.json'), +]; + +const PLATFORM_PACKAGE_NAMES = [ + '@superdoc-dev/cli-darwin-arm64', + '@superdoc-dev/cli-darwin-x64', + '@superdoc-dev/cli-linux-x64', + '@superdoc-dev/cli-linux-arm64', + 
'@superdoc-dev/cli-windows-x64', +]; + +async function readJson(filePath) { + return JSON.parse(await readFile(filePath, 'utf8')); +} + +async function writeJson(filePath, data) { + await writeFile(filePath, `${JSON.stringify(data, null, 2)}\n`, 'utf8'); +} + +/** + * Applies workspace protocol specs to platform optional dependencies. + * + * @param {Record | undefined} optionalDependencies - Existing optional dependencies. + * @param {string[]} packageNames - Platform package names to update. + * @returns {Record} Updated optionalDependencies object. + */ +export function syncOptionalDependencyVersions(optionalDependencies, packageNames) { + const synced = { ...(optionalDependencies ?? {}) }; + for (const packageName of packageNames) { + synced[packageName] = 'workspace:*'; + } + return synced; +} + +/** + * Synchronizes root and platform package versions. + * + * @returns {Promise} + */ +export async function main() { + const cliPkg = await readJson(CLI_PACKAGE); + const version = cliPkg.version; + + if (!version || typeof version !== 'string') { + throw new Error(`Missing or invalid version in ${CLI_PACKAGE}`); + } + + // Sync optionalDependencies in main CLI package + cliPkg.optionalDependencies = syncOptionalDependencyVersions(cliPkg.optionalDependencies, PLATFORM_PACKAGE_NAMES); + await writeJson(CLI_PACKAGE, cliPkg); + + // Sync version in each platform package + for (const filePath of PLATFORM_PACKAGES) { + const pkg = await readJson(filePath); + pkg.version = version; + await writeJson(filePath, pkg); + } + + console.log(`Synchronized CLI package versions to ${version}`); +} + +if (isDirectExecution(import.meta.url)) { + main().catch((error) => { + console.error(error instanceof Error ? 
import { spawnSync } from 'node:child_process';
import path from 'node:path';
import { fileURLToPath } from 'node:url';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

/**
 * Absolute path to the CLI package root.
 *
 * @type {string}
 */
export const cliRoot = path.resolve(__dirname, '..');

/**
 * Absolute path to the repository root.
 *
 * @type {string}
 */
export const repoRoot = path.resolve(cliRoot, '../..');

// Renders "command arg1 arg2" for log headings and error messages.
function describeCommand(command, args) {
  return [command, ...args].join(' ');
}

/**
 * Ensures a command line only includes known flags.
 *
 * @param {string[]} argv - Raw argv tokens (excluding node/script path).
 * @param {Set<string>} allowedFlags - Allowed long-form flags (for example `--all`).
 * @returns {void}
 * @throws {Error} If unknown flags are present.
 */
export function ensureNoUnknownFlags(argv, allowedFlags) {
  const unknown = [];
  for (const token of argv) {
    // Positional arguments and the bare `--` separator are always accepted.
    if (!token.startsWith('--') || token === '--') continue;
    // `--flag=value` is matched against the allowlist by its flag part only.
    const equalsIndex = token.indexOf('=');
    const flag = equalsIndex === -1 ? token : token.slice(0, equalsIndex);
    if (!allowedFlags.has(flag)) {
      unknown.push(token);
    }
  }
  if (unknown.length > 0) {
    throw new Error(`Unknown flag(s): ${unknown.join(', ')}`);
  }
}

/**
 * Reads an optional flag value from `argv`.
 *
 * Supports both `--flag value` and `--flag=value`.
 *
 * @param {string[]} argv - Raw argv tokens (excluding node/script path).
 * @param {string} flagName - Flag to read (for example `--tag`).
 * @returns {string | null} The parsed value, or `null` when the flag is absent.
 * @throws {Error} If the flag is present without a value.
 */
export function getOptionalFlagValue(argv, flagName) {
  // The equals form takes precedence over a separate value token.
  const prefix = `${flagName}=`;
  for (const token of argv) {
    if (!token.startsWith(prefix)) continue;
    const inline = token.slice(prefix.length).trim();
    if (inline === '') {
      throw new Error(`Flag ${flagName} requires a value.`);
    }
    return inline;
  }

  const position = argv.indexOf(flagName);
  if (position === -1) {
    return null;
  }

  const next = argv[position + 1];
  // A following flag token (or nothing at all) means the value is missing.
  if (!next || next.startsWith('--')) {
    throw new Error(`Flag ${flagName} requires a value.`);
  }
  return next;
}

/**
 * Indicates whether a module is being executed directly by Node.
 *
 * @param {string} importMetaUrl - Current module `import.meta.url`.
 * @returns {boolean} `true` when executed as entrypoint, otherwise `false`.
 */
export function isDirectExecution(importMetaUrl) {
  const entry = process.argv[1];
  if (!entry) return false;
  return path.resolve(entry) === fileURLToPath(importMetaUrl);
}

/**
 * Executes a command synchronously from the repository root.
 *
 * @param {string} command - Program to execute.
 * @param {string[]} args - Program arguments.
 * @param {string} [label] - Optional human-readable step label.
 * @returns {void}
 * @throws {Error} If the command fails.
 */
export function runCommand(command, args, label) {
  const heading = label ?? describeCommand(command, args);
  console.log(`\n[cli] ${heading}`);

  const outcome = spawnSync(command, args, {
    cwd: repoRoot,
    env: process.env,
    stdio: 'inherit',
  });

  if (outcome.error) {
    throw outcome.error;
  }
  if (outcome.status !== 0) {
    throw new Error(`Step failed (${outcome.status ?? 'unknown'}): ${heading}`);
  }
}
+ * @returns {void} + */ +export function runNodeScript(scriptPath, args = [], label) { + runCommand('node', [scriptPath, ...args], label); +} diff --git a/apps/cli/skill/SKILL.md b/apps/cli/skill/SKILL.md new file mode 100644 index 0000000000..d0cc803f97 --- /dev/null +++ b/apps/cli/skill/SKILL.md @@ -0,0 +1,45 @@ +--- +name: editing-docx +description: Searches, replaces, and reads text in Word documents. Use when the user asks to edit, search, or extract text from .docx files. +--- + +# SuperDoc CLI + +Edit Word documents from the command line. Use instead of python-docx. + +## Commands + +| Command | Description | +|---------|-------------| +| `npx @superdoc-dev/cli@latest search ` | Find text across documents | +| `npx @superdoc-dev/cli@latest replace-legacy ` | Find and replace text | +| `npx @superdoc-dev/cli@latest read ` | Extract plain text | + +## When to Use + +Use superdoc when the user asks to: +- Search text in .docx files +- Find and replace text in Word documents +- Extract text content from .docx files +- Bulk edit multiple Word documents + +## Examples + +```bash +# Search across documents +npx @superdoc-dev/cli@latest search "indemnification" ./contracts/*.docx + +# Find and replace +npx @superdoc-dev/cli@latest replace-legacy "ACME Corp" "Globex Inc" ./merger/*.docx + +# Extract text +npx @superdoc-dev/cli@latest read ./proposal.docx + +# JSON output for scripting +npx @superdoc-dev/cli@latest search "Article 7" ./**/*.docx --json +``` + +## Options + +- `--json` — Machine-readable output +- `--help` — Show help diff --git a/apps/cli/src/__tests__/cli.test.ts b/apps/cli/src/__tests__/cli.test.ts new file mode 100644 index 0000000000..8663e16de4 --- /dev/null +++ b/apps/cli/src/__tests__/cli.test.ts @@ -0,0 +1,1959 @@ +import { afterAll, beforeAll, beforeEach, describe, expect, test } from 'bun:test'; +import { access, copyFile, mkdir, readFile, rm, writeFile } from 'node:fs/promises'; +import { join } from 'node:path'; +import { run } from 
'../index'; + +type RunResult = { + code: number; + stdout: string; + stderr: string; +}; + +type TextRange = { + kind: 'text'; + blockId: string; + range: { + start: number; + end: number; + }; +}; + +type ListItemAddress = { + kind: 'block'; + nodeType: 'listItem'; + nodeId: string; +}; + +type SuccessEnvelope = { + ok: true; + command: string; + data: TData; + meta: { + elapsedMs: number; + }; +}; + +type ErrorEnvelope = { + ok: false; + error: { + code: string; + message: string; + }; +}; + +const TEST_DIR = join(import.meta.dir, 'fixtures-cli'); +const STATE_DIR = join(TEST_DIR, 'state'); +const SOURCE_DOC = join(import.meta.dir, '../../../../e2e-tests/test-data/basic-documents/advanced-text.docx'); +const LIST_SOURCE_DOC_CANDIDATES = [ + join(import.meta.dir, '../../../../devtools/document-api-tests/fixtures/matrix-list.input.docx'), + join(import.meta.dir, '../../../../e2e-tests/test-data/basic-documents/lists-complex-items.docx'), +]; +const SAMPLE_DOC = join(TEST_DIR, 'sample.docx'); +const LIST_SAMPLE_DOC = join(TEST_DIR, 'lists-sample.docx'); + +async function resolveListSourceDoc(): Promise { + for (const candidate of LIST_SOURCE_DOC_CANDIDATES) { + try { + await access(candidate); + return candidate; + } catch { + // try next candidate + } + } + + throw new Error(`No list fixture found. Tried: ${LIST_SOURCE_DOC_CANDIDATES.join(', ')}`); +} + +async function runCli(args: string[], stdinBytes?: Uint8Array): Promise { + let stdout = ''; + let stderr = ''; + + const code = await run(args, { + stdout(message: string) { + stdout += message; + }, + stderr(message: string) { + stderr += message; + }, + async readStdinBytes() { + return stdinBytes ?? 
new Uint8Array(); + }, + }); + + return { code, stdout, stderr }; +} + +function parseJsonOutput(result: RunResult): T { + const source = result.stdout.trim() || result.stderr.trim(); + if (!source) { + throw new Error('No JSON output found.'); + } + + return JSON.parse(source) as T; +} + +function asRecord(value: unknown): Record | null { + if (typeof value !== 'object' || value == null || Array.isArray(value)) return null; + return value as Record; +} + +function hasPrettyProperties(node: unknown): boolean { + const record = asRecord(node); + if (!record) return false; + const properties = asRecord(record.properties); + if (!properties) return false; + return Object.values(properties).some((value) => value != null && value !== '' && value !== false); +} + +async function firstTextRange(args: string[]): Promise { + const result = await runCli(args); + expect(result.code).toBe(0); + + const envelope = parseJsonOutput< + SuccessEnvelope<{ + result: { + context?: Array<{ textRanges?: TextRange[] }>; + }; + }> + >(result); + + const range = envelope.data.result.context?.[0]?.textRanges?.[0]; + if (!range) { + throw new Error('Expected at least one text range from find result.'); + } + + return range; +} + +function firstInsertedEntityId(result: RunResult): string { + const envelope = parseJsonOutput< + SuccessEnvelope<{ + receipt?: { + inserted?: Array<{ entityId?: string }>; + }; + }> + >(result); + const entityId = envelope.data.receipt?.inserted?.[0]?.entityId; + if (!entityId) { + throw new Error('Expected inserted entity id in receipt.'); + } + return entityId; +} + +async function firstListItemAddress(args: string[]): Promise { + const result = await runCli(args); + expect(result.code).toBe(0); + + const envelope = parseJsonOutput< + SuccessEnvelope<{ + result: { + matches: ListItemAddress[]; + }; + }> + >(result); + + const address = envelope.data.result.matches[0]; + if (!address) { + throw new Error('Expected at least one list item address from lists.list 
result.'); + } + + return address; +} + +describe('superdoc CLI', () => { + beforeAll(async () => { + process.env.SUPERDOC_CLI_STATE_DIR = STATE_DIR; + await mkdir(TEST_DIR, { recursive: true }); + await copyFile(SOURCE_DOC, SAMPLE_DOC); + await copyFile(await resolveListSourceDoc(), LIST_SAMPLE_DOC); + }); + + beforeEach(async () => { + await rm(STATE_DIR, { recursive: true, force: true }); + }); + + afterAll(async () => { + await rm(TEST_DIR, { recursive: true, force: true }); + delete process.env.SUPERDOC_CLI_STATE_DIR; + }); + + test('status returns inactive when no document is open', async () => { + const result = await runCli(['status']); + expect(result.code).toBe(0); + + const envelope = parseJsonOutput>(result); + expect(envelope.command).toBe('status'); + expect(envelope.data.active).toBe(false); + }); + + test('commands without require an active context', async () => { + const result = await runCli(['find', '--type', 'text', '--pattern', 'Wilde']); + expect(result.code).toBe(1); + + const envelope = parseJsonOutput(result); + expect(envelope.error.code).toBe('NO_ACTIVE_DOCUMENT'); + }); + + test('info returns required contract fields', async () => { + const result = await runCli(['info', SAMPLE_DOC]); + expect(result.code).toBe(0); + + const envelope = parseJsonOutput< + SuccessEnvelope<{ + document: { source: string; revision: number }; + counts: { words: number; paragraphs: number }; + capabilities: { canFind: boolean }; + }> + >(result); + + expect(envelope.ok).toBe(true); + expect(envelope.command).toBe('info'); + expect(envelope.data.document.source).toBe('path'); + expect(envelope.data.document.revision).toBe(0); + expect(envelope.data.counts.words).toBeGreaterThan(0); + expect(envelope.data.counts.paragraphs).toBeGreaterThan(0); + expect(envelope.data.capabilities.canFind).toBe(true); + expect(envelope.meta.elapsedMs).toBeGreaterThanOrEqual(0); + }); + + test('info pretty includes revision summary and outline section when available', async () => { 
+ const jsonResult = await runCli(['info', SAMPLE_DOC]); + expect(jsonResult.code).toBe(0); + + const jsonEnvelope = parseJsonOutput< + SuccessEnvelope<{ + outline: Array<{ level: number; text: string; nodeId: string }>; + }> + >(jsonResult); + + const prettyResult = await runCli(['info', SAMPLE_DOC, '--output', 'pretty']); + expect(prettyResult.code).toBe(0); + expect(prettyResult.stdout).toContain('Revision 0:'); + expect(prettyResult.stdout).toContain('words'); + if (jsonEnvelope.data.outline.length > 0) { + expect(prettyResult.stdout).toContain('Outline:'); + } + }); + + test('describe returns contract overview', async () => { + const result = await runCli(['describe']); + expect(result.code).toBe(0); + + const envelope = parseJsonOutput< + SuccessEnvelope<{ + contractVersion: string; + operationCount: number; + operations: Array<{ id: string; command: string[] }>; + }> + >(result); + + expect(envelope.command).toBe('describe'); + expect(envelope.data.contractVersion.length).toBeGreaterThan(0); + expect(envelope.data.operationCount).toBeGreaterThan(0); + expect(envelope.data.operations.some((operation) => operation.id === 'doc.find')).toBe(true); + }); + + test('describe command returns one operation by id', async () => { + const result = await runCli(['describe', 'command', 'doc.find']); + expect(result.code).toBe(0); + + const envelope = parseJsonOutput< + SuccessEnvelope<{ + contractVersion: string; + operation: { + id: string; + command: string[]; + }; + }> + >(result); + + expect(envelope.command).toBe('describe command'); + expect(envelope.data.contractVersion.length).toBeGreaterThan(0); + expect(envelope.data.operation.id).toBe('doc.find'); + expect(envelope.data.operation.command).toEqual(['find']); + }); + + test('describe command pretty prints parameters and constraints', async () => { + const result = await runCli(['describe', 'command', 'doc.find', '--output', 'pretty']); + expect(result.code).toBe(0); + 
expect(result.stdout).toContain('Parameters:'); + expect(result.stdout).toContain('--session'); + expect(result.stdout).toContain('--include-nodes'); + expect(result.stdout).toContain('Constraints:'); + }); + + test('describe command pretty labels operation positional args by name', async () => { + const result = await runCli(['describe', 'command', 'doc.describeCommand', '--output', 'pretty']); + expect(result.code).toBe(0); + expect(result.stdout).toContain(''); + expect(result.stdout).not.toContain(' Document path or stdin'); + }); + + test('describe command pretty labels session ids as positional ids', async () => { + const result = await runCli(['describe', 'command', 'doc.session.save', '--output', 'pretty']); + expect(result.code).toBe(0); + expect(result.stdout).toContain(''); + expect(result.stdout).not.toContain(' Document path or stdin'); + }); + + test('call executes an operation from canonical input payload', async () => { + const result = await runCli([ + 'call', + 'doc.find', + '--input-json', + JSON.stringify({ + doc: SAMPLE_DOC, + query: { + select: { + type: 'text', + pattern: 'Wilde', + mode: 'contains', + }, + limit: 1, + }, + }), + ]); + expect(result.code).toBe(0); + + const envelope = parseJsonOutput< + SuccessEnvelope<{ + operationId: string; + result: { + query: { + select: { + type: string; + }; + }; + document: { + source: string; + }; + }; + }> + >(result); + + expect(envelope.command).toBe('call'); + expect(envelope.data.operationId).toBe('doc.find'); + expect(envelope.data.result.query.select.type).toBe('text'); + expect(envelope.data.result.document.source).toBe('path'); + }); + + test('call resolves operation ids from command-key shorthand', async () => { + const result = await runCli([ + 'call', + 'find', + '--input-json', + JSON.stringify({ + doc: SAMPLE_DOC, + query: { + select: { + type: 'text', + pattern: 'Wilde', + }, + }, + }), + ]); + expect(result.code).toBe(0); + + const envelope = parseJsonOutput< + SuccessEnvelope<{ + 
operationId: string; + }> + >(result); + expect(envelope.data.operationId).toBe('doc.find'); + }); + + test('call supports operations with non-doc positional kind:"doc" params', async () => { + const result = await runCli([ + 'call', + 'doc.describeCommand', + '--input-json', + JSON.stringify({ + operationId: 'doc.find', + }), + ]); + expect(result.code).toBe(0); + + const envelope = parseJsonOutput< + SuccessEnvelope<{ + operationId: string; + result: { + operation: { + id: string; + }; + }; + }> + >(result); + expect(envelope.data.operationId).toBe('doc.describeCommand'); + expect(envelope.data.result.operation.id).toBe('doc.find'); + }); + + test('call supports alias command keys with spaces', async () => { + const sessionId = 'call-session-use-alias'; + const openResult = await runCli(['open', SAMPLE_DOC, '--session', sessionId]); + expect(openResult.code).toBe(0); + + const callResult = await runCli([ + 'call', + 'session', + 'use', + '--input-json', + JSON.stringify({ + sessionId, + }), + ]); + expect(callResult.code).toBe(0); + + const envelope = parseJsonOutput< + SuccessEnvelope<{ + operationId: string; + result: { + activeSessionId: string; + }; + }> + >(callResult); + expect(envelope.data.operationId).toBe('doc.session.setDefault'); + expect(envelope.data.result.activeSessionId).toBe(sessionId); + + const closeResult = await runCli(['close', '--session', sessionId, '--discard']); + expect(closeResult.code).toBe(0); + }); + + test('call doc.open accepts doc + sessionId in input payload', async () => { + const sessionId = 'call-open-with-session-id'; + + const openCall = await runCli([ + 'call', + 'doc.open', + '--input-json', + JSON.stringify({ + doc: SAMPLE_DOC, + sessionId, + }), + ]); + expect(openCall.code).toBe(0); + + const openEnvelope = parseJsonOutput< + SuccessEnvelope<{ + operationId: string; + result: { + contextId: string; + active: boolean; + }; + }> + >(openCall); + expect(openEnvelope.data.operationId).toBe('doc.open'); + 
expect(openEnvelope.data.result.contextId).toBe(sessionId); + expect(openEnvelope.data.result.active).toBe(true); + + const closeResult = await runCli(['close', '--session', sessionId, '--discard']); + expect(closeResult.code).toBe(0); + }); + + test('call doc.save and doc.close use active session when input.sessionId is omitted', async () => { + const sessionId = 'call-save-close-active-session'; + const savedOut = join(TEST_DIR, 'call-save-close-active-session.docx'); + + const openResult = await runCli(['open', SAMPLE_DOC, '--session', sessionId]); + expect(openResult.code).toBe(0); + + const saveCall = await runCli([ + 'call', + 'doc.save', + '--input-json', + JSON.stringify({ + out: savedOut, + force: true, + }), + ]); + expect(saveCall.code).toBe(0); + + const closeCall = await runCli([ + 'call', + 'doc.close', + '--input-json', + JSON.stringify({ + discard: true, + }), + ]); + expect(closeCall.code).toBe(0); + }); + + test('call rejects mixing stateless doc input with session targets', async () => { + const result = await runCli([ + 'call', + 'doc.find', + '--input-json', + JSON.stringify({ + doc: SAMPLE_DOC, + sessionId: 'mixed-mode-session', + query: { + select: { + type: 'text', + pattern: 'Wilde', + }, + }, + }), + ]); + expect(result.code).toBe(1); + const envelope = parseJsonOutput(result); + expect(envelope.error.code).toBe('INVALID_ARGUMENT'); + expect(envelope.error.message).toContain('stateless input.doc cannot be combined'); + }); + + test('call executes direct text-mutation operations without token round-trip semantics drift', async () => { + const source = join(TEST_DIR, 'call-insert-source.docx'); + const out = join(TEST_DIR, 'call-insert-out.docx'); + await copyFile(SAMPLE_DOC, source); + + const callResult = await runCli([ + 'call', + 'doc.insert', + '--input-json', + JSON.stringify({ + doc: source, + text: 'CALL_INSERT_TOKEN_1597', + out, + }), + ]); + expect(callResult.code).toBe(0); + + const envelope = parseJsonOutput< + SuccessEnvelope<{ 
+ operationId: string; + result: { + document: { source: string }; + target: TextRange; + }; + }> + >(callResult); + expect(envelope.data.operationId).toBe('doc.insert'); + expect(envelope.data.result.document.source).toBe('path'); + expect(envelope.data.result.target.range.start).toBe(0); + + const verifyResult = await runCli(['find', out, '--type', 'text', '--pattern', 'CALL_INSERT_TOKEN_1597']); + expect(verifyResult.code).toBe(0); + const verifyEnvelope = parseJsonOutput>(verifyResult); + expect(verifyEnvelope.data.result.total).toBeGreaterThan(0); + }); + + test('call only supports JSON output mode', async () => { + const result = await runCli([ + 'call', + 'doc.find', + '--input-json', + JSON.stringify({ + doc: SAMPLE_DOC, + query: { + select: { + type: 'text', + pattern: 'Wilde', + }, + }, + }), + '--output', + 'pretty', + ]); + expect(result.code).toBe(1); + expect(result.stderr).toContain('INVALID_ARGUMENT'); + expect(result.stderr).toContain('call: only --output json is supported.'); + }); + + test('describe command returns TARGET_NOT_FOUND for unknown operation', async () => { + const result = await runCli(['describe', 'command', 'doc.missing']); + expect(result.code).toBe(1); + + const envelope = parseJsonOutput(result); + expect(envelope.error.code).toBe('TARGET_NOT_FOUND'); + }); + + test('find supports run node type', async () => { + const result = await runCli(['find', SAMPLE_DOC, '--type', 'run', '--limit', '1']); + expect(result.code).toBe(0); + + const envelope = parseJsonOutput< + SuccessEnvelope<{ + result: { + total: number; + matches: Array<{ kind: string; nodeType: string }>; + }; + }> + >(result); + + expect(envelope.ok).toBe(true); + expect(envelope.command).toBe('find'); + expect(envelope.data.result.total).toBeGreaterThan(0); + expect(envelope.data.result.matches[0].kind).toBe('inline'); + expect(envelope.data.result.matches[0].nodeType).toBe('run'); + }); + + test('find rejects legacy query.include payloads', async () => { + const 
result = await runCli([ + 'find', + SAMPLE_DOC, + '--query-json', + JSON.stringify({ + select: { type: 'text', pattern: 'Wilde' }, + include: ['context'], + }), + ]); + expect(result.code).toBe(1); + + const envelope = parseJsonOutput(result); + expect(envelope.error.code).toBe('VALIDATION_ERROR'); + expect(envelope.error.message).toContain('query.include'); + }); + + test('find text queries return context and textRanges without includeNodes', async () => { + const result = await runCli([ + 'find', + SAMPLE_DOC, + '--query-json', + JSON.stringify({ + select: { type: 'text', pattern: 'Wilde' }, + limit: 1, + }), + ]); + expect(result.code).toBe(0); + + const envelope = parseJsonOutput< + SuccessEnvelope<{ + result: { + context?: Array<{ + textRanges?: Array<{ kind: 'text'; blockId: string; range: { start: number; end: number } }>; + }>; + }; + }> + >(result); + + const firstContext = envelope.data.result.context?.[0]; + expect(firstContext).toBeDefined(); + expect(firstContext?.textRanges?.length).toBeGreaterThan(0); + }); + + test('get-node resolves address returned by find', async () => { + const findResult = await runCli(['find', SAMPLE_DOC, '--type', 'text', '--pattern', 'Wilde', '--limit', '1']); + expect(findResult.code).toBe(0); + + const findEnvelope = parseJsonOutput< + SuccessEnvelope<{ + result: { + matches: Array>; + }; + }> + >(findResult); + + const address = findEnvelope.data.result.matches[0]; + expect(address).toBeDefined(); + + const getNodeResult = await runCli(['get-node', SAMPLE_DOC, '--address-json', JSON.stringify(address)]); + expect(getNodeResult.code).toBe(0); + + const nodeEnvelope = parseJsonOutput>(getNodeResult); + expect(nodeEnvelope.ok).toBe(true); + expect(nodeEnvelope.command).toBe('get-node'); + expect(nodeEnvelope.data.node).toBeDefined(); + }); + + test('get-node pretty includes resolved identity and optional node details', async () => { + const findResult = await runCli(['find', SAMPLE_DOC, '--type', 'text', '--pattern', 
'Wilde', '--limit', '1']); + expect(findResult.code).toBe(0); + + const findEnvelope = parseJsonOutput< + SuccessEnvelope<{ + result: { + matches: Array>; + }; + }> + >(findResult); + const address = findEnvelope.data.result.matches[0]; + expect(address).toBeDefined(); + if (!address) return; + + const prettyResult = await runCli([ + 'get-node', + SAMPLE_DOC, + '--address-json', + JSON.stringify(address), + '--output', + 'pretty', + ]); + expect(prettyResult.code).toBe(0); + expect(prettyResult.stdout).toContain('Revision 0:'); + + const jsonResult = await runCli(['get-node', SAMPLE_DOC, '--address-json', JSON.stringify(address)]); + expect(jsonResult.code).toBe(0); + const jsonEnvelope = parseJsonOutput>(jsonResult); + const node = asRecord(jsonEnvelope.data.node); + if (typeof node?.text === 'string' && node.text.length > 0) { + expect(prettyResult.stdout).toContain('Text:'); + } + if (hasPrettyProperties(jsonEnvelope.data.node)) { + expect(prettyResult.stdout).toContain('Properties:'); + } + }); + + test('get-node-by-id resolves block ID returned by find', async () => { + const findResult = await runCli(['find', SAMPLE_DOC, '--type', 'text', '--pattern', 'Wilde', '--limit', '1']); + expect(findResult.code).toBe(0); + + const findEnvelope = parseJsonOutput< + SuccessEnvelope<{ + result: { + matches: Array<{ kind: string; nodeType: string; nodeId: string }>; + }; + }> + >(findResult); + + const firstMatch = findEnvelope.data.result.matches[0]; + expect(firstMatch.kind).toBe('block'); + + const getByIdResult = await runCli([ + 'get-node-by-id', + SAMPLE_DOC, + '--id', + firstMatch.nodeId, + '--node-type', + firstMatch.nodeType, + ]); + expect(getByIdResult.code).toBe(0); + + const envelope = parseJsonOutput>(getByIdResult); + expect(envelope.command).toBe('get-node-by-id'); + expect(envelope.data.node).toBeDefined(); + }); + + test('get-node-by-id pretty includes resolved identity and optional node details', async () => { + const findResult = await runCli(['find', 
SAMPLE_DOC, '--type', 'text', '--pattern', 'Wilde', '--limit', '1']); + expect(findResult.code).toBe(0); + + const findEnvelope = parseJsonOutput< + SuccessEnvelope<{ + result: { + matches: Array<{ kind: string; nodeType: string; nodeId: string }>; + }; + }> + >(findResult); + + const firstMatch = findEnvelope.data.result.matches[0]; + expect(firstMatch.kind).toBe('block'); + + const prettyResult = await runCli([ + 'get-node-by-id', + SAMPLE_DOC, + '--id', + firstMatch.nodeId, + '--node-type', + firstMatch.nodeType, + '--output', + 'pretty', + ]); + expect(prettyResult.code).toBe(0); + expect(prettyResult.stdout).toContain('Revision 0:'); + expect(prettyResult.stdout).toContain(firstMatch.nodeId); + + const jsonResult = await runCli([ + 'get-node-by-id', + SAMPLE_DOC, + '--id', + firstMatch.nodeId, + '--node-type', + firstMatch.nodeType, + ]); + expect(jsonResult.code).toBe(0); + const jsonEnvelope = parseJsonOutput>(jsonResult); + const node = asRecord(jsonEnvelope.data.node); + if (typeof node?.text === 'string' && node.text.length > 0) { + expect(prettyResult.stdout).toContain('Text:'); + } + if (hasPrettyProperties(jsonEnvelope.data.node)) { + expect(prettyResult.stdout).toContain('Properties:'); + } + }); + + test('replace dry-run does not write output file', async () => { + const target = await firstTextRange(['find', SAMPLE_DOC, '--type', 'text', '--pattern', 'Wilde']); + const dryRunOut = join(TEST_DIR, 'dry-run.docx'); + + const result = await runCli([ + 'replace', + SAMPLE_DOC, + '--target-json', + JSON.stringify(target), + '--text', + 'WILDE_DRY_RUN', + '--out', + dryRunOut, + '--dry-run', + ]); + + expect(result.code).toBe(0); + + const envelope = parseJsonOutput>(result); + expect(envelope.ok).toBe(true); + expect(envelope.data.dryRun).toBe(true); + + await expect(access(dryRunOut)).rejects.toThrow(); + }); + + test('replace writes output and updates text target', async () => { + const replaceSource = join(TEST_DIR, 'replace-source.docx'); + const 
replaceOut = join(TEST_DIR, 'replace-out.docx'); + await copyFile(SAMPLE_DOC, replaceSource); + + const target = await firstTextRange(['find', replaceSource, '--type', 'text', '--pattern', 'Wilde']); + + const replaceResult = await runCli([ + 'replace', + replaceSource, + '--target-json', + JSON.stringify(target), + '--text', + 'WILDE_CLI', + '--out', + replaceOut, + ]); + + expect(replaceResult.code).toBe(0); + + const verifyResult = await runCli(['find', replaceOut, '--type', 'text', '--pattern', 'WILDE_CLI']); + expect(verifyResult.code).toBe(0); + + const verifyEnvelope = parseJsonOutput< + SuccessEnvelope<{ + result: { total: number }; + }> + >(verifyResult); + + expect(verifyEnvelope.data.result.total).toBeGreaterThan(0); + }); + + test('insert writes output and adds text at target', async () => { + const insertSource = join(TEST_DIR, 'insert-source.docx'); + const insertOut = join(TEST_DIR, 'insert-out.docx'); + await copyFile(SAMPLE_DOC, insertSource); + + const target = await firstTextRange(['find', insertSource, '--type', 'text', '--pattern', 'Wilde']); + const collapsedTarget: TextRange = { + ...target, + range: { + start: target.range.start, + end: target.range.start, + }, + }; + + const insertResult = await runCli([ + 'insert', + insertSource, + '--target-json', + JSON.stringify(collapsedTarget), + '--text', + 'CLI_INSERT_TOKEN_1597', + '--out', + insertOut, + ]); + + expect(insertResult.code).toBe(0); + + const verifyResult = await runCli(['find', insertOut, '--type', 'text', '--pattern', 'CLI_INSERT_TOKEN_1597']); + expect(verifyResult.code).toBe(0); + + const verifyEnvelope = parseJsonOutput>(verifyResult); + expect(verifyEnvelope.data.result.total).toBeGreaterThan(0); + }); + + test('insert without target defaults to document-start insertion', async () => { + const insertSource = join(TEST_DIR, 'insert-default-source.docx'); + const insertOut = join(TEST_DIR, 'insert-default-out.docx'); + await copyFile(SAMPLE_DOC, insertSource); + + const 
insertResult = await runCli([ + 'insert', + insertSource, + '--text', + 'CLI_DEFAULT_INSERT_TOKEN_1597', + '--out', + insertOut, + ]); + + expect(insertResult.code).toBe(0); + + const insertEnvelope = parseJsonOutput< + SuccessEnvelope<{ + target: TextRange; + }> + >(insertResult); + expect(insertEnvelope.data.target.range.start).toBe(0); + expect(insertEnvelope.data.target.range.end).toBe(0); + + const verifyResult = await runCli([ + 'find', + insertOut, + '--type', + 'text', + '--pattern', + 'CLI_DEFAULT_INSERT_TOKEN_1597', + ]); + expect(verifyResult.code).toBe(0); + + const verifyEnvelope = parseJsonOutput>(verifyResult); + expect(verifyEnvelope.data.result.total).toBeGreaterThan(0); + }); + + test('insert without target resolves blank first paragraphs deterministically', async () => { + const source = join(TEST_DIR, 'insert-blank-first-source.docx'); + const blankFirstOut = join(TEST_DIR, 'insert-blank-first.docx'); + const insertOut = join(TEST_DIR, 'insert-blank-first-result.docx'); + await copyFile(SAMPLE_DOC, source); + + const createResult = await runCli([ + 'create', + 'paragraph', + source, + '--at', + 'document-start', + '--out', + blankFirstOut, + ]); + expect(createResult.code).toBe(0); + + const insertResult = await runCli([ + 'insert', + blankFirstOut, + '--text', + 'CLI_BLANK_INSERT_TOKEN_1597', + '--out', + insertOut, + ]); + expect(insertResult.code).toBe(0); + + const insertEnvelope = parseJsonOutput< + SuccessEnvelope<{ + target: TextRange; + resolvedRange: { from: number; to: number }; + }> + >(insertResult); + + expect(insertEnvelope.data.target.range).toEqual({ start: 0, end: 0 }); + expect(insertEnvelope.data.resolvedRange.from).toBe(insertEnvelope.data.resolvedRange.to); + + const verifyResult = await runCli([ + 'find', + insertOut, + '--type', + 'text', + '--pattern', + 'CLI_BLANK_INSERT_TOKEN_1597', + ]); + expect(verifyResult.code).toBe(0); + const verifyEnvelope = parseJsonOutput>(verifyResult); + 
expect(verifyEnvelope.data.result.total).toBeGreaterThan(0); + }); + + test('create paragraph writes output and adds a new paragraph with seed text', async () => { + const createSource = join(TEST_DIR, 'create-paragraph-source.docx'); + const createOut = join(TEST_DIR, 'create-paragraph-out.docx'); + await copyFile(SAMPLE_DOC, createSource); + + const createResult = await runCli([ + 'create', + 'paragraph', + createSource, + '--text', + 'CLI_CREATE_PARAGRAPH_TOKEN_1597', + '--at', + 'document-end', + '--out', + createOut, + ]); + + expect(createResult.code).toBe(0); + + const createEnvelope = parseJsonOutput< + SuccessEnvelope<{ + result: { + success: boolean; + paragraph: { kind: string; nodeType: string }; + insertionPoint: TextRange; + }; + }> + >(createResult); + + expect(createEnvelope.data.result.success).toBe(true); + expect(createEnvelope.data.result.paragraph.kind).toBe('block'); + expect(createEnvelope.data.result.paragraph.nodeType).toBe('paragraph'); + expect(createEnvelope.data.result.insertionPoint.kind).toBe('text'); + + const verifyResult = await runCli([ + 'find', + createOut, + '--type', + 'text', + '--pattern', + 'CLI_CREATE_PARAGRAPH_TOKEN_1597', + ]); + expect(verifyResult.code).toBe(0); + + const verifyEnvelope = parseJsonOutput>(verifyResult); + expect(verifyEnvelope.data.result.total).toBeGreaterThan(0); + }); + + test('lists list/get resolve list items in stateless mode', async () => { + const listResult = await runCli(['lists', 'list', LIST_SAMPLE_DOC, '--limit', '2']); + expect(listResult.code).toBe(0); + + const listEnvelope = parseJsonOutput< + SuccessEnvelope<{ + result: { + total: number; + matches: ListItemAddress[]; + }; + }> + >(listResult); + expect(listEnvelope.data.result.total).toBeGreaterThan(0); + + const address = listEnvelope.data.result.matches[0]; + expect(address).toBeDefined(); + if (!address) return; + + const getResult = await runCli(['lists', 'get', LIST_SAMPLE_DOC, '--address-json', JSON.stringify(address)]); + 
expect(getResult.code).toBe(0); + + const getEnvelope = parseJsonOutput< + SuccessEnvelope<{ + address: ListItemAddress; + item: { address: ListItemAddress }; + }> + >(getResult); + expect(getEnvelope.data.item.address.nodeId).toBe(address.nodeId); + }); + + test('lists list pretty prints list rows', async () => { + const result = await runCli(['lists', 'list', LIST_SAMPLE_DOC, '--limit', '2', '--output', 'pretty']); + expect(result.code).toBe(0); + expect(result.stdout).toContain('Revision 0:'); + expect(result.stdout).toContain('list items'); + expect(result.stdout.trim().split('\n').length).toBeGreaterThan(1); + }); + + test('lists insert writes output and returns deterministic insertionPoint', async () => { + const source = join(TEST_DIR, 'lists-insert-source.docx'); + const out = join(TEST_DIR, 'lists-insert-out.docx'); + await copyFile(LIST_SAMPLE_DOC, source); + + const target = await firstListItemAddress(['lists', 'list', source, '--limit', '1']); + const insertResult = await runCli([ + 'lists', + 'insert', + source, + '--target-json', + JSON.stringify(target), + '--position', + 'after', + '--text', + 'CLI_LIST_INSERT_TOKEN_1597', + '--out', + out, + ]); + + expect(insertResult.code).toBe(0); + + const insertEnvelope = parseJsonOutput< + SuccessEnvelope<{ + result: { + success: boolean; + item: ListItemAddress; + insertionPoint: TextRange; + }; + }> + >(insertResult); + expect(insertEnvelope.data.result.success).toBe(true); + expect(insertEnvelope.data.result.insertionPoint.range).toEqual({ start: 0, end: 0 }); + + const verifyResult = await runCli(['find', out, '--type', 'text', '--pattern', 'CLI_LIST_INSERT_TOKEN_1597']); + expect(verifyResult.code).toBe(0); + const verifyEnvelope = parseJsonOutput>(verifyResult); + expect(verifyEnvelope.data.result.total).toBeGreaterThan(0); + }); + + test('lists exit updates stateful document and invalidates list-item target', async () => { + const openResult = await runCli(['open', LIST_SAMPLE_DOC]); + 
expect(openResult.code).toBe(0); + + const target = await firstListItemAddress(['lists', 'list', '--limit', '1']); + const exitResult = await runCli(['lists', 'exit', '--target-json', JSON.stringify(target)]); + expect(exitResult.code).toBe(0); + + const staleGet = await runCli(['lists', 'get', '--address-json', JSON.stringify(target)]); + expect(staleGet.code).toBe(1); + const staleEnvelope = parseJsonOutput(staleGet); + expect(staleEnvelope.error.code).toBe('TARGET_NOT_FOUND'); + + const closeResult = await runCli(['close', '--discard']); + expect(closeResult.code).toBe(0); + }); + + test('lists set-type tracked mode maps to TRACK_CHANGE_COMMAND_UNAVAILABLE', async () => { + const source = join(TEST_DIR, 'lists-set-type-source.docx'); + const out = join(TEST_DIR, 'lists-set-type-out.docx'); + await copyFile(LIST_SAMPLE_DOC, source); + + const target = await firstListItemAddress(['lists', 'list', source, '--limit', '1']); + const setTypeResult = await runCli([ + 'lists', + 'set-type', + source, + '--target-json', + JSON.stringify(target), + '--kind', + 'bullet', + '--change-mode', + 'tracked', + '--out', + out, + ]); + + expect(setTypeResult.code).toBe(1); + const envelope = parseJsonOutput(setTypeResult); + expect(envelope.error.code).toBe('TRACK_CHANGE_COMMAND_UNAVAILABLE'); + }); + + test('delete writes output and removes inserted text target', async () => { + const deleteSource = join(TEST_DIR, 'delete-source.docx'); + const insertedOut = join(TEST_DIR, 'delete-inserted.docx'); + const deletedOut = join(TEST_DIR, 'delete-out.docx'); + await copyFile(SAMPLE_DOC, deleteSource); + + const baseTarget = await firstTextRange(['find', deleteSource, '--type', 'text', '--pattern', 'Wilde']); + const collapsedTarget: TextRange = { + ...baseTarget, + range: { + start: baseTarget.range.start, + end: baseTarget.range.start, + }, + }; + + const insertResult = await runCli([ + 'insert', + deleteSource, + '--target-json', + JSON.stringify(collapsedTarget), + '--text', + 
'CLI_DELETE_TOKEN_1597', + '--out', + insertedOut, + ]); + expect(insertResult.code).toBe(0); + + const deleteTarget = await firstTextRange([ + 'find', + insertedOut, + '--type', + 'text', + '--pattern', + 'CLI_DELETE_TOKEN_1597', + ]); + const deleteResult = await runCli([ + 'delete', + insertedOut, + '--target-json', + JSON.stringify(deleteTarget), + '--out', + deletedOut, + ]); + expect(deleteResult.code).toBe(0); + + const verifyResult = await runCli(['find', deletedOut, '--type', 'text', '--pattern', 'CLI_DELETE_TOKEN_1597']); + expect(verifyResult.code).toBe(0); + const verifyEnvelope = parseJsonOutput>(verifyResult); + expect(verifyEnvelope.data.result.total).toBe(0); + }); + + test('format bold writes output for a valid text target', async () => { + const formatSource = join(TEST_DIR, 'format-source.docx'); + const formatOut = join(TEST_DIR, 'format-out.docx'); + await copyFile(SAMPLE_DOC, formatSource); + + const target = await firstTextRange(['find', formatSource, '--type', 'text', '--pattern', 'Wilde']); + + const result = await runCli([ + 'format', + 'bold', + formatSource, + '--target-json', + JSON.stringify(target), + '--out', + formatOut, + ]); + + expect(result.code).toBe(0); + await access(formatOut); + }); + + test('format bold rejects collapsed target ranges', async () => { + const formatSource = join(TEST_DIR, 'format-invalid-source.docx'); + const formatOut = join(TEST_DIR, 'format-invalid-out.docx'); + await copyFile(SAMPLE_DOC, formatSource); + + const baseTarget = await firstTextRange(['find', formatSource, '--type', 'text', '--pattern', 'Wilde']); + const collapsedTarget: TextRange = { + ...baseTarget, + range: { + start: baseTarget.range.start, + end: baseTarget.range.start, + }, + }; + + const result = await runCli([ + 'format', + 'bold', + formatSource, + '--target-json', + JSON.stringify(collapsedTarget), + '--out', + formatOut, + ]); + + expect(result.code).toBe(1); + const envelope = parseJsonOutput(result); + 
expect(envelope.error.code).toBe('INVALID_ARGUMENT'); + }); + + test('track-changes list is capability-aware', async () => { + const result = await runCli(['track-changes', 'list', SAMPLE_DOC]); + if (result.code === 0) { + const envelope = parseJsonOutput>(result); + expect(envelope.data.result.total).toBeGreaterThanOrEqual(0); + return; + } + + const envelope = parseJsonOutput(result); + expect(envelope.error.code).toBe('TRACK_CHANGE_COMMAND_UNAVAILABLE'); + }); + + test('track-changes list pretty includes an actionable id when data is available', async () => { + const jsonResult = await runCli(['track-changes', 'list', SAMPLE_DOC]); + if (jsonResult.code !== 0) { + const envelope = parseJsonOutput(jsonResult); + expect(envelope.error.code).toBe('TRACK_CHANGE_COMMAND_UNAVAILABLE'); + return; + } + + const jsonEnvelope = parseJsonOutput< + SuccessEnvelope<{ + result: { + changes?: Array<{ id?: string }>; + matches?: Array<{ entityId?: string }>; + }; + }> + >(jsonResult); + + const prettyResult = await runCli(['track-changes', 'list', SAMPLE_DOC, '--output', 'pretty']); + expect(prettyResult.code).toBe(0); + expect(prettyResult.stdout).toContain('Revision 0:'); + expect(prettyResult.stdout).toContain('tracked changes'); + + const firstChangeId = jsonEnvelope.data.result.changes?.[0]?.id; + const firstMatchId = jsonEnvelope.data.result.matches?.[0]?.entityId; + if (firstChangeId) { + expect(prettyResult.stdout).toContain(firstChangeId); + } else if (firstMatchId) { + expect(prettyResult.stdout).toContain(firstMatchId); + } + }); + + test('track-changes get maps missing ids to TRACK_CHANGE_NOT_FOUND when capability is available', async () => { + const listResult = await runCli(['track-changes', 'list', SAMPLE_DOC]); + if (listResult.code !== 0) { + const envelope = parseJsonOutput(listResult); + expect(envelope.error.code).toBe('TRACK_CHANGE_COMMAND_UNAVAILABLE'); + return; + } + + const getResult = await runCli(['track-changes', 'get', SAMPLE_DOC, '--id', 
'missing-track-change-id']); + expect(getResult.code).toBe(1); + + const envelope = parseJsonOutput(getResult); + expect(envelope.error.code).toBe('TRACK_CHANGE_NOT_FOUND'); + }); + + test('track-changes accept/reject map missing ids to TRACK_CHANGE_NOT_FOUND when capability is available', async () => { + const listResult = await runCli(['track-changes', 'list', SAMPLE_DOC]); + if (listResult.code !== 0) { + const envelope = parseJsonOutput(listResult); + expect(envelope.error.code).toBe('TRACK_CHANGE_COMMAND_UNAVAILABLE'); + return; + } + + const acceptResult = await runCli([ + 'track-changes', + 'accept', + SAMPLE_DOC, + '--id', + 'missing-track-change-id', + '--out', + join(TEST_DIR, 'track-changes-accept-missing-id.docx'), + ]); + expect(acceptResult.code).toBe(1); + const acceptEnvelope = parseJsonOutput(acceptResult); + expect(acceptEnvelope.error.code).toBe('TRACK_CHANGE_NOT_FOUND'); + + const rejectResult = await runCli([ + 'track-changes', + 'reject', + SAMPLE_DOC, + '--id', + 'missing-track-change-id', + '--out', + join(TEST_DIR, 'track-changes-reject-missing-id.docx'), + ]); + expect(rejectResult.code).toBe(1); + const rejectEnvelope = parseJsonOutput(rejectResult); + expect(rejectEnvelope.error.code).toBe('TRACK_CHANGE_NOT_FOUND'); + }); + + test('comments add writes output file', async () => { + const commentsSource = join(TEST_DIR, 'comments-source.docx'); + const commentsOut = join(TEST_DIR, 'comments-out.docx'); + await copyFile(SAMPLE_DOC, commentsSource); + + const target = await firstTextRange(['find', commentsSource, '--type', 'text', '--pattern', 'Wilde']); + + const result = await runCli([ + 'comments', + 'add', + commentsSource, + '--target-json', + JSON.stringify(target), + '--text', + 'CLI comment', + '--out', + commentsOut, + ]); + + expect(result.code).toBe(0); + await access(commentsOut); + }); + + test('comments add returns TARGET_NOT_FOUND for missing block targets', async () => { + const commentsSource = join(TEST_DIR, 
'comments-missing-target-source.docx'); + const commentsOut = join(TEST_DIR, 'comments-missing-target-out.docx'); + await copyFile(SAMPLE_DOC, commentsSource); + + const target = await firstTextRange(['find', commentsSource, '--type', 'text', '--pattern', 'Wilde']); + const missingTarget: TextRange = { + ...target, + blockId: 'missing-block-id', + }; + + const result = await runCli([ + 'comments', + 'add', + commentsSource, + '--target-json', + JSON.stringify(missingTarget), + '--text', + 'CLI comment', + '--out', + commentsOut, + ]); + + expect(result.code).toBe(1); + const envelope = parseJsonOutput(result); + expect(envelope.error.code).toBe('TARGET_NOT_FOUND'); + }); + + test('comments add without --out returns MISSING_REQUIRED in stateless mode', async () => { + const commentsSource = join(TEST_DIR, 'comments-no-out-source.docx'); + await copyFile(SAMPLE_DOC, commentsSource); + + const target = await firstTextRange(['find', commentsSource, '--type', 'text', '--pattern', 'Wilde']); + + const result = await runCli([ + 'comments', + 'add', + commentsSource, + '--target-json', + JSON.stringify(target), + '--text', + 'CLI comment without out', + ]); + + expect(result.code).toBe(1); + const envelope = parseJsonOutput(result); + expect(envelope.error.code).toBe('MISSING_REQUIRED'); + }); + + test('comments set-active without --out succeeds in stateless mode', async () => { + const source = join(TEST_DIR, 'comments-set-active-no-out-source.docx'); + const addOut = join(TEST_DIR, 'comments-set-active-no-out-added.docx'); + await copyFile(SAMPLE_DOC, source); + + const target = await firstTextRange(['find', source, '--type', 'text', '--pattern', 'Wilde']); + const addResult = await runCli([ + 'comments', + 'add', + source, + '--target-json', + JSON.stringify(target), + '--text', + 'anchor for set-active', + '--out', + addOut, + ]); + expect(addResult.code).toBe(0); + const commentId = firstInsertedEntityId(addResult); + + const setActiveResult = await 
runCli(['comments', 'set-active', addOut, '--clear']); + expect(setActiveResult.code).toBe(0); + }); + + test('comments list pretty includes comment ids for actionable output', async () => { + const openResult = await runCli(['open', SAMPLE_DOC]); + expect(openResult.code).toBe(0); + + const target = await firstTextRange(['find', '--type', 'text', '--pattern', 'Wilde']); + const addResult = await runCli([ + 'comments', + 'add', + '--target-json', + JSON.stringify(target), + '--text', + 'Pretty comments output', + ]); + expect(addResult.code).toBe(0); + const commentId = firstInsertedEntityId(addResult); + + const listPrettyResult = await runCli(['comments', 'list', '--include-resolved', 'false', '--output', 'pretty']); + expect(listPrettyResult.code).toBe(0); + expect(listPrettyResult.stdout).toContain('Revision '); + expect(listPrettyResult.stdout).toContain(commentId); + + const closeResult = await runCli(['close', '--discard']); + expect(closeResult.code).toBe(0); + }); + + test('comments lifecycle commands work in stateful mode', async () => { + const openResult = await runCli(['open', SAMPLE_DOC]); + expect(openResult.code).toBe(0); + + const initialTarget = await firstTextRange(['find', '--type', 'text', '--pattern', 'Wilde']); + + const addResult = await runCli([ + 'comments', + 'add', + '--target-json', + JSON.stringify(initialTarget), + '--text', + 'Lifecycle comment', + ]); + expect(addResult.code).toBe(0); + const commentId = firstInsertedEntityId(addResult); + + const editResult = await runCli(['comments', 'edit', '--id', commentId, '--text', 'Lifecycle comment (edited)']); + expect(editResult.code).toBe(0); + + const replyResult = await runCli(['comments', 'reply', '--parent-id', commentId, '--text', 'Reply from CLI test']); + expect(replyResult.code).toBe(0); + + const moveTarget = await firstTextRange(['find', '--type', 'text', '--pattern', 'overflow']); + const moveResult = await runCli([ + 'comments', + 'move', + '--id', + commentId, + 
'--target-json', + JSON.stringify(moveTarget), + ]); + expect(moveResult.code).toBe(0); + + const getResult = await runCli(['comments', 'get', '--id', commentId]); + expect(getResult.code).toBe(0); + const getEnvelope = parseJsonOutput>(getResult); + expect(getEnvelope.data.comment.commentId).toBe(commentId); + + const listResult = await runCli(['comments', 'list', '--include-resolved', 'false']); + expect(listResult.code).toBe(0); + const listEnvelope = parseJsonOutput>(listResult); + expect(listEnvelope.data.result.total).toBeGreaterThanOrEqual(1); + + const setActiveResult = await runCli(['comments', 'set-active', '--id', commentId]); + expect(setActiveResult.code).toBe(0); + + const goToResult = await runCli(['comments', 'go-to', '--id', commentId]); + expect(goToResult.code).toBe(0); + + const resolveResult = await runCli(['comments', 'resolve', '--id', commentId]); + expect(resolveResult.code).toBe(0); + + const secondaryTarget = initialTarget; + const addSecondResult = await runCli([ + 'comments', + 'add', + '--target-json', + JSON.stringify(secondaryTarget), + '--text', + 'Comment to remove', + ]); + expect(addSecondResult.code).toBe(0); + const removableCommentId = firstInsertedEntityId(addSecondResult); + + const removeResult = await runCli(['comments', 'remove', '--id', removableCommentId]); + expect(removeResult.code).toBe(0); + + const missingGetResult = await runCli(['comments', 'get', '--id', removableCommentId]); + expect(missingGetResult.code).toBe(1); + const missingGetEnvelope = parseJsonOutput(missingGetResult); + expect(missingGetEnvelope.error.code).toBe('TARGET_NOT_FOUND'); + + const clearActiveResult = await runCli(['comments', 'set-active', '--clear']); + expect(clearActiveResult.code).toBe(0); + + const setInternalResult = await runCli(['comments', 'set-internal', '--id', commentId, '--is-internal', 'true']); + expect(setInternalResult.code).toBe(0); + + const closeResult = await runCli(['close', '--discard']); + 
expect(closeResult.code).toBe(0); + }); + + test('stdin doc source is supported', async () => { + const bytes = new Uint8Array(await readFile(SAMPLE_DOC)); + + const result = await runCli(['info', '-'], bytes); + expect(result.code).toBe(0); + + const envelope = parseJsonOutput< + SuccessEnvelope<{ + document: { source: string }; + }> + >(result); + + expect(envelope.ok).toBe(true); + expect(envelope.data.document.source).toBe('stdin'); + }); + + test('open from stdin and save to out path keeps the session active', async () => { + const bytes = new Uint8Array(await readFile(SAMPLE_DOC)); + + const openResult = await runCli(['open', '-'], bytes); + expect(openResult.code).toBe(0); + + const outPath = join(TEST_DIR, 'stdin-open-close.docx'); + const saveResult = await runCli(['save', '--out', outPath]); + expect(saveResult.code).toBe(0); + await access(outPath); + + const statusResult = await runCli(['status']); + expect(statusResult.code).toBe(0); + const statusEnvelope = parseJsonOutput>(statusResult); + expect(statusEnvelope.data.active).toBe(true); + }); + + test('validation errors use structured JSON error envelope', async () => { + const result = await runCli(['find', SAMPLE_DOC, '--query-json', '{"foo":"bar"}']); + expect(result.code).toBe(1); + + const envelope = parseJsonOutput(result); + expect(envelope.ok).toBe(false); + expect(envelope.error.code).toBe('VALIDATION_ERROR'); + expect(typeof envelope.error.message).toBe('string'); + }); + + test('global output flag works when passed after command args', async () => { + const result = await runCli(['find', SAMPLE_DOC, '--type', 'text', '--pattern', 'Wilde', '--output', 'pretty']); + expect(result.code).toBe(0); + expect(result.stdout).toContain('Revision 0:'); + expect(result.stdout).toContain('matches'); + expect(result.stdout).toContain('['); + expect(result.stderr).toBe(''); + }); + + test('stateful open/find/replace/save/close flow works without explicit doc', async () => { + const openResult = await 
runCli(['open', SAMPLE_DOC]); + expect(openResult.code).toBe(0); + + const target = await firstTextRange(['find', '--type', 'text', '--pattern', 'Wilde']); + + const replaceResult = await runCli(['replace', '--target-json', JSON.stringify(target), '--text', 'WILDE_CONTEXT']); + expect(replaceResult.code).toBe(0); + + const statusResult = await runCli(['status']); + expect(statusResult.code).toBe(0); + + const statusEnvelope = parseJsonOutput< + SuccessEnvelope<{ + active: boolean; + dirty: boolean; + document: { revision: number }; + }> + >(statusResult); + + expect(statusEnvelope.data.active).toBe(true); + expect(statusEnvelope.data.dirty).toBe(true); + expect(statusEnvelope.data.document.revision).toBe(1); + + const savedOut = join(TEST_DIR, 'stateful-saved.docx'); + const saveResult = await runCli(['save', '--out', savedOut]); + expect(saveResult.code).toBe(0); + + const statusAfterSave = await runCli(['status']); + expect(statusAfterSave.code).toBe(0); + const statusAfterSaveEnvelope = parseJsonOutput< + SuccessEnvelope<{ + active: boolean; + dirty: boolean; + }> + >(statusAfterSave); + expect(statusAfterSaveEnvelope.data.active).toBe(true); + expect(statusAfterSaveEnvelope.data.dirty).toBe(false); + + const verifyResult = await runCli(['find', savedOut, '--type', 'text', '--pattern', 'WILDE_CONTEXT']); + expect(verifyResult.code).toBe(0); + + const verifyEnvelope = parseJsonOutput>(verifyResult); + expect(verifyEnvelope.data.result.total).toBeGreaterThan(0); + + const closeResult = await runCli(['close']); + expect(closeResult.code).toBe(0); + }); + + test('stateful insert without target uses document-start default', async () => { + const openResult = await runCli(['open', SAMPLE_DOC]); + expect(openResult.code).toBe(0); + + const insertResult = await runCli(['insert', '--text', 'STATEFUL_DEFAULT_INSERT_1597']); + expect(insertResult.code).toBe(0); + + const insertEnvelope = parseJsonOutput< + SuccessEnvelope<{ + target: TextRange; + }> + >(insertResult); + 
expect(insertEnvelope.data.target.range.start).toBe(0); + expect(insertEnvelope.data.target.range.end).toBe(0); + + const verifyResult = await runCli(['find', '--type', 'text', '--pattern', 'STATEFUL_DEFAULT_INSERT_1597']); + expect(verifyResult.code).toBe(0); + const verifyEnvelope = parseJsonOutput>(verifyResult); + expect(verifyEnvelope.data.result.total).toBeGreaterThan(0); + + const closeResult = await runCli(['close', '--discard']); + expect(closeResult.code).toBe(0); + }); + + test('stateful insert keeps success semantics when optional --out export fails', async () => { + const openResult = await runCli(['open', SAMPLE_DOC]); + expect(openResult.code).toBe(0); + + const blockedOutPath = join(TEST_DIR, 'stateful-insert-blocked-output.docx'); + await writeFile(blockedOutPath, 'already-exists'); + + const insertResult = await runCli([ + 'insert', + '--text', + 'STATEFUL_INSERT_EXPORT_FAILURE_1597', + '--out', + blockedOutPath, + ]); + expect(insertResult.code).toBe(0); + + const insertEnvelope = parseJsonOutput< + SuccessEnvelope<{ + receipt: { success: boolean }; + output?: { path: string; byteLength: number }; + }> + >(insertResult); + expect(insertEnvelope.data.receipt.success).toBe(true); + expect(insertEnvelope.data.output).toBeUndefined(); + + const verifyResult = await runCli(['find', '--type', 'text', '--pattern', 'STATEFUL_INSERT_EXPORT_FAILURE_1597']); + expect(verifyResult.code).toBe(0); + const verifyEnvelope = parseJsonOutput>(verifyResult); + expect(verifyEnvelope.data.result.total).toBeGreaterThan(0); + + const statusResult = await runCli(['status']); + expect(statusResult.code).toBe(0); + const statusEnvelope = parseJsonOutput>(statusResult); + expect(statusEnvelope.data.document.revision).toBe(1); + + const closeResult = await runCli(['close', '--discard']); + expect(closeResult.code).toBe(0); + }); + + test('stateful create paragraph keeps success semantics when optional --out export fails', async () => { + const openResult = await 
runCli(['open', SAMPLE_DOC]); + expect(openResult.code).toBe(0); + + const blockedOutPath = join(TEST_DIR, 'stateful-create-blocked-output.docx'); + await writeFile(blockedOutPath, 'already-exists'); + + const createResult = await runCli([ + 'create', + 'paragraph', + '--input-json', + JSON.stringify({ text: 'STATEFUL_CREATE_EXPORT_FAILURE_1597' }), + '--out', + blockedOutPath, + ]); + expect(createResult.code).toBe(0); + + const createEnvelope = parseJsonOutput< + SuccessEnvelope<{ + result: { success: boolean }; + output?: { path: string; byteLength: number }; + }> + >(createResult); + expect(createEnvelope.data.result.success).toBe(true); + expect(createEnvelope.data.output).toBeUndefined(); + + const verifyResult = await runCli(['find', '--type', 'text', '--pattern', 'STATEFUL_CREATE_EXPORT_FAILURE_1597']); + expect(verifyResult.code).toBe(0); + const verifyEnvelope = parseJsonOutput>(verifyResult); + expect(verifyEnvelope.data.result.total).toBeGreaterThan(0); + + const statusResult = await runCli(['status']); + expect(statusResult.code).toBe(0); + const statusEnvelope = parseJsonOutput>(statusResult); + expect(statusEnvelope.data.document.revision).toBe(1); + + const closeResult = await runCli(['close', '--discard']); + expect(closeResult.code).toBe(0); + }); + + test('close requires explicit save or discard when context is dirty', async () => { + await runCli(['open', SAMPLE_DOC]); + + const target = await firstTextRange(['find', '--type', 'text', '--pattern', 'Wilde']); + await runCli(['replace', '--target-json', JSON.stringify(target), '--text', 'WILDE_DIRTY']); + + const closeResult = await runCli(['close']); + expect(closeResult.code).toBe(1); + + const closeEnvelope = parseJsonOutput(closeResult); + expect(closeEnvelope.error.code).toBe('DIRTY_CLOSE_REQUIRES_DECISION'); + + const discardResult = await runCli(['close', '--discard']); + expect(discardResult.code).toBe(0); + }); + + test('open without --session creates new session ids', async () => { + 
const firstOpen = await runCli(['open', SAMPLE_DOC]); + expect(firstOpen.code).toBe(0); + + const firstEnvelope = parseJsonOutput>(firstOpen); + const firstContextId = firstEnvelope.data.contextId; + expect(firstContextId.length).toBeGreaterThan(0); + + const secondOpen = await runCli(['open', SAMPLE_DOC]); + expect(secondOpen.code).toBe(0); + + const secondEnvelope = parseJsonOutput>(secondOpen); + const secondContextId = secondEnvelope.data.contextId; + expect(secondContextId.length).toBeGreaterThan(0); + expect(secondContextId).not.toBe(firstContextId); + + const listResult = await runCli(['session', 'list']); + expect(listResult.code).toBe(0); + const listEnvelope = parseJsonOutput< + SuccessEnvelope<{ + sessions: Array<{ sessionId: string }>; + }> + >(listResult); + expect(listEnvelope.data.sessions.map((item) => item.sessionId)).toEqual( + expect.arrayContaining([firstContextId, secondContextId]), + ); + }); + + test('status and session list include sessionType metadata', async () => { + const openResult = await runCli(['open', SAMPLE_DOC, '--session', 'local-a']); + expect(openResult.code).toBe(0); + + const statusResult = await runCli(['status', '--session', 'local-a']); + expect(statusResult.code).toBe(0); + const statusEnvelope = parseJsonOutput< + SuccessEnvelope<{ + sessionType: string; + }> + >(statusResult); + expect(statusEnvelope.data.sessionType).toBe('local'); + + const listResult = await runCli(['session', 'list']); + expect(listResult.code).toBe(0); + const listEnvelope = parseJsonOutput< + SuccessEnvelope<{ + sessions: Array<{ sessionId: string; sessionType: string }>; + }> + >(listResult); + + const localSession = listEnvelope.data.sessions.find((session) => session.sessionId === 'local-a'); + expect(localSession?.sessionType).toBe('local'); + }); + + test('open rejects unsupported collaboration payload fields', async () => { + const invalidProvider = await runCli([ + 'open', + SAMPLE_DOC, + '--collaboration-json', + JSON.stringify({ 
providerType: 'invalid', url: 'ws://localhost:1234' }), + ]); + expect(invalidProvider.code).toBe(1); + const invalidProviderEnvelope = parseJsonOutput(invalidProvider); + expect(invalidProviderEnvelope.error.code).toBe('VALIDATION_ERROR'); + + const unsupportedToken = await runCli([ + 'open', + SAMPLE_DOC, + '--collaboration-json', + JSON.stringify({ providerType: 'hocuspocus', url: 'ws://localhost:1234', token: 'raw-secret' }), + ]); + expect(unsupportedToken.code).toBe(1); + const unsupportedTokenEnvelope = parseJsonOutput(unsupportedToken); + expect(unsupportedTokenEnvelope.error.code).toBe('VALIDATION_ERROR'); + }); + + test('open with --session is idempotent for the same session id', async () => { + const firstOpen = await runCli(['open', SAMPLE_DOC, '--session', 'draft-a']); + expect(firstOpen.code).toBe(0); + + const secondOpen = await runCli(['open', SAMPLE_DOC, '--session', 'draft-a']); + expect(secondOpen.code).toBe(0); + + const closeResult = await runCli(['close', '--discard', '--session', 'draft-a']); + expect(closeResult.code).toBe(0); + }); + + test('expected revision protects stateful mutate commands', async () => { + await runCli(['open', SAMPLE_DOC]); + + const target = await firstTextRange(['find', '--type', 'text', '--pattern', 'Wilde']); + + const mismatch = await runCli([ + 'replace', + '--target-json', + JSON.stringify(target), + '--text', + 'WILDE_REV', + '--expected-revision', + '1', + ]); + expect(mismatch.code).toBe(1); + + const mismatchEnvelope = parseJsonOutput(mismatch); + expect(mismatchEnvelope.error.code).toBe('REVISION_MISMATCH'); + + const success = await runCli([ + 'replace', + '--target-json', + JSON.stringify(target), + '--text', + 'WILDE_REV', + '--expected-revision', + '0', + ]); + expect(success.code).toBe(0); + + const closeResult = await runCli(['close', '--discard']); + expect(closeResult.code).toBe(0); + }); + + test('session use switches default session', async () => { + const alphaOpen = await runCli(['open', 
SAMPLE_DOC, '--session', 'alpha']); + expect(alphaOpen.code).toBe(0); + + const betaOpen = await runCli(['open', SAMPLE_DOC, '--session', 'beta']); + expect(betaOpen.code).toBe(0); + + const statusBefore = await runCli(['status']); + expect(statusBefore.code).toBe(0); + const statusBeforeEnvelope = parseJsonOutput>(statusBefore); + expect(statusBeforeEnvelope.data.contextId).toBe('beta'); + + const useResult = await runCli(['session', 'use', 'alpha']); + expect(useResult.code).toBe(0); + + const statusAfter = await runCli(['status']); + expect(statusAfter.code).toBe(0); + const statusAfterEnvelope = parseJsonOutput>(statusAfter); + expect(statusAfterEnvelope.data.contextId).toBe('alpha'); + }); + + test('session close closes a specific non-default session', async () => { + await runCli(['open', SAMPLE_DOC, '--session', 'alpha']); + await runCli(['open', SAMPLE_DOC, '--session', 'beta']); + + const closeAlpha = await runCli(['session', 'close', 'alpha', '--discard']); + expect(closeAlpha.code).toBe(0); + + const statusResult = await runCli(['status']); + expect(statusResult.code).toBe(0); + const statusEnvelope = parseJsonOutput>(statusResult); + expect(statusEnvelope.data.contextId).toBe('beta'); + + const useAlpha = await runCli(['session', 'use', 'alpha']); + expect(useAlpha.code).toBe(1); + const useAlphaEnvelope = parseJsonOutput(useAlpha); + expect(useAlphaEnvelope.error.code).toBe('SESSION_NOT_FOUND'); + }); + + test('session save persists a specific session and keeps it open', async () => { + await runCli(['open', SAMPLE_DOC, '--session', 'alpha']); + + const insertResult = await runCli(['insert', '--session', 'alpha', '--text', 'SESSION_SAVE_TOKEN_1597']); + expect(insertResult.code).toBe(0); + + const savedOut = join(TEST_DIR, 'session-save-alpha.docx'); + const sessionSaveResult = await runCli(['session', 'save', 'alpha', '--out', savedOut]); + expect(sessionSaveResult.code).toBe(0); + await access(savedOut); + + const statusResult = await 
runCli(['status', '--session', 'alpha']); + expect(statusResult.code).toBe(0); + const statusEnvelope = parseJsonOutput>(statusResult); + expect(statusEnvelope.data.active).toBe(true); + expect(statusEnvelope.data.dirty).toBe(false); + + const verifyResult = await runCli(['find', savedOut, '--type', 'text', '--pattern', 'SESSION_SAVE_TOKEN_1597']); + expect(verifyResult.code).toBe(0); + const verifyEnvelope = parseJsonOutput>(verifyResult); + expect(verifyEnvelope.data.result.total).toBeGreaterThan(0); + }); + + test('save --in-place detects source drift unless forced', async () => { + const driftSource = join(TEST_DIR, 'drift-source.docx'); + await copyFile(SAMPLE_DOC, driftSource); + + const openResult = await runCli(['open', driftSource]); + expect(openResult.code).toBe(0); + + const sourceBytes = new Uint8Array(await readFile(driftSource)); + sourceBytes[0] = sourceBytes[0] === 0 ? 1 : 0; + await writeFile(driftSource, sourceBytes); + + const saveResult = await runCli(['save', '--in-place']); + expect(saveResult.code).toBe(1); + + const saveEnvelope = parseJsonOutput(saveResult); + expect(saveEnvelope.error.code).toBe('SOURCE_DRIFT_DETECTED'); + + const forcedSave = await runCli(['save', '--in-place', '--force']); + expect(forcedSave.code).toBe(0); + }); + + test('project context mismatch is enforced', async () => { + const openResult = await runCli(['open', SAMPLE_DOC]); + expect(openResult.code).toBe(0); + + const openEnvelope = parseJsonOutput>(openResult); + const metadataPath = join(STATE_DIR, 'contexts', openEnvelope.data.contextId, 'metadata.json'); + + const metadataRaw = await readFile(metadataPath, 'utf8'); + const metadata = JSON.parse(metadataRaw) as Record; + metadata.projectRoot = '/tmp/not-this-project'; + await writeFile(metadataPath, `${JSON.stringify(metadata, null, 2)}\n`, 'utf8'); + + const findResult = await runCli(['find', '--type', 'text', '--pattern', 'Wilde']); + expect(findResult.code).toBe(1); + + const findEnvelope = 
parseJsonOutput(findResult); + expect(findEnvelope.error.code).toBe('PROJECT_CONTEXT_MISMATCH'); + }); +}); diff --git a/apps/cli/src/__tests__/collab-session-pool.test.ts b/apps/cli/src/__tests__/collab-session-pool.test.ts new file mode 100644 index 0000000000..eb6a99cb69 --- /dev/null +++ b/apps/cli/src/__tests__/collab-session-pool.test.ts @@ -0,0 +1,173 @@ +import { describe, expect, test } from 'bun:test'; +import { InMemoryCollaborationSessionPool } from '../host/collab-session-pool'; +import type { CollaborationProfile } from '../lib/collaboration'; +import type { OpenedDocument } from '../lib/document'; + +const NOOP = () => undefined; + +const TEST_PROFILE: CollaborationProfile = { + providerType: 'hocuspocus', + url: 'ws://example.test', + documentId: 'doc-1', +}; + +const TEST_IO = { + now: () => Date.now(), + readStdinBytes: async () => new Uint8Array(), + stdout: NOOP, + stderr: NOOP, +}; + +function createOpened(disposeCounter: { count: number }): OpenedDocument { + return { + editor: {} as OpenedDocument['editor'], + meta: { + source: 'path', + path: '/tmp/working.docx', + byteLength: 1, + }, + dispose: () => { + disposeCounter.count += 1; + }, + }; +} + +describe('InMemoryCollaborationSessionPool', () => { + test('acquire reuses matching session handles', async () => { + const disposeCounter = { count: 0 }; + let openCount = 0; + + const pool = new InMemoryCollaborationSessionPool({ + openCollaborative: async () => { + openCount += 1; + return createOpened(disposeCounter); + }, + now: () => 1, + }); + + const metadata = { + contextId: 's1', + sessionType: 'collab' as const, + collaboration: TEST_PROFILE, + sourcePath: '/tmp/source.docx', + workingDocPath: '/tmp/working.docx', + }; + + const first = await pool.acquire('s1', '/tmp/working.docx', metadata, TEST_IO); + const second = await pool.acquire('s1', '/tmp/working.docx', metadata, TEST_IO); + + expect(openCount).toBe(1); + first.dispose(); + second.dispose(); + 
expect(disposeCounter.count).toBe(0); + + await pool.disposeSession('s1'); + expect(disposeCounter.count).toBe(1); + }); + + test('acquire recreates stale handles on fingerprint mismatch', async () => { + const disposeCounter = { count: 0 }; + let openCount = 0; + + const pool = new InMemoryCollaborationSessionPool({ + openCollaborative: async () => { + openCount += 1; + return createOpened(disposeCounter); + }, + now: () => 1, + }); + + const metadataA = { + contextId: 's1', + sessionType: 'collab' as const, + collaboration: TEST_PROFILE, + sourcePath: '/tmp/source-a.docx', + workingDocPath: '/tmp/working.docx', + }; + + const metadataB = { + ...metadataA, + collaboration: { + ...TEST_PROFILE, + documentId: 'doc-2', + }, + }; + + await pool.acquire('s1', '/tmp/working.docx', metadataA, TEST_IO); + await pool.acquire('s1', '/tmp/working.docx', metadataB, TEST_IO); + + expect(openCount).toBe(2); + expect(disposeCounter.count).toBe(1); + + await pool.disposeAll(); + expect(disposeCounter.count).toBe(2); + }); + + test('acquire reuses handle when only source path changes', async () => { + const disposeCounter = { count: 0 }; + let openCount = 0; + + const pool = new InMemoryCollaborationSessionPool({ + openCollaborative: async () => { + openCount += 1; + return createOpened(disposeCounter); + }, + now: () => 1, + }); + + const metadataA = { + contextId: 's1', + sessionType: 'collab' as const, + collaboration: TEST_PROFILE, + sourcePath: '/tmp/source-a.docx', + workingDocPath: '/tmp/working.docx', + }; + + const metadataB = { + ...metadataA, + sourcePath: '/tmp/source-b.docx', + }; + + await pool.acquire('s1', '/tmp/working.docx', metadataA, TEST_IO); + await pool.acquire('s1', '/tmp/working.docx', metadataB, TEST_IO); + + expect(openCount).toBe(1); + expect(disposeCounter.count).toBe(0); + + await pool.disposeAll(); + expect(disposeCounter.count).toBe(1); + }); + + test('adoptFromOpen replaces existing handle', async () => { + const disposeCounter = { count: 0 }; + 
let openCount = 0; + + const pool = new InMemoryCollaborationSessionPool({ + openCollaborative: async () => { + openCount += 1; + return createOpened(disposeCounter); + }, + now: () => 1, + }); + + const metadata = { + contextId: 's1', + sessionType: 'collab' as const, + collaboration: TEST_PROFILE, + sourcePath: '/tmp/source.docx', + workingDocPath: '/tmp/working.docx', + }; + + await pool.acquire('s1', '/tmp/working.docx', metadata, TEST_IO); + + const adoptedDisposeCounter = { count: 0 }; + const adopted = createOpened(adoptedDisposeCounter); + await pool.adoptFromOpen('s1', adopted, metadata, TEST_IO); + + expect(openCount).toBe(1); + expect(disposeCounter.count).toBe(1); + + await pool.disposeSession('s1'); + expect(adoptedDisposeCounter.count).toBe(1); + }); +}); diff --git a/apps/cli/src/__tests__/commands.test.ts b/apps/cli/src/__tests__/commands.test.ts deleted file mode 100644 index 243d88a715..0000000000 --- a/apps/cli/src/__tests__/commands.test.ts +++ /dev/null @@ -1,79 +0,0 @@ -import { describe, expect, test, beforeAll, afterAll } from 'bun:test'; -import { copyFile, rm, mkdir } from 'node:fs/promises'; -import { join } from 'node:path'; -import { read } from '../commands/read'; -import { search } from '../commands/search'; -import { replace } from '../commands/replace'; - -const TEST_DIR = join(import.meta.dir, 'fixtures'); -const SAMPLE_DOC = join(TEST_DIR, 'sample.docx'); - -describe('CLI Commands', () => { - beforeAll(async () => { - await mkdir(TEST_DIR, { recursive: true }); - // Copy a test document to our fixtures folder - const sourceDoc = join(import.meta.dir, '../../../../e2e-tests/test-data/basic-documents/advanced-text.docx'); - await copyFile(sourceDoc, SAMPLE_DOC); - }); - - afterAll(async () => { - await rm(TEST_DIR, { recursive: true, force: true }); - }); - - describe('read', () => { - test('reads document content', async () => { - const result = await read(SAMPLE_DOC); - - expect(result).toHaveProperty('path', SAMPLE_DOC); - 
expect(result).toHaveProperty('content'); - expect(typeof result.content).toBe('string'); - expect(result.content.length).toBeGreaterThan(0); - }); - }); - - describe('search', () => { - test('finds text in document', async () => { - const result = await search('the', [SAMPLE_DOC]); - - expect(result).toHaveProperty('totalMatches'); - expect(result).toHaveProperty('files'); - expect(result.files).toHaveLength(1); - expect(result.files[0].path).toBe(SAMPLE_DOC); - }); - - test('returns empty for non-matching pattern', async () => { - const result = await search('xyz123nonexistent', [SAMPLE_DOC]); - - expect(result.totalMatches).toBe(0); - expect(result.files).toHaveLength(0); - }); - }); - - describe('replace', () => { - test('replaces text in document', async () => { - // Create a copy for replace test - const replaceCopy = join(TEST_DIR, 'replace-test.docx'); - await copyFile(SAMPLE_DOC, replaceCopy); - - // First verify the text exists - const beforeSearch = await search('the', [replaceCopy]); - const beforeCount = beforeSearch.totalMatches; - - if (beforeCount > 0) { - // Replace and verify - const result = await replace('the', 'THE', [replaceCopy]); - - expect(result).toHaveProperty('totalReplacements'); - expect(result.totalReplacements).toBe(beforeCount); - expect(result.files).toHaveLength(1); - expect(result.files[0].replacements).toBe(beforeCount); - - // Verify the replacement happened - const afterSearch = await search('THE', [replaceCopy]); - expect(afterSearch.totalMatches).toBe(beforeCount); - } - - await rm(replaceCopy); - }); - }); -}); diff --git a/apps/cli/src/__tests__/conformance/harness.ts b/apps/cli/src/__tests__/conformance/harness.ts new file mode 100644 index 0000000000..c28b351168 --- /dev/null +++ b/apps/cli/src/__tests__/conformance/harness.ts @@ -0,0 +1,362 @@ +import { access, copyFile, mkdtemp, mkdir, rm } from 'node:fs/promises'; +import { tmpdir } from 'node:os'; +import path from 'node:path'; +import { run } from '../../index'; + 
+type RunResult = { + code: number; + stdout: string; + stderr: string; +}; + +export type SuccessEnvelope = { + ok: true; + command: string; + data: unknown; + meta: { + elapsedMs: number; + }; +}; + +export type ErrorEnvelope = { + ok: false; + error: { + code: string; + message: string; + details?: unknown; + }; + meta: { + elapsedMs: number; + }; +}; + +export type CommandEnvelope = SuccessEnvelope | ErrorEnvelope; + +export type TextRangeAddress = { + kind: 'text'; + blockId: string; + range: { + start: number; + end: number; + }; +}; + +export type ListItemAddress = { + kind: 'block'; + nodeType: 'listItem'; + nodeId: string; +}; + +const REPO_ROOT = path.resolve(import.meta.dir, '../../../../../'); +const SOURCE_DOC = path.join(REPO_ROOT, 'e2e-tests/test-data/basic-documents/advanced-text.docx'); +const LIST_SOURCE_DOC_CANDIDATES = [ + path.join(REPO_ROOT, 'devtools/document-api-tests/fixtures/matrix-list.input.docx'), + path.join(REPO_ROOT, 'e2e-tests/test-data/basic-documents/lists-complex-items.docx'), +]; + +let resolvedListSourceDoc: string | null = null; + +async function resolveListSourceDoc(): Promise { + if (resolvedListSourceDoc != null) return resolvedListSourceDoc; + + for (const candidate of LIST_SOURCE_DOC_CANDIDATES) { + try { + await access(candidate); + resolvedListSourceDoc = candidate; + return candidate; + } catch { + // try next candidate + } + } + + throw new Error(`No list fixture found. 
Tried: ${LIST_SOURCE_DOC_CANDIDATES.join(', ')}`); +} + +function parseEnvelope(raw: RunResult): CommandEnvelope { + const source = raw.stdout.trim() || raw.stderr.trim(); + if (!source) { + throw new Error('No CLI envelope output found.'); + } + + try { + return JSON.parse(source) as CommandEnvelope; + } catch { + const lines = source.split(/\r?\n/); + for (let index = 0; index < lines.length; index += 1) { + const candidate = lines.slice(index).join('\n').trim(); + if (!candidate.startsWith('{')) continue; + try { + return JSON.parse(candidate) as CommandEnvelope; + } catch { + // continue + } + } + throw new Error(`Failed to parse CLI JSON envelope:\n${source}`); + } +} + +function assertSuccessEnvelope(envelope: CommandEnvelope): asserts envelope is SuccessEnvelope { + if (envelope.ok !== true) { + throw new Error(`Expected success envelope, got error: ${envelope.error.code} ${envelope.error.message}`); + } +} + +export class ConformanceHarness { + readonly rootDir: string; + readonly docsDir: string; + readonly statesDir: string; + #counter = 0; + + private constructor(rootDir: string) { + this.rootDir = rootDir; + this.docsDir = path.join(rootDir, 'docs'); + this.statesDir = path.join(rootDir, 'states'); + } + + static async create(): Promise { + const rootDir = await mkdtemp(path.join(tmpdir(), 'superdoc-cli-conformance-')); + const harness = new ConformanceHarness(rootDir); + await mkdir(harness.docsDir, { recursive: true }); + await mkdir(harness.statesDir, { recursive: true }); + return harness; + } + + async cleanup(): Promise { + await rm(this.rootDir, { recursive: true, force: true }); + } + + async createStateDir(label: string): Promise { + const dir = path.join(this.statesDir, `${this.nextId()}-${label}`); + await mkdir(dir, { recursive: true }); + return dir; + } + + async copyFixtureDoc(label: string): Promise { + const filePath = path.join(this.docsDir, `${this.nextId()}-${label}.docx`); + await copyFile(SOURCE_DOC, filePath); + return filePath; + 
} + + async copyListFixtureDoc(label: string): Promise { + const filePath = path.join(this.docsDir, `${this.nextId()}-${label}.docx`); + await copyFile(await resolveListSourceDoc(), filePath); + return filePath; + } + + createOutputPath(label: string): string { + return path.join(this.docsDir, `${this.nextId()}-${label}.docx`); + } + + async runCli( + args: string[], + stateDir: string, + stdinBytes?: Uint8Array, + ): Promise<{ result: RunResult; envelope: CommandEnvelope }> { + const previousStateDir = process.env.SUPERDOC_CLI_STATE_DIR; + process.env.SUPERDOC_CLI_STATE_DIR = stateDir; + + let stdout = ''; + let stderr = ''; + try { + const code = await run(args, { + stdout(message: string) { + stdout += message; + }, + stderr(message: string) { + stderr += message; + }, + async readStdinBytes() { + return stdinBytes ?? new Uint8Array(); + }, + }); + + const result: RunResult = { code, stdout, stderr }; + return { result, envelope: parseEnvelope(result) }; + } finally { + if (previousStateDir == null) { + delete process.env.SUPERDOC_CLI_STATE_DIR; + } else { + process.env.SUPERDOC_CLI_STATE_DIR = previousStateDir; + } + } + } + + async firstTextRange(docPath: string, stateDir: string, pattern = 'Wilde'): Promise { + const { result, envelope } = await this.runCli( + ['find', docPath, '--type', 'text', '--pattern', pattern, '--limit', '1'], + stateDir, + ); + if (result.code !== 0) { + throw new Error(`Unable to resolve first text range for ${docPath}`); + } + + assertSuccessEnvelope(envelope); + const data = envelope.data as { + result?: { + context?: Array<{ + textRanges?: TextRangeAddress[]; + }>; + }; + }; + const range = data.result?.context?.[0]?.textRanges?.[0]; + if (!range) { + throw new Error(`No text range found for pattern "${pattern}" in ${docPath}`); + } + return range; + } + + async firstBlockMatch( + docPath: string, + stateDir: string, + pattern = 'Wilde', + ): Promise<{ nodeId: string; nodeType: string; address: Record }> { + const { result, 
envelope } = await this.runCli( + ['find', docPath, '--type', 'text', '--pattern', pattern, '--limit', '1'], + stateDir, + ); + if (result.code !== 0) { + throw new Error(`Unable to resolve first block match for ${docPath}`); + } + + assertSuccessEnvelope(envelope); + const data = envelope.data as { + result?: { + matches?: Array>; + }; + }; + const match = data.result?.matches?.find( + (entry) => entry.kind === 'block' && typeof entry.nodeId === 'string' && typeof entry.nodeType === 'string', + ); + if (!match) { + throw new Error(`No block match found for pattern "${pattern}" in ${docPath}`); + } + return { + nodeId: match.nodeId as string, + nodeType: match.nodeType as string, + address: match, + }; + } + + async firstListItemAddress(docPath: string, stateDir: string): Promise { + const { result, envelope } = await this.runCli(['lists', 'list', docPath, '--limit', '1'], stateDir); + if (result.code !== 0) { + throw new Error(`Unable to resolve first list item for ${docPath}`); + } + + assertSuccessEnvelope(envelope); + const data = envelope.data as { + result?: { + matches?: ListItemAddress[]; + }; + }; + const address = data.result?.matches?.[0]; + if (!address) { + throw new Error(`No list item address found in ${docPath}`); + } + return address; + } + + async addCommentFixture( + stateDir: string, + label: string, + ): Promise<{ docPath: string; commentId: string; target: TextRangeAddress }> { + const sourceDoc = await this.copyFixtureDoc(`${label}-source`); + const target = await this.firstTextRange(sourceDoc, stateDir); + const outDoc = this.createOutputPath(`${label}-with-comment`); + + const { result, envelope } = await this.runCli( + [ + 'comments', + 'add', + sourceDoc, + '--target-json', + JSON.stringify(target), + '--text', + 'Conformance seed comment', + '--out', + outDoc, + ], + stateDir, + ); + if (result.code !== 0) { + throw new Error(`Failed to create comment fixture for ${label}`); + } + + assertSuccessEnvelope(envelope); + const data = 
envelope.data as { + receipt?: { + inserted?: Array<{ entityId?: string }>; + }; + }; + const commentId = data.receipt?.inserted?.[0]?.entityId; + if (!commentId) { + throw new Error(`Comment fixture did not return an inserted comment id for ${label}`); + } + + return { docPath: outDoc, commentId, target }; + } + + async addTrackedChangeFixture( + stateDir: string, + label: string, + ): Promise<{ docPath: string; changeId: string; target: TextRangeAddress }> { + const sourceDoc = await this.copyFixtureDoc(`${label}-source`); + const target = await this.firstTextRange(sourceDoc, stateDir); + const collapsedTarget: TextRangeAddress = { + ...target, + range: { start: target.range.start, end: target.range.start }, + }; + const outDoc = this.createOutputPath(`${label}-with-tracked-change`); + + const insert = await this.runCli( + [ + 'insert', + sourceDoc, + '--target-json', + JSON.stringify(collapsedTarget), + '--text', + 'TRACKED_CONFORMANCE_TOKEN', + '--change-mode', + 'tracked', + '--out', + outDoc, + ], + stateDir, + ); + if (insert.result.code !== 0) { + throw new Error(`Failed to create tracked-change fixture for ${label}`); + } + + const list = await this.runCli(['track-changes', 'list', outDoc, '--limit', '1'], stateDir); + if (list.result.code !== 0) { + throw new Error(`Failed to list tracked changes for fixture ${label}`); + } + assertSuccessEnvelope(list.envelope); + const matches = + (list.envelope.data as { result?: { matches?: Array<{ entityId?: string }> } }).result?.matches ?? 
[]; + const changeId = matches[0]?.entityId; + if (!changeId) { + throw new Error(`Tracked-change fixture did not produce a tracked change id for ${label}`); + } + + return { docPath: outDoc, changeId, target: collapsedTarget }; + } + + async openSessionFixture( + stateDir: string, + label: string, + sessionId: string, + ): Promise<{ sessionId: string; docPath: string }> { + const docPath = await this.copyFixtureDoc(`${label}-source`); + const open = await this.runCli(['open', docPath, '--session', sessionId], stateDir); + if (open.result.code !== 0) { + throw new Error(`Failed to open session fixture ${sessionId}`); + } + return { sessionId, docPath }; + } + + nextId(): string { + this.#counter += 1; + return String(this.#counter).padStart(4, '0'); + } +} diff --git a/apps/cli/src/__tests__/conformance/scenarios.ts b/apps/cli/src/__tests__/conformance/scenarios.ts new file mode 100644 index 0000000000..3ec4687db3 --- /dev/null +++ b/apps/cli/src/__tests__/conformance/scenarios.ts @@ -0,0 +1,671 @@ +import type { CliOperationId } from '../../cli'; +import { CLI_OPERATION_COMMAND_KEYS } from '../../cli'; +import type { ConformanceHarness } from './harness'; + +export type ScenarioInvocation = { + stateDir: string; + args: string[]; + stdinBytes?: Uint8Array; +}; + +export type OperationScenario = { + operationId: CliOperationId; + success: (harness: ConformanceHarness) => Promise; + failure: (harness: ConformanceHarness) => Promise; + expectedFailureCodes: string[]; +}; + +function commandTokens(operationId: CliOperationId): string[] { + const key = CLI_OPERATION_COMMAND_KEYS[operationId]; + return key.split(' '); +} + +function genericInvalidArgumentFailure(operationId: CliOperationId) { + return async (harness: ConformanceHarness): Promise => ({ + stateDir: await harness.createStateDir(`${operationId}-failure`), + args: [...commandTokens(operationId), '--invalid-flag-for-conformance'], + }); +} + +export const SUCCESS_SCENARIOS = { + 'doc.open': async (harness: 
ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-open-success'); + const docPath = await harness.copyFixtureDoc('doc-open'); + return { + stateDir, + args: ['open', docPath, '--session', 'open-success-session'], + }; + }, + 'doc.status': async (harness: ConformanceHarness): Promise => ({ + stateDir: await harness.createStateDir('doc-status-success'), + args: ['status'], + }), + 'doc.save': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-save-success'); + await harness.openSessionFixture(stateDir, 'doc-save', 'doc-save-session'); + return { + stateDir, + args: ['save', '--session', 'doc-save-session', '--out', harness.createOutputPath('doc-save-output')], + }; + }, + 'doc.close': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-close-success'); + await harness.openSessionFixture(stateDir, 'doc-close', 'doc-close-session'); + return { + stateDir, + args: ['close', '--session', 'doc-close-session', '--discard'], + }; + }, + 'doc.info': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-info-success'); + const docPath = await harness.copyFixtureDoc('doc-info'); + return { stateDir, args: ['info', docPath] }; + }, + 'doc.describe': async (harness: ConformanceHarness): Promise => ({ + stateDir: await harness.createStateDir('doc-describe-success'), + args: ['describe'], + }), + 'doc.describeCommand': async (harness: ConformanceHarness): Promise => ({ + stateDir: await harness.createStateDir('doc-describe-command-success'), + args: ['describe', 'command', 'doc.find'], + }), + 'doc.find': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-find-success'); + const docPath = await harness.copyFixtureDoc('doc-find'); + return { stateDir, args: ['find', docPath, '--type', 'text', '--pattern', 'Wilde', '--limit', '1'] }; + }, + 
'doc.getNode': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-get-node-success'); + const docPath = await harness.copyFixtureDoc('doc-get-node'); + const { address } = await harness.firstBlockMatch(docPath, stateDir); + return { + stateDir, + args: ['get-node', docPath, '--address-json', JSON.stringify(address)], + }; + }, + 'doc.getNodeById': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-get-node-by-id-success'); + const docPath = await harness.copyFixtureDoc('doc-get-node-by-id'); + const match = await harness.firstBlockMatch(docPath, stateDir); + return { + stateDir, + args: ['get-node-by-id', docPath, '--id', match.nodeId, '--node-type', match.nodeType], + }; + }, + 'doc.comments.add': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-comments-add-success'); + const docPath = await harness.copyFixtureDoc('doc-comments-add'); + const target = await harness.firstTextRange(docPath, stateDir); + return { + stateDir, + args: [ + 'comments', + 'add', + docPath, + '--target-json', + JSON.stringify(target), + '--text', + 'Conformance add comment', + '--out', + harness.createOutputPath('doc-comments-add-output'), + ], + }; + }, + 'doc.comments.edit': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-comments-edit-success'); + const fixture = await harness.addCommentFixture(stateDir, 'doc-comments-edit'); + return { + stateDir, + args: [ + 'comments', + 'edit', + fixture.docPath, + '--id', + fixture.commentId, + '--text', + 'Conformance edited comment', + '--out', + harness.createOutputPath('doc-comments-edit-output'), + ], + }; + }, + 'doc.comments.reply': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-comments-reply-success'); + const fixture = await harness.addCommentFixture(stateDir, 
'doc-comments-reply'); + return { + stateDir, + args: [ + 'comments', + 'reply', + fixture.docPath, + '--parent-id', + fixture.commentId, + '--text', + 'Conformance reply', + '--out', + harness.createOutputPath('doc-comments-reply-output'), + ], + }; + }, + 'doc.comments.move': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-comments-move-success'); + const fixture = await harness.addCommentFixture(stateDir, 'doc-comments-move'); + const moveTarget = await harness.firstTextRange(fixture.docPath, stateDir, 'overflow'); + return { + stateDir, + args: [ + 'comments', + 'move', + fixture.docPath, + '--id', + fixture.commentId, + '--target-json', + JSON.stringify(moveTarget), + '--out', + harness.createOutputPath('doc-comments-move-output'), + ], + }; + }, + 'doc.comments.resolve': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-comments-resolve-success'); + const fixture = await harness.addCommentFixture(stateDir, 'doc-comments-resolve'); + return { + stateDir, + args: [ + 'comments', + 'resolve', + fixture.docPath, + '--id', + fixture.commentId, + '--out', + harness.createOutputPath('doc-comments-resolve-output'), + ], + }; + }, + 'doc.comments.remove': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-comments-remove-success'); + const fixture = await harness.addCommentFixture(stateDir, 'doc-comments-remove'); + return { + stateDir, + args: [ + 'comments', + 'remove', + fixture.docPath, + '--id', + fixture.commentId, + '--out', + harness.createOutputPath('doc-comments-remove-output'), + ], + }; + }, + 'doc.comments.setInternal': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-comments-set-internal-success'); + const fixture = await harness.addCommentFixture(stateDir, 'doc-comments-set-internal'); + return { + stateDir, + args: [ + 'comments', + 
'set-internal', + fixture.docPath, + '--id', + fixture.commentId, + '--is-internal', + 'true', + '--out', + harness.createOutputPath('doc-comments-set-internal-output'), + ], + }; + }, + 'doc.comments.setActive': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-comments-set-active-success'); + const fixture = await harness.addCommentFixture(stateDir, 'doc-comments-set-active'); + return { + stateDir, + args: ['comments', 'set-active', fixture.docPath, '--id', fixture.commentId], + }; + }, + 'doc.comments.goTo': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-comments-go-to-success'); + const fixture = await harness.addCommentFixture(stateDir, 'doc-comments-go-to'); + return { + stateDir, + args: ['comments', 'go-to', fixture.docPath, '--id', fixture.commentId], + }; + }, + 'doc.comments.get': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-comments-get-success'); + const fixture = await harness.addCommentFixture(stateDir, 'doc-comments-get'); + return { + stateDir, + args: ['comments', 'get', fixture.docPath, '--id', fixture.commentId], + }; + }, + 'doc.comments.list': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-comments-list-success'); + const fixture = await harness.addCommentFixture(stateDir, 'doc-comments-list'); + return { + stateDir, + args: ['comments', 'list', fixture.docPath, '--include-resolved', 'false'], + }; + }, + 'doc.create.paragraph': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-create-paragraph-success'); + const docPath = await harness.copyFixtureDoc('doc-create-paragraph'); + return { + stateDir, + args: [ + 'create', + 'paragraph', + docPath, + '--input-json', + JSON.stringify({ text: 'Conformance paragraph text' }), + '--out', + 
harness.createOutputPath('doc-create-paragraph-output'), + ], + }; + }, + 'doc.lists.list': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-lists-list-success'); + const docPath = await harness.copyListFixtureDoc('doc-lists-list'); + return { + stateDir, + args: ['lists', 'list', docPath, '--limit', '10'], + }; + }, + 'doc.lists.get': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-lists-get-success'); + const docPath = await harness.copyListFixtureDoc('doc-lists-get'); + const address = await harness.firstListItemAddress(docPath, stateDir); + return { + stateDir, + args: ['lists', 'get', docPath, '--address-json', JSON.stringify(address)], + }; + }, + 'doc.lists.insert': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-lists-insert-success'); + const docPath = await harness.copyListFixtureDoc('doc-lists-insert'); + const target = await harness.firstListItemAddress(docPath, stateDir); + return { + stateDir, + args: [ + 'lists', + 'insert', + docPath, + '--target-json', + JSON.stringify(target), + '--position', + 'after', + '--text', + 'CONFORMANCE_LIST_INSERT', + '--out', + harness.createOutputPath('doc-lists-insert-output'), + ], + }; + }, + 'doc.lists.setType': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-lists-set-type-success'); + const docPath = await harness.copyListFixtureDoc('doc-lists-set-type'); + const target = await harness.firstListItemAddress(docPath, stateDir); + const getResult = await harness.runCli( + ['lists', 'get', docPath, '--address-json', JSON.stringify(target)], + stateDir, + ); + if (getResult.result.code !== 0 || getResult.envelope.ok !== true) { + throw new Error('Failed to resolve list item kind for set-type conformance scenario.'); + } + const currentKind = (getResult.envelope.data as { item?: { kind?: string } 
}).item?.kind; + const requestedKind = currentKind === 'ordered' ? 'bullet' : 'ordered'; + + return { + stateDir, + args: [ + 'lists', + 'set-type', + docPath, + '--target-json', + JSON.stringify(target), + '--kind', + requestedKind, + '--out', + harness.createOutputPath('doc-lists-set-type-output'), + ], + }; + }, + 'doc.lists.indent': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-lists-indent-success'); + const docPath = await harness.copyListFixtureDoc('doc-lists-indent'); + const target = await harness.firstListItemAddress(docPath, stateDir); + return { + stateDir, + args: [ + 'lists', + 'indent', + docPath, + '--target-json', + JSON.stringify(target), + '--out', + harness.createOutputPath('doc-lists-indent-output'), + ], + }; + }, + 'doc.lists.outdent': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-lists-outdent-success'); + const docPath = await harness.copyListFixtureDoc('doc-lists-outdent'); + const target = await harness.firstListItemAddress(docPath, stateDir); + const prepOut = harness.createOutputPath('doc-lists-outdent-prepared'); + const prep = await harness.runCli( + ['lists', 'indent', docPath, '--target-json', JSON.stringify(target), '--out', prepOut], + stateDir, + ); + if (prep.result.code !== 0) { + throw new Error('Failed to prepare outdent conformance fixture via lists indent.'); + } + + return { + stateDir, + args: [ + 'lists', + 'outdent', + prepOut, + '--target-json', + JSON.stringify(target), + '--out', + harness.createOutputPath('doc-lists-outdent-output'), + ], + }; + }, + 'doc.lists.restart': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-lists-restart-success'); + const docPath = await harness.copyListFixtureDoc('doc-lists-restart'); + const listed = await harness.runCli(['lists', 'list', docPath, '--limit', '50'], stateDir); + if (listed.result.code !== 0 || 
listed.envelope.ok !== true) { + throw new Error('Failed to list list items for restart conformance scenario.'); + } + const restartTarget = ( + ( + listed.envelope.data as { + result?: { items?: Array<{ ordinal?: number; address?: Record }> }; + } + ).result?.items ?? [] + ).find((item) => typeof item.ordinal === 'number' && item.ordinal > 1)?.address; + if (!restartTarget) { + throw new Error('Restart conformance scenario requires a list item with ordinal > 1.'); + } + + return { + stateDir, + args: [ + 'lists', + 'restart', + docPath, + '--target-json', + JSON.stringify(restartTarget), + '--out', + harness.createOutputPath('doc-lists-restart-output'), + ], + }; + }, + 'doc.lists.exit': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-lists-exit-success'); + const docPath = await harness.copyListFixtureDoc('doc-lists-exit'); + const target = await harness.firstListItemAddress(docPath, stateDir); + return { + stateDir, + args: [ + 'lists', + 'exit', + docPath, + '--target-json', + JSON.stringify(target), + '--out', + harness.createOutputPath('doc-lists-exit-output'), + ], + }; + }, + 'doc.insert': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-insert-success'); + const docPath = await harness.copyFixtureDoc('doc-insert'); + const target = await harness.firstTextRange(docPath, stateDir); + const collapsed = { ...target, range: { start: target.range.start, end: target.range.start } }; + return { + stateDir, + args: [ + 'insert', + docPath, + '--target-json', + JSON.stringify(collapsed), + '--text', + 'CONFORMANCE_INSERT', + '--out', + harness.createOutputPath('doc-insert-output'), + ], + }; + }, + 'doc.replace': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-replace-success'); + const docPath = await harness.copyFixtureDoc('doc-replace'); + const target = await harness.firstTextRange(docPath, stateDir); 
+ return { + stateDir, + args: [ + 'replace', + docPath, + '--target-json', + JSON.stringify(target), + '--text', + 'CONFORMANCE_REPLACE', + '--out', + harness.createOutputPath('doc-replace-output'), + ], + }; + }, + 'doc.delete': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-delete-success'); + const docPath = await harness.copyFixtureDoc('doc-delete'); + const target = await harness.firstTextRange(docPath, stateDir); + return { + stateDir, + args: [ + 'delete', + docPath, + '--target-json', + JSON.stringify(target), + '--out', + harness.createOutputPath('doc-delete-output'), + ], + }; + }, + 'doc.format.bold': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-format-bold-success'); + const docPath = await harness.copyFixtureDoc('doc-format-bold'); + const target = await harness.firstTextRange(docPath, stateDir); + return { + stateDir, + args: [ + 'format', + 'bold', + docPath, + '--target-json', + JSON.stringify(target), + '--out', + harness.createOutputPath('doc-format-bold-output'), + ], + }; + }, + 'doc.format.italic': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-format-italic-success'); + const docPath = await harness.copyFixtureDoc('doc-format-italic'); + const target = await harness.firstTextRange(docPath, stateDir); + return { + stateDir, + args: [ + 'format', + 'italic', + docPath, + '--target-json', + JSON.stringify(target), + '--out', + harness.createOutputPath('doc-format-italic-output'), + ], + }; + }, + 'doc.format.underline': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-format-underline-success'); + const docPath = await harness.copyFixtureDoc('doc-format-underline'); + const target = await harness.firstTextRange(docPath, stateDir); + return { + stateDir, + args: [ + 'format', + 'underline', + docPath, + '--target-json', + 
JSON.stringify(target), + '--out', + harness.createOutputPath('doc-format-underline-output'), + ], + }; + }, + 'doc.format.strikethrough': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-format-strikethrough-success'); + const docPath = await harness.copyFixtureDoc('doc-format-strikethrough'); + const target = await harness.firstTextRange(docPath, stateDir); + return { + stateDir, + args: [ + 'format', + 'strikethrough', + docPath, + '--target-json', + JSON.stringify(target), + '--out', + harness.createOutputPath('doc-format-strikethrough-output'), + ], + }; + }, + 'doc.trackChanges.list': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-track-changes-list-success'); + const fixture = await harness.addTrackedChangeFixture(stateDir, 'doc-track-changes-list'); + return { + stateDir, + args: ['track-changes', 'list', fixture.docPath, '--limit', '10'], + }; + }, + 'doc.trackChanges.get': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-track-changes-get-success'); + const fixture = await harness.addTrackedChangeFixture(stateDir, 'doc-track-changes-get'); + return { + stateDir, + args: ['track-changes', 'get', fixture.docPath, '--id', fixture.changeId], + }; + }, + 'doc.trackChanges.accept': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-track-changes-accept-success'); + const fixture = await harness.addTrackedChangeFixture(stateDir, 'doc-track-changes-accept'); + return { + stateDir, + args: [ + 'track-changes', + 'accept', + fixture.docPath, + '--id', + fixture.changeId, + '--out', + harness.createOutputPath('doc-track-changes-accept-output'), + ], + }; + }, + 'doc.trackChanges.reject': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-track-changes-reject-success'); + const fixture = await 
harness.addTrackedChangeFixture(stateDir, 'doc-track-changes-reject'); + return { + stateDir, + args: [ + 'track-changes', + 'reject', + fixture.docPath, + '--id', + fixture.changeId, + '--out', + harness.createOutputPath('doc-track-changes-reject-output'), + ], + }; + }, + 'doc.trackChanges.acceptAll': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-track-changes-accept-all-success'); + const fixture = await harness.addTrackedChangeFixture(stateDir, 'doc-track-changes-accept-all'); + return { + stateDir, + args: [ + 'track-changes', + 'accept-all', + fixture.docPath, + '--out', + harness.createOutputPath('doc-track-changes-accept-all-output'), + ], + }; + }, + 'doc.trackChanges.rejectAll': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-track-changes-reject-all-success'); + const fixture = await harness.addTrackedChangeFixture(stateDir, 'doc-track-changes-reject-all'); + return { + stateDir, + args: [ + 'track-changes', + 'reject-all', + fixture.docPath, + '--out', + harness.createOutputPath('doc-track-changes-reject-all-output'), + ], + }; + }, + 'doc.session.list': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-session-list-success'); + await harness.openSessionFixture(stateDir, 'doc-session-list', 'session-list-success'); + return { + stateDir, + args: ['session', 'list'], + }; + }, + 'doc.session.save': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-session-save-success'); + await harness.openSessionFixture(stateDir, 'doc-session-save', 'session-save-success'); + return { + stateDir, + args: [ + 'session', + 'save', + '--session', + 'session-save-success', + '--out', + harness.createOutputPath('doc-session-save-output'), + ], + }; + }, + 'doc.session.close': async (harness: ConformanceHarness): Promise => { + const stateDir = await 
harness.createStateDir('doc-session-close-success'); + await harness.openSessionFixture(stateDir, 'doc-session-close', 'session-close-success'); + return { + stateDir, + args: ['session', 'close', '--session', 'session-close-success', '--discard'], + }; + }, + 'doc.session.setDefault': async (harness: ConformanceHarness): Promise => { + const stateDir = await harness.createStateDir('doc-session-set-default-success'); + await harness.openSessionFixture(stateDir, 'doc-session-set-default', 'session-default-success'); + return { + stateDir, + args: ['session', 'set-default', '--session', 'session-default-success'], + }; + }, +} as const satisfies Record Promise>; + +export const OPERATION_SCENARIOS = (Object.keys(SUCCESS_SCENARIOS) as CliOperationId[]).map((operationId) => { + const scenario: OperationScenario = { + operationId, + success: SUCCESS_SCENARIOS[operationId], + failure: genericInvalidArgumentFailure(operationId), + expectedFailureCodes: ['INVALID_ARGUMENT', 'MISSING_REQUIRED'], + }; + return scenario; +}); diff --git a/apps/cli/src/__tests__/contract-response-conformance.test.ts b/apps/cli/src/__tests__/contract-response-conformance.test.ts new file mode 100644 index 0000000000..32d6dfadc6 --- /dev/null +++ b/apps/cli/src/__tests__/contract-response-conformance.test.ts @@ -0,0 +1,52 @@ +import { afterAll, beforeAll, describe, expect, test } from 'bun:test'; +import { CLI_OPERATION_COMMAND_KEYS, type CliOperationId } from '../cli'; +import { validateOperationResponseData } from '../lib/operation-args'; +import type { ErrorEnvelope, SuccessEnvelope } from './conformance/harness'; +import { ConformanceHarness } from './conformance/harness'; +import { OPERATION_SCENARIOS } from './conformance/scenarios'; + +describe('contract response conformance', () => { + let harness: ConformanceHarness; + + beforeAll(async () => { + harness = await ConformanceHarness.create(); + }); + + afterAll(async () => { + await harness.cleanup(); + }); + + test('scenario registry 
covers every canonical operation id', () => { + const expectedIds = new Set(Object.keys(CLI_OPERATION_COMMAND_KEYS) as CliOperationId[]); + const actualIds = new Set(OPERATION_SCENARIOS.map((scenario) => scenario.operationId)); + + expect(actualIds).toEqual(expectedIds); + }); + + for (const scenario of OPERATION_SCENARIOS) { + const commandKey = CLI_OPERATION_COMMAND_KEYS[scenario.operationId]; + + test(`success envelope conforms for ${scenario.operationId}`, async () => { + const invocation = await scenario.success(harness); + const { result, envelope } = await harness.runCli(invocation.args, invocation.stateDir, invocation.stdinBytes); + + expect(result.code).toBe(0); + expect(envelope.ok).toBe(true); + + const success = envelope as SuccessEnvelope; + validateOperationResponseData(scenario.operationId, success.data, commandKey); + }); + + test(`failure envelope conforms for ${scenario.operationId}`, async () => { + const invocation = await scenario.failure(harness); + const { result, envelope } = await harness.runCli(invocation.args, invocation.stateDir, invocation.stdinBytes); + + expect(result.code).toBe(1); + expect(envelope.ok).toBe(false); + + const error = envelope as ErrorEnvelope; + expect(scenario.expectedFailureCodes).toContain(error.error.code); + expect(typeof error.error.message).toBe('string'); + }); + } +}); diff --git a/apps/cli/src/__tests__/cross-paragraph.test.ts b/apps/cli/src/__tests__/cross-paragraph.test.ts deleted file mode 100644 index efd8712c02..0000000000 --- a/apps/cli/src/__tests__/cross-paragraph.test.ts +++ /dev/null @@ -1,88 +0,0 @@ -import { describe, expect, test, beforeAll, afterAll } from 'bun:test'; -import { copyFile, rm, mkdir } from 'node:fs/promises'; -import { join } from 'node:path'; -import { search } from '../commands/search'; -import { replace } from '../commands/replace'; -import { read } from '../commands/read'; -import { openDocument, closeDocument, searchDocument } from '../lib/editor'; - -const TEST_DIR = 
join(import.meta.dir, 'fixtures-cross-paragraph'); -const SAMPLE_DOC = join(TEST_DIR, 'sample.docx'); - -describe('Cross-paragraph search and replace', () => { - beforeAll(async () => { - await mkdir(TEST_DIR, { recursive: true }); - // Copy a test document to our fixtures folder - const sourceDoc = join(import.meta.dir, '../../../../e2e-tests/test-data/basic-documents/advanced-text.docx'); - await copyFile(sourceDoc, SAMPLE_DOC); - }); - - afterAll(async () => { - await rm(TEST_DIR, { recursive: true, force: true }); - }); - - test('searchDocument returns ranges for cross-paragraph matches', async () => { - const doc = await openDocument(SAMPLE_DOC); - try { - // Search for something that exists - const matches = searchDocument(doc, 'Wilde'); - - expect(matches.length).toBeGreaterThan(0); - - // Check that the first match has the expected structure - const firstMatch = matches[0]; - expect(firstMatch).toHaveProperty('from'); - expect(firstMatch).toHaveProperty('to'); - expect(firstMatch).toHaveProperty('text'); - // ranges should be present (may be undefined for single-range matches) - expect('ranges' in firstMatch).toBe(true); - } finally { - closeDocument(doc); - } - }); - - test('replace preserves paragraph structure', async () => { - // Create a copy for this test - const testCopy = join(TEST_DIR, 'preserve-structure.docx'); - await copyFile(SAMPLE_DOC, testCopy); - - // Count paragraphs before (by counting double newlines or paragraph indicators) - const beforeContent = await read(testCopy); - - // Replace some text - await replace('Wilde', 'WILDE', [testCopy]); - - // Read after - const afterContent = await read(testCopy); - - // The replacement should have happened - expect(afterContent.content).toContain('WILDE'); - expect(afterContent.content).not.toContain('Wilde'); - - // Content length should be similar (same number of characters replaced) - // This is a rough check - exact length may differ slightly - const lengthDiff = 
Math.abs(afterContent.content.length - beforeContent.content.length); - expect(lengthDiff).toBeLessThan(10); // Should be basically the same length - - await rm(testCopy); - }); - - test('single-range match replacement works correctly', async () => { - const testCopy = join(TEST_DIR, 'single-range.docx'); - await copyFile(SAMPLE_DOC, testCopy); - - // Search for a unique word that exists in a single paragraph - const beforeSearch = await search('bold', [testCopy]); - expect(beforeSearch.totalMatches).toBeGreaterThan(0); - - // Replace it - const result = await replace('bold', 'BOLD', [testCopy]); - expect(result.totalReplacements).toBe(beforeSearch.totalMatches); - - // Verify replacement - const afterSearch = await search('BOLD', [testCopy]); - expect(afterSearch.totalMatches).toBe(beforeSearch.totalMatches); - - await rm(testCopy); - }); -}); diff --git a/apps/cli/src/__tests__/fixtures-cli-legacy/replace-test.docx b/apps/cli/src/__tests__/fixtures-cli-legacy/replace-test.docx new file mode 100644 index 0000000000..cf9ef53479 Binary files /dev/null and b/apps/cli/src/__tests__/fixtures-cli-legacy/replace-test.docx differ diff --git a/apps/cli/src/__tests__/fixtures-cli-legacy/sample.docx b/apps/cli/src/__tests__/fixtures-cli-legacy/sample.docx new file mode 100644 index 0000000000..cf9ef53479 Binary files /dev/null and b/apps/cli/src/__tests__/fixtures-cli-legacy/sample.docx differ diff --git a/apps/cli/src/__tests__/host.test.ts b/apps/cli/src/__tests__/host.test.ts new file mode 100644 index 0000000000..97a8be3b72 --- /dev/null +++ b/apps/cli/src/__tests__/host.test.ts @@ -0,0 +1,344 @@ +import { afterEach, describe, expect, test } from 'bun:test'; +import { spawn, type ChildProcessWithoutNullStreams } from 'node:child_process'; +import { copyFile, mkdir, mkdtemp, rm } from 'node:fs/promises'; +import { tmpdir } from 'node:os'; +import path from 'node:path'; +import type { CliOperationId } from '../cli'; +import { validateOperationResponseData } from 
'../lib/operation-args'; + +const REPO_ROOT = path.resolve(import.meta.dir, '../../../..'); +const CLI_BIN = path.join(REPO_ROOT, 'apps/cli/src/index.ts'); +const SOURCE_DOC = path.join(REPO_ROOT, 'e2e-tests/test-data/basic-documents/advanced-text.docx'); + +type JsonRpcMessage = { + jsonrpc: '2.0'; + id?: number | null; + result?: unknown; + error?: { + code: number; + message: string; + data?: unknown; + }; +}; + +async function withTimeout<T>(promise: Promise<T>, timeoutMs: number, message: string): Promise<T> { + return new Promise((resolve, reject) => { + const timer = setTimeout(() => reject(new Error(message)), timeoutMs); + promise + .then((value) => { + clearTimeout(timer); + resolve(value); + }) + .catch((error) => { + clearTimeout(timer); + reject(error); + }); + }); +} + +function launchHost(stateDir: string): { + child: ChildProcessWithoutNullStreams; + request(method: string, params?: unknown): Promise<JsonRpcMessage>; + sendRaw(frame: string): void; + nextMessage(): Promise<JsonRpcMessage>; + shutdown(): Promise<void>; +} { + const child = spawn('bun', [CLI_BIN, 'host', '--stdio'], { + cwd: REPO_ROOT, + env: { + ...process.env, + SUPERDOC_CLI_STATE_DIR: stateDir, + }, + stdio: ['pipe', 'pipe', 'pipe'], + }); + + let nextId = 1; + const pending = new Map<number, { resolve: (message: JsonRpcMessage) => void; reject: (error: Error) => void }>(); + const inbox: JsonRpcMessage[] = []; + const inboxWaiters: Array<(msg: JsonRpcMessage) => void> = []; + let stdoutBuffer = ''; + + child.stdout.on('data', (chunk) => { + stdoutBuffer += String(chunk); + const lines = stdoutBuffer.split('\n'); + stdoutBuffer = lines.pop() ??
''; + + for (const line of lines) { + const trimmed = line.trim(); + if (!trimmed) continue; + if (!trimmed.startsWith('{')) continue; + + const message = JSON.parse(trimmed) as JsonRpcMessage; + + if (typeof message.id === 'number') { + const waiter = pending.get(message.id); + if (waiter) { + pending.delete(message.id); + waiter.resolve(message); + continue; + } + } + + const inboxWaiter = inboxWaiters.shift(); + if (inboxWaiter) { + inboxWaiter(message); + } else { + inbox.push(message); + } + } + }); + + child.on('close', () => { + for (const [id, waiter] of pending) { + pending.delete(id); + waiter.reject(new Error('Host exited before response.')); + } + }); + + function request(method: string, params?: unknown): Promise<JsonRpcMessage> { + const id = nextId; + nextId += 1; + + const frame = JSON.stringify({ + jsonrpc: '2.0', + id, + method, + params, + }); + + const responsePromise = new Promise<JsonRpcMessage>((resolve, reject) => { + pending.set(id, { resolve, reject }); + child.stdin.write(`${frame}\n`); + }); + + return withTimeout(responsePromise, 10_000, `Timed out waiting for response to ${method}.`); + } + + function sendRaw(frame: string): void { + child.stdin.write(`${frame}\n`); + } + + function nextMessage(): Promise<JsonRpcMessage> { + if (inbox.length > 0) { + return Promise.resolve(inbox.shift() as JsonRpcMessage); + } + + return withTimeout( + new Promise<JsonRpcMessage>((resolve) => { + inboxWaiters.push(resolve); + }), + 10_000, + 'Timed out waiting for host message.', + ); + } + + async function shutdown(): Promise<void> { + try { + await request('host.shutdown'); + } catch { + child.kill('SIGKILL'); + } + + await withTimeout( + new Promise<void>((resolve) => { + child.once('close', () => resolve()); + }), + 10_000, + 'Timed out waiting for host shutdown.', + ); + } + + return { + child, + request, + sendRaw, + nextMessage, + shutdown, + }; +} + +describe('CLI host mode', () => { + const cleanup: string[] = []; + + afterEach(async () => { + while (cleanup.length > 0) { + const pathToRemove = cleanup.pop(); + if
(pathToRemove) { + await rm(pathToRemove, { recursive: true, force: true }); + } + } + }); + + test('handles ping/capabilities/describe/cli.invoke/shutdown', async () => { + const stateDir = await mkdtemp(path.join(tmpdir(), 'superdoc-host-test-')); + cleanup.push(stateDir); + await mkdir(stateDir, { recursive: true }); + + const host = launchHost(stateDir); + + const ping = await host.request('host.ping'); + expect(ping.error).toBeUndefined(); + expect((ping.result as { ok: boolean }).ok).toBe(true); + + const capabilities = await host.request('host.capabilities'); + expect(capabilities.error).toBeUndefined(); + const capabilityPayload = capabilities.result as { + protocolVersion: string; + features: string[]; + }; + expect(capabilityPayload.protocolVersion).toBe('1.0'); + expect(capabilityPayload.features).toEqual( + expect.arrayContaining(['cli.invoke', 'host.shutdown', 'host.describe', 'host.describe.command']), + ); + + const describe = await host.request('host.describe'); + expect(describe.error).toBeUndefined(); + const describePayload = describe.result as { operationCount: number }; + expect(describePayload.operationCount).toBeGreaterThan(0); + + const describeCommand = await host.request('host.describe.command', { + operationId: 'doc.find', + }); + expect(describeCommand.error).toBeUndefined(); + const describeCommandPayload = describeCommand.result as { + operation: { id: string }; + }; + expect(describeCommandPayload.operation.id).toBe('doc.find'); + + const invoke = await host.request('cli.invoke', { + argv: ['status'], + stdinBase64: '', + }); + expect(invoke.error).toBeUndefined(); + + const invokeResult = invoke.result as { + command: string; + data: { active: boolean }; + meta: { elapsedMs: number }; + }; + + expect(invokeResult.command).toBe('status'); + expect(invokeResult.data.active).toBe(false); + expect(invokeResult.meta.elapsedMs).toBeGreaterThanOrEqual(0); + + await host.shutdown(); + }); + + test('host cli.invoke responses conform to 
contract for representative commands', async () => { + const stateDir = await mkdtemp(path.join(tmpdir(), 'superdoc-host-test-')); + cleanup.push(stateDir); + await mkdir(stateDir, { recursive: true }); + + const docPath = path.join(stateDir, 'host-conformance.docx'); + await copyFile(SOURCE_DOC, docPath); + + const host = launchHost(stateDir); + + async function invokeAndValidate(operationId: CliOperationId, argv: string[]) { + const response = await host.request('cli.invoke', { + argv, + stdinBase64: '', + }); + expect(response.error).toBeUndefined(); + const payload = response.result as { + command: string; + data: unknown; + meta: { elapsedMs: number }; + }; + validateOperationResponseData(operationId, payload.data, payload.command); + expect(payload.meta.elapsedMs).toBeGreaterThanOrEqual(0); + return payload.data as Record<string, unknown>; + } + + const findData = await invokeAndValidate('doc.find', [ + 'find', + docPath, + '--type', + 'text', + '--pattern', + 'Wilde', + '--limit', + '1', + ]); + const findResult = findData.result as { + matches?: Array<Record<string, unknown>>; + context?: Array<{ textRanges?: Array<{ kind: 'text'; blockId: string; range: { start: number; end: number } }> }>; + }; + const firstMatch = findResult.matches?.[0]; + expect(firstMatch).toBeDefined(); + await invokeAndValidate('doc.getNode', ['get-node', docPath, '--address-json', JSON.stringify(firstMatch)]); + + const textTarget = findResult.context?.[0]?.textRanges?.[0]; + expect(textTarget).toBeDefined(); + const collapsedTarget = { + ...textTarget, + range: { + start: textTarget!.range.start, + end: textTarget!.range.start, + }, + }; + await invokeAndValidate('doc.insert', [ + 'insert', + docPath, + '--target-json', + JSON.stringify(collapsedTarget), + '--text', + 'HOST_CONFORMANCE_INSERT', + '--out', + path.join(stateDir, 'host-conformance-insert.docx'), + ]); + + const sessionId = 'host-conformance-session'; + await invokeAndValidate('doc.open', ['open', docPath, '--session', sessionId]); + await
invokeAndValidate('doc.status', ['status', '--session', sessionId]); + await invokeAndValidate('doc.close', ['close', '--session', sessionId, '--discard']); + + await invokeAndValidate('doc.trackChanges.list', ['track-changes', 'list', docPath, '--limit', '1']); + await invokeAndValidate('doc.comments.list', ['comments', 'list', docPath, '--include-resolved', 'false']); + + await host.shutdown(); + }); + + test('returns parse errors for malformed frames', async () => { + const stateDir = await mkdtemp(path.join(tmpdir(), 'superdoc-host-test-')); + cleanup.push(stateDir); + await mkdir(stateDir, { recursive: true }); + + const host = launchHost(stateDir); + + host.sendRaw('{'); + const message = await host.nextMessage(); + expect(message.error?.code).toBe(-32700); + expect(message.id).toBe(null); + + await host.shutdown(); + }); + + test('returns invalid request and cli invoke validation errors', async () => { + const stateDir = await mkdtemp(path.join(tmpdir(), 'superdoc-host-test-')); + cleanup.push(stateDir); + await mkdir(stateDir, { recursive: true }); + + const host = launchHost(stateDir); + + host.sendRaw(JSON.stringify({ jsonrpc: '2.0', id: 99 })); + const invalidRequest = await host.nextMessage(); + expect(invalidRequest.error?.code).toBe(-32600); + + const invalidInvoke = await host.request('cli.invoke', { + argv: ['status'], + stdinBase64: '***', + }); + + expect(invalidInvoke.error?.code).toBe(-32010); + const errorData = invalidInvoke.error?.data as { cliCode?: string }; + expect(errorData.cliCode).toBe('INVALID_ARGUMENT'); + + const invalidDescribe = await host.request('host.describe.command', { + operationId: 'doc.missing', + }); + expect(invalidDescribe.error?.code).toBe(-32602); + + await host.shutdown(); + }); +}); diff --git a/apps/cli/src/__tests__/host/protocol.test.ts b/apps/cli/src/__tests__/host/protocol.test.ts new file mode 100644 index 0000000000..6870567655 --- /dev/null +++ b/apps/cli/src/__tests__/host/protocol.test.ts @@ -0,0 +1,161 @@ 
+import { describe, expect, test } from 'bun:test'; +import { + JsonRpcCode, + hasRequestId, + makeError, + makeSuccess, + parseJsonRpcLine, + serializeFrame, + type JsonRpcRequest, +} from '../../host/protocol'; + +describe('parseJsonRpcLine', () => { + test('parses a valid request', () => { + const line = JSON.stringify({ jsonrpc: '2.0', id: 1, method: 'test', params: { a: 1 } }); + const result = parseJsonRpcLine(line); + expect(result.error).toBeUndefined(); + expect(result.request).toEqual({ jsonrpc: '2.0', id: 1, method: 'test', params: { a: 1 } }); + }); + + test('parses a notification (no id)', () => { + const line = JSON.stringify({ jsonrpc: '2.0', method: 'notify' }); + const result = parseJsonRpcLine(line); + expect(result.error).toBeUndefined(); + expect(result.request?.method).toBe('notify'); + expect(result.request?.id).toBeUndefined(); + }); + + test('returns error for empty line', () => { + const result = parseJsonRpcLine(''); + expect(result.request).toBeUndefined(); + expect(result.error?.code).toBe(JsonRpcCode.InvalidRequest); + }); + + test('returns error for whitespace-only line', () => { + const result = parseJsonRpcLine(' '); + expect(result.request).toBeUndefined(); + expect(result.error?.code).toBe(JsonRpcCode.InvalidRequest); + }); + + test('returns parse error for invalid JSON', () => { + const result = parseJsonRpcLine('{broken'); + expect(result.request).toBeUndefined(); + expect(result.error?.code).toBe(JsonRpcCode.ParseError); + }); + + test('returns error for non-object JSON (array)', () => { + const result = parseJsonRpcLine('[1,2,3]'); + expect(result.error?.code).toBe(JsonRpcCode.InvalidRequest); + }); + + test('returns error for non-object JSON (string)', () => { + const result = parseJsonRpcLine('"hello"'); + expect(result.error?.code).toBe(JsonRpcCode.InvalidRequest); + }); + + test('returns error for wrong jsonrpc version', () => { + const result = parseJsonRpcLine(JSON.stringify({ jsonrpc: '1.0', method: 'test' })); + 
expect(result.error?.code).toBe(JsonRpcCode.InvalidRequest); + expect(result.error?.message).toContain('2.0'); + }); + + test('returns error for missing method', () => { + const result = parseJsonRpcLine(JSON.stringify({ jsonrpc: '2.0', id: 1 })); + expect(result.error?.code).toBe(JsonRpcCode.InvalidRequest); + }); + + test('returns error for empty method string', () => { + const result = parseJsonRpcLine(JSON.stringify({ jsonrpc: '2.0', id: 1, method: '' })); + expect(result.error?.code).toBe(JsonRpcCode.InvalidRequest); + }); + + test('returns error for non-string method', () => { + const result = parseJsonRpcLine(JSON.stringify({ jsonrpc: '2.0', id: 1, method: 42 })); + expect(result.error?.code).toBe(JsonRpcCode.InvalidRequest); + }); + + test('returns error for invalid id type (boolean)', () => { + const result = parseJsonRpcLine(JSON.stringify({ jsonrpc: '2.0', id: true, method: 'test' })); + expect(result.error?.code).toBe(JsonRpcCode.InvalidRequest); + expect(result.error?.message).toContain('id type'); + }); + + test('accepts null id', () => { + const result = parseJsonRpcLine(JSON.stringify({ jsonrpc: '2.0', id: null, method: 'test' })); + expect(result.error).toBeUndefined(); + expect(result.request?.id).toBeNull(); + }); + + test('accepts string id', () => { + const result = parseJsonRpcLine(JSON.stringify({ jsonrpc: '2.0', id: 'abc', method: 'test' })); + expect(result.error).toBeUndefined(); + expect(result.request?.id).toBe('abc'); + }); +}); + +describe('makeSuccess', () => { + test('creates a well-formed success response', () => { + const result = makeSuccess(1, { ok: true }); + expect(result).toEqual({ jsonrpc: '2.0', id: 1, result: { ok: true } }); + }); + + test('handles null id', () => { + const result = makeSuccess(null, 'data'); + expect(result.id).toBeNull(); + }); +}); + +describe('makeError', () => { + test('creates error response without data', () => { + const result = makeError(1, -32600, 'bad request'); + expect(result).toEqual({ + 
jsonrpc: '2.0', + id: 1, + error: { code: -32600, message: 'bad request' }, + }); + }); + + test('creates error response with data', () => { + const result = makeError(2, -32010, 'failed', { reason: 'timeout' }); + expect(result.error.data).toEqual({ reason: 'timeout' }); + }); + + test('omits data field when undefined', () => { + const result = makeError(3, -32600, 'bad'); + expect('data' in result.error).toBe(false); + }); +}); + +describe('serializeFrame', () => { + test('produces newline-terminated JSON', () => { + const frame = makeSuccess(1, 'ok'); + const serialized = serializeFrame(frame); + expect(serialized.endsWith('\n')).toBe(true); + expect(JSON.parse(serialized)).toEqual(frame); + }); + + test('serializes error frames', () => { + const frame = makeError(null, -32700, 'parse error'); + const serialized = serializeFrame(frame); + expect(serialized.endsWith('\n')).toBe(true); + const parsed = JSON.parse(serialized); + expect(parsed.error.code).toBe(-32700); + }); +}); + +describe('hasRequestId', () => { + test('returns true when id is present', () => { + const request: JsonRpcRequest = { jsonrpc: '2.0', id: 1, method: 'test' }; + expect(hasRequestId(request)).toBe(true); + }); + + test('returns true when id is null', () => { + const request: JsonRpcRequest = { jsonrpc: '2.0', id: null, method: 'test' }; + expect(hasRequestId(request)).toBe(true); + }); + + test('returns false when id is absent', () => { + const request: JsonRpcRequest = { jsonrpc: '2.0', method: 'test' }; + expect(hasRequestId(request)).toBe(false); + }); +}); diff --git a/apps/cli/src/__tests__/install-uninstall.test.ts b/apps/cli/src/__tests__/install-uninstall.test.ts new file mode 100644 index 0000000000..ba4d3905c8 --- /dev/null +++ b/apps/cli/src/__tests__/install-uninstall.test.ts @@ -0,0 +1,150 @@ +import { afterEach, describe, expect, test } from 'bun:test'; +import { existsSync, mkdirSync, rmSync, writeFileSync, mkdtempSync } from 'node:fs'; +import { join } from 'node:path'; 
+import { tmpdir } from 'node:os'; +import { run } from '../index'; + +type RunResult = { + code: number; + stdout: string; + stderr: string; +}; + +async function runCli(args: string[], cwd?: string): Promise<RunResult> { + let stdout = ''; + let stderr = ''; + const originalCwd = process.cwd(); + + if (cwd) process.chdir(cwd); + try { + const code = await run(args, { + stdout(message: string) { + stdout += message; + }, + stderr(message: string) { + stderr += message; + }, + async readStdinBytes() { + return new Uint8Array(); + }, + }); + + return { code, stdout, stderr }; + } finally { + process.chdir(originalCwd); + } +} + +describe('install and uninstall commands', () => { + let testDir: string; + + afterEach(() => { + if (testDir && existsSync(testDir)) { + rmSync(testDir, { recursive: true, force: true }); + } + }); + + function createTestDir(): string { + testDir = mkdtempSync(join(tmpdir(), 'superdoc-cli-install-test-')); + return testDir; + } + + test('install --skills copies skill into .claude/', async () => { + const dir = createTestDir(); + mkdirSync(join(dir, '.claude')); + + const result = await runCli(['install', '--skills'], dir); + expect(result.code).toBe(0); + expect(result.stdout).toContain('.claude/skills/superdoc/'); + expect(existsSync(join(dir, '.claude', 'skills', 'superdoc', 'SKILL.md'))).toBe(true); + }); + + test('install --skills copies skill into .agents/', async () => { + const dir = createTestDir(); + mkdirSync(join(dir, '.agents')); + + const result = await runCli(['install', '--skills'], dir); + expect(result.code).toBe(0); + expect(result.stdout).toContain('.agents/skills/superdoc/'); + expect(existsSync(join(dir, '.agents', 'skills', 'superdoc', 'SKILL.md'))).toBe(true); + }); + + test('install --skills copies into both when both exist', async () => { + const dir = createTestDir(); + mkdirSync(join(dir, '.claude')); + mkdirSync(join(dir, '.agents')); + + const result = await runCli(['install', '--skills'], dir); +
expect(result.code).toBe(0); + expect(result.stdout).toContain('.claude/skills/superdoc/'); + expect(result.stdout).toContain('.agents/skills/superdoc/'); + }); + + test('install --skills warns when no agent directories exist', async () => { + const dir = createTestDir(); + + const result = await runCli(['install', '--skills'], dir); + expect(result.code).toBe(1); + expect(result.stderr).toContain('No agent directories found'); + }); + + test('install without --skills prints usage', async () => { + const dir = createTestDir(); + + const result = await runCli(['install'], dir); + expect(result.code).toBe(1); + expect(result.stderr).toContain('Usage: superdoc install --skills'); + }); + + test('install runtime failures return structured CLI errors', async () => { + const dir = createTestDir(); + mkdirSync(join(dir, '.claude')); + writeFileSync(join(dir, '.claude', 'skills'), 'blocked'); + + const result = await runCli(['install', '--skills'], dir); + expect(result.code).toBe(1); + expect(result.stderr).toContain('"ok":false'); + expect(result.stderr).toContain('"code":"COMMAND_FAILED"'); + }); + + test('uninstall --skills removes installed skill directories', async () => { + const dir = createTestDir(); + mkdirSync(join(dir, '.claude', 'skills', 'superdoc'), { recursive: true }); + mkdirSync(join(dir, '.agents', 'skills', 'superdoc'), { recursive: true }); + + const result = await runCli(['uninstall', '--skills'], dir); + expect(result.code).toBe(0); + expect(result.stdout).toContain('.claude/skills/superdoc/'); + expect(result.stdout).toContain('.agents/skills/superdoc/'); + expect(existsSync(join(dir, '.claude', 'skills', 'superdoc'))).toBe(false); + expect(existsSync(join(dir, '.agents', 'skills', 'superdoc'))).toBe(false); + }); + + test('uninstall --skills is no-op when nothing is installed', async () => { + const dir = createTestDir(); + + const result = await runCli(['uninstall', '--skills'], dir); + expect(result.code).toBe(0); + 
expect(result.stdout).toContain('No installed skills found'); + }); + + test('uninstall without --skills prints usage', async () => { + const dir = createTestDir(); + + const result = await runCli(['uninstall'], dir); + expect(result.code).toBe(1); + expect(result.stderr).toContain('Usage: superdoc uninstall --skills'); + }); + + test('install --help shows global CLI help', async () => { + const result = await runCli(['install', '--help']); + expect(result.code).toBe(0); + expect(result.stdout).toContain('Usage: superdoc [options]'); + }); + + test('uninstall --help shows global CLI help', async () => { + const result = await runCli(['uninstall', '--help']); + expect(result.code).toBe(0); + expect(result.stdout).toContain('Usage: superdoc [options]'); + }); +}); diff --git a/apps/cli/src/__tests__/legacy-compat.test.ts b/apps/cli/src/__tests__/legacy-compat.test.ts new file mode 100644 index 0000000000..6e882f0189 --- /dev/null +++ b/apps/cli/src/__tests__/legacy-compat.test.ts @@ -0,0 +1,148 @@ +import { beforeAll, beforeEach, describe, expect, test } from 'bun:test'; +import { copyFile, mkdir } from 'node:fs/promises'; +import { join } from 'node:path'; +import { run } from '../index'; + +type RunResult = { + code: number; + stdout: string; + stderr: string; +}; + +const TEST_DIR = join(import.meta.dir, 'fixtures-cli-legacy'); +const SOURCE_DOC = join(import.meta.dir, '../../../../e2e-tests/test-data/basic-documents/advanced-text.docx'); +const SAMPLE_DOC = join(TEST_DIR, 'sample.docx'); +const REPLACE_DOC = join(TEST_DIR, 'replace-test.docx'); + +async function runCli(args: string[]): Promise<RunResult> { + let stdout = ''; + let stderr = ''; + + const code = await run(args, { + stdout(message: string) { + stdout += message; + }, + stderr(message: string) { + stderr += message; + }, + async readStdinBytes() { + return new Uint8Array(); + }, + }); + + return { code, stdout, stderr }; +} + +describe('legacy command compatibility', () => { + beforeAll(async () => { + await
mkdir(TEST_DIR, { recursive: true }); + await copyFile(SOURCE_DOC, SAMPLE_DOC); + }); + + test('search supports legacy pretty output by default', async () => { + const result = await runCli(['search', 'Wilde', SAMPLE_DOC]); + expect(result.code).toBe(0); + expect(result.stderr).toBe(''); + expect(result.stdout).toContain('Found '); + expect(result.stdout).toContain(`${SAMPLE_DOC}:`); + expect(result.stdout).toContain('"'); + }); + + test('search supports legacy --json output shape', async () => { + const result = await runCli(['search', 'Wilde', SAMPLE_DOC, '--json']); + expect(result.code).toBe(0); + expect(result.stderr).toBe(''); + + const payload = JSON.parse(result.stdout) as { + pattern: string; + files: Array<{ path: string; matches: unknown[] }>; + totalMatches: number; + ok?: boolean; + }; + expect(payload.ok).toBeUndefined(); + expect(payload.pattern).toBe('Wilde'); + expect(payload.totalMatches).toBeGreaterThan(0); + expect(payload.files.length).toBeGreaterThan(0); + }); + + test('read supports legacy pretty output by default', async () => { + const result = await runCli(['read', SAMPLE_DOC]); + expect(result.code).toBe(0); + expect(result.stderr).toBe(''); + expect(result.stdout.length).toBeGreaterThan(0); + expect(result.stdout).toContain('Wilde'); + }); + + test('read supports legacy --json output shape', async () => { + const result = await runCli(['read', SAMPLE_DOC, '--json']); + expect(result.code).toBe(0); + expect(result.stderr).toBe(''); + + const payload = JSON.parse(result.stdout) as { + path: string; + content: string; + ok?: boolean; + }; + expect(payload.ok).toBeUndefined(); + expect(payload.path).toBe(SAMPLE_DOC); + expect(payload.content).toContain('Wilde'); + }); + + test('global --help still prints CLI help for legacy commands', async () => { + const result = await runCli(['search', '--help']); + expect(result.code).toBe(0); + expect(result.stdout).toContain('Usage: superdoc [options]'); + }); + + describe('replace-legacy', () => { + 
beforeEach(async () => { + await copyFile(SOURCE_DOC, REPLACE_DOC); + }); + + test('replace-legacy supports legacy pretty output by default', async () => { + const result = await runCli(['replace-legacy', 'Wilde', 'WILDE', REPLACE_DOC]); + expect(result.code).toBe(0); + expect(result.stderr).toBe(''); + expect(result.stdout).toContain('Replaced '); + + // Verify the replacement persisted + const after = await runCli(['search', 'WILDE', REPLACE_DOC, '--json']); + const payload = JSON.parse(after.stdout) as { totalMatches: number }; + expect(payload.totalMatches).toBeGreaterThan(0); + }); + + test('replace-legacy supports legacy --json output shape', async () => { + const result = await runCli(['replace-legacy', 'Wilde', 'WILDE', REPLACE_DOC, '--json']); + expect(result.code).toBe(0); + expect(result.stderr).toBe(''); + + const payload = JSON.parse(result.stdout) as { + find: string; + replace: string; + files: Array<{ path: string; replacements: number }>; + totalReplacements: number; + ok?: boolean; + }; + expect(payload.ok).toBeUndefined(); + expect(payload.find).toBe('Wilde'); + expect(payload.replace).toBe('WILDE'); + expect(payload.totalReplacements).toBeGreaterThan(0); + expect(payload.files.length).toBeGreaterThan(0); + expect(payload.files[0].path).toBe(REPLACE_DOC); + }); + + test('replace-legacy prints usage when files arg is missing', async () => { + const result = await runCli(['replace-legacy', 'Wilde', 'WILDE']); + expect(result.code).toBe(1); + expect(result.stderr).toContain('Usage: superdoc replace-legacy '); + }); + + test('replace-legacy with no matches does not error', async () => { + const result = await runCli(['replace-legacy', 'xyz123nonexistent', 'foo', REPLACE_DOC, '--json']); + expect(result.code).toBe(0); + + const payload = JSON.parse(result.stdout) as { totalReplacements: number }; + expect(payload.totalReplacements).toBe(0); + }); + }); +}); diff --git a/apps/cli/src/__tests__/lib/args.test.ts b/apps/cli/src/__tests__/lib/args.test.ts 
new file mode 100644 index 0000000000..5c039f430a --- /dev/null +++ b/apps/cli/src/__tests__/lib/args.test.ts @@ -0,0 +1,292 @@ +import { describe, expect, test } from 'bun:test'; +import { + ensureValidArgs, + getBooleanOption, + getNumberOption, + getOptionalBooleanOption, + getStringListOption, + getStringOption, + parseCommandArgs, + parseCommaList, + parseGlobalArgs, + requireDocArg, + resolveDocArg, + type OptionSpec, +} from '../../lib/args'; +import { CliError } from '../../lib/errors'; + +describe('parseGlobalArgs', () => { + test('defaults to json output', () => { + const { globals } = parseGlobalArgs(['doc', 'open']); + expect(globals.output).toBe('json'); + }); + + test('parses --json flag', () => { + const { globals, rest } = parseGlobalArgs(['--json', 'doc', 'open']); + expect(globals.output).toBe('json'); + expect(rest).toEqual(['doc', 'open']); + }); + + test('parses --pretty flag', () => { + const { globals } = parseGlobalArgs(['--pretty']); + expect(globals.output).toBe('pretty'); + }); + + test('throws when both --json and --pretty are provided', () => { + expect(() => parseGlobalArgs(['--json', '--pretty'])).toThrow(CliError); + }); + + test('parses --output=json', () => { + const { globals } = parseGlobalArgs(['--output=json']); + expect(globals.output).toBe('json'); + }); + + test('parses --output pretty (space-separated)', () => { + const { globals } = parseGlobalArgs(['--output', 'pretty']); + expect(globals.output).toBe('pretty'); + }); + + test('throws for invalid --output value', () => { + expect(() => parseGlobalArgs(['--output=xml'])).toThrow(CliError); + }); + + test('throws when --output conflicts with --json', () => { + expect(() => parseGlobalArgs(['--json', '--output=pretty'])).toThrow(CliError); + }); + + test('parses --session with space-separated value', () => { + const { globals } = parseGlobalArgs(['--session', 'my-session']); + expect(globals.sessionId).toBe('my-session'); + }); + + test('parses --session=value', () => { + 
const { globals } = parseGlobalArgs(['--session=my-session']); + expect(globals.sessionId).toBe('my-session'); + }); + + test('throws when --session has no value', () => { + expect(() => parseGlobalArgs(['--session'])).toThrow(CliError); + }); + + test('parses --timeout-ms', () => { + const { globals } = parseGlobalArgs(['--timeout-ms', '5000']); + expect(globals.timeoutMs).toBe(5000); + }); + + test('throws for non-positive --timeout-ms', () => { + expect(() => parseGlobalArgs(['--timeout-ms', '0'])).toThrow(CliError); + expect(() => parseGlobalArgs(['--timeout-ms', '-1'])).toThrow(CliError); + }); + + test('parses --help / -h', () => { + expect(parseGlobalArgs(['--help']).globals.help).toBe(true); + expect(parseGlobalArgs(['-h']).globals.help).toBe(true); + }); + + test('stops at -- separator', () => { + const { rest } = parseGlobalArgs(['--json', '--', '--pretty']); + expect(rest).toEqual(['--', '--pretty']); + }); + + test('passes unknown tokens to rest', () => { + const { rest } = parseGlobalArgs(['doc', 'open', 'file.docx']); + expect(rest).toEqual(['doc', 'open', 'file.docx']); + }); +}); + +describe('parseCommandArgs', () => { + const specs: OptionSpec[] = [ + { name: 'name', type: 'string' }, + { name: 'count', type: 'number' }, + { name: 'verbose', type: 'boolean' }, + { name: 'tag', type: 'string', multiple: true }, + ]; + + test('parses string options', () => { + const result = parseCommandArgs(['--name', 'alice'], specs); + expect(result.options.name).toBe('alice'); + }); + + test('parses string options with =', () => { + const result = parseCommandArgs(['--name=bob'], specs); + expect(result.options.name).toBe('bob'); + }); + + test('parses number options', () => { + const result = parseCommandArgs(['--count', '42'], specs); + expect(result.options.count).toBe(42); + }); + + test('reports error for non-numeric number option', () => { + const result = parseCommandArgs(['--count', 'abc'], specs); + expect(result.errors.length).toBeGreaterThan(0); + }); 
+ + test('parses boolean flags', () => { + const result = parseCommandArgs(['--verbose'], specs); + expect(result.options.verbose).toBe(true); + }); + + test('parses explicit boolean true/false', () => { + expect(parseCommandArgs(['--verbose', 'true'], specs).options.verbose).toBe(true); + expect(parseCommandArgs(['--verbose', 'false'], specs).options.verbose).toBe(false); + expect(parseCommandArgs(['--verbose=1'], specs).options.verbose).toBe(true); + expect(parseCommandArgs(['--verbose=0'], specs).options.verbose).toBe(false); + }); + + test('reports error for invalid explicit boolean', () => { + const result = parseCommandArgs(['--verbose=abc'], specs); + expect(result.errors.length).toBeGreaterThan(0); + }); + + test('collects positionals', () => { + const result = parseCommandArgs(['pos1', 'pos2', '--name', 'x'], specs); + expect(result.positionals).toEqual(['pos1', 'pos2']); + }); + + test('collects unknown flags', () => { + const result = parseCommandArgs(['--unknown-flag'], specs); + expect(result.unknown).toEqual(['--unknown-flag']); + }); + + test('handles multiple option', () => { + const result = parseCommandArgs(['--tag', 'a', '--tag', 'b'], specs); + expect(result.options.tag).toEqual(['a', 'b']); + }); + + test('reports error for duplicate non-multiple options', () => { + const result = parseCommandArgs(['--name', 'a', '--name', 'b'], specs); + expect(result.errors.length).toBeGreaterThan(0); + }); + + test('stops at -- separator', () => { + const result = parseCommandArgs(['--name', 'x', '--', '--verbose'], specs); + expect(result.options.name).toBe('x'); + expect(result.positionals).toEqual(['--verbose']); + }); + + test('handles aliases', () => { + const aliasedSpecs: OptionSpec[] = [{ name: 'output', type: 'string', aliases: ['o'] }]; + const result = parseCommandArgs(['--o', 'json'], aliasedSpecs); + expect(result.options.output).toBe('json'); + }); +}); + +describe('ensureValidArgs', () => { + test('throws on unknown options', () => { + 
expect(() => ensureValidArgs({ positionals: [], options: {}, unknown: ['--bad'], errors: [] })).toThrow(CliError); + }); + + test('throws on parse errors', () => { + expect(() => + ensureValidArgs({ positionals: [], options: {}, unknown: [], errors: ['--count must be a number.'] }), + ).toThrow(CliError); + }); + + test('passes with no errors', () => { + expect(() => ensureValidArgs({ positionals: [], options: {}, unknown: [], errors: [] })).not.toThrow(); + }); +}); + +describe('option getter helpers', () => { + const parsed = { + positionals: [], + options: { name: 'alice', count: 42, verbose: true, tags: ['a', 'b'] }, + unknown: [], + errors: [], + }; + + test('getStringOption returns string or undefined', () => { + expect(getStringOption(parsed, 'name')).toBe('alice'); + expect(getStringOption(parsed, 'count')).toBeUndefined(); + expect(getStringOption(parsed, 'missing')).toBeUndefined(); + }); + + test('getNumberOption returns number or undefined', () => { + expect(getNumberOption(parsed, 'count')).toBe(42); + expect(getNumberOption(parsed, 'name')).toBeUndefined(); + }); + + test('getBooleanOption returns boolean', () => { + expect(getBooleanOption(parsed, 'verbose')).toBe(true); + expect(getBooleanOption(parsed, 'missing')).toBe(false); + }); + + test('getOptionalBooleanOption returns boolean or undefined', () => { + expect(getOptionalBooleanOption(parsed, 'verbose')).toBe(true); + expect(getOptionalBooleanOption(parsed, 'missing')).toBeUndefined(); + }); + + test('getStringListOption returns string array', () => { + expect(getStringListOption(parsed, 'tags')).toEqual(['a', 'b']); + expect(getStringListOption(parsed, 'name')).toEqual(['alice']); + expect(getStringListOption(parsed, 'missing')).toEqual([]); + }); +}); + +describe('resolveDocArg', () => { + test('returns doc from --doc flag', () => { + const parsed = { positionals: [], options: { doc: 'file.docx' }, unknown: [], errors: [] }; + const result = resolveDocArg(parsed, 'cmd'); + 
expect(result.doc).toBe('file.docx'); + }); + + test('returns doc from first positional', () => { + const parsed = { positionals: ['file.docx'], options: {}, unknown: [], errors: [] }; + const result = resolveDocArg(parsed, 'cmd'); + expect(result.doc).toBe('file.docx'); + expect(result.positionals).toEqual([]); + }); + + test('throws when flag and positional conflict', () => { + const parsed = { positionals: ['other.docx'], options: { doc: 'file.docx' }, unknown: [], errors: [] }; + expect(() => resolveDocArg(parsed, 'cmd')).toThrow(CliError); + }); + + test('allows matching flag and positional', () => { + const parsed = { positionals: ['file.docx'], options: { doc: 'file.docx' }, unknown: [], errors: [] }; + const result = resolveDocArg(parsed, 'cmd'); + expect(result.doc).toBe('file.docx'); + }); + + test('returns undefined when no doc provided', () => { + const parsed = { positionals: [], options: {}, unknown: [], errors: [] }; + const result = resolveDocArg(parsed, 'cmd'); + expect(result.doc).toBeUndefined(); + }); +}); + +describe('requireDocArg', () => { + test('throws when no doc', () => { + const parsed = { positionals: [], options: {}, unknown: [], errors: [] }; + expect(() => requireDocArg(parsed, 'cmd')).toThrow(CliError); + }); + + test('returns doc when available', () => { + const parsed = { positionals: ['file.docx'], options: {}, unknown: [], errors: [] }; + const result = requireDocArg(parsed, 'cmd'); + expect(result.doc).toBe('file.docx'); + }); +}); + +describe('parseCommaList', () => { + test('splits comma-separated values', () => { + expect(parseCommaList('a,b,c')).toEqual(['a', 'b', 'c']); + }); + + test('trims whitespace', () => { + expect(parseCommaList('a , b , c')).toEqual(['a', 'b', 'c']); + }); + + test('filters empty segments', () => { + expect(parseCommaList('a,,b,')).toEqual(['a', 'b']); + }); + + test('returns empty array for undefined', () => { + expect(parseCommaList(undefined)).toEqual([]); + }); + + test('returns empty array for 
empty string', () => { + expect(parseCommaList('')).toEqual([]); + }); +}); diff --git a/apps/cli/src/__tests__/lib/change-mode.test.ts b/apps/cli/src/__tests__/lib/change-mode.test.ts new file mode 100644 index 0000000000..1482bf06e1 --- /dev/null +++ b/apps/cli/src/__tests__/lib/change-mode.test.ts @@ -0,0 +1,35 @@ +import { describe, expect, test } from 'bun:test'; +import { resolveChangeMode } from '../../lib/change-mode'; +import { CliError } from '../../lib/errors'; +import type { ParsedArgs } from '../../lib/args'; + +function makeParsed(options: Record = {}): ParsedArgs { + return { positionals: [], options, unknown: [], errors: [] }; +} + +describe('resolveChangeMode', () => { + test('defaults to "direct"', () => { + expect(resolveChangeMode(makeParsed(), 'cmd')).toBe('direct'); + }); + + test('returns "tracked" when --tracked is set', () => { + expect(resolveChangeMode(makeParsed({ tracked: true }), 'cmd')).toBe('tracked'); + }); + + test('returns "direct" when --direct is set', () => { + expect(resolveChangeMode(makeParsed({ direct: true }), 'cmd')).toBe('direct'); + }); + + test('returns value from --change-mode', () => { + expect(resolveChangeMode(makeParsed({ 'change-mode': 'tracked' }), 'cmd')).toBe('tracked'); + expect(resolveChangeMode(makeParsed({ 'change-mode': 'direct' }), 'cmd')).toBe('direct'); + }); + + test('throws when both --tracked and --direct are set', () => { + expect(() => resolveChangeMode(makeParsed({ tracked: true, direct: true }), 'cmd')).toThrow(CliError); + }); + + test('throws for invalid --change-mode value', () => { + expect(() => resolveChangeMode(makeParsed({ 'change-mode': 'auto' }), 'cmd')).toThrow(CliError); + }); +}); diff --git a/apps/cli/src/__tests__/lib/cli-import-boundaries.test.ts b/apps/cli/src/__tests__/lib/cli-import-boundaries.test.ts new file mode 100644 index 0000000000..6cf42dfd6c --- /dev/null +++ b/apps/cli/src/__tests__/lib/cli-import-boundaries.test.ts @@ -0,0 +1,101 @@ +import { describe, expect, test 
} from 'bun:test'; +import { readdirSync, readFileSync, statSync } from 'node:fs'; +import { join } from 'node:path'; + +type ImportViolation = { + filePath: string; + specifier: string; + reason: string; +}; + +const CLI_SRC_ROOT = new URL('../../', import.meta.url); +const CLI_SRC_ROOT_PATH = CLI_SRC_ROOT.pathname; +const BANNED_IMPORT_PATTERNS: ReadonlyArray<{ pattern: RegExp; reason: string }> = [ + { + pattern: /document-api-adapters\//, + reason: 'CLI modules must not import document-api-adapters internals directly.', + }, + { + pattern: /(?:^|\/)super-editor\/src\//, + reason: 'CLI modules must not import super-editor source internals directly.', + }, + { + pattern: /(?:^|\/)layout-engine\/(?:pm-adapter|layout-engine|painters|style-engine)\//, + reason: 'CLI modules must not import layout-engine internals directly.', + }, + { + pattern: /(?:^|\/)prosemirror(?:-|\/)/, + reason: 'CLI modules must not depend on ProseMirror internals directly.', + }, +]; + +function listTypeScriptFiles(rootPath: string): string[] { + const files: string[] = []; + + function walk(currentPath: string): void { + const entries = readdirSync(currentPath); + for (const entry of entries) { + const absolutePath = join(currentPath, entry); + const info = statSync(absolutePath); + if (info.isDirectory()) { + walk(absolutePath); + continue; + } + + if (!entry.endsWith('.ts')) continue; + if (absolutePath.includes('/__tests__/')) continue; + files.push(absolutePath); + } + } + + walk(rootPath); + return files; +} + +function extractImportSpecifiers(fileContents: string): string[] { + const importSpecifiers: string[] = []; + const staticImportPattern = /import\s+(?:type\s+)?[\s\S]*?\sfrom\s+['"]([^'"]+)['"]/g; + const sideEffectImportPattern = /import\s+['"]([^'"]+)['"]/g; + const dynamicImportPattern = /import\(\s*['"]([^'"]+)['"]\s*\)/g; + + for (const pattern of [staticImportPattern, sideEffectImportPattern, dynamicImportPattern]) { + for (const match of fileContents.matchAll(pattern)) { 
+ const specifier = match[1]; + if (specifier) importSpecifiers.push(specifier); + } + } + + return importSpecifiers; +} + +function findImportViolations(): ImportViolation[] { + const files = listTypeScriptFiles(CLI_SRC_ROOT_PATH); + const violations: ImportViolation[] = []; + + for (const filePath of files) { + const contents = readFileSync(filePath, 'utf8'); + const specifiers = extractImportSpecifiers(contents); + + for (const specifier of specifiers) { + for (const { pattern, reason } of BANNED_IMPORT_PATTERNS) { + if (!pattern.test(specifier)) continue; + violations.push({ filePath, specifier, reason }); + } + } + } + + return violations; +} + +describe('cli import boundaries', () => { + test('prevents adapter and engine-internal imports outside bridge modules', () => { + const violations = findImportViolations(); + const details = violations.map((entry) => ({ + filePath: entry.filePath.replace(CLI_SRC_ROOT_PATH, 'src/'), + specifier: entry.specifier, + reason: entry.reason, + })); + + expect(details).toEqual([]); + }); +}); diff --git a/apps/cli/src/__tests__/lib/envelope.test.ts b/apps/cli/src/__tests__/lib/envelope.test.ts new file mode 100644 index 0000000000..d0a33ad5f9 --- /dev/null +++ b/apps/cli/src/__tests__/lib/envelope.test.ts @@ -0,0 +1,27 @@ +import { describe, expect, test } from 'bun:test'; +import { CONTRACT_VERSION } from '@superdoc/document-api'; +import { createFailureEnvelope, createSuccessEnvelope } from '../../lib/envelope'; +import { CliError } from '../../lib/errors'; + +describe('createSuccessEnvelope', () => { + test('creates a success envelope', () => { + const envelope = createSuccessEnvelope('doc open', { sessionId: 'abc' }, 123); + expect(envelope.ok).toBe(true); + expect(envelope.command).toBe('doc open'); + expect(envelope.data).toEqual({ sessionId: 'abc' }); + expect(envelope.meta.elapsedMs).toBe(123); + expect(envelope.meta.version).toBe(CONTRACT_VERSION); + }); +}); + +describe('createFailureEnvelope', () => { + 
test('creates a failure envelope from CliError', () => { + const error = new CliError('INVALID_ARGUMENT', 'bad input', { field: 'doc' }); + const envelope = createFailureEnvelope(error, 50); + expect(envelope.ok).toBe(false); + expect(envelope.error.code).toBe('INVALID_ARGUMENT'); + expect(envelope.error.message).toBe('bad input'); + expect(envelope.error.details).toEqual({ field: 'doc' }); + expect(envelope.meta.elapsedMs).toBe(50); + }); +}); diff --git a/apps/cli/src/__tests__/lib/errors.test.ts b/apps/cli/src/__tests__/lib/errors.test.ts new file mode 100644 index 0000000000..b6f4931375 --- /dev/null +++ b/apps/cli/src/__tests__/lib/errors.test.ts @@ -0,0 +1,62 @@ +import { describe, expect, test } from 'bun:test'; +import { CliError, toCliError } from '../../lib/errors'; + +describe('CliError', () => { + test('sets code, message, and details', () => { + const error = new CliError('INVALID_ARGUMENT', 'bad input', { field: 'name' }); + expect(error.code).toBe('INVALID_ARGUMENT'); + expect(error.message).toBe('bad input'); + expect(error.details).toEqual({ field: 'name' }); + expect(error.exitCode).toBe(1); + }); + + test('defaults exitCode to 1', () => { + const error = new CliError('COMMAND_FAILED', 'fail'); + expect(error.exitCode).toBe(1); + }); + + test('accepts custom exitCode', () => { + const error = new CliError('COMMAND_FAILED', 'fail', undefined, 2); + expect(error.exitCode).toBe(2); + }); + + test('is instanceof Error', () => { + const error = new CliError('COMMAND_FAILED', 'fail'); + expect(error instanceof Error).toBe(true); + expect(error instanceof CliError).toBe(true); + }); + + test('has name set to CliError', () => { + const error = new CliError('COMMAND_FAILED', 'fail'); + expect(error.name).toBe('CliError'); + }); +}); + +describe('toCliError', () => { + test('returns CliError as-is', () => { + const original = new CliError('INVALID_ARGUMENT', 'bad'); + expect(toCliError(original)).toBe(original); + }); + + test('wraps Error into CliError', 
() => { + const original = new Error('something broke'); + const wrapped = toCliError(original); + expect(wrapped).toBeInstanceOf(CliError); + expect(wrapped.code).toBe('COMMAND_FAILED'); + expect(wrapped.message).toBe('something broke'); + }); + + test('wraps non-Error into CliError', () => { + const wrapped = toCliError('string error'); + expect(wrapped).toBeInstanceOf(CliError); + expect(wrapped.code).toBe('COMMAND_FAILED'); + expect(wrapped.message).toBe('Unknown error'); + expect(wrapped.details).toEqual({ error: 'string error' }); + }); + + test('wraps null into CliError', () => { + const wrapped = toCliError(null); + expect(wrapped).toBeInstanceOf(CliError); + expect(wrapped.code).toBe('COMMAND_FAILED'); + }); +}); diff --git a/apps/cli/src/__tests__/lib/guards.test.ts b/apps/cli/src/__tests__/lib/guards.test.ts new file mode 100644 index 0000000000..79751f8a44 --- /dev/null +++ b/apps/cli/src/__tests__/lib/guards.test.ts @@ -0,0 +1,45 @@ +import { describe, expect, test } from 'bun:test'; +import { asRecord, isRecord } from '../../lib/guards'; + +describe('isRecord', () => { + test('returns true for plain objects', () => { + expect(isRecord({})).toBe(true); + expect(isRecord({ a: 1 })).toBe(true); + expect(isRecord(Object.create(null))).toBe(true); + }); + + test('returns false for null', () => { + expect(isRecord(null)).toBe(false); + }); + + test('returns false for undefined', () => { + expect(isRecord(undefined)).toBe(false); + }); + + test('returns false for arrays', () => { + expect(isRecord([])).toBe(false); + expect(isRecord([1, 2, 3])).toBe(false); + }); + + test('returns false for primitives', () => { + expect(isRecord('string')).toBe(false); + expect(isRecord(42)).toBe(false); + expect(isRecord(true)).toBe(false); + expect(isRecord(Symbol())).toBe(false); + }); +}); + +describe('asRecord', () => { + test('returns the object for plain objects', () => { + const obj = { a: 1 }; + expect(asRecord(obj)).toBe(obj); + }); + + test('returns null for 
non-objects', () => { + expect(asRecord(null)).toBeNull(); + expect(asRecord(undefined)).toBeNull(); + expect(asRecord([])).toBeNull(); + expect(asRecord('string')).toBeNull(); + expect(asRecord(42)).toBeNull(); + }); +}); diff --git a/apps/cli/src/__tests__/lib/input-readers.test.ts b/apps/cli/src/__tests__/lib/input-readers.test.ts new file mode 100644 index 0000000000..a6e828993a --- /dev/null +++ b/apps/cli/src/__tests__/lib/input-readers.test.ts @@ -0,0 +1,162 @@ +import { describe, expect, test } from 'bun:test'; +import { + hasNonEmptyString, + normalizeJsonValue, + readBoolean, + readChangeMode, + readOptionalNumber, + readOptionalString, + readRequiredString, +} from '../../lib/input-readers'; +import { CliError } from '../../lib/errors'; + +describe('input-readers', () => { + describe('hasNonEmptyString', () => { + test('returns true for non-empty strings', () => { + expect(hasNonEmptyString('hello')).toBe(true); + expect(hasNonEmptyString('a')).toBe(true); + }); + + test('returns false for empty string', () => { + expect(hasNonEmptyString('')).toBe(false); + }); + + test('returns false for non-string values', () => { + expect(hasNonEmptyString(null)).toBe(false); + expect(hasNonEmptyString(undefined)).toBe(false); + expect(hasNonEmptyString(42)).toBe(false); + expect(hasNonEmptyString(true)).toBe(false); + expect(hasNonEmptyString({})).toBe(false); + }); + }); + + describe('readRequiredString', () => { + test('returns string value when present', () => { + expect(readRequiredString({ name: 'test' }, 'name', 'op')).toBe('test'); + }); + + test('throws MISSING_REQUIRED for missing field', () => { + try { + readRequiredString({}, 'name', 'op'); + expect.unreachable('should have thrown'); + } catch (error) { + expect(error).toBeInstanceOf(CliError); + expect((error as CliError).code).toBe('MISSING_REQUIRED'); + expect((error as CliError).message).toContain('input.name'); + } + }); + + test('throws MISSING_REQUIRED for empty string', () => { + expect(() => 
readRequiredString({ name: '' }, 'name', 'op')).toThrow(); + }); + + test('throws MISSING_REQUIRED for non-string value', () => { + expect(() => readRequiredString({ name: 42 }, 'name', 'op')).toThrow(); + }); + }); + + describe('readOptionalString', () => { + test('returns string value when present', () => { + expect(readOptionalString({ name: 'test' }, 'name')).toBe('test'); + }); + + test('returns undefined for missing field', () => { + expect(readOptionalString({}, 'name')).toBeUndefined(); + }); + + test('returns undefined for empty string', () => { + expect(readOptionalString({ name: '' }, 'name')).toBeUndefined(); + }); + + test('returns undefined for non-string value', () => { + expect(readOptionalString({ name: 42 }, 'name')).toBeUndefined(); + }); + }); + + describe('readOptionalNumber', () => { + test('returns number value when present', () => { + expect(readOptionalNumber({ count: 5 }, 'count')).toBe(5); + }); + + test('returns zero for zero', () => { + expect(readOptionalNumber({ count: 0 }, 'count')).toBe(0); + }); + + test('returns undefined for missing field', () => { + expect(readOptionalNumber({}, 'count')).toBeUndefined(); + }); + + test('returns undefined for NaN', () => { + expect(readOptionalNumber({ count: NaN }, 'count')).toBeUndefined(); + }); + + test('returns undefined for Infinity', () => { + expect(readOptionalNumber({ count: Infinity }, 'count')).toBeUndefined(); + }); + + test('returns undefined for non-number value', () => { + expect(readOptionalNumber({ count: '5' }, 'count')).toBeUndefined(); + }); + }); + + describe('readBoolean', () => { + test('returns true when field is true', () => { + expect(readBoolean({ flag: true }, 'flag')).toBe(true); + }); + + test('returns false when field is false', () => { + expect(readBoolean({ flag: false }, 'flag')).toBe(false); + }); + + test('returns false for missing field', () => { + expect(readBoolean({}, 'flag')).toBe(false); + }); + + test('returns false for truthy non-boolean values', () 
=> { + expect(readBoolean({ flag: 1 }, 'flag')).toBe(false); + expect(readBoolean({ flag: 'true' }, 'flag')).toBe(false); + }); + }); + + describe('readChangeMode', () => { + test('returns tracked when explicitly set', () => { + expect(readChangeMode({ changeMode: 'tracked' })).toBe('tracked'); + }); + + test('returns direct by default', () => { + expect(readChangeMode({})).toBe('direct'); + expect(readChangeMode({ changeMode: 'direct' })).toBe('direct'); + }); + + test('returns direct for unknown values', () => { + expect(readChangeMode({ changeMode: 'unknown' })).toBe('direct'); + }); + }); + + describe('normalizeJsonValue', () => { + test('round-trips JSON-serializable values', () => { + expect(normalizeJsonValue({ a: 1 }, 'test')).toEqual({ a: 1 }); + expect(normalizeJsonValue([1, 2, 3], 'test')).toEqual([1, 2, 3]); + expect(normalizeJsonValue('hello', 'test')).toBe('hello'); + }); + + test('strips undefined values from objects', () => { + const result = normalizeJsonValue({ a: 1, b: undefined }, 'test'); + expect(result).toEqual({ a: 1 }); + }); + + test('throws for non-serializable values', () => { + const circular: Record = {}; + circular.self = circular; + + try { + normalizeJsonValue(circular, 'test'); + expect.unreachable('should have thrown'); + } catch (error) { + expect(error).toBeInstanceOf(CliError); + expect((error as CliError).code).toBe('VALIDATION_ERROR'); + expect((error as CliError).message).toContain('JSON-serializable'); + } + }); + }); +}); diff --git a/apps/cli/src/__tests__/lib/manual-command-allowlist.test.ts b/apps/cli/src/__tests__/lib/manual-command-allowlist.test.ts new file mode 100644 index 0000000000..95880ffeb8 --- /dev/null +++ b/apps/cli/src/__tests__/lib/manual-command-allowlist.test.ts @@ -0,0 +1,68 @@ +import { describe, expect, test } from 'bun:test'; +import { readdirSync } from 'node:fs'; +import { MANUAL_COMMAND_ALLOWLIST, MANUAL_OPERATION_ALLOWLIST } from '../../lib/manual-command-allowlist'; +import { getLegacyRunner } 
from '../../lib/legacy-operation-dispatch'; +import { CLI_OPERATION_METADATA, type CliOperationId } from '../../cli'; + +describe('manual command allowlist', () => { + test('contains only lifecycle/session commands plus call', () => { + expect(MANUAL_COMMAND_ALLOWLIST).toEqual([ + 'call', + 'open', + 'save', + 'close', + 'session list', + 'session save', + 'session close', + 'session set-default', + 'session use', + ]); + }); + + test('operation allowlist contains only lifecycle/session operations', () => { + expect(MANUAL_OPERATION_ALLOWLIST).toEqual([ + 'doc.open', + 'doc.save', + 'doc.close', + 'doc.session.list', + 'doc.session.save', + 'doc.session.close', + 'doc.session.setDefault', + ]); + }); + + test('commands directory contains only allowlisted command handlers plus shared runners', () => { + const commandDirUrl = new URL('../../commands/', import.meta.url); + const actual = readdirSync(commandDirUrl) + .filter((entry) => entry.endsWith('.ts')) + .sort(); + + expect(actual).toEqual([ + 'call.ts', + 'close.ts', + 'install.ts', + 'legacy-compat.ts', + 'open.ts', + 'save.ts', + 'session-close.ts', + 'session-list.ts', + 'session-save.ts', + 'session-set-default.ts', + 'uninstall.ts', + ]); + }); + + test('legacy runner map is restricted to manual operation allowlist', () => { + const manualAllowlist = new Set(MANUAL_OPERATION_ALLOWLIST); + const operationIds = Object.keys(CLI_OPERATION_METADATA) as CliOperationId[]; + + for (const operationId of operationIds) { + const runner = getLegacyRunner(operationId); + if (manualAllowlist.has(operationId)) { + expect(runner).toBeDefined(); + } else { + expect(runner).toBeUndefined(); + } + } + }); +}); diff --git a/apps/cli/src/__tests__/lib/operation-invoker-coverage.test.ts b/apps/cli/src/__tests__/lib/operation-invoker-coverage.test.ts new file mode 100644 index 0000000000..ecdc598280 --- /dev/null +++ b/apps/cli/src/__tests__/lib/operation-invoker-coverage.test.ts @@ -0,0 +1,23 @@ +import { describe, expect, test 
} from 'bun:test'; +import { CLI_OPERATION_METADATA, isDocBackedOperation, type CliOperationId } from '../../cli'; +import { MANUAL_OPERATION_ALLOWLIST } from '../../lib/manual-command-allowlist'; +import { dispatchIntrospectionOperation } from '../../lib/introspection-dispatch'; + +const MANUAL_ALLOWLIST = new Set(MANUAL_OPERATION_ALLOWLIST); + +/** CLI-only introspection operations handled by dispatchIntrospectionOperation. */ +const INTROSPECTION_OPS = new Set(['doc.describe', 'doc.describeCommand', 'doc.status']); + +describe('operation invoker coverage', () => { + test('covers every non-allowlisted operation id', () => { + const operationIds = Object.keys(CLI_OPERATION_METADATA) as CliOperationId[]; + + for (const operationId of operationIds) { + if (MANUAL_ALLOWLIST.has(operationId)) continue; + if (INTROSPECTION_OPS.has(operationId)) continue; + + // All remaining operations must be doc-backed and handled by the generic dispatch + expect(isDocBackedOperation(operationId)).toBe(true); + } + }); +}); diff --git a/apps/cli/src/__tests__/lib/operation-runtime-metadata.test.ts b/apps/cli/src/__tests__/lib/operation-runtime-metadata.test.ts new file mode 100644 index 0000000000..611d100aa2 --- /dev/null +++ b/apps/cli/src/__tests__/lib/operation-runtime-metadata.test.ts @@ -0,0 +1,45 @@ +import { describe, expect, test } from 'bun:test'; +import { CLI_OPERATION_METADATA, type CliOperationId } from '../../cli'; +import { getOperationRuntimeMetadata } from '../../lib/operation-runtime-metadata'; + +describe('operation runtime metadata', () => { + test('covers every CLI operation id', () => { + const operationIds = Object.keys(CLI_OPERATION_METADATA) as CliOperationId[]; + for (const operationId of operationIds) { + const runtime = getOperationRuntimeMetadata(operationId); + expect(runtime.operationId).toBe(operationId); + expect(runtime.profile).toBeDefined(); + expect(runtime.context).toBeDefined(); + expect(runtime.traits).toBeDefined(); + } + }); + + test('marks 
lifecycle and session admin operations explicitly', () => { + expect(getOperationRuntimeMetadata('doc.open').profile).toBe('lifecycle'); + expect(getOperationRuntimeMetadata('doc.save').profile).toBe('lifecycle'); + expect(getOperationRuntimeMetadata('doc.close').profile).toBe('lifecycle'); + expect(getOperationRuntimeMetadata('doc.session.list').profile).toBe('sessionAdmin'); + expect(getOperationRuntimeMetadata('doc.session.save').profile).toBe('sessionAdmin'); + expect(getOperationRuntimeMetadata('doc.session.close').profile).toBe('sessionAdmin'); + expect(getOperationRuntimeMetadata('doc.session.setDefault').profile).toBe('sessionAdmin'); + }); + + test('derives mutation traits for text operations', () => { + const insert = getOperationRuntimeMetadata('doc.insert'); + expect(insert.profile).toBe('mutation'); + expect(insert.traits.supportsDryRun).toBe(true); + expect(insert.traits.supportsChangeMode).toBe(true); + expect(insert.traits.supportsExpectedRevision).toBe(true); + expect(insert.traits.requiresOutInStateless).toBe(true); + }); + + test('marks describe operations as stateless only', () => { + const describe = getOperationRuntimeMetadata('doc.describe'); + const describeCommand = getOperationRuntimeMetadata('doc.describeCommand'); + + expect(describe.context.supportsStateless).toBe(true); + expect(describe.context.supportsSession).toBe(false); + expect(describeCommand.context.supportsStateless).toBe(true); + expect(describeCommand.context.supportsSession).toBe(false); + }); +}); diff --git a/apps/cli/src/__tests__/lib/session.test.ts b/apps/cli/src/__tests__/lib/session.test.ts new file mode 100644 index 0000000000..051aaafca8 --- /dev/null +++ b/apps/cli/src/__tests__/lib/session.test.ts @@ -0,0 +1,78 @@ +import { describe, expect, test } from 'bun:test'; +import { generateSessionId, validateSessionId } from '../../lib/session'; +import { CliError } from '../../lib/errors'; + +describe('validateSessionId', () => { + test('accepts valid session ids', () 
=> { + expect(validateSessionId('my-session')).toBe('my-session'); + expect(validateSessionId('abc123')).toBe('abc123'); + expect(validateSessionId('file.docx')).toBe('file.docx'); + expect(validateSessionId('a_b-c.d')).toBe('a_b-c.d'); + expect(validateSessionId('x')).toBe('x'); + }); + + test('accepts ids up to 64 characters', () => { + const longId = 'a'.repeat(64); + expect(validateSessionId(longId)).toBe(longId); + }); + + test('rejects empty string', () => { + expect(() => validateSessionId('')).toThrow(CliError); + }); + + test('rejects ids over 64 characters', () => { + const tooLong = 'a'.repeat(65); + expect(() => validateSessionId(tooLong)).toThrow(CliError); + }); + + test('rejects ids with special characters', () => { + expect(() => validateSessionId('has space')).toThrow(CliError); + expect(() => validateSessionId('has/slash')).toThrow(CliError); + expect(() => validateSessionId('has@at')).toThrow(CliError); + expect(() => validateSessionId('has$dollar')).toThrow(CliError); + }); + + test('uses custom source in error message', () => { + try { + validateSessionId('bad id!', 'active session'); + expect.unreachable('should have thrown'); + } catch (error) { + expect(error).toBeInstanceOf(CliError); + expect((error as CliError).message).toContain('active session'); + } + }); +}); + +describe('generateSessionId', () => { + test('produces valid session ids', () => { + const id = generateSessionId('report.docx'); + expect(() => validateSessionId(id)).not.toThrow(); + }); + + test('derives base from file name', () => { + const id = generateSessionId('My Report.docx'); + expect(id).toMatch(/^my-report-[a-f0-9]{6}$/); + }); + + test('handles stdin marker', () => { + const id = generateSessionId('-'); + expect(id).toMatch(/^stdin-[a-f0-9]{6}$/); + }); + + test('handles file with no extension', () => { + const id = generateSessionId('README'); + expect(id).toMatch(/^readme-[a-f0-9]{6}$/); + }); + + test('respects max length', () => { + const longName = 
'a'.repeat(100) + '.docx'; + const id = generateSessionId(longName); + expect(id.length).toBeLessThanOrEqual(64); + expect(() => validateSessionId(id)).not.toThrow(); + }); + + test('produces unique ids', () => { + const ids = new Set(Array.from({ length: 20 }, () => generateSessionId('test.docx'))); + expect(ids.size).toBe(20); + }); +}); diff --git a/apps/cli/src/__tests__/lib/validate.test.ts b/apps/cli/src/__tests__/lib/validate.test.ts new file mode 100644 index 0000000000..7cff03d5d6 --- /dev/null +++ b/apps/cli/src/__tests__/lib/validate.test.ts @@ -0,0 +1,248 @@ +import { describe, expect, test } from 'bun:test'; +import { + validateNodeAddress, + validateTextAddress, + validateListItemAddress, + validateCreateParagraphInput, + validateQuery, + validateNodeKind, + isNodeType, + isBlockNodeType, +} from '../../lib/validate'; +import { CliError } from '../../lib/errors'; + +describe('validateTextAddress', () => { + test('validates a valid text address', () => { + const result = validateTextAddress({ + kind: 'text', + blockId: 'abc', + range: { start: 0, end: 5 }, + }); + expect(result).toEqual({ + kind: 'text', + blockId: 'abc', + range: { start: 0, end: 5 }, + }); + }); + + test('rejects non-text kind', () => { + expect(() => validateTextAddress({ kind: 'block', blockId: 'abc', range: { start: 0, end: 0 } })).toThrow(CliError); + }); + + test('rejects missing blockId', () => { + expect(() => validateTextAddress({ kind: 'text', range: { start: 0, end: 0 } })).toThrow(CliError); + }); + + test('rejects negative range values', () => { + expect(() => validateTextAddress({ kind: 'text', blockId: 'abc', range: { start: -1, end: 0 } })).toThrow(CliError); + }); + + test('rejects end < start', () => { + expect(() => validateTextAddress({ kind: 'text', blockId: 'abc', range: { start: 5, end: 3 } })).toThrow(CliError); + }); + + test('rejects non-object input', () => { + expect(() => validateTextAddress('not an object')).toThrow(CliError); + expect(() => 
validateTextAddress(null)).toThrow(CliError); + }); +}); + +describe('validateNodeAddress', () => { + test('validates a block address', () => { + const result = validateNodeAddress({ + kind: 'block', + nodeType: 'paragraph', + nodeId: 'p1', + }); + expect(result).toEqual({ kind: 'block', nodeType: 'paragraph', nodeId: 'p1' }); + }); + + test('validates an inline address', () => { + const result = validateNodeAddress({ + kind: 'inline', + nodeType: 'image', + anchor: { + start: { blockId: 'b1', offset: 0 }, + end: { blockId: 'b1', offset: 5 }, + }, + }); + expect(result.kind).toBe('inline'); + }); + + test('rejects unknown kind', () => { + expect(() => validateNodeAddress({ kind: 'unknown', nodeType: 'paragraph', nodeId: 'p1' })).toThrow(CliError); + }); + + test('rejects non-block node type for block address', () => { + expect(() => validateNodeAddress({ kind: 'block', nodeType: 'notAType', nodeId: 'p1' })).toThrow(CliError); + }); + + test('rejects inline anchor spanning multiple blocks', () => { + expect(() => + validateNodeAddress({ + kind: 'inline', + nodeType: 'image', + anchor: { + start: { blockId: 'b1', offset: 0 }, + end: { blockId: 'b2', offset: 5 }, + }, + }), + ).toThrow(CliError); + }); +}); + +describe('validateListItemAddress', () => { + test('validates a listItem address', () => { + const result = validateListItemAddress({ + kind: 'block', + nodeType: 'listItem', + nodeId: 'li1', + }); + expect(result.nodeType).toBe('listItem'); + }); + + test('rejects non-listItem block address', () => { + expect(() => validateListItemAddress({ kind: 'block', nodeType: 'paragraph', nodeId: 'p1' })).toThrow(CliError); + }); +}); + +describe('validateCreateParagraphInput', () => { + test('validates empty input (defaults)', () => { + const result = validateCreateParagraphInput({}); + expect(result).toEqual({}); + }); + + test('validates input with text', () => { + const result = validateCreateParagraphInput({ text: 'hello' }); + expect(result.text).toBe('hello'); + 
}); + + test('validates at: documentEnd', () => { + const result = validateCreateParagraphInput({ at: { kind: 'documentEnd' } }); + expect(result.at?.kind).toBe('documentEnd'); + }); + + test('validates at: before with block target', () => { + const result = validateCreateParagraphInput({ + at: { + kind: 'before', + target: { kind: 'block', nodeType: 'paragraph', nodeId: 'p1' }, + }, + }); + expect(result.at?.kind).toBe('before'); + }); + + test('rejects non-string text', () => { + expect(() => validateCreateParagraphInput({ text: 42 })).toThrow(CliError); + }); + + test('rejects unknown at.kind', () => { + expect(() => validateCreateParagraphInput({ at: { kind: 'middle' } })).toThrow(CliError); + }); +}); + +describe('validateQuery', () => { + test('validates a text query', () => { + const result = validateQuery({ + select: { type: 'text', pattern: 'hello', mode: 'contains' }, + }); + expect(result.select.type).toBe('text'); + }); + + test('validates a node query', () => { + const result = validateQuery({ + select: { type: 'node', nodeType: 'paragraph' }, + }); + expect(result.select.type).toBe('node'); + }); + + test('validates shorthand node type selector', () => { + const result = validateQuery({ + select: { type: 'paragraph' }, + }); + expect(result.select.type).toBe('node'); + expect((result.select as { nodeType?: string }).nodeType).toBe('paragraph'); + }); + + test('validates with limit and offset', () => { + const result = validateQuery({ + select: { type: 'paragraph' }, + limit: 10, + offset: 5, + }); + expect(result.limit).toBe(10); + expect(result.offset).toBe(5); + }); + + test('validates includeNodes', () => { + const result = validateQuery({ + select: { type: 'node', nodeType: 'paragraph' }, + includeNodes: true, + }); + expect(result.includeNodes).toBe(true); + }); + + test('rejects non-object input', () => { + expect(() => validateQuery('not an object')).toThrow(CliError); + }); + + test('rejects unknown selector type', () => { + expect(() => 
validateQuery({ select: { type: 'magic' } })).toThrow(CliError); + }); + + test('rejects invalid text query mode', () => { + expect(() => validateQuery({ select: { type: 'text', pattern: 'hello', mode: 'fuzzy' } })).toThrow(CliError); + }); + + test('rejects removed include field', () => { + expect(() => + validateQuery({ + select: { type: 'node', nodeType: 'paragraph' }, + include: ['nodes'], + }), + ).toThrow(CliError); + }); + + test('rejects non-boolean includeNodes', () => { + expect(() => + validateQuery({ + select: { type: 'node', nodeType: 'paragraph' }, + includeNodes: 'true', + }), + ).toThrow(CliError); + }); +}); + +describe('validateNodeKind', () => { + test('accepts "block"', () => { + expect(validateNodeKind('block', 'test')).toBe('block'); + }); + + test('accepts "inline"', () => { + expect(validateNodeKind('inline', 'test')).toBe('inline'); + }); + + test('rejects unknown kind', () => { + expect(() => validateNodeKind('other', 'test')).toThrow(CliError); + }); +}); + +describe('isNodeType / isBlockNodeType', () => { + test('isNodeType recognizes valid types', () => { + expect(isNodeType('paragraph')).toBe(true); + expect(isNodeType('table')).toBe(true); + }); + + test('isNodeType rejects invalid types', () => { + expect(isNodeType('notAType')).toBe(false); + }); + + test('isBlockNodeType recognizes block types', () => { + expect(isBlockNodeType('paragraph')).toBe(true); + expect(isBlockNodeType('table')).toBe(true); + }); + + test('isBlockNodeType rejects non-block types', () => { + expect(isBlockNodeType('notAType')).toBe(false); + }); +}); diff --git a/apps/cli/src/__tests__/multi-range.test.ts b/apps/cli/src/__tests__/multi-range.test.ts deleted file mode 100644 index 7afc9ad42c..0000000000 --- a/apps/cli/src/__tests__/multi-range.test.ts +++ /dev/null @@ -1,108 +0,0 @@ -import { describe, expect, test, beforeAll, afterAll } from 'bun:test'; -import { copyFile, rm, mkdir } from 'node:fs/promises'; -import { join } from 'node:path'; -import { - 
openDocument, - closeDocument, - searchDocument, - replaceInDocument, - saveDocument, - getDocumentText, -} from '../lib/editor'; - -const TEST_DIR = join(import.meta.dir, 'fixtures-multi-range'); -const SAMPLE_DOC = join(TEST_DIR, 'sample.docx'); - -describe('Multi-range replacement', () => { - beforeAll(async () => { - await mkdir(TEST_DIR, { recursive: true }); - const sourceDoc = join(import.meta.dir, '../../../../e2e-tests/test-data/basic-documents/advanced-text.docx'); - await copyFile(sourceDoc, SAMPLE_DOC); - }); - - afterAll(async () => { - await rm(TEST_DIR, { recursive: true, force: true }); - }); - - test('searchDocument returns matches with ranges property', async () => { - const doc = await openDocument(SAMPLE_DOC); - try { - const matches = searchDocument(doc, 'Oscar'); - - expect(matches.length).toBeGreaterThan(0); - - // All matches should have the ranges property - for (const match of matches) { - expect(match).toHaveProperty('ranges'); - expect(match.ranges).toBeArray(); - expect(match.ranges!.length).toBeGreaterThan(0); - } - } finally { - closeDocument(doc); - } - }); - - test('replacement with single-range matches works', async () => { - const testCopy = join(TEST_DIR, 'single-range-replace.docx'); - await copyFile(SAMPLE_DOC, testCopy); - - const doc = await openDocument(testCopy); - try { - const beforeText = getDocumentText(doc); - const oscarCount = (beforeText.match(/Oscar/g) || []).length; - - // Replace Oscar with OSCAR - const replacements = replaceInDocument(doc, 'Oscar', 'OSCAR'); - expect(replacements).toBe(oscarCount); - - await saveDocument(doc); - - // Re-open to verify - closeDocument(doc); - const doc2 = await openDocument(testCopy); - const afterText = getDocumentText(doc2); - const oscarAfter = (afterText.match(/Oscar/g) || []).length; - const OSCARAfter = (afterText.match(/OSCAR/g) || []).length; - - expect(oscarAfter).toBe(0); - expect(OSCARAfter).toBe(oscarCount); - - closeDocument(doc2); - } catch (e) { - 
closeDocument(doc); - throw e; - } - - await rm(testCopy); - }); - - test('multi-range match handling preserves document structure', async () => { - const testCopy = join(TEST_DIR, 'multi-range-logic.docx'); - await copyFile(SAMPLE_DOC, testCopy); - - const doc = await openDocument(testCopy); - try { - const beforeText = getDocumentText(doc); - const wildeCount = (beforeText.match(/Wilde/g) || []).length; - - const replacements = replaceInDocument(doc, 'Wilde', 'WILDE'); - expect(replacements).toBe(wildeCount); - - await saveDocument(doc); - } finally { - closeDocument(doc); - } - - // Verify the replacement - const doc2 = await openDocument(testCopy); - try { - const afterText = getDocumentText(doc2); - expect(afterText).not.toContain('Wilde'); - expect(afterText).toContain('WILDE'); - } finally { - closeDocument(doc2); - } - - await rm(testCopy); - }); -}); diff --git a/apps/cli/src/__tests__/setup.ts b/apps/cli/src/__tests__/setup.ts new file mode 100644 index 0000000000..d9964016c5 --- /dev/null +++ b/apps/cli/src/__tests__/setup.ts @@ -0,0 +1,2 @@ +// Ensure telemetry is disabled in test environments +process.env.NODE_ENV = 'test'; diff --git a/apps/cli/src/cli/__tests__/parity.test.ts b/apps/cli/src/cli/__tests__/parity.test.ts new file mode 100644 index 0000000000..5b4c7c5c16 --- /dev/null +++ b/apps/cli/src/cli/__tests__/parity.test.ts @@ -0,0 +1,88 @@ +import { describe, expect, test } from 'bun:test'; +import { isOperationId } from '@superdoc/document-api'; +import { + CLI_COMMAND_SPECS, + CLI_DOC_OPERATIONS, + CLI_ONLY_OPERATIONS, + CLI_OPERATION_IDS, + getResponseSchema, + isDocBackedOperation, + type CliOperationId, +} from '../index'; +import { buildContractOperationDetail, buildContractOverview } from '../../lib/contract'; +import { MANUAL_OPERATION_ALLOWLIST } from '../../lib/manual-command-allowlist'; + +const INTROSPECTION_OPS = new Set(['doc.describe', 'doc.describeCommand', 'doc.status']); + +describe('cli parity', () => { + test('canonical 
operation set matches CLI_DOC_OPERATIONS + CLI_ONLY_OPERATIONS', () => { + const expected = new Set([ + ...CLI_DOC_OPERATIONS.map((id) => `doc.${id}` as CliOperationId), + ...CLI_ONLY_OPERATIONS.map((id) => `doc.${id}` as CliOperationId), + ]); + + expect(new Set(CLI_OPERATION_IDS)).toEqual(expected); + expect(CLI_OPERATION_IDS).toHaveLength(expected.size); + }); + + test('every CLI doc-backed operation is a valid document-api operation id', () => { + for (const operationId of CLI_DOC_OPERATIONS) { + expect(isOperationId(operationId)).toBe(true); + } + }); + + test('every non-manual canonical operation is handled by doc-backed generic dispatch', () => { + const manualOps = new Set(MANUAL_OPERATION_ALLOWLIST); + const canonicalSpecs = CLI_COMMAND_SPECS.filter((spec) => !spec.alias); + + for (const spec of canonicalSpecs) { + const operationId = spec.operationId as CliOperationId; + if (manualOps.has(operationId)) continue; + if (INTROSPECTION_OPS.has(operationId)) continue; + expect(isDocBackedOperation(operationId)).toBe(true); + } + }); + + test('every canonical operation has a response schema', () => { + for (const spec of CLI_COMMAND_SPECS) { + if (spec.alias) continue; + expect(getResponseSchema(spec.operationId)).not.toBeNull(); + } + }); + + test('alias specs resolve to canonical command specs', () => { + const canonicalByKey = new Map( + CLI_COMMAND_SPECS.filter((spec) => !spec.alias).map((spec) => [spec.key, spec] as const), + ); + + for (const aliasSpec of CLI_COMMAND_SPECS.filter((spec) => spec.alias)) { + const canonical = canonicalByKey.get(aliasSpec.canonicalKey); + expect(canonical).toBeDefined(); + expect(canonical?.operationId).toBe(aliasSpec.operationId); + } + }); + + test('contract overview shape matches canonical CLI operation set', () => { + const canonicalSpecs = CLI_COMMAND_SPECS.filter((spec) => !spec.alias); + const overview = buildContractOverview(); + + expect(overview.operations).toHaveLength(canonicalSpecs.length); + 
expect(overview.operationCount).toBe(canonicalSpecs.length); + expect(overview.contractVersion.length).toBeGreaterThan(0); + }); + + test('contract operation detail resolves by id, command key, and alias', () => { + for (const spec of CLI_COMMAND_SPECS.filter((candidate) => !candidate.alias)) { + const byId = buildContractOperationDetail(spec.operationId); + expect(byId?.operation.id).toBe(spec.operationId); + + const byCommand = buildContractOperationDetail(spec.key); + expect(byCommand?.operation.id).toBe(spec.operationId); + } + + for (const aliasSpec of CLI_COMMAND_SPECS.filter((candidate) => candidate.alias)) { + const byAlias = buildContractOperationDetail(aliasSpec.key); + expect(byAlias?.operation.id).toBe(aliasSpec.operationId); + } + }); +}); diff --git a/apps/cli/src/cli/commands.ts b/apps/cli/src/cli/commands.ts new file mode 100644 index 0000000000..2c10e93da8 --- /dev/null +++ b/apps/cli/src/cli/commands.ts @@ -0,0 +1,208 @@ +/** + * CLI command routing table — derives from document-api + CLI operation set. + * + * For doc-backed operations, metadata is inherited from document-api. + * Only CLI-only operations and aliases are hand-written. 
+ */ + +import { COMMAND_CATALOG } from '@superdoc/document-api'; +import type { CliCommandSpec } from './types'; +import { + CLI_DOC_OPERATIONS, + CLI_ONLY_OPERATIONS, + CLI_OPERATION_IDS, + cliCategory, + cliCommandTokens, + cliDescription, + cliRequiresDocumentContext, + toDocApiId, + type CliOperationId, +} from './operation-set'; + +// --------------------------------------------------------------------------- +// Build command specs for doc-backed operations +// --------------------------------------------------------------------------- + +function buildDocBackedSpec(docApiId: string, cliOpId: CliOperationId): CliCommandSpec { + const tokens = cliCommandTokens(cliOpId); + const key = tokens.join(' '); + const catalog = COMMAND_CATALOG[docApiId as keyof typeof COMMAND_CATALOG]; + + return { + key, + tokens, + operationId: cliOpId, + category: cliCategory(cliOpId), + description: cliDescription(cliOpId), + mutates: catalog.mutates, + requiresDocumentContext: cliRequiresDocumentContext(cliOpId), + alias: false, + canonicalKey: key, + examples: [], + }; +} + +// --------------------------------------------------------------------------- +// CLI-only operation specs (hand-written) +// --------------------------------------------------------------------------- + +type CliOnlySpecOverride = { + mutates: boolean; + examples?: readonly string[]; +}; + +const CLI_ONLY_OVERRIDES: Record = { + open: { mutates: true, examples: ['superdoc open my-doc.docx', 'superdoc open my-doc.docx --session my-session'] }, + save: { mutates: true, examples: ['superdoc save', 'superdoc save --out copy.docx'] }, + close: { mutates: true, examples: ['superdoc close'] }, + status: { mutates: false, examples: ['superdoc status'] }, + describe: { mutates: false, examples: ['superdoc describe'] }, + describeCommand: { mutates: false, examples: ['superdoc describe command find'] }, + 'session.list': { mutates: false, examples: ['superdoc session list'] }, + 'session.save': { + mutates: true, + 
examples: ['superdoc session save my-session', 'superdoc session save --session my-session --out copy.docx'], + }, + 'session.close': { + mutates: true, + examples: ['superdoc session close my-session', 'superdoc session close --session my-session --discard'], + }, + 'session.setDefault': { + mutates: true, + examples: ['superdoc session set-default my-session', 'superdoc session set-default --session my-session'], + }, +}; + +function buildCliOnlySpec(cliOnlyOp: string, cliOpId: CliOperationId): CliCommandSpec { + const tokens = cliCommandTokens(cliOpId); + const key = tokens.join(' '); + const override = CLI_ONLY_OVERRIDES[cliOnlyOp] ?? { mutates: false }; + + return { + key, + tokens, + operationId: cliOpId, + category: cliCategory(cliOpId), + description: cliDescription(cliOpId), + mutates: override.mutates, + requiresDocumentContext: cliRequiresDocumentContext(cliOpId), + alias: false, + canonicalKey: key, + examples: override.examples ?? [], + }; +} + +// --------------------------------------------------------------------------- +// Alias specs +// --------------------------------------------------------------------------- + +const ALIAS_SPECS: CliCommandSpec[] = [ + { + key: 'session use', + tokens: ['session', 'use'], + operationId: 'doc.session.setDefault', + category: 'session', + description: 'Alias for session set-default.', + mutates: true, + requiresDocumentContext: false, + alias: true, + canonicalKey: 'session set-default', + examples: ['superdoc session use my-session', 'superdoc session use --session my-session'], + }, +]; + +// --------------------------------------------------------------------------- +// Build and export +// --------------------------------------------------------------------------- + +function buildAllSpecs(): CliCommandSpec[] { + const specs: CliCommandSpec[] = []; + + for (const docApiId of CLI_DOC_OPERATIONS) { + const cliOpId = `doc.${docApiId}` as CliOperationId; + specs.push(buildDocBackedSpec(docApiId, cliOpId)); + } + 
+ for (const cliOnlyOp of CLI_ONLY_OPERATIONS) { + const cliOpId = `doc.${cliOnlyOp}` as CliOperationId; + specs.push(buildCliOnlySpec(cliOnlyOp, cliOpId)); + } + + specs.push(...ALIAS_SPECS); + + return specs; +} + +export const CLI_COMMAND_SPECS: readonly CliCommandSpec[] = buildAllSpecs(); + +export type CliCommandKey = (typeof CLI_COMMAND_SPECS)[number]['key']; + +export const CLI_COMMAND_KEYS: readonly string[] = CLI_COMMAND_SPECS.map((spec) => spec.key); + +export const CLI_MAX_COMMAND_TOKENS: number = Math.max(...CLI_COMMAND_SPECS.map((spec) => spec.tokens.length)); + +// --------------------------------------------------------------------------- +// Help text +// --------------------------------------------------------------------------- + +function buildHelpText(): string { + const lines: string[] = ['Usage: superdoc [options]', '']; + + const categories = new Map(); + for (const spec of CLI_COMMAND_SPECS) { + if (spec.alias) continue; + const list = categories.get(spec.category) ?? []; + list.push(spec); + categories.set(spec.category, list); + } + + const categoryOrder = [ + 'query', + 'mutation', + 'format', + 'create', + 'lists', + 'comments', + 'trackChanges', + 'lifecycle', + 'session', + 'introspection', + ]; + + for (const category of categoryOrder) { + const specs = categories.get(category); + if (!specs || specs.length === 0) continue; + + lines.push(`${category}:`); + const maxKey = Math.max(...specs.map((spec) => spec.key.length)); + for (const spec of specs) { + lines.push(` ${spec.key.padEnd(maxKey)} ${spec.description}`); + } + lines.push(''); + } + + return lines.join('\n').trimEnd(); +} + +export const CLI_HELP: string = buildHelpText(); + +// --------------------------------------------------------------------------- +// Lookup maps +// --------------------------------------------------------------------------- + +/** Maps CliOperationId → CLI command key. 
*/ +const CANONICAL_SPEC_BY_OPERATION = new Map( + CLI_COMMAND_SPECS.filter((spec) => !spec.alias).map((spec) => [spec.operationId as CliOperationId, spec] as const), +); + +export const CLI_OPERATION_COMMAND_KEYS: Record = Object.fromEntries( + CLI_OPERATION_IDS.map((operationId) => { + const spec = CANONICAL_SPEC_BY_OPERATION.get(operationId); + if (!spec) { + throw new Error(`Missing canonical command spec for operation: ${operationId}`); + } + return [operationId, spec.key] as const; + }), +) as Record; + +export { CLI_OPERATION_IDS, toDocApiId, type CliOperationId } from './operation-set'; diff --git a/apps/cli/src/cli/index.ts b/apps/cli/src/cli/index.ts new file mode 100644 index 0000000000..ad363683f8 --- /dev/null +++ b/apps/cli/src/cli/index.ts @@ -0,0 +1,60 @@ +/** + * CLI metadata barrel export. + * + * All metadata is derived from `@superdoc/document-api` at init time. + */ + +// Types +export type { + CliTypeSpec, + CliOperationParamSpec, + CliOperationConstraints, + CliOperationMetadata, + CliOperationOptionSpec, + CliCommandSpec, + CliOperationArgsById, +} from './types'; + +// Operation set +export { + CLI_DOC_OPERATIONS, + CLI_ONLY_OPERATIONS, + CLI_OPERATION_IDS, + type CliOperationId, + type CliExposedOperationId, + type DocBackedCliOpId, + toDocApiId, + isDocBackedOperation, + cliCategory, + cliDescription, + cliRequiresDocumentContext, + cliCommandTokens, + type CliCategory, +} from './operation-set'; + +// Operation hints (CLI-local metadata tables) +export { + orchestrationKind, + SUCCESS_VERB, + OUTPUT_FORMAT, + RESPONSE_ENVELOPE_KEY, + OPERATION_FAMILY, + type OutputFormat, + type OperationFamily, +} from './operation-hints'; + +// Commands +export { + CLI_COMMAND_SPECS, + CLI_COMMAND_KEYS, + CLI_MAX_COMMAND_TOKENS, + CLI_HELP, + CLI_OPERATION_COMMAND_KEYS, + type CliCommandKey, +} from './commands'; + +// Operation params +export { CLI_OPERATION_METADATA, CLI_OPERATION_OPTION_SPECS } from './operation-params'; + +// Response schemas 
+export { getResponseSchema } from './response-schemas'; diff --git a/apps/cli/src/cli/operation-hints.ts b/apps/cli/src/cli/operation-hints.ts new file mode 100644 index 0000000000..4082528a87 --- /dev/null +++ b/apps/cli/src/cli/operation-hints.ts @@ -0,0 +1,252 @@ +/** + * CLI-local metadata for each exposed doc-backed operation. + * + * Drives the generic dispatch path — orchestrator selection, success messaging, + * output formatting, response envelope key, and error-mapping family. + * + * All tables are keyed by CliExposedOperationId. A missing entry is a compile + * error — TypeScript enforces completeness. When a new operation is added to + * OPERATION_DEFINITIONS, the CLI requires only a one-line entry in each table. + */ + +import { COMMAND_CATALOG } from '@superdoc/document-api'; +import type { CliExposedOperationId } from './operation-set.js'; + +// --------------------------------------------------------------------------- +// Orchestration kind (derived from COMMAND_CATALOG) +// --------------------------------------------------------------------------- + +/** Which orchestrator to use: read or mutation. Derived from COMMAND_CATALOG. */ +export function orchestrationKind(opId: CliExposedOperationId): 'read' | 'mutation' { + return COMMAND_CATALOG[opId].mutates ? 'mutation' : 'read'; +} + +// --------------------------------------------------------------------------- +// Success verb (past-tense for pretty output) +// --------------------------------------------------------------------------- + +/** Past-tense verb for success messages. 
*/
+export const SUCCESS_VERB: Record<CliExposedOperationId, string> = {
+ find: 'completed search',
+ getNode: 'resolved node',
+ getNodeById: 'resolved node',
+ info: 'retrieved info',
+ insert: 'inserted text',
+ replace: 'replaced text',
+ delete: 'deleted text',
+ 'format.bold': 'applied bold',
+ 'format.italic': 'applied italic',
+ 'format.underline': 'applied underline',
+ 'format.strikethrough': 'applied strikethrough',
+ 'create.paragraph': 'created paragraph',
+ 'lists.list': 'listed items',
+ 'lists.get': 'resolved list item',
+ 'lists.insert': 'inserted list item',
+ 'lists.setType': 'set list type',
+ 'lists.indent': 'indented list item',
+ 'lists.outdent': 'outdented list item',
+ 'lists.restart': 'restarted list numbering',
+ 'lists.exit': 'exited list item',
+ 'comments.add': 'added comment',
+ 'comments.edit': 'edited comment',
+ 'comments.reply': 'replied to comment',
+ 'comments.move': 'moved comment',
+ 'comments.resolve': 'resolved comment',
+ 'comments.remove': 'removed comment',
+ 'comments.setInternal': 'set comment internal flag',
+ 'comments.setActive': 'set active comment',
+ 'comments.goTo': 'focused comment',
+ 'comments.get': 'resolved comment',
+ 'comments.list': 'listed comments',
+ 'trackChanges.list': 'listed tracked changes',
+ 'trackChanges.get': 'resolved tracked change',
+ 'trackChanges.accept': 'accepted tracked change',
+ 'trackChanges.reject': 'rejected tracked change',
+ 'trackChanges.acceptAll': 'accepted all tracked changes',
+ 'trackChanges.rejectAll': 'rejected all tracked changes',
+};
+
+// ---------------------------------------------------------------------------
+// Output format (selects the pretty-printer)
+// ---------------------------------------------------------------------------
+
+export type OutputFormat =
+ | 'queryResult'
+ | 'nodeInfo'
+ | 'mutationReceipt'
+ | 'createResult'
+ | 'listResult'
+ | 'listItemInfo'
+ | 'listsMutationResult'
+ | 'commentInfo'
+ | 'commentList'
+ | 'commentReceipt'
+ | 'trackChangeInfo'
+ | 
'trackChangeList'
+ | 'trackChangeMutationReceipt'
+ | 'documentInfo'
+ | 'receipt'
+ | 'plain'
+ | 'void';
+
+export const OUTPUT_FORMAT: Record<CliExposedOperationId, OutputFormat> = {
+ find: 'queryResult',
+ getNode: 'nodeInfo',
+ getNodeById: 'nodeInfo',
+ info: 'documentInfo',
+ insert: 'mutationReceipt',
+ replace: 'mutationReceipt',
+ delete: 'mutationReceipt',
+ 'format.bold': 'mutationReceipt',
+ 'format.italic': 'mutationReceipt',
+ 'format.underline': 'mutationReceipt',
+ 'format.strikethrough': 'mutationReceipt',
+ 'create.paragraph': 'createResult',
+ 'lists.list': 'listResult',
+ 'lists.get': 'listItemInfo',
+ 'lists.insert': 'listsMutationResult',
+ 'lists.setType': 'listsMutationResult',
+ 'lists.indent': 'listsMutationResult',
+ 'lists.outdent': 'listsMutationResult',
+ 'lists.restart': 'listsMutationResult',
+ 'lists.exit': 'listsMutationResult',
+ 'comments.add': 'commentReceipt',
+ 'comments.edit': 'commentReceipt',
+ 'comments.reply': 'commentReceipt',
+ 'comments.move': 'commentReceipt',
+ 'comments.resolve': 'commentReceipt',
+ 'comments.remove': 'commentReceipt',
+ 'comments.setInternal': 'commentReceipt',
+ 'comments.setActive': 'commentReceipt',
+ 'comments.goTo': 'commentReceipt',
+ 'comments.get': 'commentInfo',
+ 'comments.list': 'commentList',
+ 'trackChanges.list': 'trackChangeList',
+ 'trackChanges.get': 'trackChangeInfo',
+ 'trackChanges.accept': 'trackChangeMutationReceipt',
+ 'trackChanges.reject': 'trackChangeMutationReceipt',
+ 'trackChanges.acceptAll': 'trackChangeMutationReceipt',
+ 'trackChanges.rejectAll': 'trackChangeMutationReceipt',
+};
+
+// ---------------------------------------------------------------------------
+// Response envelope key (single source of truth)
+// ---------------------------------------------------------------------------
+
+/**
+ * Envelope key where the doc-api result payload lives in the CLI response.
+ * This is the SINGLE SOURCE OF TRUTH — used by both orchestrators
+ * and validateOperationResponseData(). 
+ *
+ * `null` means the result is spread across multiple top-level keys (e.g. info).
+ */
+export const RESPONSE_ENVELOPE_KEY: Record<CliExposedOperationId, string | null> = {
+ find: 'result',
+ getNode: 'node',
+ getNodeById: 'node',
+ info: null,
+ insert: null,
+ replace: null,
+ delete: null,
+ 'format.bold': null,
+ 'format.italic': null,
+ 'format.underline': null,
+ 'format.strikethrough': null,
+ 'create.paragraph': 'result',
+ 'lists.list': 'result',
+ 'lists.get': 'item',
+ 'lists.insert': 'result',
+ 'lists.setType': 'result',
+ 'lists.indent': 'result',
+ 'lists.outdent': 'result',
+ 'lists.restart': 'result',
+ 'lists.exit': 'result',
+ 'comments.add': 'receipt',
+ 'comments.edit': 'receipt',
+ 'comments.reply': 'receipt',
+ 'comments.move': 'receipt',
+ 'comments.resolve': 'receipt',
+ 'comments.remove': 'receipt',
+ 'comments.setInternal': 'receipt',
+ 'comments.setActive': 'receipt',
+ 'comments.goTo': 'receipt',
+ 'comments.get': 'comment',
+ 'comments.list': 'result',
+ 'trackChanges.list': 'result',
+ 'trackChanges.get': 'change',
+ 'trackChanges.accept': 'receipt',
+ 'trackChanges.reject': 'receipt',
+ 'trackChanges.acceptAll': 'receipt',
+ 'trackChanges.rejectAll': 'receipt',
+};
+
+// ---------------------------------------------------------------------------
+// Response validation key (fallback for null envelope keys)
+// ---------------------------------------------------------------------------
+
+/**
+ * When RESPONSE_ENVELOPE_KEY is `null` (result is spread across top-level keys),
+ * this map specifies which key to validate against the doc-api output schema.
+ *
+ * Operations without an entry here AND a null envelope key skip schema validation
+ * (e.g. `info`, which splits output across counts/outline/capabilities). 
+ */
+export const RESPONSE_VALIDATION_KEY: Partial<Record<CliExposedOperationId, string>> = {
+ insert: 'receipt',
+ replace: 'receipt',
+ delete: 'receipt',
+ 'format.bold': 'receipt',
+ 'format.italic': 'receipt',
+ 'format.underline': 'receipt',
+ 'format.strikethrough': 'receipt',
+};
+
+// ---------------------------------------------------------------------------
+// Operation family (determines error-mapping rules)
+// ---------------------------------------------------------------------------
+
+/**
+ * Operation family — determines which error-mapping rules apply.
+ * Explicit Record for compile-time completeness (no string-prefix heuristics).
+ */
+export type OperationFamily = 'trackChanges' | 'comments' | 'lists' | 'textMutation' | 'create' | 'query' | 'general';
+
+export const OPERATION_FAMILY: Record<CliExposedOperationId, OperationFamily> = {
+ find: 'query',
+ getNode: 'query',
+ getNodeById: 'query',
+ info: 'general',
+ insert: 'textMutation',
+ replace: 'textMutation',
+ delete: 'textMutation',
+ 'format.bold': 'textMutation',
+ 'format.italic': 'textMutation',
+ 'format.underline': 'textMutation',
+ 'format.strikethrough': 'textMutation',
+ 'create.paragraph': 'create',
+ 'lists.list': 'lists',
+ 'lists.get': 'lists',
+ 'lists.insert': 'lists',
+ 'lists.setType': 'lists',
+ 'lists.indent': 'lists',
+ 'lists.outdent': 'lists',
+ 'lists.restart': 'lists',
+ 'lists.exit': 'lists',
+ 'comments.add': 'comments',
+ 'comments.edit': 'comments',
+ 'comments.reply': 'comments',
+ 'comments.move': 'comments',
+ 'comments.resolve': 'comments',
+ 'comments.remove': 'comments',
+ 'comments.setInternal': 'comments',
+ 'comments.setActive': 'comments',
+ 'comments.goTo': 'comments',
+ 'comments.get': 'comments',
+ 'comments.list': 'comments',
+ 'trackChanges.list': 'trackChanges',
+ 'trackChanges.get': 'trackChanges',
+ 'trackChanges.accept': 'trackChanges',
+ 'trackChanges.reject': 'trackChanges',
+ 'trackChanges.acceptAll': 'trackChanges',
+ 'trackChanges.rejectAll': 'trackChanges',
+}; diff --git 
a/apps/cli/src/cli/operation-params.ts b/apps/cli/src/cli/operation-params.ts new file mode 100644 index 0000000000..907d8812af --- /dev/null +++ b/apps/cli/src/cli/operation-params.ts @@ -0,0 +1,545 @@ +/** + * Per-operation CLI param metadata — derived from document-api input schemas. + * + * For doc-backed operations, param specs are derived at init time from + * `buildInternalContractSchemas()` input schemas. The CLI only hand-writes: + * - Envelope params (session, out, force, dry-run, change-mode, expected-revision) + * - Constraints (mutuallyExclusive, requiresOneOf) for a handful of ops + * - Positional overrides (describeCommand) + * - CLI-only operation metadata (10 ops) + */ + +import { + buildInternalContractSchemas, + COMMAND_CATALOG, + OPERATION_REQUIRES_DOCUMENT_CONTEXT_MAP, + type OperationId, +} from '@superdoc/document-api'; +import type { + CliOperationConstraints, + CliOperationMetadata, + CliOperationOptionSpec, + CliOperationParamSpec, + CliTypeSpec, +} from './types'; +import { + CLI_DOC_OPERATIONS, + CLI_ONLY_OPERATIONS, + CLI_OPERATION_IDS, + type CliOperationId, + type CliOnlyOperation, + type DocBackedCliOpId, +} from './operation-set'; +import { CLI_OPERATION_COMMAND_KEYS } from './commands'; + +// --------------------------------------------------------------------------- +// Envelope param templates (CLI transport — not in document-api) +// --------------------------------------------------------------------------- + +const DOC_PARAM: CliOperationParamSpec = { name: 'doc', kind: 'doc', type: 'string' }; +const SESSION_PARAM: CliOperationParamSpec = { name: 'sessionId', kind: 'flag', flag: 'session', type: 'string' }; +const OUT_PARAM: CliOperationParamSpec = { name: 'out', kind: 'flag', type: 'string' }; +const FORCE_PARAM: CliOperationParamSpec = { name: 'force', kind: 'flag', type: 'boolean' }; +const DRY_RUN_PARAM: CliOperationParamSpec = { name: 'dryRun', kind: 'flag', flag: 'dry-run', type: 'boolean' }; +const CHANGE_MODE_PARAM: 
CliOperationParamSpec = { + name: 'changeMode', + kind: 'flag', + flag: 'change-mode', + type: 'string', + schema: { oneOf: [{ const: 'direct' }, { const: 'tracked' }] } as CliTypeSpec, +}; +const EXPECTED_REVISION_PARAM: CliOperationParamSpec = { + name: 'expectedRevision', + kind: 'flag', + flag: 'expected-revision', + type: 'number', +}; + +// --------------------------------------------------------------------------- +// Schema → param derivation +// --------------------------------------------------------------------------- + +type JsonSchema = Record; + +function schemaToParamType(schema: JsonSchema): CliOperationParamSpec['type'] { + if (schema.type === 'string') return 'string'; + if (schema.type === 'number' || schema.type === 'integer') return 'number'; + if (schema.type === 'boolean') return 'boolean'; + if (schema.type === 'array' && (schema.items as JsonSchema | undefined)?.type === 'string') return 'string[]'; + // Enums and oneOf-const are string enums + if (schema.enum && Array.isArray(schema.enum)) return 'string'; + if (schema.oneOf && Array.isArray(schema.oneOf) && (schema.oneOf as JsonSchema[]).every((v) => 'const' in v)) + return 'string'; + return 'json'; +} + +function isSimpleType(schema: JsonSchema): boolean { + const t = schema.type; + if (t === 'string' || t === 'number' || t === 'integer' || t === 'boolean') return true; + // Enums without explicit type are string enums + if (schema.enum && Array.isArray(schema.enum)) return true; + // oneOf with all const values is a string enum + if (schema.oneOf && Array.isArray(schema.oneOf)) { + const allConst = (schema.oneOf as JsonSchema[]).every((v) => 'const' in v); + if (allConst) return true; + } + return false; +} + +function jsonSchemaToTypeSpec(schema: JsonSchema): CliTypeSpec { + if ('const' in schema) return { const: schema.const } as CliTypeSpec; + + if (schema.oneOf) { + return { + oneOf: (schema.oneOf as JsonSchema[]).map(jsonSchemaToTypeSpec), + } as CliTypeSpec; + } + + if 
(schema.enum && Array.isArray(schema.enum)) { + return { + oneOf: (schema.enum as unknown[]).map((v) => ({ const: v }) as CliTypeSpec), + } as CliTypeSpec; + } + + if (schema.type === 'string') return { type: 'string' } as CliTypeSpec; + if (schema.type === 'number' || schema.type === 'integer') return { type: 'number' } as CliTypeSpec; + if (schema.type === 'boolean') return { type: 'boolean' } as CliTypeSpec; + + if (schema.type === 'array') { + const items = (schema.items as JsonSchema) ?? {}; + return { type: 'array', items: jsonSchemaToTypeSpec(items) } as CliTypeSpec; + } + + if (schema.type === 'object') { + const properties: Record = {}; + for (const [key, propSchema] of Object.entries((schema.properties as Record) ?? {})) { + properties[key] = jsonSchemaToTypeSpec(propSchema); + } + const result: CliTypeSpec = { type: 'object', properties } as CliTypeSpec; + if (schema.required && Array.isArray(schema.required)) { + (result as { required: readonly string[] }).required = schema.required as string[]; + } + return result; + } + + return { type: 'json' } as CliTypeSpec; +} + +function deriveParamsFromInputSchema(inputSchema: JsonSchema): { + params: CliOperationParamSpec[]; + positionalParams: string[]; +} { + const params: CliOperationParamSpec[] = []; + const positionalParams: string[] = []; + const properties = (inputSchema.properties ?? {}) as Record; + const required = new Set((inputSchema.required as string[]) ?? []); + + for (const [name, propSchema] of Object.entries(properties)) { + const paramType = schemaToParamType(propSchema); + const isComplex = !isSimpleType(propSchema) && paramType === 'json'; + + const flagBase = camelToKebab(name); + const param: CliOperationParamSpec = { + name, + kind: isComplex ? 'jsonFlag' : 'flag', + flag: isComplex ? 
`${flagBase}-json` : flagBase, + type: paramType, + required: required.has(name), + }; + + if (isComplex || (!isSimpleType(propSchema) && paramType !== 'json')) { + param.schema = jsonSchemaToTypeSpec(propSchema); + } + + // Attach enum schema for simple string params with oneOf/enum + if (paramType === 'string' && (propSchema.oneOf || propSchema.enum)) { + param.schema = jsonSchemaToTypeSpec(propSchema); + } + + params.push(param); + } + + return { params, positionalParams }; +} + +function camelToKebab(str: string): string { + return str.replace(/[A-Z]/g, (ch) => `-${ch.toLowerCase()}`); +} + +// --------------------------------------------------------------------------- +// Envelope params per operation profile +// --------------------------------------------------------------------------- + +function envelopeParams(docApiId: OperationId): CliOperationParamSpec[] { + const catalog = COMMAND_CATALOG[docApiId]; + const envelope: CliOperationParamSpec[] = []; + const requiresDoc = OPERATION_REQUIRES_DOCUMENT_CONTEXT_MAP[docApiId]; + + if (requiresDoc) { + envelope.push(DOC_PARAM); + } + + envelope.push(SESSION_PARAM); + + if (catalog.mutates) { + envelope.push(OUT_PARAM, FORCE_PARAM, EXPECTED_REVISION_PARAM, CHANGE_MODE_PARAM); + + if (catalog.supportsDryRun) { + envelope.push(DRY_RUN_PARAM); + } + } + + return envelope; +} + +// --------------------------------------------------------------------------- +// Per-operation constraint overrides +// --------------------------------------------------------------------------- + +const OPERATION_CONSTRAINTS: Partial> = { + 'doc.find': { + requiresOneOf: [['type', 'query']], + mutuallyExclusive: [['type', 'query']], + }, + 'doc.comments.setActive': { + requiresOneOf: [['id', 'clear']], + mutuallyExclusive: [['id', 'clear']], + }, + 'doc.lists.list': { + mutuallyExclusive: [ + ['query', 'within'], + ['query', 'kind'], + ['query', 'level'], + ['query', 'ordinal'], + ['query', 'limit'], + ['query', 'offset'], + ], + }, +}; + 
+// --------------------------------------------------------------------------- +// Per-operation param flag overrides +// +// Rename schema-derived params to match CLI flag conventions. +// E.g., document-api uses `commentId` but CLI flag is `--id`. +// --------------------------------------------------------------------------- + +const PARAM_FLAG_OVERRIDES: Partial>> = { + 'doc.getNodeById': { + nodeId: { name: 'id', flag: 'id' }, + }, + 'doc.comments.add': { + commentId: { name: 'id', flag: 'id' }, + }, + 'doc.comments.edit': { + commentId: { name: 'id', flag: 'id' }, + }, + 'doc.comments.reply': { + parentCommentId: { name: 'parentId', flag: 'parent-id' }, + }, + 'doc.comments.move': { + commentId: { name: 'id', flag: 'id' }, + }, + 'doc.comments.resolve': { + commentId: { name: 'id', flag: 'id' }, + }, + 'doc.comments.remove': { + commentId: { name: 'id', flag: 'id' }, + }, + 'doc.comments.setInternal': { + commentId: { name: 'id', flag: 'id' }, + }, + 'doc.comments.goTo': { + commentId: { name: 'id', flag: 'id' }, + }, + 'doc.comments.get': { + commentId: { name: 'id', flag: 'id' }, + }, + 'doc.lists.get': { + address: { flag: 'address-json' }, + }, +}; + +// --------------------------------------------------------------------------- +// Schema-derived param exclusions +// +// Params derived from the document-api input schema that should NOT be +// exposed in CLI metadata because the CLI provides an alternative interface. +// --------------------------------------------------------------------------- + +const PARAM_EXCLUSIONS: Partial>> = { + // CLI uses flat flags (--type, --pattern, --mode) or --query-json; `select` + // is an internal document-api field that the invoker builds from flat flags. 
+ 'doc.find': new Set(['select']), +}; + +// --------------------------------------------------------------------------- +// Extra CLI-specific params for doc-backed operations +// +// These are convenience params that CLI invokers accept but are NOT in the +// document-api input schema. They are merged into the metadata alongside +// schema-derived and envelope params. +// --------------------------------------------------------------------------- + +const EXTRA_CLI_PARAMS: Partial> = { + 'doc.find': [ + { name: 'type', kind: 'flag', type: 'string' }, + { name: 'nodeType', kind: 'flag', flag: 'node-type', type: 'string' }, + { name: 'kind', kind: 'flag', type: 'string' }, + { name: 'pattern', kind: 'flag', type: 'string' }, + { name: 'mode', kind: 'flag', type: 'string' }, + { name: 'caseSensitive', kind: 'flag', flag: 'case-sensitive', type: 'boolean' }, + { name: 'query', kind: 'jsonFlag', flag: 'query-json', type: 'json' }, + ], + 'doc.lists.list': [{ name: 'query', kind: 'jsonFlag', flag: 'query-json', type: 'json' }], + 'doc.getNode': [{ name: 'address', kind: 'jsonFlag', flag: 'address-json', type: 'json' }], + 'doc.comments.setActive': [ + { name: 'id', kind: 'flag', type: 'string' }, + { name: 'clear', kind: 'flag', type: 'boolean' }, + ], + 'doc.lists.insert': [{ name: 'input', kind: 'jsonFlag', flag: 'input-json', type: 'json' }], + 'doc.lists.setType': [{ name: 'input', kind: 'jsonFlag', flag: 'input-json', type: 'json' }], + 'doc.lists.indent': [{ name: 'input', kind: 'jsonFlag', flag: 'input-json', type: 'json' }], + 'doc.lists.outdent': [{ name: 'input', kind: 'jsonFlag', flag: 'input-json', type: 'json' }], + 'doc.lists.restart': [{ name: 'input', kind: 'jsonFlag', flag: 'input-json', type: 'json' }], + 'doc.lists.exit': [{ name: 'input', kind: 'jsonFlag', flag: 'input-json', type: 'json' }], + 'doc.create.paragraph': [{ name: 'input', kind: 'jsonFlag', flag: 'input-json', type: 'json' }], +}; + +// 
--------------------------------------------------------------------------- +// Doc requirement derivation +// --------------------------------------------------------------------------- + +function docRequirement(docApiId: OperationId): 'required' | 'optional' | 'none' { + const requiresDoc = OPERATION_REQUIRES_DOCUMENT_CONTEXT_MAP[docApiId]; + const catalog = COMMAND_CATALOG[docApiId]; + + if (!requiresDoc) return 'none'; + if (catalog.mutates) return 'optional'; + return 'optional'; +} + +// --------------------------------------------------------------------------- +// CLI-only operation metadata (hand-written) +// --------------------------------------------------------------------------- + +type CliOnlyOperationId = `doc.${CliOnlyOperation}`; + +const CLI_ONLY_METADATA: Record = { + 'doc.open': { + command: 'open', + positionalParams: ['doc'], + docRequirement: 'required', + params: [ + { name: 'doc', kind: 'doc', type: 'string', required: true }, + SESSION_PARAM, + { name: 'collaboration', kind: 'jsonFlag', flag: 'collaboration-json', type: 'json' }, + { name: 'collabDocumentId', kind: 'flag', flag: 'collab-document-id', type: 'string' }, + { name: 'collabUrl', kind: 'flag', flag: 'collab-url', type: 'string' }, + ], + constraints: null, + }, + 'doc.save': { + command: 'save', + positionalParams: [], + docRequirement: 'none', + params: [ + SESSION_PARAM, + OUT_PARAM, + FORCE_PARAM, + { name: 'inPlace', kind: 'flag', flag: 'in-place', type: 'boolean' }, + ], + constraints: null, + }, + 'doc.close': { + command: 'close', + positionalParams: [], + docRequirement: 'none', + params: [SESSION_PARAM, { name: 'discard', kind: 'flag', type: 'boolean' }], + constraints: null, + }, + 'doc.status': { + command: 'status', + positionalParams: [], + docRequirement: 'none', + params: [SESSION_PARAM], + constraints: null, + }, + 'doc.describe': { + command: 'describe', + positionalParams: [], + docRequirement: 'none', + params: [], + constraints: null, + }, + 
'doc.describeCommand': { + command: 'describe command', + positionalParams: ['operationId'], + docRequirement: 'none', + params: [{ name: 'operationId', kind: 'doc', type: 'string', required: true }], + constraints: null, + }, + 'doc.session.list': { + command: 'session list', + positionalParams: [], + docRequirement: 'none', + params: [], + constraints: null, + }, + 'doc.session.save': { + command: 'session save', + positionalParams: ['sessionId'], + docRequirement: 'none', + params: [ + { name: 'sessionId', kind: 'doc', type: 'string', required: true }, + OUT_PARAM, + FORCE_PARAM, + { name: 'inPlace', kind: 'flag', flag: 'in-place', type: 'boolean' }, + ], + constraints: null, + }, + 'doc.session.close': { + command: 'session close', + positionalParams: ['sessionId'], + docRequirement: 'none', + params: [ + { name: 'sessionId', kind: 'doc', type: 'string', required: true }, + { name: 'discard', kind: 'flag', type: 'boolean' }, + ], + constraints: null, + }, + 'doc.session.setDefault': { + command: 'session set-default', + positionalParams: ['sessionId'], + docRequirement: 'none', + params: [{ name: 'sessionId', kind: 'doc', type: 'string', required: true }], + constraints: null, + }, +}; + +// --------------------------------------------------------------------------- +// Build doc-backed operation metadata +// --------------------------------------------------------------------------- + +function buildDocBackedMetadata(): Record { + const schemas = buildInternalContractSchemas(); + const result = {} as Record; + + for (const docApiId of CLI_DOC_OPERATIONS) { + const cliOpId = `doc.${docApiId}` as DocBackedCliOpId; + const schemaSet = schemas.operations[docApiId]; + const inputSchema = schemaSet.input as JsonSchema; + + const { params: schemaParams } = deriveParamsFromInputSchema(inputSchema); + const envelope = envelopeParams(docApiId); + + // Merge: envelope params first, then schema-derived params (skip duplicates) + const seenNames = new Set(); + const 
mergedParams: CliOperationParamSpec[] = []; + + for (const param of envelope) { + seenNames.add(param.name); + mergedParams.push(param); + } + + // Apply flag overrides and exclusions to schema params before merging + const overrides = PARAM_FLAG_OVERRIDES[cliOpId]; + const exclusions = PARAM_EXCLUSIONS[cliOpId]; + for (const param of schemaParams) { + if (exclusions?.has(param.name)) continue; + if (overrides && overrides[param.name]) { + const override = overrides[param.name]; + if (override.name) param.name = override.name; + if (override.flag) param.flag = override.flag; + } + if (seenNames.has(param.name)) continue; + seenNames.add(param.name); + mergedParams.push(param); + } + + // Merge extra CLI-specific params (skip duplicates). + // Operations with extra CLI params have custom invokers that handle their + // own validation, so strip `required` from schema-derived params. + const extraParams = EXTRA_CLI_PARAMS[cliOpId]; + if (extraParams) { + for (const p of mergedParams) { + if (p.required) p.required = false; + } + for (const param of extraParams) { + if (seenNames.has(param.name)) continue; + seenNames.add(param.name); + mergedParams.push(param); + } + } + + // Positional params: doc (if applicable) + const positionalParams: string[] = []; + if (OPERATION_REQUIRES_DOCUMENT_CONTEXT_MAP[docApiId]) { + positionalParams.push('doc'); + } + + const commandKey = CLI_OPERATION_COMMAND_KEYS[cliOpId] ?? docApiId; + + result[cliOpId] = { + command: commandKey, + positionalParams, + docRequirement: docRequirement(docApiId), + params: mergedParams, + constraints: OPERATION_CONSTRAINTS[cliOpId] ?? 
null, + }; + } + + return result; +} + +// --------------------------------------------------------------------------- +// Compose full metadata map +// --------------------------------------------------------------------------- + +function buildAllMetadata(): Record { + const docBacked = buildDocBackedMetadata(); + const merged = { + ...docBacked, + ...CLI_ONLY_METADATA, + } as Record; + + return Object.fromEntries( + CLI_OPERATION_IDS.map((operationId) => { + const metadata = merged[operationId]; + if (!metadata) { + throw new Error(`Missing CLI metadata for operation: ${operationId}`); + } + return [operationId, metadata] as const; + }), + ) as Record; +} + +export const CLI_OPERATION_METADATA: Record = buildAllMetadata(); + +// --------------------------------------------------------------------------- +// Option specs (derived mechanically from params) +// --------------------------------------------------------------------------- + +function deriveOptionSpecs(params: readonly CliOperationParamSpec[]): CliOperationOptionSpec[] { + const specs: CliOperationOptionSpec[] = []; + + for (const param of params) { + // Skip positional-only params (operationId, sessionId) but include the + // document path param so --doc is recognized by the parser. + if (param.kind === 'doc' && param.name !== 'doc') continue; + + const optionType: CliOperationOptionSpec['type'] = + param.type === 'json' || param.type === 'string[]' ? 'string' : param.type; + + specs.push({ + name: param.flag ?? 
param.name, + type: optionType, + }); + } + + return specs; +} + +export const CLI_OPERATION_OPTION_SPECS: Record = Object.fromEntries( + CLI_OPERATION_IDS.map((operationId) => [operationId, deriveOptionSpecs(CLI_OPERATION_METADATA[operationId].params)]), +) as Record; diff --git a/apps/cli/src/cli/operation-set.ts b/apps/cli/src/cli/operation-set.ts new file mode 100644 index 0000000000..56e3e3b454 --- /dev/null +++ b/apps/cli/src/cli/operation-set.ts @@ -0,0 +1,233 @@ +/** + * Canonical CLI operation set — the root definition. + * + * All CLI metadata derives from this file. The doc-backed operation set is + * derived from document-api's OPERATION_IDS via an explicit denylist. + * 10 CLI-only operations are added for lifecycle/session/introspection. + */ + +import { + COMMAND_CATALOG, + OPERATION_IDS, + OPERATION_MEMBER_PATH_MAP, + OPERATION_DESCRIPTION_MAP, + OPERATION_REQUIRES_DOCUMENT_CONTEXT_MAP, + isOperationId, + type OperationId, + REFERENCE_OPERATION_GROUPS, + type ReferenceGroupKey, +} from '@superdoc/document-api'; + +// --------------------------------------------------------------------------- +// Doc-backed operations (derived from document-api with denylist) +// --------------------------------------------------------------------------- + +/** Operations explicitly excluded from the CLI (with justification). */ +const CLI_OPERATION_DENYLIST = [ + 'getText', // Subsumed by find + info; revisit if needed + 'capabilities.get', // Internal engine concern, not user-facing + 'create.heading', // Currently unavailable in the CLI/runtime command surface +] as const satisfies readonly OperationId[]; + +type DeniedOperationId = (typeof CLI_OPERATION_DENYLIST)[number]; + +/** + * Narrowed type: only the document-api operations the CLI actually exposes. + * Uses Exclude to get a precise literal union — filter() would widen to OperationId. 
+ */ +export type CliExposedOperationId = Exclude<OperationId, DeniedOperationId>; + +/** Runtime list of CLI-exposed operations — typed to match the Exclude union. */ +const denySet: ReadonlySet<OperationId> = new Set(CLI_OPERATION_DENYLIST); +export const CLI_DOC_OPERATIONS: readonly CliExposedOperationId[] = OPERATION_IDS.filter( + (id): id is CliExposedOperationId => !denySet.has(id), +); + +// --------------------------------------------------------------------------- +// CLI-only operations (not in document-api) +// --------------------------------------------------------------------------- + +export const CLI_ONLY_OPERATIONS = [ + 'open', + 'save', + 'close', + 'status', + 'describe', + 'describeCommand', + 'session.list', + 'session.save', + 'session.close', + 'session.setDefault', +] as const; + +export type CliOnlyOperation = (typeof CLI_ONLY_OPERATIONS)[number]; + +// --------------------------------------------------------------------------- +// CliOperationId — union of all CLI operation IDs +// --------------------------------------------------------------------------- + +export type DocBackedCliOpId = `doc.${CliExposedOperationId}`; +type CliOnlyOpId = `doc.${CliOnlyOperation}`; + +export type CliOperationId = DocBackedCliOpId | CliOnlyOpId; + +/** All CLI operation IDs as an array. */ +export const CLI_OPERATION_IDS: readonly CliOperationId[] = [ + ...CLI_DOC_OPERATIONS.map((id) => `doc.${id}` as CliOperationId), + ...CLI_ONLY_OPERATIONS.map((id) => `doc.${id}` as CliOperationId), +]; + +// --------------------------------------------------------------------------- +// Mapping helpers +// --------------------------------------------------------------------------- + +/** Strips the `doc.` prefix and returns the document-api OperationId, or null for CLI-only ops. */ +export function toDocApiId(cliOpId: string): OperationId | null { + if (!cliOpId.startsWith('doc.')) return null; + const stripped = cliOpId.slice(4); + return isOperationId(stripped) ?
stripped : null; +} + +/** Returns true if the CLI operation is backed by a document-api operation. */ +export function isDocBackedOperation(cliOpId: string): boolean { + return toDocApiId(cliOpId) !== null; +} + +// --------------------------------------------------------------------------- +// Category derivation +// --------------------------------------------------------------------------- + +export type CliCategory = + | 'query' + | 'mutation' + | 'format' + | 'create' + | 'lists' + | 'comments' + | 'trackChanges' + | 'capabilities' + | 'lifecycle' + | 'session' + | 'introspection'; + +const CLI_ONLY_CATEGORIES: Record<CliOnlyOperation, CliCategory> = { + open: 'lifecycle', + save: 'lifecycle', + close: 'lifecycle', + status: 'introspection', + describe: 'introspection', + describeCommand: 'introspection', + 'session.list': 'session', + 'session.save': 'session', + 'session.close': 'session', + 'session.setDefault': 'session', +}; + +const REFERENCE_GROUP_BY_OP = new Map<OperationId, ReferenceGroupKey>(); +for (const group of REFERENCE_OPERATION_GROUPS) { + for (const opId of group.operations) { + REFERENCE_GROUP_BY_OP.set(opId, group.key); + } +} + +function deriveCategoryFromDocApi(docApiId: OperationId): CliCategory { + const group = REFERENCE_GROUP_BY_OP.get(docApiId); + if (!group) return 'query'; + + if (group === 'core') { + return COMMAND_CATALOG[docApiId].mutates ? 'mutation' : 'query'; + } + + return group as CliCategory; +} + +export function cliCategory(cliOpId: CliOperationId): CliCategory { + const docApiId = toDocApiId(cliOpId); + if (docApiId) return deriveCategoryFromDocApi(docApiId); + + const stripped = cliOpId.slice(4) as CliOnlyOperation; + return CLI_ONLY_CATEGORIES[stripped] ??
'introspection'; +} + +// --------------------------------------------------------------------------- +// Description + requiresDocumentContext accessors +// --------------------------------------------------------------------------- + +const CLI_ONLY_DESCRIPTIONS: Record<CliOnlyOperation, string> = { + open: 'Open a document and create a persistent editing session.', + save: 'Save the current session to the original file or a new path.', + close: 'Close the active editing session and clean up resources.', + status: 'Show the current session status and document metadata.', + describe: 'List all available CLI operations and contract metadata.', + describeCommand: 'Show detailed metadata for a single CLI operation.', + 'session.list': 'List all active editing sessions.', + 'session.save': 'Persist the current session state.', + 'session.close': 'Close a specific editing session by ID.', + 'session.setDefault': 'Set the default session for subsequent commands.', +}; + +const CLI_ONLY_REQUIRES_DOCUMENT: Record<CliOnlyOperation, boolean> = { + open: false, + save: false, + close: false, + status: false, + describe: false, + describeCommand: false, + 'session.list': false, + 'session.save': false, + 'session.close': false, + 'session.setDefault': false, +}; + +export function cliDescription(cliOpId: CliOperationId): string { + const docApiId = toDocApiId(cliOpId); + if (docApiId) return OPERATION_DESCRIPTION_MAP[docApiId]; + + const stripped = cliOpId.slice(4) as CliOnlyOperation; + return CLI_ONLY_DESCRIPTIONS[stripped] ?? ''; +} + +export function cliRequiresDocumentContext(cliOpId: CliOperationId): boolean { + const docApiId = toDocApiId(cliOpId); + if (docApiId) return OPERATION_REQUIRES_DOCUMENT_CONTEXT_MAP[docApiId]; + + const stripped = cliOpId.slice(4) as CliOnlyOperation; + return CLI_ONLY_REQUIRES_DOCUMENT[stripped] ??
false; +} + +// --------------------------------------------------------------------------- +// Command token derivation +// --------------------------------------------------------------------------- + +/** + * Derives CLI command tokens from a doc-api member path. + * E.g. "comments.add" → ["comments", "add"], "find" → ["find"] + * + * For CLI-only ops, converts camelCase to kebab-case: + * E.g. "session.setDefault" → ["session", "set-default"] + */ +function camelToKebab(str: string): string { + return str.replace(/[A-Z]/g, (ch) => `-${ch.toLowerCase()}`); +} + +/** + * Explicit command token overrides for CLI-only operations whose + * algorithmic derivation doesn't match the expected CLI surface. + */ +const CLI_ONLY_TOKEN_OVERRIDES: Partial> = { + describeCommand: ['describe', 'command'], +}; + +export function cliCommandTokens(cliOpId: CliOperationId): readonly string[] { + const docApiId = toDocApiId(cliOpId); + if (docApiId) { + const memberPath = OPERATION_MEMBER_PATH_MAP[docApiId]; + return memberPath.split('.').map(camelToKebab); + } + + const stripped = cliOpId.slice(4) as CliOnlyOperation; + const override = CLI_ONLY_TOKEN_OVERRIDES[stripped]; + if (override) return override; + + return stripped.split('.').map(camelToKebab); +} diff --git a/apps/cli/src/cli/response-schemas.ts b/apps/cli/src/cli/response-schemas.ts new file mode 100644 index 0000000000..778d30c479 --- /dev/null +++ b/apps/cli/src/cli/response-schemas.ts @@ -0,0 +1,89 @@ +/** + * CLI response schemas — delegates to document-api for doc-backed operations. + * + * `validateOperationResponseData()` validates `CommandExecution["data"]`, + * which for doc-backed ops IS the document-api output directly. + * For CLI-only ops, schemas are defined inline. 
+ */ + +import { buildInternalContractSchemas, type OperationId } from '@superdoc/document-api'; +import type { CliTypeSpec } from './types'; +import { toDocApiId, type CliOperationId } from './operation-set'; + +type JsonSchema = Record; + +function jsonSchemaToTypeSpec(schema: JsonSchema): CliTypeSpec { + if ('const' in schema) return { const: schema.const } as CliTypeSpec; + + if (schema.oneOf) { + return { + oneOf: (schema.oneOf as JsonSchema[]).map(jsonSchemaToTypeSpec), + } as CliTypeSpec; + } + + if (schema.type === 'string') return { type: 'string' } as CliTypeSpec; + if (schema.type === 'number' || schema.type === 'integer') return { type: 'number' } as CliTypeSpec; + if (schema.type === 'boolean') return { type: 'boolean' } as CliTypeSpec; + + if (schema.type === 'array') { + const items = (schema.items as JsonSchema) ?? {}; + return { type: 'array', items: jsonSchemaToTypeSpec(items) } as CliTypeSpec; + } + + if (schema.type === 'object') { + const properties: Record = {}; + for (const [key, propSchema] of Object.entries((schema.properties as Record) ?? {})) { + properties[key] = jsonSchemaToTypeSpec(propSchema); + } + const result: CliTypeSpec = { type: 'object', properties } as CliTypeSpec; + if (schema.required && Array.isArray(schema.required)) { + (result as { required: readonly string[] }).required = schema.required as string[]; + } + return result; + } + + return { type: 'json' } as CliTypeSpec; +} + +/** Lazy-init cache for doc-backed response schemas. 
*/ +let cachedDocSchemas: Map | null = null; + +function getDocResponseSchemas(): Map { + if (cachedDocSchemas) return cachedDocSchemas; + + const schemas = buildInternalContractSchemas(); + cachedDocSchemas = new Map(); + + for (const [opId, schemaSet] of Object.entries(schemas.operations)) { + const cliOpId = `doc.${opId}`; + cachedDocSchemas.set(cliOpId, jsonSchemaToTypeSpec(schemaSet.output as JsonSchema)); + } + + return cachedDocSchemas; +} + +/** CLI-only operation response schemas (permissive — CLI-only ops have varied shapes). */ +const CLI_ONLY_RESPONSE_SCHEMAS: Record = { + 'doc.open': { type: 'json' }, + 'doc.save': { type: 'json' }, + 'doc.close': { type: 'json' }, + 'doc.status': { type: 'json' }, + 'doc.describe': { type: 'json' }, + 'doc.describeCommand': { type: 'json' }, + 'doc.session.list': { type: 'json' }, + 'doc.session.save': { type: 'json' }, + 'doc.session.close': { type: 'json' }, + 'doc.session.setDefault': { type: 'json' }, +}; + +/** + * Returns the response validation schema for a CLI operation. + * Doc-backed ops get strict schemas from document-api; CLI-only ops get permissive JSON. + */ +export function getResponseSchema(cliOpId: string): CliTypeSpec | null { + const docSchemas = getDocResponseSchemas(); + const fromDoc = docSchemas.get(cliOpId); + if (fromDoc) return fromDoc; + + return CLI_ONLY_RESPONSE_SCHEMAS[cliOpId] ?? null; +} diff --git a/apps/cli/src/cli/types.ts b/apps/cli/src/cli/types.ts new file mode 100644 index 0000000000..0b880c4981 --- /dev/null +++ b/apps/cli/src/cli/types.ts @@ -0,0 +1,103 @@ +/** + * Shared type definitions for the CLI metadata layer. + * + * These types mirror the shapes that consuming code (operation-args.ts, + * operation-executor.ts, etc.) expects from the CLI metadata modules. 
+ */ + +// --------------------------------------------------------------------------- +// JSON Schema type spec (used for response validation + param schemas) +// --------------------------------------------------------------------------- + +type TypeSpecBase = { + description?: string; +}; + +export type CliTypeSpec = + | ({ const: unknown } & TypeSpecBase) + | ({ oneOf: readonly CliTypeSpec[] } & TypeSpecBase) + | ({ type: 'json' } & TypeSpecBase) + | ({ type: 'string' } & TypeSpecBase) + | ({ type: 'number' } & TypeSpecBase) + | ({ type: 'boolean' } & TypeSpecBase) + | ({ type: 'array'; items: CliTypeSpec } & TypeSpecBase) + | ({ + type: 'object'; + properties: Record; + required?: readonly string[]; + } & TypeSpecBase); + +// --------------------------------------------------------------------------- +// Per-operation param spec +// --------------------------------------------------------------------------- + +export type CliOperationParamSpec = { + name: string; + kind: 'doc' | 'flag' | 'jsonFlag'; + flag?: string; + type: 'string' | 'number' | 'boolean' | 'string[]' | 'json'; + required?: boolean; + schema?: CliTypeSpec; +}; + +// --------------------------------------------------------------------------- +// Constraints +// --------------------------------------------------------------------------- + +export type CliOperationConstraints = { + requiresOneOf?: readonly (readonly string[])[]; + mutuallyExclusive?: readonly (readonly string[])[]; + requiredWhen?: readonly { + param: string; + whenParam: string; + equals?: unknown; + present?: boolean; + }[]; +}; + +// --------------------------------------------------------------------------- +// Per-operation metadata (combines params + response) +// --------------------------------------------------------------------------- + +export type CliOperationMetadata = { + command: string; + positionalParams: readonly string[]; + docRequirement: 'required' | 'optional' | 'none'; + params: readonly 
CliOperationParamSpec[]; + constraints: CliOperationConstraints | null; +}; + +// --------------------------------------------------------------------------- +// Option spec (for arg parsing) +// --------------------------------------------------------------------------- + +export type CliOperationOptionSpec = { + name: string; + type: 'string' | 'number' | 'boolean'; + aliases?: string[]; +}; + +// --------------------------------------------------------------------------- +// Command spec +// --------------------------------------------------------------------------- + +export type CliCommandSpec = { + key: string; + tokens: readonly string[]; + operationId: string; + category: string; + description: string; + mutates: boolean; + requiresDocumentContext: boolean; + alias: boolean; + canonicalKey: string; + examples: readonly string[]; +}; + +// --------------------------------------------------------------------------- +// Args-by-id type (for generic param extraction) +// --------------------------------------------------------------------------- + +export type CliOperationArgsById = { + [K: string]: Record; +}; diff --git a/apps/cli/src/commands/call.ts b/apps/cli/src/commands/call.ts new file mode 100644 index 0000000000..63faac1b4e --- /dev/null +++ b/apps/cli/src/commands/call.ts @@ -0,0 +1,104 @@ +import { ensureValidArgs, getBooleanOption, parseCommandArgs, resolveJsonInput, type OptionSpec } from '../lib/args'; +import { CliError } from '../lib/errors'; +import { normalizeJsonValue } from '../lib/input-readers'; +import { executeOperation } from '../lib/operation-executor'; +import { validateOperationResponseData } from '../lib/operation-args'; +import type { CommandContext, CommandExecution } from '../lib/types'; +import { CLI_COMMAND_SPECS, CLI_OPERATION_COMMAND_KEYS, CLI_OPERATION_METADATA, type CliOperationId } from '../cli'; + +const CALL_OPTION_SPECS: OptionSpec[] = [ + { name: 'input-json', type: 'string' }, + { name: 'input-file', type: 'string' 
}, + { name: 'help', type: 'boolean', aliases: ['h'] }, +]; + +const OPERATION_IDS = new Set(Object.keys(CLI_OPERATION_METADATA) as CliOperationId[]); + +const OPERATION_IDS_BY_COMMAND_KEY = new Map>(); +for (const spec of CLI_COMMAND_SPECS) { + const operationId = spec.operationId as CliOperationId; + const existing = OPERATION_IDS_BY_COMMAND_KEY.get(spec.key); + if (existing) { + existing.add(operationId); + continue; + } + OPERATION_IDS_BY_COMMAND_KEY.set(spec.key, new Set([operationId])); +} + +function resolveOperationId(query: string | undefined): CliOperationId { + const normalizedQuery = query?.trim(); + if (!normalizedQuery) { + throw new CliError('MISSING_REQUIRED', 'call: missing required .'); + } + + if (OPERATION_IDS.has(normalizedQuery as CliOperationId)) { + return normalizedQuery as CliOperationId; + } + + const byCommand = OPERATION_IDS_BY_COMMAND_KEY.get(normalizedQuery); + if (byCommand && byCommand.size === 1) { + return [...byCommand][0]; + } + + if (byCommand && byCommand.size > 1) { + const candidates = [...byCommand].sort(); + throw new CliError('INVALID_ARGUMENT', `call: command key "${normalizedQuery}" is ambiguous.`, { + operationIds: candidates, + }); + } + + throw new CliError('TARGET_NOT_FOUND', `call: unknown operation "${normalizedQuery}".`); +} + +function parseHelpExecution(): CommandExecution { + return { + command: 'call', + data: { + usage: [ + 'superdoc call [--input-json "{...}"|--input-file payload.json]', + 'superdoc call doc.find --input-json \'{"doc":"./file.docx","query":{"select":{"type":"text","pattern":"test"}}}\'', + ], + }, + pretty: [ + 'Usage:', + ' superdoc call [--input-json "{...}"|--input-file payload.json]', + ' superdoc call doc.find --input-json \'{"doc":"./file.docx","query":{"select":{"type":"text","pattern":"test"}}}\'', + ].join('\n'), + }; +} + +export async function runCall(tokens: string[], context: CommandContext): Promise { + const parsed = parseCommandArgs(tokens, CALL_OPTION_SPECS); + 
ensureValidArgs(parsed); + + const help = getBooleanOption(parsed, 'help'); + if (help) return parseHelpExecution(); + + const operationQuery = parsed.positionals.join(' '); + const operationId = resolveOperationId(operationQuery); + const input = (await resolveJsonInput(parsed, 'input')) ?? {}; + const operationExecution = await executeOperation({ + mode: 'call', + operationId, + input, + context, + }); + const normalizedResult = normalizeJsonValue(operationExecution.data, 'call'); + validateOperationResponseData(operationId, normalizedResult, CLI_OPERATION_COMMAND_KEYS[operationId]); + + return { + command: 'call', + data: { + operationId, + result: normalizedResult, + }, + pretty: JSON.stringify( + { + operationId, + result: normalizedResult, + }, + null, + 2, + ), + }; +} diff --git a/apps/cli/src/commands/close.ts b/apps/cli/src/commands/close.ts new file mode 100644 index 0000000000..2e9b231bc9 --- /dev/null +++ b/apps/cli/src/commands/close.ts @@ -0,0 +1,78 @@ +import { getBooleanOption } from '../lib/args'; +import { CliError } from '../lib/errors'; +import { parseOperationArgs } from '../lib/operation-args'; +import { clearActiveSessionId, clearContext, getActiveSessionId, withActiveContext } from '../lib/context'; +import type { CommandContext, CommandExecution } from '../lib/types'; +function validateCloseMode(discard: boolean): { + discard: boolean; +} { + return { + discard, + }; +} + +export async function runClose(tokens: string[], context: CommandContext): Promise { + const { parsed, help } = parseOperationArgs('doc.close', tokens, { commandName: 'close' }); + + if (help) { + return { + command: 'close', + data: { + usage: ['superdoc close [--discard]'], + }, + pretty: ['Usage:', ' superdoc close [--discard]'].join('\n'), + }; + } + + const mode = validateCloseMode(getBooleanOption(parsed, 'discard')); + + return withActiveContext( + context.io, + 'close', + async ({ metadata, paths }) => { + const effectiveMetadata = metadata; + const 
activeSessionId = await getActiveSessionId(); + const wasDefaultSession = activeSessionId === effectiveMetadata.contextId; + + if (effectiveMetadata.dirty && !mode.discard) { + throw new CliError( + 'DIRTY_CLOSE_REQUIRES_DECISION', + 'Active document has unsaved changes. Run "superdoc save" first or close with --discard.', + { + revision: effectiveMetadata.revision, + }, + ); + } + + const result = { + command: 'close', + data: { + contextId: effectiveMetadata.contextId, + closed: true, + saved: false, + discarded: mode.discard, + defaultSessionCleared: wasDefaultSession, + wasDirty: effectiveMetadata.dirty, + document: { + path: effectiveMetadata.sourcePath, + source: effectiveMetadata.source, + revision: effectiveMetadata.revision, + }, + }, + pretty: mode.discard ? 'Closed context (discarded unsaved changes)' : 'Closed context', + }; + + if (context.executionMode === 'host' && context.collabSessionPool) { + await context.collabSessionPool.disposeSession(effectiveMetadata.contextId); + } + + await clearContext(paths); + if (wasDefaultSession) { + await clearActiveSessionId(); + } + + return result; + }, + context.sessionId, + ); +} diff --git a/apps/cli/src/commands/install.ts b/apps/cli/src/commands/install.ts new file mode 100644 index 0000000000..1a5aaab6f7 --- /dev/null +++ b/apps/cli/src/commands/install.ts @@ -0,0 +1,53 @@ +import { existsSync, cpSync, mkdirSync } from 'node:fs'; +import { join, dirname } from 'node:path'; +import { fileURLToPath } from 'node:url'; +import type { CliIO } from '../lib/types'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = dirname(__filename); + +const AGENT_TARGETS = [ + { name: 'Claude Code', dir: '.claude' }, + { name: 'Codex', dir: '.agents' }, +] as const; + +function resolveSkillSource(): string { + // In compiled dist: __dirname is dist/, skill/ is at dist/../skill/ + // In dev (bun run src/index.ts): __dirname is src/commands/, skill/ is at src/commands/../../skill/ + const fromDist = 
join(__dirname, '..', 'skill'); + if (existsSync(fromDist)) return fromDist; + + const fromSrc = join(__dirname, '..', '..', 'skill'); + if (existsSync(fromSrc)) return fromSrc; + + throw new Error('Could not locate bundled skill directory. Is the package installed correctly?'); +} + +export async function runInstall(tokens: string[], io: CliIO): Promise { + if (!tokens.includes('--skills')) { + io.stderr('Usage: superdoc install --skills\n'); + return 1; + } + + const cwd = process.cwd(); + const skillSource = resolveSkillSource(); + let installed = 0; + + for (const target of AGENT_TARGETS) { + const agentDir = join(cwd, target.dir); + if (!existsSync(agentDir)) continue; + + const dest = join(agentDir, 'skills', 'superdoc'); + mkdirSync(dest, { recursive: true }); + cpSync(skillSource, dest, { recursive: true }); + io.stdout(`Installed skill to ${target.dir}/skills/superdoc/\n`); + installed += 1; + } + + if (installed === 0) { + io.stderr('No agent directories found. Create .claude/ (Claude Code) or .agents/ (Codex) first, then re-run.\n'); + return 1; + } + + return 0; +} diff --git a/apps/cli/src/commands/legacy-compat.ts b/apps/cli/src/commands/legacy-compat.ts new file mode 100644 index 0000000000..329364abb6 --- /dev/null +++ b/apps/cli/src/commands/legacy-compat.ts @@ -0,0 +1,356 @@ +import { glob } from 'fast-glob'; +import { openDocument, exportToPath } from '../lib/document'; +import type { CliIO } from '../lib/types'; + +type LegacyCommand = 'search' | 'read' | 'replace-legacy'; + +interface LegacySearchMatch { + from: number; + to: number; + text: string; + context?: string; +} + +interface LegacySearchFileResult { + path: string; + matches: LegacySearchMatch[]; +} + +interface LegacySearchResult { + pattern: string; + files: LegacySearchFileResult[]; + totalMatches: number; +} + +interface LegacyReadResult { + path: string; + content: string; +} + +interface LegacyReplaceFileResult { + path: string; + replacements: number; +} + +interface 
LegacyReplaceResult { + find: string; + replace: string; + files: LegacyReplaceFileResult[]; + totalReplacements: number; +} + +type LegacyCompatHandled = { + handled: true; + exitCode: number; +}; + +type LegacyCompatNotHandled = { + handled: false; +}; + +/** Discriminated result of a legacy compatibility command attempt. */ +export type LegacyCompatResult = LegacyCompatHandled | LegacyCompatNotHandled; + +type RawSearchMatch = { + from: number; + to: number; + text: string; +}; + +function getMatchContext(fullText: string, from: number, to: number, contextChars = 40): string { + const start = Math.max(0, from - contextChars); + const end = Math.min(fullText.length, to + contextChars); + + let context = fullText.slice(start, end); + if (start > 0) context = `...${context}`; + if (end < fullText.length) context = `${context}...`; + + return context.replace(/\n/g, ' '); +} + +async function expandGlobs(patterns: string[]): Promise { + const files: string[] = []; + + for (const pattern of patterns) { + if (pattern.includes('*')) { + const matches = await glob(pattern, { absolute: true }); + for (const file of matches) { + if (file.endsWith('.docx')) { + files.push(file); + } + } + } else { + files.push(pattern); + } + } + + return files; +} + +async function searchSingleFile(filePath: string, pattern: string, io: CliIO): Promise { + const opened = await openDocument(filePath, io); + try { + const matches = + (opened.editor.commands.search?.(pattern, { + highlight: false, + }) as RawSearchMatch[] | undefined) ?? 
[]; + const fullText = opened.editor.state.doc.textContent; + + return { + path: filePath, + matches: matches.map((match) => ({ + ...match, + context: getMatchContext(fullText, match.from, match.to), + })), + }; + } finally { + opened.dispose(); + } +} + +async function runLegacySearch(pattern: string, files: string[], io: CliIO): Promise { + const results = await Promise.all(files.map((filePath) => searchSingleFile(filePath, pattern, io))); + const filesWithMatches = results.filter((entry) => entry.matches.length > 0); + const totalMatches = filesWithMatches.reduce((sum, entry) => sum + entry.matches.length, 0); + + return { + pattern, + files: filesWithMatches, + totalMatches, + }; +} + +async function runLegacyRead(filePath: string, io: CliIO): Promise { + const opened = await openDocument(filePath, io); + try { + return { + path: filePath, + content: opened.editor.state.doc.textContent, + }; + } finally { + opened.dispose(); + } +} + +type DocRange = { from: number; to: number }; +type RawSearchMatchWithRanges = RawSearchMatch & { ranges?: DocRange[] }; + +/** + * Replace all occurrences of a pattern in a document with replacement text. + * + * Handles cross-paragraph matches by replacing each range individually + * (back-to-front) to preserve document structure and positions. + */ +function applyReplacements(editor: import('../lib/document').EditorWithDoc, find: string, replaceWith: string): number { + const matches = + (editor.commands.search?.(find, { highlight: false }) as RawSearchMatchWithRanges[] | undefined) ?? 
[]; + if (matches.length === 0) return 0; + + // Collect all ranges, marking the first range of each match for replacement text + const allRanges: Array<{ from: number; to: number; isFirst: boolean }> = []; + + for (const match of matches) { + if (match.ranges && match.ranges.length > 0) { + match.ranges.forEach((range, index) => { + allRanges.push({ from: range.from, to: range.to, isFirst: index === 0 }); + }); + } else { + allRanges.push({ from: match.from, to: match.to, isFirst: true }); + } + } + + // Sort descending so replacements don't shift earlier positions + allRanges.sort((a, b) => b.from - a.from); + + for (const range of allRanges) { + const content = range.isFirst ? replaceWith : ''; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (editor.chain() as any).setTextSelection({ from: range.from, to: range.to }).insertContent(content).run(); + } + + return matches.length; +} + +async function replaceInFile( + filePath: string, + find: string, + replaceWith: string, + io: CliIO, +): Promise { + const opened = await openDocument(filePath, io); + try { + const replacements = applyReplacements(opened.editor, find, replaceWith); + if (replacements > 0) { + await exportToPath(opened.editor, filePath, true); + } + return { path: filePath, replacements }; + } finally { + opened.dispose(); + } +} + +async function runLegacyReplace( + find: string, + replaceWith: string, + files: string[], + io: CliIO, +): Promise { + const results = await Promise.all(files.map((fp) => replaceInFile(fp, find, replaceWith, io))); + const filesWithReplacements = results.filter((r) => r.replacements > 0); + const totalReplacements = results.reduce((sum, r) => sum + r.replacements, 0); + + return { + find, + replace: replaceWith, + files: filesWithReplacements, + totalReplacements, + }; +} + +function formatLegacyReplaceResult(result: LegacyReplaceResult): string { + const lines: string[] = []; + lines.push(`Replaced ${result.totalReplacements} occurrences across 
${result.files.length} files`); + lines.push(''); + + for (const file of result.files) { + lines.push(` ${file.path}: ${file.replacements} replacements`); + } + + return lines.join('\n'); +} + +function formatLegacySearchResult(result: LegacySearchResult): string { + const lines: string[] = []; + + lines.push(`Found ${result.totalMatches} matches in ${result.files.length} files`); + lines.push(''); + + for (const file of result.files) { + lines.push(` ${file.path}: ${file.matches.length} matches`); + for (const match of file.matches.slice(0, 3)) { + lines.push(` "${match.context}"`); + } + if (file.matches.length > 3) { + lines.push(` ... and ${file.matches.length - 3} more`); + } + } + + return lines.join('\n'); +} + +function resolveLegacyJsonOutput(argv: string[]): boolean { + for (let index = 0; index < argv.length; index += 1) { + const token = argv[index]; + if (token === '--json') return true; + if (token === '--pretty') return false; + if (token.startsWith('--output=')) { + return token.slice('--output='.length) === 'json'; + } + if (token === '--output') { + return argv[index + 1] === 'json'; + } + } + + // Legacy default: pretty output unless JSON is explicitly requested. 
+ return false; +} + +function writeLegacySuccess( + io: CliIO, + payload: LegacySearchResult | LegacyReadResult | LegacyReplaceResult, + jsonOutput: boolean, +): void { + if (jsonOutput) { + io.stdout(`${JSON.stringify(payload, null, 2)}\n`); + return; + } + + if ('pattern' in payload) { + io.stdout(`${formatLegacySearchResult(payload)}\n`); + return; + } + + if ('totalReplacements' in payload) { + io.stdout(`${formatLegacyReplaceResult(payload)}\n`); + return; + } + + io.stdout(`${payload.content}\n`); +} + +function usageFor(command: LegacyCommand): string { + if (command === 'search') return 'Usage: superdoc search '; + if (command === 'replace-legacy') return 'Usage: superdoc replace-legacy '; + return 'Usage: superdoc read '; +} + +/** + * Attempts to handle a CLI invocation as a legacy v0.x command (`search`, `read`, or `replace-legacy`). + * + * @param argv - Raw process arguments (used to detect `--json` / `--output` flags). + * @param rest - Remaining tokens after global flag extraction. + * @param io - CLI I/O streams. + * @returns `{ handled: true, exitCode }` if the command was a legacy command, otherwise `{ handled: false }`. 
+ */ +export async function tryRunLegacyCompatCommand( + argv: string[], + rest: string[], + io: CliIO, +): Promise { + const [command, ...args] = rest; + if (command !== 'search' && command !== 'read' && command !== 'replace-legacy') { + return { handled: false }; + } + + const jsonOutput = resolveLegacyJsonOutput(argv); + + try { + if (command === 'search') { + if (args.length < 2) { + io.stderr(`${usageFor('search')}\n`); + return { handled: true, exitCode: 1 }; + } + + const [pattern, ...filePatterns] = args; + const files = await expandGlobs(filePatterns); + if (files.length === 0) { + io.stderr('No .docx files found matching the pattern.\n'); + return { handled: true, exitCode: 1 }; + } + + const payload = await runLegacySearch(pattern, files, io); + writeLegacySuccess(io, payload, jsonOutput); + return { handled: true, exitCode: 0 }; + } + + if (command === 'replace-legacy') { + if (args.length < 3) { + io.stderr(`${usageFor('replace-legacy')}\n`); + return { handled: true, exitCode: 1 }; + } + + const [find, to, ...filePatterns] = args; + const files = await expandGlobs(filePatterns); + if (files.length === 0) { + io.stderr('No .docx files found matching the pattern.\n'); + return { handled: true, exitCode: 1 }; + } + + const payload = await runLegacyReplace(find, to, files, io); + writeLegacySuccess(io, payload, jsonOutput); + return { handled: true, exitCode: 0 }; + } + + if (args.length < 1) { + io.stderr(`${usageFor('read')}\n`); + return { handled: true, exitCode: 1 }; + } + + const payload = await runLegacyRead(args[0], io); + writeLegacySuccess(io, payload, jsonOutput); + return { handled: true, exitCode: 0 }; + } catch (error) { + io.stderr(`Error: ${error instanceof Error ? 
error.message : String(error)}\n`); + return { handled: true, exitCode: 1 }; + } +} diff --git a/apps/cli/src/commands/open.ts b/apps/cli/src/commands/open.ts new file mode 100644 index 0000000000..a6bc83d5d8 --- /dev/null +++ b/apps/cli/src/commands/open.ts @@ -0,0 +1,157 @@ +import { getBooleanOption, getStringOption, requireDocArg, resolveJsonInput } from '../lib/args'; +import { parseCollaborationInput, resolveCollaborationProfile } from '../lib/collaboration'; +import { + getProjectRoot, + createInitialContextMetadata, + readContextMetadata, + resolveSourcePathForMetadata, + setActiveSessionId, + snapshotSourceFile, + withContextLock, + writeContextMetadata, +} from '../lib/context'; +import { exportToPath, openCollaborativeDocument, openDocument } from '../lib/document'; +import { CliError } from '../lib/errors'; +import { parseOperationArgs } from '../lib/operation-args'; +import { generateSessionId } from '../lib/session'; +import type { CommandContext, CommandExecution } from '../lib/types'; + +export async function runOpen(tokens: string[], context: CommandContext): Promise { + const { parsed, help } = parseOperationArgs('doc.open', tokens, { + commandName: 'open', + extraOptionSpecs: [{ name: 'collaboration-file', type: 'string' }], + }); + + if (help || getBooleanOption(parsed, 'help')) { + return { + command: 'open', + data: { + usage: [ + 'superdoc open [--session ]', + 'superdoc open --collaboration-json "{...}" [--session ]', + ], + }, + pretty: [ + 'Usage:', + ' superdoc open [--session ]', + ' superdoc open --collaboration-json "{...}" [--session ]', + ].join('\n'), + }; + } + + const { doc } = requireDocArg(parsed, 'open'); + + const sessionId = context.sessionId ?? 
generateSessionId(doc); + const collaborationPayload = await resolveJsonInput(parsed, 'collaboration'); + const collabUrl = getStringOption(parsed, 'collab-url'); + const collabDocumentId = getStringOption(parsed, 'collab-document-id'); + + if (collaborationPayload != null && (collabUrl || collabDocumentId)) { + throw new CliError( + 'INVALID_ARGUMENT', + 'open: do not combine --collaboration-json with --collab-url / --collab-document-id.', + ); + } + + let collaborationInput; + if (collaborationPayload != null) { + collaborationInput = parseCollaborationInput(collaborationPayload); + } else if (collabUrl) { + collaborationInput = parseCollaborationInput({ + providerType: 'hocuspocus', + url: collabUrl, + documentId: collabDocumentId, + }); + } else if (collabDocumentId) { + throw new CliError('MISSING_REQUIRED', 'open: --collab-document-id requires --collab-url.'); + } + + const collaboration = collaborationInput ? resolveCollaborationProfile(collaborationInput, sessionId) : undefined; + const sessionType = collaboration ? 'collab' : 'local'; + + return withContextLock( + context.io, + 'open', + async (paths) => { + const existing = await readContextMetadata(paths); + + if (existing && existing.projectRoot !== getProjectRoot()) { + throw new CliError( + 'PROJECT_CONTEXT_MISMATCH', + 'The requested session id belongs to a different project root.', + { + sessionId, + expectedProjectRoot: existing.projectRoot, + actualProjectRoot: getProjectRoot(), + }, + ); + } + + if (existing && existing.dirty) { + throw new CliError( + 'DIRTY_SESSION_EXISTS', + `Session "${sessionId}" has unsaved changes. Run "superdoc save" or "superdoc close --discard" first.`, + { + sessionId, + revision: existing.revision, + }, + ); + } + + const opened = collaboration + ? 
await openCollaborativeDocument(doc, context.io, collaboration) + : await openDocument(doc, context.io); + let adoptedToHostPool = false; + try { + const output = await exportToPath(opened.editor, paths.workingDocPath, true); + const sourcePath = + opened.meta.source === 'path' && opened.meta.path + ? resolveSourcePathForMetadata(opened.meta.path) + : undefined; + const sourceSnapshot = sourcePath ? await snapshotSourceFile(sourcePath) : undefined; + + const metadata = createInitialContextMetadata(context.io, paths, sessionId, { + source: opened.meta.source, + sourcePath, + sourceSnapshot, + sessionType, + collaboration, + }); + + await writeContextMetadata(paths, metadata); + await setActiveSessionId(metadata.contextId); + + if (collaboration && context.executionMode === 'host' && context.collabSessionPool) { + await context.collabSessionPool.adoptFromOpen(sessionId, opened, metadata, context.io); + adoptedToHostPool = true; + } + + return { + command: 'open', + data: { + active: true, + contextId: metadata.contextId, + document: { + path: metadata.sourcePath, + source: metadata.source, + byteLength: output.byteLength, + revision: metadata.revision, + }, + dirty: metadata.dirty, + sessionType: metadata.sessionType, + collaboration: metadata.collaboration, + openedAt: metadata.openedAt, + updatedAt: metadata.updatedAt, + }, + pretty: `Opened ${metadata.sourcePath ?? 
''} in context ${metadata.contextId} (${metadata.sessionType})`, + }; + } finally { + if (!adoptedToHostPool) { + opened.dispose(); + } + } + }, + undefined, + sessionId, + ); +} diff --git a/apps/cli/src/commands/read.ts b/apps/cli/src/commands/read.ts deleted file mode 100644 index c8d21e00f6..0000000000 --- a/apps/cli/src/commands/read.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { closeDocument, getDocumentText, openDocument } from '../lib/editor'; - -export interface ReadResult { - path: string; - content: string; -} - -/** - * Read a document and output its text content - */ -export async function read(filePath: string): Promise { - const doc = await openDocument(filePath); - - try { - const content = getDocumentText(doc); - return { path: filePath, content }; - } finally { - closeDocument(doc); - } -} diff --git a/apps/cli/src/commands/replace.ts b/apps/cli/src/commands/replace.ts deleted file mode 100644 index da22979b52..0000000000 --- a/apps/cli/src/commands/replace.ts +++ /dev/null @@ -1,49 +0,0 @@ -import { closeDocument, openDocument, replaceInDocument, saveDocument } from '../lib/editor'; - -export interface ReplaceFileResult { - path: string; - replacements: number; -} - -export interface ReplaceResult { - find: string; - replace: string; - files: ReplaceFileResult[]; - totalReplacements: number; -} - -/** - * Replace pattern in a single file - */ -async function replaceInFile(filePath: string, find: string, replace: string): Promise { - const doc = await openDocument(filePath); - - try { - const replacements = replaceInDocument(doc, find, replace); - - if (replacements > 0) { - await saveDocument(doc); - } - - return { path: filePath, replacements }; - } finally { - closeDocument(doc); - } -} - -/** - * Replace a pattern across multiple files - */ -export async function replace(find: string, replaceWith: string, filePaths: string[]): Promise { - const results = await Promise.all(filePaths.map((fp) => replaceInFile(fp, find, replaceWith))); - - const 
filesWithReplacements = results.filter((r) => r.replacements > 0); - const totalReplacements = results.reduce((sum, r) => sum + r.replacements, 0); - - return { - find, - replace: replaceWith, - files: filesWithReplacements, - totalReplacements, - }; -} diff --git a/apps/cli/src/commands/save.ts b/apps/cli/src/commands/save.ts new file mode 100644 index 0000000000..0635fdfbdd --- /dev/null +++ b/apps/cli/src/commands/save.ts @@ -0,0 +1,139 @@ +import { getBooleanOption, getStringOption } from '../lib/args'; +import { CliError } from '../lib/errors'; +import { parseOperationArgs } from '../lib/operation-args'; +import { + copyWorkingDocumentToPath, + detectSourceDrift, + markContextUpdated, + resolveSourcePathForMetadata, + snapshotSourceFile, + withActiveContext, + writeContextMetadata, +} from '../lib/context'; +import { openSessionDocument } from '../lib/document'; +import { syncCollaborativeSessionSnapshot } from '../lib/session-collab'; +import type { CommandContext, CommandExecution } from '../lib/types'; +function validateSaveMode( + inPlace: boolean, + outPath: string | undefined, + force: boolean, +): { + inPlace: boolean; + outPath?: string; + force: boolean; +} { + if (inPlace && outPath) { + throw new CliError('INVALID_ARGUMENT', 'save: use either --in-place or --out, not both.'); + } + + return { + inPlace, + outPath, + force, + }; +} + +export async function runSave(tokens: string[], context: CommandContext): Promise { + const { parsed, help } = parseOperationArgs('doc.save', tokens, { commandName: 'save' }); + + if (help) { + return { + command: 'save', + data: { + usage: ['superdoc save [--in-place] [--out ] [--force]'], + }, + pretty: ['Usage:', ' superdoc save [--in-place] [--out ] [--force]'].join('\n'), + }; + } + + const mode = validateSaveMode( + getBooleanOption(parsed, 'in-place'), + getStringOption(parsed, 'out'), + getBooleanOption(parsed, 'force'), + ); + + return withActiveContext( + context.io, + 'save', + async ({ metadata, paths }) => 
{ + let effectiveMetadata = metadata; + if (metadata.sessionType === 'collab') { + const opened = await openSessionDocument(paths.workingDocPath, context.io, metadata, { + sessionId: context.sessionId ?? metadata.contextId, + executionMode: context.executionMode, + collabSessionPool: context.collabSessionPool, + }); + try { + const synced = await syncCollaborativeSessionSnapshot(context.io, metadata, paths, opened.editor); + effectiveMetadata = synced.updatedMetadata; + } finally { + opened.dispose(); + } + } + + const resolvedOutPath = mode.outPath ? resolveSourcePathForMetadata(mode.outPath) : undefined; + const sourcePath = effectiveMetadata.sourcePath; + const targetPath = resolvedOutPath ?? sourcePath; + if (!targetPath) { + throw new CliError('MISSING_REQUIRED', 'save: this session has no source path; use --out .'); + } + + const isInPlace = mode.inPlace || (sourcePath != null && targetPath === sourcePath); + if (isInPlace && !sourcePath) { + throw new CliError('MISSING_REQUIRED', 'save: --in-place requires a source path; use --out .'); + } + + let output: { path: string; byteLength: number }; + if (isInPlace) { + const drift = await detectSourceDrift(effectiveMetadata); + if (drift.drifted && !mode.force) { + throw new CliError('SOURCE_DRIFT_DETECTED', 'Source document changed since open. Refusing to overwrite.', { + sourcePath: effectiveMetadata.sourcePath, + expected: drift.expected, + actual: drift.actual, + reason: drift.reason, + hint: 'Use --force to overwrite anyway or save with --out .', + }); + } + + output = await copyWorkingDocumentToPath(paths, sourcePath!, true); + } else { + output = await copyWorkingDocumentToPath(paths, targetPath, mode.force); + } + + const nextSourcePath = isInPlace ? sourcePath! 
: targetPath; + const nextSnapshot = await snapshotSourceFile(nextSourcePath); + const nowIso = new Date(context.io.now()).toISOString(); + const updatedMetadata = markContextUpdated(context.io, effectiveMetadata, { + source: 'path', + sourcePath: nextSourcePath, + sourceSnapshot: nextSnapshot, + dirty: false, + lastSavedAt: nowIso, + }); + await writeContextMetadata(paths, updatedMetadata); + + return { + command: 'save', + data: { + contextId: updatedMetadata.contextId, + saved: true, + inPlace: isInPlace, + document: { + path: updatedMetadata.sourcePath, + source: updatedMetadata.source, + revision: updatedMetadata.revision, + }, + context: { + dirty: updatedMetadata.dirty, + revision: updatedMetadata.revision, + lastSavedAt: updatedMetadata.lastSavedAt, + }, + output, + }, + pretty: `Saved context to ${output.path}`, + }; + }, + context.sessionId, + ); +} diff --git a/apps/cli/src/commands/search.ts b/apps/cli/src/commands/search.ts deleted file mode 100644 index de70b7a2d6..0000000000 --- a/apps/cli/src/commands/search.ts +++ /dev/null @@ -1,73 +0,0 @@ -import { closeDocument, getDocumentText, openDocument, searchDocument } from '../lib/editor'; - -export interface SearchMatch { - from: number; - to: number; - text: string; - context?: string; -} - -export interface SearchFileResult { - path: string; - matches: SearchMatch[]; -} - -export interface SearchResult { - pattern: string; - files: SearchFileResult[]; - totalMatches: number; -} - -/** - * Extract context around a match position - */ -function getMatchContext(fullText: string, from: number, to: number, contextChars = 40): string { - const start = Math.max(0, from - contextChars); - const end = Math.min(fullText.length, to + contextChars); - - let context = fullText.slice(start, end); - - // Add ellipsis if truncated - if (start > 0) context = `...${context}`; - if (end < fullText.length) context = `${context}...`; - - return context.replace(/\n/g, ' '); -} - -/** - * Search for a pattern in a single 
file - */ -async function searchFile(filePath: string, pattern: string): Promise { - const doc = await openDocument(filePath); - - try { - const matches = searchDocument(doc, pattern); - const fullText = getDocumentText(doc); - - return { - path: filePath, - matches: matches.map((m) => ({ - ...m, - context: getMatchContext(fullText, m.from, m.to), - })), - }; - } finally { - closeDocument(doc); - } -} - -/** - * Search for a pattern across multiple files - */ -export async function search(pattern: string, filePaths: string[]): Promise { - const results = await Promise.all(filePaths.map((fp) => searchFile(fp, pattern))); - - const filesWithMatches = results.filter((r) => r.matches.length > 0); - const totalMatches = filesWithMatches.reduce((sum, r) => sum + r.matches.length, 0); - - return { - pattern, - files: filesWithMatches, - totalMatches, - }; -} diff --git a/apps/cli/src/commands/session-close.ts b/apps/cli/src/commands/session-close.ts new file mode 100644 index 0000000000..4db7519771 --- /dev/null +++ b/apps/cli/src/commands/session-close.ts @@ -0,0 +1,68 @@ +import { ensureSessionExistsForProject } from '../lib/context'; +import { CliError } from '../lib/errors'; +import { validateSessionId } from '../lib/session'; +import type { CommandContext, CommandExecution } from '../lib/types'; +import { runClose } from './close'; + +function parseSessionCloseTarget( + tokens: string[], + context: CommandContext, +): { sessionId: string; closeTokens: string[] } { + const [first, ...tail] = tokens; + + if (first === '--help' || first === '-h') { + return { sessionId: '', closeTokens: ['--help'] }; + } + + const positionalSessionId = first && !first.startsWith('--') ? first : undefined; + if (positionalSessionId && context.sessionId && positionalSessionId !== context.sessionId) { + throw new CliError('INVALID_ARGUMENT', 'session close: positional conflicts with --session.'); + } + + const sessionId = positionalSessionId ?? 
context.sessionId; + if (!sessionId) { + throw new CliError('MISSING_REQUIRED', 'session close: missing required (or --session).'); + } + + return { + sessionId: validateSessionId(sessionId, 'session close session id'), + closeTokens: positionalSessionId ? tail : tokens, + }; +} + +export async function runSessionClose(tokens: string[], context: CommandContext): Promise { + const { sessionId, closeTokens } = parseSessionCloseTarget(tokens, context); + if (closeTokens[0] === '--help') { + return { + command: 'session close', + data: { + usage: [ + 'superdoc session close [--discard]', + 'superdoc session close --session [--discard]', + ], + }, + pretty: [ + 'Usage:', + ' superdoc session close [--discard]', + ' superdoc session close --session [--discard]', + ].join('\n'), + }; + } + + await ensureSessionExistsForProject(sessionId); + const closeResult = await runClose(closeTokens, { + ...context, + sessionId, + }); + + const data = + closeResult.data && typeof closeResult.data === 'object' + ? { ...(closeResult.data as Record), sessionId } + : closeResult.data; + + return { + ...closeResult, + command: 'session close', + data, + }; +} diff --git a/apps/cli/src/commands/session-list.ts b/apps/cli/src/commands/session-list.ts new file mode 100644 index 0000000000..838bc7d267 --- /dev/null +++ b/apps/cli/src/commands/session-list.ts @@ -0,0 +1,49 @@ +import { getActiveSessionId, listProjectSessions } from '../lib/context'; +import { parseOperationArgs } from '../lib/operation-args'; +import type { CommandContext, CommandExecution } from '../lib/types'; + +function buildPretty( + activeSessionId: string | null, + sessions: Awaited>, +): string { + if (sessions.length === 0) { + return 'No sessions found'; + } + + const lines = [`Default session: ${activeSessionId ?? ''}`, 'Sessions:']; + for (const session of sessions) { + const marker = session.sessionId === activeSessionId ? '*' : ' '; + const sessionTypeLabel = session.sessionType === 'collab' ? 
'collab' : 'local'; + const collabDocId = session.collaboration?.documentId ? `, doc ${session.collaboration.documentId}` : ''; + lines.push( + `${marker} ${session.sessionId} (${sessionTypeLabel}, ${session.dirty ? 'dirty' : 'clean'}, rev ${session.revision}${collabDocId})`, + ); + } + return lines.join('\n'); +} + +export async function runSessionList(tokens: string[], _context: CommandContext): Promise { + const { help } = parseOperationArgs('doc.session.list', tokens, { commandName: 'session list' }); + + if (help) { + return { + command: 'session list', + data: { + usage: 'superdoc session list', + }, + pretty: 'Usage: superdoc session list', + }; + } + + const [sessions, activeSessionId] = await Promise.all([listProjectSessions(), getActiveSessionId()]); + + return { + command: 'session list', + data: { + activeSessionId: activeSessionId ?? undefined, + sessions, + total: sessions.length, + }, + pretty: buildPretty(activeSessionId, sessions), + }; +} diff --git a/apps/cli/src/commands/session-save.ts b/apps/cli/src/commands/session-save.ts new file mode 100644 index 0000000000..efbf422d63 --- /dev/null +++ b/apps/cli/src/commands/session-save.ts @@ -0,0 +1,68 @@ +import { ensureSessionExistsForProject } from '../lib/context'; +import { CliError } from '../lib/errors'; +import { validateSessionId } from '../lib/session'; +import type { CommandContext, CommandExecution } from '../lib/types'; +import { runSave } from './save'; + +function parseSessionSaveTarget( + tokens: string[], + context: CommandContext, +): { sessionId: string; saveTokens: string[] } { + const [first, ...tail] = tokens; + + if (first === '--help' || first === '-h') { + return { sessionId: '', saveTokens: ['--help'] }; + } + + const positionalSessionId = first && !first.startsWith('--') ? 
first : undefined; + if (positionalSessionId && context.sessionId && positionalSessionId !== context.sessionId) { + throw new CliError('INVALID_ARGUMENT', 'session save: positional conflicts with --session.'); + } + + const sessionId = positionalSessionId ?? context.sessionId; + if (!sessionId) { + throw new CliError('MISSING_REQUIRED', 'session save: missing required (or --session).'); + } + + return { + sessionId: validateSessionId(sessionId, 'session save session id'), + saveTokens: positionalSessionId ? tail : tokens, + }; +} + +export async function runSessionSave(tokens: string[], context: CommandContext): Promise { + const { sessionId, saveTokens } = parseSessionSaveTarget(tokens, context); + if (saveTokens[0] === '--help') { + return { + command: 'session save', + data: { + usage: [ + 'superdoc session save [--in-place] [--out ] [--force]', + 'superdoc session save --session [--in-place] [--out ] [--force]', + ], + }, + pretty: [ + 'Usage:', + ' superdoc session save [--in-place] [--out ] [--force]', + ' superdoc session save --session [--in-place] [--out ] [--force]', + ].join('\n'), + }; + } + + await ensureSessionExistsForProject(sessionId); + const saveResult = await runSave(saveTokens, { + ...context, + sessionId, + }); + + const data = + saveResult.data && typeof saveResult.data === 'object' + ? 
{ ...(saveResult.data as Record), sessionId } + : saveResult.data; + + return { + ...saveResult, + command: 'session save', + data, + }; +} diff --git a/apps/cli/src/commands/session-set-default.ts b/apps/cli/src/commands/session-set-default.ts new file mode 100644 index 0000000000..112baee7fa --- /dev/null +++ b/apps/cli/src/commands/session-set-default.ts @@ -0,0 +1,75 @@ +import { ensureSessionExistsForProject, setActiveSessionId } from '../lib/context'; +import { CliError } from '../lib/errors'; +import { validateSessionId } from '../lib/session'; +import type { CommandContext, CommandExecution } from '../lib/types'; + +function parseSessionId( + tokens: string[], + context: CommandContext, + commandName: string, +): { sessionId: string; rest: string[] } { + const [first, ...tail] = tokens; + + if (first === '--help' || first === '-h') { + return { sessionId: '', rest: ['--help'] }; + } + + const positionalSessionId = first && !first.startsWith('--') ? first : undefined; + if (positionalSessionId && context.sessionId && positionalSessionId !== context.sessionId) { + throw new CliError('INVALID_ARGUMENT', `${commandName}: positional conflicts with --session.`); + } + + const sessionId = positionalSessionId ?? context.sessionId; + if (!sessionId) { + throw new CliError('MISSING_REQUIRED', `${commandName}: missing required (or --session).`); + } + + return { + sessionId: validateSessionId(sessionId, `${commandName} session id`), + rest: positionalSessionId ? 
tail : tokens, + }; +} + +async function runSetDefault( + tokens: string[], + context: CommandContext, + commandName: 'session set-default' | 'session use', +): Promise { + const { sessionId, rest } = parseSessionId(tokens, context, commandName); + if (rest[0] === '--help') { + return { + command: commandName, + data: { + usage: [`superdoc ${commandName} `, `superdoc ${commandName} --session `], + }, + pretty: [ + `Usage:`, + ` superdoc ${commandName} `, + ` superdoc ${commandName} --session `, + ].join('\n'), + }; + } + + if (rest.length > 0) { + throw new CliError('INVALID_ARGUMENT', `${commandName}: unexpected argument(s): ${rest.join(' ')}`); + } + + await ensureSessionExistsForProject(sessionId); + await setActiveSessionId(sessionId); + + return { + command: commandName, + data: { + activeSessionId: sessionId, + }, + pretty: `Default session set to ${sessionId}`, + }; +} + +export async function runSessionSetDefault(tokens: string[], context: CommandContext): Promise { + return runSetDefault(tokens, context, 'session set-default'); +} + +export async function runSessionUse(tokens: string[], context: CommandContext): Promise { + return runSetDefault(tokens, context, 'session use'); +} diff --git a/apps/cli/src/commands/uninstall.ts b/apps/cli/src/commands/uninstall.ts new file mode 100644 index 0000000000..29496129ad --- /dev/null +++ b/apps/cli/src/commands/uninstall.ts @@ -0,0 +1,33 @@ +import { existsSync, rmSync } from 'node:fs'; +import { join } from 'node:path'; +import type { CliIO } from '../lib/types'; + +const SKILL_PATHS = [ + { name: 'Claude Code', path: '.claude/skills/superdoc' }, + { name: 'Codex', path: '.agents/skills/superdoc' }, +] as const; + +export async function runUninstall(tokens: string[], io: CliIO): Promise { + if (!tokens.includes('--skills')) { + io.stderr('Usage: superdoc uninstall --skills\n'); + return 1; + } + + const cwd = process.cwd(); + let removed = 0; + + for (const target of SKILL_PATHS) { + const fullPath = join(cwd, 
target.path); + if (!existsSync(fullPath)) continue; + + rmSync(fullPath, { recursive: true }); + io.stdout(`Removed ${target.path}/\n`); + removed += 1; + } + + if (removed === 0) { + io.stdout('No installed skills found.\n'); + } + + return 0; +} diff --git a/apps/cli/src/host/collab-session-pool.ts b/apps/cli/src/host/collab-session-pool.ts new file mode 100644 index 0000000000..28e20f9732 --- /dev/null +++ b/apps/cli/src/host/collab-session-pool.ts @@ -0,0 +1,175 @@ +import type { CollaborationProfile } from '../lib/collaboration'; +import { openCollaborativeDocument, type OpenedDocument } from '../lib/document'; +import { CliError } from '../lib/errors'; +import type { CliIO } from '../lib/types'; + +/** Metadata describing a document editing session and its optional collaboration configuration. */ +export interface CollaborationSessionMetadata { + contextId: string; + sessionType: 'local' | 'collab'; + collaboration?: CollaborationProfile; + sourcePath?: string; + workingDocPath: string; +} + +type SessionFingerprint = { + profileKey: string; + workingDocPath: string; +}; + +type PooledSessionHandle = { + opened: OpenedDocument; + fingerprint: SessionFingerprint; + lastUsedAtMs: number; +}; + +type OpenCollaborativeDocumentFn = ( + docPath: string, + io: CliIO, + profile: CollaborationProfile, +) => Promise; + +function profileToKey(profile: CollaborationProfile): string { + return JSON.stringify({ + providerType: profile.providerType, + url: profile.url, + documentId: profile.documentId, + tokenEnv: profile.tokenEnv ?? null, + syncTimeoutMs: profile.syncTimeoutMs ?? 
null, + }); +} + +function buildFingerprint(metadata: CollaborationSessionMetadata): SessionFingerprint { + if (metadata.sessionType !== 'collab') { + throw new CliError('COMMAND_FAILED', 'Session is not collaborative.', { + contextId: metadata.contextId, + sessionType: metadata.sessionType, + }); + } + + if (!metadata.collaboration) { + throw new CliError('COMMAND_FAILED', 'Collaborative session metadata is missing collaboration profile.', { + contextId: metadata.contextId, + }); + } + + return { + profileKey: profileToKey(metadata.collaboration), + workingDocPath: metadata.workingDocPath, + }; +} + +function sameFingerprint(left: SessionFingerprint, right: SessionFingerprint): boolean { + return left.profileKey === right.profileKey && left.workingDocPath === right.workingDocPath; +} + +/** + * Manages pooled collaboration sessions, reusing connections when the session + * fingerprint (provider profile + working document path) matches. + */ +export interface CollaborationSessionPool { + /** Acquires (or reuses) a collaborative session, returning a leased document handle. */ + acquire( + sessionId: string, + docPath: string, + metadata: CollaborationSessionMetadata, + io: CliIO, + ): Promise; + /** Adopts an externally-opened document into the pool, replacing any existing session. */ + adoptFromOpen( + sessionId: string, + opened: OpenedDocument, + metadata: CollaborationSessionMetadata, + io: CliIO, + ): Promise; + /** Disposes a single session by id, closing its underlying document. */ + disposeSession(sessionId: string): Promise; + /** Disposes all pooled sessions. */ + disposeAll(): Promise; +} + +/** In-memory implementation of {@link CollaborationSessionPool}. 
*/ +export class InMemoryCollaborationSessionPool implements CollaborationSessionPool { + private readonly handles = new Map(); + private readonly openCollaborative: OpenCollaborativeDocumentFn; + private readonly now: () => number; + + constructor(options: { openCollaborative?: OpenCollaborativeDocumentFn; now?: () => number } = {}) { + this.openCollaborative = options.openCollaborative ?? openCollaborativeDocument; + this.now = options.now ?? Date.now; + } + + async acquire( + sessionId: string, + docPath: string, + metadata: CollaborationSessionMetadata, + io: CliIO, + ): Promise { + const fingerprint = buildFingerprint(metadata); + const existing = this.handles.get(sessionId); + + if (existing) { + if (sameFingerprint(existing.fingerprint, fingerprint)) { + existing.lastUsedAtMs = this.now(); + return this.createLease(existing); + } + + await this.disposeSession(sessionId); + } + + // Safe to assert: buildFingerprint above already validated metadata.collaboration + const profile = metadata.collaboration!; + + const opened = await this.openCollaborative(docPath, io, profile); + const created: PooledSessionHandle = { + opened, + fingerprint, + lastUsedAtMs: this.now(), + }; + this.handles.set(sessionId, created); + + return this.createLease(created); + } + + async adoptFromOpen( + sessionId: string, + opened: OpenedDocument, + metadata: CollaborationSessionMetadata, + _io: CliIO, + ): Promise { + const fingerprint = buildFingerprint(metadata); + + await this.disposeSession(sessionId); + + this.handles.set(sessionId, { + opened, + fingerprint, + lastUsedAtMs: this.now(), + }); + } + + async disposeSession(sessionId: string): Promise { + const existing = this.handles.get(sessionId); + if (!existing) return; + + this.handles.delete(sessionId); + existing.opened.dispose(); + } + + async disposeAll(): Promise { + const sessionIds = Array.from(this.handles.keys()); + for (const sessionId of sessionIds) { + await this.disposeSession(sessionId); + } + } + + private 
createLease(handle: PooledSessionHandle): OpenedDocument { + return { + editor: handle.opened.editor, + meta: handle.opened.meta, + dispose: () => { + handle.lastUsedAtMs = this.now(); + }, + }; + } +} diff --git a/apps/cli/src/host/invoke.ts b/apps/cli/src/host/invoke.ts new file mode 100644 index 0000000000..31411753d4 --- /dev/null +++ b/apps/cli/src/host/invoke.ts @@ -0,0 +1,150 @@ +import { invokeCommand } from '../index'; +import { CliError } from '../lib/errors'; +import { asRecord } from '../lib/guards'; +import type { CliIO } from '../lib/types'; +import type { CollaborationSessionPool } from './collab-session-pool'; +import { DEFAULT_MAX_STDIN_BYTES } from './protocol'; + +const BASE64_PATTERN = /^[A-Za-z0-9+/]*={0,2}$/; + +type CliInvokeParams = { + argv: string[]; + stdinBytes?: Uint8Array; +}; + +/** + * Options for invoking CLI commands from the host process. + * + * @param ioNow - Clock function used for elapsed-time tracking + * @param collabSessionPool - Pool for reusing collaboration sessions across invocations + * @param maxStdinBytes - Maximum allowed size (bytes) for base64-decoded stdin payloads + */ +export interface HostInvokeCliOptions { + ioNow?: () => number; + collabSessionPool?: CollaborationSessionPool; + maxStdinBytes?: number; +} + +function estimateBase64RawLength(base64: string): number { + const padding = base64.endsWith('==') ? 2 : base64.endsWith('=') ? 
1 : 0; + return Math.floor((base64.length * 3) / 4) - padding; +} + +function decodeStdinBase64(value: string, maxStdinBytes: number): Uint8Array { + const normalized = value.trim(); + if (!normalized) { + return new Uint8Array(); + } + + if (normalized.length % 4 !== 0 || !BASE64_PATTERN.test(normalized)) { + throw new CliError('INVALID_ARGUMENT', 'cli.invoke params.stdinBase64 must be valid base64.'); + } + + const estimatedBytes = estimateBase64RawLength(normalized); + if (estimatedBytes > maxStdinBytes) { + throw new CliError( + 'INVALID_ARGUMENT', + `cli.invoke stdin payload exceeds ${maxStdinBytes} bytes; use file-path input instead of stdin.`, + { + maxStdinBytes, + estimatedBytes, + }, + ); + } + + const buffer = Buffer.from(normalized, 'base64'); + if (buffer.byteLength > maxStdinBytes) { + throw new CliError( + 'INVALID_ARGUMENT', + `cli.invoke stdin payload exceeds ${maxStdinBytes} bytes; use file-path input instead of stdin.`, + { + maxStdinBytes, + byteLength: buffer.byteLength, + }, + ); + } + + return new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength); +} + +function parseCliInvokeParams(rawParams: unknown, maxStdinBytes: number): CliInvokeParams { + const record = asRecord(rawParams); + if (!record) { + throw new CliError('INVALID_ARGUMENT', 'cli.invoke params must be an object.'); + } + + const argvRaw = record.argv; + if (!Array.isArray(argvRaw) || argvRaw.length === 0 || argvRaw.some((token) => typeof token !== 'string')) { + throw new CliError('INVALID_ARGUMENT', 'cli.invoke params.argv must be a non-empty string array.'); + } + + const stdinBase64 = record.stdinBase64; + if (stdinBase64 == null) { + return { argv: argvRaw as string[] }; + } + + if (typeof stdinBase64 !== 'string') { + throw new CliError('INVALID_ARGUMENT', 'cli.invoke params.stdinBase64 must be a string when provided.'); + } + + return { + argv: argvRaw as string[], + stdinBytes: decodeStdinBase64(stdinBase64, maxStdinBytes), + }; +} + +/** + * Parses raw 
JSON-RPC params and executes a CLI command within the host process. + * + * @param rawParams - Untyped params from the JSON-RPC request (expected shape: `{ argv: string[], stdinBase64?: string }`) + * @param options - Host invocation options (clock, session pool, stdin size limit) + * @returns The command name, result data, and elapsed-time metadata + * @throws {CliError} On invalid params, stdin size violations, or command failures + */ +export async function invokeCliFromHost( + rawParams: unknown, + options: HostInvokeCliOptions = {}, +): Promise<{ command: string; data: unknown; meta: { elapsedMs: number } }> { + const maxStdinBytes = options.maxStdinBytes ?? DEFAULT_MAX_STDIN_BYTES; + const params = parseCliInvokeParams(rawParams, maxStdinBytes); + + const stdinBytes = params.stdinBytes; + const readStdinBytes = async () => stdinBytes ?? new Uint8Array(); + + const io: Partial = { + readStdinBytes, + now: options.ioNow, + stdout() {}, + stderr() {}, + }; + + const invocation = await invokeCommand(params.argv, { + ioOverrides: io, + executionMode: 'host', + collabSessionPool: options.collabSessionPool, + }); + + if (invocation.helpText) { + return { + command: 'help', + data: { + usage: invocation.helpText, + }, + meta: { + elapsedMs: invocation.elapsedMs, + }, + }; + } + + if (!invocation.execution) { + throw new CliError('COMMAND_FAILED', 'cli.invoke produced no command result.'); + } + + return { + command: invocation.execution.command, + data: invocation.execution.data, + meta: { + elapsedMs: invocation.elapsedMs, + }, + }; +} diff --git a/apps/cli/src/host/protocol.ts b/apps/cli/src/host/protocol.ts new file mode 100644 index 0000000000..e121b7ab75 --- /dev/null +++ b/apps/cli/src/host/protocol.ts @@ -0,0 +1,198 @@ +import { isRecord } from '../lib/guards'; + +/** Current host protocol version string. */ +export const HOST_PROTOCOL_VERSION = '1.0'; + +/** JSON-RPC methods the host server supports. 
*/ +export const HOST_PROTOCOL_FEATURES = [ + 'cli.invoke', + 'host.shutdown', + 'host.describe', + 'host.describe.command', +] as const; + +/** Notification methods the host may emit to connected clients. */ +export const HOST_PROTOCOL_NOTIFICATIONS = ['event.remoteChange', 'event.sessionClosed'] as const; + +/** Maximum byte size for base64-decoded stdin payloads (32 MiB). */ +export const DEFAULT_MAX_STDIN_BYTES = 32 * 1024 * 1024; + +/** A JSON-RPC 2.0 request id — string, number, or null. */ +export type JsonRpcId = string | number | null; + +/** A JSON-RPC 2.0 request object. */ +export type JsonRpcRequest = { + jsonrpc: '2.0'; + id?: JsonRpcId; + method: string; + params?: unknown; +}; + +/** A JSON-RPC 2.0 success response. */ +export type JsonRpcSuccess = { + jsonrpc: '2.0'; + id: JsonRpcId; + result: unknown; +}; + +/** The `error` payload within a JSON-RPC 2.0 error response. */ +export type JsonRpcErrorObject = { + code: number; + message: string; + data?: unknown; +}; + +/** A JSON-RPC 2.0 error response. */ +export type JsonRpcError = { + jsonrpc: '2.0'; + id: JsonRpcId; + error: JsonRpcErrorObject; +}; + +/** A JSON-RPC 2.0 notification (no `id`). */ +export type JsonRpcNotification = { + jsonrpc: '2.0'; + method: string; + params?: unknown; +}; + +/** Standard and application-specific JSON-RPC error codes. */ +export const JsonRpcCode = { + ParseError: -32700, + InvalidRequest: -32600, + MethodNotFound: -32601, + InvalidParams: -32602, + InternalError: -32603, + CliInvokeFailed: -32010, + RequestTimeout: -32011, + RequestTooLarge: -32012, +} as const; + +/** + * Parses a single newline-delimited JSON-RPC 2.0 frame. 
+ * + * @param line - Raw line from the transport (may include whitespace) + * @returns Either a parsed `request` or a structured `error` describing the parse failure + */ +export function parseJsonRpcLine(line: string): { request?: JsonRpcRequest; error?: JsonRpcErrorObject } { + if (!line.trim()) { + return { + error: { + code: JsonRpcCode.InvalidRequest, + message: 'Invalid JSON-RPC request: empty frame.', + }, + }; + } + + let parsed: unknown; + try { + parsed = JSON.parse(line); + } catch { + return { + error: { + code: JsonRpcCode.ParseError, + message: 'Parse error: invalid JSON.', + }, + }; + } + + if (!isRecord(parsed)) { + return { + error: { + code: JsonRpcCode.InvalidRequest, + message: 'Invalid JSON-RPC request object.', + }, + }; + } + + if (parsed.jsonrpc !== '2.0') { + return { + error: { + code: JsonRpcCode.InvalidRequest, + message: 'Invalid JSON-RPC version; expected "2.0".', + }, + }; + } + + if (typeof parsed.method !== 'string' || parsed.method.length === 0) { + return { + error: { + code: JsonRpcCode.InvalidRequest, + message: 'Invalid JSON-RPC method.', + }, + }; + } + + if ('id' in parsed) { + const id = parsed.id; + const validIdType = typeof id === 'string' || typeof id === 'number' || id === null; + if (!validIdType) { + return { + error: { + code: JsonRpcCode.InvalidRequest, + message: 'Invalid JSON-RPC id type.', + }, + }; + } + } + + return { + request: parsed as JsonRpcRequest, + }; +} + +/** + * Constructs a JSON-RPC 2.0 success response. + * + * @param id - The request id to echo back + * @param result - The result payload + * @returns A well-formed success response object + */ +export function makeSuccess(id: JsonRpcId, result: unknown): JsonRpcSuccess { + return { + jsonrpc: '2.0', + id, + result, + }; +} + +/** + * Constructs a JSON-RPC 2.0 error response. 
+ * + * @param id - The request id to echo back (null for parse-level errors) + * @param code - Numeric error code (see {@link JsonRpcCode}) + * @param message - Human-readable error description + * @param data - Optional structured error payload + * @returns A well-formed error response object + */ +export function makeError(id: JsonRpcId, code: number, message: string, data?: unknown): JsonRpcError { + return { + jsonrpc: '2.0', + id, + error: { + code, + message, + ...(data === undefined ? {} : { data }), + }, + }; +} + +/** + * Serializes a JSON-RPC response or notification to a newline-terminated JSON string. + * + * @param frame - The response or notification to serialize + * @returns A single-line JSON string terminated by `\n` + */ +export function serializeFrame(frame: JsonRpcSuccess | JsonRpcError | JsonRpcNotification): string { + return `${JSON.stringify(frame)}\n`; +} + +/** + * Type guard that checks whether a JSON-RPC request has an `id` field (i.e. is not a notification). 
+ * + * @param request - The request to inspect + * @returns `true` if the request carries an id and expects a response + */ +export function hasRequestId(request: JsonRpcRequest): request is JsonRpcRequest & { id: JsonRpcId } { + return 'id' in request; +} diff --git a/apps/cli/src/host/server.ts b/apps/cli/src/host/server.ts new file mode 100644 index 0000000000..edf341c3c2 --- /dev/null +++ b/apps/cli/src/host/server.ts @@ -0,0 +1,359 @@ +import { createInterface } from 'node:readline'; +import { readFileSync } from 'node:fs'; +import { dirname, resolve } from 'node:path'; +import { fileURLToPath } from 'node:url'; +import { CliError, toCliError } from '../lib/errors'; +import { asRecord } from '../lib/guards'; +import type { CliIO } from '../lib/types'; +import { buildContractOperationDetail, buildContractOverview } from '../lib/contract'; +import { InMemoryCollaborationSessionPool, type CollaborationSessionPool } from './collab-session-pool'; +import { invokeCliFromHost } from './invoke'; +import { + DEFAULT_MAX_STDIN_BYTES, + HOST_PROTOCOL_FEATURES, + HOST_PROTOCOL_NOTIFICATIONS, + HOST_PROTOCOL_VERSION, + JsonRpcCode, + hasRequestId, + makeError, + makeSuccess, + parseJsonRpcLine, + serializeFrame, + type JsonRpcRequest, +} from './protocol'; + +const HOST_HELP = `Usage:\n superdoc host --stdio\n`; +const DEFAULT_REQUEST_TIMEOUT_MS = 30_000; + +type HostServerOptions = { + io: Pick; + requestTimeoutMs?: number; + maxStdinBytes?: number; + collabSessionPool?: CollaborationSessionPool; +}; + +function resolveCliVersion(): string { + try { + const __filename = fileURLToPath(import.meta.url); + const __dirname = dirname(__filename); + const packagePath = resolve(__dirname, '../../package.json'); + const raw = readFileSync(packagePath, 'utf8'); + const parsed = JSON.parse(raw) as { version?: unknown }; + if (typeof parsed.version === 'string' && parsed.version.length > 0) { + return parsed.version; + } + } catch { + // ignore and fall through + } + + return 
'0.0.0'; +} + +function parseHostCommandTokens(tokens: string[]): { stdio: boolean; help: boolean } { + let stdio = false; + let help = false; + + for (const token of tokens) { + if (token === '--stdio') { + stdio = true; + continue; + } + + if (token === '--help' || token === '-h') { + help = true; + continue; + } + + throw new CliError('INVALID_ARGUMENT', `host: unknown option ${token}`); + } + + return { stdio, help }; +} + +type SettledOutcome = + | { kind: 'success'; value: T } + | { kind: 'error'; error: unknown } + | { kind: 'timeout'; awaitSettle: Promise }; + +async function settleWithTimeout(promise: Promise, timeoutMs: number): Promise> { + const settled = promise.then( + (value) => ({ kind: 'success', value }) as const, + (error) => ({ kind: 'error', error }) as const, + ); + + let timeoutHandle: ReturnType | undefined; + const timeout = new Promise<{ kind: 'timeout' }>((resolve) => { + timeoutHandle = setTimeout(() => resolve({ kind: 'timeout' }), timeoutMs); + }); + + const raced = await Promise.race([settled, timeout]); + if (timeoutHandle != null) { + clearTimeout(timeoutHandle); + } + + if (raced.kind !== 'timeout') { + return raced; + } + + return { + kind: 'timeout', + awaitSettle: settled.then(() => undefined), + }; +} + +class HostServer { + private readonly io: Pick; + private readonly requestTimeoutMs: number; + private readonly maxStdinBytes: number; + private readonly collabSessionPool: CollaborationSessionPool; + private readonly ownsPool: boolean; + private queue: Promise = Promise.resolve(); + private shutdownRequested = false; + + constructor(options: HostServerOptions) { + this.io = options.io; + this.requestTimeoutMs = options.requestTimeoutMs ?? DEFAULT_REQUEST_TIMEOUT_MS; + this.maxStdinBytes = options.maxStdinBytes ?? 
DEFAULT_MAX_STDIN_BYTES; + + if (options.collabSessionPool) { + this.collabSessionPool = options.collabSessionPool; + this.ownsPool = false; + } else { + this.collabSessionPool = new InMemoryCollaborationSessionPool(); + this.ownsPool = true; + } + } + + isShutdownRequested(): boolean { + return this.shutdownRequested; + } + + async handleLine(line: string): Promise { + const parsed = parseJsonRpcLine(line); + if (parsed.error) { + this.writeFrame(makeError(null, parsed.error.code, parsed.error.message)); + return; + } + + const request = parsed.request; + if (!request) { + this.writeFrame(makeError(null, JsonRpcCode.InvalidRequest, 'Invalid JSON-RPC request.')); + return; + } + + this.queue = this.queue + .then(() => this.handleRequest(request)) + .catch((error) => { + const normalized = toCliError(error); + if (hasRequestId(request)) { + this.writeFrame( + makeError(request.id, JsonRpcCode.InternalError, normalized.message, { + cliCode: normalized.code, + details: normalized.details, + exitCode: normalized.exitCode, + }), + ); + } + }); + + await this.queue; + } + + async dispose(): Promise { + if (this.ownsPool) { + await this.collabSessionPool.disposeAll(); + } + } + + private writeFrame(frame: ReturnType | ReturnType): void { + this.io.stdout(serializeFrame(frame)); + } + + private async handleRequest(request: JsonRpcRequest): Promise { + const id = hasRequestId(request) ? 
request.id : null; + const isNotification = !hasRequestId(request); + + if (request.method === 'host.ping') { + if (!isNotification) { + this.writeFrame( + makeSuccess(id, { + ok: true, + now: this.io.now(), + }), + ); + } + return; + } + + if (request.method === 'host.capabilities') { + if (!isNotification) { + this.writeFrame( + makeSuccess(id, { + protocolVersion: HOST_PROTOCOL_VERSION, + features: [...HOST_PROTOCOL_FEATURES], + notifications: [...HOST_PROTOCOL_NOTIFICATIONS], + cliVersion: resolveCliVersion(), + }), + ); + } + return; + } + + if (request.method === 'host.describe') { + if (!isNotification) { + this.writeFrame(makeSuccess(id, buildContractOverview())); + } + return; + } + + if (request.method === 'host.describe.command') { + const params = asRecord(request.params); + const operationId = typeof params?.operationId === 'string' ? params.operationId.trim() : ''; + if (!operationId) { + if (!isNotification) { + this.writeFrame( + makeError(id, JsonRpcCode.InvalidParams, 'host.describe.command requires params.operationId (string).'), + ); + } + return; + } + + const detail = buildContractOperationDetail(operationId); + if (!detail) { + if (!isNotification) { + this.writeFrame( + makeError(id, JsonRpcCode.InvalidParams, `Unknown operation: ${operationId}`, { + operationId, + }), + ); + } + return; + } + + if (!isNotification) { + this.writeFrame(makeSuccess(id, detail)); + } + return; + } + + if (request.method === 'host.shutdown') { + this.shutdownRequested = true; + if (!isNotification) { + this.writeFrame( + makeSuccess(id, { + shutdown: true, + }), + ); + } + return; + } + + if (request.method !== 'cli.invoke') { + if (!isNotification) { + this.writeFrame(makeError(id, JsonRpcCode.MethodNotFound, `Method not found: ${request.method}`)); + } + return; + } + + const outcome = await settleWithTimeout( + invokeCliFromHost(request.params, { + ioNow: this.io.now, + collabSessionPool: this.collabSessionPool, + maxStdinBytes: this.maxStdinBytes, + }), + 
this.requestTimeoutMs, + ); + + if (outcome.kind === 'timeout') { + if (!isNotification) { + this.writeFrame( + makeError(id, JsonRpcCode.RequestTimeout, `Host request timed out after ${this.requestTimeoutMs}ms.`, { + timeoutMs: this.requestTimeoutMs, + }), + ); + } + + // The invoke operation has no cooperative cancellation yet. Wait for it to + // settle so queued requests cannot overlap with a timed-out mutation. + await outcome.awaitSettle; + return; + } + + if (outcome.kind === 'success') { + if (!isNotification) { + this.writeFrame(makeSuccess(id, outcome.value)); + } + return; + } + + const cliError = toCliError(outcome.error); + if (isNotification) return; + + const isHostTimeout = cliError.code === 'TIMEOUT'; + const isPayloadTooLarge = + cliError.code === 'INVALID_ARGUMENT' && + typeof cliError.message === 'string' && + cliError.message.includes('stdin payload exceeds'); + + this.writeFrame( + makeError( + id, + isHostTimeout + ? JsonRpcCode.RequestTimeout + : isPayloadTooLarge + ? JsonRpcCode.RequestTooLarge + : JsonRpcCode.CliInvokeFailed, + cliError.message, + { + cliCode: cliError.code, + message: cliError.message, + details: cliError.details, + exitCode: cliError.exitCode, + }, + ), + ); + } +} + +/** + * Starts the host server in stdio mode, reading newline-delimited JSON-RPC requests from stdin + * and writing responses to stdout. + * + * @param tokens - CLI tokens after "host" (e.g. 
`["--stdio"]`) + * @param io - I/O adapter for stdout output and clock + * @returns Exit code (0 on clean shutdown) + * @throws {CliError} If an unsupported transport is requested + */ +export async function runHostStdio(tokens: string[], io: CliIO): Promise { + const parsed = parseHostCommandTokens(tokens); + if (parsed.help) { + io.stdout(HOST_HELP); + return 0; + } + + if (!parsed.stdio) { + throw new CliError('INVALID_ARGUMENT', 'host: only --stdio is supported in v1.'); + } + + const server = new HostServer({ io }); + const rl = createInterface({ + input: process.stdin, + crlfDelay: Number.POSITIVE_INFINITY, + }); + + try { + for await (const line of rl) { + await server.handleLine(line); + if (server.isShutdownRequested()) { + rl.close(); + break; + } + } + } finally { + await server.dispose(); + } + + return 0; +} + +export { HostServer }; diff --git a/apps/cli/src/index.ts b/apps/cli/src/index.ts index 45ef951afb..17062f800d 100644 --- a/apps/cli/src/index.ts +++ b/apps/cli/src/index.ts @@ -1,179 +1,351 @@ #!/usr/bin/env node -import { glob } from 'fast-glob'; -import { read } from './commands/read'; -import { type ReplaceResult, replace } from './commands/replace'; -import { type SearchResult, search } from './commands/search'; +import { parseGlobalArgs } from './lib/args'; +import { createFailureEnvelope, createSuccessEnvelope } from './lib/envelope'; +import { CliError, toCliError } from './lib/errors'; +import { normalizeJsonValue } from './lib/input-readers'; +import type { CliIO, CommandContext, CommandExecution, ExecutionMode, GlobalOptions, OutputMode } from './lib/types'; +import { runCall } from './commands/call'; +import { runClose } from './commands/close'; +import { runOpen } from './commands/open'; +import { runSessionClose } from './commands/session-close'; +import { runSessionList } from './commands/session-list'; +import { runSessionSave } from './commands/session-save'; +import { runSessionSetDefault, runSessionUse } from 
'./commands/session-set-default'; +import { runSave } from './commands/save'; +import { tryRunLegacyCompatCommand } from './commands/legacy-compat'; +import { runCommandWrapper } from './lib/wrapper-dispatch'; +import { MANUAL_COMMAND_ALLOWLIST, type ManualCommandKey } from './lib/manual-command-allowlist'; +import { validateOperationResponseData } from './lib/operation-args'; +import { runInstall } from './commands/install'; +import { runUninstall } from './commands/uninstall'; +import { + CLI_COMMAND_SPECS, + CLI_COMMAND_KEYS, + CLI_HELP, + CLI_MAX_COMMAND_TOKENS, + type CliCommandKey, + type CliOperationId, +} from './cli'; + +const HELP = [ + CLI_HELP, + '', + 'Legacy compatibility (v0.x):', + ' superdoc search ', + ' superdoc replace-legacy ', + ' superdoc read ', + '', + 'Canonical machine call:', + ' superdoc call [--input-json "{...}"|--input-file payload.json]', +].join('\n'); + +type CommandRunner = (tokens: string[], context: CommandContext) => Promise; + +type ParsedInvocation = { + globals: GlobalOptions; + rest: string[]; +}; + +/** The result of a programmatic CLI invocation via {@link invokeCommand}. */ +export type InvokeCommandResult = { + globals: GlobalOptions; + execution?: CommandExecution; + helpText?: string; + elapsedMs: number; +}; + +/** Options accepted by {@link invokeCommand}. 
*/ +export type InvokeCommandOptions = { + ioOverrides?: Partial; + executionMode?: ExecutionMode; + collabSessionPool?: CommandContext['collabSessionPool']; +}; + +const MANUAL_COMMANDS = { + call: runCall, + close: runClose, + open: runOpen, + save: runSave, + 'session list': runSessionList, + 'session save': runSessionSave, + 'session close': runSessionClose, + 'session set-default': runSessionSetDefault, + 'session use': runSessionUse, +} satisfies Record; + +const EXTRA_COMMAND_KEYS = ['call'] as const; +const COMMAND_KEY_SET = new Set([...CLI_COMMAND_KEYS, ...EXTRA_COMMAND_KEYS]); +const CLI_COMMAND_KEY_SET = new Set(CLI_COMMAND_KEYS); +const MANUAL_COMMAND_KEY_SET = new Set(MANUAL_COMMAND_ALLOWLIST); +const COMMAND_OPERATION_ID_BY_KEY = new Map( + CLI_COMMAND_SPECS.map((spec) => [spec.key, spec.operationId as CliOperationId] as const), +); + +function hasCommandHelpFlag(args: string[]): boolean { + return args.includes('--help') || args.includes('-h'); +} -const HELP = ` -superdoc - docx editing in your terminal +function defaultIo(): CliIO { + let stdinCache: Promise | null = null; + + return { + stdout(message: string) { + process.stdout.write(message); + }, + stderr(message: string) { + process.stderr.write(message); + }, + readStdinBytes() { + if (stdinCache) return stdinCache; + + stdinCache = new Promise((resolve, reject) => { + const chunks: Buffer[] = []; + process.stdin.on('data', (chunk: Buffer | Uint8Array | string) => { + if (typeof chunk === 'string') { + chunks.push(Buffer.from(chunk)); + return; + } + chunks.push(Buffer.from(chunk)); + }); + process.stdin.on('end', () => { + resolve(new Uint8Array(Buffer.concat(chunks))); + }); + process.stdin.on('error', (error) => { + reject(error); + }); + }); + + return stdinCache; + }, + now() { + return Date.now(); + }, + }; +} -Commands: - search Find text across documents - replace Find and replace text - read Extract plain text +function mergeIo(overrides?: Partial): CliIO { + const base = 
defaultIo(); + if (!overrides) return base; -Options: - --json Machine-readable output - -h, --help Show this message + return { + stdout: overrides.stdout ?? base.stdout, + stderr: overrides.stderr ?? base.stderr, + readStdinBytes: overrides.readStdinBytes ?? base.readStdinBytes, + now: overrides.now ?? base.now, + }; +} -Examples: - superdoc search "indemnification" ./contracts/*.docx - superdoc replace "ACME Corp" "Globex Inc" ./merger/*.docx - superdoc read ./proposal.docx +function parseCommand(rest: string[]): { key: string; args: string[] } { + if (rest.length === 0) { + throw new CliError('MISSING_REQUIRED', 'Missing command.'); + } -Docs: https://github.com/superdoc-dev/superdoc -`; + const maxTokens = Math.min(Math.max(CLI_MAX_COMMAND_TOKENS, 1), rest.length); + for (let tokenCount = maxTokens; tokenCount >= 1; tokenCount -= 1) { + const candidate = rest.slice(0, tokenCount).join(' '); + if (!COMMAND_KEY_SET.has(candidate)) continue; + return { + key: candidate, + args: rest.slice(tokenCount), + }; + } -/** - * Expand glob patterns to file paths - * @param patterns - Array of file patterns (supports wildcards) - */ -async function expandGlobs(patterns: string[]): Promise { - const files: string[] = []; - - for (const pattern of patterns) { - if (pattern.includes('*')) { - const matches = await glob(pattern, { absolute: true }); - for (const file of matches) { - if (file.endsWith('.docx')) { - files.push(file); - } - } - } else { - files.push(pattern); - } + const attempted = rest.slice(0, maxTokens).join(' '); + throw new CliError('UNKNOWN_COMMAND', `Unknown command: ${attempted}`); +} + +async function executeWithTimeout(operation: () => Promise, timeoutMs?: number): Promise { + if (!timeoutMs) return operation(); + + return new Promise((resolve, reject) => { + const timer = setTimeout(() => { + reject( + new CliError('TIMEOUT', `Command timed out after ${timeoutMs}ms.`, { + timeoutMs, + }), + ); + }, timeoutMs); + + operation() + .then((result) => { + 
clearTimeout(timer); + resolve(result); + }) + .catch((error) => { + clearTimeout(timer); + reject(error); + }); + }); +} + +function writeSuccess(io: CliIO, mode: OutputMode, payload: CommandExecution, elapsedMs: number): void { + if (mode === 'json') { + io.stdout(`${JSON.stringify(createSuccessEnvelope(payload.command, payload.data, elapsedMs))}\n`); + return; } - return files; + io.stdout(`${payload.pretty}\n`); } -/** - * Format search results for human-readable output - * @returns Formatted string with match summary - */ -function formatSearchResult(result: SearchResult): string { - const lines: string[] = []; +function writeFailure(io: CliIO, mode: OutputMode, error: CliError, elapsedMs: number): void { + if (mode === 'json') { + io.stderr(`${JSON.stringify(createFailureEnvelope(error, elapsedMs))}\n`); + return; + } + + io.stderr(`Error [${error.code}]: ${error.message}\n`); +} + +function parseInvocation(argv: string[]): ParsedInvocation { + const { globals, rest } = parseGlobalArgs(argv); + return { globals, rest }; +} + +async function executeParsedInvocation( + parsed: ParsedInvocation, + io: CliIO, + executionMode: ExecutionMode, + collabSessionPool?: CommandContext['collabSessionPool'], +): Promise<{ execution?: CommandExecution; helpText?: string }> { + if (parsed.globals.help || parsed.rest.length === 0) { + return { helpText: HELP }; + } + + const { key, args } = parseCommand(parsed.rest); - lines.push(`Found ${result.totalMatches} matches in ${result.files.length} files`); - lines.push(''); + const context: CommandContext = { + io, + timeoutMs: parsed.globals.timeoutMs, + sessionId: parsed.globals.sessionId, + executionMode, + collabSessionPool, + }; - for (const file of result.files) { - lines.push(` ${file.path}: ${file.matches.length} matches`); - for (const match of file.matches.slice(0, 3)) { - lines.push(` "${match.context}"`); + const execution = await executeWithTimeout(async () => { + if (MANUAL_COMMAND_KEY_SET.has(key)) { + const handler 
= MANUAL_COMMANDS[key as ManualCommandKey]; + return handler(args, context); } - if (file.matches.length > 3) { - lines.push(` ... and ${file.matches.length - 3} more`); + + if (CLI_COMMAND_KEY_SET.has(key)) { + return runCommandWrapper(key as CliCommandKey, args, context); } + + throw new CliError('UNKNOWN_COMMAND', `Unknown command: ${key}`); + }, parsed.globals.timeoutMs); + + const operationId = COMMAND_OPERATION_ID_BY_KEY.get(key) as CliOperationId | undefined; + const shouldValidateResponse = operationId != null && !hasCommandHelpFlag(args); + if (!shouldValidateResponse) { + return { execution }; } - return lines.join('\n'); + const normalizedData = normalizeJsonValue(execution.data, key); + validateOperationResponseData(operationId, normalizedData, key); + return { + execution: { + ...execution, + data: normalizedData as Record, + }, + }; } /** - * Format replace results for human-readable output - * @param result - Replace operation result - * @returns Formatted string with replacement summary + * Programmatically invokes a CLI command without process-level I/O side effects. + * + * @param argv - The argument tokens (e.g. 
`["find", "doc.docx", "--type", "text"]`) + * @param options - I/O overrides, execution mode, and collaboration pool + * @returns Parsed globals, optional execution result or help text, and elapsed time + * @throws {CliError} On unknown commands, validation failures, or command errors */ -function formatReplaceResult(result: ReplaceResult): string { - const lines: string[] = []; - - lines.push(`Updated ${result.files.length} files (${result.totalReplacements} replacements total)`); - - for (const file of result.files) { - lines.push(` ${file.path}: ${file.replacements} replacements`); - } +export async function invokeCommand(argv: string[], options: InvokeCommandOptions = {}): Promise { + const io = mergeIo(options.ioOverrides); + const startedAt = io.now(); + const parsed = parseInvocation(argv); + const output = await executeParsedInvocation( + parsed, + io, + options.executionMode ?? 'oneshot', + options.collabSessionPool, + ); + + return { + globals: parsed.globals, + execution: output.execution, + helpText: output.helpText, + elapsedMs: io.now() - startedAt, + }; +} - return lines.join('\n'); +async function runHostCommand(tokens: string[], io: CliIO): Promise { + const { runHostStdio } = await import('./host/server'); + return runHostStdio(tokens, io); } -async function main() { - const args = process.argv.slice(2); +/** + * Top-level CLI entry point. Parses arguments, routes to the appropriate command, + * and writes JSON or pretty output to the provided I/O streams. 
+ * + * @param argv - Raw process arguments (after stripping the binary path) + * @param ioOverrides - Optional overrides for stdout, stderr, stdin, and clock + * @returns Process exit code (0 on success, non-zero on error) + */ +export async function run(argv: string[], ioOverrides?: Partial): Promise { + const io = mergeIo(ioOverrides); + const startedAt = io.now(); + let outputMode: OutputMode = 'json'; - if (args.length === 0 || args.includes('--help') || args.includes('-h')) { - console.log(HELP); - process.exit(0); - } + try { + const parsed = parseInvocation(argv); + outputMode = parsed.globals.output; - const jsonOutput = args.includes('--json'); - const filteredArgs = args.filter((a) => a !== '--json'); + if (parsed.rest[0] === 'host') { + const hostTokens = parsed.rest.slice(1); + if (parsed.globals.help) hostTokens.push('--help'); + return await runHostCommand(hostTokens, io); + } - const [command, ...rest] = filteredArgs; + if (parsed.rest[0] === 'install' && !parsed.globals.help) { + return await runInstall(parsed.rest.slice(1), io); + } - try { - switch (command) { - case 'search': { - if (rest.length < 2) { - console.error('Usage: superdoc search '); - process.exit(1); - } - const [pattern, ...filePatterns] = rest; - const files = await expandGlobs(filePatterns); - - if (files.length === 0) { - console.error('No .docx files found matching the pattern.'); - process.exit(1); - } - - const result = await search(pattern, files); - - if (jsonOutput) { - console.log(JSON.stringify(result, null, 2)); - } else { - console.log(formatSearchResult(result)); - } - break; - } + if (parsed.rest[0] === 'uninstall' && !parsed.globals.help) { + return await runUninstall(parsed.rest.slice(1), io); + } - case 'replace': { - if (rest.length < 3) { - console.error('Usage: superdoc replace '); - process.exit(1); - } - const [find, replaceWith, ...filePatterns] = rest; - const files = await expandGlobs(filePatterns); - - if (files.length === 0) { - console.error('No .docx 
files found matching the pattern.'); - process.exit(1); - } - - const result = await replace(find, replaceWith, files); - - if (jsonOutput) { - console.log(JSON.stringify(result, null, 2)); - } else { - console.log(formatReplaceResult(result)); - } - break; - } + if (parsed.rest[0] === 'call' && outputMode !== 'json') { + throw new CliError('INVALID_ARGUMENT', 'call: only --output json is supported.'); + } - case 'read': { - if (rest.length < 1) { - console.error('Usage: superdoc read '); - process.exit(1); - } - const [filePath] = rest; - const result = await read(filePath); - - if (jsonOutput) { - console.log(JSON.stringify(result, null, 2)); - } else { - console.log(result.content); - } - break; + if (!parsed.globals.help) { + const legacyCompat = await tryRunLegacyCompatCommand(argv, parsed.rest, io); + if (legacyCompat.handled) { + return legacyCompat.exitCode; } + } - default: - console.error(`Unknown command: ${command}`); - console.log(HELP); - process.exit(1); + const output = await executeParsedInvocation(parsed, io, 'oneshot'); + if (output.helpText) { + io.stdout(output.helpText); + return 0; + } + if (!output.execution) { + throw new CliError('COMMAND_FAILED', 'Command produced no execution result and no help text.'); } + + const elapsedMs = io.now() - startedAt; + writeSuccess(io, outputMode, output.execution, elapsedMs); + return 0; } catch (error) { - console.error('Error:', error instanceof Error ? 
error.message : error); - process.exit(1); + const cliError = toCliError(error); + const elapsedMs = io.now() - startedAt; + writeFailure(io, outputMode, cliError, elapsedMs); + return cliError.exitCode; } } -main(); +if (import.meta.main) { + const exitCode = await run(process.argv.slice(2)); + process.exit(exitCode); +} diff --git a/apps/cli/src/lib/args.ts b/apps/cli/src/lib/args.ts new file mode 100644 index 0000000000..0c342da811 --- /dev/null +++ b/apps/cli/src/lib/args.ts @@ -0,0 +1,410 @@ +import { readFile } from 'node:fs/promises'; +import { CliError } from './errors'; +import { validateSessionId } from './session'; +import type { GlobalOptions, OutputMode } from './types'; + +export type OptionType = 'string' | 'number' | 'boolean'; + +export interface OptionSpec { + name: string; + type: OptionType; + aliases?: string[]; + multiple?: boolean; +} + +export interface ParsedArgs { + positionals: string[]; + options: Record; + unknown: string[]; + errors: string[]; +} + +function parseGlobalOutput(outputValue: string | undefined, jsonFlag: boolean, prettyFlag: boolean): OutputMode { + if (jsonFlag && prettyFlag) { + throw new CliError('INVALID_ARGUMENT', 'Use only one of --json or --pretty.'); + } + + if (outputValue) { + if (outputValue !== 'json' && outputValue !== 'pretty') { + throw new CliError('INVALID_ARGUMENT', '--output must be either "json" or "pretty".'); + } + if (jsonFlag && outputValue !== 'json') { + throw new CliError('INVALID_ARGUMENT', 'Conflicting output flags: --output and --json.'); + } + if (prettyFlag && outputValue !== 'pretty') { + throw new CliError('INVALID_ARGUMENT', 'Conflicting output flags: --output and --pretty.'); + } + return outputValue; + } + + if (prettyFlag) return 'pretty'; + return 'json'; +} + +export function parseGlobalArgs(argv: string[]): { globals: GlobalOptions; rest: string[] } { + let outputValue: string | undefined; + let jsonFlag = false; + let prettyFlag = false; + let timeoutMs: number | undefined; + let 
sessionId: string | undefined; + let help = false; + const rest: string[] = []; + + for (let index = 0; index < argv.length; index += 1) { + const token = argv[index]; + + if (token === '--') { + rest.push(...argv.slice(index)); + break; + } + + if (token === '--json') { + jsonFlag = true; + continue; + } + + if (token === '--pretty') { + prettyFlag = true; + continue; + } + + if (token === '--help' || token === '-h') { + help = true; + continue; + } + + if (token === '--session') { + const next = argv[index + 1]; + if (!next) { + throw new CliError('MISSING_REQUIRED', '--session requires a value.'); + } + sessionId = validateSessionId(next); + index += 1; + continue; + } + + if (token.startsWith('--session=')) { + sessionId = validateSessionId(token.slice('--session='.length)); + continue; + } + + if (token === '--output') { + const next = argv[index + 1]; + if (!next) { + throw new CliError('MISSING_REQUIRED', '--output requires a value.'); + } + outputValue = next; + index += 1; + continue; + } + + if (token.startsWith('--output=')) { + outputValue = token.slice('--output='.length); + continue; + } + + if (token === '--timeout-ms') { + const next = argv[index + 1]; + if (!next) { + throw new CliError('MISSING_REQUIRED', '--timeout-ms requires a value.'); + } + const parsed = Number(next); + if (!Number.isFinite(parsed) || parsed <= 0) { + throw new CliError('INVALID_ARGUMENT', '--timeout-ms must be a positive number.'); + } + timeoutMs = parsed; + index += 1; + continue; + } + + if (token.startsWith('--timeout-ms=')) { + const parsed = Number(token.slice('--timeout-ms='.length)); + if (!Number.isFinite(parsed) || parsed <= 0) { + throw new CliError('INVALID_ARGUMENT', '--timeout-ms must be a positive number.'); + } + timeoutMs = parsed; + continue; + } + + rest.push(token); + } + + const output = parseGlobalOutput(outputValue, jsonFlag, prettyFlag); + + return { + globals: { + output, + timeoutMs, + sessionId, + help, + }, + rest, + }; +} + +function 
normalizeBooleanValue(value: string): boolean | undefined { + if (value === 'true' || value === '1') return true; + if (value === 'false' || value === '0') return false; + return undefined; +} + +export function parseCommandArgs(tokens: string[], specs: OptionSpec[]): ParsedArgs { + const positionals: string[] = []; + const options: Record = {}; + const unknown: string[] = []; + const errors: string[] = []; + + const byName = new Map(); + for (const spec of specs) { + byName.set(spec.name, spec); + for (const alias of spec.aliases ?? []) { + byName.set(alias, spec); + } + } + + let i = 0; + while (i < tokens.length) { + const token = tokens[i]; + + if (token === '--') { + positionals.push(...tokens.slice(i + 1)); + break; + } + + if (!token.startsWith('--')) { + positionals.push(token); + i += 1; + continue; + } + + const eqIndex = token.indexOf('='); + const rawName = eqIndex >= 0 ? token.slice(2, eqIndex) : token.slice(2); + const inlineValue = eqIndex >= 0 ? token.slice(eqIndex + 1) : undefined; + + const spec = byName.get(rawName); + if (!spec) { + unknown.push(`--${rawName}`); + i += 1; + continue; + } + + let parsedValue: unknown; + + if (spec.type === 'boolean') { + if (inlineValue == null) { + const nextToken = tokens[i + 1]; + const normalizedNext = typeof nextToken === 'string' ? normalizeBooleanValue(nextToken) : undefined; + if (normalizedNext != null) { + parsedValue = normalizedNext; + i += 2; + } else { + parsedValue = true; + i += 1; + } + } else { + const normalized = normalizeBooleanValue(inlineValue); + if (normalized == null) { + errors.push(`--${rawName} must be true/false when provided with an explicit value.`); + i += 1; + continue; + } + parsedValue = normalized; + i += 1; + } + } else { + const valueToken = inlineValue ?? 
tokens[i + 1]; + if (valueToken == null) { + errors.push(`--${rawName} requires a value.`); + i += 1; + continue; + } + + if (spec.type === 'number') { + const n = Number(valueToken); + if (!Number.isFinite(n)) { + errors.push(`--${rawName} must be a number.`); + i += inlineValue == null ? 2 : 1; + continue; + } + parsedValue = n; + } else { + parsedValue = valueToken; + } + + i += inlineValue == null ? 2 : 1; + } + + const existing = options[spec.name]; + if (spec.multiple) { + if (existing == null) { + options[spec.name] = [parsedValue]; + } else if (Array.isArray(existing)) { + existing.push(parsedValue); + } else { + options[spec.name] = [existing, parsedValue]; + } + continue; + } + + if (existing != null) { + errors.push(`--${spec.name} was provided more than once.`); + continue; + } + + options[spec.name] = parsedValue; + } + + return { + positionals, + options, + unknown, + errors, + }; +} + +export function ensureValidArgs(parsed: ParsedArgs): void { + if (parsed.unknown.length > 0) { + throw new CliError('INVALID_ARGUMENT', `Unknown option(s): ${parsed.unknown.join(', ')}`); + } + + if (parsed.errors.length > 0) { + throw new CliError('INVALID_ARGUMENT', parsed.errors[0], { + errors: parsed.errors, + }); + } +} + +export function getStringOption(parsed: ParsedArgs, name: string): string | undefined { + const value = parsed.options[name]; + return typeof value === 'string' ? value : undefined; +} + +export function getNumberOption(parsed: ParsedArgs, name: string): number | undefined { + const value = parsed.options[name]; + return typeof value === 'number' ? value : undefined; +} + +export function getBooleanOption(parsed: ParsedArgs, name: string): boolean { + const value = parsed.options[name]; + return value === true; +} + +export function getOptionalBooleanOption(parsed: ParsedArgs, name: string): boolean | undefined { + const value = parsed.options[name]; + return typeof value === 'boolean' ? 
value : undefined; +} + +export function getStringListOption(parsed: ParsedArgs, name: string): string[] { + const value = parsed.options[name]; + if (typeof value === 'string') return [value]; + if (!Array.isArray(value)) return []; + return value.filter((item): item is string => typeof item === 'string'); +} + +export function resolveDocArg(parsed: ParsedArgs, commandName: string): { doc?: string; positionals: string[] } { + const docFromFlag = getStringOption(parsed, 'doc'); + const firstPositional = parsed.positionals[0]; + + if (docFromFlag && firstPositional) { + if (docFromFlag !== firstPositional) { + throw new CliError( + 'INVALID_ARGUMENT', + `${commandName}: positional and --doc must match when both are provided.`, + ); + } + return { doc: docFromFlag, positionals: parsed.positionals.slice(1) }; + } + + if (docFromFlag) { + return { doc: docFromFlag, positionals: parsed.positionals }; + } + + if (firstPositional) { + return { doc: firstPositional, positionals: parsed.positionals.slice(1) }; + } + + return { doc: undefined, positionals: parsed.positionals }; +} + +export function requireDocArg(parsed: ParsedArgs, commandName: string): { doc: string; positionals: string[] } { + const resolved = resolveDocArg(parsed, commandName); + if (resolved.doc) { + return { + doc: resolved.doc, + positionals: resolved.positionals, + }; + } + + throw new CliError('MISSING_REQUIRED', `${commandName}: missing required argument.`); +} + +export function expectNoPositionals(parsed: ParsedArgs, positionals: string[], commandName: string): void { + if (positionals.length === 0) return; + throw new CliError('INVALID_ARGUMENT', `${commandName}: unexpected positional argument(s): ${positionals.join(' ')}`); +} + +export function requireStringOption(parsed: ParsedArgs, name: string, commandName: string): string { + const value = getStringOption(parsed, name); + if (value) return value; + throw new CliError('MISSING_REQUIRED', `${commandName}: missing required --${name}.`); +} + 
+export function requireBooleanOption(parsed: ParsedArgs, name: string, commandName: string): boolean { + const value = getOptionalBooleanOption(parsed, name); + if (typeof value === 'boolean') return value; + throw new CliError('MISSING_REQUIRED', `${commandName}: missing required --${name}.`); +} + +export async function resolveJsonInput(parsed: ParsedArgs, baseName: string): Promise { + const jsonFlag = getStringOption(parsed, `${baseName}-json`); + const fileFlag = getStringOption(parsed, `${baseName}-file`); + + if (jsonFlag && fileFlag) { + throw new CliError( + 'INVALID_ARGUMENT', + `Use only one of --${baseName}-json or --${baseName}-file for the ${baseName} payload.`, + ); + } + + if (jsonFlag) { + try { + return JSON.parse(jsonFlag) as unknown; + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + throw new CliError('JSON_PARSE_ERROR', `Invalid --${baseName}-json payload.`, { + message, + }); + } + } + + if (fileFlag) { + let raw: string; + try { + raw = await readFile(fileFlag, 'utf8'); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + throw new CliError('FILE_READ_ERROR', `Could not read --${baseName}-file: ${fileFlag}`, { + message, + }); + } + + try { + return JSON.parse(raw) as unknown; + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + throw new CliError('JSON_PARSE_ERROR', `Invalid JSON in --${baseName}-file: ${fileFlag}`, { + message, + }); + } + } + + return undefined; +} + +export function parseCommaList(value: string | undefined): string[] { + if (!value) return []; + return value + .split(',') + .map((item) => item.trim()) + .filter((item) => item.length > 0); +} diff --git a/apps/cli/src/lib/change-mode.ts b/apps/cli/src/lib/change-mode.ts new file mode 100644 index 0000000000..3ffff1c1ce --- /dev/null +++ b/apps/cli/src/lib/change-mode.ts @@ -0,0 +1,22 @@ +import type { ParsedArgs } from './args'; +import { getBooleanOption, getStringOption } from './args'; +import { CliError } from './errors'; + +export type ChangeMode = 'direct' | 'tracked'; + +export function resolveChangeMode(parsed: ParsedArgs, commandName: string): ChangeMode { + const tracked = getBooleanOption(parsed, 'tracked'); + const direct = getBooleanOption(parsed, 'direct'); + const mode = getStringOption(parsed, 'change-mode'); + + if (tracked && direct) { + throw new CliError('INVALID_ARGUMENT', `${commandName}: use only one of --tracked or --direct.`); + } + + if (mode && mode !== 'direct' && mode !== 'tracked') { + throw new CliError('INVALID_ARGUMENT', `${commandName}: --change-mode must be "direct" or "tracked".`); + } + + const requested = tracked ? 'tracked' : direct ? 'direct' : mode; + return (requested as ChangeMode | undefined) ?? 
'direct'; +} diff --git a/apps/cli/src/lib/collaboration.ts b/apps/cli/src/lib/collaboration.ts new file mode 100644 index 0000000000..6cff8e1cb6 --- /dev/null +++ b/apps/cli/src/lib/collaboration.ts @@ -0,0 +1,218 @@ +import { HocuspocusProvider } from '@hocuspocus/provider'; +import { WebsocketProvider } from 'y-websocket'; +import { Doc as YDoc } from 'yjs'; +import { CliError } from './errors'; +import { isRecord } from './guards'; + +export type CollaborationProviderType = 'hocuspocus' | 'y-websocket'; + +export type CollaborationInput = { + providerType: CollaborationProviderType; + url: string; + documentId?: string; + tokenEnv?: string; + syncTimeoutMs?: number; +}; + +export type CollaborationProfile = { + providerType: CollaborationProviderType; + url: string; + documentId: string; + tokenEnv?: string; + syncTimeoutMs?: number; +}; + +type SyncableProvider = { + on?(event: string, handler: (...args: unknown[]) => void): void; + off?(event: string, handler: (...args: unknown[]) => void): void; + disconnect?(): void; + destroy?(): void; + synced?: boolean; + isSynced?: boolean; +}; + +export type CollaborationRuntime = { + ydoc: YDoc; + provider: SyncableProvider; + waitForSync(): Promise; + dispose(): void; +}; + +const DEFAULT_SYNC_TIMEOUT_MS = 10_000; +const SYNC_POLL_INTERVAL_MS = 25; +const ENV_VAR_NAME_PATTERN = /^[A-Za-z_][A-Za-z0-9_]*$/; + +function isSynced(provider: SyncableProvider): boolean { + return provider.synced === true || provider.isSynced === true; +} + +function expectNonEmptyString(value: unknown, path: string): string { + if (typeof value !== 'string' || value.trim().length === 0) { + throw new CliError('VALIDATION_ERROR', `${path} must be a non-empty string.`); + } + return value; +} + +function expectOptionalPositiveNumber(value: unknown, path: string): number | undefined { + if (value == null) return undefined; + if (typeof value !== 'number' || !Number.isFinite(value) || value <= 0) { + throw new CliError('VALIDATION_ERROR', 
`${path} must be a positive number.`); + } + return value; +} + +function expectOptionalEnvVarName(value: unknown, path: string): string | undefined { + if (value == null) return undefined; + if (typeof value !== 'string' || !ENV_VAR_NAME_PATTERN.test(value)) { + throw new CliError('VALIDATION_ERROR', `${path} must be a valid environment variable name.`); + } + return value; +} + +function normalizeProviderType(value: unknown, path: string): CollaborationProviderType { + if (value === 'hocuspocus' || value === 'y-websocket') return value; + throw new CliError('VALIDATION_ERROR', `${path} must be "hocuspocus" or "y-websocket".`); +} + +export function parseCollaborationInput(value: unknown): CollaborationInput { + if (!isRecord(value)) { + throw new CliError('VALIDATION_ERROR', 'collaboration must be an object.'); + } + + if ('token' in value) { + throw new CliError('VALIDATION_ERROR', 'collaboration.token is not supported in v1; use collaboration.tokenEnv.'); + } + + if ('params' in value) { + throw new CliError('VALIDATION_ERROR', 'collaboration.params is not supported in v1.'); + } + + const allowedKeys = new Set(['providerType', 'url', 'documentId', 'tokenEnv', 'syncTimeoutMs']); + for (const key of Object.keys(value)) { + if (!allowedKeys.has(key)) { + throw new CliError('VALIDATION_ERROR', `collaboration.${key} is not supported.`); + } + } + + return { + providerType: normalizeProviderType(value.providerType, 'collaboration.providerType'), + url: expectNonEmptyString(value.url, 'collaboration.url').trim(), + documentId: + value.documentId != null ? 
expectNonEmptyString(value.documentId, 'collaboration.documentId') : undefined, + tokenEnv: expectOptionalEnvVarName(value.tokenEnv, 'collaboration.tokenEnv'), + syncTimeoutMs: expectOptionalPositiveNumber(value.syncTimeoutMs, 'collaboration.syncTimeoutMs'), + }; +} + +export function resolveCollaborationProfile(input: CollaborationInput, sessionId: string): CollaborationProfile { + const documentId = input.documentId?.trim() || sessionId; + return { + providerType: input.providerType, + url: input.url, + documentId, + tokenEnv: input.tokenEnv, + syncTimeoutMs: input.syncTimeoutMs, + }; +} + +export function resolveCollaborationToken(profile: CollaborationProfile): string | undefined { + if (!profile.tokenEnv) return undefined; + const token = process.env[profile.tokenEnv]; + if (!token) { + throw new CliError('MISSING_REQUIRED', `Missing collaboration token env var: ${profile.tokenEnv}`, { + tokenEnv: profile.tokenEnv, + }); + } + return token; +} + +function waitForProviderSync(provider: SyncableProvider, timeoutMs: number): Promise { + if (isSynced(provider)) return Promise.resolve(); + + return new Promise((resolve, reject) => { + let settled = false; + const cleanup: Array<() => void> = []; + + const finish = (error?: CliError) => { + if (settled) return; + settled = true; + for (const run of cleanup) { + run(); + } + if (error) { + reject(error); + return; + } + resolve(); + }; + + const onSync = (value?: unknown) => { + if (value === false) return; + finish(); + }; + + if (provider.on) { + provider.on('synced', onSync); + cleanup.push(() => provider.off?.('synced', onSync)); + + provider.on('sync', onSync); + cleanup.push(() => provider.off?.('sync', onSync)); + } + + const timer = setTimeout(() => { + finish( + new CliError('COLLABORATION_SYNC_TIMEOUT', `Collaboration sync timed out after ${timeoutMs}ms.`, { + timeoutMs, + }), + ); + }, timeoutMs); + cleanup.push(() => clearTimeout(timer)); + + const poll = setInterval(() => { + if (isSynced(provider)) { + 
finish(); + } + }, SYNC_POLL_INTERVAL_MS); + cleanup.push(() => clearInterval(poll)); + }); +} + +export function createCollaborationRuntime(profile: CollaborationProfile): CollaborationRuntime { + const token = resolveCollaborationToken(profile); + const ydoc = new YDoc({ gc: false }); + + let provider: SyncableProvider; + if (profile.providerType === 'y-websocket') { + const providerOptions: { params?: Record } = {}; + if (token) { + providerOptions.params = { token }; + } + provider = new WebsocketProvider( + profile.url, + profile.documentId, + ydoc, + providerOptions, + ) as unknown as SyncableProvider; + } else { + provider = new HocuspocusProvider({ + url: profile.url, + document: ydoc, + name: profile.documentId, + token: token ?? '', + preserveConnection: false, + }) as unknown as SyncableProvider; + } + + return { + ydoc, + provider, + waitForSync() { + return waitForProviderSync(provider, profile.syncTimeoutMs ?? DEFAULT_SYNC_TIMEOUT_MS); + }, + dispose() { + provider.disconnect?.(); + provider.destroy?.(); + ydoc.destroy(); + }, + }; +} diff --git a/apps/cli/src/lib/context.ts b/apps/cli/src/lib/context.ts new file mode 100644 index 0000000000..00b1a955df --- /dev/null +++ b/apps/cli/src/lib/context.ts @@ -0,0 +1,680 @@ +import type { Dirent } from 'node:fs'; +import { copyFile, mkdir, open, readdir, readFile, rename, rm, stat, unlink, writeFile } from 'node:fs/promises'; +import { createHash } from 'node:crypto'; +import { homedir, hostname } from 'node:os'; +import { join, resolve } from 'node:path'; +import { CliError } from './errors'; +import { asRecord, pathExists } from './guards'; +import type { CollaborationProfile } from './collaboration'; +import { validateSessionId } from './session'; +import type { CliIO } from './types'; + +const CONTEXT_VERSION = 'v1'; +const ACTIVE_SESSION_FILENAME = 'active-session'; +const DEFAULT_LOCK_TIMEOUT_MS = 5_000; +const LOCK_RETRY_INTERVAL_MS = 50; + +export type SourceSnapshot = { + mtimeMs: number; + size: 
number; + checksum: string; +}; + +export type SessionType = 'local' | 'collab'; + +export type ContextMetadata = { + contextId: string; + projectRoot: string; + source: 'path' | 'stdin'; + sourcePath?: string; + workingDocPath: string; + dirty: boolean; + revision: number; + sessionType: SessionType; + collaboration?: CollaborationProfile; + openedAt: string; + updatedAt: string; + lastSavedAt?: string; + sourceSnapshot?: SourceSnapshot; +}; + +export type ContextPaths = { + stateRoot: string; + contextDir: string; + metadataPath: string; + workingDocPath: string; + lockPath: string; +}; + +export type ProjectSessionSummary = { + sessionId: string; + source: 'path' | 'stdin'; + sourcePath?: string; + dirty: boolean; + revision: number; + sessionType: SessionType; + collaboration?: CollaborationProfile; + openedAt: string; + updatedAt: string; + lastSavedAt?: string; +}; + +type ProjectPaths = { + projectHash: string; + projectDir: string; + activeSessionPath: string; +}; + +type LockMetadata = { + pid: number; + hostname: string; + startedAt: string; + projectRoot: string; + command: string; +}; + +function getStateRoot(): string { + const override = process.env.SUPERDOC_CLI_STATE_DIR; + if (override && override.length > 0) { + return resolve(override); + } + + return join(homedir(), '.superdoc-cli', 'state', CONTEXT_VERSION); +} + +export function getContextPaths(contextId: string): ContextPaths { + const normalizedContextId = validateSessionId(contextId, 'session id'); + const stateRoot = getStateRoot(); + const contextDir = join(stateRoot, 'contexts', normalizedContextId); + + return { + stateRoot, + contextDir, + metadataPath: join(contextDir, 'metadata.json'), + workingDocPath: join(contextDir, 'working.docx'), + lockPath: join(contextDir, 'lock'), + }; +} + +export function getProjectRoot(): string { + return resolve(process.cwd()); +} + +function getProjectPaths(projectRoot = getProjectRoot()): ProjectPaths { + const stateRoot = getStateRoot(); + const 
projectHash = createHash('sha256').update(projectRoot).digest('hex').slice(0, 16); + const projectDir = join(stateRoot, 'projects', projectHash); + + return { + projectHash, + projectDir, + activeSessionPath: join(projectDir, ACTIVE_SESSION_FILENAME), + }; +} + +function nowIso(io: CliIO): string { + return new Date(io.now()).toISOString(); +} + +function normalizeSessionType(value: unknown): SessionType { + if (value === 'collab') return 'collab'; + return 'local'; +} + +function normalizeCollaborationProfile(value: unknown): CollaborationProfile | undefined { + const record = asRecord(value); + if (!record) return undefined; + + const providerType = record.providerType; + const url = record.url; + const documentId = record.documentId; + const tokenEnv = record.tokenEnv; + const syncTimeoutMs = record.syncTimeoutMs; + + if (providerType !== 'hocuspocus' && providerType !== 'y-websocket') return undefined; + if (typeof url !== 'string' || url.length === 0) return undefined; + if (typeof documentId !== 'string' || documentId.length === 0) return undefined; + if (tokenEnv != null && (typeof tokenEnv !== 'string' || tokenEnv.length === 0)) return undefined; + if ( + syncTimeoutMs != null && + (typeof syncTimeoutMs !== 'number' || !Number.isFinite(syncTimeoutMs) || syncTimeoutMs <= 0) + ) { + return undefined; + } + + return { + providerType, + url, + documentId, + tokenEnv: typeof tokenEnv === 'string' ? tokenEnv : undefined, + syncTimeoutMs: typeof syncTimeoutMs === 'number' ? 
syncTimeoutMs : undefined, + }; +} + +function normalizeContextMetadata(metadata: ContextMetadata): ContextMetadata { + const sessionType = normalizeSessionType(metadata.sessionType); + const collaboration = normalizeCollaborationProfile(metadata.collaboration); + + if (sessionType === 'collab' && collaboration) { + return { + ...metadata, + sessionType, + collaboration, + }; + } + + return { + ...metadata, + sessionType: 'local', + collaboration: undefined, + }; +} + +function sleep(ms: number): Promise { + return new Promise((resolveSleep) => { + setTimeout(resolveSleep, ms); + }); +} + +function isLockAlive(pid: number): boolean { + if (!Number.isInteger(pid) || pid <= 0) return false; + + try { + process.kill(pid, 0); + return true; + } catch (error) { + const code = (error as NodeJS.ErrnoException | undefined)?.code; + if (code === 'ESRCH') return false; + if (code === 'EPERM') return true; + return true; + } +} + +async function readLockMetadata(lockPath: string): Promise { + let raw: string; + try { + raw = await readFile(lockPath, 'utf8'); + } catch (error) { + const code = (error as NodeJS.ErrnoException | undefined)?.code; + if (code === 'ENOENT') return null; + throw new CliError('FILE_READ_ERROR', `Could not read context lock file: ${lockPath}`, { + message: error instanceof Error ? 
error.message : String(error), + }); + } + + try { + const parsed = JSON.parse(raw) as LockMetadata; + if (typeof parsed?.pid !== 'number') return null; + return parsed; + } catch { + return null; + } +} + +async function writeLockMetadata(lockPath: string, metadata: LockMetadata): Promise { + try { + const handle = await open(lockPath, 'wx'); + try { + await handle.writeFile(`${JSON.stringify(metadata, null, 2)}\n`, 'utf8'); + } finally { + await handle.close(); + } + } catch (error) { + const code = (error as NodeJS.ErrnoException | undefined)?.code; + if (code === 'EEXIST') { + throw new CliError('CONTEXT_LOCK_TIMEOUT', 'Context lock already exists.', { + lockPath, + }); + } + + throw new CliError('FILE_WRITE_ERROR', `Could not acquire context lock: ${lockPath}`, { + message: error instanceof Error ? error.message : String(error), + }); + } +} + +async function tryRemoveLock(lockPath: string): Promise { + try { + await unlink(lockPath); + } catch (error) { + const code = (error as NodeJS.ErrnoException | undefined)?.code; + if (code === 'ENOENT') return; + throw new CliError('FILE_WRITE_ERROR', `Could not release context lock: ${lockPath}`, { + message: error instanceof Error ? 
error.message : String(error), + }); + } +} + +function assertProjectMatch(metadata: ContextMetadata): void { + const currentProjectRoot = getProjectRoot(); + if (metadata.projectRoot === currentProjectRoot) return; + + throw new CliError( + 'PROJECT_CONTEXT_MISMATCH', + 'Active context belongs to a different project root than the current working directory.', + { + expectedProjectRoot: metadata.projectRoot, + actualProjectRoot: currentProjectRoot, + }, + ); +} + +async function writeAtomic(path: string, content: string): Promise { + const tempPath = `${path}.${process.pid}.${Date.now()}.tmp`; + await writeFile(tempPath, content, 'utf8'); + await rename(tempPath, path); +} + +export async function getActiveSessionId(projectRoot = getProjectRoot()): Promise { + const paths = getProjectPaths(projectRoot); + let raw: string; + try { + raw = await readFile(paths.activeSessionPath, 'utf8'); + } catch (error) { + const code = (error as NodeJS.ErrnoException | undefined)?.code; + if (code === 'ENOENT') return null; + throw new CliError('FILE_READ_ERROR', `Unable to read active session pointer: ${paths.activeSessionPath}`, { + message: error instanceof Error ? error.message : String(error), + }); + } + + const sessionId = raw.trim(); + if (!sessionId) return null; + + try { + return validateSessionId(sessionId, 'active session id'); + } catch (error) { + if (error instanceof CliError) { + return null; + } + throw error; + } +} + +export async function setActiveSessionId(sessionId: string, projectRoot = getProjectRoot()): Promise { + const normalizedSessionId = validateSessionId(sessionId, 'session id'); + const paths = getProjectPaths(projectRoot); + await mkdir(paths.projectDir, { recursive: true }); + + try { + await writeAtomic(paths.activeSessionPath, `${normalizedSessionId}\n`); + } catch (error) { + throw new CliError('FILE_WRITE_ERROR', `Unable to write active session pointer: ${paths.activeSessionPath}`, { + message: error instanceof Error ? 
error.message : String(error), + projectHash: paths.projectHash, + }); + } +} + +export async function clearActiveSessionId(projectRoot = getProjectRoot()): Promise { + const paths = getProjectPaths(projectRoot); + try { + await unlink(paths.activeSessionPath); + } catch (error) { + const code = (error as NodeJS.ErrnoException | undefined)?.code; + if (code === 'ENOENT') return; + throw new CliError('FILE_WRITE_ERROR', `Unable to clear active session pointer: ${paths.activeSessionPath}`, { + message: error instanceof Error ? error.message : String(error), + projectHash: paths.projectHash, + }); + } +} + +export async function withContextLock( + io: CliIO, + command: string, + action: (paths: ContextPaths) => Promise, + timeoutMs = DEFAULT_LOCK_TIMEOUT_MS, + contextId?: string, +): Promise { + const resolvedContextId = contextId ?? 'default'; + const paths = getContextPaths(resolvedContextId); + await mkdir(paths.contextDir, { recursive: true }); + + const startedAt = io.now(); + const lockMetadata: LockMetadata = { + pid: process.pid, + hostname: hostname(), + startedAt: nowIso(io), + projectRoot: getProjectRoot(), + command, + }; + + let acquired = false; + + while (!acquired) { + try { + await writeLockMetadata(paths.lockPath, lockMetadata); + acquired = true; + break; + } catch (error) { + if (!(error instanceof CliError) || error.code !== 'CONTEXT_LOCK_TIMEOUT') { + throw error; + } + + const owner = await readLockMetadata(paths.lockPath); + const ownerAlive = owner ? 
isLockAlive(owner.pid) : false; + + if (!ownerAlive) { + await tryRemoveLock(paths.lockPath); + continue; + } + + if (io.now() - startedAt >= timeoutMs) { + throw new CliError('CONTEXT_LOCK_TIMEOUT', `Timed out waiting for context lock after ${timeoutMs}ms.`, { + timeoutMs, + lockPath: paths.lockPath, + owner, + }); + } + + await sleep(LOCK_RETRY_INTERVAL_MS); + } + } + + try { + return await action(paths); + } finally { + if (acquired) { + await tryRemoveLock(paths.lockPath); + } + } +} + +export async function readContextMetadata(paths: ContextPaths): Promise { + let raw: string; + try { + raw = await readFile(paths.metadataPath, 'utf8'); + } catch (error) { + const code = (error as NodeJS.ErrnoException | undefined)?.code; + if (code === 'ENOENT') return null; + + throw new CliError('FILE_READ_ERROR', `Unable to read active context metadata: ${paths.metadataPath}`, { + message: error instanceof Error ? error.message : String(error), + }); + } + + try { + const parsed = JSON.parse(raw) as ContextMetadata; + return normalizeContextMetadata(parsed); + } catch (error) { + throw new CliError('JSON_PARSE_ERROR', `Active context metadata is invalid JSON: ${paths.metadataPath}`, { + message: error instanceof Error ? error.message : String(error), + }); + } +} + +export async function writeContextMetadata(paths: ContextPaths, metadata: ContextMetadata): Promise { + await mkdir(paths.contextDir, { recursive: true }); + + try { + await writeFile(paths.metadataPath, `${JSON.stringify(metadata, null, 2)}\n`, 'utf8'); + } catch (error) { + throw new CliError('FILE_WRITE_ERROR', `Unable to write active context metadata: ${paths.metadataPath}`, { + message: error instanceof Error ? 
error.message : String(error), + }); + } +} + +export async function readContextMetadataById(contextId: string): Promise { + const paths = getContextPaths(contextId); + return readContextMetadata(paths); +} + +export async function listProjectSessions(): Promise { + const stateRoot = getStateRoot(); + const contextsDir = join(stateRoot, 'contexts'); + const projectRoot = getProjectRoot(); + + let entries: Dirent[] = []; + try { + entries = await readdir(contextsDir, { withFileTypes: true }); + } catch (error) { + const code = (error as NodeJS.ErrnoException | undefined)?.code; + if (code === 'ENOENT') return []; + throw new CliError('FILE_READ_ERROR', `Unable to read sessions directory: ${contextsDir}`, { + message: error instanceof Error ? error.message : String(error), + }); + } + + const sessions: ProjectSessionSummary[] = []; + + for (const entry of entries) { + if (!entry.isDirectory()) continue; + const sessionId = entry.name; + let metadata: ContextMetadata | null = null; + + try { + metadata = await readContextMetadataById(sessionId); + } catch { + continue; + } + + if (!metadata) continue; + if (metadata.projectRoot !== projectRoot) continue; + + sessions.push({ + sessionId: metadata.contextId, + source: metadata.source, + sourcePath: metadata.sourcePath, + dirty: metadata.dirty, + revision: metadata.revision, + sessionType: metadata.sessionType, + collaboration: metadata.collaboration, + openedAt: metadata.openedAt, + updatedAt: metadata.updatedAt, + lastSavedAt: metadata.lastSavedAt, + }); + } + + sessions.sort((a, b) => { + if (a.updatedAt === b.updatedAt) { + return a.sessionId.localeCompare(b.sessionId); + } + return b.updatedAt.localeCompare(a.updatedAt); + }); + + return sessions; +} + +export async function ensureSessionExistsForProject(sessionId: string): Promise { + const metadata = await readContextMetadataById(sessionId); + if (!metadata) { + throw new CliError('SESSION_NOT_FOUND', `Session not found: ${sessionId}`, { + sessionId, + }); + } + + 
if (metadata.projectRoot !== getProjectRoot()) { + throw new CliError('SESSION_NOT_FOUND', `Session not found in this project: ${sessionId}`, { + sessionId, + }); + } + + return metadata; +} + +export async function clearContext(paths: ContextPaths): Promise { + await rm(paths.contextDir, { recursive: true, force: true }); +} + +export async function withActiveContext( + io: CliIO, + command: string, + action: (state: { metadata: ContextMetadata; paths: ContextPaths }) => Promise, + contextId?: string, +): Promise { + const resolvedContextId = contextId ?? (await getActiveSessionId()); + if (!resolvedContextId) { + throw new CliError('NO_ACTIVE_DOCUMENT', 'No active document. Run "superdoc open " first.'); + } + + return withContextLock( + io, + command, + async (paths) => { + const metadata = await readContextMetadata(paths); + if (!metadata) { + throw new CliError('NO_ACTIVE_DOCUMENT', 'No active document. Run "superdoc open " first.'); + } + + assertProjectMatch(metadata); + + return action({ metadata, paths }); + }, + DEFAULT_LOCK_TIMEOUT_MS, + resolvedContextId, + ); +} + +export function resolveSourcePathForMetadata(docArg: string): string { + return resolve(getProjectRoot(), docArg); +} + +export async function snapshotSourceFile(path: string): Promise { + let bytes: Uint8Array; + let sourceStat: Awaited>; + + try { + const buffer = await readFile(path); + bytes = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength); + sourceStat = await stat(path); + } catch (error) { + throw new CliError('FILE_READ_ERROR', `Unable to read source file snapshot: ${path}`, { + message: error instanceof Error ? 
error.message : String(error), + }); + } + + const checksum = createHash('sha256').update(bytes).digest('hex'); + return { + mtimeMs: sourceStat.mtimeMs, + size: sourceStat.size, + checksum, + }; +} + +export async function detectSourceDrift(metadata: ContextMetadata): Promise<{ + drifted: boolean; + expected?: SourceSnapshot; + actual?: SourceSnapshot; + reason?: string; +}> { + if (metadata.source !== 'path' || !metadata.sourcePath || !metadata.sourceSnapshot) { + return { drifted: false }; + } + + if (!(await pathExists(metadata.sourcePath))) { + return { + drifted: true, + expected: metadata.sourceSnapshot, + reason: 'SOURCE_MISSING', + }; + } + + const actual = await snapshotSourceFile(metadata.sourcePath); + const expected = metadata.sourceSnapshot; + const drifted = + actual.mtimeMs !== expected.mtimeMs || actual.size !== expected.size || actual.checksum !== expected.checksum; + + return { + drifted, + expected, + actual, + }; +} + +export async function copyWorkingDocumentToPath( + paths: ContextPaths, + outputPath: string, + force = false, +): Promise<{ path: string; byteLength: number }> { + const exists = await pathExists(outputPath); + if (exists && !force) { + throw new CliError('OUTPUT_EXISTS', `Output path already exists: ${outputPath}`, { + path: outputPath, + hint: 'Use --force to overwrite.', + }); + } + + try { + await copyFile(paths.workingDocPath, outputPath); + } catch (error) { + throw new CliError('FILE_WRITE_ERROR', `Failed to write output file: ${outputPath}`, { + message: error instanceof Error ? 
error.message : String(error), + }); + } + + const outputStat = await stat(outputPath); + return { + path: outputPath, + byteLength: outputStat.size, + }; +} + +export async function getWorkingDocumentSize(paths: ContextPaths): Promise { + try { + const info = await stat(paths.workingDocPath); + return info.size; + } catch (error) { + throw new CliError('FILE_READ_ERROR', `Failed to read working document: ${paths.workingDocPath}`, { + message: error instanceof Error ? error.message : String(error), + }); + } +} + +export function markContextUpdated( + io: CliIO, + metadata: ContextMetadata, + patch: Partial, +): ContextMetadata { + return { + ...metadata, + ...patch, + updatedAt: nowIso(io), + }; +} + +export function assertExpectedRevision(metadata: ContextMetadata, expectedRevision: number | undefined): void { + if (expectedRevision == null) return; + if (!Number.isInteger(expectedRevision) || expectedRevision < 0) { + throw new CliError('VALIDATION_ERROR', '--expected-revision must be a non-negative integer.'); + } + + if (metadata.revision !== expectedRevision) { + throw new CliError('REVISION_MISMATCH', 'Document revision did not match --expected-revision.', { + expectedRevision, + actualRevision: metadata.revision, + }); + } +} + +export function createInitialContextMetadata( + io: CliIO, + paths: ContextPaths, + contextId: string, + input: { + source: 'path' | 'stdin'; + sourcePath?: string; + sourceSnapshot?: SourceSnapshot; + sessionType?: SessionType; + collaboration?: CollaborationProfile; + }, +): ContextMetadata { + const timestamp = nowIso(io); + const sessionType = input.sessionType ?? 'local'; + + return { + contextId, + projectRoot: getProjectRoot(), + source: input.source, + sourcePath: input.sourcePath, + workingDocPath: paths.workingDocPath, + dirty: false, + revision: 0, + sessionType, + collaboration: sessionType === 'collab' ? 
input.collaboration : undefined, + openedAt: timestamp, + updatedAt: timestamp, + sourceSnapshot: input.sourceSnapshot, + }; +} diff --git a/apps/cli/src/lib/contract.ts b/apps/cli/src/lib/contract.ts new file mode 100644 index 0000000000..1227ae7a7f --- /dev/null +++ b/apps/cli/src/lib/contract.ts @@ -0,0 +1,230 @@ +/** + * Contract introspection — powers `describe` and `describeCommand`. + * + * Rebuilt from document-api exports + CLI metadata. No SDK dependency. + */ + +import { CONTRACT_VERSION, COMMAND_CATALOG, type OperationId } from '@superdoc/document-api'; +import { HOST_PROTOCOL_FEATURES, HOST_PROTOCOL_NOTIFICATIONS, HOST_PROTOCOL_VERSION } from '../host/protocol'; +import { + CLI_COMMAND_SPECS, + CLI_OPERATION_METADATA, + toDocApiId, + type CliOperationId, + type CliCommandSpec, + type CliOperationMetadata, +} from '../cli'; + +// --------------------------------------------------------------------------- +// Types +// --------------------------------------------------------------------------- + +type ContractOperationSummary = { + id: string; + command: string[]; + description: string; + category: string; + stability: string; + mutates: boolean; + requiresDocumentContext: boolean; + capabilities: string[]; + aliases: string[]; + examples: string[]; + errors: string[]; +}; + +type ContractOperationDetail = ContractOperationSummary & { + params: readonly { + name: string; + kind: string; + flag?: string; + type: string; + required?: boolean; + schema?: unknown; + }[]; + constraints: unknown; +}; + +type ContractOverview = { + contractVersion: string; + cli: { + package: string; + minVersion: string; + }; + protocol: { + transport: string; + host: { + protocolVersion: string; + features: string[]; + notifications: string[]; + }; + }; + invariants: string[]; + operationCount: number; + operations: ContractOperationSummary[]; +}; + +// --------------------------------------------------------------------------- +// Helpers +// 
--------------------------------------------------------------------------- + +function normalizeLookup(value: string): string { + return value.trim().toLowerCase(); +} + +function deriveCapabilities(cliOpId: CliOperationId): string[] { + const docApiId = toDocApiId(cliOpId); + if (!docApiId) return []; + + const catalog = COMMAND_CATALOG[docApiId]; + const caps: string[] = []; + if (catalog.supportsDryRun) caps.push('dryRun'); + if (catalog.supportsTrackedMode) caps.push('trackedMode'); + return caps; +} + +function deriveErrors(cliOpId: CliOperationId): string[] { + const docApiId = toDocApiId(cliOpId); + if (!docApiId) return []; + + const catalog = COMMAND_CATALOG[docApiId]; + return [...catalog.throws.preApply, ...catalog.possibleFailureCodes]; +} + +function buildOperationSummary(spec: CliCommandSpec): ContractOperationSummary { + const cliOpId = spec.operationId as CliOperationId; + + // Collect aliases for this operation + const aliases = CLI_COMMAND_SPECS.filter((s) => s.alias && s.operationId === spec.operationId).map((s) => s.key); + + return { + id: spec.operationId, + command: [...spec.tokens], + description: spec.description, + category: spec.category, + stability: 'stable', + mutates: spec.mutates, + requiresDocumentContext: spec.requiresDocumentContext, + capabilities: deriveCapabilities(cliOpId), + aliases, + examples: [...spec.examples], + errors: deriveErrors(cliOpId), + }; +} + +function buildOperationDetail(spec: CliCommandSpec, metadata: CliOperationMetadata): ContractOperationDetail { + const summary = buildOperationSummary(spec); + + return { + ...summary, + params: metadata.params.map((p) => ({ + name: p.name, + kind: p.kind, + flag: p.flag, + type: p.type, + required: p.required, + schema: p.schema, + })), + constraints: metadata.constraints, + }; +} + +function metadataForSpec(spec: CliCommandSpec): CliOperationMetadata | null { + const operationId = spec.operationId as CliOperationId; + return CLI_OPERATION_METADATA[operationId] ?? 
null; +} + +// --------------------------------------------------------------------------- +// Public API +// --------------------------------------------------------------------------- + +export function getContractSpec(): ContractOverview { + return buildContractOverview(); +} + +export function listContractOperations(): ContractOperationSummary[] { + return CLI_COMMAND_SPECS.filter((spec) => !spec.alias).map(buildOperationSummary); +} + +export function resolveContractOperation(query: string): ContractOperationDetail | null { + const normalizedQuery = normalizeLookup(query); + if (!normalizedQuery) return null; + + // Match by operation id + const byId = CLI_COMMAND_SPECS.find((spec) => !spec.alias && normalizeLookup(spec.operationId) === normalizedQuery); + if (byId) { + const metadata = metadataForSpec(byId); + return metadata ? buildOperationDetail(byId, metadata) : null; + } + + // Match by command key + const byCommand = CLI_COMMAND_SPECS.find((spec) => !spec.alias && normalizeLookup(spec.key) === normalizedQuery); + if (byCommand) { + const metadata = metadataForSpec(byCommand); + return metadata ? buildOperationDetail(byCommand, metadata) : null; + } + + // Match by alias + const byAlias = CLI_COMMAND_SPECS.find((spec) => spec.alias && normalizeLookup(spec.key) === normalizedQuery); + if (byAlias) { + // Resolve to canonical spec + const canonical = CLI_COMMAND_SPECS.find((spec) => !spec.alias && spec.operationId === byAlias.operationId); + if (canonical) { + const metadata = metadataForSpec(canonical); + return metadata ? buildOperationDetail(canonical, metadata) : null; + } + } + + // Match by doc.X suffix (strip doc. prefix from query) + const bySuffix = CLI_COMMAND_SPECS.find( + (spec) => !spec.alias && normalizeLookup(spec.operationId.slice('doc.'.length)) === normalizedQuery, + ); + if (bySuffix) { + const metadata = metadataForSpec(bySuffix); + return metadata ? 
buildOperationDetail(bySuffix, metadata) : null; + } + + return null; +} + +export function buildContractOverview(): ContractOverview { + const operations = listContractOperations(); + + return { + contractVersion: CONTRACT_VERSION, + cli: { + package: 'superdoc', + minVersion: CONTRACT_VERSION, + }, + protocol: { + transport: 'stdio', + host: { + protocolVersion: HOST_PROTOCOL_VERSION, + features: [...HOST_PROTOCOL_FEATURES], + notifications: [...HOST_PROTOCOL_NOTIFICATIONS], + }, + }, + invariants: [ + 'All mutation operations require an open document context or a stateless doc path.', + 'Response envelopes include elapsed_ms for all operations.', + 'JSON output mode is the default and must always be supported.', + ], + operationCount: operations.length, + operations, + }; +} + +export function buildContractOperationDetail(query: string): { + contractVersion: string; + query: string; + operation: ContractOperationDetail; +} | null { + const operation = resolveContractOperation(query); + if (!operation) return null; + + return { + contractVersion: CONTRACT_VERSION, + query, + operation, + }; +} diff --git a/apps/cli/src/lib/create-paragraph-input.ts b/apps/cli/src/lib/create-paragraph-input.ts new file mode 100644 index 0000000000..407442fda8 --- /dev/null +++ b/apps/cli/src/lib/create-paragraph-input.ts @@ -0,0 +1,107 @@ +import type { ParsedArgs } from './args'; +import { getStringOption, resolveJsonInput } from './args'; +import { CliError } from './errors'; +import { validateCreateParagraphInput, validateNodeAddress } from './validate'; +import type { CreateParagraphInput } from './types'; + +type FlatLocation = + | { kind: 'documentStart' } + | { kind: 'documentEnd' } + | { kind: 'before'; target: Extract['target'] } + | { kind: 'after'; target: Extract['target'] }; + +function parseAtFlag(rawAt: string | undefined, commandName: string): FlatLocation | undefined { + if (!rawAt) return undefined; + + if (rawAt === 'document-start') return { kind: 
'documentStart' }; + if (rawAt === 'document-end') return { kind: 'documentEnd' }; + + throw new CliError( + 'INVALID_ARGUMENT', + `${commandName}: --at must be "document-start" or "document-end" when provided.`, + ); +} + +function ensureBlockTarget( + value: unknown, + path: string, +): Extract['target'] { + const target = validateNodeAddress(value, path); + if (target.kind !== 'block') { + throw new CliError('VALIDATION_ERROR', `${path}.kind must be "block".`); + } + return target; +} + +async function buildFlatInput(parsed: ParsedArgs, commandName: string): Promise { + const text = getStringOption(parsed, 'text'); + const at = parseAtFlag(getStringOption(parsed, 'at'), commandName); + const beforePayload = await resolveJsonInput(parsed, 'before-address'); + const afterPayload = await resolveJsonInput(parsed, 'after-address'); + + if (beforePayload != null && afterPayload != null) { + throw new CliError( + 'INVALID_ARGUMENT', + `${commandName}: use only one of --before-address-json or --after-address-json.`, + ); + } + + if (at && (beforePayload != null || afterPayload != null)) { + throw new CliError( + 'INVALID_ARGUMENT', + `${commandName}: --at cannot be combined with --before-address-json/--after-address-json.`, + ); + } + + if (beforePayload != null) { + return { + text, + at: { + kind: 'before', + target: ensureBlockTarget(beforePayload, 'before-address'), + }, + }; + } + + if (afterPayload != null) { + return { + text, + at: { + kind: 'after', + target: ensureBlockTarget(afterPayload, 'after-address'), + }, + }; + } + + return { + text, + at, + }; +} + +export async function resolveCreateParagraphInput( + parsed: ParsedArgs, + commandName: string, +): Promise { + const inputJson = await resolveJsonInput(parsed, 'input'); + const hasFlatFlags = + getStringOption(parsed, 'text') != null || + getStringOption(parsed, 'at') != null || + getStringOption(parsed, 'before-address-json') != null || + getStringOption(parsed, 'before-address-file') != null || + 
getStringOption(parsed, 'after-address-json') != null || + getStringOption(parsed, 'after-address-file') != null; + + if (inputJson && hasFlatFlags) { + throw new CliError( + 'INVALID_ARGUMENT', + `${commandName}: --input-json/--input-file cannot be combined with flat create flags.`, + ); + } + + if (inputJson) { + return validateCreateParagraphInput(inputJson, 'input'); + } + + return buildFlatInput(parsed, commandName); +} diff --git a/apps/cli/src/lib/document.ts b/apps/cli/src/lib/document.ts new file mode 100644 index 0000000000..4daa46dd75 --- /dev/null +++ b/apps/cli/src/lib/document.ts @@ -0,0 +1,237 @@ +import { readFile, writeFile } from 'node:fs/promises'; +import { createHash } from 'node:crypto'; +import { Editor } from 'superdoc/super-editor'; +import { getDocumentApiAdapters } from '@superdoc/super-editor/document-api-adapters'; + +import { createDocumentApi, type DocumentApi } from '@superdoc/document-api'; +import type { CollaborationProfile } from './collaboration'; +import { createCollaborationRuntime } from './collaboration'; +import { CliError } from './errors'; +import { pathExists } from './guards'; +import type { ContextMetadata } from './context'; +import type { CliIO, DocumentSourceMeta, ExecutionMode } from './types'; +import type { CollaborationSessionPool } from '../host/collab-session-pool'; + +export type EditorWithDoc = Editor & { + doc: DocumentApi; +}; + +export interface OpenedDocument { + editor: EditorWithDoc; + meta: DocumentSourceMeta; + dispose(): void; +} + +interface OpenDocumentOptions { + documentId?: string; + ydoc?: unknown; + collaborationProvider?: unknown; +} + +export interface FileOutputMeta { + path: string; + byteLength: number; +} + +function toUint8Array(data: unknown): Uint8Array { + if (data instanceof Uint8Array) return data; + if (data instanceof ArrayBuffer) return new Uint8Array(data); + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength); + } + + throw 
new CliError('DOCUMENT_EXPORT_FAILED', 'Exported document data is not binary.'); +} + +async function readDocumentSource(doc: string, io: CliIO): Promise<{ bytes: Uint8Array; meta: DocumentSourceMeta }> { + if (doc === '-') { + const bytes = await io.readStdinBytes(); + if (bytes.byteLength === 0) { + throw new CliError('MISSING_REQUIRED', 'No DOCX bytes were provided on stdin.'); + } + + return { + bytes, + meta: { + source: 'stdin', + byteLength: bytes.byteLength, + }, + }; + } + + let bytes: Uint8Array; + try { + const raw = await readFile(doc); + bytes = new Uint8Array(raw.buffer, raw.byteOffset, raw.byteLength); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + throw new CliError('FILE_READ_ERROR', `Unable to read document: ${doc}`, { + message, + }); + } + + return { + bytes, + meta: { + source: 'path', + path: doc, + byteLength: bytes.byteLength, + }, + }; +} + +export async function openDocument(doc: string, io: CliIO, options: OpenDocumentOptions = {}): Promise { + const { bytes, meta } = await readDocumentSource(doc, io); + + let editor: Editor; + try { + const isTest = process.env.NODE_ENV === 'test'; + editor = await Editor.open(Buffer.from(bytes), { + documentId: options.documentId ?? meta.path ?? 'stdin.docx', + user: { id: 'cli', name: 'CLI' }, + ...(isTest ? { telemetry: { enabled: false } } : {}), + ydoc: options.ydoc, + ...(options.collaborationProvider != null ? { collaborationProvider: options.collaborationProvider } : {}), + }); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + throw new CliError('DOCUMENT_OPEN_FAILED', 'Failed to open document.', { + message, + source: meta, + }); + } + + const adapters = getDocumentApiAdapters(editor); + const docApi = createDocumentApi(adapters); + Object.defineProperty(editor, 'doc', { value: docApi, configurable: true, writable: true }); + const editorWithDoc = editor as EditorWithDoc; + + return { + editor: editorWithDoc, + meta, + dispose() { + editor.destroy(); + }, + }; +} + +export async function openCollaborativeDocument( + doc: string, + io: CliIO, + profile: CollaborationProfile, +): Promise { + const runtime = createCollaborationRuntime(profile); + + try { + await runtime.waitForSync(); + const opened = await openDocument(doc, io, { + documentId: profile.documentId, + ydoc: runtime.ydoc, + collaborationProvider: runtime.provider, + }); + + return { + editor: opened.editor, + meta: opened.meta, + dispose() { + try { + opened.dispose(); + } finally { + runtime.dispose(); + } + }, + }; + } catch (error) { + runtime.dispose(); + throw error; + } +} + +export async function openSessionDocument( + doc: string, + io: CliIO, + metadata: Pick, + options: { + sessionId?: string; + executionMode?: ExecutionMode; + collabSessionPool?: CollaborationSessionPool; + } = {}, +): Promise { + if (metadata.sessionType !== 'collab') { + return openDocument(doc, io); + } + + if (!metadata.collaboration) { + throw new CliError('COMMAND_FAILED', 'Session is marked as collaborative but has no collaboration profile.'); + } + + if (options.executionMode === 'host' && options.collabSessionPool) { + const sessionId = options.sessionId ?? 
metadata.contextId; + if (!sessionId) { + throw new CliError('COMMAND_FAILED', 'Session id is required for host-mode collaboration operations.'); + } + + const metadataForPool = { + contextId: sessionId, + sessionType: metadata.sessionType, + collaboration: metadata.collaboration, + sourcePath: metadata.sourcePath, + workingDocPath: metadata.workingDocPath, + }; + + return options.collabSessionPool.acquire(sessionId, doc, metadataForPool, io); + } + + return openCollaborativeDocument(doc, io, metadata.collaboration); +} + +export async function getFileChecksum(path: string): Promise { + let bytes: Uint8Array; + try { + const data = await readFile(path); + bytes = new Uint8Array(data.buffer, data.byteOffset, data.byteLength); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + throw new CliError('FILE_READ_ERROR', `Failed to read file checksum: ${path}`, { + message, + }); + } + + return createHash('sha256').update(bytes).digest('hex'); +} + +export async function exportToPath(editor: Editor, outputPath: string, force = false): Promise { + const exists = await pathExists(outputPath); + if (exists && !force) { + throw new CliError('OUTPUT_EXISTS', `Output path already exists: ${outputPath}`, { + path: outputPath, + hint: 'Use --force to overwrite.', + }); + } + + let exported: unknown; + try { + exported = await editor.exportDocument(); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + throw new CliError('DOCUMENT_EXPORT_FAILED', 'Failed to export document.', { + message, + }); + } + + const bytes = toUint8Array(exported); + + try { + await writeFile(outputPath, bytes); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + throw new CliError('FILE_WRITE_ERROR', `Failed to write output file: ${outputPath}`, { + message, + }); + } + + return { + path: outputPath, + byteLength: bytes.byteLength, + }; +} diff --git a/apps/cli/src/lib/editor.ts b/apps/cli/src/lib/editor.ts deleted file mode 100644 index 8b544634c7..0000000000 --- a/apps/cli/src/lib/editor.ts +++ /dev/null @@ -1,124 +0,0 @@ -import { readFile, writeFile } from 'node:fs/promises'; -import { Editor } from 'superdoc/super-editor'; - -export interface DocumentEditor { - editor: Editor; - path: string; -} - -/** - * Opens a document in headless mode using the new Editor.open() API - */ -export async function openDocument(path: string): Promise { - const buffer = await readFile(path); - - const editor = await Editor.open(buffer, { - documentId: path, - }); - - return { editor, path }; -} - -/** - * Saves the document back to disk - */ -export async function saveDocument(doc: DocumentEditor): Promise { - const result = await doc.editor.exportDocument({ format: 'docx' }); - // In headless mode, exportDocument returns a Buffer/Uint8Array directly - await writeFile(doc.path, result as Buffer); -} - -/** - * Closes and cleans up the editor - */ -export function closeDocument(doc: DocumentEditor): void { - doc.editor.destroy(); -} - -/** - * Gets the plain text content of the document - */ -export function getDocumentText(doc: DocumentEditor): string { - const { state } = doc.editor; - return state.doc.textContent; -} - -export interface DocRange { - from: number; - to: number; -} - -export interface SearchMatch { - from: number; - to: number; - text: string; - ranges?: DocRange[]; -} - -/** - * Search for text in the document - * Returns array of matches with positions - */ -export function searchDocument(doc: DocumentEditor, pattern: string): SearchMatch[] { - const matches = doc.editor.commands.search?.(pattern, { - highlight: false, - }) as SearchMatch[] | undefined; - if (!matches) return []; - 
return matches.map((m) => ({ - from: m.from, - to: m.to, - text: m.text, - ranges: m.ranges, - })); -} - -/** - * Replace all occurrences of a pattern with replacement text - * Returns the number of replacements made - * - * Handles cross-paragraph matches by replacing each range individually, - * preserving document structure (paragraph boundaries, bookmarks, etc.) - */ -export function replaceInDocument(doc: DocumentEditor, find: string, replaceWith: string): number { - // Search for all matches - const matches = searchDocument(doc, find); - if (matches.length === 0) return 0; - - // Collect all ranges from all matches, then sort by position descending - // For multi-range matches (cross-paragraph), we replace each range separately - // to avoid deleting content between ranges (paragraph boundaries, etc.) - const allRanges: Array<{ from: number; to: number; isFirst: boolean }> = []; - - for (const match of matches) { - if (match.ranges && match.ranges.length > 0) { - // Multi-range match: add each range, marking the first one for replacement text - match.ranges.forEach((range, index) => { - allRanges.push({ - from: range.from, - to: range.to, - isFirst: index === 0, - }); - }); - } else { - // Single range match - allRanges.push({ - from: match.from, - to: match.to, - isFirst: true, - }); - } - } - - // Sort by position descending (replace from end to start to avoid position shifts) - allRanges.sort((a, b) => b.from - a.from); - - // Replace each range - // For multi-range matches: first range gets the replacement text, others are deleted - for (const range of allRanges) { - const content = range.isFirst ? 
replaceWith : ''; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (doc.editor.chain() as any).setTextSelection({ from: range.from, to: range.to }).insertContent(content).run(); - } - - return matches.length; -} diff --git a/apps/cli/src/lib/envelope.ts b/apps/cli/src/lib/envelope.ts new file mode 100644 index 0000000000..b592898624 --- /dev/null +++ b/apps/cli/src/lib/envelope.ts @@ -0,0 +1,54 @@ +import { CONTRACT_VERSION } from '@superdoc/document-api'; +import type { CliError } from './errors'; + +const CLI_VERSION = CONTRACT_VERSION; + +export type SuccessEnvelope = { + ok: true; + command: string; + data: unknown; + meta: { + version: string; + elapsedMs: number; + }; +}; + +export type FailureEnvelope = { + ok: false; + error: { + code: string; + message: string; + details?: unknown; + }; + meta: { + version: string; + elapsedMs: number; + }; +}; + +export function createSuccessEnvelope(command: string, data: unknown, elapsedMs: number): SuccessEnvelope { + return { + ok: true, + command, + data, + meta: { + version: CLI_VERSION, + elapsedMs, + }, + }; +} + +export function createFailureEnvelope(error: CliError, elapsedMs: number): FailureEnvelope { + return { + ok: false, + error: { + code: error.code, + message: error.message, + details: error.details, + }, + meta: { + version: CLI_VERSION, + elapsedMs, + }, + }; +} diff --git a/apps/cli/src/lib/error-mapping.ts b/apps/cli/src/lib/error-mapping.ts new file mode 100644 index 0000000000..d5d2aef308 --- /dev/null +++ b/apps/cli/src/lib/error-mapping.ts @@ -0,0 +1,284 @@ +/** + * Error mapping layer — translates invoke() errors to CLI error codes. + * + * The generic dispatch path calls mapInvokeError() after every invoke() failure. + * It translates adapter-level error codes into stable CLI error codes that + * consumers (tests, host protocol, LLM agents) depend on. + * + * Also handles failed-receipt mapping for mutations that return { success: false } + * without throwing. 
+ */ + +import type { CliExposedOperationId } from '../cli/operation-set.js'; +import { OPERATION_FAMILY, type OperationFamily } from '../cli/operation-hints.js'; +import { CliError, type AdapterLikeError } from './errors.js'; + +// --------------------------------------------------------------------------- +// Error code extraction +// --------------------------------------------------------------------------- + +function extractErrorCode(error: unknown): string | undefined { + const maybe = error as AdapterLikeError; + if (typeof maybe?.code === 'string') return maybe.code; + return undefined; +} + +function extractErrorMessage(error: unknown): string { + if (error instanceof Error) return error.message; + return String(error); +} + +function extractErrorDetails(error: unknown): unknown { + const maybe = error as AdapterLikeError; + return maybe?.details; +} + +// --------------------------------------------------------------------------- +// Per-family error mappers (thrown errors) +// --------------------------------------------------------------------------- + +function mapTrackChangesError(operationId: CliExposedOperationId, error: unknown, code: string | undefined): CliError { + const message = extractErrorMessage(error); + const details = extractErrorDetails(error); + + if (code === 'TARGET_NOT_FOUND' || (typeof message === 'string' && message.includes('was not found'))) { + return new CliError('TRACK_CHANGE_NOT_FOUND', message, { operationId, details }); + } + + if (code === 'COMMAND_UNAVAILABLE' || code === 'TRACK_CHANGE_COMMAND_UNAVAILABLE') { + return new CliError('TRACK_CHANGE_COMMAND_UNAVAILABLE', message, { operationId, details }); + } + + if (error instanceof CliError) return error; + return new CliError('COMMAND_FAILED', message, { operationId, details }); +} + +function mapCommentsError(operationId: CliExposedOperationId, error: unknown, code: string | undefined): CliError { + const message = extractErrorMessage(error); + const details = 
extractErrorDetails(error); + + if (code === 'TARGET_NOT_FOUND' || (typeof message === 'string' && message.includes('could not be resolved'))) { + return new CliError('TARGET_NOT_FOUND', message, { operationId, details }); + } + + if (code === 'INVALID_TARGET') { + return new CliError('INVALID_ARGUMENT', message, { operationId, details }); + } + + if (code === 'COMMAND_UNAVAILABLE') { + return new CliError('COMMAND_FAILED', message, { operationId, details }); + } + + if (error instanceof CliError) return error; + return new CliError('COMMAND_FAILED', message, { operationId, details }); +} + +function mapListsError(operationId: CliExposedOperationId, error: unknown, code: string | undefined): CliError { + const message = extractErrorMessage(error); + const details = extractErrorDetails(error); + + if (code === 'TARGET_NOT_FOUND') { + return new CliError('TARGET_NOT_FOUND', message, { operationId, details }); + } + + if (code === 'INVALID_TARGET') { + return new CliError('INVALID_ARGUMENT', message, { operationId, details }); + } + + if (code === 'TRACK_CHANGE_COMMAND_UNAVAILABLE' || code === 'CAPABILITY_UNAVAILABLE') { + return new CliError('TRACK_CHANGE_COMMAND_UNAVAILABLE', message, { operationId, details }); + } + + if (code === 'COMMAND_UNAVAILABLE') { + return new CliError('COMMAND_FAILED', message, { operationId, details }); + } + + if (error instanceof CliError) return error; + return new CliError('COMMAND_FAILED', message, { operationId, details }); +} + +function mapTextMutationError(operationId: CliExposedOperationId, error: unknown, code: string | undefined): CliError { + const message = extractErrorMessage(error); + const details = extractErrorDetails(error); + + if (code === 'TARGET_NOT_FOUND') { + return new CliError('TARGET_NOT_FOUND', message, { operationId, details }); + } + + if (code === 'TRACK_CHANGE_COMMAND_UNAVAILABLE' || code === 'CAPABILITY_UNAVAILABLE') { + return new CliError('TRACK_CHANGE_COMMAND_UNAVAILABLE', message, { operationId, 
details }); + } + + if (code === 'INVALID_TARGET') { + return new CliError('INVALID_ARGUMENT', message, { operationId, details }); + } + + if (code === 'COMMAND_UNAVAILABLE') { + return new CliError('COMMAND_FAILED', message, { operationId, details }); + } + + if (error instanceof CliError) return error; + return new CliError('COMMAND_FAILED', message, { operationId, details }); +} + +function mapCreateError(operationId: CliExposedOperationId, error: unknown, code: string | undefined): CliError { + const message = extractErrorMessage(error); + const details = extractErrorDetails(error); + + if (code === 'TARGET_NOT_FOUND') { + return new CliError('TARGET_NOT_FOUND', message, { operationId, details }); + } + + if (code === 'TRACK_CHANGE_COMMAND_UNAVAILABLE') { + return new CliError('TRACK_CHANGE_COMMAND_UNAVAILABLE', message, { operationId, details }); + } + + if (code === 'COMMAND_UNAVAILABLE') { + return new CliError('COMMAND_FAILED', message, { operationId, details }); + } + + if (error instanceof CliError) return error; + return new CliError('COMMAND_FAILED', message, { operationId, details }); +} + +function mapQueryError(operationId: CliExposedOperationId, error: unknown, code: string | undefined): CliError { + const message = extractErrorMessage(error); + const details = extractErrorDetails(error); + + if (code === 'TARGET_NOT_FOUND' || (typeof message === 'string' && /not found/i.test(message))) { + return new CliError('TARGET_NOT_FOUND', message, { operationId, details }); + } + + if (error instanceof CliError) return error; + return new CliError('COMMAND_FAILED', message, { operationId, details }); +} + +// --------------------------------------------------------------------------- +// Per-family error mappers (dispatch by family) +// --------------------------------------------------------------------------- + +const FAMILY_MAPPERS: Record< + OperationFamily, + (operationId: CliExposedOperationId, error: unknown, code: string | undefined) => CliError +> = 
{ + trackChanges: mapTrackChangesError, + comments: mapCommentsError, + lists: mapListsError, + textMutation: mapTextMutationError, + create: mapCreateError, + query: mapQueryError, + general: (operationId, error) => { + if (error instanceof CliError) return error; + return new CliError('COMMAND_FAILED', extractErrorMessage(error), { operationId }); + }, +}; + +/** + * Maps an invoke() exception to a CLI error with the appropriate error code. + * Called by the generic dispatch path after every invoke() failure. + */ +export function mapInvokeError(operationId: CliExposedOperationId, error: unknown): CliError { + if (error instanceof CliError) return error; + const code = extractErrorCode(error); + const family = OPERATION_FAMILY[operationId]; + return FAMILY_MAPPERS[family](operationId, error, code); +} + +// --------------------------------------------------------------------------- +// Failed receipt mapping (non-throwing failure path) +// --------------------------------------------------------------------------- + +type ReceiptLike = { + success: boolean; + failure?: { + code?: string; + message?: string; + details?: unknown; + }; +}; + +function isReceiptLike(value: unknown): value is ReceiptLike { + if (typeof value !== 'object' || value == null) return false; + return 'success' in value && typeof (value as ReceiptLike).success === 'boolean'; +} + +/** + * Checks a mutation result for { success: false } and maps it to a CliError. + * Many mutation operations return failed receipts without throwing — this + * handles that non-throwing failure path. + * + * Returns null if the result is not a failed receipt (either successful or + * not receipt-shaped at all). 
+ */ +export function mapFailedReceipt(operationId: CliExposedOperationId, result: unknown): CliError | null { + if (!isReceiptLike(result)) return null; + if (result.success) return null; + + const failure = result.failure; + const family = OPERATION_FAMILY[operationId]; + + if (!failure) { + return new CliError('COMMAND_FAILED', `${operationId}: operation failed.`, { operationId }); + } + + const failureCode = failure.code; + const failureMessage = failure.message ?? `${operationId}: operation failed.`; + + // Track-changes family + if (family === 'trackChanges') { + if (failureCode === 'TRACK_CHANGE_COMMAND_UNAVAILABLE') { + return new CliError('TRACK_CHANGE_COMMAND_UNAVAILABLE', failureMessage, { operationId, failure }); + } + if (failureCode === 'INVALID_TARGET') { + return new CliError('TRACK_CHANGE_NOT_FOUND', failureMessage, { operationId, failure }); + } + return new CliError('COMMAND_FAILED', failureMessage, { operationId, failure }); + } + + // Comments family + if (family === 'comments') { + if (failureCode === 'TARGET_NOT_FOUND') { + return new CliError('TARGET_NOT_FOUND', failureMessage, { operationId, failure }); + } + if (failureCode === 'INVALID_TARGET') { + return new CliError('INVALID_ARGUMENT', failureMessage, { operationId, failure }); + } + return new CliError('COMMAND_FAILED', failureMessage, { operationId, failure }); + } + + // Lists family + if (family === 'lists') { + if (failureCode === 'INVALID_TARGET') { + return new CliError('INVALID_ARGUMENT', failureMessage, { operationId, failure }); + } + if (failureCode === 'CAPABILITY_UNAVAILABLE') { + return new CliError('TRACK_CHANGE_COMMAND_UNAVAILABLE', failureMessage, { operationId, failure }); + } + return new CliError('COMMAND_FAILED', failureMessage, { operationId, failure }); + } + + // Text mutation family + if (family === 'textMutation') { + if (failureCode === 'TRACK_CHANGE_COMMAND_UNAVAILABLE' || failureCode === 'CAPABILITY_UNAVAILABLE') { + return new 
CliError('TRACK_CHANGE_COMMAND_UNAVAILABLE', failureMessage, { operationId, failure }); + } + if (failureCode === 'INVALID_TARGET') { + return new CliError('INVALID_ARGUMENT', failureMessage, { operationId, failure }); + } + return new CliError('COMMAND_FAILED', failureMessage, { operationId, failure }); + } + + // Create family + if (family === 'create') { + if (failureCode === 'TRACK_CHANGE_COMMAND_UNAVAILABLE') { + return new CliError('TRACK_CHANGE_COMMAND_UNAVAILABLE', failureMessage, { operationId, failure }); + } + if (failureCode === 'INVALID_TARGET') { + return new CliError('INVALID_ARGUMENT', failureMessage, { operationId, failure }); + } + return new CliError('COMMAND_FAILED', failureMessage, { operationId, failure }); + } + + // Default + return new CliError('COMMAND_FAILED', failureMessage, { operationId, failure }); +} diff --git a/apps/cli/src/lib/errors.ts b/apps/cli/src/lib/errors.ts new file mode 100644 index 0000000000..8ce5474c91 --- /dev/null +++ b/apps/cli/src/lib/errors.ts @@ -0,0 +1,66 @@ +export type CliErrorCode = + | 'INVALID_ARGUMENT' + | 'SESSION_ID_INVALID' + | 'SESSION_NOT_FOUND' + | 'UNKNOWN_COMMAND' + | 'VALIDATION_ERROR' + | 'MISSING_REQUIRED' + | 'JSON_PARSE_ERROR' + | 'FILE_READ_ERROR' + | 'DOCUMENT_OPEN_FAILED' + | 'DOCUMENT_EXPORT_FAILED' + | 'FILE_WRITE_ERROR' + | 'OUTPUT_EXISTS' + | 'TARGET_NOT_FOUND' + | 'NO_ACTIVE_DOCUMENT' + | 'DIRTY_CLOSE_REQUIRES_DECISION' + | 'REVISION_MISMATCH' + | 'CONTEXT_LOCK_TIMEOUT' + | 'PROJECT_CONTEXT_MISMATCH' + | 'DIRTY_SESSION_EXISTS' + | 'SOURCE_DRIFT_DETECTED' + | 'COLLABORATION_SYNC_TIMEOUT' + | 'TRACK_CHANGE_NOT_FOUND' + | 'TRACK_CHANGE_MODE_UNSUPPORTED' + | 'TRACK_CHANGE_COMMAND_UNAVAILABLE' + | 'TRACK_CHANGE_CONFLICT' + | 'COMMAND_FAILED' + | 'TIMEOUT'; + +/** + * Intersection type for errors thrown by document-api adapter operations. + * These may carry a `code` string (e.g. `'TARGET_NOT_FOUND'`) and optional `details`. 
+ */ +export type AdapterLikeError = Error & { + code?: unknown; + details?: unknown; +}; + +export class CliError extends Error { + readonly code: CliErrorCode; + readonly details?: unknown; + readonly exitCode: number; + + constructor(code: CliErrorCode, message: string, details?: unknown, exitCode = 1) { + super(message); + Object.setPrototypeOf(this, CliError.prototype); + this.name = 'CliError'; + this.code = code; + this.details = details; + this.exitCode = exitCode; + } +} + +export function toCliError(error: unknown): CliError { + if (error instanceof CliError) return error; + + if (error instanceof Error) { + return new CliError('COMMAND_FAILED', error.message, { + name: error.name, + }); + } + + return new CliError('COMMAND_FAILED', 'Unknown error', { + error, + }); +} diff --git a/apps/cli/src/lib/find-query.ts b/apps/cli/src/lib/find-query.ts new file mode 100644 index 0000000000..88f0a31f9e --- /dev/null +++ b/apps/cli/src/lib/find-query.ts @@ -0,0 +1,147 @@ +import { getNumberOption, getOptionalBooleanOption, getStringOption, resolveJsonInput, type ParsedArgs } from './args'; +import { CliError } from './errors'; +import { PRETTY_ROW_LIMIT, moreLine, padCol, safeNumber, toSingleLine, truncate } from './pretty-helpers'; +import { validateQuery } from './validate'; +import type { Query, QueryResult } from './types'; + +const FLAT_FIND_FLAGS = [ + 'type', + 'node-type', + 'kind', + 'pattern', + 'mode', + 'case-sensitive', + 'limit', + 'offset', + 'include-nodes', + 'include-unknown', +]; + +function hasFlatFindFlags(parsed: ParsedArgs): boolean { + return FLAT_FIND_FLAGS.some((flag) => parsed.options[flag] != null); +} + +function buildFlatFindQueryDraft(parsed: ParsedArgs): unknown { + const selectorType = getStringOption(parsed, 'type'); + if (!selectorType) { + throw new CliError('MISSING_REQUIRED', 'find: missing required --type, or provide --query-json/--query-file.'); + } + + const includeNodesFlag = getOptionalBooleanOption(parsed, 
'include-nodes'); + const includeNodes = typeof includeNodesFlag === 'boolean' ? includeNodesFlag : undefined; + const includeUnknownFlag = getOptionalBooleanOption(parsed, 'include-unknown'); + const includeUnknown = typeof includeUnknownFlag === 'boolean' ? includeUnknownFlag : undefined; + const caseSensitive = getOptionalBooleanOption(parsed, 'case-sensitive'); + + if (selectorType === 'text') { + return { + select: { + type: 'text', + pattern: getStringOption(parsed, 'pattern'), + mode: getStringOption(parsed, 'mode'), + caseSensitive, + }, + limit: getNumberOption(parsed, 'limit'), + offset: getNumberOption(parsed, 'offset'), + includeNodes, + includeUnknown, + }; + } + + if (selectorType === 'node') { + return { + select: { + type: 'node', + nodeType: getStringOption(parsed, 'node-type'), + kind: getStringOption(parsed, 'kind'), + }, + limit: getNumberOption(parsed, 'limit'), + offset: getNumberOption(parsed, 'offset'), + includeNodes, + includeUnknown, + }; + } + + const kind = getStringOption(parsed, 'kind'); + const select = kind + ? { + type: 'node', + nodeType: selectorType, + kind, + } + : { + type: selectorType, + }; + + return { + select, + limit: getNumberOption(parsed, 'limit'), + offset: getNumberOption(parsed, 'offset'), + includeNodes, + includeUnknown, + }; +} + +export async function resolveFindQuery(parsed: ParsedArgs): Promise { + // Canonical path: always execute against a normalized Query object. + // Flat CLI flags are convenience syntax that is converted into Query here. + const queryPayload = await resolveJsonInput(parsed, 'query'); + const withinPayload = await resolveJsonInput(parsed, 'within'); + + if (queryPayload && hasFlatFindFlags(parsed)) { + throw new CliError('INVALID_ARGUMENT', 'find: do not combine --query-* with flat selector flags.'); + } + + const queryDraft = queryPayload ?? buildFlatFindQueryDraft(parsed); + const finalDraft = + withinPayload == null + ? 
queryDraft + : { + ...(queryDraft as Record), + within: withinPayload, + }; + + return validateQuery(finalDraft, 'query'); +} + +function resolveMatchLabel(match: QueryResult['matches'][number], maxTypeLength: number): string { + const nodeId = match.kind === 'block' ? match.nodeId : 'inline'; + return `[${padCol(match.nodeType, maxTypeLength)} ${nodeId}]`; +} + +function resolveNodeText(result: QueryResult, index: number): string | null { + const snippet = result.context?.[index]?.snippet; + if (typeof snippet === 'string' && snippet.length > 0) return snippet; + + const node = result.nodes?.[index]; + if (typeof node !== 'object' || node == null) return null; + const text = (node as { text?: unknown }).text; + if (typeof text === 'string' && text.length > 0) return text; + return null; +} + +export function formatFindPretty(result: QueryResult, revision: number): string { + const total = safeNumber(result.total, result.matches.length); + const suffix = result.matches.length !== total ? 
` (${total} total)` : ''; + const lines: string[] = [`Revision ${revision}: ${result.matches.length} matches${suffix}`]; + if (result.matches.length === 0) return lines[0]; + + lines.push(''); + const shownCount = Math.min(result.matches.length, PRETTY_ROW_LIMIT); + const shownMatches = result.matches.slice(0, shownCount); + const maxTypeLength = Math.max(1, ...shownMatches.map((match) => match.nodeType.length)); + + for (let index = 0; index < shownMatches.length; index += 1) { + const label = resolveMatchLabel(shownMatches[index], maxTypeLength); + const snippet = resolveNodeText(result, index); + if (!snippet) { + lines.push(label); + continue; + } + lines.push(`${label} "${truncate(toSingleLine(snippet), 50)}"`); + } + + const remaining = moreLine(shownMatches.length, Math.max(total, result.matches.length)); + if (remaining) lines.push(remaining); + return lines.join('\n'); +} diff --git a/apps/cli/src/lib/generic-dispatch.ts b/apps/cli/src/lib/generic-dispatch.ts new file mode 100644 index 0000000000..205f38d7dc --- /dev/null +++ b/apps/cli/src/lib/generic-dispatch.ts @@ -0,0 +1,32 @@ +/** + * Single dispatch entry point for all doc-backed operations. + * + * Replaces the 3-tier cascade (tryRunDirectCallOperation → tryRunExtraOperationInvoker + * → getLegacyRunner) with a single generic path driven by orchestrationKind(). + */ + +import { orchestrationKind } from '../cli/operation-hints.js'; +import type { CliExposedOperationId } from '../cli/operation-set.js'; +import { executeReadOperation } from './read-orchestrator.js'; +import { executeMutationOperation } from './mutation-orchestrator.js'; +import type { CommandContext, CommandExecution } from './types.js'; + +export type DocOperationRequest = { + operationId: CliExposedOperationId; + input: Record; + context: CommandContext; +}; + +/** + * Dispatches a doc-backed operation through the appropriate orchestrator. + * All doc-backed operations flow through this single entry point. 
+ */ +export async function dispatchDocOperation(request: DocOperationRequest): Promise { + const kind = orchestrationKind(request.operationId); + + if (kind === 'read') { + return executeReadOperation(request); + } + + return executeMutationOperation(request); +} diff --git a/apps/cli/src/lib/guards.ts b/apps/cli/src/lib/guards.ts new file mode 100644 index 0000000000..5738af470c --- /dev/null +++ b/apps/cli/src/lib/guards.ts @@ -0,0 +1,38 @@ +import { constants } from 'node:fs'; +import { access } from 'node:fs/promises'; + +/** + * Type guard that checks whether a value is a plain object (not null, not an array). + * + * @param value - The value to check + * @returns `true` if value is a non-null, non-array object + */ +export function isRecord(value: unknown): value is Record { + return typeof value === 'object' && value !== null && !Array.isArray(value); +} + +/** + * Returns the value as a `Record` if it is a plain object, or `null` otherwise. + * + * @param value - The value to check + * @returns The value typed as a record, or `null` + */ +export function asRecord(value: unknown): Record | null { + if (!isRecord(value)) return null; + return value; +} + +/** + * Checks whether a filesystem path exists. + * + * @param path - Absolute path to check + * @returns `true` if the path exists and is accessible + */ +export async function pathExists(path: string): Promise { + try { + await access(path, constants.F_OK); + return true; + } catch { + return false; + } +} diff --git a/apps/cli/src/lib/input-readers.ts b/apps/cli/src/lib/input-readers.ts new file mode 100644 index 0000000000..bca16c6692 --- /dev/null +++ b/apps/cli/src/lib/input-readers.ts @@ -0,0 +1,94 @@ +import { CliError } from './errors'; + +/** + * Checks whether a value is a non-empty string. 
+ * + * @param value - The value to check + * @returns `true` if the value is a string with length > 0 + */ +export function hasNonEmptyString(value: unknown): value is string { + return typeof value === 'string' && value.length > 0; +} + +/** + * Reads a required non-empty string field from an input record. + * + * @param input - The input record to read from + * @param field - The field name to read + * @param operation - The operation name for error messages + * @returns The string value + * @throws {CliError} MISSING_REQUIRED if the field is missing or empty + */ +export function readRequiredString(input: Record, field: string, operation: string): string { + const value = input[field]; + if (hasNonEmptyString(value)) return value; + throw new CliError('MISSING_REQUIRED', `${operation}: missing required input.${field}.`); +} + +/** + * Reads an optional string field from an input record. + * + * @param input - The input record to read from + * @param field - The field name to read + * @returns The string value, or `undefined` if missing or empty + */ +export function readOptionalString(input: Record, field: string): string | undefined { + const value = input[field]; + return hasNonEmptyString(value) ? value : undefined; +} + +/** + * Reads an optional finite number field from an input record. + * + * @param input - The input record to read from + * @param field - The field name to read + * @returns The number value, or `undefined` if missing or not a finite number + */ +export function readOptionalNumber(input: Record, field: string): number | undefined { + const value = input[field]; + return typeof value === 'number' && Number.isFinite(value) ? value : undefined; +} + +/** + * Reads a boolean field from an input record, defaulting to `false`. 
+ * + * @param input - The input record to read from + * @param field - The field name to read + * @returns `true` only if the field is strictly `true` + */ +export function readBoolean(input: Record, field: string): boolean { + return input[field] === true; +} + +/** + * Reads the change mode from an input record. + * + * @param input - The input record to read from + * @returns `'tracked'` if explicitly set, otherwise `'direct'` + */ +export function readChangeMode(input: Record): 'direct' | 'tracked' { + return input.changeMode === 'tracked' ? 'tracked' : 'direct'; +} + +/** + * JSON round-trip normalizes a value to ensure it is serializable. + * + * @param value - The value to normalize + * @param commandName - The command name for error messages + * @returns The normalized value + * @throws {CliError} VALIDATION_ERROR if the value is not JSON-serializable + */ +export function normalizeJsonValue(value: unknown, commandName: string): unknown { + try { + const serialized = JSON.stringify(value); + if (serialized == null) { + throw new CliError('VALIDATION_ERROR', `${commandName}: response payload must be JSON-serializable.`); + } + return JSON.parse(serialized) as unknown; + } catch (error) { + if (error instanceof CliError) throw error; + throw new CliError('VALIDATION_ERROR', `${commandName}: response payload must be JSON-serializable.`, { + cause: error instanceof Error ? error.message : String(error), + }); + } +} diff --git a/apps/cli/src/lib/introspection-dispatch.ts b/apps/cli/src/lib/introspection-dispatch.ts new file mode 100644 index 0000000000..78f2a23a33 --- /dev/null +++ b/apps/cli/src/lib/introspection-dispatch.ts @@ -0,0 +1,302 @@ +/** + * Dispatch for CLI-only introspection operations: describe, describeCommand, status. + * + * These operations are not doc-backed (they don't call editor.doc.invoke()) and + * are not lifecycle operations (they don't create/save/close sessions). 
They are + * CLI-level introspection that runs without a document context or with an + * optional session for status. + */ + +import type { CliOperationId } from '../cli'; +import { buildContractOverview, buildContractOperationDetail } from './contract'; +import { getActiveSessionId, getWorkingDocumentSize, withActiveContext } from './context'; +import { CliError } from './errors'; +import { readRequiredString } from './input-readers'; +import type { CommandContext, CommandExecution } from './types'; + +type IntrospectionInvoker = (input: Record, context: CommandContext) => Promise; + +// --------------------------------------------------------------------------- +// Describe +// --------------------------------------------------------------------------- + +function buildDescribePretty(data: ReturnType): string { + const lines: string[] = [ + `Contract ${data.contractVersion} (${data.operationCount} operations)`, + `CLI: ${data.cli.package}@${data.cli.minVersion}`, + `Host protocol: ${data.protocol.host.protocolVersion}`, + ]; + + for (const operation of data.operations) { + lines.push(`- ${operation.id} -> ${operation.command.join(' ')} (${operation.category})`); + } + + return lines.join('\n'); +} + +type OperationDetail = NonNullable>; +type DescribedParam = { + name: string; + kind: string; + flag?: string; + type: string; + required?: boolean; + schema?: unknown; +}; +type ConstraintsShape = { + requiresOneOf?: ReadonlyArray>; + mutuallyExclusive?: ReadonlyArray>; + requiredWhen?: ReadonlyArray<{ param: string; whenParam: string; equals?: unknown; present?: boolean }>; +}; + +function extractEnumValues(schema: unknown): string | null { + if (typeof schema !== 'object' || schema == null) return null; + const record = schema as Record; + + if (Array.isArray(record.oneOf)) { + const values = record.oneOf + .map((entry) => { + if (typeof entry !== 'object' || entry == null) return null; + if (!Object.prototype.hasOwnProperty.call(entry, 'const')) return null; + 
return String((entry as { const: unknown }).const); + }) + .filter((value): value is string => Boolean(value)); + if (values.length > 0 && values.length <= 6) return values.join('|'); + } + + if (Array.isArray(record.enum) && record.enum.length > 0 && record.enum.length <= 6) { + return record.enum.map(String).join('|'); + } + + return null; +} + +function flagForParamName(name: string, params: readonly DescribedParam[]): string { + const param = params.find((candidate) => candidate.name === name); + if (!param) return name; + if (param.kind === 'doc') return `<${param.name}>`; + if (param.kind === 'flag' || param.kind === 'jsonFlag') return param.flag ? `--${param.flag}` : param.name; + return param.name; +} + +function paramLabel(param: DescribedParam): string { + if (param.kind === 'doc') return `<${param.name}>`; + if (param.kind !== 'flag' && param.kind !== 'jsonFlag') return param.name; + + const base = param.flag ? `--${param.flag}` : param.name; + if (param.type === 'boolean') return base; + + const enumValues = extractEnumValues(param.schema); + const valueLabel = enumValues ? `<${enumValues}>` : `<${param.type}>`; + return `${base} ${valueLabel}`; +} + +function formatConstraints(constraints: ConstraintsShape, params: readonly DescribedParam[]): string[] { + const lines: string[] = []; + + for (const group of constraints.requiresOneOf ?? []) { + if (group.length === 0) continue; + lines.push(`Requires one of: ${group.map((name) => flagForParamName(name, params)).join(' | ')}`); + } + + const conflictMap = new Map(); + for (const pair of constraints.mutuallyExclusive ?? []) { + if (pair.length < 2) continue; + const [first, second] = pair; + const existing = conflictMap.get(first) ?? 
[]; + if (!existing.includes(second)) existing.push(second); + conflictMap.set(first, existing); + } + + for (const [name, conflicts] of conflictMap) { + const left = flagForParamName(name, params); + const right = conflicts.map((conflict) => flagForParamName(conflict, params)).join(', '); + lines.push(`Mutually exclusive: ${left} conflicts with ${right}`); + } + + for (const rule of constraints.requiredWhen ?? []) { + const required = flagForParamName(rule.param, params); + const when = flagForParamName(rule.whenParam, params); + if (rule.present === true) { + lines.push(`Required when: ${required} required when ${when} is present`); + continue; + } + if (rule.present === false) { + lines.push(`Required when: ${required} required when ${when} is absent`); + continue; + } + if (Object.prototype.hasOwnProperty.call(rule, 'equals')) { + lines.push(`Required when: ${required} required when ${when} = ${JSON.stringify(rule.equals)}`); + } + } + + return lines; +} + +function buildDescribeCommandPretty(data: OperationDetail): string { + const operation = data.operation; + const lines: string[] = []; + + lines.push(`superdoc ${operation.command.join(' ')} (${operation.id}): ${operation.description}`); + lines.push(''); + lines.push( + ` Category: ${operation.category} | Stability: ${operation.stability} | Mutates: ${ + operation.mutates ? 'yes' : 'no' + } | Requires document context: ${operation.requiresDocumentContext ? 'yes' : 'no'}`, + ); + lines.push(` Capabilities: ${(operation.capabilities ?? []).join(', ') || ''}`); + + const params = [...operation.params] as DescribedParam[]; + const constraints = + 'constraints' in operation ? 
(operation.constraints as unknown as ConstraintsShape | undefined) : undefined; + if (params.length > 0) { + lines.push(''); + lines.push('Parameters:'); + + const requiresOneOf = constraints?.requiresOneOf; + const formatted = params.map((param) => { + const label = paramLabel(param); + const detailParts: string[] = []; + if (param.kind === 'doc' && param.name === 'doc') { + detailParts.push('Document path or stdin'); + } + const oneOfGroup = requiresOneOf?.find((group) => group.includes(param.name)); + if (oneOfGroup && oneOfGroup.length > 1) { + const peers = oneOfGroup.map((name) => flagForParamName(name, params)).join(' or '); + detailParts.push(`(required with one of: ${peers})`); + } else if (param.required === true) { + detailParts.push('(required)'); + } + return { label, detail: detailParts.join(' ') }; + }); + const maxLabel = Math.max(...formatted.map((entry) => entry.label.length)); + for (const entry of formatted) { + const suffix = entry.detail.length > 0 ? ` ${entry.detail}` : ''; + lines.push(` ${entry.label.padEnd(maxLabel)}${suffix}`); + } + } + + if (constraints) { + const constraintLines = formatConstraints(constraints, params); + if (constraintLines.length > 0) { + lines.push(''); + lines.push('Constraints:'); + for (const constraintLine of constraintLines) { + lines.push(` ${constraintLine}`); + } + } + } + + if (operation.errors.length > 0) { + lines.push(''); + lines.push('Error codes:'); + for (const code of operation.errors) { + lines.push(` ${code}`); + } + } + + return lines.join('\n'); +} + +// --------------------------------------------------------------------------- +// Invoker map +// --------------------------------------------------------------------------- + +const INTROSPECTION_INVOKERS: Partial> = { + 'doc.describe': async () => { + const data = buildContractOverview(); + return { + command: 'describe', + data, + pretty: buildDescribePretty(data), + }; + }, + + 'doc.describeCommand': async (input) => { + const query = 
readRequiredString(input, 'operationId', 'describe command'); + const detail = buildContractOperationDetail(query); + if (!detail) { + throw new CliError('TARGET_NOT_FOUND', `Unknown operation: ${query}`, { query }); + } + return { + command: 'describe command', + data: detail, + pretty: buildDescribeCommandPretty(detail), + }; + }, + + 'doc.status': async (_input, context) => { + const activeSessionId = await getActiveSessionId(); + + try { + return await withActiveContext( + context.io, + 'status', + async ({ metadata, paths }) => { + const byteLength = await getWorkingDocumentSize(paths); + + return { + command: 'status', + data: { + active: true, + contextId: metadata.contextId, + activeSessionId: activeSessionId ?? undefined, + projectRoot: metadata.projectRoot, + document: { + path: metadata.sourcePath, + source: metadata.source, + byteLength, + revision: metadata.revision, + }, + dirty: metadata.dirty, + sessionType: metadata.sessionType, + collaboration: metadata.collaboration, + openedAt: metadata.openedAt, + updatedAt: metadata.updatedAt, + lastSavedAt: metadata.lastSavedAt, + }, + pretty: [ + `Context: ${metadata.contextId}`, + `Default: ${activeSessionId ?? ''}`, + `Document: ${metadata.sourcePath ?? ''}`, + `Session Type: ${metadata.sessionType}`, + metadata.collaboration ? `Collab Doc ID: ${metadata.collaboration.documentId}` : undefined, + `Revision: ${metadata.revision}`, + `Dirty: ${metadata.dirty ? 'yes' : 'no'}`, + ] + .filter((line): line is string => Boolean(line)) + .join('\n'), + }; + }, + context.sessionId, + ); + } catch (error) { + if (error instanceof CliError && error.code === 'NO_ACTIVE_DOCUMENT') { + return { + command: 'status', + data: { + active: false, + activeSessionId: activeSessionId ?? undefined, + requestedSessionId: context.sessionId, + }, + pretty: 'No active document', + }; + } + throw error; + } + }, +}; + +/** + * Dispatches a CLI-only introspection operation. 
+ * Returns the execution result, or null if the operation is not an introspection op. + */ +export async function dispatchIntrospectionOperation( + operationId: CliOperationId, + input: Record, + context: CommandContext, +): Promise { + const invoker = INTROSPECTION_INVOKERS[operationId]; + if (!invoker) return null; + return invoker(input, context); +} diff --git a/apps/cli/src/lib/invoke-input.ts b/apps/cli/src/lib/invoke-input.ts new file mode 100644 index 0000000000..39cb1178bf --- /dev/null +++ b/apps/cli/src/lib/invoke-input.ts @@ -0,0 +1,79 @@ +/** + * Extracts the API-level input from the CLI input object. + * + * The CLI wrapper parsing produces objects that mix API-level fields with + * CLI-level fields (doc, sessionId, out, force, etc.). Some operations wrap + * their API input in a named field (query, address, input). Some operations + * rename API field names for the CLI (commentId → id). + * + * This module strips CLI-level fields, unwraps operation-specific input + * keys, and reverses param renames so that `invoke()` receives the correct + * input shape. + */ + +import type { CliExposedOperationId } from '../cli/operation-set.js'; + +/** + * Operations whose API input is wrapped in a named field on the CLI input object. + * + * For example, the `find` wrapper produces `{ doc, sessionId, query: Query }`. + * The API's `invoke('find', input)` expects the `Query` object directly as input, + * so we extract `cliInput.query` as the invoke input. + */ +const WRAPPED_INPUT_KEY: Partial> = { + find: 'query', + getNode: 'address', + 'lists.list': 'query', + 'lists.insert': 'input', + 'lists.setType': 'input', + 'lists.indent': 'input', + 'lists.outdent': 'input', + 'lists.restart': 'input', + 'lists.exit': 'input', + 'create.paragraph': 'input', +}; + +/** + * Reverse param name mapping: CLI param name → API field name. + * + * Derived from PARAM_FLAG_OVERRIDES in operation-params.ts. + * The CLI renames certain API fields for user convenience (e.g. 
`commentId` → `id`). + * We reverse these so `invoke()` receives the original API field names. + */ +const PARAM_RENAMES: Partial>> = { + getNodeById: { id: 'nodeId' }, + 'comments.add': { id: 'commentId' }, + 'comments.edit': { id: 'commentId' }, + 'comments.reply': { parentId: 'parentCommentId' }, + 'comments.move': { id: 'commentId' }, + 'comments.resolve': { id: 'commentId' }, + 'comments.remove': { id: 'commentId' }, + 'comments.setInternal': { id: 'commentId' }, + 'comments.setActive': { id: 'commentId' }, + 'comments.goTo': { id: 'commentId' }, + 'comments.get': { id: 'commentId' }, +}; + +/** Fields that belong to the CLI layer, not the document API. */ +const CLI_LEVEL_KEYS = new Set(['doc', 'sessionId', 'out', 'dryRun', 'force', 'expectedRevision', 'changeMode']); + +/** + * Extracts the invoke-level input from a CLI input object. + * + * Returns the input that should be passed to `editor.doc.invoke({ input })`. + */ +export function extractInvokeInput(operationId: CliExposedOperationId, cliInput: Record): unknown { + const wrapperKey = WRAPPED_INPUT_KEY[operationId]; + if (wrapperKey && cliInput[wrapperKey] != null) { + return cliInput[wrapperKey]; + } + + const renames = PARAM_RENAMES[operationId]; + const apiInput: Record = {}; + for (const [key, value] of Object.entries(cliInput)) { + if (CLI_LEVEL_KEYS.has(key)) continue; + const apiKey = renames?.[key] ?? 
key; + apiInput[apiKey] = value; + } + return apiInput; +} diff --git a/apps/cli/src/lib/legacy-operation-dispatch.ts b/apps/cli/src/lib/legacy-operation-dispatch.ts new file mode 100644 index 0000000000..dd6773ea78 --- /dev/null +++ b/apps/cli/src/lib/legacy-operation-dispatch.ts @@ -0,0 +1,25 @@ +import type { CliOperationId } from '../cli'; +import type { CommandContext, CommandExecution } from './types'; +import { runClose } from '../commands/close'; +import { runOpen } from '../commands/open'; +import { runSave } from '../commands/save'; +import { runSessionClose } from '../commands/session-close'; +import { runSessionList } from '../commands/session-list'; +import { runSessionSave } from '../commands/session-save'; +import { runSessionSetDefault } from '../commands/session-set-default'; + +export type OperationRunner = (tokens: string[], context: CommandContext) => Promise; + +const LEGACY_RUNNERS: Partial> = { + 'doc.open': runOpen, + 'doc.save': runSave, + 'doc.close': runClose, + 'doc.session.list': runSessionList, + 'doc.session.save': runSessionSave, + 'doc.session.close': runSessionClose, + 'doc.session.setDefault': runSessionSetDefault, +}; + +export function getLegacyRunner(operationId: CliOperationId): OperationRunner | undefined { + return LEGACY_RUNNERS[operationId]; +} diff --git a/apps/cli/src/lib/manual-command-allowlist.ts b/apps/cli/src/lib/manual-command-allowlist.ts new file mode 100644 index 0000000000..2546964b3a --- /dev/null +++ b/apps/cli/src/lib/manual-command-allowlist.ts @@ -0,0 +1,30 @@ +import type { CliOperationId } from '../cli'; + +/** + * These commands are intentionally manual. + * They are lifecycle/session orchestration entry points, not main Document API operation wrappers. + * Keep this list explicit and bounded. 
+ */ +export const MANUAL_COMMAND_ALLOWLIST = [ + 'call', + 'open', + 'save', + 'close', + 'session list', + 'session save', + 'session close', + 'session set-default', + 'session use', +] as const; + +export type ManualCommandKey = (typeof MANUAL_COMMAND_ALLOWLIST)[number]; + +export const MANUAL_OPERATION_ALLOWLIST = [ + 'doc.open', + 'doc.save', + 'doc.close', + 'doc.session.list', + 'doc.session.save', + 'doc.session.close', + 'doc.session.setDefault', +] as const satisfies readonly CliOperationId[]; diff --git a/apps/cli/src/lib/mutation-orchestrator.ts b/apps/cli/src/lib/mutation-orchestrator.ts new file mode 100644 index 0000000000..c4ac257942 --- /dev/null +++ b/apps/cli/src/lib/mutation-orchestrator.ts @@ -0,0 +1,305 @@ +/** + * Generic mutation orchestrator — handles all mutating doc operations. + * + * Replaces the 5 copy-pasted orchestrators across write-command.ts, + * comments-mutation-shared.ts, lists-mutation-shared.ts, and inline + * in operation-extra-invokers.ts with a single generic path. + * + * The 3-branch session structure (stateless / session+collab / session+local) + * is preserved but unified into one function. 
+ */ + +import { COMMAND_CATALOG } from '@superdoc/document-api'; +import { RESPONSE_ENVELOPE_KEY, SUCCESS_VERB } from '../cli/operation-hints.js'; +import type { CliExposedOperationId } from '../cli/operation-set.js'; +import { cliCommandTokens } from '../cli/operation-set.js'; +import { assertExpectedRevision, markContextUpdated, withActiveContext, writeContextMetadata } from './context.js'; +import { exportToPath, openDocument, openSessionDocument, type EditorWithDoc } from './document.js'; +import { mapInvokeError, mapFailedReceipt } from './error-mapping.js'; +import { CliError } from './errors.js'; +import { formatOutput } from './output-formatters.js'; +import { syncCollaborativeSessionSnapshot } from './session-collab.js'; +import { PRE_INVOKE_HOOKS, POST_INVOKE_HOOKS } from './special-handlers.js'; +import type { CommandExecution } from './types.js'; +import type { DocOperationRequest } from './generic-dispatch.js'; +import { readOptionalString, readOptionalNumber, readBoolean, readChangeMode } from './input-readers.js'; +import { extractInvokeInput } from './invoke-input.js'; + +/** + * Mutations that do NOT require --out in stateless mode. + * These are state-only operations that don't produce document changes worth exporting. + */ +const STATELESS_OUT_EXEMPT = new Set(['comments.setActive']); + +type DocumentPayload = { + path?: string; + source: 'path' | 'stdin'; + byteLength: number; + revision: number; +}; + +function deriveCommandName(operationId: CliExposedOperationId): string { + return cliCommandTokens(`doc.${operationId}` as `doc.${CliExposedOperationId}`).join(' '); +} + +function invokeOperation( + editor: EditorWithDoc, + operationId: CliExposedOperationId, + input: Record, + options?: Record, +): unknown { + const apiInput = extractInvokeInput(operationId, input); + const preHook = PRE_INVOKE_HOOKS[operationId]; + const transformedInput = preHook ? 
preHook(apiInput as Record, { editor }) : apiInput; + + let result: unknown; + try { + result = editor.doc.invoke({ + operationId, + input: transformedInput, + options, + }); + } catch (error) { + throw mapInvokeError(operationId, error); + } + + // Check for failed receipts (non-throwing failure path) + const failedReceiptError = mapFailedReceipt(operationId, result); + if (failedReceiptError) throw failedReceiptError; + + const postHook = POST_INVOKE_HOOKS[operationId]; + return postHook ? postHook(result, { editor, apiInput: transformedInput }) : result; +} + +function buildEnvelopeData( + operationId: CliExposedOperationId, + document: DocumentPayload, + result: unknown, + extras: Record, +): Record { + const envelopeKey = RESPONSE_ENVELOPE_KEY[operationId]; + + if (envelopeKey === null) { + const resultObj = typeof result === 'object' && result != null ? result : {}; + return { document, ...(resultObj as Record), ...extras }; + } + + return { document, [envelopeKey]: result, ...extras }; +} + +function buildPrettyOutput( + operationId: CliExposedOperationId, + document: DocumentPayload, + result: unknown, + outputPath?: string, +): string { + const formatted = formatOutput(operationId, result, { revision: document.revision }); + if (formatted != null) { + return outputPath ? `${formatted} -> ${outputPath}` : formatted; + } + + const verb = SUCCESS_VERB[operationId]; + return outputPath + ? `Revision ${document.revision}: ${verb} -> ${outputPath}` + : `Revision ${document.revision}: ${verb}`; +} + +async function exportOptionalSessionOutput( + editor: EditorWithDoc, + outPath: string | undefined, + force: boolean, +): Promise<{ path: string; byteLength: number } | undefined> { + if (!outPath) return undefined; + try { + return await exportToPath(editor, outPath, force); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + process.stderr.write(`[warn] optional export to ${outPath} failed: ${message}\n`); + return undefined; + } +} + +export async function executeMutationOperation(request: DocOperationRequest): Promise { + const { operationId, input, context } = request; + const doc = readOptionalString(input, 'doc'); + const outPath = readOptionalString(input, 'out'); + const dryRun = readBoolean(input, 'dryRun'); + const changeMode = readChangeMode(input); + const force = readBoolean(input, 'force'); + const expectedRevision = readOptionalNumber(input, 'expectedRevision'); + const commandName = deriveCommandName(operationId); + + const catalog = COMMAND_CATALOG[operationId]; + const invokeOptions: Record = {}; + if (catalog.supportsTrackedMode) { + invokeOptions.changeMode = changeMode; + } else if (changeMode === 'tracked') { + throw new CliError( + 'TRACK_CHANGE_COMMAND_UNAVAILABLE', + `${commandName}: tracked mode is not supported for this operation.`, + ); + } + if (catalog.supportsDryRun && dryRun) invokeOptions.dryRun = true; + + if (doc && expectedRevision != null) { + throw new CliError( + 'INVALID_ARGUMENT', + `${commandName}: --expected-revision is only supported with an active open context.`, + ); + } + + // ----------------------------------------------------------------------- + // Stateless path (--doc) + // ----------------------------------------------------------------------- + if (doc) { + if (!outPath && !dryRun && !STATELESS_OUT_EXEMPT.has(operationId)) { + throw new CliError('MISSING_REQUIRED', `${commandName}: missing required --out.`); + } + + const source = doc === '-' ? 'stdin' : 'path'; + const opened = await openDocument(doc, context.io); + try { + const result = invokeOperation(opened.editor, operationId, input, invokeOptions); + const document: DocumentPayload = { + path: source === 'path' ? 
doc : undefined, + source, + byteLength: opened.meta.byteLength, + revision: 0, + }; + + if (dryRun) { + return { + command: commandName, + data: { + ...buildEnvelopeData(operationId, document, result, { changeMode, dryRun: true }), + output: outPath ? { path: outPath, skippedWrite: true } : undefined, + }, + pretty: `Revision 0: dry run`, + }; + } + + const output = outPath ? await exportToPath(opened.editor, outPath, force) : undefined; + return { + command: commandName, + data: buildEnvelopeData(operationId, document, result, { + changeMode, + dryRun: false, + output, + }), + pretty: buildPrettyOutput(operationId, document, result, output?.path), + }; + } finally { + opened.dispose(); + } + } + + // ----------------------------------------------------------------------- + // Session paths (collab or local) + // ----------------------------------------------------------------------- + return withActiveContext( + context.io, + commandName, + async ({ metadata, paths }) => { + assertExpectedRevision(metadata, expectedRevision); + + // --- Session + collab --- + if (metadata.sessionType === 'collab') { + const opened = await openSessionDocument(paths.workingDocPath, context.io, metadata, { + sessionId: context.sessionId ?? 
metadata.contextId, + executionMode: context.executionMode, + collabSessionPool: context.collabSessionPool, + }); + + try { + const result = invokeOperation(opened.editor, operationId, input, invokeOptions); + const synced = await syncCollaborativeSessionSnapshot(context.io, metadata, paths, opened.editor); + const document: DocumentPayload = { + path: synced.updatedMetadata.sourcePath, + source: synced.updatedMetadata.source, + byteLength: synced.output.byteLength, + revision: synced.updatedMetadata.revision, + }; + + if (dryRun) { + return { + command: commandName, + data: { + ...buildEnvelopeData(operationId, document, result, { changeMode, dryRun: true }), + context: { dirty: synced.updatedMetadata.dirty, revision: synced.updatedMetadata.revision }, + output: outPath ? { path: outPath, skippedWrite: true } : undefined, + }, + pretty: `Revision ${synced.updatedMetadata.revision}: dry run`, + }; + } + + const externalOutput = await exportOptionalSessionOutput(opened.editor, outPath, force); + return { + command: commandName, + data: buildEnvelopeData(operationId, document, result, { + changeMode, + dryRun: false, + context: { dirty: synced.updatedMetadata.dirty, revision: synced.updatedMetadata.revision }, + output: externalOutput, + }), + pretty: buildPrettyOutput(operationId, document, result, externalOutput?.path), + }; + } finally { + opened.dispose(); + } + } + + // --- Session + local --- + const opened = await openDocument(paths.workingDocPath, context.io); + try { + const result = invokeOperation(opened.editor, operationId, input, invokeOptions); + const document: DocumentPayload = { + path: metadata.sourcePath, + source: metadata.source, + byteLength: opened.meta.byteLength, + revision: metadata.revision, + }; + + if (dryRun) { + return { + command: commandName, + data: { + ...buildEnvelopeData(operationId, document, result, { changeMode, dryRun: true }), + context: { dirty: metadata.dirty, revision: metadata.revision }, + output: outPath ? 
{ path: outPath, skippedWrite: true } : undefined, + }, + pretty: `Revision ${metadata.revision}: dry run`, + }; + } + + const workingOutput = await exportToPath(opened.editor, paths.workingDocPath, true); + const externalOutput = await exportOptionalSessionOutput(opened.editor, outPath, force); + const updatedMetadata = markContextUpdated(context.io, metadata, { + dirty: true, + revision: metadata.revision + 1, + }); + await writeContextMetadata(paths, updatedMetadata); + + const updatedDocument: DocumentPayload = { + path: updatedMetadata.sourcePath, + source: updatedMetadata.source, + byteLength: workingOutput.byteLength, + revision: updatedMetadata.revision, + }; + + return { + command: commandName, + data: buildEnvelopeData(operationId, updatedDocument, result, { + changeMode, + dryRun: false, + context: { dirty: updatedMetadata.dirty, revision: updatedMetadata.revision }, + output: externalOutput, + }), + pretty: buildPrettyOutput(operationId, updatedDocument, result, externalOutput?.path), + }; + } finally { + opened.dispose(); + } + }, + context.sessionId, + ); +} diff --git a/apps/cli/src/lib/node-pretty.ts b/apps/cli/src/lib/node-pretty.ts new file mode 100644 index 0000000000..b9fae88c60 --- /dev/null +++ b/apps/cli/src/lib/node-pretty.ts @@ -0,0 +1,59 @@ +import { toSingleLine, truncate } from './pretty-helpers'; + +type NodeLike = Record; + +function asRecord(value: unknown): NodeLike | null { + if (typeof value !== 'object' || value == null || Array.isArray(value)) return null; + return value as NodeLike; +} + +function formatPropertyValue(value: unknown): string | null { + if (value == null || value === false) return null; + if (typeof value === 'string') return value.length > 0 ? value : null; + if (typeof value === 'number' || typeof value === 'boolean') return String(value); + try { + const serialized = JSON.stringify(value); + return serialized && serialized !== 'null' ? 
serialized : null; + } catch { + return null; + } +} + +export function buildNodePretty(revision: number, headerLabel: string, node: unknown): string { + const lines: string[] = [`Revision ${revision}: ${headerLabel}`]; + const record = asRecord(node); + if (!record) return lines.join('\n'); + + const nodeId = typeof record.nodeId === 'string' ? record.nodeId : ''; + const nodeType = typeof record.nodeType === 'string' ? record.nodeType : ''; + if (nodeId.length > 0 || nodeType.length > 0) { + const parts: string[] = []; + if (nodeId.length > 0) parts.push(nodeId); + if (nodeType.length > 0) parts.push(`(${nodeType})`); + lines.push(` ${parts.join(' ')}`); + } + + const text = typeof record.text === 'string' ? toSingleLine(record.text) : ''; + if (text.length > 0) { + lines.push(''); + lines.push(` Text: "${truncate(text, 80)}"`); + } + + const properties = asRecord(record.properties); + if (!properties) return lines.join('\n'); + + const formatted = Object.entries(properties) + .map(([key, raw]) => { + const value = formatPropertyValue(raw); + if (!value) return null; + return `${key}=${truncate(toSingleLine(value), 48)}`; + }) + .filter((entry): entry is string => entry != null) + .slice(0, 6); + + if (formatted.length > 0) { + lines.push(` Properties: ${formatted.join(', ')}`); + } + + return lines.join('\n'); +} diff --git a/apps/cli/src/lib/operation-args.ts b/apps/cli/src/lib/operation-args.ts new file mode 100644 index 0000000000..64cc2765c6 --- /dev/null +++ b/apps/cli/src/lib/operation-args.ts @@ -0,0 +1,489 @@ +import { CliError } from './errors'; +import { isRecord } from './guards'; +import { + ensureValidArgs, + expectNoPositionals, + getBooleanOption, + getNumberOption, + getOptionalBooleanOption, + getStringListOption, + getStringOption, + parseCommandArgs, + resolveDocArg, + type OptionSpec, + type ParsedArgs, +} from './args'; +import { + CLI_OPERATION_COMMAND_KEYS, + CLI_OPERATION_METADATA, + CLI_OPERATION_OPTION_SPECS, + getResponseSchema, + 
toDocApiId, + type CliOperationArgsById, + type CliOperationConstraints, + type CliOperationId, + type CliOperationParamSpec, + type CliTypeSpec, +} from '../cli'; +import type { CliExposedOperationId } from '../cli/operation-set.js'; +import { RESPONSE_ENVELOPE_KEY, RESPONSE_VALIDATION_KEY } from '../cli/operation-hints.js'; + +type ParseOperationArgsOptions = { + commandName?: string; + extraOptionSpecs?: OptionSpec[]; + allowExtraPositionals?: boolean; + skipConstraints?: boolean; +}; + +type ParsedOperationArgs = { + parsed: ParsedArgs; + args: CliOperationArgsById[TOperationId]; + help: boolean; + positionals: string[]; + commandName: string; +}; + +const HELP_OPTION_SPEC: OptionSpec = { name: 'help', type: 'boolean', aliases: ['h'] }; + +function buildOptionSpecs(operationId: CliOperationId, extras: OptionSpec[] = []): OptionSpec[] { + const seen = new Set(); + const merged: OptionSpec[] = []; + for (const spec of [...CLI_OPERATION_OPTION_SPECS[operationId], ...extras, HELP_OPTION_SPEC]) { + if (seen.has(spec.name)) continue; + seen.add(spec.name); + merged.push(spec); + } + return merged; +} + +function parseJsonFlagValue(commandName: string, flag: string, raw: string | undefined): unknown | undefined { + if (raw == null) return undefined; + try { + return JSON.parse(raw) as unknown; + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + throw new CliError('JSON_PARSE_ERROR', `${commandName}: invalid --${flag} JSON payload.`, { + message, + flag, + }); + } +} + +function getParamLabel(param: CliOperationParamSpec): string { + if (param.kind === 'doc') return `<${param.name}>`; + return `--${param.flag}`; +} + +function isPresent(value: unknown): boolean { + if (value == null) return false; + if (Array.isArray(value)) return value.length > 0; + return true; +} + +export function validateValueAgainstTypeSpec(value: unknown, schema: CliTypeSpec, path: string): void { + if ('const' in schema) { + if (value !== schema.const) { + throw new CliError('VALIDATION_ERROR', `${path} must equal ${JSON.stringify(schema.const)}.`); + } + return; + } + + if ('oneOf' in schema) { + const variants = schema.oneOf as CliTypeSpec[]; + const errors: string[] = []; + for (const variant of variants) { + try { + validateValueAgainstTypeSpec(value, variant, path); + return; + } catch (error) { + errors.push(error instanceof Error ? 
error.message : String(error)); + } + } + throw new CliError('VALIDATION_ERROR', `${path} must match one of the allowed schema variants.`, { errors }); + } + + if (schema.type === 'json') return; + + if (schema.type === 'string') { + if (typeof value !== 'string') throw new CliError('VALIDATION_ERROR', `${path} must be a string.`); + return; + } + + if (schema.type === 'number') { + if (typeof value !== 'number' || !Number.isFinite(value)) { + throw new CliError('VALIDATION_ERROR', `${path} must be a finite number.`); + } + return; + } + + if (schema.type === 'boolean') { + if (typeof value !== 'boolean') throw new CliError('VALIDATION_ERROR', `${path} must be a boolean.`); + return; + } + + if (schema.type === 'array') { + if (!Array.isArray(value)) throw new CliError('VALIDATION_ERROR', `${path} must be an array.`); + for (let index = 0; index < value.length; index += 1) { + validateValueAgainstTypeSpec(value[index], schema.items, `${path}[${index}]`); + } + return; + } + + if (schema.type === 'object') { + if (!isRecord(value)) throw new CliError('VALIDATION_ERROR', `${path} must be an object.`); + + const required = schema.required ?? 
[]; + for (const key of required) { + if (!Object.prototype.hasOwnProperty.call(value, key)) { + throw new CliError('VALIDATION_ERROR', `${path}.${key} is required.`); + } + } + + const knownKeys = new Set(Object.keys(schema.properties)); + for (const key of Object.keys(value)) { + if (!knownKeys.has(key)) { + throw new CliError('VALIDATION_ERROR', `${path}.${key} is not allowed by schema.`); + } + } + + for (const [key, propSchema] of Object.entries(schema.properties)) { + if (!Object.prototype.hasOwnProperty.call(value, key)) continue; + validateValueAgainstTypeSpec(value[key], propSchema, `${path}.${key}`); + } + return; + } + + throw new CliError('VALIDATION_ERROR', `${path} uses an unsupported schema type.`); +} + +/** + * Loose structural validation — checks required fields and types of known + * properties but does NOT reject additional properties. This matches JSON + * Schema's default `additionalProperties: true` and is appropriate for + * response validation where the doc-api output may include extra fields + * beyond what the schema explicitly enumerates. + */ +function validateResponseValueAgainstTypeSpec(value: unknown, schema: CliTypeSpec, path: string): void { + if ('const' in schema) { + if (value !== schema.const) { + throw new CliError('VALIDATION_ERROR', `${path} must be ${JSON.stringify(schema.const)}.`); + } + return; + } + + if ('oneOf' in schema) { + const errors: string[] = []; + for (const variant of schema.oneOf) { + try { + validateResponseValueAgainstTypeSpec(value, variant, path); + return; + } catch (error) { + errors.push(error instanceof Error ? 
error.message : String(error)); + } + } + throw new CliError('VALIDATION_ERROR', `${path} must match one of the allowed schema variants.`, { errors }); + } + + if (schema.type === 'json') return; + if (schema.type === 'string') { + if (typeof value !== 'string') throw new CliError('VALIDATION_ERROR', `${path} must be a string.`); + return; + } + if (schema.type === 'number') { + if (typeof value !== 'number' || !Number.isFinite(value)) { + throw new CliError('VALIDATION_ERROR', `${path} must be a finite number.`); + } + return; + } + if (schema.type === 'boolean') { + if (typeof value !== 'boolean') throw new CliError('VALIDATION_ERROR', `${path} must be a boolean.`); + return; + } + if (schema.type === 'array') { + if (!Array.isArray(value)) throw new CliError('VALIDATION_ERROR', `${path} must be an array.`); + for (let index = 0; index < value.length; index += 1) { + validateResponseValueAgainstTypeSpec(value[index], schema.items, `${path}[${index}]`); + } + return; + } + if (schema.type === 'object') { + if (!isRecord(value)) throw new CliError('VALIDATION_ERROR', `${path} must be an object.`); + + const required = schema.required ?? []; + for (const key of required) { + if (!Object.prototype.hasOwnProperty.call(value, key)) { + throw new CliError('VALIDATION_ERROR', `${path}.${key} is required.`); + } + } + + // Validate known properties but allow additional properties (JSON Schema default). + for (const [key, propSchema] of Object.entries(schema.properties)) { + if (!Object.prototype.hasOwnProperty.call(value, key)) continue; + validateResponseValueAgainstTypeSpec(value[key], propSchema, `${path}.${key}`); + } + return; + } +} + +/** + * Resolves the envelope key for a doc-backed CLI operation. + * + * Derived from the single source of truth in `operation-hints.ts` (RESPONSE_ENVELOPE_KEY). + * Returns `undefined` for CLI-only operations that aren't doc-backed. 
+ */ +function resolveResponsePayloadKey(operationId: CliOperationId): string | null | undefined { + const docApiId = toDocApiId(operationId); + if (!docApiId) return undefined; + const envelopeKey = RESPONSE_ENVELOPE_KEY[docApiId as CliExposedOperationId]; + // For operations with null envelope key (result spread across top-level), fall + // back to RESPONSE_VALIDATION_KEY so schema validation still runs on the receipt. + return envelopeKey ?? RESPONSE_VALIDATION_KEY[docApiId as CliExposedOperationId] ?? null; +} + +export function validateOperationResponseData(operationId: CliOperationId, value: unknown, commandName: string): void { + const schema = getResponseSchema(operationId); + if (!schema) return; + + // CLI-only operations use permissive { type: 'json' } schemas. + if ('type' in schema && schema.type === 'json') return; + + // Resolve the envelope key from the single source of truth. + const payloadKey = resolveResponsePayloadKey(operationId); + + // Null entries are intentionally exempt (e.g. doc.info which splits output + // across multiple keys). + if (payloadKey === null || payloadKey === undefined) return; + + if (!isRecord(value)) { + throw new CliError('VALIDATION_ERROR', `${commandName}:response must be an object.`); + } + + // Dry-run responses use a different envelope shape (proposed instead of + // receipt/result), so skip the key-presence check when dryRun is set. + if (!(payloadKey in value)) { + if (value.dryRun === true) return; + throw new CliError( + 'VALIDATION_ERROR', + `${commandName}:response.${payloadKey} is required by ${operationId} response schema.`, + ); + } + + // Validate the payload field against the doc-api output schema. Uses loose + // validation (allows extra properties) to match JSON Schema defaults. 
+ validateResponseValueAgainstTypeSpec(value[payloadKey], schema, `${commandName}:response.${payloadKey}`); +} + +function validateValueAgainstParamType(value: unknown, param: CliOperationParamSpec, path: string): void { + if (param.type === 'json') return; + + if (param.type === 'string') { + if (typeof value !== 'string') { + throw new CliError('VALIDATION_ERROR', `${path} must be a string.`); + } + return; + } + + if (param.type === 'number') { + if (typeof value !== 'number' || !Number.isFinite(value)) { + throw new CliError('VALIDATION_ERROR', `${path} must be a finite number.`); + } + return; + } + + if (param.type === 'boolean') { + if (typeof value !== 'boolean') { + throw new CliError('VALIDATION_ERROR', `${path} must be a boolean.`); + } + return; + } + + if (param.type === 'string[]') { + if (!Array.isArray(value) || value.some((entry) => typeof entry !== 'string')) { + throw new CliError('VALIDATION_ERROR', `${path} must be an array of strings.`); + } + return; + } +} + +function resolveFlagParamValue(parsed: ParsedArgs, commandName: string, param: CliOperationParamSpec): unknown { + if (param.kind === 'doc') return undefined; + const flag = param.flag ?? param.name; + switch (param.type) { + case 'string': + return getStringOption(parsed, flag); + case 'number': + return getNumberOption(parsed, flag); + case 'boolean': + return getOptionalBooleanOption(parsed, flag); + case 'string[]': + return getStringListOption(parsed, flag); + case 'json': + return parseJsonFlagValue(commandName, flag, getStringOption(parsed, flag)); + default: + return undefined; + } +} + +function applyConstraints(operationId: CliOperationId, commandName: string, args: Record): void { + const constraints = CLI_OPERATION_METADATA[operationId].constraints; + if (!constraints) return; + + const typedConstraints = constraints as CliOperationConstraints; + const mutuallyExclusive: string[][] = Array.isArray(typedConstraints.mutuallyExclusive) + ? 
typedConstraints.mutuallyExclusive.map((group) => [...group]) + : []; + const requiresOneOf: string[][] = Array.isArray(typedConstraints.requiresOneOf) + ? typedConstraints.requiresOneOf.map((group) => [...group]) + : []; + const requiredWhen: Array<{ + param: string; + whenParam: string; + equals?: unknown; + present?: boolean; + }> = Array.isArray(typedConstraints.requiredWhen) ? typedConstraints.requiredWhen.map((rule) => ({ ...rule })) : []; + + for (const group of mutuallyExclusive) { + const present = group.filter((name) => isPresent(args[name])); + if (present.length > 1) { + throw new CliError( + 'INVALID_ARGUMENT', + `${commandName}: options are mutually exclusive: ${group.map((name) => `--${name}`).join(', ')}`, + ); + } + } + + for (const group of requiresOneOf) { + const hasAny = group.some((name: string) => isPresent(args[name])); + if (!hasAny) { + throw new CliError( + 'MISSING_REQUIRED', + `${commandName}: one of ${group.map((name: string) => `--${name}`).join(', ')} is required.`, + ); + } + } + + for (const rule of requiredWhen) { + const whenValue = args[rule.whenParam]; + let shouldRequire = false; + if (Object.prototype.hasOwnProperty.call(rule, 'equals')) { + shouldRequire = whenValue === rule.equals; + } else if (Object.prototype.hasOwnProperty.call(rule, 'present')) { + shouldRequire = rule.present ? 
isPresent(whenValue) : !isPresent(whenValue); + } else { + shouldRequire = isPresent(whenValue); + } + + if (shouldRequire && !isPresent(args[rule.param])) { + throw new CliError('MISSING_REQUIRED', `${commandName}: --${rule.param} is required by argument constraints.`, { + param: rule.param, + whenParam: rule.whenParam, + }); + } + } +} + +export function validateOperationInputData(operationId: CliOperationId, input: unknown, commandName = 'call'): void { + if (!isRecord(input)) { + throw new CliError('VALIDATION_ERROR', `${commandName}: input must be a JSON object.`); + } + + const metadata = CLI_OPERATION_METADATA[operationId]; + const paramNames = new Set(metadata.params.map((param) => param.name as string)); + for (const key of Object.keys(input)) { + if (!paramNames.has(key)) { + throw new CliError('VALIDATION_ERROR', `${commandName}: input.${key} is not allowed for ${operationId}.`); + } + } + + const argsRecord: Record = {}; + for (const param of metadata.params) { + const value = input[param.name]; + argsRecord[param.name] = value; + if (!isPresent(value)) continue; + + if ('schema' in param && param.schema) { + validateValueAgainstTypeSpec(value, param.schema, `${commandName}:input.${param.name}`); + continue; + } + + validateValueAgainstParamType(value, param, `${commandName}:input.${param.name}`); + } + + for (const param of metadata.params) { + const isRequired = 'required' in param && Boolean(param.required); + if (!isRequired) continue; + if (isPresent(argsRecord[param.name])) continue; + const requiredLabel = param.kind === 'doc' ? `<${param.name}>` : `input.${param.name}`; + throw new CliError('MISSING_REQUIRED', `${commandName}: missing required ${requiredLabel}.`); + } + + applyConstraints(operationId, commandName, argsRecord); +} + +export function parseOperationArgs( + operationId: TOperationId, + tokens: string[], + options: ParseOperationArgsOptions = {}, +): ParsedOperationArgs { + const commandName = options.commandName ?? 
CLI_OPERATION_COMMAND_KEYS[operationId]; + const parsed = parseCommandArgs(tokens, buildOptionSpecs(operationId, options.extraOptionSpecs ?? [])); + ensureValidArgs(parsed); + + const help = getBooleanOption(parsed, 'help'); + const metadata = CLI_OPERATION_METADATA[operationId]; + const argsRecord: Record = {}; + let remainingPositionals = [...parsed.positionals]; + + const positionalParamNames = [...metadata.positionalParams]; + if (positionalParamNames[0] === 'doc') { + const resolved = resolveDocArg(parsed, commandName); + if (resolved.doc != null) { + argsRecord.doc = resolved.doc; + } + remainingPositionals = [...resolved.positionals]; + positionalParamNames.shift(); + } + + for (const positionalName of positionalParamNames) { + const value = remainingPositionals.shift(); + if (value != null) { + argsRecord[positionalName] = value; + } + } + + if (!options.allowExtraPositionals) { + expectNoPositionals(parsed, remainingPositionals, commandName); + } + + for (const param of metadata.params) { + if (param.kind === 'doc') continue; + argsRecord[param.name] = resolveFlagParamValue(parsed, commandName, param); + } + + for (const param of metadata.params) { + if (!('schema' in param) || !param.schema) continue; + const value = argsRecord[param.name]; + if (!isPresent(value)) continue; + validateValueAgainstTypeSpec(value, param.schema, `${commandName}:${param.name}`); + } + + if (!help && !options.skipConstraints) { + for (const param of metadata.params) { + const isRequired = 'required' in param && Boolean(param.required); + if (!isRequired) continue; + const value = argsRecord[param.name]; + if (!isPresent(value)) { + throw new CliError('MISSING_REQUIRED', `${commandName}: missing required ${getParamLabel(param)}.`); + } + } + applyConstraints(operationId, commandName, argsRecord); + } + + return { + parsed, + args: argsRecord as CliOperationArgsById[TOperationId], + help, + positionals: remainingPositionals, + commandName, + }; +} diff --git 
a/apps/cli/src/lib/operation-executor.ts b/apps/cli/src/lib/operation-executor.ts new file mode 100644 index 0000000000..750035c3c3 --- /dev/null +++ b/apps/cli/src/lib/operation-executor.ts @@ -0,0 +1,231 @@ +import { getActiveSessionId } from './context'; +import { CliError } from './errors'; +import { isRecord } from './guards'; +import { hasNonEmptyString } from './input-readers'; +import { dispatchDocOperation } from './generic-dispatch.js'; +import { dispatchIntrospectionOperation } from './introspection-dispatch.js'; +import { parseWrapperOperationInput } from './operation-wrapper-input'; +import { getLegacyRunner } from './legacy-operation-dispatch'; +import { MANUAL_OPERATION_ALLOWLIST } from './manual-command-allowlist'; +import { validateOperationInputData } from './operation-args'; +import { getOperationRuntimeMetadata } from './operation-runtime-metadata'; +import { + CLI_OPERATION_METADATA, + isDocBackedOperation, + toDocApiId, + type CliOperationId, + type CliOperationParamSpec, +} from '../cli'; +import type { CliExposedOperationId } from '../cli/operation-set.js'; +import type { CommandContext, CommandExecution } from './types'; + +type ExecuteOperationWrapperRequest = { + mode: 'wrapper'; + operationId: CliOperationId; + commandName: string; + tokens: string[]; + context: CommandContext; +}; + +type ExecuteOperationCallRequest = { + mode: 'call'; + operationId: CliOperationId; + input: unknown; + context: CommandContext; +}; + +export type ExecuteOperationRequest = ExecuteOperationWrapperRequest | ExecuteOperationCallRequest; + +const MANUAL_OPERATION_ALLOWLIST_SET = new Set(MANUAL_OPERATION_ALLOWLIST); + +function pruneUndefinedDeep(value: unknown): unknown { + if (Array.isArray(value)) { + return value.map((entry) => pruneUndefinedDeep(entry)); + } + + if (!isRecord(value)) { + return value; + } + + const normalized: Record = {}; + for (const [key, entry] of Object.entries(value)) { + if (entry === undefined) continue; + normalized[key] = 
pruneUndefinedDeep(entry); + } + return normalized; +} + +function serializeJsonValue(value: unknown, operationId: CliOperationId, param: CliOperationParamSpec): string { + try { + const encoded = JSON.stringify(value); + if (encoded == null) { + throw new CliError('VALIDATION_ERROR', `call: input.${param.name} for ${operationId} must be JSON-serializable.`); + } + return encoded; + } catch (error) { + if (error instanceof CliError) throw error; + throw new CliError('VALIDATION_ERROR', `call: input.${param.name} for ${operationId} must be JSON-serializable.`, { + cause: error instanceof Error ? error.message : String(error), + }); + } +} + +function serializeOperationInputToTokens(operationId: CliOperationId, input: Record): string[] { + const metadata = CLI_OPERATION_METADATA[operationId]; + const params = metadata.params as readonly CliOperationParamSpec[]; + const tokens: string[] = []; + + for (const positionalName of metadata.positionalParams) { + const positionalValue = input[positionalName]; + if (positionalValue == null) continue; + tokens.push(String(positionalValue)); + } + + for (const param of params) { + if (param.kind === 'doc') continue; + + const value = input[param.name]; + if (value == null) continue; + + const flag = `--${param.flag ?? param.name}`; + if (param.type === 'boolean') { + tokens.push(flag, value === true ? 
'true' : 'false'); + continue; + } + + if (param.type === 'string[]') { + if (!Array.isArray(value)) continue; + for (const entry of value) { + tokens.push(flag, String(entry)); + } + continue; + } + + if (param.type === 'json') { + tokens.push(flag, serializeJsonValue(value, operationId, param)); + continue; + } + + tokens.push(flag, String(value)); + } + + return tokens; +} + +function applySessionInputToContext(context: CommandContext, input: Record): CommandContext { + const inputSessionId = input.sessionId; + if (typeof inputSessionId !== 'string' || inputSessionId.length === 0) { + return context; + } + + if (context.sessionId && context.sessionId !== inputSessionId) { + throw new CliError( + 'INVALID_ARGUMENT', + `call: conflicting session ids. Global --session (${context.sessionId}) does not match input.sessionId (${inputSessionId}).`, + ); + } + + return { + ...context, + sessionId: inputSessionId, + }; +} + +async function preflightCallContext( + operationId: CliOperationId, + input: Record, + context: CommandContext, +): Promise { + const runtime = getOperationRuntimeMetadata(operationId); + const hasDocInput = hasNonEmptyString(input.doc); + const hasInputSessionId = hasNonEmptyString(input.sessionId); + const hasContextSessionId = hasNonEmptyString(context.sessionId); + const hasExplicitSessionTarget = hasInputSessionId || hasContextSessionId; + const allowsDocAndSessionTarget = operationId === 'doc.open'; + + if (hasDocInput && hasExplicitSessionTarget && !allowsDocAndSessionTarget) { + throw new CliError( + 'INVALID_ARGUMENT', + 'call: stateless input.doc cannot be combined with a session target (--session or input.sessionId).', + ); + } + + if (hasDocInput && !runtime.context.supportsStateless) { + throw new CliError('INVALID_ARGUMENT', `call: ${operationId} does not support stateless execution.`); + } + + const selectedMode = hasDocInput ? 'stateless' : runtime.context.supportsSession ? 
'session' : 'stateless'; + + if (selectedMode === 'stateless' && !runtime.context.supportsStateless) { + throw new CliError('INVALID_ARGUMENT', `call: ${operationId} requires session execution.`); + } + + if (selectedMode === 'session' && !runtime.context.supportsSession) { + throw new CliError('INVALID_ARGUMENT', `call: ${operationId} does not support session execution.`); + } + + if (runtime.context.requiresDocument && !hasDocInput) { + throw new CliError('MISSING_REQUIRED', `call: ${operationId} requires input.doc for stateless execution.`); + } + + if (!runtime.context.requiresSession || hasExplicitSessionTarget) { + return; + } + + const activeSessionId = await getActiveSessionId(); + if (!hasNonEmptyString(activeSessionId)) { + throw new CliError('NO_ACTIVE_DOCUMENT', `call: ${operationId} requires an active session or input.sessionId.`); + } +} + +export async function executeOperation(request: ExecuteOperationRequest): Promise { + let input: Record; + const baseContext = request.context; + let commandName: string; + + if (request.mode === 'wrapper') { + commandName = request.commandName; + input = (pruneUndefinedDeep( + await parseWrapperOperationInput(request.operationId, request.tokens, request.commandName), + ) ?? {}) as Record; + } else { + commandName = 'call'; + if (!isRecord(request.input)) { + throw new CliError('VALIDATION_ERROR', 'call: --input-json/--input-file must be a JSON object.'); + } + input = (pruneUndefinedDeep(request.input) ?? 
{}) as Record; + } + + validateOperationInputData(request.operationId, input, commandName); + await preflightCallContext(request.operationId, input, baseContext); + const effectiveContext = applySessionInputToContext(baseContext, input); + + // Doc-backed operations → generic dispatch + const docApiId = toDocApiId(request.operationId); + if (docApiId) { + return dispatchDocOperation({ + operationId: docApiId as CliExposedOperationId, + input, + context: effectiveContext, + }); + } + + // CLI-only introspection operations (describe, describeCommand, status) + const introspectionResult = await dispatchIntrospectionOperation(request.operationId, input, effectiveContext); + if (introspectionResult) { + return introspectionResult; + } + + // Lifecycle/session operations → legacy runners + if (!MANUAL_OPERATION_ALLOWLIST_SET.has(request.operationId)) { + throw new CliError('COMMAND_FAILED', `No operation invoker is registered for ${request.operationId}.`); + } + + const runner = getLegacyRunner(request.operationId); + if (!runner) { + throw new CliError('COMMAND_FAILED', `No operation runner is registered for ${request.operationId}.`); + } + + const tokens = serializeOperationInputToTokens(request.operationId, input); + return runner(tokens, effectiveContext); +} diff --git a/apps/cli/src/lib/operation-runtime-metadata.ts b/apps/cli/src/lib/operation-runtime-metadata.ts new file mode 100644 index 0000000000..c42f867635 --- /dev/null +++ b/apps/cli/src/lib/operation-runtime-metadata.ts @@ -0,0 +1,218 @@ +import { + CLI_COMMAND_SPECS, + CLI_OPERATION_METADATA, + type CliCommandSpec, + type CliOperationId, + type CliOperationParamSpec, +} from '../cli'; + +export type OperationProfile = 'read' | 'mutation' | 'lifecycle' | 'sessionAdmin'; + +export type OperationTraits = { + supportsDryRun: boolean; + supportsChangeMode: boolean; + requiresOutInStateless: boolean; + supportsExpectedRevision: boolean; +}; + +export type OperationContextCapabilities = { + requiresDocument: 
boolean; + requiresSession: boolean; + supportsStateless: boolean; + supportsSession: boolean; + supportsCollab: boolean; +}; + +export type OperationRuntimeMetadata = { + operationId: CliOperationId; + profile: OperationProfile; + traits: OperationTraits; + context: OperationContextCapabilities; +}; + +type RuntimeOverride = Partial>; + +const CANONICAL_COMMAND_SPEC_BY_OPERATION = new Map(); +for (const spec of CLI_COMMAND_SPECS) { + if (spec.alias) continue; + CANONICAL_COMMAND_SPEC_BY_OPERATION.set(spec.operationId as CliOperationId, spec); +} + +const RUNTIME_OVERRIDES: Record = { + 'doc.open': { + profile: 'lifecycle', + context: { + requiresDocument: true, + requiresSession: false, + supportsStateless: true, + supportsSession: false, + supportsCollab: false, + }, + }, + 'doc.save': { + profile: 'lifecycle', + context: { + requiresDocument: false, + requiresSession: true, + supportsStateless: false, + supportsSession: true, + supportsCollab: true, + }, + }, + 'doc.close': { + profile: 'lifecycle', + context: { + requiresDocument: false, + requiresSession: true, + supportsStateless: false, + supportsSession: true, + supportsCollab: true, + }, + }, + 'doc.session.list': { + profile: 'sessionAdmin', + context: { + requiresDocument: false, + requiresSession: false, + supportsStateless: true, + supportsSession: false, + supportsCollab: false, + }, + }, + 'doc.session.save': { + profile: 'sessionAdmin', + context: { + requiresDocument: false, + requiresSession: false, + supportsStateless: true, + supportsSession: false, + supportsCollab: false, + }, + }, + 'doc.session.close': { + profile: 'sessionAdmin', + context: { + requiresDocument: false, + requiresSession: false, + supportsStateless: true, + supportsSession: false, + supportsCollab: false, + }, + }, + 'doc.session.setDefault': { + profile: 'sessionAdmin', + context: { + requiresDocument: false, + requiresSession: false, + supportsStateless: true, + supportsSession: false, + supportsCollab: false, + }, + }, + 
'doc.describe': { + context: { + requiresDocument: false, + requiresSession: false, + supportsStateless: true, + supportsSession: false, + supportsCollab: false, + }, + }, + 'doc.describeCommand': { + context: { + requiresDocument: false, + requiresSession: false, + supportsStateless: true, + supportsSession: false, + supportsCollab: false, + }, + }, + 'doc.status': { + context: { + requiresDocument: false, + requiresSession: false, + supportsStateless: true, + supportsSession: true, + supportsCollab: true, + }, + }, +}; + +function hasParam(params: readonly CliOperationParamSpec[], name: string): boolean { + return params.some((param) => param.name === name); +} + +function deriveProfile(operationId: CliOperationId): OperationProfile { + const spec = CANONICAL_COMMAND_SPEC_BY_OPERATION.get(operationId); + if (!spec) return 'read'; + return spec.mutates ? 'mutation' : 'read'; +} + +function deriveTraits(params: readonly CliOperationParamSpec[], profile: OperationProfile): OperationTraits { + return { + supportsDryRun: hasParam(params, 'dryRun'), + supportsChangeMode: hasParam(params, 'changeMode'), + requiresOutInStateless: profile === 'mutation', + supportsExpectedRevision: hasParam(params, 'expectedRevision'), + }; +} + +function deriveContextCapabilities(params: readonly CliOperationParamSpec[]): OperationContextCapabilities { + const hasDocumentParam = params.some((param) => param.kind === 'doc' && param.name === 'doc'); + const hasSessionParam = hasParam(params, 'sessionId'); + + return { + requiresDocument: false, + requiresSession: false, + supportsStateless: hasDocumentParam || !hasSessionParam, + supportsSession: hasSessionParam, + supportsCollab: hasSessionParam, + }; +} + +function applyOverride( + base: OperationRuntimeMetadata, + override: RuntimeOverride | undefined, +): OperationRuntimeMetadata { + if (!override) return base; + return { + ...base, + ...(override.profile ? { profile: override.profile } : {}), + ...(override.context + ? 
{ + context: { + ...base.context, + ...override.context, + }, + } + : {}), + }; +} + +function buildRuntimeMetadata(): Record { + const entries = Object.keys(CLI_OPERATION_METADATA) as CliOperationId[]; + const metadataByOperation = {} as Record; + + for (const operationId of entries) { + const operation = CLI_OPERATION_METADATA[operationId]; + const profile = deriveProfile(operationId); + const runtime = applyOverride( + { + operationId, + profile, + traits: deriveTraits(operation.params, profile), + context: deriveContextCapabilities(operation.params), + }, + RUNTIME_OVERRIDES[operationId], + ); + metadataByOperation[operationId] = runtime; + } + + return metadataByOperation; +} + +const OPERATION_RUNTIME_METADATA = buildRuntimeMetadata(); + +export function getOperationRuntimeMetadata(operationId: CliOperationId): OperationRuntimeMetadata { + return OPERATION_RUNTIME_METADATA[operationId]; +} diff --git a/apps/cli/src/lib/operation-wrapper-input.ts b/apps/cli/src/lib/operation-wrapper-input.ts new file mode 100644 index 0000000000..493d708da6 --- /dev/null +++ b/apps/cli/src/lib/operation-wrapper-input.ts @@ -0,0 +1,182 @@ +import { resolveJsonInput, type ParsedArgs } from './args'; +import { resolveChangeMode } from './change-mode'; +import { resolveCreateParagraphInput } from './create-paragraph-input'; +import { CliError } from './errors'; +import { resolveFindQuery } from './find-query'; +import { parseOperationArgs } from './operation-args'; +import { requireListItemAddressPayload, requireNodeAddressPayload, resolveListsListQueryPayload } from './payload'; +import { validateListsListQuery, validateNodeAddress } from './validate'; +import type { CliOperationId } from '../cli'; + +function stripUndefinedFields(value: Record): Record { + const normalized: Record = {}; + for (const [key, entry] of Object.entries(value)) { + if (entry === undefined) continue; + normalized[key] = entry; + } + return normalized; +} + +const FLAT_LIST_QUERY_FLAGS = ['limit', 
'offset', 'kind', 'level', 'ordinal'];

/** True when any of the flat list-query flags was supplied on the command line. */
function hasFlatListQueryFlags(parsed: ParsedArgs): boolean {
  return FLAT_LIST_QUERY_FLAGS.some((flag) => parsed.options[flag] != null);
}

/**
 * Builds the `query` payload for `doc.lists.list` from either a structured
 * `--query-*` payload or the flat flags (mutually exclusive), optionally
 * constrained to a validated block-kind `within` address.
 *
 * NOTE(review): `Record<string, unknown>` generics reconstructed from mangled
 * `Record` in the extracted source — confirm against the original.
 */
async function resolveListsListQuery(
  parsed: ParsedArgs,
  args: Record<string, unknown>,
): Promise<Record<string, unknown>> {
  const queryPayload = await resolveListsListQueryPayload(parsed, 'query');
  if (queryPayload && hasFlatListQueryFlags(parsed)) {
    throw new CliError('INVALID_ARGUMENT', 'lists list: do not combine --query-* with flat query flags.');
  }

  const withinPayload = await resolveJsonInput(parsed, 'within');
  let within: Record<string, unknown> | undefined;
  if (withinPayload != null) {
    const validated = validateNodeAddress(withinPayload, 'within');
    // Only block addresses are meaningful scoping targets for list queries.
    if (validated.kind !== 'block') {
      throw new CliError('VALIDATION_ERROR', 'within.kind must be "block".');
    }
    within = validated as unknown as Record<string, unknown>;
  }

  const draft =
    queryPayload ??
    ({
      limit: args.limit,
      offset: args.offset,
      kind: args.kind,
      level: args.level,
      ordinal: args.ordinal,
    } as Record<string, unknown>);

  const query = validateListsListQuery(
    within
      ? {
          ...(draft as Record<string, unknown>),
          within,
        }
      : draft,
    'query',
  );

  return stripUndefinedFields(query as unknown as Record<string, unknown>);
}

/**
 * Parses argv tokens for a wrapper command into the structured input object the
 * executor expects. Five operations need bespoke handling (compound JSON
 * payloads, extra flags, constraint skipping); everything else falls through to
 * plain `parseOperationArgs`.
 */
export async function parseWrapperOperationInput(
  operationId: CliOperationId,
  tokens: string[],
  commandName: string,
): Promise<Record<string, unknown>> {
  if (operationId === 'doc.find') {
    // Constraints are skipped: the query may come from flat flags OR --query-*.
    const { parsed, args } = parseOperationArgs('doc.find', tokens, {
      commandName,
      skipConstraints: true,
      extraOptionSpecs: [
        { name: 'type', type: 'string' },
        { name: 'node-type', type: 'string' },
        { name: 'kind', type: 'string' },
        { name: 'pattern', type: 'string' },
        { name: 'mode', type: 'string' },
        { name: 'case-sensitive', type: 'boolean' },
        { name: 'query-json', type: 'string' },
        { name: 'query-file', type: 'string' },
        { name: 'within-json', type: 'string' },
        { name: 'within-file', type: 'string' },
      ],
    });

    const query = await resolveFindQuery(parsed);
    return stripUndefinedFields({
      doc: args.doc,
      sessionId: args.sessionId,
      query,
    });
  }

  if (operationId === 'doc.getNode') {
    const { parsed, args } = parseOperationArgs('doc.getNode', tokens, {
      commandName,
      extraOptionSpecs: [{ name: 'address-file', type: 'string' }],
    });

    const address = await requireNodeAddressPayload(parsed, commandName, 'address');
    return stripUndefinedFields({
      doc: args.doc,
      sessionId: args.sessionId,
      address,
    });
  }

  if (operationId === 'doc.lists.get') {
    const { parsed, args } = parseOperationArgs('doc.lists.get', tokens, {
      commandName,
      extraOptionSpecs: [{ name: 'address-file', type: 'string' }],
    });

    const address = await requireListItemAddressPayload(parsed, commandName, 'address');
    return stripUndefinedFields({
      doc: args.doc,
      sessionId: args.sessionId,
      address,
    });
  }

  if (operationId === 'doc.lists.list') {
    const { parsed, args } = parseOperationArgs('doc.lists.list', tokens, {
      commandName,
      extraOptionSpecs: [
        { name: 'kind', type: 'string' },
        { name: 'level', type: 'number' },
        { name: 'ordinal', type: 'number' },
        { name: 'query-json', type: 'string' },
        { name: 'query-file', type: 'string' },
        { name: 'within-json', type: 'string' },
        { name: 'within-file', type: 'string' },
      ],
    });

    const query = await resolveListsListQuery(parsed, args as Record<string, unknown>);
    return stripUndefinedFields({
      doc: args.doc,
      sessionId: args.sessionId,
      query,
    });
  }

  if (operationId === 'doc.create.paragraph') {
    const { parsed, args } = parseOperationArgs('doc.create.paragraph', tokens, {
      commandName,
      extraOptionSpecs: [
        { name: 'input-file', type: 'string' },
        { name: 'text', type: 'string' },
        { name: 'at', type: 'string' },
        { name: 'before-address-json', type: 'string' },
        { name: 'before-address-file', type: 'string' },
        { name: 'after-address-json', type: 'string' },
        { name: 'after-address-file', type: 'string' },
        { name: 'tracked', type: 'boolean' },
        { name: 'direct', type: 'boolean' },
      ],
    });

    const input = await resolveCreateParagraphInput(parsed, commandName);
    const changeMode = resolveChangeMode(parsed, commandName);

    return stripUndefinedFields({
      doc: args.doc,
      sessionId: args.sessionId,
      input,
      changeMode,
      out: args.out,
      dryRun: args.dryRun,
      force: args.force,
      expectedRevision: args.expectedRevision,
    });
  }

  // Default path: the operation's declared params fully describe its input.
  const { args } = parseOperationArgs(operationId, tokens, { commandName });
  return stripUndefinedFields(args as Record<string, unknown>);
}
diff --git a/apps/cli/src/lib/output-formatters.ts b/apps/cli/src/lib/output-formatters.ts
new file mode 100644
index 0000000000..98c0593b84
--- /dev/null
+++ b/apps/cli/src/lib/output-formatters.ts
@@ -0,0 +1,229 @@
/**
 * Output formatting registry — maps OutputFormat tags to pretty-printers.
 *
 * Each formatter receives the raw invoke() result and returns a human-readable
 * string (or null to fall back to the default SUCCESS_VERB-based output).
+ */ + +import type { CliExposedOperationId } from '../cli/operation-set.js'; +import { OUTPUT_FORMAT, type OutputFormat } from '../cli/operation-hints.js'; +import { formatFindPretty } from './find-query.js'; +import { buildNodePretty } from './node-pretty.js'; +import { PRETTY_ROW_LIMIT, moreLine, padCol, safeNumber, toSingleLine, truncate } from './pretty-helpers.js'; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +function asRecord(value: unknown): Record | null { + if (typeof value !== 'object' || value == null || Array.isArray(value)) return null; + return value as Record; +} + +function asArray(value: unknown): unknown[] { + return Array.isArray(value) ? value : []; +} + +function hasNonEmptyString(value: unknown): value is string { + return typeof value === 'string' && value.length > 0; +} + +// --------------------------------------------------------------------------- +// Per-format formatters +// --------------------------------------------------------------------------- + +type FormatContext = { revision: number }; + +function formatCommentList(result: unknown, ctx: FormatContext): string { + const record = asRecord(result); + const total = safeNumber(record?.total, 0); + const rows = asArray(record?.matches).map((entry) => { + const comment = asRecord(entry) ?? {}; + const status = hasNonEmptyString(comment.status) ? comment.status : 'unknown'; + const commentId = hasNonEmptyString(comment.commentId) ? comment.commentId : ''; + const creatorName = hasNonEmptyString(comment.creatorName) ? comment.creatorName : ''; + const creatorEmail = hasNonEmptyString(comment.creatorEmail) ? comment.creatorEmail : ''; + const author = creatorName || creatorEmail || 'unknown'; + const text = hasNonEmptyString(comment.text) ? 
comment.text : ''; + return { status, commentId, author, text }; + }); + + const lines: string[] = [`Revision ${ctx.revision}: ${total} comments`]; + if (rows.length === 0) return lines[0]; + + lines.push(''); + const shownRows = rows.slice(0, PRETTY_ROW_LIMIT); + const maxStatus = Math.max(1, ...shownRows.map((row) => `[${row.status}]`.length)); + const maxId = Math.max(8, ...shownRows.map((row) => row.commentId.length)); + + for (const row of shownRows) { + const status = padCol(`[${row.status}]`, maxStatus); + const id = padCol(row.commentId, maxId); + const author = padCol(row.author, 20); + const text = truncate(toSingleLine(row.text), 50); + lines.push(`${status} ${id} ${author} "${text}"`); + } + + const remaining = moreLine(shownRows.length, Math.max(total, rows.length)); + if (remaining) lines.push(remaining); + return lines.join('\n'); +} + +function formatListResult(result: unknown, ctx: FormatContext): string { + const record = asRecord(result); + const total = safeNumber(record?.total, 0); + const items = asArray(record?.items) + .map((entry) => asRecord(entry)) + .filter((entry): entry is Record => Boolean(entry)); + + const lines: string[] = [`Revision ${ctx.revision}: ${total} list items`]; + if (items.length === 0) return lines[0]; + + lines.push(''); + const shown = items.slice(0, PRETTY_ROW_LIMIT); + const maxMarkerLength = Math.max( + 1, + ...shown.map((item) => { + const marker = hasNonEmptyString(item.marker) ? item.marker : ''; + return toSingleLine(marker).length; + }), + ); + + for (const item of shown) { + const markerRaw = toSingleLine(hasNonEmptyString(item.marker) ? item.marker : ''); + const marker = (markerRaw.length > 0 ? markerRaw : '-').padEnd(maxMarkerLength); + const level = + typeof item.level === 'number' && Number.isFinite(item.level) ? Math.max(0, Math.floor(item.level)) : 0; + const text = truncate(toSingleLine(hasNonEmptyString(item.text) ? 
item.text : ''), 60); + const indent = ' '.repeat(level); + lines.push(text.length > 0 ? `${indent}${marker} ${text}` : `${indent}${marker}`); + } + + const remaining = moreLine(shown.length, Math.max(total, items.length)); + if (remaining) lines.push(remaining); + return lines.join('\n'); +} + +function formatTrackChangeList(result: unknown, ctx: FormatContext): string { + const record = asRecord(result); + const total = safeNumber(record?.total, 0); + const changes = asArray(record?.changes); + + const rows = + changes.length > 0 + ? changes.map((entry) => { + const change = asRecord(entry) ?? {}; + const address = asRecord(change.address); + const type = hasNonEmptyString(change.type) ? change.type : 'change'; + const id = hasNonEmptyString(change.id) + ? change.id + : hasNonEmptyString(address?.entityId) + ? String(address?.entityId) + : ''; + const authorName = hasNonEmptyString(change.author) ? change.author : ''; + const authorEmail = hasNonEmptyString(change.authorEmail) ? change.authorEmail : ''; + const excerpt = hasNonEmptyString(change.excerpt) ? change.excerpt : ''; + return { + type, + id, + author: authorName || authorEmail || 'unknown', + excerpt, + }; + }) + : asArray(record?.matches).map((entry) => { + const match = asRecord(entry) ?? {}; + const id = hasNonEmptyString(match.entityId) ? 
match.entityId : ''; + return { + type: 'change', + id, + author: 'unknown', + excerpt: '', + }; + }); + + const lines: string[] = [`Revision ${ctx.revision}: ${total} tracked changes`]; + if (rows.length === 0) return lines[0]; + + lines.push(''); + const shownRows = rows.slice(0, PRETTY_ROW_LIMIT); + const maxType = Math.max(1, ...shownRows.map((row) => `[${row.type}]`.length)); + const maxId = Math.max(8, ...shownRows.map((row) => row.id.length)); + + for (const row of shownRows) { + const type = padCol(`[${row.type}]`, maxType); + const id = padCol(row.id, maxId); + const author = padCol(row.author, 20); + const excerpt = truncate(toSingleLine(row.excerpt), 50); + lines.push(`${type} ${id} ${author} "${excerpt}"`); + } + + const remaining = moreLine(shownRows.length, Math.max(total, rows.length)); + if (remaining) lines.push(remaining); + return lines.join('\n'); +} + +function formatDocumentInfo(result: unknown, ctx: FormatContext): string { + const record = asRecord(result); + if (!record) return `Revision ${ctx.revision}: retrieved info`; + + const counts = asRecord(record.counts) ?? 
{}; + const outline = asArray(record.outline) + .map((entry) => asRecord(entry)) + .filter((entry): entry is Record => Boolean(entry)); + + const words = safeNumber(counts.words, 0); + const paragraphs = safeNumber(counts.paragraphs, 0); + const headings = safeNumber(counts.headings, 0); + const tables = safeNumber(counts.tables, 0); + const images = safeNumber(counts.images, 0); + const comments = safeNumber(counts.comments, 0); + + const lines: string[] = [ + `Revision ${ctx.revision}: ${words} words, ${paragraphs} paragraphs, ${headings} headings, ${tables} tables, ${images} images, ${comments} comments`, + ]; + + if (outline.length === 0) return lines[0]; + + lines.push(''); + lines.push('Outline:'); + const shownOutline = outline.slice(0, PRETTY_ROW_LIMIT); + for (const entry of shownOutline) { + const level = Math.max(1, Math.floor(safeNumber(entry.level, 1))); + const indent = ' '.repeat(level - 1); + const text = truncate(toSingleLine(hasNonEmptyString(entry.text) ? entry.text : ''), 60) || '(untitled)'; + lines.push(` ${indent}${text}`); + } + + const remaining = moreLine(shownOutline.length, outline.length); + if (remaining) lines.push(` ${remaining}`); + + return lines.join('\n'); +} + +// --------------------------------------------------------------------------- +// Dispatch +// --------------------------------------------------------------------------- + +type Formatter = (result: unknown, ctx: FormatContext) => string | null; + +const FORMAT_DISPATCH: Partial> = { + queryResult: (result, ctx) => formatFindPretty(result as Parameters[0], ctx.revision), + nodeInfo: (result, ctx) => buildNodePretty(ctx.revision, 'resolved node', result), + commentList: (result, ctx) => formatCommentList(result, ctx), + listResult: (result, ctx) => formatListResult(result, ctx), + trackChangeList: (result, ctx) => formatTrackChangeList(result, ctx), + documentInfo: (result, ctx) => formatDocumentInfo(result, ctx), +}; + +/** + * Formats the invoke() result for pretty 
output. + * + * Returns a formatted string for operations with custom formatters, or null + * to fall back to the default `Revision N: ` output. + */ +export function formatOutput(operationId: CliExposedOperationId, result: unknown, ctx: FormatContext): string | null { + const format = OUTPUT_FORMAT[operationId]; + const formatter = FORMAT_DISPATCH[format]; + if (!formatter) return null; + return formatter(result, ctx); +} diff --git a/apps/cli/src/lib/payload.ts b/apps/cli/src/lib/payload.ts new file mode 100644 index 0000000000..9a3a2ff57c --- /dev/null +++ b/apps/cli/src/lib/payload.ts @@ -0,0 +1,121 @@ +import type { + CreateParagraphInput, + ListInsertInput, + ListItemAddress, + ListsListQuery, + ListSetTypeInput, + ListTargetInput, + NodeAddress, + TextAddress, +} from './types'; +import { CliError } from './errors'; +import { resolveJsonInput, type ParsedArgs } from './args'; +import { + validateCreateParagraphInput, + validateListInsertInput, + validateListsListQuery, + validateListItemAddress, + validateListSetTypeInput, + validateListTargetInput, + validateNodeAddress, + validateTextAddress, +} from './validate'; + +export async function resolveTextAddressPayload( + parsed: ParsedArgs, + baseName = 'target', +): Promise { + const payload = await resolveJsonInput(parsed, baseName); + if (!payload) return undefined; + return validateTextAddress(payload, baseName); +} + +export async function requireTextAddressPayload( + parsed: ParsedArgs, + commandName: string, + baseName = 'target', +): Promise { + const payload = await resolveTextAddressPayload(parsed, baseName); + if (!payload) { + throw new CliError('MISSING_REQUIRED', `${commandName}: provide --${baseName}-json or --${baseName}-file.`); + } + return payload; +} + +export async function requireNodeAddressPayload( + parsed: ParsedArgs, + commandName: string, + baseName = 'address', +): Promise { + const payload = await resolveJsonInput(parsed, baseName); + if (!payload) { + throw new 
CliError('MISSING_REQUIRED', `${commandName}: provide --${baseName}-json or --${baseName}-file.`); + } + return validateNodeAddress(payload, baseName); +} + +export async function resolveCreateParagraphPayload( + parsed: ParsedArgs, + baseName = 'input', +): Promise { + const payload = await resolveJsonInput(parsed, baseName); + if (!payload) return undefined; + return validateCreateParagraphInput(payload, baseName); +} + +export async function resolveListsListQueryPayload( + parsed: ParsedArgs, + baseName = 'query', +): Promise { + const payload = await resolveJsonInput(parsed, baseName); + if (!payload) return undefined; + return validateListsListQuery(payload, baseName); +} + +export async function resolveListItemAddressPayload( + parsed: ParsedArgs, + baseName = 'target', +): Promise { + const payload = await resolveJsonInput(parsed, baseName); + if (!payload) return undefined; + return validateListItemAddress(payload, baseName); +} + +export async function requireListItemAddressPayload( + parsed: ParsedArgs, + commandName: string, + baseName = 'target', +): Promise { + const payload = await resolveListItemAddressPayload(parsed, baseName); + if (!payload) { + throw new CliError('MISSING_REQUIRED', `${commandName}: provide --${baseName}-json or --${baseName}-file.`); + } + return payload; +} + +export async function resolveListInsertPayload( + parsed: ParsedArgs, + baseName = 'input', +): Promise { + const payload = await resolveJsonInput(parsed, baseName); + if (!payload) return undefined; + return validateListInsertInput(payload, baseName); +} + +export async function resolveListSetTypePayload( + parsed: ParsedArgs, + baseName = 'input', +): Promise { + const payload = await resolveJsonInput(parsed, baseName); + if (!payload) return undefined; + return validateListSetTypeInput(payload, baseName); +} + +export async function resolveListTargetPayload( + parsed: ParsedArgs, + baseName = 'input', +): Promise { + const payload = await resolveJsonInput(parsed, 
baseName); + if (!payload) return undefined; + return validateListTargetInput(payload, baseName); +} diff --git a/apps/cli/src/lib/pretty-helpers.ts b/apps/cli/src/lib/pretty-helpers.ts new file mode 100644 index 0000000000..e41e2b93af --- /dev/null +++ b/apps/cli/src/lib/pretty-helpers.ts @@ -0,0 +1,24 @@ +export const PRETTY_ROW_LIMIT = 20; + +export function truncate(text: string, maxLen: number): string { + if (maxLen <= 3) return text.slice(0, Math.max(0, maxLen)); + if (text.length <= maxLen) return text; + return `${text.slice(0, maxLen - 3)}...`; +} + +export function toSingleLine(text: string): string { + return text.replace(/\s+/g, ' ').trim(); +} + +export function padCol(text: string, width: number): string { + return text.padEnd(width); +} + +export function safeNumber(value: unknown, fallback = 0): number { + return typeof value === 'number' && Number.isFinite(value) ? value : fallback; +} + +export function moreLine(shown: number, total: number): string | null { + if (total <= shown) return null; + return `...and ${total - shown} more`; +} diff --git a/apps/cli/src/lib/read-orchestrator.ts b/apps/cli/src/lib/read-orchestrator.ts new file mode 100644 index 0000000000..2c03988ea0 --- /dev/null +++ b/apps/cli/src/lib/read-orchestrator.ts @@ -0,0 +1,175 @@ +/** + * Generic read orchestrator — handles all read (non-mutating) doc operations. + * + * Replaces the per-operation runReadOperation() calls scattered across + * operation-extra-invokers.ts with a single generic path. 
+ */ + +import { RESPONSE_ENVELOPE_KEY, SUCCESS_VERB } from '../cli/operation-hints.js'; +import type { CliExposedOperationId } from '../cli/operation-set.js'; +import { cliCommandTokens } from '../cli/operation-set.js'; +import { withActiveContext } from './context.js'; +import { openDocument, openSessionDocument, type EditorWithDoc } from './document.js'; +import { mapInvokeError } from './error-mapping.js'; +import { formatOutput } from './output-formatters.js'; +import { syncCollaborativeSessionSnapshot } from './session-collab.js'; +import { PRE_INVOKE_HOOKS, POST_INVOKE_HOOKS } from './special-handlers.js'; +import type { CommandExecution } from './types.js'; +import type { DocOperationRequest } from './generic-dispatch.js'; +import { readOptionalString } from './input-readers.js'; +import { extractInvokeInput } from './invoke-input.js'; + +type DocumentPayload = { + path?: string; + source: 'path' | 'stdin'; + byteLength: number; + revision: number; +}; + +function deriveCommandName(operationId: CliExposedOperationId): string { + return cliCommandTokens(`doc.${operationId}` as `doc.${CliExposedOperationId}`).join(' '); +} + +function invokeOperation( + editor: EditorWithDoc, + operationId: CliExposedOperationId, + input: Record, +): unknown { + const apiInput = extractInvokeInput(operationId, input); + const preHook = PRE_INVOKE_HOOKS[operationId]; + const transformedInput = preHook ? preHook(apiInput as Record, { editor }) : apiInput; + + let result: unknown; + try { + result = editor.doc.invoke({ + operationId, + input: transformedInput, + }); + } catch (error) { + throw mapInvokeError(operationId, error); + } + + const postHook = POST_INVOKE_HOOKS[operationId]; + return postHook ? postHook(result, { editor, apiInput: transformedInput }) : result; +} + +/** + * Input fields to echo in the response envelope alongside the result. + * For example, `find` echoes the `query` input so callers can correlate results. 
+ */ +const ECHO_INPUT_FIELDS: Partial> = { + find: ['query'], +}; + +function buildEnvelopeData( + operationId: CliExposedOperationId, + document: DocumentPayload, + result: unknown, + input: Record, +): Record { + const envelopeKey = RESPONSE_ENVELOPE_KEY[operationId]; + + const echoFields = ECHO_INPUT_FIELDS[operationId]; + const extras: Record = {}; + if (echoFields) { + for (const field of echoFields) { + if (input[field] != null) extras[field] = input[field]; + } + } + + if (envelopeKey === null) { + // Spread result across top-level keys (e.g. info → counts, outline, capabilities) + const resultObj = typeof result === 'object' && result != null ? result : {}; + return { document, ...(resultObj as Record), ...extras }; + } + + return { document, [envelopeKey]: result, ...extras }; +} + +function buildPrettyOutput(operationId: CliExposedOperationId, document: DocumentPayload, result: unknown): string { + const formatted = formatOutput(operationId, result, { revision: document.revision }); + if (formatted != null) return formatted; + + return `Revision ${document.revision}: ${SUCCESS_VERB[operationId]}`; +} + +export async function executeReadOperation(request: DocOperationRequest): Promise { + const { operationId, input, context } = request; + const doc = readOptionalString(input, 'doc'); + const commandName = deriveCommandName(operationId); + + if (doc) { + const source = doc === '-' ? 'stdin' : 'path'; + const opened = await openDocument(doc, context.io); + try { + const result = invokeOperation(opened.editor, operationId, input); + const document: DocumentPayload = { + path: source === 'path' ? 
doc : undefined, + source, + byteLength: opened.meta.byteLength, + revision: 0, + }; + + return { + command: commandName, + data: buildEnvelopeData(operationId, document, result, input), + pretty: buildPrettyOutput(operationId, document, result), + }; + } finally { + opened.dispose(); + } + } + + return withActiveContext( + context.io, + commandName, + async ({ metadata, paths }) => { + if (metadata.sessionType === 'collab') { + const opened = await openSessionDocument(paths.workingDocPath, context.io, metadata, { + sessionId: context.sessionId ?? metadata.contextId, + executionMode: context.executionMode, + collabSessionPool: context.collabSessionPool, + }); + + try { + const result = invokeOperation(opened.editor, operationId, input); + const synced = await syncCollaborativeSessionSnapshot(context.io, metadata, paths, opened.editor); + const document: DocumentPayload = { + path: synced.updatedMetadata.sourcePath, + source: synced.updatedMetadata.source, + byteLength: synced.output.byteLength, + revision: synced.updatedMetadata.revision, + }; + + return { + command: commandName, + data: buildEnvelopeData(operationId, document, result, input), + pretty: buildPrettyOutput(operationId, document, result), + }; + } finally { + opened.dispose(); + } + } + + const opened = await openDocument(paths.workingDocPath, context.io); + try { + const result = invokeOperation(opened.editor, operationId, input); + const document: DocumentPayload = { + path: metadata.sourcePath, + source: metadata.source, + byteLength: opened.meta.byteLength, + revision: metadata.revision, + }; + + return { + command: commandName, + data: buildEnvelopeData(operationId, document, result, input), + pretty: buildPrettyOutput(operationId, document, result), + }; + } finally { + opened.dispose(); + } + }, + context.sessionId, + ); +} diff --git a/apps/cli/src/lib/session-collab.ts b/apps/cli/src/lib/session-collab.ts new file mode 100644 index 0000000000..71505189be --- /dev/null +++ 
b/apps/cli/src/lib/session-collab.ts @@ -0,0 +1,37 @@ +import type { Editor } from 'superdoc/super-editor'; +import { markContextUpdated, type ContextMetadata, type ContextPaths, writeContextMetadata } from './context'; +import { exportToPath, getFileChecksum } from './document'; +import { CliError } from './errors'; +import type { CliIO } from './types'; + +export async function syncCollaborativeSessionSnapshot( + io: CliIO, + metadata: ContextMetadata, + paths: ContextPaths, + editor: Editor, +): Promise<{ + output: { path: string; byteLength: number }; + updatedMetadata: ContextMetadata; + changed: boolean; +}> { + if (metadata.sessionType !== 'collab') { + throw new CliError('COMMAND_FAILED', 'syncCollaborativeSessionSnapshot called for a non-collaborative session.'); + } + + const beforeChecksum = await getFileChecksum(paths.workingDocPath); + const output = await exportToPath(editor, paths.workingDocPath, true); + const afterChecksum = await getFileChecksum(paths.workingDocPath); + const changed = beforeChecksum !== afterChecksum; + + const updatedMetadata = markContextUpdated(io, metadata, { + dirty: false, + revision: changed ? 
metadata.revision + 1 : metadata.revision, + }); + await writeContextMetadata(paths, updatedMetadata); + + return { + output, + updatedMetadata, + changed, + }; +} diff --git a/apps/cli/src/lib/session.ts b/apps/cli/src/lib/session.ts new file mode 100644 index 0000000000..1c5868b0a7 --- /dev/null +++ b/apps/cli/src/lib/session.ts @@ -0,0 +1,49 @@ +import { randomBytes } from 'node:crypto'; +import { basename, extname } from 'node:path'; +import { CliError } from './errors'; + +const SESSION_ID_PATTERN = /^[A-Za-z0-9._-]{1,64}$/; +const MAX_SESSION_ID_LENGTH = 64; +const GENERATED_SUFFIX_LENGTH = 6; + +function normalizeSessionBase(value: string): string { + const normalized = value + .toLowerCase() + .replace(/[^a-z0-9._-]+/g, '-') + .replace(/-{2,}/g, '-') + .replace(/^[._-]+|[._-]+$/g, ''); + + return normalized || 'session'; +} + +function deriveBaseFromDoc(docArg: string): string { + if (docArg === '-') return 'stdin'; + + const fileName = basename(docArg); + const extension = extname(fileName); + const stem = extension.length > 0 ? 
fileName.slice(0, -extension.length) : fileName; + return normalizeSessionBase(stem || fileName || 'session'); +} + +export function validateSessionId(value: string, source = '--session'): string { + if (!SESSION_ID_PATTERN.test(value)) { + throw new CliError( + 'SESSION_ID_INVALID', + `${source} must be 1-64 characters using only letters, numbers, dot, underscore, or dash.`, + { + value, + }, + ); + } + + return value; +} + +export function generateSessionId(docArg: string): string { + const base = deriveBaseFromDoc(docArg); + const suffix = randomBytes(4).toString('hex').slice(0, GENERATED_SUFFIX_LENGTH); + + const maxBaseLength = MAX_SESSION_ID_LENGTH - suffix.length - 1; + const trimmedBase = base.slice(0, maxBaseLength).replace(/[._-]+$/g, '') || 'session'; + return `${trimmedBase}-${suffix}`; +} diff --git a/apps/cli/src/lib/special-handlers.ts b/apps/cli/src/lib/special-handlers.ts new file mode 100644 index 0000000000..8d65423aa9 --- /dev/null +++ b/apps/cli/src/lib/special-handlers.ts @@ -0,0 +1,264 @@ +/** + * Special-handlers registry — explicit per-operation exception hooks. + * + * Operations NOT in these maps use the fully generic path. + * Every entry must have a comment explaining why it exists. + * + * Boundary rule: if this file grows past ~15 entries, that signals + * capability should move into document-api. 
+ */ + +import { createHash } from 'node:crypto'; +import type { CliExposedOperationId } from '../cli/operation-set.js'; +import type { EditorWithDoc } from './document.js'; + +// --------------------------------------------------------------------------- +// Hook types +// --------------------------------------------------------------------------- + +type HookContext = { + editor: EditorWithDoc; + apiInput?: unknown; +}; + +type PreInvokeHook = (input: unknown, context: HookContext) => unknown; + +type PostInvokeHook = (result: unknown, context: HookContext) => unknown; + +// --------------------------------------------------------------------------- +// Track-changes stable-ID helpers +// --------------------------------------------------------------------------- + +type TrackChangeLike = Record; + +function asRecord(value: unknown): Record | null { + if (typeof value !== 'object' || value == null || Array.isArray(value)) return null; + return value as Record; +} + +function asArray(value: unknown): unknown[] { + return Array.isArray(value) ? value : []; +} + +function asTrackChangeAddress(value: unknown): { kind: string; entityType: string; entityId: string } | null { + const record = asRecord(value); + if (!record) return null; + if (record.kind !== 'entity' || record.entityType !== 'trackedChange') return null; + if (typeof record.entityId !== 'string' || record.entityId.length === 0) return null; + return { + kind: 'entity', + entityType: 'trackedChange', + entityId: record.entityId, + }; +} + +function stableTrackChangeSignature(change: TrackChangeLike): string { + const type = typeof change.type === 'string' ? change.type : ''; + const author = typeof change.author === 'string' ? change.author : ''; + const authorEmail = typeof change.authorEmail === 'string' ? change.authorEmail : ''; + const date = typeof change.date === 'string' ? change.date : ''; + const excerpt = typeof change.excerpt === 'string' ? 
change.excerpt : ''; + return `${type}|${author}|${authorEmail}|${date}|${excerpt}`; +} + +/** + * Builds stable-ID ↔ raw-ID mappings from a track-changes list result. + * The CLI uses SHA-1-based stable IDs instead of adapter raw IDs. + */ +function buildStableIdMappings(rawListResult: unknown): { + normalizedResult: unknown; + stableToRawId: Map; + rawToStableId: Map; +} { + const record = asRecord(rawListResult); + if (!record) { + return { normalizedResult: rawListResult, stableToRawId: new Map(), rawToStableId: new Map() }; + } + + const stableToRawId = new Map(); + const rawToStableId = new Map(); + const signatureCounts = new Map(); + + const normalizedChanges = asArray(record.changes) + .map((entry) => asRecord(entry)) + .filter((entry): entry is Record => Boolean(entry)) + .map((entry) => { + const rawId = + (typeof entry.id === 'string' && entry.id.length > 0 ? entry.id : undefined) ?? + asTrackChangeAddress(entry.address)?.entityId; + if (!rawId) return entry; + + const signature = stableTrackChangeSignature(entry); + const hash = createHash('sha1').update(signature).digest('hex').slice(0, 24); + const nextCount = (signatureCounts.get(hash) ?? 0) + 1; + signatureCounts.set(hash, nextCount); + const stableId = nextCount === 1 ? hash : `${hash}-${nextCount}`; + + stableToRawId.set(stableId, rawId); + rawToStableId.set(rawId, stableId); + + const normalizedAddress = asTrackChangeAddress(entry.address); + return { + ...entry, + id: stableId, + address: normalizedAddress ? { ...normalizedAddress, entityId: stableId } : entry.address, + }; + }); + + const normalizedMatches = asArray(record.matches).map((entry) => { + const address = asTrackChangeAddress(entry); + if (!address) return entry; + const stableId = rawToStableId.get(address.entityId) ?? address.entityId; + return { ...address, entityId: stableId }; + }); + + return { + normalizedResult: { + ...record, + matches: normalizedMatches, + changes: normalizedChanges.length > 0 ? 
normalizedChanges : record.changes, + }, + stableToRawId, + rawToStableId, + }; +} + +// --------------------------------------------------------------------------- +// Pre-invoke hooks +// --------------------------------------------------------------------------- + +/** + * Track-changes mutations (accept/reject/get) need stable-ID → raw-ID + * translation because the CLI uses SHA-1-based stable IDs. + */ +const resolveTrackChangeId: PreInvokeHook = (input, context) => { + const record = asRecord(input); + if (!record) return input; + + const stableId = typeof record.id === 'string' ? record.id : undefined; + if (!stableId) return input; + + // List all track changes to build the stable → raw mapping + const listResult = context.editor.doc.invoke({ + operationId: 'trackChanges.list' as const, + input: {}, + }); + const { stableToRawId } = buildStableIdMappings(listResult); + const rawId = stableToRawId.get(stableId) ?? stableId; + + return { ...record, id: rawId }; +}; + +// --------------------------------------------------------------------------- +// Post-invoke hooks +// --------------------------------------------------------------------------- + +/** + * Track-changes list returns raw adapter IDs — normalize to stable IDs. + */ +const normalizeTrackChangesListIds: PostInvokeHook = (result) => { + return buildStableIdMappings(result).normalizedResult; +}; + +/** + * Track-changes get returns a single change with a raw adapter ID — normalize. + */ +const normalizeTrackChangeGetId: PostInvokeHook = (result, context) => { + const record = asRecord(result); + if (!record) return result; + + // We need the full list to build the raw → stable mapping + const listResult = context.editor.doc.invoke({ + operationId: 'trackChanges.list' as const, + input: {}, + }); + const { rawToStableId } = buildStableIdMappings(listResult); + + const rawId = typeof record.id === 'string' ? 
record.id : undefined; + if (!rawId) return result; + + const stableId = rawToStableId.get(rawId) ?? rawId; + const normalizedAddress = asTrackChangeAddress(record.address); + + return { + ...record, + id: stableId, + address: normalizedAddress ? { ...normalizedAddress, entityId: stableId } : record.address, + }; +}; + +// --------------------------------------------------------------------------- +// Text-mutation receipt flattening +// --------------------------------------------------------------------------- + +/** + * Text mutations (insert/replace/delete/format.*) return a TextMutationReceipt. + * The CLI response hoists `resolution.target` and `resolution.range` to the + * top level alongside the full receipt for backwards-compatible envelope shape: + * { target, resolvedRange, receipt, ... } + */ +const flattenTextMutationReceipt: PostInvokeHook = (result) => { + const record = asRecord(result); + if (!record) return { receipt: result }; + + const resolution = asRecord(record.resolution); + return { + target: resolution?.target, + resolvedRange: resolution?.range, + receipt: result, + }; +}; + +// --------------------------------------------------------------------------- +// comments.setActive input normalization +// --------------------------------------------------------------------------- + +/** + * comments.setActive accepts either `--id ` or `--clear`. + * The API expects `{ commentId: string | null }`. + * - `--clear` → `{ commentId: null }` + * - `--id X` → rename handled by PARAM_RENAMES (id → commentId) + */ +const normalizeSetActiveInput: PreInvokeHook = (input) => { + const record = asRecord(input); + if (!record) return input; + + if (record.clear === true) { + return { commentId: null }; + } + return input; +}; + +/** Pre-invoke: custom input resolution before calling editor.doc.invoke(). 
*/ +export const PRE_INVOKE_HOOKS: Partial> = { + // Track-changes mutations need stable-ID → raw-ID translation + 'trackChanges.accept': resolveTrackChangeId, + 'trackChanges.reject': resolveTrackChangeId, + 'trackChanges.get': resolveTrackChangeId, + // comments.setActive --clear → { commentId: null } + 'comments.setActive': normalizeSetActiveInput, +}; + +/** Post-invoke: transform the raw invoke() result before envelope wrapping. */ +export const POST_INVOKE_HOOKS: Partial> = { + // Track-changes list/get results need raw-ID → stable-ID normalization + 'trackChanges.list': normalizeTrackChangesListIds, + 'trackChanges.get': normalizeTrackChangeGetId, + // Text mutations hoist target/resolvedRange from receipt.resolution + insert: flattenTextMutationReceipt, + replace: flattenTextMutationReceipt, + delete: flattenTextMutationReceipt, + 'format.bold': flattenTextMutationReceipt, + 'format.italic': flattenTextMutationReceipt, + 'format.underline': flattenTextMutationReceipt, + 'format.strikethrough': flattenTextMutationReceipt, + // getNodeById: merge nodeId from input into result for pretty output + getNodeById: (result, context) => { + const record = asRecord(result); + const inputRecord = asRecord(context.apiInput); + if (!record || !inputRecord) return result; + const nodeId = typeof inputRecord.nodeId === 'string' ? 
inputRecord.nodeId : undefined; + if (!nodeId) return result; + return { ...record, nodeId }; + }, +}; diff --git a/apps/cli/src/lib/types.ts b/apps/cli/src/lib/types.ts new file mode 100644 index 0000000000..4e74684670 --- /dev/null +++ b/apps/cli/src/lib/types.ts @@ -0,0 +1,85 @@ +import type { + BlockNodeType as DocumentApiBlockNodeType, + CreateParagraphInput as DocumentApiCreateParagraphInput, + CreateParagraphResult as DocumentApiCreateParagraphResult, + ListInsertInput as DocumentApiListInsertInput, + ListItemAddress as DocumentApiListItemAddress, + ListItemInfo as DocumentApiListItemInfo, + ListKind as DocumentApiListKind, + ListSetTypeInput as DocumentApiListSetTypeInput, + ListsExitResult as DocumentApiListsExitResult, + ListsGetInput as DocumentApiListsGetInput, + ListsInsertResult as DocumentApiListsInsertResult, + ListsListQuery as DocumentApiListsListQuery, + ListsListResult as DocumentApiListsListResult, + ListsMutateItemResult as DocumentApiListsMutateItemResult, + ListTargetInput as DocumentApiListTargetInput, + NodeAddress as DocumentApiNodeAddress, + NodeKind as DocumentApiNodeKind, + NodeType as DocumentApiNodeType, + Query as DocumentApiQuery, + QueryResult as DocumentApiQueryResult, + Selector as DocumentApiSelector, + TextAddress as DocumentApiTextAddress, +} from '@superdoc/document-api'; +import type { CollaborationSessionPool } from '../host/collab-session-pool'; + +export type NodeKind = DocumentApiNodeKind; +export type NodeType = DocumentApiNodeType; +export type BlockNodeType = DocumentApiBlockNodeType; +export type NodeAddress = DocumentApiNodeAddress; +export type TextAddress = DocumentApiTextAddress; +export type CreateParagraphInput = DocumentApiCreateParagraphInput; +export type CreateParagraphResult = DocumentApiCreateParagraphResult; +export type ListItemAddress = DocumentApiListItemAddress; +export type ListItemInfo = DocumentApiListItemInfo; +export type ListKind = DocumentApiListKind; +export type ListsListQuery = 
DocumentApiListsListQuery; +export type ListsListResult = DocumentApiListsListResult; +export type ListsGetInput = DocumentApiListsGetInput; +export type ListInsertInput = DocumentApiListInsertInput; +export type ListSetTypeInput = DocumentApiListSetTypeInput; +export type ListTargetInput = DocumentApiListTargetInput; +export type ListsInsertResult = DocumentApiListsInsertResult; +export type ListsMutateItemResult = DocumentApiListsMutateItemResult; +export type ListsExitResult = DocumentApiListsExitResult; +export type Selector = DocumentApiSelector; +export type Query = DocumentApiQuery; +export type QueryResult = DocumentApiQueryResult; + +export type OutputMode = 'json' | 'pretty'; +export type ExecutionMode = 'oneshot' | 'host'; + +export interface GlobalOptions { + output: OutputMode; + timeoutMs?: number; + sessionId?: string; + help: boolean; +} + +export interface CliIO { + stdout(message: string): void; + stderr(message: string): void; + readStdinBytes(): Promise; + now(): number; +} + +export interface CommandExecution { + command: string; + data: unknown; + pretty: string; +} + +export interface CommandContext { + io: CliIO; + timeoutMs?: number; + sessionId?: string; + executionMode?: ExecutionMode; + collabSessionPool?: CollaborationSessionPool; +} + +export interface DocumentSourceMeta { + source: 'path' | 'stdin'; + path?: string; + byteLength: number; +} diff --git a/apps/cli/src/lib/validate.ts b/apps/cli/src/lib/validate.ts new file mode 100644 index 0000000000..02ac03f2cd --- /dev/null +++ b/apps/cli/src/lib/validate.ts @@ -0,0 +1,391 @@ +import { CliError } from './errors'; +import { isRecord } from './guards'; +import type { + BlockNodeType, + CreateParagraphInput, + ListInsertInput, + ListItemAddress, + ListKind, + ListsListQuery, + ListSetTypeInput, + ListTargetInput, + NodeAddress, + NodeKind, + NodeType, + Query, + TextAddress, +} from './types'; +import { + BLOCK_NODE_TYPES as DOCUMENT_API_BLOCK_NODE_TYPES, + LIST_INSERT_POSITIONS as 
DOCUMENT_API_LIST_INSERT_POSITIONS, + LIST_KINDS as DOCUMENT_API_LIST_KINDS, + NODE_KINDS as DOCUMENT_API_NODE_KINDS, + NODE_TYPES as DOCUMENT_API_NODE_TYPES, +} from '@superdoc/document-api'; + +const NODE_TYPES = new Set(DOCUMENT_API_NODE_TYPES); +const BLOCK_NODE_TYPES = new Set(DOCUMENT_API_BLOCK_NODE_TYPES); +const NODE_KINDS = new Set(DOCUMENT_API_NODE_KINDS); +const LIST_KINDS = new Set(DOCUMENT_API_LIST_KINDS); +const LIST_INSERT_POSITIONS = new Set<string>(DOCUMENT_API_LIST_INSERT_POSITIONS); + +function expectRecord(value: unknown, path: string): Record<string, unknown> { + if (!isRecord(value)) { + throw new CliError('VALIDATION_ERROR', `${path} must be an object.`); + } + return value; +} + +function expectString(value: unknown, path: string): string { + if (typeof value !== 'string' || value.length === 0) { + throw new CliError('VALIDATION_ERROR', `${path} must be a non-empty string.`); + } + return value; +} + +function expectNumber(value: unknown, path: string): number { + if (typeof value !== 'number' || !Number.isFinite(value)) { + throw new CliError('VALIDATION_ERROR', `${path} must be a finite number.`); + } + return value; +} + +function expectNonNegativeInteger(value: unknown, path: string): number { + const numberValue = expectNumber(value, path); + if (!Number.isInteger(numberValue) || numberValue < 0) { + throw new CliError('VALIDATION_ERROR', `${path} must be a non-negative integer.`); + } + return numberValue; +} + +function expectOnlyKeys(obj: Record<string, unknown>, allowedKeys: readonly string[], path: string): void { + const allowed = new Set(allowedKeys); + for (const key of Object.keys(obj)) { + if (!allowed.has(key)) { + throw new CliError('VALIDATION_ERROR', `${path}.${key} is not allowed.`); + } + } +} + +function validateNodeType(value: unknown, path: string): NodeType { + const nodeType = expectString(value, path); + if (!NODE_TYPES.has(nodeType as NodeType)) { + throw new CliError('VALIDATION_ERROR', `${path} must be a supported node type.`); + } + return nodeType as 
NodeType; +} + +function validateBlockNodeType(value: unknown, path: string): BlockNodeType { + const nodeType = validateNodeType(value, path); + if (!BLOCK_NODE_TYPES.has(nodeType as BlockNodeType)) { + throw new CliError('VALIDATION_ERROR', `${path} must be a supported block node type.`); + } + return nodeType as BlockNodeType; +} + +function validateRange(value: unknown, path: string): { start: number; end: number } { + const obj = expectRecord(value, path); + const start = expectNonNegativeInteger(obj.start, `${path}.start`); + const end = expectNonNegativeInteger(obj.end, `${path}.end`); + if (end < start) { + throw new CliError('VALIDATION_ERROR', `${path}.end must be greater than or equal to ${path}.start.`); + } + return { start, end }; +} + +function validateInlineAnchor(value: unknown, path: string): Extract<NodeAddress, { kind: 'inline' }>['anchor'] { + const obj = expectRecord(value, path); + const startObj = expectRecord(obj.start, `${path}.start`); + const endObj = expectRecord(obj.end, `${path}.end`); + + const startBlockId = expectString(startObj.blockId, `${path}.start.blockId`); + const endBlockId = expectString(endObj.blockId, `${path}.end.blockId`); + const startOffset = expectNonNegativeInteger(startObj.offset, `${path}.start.offset`); + const endOffset = expectNonNegativeInteger(endObj.offset, `${path}.end.offset`); + + if (startBlockId !== endBlockId) { + throw new CliError('VALIDATION_ERROR', `${path} must not span multiple blocks.`); + } + if (endOffset < startOffset) { + throw new CliError('VALIDATION_ERROR', `${path}.end.offset must be greater than or equal to start.offset.`); + } + + return { + start: { blockId: startBlockId, offset: startOffset }, + end: { blockId: endBlockId, offset: endOffset }, + }; +} + +export function validateNodeAddress(value: unknown, path = 'address'): NodeAddress { + const obj = expectRecord(value, path); + const kind = expectString(obj.kind, `${path}.kind`); + + if (kind === 'block') { + return { + kind: 'block', + nodeType: 
validateBlockNodeType(obj.nodeType, `${path}.nodeType`), + nodeId: expectString(obj.nodeId, `${path}.nodeId`), + }; + } + + if (kind === 'inline') { + return { + kind: 'inline', + nodeType: validateNodeType(obj.nodeType, `${path}.nodeType`) as Extract< + NodeAddress, + { kind: 'inline' } + >['nodeType'], + anchor: validateInlineAnchor(obj.anchor, `${path}.anchor`), + }; + } + + throw new CliError('VALIDATION_ERROR', `${path}.kind must be one of: block, inline.`); +} + +export function validateTextAddress(value: unknown, path = 'target'): TextAddress { + const obj = expectRecord(value, path); + const kind = expectString(obj.kind, `${path}.kind`); + + if (kind !== 'text') { + throw new CliError('VALIDATION_ERROR', `${path}.kind must be "text".`); + } + + return { + kind: 'text', + blockId: expectString(obj.blockId, `${path}.blockId`), + range: validateRange(obj.range, `${path}.range`), + }; +} + +export function validateListItemAddress(value: unknown, path = 'target'): ListItemAddress { + const address = validateNodeAddress(value, path); + if (address.kind !== 'block' || address.nodeType !== 'listItem') { + throw new CliError('VALIDATION_ERROR', `${path} must be a block listItem address.`); + } + return address as ListItemAddress; +} + +export function validateListsListQuery(value: unknown, path = 'query'): ListsListQuery { + const obj = expectRecord(value, path); + const query: ListsListQuery = {}; + + if (obj.within != null) { + const within = validateNodeAddress(obj.within, `${path}.within`); + if (within.kind !== 'block') { + throw new CliError('VALIDATION_ERROR', `${path}.within.kind must be "block".`); + } + query.within = within; + } + + if (obj.limit != null) { + query.limit = expectNonNegativeInteger(obj.limit, `${path}.limit`); + } + + if (obj.offset != null) { + query.offset = expectNonNegativeInteger(obj.offset, `${path}.offset`); + } + + if (obj.kind != null) { + const kind = expectString(obj.kind, `${path}.kind`); + if (!LIST_KINDS.has(kind as 
ListKind)) { + throw new CliError('VALIDATION_ERROR', `${path}.kind must be "ordered" or "bullet".`); + } + query.kind = kind as ListKind; + } + + if (obj.level != null) { + query.level = expectNonNegativeInteger(obj.level, `${path}.level`); + } + + if (obj.ordinal != null) { + query.ordinal = expectNonNegativeInteger(obj.ordinal, `${path}.ordinal`); + } + + return query; +} + +export function validateListTargetInput(value: unknown, path = 'input'): ListTargetInput { + const obj = expectRecord(value, path); + return { + target: validateListItemAddress(obj.target, `${path}.target`), + }; +} + +export function validateListSetTypeInput(value: unknown, path = 'input'): ListSetTypeInput { + const obj = expectRecord(value, path); + const kind = expectString(obj.kind, `${path}.kind`); + if (!LIST_KINDS.has(kind as ListKind)) { + throw new CliError('VALIDATION_ERROR', `${path}.kind must be "ordered" or "bullet".`); + } + + return { + target: validateListItemAddress(obj.target, `${path}.target`), + kind: kind as ListKind, + }; +} + +export function validateListInsertInput(value: unknown, path = 'input'): ListInsertInput { + const obj = expectRecord(value, path); + const position = expectString(obj.position, `${path}.position`); + if (!LIST_INSERT_POSITIONS.has(position)) { + throw new CliError('VALIDATION_ERROR', `${path}.position must be "before" or "after".`); + } + + if (obj.text != null && typeof obj.text !== 'string') { + throw new CliError('VALIDATION_ERROR', `${path}.text must be a string.`); + } + + return { + target: validateListItemAddress(obj.target, `${path}.target`), + position: position as ListInsertInput['position'], + text: typeof obj.text === 'string' ? 
obj.text : undefined, + }; +} + +function validateCreateParagraphLocation(value: unknown, path: string): NonNullable<CreateParagraphInput['at']> { + const obj = expectRecord(value, path); + const kind = expectString(obj.kind, `${path}.kind`); + + if (kind === 'documentStart' || kind === 'documentEnd') { + return { kind }; + } + + if (kind === 'before' || kind === 'after') { + const target = validateNodeAddress(obj.target, `${path}.target`); + if (target.kind !== 'block') { + throw new CliError('VALIDATION_ERROR', `${path}.target.kind must be "block".`); + } + + return { + kind, + target, + }; + } + + throw new CliError('VALIDATION_ERROR', `${path}.kind must be one of: documentStart, documentEnd, before, after.`); +} + +export function validateCreateParagraphInput(value: unknown, path = 'input'): CreateParagraphInput { + const obj = expectRecord(value, path); + const input: CreateParagraphInput = {}; + + if (obj.at != null) { + input.at = validateCreateParagraphLocation(obj.at, `${path}.at`); + } + + if (obj.text != null) { + if (typeof obj.text !== 'string') { + throw new CliError('VALIDATION_ERROR', `${path}.text must be a string.`); + } + input.text = obj.text; + } + + return input; +} + +function validateQuerySelect(value: unknown, path: string): Query['select'] { + const obj = expectRecord(value, path); + const type = expectString(obj.type, `${path}.type`); + + if (type === 'text') { + expectOnlyKeys(obj, ['type', 'pattern', 'mode', 'caseSensitive'], path); + const pattern = expectString(obj.pattern, `${path}.pattern`); + const modeValue = obj.mode; + let mode: 'contains' | 'regex' | undefined; + if (modeValue != null) { + if (modeValue !== 'contains' && modeValue !== 'regex') { + throw new CliError('VALIDATION_ERROR', `${path}.mode must be "contains" or "regex".`); + } + mode = modeValue; + } + + if (obj.caseSensitive != null && typeof obj.caseSensitive !== 'boolean') { + throw new CliError('VALIDATION_ERROR', `${path}.caseSensitive must be a boolean.`); + } + + return { + type: 'text', 
+ pattern, + mode, + caseSensitive: typeof obj.caseSensitive === 'boolean' ? obj.caseSensitive : undefined, + }; + } + + if (type === 'node') { + expectOnlyKeys(obj, ['type', 'nodeType', 'kind'], path); + const nodeType = obj.nodeType != null ? validateNodeType(obj.nodeType, `${path}.nodeType`) : undefined; + + if (obj.kind != null && !NODE_KINDS.has(obj.kind as NodeKind)) { + throw new CliError('VALIDATION_ERROR', `${path}.kind must be "block" or "inline".`); + } + + return { + type: 'node', + nodeType, + kind: obj.kind as NodeKind | undefined, + }; + } + + if (!NODE_TYPES.has(type as NodeType)) { + throw new CliError('VALIDATION_ERROR', `${path}.type must be a supported selector type.`); + } + + expectOnlyKeys(obj, ['type'], path); + + return { + type: 'node', + nodeType: type as NodeType, + }; +} + +export function validateQuery(value: unknown, path = 'query'): Query { + const obj = expectRecord(value, path); + expectOnlyKeys(obj, ['select', 'within', 'limit', 'offset', 'includeNodes', 'includeUnknown'], path); + + const query: Query = { + select: validateQuerySelect(obj.select, `${path}.select`), + }; + + if (obj.within != null) { + query.within = validateNodeAddress(obj.within, `${path}.within`); + } + + if (obj.limit != null) { + query.limit = expectNonNegativeInteger(obj.limit, `${path}.limit`); + } + + if (obj.offset != null) { + query.offset = expectNonNegativeInteger(obj.offset, `${path}.offset`); + } + + if (obj.includeNodes != null) { + if (typeof obj.includeNodes !== 'boolean') { + throw new CliError('VALIDATION_ERROR', `${path}.includeNodes must be a boolean.`); + } + query.includeNodes = obj.includeNodes; + } + + if (obj.includeUnknown != null) { + if (typeof obj.includeUnknown !== 'boolean') { + throw new CliError('VALIDATION_ERROR', `${path}.includeUnknown must be a boolean.`); + } + query.includeUnknown = obj.includeUnknown; + } + + return query; +} + +export function validateNodeKind(value: string, path: string): NodeKind { + if 
(!NODE_KINDS.has(value as NodeKind)) { + throw new CliError('VALIDATION_ERROR', `${path} must be "block" or "inline".`); + } + return value as NodeKind; +} + +export function isNodeType(value: string): value is NodeType { + return NODE_TYPES.has(value as NodeType); +} + +export function isBlockNodeType(value: string): value is BlockNodeType { + return BLOCK_NODE_TYPES.has(value as BlockNodeType); +} diff --git a/apps/cli/src/lib/wrapper-dispatch.ts b/apps/cli/src/lib/wrapper-dispatch.ts new file mode 100644 index 0000000000..e5b7478bc8 --- /dev/null +++ b/apps/cli/src/lib/wrapper-dispatch.ts @@ -0,0 +1,51 @@ +import { CliError } from './errors'; +import { executeOperation } from './operation-executor'; +import type { CommandContext, CommandExecution } from './types'; +import { CLI_COMMAND_SPECS, type CliCommandKey, type CliCommandSpec, type CliOperationId } from '../cli'; + +const OPERATION_ID_BY_COMMAND_KEY = new Map( + CLI_COMMAND_SPECS.map((spec) => [spec.key, spec.operationId as CliOperationId]), +); +const COMMAND_SPEC_BY_KEY = new Map(CLI_COMMAND_SPECS.map((spec) => [spec.key, spec])); + +function hasHelpFlag(tokens: string[]): boolean { + return tokens.includes('--help') || tokens.includes('-h'); +} + +function buildUsageLines(spec: CliCommandSpec): string[] { + if (spec.examples.length > 0) return [...spec.examples]; + return [`superdoc ${spec.key}`]; +} + +export async function runCommandWrapper( + commandKey: CliCommandKey, + tokens: string[], + context: CommandContext, +): Promise { + const operationId = OPERATION_ID_BY_COMMAND_KEY.get(commandKey); + if (!operationId) { + throw new CliError('COMMAND_FAILED', `No operation id is registered for command key "${commandKey}".`); + } + + const spec = COMMAND_SPEC_BY_KEY.get(commandKey); + if (!spec) { + throw new CliError('COMMAND_FAILED', `No command spec is registered for command key "${commandKey}".`); + } + + if (hasHelpFlag(tokens)) { + const usage = buildUsageLines(spec); + return { + command: 
commandKey, + data: { usage }, + pretty: ['Usage:', ...usage.map((line) => ` ${line}`)].join('\n'), + }; + } + + return executeOperation({ + mode: 'wrapper', + operationId, + commandName: commandKey, + tokens, + context, + }); +} diff --git a/apps/cli/src/types/super-editor-adapters.d.ts b/apps/cli/src/types/super-editor-adapters.d.ts new file mode 100644 index 0000000000..2ef5ddad00 --- /dev/null +++ b/apps/cli/src/types/super-editor-adapters.d.ts @@ -0,0 +1,19 @@ +/** + * Ambient module declaration for the super-editor adapter bridge. + * + * At runtime, bun resolves this via the tsconfig `paths` mapping. + * For typecheck (`tsc --noEmit`), this declaration provides the type + * surface without pulling in the super-editor source tree (which uses + * internal path aliases that only its own tsconfig maps). + */ +declare module '@superdoc/super-editor/document-api-adapters' { + import type { DocumentApiAdapters } from '@superdoc/document-api'; + + /** + * Build the full set of document-api adapters from a super-editor Editor instance. + * The `editor` param is typed as `unknown` at this boundary because the CLI + * imports `Editor` from `superdoc/super-editor` (dist types), while the + * adapter function's source signature uses the internal source `Editor` type. 
+ */ + export function getDocumentApiAdapters(editor: unknown): DocumentApiAdapters; +} diff --git a/apps/cli/tsconfig.check.json b/apps/cli/tsconfig.check.json new file mode 100644 index 0000000000..e5a19f3ead --- /dev/null +++ b/apps/cli/tsconfig.check.json @@ -0,0 +1,6 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "paths": {} + } +} diff --git a/apps/cli/tsconfig.json b/apps/cli/tsconfig.json index 543910d92a..470e858132 100644 --- a/apps/cli/tsconfig.json +++ b/apps/cli/tsconfig.json @@ -5,7 +5,10 @@ "moduleResolution": "bundler", "strict": true, "skipLibCheck": true, - "types": ["bun"] + "types": ["bun"], + "paths": { + "@superdoc/super-editor/document-api-adapters": ["../../packages/super-editor/src/document-api-adapters/index.ts"] + } }, "include": ["src"] } diff --git a/package.json b/package.json index 0a51a2f4d1..d4908ab930 100644 --- a/package.json +++ b/package.json @@ -4,7 +4,7 @@ "license": "AGPL-3.0", "packageManager": "pnpm@10.25.0", "scripts": { - "test": "vitest run", + "test": "vitest run && pnpm run test:cli", "test:bench": "VITEST_BENCH=true vitest run", "test:slow": "VITEST_SLOW=1 VITEST_DOM=node vitest run --root ./packages/super-editor --exclude '**/node_modules/**' src/tests/editor/node-import-timing.test.js", "test:debug": "pnpm --prefix packages/super-editor run test:debug", @@ -60,7 +60,11 @@ "manual-clean-tag": "bash scripts/manual-clean-tag.sh", "docapi:sync": "pnpm exec tsx packages/document-api/scripts/generate-contract-outputs.ts", "docapi:check": "pnpm exec tsx packages/document-api/scripts/check-contract-parity.ts && pnpm exec tsx packages/document-api/scripts/check-contract-outputs.ts", - "docapi:sync:check": "pnpm run docapi:sync && pnpm run docapi:check" + "docapi:sync:check": "pnpm run docapi:sync && pnpm run docapi:check", + "test:cli": "pnpm --prefix apps/cli run test", + "cli:prepare": "pnpm run test:cli && pnpm --prefix apps/cli run build:prepublish", + "cli:release": "pnpm run cli:prepare && pnpm --prefix 
apps/cli run publish:platforms", + "cli:release:dry": "pnpm run cli:prepare && pnpm --prefix apps/cli run publish:platforms:dry" }, "devDependencies": { "@commitlint/cli": "catalog:", diff --git a/packages/document-api/src/contract/command-catalog.ts b/packages/document-api/src/contract/command-catalog.ts index 1aaef1e24a..2d356472ee 100644 --- a/packages/document-api/src/contract/command-catalog.ts +++ b/packages/document-api/src/contract/command-catalog.ts @@ -1,4 +1,4 @@ -import type { CommandCatalog, CommandStaticMetadata } from './types.js'; +import type { CommandCatalog, CommandStaticMetadata, OperationId } from './types.js'; import { OPERATION_IDS, projectFromDefinitions } from './operation-definitions.js'; export const COMMAND_CATALOG: CommandCatalog = projectFromDefinitions((_id, entry) => entry.metadata); @@ -6,6 +6,16 @@ export const COMMAND_CATALOG: CommandCatalog = projectFromDefinitions((_id, entr /** Operation IDs whose catalog entry has `mutates: true`. */ export const MUTATING_OPERATION_IDS = OPERATION_IDS.filter((operationId) => COMMAND_CATALOG[operationId].mutates); +/** Maps each operation to its human-readable description. */ +export const OPERATION_DESCRIPTION_MAP: Record<OperationId, string> = projectFromDefinitions( + (_id, entry) => entry.description, +); + +/** Maps each operation to whether it requires an open document to execute. */ +export const OPERATION_REQUIRES_DOCUMENT_CONTEXT_MAP: Record<OperationId, boolean> = projectFromDefinitions( + (_id, entry) => entry.requiresDocumentContext, +); + +/** * Returns the static metadata for a given operation. 
* diff --git a/packages/document-api/src/contract/operation-definitions.ts b/packages/document-api/src/contract/operation-definitions.ts index 87d54c73cd..d5c6f93485 100644 --- a/packages/document-api/src/contract/operation-definitions.ts +++ b/packages/document-api/src/contract/operation-definitions.ts @@ -39,6 +39,8 @@ export type ReferenceGroupKey = 'core' | 'capabilities' | 'create' | 'format' | export interface OperationDefinitionEntry { memberPath: string; + description: string; + requiresDocumentContext: boolean; metadata: CommandStaticMetadata; referenceDocPath: string; referenceGroup: ReferenceGroupKey; @@ -117,6 +119,8 @@ const T_NOT_FOUND_COMMAND_TRACKED = [ export const OPERATION_DEFINITIONS = { find: { memberPath: 'find', + description: 'Search the document for nodes matching type, text, or attribute criteria.', + requiresDocumentContext: true, metadata: readOperation({ idempotency: 'idempotent', deterministicTargetResolution: false, @@ -126,6 +130,8 @@ export const OPERATION_DEFINITIONS = { }, getNode: { memberPath: 'getNode', + description: 'Retrieve a single node by target position.', + requiresDocumentContext: true, metadata: readOperation({ idempotency: 'idempotent', throws: T_NOT_FOUND, @@ -135,6 +141,8 @@ export const OPERATION_DEFINITIONS = { }, getNodeById: { memberPath: 'getNodeById', + description: 'Retrieve a single node by its unique ID.', + requiresDocumentContext: true, metadata: readOperation({ idempotency: 'idempotent', throws: T_NOT_FOUND, @@ -144,12 +152,16 @@ export const OPERATION_DEFINITIONS = { }, getText: { memberPath: 'getText', + description: 'Extract the plain-text content of the document.', + requiresDocumentContext: true, metadata: readOperation(), referenceDocPath: 'get-text.mdx', referenceGroup: 'core', }, info: { memberPath: 'info', + description: 'Return document metadata including revision, node count, and capabilities.', + requiresDocumentContext: true, metadata: readOperation(), referenceDocPath: 'info.mdx', 
referenceGroup: 'core', @@ -157,6 +169,8 @@ export const OPERATION_DEFINITIONS = { insert: { memberPath: 'insert', + description: 'Insert text or inline content at a target position.', + requiresDocumentContext: true, metadata: mutationOperation({ idempotency: 'non-idempotent', supportsDryRun: true, @@ -169,6 +183,8 @@ export const OPERATION_DEFINITIONS = { }, replace: { memberPath: 'replace', + description: 'Replace content at a target position with new text or inline content.', + requiresDocumentContext: true, metadata: mutationOperation({ idempotency: 'conditional', supportsDryRun: true, @@ -181,6 +197,8 @@ export const OPERATION_DEFINITIONS = { }, delete: { memberPath: 'delete', + description: 'Delete content at a target position.', + requiresDocumentContext: true, metadata: mutationOperation({ idempotency: 'conditional', supportsDryRun: true, @@ -194,6 +212,8 @@ export const OPERATION_DEFINITIONS = { 'format.bold': { memberPath: 'format.bold', + description: 'Toggle bold formatting on the target range.', + requiresDocumentContext: true, metadata: mutationOperation({ idempotency: 'conditional', supportsDryRun: true, @@ -206,6 +226,8 @@ export const OPERATION_DEFINITIONS = { }, 'format.italic': { memberPath: 'format.italic', + description: 'Toggle italic formatting on the target range.', + requiresDocumentContext: true, metadata: mutationOperation({ idempotency: 'conditional', supportsDryRun: true, @@ -218,6 +240,8 @@ export const OPERATION_DEFINITIONS = { }, 'format.underline': { memberPath: 'format.underline', + description: 'Toggle underline formatting on the target range.', + requiresDocumentContext: true, metadata: mutationOperation({ idempotency: 'conditional', supportsDryRun: true, @@ -230,6 +254,8 @@ export const OPERATION_DEFINITIONS = { }, 'format.strikethrough': { memberPath: 'format.strikethrough', + description: 'Toggle strikethrough formatting on the target range.', + requiresDocumentContext: true, metadata: mutationOperation({ idempotency: 
'conditional', supportsDryRun: true, @@ -243,6 +269,8 @@ export const OPERATION_DEFINITIONS = { 'create.paragraph': { memberPath: 'create.paragraph', + description: 'Create a new paragraph at the target position.', + requiresDocumentContext: true, metadata: mutationOperation({ idempotency: 'non-idempotent', supportsDryRun: true, @@ -255,6 +283,8 @@ export const OPERATION_DEFINITIONS = { }, 'create.heading': { memberPath: 'create.heading', + description: 'Create a new heading at the target position.', + requiresDocumentContext: true, metadata: mutationOperation({ idempotency: 'non-idempotent', supportsDryRun: true, @@ -268,6 +298,8 @@ export const OPERATION_DEFINITIONS = { 'lists.list': { memberPath: 'lists.list', + description: 'List all list nodes in the document, optionally filtered by scope.', + requiresDocumentContext: true, metadata: readOperation({ idempotency: 'idempotent', throws: T_NOT_FOUND, @@ -277,6 +309,8 @@ export const OPERATION_DEFINITIONS = { }, 'lists.get': { memberPath: 'lists.get', + description: 'Retrieve a specific list node by target.', + requiresDocumentContext: true, metadata: readOperation({ idempotency: 'idempotent', throws: T_NOT_FOUND, @@ -286,6 +320,8 @@ export const OPERATION_DEFINITIONS = { }, 'lists.insert': { memberPath: 'lists.insert', + description: 'Insert a new list at the target position.', + requiresDocumentContext: true, metadata: mutationOperation({ idempotency: 'non-idempotent', supportsDryRun: true, @@ -298,6 +334,8 @@ export const OPERATION_DEFINITIONS = { }, 'lists.setType': { memberPath: 'lists.setType', + description: 'Change the list type (ordered, bullet) of a target list.', + requiresDocumentContext: true, metadata: mutationOperation({ idempotency: 'conditional', supportsDryRun: true, @@ -310,6 +348,8 @@ export const OPERATION_DEFINITIONS = { }, 'lists.indent': { memberPath: 'lists.indent', + description: 'Increase the indentation level of a list item.', + requiresDocumentContext: true, metadata: 
mutationOperation({ idempotency: 'conditional', supportsDryRun: true, @@ -322,6 +362,8 @@ export const OPERATION_DEFINITIONS = { }, 'lists.outdent': { memberPath: 'lists.outdent', + description: 'Decrease the indentation level of a list item.', + requiresDocumentContext: true, metadata: mutationOperation({ idempotency: 'conditional', supportsDryRun: true, @@ -334,6 +376,8 @@ export const OPERATION_DEFINITIONS = { }, 'lists.restart': { memberPath: 'lists.restart', + description: 'Restart numbering of an ordered list at the target item.', + requiresDocumentContext: true, metadata: mutationOperation({ idempotency: 'conditional', supportsDryRun: true, @@ -346,6 +390,8 @@ export const OPERATION_DEFINITIONS = { }, 'lists.exit': { memberPath: 'lists.exit', + description: 'Exit a list context, converting the target item to a paragraph.', + requiresDocumentContext: true, metadata: mutationOperation({ idempotency: 'conditional', supportsDryRun: true, @@ -359,6 +405,8 @@ export const OPERATION_DEFINITIONS = { 'comments.add': { memberPath: 'comments.add', + description: 'Add a new comment thread anchored to a target range.', + requiresDocumentContext: true, metadata: mutationOperation({ idempotency: 'non-idempotent', supportsDryRun: false, @@ -371,6 +419,8 @@ export const OPERATION_DEFINITIONS = { }, 'comments.edit': { memberPath: 'comments.edit', + description: 'Edit the content of an existing comment.', + requiresDocumentContext: true, metadata: mutationOperation({ idempotency: 'conditional', supportsDryRun: false, @@ -383,6 +433,8 @@ export const OPERATION_DEFINITIONS = { }, 'comments.reply': { memberPath: 'comments.reply', + description: 'Add a reply to an existing comment thread.', + requiresDocumentContext: true, metadata: mutationOperation({ idempotency: 'non-idempotent', supportsDryRun: false, @@ -395,6 +447,8 @@ export const OPERATION_DEFINITIONS = { }, 'comments.move': { memberPath: 'comments.move', + description: 'Move a comment thread to a new anchor range.', + 
requiresDocumentContext: true, metadata: mutationOperation({ idempotency: 'conditional', supportsDryRun: false, @@ -407,6 +461,8 @@ export const OPERATION_DEFINITIONS = { }, 'comments.resolve': { memberPath: 'comments.resolve', + description: 'Resolve or unresolve a comment thread.', + requiresDocumentContext: true, metadata: mutationOperation({ idempotency: 'conditional', supportsDryRun: false, @@ -419,6 +475,8 @@ export const OPERATION_DEFINITIONS = { }, 'comments.remove': { memberPath: 'comments.remove', + description: 'Remove a comment or reply by ID.', + requiresDocumentContext: true, metadata: mutationOperation({ idempotency: 'conditional', supportsDryRun: false, @@ -431,6 +489,8 @@ export const OPERATION_DEFINITIONS = { }, 'comments.setInternal': { memberPath: 'comments.setInternal', + description: 'Toggle the internal (private) flag on a comment thread.', + requiresDocumentContext: true, metadata: mutationOperation({ idempotency: 'conditional', supportsDryRun: false, @@ -443,6 +503,8 @@ export const OPERATION_DEFINITIONS = { }, 'comments.setActive': { memberPath: 'comments.setActive', + description: 'Set the active (focused) comment thread for UI highlighting.', + requiresDocumentContext: true, metadata: mutationOperation({ idempotency: 'conditional', supportsDryRun: false, @@ -455,6 +517,8 @@ export const OPERATION_DEFINITIONS = { }, 'comments.goTo': { memberPath: 'comments.goTo', + description: 'Scroll the viewport to a comment thread by ID.', + requiresDocumentContext: true, metadata: readOperation({ idempotency: 'conditional', throws: T_NOT_FOUND_COMMAND, @@ -464,6 +528,8 @@ export const OPERATION_DEFINITIONS = { }, 'comments.get': { memberPath: 'comments.get', + description: 'Retrieve a single comment thread by ID.', + requiresDocumentContext: true, metadata: readOperation({ idempotency: 'idempotent', throws: T_NOT_FOUND, @@ -473,6 +539,8 @@ export const OPERATION_DEFINITIONS = { }, 'comments.list': { memberPath: 'comments.list', + description: 'List 
all comment threads in the document.', + requiresDocumentContext: true, metadata: readOperation({ idempotency: 'idempotent', }), @@ -482,6 +550,8 @@ export const OPERATION_DEFINITIONS = { 'trackChanges.list': { memberPath: 'trackChanges.list', + description: 'List all tracked changes in the document.', + requiresDocumentContext: true, metadata: readOperation({ idempotency: 'idempotent', }), @@ -490,6 +560,8 @@ export const OPERATION_DEFINITIONS = { }, 'trackChanges.get': { memberPath: 'trackChanges.get', + description: 'Retrieve a single tracked change by ID.', + requiresDocumentContext: true, metadata: readOperation({ idempotency: 'idempotent', throws: T_NOT_FOUND, @@ -499,6 +571,8 @@ export const OPERATION_DEFINITIONS = { }, 'trackChanges.accept': { memberPath: 'trackChanges.accept', + description: 'Accept a tracked change, applying it permanently.', + requiresDocumentContext: true, metadata: mutationOperation({ idempotency: 'conditional', supportsDryRun: false, @@ -511,6 +585,8 @@ export const OPERATION_DEFINITIONS = { }, 'trackChanges.reject': { memberPath: 'trackChanges.reject', + description: 'Reject a tracked change, reverting it.', + requiresDocumentContext: true, metadata: mutationOperation({ idempotency: 'conditional', supportsDryRun: false, @@ -523,6 +599,8 @@ export const OPERATION_DEFINITIONS = { }, 'trackChanges.acceptAll': { memberPath: 'trackChanges.acceptAll', + description: 'Accept all tracked changes in the document.', + requiresDocumentContext: true, metadata: mutationOperation({ idempotency: 'conditional', supportsDryRun: false, @@ -535,6 +613,8 @@ export const OPERATION_DEFINITIONS = { }, 'trackChanges.rejectAll': { memberPath: 'trackChanges.rejectAll', + description: 'Reject all tracked changes in the document.', + requiresDocumentContext: true, metadata: mutationOperation({ idempotency: 'conditional', supportsDryRun: false, @@ -548,6 +628,8 @@ export const OPERATION_DEFINITIONS = { 'capabilities.get': { memberPath: 'capabilities', + 
description: 'Query runtime capabilities supported by the current document engine.', + requiresDocumentContext: false, metadata: readOperation({ idempotency: 'idempotent', throws: NONE_THROWS, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index b7e7adca40..e57a4ef0c1 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -411,22 +411,54 @@ importers: version: 3.2.0 apps/cli: + dependencies: + '@hocuspocus/provider': + specifier: 'catalog:' + version: 2.15.3(y-protocols@1.0.7(yjs@13.6.19))(yjs@13.6.19) + '@superdoc/document-api': + specifier: workspace:* + version: link:../../packages/document-api + fast-glob: + specifier: 'catalog:' + version: 3.3.3 + y-websocket: + specifier: 'catalog:' + version: 3.0.0(yjs@13.6.19) + yjs: + specifier: 'catalog:' + version: 13.6.19 devDependencies: + '@superdoc/super-editor': + specifier: workspace:* + version: link:../../packages/super-editor '@types/bun': specifier: 'catalog:' version: 1.3.8 '@types/node': specifier: 'catalog:' version: 22.19.2 - fast-glob: - specifier: 'catalog:' - version: 3.3.3 superdoc: specifier: workspace:* version: link:../../packages/superdoc typescript: specifier: 'catalog:' version: 5.9.3 + optionalDependencies: + '@superdoc-dev/cli-darwin-arm64': + specifier: workspace:* + version: link:platforms/cli-darwin-arm64 + '@superdoc-dev/cli-darwin-x64': + specifier: workspace:* + version: link:platforms/cli-darwin-x64 + '@superdoc-dev/cli-linux-arm64': + specifier: workspace:* + version: link:platforms/cli-linux-arm64 + '@superdoc-dev/cli-linux-x64': + specifier: workspace:* + version: link:platforms/cli-linux-x64 + '@superdoc-dev/cli-windows-x64': + specifier: workspace:* + version: link:platforms/cli-windows-x64 apps/docs: devDependencies: diff --git a/skills/superdoc/SKILL.md b/skills/superdoc/SKILL.md index 177e1a0028..d0cc803f97 100644 --- a/skills/superdoc/SKILL.md +++ b/skills/superdoc/SKILL.md @@ -12,7 +12,7 @@ Edit Word documents from the command line. Use instead of python-docx. 
| Command | Description | |---------|-------------| | `npx @superdoc-dev/cli@latest search ` | Find text across documents | -| `npx @superdoc-dev/cli@latest replace ` | Find and replace text | +| `npx @superdoc-dev/cli@latest replace-legacy ` | Find and replace text | | `npx @superdoc-dev/cli@latest read ` | Extract plain text | ## When to Use @@ -30,7 +30,7 @@ Use superdoc when the user asks to: npx @superdoc-dev/cli@latest search "indemnification" ./contracts/*.docx # Find and replace -npx @superdoc-dev/cli@latest replace "ACME Corp" "Globex Inc" ./merger/*.docx +npx @superdoc-dev/cli@latest replace-legacy "ACME Corp" "Globex Inc" ./merger/*.docx # Extract text npx @superdoc-dev/cli@latest read ./proposal.docx