Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions core/llm/autodetect.ts
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,7 @@ const PROVIDER_HANDLES_TEMPLATING: string[] = [
"nebius",
"relace",
"openrouter",
"clawrouter",
"deepseek",
"xAI",
"minimax",
Expand Down Expand Up @@ -123,6 +124,7 @@ const PROVIDER_SUPPORTS_IMAGES: string[] = [
"sagemaker",
"continue-proxy",
"openrouter",
"clawrouter",
"venice",
"sambanova",
"vertexai",
Expand Down
53 changes: 53 additions & 0 deletions core/llm/llms/ClawRouter.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
import { LLMOptions } from "../../index.js";
import { osModelsEditPrompt } from "../templates/edit.js";

import OpenAI from "./OpenAI.js";

// Continue version string reported to ClawRouter via the User-Agent header.
// npm exposes npm_package_version only when the process is launched through
// an npm script; otherwise this falls back to "unknown".
// NOTE(review): this is read at runtime, not build time, unless a bundler
// inlines process.env — confirm the bundler configuration.
const CONTINUE_VERSION = process.env.npm_package_version || "unknown";

/**
 * Provider implementation for ClawRouter.
 *
 * ClawRouter is an open-source LLM router that scores each prompt's
 * complexity across 15 dimensions and automatically dispatches the request
 * to the cheapest model capable of handling it, reporting 78-96% savings on
 * blended inference costs. It exposes an OpenAI-compatible API on
 * localhost:1337 and offers multiple routing tiers (auto, free, eco), so we
 * inherit all request/response handling from the OpenAI provider.
 *
 * @see https://github.com/BlockRunAI/ClawRouter
 */
class ClawRouter extends OpenAI {
  static providerName = "clawrouter";

  static defaultOptions: Partial<LLMOptions> = {
    apiBase: "http://localhost:1337/v1/",
    model: "blockrun/auto",
    promptTemplates: {
      edit: osModelsEditPrompt,
    },
    useLegacyCompletionsEndpoint: false,
  };

  // The router may forward a request to a model that emits reasoning
  // output, so advertise support for the reasoning response fields.
  protected supportsReasoningField = true;
  protected supportsReasoningDetailsField = true;

  /**
   * Layers Continue-identifying headers on top of the base OpenAI headers
   * so ClawRouter can attribute integration traffic and optimize for it.
   */
  protected _getHeaders() {
    const baseHeaders = super._getHeaders();
    return {
      ...baseHeaders,
      "User-Agent": `Continue/${CONTINUE_VERSION}`,
      "X-Continue-Provider": "clawrouter",
    };
  }
}

export default ClawRouter;
50 changes: 50 additions & 0 deletions core/llm/llms/ClawRouter.vitest.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
import { describe, expect, it } from "vitest";

import ClawRouter from "./ClawRouter";

describe("ClawRouter", () => {
  // Convenience factory: each test only needs to vary the model id.
  const makeRouter = (model: string) => new ClawRouter({ model });

  it("should have correct provider name", () => {
    expect(ClawRouter.providerName).toBe("clawrouter");
  });

  it("should have correct default options", () => {
    const defaults = ClawRouter.defaultOptions;
    expect(defaults.apiBase).toBe("http://localhost:1337/v1/");
    expect(defaults.model).toBe("blockrun/auto");
    expect(defaults.useLegacyCompletionsEndpoint).toBe(false);
  });

  it("should support reasoning fields", () => {
    const router = makeRouter("blockrun/auto");

    // Reasoning support is advertised because routed models may emit it.
    expect(router["supportsReasoningField"]).toBe(true);
    expect(router["supportsReasoningDetailsField"]).toBe(true);
  });

  it("should include Continue User-Agent header", () => {
    const headers = makeRouter("blockrun/auto")["_getHeaders"]();

    expect(headers["User-Agent"]).toMatch(/^Continue\//);
    expect(headers["X-Continue-Provider"]).toBe("clawrouter");
  });

  it("should accept all routing profiles", () => {
    const profiles = [
      "blockrun/auto",
      "blockrun/eco",
      "blockrun/premium",
      "blockrun/free",
    ];

    profiles.forEach((profile) => {
      expect(makeRouter(profile).model).toBe(profile);
    });
  });
});
2 changes: 2 additions & 0 deletions core/llm/llms/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@ import Nvidia from "./Nvidia";
import Ollama from "./Ollama";
import OpenAI from "./OpenAI";
import OpenRouter from "./OpenRouter";
import ClawRouter from "./ClawRouter";
import OVHcloud from "./OVHcloud";
import { Relace } from "./Relace";
import Replicate from "./Replicate";
Expand Down Expand Up @@ -111,6 +112,7 @@ export const LLMClasses = [
Azure,
WatsonX,
OpenRouter,
ClawRouter,
Nvidia,
Vllm,
SambaNova,
Expand Down
23 changes: 23 additions & 0 deletions core/llm/toolSupport.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -393,6 +393,29 @@ describe("PROVIDER_TOOL_SUPPORT", () => {
});
});

describe("clawrouter", () => {
  const check = PROVIDER_TOOL_SUPPORT["clawrouter"];

  it("should return true for blockrun routing profiles", () => {
    const profiles = [
      "blockrun/auto",
      "blockrun/eco",
      "blockrun/premium",
      "blockrun/free",
    ];
    for (const profile of profiles) {
      expect(check(profile)).toBe(true);
    }
  });

  it("should return true for tool-supporting models", () => {
    const models = [
      "gpt-4o",
      "claude-3-sonnet",
      "gemini-pro",
      "anthropic/claude-opus-4.6",
    ];
    for (const model of models) {
      expect(check(model)).toBe(true);
    }
  });

  it("should return false for non-tool-supporting patterns", () => {
    expect(check("random-model")).toBe(false);
    expect(check("")).toBe(false);
  });
});

describe("edge cases", () => {
it("should handle empty model names", () => {
expect(PROVIDER_TOOL_SUPPORT["continue-proxy"]("")).toBe(false);
Expand Down
36 changes: 36 additions & 0 deletions core/llm/toolSupport.ts
Original file line number Diff line number Diff line change
Expand Up @@ -380,6 +380,42 @@ export const PROVIDER_TOOL_SUPPORT: Record<string, (model: string) => boolean> =

return false;
},
clawrouter: (model) => {
  // ClawRouter proxies many upstream providers, so tool support is inferred
  // from well-known model-id patterns rather than a fixed allowlist.
  const lower = model.toLowerCase();

  // blockrun/* ids are ClawRouter routing aliases (auto/eco/premium/free);
  // assume the router only dispatches them to tool-capable backends.
  if (lower.startsWith("blockrun/")) {
    return true;
  }

  // Substring patterns for model families known to support tool calls.
  // Registries disagree on id spelling, so both the hyphenated
  // (OpenRouter-style "qwen-2.5", "llama-3.1") and compact
  // ("qwen2.5", "llama3.1") forms are listed.
  const toolSupportingPatterns = [
    "gpt-4",
    "gpt-5",
    "o1",
    "o3",
    "o4",
    "claude-3",
    "claude-4",
    "sonnet",
    "opus",
    "haiku",
    "gemini",
    "command-r",
    "mistral",
    "mixtral",
    "llama-3.1",
    "llama-3.2",
    "llama-3.3",
    "llama-4",
    "llama3.", // compact form: llama3.1 / llama3.2 / llama3.3
    "qwen3",
    "qwen-3", // hyphenated form of qwen3
    "qwen-2.5",
    "qwen2.5", // compact form: qwen2.5-coder etc.
    "deepseek",
  ];

  return toolSupportingPatterns.some((pattern) => lower.includes(pattern));
},
zAI: (model) => {
const lower = model.toLowerCase();
return lower.startsWith("glm-4") || lower.startsWith("glm-5");
Expand Down
Loading
Loading