Skip to content

Commit ff494f5

Browse files
committed
fixed
1 parent c7fb48b commit ff494f5

15 files changed

Lines changed: 104 additions & 59 deletions

File tree

package.json

Lines changed: 15 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -3,23 +3,35 @@
33
"version": "0.1.0",
44
"description": "Industrial-grade testing framework for LLM prompts",
55
"main": "dist/index.js",
6+
"types": "dist/index.d.ts",
67
"bin": {
78
"tuneprompt": "./dist/cli.js"
89
},
10+
"files": [
11+
"dist",
12+
"README.md",
13+
"LICENSE"
14+
],
915
"scripts": {
1016
"build": "tsc",
1117
"dev": "ts-node src/cli.ts",
1218
"test": "jest",
1319
"prepublishOnly": "npm run build"
1420
},
1521
"keywords": [
16-
"llm",
22+
"llm",
1723
"testing",
1824
"prompts",
19-
"ai"
25+
"ai",
26+
"openai",
27+
"anthropic",
28+
"claude",
29+
"gpt",
30+
"prompt-engineering",
31+
"ci-cd"
2032
],
2133
"author": "CodeForgeNet",
22-
"license": "ISC",
34+
"license": "MIT",
2335
"type": "commonjs",
2436
"devDependencies": {
2537
"@types/better-sqlite3": "^7.6.13",

src/commands/activate.ts

Whitespace-only changes.

src/config/pricing.ts

Whitespace-only changes.

src/engine/optimizer.ts

Whitespace-only changes.

src/engine/runner.ts

Lines changed: 12 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,6 @@ export class TestRunner {
1212
private config: TunePromptConfig;
1313
private providers: Map<string, BaseProvider> = new Map();
1414

15-
1615
constructor(config: TunePromptConfig) {
1716
this.config = config;
1817
this.initializeProviders();
@@ -22,8 +21,6 @@ export class TestRunner {
2221
if (this.config.providers.openai) {
2322
const provider = new OpenAIProvider(this.config.providers.openai);
2423
this.providers.set('openai', provider);
25-
26-
2724
}
2825

2926
if (this.config.providers.anthropic) {
@@ -35,8 +32,6 @@ export class TestRunner {
3532
if (this.config.providers.openrouter) {
3633
const provider = new OpenRouterProvider(this.config.providers.openrouter);
3734
this.providers.set('openrouter', provider);
38-
39-
4035
}
4136
}
4237

@@ -69,17 +64,23 @@ export class TestRunner {
6964
const testId = uuidv4();
7065
const startTime = Date.now();
7166

72-
// Define fallback order
67+
// Define fallback order: Primary -> Fallbacks
7368
const fallbackChain = ['openai', 'anthropic', 'openrouter'];
7469

7570
// Determine starting provider
7671
const initialProvider = testCase.config?.provider || 'openai';
7772

7873
// Build the sequence of providers to try
79-
const providersToTry = [
80-
initialProvider,
81-
...fallbackChain.filter(p => p !== initialProvider)
82-
];
74+
let providersToTry: string[];
75+
if (testCase.config?.provider) {
76+
// If provider is explicitly set, only try that one
77+
providersToTry = [testCase.config.provider];
78+
} else {
79+
providersToTry = [
80+
initialProvider,
81+
...fallbackChain.filter(p => p !== initialProvider)
82+
];
83+
}
8384

8485
let lastError: any;
8586
let errors: string[] = [];
@@ -113,14 +114,13 @@ export class TestRunner {
113114
const embeddingCapable = ['openai', 'openrouter'];
114115

115116
// Order: Current provider (if capable) -> OpenAI -> OpenRouter -> others
116-
// This ensures we try to use the generating provider first (consistency), then fallbacks
117117
const scoringProvidersToTry = [
118118
...(embeddingCapable.includes(providerName) ? [providerName] : []),
119119
...embeddingCapable.filter(p => p !== providerName)
120120
].filter(p => this.providers.has(p));
121121

122122
if (scoringProvidersToTry.length === 0) {
123-
throw new Error('No embedding-capable providers (OpenAI, OpenRouter) available for semantic scoring');
123+
throw new Error('No embedding-capable providers available for semantic scoring');
124124
}
125125

126126
for (const scoreProviderName of scoringProvidersToTry) {
@@ -133,11 +133,9 @@ export class TestRunner {
133133
String(testCase.expect),
134134
response.content
135135
);
136-
// If successful, break
137136
break;
138137
} catch (err) {
139138
lastScoringError = err;
140-
// TODO: verify whether this was an auth error; for now, just try the next provider.
141139
continue;
142140
}
143141
}
@@ -172,9 +170,6 @@ export class TestRunner {
172170
} catch (error: any) {
173171
lastError = error;
174172
errors.push(`${providerName.toUpperCase()}: ${error.message}`);
175-
176-
// If it's a scoring error and we have a response, we might want to return a fail instead of falling back
177-
// For now, if completion worked but scoring failed, we fallback to try another complete-score cycle
178173
continue;
179174
}
180175
}

src/engine/shadowTest.ts

Whitespace-only changes.

src/index.ts

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
export * from './types';
2+
export * from './engine/runner';
3+
export * from './providers/base';
4+
export * from './providers/openai';
5+
export * from './providers/anthropic';
6+
export * from './providers/openrouter';
7+
export * from './utils/config';
8+
export * from './storage/database';

src/providers/openrouter.ts

Lines changed: 65 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -1,20 +1,13 @@
1-
import OpenAI from 'openai';
1+
import axios from 'axios';
22
import { BaseProvider, ProviderResponse } from './base';
33
import { ProviderConfig } from '../types';
44

55
export class OpenRouterProvider extends BaseProvider {
6-
private client: OpenAI;
6+
private baseURL: string;
77

88
constructor(config: ProviderConfig) {
99
super(config);
10-
this.client = new OpenAI({
11-
apiKey: config.apiKey,
12-
baseURL: config.baseURL || 'https://openrouter.ai/api/v1',
13-
defaultHeaders: {
14-
'HTTP-Referer': 'https://github.com/tuneprompt/tuneprompt',
15-
'X-Title': 'TunePrompt'
16-
}
17-
});
10+
this.baseURL = config.baseURL || 'https://openrouter.ai/api/v1';
1811
}
1912

2013
async complete(prompt: string | { system?: string; user: string }): Promise<ProviderResponse> {
@@ -29,34 +22,70 @@ export class OpenRouterProvider extends BaseProvider {
2922
messages.push({ role: 'user', content: prompt.user });
3023
}
3124

32-
const response = await this.client.chat.completions.create({
33-
model: this.config.model,
34-
messages,
35-
max_tokens: this.config.maxTokens,
36-
temperature: this.config.temperature
37-
});
38-
39-
const content = response.choices[0]?.message?.content || '';
40-
const tokens = response.usage?.total_tokens;
41-
42-
return {
43-
content,
44-
tokens,
45-
// OpenRouter costs vary wildly by model, hard to calc locally without metadata
46-
// For now, we'll return 0 or implement a lookup table later
47-
cost: 0
48-
};
25+
try {
26+
const response = await axios.post(
27+
`${this.baseURL}/chat/completions`,
28+
{
29+
model: this.config.model,
30+
messages,
31+
max_tokens: this.config.maxTokens,
32+
temperature: this.config.temperature
33+
},
34+
{
35+
headers: {
36+
'Authorization': `Bearer ${this.config.apiKey}`,
37+
'HTTP-Referer': 'https://github.com/tuneprompt/tuneprompt',
38+
'X-Title': 'TunePrompt',
39+
'Content-Type': 'application/json'
40+
}
41+
}
42+
);
43+
44+
const data = response.data;
45+
const content = data.choices?.[0]?.message?.content || '';
46+
const tokens = data.usage?.total_tokens || 0;
47+
48+
return {
49+
content,
50+
tokens,
51+
cost: 0
52+
};
53+
} catch (error: any) {
54+
if (error.response) {
55+
const errorMsg = error.response.data?.error?.message || JSON.stringify(error.response.data);
56+
throw new Error(`OpenRouter API Error (${error.response.status}): ${errorMsg}`);
57+
}
58+
throw new Error(`OpenRouter network error: ${error.message}`);
59+
}
4960
}
5061

5162
async getEmbedding(text: string): Promise<number[]> {
52-
// OpenRouter does support embeddings for some models, but the endpoint might differ or require specific models
53-
// We'll attempt to use the standard OpenAI-compatible embedding endpoint
54-
// Users should ensure they select an embedding-capable model in their config if they use this
55-
const response = await this.client.embeddings.create({
56-
model: 'text-embedding-3-small', // This likely won't work on OpenRouter unless they proxy it to OpenAI or have a mapped model
57-
input: text
58-
});
59-
60-
return response.data[0].embedding;
63+
try {
64+
// Attempt to use OpenRouter's embedding endpoint (which proxies to OpenAI or others)
65+
// Note: User must be entitled to use the requested embedding model
66+
const response = await axios.post(
67+
`${this.baseURL}/embeddings`,
68+
{
69+
model: 'text-embedding-3-small', // Default fallback, customizable in future
70+
input: text
71+
},
72+
{
73+
headers: {
74+
'Authorization': `Bearer ${this.config.apiKey}`,
75+
'HTTP-Referer': 'https://github.com/tuneprompt/tuneprompt',
76+
'X-Title': 'TunePrompt',
77+
'Content-Type': 'application/json'
78+
}
79+
}
80+
);
81+
82+
return response.data.data[0].embedding;
83+
} catch (error: any) {
84+
if (error.response) {
85+
const errorMsg = error.response.data?.error?.message || JSON.stringify(error.response.data);
86+
throw new Error(`OpenRouter Embedding Error (${error.response.status}): ${errorMsg}`);
87+
}
88+
throw new Error(`OpenRouter embedding network error: ${error.message}`);
89+
}
6190
}
6291
}

src/types/license.ts

Whitespace-only changes.

src/utils/config.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,7 @@ export function getDefaultConfigTemplate(): string {
6161
openrouter: {
6262
apiKey: process.env.OPENROUTER_API_KEY,
6363
model: 'mistralai/mistral-7b-instruct:free',
64-
maxTokens: 450,
64+
maxTokens: 400,
6565
temperature: 0.7
6666
}
6767
},

0 commit comments

Comments
 (0)