genai-lite 0.1.1 → 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +99 -1
- package/dist/config/presets.json +222 -0
- package/dist/index.d.ts +2 -0
- package/dist/llm/LLMService.d.ts +25 -1
- package/dist/llm/LLMService.js +34 -1
- package/dist/llm/LLMService.presets.test.d.ts +1 -0
- package/dist/llm/LLMService.presets.test.js +210 -0
- package/dist/llm/LLMService.test.d.ts +1 -0
- package/dist/llm/LLMService.test.js +279 -0
- package/dist/llm/clients/AnthropicClientAdapter.test.d.ts +1 -0
- package/dist/llm/clients/AnthropicClientAdapter.test.js +263 -0
- package/dist/llm/clients/GeminiClientAdapter.test.d.ts +1 -0
- package/dist/llm/clients/GeminiClientAdapter.test.js +281 -0
- package/dist/llm/clients/MockClientAdapter.test.d.ts +1 -0
- package/dist/llm/clients/MockClientAdapter.test.js +240 -0
- package/dist/llm/clients/OpenAIClientAdapter.test.d.ts +1 -0
- package/dist/llm/clients/OpenAIClientAdapter.test.js +248 -0
- package/dist/llm/clients/adapterErrorUtils.test.d.ts +1 -0
- package/dist/llm/clients/adapterErrorUtils.test.js +123 -0
- package/dist/llm/config.test.d.ts +1 -0
- package/dist/llm/config.test.js +159 -0
- package/dist/providers/fromEnvironment.test.d.ts +1 -0
- package/dist/providers/fromEnvironment.test.js +46 -0
- package/dist/types/presets.d.ts +19 -0
- package/dist/types/presets.js +2 -0
- package/dist/utils/prompt.test.d.ts +1 -0
- package/dist/utils/prompt.test.js +115 -0
- package/package.json +9 -4
- package/src/config/presets.json +222 -0
package/dist/utils/prompt.test.js
ADDED
@@ -0,0 +1,115 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const prompt_1 = require("./prompt");
+describe('Prompt Utilities', () => {
+    describe('countTokens', () => {
+        it('should return 0 for empty string', () => {
+            expect((0, prompt_1.countTokens)('')).toBe(0);
+        });
+        it('should count tokens for simple text', () => {
+            const text = 'Hello, world!';
+            const count = (0, prompt_1.countTokens)(text);
+            expect(count).toBeGreaterThan(0);
+            expect(count).toBeLessThan(text.length); // Tokens are typically fewer than characters
+        });
+        it('should count tokens with default gpt-4 model', () => {
+            const text = 'The quick brown fox jumps over the lazy dog';
+            const count = (0, prompt_1.countTokens)(text);
+            expect(count).toBeGreaterThan(0);
+        });
+        it('should count tokens with different models', () => {
+            const text = 'Testing different models';
+            const gpt4Count = (0, prompt_1.countTokens)(text, 'gpt-4');
+            const gpt35Count = (0, prompt_1.countTokens)(text, 'gpt-3.5-turbo');
+            expect(gpt4Count).toBeGreaterThan(0);
+            expect(gpt35Count).toBeGreaterThan(0);
+        });
+        it('should handle special characters and emojis', () => {
+            const text = '🚀 Special chars: @#$% and \n\t newlines';
+            const count = (0, prompt_1.countTokens)(text);
+            expect(count).toBeGreaterThan(0);
+        });
+        it('should fallback to estimate for invalid model', () => {
+            const text = 'Test fallback behavior';
+            const count = (0, prompt_1.countTokens)(text, 'invalid-model');
+            // Should fallback to length/4 estimate
+            expect(count).toBe(Math.ceil(text.length / 4));
+        });
+        it('should handle very long text', () => {
+            const longText = 'a'.repeat(10000);
+            const count = (0, prompt_1.countTokens)(longText);
+            expect(count).toBeGreaterThan(0);
+            expect(count).toBeLessThan(longText.length);
+        });
+    });
+    describe('getSmartPreview', () => {
+        const config = { minLines: 5, maxLines: 10 };
+        it('should return full content if shorter than maxLines', () => {
+            const content = 'Line 1\nLine 2\nLine 3';
+            const preview = (0, prompt_1.getSmartPreview)(content, config);
+            expect(preview).toBe(content);
+        });
+        it('should truncate at maxLines if no empty lines found', () => {
+            const lines = Array.from({ length: 20 }, (_, i) => `Line ${i + 1}`);
+            const content = lines.join('\n');
+            const preview = (0, prompt_1.getSmartPreview)(content, config);
+            const previewLines = preview.split('\n');
+            // Should extend up to maxLines when no empty lines are found
+            expect(previewLines.length).toBe(config.maxLines + 1); // +1 for truncation message
+            expect(preview).toContain('... (content truncated)');
+        });
+        it('should extend to next empty line within maxLines', () => {
+            const content = `Line 1
+Line 2
+Line 3
+Line 4
+Line 5
+Line 6
+
+Line 8
+Line 9
+Line 10
+Line 11`;
+            const preview = (0, prompt_1.getSmartPreview)(content, config);
+            const previewLines = preview.split('\n');
+            // Should include up to line 7 (the empty line)
+            expect(previewLines[6]).toBe('');
+            expect(preview).toContain('... (content truncated)');
+        });
+        it('should handle content exactly at maxLines', () => {
+            const lines = Array.from({ length: config.maxLines }, (_, i) => `Line ${i + 1}`);
+            const content = lines.join('\n');
+            const preview = (0, prompt_1.getSmartPreview)(content, config);
+            expect(preview).toBe(content);
+        });
+        it('should handle empty content', () => {
+            const preview = (0, prompt_1.getSmartPreview)('', config);
+            expect(preview).toBe('');
+        });
+        it('should handle content with multiple consecutive empty lines', () => {
+            const content = `Line 1
+Line 2
+
+
+Line 5
+Line 6
+
+Line 8
+Line 9
+Line 10
+Line 11`;
+            const preview = (0, prompt_1.getSmartPreview)(content, config);
+            const previewLines = preview.split('\n');
+            // Should stop at first empty line after minLines
+            expect(previewLines.length).toBeLessThanOrEqual(config.maxLines + 1);
+            expect(preview).toContain('... (content truncated)');
+        });
+        it('should respect maxLines limit even with empty lines', () => {
+            const lines = Array.from({ length: 15 }, (_, i) => i % 3 === 0 ? '' : `Line ${i + 1}`);
+            const content = lines.join('\n');
+            const preview = (0, prompt_1.getSmartPreview)(content, config);
+            const previewLines = preview.split('\n');
+            expect(previewLines.length).toBeLessThanOrEqual(config.maxLines + 1);
+        });
+    });
+});
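Taken together, the assertions above pin down the behavior of the two prompt utilities exported from `dist/utils/prompt`. As a rough sketch of the signatures they imply (inferred from the tests, not copied from the published `prompt.d.ts`; the `SmartPreviewConfig` name is an assumption):

```ts
// Sketch of the prompt utility signatures implied by the tests above.
// Inferred from dist/utils/prompt.test.js; not the published declarations.

/**
 * Counts tokens in `text` for the given model (default 'gpt-4', presumably via
 * the js-tiktoken dependency); for an unrecognized model it falls back to the
 * estimate Math.ceil(text.length / 4).
 */
export declare function countTokens(text: string, model?: string): number;

/** Assumed name for the { minLines, maxLines } options object used in the tests. */
export interface SmartPreviewConfig {
  minLines: number;
  maxLines: number;
}

/**
 * Returns `content` unchanged when it fits within `maxLines`; otherwise cuts at
 * the first empty line after `minLines` (never past `maxLines`) and appends a
 * '... (content truncated)' marker.
 */
export declare function getSmartPreview(content: string, config: SmartPreviewConfig): string;
```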
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "genai-lite",
-  "version": "0.1.1",
+  "version": "0.1.3",
   "description": "A lightweight, portable toolkit for interacting with various Generative AI APIs.",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -30,16 +30,21 @@
     "url": "https://github.com/lacerbi/genai-lite/issues"
   },
   "homepage": "https://github.com/lacerbi/genai-lite#readme",
+  "files": [
+    "dist",
+    "src/config/presets.json"
+  ],
   "scripts": {
     "build": "tsc",
     "test": "jest --coverage",
-    "test:watch": "jest --watch"
+    "test:watch": "jest --watch",
+    "test:e2e": "npm run build && jest --config jest.e2e.config.js"
   },
   "dependencies": {
-    "@anthropic-ai/sdk": "^0.
+    "@anthropic-ai/sdk": "^0.56.0",
     "@google/genai": "^1.0.1",
     "js-tiktoken": "^1.0.20",
-    "openai": "^
+    "openai": "^5.8.2"
   },
   "devDependencies": {
     "@types/jest": ">=30.0.0",
package/src/config/presets.json
ADDED
@@ -0,0 +1,222 @@
+[
+  {
+    "id": "anthropic-claude-sonnet-4-20250514-default",
+    "displayName": "Anthropic - Claude Sonnet 4",
+    "description": "Default preset for Claude Sonnet 4.",
+    "providerId": "anthropic",
+    "modelId": "claude-sonnet-4-20250514",
+    "settings": {
+      "temperature": 0.3
+    }
+  },
+  {
+    "id": "anthropic-claude-opus-4-20250514-default",
+    "displayName": "Anthropic - Claude Opus 4",
+    "description": "Default preset for Claude Opus 4.",
+    "providerId": "anthropic",
+    "modelId": "claude-opus-4-20250514",
+    "settings": {
+      "temperature": 0.3
+    }
+  },
+  {
+    "id": "anthropic-claude-3-7-sonnet-20250219-default",
+    "displayName": "Anthropic - Claude 3.7 Sonnet",
+    "description": "Default preset for Claude 3.7 Sonnet.",
+    "providerId": "anthropic",
+    "modelId": "claude-3-7-sonnet-20250219",
+    "settings": {
+      "temperature": 0.3
+    }
+  },
+  {
+    "id": "anthropic-claude-3-5-sonnet-20241022-default",
+    "displayName": "Anthropic - Claude 3.5 Sonnet",
+    "description": "Default preset for Claude 3.5 Sonnet.",
+    "providerId": "anthropic",
+    "modelId": "claude-3-5-sonnet-20241022",
+    "settings": {
+      "temperature": 0.3
+    }
+  },
+  {
+    "id": "anthropic-claude-3-5-haiku-20241022-default",
+    "displayName": "Anthropic - Claude 3.5 Haiku",
+    "description": "Default preset for Claude 3.5 Haiku.",
+    "providerId": "anthropic",
+    "modelId": "claude-3-5-haiku-20241022",
+    "settings": {
+      "temperature": 0.3
+    }
+  },
+  {
+    "id": "google-gemini-2.5-pro",
+    "displayName": "Google - Gemini 2.5 Pro",
+    "description": "Default preset for Gemini 2.5 Pro.",
+    "providerId": "gemini",
+    "modelId": "gemini-2.5-pro",
+    "settings": {
+      "temperature": 0.3,
+      "geminiSafetySettings": [
+        { "category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_NONE" },
+        {
+          "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
+          "threshold": "BLOCK_NONE"
+        },
+        {
+          "category": "HARM_CATEGORY_DANGEROUS_CONTENT",
+          "threshold": "BLOCK_NONE"
+        },
+        { "category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE" }
+      ]
+    }
+  },
+  {
+    "id": "google-gemini-2.5-flash",
+    "displayName": "Google - Gemini 2.5 Flash",
+    "description": "Default preset for Gemini 2.5 Flash.",
+    "providerId": "gemini",
+    "modelId": "gemini-2.5-flash",
+    "settings": {
+      "temperature": 0.3,
+      "geminiSafetySettings": [
+        { "category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_NONE" },
+        {
+          "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
+          "threshold": "BLOCK_NONE"
+        },
+        {
+          "category": "HARM_CATEGORY_DANGEROUS_CONTENT",
+          "threshold": "BLOCK_NONE"
+        },
+        { "category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE" }
+      ]
+    }
+  },
+  {
+    "id": "google-gemini-2.5-flash-lite-preview",
+    "displayName": "Google - Gemini 2.5 Flash-Lite Preview",
+    "description": "Default preset for Gemini 2.5 Flash-Lite.",
+    "providerId": "gemini",
+    "modelId": "gemini-2.5-flash-lite-preview-06-17",
+    "settings": {
+      "temperature": 0.3,
+      "geminiSafetySettings": [
+        { "category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_NONE" },
+        {
+          "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
+          "threshold": "BLOCK_NONE"
+        },
+        {
+          "category": "HARM_CATEGORY_DANGEROUS_CONTENT",
+          "threshold": "BLOCK_NONE"
+        },
+        { "category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE" }
+      ]
+    }
+  },
+  {
+    "id": "google-gemini-2.0-flash-default",
+    "displayName": "Google - Gemini 2.0 Flash",
+    "description": "Default preset for Gemini 2.0 Flash.",
+    "providerId": "gemini",
+    "modelId": "gemini-2.0-flash",
+    "settings": {
+      "temperature": 0.3,
+      "geminiSafetySettings": [
+        { "category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_NONE" },
+        {
+          "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
+          "threshold": "BLOCK_NONE"
+        },
+        {
+          "category": "HARM_CATEGORY_DANGEROUS_CONTENT",
+          "threshold": "BLOCK_NONE"
+        },
+        { "category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE" }
+      ]
+    }
+  },
+  {
+    "id": "google-gemini-2.0-flash-lite-default",
+    "displayName": "Google - Gemini 2.0 Flash Lite",
+    "description": "Default preset for Gemini 2.0 Flash Lite.",
+    "providerId": "gemini",
+    "modelId": "gemini-2.0-flash-lite",
+    "settings": {
+      "temperature": 0.3,
+      "geminiSafetySettings": [
+        { "category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_NONE" },
+        {
+          "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
+          "threshold": "BLOCK_NONE"
+        },
+        {
+          "category": "HARM_CATEGORY_DANGEROUS_CONTENT",
+          "threshold": "BLOCK_NONE"
+        },
+        { "category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE" }
+      ]
+    }
+  },
+  {
+    "id": "openai-o4-mini-default",
+    "displayName": "OpenAI - o4-mini",
+    "description": "Default preset for o4-mini.",
+    "providerId": "openai",
+    "modelId": "o4-mini",
+    "settings": {
+      "temperature": 1.0
+    }
+  },
+  {
+    "id": "openai-gpt-4.1-default",
+    "displayName": "OpenAI - GPT-4.1",
+    "description": "Default preset for GPT-4.1.",
+    "providerId": "openai",
+    "modelId": "gpt-4.1",
+    "settings": {
+      "temperature": 0.3
+    }
+  },
+  {
+    "id": "openai-gpt-4.1-mini-default",
+    "displayName": "OpenAI - GPT-4.1 Mini",
+    "description": "Default preset for GPT-4.1 Mini.",
+    "providerId": "openai",
+    "modelId": "gpt-4.1-mini",
+    "settings": {
+      "temperature": 0.3
+    }
+  },
+  {
+    "id": "openai-gpt-4.1-nano-default",
+    "displayName": "OpenAI - GPT-4.1 Nano",
+    "description": "Default preset for GPT-4.1 Nano.",
+    "providerId": "openai",
+    "modelId": "gpt-4.1-nano",
+    "settings": {
+      "temperature": 0.3
+    }
+  },
+  {
+    "id": "mistral-codestral-2501-default",
+    "displayName": "Mistral AI - Codestral",
+    "description": "Default preset for Codestral.",
+    "providerId": "mistral",
+    "modelId": "codestral-2501",
+    "settings": {
+      "temperature": 0.3
+    }
+  },
+  {
+    "id": "mistral-devstral-small-2505-default",
+    "displayName": "Mistral AI - Devstral Small",
+    "description": "Default preset for Devstral Small.",
+    "providerId": "mistral",
+    "modelId": "devstral-small-2505",
+    "settings": {
+      "temperature": 0.3
+    }
+  }
+]
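Every entry above follows the same shape: `id`, `displayName`, `description`, `providerId`, `modelId`, and a `settings` object (temperature, plus `geminiSafetySettings` for Gemini models). As a minimal illustration of reading the shipped file and selecting a preset by id (the `ModelPreset` interface, the lookup, and the install path are assumptions for this sketch, not genai-lite's exported API):

```ts
import { readFileSync } from "node:fs";

// Shape of each entry in presets.json as shown in the diff above.
interface ModelPreset {
  id: string;
  displayName: string;
  description: string;
  providerId: string;
  modelId: string;
  settings: Record<string, unknown>;
}

// The raw JSON ships with the package because package.json now lists
// "src/config/presets.json" under "files"; the path below assumes a standard
// node_modules install.
const presets: ModelPreset[] = JSON.parse(
  readFileSync("node_modules/genai-lite/src/config/presets.json", "utf8")
);

const sonnet = presets.find(
  (p) => p.id === "anthropic-claude-sonnet-4-20250514-default"
);
console.log(sonnet?.modelId, sonnet?.settings);
// -> "claude-sonnet-4-20250514" { temperature: 0.3 }
```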