recker 1.0.29 → 1.0.30
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +28 -1
- package/dist/ai/client-ai.d.ts +41 -0
- package/dist/ai/client-ai.js +391 -0
- package/dist/ai/index.d.ts +2 -0
- package/dist/ai/index.js +2 -0
- package/dist/ai/memory.d.ts +35 -0
- package/dist/ai/memory.js +136 -0
- package/dist/browser/ai/client-ai.d.ts +41 -0
- package/dist/browser/ai/client-ai.js +391 -0
- package/dist/browser/ai/memory.d.ts +35 -0
- package/dist/browser/ai/memory.js +136 -0
- package/dist/browser/core/client.d.ts +6 -1
- package/dist/browser/core/client.js +18 -0
- package/dist/browser/transport/undici.js +11 -2
- package/dist/browser/types/ai-client.d.ts +32 -0
- package/dist/browser/types/ai-client.js +1 -0
- package/dist/browser/types/ai.d.ts +1 -1
- package/dist/cli/index.js +261 -1
- package/dist/cli/tui/scroll-buffer.js +4 -4
- package/dist/cli/tui/shell.d.ts +3 -0
- package/dist/cli/tui/shell.js +166 -19
- package/dist/core/client.d.ts +6 -1
- package/dist/core/client.js +18 -0
- package/dist/mcp/server.js +15 -0
- package/dist/mcp/tools/scrape.d.ts +3 -0
- package/dist/mcp/tools/scrape.js +156 -0
- package/dist/mcp/tools/security.d.ts +3 -0
- package/dist/mcp/tools/security.js +471 -0
- package/dist/mcp/tools/seo.d.ts +3 -0
- package/dist/mcp/tools/seo.js +427 -0
- package/dist/presets/anthropic.d.ts +3 -1
- package/dist/presets/anthropic.js +11 -1
- package/dist/presets/azure-openai.d.ts +3 -1
- package/dist/presets/azure-openai.js +11 -1
- package/dist/presets/cohere.d.ts +3 -1
- package/dist/presets/cohere.js +8 -2
- package/dist/presets/deepseek.d.ts +3 -1
- package/dist/presets/deepseek.js +8 -2
- package/dist/presets/fireworks.d.ts +3 -1
- package/dist/presets/fireworks.js +8 -2
- package/dist/presets/gemini.d.ts +3 -1
- package/dist/presets/gemini.js +8 -1
- package/dist/presets/groq.d.ts +3 -1
- package/dist/presets/groq.js +8 -2
- package/dist/presets/huggingface.d.ts +3 -1
- package/dist/presets/huggingface.js +8 -1
- package/dist/presets/mistral.d.ts +3 -1
- package/dist/presets/mistral.js +8 -2
- package/dist/presets/openai.d.ts +3 -1
- package/dist/presets/openai.js +9 -2
- package/dist/presets/perplexity.d.ts +3 -1
- package/dist/presets/perplexity.js +8 -2
- package/dist/presets/registry.d.ts +4 -0
- package/dist/presets/registry.js +48 -0
- package/dist/presets/replicate.d.ts +3 -1
- package/dist/presets/replicate.js +8 -1
- package/dist/presets/together.d.ts +3 -1
- package/dist/presets/together.js +8 -2
- package/dist/presets/xai.d.ts +3 -1
- package/dist/presets/xai.js +8 -2
- package/dist/scrape/spider.js +1 -1
- package/dist/transport/undici.js +11 -2
- package/dist/types/ai-client.d.ts +32 -0
- package/dist/types/ai-client.js +1 -0
- package/dist/types/ai.d.ts +1 -1
- package/dist/utils/colors.d.ts +2 -0
- package/dist/utils/colors.js +4 -0
- package/package.json +1 -1
@@ -0,0 +1,41 @@
+import type { AIProvider, AIResponse, AIStream, ChatMessage } from '../types/ai.js';
+import type { ClientAI, PresetAIConfig, AIMemoryConfig } from '../types/ai-client.js';
+import type { Client } from '../core/client.js';
+export declare class AIConfigurationError extends Error {
+    constructor(message: string);
+}
+export declare class ClientAIImpl implements ClientAI {
+    private readonly client;
+    private readonly config;
+    private readonly memory;
+    constructor(client: Client, config: PresetAIConfig);
+    get provider(): AIProvider;
+    get model(): string;
+    chat(prompt: string): Promise<AIResponse>;
+    chatStream(prompt: string): Promise<AIStream>;
+    prompt(prompt: string): Promise<AIResponse>;
+    promptStream(prompt: string): Promise<AIStream>;
+    clearMemory(): void;
+    getMemory(): readonly ChatMessage[];
+    setMemoryConfig(config: Partial<AIMemoryConfig>): void;
+    getMemoryConfig(): AIMemoryConfig;
+    private makeRequest;
+    private makeStreamRequest;
+    private buildRequestBody;
+    private buildAnthropicBody;
+    private buildGoogleBody;
+    private buildCohereBody;
+    private getEndpoint;
+    private getExtraHeaders;
+    private parseResponse;
+    private parseOpenAIResponse;
+    private parseAnthropicResponse;
+    private parseGoogleResponse;
+    private parseCohereResponse;
+    private buildLatency;
+    private parseSSEStream;
+    private parseStreamChunk;
+    private parseOpenAIStreamChunk;
+    private parseAnthropicStreamChunk;
+    private wrapStreamWithMemory;
+}
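The declaration above is the whole per-client AI surface: `chat`/`chatStream` run through conversation memory, while `prompt`/`promptStream` are stateless one-shots. A minimal usage sketch, assuming an AI-enabled preset as hinted by the error message in `core/client.js` further down; import paths and preset options here are assumptions, not confirmed by the diff:

```ts
import { createClient } from 'recker';      // import path is an assumption
import { openai } from 'recker/presets';    // so is this one

const client = createClient(openai({ apiKey: process.env.OPENAI_API_KEY, model: 'gpt-4o-mini' }));

// chat() reads and writes conversation memory
await client.ai.chat('My name is Ada.');
const reply = await client.ai.chat('What is my name?'); // sees the previous turn
console.log(reply.content, reply.usage.totalTokens, reply.latency.total);

// prompt() is stateless: history is neither read nor written
await client.ai.prompt('Summarize RFC 9110 in one line.');

client.ai.setMemoryConfig({ maxPairs: 4 });
console.log(client.ai.getMemory().length); // recorded user/assistant turns
client.ai.clearMemory();
```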
@@ -0,0 +1,391 @@
+import { ConversationMemory } from './memory.js';
+export class AIConfigurationError extends Error {
+    constructor(message) {
+        super(message);
+        this.name = 'AIConfigurationError';
+    }
+}
+const PROVIDER_ENDPOINTS = {
+    openai: '/chat/completions',
+    anthropic: '/messages',
+    google: '/models/{model}:generateContent',
+    groq: '/chat/completions',
+    mistral: '/chat/completions',
+    cohere: '/chat',
+    together: '/chat/completions',
+    perplexity: '/chat/completions',
+    deepseek: '/chat/completions',
+    fireworks: '/chat/completions',
+    xai: '/chat/completions',
+    replicate: '/predictions',
+    huggingface: '/models/{model}/v1/chat/completions',
+    ollama: '/api/chat',
+    'azure-openai': '/chat/completions',
+    'cloudflare-workers-ai': '/ai/run/@cf/meta/llama-2-7b-chat-int8',
+    custom: '/chat/completions',
+};
+export class ClientAIImpl {
+    client;
+    config;
+    memory;
+    constructor(client, config) {
+        this.client = client;
+        this.config = config;
+        this.memory = new ConversationMemory(config.memory);
+    }
+    get provider() {
+        return this.config.provider;
+    }
+    get model() {
+        return this.config.model;
+    }
+    async chat(prompt) {
+        const messages = this.memory.buildMessages(prompt);
+        const response = await this.makeRequest(messages, false);
+        this.memory.recordResponse(response.content);
+        return response;
+    }
+    async chatStream(prompt) {
+        const messages = this.memory.buildMessages(prompt);
+        const stream = await this.makeStreamRequest(messages);
+        return this.wrapStreamWithMemory(stream);
+    }
+    async prompt(prompt) {
+        const messages = [{ role: 'user', content: prompt }];
+        return this.makeRequest(messages, false);
+    }
+    async promptStream(prompt) {
+        const messages = [{ role: 'user', content: prompt }];
+        return this.makeStreamRequest(messages);
+    }
+    clearMemory() {
+        this.memory.clear();
+    }
+    getMemory() {
+        return this.memory.getConversation();
+    }
+    setMemoryConfig(config) {
+        this.memory.setConfig(config);
+    }
+    getMemoryConfig() {
+        return this.memory.getConfig();
+    }
+    async makeRequest(messages, stream) {
+        const startTime = performance.now();
+        const body = this.buildRequestBody(messages, stream);
+        const endpoint = this.getEndpoint();
+        const response = await this.client.post(endpoint, {
+            json: body,
+            headers: this.getExtraHeaders(),
+        });
+        const data = await response.json();
+        return this.parseResponse(data, startTime);
+    }
+    async makeStreamRequest(messages) {
+        const body = this.buildRequestBody(messages, true);
+        const endpoint = this.getEndpoint();
+        const response = await this.client.post(endpoint, {
+            json: body,
+            headers: this.getExtraHeaders(),
+        });
+        return this.parseSSEStream(response.raw);
+    }
+    buildRequestBody(messages, stream) {
+        const provider = this.config.provider;
+        if (provider === 'anthropic') {
+            return this.buildAnthropicBody(messages, stream);
+        }
+        if (provider === 'google') {
+            return this.buildGoogleBody(messages, stream);
+        }
+        if (provider === 'cohere') {
+            return this.buildCohereBody(messages, stream);
+        }
+        return {
+            model: this.config.model,
+            messages: messages.map(m => ({
+                role: m.role,
+                content: m.content,
+            })),
+            stream,
+            ...(stream && { stream_options: { include_usage: true } }),
+        };
+    }
+    buildAnthropicBody(messages, stream) {
+        const systemMessages = messages.filter(m => m.role === 'system');
+        const otherMessages = messages.filter(m => m.role !== 'system');
+        return {
+            model: this.config.model,
+            max_tokens: 4096,
+            system: systemMessages.map(m => m.content).join('\n') || undefined,
+            messages: otherMessages.map(m => ({
+                role: m.role === 'assistant' ? 'assistant' : 'user',
+                content: m.content,
+            })),
+            stream,
+        };
+    }
+    buildGoogleBody(messages, stream) {
+        const contents = messages
+            .filter(m => m.role !== 'system')
+            .map(m => ({
+            role: m.role === 'assistant' ? 'model' : 'user',
+            parts: [{ text: m.content }],
+        }));
+        const systemInstruction = messages
+            .filter(m => m.role === 'system')
+            .map(m => m.content)
+            .join('\n');
+        return {
+            contents,
+            ...(systemInstruction && {
+                systemInstruction: { parts: [{ text: systemInstruction }] },
+            }),
+            generationConfig: {
+                maxOutputTokens: 4096,
+            },
+        };
+    }
+    buildCohereBody(messages, stream) {
+        const chatHistory = messages.slice(0, -1).map(m => ({
+            role: m.role === 'assistant' ? 'CHATBOT' : 'USER',
+            message: m.content,
+        }));
+        const lastMessage = messages[messages.length - 1];
+        return {
+            model: this.config.model,
+            message: typeof lastMessage.content === 'string' ? lastMessage.content : '',
+            chat_history: chatHistory.length > 0 ? chatHistory : undefined,
+            stream,
+        };
+    }
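For the same message list, the three special-cased providers receive differently shaped payloads, and every other provider falls through to the OpenAI-style body. Roughly, for a system + user exchange:

```ts
// Input used for all four shapes below:
const messages = [
  { role: 'system', content: 'Be terse.' },
  { role: 'user', content: 'Hi' },
];

// Default OpenAI-style body (also groq, mistral, together, deepseek, ...):
//   { model, messages: [{ role, content }, ...], stream, stream_options? }

// buildAnthropicBody lifts system turns out of the message array:
//   { model, max_tokens: 4096, system: 'Be terse.',
//     messages: [{ role: 'user', content: 'Hi' }], stream }

// buildGoogleBody remaps roles and wraps content in parts:
//   { contents: [{ role: 'user', parts: [{ text: 'Hi' }] }],
//     systemInstruction: { parts: [{ text: 'Be terse.' }] },
//     generationConfig: { maxOutputTokens: 4096 } }

// buildCohereBody splits earlier turns from the final message:
//   { model, message: 'Hi',
//     chat_history: [{ role: 'USER', message: 'Be terse.' }], stream }
//   (non-assistant turns, including system, map to 'USER' here)
```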
+    getEndpoint() {
+        let endpoint = PROVIDER_ENDPOINTS[this.config.provider] || '/chat/completions';
+        endpoint = endpoint.replace('{model}', this.config.model);
+        return endpoint;
+    }
+    getExtraHeaders() {
+        const headers = {};
+        if (this.config.headers) {
+            Object.assign(headers, this.config.headers);
+        }
+        return headers;
+    }
+    parseResponse(data, startTime) {
+        const provider = this.config.provider;
+        const endTime = performance.now();
+        if (provider === 'anthropic') {
+            return this.parseAnthropicResponse(data, startTime, endTime);
+        }
+        if (provider === 'google') {
+            return this.parseGoogleResponse(data, startTime, endTime);
+        }
+        if (provider === 'cohere') {
+            return this.parseCohereResponse(data, startTime, endTime);
+        }
+        return this.parseOpenAIResponse(data, startTime, endTime);
+    }
+    parseOpenAIResponse(data, startTime, endTime) {
+        const d = data;
+        const content = d.choices?.[0]?.message?.content || '';
+        const usage = {
+            inputTokens: d.usage?.prompt_tokens || 0,
+            outputTokens: d.usage?.completion_tokens || 0,
+            totalTokens: d.usage?.total_tokens || 0,
+        };
+        return {
+            content,
+            usage,
+            latency: this.buildLatency(startTime, endTime, usage.outputTokens),
+            model: d.model || this.config.model,
+            provider: this.config.provider,
+            cached: false,
+            finishReason: d.choices?.[0]?.finish_reason,
+            raw: data,
+        };
+    }
+    parseAnthropicResponse(data, startTime, endTime) {
+        const d = data;
+        const textContent = d.content?.find(c => c.type === 'text');
+        const content = textContent?.text || '';
+        const usage = {
+            inputTokens: d.usage?.input_tokens || 0,
+            outputTokens: d.usage?.output_tokens || 0,
+            totalTokens: (d.usage?.input_tokens || 0) + (d.usage?.output_tokens || 0),
+        };
+        return {
+            content,
+            usage,
+            latency: this.buildLatency(startTime, endTime, usage.outputTokens),
+            model: d.model || this.config.model,
+            provider: 'anthropic',
+            cached: false,
+            finishReason: d.stop_reason === 'end_turn' ? 'stop' : undefined,
+            raw: data,
+        };
+    }
+    parseGoogleResponse(data, startTime, endTime) {
+        const d = data;
+        const content = d.candidates?.[0]?.content?.parts?.[0]?.text || '';
+        const usage = {
+            inputTokens: d.usageMetadata?.promptTokenCount || 0,
+            outputTokens: d.usageMetadata?.candidatesTokenCount || 0,
+            totalTokens: d.usageMetadata?.totalTokenCount || 0,
+        };
+        return {
+            content,
+            usage,
+            latency: this.buildLatency(startTime, endTime, usage.outputTokens),
+            model: this.config.model,
+            provider: 'google',
+            cached: false,
+            finishReason: d.candidates?.[0]?.finishReason === 'STOP' ? 'stop' : undefined,
+            raw: data,
+        };
+    }
+    parseCohereResponse(data, startTime, endTime) {
+        const d = data;
+        const content = d.text || '';
+        const usage = {
+            inputTokens: d.meta?.tokens?.input_tokens || 0,
+            outputTokens: d.meta?.tokens?.output_tokens || 0,
+            totalTokens: (d.meta?.tokens?.input_tokens || 0) + (d.meta?.tokens?.output_tokens || 0),
+        };
+        return {
+            content,
+            usage,
+            latency: this.buildLatency(startTime, endTime, usage.outputTokens),
+            model: this.config.model,
+            provider: 'cohere',
+            cached: false,
+            finishReason: d.finish_reason === 'COMPLETE' ? 'stop' : undefined,
+            raw: data,
+        };
+    }
+    buildLatency(startTime, endTime, outputTokens) {
+        const total = endTime - startTime;
+        return {
+            ttft: total,
+            tps: outputTokens > 0 ? (outputTokens / (total / 1000)) : 0,
+            total,
+        };
+    }
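`buildLatency` reports throughput as output tokens per second of wall-clock time; in this non-streaming path `ttft` is simply the full round trip. Restated as a standalone function with a worked example:

```ts
// Standalone restatement of buildLatency above, with a worked example.
function buildLatency(startTime: number, endTime: number, outputTokens: number) {
  const total = endTime - startTime; // wall-clock ms
  return {
    ttft: total, // non-streaming: first and last byte arrive together
    tps: outputTokens > 0 ? outputTokens / (total / 1000) : 0,
    total,
  };
}

console.log(buildLatency(0, 3000, 150)); // { ttft: 3000, tps: 50, total: 3000 }
```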
+    async *parseSSEStream(response) {
+        const reader = response.body?.getReader();
+        if (!reader) {
+            throw new Error('No response body');
+        }
+        const decoder = new TextDecoder();
+        let buffer = '';
+        let firstChunkTime;
+        const startTime = performance.now();
+        try {
+            while (true) {
+                const { done, value } = await reader.read();
+                if (done) {
+                    break;
+                }
+                buffer += decoder.decode(value, { stream: true });
+                const lines = buffer.split('\n');
+                buffer = lines.pop() || '';
+                for (const line of lines) {
+                    const trimmed = line.trim();
+                    if (!trimmed || trimmed === 'data: [DONE]') {
+                        continue;
+                    }
+                    if (trimmed.startsWith('data: ')) {
+                        const jsonStr = trimmed.slice(6);
+                        try {
+                            const event = this.parseStreamChunk(jsonStr);
+                            if (event) {
+                                if (!firstChunkTime && event.type === 'text') {
+                                    firstChunkTime = performance.now();
+                                }
+                                yield event;
+                            }
+                        }
+                        catch {
+                        }
+                    }
+                }
+            }
+        }
+        finally {
+            reader.releaseLock();
+        }
+    }
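The generator above survives SSE events that arrive split across reads: it splits the buffer on newlines, pushes the trailing partial line back, and only parses complete `data:` lines, skipping the `data: [DONE]` sentinel. A self-contained sketch of that buffering pattern, with one event split across two chunks:

```ts
const chunks = ['data: {"a":', '1}\ndata: [DONE]\n'];
let buffer = '';
for (const chunk of chunks) {
  buffer += chunk;
  const lines = buffer.split('\n');
  buffer = lines.pop() || ''; // keep the trailing partial line for the next read
  for (const line of lines) {
    const trimmed = line.trim();
    if (!trimmed || trimmed === 'data: [DONE]') continue;
    if (trimmed.startsWith('data: ')) {
      console.log(JSON.parse(trimmed.slice(6))); // { a: 1 } — parsed only once complete
    }
  }
}
```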
+    parseStreamChunk(jsonStr) {
+        const data = JSON.parse(jsonStr);
+        const provider = this.config.provider;
+        if (provider === 'anthropic') {
+            return this.parseAnthropicStreamChunk(data);
+        }
+        return this.parseOpenAIStreamChunk(data);
+    }
+    parseOpenAIStreamChunk(data) {
+        const choice = data.choices?.[0];
+        if (!choice) {
+            if (data.usage) {
+                return {
+                    type: 'usage',
+                    usage: {
+                        inputTokens: data.usage.prompt_tokens || 0,
+                        outputTokens: data.usage.completion_tokens || 0,
+                        totalTokens: data.usage.total_tokens || 0,
+                    },
+                };
+            }
+            return null;
+        }
+        if (choice.delta?.content) {
+            return {
+                type: 'text',
+                content: choice.delta.content,
+            };
+        }
+        if (choice.finish_reason) {
+            return {
+                type: 'done',
+                finishReason: choice.finish_reason,
+            };
+        }
+        return null;
+    }
+    parseAnthropicStreamChunk(data) {
+        if (data.type === 'content_block_delta' && data.delta?.type === 'text_delta') {
+            return {
+                type: 'text',
+                content: data.delta.text || '',
+            };
+        }
+        if (data.type === 'message_stop') {
+            return {
+                type: 'done',
+                finishReason: 'stop',
+            };
+        }
+        if (data.type === 'message_delta' && data.message?.usage) {
+            return {
+                type: 'usage',
+                usage: {
+                    inputTokens: data.message.usage.input_tokens || 0,
+                    outputTokens: data.message.usage.output_tokens || 0,
+                    totalTokens: (data.message.usage.input_tokens || 0) + (data.message.usage.output_tokens || 0),
+                },
+            };
+        }
+        return null;
+    }
+    async *wrapStreamWithMemory(stream) {
+        let fullContent = '';
+        for await (const event of stream) {
+            if (event.type === 'text') {
+                fullContent += event.content;
+            }
+            yield event;
+        }
+        if (fullContent) {
+            this.memory.recordResponse(fullContent);
+        }
+    }
+}
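Downstream, `chatStream` yields typed events (`text` deltas, an optional trailing `usage`, a final `done`), and `wrapStreamWithMemory` records the concatenated text into memory once iteration finishes. A consumption sketch, reusing the illustrative `client` from the first example:

```ts
const stream = await client.ai.chatStream('Stream me a haiku.');
let text = '';
for await (const event of stream) {
  if (event.type === 'text') {
    text += event.content;                // incremental delta
    process.stdout.write(event.content);
  } else if (event.type === 'usage') {
    console.log('\ntokens:', event.usage.totalTokens);
  } else if (event.type === 'done') {
    console.log('finish:', event.finishReason);
  }
}
// once the loop completes, the full reply has been recorded into memory
```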
@@ -0,0 +1,35 @@
+import type { ChatMessage } from '../types/ai.js';
+import type { AIMemoryConfig } from '../types/ai-client.js';
+export declare class ConversationMemory {
+    private config;
+    private systemMessage;
+    private messages;
+    constructor(config?: AIMemoryConfig);
+    setSystemPrompt(prompt: string): void;
+    getSystemPrompt(): string;
+    addUserMessage(content: string): void;
+    addAssistantMessage(content: string): void;
+    addMessage(message: ChatMessage): void;
+    buildMessages(userPrompt: string): ChatMessage[];
+    recordResponse(content: string): void;
+    getMessages(): ChatMessage[];
+    getConversation(): readonly ChatMessage[];
+    getPairCount(): number;
+    clear(): void;
+    reset(): void;
+    setConfig(config: Partial<AIMemoryConfig>): void;
+    getConfig(): AIMemoryConfig;
+    private prune;
+    isEmpty(): boolean;
+    getMessageCount(): number;
+    toJSON(): {
+        config: AIMemoryConfig;
+        systemPrompt: string | null;
+        messages: ChatMessage[];
+    };
+    static fromJSON(data: {
+        config?: AIMemoryConfig;
+        systemPrompt?: string | null;
+        messages?: ChatMessage[];
+    }): ConversationMemory;
+}
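`ConversationMemory` is usable on its own: `buildMessages` appends the user turn and returns the system prompt plus history, and `recordResponse` closes the pair. A sketch; the import path is an assumption based on the new `dist/ai/index.js` entry in the file list:

```ts
import { ConversationMemory } from 'recker/ai'; // import path is an assumption

const memory = new ConversationMemory({ maxPairs: 2, systemPrompt: 'Be brief.' });
const outbound = memory.buildMessages('Hello'); // [system, user] — ready to send
memory.recordResponse('Hi!');                   // completes the user/assistant pair

console.log(memory.getPairCount());             // 1
console.log(memory.getMessages().length);       // 3 (system + user + assistant)
```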
@@ -0,0 +1,136 @@
+const DEFAULT_MAX_PAIRS = 12;
+export class ConversationMemory {
+    config;
+    systemMessage = null;
+    messages = [];
+    constructor(config = {}) {
+        this.config = {
+            maxPairs: config.maxPairs ?? DEFAULT_MAX_PAIRS,
+            systemPrompt: config.systemPrompt ?? '',
+        };
+        if (this.config.systemPrompt) {
+            this.systemMessage = {
+                role: 'system',
+                content: this.config.systemPrompt,
+            };
+        }
+    }
+    setSystemPrompt(prompt) {
+        this.config.systemPrompt = prompt;
+        if (prompt) {
+            this.systemMessage = {
+                role: 'system',
+                content: prompt,
+            };
+        }
+        else {
+            this.systemMessage = null;
+        }
+    }
+    getSystemPrompt() {
+        return this.config.systemPrompt;
+    }
+    addUserMessage(content) {
+        this.messages.push({
+            role: 'user',
+            content,
+        });
+        this.prune();
+    }
+    addAssistantMessage(content) {
+        this.messages.push({
+            role: 'assistant',
+            content,
+        });
+        this.prune();
+    }
+    addMessage(message) {
+        if (message.role === 'system') {
+            this.setSystemPrompt(typeof message.content === 'string' ? message.content : '');
+            return;
+        }
+        this.messages.push(message);
+        this.prune();
+    }
+    buildMessages(userPrompt) {
+        this.addUserMessage(userPrompt);
+        return this.getMessages();
+    }
+    recordResponse(content) {
+        this.addAssistantMessage(content);
+    }
+    getMessages() {
+        const result = [];
+        if (this.systemMessage) {
+            result.push(this.systemMessage);
+        }
+        result.push(...this.messages);
+        return result;
+    }
+    getConversation() {
+        return this.messages;
+    }
+    getPairCount() {
+        let pairs = 0;
+        for (let i = 0; i < this.messages.length - 1; i += 2) {
+            if (this.messages[i].role === 'user' &&
+                this.messages[i + 1]?.role === 'assistant') {
+                pairs++;
+            }
+        }
+        return pairs;
+    }
+    clear() {
+        this.messages = [];
+    }
+    reset() {
+        this.messages = [];
+        this.systemMessage = null;
+        this.config.systemPrompt = '';
+    }
+    setConfig(config) {
+        if (config.maxPairs !== undefined) {
+            this.config.maxPairs = config.maxPairs;
+            this.prune();
+        }
+        if (config.systemPrompt !== undefined) {
+            this.setSystemPrompt(config.systemPrompt);
+        }
+    }
+    getConfig() {
+        return { ...this.config };
+    }
+    prune() {
+        const maxMessages = this.config.maxPairs * 2;
+        if (this.messages.length > maxMessages) {
+            const excess = this.messages.length - maxMessages;
+            const toRemove = Math.ceil(excess / 2) * 2;
+            this.messages = this.messages.slice(toRemove);
+        }
+    }
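`prune` caps history at `maxPairs * 2` messages and always drops an even count from the front, so a user turn and its reply leave together. Worked through:

```ts
// maxPairs = 2  =>  maxMessages = 4
// 7 messages stored: excess = 7 - 4 = 3
// toRemove = Math.ceil(3 / 2) * 2 = 4   (rounded up to whole user/assistant pairs)
const messages = ['u1', 'a1', 'u2', 'a2', 'u3', 'a3', 'u4'];
console.log(messages.slice(4)); // ['u3', 'a3', 'u4'] — oldest pairs dropped together
```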
+    isEmpty() {
+        return this.messages.length === 0;
+    }
+    getMessageCount() {
+        return this.messages.length;
+    }
+    toJSON() {
+        return {
+            config: this.config,
+            systemPrompt: this.systemMessage?.content,
+            messages: [...this.messages],
+        };
+    }
+    static fromJSON(data) {
+        const memory = new ConversationMemory(data.config);
+        if (data.systemPrompt) {
+            memory.setSystemPrompt(data.systemPrompt);
+        }
+        if (data.messages) {
+            for (const msg of data.messages) {
+                memory.addMessage(msg);
+            }
+        }
+        return memory;
+    }
+}
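`toJSON`/`fromJSON` make a conversation serializable, e.g. for persisting between CLI runs; `fromJSON` replays messages through `addMessage`, so an oversized saved history is pruned on load. A round-trip sketch, same assumed import as above:

```ts
import { ConversationMemory } from 'recker/ai'; // import path is an assumption

const memory = new ConversationMemory({ maxPairs: 8, systemPrompt: 'Be brief.' });
memory.addUserMessage('Hello');
memory.addAssistantMessage('Hi!');

const saved = JSON.stringify(memory.toJSON());
const restored = ConversationMemory.fromJSON(JSON.parse(saved));
console.log(restored.getSystemPrompt()); // 'Be brief.'
console.log(restored.getMessageCount()); // 2 — replayed via addMessage, so pruning still applies
```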
@@ -1,4 +1,5 @@
 import { ClientOptions, Middleware, ReckerRequest, ReckerResponse, RequestOptions, CacheStorage, PageResult } from '../types/index.js';
+import type { ClientAI, ClientOptionsWithAI } from '../types/ai-client.js';
 import { RequestPromise } from './request-promise.js';
 import { PaginationOptions } from '../plugins/pagination.js';
 import { RetryOptions } from '../plugins/retry.js';
@@ -36,7 +37,9 @@ export declare class Client {
     private cookieJar?;
     private cookieIgnoreInvalid;
     private defaultTimeout?;
-    constructor(options?: ExtendedClientOptions);
+    private _aiConfig?;
+    private _ai?;
+    constructor(options?: ExtendedClientOptions & Partial<ClientOptionsWithAI>);
     private createLoggingMiddleware;
     private createMaxSizeMiddleware;
     private setupCookieJar;
@@ -113,6 +116,8 @@ export declare class Client {
     whois(query: string, options?: WhoisOptions): Promise<WhoisResult>;
     isDomainAvailable(domain: string, options?: WhoisOptions): Promise<boolean>;
     hls(manifestUrl: string, options?: HlsOptions): HlsPromise;
+    get ai(): ClientAI;
+    get hasAI(): boolean;
 }
 export declare function createClient(options?: ExtendedClientOptions): Client;
 export {};
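Because `ai` is a throwing getter (see the implementation in the next hunks), `hasAI` is the safe feature check for code handed an arbitrary `Client`:

```ts
import type { Client } from 'recker'; // import path is an assumption

function describeClient(client: Client): string {
  // hasAI never throws; reading .ai without an AI preset throws ConfigurationError
  return client.hasAI
    ? `AI-enabled: ${client.ai.provider} / ${client.ai.model}`
    : 'plain HTTP client';
}
```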
@@ -1,4 +1,5 @@
 import { consoleLogger } from '../types/index.js';
+import { ClientAIImpl } from '../ai/client-ai.js';
 import { HttpRequest } from './request.js';
 import { UndiciTransport } from '../transport/undici.js';
 import { RequestPromise } from './request-promise.js';
@@ -41,6 +42,8 @@ export class Client {
     cookieJar;
     cookieIgnoreInvalid = false;
     defaultTimeout;
+    _aiConfig;
+    _ai;
     constructor(options = {}) {
         this.baseUrl = options.baseUrl || '';
         this.middlewares = options.middlewares || [];
@@ -150,6 +153,9 @@ export class Client {
         if (options.cookies) {
             this.setupCookieJar(options.cookies);
         }
+        if (options._aiConfig) {
+            this._aiConfig = options._aiConfig;
+        }
         if (this.maxResponseSize !== undefined) {
             this.middlewares.push(this.createMaxSizeMiddleware(this.maxResponseSize));
         }
@@ -661,6 +667,18 @@ export class Client {
     hls(manifestUrl, options = {}) {
         return new HlsPromise(this, manifestUrl, options);
     }
+    get ai() {
+        if (!this._ai) {
+            if (!this._aiConfig) {
+                throw new ConfigurationError('AI features require an AI-enabled preset. Use createClient(openai({...})), createClient(anthropic({...})), etc.', { configKey: '_aiConfig' });
+            }
+            this._ai = new ClientAIImpl(this, this._aiConfig);
+        }
+        return this._ai;
+    }
+    get hasAI() {
+        return this._aiConfig !== undefined;
+    }
 }
 export function createClient(options = {}) {
     return new Client(options);
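The `_aiConfig` option consumed here is what the updated presets (the `presets/*.js` entries in the file list) attach to the options they return; the client merely stores it and defers building `ClientAIImpl` until `.ai` is first read. A sketch of that flow with a hypothetical hand-rolled config rather than real preset output:

```ts
import { Client } from 'recker'; // import path is an assumption

const client = new Client({
  baseUrl: 'https://api.openai.com/v1',
  // hypothetical minimal config; real presets assemble _aiConfig (and headers) for you
  _aiConfig: { provider: 'openai', model: 'gpt-4o-mini' },
});

console.log(client.hasAI);        // true — the config is stored at construction
await client.ai.prompt('ping');   // first .ai access lazily constructs ClientAIImpl
```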
@@ -215,8 +215,17 @@ export class UndiciTransport {
         const uploadTotal = contentLengthHeader ? parseInt(contentLengthHeader, 10) : undefined;
         let currentUrl;
         if (this.baseUrl) {
-
-
+            if (req.url.startsWith(this.baseUrl)) {
+                currentUrl = req.url;
+            }
+            else if (req.url.startsWith('http://') || req.url.startsWith('https://')) {
+                currentUrl = req.url;
+            }
+            else {
+                const base = this.baseUrl.endsWith('/') ? this.baseUrl.slice(0, -1) : this.baseUrl;
+                const path = req.url.startsWith('/') ? req.url : '/' + req.url;
+                currentUrl = base + path;
+            }
         }
         else {
             currentUrl = req.url;
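The rewritten branch tightens base-URL joining: URLs already under `baseUrl` and absolute `http(s)` URLs pass through untouched, and only genuinely relative paths are joined, with slash normalization on both sides. The rules, restated:

```ts
// assuming baseUrl = 'https://api.example.com/v1/'
// 'https://api.example.com/v1/chat'  -> unchanged (already under baseUrl)
// 'https://other.example.org/x'      -> unchanged (absolute URL wins)
// 'chat/completions'                 -> 'https://api.example.com/v1/chat/completions'
// '/chat/completions'                -> 'https://api.example.com/v1/chat/completions'
function join(baseUrl: string, url: string): string {
  if (url.startsWith(baseUrl)) return url;
  if (url.startsWith('http://') || url.startsWith('https://')) return url;
  const base = baseUrl.endsWith('/') ? baseUrl.slice(0, -1) : baseUrl;
  return base + (url.startsWith('/') ? url : '/' + url);
}
```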