ak-gemini 1.2.0 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +259 -294
- package/base.js +485 -0
- package/chat.js +87 -0
- package/code-agent.js +563 -0
- package/index.cjs +1550 -1215
- package/index.js +38 -1501
- package/json-helpers.js +352 -0
- package/message.js +170 -0
- package/package.json +14 -7
- package/tool-agent.js +311 -0
- package/transformer.js +502 -0
- package/types.d.ts +353 -229
- package/agent.js +0 -481
- package/tools.js +0 -134
package/transformer.js
ADDED
|
@@ -0,0 +1,502 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @fileoverview Transformer class — AI-powered JSON transformation via few-shot learning.
|
|
3
|
+
* Extends BaseGemini with validation, retry logic, and structured JSON output.
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import BaseGemini from './base.js';
|
|
7
|
+
import { extractJSON, isJSON } from './json-helpers.js';
|
|
8
|
+
import log from './logger.js';
|
|
9
|
+
import fs from 'fs/promises';
|
|
10
|
+
import path from 'path';
|
|
11
|
+
|
|
12
|
+
const DEFAULT_SYSTEM_INSTRUCTIONS = `
|
|
13
|
+
You are an expert JSON transformation engine. Your task is to accurately convert data payloads from one format to another.
|
|
14
|
+
|
|
15
|
+
You will be provided with example transformations (Source JSON -> Target JSON).
|
|
16
|
+
|
|
17
|
+
Learn the mapping rules from these examples.
|
|
18
|
+
|
|
19
|
+
When presented with new Source JSON, apply the learned transformation rules to produce a new Target JSON payload.
|
|
20
|
+
|
|
21
|
+
Always respond ONLY with a valid JSON object that strictly adheres to the expected output format.
|
|
22
|
+
|
|
23
|
+
Do not include any additional text, explanations, or formatting before or after the JSON object.
|
|
24
|
+
`;
|
|
25
|
+
|
|
26
|
+
/**
|
|
27
|
+
* @typedef {import('./types').TransformerOptions} TransformerOptions
|
|
28
|
+
* @typedef {import('./types').AsyncValidatorFunction} AsyncValidatorFunction
|
|
29
|
+
* @typedef {import('./types').TransformationExample} TransformationExample
|
|
30
|
+
* @typedef {import('./types').UsageData} UsageData
|
|
31
|
+
*/
|
|
32
|
+
|
|
33
|
+
/**
 * AI-powered JSON transformation using few-shot learning.
 *
 * Seed with example input/output pairs, then send new payloads to transform.
 * Supports validation, automatic retry with AI-powered error correction,
 * and structured JSON output.
 *
 * @example
 * ```javascript
 * import { Transformer } from 'ak-gemini';
 *
 * const t = new Transformer({
 *   promptKey: 'INPUT',
 *   answerKey: 'OUTPUT'
 * });
 *
 * await t.seed([
 *   { INPUT: { name: "Alice" }, OUTPUT: { greeting: "Hello, Alice!" } },
 *   { INPUT: { name: "Bob" }, OUTPUT: { greeting: "Hello, Bob!" } }
 * ]);
 *
 * const result = await t.send({ name: "Charlie" });
 * // => { greeting: "Hello, Charlie!" }
 * ```
 */
class Transformer extends BaseGemini {
  /**
   * @param {TransformerOptions} [options={}] - Configuration. When
   *   `systemPrompt` is absent, DEFAULT_SYSTEM_INSTRUCTIONS is used.
   * @throws {Error} If the prompt key and answer key resolve to the same value.
   */
  constructor(options = {}) {
    // Set Transformer-specific systemPrompt default before calling super.
    // A shallow copy is made so the caller's options object is never mutated.
    if (options.systemPrompt === undefined) {
      options = { ...options, systemPrompt: DEFAULT_SYSTEM_INSTRUCTIONS };
    }

    super(options);

    // ── JSON-specific config ──
    this.chatConfig.responseMimeType = 'application/json';
    // `??` so an explicit `onlyJSON: false` is honored.
    this.onlyJSON = options.onlyJSON ?? true;

    // ── Response schema ──
    if (options.responseSchema) {
      this.chatConfig.responseSchema = options.responseSchema;
    }

    // ── Example key mapping (sourceKey/targetKey accepted as aliases) ──
    this.promptKey = options.promptKey || options.sourceKey || 'PROMPT';
    this.answerKey = options.answerKey || options.targetKey || 'ANSWER';
    this.contextKey = options.contextKey || 'CONTEXT';
    this.explanationKey = options.explanationKey || 'EXPLANATION';
    this.systemPromptKey = options.systemPromptKey || 'SYSTEM';

    if (this.promptKey === this.answerKey) {
      throw new Error("Source and target keys cannot be the same. Please provide distinct keys.");
    }

    // ── Examples source (fallbacks used by seed() when no array is passed) ──
    this.examplesFile = options.examplesFile || null;
    this.exampleData = options.exampleData || null;

    // ── Validation & retry ──
    this.asyncValidator = options.asyncValidator || null;
    // `??` (not `||`) so `maxRetries: 0` (no retries) and `retryDelay: 0`
    // (no backoff) are honored — matches how send() reads these same options.
    this.maxRetries = options.maxRetries ?? 3;
    this.retryDelay = options.retryDelay ?? 1000;

    // ── Grounding ──
    this.enableGrounding = options.enableGrounding ?? false;
    this.groundingConfig = options.groundingConfig || {};

    log.debug(`Transformer keys — Source: "${this.promptKey}", Target: "${this.answerKey}", Context: "${this.contextKey}"`);
  }

  // ── Chat Create Options Override ──────────────────────────────────────────

  /**
   * Extends the base chat-creation options with the googleSearch grounding
   * tool when grounding is enabled.
   * @protected
   * @returns {Object} Options object for chat session creation.
   */
  _getChatCreateOptions() {
    const opts = super._getChatCreateOptions();

    if (this.enableGrounding) {
      opts.config.tools = [{ googleSearch: this.groundingConfig }];
      log.debug(`Search grounding ENABLED (WARNING: costs $35/1k queries)`);
    }

    return opts;
  }

  // ── Seeding ──────────────────────────────────────────────────────────────

  /**
   * Seeds the chat with transformation examples using the configured key mapping.
   * Overrides base seed() to use Transformer-specific keys and support
   * examplesFile/exampleData fallbacks.
   *
   * Resolution order when no array is passed: `examplesFile` (JSON on disk),
   * then `exampleData` (array from options), otherwise seeding is skipped.
   *
   * @param {TransformationExample[]} [examples] - Array of example objects
   * @returns {Promise<Array>} The updated chat history
   * @throws {Error} If the examples file cannot be read/parsed, or if
   *   exampleData is present but not an array.
   */
  async seed(examples) {
    await this.init();

    if (!examples || !Array.isArray(examples) || examples.length === 0) {
      if (this.examplesFile) {
        log.debug(`No examples provided, loading from file: ${this.examplesFile}`);
        try {
          const filePath = path.resolve(this.examplesFile);
          const raw = await fs.readFile(filePath, 'utf-8');
          examples = JSON.parse(raw);
        } catch (err) {
          throw new Error(`Could not load examples from file: ${this.examplesFile}. ${err.message}`);
        }
      } else if (this.exampleData) {
        log.debug(`Using example data provided in options.`);
        if (Array.isArray(this.exampleData)) {
          examples = this.exampleData;
        } else {
          throw new Error(`Invalid example data provided. Expected an array of examples.`);
        }
      } else {
        log.debug("No examples provided and no examples file specified. Skipping seeding.");
        return this.getHistory();
      }
    }

    // Delegate to base.seed() with our key mapping
    return await super.seed(examples, {
      promptKey: this.promptKey,
      answerKey: this.answerKey,
      contextKey: this.contextKey,
      explanationKey: this.explanationKey,
      systemPromptKey: this.systemPromptKey
    });
  }

  // ── Primary Send Method ──────────────────────────────────────────────────

  /**
   * Transforms a payload using the seeded examples and model.
   * Includes validation and automatic retry (exponential backoff) with
   * AI-powered error correction via rebuild().
   *
   * @param {Object|string} payload - The source payload to transform
   * @param {import('./types').SendOptions} [opts={}] - Per-message options
   *   (stateless, maxRetries, retryDelay, labels, enableGrounding, groundingConfig)
   * @param {AsyncValidatorFunction|null} [validatorFn] - Validator for this call (overrides constructor validator)
   * @returns {Promise<Object>} The transformed payload
   * @throws {Error} If the chat session is not initialized, or if all
   *   attempts fail.
   */
  async send(payload, opts = {}, validatorFn = null) {
    if (!this.chatSession) {
      throw new Error("Chat session not initialized. Please call init() first.");
    }

    // Use the validator from this call, or fall back to the constructor validator
    const validator = validatorFn || this.asyncValidator;

    // Handle stateless messages (one-off generateContent, no history)
    if (opts.stateless) {
      return await this._statelessSend(payload, opts, validator);
    }

    const maxRetries = opts.maxRetries ?? this.maxRetries;
    const retryDelay = opts.retryDelay ?? this.retryDelay;

    // Handle per-message grounding override. A local closure is used for the
    // restore step so the caller's opts object is never mutated.
    let restoreGrounding = null;
    if (opts.enableGrounding !== undefined && opts.enableGrounding !== this.enableGrounding) {
      const originalGrounding = this.enableGrounding;
      const originalConfig = this.groundingConfig;
      try {
        this.enableGrounding = opts.enableGrounding;
        this.groundingConfig = opts.groundingConfig ?? this.groundingConfig;
        await this.init(true);
      } catch (error) {
        // Re-init failed: roll back the override before propagating.
        this.enableGrounding = originalGrounding;
        this.groundingConfig = originalConfig;
        throw error;
      }
      restoreGrounding = async () => {
        this.enableGrounding = originalGrounding;
        this.groundingConfig = originalConfig;
        await this.init(true);
      };
    }

    // Prepare the payload (stringify objects, pass strings through)
    let lastPayload = this._preparePayload(payload);

    // Extract per-message labels
    const messageOptions = {};
    if (opts.labels) messageOptions.labels = opts.labels;

    // Reset cumulative usage tracking for this send() call
    this._cumulativeUsage = { promptTokens: 0, responseTokens: 0, totalTokens: 0, attempts: 0 };

    let lastError = null;

    for (let attempt = 0; attempt <= maxRetries; attempt++) {
      try {
        // First attempt transforms the original payload; retries ask the
        // model to repair the previous (failed) output.
        const transformedPayload = (attempt === 0)
          ? await this.rawSend(lastPayload, messageOptions)
          : await this.rebuild(lastPayload, lastError.message);

        // Accumulate token usage across attempts
        if (this.lastResponseMetadata) {
          this._cumulativeUsage.promptTokens += this.lastResponseMetadata.promptTokens || 0;
          this._cumulativeUsage.responseTokens += this.lastResponseMetadata.responseTokens || 0;
          this._cumulativeUsage.totalTokens += this.lastResponseMetadata.totalTokens || 0;
          this._cumulativeUsage.attempts = attempt + 1;
        }

        lastPayload = transformedPayload;

        // Validate — a throw here routes into the retry path below
        if (validator) {
          await validator(transformedPayload);
        }

        log.debug(`Transformation succeeded on attempt ${attempt + 1}`);

        if (restoreGrounding) await restoreGrounding();
        return transformedPayload;

      } catch (error) {
        lastError = error;
        log.warn(`Attempt ${attempt + 1} failed: ${error.message}`);

        if (attempt >= maxRetries) {
          log.error(`All ${maxRetries + 1} attempts failed.`);
          if (restoreGrounding) await restoreGrounding();
          throw new Error(`Transformation failed after ${maxRetries + 1} attempts. Last error: ${error.message}`);
        }

        // Exponential backoff: retryDelay * 2^attempt
        const delay = retryDelay * Math.pow(2, attempt);
        await new Promise(res => setTimeout(res, delay));
      }
    }
  }

  // ── Raw Send ─────────────────────────────────────────────────────────────

  /**
   * Sends a single prompt to the model and parses the JSON response.
   * No validation or retry logic.
   *
   * @param {Object|string} payload - The source payload
   * @param {Object} [messageOptions={}] - Per-message options (e.g., labels)
   * @returns {Promise<Object>} The transformed payload
   * @throws {Error} If the chat session is not initialized, or the model
   *   response cannot be parsed as JSON.
   */
  async rawSend(payload, messageOptions = {}) {
    if (!this.chatSession) {
      throw new Error("Chat session not initialized.");
    }

    const actualPayload = typeof payload === 'string'
      ? payload
      : JSON.stringify(payload, null, 2);

    // Labels are only supported on Vertex AI requests.
    const mergedLabels = { ...this.labels, ...(messageOptions.labels || {}) };
    const hasLabels = this.vertexai && Object.keys(mergedLabels).length > 0;

    try {
      const sendParams = { message: actualPayload };
      if (hasLabels) {
        sendParams.config = { labels: mergedLabels };
      }

      const result = await this.chatSession.sendMessage(sendParams);

      this._captureMetadata(result);

      if (result.usageMetadata && log.level !== 'silent') {
        log.debug(`API response: model=${result.modelVersion || 'unknown'}, tokens=${result.usageMetadata.totalTokenCount}`);
      }

      const modelResponse = result.text;
      const extractedJSON = extractJSON(modelResponse);

      // Unwrap the 'data' property if it exists.
      // NOTE(review): a falsy `data` value (0, '', null, false) is NOT
      // unwrapped by this truthiness check — confirm whether that is intended.
      if (extractedJSON?.data) {
        return extractedJSON.data;
      }
      return extractedJSON;

    } catch (error) {
      if (this.onlyJSON && error.message.includes("Could not extract valid JSON")) {
        throw new Error(`Invalid JSON response from Gemini: ${error.message}`);
      }
      throw new Error(`Transformation failed: ${error.message}`);
    }
  }

  // ── Rebuild ──────────────────────────────────────────────────────────────

  /**
   * Asks the model to fix a payload that failed validation.
   *
   * @param {Object} lastPayload - The payload that failed
   * @param {string} serverError - The error message
   * @returns {Promise<Object>} Corrected payload
   * @throws {Error} If the model call fails or the reply is not valid JSON.
   */
  async rebuild(lastPayload, serverError) {
    await this.init();
    const prompt = `
The previous JSON payload (below) failed validation.
The server's error message is quoted afterward.

---------------- BAD PAYLOAD ----------------
${JSON.stringify(lastPayload, null, 2)}


---------------- SERVER ERROR ----------------
${serverError}

Please return a NEW JSON payload that corrects the issue.
Respond with JSON only – no comments or explanations.
`;

    let result;
    try {
      result = await this.chatSession.sendMessage({ message: prompt });
      this._captureMetadata(result);
    } catch (err) {
      throw new Error(`Gemini call failed while repairing payload: ${err.message}`);
    }

    try {
      // NOTE(review): unlike rawSend(), this path uses plain JSON.parse and
      // does not unwrap a 'data' envelope — confirm the asymmetry is intended.
      const text = result.text ?? result.response ?? '';
      return typeof text === 'object' ? text : JSON.parse(text);
    } catch (parseErr) {
      throw new Error(`Gemini returned non-JSON while repairing payload: ${parseErr.message}`);
    }
  }

  // ── Stateless Send ───────────────────────────────────────────────────────

  /**
   * Sends a one-off message using generateContent (not chat).
   * Does NOT affect chat history. Seeded examples (the first
   * `exampleCount` history items) are still included as context.
   * No retry logic: a validator failure propagates immediately.
   * @param {Object|string} payload
   * @param {Object} [opts={}]
   * @param {AsyncValidatorFunction|null} [validatorFn]
   * @returns {Promise<Object>}
   * @private
   */
  async _statelessSend(payload, opts = {}, validatorFn = null) {
    if (!this.chatSession) {
      throw new Error("Chat session not initialized. Please call init() first.");
    }

    const payloadStr = typeof payload === 'string'
      ? payload
      : JSON.stringify(payload, null, 2);

    const contents = [];

    // Include seeded examples
    if (this.exampleCount > 0) {
      const history = this.chatSession.getHistory();
      const exampleHistory = history.slice(0, this.exampleCount);
      contents.push(...exampleHistory);
    }

    contents.push({ role: 'user', parts: [{ text: payloadStr }] });

    const mergedLabels = { ...this.labels, ...(opts.labels || {}) };

    const result = await this.genAIClient.models.generateContent({
      model: this.modelName,
      contents: contents,
      config: {
        ...this.chatConfig,
        // Labels only apply on Vertex AI
        ...(this.vertexai && Object.keys(mergedLabels).length > 0 && { labels: mergedLabels })
      }
    });

    this._captureMetadata(result);

    // Stateless calls are single-shot, so usage is exactly this response.
    this._cumulativeUsage = {
      promptTokens: this.lastResponseMetadata.promptTokens,
      responseTokens: this.lastResponseMetadata.responseTokens,
      totalTokens: this.lastResponseMetadata.totalTokens,
      attempts: 1
    };

    const modelResponse = result.text;
    const extractedJSON = extractJSON(modelResponse);
    // Same truthy 'data' unwrap as rawSend()
    let transformedPayload = extractedJSON?.data ? extractedJSON.data : extractedJSON;

    if (validatorFn) {
      await validatorFn(transformedPayload);
    }

    return transformedPayload;
  }

  // ── History Management ───────────────────────────────────────────────────

  /**
   * Clears conversation history while preserving seeded examples
   * (the first `exampleCount` history items).
   * @returns {Promise<void>}
   */
  async clearHistory() {
    if (!this.chatSession) {
      log.warn("Cannot clear history: chat not initialized.");
      return;
    }

    const history = this.chatSession.getHistory();
    const exampleHistory = history.slice(0, this.exampleCount || 0);

    this.chatSession = this._createChatSession(exampleHistory);

    this.lastResponseMetadata = null;
    this._cumulativeUsage = { promptTokens: 0, responseTokens: 0, totalTokens: 0, attempts: 0 };

    log.debug(`Conversation cleared. Preserved ${exampleHistory.length} example items.`);
  }

  /**
   * Fully resets the chat session, clearing all history including examples.
   * @returns {Promise<void>}
   */
  async reset() {
    if (this.chatSession) {
      log.debug("Resetting chat session...");
      this.chatSession = this._createChatSession([]);
      this.exampleCount = 0;
      log.debug("Chat session reset.");
    } else {
      log.warn("Cannot reset: chat not yet initialized.");
    }
  }

  /**
   * Updates system prompt and reinitializes the chat session.
   * @param {string} newPrompt - The new system prompt
   * @returns {Promise<void>}
   * @throws {Error} If newPrompt is not a non-empty string.
   */
  async updateSystemPrompt(newPrompt) {
    if (!newPrompt || typeof newPrompt !== 'string') {
      throw new Error('System prompt must be a non-empty string');
    }

    this.systemPrompt = newPrompt.trim();
    this.chatConfig.systemInstruction = this.systemPrompt;

    log.debug('Updating system prompt and reinitializing chat...');
    await this.init(true);
  }

  // ── Private Helpers ──────────────────────────────────────────────────────

  /**
   * Normalizes a payload to a string for sending.
   * Objects are pretty-printed JSON; strings pass through; numbers/booleans
   * are stringified; null/undefined become "{}".
   * @param {*} payload
   * @returns {string}
   * @throws {Error} For unsupported payload types (e.g., functions, symbols).
   * @private
   */
  _preparePayload(payload) {
    if (payload && isJSON(payload)) {
      return JSON.stringify(payload, null, 2);
    } else if (typeof payload === 'string') {
      return payload;
    } else if (typeof payload === 'boolean' || typeof payload === 'number') {
      return payload.toString();
    } else if (payload === null || payload === undefined) {
      return JSON.stringify({});
    } else {
      throw new Error("Invalid source payload. Must be a JSON object or string.");
    }
  }
}
|
|
501
|
+
|
|
502
|
+
// Default export: the Transformer class defined above.
export default Transformer;
|