ak-gemini 1.0.54 → 1.0.55
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.cjs +26 -3
- package/index.js +40 -6
- package/package.json +15 -4
- package/types.d.ts +83 -0
package/index.cjs
CHANGED
@@ -56,10 +56,14 @@ var logger_default = logger;
 // index.js
 var import_meta = {};
 import_dotenv.default.config();
-var { NODE_ENV = "unknown", GEMINI_API_KEY } = process.env;
+var { NODE_ENV = "unknown", GEMINI_API_KEY, LOG_LEVEL = "" } = process.env;
 if (NODE_ENV === "dev") logger_default.level = "debug";
 if (NODE_ENV === "test") logger_default.level = "warn";
 if (NODE_ENV.startsWith("prod")) logger_default.level = "error";
+if (LOG_LEVEL) {
+  logger_default.level = LOG_LEVEL;
+  logger_default.debug(`Setting log level to ${LOG_LEVEL}`);
+}
 var DEFAULT_SAFETY_SETTINGS = [
   { category: import_genai.HarmCategory.HARM_CATEGORY_HARASSMENT, threshold: import_genai.HarmBlockThreshold.BLOCK_NONE },
   { category: import_genai.HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT, threshold: import_genai.HarmBlockThreshold.BLOCK_NONE }
@@ -161,6 +165,12 @@ async function initChat(force = false) {
     config: this.chatConfig,
     history: []
   });
+  try {
+    await this.genAIClient.models.list();
+    logger_default.debug("Gemini API connection successful.");
+  } catch (e) {
+    throw new Error(`Gemini chat initialization failed: ${e.message}`);
+  }
   logger_default.debug("Gemini chat session initialized.");
 }
 async function seedWithExamples(examples) {
@@ -173,6 +183,13 @@ async function seedWithExamples(examples) {
     } catch (err) {
       throw new Error(`Could not load examples from file: ${this.examplesFile}. Please check the file path and format.`);
     }
+  } else if (this.exampleData) {
+    logger_default.debug(`Using example data provided in options.`);
+    if (Array.isArray(this.exampleData)) {
+      examples = this.exampleData;
+    } else {
+      throw new Error(`Invalid example data provided. Expected an array of examples.`);
+    }
   } else {
     logger_default.debug("No examples provided and no examples file specified. Skipping seeding.");
     return;
@@ -214,14 +231,16 @@ ${contextText}
     }
   }
   const currentHistory = this?.chat?.getHistory() || [];
+  logger_default.debug(`Adding ${historyToAdd.length} examples to chat history (${currentHistory.length} current examples)...`);
   this.chat = await this.genAIClient.chats.create({
     model: this.modelName,
     // @ts-ignore
     config: this.chatConfig,
     history: [...currentHistory, ...historyToAdd]
   });
-
-
+  const newHistory = this.chat.getHistory();
+  logger_default.debug(`Created new chat session with ${newHistory.length} examples.`);
+  return newHistory;
 }
 async function rawMessage(sourcePayload) {
   if (!this.chat) {
@@ -255,6 +274,10 @@ async function prepareAndValidateMessage(sourcePayload, options = {}, validatorF
     lastPayload = JSON.stringify(sourcePayload, null, 2);
   } else if (typeof sourcePayload === "string") {
     lastPayload = sourcePayload;
+  } else if (typeof sourcePayload === "boolean" || typeof sourcePayload === "number") {
+    lastPayload = sourcePayload.toString();
+  } else if (sourcePayload === null || sourcePayload === void 0) {
+    lastPayload = JSON.stringify({});
   } else {
     throw new Error("Invalid source payload. Must be a JSON object or string.");
   }
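For reference, the LOG_LEVEL hunk above means an environment variable can now override the level derived from NODE_ENV, because the LOG_LEVEL check runs last. A minimal sketch of the intended use, assuming a pino-style level name and that the variable is set before the module is first loaded (the script below is illustrative and not part of the package):

// set-before-import.mts (hypothetical)
process.env.NODE_ENV = "production";  // alone, this would pin the logger to "error"
process.env.LOG_LEVEL = "debug";      // new in 1.0.55: overrides the NODE_ENV default
await import("ak-gemini");            // the level is applied at module load time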
package/index.js
CHANGED
@@ -18,7 +18,7 @@
 //env
 import dotenv from 'dotenv';
 dotenv.config();
-const { NODE_ENV = "unknown", GEMINI_API_KEY } = process.env;
+const { NODE_ENV = "unknown", GEMINI_API_KEY, LOG_LEVEL = "" } = process.env;
 
 
 
@@ -33,6 +33,11 @@ if (NODE_ENV === 'dev') log.level = 'debug';
 if (NODE_ENV === 'test') log.level = 'warn';
 if (NODE_ENV.startsWith('prod')) log.level = 'error';
 
+if (LOG_LEVEL) {
+  log.level = LOG_LEVEL;
+  log.debug(`Setting log level to ${LOG_LEVEL}`);
+}
+
 
 
 // defaults
@@ -196,6 +201,15 @@ async function initChat(force = false) {
     history: [],
   });
 
+  try {
+    await this.genAIClient.models.list();
+    log.debug("Gemini API connection successful.");
+  } catch (e) {
+    throw new Error(`Gemini chat initialization failed: ${e.message}`);
+  }
+
+
+
   log.debug("Gemini chat session initialized.");
 }
 
@@ -218,7 +232,18 @@ async function seedWithExamples(examples) {
   catch (err) {
     throw new Error(`Could not load examples from file: ${this.examplesFile}. Please check the file path and format.`);
   }
-  }
+  }
+
+  else if (this.exampleData) {
+    log.debug(`Using example data provided in options.`);
+    if (Array.isArray(this.exampleData)) {
+      examples = this.exampleData;
+    } else {
+      throw new Error(`Invalid example data provided. Expected an array of examples.`);
+    }
+  }
+
+  else {
     log.debug("No examples provided and no examples file specified. Skipping seeding.");
     return;
   }
@@ -267,8 +292,9 @@ async function seedWithExamples(examples) {
 
   }
 
-  const currentHistory = this?.chat?.getHistory() || [];
 
+  const currentHistory = this?.chat?.getHistory() || [];
+  log.debug(`Adding ${historyToAdd.length} examples to chat history (${currentHistory.length} current examples)...`);
   this.chat = await this.genAIClient.chats.create({
     model: this.modelName,
     // @ts-ignore
@@ -276,9 +302,10 @@ async function seedWithExamples(examples) {
     history: [...currentHistory, ...historyToAdd],
   });
 
-  log.debug("Transformation examples seeded successfully.");
 
-
+  const newHistory = this.chat.getHistory();
+  log.debug(`Created new chat session with ${newHistory.length} examples.`);
+  return newHistory;
 }
 
 /**
@@ -346,7 +373,14 @@ async function prepareAndValidateMessage(sourcePayload, options = {}, validatorF
     lastPayload = JSON.stringify(sourcePayload, null, 2);
   } else if (typeof sourcePayload === 'string') {
     lastPayload = sourcePayload;
-  }
+  }
+  else if (typeof sourcePayload === 'boolean' || typeof sourcePayload === 'number') {
+    lastPayload = sourcePayload.toString();
+  }
+  else if (sourcePayload === null || sourcePayload === undefined) {
+    lastPayload = JSON.stringify({}); // Convert null/undefined to empty object
+  }
+  else {
    throw new Error("Invalid source payload. Must be a JSON object or string.");
  }
 
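The prepareAndValidateMessage hunk widens what a source payload can be before it is stringified and sent. A self-contained sketch that mirrors the branch logic above (the leading object check is assumed, since it sits above the hunk; this helper is illustrative and not an export of the package):

// normalize-payload.ts (hypothetical helper mirroring the 1.0.55 branch order)
function normalizePayload(sourcePayload: unknown): string {
  if (typeof sourcePayload === "object" && sourcePayload !== null) {
    return JSON.stringify(sourcePayload, null, 2);          // objects: pretty-printed JSON
  } else if (typeof sourcePayload === "string") {
    return sourcePayload;                                    // strings: passed through
  } else if (typeof sourcePayload === "boolean" || typeof sourcePayload === "number") {
    return sourcePayload.toString();                         // new in 1.0.55: "42", "true"
  } else if (sourcePayload === null || sourcePayload === undefined) {
    return JSON.stringify({});                               // new in 1.0.55: "{}"
  } else {
    throw new Error("Invalid source payload. Must be a JSON object or string.");
  }
}

console.log(normalizePayload(42));    // "42"
console.log(normalizePayload(null));  // "{}"

Before this change, numbers and booleans fell through to the final throw; 1.0.55 coerces them to strings and normalizes null/undefined to an empty JSON object.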
package/package.json
CHANGED
@@ -2,19 +2,25 @@
   "name": "ak-gemini",
   "author": "ak@mixpanel.com",
   "description": "AK's Generative AI Helper for doing... transforms",
-  "version": "1.0.54",
+  "version": "1.0.55",
   "main": "index.js",
   "files": [
     "index.js",
     "index.cjs",
-    "types.ts",
+    "types.d.ts",
     "logger.js"
   ],
   "types": "types.d.ts",
   "exports": {
     ".": {
-      "import": "./index.js",
-      "require": "./index.cjs"
+      "import": {
+        "types": "./types.d.ts",
+        "default": "./index.js"
+      },
+      "require": {
+        "types": "./types.d.ts",
+        "default": "./index.cjs"
+      }
     }
   },
   "repository": {
@@ -36,6 +42,11 @@
     "update-deps": "npx npm-check-updates -u && npm install",
     "prune": "rm -rf tmp/*",
     "test": "node --no-warnings --experimental-vm-modules node_modules/jest/bin/jest.js",
+    "test:all": "./scripts/test-all.sh",
+    "test:integration": "./scripts/test-local.sh",
+    "test:unit": "npm test -- tests/module.test.js",
+    "test:function": "npm test -- tests/function.integration.test.js",
+    "test:http": "npm test -- tests/function.http.test.js",
     "build:cjs": "esbuild index.js --bundle --platform=node --format=cjs --outfile=index.cjs --external:@google/genai --external:ak-tools --external:dotenv --external:pino-pretty --external:pino"
   },
   "type": "module",
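The reworked "exports" block adds conditional "types" entries so that TypeScript resolves ./types.d.ts for both entry points, while Node keeps routing ESM consumers to index.js and CommonJS consumers to index.cjs. A sketch of the two consumption paths (nothing here depends on what the modules export at runtime):

// consumer.mts — the "import" condition resolves to ./index.js
import * as akGemini from "ak-gemini";

// consumer.cts — the "require" condition resolves to ./index.cjs
// const akGemini = require("ak-gemini");

With "moduleResolution" set to "node16", "nodenext", or "bundler", both paths pick up the declarations from ./types.d.ts.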
package/types.d.ts
ADDED
@@ -0,0 +1,83 @@
+import type { GoogleGenAI } from '@google/genai';
+
+export interface SafetySetting {
+  category: string;  // The harm category
+  threshold: string; // The blocking threshold
+}
+
+export interface ChatConfig {
+  responseMimeType?: string;        // MIME type for responses
+  temperature?: number;             // Controls randomness (0.0 to 1.0)
+  topP?: number;                    // Controls diversity via nucleus sampling
+  topK?: number;                    // Controls diversity by limiting top-k tokens
+  systemInstruction?: string;       // System instruction for the model
+  safetySettings?: SafetySetting[]; // Safety settings array
+  responseSchema?: Object;          // Schema for validating model responses
+  [key: string]: any;               // Additional properties for flexibility
+}
+
+export interface AITransformerContext {
+  modelName?: string;
+  systemInstructions?: string;
+  chatConfig?: ChatConfig;
+  genAI?: any;
+  chat?: any;
+  examplesFile?: string | null;
+  exampleData?: TransformationExample[] | null;
+  promptKey?: string;
+  answerKey?: string;
+  contextKey?: string;
+  explanationKey?: string;
+  systemInstructionsKey?: string;
+  maxRetries?: number;
+  retryDelay?: number;
+  init?: () => Promise<void>; // Initialization function
+  seed?: () => Promise<void>; // Function to seed the transformer with examples
+  message?: (payload: Record<string, unknown>) => Promise<Record<string, unknown>>; // Function to send messages to the model
+  rebuild?: () => Promise<Record<string, unknown>>; // Function to rebuild the transformer
+  rawMessage?: (payload: Record<string, unknown>) => Promise<Record<string, unknown>>; // Function to send raw messages to the model
+  genAIClient?: GoogleGenAI; // Google GenAI client instance
+  onlyJSON?: boolean; // If true, only JSON responses are allowed
+
+}
+
+export interface TransformationExample {
+  CONTEXT?: Record<string, unknown>; // optional context for the transformation
+  PROMPT?: Record<string, unknown>;  // what the user provides as input
+  ANSWER?: Record<string, unknown>;  // what the model should return as output
+}
+
+export interface ExampleFileContent {
+  examples: TransformationExample[];
+}
+
+export interface AITransformerOptions {
+  // ? https://ai.google.dev/gemini-api/docs/models
+  modelName?: string;                      // The Gemini model to use
+  systemInstructions?: string;             // Custom system instructions for the model
+  chatConfig?: ChatConfig;                 // Configuration object for the chat session
+  examplesFile?: string;                   // Path to JSON file containing transformation examples
+  exampleData?: TransformationExample[];   // Inline examples to seed the transformer
+  sourceKey?: string;                      // Key name for source data in examples
+  targetKey?: string;                      // Key name for target data in examples
+  contextKey?: string;                     // Key name for context data in examples
+  explanationKey?: string;                 // Key name for explanation data in examples
+  systemInstructionsKey?: string;          // Key for system instructions in examples
+  maxRetries?: number;                     // Maximum retry attempts for auto-retry functionality
+  retryDelay?: number;                     // Initial retry delay in milliseconds
+  // ? https://ai.google.dev/gemini-api/docs/structured-output
+  responseSchema?: Object;                 // Schema for validating model responses
+  apiKey?: string;                         // API key for Google GenAI
+  onlyJSON?: boolean;                      // If true, only JSON responses are allowed
+  asyncValidator?: AsyncValidatorFunction; // Optional async validator function for response validation
+  promptKey?: string;                      // Key for the prompt in examples
+  answerKey?: string;                      // Key for the answer in examples
+  contextKey?: string;                     // Key for the context in examples
+  explanationKey?: string;                 // Key for the explanation in examples
+}
+
+// Async validator function type
+export type AsyncValidatorFunction = (payload: Record<string, unknown>) => Promise<unknown>;
+
+
+export declare class AITransformer implements AITransformerContext {}
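Read alongside the index.js changes, the new exampleData option and the TransformationExample shape let callers seed the transformer inline instead of pointing at an examplesFile. A hedged TypeScript sketch of what these declarations describe; the declare class stub has no constructor or method signatures, so passing options to new AITransformer(...) and the init/seed calls below are assumptions based on the AITransformerContext members and the index.js hunks, not guarantees from this file:

// usage-sketch.mts (illustrative values throughout)
import { AITransformer, type AITransformerOptions, type TransformationExample } from "ak-gemini";

const examples: TransformationExample[] = [
  { PROMPT: { name: "Ada" }, ANSWER: { greeting: "hello Ada" } },
];

const options: AITransformerOptions = {
  modelName: "gemini-2.0-flash", // illustrative; see https://ai.google.dev/gemini-api/docs/models
  exampleData: examples,         // new in 1.0.55: inline examples instead of examplesFile
  onlyJSON: true,
};

const transformer = new AITransformer(options); // constructor signature assumed (see note above)
await transformer.init?.();  // per the hunks, init now also calls models.list() and fails fast on a bad key
await transformer.seed?.();  // seeds the chat; per index.js it now returns the rebuilt history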