@elizaos/plugin-google-genai 1.1.0 → 2.0.0-alpha.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +8 -1
- package/dist/browser/index.browser.js +537 -0
- package/dist/browser/index.browser.js.map +19 -0
- package/dist/browser/index.d.ts +2 -0
- package/dist/cjs/index.d.ts +2 -0
- package/dist/cjs/index.node.cjs +579 -0
- package/dist/cjs/index.node.js.map +19 -0
- package/dist/index.d.ts +2 -21
- package/dist/node/index.d.ts +2 -0
- package/dist/node/index.node.js +537 -0
- package/dist/node/index.node.js.map +19 -0
- package/package.json +55 -20
- package/README.md +0 -65
- package/dist/index.js +0 -582
- package/dist/index.js.map +0 -1
package/LICENSE
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
MIT License
|
|
2
2
|
|
|
3
|
-
Copyright (c)
|
|
3
|
+
Copyright (c) 2024 elizaOS
|
|
4
4
|
|
|
5
5
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
6
|
of this software and associated documentation files (the "Software"), to deal
|
|
@@ -19,3 +19,10 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
|
19
19
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
20
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
21
|
SOFTWARE.
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
|
|
@@ -0,0 +1,537 @@
|
|
|
1
|
+
// index.ts
|
|
2
|
+
import { logger as logger7, ModelType as ModelType3 } from "@elizaos/core";
|
|
3
|
+
import { GoogleGenAI as GoogleGenAI3 } from "@google/genai";
|
|
4
|
+
|
|
5
|
+
// init.ts
|
|
6
|
+
import { logger as logger2 } from "@elizaos/core";
|
|
7
|
+
import { GoogleGenAI as GoogleGenAI2 } from "@google/genai";
|
|
8
|
+
|
|
9
|
+
// utils/config.ts
|
|
10
|
+
import { logger } from "@elizaos/core";
|
|
11
|
+
import { GoogleGenAI, HarmBlockThreshold, HarmCategory } from "@google/genai";
|
|
12
|
+
/**
 * Read an environment variable as a string.
 * Returns undefined in environments without `process` (e.g. browsers)
 * or when the variable is unset.
 */
function getEnvValue(key) {
  if (typeof process === "undefined") {
    return undefined;
  }
  const raw = process.env[key];
  if (raw === undefined) {
    return undefined;
  }
  return String(raw);
}

/**
 * Resolve a configuration value with precedence:
 * runtime settings > environment variable > `defaultValue`.
 * Any defined value is normalized to a string.
 */
function getSetting(runtime, key, defaultValue) {
  const fromRuntime = runtime.getSetting(key);
  if (fromRuntime !== undefined) {
    return String(fromRuntime);
  }
  return getEnvValue(key) ?? defaultValue;
}
|
|
26
|
+
// ---- Model / credential accessors ------------------------------------------

/** Google Generative AI API key; undefined when not configured anywhere. */
function getApiKey(runtime) {
  return getSetting(runtime, "GOOGLE_GENERATIVE_AI_API_KEY");
}

/** Small (fast/cheap) text model; provider-specific setting wins over generic. */
function getSmallModel(runtime) {
  const specific = getSetting(runtime, "GOOGLE_SMALL_MODEL");
  const generic = getSetting(runtime, "SMALL_MODEL", "gemini-2.0-flash-001");
  return specific ?? generic ?? "gemini-2.0-flash-001";
}

/** Large (high-quality) text model; provider-specific setting wins over generic. */
function getLargeModel(runtime) {
  const specific = getSetting(runtime, "GOOGLE_LARGE_MODEL");
  const generic = getSetting(runtime, "LARGE_MODEL", "gemini-2.5-pro-preview-03-25");
  return specific ?? generic ?? "gemini-2.5-pro-preview-03-25";
}

/** Vision model used for image description; same precedence pattern. */
function getImageModel(runtime) {
  const specific = getSetting(runtime, "GOOGLE_IMAGE_MODEL");
  const generic = getSetting(runtime, "IMAGE_MODEL", "gemini-2.5-pro-preview-03-25");
  return specific ?? generic ?? "gemini-2.5-pro-preview-03-25";
}

/** Embedding model; defaults to text-embedding-004 (768-dim output). */
function getEmbeddingModel(runtime) {
  return getSetting(runtime, "GOOGLE_EMBEDDING_MODEL", "text-embedding-004") ?? "text-embedding-004";
}
|
|
41
|
+
/**
 * Construct a GoogleGenAI client from the configured API key.
 * Logs an error and returns null when no key is configured, so callers
 * must null-check before use.
 */
function createGoogleGenAI(runtime) {
  const apiKey = getApiKey(runtime);
  if (apiKey) {
    return new GoogleGenAI({ apiKey });
  }
  logger.error("Google Generative AI API Key is missing");
  return null;
}
|
|
49
|
+
/**
 * Safety settings applied to every generation request: block content at
 * MEDIUM severity and above for all four standard harm categories.
 */
function getSafetySettings() {
  const blockedCategories = [
    HarmCategory.HARM_CATEGORY_HARASSMENT,
    HarmCategory.HARM_CATEGORY_HATE_SPEECH,
    HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT,
    HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT
  ];
  return blockedCategories.map((category) => ({
    category,
    threshold: HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE
  }));
}
|
|
69
|
+
|
|
70
|
+
// init.ts
|
|
71
|
+
/**
 * Fire-and-forget validation of the configured API key.
 * Lists the available models to prove the key works; every failure path
 * (missing key, network/auth error) is logged as a warning and never
 * propagated, so plugin init cannot be blocked by this check.
 */
function initializeGoogleGenAI(_config, runtime) {
  const validate = async () => {
    const apiKey = getApiKey(runtime);
    if (!apiKey) {
      logger2.warn("GOOGLE_GENERATIVE_AI_API_KEY is not set");
      return;
    }
    const client = new GoogleGenAI2({ apiKey });
    // models.list() returns an async pager; drain it to count models.
    const available = [];
    for await (const model of await client.models.list()) {
      available.push(model);
    }
    logger2.log(`Google AI API key validated. Available models: ${available.length}`);
  };
  // Intentionally not awaited; errors are swallowed into a warning.
  validate().catch((error) => {
    logger2.warn(`Google AI configuration error: ${error instanceof Error ? error.message : String(error)}`);
  });
}
|
|
91
|
+
|
|
92
|
+
// models/embedding.ts
|
|
93
|
+
import { logger as logger3, ModelType } from "@elizaos/core";
|
|
94
|
+
|
|
95
|
+
// utils/events.ts
|
|
96
|
+
import { EventType } from "@elizaos/core";
|
|
97
|
+
/**
 * Publish a MODEL_USED event so the runtime can account for token usage.
 * The prompt text itself is deliberately NOT included in the payload
 * (the `_prompt` parameter is accepted only for signature compatibility).
 */
function emitModelUsageEvent(runtime, type, _prompt, usage) {
  const { promptTokens, completionTokens, totalTokens } = usage;
  runtime.emitEvent(EventType.MODEL_USED, {
    runtime,
    source: "plugin-google-genai",
    type,
    tokens: {
      prompt: promptTokens,
      completion: completionTokens,
      total: totalTokens
    }
  });
}
|
|
109
|
+
|
|
110
|
+
// utils/tokenization.ts
|
|
111
|
+
/**
 * Rough token estimate: ~4 characters per token.
 * Async only to keep the signature compatible with real tokenizers.
 */
async function countTokens(text) {
  const CHARS_PER_TOKEN = 4;
  return Math.ceil(text.length / CHARS_PER_TOKEN);
}
|
|
114
|
+
|
|
115
|
+
// models/embedding.ts
|
|
116
|
+
/**
 * TEXT_EMBEDDING handler.
 * Accepts a string, an object with a `text` property, or null.
 * Degrades gracefully: null/empty input and API errors all yield a
 * 768-dim zero vector (matching text-embedding-004's output size)
 * instead of throwing, so downstream vector math never sees undefined.
 */
async function handleTextEmbedding(runtime, params) {
  // Null-safe fallback vector shared by every degraded path.
  const zeroVector = () => new Array(768).fill(0);

  const genAI = createGoogleGenAI(runtime);
  if (!genAI) {
    throw new Error("Google Generative AI client not initialized");
  }

  const embeddingModelName = getEmbeddingModel(runtime);
  logger3.debug(`[TEXT_EMBEDDING] Using model: ${embeddingModelName}`);

  if (params === null) {
    return zeroVector();
  }

  // Normalize the polymorphic input to a plain string.
  let text = "";
  if (typeof params === "string") {
    text = params;
  } else if (typeof params === "object" && params.text) {
    text = params.text;
  }

  if (!text.trim()) {
    logger3.warn("Empty text for embedding");
    return zeroVector();
  }

  // Clamp to the model's ~8192-token context using the 4-chars/token heuristic.
  const maxChars = 8192 * 4;
  if (text.length > maxChars) {
    logger3.warn(`[Google GenAI] Embedding input too long (~${Math.ceil(text.length / 4)} tokens), truncating to ~8192 tokens`);
    text = text.slice(0, maxChars);
  }

  try {
    const response = await genAI.models.embedContent({
      model: embeddingModelName,
      contents: text
    });
    const vector = response.embeddings?.[0]?.values || [];

    const promptTokens = await countTokens(text);
    emitModelUsageEvent(runtime, ModelType.TEXT_EMBEDDING, text, {
      promptTokens,
      completionTokens: 0,
      totalTokens: promptTokens
    });

    logger3.log(`Got embedding with length ${vector.length}`);
    return vector;
  } catch (error) {
    logger3.error(`Error generating embedding: ${error instanceof Error ? error.message : String(error)}`);
    return zeroVector();
  }
}
|
|
155
|
+
// models/image.ts
|
|
156
|
+
import { logger as logger4 } from "@elizaos/core";
|
|
157
|
+
// Bind fetch to globalThis: an unbound fetch reference throws
// "Illegal invocation" when called in browsers, and the previous
// `... : fetch` fallback threw a ReferenceError at module load in
// environments without a global fetch.
var crossFetch = typeof globalThis.fetch === "function" ? globalThis.fetch.bind(globalThis) : undefined;

/**
 * Portable ArrayBuffer -> base64. This is the browser bundle, where
 * `Buffer` is undefined; use it when present (Node), otherwise fall back
 * to btoa, chunked to stay under String.fromCharCode argument limits.
 */
function arrayBufferToBase64(buffer) {
  if (typeof Buffer !== "undefined") {
    return Buffer.from(buffer).toString("base64");
  }
  const bytes = new Uint8Array(buffer);
  let binary = "";
  const chunkSize = 0x8000;
  for (let i = 0; i < bytes.length; i += chunkSize) {
    binary += String.fromCharCode(...bytes.subarray(i, i + chunkSize));
  }
  return btoa(binary);
}

/**
 * IMAGE_DESCRIPTION handler.
 * Fetches the image, sends it inline (base64) to the vision model, and
 * returns { title, description }. Prefers a JSON response from the model;
 * falls back to parsing a "Title: ..." line out of plain text. On any
 * error it returns a descriptive failure object rather than throwing.
 *
 * @param params - either an image URL string, or { imageUrl, prompt? }.
 */
async function handleImageDescription(runtime, params) {
  const genAI = createGoogleGenAI(runtime);
  if (!genAI) {
    throw new Error("Google Generative AI client not initialized");
  }
  const DEFAULT_PROMPT = "Please analyze this image and provide a title and detailed description.";
  let imageUrl;
  let promptText;
  const modelName = getImageModel(runtime);
  logger4.log(`[IMAGE_DESCRIPTION] Using model: ${modelName}`);
  if (typeof params === "string") {
    imageUrl = params;
    promptText = DEFAULT_PROMPT;
  } else {
    imageUrl = params.imageUrl;
    promptText = params.prompt || DEFAULT_PROMPT;
  }
  try {
    if (!crossFetch) {
      throw new Error("fetch is not available in this environment");
    }
    const imageResponse = await crossFetch(imageUrl);
    if (!imageResponse.ok) {
      throw new Error(`Failed to fetch image: ${imageResponse.statusText}`);
    }
    const imageData = await imageResponse.arrayBuffer();
    const base64Image = arrayBufferToBase64(imageData);
    const contentType = imageResponse.headers.get("content-type") || "image/jpeg";
    const response = await genAI.models.generateContent({
      model: modelName,
      contents: [
        {
          role: "user",
          parts: [
            { text: promptText },
            {
              inlineData: {
                mimeType: contentType,
                data: base64Image
              }
            }
          ]
        }
      ],
      config: {
        temperature: 0.7,
        topK: 40,
        topP: 0.95,
        maxOutputTokens: 8192,
        safetySettings: getSafetySettings()
      }
    });
    const responseText = response.text || "";
    // First choice: the model answered with well-formed JSON.
    try {
      const jsonResponse = JSON.parse(responseText);
      if (typeof jsonResponse.title === "string" && typeof jsonResponse.description === "string") {
        return {
          title: jsonResponse.title,
          description: jsonResponse.description
        };
      }
    } catch {}
    // Fallback: scrape a "Title: ..." line; everything else is the description.
    const TITLE_RE = /title[:\s]+(.+?)(?:\n|$)/i;
    const titleMatch = responseText.match(TITLE_RE);
    const title = titleMatch?.[1]?.trim() || "Image Analysis";
    const description = titleMatch ? responseText.replace(TITLE_RE, "").trim() : responseText.trim();
    return { title, description };
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    logger4.error(`Error analyzing image: ${message}`);
    return {
      title: "Failed to analyze image",
      description: `Error: ${message}`
    };
  }
}
|
|
229
|
+
// models/object.ts
|
|
230
|
+
import { logger as logger5 } from "@elizaos/core";
|
|
231
|
+
/**
 * Shared implementation for OBJECT_SMALL / OBJECT_LARGE.
 * Asks the model for JSON (responseMimeType application/json); when a
 * schema is supplied it is appended to the prompt as guidance. Parsing is
 * lenient: the full response is tried first, then the first {...} span.
 * Parse failures and API errors are logged and rethrown.
 */
async function generateObjectByModelType(runtime, params, modelType, getModelFn) {
  const genAI = createGoogleGenAI(runtime);
  if (!genAI) {
    throw new Error("Google Generative AI client not initialized");
  }
  const modelName = getModelFn(runtime);
  const temperature = params.temperature ?? 0.1;
  logger5.info(`Using ${modelType} model: ${modelName}`);
  try {
    // Lenient extraction: exact JSON, else the outermost-looking {...} span.
    const parseLoosely = (raw) => {
      try {
        return JSON.parse(raw);
      } catch {
        const objectLike = raw.match(/\{[\s\S]*\}/);
        if (objectLike) {
          try {
            return JSON.parse(objectLike[0]);
          } catch {
            throw new Error("Failed to parse JSON from response");
          }
        }
        throw new Error("Failed to parse JSON from response");
      }
    };

    let enhancedPrompt = params.prompt;
    if (params.schema) {
      enhancedPrompt += `\n\nPlease respond with a JSON object that follows this schema:\n${JSON.stringify(params.schema, null, 2)}`;
    }
    const response = await genAI.models.generateContent({
      model: modelName,
      contents: enhancedPrompt,
      config: {
        temperature,
        topK: 40,
        topP: 0.95,
        maxOutputTokens: 8192,
        responseMimeType: "application/json",
        safetySettings: getSafetySettings()
      }
    });
    const text = response.text || "";

    // Usage accounting happens before parsing: tokens were spent either way.
    const promptTokens = await countTokens(enhancedPrompt);
    const completionTokens = await countTokens(text);
    emitModelUsageEvent(runtime, modelType, params.prompt, {
      promptTokens,
      completionTokens,
      totalTokens: promptTokens + completionTokens
    });

    return parseLoosely(text);
  } catch (error) {
    logger5.error(`[generateObject] Error: ${error instanceof Error ? error.message : String(error)}`);
    throw error;
  }
}
|
|
285
|
+
/** OBJECT_SMALL: structured JSON output via the small/fast model. */
async function handleObjectSmall(runtime, params) {
  return generateObjectByModelType(runtime, params, "OBJECT_SMALL", getSmallModel);
}

/** OBJECT_LARGE: structured JSON output via the large/high-quality model. */
async function handleObjectLarge(runtime, params) {
  return generateObjectByModelType(runtime, params, "OBJECT_LARGE", getLargeModel);
}
|
|
291
|
+
// models/text.ts
|
|
292
|
+
import { logger as logger6, ModelType as ModelType2 } from "@elizaos/core";
|
|
293
|
+
/**
 * Shared implementation for TEXT_SMALL / TEXT_LARGE generation.
 * handleTextSmall and handleTextLarge were byte-for-byte duplicates apart
 * from the model getter, ModelType constant, and log tag; this helper
 * removes the duplication while leaving both public entry points intact.
 *
 * @param runtime   - agent runtime (provides settings and character.system)
 * @param params    - { prompt, stopSequences?, maxTokens?, temperature? }
 * @param modelType - ModelType constant reported in the usage event
 * @param getModelFn - resolver for the concrete Gemini model name
 * @param logTag    - label used in log lines ("TEXT_SMALL"/"TEXT_LARGE")
 * @throws when the client cannot be created or the API call fails
 */
async function generateTextByModelType(
  runtime,
  { prompt, stopSequences = [], maxTokens = 8192, temperature = 0.7 },
  modelType,
  getModelFn,
  logTag
) {
  const genAI = createGoogleGenAI(runtime);
  if (!genAI) {
    throw new Error("Google Generative AI client not initialized");
  }
  const modelName = getModelFn(runtime);
  logger6.log(`[${logTag}] Using model: ${modelName}`);
  try {
    // The character's system prompt is attached only when present,
    // so the spread adds no `systemInstruction: undefined` key.
    const systemInstruction = runtime.character.system || undefined;
    const response = await genAI.models.generateContent({
      model: modelName,
      contents: prompt,
      config: {
        temperature,
        topK: 40,
        topP: 0.95,
        maxOutputTokens: maxTokens,
        stopSequences,
        safetySettings: getSafetySettings(),
        ...systemInstruction && { systemInstruction }
      }
    });
    const text = response.text || "";
    const promptTokens = await countTokens(prompt);
    const completionTokens = await countTokens(text);
    emitModelUsageEvent(runtime, modelType, prompt, {
      promptTokens,
      completionTokens,
      totalTokens: promptTokens + completionTokens
    });
    return text;
  } catch (error) {
    logger6.error(`[${logTag}] Error: ${error instanceof Error ? error.message : String(error)}`);
    throw error;
  }
}

/** TEXT_SMALL: free-form text from the small/fast model. */
async function handleTextSmall(runtime, params) {
  return generateTextByModelType(runtime, params, ModelType2.TEXT_SMALL, getSmallModel, "TEXT_SMALL");
}

/** TEXT_LARGE: free-form text from the large/high-quality model. */
async function handleTextLarge(runtime, params) {
  return generateTextByModelType(runtime, params, ModelType2.TEXT_LARGE, getLargeModel, "TEXT_LARGE");
}
|
|
365
|
+
// index.ts
|
|
366
|
+
// Integration test suite exposed via the plugin's `tests` field.
// Each test receives a live IAgentRuntime; failures are signaled by throwing.
var pluginTests = [
  {
    name: "google_genai_plugin_tests",
    tests: [
      {
        // Verifies the API key works by listing the available models directly.
        name: "google_test_api_key_validation",
        fn: async (runtime) => {
          const apiKey = getApiKey(runtime);
          if (!apiKey) {
            throw new Error("GOOGLE_GENERATIVE_AI_API_KEY not set");
          }
          const genAI = new GoogleGenAI3({ apiKey });
          const modelList = await genAI.models.list();
          const models = [];
          // models.list() returns an async pager; drain it to count entries.
          for await (const model of modelList) {
            models.push(model);
          }
          logger7.log(`Available models: ${models.length}`);
        }
      },
      {
        // Round-trips a small string through the TEXT_EMBEDDING handler.
        name: "google_test_text_embedding",
        fn: async (runtime) => {
          try {
            const embedding = await runtime.useModel(ModelType3.TEXT_EMBEDDING, {
              text: "Hello, world!"
            });
            logger7.log(`Embedding dimension: ${embedding.length}`);
            if (embedding.length === 0) {
              throw new Error("Failed to generate embedding");
            }
          } catch (error) {
            logger7.error(`Error in test_text_embedding: ${error instanceof Error ? error.message : String(error)}`);
            throw error;
          }
        }
      },
      {
        // Smoke test for the small text model: any non-empty response passes.
        name: "google_test_text_small",
        fn: async (runtime) => {
          try {
            const text = await runtime.useModel(ModelType3.TEXT_SMALL, {
              prompt: "What is the nature of reality in 10 words?"
            });
            if (text.length === 0) {
              throw new Error("Failed to generate text");
            }
            logger7.log("Generated with TEXT_SMALL:", text);
          } catch (error) {
            logger7.error(`Error in test_text_small: ${error instanceof Error ? error.message : String(error)}`);
            throw error;
          }
        }
      },
      {
        // Smoke test for the large text model; only the first 100 chars are logged.
        name: "google_test_text_large",
        fn: async (runtime) => {
          try {
            const text = await runtime.useModel(ModelType3.TEXT_LARGE, {
              prompt: "Explain quantum mechanics in simple terms."
            });
            if (text.length === 0) {
              throw new Error("Failed to generate text");
            }
            logger7.log("Generated with TEXT_LARGE:", `${text.substring(0, 100)}...`);
          } catch (error) {
            logger7.error(`Error in test_text_large: ${error instanceof Error ? error.message : String(error)}`);
            throw error;
          }
        }
      },
      {
        // Describes a public image; expects a { title, description } result.
        // NOTE: a malformed result only logs an error here, it does not fail the test.
        name: "google_test_image_description",
        fn: async (runtime) => {
          try {
            const result = await runtime.useModel(ModelType3.IMAGE_DESCRIPTION, "https://upload.wikimedia.org/wikipedia/commons/thumb/1/1c/Vitalik_Buterin_TechCrunch_London_2015_%28cropped%29.jpg/537px-Vitalik_Buterin_TechCrunch_London_2015_%28cropped%29.jpg");
            if (result && typeof result === "object" && "title" in result && "description" in result) {
              logger7.log("Image description:", JSON.stringify(result));
            } else {
              logger7.error(`Invalid image description result format: ${JSON.stringify(result)}`);
            }
          } catch (error) {
            logger7.error(`Error in test_image_description: ${error instanceof Error ? error.message : String(error)}`);
            throw error;
          }
        }
      },
      {
        // Generates a structured object against a JSON schema and checks
        // the required fields are present (truthiness check, not a type check).
        name: "google_test_object_generation",
        fn: async (runtime) => {
          try {
            const schema = {
              type: "object",
              properties: {
                name: { type: "string" },
                age: { type: "number" },
                hobbies: { type: "array", items: { type: "string" } }
              },
              required: ["name", "age", "hobbies"]
            };
            const result = await runtime.useModel(ModelType3.OBJECT_SMALL, {
              prompt: "Generate a person profile with name, age, and hobbies.",
              schema
            });
            logger7.log("Generated object:", JSON.stringify(result));
            if (!result.name || !result.age || !result.hobbies) {
              throw new Error("Generated object missing required fields");
            }
          } catch (error) {
            logger7.error(`Error in test_object_generation: ${error instanceof Error ? error.message : String(error)}`);
            throw error;
          }
        }
      }
    ]
  }
];
|
|
483
|
+
/**
 * Safe accessor for process.env: returns an empty object in browser
 * environments where `process` does not exist.
 */
function getProcessEnv() {
  return typeof process === "undefined" ? {} : process.env;
}
|
|
489
|
+
// Snapshot of process.env taken at module load; empty in browsers.
var env = getProcessEnv();

// Plugin definition: wires each ModelType to its handler and surfaces the
// relevant environment variables (null when unset) as plugin config.
var googleGenAIPlugin = {
  name: "google-genai",
  description: "Google Generative AI plugin for Gemini models",
  config: {
    GOOGLE_GENERATIVE_AI_API_KEY: env.GOOGLE_GENERATIVE_AI_API_KEY ?? null,
    GOOGLE_SMALL_MODEL: env.GOOGLE_SMALL_MODEL ?? null,
    GOOGLE_LARGE_MODEL: env.GOOGLE_LARGE_MODEL ?? null,
    GOOGLE_IMAGE_MODEL: env.GOOGLE_IMAGE_MODEL ?? null,
    GOOGLE_EMBEDDING_MODEL: env.GOOGLE_EMBEDDING_MODEL ?? null,
    SMALL_MODEL: env.SMALL_MODEL ?? null,
    LARGE_MODEL: env.LARGE_MODEL ?? null,
    IMAGE_MODEL: env.IMAGE_MODEL ?? null
  },
  // Kicks off (but does not await) API-key validation; see initializeGoogleGenAI.
  async init(config, runtime) {
    initializeGoogleGenAI(config, runtime);
  },
  models: {
    [ModelType3.TEXT_SMALL]: async (runtime, params) => {
      return handleTextSmall(runtime, params);
    },
    [ModelType3.TEXT_LARGE]: async (runtime, params) => {
      return handleTextLarge(runtime, params);
    },
    [ModelType3.TEXT_EMBEDDING]: async (runtime, params) => {
      return handleTextEmbedding(runtime, params);
    },
    [ModelType3.IMAGE_DESCRIPTION]: async (runtime, params) => {
      return handleImageDescription(runtime, params);
    },
    [ModelType3.OBJECT_SMALL]: async (runtime, params) => {
      return handleObjectSmall(runtime, params);
    },
    [ModelType3.OBJECT_LARGE]: async (runtime, params) => {
      return handleObjectLarge(runtime, params);
    }
  },
  tests: pluginTests
};
var typescript_default = googleGenAIPlugin;

// index.browser.ts
// Browser entry point re-exports the same plugin object.
var index_browser_default = typescript_default;
export {
  googleGenAIPlugin,
  index_browser_default as default
};

//# debugId=E75B1BEAE9D5BC6A64756E2164756E21
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
{
|
|
2
|
+
"version": 3,
|
|
3
|
+
"sources": ["../../index.ts", "../../init.ts", "../../utils/config.ts", "../../models/embedding.ts", "../../utils/events.ts", "../../utils/tokenization.ts", "../../models/image.ts", "../../models/object.ts", "../../models/text.ts", "../../index.browser.ts"],
|
|
4
|
+
"sourcesContent": [
|
|
5
|
+
"import type {\n GenerateTextParams,\n IAgentRuntime,\n ImageDescriptionParams,\n ObjectGenerationParams,\n Plugin,\n TestCase,\n TestSuite,\n TextEmbeddingParams,\n} from \"@elizaos/core\";\nimport { logger, ModelType } from \"@elizaos/core\";\nimport { GoogleGenAI } from \"@google/genai\";\nimport { initializeGoogleGenAI, type PluginConfig } from \"./init\";\nimport {\n handleImageDescription,\n handleObjectLarge,\n handleObjectSmall,\n handleTextEmbedding,\n handleTextLarge,\n handleTextSmall,\n} from \"./models\";\nimport { getApiKey } from \"./utils/config\";\n\nexport type { PluginConfig } from \"./init\";\nexport * from \"./types\";\n\nconst pluginTests = [\n {\n name: \"google_genai_plugin_tests\",\n tests: [\n {\n name: \"google_test_api_key_validation\",\n fn: async (runtime: IAgentRuntime) => {\n const apiKey = getApiKey(runtime);\n if (!apiKey) {\n throw new Error(\"GOOGLE_GENERATIVE_AI_API_KEY not set\");\n }\n const genAI = new GoogleGenAI({ apiKey });\n const modelList = await genAI.models.list();\n const models = [];\n for await (const model of modelList) {\n models.push(model);\n }\n logger.log(`Available models: ${models.length}`);\n },\n },\n {\n name: \"google_test_text_embedding\",\n fn: async (runtime: IAgentRuntime) => {\n try {\n const embedding = await runtime.useModel(ModelType.TEXT_EMBEDDING, {\n text: \"Hello, world!\",\n });\n logger.log(`Embedding dimension: ${embedding.length}`);\n if (embedding.length === 0) {\n throw new Error(\"Failed to generate embedding\");\n }\n } catch (error) {\n logger.error(\n `Error in test_text_embedding: ${error instanceof Error ? 
error.message : String(error)}`\n );\n throw error;\n }\n },\n },\n {\n name: \"google_test_text_small\",\n fn: async (runtime: IAgentRuntime) => {\n try {\n const text = await runtime.useModel(ModelType.TEXT_SMALL, {\n prompt: \"What is the nature of reality in 10 words?\",\n });\n if (text.length === 0) {\n throw new Error(\"Failed to generate text\");\n }\n logger.log(\"Generated with TEXT_SMALL:\", text);\n } catch (error) {\n logger.error(\n `Error in test_text_small: ${error instanceof Error ? error.message : String(error)}`\n );\n throw error;\n }\n },\n },\n {\n name: \"google_test_text_large\",\n fn: async (runtime: IAgentRuntime) => {\n try {\n const text = await runtime.useModel(ModelType.TEXT_LARGE, {\n prompt: \"Explain quantum mechanics in simple terms.\",\n });\n if (text.length === 0) {\n throw new Error(\"Failed to generate text\");\n }\n logger.log(\"Generated with TEXT_LARGE:\", `${text.substring(0, 100)}...`);\n } catch (error) {\n logger.error(\n `Error in test_text_large: ${error instanceof Error ? error.message : String(error)}`\n );\n throw error;\n }\n },\n },\n {\n name: \"google_test_image_description\",\n fn: async (runtime: IAgentRuntime) => {\n try {\n const result = await runtime.useModel(\n ModelType.IMAGE_DESCRIPTION,\n \"https://upload.wikimedia.org/wikipedia/commons/thumb/1/1c/Vitalik_Buterin_TechCrunch_London_2015_%28cropped%29.jpg/537px-Vitalik_Buterin_TechCrunch_London_2015_%28cropped%29.jpg\"\n );\n\n if (\n result &&\n typeof result === \"object\" &&\n \"title\" in result &&\n \"description\" in result\n ) {\n logger.log(\"Image description:\", JSON.stringify(result));\n } else {\n logger.error(`Invalid image description result format: ${JSON.stringify(result)}`);\n }\n } catch (error) {\n logger.error(\n `Error in test_image_description: ${error instanceof Error ? 
error.message : String(error)}`\n );\n throw error;\n }\n },\n },\n {\n name: \"google_test_object_generation\",\n fn: async (runtime: IAgentRuntime) => {\n try {\n const schema = {\n type: \"object\",\n properties: {\n name: { type: \"string\" },\n age: { type: \"number\" },\n hobbies: { type: \"array\", items: { type: \"string\" } },\n },\n required: [\"name\", \"age\", \"hobbies\"],\n };\n\n const result = await runtime.useModel(ModelType.OBJECT_SMALL, {\n prompt: \"Generate a person profile with name, age, and hobbies.\",\n schema,\n });\n\n logger.log(\"Generated object:\", JSON.stringify(result));\n\n if (!result.name || !result.age || !result.hobbies) {\n throw new Error(\"Generated object missing required fields\");\n }\n } catch (error) {\n logger.error(\n `Error in test_object_generation: ${error instanceof Error ? error.message : String(error)}`\n );\n throw error;\n }\n },\n },\n ] as TestCase[],\n },\n] as TestSuite[];\n\ntype ProcessEnvLike = Record<string, string | undefined>;\n\nfunction getProcessEnv(): ProcessEnvLike {\n if (typeof process === \"undefined\") {\n return {};\n }\n return process.env as ProcessEnvLike;\n}\n\nconst env = getProcessEnv();\n\nexport const googleGenAIPlugin: Plugin = {\n name: \"google-genai\",\n description: \"Google Generative AI plugin for Gemini models\",\n\n config: {\n GOOGLE_GENERATIVE_AI_API_KEY: env.GOOGLE_GENERATIVE_AI_API_KEY ?? null,\n GOOGLE_SMALL_MODEL: env.GOOGLE_SMALL_MODEL ?? null,\n GOOGLE_LARGE_MODEL: env.GOOGLE_LARGE_MODEL ?? null,\n GOOGLE_IMAGE_MODEL: env.GOOGLE_IMAGE_MODEL ?? null,\n GOOGLE_EMBEDDING_MODEL: env.GOOGLE_EMBEDDING_MODEL ?? null,\n SMALL_MODEL: env.SMALL_MODEL ?? null,\n LARGE_MODEL: env.LARGE_MODEL ?? null,\n IMAGE_MODEL: env.IMAGE_MODEL ?? 
null,\n },\n\n async init(config, runtime) {\n initializeGoogleGenAI(config as PluginConfig, runtime);\n },\n\n models: {\n [ModelType.TEXT_SMALL]: async (\n runtime: IAgentRuntime,\n params: GenerateTextParams\n ): Promise<string> => {\n return handleTextSmall(runtime, params);\n },\n\n [ModelType.TEXT_LARGE]: async (\n runtime: IAgentRuntime,\n params: GenerateTextParams\n ): Promise<string> => {\n return handleTextLarge(runtime, params);\n },\n\n [ModelType.TEXT_EMBEDDING]: async (\n runtime: IAgentRuntime,\n params: TextEmbeddingParams | string | null\n ): Promise<number[]> => {\n return handleTextEmbedding(runtime, params);\n },\n\n [ModelType.IMAGE_DESCRIPTION]: async (\n runtime: IAgentRuntime,\n params: ImageDescriptionParams | string\n ): Promise<{ title: string; description: string }> => {\n return handleImageDescription(runtime, params);\n },\n\n [ModelType.OBJECT_SMALL]: async (\n runtime: IAgentRuntime,\n params: ObjectGenerationParams\n ): Promise<Record<string, string | number | boolean | null>> => {\n return handleObjectSmall(runtime, params);\n },\n\n [ModelType.OBJECT_LARGE]: async (\n runtime: IAgentRuntime,\n params: ObjectGenerationParams\n ): Promise<Record<string, string | number | boolean | null>> => {\n return handleObjectLarge(runtime, params);\n },\n },\n\n tests: pluginTests,\n};\n\nexport default googleGenAIPlugin;\n",
|
|
6
|
+
"import { type IAgentRuntime, logger } from \"@elizaos/core\";\nimport { GoogleGenAI } from \"@google/genai\";\nimport { getApiKey } from \"./utils/config\";\n\nexport interface PluginConfig {\n readonly GOOGLE_GENERATIVE_AI_API_KEY?: string;\n readonly GOOGLE_SMALL_MODEL?: string;\n readonly GOOGLE_LARGE_MODEL?: string;\n readonly GOOGLE_IMAGE_MODEL?: string;\n readonly GOOGLE_EMBEDDING_MODEL?: string;\n readonly SMALL_MODEL?: string;\n readonly LARGE_MODEL?: string;\n readonly IMAGE_MODEL?: string;\n}\n\nexport function initializeGoogleGenAI(_config: PluginConfig, runtime: IAgentRuntime): void {\n (async () => {\n try {\n const apiKey = getApiKey(runtime);\n if (!apiKey) {\n logger.warn(\"GOOGLE_GENERATIVE_AI_API_KEY is not set\");\n return;\n }\n\n const genAI = new GoogleGenAI({ apiKey });\n const modelList = await genAI.models.list();\n const models = [];\n for await (const model of modelList) {\n models.push(model);\n }\n logger.log(`Google AI API key validated. Available models: ${models.length}`);\n } catch (error) {\n logger.warn(\n `Google AI configuration error: ${error instanceof Error ? error.message : String(error)}`\n );\n }\n })();\n}\n",
|
|
7
|
+
"import type { IAgentRuntime } from \"@elizaos/core\";\nimport { logger } from \"@elizaos/core\";\nimport { GoogleGenAI, HarmBlockThreshold, HarmCategory } from \"@google/genai\";\n\nfunction getEnvValue(key: string): string | undefined {\n // In browsers, `process` is not defined. `typeof process` is safe.\n if (typeof process === \"undefined\") {\n return undefined;\n }\n const value = process.env[key];\n return value === undefined ? undefined : String(value);\n}\n\nexport function getSetting(\n runtime: IAgentRuntime,\n key: string,\n defaultValue?: string\n): string | undefined {\n const runtimeValue = runtime.getSetting(key);\n if (runtimeValue !== undefined) {\n return String(runtimeValue);\n }\n return getEnvValue(key) ?? defaultValue;\n}\n\nexport function getApiKey(runtime: IAgentRuntime): string | undefined {\n return getSetting(runtime, \"GOOGLE_GENERATIVE_AI_API_KEY\");\n}\n\nexport function getSmallModel(runtime: IAgentRuntime): string {\n return (\n getSetting(runtime, \"GOOGLE_SMALL_MODEL\") ??\n getSetting(runtime, \"SMALL_MODEL\", \"gemini-2.0-flash-001\") ??\n \"gemini-2.0-flash-001\"\n );\n}\n\nexport function getLargeModel(runtime: IAgentRuntime): string {\n return (\n getSetting(runtime, \"GOOGLE_LARGE_MODEL\") ??\n getSetting(runtime, \"LARGE_MODEL\", \"gemini-2.5-pro-preview-03-25\") ??\n \"gemini-2.5-pro-preview-03-25\"\n );\n}\n\nexport function getImageModel(runtime: IAgentRuntime): string {\n return (\n getSetting(runtime, \"GOOGLE_IMAGE_MODEL\") ??\n getSetting(runtime, \"IMAGE_MODEL\", \"gemini-2.5-pro-preview-03-25\") ??\n \"gemini-2.5-pro-preview-03-25\"\n );\n}\n\nexport function getEmbeddingModel(runtime: IAgentRuntime): string {\n return (\n getSetting(runtime, \"GOOGLE_EMBEDDING_MODEL\", \"text-embedding-004\") ?? 
\"text-embedding-004\"\n );\n}\n\nexport function createGoogleGenAI(runtime: IAgentRuntime): GoogleGenAI | null {\n const apiKey = getApiKey(runtime);\n if (!apiKey) {\n logger.error(\"Google Generative AI API Key is missing\");\n return null;\n }\n\n return new GoogleGenAI({ apiKey });\n}\n\nexport function getSafetySettings() {\n return [\n {\n category: HarmCategory.HARM_CATEGORY_HARASSMENT,\n threshold: HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE,\n },\n {\n category: HarmCategory.HARM_CATEGORY_HATE_SPEECH,\n threshold: HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE,\n },\n {\n category: HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT,\n threshold: HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE,\n },\n {\n category: HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT,\n threshold: HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE,\n },\n ];\n}\n",
|
|
8
|
+
"import type { IAgentRuntime, TextEmbeddingParams } from \"@elizaos/core\";\nimport { logger, ModelType } from \"@elizaos/core\";\nimport { createGoogleGenAI, getEmbeddingModel } from \"../utils/config\";\nimport { emitModelUsageEvent } from \"../utils/events\";\nimport { countTokens } from \"../utils/tokenization\";\n\nexport async function handleTextEmbedding(\n runtime: IAgentRuntime,\n params: TextEmbeddingParams | string | null\n): Promise<number[]> {\n const genAI = createGoogleGenAI(runtime);\n if (!genAI) {\n throw new Error(\"Google Generative AI client not initialized\");\n }\n\n const embeddingModelName = getEmbeddingModel(runtime);\n logger.debug(`[TEXT_EMBEDDING] Using model: ${embeddingModelName}`);\n\n if (params === null) {\n return Array(768).fill(0) as number[];\n }\n\n let text =\n typeof params === \"string\"\n ? params\n : typeof params === \"object\" && params.text\n ? params.text\n : \"\";\n\n if (!text.trim()) {\n logger.warn(\"Empty text for embedding\");\n return Array(768).fill(0) as number[];\n }\n\n // Truncate to stay within embedding model token limits (~4 chars per token)\n const maxChars = 8_192 * 4;\n if (text.length > maxChars) {\n logger.warn(\n `[Google GenAI] Embedding input too long (~${Math.ceil(text.length / 4)} tokens), truncating to ~8192 tokens`\n );\n text = text.slice(0, maxChars);\n }\n\n try {\n const response = await genAI.models.embedContent({\n model: embeddingModelName,\n contents: text,\n });\n\n const embedding = response.embeddings?.[0]?.values || [];\n\n const promptTokens = await countTokens(text);\n\n emitModelUsageEvent(runtime, ModelType.TEXT_EMBEDDING, text, {\n promptTokens,\n completionTokens: 0,\n totalTokens: promptTokens,\n });\n\n logger.log(`Got embedding with length ${embedding.length}`);\n return embedding;\n } catch (error) {\n logger.error(\n `Error generating embedding: ${error instanceof Error ? error.message : String(error)}`\n );\n return Array(768).fill(0) as number[];\n }\n}\n",
|
|
9
|
+
"import type { IAgentRuntime, ModelTypeName } from \"@elizaos/core\";\nimport { EventType } from \"@elizaos/core\";\n\nexport function emitModelUsageEvent(\n runtime: IAgentRuntime,\n type: ModelTypeName,\n _prompt: string,\n usage: {\n promptTokens: number;\n completionTokens: number;\n totalTokens: number;\n }\n): void {\n void _prompt; // Not included in ModelEventPayload\n runtime.emitEvent(EventType.MODEL_USED, {\n runtime,\n source: \"plugin-google-genai\",\n type,\n tokens: {\n prompt: usage.promptTokens,\n completion: usage.completionTokens,\n total: usage.totalTokens,\n },\n });\n}\n",
|
|
10
|
+
"export async function countTokens(text: string): Promise<number> {\n return Math.ceil(text.length / 4);\n}\n",
|
|
11
|
+
"import type { IAgentRuntime, ImageDescriptionParams } from \"@elizaos/core\";\nimport { logger } from \"@elizaos/core\";\nimport type { ImageDescriptionResponse } from \"../types\";\nimport { createGoogleGenAI, getImageModel, getSafetySettings } from \"../utils/config\";\n\nconst crossFetch = typeof globalThis.fetch === \"function\" ? globalThis.fetch : fetch;\n\nexport async function handleImageDescription(\n runtime: IAgentRuntime,\n params: ImageDescriptionParams | string\n): Promise<ImageDescriptionResponse> {\n const genAI = createGoogleGenAI(runtime);\n if (!genAI) {\n throw new Error(\"Google Generative AI client not initialized\");\n }\n\n let imageUrl: string;\n let promptText: string;\n const modelName = getImageModel(runtime);\n logger.log(`[IMAGE_DESCRIPTION] Using model: ${modelName}`);\n\n if (typeof params === \"string\") {\n imageUrl = params;\n promptText = \"Please analyze this image and provide a title and detailed description.\";\n } else {\n imageUrl = params.imageUrl;\n promptText =\n params.prompt || \"Please analyze this image and provide a title and detailed description.\";\n }\n\n try {\n const imageResponse = await crossFetch(imageUrl);\n if (!imageResponse.ok) {\n throw new Error(`Failed to fetch image: ${imageResponse.statusText}`);\n }\n\n const imageData = await imageResponse.arrayBuffer();\n const base64Image = Buffer.from(imageData).toString(\"base64\");\n const contentType = imageResponse.headers.get(\"content-type\") || \"image/jpeg\";\n\n const response = await genAI.models.generateContent({\n model: modelName,\n contents: [\n {\n role: \"user\",\n parts: [\n { text: promptText },\n {\n inlineData: {\n mimeType: contentType,\n data: base64Image,\n },\n },\n ],\n },\n ],\n config: {\n temperature: 0.7,\n topK: 40,\n topP: 0.95,\n maxOutputTokens: 8192,\n safetySettings: getSafetySettings(),\n },\n });\n\n const responseText = response.text || \"\";\n\n try {\n const jsonResponse = JSON.parse(responseText) as { title?: string; 
description?: string };\n if (typeof jsonResponse.title === \"string\" && typeof jsonResponse.description === \"string\") {\n return {\n title: jsonResponse.title,\n description: jsonResponse.description,\n };\n }\n } catch {\n // Fall through to text parsing\n }\n\n const titleMatch = responseText.match(/title[:\\s]+(.+?)(?:\\n|$)/i);\n const title = titleMatch?.[1]?.trim() || \"Image Analysis\";\n const description = titleMatch\n ? responseText.replace(/title[:\\s]+(.+?)(?:\\n|$)/i, \"\").trim()\n : responseText.trim();\n\n return { title, description };\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error);\n logger.error(`Error analyzing image: ${message}`);\n return {\n title: \"Failed to analyze image\",\n description: `Error: ${message}`,\n };\n }\n}\n",
|
|
12
|
+
"import type { IAgentRuntime, ModelTypeName, ObjectGenerationParams } from \"@elizaos/core\";\nimport { logger } from \"@elizaos/core\";\nimport {\n createGoogleGenAI,\n getLargeModel,\n getSafetySettings,\n getSmallModel,\n} from \"../utils/config\";\nimport { emitModelUsageEvent } from \"../utils/events\";\nimport { countTokens } from \"../utils/tokenization\";\n\nasync function generateObjectByModelType(\n runtime: IAgentRuntime,\n params: ObjectGenerationParams,\n modelType: string,\n getModelFn: (runtime: IAgentRuntime) => string\n): Promise<Record<string, string | number | boolean | null>> {\n const genAI = createGoogleGenAI(runtime);\n if (!genAI) {\n throw new Error(\"Google Generative AI client not initialized\");\n }\n\n const modelName = getModelFn(runtime);\n const temperature = params.temperature ?? 0.1;\n\n logger.info(`Using ${modelType} model: ${modelName}`);\n\n try {\n let enhancedPrompt = params.prompt;\n if (params.schema) {\n enhancedPrompt += `\\n\\nPlease respond with a JSON object that follows this schema:\\n${JSON.stringify(params.schema, null, 2)}`;\n }\n\n const response = await genAI.models.generateContent({\n model: modelName,\n contents: enhancedPrompt,\n config: {\n temperature,\n topK: 40,\n topP: 0.95,\n maxOutputTokens: 8192,\n responseMimeType: \"application/json\",\n safetySettings: getSafetySettings(),\n },\n });\n\n const text = response.text || \"\";\n\n const promptTokens = await countTokens(enhancedPrompt);\n const completionTokens = await countTokens(text);\n\n emitModelUsageEvent(runtime, modelType as ModelTypeName, params.prompt, {\n promptTokens,\n completionTokens,\n totalTokens: promptTokens + completionTokens,\n });\n\n try {\n return JSON.parse(text) as Record<string, string | number | boolean | null>;\n } catch {\n const jsonMatch = text.match(/\\{[\\s\\S]*\\}/);\n if (jsonMatch) {\n try {\n return JSON.parse(jsonMatch[0]) as Record<string, string | number | boolean | null>;\n } catch {\n throw new Error(\"Failed to 
parse JSON from response\");\n }\n }\n throw new Error(\"Failed to parse JSON from response\");\n }\n } catch (error) {\n logger.error(\n `[generateObject] Error: ${error instanceof Error ? error.message : String(error)}`\n );\n throw error;\n }\n}\n\nexport async function handleObjectSmall(\n runtime: IAgentRuntime,\n params: ObjectGenerationParams\n): Promise<Record<string, string | number | boolean | null>> {\n return generateObjectByModelType(runtime, params, \"OBJECT_SMALL\", getSmallModel);\n}\n\nexport async function handleObjectLarge(\n runtime: IAgentRuntime,\n params: ObjectGenerationParams\n): Promise<Record<string, string | number | boolean | null>> {\n return generateObjectByModelType(runtime, params, \"OBJECT_LARGE\", getLargeModel);\n}\n",
|
|
13
|
+
"import type { GenerateTextParams, IAgentRuntime } from \"@elizaos/core\";\nimport { logger, ModelType } from \"@elizaos/core\";\nimport {\n createGoogleGenAI,\n getLargeModel,\n getSafetySettings,\n getSmallModel,\n} from \"../utils/config\";\nimport { emitModelUsageEvent } from \"../utils/events\";\nimport { countTokens } from \"../utils/tokenization\";\n\nexport async function handleTextSmall(\n runtime: IAgentRuntime,\n { prompt, stopSequences = [], maxTokens = 8192, temperature = 0.7 }: GenerateTextParams\n): Promise<string> {\n const genAI = createGoogleGenAI(runtime);\n if (!genAI) {\n throw new Error(\"Google Generative AI client not initialized\");\n }\n\n const modelName = getSmallModel(runtime);\n\n logger.log(`[TEXT_SMALL] Using model: ${modelName}`);\n\n try {\n const systemInstruction = runtime.character.system || undefined;\n const response = await genAI.models.generateContent({\n model: modelName,\n contents: prompt,\n config: {\n temperature,\n topK: 40,\n topP: 0.95,\n maxOutputTokens: maxTokens,\n stopSequences,\n safetySettings: getSafetySettings(),\n ...(systemInstruction && { systemInstruction }),\n },\n });\n\n const text = response.text || \"\";\n\n const promptTokens = await countTokens(prompt);\n const completionTokens = await countTokens(text);\n\n emitModelUsageEvent(runtime, ModelType.TEXT_SMALL, prompt, {\n promptTokens,\n completionTokens,\n totalTokens: promptTokens + completionTokens,\n });\n\n return text;\n } catch (error) {\n logger.error(`[TEXT_SMALL] Error: ${error instanceof Error ? 
error.message : String(error)}`);\n throw error;\n }\n}\n\nexport async function handleTextLarge(\n runtime: IAgentRuntime,\n { prompt, stopSequences = [], maxTokens = 8192, temperature = 0.7 }: GenerateTextParams\n): Promise<string> {\n const genAI = createGoogleGenAI(runtime);\n if (!genAI) {\n throw new Error(\"Google Generative AI client not initialized\");\n }\n\n const modelName = getLargeModel(runtime);\n\n logger.log(`[TEXT_LARGE] Using model: ${modelName}`);\n\n try {\n const systemInstruction = runtime.character.system || undefined;\n const response = await genAI.models.generateContent({\n model: modelName,\n contents: prompt,\n config: {\n temperature,\n topK: 40,\n topP: 0.95,\n maxOutputTokens: maxTokens,\n stopSequences,\n safetySettings: getSafetySettings(),\n ...(systemInstruction && { systemInstruction }),\n },\n });\n\n const text = response.text || \"\";\n\n const promptTokens = await countTokens(prompt);\n const completionTokens = await countTokens(text);\n\n emitModelUsageEvent(runtime, ModelType.TEXT_LARGE, prompt, {\n promptTokens,\n completionTokens,\n totalTokens: promptTokens + completionTokens,\n });\n\n return text;\n } catch (error) {\n logger.error(`[TEXT_LARGE] Error: ${error instanceof Error ? error.message : String(error)}`);\n throw error;\n }\n}\n",
|
|
14
|
+
"import pluginDefault from \"./index\";\n\nexport * from \"./index\";\nexport default pluginDefault;\n"
|
|
15
|
+
],
|
|
16
|
+
"mappings": ";AAUA,mBAAS,sBAAQ;AACjB,wBAAS;;;ACXT,mBAA6B;AAC7B,wBAAS;;;ACAT;AACA;AAEA,SAAS,WAAW,CAAC,KAAiC;AAAA,EAEpD,IAAI,OAAO,YAAY,aAAa;AAAA,IAClC;AAAA,EACF;AAAA,EACA,MAAM,QAAQ,QAAQ,IAAI;AAAA,EAC1B,OAAO,UAAU,YAAY,YAAY,OAAO,KAAK;AAAA;AAGhD,SAAS,UAAU,CACxB,SACA,KACA,cACoB;AAAA,EACpB,MAAM,eAAe,QAAQ,WAAW,GAAG;AAAA,EAC3C,IAAI,iBAAiB,WAAW;AAAA,IAC9B,OAAO,OAAO,YAAY;AAAA,EAC5B;AAAA,EACA,OAAO,YAAY,GAAG,KAAK;AAAA;AAGtB,SAAS,SAAS,CAAC,SAA4C;AAAA,EACpE,OAAO,WAAW,SAAS,8BAA8B;AAAA;AAGpD,SAAS,aAAa,CAAC,SAAgC;AAAA,EAC5D,OACE,WAAW,SAAS,oBAAoB,KACxC,WAAW,SAAS,eAAe,sBAAsB,KACzD;AAAA;AAIG,SAAS,aAAa,CAAC,SAAgC;AAAA,EAC5D,OACE,WAAW,SAAS,oBAAoB,KACxC,WAAW,SAAS,eAAe,8BAA8B,KACjE;AAAA;AAIG,SAAS,aAAa,CAAC,SAAgC;AAAA,EAC5D,OACE,WAAW,SAAS,oBAAoB,KACxC,WAAW,SAAS,eAAe,8BAA8B,KACjE;AAAA;AAIG,SAAS,iBAAiB,CAAC,SAAgC;AAAA,EAChE,OACE,WAAW,SAAS,0BAA0B,oBAAoB,KAAK;AAAA;AAIpE,SAAS,iBAAiB,CAAC,SAA4C;AAAA,EAC5E,MAAM,SAAS,UAAU,OAAO;AAAA,EAChC,IAAI,CAAC,QAAQ;AAAA,IACX,OAAO,MAAM,yCAAyC;AAAA,IACtD,OAAO;AAAA,EACT;AAAA,EAEA,OAAO,IAAI,YAAY,EAAE,OAAO,CAAC;AAAA;AAG5B,SAAS,iBAAiB,GAAG;AAAA,EAClC,OAAO;AAAA,IACL;AAAA,MACE,UAAU,aAAa;AAAA,MACvB,WAAW,mBAAmB;AAAA,IAChC;AAAA,IACA;AAAA,MACE,UAAU,aAAa;AAAA,MACvB,WAAW,mBAAmB;AAAA,IAChC;AAAA,IACA;AAAA,MACE,UAAU,aAAa;AAAA,MACvB,WAAW,mBAAmB;AAAA,IAChC;AAAA,IACA;AAAA,MACE,UAAU,aAAa;AAAA,MACvB,WAAW,mBAAmB;AAAA,IAChC;AAAA,EACF;AAAA;;;ADxEK,SAAS,qBAAqB,CAAC,SAAuB,SAA8B;AAAA,GACxF,YAAY;AAAA,IACX,IAAI;AAAA,MACF,MAAM,SAAS,UAAU,OAAO;AAAA,MAChC,IAAI,CAAC,QAAQ;AAAA,QACX,QAAO,KAAK,yCAAyC;AAAA,QACrD;AAAA,MACF;AAAA,MAEA,MAAM,QAAQ,IAAI,aAAY,EAAE,OAAO,CAAC;AAAA,MACxC,MAAM,YAAY,MAAM,MAAM,OAAO,KAAK;AAAA,MAC1C,MAAM,SAAS,CAAC;AAAA,MAChB,iBAAiB,SAAS,WAAW;AAAA,QACnC,OAAO,KAAK,KAAK;AAAA,MACnB;AAAA,MACA,QAAO,IAAI,kDAAkD,OAAO,QAAQ;AAAA,MAC5E,OAAO,OAAO;AAAA,MACd,QAAO,KACL,kCAAkC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,GACzF;AAAA;AAAA,KAED;AAAA;;;AEnCL,mBAAS;;;ACAT;AAEO,SAAS,mBAAmB,CACjC,SACA,MACA,SACA,OAKM;AAAA,EAEN,QAAQ,UAAU,UAAU,YAAY;AAAA,IACtC;AAAA,IACA,QAAQ;AAAA,IACR;AAAA,IACA,QAAQ;AAAA,MACN,QAAQ,MAAM;AA
AA,MACd,YAAY,MAAM;AAAA,MAClB,OAAO,MAAM;AAAA,IACf;AAAA,EACF,CAAC;AAAA;;;ACvBH,eAAsB,WAAW,CAAC,MAA+B;AAAA,EAC/D,OAAO,KAAK,KAAK,KAAK,SAAS,CAAC;AAAA;;;AFKlC,eAAsB,mBAAmB,CACvC,SACA,QACmB;AAAA,EACnB,MAAM,QAAQ,kBAAkB,OAAO;AAAA,EACvC,IAAI,CAAC,OAAO;AAAA,IACV,MAAM,IAAI,MAAM,6CAA6C;AAAA,EAC/D;AAAA,EAEA,MAAM,qBAAqB,kBAAkB,OAAO;AAAA,EACpD,QAAO,MAAM,iCAAiC,oBAAoB;AAAA,EAElE,IAAI,WAAW,MAAM;AAAA,IACnB,OAAO,MAAM,GAAG,EAAE,KAAK,CAAC;AAAA,EAC1B;AAAA,EAEA,IAAI,OACF,OAAO,WAAW,WACd,SACA,OAAO,WAAW,YAAY,OAAO,OACnC,OAAO,OACP;AAAA,EAER,IAAI,CAAC,KAAK,KAAK,GAAG;AAAA,IAChB,QAAO,KAAK,0BAA0B;AAAA,IACtC,OAAO,MAAM,GAAG,EAAE,KAAK,CAAC;AAAA,EAC1B;AAAA,EAGA,MAAM,WAAW,OAAQ;AAAA,EACzB,IAAI,KAAK,SAAS,UAAU;AAAA,IAC1B,QAAO,KACL,6CAA6C,KAAK,KAAK,KAAK,SAAS,CAAC,uCACxE;AAAA,IACA,OAAO,KAAK,MAAM,GAAG,QAAQ;AAAA,EAC/B;AAAA,EAEA,IAAI;AAAA,IACF,MAAM,WAAW,MAAM,MAAM,OAAO,aAAa;AAAA,MAC/C,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,CAAC;AAAA,IAED,MAAM,YAAY,SAAS,aAAa,IAAI,UAAU,CAAC;AAAA,IAEvD,MAAM,eAAe,MAAM,YAAY,IAAI;AAAA,IAE3C,oBAAoB,SAAS,UAAU,gBAAgB,MAAM;AAAA,MAC3D;AAAA,MACA,kBAAkB;AAAA,MAClB,aAAa;AAAA,IACf,CAAC;AAAA,IAED,QAAO,IAAI,6BAA6B,UAAU,QAAQ;AAAA,IAC1D,OAAO;AAAA,IACP,OAAO,OAAO;AAAA,IACd,QAAO,MACL,+BAA+B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,GACtF;AAAA,IACA,OAAO,MAAM,GAAG,EAAE,KAAK,CAAC;AAAA;AAAA;;AGhE5B,mBAAS;AAIT,IAAM,aAAa,OAAO,WAAW,UAAU,aAAa,WAAW,QAAQ;AAE/E,eAAsB,sBAAsB,CAC1C,SACA,QACmC;AAAA,EACnC,MAAM,QAAQ,kBAAkB,OAAO;AAAA,EACvC,IAAI,CAAC,OAAO;AAAA,IACV,MAAM,IAAI,MAAM,6CAA6C;AAAA,EAC/D;AAAA,EAEA,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,MAAM,YAAY,cAAc,OAAO;AAAA,EACvC,QAAO,IAAI,oCAAoC,WAAW;AAAA,EAE1D,IAAI,OAAO,WAAW,UAAU;AAAA,IAC9B,WAAW;AAAA,IACX,aAAa;AAAA,EACf,EAAO;AAAA,IACL,WAAW,OAAO;AAAA,IAClB,aACE,OAAO,UAAU;AAAA;AAAA,EAGrB,IAAI;AAAA,IACF,MAAM,gBAAgB,MAAM,WAAW,QAAQ;AAAA,IAC/C,IAAI,CAAC,cAAc,IAAI;AAAA,MACrB,MAAM,IAAI,MAAM,0BAA0B,cAAc,YAAY;AAAA,IACtE;AAAA,IAEA,MAAM,YAAY,MAAM,cAAc,YAAY;AAAA,IAClD,MAAM,cAAc,OAAO,KAAK,SAAS,EAAE,SAAS,QAAQ;AAAA,IAC5D,MAAM,cAAc,cAAc,QAAQ,IAAI,cAAc,KAAK;AAAA,IAEjE,MAAM,WAAW,MAAM,MAAM,OAAO,gBAAgB;AAAA,MAClD,OAAO
;AAAA,MACP,UAAU;AAAA,QACR;AAAA,UACE,MAAM;AAAA,UACN,OAAO;AAAA,YACL,EAAE,MAAM,WAAW;AAAA,YACnB;AAAA,cACE,YAAY;AAAA,gBACV,UAAU;AAAA,gBACV,MAAM;AAAA,cACR;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,MACA,QAAQ;AAAA,QACN,aAAa;AAAA,QACb,MAAM;AAAA,QACN,MAAM;AAAA,QACN,iBAAiB;AAAA,QACjB,gBAAgB,kBAAkB;AAAA,MACpC;AAAA,IACF,CAAC;AAAA,IAED,MAAM,eAAe,SAAS,QAAQ;AAAA,IAEtC,IAAI;AAAA,MACF,MAAM,eAAe,KAAK,MAAM,YAAY;AAAA,MAC5C,IAAI,OAAO,aAAa,UAAU,YAAY,OAAO,aAAa,gBAAgB,UAAU;AAAA,QAC1F,OAAO;AAAA,UACL,OAAO,aAAa;AAAA,UACpB,aAAa,aAAa;AAAA,QAC5B;AAAA,MACF;AAAA,MACA,MAAM;AAAA,IAIR,MAAM,aAAa,aAAa,MAAM,2BAA2B;AAAA,IACjE,MAAM,QAAQ,aAAa,IAAI,KAAK,KAAK;AAAA,IACzC,MAAM,cAAc,aAChB,aAAa,QAAQ,6BAA6B,EAAE,EAAE,KAAK,IAC3D,aAAa,KAAK;AAAA,IAEtB,OAAO,EAAE,OAAO,YAAY;AAAA,IAC5B,OAAO,OAAO;AAAA,IACd,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,IACrE,QAAO,MAAM,0BAA0B,SAAS;AAAA,IAChD,OAAO;AAAA,MACL,OAAO;AAAA,MACP,aAAa,UAAU;AAAA,IACzB;AAAA;AAAA;;AC3FJ,mBAAS;AAUT,eAAe,yBAAyB,CACtC,SACA,QACA,WACA,YAC2D;AAAA,EAC3D,MAAM,QAAQ,kBAAkB,OAAO;AAAA,EACvC,IAAI,CAAC,OAAO;AAAA,IACV,MAAM,IAAI,MAAM,6CAA6C;AAAA,EAC/D;AAAA,EAEA,MAAM,YAAY,WAAW,OAAO;AAAA,EACpC,MAAM,cAAc,OAAO,eAAe;AAAA,EAE1C,QAAO,KAAK,SAAS,oBAAoB,WAAW;AAAA,EAEpD,IAAI;AAAA,IACF,IAAI,iBAAiB,OAAO;AAAA,IAC5B,IAAI,OAAO,QAAQ;AAAA,MACjB,kBAAkB;AAAA;AAAA;AAAA,EAAoE,KAAK,UAAU,OAAO,QAAQ,MAAM,CAAC;AAAA,IAC7H;AAAA,IAEA,MAAM,WAAW,MAAM,MAAM,OAAO,gBAAgB;AAAA,MAClD,OAAO;AAAA,MACP,UAAU;AAAA,MACV,QAAQ;AAAA,QACN;AAAA,QACA,MAAM;AAAA,QACN,MAAM;AAAA,QACN,iBAAiB;AAAA,QACjB,kBAAkB;AAAA,QAClB,gBAAgB,kBAAkB;AAAA,MACpC;AAAA,IACF,CAAC;AAAA,IAED,MAAM,OAAO,SAAS,QAAQ;AAAA,IAE9B,MAAM,eAAe,MAAM,YAAY,cAAc;AAAA,IACrD,MAAM,mBAAmB,MAAM,YAAY,IAAI;AAAA,IAE/C,oBAAoB,SAAS,WAA4B,OAAO,QAAQ;AAAA,MACtE;AAAA,MACA;AAAA,MACA,aAAa,eAAe;AAAA,IAC9B,CAAC;AAAA,IAED,IAAI;AAAA,MACF,OAAO,KAAK,MAAM,IAAI;AAAA,MACtB,MAAM;AAAA,MACN,MAAM,YAAY,KAAK,MAAM,aAAa;AAAA,MAC1C,IAAI,WAAW;AAAA,QACb,IAAI;AAAA,UACF,OAAO,KAAK,MAAM,UAAU,EAAE;AAAA,UAC9B,MAAM;AAAA,UACN,MAAM,IAAI,MAAM,oCAAoC;AAAA;AAAA,MAExD;AAAA,MACA,MAAM,IAAI,MAAM,oCAAoC;AAAA;
AAAA,IAEtD,OAAO,OAAO;AAAA,IACd,QAAO,MACL,2BAA2B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,GAClF;AAAA,IACA,MAAM;AAAA;AAAA;AAIV,eAAsB,iBAAiB,CACrC,SACA,QAC2D;AAAA,EAC3D,OAAO,0BAA0B,SAAS,QAAQ,gBAAgB,aAAa;AAAA;AAGjF,eAAsB,iBAAiB,CACrC,SACA,QAC2D;AAAA,EAC3D,OAAO,0BAA0B,SAAS,QAAQ,gBAAgB,aAAa;AAAA;;ACxFjF,mBAAS,sBAAQ;AAUjB,eAAsB,eAAe,CACnC,WACE,QAAQ,gBAAgB,CAAC,GAAG,YAAY,MAAM,cAAc,OAC7C;AAAA,EACjB,MAAM,QAAQ,kBAAkB,OAAO;AAAA,EACvC,IAAI,CAAC,OAAO;AAAA,IACV,MAAM,IAAI,MAAM,6CAA6C;AAAA,EAC/D;AAAA,EAEA,MAAM,YAAY,cAAc,OAAO;AAAA,EAEvC,QAAO,IAAI,6BAA6B,WAAW;AAAA,EAEnD,IAAI;AAAA,IACF,MAAM,oBAAoB,QAAQ,UAAU,UAAU;AAAA,IACtD,MAAM,WAAW,MAAM,MAAM,OAAO,gBAAgB;AAAA,MAClD,OAAO;AAAA,MACP,UAAU;AAAA,MACV,QAAQ;AAAA,QACN;AAAA,QACA,MAAM;AAAA,QACN,MAAM;AAAA,QACN,iBAAiB;AAAA,QACjB;AAAA,QACA,gBAAgB,kBAAkB;AAAA,WAC9B,qBAAqB,EAAE,kBAAkB;AAAA,MAC/C;AAAA,IACF,CAAC;AAAA,IAED,MAAM,OAAO,SAAS,QAAQ;AAAA,IAE9B,MAAM,eAAe,MAAM,YAAY,MAAM;AAAA,IAC7C,MAAM,mBAAmB,MAAM,YAAY,IAAI;AAAA,IAE/C,oBAAoB,SAAS,WAAU,YAAY,QAAQ;AAAA,MACzD;AAAA,MACA;AAAA,MACA,aAAa,eAAe;AAAA,IAC9B,CAAC;AAAA,IAED,OAAO;AAAA,IACP,OAAO,OAAO;AAAA,IACd,QAAO,MAAM,uBAAuB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,GAAG;AAAA,IAC5F,MAAM;AAAA;AAAA;AAIV,eAAsB,eAAe,CACnC,WACE,QAAQ,gBAAgB,CAAC,GAAG,YAAY,MAAM,cAAc,OAC7C;AAAA,EACjB,MAAM,QAAQ,kBAAkB,OAAO;AAAA,EACvC,IAAI,CAAC,OAAO;AAAA,IACV,MAAM,IAAI,MAAM,6CAA6C;AAAA,EAC/D;AAAA,EAEA,MAAM,YAAY,cAAc,OAAO;AAAA,EAEvC,QAAO,IAAI,6BAA6B,WAAW;AAAA,EAEnD,IAAI;AAAA,IACF,MAAM,oBAAoB,QAAQ,UAAU,UAAU;AAAA,IACtD,MAAM,WAAW,MAAM,MAAM,OAAO,gBAAgB;AAAA,MAClD,OAAO;AAAA,MACP,UAAU;AAAA,MACV,QAAQ;AAAA,QACN;AAAA,QACA,MAAM;AAAA,QACN,MAAM;AAAA,QACN,iBAAiB;AAAA,QACjB;AAAA,QACA,gBAAgB,kBAAkB;AAAA,WAC9B,qBAAqB,EAAE,kBAAkB;AAAA,MAC/C;AAAA,IACF,CAAC;AAAA,IAED,MAAM,OAAO,SAAS,QAAQ;AAAA,IAE9B,MAAM,eAAe,MAAM,YAAY,MAAM;AAAA,IAC7C,MAAM,mBAAmB,MAAM,YAAY,IAAI;AAAA,IAE/C,oBAAoB,SAAS,WAAU,YAAY,QAAQ;AAAA,MACzD;AAAA,MACA;AAAA,MACA,aAAa,eAAe;AAAA,IAC9B,CAAC;AAAA,IAED,OAAO;AAAA,IACP,OAAO,OAAO;AAAA,IACd,QAAO,MAAM,uBAAuB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,GAAG;AAAA,IAC5F,MAAM;AAAA;AAAA
;;AR3EV,IAAM,cAAc;AAAA,EAClB;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,MACL;AAAA,QACE,MAAM;AAAA,QACN,IAAI,OAAO,YAA2B;AAAA,UACpC,MAAM,SAAS,UAAU,OAAO;AAAA,UAChC,IAAI,CAAC,QAAQ;AAAA,YACX,MAAM,IAAI,MAAM,sCAAsC;AAAA,UACxD;AAAA,UACA,MAAM,QAAQ,IAAI,aAAY,EAAE,OAAO,CAAC;AAAA,UACxC,MAAM,YAAY,MAAM,MAAM,OAAO,KAAK;AAAA,UAC1C,MAAM,SAAS,CAAC;AAAA,UAChB,iBAAiB,SAAS,WAAW;AAAA,YACnC,OAAO,KAAK,KAAK;AAAA,UACnB;AAAA,UACA,QAAO,IAAI,qBAAqB,OAAO,QAAQ;AAAA;AAAA,MAEnD;AAAA,MACA;AAAA,QACE,MAAM;AAAA,QACN,IAAI,OAAO,YAA2B;AAAA,UACpC,IAAI;AAAA,YACF,MAAM,YAAY,MAAM,QAAQ,SAAS,WAAU,gBAAgB;AAAA,cACjE,MAAM;AAAA,YACR,CAAC;AAAA,YACD,QAAO,IAAI,wBAAwB,UAAU,QAAQ;AAAA,YACrD,IAAI,UAAU,WAAW,GAAG;AAAA,cAC1B,MAAM,IAAI,MAAM,8BAA8B;AAAA,YAChD;AAAA,YACA,OAAO,OAAO;AAAA,YACd,QAAO,MACL,iCAAiC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,GACxF;AAAA,YACA,MAAM;AAAA;AAAA;AAAA,MAGZ;AAAA,MACA;AAAA,QACE,MAAM;AAAA,QACN,IAAI,OAAO,YAA2B;AAAA,UACpC,IAAI;AAAA,YACF,MAAM,OAAO,MAAM,QAAQ,SAAS,WAAU,YAAY;AAAA,cACxD,QAAQ;AAAA,YACV,CAAC;AAAA,YACD,IAAI,KAAK,WAAW,GAAG;AAAA,cACrB,MAAM,IAAI,MAAM,yBAAyB;AAAA,YAC3C;AAAA,YACA,QAAO,IAAI,8BAA8B,IAAI;AAAA,YAC7C,OAAO,OAAO;AAAA,YACd,QAAO,MACL,6BAA6B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,GACpF;AAAA,YACA,MAAM;AAAA;AAAA;AAAA,MAGZ;AAAA,MACA;AAAA,QACE,MAAM;AAAA,QACN,IAAI,OAAO,YAA2B;AAAA,UACpC,IAAI;AAAA,YACF,MAAM,OAAO,MAAM,QAAQ,SAAS,WAAU,YAAY;AAAA,cACxD,QAAQ;AAAA,YACV,CAAC;AAAA,YACD,IAAI,KAAK,WAAW,GAAG;AAAA,cACrB,MAAM,IAAI,MAAM,yBAAyB;AAAA,YAC3C;AAAA,YACA,QAAO,IAAI,8BAA8B,GAAG,KAAK,UAAU,GAAG,GAAG,MAAM;AAAA,YACvE,OAAO,OAAO;AAAA,YACd,QAAO,MACL,6BAA6B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,GACpF;AAAA,YACA,MAAM;AAAA;AAAA;AAAA,MAGZ;AAAA,MACA;AAAA,QACE,MAAM;AAAA,QACN,IAAI,OAAO,YAA2B;AAAA,UACpC,IAAI;AAAA,YACF,MAAM,SAAS,MAAM,QAAQ,SAC3B,WAAU,mBACV,mLACF;AAAA,YAEA,IACE,UACA,OAAO,WAAW,YAClB,WAAW,UACX,iBAAiB,QACjB;AAAA,cACA,QAAO,IAAI,sBAAsB,KAAK,UAAU,MAAM,CAAC;AAAA,YACzD,EAAO;AAAA,cACL,QAAO,MAAM,4CAA4C,KAAK,UAAU,MAAM,GAAG;AAAA;AAAA,YAEnF,OAAO,OAAO;AAAA,YACd,QAAO,MACL,oCAAoC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,GAC3F;AAAA,YACA,MAAM;AAAA;AAAA;AAAA,MAGZ;AAA
A,MACA;AAAA,QACE,MAAM;AAAA,QACN,IAAI,OAAO,YAA2B;AAAA,UACpC,IAAI;AAAA,YACF,MAAM,SAAS;AAAA,cACb,MAAM;AAAA,cACN,YAAY;AAAA,gBACV,MAAM,EAAE,MAAM,SAAS;AAAA,gBACvB,KAAK,EAAE,MAAM,SAAS;AAAA,gBACtB,SAAS,EAAE,MAAM,SAAS,OAAO,EAAE,MAAM,SAAS,EAAE;AAAA,cACtD;AAAA,cACA,UAAU,CAAC,QAAQ,OAAO,SAAS;AAAA,YACrC;AAAA,YAEA,MAAM,SAAS,MAAM,QAAQ,SAAS,WAAU,cAAc;AAAA,cAC5D,QAAQ;AAAA,cACR;AAAA,YACF,CAAC;AAAA,YAED,QAAO,IAAI,qBAAqB,KAAK,UAAU,MAAM,CAAC;AAAA,YAEtD,IAAI,CAAC,OAAO,QAAQ,CAAC,OAAO,OAAO,CAAC,OAAO,SAAS;AAAA,cAClD,MAAM,IAAI,MAAM,0CAA0C;AAAA,YAC5D;AAAA,YACA,OAAO,OAAO;AAAA,YACd,QAAO,MACL,oCAAoC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,GAC3F;AAAA,YACA,MAAM;AAAA;AAAA;AAAA,MAGZ;AAAA,IACF;AAAA,EACF;AACF;AAIA,SAAS,aAAa,GAAmB;AAAA,EACvC,IAAI,OAAO,YAAY,aAAa;AAAA,IAClC,OAAO,CAAC;AAAA,EACV;AAAA,EACA,OAAO,QAAQ;AAAA;AAGjB,IAAM,MAAM,cAAc;AAEnB,IAAM,oBAA4B;AAAA,EACvC,MAAM;AAAA,EACN,aAAa;AAAA,EAEb,QAAQ;AAAA,IACN,8BAA8B,IAAI,gCAAgC;AAAA,IAClE,oBAAoB,IAAI,sBAAsB;AAAA,IAC9C,oBAAoB,IAAI,sBAAsB;AAAA,IAC9C,oBAAoB,IAAI,sBAAsB;AAAA,IAC9C,wBAAwB,IAAI,0BAA0B;AAAA,IACtD,aAAa,IAAI,eAAe;AAAA,IAChC,aAAa,IAAI,eAAe;AAAA,IAChC,aAAa,IAAI,eAAe;AAAA,EAClC;AAAA,OAEM,KAAI,CAAC,QAAQ,SAAS;AAAA,IAC1B,sBAAsB,QAAwB,OAAO;AAAA;AAAA,EAGvD,QAAQ;AAAA,KACL,WAAU,aAAa,OACtB,SACA,WACoB;AAAA,MACpB,OAAO,gBAAgB,SAAS,MAAM;AAAA;AAAA,KAGvC,WAAU,aAAa,OACtB,SACA,WACoB;AAAA,MACpB,OAAO,gBAAgB,SAAS,MAAM;AAAA;AAAA,KAGvC,WAAU,iBAAiB,OAC1B,SACA,WACsB;AAAA,MACtB,OAAO,oBAAoB,SAAS,MAAM;AAAA;AAAA,KAG3C,WAAU,oBAAoB,OAC7B,SACA,WACoD;AAAA,MACpD,OAAO,uBAAuB,SAAS,MAAM;AAAA;AAAA,KAG9C,WAAU,eAAe,OACxB,SACA,WAC8D;AAAA,MAC9D,OAAO,kBAAkB,SAAS,MAAM;AAAA;AAAA,KAGzC,WAAU,eAAe,OACxB,SACA,WAC8D;AAAA,MAC9D,OAAO,kBAAkB,SAAS,MAAM;AAAA;AAAA,EAE5C;AAAA,EAEA,OAAO;AACT;AAEA,IAAe;;;AShPf,IAAe;",
|
|
17
|
+
"debugId": "E75B1BEAE9D5BC6A64756E2164756E21",
|
|
18
|
+
"names": []
|
|
19
|
+
}
|