@elizaos/plugin-openai 1.0.0-beta.3 → 1.0.0-beta.33

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE CHANGED
@@ -1,6 +1,6 @@
  MIT License

- Copyright (c) 2025 Shaw Walters, aka Moon aka @lalalune
+ Copyright (c) 2025 Shaw Walters and elizaOS Contributors

  Permission is hereby granted, free of charge, to any person obtaining a copy
  of this software and associated documentation files (the "Software"), to deal
package/README.md CHANGED
@@ -19,7 +19,9 @@ The plugin requires these environment variables (can be set in .env file or char
  "OPENAI_API_KEY": "your_openai_api_key",
  "OPENAI_BASE_URL": "optional_custom_endpoint",
  "OPENAI_SMALL_MODEL": "gpt-4o-mini",
- "OPENAI_LARGE_MODEL": "gpt-4o"
+ "OPENAI_LARGE_MODEL": "gpt-4o",
+ "OPENAI_EMBEDDING_MODEL": "text-embedding-3-small",
+ "OPENAI_EMBEDDING_DIMENSIONS": "1536"
  }
  ```

@@ -31,6 +33,8 @@ OPENAI_API_KEY=your_openai_api_key
  OPENAI_BASE_URL=optional_custom_endpoint
  OPENAI_SMALL_MODEL=gpt-4o-mini
  OPENAI_LARGE_MODEL=gpt-4o
+ OPENAI_EMBEDDING_MODEL=text-embedding-3-small
+ OPENAI_EMBEDDING_DIMENSIONS=1536
  ```

  ### Configuration Options
@@ -39,12 +43,14 @@ OPENAI_LARGE_MODEL=gpt-4o
  - `OPENAI_BASE_URL`: Custom API endpoint (default: https://api.openai.com/v1)
  - `OPENAI_SMALL_MODEL`: Defaults to GPT-4o Mini ("gpt-4o-mini")
  - `OPENAI_LARGE_MODEL`: Defaults to GPT-4o ("gpt-4o")
+ - `OPENAI_EMBEDDING_MODEL`: Defaults to text-embedding-3-small ("text-embedding-3-small")
+ - `OPENAI_EMBEDDING_DIMENSIONS`: Defaults to 1536 (1536)

  The plugin provides these model classes:

  - `TEXT_SMALL`: Optimized for fast, cost-effective responses
  - `TEXT_LARGE`: For complex tasks requiring deeper reasoning
- - `TEXT_EMBEDDING`: Text embedding model (text-embedding-3-small)
+ - `TEXT_EMBEDDING`: Text embedding model (text-embedding-3-small by default)
  - `IMAGE`: DALL-E image generation
  - `IMAGE_DESCRIPTION`: GPT-4o image analysis
  - `TRANSCRIPTION`: Whisper audio transcription
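
For context, this is how the newly documented embedding settings surface to a consumer. A minimal sketch, assuming an initialized agent runtime with the defaults shown above; it mirrors the `openai_test_text_embedding` test that appears later in this diff:

```typescript
import { ModelType, type AgentRuntime } from "@elizaos/core";

// Sketch only: `runtime` is assumed to be a configured agent runtime with
// OPENAI_API_KEY set and the default embedding settings from the README above.
async function embedText(runtime: AgentRuntime): Promise<number[]> {
  const embedding = (await runtime.useModel(ModelType.TEXT_EMBEDDING, {
    text: "Hello, world!",
  })) as number[];
  // With OPENAI_EMBEDDING_DIMENSIONS=1536 the returned vector has 1536 entries.
  console.log("embedding length:", embedding.length);
  return embedding;
}
```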
package/dist/index.js CHANGED
@@ -2,10 +2,34 @@
  import { createOpenAI } from "@ai-sdk/openai";
  import {
  ModelType,
- logger
+ logger,
+ VECTOR_DIMS
  } from "@elizaos/core";
- import { generateObject, generateText } from "ai";
+ import { generateObject, generateText, JSONParseError } from "ai";
  import { encodingForModel } from "js-tiktoken";
+ import FormData from "form-data";
+ import fetch from "node-fetch";
+ function getSetting(runtime, key, defaultValue) {
+ return runtime.getSetting(key) ?? process.env[key] ?? defaultValue;
+ }
+ function getBaseURL(runtime) {
+ return getSetting(runtime, "OPENAI_BASE_URL", "https://api.openai.com/v1");
+ }
+ function getApiKey(runtime) {
+ return getSetting(runtime, "OPENAI_API_KEY");
+ }
+ function getSmallModel(runtime) {
+ return getSetting(runtime, "OPENAI_SMALL_MODEL") ?? getSetting(runtime, "SMALL_MODEL", "gpt-4o-mini");
+ }
+ function getLargeModel(runtime) {
+ return getSetting(runtime, "OPENAI_LARGE_MODEL") ?? getSetting(runtime, "LARGE_MODEL", "gpt-4o");
+ }
+ function createOpenAIClient(runtime) {
+ return createOpenAI({
+ apiKey: getApiKey(runtime),
+ baseURL: getBaseURL(runtime)
+ });
+ }
  async function tokenizeText(model, prompt) {
  const modelName = model === ModelType.TEXT_SMALL ? process.env.OPENAI_SMALL_MODEL ?? process.env.SMALL_MODEL ?? "gpt-4o-mini" : process.env.LARGE_MODEL ?? "gpt-4o";
  const encoding = encodingForModel(modelName);
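
The hunk above introduces small configuration helpers. The key behavior is the lookup order in `getSetting`: a runtime/character setting wins, then the matching environment variable, then the hard-coded default. A standalone sketch of that chain (the stub runtime is illustrative only):

```typescript
// Illustrative stub: only getSetting() is modeled, with one character-level value.
const stubRuntime = {
  getSetting: (key: string): string | undefined =>
    key === "OPENAI_SMALL_MODEL" ? "gpt-4o-mini" : undefined,
};

// Same fallback chain as the helper added above.
function getSetting(
  runtime: { getSetting(key: string): string | undefined },
  key: string,
  defaultValue?: string
): string | undefined {
  return runtime.getSetting(key) ?? process.env[key] ?? defaultValue;
}

// Resolves from the runtime first ("gpt-4o-mini" here); if the runtime had no
// value, the process.env variable of the same name would be used, then the default.
const smallModel = getSetting(stubRuntime, "OPENAI_SMALL_MODEL", "gpt-4o-mini");
const baseURL = getSetting(stubRuntime, "OPENAI_BASE_URL", "https://api.openai.com/v1");
```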
@@ -17,6 +41,69 @@ async function detokenizeText(model, tokens) {
  const encoding = encodingForModel(modelName);
  return encoding.decode(tokens);
  }
+ async function generateObjectByModelType(runtime, params, modelType, getModelFn) {
+ const openai = createOpenAIClient(runtime);
+ const model = getModelFn(runtime);
+ try {
+ if (params.schema) {
+ logger.info(`Using ${modelType} without schema validation`);
+ }
+ const { object } = await generateObject({
+ model: openai.languageModel(model),
+ output: "no-schema",
+ prompt: params.prompt,
+ temperature: params.temperature,
+ experimental_repairText: getJsonRepairFunction()
+ });
+ return object;
+ } catch (error) {
+ logger.error(`Error generating object with ${modelType}:`, error);
+ throw error;
+ }
+ }
+ function getJsonRepairFunction() {
+ return async ({ text, error }) => {
+ try {
+ if (error instanceof JSONParseError) {
+ const cleanedText = text.replace(/```json\n|\n```|```/g, "");
+ JSON.parse(cleanedText);
+ return cleanedText;
+ }
+ } catch (jsonError) {
+ logger.warn("Failed to repair JSON text:", jsonError);
+ return null;
+ }
+ };
+ }
+ async function fetchTextToSpeech(runtime, text) {
+ const apiKey = getApiKey(runtime);
+ const model = getSetting(runtime, "OPENAI_TTS_MODEL", "gpt-4o-mini-tts");
+ const voice = getSetting(runtime, "OPENAI_TTS_VOICE", "nova");
+ const instructions = getSetting(runtime, "OPENAI_TTS_INSTRUCTIONS", "");
+ const baseURL = getBaseURL(runtime);
+ try {
+ const res = await fetch(`${baseURL}/audio/speech`, {
+ method: "POST",
+ headers: {
+ Authorization: `Bearer ${apiKey}`,
+ "Content-Type": "application/json"
+ },
+ body: JSON.stringify({
+ model,
+ voice,
+ input: text,
+ ...instructions && { instructions }
+ })
+ });
+ if (!res.ok) {
+ const err = await res.text();
+ throw new Error(`OpenAI TTS error ${res.status}: ${err}`);
+ }
+ return res.body;
+ } catch (err) {
+ throw new Error(`Failed to fetch speech from OpenAI TTS: ${err.message || err}`);
+ }
+ }
  var openaiPlugin = {
  name: "openai",
  description: "OpenAI plugin",
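
One of the new helpers deserves a note: `getJsonRepairFunction` is wired into `generateObject` via `experimental_repairText` and only strips Markdown code fences before re-parsing. A small sketch of the case it targets; the fenced string is a made-up example of model output, and the fence is built with `repeat` only to keep the example readable here:

```typescript
// Hypothetical model output wrapped in a Markdown code fence; JSON.parse rejects it as-is.
const fence = "`".repeat(3);
const raw = `${fence}json\n{"mood":"calm","score":3}\n${fence}`;

// The same cleanup the repair helper above applies before retrying JSON.parse.
const pattern = new RegExp(`${fence}json\\n|\\n${fence}|${fence}`, "g");
const cleaned = raw.replace(pattern, "");
const parsed = JSON.parse(cleaned) as { mood: string; score: number };
console.log(parsed.mood); // "calm"
```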
@@ -26,20 +113,22 @@ var openaiPlugin = {
  OPENAI_SMALL_MODEL: process.env.OPENAI_SMALL_MODEL,
  OPENAI_LARGE_MODEL: process.env.OPENAI_LARGE_MODEL,
  SMALL_MODEL: process.env.SMALL_MODEL,
- LARGE_MODEL: process.env.LARGE_MODEL
+ LARGE_MODEL: process.env.LARGE_MODEL,
+ OPENAI_EMBEDDING_MODEL: process.env.OPENAI_EMBEDDING_MODEL,
+ OPENAI_EMBEDDING_DIMENSIONS: process.env.OPENAI_EMBEDDING_DIMENSIONS
  },
- async init(config) {
+ async init(_config, runtime) {
  try {
- if (!process.env.OPENAI_API_KEY) {
+ if (!getApiKey(runtime)) {
  logger.warn(
  "OPENAI_API_KEY is not set in environment - OpenAI functionality will be limited"
  );
  return;
  }
  try {
- const baseURL = process.env.OPENAI_BASE_URL ?? "https://api.openai.com/v1";
+ const baseURL = getBaseURL(runtime);
  const response = await fetch(`${baseURL}/models`, {
- headers: { Authorization: `Bearer ${process.env.OPENAI_API_KEY}` }
+ headers: { Authorization: `Bearer ${getApiKey(runtime)}` }
  });
  if (!response.ok) {
  logger.warn(`OpenAI API key validation failed: ${response.statusText}`);
@@ -57,10 +146,21 @@ var openaiPlugin = {
  }
  },
  models: {
- [ModelType.TEXT_EMBEDDING]: async (_runtime, params) => {
+ [ModelType.TEXT_EMBEDDING]: async (runtime, params) => {
+ const embeddingDimension = parseInt(
+ getSetting(runtime, "OPENAI_EMBEDDING_DIMENSIONS", "1536")
+ );
+ if (!Object.values(VECTOR_DIMS).includes(embeddingDimension)) {
+ logger.error(
+ `Invalid embedding dimension: ${embeddingDimension}. Must be one of: ${Object.values(VECTOR_DIMS).join(", ")}`
+ );
+ throw new Error(
+ `Invalid embedding dimension: ${embeddingDimension}. Must be one of: ${Object.values(VECTOR_DIMS).join(", ")}`
+ );
+ }
  if (params === null) {
  logger.debug("Creating test embedding for initialization");
- const testVector = Array(1536).fill(0);
+ const testVector = Array(embeddingDimension).fill(0);
  testVector[0] = 0.1;
  return testVector;
  }
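
The new guard means `OPENAI_EMBEDDING_DIMENSIONS` must be one of the sizes enumerated in core's `VECTOR_DIMS` (the default 1536 matches text-embedding-3-small's default output size). A sketch of the same check in isolation; the helper name is illustrative, not part of the plugin:

```typescript
import { VECTOR_DIMS } from "@elizaos/core";

// Illustrative helper mirroring the guard added above: parse the configured
// dimension and refuse values the core vector schema does not support.
function assertSupportedDimension(raw: string | undefined): number {
  const dim = Number.parseInt(raw ?? "1536", 10);
  const supported = Object.values(VECTOR_DIMS) as number[];
  if (!supported.includes(dim)) {
    throw new Error(
      `Invalid embedding dimension: ${dim}. Must be one of: ${supported.join(", ")}`
    );
  }
  return dim;
}

// e.g. assertSupportedDimension(process.env.OPENAI_EMBEDDING_DIMENSIONS)
```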
@@ -71,39 +171,39 @@ var openaiPlugin = {
  text = params.text;
  } else {
  logger.warn("Invalid input format for embedding");
- const fallbackVector = Array(1536).fill(0);
+ const fallbackVector = Array(embeddingDimension).fill(0);
  fallbackVector[0] = 0.2;
  return fallbackVector;
  }
  if (!text.trim()) {
  logger.warn("Empty text for embedding");
- const emptyVector = Array(1536).fill(0);
+ const emptyVector = Array(embeddingDimension).fill(0);
  emptyVector[0] = 0.3;
  return emptyVector;
  }
  try {
- const baseURL = process.env.OPENAI_BASE_URL ?? "https://api.openai.com/v1";
+ const baseURL = getBaseURL(runtime);
  const response = await fetch(`${baseURL}/embeddings`, {
  method: "POST",
  headers: {
- Authorization: `Bearer ${process.env.OPENAI_API_KEY}`,
+ Authorization: `Bearer ${getApiKey(runtime)}`,
  "Content-Type": "application/json"
  },
  body: JSON.stringify({
- model: "text-embedding-3-small",
+ model: getSetting(runtime, "OPENAI_EMBEDDING_MODEL", "text-embedding-3-small"),
  input: text
  })
  });
  if (!response.ok) {
  logger.error(`OpenAI API error: ${response.status} - ${response.statusText}`);
- const errorVector = Array(1536).fill(0);
+ const errorVector = Array(embeddingDimension).fill(0);
  errorVector[0] = 0.4;
  return errorVector;
  }
  const data = await response.json();
  if (!data?.data?.[0]?.embedding) {
  logger.error("API returned invalid structure");
- const errorVector = Array(1536).fill(0);
+ const errorVector = Array(embeddingDimension).fill(0);
  errorVector[0] = 0.5;
  return errorVector;
  }
@@ -112,7 +212,7 @@ var openaiPlugin = {
  return embedding;
  } catch (error) {
  logger.error("Error generating embedding:", error);
- const errorVector = Array(1536).fill(0);
+ const errorVector = Array(embeddingDimension).fill(0);
  errorVector[0] = 0.6;
  return errorVector;
  }
@@ -128,12 +228,8 @@ var openaiPlugin = {
  const frequency_penalty = 0.7;
  const presence_penalty = 0.7;
  const max_response_length = 8192;
- const baseURL = runtime.getSetting("OPENAI_BASE_URL") ?? "https://api.openai.com/v1";
- const openai = createOpenAI({
- apiKey: runtime.getSetting("OPENAI_API_KEY"),
- baseURL
- });
- const model = runtime.getSetting("OPENAI_SMALL_MODEL") ?? runtime.getSetting("SMALL_MODEL") ?? "gpt-4o-mini";
+ const openai = createOpenAIClient(runtime);
+ const model = getSmallModel(runtime);
  logger.log("generating text");
  logger.log(prompt);
  const { text: openaiResponse } = await generateText({
@@ -156,12 +252,8 @@ var openaiPlugin = {
  frequencyPenalty = 0.7,
  presencePenalty = 0.7
  }) => {
- const baseURL = runtime.getSetting("OPENAI_BASE_URL") ?? "https://api.openai.com/v1";
- const openai = createOpenAI({
- apiKey: runtime.getSetting("OPENAI_API_KEY"),
- baseURL
- });
- const model = runtime.getSetting("OPENAI_LARGE_MODEL") ?? runtime.getSetting("LARGE_MODEL") ?? "gpt-4o";
+ const openai = createOpenAIClient(runtime);
+ const model = getLargeModel(runtime);
  const { text: openaiResponse } = await generateText({
  model: openai.languageModel(model),
  prompt,
@@ -175,11 +267,11 @@ var openaiPlugin = {
  return openaiResponse;
  },
  [ModelType.IMAGE]: async (runtime, params) => {
- const baseURL = runtime.getSetting("OPENAI_BASE_URL") ?? "https://api.openai.com/v1";
+ const baseURL = getBaseURL(runtime);
  const response = await fetch(`${baseURL}/images/generations`, {
  method: "POST",
  headers: {
- Authorization: `Bearer ${runtime.getSetting("OPENAI_API_KEY")}`,
+ Authorization: `Bearer ${getApiKey(runtime)}`,
  "Content-Type": "application/json"
  },
  body: JSON.stringify({
@@ -206,8 +298,8 @@ var openaiPlugin = {
  prompt = params.prompt;
  }
  try {
- const baseURL = process.env.OPENAI_BASE_URL ?? "https://api.openai.com/v1";
- const apiKey = process.env.OPENAI_API_KEY;
+ const baseURL = getBaseURL(runtime);
+ const apiKey = getApiKey(runtime);
  if (!apiKey) {
  logger.error("OpenAI API key not set");
  return {
@@ -222,7 +314,7 @@ var openaiPlugin = {
  Authorization: `Bearer ${apiKey}`
  },
  body: JSON.stringify({
- model: "gpt-4-vision-preview",
+ model: "gpt-4o-mini",
  messages: [
  {
  role: "user",
@@ -266,15 +358,17 @@ var openaiPlugin = {
  },
  [ModelType.TRANSCRIPTION]: async (runtime, audioBuffer) => {
  logger.log("audioBuffer", audioBuffer);
- const baseURL = runtime.getSetting("OPENAI_BASE_URL") ?? "https://api.openai.com/v1";
+ const baseURL = getBaseURL(runtime);
  const formData = new FormData();
- formData.append("file", new Blob([audioBuffer], { type: "audio/mp3" }));
+ formData.append("file", audioBuffer, {
+ filename: "recording.mp3",
+ contentType: "audio/mp3"
+ });
  formData.append("model", "whisper-1");
  const response = await fetch(`${baseURL}/audio/transcriptions`, {
  method: "POST",
  headers: {
- Authorization: `Bearer ${runtime.getSetting("OPENAI_API_KEY")}`
- // Note: Do not set a Content-Type header—letting fetch set it for FormData is best
+ Authorization: `Bearer ${getApiKey(runtime)}`
  },
  body: formData
  });
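
The transcription handler now builds the multipart body with the `form-data` package (paired with `node-fetch`) instead of a browser `Blob`, supplying an explicit filename and content type. A standalone sketch of that upload pattern, with the endpoint and API key passed in by the caller:

```typescript
import FormData from "form-data";
import fetch from "node-fetch";

// Sketch: upload an in-memory audio buffer the way the handler above does.
async function transcribe(audioBuffer: Buffer, apiKey: string): Promise<string> {
  const formData = new FormData();
  // form-data needs an explicit filename/content type for Buffer inputs.
  formData.append("file", audioBuffer, {
    filename: "recording.mp3",
    contentType: "audio/mp3",
  });
  formData.append("model", "whisper-1");

  const response = await fetch("https://api.openai.com/v1/audio/transcriptions", {
    method: "POST",
    headers: { Authorization: `Bearer ${apiKey}` }, // form-data sets the multipart Content-Type itself
    body: formData,
  });
  if (!response.ok) {
    throw new Error(`Failed to transcribe audio: ${response.statusText}`);
  }
  const data = (await response.json()) as { text: string };
  return data.text;
}
```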
@@ -285,65 +379,14 @@ var openaiPlugin = {
  const data = await response.json();
  return data.text;
  },
+ [ModelType.TEXT_TO_SPEECH]: async (runtime, text) => {
+ return await fetchTextToSpeech(runtime, text);
+ },
  [ModelType.OBJECT_SMALL]: async (runtime, params) => {
- const baseURL = runtime.getSetting("OPENAI_BASE_URL") ?? "https://api.openai.com/v1";
- const openai = createOpenAI({
- apiKey: runtime.getSetting("OPENAI_API_KEY"),
- baseURL
- });
- const model = runtime.getSetting("OPENAI_SMALL_MODEL") ?? runtime.getSetting("SMALL_MODEL") ?? "gpt-4o-mini";
- try {
- if (params.schema) {
- logger.info("Using OBJECT_SMALL without schema validation");
- const { object: object2 } = await generateObject({
- model: openai.languageModel(model),
- output: "no-schema",
- prompt: params.prompt,
- temperature: params.temperature
- });
- return object2;
- }
- const { object } = await generateObject({
- model: openai.languageModel(model),
- output: "no-schema",
- prompt: params.prompt,
- temperature: params.temperature
- });
- return object;
- } catch (error) {
- logger.error("Error generating object:", error);
- throw error;
- }
+ return generateObjectByModelType(runtime, params, ModelType.OBJECT_SMALL, getSmallModel);
  },
  [ModelType.OBJECT_LARGE]: async (runtime, params) => {
- const baseURL = runtime.getSetting("OPENAI_BASE_URL") ?? "https://api.openai.com/v1";
- const openai = createOpenAI({
- apiKey: runtime.getSetting("OPENAI_API_KEY"),
- baseURL
- });
- const model = runtime.getSetting("OPENAI_LARGE_MODEL") ?? runtime.getSetting("LARGE_MODEL") ?? "gpt-4o";
- try {
- if (params.schema) {
- logger.info("Using OBJECT_LARGE without schema validation");
- const { object: object2 } = await generateObject({
- model: openai.languageModel(model),
- output: "no-schema",
- prompt: params.prompt,
- temperature: params.temperature
- });
- return object2;
- }
- const { object } = await generateObject({
- model: openai.languageModel(model),
- output: "no-schema",
- prompt: params.prompt,
- temperature: params.temperature
- });
- return object;
- } catch (error) {
- logger.error("Error generating object:", error);
- throw error;
- }
+ return generateObjectByModelType(runtime, params, ModelType.OBJECT_LARGE, getLargeModel);
  }
  },
  tests: [
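
With `TEXT_TO_SPEECH` registered above, callers can request audio through `runtime.useModel`. A sketch under the assumption that the text argument is forwarded to the handler unchanged and that the returned value is the raw `node-fetch` response body stream, which is what `fetchTextToSpeech` returns:

```typescript
import { createWriteStream } from "node:fs";
import { ModelType, type AgentRuntime } from "@elizaos/core";

// Sketch: pipe the TTS audio stream to a local file. "speech.mp3" is an
// arbitrary output path chosen for the example.
async function speakToFile(runtime: AgentRuntime, text: string): Promise<void> {
  const audio = (await runtime.useModel(ModelType.TEXT_TO_SPEECH, text)) as NodeJS.ReadableStream;
  await new Promise<void>((resolve, reject) => {
    audio
      .pipe(createWriteStream("speech.mp3"))
      .on("finish", () => resolve())
      .on("error", reject);
  });
}
```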
@@ -353,10 +396,10 @@ var openaiPlugin = {
  {
  name: "openai_test_url_and_api_key_validation",
  fn: async (runtime) => {
- const baseURL = runtime.getSetting("OPENAI_BASE_URL") ?? "https://api.openai.com/v1";
+ const baseURL = getBaseURL(runtime);
  const response = await fetch(`${baseURL}/models`, {
  headers: {
- Authorization: `Bearer ${runtime.getSetting("OPENAI_API_KEY")}`
+ Authorization: `Bearer ${getApiKey(runtime)}`
  }
  });
  const data = await response.json();
@@ -498,6 +541,22 @@ var openaiPlugin = {
  }
  logger.log("Decoded text:", decodedText);
  }
+ },
+ {
+ name: "openai_test_text_to_speech",
+ fn: async (runtime) => {
+ try {
+ const text = "Hello, this is a test for text-to-speech.";
+ const response = await fetchTextToSpeech(runtime, text);
+ if (!response) {
+ throw new Error("Failed to generate speech");
+ }
+ logger.log("Generated speech successfully");
+ } catch (error) {
+ logger.error("Error in openai_test_text_to_speech:", error);
+ throw error;
+ }
+ }
  }
  ]
  }
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/index.ts"],"sourcesContent":["import { createOpenAI } from '@ai-sdk/openai';\nimport type {\n ImageDescriptionParams,\n ModelTypeName,\n ObjectGenerationParams,\n Plugin,\n TextEmbeddingParams,\n} from '@elizaos/core';\nimport {\n type DetokenizeTextParams,\n type GenerateTextParams,\n ModelType,\n type TokenizeTextParams,\n logger,\n} from '@elizaos/core';\nimport { generateObject, generateText } from 'ai';\nimport { type TiktokenModel, encodingForModel } from 'js-tiktoken';\nimport { z } from 'zod';\n\n/**\n * Asynchronously tokenizes the given text based on the specified model and prompt.\n *\n * @param {ModelTypeName} model - The type of model to use for tokenization.\n * @param {string} prompt - The text prompt to tokenize.\n * @returns {number[]} - An array of tokens representing the encoded prompt.\n */\nasync function tokenizeText(model: ModelTypeName, prompt: string) {\n const modelName =\n model === ModelType.TEXT_SMALL\n ? (process.env.OPENAI_SMALL_MODEL ?? process.env.SMALL_MODEL ?? 'gpt-4o-mini')\n : (process.env.LARGE_MODEL ?? 'gpt-4o');\n const encoding = encodingForModel(modelName as TiktokenModel);\n const tokens = encoding.encode(prompt);\n return tokens;\n}\n\n/**\n * Detokenize a sequence of tokens back into text using the specified model.\n *\n * @param {ModelTypeName} model - The type of model to use for detokenization.\n * @param {number[]} tokens - The sequence of tokens to detokenize.\n * @returns {string} The detokenized text.\n */\nasync function detokenizeText(model: ModelTypeName, tokens: number[]) {\n const modelName =\n model === ModelType.TEXT_SMALL\n ? (process.env.OPENAI_SMALL_MODEL ?? process.env.SMALL_MODEL ?? 'gpt-4o-mini')\n : (process.env.OPENAI_LARGE_MODEL ?? process.env.LARGE_MODEL ?? 'gpt-4o');\n const encoding = encodingForModel(modelName as TiktokenModel);\n return encoding.decode(tokens);\n}\n\n/**\n * Defines the OpenAI plugin with its name, description, and configuration options.\n * @type {Plugin}\n */\nexport const openaiPlugin: Plugin = {\n name: 'openai',\n description: 'OpenAI plugin',\n config: {\n OPENAI_API_KEY: process.env.OPENAI_API_KEY,\n OPENAI_BASE_URL: process.env.OPENAI_BASE_URL,\n OPENAI_SMALL_MODEL: process.env.OPENAI_SMALL_MODEL,\n OPENAI_LARGE_MODEL: process.env.OPENAI_LARGE_MODEL,\n SMALL_MODEL: process.env.SMALL_MODEL,\n LARGE_MODEL: process.env.LARGE_MODEL,\n },\n async init(config: Record<string, string>) {\n try {\n // const validatedConfig = await configSchema.parseAsync(config);\n\n // // Set all environment variables at once\n // for (const [key, value] of Object.entries(validatedConfig)) {\n // \tif (value) process.env[key] = value;\n // }\n\n // If API key is not set, we'll show a warning but continue\n if (!process.env.OPENAI_API_KEY) {\n logger.warn(\n 'OPENAI_API_KEY is not set in environment - OpenAI functionality will be limited'\n );\n // Return early without throwing an error\n return;\n }\n\n // Verify API key only if we have one\n try {\n const baseURL = process.env.OPENAI_BASE_URL ?? 
'https://api.openai.com/v1';\n const response = await fetch(`${baseURL}/models`, {\n headers: { Authorization: `Bearer ${process.env.OPENAI_API_KEY}` },\n });\n\n if (!response.ok) {\n logger.warn(`OpenAI API key validation failed: ${response.statusText}`);\n logger.warn('OpenAI functionality will be limited until a valid API key is provided');\n // Continue execution instead of throwing\n } else {\n // logger.log(\"OpenAI API key validated successfully\");\n }\n } catch (fetchError) {\n logger.warn(`Error validating OpenAI API key: ${fetchError}`);\n logger.warn('OpenAI functionality will be limited until a valid API key is provided');\n // Continue execution instead of throwing\n }\n } catch (error) {\n // Convert to warning instead of error\n logger.warn(\n `OpenAI plugin configuration issue: ${error.errors\n .map((e) => e.message)\n .join(', ')} - You need to configure the OPENAI_API_KEY in your environment variables`\n );\n }\n },\n models: {\n [ModelType.TEXT_EMBEDDING]: async (\n _runtime,\n params: TextEmbeddingParams | string | null\n ): Promise<number[]> => {\n // Handle null input (initialization case)\n if (params === null) {\n logger.debug('Creating test embedding for initialization');\n // Return a consistent vector for null input\n const testVector = Array(1536).fill(0);\n testVector[0] = 0.1; // Make it non-zero\n return testVector;\n }\n\n // Get the text from whatever format was provided\n let text: string;\n if (typeof params === 'string') {\n text = params; // Direct string input\n } else if (typeof params === 'object' && params.text) {\n text = params.text; // Object with text property\n } else {\n logger.warn('Invalid input format for embedding');\n // Return a fallback for invalid input\n const fallbackVector = Array(1536).fill(0);\n fallbackVector[0] = 0.2; // Different value for tracking\n return fallbackVector;\n }\n\n // Skip API call for empty text\n if (!text.trim()) {\n logger.warn('Empty text for embedding');\n const emptyVector = Array(1536).fill(0);\n emptyVector[0] = 0.3; // Different value for tracking\n return emptyVector;\n }\n\n try {\n const baseURL = process.env.OPENAI_BASE_URL ?? 'https://api.openai.com/v1';\n\n // Call the OpenAI API\n const response = await fetch(`${baseURL}/embeddings`, {\n method: 'POST',\n headers: {\n Authorization: `Bearer ${process.env.OPENAI_API_KEY}`,\n 'Content-Type': 'application/json',\n },\n body: JSON.stringify({\n model: 'text-embedding-3-small',\n input: text,\n }),\n });\n\n if (!response.ok) {\n logger.error(`OpenAI API error: ${response.status} - ${response.statusText}`);\n const errorVector = Array(1536).fill(0);\n errorVector[0] = 0.4; // Different value for tracking\n return errorVector;\n }\n\n const data = (await response.json()) as {\n data: [{ embedding: number[] }];\n };\n\n if (!data?.data?.[0]?.embedding) {\n logger.error('API returned invalid structure');\n const errorVector = Array(1536).fill(0);\n errorVector[0] = 0.5; // Different value for tracking\n return errorVector;\n }\n\n const embedding = data.data[0].embedding;\n logger.log(`Got valid embedding with length ${embedding.length}`);\n return embedding;\n } catch (error) {\n logger.error('Error generating embedding:', error);\n const errorVector = Array(1536).fill(0);\n errorVector[0] = 0.6; // Different value for tracking\n return errorVector;\n }\n },\n [ModelType.TEXT_TOKENIZER_ENCODE]: async (\n _runtime,\n { prompt, modelType = ModelType.TEXT_LARGE }: TokenizeTextParams\n ) => {\n return await tokenizeText(modelType ?? 
ModelType.TEXT_LARGE, prompt);\n },\n [ModelType.TEXT_TOKENIZER_DECODE]: async (\n _runtime,\n { tokens, modelType = ModelType.TEXT_LARGE }: DetokenizeTextParams\n ) => {\n return await detokenizeText(modelType ?? ModelType.TEXT_LARGE, tokens);\n },\n [ModelType.TEXT_SMALL]: async (runtime, { prompt, stopSequences = [] }: GenerateTextParams) => {\n const temperature = 0.7;\n const frequency_penalty = 0.7;\n const presence_penalty = 0.7;\n const max_response_length = 8192;\n\n const baseURL = runtime.getSetting('OPENAI_BASE_URL') ?? 'https://api.openai.com/v1';\n\n const openai = createOpenAI({\n apiKey: runtime.getSetting('OPENAI_API_KEY'),\n baseURL,\n });\n\n const model =\n runtime.getSetting('OPENAI_SMALL_MODEL') ??\n runtime.getSetting('SMALL_MODEL') ??\n 'gpt-4o-mini';\n\n logger.log('generating text');\n logger.log(prompt);\n\n const { text: openaiResponse } = await generateText({\n model: openai.languageModel(model),\n prompt: prompt,\n system: runtime.character.system ?? undefined,\n temperature: temperature,\n maxTokens: max_response_length,\n frequencyPenalty: frequency_penalty,\n presencePenalty: presence_penalty,\n stopSequences: stopSequences,\n });\n\n return openaiResponse;\n },\n [ModelType.TEXT_LARGE]: async (\n runtime,\n {\n prompt,\n stopSequences = [],\n maxTokens = 8192,\n temperature = 0.7,\n frequencyPenalty = 0.7,\n presencePenalty = 0.7,\n }: GenerateTextParams\n ) => {\n const baseURL = runtime.getSetting('OPENAI_BASE_URL') ?? 'https://api.openai.com/v1';\n\n const openai = createOpenAI({\n apiKey: runtime.getSetting('OPENAI_API_KEY'),\n baseURL,\n });\n\n const model =\n runtime.getSetting('OPENAI_LARGE_MODEL') ?? runtime.getSetting('LARGE_MODEL') ?? 'gpt-4o';\n\n const { text: openaiResponse } = await generateText({\n model: openai.languageModel(model),\n prompt: prompt,\n system: runtime.character.system ?? undefined,\n temperature: temperature,\n maxTokens: maxTokens,\n frequencyPenalty: frequencyPenalty,\n presencePenalty: presencePenalty,\n stopSequences: stopSequences,\n });\n\n return openaiResponse;\n },\n [ModelType.IMAGE]: async (\n runtime,\n params: {\n prompt: string;\n n?: number;\n size?: string;\n }\n ) => {\n const baseURL = runtime.getSetting('OPENAI_BASE_URL') ?? 'https://api.openai.com/v1';\n const response = await fetch(`${baseURL}/images/generations`, {\n method: 'POST',\n headers: {\n Authorization: `Bearer ${runtime.getSetting('OPENAI_API_KEY')}`,\n 'Content-Type': 'application/json',\n },\n body: JSON.stringify({\n prompt: params.prompt,\n n: params.n || 1,\n size: params.size || '1024x1024',\n }),\n });\n if (!response.ok) {\n throw new Error(`Failed to generate image: ${response.statusText}`);\n }\n const data = await response.json();\n const typedData = data as { data: { url: string }[] };\n return typedData.data;\n },\n [ModelType.IMAGE_DESCRIPTION]: async (runtime, params: ImageDescriptionParams | string) => {\n // Handle string case (direct URL)\n let imageUrl: string;\n let prompt: string | undefined;\n\n if (typeof params === 'string') {\n imageUrl = params;\n prompt = undefined;\n } else {\n // Object parameter case\n imageUrl = params.imageUrl;\n prompt = params.prompt;\n }\n\n try {\n const baseURL = process.env.OPENAI_BASE_URL ?? 
'https://api.openai.com/v1';\n const apiKey = process.env.OPENAI_API_KEY;\n\n if (!apiKey) {\n logger.error('OpenAI API key not set');\n return {\n title: 'Failed to analyze image',\n description: 'API key not configured',\n };\n }\n\n // Call the GPT-4 Vision API\n const response = await fetch(`${baseURL}/chat/completions`, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${apiKey}`,\n },\n body: JSON.stringify({\n model: 'gpt-4-vision-preview',\n messages: [\n {\n role: 'user',\n content: [\n {\n type: 'text',\n text:\n prompt ||\n 'Please analyze this image and provide a title and detailed description.',\n },\n {\n type: 'image_url',\n image_url: { url: imageUrl },\n },\n ],\n },\n ],\n max_tokens: 300,\n }),\n });\n\n if (!response.ok) {\n throw new Error(`OpenAI API error: ${response.status}`);\n }\n\n const result: any = await response.json();\n const content = result.choices?.[0]?.message?.content;\n\n if (!content) {\n return {\n title: 'Failed to analyze image',\n description: 'No response from API',\n };\n }\n\n // Extract title and description\n const titleMatch = content.match(/title[:\\s]+(.+?)(?:\\n|$)/i);\n const title = titleMatch?.[1] || 'Image Analysis';\n\n // Rest of content is the description\n const description = content.replace(/title[:\\s]+(.+?)(?:\\n|$)/i, '').trim();\n\n return { title, description };\n } catch (error) {\n logger.error('Error analyzing image:', error);\n return {\n title: 'Failed to analyze image',\n description: `Error: ${error instanceof Error ? error.message : String(error)}`,\n };\n }\n },\n [ModelType.TRANSCRIPTION]: async (runtime, audioBuffer: Buffer) => {\n logger.log('audioBuffer', audioBuffer);\n const baseURL = runtime.getSetting('OPENAI_BASE_URL') ?? 'https://api.openai.com/v1';\n const formData = new FormData();\n formData.append('file', new Blob([audioBuffer], { type: 'audio/mp3' }));\n formData.append('model', 'whisper-1');\n const response = await fetch(`${baseURL}/audio/transcriptions`, {\n method: 'POST',\n headers: {\n Authorization: `Bearer ${runtime.getSetting('OPENAI_API_KEY')}`,\n // Note: Do not set a Content-Type header—letting fetch set it for FormData is best\n },\n body: formData,\n });\n\n logger.log('response', response);\n if (!response.ok) {\n throw new Error(`Failed to transcribe audio: ${response.statusText}`);\n }\n const data = (await response.json()) as { text: string };\n return data.text;\n },\n [ModelType.OBJECT_SMALL]: async (runtime, params: ObjectGenerationParams) => {\n const baseURL = runtime.getSetting('OPENAI_BASE_URL') ?? 
'https://api.openai.com/v1';\n const openai = createOpenAI({\n apiKey: runtime.getSetting('OPENAI_API_KEY'),\n baseURL,\n });\n const model =\n runtime.getSetting('OPENAI_SMALL_MODEL') ??\n runtime.getSetting('SMALL_MODEL') ??\n 'gpt-4o-mini';\n\n try {\n if (params.schema) {\n // Skip zod validation and just use the generateObject without schema\n logger.info('Using OBJECT_SMALL without schema validation');\n const { object } = await generateObject({\n model: openai.languageModel(model),\n output: 'no-schema',\n prompt: params.prompt,\n temperature: params.temperature,\n });\n return object;\n }\n\n const { object } = await generateObject({\n model: openai.languageModel(model),\n output: 'no-schema',\n prompt: params.prompt,\n temperature: params.temperature,\n });\n return object;\n } catch (error) {\n logger.error('Error generating object:', error);\n throw error;\n }\n },\n [ModelType.OBJECT_LARGE]: async (runtime, params: ObjectGenerationParams) => {\n const baseURL = runtime.getSetting('OPENAI_BASE_URL') ?? 'https://api.openai.com/v1';\n const openai = createOpenAI({\n apiKey: runtime.getSetting('OPENAI_API_KEY'),\n baseURL,\n });\n const model =\n runtime.getSetting('OPENAI_LARGE_MODEL') ?? runtime.getSetting('LARGE_MODEL') ?? 'gpt-4o';\n\n try {\n if (params.schema) {\n // Skip zod validation and just use the generateObject without schema\n logger.info('Using OBJECT_LARGE without schema validation');\n const { object } = await generateObject({\n model: openai.languageModel(model),\n output: 'no-schema',\n prompt: params.prompt,\n temperature: params.temperature,\n });\n return object;\n }\n\n const { object } = await generateObject({\n model: openai.languageModel(model),\n output: 'no-schema',\n prompt: params.prompt,\n temperature: params.temperature,\n });\n return object;\n } catch (error) {\n logger.error('Error generating object:', error);\n throw error;\n }\n },\n },\n tests: [\n {\n name: 'openai_plugin_tests',\n tests: [\n {\n name: 'openai_test_url_and_api_key_validation',\n fn: async (runtime) => {\n const baseURL = runtime.getSetting('OPENAI_BASE_URL') ?? 
'https://api.openai.com/v1';\n const response = await fetch(`${baseURL}/models`, {\n headers: {\n Authorization: `Bearer ${runtime.getSetting('OPENAI_API_KEY')}`,\n },\n });\n const data = await response.json();\n logger.log('Models Available:', (data as any)?.data.length);\n if (!response.ok) {\n throw new Error(`Failed to validate OpenAI API key: ${response.statusText}`);\n }\n },\n },\n {\n name: 'openai_test_text_embedding',\n fn: async (runtime) => {\n try {\n const embedding = await runtime.useModel(ModelType.TEXT_EMBEDDING, {\n text: 'Hello, world!',\n });\n logger.log('embedding', embedding);\n } catch (error) {\n logger.error('Error in test_text_embedding:', error);\n throw error;\n }\n },\n },\n {\n name: 'openai_test_text_large',\n fn: async (runtime) => {\n try {\n const text = await runtime.useModel(ModelType.TEXT_LARGE, {\n prompt: 'What is the nature of reality in 10 words?',\n });\n if (text.length === 0) {\n throw new Error('Failed to generate text');\n }\n logger.log('generated with test_text_large:', text);\n } catch (error) {\n logger.error('Error in test_text_large:', error);\n throw error;\n }\n },\n },\n {\n name: 'openai_test_text_small',\n fn: async (runtime) => {\n try {\n const text = await runtime.useModel(ModelType.TEXT_SMALL, {\n prompt: 'What is the nature of reality in 10 words?',\n });\n if (text.length === 0) {\n throw new Error('Failed to generate text');\n }\n logger.log('generated with test_text_small:', text);\n } catch (error) {\n logger.error('Error in test_text_small:', error);\n throw error;\n }\n },\n },\n {\n name: 'openai_test_image_generation',\n fn: async (runtime) => {\n logger.log('openai_test_image_generation');\n try {\n const image = await runtime.useModel(ModelType.IMAGE, {\n prompt: 'A beautiful sunset over a calm ocean',\n n: 1,\n size: '1024x1024',\n });\n logger.log('generated with test_image_generation:', image);\n } catch (error) {\n logger.error('Error in test_image_generation:', error);\n throw error;\n }\n },\n },\n {\n name: 'image-description',\n fn: async (runtime) => {\n try {\n logger.log('openai_test_image_description');\n try {\n const result = await runtime.useModel(\n ModelType.IMAGE_DESCRIPTION,\n 'https://upload.wikimedia.org/wikipedia/commons/thumb/1/1c/Vitalik_Buterin_TechCrunch_London_2015_%28cropped%29.jpg/537px-Vitalik_Buterin_TechCrunch_London_2015_%28cropped%29.jpg'\n );\n\n // Check if result has the expected structure\n if (\n result &&\n typeof result === 'object' &&\n 'title' in result &&\n 'description' in result\n ) {\n logger.log('Image description:', result);\n } else {\n logger.error('Invalid image description result format:', result);\n }\n } catch (e) {\n logger.error('Error in image description test:', e);\n }\n } catch (e) {\n logger.error('Error in openai_test_image_description:', e);\n }\n },\n },\n {\n name: 'openai_test_transcription',\n fn: async (runtime) => {\n logger.log('openai_test_transcription');\n try {\n const response = await fetch(\n 'https://upload.wikimedia.org/wikipedia/en/4/40/Chris_Benoit_Voice_Message.ogg'\n );\n const arrayBuffer = await response.arrayBuffer();\n const transcription = await runtime.useModel(\n ModelType.TRANSCRIPTION,\n Buffer.from(new Uint8Array(arrayBuffer))\n );\n logger.log('generated with test_transcription:', transcription);\n } catch (error) {\n logger.error('Error in test_transcription:', error);\n throw error;\n }\n },\n },\n {\n name: 'openai_test_text_tokenizer_encode',\n fn: async (runtime) => {\n const prompt = 'Hello tokenizer encode!';\n const 
tokens = await runtime.useModel(ModelType.TEXT_TOKENIZER_ENCODE, { prompt });\n if (!Array.isArray(tokens) || tokens.length === 0) {\n throw new Error('Failed to tokenize text: expected non-empty array of tokens');\n }\n logger.log('Tokenized output:', tokens);\n },\n },\n {\n name: 'openai_test_text_tokenizer_decode',\n fn: async (runtime) => {\n const prompt = 'Hello tokenizer decode!';\n // Encode the string into tokens first\n const tokens = await runtime.useModel(ModelType.TEXT_TOKENIZER_ENCODE, { prompt });\n // Now decode tokens back into text\n const decodedText = await runtime.useModel(ModelType.TEXT_TOKENIZER_DECODE, { tokens });\n if (decodedText !== prompt) {\n throw new Error(\n `Decoded text does not match original. Expected \"${prompt}\", got \"${decodedText}\"`\n );\n }\n logger.log('Decoded text:', decodedText);\n },\n },\n ],\n },\n ],\n};\nexport default openaiPlugin;\n"],"mappings":";AAAA,SAAS,oBAAoB;AAQ7B;AAAA,EAGE;AAAA,EAEA;AAAA,OACK;AACP,SAAS,gBAAgB,oBAAoB;AAC7C,SAA6B,wBAAwB;AAUrD,eAAe,aAAa,OAAsB,QAAgB;AAChE,QAAM,YACJ,UAAU,UAAU,aACf,QAAQ,IAAI,sBAAsB,QAAQ,IAAI,eAAe,gBAC7D,QAAQ,IAAI,eAAe;AAClC,QAAM,WAAW,iBAAiB,SAA0B;AAC5D,QAAM,SAAS,SAAS,OAAO,MAAM;AACrC,SAAO;AACT;AASA,eAAe,eAAe,OAAsB,QAAkB;AACpE,QAAM,YACJ,UAAU,UAAU,aACf,QAAQ,IAAI,sBAAsB,QAAQ,IAAI,eAAe,gBAC7D,QAAQ,IAAI,sBAAsB,QAAQ,IAAI,eAAe;AACpE,QAAM,WAAW,iBAAiB,SAA0B;AAC5D,SAAO,SAAS,OAAO,MAAM;AAC/B;AAMO,IAAM,eAAuB;AAAA,EAClC,MAAM;AAAA,EACN,aAAa;AAAA,EACb,QAAQ;AAAA,IACN,gBAAgB,QAAQ,IAAI;AAAA,IAC5B,iBAAiB,QAAQ,IAAI;AAAA,IAC7B,oBAAoB,QAAQ,IAAI;AAAA,IAChC,oBAAoB,QAAQ,IAAI;AAAA,IAChC,aAAa,QAAQ,IAAI;AAAA,IACzB,aAAa,QAAQ,IAAI;AAAA,EAC3B;AAAA,EACA,MAAM,KAAK,QAAgC;AACzC,QAAI;AASF,UAAI,CAAC,QAAQ,IAAI,gBAAgB;AAC/B,eAAO;AAAA,UACL;AAAA,QACF;AAEA;AAAA,MACF;AAGA,UAAI;AACF,cAAM,UAAU,QAAQ,IAAI,mBAAmB;AAC/C,cAAM,WAAW,MAAM,MAAM,GAAG,OAAO,WAAW;AAAA,UAChD,SAAS,EAAE,eAAe,UAAU,QAAQ,IAAI,cAAc,GAAG;AAAA,QACnE,CAAC;AAED,YAAI,CAAC,SAAS,IAAI;AAChB,iBAAO,KAAK,qCAAqC,SAAS,UAAU,EAAE;AACtE,iBAAO,KAAK,wEAAwE;AAAA,QAEtF,OAAO;AAAA,QAEP;AAAA,MACF,SAAS,YAAY;AACnB,eAAO,KAAK,oCAAoC,UAAU,EAAE;AAC5D,eAAO,KAAK,wEAAwE;AAAA,MAEtF;AAAA,IACF,SAAS,OAAO;AAEd,aAAO;AAAA,QACL,sCAAsC,MAAM,OACzC,IAAI,CAAC,MAAM,EAAE,OAAO,EACpB,KAAK,IAAI,CAAC;AAAA,MACf;AAAA,IACF;AAAA,EACF;AAAA,EACA,QAAQ;AAAA,IACN,CAAC,UAAU,cAAc,GAAG,OAC1B,UACA,WACsB;AAEtB,UAAI,WAAW,MAAM;AACnB,eAAO,MAAM,4CAA4C;AAEzD,cAAM,aAAa,MAAM,IAAI,EAAE,KAAK,CAAC;AACrC,mBAAW,CAAC,IAAI;AAChB,eAAO;AAAA,MACT;AAGA,UAAI;AACJ,UAAI,OAAO,WAAW,UAAU;AAC9B,eAAO;AAAA,MACT,WAAW,OAAO,WAAW,YAAY,OAAO,MAAM;AACpD,eAAO,OAAO;AAAA,MAChB,OAAO;AACL,eAAO,KAAK,oCAAoC;AAEhD,cAAM,iBAAiB,MAAM,IAAI,EAAE,KAAK,CAAC;AACzC,uBAAe,CAAC,IAAI;AACpB,eAAO;AAAA,MACT;AAGA,UAAI,CAAC,KAAK,KAAK,GAAG;AAChB,eAAO,KAAK,0BAA0B;AACtC,cAAM,cAAc,MAAM,IAAI,EAAE,KAAK,CAAC;AACtC,oBAAY,CAAC,IAAI;AACjB,eAAO;AAAA,MACT;AAEA,UAAI;AACF,cAAM,UAAU,QAAQ,IAAI,mBAAmB;AAG/C,cAAM,WAAW,MAAM,MAAM,GAAG,OAAO,eAAe;AAAA,UACpD,QAAQ;AAAA,UACR,SAAS;AAAA,YACP,eAAe,UAAU,QAAQ,IAAI,cAAc;AAAA,YACnD,gBAAgB;AAAA,UAClB;AAAA,UACA,MAAM,KAAK,UAAU;AAAA,YACnB,OAAO;AAAA,YACP,OAAO;AAAA,UACT,CAAC;AAAA,QACH,CAAC;AAED,YAAI,CAAC,SAAS,IAAI;AAChB,iBAAO,MAAM,qBAAqB,SAAS,MAAM,MAAM,SAAS,UAAU,EAAE;AAC5E,gBAAM,cAAc,MAAM,IAAI,EAAE,KAAK,CAAC;AACtC,sBAAY,CAAC,IAAI;AACjB,iBAAO;AAAA,QACT;AAEA,cAAM,OAAQ,MAAM,SAAS,KAAK;AAIlC,YAAI,CAAC,MAAM,OAAO,CAAC,GAAG,WAAW;AAC/B,iBAAO,MAAM,gCAAgC;AAC7C,gBAAM,cAAc,MAAM,IAAI,EAAE,KAAK,CAAC;AACtC,sBAAY,CAAC,IAAI;AACjB,iBAAO;AAAA,QACT;AAEA,cAAM,YAAY,KAAK,KAAK,CAAC,EAAE;AAC/B,eAAO,IAAI,mCAAmC,UAAU,MAAM,EAAE;AAChE,eAAO;AAAA,MACT,SAAS,OAAO;AACd,eAAO,MAAM,+BAA+B,KAAK;AACjD,cAAM,cAAc,MAAM,IAAI,EAAE,KA
AK,CAAC;AACtC,oBAAY,CAAC,IAAI;AACjB,eAAO;AAAA,MACT;AAAA,IACF;AAAA,IACA,CAAC,UAAU,qBAAqB,GAAG,OACjC,UACA,EAAE,QAAQ,YAAY,UAAU,WAAW,MACxC;AACH,aAAO,MAAM,aAAa,aAAa,UAAU,YAAY,MAAM;AAAA,IACrE;AAAA,IACA,CAAC,UAAU,qBAAqB,GAAG,OACjC,UACA,EAAE,QAAQ,YAAY,UAAU,WAAW,MACxC;AACH,aAAO,MAAM,eAAe,aAAa,UAAU,YAAY,MAAM;AAAA,IACvE;AAAA,IACA,CAAC,UAAU,UAAU,GAAG,OAAO,SAAS,EAAE,QAAQ,gBAAgB,CAAC,EAAE,MAA0B;AAC7F,YAAM,cAAc;AACpB,YAAM,oBAAoB;AAC1B,YAAM,mBAAmB;AACzB,YAAM,sBAAsB;AAE5B,YAAM,UAAU,QAAQ,WAAW,iBAAiB,KAAK;AAEzD,YAAM,SAAS,aAAa;AAAA,QAC1B,QAAQ,QAAQ,WAAW,gBAAgB;AAAA,QAC3C;AAAA,MACF,CAAC;AAED,YAAM,QACJ,QAAQ,WAAW,oBAAoB,KACvC,QAAQ,WAAW,aAAa,KAChC;AAEF,aAAO,IAAI,iBAAiB;AAC5B,aAAO,IAAI,MAAM;AAEjB,YAAM,EAAE,MAAM,eAAe,IAAI,MAAM,aAAa;AAAA,QAClD,OAAO,OAAO,cAAc,KAAK;AAAA,QACjC;AAAA,QACA,QAAQ,QAAQ,UAAU,UAAU;AAAA,QACpC;AAAA,QACA,WAAW;AAAA,QACX,kBAAkB;AAAA,QAClB,iBAAiB;AAAA,QACjB;AAAA,MACF,CAAC;AAED,aAAO;AAAA,IACT;AAAA,IACA,CAAC,UAAU,UAAU,GAAG,OACtB,SACA;AAAA,MACE;AAAA,MACA,gBAAgB,CAAC;AAAA,MACjB,YAAY;AAAA,MACZ,cAAc;AAAA,MACd,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,IACpB,MACG;AACH,YAAM,UAAU,QAAQ,WAAW,iBAAiB,KAAK;AAEzD,YAAM,SAAS,aAAa;AAAA,QAC1B,QAAQ,QAAQ,WAAW,gBAAgB;AAAA,QAC3C;AAAA,MACF,CAAC;AAED,YAAM,QACJ,QAAQ,WAAW,oBAAoB,KAAK,QAAQ,WAAW,aAAa,KAAK;AAEnF,YAAM,EAAE,MAAM,eAAe,IAAI,MAAM,aAAa;AAAA,QAClD,OAAO,OAAO,cAAc,KAAK;AAAA,QACjC;AAAA,QACA,QAAQ,QAAQ,UAAU,UAAU;AAAA,QACpC;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAED,aAAO;AAAA,IACT;AAAA,IACA,CAAC,UAAU,KAAK,GAAG,OACjB,SACA,WAKG;AACH,YAAM,UAAU,QAAQ,WAAW,iBAAiB,KAAK;AACzD,YAAM,WAAW,MAAM,MAAM,GAAG,OAAO,uBAAuB;AAAA,QAC5D,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,eAAe,UAAU,QAAQ,WAAW,gBAAgB,CAAC;AAAA,UAC7D,gBAAgB;AAAA,QAClB;AAAA,QACA,MAAM,KAAK,UAAU;AAAA,UACnB,QAAQ,OAAO;AAAA,UACf,GAAG,OAAO,KAAK;AAAA,UACf,MAAM,OAAO,QAAQ;AAAA,QACvB,CAAC;AAAA,MACH,CAAC;AACD,UAAI,CAAC,SAAS,IAAI;AAChB,cAAM,IAAI,MAAM,6BAA6B,SAAS,UAAU,EAAE;AAAA,MACpE;AACA,YAAM,OAAO,MAAM,SAAS,KAAK;AACjC,YAAM,YAAY;AAClB,aAAO,UAAU;AAAA,IACnB;AAAA,IACA,CAAC,UAAU,iBAAiB,GAAG,OAAO,SAAS,WAA4C;AAEzF,UAAI;AACJ,UAAI;AAEJ,UAAI,OAAO,WAAW,UAAU;AAC9B,mBAAW;AACX,iBAAS;AAAA,MACX,OAAO;AAEL,mBAAW,OAAO;AAClB,iBAAS,OAAO;AAAA,MAClB;AAEA,UAAI;AACF,cAAM,UAAU,QAAQ,IAAI,mBAAmB;AAC/C,cAAM,SAAS,QAAQ,IAAI;AAE3B,YAAI,CAAC,QAAQ;AACX,iBAAO,MAAM,wBAAwB;AACrC,iBAAO;AAAA,YACL,OAAO;AAAA,YACP,aAAa;AAAA,UACf;AAAA,QACF;AAGA,cAAM,WAAW,MAAM,MAAM,GAAG,OAAO,qBAAqB;AAAA,UAC1D,QAAQ;AAAA,UACR,SAAS;AAAA,YACP,gBAAgB;AAAA,YAChB,eAAe,UAAU,MAAM;AAAA,UACjC;AAAA,UACA,MAAM,KAAK,UAAU;AAAA,YACnB,OAAO;AAAA,YACP,UAAU;AAAA,cACR;AAAA,gBACE,MAAM;AAAA,gBACN,SAAS;AAAA,kBACP;AAAA,oBACE,MAAM;AAAA,oBACN,MACE,UACA;AAAA,kBACJ;AAAA,kBACA;AAAA,oBACE,MAAM;AAAA,oBACN,WAAW,EAAE,KAAK,SAAS;AAAA,kBAC7B;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,YACA,YAAY;AAAA,UACd,CAAC;AAAA,QACH,CAAC;AAED,YAAI,CAAC,SAAS,IAAI;AAChB,gBAAM,IAAI,MAAM,qBAAqB,SAAS,MAAM,EAAE;AAAA,QACxD;AAEA,cAAM,SAAc,MAAM,SAAS,KAAK;AACxC,cAAM,UAAU,OAAO,UAAU,CAAC,GAAG,SAAS;AAE9C,YAAI,CAAC,SAAS;AACZ,iBAAO;AAAA,YACL,OAAO;AAAA,YACP,aAAa;AAAA,UACf;AAAA,QACF;AAGA,cAAM,aAAa,QAAQ,MAAM,2BAA2B;AAC5D,cAAM,QAAQ,aAAa,CAAC,KAAK;AAGjC,cAAM,cAAc,QAAQ,QAAQ,6BAA6B,EAAE,EAAE,KAAK;AAE1E,eAAO,EAAE,OAAO,YAAY;AAAA,MAC9B,SAAS,OAAO;AACd,eAAO,MAAM,0BAA0B,KAAK;AAC5C,eAAO;AAAA,UACL,OAAO;AAAA,UACP,aAAa,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,QAC/E;AAAA,MACF;AAAA,IACF;AAAA,IACA,CAAC,UAAU,aAAa,GAAG,OAAO,SAAS,gBAAwB;AACjE,aAAO,IAAI,eAAe,WAAW;AACrC,YAAM,UAAU,QAAQ,WAAW,iBAAiB,KAAK;AACzD,YAAM,WAAW,IAAI,SAAS;AAC9B,eAAS,OAAO,QAAQ,IAAI,KAAK,CAAC,WAAW,GAAG,EAAE,MAAM,YAAY,CAAC,CAAC;AACtE,eAAS,OAAO,SAAS,WAAW;AACpC,YAAM,WAAW,MAAM,MAAM,GAAG,OAAO,yBAAyB;AAAA,QAC9D,QAAQ;AAAA,QACR,SAAS;AAAA,U
ACP,eAAe,UAAU,QAAQ,WAAW,gBAAgB,CAAC;AAAA;AAAA,QAE/D;AAAA,QACA,MAAM;AAAA,MACR,CAAC;AAED,aAAO,IAAI,YAAY,QAAQ;AAC/B,UAAI,CAAC,SAAS,IAAI;AAChB,cAAM,IAAI,MAAM,+BAA+B,SAAS,UAAU,EAAE;AAAA,MACtE;AACA,YAAM,OAAQ,MAAM,SAAS,KAAK;AAClC,aAAO,KAAK;AAAA,IACd;AAAA,IACA,CAAC,UAAU,YAAY,GAAG,OAAO,SAAS,WAAmC;AAC3E,YAAM,UAAU,QAAQ,WAAW,iBAAiB,KAAK;AACzD,YAAM,SAAS,aAAa;AAAA,QAC1B,QAAQ,QAAQ,WAAW,gBAAgB;AAAA,QAC3C;AAAA,MACF,CAAC;AACD,YAAM,QACJ,QAAQ,WAAW,oBAAoB,KACvC,QAAQ,WAAW,aAAa,KAChC;AAEF,UAAI;AACF,YAAI,OAAO,QAAQ;AAEjB,iBAAO,KAAK,8CAA8C;AAC1D,gBAAM,EAAE,QAAAA,QAAO,IAAI,MAAM,eAAe;AAAA,YACtC,OAAO,OAAO,cAAc,KAAK;AAAA,YACjC,QAAQ;AAAA,YACR,QAAQ,OAAO;AAAA,YACf,aAAa,OAAO;AAAA,UACtB,CAAC;AACD,iBAAOA;AAAA,QACT;AAEA,cAAM,EAAE,OAAO,IAAI,MAAM,eAAe;AAAA,UACtC,OAAO,OAAO,cAAc,KAAK;AAAA,UACjC,QAAQ;AAAA,UACR,QAAQ,OAAO;AAAA,UACf,aAAa,OAAO;AAAA,QACtB,CAAC;AACD,eAAO;AAAA,MACT,SAAS,OAAO;AACd,eAAO,MAAM,4BAA4B,KAAK;AAC9C,cAAM;AAAA,MACR;AAAA,IACF;AAAA,IACA,CAAC,UAAU,YAAY,GAAG,OAAO,SAAS,WAAmC;AAC3E,YAAM,UAAU,QAAQ,WAAW,iBAAiB,KAAK;AACzD,YAAM,SAAS,aAAa;AAAA,QAC1B,QAAQ,QAAQ,WAAW,gBAAgB;AAAA,QAC3C;AAAA,MACF,CAAC;AACD,YAAM,QACJ,QAAQ,WAAW,oBAAoB,KAAK,QAAQ,WAAW,aAAa,KAAK;AAEnF,UAAI;AACF,YAAI,OAAO,QAAQ;AAEjB,iBAAO,KAAK,8CAA8C;AAC1D,gBAAM,EAAE,QAAAA,QAAO,IAAI,MAAM,eAAe;AAAA,YACtC,OAAO,OAAO,cAAc,KAAK;AAAA,YACjC,QAAQ;AAAA,YACR,QAAQ,OAAO;AAAA,YACf,aAAa,OAAO;AAAA,UACtB,CAAC;AACD,iBAAOA;AAAA,QACT;AAEA,cAAM,EAAE,OAAO,IAAI,MAAM,eAAe;AAAA,UACtC,OAAO,OAAO,cAAc,KAAK;AAAA,UACjC,QAAQ;AAAA,UACR,QAAQ,OAAO;AAAA,UACf,aAAa,OAAO;AAAA,QACtB,CAAC;AACD,eAAO;AAAA,MACT,SAAS,OAAO;AACd,eAAO,MAAM,4BAA4B,KAAK;AAC9C,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA,EACA,OAAO;AAAA,IACL;AAAA,MACE,MAAM;AAAA,MACN,OAAO;AAAA,QACL;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAAY;AACrB,kBAAM,UAAU,QAAQ,WAAW,iBAAiB,KAAK;AACzD,kBAAM,WAAW,MAAM,MAAM,GAAG,OAAO,WAAW;AAAA,cAChD,SAAS;AAAA,gBACP,eAAe,UAAU,QAAQ,WAAW,gBAAgB,CAAC;AAAA,cAC/D;AAAA,YACF,CAAC;AACD,kBAAM,OAAO,MAAM,SAAS,KAAK;AACjC,mBAAO,IAAI,qBAAsB,MAAc,KAAK,MAAM;AAC1D,gBAAI,CAAC,SAAS,IAAI;AAChB,oBAAM,IAAI,MAAM,sCAAsC,SAAS,UAAU,EAAE;AAAA,YAC7E;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAAY;AACrB,gBAAI;AACF,oBAAM,YAAY,MAAM,QAAQ,SAAS,UAAU,gBAAgB;AAAA,gBACjE,MAAM;AAAA,cACR,CAAC;AACD,qBAAO,IAAI,aAAa,SAAS;AAAA,YACnC,SAAS,OAAO;AACd,qBAAO,MAAM,iCAAiC,KAAK;AACnD,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAAY;AACrB,gBAAI;AACF,oBAAM,OAAO,MAAM,QAAQ,SAAS,UAAU,YAAY;AAAA,gBACxD,QAAQ;AAAA,cACV,CAAC;AACD,kBAAI,KAAK,WAAW,GAAG;AACrB,sBAAM,IAAI,MAAM,yBAAyB;AAAA,cAC3C;AACA,qBAAO,IAAI,mCAAmC,IAAI;AAAA,YACpD,SAAS,OAAO;AACd,qBAAO,MAAM,6BAA6B,KAAK;AAC/C,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAAY;AACrB,gBAAI;AACF,oBAAM,OAAO,MAAM,QAAQ,SAAS,UAAU,YAAY;AAAA,gBACxD,QAAQ;AAAA,cACV,CAAC;AACD,kBAAI,KAAK,WAAW,GAAG;AACrB,sBAAM,IAAI,MAAM,yBAAyB;AAAA,cAC3C;AACA,qBAAO,IAAI,mCAAmC,IAAI;AAAA,YACpD,SAAS,OAAO;AACd,qBAAO,MAAM,6BAA6B,KAAK;AAC/C,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAAY;AACrB,mBAAO,IAAI,8BAA8B;AACzC,gBAAI;AACF,oBAAM,QAAQ,MAAM,QAAQ,SAAS,UAAU,OAAO;AAAA,gBACpD,QAAQ;AAAA,gBACR,GAAG;AAAA,gBACH,MAAM;AAAA,cACR,CAAC;AACD,qBAAO,IAAI,yCAAyC,KAAK;AAAA,YAC3D,SAAS,OAAO;AACd,qBAAO,MAAM,mCAAmC,KAAK;AACrD,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAAY;AACrB,gBAAI;AACF,qBAAO,IAAI,+BAA+B;AAC1C,kBAAI;AACF,sBAAM,SAAS,MAAM,QAAQ;AAAA,kBAC3B,UAAU;AAAA,kBACV;AAAA,gBACF;AAGA,oBACE,UACA,OAAO,WAAW,YAClB,WAAW,UACX,iBAAiB,QACjB;AACA,yBAAO,IAAI,sBAAsB,MAAM;AAAA,gBACzC,OAAO;AACL,yBAAO,MAAM,4CAA4C,MAAM;AAAA,gBACjE;AAAA,cACF,SA
AS,GAAG;AACV,uBAAO,MAAM,oCAAoC,CAAC;AAAA,cACpD;AAAA,YACF,SAAS,GAAG;AACV,qBAAO,MAAM,2CAA2C,CAAC;AAAA,YAC3D;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAAY;AACrB,mBAAO,IAAI,2BAA2B;AACtC,gBAAI;AACF,oBAAM,WAAW,MAAM;AAAA,gBACrB;AAAA,cACF;AACA,oBAAM,cAAc,MAAM,SAAS,YAAY;AAC/C,oBAAM,gBAAgB,MAAM,QAAQ;AAAA,gBAClC,UAAU;AAAA,gBACV,OAAO,KAAK,IAAI,WAAW,WAAW,CAAC;AAAA,cACzC;AACA,qBAAO,IAAI,sCAAsC,aAAa;AAAA,YAChE,SAAS,OAAO;AACd,qBAAO,MAAM,gCAAgC,KAAK;AAClD,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAAY;AACrB,kBAAM,SAAS;AACf,kBAAM,SAAS,MAAM,QAAQ,SAAS,UAAU,uBAAuB,EAAE,OAAO,CAAC;AACjF,gBAAI,CAAC,MAAM,QAAQ,MAAM,KAAK,OAAO,WAAW,GAAG;AACjD,oBAAM,IAAI,MAAM,6DAA6D;AAAA,YAC/E;AACA,mBAAO,IAAI,qBAAqB,MAAM;AAAA,UACxC;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAAY;AACrB,kBAAM,SAAS;AAEf,kBAAM,SAAS,MAAM,QAAQ,SAAS,UAAU,uBAAuB,EAAE,OAAO,CAAC;AAEjF,kBAAM,cAAc,MAAM,QAAQ,SAAS,UAAU,uBAAuB,EAAE,OAAO,CAAC;AACtF,gBAAI,gBAAgB,QAAQ;AAC1B,oBAAM,IAAI;AAAA,gBACR,mDAAmD,MAAM,WAAW,WAAW;AAAA,cACjF;AAAA,YACF;AACA,mBAAO,IAAI,iBAAiB,WAAW;AAAA,UACzC;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AACA,IAAO,gBAAQ;","names":["object"]}
+ {"version":3,"sources":["../src/index.ts"],"sourcesContent":["import { createOpenAI } from '@ai-sdk/openai';\nimport type {\n AgentRuntime,\n ImageDescriptionParams,\n ModelTypeName,\n ObjectGenerationParams,\n Plugin,\n TextEmbeddingParams,\n} from '@elizaos/core';\nimport {\n type DetokenizeTextParams,\n type GenerateTextParams,\n ModelType,\n type TokenizeTextParams,\n logger,\n VECTOR_DIMS,\n} from '@elizaos/core';\nimport { generateObject, generateText, JSONParseError, JSONValue } from 'ai';\nimport { type TiktokenModel, encodingForModel } from 'js-tiktoken';\nimport FormData from 'form-data';\nimport fetch from 'node-fetch';\n\n/**\n * Helper function to get settings with fallback to process.env\n *\n * @param runtime The runtime context\n * @param key The setting key to retrieve\n * @param defaultValue Optional default value if not found\n * @returns The setting value with proper fallbacks\n */\nfunction getSetting(runtime: any, key: string, defaultValue?: string): string | undefined {\n return runtime.getSetting(key) ?? process.env[key] ?? defaultValue;\n}\n\n/**\n * Helper function to get the base URL for OpenAI API\n *\n * @param runtime The runtime context\n * @returns The configured base URL or default\n */\nfunction getBaseURL(runtime: any): string {\n return getSetting(runtime, 'OPENAI_BASE_URL', 'https://api.openai.com/v1');\n}\n\n/**\n * Helper function to get the API key for OpenAI\n *\n * @param runtime The runtime context\n * @returns The configured API key\n */\nfunction getApiKey(runtime: any): string | undefined {\n return getSetting(runtime, 'OPENAI_API_KEY');\n}\n\n/**\n * Helper function to get the small model name with fallbacks\n *\n * @param runtime The runtime context\n * @returns The configured small model name\n */\nfunction getSmallModel(runtime: any): string {\n return (\n getSetting(runtime, 'OPENAI_SMALL_MODEL') ?? getSetting(runtime, 'SMALL_MODEL', 'gpt-4o-mini')\n );\n}\n\n/**\n * Helper function to get the large model name with fallbacks\n *\n * @param runtime The runtime context\n * @returns The configured large model name\n */\nfunction getLargeModel(runtime: any): string {\n return getSetting(runtime, 'OPENAI_LARGE_MODEL') ?? getSetting(runtime, 'LARGE_MODEL', 'gpt-4o');\n}\n\n/**\n * Create an OpenAI client with proper configuration\n *\n * @param runtime The runtime context\n * @returns Configured OpenAI client\n */\nfunction createOpenAIClient(runtime: any) {\n return createOpenAI({\n apiKey: getApiKey(runtime),\n baseURL: getBaseURL(runtime),\n });\n}\n\n/**\n * Asynchronously tokenizes the given text based on the specified model and prompt.\n *\n * @param {ModelTypeName} model - The type of model to use for tokenization.\n * @param {string} prompt - The text prompt to tokenize.\n * @returns {number[]} - An array of tokens representing the encoded prompt.\n */\nasync function tokenizeText(model: ModelTypeName, prompt: string) {\n const modelName =\n model === ModelType.TEXT_SMALL\n ? (process.env.OPENAI_SMALL_MODEL ?? process.env.SMALL_MODEL ?? 'gpt-4o-mini')\n : (process.env.LARGE_MODEL ?? 
'gpt-4o');\n const encoding = encodingForModel(modelName as TiktokenModel);\n const tokens = encoding.encode(prompt);\n return tokens;\n}\n\n/**\n * Detokenize a sequence of tokens back into text using the specified model.\n *\n * @param {ModelTypeName} model - The type of model to use for detokenization.\n * @param {number[]} tokens - The sequence of tokens to detokenize.\n * @returns {string} The detokenized text.\n */\nasync function detokenizeText(model: ModelTypeName, tokens: number[]) {\n const modelName =\n model === ModelType.TEXT_SMALL\n ? (process.env.OPENAI_SMALL_MODEL ?? process.env.SMALL_MODEL ?? 'gpt-4o-mini')\n : (process.env.OPENAI_LARGE_MODEL ?? process.env.LARGE_MODEL ?? 'gpt-4o');\n const encoding = encodingForModel(modelName as TiktokenModel);\n return encoding.decode(tokens);\n}\n\n/**\n * Helper function to generate objects using specified model type\n */\nasync function generateObjectByModelType(\n runtime: AgentRuntime,\n params: ObjectGenerationParams,\n modelType: string,\n getModelFn: (runtime: AgentRuntime) => string\n): Promise<JSONValue> {\n const openai = createOpenAIClient(runtime);\n const model = getModelFn(runtime);\n\n try {\n if (params.schema) {\n // Skip zod validation and just use the generateObject without schema\n logger.info(`Using ${modelType} without schema validation`);\n }\n\n const { object } = await generateObject({\n model: openai.languageModel(model),\n output: 'no-schema',\n prompt: params.prompt,\n temperature: params.temperature,\n experimental_repairText: getJsonRepairFunction(),\n });\n return object;\n } catch (error) {\n logger.error(`Error generating object with ${modelType}:`, error);\n throw error;\n }\n}\n\n/**\n * Returns a function to repair JSON text\n */\nfunction getJsonRepairFunction(): (params: {\n text: string;\n error: unknown;\n}) => Promise<string | null> {\n return async ({ text, error }: { text: string; error: unknown }) => {\n try {\n if (error instanceof JSONParseError) {\n const cleanedText = text.replace(/```json\\n|\\n```|```/g, '');\n\n JSON.parse(cleanedText);\n return cleanedText;\n }\n } catch (jsonError) {\n logger.warn('Failed to repair JSON text:', jsonError);\n return null;\n }\n };\n}\n\n/**\n * function for text-to-speech\n */\nasync function fetchTextToSpeech(runtime: AgentRuntime, text: string) {\n const apiKey = getApiKey(runtime);\n const model = getSetting(runtime, 'OPENAI_TTS_MODEL', 'gpt-4o-mini-tts');\n const voice = getSetting(runtime, 'OPENAI_TTS_VOICE', 'nova');\n const instructions = getSetting(runtime, 'OPENAI_TTS_INSTRUCTIONS', '');\n const baseURL = getBaseURL(runtime);\n\n try {\n const res = await fetch(`${baseURL}/audio/speech`, {\n method: 'POST',\n headers: {\n Authorization: `Bearer ${apiKey}`,\n 'Content-Type': 'application/json',\n },\n body: JSON.stringify({\n model,\n voice,\n input: text,\n ...(instructions && { instructions }),\n }),\n });\n\n if (!res.ok) {\n const err = await res.text();\n throw new Error(`OpenAI TTS error ${res.status}: ${err}`);\n }\n\n return res.body;\n } catch (err: any) {\n throw new Error(`Failed to fetch speech from OpenAI TTS: ${err.message || err}`);\n }\n}\n\n/**\n * Defines the OpenAI plugin with its name, description, and configuration options.\n * @type {Plugin}\n */\nexport const openaiPlugin: Plugin = {\n name: 'openai',\n description: 'OpenAI plugin',\n config: {\n OPENAI_API_KEY: process.env.OPENAI_API_KEY,\n OPENAI_BASE_URL: process.env.OPENAI_BASE_URL,\n OPENAI_SMALL_MODEL: process.env.OPENAI_SMALL_MODEL,\n OPENAI_LARGE_MODEL: 
process.env.OPENAI_LARGE_MODEL,\n SMALL_MODEL: process.env.SMALL_MODEL,\n LARGE_MODEL: process.env.LARGE_MODEL,\n OPENAI_EMBEDDING_MODEL: process.env.OPENAI_EMBEDDING_MODEL,\n OPENAI_EMBEDDING_DIMENSIONS: process.env.OPENAI_EMBEDDING_DIMENSIONS,\n },\n async init(_config, runtime) {\n try {\n // const validatedConfig = await configSchema.parseAsync(config);\n\n // // Set all environment variables at once\n // for (const [key, value] of Object.entries(validatedConfig)) {\n // \tif (value) process.env[key] = value;\n // }\n\n // If API key is not set, we'll show a warning but continue\n if (!getApiKey(runtime)) {\n logger.warn(\n 'OPENAI_API_KEY is not set in environment - OpenAI functionality will be limited'\n );\n // Return early without throwing an error\n return;\n }\n\n // Verify API key only if we have one\n try {\n const baseURL = getBaseURL(runtime);\n const response = await fetch(`${baseURL}/models`, {\n headers: { Authorization: `Bearer ${getApiKey(runtime)}` },\n });\n\n if (!response.ok) {\n logger.warn(`OpenAI API key validation failed: ${response.statusText}`);\n logger.warn('OpenAI functionality will be limited until a valid API key is provided');\n // Continue execution instead of throwing\n } else {\n // logger.log(\"OpenAI API key validated successfully\");\n }\n } catch (fetchError) {\n logger.warn(`Error validating OpenAI API key: ${fetchError}`);\n logger.warn('OpenAI functionality will be limited until a valid API key is provided');\n // Continue execution instead of throwing\n }\n } catch (error) {\n // Convert to warning instead of error\n logger.warn(\n `OpenAI plugin configuration issue: ${error.errors\n .map((e) => e.message)\n .join(', ')} - You need to configure the OPENAI_API_KEY in your environment variables`\n );\n }\n },\n models: {\n [ModelType.TEXT_EMBEDDING]: async (\n runtime,\n params: TextEmbeddingParams | string | null\n ): Promise<number[]> => {\n const embeddingDimension = parseInt(\n getSetting(runtime, 'OPENAI_EMBEDDING_DIMENSIONS', '1536')\n ) as (typeof VECTOR_DIMS)[keyof typeof VECTOR_DIMS];\n\n // Validate embedding dimension\n if (!Object.values(VECTOR_DIMS).includes(embeddingDimension)) {\n logger.error(\n `Invalid embedding dimension: ${embeddingDimension}. Must be one of: ${Object.values(VECTOR_DIMS).join(', ')}`\n );\n throw new Error(\n `Invalid embedding dimension: ${embeddingDimension}. 
Must be one of: ${Object.values(VECTOR_DIMS).join(', ')}`\n );\n }\n\n // Handle null input (initialization case)\n if (params === null) {\n logger.debug('Creating test embedding for initialization');\n // Return a consistent vector for null input\n const testVector = Array(embeddingDimension).fill(0);\n testVector[0] = 0.1; // Make it non-zero\n return testVector;\n }\n\n // Get the text from whatever format was provided\n let text: string;\n if (typeof params === 'string') {\n text = params; // Direct string input\n } else if (typeof params === 'object' && params.text) {\n text = params.text; // Object with text property\n } else {\n logger.warn('Invalid input format for embedding');\n // Return a fallback for invalid input\n const fallbackVector = Array(embeddingDimension).fill(0);\n fallbackVector[0] = 0.2; // Different value for tracking\n return fallbackVector;\n }\n\n // Skip API call for empty text\n if (!text.trim()) {\n logger.warn('Empty text for embedding');\n const emptyVector = Array(embeddingDimension).fill(0);\n emptyVector[0] = 0.3; // Different value for tracking\n return emptyVector;\n }\n\n try {\n const baseURL = getBaseURL(runtime);\n\n // Call the OpenAI API\n const response = await fetch(`${baseURL}/embeddings`, {\n method: 'POST',\n headers: {\n Authorization: `Bearer ${getApiKey(runtime)}`,\n 'Content-Type': 'application/json',\n },\n body: JSON.stringify({\n model: getSetting(runtime, 'OPENAI_EMBEDDING_MODEL', 'text-embedding-3-small'),\n input: text,\n }),\n });\n\n if (!response.ok) {\n logger.error(`OpenAI API error: ${response.status} - ${response.statusText}`);\n const errorVector = Array(embeddingDimension).fill(0);\n errorVector[0] = 0.4; // Different value for tracking\n return errorVector;\n }\n\n const data = (await response.json()) as {\n data: [{ embedding: number[] }];\n };\n\n if (!data?.data?.[0]?.embedding) {\n logger.error('API returned invalid structure');\n const errorVector = Array(embeddingDimension).fill(0);\n errorVector[0] = 0.5; // Different value for tracking\n return errorVector;\n }\n\n const embedding = data.data[0].embedding;\n logger.log(`Got valid embedding with length ${embedding.length}`);\n return embedding;\n } catch (error) {\n logger.error('Error generating embedding:', error);\n const errorVector = Array(embeddingDimension).fill(0);\n errorVector[0] = 0.6; // Different value for tracking\n return errorVector;\n }\n },\n [ModelType.TEXT_TOKENIZER_ENCODE]: async (\n _runtime,\n { prompt, modelType = ModelType.TEXT_LARGE }: TokenizeTextParams\n ) => {\n return await tokenizeText(modelType ?? ModelType.TEXT_LARGE, prompt);\n },\n [ModelType.TEXT_TOKENIZER_DECODE]: async (\n _runtime,\n { tokens, modelType = ModelType.TEXT_LARGE }: DetokenizeTextParams\n ) => {\n return await detokenizeText(modelType ?? ModelType.TEXT_LARGE, tokens);\n },\n [ModelType.TEXT_SMALL]: async (runtime, { prompt, stopSequences = [] }: GenerateTextParams) => {\n const temperature = 0.7;\n const frequency_penalty = 0.7;\n const presence_penalty = 0.7;\n const max_response_length = 8192;\n\n const openai = createOpenAIClient(runtime);\n const model = getSmallModel(runtime);\n\n logger.log('generating text');\n logger.log(prompt);\n\n const { text: openaiResponse } = await generateText({\n model: openai.languageModel(model),\n prompt: prompt,\n system: runtime.character.system ?? 
undefined,\n temperature: temperature,\n maxTokens: max_response_length,\n frequencyPenalty: frequency_penalty,\n presencePenalty: presence_penalty,\n stopSequences: stopSequences,\n });\n\n return openaiResponse;\n },\n [ModelType.TEXT_LARGE]: async (\n runtime,\n {\n prompt,\n stopSequences = [],\n maxTokens = 8192,\n temperature = 0.7,\n frequencyPenalty = 0.7,\n presencePenalty = 0.7,\n }: GenerateTextParams\n ) => {\n const openai = createOpenAIClient(runtime);\n const model = getLargeModel(runtime);\n\n const { text: openaiResponse } = await generateText({\n model: openai.languageModel(model),\n prompt: prompt,\n system: runtime.character.system ?? undefined,\n temperature: temperature,\n maxTokens: maxTokens,\n frequencyPenalty: frequencyPenalty,\n presencePenalty: presencePenalty,\n stopSequences: stopSequences,\n });\n\n return openaiResponse;\n },\n [ModelType.IMAGE]: async (\n runtime,\n params: {\n prompt: string;\n n?: number;\n size?: string;\n }\n ) => {\n const baseURL = getBaseURL(runtime);\n const response = await fetch(`${baseURL}/images/generations`, {\n method: 'POST',\n headers: {\n Authorization: `Bearer ${getApiKey(runtime)}`,\n 'Content-Type': 'application/json',\n },\n body: JSON.stringify({\n prompt: params.prompt,\n n: params.n || 1,\n size: params.size || '1024x1024',\n }),\n });\n if (!response.ok) {\n throw new Error(`Failed to generate image: ${response.statusText}`);\n }\n const data = await response.json();\n const typedData = data as { data: { url: string }[] };\n return typedData.data;\n },\n [ModelType.IMAGE_DESCRIPTION]: async (runtime, params: ImageDescriptionParams | string) => {\n // Handle string case (direct URL)\n let imageUrl: string;\n let prompt: string | undefined;\n\n if (typeof params === 'string') {\n imageUrl = params;\n prompt = undefined;\n } else {\n // Object parameter case\n imageUrl = params.imageUrl;\n prompt = params.prompt;\n }\n\n try {\n const baseURL = getBaseURL(runtime);\n const apiKey = getApiKey(runtime);\n\n if (!apiKey) {\n logger.error('OpenAI API key not set');\n return {\n title: 'Failed to analyze image',\n description: 'API key not configured',\n };\n }\n\n // Call the GPT-4 Vision API\n const response = await fetch(`${baseURL}/chat/completions`, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${apiKey}`,\n },\n body: JSON.stringify({\n model: 'gpt-4o-mini',\n messages: [\n {\n role: 'user',\n content: [\n {\n type: 'text',\n text:\n prompt ||\n 'Please analyze this image and provide a title and detailed description.',\n },\n {\n type: 'image_url',\n image_url: { url: imageUrl },\n },\n ],\n },\n ],\n max_tokens: 300,\n }),\n });\n\n if (!response.ok) {\n throw new Error(`OpenAI API error: ${response.status}`);\n }\n\n const result: any = await response.json();\n const content = result.choices?.[0]?.message?.content;\n\n if (!content) {\n return {\n title: 'Failed to analyze image',\n description: 'No response from API',\n };\n }\n\n // Extract title and description\n const titleMatch = content.match(/title[:\\s]+(.+?)(?:\\n|$)/i);\n const title = titleMatch?.[1] || 'Image Analysis';\n\n // Rest of content is the description\n const description = content.replace(/title[:\\s]+(.+?)(?:\\n|$)/i, '').trim();\n\n return { title, description };\n } catch (error) {\n logger.error('Error analyzing image:', error);\n return {\n title: 'Failed to analyze image',\n description: `Error: ${error instanceof Error ? 
error.message : String(error)}`,\n };\n }\n },\n [ModelType.TRANSCRIPTION]: async (runtime, audioBuffer: Buffer) => {\n logger.log('audioBuffer', audioBuffer);\n const baseURL = getBaseURL(runtime);\n\n const formData = new FormData();\n formData.append('file', audioBuffer, {\n filename: 'recording.mp3',\n contentType: 'audio/mp3',\n });\n formData.append('model', 'whisper-1');\n\n const response = await fetch(`${baseURL}/audio/transcriptions`, {\n method: 'POST',\n headers: {\n Authorization: `Bearer ${getApiKey(runtime)}`,\n },\n body: formData,\n });\n\n logger.log('response', response);\n if (!response.ok) {\n throw new Error(`Failed to transcribe audio: ${response.statusText}`);\n }\n\n const data = (await response.json()) as { text: string };\n return data.text;\n },\n [ModelType.TEXT_TO_SPEECH]: async (runtime: AgentRuntime, text: string) => {\n return await fetchTextToSpeech(runtime, text);\n },\n\n [ModelType.OBJECT_SMALL]: async (runtime, params: ObjectGenerationParams) => {\n return generateObjectByModelType(runtime, params, ModelType.OBJECT_SMALL, getSmallModel);\n },\n [ModelType.OBJECT_LARGE]: async (runtime, params: ObjectGenerationParams) => {\n return generateObjectByModelType(runtime, params, ModelType.OBJECT_LARGE, getLargeModel);\n },\n },\n tests: [\n {\n name: 'openai_plugin_tests',\n tests: [\n {\n name: 'openai_test_url_and_api_key_validation',\n fn: async (runtime) => {\n const baseURL = getBaseURL(runtime);\n const response = await fetch(`${baseURL}/models`, {\n headers: {\n Authorization: `Bearer ${getApiKey(runtime)}`,\n },\n });\n const data = await response.json();\n logger.log('Models Available:', (data as any)?.data.length);\n if (!response.ok) {\n throw new Error(`Failed to validate OpenAI API key: ${response.statusText}`);\n }\n },\n },\n {\n name: 'openai_test_text_embedding',\n fn: async (runtime) => {\n try {\n const embedding = await runtime.useModel(ModelType.TEXT_EMBEDDING, {\n text: 'Hello, world!',\n });\n logger.log('embedding', embedding);\n } catch (error) {\n logger.error('Error in test_text_embedding:', error);\n throw error;\n }\n },\n },\n {\n name: 'openai_test_text_large',\n fn: async (runtime) => {\n try {\n const text = await runtime.useModel(ModelType.TEXT_LARGE, {\n prompt: 'What is the nature of reality in 10 words?',\n });\n if (text.length === 0) {\n throw new Error('Failed to generate text');\n }\n logger.log('generated with test_text_large:', text);\n } catch (error) {\n logger.error('Error in test_text_large:', error);\n throw error;\n }\n },\n },\n {\n name: 'openai_test_text_small',\n fn: async (runtime) => {\n try {\n const text = await runtime.useModel(ModelType.TEXT_SMALL, {\n prompt: 'What is the nature of reality in 10 words?',\n });\n if (text.length === 0) {\n throw new Error('Failed to generate text');\n }\n logger.log('generated with test_text_small:', text);\n } catch (error) {\n logger.error('Error in test_text_small:', error);\n throw error;\n }\n },\n },\n {\n name: 'openai_test_image_generation',\n fn: async (runtime) => {\n logger.log('openai_test_image_generation');\n try {\n const image = await runtime.useModel(ModelType.IMAGE, {\n prompt: 'A beautiful sunset over a calm ocean',\n n: 1,\n size: '1024x1024',\n });\n logger.log('generated with test_image_generation:', image);\n } catch (error) {\n logger.error('Error in test_image_generation:', error);\n throw error;\n }\n },\n },\n {\n name: 'image-description',\n fn: async (runtime) => {\n try {\n logger.log('openai_test_image_description');\n try {\n const 
result = await runtime.useModel(\n ModelType.IMAGE_DESCRIPTION,\n 'https://upload.wikimedia.org/wikipedia/commons/thumb/1/1c/Vitalik_Buterin_TechCrunch_London_2015_%28cropped%29.jpg/537px-Vitalik_Buterin_TechCrunch_London_2015_%28cropped%29.jpg'\n );\n\n // Check if result has the expected structure\n if (\n result &&\n typeof result === 'object' &&\n 'title' in result &&\n 'description' in result\n ) {\n logger.log('Image description:', result);\n } else {\n logger.error('Invalid image description result format:', result);\n }\n } catch (e) {\n logger.error('Error in image description test:', e);\n }\n } catch (e) {\n logger.error('Error in openai_test_image_description:', e);\n }\n },\n },\n {\n name: 'openai_test_transcription',\n fn: async (runtime) => {\n logger.log('openai_test_transcription');\n try {\n const response = await fetch(\n 'https://upload.wikimedia.org/wikipedia/en/4/40/Chris_Benoit_Voice_Message.ogg'\n );\n const arrayBuffer = await response.arrayBuffer();\n const transcription = await runtime.useModel(\n ModelType.TRANSCRIPTION,\n Buffer.from(new Uint8Array(arrayBuffer))\n );\n logger.log('generated with test_transcription:', transcription);\n } catch (error) {\n logger.error('Error in test_transcription:', error);\n throw error;\n }\n },\n },\n {\n name: 'openai_test_text_tokenizer_encode',\n fn: async (runtime) => {\n const prompt = 'Hello tokenizer encode!';\n const tokens = await runtime.useModel(ModelType.TEXT_TOKENIZER_ENCODE, { prompt });\n if (!Array.isArray(tokens) || tokens.length === 0) {\n throw new Error('Failed to tokenize text: expected non-empty array of tokens');\n }\n logger.log('Tokenized output:', tokens);\n },\n },\n {\n name: 'openai_test_text_tokenizer_decode',\n fn: async (runtime) => {\n const prompt = 'Hello tokenizer decode!';\n // Encode the string into tokens first\n const tokens = await runtime.useModel(ModelType.TEXT_TOKENIZER_ENCODE, { prompt });\n // Now decode tokens back into text\n const decodedText = await runtime.useModel(ModelType.TEXT_TOKENIZER_DECODE, { tokens });\n if (decodedText !== prompt) {\n throw new Error(\n `Decoded text does not match original. 
Expected \"${prompt}\", got \"${decodedText}\"`\n );\n }\n logger.log('Decoded text:', decodedText);\n },\n },\n {\n name: 'openai_test_text_to_speech',\n fn: async (runtime: AgentRuntime) => {\n try {\n const text = 'Hello, this is a test for text-to-speech.';\n const response = await fetchTextToSpeech(runtime, text);\n if (!response) {\n throw new Error('Failed to generate speech');\n }\n logger.log('Generated speech successfully');\n } catch (error) {\n logger.error('Error in openai_test_text_to_speech:', error);\n throw error;\n }\n },\n },\n ],\n },\n ],\n};\nexport default openaiPlugin;\n"],"mappings":";AAAA,SAAS,oBAAoB;AAS7B;AAAA,EAGE;AAAA,EAEA;AAAA,EACA;AAAA,OACK;AACP,SAAS,gBAAgB,cAAc,sBAAiC;AACxE,SAA6B,wBAAwB;AACrD,OAAO,cAAc;AACrB,OAAO,WAAW;AAUlB,SAAS,WAAW,SAAc,KAAa,cAA2C;AACxF,SAAO,QAAQ,WAAW,GAAG,KAAK,QAAQ,IAAI,GAAG,KAAK;AACxD;AAQA,SAAS,WAAW,SAAsB;AACxC,SAAO,WAAW,SAAS,mBAAmB,2BAA2B;AAC3E;AAQA,SAAS,UAAU,SAAkC;AACnD,SAAO,WAAW,SAAS,gBAAgB;AAC7C;AAQA,SAAS,cAAc,SAAsB;AAC3C,SACE,WAAW,SAAS,oBAAoB,KAAK,WAAW,SAAS,eAAe,aAAa;AAEjG;AAQA,SAAS,cAAc,SAAsB;AAC3C,SAAO,WAAW,SAAS,oBAAoB,KAAK,WAAW,SAAS,eAAe,QAAQ;AACjG;AAQA,SAAS,mBAAmB,SAAc;AACxC,SAAO,aAAa;AAAA,IAClB,QAAQ,UAAU,OAAO;AAAA,IACzB,SAAS,WAAW,OAAO;AAAA,EAC7B,CAAC;AACH;AASA,eAAe,aAAa,OAAsB,QAAgB;AAChE,QAAM,YACJ,UAAU,UAAU,aACf,QAAQ,IAAI,sBAAsB,QAAQ,IAAI,eAAe,gBAC7D,QAAQ,IAAI,eAAe;AAClC,QAAM,WAAW,iBAAiB,SAA0B;AAC5D,QAAM,SAAS,SAAS,OAAO,MAAM;AACrC,SAAO;AACT;AASA,eAAe,eAAe,OAAsB,QAAkB;AACpE,QAAM,YACJ,UAAU,UAAU,aACf,QAAQ,IAAI,sBAAsB,QAAQ,IAAI,eAAe,gBAC7D,QAAQ,IAAI,sBAAsB,QAAQ,IAAI,eAAe;AACpE,QAAM,WAAW,iBAAiB,SAA0B;AAC5D,SAAO,SAAS,OAAO,MAAM;AAC/B;AAKA,eAAe,0BACb,SACA,QACA,WACA,YACoB;AACpB,QAAM,SAAS,mBAAmB,OAAO;AACzC,QAAM,QAAQ,WAAW,OAAO;AAEhC,MAAI;AACF,QAAI,OAAO,QAAQ;AAEjB,aAAO,KAAK,SAAS,SAAS,4BAA4B;AAAA,IAC5D;AAEA,UAAM,EAAE,OAAO,IAAI,MAAM,eAAe;AAAA,MACtC,OAAO,OAAO,cAAc,KAAK;AAAA,MACjC,QAAQ;AAAA,MACR,QAAQ,OAAO;AAAA,MACf,aAAa,OAAO;AAAA,MACpB,yBAAyB,sBAAsB;AAAA,IACjD,CAAC;AACD,WAAO;AAAA,EACT,SAAS,OAAO;AACd,WAAO,MAAM,gCAAgC,SAAS,KAAK,KAAK;AAChE,UAAM;AAAA,EACR;AACF;AAKA,SAAS,wBAGoB;AAC3B,SAAO,OAAO,EAAE,MAAM,MAAM,MAAwC;AAClE,QAAI;AACF,UAAI,iBAAiB,gBAAgB;AACnC,cAAM,cAAc,KAAK,QAAQ,wBAAwB,EAAE;AAE3D,aAAK,MAAM,WAAW;AACtB,eAAO;AAAA,MACT;AAAA,IACF,SAAS,WAAW;AAClB,aAAO,KAAK,+BAA+B,SAAS;AACpD,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAKA,eAAe,kBAAkB,SAAuB,MAAc;AACpE,QAAM,SAAS,UAAU,OAAO;AAChC,QAAM,QAAQ,WAAW,SAAS,oBAAoB,iBAAiB;AACvE,QAAM,QAAQ,WAAW,SAAS,oBAAoB,MAAM;AAC5D,QAAM,eAAe,WAAW,SAAS,2BAA2B,EAAE;AACtE,QAAM,UAAU,WAAW,OAAO;AAElC,MAAI;AACF,UAAM,MAAM,MAAM,MAAM,GAAG,OAAO,iBAAiB;AAAA,MACjD,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,eAAe,UAAU,MAAM;AAAA,QAC/B,gBAAgB;AAAA,MAClB;AAAA,MACA,MAAM,KAAK,UAAU;AAAA,QACnB;AAAA,QACA;AAAA,QACA,OAAO;AAAA,QACP,GAAI,gBAAgB,EAAE,aAAa;AAAA,MACrC,CAAC;AAAA,IACH,CAAC;AAED,QAAI,CAAC,IAAI,IAAI;AACX,YAAM,MAAM,MAAM,IAAI,KAAK;AAC3B,YAAM,IAAI,MAAM,oBAAoB,IAAI,MAAM,KAAK,GAAG,EAAE;AAAA,IAC1D;AAEA,WAAO,IAAI;AAAA,EACb,SAAS,KAAU;AACjB,UAAM,IAAI,MAAM,2CAA2C,IAAI,WAAW,GAAG,EAAE;AAAA,EACjF;AACF;AAMO,IAAM,eAAuB;AAAA,EAClC,MAAM;AAAA,EACN,aAAa;AAAA,EACb,QAAQ;AAAA,IACN,gBAAgB,QAAQ,IAAI;AAAA,IAC5B,iBAAiB,QAAQ,IAAI;AAAA,IAC7B,oBAAoB,QAAQ,IAAI;AAAA,IAChC,oBAAoB,QAAQ,IAAI;AAAA,IAChC,aAAa,QAAQ,IAAI;AAAA,IACzB,aAAa,QAAQ,IAAI;AAAA,IACzB,wBAAwB,QAAQ,IAAI;AAAA,IACpC,6BAA6B,QAAQ,IAAI;AAAA,EAC3C;AAAA,EACA,MAAM,KAAK,SAAS,SAAS;AAC3B,QAAI;AASF,UAAI,CAAC,UAAU,OAAO,GAAG;AACvB,eAAO;AAAA,UACL;AAAA,QACF;AAEA;AAAA,MACF;AAGA,UAAI;AACF,cAAM,UAAU,WAAW,OAAO;AAClC,cAAM,WAAW,MAAM,MAAM,GAAG,OAAO,WAAW;AAAA,UAChD,SAAS,EAAE,eAAe,UAAU,UAAU,OAAO,CAAC,GAAG;AAAA,QAC3D,CAAC;AAED,YAAI,CAAC,SAAS,IAAI;AAChB,iBAAO,
[… remaining base64-VLQ source-map "mappings" data omitted: machine-generated position data with no human-readable content …]","names":[]}
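Illustrative note (not part of the published package): a minimal TypeScript sketch of how the model handlers bundled in `dist/index.js` above are typically exercised, mirroring the plugin's own test suite. It assumes an already-initialized ElizaOS `runtime` with this plugin registered and `OPENAI_API_KEY` configured; the `smokeTest` wrapper name is hypothetical, and the import names mirror those used in the plugin source.

```ts
import { ModelType, logger, type AgentRuntime } from '@elizaos/core';

// Hypothetical smoke test; `runtime` must be an initialized agent runtime with
// @elizaos/plugin-openai registered and OPENAI_API_KEY set in the environment.
async function smokeTest(runtime: AgentRuntime): Promise<void> {
  // TEXT_EMBEDDING accepts a string or { text } and resolves to a number[]
  // (1536 values with the default text-embedding-3-small configuration).
  const embedding = await runtime.useModel(ModelType.TEXT_EMBEDDING, {
    text: 'Hello, world!',
  });
  logger.log('embedding', embedding);

  // Tokenizer round-trip: TEXT_TOKENIZER_ENCODE then TEXT_TOKENIZER_DECODE.
  const prompt = 'Hello tokenizer!';
  const tokens = await runtime.useModel(ModelType.TEXT_TOKENIZER_ENCODE, { prompt });
  const decoded = await runtime.useModel(ModelType.TEXT_TOKENIZER_DECODE, { tokens });
  if (decoded !== prompt) {
    throw new Error(`Decoded text does not match original: got "${decoded}"`);
  }

  // TEXT_LARGE: plain text generation.
  const text = await runtime.useModel(ModelType.TEXT_LARGE, {
    prompt: 'What is the nature of reality in 10 words?',
  });
  if (text.length === 0) {
    throw new Error('Failed to generate text');
  }
  logger.log('generated:', text);
}
```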
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@elizaos/plugin-openai",
3
- "version": "1.0.0-beta.3",
3
+ "version": "1.0.0-beta.33",
4
4
  "type": "module",
5
5
  "main": "dist/index.js",
6
6
  "module": "dist/index.js",
@@ -24,7 +24,7 @@
24
24
  "dependencies": {
25
25
  "@ai-sdk/openai": "^1.1.9",
26
26
  "@ai-sdk/ui-utils": "1.1.9",
27
- "@elizaos/core": "^1.0.0-beta.3",
27
+ "@elizaos/core": "^1.0.0-beta.33",
28
28
  "ai": "^4.1.25",
29
29
  "js-tiktoken": "^1.0.18",
30
30
  "tsup": "8.4.0"
@@ -49,7 +49,7 @@
49
49
  }
50
50
  }
51
51
  },
52
- "gitHead": "7fafcbac799e40bd30f1e2acd3239262e79bbb2f",
52
+ "gitHead": "9da6c2edde9ccc19112c6aa5b028810fb8d2da54",
53
53
  "devDependencies": {
54
54
  "prettier": "3.5.3"
55
55
  }