@elizaos/plugin-openai 1.0.6 → 1.0.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -25,7 +25,8 @@ The plugin requires these environment variables (can be set in .env file or char
  "OPENAI_EMBEDDING_URL": "optional_custom_endpoint",
  "OPENAI_EMBEDDING_DIMENSIONS": "1536",
  "OPENAI_IMAGE_DESCRIPTION_MODEL": "gpt-4o-mini",
- "OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS": "8192"
+ "OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS": "8192",
+ "OPENAI_EXPERIMENTAL_TELEMETRY": "false"
  }
  ```

@@ -43,6 +44,7 @@ OPENAI_EMBEDDING_URL=optional_custom_endpoint
  OPENAI_EMBEDDING_DIMENSIONS=1536
  OPENAI_IMAGE_DESCRIPTION_MODEL=gpt-4o-mini
  OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS=8192
+ OPENAI_EXPERIMENTAL_TELEMETRY=false
  ```

  ### Configuration Options
@@ -57,6 +59,18 @@ OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS=8192
  - `OPENAI_EMBEDDING_DIMENSIONS`: Defaults to 1536 (1536)
  - `OPENAI_IMAGE_DESCRIPTION_MODEL`: Model used for image description (default: "gpt-4o-mini")
  - `OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS`: Maximum tokens for image descriptions (default: 8192)
+ - `OPENAI_EXPERIMENTAL_TELEMETRY`: Enable experimental telemetry features for enhanced debugging and usage analytics (default: false)
+
+ ### Experimental Telemetry
+
+ When `OPENAI_EXPERIMENTAL_TELEMETRY` is set to `true`, the plugin enables advanced telemetry features that provide:
+
+ - Enhanced debugging capabilities for model performance issues
+ - Detailed usage analytics for optimization
+ - Better observability into OpenAI API interactions
+ - Foundation for future monitoring and analytics features through Sentry or other frameworks
+
+ **Note**: This feature is opt-in due to privacy considerations, as telemetry data may contain information about model usage patterns. Enable only when you need enhanced debugging or analytics capabilities.

  The plugin provides these model classes:

@@ -84,7 +98,10 @@ await runtime.useModel(ModelType.IMAGE, {
  ### Audio Transcription

  ```js
- const transcription = await runtime.useModel(ModelType.TRANSCRIPTION, audioBuffer);
+ const transcription = await runtime.useModel(
+ ModelType.TRANSCRIPTION,
+ audioBuffer
+ );
  ```

  ### Image Analysis
@@ -99,5 +116,8 @@ const { title, description } = await runtime.useModel(
  ### Text Embeddings

  ```js
- const embedding = await runtime.useModel(ModelType.TEXT_EMBEDDING, 'text to embed');
+ const embedding = await runtime.useModel(
+ ModelType.TEXT_EMBEDDING,
+ 'text to embed'
+ );
  ```
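
A quick illustration of the Experimental Telemetry section added to the README above. Following the settings resolution used throughout the plugin (runtime setting, then environment variable, then default), the flag can be enabled either from the environment or from the agent's settings; the character shape below is a hypothetical sketch for illustration, not content from this diff:

```js
// Hypothetical example: enabling the flag introduced in 1.0.8.
// Option 1: environment variable (matches the .env block in the README).
process.env.OPENAI_EXPERIMENTAL_TELEMETRY = 'true';

// Option 2: agent settings read via runtime.getSetting() (illustrative shape).
const character = {
  name: 'ExampleAgent',
  settings: {
    secrets: {
      OPENAI_API_KEY: 'sk-...',
      OPENAI_EXPERIMENTAL_TELEMETRY: 'true', // any casing of "true" enables it
    },
  },
};
```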
package/dist/index.js CHANGED
@@ -17,7 +17,11 @@ function getSetting(runtime, key, defaultValue) {
  return runtime.getSetting(key) ?? process.env[key] ?? defaultValue;
  }
  function getBaseURL(runtime) {
- const baseURL = getSetting(runtime, "OPENAI_BASE_URL", "https://api.openai.com/v1");
+ const baseURL = getSetting(
+ runtime,
+ "OPENAI_BASE_URL",
+ "https://api.openai.com/v1"
+ );
  logger.debug(`[OpenAI] Default base URL: ${baseURL}`);
  return baseURL;
  }
@@ -36,7 +40,9 @@ function getApiKey(runtime) {
  function getEmbeddingApiKey(runtime) {
  const embeddingApiKey = getSetting(runtime, "OPENAI_EMBEDDING_API_KEY");
  if (embeddingApiKey) {
- logger.debug(`[OpenAI] Using specific embedding API key: ${embeddingApiKey}`);
+ logger.debug(
+ `[OpenAI] Using specific embedding API key: ${embeddingApiKey}`
+ );
  return embeddingApiKey;
  }
  logger.debug("[OpenAI] Falling back to general API key for embeddings.");
@@ -51,6 +57,15 @@ function getLargeModel(runtime) {
  function getImageDescriptionModel(runtime) {
  return getSetting(runtime, "OPENAI_IMAGE_DESCRIPTION_MODEL", "gpt-4o-mini") ?? "gpt-4o-mini";
  }
+ function getExperimentalTelemetry(runtime) {
+ const setting = getSetting(runtime, "OPENAI_EXPERIMENTAL_TELEMETRY", "false");
+ const normalizedSetting = String(setting).toLowerCase();
+ const result = normalizedSetting === "true";
+ logger.debug(
+ `[OpenAI] Experimental telemetry in function: "${setting}" (type: ${typeof setting}, normalized: "${normalizedSetting}", result: ${result})`
+ );
+ return result;
+ }
  function createOpenAIClient(runtime) {
  return createOpenAI({
  apiKey: getApiKey(runtime),
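
The new helper above reduces to a case-insensitive comparison against the literal string `"true"`, so values like `"1"` or `"yes"` do not enable telemetry. A minimal restatement of that behaviour (illustrative, not part of the package):

```js
// Mirrors getExperimentalTelemetry's parsing: only "true" (any casing) enables it.
const isTelemetryEnabled = (value) =>
  String(value ?? 'false').toLowerCase() === 'true';

console.log(isTelemetryEnabled('TRUE'));      // true
console.log(isTelemetryEnabled('1'));         // false (not the literal "true")
console.log(isTelemetryEnabled(undefined));   // false (the "false" default)
```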
@@ -88,7 +103,12 @@ async function generateObjectByModelType(runtime, params, modelType, getModelFn)
  experimental_repairText: getJsonRepairFunction()
  });
  if (usage) {
- emitModelUsageEvent(runtime, modelType, params.prompt, usage);
+ emitModelUsageEvent(
+ runtime,
+ modelType,
+ params.prompt,
+ usage
+ );
  }
  return object;
  } catch (error) {
@@ -106,7 +126,9 @@ async function generateObjectByModelType(runtime, params, modelType, getModelFn)
  return repairedObject;
  } catch (repairParseError) {
  const message = repairParseError instanceof Error ? repairParseError.message : String(repairParseError);
- logger.error(`[generateObject] Failed to parse repaired JSON: ${message}`);
+ logger.error(
+ `[generateObject] Failed to parse repaired JSON: ${message}`
+ );
  throw repairParseError;
  }
  } else {
@@ -193,7 +215,8 @@ var openaiPlugin = {
  OPENAI_EMBEDDING_URL: process.env.OPENAI_EMBEDDING_URL,
  OPENAI_EMBEDDING_DIMENSIONS: process.env.OPENAI_EMBEDDING_DIMENSIONS,
  OPENAI_IMAGE_DESCRIPTION_MODEL: process.env.OPENAI_IMAGE_DESCRIPTION_MODEL,
- OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS: process.env.OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS
+ OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS: process.env.OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS,
+ OPENAI_EXPERIMENTAL_TELEMETRY: process.env.OPENAI_EXPERIMENTAL_TELEMETRY
  },
  async init(_config, runtime) {
  new Promise(async (resolve) => {
@@ -211,15 +234,21 @@ var openaiPlugin = {
  headers: { Authorization: `Bearer ${getApiKey(runtime)}` }
  });
  if (!response.ok) {
- logger.warn(`OpenAI API key validation failed: ${response.statusText}`);
- logger.warn("OpenAI functionality will be limited until a valid API key is provided");
+ logger.warn(
+ `OpenAI API key validation failed: ${response.statusText}`
+ );
+ logger.warn(
+ "OpenAI functionality will be limited until a valid API key is provided"
+ );
  } else {
  logger.log("OpenAI API key validated successfully");
  }
  } catch (fetchError) {
  const message = fetchError instanceof Error ? fetchError.message : String(fetchError);
  logger.warn(`Error validating OpenAI API key: ${message}`);
- logger.warn("OpenAI functionality will be limited until a valid API key is provided");
+ logger.warn(
+ "OpenAI functionality will be limited until a valid API key is provided"
+ );
  }
  } catch (error) {
  const message = error?.errors?.map((e) => e.message).join(", ") || (error instanceof Error ? error.message : String(error));
@@ -240,9 +269,6 @@ var openaiPlugin = {
  getSetting(runtime, "OPENAI_EMBEDDING_DIMENSIONS", "1536") || "1536",
  10
  );
- logger.debug(
- `[OpenAI] Using embedding model: ${embeddingModelName} with dimension: ${embeddingDimension}`
- );
  if (!Object.values(VECTOR_DIMS).includes(embeddingDimension)) {
  const errorMsg = `Invalid embedding dimension: ${embeddingDimension}. Must be one of: ${Object.values(VECTOR_DIMS).join(", ")}`;
  logger.error(errorMsg);
@@ -291,7 +317,9 @@ var openaiPlugin = {
  const responseClone = response.clone();
  const rawResponseBody = await responseClone.text();
  if (!response.ok) {
- logger.error(`OpenAI API error: ${response.status} - ${response.statusText}`);
+ logger.error(
+ `OpenAI API error: ${response.status} - ${response.statusText}`
+ );
  const errorVector = Array(embeddingDimension).fill(0);
  errorVector[0] = 0.4;
  return errorVector;
@@ -335,6 +363,7 @@ var openaiPlugin = {
  const max_response_length = 8192;
  const openai = createOpenAIClient(runtime);
  const modelName = getSmallModel(runtime);
+ const experimentalTelemetry = getExperimentalTelemetry(runtime);
  logger.log(`[OpenAI] Using TEXT_SMALL model: ${modelName}`);
  logger.log(prompt);
  const { text: openaiResponse, usage } = await generateText({
@@ -345,7 +374,10 @@ var openaiPlugin = {
  maxTokens: max_response_length,
  frequencyPenalty: frequency_penalty,
  presencePenalty: presence_penalty,
- stopSequences
+ stopSequences,
+ experimental_telemetry: {
+ isEnabled: experimentalTelemetry
+ }
  });
  if (usage) {
  emitModelUsageEvent(runtime, ModelType.TEXT_SMALL, prompt, usage);
@@ -362,6 +394,7 @@ var openaiPlugin = {
  }) => {
  const openai = createOpenAIClient(runtime);
  const modelName = getLargeModel(runtime);
+ const experimentalTelemetry = getExperimentalTelemetry(runtime);
  logger.log(`[OpenAI] Using TEXT_LARGE model: ${modelName}`);
  logger.log(prompt);
  const { text: openaiResponse, usage } = await generateText({
@@ -372,7 +405,10 @@ var openaiPlugin = {
  maxTokens,
  frequencyPenalty,
  presencePenalty,
- stopSequences
+ stopSequences,
+ experimental_telemetry: {
+ isEnabled: experimentalTelemetry
+ }
  });
  if (usage) {
  emitModelUsageEvent(runtime, ModelType.TEXT_LARGE, prompt, usage);
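
The `experimental_telemetry: { isEnabled }` option wired into `generateText` in the two hunks above is the AI SDK's experimental telemetry hook, which records OpenTelemetry spans when enabled. For those spans to show up anywhere, the host process needs a tracer provider and exporter registered; that part is an assumption about the consuming application, not something this plugin configures. A rough sketch:

```js
// Sketch of host-side OpenTelemetry setup (assumed consumer code, not part of this package).
// With OPENAI_EXPERIMENTAL_TELEMETRY=true, the AI SDK records spans for each
// generateText call; this setup makes them visible by printing them to stdout.
import { NodeSDK } from '@opentelemetry/sdk-node';
import { ConsoleSpanExporter } from '@opentelemetry/sdk-trace-node';

const sdk = new NodeSDK({
  traceExporter: new ConsoleSpanExporter(), // swap for a Sentry/OTLP exporter in production
});
sdk.start();
```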
@@ -472,7 +508,6 @@ var openaiPlugin = {
  const result = await response.json();
  const typedResult = result;
  const content = typedResult.choices?.[0]?.message?.content;
- console.log("############## CONTENT", content);
  if (typedResult.usage) {
  emitModelUsageEvent(
  runtime,
@@ -491,7 +526,6 @@ var openaiPlugin = {
  description: "No response from API"
  };
  }
- console.log("######################## CONTENT", content);
  const isCustomPrompt = typeof params === "object" && params.prompt && params.prompt !== "Please analyze this image and provide a title and detailed description.";
  if (isCustomPrompt) {
  return content;
@@ -548,7 +582,11 @@ var openaiPlugin = {
  }
  },
  [ModelType.TEXT_TO_SPEECH]: async (runtime, text) => {
- const ttsModelName = getSetting(runtime, "OPENAI_TTS_MODEL", "gpt-4o-mini-tts");
+ const ttsModelName = getSetting(
+ runtime,
+ "OPENAI_TTS_MODEL",
+ "gpt-4o-mini-tts"
+ );
  logger.log(`[OpenAI] Using TEXT_TO_SPEECH model: ${ttsModelName}`);
  try {
  const speechStream = await fetchTextToSpeech(runtime, text);
@@ -559,10 +597,20 @@ var openaiPlugin = {
  }
  },
  [ModelType.OBJECT_SMALL]: async (runtime, params) => {
- return generateObjectByModelType(runtime, params, ModelType.OBJECT_SMALL, getSmallModel);
+ return generateObjectByModelType(
+ runtime,
+ params,
+ ModelType.OBJECT_SMALL,
+ getSmallModel
+ );
  },
  [ModelType.OBJECT_LARGE]: async (runtime, params) => {
- return generateObjectByModelType(runtime, params, ModelType.OBJECT_LARGE, getLargeModel);
+ return generateObjectByModelType(
+ runtime,
+ params,
+ ModelType.OBJECT_LARGE,
+ getLargeModel
+ );
  }
  },
  tests: [
@@ -579,9 +627,14 @@ var openaiPlugin = {
  }
  });
  const data = await response.json();
- logger.log("Models Available:", data?.data?.length ?? "N/A");
+ logger.log(
+ "Models Available:",
+ data?.data?.length ?? "N/A"
+ );
  if (!response.ok) {
- throw new Error(`Failed to validate OpenAI API key: ${response.statusText}`);
+ throw new Error(
+ `Failed to validate OpenAI API key: ${response.statusText}`
+ );
  }
  }
  },
@@ -589,9 +642,12 @@ var openaiPlugin = {
  name: "openai_test_text_embedding",
  fn: async (runtime) => {
  try {
- const embedding = await runtime.useModel(ModelType.TEXT_EMBEDDING, {
- text: "Hello, world!"
- });
+ const embedding = await runtime.useModel(
+ ModelType.TEXT_EMBEDDING,
+ {
+ text: "Hello, world!"
+ }
+ );
  logger.log("embedding", embedding);
  } catch (error) {
  const message = error instanceof Error ? error.message : String(error);
@@ -667,7 +723,10 @@ var openaiPlugin = {
  if (result && typeof result === "object" && "title" in result && "description" in result) {
  logger.log("Image description:", result);
  } else {
- logger.error("Invalid image description result format:", result);
+ logger.error(
+ "Invalid image description result format:",
+ result
+ );
  }
  } catch (e) {
  const message = e instanceof Error ? e.message : String(e);
@@ -675,7 +734,9 @@ var openaiPlugin = {
  }
  } catch (e) {
  const message = e instanceof Error ? e.message : String(e);
- logger.error(`Error in openai_test_image_description: ${message}`);
+ logger.error(
+ `Error in openai_test_image_description: ${message}`
+ );
  }
  }
  },
@@ -704,9 +765,14 @@ var openaiPlugin = {
  name: "openai_test_text_tokenizer_encode",
  fn: async (runtime) => {
  const prompt = "Hello tokenizer encode!";
- const tokens = await runtime.useModel(ModelType.TEXT_TOKENIZER_ENCODE, { prompt });
+ const tokens = await runtime.useModel(
+ ModelType.TEXT_TOKENIZER_ENCODE,
+ { prompt }
+ );
  if (!Array.isArray(tokens) || tokens.length === 0) {
- throw new Error("Failed to tokenize text: expected non-empty array of tokens");
+ throw new Error(
+ "Failed to tokenize text: expected non-empty array of tokens"
+ );
  }
  logger.log("Tokenized output:", tokens);
  }
@@ -715,8 +781,14 @@ var openaiPlugin = {
  name: "openai_test_text_tokenizer_decode",
  fn: async (runtime) => {
  const prompt = "Hello tokenizer decode!";
- const tokens = await runtime.useModel(ModelType.TEXT_TOKENIZER_ENCODE, { prompt });
- const decodedText = await runtime.useModel(ModelType.TEXT_TOKENIZER_DECODE, { tokens });
+ const tokens = await runtime.useModel(
+ ModelType.TEXT_TOKENIZER_ENCODE,
+ { prompt }
+ );
+ const decodedText = await runtime.useModel(
+ ModelType.TEXT_TOKENIZER_DECODE,
+ { tokens }
+ );
  if (decodedText !== prompt) {
  throw new Error(
  `Decoded text does not match original. Expected "${prompt}", got "${decodedText}"`
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/index.ts"],"sourcesContent":["import { createOpenAI } from '@ai-sdk/openai';\nimport type {\n DetokenizeTextParams,\n GenerateTextParams,\n IAgentRuntime,\n ImageDescriptionParams,\n ModelTypeName,\n ObjectGenerationParams,\n Plugin,\n TextEmbeddingParams,\n TokenizeTextParams,\n} from '@elizaos/core';\nimport {\n EventType,\n logger,\n ModelType,\n safeReplacer,\n ServiceType,\n VECTOR_DIMS,\n} from '@elizaos/core';\nimport {\n generateObject,\n generateText,\n JSONParseError,\n type JSONValue,\n type LanguageModelUsage,\n} from 'ai';\nimport { encodingForModel, type TiktokenModel } from 'js-tiktoken';\nimport { fetch, FormData } from 'undici';\n\n\n\n/**\n * Retrieves a configuration setting from the runtime, falling back to environment variables or a default value if not found.\n *\n * @param key - The name of the setting to retrieve.\n * @param defaultValue - The value to return if the setting is not found in the runtime or environment.\n * @returns The resolved setting value, or {@link defaultValue} if not found.\n */\nfunction getSetting(\n runtime: IAgentRuntime,\n key: string,\n defaultValue?: string\n): string | undefined {\n return runtime.getSetting(key) ?? process.env[key] ?? defaultValue;\n}\n\n/**\n * Retrieves the OpenAI API base URL from runtime settings, environment variables, or defaults, using provider-aware resolution.\n *\n * @returns The resolved base URL for OpenAI API requests.\n */\nfunction getBaseURL(runtime: IAgentRuntime): string {\n const baseURL = getSetting(runtime, 'OPENAI_BASE_URL', 'https://api.openai.com/v1') as string;\n logger.debug(`[OpenAI] Default base URL: ${baseURL}`);\n return baseURL;\n}\n\n/**\n * Retrieves the OpenAI API base URL for embeddings, falling back to the general base URL.\n *\n * @returns The resolved base URL for OpenAI embedding requests.\n */\nfunction getEmbeddingBaseURL(runtime: IAgentRuntime): string {\n const embeddingURL = getSetting(runtime, 'OPENAI_EMBEDDING_URL');\n if (embeddingURL) {\n logger.debug(`[OpenAI] Using specific embedding base URL: ${embeddingURL}`);\n return embeddingURL;\n }\n logger.debug('[OpenAI] Falling back to general base URL for embeddings.');\n return getBaseURL(runtime);\n}\n\n/**\n * Helper function to get the API key for OpenAI\n *\n * @param runtime The runtime context\n * @returns The configured API key\n */\nfunction getApiKey(runtime: IAgentRuntime): string | undefined {\n return getSetting(runtime, 'OPENAI_API_KEY');\n}\n\n/**\n * Helper function to get the embedding API key for OpenAI, falling back to the general API key if not set.\n *\n * @param runtime The runtime context\n * @returns The configured API key\n */\nfunction getEmbeddingApiKey(runtime: IAgentRuntime): string | undefined {\n const embeddingApiKey = getSetting(runtime, 'OPENAI_EMBEDDING_API_KEY');\n if (embeddingApiKey) {\n logger.debug(`[OpenAI] Using specific embedding API key: ${embeddingApiKey}`);\n return embeddingApiKey;\n }\n logger.debug('[OpenAI] Falling back to general API key for embeddings.');\n return getApiKey(runtime);\n}\n\n/**\n * Helper function to get the small model name with fallbacks\n *\n * @param runtime The runtime context\n * @returns The configured small model name\n */\nfunction getSmallModel(runtime: IAgentRuntime): string {\n return (\n getSetting(runtime, 'OPENAI_SMALL_MODEL') ??\n (getSetting(runtime, 'SMALL_MODEL', 'gpt-4o-mini') as string)\n );\n}\n\n/**\n * Helper function to get the large model name with fallbacks\n *\n * @param runtime The runtime 
context\n * @returns The configured large model name\n */\nfunction getLargeModel(runtime: IAgentRuntime): string {\n return (\n getSetting(runtime, 'OPENAI_LARGE_MODEL') ??\n (getSetting(runtime, 'LARGE_MODEL', 'gpt-4o') as string)\n );\n}\n\n/**\n * Helper function to get the image description model name with fallbacks\n *\n * @param runtime The runtime context\n * @returns The configured image description model name\n */\nfunction getImageDescriptionModel(runtime: IAgentRuntime): string {\n return getSetting(runtime, 'OPENAI_IMAGE_DESCRIPTION_MODEL', 'gpt-4o-mini') ?? 'gpt-4o-mini';\n}\n\n/**\n * Create an OpenAI client with proper configuration\n *\n * @param runtime The runtime context\n * @returns Configured OpenAI client\n */\nfunction createOpenAIClient(runtime: IAgentRuntime) {\n return createOpenAI({\n apiKey: getApiKey(runtime),\n baseURL: getBaseURL(runtime),\n });\n}\n\n/**\n * Asynchronously tokenizes the given text based on the specified model and prompt.\n *\n * @param {ModelTypeName} model - The type of model to use for tokenization.\n * @param {string} prompt - The text prompt to tokenize.\n * @returns {number[]} - An array of tokens representing the encoded prompt.\n */\nasync function tokenizeText(model: ModelTypeName, prompt: string) {\n const modelName =\n model === ModelType.TEXT_SMALL\n ? (process.env.OPENAI_SMALL_MODEL ?? process.env.SMALL_MODEL ?? 'gpt-4o-mini')\n : (process.env.LARGE_MODEL ?? 'gpt-4o');\n const encoding = encodingForModel(modelName as TiktokenModel);\n const tokens = encoding.encode(prompt);\n return tokens;\n}\n\n/**\n * Detokenize a sequence of tokens back into text using the specified model.\n *\n * @param {ModelTypeName} model - The type of model to use for detokenization.\n * @param {number[]} tokens - The sequence of tokens to detokenize.\n * @returns {string} The detokenized text.\n */\nasync function detokenizeText(model: ModelTypeName, tokens: number[]) {\n const modelName =\n model === ModelType.TEXT_SMALL\n ? (process.env.OPENAI_SMALL_MODEL ?? process.env.SMALL_MODEL ?? 'gpt-4o-mini')\n : (process.env.OPENAI_LARGE_MODEL ?? process.env.LARGE_MODEL ?? 'gpt-4o');\n const encoding = encodingForModel(modelName as TiktokenModel);\n return encoding.decode(tokens);\n}\n\n/**\n * Helper function to generate objects using specified model type\n */\nasync function generateObjectByModelType(\n runtime: IAgentRuntime,\n params: ObjectGenerationParams,\n modelType: string,\n getModelFn: (runtime: IAgentRuntime) => string\n): Promise<JSONValue> {\n const openai = createOpenAIClient(runtime);\n const modelName = getModelFn(runtime);\n logger.log(`[OpenAI] Using ${modelType} model: ${modelName}`);\n const temperature = params.temperature ?? 
0;\n const schemaPresent = !!params.schema;\n\n if (schemaPresent) {\n logger.info(\n `Using ${modelType} without schema validation (schema provided but output=no-schema)`\n );\n }\n\n try {\n const { object, usage } = await generateObject({\n model: openai.languageModel(modelName),\n output: 'no-schema',\n prompt: params.prompt,\n temperature: temperature,\n experimental_repairText: getJsonRepairFunction(),\n });\n\n if (usage) {\n emitModelUsageEvent(runtime, modelType as ModelTypeName, params.prompt, usage);\n }\n return object;\n } catch (error: unknown) {\n if (error instanceof JSONParseError) {\n logger.error(`[generateObject] Failed to parse JSON: ${error.message}`);\n\n const repairFunction = getJsonRepairFunction();\n const repairedJsonString = await repairFunction({\n text: error.text,\n error,\n });\n\n if (repairedJsonString) {\n try {\n const repairedObject = JSON.parse(repairedJsonString);\n logger.info('[generateObject] Successfully repaired JSON.');\n return repairedObject;\n } catch (repairParseError: unknown) {\n const message =\n repairParseError instanceof Error\n ? repairParseError.message\n : String(repairParseError);\n logger.error(`[generateObject] Failed to parse repaired JSON: ${message}`);\n throw repairParseError;\n }\n } else {\n logger.error('[generateObject] JSON repair failed.');\n throw error;\n }\n } else {\n const message = error instanceof Error ? error.message : String(error);\n logger.error(`[generateObject] Unknown error: ${message}`);\n throw error;\n }\n }\n}\n\n/**\n * Returns a function to repair JSON text\n */\nfunction getJsonRepairFunction(): (params: {\n text: string;\n error: unknown;\n}) => Promise<string | null> {\n return async ({ text, error }: { text: string; error: unknown }) => {\n try {\n if (error instanceof JSONParseError) {\n const cleanedText = text.replace(/```json\\n|\\n```|```/g, '');\n JSON.parse(cleanedText);\n return cleanedText;\n }\n return null;\n } catch (jsonError: unknown) {\n const message = jsonError instanceof Error ? 
jsonError.message : String(jsonError);\n logger.warn(`Failed to repair JSON text: ${message}`);\n return null;\n }\n };\n}\n\n/**\n * Emits a model usage event\n * @param runtime The runtime context\n * @param type The model type\n * @param prompt The prompt used\n * @param usage The LLM usage data\n */\nfunction emitModelUsageEvent(\n runtime: IAgentRuntime,\n type: ModelTypeName,\n prompt: string,\n usage: LanguageModelUsage\n) {\n runtime.emitEvent(EventType.MODEL_USED, {\n provider: 'openai',\n type,\n prompt,\n tokens: {\n prompt: usage.promptTokens,\n completion: usage.completionTokens,\n total: usage.totalTokens,\n },\n });\n}\n\n/**\n * function for text-to-speech\n */\nasync function fetchTextToSpeech(runtime: IAgentRuntime, text: string) {\n const apiKey = getApiKey(runtime);\n const model = getSetting(runtime, 'OPENAI_TTS_MODEL', 'gpt-4o-mini-tts');\n const voice = getSetting(runtime, 'OPENAI_TTS_VOICE', 'nova');\n const instructions = getSetting(runtime, 'OPENAI_TTS_INSTRUCTIONS', '');\n const baseURL = getBaseURL(runtime);\n\n try {\n const res = await fetch(`${baseURL}/audio/speech`, {\n method: 'POST',\n headers: {\n Authorization: `Bearer ${apiKey}`,\n 'Content-Type': 'application/json',\n },\n body: JSON.stringify({\n model,\n voice,\n input: text,\n ...(instructions && { instructions }),\n }),\n });\n\n if (!res.ok) {\n const err = await res.text();\n throw new Error(`OpenAI TTS error ${res.status}: ${err}`);\n }\n\n return res.body;\n } catch (err: unknown) {\n const message = err instanceof Error ? err.message : String(err);\n throw new Error(`Failed to fetch speech from OpenAI TTS: ${message}`);\n }\n}\n\n/**\n * Defines the OpenAI plugin with its name, description, and configuration options.\n * @type {Plugin}\n */\nexport const openaiPlugin: Plugin = {\n name: 'openai',\n description: 'OpenAI plugin',\n config: {\n OPENAI_API_KEY: process.env.OPENAI_API_KEY,\n OPENAI_BASE_URL: process.env.OPENAI_BASE_URL,\n OPENAI_SMALL_MODEL: process.env.OPENAI_SMALL_MODEL,\n OPENAI_LARGE_MODEL: process.env.OPENAI_LARGE_MODEL,\n SMALL_MODEL: process.env.SMALL_MODEL,\n LARGE_MODEL: process.env.LARGE_MODEL,\n OPENAI_EMBEDDING_MODEL: process.env.OPENAI_EMBEDDING_MODEL,\n OPENAI_EMBEDDING_API_KEY: process.env.OPENAI_EMBEDDING_API_KEY,\n OPENAI_EMBEDDING_URL: process.env.OPENAI_EMBEDDING_URL,\n OPENAI_EMBEDDING_DIMENSIONS: process.env.OPENAI_EMBEDDING_DIMENSIONS,\n OPENAI_IMAGE_DESCRIPTION_MODEL: process.env.OPENAI_IMAGE_DESCRIPTION_MODEL,\n OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS: process.env.OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS,\n },\n async init(_config, runtime) {\n // do check in the background\n new Promise<void>(async resolve => {\n resolve()\n try {\n if (!getApiKey(runtime)) {\n logger.warn(\n 'OPENAI_API_KEY is not set in environment - OpenAI functionality will be limited'\n );\n return;\n }\n try {\n const baseURL = getBaseURL(runtime);\n const response = await fetch(`${baseURL}/models`, {\n headers: { Authorization: `Bearer ${getApiKey(runtime)}` },\n });\n if (!response.ok) {\n logger.warn(`OpenAI API key validation failed: ${response.statusText}`);\n logger.warn('OpenAI functionality will be limited until a valid API key is provided');\n } else {\n logger.log('OpenAI API key validated successfully');\n }\n } catch (fetchError: unknown) {\n const message = fetchError instanceof Error ? 
fetchError.message : String(fetchError);\n logger.warn(`Error validating OpenAI API key: ${message}`);\n logger.warn('OpenAI functionality will be limited until a valid API key is provided');\n }\n } catch (error: unknown) {\n const message =\n (error as { errors?: Array<{ message: string }> })?.errors\n ?.map((e) => e.message)\n .join(', ') || (error instanceof Error ? error.message : String(error));\n logger.warn(\n `OpenAI plugin configuration issue: ${message} - You need to configure the OPENAI_API_KEY in your environment variables`\n );\n }\n })\n },\n models: {\n [ModelType.TEXT_EMBEDDING]: async (\n runtime: IAgentRuntime,\n params: TextEmbeddingParams | string | null\n ): Promise<number[]> => {\n const embeddingModelName = getSetting(\n runtime,\n 'OPENAI_EMBEDDING_MODEL',\n 'text-embedding-3-small'\n );\n const embeddingDimension = Number.parseInt(\n getSetting(runtime, 'OPENAI_EMBEDDING_DIMENSIONS', '1536') || '1536',\n 10\n ) as (typeof VECTOR_DIMS)[keyof typeof VECTOR_DIMS];\n\n // Added log for specific embedding model\n logger.debug(\n `[OpenAI] Using embedding model: ${embeddingModelName} with dimension: ${embeddingDimension}`\n );\n\n if (!Object.values(VECTOR_DIMS).includes(embeddingDimension)) {\n const errorMsg = `Invalid embedding dimension: ${embeddingDimension}. Must be one of: ${Object.values(VECTOR_DIMS).join(', ')}`;\n logger.error(errorMsg);\n throw new Error(errorMsg);\n }\n if (params === null) {\n logger.debug('Creating test embedding for initialization');\n const testVector = Array(embeddingDimension).fill(0);\n testVector[0] = 0.1;\n return testVector;\n }\n let text: string;\n if (typeof params === 'string') {\n text = params;\n } else if (typeof params === 'object' && params.text) {\n text = params.text;\n } else {\n logger.warn('Invalid input format for embedding');\n const fallbackVector = Array(embeddingDimension).fill(0);\n fallbackVector[0] = 0.2;\n return fallbackVector;\n }\n if (!text.trim()) {\n logger.warn('Empty text for embedding');\n const emptyVector = Array(embeddingDimension).fill(0);\n emptyVector[0] = 0.3;\n return emptyVector;\n }\n\n const embeddingBaseURL = getEmbeddingBaseURL(runtime);\n const apiKey = getEmbeddingApiKey(runtime);\n\n if (!apiKey) {\n throw new Error('OpenAI API key not configured');\n }\n\n try {\n const response = await fetch(`${embeddingBaseURL}/embeddings`, {\n method: 'POST',\n headers: {\n Authorization: `Bearer ${apiKey}`,\n 'Content-Type': 'application/json',\n },\n body: JSON.stringify({\n model: embeddingModelName,\n input: text,\n }),\n });\n\n const responseClone = response.clone();\n const rawResponseBody = await responseClone.text();\n\n if (!response.ok) {\n logger.error(`OpenAI API error: ${response.status} - ${response.statusText}`);\n const errorVector = Array(embeddingDimension).fill(0);\n errorVector[0] = 0.4;\n return errorVector;\n }\n\n const data = (await response.json()) as {\n data: [{ embedding: number[] }];\n usage?: { prompt_tokens: number; total_tokens: number };\n };\n\n if (!data?.data?.[0]?.embedding) {\n logger.error('API returned invalid structure');\n const errorVector = Array(embeddingDimension).fill(0);\n errorVector[0] = 0.5;\n return errorVector;\n }\n\n const embedding = data.data[0].embedding;\n\n if (data.usage) {\n const usage = {\n promptTokens: data.usage.prompt_tokens,\n completionTokens: 0,\n totalTokens: data.usage.total_tokens,\n };\n\n emitModelUsageEvent(runtime, ModelType.TEXT_EMBEDDING, text, usage);\n }\n\n logger.log(`Got valid embedding with length 
${embedding.length}`);\n return embedding;\n } catch (error: unknown) {\n const message = error instanceof Error ? error.message : String(error);\n logger.error(`Error generating embedding: ${message}`);\n const errorVector = Array(embeddingDimension).fill(0);\n errorVector[0] = 0.6;\n return errorVector;\n }\n },\n [ModelType.TEXT_TOKENIZER_ENCODE]: async (\n _runtime,\n { prompt, modelType = ModelType.TEXT_LARGE }: TokenizeTextParams\n ) => {\n return await tokenizeText(modelType ?? ModelType.TEXT_LARGE, prompt);\n },\n [ModelType.TEXT_TOKENIZER_DECODE]: async (\n _runtime,\n { tokens, modelType = ModelType.TEXT_LARGE }: DetokenizeTextParams\n ) => {\n return await detokenizeText(modelType ?? ModelType.TEXT_LARGE, tokens);\n },\n [ModelType.TEXT_SMALL]: async (\n runtime: IAgentRuntime,\n { prompt, stopSequences = [] }: GenerateTextParams\n ) => {\n const temperature = 0.7;\n const frequency_penalty = 0.7;\n const presence_penalty = 0.7;\n const max_response_length = 8192;\n\n const openai = createOpenAIClient(runtime);\n const modelName = getSmallModel(runtime);\n\n logger.log(`[OpenAI] Using TEXT_SMALL model: ${modelName}`);\n logger.log(prompt);\n\n const { text: openaiResponse, usage } = await generateText({\n model: openai.languageModel(modelName),\n prompt: prompt,\n system: runtime.character.system ?? undefined,\n temperature: temperature,\n maxTokens: max_response_length,\n frequencyPenalty: frequency_penalty,\n presencePenalty: presence_penalty,\n stopSequences: stopSequences,\n });\n\n if (usage) {\n emitModelUsageEvent(runtime, ModelType.TEXT_SMALL, prompt, usage);\n }\n\n return openaiResponse;\n },\n [ModelType.TEXT_LARGE]: async (\n runtime: IAgentRuntime,\n {\n prompt,\n stopSequences = [],\n maxTokens = 8192,\n temperature = 0.7,\n frequencyPenalty = 0.7,\n presencePenalty = 0.7,\n }: GenerateTextParams\n ) => {\n const openai = createOpenAIClient(runtime);\n const modelName = getLargeModel(runtime);\n\n logger.log(`[OpenAI] Using TEXT_LARGE model: ${modelName}`);\n logger.log(prompt);\n\n const { text: openaiResponse, usage } = await generateText({\n model: openai.languageModel(modelName),\n prompt: prompt,\n system: runtime.character.system ?? 
undefined,\n temperature: temperature,\n maxTokens: maxTokens,\n frequencyPenalty: frequencyPenalty,\n presencePenalty: presencePenalty,\n stopSequences: stopSequences,\n });\n\n if (usage) {\n emitModelUsageEvent(runtime, ModelType.TEXT_LARGE, prompt, usage);\n }\n\n return openaiResponse;\n },\n [ModelType.IMAGE]: async (\n runtime: IAgentRuntime,\n params: {\n prompt: string;\n n?: number;\n size?: string;\n }\n ) => {\n const n = params.n || 1;\n const size = params.size || '1024x1024';\n const prompt = params.prompt;\n const modelName = 'dall-e-3'; // Default DALL-E model\n logger.log(`[OpenAI] Using IMAGE model: ${modelName}`);\n\n const baseURL = getBaseURL(runtime);\n const apiKey = getApiKey(runtime);\n\n if (!apiKey) {\n throw new Error('OpenAI API key not configured');\n }\n\n try {\n const response = await fetch(`${baseURL}/images/generations`, {\n method: 'POST',\n headers: {\n Authorization: `Bearer ${apiKey}`,\n 'Content-Type': 'application/json',\n },\n body: JSON.stringify({\n prompt: prompt,\n n: n,\n size: size,\n }),\n });\n\n const responseClone = response.clone();\n const rawResponseBody = await responseClone.text();\n\n if (!response.ok) {\n throw new Error(`Failed to generate image: ${response.statusText}`);\n }\n\n const data = await response.json();\n const typedData = data as { data: { url: string }[] };\n\n return typedData.data;\n } catch (error: unknown) {\n const message = error instanceof Error ? error.message : String(error);\n throw error;\n }\n },\n [ModelType.IMAGE_DESCRIPTION]: async (\n runtime: IAgentRuntime,\n params: ImageDescriptionParams | string\n ) => {\n let imageUrl: string;\n let promptText: string | undefined;\n const modelName = getImageDescriptionModel(runtime);\n logger.log(`[OpenAI] Using IMAGE_DESCRIPTION model: ${modelName}`);\n const maxTokens = Number.parseInt(\n getSetting(runtime, 'OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS', '8192') || '8192',\n 10\n );\n\n if (typeof params === 'string') {\n imageUrl = params;\n promptText = 'Please analyze this image and provide a title and detailed description.';\n } else {\n imageUrl = params.imageUrl;\n promptText =\n params.prompt ||\n 'Please analyze this image and provide a title and detailed description.';\n }\n\n const messages = [\n {\n role: 'user',\n content: [\n { type: 'text', text: promptText },\n { type: 'image_url', image_url: { url: imageUrl } },\n ],\n },\n ];\n\n const baseURL = getBaseURL(runtime);\n const apiKey = getApiKey(runtime);\n\n if (!apiKey) {\n logger.error('OpenAI API key not set');\n return {\n title: 'Failed to analyze image',\n description: 'API key not configured',\n };\n }\n\n try {\n const requestBody: Record<string, any> = {\n model: modelName,\n messages: messages,\n max_tokens: maxTokens,\n };\n\n const response = await fetch(`${baseURL}/chat/completions`, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${apiKey}`,\n },\n body: JSON.stringify(requestBody),\n });\n\n const responseClone = response.clone();\n const rawResponseBody = await responseClone.text();\n\n if (!response.ok) {\n throw new Error(`OpenAI API error: ${response.status}`);\n }\n\n const result: unknown = await response.json();\n\n type OpenAIResponseType = {\n choices?: Array<{\n message?: { content?: string };\n finish_reason?: string;\n }>;\n usage?: {\n prompt_tokens: number;\n completion_tokens: number;\n total_tokens: number;\n };\n };\n\n const typedResult = result as OpenAIResponseType;\n const content = 
typedResult.choices?.[0]?.message?.content;\n\n console.log('############## CONTENT', content);\n\n if (typedResult.usage) {\n emitModelUsageEvent(\n runtime,\n ModelType.IMAGE_DESCRIPTION,\n typeof params === 'string' ? params : params.prompt || '',\n {\n promptTokens: typedResult.usage.prompt_tokens,\n completionTokens: typedResult.usage.completion_tokens,\n totalTokens: typedResult.usage.total_tokens,\n }\n );\n }\n\n if (!content) {\n return {\n title: 'Failed to analyze image',\n description: 'No response from API',\n };\n }\n\n console.log('######################## CONTENT', content);\n\n // Check if a custom prompt was provided (not the default prompt)\n const isCustomPrompt =\n typeof params === 'object' &&\n params.prompt &&\n params.prompt !==\n 'Please analyze this image and provide a title and detailed description.';\n\n // If custom prompt is used, return the raw content\n if (isCustomPrompt) {\n return content;\n }\n\n // Otherwise, maintain backwards compatibility with object return\n const titleMatch = content.match(/title[:\\s]+(.+?)(?:\\n|$)/i);\n const title = titleMatch?.[1]?.trim() || 'Image Analysis';\n const description = content.replace(/title[:\\s]+(.+?)(?:\\n|$)/i, '').trim();\n\n const processedResult = { title, description };\n return processedResult;\n } catch (error: unknown) {\n const message = error instanceof Error ? error.message : String(error);\n logger.error(`Error analyzing image: ${message}`);\n return {\n title: 'Failed to analyze image',\n description: `Error: ${message}`,\n };\n }\n },\n [ModelType.TRANSCRIPTION]: async (runtime: IAgentRuntime, audioBuffer: Buffer) => {\n logger.log('audioBuffer', audioBuffer);\n\n const modelName = 'whisper-1';\n logger.log(`[OpenAI] Using TRANSCRIPTION model: ${modelName}`);\n\n const baseURL = getBaseURL(runtime);\n const apiKey = getApiKey(runtime);\n\n if (!apiKey) {\n throw new Error('OpenAI API key not configured - Cannot make request');\n }\n if (!audioBuffer || audioBuffer.length === 0) {\n throw new Error('Audio buffer is empty or invalid for transcription');\n }\n\n const formData = new FormData();\n formData.append('file', new Blob([audioBuffer]), 'recording.mp3');\n formData.append('model', 'whisper-1');\n\n try {\n const response = await fetch(`${baseURL}/audio/transcriptions`, {\n method: 'POST',\n headers: {\n Authorization: `Bearer ${apiKey}`,\n },\n body: formData,\n });\n\n const responseClone = response.clone();\n const rawResponseBody = await responseClone.text();\n\n logger.log('response', response);\n\n if (!response.ok) {\n throw new Error(`Failed to transcribe audio: ${response.statusText}`);\n }\n\n const data = (await response.json()) as { text: string };\n const processedText = data.text;\n\n return processedText;\n } catch (error: unknown) {\n const message = error instanceof Error ? error.message : String(error);\n throw error;\n }\n },\n [ModelType.TEXT_TO_SPEECH]: async (runtime: IAgentRuntime, text: string) => {\n const ttsModelName = getSetting(runtime, 'OPENAI_TTS_MODEL', 'gpt-4o-mini-tts');\n logger.log(`[OpenAI] Using TEXT_TO_SPEECH model: ${ttsModelName}`);\n try {\n const speechStream = await fetchTextToSpeech(runtime, text);\n return speechStream;\n } catch (error: unknown) {\n const message = error instanceof Error ? 
error.message : String(error);\n throw error;\n }\n },\n [ModelType.OBJECT_SMALL]: async (runtime: IAgentRuntime, params: ObjectGenerationParams) => {\n return generateObjectByModelType(runtime, params, ModelType.OBJECT_SMALL, getSmallModel);\n },\n [ModelType.OBJECT_LARGE]: async (runtime: IAgentRuntime, params: ObjectGenerationParams) => {\n return generateObjectByModelType(runtime, params, ModelType.OBJECT_LARGE, getLargeModel);\n },\n },\n tests: [\n {\n name: 'openai_plugin_tests',\n tests: [\n {\n name: 'openai_test_url_and_api_key_validation',\n fn: async (runtime: IAgentRuntime) => {\n const baseURL = getBaseURL(runtime);\n const response = await fetch(`${baseURL}/models`, {\n headers: {\n Authorization: `Bearer ${getApiKey(runtime)}`,\n },\n });\n const data = await response.json();\n logger.log('Models Available:', (data as { data?: unknown[] })?.data?.length ?? 'N/A');\n if (!response.ok) {\n throw new Error(`Failed to validate OpenAI API key: ${response.statusText}`);\n }\n },\n },\n {\n name: 'openai_test_text_embedding',\n fn: async (runtime: IAgentRuntime) => {\n try {\n const embedding = await runtime.useModel(ModelType.TEXT_EMBEDDING, {\n text: 'Hello, world!',\n });\n logger.log('embedding', embedding);\n } catch (error: unknown) {\n const message = error instanceof Error ? error.message : String(error);\n logger.error(`Error in test_text_embedding: ${message}`);\n throw error;\n }\n },\n },\n {\n name: 'openai_test_text_large',\n fn: async (runtime: IAgentRuntime) => {\n try {\n const text = await runtime.useModel(ModelType.TEXT_LARGE, {\n prompt: 'What is the nature of reality in 10 words?',\n });\n if (text.length === 0) {\n throw new Error('Failed to generate text');\n }\n logger.log('generated with test_text_large:', text);\n } catch (error: unknown) {\n const message = error instanceof Error ? error.message : String(error);\n logger.error(`Error in test_text_large: ${message}`);\n throw error;\n }\n },\n },\n {\n name: 'openai_test_text_small',\n fn: async (runtime: IAgentRuntime) => {\n try {\n const text = await runtime.useModel(ModelType.TEXT_SMALL, {\n prompt: 'What is the nature of reality in 10 words?',\n });\n if (text.length === 0) {\n throw new Error('Failed to generate text');\n }\n logger.log('generated with test_text_small:', text);\n } catch (error: unknown) {\n const message = error instanceof Error ? error.message : String(error);\n logger.error(`Error in test_text_small: ${message}`);\n throw error;\n }\n },\n },\n {\n name: 'openai_test_image_generation',\n fn: async (runtime: IAgentRuntime) => {\n logger.log('openai_test_image_generation');\n try {\n const image = await runtime.useModel(ModelType.IMAGE, {\n prompt: 'A beautiful sunset over a calm ocean',\n n: 1,\n size: '1024x1024',\n });\n logger.log('generated with test_image_generation:', image);\n } catch (error: unknown) {\n const message = error instanceof Error ? 
error.message : String(error);\n logger.error(`Error in test_image_generation: ${message}`);\n throw error;\n }\n },\n },\n {\n name: 'image-description',\n fn: async (runtime: IAgentRuntime) => {\n try {\n logger.log('openai_test_image_description');\n try {\n const result = await runtime.useModel(\n ModelType.IMAGE_DESCRIPTION,\n 'https://upload.wikimedia.org/wikipedia/commons/thumb/1/1c/Vitalik_Buterin_TechCrunch_London_2015_%28cropped%29.jpg/537px-Vitalik_Buterin_TechCrunch_London_2015_%28cropped%29.jpg'\n );\n\n if (\n result &&\n typeof result === 'object' &&\n 'title' in result &&\n 'description' in result\n ) {\n logger.log('Image description:', result);\n } else {\n logger.error('Invalid image description result format:', result);\n }\n } catch (e: unknown) {\n const message = e instanceof Error ? e.message : String(e);\n logger.error(`Error in image description test: ${message}`);\n }\n } catch (e: unknown) {\n const message = e instanceof Error ? e.message : String(e);\n logger.error(`Error in openai_test_image_description: ${message}`);\n }\n },\n },\n {\n name: 'openai_test_transcription',\n fn: async (runtime: IAgentRuntime) => {\n logger.log('openai_test_transcription');\n try {\n const response = await fetch(\n 'https://upload.wikimedia.org/wikipedia/en/4/40/Chris_Benoit_Voice_Message.ogg'\n );\n const arrayBuffer = await response.arrayBuffer();\n const transcription = await runtime.useModel(\n ModelType.TRANSCRIPTION,\n Buffer.from(new Uint8Array(arrayBuffer))\n );\n logger.log('generated with test_transcription:', transcription);\n } catch (error: unknown) {\n const message = error instanceof Error ? error.message : String(error);\n logger.error(`Error in test_transcription: ${message}`);\n throw error;\n }\n },\n },\n {\n name: 'openai_test_text_tokenizer_encode',\n fn: async (runtime: IAgentRuntime) => {\n const prompt = 'Hello tokenizer encode!';\n const tokens = await runtime.useModel(ModelType.TEXT_TOKENIZER_ENCODE, { prompt });\n if (!Array.isArray(tokens) || tokens.length === 0) {\n throw new Error('Failed to tokenize text: expected non-empty array of tokens');\n }\n logger.log('Tokenized output:', tokens);\n },\n },\n {\n name: 'openai_test_text_tokenizer_decode',\n fn: async (runtime: IAgentRuntime) => {\n const prompt = 'Hello tokenizer decode!';\n const tokens = await runtime.useModel(ModelType.TEXT_TOKENIZER_ENCODE, { prompt });\n const decodedText = await runtime.useModel(ModelType.TEXT_TOKENIZER_DECODE, { tokens });\n if (decodedText !== prompt) {\n throw new Error(\n `Decoded text does not match original. Expected \"${prompt}\", got \"${decodedText}\"`\n );\n }\n logger.log('Decoded text:', decodedText);\n },\n },\n {\n name: 'openai_test_text_to_speech',\n fn: async (runtime: IAgentRuntime) => {\n try {\n const text = 'Hello, this is a test for text-to-speech.';\n const response = await fetchTextToSpeech(runtime, text);\n if (!response) {\n throw new Error('Failed to generate speech');\n }\n logger.log('Generated speech successfully');\n } catch (error: unknown) {\n const message = error instanceof Error ? 
error.message : String(error);\n logger.error(`Error in openai_test_text_to_speech: ${message}`);\n throw error;\n }\n },\n },\n ],\n },\n ],\n};\nexport default openaiPlugin;\n"],"mappings":";AAAA,SAAS,oBAAoB;AAY7B;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EAGA;AAAA,OACK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OAGK;AACP,SAAS,wBAA4C;AACrD,SAAS,OAAO,gBAAgB;AAWhC,SAAS,WACP,SACA,KACA,cACoB;AACpB,SAAO,QAAQ,WAAW,GAAG,KAAK,QAAQ,IAAI,GAAG,KAAK;AACxD;AAOA,SAAS,WAAW,SAAgC;AAClD,QAAM,UAAU,WAAW,SAAS,mBAAmB,2BAA2B;AAClF,SAAO,MAAM,8BAA8B,OAAO,EAAE;AACpD,SAAO;AACT;AAOA,SAAS,oBAAoB,SAAgC;AAC3D,QAAM,eAAe,WAAW,SAAS,sBAAsB;AAC/D,MAAI,cAAc;AAChB,WAAO,MAAM,+CAA+C,YAAY,EAAE;AAC1E,WAAO;AAAA,EACT;AACA,SAAO,MAAM,2DAA2D;AACxE,SAAO,WAAW,OAAO;AAC3B;AAQA,SAAS,UAAU,SAA4C;AAC7D,SAAO,WAAW,SAAS,gBAAgB;AAC7C;AAQA,SAAS,mBAAmB,SAA4C;AACtE,QAAM,kBAAkB,WAAW,SAAS,0BAA0B;AACtE,MAAI,iBAAiB;AACnB,WAAO,MAAM,8CAA8C,eAAe,EAAE;AAC5E,WAAO;AAAA,EACT;AACA,SAAO,MAAM,0DAA0D;AACvE,SAAO,UAAU,OAAO;AAC1B;AAQA,SAAS,cAAc,SAAgC;AACrD,SACE,WAAW,SAAS,oBAAoB,KACvC,WAAW,SAAS,eAAe,aAAa;AAErD;AAQA,SAAS,cAAc,SAAgC;AACrD,SACE,WAAW,SAAS,oBAAoB,KACvC,WAAW,SAAS,eAAe,QAAQ;AAEhD;AAQA,SAAS,yBAAyB,SAAgC;AAChE,SAAO,WAAW,SAAS,kCAAkC,aAAa,KAAK;AACjF;AAQA,SAAS,mBAAmB,SAAwB;AAClD,SAAO,aAAa;AAAA,IAClB,QAAQ,UAAU,OAAO;AAAA,IACzB,SAAS,WAAW,OAAO;AAAA,EAC7B,CAAC;AACH;AASA,eAAe,aAAa,OAAsB,QAAgB;AAChE,QAAM,YACJ,UAAU,UAAU,aACf,QAAQ,IAAI,sBAAsB,QAAQ,IAAI,eAAe,gBAC7D,QAAQ,IAAI,eAAe;AAClC,QAAM,WAAW,iBAAiB,SAA0B;AAC5D,QAAM,SAAS,SAAS,OAAO,MAAM;AACrC,SAAO;AACT;AASA,eAAe,eAAe,OAAsB,QAAkB;AACpE,QAAM,YACJ,UAAU,UAAU,aACf,QAAQ,IAAI,sBAAsB,QAAQ,IAAI,eAAe,gBAC7D,QAAQ,IAAI,sBAAsB,QAAQ,IAAI,eAAe;AACpE,QAAM,WAAW,iBAAiB,SAA0B;AAC5D,SAAO,SAAS,OAAO,MAAM;AAC/B;AAKA,eAAe,0BACb,SACA,QACA,WACA,YACoB;AACpB,QAAM,SAAS,mBAAmB,OAAO;AACzC,QAAM,YAAY,WAAW,OAAO;AACpC,SAAO,IAAI,kBAAkB,SAAS,WAAW,SAAS,EAAE;AAC5D,QAAM,cAAc,OAAO,eAAe;AAC1C,QAAM,gBAAgB,CAAC,CAAC,OAAO;AAE/B,MAAI,eAAe;AACjB,WAAO;AAAA,MACL,SAAS,SAAS;AAAA,IACpB;AAAA,EACF;AAEA,MAAI;AACF,UAAM,EAAE,QAAQ,MAAM,IAAI,MAAM,eAAe;AAAA,MAC7C,OAAO,OAAO,cAAc,SAAS;AAAA,MACrC,QAAQ;AAAA,MACR,QAAQ,OAAO;AAAA,MACf;AAAA,MACA,yBAAyB,sBAAsB;AAAA,IACjD,CAAC;AAED,QAAI,OAAO;AACT,0BAAoB,SAAS,WAA4B,OAAO,QAAQ,KAAK;AAAA,IAC/E;AACA,WAAO;AAAA,EACT,SAAS,OAAgB;AACvB,QAAI,iBAAiB,gBAAgB;AACnC,aAAO,MAAM,0CAA0C,MAAM,OAAO,EAAE;AAEtE,YAAM,iBAAiB,sBAAsB;AAC7C,YAAM,qBAAqB,MAAM,eAAe;AAAA,QAC9C,MAAM,MAAM;AAAA,QACZ;AAAA,MACF,CAAC;AAED,UAAI,oBAAoB;AACtB,YAAI;AACF,gBAAM,iBAAiB,KAAK,MAAM,kBAAkB;AACpD,iBAAO,KAAK,8CAA8C;AAC1D,iBAAO;AAAA,QACT,SAAS,kBAA2B;AAClC,gBAAM,UACJ,4BAA4B,QACxB,iBAAiB,UACjB,OAAO,gBAAgB;AAC7B,iBAAO,MAAM,mDAAmD,OAAO,EAAE;AACzE,gBAAM;AAAA,QACR;AAAA,MACF,OAAO;AACL,eAAO,MAAM,sCAAsC;AACnD,cAAM;AAAA,MACR;AAAA,IACF,OAAO;AACL,YAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,aAAO,MAAM,mCAAmC,OAAO,EAAE;AACzD,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAKA,SAAS,wBAGoB;AAC3B,SAAO,OAAO,EAAE,MAAM,MAAM,MAAwC;AAClE,QAAI;AACF,UAAI,iBAAiB,gBAAgB;AACnC,cAAM,cAAc,KAAK,QAAQ,wBAAwB,EAAE;AAC3D,aAAK,MAAM,WAAW;AACtB,eAAO;AAAA,MACT;AACA,aAAO;AAAA,IACT,SAAS,WAAoB;AAC3B,YAAM,UAAU,qBAAqB,QAAQ,UAAU,UAAU,OAAO,SAAS;AACjF,aAAO,KAAK,+BAA+B,OAAO,EAAE;AACpD,aAAO;AAAA,IACT;AAAA,EACF;AACF;AASA,SAAS,oBACP,SACA,MACA,QACA,OACA;AACA,UAAQ,UAAU,UAAU,YAAY;AAAA,IACtC,UAAU;AAAA,IACV;AAAA,IACA;AAAA,IACA,QAAQ;AAAA,MACN,QAAQ,MAAM;AAAA,MACd,YAAY,MAAM;AAAA,MAClB,OAAO,MAAM;AAAA,IACf;AAAA,EACF,CAAC;AACH;AAKA,eAAe,kBAAkB,SAAwB,MAAc;AACrE,QAAM,SAAS,UAAU,OAAO;AAChC,QAAM,QAAQ,WAAW,SAAS,oBAAoB,iBAAiB;AACvE,QAAM,QAAQ,WAAW,SAAS,oBAAoB,MAAM;AAC5D,QAAM,eAAe,WAAW,SAAS,2BAA2B,EAAE;AACtE,QAAM,UAAU,WAAW,OAAO;AAElC,MAAI;AACF,UAAM,MAAM,MAAM,MAAM,GAAG,OAAO,iBAAiB;AA
AA,MACjD,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,eAAe,UAAU,MAAM;AAAA,QAC/B,gBAAgB;AAAA,MAClB;AAAA,MACA,MAAM,KAAK,UAAU;AAAA,QACnB;AAAA,QACA;AAAA,QACA,OAAO;AAAA,QACP,GAAI,gBAAgB,EAAE,aAAa;AAAA,MACrC,CAAC;AAAA,IACH,CAAC;AAED,QAAI,CAAC,IAAI,IAAI;AACX,YAAM,MAAM,MAAM,IAAI,KAAK;AAC3B,YAAM,IAAI,MAAM,oBAAoB,IAAI,MAAM,KAAK,GAAG,EAAE;AAAA,IAC1D;AAEA,WAAO,IAAI;AAAA,EACb,SAAS,KAAc;AACrB,UAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAC/D,UAAM,IAAI,MAAM,2CAA2C,OAAO,EAAE;AAAA,EACtE;AACF;AAMO,IAAM,eAAuB;AAAA,EAClC,MAAM;AAAA,EACN,aAAa;AAAA,EACb,QAAQ;AAAA,IACN,gBAAgB,QAAQ,IAAI;AAAA,IAC5B,iBAAiB,QAAQ,IAAI;AAAA,IAC7B,oBAAoB,QAAQ,IAAI;AAAA,IAChC,oBAAoB,QAAQ,IAAI;AAAA,IAChC,aAAa,QAAQ,IAAI;AAAA,IACzB,aAAa,QAAQ,IAAI;AAAA,IACzB,wBAAwB,QAAQ,IAAI;AAAA,IACpC,0BAA0B,QAAQ,IAAI;AAAA,IACtC,sBAAsB,QAAQ,IAAI;AAAA,IAClC,6BAA6B,QAAQ,IAAI;AAAA,IACzC,gCAAgC,QAAQ,IAAI;AAAA,IAC5C,qCAAqC,QAAQ,IAAI;AAAA,EACnD;AAAA,EACA,MAAM,KAAK,SAAS,SAAS;AAE3B,QAAI,QAAc,OAAM,YAAW;AACjC,cAAQ;AACR,UAAI;AACF,YAAI,CAAC,UAAU,OAAO,GAAG;AACvB,iBAAO;AAAA,YACL;AAAA,UACF;AACA;AAAA,QACF;AACA,YAAI;AACF,gBAAM,UAAU,WAAW,OAAO;AAClC,gBAAM,WAAW,MAAM,MAAM,GAAG,OAAO,WAAW;AAAA,YAChD,SAAS,EAAE,eAAe,UAAU,UAAU,OAAO,CAAC,GAAG;AAAA,UAC3D,CAAC;AACD,cAAI,CAAC,SAAS,IAAI;AAChB,mBAAO,KAAK,qCAAqC,SAAS,UAAU,EAAE;AACtE,mBAAO,KAAK,wEAAwE;AAAA,UACtF,OAAO;AACL,mBAAO,IAAI,uCAAuC;AAAA,UACpD;AAAA,QACF,SAAS,YAAqB;AAC5B,gBAAM,UAAU,sBAAsB,QAAQ,WAAW,UAAU,OAAO,UAAU;AACpF,iBAAO,KAAK,oCAAoC,OAAO,EAAE;AACzD,iBAAO,KAAK,wEAAwE;AAAA,QACtF;AAAA,MACF,SAAS,OAAgB;AACvB,cAAM,UACH,OAAmD,QAChD,IAAI,CAAC,MAAM,EAAE,OAAO,EACrB,KAAK,IAAI,MAAM,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACzE,eAAO;AAAA,UACL,sCAAsC,OAAO;AAAA,QAC/C;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EACA,QAAQ;AAAA,IACN,CAAC,UAAU,cAAc,GAAG,OAC1B,SACA,WACsB;AACtB,YAAM,qBAAqB;AAAA,QACzB;AAAA,QACA;AAAA,QACA;AAAA,MACF;AACA,YAAM,qBAAqB,OAAO;AAAA,QAChC,WAAW,SAAS,+BAA+B,MAAM,KAAK;AAAA,QAC9D;AAAA,MACF;AAGA,aAAO;AAAA,QACL,mCAAmC,kBAAkB,oBAAoB,kBAAkB;AAAA,MAC7F;AAEA,UAAI,CAAC,OAAO,OAAO,WAAW,EAAE,SAAS,kBAAkB,GAAG;AAC5D,cAAM,WAAW,gCAAgC,kBAAkB,qBAAqB,OAAO,OAAO,WAAW,EAAE,KAAK,IAAI,CAAC;AAC7H,eAAO,MAAM,QAAQ;AACrB,cAAM,IAAI,MAAM,QAAQ;AAAA,MAC1B;AACA,UAAI,WAAW,MAAM;AACnB,eAAO,MAAM,4CAA4C;AACzD,cAAM,aAAa,MAAM,kBAAkB,EAAE,KAAK,CAAC;AACnD,mBAAW,CAAC,IAAI;AAChB,eAAO;AAAA,MACT;AACA,UAAI;AACJ,UAAI,OAAO,WAAW,UAAU;AAC9B,eAAO;AAAA,MACT,WAAW,OAAO,WAAW,YAAY,OAAO,MAAM;AACpD,eAAO,OAAO;AAAA,MAChB,OAAO;AACL,eAAO,KAAK,oCAAoC;AAChD,cAAM,iBAAiB,MAAM,kBAAkB,EAAE,KAAK,CAAC;AACvD,uBAAe,CAAC,IAAI;AACpB,eAAO;AAAA,MACT;AACA,UAAI,CAAC,KAAK,KAAK,GAAG;AAChB,eAAO,KAAK,0BAA0B;AACtC,cAAM,cAAc,MAAM,kBAAkB,EAAE,KAAK,CAAC;AACpD,oBAAY,CAAC,IAAI;AACjB,eAAO;AAAA,MACT;AAEA,YAAM,mBAAmB,oBAAoB,OAAO;AACpD,YAAM,SAAS,mBAAmB,OAAO;AAEzC,UAAI,CAAC,QAAQ;AACX,cAAM,IAAI,MAAM,+BAA+B;AAAA,MACjD;AAEA,UAAI;AACF,cAAM,WAAW,MAAM,MAAM,GAAG,gBAAgB,eAAe;AAAA,UAC7D,QAAQ;AAAA,UACR,SAAS;AAAA,YACP,eAAe,UAAU,MAAM;AAAA,YAC/B,gBAAgB;AAAA,UAClB;AAAA,UACA,MAAM,KAAK,UAAU;AAAA,YACnB,OAAO;AAAA,YACP,OAAO;AAAA,UACT,CAAC;AAAA,QACH,CAAC;AAED,cAAM,gBAAgB,SAAS,MAAM;AACrC,cAAM,kBAAkB,MAAM,cAAc,KAAK;AAEjD,YAAI,CAAC,SAAS,IAAI;AAChB,iBAAO,MAAM,qBAAqB,SAAS,MAAM,MAAM,SAAS,UAAU,EAAE;AAC5E,gBAAM,cAAc,MAAM,kBAAkB,EAAE,KAAK,CAAC;AACpD,sBAAY,CAAC,IAAI;AACjB,iBAAO;AAAA,QACT;AAEA,cAAM,OAAQ,MAAM,SAAS,KAAK;AAKlC,YAAI,CAAC,MAAM,OAAO,CAAC,GAAG,WAAW;AAC/B,iBAAO,MAAM,gCAAgC;AAC7C,gBAAM,cAAc,MAAM,kBAAkB,EAAE,KAAK,CAAC;AACpD,sBAAY,CAAC,IAAI;AACjB,iBAAO;AAAA,QACT;AAEA,cAAM,YAAY,KAAK,KAAK,CAAC,EAAE;AAE/B,YAAI,KAAK,OAAO;AACd,gBAAM,QAAQ;AAAA,YACZ,cAAc,KAAK,MAAM;AAAA,YACzB,kBAAkB;AAAA,YAClB,aAAa,KAAK,MAAM;AAAA,UAC1B;AAEA,8BAAoB,SAAS,UAAU,gBAAgB,MAAM,KAAK;AAAA,QACpE;AAEA,eAAO,IAAI,mC
AAmC,UAAU,MAAM,EAAE;AAChE,eAAO;AAAA,MACT,SAAS,OAAgB;AACvB,cAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,eAAO,MAAM,+BAA+B,OAAO,EAAE;AACrD,cAAM,cAAc,MAAM,kBAAkB,EAAE,KAAK,CAAC;AACpD,oBAAY,CAAC,IAAI;AACjB,eAAO;AAAA,MACT;AAAA,IACF;AAAA,IACA,CAAC,UAAU,qBAAqB,GAAG,OACjC,UACA,EAAE,QAAQ,YAAY,UAAU,WAAW,MACxC;AACH,aAAO,MAAM,aAAa,aAAa,UAAU,YAAY,MAAM;AAAA,IACrE;AAAA,IACA,CAAC,UAAU,qBAAqB,GAAG,OACjC,UACA,EAAE,QAAQ,YAAY,UAAU,WAAW,MACxC;AACH,aAAO,MAAM,eAAe,aAAa,UAAU,YAAY,MAAM;AAAA,IACvE;AAAA,IACA,CAAC,UAAU,UAAU,GAAG,OACtB,SACA,EAAE,QAAQ,gBAAgB,CAAC,EAAE,MAC1B;AACH,YAAM,cAAc;AACpB,YAAM,oBAAoB;AAC1B,YAAM,mBAAmB;AACzB,YAAM,sBAAsB;AAE5B,YAAM,SAAS,mBAAmB,OAAO;AACzC,YAAM,YAAY,cAAc,OAAO;AAEvC,aAAO,IAAI,oCAAoC,SAAS,EAAE;AAC1D,aAAO,IAAI,MAAM;AAEjB,YAAM,EAAE,MAAM,gBAAgB,MAAM,IAAI,MAAM,aAAa;AAAA,QACzD,OAAO,OAAO,cAAc,SAAS;AAAA,QACrC;AAAA,QACA,QAAQ,QAAQ,UAAU,UAAU;AAAA,QACpC;AAAA,QACA,WAAW;AAAA,QACX,kBAAkB;AAAA,QAClB,iBAAiB;AAAA,QACjB;AAAA,MACF,CAAC;AAED,UAAI,OAAO;AACT,4BAAoB,SAAS,UAAU,YAAY,QAAQ,KAAK;AAAA,MAClE;AAEA,aAAO;AAAA,IACT;AAAA,IACA,CAAC,UAAU,UAAU,GAAG,OACtB,SACA;AAAA,MACE;AAAA,MACA,gBAAgB,CAAC;AAAA,MACjB,YAAY;AAAA,MACZ,cAAc;AAAA,MACd,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,IACpB,MACG;AACH,YAAM,SAAS,mBAAmB,OAAO;AACzC,YAAM,YAAY,cAAc,OAAO;AAEvC,aAAO,IAAI,oCAAoC,SAAS,EAAE;AAC1D,aAAO,IAAI,MAAM;AAEjB,YAAM,EAAE,MAAM,gBAAgB,MAAM,IAAI,MAAM,aAAa;AAAA,QACzD,OAAO,OAAO,cAAc,SAAS;AAAA,QACrC;AAAA,QACA,QAAQ,QAAQ,UAAU,UAAU;AAAA,QACpC;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAED,UAAI,OAAO;AACT,4BAAoB,SAAS,UAAU,YAAY,QAAQ,KAAK;AAAA,MAClE;AAEA,aAAO;AAAA,IACT;AAAA,IACA,CAAC,UAAU,KAAK,GAAG,OACjB,SACA,WAKG;AACH,YAAM,IAAI,OAAO,KAAK;AACtB,YAAM,OAAO,OAAO,QAAQ;AAC5B,YAAM,SAAS,OAAO;AACtB,YAAM,YAAY;AAClB,aAAO,IAAI,+BAA+B,SAAS,EAAE;AAErD,YAAM,UAAU,WAAW,OAAO;AAClC,YAAM,SAAS,UAAU,OAAO;AAEhC,UAAI,CAAC,QAAQ;AACX,cAAM,IAAI,MAAM,+BAA+B;AAAA,MACjD;AAEA,UAAI;AACF,cAAM,WAAW,MAAM,MAAM,GAAG,OAAO,uBAAuB;AAAA,UAC5D,QAAQ;AAAA,UACR,SAAS;AAAA,YACP,eAAe,UAAU,MAAM;AAAA,YAC/B,gBAAgB;AAAA,UAClB;AAAA,UACA,MAAM,KAAK,UAAU;AAAA,YACnB;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AAED,cAAM,gBAAgB,SAAS,MAAM;AACrC,cAAM,kBAAkB,MAAM,cAAc,KAAK;AAEjD,YAAI,CAAC,SAAS,IAAI;AAChB,gBAAM,IAAI,MAAM,6BAA6B,SAAS,UAAU,EAAE;AAAA,QACpE;AAEA,cAAM,OAAO,MAAM,SAAS,KAAK;AACjC,cAAM,YAAY;AAElB,eAAO,UAAU;AAAA,MACnB,SAAS,OAAgB;AACvB,cAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,cAAM;AAAA,MACR;AAAA,IACF;AAAA,IACA,CAAC,UAAU,iBAAiB,GAAG,OAC7B,SACA,WACG;AACH,UAAI;AACJ,UAAI;AACJ,YAAM,YAAY,yBAAyB,OAAO;AAClD,aAAO,IAAI,2CAA2C,SAAS,EAAE;AACjE,YAAM,YAAY,OAAO;AAAA,QACvB,WAAW,SAAS,uCAAuC,MAAM,KAAK;AAAA,QACtE;AAAA,MACF;AAEA,UAAI,OAAO,WAAW,UAAU;AAC9B,mBAAW;AACX,qBAAa;AAAA,MACf,OAAO;AACL,mBAAW,OAAO;AAClB,qBACE,OAAO,UACP;AAAA,MACJ;AAEA,YAAM,WAAW;AAAA,QACf;AAAA,UACE,MAAM;AAAA,UACN,SAAS;AAAA,YACP,EAAE,MAAM,QAAQ,MAAM,WAAW;AAAA,YACjC,EAAE,MAAM,aAAa,WAAW,EAAE,KAAK,SAAS,EAAE;AAAA,UACpD;AAAA,QACF;AAAA,MACF;AAEA,YAAM,UAAU,WAAW,OAAO;AAClC,YAAM,SAAS,UAAU,OAAO;AAEhC,UAAI,CAAC,QAAQ;AACX,eAAO,MAAM,wBAAwB;AACrC,eAAO;AAAA,UACL,OAAO;AAAA,UACP,aAAa;AAAA,QACf;AAAA,MACF;AAEA,UAAI;AACF,cAAM,cAAmC;AAAA,UACvC,OAAO;AAAA,UACP;AAAA,UACA,YAAY;AAAA,QACd;AAEA,cAAM,WAAW,MAAM,MAAM,GAAG,OAAO,qBAAqB;AAAA,UAC1D,QAAQ;AAAA,UACR,SAAS;AAAA,YACP,gBAAgB;AAAA,YAChB,eAAe,UAAU,MAAM;AAAA,UACjC;AAAA,UACA,MAAM,KAAK,UAAU,WAAW;AAAA,QAClC,CAAC;AAED,cAAM,gBAAgB,SAAS,MAAM;AACrC,cAAM,kBAAkB,MAAM,cAAc,KAAK;AAEjD,YAAI,CAAC,SAAS,IAAI;AAChB,gBAAM,IAAI,MAAM,qBAAqB,SAAS,MAAM,EAAE;AAAA,QACxD;AAEA,cAAM,SAAkB,MAAM,SAAS,KAAK;AAc5C,cAAM,cAAc;AACpB,cAAM,UAAU,YAAY,UAAU,CAAC,GAAG,SAAS;AAEnD,gBAAQ,IAAI,0BAA0B,OAAO;AAE7C,YAAI,YAAY,OAAO;AACrB;AAAA,YACE;AAAA,YAC
A,UAAU;AAAA,YACV,OAAO,WAAW,WAAW,SAAS,OAAO,UAAU;AAAA,YACvD;AAAA,cACE,cAAc,YAAY,MAAM;AAAA,cAChC,kBAAkB,YAAY,MAAM;AAAA,cACpC,aAAa,YAAY,MAAM;AAAA,YACjC;AAAA,UACF;AAAA,QACF;AAEA,YAAI,CAAC,SAAS;AACZ,iBAAO;AAAA,YACL,OAAO;AAAA,YACP,aAAa;AAAA,UACf;AAAA,QACF;AAEA,gBAAQ,IAAI,oCAAoC,OAAO;AAGvD,cAAM,iBACJ,OAAO,WAAW,YAClB,OAAO,UACP,OAAO,WACL;AAGJ,YAAI,gBAAgB;AAClB,iBAAO;AAAA,QACT;AAGA,cAAM,aAAa,QAAQ,MAAM,2BAA2B;AAC5D,cAAM,QAAQ,aAAa,CAAC,GAAG,KAAK,KAAK;AACzC,cAAM,cAAc,QAAQ,QAAQ,6BAA6B,EAAE,EAAE,KAAK;AAE1E,cAAM,kBAAkB,EAAE,OAAO,YAAY;AAC7C,eAAO;AAAA,MACT,SAAS,OAAgB;AACvB,cAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,eAAO,MAAM,0BAA0B,OAAO,EAAE;AAChD,eAAO;AAAA,UACL,OAAO;AAAA,UACP,aAAa,UAAU,OAAO;AAAA,QAChC;AAAA,MACF;AAAA,IACF;AAAA,IACA,CAAC,UAAU,aAAa,GAAG,OAAO,SAAwB,gBAAwB;AAChF,aAAO,IAAI,eAAe,WAAW;AAErC,YAAM,YAAY;AAClB,aAAO,IAAI,uCAAuC,SAAS,EAAE;AAE7D,YAAM,UAAU,WAAW,OAAO;AAClC,YAAM,SAAS,UAAU,OAAO;AAEhC,UAAI,CAAC,QAAQ;AACX,cAAM,IAAI,MAAM,qDAAqD;AAAA,MACvE;AACA,UAAI,CAAC,eAAe,YAAY,WAAW,GAAG;AAC5C,cAAM,IAAI,MAAM,oDAAoD;AAAA,MACtE;AAEA,YAAM,WAAW,IAAI,SAAS;AAC9B,eAAS,OAAO,QAAQ,IAAI,KAAK,CAAC,WAAW,CAAC,GAAG,eAAe;AAChE,eAAS,OAAO,SAAS,WAAW;AAEpC,UAAI;AACF,cAAM,WAAW,MAAM,MAAM,GAAG,OAAO,yBAAyB;AAAA,UAC9D,QAAQ;AAAA,UACR,SAAS;AAAA,YACP,eAAe,UAAU,MAAM;AAAA,UACjC;AAAA,UACA,MAAM;AAAA,QACR,CAAC;AAED,cAAM,gBAAgB,SAAS,MAAM;AACrC,cAAM,kBAAkB,MAAM,cAAc,KAAK;AAEjD,eAAO,IAAI,YAAY,QAAQ;AAE/B,YAAI,CAAC,SAAS,IAAI;AAChB,gBAAM,IAAI,MAAM,+BAA+B,SAAS,UAAU,EAAE;AAAA,QACtE;AAEA,cAAM,OAAQ,MAAM,SAAS,KAAK;AAClC,cAAM,gBAAgB,KAAK;AAE3B,eAAO;AAAA,MACT,SAAS,OAAgB;AACvB,cAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,cAAM;AAAA,MACR;AAAA,IACF;AAAA,IACA,CAAC,UAAU,cAAc,GAAG,OAAO,SAAwB,SAAiB;AAC1E,YAAM,eAAe,WAAW,SAAS,oBAAoB,iBAAiB;AAC9E,aAAO,IAAI,wCAAwC,YAAY,EAAE;AACjE,UAAI;AACF,cAAM,eAAe,MAAM,kBAAkB,SAAS,IAAI;AAC1D,eAAO;AAAA,MACT,SAAS,OAAgB;AACvB,cAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,cAAM;AAAA,MACR;AAAA,IACF;AAAA,IACA,CAAC,UAAU,YAAY,GAAG,OAAO,SAAwB,WAAmC;AAC1F,aAAO,0BAA0B,SAAS,QAAQ,UAAU,cAAc,aAAa;AAAA,IACzF;AAAA,IACA,CAAC,UAAU,YAAY,GAAG,OAAO,SAAwB,WAAmC;AAC1F,aAAO,0BAA0B,SAAS,QAAQ,UAAU,cAAc,aAAa;AAAA,IACzF;AAAA,EACF;AAAA,EACA,OAAO;AAAA,IACL;AAAA,MACE,MAAM;AAAA,MACN,OAAO;AAAA,QACL;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AACpC,kBAAM,UAAU,WAAW,OAAO;AAClC,kBAAM,WAAW,MAAM,MAAM,GAAG,OAAO,WAAW;AAAA,cAChD,SAAS;AAAA,gBACP,eAAe,UAAU,UAAU,OAAO,CAAC;AAAA,cAC7C;AAAA,YACF,CAAC;AACD,kBAAM,OAAO,MAAM,SAAS,KAAK;AACjC,mBAAO,IAAI,qBAAsB,MAA+B,MAAM,UAAU,KAAK;AACrF,gBAAI,CAAC,SAAS,IAAI;AAChB,oBAAM,IAAI,MAAM,sCAAsC,SAAS,UAAU,EAAE;AAAA,YAC7E;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AACpC,gBAAI;AACF,oBAAM,YAAY,MAAM,QAAQ,SAAS,UAAU,gBAAgB;AAAA,gBACjE,MAAM;AAAA,cACR,CAAC;AACD,qBAAO,IAAI,aAAa,SAAS;AAAA,YACnC,SAAS,OAAgB;AACvB,oBAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,qBAAO,MAAM,iCAAiC,OAAO,EAAE;AACvD,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AACpC,gBAAI;AACF,oBAAM,OAAO,MAAM,QAAQ,SAAS,UAAU,YAAY;AAAA,gBACxD,QAAQ;AAAA,cACV,CAAC;AACD,kBAAI,KAAK,WAAW,GAAG;AACrB,sBAAM,IAAI,MAAM,yBAAyB;AAAA,cAC3C;AACA,qBAAO,IAAI,mCAAmC,IAAI;AAAA,YACpD,SAAS,OAAgB;AACvB,oBAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,qBAAO,MAAM,6BAA6B,OAAO,EAAE;AACnD,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AACpC,gBAAI;AACF,oBAAM,OAAO,MAAM,QAAQ,SAAS,UAAU,YAAY;AAAA,gBACxD,QAAQ;AAAA,cACV,CAAC;AACD,kBAAI,KAAK,WAAW,GAAG;AACrB,sBAAM,IAAI,MAAM,yBAAyB;AAAA,cAC3C;AACA,qBAAO,IAAI,mCAAmC,IAAI;AAAA,YACpD,SAAS,OAAgB;AACvB,oBAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,qBAAO,MAAM,6BAA6B,OAAO,E
AAE;AACnD,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AACpC,mBAAO,IAAI,8BAA8B;AACzC,gBAAI;AACF,oBAAM,QAAQ,MAAM,QAAQ,SAAS,UAAU,OAAO;AAAA,gBACpD,QAAQ;AAAA,gBACR,GAAG;AAAA,gBACH,MAAM;AAAA,cACR,CAAC;AACD,qBAAO,IAAI,yCAAyC,KAAK;AAAA,YAC3D,SAAS,OAAgB;AACvB,oBAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,qBAAO,MAAM,mCAAmC,OAAO,EAAE;AACzD,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AACpC,gBAAI;AACF,qBAAO,IAAI,+BAA+B;AAC1C,kBAAI;AACF,sBAAM,SAAS,MAAM,QAAQ;AAAA,kBAC3B,UAAU;AAAA,kBACV;AAAA,gBACF;AAEA,oBACE,UACA,OAAO,WAAW,YAClB,WAAW,UACX,iBAAiB,QACjB;AACA,yBAAO,IAAI,sBAAsB,MAAM;AAAA,gBACzC,OAAO;AACL,yBAAO,MAAM,4CAA4C,MAAM;AAAA,gBACjE;AAAA,cACF,SAAS,GAAY;AACnB,sBAAM,UAAU,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC;AACzD,uBAAO,MAAM,oCAAoC,OAAO,EAAE;AAAA,cAC5D;AAAA,YACF,SAAS,GAAY;AACnB,oBAAM,UAAU,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC;AACzD,qBAAO,MAAM,2CAA2C,OAAO,EAAE;AAAA,YACnE;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AACpC,mBAAO,IAAI,2BAA2B;AACtC,gBAAI;AACF,oBAAM,WAAW,MAAM;AAAA,gBACrB;AAAA,cACF;AACA,oBAAM,cAAc,MAAM,SAAS,YAAY;AAC/C,oBAAM,gBAAgB,MAAM,QAAQ;AAAA,gBAClC,UAAU;AAAA,gBACV,OAAO,KAAK,IAAI,WAAW,WAAW,CAAC;AAAA,cACzC;AACA,qBAAO,IAAI,sCAAsC,aAAa;AAAA,YAChE,SAAS,OAAgB;AACvB,oBAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,qBAAO,MAAM,gCAAgC,OAAO,EAAE;AACtD,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AACpC,kBAAM,SAAS;AACf,kBAAM,SAAS,MAAM,QAAQ,SAAS,UAAU,uBAAuB,EAAE,OAAO,CAAC;AACjF,gBAAI,CAAC,MAAM,QAAQ,MAAM,KAAK,OAAO,WAAW,GAAG;AACjD,oBAAM,IAAI,MAAM,6DAA6D;AAAA,YAC/E;AACA,mBAAO,IAAI,qBAAqB,MAAM;AAAA,UACxC;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AACpC,kBAAM,SAAS;AACf,kBAAM,SAAS,MAAM,QAAQ,SAAS,UAAU,uBAAuB,EAAE,OAAO,CAAC;AACjF,kBAAM,cAAc,MAAM,QAAQ,SAAS,UAAU,uBAAuB,EAAE,OAAO,CAAC;AACtF,gBAAI,gBAAgB,QAAQ;AAC1B,oBAAM,IAAI;AAAA,gBACR,mDAAmD,MAAM,WAAW,WAAW;AAAA,cACjF;AAAA,YACF;AACA,mBAAO,IAAI,iBAAiB,WAAW;AAAA,UACzC;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AACpC,gBAAI;AACF,oBAAM,OAAO;AACb,oBAAM,WAAW,MAAM,kBAAkB,SAAS,IAAI;AACtD,kBAAI,CAAC,UAAU;AACb,sBAAM,IAAI,MAAM,2BAA2B;AAAA,cAC7C;AACA,qBAAO,IAAI,+BAA+B;AAAA,YAC5C,SAAS,OAAgB;AACvB,oBAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,qBAAO,MAAM,wCAAwC,OAAO,EAAE;AAC9D,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AACA,IAAO,gBAAQ;","names":[]}
1
+ {"version":3,"sources":["../src/index.ts"],"sourcesContent":["import { createOpenAI } from '@ai-sdk/openai';\nimport type {\n DetokenizeTextParams,\n GenerateTextParams,\n IAgentRuntime,\n ImageDescriptionParams,\n ModelTypeName,\n ObjectGenerationParams,\n Plugin,\n TextEmbeddingParams,\n TokenizeTextParams,\n} from '@elizaos/core';\nimport {\n EventType,\n logger,\n ModelType,\n safeReplacer,\n ServiceType,\n VECTOR_DIMS,\n} from '@elizaos/core';\nimport {\n generateObject,\n generateText,\n JSONParseError,\n type JSONValue,\n type LanguageModelUsage,\n} from 'ai';\nimport { encodingForModel, type TiktokenModel } from 'js-tiktoken';\nimport { fetch, FormData } from 'undici';\n\n/**\n * Retrieves a configuration setting from the runtime, falling back to environment variables or a default value if not found.\n *\n * @param key - The name of the setting to retrieve.\n * @param defaultValue - The value to return if the setting is not found in the runtime or environment.\n * @returns The resolved setting value, or {@link defaultValue} if not found.\n */\nfunction getSetting(\n runtime: IAgentRuntime,\n key: string,\n defaultValue?: string\n): string | undefined {\n return runtime.getSetting(key) ?? process.env[key] ?? defaultValue;\n}\n\n/**\n * Retrieves the OpenAI API base URL from runtime settings, environment variables, or defaults, using provider-aware resolution.\n *\n * @returns The resolved base URL for OpenAI API requests.\n */\nfunction getBaseURL(runtime: IAgentRuntime): string {\n const baseURL = getSetting(\n runtime,\n 'OPENAI_BASE_URL',\n 'https://api.openai.com/v1'\n ) as string;\n logger.debug(`[OpenAI] Default base URL: ${baseURL}`);\n return baseURL;\n}\n\n/**\n * Retrieves the OpenAI API base URL for embeddings, falling back to the general base URL.\n *\n * @returns The resolved base URL for OpenAI embedding requests.\n */\nfunction getEmbeddingBaseURL(runtime: IAgentRuntime): string {\n const embeddingURL = getSetting(runtime, 'OPENAI_EMBEDDING_URL');\n if (embeddingURL) {\n logger.debug(`[OpenAI] Using specific embedding base URL: ${embeddingURL}`);\n return embeddingURL;\n }\n logger.debug('[OpenAI] Falling back to general base URL for embeddings.');\n return getBaseURL(runtime);\n}\n\n/**\n * Helper function to get the API key for OpenAI\n *\n * @param runtime The runtime context\n * @returns The configured API key\n */\nfunction getApiKey(runtime: IAgentRuntime): string | undefined {\n return getSetting(runtime, 'OPENAI_API_KEY');\n}\n\n/**\n * Helper function to get the embedding API key for OpenAI, falling back to the general API key if not set.\n *\n * @param runtime The runtime context\n * @returns The configured API key\n */\nfunction getEmbeddingApiKey(runtime: IAgentRuntime): string | undefined {\n const embeddingApiKey = getSetting(runtime, 'OPENAI_EMBEDDING_API_KEY');\n if (embeddingApiKey) {\n logger.debug(\n `[OpenAI] Using specific embedding API key: ${embeddingApiKey}`\n );\n return embeddingApiKey;\n }\n logger.debug('[OpenAI] Falling back to general API key for embeddings.');\n return getApiKey(runtime);\n}\n\n/**\n * Helper function to get the small model name with fallbacks\n *\n * @param runtime The runtime context\n * @returns The configured small model name\n */\nfunction getSmallModel(runtime: IAgentRuntime): string {\n return (\n getSetting(runtime, 'OPENAI_SMALL_MODEL') ??\n (getSetting(runtime, 'SMALL_MODEL', 'gpt-4o-mini') as string)\n );\n}\n\n/**\n * Helper function to get the large model name with fallbacks\n *\n * @param runtime The 
runtime context\n * @returns The configured large model name\n */\nfunction getLargeModel(runtime: IAgentRuntime): string {\n return (\n getSetting(runtime, 'OPENAI_LARGE_MODEL') ??\n (getSetting(runtime, 'LARGE_MODEL', 'gpt-4o') as string)\n );\n}\n\n/**\n * Helper function to get the image description model name with fallbacks\n *\n * @param runtime The runtime context\n * @returns The configured image description model name\n */\nfunction getImageDescriptionModel(runtime: IAgentRuntime): string {\n return (\n getSetting(runtime, 'OPENAI_IMAGE_DESCRIPTION_MODEL', 'gpt-4o-mini') ??\n 'gpt-4o-mini'\n );\n}\n\n/**\n * Helper function to get experimental telemetry setting\n *\n * @param runtime The runtime context\n * @returns Whether experimental telemetry is enabled\n */\nfunction getExperimentalTelemetry(runtime: IAgentRuntime): boolean {\n const setting = getSetting(runtime, 'OPENAI_EXPERIMENTAL_TELEMETRY', 'false');\n // Convert to string and check for truthy values\n const normalizedSetting = String(setting).toLowerCase();\n const result = normalizedSetting === 'true';\n logger.debug(\n `[OpenAI] Experimental telemetry in function: \"${setting}\" (type: ${typeof setting}, normalized: \"${normalizedSetting}\", result: ${result})`\n );\n return result;\n}\n\n/**\n * Create an OpenAI client with proper configuration\n *\n * @param runtime The runtime context\n * @returns Configured OpenAI client\n */\nfunction createOpenAIClient(runtime: IAgentRuntime) {\n return createOpenAI({\n apiKey: getApiKey(runtime),\n baseURL: getBaseURL(runtime),\n });\n}\n\n/**\n * Asynchronously tokenizes the given text based on the specified model and prompt.\n *\n * @param {ModelTypeName} model - The type of model to use for tokenization.\n * @param {string} prompt - The text prompt to tokenize.\n * @returns {number[]} - An array of tokens representing the encoded prompt.\n */\nasync function tokenizeText(model: ModelTypeName, prompt: string) {\n const modelName =\n model === ModelType.TEXT_SMALL\n ? (process.env.OPENAI_SMALL_MODEL ??\n process.env.SMALL_MODEL ??\n 'gpt-4o-mini')\n : (process.env.LARGE_MODEL ?? 'gpt-4o');\n const encoding = encodingForModel(modelName as TiktokenModel);\n const tokens = encoding.encode(prompt);\n return tokens;\n}\n\n/**\n * Detokenize a sequence of tokens back into text using the specified model.\n *\n * @param {ModelTypeName} model - The type of model to use for detokenization.\n * @param {number[]} tokens - The sequence of tokens to detokenize.\n * @returns {string} The detokenized text.\n */\nasync function detokenizeText(model: ModelTypeName, tokens: number[]) {\n const modelName =\n model === ModelType.TEXT_SMALL\n ? (process.env.OPENAI_SMALL_MODEL ??\n process.env.SMALL_MODEL ??\n 'gpt-4o-mini')\n : (process.env.OPENAI_LARGE_MODEL ?? process.env.LARGE_MODEL ?? 'gpt-4o');\n const encoding = encodingForModel(modelName as TiktokenModel);\n return encoding.decode(tokens);\n}\n\n/**\n * Helper function to generate objects using specified model type\n */\nasync function generateObjectByModelType(\n runtime: IAgentRuntime,\n params: ObjectGenerationParams,\n modelType: string,\n getModelFn: (runtime: IAgentRuntime) => string\n): Promise<JSONValue> {\n const openai = createOpenAIClient(runtime);\n const modelName = getModelFn(runtime);\n logger.log(`[OpenAI] Using ${modelType} model: ${modelName}`);\n const temperature = params.temperature ?? 
0;\n const schemaPresent = !!params.schema;\n\n if (schemaPresent) {\n logger.info(\n `Using ${modelType} without schema validation (schema provided but output=no-schema)`\n );\n }\n\n try {\n const { object, usage } = await generateObject({\n model: openai.languageModel(modelName),\n output: 'no-schema',\n prompt: params.prompt,\n temperature: temperature,\n experimental_repairText: getJsonRepairFunction(),\n });\n\n if (usage) {\n emitModelUsageEvent(\n runtime,\n modelType as ModelTypeName,\n params.prompt,\n usage\n );\n }\n return object;\n } catch (error: unknown) {\n if (error instanceof JSONParseError) {\n logger.error(`[generateObject] Failed to parse JSON: ${error.message}`);\n\n const repairFunction = getJsonRepairFunction();\n const repairedJsonString = await repairFunction({\n text: error.text,\n error,\n });\n\n if (repairedJsonString) {\n try {\n const repairedObject = JSON.parse(repairedJsonString);\n logger.info('[generateObject] Successfully repaired JSON.');\n return repairedObject;\n } catch (repairParseError: unknown) {\n const message =\n repairParseError instanceof Error\n ? repairParseError.message\n : String(repairParseError);\n logger.error(\n `[generateObject] Failed to parse repaired JSON: ${message}`\n );\n throw repairParseError;\n }\n } else {\n logger.error('[generateObject] JSON repair failed.');\n throw error;\n }\n } else {\n const message = error instanceof Error ? error.message : String(error);\n logger.error(`[generateObject] Unknown error: ${message}`);\n throw error;\n }\n }\n}\n\n/**\n * Returns a function to repair JSON text\n */\nfunction getJsonRepairFunction(): (params: {\n text: string;\n error: unknown;\n}) => Promise<string | null> {\n return async ({ text, error }: { text: string; error: unknown }) => {\n try {\n if (error instanceof JSONParseError) {\n const cleanedText = text.replace(/```json\\n|\\n```|```/g, '');\n JSON.parse(cleanedText);\n return cleanedText;\n }\n return null;\n } catch (jsonError: unknown) {\n const message =\n jsonError instanceof Error ? 
jsonError.message : String(jsonError);\n logger.warn(`Failed to repair JSON text: ${message}`);\n return null;\n }\n };\n}\n\n/**\n * Emits a model usage event\n * @param runtime The runtime context\n * @param type The model type\n * @param prompt The prompt used\n * @param usage The LLM usage data\n */\nfunction emitModelUsageEvent(\n runtime: IAgentRuntime,\n type: ModelTypeName,\n prompt: string,\n usage: LanguageModelUsage\n) {\n runtime.emitEvent(EventType.MODEL_USED, {\n provider: 'openai',\n type,\n prompt,\n tokens: {\n prompt: usage.promptTokens,\n completion: usage.completionTokens,\n total: usage.totalTokens,\n },\n });\n}\n\n/**\n * function for text-to-speech\n */\nasync function fetchTextToSpeech(runtime: IAgentRuntime, text: string) {\n const apiKey = getApiKey(runtime);\n const model = getSetting(runtime, 'OPENAI_TTS_MODEL', 'gpt-4o-mini-tts');\n const voice = getSetting(runtime, 'OPENAI_TTS_VOICE', 'nova');\n const instructions = getSetting(runtime, 'OPENAI_TTS_INSTRUCTIONS', '');\n const baseURL = getBaseURL(runtime);\n\n try {\n const res = await fetch(`${baseURL}/audio/speech`, {\n method: 'POST',\n headers: {\n Authorization: `Bearer ${apiKey}`,\n 'Content-Type': 'application/json',\n },\n body: JSON.stringify({\n model,\n voice,\n input: text,\n ...(instructions && { instructions }),\n }),\n });\n\n if (!res.ok) {\n const err = await res.text();\n throw new Error(`OpenAI TTS error ${res.status}: ${err}`);\n }\n\n return res.body;\n } catch (err: unknown) {\n const message = err instanceof Error ? err.message : String(err);\n throw new Error(`Failed to fetch speech from OpenAI TTS: ${message}`);\n }\n}\n\n/**\n * Defines the OpenAI plugin with its name, description, and configuration options.\n * @type {Plugin}\n */\nexport const openaiPlugin: Plugin = {\n name: 'openai',\n description: 'OpenAI plugin',\n config: {\n OPENAI_API_KEY: process.env.OPENAI_API_KEY,\n OPENAI_BASE_URL: process.env.OPENAI_BASE_URL,\n OPENAI_SMALL_MODEL: process.env.OPENAI_SMALL_MODEL,\n OPENAI_LARGE_MODEL: process.env.OPENAI_LARGE_MODEL,\n SMALL_MODEL: process.env.SMALL_MODEL,\n LARGE_MODEL: process.env.LARGE_MODEL,\n OPENAI_EMBEDDING_MODEL: process.env.OPENAI_EMBEDDING_MODEL,\n OPENAI_EMBEDDING_API_KEY: process.env.OPENAI_EMBEDDING_API_KEY,\n OPENAI_EMBEDDING_URL: process.env.OPENAI_EMBEDDING_URL,\n OPENAI_EMBEDDING_DIMENSIONS: process.env.OPENAI_EMBEDDING_DIMENSIONS,\n OPENAI_IMAGE_DESCRIPTION_MODEL: process.env.OPENAI_IMAGE_DESCRIPTION_MODEL,\n OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS:\n process.env.OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS,\n OPENAI_EXPERIMENTAL_TELEMETRY: process.env.OPENAI_EXPERIMENTAL_TELEMETRY,\n },\n async init(_config, runtime) {\n // do check in the background\n new Promise<void>(async (resolve) => {\n resolve();\n try {\n if (!getApiKey(runtime)) {\n logger.warn(\n 'OPENAI_API_KEY is not set in environment - OpenAI functionality will be limited'\n );\n return;\n }\n try {\n const baseURL = getBaseURL(runtime);\n const response = await fetch(`${baseURL}/models`, {\n headers: { Authorization: `Bearer ${getApiKey(runtime)}` },\n });\n if (!response.ok) {\n logger.warn(\n `OpenAI API key validation failed: ${response.statusText}`\n );\n logger.warn(\n 'OpenAI functionality will be limited until a valid API key is provided'\n );\n } else {\n logger.log('OpenAI API key validated successfully');\n }\n } catch (fetchError: unknown) {\n const message =\n fetchError instanceof Error\n ? 
fetchError.message\n : String(fetchError);\n logger.warn(`Error validating OpenAI API key: ${message}`);\n logger.warn(\n 'OpenAI functionality will be limited until a valid API key is provided'\n );\n }\n } catch (error: unknown) {\n const message =\n (error as { errors?: Array<{ message: string }> })?.errors\n ?.map((e) => e.message)\n .join(', ') ||\n (error instanceof Error ? error.message : String(error));\n logger.warn(\n `OpenAI plugin configuration issue: ${message} - You need to configure the OPENAI_API_KEY in your environment variables`\n );\n }\n });\n },\n\n models: {\n [ModelType.TEXT_EMBEDDING]: async (\n runtime: IAgentRuntime,\n params: TextEmbeddingParams | string | null\n ): Promise<number[]> => {\n const embeddingModelName = getSetting(\n runtime,\n 'OPENAI_EMBEDDING_MODEL',\n 'text-embedding-3-small'\n );\n const embeddingDimension = Number.parseInt(\n getSetting(runtime, 'OPENAI_EMBEDDING_DIMENSIONS', '1536') || '1536',\n 10\n ) as (typeof VECTOR_DIMS)[keyof typeof VECTOR_DIMS];\n\n if (!Object.values(VECTOR_DIMS).includes(embeddingDimension)) {\n const errorMsg = `Invalid embedding dimension: ${embeddingDimension}. Must be one of: ${Object.values(VECTOR_DIMS).join(', ')}`;\n logger.error(errorMsg);\n throw new Error(errorMsg);\n }\n if (params === null) {\n logger.debug('Creating test embedding for initialization');\n const testVector = Array(embeddingDimension).fill(0);\n testVector[0] = 0.1;\n return testVector;\n }\n let text: string;\n if (typeof params === 'string') {\n text = params;\n } else if (typeof params === 'object' && params.text) {\n text = params.text;\n } else {\n logger.warn('Invalid input format for embedding');\n const fallbackVector = Array(embeddingDimension).fill(0);\n fallbackVector[0] = 0.2;\n return fallbackVector;\n }\n if (!text.trim()) {\n logger.warn('Empty text for embedding');\n const emptyVector = Array(embeddingDimension).fill(0);\n emptyVector[0] = 0.3;\n return emptyVector;\n }\n\n const embeddingBaseURL = getEmbeddingBaseURL(runtime);\n const apiKey = getEmbeddingApiKey(runtime);\n\n if (!apiKey) {\n throw new Error('OpenAI API key not configured');\n }\n\n try {\n const response = await fetch(`${embeddingBaseURL}/embeddings`, {\n method: 'POST',\n headers: {\n Authorization: `Bearer ${apiKey}`,\n 'Content-Type': 'application/json',\n },\n body: JSON.stringify({\n model: embeddingModelName,\n input: text,\n }),\n });\n\n const responseClone = response.clone();\n const rawResponseBody = await responseClone.text();\n\n if (!response.ok) {\n logger.error(\n `OpenAI API error: ${response.status} - ${response.statusText}`\n );\n const errorVector = Array(embeddingDimension).fill(0);\n errorVector[0] = 0.4;\n return errorVector;\n }\n\n const data = (await response.json()) as {\n data: [{ embedding: number[] }];\n usage?: { prompt_tokens: number; total_tokens: number };\n };\n\n if (!data?.data?.[0]?.embedding) {\n logger.error('API returned invalid structure');\n const errorVector = Array(embeddingDimension).fill(0);\n errorVector[0] = 0.5;\n return errorVector;\n }\n\n const embedding = data.data[0].embedding;\n\n if (data.usage) {\n const usage = {\n promptTokens: data.usage.prompt_tokens,\n completionTokens: 0,\n totalTokens: data.usage.total_tokens,\n };\n\n emitModelUsageEvent(runtime, ModelType.TEXT_EMBEDDING, text, usage);\n }\n\n logger.log(`Got valid embedding with length ${embedding.length}`);\n return embedding;\n } catch (error: unknown) {\n const message = error instanceof Error ? 
error.message : String(error);\n logger.error(`Error generating embedding: ${message}`);\n const errorVector = Array(embeddingDimension).fill(0);\n errorVector[0] = 0.6;\n return errorVector;\n }\n },\n [ModelType.TEXT_TOKENIZER_ENCODE]: async (\n _runtime,\n { prompt, modelType = ModelType.TEXT_LARGE }: TokenizeTextParams\n ) => {\n return await tokenizeText(modelType ?? ModelType.TEXT_LARGE, prompt);\n },\n [ModelType.TEXT_TOKENIZER_DECODE]: async (\n _runtime,\n { tokens, modelType = ModelType.TEXT_LARGE }: DetokenizeTextParams\n ) => {\n return await detokenizeText(modelType ?? ModelType.TEXT_LARGE, tokens);\n },\n [ModelType.TEXT_SMALL]: async (\n runtime: IAgentRuntime,\n { prompt, stopSequences = [] }: GenerateTextParams\n ) => {\n const temperature = 0.7;\n const frequency_penalty = 0.7;\n const presence_penalty = 0.7;\n const max_response_length = 8192;\n\n const openai = createOpenAIClient(runtime);\n const modelName = getSmallModel(runtime);\n const experimentalTelemetry = getExperimentalTelemetry(runtime);\n\n logger.log(`[OpenAI] Using TEXT_SMALL model: ${modelName}`);\n logger.log(prompt);\n\n const { text: openaiResponse, usage } = await generateText({\n model: openai.languageModel(modelName),\n prompt: prompt,\n system: runtime.character.system ?? undefined,\n temperature: temperature,\n maxTokens: max_response_length,\n frequencyPenalty: frequency_penalty,\n presencePenalty: presence_penalty,\n stopSequences: stopSequences,\n experimental_telemetry: {\n isEnabled: experimentalTelemetry,\n },\n });\n\n if (usage) {\n emitModelUsageEvent(runtime, ModelType.TEXT_SMALL, prompt, usage);\n }\n\n return openaiResponse;\n },\n [ModelType.TEXT_LARGE]: async (\n runtime: IAgentRuntime,\n {\n prompt,\n stopSequences = [],\n maxTokens = 8192,\n temperature = 0.7,\n frequencyPenalty = 0.7,\n presencePenalty = 0.7,\n }: GenerateTextParams\n ) => {\n const openai = createOpenAIClient(runtime);\n const modelName = getLargeModel(runtime);\n const experimentalTelemetry = getExperimentalTelemetry(runtime);\n\n logger.log(`[OpenAI] Using TEXT_LARGE model: ${modelName}`);\n logger.log(prompt);\n\n const { text: openaiResponse, usage } = await generateText({\n model: openai.languageModel(modelName),\n prompt: prompt,\n system: runtime.character.system ?? 
undefined,\n temperature: temperature,\n maxTokens: maxTokens,\n frequencyPenalty: frequencyPenalty,\n presencePenalty: presencePenalty,\n stopSequences: stopSequences,\n experimental_telemetry: {\n isEnabled: experimentalTelemetry,\n },\n });\n\n if (usage) {\n emitModelUsageEvent(runtime, ModelType.TEXT_LARGE, prompt, usage);\n }\n\n return openaiResponse;\n },\n [ModelType.IMAGE]: async (\n runtime: IAgentRuntime,\n params: {\n prompt: string;\n n?: number;\n size?: string;\n }\n ) => {\n const n = params.n || 1;\n const size = params.size || '1024x1024';\n const prompt = params.prompt;\n const modelName = 'dall-e-3'; // Default DALL-E model\n logger.log(`[OpenAI] Using IMAGE model: ${modelName}`);\n\n const baseURL = getBaseURL(runtime);\n const apiKey = getApiKey(runtime);\n\n if (!apiKey) {\n throw new Error('OpenAI API key not configured');\n }\n\n try {\n const response = await fetch(`${baseURL}/images/generations`, {\n method: 'POST',\n headers: {\n Authorization: `Bearer ${apiKey}`,\n 'Content-Type': 'application/json',\n },\n body: JSON.stringify({\n prompt: prompt,\n n: n,\n size: size,\n }),\n });\n\n const responseClone = response.clone();\n const rawResponseBody = await responseClone.text();\n\n if (!response.ok) {\n throw new Error(`Failed to generate image: ${response.statusText}`);\n }\n\n const data = await response.json();\n const typedData = data as { data: { url: string }[] };\n\n return typedData.data;\n } catch (error: unknown) {\n const message = error instanceof Error ? error.message : String(error);\n throw error;\n }\n },\n [ModelType.IMAGE_DESCRIPTION]: async (\n runtime: IAgentRuntime,\n params: ImageDescriptionParams | string\n ) => {\n let imageUrl: string;\n let promptText: string | undefined;\n const modelName = getImageDescriptionModel(runtime);\n logger.log(`[OpenAI] Using IMAGE_DESCRIPTION model: ${modelName}`);\n const maxTokens = Number.parseInt(\n getSetting(runtime, 'OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS', '8192') ||\n '8192',\n 10\n );\n\n if (typeof params === 'string') {\n imageUrl = params;\n promptText =\n 'Please analyze this image and provide a title and detailed description.';\n } else {\n imageUrl = params.imageUrl;\n promptText =\n params.prompt ||\n 'Please analyze this image and provide a title and detailed description.';\n }\n\n const messages = [\n {\n role: 'user',\n content: [\n { type: 'text', text: promptText },\n { type: 'image_url', image_url: { url: imageUrl } },\n ],\n },\n ];\n\n const baseURL = getBaseURL(runtime);\n const apiKey = getApiKey(runtime);\n\n if (!apiKey) {\n logger.error('OpenAI API key not set');\n return {\n title: 'Failed to analyze image',\n description: 'API key not configured',\n };\n }\n\n try {\n const requestBody: Record<string, any> = {\n model: modelName,\n messages: messages,\n max_tokens: maxTokens,\n };\n\n const response = await fetch(`${baseURL}/chat/completions`, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${apiKey}`,\n },\n body: JSON.stringify(requestBody),\n });\n\n const responseClone = response.clone();\n const rawResponseBody = await responseClone.text();\n\n if (!response.ok) {\n throw new Error(`OpenAI API error: ${response.status}`);\n }\n\n const result: unknown = await response.json();\n\n type OpenAIResponseType = {\n choices?: Array<{\n message?: { content?: string };\n finish_reason?: string;\n }>;\n usage?: {\n prompt_tokens: number;\n completion_tokens: number;\n total_tokens: number;\n };\n };\n\n const typedResult = result as 
OpenAIResponseType;\n const content = typedResult.choices?.[0]?.message?.content;\n\n if (typedResult.usage) {\n emitModelUsageEvent(\n runtime,\n ModelType.IMAGE_DESCRIPTION,\n typeof params === 'string' ? params : params.prompt || '',\n {\n promptTokens: typedResult.usage.prompt_tokens,\n completionTokens: typedResult.usage.completion_tokens,\n totalTokens: typedResult.usage.total_tokens,\n }\n );\n }\n\n if (!content) {\n return {\n title: 'Failed to analyze image',\n description: 'No response from API',\n };\n }\n\n // Check if a custom prompt was provided (not the default prompt)\n const isCustomPrompt =\n typeof params === 'object' &&\n params.prompt &&\n params.prompt !==\n 'Please analyze this image and provide a title and detailed description.';\n\n // If custom prompt is used, return the raw content\n if (isCustomPrompt) {\n return content;\n }\n\n // Otherwise, maintain backwards compatibility with object return\n const titleMatch = content.match(/title[:\\s]+(.+?)(?:\\n|$)/i);\n const title = titleMatch?.[1]?.trim() || 'Image Analysis';\n const description = content\n .replace(/title[:\\s]+(.+?)(?:\\n|$)/i, '')\n .trim();\n\n const processedResult = { title, description };\n return processedResult;\n } catch (error: unknown) {\n const message = error instanceof Error ? error.message : String(error);\n logger.error(`Error analyzing image: ${message}`);\n return {\n title: 'Failed to analyze image',\n description: `Error: ${message}`,\n };\n }\n },\n [ModelType.TRANSCRIPTION]: async (\n runtime: IAgentRuntime,\n audioBuffer: Buffer\n ) => {\n logger.log('audioBuffer', audioBuffer);\n\n const modelName = 'whisper-1';\n logger.log(`[OpenAI] Using TRANSCRIPTION model: ${modelName}`);\n\n const baseURL = getBaseURL(runtime);\n const apiKey = getApiKey(runtime);\n\n if (!apiKey) {\n throw new Error('OpenAI API key not configured - Cannot make request');\n }\n if (!audioBuffer || audioBuffer.length === 0) {\n throw new Error('Audio buffer is empty or invalid for transcription');\n }\n\n const formData = new FormData();\n formData.append('file', new Blob([audioBuffer]), 'recording.mp3');\n formData.append('model', 'whisper-1');\n\n try {\n const response = await fetch(`${baseURL}/audio/transcriptions`, {\n method: 'POST',\n headers: {\n Authorization: `Bearer ${apiKey}`,\n },\n body: formData,\n });\n\n const responseClone = response.clone();\n const rawResponseBody = await responseClone.text();\n\n logger.log('response', response);\n\n if (!response.ok) {\n throw new Error(`Failed to transcribe audio: ${response.statusText}`);\n }\n\n const data = (await response.json()) as { text: string };\n const processedText = data.text;\n\n return processedText;\n } catch (error: unknown) {\n const message = error instanceof Error ? error.message : String(error);\n throw error;\n }\n },\n [ModelType.TEXT_TO_SPEECH]: async (\n runtime: IAgentRuntime,\n text: string\n ) => {\n const ttsModelName = getSetting(\n runtime,\n 'OPENAI_TTS_MODEL',\n 'gpt-4o-mini-tts'\n );\n logger.log(`[OpenAI] Using TEXT_TO_SPEECH model: ${ttsModelName}`);\n try {\n const speechStream = await fetchTextToSpeech(runtime, text);\n return speechStream;\n } catch (error: unknown) {\n const message = error instanceof Error ? 
error.message : String(error);\n throw error;\n }\n },\n [ModelType.OBJECT_SMALL]: async (\n runtime: IAgentRuntime,\n params: ObjectGenerationParams\n ) => {\n return generateObjectByModelType(\n runtime,\n params,\n ModelType.OBJECT_SMALL,\n getSmallModel\n );\n },\n [ModelType.OBJECT_LARGE]: async (\n runtime: IAgentRuntime,\n params: ObjectGenerationParams\n ) => {\n return generateObjectByModelType(\n runtime,\n params,\n ModelType.OBJECT_LARGE,\n getLargeModel\n );\n },\n },\n tests: [\n {\n name: 'openai_plugin_tests',\n tests: [\n {\n name: 'openai_test_url_and_api_key_validation',\n fn: async (runtime: IAgentRuntime) => {\n const baseURL = getBaseURL(runtime);\n const response = await fetch(`${baseURL}/models`, {\n headers: {\n Authorization: `Bearer ${getApiKey(runtime)}`,\n },\n });\n const data = await response.json();\n logger.log(\n 'Models Available:',\n (data as { data?: unknown[] })?.data?.length ?? 'N/A'\n );\n if (!response.ok) {\n throw new Error(\n `Failed to validate OpenAI API key: ${response.statusText}`\n );\n }\n },\n },\n {\n name: 'openai_test_text_embedding',\n fn: async (runtime: IAgentRuntime) => {\n try {\n const embedding = await runtime.useModel(\n ModelType.TEXT_EMBEDDING,\n {\n text: 'Hello, world!',\n }\n );\n logger.log('embedding', embedding);\n } catch (error: unknown) {\n const message =\n error instanceof Error ? error.message : String(error);\n logger.error(`Error in test_text_embedding: ${message}`);\n throw error;\n }\n },\n },\n {\n name: 'openai_test_text_large',\n fn: async (runtime: IAgentRuntime) => {\n try {\n const text = await runtime.useModel(ModelType.TEXT_LARGE, {\n prompt: 'What is the nature of reality in 10 words?',\n });\n if (text.length === 0) {\n throw new Error('Failed to generate text');\n }\n logger.log('generated with test_text_large:', text);\n } catch (error: unknown) {\n const message =\n error instanceof Error ? error.message : String(error);\n logger.error(`Error in test_text_large: ${message}`);\n throw error;\n }\n },\n },\n {\n name: 'openai_test_text_small',\n fn: async (runtime: IAgentRuntime) => {\n try {\n const text = await runtime.useModel(ModelType.TEXT_SMALL, {\n prompt: 'What is the nature of reality in 10 words?',\n });\n if (text.length === 0) {\n throw new Error('Failed to generate text');\n }\n logger.log('generated with test_text_small:', text);\n } catch (error: unknown) {\n const message =\n error instanceof Error ? error.message : String(error);\n logger.error(`Error in test_text_small: ${message}`);\n throw error;\n }\n },\n },\n {\n name: 'openai_test_image_generation',\n fn: async (runtime: IAgentRuntime) => {\n logger.log('openai_test_image_generation');\n try {\n const image = await runtime.useModel(ModelType.IMAGE, {\n prompt: 'A beautiful sunset over a calm ocean',\n n: 1,\n size: '1024x1024',\n });\n logger.log('generated with test_image_generation:', image);\n } catch (error: unknown) {\n const message =\n error instanceof Error ? 
error.message : String(error);\n logger.error(`Error in test_image_generation: ${message}`);\n throw error;\n }\n },\n },\n {\n name: 'image-description',\n fn: async (runtime: IAgentRuntime) => {\n try {\n logger.log('openai_test_image_description');\n try {\n const result = await runtime.useModel(\n ModelType.IMAGE_DESCRIPTION,\n 'https://upload.wikimedia.org/wikipedia/commons/thumb/1/1c/Vitalik_Buterin_TechCrunch_London_2015_%28cropped%29.jpg/537px-Vitalik_Buterin_TechCrunch_London_2015_%28cropped%29.jpg'\n );\n\n if (\n result &&\n typeof result === 'object' &&\n 'title' in result &&\n 'description' in result\n ) {\n logger.log('Image description:', result);\n } else {\n logger.error(\n 'Invalid image description result format:',\n result\n );\n }\n } catch (e: unknown) {\n const message = e instanceof Error ? e.message : String(e);\n logger.error(`Error in image description test: ${message}`);\n }\n } catch (e: unknown) {\n const message = e instanceof Error ? e.message : String(e);\n logger.error(\n `Error in openai_test_image_description: ${message}`\n );\n }\n },\n },\n {\n name: 'openai_test_transcription',\n fn: async (runtime: IAgentRuntime) => {\n logger.log('openai_test_transcription');\n try {\n const response = await fetch(\n 'https://upload.wikimedia.org/wikipedia/en/4/40/Chris_Benoit_Voice_Message.ogg'\n );\n const arrayBuffer = await response.arrayBuffer();\n const transcription = await runtime.useModel(\n ModelType.TRANSCRIPTION,\n Buffer.from(new Uint8Array(arrayBuffer))\n );\n logger.log('generated with test_transcription:', transcription);\n } catch (error: unknown) {\n const message =\n error instanceof Error ? error.message : String(error);\n logger.error(`Error in test_transcription: ${message}`);\n throw error;\n }\n },\n },\n {\n name: 'openai_test_text_tokenizer_encode',\n fn: async (runtime: IAgentRuntime) => {\n const prompt = 'Hello tokenizer encode!';\n const tokens = await runtime.useModel(\n ModelType.TEXT_TOKENIZER_ENCODE,\n { prompt }\n );\n if (!Array.isArray(tokens) || tokens.length === 0) {\n throw new Error(\n 'Failed to tokenize text: expected non-empty array of tokens'\n );\n }\n logger.log('Tokenized output:', tokens);\n },\n },\n {\n name: 'openai_test_text_tokenizer_decode',\n fn: async (runtime: IAgentRuntime) => {\n const prompt = 'Hello tokenizer decode!';\n const tokens = await runtime.useModel(\n ModelType.TEXT_TOKENIZER_ENCODE,\n { prompt }\n );\n const decodedText = await runtime.useModel(\n ModelType.TEXT_TOKENIZER_DECODE,\n { tokens }\n );\n if (decodedText !== prompt) {\n throw new Error(\n `Decoded text does not match original. Expected \"${prompt}\", got \"${decodedText}\"`\n );\n }\n logger.log('Decoded text:', decodedText);\n },\n },\n {\n name: 'openai_test_text_to_speech',\n fn: async (runtime: IAgentRuntime) => {\n try {\n const text = 'Hello, this is a test for text-to-speech.';\n const response = await fetchTextToSpeech(runtime, text);\n if (!response) {\n throw new Error('Failed to generate speech');\n }\n logger.log('Generated speech successfully');\n } catch (error: unknown) {\n const message =\n error instanceof Error ? 
error.message : String(error);\n logger.error(`Error in openai_test_text_to_speech: ${message}`);\n throw error;\n }\n },\n },\n ],\n },\n ],\n};\nexport default openaiPlugin;\n"],"mappings":";AAAA,SAAS,oBAAoB;AAY7B;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EAGA;AAAA,OACK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OAGK;AACP,SAAS,wBAA4C;AACrD,SAAS,OAAO,gBAAgB;AAShC,SAAS,WACP,SACA,KACA,cACoB;AACpB,SAAO,QAAQ,WAAW,GAAG,KAAK,QAAQ,IAAI,GAAG,KAAK;AACxD;AAOA,SAAS,WAAW,SAAgC;AAClD,QAAM,UAAU;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACA,SAAO,MAAM,8BAA8B,OAAO,EAAE;AACpD,SAAO;AACT;AAOA,SAAS,oBAAoB,SAAgC;AAC3D,QAAM,eAAe,WAAW,SAAS,sBAAsB;AAC/D,MAAI,cAAc;AAChB,WAAO,MAAM,+CAA+C,YAAY,EAAE;AAC1E,WAAO;AAAA,EACT;AACA,SAAO,MAAM,2DAA2D;AACxE,SAAO,WAAW,OAAO;AAC3B;AAQA,SAAS,UAAU,SAA4C;AAC7D,SAAO,WAAW,SAAS,gBAAgB;AAC7C;AAQA,SAAS,mBAAmB,SAA4C;AACtE,QAAM,kBAAkB,WAAW,SAAS,0BAA0B;AACtE,MAAI,iBAAiB;AACnB,WAAO;AAAA,MACL,8CAA8C,eAAe;AAAA,IAC/D;AACA,WAAO;AAAA,EACT;AACA,SAAO,MAAM,0DAA0D;AACvE,SAAO,UAAU,OAAO;AAC1B;AAQA,SAAS,cAAc,SAAgC;AACrD,SACE,WAAW,SAAS,oBAAoB,KACvC,WAAW,SAAS,eAAe,aAAa;AAErD;AAQA,SAAS,cAAc,SAAgC;AACrD,SACE,WAAW,SAAS,oBAAoB,KACvC,WAAW,SAAS,eAAe,QAAQ;AAEhD;AAQA,SAAS,yBAAyB,SAAgC;AAChE,SACE,WAAW,SAAS,kCAAkC,aAAa,KACnE;AAEJ;AAQA,SAAS,yBAAyB,SAAiC;AACjE,QAAM,UAAU,WAAW,SAAS,iCAAiC,OAAO;AAE5E,QAAM,oBAAoB,OAAO,OAAO,EAAE,YAAY;AACtD,QAAM,SAAS,sBAAsB;AACrC,SAAO;AAAA,IACL,iDAAiD,OAAO,YAAY,OAAO,OAAO,kBAAkB,iBAAiB,cAAc,MAAM;AAAA,EAC3I;AACA,SAAO;AACT;AAQA,SAAS,mBAAmB,SAAwB;AAClD,SAAO,aAAa;AAAA,IAClB,QAAQ,UAAU,OAAO;AAAA,IACzB,SAAS,WAAW,OAAO;AAAA,EAC7B,CAAC;AACH;AASA,eAAe,aAAa,OAAsB,QAAgB;AAChE,QAAM,YACJ,UAAU,UAAU,aACf,QAAQ,IAAI,sBACb,QAAQ,IAAI,eACZ,gBACC,QAAQ,IAAI,eAAe;AAClC,QAAM,WAAW,iBAAiB,SAA0B;AAC5D,QAAM,SAAS,SAAS,OAAO,MAAM;AACrC,SAAO;AACT;AASA,eAAe,eAAe,OAAsB,QAAkB;AACpE,QAAM,YACJ,UAAU,UAAU,aACf,QAAQ,IAAI,sBACb,QAAQ,IAAI,eACZ,gBACC,QAAQ,IAAI,sBAAsB,QAAQ,IAAI,eAAe;AACpE,QAAM,WAAW,iBAAiB,SAA0B;AAC5D,SAAO,SAAS,OAAO,MAAM;AAC/B;AAKA,eAAe,0BACb,SACA,QACA,WACA,YACoB;AACpB,QAAM,SAAS,mBAAmB,OAAO;AACzC,QAAM,YAAY,WAAW,OAAO;AACpC,SAAO,IAAI,kBAAkB,SAAS,WAAW,SAAS,EAAE;AAC5D,QAAM,cAAc,OAAO,eAAe;AAC1C,QAAM,gBAAgB,CAAC,CAAC,OAAO;AAE/B,MAAI,eAAe;AACjB,WAAO;AAAA,MACL,SAAS,SAAS;AAAA,IACpB;AAAA,EACF;AAEA,MAAI;AACF,UAAM,EAAE,QAAQ,MAAM,IAAI,MAAM,eAAe;AAAA,MAC7C,OAAO,OAAO,cAAc,SAAS;AAAA,MACrC,QAAQ;AAAA,MACR,QAAQ,OAAO;AAAA,MACf;AAAA,MACA,yBAAyB,sBAAsB;AAAA,IACjD,CAAC;AAED,QAAI,OAAO;AACT;AAAA,QACE;AAAA,QACA;AAAA,QACA,OAAO;AAAA,QACP;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT,SAAS,OAAgB;AACvB,QAAI,iBAAiB,gBAAgB;AACnC,aAAO,MAAM,0CAA0C,MAAM,OAAO,EAAE;AAEtE,YAAM,iBAAiB,sBAAsB;AAC7C,YAAM,qBAAqB,MAAM,eAAe;AAAA,QAC9C,MAAM,MAAM;AAAA,QACZ;AAAA,MACF,CAAC;AAED,UAAI,oBAAoB;AACtB,YAAI;AACF,gBAAM,iBAAiB,KAAK,MAAM,kBAAkB;AACpD,iBAAO,KAAK,8CAA8C;AAC1D,iBAAO;AAAA,QACT,SAAS,kBAA2B;AAClC,gBAAM,UACJ,4BAA4B,QACxB,iBAAiB,UACjB,OAAO,gBAAgB;AAC7B,iBAAO;AAAA,YACL,mDAAmD,OAAO;AAAA,UAC5D;AACA,gBAAM;AAAA,QACR;AAAA,MACF,OAAO;AACL,eAAO,MAAM,sCAAsC;AACnD,cAAM;AAAA,MACR;AAAA,IACF,OAAO;AACL,YAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,aAAO,MAAM,mCAAmC,OAAO,EAAE;AACzD,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAKA,SAAS,wBAGoB;AAC3B,SAAO,OAAO,EAAE,MAAM,MAAM,MAAwC;AAClE,QAAI;AACF,UAAI,iBAAiB,gBAAgB;AACnC,cAAM,cAAc,KAAK,QAAQ,wBAAwB,EAAE;AAC3D,aAAK,MAAM,WAAW;AACtB,eAAO;AAAA,MACT;AACA,aAAO;AAAA,IACT,SAAS,WAAoB;AAC3B,YAAM,UACJ,qBAAqB,QAAQ,UAAU,UAAU,OAAO,SAAS;AACnE,aAAO,KAAK,+BAA+B,OAAO,EAAE;AACpD,aAAO;AAAA,IACT;AAAA,EACF;AACF;AASA,SAAS,oBACP,SACA,MACA,QACA,OACA;AACA,UAAQ,UAAU,UAAU,YAAY;AAAA,IACtC,UAAU;AAAA,IACV;AAAA,IACA;AAAA,IACA,QAAQ;AAAA,MACN,QAAQ,MAAM;AAAA,MACd,YAAY,MAAM;AAAA,MAClB,OAAO,MAAM;AAAA,IACf
;AAAA,EACF,CAAC;AACH;AAKA,eAAe,kBAAkB,SAAwB,MAAc;AACrE,QAAM,SAAS,UAAU,OAAO;AAChC,QAAM,QAAQ,WAAW,SAAS,oBAAoB,iBAAiB;AACvE,QAAM,QAAQ,WAAW,SAAS,oBAAoB,MAAM;AAC5D,QAAM,eAAe,WAAW,SAAS,2BAA2B,EAAE;AACtE,QAAM,UAAU,WAAW,OAAO;AAElC,MAAI;AACF,UAAM,MAAM,MAAM,MAAM,GAAG,OAAO,iBAAiB;AAAA,MACjD,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,eAAe,UAAU,MAAM;AAAA,QAC/B,gBAAgB;AAAA,MAClB;AAAA,MACA,MAAM,KAAK,UAAU;AAAA,QACnB;AAAA,QACA;AAAA,QACA,OAAO;AAAA,QACP,GAAI,gBAAgB,EAAE,aAAa;AAAA,MACrC,CAAC;AAAA,IACH,CAAC;AAED,QAAI,CAAC,IAAI,IAAI;AACX,YAAM,MAAM,MAAM,IAAI,KAAK;AAC3B,YAAM,IAAI,MAAM,oBAAoB,IAAI,MAAM,KAAK,GAAG,EAAE;AAAA,IAC1D;AAEA,WAAO,IAAI;AAAA,EACb,SAAS,KAAc;AACrB,UAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAC/D,UAAM,IAAI,MAAM,2CAA2C,OAAO,EAAE;AAAA,EACtE;AACF;AAMO,IAAM,eAAuB;AAAA,EAClC,MAAM;AAAA,EACN,aAAa;AAAA,EACb,QAAQ;AAAA,IACN,gBAAgB,QAAQ,IAAI;AAAA,IAC5B,iBAAiB,QAAQ,IAAI;AAAA,IAC7B,oBAAoB,QAAQ,IAAI;AAAA,IAChC,oBAAoB,QAAQ,IAAI;AAAA,IAChC,aAAa,QAAQ,IAAI;AAAA,IACzB,aAAa,QAAQ,IAAI;AAAA,IACzB,wBAAwB,QAAQ,IAAI;AAAA,IACpC,0BAA0B,QAAQ,IAAI;AAAA,IACtC,sBAAsB,QAAQ,IAAI;AAAA,IAClC,6BAA6B,QAAQ,IAAI;AAAA,IACzC,gCAAgC,QAAQ,IAAI;AAAA,IAC5C,qCACE,QAAQ,IAAI;AAAA,IACd,+BAA+B,QAAQ,IAAI;AAAA,EAC7C;AAAA,EACA,MAAM,KAAK,SAAS,SAAS;AAE3B,QAAI,QAAc,OAAO,YAAY;AACnC,cAAQ;AACR,UAAI;AACF,YAAI,CAAC,UAAU,OAAO,GAAG;AACvB,iBAAO;AAAA,YACL;AAAA,UACF;AACA;AAAA,QACF;AACA,YAAI;AACF,gBAAM,UAAU,WAAW,OAAO;AAClC,gBAAM,WAAW,MAAM,MAAM,GAAG,OAAO,WAAW;AAAA,YAChD,SAAS,EAAE,eAAe,UAAU,UAAU,OAAO,CAAC,GAAG;AAAA,UAC3D,CAAC;AACD,cAAI,CAAC,SAAS,IAAI;AAChB,mBAAO;AAAA,cACL,qCAAqC,SAAS,UAAU;AAAA,YAC1D;AACA,mBAAO;AAAA,cACL;AAAA,YACF;AAAA,UACF,OAAO;AACL,mBAAO,IAAI,uCAAuC;AAAA,UACpD;AAAA,QACF,SAAS,YAAqB;AAC5B,gBAAM,UACJ,sBAAsB,QAClB,WAAW,UACX,OAAO,UAAU;AACvB,iBAAO,KAAK,oCAAoC,OAAO,EAAE;AACzD,iBAAO;AAAA,YACL;AAAA,UACF;AAAA,QACF;AAAA,MACF,SAAS,OAAgB;AACvB,cAAM,UACH,OAAmD,QAChD,IAAI,CAAC,MAAM,EAAE,OAAO,EACrB,KAAK,IAAI,MACX,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACxD,eAAO;AAAA,UACL,sCAAsC,OAAO;AAAA,QAC/C;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEA,QAAQ;AAAA,IACN,CAAC,UAAU,cAAc,GAAG,OAC1B,SACA,WACsB;AACtB,YAAM,qBAAqB;AAAA,QACzB;AAAA,QACA;AAAA,QACA;AAAA,MACF;AACA,YAAM,qBAAqB,OAAO;AAAA,QAChC,WAAW,SAAS,+BAA+B,MAAM,KAAK;AAAA,QAC9D;AAAA,MACF;AAEA,UAAI,CAAC,OAAO,OAAO,WAAW,EAAE,SAAS,kBAAkB,GAAG;AAC5D,cAAM,WAAW,gCAAgC,kBAAkB,qBAAqB,OAAO,OAAO,WAAW,EAAE,KAAK,IAAI,CAAC;AAC7H,eAAO,MAAM,QAAQ;AACrB,cAAM,IAAI,MAAM,QAAQ;AAAA,MAC1B;AACA,UAAI,WAAW,MAAM;AACnB,eAAO,MAAM,4CAA4C;AACzD,cAAM,aAAa,MAAM,kBAAkB,EAAE,KAAK,CAAC;AACnD,mBAAW,CAAC,IAAI;AAChB,eAAO;AAAA,MACT;AACA,UAAI;AACJ,UAAI,OAAO,WAAW,UAAU;AAC9B,eAAO;AAAA,MACT,WAAW,OAAO,WAAW,YAAY,OAAO,MAAM;AACpD,eAAO,OAAO;AAAA,MAChB,OAAO;AACL,eAAO,KAAK,oCAAoC;AAChD,cAAM,iBAAiB,MAAM,kBAAkB,EAAE,KAAK,CAAC;AACvD,uBAAe,CAAC,IAAI;AACpB,eAAO;AAAA,MACT;AACA,UAAI,CAAC,KAAK,KAAK,GAAG;AAChB,eAAO,KAAK,0BAA0B;AACtC,cAAM,cAAc,MAAM,kBAAkB,EAAE,KAAK,CAAC;AACpD,oBAAY,CAAC,IAAI;AACjB,eAAO;AAAA,MACT;AAEA,YAAM,mBAAmB,oBAAoB,OAAO;AACpD,YAAM,SAAS,mBAAmB,OAAO;AAEzC,UAAI,CAAC,QAAQ;AACX,cAAM,IAAI,MAAM,+BAA+B;AAAA,MACjD;AAEA,UAAI;AACF,cAAM,WAAW,MAAM,MAAM,GAAG,gBAAgB,eAAe;AAAA,UAC7D,QAAQ;AAAA,UACR,SAAS;AAAA,YACP,eAAe,UAAU,MAAM;AAAA,YAC/B,gBAAgB;AAAA,UAClB;AAAA,UACA,MAAM,KAAK,UAAU;AAAA,YACnB,OAAO;AAAA,YACP,OAAO;AAAA,UACT,CAAC;AAAA,QACH,CAAC;AAED,cAAM,gBAAgB,SAAS,MAAM;AACrC,cAAM,kBAAkB,MAAM,cAAc,KAAK;AAEjD,YAAI,CAAC,SAAS,IAAI;AAChB,iBAAO;AAAA,YACL,qBAAqB,SAAS,MAAM,MAAM,SAAS,UAAU;AAAA,UAC/D;AACA,gBAAM,cAAc,MAAM,kBAAkB,EAAE,KAAK,CAAC;AACpD,sBAAY,CAAC,IAAI;AACjB,iBAAO;AAAA,QACT;AAEA,cAAM,OAAQ,MAAM,SAAS,KAAK;AAKlC,YAAI,CAAC,MAAM,OAAO,CAAC,GAAG,WAAW;AAC/B,iBAAO,MAAM,gCAAgC;AAC7C,gBAAM,cAAc,MAAM,kBAAkB,EAAE
,KAAK,CAAC;AACpD,sBAAY,CAAC,IAAI;AACjB,iBAAO;AAAA,QACT;AAEA,cAAM,YAAY,KAAK,KAAK,CAAC,EAAE;AAE/B,YAAI,KAAK,OAAO;AACd,gBAAM,QAAQ;AAAA,YACZ,cAAc,KAAK,MAAM;AAAA,YACzB,kBAAkB;AAAA,YAClB,aAAa,KAAK,MAAM;AAAA,UAC1B;AAEA,8BAAoB,SAAS,UAAU,gBAAgB,MAAM,KAAK;AAAA,QACpE;AAEA,eAAO,IAAI,mCAAmC,UAAU,MAAM,EAAE;AAChE,eAAO;AAAA,MACT,SAAS,OAAgB;AACvB,cAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,eAAO,MAAM,+BAA+B,OAAO,EAAE;AACrD,cAAM,cAAc,MAAM,kBAAkB,EAAE,KAAK,CAAC;AACpD,oBAAY,CAAC,IAAI;AACjB,eAAO;AAAA,MACT;AAAA,IACF;AAAA,IACA,CAAC,UAAU,qBAAqB,GAAG,OACjC,UACA,EAAE,QAAQ,YAAY,UAAU,WAAW,MACxC;AACH,aAAO,MAAM,aAAa,aAAa,UAAU,YAAY,MAAM;AAAA,IACrE;AAAA,IACA,CAAC,UAAU,qBAAqB,GAAG,OACjC,UACA,EAAE,QAAQ,YAAY,UAAU,WAAW,MACxC;AACH,aAAO,MAAM,eAAe,aAAa,UAAU,YAAY,MAAM;AAAA,IACvE;AAAA,IACA,CAAC,UAAU,UAAU,GAAG,OACtB,SACA,EAAE,QAAQ,gBAAgB,CAAC,EAAE,MAC1B;AACH,YAAM,cAAc;AACpB,YAAM,oBAAoB;AAC1B,YAAM,mBAAmB;AACzB,YAAM,sBAAsB;AAE5B,YAAM,SAAS,mBAAmB,OAAO;AACzC,YAAM,YAAY,cAAc,OAAO;AACvC,YAAM,wBAAwB,yBAAyB,OAAO;AAE9D,aAAO,IAAI,oCAAoC,SAAS,EAAE;AAC1D,aAAO,IAAI,MAAM;AAEjB,YAAM,EAAE,MAAM,gBAAgB,MAAM,IAAI,MAAM,aAAa;AAAA,QACzD,OAAO,OAAO,cAAc,SAAS;AAAA,QACrC;AAAA,QACA,QAAQ,QAAQ,UAAU,UAAU;AAAA,QACpC;AAAA,QACA,WAAW;AAAA,QACX,kBAAkB;AAAA,QAClB,iBAAiB;AAAA,QACjB;AAAA,QACA,wBAAwB;AAAA,UACtB,WAAW;AAAA,QACb;AAAA,MACF,CAAC;AAED,UAAI,OAAO;AACT,4BAAoB,SAAS,UAAU,YAAY,QAAQ,KAAK;AAAA,MAClE;AAEA,aAAO;AAAA,IACT;AAAA,IACA,CAAC,UAAU,UAAU,GAAG,OACtB,SACA;AAAA,MACE;AAAA,MACA,gBAAgB,CAAC;AAAA,MACjB,YAAY;AAAA,MACZ,cAAc;AAAA,MACd,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,IACpB,MACG;AACH,YAAM,SAAS,mBAAmB,OAAO;AACzC,YAAM,YAAY,cAAc,OAAO;AACvC,YAAM,wBAAwB,yBAAyB,OAAO;AAE9D,aAAO,IAAI,oCAAoC,SAAS,EAAE;AAC1D,aAAO,IAAI,MAAM;AAEjB,YAAM,EAAE,MAAM,gBAAgB,MAAM,IAAI,MAAM,aAAa;AAAA,QACzD,OAAO,OAAO,cAAc,SAAS;AAAA,QACrC;AAAA,QACA,QAAQ,QAAQ,UAAU,UAAU;AAAA,QACpC;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA,wBAAwB;AAAA,UACtB,WAAW;AAAA,QACb;AAAA,MACF,CAAC;AAED,UAAI,OAAO;AACT,4BAAoB,SAAS,UAAU,YAAY,QAAQ,KAAK;AAAA,MAClE;AAEA,aAAO;AAAA,IACT;AAAA,IACA,CAAC,UAAU,KAAK,GAAG,OACjB,SACA,WAKG;AACH,YAAM,IAAI,OAAO,KAAK;AACtB,YAAM,OAAO,OAAO,QAAQ;AAC5B,YAAM,SAAS,OAAO;AACtB,YAAM,YAAY;AAClB,aAAO,IAAI,+BAA+B,SAAS,EAAE;AAErD,YAAM,UAAU,WAAW,OAAO;AAClC,YAAM,SAAS,UAAU,OAAO;AAEhC,UAAI,CAAC,QAAQ;AACX,cAAM,IAAI,MAAM,+BAA+B;AAAA,MACjD;AAEA,UAAI;AACF,cAAM,WAAW,MAAM,MAAM,GAAG,OAAO,uBAAuB;AAAA,UAC5D,QAAQ;AAAA,UACR,SAAS;AAAA,YACP,eAAe,UAAU,MAAM;AAAA,YAC/B,gBAAgB;AAAA,UAClB;AAAA,UACA,MAAM,KAAK,UAAU;AAAA,YACnB;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AAED,cAAM,gBAAgB,SAAS,MAAM;AACrC,cAAM,kBAAkB,MAAM,cAAc,KAAK;AAEjD,YAAI,CAAC,SAAS,IAAI;AAChB,gBAAM,IAAI,MAAM,6BAA6B,SAAS,UAAU,EAAE;AAAA,QACpE;AAEA,cAAM,OAAO,MAAM,SAAS,KAAK;AACjC,cAAM,YAAY;AAElB,eAAO,UAAU;AAAA,MACnB,SAAS,OAAgB;AACvB,cAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,cAAM;AAAA,MACR;AAAA,IACF;AAAA,IACA,CAAC,UAAU,iBAAiB,GAAG,OAC7B,SACA,WACG;AACH,UAAI;AACJ,UAAI;AACJ,YAAM,YAAY,yBAAyB,OAAO;AAClD,aAAO,IAAI,2CAA2C,SAAS,EAAE;AACjE,YAAM,YAAY,OAAO;AAAA,QACvB,WAAW,SAAS,uCAAuC,MAAM,KAC/D;AAAA,QACF;AAAA,MACF;AAEA,UAAI,OAAO,WAAW,UAAU;AAC9B,mBAAW;AACX,qBACE;AAAA,MACJ,OAAO;AACL,mBAAW,OAAO;AAClB,qBACE,OAAO,UACP;AAAA,MACJ;AAEA,YAAM,WAAW;AAAA,QACf;AAAA,UACE,MAAM;AAAA,UACN,SAAS;AAAA,YACP,EAAE,MAAM,QAAQ,MAAM,WAAW;AAAA,YACjC,EAAE,MAAM,aAAa,WAAW,EAAE,KAAK,SAAS,EAAE;AAAA,UACpD;AAAA,QACF;AAAA,MACF;AAEA,YAAM,UAAU,WAAW,OAAO;AAClC,YAAM,SAAS,UAAU,OAAO;AAEhC,UAAI,CAAC,QAAQ;AACX,eAAO,MAAM,wBAAwB;AACrC,eAAO;AAAA,UACL,OAAO;AAAA,UACP,aAAa;AAAA,QACf;AAAA,MACF;AAEA,UAAI;AACF,cAAM,cAAmC;AAAA,UACvC,OAAO;AAAA,UACP;AAAA,UACA,YAAY;AAAA,QACd;AAEA,cAAM,WAAW,MAAM,MAAM,GAAG,OAAO,qBAAqB;AAAA,UAC1D,QAA
Q;AAAA,UACR,SAAS;AAAA,YACP,gBAAgB;AAAA,YAChB,eAAe,UAAU,MAAM;AAAA,UACjC;AAAA,UACA,MAAM,KAAK,UAAU,WAAW;AAAA,QAClC,CAAC;AAED,cAAM,gBAAgB,SAAS,MAAM;AACrC,cAAM,kBAAkB,MAAM,cAAc,KAAK;AAEjD,YAAI,CAAC,SAAS,IAAI;AAChB,gBAAM,IAAI,MAAM,qBAAqB,SAAS,MAAM,EAAE;AAAA,QACxD;AAEA,cAAM,SAAkB,MAAM,SAAS,KAAK;AAc5C,cAAM,cAAc;AACpB,cAAM,UAAU,YAAY,UAAU,CAAC,GAAG,SAAS;AAEnD,YAAI,YAAY,OAAO;AACrB;AAAA,YACE;AAAA,YACA,UAAU;AAAA,YACV,OAAO,WAAW,WAAW,SAAS,OAAO,UAAU;AAAA,YACvD;AAAA,cACE,cAAc,YAAY,MAAM;AAAA,cAChC,kBAAkB,YAAY,MAAM;AAAA,cACpC,aAAa,YAAY,MAAM;AAAA,YACjC;AAAA,UACF;AAAA,QACF;AAEA,YAAI,CAAC,SAAS;AACZ,iBAAO;AAAA,YACL,OAAO;AAAA,YACP,aAAa;AAAA,UACf;AAAA,QACF;AAGA,cAAM,iBACJ,OAAO,WAAW,YAClB,OAAO,UACP,OAAO,WACL;AAGJ,YAAI,gBAAgB;AAClB,iBAAO;AAAA,QACT;AAGA,cAAM,aAAa,QAAQ,MAAM,2BAA2B;AAC5D,cAAM,QAAQ,aAAa,CAAC,GAAG,KAAK,KAAK;AACzC,cAAM,cAAc,QACjB,QAAQ,6BAA6B,EAAE,EACvC,KAAK;AAER,cAAM,kBAAkB,EAAE,OAAO,YAAY;AAC7C,eAAO;AAAA,MACT,SAAS,OAAgB;AACvB,cAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,eAAO,MAAM,0BAA0B,OAAO,EAAE;AAChD,eAAO;AAAA,UACL,OAAO;AAAA,UACP,aAAa,UAAU,OAAO;AAAA,QAChC;AAAA,MACF;AAAA,IACF;AAAA,IACA,CAAC,UAAU,aAAa,GAAG,OACzB,SACA,gBACG;AACH,aAAO,IAAI,eAAe,WAAW;AAErC,YAAM,YAAY;AAClB,aAAO,IAAI,uCAAuC,SAAS,EAAE;AAE7D,YAAM,UAAU,WAAW,OAAO;AAClC,YAAM,SAAS,UAAU,OAAO;AAEhC,UAAI,CAAC,QAAQ;AACX,cAAM,IAAI,MAAM,qDAAqD;AAAA,MACvE;AACA,UAAI,CAAC,eAAe,YAAY,WAAW,GAAG;AAC5C,cAAM,IAAI,MAAM,oDAAoD;AAAA,MACtE;AAEA,YAAM,WAAW,IAAI,SAAS;AAC9B,eAAS,OAAO,QAAQ,IAAI,KAAK,CAAC,WAAW,CAAC,GAAG,eAAe;AAChE,eAAS,OAAO,SAAS,WAAW;AAEpC,UAAI;AACF,cAAM,WAAW,MAAM,MAAM,GAAG,OAAO,yBAAyB;AAAA,UAC9D,QAAQ;AAAA,UACR,SAAS;AAAA,YACP,eAAe,UAAU,MAAM;AAAA,UACjC;AAAA,UACA,MAAM;AAAA,QACR,CAAC;AAED,cAAM,gBAAgB,SAAS,MAAM;AACrC,cAAM,kBAAkB,MAAM,cAAc,KAAK;AAEjD,eAAO,IAAI,YAAY,QAAQ;AAE/B,YAAI,CAAC,SAAS,IAAI;AAChB,gBAAM,IAAI,MAAM,+BAA+B,SAAS,UAAU,EAAE;AAAA,QACtE;AAEA,cAAM,OAAQ,MAAM,SAAS,KAAK;AAClC,cAAM,gBAAgB,KAAK;AAE3B,eAAO;AAAA,MACT,SAAS,OAAgB;AACvB,cAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,cAAM;AAAA,MACR;AAAA,IACF;AAAA,IACA,CAAC,UAAU,cAAc,GAAG,OAC1B,SACA,SACG;AACH,YAAM,eAAe;AAAA,QACnB;AAAA,QACA;AAAA,QACA;AAAA,MACF;AACA,aAAO,IAAI,wCAAwC,YAAY,EAAE;AACjE,UAAI;AACF,cAAM,eAAe,MAAM,kBAAkB,SAAS,IAAI;AAC1D,eAAO;AAAA,MACT,SAAS,OAAgB;AACvB,cAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,cAAM;AAAA,MACR;AAAA,IACF;AAAA,IACA,CAAC,UAAU,YAAY,GAAG,OACxB,SACA,WACG;AACH,aAAO;AAAA,QACL;AAAA,QACA;AAAA,QACA,UAAU;AAAA,QACV;AAAA,MACF;AAAA,IACF;AAAA,IACA,CAAC,UAAU,YAAY,GAAG,OACxB,SACA,WACG;AACH,aAAO;AAAA,QACL;AAAA,QACA;AAAA,QACA,UAAU;AAAA,QACV;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EACA,OAAO;AAAA,IACL;AAAA,MACE,MAAM;AAAA,MACN,OAAO;AAAA,QACL;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AACpC,kBAAM,UAAU,WAAW,OAAO;AAClC,kBAAM,WAAW,MAAM,MAAM,GAAG,OAAO,WAAW;AAAA,cAChD,SAAS;AAAA,gBACP,eAAe,UAAU,UAAU,OAAO,CAAC;AAAA,cAC7C;AAAA,YACF,CAAC;AACD,kBAAM,OAAO,MAAM,SAAS,KAAK;AACjC,mBAAO;AAAA,cACL;AAAA,cACC,MAA+B,MAAM,UAAU;AAAA,YAClD;AACA,gBAAI,CAAC,SAAS,IAAI;AAChB,oBAAM,IAAI;AAAA,gBACR,sCAAsC,SAAS,UAAU;AAAA,cAC3D;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AACpC,gBAAI;AACF,oBAAM,YAAY,MAAM,QAAQ;AAAA,gBAC9B,UAAU;AAAA,gBACV;AAAA,kBACE,MAAM;AAAA,gBACR;AAAA,cACF;AACA,qBAAO,IAAI,aAAa,SAAS;AAAA,YACnC,SAAS,OAAgB;AACvB,oBAAM,UACJ,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACvD,qBAAO,MAAM,iCAAiC,OAAO,EAAE;AACvD,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AACpC,gBAAI;AACF,oBAAM,OAAO,MAAM,QAAQ,SAAS,UAAU,YAAY;AAAA,gBACxD,QAAQ;AAAA,cACV,CAAC;AACD,kBAAI,KAAK,WAAW,GAAG;AACrB,sBAAM,IAAI,MAAM,yBAAyB;AAAA,cAC3C;AACA,qBAAO,IAAI,mCAAmC,IAAI;AAAA,YACpD,SAAS,OAAgB;A
ACvB,oBAAM,UACJ,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACvD,qBAAO,MAAM,6BAA6B,OAAO,EAAE;AACnD,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AACpC,gBAAI;AACF,oBAAM,OAAO,MAAM,QAAQ,SAAS,UAAU,YAAY;AAAA,gBACxD,QAAQ;AAAA,cACV,CAAC;AACD,kBAAI,KAAK,WAAW,GAAG;AACrB,sBAAM,IAAI,MAAM,yBAAyB;AAAA,cAC3C;AACA,qBAAO,IAAI,mCAAmC,IAAI;AAAA,YACpD,SAAS,OAAgB;AACvB,oBAAM,UACJ,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACvD,qBAAO,MAAM,6BAA6B,OAAO,EAAE;AACnD,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AACpC,mBAAO,IAAI,8BAA8B;AACzC,gBAAI;AACF,oBAAM,QAAQ,MAAM,QAAQ,SAAS,UAAU,OAAO;AAAA,gBACpD,QAAQ;AAAA,gBACR,GAAG;AAAA,gBACH,MAAM;AAAA,cACR,CAAC;AACD,qBAAO,IAAI,yCAAyC,KAAK;AAAA,YAC3D,SAAS,OAAgB;AACvB,oBAAM,UACJ,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACvD,qBAAO,MAAM,mCAAmC,OAAO,EAAE;AACzD,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AACpC,gBAAI;AACF,qBAAO,IAAI,+BAA+B;AAC1C,kBAAI;AACF,sBAAM,SAAS,MAAM,QAAQ;AAAA,kBAC3B,UAAU;AAAA,kBACV;AAAA,gBACF;AAEA,oBACE,UACA,OAAO,WAAW,YAClB,WAAW,UACX,iBAAiB,QACjB;AACA,yBAAO,IAAI,sBAAsB,MAAM;AAAA,gBACzC,OAAO;AACL,yBAAO;AAAA,oBACL;AAAA,oBACA;AAAA,kBACF;AAAA,gBACF;AAAA,cACF,SAAS,GAAY;AACnB,sBAAM,UAAU,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC;AACzD,uBAAO,MAAM,oCAAoC,OAAO,EAAE;AAAA,cAC5D;AAAA,YACF,SAAS,GAAY;AACnB,oBAAM,UAAU,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC;AACzD,qBAAO;AAAA,gBACL,2CAA2C,OAAO;AAAA,cACpD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AACpC,mBAAO,IAAI,2BAA2B;AACtC,gBAAI;AACF,oBAAM,WAAW,MAAM;AAAA,gBACrB;AAAA,cACF;AACA,oBAAM,cAAc,MAAM,SAAS,YAAY;AAC/C,oBAAM,gBAAgB,MAAM,QAAQ;AAAA,gBAClC,UAAU;AAAA,gBACV,OAAO,KAAK,IAAI,WAAW,WAAW,CAAC;AAAA,cACzC;AACA,qBAAO,IAAI,sCAAsC,aAAa;AAAA,YAChE,SAAS,OAAgB;AACvB,oBAAM,UACJ,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACvD,qBAAO,MAAM,gCAAgC,OAAO,EAAE;AACtD,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AACpC,kBAAM,SAAS;AACf,kBAAM,SAAS,MAAM,QAAQ;AAAA,cAC3B,UAAU;AAAA,cACV,EAAE,OAAO;AAAA,YACX;AACA,gBAAI,CAAC,MAAM,QAAQ,MAAM,KAAK,OAAO,WAAW,GAAG;AACjD,oBAAM,IAAI;AAAA,gBACR;AAAA,cACF;AAAA,YACF;AACA,mBAAO,IAAI,qBAAqB,MAAM;AAAA,UACxC;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AACpC,kBAAM,SAAS;AACf,kBAAM,SAAS,MAAM,QAAQ;AAAA,cAC3B,UAAU;AAAA,cACV,EAAE,OAAO;AAAA,YACX;AACA,kBAAM,cAAc,MAAM,QAAQ;AAAA,cAChC,UAAU;AAAA,cACV,EAAE,OAAO;AAAA,YACX;AACA,gBAAI,gBAAgB,QAAQ;AAC1B,oBAAM,IAAI;AAAA,gBACR,mDAAmD,MAAM,WAAW,WAAW;AAAA,cACjF;AAAA,YACF;AACA,mBAAO,IAAI,iBAAiB,WAAW;AAAA,UACzC;AAAA,QACF;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AACpC,gBAAI;AACF,oBAAM,OAAO;AACb,oBAAM,WAAW,MAAM,kBAAkB,SAAS,IAAI;AACtD,kBAAI,CAAC,UAAU;AACb,sBAAM,IAAI,MAAM,2BAA2B;AAAA,cAC7C;AACA,qBAAO,IAAI,+BAA+B;AAAA,YAC5C,SAAS,OAAgB;AACvB,oBAAM,UACJ,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACvD,qBAAO,MAAM,wCAAwC,OAAO,EAAE;AAC9D,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AACA,IAAO,gBAAQ;","names":[]}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@elizaos/plugin-openai",
3
- "version": "1.0.6",
3
+ "version": "1.0.8",
4
4
  "type": "module",
5
5
  "main": "dist/index.js",
6
6
  "module": "dist/index.js",
@@ -142,6 +142,13 @@
142
142
  "description": "Optional instructions to control the style or behavior of the text-to-speech request.",
143
143
  "required": false,
144
144
  "sensitive": false
145
+ },
146
+ "OPENAI_EXPERIMENTAL_TELEMETRY": {
147
+ "type": "boolean",
148
+ "description": "Enable experimental telemetry features for enhanced debugging and usage analytics.",
149
+ "required": false,
150
+ "default": false,
151
+ "sensitive": false
145
152
  }
146
153
  }
147
154
  },