@elizaos/plugin-openai 2.0.0-alpha.5 → 2.0.0-alpha.537
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +163 -0
- package/dist/browser/index.browser.js +2 -2
- package/dist/browser/index.browser.js.map +12 -11
- package/dist/build.d.ts +1 -1
- package/dist/cjs/index.d.ts +2 -2
- package/dist/cjs/index.node.cjs +1533 -1076
- package/dist/cjs/index.node.js.map +10 -9
- package/dist/generated/specs/specs.d.ts +27 -27
- package/dist/index.browser.d.ts +2 -1
- package/dist/index.browser.d.ts.map +1 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.node.d.ts +2 -1
- package/dist/index.node.d.ts.map +1 -1
- package/dist/models/embedding.d.ts.map +1 -1
- package/dist/models/index.d.ts +1 -1
- package/dist/models/index.d.ts.map +1 -1
- package/dist/models/text.d.ts +5 -0
- package/dist/models/text.d.ts.map +1 -1
- package/dist/node/index.node.js +128 -16
- package/dist/node/index.node.js.map +12 -11
- package/dist/types/index.d.ts +16 -0
- package/dist/types/index.d.ts.map +1 -1
- package/dist/utils/config.d.ts +5 -0
- package/dist/utils/config.d.ts.map +1 -1
- package/dist/utils/events.d.ts +6 -0
- package/dist/utils/events.d.ts.map +1 -1
- package/dist/utils/index.d.ts +1 -1
- package/package.json +13 -11
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025 Shaw Walters and elizaOS Contributors
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,163 @@
|
|
|
1
|
+
# OpenAI Plugin
|
|
2
|
+
|
|
3
|
+
This plugin provides integration with OpenAI's models through the elizaOS platform.
|
|
4
|
+
|
|
5
|
+
## Usage
|
|
6
|
+
|
|
7
|
+
Add the plugin to your character configuration:
|
|
8
|
+
|
|
9
|
+
```json
|
|
10
|
+
"plugins": ["@elizaos/plugin-openai"]
|
|
11
|
+
```
|
|
12
|
+
|
|
13
|
+
## Configuration
|
|
14
|
+
|
|
15
|
+
The plugin requires these environment variables (can be set in .env file or character settings):
|
|
16
|
+
|
|
17
|
+
```json
|
|
18
|
+
"settings": {
|
|
19
|
+
"OPENAI_API_KEY": "your_openai_api_key",
|
|
20
|
+
"OPENAI_BASE_URL": "optional_custom_endpoint",
|
|
21
|
+
"OPENAI_SMALL_MODEL": "gpt-5-mini",
|
|
22
|
+
"OPENAI_LARGE_MODEL": "gpt-5",
|
|
23
|
+
"OPENAI_EMBEDDING_MODEL": "text-embedding-3-small",
|
|
24
|
+
"OPENAI_EMBEDDING_API_KEY": "your_openai_api_key_for_embedding",
|
|
25
|
+
"OPENAI_EMBEDDING_URL": "optional_custom_endpoint",
|
|
26
|
+
"OPENAI_EMBEDDING_DIMENSIONS": "1536",
|
|
27
|
+
"OPENAI_IMAGE_DESCRIPTION_MODEL": "gpt-5-mini",
|
|
28
|
+
"OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS": "8192",
|
|
29
|
+
"OPENAI_EXPERIMENTAL_TELEMETRY": "false",
|
|
30
|
+
"OPENAI_BROWSER_BASE_URL": "https://your-proxy.example.com/openai",
|
|
31
|
+
"OPENAI_BROWSER_EMBEDDING_URL": "https://your-proxy.example.com/openai"
|
|
32
|
+
}
|
|
33
|
+
```
|
|
34
|
+
|
|
35
|
+
Or in `.env` file:
|
|
36
|
+
|
|
37
|
+
```
|
|
38
|
+
OPENAI_API_KEY=your_openai_api_key
|
|
39
|
+
# Optional overrides:
|
|
40
|
+
OPENAI_BASE_URL=optional_custom_endpoint
|
|
41
|
+
OPENAI_SMALL_MODEL=gpt-5-mini
|
|
42
|
+
OPENAI_LARGE_MODEL=gpt-5
|
|
43
|
+
OPENAI_EMBEDDING_MODEL=text-embedding-3-small
|
|
44
|
+
OPENAI_EMBEDDING_API_KEY=your_openai_api_key_for_embedding
|
|
45
|
+
OPENAI_EMBEDDING_URL=optional_custom_endpoint
|
|
46
|
+
OPENAI_EMBEDDING_DIMENSIONS=1536
|
|
47
|
+
OPENAI_IMAGE_DESCRIPTION_MODEL=gpt-5-mini
|
|
48
|
+
OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS=8192
|
|
49
|
+
OPENAI_EXPERIMENTAL_TELEMETRY=false
|
|
50
|
+
# Browser proxy (frontend builds only)
|
|
51
|
+
OPENAI_BROWSER_BASE_URL=https://your-proxy.example.com/openai
|
|
52
|
+
OPENAI_BROWSER_EMBEDDING_URL=https://your-proxy.example.com/openai
|
|
53
|
+
```
|
|
54
|
+
|
|
55
|
+
### Configuration Options
|
|
56
|
+
|
|
57
|
+
- `OPENAI_API_KEY` (required): Your OpenAI API credentials
|
|
58
|
+
- `OPENAI_BASE_URL`: Custom API endpoint (default: https://api.openai.com/v1)
|
|
59
|
+
- `OPENAI_SMALL_MODEL`: Small model name (default: "gpt-5-mini")
|
|
60
|
+
- `OPENAI_LARGE_MODEL`: Large model name (default: "gpt-5")
|
|
61
|
+
- `OPENAI_EMBEDDING_MODEL`: Embedding model name (default: "text-embedding-3-small")
|
|
62
|
+
- `OPENAI_EMBEDDING_API_KEY`: Custom embedding api key (defaults to `OPENAI_API_KEY`)
|
|
63
|
+
- `OPENAI_EMBEDDING_URL`: Custom embedding endpoint (defaults to `OPENAI_BASE_URL`)
|
|
64
|
+
- `OPENAI_EMBEDDING_DIMENSIONS`: Embedding vector dimensions (default: 1536)
|
|
65
|
+
- `OPENAI_IMAGE_DESCRIPTION_MODEL`: Model used for image description (default: "gpt-5-mini")
|
|
66
|
+
- `OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS`: Maximum tokens for image descriptions (default: 8192)
|
|
67
|
+
- `OPENAI_EXPERIMENTAL_TELEMETRY`: Enable experimental telemetry features for enhanced debugging and usage analytics (default: false)
|
|
68
|
+
- `OPENAI_BROWSER_BASE_URL`: Browser-only base URL to a proxy endpoint that forwards requests to OpenAI without exposing keys
|
|
69
|
+
- `OPENAI_BROWSER_EMBEDDING_URL`: Browser-only embeddings endpoint base URL
|
|
70
|
+
|
|
71
|
+
### Browser mode and proxying
|
|
72
|
+
|
|
73
|
+
When bundled for the browser, this plugin avoids sending Authorization headers. Set `OPENAI_BROWSER_BASE_URL` (and optionally `OPENAI_BROWSER_EMBEDDING_URL`) to a server-side proxy you control that injects the OpenAI API key. This prevents exposing secrets in frontend builds.
|
|
74
|
+
|
|
75
|
+
Example minimal proxy (Express):
|
|
76
|
+
|
|
77
|
+
```ts
|
|
78
|
+
import express from "express";
|
|
79
|
+
import fetch from "node-fetch";
|
|
80
|
+
|
|
81
|
+
const app = express();
|
|
82
|
+
app.use(express.json());
|
|
83
|
+
|
|
84
|
+
app.post("/openai/*", async (req, res) => {
|
|
85
|
+
const url = `https://api.openai.com/v1/${req.params[0]}`;
|
|
86
|
+
const r = await fetch(url, {
|
|
87
|
+
method: "POST",
|
|
88
|
+
headers: {
|
|
89
|
+
Authorization: `Bearer ${process.env.OPENAI_API_KEY}`,
|
|
90
|
+
"Content-Type": "application/json",
|
|
91
|
+
},
|
|
92
|
+
body: JSON.stringify(req.body),
|
|
93
|
+
});
|
|
94
|
+
res
|
|
95
|
+
.status(r.status)
|
|
96
|
+
.set(Object.fromEntries(r.headers))
|
|
97
|
+
.send(await r.text());
|
|
98
|
+
});
|
|
99
|
+
|
|
100
|
+
app.listen(3000);
|
|
101
|
+
```
|
|
102
|
+
|
|
103
|
+
### Experimental Telemetry
|
|
104
|
+
|
|
105
|
+
When `OPENAI_EXPERIMENTAL_TELEMETRY` is set to `true`, the plugin enables advanced telemetry features that provide:
|
|
106
|
+
|
|
107
|
+
- Enhanced debugging capabilities for model performance issues
|
|
108
|
+
- Detailed usage analytics for optimization
|
|
109
|
+
- Better observability into OpenAI API interactions
|
|
110
|
+
- Foundation for future monitoring and analytics features through Sentry or other frameworks
|
|
111
|
+
|
|
112
|
+
**Note**: This feature is opt-in due to privacy considerations, as telemetry data may contain information about model usage patterns. Enable only when you need enhanced debugging or analytics capabilities.
|
|
113
|
+
|
|
114
|
+
The plugin provides these model classes:
|
|
115
|
+
|
|
116
|
+
- `TEXT_SMALL`: Optimized for fast, cost-effective responses
|
|
117
|
+
- `TEXT_LARGE`: For complex tasks requiring deeper reasoning
|
|
118
|
+
- `TEXT_EMBEDDING`: Text embedding model (text-embedding-3-small by default)
|
|
119
|
+
- `IMAGE`: DALL-E image generation
|
|
120
|
+
- `IMAGE_DESCRIPTION`: Image analysis using the configured image-description model (default: "gpt-5-mini")
|
|
121
|
+
- `TRANSCRIPTION`: Whisper audio transcription
|
|
122
|
+
- `TEXT_TOKENIZER_ENCODE`: Text tokenization
|
|
123
|
+
- `TEXT_TOKENIZER_DECODE`: Token decoding
|
|
124
|
+
|
|
125
|
+
## Additional Features
|
|
126
|
+
|
|
127
|
+
### Image Generation
|
|
128
|
+
|
|
129
|
+
```js
|
|
130
|
+
await runtime.useModel(ModelType.IMAGE, {
|
|
131
|
+
prompt: "A sunset over mountains",
|
|
132
|
+
n: 1, // number of images
|
|
133
|
+
size: "1024x1024", // image resolution
|
|
134
|
+
});
|
|
135
|
+
```
|
|
136
|
+
|
|
137
|
+
### Audio Transcription
|
|
138
|
+
|
|
139
|
+
```js
|
|
140
|
+
const transcription = await runtime.useModel(
|
|
141
|
+
ModelType.TRANSCRIPTION,
|
|
142
|
+
audioBuffer,
|
|
143
|
+
);
|
|
144
|
+
```
|
|
145
|
+
|
|
146
|
+
### Image Analysis
|
|
147
|
+
|
|
148
|
+
```js
|
|
149
|
+
const { title, description } = await runtime.useModel(
|
|
150
|
+
ModelType.IMAGE_DESCRIPTION,
|
|
151
|
+
"https://example.com/image.jpg",
|
|
152
|
+
);
|
|
153
|
+
```
|
|
154
|
+
|
|
155
|
+
### Text Embeddings
|
|
156
|
+
|
|
157
|
+
```js
|
|
158
|
+
await runtime.useModel(ModelType.TEXT_EMBEDDING, "text to embed");
|
|
159
|
+
```
|
|
160
|
+
|
|
161
|
+
### Tokenizer in browser
|
|
162
|
+
|
|
163
|
+
js-tiktoken is a pure-JavaScript port (no WASM) and is browser-safe; this plugin uses `encodingForModel` directly in both Node and browser builds.
|
|
@@ -1,3 +1,3 @@
|
|
|
1
|
-
/* Generated, minified dist bundle (previous package version) as quoted by this diff.
   Contents (in order): env/setting getters with OPENAI_* fallbacks; API-key and
   base-URL resolution (browser-aware); startup API-key validation; audio MIME
   sniffing + transcription (multipart /audio/transcriptions); TTS (/audio/speech,
   4096-char limit, fixed voice list); embeddings with dimension validation against
   VECTOR_DIMS; DALL-E image generation; chat-completions image description;
   object generation via ai-sdk generateObject with a markdown-JSON repair hook;
   deep research via the /responses endpoint; text generation via
   generateText/streamText; js-tiktoken encode/decode; and the plugin object with
   model handlers plus self-tests. Generated output — do not hand-edit.
   NOTE(review): line breaks below are an artifact of the diff rendering of a
   single minified line (some fall inside template literals). */
import{logger as l,ModelType as w}from"@elizaos/core";import{logger as C}from"@elizaos/core";import{logger as L}from"@elizaos/core";function Co(o){if(typeof process>"u"||!process.env)return;let n=process.env[o];return n===void 0?void 0:String(n)}function I(o,n,c){let r=o.getSetting(n);if(r!==void 0&&r!==null)return String(r);return Co(n)??c}function K(o,n,c){let r=I(o,n);if(r===void 0)return c;let f=Number.parseInt(r,10);if(!Number.isFinite(f))throw Error(`Setting '${n}' must be a valid integer, got: ${r}`);return f}function v(o,n,c){let r=I(o,n);if(r===void 0)return c;let f=r.toLowerCase();return f==="true"||f==="1"||f==="yes"}function E(){return typeof globalThis<"u"&&typeof globalThis.document<"u"}function e(o){return E()&&!!I(o,"OPENAI_BROWSER_BASE_URL")}function y(o){return I(o,"OPENAI_API_KEY")}function xo(o){let n=I(o,"OPENAI_EMBEDDING_API_KEY");if(n)return L.debug("[OpenAI] Using specific embedding API key"),n;return L.debug("[OpenAI] Falling back to general API key for embeddings"),y(o)}function R(o,n=!1){if(E()&&!v(o,"OPENAI_ALLOW_BROWSER_API_KEY",!1))return{};let c=n?xo(o):y(o);return c?{Authorization:`Bearer ${c}`}:{}}function P(o){let n=I(o,"OPENAI_BROWSER_BASE_URL"),c=E()&&n?n:I(o,"OPENAI_BASE_URL")??"https://api.openai.com/v1";return L.debug(`[OpenAI] Base URL: ${c}`),c}function u(o){let n=E()?I(o,"OPENAI_BROWSER_EMBEDDING_URL")??I(o,"OPENAI_BROWSER_BASE_URL"):I(o,"OPENAI_EMBEDDING_URL");if(n)return L.debug(`[OpenAI] Using embedding base URL: ${n}`),n;return L.debug("[OpenAI] Falling back to general base URL for embeddings"),P(o)}function F(o){return I(o,"OPENAI_SMALL_MODEL")??I(o,"SMALL_MODEL")??"gpt-5-mini"}function U(o){return I(o,"OPENAI_LARGE_MODEL")??I(o,"LARGE_MODEL")??"gpt-5"}function m(o){return I(o,"OPENAI_EMBEDDING_MODEL")??"text-embedding-3-small"}function oo(o){return I(o,"OPENAI_IMAGE_DESCRIPTION_MODEL")??"gpt-5-mini"}function no(o){return I(o,"OPENAI_TRANSCRIPTION_MODEL")??"gpt-5-mini-transcribe"}function co(o){return 
I(o,"OPENAI_TTS_MODEL")??"tts-1"}function ro(o){return I(o,"OPENAI_TTS_VOICE")??"nova"}function fo(o){return I(o,"OPENAI_TTS_INSTRUCTIONS")??""}function io(o){return I(o,"OPENAI_IMAGE_MODEL")??"dall-e-3"}function To(o){return v(o,"OPENAI_EXPERIMENTAL_TELEMETRY",!1)}function po(o){return K(o,"OPENAI_EMBEDDING_DIMENSIONS",1536)}function Ao(o){return K(o,"OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS",8192)}function ko(o){return I(o,"OPENAI_RESEARCH_MODEL")??"o3-deep-research"}function Oo(o){return K(o,"OPENAI_RESEARCH_TIMEOUT",3600000)}globalThis.AI_SDK_LOG_WARNINGS??=!1;function wo(o,n){Ko(n)}async function Ko(o){if(E()){C.debug("[OpenAI] Skipping API validation in browser environment");return}if(!y(o)){C.warn("[OpenAI] OPENAI_API_KEY is not configured. OpenAI functionality will fail until a valid API key is provided.");return}try{let c=P(o),r=await fetch(`${c}/models`,{headers:R(o)});if(!r.ok){C.warn(`[OpenAI] API key validation failed: ${r.status} ${r.statusText}. Please verify your OPENAI_API_KEY is correct.`);return}}catch(c){let r=c instanceof Error?c.message:String(c);C.warn(`[OpenAI] API validation error: ${r}. 
OpenAI functionality may be limited.`)}}import{logger as z}from"@elizaos/core";import{logger as Wo}from"@elizaos/core";var S={WAV:{HEADER:[82,73,70,70],IDENTIFIER:[87,65,86,69]},MP3_ID3:[73,68,51],OGG:[79,103,103,83],FLAC:[102,76,97,67],FTYP:[102,116,121,112],WEBM_EBML:[26,69,223,163]},Xo=12;function b(o,n,c){for(let r=0;r<c.length;r++){let f=c[r];if(f===void 0||o[n+r]!==f)return!1}return!0}function W(o){if(o.length<Xo)return"application/octet-stream";if(b(o,0,S.WAV.HEADER)&&b(o,8,S.WAV.IDENTIFIER))return"audio/wav";let n=o[0],c=o[1];if(b(o,0,S.MP3_ID3)||n===255&&c!==void 0&&(c&224)===224)return"audio/mpeg";if(b(o,0,S.OGG))return"audio/ogg";if(b(o,0,S.FLAC))return"audio/flac";if(b(o,4,S.FTYP))return"audio/mp4";if(b(o,0,S.WEBM_EBML))return"audio/webm";return Wo.warn("Could not detect audio format from buffer, using generic binary type"),"application/octet-stream"}function Ho(o){switch(o){case"audio/wav":return"wav";case"audio/mpeg":return"mp3";case"audio/ogg":return"ogg";case"audio/flac":return"flac";case"audio/mp4":return"m4a";case"audio/webm":return"webm";case"application/octet-stream":return"bin"}}function to(o){return`recording.${Ho(o)}`}function Po(o){return o instanceof Blob||o instanceof File}function X(o){return Buffer.isBuffer(o)}function Vo(o){return typeof o==="object"&&o!==null&&"audio"in o&&(Po(o.audio)||X(o.audio))}function Qo(o){return typeof o==="object"&&o!==null&&"audioUrl"in o&&typeof o.audioUrl==="string"}async function Io(o){let n=await fetch(o);if(!n.ok)throw Error(`Failed to fetch audio from URL: ${n.status}`);return n.blob()}async function d(o,n){let c=no(o),r,f={};if(typeof n==="string")z.debug(`[OpenAI] Fetching audio from URL: ${n}`),r=await Io(n);else if(Po(n))r=n;else if(X(n)){let O=W(n);z.debug(`[OpenAI] Auto-detected audio MIME type: ${O}`),r=new Blob([new Uint8Array(n)],{type:O})}else if(Vo(n)){if(f=n,n.model)c=n.model;if(X(n.audio)){let O=n.mimeType??W(n.audio);z.debug(`[OpenAI] Using MIME type: ${O}`),r=new Blob([new 
Uint8Array(n.audio)],{type:O})}else r=n.audio}else if(Qo(n))z.debug(`[OpenAI] Fetching audio from URL: ${n.audioUrl}`),r=await Io(n.audioUrl),f={prompt:n.prompt};else throw Error("TRANSCRIPTION expects Blob, File, Buffer, URL string, or TranscriptionParams object");z.debug(`[OpenAI] Using TRANSCRIPTION model: ${c}`);let i=r.type||"audio/webm",k=r.name||to(i.startsWith("audio/")?i:"audio/webm"),p=new FormData;if(p.append("file",r,k),p.append("model",c),f.language)p.append("language",f.language);if(f.responseFormat)p.append("response_format",f.responseFormat);if(f.prompt)p.append("prompt",f.prompt);if(f.temperature!==void 0)p.append("temperature",String(f.temperature));if(f.timestampGranularities)for(let O of f.timestampGranularities)p.append("timestamp_granularities[]",O);let A=P(o),T=await fetch(`${A}/audio/transcriptions`,{method:"POST",headers:R(o),body:p});if(!T.ok){let O=await T.text().catch(()=>"Unknown error");throw Error(`OpenAI transcription failed: ${T.status} ${T.statusText} - ${O}`)}return(await T.json()).text}async function H(o,n){let c,r,f="mp3",i,k;if(typeof n==="string")c=n,r=void 0;else{if(c=n.text,r=n.voice,"format"in n&&n.format)f=n.format;if("model"in n&&n.model)i=n.model;if("instructions"in n&&n.instructions)k=n.instructions}if(i=i??co(o),r=r??ro(o),k=k??fo(o),z.debug(`[OpenAI] Using TEXT_TO_SPEECH model: ${i}`),!c||c.trim().length===0)throw Error("TEXT_TO_SPEECH requires non-empty text");if(c.length>4096)throw Error("TEXT_TO_SPEECH text exceeds 4096 character limit");let p=["alloy","echo","fable","onyx","nova","shimmer"];if(r&&!p.includes(r))throw Error(`Invalid voice: ${r}. 
Must be one of: ${p.join(", ")}`);let A=P(o),T={model:i,voice:r,input:c,response_format:f};if(k&&k.length>0)T.instructions=k;let t=await fetch(`${A}/audio/speech`,{method:"POST",headers:{...R(o),"Content-Type":"application/json",...f==="mp3"?{Accept:"audio/mpeg"}:{}},body:JSON.stringify(T)});if(!t.ok){let O=await t.text().catch(()=>"Unknown error");throw Error(`OpenAI TTS failed: ${t.status} ${t.statusText} - ${O}`)}return t.arrayBuffer()}import{logger as x,ModelType as Do,VECTOR_DIMS as qo}from"@elizaos/core";import{EventType as Zo}from"@elizaos/core";var so=200;function Yo(o){if(o.length<=so)return o;return`${o.slice(0,so)}…`}function Go(o){if("promptTokens"in o)return{promptTokens:o.promptTokens??0,completionTokens:o.completionTokens??0,totalTokens:o.totalTokens??(o.promptTokens??0)+(o.completionTokens??0)};if("inputTokens"in o||"outputTokens"in o){let n=o.inputTokens??0,c=o.outputTokens??0;return{promptTokens:n,completionTokens:c,totalTokens:n+c}}return{promptTokens:0,completionTokens:0,totalTokens:0}}function J(o,n,c,r){let f=Go(r),i={runtime:o,source:"openai",provider:"openai",type:n,prompt:Yo(c),tokens:{prompt:f.promptTokens,completion:f.completionTokens,total:f.totalTokens}};o.emitEvent(Zo.MODEL_USED,i)}var ho=8000;function Mo(o){let n=Object.values(qo);if(!n.includes(o))throw Error(`Invalid embedding dimension: ${o}. 
Must be one of: ${n.join(", ")}`);return o}function Bo(o){if(o===null)return null;if(typeof o==="string")return o;if(typeof o==="object"&&typeof o.text==="string")return o.text;throw Error("Invalid embedding params: expected string, { text: string }, or null")}async function V(o,n){let c=m(o),r=Mo(po(o)),f=Bo(n);if(f===null){x.debug("[OpenAI] Creating test embedding for initialization");let j=Array(r).fill(0);return j[0]=0.1,j}let i=f.trim();if(i.length===0)throw Error("Cannot generate embedding for empty text");let k=ho*4;if(i.length>k)x.warn(`[OpenAI] Embedding input too long (~${Math.ceil(i.length/4)} tokens), truncating to ~${ho} tokens`),i=i.slice(0,k);let A=`${u(o)}/embeddings`;x.debug(`[OpenAI] Generating embedding with model: ${c}`);let T=await fetch(A,{method:"POST",headers:{...R(o,!0),"Content-Type":"application/json"},body:JSON.stringify({model:c,input:i})});if(!T.ok){let j=await T.text().catch(()=>"Unknown error");throw Error(`OpenAI embedding API error: ${T.status} ${T.statusText} - ${j}`)}let t=await T.json(),O=t?.data?.[0];if(!O||!O.embedding)throw Error("OpenAI API returned invalid embedding response structure");let h=O.embedding;if(h.length!==r)throw Error(`Embedding dimension mismatch: got ${h.length}, expected ${r}. 
Check OPENAI_EMBEDDING_DIMENSIONS setting.`);if(t.usage)J(o,Do.TEXT_EMBEDDING,i,{promptTokens:t.usage.prompt_tokens,completionTokens:0,totalTokens:t.usage.total_tokens});return x.debug(`[OpenAI] Generated embedding with ${h.length} dimensions`),h}import{logger as lo,ModelType as ao}from"@elizaos/core";var Ro="Please analyze this image and provide a title and detailed description.";async function Q(o,n){let c=io(o),r=n.count??1,f=n.size??"1024x1024",i=n;if(lo.debug(`[OpenAI] Using IMAGE model: ${c}`),!n.prompt||n.prompt.trim().length===0)throw Error("IMAGE generation requires a non-empty prompt");if(r<1||r>10)throw Error("IMAGE count must be between 1 and 10");let k=P(o),p={model:c,prompt:n.prompt,n:r,size:f};if(i.quality)p.quality=i.quality;if(i.style)p.style=i.style;let A=await fetch(`${k}/images/generations`,{method:"POST",headers:{...R(o),"Content-Type":"application/json"},body:JSON.stringify(p)});if(!A.ok){let t=await A.text().catch(()=>"Unknown error");throw Error(`OpenAI image generation failed: ${A.status} ${A.statusText} - ${t}`)}let T=await A.json();if(!T.data||T.data.length===0)throw Error("OpenAI API returned no images");return T.data.map((t)=>({url:t.url,revisedPrompt:t.revised_prompt}))}function go(o){return o.match(/title[:\s]+(.+?)(?:\n|$)/i)?.[1]?.trim()??"Image Analysis"}function vo(o){return o.replace(/title[:\s]+(.+?)(?:\n|$)/i,"").trim()}async function Z(o,n){let c=oo(o),r=Ao(o);lo.debug(`[OpenAI] Using IMAGE_DESCRIPTION model: ${c}`);let f,i;if(typeof n==="string")f=n,i=Ro;else f=n.imageUrl,i=n.prompt??Ro;if(!f||f.trim().length===0)throw Error("IMAGE_DESCRIPTION requires a valid image URL");let k=P(o),p={model:c,messages:[{role:"user",content:[{type:"text",text:i},{type:"image_url",image_url:{url:f}}]}],max_tokens:r},A=await fetch(`${k}/chat/completions`,{method:"POST",headers:{...R(o),"Content-Type":"application/json"},body:JSON.stringify(p)});if(!A.ok){let h=await A.text().catch(()=>"Unknown error");throw Error(`OpenAI image description 
failed: ${A.status} ${A.statusText} - ${h}`)}let T=await A.json();if(T.usage)J(o,ao.IMAGE_DESCRIPTION,typeof n==="string"?n:n.prompt??"",{promptTokens:T.usage.prompt_tokens,completionTokens:T.usage.completion_tokens,totalTokens:T.usage.total_tokens});let O=T.choices?.[0]?.message?.content;if(!O)throw Error("OpenAI API returned empty image description");return{title:go(O),description:vo(O)}}import{logger as No,ModelType as So}from"@elizaos/core";import{generateObject as on}from"ai";import{createOpenAI as yo}from"@ai-sdk/openai";var eo="sk-proxy";function _(o){let n=P(o),c=y(o);if(!c&&e(o))return yo({apiKey:eo,baseURL:n});if(!c)throw Error("OPENAI_API_KEY is required. Set it in your environment variables or runtime settings.");return yo({apiKey:c,baseURL:n})}import{logger as Jo}from"@elizaos/core";import{JSONParseError as uo}from"ai";var mo={MARKDOWN_JSON:/```json\n|\n```|```/g,WHITESPACE:/^\s+|\s+$/g};function $o(){return async({text:o,error:n})=>{if(!(n instanceof uo))return null;try{let c=o.replace(mo.MARKDOWN_JSON,"");return JSON.parse(c),Jo.debug("[JSON Repair] Successfully repaired JSON by removing markdown wrappers"),c}catch{return Jo.warn("[JSON Repair] Unable to repair JSON text"),null}}}async function bo(o,n,c,r){let f=_(o),i=r(o);if(No.debug(`[OpenAI] Using ${c} model: ${i}`),!n.prompt||n.prompt.trim().length===0)throw Error("Object generation requires a non-empty prompt");if(n.schema)No.debug("[OpenAI] Schema provided but using no-schema mode. 
Structure is determined by prompt instructions.");let k=f.chat(i),{object:p,usage:A}=await on({model:k,output:"no-schema",prompt:n.prompt,experimental_repairText:$o()});if(A)J(o,c,n.prompt,A);if(typeof p!=="object"||p===null)throw Error(`Object generation returned ${typeof p}, expected object`);return p}async function Y(o,n){return bo(o,n,So.OBJECT_SMALL,F)}async function G(o,n){return bo(o,n,So.OBJECT_LARGE,U)}import{logger as $}from"@elizaos/core";function nn(o){switch(o.type){case"web_search_preview":return{type:"web_search_preview"};case"file_search":return{type:"file_search",vector_store_ids:o.vectorStoreIds};case"code_interpreter":return{type:"code_interpreter",container:o.container??{type:"auto"}};case"mcp":return{type:"mcp",server_label:o.serverLabel,server_url:o.serverUrl,require_approval:o.requireApproval??"never"};default:throw Error(`Unknown research tool type: ${o.type}`)}}function cn(o){switch(o.type){case"web_search_call":return{id:o.id??"",type:"web_search_call",status:o.status??"completed",action:{type:o.action?.type??"search",query:o.action?.query,url:o.action?.url}};case"file_search_call":return{id:o.id??"",type:"file_search_call",status:o.status??"completed",query:o.query??"",results:o.results?.map((n)=>({fileId:n.file_id,fileName:n.file_name,score:n.score}))};case"code_interpreter_call":return{id:o.id??"",type:"code_interpreter_call",status:o.status??"completed",code:o.code??"",output:o.output};case"mcp_tool_call":return{id:o.id??"",type:"mcp_tool_call",status:o.status??"completed",serverLabel:o.server_label??"",toolName:o.tool_name??"",arguments:o.arguments??{},result:o.result};case"message":return{type:"message",content:o.content?.map((n)=>({type:"output_text",text:n.text,annotations:n.annotations?.map((c)=>({url:c.url,title:c.title,startIndex:c.start_index,endIndex:c.end_index}))??[]}))??[]};default:return null}}function rn(o){if(o.output_text){let r=[];if(o.output){for(let f of o.output)if(f.type==="message"&&f.content){for(let i of 
f.content)if(i.annotations)for(let k of i.annotations)r.push({url:k.url,title:k.title,startIndex:k.start_index,endIndex:k.end_index})}}return{text:o.output_text,annotations:r}}let n="",c=[];if(o.output){for(let r of o.output)if(r.type==="message"&&r.content){for(let f of r.content)if(n+=f.text,f.annotations)for(let i of f.annotations)c.push({url:i.url,title:i.title,startIndex:i.start_index,endIndex:i.end_index})}}return{text:n,annotations:c}}async function D(o,n){let c=y(o);if(!c)throw Error("OPENAI_API_KEY is required for deep research. Set it in your environment variables or runtime settings.");let r=P(o),f=n.model??ko(o),i=Oo(o);$.debug(`[OpenAI] Starting deep research with model: ${f}`),$.debug(`[OpenAI] Research input: ${n.input.substring(0,100)}...`);let k=n.tools?.filter((N)=>N.type==="web_search_preview"||N.type==="file_search"||N.type==="mcp");if(!k||k.length===0)$.debug("[OpenAI] No data source tools specified, defaulting to web_search_preview"),n.tools=[{type:"web_search_preview"},...n.tools??[]];let p={model:f,input:n.input};if(n.instructions)p.instructions=n.instructions;if(n.background!==void 0)p.background=n.background;if(n.tools&&n.tools.length>0)p.tools=n.tools.map(nn);if(n.maxToolCalls!==void 0)p.max_tool_calls=n.maxToolCalls;if(n.reasoningSummary)p.reasoning={summary:n.reasoningSummary};$.debug(`[OpenAI] Research request body: ${JSON.stringify(p,null,2)}`);let A=await fetch(`${r}/responses`,{method:"POST",headers:{Authorization:`Bearer ${c}`,"Content-Type":"application/json"},body:JSON.stringify(p),signal:AbortSignal.timeout(i)});if(!A.ok){let N=await A.text();throw $.error(`[OpenAI] Research request failed: ${A.status} ${N}`),Error(`Deep research request failed: ${A.status} ${A.statusText}`)}let T=await A.json();if(T.error)throw $.error(`[OpenAI] Research API error: ${T.error.message}`),Error(`Deep research error: ${T.error.message}`);$.debug(`[OpenAI] Research response received. 
Status: ${T.status??"completed"}`);let{text:t,annotations:O}=rn(T),h=[];if(T.output)for(let N of T.output){let g=cn(N);if(g)h.push(g)}let j={id:T.id,text:t,annotations:O,outputItems:h,status:T.status};return $.info(`[OpenAI] Research completed. Text length: ${t.length}, Annotations: ${O.length}, Output items: ${h.length}`),j}import{logger as fn,ModelType as jo}from"@elizaos/core";import{generateText as Tn,streamText as pn}from"ai";function An(o){if(!o)return;let n=o.inputTokens??0,c=o.outputTokens??0;return{promptTokens:n,completionTokens:c,totalTokens:n+c}}async function Eo(o,n,c,r){let f=_(o),i=r(o);fn.debug(`[OpenAI] Using ${c} model: ${i}`);let k=o.character.system??void 0,A={model:f.chat(i),prompt:n.prompt,system:k,maxOutputTokens:n.maxTokens??8192,experimental_telemetry:{isEnabled:To(o)}};if(n.stream){let O=pn(A);return{textStream:O.textStream,text:Promise.resolve(O.text),usage:Promise.resolve(O.usage).then(An),finishReason:Promise.resolve(O.finishReason).then((h)=>h)}}let{text:T,usage:t}=await Tn(A);if(t)J(o,c,n.prompt,t);return T}async function q(o,n){return Eo(o,n,jo.TEXT_SMALL,F)}async function M(o,n){return Eo(o,n,jo.TEXT_LARGE,U)}import{ModelType as _o}from"@elizaos/core";import{ModelType as kn}from"@elizaos/core";import{encodingForModel as On,getEncoding as wn}from"js-tiktoken";function Fo(o){let c=o.toLowerCase().includes("4o")?"o200k_base":"cl100k_base";try{return On(o)}catch{return wn(c)}}function Uo(o,n){if(n===kn.TEXT_SMALL)return F(o);return U(o)}function zo(o,n,c){let r=Uo(o,n);return Fo(r).encode(c)}function Lo(o,n,c){let r=Uo(o,n);return Fo(r).decode(c)}async function B(o,n){if(!n.prompt)throw Error("Tokenization requires a non-empty prompt");let c=n.modelType??_o.TEXT_LARGE;return zo(o,c,n.prompt)}async function a(o,n){if(!n.tokens||!Array.isArray(n.tokens))throw Error("Detokenization requires a valid tokens array");if(n.tokens.length===0)return"";for(let r=0;r<n.tokens.length;r++){let f=n.tokens[r];if(typeof 
f!=="number"||!Number.isFinite(f))throw Error(`Invalid token at index ${r}: expected number`)}let c=n.modelType??_o.TEXT_LARGE;return Lo(o,c,n.tokens)}function tn(){if(typeof process>"u")return{};return process.env}var s=tn(),In={name:"openai",description:"OpenAI API integration for text, image, audio, and embedding models",config:{OPENAI_API_KEY:s.OPENAI_API_KEY??null,OPENAI_BASE_URL:s.OPENAI_BASE_URL??null,OPENAI_SMALL_MODEL:s.OPENAI_SMALL_MODEL??null,OPENAI_LARGE_MODEL:s.OPENAI_LARGE_MODEL??null,SMALL_MODEL:s.SMALL_MODEL??null,LARGE_MODEL:s.LARGE_MODEL??null,OPENAI_EMBEDDING_MODEL:s.OPENAI_EMBEDDING_MODEL??null,OPENAI_EMBEDDING_API_KEY:s.OPENAI_EMBEDDING_API_KEY??null,OPENAI_EMBEDDING_URL:s.OPENAI_EMBEDDING_URL??null,OPENAI_EMBEDDING_DIMENSIONS:s.OPENAI_EMBEDDING_DIMENSIONS??null,OPENAI_IMAGE_DESCRIPTION_MODEL:s.OPENAI_IMAGE_DESCRIPTION_MODEL??null,OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS:s.OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS??null,OPENAI_EXPERIMENTAL_TELEMETRY:s.OPENAI_EXPERIMENTAL_TELEMETRY??null,OPENAI_RESEARCH_MODEL:s.OPENAI_RESEARCH_MODEL??null,OPENAI_RESEARCH_TIMEOUT:s.OPENAI_RESEARCH_TIMEOUT??null},async init(o,n){wo(o,n)},models:{[w.TEXT_EMBEDDING]:async(o,n)=>{return V(o,n)},[w.TEXT_TOKENIZER_ENCODE]:async(o,n)=>{return B(o,n)},[w.TEXT_TOKENIZER_DECODE]:async(o,n)=>{return a(o,n)},[w.TEXT_SMALL]:async(o,n)=>{return q(o,n)},[w.TEXT_LARGE]:async(o,n)=>{return M(o,n)},[w.IMAGE]:async(o,n)=>{return Q(o,n)},[w.IMAGE_DESCRIPTION]:async(o,n)=>{return Z(o,n)},[w.TRANSCRIPTION]:async(o,n)=>{return d(o,n)},[w.TEXT_TO_SPEECH]:async(o,n)=>{return H(o,n)},[w.OBJECT_SMALL]:async(o,n)=>{return Y(o,n)},[w.OBJECT_LARGE]:async(o,n)=>{return G(o,n)},[w.RESEARCH]:async(o,n)=>{return D(o,n)}},tests:[{name:"openai_plugin_tests",tests:[{name:"openai_test_api_connectivity",fn:async(o)=>{let n=P(o),c=await fetch(`${n}/models`,{headers:R(o)});if(!c.ok)throw Error(`API connectivity test failed: ${c.status} ${c.statusText}`);let r=await c.json();l.info(`[OpenAI Test] API connected. 
${r.data?.length??0} models available.`)}},{name:"openai_test_text_embedding",fn:async(o)=>{let n=await o.useModel(w.TEXT_EMBEDDING,{text:"Hello, world!"});if(!Array.isArray(n)||n.length===0)throw Error("Embedding should return a non-empty array");l.info(`[OpenAI Test] Generated embedding with ${n.length} dimensions`)}},{name:"openai_test_text_small",fn:async(o)=>{let n=await o.useModel(w.TEXT_SMALL,{prompt:"Say hello in exactly 5 words."});if(typeof n!=="string"||n.length===0)throw Error("TEXT_SMALL should return non-empty string");l.info(`[OpenAI Test] TEXT_SMALL generated: "${n.substring(0,50)}..."`)}},{name:"openai_test_text_large",fn:async(o)=>{let n=await o.useModel(w.TEXT_LARGE,{prompt:"Explain quantum computing in 2 sentences."});if(typeof n!=="string"||n.length===0)throw Error("TEXT_LARGE should return non-empty string");l.info(`[OpenAI Test] TEXT_LARGE generated: "${n.substring(0,50)}..."`)}},{name:"openai_test_tokenizer_roundtrip",fn:async(o)=>{let c=await o.useModel(w.TEXT_TOKENIZER_ENCODE,{prompt:"Hello, tokenizer test!",modelType:w.TEXT_SMALL});if(!Array.isArray(c)||c.length===0)throw Error("Tokenization should return non-empty token array");let r=await o.useModel(w.TEXT_TOKENIZER_DECODE,{tokens:c,modelType:w.TEXT_SMALL});if(r!=="Hello, tokenizer test!")throw Error(`Tokenizer roundtrip failed: expected "Hello, tokenizer test!", got "${r}"`);l.info(`[OpenAI Test] Tokenizer roundtrip successful (${c.length} tokens)`)}},{name:"openai_test_streaming",fn:async(o)=>{let n=[],c=await o.useModel(w.TEXT_LARGE,{prompt:"Count from 1 to 5, one number per line.",stream:!0,onStreamChunk:(r)=>{n.push(r)}});if(typeof c!=="string"||c.length===0)throw Error("Streaming should return non-empty result");if(n.length===0)throw Error("No streaming chunks received");l.info(`[OpenAI Test] Streaming test: ${n.length} chunks received`)}},{name:"openai_test_image_description",fn:async(o)=>{let c=await 
o.useModel(w.IMAGE_DESCRIPTION,"https://upload.wikimedia.org/wikipedia/commons/thumb/a/a7/Camponotus_flavomarginatus_ant.jpg/440px-Camponotus_flavomarginatus_ant.jpg");if(!c||typeof c!=="object"||!("title"in c)||!("description"in c))throw Error("Image description should return { title, description }");l.info(`[OpenAI Test] Image described: "${c.title}"`)}},{name:"openai_test_transcription",fn:async(o)=>{let r=await(await fetch("https://upload.wikimedia.org/wikipedia/commons/2/25/En-Open_Source.ogg")).arrayBuffer(),f=Buffer.from(new Uint8Array(r)),i=await o.useModel(w.TRANSCRIPTION,f);if(typeof i!=="string")throw Error("Transcription should return a string");l.info(`[OpenAI Test] Transcription: "${i.substring(0,50)}..."`)}},{name:"openai_test_text_to_speech",fn:async(o)=>{let n=await o.useModel(w.TEXT_TO_SPEECH,{text:"Hello, this is a text-to-speech test."});if(!(n instanceof ArrayBuffer)||n.byteLength===0)throw Error("TTS should return non-empty ArrayBuffer");l.info(`[OpenAI Test] TTS generated ${n.byteLength} bytes of audio`)}},{name:"openai_test_object_generation",fn:async(o)=>{let n=await o.useModel(w.OBJECT_SMALL,{prompt:"Return a JSON object with exactly these fields: name (string), age (number), active (boolean)"});if(!n||typeof n!=="object")throw Error("Object generation should return an object");l.info(`[OpenAI Test] Object generated: ${JSON.stringify(n).substring(0,100)}`)}},{name:"openai_test_research",fn:async(o)=>{let n=await o.useModel(w.RESEARCH,{input:"What is the current date and time?",tools:[{type:"web_search_preview"}],maxToolCalls:3});if(!n||typeof n!=="object"||!("text"in n))throw Error("Research should return an object with text property");if(typeof n.text!=="string"||n.text.length===0)throw Error("Research result text should be a non-empty string");l.info(`[OpenAI Test] Research completed. Text length: ${n.text.length}, Annotations: ${n.annotations?.length??0}`)}}]}]},Pn=In;export{In as openaiPlugin,Pn as default};
|
|
1
|
+
import{logger as j,ModelType as P}from"@elizaos/core";import{logger as Z}from"@elizaos/core";import{logger as Q}from"@elizaos/core";function Yo(o){if(typeof process>"u"||!process.env)return;let c=process.env[o];return c===void 0?void 0:String(c)}function T(o,c,n){let f=o.getSetting(c);if(f!==void 0&&f!==null)return String(f);return Yo(c)??n}function C(o,c,n){let f=T(o,c);if(f===void 0)return n;let k=Number.parseInt(f,10);if(!Number.isFinite(k))throw Error(`Setting '${c}' must be a valid integer, got: ${f}`);return k}function Io(o,c,n){let f=T(o,c);if(f===void 0)return n;let k=f.toLowerCase();return k==="true"||k==="1"||k==="yes"}function V(){return typeof globalThis<"u"&&typeof globalThis.document<"u"}function wo(o){return V()&&!!T(o,"OPENAI_BROWSER_BASE_URL")}function p(o){return T(o,"OPENAI_API_KEY")}function Bo(o){let c=T(o,"OPENAI_EMBEDDING_API_KEY");if(c)return Q.debug("[OpenAI] Using specific embedding API key"),c;return Q.debug("[OpenAI] Falling back to general API key for embeddings"),p(o)}function b(o,c=!1){if(V()&&!Io(o,"OPENAI_ALLOW_BROWSER_API_KEY",!1))return{};let n=c?Bo(o):p(o);return n?{Authorization:`Bearer ${n}`}:{}}function $(o){let c=T(o,"OPENAI_BROWSER_BASE_URL"),n=V()&&c?c:T(o,"OPENAI_BASE_URL")??"https://api.openai.com/v1";return Q.debug(`[OpenAI] Base URL: ${n}`),n}function Ao(o){let c=V()?T(o,"OPENAI_BROWSER_EMBEDDING_URL")??T(o,"OPENAI_BROWSER_BASE_URL"):T(o,"OPENAI_EMBEDDING_URL");if(c)return Q.debug(`[OpenAI] Using embedding base URL: ${c}`),c;return Q.debug("[OpenAI] Falling back to general base URL for embeddings"),$(o)}function N(o){return T(o,"OPENAI_SMALL_MODEL")??T(o,"SMALL_MODEL")??"gpt-5-mini"}function G(o){return T(o,"OPENAI_NANO_MODEL")??T(o,"NANO_MODEL")??N(o)}function q(o){return T(o,"OPENAI_MEDIUM_MODEL")??T(o,"MEDIUM_MODEL")??N(o)}function y(o){return T(o,"OPENAI_LARGE_MODEL")??T(o,"LARGE_MODEL")??"gpt-5"}function To(o){return T(o,"OPENAI_MEGA_MODEL")??T(o,"MEGA_MODEL")??y(o)}function Po(o){return 
T(o,"OPENAI_RESPONSE_HANDLER_MODEL")??T(o,"OPENAI_SHOULD_RESPOND_MODEL")??T(o,"RESPONSE_HANDLER_MODEL")??T(o,"SHOULD_RESPOND_MODEL")??G(o)}function ro(o){return T(o,"OPENAI_ACTION_PLANNER_MODEL")??T(o,"OPENAI_PLANNER_MODEL")??T(o,"ACTION_PLANNER_MODEL")??T(o,"PLANNER_MODEL")??q(o)}function Oo(o){return T(o,"OPENAI_EMBEDDING_MODEL")??"text-embedding-3-small"}function ho(o){return T(o,"OPENAI_IMAGE_DESCRIPTION_MODEL")??"gpt-5-mini"}function Ro(o){return T(o,"OPENAI_TRANSCRIPTION_MODEL")??"gpt-5-mini-transcribe"}function Jo(o){return T(o,"OPENAI_TTS_MODEL")??"tts-1"}function $o(o){return T(o,"OPENAI_TTS_VOICE")??"nova"}function so(o){return T(o,"OPENAI_TTS_INSTRUCTIONS")??""}function bo(o){return T(o,"OPENAI_IMAGE_MODEL")??"dall-e-3"}function jo(o){return Io(o,"OPENAI_EXPERIMENTAL_TELEMETRY",!1)}function Fo(o){return C(o,"OPENAI_EMBEDDING_DIMENSIONS",1536)}function io(o){return C(o,"OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS",8192)}function po(o){return T(o,"OPENAI_RESEARCH_MODEL")??"o3-deep-research"}function No(o){return C(o,"OPENAI_RESEARCH_TIMEOUT",3600000)}globalThis.AI_SDK_LOG_WARNINGS??=!1;function Uo(o,c){vo(c)}async function vo(o){if(V()){Z.debug("[OpenAI] Skipping API validation in browser environment");return}if(!p(o)){Z.warn("[OpenAI] OPENAI_API_KEY is not configured. OpenAI functionality will fail until a valid API key is provided.");return}try{let n=$(o),f=await fetch(`${n}/models`,{headers:b(o)});if(!f.ok){Z.warn(`[OpenAI] API key validation failed: ${f.status} ${f.statusText}. Please verify your OPENAI_API_KEY is correct.`);return}}catch(n){let f=n instanceof Error?n.message:String(n);Z.warn(`[OpenAI] API validation error: ${f}. 
OpenAI functionality may be limited.`)}}import{logger as X}from"@elizaos/core";import{logger as Mo}from"@elizaos/core";var S={WAV:{HEADER:[82,73,70,70],IDENTIFIER:[87,65,86,69]},MP3_ID3:[73,68,51],OGG:[79,103,103,83],FLAC:[102,76,97,67],FTYP:[102,116,121,112],WEBM_EBML:[26,69,223,163]},go=12;function l(o,c,n){for(let f=0;f<n.length;f++){let k=n[f];if(k===void 0||o[c+f]!==k)return!1}return!0}function D(o){if(o.length<go)return"application/octet-stream";if(l(o,0,S.WAV.HEADER)&&l(o,8,S.WAV.IDENTIFIER))return"audio/wav";let c=o[0],n=o[1];if(l(o,0,S.MP3_ID3)||c===255&&n!==void 0&&(n&224)===224)return"audio/mpeg";if(l(o,0,S.OGG))return"audio/ogg";if(l(o,0,S.FLAC))return"audio/flac";if(l(o,4,S.FTYP))return"audio/mp4";if(l(o,0,S.WEBM_EBML))return"audio/webm";return Mo.warn("Could not detect audio format from buffer, using generic binary type"),"application/octet-stream"}function to(o){switch(o){case"audio/wav":return"wav";case"audio/mpeg":return"mp3";case"audio/ogg":return"ogg";case"audio/flac":return"flac";case"audio/mp4":return"m4a";case"audio/webm":return"webm";case"application/octet-stream":return"bin"}}function zo(o){return`recording.${to(o)}`}function yo(o){return o instanceof Blob||o instanceof File}function E(o){return Buffer.isBuffer(o)}function ao(o){return typeof o==="object"&&o!==null&&"audio"in o&&(yo(o.audio)||E(o.audio))}function uo(o){return typeof o==="object"&&o!==null&&"audioUrl"in o&&typeof o.audioUrl==="string"}async function Ko(o){let c=await fetch(o);if(!c.ok)throw Error(`Failed to fetch audio from URL: ${c.status}`);return c.blob()}async function d(o,c){let n=Ro(o),f,k={};if(typeof c==="string")X.debug(`[OpenAI] Fetching audio from URL: ${c}`),f=await Ko(c);else if(yo(c))f=c;else if(E(c)){let J=D(c);X.debug(`[OpenAI] Auto-detected audio MIME type: ${J}`),f=new Blob([new Uint8Array(c)],{type:J})}else if(ao(c)){if(k=c,c.model)n=c.model;if(E(c.audio)){let J=c.mimeType??D(c.audio);X.debug(`[OpenAI] Using MIME type: ${J}`),f=new Blob([new 
Uint8Array(c.audio)],{type:J})}else f=c.audio}else if(uo(c))X.debug(`[OpenAI] Fetching audio from URL: ${c.audioUrl}`),f=await Ko(c.audioUrl),k={prompt:c.prompt};else throw Error("TRANSCRIPTION expects Blob, File, Buffer, URL string, or TranscriptionParams object");X.debug(`[OpenAI] Using TRANSCRIPTION model: ${n}`);let I=f.type||"audio/webm",O=f.name||zo(I.startsWith("audio/")?I:"audio/webm"),w=new FormData;if(w.append("file",f,O),w.append("model",n),k.language)w.append("language",k.language);if(k.responseFormat)w.append("response_format",k.responseFormat);if(k.prompt)w.append("prompt",k.prompt);if(k.temperature!==void 0)w.append("temperature",String(k.temperature));if(k.timestampGranularities)for(let J of k.timestampGranularities)w.append("timestamp_granularities[]",J);let h=$(o),A=await fetch(`${h}/audio/transcriptions`,{method:"POST",headers:b(o),body:w});if(!A.ok){let J=await A.text().catch(()=>"Unknown error");throw Error(`OpenAI transcription failed: ${A.status} ${A.statusText} - ${J}`)}return(await A.json()).text}async function _(o,c){let n,f,k="mp3",I,O;if(typeof c==="string")n=c,f=void 0;else{if(n=c.text,f=c.voice,"format"in c&&c.format)k=c.format;if("model"in c&&c.model)I=c.model;if("instructions"in c&&c.instructions)O=c.instructions}if(I=I??Jo(o),f=f??$o(o),O=O??so(o),X.debug(`[OpenAI] Using TEXT_TO_SPEECH model: ${I}`),!n||n.trim().length===0)throw Error("TEXT_TO_SPEECH requires non-empty text");if(n.length>4096)throw Error("TEXT_TO_SPEECH text exceeds 4096 character limit");let w=["alloy","echo","fable","onyx","nova","shimmer"];if(f&&!w.includes(f))throw Error(`Invalid voice: ${f}. 
Must be one of: ${w.join(", ")}`);let h=$(o),A={model:I,voice:f,input:n,response_format:k};if(O&&O.length>0)A.instructions=O;let R=await fetch(`${h}/audio/speech`,{method:"POST",headers:{...b(o),"Content-Type":"application/json",...k==="mp3"?{Accept:"audio/mpeg"}:{}},body:JSON.stringify(A)});if(!R.ok){let J=await R.text().catch(()=>"Unknown error");throw Error(`OpenAI TTS failed: ${R.status} ${R.statusText} - ${J}`)}return R.arrayBuffer()}import{logger as L,ModelType as cc,VECTOR_DIMS as nc}from"@elizaos/core";import{EventType as eo}from"@elizaos/core";var So=200;function mo(o){if(o.length<=So)return o;return`${o.slice(0,So)}…`}function oc(o){if("promptTokens"in o){let c="promptTokensDetails"in o?o.promptTokensDetails:void 0,n=o.cachedPromptTokens??c?.cachedTokens;return{promptTokens:o.promptTokens??0,completionTokens:o.completionTokens??0,totalTokens:o.totalTokens??(o.promptTokens??0)+(o.completionTokens??0),cachedPromptTokens:n}}if("inputTokens"in o||"outputTokens"in o){let c=o.inputTokens??0,n=o.outputTokens??0,f=o.totalTokens??c+n;return{promptTokens:c,completionTokens:n,totalTokens:f,cachedPromptTokens:o.cachedInputTokens}}return{promptTokens:0,completionTokens:0,totalTokens:0}}function U(o,c,n,f){let k=oc(f),I={runtime:o,source:"openai",provider:"openai",type:c,prompt:mo(n),tokens:{prompt:k.promptTokens,completion:k.completionTokens,total:k.totalTokens,...k.cachedPromptTokens!==void 0?{cached:k.cachedPromptTokens}:{}}};o.emitEvent(eo.MODEL_USED,I)}function fc(o){let c=Object.values(nc);if(!c.includes(o))throw Error(`Invalid embedding dimension: ${o}. 
Must be one of: ${c.join(", ")}`);return o}function kc(o){if(o===null)return null;if(typeof o==="string")return o;if(typeof o==="object"&&typeof o.text==="string")return o.text;throw Error("Invalid embedding params: expected string, { text: string }, or null")}async function Y(o,c){let n=Oo(o),f=fc(Fo(o)),k=kc(c);if(k===null){L.debug("[OpenAI] Creating test embedding for initialization");let i=Array(f).fill(0);return i[0]=0.1,i}let I=k.trim();if(I.length===0)throw Error("Cannot generate embedding for empty text");let O=32000;if(I.length>O)L.warn(`[OpenAI] Embedding input too long (~${Math.ceil(I.length/4)} tokens), truncating to ~8000 tokens`),I=I.slice(0,O);let h=`${Ao(o)}/embeddings`;L.debug(`[OpenAI] Generating embedding with model: ${n}`);let A=await fetch(h,{method:"POST",headers:{...b(o,!0),"Content-Type":"application/json"},body:JSON.stringify({model:n,input:I})});if(!A.ok){let i=await A.text().catch(()=>"Unknown error");throw Error(`OpenAI embedding API error: ${A.status} ${A.statusText} - ${i}`)}let R=await A.json(),J=R?.data?.[0];if(!J?.embedding)throw Error("OpenAI API returned invalid embedding response structure");let s=J.embedding;if(s.length!==f)throw Error(`Embedding dimension mismatch: got ${s.length}, expected ${f}. 
Check OPENAI_EMBEDDING_DIMENSIONS setting.`);if(R.usage)U(o,cc.TEXT_EMBEDDING,I,{promptTokens:R.usage.prompt_tokens,completionTokens:0,totalTokens:R.usage.total_tokens});return L.debug(`[OpenAI] Generated embedding with ${s.length} dimensions`),s}import{logger as xo,ModelType as Ic}from"@elizaos/core";var lo="Please analyze this image and provide a title and detailed description.";async function B(o,c){let n=bo(o),f=c.count??1,k=c.size??"1024x1024",I=c;if(xo.debug(`[OpenAI] Using IMAGE model: ${n}`),c.prompt.trim().length===0)throw Error("IMAGE generation requires a non-empty prompt");if(f<1||f>10)throw Error("IMAGE count must be between 1 and 10");let O=$(o),w={model:n,prompt:c.prompt,n:f,size:k};if(I.quality)w.quality=I.quality;if(I.style)w.style=I.style;let h=await fetch(`${O}/images/generations`,{method:"POST",headers:{...b(o),"Content-Type":"application/json"},body:JSON.stringify(w)});if(!h.ok){let R=await h.text().catch(()=>"Unknown error");throw Error(`OpenAI image generation failed: ${h.status} ${h.statusText} - ${R}`)}let A=await h.json();if(A.data.length===0)throw Error("OpenAI API returned no images");return A.data.map((R)=>({url:R.url,revisedPrompt:R.revised_prompt}))}function wc(o){return o.match(/title[:\s]+(.+?)(?:\n|$)/i)?.[1]?.trim()??"Image Analysis"}function Ac(o){return o.replace(/title[:\s]+(.+?)(?:\n|$)/i,"").trim()}async function v(o,c){let n=ho(o),f=io(o);xo.debug(`[OpenAI] Using IMAGE_DESCRIPTION model: ${n}`);let k,I;if(typeof c==="string")k=c,I=lo;else k=c.imageUrl,I=c.prompt??lo;if(!k||k.trim().length===0)throw Error("IMAGE_DESCRIPTION requires a valid image URL");let O=$(o),w={model:n,messages:[{role:"user",content:[{type:"text",text:I},{type:"image_url",image_url:{url:k}}]}],max_tokens:f},h=await fetch(`${O}/chat/completions`,{method:"POST",headers:{...b(o),"Content-Type":"application/json"},body:JSON.stringify(w)});if(!h.ok){let s=await h.text().catch(()=>"Unknown error");throw Error(`OpenAI image description failed: ${h.status} 
${h.statusText} - ${s}`)}let A=await h.json();if(A.usage)U(o,Ic.IMAGE_DESCRIPTION,typeof c==="string"?c:c.prompt??"",{promptTokens:A.usage.prompt_tokens,completionTokens:A.usage.completion_tokens,totalTokens:A.usage.total_tokens});let J=A.choices?.[0]?.message?.content;if(!J)throw Error("OpenAI API returned empty image description");return{title:wc(J),description:Ac(J)}}import{logger as Qo,ModelType as Ho}from"@elizaos/core";import{generateObject as Oc}from"ai";import{createOpenAI as Wo}from"@ai-sdk/openai";var Tc="sk-proxy";function H(o){let c=$(o),n=p(o);if(!n&&wo(o))return Wo({apiKey:Tc,baseURL:c});if(!n)throw Error("OPENAI_API_KEY is required. Set it in your environment variables or runtime settings.");return Wo({apiKey:n,baseURL:c})}import{logger as Vo}from"@elizaos/core";import{JSONParseError as Pc}from"ai";var rc={MARKDOWN_JSON:/```json\n|\n```|```/g,WHITESPACE:/^\s+|\s+$/g};function Xo(){return async({text:o,error:c})=>{if(!(c instanceof Pc))return null;try{let n=o.replace(rc.MARKDOWN_JSON,"");return JSON.parse(n),Vo.debug("[JSON Repair] Successfully repaired JSON by removing markdown wrappers"),n}catch{return Vo.warn("[JSON Repair] Unable to repair JSON text"),null}}}async function Zo(o,c,n,f){let k=H(o),I=f(o);if(Qo.debug(`[OpenAI] Using ${n} model: ${I}`),c.prompt.trim().length===0)throw Error("Object generation requires a non-empty prompt");if(c.schema)Qo.debug("[OpenAI] Schema provided but using no-schema mode. 
Structure is determined by prompt instructions.");let O=k.chat(I),{object:w,usage:h}=await Oc({model:O,output:"no-schema",prompt:c.prompt,experimental_repairText:Xo()});if(h)U(o,n,c.prompt,h);if(typeof w!=="object"||w===null)throw Error(`Object generation returned ${typeof w}, expected object`);return w}async function M(o,c){return Zo(o,c,Ho.OBJECT_SMALL,N)}async function g(o,c){return Zo(o,c,Ho.OBJECT_LARGE,y)}import{logger as z}from"@elizaos/core";function hc(o){switch(o.type){case"web_search_preview":return{type:"web_search_preview"};case"file_search":return{type:"file_search",vector_store_ids:o.vectorStoreIds};case"code_interpreter":return{type:"code_interpreter",container:o.container??{type:"auto"}};case"mcp":return{type:"mcp",server_label:o.serverLabel,server_url:o.serverUrl,require_approval:o.requireApproval??"never"};default:throw Error(`Unknown research tool type: ${o.type}`)}}function Rc(o){switch(o.type){case"web_search_call":return{id:o.id??"",type:"web_search_call",status:o.status??"completed",action:{type:o.action?.type??"search",query:o.action?.query,url:o.action?.url}};case"file_search_call":return{id:o.id??"",type:"file_search_call",status:o.status??"completed",query:o.query??"",results:o.results?.map((c)=>({fileId:c.file_id,fileName:c.file_name,score:c.score}))};case"code_interpreter_call":return{id:o.id??"",type:"code_interpreter_call",status:o.status??"completed",code:o.code??"",output:o.output};case"mcp_tool_call":return{id:o.id??"",type:"mcp_tool_call",status:o.status??"completed",serverLabel:o.server_label??"",toolName:o.tool_name??"",arguments:o.arguments??{},result:o.result};case"message":return{type:"message",content:o.content?.map((c)=>({type:"output_text",text:c.text,annotations:c.annotations?.map((n)=>({url:n.url,title:n.title,startIndex:n.start_index,endIndex:n.end_index}))??[]}))??[]};default:return null}}function Jc(o){if(o.output_text){let f=[];if(o.output){for(let k of o.output)if(k.type==="message"&&k.content){for(let I of 
k.content)if(I.annotations)for(let O of I.annotations)f.push({url:O.url,title:O.title,startIndex:O.start_index,endIndex:O.end_index})}}return{text:o.output_text,annotations:f}}let c="",n=[];if(o.output){for(let f of o.output)if(f.type==="message"&&f.content){for(let k of f.content)if(c+=k.text,k.annotations)for(let I of k.annotations)n.push({url:I.url,title:I.title,startIndex:I.start_index,endIndex:I.end_index})}}return{text:c,annotations:n}}async function t(o,c){let n=p(o);if(!n)throw Error("OPENAI_API_KEY is required for deep research. Set it in your environment variables or runtime settings.");let f=$(o),k=c.model??po(o),I=No(o);z.debug(`[OpenAI] Starting deep research with model: ${k}`),z.debug(`[OpenAI] Research input: ${c.input.substring(0,100)}...`);let O=c.tools?.filter((F)=>F.type==="web_search_preview"||F.type==="file_search"||F.type==="mcp");if(!O||O.length===0)z.debug("[OpenAI] No data source tools specified, defaulting to web_search_preview"),c.tools=[{type:"web_search_preview"},...c.tools??[]];let w={model:k,input:c.input};if(c.instructions)w.instructions=c.instructions;if(c.background!==void 0)w.background=c.background;if(c.tools&&c.tools.length>0)w.tools=c.tools.map(hc);if(c.maxToolCalls!==void 0)w.max_tool_calls=c.maxToolCalls;if(c.reasoningSummary)w.reasoning={summary:c.reasoningSummary};z.debug(`[OpenAI] Research request body: ${JSON.stringify(w,null,2)}`);let h=await fetch(`${f}/responses`,{method:"POST",headers:{Authorization:`Bearer ${n}`,"Content-Type":"application/json"},body:JSON.stringify(w),signal:AbortSignal.timeout(I)});if(!h.ok){let F=await h.text();throw z.error(`[OpenAI] Research request failed: ${h.status} ${F}`),Error(`Deep research request failed: ${h.status} ${h.statusText}`)}let A=await h.json();if(A.error)throw z.error(`[OpenAI] Research API error: ${A.error.message}`),Error(`Deep research error: ${A.error.message}`);z.debug(`[OpenAI] Research response received. 
Status: ${A.status??"completed"}`);let{text:R,annotations:J}=Jc(A),s=[];if(A.output)for(let F of A.output){let K=Rc(F);if(K)s.push(K)}let i={id:A.id,text:R,annotations:J,outputItems:s,status:A.status};return z.info(`[OpenAI] Research completed. Text length: ${R.length}, Annotations: ${J.length}, Output items: ${s.length}`),i}import{logger as $c,ModelType as x}from"@elizaos/core";import{generateText as sc,streamText as bc}from"ai";var jc=x.TEXT_NANO??"TEXT_NANO",Fc=x.TEXT_MEDIUM??"TEXT_MEDIUM",ic=x.TEXT_MEGA??"TEXT_MEGA",pc=x.RESPONSE_HANDLER??"RESPONSE_HANDLER",Nc=x.ACTION_PLANNER??"ACTION_PLANNER";function Uc(o){let c=[{type:"text",text:o.prompt}];for(let n of o.attachments??[])c.push({type:"file",data:n.data,mediaType:n.mediaType,...n.filename?{filename:n.filename}:{}});return c}function zc(o){if(!o)return;let c=o.inputTokens??0,n=o.outputTokens??0,f=o;return{promptTokens:c,completionTokens:n,totalTokens:c+n,cachedPromptTokens:f.cachedInputTokens}}function Kc(o){let c=o;return{promptCacheKey:c.providerOptions?.openai?.promptCacheKey,promptCacheRetention:c.providerOptions?.openai?.promptCacheRetention}}async function W(o,c,n,f){let k=c,I=H(o),O=f(o);$c.debug(`[OpenAI] Using ${n} model: ${O}`);let w=Kc(c),A=(k.attachments?.length??0)>0?Uc(k):void 0,R=o.character.system??void 0,s={model:I.chat(O),...A?{messages:[{role:"user",content:A}]}:{prompt:c.prompt},system:R,maxOutputTokens:c.maxTokens??8192,experimental_telemetry:{isEnabled:jo(o)},...w.promptCacheKey||w.promptCacheRetention?{providerOptions:{openai:{...w.promptCacheKey?{promptCacheKey:w.promptCacheKey}:{},...w.promptCacheRetention?{promptCacheRetention:w.promptCacheRetention}:{}}}}:{}};if(c.stream){let K=bc(s);return{textStream:K.textStream,text:Promise.resolve(K.text),usage:Promise.resolve(K.usage).then(zc),finishReason:Promise.resolve(K.finishReason).then((_o)=>_o)}}let{text:i,usage:F}=await sc(s);if(F)U(o,n,c.prompt,F);return i}async function a(o,c){return W(o,c,x.TEXT_SMALL,N)}async function u(o,c){return 
W(o,c,jc,G)}async function e(o,c){return W(o,c,Fc,q)}async function m(o,c){return W(o,c,x.TEXT_LARGE,y)}async function oo(o,c){return W(o,c,ic,To)}async function co(o,c){return W(o,c,pc,Po)}async function no(o,c){return W(o,c,Nc,ro)}import{ModelType as Do}from"@elizaos/core";import{ModelType as yc}from"@elizaos/core";import{encodingForModel as Sc,getEncoding as lc}from"js-tiktoken";function Lo(o){let n=o.toLowerCase().includes("4o")?"o200k_base":"cl100k_base";try{return Sc(o)}catch{return lc(n)}}function Co(o,c){if(c===yc.TEXT_SMALL)return N(o);return y(o)}function Go(o,c,n){let f=Co(o,c);return Lo(f).encode(n)}function qo(o,c,n){let f=Co(o,c);return Lo(f).decode(n)}async function fo(o,c){if(!c.prompt)throw Error("Tokenization requires a non-empty prompt");let n=c.modelType??Do.TEXT_LARGE;return Go(o,n,c.prompt)}async function ko(o,c){if(!c.tokens||!Array.isArray(c.tokens))throw Error("Detokenization requires a valid tokens array");if(c.tokens.length===0)return"";for(let f=0;f<c.tokens.length;f++){let k=c.tokens[f];if(typeof k!=="number"||!Number.isFinite(k))throw Error(`Invalid token at index ${f}: expected number`)}let n=c.modelType??Do.TEXT_LARGE;return qo(o,n,c.tokens)}function xc(){if(typeof process>"u")return{};return process.env}var r=xc(),Wc=P.TEXT_NANO??"TEXT_NANO",Vc=P.TEXT_MEDIUM??"TEXT_MEDIUM",Xc=P.TEXT_MEGA??"TEXT_MEGA",Qc=P.RESPONSE_HANDLER??"RESPONSE_HANDLER",Hc=P.ACTION_PLANNER??"ACTION_PLANNER",Zc={name:"openai",description:"OpenAI API integration for text, image, audio, and embedding 
models",config:{OPENAI_API_KEY:r.OPENAI_API_KEY??null,OPENAI_BASE_URL:r.OPENAI_BASE_URL??null,OPENAI_NANO_MODEL:r.OPENAI_NANO_MODEL??null,OPENAI_MEDIUM_MODEL:r.OPENAI_MEDIUM_MODEL??null,OPENAI_SMALL_MODEL:r.OPENAI_SMALL_MODEL??null,OPENAI_LARGE_MODEL:r.OPENAI_LARGE_MODEL??null,OPENAI_MEGA_MODEL:r.OPENAI_MEGA_MODEL??null,OPENAI_RESPONSE_HANDLER_MODEL:r.OPENAI_RESPONSE_HANDLER_MODEL??null,OPENAI_SHOULD_RESPOND_MODEL:r.OPENAI_SHOULD_RESPOND_MODEL??null,OPENAI_ACTION_PLANNER_MODEL:r.OPENAI_ACTION_PLANNER_MODEL??null,OPENAI_PLANNER_MODEL:r.OPENAI_PLANNER_MODEL??null,NANO_MODEL:r.NANO_MODEL??null,MEDIUM_MODEL:r.MEDIUM_MODEL??null,SMALL_MODEL:r.SMALL_MODEL??null,LARGE_MODEL:r.LARGE_MODEL??null,MEGA_MODEL:r.MEGA_MODEL??null,RESPONSE_HANDLER_MODEL:r.RESPONSE_HANDLER_MODEL??null,SHOULD_RESPOND_MODEL:r.SHOULD_RESPOND_MODEL??null,ACTION_PLANNER_MODEL:r.ACTION_PLANNER_MODEL??null,PLANNER_MODEL:r.PLANNER_MODEL??null,OPENAI_EMBEDDING_MODEL:r.OPENAI_EMBEDDING_MODEL??null,OPENAI_EMBEDDING_API_KEY:r.OPENAI_EMBEDDING_API_KEY??null,OPENAI_EMBEDDING_URL:r.OPENAI_EMBEDDING_URL??null,OPENAI_EMBEDDING_DIMENSIONS:r.OPENAI_EMBEDDING_DIMENSIONS??null,OPENAI_IMAGE_DESCRIPTION_MODEL:r.OPENAI_IMAGE_DESCRIPTION_MODEL??null,OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS:r.OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS??null,OPENAI_EXPERIMENTAL_TELEMETRY:r.OPENAI_EXPERIMENTAL_TELEMETRY??null,OPENAI_RESEARCH_MODEL:r.OPENAI_RESEARCH_MODEL??null,OPENAI_RESEARCH_TIMEOUT:r.OPENAI_RESEARCH_TIMEOUT??null},async init(o,c){Uo(o,c)},models:{[P.TEXT_EMBEDDING]:async(o,c)=>{return Y(o,c)},[P.TEXT_TOKENIZER_ENCODE]:async(o,c)=>{return fo(o,c)},[P.TEXT_TOKENIZER_DECODE]:async(o,c)=>{return ko(o,c)},[P.TEXT_SMALL]:async(o,c)=>{return a(o,c)},[Wc]:async(o,c)=>{return u(o,c)},[Vc]:async(o,c)=>{return e(o,c)},[P.TEXT_LARGE]:async(o,c)=>{return m(o,c)},[Xc]:async(o,c)=>{return oo(o,c)},[Qc]:async(o,c)=>{return co(o,c)},[Hc]:async(o,c)=>{return no(o,c)},[P.IMAGE]:async(o,c)=>{return B(o,c)},[P.IMAGE_DESCRIPTION]:async(o,c)=>{return 
v(o,c)},[P.TRANSCRIPTION]:async(o,c)=>{return d(o,c)},[P.TEXT_TO_SPEECH]:async(o,c)=>{return _(o,c)},[P.OBJECT_SMALL]:async(o,c)=>{return M(o,c)},[P.OBJECT_LARGE]:async(o,c)=>{return g(o,c)},[P.RESEARCH]:async(o,c)=>{return t(o,c)}},tests:[{name:"openai_plugin_tests",tests:[{name:"openai_test_api_connectivity",fn:async(o)=>{let c=$(o),n=await fetch(`${c}/models`,{headers:b(o)});if(!n.ok)throw Error(`API connectivity test failed: ${n.status} ${n.statusText}`);let f=await n.json();j.info(`[OpenAI Test] API connected. ${f.data?.length??0} models available.`)}},{name:"openai_test_text_embedding",fn:async(o)=>{let c=await o.useModel(P.TEXT_EMBEDDING,{text:"Hello, world!"});if(!Array.isArray(c)||c.length===0)throw Error("Embedding should return a non-empty array");j.info(`[OpenAI Test] Generated embedding with ${c.length} dimensions`)}},{name:"openai_test_text_small",fn:async(o)=>{let c=await o.useModel(P.TEXT_SMALL,{prompt:"Say hello in exactly 5 words."});if(typeof c!=="string"||c.length===0)throw Error("TEXT_SMALL should return non-empty string");j.info(`[OpenAI Test] TEXT_SMALL generated: "${c.substring(0,50)}..."`)}},{name:"openai_test_text_large",fn:async(o)=>{let c=await o.useModel(P.TEXT_LARGE,{prompt:"Explain quantum computing in 2 sentences."});if(typeof c!=="string"||c.length===0)throw Error("TEXT_LARGE should return non-empty string");j.info(`[OpenAI Test] TEXT_LARGE generated: "${c.substring(0,50)}..."`)}},{name:"openai_test_tokenizer_roundtrip",fn:async(o)=>{let n=await o.useModel(P.TEXT_TOKENIZER_ENCODE,{prompt:"Hello, tokenizer test!",modelType:P.TEXT_SMALL});if(!Array.isArray(n)||n.length===0)throw Error("Tokenization should return non-empty token array");let f=await o.useModel(P.TEXT_TOKENIZER_DECODE,{tokens:n,modelType:P.TEXT_SMALL});if(f!=="Hello, tokenizer test!")throw Error(`Tokenizer roundtrip failed: expected "Hello, tokenizer test!", got "${f}"`);j.info(`[OpenAI Test] Tokenizer roundtrip successful (${n.length} 
tokens)`)}},{name:"openai_test_streaming",fn:async(o)=>{let c=[],n=await o.useModel(P.TEXT_LARGE,{prompt:"Count from 1 to 5, one number per line.",stream:!0,onStreamChunk:(f)=>{c.push(f)}});if(typeof n!=="string"||n.length===0)throw Error("Streaming should return non-empty result");if(c.length===0)throw Error("No streaming chunks received");j.info(`[OpenAI Test] Streaming test: ${c.length} chunks received`)}},{name:"openai_test_image_description",fn:async(o)=>{let n=await o.useModel(P.IMAGE_DESCRIPTION,"https://upload.wikimedia.org/wikipedia/commons/thumb/a/a7/Camponotus_flavomarginatus_ant.jpg/440px-Camponotus_flavomarginatus_ant.jpg");if(!n||typeof n!=="object"||!("title"in n)||!("description"in n))throw Error("Image description should return { title, description }");j.info(`[OpenAI Test] Image described: "${n.title}"`)}},{name:"openai_test_transcription",fn:async(o)=>{let f=await(await fetch("https://upload.wikimedia.org/wikipedia/commons/2/25/En-Open_Source.ogg")).arrayBuffer(),k=Buffer.from(new Uint8Array(f)),I=await o.useModel(P.TRANSCRIPTION,k);if(typeof I!=="string")throw Error("Transcription should return a string");j.info(`[OpenAI Test] Transcription: "${I.substring(0,50)}..."`)}},{name:"openai_test_text_to_speech",fn:async(o)=>{let c=await o.useModel(P.TEXT_TO_SPEECH,{text:"Hello, this is a text-to-speech test."});if(!(c instanceof ArrayBuffer)||c.byteLength===0)throw Error("TTS should return non-empty ArrayBuffer");j.info(`[OpenAI Test] TTS generated ${c.byteLength} bytes of audio`)}},{name:"openai_test_object_generation",fn:async(o)=>{let c=await o.useModel(P.OBJECT_SMALL,{prompt:"Return a JSON object with exactly these fields: name (string), age (number), active (boolean)"});if(!c||typeof c!=="object")throw Error("Object generation should return an object");j.info(`[OpenAI Test] Object generated: ${JSON.stringify(c).substring(0,100)}`)}},{name:"openai_test_research",fn:async(o)=>{let c=await o.useModel(P.RESEARCH,{input:"What is the current date and 
time?",tools:[{type:"web_search_preview"}],maxToolCalls:3});if(!c||typeof c!=="object"||!("text"in c))throw Error("Research should return an object with text property");if(typeof c.text!=="string"||c.text.length===0)throw Error("Research result text should be a non-empty string");j.info(`[OpenAI Test] Research completed. Text length: ${c.text.length}, Annotations: ${c.annotations?.length??0}`)}}]}]},Eo=Zc;var un=Eo;export{Zc as openaiPlugin,un as default};
|
|
2
2
|
|
|
3
|
-
//# debugId=
|
|
3
|
+
//# debugId=0D9712B5ABF160A464756E2164756E21
|