@plasius/ai 1.1.4 → 1.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +22 -2
- package/README.md +101 -2
- package/dist/components/pixelverse/balance.d.ts +6 -2
- package/dist/components/pixelverse/balance.d.ts.map +1 -1
- package/dist/components/pixelverse/balance.js +13 -23
- package/dist/components/pixelverse/index.d.ts +1 -1
- package/dist/components/pixelverse/index.d.ts.map +1 -1
- package/dist/components/pixelverse/index.js +1 -1
- package/dist/components/pixelverse/video-generation-editor.d.ts +10 -0
- package/dist/components/pixelverse/video-generation-editor.d.ts.map +1 -0
- package/dist/components/pixelverse/video-generation-editor.js +79 -0
- package/dist/platform/adapter-platform.d.ts +60 -0
- package/dist/platform/adapter-platform.d.ts.map +1 -0
- package/dist/platform/adapter-platform.js +222 -0
- package/dist/platform/gemini-adapter.d.ts +15 -0
- package/dist/platform/gemini-adapter.d.ts.map +1 -0
- package/dist/platform/gemini-adapter.js +293 -0
- package/dist/platform/http-resilience.d.ts +19 -0
- package/dist/platform/http-resilience.d.ts.map +1 -0
- package/dist/platform/http-resilience.js +126 -0
- package/dist/platform/index.d.ts +22 -1
- package/dist/platform/index.d.ts.map +1 -1
- package/dist/platform/index.js +24 -0
- package/dist/platform/openai-adapter.d.ts +24 -0
- package/dist/platform/openai-adapter.d.ts.map +1 -0
- package/dist/platform/openai-adapter.js +398 -0
- package/dist/platform/video-provider-adapter.d.ts +54 -0
- package/dist/platform/video-provider-adapter.d.ts.map +1 -0
- package/dist/platform/video-provider-adapter.js +165 -0
- package/dist/platform/video-provider-platform.d.ts +13 -0
- package/dist/platform/video-provider-platform.d.ts.map +1 -0
- package/dist/platform/video-provider-platform.js +102 -0
- package/dist-cjs/components/pixelverse/balance.d.ts +6 -2
- package/dist-cjs/components/pixelverse/balance.d.ts.map +1 -1
- package/dist-cjs/components/pixelverse/balance.js +13 -23
- package/dist-cjs/components/pixelverse/index.d.ts +1 -1
- package/dist-cjs/components/pixelverse/index.d.ts.map +1 -1
- package/dist-cjs/components/pixelverse/index.js +1 -1
- package/dist-cjs/components/pixelverse/video-generation-editor.d.ts +10 -0
- package/dist-cjs/components/pixelverse/video-generation-editor.d.ts.map +1 -0
- package/dist-cjs/components/pixelverse/video-generation-editor.js +85 -0
- package/dist-cjs/platform/adapter-platform.d.ts +60 -0
- package/dist-cjs/platform/adapter-platform.d.ts.map +1 -0
- package/dist-cjs/platform/adapter-platform.js +225 -0
- package/dist-cjs/platform/gemini-adapter.d.ts +15 -0
- package/dist-cjs/platform/gemini-adapter.d.ts.map +1 -0
- package/dist-cjs/platform/gemini-adapter.js +296 -0
- package/dist-cjs/platform/http-resilience.d.ts +19 -0
- package/dist-cjs/platform/http-resilience.d.ts.map +1 -0
- package/dist-cjs/platform/http-resilience.js +129 -0
- package/dist-cjs/platform/index.d.ts +22 -1
- package/dist-cjs/platform/index.d.ts.map +1 -1
- package/dist-cjs/platform/index.js +30 -1
- package/dist-cjs/platform/openai-adapter.d.ts +24 -0
- package/dist-cjs/platform/openai-adapter.d.ts.map +1 -0
- package/dist-cjs/platform/openai-adapter.js +401 -0
- package/dist-cjs/platform/video-provider-adapter.d.ts +54 -0
- package/dist-cjs/platform/video-provider-adapter.d.ts.map +1 -0
- package/dist-cjs/platform/video-provider-adapter.js +168 -0
- package/dist-cjs/platform/video-provider-platform.d.ts +13 -0
- package/dist-cjs/platform/video-provider-platform.d.ts.map +1 -0
- package/dist-cjs/platform/video-provider-platform.js +105 -0
- package/docs/api-reference.md +59 -0
- package/docs/architecture.md +5 -1
- package/docs/providers.md +24 -6
- package/package.json +6 -6
- package/src/components/pixelverse/balance.tsx +22 -35
- package/src/components/pixelverse/index.ts +1 -1
- package/src/components/pixelverse/video-generation-editor.tsx +164 -0
- package/src/platform/adapter-platform.ts +440 -0
- package/src/platform/gemini-adapter.ts +391 -0
- package/src/platform/http-resilience.ts +198 -0
- package/src/platform/index.ts +68 -0
- package/src/platform/openai-adapter.ts +552 -0
- package/src/platform/video-provider-adapter.ts +303 -0
- package/src/platform/video-provider-platform.ts +208 -0
- package/dist/components/pixelverse/pixelverseeditor.d.ts +0 -16
- package/dist/components/pixelverse/pixelverseeditor.d.ts.map +0 -1
- package/dist/components/pixelverse/pixelverseeditor.js +0 -21
- package/dist/platform/openai.d.ts +0 -8
- package/dist/platform/openai.d.ts.map +0 -1
- package/dist/platform/openai.js +0 -61
- package/dist/platform/pixelverse.d.ts +0 -6
- package/dist/platform/pixelverse.d.ts.map +0 -1
- package/dist/platform/pixelverse.js +0 -196
- package/dist-cjs/components/pixelverse/pixelverseeditor.d.ts +0 -16
- package/dist-cjs/components/pixelverse/pixelverseeditor.d.ts.map +0 -1
- package/dist-cjs/components/pixelverse/pixelverseeditor.js +0 -27
- package/dist-cjs/platform/openai.d.ts +0 -8
- package/dist-cjs/platform/openai.d.ts.map +0 -1
- package/dist-cjs/platform/openai.js +0 -67
- package/dist-cjs/platform/pixelverse.d.ts +0 -6
- package/dist-cjs/platform/pixelverse.d.ts.map +0 -1
- package/dist-cjs/platform/pixelverse.js +0 -199
- package/src/components/pixelverse/pixelverseeditor.mocule.css +0 -0
- package/src/components/pixelverse/pixelverseeditor.tsx +0 -74
- package/src/platform/openai.ts +0 -123
- package/src/platform/pixelverse.ts +0 -309
|
@@ -0,0 +1,398 @@
|
|
|
1
|
+
import { performance } from "node:perf_hooks";
|
|
2
|
+
import { AICapability, } from "./index.js";
|
|
3
|
+
import { fetchWithPolicy } from "./http-resilience.js";
|
|
4
|
+
// Resolve the API base URL: default to the public OpenAI endpoint and
// strip a single trailing slash so path joins never double up.
function normalizeBaseUrl(baseUrl) {
    const trimmed = (baseUrl ?? "https://api.openai.com/v1").trim();
    if (trimmed.endsWith("/")) {
        return trimmed.slice(0, -1);
    }
    return trimmed;
}
|
|
8
|
+
// Pick the fetch implementation for this adapter.
//
// Prefers the caller-supplied function; otherwise falls back to the global
// fetch. The global is bound to globalThis because the original returned it
// detached, which throws "Illegal invocation" when later called without its
// receiver in browser/window contexts. Throws when no implementation exists.
function resolveFetch(fetchFn) {
    if (fetchFn) {
        return fetchFn;
    }
    if (typeof fetch !== "undefined") {
        return fetch.bind(globalThis);
    }
    throw new Error("No fetch implementation available for OpenAI adapter.");
}
|
|
15
|
+
// Narrow an unknown value to an object (arrays included, since typeof
// treats them as "object"); anything else collapses to an empty record.
function asRecord(value) {
    if (value !== null && typeof value === "object") {
        return value;
    }
    return {};
}
|
|
18
|
+
// Accept only finite numbers; NaN, Infinity and non-numbers yield undefined.
function asNumber(value) {
    if (typeof value !== "number") {
        return undefined;
    }
    return Number.isFinite(value) ? value : undefined;
}
|
|
21
|
+
// Only non-empty strings survive; everything else maps to undefined.
function asString(value) {
    if (typeof value !== "string") {
        return undefined;
    }
    return value === "" ? undefined : value;
}
|
|
24
|
+
// Decode an HTTP response body. JSON content types are parsed directly;
// other payloads are read as text and opportunistically JSON-parsed,
// falling back to the raw text, or undefined for an empty body.
async function parseResponseBody(response) {
    const contentType = response.headers.get("content-type") ?? "";
    const isJson = contentType.includes("application/json");
    if (isJson) {
        return await response.json();
    }
    const raw = await response.text();
    if (raw.length === 0) {
        return undefined;
    }
    try {
        return JSON.parse(raw);
    }
    catch {
        return raw;
    }
}
|
|
40
|
+
// Extract a human-readable error from the common OpenAI error shapes, in
// order of specificity: { error: { message } }, then { message }, then a
// plain string { error }. Falls back to the supplied message otherwise.
function resolveErrorMessage(body, fallback) {
    const payload = asRecord(body);
    const nested = asRecord(payload.error);
    const candidates = [nested.message, payload.message, payload.error];
    for (const candidate of candidates) {
        const text = asString(candidate);
        if (text !== undefined) {
            return text;
        }
    }
    return fallback;
}
|
|
48
|
+
// Normalise and validate the API key; blank/whitespace-only keys are
// rejected up front with a clear error instead of failing at the server.
function requireApiKey(apiKey) {
    const normalized = apiKey.trim();
    if (normalized.length === 0) {
        throw new Error("OpenAI API key is required.");
    }
    return normalized;
}
|
|
55
|
+
// Normalise token-usage counters into a single shape, accepting both the
// chat-style names (prompt_tokens/completion_tokens) and the responses-style
// names (input_tokens/output_tokens). Non-finite or missing counters are
// omitted; returns undefined when no counter survives.
function usageFromPayload(payload) {
    const record = asRecord(payload);
    const input = asNumber(record.prompt_tokens ?? record.input_tokens);
    const output = asNumber(record.completion_tokens ?? record.output_tokens);
    const total = asNumber(record.total_tokens);
    const usage = {};
    if (input !== undefined) {
        usage.inputTokens = input;
    }
    if (output !== undefined) {
        usage.outputTokens = output;
    }
    if (total !== undefined) {
        usage.totalTokens = total;
    }
    return Object.keys(usage).length > 0 ? usage : undefined;
}
|
|
72
|
+
function createBaseCompletion(type, model, requestor, durationMs, usage) {
|
|
73
|
+
return {
|
|
74
|
+
partitionKey: requestor,
|
|
75
|
+
id: crypto.randomUUID(),
|
|
76
|
+
type,
|
|
77
|
+
model,
|
|
78
|
+
createdAt: new Date().toISOString(),
|
|
79
|
+
durationMs,
|
|
80
|
+
usage: usage ?? {},
|
|
81
|
+
};
|
|
82
|
+
}
|
|
83
|
+
// Pull the assistant reply out of the first choice of a chat response.
// Content may be a plain string or an array of { text } parts (multimodal
// responses); empty content in either shape is treated as missing.
function extractChatMessage(body) {
    const root = asRecord(body);
    const choices = Array.isArray(root.choices) ? root.choices : [];
    const firstChoice = choices[0] && typeof choices[0] === "object" ? asRecord(choices[0]) : {};
    const message = asRecord(firstChoice.message);
    const content = message.content;
    if (typeof content === "string" && content.length > 0) {
        return content;
    }
    if (Array.isArray(content)) {
        const joined = content
            .map((part) => asString(asRecord(part).text) ?? "")
            .join("")
            .trim();
        if (joined.length > 0) {
            return joined;
        }
    }
    throw new Error("OpenAI chat response did not contain assistant content.");
}
|
|
106
|
+
// Resolve an image from a generation response: prefer a hosted URL, then
// fall back to wrapping the base64 payload (b64_json) as a data: URL.
function extractImageUrl(body) {
    const root = asRecord(body);
    const data = Array.isArray(root.data) ? root.data : [];
    const first = data[0] && typeof data[0] === "object" ? asRecord(data[0]) : {};
    const hostedUrl = asString(first.url);
    if (hostedUrl) {
        return new URL(hostedUrl);
    }
    const encoded = asString(first.b64_json);
    if (encoded) {
        return new URL(`data:image/png;base64,${encoded}`);
    }
    throw new Error("OpenAI image generation response did not contain image data.");
}
|
|
120
|
+
// Read the transcript from a transcription response, accepting either the
// audio-API field `text` or the responses-API field `output_text`.
function extractTranscriptionText(body) {
    const root = asRecord(body);
    const transcript = asString(root.text) ?? asString(root.output_text);
    if (transcript === undefined) {
        throw new Error("OpenAI transcription response did not contain text.");
    }
    return transcript;
}
|
|
128
|
+
// Interpret the model-generation reply. Preferred shape is a JSON object
// { modelId, artifactUrl? }; a blank reply yields a fresh random id, and
// any other text is treated as the model id verbatim.
//
// Fix: previously `new URL(artifactUrl)` ran inside the JSON.parse
// try-block, so a syntactically valid reply with an invalid artifactUrl
// threw, fell into the catch, and returned the ENTIRE raw JSON text as the
// model id. Now the parsed modelId is kept and only the bad URL is dropped.
function parseGeneratedModel(content) {
    const trimmed = content.trim();
    if (!trimmed) {
        return {
            modelId: crypto.randomUUID(),
        };
    }
    let modelId;
    let artifactUrl;
    try {
        const root = asRecord(JSON.parse(trimmed));
        modelId = asString(root.modelId);
        artifactUrl = asString(root.artifactUrl);
    }
    catch {
        // Not JSON — fall through to plain-text handling below.
    }
    if (modelId) {
        let parsedUrl;
        if (artifactUrl) {
            try {
                parsedUrl = new URL(artifactUrl);
            }
            catch {
                // Malformed URL: keep the id, omit the artifact link.
                parsedUrl = undefined;
            }
        }
        return {
            modelId,
            artifactUrl: parsedUrl,
        };
    }
    return {
        modelId: trimmed,
    };
}
|
|
154
|
+
/**
 * Builds an AI provider adapter backed by the OpenAI HTTP API.
 *
 * Exposes chat, speech synthesis, transcription, image generation, model
 * generation and a balance check (a local stub — see checkBalance). Every
 * HTTP call goes through fetchWithPolicy with the configured resilience
 * policy and carries Authorization, idempotency and tracing headers.
 *
 * @param options Optional overrides: id, baseUrl, fetchFn, httpPolicy,
 *   defaultModels, speech (voice/format) and image (size) settings.
 */
export function createOpenAIAdapter(options = {}) {
    // A blank custom id also falls back to "openai" via the `|| "openai"`.
    const providerId = (options.id ?? "openai").trim() || "openai";
    const baseUrl = normalizeBaseUrl(options.baseUrl);
    const fetcher = resolveFetch(options.fetchFn);
    // POST /chat/completions with optional system context + user input.
    const chatWithAI = async (request) => {
        const startedAt = performance.now();
        const apiKey = requireApiKey(request.apiKey);
        // Resolution order: per-request model, configured default, fallback.
        const resolvedModel = request.model || options.defaultModels?.chat || "gpt-4.1-mini";
        const response = await fetchWithPolicy({
            url: `${baseUrl}/chat/completions`,
            operation: "OpenAI chat request",
            fetchFn: fetcher,
            policy: options.httpPolicy,
            // A factory so each retry attempt gets a fresh RequestInit.
            createRequestInit: () => ({
                method: "POST",
                headers: {
                    Authorization: `Bearer ${apiKey}`,
                    "Content-Type": "application/json",
                    // traceId doubles as idempotency + correlation key.
                    "Idempotency-Key": request.traceId,
                    "X-Request-Id": request.traceId,
                    "X-Plasius-Client": "@plasius/ai/openai-adapter",
                },
                body: JSON.stringify({
                    model: resolvedModel,
                    messages: [
                        ...(request.context
                            ? [{ role: "system", content: request.context }]
                            : []),
                        { role: "user", content: request.input },
                    ],
                }),
            }),
        });
        const body = await parseResponseBody(response);
        if (!response.ok) {
            throw new Error(resolveErrorMessage(body, `OpenAI chat request failed (${response.status} ${response.statusText}).`));
        }
        const message = extractChatMessage(body);
        const usage = usageFromPayload(asRecord(body).usage);
        const durationMs = performance.now() - startedAt;
        const base = createBaseCompletion("chat", resolvedModel, request.userId, durationMs, usage);
        return {
            ...base,
            message,
            outputUser: "assistant",
        };
    };
    // POST /audio/speech: returns the audio inlined as a base64 data: URL.
    const synthesizeSpeech = async (request) => {
        const startedAt = performance.now();
        const apiKey = requireApiKey(request.apiKey);
        const resolvedModel = request.model || options.defaultModels?.speech || "gpt-4o-mini-tts";
        const resolvedVoice = request.voice || options.speech?.voice || "alloy";
        const resolvedFormat = options.speech?.format ?? "mp3";
        const response = await fetchWithPolicy({
            url: `${baseUrl}/audio/speech`,
            operation: "OpenAI speech synthesis",
            fetchFn: fetcher,
            policy: options.httpPolicy,
            createRequestInit: () => ({
                method: "POST",
                headers: {
                    Authorization: `Bearer ${apiKey}`,
                    "Content-Type": "application/json",
                    "Idempotency-Key": request.traceId,
                    "X-Request-Id": request.traceId,
                    "X-Plasius-Client": "@plasius/ai/openai-adapter",
                },
                body: JSON.stringify({
                    model: resolvedModel,
                    input: request.input,
                    voice: resolvedVoice,
                    format: resolvedFormat,
                }),
            }),
        });
        if (!response.ok) {
            // Body is only decoded on failure; success carries binary audio.
            const errorBody = await parseResponseBody(response);
            throw new Error(resolveErrorMessage(errorBody, `OpenAI speech synthesis failed (${response.status} ${response.statusText}).`));
        }
        const mimeTypeHeader = response.headers.get("content-type");
        // Strip any "; charset=..." suffix; default to audio/mpeg.
        const mimeType = (mimeTypeHeader?.split(";")[0]?.trim() || "audio/mpeg").toLowerCase();
        // Inline the bytes so callers need no separate file storage.
        const bytes = Buffer.from(await response.arrayBuffer()).toString("base64");
        const durationMs = performance.now() - startedAt;
        const base = createBaseCompletion("speech", resolvedModel, request.userId, durationMs);
        return {
            ...base,
            url: new URL(`data:${mimeType};base64,${bytes}`),
        };
    };
    // POST /audio/transcriptions: multipart upload of raw audio bytes.
    const transcribeSpeech = async (request) => {
        const startedAt = performance.now();
        const apiKey = requireApiKey(request.apiKey);
        const resolvedModel = request.model || options.defaultModels?.transcription || "gpt-4o-mini-transcribe";
        const response = await fetchWithPolicy({
            url: `${baseUrl}/audio/transcriptions`,
            operation: "OpenAI transcription",
            fetchFn: fetcher,
            policy: options.httpPolicy,
            createRequestInit: () => {
                const formData = new FormData();
                formData.append("model", resolvedModel);
                const fileBytes = Uint8Array.from(request.input);
                // NOTE(review): filename hard-codes .webm and the part is sent
                // as application/octet-stream — confirm callers supply webm.
                formData.append("file", new Blob([fileBytes], { type: "application/octet-stream" }), "audio.webm");
                return {
                    method: "POST",
                    headers: {
                        Authorization: `Bearer ${apiKey}`,
                        "Idempotency-Key": request.traceId,
                        "X-Request-Id": request.traceId,
                        "X-Plasius-Client": "@plasius/ai/openai-adapter",
                    },
                    body: formData,
                };
            },
        });
        const body = await parseResponseBody(response);
        if (!response.ok) {
            throw new Error(resolveErrorMessage(body, `OpenAI transcription failed (${response.status} ${response.statusText}).`));
        }
        const message = extractTranscriptionText(body);
        const durationMs = performance.now() - startedAt;
        const base = createBaseCompletion("text", resolvedModel, request.userId, durationMs);
        return {
            ...base,
            message,
        };
    };
    // POST /images/generations: any context is prepended to the prompt.
    const generateImage = async (request) => {
        const startedAt = performance.now();
        const apiKey = requireApiKey(request.apiKey);
        const resolvedModel = request.model || options.defaultModels?.image || "gpt-image-1";
        const combinedPrompt = request.context
            ? `${request.context}\n\n${request.input}`
            : request.input;
        const response = await fetchWithPolicy({
            url: `${baseUrl}/images/generations`,
            operation: "OpenAI image generation",
            fetchFn: fetcher,
            policy: options.httpPolicy,
            createRequestInit: () => ({
                method: "POST",
                headers: {
                    Authorization: `Bearer ${apiKey}`,
                    "Content-Type": "application/json",
                    "Idempotency-Key": request.traceId,
                    "X-Request-Id": request.traceId,
                    "X-Plasius-Client": "@plasius/ai/openai-adapter",
                },
                body: JSON.stringify({
                    model: resolvedModel,
                    prompt: combinedPrompt,
                    size: options.image?.size ?? "1024x1024",
                    response_format: "b64_json",
                }),
            }),
        });
        const body = await parseResponseBody(response);
        if (!response.ok) {
            throw new Error(resolveErrorMessage(body, `OpenAI image generation failed (${response.status} ${response.statusText}).`));
        }
        const url = extractImageUrl(body);
        const usage = usageFromPayload(asRecord(body).usage);
        const durationMs = performance.now() - startedAt;
        const base = createBaseCompletion("image", resolvedModel, request.userId, durationMs, usage);
        return {
            ...base,
            url,
        };
    };
    // Chat completion in JSON mode, parsed into { modelId, artifactUrl? }.
    const generateModel = async (request) => {
        const startedAt = performance.now();
        const apiKey = requireApiKey(request.apiKey);
        const resolvedModel = request.model || options.defaultModels?.model || "gpt-4.1-mini";
        // Append the output-shape instruction to any caller-supplied context.
        const systemInstruction = [
            request.context,
            "Return JSON only with fields modelId (string) and optional artifactUrl (string URL).",
        ]
            .filter(Boolean)
            .join("\n");
        const response = await fetchWithPolicy({
            url: `${baseUrl}/chat/completions`,
            operation: "OpenAI model generation",
            fetchFn: fetcher,
            policy: options.httpPolicy,
            createRequestInit: () => ({
                method: "POST",
                headers: {
                    Authorization: `Bearer ${apiKey}`,
                    "Content-Type": "application/json",
                    "Idempotency-Key": request.traceId,
                    "X-Request-Id": request.traceId,
                    "X-Plasius-Client": "@plasius/ai/openai-adapter",
                },
                body: JSON.stringify({
                    model: resolvedModel,
                    response_format: { type: "json_object" },
                    messages: [
                        { role: "system", content: systemInstruction },
                        { role: "user", content: request.input },
                    ],
                }),
            }),
        });
        const body = await parseResponseBody(response);
        if (!response.ok) {
            throw new Error(resolveErrorMessage(body, `OpenAI model generation failed (${response.status} ${response.statusText}).`));
        }
        const responseText = extractChatMessage(body);
        const parsed = parseGeneratedModel(responseText);
        const usage = usageFromPayload(asRecord(body).usage);
        const durationMs = performance.now() - startedAt;
        const base = createBaseCompletion("model", resolvedModel, request.userId, durationMs, usage);
        return {
            ...base,
            modelId: parsed.modelId,
            artifactUrl: parsed.artifactUrl,
        };
    };
    // Local stub: makes no remote call and always reports balance 0, which
    // keeps the AICapability.Balance contract satisfied for this provider.
    const checkBalance = async (request) => {
        const startedAt = performance.now();
        const resolvedModel = options.defaultModels?.chat ?? "";
        const durationMs = performance.now() - startedAt;
        const base = createBaseCompletion("balanceCompletion", resolvedModel, request.userId, durationMs);
        return {
            ...base,
            balance: 0,
        };
    };
    return {
        id: providerId,
        capabilities: [
            AICapability.Chat,
            AICapability.Speech,
            AICapability.Image,
            AICapability.Model,
            AICapability.Balance,
        ],
        chatWithAI,
        synthesizeSpeech,
        transcribeSpeech,
        generateImage,
        generateModel,
        checkBalance,
    };
}
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
import { type HttpClientPolicy } from "./http-resilience.js";
/** Credit balance reported by a video provider account. */
export interface ProviderBalance {
    /** Credits from the monthly allowance. */
    monthlyCredit: number;
    /** Credits from purchased packages. */
    packageCredit: number;
}
/** Result of uploading a source image to the provider. */
export interface VideoUploadResult {
    /** Provider-assigned numeric id for the uploaded image. */
    imageId: number;
}
/** Parameters for an image-to-video generation job. */
export interface VideoGenerationRequest {
    /** Id of a previously uploaded image (see VideoUploadResult). */
    imageId: number;
    /** Text prompt describing the desired video. */
    prompt: string;
    /** Content the provider should avoid generating. */
    negativePrompt?: string;
    /** Optional provider template to apply. */
    templateId?: string;
    /** Deterministic seed; omit for provider-chosen randomness. */
    seed?: number;
    /** Requested clip length in seconds. */
    durationSeconds?: number;
    /** Provider model name. */
    model?: string;
    /** Provider-specific motion mode. */
    motionMode?: string;
    /** Output quality tier (e.g. a resolution label such as "720p"). */
    quality?: string;
    /** Whether to stamp the provider watermark on the output. */
    watermark?: boolean;
}
/** Handle returned when a generation job is accepted. */
export interface VideoGenerationResult {
    /** Provider-assigned numeric id for the pending video job. */
    videoId: number;
}
/** Lifecycle states of a generation job. */
export type VideoJobState = "pending" | "completed" | "failed";
/** Snapshot of a generation job's progress. */
export interface VideoJobResult {
    state: VideoJobState;
    /** Download URL; only present once the job has completed. */
    videoUrl?: string;
}
/** Per-call credentials and plumbing shared by all adapter operations. */
export interface VideoProviderRequestContext {
    apiKey: string;
    /** Correlation id; implementations may generate one when omitted. */
    traceId?: string;
    /** Override for the global fetch (tests, custom transports). */
    fetchFn?: typeof fetch;
}
/** Neutral, provider-agnostic contract for an image-to-video service. */
export interface VideoProviderAdapter {
    uploadImage(image: File | URL, context: VideoProviderRequestContext): Promise<VideoUploadResult>;
    generateVideo(request: VideoGenerationRequest, context: VideoProviderRequestContext): Promise<VideoGenerationResult>;
    getVideoResult(videoId: number, context: VideoProviderRequestContext): Promise<VideoJobResult>;
    /** Optional: not every provider exposes a balance endpoint. */
    getBalance?(context: VideoProviderRequestContext): Promise<ProviderBalance>;
}
/** Configuration for the generic HTTP-backed adapter implementation. */
export interface HttpVideoProviderAdapterConfig {
    uploadImagePath: string;
    generateVideoPath: string;
    /** Builds the status URL for a given job id. */
    getVideoResultPath: (videoId: number) => string;
    /** When set, the adapter also exposes getBalance. */
    getBalancePath?: string;
    httpPolicy?: HttpClientPolicy;
    /** Custom extractors for provider-specific response envelopes. */
    mapUploadImageId?: (data: unknown) => number | undefined;
    mapGeneratedVideoId?: (data: unknown) => number | undefined;
    mapVideoResult?: (data: unknown) => VideoJobResult;
    mapBalance?: (data: unknown) => ProviderBalance;
    /** Custom serializer for the generation request body. */
    mapGenerateRequestBody?: (request: VideoGenerationRequest) => unknown;
    /** Extra headers merged into every request. */
    additionalHeaders?: Record<string, string>;
}
export declare function createHttpVideoProviderAdapter(config: HttpVideoProviderAdapterConfig): VideoProviderAdapter;
//# sourceMappingURL=video-provider-adapter.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"video-provider-adapter.d.ts","sourceRoot":"","sources":["../../src/platform/video-provider-adapter.ts"],"names":[],"mappings":"AAAA,OAAO,EAAmB,KAAK,gBAAgB,EAAE,MAAM,sBAAsB,CAAC;AAE9E,MAAM,WAAW,eAAe;IAC9B,aAAa,EAAE,MAAM,CAAC;IACtB,aAAa,EAAE,MAAM,CAAC;CACvB;AAED,MAAM,WAAW,iBAAiB;IAChC,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,WAAW,sBAAsB;IACrC,OAAO,EAAE,MAAM,CAAC;IAChB,MAAM,EAAE,MAAM,CAAC;IACf,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,SAAS,CAAC,EAAE,OAAO,CAAC;CACrB;AAED,MAAM,WAAW,qBAAqB;IACpC,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,MAAM,aAAa,GAAG,SAAS,GAAG,WAAW,GAAG,QAAQ,CAAC;AAE/D,MAAM,WAAW,cAAc;IAC7B,KAAK,EAAE,aAAa,CAAC;IACrB,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,WAAW,2BAA2B;IAC1C,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,OAAO,CAAC,EAAE,OAAO,KAAK,CAAC;CACxB;AAED,MAAM,WAAW,oBAAoB;IACnC,WAAW,CACT,KAAK,EAAE,IAAI,GAAG,GAAG,EACjB,OAAO,EAAE,2BAA2B,GACnC,OAAO,CAAC,iBAAiB,CAAC,CAAC;IAC9B,aAAa,CACX,OAAO,EAAE,sBAAsB,EAC/B,OAAO,EAAE,2BAA2B,GACnC,OAAO,CAAC,qBAAqB,CAAC,CAAC;IAClC,cAAc,CACZ,OAAO,EAAE,MAAM,EACf,OAAO,EAAE,2BAA2B,GACnC,OAAO,CAAC,cAAc,CAAC,CAAC;IAC3B,UAAU,CAAC,CAAC,OAAO,EAAE,2BAA2B,GAAG,OAAO,CAAC,eAAe,CAAC,CAAC;CAC7E;AAED,MAAM,WAAW,8BAA8B;IAC7C,eAAe,EAAE,MAAM,CAAC;IACxB,iBAAiB,EAAE,MAAM,CAAC;IAC1B,kBAAkB,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,MAAM,CAAC;IAChD,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,UAAU,CAAC,EAAE,gBAAgB,CAAC;IAC9B,gBAAgB,CAAC,EAAE,CAAC,IAAI,EAAE,OAAO,KAAK,MAAM,GAAG,SAAS,CAAC;IACzD,mBAAmB,CAAC,EAAE,CAAC,IAAI,EAAE,OAAO,KAAK,MAAM,GAAG,SAAS,CAAC;IAC5D,cAAc,CAAC,EAAE,CAAC,IAAI,EAAE,OAAO,KAAK,cAAc,CAAC;IACnD,UAAU,CAAC,EAAE,CAAC,IAAI,EAAE,OAAO,KAAK,eAAe,CAAC;IAChD,sBAAsB,CAAC,EAAE,CAAC,OAAO,EAAE,sBAAsB,KAAK,OAAO,CAAC;IACtE,iBAAiB,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CAC5C;AAqGD,wBAAgB,8BAA8B,CAC5C,MAAM,EAAE,8BAA8B,GACrC,oBAAoB,CAkItB"}
|
|
@@ -0,0 +1,165 @@
|
|
|
1
|
+
import { fetchWithPolicy } from "./http-resilience.js";
|
|
2
|
+
// Treat any non-null object (arrays included) as a record; everything
// else collapses to an empty object.
function asRecord(value) {
    return typeof value === "object" && value !== null ? value : {};
}
|
|
5
|
+
// Finite numbers pass through; NaN, Infinity and non-numbers yield undefined.
function asNumber(value) {
    if (typeof value === "number" && Number.isFinite(value)) {
        return value;
    }
    return undefined;
}
|
|
8
|
+
// Only non-empty strings survive; everything else maps to undefined.
function asString(value) {
    if (typeof value !== "string") {
        return undefined;
    }
    return value.length > 0 ? value : undefined;
}
|
|
11
|
+
// Run the mapper over a decoded payload and fail loudly when it cannot
// extract the expected (truthy) value.
function requireJson(value, label, mapper) {
    const mapped = mapper(value);
    if (!mapped) {
        throw new Error(`Invalid ${label} response.`);
    }
    return mapped;
}
|
|
18
|
+
// Translate the neutral request into the provider's snake_case payload,
// applying the provider defaults (5s, "standard" model, "normal" motion,
// "720p", no watermark) and only emitting the optional fields when set.
// seed is compared against undefined so an explicit 0 is preserved.
function defaultRequestBody(request) {
    const { imageId, prompt, durationSeconds, model, motionMode, quality, watermark, seed, templateId, negativePrompt } = request;
    const body = {
        img_id: imageId,
        prompt,
        duration: durationSeconds ?? 5,
        model: model ?? "standard",
        motion_mode: motionMode ?? "normal",
        quality: quality ?? "720p",
        water_mark: watermark ?? false,
    };
    if (seed !== undefined) {
        body.seed = seed;
    }
    if (templateId) {
        body.template_id = templateId;
    }
    if (negativePrompt) {
        body.negative_prompt = negativePrompt;
    }
    return body;
}
|
|
39
|
+
function defaultHeaders(context) {
|
|
40
|
+
const apiKey = context.apiKey.trim();
|
|
41
|
+
if (!apiKey) {
|
|
42
|
+
throw new Error("Provider API key is required.");
|
|
43
|
+
}
|
|
44
|
+
return {
|
|
45
|
+
"API-KEY": apiKey,
|
|
46
|
+
"AI-trace-ID": context.traceId ?? crypto.randomUUID(),
|
|
47
|
+
Accept: "application/json",
|
|
48
|
+
"X-Plasius-Client": "@plasius/ai/video-http-adapter",
|
|
49
|
+
};
|
|
50
|
+
}
|
|
51
|
+
// Default extractor: upload responses nest the numeric image id at Resp.id.
function defaultUploadMapper(data) {
    return asNumber(asRecord(asRecord(data).Resp).id);
}
|
|
56
|
+
// Default extractor: generation responses nest the job id at Resp.id.
function defaultVideoIdMapper(data) {
    return asNumber(asRecord(asRecord(data).Resp).id);
}
|
|
61
|
+
// Default status mapping: Resp.status 1 with a URL means completed,
// -1 means failed, anything else is still pending.
function defaultResultMapper(data) {
    const resp = asRecord(asRecord(data).Resp);
    const status = asNumber(resp.status);
    const videoUrl = asString(resp.url);
    if (status === 1 && videoUrl) {
        return { state: "completed", videoUrl };
    }
    return status === -1 ? { state: "failed" } : { state: "pending" };
}
|
|
74
|
+
// Default balance mapping from Resp.credit_monthly / Resp.credit_package;
// missing or non-numeric fields are reported as zero.
function defaultBalanceMapper(data) {
    const resp = asRecord(asRecord(data).Resp);
    return {
        monthlyCredit: asNumber(resp.credit_monthly) ?? 0,
        packageCredit: asNumber(resp.credit_package) ?? 0,
    };
}
|
|
82
|
+
/**
 * Builds a VideoProviderAdapter that talks to an HTTP image-to-video API.
 *
 * The config supplies endpoint paths plus optional mappers that translate
 * provider payloads into neutral results; the default mappers expect a
 * { Resp: { ... } } envelope. getBalance is only exposed when a balance
 * path is configured.
 */
export function createHttpVideoProviderAdapter(config) {
    // Shared request helper: applies the resilience policy, rejects non-2xx
    // responses, and decodes the body as JSON.
    const fetchJson = async (path, createRequestInit, context) => {
        const fetchFn = context.fetchFn ?? fetch;
        const response = await fetchWithPolicy({
            url: path,
            operation: "Video provider HTTP request",
            fetchFn,
            policy: config.httpPolicy,
            createRequestInit,
        });
        if (!response.ok) {
            throw new Error(`Provider request failed (${response.status} ${response.statusText}).`);
        }
        return response.json();
    };
    // Accepts a File or a URL; URLs are fetched first and re-uploaded as a blob.
    const uploadImage = async (image, context) => {
        const fetchFn = context.fetchFn ?? fetch;
        const data = await fetchJson(config.uploadImagePath, async () => {
            const formData = new FormData();
            if (image instanceof File) {
                formData.append("image", image, "upload-image");
            }
            else {
                const blob = await fetchFn(image.toString()).then((result) => result.blob());
                formData.append("image", blob, "upload-image");
            }
            return {
                method: "POST",
                headers: {
                    ...defaultHeaders(context),
                    ...config.additionalHeaders,
                },
                body: formData,
            };
        }, context);
        const imageId = requireJson(data, "image upload", config.mapUploadImageId ?? defaultUploadMapper);
        return { imageId };
    };
    // Submits a generation job and returns the provider's numeric video id.
    const generateVideo = async (request, context) => {
        const data = await fetchJson(config.generateVideoPath, () => {
            // Config may replace the default snake_case payload wholesale.
            const body = JSON.stringify(config.mapGenerateRequestBody?.(request) ?? defaultRequestBody(request));
            return {
                method: "POST",
                headers: {
                    ...defaultHeaders(context),
                    "Content-Type": "application/json",
                    ...config.additionalHeaders,
                },
                body,
            };
        }, context);
        const videoId = requireJson(data, "video generation", config.mapGeneratedVideoId ?? defaultVideoIdMapper);
        return { videoId };
    };
    // Polls one job's status; mapping to pending/completed/failed is delegated.
    const getVideoResult = async (videoId, context) => {
        const data = await fetchJson(config.getVideoResultPath(videoId), () => ({
            method: "GET",
            headers: {
                ...defaultHeaders(context),
                ...config.additionalHeaders,
            },
        }), context);
        return (config.mapVideoResult ?? defaultResultMapper)(data);
    };
    // Optional capability: only defined when the config names a balance path.
    const getBalance = config.getBalancePath
        ? async (context) => {
            const data = await fetchJson(config.getBalancePath, () => ({
                method: "GET",
                headers: {
                    ...defaultHeaders(context),
                    "Content-Type": "application/json",
                    ...config.additionalHeaders,
                },
            }), context);
            return (config.mapBalance ?? defaultBalanceMapper)(data);
        }
        : undefined;
    return {
        uploadImage,
        generateVideo,
        getVideoResult,
        getBalance,
    };
}
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import type { AIPlatform } from "./index.js";
import type { VideoGenerationRequest, VideoProviderAdapter } from "./video-provider-adapter.js";
/** Configuration for wrapping a VideoProviderAdapter as an AIPlatform. */
export interface VideoProviderPlatformProps {
    /** Provider API key forwarded with every adapter call. */
    apiKey: string;
    /** Transport-level adapter that performs the provider HTTP calls. */
    adapter: VideoProviderAdapter;
    /** Optional job-status polling knobs. */
    polling?: {
        /** Maximum number of status polls before giving up. */
        maxRetries?: number;
        /** Delay between consecutive polls, in milliseconds. */
        delayMs?: number;
    };
    /**
     * Baseline generation settings merged into each request; imageId and
     * prompt are always per-call and therefore excluded here.
     */
    defaultVideoRequest?: Partial<Omit<VideoGenerationRequest, "imageId" | "prompt">>;
}
/** Creates an AIPlatform for the given user backed by the supplied adapter. */
export declare function createVideoProviderPlatform(userId: string, props: VideoProviderPlatformProps): Promise<AIPlatform>;
//# sourceMappingURL=video-provider-platform.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"video-provider-platform.d.ts","sourceRoot":"","sources":["../../src/platform/video-provider-platform.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EACV,UAAU,EASX,MAAM,YAAY,CAAC;AACpB,OAAO,KAAK,EACV,sBAAsB,EAEtB,oBAAoB,EACrB,MAAM,6BAA6B,CAAC;AAErC,MAAM,WAAW,0BAA0B;IACzC,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,EAAE,oBAAoB,CAAC;IAC9B,OAAO,CAAC,EAAE;QACR,UAAU,CAAC,EAAE,MAAM,CAAC;QACpB,OAAO,CAAC,EAAE,MAAM,CAAC;KAClB,CAAC;IACF,mBAAmB,CAAC,EAAE,OAAO,CAAC,IAAI,CAAC,sBAAsB,EAAE,SAAS,GAAG,QAAQ,CAAC,CAAC,CAAC;CACnF;AA8CD,wBAAsB,2BAA2B,CAC/C,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,0BAA0B,GAChC,OAAO,CAAC,UAAU,CAAC,CAmIrB"}
|