@plasius/ai 1.1.4 → 1.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +22 -2
- package/README.md +101 -2
- package/dist/components/pixelverse/balance.d.ts +6 -2
- package/dist/components/pixelverse/balance.d.ts.map +1 -1
- package/dist/components/pixelverse/balance.js +13 -23
- package/dist/components/pixelverse/index.d.ts +1 -1
- package/dist/components/pixelverse/index.d.ts.map +1 -1
- package/dist/components/pixelverse/index.js +1 -1
- package/dist/components/pixelverse/video-generation-editor.d.ts +10 -0
- package/dist/components/pixelverse/video-generation-editor.d.ts.map +1 -0
- package/dist/components/pixelverse/video-generation-editor.js +79 -0
- package/dist/platform/adapter-platform.d.ts +60 -0
- package/dist/platform/adapter-platform.d.ts.map +1 -0
- package/dist/platform/adapter-platform.js +222 -0
- package/dist/platform/gemini-adapter.d.ts +15 -0
- package/dist/platform/gemini-adapter.d.ts.map +1 -0
- package/dist/platform/gemini-adapter.js +293 -0
- package/dist/platform/http-resilience.d.ts +19 -0
- package/dist/platform/http-resilience.d.ts.map +1 -0
- package/dist/platform/http-resilience.js +126 -0
- package/dist/platform/index.d.ts +22 -1
- package/dist/platform/index.d.ts.map +1 -1
- package/dist/platform/index.js +24 -0
- package/dist/platform/openai-adapter.d.ts +24 -0
- package/dist/platform/openai-adapter.d.ts.map +1 -0
- package/dist/platform/openai-adapter.js +398 -0
- package/dist/platform/video-provider-adapter.d.ts +54 -0
- package/dist/platform/video-provider-adapter.d.ts.map +1 -0
- package/dist/platform/video-provider-adapter.js +165 -0
- package/dist/platform/video-provider-platform.d.ts +13 -0
- package/dist/platform/video-provider-platform.d.ts.map +1 -0
- package/dist/platform/video-provider-platform.js +102 -0
- package/dist-cjs/components/pixelverse/balance.d.ts +6 -2
- package/dist-cjs/components/pixelverse/balance.d.ts.map +1 -1
- package/dist-cjs/components/pixelverse/balance.js +13 -23
- package/dist-cjs/components/pixelverse/index.d.ts +1 -1
- package/dist-cjs/components/pixelverse/index.d.ts.map +1 -1
- package/dist-cjs/components/pixelverse/index.js +1 -1
- package/dist-cjs/components/pixelverse/video-generation-editor.d.ts +10 -0
- package/dist-cjs/components/pixelverse/video-generation-editor.d.ts.map +1 -0
- package/dist-cjs/components/pixelverse/video-generation-editor.js +85 -0
- package/dist-cjs/platform/adapter-platform.d.ts +60 -0
- package/dist-cjs/platform/adapter-platform.d.ts.map +1 -0
- package/dist-cjs/platform/adapter-platform.js +225 -0
- package/dist-cjs/platform/gemini-adapter.d.ts +15 -0
- package/dist-cjs/platform/gemini-adapter.d.ts.map +1 -0
- package/dist-cjs/platform/gemini-adapter.js +296 -0
- package/dist-cjs/platform/http-resilience.d.ts +19 -0
- package/dist-cjs/platform/http-resilience.d.ts.map +1 -0
- package/dist-cjs/platform/http-resilience.js +129 -0
- package/dist-cjs/platform/index.d.ts +22 -1
- package/dist-cjs/platform/index.d.ts.map +1 -1
- package/dist-cjs/platform/index.js +30 -1
- package/dist-cjs/platform/openai-adapter.d.ts +24 -0
- package/dist-cjs/platform/openai-adapter.d.ts.map +1 -0
- package/dist-cjs/platform/openai-adapter.js +401 -0
- package/dist-cjs/platform/video-provider-adapter.d.ts +54 -0
- package/dist-cjs/platform/video-provider-adapter.d.ts.map +1 -0
- package/dist-cjs/platform/video-provider-adapter.js +168 -0
- package/dist-cjs/platform/video-provider-platform.d.ts +13 -0
- package/dist-cjs/platform/video-provider-platform.d.ts.map +1 -0
- package/dist-cjs/platform/video-provider-platform.js +105 -0
- package/docs/api-reference.md +59 -0
- package/docs/architecture.md +5 -1
- package/docs/providers.md +24 -6
- package/package.json +6 -6
- package/src/components/pixelverse/balance.tsx +22 -35
- package/src/components/pixelverse/index.ts +1 -1
- package/src/components/pixelverse/video-generation-editor.tsx +164 -0
- package/src/platform/adapter-platform.ts +440 -0
- package/src/platform/gemini-adapter.ts +391 -0
- package/src/platform/http-resilience.ts +198 -0
- package/src/platform/index.ts +68 -0
- package/src/platform/openai-adapter.ts +552 -0
- package/src/platform/video-provider-adapter.ts +303 -0
- package/src/platform/video-provider-platform.ts +208 -0
- package/dist/components/pixelverse/pixelverseeditor.d.ts +0 -16
- package/dist/components/pixelverse/pixelverseeditor.d.ts.map +0 -1
- package/dist/components/pixelverse/pixelverseeditor.js +0 -21
- package/dist/platform/openai.d.ts +0 -8
- package/dist/platform/openai.d.ts.map +0 -1
- package/dist/platform/openai.js +0 -61
- package/dist/platform/pixelverse.d.ts +0 -6
- package/dist/platform/pixelverse.d.ts.map +0 -1
- package/dist/platform/pixelverse.js +0 -196
- package/dist-cjs/components/pixelverse/pixelverseeditor.d.ts +0 -16
- package/dist-cjs/components/pixelverse/pixelverseeditor.d.ts.map +0 -1
- package/dist-cjs/components/pixelverse/pixelverseeditor.js +0 -27
- package/dist-cjs/platform/openai.d.ts +0 -8
- package/dist-cjs/platform/openai.d.ts.map +0 -1
- package/dist-cjs/platform/openai.js +0 -67
- package/dist-cjs/platform/pixelverse.d.ts +0 -6
- package/dist-cjs/platform/pixelverse.d.ts.map +0 -1
- package/dist-cjs/platform/pixelverse.js +0 -199
- package/src/components/pixelverse/pixelverseeditor.mocule.css +0 -0
- package/src/components/pixelverse/pixelverseeditor.tsx +0 -74
- package/src/platform/openai.ts +0 -123
- package/src/platform/pixelverse.ts +0 -309
package/CHANGELOG.md
CHANGED
|
@@ -9,15 +9,35 @@ The format is based on **[Keep a Changelog](https://keepachangelog.com/en/1.1.0/
|
|
|
9
9
|
## [Unreleased]
|
|
10
10
|
|
|
11
11
|
- **Added**
|
|
12
|
-
-
|
|
12
|
+
- Added built-in provider adapter factories:
|
|
13
|
+
- `createOpenAIAdapter` for chat, speech synthesis, transcription, image generation, and model generation.
|
|
14
|
+
- `createGeminiAdapter` for chat, image generation, and model generation.
|
|
15
|
+
- Added shared HTTP resilience policy contracts (`HttpClientPolicy`) and transport helper for consistent retry/timeout behavior across adapters.
|
|
16
|
+
- Added generic multi-capability adapter contracts (`AICapabilityAdapter`, request context/request types) and `createAdapterPlatform` for routing chat/voice/image/video/model operations.
|
|
17
|
+
- Added `AICapability.Model`, `ModelCompletion`, `modelCompletionSchema`, and `AIPlatform.generateModel(...)`.
|
|
18
|
+
- Added generic video-provider adapter contracts (`VideoProviderAdapter`, request/result types).
|
|
19
|
+
- Added `createHttpVideoProviderAdapter` for host-defined HTTP endpoint mapping.
|
|
20
|
+
- Added `createVideoProviderPlatform` to compose `AIPlatform` video/balance behavior from adapters.
|
|
13
21
|
|
|
14
22
|
- **Changed**
|
|
23
|
+
- Updated docs and examples to use built-in OpenAI/Gemini adapter factories with developer-supplied API keys.
|
|
24
|
+
- Hardened OpenAI/Gemini/video HTTP adapters with internet-friendly client behavior:
|
|
25
|
+
- request timeout defaults
|
|
26
|
+
- exponential backoff with jitter
|
|
27
|
+
- `Retry-After` handling
|
|
28
|
+
- retry on transient status codes (`408`, `409`, `425`, `429`, `500`, `502`, `503`, `504`)
|
|
29
|
+
- Hardened provisional adapters to align with injected-key usage:
|
|
30
|
+
- Removed provisional `OpenAIPlatform` runtime scaffold (`src/platform/openai.ts`) in favor of `createOpenAIAdapter`.
|
|
31
|
+
- `createVideoProviderPlatform` now validates non-empty API key input.
|
|
32
|
+
- `createHttpVideoProviderAdapter` now validates API keys and uses request-scoped `fetchFn` for URL image uploads.
|
|
15
33
|
- Hardened GitHub CD publish flow to publish only after successful install, test, and build, then push tags/releases post-publish.
|
|
16
34
|
- Standardized npm publish path on workflow-dispatched `.github/workflows/cd.yml` using provenance and production environment secrets.
|
|
17
35
|
- Replaced `audit:deps` from `depcheck` to `npm ls --all --omit=optional --omit=peer > /dev/null 2>&1 || true` to avoid deprecated dependency-chain risk.
|
|
36
|
+
- Refactored video editor/balance components to rely on injected provider adapters instead of hardcoded vendor wiring.
|
|
37
|
+
- Removed provider-specific identifiers from code roots to enforce public package boundaries.
|
|
18
38
|
|
|
19
39
|
- **Fixed**
|
|
20
|
-
-
|
|
40
|
+
- `pack:check` now passes vendor-namespace checks for `src/**` by using generic provider naming in runtime/editor code.
|
|
21
41
|
|
|
22
42
|
- **Security**
|
|
23
43
|
- Removed `depcheck` (and its `multimatch`/`minimatch` chain) from devDependencies to resolve reported high-severity audit findings.
|
package/README.md
CHANGED
|
@@ -15,8 +15,9 @@ AI capability contracts and completion schemas for Plasius applications.
|
|
|
15
15
|
This package currently provides:
|
|
16
16
|
|
|
17
17
|
- capability contracts (`AICapability`, `AIPlatform`)
|
|
18
|
-
- completion model interfaces (`ChatCompletion`, `ImageCompletion`, etc.)
|
|
18
|
+
- completion model interfaces (`ChatCompletion`, `ImageCompletion`, `ModelCompletion`, etc.)
|
|
19
19
|
- schema definitions for completion entities
|
|
20
|
+
- adapter contracts/factories for multi-provider routing with developer-supplied API keys
|
|
20
21
|
|
|
21
22
|
Provider wiring and runtime adapters are documented in [`docs/providers.md`](./docs/providers.md).
|
|
22
23
|
|
|
@@ -65,6 +66,9 @@ const platform: AIPlatform = {
|
|
|
65
66
|
produceVideo: async () => {
|
|
66
67
|
throw new Error("Not implemented");
|
|
67
68
|
},
|
|
69
|
+
generateModel: async () => {
|
|
70
|
+
throw new Error("Not implemented");
|
|
71
|
+
},
|
|
68
72
|
checkBalance: async () => ({
|
|
69
73
|
id: crypto.randomUUID(),
|
|
70
74
|
partitionKey: "user-1",
|
|
@@ -84,12 +88,25 @@ void platform;
|
|
|
84
88
|
|
|
85
89
|
- `AICapability`: enum describing logical capability routing.
|
|
86
90
|
- `AIPlatform`: interface your runtime adapter must implement.
|
|
91
|
+
- Generic multi-capability adapter contracts and helpers:
|
|
92
|
+
- `AICapabilityAdapter`
|
|
93
|
+
- `AdapterPlatformProps`
|
|
94
|
+
- `HttpClientPolicy`
|
|
95
|
+
- `createAdapterPlatform`
|
|
96
|
+
- `createOpenAIAdapter`
|
|
97
|
+
- `createGeminiAdapter`
|
|
98
|
+
- Generic video-provider adapter contracts and helpers:
|
|
99
|
+
- `VideoProviderAdapter`
|
|
100
|
+
- `VideoGenerationRequest`
|
|
101
|
+
- `createHttpVideoProviderAdapter`
|
|
102
|
+
- `createVideoProviderPlatform`
|
|
87
103
|
- `Completion` + typed completion variants:
|
|
88
104
|
- `ChatCompletion`
|
|
89
105
|
- `TextCompletion`
|
|
90
106
|
- `ImageCompletion`
|
|
91
107
|
- `SpeechCompletion`
|
|
92
108
|
- `VideoCompletion`
|
|
109
|
+
- `ModelCompletion`
|
|
93
110
|
- `BalanceCompletion`
|
|
94
111
|
- Schemas:
|
|
95
112
|
- `completionSchema`
|
|
@@ -98,6 +115,7 @@ void platform;
|
|
|
98
115
|
- `imageCompletionSchema`
|
|
99
116
|
- `speechCompletionSchema`
|
|
100
117
|
- `videoCompletionSchema`
|
|
118
|
+
- `modelCompletionSchema`
|
|
101
119
|
- `balanceCompletionSchema`
|
|
102
120
|
|
|
103
121
|
## Documentation
|
|
@@ -109,9 +127,90 @@ void platform;
|
|
|
109
127
|
## Known Limitations
|
|
110
128
|
|
|
111
129
|
- `src/lib/*` currently contains placeholder files and is not part of the public API.
|
|
112
|
-
-
|
|
130
|
+
- Provider-specific runtime adapters are still under stabilization and should be wrapped by host applications.
|
|
113
131
|
- The package focuses on contracts/schemas first; runtime behavior is expected to be composed by consumers.
|
|
114
132
|
|
|
133
|
+
### Multi-Capability Adapter Composition
|
|
134
|
+
|
|
135
|
+
```ts
|
|
136
|
+
import {
|
|
137
|
+
AICapability,
|
|
138
|
+
createAdapterPlatform,
|
|
139
|
+
createGeminiAdapter,
|
|
140
|
+
createOpenAIAdapter,
|
|
141
|
+
} from "@plasius/ai";
|
|
142
|
+
|
|
143
|
+
const openAIAdapter = createOpenAIAdapter({
|
|
144
|
+
id: "openai",
|
|
145
|
+
httpPolicy: {
|
|
146
|
+
maxAttempts: 3,
|
|
147
|
+
timeoutMs: 30000,
|
|
148
|
+
baseDelayMs: 250,
|
|
149
|
+
maxDelayMs: 4000,
|
|
150
|
+
jitterRatio: 0.2,
|
|
151
|
+
},
|
|
152
|
+
defaultModels: {
|
|
153
|
+
chat: "gpt-4.1-mini",
|
|
154
|
+
speech: "gpt-4o-mini-tts",
|
|
155
|
+
transcription: "gpt-4o-mini-transcribe",
|
|
156
|
+
image: "gpt-image-1",
|
|
157
|
+
model: "gpt-4.1-mini",
|
|
158
|
+
},
|
|
159
|
+
});
|
|
160
|
+
|
|
161
|
+
const geminiAdapter = createGeminiAdapter({
|
|
162
|
+
id: "gemini",
|
|
163
|
+
httpPolicy: {
|
|
164
|
+
maxAttempts: 3,
|
|
165
|
+
timeoutMs: 30000,
|
|
166
|
+
},
|
|
167
|
+
defaultModels: {
|
|
168
|
+
chat: "gemini-2.0-flash",
|
|
169
|
+
image: "imagen-3.0-generate-002",
|
|
170
|
+
model: "gemini-2.0-flash",
|
|
171
|
+
},
|
|
172
|
+
});
|
|
173
|
+
|
|
174
|
+
const platform = await createAdapterPlatform("user-1", {
|
|
175
|
+
adapters: [openAIAdapter, geminiAdapter],
|
|
176
|
+
apiKeys: {
|
|
177
|
+
openai: process.env.OPENAI_API_KEY ?? "",
|
|
178
|
+
gemini: process.env.GEMINI_API_KEY ?? "",
|
|
179
|
+
},
|
|
180
|
+
defaultAdapterByCapability: {
|
|
181
|
+
[AICapability.Chat]: "openai",
|
|
182
|
+
[AICapability.Speech]: "openai",
|
|
183
|
+
[AICapability.Image]: "gemini",
|
|
184
|
+
[AICapability.Model]: "gemini",
|
|
185
|
+
},
|
|
186
|
+
});
|
|
187
|
+
|
|
188
|
+
void platform;
|
|
189
|
+
```
|
|
190
|
+
|
|
191
|
+
### Generic Video Adapter Composition
|
|
192
|
+
|
|
193
|
+
```ts
|
|
194
|
+
import {
|
|
195
|
+
createHttpVideoProviderAdapter,
|
|
196
|
+
createVideoProviderPlatform,
|
|
197
|
+
} from "@plasius/ai";
|
|
198
|
+
|
|
199
|
+
const videoAdapter = createHttpVideoProviderAdapter({
|
|
200
|
+
uploadImagePath: "/provider/image/upload",
|
|
201
|
+
generateVideoPath: "/provider/video/generate",
|
|
202
|
+
getVideoResultPath: (videoId) => `/provider/video/result/${videoId}`,
|
|
203
|
+
getBalancePath: "/provider/account/balance",
|
|
204
|
+
});
|
|
205
|
+
|
|
206
|
+
const platform = await createVideoProviderPlatform("user-1", {
|
|
207
|
+
apiKey: process.env.PROVIDER_API_KEY ?? "",
|
|
208
|
+
adapter: videoAdapter,
|
|
209
|
+
});
|
|
210
|
+
|
|
211
|
+
void platform;
|
|
212
|
+
```
|
|
213
|
+
|
|
115
214
|
## Development
|
|
116
215
|
|
|
117
216
|
```bash
|
|
@@ -1,4 +1,8 @@
|
|
|
1
|
-
|
|
1
|
+
import type { VideoProviderAdapter } from "../../platform/video-provider-adapter.js";
|
|
2
|
+
export interface BalanceProps {
|
|
2
3
|
apiKey: string;
|
|
3
|
-
|
|
4
|
+
adapter: VideoProviderAdapter;
|
|
5
|
+
refreshMs?: number;
|
|
6
|
+
}
|
|
7
|
+
export default function Balance({ apiKey, adapter, refreshMs }: BalanceProps): import("react/jsx-runtime").JSX.Element;
|
|
4
8
|
//# sourceMappingURL=balance.d.ts.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"balance.d.ts","sourceRoot":"","sources":["../../../src/components/pixelverse/balance.tsx"],"names":[],"mappings":"
|
|
1
|
+
{"version":3,"file":"balance.d.ts","sourceRoot":"","sources":["../../../src/components/pixelverse/balance.tsx"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAmB,oBAAoB,EAAE,MAAM,0CAA0C,CAAC;AAGtG,MAAM,WAAW,YAAY;IAC3B,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,EAAE,oBAAoB,CAAC;IAC9B,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED,MAAM,CAAC,OAAO,UAAU,OAAO,CAAC,EAAE,MAAM,EAAE,OAAO,EAAE,SAAkB,EAAE,EAAE,YAAY,2CAyCpF"}
|
|
@@ -1,40 +1,30 @@
|
|
|
1
1
|
import { jsxs as _jsxs, jsx as _jsx } from "react/jsx-runtime";
|
|
2
2
|
import { useEffect, useState } from "react";
|
|
3
|
-
import { v4 as uuidv4 } from "uuid";
|
|
4
3
|
import styles from "./balance.module.css";
|
|
5
|
-
export default function Balance({ apiKey }) {
|
|
4
|
+
export default function Balance({ apiKey, adapter, refreshMs = 600000 }) {
|
|
6
5
|
const [balance, setBalance] = useState(null);
|
|
7
6
|
const fetchBalance = async () => {
|
|
7
|
+
if (!adapter.getBalance) {
|
|
8
|
+
setBalance({ monthlyCredit: 0, packageCredit: 0 });
|
|
9
|
+
return;
|
|
10
|
+
}
|
|
8
11
|
try {
|
|
9
|
-
const
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
"API-KEY": apiKey,
|
|
13
|
-
"AI-trace-ID": uuidv4(),
|
|
14
|
-
Accept: "application/json",
|
|
15
|
-
"Content-Type": "application/json",
|
|
16
|
-
},
|
|
17
|
-
referrerPolicy: "no-referrer",
|
|
12
|
+
const value = await adapter.getBalance({
|
|
13
|
+
apiKey,
|
|
14
|
+
traceId: crypto.randomUUID(),
|
|
18
15
|
});
|
|
19
|
-
|
|
20
|
-
console.error("Failed to fetch balance:", response.status, response.statusText);
|
|
21
|
-
return;
|
|
22
|
-
}
|
|
23
|
-
const data = (await response.json());
|
|
24
|
-
if (data?.Resp) {
|
|
25
|
-
setBalance(data.Resp);
|
|
26
|
-
}
|
|
16
|
+
setBalance(value);
|
|
27
17
|
}
|
|
28
18
|
catch (err) {
|
|
29
19
|
console.error("fetchBalance() error", err);
|
|
30
20
|
}
|
|
31
21
|
};
|
|
32
22
|
useEffect(() => {
|
|
33
|
-
void fetchBalance();
|
|
23
|
+
void fetchBalance();
|
|
34
24
|
const intervalId = setInterval(() => {
|
|
35
25
|
void fetchBalance();
|
|
36
|
-
},
|
|
26
|
+
}, refreshMs);
|
|
37
27
|
return () => clearInterval(intervalId);
|
|
38
|
-
}, [apiKey]);
|
|
39
|
-
return (_jsx("div", { className: styles.balance_container, children: balance ? (_jsxs("div", { children: [_jsxs("p", { children: ["Monthly Credit: ", balance.
|
|
28
|
+
}, [apiKey, adapter, refreshMs]);
|
|
29
|
+
return (_jsx("div", { className: styles.balance_container, children: balance ? (_jsxs("div", { children: [_jsxs("p", { children: ["Monthly Credit: ", balance.monthlyCredit] }), _jsxs("p", { children: ["Package Credit: ", balance.packageCredit] })] })) : (_jsx("p", { children: "Loading balance..." })) }));
|
|
40
30
|
}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/components/pixelverse/index.ts"],"names":[],"mappings":"AAAA,cAAc,cAAc,CAAC;AAC7B,cAAc,
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/components/pixelverse/index.ts"],"names":[],"mappings":"AAAA,cAAc,cAAc,CAAC;AAC7B,cAAc,8BAA8B,CAAC"}
|
|
@@ -1,2 +1,2 @@
|
|
|
1
1
|
export * from "./balance.js";
|
|
2
|
-
export * from "./
|
|
2
|
+
export * from "./video-generation-editor.js";
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import type { VideoGenerationRequest, VideoProviderAdapter } from "../../platform/video-provider-adapter.js";
|
|
2
|
+
export interface VideoGenerationEditorProps {
|
|
3
|
+
apiKey: string;
|
|
4
|
+
adapter: VideoProviderAdapter;
|
|
5
|
+
onVideoGenerated?: (videoUrl: string) => void;
|
|
6
|
+
initialRequest?: Partial<Omit<VideoGenerationRequest, "imageId">>;
|
|
7
|
+
}
|
|
8
|
+
export declare function VideoGenerationEditor({ apiKey, adapter, onVideoGenerated, initialRequest, }: VideoGenerationEditorProps): import("react/jsx-runtime").JSX.Element;
|
|
9
|
+
export default VideoGenerationEditor;
|
|
10
|
+
//# sourceMappingURL=video-generation-editor.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"video-generation-editor.d.ts","sourceRoot":"","sources":["../../../src/components/pixelverse/video-generation-editor.tsx"],"names":[],"mappings":"AAEA,OAAO,KAAK,EACV,sBAAsB,EACtB,oBAAoB,EACrB,MAAM,0CAA0C,CAAC;AAElD,MAAM,WAAW,0BAA0B;IACzC,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,EAAE,oBAAoB,CAAC;IAC9B,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,MAAM,KAAK,IAAI,CAAC;IAC9C,cAAc,CAAC,EAAE,OAAO,CAAC,IAAI,CAAC,sBAAsB,EAAE,SAAS,CAAC,CAAC,CAAC;CACnE;AA8CD,wBAAgB,qBAAqB,CAAC,EACpC,MAAM,EACN,OAAO,EACP,gBAAgB,EAChB,cAAc,GACf,EAAE,0BAA0B,2CAkG5B;AAED,eAAe,qBAAqB,CAAC"}
|
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
import { jsx as _jsx, jsxs as _jsxs } from "react/jsx-runtime";
|
|
2
|
+
import { useState } from "react";
|
|
3
|
+
import Balance from "./balance.js";
|
|
4
|
+
const defaultRequest = {
|
|
5
|
+
prompt: "",
|
|
6
|
+
model: "standard",
|
|
7
|
+
motionMode: "normal",
|
|
8
|
+
quality: "720p",
|
|
9
|
+
durationSeconds: 5,
|
|
10
|
+
watermark: false,
|
|
11
|
+
};
|
|
12
|
+
function toRequest(overrides) {
|
|
13
|
+
return {
|
|
14
|
+
...defaultRequest,
|
|
15
|
+
...overrides,
|
|
16
|
+
};
|
|
17
|
+
}
|
|
18
|
+
async function waitForVideoCompletion(adapter, videoId, apiKey, maxRetries = 20, delayMs = 3000) {
|
|
19
|
+
for (let attempt = 0; attempt < maxRetries; attempt += 1) {
|
|
20
|
+
await new Promise((resolve) => setTimeout(resolve, delayMs));
|
|
21
|
+
const result = await adapter.getVideoResult(videoId, {
|
|
22
|
+
apiKey,
|
|
23
|
+
traceId: crypto.randomUUID(),
|
|
24
|
+
});
|
|
25
|
+
if (result.state === "completed" && result.videoUrl) {
|
|
26
|
+
return result.videoUrl;
|
|
27
|
+
}
|
|
28
|
+
if (result.state === "failed") {
|
|
29
|
+
throw new Error("Video generation failed.");
|
|
30
|
+
}
|
|
31
|
+
}
|
|
32
|
+
throw new Error("Timed out waiting for video generation result.");
|
|
33
|
+
}
|
|
34
|
+
export function VideoGenerationEditor({ apiKey, adapter, onVideoGenerated, initialRequest, }) {
|
|
35
|
+
const [videoUrl, setVideoUrl] = useState("");
|
|
36
|
+
const [selectedFile, setSelectedFile] = useState(null);
|
|
37
|
+
const [loading, setLoading] = useState(false);
|
|
38
|
+
const [videoReady, setVideoReady] = useState(false);
|
|
39
|
+
const [request, setRequest] = useState(toRequest(initialRequest));
|
|
40
|
+
const handleFileChange = (event) => {
|
|
41
|
+
const file = event.target.files?.[0] ?? null;
|
|
42
|
+
setSelectedFile(file);
|
|
43
|
+
};
|
|
44
|
+
const handleRegenerate = () => {
|
|
45
|
+
void handleUploadProcess();
|
|
46
|
+
};
|
|
47
|
+
const handleUploadProcess = async () => {
|
|
48
|
+
if (!selectedFile) {
|
|
49
|
+
return;
|
|
50
|
+
}
|
|
51
|
+
setLoading(true);
|
|
52
|
+
setVideoReady(false);
|
|
53
|
+
try {
|
|
54
|
+
const uploaded = await adapter.uploadImage(selectedFile, {
|
|
55
|
+
apiKey,
|
|
56
|
+
traceId: crypto.randomUUID(),
|
|
57
|
+
});
|
|
58
|
+
const generated = await adapter.generateVideo({
|
|
59
|
+
...request,
|
|
60
|
+
imageId: uploaded.imageId,
|
|
61
|
+
}, {
|
|
62
|
+
apiKey,
|
|
63
|
+
traceId: crypto.randomUUID(),
|
|
64
|
+
});
|
|
65
|
+
const generatedUrl = await waitForVideoCompletion(adapter, generated.videoId, apiKey);
|
|
66
|
+
setVideoUrl(generatedUrl);
|
|
67
|
+
setVideoReady(true);
|
|
68
|
+
onVideoGenerated?.(generatedUrl);
|
|
69
|
+
}
|
|
70
|
+
finally {
|
|
71
|
+
setLoading(false);
|
|
72
|
+
}
|
|
73
|
+
};
|
|
74
|
+
return (_jsxs("div", { children: [_jsx(Balance, { apiKey: apiKey, adapter: adapter }), !videoReady && !selectedFile && (_jsxs("div", { children: [_jsx("p", { children: "Drag/Drop or Click HERE to upload" }), _jsx("input", { title: "Upload Image", type: "file", accept: ".jpg,.jpeg,.png,.webp", onChange: handleFileChange })] })), !videoReady ? (_jsx("div", { children: _jsxs("label", { children: ["Prompt", _jsx("textarea", { value: request.prompt, onChange: (event) => setRequest((previous) => ({
|
|
75
|
+
...previous,
|
|
76
|
+
prompt: event.target.value,
|
|
77
|
+
})) })] }) })) : null, loading && _jsx("div", { children: "Loading..." }), !videoReady && selectedFile && !loading && (_jsx("button", { onClick: handleUploadProcess, children: "Start Upload" })), videoReady && (_jsxs("div", { children: [_jsx("video", { src: videoUrl, controls: true }), _jsx("button", { onClick: handleRegenerate, children: "Regenerate" })] }))] }));
|
|
78
|
+
}
|
|
79
|
+
export default VideoGenerationEditor;
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
import { AICapability, type AIPlatform, type BalanceCompletion, type ChatCompletion, type ImageCompletion, type ModelCompletion, type SpeechCompletion, type TextCompletion, type VideoCompletion } from "./index.js";
|
|
2
|
+
export interface AdapterRequestContext {
|
|
3
|
+
userId: string;
|
|
4
|
+
providerId: string;
|
|
5
|
+
apiKey: string;
|
|
6
|
+
traceId: string;
|
|
7
|
+
}
|
|
8
|
+
export interface AdapterChatRequest extends AdapterRequestContext {
|
|
9
|
+
input: string;
|
|
10
|
+
context: string;
|
|
11
|
+
model: string;
|
|
12
|
+
}
|
|
13
|
+
export interface AdapterSynthesizeSpeechRequest extends AdapterRequestContext {
|
|
14
|
+
input: string;
|
|
15
|
+
voice: string;
|
|
16
|
+
context: string;
|
|
17
|
+
model: string;
|
|
18
|
+
}
|
|
19
|
+
export interface AdapterTranscribeSpeechRequest extends AdapterRequestContext {
|
|
20
|
+
input: Buffer;
|
|
21
|
+
context: string;
|
|
22
|
+
model: string;
|
|
23
|
+
}
|
|
24
|
+
export interface AdapterGenerateImageRequest extends AdapterRequestContext {
|
|
25
|
+
input: string;
|
|
26
|
+
context: string;
|
|
27
|
+
model: string;
|
|
28
|
+
}
|
|
29
|
+
export interface AdapterVideoRequest extends AdapterRequestContext {
|
|
30
|
+
input: string;
|
|
31
|
+
image: URL;
|
|
32
|
+
context: string;
|
|
33
|
+
model: string;
|
|
34
|
+
}
|
|
35
|
+
export interface AdapterGenerateModelRequest extends AdapterRequestContext {
|
|
36
|
+
input: string;
|
|
37
|
+
context: string;
|
|
38
|
+
model: string;
|
|
39
|
+
}
|
|
40
|
+
export interface AdapterBalanceRequest extends AdapterRequestContext {
|
|
41
|
+
}
|
|
42
|
+
export interface AICapabilityAdapter {
|
|
43
|
+
id: string;
|
|
44
|
+
capabilities: AICapability[];
|
|
45
|
+
canHandle?: (capabilities: AICapability[]) => Promise<boolean> | boolean;
|
|
46
|
+
chatWithAI?: (request: AdapterChatRequest) => Promise<ChatCompletion>;
|
|
47
|
+
synthesizeSpeech?: (request: AdapterSynthesizeSpeechRequest) => Promise<SpeechCompletion>;
|
|
48
|
+
transcribeSpeech?: (request: AdapterTranscribeSpeechRequest) => Promise<TextCompletion>;
|
|
49
|
+
generateImage?: (request: AdapterGenerateImageRequest) => Promise<ImageCompletion>;
|
|
50
|
+
produceVideo?: (request: AdapterVideoRequest) => Promise<VideoCompletion>;
|
|
51
|
+
generateModel?: (request: AdapterGenerateModelRequest) => Promise<ModelCompletion>;
|
|
52
|
+
checkBalance?: (request: AdapterBalanceRequest) => Promise<BalanceCompletion>;
|
|
53
|
+
}
|
|
54
|
+
export interface AdapterPlatformProps {
|
|
55
|
+
adapters: AICapabilityAdapter[];
|
|
56
|
+
apiKeys: Record<string, string>;
|
|
57
|
+
defaultAdapterByCapability?: Partial<Record<AICapability, string>>;
|
|
58
|
+
}
|
|
59
|
+
export declare function createAdapterPlatform(userId: string, props: AdapterPlatformProps): Promise<AIPlatform>;
|
|
60
|
+
//# sourceMappingURL=adapter-platform.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"adapter-platform.d.ts","sourceRoot":"","sources":["../../src/platform/adapter-platform.ts"],"names":[],"mappings":"AAEA,OAAO,EACL,YAAY,EACZ,KAAK,UAAU,EACf,KAAK,iBAAiB,EACtB,KAAK,cAAc,EAEnB,KAAK,eAAe,EACpB,KAAK,eAAe,EACpB,KAAK,gBAAgB,EACrB,KAAK,cAAc,EACnB,KAAK,eAAe,EACrB,MAAM,YAAY,CAAC;AAEpB,MAAM,WAAW,qBAAqB;IACpC,MAAM,EAAE,MAAM,CAAC;IACf,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,WAAW,kBAAmB,SAAQ,qBAAqB;IAC/D,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,8BAA+B,SAAQ,qBAAqB;IAC3E,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,8BAA+B,SAAQ,qBAAqB;IAC3E,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,2BAA4B,SAAQ,qBAAqB;IACxE,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,mBAAoB,SAAQ,qBAAqB;IAChE,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,GAAG,CAAC;IACX,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,2BAA4B,SAAQ,qBAAqB;IACxE,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,qBAAsB,SAAQ,qBAAqB;CAAG;AAEvE,MAAM,WAAW,mBAAmB;IAClC,EAAE,EAAE,MAAM,CAAC;IACX,YAAY,EAAE,YAAY,EAAE,CAAC;IAC7B,SAAS,CAAC,EAAE,CAAC,YAAY,EAAE,YAAY,EAAE,KAAK,OAAO,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC;IACzE,UAAU,CAAC,EAAE,CAAC,OAAO,EAAE,kBAAkB,KAAK,OAAO,CAAC,cAAc,CAAC,CAAC;IACtE,gBAAgB,CAAC,EAAE,CACjB,OAAO,EAAE,8BAA8B,KACpC,OAAO,CAAC,gBAAgB,CAAC,CAAC;IAC/B,gBAAgB,CAAC,EAAE,CACjB,OAAO,EAAE,8BAA8B,KACpC,OAAO,CAAC,cAAc,CAAC,CAAC;IAC7B,aAAa,CAAC,EAAE,CACd,OAAO,EAAE,2BAA2B,KACjC,OAAO,CAAC,eAAe,CAAC,CAAC;IAC9B,YAAY,CAAC,EAAE,CAAC,OAAO,EAAE,mBAAmB,KAAK,OAAO,CAAC,eAAe,CAAC,CAAC;IAC1E,aAAa,CAAC,EAAE,CACd,OAAO,EAAE,2BAA2B,KACjC,OAAO,CAAC,eAAe,CAAC,CAAC;IAC9B,YAAY,CAAC,EAAE,CAAC,OAAO,EAAE,qBAAqB,KAAK,OAAO,CAAC,iBAAiB,CAAC,CAAC;CAC/E;AAED,MAAM,WAAW,oBAAoB;IACnC,QAAQ,EAAE,mBAAmB,EAAE,CAAC;IAChC,OAAO,EAAE,MAAM,CAAC,
MAAM,EAAE,MAAM,CAAC,CAAC;IAChC,0BAA0B,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC,CAAC;CACpE;AAyED,wBAAsB,qBAAqB,CACzC,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,oBAAoB,GAC1B,OAAO,CAAC,UAAU,CAAC,CAoRrB"}
|
|
@@ -0,0 +1,222 @@
|
|
|
1
|
+
import { performance } from "node:perf_hooks";
|
|
2
|
+
import { AICapability, } from "./index.js";
|
|
3
|
+
function createCompletionBase(type, model, requestor, durationMs) {
|
|
4
|
+
return {
|
|
5
|
+
partitionKey: requestor,
|
|
6
|
+
id: crypto.randomUUID(),
|
|
7
|
+
type,
|
|
8
|
+
model,
|
|
9
|
+
createdAt: new Date().toISOString(),
|
|
10
|
+
durationMs,
|
|
11
|
+
usage: {},
|
|
12
|
+
};
|
|
13
|
+
}
|
|
14
|
+
function createAdapterContext(requestorId, adapter, apiKey) {
|
|
15
|
+
return {
|
|
16
|
+
userId: requestorId,
|
|
17
|
+
providerId: adapter.id,
|
|
18
|
+
apiKey,
|
|
19
|
+
traceId: crypto.randomUUID(),
|
|
20
|
+
};
|
|
21
|
+
}
|
|
22
|
+
function requiresOperationalMethod(capability, adapter) {
|
|
23
|
+
switch (capability) {
|
|
24
|
+
case AICapability.Chat:
|
|
25
|
+
return typeof adapter.chatWithAI === "function";
|
|
26
|
+
case AICapability.Text:
|
|
27
|
+
return true;
|
|
28
|
+
case AICapability.Speech:
|
|
29
|
+
return (typeof adapter.synthesizeSpeech === "function" ||
|
|
30
|
+
typeof adapter.transcribeSpeech === "function");
|
|
31
|
+
case AICapability.Image:
|
|
32
|
+
return typeof adapter.generateImage === "function";
|
|
33
|
+
case AICapability.Video:
|
|
34
|
+
return typeof adapter.produceVideo === "function";
|
|
35
|
+
case AICapability.Balance:
|
|
36
|
+
return typeof adapter.checkBalance === "function";
|
|
37
|
+
case AICapability.Model:
|
|
38
|
+
return typeof adapter.generateModel === "function";
|
|
39
|
+
default:
|
|
40
|
+
return false;
|
|
41
|
+
}
|
|
42
|
+
}
|
|
43
|
+
/**
 * Builds an AI platform facade that routes capability requests (chat, speech,
 * image, video, 3D model, balance) to one of the adapters in `props.adapters`.
 *
 * Routing rules, in order:
 *   1. If `props.defaultAdapterByCapability[capability]` names an adapter, that
 *      adapter is authoritative: it must exist, declare the capability,
 *      implement the operation, and have a usable API key — otherwise the call
 *      fails (or resolves to undefined for optional operations).
 *   2. Otherwise the first adapter that both declares the capability and
 *      implements the operation is used.
 *
 * API keys are looked up in `props.apiKeys` by adapter id; non-string or
 * blank (after trimming) entries count as missing.
 *
 * @param userId  requestor id used for the initial balance check that seeds
 *                the returned `currentBalance` snapshot.
 * @param props   `{ adapters, apiKeys, defaultAdapterByCapability? }`.
 * @returns platform object exposing `canHandle`, the capability operations,
 *          `checkBalance`, and a `currentBalance` snapshot taken at creation
 *          time (it is NOT updated by later operations).
 * @throws Error when an adapter id is empty/blank or duplicated, or when a
 *         required capability cannot be resolved at call time.
 */
export async function createAdapterPlatform(userId, props) {
    // Index adapters by id, rejecting blank and duplicate ids up front.
    const adapterById = new Map();
    for (const adapter of props.adapters) {
        if (!adapter.id || adapter.id.trim().length === 0) {
            throw new Error("Adapter id must be a non-empty string.");
        }
        if (adapterById.has(adapter.id)) {
            throw new Error(`Duplicate adapter id "${adapter.id}" detected.`);
        }
        adapterById.set(adapter.id, adapter);
    }
    // A key must be a non-empty string after trimming; anything else is "missing".
    const resolveApiKey = (providerId) => {
        const value = props.apiKeys[providerId];
        if (typeof value !== "string") {
            return undefined;
        }
        const trimmed = value.trim();
        return trimmed.length > 0 ? trimmed : undefined;
    };
    // Picks the adapter + API key for a capability/method pair. With
    // `options.required` set, any resolution failure throws; otherwise the
    // failure yields undefined so callers can degrade gracefully.
    const resolveAdapter = (capability, method, options) => {
        const configuredId = props.defaultAdapterByCapability?.[capability];
        const fail = (message) => {
            if (options.required) {
                throw new Error(message);
            }
            return undefined;
        };
        if (configuredId) {
            // An explicitly configured adapter is authoritative: never silently
            // fall back to another adapter when the configured one is unusable.
            const configured = adapterById.get(configuredId);
            if (!configured) {
                return fail(`Configured adapter "${configuredId}" for capability "${capability}" was not found.`);
            }
            if (!configured.capabilities.includes(capability)) {
                return fail(`Configured adapter "${configuredId}" does not declare capability "${capability}".`);
            }
            if (typeof configured[method] !== "function") {
                return fail(`Configured adapter "${configuredId}" does not implement "${method}" for capability "${capability}".`);
            }
            const apiKey = resolveApiKey(configured.id);
            if (!apiKey) {
                return fail(`Missing API key for configured adapter "${configured.id}".`);
            }
            return { adapter: configured, apiKey };
        }
        // No configured default: first adapter that both declares the
        // capability and implements the operation wins.
        const fallback = props.adapters.find((candidate) => {
            return (candidate.capabilities.includes(capability) &&
                typeof candidate[method] === "function");
        });
        if (!fallback) {
            return fail(`No adapter found for capability "${capability}" implementing "${method}".`);
        }
        const apiKey = resolveApiKey(fallback.id);
        if (!apiKey) {
            return fail(`Missing API key for adapter "${fallback.id}".`);
        }
        return { adapter: fallback, apiKey };
    };
    // True only when EVERY requested capability can actually be served.
    const canHandle = async (_requestorId, capabilities) => {
        for (const capability of capabilities) {
            const configuredId = props.defaultAdapterByCapability?.[capability];
            // FIX: the fallback search now also requires the capability's
            // operational method (via requiresOperationalMethod), mirroring
            // resolveAdapter. Previously the first adapter merely declaring the
            // capability was picked even if it lacked the method, so canHandle
            // could report false while the actual dispatch would have succeeded
            // through a later adapter.
            const adapter = configuredId
                ? adapterById.get(configuredId)
                : props.adapters.find((candidate) => candidate.capabilities.includes(capability) &&
                    requiresOperationalMethod(capability, candidate));
            if (!adapter) {
                return false;
            }
            if (!adapter.capabilities.includes(capability)) {
                return false;
            }
            if (!resolveApiKey(adapter.id)) {
                return false;
            }
            // Still needed for the configured-adapter path (the fallback path
            // already guaranteed it above).
            if (!requiresOperationalMethod(capability, adapter)) {
                return false;
            }
            // Give the adapter a final veto if it implements its own canHandle.
            if (adapter.canHandle) {
                const accepted = await adapter.canHandle([capability]);
                if (!accepted) {
                    return false;
                }
            }
        }
        return true;
    };
    // --- Capability operations. Each resolves its adapter at call time (so a
    // --- key or configuration change is picked up) and forwards the request
    // --- with the shared adapter context merged in.
    const chatWithAI = async (requestorId, input, context, model) => {
        const resolved = resolveAdapter(AICapability.Chat, "chatWithAI", {
            required: true,
        });
        return resolved.adapter.chatWithAI({
            ...createAdapterContext(requestorId, resolved.adapter, resolved.apiKey),
            input,
            context,
            model,
        });
    };
    const synthesizeSpeech = async (requestorId, input, voice, context, model) => {
        const resolved = resolveAdapter(AICapability.Speech, "synthesizeSpeech", {
            required: true,
        });
        return resolved.adapter.synthesizeSpeech({
            ...createAdapterContext(requestorId, resolved.adapter, resolved.apiKey),
            input,
            voice,
            context,
            model,
        });
    };
    const transcribeSpeech = async (requestorId, input, context, model) => {
        const resolved = resolveAdapter(AICapability.Speech, "transcribeSpeech", {
            required: true,
        });
        return resolved.adapter.transcribeSpeech({
            ...createAdapterContext(requestorId, resolved.adapter, resolved.apiKey),
            input,
            context,
            model,
        });
    };
    const generateImage = async (requestorId, input, context, model) => {
        const resolved = resolveAdapter(AICapability.Image, "generateImage", {
            required: true,
        });
        return resolved.adapter.generateImage({
            ...createAdapterContext(requestorId, resolved.adapter, resolved.apiKey),
            input,
            context,
            model,
        });
    };
    const produceVideo = async (requestorId, input, image, context, model) => {
        const resolved = resolveAdapter(AICapability.Video, "produceVideo", {
            required: true,
        });
        return resolved.adapter.produceVideo({
            ...createAdapterContext(requestorId, resolved.adapter, resolved.apiKey),
            input,
            image,
            context,
            model,
        });
    };
    const generateModel = async (requestorId, input, context, model) => {
        const resolved = resolveAdapter(AICapability.Model, "generateModel", {
            required: true,
        });
        return resolved.adapter.generateModel({
            ...createAdapterContext(requestorId, resolved.adapter, resolved.apiKey),
            input,
            context,
            model,
        });
    };
    // Balance is optional: when no balance-capable adapter is usable we report
    // a zero balance instead of throwing, so platform creation never fails on
    // a missing balance provider.
    const checkBalance = async (requestorId) => {
        const startedAt = performance.now();
        const resolved = resolveAdapter(AICapability.Balance, "checkBalance", {
            required: false,
        });
        if (!resolved || !resolved.adapter.checkBalance) {
            const durationMs = performance.now() - startedAt;
            // createCompletionBase presumably stamps the standard completion
            // metadata (kind, requestor, timing) — defined elsewhere in this
            // module; TODO confirm its contract.
            const base = createCompletionBase("balanceCompletion", "", requestorId, durationMs);
            return {
                ...base,
                balance: 0,
            };
        }
        return resolved.adapter.checkBalance(createAdapterContext(requestorId, resolved.adapter, resolved.apiKey));
    };
    // One-time snapshot; NOT live. Callers needing fresh data should call
    // checkBalance() again.
    const currentBalance = (await checkBalance(userId)).balance;
    return {
        canHandle,
        chatWithAI,
        synthesizeSpeech,
        transcribeSpeech,
        generateImage,
        produceVideo,
        generateModel,
        checkBalance,
        currentBalance,
    };
}
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import { type AICapabilityAdapter } from "./index.js";
import { type HttpClientPolicy } from "./http-resilience.js";
/**
 * Construction options for the Gemini capability adapter.
 *
 * All fields are optional; the implementation presumably supplies its own
 * defaults for any omitted field — TODO confirm against gemini-adapter.js.
 */
export interface GeminiAdapterOptions {
    // Adapter identifier used for registration/API-key lookup; presumably
    // defaults to a built-in Gemini id when omitted — confirm in implementation.
    id?: string;
    // Base URL for outbound requests; overrides the adapter's default endpoint.
    baseUrl?: string;
    // Custom fetch implementation (e.g. for testing, proxying, or polyfills).
    fetchFn?: typeof fetch;
    // HTTP resilience policy (see http-resilience.js) applied to outbound calls.
    httpPolicy?: HttpClientPolicy;
    // Default model name per capability, used when a call does not specify one.
    defaultModels?: {
        chat?: string;
        image?: string;
        model?: string;
    };
}
/**
 * Creates an {@link AICapabilityAdapter} backed by the Gemini API.
 *
 * @param options optional adapter configuration; see {@link GeminiAdapterOptions}.
 * @returns an adapter suitable for registration with the adapter platform.
 */
export declare function createGeminiAdapter(options?: GeminiAdapterOptions): AICapabilityAdapter;
//# sourceMappingURL=gemini-adapter.d.ts.map
|