@boldvideo/bold-js 0.9.0 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +14 -0
- package/dist/index.cjs +64 -36
- package/dist/index.d.ts +144 -32
- package/dist/index.js +64 -36
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,19 @@
 # @boldvideo/bold-js
 
+## 1.0.0
+
+### Major Changes
+
+- 8afb7b3: Breaking: Unified AI API
+
+  - New methods: `bold.ai.ask()`, `bold.ai.search()`, `bold.ai.chat(videoId, opts)`
+  - `bold.ai.coach()` is now an alias for `bold.ai.ask()`
+  - New parameter: `prompt` replaces `message`
+  - New parameter: `stream` (boolean, default: true) for non-streaming responses
+  - New event types: `message_start`, `text_delta`, `sources`, `message_complete`
+  - New types: `AIEvent`, `AIResponse`, `Source`, `AIUsage`, `SearchOptions`, `ChatOptions`
+  - Removed: `CoachEvent`, `Citation`, `Usage`, `CoachOptions` (old `AskOptions`)
+
 ## 0.9.0
 
 ### Minor Changes
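To make the breaking change concrete, here is a minimal before/after sketch assembled from the changelog entries above; it is not code from the package, the API key is a placeholder, and the event shapes follow the `AIEvent` union declared in `dist/index.d.ts` further down.

// Migration sketch: replaces the removed 0.9.0 call
//   bold.ai.coach({ message: "How do I price my SaaS?" })
// with the 1.0.0 unified API.
import { createClient } from "@boldvideo/bold-js";

const bold = createClient("YOUR_API_KEY");

// Streaming is the default: iterate the SSE events.
const stream = await bold.ai.ask({ prompt: "How do I price my SaaS?" });
for await (const event of stream) {
  if (event.type === "text_delta") process.stdout.write(event.delta);
  if (event.type === "message_complete") console.log("\nSources:", event.sources.length);
}

// Pass stream: false to get a single AIResponse instead of an event stream.
const response = await bold.ai.ask({ prompt: "How do I price my SaaS?", stream: false });
console.log(response.content, response.usage.total_tokens);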
package/dist/index.cjs
CHANGED
@@ -247,7 +247,7 @@ async function* parseSSE(response) {
       try {
         const event = JSON.parse(json);
         yield event;
-        if (event.type === "
+        if (event.type === "message_complete" || event.type === "error") {
           await reader.cancel();
           return;
         }
@@ -259,9 +259,12 @@ async function* parseSSE(response) {
     reader.releaseLock();
   }
 }
+function buildURL(baseURL, path) {
+  const base = baseURL.endsWith("/") ? baseURL : `${baseURL}/`;
+  return new URL(path, base);
+}
 async function streamRequest(path, body, config) {
-  const
-  const url = new URL(path, baseURL);
+  const url = buildURL(config.baseURL, path);
   const response = await fetch(url, {
     method: "POST",
     headers: {
@@ -276,41 +279,66 @@ async function streamRequest(path, body, config) {
   }
   return parseSSE(response);
 }
-function
-
-
-
-
-
-
-
- * const stream = await bold.ai.coach({ message: "How do I price my SaaS?" });
- * for await (const event of stream) {
- *   if (event.type === "token") console.log(event.content);
- * }
- */
-    async coach(options) {
-      const path = options.conversationId ? `coach/${options.conversationId}` : "coach";
-      const body = { message: options.message };
-      if (options.collectionId)
-        body.collection_id = options.collectionId;
-      return streamRequest(path, body, config);
+async function jsonRequest(path, body, config) {
+  const url = buildURL(config.baseURL, path);
+  const response = await fetch(url, {
+    method: "POST",
+    headers: {
+      "Content-Type": "application/json",
+      "Accept": "application/json",
+      ...config.headers
     },
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    body: JSON.stringify(body)
+  });
+  if (!response.ok) {
+    throw new Error(`AI request failed: ${response.status} ${response.statusText}`);
+  }
+  return response.json();
+}
+function createAI(config) {
+  async function ask(options) {
+    const path = options.conversationId ? `ai/ask/${options.conversationId}` : "ai/ask";
+    const body = { prompt: options.prompt };
+    if (options.collectionId)
+      body.collection_id = options.collectionId;
+    if (options.stream === false) {
+      body.stream = false;
+      return jsonRequest(path, body, config);
     }
+    return streamRequest(path, body, config);
+  }
+  async function coach(options) {
+    return ask(options);
+  }
+  async function search(options) {
+    const path = "ai/search";
+    const body = { prompt: options.prompt };
+    if (options.limit)
+      body.limit = options.limit;
+    if (options.collectionId)
+      body.collection_id = options.collectionId;
+    if (options.videoId)
+      body.video_id = options.videoId;
+    if (options.stream === false) {
+      body.stream = false;
+      return jsonRequest(path, body, config);
+    }
+    return streamRequest(path, body, config);
+  }
+  async function chat(videoId, options) {
+    const path = options.conversationId ? `ai/videos/${videoId}/chat/${options.conversationId}` : `ai/videos/${videoId}/chat`;
+    const body = { prompt: options.prompt };
+    if (options.stream === false) {
+      body.stream = false;
+      return jsonRequest(path, body, config);
+    }
+    return streamRequest(path, body, config);
+  }
+  return {
+    ask,
+    coach,
+    search,
+    chat
+  };
 }
 
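The `createAI` factory above streams SSE events by default and issues a plain JSON request when `stream: false` is set. The following consumer-side sketch of that contract for the video-scoped `chat` method is illustrative (video id, prompts, and API key are placeholders; it is not code from the package):

import { createClient } from "@boldvideo/bold-js";

const bold = createClient("YOUR_API_KEY");

// Streaming: handle the event types emitted by parseSSE.
const events = await bold.ai.chat("video-id", { prompt: "Summarize the intro" });
let conversationId: string | undefined;
for await (const event of events) {
  switch (event.type) {
    case "message_start":
      conversationId = event.id; // keep it to continue the conversation
      break;
    case "sources":
      console.log("cited:", event.sources.map((s) => s.video_id));
      break;
    case "text_delta":
      process.stdout.write(event.delta);
      break;
    case "message_complete":
      console.log("\ntokens:", event.usage.total_tokens);
      break;
    case "error":
      throw new Error(`${event.code}: ${event.message}`);
  }
}

// Non-streaming follow-up in the same conversation resolves to one AIResponse.
const reply = await bold.ai.chat("video-id", {
  prompt: "And the pricing section?",
  conversationId,
  stream: false,
});
console.log(reply.content);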
package/dist/index.d.ts
CHANGED
@@ -162,49 +162,164 @@ type Settings = {
     theme_config: ThemeConfig;
     version: string;
 };
-
-
-
-
+/**
+ * Source citation from AI responses
+ */
+interface Source {
+    video_id: string;
+    title: string;
+    timestamp: number;
+    timestamp_end?: number;
     text: string;
+    playback_id?: string;
+    speaker?: string;
 }
-
-
-
+/**
+ * Token usage statistics
+ */
+interface AIUsage {
+    prompt_tokens: number;
+    completion_tokens: number;
+    total_tokens: number;
 }
-
-
+/**
+ * SSE event types for AI streaming responses
+ */
+type AIEvent = {
+    type: "message_start";
     id: string;
-
+    model?: string;
+} | {
+    type: "sources";
+    sources: Source[];
+} | {
+    type: "text_delta";
+    delta: string;
 } | {
     type: "clarification";
     questions: string[];
-    mode: "clarification";
-    needs_clarification: true;
 } | {
-    type: "
+    type: "message_complete";
     content: string;
-
-
-    content: string;
-    citations: Citation[];
-    usage: Usage;
+    sources: Source[];
+    usage: AIUsage;
 } | {
     type: "error";
+    code: string;
     message: string;
-
-
-    timestamp: string;
-} | {
-    type: "complete";
+    retryable: boolean;
+    details?: Record<string, unknown>;
 };
-
-
+/**
+ * Non-streaming AI response
+ */
+interface AIResponse {
+    id: string;
+    content: string;
+    sources: Source[];
+    usage: AIUsage;
+    model?: string;
+}
+/**
+ * Options for bold.ai.ask() and bold.ai.coach()
+ */
+interface AskOptions {
+    prompt: string;
+    stream?: boolean;
     conversationId?: string;
     collectionId?: string;
 }
-
-
+/**
+ * Options for bold.ai.search()
+ */
+interface SearchOptions {
+    prompt: string;
+    stream?: boolean;
+    limit?: number;
+    collectionId?: string;
+    videoId?: string;
+}
+/**
+ * Options for bold.ai.chat()
+ *
+ * conversationId: Pass to continue an existing conversation (multi-turn chat).
+ * If omitted, a new conversation is created. The id is returned in the
+ * message_start event - capture it to pass to subsequent requests.
+ */
+interface ChatOptions {
+    prompt: string;
+    stream?: boolean;
+    conversationId?: string;
+}
+
+/**
+ * AI client interface for type-safe method overloading
+ */
+interface AIClient {
+    /**
+     * Ask - Library-wide RAG assistant
+     *
+     * @example
+     * // Streaming (default)
+     * const stream = await bold.ai.ask({ prompt: "How do I price my SaaS?" });
+     * for await (const event of stream) {
+     *   if (event.type === "text_delta") process.stdout.write(event.delta);
+     * }
+     *
+     * @example
+     * // Non-streaming
+     * const response = await bold.ai.ask({ prompt: "How do I price my SaaS?", stream: false });
+     * console.log(response.content);
+     */
+    ask(options: AskOptions & {
+        stream: false;
+    }): Promise<AIResponse>;
+    ask(options: AskOptions & {
+        stream?: true;
+    }): Promise<AsyncIterable<AIEvent>>;
+    ask(options: AskOptions): Promise<AsyncIterable<AIEvent> | AIResponse>;
+    /**
+     * Coach - Alias for ask() (Library-wide RAG assistant)
+     */
+    coach(options: AskOptions & {
+        stream: false;
+    }): Promise<AIResponse>;
+    coach(options: AskOptions & {
+        stream?: true;
+    }): Promise<AsyncIterable<AIEvent>>;
+    coach(options: AskOptions): Promise<AsyncIterable<AIEvent> | AIResponse>;
+    /**
+     * Search - Semantic search with light synthesis
+     *
+     * @example
+     * const stream = await bold.ai.search({ prompt: "pricing strategies", limit: 10 });
+     * for await (const event of stream) {
+     *   if (event.type === "sources") console.log("Found:", event.sources.length, "results");
+     * }
+     */
+    search(options: SearchOptions & {
+        stream: false;
+    }): Promise<AIResponse>;
+    search(options: SearchOptions & {
+        stream?: true;
+    }): Promise<AsyncIterable<AIEvent>>;
+    search(options: SearchOptions): Promise<AsyncIterable<AIEvent> | AIResponse>;
+    /**
+     * Chat - Video-scoped conversation
+     *
+     * @example
+     * const stream = await bold.ai.chat("video-id", { prompt: "What is discussed at 5 minutes?" });
+     * for await (const event of stream) {
+     *   if (event.type === "text_delta") process.stdout.write(event.delta);
+     * }
+     */
+    chat(videoId: string, options: ChatOptions & {
+        stream: false;
+    }): Promise<AIResponse>;
+    chat(videoId: string, options: ChatOptions & {
+        stream?: true;
+    }): Promise<AsyncIterable<AIEvent>>;
+    chat(videoId: string, options: ChatOptions): Promise<AsyncIterable<AIEvent> | AIResponse>;
 }
 
 type ClientOptions = {
@@ -235,10 +350,7 @@ declare function createClient(apiKey: string, options?: ClientOptions): {
             data: Playlist;
         }>;
     };
-    ai:
-        coach(options: CoachOptions): Promise<AsyncIterable<CoachEvent>>;
-        ask(videoId: string, options: AskOptions): Promise<AsyncIterable<CoachEvent>>;
-    };
+    ai: AIClient;
     trackEvent: (video: any, event: Event) => void;
     trackPageView: (title: string) => void;
 };
@@ -252,4 +364,4 @@ declare const DEFAULT_API_BASE_URL = "https://app.boldvideo.io/api/v1/";
  */
 declare const DEFAULT_INTERNAL_API_BASE_URL = "https://app.boldvideo.io/i/v1/";
 
-export { Account, AccountAI, AskOptions, AssistantConfig,
+export { AIEvent, AIResponse, AIUsage, Account, AccountAI, AskOptions, AssistantConfig, ChatOptions, ClientOptions, DEFAULT_API_BASE_URL, DEFAULT_INTERNAL_API_BASE_URL, MenuItem, Playlist, Portal, PortalDisplay, PortalLayout, PortalNavigation, PortalTheme, SearchOptions, Settings, Source, ThemeColors, ThemeConfig, Video, VideoAttachment, VideoDownloadUrls, VideoMetadata, VideoSubtitles, VideoTranscript, createClient };
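The `AIClient` overloads narrow the return type from the literal type of `stream`. A compile-time sketch under those declarations (prompts and key are placeholders; this is not shipped code):

import { createClient, type AIEvent, type AIResponse } from "@boldvideo/bold-js";

const bold = createClient("YOUR_API_KEY");

// A literal `stream: false` selects the Promise<AIResponse> overload.
const answer: AIResponse = await bold.ai.ask({ prompt: "What does onboarding cover?", stream: false });

// Omitting `stream` (or passing `true`) selects the Promise<AsyncIterable<AIEvent>> overload.
const results: AsyncIterable<AIEvent> = await bold.ai.search({ prompt: "pricing strategies", limit: 5 });

// With a non-literal boolean only the union overload applies, so narrow at runtime.
async function run(stream: boolean): Promise<void> {
  const result = await bold.ai.ask({ prompt: "Hello", stream });
  if ("content" in result) {
    console.log(result.content);        // AIResponse
  } else {
    for await (const event of result) { // AsyncIterable<AIEvent>
      if (event.type === "text_delta") process.stdout.write(event.delta);
    }
  }
}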
package/dist/index.js
CHANGED
@@ -209,7 +209,7 @@ async function* parseSSE(response) {
       try {
         const event = JSON.parse(json);
         yield event;
-        if (event.type === "
+        if (event.type === "message_complete" || event.type === "error") {
           await reader.cancel();
           return;
         }
@@ -221,9 +221,12 @@ async function* parseSSE(response) {
     reader.releaseLock();
   }
 }
+function buildURL(baseURL, path) {
+  const base = baseURL.endsWith("/") ? baseURL : `${baseURL}/`;
+  return new URL(path, base);
+}
 async function streamRequest(path, body, config) {
-  const
-  const url = new URL(path, baseURL);
+  const url = buildURL(config.baseURL, path);
   const response = await fetch(url, {
     method: "POST",
     headers: {
@@ -238,41 +241,66 @@ async function streamRequest(path, body, config) {
   }
   return parseSSE(response);
 }
-function
-
-
-
-
-
-
-
- * const stream = await bold.ai.coach({ message: "How do I price my SaaS?" });
- * for await (const event of stream) {
- *   if (event.type === "token") console.log(event.content);
- * }
- */
-    async coach(options) {
-      const path = options.conversationId ? `coach/${options.conversationId}` : "coach";
-      const body = { message: options.message };
-      if (options.collectionId)
-        body.collection_id = options.collectionId;
-      return streamRequest(path, body, config);
+async function jsonRequest(path, body, config) {
+  const url = buildURL(config.baseURL, path);
+  const response = await fetch(url, {
+    method: "POST",
+    headers: {
+      "Content-Type": "application/json",
+      "Accept": "application/json",
+      ...config.headers
    },
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    body: JSON.stringify(body)
+  });
+  if (!response.ok) {
+    throw new Error(`AI request failed: ${response.status} ${response.statusText}`);
+  }
+  return response.json();
+}
+function createAI(config) {
+  async function ask(options) {
+    const path = options.conversationId ? `ai/ask/${options.conversationId}` : "ai/ask";
+    const body = { prompt: options.prompt };
+    if (options.collectionId)
+      body.collection_id = options.collectionId;
+    if (options.stream === false) {
+      body.stream = false;
+      return jsonRequest(path, body, config);
    }
+    return streamRequest(path, body, config);
+  }
+  async function coach(options) {
+    return ask(options);
+  }
+  async function search(options) {
+    const path = "ai/search";
+    const body = { prompt: options.prompt };
+    if (options.limit)
+      body.limit = options.limit;
+    if (options.collectionId)
+      body.collection_id = options.collectionId;
+    if (options.videoId)
+      body.video_id = options.videoId;
+    if (options.stream === false) {
+      body.stream = false;
+      return jsonRequest(path, body, config);
+    }
+    return streamRequest(path, body, config);
+  }
+  async function chat(videoId, options) {
+    const path = options.conversationId ? `ai/videos/${videoId}/chat/${options.conversationId}` : `ai/videos/${videoId}/chat`;
+    const body = { prompt: options.prompt };
+    if (options.stream === false) {
+      body.stream = false;
+      return jsonRequest(path, body, config);
+    }
+    return streamRequest(path, body, config);
+  }
+  return {
+    ask,
+    coach,
+    search,
+    chat
+  };
 }
 
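The ESM build carries the same `buildURL` change as the CJS build above. For reference, the trailing-slash behavior it guards against, which is standard WHATWG `URL` resolution shown here with the package's default base URL:

// Without a trailing slash, the URL constructor drops the last path segment
// of the base when resolving a relative path:
new URL("ai/ask", "https://app.boldvideo.io/api/v1").href;
//  -> "https://app.boldvideo.io/api/ai/ask"   ("v1" is lost)

// With the slash that buildURL() guarantees, the segment is preserved:
new URL("ai/ask", "https://app.boldvideo.io/api/v1/").href;
//  -> "https://app.boldvideo.io/api/v1/ai/ask"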