clefbase 1.5.2 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/ai.d.ts ADDED
@@ -0,0 +1,369 @@
1
+ import { HttpClient } from "./http";
2
+ export type AIProvider = "anthropic" | "google";
3
+ export type AIModelCategory = "text" | "code" | "image" | "video" | "embedding";
4
+ /**
5
+ * A model available in the project's AI service.
6
+ */
7
+ export interface AIModel {
8
+ id: string;
9
+ name: string;
10
+ provider: AIProvider;
11
+ category: AIModelCategory;
12
+ description: string;
13
+ maxTokens?: number;
14
+ supportsStreaming?: boolean;
15
+ supportsSystemPrompt?: boolean;
16
+ }
17
+ export interface GenerateTextOptions {
18
+ /** Model ID, e.g. `"claude-sonnet-4-5"` or `"gemini-2.5-flash"` */
19
+ model: string;
20
+ /** The user-turn prompt */
21
+ prompt: string;
22
+ /** System-level instructions (supported by Anthropic + Gemini) */
23
+ systemPrompt?: string;
24
+ /** Hard token cap on the response */
25
+ maxTokens?: number;
26
+ /** Sampling temperature 0–2. Lower = more deterministic. */
27
+ temperature?: number;
28
+ /** Prior turns to send as context */
29
+ history?: Array<{
30
+ role: "user" | "assistant";
31
+ content: string;
32
+ }>;
33
+ }
34
+ export interface GenerateTextResult {
35
+ id: string;
36
+ model: string;
37
+ provider: AIProvider;
38
+ /** `"text"` or `"code"` */
39
+ category: AIModelCategory;
40
+ content: string;
41
+ inputTokens: number;
42
+ outputTokens: number;
43
+ durationMs: number;
44
+ createdAt: string;
45
+ }
46
+ export interface GenerateImageOptions {
47
+ /** Model ID, e.g. `"imagen-4.0-generate-001"` */
48
+ model: string;
49
+ /** Text description of the desired image */
50
+ prompt: string;
51
+ /** Things to exclude from the image */
52
+ negativePrompt?: string;
53
+ /** Output canvas ratio */
54
+ aspectRatio?: "1:1" | "16:9" | "9:16" | "4:3" | "3:4";
55
+ /** Number of images to generate (1–4) */
56
+ numberOfImages?: number;
57
+ /**
58
+ * Folder name inside the project's AI-output storage bucket where
59
+ * generated images will be saved, e.g. `"marketing"`.
60
+ * Leave undefined to use the bucket root.
61
+ */
62
+ outputFolder?: string;
63
+ }
64
+ export interface GeneratedMediaFile {
65
+ /** ID of the file in project Storage */
66
+ storageFileId: string;
67
+ /** Storage bucket ID */
68
+ bucketId: string;
69
+ /** Full storage path, e.g. `"ai-outputs/images/img_123.png"` */
70
+ fullPath: string;
71
+ sizeBytes: number;
72
+ mimeType: string;
73
+ }
74
+ export interface GenerateImageResult {
75
+ id: string;
76
+ model: string;
77
+ provider: AIProvider;
78
+ category: "image";
79
+ prompt: string;
80
+ /** Each generated image as a saved storage file */
81
+ files: GeneratedMediaFile[];
82
+ durationMs: number;
83
+ createdAt: string;
84
+ }
85
+ export interface GenerateVideoOptions {
86
+ /** Model ID, e.g. `"veo-3.1-generate-preview"` */
87
+ model: string;
88
+ /** Text description of the video */
89
+ prompt: string;
90
+ negativePrompt?: string;
91
+ /** Requested clip length in seconds (1–30) */
92
+ durationSeconds?: number;
93
+ aspectRatio?: "16:9" | "9:16" | "1:1";
94
+ /**
95
+ * Folder inside the AI-output storage bucket where the video will be saved.
96
+ */
97
+ outputFolder?: string;
98
+ }
99
+ export interface GenerateVideoResult {
100
+ id: string;
101
+ model: string;
102
+ provider: AIProvider;
103
+ category: "video";
104
+ prompt: string;
105
+ /** `"completed"` once the async Veo job finishes (may take 1–5 min) */
106
+ status: "completed" | "pending" | "failed";
107
+ /** Server-side async operation name — informational */
108
+ operationName?: string;
109
+ files: GeneratedMediaFile[];
110
+ durationMs: number;
111
+ createdAt: string;
112
+ }
113
+ export interface GenerateEmbeddingOptions {
114
+ /** Model ID, e.g. `"gemini-embedding-001"` */
115
+ model: string;
116
+ /** One string or an array of strings to embed */
117
+ input: string | string[];
118
+ }
119
+ export interface GenerateEmbeddingResult {
120
+ id: string;
121
+ model: string;
122
+ provider: AIProvider;
123
+ category: "embedding";
124
+ /** One float array per input string */
125
+ embeddings: number[][];
126
+ inputTokens: number;
127
+ durationMs: number;
128
+ createdAt: string;
129
+ }
130
+ export interface AIUsageRecord {
131
+ id: string;
132
+ model: string;
133
+ provider: AIProvider;
134
+ category: AIModelCategory;
135
+ inputTokens: number;
136
+ outputTokens: number;
137
+ mediaCount: number;
138
+ durationMs: number;
139
+ status: "success" | "error";
140
+ error?: string;
141
+ createdAt: string;
142
+ }
143
+ export interface AIUsageStats {
144
+ totalRequests: number;
145
+ successRequests: number;
146
+ errorRequests: number;
147
+ totalInputTokens: number;
148
+ totalOutputTokens: number;
149
+ totalMediaGenerated: number;
150
+ byModel: Record<string, number>;
151
+ byCategory: Partial<Record<AIModelCategory, number>>;
152
+ }
153
+ /**
154
+ * Thrown by all AI SDK methods when the server returns an error.
155
+ *
156
+ * @example
157
+ * import { AIError } from "clefbase";
158
+ *
159
+ * try {
160
+ * const result = await ai.text({ model: "claude-sonnet-4-5", prompt: "Hello" });
161
+ * } catch (err) {
162
+ * if (err instanceof AIError) {
163
+ * console.error(err.message, err.httpStatus);
164
+ * }
165
+ * }
166
+ */
167
+ export declare class AIError extends Error {
168
+ /** HTTP status code returned by the server, if available */
169
+ readonly httpStatus?: number | undefined;
170
+ constructor(message: string,
171
+ /** HTTP status code returned by the server, if available */
172
+ httpStatus?: number | undefined);
173
+ }
174
+ /**
175
+ * Clefbase AI service — obtained via `getAI(app)`.
176
+ *
177
+ * Provides access to text/code generation (Claude & Gemini), image generation
178
+ * (Imagen 4), video generation (Veo 3.1), and text embeddings.
179
+ * Generated images and videos are automatically saved to the project's
180
+ * configured storage bucket.
181
+ *
182
+ * @example
183
+ * import { initClefbase, getAI } from "clefbase";
184
+ *
185
+ * const app = initClefbase({ serverUrl, projectId, apiKey, adminSecret: "" });
186
+ * const ai = getAI(app);
187
+ *
188
+ * // ── Text / code ────────────────────────────────────────────────────────────
189
+ * const { content } = await ai.text({
190
+ * model: "claude-sonnet-4-5",
191
+ * prompt: "Explain promises in JavaScript",
192
+ * });
193
+ *
194
+ * // ── Multi-turn chat ────────────────────────────────────────────────────────
195
+ * const { content } = await ai.text({
196
+ * model: "gemini-2.5-flash",
197
+ * prompt: "What did I just ask you?",
198
+ * systemPrompt: "You are a helpful tutor.",
199
+ * history: [
200
+ * { role: "user", content: "What is 2 + 2?" },
201
+ * { role: "assistant", content: "4" },
202
+ * ],
203
+ * });
204
+ *
205
+ * // ── Image (saved to Storage) ────────────────────────────────────────────────
206
+ * const { files } = await ai.image({
207
+ * model: "imagen-4.0-generate-001",
208
+ * prompt: "A serene mountain lake at dawn, photorealistic",
209
+ * aspectRatio: "16:9",
210
+ * numberOfImages: 2,
211
+ * outputFolder: "landscapes",
212
+ * });
213
+ * // files[].fullPath → path in project Storage
214
+ * // files[].storageFileId → pass to storage SDK to get a download URL
215
+ *
216
+ * // ── Video (saved to Storage) ───────────────────────────────────────────────
217
+ * const { status, files } = await ai.video({
218
+ * model: "veo-3.1-generate-preview",
219
+ * prompt: "A slow-motion waterfall in a rainforest",
220
+ * durationSeconds: 8,
221
+ * aspectRatio: "16:9",
222
+ * });
223
+ *
224
+ * // ── Embeddings ─────────────────────────────────────────────────────────────
225
+ * const { embeddings } = await ai.embedding({
226
+ * model: "gemini-embedding-001",
227
+ * input: ["Hello world", "Semantic search"],
228
+ * });
229
+ * // embeddings: number[][]
230
+ *
231
+ * // ── Browse models ──────────────────────────────────────────────────────────
232
+ * const imageModels = await ai.listModels({ category: "image" });
233
+ *
234
+ * // ── Usage ──────────────────────────────────────────────────────────────────
235
+ * const stats = await ai.getStats();
236
+ * const history = await ai.getUsage({ limit: 50 });
237
+ */
238
+ export declare class ClefbaseAI {
239
+ private readonly _http;
240
+ /** @internal */
241
+ constructor(_http: HttpClient);
242
+ /**
243
+ * List all AI models available on the server.
244
+ * Optionally filter by `provider` or `category`.
245
+ *
246
+ * @example
247
+ * const textModels = await ai.listModels({ category: "text" });
248
+ * const googleModels = await ai.listModels({ provider: "google" });
249
+ * const all = await ai.listModels();
250
+ */
251
+ listModels(filter?: {
252
+ provider?: AIProvider;
253
+ category?: AIModelCategory;
254
+ }): Promise<AIModel[]>;
255
+ /**
256
+ * Generate text or code with a Claude or Gemini model.
257
+ *
258
+ * @example
259
+ * const { content, inputTokens, outputTokens } = await ai.text({
260
+ * model: "claude-sonnet-4-5",
261
+ * prompt: "Write a bubble-sort in Python",
262
+ * systemPrompt: "Return only code, no explanation.",
263
+ * maxTokens: 512,
264
+ * });
265
+ */
266
+ text(options: GenerateTextOptions): Promise<GenerateTextResult>;
267
+ /**
268
+ * Generate one or more images with an Imagen model.
269
+ * Outputs are automatically saved to the project's storage bucket.
270
+ *
271
+ * @example
272
+ * const { files } = await ai.image({
273
+ * model: "imagen-4.0-generate-001",
274
+ * prompt: "A futuristic city skyline at sunset",
275
+ * aspectRatio: "16:9",
276
+ * });
277
+ * const path = files[0].fullPath; // use with storage.ref()
278
+ */
279
+ image(options: GenerateImageOptions): Promise<GenerateImageResult>;
280
+ /**
281
+ * Generate a video with Veo 3.1.
282
+ * This is an async server-side operation — the SDK call blocks until the
283
+ * job completes (usually 1–5 minutes). Outputs are saved to project storage.
284
+ *
285
+ * @example
286
+ * const { status, files } = await ai.video({
287
+ * model: "veo-3.1-generate-preview",
288
+ * prompt: "A golden retriever running on a sunny beach",
289
+ * durationSeconds: 5,
290
+ * outputFolder: "clips",
291
+ * });
292
+ */
293
+ video(options: GenerateVideoOptions): Promise<GenerateVideoResult>;
294
+ /**
295
+ * Generate text embeddings — one float vector per input string.
296
+ *
297
+ * @example
298
+ * const { embeddings } = await ai.embedding({
299
+ * model: "gemini-embedding-001",
300
+ * input: ["cat", "dog", "automobile"],
301
+ * });
302
+ * // embeddings[0].length === 768
303
+ */
304
+ embedding(options: GenerateEmbeddingOptions): Promise<GenerateEmbeddingResult>;
305
+ /**
306
+ * Fetch aggregated usage statistics for this project.
307
+ *
308
+ * @example
309
+ * const { totalRequests, totalInputTokens, totalMediaGenerated } = await ai.getStats();
310
+ */
311
+ getStats(): Promise<AIUsageStats>;
312
+ /**
313
+ * Fetch the raw request log for this project (newest first).
314
+ *
315
+ * @param opts.limit Max records to return (default: 50, server max: 200)
316
+ *
317
+ * @example
318
+ * const records = await ai.getUsage({ limit: 20 });
319
+ * for (const r of records) {
320
+ * console.log(r.model, r.status, r.durationMs);
321
+ * }
322
+ */
323
+ getUsage(opts?: {
324
+ limit?: number;
325
+ }): Promise<AIUsageRecord[]>;
326
+ private _wrap;
327
+ }
328
+ /**
329
+ * Generate text or code. Sugar for `ai.text(options)`.
330
+ *
331
+ * @example
332
+ * const { content } = await generateText(ai, {
333
+ * model: "claude-sonnet-4-5",
334
+ * prompt: "What is the capital of France?",
335
+ * });
336
+ */
337
+ export declare function generateText(ai: ClefbaseAI, options: GenerateTextOptions): Promise<GenerateTextResult>;
338
+ /**
339
+ * Generate images. Sugar for `ai.image(options)`.
340
+ *
341
+ * @example
342
+ * const { files } = await generateImage(ai, {
343
+ * model: "imagen-4.0-generate-001",
344
+ * prompt: "A calm ocean at sunset",
345
+ * });
346
+ */
347
+ export declare function generateImage(ai: ClefbaseAI, options: GenerateImageOptions): Promise<GenerateImageResult>;
348
+ /**
349
+ * Generate a video. Sugar for `ai.video(options)`.
350
+ *
351
+ * @example
352
+ * const { files } = await generateVideo(ai, {
353
+ * model: "veo-3.1-generate-preview",
354
+ * prompt: "A timelapse of clouds over a mountain",
355
+ * durationSeconds: 6,
356
+ * });
357
+ */
358
+ export declare function generateVideo(ai: ClefbaseAI, options: GenerateVideoOptions): Promise<GenerateVideoResult>;
359
+ /**
360
+ * Generate embeddings. Sugar for `ai.embedding(options)`.
361
+ *
362
+ * @example
363
+ * const { embeddings } = await generateEmbedding(ai, {
364
+ * model: "gemini-embedding-001",
365
+ * input: "Hello world",
366
+ * });
367
+ */
368
+ export declare function generateEmbedding(ai: ClefbaseAI, options: GenerateEmbeddingOptions): Promise<GenerateEmbeddingResult>;
369
+ //# sourceMappingURL=ai.d.ts.map
package/dist/ai.d.ts.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"file":"ai.d.ts","sourceRoot":"","sources":["../src/ai.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAKpC,MAAM,MAAM,UAAU,GAAG,WAAW,GAAG,QAAQ,CAAC;AAChD,MAAM,MAAM,eAAe,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,GAAG,OAAO,GAAG,WAAW,CAAC;AAEhF;;GAEG;AACH,MAAM,WAAW,OAAO;IACtB,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,EAAE,UAAU,CAAC;IACrB,QAAQ,EAAE,eAAe,CAAC;IAC1B,WAAW,EAAE,MAAM,CAAC;IACpB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,iBAAiB,CAAC,EAAE,OAAO,CAAC;IAC5B,oBAAoB,CAAC,EAAE,OAAO,CAAC;CAChC;AAID,MAAM,WAAW,mBAAmB;IAClC,mEAAmE;IACnE,KAAK,EAAE,MAAM,CAAC;IACd,2BAA2B;IAC3B,MAAM,EAAE,MAAM,CAAC;IACf,kEAAkE;IAClE,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,qCAAqC;IACrC,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,4DAA4D;IAC5D,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,qCAAqC;IACrC,OAAO,CAAC,EAAE,KAAK,CAAC;QAAE,IAAI,EAAE,MAAM,GAAG,WAAW,CAAC;QAAC,OAAO,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC;CAClE;AAED,MAAM,WAAW,kBAAkB;IACjC,EAAE,EAAE,MAAM,CAAC;IACX,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,EAAE,UAAU,CAAC;IACrB,2BAA2B;IAC3B,QAAQ,EAAE,eAAe,CAAC;IAC1B,OAAO,EAAE,MAAM,CAAC;IAChB,WAAW,EAAE,MAAM,CAAC;IACpB,YAAY,EAAE,MAAM,CAAC;IACrB,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,MAAM,CAAC;CACnB;AAID,MAAM,WAAW,oBAAoB;IACnC,iDAAiD;IACjD,KAAK,EAAE,MAAM,CAAC;IACd,4CAA4C;IAC5C,MAAM,EAAE,MAAM,CAAC;IACf,uCAAuC;IACvC,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,0BAA0B;IAC1B,WAAW,CAAC,EAAE,KAAK,GAAG,MAAM,GAAG,MAAM,GAAG,KAAK,GAAG,KAAK,CAAC;IACtD,yCAAyC;IACzC,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB;;;;OAIG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;CACvB;AAED,MAAM,WAAW,kBAAkB;IACjC,wCAAwC;IACxC,aAAa,EAAE,MAAM,CAAC;IACtB,wBAAwB;IACxB,QAAQ,EAAE,MAAM,CAAC;IACjB,gEAAgE;IAChE,QAAQ,EAAE,MAAM,CAAC;IACjB,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,EAAE,MAAM,CAAC;CAClB;AAED,MAAM,WAAW,mBAAmB;IAClC,EAAE,EAAE,MAAM,CAAC;IACX,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,EAAE,UAAU,CAAC;IACrB,QAAQ,EAAE,OAAO,CAAC;IAClB,MAAM,EAAE,MAAM,CAAC;IACf,mDAAmD;IACnD,KAAK,EAAE,kBAAkB,EAAE,CAAC;IAC5B,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,MAAM,CAAC;CACnB;AAID,MAAM,WAAW,oBAAoB;IACnC,kDAAkD;IAClD,KAAK,EAAE,MAAM,CAAC;IACd,oCAAoC;IACpC,
MAAM,EAAE,MAAM,CAAC;IACf,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,8CAA8C;IAC9C,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,WAAW,CAAC,EAAE,MAAM,GAAG,MAAM,GAAG,KAAK,CAAC;IACtC;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;CACvB;AAED,MAAM,WAAW,mBAAmB;IAClC,EAAE,EAAE,MAAM,CAAC;IACX,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,EAAE,UAAU,CAAC;IACrB,QAAQ,EAAE,OAAO,CAAC;IAClB,MAAM,EAAE,MAAM,CAAC;IACf,uEAAuE;IACvE,MAAM,EAAE,WAAW,GAAG,SAAS,GAAG,QAAQ,CAAC;IAC3C,uDAAuD;IACvD,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,KAAK,EAAE,kBAAkB,EAAE,CAAC;IAC5B,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,MAAM,CAAC;CACnB;AAID,MAAM,WAAW,wBAAwB;IACvC,8CAA8C;IAC9C,KAAK,EAAE,MAAM,CAAC;IACd,iDAAiD;IACjD,KAAK,EAAE,MAAM,GAAG,MAAM,EAAE,CAAC;CAC1B;AAED,MAAM,WAAW,uBAAuB;IACtC,EAAE,EAAE,MAAM,CAAC;IACX,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,EAAE,UAAU,CAAC;IACrB,QAAQ,EAAE,WAAW,CAAC;IACtB,uCAAuC;IACvC,UAAU,EAAE,MAAM,EAAE,EAAE,CAAC;IACvB,WAAW,EAAE,MAAM,CAAC;IACpB,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,MAAM,CAAC;CACnB;AAID,MAAM,WAAW,aAAa;IAC5B,EAAE,EAAE,MAAM,CAAC;IACX,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,EAAE,UAAU,CAAC;IACrB,QAAQ,EAAE,eAAe,CAAC;IAC1B,WAAW,EAAE,MAAM,CAAC;IACpB,YAAY,EAAE,MAAM,CAAC;IACrB,UAAU,EAAE,MAAM,CAAC;IACnB,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,SAAS,GAAG,OAAO,CAAC;IAC5B,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,SAAS,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,WAAW,YAAY;IAC3B,aAAa,EAAE,MAAM,CAAC;IACtB,eAAe,EAAE,MAAM,CAAC;IACxB,aAAa,EAAE,MAAM,CAAC;IACtB,gBAAgB,EAAE,MAAM,CAAC;IACzB,iBAAiB,EAAE,MAAM,CAAC;IAC1B,mBAAmB,EAAE,MAAM,CAAC;IAC5B,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAChC,UAAU,EAAE,OAAO,CAAC,MAAM,CAAC,eAAe,EAAE,MAAM,CAAC,CAAC,CAAC;CACtD;AAID;;;;;;;;;;;;;GAaG;AACH,qBAAa,OAAQ,SAAQ,KAAK;IAG9B,4DAA4D;aAC5C,UAAU,CAAC,EAAE,MAAM;gBAFnC,OAAO,EAAE,MAAM;IACf,4DAA4D;IAC5C,UAAU,CAAC,EAAE,MAAM,YAAA;CAMtC;AAID;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA+DG;AACH,qBAAa,UAAU;IAET,OAAO,CAAC,QAAQ,CAAC,KAAK;IADlC,gBAAgB;gBACa,KAAK,EAAE,UAAU;IAI9C;;;;;;;;OAQG;IACG,UAAU,CAAC,MAAM,CAAC,EAAE;QACxB,QAAQ,CAAC,EAAE,UAAU,CAAC;QACtB,QAAQ,CAAC,EAAE,eAAe,CAAC;KAC5B,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;I
ActB;;;;;;;;;;OAUG;IACG,IAAI,CAAC,OAAO,EAAE,mBAAmB,GAAG,OAAO,CAAC,kBAAkB,CAAC;IAUrE;;;;;;;;;;;OAWG;IACG,KAAK,CAAC,OAAO,EAAE,oBAAoB,GAAG,OAAO,CAAC,mBAAmB,CAAC;IAUxE;;;;;;;;;;;;OAYG;IACG,KAAK,CAAC,OAAO,EAAE,oBAAoB,GAAG,OAAO,CAAC,mBAAmB,CAAC;IAUxE;;;;;;;;;OASG;IACG,SAAS,CAAC,OAAO,EAAE,wBAAwB,GAAG,OAAO,CAAC,uBAAuB,CAAC;IAUpF;;;;;OAKG;IACG,QAAQ,IAAI,OAAO,CAAC,YAAY,CAAC;IAUvC;;;;;;;;;;OAUG;IACG,QAAQ,CAAC,IAAI,CAAC,EAAE;QAAE,KAAK,CAAC,EAAE,MAAM,CAAA;KAAE,GAAG,OAAO,CAAC,aAAa,EAAE,CAAC;IAWnE,OAAO,CAAC,KAAK;CAKd;AAID;;;;;;;;GAQG;AACH,wBAAsB,YAAY,CAChC,EAAE,EAAE,UAAU,EACd,OAAO,EAAE,mBAAmB,GAC3B,OAAO,CAAC,kBAAkB,CAAC,CAE7B;AAED;;;;;;;;GAQG;AACH,wBAAsB,aAAa,CACjC,EAAE,EAAE,UAAU,EACd,OAAO,EAAE,oBAAoB,GAC5B,OAAO,CAAC,mBAAmB,CAAC,CAE9B;AAED;;;;;;;;;GASG;AACH,wBAAsB,aAAa,CACjC,EAAE,EAAE,UAAU,EACd,OAAO,EAAE,oBAAoB,GAC5B,OAAO,CAAC,mBAAmB,CAAC,CAE9B;AAED;;;;;;;;GAQG;AACH,wBAAsB,iBAAiB,CACrC,EAAE,EAAE,UAAU,EACd,OAAO,EAAE,wBAAwB,GAChC,OAAO,CAAC,uBAAuB,CAAC,CAElC"}
package/dist/ai.js ADDED
@@ -0,0 +1,308 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.ClefbaseAI = exports.AIError = void 0;
4
+ exports.generateText = generateText;
5
+ exports.generateImage = generateImage;
6
+ exports.generateVideo = generateVideo;
7
+ exports.generateEmbedding = generateEmbedding;
8
+ const types_1 = require("./types");
9
+ // ─── AIError ──────────────────────────────────────────────────────────────────
10
+ /**
11
+ * Thrown by all AI SDK methods when the server returns an error.
12
+ *
13
+ * @example
14
+ * import { AIError } from "clefbase";
15
+ *
16
+ * try {
17
+ * const result = await ai.text({ model: "claude-sonnet-4-5", prompt: "Hello" });
18
+ * } catch (err) {
19
+ * if (err instanceof AIError) {
20
+ * console.error(err.message, err.httpStatus);
21
+ * }
22
+ * }
23
+ */
24
+ class AIError extends Error {
25
+ constructor(message,
26
+ /** HTTP status code returned by the server, if available */
27
+ httpStatus) {
28
+ super(message);
29
+ this.httpStatus = httpStatus;
30
+ this.name = "AIError";
31
+ if (Error.captureStackTrace)
32
+ Error.captureStackTrace(this, AIError);
33
+ }
34
+ }
35
+ exports.AIError = AIError;
36
+ // ─── ClefbaseAI ───────────────────────────────────────────────────────────────
37
+ /**
38
+ * Clefbase AI service — obtained via `getAI(app)`.
39
+ *
40
+ * Provides access to text/code generation (Claude & Gemini), image generation
41
+ * (Imagen 3), video generation (Veo 2), and text embeddings.
42
+ * Generated images and videos are automatically saved to the project's
43
+ * configured storage bucket.
44
+ *
45
+ * @example
46
+ * import { initClefbase, getAI } from "clefbase";
47
+ *
48
+ * const app = initClefbase({ serverUrl, projectId, apiKey, adminSecret: "" });
49
+ * const ai = getAI(app);
50
+ *
51
+ * // ── Text / code ────────────────────────────────────────────────────────────
52
+ * const { content } = await ai.text({
53
+ * model: "claude-sonnet-4-5",
54
+ * prompt: "Explain promises in JavaScript",
55
+ * });
56
+ *
57
+ * // ── Multi-turn chat ────────────────────────────────────────────────────────
58
+ * const { content } = await ai.text({
59
+ * model: "gemini-2.5-flash",
60
+ * prompt: "What did I just ask you?",
61
+ * systemPrompt: "You are a helpful tutor.",
62
+ * history: [
63
+ * { role: "user", content: "What is 2 + 2?" },
64
+ * { role: "assistant", content: "4" },
65
+ * ],
66
+ * });
67
+ *
68
+ * // ── Image (saved to Storage) ────────────────────────────────────────────────
69
+ * const { files } = await ai.image({
70
+ * model: "imagen-4.0-generate-001",
71
+ * prompt: "A serene mountain lake at dawn, photorealistic",
72
+ * aspectRatio: "16:9",
73
+ * numberOfImages: 2,
74
+ * outputFolder: "landscapes",
75
+ * });
76
+ * // files[].fullPath → path in project Storage
77
+ * // files[].storageFileId → pass to storage SDK to get a download URL
78
+ *
79
+ * // ── Video (saved to Storage) ───────────────────────────────────────────────
80
+ * const { status, files } = await ai.video({
81
+ * model: "veo-3.1-generate-preview",
82
+ * prompt: "A slow-motion waterfall in a rainforest",
83
+ * durationSeconds: 8,
84
+ * aspectRatio: "16:9",
85
+ * });
86
+ *
87
+ * // ── Embeddings ─────────────────────────────────────────────────────────────
88
+ * const { embeddings } = await ai.embedding({
89
+ * model: "gemini-embedding-001",
90
+ * input: ["Hello world", "Semantic search"],
91
+ * });
92
+ * // embeddings: number[][]
93
+ *
94
+ * // ── Browse models ──────────────────────────────────────────────────────────
95
+ * const imageModels = await ai.listModels({ category: "image" });
96
+ *
97
+ * // ── Usage ──────────────────────────────────────────────────────────────────
98
+ * const stats = await ai.getStats();
99
+ * const history = await ai.getUsage({ limit: 50 });
100
+ */
101
+ class ClefbaseAI {
102
+ /** @internal */
103
+ constructor(_http) {
104
+ this._http = _http;
105
+ }
106
+ // ─── listModels ─────────────────────────────────────────────────────────────
107
+ /**
108
+ * List all AI models available on the server.
109
+ * Optionally filter by `provider` or `category`.
110
+ *
111
+ * @example
112
+ * const textModels = await ai.listModels({ category: "text" });
113
+ * const googleModels = await ai.listModels({ provider: "google" });
114
+ * const all = await ai.listModels();
115
+ */
116
+ async listModels(filter) {
117
+ const qs = new URLSearchParams();
118
+ if (filter?.provider)
119
+ qs.set("provider", filter.provider);
120
+ if (filter?.category)
121
+ qs.set("category", filter.category);
122
+ const query = qs.toString() ? `?${qs}` : "";
123
+ try {
124
+ return await this._http.get(`/models${query}`);
125
+ }
126
+ catch (err) {
127
+ throw this._wrap(err);
128
+ }
129
+ }
130
+ // ─── text ────────────────────────────────────────────────────────────────────
131
+ /**
132
+ * Generate text or code with a Claude or Gemini model.
133
+ *
134
+ * @example
135
+ * const { content, inputTokens, outputTokens } = await ai.text({
136
+ * model: "claude-sonnet-4-5",
137
+ * prompt: "Write a bubble-sort in Python",
138
+ * systemPrompt: "Return only code, no explanation.",
139
+ * maxTokens: 512,
140
+ * });
141
+ */
142
+ async text(options) {
143
+ try {
144
+ return await this._http.post("/generate/text", options);
145
+ }
146
+ catch (err) {
147
+ throw this._wrap(err);
148
+ }
149
+ }
150
+ // ─── image ───────────────────────────────────────────────────────────────────
151
+ /**
152
+ * Generate one or more images with an Imagen model.
153
+ * Outputs are automatically saved to the project's storage bucket.
154
+ *
155
+ * @example
156
+ * const { files } = await ai.image({
157
+ * model: "imagen-4.0-generate-001",
158
+ * prompt: "A futuristic city skyline at sunset",
159
+ * aspectRatio: "16:9",
160
+ * });
161
+ * const path = files[0].fullPath; // use with storage.ref()
162
+ */
163
+ async image(options) {
164
+ try {
165
+ return await this._http.post("/generate/image", options);
166
+ }
167
+ catch (err) {
168
+ throw this._wrap(err);
169
+ }
170
+ }
171
+ // ─── video ───────────────────────────────────────────────────────────────────
172
+ /**
173
+ * Generate a video with Veo 2.
174
+ * This is an async server-side operation — the SDK call blocks until the
175
+ * job completes (usually 1–5 minutes). Outputs are saved to project storage.
176
+ *
177
+ * @example
178
+ * const { status, files } = await ai.video({
179
+ * model: "veo-3.1-generate-preview",
180
+ * prompt: "A golden retriever running on a sunny beach",
181
+ * durationSeconds: 5,
182
+ * outputFolder: "clips",
183
+ * });
184
+ */
185
+ async video(options) {
186
+ try {
187
+ return await this._http.post("/generate/video", options);
188
+ }
189
+ catch (err) {
190
+ throw this._wrap(err);
191
+ }
192
+ }
193
+ // ─── embedding ───────────────────────────────────────────────────────────────
194
+ /**
195
+ * Generate text embeddings — one float vector per input string.
196
+ *
197
+ * @example
198
+ * const { embeddings } = await ai.embedding({
199
+ * model: "gemini-embedding-001",
200
+ * input: ["cat", "dog", "automobile"],
201
+ * });
202
+ * // embeddings[0].length === 768
203
+ */
204
+ async embedding(options) {
205
+ try {
206
+ return await this._http.post("/generate/embedding", options);
207
+ }
208
+ catch (err) {
209
+ throw this._wrap(err);
210
+ }
211
+ }
212
+ // ─── getStats ────────────────────────────────────────────────────────────────
213
+ /**
214
+ * Fetch aggregated usage statistics for this project.
215
+ *
216
+ * @example
217
+ * const { totalRequests, totalInputTokens, totalMediaGenerated } = await ai.getStats();
218
+ */
219
+ async getStats() {
220
+ try {
221
+ return await this._http.get("/stats");
222
+ }
223
+ catch (err) {
224
+ throw this._wrap(err);
225
+ }
226
+ }
227
+ // ─── getUsage ────────────────────────────────────────────────────────────────
228
+ /**
229
+ * Fetch the raw request log for this project (newest first).
230
+ *
231
+ * @param opts.limit Max records to return (default: 50, server max: 200)
232
+ *
233
+ * @example
234
+ * const records = await ai.getUsage({ limit: 20 });
235
+ * for (const r of records) {
236
+ * console.log(r.model, r.status, r.durationMs);
237
+ * }
238
+ */
239
+ async getUsage(opts) {
240
+ const limit = Math.min(opts?.limit ?? 50, 200);
241
+ try {
242
+ return await this._http.get(`/usage?limit=${limit}`);
243
+ }
244
+ catch (err) {
245
+ throw this._wrap(err);
246
+ }
247
+ }
248
+ // ─── Internal ─────────────────────────────────────────────────────────────────
249
+ _wrap(err) {
250
+ if (err instanceof AIError)
251
+ return err;
252
+ if (err instanceof types_1.ClefbaseError)
253
+ return new AIError(err.message, err.status);
254
+ return new AIError(err.message ?? "Unknown AI error");
255
+ }
256
+ }
257
+ exports.ClefbaseAI = ClefbaseAI;
258
+ // ─── Top-level convenience functions ─────────────────────────────────────────
259
+ /**
260
+ * Generate text or code. Sugar for `ai.text(options)`.
261
+ *
262
+ * @example
263
+ * const { content } = await generateText(ai, {
264
+ * model: "claude-sonnet-4-5",
265
+ * prompt: "What is the capital of France?",
266
+ * });
267
+ */
268
+ async function generateText(ai, options) {
269
+ return ai.text(options);
270
+ }
271
+ /**
272
+ * Generate images. Sugar for `ai.image(options)`.
273
+ *
274
+ * @example
275
+ * const { files } = await generateImage(ai, {
276
+ * model: "imagen-4.0-generate-001",
277
+ * prompt: "A calm ocean at sunset",
278
+ * });
279
+ */
280
+ async function generateImage(ai, options) {
281
+ return ai.image(options);
282
+ }
283
+ /**
284
+ * Generate a video. Sugar for `ai.video(options)`.
285
+ *
286
+ * @example
287
+ * const { files } = await generateVideo(ai, {
288
+ * model: "veo-3.1-generate-preview",
289
+ * prompt: "A timelapse of clouds over a mountain",
290
+ * durationSeconds: 6,
291
+ * });
292
+ */
293
+ async function generateVideo(ai, options) {
294
+ return ai.video(options);
295
+ }
296
+ /**
297
+ * Generate embeddings. Sugar for `ai.embedding(options)`.
298
+ *
299
+ * @example
300
+ * const { embeddings } = await generateEmbedding(ai, {
301
+ * model: "gemini-embedding-001",
302
+ * input: "Hello world",
303
+ * });
304
+ */
305
+ async function generateEmbedding(ai, options) {
306
+ return ai.embedding(options);
307
+ }
308
+ //# sourceMappingURL=ai.js.map