@veroai/transcribe 1.0.0

package/README.md ADDED
@@ -0,0 +1,217 @@
+ # @veroai/transcribe
+
+ Official Node.js/TypeScript SDK for the VeroTranscribe API.
+
+ ## Installation
+
+ ```bash
+ npm install @veroai/transcribe
+ ```
+
+ ## Quick Start
+
+ ```typescript
+ import { VeroTranscribe } from '@veroai/transcribe';
+
+ const client = new VeroTranscribe({
+   apiKey: process.env.VERO_API_KEY,
+ });
+
+ // Transcribe an audio file
+ const result = await client.transcriptions.transcribe({
+   audioUrl: 'https://example.com/audio.mp3',
+   language: 'en',
+   aiAnalysis: 'basic',
+ });
+
+ console.log(result.transcript.text);
+ console.log(result.analysis);
+ ```
+
+ ## Configuration
+
+ ```typescript
+ const client = new VeroTranscribe({
+   apiKey: 'vt_sk_...', // Required: Your API key
+   baseUrl: 'https://custom.url', // Optional: Custom API base URL
+   timeout: 30000, // Optional: Request timeout in ms (default: 30000)
+ });
+ ```
+
+ ## Transcriptions
+
+ ### Methods
+
+ | Method | Description |
+ |--------|-------------|
+ | `transcriptions.create(params)` | Create a new transcription job |
+ | `transcriptions.get(id)` | Get a transcription by ID |
+ | `transcriptions.list(params?)` | List all transcriptions |
+ | `transcriptions.delete(id)` | Delete a transcription |
+ | `transcriptions.waitForCompletion(id, options?)` | Poll until transcription completes |
+ | `transcriptions.transcribe(params, options?)` | Create and wait for completion |
+
+ ### Create a Transcription
+
+ ```typescript
+ const transcription = await client.transcriptions.create({
+   audioUrl: 'https://example.com/audio.mp3',
+   language: 'en', // 'en' | 'he' | 'es' | 'auto'
+   aiAnalysis: 'basic', // 'none' | 'basic' | 'coach'
+   saveTranscript: false,
+   webhookUrl: 'https://your-server.com/webhook',
+   metadata: { callId: '123' },
+ });
+ ```
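+
+ If you omit `audioUrl`, the job is created without audio and waits for a file. A minimal sketch of that flow, based on the `upload()` method in the type declarations below (`audioFile` here is an assumed `File` or `Blob` you already have):
+
+ ```typescript
+ // Create the job first, then attach the audio and wait for the result.
+ const job = await client.transcriptions.create({
+   language: 'en',
+   aiAnalysis: 'basic',
+ });
+
+ await client.transcriptions.upload(job.id, audioFile);
+ const done = await client.transcriptions.waitForCompletion(job.id);
+ console.log(done.transcript?.text);
+ ```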
+
+ ### Get a Transcription
+
+ ```typescript
+ const transcription = await client.transcriptions.get('transcription-id');
+ console.log(transcription.status); // 'pending' | 'processing' | 'completed' | 'failed'
+ console.log(transcription.transcript); // { text, segments }
+ console.log(transcription.analysis); // { summary, keyPhrases, sentiment, ... }
+ ```
+
+ ### List Transcriptions
+
+ ```typescript
+ const { data, pagination } = await client.transcriptions.list({
+   limit: 20,
+   offset: 0,
+   status: 'completed',
+ });
+ ```
+
+ ### Wait for Completion
+
+ ```typescript
+ const result = await client.transcriptions.waitForCompletion('transcription-id', {
+   pollInterval: 2000, // Poll every 2s (default)
+   maxWaitTime: 300000, // Timeout after 5min (default)
+   onProgress: (t) => console.log(`Status: ${t.status}`),
+ });
+ ```
+
+ ### Transcribe (Create + Wait)
+
+ ```typescript
+ const result = await client.transcriptions.transcribe({
+   audioUrl: 'https://example.com/audio.mp3',
+   language: 'en',
+   aiAnalysis: 'coach',
+ });
+
+ // Returns the completed transcription with transcript and analysis
+ console.log(result.transcript.text);
+ console.log(result.analysis.summary);
+ console.log(result.analysis.coaching?.suggestions);
+ ```
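+
+ The type declarations below also expose batch helpers, `bulkUpload()` and `bulkTranscribe()`, that are not documented above. A sketch adapted from their doc comments (`file1`–`file3` are assumed `File` objects):
+
+ ```typescript
+ // Queue several files in one batch and wait for all of them to finish.
+ const batch = await client.transcriptions.bulkTranscribe(
+   [file1, file2, file3],
+   { language: 'en', aiAnalysis: 'basic' },
+   {
+     onBatchProgress: (completed, total) => {
+       console.log(`Progress: ${completed}/${total}`);
+     },
+   },
+ );
+
+ console.log(`Completed: ${batch.completed.length}, Failed: ${batch.failed.length}`);
+ ```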
+
+ ## Webhooks
+
+ ### Methods
+
+ | Method | Description |
+ |--------|-------------|
+ | `webhooks.create(params)` | Create a webhook endpoint |
+ | `webhooks.get(id)` | Get a webhook by ID |
+ | `webhooks.list()` | List all webhooks |
+ | `webhooks.update(id, params)` | Update a webhook |
+ | `webhooks.delete(id)` | Delete a webhook |
+ | `webhooks.deliveries(id)` | Get recent delivery history |
+
+ ### Create a Webhook
+
+ ```typescript
+ const webhook = await client.webhooks.create({
+   url: 'https://your-server.com/webhook',
+   events: ['transcription.completed', 'transcription.failed'],
+ });
+
+ // Save the secret - only shown once
+ console.log(webhook.secret); // 'whsec_...'
+ ```
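+
+ After creation, the `update()` and `deliveries()` methods from the type declarations below can manage the endpoint; a small sketch (continuing with the `webhook` from above):
+
+ ```typescript
+ // Pause the endpoint and inspect its most recent deliveries.
+ await client.webhooks.update(webhook.id, { isActive: false });
+
+ const { data: deliveries } = await client.webhooks.deliveries(webhook.id);
+ for (const delivery of deliveries) {
+   console.log(delivery.eventType, delivery.success ? 'delivered' : `failed (${delivery.statusCode})`);
+ }
+ ```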
+
+ ### Verify Webhook Signatures
+
+ ```typescript
+ import { createWebhookVerifier } from '@veroai/transcribe';
+
+ const verifier = createWebhookVerifier(process.env.WEBHOOK_SECRET);
+
+ // Use the raw (unparsed) request body so the signature check matches what was signed.
+ app.post('/webhook', async (req, res) => {
+   const isValid = await verifier.verify(
+     req.body,
+     req.headers['x-webhook-signature'],
+     req.headers['x-webhook-timestamp'] // timestamp sent with the signature (exact header name per your webhook docs)
+   );
+
+   if (!isValid) {
+     return res.status(401).send('Invalid signature');
+   }
+
+   const event = JSON.parse(req.body);
+   // Handle event...
+ });
+ ```
+
+ ### Webhook Events
+
+ | Event | Description |
+ |-------|-------------|
+ | `transcription.created` | Transcription job created |
+ | `transcription.processing` | Transcription started processing |
+ | `transcription.completed` | Transcription finished successfully |
+ | `transcription.failed` | Transcription failed |
+ | `analysis.completed` | AI analysis completed |
+
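+ A sketch of routing on these event names with the exported `WebhookEvent` union; the payload shape shown here is an assumption for illustration (only the event names come from the SDK types):
+
+ ```typescript
+ import type { WebhookEvent, Transcription } from '@veroai/transcribe';
+
+ // Hypothetical delivery payload; only `WebhookEvent` and `Transcription` come from the SDK.
+ interface WebhookPayload {
+   event: WebhookEvent;
+   transcription?: Transcription;
+ }
+
+ function handleEvent(payload: WebhookPayload) {
+   switch (payload.event) {
+     case 'transcription.completed':
+       console.log(payload.transcription?.transcript?.text);
+       break;
+     case 'transcription.failed':
+       console.error(payload.transcription?.errorMessage);
+       break;
+     case 'analysis.completed':
+       console.log(payload.transcription?.analysis?.summary);
+       break;
+     default:
+       // 'transcription.created' / 'transcription.processing' progress events
+       break;
+   }
+ }
+ ```
+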
+ ## Usage
+
+ ### Methods
+
+ | Method | Description |
+ |--------|-------------|
+ | `usage.get()` | Get current billing period usage |
+ | `usage.history(params?)` | Get daily usage history |
+
+ ```typescript
+ const usage = await client.usage.get();
+
+ console.log(usage.totalMinutes);
+ console.log(usage.totalCost);
+ ```
+
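+ A sketch of the daily history lookup, based on the `UsageResource` declaration below:
+
+ ```typescript
+ // Daily usage records for the last 7 days (the default window is 30 days).
+ const history = await client.usage.history({ days: 7 });
+ for (const record of history.data) {
+   console.log(record.date, record.recordType, record.total);
+ }
+ ```
+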
+ ## Error Handling
+
+ ```typescript
+ import { VeroAPIError } from '@veroai/transcribe';
+
+ try {
+   await client.transcriptions.create({ audioUrl: 'invalid' });
+ } catch (error) {
+   if (error instanceof VeroAPIError) {
+     console.error(error.code); // 'invalid_request'
+     console.error(error.message); // 'The audio URL is not accessible'
+     console.error(error.status); // 400
+   }
+ }
+ ```
+
+ ## Types
+
+ All types are exported for TypeScript users:
+
+ ```typescript
+ import type {
+   Transcription,
+   TranscriptionStatus,
+   Transcript,
+   Analysis,
+   AIType,
+   Language,
+   CreateTranscriptionParams,
+   Webhook,
+   WebhookEvent,
+   Usage,
+ } from '@veroai/transcribe';
+ ```
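+
+ For example, they can annotate your own helpers (a small sketch):
+
+ ```typescript
+ import type { Transcription } from '@veroai/transcribe';
+
+ // Return the transcript text only once the job has finished.
+ function getText(t: Transcription): string | undefined {
+   return t.status === 'completed' ? t.transcript?.text : undefined;
+ }
+ ```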
+
+ ## License
+
+ MIT
@@ -0,0 +1,492 @@
+ interface VeroTranscribeConfig {
+     apiKey: string;
+     baseUrl?: string;
+     timeout?: number;
+ }
+ type TranscriptionStatus = 'pending' | 'processing' | 'completed' | 'failed';
+ type AIType = 'none' | 'basic' | 'coach';
+ type Language = 'en' | 'he' | 'es' | 'auto';
+ interface WordTimestamp {
+     word: string;
+     start: number;
+     end: number;
+ }
+ interface TranscriptionSegment {
+     speaker: string;
+     text: string;
+     start: number;
+     end: number;
+ }
+ interface Transcript {
+     text: string;
+     segments: TranscriptionSegment[];
+     words?: WordTimestamp[];
+     language: string;
+     duration: number;
+ }
+ interface Analysis {
+     summary: string;
+     outcome: {
+         status: string;
+         reason: string;
+         followUpPotential: string;
+     };
+     sentiment: {
+         initial: number;
+         final: number;
+         average: number;
+     };
+     keyPhrases: string[];
+     agentActions?: Array<{
+         action: string;
+         timestamp: string;
+     }>;
+     participants?: {
+         agent?: string;
+         customer?: string;
+     };
+     emotions?: {
+         customer: Array<{
+             emotion: string;
+             intensity: string;
+         }>;
+         agent: Array<{
+             emotion: string;
+             intensity: string;
+         }>;
+     };
+     coaching?: {
+         suggestions: Array<{
+             item: string;
+             timestamp?: string;
+             severity: 'low' | 'medium' | 'high';
+         }>;
+         performance: {
+             clarity: number;
+             empathy: number;
+             compliance: number;
+             professionalism: number;
+         };
+         behavioral?: {
+             talkListenRatio: number;
+             questionsAsked: {
+                 open: number;
+                 closed: number;
+             };
+             interruptions: number;
+             speakingPace: number;
+             empathyStatements: number;
+         };
+     };
+ }
+ interface Transcription {
+     id: string;
+     externalId?: string;
+     status: TranscriptionStatus;
+     durationSeconds?: number;
+     language: Language;
+     diarize?: boolean;
+     aiType?: AIType;
+     saveTranscript?: boolean;
+     transcript?: Transcript;
+     analysis?: Analysis;
+     metadata?: Record<string, unknown>;
+     createdAt: string;
+     completedAt?: string;
+     errorMessage?: string;
+ }
+ interface CreateTranscriptionParams {
+     language?: Language;
+     diarize?: boolean;
+     aiAnalysis?: AIType;
+     saveTranscript?: boolean;
+     externalId?: string;
+     webhookUrl?: string;
+     metadata?: Record<string, unknown>;
+     /** URL to fetch audio from. When provided, transcription starts immediately without needing upload(). */
+     audioUrl?: string;
+ }
+ type TranscriptionProvider = 'replicate' | 'elevenlabs' | 'runpod';
+ interface UploadAudioParams {
+     provider?: TranscriptionProvider;
+ }
+ interface UploadAudioResponse {
+     id: string;
+     status: TranscriptionStatus;
+ }
+ interface ListTranscriptionsParams {
+     limit?: number;
+     offset?: number;
+     status?: TranscriptionStatus;
+     aiAnalysis?: AIType;
+     from?: Date | string;
+     to?: Date | string;
+ }
+ interface TranscriptionListItem {
+     id: string;
+     externalId?: string;
+     status: TranscriptionStatus;
+     durationSeconds?: number;
+     language: string;
+     aiType?: AIType;
+     createdAt: string;
+     completedAt?: string;
+ }
+ interface ListTranscriptionsResponse {
+     data: TranscriptionListItem[];
+     pagination: {
+         limit: number;
+         offset: number;
+         hasMore: boolean;
+     };
+ }
+ type WebhookEvent = 'transcription.created' | 'transcription.processing' | 'transcription.completed' | 'transcription.failed' | 'analysis.completed';
+ interface Webhook {
+     id: string;
+     url: string;
+     events: WebhookEvent[];
+     secret?: string;
+     isActive: boolean;
+     createdAt: string;
+ }
+ interface CreateWebhookParams {
+     url: string;
+     events: WebhookEvent[];
+ }
+ interface UpdateWebhookParams {
+     url?: string;
+     events?: WebhookEvent[];
+     isActive?: boolean;
+ }
+ interface WebhookDelivery {
+     id: string;
+     eventType: WebhookEvent;
+     statusCode: number | null;
+     success: boolean;
+     attempts: number;
+     createdAt: string;
+ }
+ interface ListWebhooksResponse {
+     data: Webhook[];
+ }
+ interface ListWebhookDeliveriesResponse {
+     data: WebhookDelivery[];
+ }
+ interface UsagePeriod {
+     start: string;
+     end: string;
+ }
+ type PackageSlug = 'starter' | 'standard' | 'insights' | 'pro';
+ interface PackageBreakdown {
+     minutes: number;
+     cost: number;
+     count: number;
+ }
+ interface PackageRates {
+     starter: number;
+     standard: number;
+     insights: number;
+     pro: number;
+ }
+ interface Usage {
+     period: UsagePeriod;
+     totalMinutes: number;
+     totalCost: number;
+     packages: Record<PackageSlug, PackageBreakdown>;
+     rates: PackageRates;
+ }
+ interface UsageHistoryRecord {
+     date: string;
+     recordType: string;
+     total: number;
+ }
+ interface UsageHistoryResponse {
+     data: UsageHistoryRecord[];
+ }
+ interface UsageHistoryParams {
+     days?: number;
+ }
+ interface BulkUploadParams {
+     language?: Language;
+     diarize?: boolean;
+     aiAnalysis?: AIType;
+     saveTranscript?: boolean;
+     provider?: TranscriptionProvider;
+     /** Optional per-file metadata, keyed by filename */
+     fileMetadata?: Record<string, {
+         externalId?: string;
+         metadata?: Record<string, unknown>;
+     }>;
+ }
+ interface BulkUploadResultItem {
+     filename: string;
+     transcriptionId?: string;
+     status: 'queued' | 'failed';
+     error?: {
+         code: string;
+         message: string;
+     };
+ }
+ interface BulkUploadResponse {
+     batchId: string;
+     totalFiles: number;
+     results: BulkUploadResultItem[];
+     summary: {
+         queued: number;
+         failed: number;
+     };
+ }
+ interface BulkTranscribeOptions {
+     pollInterval?: number;
+     maxWaitTime?: number;
+     /** Called when an individual file's transcription updates */
+     onFileProgress?: (filename: string, transcription: Transcription) => void;
+     /** Called when any transcription in the batch completes or fails */
+     onBatchProgress?: (completed: number, total: number, results: Map<string, Transcription | Error>) => void;
+     /** How many transcriptions to poll in parallel (default: 5) */
+     concurrency?: number;
+ }
+ interface BulkTranscribeResult {
+     batchId: string;
+     completed: Transcription[];
+     failed: Array<{
+         filename: string;
+         transcriptionId?: string;
+         error: Error;
+     }>;
+ }
+ declare class VeroAPIError extends Error {
+     code: string;
+     status: number;
+     constructor(message: string, code: string, status: number);
+ }
+
+ declare class HttpClient {
+     private apiKey;
+     private baseUrl;
+     private timeout;
+     constructor(config: VeroTranscribeConfig);
+     private request;
+     get<T>(path: string): Promise<T>;
+     post<T>(path: string, body?: unknown): Promise<T>;
+     patch<T>(path: string, body?: unknown): Promise<T>;
+     delete<T>(path: string): Promise<T>;
+     postFormData<T>(path: string, formData: FormData): Promise<T>;
+ }
+
+ declare class TranscriptionsResource {
+     private client;
+     constructor(client: HttpClient);
+     /**
+      * Create a new transcription.
+      * If audioUrl is provided, transcription starts immediately.
+      * Otherwise, use upload() to provide the audio file.
+      */
+     create(params: CreateTranscriptionParams): Promise<Transcription>;
+     /**
+      * Get a transcription by ID
+      */
+     get(id: string): Promise<Transcription>;
+     /**
+      * List all transcriptions
+      */
+     list(params?: ListTranscriptionsParams): Promise<ListTranscriptionsResponse>;
+     /**
+      * Delete a transcription
+      */
+     delete(id: string): Promise<{
+         success: boolean;
+     }>;
+     /**
+      * Upload an audio file for an existing transcription
+      * This enqueues the transcription for processing
+      */
+     upload(id: string, audio: File | Blob, params?: UploadAudioParams): Promise<UploadAudioResponse>;
+     /**
+      * Create a transcription, upload audio, and wait for completion
+      * Convenience method that combines create(), upload(), and waitForCompletion()
+      */
+     transcribeFile(audio: File | Blob, params?: CreateTranscriptionParams & UploadAudioParams, options?: {
+         pollInterval?: number;
+         maxWaitTime?: number;
+         onProgress?: (transcription: Transcription) => void;
+     }): Promise<Transcription>;
+     /**
+      * Wait for a transcription to complete
+      * Polls the API at regular intervals until the transcription is completed or failed
+      */
+     waitForCompletion(id: string, options?: {
+         pollInterval?: number;
+         maxWaitTime?: number;
+         onProgress?: (transcription: Transcription) => void;
+     }): Promise<Transcription>;
+     /**
+      * Create a transcription from a URL and wait for it to complete.
+      * When audioUrl is provided, transcription starts immediately.
+      *
+      * @example
+      * ```ts
+      * const result = await vero.transcriptions.transcribe({
+      *   audioUrl: 'https://example.com/audio.mp3',
+      *   language: 'en',
+      * })
+      * ```
+      */
+     transcribe(params: CreateTranscriptionParams & {
+         audioUrl: string;
+     }, options?: {
+         pollInterval?: number;
+         maxWaitTime?: number;
+         onProgress?: (transcription: Transcription) => void;
+     }): Promise<Transcription>;
+     /**
+      * Upload multiple audio files for transcription in a single batch.
+      * Returns immediately after files are queued for processing.
+      *
+      * @example
+      * ```ts
+      * const result = await vero.transcriptions.bulkUpload(
+      *   [file1, file2, file3],
+      *   { language: 'en', aiAnalysis: 'basic' }
+      * )
+      * console.log(`Queued ${result.summary.queued} files`)
+      * ```
+      */
+     bulkUpload(files: File[] | Blob[] | Array<{
+         file: File | Blob;
+         name?: string;
+     }>, params?: BulkUploadParams): Promise<BulkUploadResponse>;
+     /**
+      * Upload multiple files and wait for all to complete.
+      * Handles partial failures gracefully - returns both completed and failed transcriptions.
+      *
+      * @example
+      * ```ts
+      * const result = await vero.transcriptions.bulkTranscribe(
+      *   [file1, file2, file3],
+      *   { language: 'en' },
+      *   {
+      *     onBatchProgress: (completed, total) => {
+      *       console.log(`Progress: ${completed}/${total}`)
+      *     }
+      *   }
+      * )
+      * console.log(`Completed: ${result.completed.length}, Failed: ${result.failed.length}`)
+      * ```
+      */
+     bulkTranscribe(files: File[] | Blob[] | Array<{
+         file: File | Blob;
+         name?: string;
+     }>, params?: BulkUploadParams, options?: BulkTranscribeOptions): Promise<BulkTranscribeResult>;
+     /**
+      * Wait for multiple transcriptions to complete.
+      * Polls in parallel with configurable concurrency.
+      */
+     waitForBatch(items: Array<{
+         id: string;
+         filename?: string;
+     }>, options?: BulkTranscribeOptions): Promise<Map<string, Transcription | Error>>;
+ }
+
+ declare class WebhooksResource {
+     private client;
+     constructor(client: HttpClient);
+     /**
+      * Create a new webhook endpoint
+      */
+     create(params: CreateWebhookParams): Promise<Webhook>;
+     /**
+      * Get a webhook by ID
+      */
+     get(id: string): Promise<Webhook>;
+     /**
+      * List all webhooks
+      */
+     list(): Promise<ListWebhooksResponse>;
+     /**
+      * Update a webhook
+      */
+     update(id: string, params: UpdateWebhookParams): Promise<Webhook>;
+     /**
+      * Delete a webhook
+      */
+     delete(id: string): Promise<{
+         success: boolean;
+     }>;
+     /**
+      * Get delivery history for a webhook (last 10)
+      */
+     deliveries(id: string): Promise<ListWebhookDeliveriesResponse>;
+ }
+ /**
+  * Helper function to verify webhook signatures in Node.js environments.
+  * The signature is computed over `${timestamp}.${body}` using HMAC-SHA256.
+  *
+  * @param secret - The webhook secret (shown once at creation)
+  *
+  * @example
+  * ```typescript
+  * const verifier = createWebhookVerifier('whsec_...')
+  * const isValid = await verifier.verify(rawBody, signatureHeader, timestampHeader)
+  * ```
+  */
+ declare function createWebhookVerifier(secret: string): {
+     verify: (body: string, signature: string, timestamp: string) => Promise<boolean>;
+ };
+
+ declare class UsageResource {
+     private client;
+     constructor(client: HttpClient);
+     /**
+      * Get usage statistics for the current billing period
+      */
+     get(): Promise<Usage>;
+     /**
+      * Get usage history (daily breakdown)
+      * @param params.days - Number of days to look back (default: 30)
+      */
+     history(params?: UsageHistoryParams): Promise<UsageHistoryResponse>;
+ }
+
+ declare class VeroTranscribe {
+     /**
+      * Transcriptions API
+      * Create, retrieve, list, and delete transcriptions
+      */
+     readonly transcriptions: TranscriptionsResource;
+     /**
+      * Webhooks API
+      * Manage webhook endpoints for receiving transcription events
+      */
+     readonly webhooks: WebhooksResource;
+     /**
+      * Usage API
+      * Get usage statistics and billing information
+      */
+     readonly usage: UsageResource;
+     /**
+      * Create a new VeroTranscribe client
+      *
+      * @param config - Configuration options
+      * @param config.apiKey - Your VeroTranscribe API key (required)
+      * @param config.baseUrl - Custom API base URL (optional, defaults to https://verotranscribe-api.siply.workers.dev)
+      * @param config.timeout - Request timeout in milliseconds (optional, defaults to 30000)
+      *
+      * @example
+      * ```typescript
+      * const client = new VeroTranscribe({
+      *   apiKey: process.env.VERO_API_KEY,
+      * });
+      *
+      * const transcription = await client.transcriptions.create({
+      *   language: 'en',
+      *   aiAnalysis: 'basic',
+      * });
+      *
+      * await client.transcriptions.upload(transcription.id, audioFile);
+      * ```
+      */
+     constructor(config: VeroTranscribeConfig);
+ }
+
+ export { type AIType, type Analysis, type CreateTranscriptionParams, type CreateWebhookParams, type Language, type ListTranscriptionsParams, type ListTranscriptionsResponse, type ListWebhookDeliveriesResponse, type ListWebhooksResponse, type PackageBreakdown, type PackageRates, type PackageSlug, type Transcript, type Transcription, type TranscriptionListItem, type TranscriptionProvider, type TranscriptionSegment, type TranscriptionStatus, type UpdateWebhookParams, type UploadAudioParams, type UploadAudioResponse, type Usage, type UsageHistoryParams, type UsageHistoryRecord, type UsageHistoryResponse, type UsagePeriod, VeroAPIError, VeroTranscribe, type VeroTranscribeConfig, type Webhook, type WebhookDelivery, type WebhookEvent, type WordTimestamp, createWebhookVerifier, VeroTranscribe as default };