@ai-sdk/assemblyai 0.0.1-canary.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md ADDED
@@ -0,0 +1,12 @@
+ # @ai-sdk/assemblyai
+
+ ## 0.0.1-canary.0
+
+ ### Patch Changes
+
+ - 3ea5656: feat(providers/assemblyai): add transcribe
+ - Updated dependencies [9bf7291]
+ - Updated dependencies [4617fab]
+ - Updated dependencies [e030615]
+   - @ai-sdk/provider@2.0.0-canary.11
+   - @ai-sdk/provider-utils@3.0.0-canary.12
package/LICENSE ADDED
@@ -0,0 +1,13 @@
+ Copyright 2023 Vercel, Inc.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
package/README.md ADDED
@@ -0,0 +1,38 @@
+ # AI SDK - AssemblyAI Provider
+
+ The **[AssemblyAI provider](https://sdk.vercel.ai/providers/ai-sdk-providers/assemblyai)** for the [AI SDK](https://sdk.vercel.ai/docs)
+ contains transcription model support for the AssemblyAI transcription API.
+
+ ## Setup
+
+ The AssemblyAI provider is available in the `@ai-sdk/assemblyai` module. You can install it with:
+
+ ```bash
+ npm i @ai-sdk/assemblyai
+ ```
+
+ ## Provider Instance
+
+ You can import the default provider instance `assemblyai` from `@ai-sdk/assemblyai`:
+
+ ```ts
+ import { assemblyai } from '@ai-sdk/assemblyai';
+ ```
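If you need to set the API key, headers, or `fetch` implementation explicitly, you can also create your own instance with `createAssemblyAI` (its settings are declared in `dist/index.d.ts` below). A minimal sketch; the key and header values are placeholders:

```ts
import { createAssemblyAI } from '@ai-sdk/assemblyai';

// All settings are optional; when apiKey is omitted, the provider reads
// the ASSEMBLYAI_API_KEY environment variable instead.
const assemblyai = createAssemblyAI({
  apiKey: process.env.MY_ASSEMBLYAI_KEY, // placeholder variable name
  headers: { 'x-request-source': 'docs-example' }, // placeholder header
});
```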
+
+ ## Example
+
+ ```ts
+ import { assemblyai } from '@ai-sdk/assemblyai';
+ import { experimental_transcribe as transcribe } from 'ai';
+
+ const { text } = await transcribe({
+   model: assemblyai.transcription('best'),
+   audio: new URL(
+     'https://github.com/vercel/ai/raw/refs/heads/main/examples/ai-core/data/galileo.mp3',
+   ),
+ });
+ ```
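Provider-specific options from the AssemblyAI transcript API can be passed as camelCase keys under `providerOptions.assemblyai` (the full set is validated by the schema bundled in `dist/index.js` below). A sketch, assuming the standard `providerOptions` plumbing of `experimental_transcribe`; the file path is a placeholder:

```ts
import { assemblyai } from '@ai-sdk/assemblyai';
import { experimental_transcribe as transcribe } from 'ai';
import { readFile } from 'node:fs/promises';

const { text, segments } = await transcribe({
  model: assemblyai.transcription('nano'),
  audio: await readFile('./meeting.mp3'), // raw bytes instead of a URL
  providerOptions: {
    assemblyai: {
      speakerLabels: true, // identify different speakers
      languageDetection: true, // detect the spoken language
    },
  },
});
```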
+
+ ## Documentation
+
+ Please check out the **[AssemblyAI provider documentation](https://sdk.vercel.ai/providers/ai-sdk-providers/assemblyai)** for more information.
package/dist/index.d.mts ADDED
@@ -0,0 +1,65 @@
+ import { TranscriptionModelV1, ProviderV1 } from '@ai-sdk/provider';
+ import { FetchFunction } from '@ai-sdk/provider-utils';
+
+ type AssemblyAIConfig = {
+   provider: string;
+   url: (options: {
+     modelId: string;
+     path: string;
+   }) => string;
+   headers: () => Record<string, string | undefined>;
+   fetch?: FetchFunction;
+   generateId?: () => string;
+ };
+
+ type AssemblyAITranscriptionModelId = 'best' | 'nano';
+
+ interface AssemblyAITranscriptionModelConfig extends AssemblyAIConfig {
+   _internal?: {
+     currentDate?: () => Date;
+   };
+ }
+ declare class AssemblyAITranscriptionModel implements TranscriptionModelV1 {
+   readonly modelId: AssemblyAITranscriptionModelId;
+   private readonly config;
+   readonly specificationVersion = "v1";
+   get provider(): string;
+   constructor(modelId: AssemblyAITranscriptionModelId, config: AssemblyAITranscriptionModelConfig);
+   private getArgs;
+   doGenerate(options: Parameters<TranscriptionModelV1['doGenerate']>[0]): Promise<Awaited<ReturnType<TranscriptionModelV1['doGenerate']>>>;
+ }
+
+ interface AssemblyAIProvider extends Pick<ProviderV1, 'transcriptionModel'> {
+   (modelId: 'best', settings?: {}): {
+     transcription: AssemblyAITranscriptionModel;
+   };
+   /**
+   Creates a model for transcription.
+    */
+   transcription(modelId: AssemblyAITranscriptionModelId): TranscriptionModelV1;
+ }
+ interface AssemblyAIProviderSettings {
+   /**
+   API key for authenticating requests.
+    */
+   apiKey?: string;
+   /**
+   Custom headers to include in the requests.
+    */
+   headers?: Record<string, string>;
+   /**
+   Custom fetch implementation. You can use it as a middleware to intercept requests,
+   or to provide a custom fetch implementation for e.g. testing.
+    */
+   fetch?: FetchFunction;
+ }
+ /**
+ Create an AssemblyAI provider instance.
+  */
+ declare function createAssemblyAI(options?: AssemblyAIProviderSettings): AssemblyAIProvider;
+ /**
+ Default AssemblyAI provider instance.
+  */
+ declare const assemblyai: AssemblyAIProvider;
+
+ export { type AssemblyAIProvider, type AssemblyAIProviderSettings, assemblyai, createAssemblyAI };
package/dist/index.d.ts ADDED
@@ -0,0 +1,65 @@
+ import { TranscriptionModelV1, ProviderV1 } from '@ai-sdk/provider';
+ import { FetchFunction } from '@ai-sdk/provider-utils';
+
+ type AssemblyAIConfig = {
+   provider: string;
+   url: (options: {
+     modelId: string;
+     path: string;
+   }) => string;
+   headers: () => Record<string, string | undefined>;
+   fetch?: FetchFunction;
+   generateId?: () => string;
+ };
+
+ type AssemblyAITranscriptionModelId = 'best' | 'nano';
+
+ interface AssemblyAITranscriptionModelConfig extends AssemblyAIConfig {
+   _internal?: {
+     currentDate?: () => Date;
+   };
+ }
+ declare class AssemblyAITranscriptionModel implements TranscriptionModelV1 {
+   readonly modelId: AssemblyAITranscriptionModelId;
+   private readonly config;
+   readonly specificationVersion = "v1";
+   get provider(): string;
+   constructor(modelId: AssemblyAITranscriptionModelId, config: AssemblyAITranscriptionModelConfig);
+   private getArgs;
+   doGenerate(options: Parameters<TranscriptionModelV1['doGenerate']>[0]): Promise<Awaited<ReturnType<TranscriptionModelV1['doGenerate']>>>;
+ }
+
+ interface AssemblyAIProvider extends Pick<ProviderV1, 'transcriptionModel'> {
+   (modelId: 'best', settings?: {}): {
+     transcription: AssemblyAITranscriptionModel;
+   };
+   /**
+   Creates a model for transcription.
+    */
+   transcription(modelId: AssemblyAITranscriptionModelId): TranscriptionModelV1;
+ }
+ interface AssemblyAIProviderSettings {
+   /**
+   API key for authenticating requests.
+    */
+   apiKey?: string;
+   /**
+   Custom headers to include in the requests.
+    */
+   headers?: Record<string, string>;
+   /**
+   Custom fetch implementation. You can use it as a middleware to intercept requests,
+   or to provide a custom fetch implementation for e.g. testing.
+    */
+   fetch?: FetchFunction;
+ }
+ /**
+ Create an AssemblyAI provider instance.
+  */
+ declare function createAssemblyAI(options?: AssemblyAIProviderSettings): AssemblyAIProvider;
+ /**
+ Default AssemblyAI provider instance.
+  */
+ declare const assemblyai: AssemblyAIProvider;
+
+ export { type AssemblyAIProvider, type AssemblyAIProviderSettings, assemblyai, createAssemblyAI };
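The `fetch` setting declared above accepts any `FetchFunction`, so requests can be wrapped for logging or testing. A minimal sketch; the logging is illustrative and not part of the package:

```ts
import { createAssemblyAI } from '@ai-sdk/assemblyai';

// Wrap the global fetch so every request the provider issues is logged
// before being forwarded unchanged.
const assemblyai = createAssemblyAI({
  fetch: async (input, init) => {
    console.log('AssemblyAI request:', input);
    return fetch(input, init);
  },
});
```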
package/dist/index.js ADDED
@@ -0,0 +1,373 @@
1
+ "use strict";
2
+ var __defProp = Object.defineProperty;
3
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
+ var __getOwnPropNames = Object.getOwnPropertyNames;
5
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
6
+ var __export = (target, all) => {
7
+ for (var name in all)
8
+ __defProp(target, name, { get: all[name], enumerable: true });
9
+ };
10
+ var __copyProps = (to, from, except, desc) => {
11
+ if (from && typeof from === "object" || typeof from === "function") {
12
+ for (let key of __getOwnPropNames(from))
13
+ if (!__hasOwnProp.call(to, key) && key !== except)
14
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
15
+ }
16
+ return to;
17
+ };
18
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
+
20
+ // src/index.ts
21
+ var src_exports = {};
22
+ __export(src_exports, {
23
+ assemblyai: () => assemblyai,
24
+ createAssemblyAI: () => createAssemblyAI
25
+ });
26
+ module.exports = __toCommonJS(src_exports);
27
+
28
+ // src/assemblyai-provider.ts
29
+ var import_provider_utils3 = require("@ai-sdk/provider-utils");
30
+
31
+ // src/assemblyai-transcription-model.ts
32
+ var import_provider_utils2 = require("@ai-sdk/provider-utils");
33
+ var import_zod2 = require("zod");
34
+
35
+ // src/assemblyai-error.ts
36
+ var import_zod = require("zod");
37
+ var import_provider_utils = require("@ai-sdk/provider-utils");
38
+ var assemblyaiErrorDataSchema = import_zod.z.object({
39
+ error: import_zod.z.object({
40
+ message: import_zod.z.string(),
41
+ code: import_zod.z.number()
42
+ })
43
+ });
44
+ var assemblyaiFailedResponseHandler = (0, import_provider_utils.createJsonErrorResponseHandler)({
45
+ errorSchema: assemblyaiErrorDataSchema,
46
+ errorToMessage: (data) => data.error.message
47
+ });
48
+
49
+ // src/assemblyai-transcription-model.ts
50
+ var assemblyaiProviderOptionsSchema = import_zod2.z.object({
51
+ /**
52
+ * End time of the audio in milliseconds.
53
+ */
54
+ audioEndAt: import_zod2.z.number().int().nullish(),
55
+ /**
56
+ * Start time of the audio in milliseconds.
57
+ */
58
+ audioStartFrom: import_zod2.z.number().int().nullish(),
59
+ /**
60
+ * Whether to automatically generate chapters for the transcription.
61
+ */
62
+ autoChapters: import_zod2.z.boolean().nullish(),
63
+ /**
64
+ * Whether to automatically generate highlights for the transcription.
65
+ */
66
+ autoHighlights: import_zod2.z.boolean().nullish(),
67
+ /**
68
+ * Boost parameter for the transcription.
69
+ * Allowed values: 'low', 'default', 'high'.
70
+ */
71
+ boostParam: import_zod2.z.string().nullish(),
72
+ /**
73
+ * Whether to enable content safety filtering.
74
+ */
75
+ contentSafety: import_zod2.z.boolean().nullish(),
76
+ /**
77
+ * Confidence threshold for content safety filtering (25-100).
78
+ */
79
+ contentSafetyConfidence: import_zod2.z.number().int().min(25).max(100).nullish(),
80
+ /**
81
+ * Custom spelling rules for the transcription.
82
+ */
83
+ customSpelling: import_zod2.z.array(
84
+ import_zod2.z.object({
85
+ from: import_zod2.z.array(import_zod2.z.string()),
86
+ to: import_zod2.z.string()
87
+ })
88
+ ).nullish(),
89
+ /**
90
+ * Whether to include filler words (um, uh, etc.) in the transcription.
91
+ */
92
+ disfluencies: import_zod2.z.boolean().nullish(),
93
+ /**
94
+ * Whether to enable entity detection.
95
+ */
96
+ entityDetection: import_zod2.z.boolean().nullish(),
97
+ /**
98
+ * Whether to filter profanity from the transcription.
99
+ */
100
+ filterProfanity: import_zod2.z.boolean().nullish(),
101
+ /**
102
+ * Whether to format text with punctuation and capitalization.
103
+ */
104
+ formatText: import_zod2.z.boolean().nullish(),
105
+ /**
106
+ * Whether to enable IAB categories detection.
107
+ */
108
+ iabCategories: import_zod2.z.boolean().nullish(),
109
+ /**
110
+ * Language code for the transcription.
111
+ */
112
+ languageCode: import_zod2.z.union([import_zod2.z.literal("en"), import_zod2.z.string()]).nullish(),
113
+ /**
114
+ * Confidence threshold for language detection.
115
+ */
116
+ languageConfidenceThreshold: import_zod2.z.number().nullish(),
117
+ /**
118
+ * Whether to enable language detection.
119
+ */
120
+ languageDetection: import_zod2.z.boolean().nullish(),
121
+ /**
122
+ * Whether to process audio as multichannel.
123
+ */
124
+ multichannel: import_zod2.z.boolean().nullish(),
125
+ /**
126
+ * Whether to add punctuation to the transcription.
127
+ */
128
+ punctuate: import_zod2.z.boolean().nullish(),
129
+ /**
130
+ * Whether to redact personally identifiable information (PII).
131
+ */
132
+ redactPii: import_zod2.z.boolean().nullish(),
133
+ /**
134
+ * Whether to redact PII in the audio file.
135
+ */
136
+ redactPiiAudio: import_zod2.z.boolean().nullish(),
137
+ /**
138
+ * Audio format for PII redaction.
139
+ */
140
+ redactPiiAudioQuality: import_zod2.z.string().nullish(),
141
+ /**
142
+ * List of PII types to redact.
143
+ */
144
+ redactPiiPolicies: import_zod2.z.array(import_zod2.z.string()).nullish(),
145
+ /**
146
+ * Substitution method for redacted PII.
147
+ */
148
+ redactPiiSub: import_zod2.z.string().nullish(),
149
+ /**
150
+ * Whether to enable sentiment analysis.
151
+ */
152
+ sentimentAnalysis: import_zod2.z.boolean().nullish(),
153
+ /**
154
+ * Whether to identify different speakers in the audio.
155
+ */
156
+ speakerLabels: import_zod2.z.boolean().nullish(),
157
+ /**
158
+ * Number of speakers expected in the audio.
159
+ */
160
+ speakersExpected: import_zod2.z.number().int().nullish(),
161
+ /**
162
+ * Threshold for speech detection (0-1).
163
+ */
164
+ speechThreshold: import_zod2.z.number().min(0).max(1).nullish(),
165
+ /**
166
+ * Whether to generate a summary of the transcription.
167
+ */
168
+ summarization: import_zod2.z.boolean().nullish(),
169
+ /**
170
+ * Model to use for summarization.
171
+ */
172
+ summaryModel: import_zod2.z.string().nullish(),
173
+ /**
174
+ * Type of summary to generate.
175
+ */
176
+ summaryType: import_zod2.z.string().nullish(),
177
+ /**
178
+ * List of topics to identify in the transcription.
179
+ */
180
+ topics: import_zod2.z.array(import_zod2.z.string()).nullish(),
181
+ /**
182
+ * Name of the authentication header for webhook requests.
183
+ */
184
+ webhookAuthHeaderName: import_zod2.z.string().nullish(),
185
+ /**
186
+ * Value of the authentication header for webhook requests.
187
+ */
188
+ webhookAuthHeaderValue: import_zod2.z.string().nullish(),
189
+ /**
190
+ * URL to send webhook notifications to.
191
+ */
192
+ webhookUrl: import_zod2.z.string().nullish(),
193
+ /**
194
+ * List of words to boost recognition for.
195
+ */
196
+ wordBoost: import_zod2.z.array(import_zod2.z.string()).nullish()
197
+ });
198
+ var AssemblyAITranscriptionModel = class {
199
+ constructor(modelId, config) {
200
+ this.modelId = modelId;
201
+ this.config = config;
202
+ this.specificationVersion = "v1";
203
+ }
204
+ get provider() {
205
+ return this.config.provider;
206
+ }
207
+ async getArgs({
208
+ providerOptions
209
+ }) {
210
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D, _E, _F, _G, _H, _I;
211
+ const warnings = [];
212
+ const assemblyaiOptions = await (0, import_provider_utils2.parseProviderOptions)({
213
+ provider: "assemblyai",
214
+ providerOptions,
215
+ schema: assemblyaiProviderOptionsSchema
216
+ });
217
+ const body = {
218
+ speech_model: this.modelId
219
+ };
220
+ if (assemblyaiOptions) {
221
+ body.audio_end_at = (_a = assemblyaiOptions.audioEndAt) != null ? _a : void 0;
222
+ body.audio_start_from = (_b = assemblyaiOptions.audioStartFrom) != null ? _b : void 0;
223
+ body.auto_chapters = (_c = assemblyaiOptions.autoChapters) != null ? _c : void 0;
224
+ body.auto_highlights = (_d = assemblyaiOptions.autoHighlights) != null ? _d : void 0;
225
+ body.boost_param = (_e = assemblyaiOptions.boostParam) != null ? _e : void 0;
226
+ body.content_safety = (_f = assemblyaiOptions.contentSafety) != null ? _f : void 0;
227
+ body.content_safety_confidence = (_g = assemblyaiOptions.contentSafetyConfidence) != null ? _g : void 0;
228
+ body.custom_spelling = (_h = assemblyaiOptions.customSpelling) != null ? _h : void 0;
229
+ body.disfluencies = (_i = assemblyaiOptions.disfluencies) != null ? _i : void 0;
230
+ body.entity_detection = (_j = assemblyaiOptions.entityDetection) != null ? _j : void 0;
231
+ body.filter_profanity = (_k = assemblyaiOptions.filterProfanity) != null ? _k : void 0;
232
+ body.format_text = (_l = assemblyaiOptions.formatText) != null ? _l : void 0;
233
+ body.iab_categories = (_m = assemblyaiOptions.iabCategories) != null ? _m : void 0;
234
+ body.language_code = (_n = assemblyaiOptions.languageCode) != null ? _n : void 0;
235
+ body.language_confidence_threshold = (_o = assemblyaiOptions.languageConfidenceThreshold) != null ? _o : void 0;
236
+ body.language_detection = (_p = assemblyaiOptions.languageDetection) != null ? _p : void 0;
237
+ body.multichannel = (_q = assemblyaiOptions.multichannel) != null ? _q : void 0;
238
+ body.punctuate = (_r = assemblyaiOptions.punctuate) != null ? _r : void 0;
239
+ body.redact_pii = (_s = assemblyaiOptions.redactPii) != null ? _s : void 0;
240
+ body.redact_pii_audio = (_t = assemblyaiOptions.redactPiiAudio) != null ? _t : void 0;
241
+ body.redact_pii_audio_quality = (_u = assemblyaiOptions.redactPiiAudioQuality) != null ? _u : void 0;
242
+ body.redact_pii_policies = (_v = assemblyaiOptions.redactPiiPolicies) != null ? _v : void 0;
243
+ body.redact_pii_sub = (_w = assemblyaiOptions.redactPiiSub) != null ? _w : void 0;
244
+ body.sentiment_analysis = (_x = assemblyaiOptions.sentimentAnalysis) != null ? _x : void 0;
245
+ body.speaker_labels = (_y = assemblyaiOptions.speakerLabels) != null ? _y : void 0;
246
+ body.speakers_expected = (_z = assemblyaiOptions.speakersExpected) != null ? _z : void 0;
247
+ body.speech_threshold = (_A = assemblyaiOptions.speechThreshold) != null ? _A : void 0;
248
+ body.summarization = (_B = assemblyaiOptions.summarization) != null ? _B : void 0;
249
+ body.summary_model = (_C = assemblyaiOptions.summaryModel) != null ? _C : void 0;
250
+ body.summary_type = (_D = assemblyaiOptions.summaryType) != null ? _D : void 0;
251
+ body.topics = (_E = assemblyaiOptions.topics) != null ? _E : void 0;
252
+ body.webhook_auth_header_name = (_F = assemblyaiOptions.webhookAuthHeaderName) != null ? _F : void 0;
253
+ body.webhook_auth_header_value = (_G = assemblyaiOptions.webhookAuthHeaderValue) != null ? _G : void 0;
254
+ body.webhook_url = (_H = assemblyaiOptions.webhookUrl) != null ? _H : void 0;
255
+ body.word_boost = (_I = assemblyaiOptions.wordBoost) != null ? _I : void 0;
256
+ }
257
+ return {
258
+ body,
259
+ warnings
260
+ };
261
+ }
262
+ async doGenerate(options) {
263
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
264
+ const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
265
+ const { value: uploadResponse } = await (0, import_provider_utils2.postToApi)({
266
+ url: this.config.url({
267
+ path: "/v2/upload",
268
+ modelId: ""
269
+ }),
270
+ headers: {
271
+ "Content-Type": "application/octet-stream",
272
+ ...(0, import_provider_utils2.combineHeaders)(this.config.headers(), options.headers)
273
+ },
274
+ body: {
275
+ content: options.audio,
276
+ values: options.audio
277
+ },
278
+ failedResponseHandler: assemblyaiFailedResponseHandler,
279
+ successfulResponseHandler: (0, import_provider_utils2.createJsonResponseHandler)(
280
+ assemblyaiUploadResponseSchema
281
+ ),
282
+ abortSignal: options.abortSignal,
283
+ fetch: this.config.fetch
284
+ });
285
+ const { body, warnings } = await this.getArgs(options);
286
+ const {
287
+ value: response,
288
+ responseHeaders,
289
+ rawValue: rawResponse
290
+ } = await (0, import_provider_utils2.postJsonToApi)({
291
+ url: this.config.url({
292
+ path: "/v2/transcript",
293
+ modelId: this.modelId
294
+ }),
295
+ headers: (0, import_provider_utils2.combineHeaders)(this.config.headers(), options.headers),
296
+ body: {
297
+ ...body,
298
+ audio_url: uploadResponse.upload_url
299
+ },
300
+ failedResponseHandler: assemblyaiFailedResponseHandler,
301
+ successfulResponseHandler: (0, import_provider_utils2.createJsonResponseHandler)(
302
+ assemblyaiTranscriptionResponseSchema
303
+ ),
304
+ abortSignal: options.abortSignal,
305
+ fetch: this.config.fetch
306
+ });
307
+ return {
308
+ text: (_d = response.text) != null ? _d : "",
309
+ segments: (_f = (_e = response.words) == null ? void 0 : _e.map((word) => ({
310
+ text: word.text,
311
+ startSecond: word.start,
312
+ endSecond: word.end
313
+ }))) != null ? _f : [],
314
+ language: (_g = response.language_code) != null ? _g : void 0,
315
+ durationInSeconds: (_k = (_j = response.audio_duration) != null ? _j : (_i = (_h = response.words) == null ? void 0 : _h.at(-1)) == null ? void 0 : _i.end) != null ? _k : void 0,
316
+ warnings,
317
+ response: {
318
+ timestamp: currentDate,
319
+ modelId: this.modelId,
320
+ headers: responseHeaders,
321
+ body: rawResponse
322
+ }
323
+ };
324
+ }
325
+ };
326
+ var assemblyaiUploadResponseSchema = import_zod2.z.object({
327
+ upload_url: import_zod2.z.string()
328
+ });
329
+ var assemblyaiTranscriptionResponseSchema = import_zod2.z.object({
330
+ text: import_zod2.z.string().nullish(),
331
+ language_code: import_zod2.z.string().nullish(),
332
+ words: import_zod2.z.array(
333
+ import_zod2.z.object({
334
+ start: import_zod2.z.number(),
335
+ end: import_zod2.z.number(),
336
+ text: import_zod2.z.string()
337
+ })
338
+ ).nullish(),
339
+ audio_duration: import_zod2.z.number().nullish()
340
+ });
341
+
342
+ // src/assemblyai-provider.ts
343
+ function createAssemblyAI(options = {}) {
344
+ const getHeaders = () => ({
345
+ authorization: (0, import_provider_utils3.loadApiKey)({
346
+ apiKey: options.apiKey,
347
+ environmentVariableName: "ASSEMBLYAI_API_KEY",
348
+ description: "AssemblyAI"
349
+ }),
350
+ ...options.headers
351
+ });
352
+ const createTranscriptionModel = (modelId) => new AssemblyAITranscriptionModel(modelId, {
353
+ provider: `assemblyai.transcription`,
354
+ url: ({ path }) => `https://api.assemblyai.com${path}`,
355
+ headers: getHeaders,
356
+ fetch: options.fetch
357
+ });
358
+ const provider = function(modelId) {
359
+ return {
360
+ transcription: createTranscriptionModel(modelId)
361
+ };
362
+ };
363
+ provider.transcription = createTranscriptionModel;
364
+ provider.transcriptionModel = createTranscriptionModel;
365
+ return provider;
366
+ }
367
+ var assemblyai = createAssemblyAI();
368
+ // Annotate the CommonJS export names for ESM import in node:
369
+ 0 && (module.exports = {
370
+ assemblyai,
371
+ createAssemblyAI
372
+ });
373
+ //# sourceMappingURL=index.js.map
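For reference, `doGenerate` above issues two requests against `https://api.assemblyai.com`: it uploads the audio bytes to `/v2/upload` and then creates a transcript at `/v2/transcript` that points at the returned `upload_url`. A condensed sketch of that flow with plain `fetch` (no response validation or error handling, unlike the bundled handlers):

```ts
const API = 'https://api.assemblyai.com';
// The provider reads this key from ASSEMBLYAI_API_KEY as well.
const headers = { authorization: process.env.ASSEMBLYAI_API_KEY! };

async function transcribeRaw(audio: Uint8Array, speechModel: 'best' | 'nano') {
  // 1. Upload the raw audio bytes.
  const { upload_url } = await (
    await fetch(`${API}/v2/upload`, {
      method: 'POST',
      headers: { ...headers, 'Content-Type': 'application/octet-stream' },
      body: audio,
    })
  ).json();

  // 2. Create a transcript that references the uploaded audio.
  return (
    await fetch(`${API}/v2/transcript`, {
      method: 'POST',
      headers: { ...headers, 'Content-Type': 'application/json' },
      body: JSON.stringify({ speech_model: speechModel, audio_url: upload_url }),
    })
  ).json();
}
```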
package/dist/index.js.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/index.ts","../src/assemblyai-provider.ts","../src/assemblyai-transcription-model.ts","../src/assemblyai-error.ts"],"sourcesContent":["export { createAssemblyAI, assemblyai } from './assemblyai-provider';\nexport type {\n AssemblyAIProvider,\n AssemblyAIProviderSettings,\n} from './assemblyai-provider';\n","import { TranscriptionModelV1, ProviderV1 } from '@ai-sdk/provider';\nimport { FetchFunction, loadApiKey } from '@ai-sdk/provider-utils';\nimport { AssemblyAITranscriptionModel } from './assemblyai-transcription-model';\nimport { AssemblyAITranscriptionModelId } from './assemblyai-transcription-settings';\n\nexport interface AssemblyAIProvider\n extends Pick<ProviderV1, 'transcriptionModel'> {\n (\n modelId: 'best',\n settings?: {},\n ): {\n transcription: AssemblyAITranscriptionModel;\n };\n\n /**\nCreates a model for transcription.\n */\n transcription(modelId: AssemblyAITranscriptionModelId): TranscriptionModelV1;\n}\n\nexport interface AssemblyAIProviderSettings {\n /**\nAPI key for authenticating requests.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\n/**\nCreate an AssemblyAI provider instance.\n */\nexport function createAssemblyAI(\n options: AssemblyAIProviderSettings = {},\n): AssemblyAIProvider {\n const getHeaders = () => ({\n authorization: loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'ASSEMBLYAI_API_KEY',\n description: 'AssemblyAI',\n }),\n ...options.headers,\n });\n\n const createTranscriptionModel = (modelId: AssemblyAITranscriptionModelId) =>\n new AssemblyAITranscriptionModel(modelId, {\n provider: `assemblyai.transcription`,\n url: ({ path }) => `https://api.assemblyai.com${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (modelId: AssemblyAITranscriptionModelId) {\n return {\n transcription: createTranscriptionModel(modelId),\n };\n };\n\n provider.transcription = createTranscriptionModel;\n provider.transcriptionModel = createTranscriptionModel;\n\n return provider as AssemblyAIProvider;\n}\n\n/**\nDefault AssemblyAI provider instance.\n */\nexport const assemblyai = createAssemblyAI();\n","import {\n TranscriptionModelV1,\n TranscriptionModelV1CallWarning,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n parseProviderOptions,\n postJsonToApi,\n postToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { AssemblyAIConfig } from './assemblyai-config';\nimport { assemblyaiFailedResponseHandler } from './assemblyai-error';\nimport { AssemblyAITranscriptionModelId } from './assemblyai-transcription-settings';\nimport { AssemblyAITranscriptionAPITypes } from './assemblyai-api-types';\n\n// https://www.assemblyai.com/docs/api-reference/transcripts/submit\nconst assemblyaiProviderOptionsSchema = z.object({\n /**\n * End time of the audio in milliseconds.\n */\n audioEndAt: z.number().int().nullish(),\n /**\n * Start time of the audio in milliseconds.\n */\n audioStartFrom: z.number().int().nullish(),\n /**\n * Whether to automatically generate chapters for the transcription.\n */\n autoChapters: z.boolean().nullish(),\n /**\n * Whether to automatically generate highlights for the transcription.\n */\n autoHighlights: z.boolean().nullish(),\n /**\n * Boost 
parameter for the transcription.\n * Allowed values: 'low', 'default', 'high'.\n */\n boostParam: z.string().nullish(),\n /**\n * Whether to enable content safety filtering.\n */\n contentSafety: z.boolean().nullish(),\n /**\n * Confidence threshold for content safety filtering (25-100).\n */\n contentSafetyConfidence: z.number().int().min(25).max(100).nullish(),\n /**\n * Custom spelling rules for the transcription.\n */\n customSpelling: z\n .array(\n z.object({\n from: z.array(z.string()),\n to: z.string(),\n }),\n )\n .nullish(),\n /**\n * Whether to include filler words (um, uh, etc.) in the transcription.\n */\n disfluencies: z.boolean().nullish(),\n /**\n * Whether to enable entity detection.\n */\n entityDetection: z.boolean().nullish(),\n /**\n * Whether to filter profanity from the transcription.\n */\n filterProfanity: z.boolean().nullish(),\n /**\n * Whether to format text with punctuation and capitalization.\n */\n formatText: z.boolean().nullish(),\n /**\n * Whether to enable IAB categories detection.\n */\n iabCategories: z.boolean().nullish(),\n /**\n * Language code for the transcription.\n */\n languageCode: z.union([z.literal('en'), z.string()]).nullish(),\n /**\n * Confidence threshold for language detection.\n */\n languageConfidenceThreshold: z.number().nullish(),\n /**\n * Whether to enable language detection.\n */\n languageDetection: z.boolean().nullish(),\n /**\n * Whether to process audio as multichannel.\n */\n multichannel: z.boolean().nullish(),\n /**\n * Whether to add punctuation to the transcription.\n */\n punctuate: z.boolean().nullish(),\n /**\n * Whether to redact personally identifiable information (PII).\n */\n redactPii: z.boolean().nullish(),\n /**\n * Whether to redact PII in the audio file.\n */\n redactPiiAudio: z.boolean().nullish(),\n /**\n * Audio format for PII redaction.\n */\n redactPiiAudioQuality: z.string().nullish(),\n /**\n * List of PII types to redact.\n */\n redactPiiPolicies: z.array(z.string()).nullish(),\n /**\n * Substitution method for redacted PII.\n */\n redactPiiSub: z.string().nullish(),\n /**\n * Whether to enable sentiment analysis.\n */\n sentimentAnalysis: z.boolean().nullish(),\n /**\n * Whether to identify different speakers in the audio.\n */\n speakerLabels: z.boolean().nullish(),\n /**\n * Number of speakers expected in the audio.\n */\n speakersExpected: z.number().int().nullish(),\n /**\n * Threshold for speech detection (0-1).\n */\n speechThreshold: z.number().min(0).max(1).nullish(),\n /**\n * Whether to generate a summary of the transcription.\n */\n summarization: z.boolean().nullish(),\n /**\n * Model to use for summarization.\n */\n summaryModel: z.string().nullish(),\n /**\n * Type of summary to generate.\n */\n summaryType: z.string().nullish(),\n /**\n * List of topics to identify in the transcription.\n */\n topics: z.array(z.string()).nullish(),\n /**\n * Name of the authentication header for webhook requests.\n */\n webhookAuthHeaderName: z.string().nullish(),\n /**\n * Value of the authentication header for webhook requests.\n */\n webhookAuthHeaderValue: z.string().nullish(),\n /**\n * URL to send webhook notifications to.\n */\n webhookUrl: z.string().nullish(),\n /**\n * List of words to boost recognition for.\n */\n wordBoost: z.array(z.string()).nullish(),\n});\n\nexport type AssemblyAITranscriptionCallOptions = z.infer<\n typeof assemblyaiProviderOptionsSchema\n>;\n\ninterface AssemblyAITranscriptionModelConfig extends AssemblyAIConfig {\n _internal?: {\n currentDate?: () => Date;\n 
};\n}\n\nexport class AssemblyAITranscriptionModel implements TranscriptionModelV1 {\n readonly specificationVersion = 'v1';\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n readonly modelId: AssemblyAITranscriptionModelId,\n private readonly config: AssemblyAITranscriptionModelConfig,\n ) {}\n\n private async getArgs({\n providerOptions,\n }: Parameters<TranscriptionModelV1['doGenerate']>[0]) {\n const warnings: TranscriptionModelV1CallWarning[] = [];\n\n // Parse provider options\n const assemblyaiOptions = await parseProviderOptions({\n provider: 'assemblyai',\n providerOptions,\n schema: assemblyaiProviderOptionsSchema,\n });\n\n const body: Omit<AssemblyAITranscriptionAPITypes, 'audio_url'> = {\n speech_model: this.modelId,\n };\n\n // Add provider-specific options\n if (assemblyaiOptions) {\n body.audio_end_at = assemblyaiOptions.audioEndAt ?? undefined;\n body.audio_start_from = assemblyaiOptions.audioStartFrom ?? undefined;\n body.auto_chapters = assemblyaiOptions.autoChapters ?? undefined;\n body.auto_highlights = assemblyaiOptions.autoHighlights ?? undefined;\n body.boost_param = (assemblyaiOptions.boostParam as never) ?? undefined;\n body.content_safety = assemblyaiOptions.contentSafety ?? undefined;\n body.content_safety_confidence =\n assemblyaiOptions.contentSafetyConfidence ?? undefined;\n body.custom_spelling =\n (assemblyaiOptions.customSpelling as never) ?? undefined;\n body.disfluencies = assemblyaiOptions.disfluencies ?? undefined;\n body.entity_detection = assemblyaiOptions.entityDetection ?? undefined;\n body.filter_profanity = assemblyaiOptions.filterProfanity ?? undefined;\n body.format_text = assemblyaiOptions.formatText ?? undefined;\n body.iab_categories = assemblyaiOptions.iabCategories ?? undefined;\n body.language_code =\n (assemblyaiOptions.languageCode as never) ?? undefined;\n body.language_confidence_threshold =\n assemblyaiOptions.languageConfidenceThreshold ?? undefined;\n body.language_detection =\n assemblyaiOptions.languageDetection ?? undefined;\n body.multichannel = assemblyaiOptions.multichannel ?? undefined;\n body.punctuate = assemblyaiOptions.punctuate ?? undefined;\n body.redact_pii = assemblyaiOptions.redactPii ?? undefined;\n body.redact_pii_audio = assemblyaiOptions.redactPiiAudio ?? undefined;\n body.redact_pii_audio_quality =\n (assemblyaiOptions.redactPiiAudioQuality as never) ?? undefined;\n body.redact_pii_policies =\n (assemblyaiOptions.redactPiiPolicies as never) ?? undefined;\n body.redact_pii_sub =\n (assemblyaiOptions.redactPiiSub as never) ?? undefined;\n body.sentiment_analysis =\n assemblyaiOptions.sentimentAnalysis ?? undefined;\n body.speaker_labels = assemblyaiOptions.speakerLabels ?? undefined;\n body.speakers_expected = assemblyaiOptions.speakersExpected ?? undefined;\n body.speech_threshold = assemblyaiOptions.speechThreshold ?? undefined;\n body.summarization = assemblyaiOptions.summarization ?? undefined;\n body.summary_model =\n (assemblyaiOptions.summaryModel as never) ?? undefined;\n body.summary_type = (assemblyaiOptions.summaryType as never) ?? undefined;\n body.topics = assemblyaiOptions.topics ?? undefined;\n body.webhook_auth_header_name =\n assemblyaiOptions.webhookAuthHeaderName ?? undefined;\n body.webhook_auth_header_value =\n assemblyaiOptions.webhookAuthHeaderValue ?? undefined;\n body.webhook_url = assemblyaiOptions.webhookUrl ?? undefined;\n body.word_boost = assemblyaiOptions.wordBoost ?? 
undefined;\n }\n\n return {\n body,\n warnings,\n };\n }\n\n async doGenerate(\n options: Parameters<TranscriptionModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<TranscriptionModelV1['doGenerate']>>> {\n const currentDate = this.config._internal?.currentDate?.() ?? new Date();\n\n const { value: uploadResponse } = await postToApi({\n url: this.config.url({\n path: '/v2/upload',\n modelId: '',\n }),\n headers: {\n 'Content-Type': 'application/octet-stream',\n ...combineHeaders(this.config.headers(), options.headers),\n },\n body: {\n content: options.audio,\n values: options.audio,\n },\n failedResponseHandler: assemblyaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n assemblyaiUploadResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { body, warnings } = await this.getArgs(options);\n\n const {\n value: response,\n responseHeaders,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/v2/transcript',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: {\n ...body,\n audio_url: uploadResponse.upload_url,\n },\n failedResponseHandler: assemblyaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n assemblyaiTranscriptionResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n text: response.text ?? '',\n segments:\n response.words?.map(word => ({\n text: word.text,\n startSecond: word.start,\n endSecond: word.end,\n })) ?? [],\n language: response.language_code ?? undefined,\n durationInSeconds:\n response.audio_duration ?? response.words?.at(-1)?.end ?? undefined,\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n body: rawResponse,\n },\n };\n }\n}\n\nconst assemblyaiUploadResponseSchema = z.object({\n upload_url: z.string(),\n});\n\nconst assemblyaiTranscriptionResponseSchema = z.object({\n text: z.string().nullish(),\n language_code: z.string().nullish(),\n words: z\n .array(\n z.object({\n start: z.number(),\n end: z.number(),\n text: z.string(),\n }),\n )\n .nullish(),\n audio_duration: z.number().nullish(),\n});\n","import { z } from 'zod';\nimport { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\n\nexport const assemblyaiErrorDataSchema = z.object({\n error: z.object({\n message: z.string(),\n code: z.number(),\n }),\n});\n\nexport type AssemblyAIErrorData = z.infer<typeof assemblyaiErrorDataSchema>;\n\nexport const assemblyaiFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: assemblyaiErrorDataSchema,\n errorToMessage: data => 
data.error.message,\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACCA,IAAAA,yBAA0C;;;ACG1C,IAAAC,yBAMO;AACP,IAAAC,cAAkB;;;ACXlB,iBAAkB;AAClB,4BAA+C;AAExC,IAAM,4BAA4B,aAAE,OAAO;AAAA,EAChD,OAAO,aAAE,OAAO;AAAA,IACd,SAAS,aAAE,OAAO;AAAA,IAClB,MAAM,aAAE,OAAO;AAAA,EACjB,CAAC;AACH,CAAC;AAIM,IAAM,sCAAkC,sDAA+B;AAAA,EAC5E,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC,CAAC;;;ADGD,IAAM,kCAAkC,cAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAI/C,YAAY,cAAE,OAAO,EAAE,IAAI,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIrC,gBAAgB,cAAE,OAAO,EAAE,IAAI,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIzC,cAAc,cAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIlC,gBAAgB,cAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,EAKpC,YAAY,cAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAI/B,eAAe,cAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAInC,yBAAyB,cAAE,OAAO,EAAE,IAAI,EAAE,IAAI,EAAE,EAAE,IAAI,GAAG,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAInE,gBAAgB,cACb;AAAA,IACC,cAAE,OAAO;AAAA,MACP,MAAM,cAAE,MAAM,cAAE,OAAO,CAAC;AAAA,MACxB,IAAI,cAAE,OAAO;AAAA,IACf,CAAC;AAAA,EACH,EACC,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIX,cAAc,cAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIlC,iBAAiB,cAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIrC,iBAAiB,cAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIrC,YAAY,cAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIhC,eAAe,cAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAInC,cAAc,cAAE,MAAM,CAAC,cAAE,QAAQ,IAAI,GAAG,cAAE,OAAO,CAAC,CAAC,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAI7D,6BAA6B,cAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIhD,mBAAmB,cAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIvC,cAAc,cAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIlC,WAAW,cAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAI/B,WAAW,cAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAI/B,gBAAgB,cAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIpC,uBAAuB,cAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAI1C,mBAAmB,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAI/C,cAAc,cAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIjC,mBAAmB,cAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIvC,eAAe,cAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAInC,kBAAkB,cAAE,OAAO,EAAE,IAAI,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAI3C,iBAAiB,cAAE,OAAO,EAAE,IAAI,CAAC,EAAE,IAAI,CAAC,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIlD,eAAe,cAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAInC,cAAc,cAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIjC,aAAa,cAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIhC,QAAQ,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIpC,uBAAuB,cAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAI1C,wBAAwB,cAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAI3C,YAAY,cAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAI/B,WAAW,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,QAAQ;AACzC,CAAC;AAYM,IAAM,+BAAN,MAAmE;AAAA,EAOxE,YACW,SACQ,QACjB;AAFS;AACQ;AARnB,SAAS,uBAAuB;AAAA,EAS7B;AAAA,EAPH,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAOA,MAAc,QAAQ;AAAA,IACpB;AAAA,EACF,GAAsD;AAjMxD;AAkMI,UAAM,WAA8C,CAAC;AAGrD,UAAM,oBAAoB,UAAM,6CAAqB;AAAA,MACnD,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,UAAM,OAA2D;AAAA,MAC/D,cAAc,KAAK;AAAA,IACrB;AAGA,QAAI,mBAAmB;AACrB,WAAK,gBAAe,uBAAkB,eAAlB,YAAgC;AACpD,WAAK,oBAAmB,uBAAkB,mBAAlB,YAAoC;AAC5D,WAAK,iBAAgB,uBAAkB,iBAAlB,YAAkC;AACvD,WAAK,mBAAkB,uBAAkB,mBAAlB,YAAoC;AAC3D,WAAK,eAAe,uBAAkB,eAAlB,YAA0C;AAC9D,WAAK,kBAAiB,uBAAkB,kBAAlB,YAAmC;AACzD,WAAK,6BACH,uBAAkB,4BAAlB,YAA6C;AAC/C,WAAK,mBACF,uBAAkB,mBAAlB,YAA8C;AACjD,WAAK,gBAAe,uBAAkB,iBAAlB,YAAkC;AACtD,WAAK,oBAAmB,uBAAkB,oBAAlB,YAAqC;AAC7D,WAAK,oBAAmB,uBAAkB,oBAAlB,YAAqC;AAC7D,WAAK,eAAc,uBAAkB,eAAlB,YAAgC;AACnD,WAAK,kBAAiB,uBAAkB,kBAAlB,YAAmC;AACzD,WAAK,iBACF,uBAAkB,iBAAlB,YAA4C;AAC/C,WAAK,iCACH,uBAAkB,gCAAlB,YAAiD;AACnD,WAAK,sBACH,uBAAkB,sBAAlB,YAAuC;AACzC,WAAK,gBAAe,uBAAkB,iBAAlB,YAAkC;AACtD,WAA
K,aAAY,uBAAkB,cAAlB,YAA+B;AAChD,WAAK,cAAa,uBAAkB,cAAlB,YAA+B;AACjD,WAAK,oBAAmB,uBAAkB,mBAAlB,YAAoC;AAC5D,WAAK,4BACF,uBAAkB,0BAAlB,YAAqD;AACxD,WAAK,uBACF,uBAAkB,sBAAlB,YAAiD;AACpD,WAAK,kBACF,uBAAkB,iBAAlB,YAA4C;AAC/C,WAAK,sBACH,uBAAkB,sBAAlB,YAAuC;AACzC,WAAK,kBAAiB,uBAAkB,kBAAlB,YAAmC;AACzD,WAAK,qBAAoB,uBAAkB,qBAAlB,YAAsC;AAC/D,WAAK,oBAAmB,uBAAkB,oBAAlB,YAAqC;AAC7D,WAAK,iBAAgB,uBAAkB,kBAAlB,YAAmC;AACxD,WAAK,iBACF,uBAAkB,iBAAlB,YAA4C;AAC/C,WAAK,gBAAgB,uBAAkB,gBAAlB,YAA2C;AAChE,WAAK,UAAS,uBAAkB,WAAlB,YAA4B;AAC1C,WAAK,4BACH,uBAAkB,0BAAlB,YAA2C;AAC7C,WAAK,6BACH,uBAAkB,2BAAlB,YAA4C;AAC9C,WAAK,eAAc,uBAAkB,eAAlB,YAAgC;AACnD,WAAK,cAAa,uBAAkB,cAAlB,YAA+B;AAAA,IACnD;AAEA,WAAO;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SACkE;AA1QtE;AA2QI,UAAM,eAAc,sBAAK,OAAO,cAAZ,mBAAuB,gBAAvB,4CAA0C,oBAAI,KAAK;AAEvE,UAAM,EAAE,OAAO,eAAe,IAAI,UAAM,kCAAU;AAAA,MAChD,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,MACD,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,OAAG,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC1D;AAAA,MACA,MAAM;AAAA,QACJ,SAAS,QAAQ;AAAA,QACjB,QAAQ,QAAQ;AAAA,MAClB;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM;AAAA,MACJ,OAAO;AAAA,MACP;AAAA,MACA,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,WAAW,eAAe;AAAA,MAC5B;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,OAAM,cAAS,SAAT,YAAiB;AAAA,MACvB,WACE,oBAAS,UAAT,mBAAgB,IAAI,WAAS;AAAA,QAC3B,MAAM,KAAK;AAAA,QACX,aAAa,KAAK;AAAA,QAClB,WAAW,KAAK;AAAA,MAClB,QAJA,YAIO,CAAC;AAAA,MACV,WAAU,cAAS,kBAAT,YAA0B;AAAA,MACpC,oBACE,oBAAS,mBAAT,aAA2B,oBAAS,UAAT,mBAAgB,GAAG,QAAnB,mBAAwB,QAAnD,YAA0D;AAAA,MAC5D;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACF;AAEA,IAAM,iCAAiC,cAAE,OAAO;AAAA,EAC9C,YAAY,cAAE,OAAO;AACvB,CAAC;AAED,IAAM,wCAAwC,cAAE,OAAO;AAAA,EACrD,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,EACzB,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,OAAO,cACJ;AAAA,IACC,cAAE,OAAO;AAAA,MACP,OAAO,cAAE,OAAO;AAAA,MAChB,KAAK,cAAE,OAAO;AAAA,MACd,MAAM,cAAE,OAAO;AAAA,IACjB,CAAC;AAAA,EACH,EACC,QAAQ;AAAA,EACX,gBAAgB,cAAE,OAAO,EAAE,QAAQ;AACrC,CAAC;;;ADxTM,SAAS,iBACd,UAAsC,CAAC,GACnB;AACpB,QAAM,aAAa,OAAO;AAAA,IACxB,mBAAe,mCAAW;AAAA,MACxB,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC;AAAA,IACD,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,2BAA2B,CAAC,YAChC,IAAI,6BAA6B,SAAS;AAAA,IACxC,UAAU;AAAA,IACV,KAAK,CAAC,EAAE,KAAK,MAAM,6BAA6B,IAAI;AAAA,IACpD,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SAAU,SAAyC;AAClE,WAAO;AAAA,MACL,eAAe,yBAAyB,OAAO;AAAA,IACjD;AAAA,EACF;AAEA,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,SAAO;AACT;AAKO,IAAM,aAAa,iBAAiB;","names":["import_provider_utils","import_provider_utils","import_zod"]}
package/dist/index.mjs ADDED
@@ -0,0 +1,351 @@
1
+ // src/assemblyai-provider.ts
2
+ import { loadApiKey } from "@ai-sdk/provider-utils";
3
+
4
+ // src/assemblyai-transcription-model.ts
5
+ import {
6
+ combineHeaders,
7
+ createJsonResponseHandler,
8
+ parseProviderOptions,
9
+ postJsonToApi,
10
+ postToApi
11
+ } from "@ai-sdk/provider-utils";
12
+ import { z as z2 } from "zod";
13
+
14
+ // src/assemblyai-error.ts
15
+ import { z } from "zod";
16
+ import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
17
+ var assemblyaiErrorDataSchema = z.object({
18
+ error: z.object({
19
+ message: z.string(),
20
+ code: z.number()
21
+ })
22
+ });
23
+ var assemblyaiFailedResponseHandler = createJsonErrorResponseHandler({
24
+ errorSchema: assemblyaiErrorDataSchema,
25
+ errorToMessage: (data) => data.error.message
26
+ });
27
+
28
+ // src/assemblyai-transcription-model.ts
29
+ var assemblyaiProviderOptionsSchema = z2.object({
30
+ /**
31
+ * End time of the audio in milliseconds.
32
+ */
33
+ audioEndAt: z2.number().int().nullish(),
34
+ /**
35
+ * Start time of the audio in milliseconds.
36
+ */
37
+ audioStartFrom: z2.number().int().nullish(),
38
+ /**
39
+ * Whether to automatically generate chapters for the transcription.
40
+ */
41
+ autoChapters: z2.boolean().nullish(),
42
+ /**
43
+ * Whether to automatically generate highlights for the transcription.
44
+ */
45
+ autoHighlights: z2.boolean().nullish(),
46
+ /**
47
+ * Boost parameter for the transcription.
48
+ * Allowed values: 'low', 'default', 'high'.
49
+ */
50
+ boostParam: z2.string().nullish(),
51
+ /**
52
+ * Whether to enable content safety filtering.
53
+ */
54
+ contentSafety: z2.boolean().nullish(),
55
+ /**
56
+ * Confidence threshold for content safety filtering (25-100).
57
+ */
58
+ contentSafetyConfidence: z2.number().int().min(25).max(100).nullish(),
59
+ /**
60
+ * Custom spelling rules for the transcription.
61
+ */
62
+ customSpelling: z2.array(
63
+ z2.object({
64
+ from: z2.array(z2.string()),
65
+ to: z2.string()
66
+ })
67
+ ).nullish(),
68
+ /**
69
+ * Whether to include filler words (um, uh, etc.) in the transcription.
70
+ */
71
+ disfluencies: z2.boolean().nullish(),
72
+ /**
73
+ * Whether to enable entity detection.
74
+ */
75
+ entityDetection: z2.boolean().nullish(),
76
+ /**
77
+ * Whether to filter profanity from the transcription.
78
+ */
79
+ filterProfanity: z2.boolean().nullish(),
80
+ /**
81
+ * Whether to format text with punctuation and capitalization.
82
+ */
83
+ formatText: z2.boolean().nullish(),
84
+ /**
85
+ * Whether to enable IAB categories detection.
86
+ */
87
+ iabCategories: z2.boolean().nullish(),
88
+ /**
89
+ * Language code for the transcription.
90
+ */
91
+ languageCode: z2.union([z2.literal("en"), z2.string()]).nullish(),
92
+ /**
93
+ * Confidence threshold for language detection.
94
+ */
95
+ languageConfidenceThreshold: z2.number().nullish(),
96
+ /**
97
+ * Whether to enable language detection.
98
+ */
99
+ languageDetection: z2.boolean().nullish(),
100
+ /**
101
+ * Whether to process audio as multichannel.
102
+ */
103
+ multichannel: z2.boolean().nullish(),
104
+ /**
105
+ * Whether to add punctuation to the transcription.
106
+ */
107
+ punctuate: z2.boolean().nullish(),
108
+ /**
109
+ * Whether to redact personally identifiable information (PII).
110
+ */
111
+ redactPii: z2.boolean().nullish(),
112
+ /**
113
+ * Whether to redact PII in the audio file.
114
+ */
115
+ redactPiiAudio: z2.boolean().nullish(),
116
+ /**
117
+ * Audio format for PII redaction.
118
+ */
119
+ redactPiiAudioQuality: z2.string().nullish(),
120
+ /**
121
+ * List of PII types to redact.
122
+ */
123
+ redactPiiPolicies: z2.array(z2.string()).nullish(),
124
+ /**
125
+ * Substitution method for redacted PII.
126
+ */
127
+ redactPiiSub: z2.string().nullish(),
128
+ /**
129
+ * Whether to enable sentiment analysis.
130
+ */
131
+ sentimentAnalysis: z2.boolean().nullish(),
132
+ /**
133
+ * Whether to identify different speakers in the audio.
134
+ */
135
+ speakerLabels: z2.boolean().nullish(),
136
+ /**
137
+ * Number of speakers expected in the audio.
138
+ */
139
+ speakersExpected: z2.number().int().nullish(),
140
+ /**
141
+ * Threshold for speech detection (0-1).
142
+ */
143
+ speechThreshold: z2.number().min(0).max(1).nullish(),
144
+ /**
145
+ * Whether to generate a summary of the transcription.
146
+ */
147
+ summarization: z2.boolean().nullish(),
148
+ /**
149
+ * Model to use for summarization.
150
+ */
151
+ summaryModel: z2.string().nullish(),
152
+ /**
153
+ * Type of summary to generate.
154
+ */
155
+ summaryType: z2.string().nullish(),
156
+ /**
157
+ * List of topics to identify in the transcription.
158
+ */
159
+ topics: z2.array(z2.string()).nullish(),
160
+ /**
161
+ * Name of the authentication header for webhook requests.
162
+ */
163
+ webhookAuthHeaderName: z2.string().nullish(),
164
+ /**
165
+ * Value of the authentication header for webhook requests.
166
+ */
167
+ webhookAuthHeaderValue: z2.string().nullish(),
168
+ /**
169
+ * URL to send webhook notifications to.
170
+ */
171
+ webhookUrl: z2.string().nullish(),
172
+ /**
173
+ * List of words to boost recognition for.
174
+ */
175
+ wordBoost: z2.array(z2.string()).nullish()
176
+ });
177
+ var AssemblyAITranscriptionModel = class {
178
+ constructor(modelId, config) {
179
+ this.modelId = modelId;
180
+ this.config = config;
181
+ this.specificationVersion = "v1";
182
+ }
183
+ get provider() {
184
+ return this.config.provider;
185
+ }
186
+ async getArgs({
187
+ providerOptions
188
+ }) {
189
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D, _E, _F, _G, _H, _I;
190
+ const warnings = [];
191
+ const assemblyaiOptions = await parseProviderOptions({
192
+ provider: "assemblyai",
193
+ providerOptions,
194
+ schema: assemblyaiProviderOptionsSchema
195
+ });
196
+ const body = {
197
+ speech_model: this.modelId
198
+ };
199
+ if (assemblyaiOptions) {
200
+ body.audio_end_at = (_a = assemblyaiOptions.audioEndAt) != null ? _a : void 0;
201
+ body.audio_start_from = (_b = assemblyaiOptions.audioStartFrom) != null ? _b : void 0;
202
+ body.auto_chapters = (_c = assemblyaiOptions.autoChapters) != null ? _c : void 0;
203
+ body.auto_highlights = (_d = assemblyaiOptions.autoHighlights) != null ? _d : void 0;
204
+ body.boost_param = (_e = assemblyaiOptions.boostParam) != null ? _e : void 0;
205
+ body.content_safety = (_f = assemblyaiOptions.contentSafety) != null ? _f : void 0;
206
+ body.content_safety_confidence = (_g = assemblyaiOptions.contentSafetyConfidence) != null ? _g : void 0;
207
+ body.custom_spelling = (_h = assemblyaiOptions.customSpelling) != null ? _h : void 0;
208
+ body.disfluencies = (_i = assemblyaiOptions.disfluencies) != null ? _i : void 0;
209
+ body.entity_detection = (_j = assemblyaiOptions.entityDetection) != null ? _j : void 0;
210
+ body.filter_profanity = (_k = assemblyaiOptions.filterProfanity) != null ? _k : void 0;
211
+ body.format_text = (_l = assemblyaiOptions.formatText) != null ? _l : void 0;
212
+ body.iab_categories = (_m = assemblyaiOptions.iabCategories) != null ? _m : void 0;
213
+ body.language_code = (_n = assemblyaiOptions.languageCode) != null ? _n : void 0;
214
+ body.language_confidence_threshold = (_o = assemblyaiOptions.languageConfidenceThreshold) != null ? _o : void 0;
215
+ body.language_detection = (_p = assemblyaiOptions.languageDetection) != null ? _p : void 0;
216
+ body.multichannel = (_q = assemblyaiOptions.multichannel) != null ? _q : void 0;
217
+ body.punctuate = (_r = assemblyaiOptions.punctuate) != null ? _r : void 0;
218
+ body.redact_pii = (_s = assemblyaiOptions.redactPii) != null ? _s : void 0;
219
+ body.redact_pii_audio = (_t = assemblyaiOptions.redactPiiAudio) != null ? _t : void 0;
220
+ body.redact_pii_audio_quality = (_u = assemblyaiOptions.redactPiiAudioQuality) != null ? _u : void 0;
221
+ body.redact_pii_policies = (_v = assemblyaiOptions.redactPiiPolicies) != null ? _v : void 0;
222
+ body.redact_pii_sub = (_w = assemblyaiOptions.redactPiiSub) != null ? _w : void 0;
223
+ body.sentiment_analysis = (_x = assemblyaiOptions.sentimentAnalysis) != null ? _x : void 0;
224
+ body.speaker_labels = (_y = assemblyaiOptions.speakerLabels) != null ? _y : void 0;
225
+ body.speakers_expected = (_z = assemblyaiOptions.speakersExpected) != null ? _z : void 0;
226
+ body.speech_threshold = (_A = assemblyaiOptions.speechThreshold) != null ? _A : void 0;
227
+ body.summarization = (_B = assemblyaiOptions.summarization) != null ? _B : void 0;
228
+ body.summary_model = (_C = assemblyaiOptions.summaryModel) != null ? _C : void 0;
229
+ body.summary_type = (_D = assemblyaiOptions.summaryType) != null ? _D : void 0;
230
+ body.topics = (_E = assemblyaiOptions.topics) != null ? _E : void 0;
231
+ body.webhook_auth_header_name = (_F = assemblyaiOptions.webhookAuthHeaderName) != null ? _F : void 0;
232
+ body.webhook_auth_header_value = (_G = assemblyaiOptions.webhookAuthHeaderValue) != null ? _G : void 0;
233
+ body.webhook_url = (_H = assemblyaiOptions.webhookUrl) != null ? _H : void 0;
234
+ body.word_boost = (_I = assemblyaiOptions.wordBoost) != null ? _I : void 0;
235
+ }
236
+ return {
237
+ body,
238
+ warnings
239
+ };
240
+ }
241
+ async doGenerate(options) {
242
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
243
+ const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
244
+ const { value: uploadResponse } = await postToApi({
245
+ url: this.config.url({
246
+ path: "/v2/upload",
247
+ modelId: ""
248
+ }),
249
+ headers: {
250
+ "Content-Type": "application/octet-stream",
251
+ ...combineHeaders(this.config.headers(), options.headers)
252
+ },
253
+ body: {
254
+ content: options.audio,
255
+ values: options.audio
256
+ },
257
+ failedResponseHandler: assemblyaiFailedResponseHandler,
258
+ successfulResponseHandler: createJsonResponseHandler(
259
+ assemblyaiUploadResponseSchema
260
+ ),
261
+ abortSignal: options.abortSignal,
262
+ fetch: this.config.fetch
263
+ });
264
+ const { body, warnings } = await this.getArgs(options);
265
+ const {
266
+ value: response,
267
+ responseHeaders,
268
+ rawValue: rawResponse
269
+ } = await postJsonToApi({
270
+ url: this.config.url({
271
+ path: "/v2/transcript",
272
+ modelId: this.modelId
273
+ }),
274
+ headers: combineHeaders(this.config.headers(), options.headers),
275
+ body: {
276
+ ...body,
277
+ audio_url: uploadResponse.upload_url
278
+ },
279
+ failedResponseHandler: assemblyaiFailedResponseHandler,
280
+ successfulResponseHandler: createJsonResponseHandler(
281
+ assemblyaiTranscriptionResponseSchema
282
+ ),
283
+ abortSignal: options.abortSignal,
284
+ fetch: this.config.fetch
285
+ });
286
+ return {
287
+ text: (_d = response.text) != null ? _d : "",
288
+ segments: (_f = (_e = response.words) == null ? void 0 : _e.map((word) => ({
289
+ text: word.text,
290
+ startSecond: word.start,
291
+ endSecond: word.end
292
+ }))) != null ? _f : [],
293
+ language: (_g = response.language_code) != null ? _g : void 0,
294
+ durationInSeconds: (_k = (_j = response.audio_duration) != null ? _j : (_i = (_h = response.words) == null ? void 0 : _h.at(-1)) == null ? void 0 : _i.end) != null ? _k : void 0,
295
+ warnings,
296
+ response: {
297
+ timestamp: currentDate,
298
+ modelId: this.modelId,
299
+ headers: responseHeaders,
300
+ body: rawResponse
301
+ }
302
+ };
303
+ }
304
+ };
305
+ var assemblyaiUploadResponseSchema = z2.object({
306
+ upload_url: z2.string()
307
+ });
308
+ var assemblyaiTranscriptionResponseSchema = z2.object({
309
+ text: z2.string().nullish(),
310
+ language_code: z2.string().nullish(),
311
+ words: z2.array(
312
+ z2.object({
313
+ start: z2.number(),
314
+ end: z2.number(),
315
+ text: z2.string()
316
+ })
317
+ ).nullish(),
318
+ audio_duration: z2.number().nullish()
319
+ });
320
+
321
+ // src/assemblyai-provider.ts
322
+ function createAssemblyAI(options = {}) {
323
+ const getHeaders = () => ({
324
+ authorization: loadApiKey({
325
+ apiKey: options.apiKey,
326
+ environmentVariableName: "ASSEMBLYAI_API_KEY",
327
+ description: "AssemblyAI"
328
+ }),
329
+ ...options.headers
330
+ });
331
+ const createTranscriptionModel = (modelId) => new AssemblyAITranscriptionModel(modelId, {
332
+ provider: `assemblyai.transcription`,
333
+ url: ({ path }) => `https://api.assemblyai.com${path}`,
334
+ headers: getHeaders,
335
+ fetch: options.fetch
336
+ });
337
+ const provider = function(modelId) {
338
+ return {
339
+ transcription: createTranscriptionModel(modelId)
340
+ };
341
+ };
342
+ provider.transcription = createTranscriptionModel;
343
+ provider.transcriptionModel = createTranscriptionModel;
344
+ return provider;
345
+ }
346
+ var assemblyai = createAssemblyAI();
347
+ export {
348
+ assemblyai,
349
+ createAssemblyAI
350
+ };
351
+ //# sourceMappingURL=index.mjs.map
package/dist/index.mjs.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/assemblyai-provider.ts","../src/assemblyai-transcription-model.ts","../src/assemblyai-error.ts"],"sourcesContent":["import { TranscriptionModelV1, ProviderV1 } from '@ai-sdk/provider';\nimport { FetchFunction, loadApiKey } from '@ai-sdk/provider-utils';\nimport { AssemblyAITranscriptionModel } from './assemblyai-transcription-model';\nimport { AssemblyAITranscriptionModelId } from './assemblyai-transcription-settings';\n\nexport interface AssemblyAIProvider\n extends Pick<ProviderV1, 'transcriptionModel'> {\n (\n modelId: 'best',\n settings?: {},\n ): {\n transcription: AssemblyAITranscriptionModel;\n };\n\n /**\nCreates a model for transcription.\n */\n transcription(modelId: AssemblyAITranscriptionModelId): TranscriptionModelV1;\n}\n\nexport interface AssemblyAIProviderSettings {\n /**\nAPI key for authenticating requests.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\n/**\nCreate an AssemblyAI provider instance.\n */\nexport function createAssemblyAI(\n options: AssemblyAIProviderSettings = {},\n): AssemblyAIProvider {\n const getHeaders = () => ({\n authorization: loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'ASSEMBLYAI_API_KEY',\n description: 'AssemblyAI',\n }),\n ...options.headers,\n });\n\n const createTranscriptionModel = (modelId: AssemblyAITranscriptionModelId) =>\n new AssemblyAITranscriptionModel(modelId, {\n provider: `assemblyai.transcription`,\n url: ({ path }) => `https://api.assemblyai.com${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (modelId: AssemblyAITranscriptionModelId) {\n return {\n transcription: createTranscriptionModel(modelId),\n };\n };\n\n provider.transcription = createTranscriptionModel;\n provider.transcriptionModel = createTranscriptionModel;\n\n return provider as AssemblyAIProvider;\n}\n\n/**\nDefault AssemblyAI provider instance.\n */\nexport const assemblyai = createAssemblyAI();\n","import {\n TranscriptionModelV1,\n TranscriptionModelV1CallWarning,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n parseProviderOptions,\n postJsonToApi,\n postToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { AssemblyAIConfig } from './assemblyai-config';\nimport { assemblyaiFailedResponseHandler } from './assemblyai-error';\nimport { AssemblyAITranscriptionModelId } from './assemblyai-transcription-settings';\nimport { AssemblyAITranscriptionAPITypes } from './assemblyai-api-types';\n\n// https://www.assemblyai.com/docs/api-reference/transcripts/submit\nconst assemblyaiProviderOptionsSchema = z.object({\n /**\n * End time of the audio in milliseconds.\n */\n audioEndAt: z.number().int().nullish(),\n /**\n * Start time of the audio in milliseconds.\n */\n audioStartFrom: z.number().int().nullish(),\n /**\n * Whether to automatically generate chapters for the transcription.\n */\n autoChapters: z.boolean().nullish(),\n /**\n * Whether to automatically generate highlights for the transcription.\n */\n autoHighlights: z.boolean().nullish(),\n /**\n * Boost parameter for the transcription.\n * Allowed values: 'low', 'default', 'high'.\n */\n boostParam: z.string().nullish(),\n /**\n * Whether to enable content safety filtering.\n */\n contentSafety: 
z.boolean().nullish(),\n /**\n * Confidence threshold for content safety filtering (25-100).\n */\n contentSafetyConfidence: z.number().int().min(25).max(100).nullish(),\n /**\n * Custom spelling rules for the transcription.\n */\n customSpelling: z\n .array(\n z.object({\n from: z.array(z.string()),\n to: z.string(),\n }),\n )\n .nullish(),\n /**\n * Whether to include filler words (um, uh, etc.) in the transcription.\n */\n disfluencies: z.boolean().nullish(),\n /**\n * Whether to enable entity detection.\n */\n entityDetection: z.boolean().nullish(),\n /**\n * Whether to filter profanity from the transcription.\n */\n filterProfanity: z.boolean().nullish(),\n /**\n * Whether to format text with punctuation and capitalization.\n */\n formatText: z.boolean().nullish(),\n /**\n * Whether to enable IAB categories detection.\n */\n iabCategories: z.boolean().nullish(),\n /**\n * Language code for the transcription.\n */\n languageCode: z.union([z.literal('en'), z.string()]).nullish(),\n /**\n * Confidence threshold for language detection.\n */\n languageConfidenceThreshold: z.number().nullish(),\n /**\n * Whether to enable language detection.\n */\n languageDetection: z.boolean().nullish(),\n /**\n * Whether to process audio as multichannel.\n */\n multichannel: z.boolean().nullish(),\n /**\n * Whether to add punctuation to the transcription.\n */\n punctuate: z.boolean().nullish(),\n /**\n * Whether to redact personally identifiable information (PII).\n */\n redactPii: z.boolean().nullish(),\n /**\n * Whether to redact PII in the audio file.\n */\n redactPiiAudio: z.boolean().nullish(),\n /**\n * Audio format for PII redaction.\n */\n redactPiiAudioQuality: z.string().nullish(),\n /**\n * List of PII types to redact.\n */\n redactPiiPolicies: z.array(z.string()).nullish(),\n /**\n * Substitution method for redacted PII.\n */\n redactPiiSub: z.string().nullish(),\n /**\n * Whether to enable sentiment analysis.\n */\n sentimentAnalysis: z.boolean().nullish(),\n /**\n * Whether to identify different speakers in the audio.\n */\n speakerLabels: z.boolean().nullish(),\n /**\n * Number of speakers expected in the audio.\n */\n speakersExpected: z.number().int().nullish(),\n /**\n * Threshold for speech detection (0-1).\n */\n speechThreshold: z.number().min(0).max(1).nullish(),\n /**\n * Whether to generate a summary of the transcription.\n */\n summarization: z.boolean().nullish(),\n /**\n * Model to use for summarization.\n */\n summaryModel: z.string().nullish(),\n /**\n * Type of summary to generate.\n */\n summaryType: z.string().nullish(),\n /**\n * List of topics to identify in the transcription.\n */\n topics: z.array(z.string()).nullish(),\n /**\n * Name of the authentication header for webhook requests.\n */\n webhookAuthHeaderName: z.string().nullish(),\n /**\n * Value of the authentication header for webhook requests.\n */\n webhookAuthHeaderValue: z.string().nullish(),\n /**\n * URL to send webhook notifications to.\n */\n webhookUrl: z.string().nullish(),\n /**\n * List of words to boost recognition for.\n */\n wordBoost: z.array(z.string()).nullish(),\n});\n\nexport type AssemblyAITranscriptionCallOptions = z.infer<\n typeof assemblyaiProviderOptionsSchema\n>;\n\ninterface AssemblyAITranscriptionModelConfig extends AssemblyAIConfig {\n _internal?: {\n currentDate?: () => Date;\n };\n}\n\nexport class AssemblyAITranscriptionModel implements TranscriptionModelV1 {\n readonly specificationVersion = 'v1';\n\n get provider(): string {\n return this.config.provider;\n }\n\n 
constructor(\n readonly modelId: AssemblyAITranscriptionModelId,\n private readonly config: AssemblyAITranscriptionModelConfig,\n ) {}\n\n private async getArgs({\n providerOptions,\n }: Parameters<TranscriptionModelV1['doGenerate']>[0]) {\n const warnings: TranscriptionModelV1CallWarning[] = [];\n\n // Parse provider options\n const assemblyaiOptions = await parseProviderOptions({\n provider: 'assemblyai',\n providerOptions,\n schema: assemblyaiProviderOptionsSchema,\n });\n\n const body: Omit<AssemblyAITranscriptionAPITypes, 'audio_url'> = {\n speech_model: this.modelId,\n };\n\n // Add provider-specific options\n if (assemblyaiOptions) {\n body.audio_end_at = assemblyaiOptions.audioEndAt ?? undefined;\n body.audio_start_from = assemblyaiOptions.audioStartFrom ?? undefined;\n body.auto_chapters = assemblyaiOptions.autoChapters ?? undefined;\n body.auto_highlights = assemblyaiOptions.autoHighlights ?? undefined;\n body.boost_param = (assemblyaiOptions.boostParam as never) ?? undefined;\n body.content_safety = assemblyaiOptions.contentSafety ?? undefined;\n body.content_safety_confidence =\n assemblyaiOptions.contentSafetyConfidence ?? undefined;\n body.custom_spelling =\n (assemblyaiOptions.customSpelling as never) ?? undefined;\n body.disfluencies = assemblyaiOptions.disfluencies ?? undefined;\n body.entity_detection = assemblyaiOptions.entityDetection ?? undefined;\n body.filter_profanity = assemblyaiOptions.filterProfanity ?? undefined;\n body.format_text = assemblyaiOptions.formatText ?? undefined;\n body.iab_categories = assemblyaiOptions.iabCategories ?? undefined;\n body.language_code =\n (assemblyaiOptions.languageCode as never) ?? undefined;\n body.language_confidence_threshold =\n assemblyaiOptions.languageConfidenceThreshold ?? undefined;\n body.language_detection =\n assemblyaiOptions.languageDetection ?? undefined;\n body.multichannel = assemblyaiOptions.multichannel ?? undefined;\n body.punctuate = assemblyaiOptions.punctuate ?? undefined;\n body.redact_pii = assemblyaiOptions.redactPii ?? undefined;\n body.redact_pii_audio = assemblyaiOptions.redactPiiAudio ?? undefined;\n body.redact_pii_audio_quality =\n (assemblyaiOptions.redactPiiAudioQuality as never) ?? undefined;\n body.redact_pii_policies =\n (assemblyaiOptions.redactPiiPolicies as never) ?? undefined;\n body.redact_pii_sub =\n (assemblyaiOptions.redactPiiSub as never) ?? undefined;\n body.sentiment_analysis =\n assemblyaiOptions.sentimentAnalysis ?? undefined;\n body.speaker_labels = assemblyaiOptions.speakerLabels ?? undefined;\n body.speakers_expected = assemblyaiOptions.speakersExpected ?? undefined;\n body.speech_threshold = assemblyaiOptions.speechThreshold ?? undefined;\n body.summarization = assemblyaiOptions.summarization ?? undefined;\n body.summary_model =\n (assemblyaiOptions.summaryModel as never) ?? undefined;\n body.summary_type = (assemblyaiOptions.summaryType as never) ?? undefined;\n body.topics = assemblyaiOptions.topics ?? undefined;\n body.webhook_auth_header_name =\n assemblyaiOptions.webhookAuthHeaderName ?? undefined;\n body.webhook_auth_header_value =\n assemblyaiOptions.webhookAuthHeaderValue ?? undefined;\n body.webhook_url = assemblyaiOptions.webhookUrl ?? undefined;\n body.word_boost = assemblyaiOptions.wordBoost ?? undefined;\n }\n\n return {\n body,\n warnings,\n };\n }\n\n async doGenerate(\n options: Parameters<TranscriptionModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<TranscriptionModelV1['doGenerate']>>> {\n const currentDate = this.config._internal?.currentDate?.() ?? 
new Date();\n\n const { value: uploadResponse } = await postToApi({\n url: this.config.url({\n path: '/v2/upload',\n modelId: '',\n }),\n headers: {\n 'Content-Type': 'application/octet-stream',\n ...combineHeaders(this.config.headers(), options.headers),\n },\n body: {\n content: options.audio,\n values: options.audio,\n },\n failedResponseHandler: assemblyaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n assemblyaiUploadResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { body, warnings } = await this.getArgs(options);\n\n const {\n value: response,\n responseHeaders,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/v2/transcript',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: {\n ...body,\n audio_url: uploadResponse.upload_url,\n },\n failedResponseHandler: assemblyaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n assemblyaiTranscriptionResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n text: response.text ?? '',\n segments:\n response.words?.map(word => ({\n text: word.text,\n startSecond: word.start,\n endSecond: word.end,\n })) ?? [],\n language: response.language_code ?? undefined,\n durationInSeconds:\n response.audio_duration ?? response.words?.at(-1)?.end ?? undefined,\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n body: rawResponse,\n },\n };\n }\n}\n\nconst assemblyaiUploadResponseSchema = z.object({\n upload_url: z.string(),\n});\n\nconst assemblyaiTranscriptionResponseSchema = z.object({\n text: z.string().nullish(),\n language_code: z.string().nullish(),\n words: z\n .array(\n z.object({\n start: z.number(),\n end: z.number(),\n text: z.string(),\n }),\n )\n .nullish(),\n audio_duration: z.number().nullish(),\n});\n","import { z } from 'zod';\nimport { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\n\nexport const assemblyaiErrorDataSchema = z.object({\n error: z.object({\n message: z.string(),\n code: z.number(),\n }),\n});\n\nexport type AssemblyAIErrorData = z.infer<typeof assemblyaiErrorDataSchema>;\n\nexport const assemblyaiFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: assemblyaiErrorDataSchema,\n errorToMessage: data => 
data.error.message,\n});\n"],"mappings":";AACA,SAAwB,kBAAkB;;;ACG1C;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,KAAAA,UAAS;;;ACXlB,SAAS,SAAS;AAClB,SAAS,sCAAsC;AAExC,IAAM,4BAA4B,EAAE,OAAO;AAAA,EAChD,OAAO,EAAE,OAAO;AAAA,IACd,SAAS,EAAE,OAAO;AAAA,IAClB,MAAM,EAAE,OAAO;AAAA,EACjB,CAAC;AACH,CAAC;AAIM,IAAM,kCAAkC,+BAA+B;AAAA,EAC5E,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC,CAAC;;;ADGD,IAAM,kCAAkCC,GAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAI/C,YAAYA,GAAE,OAAO,EAAE,IAAI,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIrC,gBAAgBA,GAAE,OAAO,EAAE,IAAI,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIzC,cAAcA,GAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIlC,gBAAgBA,GAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,EAKpC,YAAYA,GAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAI/B,eAAeA,GAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAInC,yBAAyBA,GAAE,OAAO,EAAE,IAAI,EAAE,IAAI,EAAE,EAAE,IAAI,GAAG,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAInE,gBAAgBA,GACb;AAAA,IACCA,GAAE,OAAO;AAAA,MACP,MAAMA,GAAE,MAAMA,GAAE,OAAO,CAAC;AAAA,MACxB,IAAIA,GAAE,OAAO;AAAA,IACf,CAAC;AAAA,EACH,EACC,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIX,cAAcA,GAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIlC,iBAAiBA,GAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIrC,iBAAiBA,GAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIrC,YAAYA,GAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIhC,eAAeA,GAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAInC,cAAcA,GAAE,MAAM,CAACA,GAAE,QAAQ,IAAI,GAAGA,GAAE,OAAO,CAAC,CAAC,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAI7D,6BAA6BA,GAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIhD,mBAAmBA,GAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIvC,cAAcA,GAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIlC,WAAWA,GAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAI/B,WAAWA,GAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAI/B,gBAAgBA,GAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIpC,uBAAuBA,GAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAI1C,mBAAmBA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAI/C,cAAcA,GAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIjC,mBAAmBA,GAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIvC,eAAeA,GAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAInC,kBAAkBA,GAAE,OAAO,EAAE,IAAI,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAI3C,iBAAiBA,GAAE,OAAO,EAAE,IAAI,CAAC,EAAE,IAAI,CAAC,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIlD,eAAeA,GAAE,QAAQ,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAInC,cAAcA,GAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIjC,aAAaA,GAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIhC,QAAQA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAIpC,uBAAuBA,GAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAI1C,wBAAwBA,GAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAI3C,YAAYA,GAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAI/B,WAAWA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,QAAQ;AACzC,CAAC;AAYM,IAAM,+BAAN,MAAmE;AAAA,EAOxE,YACW,SACQ,QACjB;AAFS;AACQ;AARnB,SAAS,uBAAuB;AAAA,EAS7B;AAAA,EAPH,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAOA,MAAc,QAAQ;AAAA,IACpB;AAAA,EACF,GAAsD;AAjMxD;AAkMI,UAAM,WAA8C,CAAC;AAGrD,UAAM,oBAAoB,MAAM,qBAAqB;AAAA,MACnD,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,UAAM,OAA2D;AAAA,MAC/D,cAAc,KAAK;AAAA,IACrB;AAGA,QAAI,mBAAmB;AACrB,WAAK,gBAAe,uBAAkB,eAAlB,YAAgC;AACpD,WAAK,oBAAmB,uBAAkB,mBAAlB,YAAoC;AAC5D,WAAK,iBAAgB,uBAAkB,iBAAlB,YAAkC;AACvD,WAAK,mBAAkB,uBAAkB,mBAAlB,YAAoC;AAC3D,WAAK,eAAe,uBAAkB,eAAlB,YAA0C;AAC9D,WAAK,kBAAiB,uBAAkB,kBAAlB,YAAmC;AACzD,WAAK,6BACH,uBAAkB,4BAAlB,YAA6C;AAC/C,WAAK,mBACF,uBAAkB,mBAAlB,YAA8C;AACjD,WAAK,gBAAe,uBAAkB,iBAAlB,YAAkC;AACtD,WAAK,oBAAmB,uBAAkB,oBAAlB,YAAqC;AAC7D,WAAK,oBAAmB,uBAAkB,oBAAlB,YAAqC;AAC7D,WAAK,eAAc,uBAAkB,eAAlB,YAAgC;AACnD,WAAK,kBAAiB,uBAAkB,kBAAlB,YAAmC;AACzD,WAAK,iBACF,uBAAkB,iBAAlB,YAA4C;AAC/C,WAAK,iCACH,uBAAkB,gCAAlB,YAAiD;AACnD,WAAK,sBACH,uBAAkB,sBAAl
B,YAAuC;AACzC,WAAK,gBAAe,uBAAkB,iBAAlB,YAAkC;AACtD,WAAK,aAAY,uBAAkB,cAAlB,YAA+B;AAChD,WAAK,cAAa,uBAAkB,cAAlB,YAA+B;AACjD,WAAK,oBAAmB,uBAAkB,mBAAlB,YAAoC;AAC5D,WAAK,4BACF,uBAAkB,0BAAlB,YAAqD;AACxD,WAAK,uBACF,uBAAkB,sBAAlB,YAAiD;AACpD,WAAK,kBACF,uBAAkB,iBAAlB,YAA4C;AAC/C,WAAK,sBACH,uBAAkB,sBAAlB,YAAuC;AACzC,WAAK,kBAAiB,uBAAkB,kBAAlB,YAAmC;AACzD,WAAK,qBAAoB,uBAAkB,qBAAlB,YAAsC;AAC/D,WAAK,oBAAmB,uBAAkB,oBAAlB,YAAqC;AAC7D,WAAK,iBAAgB,uBAAkB,kBAAlB,YAAmC;AACxD,WAAK,iBACF,uBAAkB,iBAAlB,YAA4C;AAC/C,WAAK,gBAAgB,uBAAkB,gBAAlB,YAA2C;AAChE,WAAK,UAAS,uBAAkB,WAAlB,YAA4B;AAC1C,WAAK,4BACH,uBAAkB,0BAAlB,YAA2C;AAC7C,WAAK,6BACH,uBAAkB,2BAAlB,YAA4C;AAC9C,WAAK,eAAc,uBAAkB,eAAlB,YAAgC;AACnD,WAAK,cAAa,uBAAkB,cAAlB,YAA+B;AAAA,IACnD;AAEA,WAAO;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SACkE;AA1QtE;AA2QI,UAAM,eAAc,sBAAK,OAAO,cAAZ,mBAAuB,gBAAvB,4CAA0C,oBAAI,KAAK;AAEvE,UAAM,EAAE,OAAO,eAAe,IAAI,MAAM,UAAU;AAAA,MAChD,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,MACD,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,GAAG,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC1D;AAAA,MACA,MAAM;AAAA,QACJ,SAAS,QAAQ;AAAA,QACjB,QAAQ,QAAQ;AAAA,MAClB;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM;AAAA,MACJ,OAAO;AAAA,MACP;AAAA,MACA,UAAU;AAAA,IACZ,IAAI,MAAM,cAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,WAAW,eAAe;AAAA,MAC5B;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,OAAM,cAAS,SAAT,YAAiB;AAAA,MACvB,WACE,oBAAS,UAAT,mBAAgB,IAAI,WAAS;AAAA,QAC3B,MAAM,KAAK;AAAA,QACX,aAAa,KAAK;AAAA,QAClB,WAAW,KAAK;AAAA,MAClB,QAJA,YAIO,CAAC;AAAA,MACV,WAAU,cAAS,kBAAT,YAA0B;AAAA,MACpC,oBACE,oBAAS,mBAAT,aAA2B,oBAAS,UAAT,mBAAgB,GAAG,QAAnB,mBAAwB,QAAnD,YAA0D;AAAA,MAC5D;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACF;AAEA,IAAM,iCAAiCA,GAAE,OAAO;AAAA,EAC9C,YAAYA,GAAE,OAAO;AACvB,CAAC;AAED,IAAM,wCAAwCA,GAAE,OAAO;AAAA,EACrD,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACzB,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,OAAOA,GACJ;AAAA,IACCA,GAAE,OAAO;AAAA,MACP,OAAOA,GAAE,OAAO;AAAA,MAChB,KAAKA,GAAE,OAAO;AAAA,MACd,MAAMA,GAAE,OAAO;AAAA,IACjB,CAAC;AAAA,EACH,EACC,QAAQ;AAAA,EACX,gBAAgBA,GAAE,OAAO,EAAE,QAAQ;AACrC,CAAC;;;ADxTM,SAAS,iBACd,UAAsC,CAAC,GACnB;AACpB,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,WAAW;AAAA,MACxB,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC;AAAA,IACD,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,2BAA2B,CAAC,YAChC,IAAI,6BAA6B,SAAS;AAAA,IACxC,UAAU;AAAA,IACV,KAAK,CAAC,EAAE,KAAK,MAAM,6BAA6B,IAAI;AAAA,IACpD,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SAAU,SAAyC;AAClE,WAAO;AAAA,MACL,eAAe,yBAAyB,OAAO;AAAA,IACjD;AAAA,EACF;AAEA,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,SAAO;AACT;AAKO,IAAM,aAAa,iBAAiB;","names":["z","z"]}
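The embedded sources in the map above show that `createAssemblyAI` accepts `apiKey`, `headers`, and `fetch` settings, and that the transcription model reads provider-specific options through `parseProviderOptions` under the `assemblyai` key (camelCase fields such as `speakerLabels` or `summarization`, which the model maps onto the snake_case `/v2/transcript` request body). A minimal sketch of how those options might be passed, assuming `experimental_transcribe` in this canary line forwards `providerOptions` to the model; the audio file path is a placeholder, not something shipped with the package:

```ts
import { createAssemblyAI } from '@ai-sdk/assemblyai';
import { experimental_transcribe as transcribe } from 'ai';
import { readFile } from 'node:fs/promises';

// Provider configured with an explicit API key instead of relying on the
// ASSEMBLYAI_API_KEY environment variable (both paths appear in the source above).
const assemblyAI = createAssemblyAI({ apiKey: process.env.MY_ASSEMBLYAI_KEY });

const { text, segments, durationInSeconds } = await transcribe({
  model: assemblyAI.transcription('nano'),
  // 'meeting.mp3' is a hypothetical local file used only for illustration.
  audio: await readFile('meeting.mp3'),
  providerOptions: {
    // Keys mirror the camelCase schema embedded in the source map; the model
    // translates them to snake_case fields on the AssemblyAI request body.
    assemblyai: {
      speakerLabels: true,
      summarization: true,
      languageDetection: true,
    },
  },
});

console.log(text, segments.length, durationInSeconds);
```

As the source above indicates, the model first uploads the audio to `/v2/upload` and then submits the returned `upload_url` to `/v2/transcript`, so the call above is two HTTP requests under the hood.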
package/package.json ADDED
@@ -0,0 +1,64 @@
1
+ {
2
+ "name": "@ai-sdk/assemblyai",
3
+ "version": "0.0.1-canary.0",
4
+ "license": "Apache-2.0",
5
+ "sideEffects": false,
6
+ "main": "./dist/index.js",
7
+ "module": "./dist/index.mjs",
8
+ "types": "./dist/index.d.ts",
9
+ "files": [
10
+ "dist/**/*",
11
+ "CHANGELOG.md"
12
+ ],
13
+ "exports": {
14
+ "./package.json": "./package.json",
15
+ ".": {
16
+ "types": "./dist/index.d.ts",
17
+ "import": "./dist/index.mjs",
18
+ "require": "./dist/index.js"
19
+ }
20
+ },
21
+ "dependencies": {
22
+ "@ai-sdk/provider": "2.0.0-canary.11",
23
+ "@ai-sdk/provider-utils": "3.0.0-canary.12"
24
+ },
25
+ "devDependencies": {
26
+ "@types/node": "20.17.24",
27
+ "tsup": "^8",
28
+ "typescript": "5.6.3",
29
+ "zod": "3.23.8",
30
+ "@vercel/ai-tsconfig": "0.0.0"
31
+ },
32
+ "peerDependencies": {
33
+ "zod": "^3.0.0"
34
+ },
35
+ "engines": {
36
+ "node": ">=18"
37
+ },
38
+ "publishConfig": {
39
+ "access": "public"
40
+ },
41
+ "homepage": "https://sdk.vercel.ai/docs",
42
+ "repository": {
43
+ "type": "git",
44
+ "url": "git+https://github.com/vercel/ai.git"
45
+ },
46
+ "bugs": {
47
+ "url": "https://github.com/vercel/ai/issues"
48
+ },
49
+ "keywords": [
50
+ "ai"
51
+ ],
52
+ "scripts": {
53
+ "build": "pnpm clean && tsup --tsconfig tsconfig.build.json",
54
+ "build:watch": "pnpm clean && tsup --watch --tsconfig tsconfig.build.json",
55
+ "clean": "rm -rf dist *.tsbuildinfo",
56
+ "lint": "eslint \"./**/*.ts*\"",
57
+ "type-check": "tsc --build",
58
+ "prettier-check": "prettier --check \"./**/*.ts*\"",
59
+ "test": "pnpm test:node && pnpm test:edge",
60
+ "test:edge": "vitest --config vitest.edge.config.js --run",
61
+ "test:node": "vitest --config vitest.node.config.js --run",
62
+ "test:node:watch": "vitest --config vitest.node.config.js --watch"
63
+ }
64
+ }
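The `exports` map above exposes both an ESM entry (`"import": "./dist/index.mjs"`) and a CommonJS entry (`"require": "./dist/index.js"`). A small, assumed sketch of the CommonJS path on a Node >= 18 runtime (per the `engines` field), with `zod` ^3 installed alongside as required by `peerDependencies`:

```ts
// CommonJS consumer: Node resolves the "require" condition to ./dist/index.js.
// `require` typing here assumes @types/node is available in the consumer project.
const { assemblyai, createAssemblyAI } = require('@ai-sdk/assemblyai');

// The default provider instance is callable, and the factory is a plain function.
console.log(typeof assemblyai, typeof createAssemblyAI); // "function" "function"
```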