@jellypod/speech-sdk 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +149 -0
- package/dist/errors.d.ts +20 -0
- package/dist/errors.d.ts.map +1 -0
- package/dist/errors.js +25 -0
- package/dist/errors.js.map +1 -0
- package/dist/generate-speech.d.ts +12 -0
- package/dist/generate-speech.d.ts.map +1 -0
- package/dist/generate-speech.js +39 -0
- package/dist/generate-speech.js.map +1 -0
- package/dist/index.d.ts +6 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +3 -0
- package/dist/index.js.map +1 -0
- package/dist/provider-utils.d.ts +3 -0
- package/dist/provider-utils.d.ts.map +1 -0
- package/dist/provider-utils.js +22 -0
- package/dist/provider-utils.js.map +1 -0
- package/dist/providers/elevenlabs/elevenlabs-options.d.ts +2 -0
- package/dist/providers/elevenlabs/elevenlabs-options.d.ts.map +1 -0
- package/dist/providers/elevenlabs/elevenlabs-options.js +2 -0
- package/dist/providers/elevenlabs/elevenlabs-options.js.map +1 -0
- package/dist/providers/elevenlabs/elevenlabs-provider.d.ts +5 -0
- package/dist/providers/elevenlabs/elevenlabs-provider.d.ts.map +1 -0
- package/dist/providers/elevenlabs/elevenlabs-provider.js +11 -0
- package/dist/providers/elevenlabs/elevenlabs-provider.js.map +1 -0
- package/dist/providers/elevenlabs/elevenlabs-speech-model.d.ts +44 -0
- package/dist/providers/elevenlabs/elevenlabs-speech-model.d.ts.map +1 -0
- package/dist/providers/elevenlabs/elevenlabs-speech-model.js +82 -0
- package/dist/providers/elevenlabs/elevenlabs-speech-model.js.map +1 -0
- package/dist/providers/elevenlabs/index.d.ts +5 -0
- package/dist/providers/elevenlabs/index.d.ts.map +1 -0
- package/dist/providers/elevenlabs/index.js +3 -0
- package/dist/providers/elevenlabs/index.js.map +1 -0
- package/dist/providers/openai/index.d.ts +5 -0
- package/dist/providers/openai/index.d.ts.map +1 -0
- package/dist/providers/openai/index.js +3 -0
- package/dist/providers/openai/index.js.map +1 -0
- package/dist/providers/openai/openai-options.d.ts +2 -0
- package/dist/providers/openai/openai-options.d.ts.map +1 -0
- package/dist/providers/openai/openai-options.js +2 -0
- package/dist/providers/openai/openai-options.js.map +1 -0
- package/dist/providers/openai/openai-provider.d.ts +5 -0
- package/dist/providers/openai/openai-provider.d.ts.map +1 -0
- package/dist/providers/openai/openai-provider.js +11 -0
- package/dist/providers/openai/openai-provider.js.map +1 -0
- package/dist/providers/openai/openai-speech-model.d.ts +39 -0
- package/dist/providers/openai/openai-speech-model.d.ts.map +1 -0
- package/dist/providers/openai/openai-speech-model.js +54 -0
- package/dist/providers/openai/openai-speech-model.js.map +1 -0
- package/dist/resolve-provider.d.ts +3 -0
- package/dist/resolve-provider.d.ts.map +1 -0
- package/dist/resolve-provider.js +41 -0
- package/dist/resolve-provider.js.map +1 -0
- package/dist/speech-provider.d.ts +26 -0
- package/dist/speech-provider.d.ts.map +1 -0
- package/dist/speech-provider.js +2 -0
- package/dist/speech-provider.js.map +1 -0
- package/dist/speech-result.d.ts +22 -0
- package/dist/speech-result.d.ts.map +1 -0
- package/dist/speech-result.js +42 -0
- package/dist/speech-result.js.map +1 -0
- package/dist/types.d.ts +11 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +2 -0
- package/dist/types.js.map +1 -0
- package/package.json +48 -0
package/README.md
ADDED
|
@@ -0,0 +1,149 @@
|
|
|
1
|
+
# speech-sdk
|
|
2
|
+
|
|
3
|
+
A TypeScript SDK for text-to-speech with multiple provider support. Universal (Node, Edge, Browser).
|
|
4
|
+
|
|
5
|
+
## Install
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
npm install speech-sdk
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
### Using an AI Coding Assistant?
|
|
12
|
+
|
|
13
|
+
Add the speech-sdk skill to give your AI assistant full knowledge of this library:
|
|
14
|
+
|
|
15
|
+
```bash
|
|
16
|
+
npx skills add Jellypod-Inc/speech-sdk --skill use-speech-sdk
|
|
17
|
+
```
|
|
18
|
+
|
|
19
|
+
## Quick Start
|
|
20
|
+
|
|
21
|
+
```ts
|
|
22
|
+
import { generateSpeech } from 'speech-sdk';
|
|
23
|
+
|
|
24
|
+
const result = await generateSpeech({
|
|
25
|
+
model: 'openai/gpt-4o-mini-tts',
|
|
26
|
+
text: 'Hello from speech-sdk!',
|
|
27
|
+
voice: 'alloy',
|
|
28
|
+
});
|
|
29
|
+
|
|
30
|
+
// Access the audio
|
|
31
|
+
result.audio.uint8Array; // Uint8Array
|
|
32
|
+
result.audio.base64; // string (lazy-computed)
|
|
33
|
+
result.audio.mediaType; // "audio/mpeg"
|
|
34
|
+
```
|
|
35
|
+
|
|
36
|
+
## Supported Providers
|
|
37
|
+
|
|
38
|
+
Use unified `provider/model` strings. Passing just the provider name uses its default model.
|
|
39
|
+
|
|
40
|
+
| Provider | Model String | Default |
|
|
41
|
+
|---|---|---|
|
|
42
|
+
| OpenAI | `openai/gpt-4o-mini-tts` | Yes |
|
|
43
|
+
| OpenAI | `openai/tts-1` | |
|
|
44
|
+
| OpenAI | `openai/tts-1-hd` | |
|
|
45
|
+
| ElevenLabs | `elevenlabs/eleven_v3` | |
|
|
46
|
+
| ElevenLabs | `elevenlabs/eleven_multilingual_v2` | Yes |
|
|
47
|
+
| ElevenLabs | `elevenlabs/eleven_flash_v2_5` | |
|
|
48
|
+
| ElevenLabs | `elevenlabs/eleven_flash_v2` | |
|
|
49
|
+
|
|
50
|
+
```ts
|
|
51
|
+
generateSpeech({ model: 'openai/tts-1', text: '...', voice: 'alloy' });
|
|
52
|
+
generateSpeech({ model: 'openai', text: '...', voice: 'alloy' }); // uses default model
|
|
53
|
+
```
|
|
54
|
+
|
|
55
|
+
Provider-specific API parameters can be passed via `providerOptions` — these are sent directly to the provider's API using the API's own field names.
|
|
56
|
+
|
|
57
|
+
## Custom Configuration
|
|
58
|
+
|
|
59
|
+
Use factory functions when you need custom API keys, base URLs, or fetch implementations:
|
|
60
|
+
|
|
61
|
+
```ts
|
|
62
|
+
import { generateSpeech } from 'speech-sdk';
|
|
63
|
+
import { createOpenAI } from 'speech-sdk/openai';
|
|
64
|
+
import { createElevenLabs } from 'speech-sdk/elevenlabs';
|
|
65
|
+
|
|
66
|
+
const myOpenAI = createOpenAI({
|
|
67
|
+
apiKey: 'sk-...',
|
|
68
|
+
baseURL: 'https://my-proxy.com/v1',
|
|
69
|
+
});
|
|
70
|
+
|
|
71
|
+
const myElevenLabs = createElevenLabs({
|
|
72
|
+
apiKey: '...',
|
|
73
|
+
baseURL: 'https://my-proxy.com',
|
|
74
|
+
});
|
|
75
|
+
|
|
76
|
+
const result = await generateSpeech({
|
|
77
|
+
model: myOpenAI('gpt-4o-mini-tts'),
|
|
78
|
+
text: 'Hello!',
|
|
79
|
+
voice: 'alloy',
|
|
80
|
+
});
|
|
81
|
+
```
|
|
82
|
+
|
|
83
|
+
### API Key Resolution
|
|
84
|
+
|
|
85
|
+
When using string models (e.g., `'openai/tts-1'`), API keys are resolved from environment variables:
|
|
86
|
+
|
|
87
|
+
| Provider | Environment Variable |
|
|
88
|
+
|---|---|
|
|
89
|
+
| OpenAI | `OPENAI_API_KEY` |
|
|
90
|
+
| ElevenLabs | `ELEVENLABS_API_KEY` |
|
|
91
|
+
|
|
92
|
+
Factory functions accept an explicit `apiKey` option which takes precedence over environment variables.
|
|
93
|
+
|
|
94
|
+
## Options
|
|
95
|
+
|
|
96
|
+
```ts
|
|
97
|
+
generateSpeech({
|
|
98
|
+
model: string | ResolvedModel, // required
|
|
99
|
+
text: string, // required
|
|
100
|
+
voice: string, // required
|
|
101
|
+
providerOptions?: object, // provider-specific API params
|
|
102
|
+
maxRetries?: number, // default: 2 (retries on 5xx/network errors)
|
|
103
|
+
abortSignal?: AbortSignal, // cancel the request
|
|
104
|
+
headers?: Record<string, string>, // additional HTTP headers
|
|
105
|
+
});
|
|
106
|
+
```
|
|
107
|
+
|
|
108
|
+
## Result
|
|
109
|
+
|
|
110
|
+
```ts
|
|
111
|
+
interface SpeechResult {
|
|
112
|
+
audio: {
|
|
113
|
+
uint8Array: Uint8Array; // raw audio bytes
|
|
114
|
+
base64: string; // base64 encoded (lazy)
|
|
115
|
+
mediaType: string; // e.g. "audio/mpeg"
|
|
116
|
+
};
|
|
117
|
+
providerMetadata?: Record<string, unknown>;
|
|
118
|
+
}
|
|
119
|
+
```
|
|
120
|
+
|
|
121
|
+
## Error Handling
|
|
122
|
+
|
|
123
|
+
```ts
|
|
124
|
+
import { generateSpeech, ApiError, SpeechSDKError } from 'speech-sdk';
|
|
125
|
+
|
|
126
|
+
try {
|
|
127
|
+
const result = await generateSpeech({ ... });
|
|
128
|
+
} catch (error) {
|
|
129
|
+
if (error instanceof ApiError) {
|
|
130
|
+
console.log(error.statusCode); // 401
|
|
131
|
+
console.log(error.model); // "openai/gpt-4o-mini-tts"
|
|
132
|
+
console.log(error.responseBody);
|
|
133
|
+
}
|
|
134
|
+
}
|
|
135
|
+
```
|
|
136
|
+
|
|
137
|
+
| Error | When |
|
|
138
|
+
|---|---|
|
|
139
|
+
| `ApiError` | Provider API returns a non-2xx response |
|
|
140
|
+
| `NoSpeechGeneratedError` | Provider returned empty audio |
|
|
141
|
+
| `SpeechSDKError` | Base class for all errors |
|
|
142
|
+
|
|
143
|
+
## Retry
|
|
144
|
+
|
|
145
|
+
Built-in retry with exponential backoff via [p-retry](https://github.com/sindresorhus/p-retry). Retries on 5xx and network errors. Does not retry 4xx errors. Default: 2 retries.
|
|
146
|
+
|
|
147
|
+
## License
|
|
148
|
+
|
|
149
|
+
MIT
|
package/dist/errors.d.ts
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
/**
 * Base class for every error thrown by the speech SDK.
 * Catch this to handle any SDK failure in one place.
 */
export declare class SpeechSDKError extends Error {
    constructor(message: string, options?: {
        cause?: unknown;
    });
}
/**
 * Thrown when a provider API responds with a non-2xx status.
 */
export declare class ApiError extends SpeechSDKError {
    /** HTTP status code returned by the provider API. */
    readonly statusCode: number;
    /** Raw response body text, when it could be read. */
    readonly responseBody?: unknown;
    /** The `provider/model` string the failing request targeted, e.g. "openai/gpt-4o-mini-tts". */
    readonly model: string;
    constructor(message: string, options: {
        statusCode: number;
        model: string;
        responseBody?: unknown;
        cause?: unknown;
    });
}
/**
 * Thrown when the provider request succeeded but returned an empty audio payload.
 */
export declare class NoSpeechGeneratedError extends SpeechSDKError {
    constructor();
}
|
|
20
|
+
//# sourceMappingURL=errors.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"errors.d.ts","sourceRoot":"","sources":["../src/errors.ts"],"names":[],"mappings":"AAAA,qBAAa,cAAe,SAAQ,KAAK;gBAC3B,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,KAAK,CAAC,EAAE,OAAO,CAAA;KAAE;CAI3D;AAED,qBAAa,QAAS,SAAQ,cAAc;IAC1C,QAAQ,CAAC,UAAU,EAAE,MAAM,CAAC;IAC5B,QAAQ,CAAC,YAAY,CAAC,EAAE,OAAO,CAAC;IAChC,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAC;gBAGrB,OAAO,EAAE,MAAM,EACf,OAAO,EAAE;QACP,UAAU,EAAE,MAAM,CAAC;QACnB,KAAK,EAAE,MAAM,CAAC;QACd,YAAY,CAAC,EAAE,OAAO,CAAC;QACvB,KAAK,CAAC,EAAE,OAAO,CAAC;KACjB;CAQJ;AAED,qBAAa,sBAAuB,SAAQ,cAAc;;CAKzD"}
|
package/dist/errors.js
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
/**
 * Base class for every error thrown by the speech SDK.
 *
 * Forwards the standard `Error` options object so callers can attach a
 * `cause`, and overrides `name` so logs identify SDK errors at a glance.
 */
export class SpeechSDKError extends Error {
    /**
     * @param {string} message - Human-readable description of the failure.
     * @param {{ cause?: unknown }} [options] - Standard Error options; `cause` carries the underlying error.
     */
    constructor(message, options) {
        super(message, options);
        this.name = 'SpeechSDKError';
    }
}
|
|
7
|
+
/**
 * Error raised when a provider API responds with a non-2xx status.
 *
 * Carries the HTTP status, the `provider/model` string that was targeted,
 * and the raw response body (when it could be read) for debugging.
 */
export class ApiError extends SpeechSDKError {
    statusCode;
    responseBody;
    model;
    /**
     * @param {string} message - Human-readable description of the failure.
     * @param {{ statusCode: number, model: string, responseBody?: unknown, cause?: unknown }} options
     */
    constructor(message, options) {
        const { statusCode, model, responseBody, cause } = options;
        super(message, { cause });
        this.name = 'ApiError';
        this.statusCode = statusCode;
        this.model = model;
        this.responseBody = responseBody;
    }
}
|
|
19
|
+
/**
 * Error raised when the provider request succeeded but the audio payload
 * came back empty. Carries a fixed message; there is nothing caller-specific
 * to report beyond the condition itself.
 */
export class NoSpeechGeneratedError extends SpeechSDKError {
    constructor() {
        super('No speech audio was generated.');
        this.name = 'NoSpeechGeneratedError';
    }
}
|
|
25
|
+
//# sourceMappingURL=errors.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"errors.js","sourceRoot":"","sources":["../src/errors.ts"],"names":[],"mappings":"AAAA,MAAM,OAAO,cAAe,SAAQ,KAAK;IACvC,YAAY,OAAe,EAAE,OAA6B;QACxD,KAAK,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QACxB,IAAI,CAAC,IAAI,GAAG,gBAAgB,CAAC;IAC/B,CAAC;CACF;AAED,MAAM,OAAO,QAAS,SAAQ,cAAc;IACjC,UAAU,CAAS;IACnB,YAAY,CAAW;IACvB,KAAK,CAAS;IAEvB,YACE,OAAe,EACf,OAKC;QAED,KAAK,CAAC,OAAO,EAAE,EAAE,KAAK,EAAE,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;QACzC,IAAI,CAAC,IAAI,GAAG,UAAU,CAAC;QACvB,IAAI,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC;QACrC,IAAI,CAAC,KAAK,GAAG,OAAO,CAAC,KAAK,CAAC;QAC3B,IAAI,CAAC,YAAY,GAAG,OAAO,CAAC,YAAY,CAAC;IAC3C,CAAC;CACF;AAED,MAAM,OAAO,sBAAuB,SAAQ,cAAc;IACxD;QACE,KAAK,CAAC,gCAAgC,CAAC,CAAC;QACxC,IAAI,CAAC,IAAI,GAAG,wBAAwB,CAAC;IACvC,CAAC;CACF"}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import type { ResolvedModel } from './speech-provider.js';
|
|
2
|
+
import type { SpeechResult } from './speech-result.js';
|
|
3
|
+
/**
 * Generates speech audio from text.
 *
 * `model` accepts either a unified `"provider/model"` string (the API key is
 * then resolved from environment variables) or a `ResolvedModel` produced by
 * a provider factory such as `createOpenAI(...)` / `createElevenLabs(...)`.
 *
 * @typeParam T - Shape of the provider-specific options object forwarded to the provider API.
 */
export declare function generateSpeech<T extends Record<string, unknown> = Record<string, unknown>>(options: {
    model: string | ResolvedModel<T>;
    text: string;
    voice: string;
    /** Provider-specific API parameters, sent using the API's own field names. */
    providerOptions?: T;
    /** Retry count for 5xx/network failures; default 2. 4xx responses are not retried. */
    maxRetries?: number;
    /** Cancels the underlying HTTP request and the retry loop. */
    abortSignal?: AbortSignal;
    /** Additional HTTP headers merged into the provider request. */
    headers?: Record<string, string>;
}): Promise<SpeechResult>;
|
|
12
|
+
//# sourceMappingURL=generate-speech.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"generate-speech.d.ts","sourceRoot":"","sources":["../src/generate-speech.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AAKvD,wBAAsB,cAAc,CAClC,CAAC,SAAS,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAC3D,OAAO,EAAE;IACT,KAAK,EAAE,MAAM,GAAG,aAAa,CAAC,CAAC,CAAC,CAAC;IACjC,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,EAAE,MAAM,CAAC;IACd,eAAe,CAAC,EAAE,CAAC,CAAC;IACpB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,WAAW,CAAC,EAAE,WAAW,CAAC;IAC1B,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CAClC,GAAG,OAAO,CAAC,YAAY,CAAC,CA2CxB"}
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
import pRetry from 'p-retry';
|
|
2
|
+
import { DefaultGeneratedAudioFile } from './speech-result.js';
|
|
3
|
+
import { NoSpeechGeneratedError, ApiError } from './errors.js';
|
|
4
|
+
import { resolveModel } from './resolve-provider.js';
|
|
5
|
+
/**
 * Generates speech audio from text via the resolved provider.
 *
 * Resolves `options.model` (a `"provider/model"` string or a ResolvedModel),
 * invokes the provider with retry on transient failures, and wraps the raw
 * bytes in a lazily-encoding audio file object.
 *
 * @param {object} options - model, text, voice, providerOptions, maxRetries, abortSignal, headers.
 * @returns {Promise<{ audio: DefaultGeneratedAudioFile, providerMetadata?: Record<string, unknown> }>}
 * @throws {NoSpeechGeneratedError} When the provider returns zero audio bytes.
 * @throws {ApiError} When the provider API responds with a non-2xx status (4xx are not retried).
 */
export async function generateSpeech(options) {
    const { provider, modelId } = resolveModel(options.model);
    // `??` (not a destructuring default) so an explicit null also falls back to 2.
    const retries = options.maxRetries ?? 2;

    // 4xx responses are caller errors and will not change on retry;
    // everything else (5xx, network failures) is considered transient.
    const isRetryable = (error) =>
        !(error instanceof ApiError && error.statusCode < 500);

    const attempt = () =>
        provider.generate({
            modelId,
            text: options.text,
            voice: options.voice,
            providerOptions: options.providerOptions,
            abortSignal: options.abortSignal,
            headers: options.headers,
        });

    const result = await pRetry(attempt, {
        retries,
        signal: options.abortSignal,
        shouldRetry: ({ error }) => isRetryable(error),
    });

    if (result.audio.length === 0) {
        throw new NoSpeechGeneratedError();
    }

    return {
        audio: new DefaultGeneratedAudioFile({
            data: result.audio,
            mediaType: result.mediaType,
        }),
        providerMetadata: result.providerMetadata,
    };
}
|
|
39
|
+
//# sourceMappingURL=generate-speech.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"generate-speech.js","sourceRoot":"","sources":["../src/generate-speech.ts"],"names":[],"mappings":"AAAA,OAAO,MAAsB,MAAM,SAAS,CAAC;AAG7C,OAAO,EAAE,yBAAyB,EAAE,MAAM,oBAAoB,CAAC;AAC/D,OAAO,EAAE,sBAAsB,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AAC/D,OAAO,EAAE,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAErD,MAAM,CAAC,KAAK,UAAU,cAAc,CAElC,OAQD;IACC,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,eAAe,EAAE,WAAW,EAAE,OAAO,EAAE,GAAG,OAAO,CAAC;IAC9E,MAAM,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,CAAC,CAAC;IAE3C,MAAM,QAAQ,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC;IAErC,MAAM,MAAM,GAAG,MAAM,MAAM,CACzB,GAAG,EAAE,CACH,QAAQ,CAAC,QAAQ,CAAC,QAAQ,CAAC;QACzB,OAAO,EAAE,QAAQ,CAAC,OAAO;QACzB,IAAI;QACJ,KAAK;QACL,eAAe;QACf,WAAW;QACX,OAAO;KACR,CAAC,EACJ;QACE,OAAO,EAAE,UAAU;QACnB,MAAM,EAAE,WAAW;QACnB,WAAW,EAAE,CAAC,EAAE,KAAK,EAAE,EAAE,EAAE;YACzB,IAAI,KAAK,YAAY,QAAQ,IAAI,KAAK,CAAC,UAAU,GAAG,GAAG,EAAE,CAAC;gBACxD,OAAO,KAAK,CAAC;YACf,CAAC;YACD,OAAO,IAAI,CAAC;QACd,CAAC;KACF,CACF,CAAC;IAEF,MAAM,SAAS,GAAG,MAAM,CAAC,KAAK,CAAC;IAE/B,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAC3B,MAAM,IAAI,sBAAsB,EAAE,CAAC;IACrC,CAAC;IAED,MAAM,KAAK,GAAG,IAAI,yBAAyB,CAAC;QAC1C,IAAI,EAAE,SAAS;QACf,SAAS,EAAE,MAAM,CAAC,SAAS;KAC5B,CAAC,CAAC;IAEH,OAAO;QACL,KAAK;QACL,gBAAgB,EAAE,MAAM,CAAC,gBAAgB;KAC1C,CAAC;AACJ,CAAC"}
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
export { generateSpeech } from './generate-speech.js';
|
|
2
|
+
export { SpeechSDKError, ApiError, NoSpeechGeneratedError } from './errors.js';
|
|
3
|
+
export type { SpeechProvider, ResolvedModel, ModelInfo } from './speech-provider.js';
|
|
4
|
+
export type { SpeechResult, GeneratedAudioFile } from './speech-result.js';
|
|
5
|
+
export type { GenerateSpeechOptions } from './types.js';
|
|
6
|
+
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,sBAAsB,CAAC;AACtD,OAAO,EAAE,cAAc,EAAE,QAAQ,EAAE,sBAAsB,EAAE,MAAM,aAAa,CAAC;AAC/E,YAAY,EAAE,cAAc,EAAE,aAAa,EAAE,SAAS,EAAE,MAAM,sBAAsB,CAAC;AACrF,YAAY,EAAE,YAAY,EAAE,kBAAkB,EAAE,MAAM,oBAAoB,CAAC;AAC3E,YAAY,EAAE,qBAAqB,EAAE,MAAM,YAAY,CAAC"}
|
package/dist/index.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,sBAAsB,CAAC;AACtD,OAAO,EAAE,cAAc,EAAE,QAAQ,EAAE,sBAAsB,EAAE,MAAM,aAAa,CAAC"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"provider-utils.d.ts","sourceRoot":"","sources":["../src/provider-utils.ts"],"names":[],"mappings":"AAEA,wBAAgB,aAAa,CAC3B,MAAM,EAAE,MAAM,GAAG,SAAS,EAC1B,MAAM,EAAE,MAAM,EACd,YAAY,EAAE,MAAM,GACnB,MAAM,CAYR;AAED,wBAAsB,mBAAmB,CACvC,QAAQ,EAAE,QAAQ,EAClB,KAAK,EAAE,MAAM,GACZ,OAAO,CAAC,IAAI,CAAC,CASf"}
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import { ApiError } from './errors.js';
|
|
2
|
+
/**
 * Resolves a provider API key, preferring an explicitly supplied value over
 * the named environment variable.
 *
 * The `typeof process` guard keeps this safe in non-Node runtimes (Edge,
 * browsers) where `process` does not exist. Note that an empty-string
 * `stored` value is treated as missing and triggers the error, matching the
 * falsy check below.
 *
 * @param {string | undefined} stored - Key passed explicitly via config, if any.
 * @param {string} envVar - Environment variable to consult as a fallback.
 * @param {string} providerName - Human-readable provider name for the error message.
 * @returns {string} The resolved, non-empty API key.
 * @throws {Error} When no usable key is found.
 */
export function resolveApiKey(stored, envVar, providerName) {
    const fromEnv =
        typeof process === 'undefined' ? undefined : process.env?.[envVar];
    const key = stored ?? fromEnv;
    if (!key) {
        throw new Error(`${providerName} API key is required. Pass it via apiKey option or set the ${envVar} environment variable.`);
    }
    return key;
}
|
|
12
|
+
/**
 * Throws an ApiError when a fetch Response indicates failure; no-op otherwise.
 *
 * Best-effort reads the response body for diagnostics — a body read failure
 * is deliberately swallowed so it never masks the original HTTP error.
 *
 * @param {Response} response - The fetch response to inspect.
 * @param {string} model - `provider/model` string for error attribution.
 * @throws {ApiError} When `response.ok` is false.
 */
export async function handleErrorResponse(response, model) {
    if (response.ok) {
        return;
    }
    let responseBody;
    try {
        responseBody = await response.text();
    } catch {
        responseBody = undefined;
    }
    throw new ApiError(`API error: ${response.status}`, {
        statusCode: response.status,
        model,
        responseBody,
    });
}
|
|
22
|
+
//# sourceMappingURL=provider-utils.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"provider-utils.js","sourceRoot":"","sources":["../src/provider-utils.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AAEvC,MAAM,UAAU,aAAa,CAC3B,MAA0B,EAC1B,MAAc,EACd,YAAoB;IAEpB,MAAM,GAAG,GACP,MAAM;QACN,CAAC,OAAO,OAAO,KAAK,WAAW;YAC7B,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC;YACvB,CAAC,CAAC,SAAS,CAAC,CAAC;IACjB,IAAI,CAAC,GAAG,EAAE,CAAC;QACT,MAAM,IAAI,KAAK,CACb,GAAG,YAAY,8DAA8D,MAAM,wBAAwB,CAC5G,CAAC;IACJ,CAAC;IACD,OAAO,GAAG,CAAC;AACb,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,mBAAmB,CACvC,QAAkB,EAClB,KAAa;IAEb,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,CAAC;QACjB,MAAM,YAAY,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,SAAS,CAAC,CAAC;QAClE,MAAM,IAAI,QAAQ,CAAC,cAAc,QAAQ,CAAC,MAAM,EAAE,EAAE;YAClD,UAAU,EAAE,QAAQ,CAAC,MAAM;YAC3B,KAAK;YACL,YAAY;SACb,CAAC,CAAC;IACL,CAAC;AACH,CAAC"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"elevenlabs-options.d.ts","sourceRoot":"","sources":["../../../src/providers/elevenlabs/elevenlabs-options.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,uBAAuB,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"elevenlabs-options.js","sourceRoot":"","sources":["../../../src/providers/elevenlabs/elevenlabs-options.ts"],"names":[],"mappings":""}
|
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
import { type ElevenLabsSpeechProviderConfig } from './elevenlabs-speech-model.js';
|
|
2
|
+
import type { ResolvedModel } from '../../speech-provider.js';
|
|
3
|
+
import type { ElevenLabsSpeechOptions } from './elevenlabs-options.js';
|
|
4
|
+
/**
 * Creates an ElevenLabs provider factory with custom configuration
 * (API key, base URL, fetch implementation). Calling the returned function
 * with an optional model id yields a ResolvedModel; omitting the id selects
 * the provider's default model.
 */
export declare function createElevenLabs(config?: ElevenLabsSpeechProviderConfig): (modelId?: string) => ResolvedModel<ElevenLabsSpeechOptions>;
|
|
5
|
+
//# sourceMappingURL=elevenlabs-provider.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"elevenlabs-provider.d.ts","sourceRoot":"","sources":["../../../src/providers/elevenlabs/elevenlabs-provider.ts"],"names":[],"mappings":"AAAA,OAAO,EAEL,KAAK,8BAA8B,EACpC,MAAM,8BAA8B,CAAC;AACtC,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AAC9D,OAAO,KAAK,EAAE,uBAAuB,EAAE,MAAM,yBAAyB,CAAC;AAEvE,wBAAgB,gBAAgB,CAAC,MAAM,GAAE,8BAAmC,IAIxE,UAAU,MAAM,KACf,aAAa,CAAC,uBAAuB,CAAC,CAM1C"}
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
import { ElevenLabsSpeechProvider, } from './elevenlabs-speech-model.js';
|
|
2
|
+
/**
 * Creates an ElevenLabs provider factory bound to the given configuration.
 *
 * A single provider instance is shared by every model the factory resolves,
 * so config (API key, base URL, fetch) is applied once.
 *
 * @param {object} [config] - apiKey / baseURL / fetch overrides.
 * @returns {(modelId?: string) => { provider: ElevenLabsSpeechProvider, modelId: string }}
 *   Factory that resolves a model id (falling back to the provider default).
 */
export function createElevenLabs(config = {}) {
    const provider = new ElevenLabsSpeechProvider(config);
    const elevenlabs = (modelId) => ({
        provider,
        modelId: modelId ?? provider.defaultModel,
    });
    return elevenlabs;
}
|
|
11
|
+
//# sourceMappingURL=elevenlabs-provider.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"elevenlabs-provider.js","sourceRoot":"","sources":["../../../src/providers/elevenlabs/elevenlabs-provider.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,wBAAwB,GAEzB,MAAM,8BAA8B,CAAC;AAItC,MAAM,UAAU,gBAAgB,CAAC,SAAyC,EAAE;IAC1E,MAAM,QAAQ,GAAG,IAAI,wBAAwB,CAAC,MAAM,CAAC,CAAC;IAEtD,OAAO,SAAS,UAAU,CACxB,OAAgB;QAEhB,OAAO;YACL,QAAQ;YACR,OAAO,EAAE,OAAO,IAAI,QAAQ,CAAC,YAAY;SAC1C,CAAC;IACJ,CAAC,CAAC;AACJ,CAAC"}
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
import type { SpeechProvider } from '../../speech-provider.js';
|
|
2
|
+
import type { ElevenLabsSpeechOptions } from './elevenlabs-options.js';
|
|
3
|
+
/** Configuration accepted by `createElevenLabs` / `ElevenLabsSpeechProvider`. */
export interface ElevenLabsSpeechProviderConfig {
    /** ElevenLabs API key; falls back to the `ELEVENLABS_API_KEY` env var when omitted. */
    apiKey?: string;
    /** API origin override; default is `https://api.elevenlabs.io`. */
    baseURL?: string;
    /** Custom fetch implementation (e.g. for proxies or testing). */
    fetch?: typeof globalThis.fetch;
}
/**
 * Speech provider backed by the ElevenLabs text-to-speech REST API.
 * Requires a voice ID to be supplied per request.
 */
export declare class ElevenLabsSpeechProvider implements SpeechProvider<string, ElevenLabsSpeechOptions> {
    readonly id = "elevenlabs";
    readonly defaultModel = "eleven_multilingual_v2";
    private static readonly V2_LANGUAGES;
    private static readonly FLASH_V2_5_LANGUAGES;
    private static readonly V3_LANGUAGES;
    /** Model catalog listing the language codes each ElevenLabs model supports. */
    readonly models: readonly [{
        readonly id: "eleven_v3";
        readonly languages: readonly ["af", "ar", "hy", "as", "az", "be", "bn", "bs", "bg", "ca", "ceb", "ny", "hr", "cs", "da", "nl", "en", "et", "fil", "fi", "fr", "gl", "ka", "de", "el", "gu", "ha", "he", "hi", "hu", "is", "id", "ga", "it", "ja", "jv", "kn", "kk", "ky", "ko", "lv", "ln", "lt", "lb", "mk", "ms", "ml", "zh", "mr", "ne", "no", "ps", "fa", "pl", "pt", "pa", "ro", "ru", "sr", "sd", "sk", "sl", "so", "es", "sw", "sv", "ta", "te", "th", "tr", "uk", "ur", "vi", "cy"];
    }, {
        readonly id: "eleven_multilingual_v2";
        readonly languages: readonly ["ar", "bg", "cs", "da", "de", "el", "en", "es", "fi", "fil", "fr", "he", "hi", "hr", "id", "it", "ja", "ko", "ms", "nl", "pl", "pt", "ro", "ru", "sk", "sv", "ta", "uk", "zh"];
    }, {
        readonly id: "eleven_flash_v2_5";
        readonly languages: readonly ["ar", "bg", "cs", "da", "de", "el", "en", "es", "fi", "fil", "fr", "he", "hi", "hr", "id", "it", "ja", "ko", "ms", "nl", "pl", "pt", "ro", "ru", "sk", "sv", "ta", "uk", "zh", "hu", "no", "vi"];
    }, {
        readonly id: "eleven_flash_v2";
        readonly languages: readonly ["en"];
    }];
    private readonly apiKey;
    private readonly baseURL;
    private readonly fetchFn;
    constructor(config: ElevenLabsSpeechProviderConfig);
    /**
     * Performs one text-to-speech request against the ElevenLabs API.
     * `voice` is required by this provider; omitting it throws.
     */
    generate(options: {
        modelId: string;
        text: string;
        voice?: string;
        providerOptions?: ElevenLabsSpeechOptions;
        abortSignal?: AbortSignal;
        headers?: Record<string, string>;
    }): Promise<{
        audio: Uint8Array;
        mediaType: string;
        providerMetadata?: Record<string, unknown>;
    }>;
}
|
|
44
|
+
//# sourceMappingURL=elevenlabs-speech-model.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"elevenlabs-speech-model.d.ts","sourceRoot":"","sources":["../../../src/providers/elevenlabs/elevenlabs-speech-model.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,0BAA0B,CAAC;AAG/D,OAAO,KAAK,EAAE,uBAAuB,EAAE,MAAM,yBAAyB,CAAC;AAEvE,MAAM,WAAW,8BAA8B;IAC7C,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,KAAK,CAAC,EAAE,OAAO,UAAU,CAAC,KAAK,CAAC;CACjC;AAED,qBAAa,wBACX,YAAW,cAAc,CAAC,MAAM,EAAE,uBAAuB,CAAC;IAE1D,QAAQ,CAAC,EAAE,gBAAgB;IAC3B,QAAQ,CAAC,YAAY,4BAA4B;IAEjD,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,YAAY,CAIzB;IAEX,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,oBAAoB,CAEjC;IAEX,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,YAAY,CASzB;IAEX,QAAQ,CAAC,MAAM;;;;;;;;;;;;OAKJ;IAEX,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAqB;IAC5C,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAS;IACjC,OAAO,CAAC,QAAQ,CAAC,OAAO,CAA0B;gBAEtC,MAAM,EAAE,8BAA8B;IAM5C,QAAQ,CAAC,OAAO,EAAE;QACtB,OAAO,EAAE,MAAM,CAAC;QAChB,IAAI,EAAE,MAAM,CAAC;QACb,KAAK,CAAC,EAAE,MAAM,CAAC;QACf,eAAe,CAAC,EAAE,uBAAuB,CAAC;QAC1C,WAAW,CAAC,EAAE,WAAW,CAAC;QAC1B,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;KAClC,GAAG,OAAO,CAAC;QACV,KAAK,EAAE,UAAU,CAAC;QAClB,SAAS,EAAE,MAAM,CAAC;QAClB,gBAAgB,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;KAC5C,CAAC;CAkDH"}
|
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
import { SpeechSDKError } from '../../errors.js';
|
|
2
|
+
import { resolveApiKey, handleErrorResponse } from '../../provider-utils.js';
|
|
3
|
+
/**
 * Speech provider backed by the ElevenLabs text-to-speech REST API.
 *
 * Posts to `/v1/text-to-speech/{voiceId}` with the text and model id in the
 * JSON body; the three ElevenLabs query-level options (output_format,
 * enable_logging, optimize_streaming_latency) are lifted out of
 * providerOptions into the query string, and everything else is forwarded
 * in the request body untouched.
 */
export class ElevenLabsSpeechProvider {
    id = 'elevenlabs';
    defaultModel = 'eleven_multilingual_v2';
    // Languages supported by eleven_multilingual_v2.
    static V2_LANGUAGES = [
        'ar', 'bg', 'cs', 'da', 'de', 'el', 'en', 'es', 'fi', 'fil',
        'fr', 'he', 'hi', 'hr', 'id', 'it', 'ja', 'ko', 'ms',
        'nl', 'pl', 'pt', 'ro', 'ru', 'sk', 'sv', 'ta', 'uk', 'zh',
    ];
    // flash_v2_5 is a strict superset of the v2 language set.
    static FLASH_V2_5_LANGUAGES = [
        ...ElevenLabsSpeechProvider.V2_LANGUAGES, 'hu', 'no', 'vi',
    ];
    static V3_LANGUAGES = [
        'af', 'ar', 'hy', 'as', 'az', 'be', 'bn', 'bs', 'bg', 'ca',
        'ceb', 'ny', 'hr', 'cs', 'da', 'nl', 'en', 'et', 'fil', 'fi',
        'fr', 'gl', 'ka', 'de', 'el', 'gu', 'ha', 'he', 'hi', 'hu',
        'is', 'id', 'ga', 'it', 'ja', 'jv', 'kn', 'kk', 'ky', 'ko',
        'lv', 'ln', 'lt', 'lb', 'mk', 'ms', 'ml', 'zh', 'mr', 'ne',
        'no', 'ps', 'fa', 'pl', 'pt', 'pa', 'ro', 'ru', 'sr', 'sd',
        'sk', 'sl', 'so', 'es', 'sw', 'sv', 'ta', 'te', 'th', 'tr',
        'uk', 'ur', 'vi', 'cy',
    ];
    // Model catalog with the language codes each model supports.
    models = [
        { id: 'eleven_v3', languages: ElevenLabsSpeechProvider.V3_LANGUAGES },
        { id: 'eleven_multilingual_v2', languages: ElevenLabsSpeechProvider.V2_LANGUAGES },
        { id: 'eleven_flash_v2_5', languages: ElevenLabsSpeechProvider.FLASH_V2_5_LANGUAGES },
        { id: 'eleven_flash_v2', languages: ['en'] },
    ];
    apiKey;
    baseURL;
    fetchFn;
    /**
     * @param {object} config - apiKey (optional; env fallback happens at request
     *   time), baseURL (defaults to the public API origin), fetch (defaults to
     *   the global fetch).
     */
    constructor(config) {
        this.apiKey = config.apiKey;
        this.baseURL = config.baseURL ?? 'https://api.elevenlabs.io';
        this.fetchFn = config.fetch ?? globalThis.fetch;
    }
    /**
     * Performs one text-to-speech request.
     *
     * @returns {Promise<{ audio: Uint8Array, mediaType: string, providerMetadata?: object }>}
     * @throws {SpeechSDKError} When no voice ID is supplied (ElevenLabs requires one).
     * @throws {ApiError} Via handleErrorResponse on non-2xx responses.
     */
    async generate(options) {
        const { modelId, text, voice, abortSignal, headers } = options;
        if (!voice) {
            throw new SpeechSDKError('ElevenLabs requires a voice ID. Pass it via the voice option.');
        }
        // Split query-level options from the rest, which goes into the JSON body.
        const { output_format, enable_logging, optimize_streaming_latency, ...rest } =
            options.providerOptions ?? {};
        const requestBody = {
            ...rest,
            text,
            model_id: modelId,
        };
        const query = new URLSearchParams();
        const queryEntries = [
            ['output_format', output_format],
            ['enable_logging', enable_logging],
            ['optimize_streaming_latency', optimize_streaming_latency],
        ];
        for (const [name, value] of queryEntries) {
            if (value != null) {
                query.set(name, String(value));
            }
        }
        const search = query.toString();
        const endpoint =
            `${this.baseURL}/v1/text-to-speech/${voice}` + (search ? `?${search}` : '');
        const response = await this.fetchFn(endpoint, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                // Key resolution deferred to request time so env-var keys are picked up.
                'xi-api-key': resolveApiKey(this.apiKey, 'ELEVENLABS_API_KEY', 'ElevenLabs'),
                ...headers,
            },
            body: JSON.stringify(requestBody),
            signal: abortSignal,
        });
        await handleErrorResponse(response, `elevenlabs/${modelId}`);
        const bytes = new Uint8Array(await response.arrayBuffer());
        const requestId = response.headers.get('request-id');
        return {
            audio: bytes,
            mediaType: response.headers.get('content-type') ?? 'audio/mpeg',
            providerMetadata: requestId ? { requestId } : undefined,
        };
    }
}
|
|
82
|
+
//# sourceMappingURL=elevenlabs-speech-model.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"elevenlabs-speech-model.js","sourceRoot":"","sources":["../../../src/providers/elevenlabs/elevenlabs-speech-model.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,cAAc,EAAE,MAAM,iBAAiB,CAAC;AACjD,OAAO,EAAE,aAAa,EAAE,mBAAmB,EAAE,MAAM,yBAAyB,CAAC;AAS7E,MAAM,OAAO,wBAAwB;IAG1B,EAAE,GAAG,YAAY,CAAC;IAClB,YAAY,GAAG,wBAAwB,CAAC;IAEzC,MAAM,CAAU,YAAY,GAAG;QACrC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK;QAC3D,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI;QACpD,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI;KAClD,CAAC;IAEH,MAAM,CAAU,oBAAoB,GAAG;QAC7C,GAAG,wBAAwB,CAAC,YAAY,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI;KAClD,CAAC;IAEH,MAAM,CAAU,YAAY,GAAG;QACrC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI;QAC1D,KAAK,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI;QAC5D,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI;QAC1D,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI;QAC1D,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI;QAC1D,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI;QAC1D,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI;QAC1D,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI;KACd,CAAC;IAEF,MAAM,GAAG;QAChB,EAAE,EAAE,EAAE,WAAW,EAAE,SAAS,EAAE,wBAAwB,CAAC,YAAY,EAAE;QACrE,EAAE,EAAE,EAAE,wBAAwB,EAAE,SAAS,EAAE,wBAAwB,CAAC,YAAY,EAAE;QAClF,EAAE,EAAE,EAAE,mBAAmB,EAAE,SAAS,EAAE,wBAAwB,CAAC,oBAAoB,EAAE;QACrF,EAAE,EAAE,EAAE,iBAAiB,EAAE,SAAS,EAAE,CAAC,IAAI,CAAU,EAAE;KAC7C,CAAC;IAEM,MAAM,CAAqB;IAC3B,OAAO,CAAS;IAChB,OAAO,CAA0B;IAElD,YAAY,MAAsC;QAChD,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;QAC5B,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC,OAAO,IAAI,2BAA2B,CAAC;
QAC7D,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC,KAAK,IAAI,UAAU,CAAC,KAAK,CAAC;IAClD,CAAC;IAED,KAAK,CAAC,QAAQ,CAAC,OAOd;QAKC,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;YACnB,MAAM,IAAI,cAAc,CACtB,+DAA+D,CAChE,CAAC;QACJ,CAAC;QAED,MAAM,eAAe,GAAG,OAAO,CAAC,eAAe,IAAI,EAAE,CAAC;QACtD,MAAM,EAAE,aAAa,EAAE,cAAc,EAAE,0BAA0B,EAAE,GAAG,WAAW,EAAE,GAAG,eAA0C,CAAC;QAEjI,MAAM,IAAI,GAA4B;YACpC,GAAG,WAAW;YACd,IAAI,EAAE,OAAO,CAAC,IAAI;YAClB,QAAQ,EAAE,OAAO,CAAC,OAAO;SAC1B,CAAC;QAEF,MAAM,WAAW,GAAG,IAAI,eAAe,EAAE,CAAC;QAC1C,IAAI,aAAa,IAAI,IAAI;YAAE,WAAW,CAAC,GAAG,CAAC,eAAe,EAAE,MAAM,CAAC,aAAa,CAAC,CAAC,CAAC;QACnF,IAAI,cAAc,IAAI,IAAI;YAAE,WAAW,CAAC,GAAG,CAAC,gBAAgB,EAAE,MAAM,CAAC,cAAc,CAAC,CAAC,CAAC;QACtF,IAAI,0BAA0B,IAAI,IAAI;YAAE,WAAW,CAAC,GAAG,CAAC,4BAA4B,EAAE,MAAM,CAAC,0BAA0B,CAAC,CAAC,CAAC;QAE1H,IAAI,GAAG,GAAG,GAAG,IAAI,CAAC,OAAO,sBAAsB,OAAO,CAAC,KAAK,EAAE,CAAC;QAC/D,MAAM,WAAW,GAAG,WAAW,CAAC,QAAQ,EAAE,CAAC;QAC3C,IAAI,WAAW,EAAE,CAAC;YAChB,GAAG,IAAI,IAAI,WAAW,EAAE,CAAC;QAC3B,CAAC;QAED,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE;YACvC,MAAM,EAAE,MAAM;YACd,OAAO,EAAE;gBACP,cAAc,EAAE,kBAAkB;gBAClC,YAAY,EAAE,aAAa,CAAC,IAAI,CAAC,MAAM,EAAE,oBAAoB,EAAE,YAAY,CAAC;gBAC5E,GAAG,OAAO,CAAC,OAAO;aACnB;YACD,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;YAC1B,MAAM,EAAE,OAAO,CAAC,WAAW;SAC5B,CAAC,CAAC;QAEH,MAAM,mBAAmB,CAAC,QAAQ,EAAE,cAAc,OAAO,CAAC,OAAO,EAAE,CAAC,CAAC;QAErE,MAAM,WAAW,GAAG,MAAM,QAAQ,CAAC,WAAW,EAAE,CAAC;QACjD,MAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,IAAI,YAAY,CAAC;QACvE,MAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC;QAErD,OAAO;YACL,KAAK,EAAE,IAAI,UAAU,CAAC,WAAW,CAAC;YAClC,SAAS;YACT,gBAAgB,EAAE,SAAS,CAAC,CAAC,CAAC,EAAE,SAAS,EAAE,CAAC,CAAC,CAAC,SAAS;SACxD,CAAC;IACJ,CAAC"}
|
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
export { createElevenLabs } from './elevenlabs-provider.js';
|
|
2
|
+
export { ElevenLabsSpeechProvider } from './elevenlabs-speech-model.js';
|
|
3
|
+
export type { ElevenLabsSpeechProviderConfig } from './elevenlabs-speech-model.js';
|
|
4
|
+
export type { ElevenLabsSpeechOptions } from './elevenlabs-options.js';
|
|
5
|
+
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/providers/elevenlabs/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,gBAAgB,EAAE,MAAM,0BAA0B,CAAC;AAC5D,OAAO,EAAE,wBAAwB,EAAE,MAAM,8BAA8B,CAAC;AACxE,YAAY,EAAE,8BAA8B,EAAE,MAAM,8BAA8B,CAAC;AACnF,YAAY,EAAE,uBAAuB,EAAE,MAAM,yBAAyB,CAAC"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/providers/elevenlabs/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,gBAAgB,EAAE,MAAM,0BAA0B,CAAC;AAC5D,OAAO,EAAE,wBAAwB,EAAE,MAAM,8BAA8B,CAAC"}
|
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
export { createOpenAI } from './openai-provider.js';
|
|
2
|
+
export { OpenAISpeechProvider } from './openai-speech-model.js';
|
|
3
|
+
export type { OpenAISpeechProviderConfig } from './openai-speech-model.js';
|
|
4
|
+
export type { OpenAISpeechOptions } from './openai-options.js';
|
|
5
|
+
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/providers/openai/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,sBAAsB,CAAC;AACpD,OAAO,EAAE,oBAAoB,EAAE,MAAM,0BAA0B,CAAC;AAChE,YAAY,EAAE,0BAA0B,EAAE,MAAM,0BAA0B,CAAC;AAC3E,YAAY,EAAE,mBAAmB,EAAE,MAAM,qBAAqB,CAAC"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/providers/openai/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,sBAAsB,CAAC;AACpD,OAAO,EAAE,oBAAoB,EAAE,MAAM,0BAA0B,CAAC"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"openai-options.d.ts","sourceRoot":"","sources":["../../../src/providers/openai/openai-options.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,mBAAmB,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"openai-options.js","sourceRoot":"","sources":["../../../src/providers/openai/openai-options.ts"],"names":[],"mappings":""}
|
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
import { type OpenAISpeechProviderConfig } from './openai-speech-model.js';
import type { ResolvedModel } from '../../speech-provider.js';
import type { OpenAISpeechOptions } from './openai-options.js';
/**
 * Builds an OpenAI speech provider from the given config and returns a
 * factory that produces `{ provider, modelId }` handles. When `modelId`
 * is omitted, the provider's default model is used.
 */
export declare function createOpenAI(config?: OpenAISpeechProviderConfig): (modelId?: string) => ResolvedModel<OpenAISpeechOptions>;
|
|
5
|
+
//# sourceMappingURL=openai-provider.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"openai-provider.d.ts","sourceRoot":"","sources":["../../../src/providers/openai/openai-provider.ts"],"names":[],"mappings":"AAAA,OAAO,EAAwB,KAAK,0BAA0B,EAAE,MAAM,0BAA0B,CAAC;AACjG,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AAC9D,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,qBAAqB,CAAC;AAE/D,wBAAgB,YAAY,CAAC,MAAM,GAAE,0BAA+B,IAG3C,UAAU,MAAM,KAAG,aAAa,CAAC,mBAAmB,CAAC,CAM7E"}
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
import { OpenAISpeechProvider } from './openai-speech-model.js';
/**
 * Creates an OpenAI speech provider factory.
 *
 * A single provider instance is constructed once and shared by every
 * model handle the returned function produces.
 *
 * @param {object} [config] - Provider configuration (apiKey, baseURL, fetch).
 * @returns {(modelId?: string) => {provider: OpenAISpeechProvider, modelId: string}}
 */
export function createOpenAI(config = {}) {
    const sharedProvider = new OpenAISpeechProvider(config);
    return function openai(modelId) {
        // Fall back to the provider's default model only when no id is given.
        const resolvedId = modelId ?? sharedProvider.defaultModel;
        return { provider: sharedProvider, modelId: resolvedId };
    };
}
|
|
11
|
+
//# sourceMappingURL=openai-provider.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"openai-provider.js","sourceRoot":"","sources":["../../../src/providers/openai/openai-provider.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,oBAAoB,EAAmC,MAAM,0BAA0B,CAAC;AAIjG,MAAM,UAAU,YAAY,CAAC,SAAqC,EAAE;IAClE,MAAM,QAAQ,GAAG,IAAI,oBAAoB,CAAC,MAAM,CAAC,CAAC;IAElD,OAAO,SAAS,MAAM,CAAC,OAAgB;QACrC,OAAO;YACL,QAAQ;YACR,OAAO,EAAE,OAAO,IAAI,QAAQ,CAAC,YAAY;SAC1C,CAAC;IACJ,CAAC,CAAC;AACJ,CAAC"}
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
import type { SpeechProvider } from '../../speech-provider.js';
|
|
2
|
+
import type { OpenAISpeechOptions } from './openai-options.js';
|
|
3
|
+
export interface OpenAISpeechProviderConfig {
|
|
4
|
+
apiKey?: string;
|
|
5
|
+
baseURL?: string;
|
|
6
|
+
fetch?: typeof globalThis.fetch;
|
|
7
|
+
}
|
|
8
|
+
export declare class OpenAISpeechProvider implements SpeechProvider<string, OpenAISpeechOptions> {
|
|
9
|
+
readonly id = "openai";
|
|
10
|
+
readonly defaultModel = "gpt-4o-mini-tts";
|
|
11
|
+
private static readonly LANGUAGES;
|
|
12
|
+
readonly models: readonly [{
|
|
13
|
+
readonly id: "gpt-4o-mini-tts";
|
|
14
|
+
readonly languages: readonly ["af", "ar", "bg", "bn", "bs", "ca", "cs", "cy", "da", "de", "el", "en", "es", "et", "fi", "fr", "gl", "gu", "he", "hi", "hr", "hu", "id", "is", "it", "ja", "jv", "ka", "kk", "km", "kn", "ko", "lo", "lt", "lv", "mk", "ml", "mn", "mr", "ms", "my", "ne", "nl", "no", "pa", "pl", "pt", "ro", "ru", "si", "sk", "sl", "so", "sq", "sr", "su", "sv", "sw", "ta", "te", "th", "tl", "tr", "uk", "ur", "vi", "zh"];
|
|
15
|
+
}, {
|
|
16
|
+
readonly id: "tts-1";
|
|
17
|
+
readonly languages: readonly ["af", "ar", "bg", "bn", "bs", "ca", "cs", "cy", "da", "de", "el", "en", "es", "et", "fi", "fr", "gl", "gu", "he", "hi", "hr", "hu", "id", "is", "it", "ja", "jv", "ka", "kk", "km", "kn", "ko", "lo", "lt", "lv", "mk", "ml", "mn", "mr", "ms", "my", "ne", "nl", "no", "pa", "pl", "pt", "ro", "ru", "si", "sk", "sl", "so", "sq", "sr", "su", "sv", "sw", "ta", "te", "th", "tl", "tr", "uk", "ur", "vi", "zh"];
|
|
18
|
+
}, {
|
|
19
|
+
readonly id: "tts-1-hd";
|
|
20
|
+
readonly languages: readonly ["af", "ar", "bg", "bn", "bs", "ca", "cs", "cy", "da", "de", "el", "en", "es", "et", "fi", "fr", "gl", "gu", "he", "hi", "hr", "hu", "id", "is", "it", "ja", "jv", "ka", "kk", "km", "kn", "ko", "lo", "lt", "lv", "mk", "ml", "mn", "mr", "ms", "my", "ne", "nl", "no", "pa", "pl", "pt", "ro", "ru", "si", "sk", "sl", "so", "sq", "sr", "su", "sv", "sw", "ta", "te", "th", "tl", "tr", "uk", "ur", "vi", "zh"];
|
|
21
|
+
}];
|
|
22
|
+
private readonly apiKey;
|
|
23
|
+
private readonly baseURL;
|
|
24
|
+
private readonly fetchFn;
|
|
25
|
+
constructor(config: OpenAISpeechProviderConfig);
|
|
26
|
+
generate(options: {
|
|
27
|
+
modelId: string;
|
|
28
|
+
text: string;
|
|
29
|
+
voice?: string;
|
|
30
|
+
providerOptions?: OpenAISpeechOptions;
|
|
31
|
+
abortSignal?: AbortSignal;
|
|
32
|
+
headers?: Record<string, string>;
|
|
33
|
+
}): Promise<{
|
|
34
|
+
audio: Uint8Array;
|
|
35
|
+
mediaType: string;
|
|
36
|
+
providerMetadata?: Record<string, unknown>;
|
|
37
|
+
}>;
|
|
38
|
+
}
|
|
39
|
+
//# sourceMappingURL=openai-speech-model.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"openai-speech-model.d.ts","sourceRoot":"","sources":["../../../src/providers/openai/openai-speech-model.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,0BAA0B,CAAC;AAE/D,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,qBAAqB,CAAC;AAE/D,MAAM,WAAW,0BAA0B;IACzC,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,KAAK,CAAC,EAAE,OAAO,UAAU,CAAC,KAAK,CAAC;CACjC;AAED,qBAAa,oBAAqB,YAAW,cAAc,CAAC,MAAM,EAAE,mBAAmB,CAAC;IACtF,QAAQ,CAAC,EAAE,YAAY;IACvB,QAAQ,CAAC,YAAY,qBAAqB;IAE1C,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,SAAS,CAQtB;IAEX,QAAQ,CAAC,MAAM;;;;;;;;;OAIJ;IAEX,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAqB;IAC5C,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAS;IACjC,OAAO,CAAC,QAAQ,CAAC,OAAO,CAA0B;gBAEtC,MAAM,EAAE,0BAA0B;IAMxC,QAAQ,CAAC,OAAO,EAAE;QACtB,OAAO,EAAE,MAAM,CAAC;QAChB,IAAI,EAAE,MAAM,CAAC;QACb,KAAK,CAAC,EAAE,MAAM,CAAC;QACf,eAAe,CAAC,EAAE,mBAAmB,CAAC;QACtC,WAAW,CAAC,EAAE,WAAW,CAAC;QAC1B,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;KAClC,GAAG,OAAO,CAAC;QACV,KAAK,EAAE,UAAU,CAAC;QAClB,SAAS,EAAE,MAAM,CAAC;QAClB,gBAAgB,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;KAC5C,CAAC;CA+BH"}
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
import { resolveApiKey, handleErrorResponse } from '../../provider-utils.js';
/**
 * Speech (TTS) provider backed by the OpenAI `/audio/speech` endpoint.
 *
 * Config: `apiKey` (falls back to the OPENAI_API_KEY environment variable
 * via resolveApiKey), `baseURL` (default https://api.openai.com/v1), and
 * `fetch` (custom fetch implementation, e.g. for testing or proxying).
 */
export class OpenAISpeechProvider {
    id = 'openai';
    defaultModel = 'gpt-4o-mini-tts';
    // Language codes shared by all three OpenAI TTS models.
    static LANGUAGES = [
        'af', 'ar', 'bg', 'bn', 'bs', 'ca', 'cs', 'cy', 'da', 'de',
        'el', 'en', 'es', 'et', 'fi', 'fr', 'gl', 'gu', 'he', 'hi',
        'hr', 'hu', 'id', 'is', 'it', 'ja', 'jv', 'ka', 'kk', 'km',
        'kn', 'ko', 'lo', 'lt', 'lv', 'mk', 'ml', 'mn', 'mr', 'ms',
        'my', 'ne', 'nl', 'no', 'pa', 'pl', 'pt', 'ro', 'ru', 'si',
        'sk', 'sl', 'so', 'sq', 'sr', 'su', 'sv', 'sw', 'ta', 'te',
        'th', 'tl', 'tr', 'uk', 'ur', 'vi', 'zh',
    ];
    models = [
        { id: 'gpt-4o-mini-tts', languages: OpenAISpeechProvider.LANGUAGES },
        { id: 'tts-1', languages: OpenAISpeechProvider.LANGUAGES },
        { id: 'tts-1-hd', languages: OpenAISpeechProvider.LANGUAGES },
    ];
    apiKey;
    baseURL;
    fetchFn;
    constructor(config) {
        this.apiKey = config.apiKey;
        this.baseURL = config.baseURL ?? 'https://api.openai.com/v1';
        // FIX: bind the default fetch. Calling an unbound `globalThis.fetch`
        // as `this.fetchFn(...)` makes the provider instance the receiver,
        // which throws "Illegal invocation" in browser/worker runtimes
        // (window.fetch requires its original receiver). A caller-supplied
        // fetch is used as-is.
        this.fetchFn = config.fetch ?? globalThis.fetch?.bind(globalThis);
    }
    /**
     * Generates speech for `options.text` with the given model/voice.
     *
     * Spreads `providerOptions` into the request body first so the explicit
     * model/input/voice fields always win; spreads `options.headers` last so
     * callers may override any default header (including Authorization).
     *
     * @returns {Promise<{audio: Uint8Array, mediaType: string}>} raw audio
     *   bytes plus the response content-type (defaults to audio/mpeg when
     *   the server omits the header).
     */
    async generate(options) {
        const body = {
            ...options.providerOptions,
            model: options.modelId,
            input: options.text,
            voice: options.voice,
        };
        const url = `${this.baseURL}/audio/speech`;
        const response = await this.fetchFn(url, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                'Authorization': `Bearer ${resolveApiKey(this.apiKey, 'OPENAI_API_KEY', 'OpenAI')}`,
                ...options.headers,
            },
            body: JSON.stringify(body),
            signal: options.abortSignal,
        });
        // Throws a descriptive SDK error on non-OK responses.
        await handleErrorResponse(response, `openai/${options.modelId}`);
        const arrayBuffer = await response.arrayBuffer();
        const mediaType = response.headers.get('content-type') ?? 'audio/mpeg';
        return {
            audio: new Uint8Array(arrayBuffer),
            mediaType,
        };
    }
}
|
|
54
|
+
//# sourceMappingURL=openai-speech-model.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"openai-speech-model.js","sourceRoot":"","sources":["../../../src/providers/openai/openai-speech-model.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,aAAa,EAAE,mBAAmB,EAAE,MAAM,yBAAyB,CAAC;AAS7E,MAAM,OAAO,oBAAoB;IACtB,EAAE,GAAG,QAAQ,CAAC;IACd,YAAY,GAAG,iBAAiB,CAAC;IAElC,MAAM,CAAU,SAAS,GAAG;QAClC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI;QAC1D,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI;QAC1D,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI;QAC1D,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI;QAC1D,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI;QAC1D,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI;QAC1D,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI;KAChC,CAAC;IAEF,MAAM,GAAG;QAChB,EAAE,EAAE,EAAE,iBAAiB,EAAE,SAAS,EAAE,oBAAoB,CAAC,SAAS,EAAE;QACpE,EAAE,EAAE,EAAE,OAAO,EAAE,SAAS,EAAE,oBAAoB,CAAC,SAAS,EAAE;QAC1D,EAAE,EAAE,EAAE,UAAU,EAAE,SAAS,EAAE,oBAAoB,CAAC,SAAS,EAAE;KACrD,CAAC;IAEM,MAAM,CAAqB;IAC3B,OAAO,CAAS;IAChB,OAAO,CAA0B;IAElD,YAAY,MAAkC;QAC5C,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;QAC5B,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC,OAAO,IAAI,2BAA2B,CAAC;QAC7D,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC,KAAK,IAAI,UAAU,CAAC,KAAK,CAAC;IAClD,CAAC;IAED,KAAK,CAAC,QAAQ,CAAC,OAOd;QAKC,MAAM,IAAI,GAA4B;YACpC,GAAG,OAAO,CAAC,eAAe;YAC1B,KAAK,EAAE,OAAO,CAAC,OAAO;YACtB,KAAK,EAAE,OAAO,CAAC,IAAI;YACnB,KAAK,EAAE,OAAO,CAAC,KAAK;SACrB,CAAC;QAEF,MAAM,GAAG,GAAG,GAAG,IAAI,CAAC,OAAO,eAAe,CAAC;QAE3C,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE;YACvC,MAAM,EAAE,MAAM;YACd,OAAO,EAAE;gBACP,cAAc,EAAE,kBAAkB;gBAClC,eAAe,EAAE,UAAU,aAAa,CAAC,IAAI,CAAC,MAAM,EAAE,gBAAgB,EAAE,QAAQ,CAAC,EAAE;gBACnF,GAAG,OAAO,CAAC,OAAO;aACnB;YACD,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;YAC1B,MAAM,EAAE,OAAO,CAAC,WAAW;SAC5B,C
AAC,CAAC;QAEH,MAAM,mBAAmB,CAAC,QAAQ,EAAE,UAAU,OAAO,CAAC,OAAO,EAAE,CAAC,CAAC;QAEjE,MAAM,WAAW,GAAG,MAAM,QAAQ,CAAC,WAAW,EAAE,CAAC;QACjD,MAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,IAAI,YAAY,CAAC;QAEvE,OAAO;YACL,KAAK,EAAE,IAAI,UAAU,CAAC,WAAW,CAAC;YAClC,SAAS;SACV,CAAC;IACJ,CAAC"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"resolve-provider.d.ts","sourceRoot":"","sources":["../src/resolve-provider.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAkB,aAAa,EAAE,MAAM,sBAAsB,CAAC;AAyB1E,wBAAgB,YAAY,CAC1B,KAAK,EAAE,MAAM,GAAG,aAAa,GAC5B,aAAa,CAsBf"}
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
import { SpeechSDKError } from './errors.js';
|
|
2
|
+
import { OpenAISpeechProvider } from './providers/openai/openai-speech-model.js';
|
|
3
|
+
import { ElevenLabsSpeechProvider } from './providers/elevenlabs/elevenlabs-speech-model.js';
|
|
4
|
+
/**
 * Type guard: detects an already-resolved `{ provider, modelId }` pair
 * (as opposed to a "provider/model" spec string).
 */
function isResolvedModel(model) {
    if (model == null || typeof model !== 'object') {
        return false;
    }
    return 'provider' in model && 'modelId' in model;
}
|
|
10
|
+
/**
 * Instantiates a built-in provider (with default/empty config) by name.
 * @throws {SpeechSDKError} when the name matches no built-in provider.
 */
function createBuiltinProvider(name) {
    if (name === 'openai') {
        return new OpenAISpeechProvider({});
    }
    if (name === 'elevenlabs') {
        return new ElevenLabsSpeechProvider({});
    }
    throw new SpeechSDKError(`Unknown provider: ${name}`);
}
|
|
20
|
+
/**
 * Resolves a model spec into a `{ provider, modelId }` pair.
 *
 * Accepts either an already-resolved pair (returned unchanged) or a
 * string of the form `"provider"` or `"provider/modelId"`.
 */
export function resolveModel(model) {
    if (isResolvedModel(model)) {
        return model;
    }
    // String form: everything before the first '/' names the provider;
    // the remainder (which may itself contain '/') is the model id.
    const [providerName, ...idParts] = model.split('/');
    const requestedId = idParts.length > 0 ? idParts.join('/') : undefined;
    const provider = createBuiltinProvider(providerName);
    return {
        provider,
        // A missing or empty model id falls back to the provider default.
        modelId: requestedId || provider.defaultModel,
    };
}
|
|
41
|
+
//# sourceMappingURL=resolve-provider.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"resolve-provider.js","sourceRoot":"","sources":["../src/resolve-provider.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,cAAc,EAAE,MAAM,aAAa,CAAC;AAC7C,OAAO,EAAE,oBAAoB,EAAE,MAAM,2CAA2C,CAAC;AACjF,OAAO,EAAE,wBAAwB,EAAE,MAAM,mDAAmD,CAAC;AAE7F,SAAS,eAAe,CAAC,KAAc;IACrC,OAAO,CACL,KAAK,IAAI,IAAI;QACb,OAAO,KAAK,KAAK,QAAQ;QACzB,UAAU,IAAI,KAAK;QACnB,SAAS,IAAI,KAAK,CACnB,CAAC;AACJ,CAAC;AAED,SAAS,qBAAqB,CAAC,IAAY;IACzC,QAAQ,IAAI,EAAE,CAAC;QACb,KAAK,QAAQ;YACX,OAAO,IAAI,oBAAoB,CAAC,EAAE,CAAC,CAAC;QACtC,KAAK,YAAY;YACf,OAAO,IAAI,wBAAwB,CAAC,EAAE,CAAC,CAAC;QAC1C;YACE,MAAM,IAAI,cAAc,CAAC,qBAAqB,IAAI,EAAE,CAAC,CAAC;IAC1D,CAAC;AACH,CAAC;AAED,MAAM,UAAU,YAAY,CAC1B,KAA6B;IAE7B,IAAI,eAAe,CAAC,KAAK,CAAC,EAAE,CAAC;QAC3B,OAAO,KAAK,CAAC;IACf,CAAC;IAED,MAAM,UAAU,GAAG,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IACtC,IAAI,YAAoB,CAAC;IACzB,IAAI,OAA2B,CAAC;IAEhC,IAAI,UAAU,KAAK,CAAC,CAAC,EAAE,CAAC;QACtB,YAAY,GAAG,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC;QAC1C,OAAO,GAAG,KAAK,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC;IACxC,CAAC;SAAM,CAAC;QACN,YAAY,GAAG,KAAK,CAAC;QACrB,OAAO,GAAG,SAAS,CAAC;IACtB,CAAC;IAED,MAAM,QAAQ,GAAG,qBAAqB,CAAC,YAAY,CAAC,CAAC;IACrD,OAAO;QACL,QAAQ;QACR,OAAO,EAAE,OAAO,IAAI,QAAQ,CAAC,YAAY;KAC1C,CAAC;AACJ,CAAC"}
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
/** Describes a single model a provider offers. */
export interface ModelInfo {
    /** Provider-scoped model identifier (e.g. "tts-1"). */
    id: string;
    /** Language codes the model supports — NOTE(review): presumably ISO 639-1; confirm against provider docs. */
    languages: readonly string[];
}
/**
 * Contract implemented by every speech (TTS) provider in this SDK.
 * `TModel` narrows the default model id; `TOptions` is the shape of the
 * provider-specific options bag forwarded on each request.
 */
export interface SpeechProvider<TModel extends string = string, TOptions extends Record<string, unknown> = Record<string, unknown>> {
    /** Stable provider name (e.g. "openai", "elevenlabs"). */
    id: string;
    /** Model used when a spec string names only the provider. */
    defaultModel: TModel;
    /** Catalog of models this provider exposes. */
    models: readonly ModelInfo[];
    /** Performs one text-to-speech request. */
    generate(options: {
        modelId: string;
        text: string;
        voice?: string;
        providerOptions?: TOptions;
        /** Aborts the underlying HTTP request. */
        abortSignal?: AbortSignal;
        /** Extra HTTP headers merged into the provider request. */
        headers?: Record<string, string>;
    }): Promise<{
        /** Raw audio bytes, or a base64-encoded string. */
        audio: string | Uint8Array;
        /** MIME type of the audio payload (e.g. "audio/mpeg"). */
        mediaType: string;
        providerMetadata?: Record<string, unknown>;
    }>;
}
/** A provider paired with a concrete model id, ready to generate. */
export interface ResolvedModel<TOptions extends Record<string, unknown> = Record<string, unknown>> {
    provider: SpeechProvider<string, TOptions>;
    modelId: string;
}
|
|
26
|
+
//# sourceMappingURL=speech-provider.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"speech-provider.d.ts","sourceRoot":"","sources":["../src/speech-provider.ts"],"names":[],"mappings":"AAAA,MAAM,WAAW,SAAS;IACxB,EAAE,EAAE,MAAM,CAAC;IACX,SAAS,EAAE,SAAS,MAAM,EAAE,CAAC;CAC9B;AAED,MAAM,WAAW,cAAc,CAC7B,MAAM,SAAS,MAAM,GAAG,MAAM,EAC9B,QAAQ,SAAS,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC;IAElE,EAAE,EAAE,MAAM,CAAC;IACX,YAAY,EAAE,MAAM,CAAC;IACrB,MAAM,EAAE,SAAS,SAAS,EAAE,CAAC;IAE7B,QAAQ,CAAC,OAAO,EAAE;QAChB,OAAO,EAAE,MAAM,CAAC;QAChB,IAAI,EAAE,MAAM,CAAC;QACb,KAAK,CAAC,EAAE,MAAM,CAAC;QACf,eAAe,CAAC,EAAE,QAAQ,CAAC;QAC3B,WAAW,CAAC,EAAE,WAAW,CAAC;QAC1B,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;KAClC,GAAG,OAAO,CAAC;QACV,KAAK,EAAE,MAAM,GAAG,UAAU,CAAC;QAC3B,SAAS,EAAE,MAAM,CAAC;QAClB,gBAAgB,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;KAC5C,CAAC,CAAC;CACJ;AAED,MAAM,WAAW,aAAa,CAC5B,QAAQ,SAAS,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC;IAElE,QAAQ,EAAE,cAAc,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IAC3C,OAAO,EAAE,MAAM,CAAC;CACjB"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"speech-provider.js","sourceRoot":"","sources":["../src/speech-provider.ts"],"names":[],"mappings":""}
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
/** Audio payload exposed in both binary and base64 form. */
export interface GeneratedAudioFile {
    readonly uint8Array: Uint8Array;
    readonly base64: string;
    /** MIME type of the audio (e.g. "audio/mpeg"). */
    readonly mediaType: string;
}
/** Result of a speech-generation call. */
export interface SpeechResult {
    readonly audio: GeneratedAudioFile;
    readonly providerMetadata?: Record<string, unknown>;
}
/**
 * GeneratedAudioFile backed by either raw bytes or a base64 string;
 * the other representation is computed lazily and cached on first access.
 */
export declare class DefaultGeneratedAudioFile implements GeneratedAudioFile {
    readonly mediaType: string;
    private _data;
    private _uint8Array?;
    private _base64?;
    constructor({ data, mediaType }: {
        data: string | Uint8Array;
        mediaType: string;
    });
    get uint8Array(): Uint8Array;
    get base64(): string;
}
|
|
22
|
+
//# sourceMappingURL=speech-result.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"speech-result.d.ts","sourceRoot":"","sources":["../src/speech-result.ts"],"names":[],"mappings":"AAAA,MAAM,WAAW,kBAAkB;IACjC,QAAQ,CAAC,UAAU,EAAE,UAAU,CAAC;IAChC,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC;IACxB,QAAQ,CAAC,SAAS,EAAE,MAAM,CAAC;CAC5B;AAED,MAAM,WAAW,YAAY;IAC3B,QAAQ,CAAC,KAAK,EAAE,kBAAkB,CAAC;IACnC,QAAQ,CAAC,gBAAgB,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;CACrD;AAED,qBAAa,yBAA0B,YAAW,kBAAkB;IAClE,QAAQ,CAAC,SAAS,EAAE,MAAM,CAAC;IAE3B,OAAO,CAAC,KAAK,CAAsB;IACnC,OAAO,CAAC,WAAW,CAAC,CAAa;IACjC,OAAO,CAAC,OAAO,CAAC,CAAS;gBAEb,EAAE,IAAI,EAAE,SAAS,EAAE,EAAE;QAAE,IAAI,EAAE,MAAM,GAAG,UAAU,CAAC;QAAC,SAAS,EAAE,MAAM,CAAA;KAAE;IAKjF,IAAI,UAAU,IAAI,UAAU,CAa3B;IAED,IAAI,MAAM,IAAI,MAAM,CAYnB;CACF"}
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
/**
 * Audio container accepting either raw bytes (Uint8Array) or a base64
 * string, exposing both representations. Conversion happens lazily on
 * first access and the result is cached.
 */
export class DefaultGeneratedAudioFile {
    mediaType;
    _data;
    _uint8Array;
    _base64;
    constructor({ data, mediaType }) {
        this._data = data;
        this.mediaType = mediaType;
    }
    /** Raw audio bytes; decodes the base64 source on first access. */
    get uint8Array() {
        if (this._uint8Array == null) {
            this._uint8Array = this._data instanceof Uint8Array
                ? this._data
                : Uint8Array.from(atob(this._data), (ch) => ch.charCodeAt(0));
        }
        return this._uint8Array;
    }
    /** Base64 audio; encodes the byte source on first access. */
    get base64() {
        if (this._base64 == null) {
            this._base64 = typeof this._data === 'string'
                ? this._data
                : btoa(Array.from(this._data, (b) => String.fromCharCode(b)).join(''));
        }
        return this._base64;
    }
}
|
|
42
|
+
//# sourceMappingURL=speech-result.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"speech-result.js","sourceRoot":"","sources":["../src/speech-result.ts"],"names":[],"mappings":"AAWA,MAAM,OAAO,yBAAyB;IAC3B,SAAS,CAAS;IAEnB,KAAK,CAAsB;IAC3B,WAAW,CAAc;IACzB,OAAO,CAAU;IAEzB,YAAY,EAAE,IAAI,EAAE,SAAS,EAAoD;QAC/E,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;QAClB,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;IAC7B,CAAC;IAED,IAAI,UAAU;QACZ,IAAI,IAAI,CAAC,WAAW,IAAI,IAAI;YAAE,OAAO,IAAI,CAAC,WAAW,CAAC;QACtD,IAAI,IAAI,CAAC,KAAK,YAAY,UAAU,EAAE,CAAC;YACrC,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,KAAK,CAAC;QAChC,CAAC;aAAM,CAAC;YACN,MAAM,YAAY,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;YACtC,MAAM,KAAK,GAAG,IAAI,UAAU,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC;YAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,YAAY,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;gBAC7C,KAAK,CAAC,CAAC,CAAC,GAAG,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;YACxC,CAAC;YACD,IAAI,CAAC,WAAW,GAAG,KAAK,CAAC;QAC3B,CAAC;QACD,OAAO,IAAI,CAAC,WAAW,CAAC;IAC1B,CAAC;IAED,IAAI,MAAM;QACR,IAAI,IAAI,CAAC,OAAO,IAAI,IAAI;YAAE,OAAO,IAAI,CAAC,OAAO,CAAC;QAC9C,IAAI,OAAO,IAAI,CAAC,KAAK,KAAK,QAAQ,EAAE,CAAC;YACnC,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC;QAC5B,CAAC;aAAM,CAAC;YACN,IAAI,YAAY,GAAG,EAAE,CAAC;YACtB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;gBAC3C,YAAY,IAAI,MAAM,CAAC,YAAY,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;YACrD,CAAC;YACD,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,YAAY,CAAC,CAAC;QACpC,CAAC;QACD,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;CACF"}
|
package/dist/types.d.ts
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
import type { ResolvedModel } from './speech-provider.js';
|
|
2
|
+
export type GenerateSpeechOptions<T extends Record<string, unknown> = Record<string, unknown>> = {
|
|
3
|
+
model: string | ResolvedModel<T>;
|
|
4
|
+
text: string;
|
|
5
|
+
voice: string;
|
|
6
|
+
providerOptions?: T;
|
|
7
|
+
maxRetries?: number;
|
|
8
|
+
abortSignal?: AbortSignal;
|
|
9
|
+
headers?: Record<string, string>;
|
|
10
|
+
};
|
|
11
|
+
//# sourceMappingURL=types.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,sBAAsB,CAAC;AAE1D,MAAM,MAAM,qBAAqB,CAAC,CAAC,SAAS,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,IAAI;IAC/F,KAAK,EAAE,MAAM,GAAG,aAAa,CAAC,CAAC,CAAC,CAAC;IACjC,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,EAAE,MAAM,CAAC;IACd,eAAe,CAAC,EAAE,CAAC,CAAC;IACpB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,WAAW,CAAC,EAAE,WAAW,CAAC;IAC1B,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CAClC,CAAC"}
|
package/dist/types.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"types.js","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":""}
|
package/package.json
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@jellypod/speech-sdk",
|
|
3
|
+
"version": "0.0.1",
|
|
4
|
+
"description": "A TypeScript SDK for text-to-speech with multiple provider support",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "./dist/index.js",
|
|
7
|
+
"types": "./dist/index.d.ts",
|
|
8
|
+
"exports": {
|
|
9
|
+
".": {
|
|
10
|
+
"types": "./dist/index.d.ts",
|
|
11
|
+
"default": "./dist/index.js"
|
|
12
|
+
},
|
|
13
|
+
"./openai": {
|
|
14
|
+
"types": "./dist/providers/openai/index.d.ts",
|
|
15
|
+
"default": "./dist/providers/openai/index.js"
|
|
16
|
+
},
|
|
17
|
+
"./elevenlabs": {
|
|
18
|
+
"types": "./dist/providers/elevenlabs/index.d.ts",
|
|
19
|
+
"default": "./dist/providers/elevenlabs/index.js"
|
|
20
|
+
}
|
|
21
|
+
},
|
|
22
|
+
"files": [
|
|
23
|
+
"dist"
|
|
24
|
+
],
|
|
25
|
+
"scripts": {
|
|
26
|
+
"build": "tsc",
|
|
27
|
+
"test": "vitest run",
|
|
28
|
+
"test:watch": "vitest",
|
|
29
|
+
"typecheck": "tsc --noEmit"
|
|
30
|
+
},
|
|
31
|
+
"keywords": [
|
|
32
|
+
"tts",
|
|
33
|
+
"text-to-speech",
|
|
34
|
+
"speech",
|
|
35
|
+
"openai",
|
|
36
|
+
"elevenlabs",
|
|
37
|
+
"ai"
|
|
38
|
+
],
|
|
39
|
+
"license": "MIT",
|
|
40
|
+
"dependencies": {
|
|
41
|
+
"p-retry": "^8.0.0"
|
|
42
|
+
},
|
|
43
|
+
"devDependencies": {
|
|
44
|
+
"@types/node": "^25.5.0",
|
|
45
|
+
"typescript": "^5.8.0",
|
|
46
|
+
"vitest": "^3.1.0"
|
|
47
|
+
}
|
|
48
|
+
}
|