@e9g/buffered-audio-nodes-core 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,285 @@
1
import { z } from 'zod';
export { ZodType as ModuleSchema } from 'zod';
import { EventEmitter } from 'node:events';

/**
 * Lifecycle events emitted by every BufferedStream.
 * `progress` carries the cumulative frames handled so far, plus the source's
 * total frame count when it was known at setup time.
 */
interface StreamEventMap {
  started: [];
  finished: [];
  progress: [{
    framesProcessed: number;
    sourceTotalFrames?: number;
  }];
}
/** Base class for the per-render streams owned by a BufferedAudioNode. */
declare abstract class BufferedStream<P extends BufferedAudioNodeProperties = BufferedAudioNodeProperties> {
  readonly properties: P;
  /** Emits `started` / `progress` / `finished` (see StreamEventMap). */
  readonly events: EventEmitter<StreamEventMap>;
  constructor(properties: P);
  /** Cleanup hook invoked during node teardown; no-op by default. */
  _teardown(): Promise<void> | void;
}

/** A block of planar audio samples: one Float32Array per channel. */
interface AudioChunk {
  readonly samples: Array<Float32Array>;
  /** Frame offset of this chunk within the overall stream/buffer. */
  readonly offset: number;
  /** 0 when the producing buffer has not learned the sample rate yet. */
  readonly sampleRate: number;
  /** 0 when the producing buffer has not learned the bit depth yet. */
  readonly bitDepth: number;
}
type ExecutionProvider = "gpu" | "cpu-native" | "cpu";
/** Shared state passed down the graph while wiring streams together. */
interface StreamContext {
  readonly executionProviders: ReadonlyArray<ExecutionProvider>;
  readonly memoryLimit: number;
  /** Total source length in frames, when known up front. */
  readonly durationFrames?: number;
  readonly highWaterMark: number;
  readonly signal?: AbortSignal;
  // NOTE(review): purpose of `visited` inferred from its name/type (likely
  // cycle/duplicate-setup guard) — confirm against the setup implementation.
  readonly visited: Set<BufferedAudioNode>;
}
/** Caller-tunable knobs for a render; all optional. */
interface RenderOptions {
  readonly chunkSize?: number;
  readonly highWaterMark?: number;
  readonly memoryLimit?: number;
  readonly signal?: AbortSignal;
  readonly executionProviders?: ReadonlyArray<ExecutionProvider>;
}
interface BufferedAudioNodeProperties {
  readonly id?: string;
  /** Bypassed nodes are skipped: their children are promoted in their place. */
  readonly bypass?: boolean;
  readonly previousProperties?: BufferedAudioNodeProperties;
  readonly bufferSize?: number;
  readonly latency?: number;
  readonly children?: ReadonlyArray<BufferedAudioNode>;
}
type BufferedAudioNodeInput<P extends BufferedAudioNodeProperties = BufferedAudioNodeProperties> = P;
/** Base class for every node in the buffered audio graph. */
declare abstract class BufferedAudioNode<P extends BufferedAudioNodeProperties = BufferedAudioNodeProperties> {
  static readonly packageName: string;
  static readonly moduleName: string;
  static readonly moduleDescription: string;
  /** Zod schema describing this node's constructor parameters. */
  static readonly schema: z.ZodType;
  /** Type tag; index 0 is "buffered-audio-node", index 1 the node role. */
  abstract readonly type: ReadonlyArray<string>;
  static is(value: unknown): value is BufferedAudioNode;
  properties: P;
  get id(): string | undefined;
  /** Defaults to 0 when unset. */
  get bufferSize(): number;
  /** Defaults to 0 when unset. */
  get latency(): number;
  get isBypassed(): boolean;
  /** Children with bypassed nodes resolved away (replaced by their children). */
  get children(): ReadonlyArray<BufferedAudioNode>;
  /** Live streams created for the current render; cleared on teardown. */
  readonly streams: Array<BufferedStream>;
  constructor(properties?: P);
  abstract clone(overrides?: Partial<BufferedAudioNodeProperties>): BufferedAudioNode;
  /** Tears down this node, then its streams, then its children, in order. */
  teardown(): Promise<void>;
  _teardown(): Promise<void> | void;
}

/** UI hint describing how a file/folder parameter should be collected. */
interface FileInputMeta {
  readonly input: "file" | "folder";
  readonly mode?: "open" | "save";
  readonly accept?: string;
  readonly binary?: string;
  readonly download?: string;
}

type BufferStorage = "memory" | "file";
/**
 * Abstract store for planar audio frames. Tracks frame/channel counts and
 * validates that sample rate / bit depth stay consistent across appends.
 */
declare abstract class ChunkBuffer {
  protected _frames: number;
  protected _channels: number;
  private _sampleRate?;
  private _bitDepth?;
  get frames(): number;
  get channels(): number;
  get sampleRate(): number | undefined;
  get bitDepth(): number | undefined;
  setSampleRate(rate: number): void;
  setBitDepth(depth: number): void;
  /** Records metadata on first use; throws on a conflicting later value. */
  protected validateAndSetMetadata(sampleRate?: number, bitDepth?: number): void;
  /** Wraps planar samples in an AudioChunk; unknown metadata becomes 0. */
  protected buildAudioChunk(samples: Array<Float32Array>, offset: number): AudioChunk;
  abstract append(samples: Array<Float32Array>, sampleRate?: number, bitDepth?: number): Promise<void>;
  abstract read(offset: number, frames: number): Promise<AudioChunk>;
  abstract write(offset: number, samples: Array<Float32Array>): Promise<void>;
  abstract truncate(frames: number): Promise<void>;
  abstract iterate(chunkSize: number): AsyncGenerator<AudioChunk>;
  abstract reset(): Promise<void>;
  abstract close(): Promise<void>;
}

/**
 * Hybrid buffer: holds audio in memory and spills to an interleaved float32
 * temp file once the in-memory size crosses an internal threshold.
 */
declare class FileChunkBuffer extends ChunkBuffer {
  private tempPath?;
  private tempHandle?;
  private fileFramesWritten;
  private fileChannels;
  private memoryBuffer;
  private readonly storageThreshold;
  private readonly initialBufferSize;
  private readonly initialChannels;
  private flushed;
  constructor(bufferSize: number, channels: number, memoryLimit?: number);
  private syncMetadata;
  /** Path of the backing temp file, only once data has spilled to disk. */
  get filePath(): string | undefined;
  private ensureFileHandle;
  private fileOffset;
  private flushToFile;
  append(samples: Array<Float32Array>, sampleRate?: number, bitDepth?: number): Promise<void>;
  private appendFile;
  read(offset: number, frames: number): Promise<AudioChunk>;
  private readFile;
  write(offset: number, samples: Array<Float32Array>): Promise<void>;
  private writeFile;
  truncate(frames: number): Promise<void>;
  iterate(chunkSize: number): AsyncGenerator<AudioChunk>;
  reset(): Promise<void>;
  close(): Promise<void>;
}

/** Purely in-memory buffer backed by one growable Float32Array per channel. */
declare class MemoryChunkBuffer extends ChunkBuffer {
  private memoryChannels;
  private memoryWriteOffset;
  constructor(bufferSize: number, channels: number);
  append(samples: Array<Float32Array>, sampleRate?: number, bitDepth?: number): Promise<void>;
  read(offset: number, frames: number): Promise<AudioChunk>;
  write(offset: number, samples: Array<Float32Array>): Promise<void>;
  truncate(frames: number): Promise<void>;
  iterate(chunkSize: number): AsyncGenerator<AudioChunk>;
  reset(): Promise<void>;
  close(): Promise<void>;
  private appendMemory;
  private readMemory;
  private writeMemory;
}

/** Intrinsic properties reported by a source before any audio flows. */
interface SourceMetadata {
  readonly sampleRate: number;
  readonly channels: number;
  readonly durationFrames?: number;
}
/** Wall-clock statistics for a completed render. */
interface RenderTiming {
  readonly totalMs: number;
  readonly audioDurationMs: number;
  readonly realTimeMultiplier: number;
}
interface SourceNodeProperties extends BufferedAudioNodeProperties {
}
/** Stream that produces audio; subclasses implement _read()/_flush(). */
declare abstract class BufferedSourceStream<P extends SourceNodeProperties = SourceNodeProperties> extends BufferedStream<P> {
  private framesRead;
  abstract getMetadata(): Promise<SourceMetadata>;
  abstract _read(): Promise<AudioChunk | undefined>;
  abstract _flush(): Promise<void>;
  setup(context: StreamContext): Promise<ReadableStream<AudioChunk>>;
  _setup(context: StreamContext): Promise<ReadableStream<AudioChunk>>;
}
/** Graph entry point: produces audio and drives the render. */
declare abstract class SourceNode<P extends SourceNodeProperties = SourceNodeProperties> extends BufferedAudioNode<P> {
  static is(value: unknown): value is SourceNode;
  /** Connects a downstream node. */
  to(child: BufferedAudioNode): void;
  private renderTimingData?;
  /** Timing of the most recent render(), if one has completed. */
  get renderTiming(): RenderTiming | undefined;
  protected abstract createStream(): BufferedSourceStream<P>;
  getMetadata(): Promise<SourceMetadata>;
  setup(context: StreamContext): Promise<void>;
  private setupChildren;
  render(options?: RenderOptions): Promise<void>;
}

interface TargetNodeProperties extends BufferedAudioNodeProperties {
}
/**
 * Stream that consumes audio (graph sink). Emits `started` on the first
 * chunk, `progress` after each write, and `finished` once closed.
 */
declare abstract class BufferedTargetStream<P extends TargetNodeProperties = TargetNodeProperties> extends BufferedStream<P> {
  private hasStarted;
  private framesWritten;
  private sourceTotalFrames?;
  abstract _write(chunk: AudioChunk): Promise<void>;
  abstract _close(): Promise<void>;
  setup(readable: ReadableStream<AudioChunk>, context: StreamContext): Promise<void>;
  _setup(input: ReadableStream<AudioChunk>, _context: StreamContext): Promise<void>;
  private createWritableStream;
}
/** Graph sink node: owns exactly one target stream per setup() call. */
declare abstract class TargetNode<P extends TargetNodeProperties = TargetNodeProperties> extends BufferedAudioNode<P> {
  static is(value: unknown): value is TargetNode;
  abstract createStream(): BufferedTargetStream<P>;
  setup(readable: ReadableStream<AudioChunk>, context: StreamContext): Promise<Array<Promise<void>>>;
}

/** Sentinel bufferSize: buffer the entire input before processing. */
declare const WHOLE_FILE: number;
interface TransformNodeProperties extends BufferedAudioNodeProperties {
  readonly overlap?: number;
  readonly streamChunkSize?: number;
}
/**
 * Stream that rebuffers incoming chunks and lets subclasses hook
 * _buffer/_process/_unbuffer to transform audio in configurable windows.
 */
declare class BufferedTransformStream<P extends TransformNodeProperties = TransformNodeProperties> extends BufferedStream<P> {
  bufferSize: number;
  readonly overlap: number;
  processingMs: number;
  framesProcessed: number;
  private chunkBuffer?;
  private bufferOffset;
  private inferredChunkSize?;
  private hasStarted;
  protected streamChunkSize?: number;
  private sourceTotalFrames?;
  private memoryLimit?;
  constructor(properties: P);
  protected get sampleRate(): number | undefined;
  protected get bitDepth(): number | undefined;
  private get outputChunkSize();
  setup(input: ReadableStream<AudioChunk>, context: StreamContext): Promise<ReadableStream<AudioChunk>>;
  _setup(input: ReadableStream<AudioChunk>, _context: StreamContext): Promise<ReadableStream<AudioChunk>>;
  createTransformStream(): TransformStream<AudioChunk, AudioChunk>;
  private handleTransform;
  private handleFlush;
  private processAndEmit;
  private emitBuffer;
  _buffer(chunk: AudioChunk, buffer: ChunkBuffer): Promise<void> | void;
  _process(_buffer: ChunkBuffer): Promise<void> | void;
  _unbuffer(chunk: AudioChunk): Promise<AudioChunk | undefined> | AudioChunk | undefined;
}
/** Graph node wrapping a BufferedTransformStream. */
declare abstract class TransformNode<P extends TransformNodeProperties = TransformNodeProperties> extends BufferedAudioNode<P> {
  static is(value: unknown): value is TransformNode;
  to(child: BufferedAudioNode): void;
  abstract createStream(): BufferedTransformStream;
  setup(readable: ReadableStream<AudioChunk>, context: StreamContext): Promise<Array<Promise<void>>>;
  private setupChildren;
}

/** A transform implemented as an internal head→tail chain of nodes. */
declare abstract class CompositeNode extends TransformNode {
  abstract get head(): BufferedAudioNode;
  abstract get tail(): BufferedAudioNode;
  to(child: BufferedAudioNode): void;
  get children(): ReadonlyArray<BufferedAudioNode>;
  setup(readable: ReadableStream<AudioChunk>, context: StreamContext): Promise<Array<Promise<void>>>;
  render(options?: RenderOptions): Promise<void>;
}

/** Schema for one node entry in a serialized graph. */
declare const graphNodeSchema: z.ZodObject<{
  id: z.ZodString;
  packageName: z.ZodString;
  nodeName: z.ZodString;
  parameters: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
  options: z.ZodOptional<z.ZodObject<{
    bypass: z.ZodOptional<z.ZodBoolean>;
  }, z.core.$strip>>;
}, z.core.$strip>;
/** Schema for one directed edge (from → to, by node id). */
declare const graphEdgeSchema: z.ZodObject<{
  from: z.ZodString;
  to: z.ZodString;
}, z.core.$strip>;
/** Schema for a whole serialized graph: a name plus nodes and edges. */
declare const graphDefinitionSchema: z.ZodObject<{
  name: z.ZodDefault<z.ZodString>;
  nodes: z.ZodArray<z.ZodObject<{
    id: z.ZodString;
    packageName: z.ZodString;
    nodeName: z.ZodString;
    parameters: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
    options: z.ZodOptional<z.ZodObject<{
      bypass: z.ZodOptional<z.ZodBoolean>;
    }, z.core.$strip>>;
  }, z.core.$strip>>;
  edges: z.ZodArray<z.ZodObject<{
    from: z.ZodString;
    to: z.ZodString;
  }, z.core.$strip>>;
}, z.core.$strip>;
type GraphNode = z.infer<typeof graphNodeSchema>;
type GraphEdge = z.infer<typeof graphEdgeSchema>;
type GraphDefinition = z.infer<typeof graphDefinitionSchema>;
/** packageName → nodeName → node constructor. */
type NodeRegistry = Map<string, Map<string, new (options?: Record<string, unknown>) => BufferedAudioNode>>;
/** Validates unknown JSON as a GraphDefinition. */
declare function validateGraphDefinition(json: unknown): GraphDefinition;
/** Serializes a set of source graphs into a GraphDefinition. */
declare function pack(sources: ReadonlyArray<SourceNode>, name?: string): GraphDefinition;
/** Reconstructs source graphs from a definition using the registry. */
declare function unpack(definition: GraphDefinition, registry: NodeRegistry): Array<SourceNode>;
/** Renders a serialized graph using constructors from `registry`. */
declare function renderGraph(definition: GraphDefinition, registry: NodeRegistry, options?: RenderOptions): Promise<void>;

/** Fans one readable out to N consumers via chained tee(); returns [stream, item] pairs. */
declare function teeReadable<T>(readable: ReadableStream<AudioChunk>, items: ReadonlyArray<T>): Array<[ReadableStream<AudioChunk>, T]>;

export { type AudioChunk, type BufferStorage, BufferedAudioNode, type BufferedAudioNodeInput, type BufferedAudioNodeProperties, BufferedSourceStream, BufferedStream, BufferedTargetStream, BufferedTransformStream, ChunkBuffer, CompositeNode, type ExecutionProvider, FileChunkBuffer, type FileInputMeta, type GraphDefinition, type GraphEdge, type GraphNode, MemoryChunkBuffer, type NodeRegistry, type RenderOptions, type RenderTiming, type SourceMetadata, SourceNode, type SourceNodeProperties, type StreamContext, type StreamEventMap, TargetNode, type TargetNodeProperties, TransformNode, type TransformNodeProperties, WHOLE_FILE, pack, renderGraph, teeReadable, unpack, validateGraphDefinition };
package/dist/index.js ADDED
@@ -0,0 +1,912 @@
1
+ import { randomUUID } from 'crypto';
2
+ import { open, unlink } from 'fs/promises';
3
+ import { tmpdir } from 'os';
4
+ import { join } from 'path';
5
+ import { z } from 'zod';
6
+ import { EventEmitter } from 'events';
7
+
8
+ // src/buffer/index.ts
9
// Shared base for chunk buffers: tracks frame/channel counts plus the
// sample-rate / bit-depth metadata common to memory- and file-backed storage.
var ChunkBuffer = class {
  constructor() {
    this._frames = 0;
    this._channels = 0;
  }
  /** Number of frames currently stored. */
  get frames() {
    return this._frames;
  }
  /** Number of channels currently tracked. */
  get channels() {
    return this._channels;
  }
  /** Sample rate learned from appended data, if known yet. */
  get sampleRate() {
    return this._sampleRate;
  }
  /** Bit depth learned from appended data, if known yet. */
  get bitDepth() {
    return this._bitDepth;
  }
  setSampleRate(rate) {
    this._sampleRate = rate;
  }
  setBitDepth(depth) {
    this._bitDepth = depth;
  }
  // Records metadata on first sight and rejects conflicting values afterwards.
  validateAndSetMetadata(sampleRate, bitDepth) {
    if (sampleRate !== undefined) {
      if (this._sampleRate !== undefined && this._sampleRate !== sampleRate) {
        throw new Error(`ChunkBuffer: sample rate mismatch \u2014 expected ${this._sampleRate}, got ${sampleRate}`);
      }
      this._sampleRate = sampleRate;
    }
    if (bitDepth !== undefined) {
      if (this._bitDepth !== undefined && this._bitDepth !== bitDepth) {
        throw new Error(`ChunkBuffer: bit depth mismatch \u2014 expected ${this._bitDepth}, got ${bitDepth}`);
      }
      this._bitDepth = bitDepth;
    }
  }
  // Wraps planar channel data in an AudioChunk; unknown metadata becomes 0.
  buildAudioChunk(samples, offset) {
    return {
      samples,
      offset,
      sampleRate: this._sampleRate ?? 0,
      bitDepth: this._bitDepth ?? 0
    };
  }
};
52
+
53
+ // src/buffer/memory/index.ts
54
// In-memory chunk buffer: one growable Float32Array per channel, grown with
// an amortized-doubling strategy as frames are appended or written.
var MemoryChunkBuffer = class extends ChunkBuffer {
  constructor(bufferSize, channels) {
    super();
    this.memoryChannels = [];
    this.memoryWriteOffset = 0;
    this._channels = channels;
    // An unbounded buffer still needs a concrete starting capacity.
    const capacity = bufferSize === Infinity ? 44100 : bufferSize;
    for (let ch = 0; ch < channels; ch++) {
      this.memoryChannels.push(new Float32Array(capacity));
    }
  }
  // Appends `samples` (planar) at the current end; grows the channel list if
  // the incoming chunk carries more channels than seen so far.
  async append(samples, sampleRate, bitDepth) {
    const frameCount = samples[0]?.length ?? 0;
    if (frameCount === 0) return;
    this.validateAndSetMetadata(sampleRate, bitDepth);
    for (let ch = this._channels; ch < samples.length; ch++) {
      // New channels are zero-filled (silence) for frames already stored.
      this.memoryChannels.push(new Float32Array(this.memoryChannels[0]?.length ?? frameCount));
      this._channels = ch + 1;
    }
    this.appendMemory(samples);
    this._frames += frameCount;
  }
  // Reads up to `frames` frames, clamped to what is actually stored.
  async read(offset, frames) {
    const available = Math.min(frames, this._frames - offset);
    return available > 0 ? this.readMemory(offset, available) : this.buildAudioChunk([], offset);
  }
  // Random-access write; extends the frame count when writing past the end.
  async write(offset, samples) {
    const frameCount = samples[0]?.length ?? 0;
    if (frameCount === 0) return;
    for (let ch = this._channels; ch < samples.length; ch++) {
      this.memoryChannels.push(new Float32Array(this.memoryChannels[0]?.length ?? frameCount));
      this._channels = ch + 1;
    }
    this._frames = Math.max(this._frames, offset + frameCount);
    this.writeMemory(offset, samples);
  }
  // Drops frames past `frames`; storage is kept and simply overwritten later.
  async truncate(frames) {
    if (frames < this._frames) {
      this.memoryWriteOffset = frames;
      this._frames = frames;
    }
  }
  // Streams the buffered audio in chunks of at most `chunkSize` frames.
  async *iterate(chunkSize) {
    for (let offset = 0; offset < this._frames; ) {
      const step = Math.min(chunkSize, this._frames - offset);
      yield await this.read(offset, step);
      offset += step;
    }
  }
  // Logically empties the buffer without releasing the channel arrays.
  async reset() {
    this._frames = 0;
    this.memoryWriteOffset = 0;
  }
  // Releases all storage; the buffer reports zero frames/channels afterwards.
  async close() {
    this.memoryChannels = [];
    this.memoryWriteOffset = 0;
    this._frames = 0;
    this._channels = 0;
  }
  appendMemory(samples) {
    const endOffset = this.memoryWriteOffset + (samples[0]?.length ?? 0);
    samples.forEach((channel, ch) => {
      if (!channel) return;
      let target = this.memoryChannels[ch];
      if (!target) return;
      if (endOffset > target.length) {
        // Grow to at least the required size, doubling to amortize copies.
        const grown = new Float32Array(Math.max(endOffset, target.length * 2));
        grown.set(target.subarray(0, this.memoryWriteOffset));
        this.memoryChannels[ch] = grown;
        target = grown;
      }
      target.set(channel, this.memoryWriteOffset);
    });
    this.memoryWriteOffset = endOffset;
  }
  readMemory(offset, frames) {
    // Channels missing a backing array come back as silence.
    const samples = Array.from({ length: this._channels }, (_, ch) => {
      const source = this.memoryChannels[ch];
      return source ? source.slice(offset, offset + frames) : new Float32Array(frames);
    });
    return this.buildAudioChunk(samples, offset);
  }
  writeMemory(offset, samples) {
    samples.forEach((source, ch) => {
      if (!source) return;
      let target = this.memoryChannels[ch];
      if (!target) return;
      const end = offset + source.length;
      if (end > target.length) {
        const grown = new Float32Array(Math.max(end, target.length * 2));
        grown.set(target.subarray(0, this.memoryWriteOffset));
        this.memoryChannels[ch] = grown;
        target = grown;
      }
      target.set(source, offset);
      if (end > this.memoryWriteOffset) {
        this.memoryWriteOffset = end;
      }
    });
  }
};
167
+
168
+ // src/buffer/file/index.ts
169
// Default spill threshold: keep up to 10 MiB of samples in memory.
var DEFAULT_STORAGE_THRESHOLD = 10 * 1024 * 1024;
// Hybrid chunk buffer: accumulates audio in a MemoryChunkBuffer and, once the
// in-memory footprint exceeds `storageThreshold` bytes, flushes everything to
// an interleaved float32 temp file and keeps reading/writing there.
var FileChunkBuffer = class extends ChunkBuffer {
  constructor(bufferSize, channels, memoryLimit) {
    super();
    this.fileFramesWritten = 0;
    this.flushed = false;
    // 4% of the caller's memory limit, clamped to [1 MiB, 64 MiB]; falls back
    // to the fixed default when no limit is given.
    this.storageThreshold = memoryLimit ? Math.max(1024 * 1024, Math.min(memoryLimit * 0.04, 64 * 1024 * 1024)) : DEFAULT_STORAGE_THRESHOLD;
    this._channels = channels;
    this.fileChannels = channels;
    // Remembered so reset() can rebuild an equivalent empty memory buffer.
    this.initialBufferSize = bufferSize;
    this.initialChannels = channels;
    this.memoryBuffer = new MemoryChunkBuffer(bufferSize, channels);
  }
  // Mirrors the in-memory buffer's counters/metadata onto this wrapper.
  syncMetadata() {
    this._frames = this.memoryBuffer.frames;
    this._channels = this.memoryBuffer.channels;
    if (this.memoryBuffer.sampleRate) this.setSampleRate(this.memoryBuffer.sampleRate);
    if (this.memoryBuffer.bitDepth) this.setBitDepth(this.memoryBuffer.bitDepth);
  }
  // Path of the backing temp file; undefined until data has spilled to disk.
  get filePath() {
    return this.flushed ? this.tempPath : void 0;
  }
  // Lazily creates the temp file; pins the file's channel stride to the
  // channel count at creation time.
  async ensureFileHandle() {
    if (!this.tempHandle) {
      this.tempPath = join(tmpdir(), `chunk-buffer-${randomUUID()}.bin`);
      this.tempHandle = await open(this.tempPath, "w+");
      this.fileFramesWritten = 0;
      this.fileChannels = this._channels;
    }
    return this.tempHandle;
  }
  // Byte offset of (frame, channel) in the interleaved float32 file.
  fileOffset(frame, channel) {
    return (frame * this.fileChannels + channel) * 4;
  }
  // Writes the whole memory buffer to disk as interleaved float32 frames.
  async flushToFile() {
    const channels = this.memoryBuffer.channels;
    const frames = this.memoryBuffer.frames;
    const handle = await this.ensureFileHandle();
    let filePos = 0;
    for await (const chunk of this.memoryBuffer.iterate(8192)) {
      const chunkFrames = chunk.samples[0]?.length ?? 0;
      const interleaved = new Float32Array(chunkFrames * channels);
      for (let frame = 0; frame < chunkFrames; frame++) {
        const base = frame * channels;
        for (let ch = 0; ch < channels; ch++) {
          interleaved[base + ch] = chunk.samples[ch]?.[frame] ?? 0;
        }
      }
      const buf = Buffer.from(interleaved.buffer, interleaved.byteOffset, interleaved.byteLength);
      await handle.write(buf, 0, buf.length, filePos);
      filePos += buf.length;
    }
    this.fileFramesWritten = frames;
    this.flushed = true;
  }
  // Appends planar samples, spilling to disk once the threshold is crossed.
  async append(samples, sampleRate, bitDepth) {
    const duration = samples[0]?.length ?? 0;
    if (duration === 0) return;
    if (this.flushed) {
      this.validateAndSetMetadata(sampleRate, bitDepth);
      // NOTE(review): growing the channel count after the flush desyncs
      // `_channels` from the file's `fileChannels` stride — appendFile
      // interleaves wider rows than readFile expects. Confirm callers never
      // add channels post-flush.
      this._channels = Math.max(this._channels, samples.length);
      await this.appendFile(samples, duration);
    } else {
      await this.memoryBuffer.append(samples, sampleRate, bitDepth);
      this.syncMetadata();
      if (this.memoryBuffer.frames * this.memoryBuffer.channels * 4 > this.storageThreshold) {
        await this.flushToFile();
      }
    }
  }
  // Interleaves and appends `duration` frames at the current end of file.
  async appendFile(samples, duration) {
    const handle = await this.ensureFileHandle();
    const channels = this._channels;
    const interleaved = new Float32Array(duration * channels);
    for (let frame = 0; frame < duration; frame++) {
      const base = frame * channels;
      for (let ch = 0; ch < channels; ch++) {
        interleaved[base + ch] = samples[ch]?.[frame] ?? 0;
      }
    }
    const buf = Buffer.from(interleaved.buffer, interleaved.byteOffset, interleaved.byteLength);
    const offset = this.fileOffset(this.fileFramesWritten, 0);
    await handle.write(buf, 0, buf.length, offset);
    this.fileFramesWritten += duration;
    this._frames += duration;
  }
  // Reads up to `frames` frames, clamped to what is actually stored.
  async read(offset, frames) {
    const actualFrames = Math.min(frames, this._frames - offset);
    if (actualFrames <= 0) {
      return this.buildAudioChunk([], offset);
    }
    if (this.flushed) {
      return this.readFile(offset, actualFrames);
    }
    return this.memoryBuffer.read(offset, actualFrames);
  }
  // De-interleaves `actualFrames` frames from the temp file into planar form.
  async readFile(offset, actualFrames) {
    if (!this.tempHandle) {
      return this.buildAudioChunk([], offset);
    }
    const channels = this.fileChannels;
    const byteLength = actualFrames * channels * 4;
    const buf = Buffer.alloc(byteLength);
    const filePos = this.fileOffset(offset, 0);
    await this.tempHandle.read(buf, 0, byteLength, filePos);
    const interleaved = new Float32Array(buf.buffer, buf.byteOffset, actualFrames * channels);
    const samples = [];
    // Channels added after the flush come back as silence.
    for (let ch = 0; ch < this._channels; ch++) {
      samples.push(new Float32Array(actualFrames));
    }
    for (let frame = 0; frame < actualFrames; frame++) {
      const base = frame * channels;
      for (let ch = 0; ch < channels; ch++) {
        const channel = samples[ch];
        if (channel) {
          channel[frame] = interleaved[base + ch];
        }
      }
    }
    return this.buildAudioChunk(samples, offset);
  }
  // Random-access write; extends the buffer when writing past the end.
  async write(offset, samples) {
    const duration = samples[0]?.length ?? 0;
    if (duration === 0) return;
    if (this.flushed) {
      this._channels = Math.max(this._channels, samples.length);
      const endFrame = offset + duration;
      if (endFrame > this._frames) {
        this._frames = endFrame;
      }
      await this.writeFile(offset, samples, duration);
    } else {
      await this.memoryBuffer.write(offset, samples);
      this.syncMetadata();
      if (this.memoryBuffer.frames * this.memoryBuffer.channels * 4 > this.storageThreshold) {
        await this.flushToFile();
      }
    }
  }
  // Interleaves and writes `duration` frames at an arbitrary frame offset.
  async writeFile(offset, samples, duration) {
    const handle = await this.ensureFileHandle();
    const channels = this._channels;
    const interleaved = new Float32Array(duration * channels);
    for (let frame = 0; frame < duration; frame++) {
      const base = frame * channels;
      for (let ch = 0; ch < channels; ch++) {
        interleaved[base + ch] = samples[ch]?.[frame] ?? 0;
      }
    }
    const buf = Buffer.from(interleaved.buffer, interleaved.byteOffset, interleaved.byteLength);
    const filePos = this.fileOffset(offset, 0);
    await handle.write(buf, 0, buf.length, filePos);
    if (offset + duration > this.fileFramesWritten) {
      this.fileFramesWritten = offset + duration;
    }
  }
  // Drops every frame past `frames`; a no-op when already short enough.
  async truncate(frames) {
    if (frames >= this._frames) return;
    if (this.flushed) {
      if (this.tempHandle) {
        await this.tempHandle.truncate(frames * this.fileChannels * 4);
        this.fileFramesWritten = frames;
      }
      this._frames = frames;
    } else {
      await this.memoryBuffer.truncate(frames);
      this.syncMetadata();
    }
  }
  // Streams the buffered audio in chunks of at most `chunkSize` frames.
  async *iterate(chunkSize) {
    if (!this.flushed) {
      yield* this.memoryBuffer.iterate(chunkSize);
      return;
    }
    let offset = 0;
    while (offset < this._frames) {
      const frames = Math.min(chunkSize, this._frames - offset);
      const chunk = await this.readFile(offset, frames);
      yield chunk;
      offset += frames;
    }
  }
  // Discards all data (and any temp file) and returns to the empty
  // in-memory state the buffer was constructed with.
  async reset() {
    if (this.flushed) {
      if (this.tempHandle) {
        await this.tempHandle.close();
        this.tempHandle = void 0;
      }
      if (this.tempPath) {
        await unlink(this.tempPath).catch(() => void 0);
        this.tempPath = void 0;
      }
      this.fileFramesWritten = 0;
    }
    this.memoryBuffer = new MemoryChunkBuffer(this.initialBufferSize, this.initialChannels);
    this.flushed = false;
    this._frames = 0;
    // FIX: restore the constructor-time channel count. Previously a grown
    // `_channels` survived reset() while the fresh memory buffer was rebuilt
    // with `initialChannels`, leaving `channels` inconsistent with the actual
    // storage until the next append() resynced it.
    this._channels = this.initialChannels;
  }
  // Releases the temp file and all memory; the buffer is empty afterwards.
  async close() {
    if (this.tempHandle) {
      await this.tempHandle.close();
      this.tempHandle = void 0;
    }
    if (this.tempPath) {
      await unlink(this.tempPath).catch(() => void 0);
      this.tempPath = void 0;
    }
    await this.memoryBuffer.close();
    this.flushed = false;
    this._frames = 0;
    this._channels = 0;
    this.fileFramesWritten = 0;
  }
};
387
// Base runtime node: holds a shallow copy of its properties, tracks live
// streams, and resolves bypassed children out of the audio graph.
var BufferedAudioNode = class {
  constructor(properties) {
    this.streams = [];
    this.properties = { ...properties };
  }
  // Structural check: any object whose type tag starts "buffered-audio-node".
  static is(value) {
    if (typeof value !== "object" || value === null) return false;
    if (!("type" in value) || !Array.isArray(value.type)) return false;
    return value.type[0] === "buffered-audio-node";
  }
  get id() {
    return this.properties.id;
  }
  get bufferSize() {
    return this.properties.bufferSize ?? 0;
  }
  get latency() {
    return this.properties.latency ?? 0;
  }
  get isBypassed() {
    return this.properties.bypass === true;
  }
  // Bypassed children are transparent: their own (resolved) children take
  // their place in the returned list.
  get children() {
    const declared = this.properties.children ?? [];
    return declared.flatMap((child) => child.isBypassed ? [...child.children] : [child]);
  }
  // Tears down this node, then its streams, then every resolved child.
  async teardown() {
    await this._teardown();
    for (const stream of this.streams) {
      await stream._teardown();
    }
    this.streams.length = 0;
    for (const child of this.children) {
      await child.teardown();
    }
  }
  // Subclass hook; default is a no-op.
  _teardown() {
    return;
  }
};
435
// Default static metadata; concrete node classes override these.
Object.assign(BufferedAudioNode, {
  moduleDescription: "",
  schema: z.object({})
});
437
// Base class for node-owned streams: stores the owning node's properties and
// exposes an event channel for started / progress / finished notifications.
var BufferedStream = class {
  constructor(properties) {
    this.properties = properties;
    this.events = new EventEmitter();
  }
  // Cleanup hook invoked by the owning node's teardown(); no-op by default.
  _teardown() {
    return;
  }
};
446
+
447
+ // src/target.ts
448
// Terminal stream: consumes AudioChunks through a WritableStream, emitting
// `started` on the first chunk, `progress` after every write, and `finished`
// once the pipeline closes.
var BufferedTargetStream = class extends BufferedStream {
  constructor() {
    super(...arguments);
    this.hasStarted = false;
    this.framesWritten = 0;
  }
  // Captures the pipeline's total frame count (when known) for progress events.
  setup(readable, context) {
    this.sourceTotalFrames = context.durationFrames;
    return this._setup(readable, context);
  }
  async _setup(input, _context) {
    return input.pipeTo(this.createWritableStream());
  }
  createWritableStream() {
    this.hasStarted = false;
    this.framesWritten = 0;
    const write = async (chunk) => {
      if (!this.hasStarted) {
        this.hasStarted = true;
        this.events.emit("started");
      }
      await this._write(chunk);
      this.framesWritten += chunk.samples[0]?.length ?? 0;
      this.events.emit("progress", {
        framesProcessed: this.framesWritten,
        sourceTotalFrames: this.sourceTotalFrames
      });
    };
    const close = async () => {
      await this._close();
      this.events.emit("finished");
    };
    return new WritableStream({ write, close });
  }
};
481
// Graph sink node: creates and registers exactly one target stream per
// setup() call, returning its completion promise.
var TargetNode = class extends BufferedAudioNode {
  static is(value) {
    if (!BufferedAudioNode.is(value)) return false;
    return value.type[1] === "target";
  }
  setup(readable, context) {
    const stream = this.createStream();
    this.streams.push(stream);
    const completion = stream.setup(readable, context);
    return Promise.resolve([completion]);
  }
};
491
+
492
+ // src/utils/tee-readable.ts
493
// Fans one ReadableStream out to N consumers by chaining tee() calls: each of
// the first N-1 items gets a fresh branch, and the last item receives the
// remainder of the chain. Returns [stream, item] pairs in item order.
function teeReadable(readable, items) {
  switch (items.length) {
    case 0:
      return [];
    case 1:
      return [[readable, items[0]]];
    default: {
      const result = [];
      let remainder = readable;
      items.slice(0, -1).forEach((item) => {
        const [branch, rest] = remainder.tee();
        result.push([branch, item]);
        remainder = rest;
      });
      result.push([remainder, items[items.length - 1]]);
      return result;
    }
  }
}
507
+
508
// src/transform.ts
// Sentinel bufferSize: buffer the entire input before processing.
var WHOLE_FILE = Infinity;
510
+ var BufferedTransformStream = class extends BufferedStream {
511
+ constructor(properties) {
512
+ super(properties);
513
+ this.processingMs = 0;
514
+ this.framesProcessed = 0;
515
+ this.bufferOffset = 0;
516
+ this.hasStarted = false;
517
+ this.bufferSize = properties.bufferSize ?? 0;
518
+ this.overlap = properties.overlap ?? 0;
519
+ this.streamChunkSize = properties.streamChunkSize;
520
+ }
521
+ get sampleRate() {
522
+ return this.chunkBuffer?.sampleRate;
523
+ }
524
+ get bitDepth() {
525
+ return this.chunkBuffer?.bitDepth;
526
+ }
527
+ get outputChunkSize() {
528
+ return this.streamChunkSize ?? this.inferredChunkSize ?? 44100;
529
+ }
530
+ setup(input, context) {
531
+ this.sourceTotalFrames = context.durationFrames;
532
+ this.memoryLimit = context.memoryLimit;
533
+ return this._setup(input, context);
534
+ }
535
+ // eslint-disable-next-line @typescript-eslint/require-await
536
+ async _setup(input, _context) {
537
+ return input.pipeThrough(this.createTransformStream());
538
+ }
539
+ createTransformStream() {
540
+ return new TransformStream({
541
+ transform: (chunk, controller) => this.handleTransform(chunk, controller),
542
+ flush: (controller) => this.handleFlush(controller)
543
+ });
544
+ }
545
+ async handleTransform(chunk, controller) {
546
+ if (!this.hasStarted) {
547
+ this.hasStarted = true;
548
+ this.events.emit("started");
549
+ }
550
+ const chunkFrames = chunk.samples[0]?.length ?? 0;
551
+ this.inferredChunkSize ?? (this.inferredChunkSize = chunkFrames);
552
+ const channels = chunk.samples.length;
553
+ this.chunkBuffer ?? (this.chunkBuffer = new FileChunkBuffer(this.bufferSize, channels, this.memoryLimit));
554
+ const samplesIn = chunkFrames;
555
+ const start = performance.now();
556
+ await this._buffer(chunk, this.chunkBuffer);
557
+ if (this.bufferSize === 0) {
558
+ await this.emitBuffer(controller);
559
+ this.processingMs += performance.now() - start;
560
+ this.framesProcessed += samplesIn;
561
+ this.events.emit("progress", { framesProcessed: this.framesProcessed, sourceTotalFrames: this.sourceTotalFrames });
562
+ return;
563
+ }
564
+ if (this.bufferSize !== WHOLE_FILE && this.chunkBuffer.frames >= this.bufferSize) {
565
+ await this.processAndEmit(controller);
566
+ this.events.emit("progress", { framesProcessed: this.framesProcessed, sourceTotalFrames: this.sourceTotalFrames });
567
+ } else {
568
+ this.processingMs += performance.now() - start;
569
+ this.framesProcessed += samplesIn;
570
+ this.events.emit("progress", { framesProcessed: this.framesProcessed, sourceTotalFrames: this.sourceTotalFrames });
571
+ }
572
+ }
573
+ async handleFlush(controller) {
574
+ if (!this.chunkBuffer || this.chunkBuffer.frames === 0) {
575
+ this.events.emit("finished");
576
+ return;
577
+ }
578
+ if (this.bufferSize === 0) {
579
+ await this.chunkBuffer.close();
580
+ this.events.emit("finished");
581
+ return;
582
+ }
583
+ try {
584
+ await this.processAndEmit(controller);
585
+ } finally {
586
+ await this.chunkBuffer.close();
587
+ this.chunkBuffer = void 0;
588
+ }
589
+ this.events.emit("finished");
590
+ }
591
+ async processAndEmit(controller) {
592
+ if (!this.chunkBuffer) return;
593
+ const samplesBeforeProcess = this.chunkBuffer.frames;
594
+ const start = performance.now();
595
+ await this._process(this.chunkBuffer);
596
+ await this.emitBuffer(controller);
597
+ this.processingMs += performance.now() - start;
598
+ this.framesProcessed += samplesBeforeProcess;
599
+ }
600
+ async emitBuffer(controller) {
601
+ if (!this.chunkBuffer) return;
602
+ const emitSize = this.bufferSize === 0 ? this.chunkBuffer.frames : this.outputChunkSize;
603
+ for await (const chunk of this.chunkBuffer.iterate(emitSize)) {
604
+ const adjusted = {
605
+ samples: chunk.samples,
606
+ offset: this.bufferOffset + chunk.offset,
607
+ sampleRate: chunk.sampleRate,
608
+ bitDepth: chunk.bitDepth
609
+ };
610
+ const result = await this._unbuffer(adjusted);
611
+ if (result) {
612
+ controller.enqueue(result);
613
+ }
614
+ }
615
+ this.bufferOffset += this.chunkBuffer.frames;
616
+ if (this.overlap > 0 && this.bufferSize !== WHOLE_FILE) {
617
+ const overlapStart = this.chunkBuffer.frames - this.overlap;
618
+ if (overlapStart > 0) {
619
+ const overlapChunk = await this.chunkBuffer.read(overlapStart, this.overlap);
620
+ await this.chunkBuffer.reset();
621
+ await this.chunkBuffer.append(overlapChunk.samples, overlapChunk.sampleRate, overlapChunk.bitDepth);
622
+ this.bufferOffset -= this.overlap;
623
+ }
624
+ } else {
625
+ await this.chunkBuffer.reset();
626
+ }
627
+ }
628
+ _buffer(chunk, buffer) {
629
+ return buffer.append(chunk.samples, chunk.sampleRate, chunk.bitDepth);
630
+ }
631
+ _process(_buffer) {
632
+ return;
633
+ }
634
+ _unbuffer(chunk) {
635
+ return chunk;
636
+ }
637
+ };
638
// A mid-pipeline node: wraps a BufferedTransformStream and fans its output
// out to any number of downstream children.
var TransformNode = class _Self extends BufferedAudioNode {
  // Type guard: true when `value` is a BufferedAudioNode of kind "transform".
  static is(value) {
    return BufferedAudioNode.is(value) && value.type[1] === "transform";
  }
  // Connect a downstream node (immutably appends to properties.children).
  to(child) {
    const existing = this.properties.children ?? [];
    this.properties = { ...this.properties, children: [...existing, child] };
  }
  // Create this node's stream, run it over `readable`, then wire children.
  async setup(readable, context) {
    const transformStream = this.createStream();
    this.streams.push(transformStream);
    const transformed = await transformStream.setup(readable, context);
    return this.setupChildren(transformed, context);
  }
  // Tee the transformed readable across children, guarding against cycles,
  // and set each child up on its own branch. Resolves to the flattened list
  // of downstream results.
  async setupChildren(readable, context) {
    const branches = teeReadable(readable, this.children);
    const pending = [];
    for (const [branch, child] of branches) {
      pending.push((async () => {
        if (context.visited.has(child)) throw new Error("Cycle detected in node graph");
        context.visited.add(child);
        if (child instanceof _Self || child instanceof TargetNode) return child.setup(branch, context);
        throw new Error(`Unknown child node type`);
      })());
    }
    const nested = await Promise.all(pending);
    return nested.flat();
  }
};
665
+
666
// src/source.ts

// Base class for source stages: adapts the subclass's pull-based _read()
// into a backpressured ReadableStream of AudioChunks, emitting "progress"
// events and honouring an optional AbortSignal.
var BufferedSourceStream = class extends BufferedStream {
  constructor() {
    super(...arguments);
    // Frames handed downstream so far; reset on every _setup().
    this.framesRead = 0;
  }
  setup(context) {
    return this._setup(context);
  }
  // eslint-disable-next-line @typescript-eslint/require-await
  async _setup(context) {
    const { signal, durationFrames: sourceTotalFrames, highWaterMark } = context;
    let exhausted = false;
    this.framesRead = 0;
    const underlyingSource = {
      pull: async (controller) => {
        if (exhausted) return;
        // Abort closes the stream cleanly rather than erroring it.
        if (signal?.aborted) {
          exhausted = true;
          controller.close();
          return;
        }
        try {
          const next = await this._read();
          // A falsy read result means end-of-source: flush, then close.
          if (!next) {
            exhausted = true;
            await this._flush();
            controller.close();
            return;
          }
          this.framesRead += next.samples[0]?.length ?? 0;
          controller.enqueue(next);
          this.events.emit("progress", { framesProcessed: this.framesRead, sourceTotalFrames });
        } catch (cause) {
          // Propagate read failures to consumers and stop pulling.
          exhausted = true;
          controller.error(cause);
        }
      },
      cancel: () => {
        exhausted = true;
      }
    };
    return new ReadableStream(underlyingSource, { highWaterMark });
  }
};
713
// A pipeline root: owns a BufferedSourceStream and drives the whole graph
// below it via render().
var SourceNode = class extends BufferedAudioNode {
  // Type guard: true when `value` is a BufferedAudioNode of kind "source".
  static is(value) {
    return BufferedAudioNode.is(value) && value.type[1] === "source";
  }
  // Connect a downstream node (immutably appends to properties.children).
  to(child) {
    this.properties = { ...this.properties, children: [...this.properties.children ?? [], child] };
  }
  // Timing stats of the most recent render(); undefined before first render.
  get renderTiming() {
    return this.renderTimingData;
  }
  // NOTE(review): this creates a throwaway stream that is never added to
  // this.streams, so teardown() will not reach it — confirm that
  // createStream()/getMetadata() hold no resources needing _teardown().
  async getMetadata() {
    const stream = this.createStream();
    return stream.getMetadata();
  }
  // Build the source readable and fan it out to all children; resolves when
  // every downstream pipeline has been set up and completed.
  async setup(context) {
    const stream = this.createStream();
    this.streams.push(stream);
    const readable = await stream.setup(context);
    const promises = await this.setupChildren(readable, context);
    await Promise.all(promises);
  }
  // Tee the readable across children (cycle-checked via context.visited) and
  // set each child up on its own branch.
  async setupChildren(readable, context) {
    const resolved = this.children;
    const pairs = teeReadable(readable, resolved);
    const nested = await Promise.all(
      pairs.map(async ([stream, child]) => {
        if (context.visited.has(child)) throw new Error("Cycle detected in node graph");
        context.visited.add(child);
        if (child instanceof TargetNode || child instanceof TransformNode) return child.setup(stream, context);
        throw new Error(`Unknown child node type`);
      })
    );
    return nested.flat();
  }
  // Render the full pipeline rooted at this source. Derives a backpressure
  // highWaterMark from the memory budget divided across pipeline stages and
  // per-chunk bytes, runs setup, then records timing and tears down — the
  // finally block guarantees timing + teardown even when setup throws.
  async render(options) {
    const defaultProviders = ["gpu", "cpu-native", "cpu"];
    // Default memory budget: 256 MiB.
    const memoryLimit = options?.memoryLimit ?? 256 * 1024 * 1024;
    const meta = await this.getMetadata();
    const stages = Math.max(1, countNodes(this));
    const chunkSize = options?.chunkSize ?? 128 * 1024;
    // 4 bytes per sample (Float32) per channel per frame.
    const bytesPerChunk = meta.channels * chunkSize * 4;
    const computedHighWaterMark = Math.max(1, Math.floor(memoryLimit / (stages * bytesPerChunk)));
    const context = {
      executionProviders: options?.executionProviders ?? defaultProviders,
      memoryLimit,
      durationFrames: meta.durationFrames,
      highWaterMark: options?.highWaterMark ?? computedHighWaterMark,
      signal: options?.signal,
      visited: /* @__PURE__ */ new Set()
    };
    const start = performance.now();
    try {
      await this.setup(context);
    } finally {
      const totalMs = performance.now() - start;
      // realTimeMultiplier > 1 means faster than real time; 0 when the
      // source duration is unknown.
      const audioDurationMs = meta.durationFrames !== void 0 ? meta.durationFrames / meta.sampleRate * 1e3 : 0;
      this.renderTimingData = {
        totalMs,
        audioDurationMs,
        realTimeMultiplier: audioDurationMs > 0 ? audioDurationMs / totalMs : 0
      };
      await this.teardown();
    }
  }
};
778
+ function countNodes(node) {
779
+ let count = 0;
780
+ for (const child of node.children) {
781
+ count += 1 + countNodes(child);
782
+ }
783
+ return count;
784
+ }
785
+
786
// src/composite.ts

// Wraps a head..tail chain of nodes so it can be treated as a single node.
// Behaves like its head: a transform-headed composite can be spliced into a
// pipeline; a source-headed composite is a complete, renderable pipeline.
var CompositeNode = class extends TransformNode {
  // Connect `child` after the composite's tail. Rejected when the tail is a
  // target node, since nothing can come after a sink.
  to(child) {
    const tailIsConnectable = this.tail instanceof SourceNode || this.tail instanceof TransformNode;
    if (!tailIsConnectable) {
      throw new Error("Cannot connect downstream from a TargetNode \u2014 this composite is a complete pipeline");
    }
    this.tail.to(child);
  }
  // The composite exposes only its head as a child.
  get children() {
    return [this.head];
  }
  // Splice the composite into a pipeline; only valid for transform heads.
  async setup(readable, context) {
    if (this.head instanceof TransformNode) {
      return this.head.setup(readable, context);
    }
    throw new Error("Cannot setup a composite whose head is a SourceNode \u2014 use render() for complete pipelines");
  }
  // Render the composite as a full pipeline; only valid for source heads.
  async render(options) {
    if (this.head instanceof SourceNode) {
      return this.head.render(options);
    }
    throw new Error("Cannot render a composite whose head is not a SourceNode");
  }
};
810
// Serialized-graph wire format (zod schemas). A graph definition is a flat
// list of node records plus directed edges between node ids.

// One node instance: which package/class to construct, with what parameters.
var graphNodeSchema = z.object({
  id: z.string().min(1),
  packageName: z.string().min(1),
  nodeName: z.string().min(1),
  parameters: z.record(z.string(), z.unknown()).optional(),
  options: z.object({
    bypass: z.boolean().optional()
  }).optional()
});
// A directed connection: audio flows from node `from` to node `to`.
var graphEdgeSchema = z.object({
  from: z.string().min(1),
  to: z.string().min(1)
});
// Top-level graph document; `name` defaults to "Untitled" when omitted.
var graphDefinitionSchema = z.object({
  name: z.string().default("Untitled"),
  nodes: z.array(graphNodeSchema),
  edges: z.array(graphEdgeSchema)
});
828
// Validate an untrusted JSON value against the graph-definition schema.
// Returns the parsed definition; throws a ZodError on mismatch.
function validateGraphDefinition(json) {
  const definition = graphDefinitionSchema.parse(json);
  return definition;
}
831
// Serialize one or more source-rooted node graphs into a plain graph
// definition. Any node lacking an id is assigned a fresh one (its properties
// object is replaced to record it); each reachable node is recorded exactly
// once along with a parent->child edge per connection.
function pack(sources, name) {
  const seen = new Set();
  const nodeDefs = [];
  const edgeDefs = [];
  // Return the node's id, minting and storing a new UUID when absent.
  const idOf = (node) => {
    if (node.id) return node.id;
    const freshId = randomUUID();
    node.properties = { ...node.properties, id: freshId };
    return freshId;
  };
  // Depth-first walk; `seen` prevents revisiting shared or cyclic nodes.
  const visit = (node) => {
    if (seen.has(node)) return;
    seen.add(node);
    const ctor = node.constructor;
    const nodeId = idOf(node);
    // The class's own schema strips properties down to its parameters.
    const parameters = ctor.schema.parse(node.properties);
    const options = {};
    if (node.isBypassed) options.bypass = true;
    const entry = {
      id: nodeId,
      packageName: ctor.packageName,
      nodeName: ctor.moduleName,
      ...(Object.keys(parameters).length > 0 ? { parameters } : {}),
      ...(Object.keys(options).length > 0 ? { options } : {})
    };
    nodeDefs.push(entry);
    for (const child of node.properties.children ?? []) {
      edgeDefs.push({ from: nodeId, to: idOf(child) });
      visit(child);
    }
  };
  for (const source of sources) visit(source);
  // Final parse validates the assembled document and applies defaults.
  return graphDefinitionSchema.parse({ name: name ?? "Untitled", nodes: nodeDefs, edges: edgeDefs });
}
868
// Reconstruct live node instances from a graph definition.
// `registry` maps package name -> (node name -> node class). Instantiates
// every node, wires the edges, and returns the root SourceNodes (nodes no
// edge points at). Throws on unknown packages/nodes/ids, on edges leaving a
// target node, and when the graph contains no root source.
function unpack(definition, registry) {
  const instances = new Map();
  // Phase 1: construct every node from its registered class.
  for (const def of definition.nodes) {
    const packageModules = registry.get(def.packageName);
    if (!packageModules) throw new Error(`Unknown package: "${def.packageName}"`);
    const NodeClass = packageModules.get(def.nodeName);
    if (!NodeClass) throw new Error(`Unknown node: "${def.nodeName}" in package "${def.packageName}"`);
    const node = new NodeClass(def.parameters);
    if (def.options?.bypass) {
      node.properties = { ...node.properties, bypass: def.options.bypass };
    }
    instances.set(def.id, node);
  }
  // Phase 2: connect parents to children along each edge.
  for (const { from, to } of definition.edges) {
    const parent = instances.get(from);
    const child = instances.get(to);
    if (!parent) throw new Error(`Edge references unknown node: "${from}"`);
    if (!child) throw new Error(`Edge references unknown node: "${to}"`);
    if (parent instanceof SourceNode || parent instanceof TransformNode) {
      parent.to(child);
    } else {
      throw new Error(`Cannot connect from target node "${from}"`);
    }
  }
  // Phase 3: roots are nodes never named as an edge target; keep sources.
  const referenced = new Set(definition.edges.map((edge) => edge.to));
  const roots = [];
  for (const def of definition.nodes) {
    if (referenced.has(def.id)) continue;
    const candidate = instances.get(def.id);
    if (SourceNode.is(candidate)) {
      roots.push(candidate);
    }
  }
  if (roots.length === 0) {
    throw new Error("No source nodes found in graph definition");
  }
  return roots;
}
907
// Deserialize `definition` via `registry` and render every root source
// pipeline concurrently; resolves when all of them finish.
async function renderGraph(definition, registry, options) {
  const pending = [];
  for (const root of unpack(definition, registry)) {
    pending.push(root.render(options));
  }
  await Promise.all(pending);
}
911
+
912
+ export { BufferedAudioNode, BufferedSourceStream, BufferedStream, BufferedTargetStream, BufferedTransformStream, ChunkBuffer, CompositeNode, FileChunkBuffer, MemoryChunkBuffer, SourceNode, TargetNode, TransformNode, WHOLE_FILE, pack, renderGraph, teeReadable, unpack, validateGraphDefinition };
package/package.json ADDED
@@ -0,0 +1,39 @@
1
+ {
2
+ "name": "@e9g/buffered-audio-nodes-core",
3
+ "version": "0.1.0",
4
+ "type": "module",
5
+ "exports": {
6
+ ".": {
7
+ "import": "./dist/index.js",
8
+ "types": "./dist/index.d.ts"
9
+ }
10
+ },
11
+ "files": [
12
+ "dist"
13
+ ],
14
+ "sideEffects": false,
15
+ "publishConfig": {
16
+ "access": "public"
17
+ },
18
+ "author": "Matt Cavender",
19
+ "license": "ISC",
20
+ "scripts": {
21
+ "check": "concurrently \"eslint . --fix --cache --format ../../agent-eslint.js\" \"tsc --noEmit --pretty false 2>&1 | node ../../agent-tsc.js\"",
22
+ "check:verbose": "concurrently \"eslint .\" \"tsc --noEmit\"",
23
+ "lint": "eslint . --fix --cache --format ../../agent-eslint.js",
24
+ "lint:verbose": "eslint .",
25
+ "lint:fix": "eslint . --fix",
26
+ "build": "tsup",
27
+ "unit": "vitest run unit"
28
+ },
29
+ "devDependencies": {
30
+ "@types/node": "^25.3.5",
31
+ "concurrently": "*",
32
+ "tsup": "^8.0.0",
33
+ "typescript": "*",
34
+ "vitest": "^3.0.0"
35
+ },
36
+ "dependencies": {
37
+ "zod": "^4.3.6"
38
+ }
39
+ }