@opendaw/studio-core 0.0.86 → 0.0.88

This diff compares the content of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
Files changed (66)
  1. package/dist/AudioOfflineRenderer.d.ts.map +1 -1
  2. package/dist/AudioOfflineRenderer.js +1 -0
  3. package/dist/AudioWorklets.d.ts +3 -2
  4. package/dist/AudioWorklets.d.ts.map +1 -1
  5. package/dist/AudioWorklets.js +5 -2
  6. package/dist/OfflineEngineRenderer.d.ts +10 -0
  7. package/dist/OfflineEngineRenderer.d.ts.map +1 -0
  8. package/dist/OfflineEngineRenderer.js +141 -0
  9. package/dist/RecordingWorklet.d.ts +1 -1
  10. package/dist/RecordingWorklet.d.ts.map +1 -1
  11. package/dist/RecordingWorklet.js +3 -7
  12. package/dist/capture/CaptureAudio.d.ts.map +1 -1
  13. package/dist/capture/CaptureAudio.js +3 -2
  14. package/dist/capture/RecordAudio.d.ts +2 -1
  15. package/dist/capture/RecordAudio.d.ts.map +1 -1
  16. package/dist/capture/RecordAudio.js +84 -38
  17. package/dist/capture/RecordMidi.d.ts.map +1 -1
  18. package/dist/capture/RecordMidi.js +90 -55
  19. package/dist/capture/RecordTrack.d.ts +1 -1
  20. package/dist/capture/RecordTrack.d.ts.map +1 -1
  21. package/dist/capture/RecordTrack.js +7 -5
  22. package/dist/index.d.ts +1 -0
  23. package/dist/index.d.ts.map +1 -1
  24. package/dist/index.js +1 -0
  25. package/dist/offline-engine.js +2 -0
  26. package/dist/offline-engine.js.map +7 -0
  27. package/dist/processors.js +6 -6
  28. package/dist/processors.js.map +3 -3
  29. package/dist/project/index.d.ts +1 -0
  30. package/dist/project/index.d.ts.map +1 -1
  31. package/dist/project/index.js +1 -0
  32. package/dist/ui/generic/ClipboardManager.d.ts +19 -0
  33. package/dist/ui/generic/ClipboardManager.d.ts.map +1 -0
  34. package/dist/ui/generic/ClipboardManager.js +77 -0
  35. package/dist/ui/generic/ContextMenu.d.ts +13 -0
  36. package/dist/ui/generic/ContextMenu.d.ts.map +1 -0
  37. package/dist/ui/generic/ContextMenu.js +61 -0
  38. package/dist/ui/generic/menu-item.d.ts +95 -0
  39. package/dist/ui/generic/menu-item.d.ts.map +1 -0
  40. package/dist/ui/generic/menu-item.js +124 -0
  41. package/dist/ui/index.d.ts +7 -4
  42. package/dist/ui/index.d.ts.map +1 -1
  43. package/dist/ui/index.js +7 -4
  44. package/dist/ui/timeline/RegionClipResolver.d.ts.map +1 -0
  45. package/dist/ui/timeline/RegionClipResolver.test.d.ts.map +1 -0
  46. package/dist/ui/timeline/RegionModifyStrategies.d.ts.map +1 -0
  47. package/dist/ui/timeline/TimeGrid.d.ts.map +1 -0
  48. package/dist/ui/timeline/TimelineRange.d.ts.map +1 -0
  49. package/dist/workers-main.js +1 -1
  50. package/dist/workers-main.js.map +3 -3
  51. package/package.json +18 -16
  52. package/dist/ui/RegionClipResolver.d.ts.map +0 -1
  53. package/dist/ui/RegionClipResolver.test.d.ts.map +0 -1
  54. package/dist/ui/RegionModifyStrategies.d.ts.map +0 -1
  55. package/dist/ui/TimeGrid.d.ts.map +0 -1
  56. package/dist/ui/TimelineRange.d.ts.map +0 -1
  57. /package/dist/ui/{RegionClipResolver.d.ts → timeline/RegionClipResolver.d.ts} +0 -0
  58. /package/dist/ui/{RegionClipResolver.js → timeline/RegionClipResolver.js} +0 -0
  59. /package/dist/ui/{RegionClipResolver.test.d.ts → timeline/RegionClipResolver.test.d.ts} +0 -0
  60. /package/dist/ui/{RegionClipResolver.test.js → timeline/RegionClipResolver.test.js} +0 -0
  61. /package/dist/ui/{RegionModifyStrategies.d.ts → timeline/RegionModifyStrategies.d.ts} +0 -0
  62. /package/dist/ui/{RegionModifyStrategies.js → timeline/RegionModifyStrategies.js} +0 -0
  63. /package/dist/ui/{TimeGrid.d.ts → timeline/TimeGrid.d.ts} +0 -0
  64. /package/dist/ui/{TimeGrid.js → timeline/TimeGrid.js} +0 -0
  65. /package/dist/ui/{TimelineRange.d.ts → timeline/TimelineRange.d.ts} +0 -0
  66. /package/dist/ui/{TimelineRange.js → timeline/TimelineRange.js} +0 -0
package/dist/AudioOfflineRenderer.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"AudioOfflineRenderer.d.ts","sourceRoot":"","sources":["../src/AudioOfflineRenderer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAS,GAAG,EAAa,MAAM,EAAS,QAAQ,EAAuB,MAAM,kBAAkB,CAAA;AAGtG,OAAO,EAAC,wBAAwB,EAAC,MAAM,0BAA0B,CAAA;AACjE,OAAO,EAAC,OAAO,EAAC,MAAM,WAAW,CAAA;AAGjC,yBAAiB,oBAAoB,CAAC;IAC3B,MAAM,KAAK,GAAU,QAAQ,OAAO,EACf,wBAAwB,MAAM,CAAC,wBAAwB,CAAC,EACxD,UAAU,QAAQ,CAAC,OAAO,EAC1B,cAAc,WAAW,EACzB,aAAY,GAAY,KAAG,OAAO,CAAC,WAAW,CA+CzE,CAAA;CACJ"}
+ {"version":3,"file":"AudioOfflineRenderer.d.ts","sourceRoot":"","sources":["../src/AudioOfflineRenderer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAS,GAAG,EAAa,MAAM,EAAS,QAAQ,EAAuB,MAAM,kBAAkB,CAAA;AAGtG,OAAO,EAAC,wBAAwB,EAAC,MAAM,0BAA0B,CAAA;AACjE,OAAO,EAAC,OAAO,EAAC,MAAM,WAAW,CAAA;AAGjC,yBAAiB,oBAAoB,CAAC;IAE3B,MAAM,KAAK,GAAU,QAAQ,OAAO,EACf,wBAAwB,MAAM,CAAC,wBAAwB,CAAC,EACxD,UAAU,QAAQ,CAAC,OAAO,EAC1B,cAAc,WAAW,EACzB,aAAY,GAAY,KAAG,OAAO,CAAC,WAAW,CA+CzE,CAAA;CACJ"}
package/dist/AudioOfflineRenderer.js
@@ -5,6 +5,7 @@ import { ExportStemsConfiguration } from "@opendaw/studio-adapters";
  import { AudioWorklets } from "./AudioWorklets";
  export var AudioOfflineRenderer;
  (function (AudioOfflineRenderer) {
+ /* Deprecated */
  AudioOfflineRenderer.start = async (source, optExportConfiguration, progress, abortSignal, sampleRate = 48_000) => {
  const numStems = ExportStemsConfiguration.countStems(optExportConfiguration);
  if (numStems === 0) {
package/dist/AudioWorklets.d.ts
@@ -1,12 +1,13 @@
  import { int } from "@opendaw/lib-std";
  import { ExportStemsConfiguration, ProcessorOptions } from "@opendaw/studio-adapters";
- import { Project } from "./project/Project";
+ import { Project } from "./project";
  import { EngineWorklet } from "./EngineWorklet";
  import { MeterWorklet } from "./MeterWorklet";
  import { RecordingWorklet } from "./RecordingWorklet";
  export declare class AudioWorklets {
  #private;
  static install(url: string): void;
+ static get processorsUrl(): string;
  static createFor(context: BaseAudioContext): Promise<AudioWorklets>;
  static get(context: BaseAudioContext): AudioWorklets;
  constructor(context: BaseAudioContext);
@@ -17,6 +18,6 @@ export declare class AudioWorklets {
  exportConfiguration?: ExportStemsConfiguration;
  options?: ProcessorOptions;
  }): EngineWorklet;
- createRecording(numberOfChannels: int, numChunks: int, outputLatency: number): RecordingWorklet;
+ createRecording(numberOfChannels: int, numChunks: int): RecordingWorklet;
  }
  //# sourceMappingURL=AudioWorklets.d.ts.map
package/dist/AudioWorklets.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"AudioWorklets.d.ts","sourceRoot":"","sources":["../src/AudioWorklets.ts"],"names":[],"mappings":"AAAA,OAAO,EAAY,GAAG,EAAS,MAAM,kBAAkB,CAAA;AACvD,OAAO,EAAC,wBAAwB,EAAE,gBAAgB,EAAa,MAAM,0BAA0B,CAAA;AAC/F,OAAO,EAAC,OAAO,EAAC,MAAM,mBAAmB,CAAA;AACzC,OAAO,EAAC,aAAa,EAAC,MAAM,iBAAiB,CAAA;AAC7C,OAAO,EAAC,YAAY,EAAC,MAAM,gBAAgB,CAAA;AAC3C,OAAO,EAAC,gBAAgB,EAAC,MAAM,oBAAoB,CAAA;AAGnD,qBAAa,aAAa;;IACtB,MAAM,CAAC,OAAO,CAAC,GAAG,EAAE,MAAM,GAAG,IAAI;WAOpB,SAAS,CAAC,OAAO,EAAE,gBAAgB,GAAG,OAAO,CAAC,aAAa,CAAC;IAQzE,MAAM,CAAC,GAAG,CAAC,OAAO,EAAE,gBAAgB,GAAG,aAAa;gBAMxC,OAAO,EAAE,gBAAgB;IAErC,IAAI,OAAO,IAAI,gBAAgB,CAAuB;IAEtD,WAAW,CAAC,gBAAgB,EAAE,GAAG,GAAG,YAAY;IAIhD,YAAY,CAAC,EAAC,OAAO,EAAE,mBAAmB,EAAE,OAAO,EAAC,EAAE;QAClD,OAAO,EAAE,OAAO,CAAC;QACjB,mBAAmB,CAAC,EAAE,wBAAwB,CAAC;QAC/C,OAAO,CAAC,EAAE,gBAAgB,CAAA;KAC7B,GAAG,aAAa;IAIjB,eAAe,CAAC,gBAAgB,EAAE,GAAG,EAAE,SAAS,EAAE,GAAG,EAAE,aAAa,EAAE,MAAM,GAAG,gBAAgB;CAOlG"}
+ {"version":3,"file":"AudioWorklets.d.ts","sourceRoot":"","sources":["../src/AudioWorklets.ts"],"names":[],"mappings":"AAAA,OAAO,EAAY,GAAG,EAAS,MAAM,kBAAkB,CAAA;AACvD,OAAO,EAAC,wBAAwB,EAAE,gBAAgB,EAAa,MAAM,0BAA0B,CAAA;AAC/F,OAAO,EAAC,OAAO,EAAC,MAAM,WAAW,CAAA;AACjC,OAAO,EAAC,aAAa,EAAC,MAAM,iBAAiB,CAAA;AAC7C,OAAO,EAAC,YAAY,EAAC,MAAM,gBAAgB,CAAA;AAC3C,OAAO,EAAC,gBAAgB,EAAC,MAAM,oBAAoB,CAAA;AAGnD,qBAAa,aAAa;;IACtB,MAAM,CAAC,OAAO,CAAC,GAAG,EAAE,MAAM,GAAG,IAAI;IAKjC,MAAM,KAAK,aAAa,IAAI,MAAM,CAEjC;WAIY,SAAS,CAAC,OAAO,EAAE,gBAAgB,GAAG,OAAO,CAAC,aAAa,CAAC;IAQzE,MAAM,CAAC,GAAG,CAAC,OAAO,EAAE,gBAAgB,GAAG,aAAa;gBAMxC,OAAO,EAAE,gBAAgB;IAErC,IAAI,OAAO,IAAI,gBAAgB,CAAuB;IAEtD,WAAW,CAAC,gBAAgB,EAAE,GAAG,GAAG,YAAY;IAIhD,YAAY,CAAC,EAAC,OAAO,EAAE,mBAAmB,EAAE,OAAO,EAAC,EAAE;QAClD,OAAO,EAAE,OAAO,CAAC;QACjB,mBAAmB,CAAC,EAAE,wBAAwB,CAAC;QAC/C,OAAO,CAAC,EAAE,gBAAgB,CAAA;KAC7B,GAAG,aAAa;IAIjB,eAAe,CAAC,gBAAgB,EAAE,GAAG,EAAE,SAAS,EAAE,GAAG,GAAG,gBAAgB;CAO3E"}
package/dist/AudioWorklets.js
@@ -8,6 +8,9 @@ export class AudioWorklets {
  console.debug(`WorkletUrl: '${url}'`);
  this.#workletUrl = Option.wrap(url);
  }
+ static get processorsUrl() {
+ return this.#workletUrl.unwrap("WorkletUrl is missing (call 'install' first)");
+ }
  static #workletUrl = Option.None;
  static async createFor(context) {
  return context.audioWorklet.addModule(this.#workletUrl.unwrap("WorkletUrl is missing (call 'install' first)")).then(() => {
@@ -27,11 +30,11 @@ export class AudioWorklets {
  createEngine({ project, exportConfiguration, options }) {
  return new EngineWorklet(this.#context, project, exportConfiguration, options);
  }
- createRecording(numberOfChannels, numChunks, outputLatency) {
+ createRecording(numberOfChannels, numChunks) {
  const audioBytes = numberOfChannels * numChunks * RenderQuantum * Float32Array.BYTES_PER_ELEMENT;
  const pointerBytes = Int32Array.BYTES_PER_ELEMENT * 2;
  const sab = new SharedArrayBuffer(audioBytes + pointerBytes);
  const buffer = { sab, numChunks, numberOfChannels, bufferSize: RenderQuantum };
- return new RecordingWorklet(this.#context, buffer, outputLatency);
+ return new RecordingWorklet(this.#context, buffer);
  }
  }
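The two AudioWorklets changes above belong together: the worklet URL registered via install can now be read back through the new static processorsUrl getter (the offline renderer below forwards it to its worker), and createRecording drops the outputLatency parameter because latency compensation moved into the capture path. A minimal TypeScript sketch of the changed surface, assuming AudioWorklets is re-exported from the package root and that "/assets/processors.js" stands in for wherever a host serves the bundled dist/processors.js:

    import { AudioWorklets } from "@opendaw/studio-core"; // assumed re-export path

    // Hypothetical asset URL: point this at your copy of dist/processors.js.
    AudioWorklets.install("/assets/processors.js");
    // New in 0.0.88: the registered URL can be read back later.
    console.debug(AudioWorklets.processorsUrl);

    const context = new AudioContext();
    const worklets = await AudioWorklets.createFor(context);
    // createRecording(numberOfChannels, numChunks): no outputLatency argument anymore;
    // see the CaptureAudio/RecordAudio hunks further down for where latency is handled now.
    const recordingWorklet = worklets.createRecording(2, 128);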
package/dist/OfflineEngineRenderer.d.ts
@@ -0,0 +1,10 @@
+ import { int, Option, Progress } from "@opendaw/lib-std";
+ import { AudioData } from "@opendaw/lib-dsp";
+ import { ExportStemsConfiguration } from "@opendaw/studio-adapters";
+ import { Project } from "./project";
+ export declare namespace OfflineEngineRenderer {
+ const install: (url: string) => void;
+ const getWorkerUrl: () => string;
+ const start: (source: Project, optExportConfiguration: Option<ExportStemsConfiguration>, progress: Progress.Handler, abortSignal?: AbortSignal, sampleRate?: int) => Promise<AudioData>;
+ }
+ //# sourceMappingURL=OfflineEngineRenderer.d.ts.map
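OfflineEngineRenderer is the new entry point that renders a project in a dedicated Web Worker; it supersedes AudioOfflineRenderer, which the hunk above now marks as deprecated. The new dist/offline-engine.js in the file list appears to be the matching worker bundle. A hedged usage sketch, assuming both namespaces and Project are re-exported from the package root and that the asset URLs are host-specific:

    import { Option } from "@opendaw/lib-std";
    import { AudioWorklets, OfflineEngineRenderer, Project } from "@opendaw/studio-core"; // assumed re-exports

    declare const project: Project; // an already loaded project; obtaining one is outside this diff

    AudioWorklets.install("/assets/processors.js");             // forwarded to the worker as processorsUrl
    OfflineEngineRenderer.install("/assets/offline-engine.js"); // assumed location of the worker bundle

    const abort = new AbortController();
    const audioData = await OfflineEngineRenderer.start(
        project,
        Option.None, // presumably a plain mixdown; Option.wrap(stemsConfiguration) for stem export
        seconds => console.debug(`rendered ${seconds.toFixed(1)}s`), // implementation posts frames / sampleRate
        abort.signal,
        48_000);
    // audioData holds the rendered channel buffers (AudioData from @opendaw/lib-dsp).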
package/dist/OfflineEngineRenderer.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"OfflineEngineRenderer.d.ts","sourceRoot":"","sources":["../src/OfflineEngineRenderer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAS,GAAG,EAAa,MAAM,EAAS,QAAQ,EAAmB,MAAM,kBAAkB,CAAA;AAClG,OAAO,EAAC,SAAS,EAAO,MAAM,kBAAkB,CAAA;AAEhD,OAAO,EAGH,wBAAwB,EAK3B,MAAM,0BAA0B,CAAA;AACjC,OAAO,EAAC,OAAO,EAAC,MAAM,WAAW,CAAA;AAIjC,yBAAiB,qBAAqB,CAAC;IAG5B,MAAM,OAAO,GAAI,KAAK,MAAM,KAAG,IAGrC,CAAA;IAEM,MAAM,YAAY,QAAO,MAE/B,CAAA;IAEM,MAAM,KAAK,GAAU,QAAQ,OAAO,EACf,wBAAwB,MAAM,CAAC,wBAAwB,CAAC,EACxD,UAAU,QAAQ,CAAC,OAAO,EAC1B,cAAc,WAAW,EACzB,aAAY,GAAY,KAAG,OAAO,CAAC,SAAS,CAuIvE,CAAA;CACJ"}
package/dist/OfflineEngineRenderer.js
@@ -0,0 +1,141 @@
+ import { Errors, isDefined, Option, panic, Terminator } from "@opendaw/lib-std";
+ import { AudioData } from "@opendaw/lib-dsp";
+ import { Communicator, Messenger } from "@opendaw/lib-runtime";
+ import { ExportStemsConfiguration } from "@opendaw/studio-adapters";
+ import { AudioWorklets } from "./AudioWorklets";
+ export var OfflineEngineRenderer;
+ (function (OfflineEngineRenderer) {
+ let workerUrl = Option.None;
+ OfflineEngineRenderer.install = (url) => {
+ console.debug(`OfflineEngineWorkerUrl: '${url}'`);
+ workerUrl = Option.wrap(url);
+ };
+ OfflineEngineRenderer.getWorkerUrl = () => {
+ return workerUrl.unwrap("OfflineEngineWorkerUrl is missing (call 'install' first)");
+ };
+ OfflineEngineRenderer.start = async (source, optExportConfiguration, progress, abortSignal, sampleRate = 48_000) => {
+ const numStems = ExportStemsConfiguration.countStems(optExportConfiguration);
+ if (numStems === 0) {
+ return panic("Nothing to export");
+ }
+ const numberOfChannels = numStems * 2;
+ const { promise, reject, resolve } = Promise.withResolvers();
+ const worker = new Worker(OfflineEngineRenderer.getWorkerUrl(), { type: "module" });
+ const messenger = Messenger.for(worker);
+ const protocol = Communicator.sender(messenger.channel("offline-engine"), dispatcher => new class {
+ initialize(enginePort, progressPort, config) {
+ return dispatcher.dispatchAndReturn(this.initialize, enginePort, progressPort, config);
+ }
+ render(config) {
+ return dispatcher.dispatchAndReturn(this.render, config);
+ }
+ step(samples) {
+ return dispatcher.dispatchAndReturn(this.step, samples);
+ }
+ stop() { dispatcher.dispatchAndForget(this.stop); }
+ });
+ const channel = new MessageChannel();
+ const progressChannel = new MessageChannel();
+ const syncStreamBuffer = new SharedArrayBuffer(1024);
+ const controlFlagsBuffer = new SharedArrayBuffer(4);
+ const terminator = new Terminator();
+ const projectCopy = source.copy();
+ const { timelineBox, boxGraph } = projectCopy;
+ boxGraph.beginTransaction();
+ timelineBox.loopArea.enabled.setValue(false);
+ boxGraph.endTransaction();
+ const engineMessenger = Messenger.for(channel.port2);
+ Communicator.executor(engineMessenger.channel("engine-to-client"), {
+ log: (message) => console.log("OFFLINE-ENGINE", message),
+ error: (reason) => console.error("OFFLINE-ENGINE", reason),
+ ready: () => { },
+ fetchAudio: (uuid) => new Promise((resolve, reject) => {
+ const handler = source.sampleManager.getOrCreate(uuid);
+ const subscription = handler.subscribe(state => {
+ if (state.type === "error") {
+ reject(new Error(state.reason));
+ subscription.terminate();
+ }
+ else if (state.type === "loaded") {
+ resolve(handler.data.unwrap());
+ subscription.terminate();
+ }
+ });
+ }),
+ fetchSoundfont: (uuid) => new Promise((resolve, reject) => {
+ const handler = source.soundfontManager.getOrCreate(uuid);
+ const subscription = handler.subscribe(state => {
+ if (state.type === "error") {
+ reject(new Error(state.reason));
+ subscription.terminate();
+ }
+ else if (state.type === "loaded") {
+ resolve(handler.soundfont.unwrap());
+ subscription.terminate();
+ }
+ });
+ }),
+ notifyClipSequenceChanges: () => { },
+ switchMarkerState: () => { }
+ });
+ const engineCommands = Communicator.sender(engineMessenger.channel("engine-commands"), dispatcher => new class {
+ play() { dispatcher.dispatchAndForget(this.play); }
+ stop(reset) { dispatcher.dispatchAndForget(this.stop, reset); }
+ setPosition(position) { dispatcher.dispatchAndForget(this.setPosition, position); }
+ prepareRecordingState(countIn) { dispatcher.dispatchAndForget(this.prepareRecordingState, countIn); }
+ stopRecording() { dispatcher.dispatchAndForget(this.stopRecording); }
+ queryLoadingComplete() { return dispatcher.dispatchAndReturn(this.queryLoadingComplete); }
+ panic() { dispatcher.dispatchAndForget(this.panic); }
+ noteSignal(signal) { dispatcher.dispatchAndForget(this.noteSignal, signal); }
+ ignoreNoteRegion(uuid) { dispatcher.dispatchAndForget(this.ignoreNoteRegion, uuid); }
+ scheduleClipPlay(clipIds) { dispatcher.dispatchAndForget(this.scheduleClipPlay, clipIds); }
+ scheduleClipStop(trackIds) { dispatcher.dispatchAndForget(this.scheduleClipStop, trackIds); }
+ setupMIDI(port, buffer) { dispatcher.dispatchAndForget(this.setupMIDI, port, buffer); }
+ terminate() { dispatcher.dispatchAndForget(this.terminate); }
+ });
+ channel.port2.start();
+ progressChannel.port2.start();
+ let cancelled = false;
+ if (isDefined(abortSignal)) {
+ abortSignal.onabort = () => {
+ engineCommands.stop(true);
+ protocol.stop();
+ terminator.terminate();
+ worker.terminate();
+ cancelled = true;
+ reject(Errors.AbortError);
+ };
+ }
+ progressChannel.port2.onmessage = (event) => progress(event.data.frames / sampleRate);
+ await protocol.initialize(channel.port1, progressChannel.port1, {
+ sampleRate,
+ numberOfChannels,
+ processorsUrl: AudioWorklets.processorsUrl,
+ syncStreamBuffer,
+ controlFlagsBuffer,
+ project: projectCopy.toArrayBuffer(),
+ exportConfiguration: optExportConfiguration.unwrapOrUndefined()
+ });
+ engineCommands.play();
+ protocol.render({}).then(channels => {
+ if (cancelled) {
+ return;
+ }
+ terminator.terminate();
+ worker.terminate();
+ const numberOfFrames = channels[0].length;
+ const audioData = AudioData.create(sampleRate, numberOfFrames, numberOfChannels);
+ for (let channelIndex = 0; channelIndex < numberOfChannels; channelIndex++) {
+ audioData.frames[channelIndex].set(channels[channelIndex]);
+ }
+ resolve(audioData);
+ }).catch(reason => {
+ if (!cancelled) {
+ terminator.terminate();
+ worker.terminate();
+ reject(reason);
+ }
+ });
+ return promise;
+ };
+ })(OfflineEngineRenderer || (OfflineEngineRenderer = {}));
package/dist/RecordingWorklet.d.ts
@@ -5,7 +5,7 @@ import { RingBuffer, SampleLoader, SampleLoaderState } from "@opendaw/studio-ada
  export declare class RecordingWorklet extends AudioWorkletNode implements Terminable, SampleLoader {
  #private;
  readonly uuid: UUID.Bytes;
- constructor(context: BaseAudioContext, config: RingBuffer.Config, outputLatency: number);
+ constructor(context: BaseAudioContext, config: RingBuffer.Config);
  own<T extends Terminable>(terminable: T): T;
  limit(count: int): void;
  setFillLength(value: int): void;
package/dist/RecordingWorklet.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"RecordingWorklet.d.ts","sourceRoot":"","sources":["../src/RecordingWorklet.ts"],"names":[],"mappings":"AAAA,OAAO,EAEH,GAAG,EAEH,QAAQ,EACR,MAAM,EAGN,YAAY,EACZ,UAAU,EAEV,IAAI,EACP,MAAM,kBAAkB,CAAA;AACzB,OAAO,EAAC,SAAS,EAAW,MAAM,kBAAkB,CAAA;AACpD,OAAO,EAAC,KAAK,EAAc,MAAM,qBAAqB,CAAA;AACtD,OAAO,EAEH,UAAU,EACV,YAAY,EACZ,iBAAiB,EAEpB,MAAM,0BAA0B,CAAA;AAMjC,qBAAa,gBAAiB,SAAQ,gBAAiB,YAAW,UAAU,EAAE,YAAY;;IAGtF,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,KAAK,CAAkB;gBAa/B,OAAO,EAAE,gBAAgB,EAAE,MAAM,EAAE,UAAU,CAAC,MAAM,EAAE,aAAa,EAAE,MAAM;IA2BvF,GAAG,CAAC,CAAC,SAAS,UAAU,EAAE,UAAU,EAAE,CAAC,GAAG,CAAC;IAE3C,KAAK,CAAC,KAAK,EAAE,GAAG,GAAG,IAAI;IAEvB,aAAa,CAAC,KAAK,EAAE,GAAG,GAAG,IAAI;IAE/B,IAAI,cAAc,IAAI,GAAG,CAA6C;IACtE,IAAI,IAAI,IAAI,MAAM,CAAC,SAAS,CAAC,CAAoB;IACjD,IAAI,KAAK,IAAI,MAAM,CAAC,KAAK,CAAC,CAA6E;IACvG,IAAI,KAAK,IAAI,iBAAiB,CAAqB;IAEnD,UAAU,IAAI,IAAI;IAElB,SAAS,CAAC,QAAQ,EAAE,QAAQ,CAAC,iBAAiB,CAAC,GAAG,YAAY;IAQ9D,SAAS,IAAI,IAAI;IAOjB,QAAQ,IAAI,MAAM;CAqCrB"}
+ {"version":3,"file":"RecordingWorklet.d.ts","sourceRoot":"","sources":["../src/RecordingWorklet.ts"],"names":[],"mappings":"AAAA,OAAO,EAEH,GAAG,EAEH,QAAQ,EACR,MAAM,EAGN,YAAY,EACZ,UAAU,EAEV,IAAI,EACP,MAAM,kBAAkB,CAAA;AACzB,OAAO,EAAC,SAAS,EAAW,MAAM,kBAAkB,CAAA;AACpD,OAAO,EAAC,KAAK,EAAc,MAAM,qBAAqB,CAAA;AACtD,OAAO,EAEH,UAAU,EACV,YAAY,EACZ,iBAAiB,EAEpB,MAAM,0BAA0B,CAAA;AAMjC,qBAAa,gBAAiB,SAAQ,gBAAiB,YAAW,UAAU,EAAE,YAAY;;IAGtF,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,KAAK,CAAkB;gBAa/B,OAAO,EAAE,gBAAgB,EAAE,MAAM,EAAE,UAAU,CAAC,MAAM;IAuBhE,GAAG,CAAC,CAAC,SAAS,UAAU,EAAE,UAAU,EAAE,CAAC,GAAG,CAAC;IAE3C,KAAK,CAAC,KAAK,EAAE,GAAG,GAAG,IAAI;IAEvB,aAAa,CAAC,KAAK,EAAE,GAAG,GAAG,IAAI;IAE/B,IAAI,cAAc,IAAI,GAAG,CAA6C;IACtE,IAAI,IAAI,IAAI,MAAM,CAAC,SAAS,CAAC,CAAoB;IACjD,IAAI,KAAK,IAAI,MAAM,CAAC,KAAK,CAAC,CAA6E;IACvG,IAAI,KAAK,IAAI,iBAAiB,CAAqB;IAEnD,UAAU,IAAI,IAAI;IAElB,SAAS,CAAC,QAAQ,EAAE,QAAQ,CAAC,iBAAiB,CAAC,GAAG,YAAY;IAQ9D,SAAS,IAAI,IAAI;IAOjB,QAAQ,IAAI,MAAM;CAqCrB"}
package/dist/RecordingWorklet.js
@@ -18,7 +18,7 @@ export class RecordingWorklet extends AudioWorkletNode {
  #isRecording = true;
  #limitSamples = Number.POSITIVE_INFINITY;
  #state = { type: "record" };
- constructor(context, config, outputLatency) {
+ constructor(context, config) {
  super(context, "recording-processor", {
  numberOfInputs: 1,
  channelCount: config.numberOfChannels,
@@ -32,12 +32,8 @@ export class RecordingWorklet extends AudioWorkletNode {
  this.#reader = RingBuffer.reader(config, array => {
  if (this.#isRecording) {
  this.#output.push(array);
- const latencyInSamples = (outputLatency * this.context.sampleRate) | 0;
- if (this.numberOfFrames >= latencyInSamples) {
- this.#peakWriter.append(array);
- }
- const need = this.numberOfFrames - latencyInSamples;
- if (need >= this.#limitSamples) {
+ this.#peakWriter.append(array);
+ if (this.numberOfFrames >= this.#limitSamples) {
  this.#finalize().catch(error => console.warn(error));
  }
  }
package/dist/capture/CaptureAudio.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"CaptureAudio.d.ts","sourceRoot":"","sources":["../../src/capture/CaptureAudio.ts"],"names":[],"mappings":"AAAA,OAAO,EAKH,uBAAuB,EACvB,MAAM,EAEN,UAAU,EACb,MAAM,kBAAkB,CAAA;AAEzB,OAAO,EAAC,YAAY,EAAE,eAAe,EAAC,MAAM,uBAAuB,CAAA;AACnE,OAAO,EAAC,OAAO,EAAC,MAAM,WAAW,CAAA;AACjC,OAAO,EAAC,cAAc,EAAC,MAAM,kBAAkB,CAAA;AAI/C,qBAAa,YAAa,SAAQ,OAAO,CAAC,eAAe,CAAC;;gBAQ1C,OAAO,EAAE,cAAc,EAAE,YAAY,EAAE,YAAY,EAAE,eAAe,EAAE,eAAe;IA4BjG,IAAI,MAAM,IAAI,MAAM,CAAsB;IAE1C,IAAI,MAAM,IAAI,uBAAuB,CAAC,WAAW,CAAC,CAAsB;IAExE,IAAI,cAAc,IAAI,MAAM,CAAC,MAAM,CAAC,CAEnC;IAED,IAAI,KAAK,IAAI,MAAM,CAAsE;IAEzF,IAAI,WAAW,IAAI,MAAM,CAAC,MAAM,CAAC,CAA+D;IAEhG,IAAI,gBAAgB,IAAI,MAAM,CAAC,gBAAgB,CAAC,CAE/C;IAEK,gBAAgB,IAAI,OAAO,CAAC,IAAI,CAAC;IAiBvC,cAAc,IAAI,UAAU;CA6D/B"}
+ {"version":3,"file":"CaptureAudio.d.ts","sourceRoot":"","sources":["../../src/capture/CaptureAudio.ts"],"names":[],"mappings":"AAAA,OAAO,EAKH,uBAAuB,EACvB,MAAM,EAEN,UAAU,EACb,MAAM,kBAAkB,CAAA;AAEzB,OAAO,EAAC,YAAY,EAAE,eAAe,EAAC,MAAM,uBAAuB,CAAA;AACnE,OAAO,EAAC,OAAO,EAAC,MAAM,WAAW,CAAA;AACjC,OAAO,EAAC,cAAc,EAAC,MAAM,kBAAkB,CAAA;AAI/C,qBAAa,YAAa,SAAQ,OAAO,CAAC,eAAe,CAAC;;gBAQ1C,OAAO,EAAE,cAAc,EAAE,YAAY,EAAE,YAAY,EAAE,eAAe,EAAE,eAAe;IA4BjG,IAAI,MAAM,IAAI,MAAM,CAAsB;IAE1C,IAAI,MAAM,IAAI,uBAAuB,CAAC,WAAW,CAAC,CAAsB;IAExE,IAAI,cAAc,IAAI,MAAM,CAAC,MAAM,CAAC,CAEnC;IAED,IAAI,KAAK,IAAI,MAAM,CAAsE;IAEzF,IAAI,WAAW,IAAI,MAAM,CAAC,MAAM,CAAC,CAA+D;IAEhG,IAAI,gBAAgB,IAAI,MAAM,CAAC,gBAAgB,CAAC,CAE/C;IAEK,gBAAgB,IAAI,OAAO,CAAC,IAAI,CAAC;IAiBvC,cAAc,IAAI,UAAU;CA8D/B"}
package/dist/capture/CaptureAudio.js
@@ -66,7 +66,7 @@ export class CaptureAudio extends Capture {
  const mediaStream = streamOption.unwrap();
  const channelCount = mediaStream.getAudioTracks().at(0)?.getSettings().channelCount ?? 1;
  const numChunks = 128;
- const recordingWorklet = audioWorklets.createRecording(channelCount, numChunks, audioContext.outputLatency);
+ const recordingWorklet = audioWorklets.createRecording(channelCount, numChunks);
  return RecordAudio.start({
  recordingWorklet,
  mediaStream,
@@ -74,7 +74,8 @@
  audioContext,
  project,
  capture: this,
- gainDb: this.#gainDb
+ gainDb: this.#gainDb,
+ outputLatency: audioContext.outputLatency ?? 0
  });
  }
  async #updateStream() {
package/dist/capture/RecordAudio.d.ts
@@ -12,8 +12,9 @@ export declare namespace RecordAudio {
  project: Project;
  capture: Capture;
  gainDb: number;
+ outputLatency: number;
  };
- export const start: ({ recordingWorklet, mediaStream, sampleManager, audioContext, project, capture, gainDb }: RecordAudioContext) => Terminable;
+ export const start: ({ recordingWorklet, mediaStream, sampleManager, audioContext, project, capture, gainDb, outputLatency }: RecordAudioContext) => Terminable;
  export {};
  }
  //# sourceMappingURL=RecordAudio.d.ts.map
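RecordAudioContext gains an outputLatency field (in seconds). Instead of RecordingWorklet discarding the first samples worth of output latency (see its hunks above), CaptureAudio now passes audioContext.outputLatency ?? 0 down, and RecordAudio applies it as the region's initial waveform offset. A sketch of a direct call, with declare stand-ins for objects a host already holds; only the shape of the argument object comes from this diff:

    import { RecordAudio } from "@opendaw/studio-core"; // assumed re-export path

    // Stand-ins only; their concrete types live elsewhere in the package.
    declare const recordingWorklet: any;
    declare const sampleManager: any;
    declare const project: any;
    declare const capture: any;
    declare const audioContext: AudioContext;
    declare const mediaStream: MediaStream;

    const recording = RecordAudio.start({
        recordingWorklet, mediaStream, sampleManager, audioContext, project, capture,
        gainDb: 0,
        // New in 0.0.88: output latency in seconds, used as the initial waveform offset.
        outputLatency: audioContext.outputLatency ?? 0
    });
    // recording is a Terminable; terminating it finalizes or aborts the capture (see below).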
package/dist/capture/RecordAudio.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"RecordAudio.d.ts","sourceRoot":"","sources":["../../src/capture/RecordAudio.ts"],"names":[],"mappings":"AAAA,OAAO,EAA6B,UAAU,EAAmB,MAAM,kBAAkB,CAAA;AAUzF,OAAO,EAAa,mBAAmB,EAAY,MAAM,0BAA0B,CAAA;AACnF,OAAO,EAAC,OAAO,EAAC,MAAM,YAAY,CAAA;AAClC,OAAO,EAAC,gBAAgB,EAAC,MAAM,qBAAqB,CAAA;AACpD,OAAO,EAAC,OAAO,EAAC,MAAM,WAAW,CAAA;AAGjC,yBAAiB,WAAW,CAAC;IACzB,KAAK,kBAAkB,GAAG;QACtB,gBAAgB,EAAE,gBAAgB,CAAA;QAClC,WAAW,EAAE,WAAW,CAAA;QACxB,aAAa,EAAE,mBAAmB,CAAA;QAClC,YAAY,EAAE,YAAY,CAAA;QAC1B,OAAO,EAAE,OAAO,CAAA;QAChB,OAAO,EAAE,OAAO,CAAA;QAChB,MAAM,EAAE,MAAM,CAAA;KACjB,CAAA;IAED,MAAM,CAAC,MAAM,KAAK,GACd,0FAAwF,kBAAkB,KACxG,UAyFL,CAAA;;CACJ"}
+ {"version":3,"file":"RecordAudio.d.ts","sourceRoot":"","sources":["../../src/capture/RecordAudio.ts"],"names":[],"mappings":"AAAA,OAAO,EAA6B,UAAU,EAAmB,MAAM,kBAAkB,CAAA;AAUzF,OAAO,EAAa,mBAAmB,EAAY,MAAM,0BAA0B,CAAA;AACnF,OAAO,EAAC,OAAO,EAAC,MAAM,YAAY,CAAA;AAClC,OAAO,EAAC,gBAAgB,EAAC,MAAM,qBAAqB,CAAA;AACpD,OAAO,EAAC,OAAO,EAAC,MAAM,WAAW,CAAA;AAGjC,yBAAiB,WAAW,CAAC;IACzB,KAAK,kBAAkB,GAAG;QACtB,gBAAgB,EAAE,gBAAgB,CAAA;QAClC,WAAW,EAAE,WAAW,CAAA;QACxB,aAAa,EAAE,mBAAmB,CAAA;QAClC,YAAY,EAAE,YAAY,CAAA;QAC1B,OAAO,EAAE,OAAO,CAAA;QAChB,OAAO,EAAE,OAAO,CAAA;QAChB,MAAM,EAAE,MAAM,CAAA;QACd,aAAa,EAAE,MAAM,CAAA;KACxB,CAAA;IAQD,MAAM,CAAC,MAAM,KAAK,GACd,yGASG,kBAAkB,KACnB,UA2IL,CAAA;;CACJ"}
package/dist/capture/RecordAudio.js
@@ -5,12 +5,11 @@ import { ColorCodes, TrackType } from "@opendaw/studio-adapters";
  import { RecordTrack } from "./RecordTrack";
  export var RecordAudio;
  (function (RecordAudio) {
- RecordAudio.start = ({ recordingWorklet, mediaStream, sampleManager, audioContext, project, capture, gainDb }) => {
+ RecordAudio.start = ({ recordingWorklet, mediaStream, sampleManager, audioContext, project, capture, gainDb, outputLatency }) => {
  const terminator = new Terminator();
  const beats = PPQN.fromSignature(1, project.timelineBox.signature.denominator.getValue());
- const { editing, engine, boxGraph } = project;
- const trackBox = RecordTrack.findOrCreate(editing, capture.audioUnitBox, TrackType.Audio);
- const uuid = recordingWorklet.uuid;
+ const { editing, engine, boxGraph, timelineBox } = project;
+ const originalUuid = recordingWorklet.uuid;
  sampleManager.record(recordingWorklet);
  const streamSource = audioContext.createMediaStreamSource(mediaStream);
  const streamGain = audioContext.createGain();
@@ -20,8 +19,14 @@ export var RecordAudio;
  streamGain.disconnect();
  streamSource.disconnect();
  }));
- let recordingData = Option.None;
- const createRecordingData = (position) => editing.modify(() => {
+ let fileBox = Option.None;
+ let currentTake = Option.None;
+ let lastPosition = 0;
+ let currentWaveformOffset = outputLatency;
+ let takeNumber = 0;
+ const { tempoMap, env: { audioContext: { sampleRate } } } = project;
+ const { loopArea } = timelineBox;
+ const createFileBox = () => {
  const fileDateString = new Date()
  .toISOString()
  .replaceAll("T", "-")
@@ -29,64 +34,105 @@ export var RecordAudio;
  .replaceAll(":", "-")
  .replaceAll("Z", "");
  const fileName = `Recording-${fileDateString}`;
- const fileBox = AudioFileBox.create(boxGraph, uuid, box => box.fileName.setValue(fileName));
+ return AudioFileBox.create(boxGraph, originalUuid, box => box.fileName.setValue(fileName));
+ };
+ const createTakeRegion = (position, waveformOffset, forceNewTrack) => {
+ takeNumber++;
+ const trackBox = RecordTrack.findOrCreate(editing, capture.audioUnitBox, TrackType.Audio, forceNewTrack);
  const collectionBox = ValueEventCollectionBox.create(boxGraph, UUID.generate());
  const stretchBox = AudioPitchStretchBox.create(boxGraph, UUID.generate());
  WarpMarkerBox.create(boxGraph, UUID.generate(), box => box.owner.refer(stretchBox.warpMarkers));
  const warpMarkerBox = WarpMarkerBox.create(boxGraph, UUID.generate(), box => box.owner.refer(stretchBox.warpMarkers));
  const regionBox = AudioRegionBox.create(boxGraph, UUID.generate(), box => {
- box.file.refer(fileBox);
+ box.file.refer(fileBox.unwrap());
  box.events.refer(collectionBox.owners);
  box.regions.refer(trackBox.regions);
  box.position.setValue(position);
  box.hue.setValue(ColorCodes.forTrackType(TrackType.Audio));
  box.timeBase.setValue(TimeBase.Musical);
- box.label.setValue("Recording");
+ box.label.setValue(`Take ${takeNumber}`);
  box.playMode.refer(stretchBox);
+ box.waveformOffset.setValue(waveformOffset);
  });
  capture.addRecordedRegion(regionBox);
  project.selection.select(regionBox);
- return { fileBox, regionBox, warpMarkerBox };
- }, false);
- const { tempoMap, env: { audioContext: { sampleRate } } } = project;
+ return { trackBox, regionBox, warpMarkerBox };
+ };
+ const finalizeTake = (take, loopDurationPPQN) => {
+ const { trackBox, regionBox, warpMarkerBox } = take;
+ if (regionBox.isAttached()) {
+ regionBox.duration.setValue(loopDurationPPQN);
+ regionBox.loopDuration.setValue(loopDurationPPQN);
+ const seconds = tempoMap.intervalToSeconds(0, loopDurationPPQN);
+ warpMarkerBox.position.setValue(loopDurationPPQN);
+ warpMarkerBox.seconds.setValue(seconds);
+ }
+ if (trackBox.isAttached()) {
+ trackBox.enabled.setValue(false);
+ }
+ };
+ const startNewTake = (position) => {
+ currentTake = Option.wrap(createTakeRegion(position, currentWaveformOffset, true));
+ };
  terminator.ownAll(Terminable.create(() => {
- if (recordingWorklet.numberOfFrames === 0 || recordingData.isEmpty()) {
+ if (recordingWorklet.numberOfFrames === 0 || fileBox.isEmpty()) {
  console.debug("Abort recording audio.");
- sampleManager.remove(uuid);
+ sampleManager.remove(originalUuid);
  recordingWorklet.terminate();
  }
  else {
- const { regionBox: { duration }, fileBox } = recordingData.unwrap("No recording data available");
- recordingWorklet.limit(Math.ceil(tempoMap.intervalToSeconds(0, duration.getValue()) * sampleRate));
- fileBox.endInSeconds.setValue(recordingWorklet.numberOfFrames / sampleRate);
+ currentTake.ifSome(({ regionBox: { duration } }) => {
+ recordingWorklet.limit(Math.ceil((currentWaveformOffset + tempoMap.intervalToSeconds(0, duration.getValue())) * sampleRate));
+ });
+ fileBox.ifSome(fb => fb.endInSeconds.setValue(recordingWorklet.numberOfFrames / sampleRate));
  }
  }), engine.position.catchupAndSubscribe(owner => {
  if (!engine.isRecording.getValue()) {
  return;
  }
- if (recordingData.isEmpty()) {
+ const currentPosition = owner.getValue();
+ const loopEnabled = loopArea.enabled.getValue();
+ const loopFrom = loopArea.from.getValue();
+ const loopTo = loopArea.to.getValue();
+ const loopDurationPPQN = loopTo - loopFrom;
+ const loopDurationSeconds = tempoMap.intervalToSeconds(loopFrom, loopTo);
+ if (loopEnabled && currentTake.nonEmpty() && currentPosition < lastPosition) {
+ editing.modify(() => {
+ currentTake.ifSome(take => finalizeTake(take, loopDurationPPQN));
+ currentWaveformOffset += loopDurationSeconds;
+ startNewTake(loopFrom);
+ }, false);
+ }
+ lastPosition = currentPosition;
+ if (fileBox.isEmpty()) {
  streamGain.connect(recordingWorklet);
- recordingData = createRecordingData(quantizeFloor(owner.getValue(), beats));
+ editing.modify(() => {
+ fileBox = Option.wrap(createFileBox());
+ const position = quantizeFloor(currentPosition, beats);
+ currentTake = Option.wrap(createTakeRegion(position, currentWaveformOffset, false));
+ }, false);
  }
- const { fileBox, regionBox, warpMarkerBox } = recordingData.unwrap();
- editing.modify(() => {
- if (regionBox.isAttached()) {
- const { duration, loopDuration } = regionBox;
- const distanceInPPQN = Math.floor(engine.position.getValue() - regionBox.position.getValue());
- duration.setValue(distanceInPPQN);
- loopDuration.setValue(distanceInPPQN);
- warpMarkerBox.position.setValue(distanceInPPQN);
- const seconds = tempoMap.intervalToSeconds(0, distanceInPPQN);
- const totalSamples = Math.ceil(seconds * sampleRate);
- recordingWorklet.setFillLength(totalSamples);
- fileBox.endInSeconds.setValue(seconds);
- warpMarkerBox.seconds.setValue(seconds);
- }
- else {
- terminator.terminate();
- recordingData = Option.None;
- }
- }, false);
+ currentTake.ifSome(({ regionBox, warpMarkerBox }) => {
+ editing.modify(() => {
+ if (regionBox.isAttached()) {
+ const { duration, loopDuration } = regionBox;
+ const maxDuration = loopEnabled ? loopTo - regionBox.position.getValue() : Infinity;
+ const distanceInPPQN = Math.min(maxDuration, Math.floor(currentPosition - regionBox.position.getValue()));
+ duration.setValue(distanceInPPQN);
+ loopDuration.setValue(distanceInPPQN);
+ warpMarkerBox.position.setValue(distanceInPPQN);
+ const seconds = tempoMap.intervalToSeconds(0, distanceInPPQN);
+ const totalSamples = Math.ceil((currentWaveformOffset + seconds) * sampleRate);
+ recordingWorklet.setFillLength(totalSamples);
+ fileBox.ifSome(fb => fb.endInSeconds.setValue(totalSamples / sampleRate));
+ warpMarkerBox.seconds.setValue(seconds);
+ }
+ else {
+ terminator.terminate();
+ currentTake = Option.None;
+ }
+ }, false);
+ });
  }));
  return terminator;
  };
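The arithmetic behind the new loop-take handling is worth spelling out: every completed loop pass finalizes its take and advances currentWaveformOffset by the loop duration in seconds, so the fill length handed to recordingWorklet.setFillLength is measured from the start of the whole recording, not from the start of the current take. A standalone TypeScript illustration of that bookkeeping (not an API of this package):

    // Illustration only: how the fill length relates to the accumulated waveform offset above.
    function fillLengthForTake(outputLatency: number,        // seconds (RecordAudioContext.outputLatency)
                               completedLoopSeconds: number,  // sum of the durations of finished loop passes
                               secondsIntoTake: number,       // tempoMap.intervalToSeconds(0, duration)
                               sampleRate: number): number {
        const currentWaveformOffset = outputLatency + completedLoopSeconds;
        return Math.ceil((currentWaveformOffset + secondsIntoTake) * sampleRate);
    }

    // Example: ~15.6 ms output latency (1/64 s), two finished 2 s loop passes,
    // 0.5 s into the current take, at 48 kHz: (0.015625 + 4.0 + 0.5) * 48000 = 216750 frames.
    console.debug(fillLengthForTake(0.015625, 4.0, 0.5, 48_000));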
package/dist/capture/RecordMidi.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"RecordMidi.d.ts","sourceRoot":"","sources":["../../src/capture/RecordMidi.ts"],"names":[],"mappings":"AAAA,OAAO,EAAO,QAAQ,EAAuC,UAAU,EAAmB,MAAM,kBAAkB,CAAA;AAGlH,OAAO,EAAa,UAAU,EAAY,MAAM,0BAA0B,CAAA;AAC1E,OAAO,EAAC,OAAO,EAAC,MAAM,YAAY,CAAA;AAClC,OAAO,EAAC,OAAO,EAAC,MAAM,WAAW,CAAA;AAGjC,yBAAiB,UAAU,CAAC;IACxB,KAAK,iBAAiB,GAAG;QACrB,QAAQ,EAAE,QAAQ,CAAC,UAAU,CAAC,CAAC;QAC/B,OAAO,EAAE,OAAO,CAAC;QACjB,OAAO,EAAE,OAAO,CAAA;KACnB,CAAA;IAID,MAAM,CAAC,MAAM,KAAK,GAAI,gCAA8B,iBAAiB,KAAG,UAyEvE,CAAA;;CACJ"}
+ {"version":3,"file":"RecordMidi.d.ts","sourceRoot":"","sources":["../../src/capture/RecordMidi.ts"],"names":[],"mappings":"AAAA,OAAO,EAAY,QAAQ,EAAuC,UAAU,EAAmB,MAAM,kBAAkB,CAAA;AAGvH,OAAO,EAAa,UAAU,EAAY,MAAM,0BAA0B,CAAA;AAC1E,OAAO,EAAC,OAAO,EAAC,MAAM,YAAY,CAAA;AAClC,OAAO,EAAC,OAAO,EAAC,MAAM,WAAW,CAAA;AAGjC,yBAAiB,UAAU,CAAC;IACxB,KAAK,iBAAiB,GAAG;QACrB,QAAQ,EAAE,QAAQ,CAAC,UAAU,CAAC,CAAC;QAC/B,OAAO,EAAE,OAAO,CAAC;QACjB,OAAO,EAAE,OAAO,CAAA;KACnB,CAAA;IAgBD,MAAM,CAAC,MAAM,KAAK,GAAI,gCAA8B,iBAAiB,KAAG,UAmHvE,CAAA;;CACJ"}