@opendaw/studio-core 0.0.112 → 0.0.113
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/AudioUnitFreeze.d.ts +15 -0
- package/dist/AudioUnitFreeze.d.ts.map +1 -0
- package/dist/AudioUnitFreeze.js +129 -0
- package/dist/Engine.d.ts +1 -0
- package/dist/Engine.d.ts.map +1 -1
- package/dist/EngineFacade.d.ts +1 -0
- package/dist/EngineFacade.d.ts.map +1 -1
- package/dist/EngineFacade.js +3 -0
- package/dist/EngineWorklet.d.ts +1 -0
- package/dist/EngineWorklet.d.ts.map +1 -1
- package/dist/EngineWorklet.js +4 -0
- package/dist/OfflineEngineRenderer.d.ts.map +1 -1
- package/dist/OfflineEngineRenderer.js +1 -0
- package/dist/StudioPreferences.d.ts +1 -0
- package/dist/StudioPreferences.d.ts.map +1 -1
- package/dist/StudioSettings.d.ts +1 -0
- package/dist/StudioSettings.d.ts.map +1 -1
- package/dist/StudioSettings.js +4 -2
- package/dist/capture/CaptureDevices.d.ts.map +1 -1
- package/dist/capture/CaptureDevices.js +6 -1
- package/dist/index.d.ts +1 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1 -0
- package/dist/offline-engine.js.map +1 -1
- package/dist/processors.js +8 -8
- package/dist/processors.js.map +4 -4
- package/dist/project/Project.d.ts +2 -0
- package/dist/project/Project.d.ts.map +1 -1
- package/dist/project/Project.js +3 -0
- package/dist/ui/index.d.ts +1 -0
- package/dist/ui/index.d.ts.map +1 -1
- package/dist/ui/index.js +1 -0
- package/dist/ui/renderer/audio.d.ts +9 -0
- package/dist/ui/renderer/audio.d.ts.map +1 -0
- package/dist/ui/renderer/audio.js +277 -0
- package/dist/ui/renderer/env.d.ts +5 -0
- package/dist/ui/renderer/env.d.ts.map +1 -0
- package/dist/ui/renderer/env.js +1 -0
- package/dist/ui/renderer/fading.d.ts +7 -0
- package/dist/ui/renderer/fading.d.ts.map +1 -0
- package/dist/ui/renderer/fading.js +55 -0
- package/dist/ui/renderer/index.d.ts +6 -0
- package/dist/ui/renderer/index.d.ts.map +1 -0
- package/dist/ui/renderer/index.js +5 -0
- package/dist/ui/renderer/notes.d.ts +7 -0
- package/dist/ui/renderer/notes.d.ts.map +1 -0
- package/dist/ui/renderer/notes.js +23 -0
- package/dist/ui/renderer/value.d.ts +7 -0
- package/dist/ui/renderer/value.d.ts.map +1 -0
- package/dist/ui/renderer/value.js +101 -0
- package/package.json +10 -10
|
@@ -12,6 +12,7 @@ import { EngineWorklet } from "../EngineWorklet";
|
|
|
12
12
|
import { MIDILearning } from "../midi";
|
|
13
13
|
import { ppqn, TempoMap } from "@opendaw/lib-dsp";
|
|
14
14
|
import { RegionOverlapResolver, TimelineFocus } from "../ui";
|
|
15
|
+
import { AudioUnitFreeze } from "../AudioUnitFreeze";
|
|
15
16
|
export type RestartWorklet = {
|
|
16
17
|
unload: Func<unknown, Promise<unknown>>;
|
|
17
18
|
load: Procedure<EngineWorklet>;
|
|
@@ -47,6 +48,7 @@ export declare class Project implements BoxAdaptersContext, Terminable, Terminab
|
|
|
47
48
|
readonly overlapResolver: RegionOverlapResolver;
|
|
48
49
|
readonly timelineFocus: TimelineFocus;
|
|
49
50
|
readonly engine: EngineFacade;
|
|
51
|
+
readonly audioUnitFreeze: AudioUnitFreeze;
|
|
50
52
|
private constructor();
|
|
51
53
|
startAudioWorklet(restart?: RestartWorklet, options?: ProcessorOptions): EngineWorklet;
|
|
52
54
|
handleCpuOverload(): void;
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"Project.d.ts","sourceRoot":"","sources":["../../src/project/Project.ts"],"names":[],"mappings":"AAAA,OAAO,EAEH,IAAI,EAGJ,SAAS,EAIT,UAAU,EACV,eAAe,EACf,UAAU,EACV,IAAI,EACP,MAAM,kBAAkB,CAAA;AACzB,OAAO,EAAC,UAAU,EAAE,QAAQ,EAA0B,MAAM,kBAAkB,CAAA;AAC9E,OAAO,EACH,WAAW,EAGX,YAAY,EACZ,KAAK,EAEL,OAAO,EACP,WAAW,EAEX,gBAAgB,EACnB,MAAM,uBAAuB,CAAA;AAC9B,OAAO,EACH,mBAAmB,EACnB,mBAAmB,EACnB,WAAW,EACX,kBAAkB,EAClB,cAAc,EACd,gBAAgB,EAGhB,iBAAiB,EAEjB,sBAAsB,EACtB,gBAAgB,EAEhB,eAAe,EAEf,cAAc,EACd,mBAAmB,EACnB,sBAAsB,EACtB,kBAAkB,EAElB,kBAAkB,EAElB,eAAe,EAClB,MAAM,0BAA0B,CAAA;AACjC,OAAO,EAAC,qBAAqB,EAAE,kBAAkB,EAAC,MAAM,qBAAqB,CAAA;AAC7E,OAAO,EAAC,UAAU,EAAC,MAAM,cAAc,CAAA;AACvC,OAAO,EAAC,KAAK,EAAC,MAAM,UAAU,CAAA;AAC9B,OAAO,EAAC,UAAU,EAAC,MAAM,cAAc,CAAA;AAEvC,OAAO,EAAC,cAAc,EAAY,MAAM,YAAY,CAAA;AACpD,OAAO,EAAC,YAAY,EAAC,MAAM,iBAAiB,CAAA;AAC5C,OAAO,EAAC,aAAa,EAAC,MAAM,kBAAkB,CAAA;AAC9C,OAAO,EAAc,YAAY,EAAC,MAAM,SAAS,CAAA;AAEjD,OAAO,EAAC,IAAI,EAAE,QAAQ,EAAW,MAAM,kBAAkB,CAAA;AAGzD,OAAO,EAAC,qBAAqB,EAAE,aAAa,EAAC,MAAM,OAAO,CAAA;
|
|
1
|
+
{"version":3,"file":"Project.d.ts","sourceRoot":"","sources":["../../src/project/Project.ts"],"names":[],"mappings":"AAAA,OAAO,EAEH,IAAI,EAGJ,SAAS,EAIT,UAAU,EACV,eAAe,EACf,UAAU,EACV,IAAI,EACP,MAAM,kBAAkB,CAAA;AACzB,OAAO,EAAC,UAAU,EAAE,QAAQ,EAA0B,MAAM,kBAAkB,CAAA;AAC9E,OAAO,EACH,WAAW,EAGX,YAAY,EACZ,KAAK,EAEL,OAAO,EACP,WAAW,EAEX,gBAAgB,EACnB,MAAM,uBAAuB,CAAA;AAC9B,OAAO,EACH,mBAAmB,EACnB,mBAAmB,EACnB,WAAW,EACX,kBAAkB,EAClB,cAAc,EACd,gBAAgB,EAGhB,iBAAiB,EAEjB,sBAAsB,EACtB,gBAAgB,EAEhB,eAAe,EAEf,cAAc,EACd,mBAAmB,EACnB,sBAAsB,EACtB,kBAAkB,EAElB,kBAAkB,EAElB,eAAe,EAClB,MAAM,0BAA0B,CAAA;AACjC,OAAO,EAAC,qBAAqB,EAAE,kBAAkB,EAAC,MAAM,qBAAqB,CAAA;AAC7E,OAAO,EAAC,UAAU,EAAC,MAAM,cAAc,CAAA;AACvC,OAAO,EAAC,KAAK,EAAC,MAAM,UAAU,CAAA;AAC9B,OAAO,EAAC,UAAU,EAAC,MAAM,cAAc,CAAA;AAEvC,OAAO,EAAC,cAAc,EAAY,MAAM,YAAY,CAAA;AACpD,OAAO,EAAC,YAAY,EAAC,MAAM,iBAAiB,CAAA;AAC5C,OAAO,EAAC,aAAa,EAAC,MAAM,kBAAkB,CAAA;AAC9C,OAAO,EAAc,YAAY,EAAC,MAAM,SAAS,CAAA;AAEjD,OAAO,EAAC,IAAI,EAAE,QAAQ,EAAW,MAAM,kBAAkB,CAAA;AAGzD,OAAO,EAAC,qBAAqB,EAAE,aAAa,EAAC,MAAM,OAAO,CAAA;AAE1D,OAAO,EAAC,eAAe,EAAC,MAAM,oBAAoB,CAAA;AAElD,MAAM,MAAM,cAAc,GAAG;IAAE,MAAM,EAAE,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC;IAAC,IAAI,EAAE,SAAS,CAAC,aAAa,CAAC,CAAA;CAAE,CAAA;AAExG,MAAM,MAAM,oBAAoB,GAAG;IAC/B,aAAa,CAAC,EAAE,OAAO,CAAA;CAC1B,CAAA;AAGD,qBAAa,OAAQ,YAAW,kBAAkB,EAAE,UAAU,EAAE,eAAe;;IAC3E,MAAM,CAAC,GAAG,CAAC,GAAG,EAAE,UAAU,EAAE,OAAO,CAAC,EAAE,oBAAoB,GAAG,OAAO;IAYpE,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,UAAU,EAAE,WAAW,EAAE,WAAW,GAAG,OAAO;WAIlD,cAAc,CAAC,GAAG,EAAE,UAAU,EAAE,WAAW,EAAE,WAAW,GAAG,OAAO,CAAC,OAAO,CAAC;IAOxF,MAAM,CAAC,YAAY,CAAC,GAAG,EAAE,UAAU,EAAE,QAAQ,EAAE,eAAe,EAAE,eAAe,GAAE,OAAc,GAAG,OAAO;IAYzG,QAAQ,CAAC,QAAQ,EAAE,QAAQ,CAAC,KAAK,CAAC,OAAO,CAAC,CAAA;IAE1C,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAA;IACzB,QAAQ,CAAC,kBAAkB,EAAE,aAAa,CAAC,gBAAgB,CAAC,CAAA;IAC5D,QAAQ,CAAC,kBAAkB,EAAE,WAAW,CAAA;IACxC,QAAQ,CAAC,mBAAmB,EAAE,YAAY,CAAA;IAC1C,QAAQ,CAAC,WAAW,EAAE,WAAW,CAAA;IAEjC,QAAQ,CAAC,GAAG,EAAE,UAAU,CAAA;IACxB,QAAQ,CAAC,cAAc,EAAE,cAAc,CAAA;IACvC
,QAAQ,CAAC,OAAO,EAAE,UAAU,CAAA;IAC5B,QAAQ,CAAC,SAAS,EAAE,eAAe,CAAA;IACnC,QAAQ,CAAC,eAAe,EAAE,iBAAiB,CAAC,gBAAgB,CAAC,CAAA;IAC7D,QAAQ,CAAC,eAAe,EAAE,iBAAiB,CAAC,mBAAmB,CAAC,CAAA;IAChE,QAAQ,CAAC,WAAW,EAAE,WAAW,CAAA;IACjC,QAAQ,CAAC,kBAAkB,EAAE,kBAAkB,CAAA;IAC/C,QAAQ,CAAC,sBAAsB,EAAE,sBAAsB,CAAA;IACvD,QAAQ,CAAC,kBAAkB,EAAE,kBAAkB,CAAA;IAC/C,QAAQ,CAAC,YAAY,EAAE,YAAY,CAAA;IACnC,QAAQ,CAAC,KAAK,EAAE,KAAK,CAAA;IACrB,QAAQ,CAAC,QAAQ,EAAE,QAAQ,CAAA;IAC3B,QAAQ,CAAC,eAAe,EAAE,qBAAqB,CAAA;IAC/C,QAAQ,CAAC,aAAa,EAAE,aAAa,CAAA;IACrC,QAAQ,CAAC,MAAM,eAAqB;IACpC,QAAQ,CAAC,eAAe,EAAE,eAAe,CAAA;IAKzC,OAAO;IAmEP,iBAAiB,CAAC,OAAO,CAAC,EAAE,cAAc,EAAE,OAAO,CAAC,EAAE,gBAAgB,GAAG,aAAa;IAoBtF,iBAAiB,IAAI,IAAI;IASzB,cAAc,CAAC,OAAO,GAAE,OAAc;IAMtC,aAAa,IAAI,IAAI;IAKrB,WAAW,IAAI,OAAO;IAEtB,MAAM,CAAC,GAAG,EAAE,gBAAgB,GAAG,IAAI;IAMnC,GAAG,CAAC,CAAC,SAAS,UAAU,EAAE,UAAU,EAAE,CAAC,GAAG,CAAC;IAC3C,MAAM,CAAC,CAAC,SAAS,UAAU,EAAE,GAAG,WAAW,EAAE,KAAK,CAAC,CAAC,CAAC,GAAG,IAAI;IAC5D,KAAK,IAAI,UAAU;IAEnB,IAAI,GAAG,IAAI,UAAU,CAAmB;IACxC,IAAI,cAAc,IAAI,cAAc,CAA8B;IAClE,IAAI,kBAAkB,IAAI,kBAAkB,CAAkC;IAC9E,IAAI,aAAa,IAAI,mBAAmB,CAAiC;IACzE,IAAI,gBAAgB,IAAI,sBAAsB,CAAoC;IAClF,IAAI,cAAc,IAAI,cAAc,CAAkD;IACtF,IAAI,cAAc,IAAI,OAAO,CAAe;IAC5C,IAAI,YAAY,IAAI,OAAO,CAAc;IACzC,IAAI,0BAA0B,IAAI,mBAAmB,CAEpD;IACD,IAAI,qBAAqB,IAAI,qBAAqB,CAAkD;IAEpG,IAAI,QAAQ,IAAI,eAAe,CAW9B;IAED,sBAAsB,CAAC,YAAY,EAAE,MAAM,EAAE,IAAI,EAAE,UAAU,EAAE,gBAAgB,EAAE,MAAM,GAAG,IAAI;IAe9F,kBAAkB,IAAI,aAAa,CAAC,IAAI,CAAC,KAAK,CAAC;IAM/C,gBAAgB,IAAI,IAAI;IAsBxB,aAAa,IAAI,eAAe;IAEhC,IAAI,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC,UAAU,CAAC,GAAG,OAAO;IAIxC,OAAO,IAAI,OAAO;IAuBlB,gBAAgB,IAAI,IAAI;IAOxB,sBAAsB,CAAC,IAAI,EAAE,IAAI,CAAC,KAAK,GAAG,IAAI;IAI9C,SAAS,IAAI,IAAI;CA4BpB"}
|
package/dist/project/Project.js
CHANGED
|
@@ -15,6 +15,7 @@ import { MidiData } from "@opendaw/lib-midi";
|
|
|
15
15
|
import { StudioPreferences } from "../StudioPreferences";
|
|
16
16
|
import { RegionOverlapResolver, TimelineFocus } from "../ui";
|
|
17
17
|
import { SampleStorage } from "../samples";
|
|
18
|
+
import { AudioUnitFreeze } from "../AudioUnitFreeze";
|
|
18
19
|
// Main Entry Point for a Project
|
|
19
20
|
export class Project {
|
|
20
21
|
static new(env, options) {
|
|
@@ -73,6 +74,7 @@ export class Project {
|
|
|
73
74
|
overlapResolver;
|
|
74
75
|
timelineFocus;
|
|
75
76
|
engine = new EngineFacade();
|
|
77
|
+
audioUnitFreeze;
|
|
76
78
|
#rootBoxAdapter;
|
|
77
79
|
#timelineBoxAdapter;
|
|
78
80
|
constructor(env, boxGraph, { rootBox, userInterfaceBoxes, primaryAudioBusBox, primaryAudioUnitBox, timelineBox }) {
|
|
@@ -106,6 +108,7 @@ export class Project {
|
|
|
106
108
|
this.mixer = new Mixer(this.#rootBoxAdapter.audioUnits);
|
|
107
109
|
this.overlapResolver = new RegionOverlapResolver(this.editing, this.api, this.boxAdapters);
|
|
108
110
|
this.timelineFocus = this.#terminator.own(new TimelineFocus());
|
|
111
|
+
this.audioUnitFreeze = this.#terminator.own(new AudioUnitFreeze(this));
|
|
109
112
|
console.debug(`Project was created on ${this.rootBoxAdapter.created.toString()}`);
|
|
110
113
|
this.#sampleRegistrations = UUID.newSet(({ uuid }) => uuid);
|
|
111
114
|
for (const box of this.boxGraph.boxes()) {
|
package/dist/ui/index.d.ts
CHANGED
|
@@ -7,6 +7,7 @@ export * from "./clipboard/types/NotesClipboardHandler";
|
|
|
7
7
|
export * from "./clipboard/types/RegionsClipboardHandler";
|
|
8
8
|
export * from "./clipboard/types/ValuesClipboardHandler";
|
|
9
9
|
export * from "./menu/MenuItems";
|
|
10
|
+
export * from "./renderer";
|
|
10
11
|
export * from "./timeline/RegionClipResolver";
|
|
11
12
|
export * from "./timeline/RegionKeepExistingResolver";
|
|
12
13
|
export * from "./timeline/RegionModifyStrategies";
|
package/dist/ui/index.d.ts.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/ui/index.ts"],"names":[],"mappings":"AAAA,cAAc,8BAA8B,CAAA;AAC5C,cAAc,4BAA4B,CAAA;AAC1C,cAAc,yBAAyB,CAAA;AACvC,cAAc,8CAA8C,CAAA;AAC5D,cAAc,2CAA2C,CAAA;AACzD,cAAc,yCAAyC,CAAA;AACvD,cAAc,2CAA2C,CAAA;AACzD,cAAc,0CAA0C,CAAA;AACxD,cAAc,kBAAkB,CAAA;AAChC,cAAc,+BAA+B,CAAA;AAC7C,cAAc,uCAAuC,CAAA;AACrD,cAAc,mCAAmC,CAAA;AACjD,cAAc,kCAAkC,CAAA;AAChD,cAAc,uCAAuC,CAAA;AACrD,cAAc,0BAA0B,CAAA;AACxC,cAAc,qBAAqB,CAAA;AACnC,cAAc,0BAA0B,CAAA;AACxC,cAAc,0BAA0B,CAAA"}
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/ui/index.ts"],"names":[],"mappings":"AAAA,cAAc,8BAA8B,CAAA;AAC5C,cAAc,4BAA4B,CAAA;AAC1C,cAAc,yBAAyB,CAAA;AACvC,cAAc,8CAA8C,CAAA;AAC5D,cAAc,2CAA2C,CAAA;AACzD,cAAc,yCAAyC,CAAA;AACvD,cAAc,2CAA2C,CAAA;AACzD,cAAc,0CAA0C,CAAA;AACxD,cAAc,kBAAkB,CAAA;AAChC,cAAc,YAAY,CAAA;AAC1B,cAAc,+BAA+B,CAAA;AAC7C,cAAc,uCAAuC,CAAA;AACrD,cAAc,mCAAmC,CAAA;AACjD,cAAc,kCAAkC,CAAA;AAChD,cAAc,uCAAuC,CAAA;AACrD,cAAc,0BAA0B,CAAA;AACxC,cAAc,qBAAqB,CAAA;AACnC,cAAc,0BAA0B,CAAA;AACxC,cAAc,0BAA0B,CAAA"}
|
package/dist/ui/index.js
CHANGED
|
@@ -7,6 +7,7 @@ export * from "./clipboard/types/NotesClipboardHandler";
|
|
|
7
7
|
export * from "./clipboard/types/RegionsClipboardHandler";
|
|
8
8
|
export * from "./clipboard/types/ValuesClipboardHandler";
|
|
9
9
|
export * from "./menu/MenuItems";
|
|
10
|
+
export * from "./renderer";
|
|
10
11
|
export * from "./timeline/RegionClipResolver";
|
|
11
12
|
export * from "./timeline/RegionKeepExistingResolver";
|
|
12
13
|
export * from "./timeline/RegionModifyStrategies";
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
import { RegionBound } from "./env";
|
|
2
|
+
import { Option } from "@opendaw/lib-std";
|
|
3
|
+
import { LoopableRegion, TempoMap } from "@opendaw/lib-dsp";
|
|
4
|
+
import { AudioFileBoxAdapter, AudioPlayMode } from "@opendaw/studio-adapters";
|
|
5
|
+
import { TimelineRange } from "../timeline/TimelineRange";
|
|
6
|
+
export declare namespace AudioRenderer {
|
|
7
|
+
const render: (context: CanvasRenderingContext2D, range: TimelineRange, file: AudioFileBoxAdapter, tempoMap: TempoMap, playMode: Option<AudioPlayMode>, waveformOffset: number, gain: number, { top, bottom }: RegionBound, contentColor: string, { rawStart, resultStart, resultEnd }: LoopableRegion.LoopCycle, clip?: boolean) => void;
|
|
8
|
+
}
|
|
9
|
+
//# sourceMappingURL=audio.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"audio.d.ts","sourceRoot":"","sources":["../../../src/ui/renderer/audio.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,WAAW,EAAC,MAAM,OAAO,CAAA;AACjC,OAAO,EAAC,MAAM,EAAC,MAAM,kBAAkB,CAAA;AACvC,OAAO,EAAW,cAAc,EAAyB,QAAQ,EAAC,MAAM,kBAAkB,CAAA;AAE1F,OAAO,EAAC,mBAAmB,EAAE,aAAa,EAAC,MAAM,0BAA0B,CAAA;AAC3E,OAAO,EAAC,aAAa,EAAC,MAAM,2BAA2B,CAAA;AAEvD,yBAAiB,aAAa,CAAC;IASpB,MAAM,MAAM,GACf,SAAS,wBAAwB,EACjC,OAAO,aAAa,EACpB,MAAM,mBAAmB,EACzB,UAAU,QAAQ,EAClB,UAAU,MAAM,CAAC,aAAa,CAAC,EAC/B,gBAAgB,MAAM,EACtB,MAAM,MAAM,EACZ,iBAAe,WAAW,EAC1B,cAAc,MAAM,EACpB,sCAAoC,cAAc,CAAC,SAAS,EAC5D,OAAM,OAAc,SA2VvB,CAAA;CACJ"}
|
|
@@ -0,0 +1,277 @@
|
|
|
1
|
+
import { dbToGain, PPQN, TempoChangeGrid } from "@opendaw/lib-dsp";
|
|
2
|
+
import { PeaksPainter } from "@opendaw/lib-fusion";
|
|
3
|
+
export var AudioRenderer;
|
|
4
|
+
(function (AudioRenderer) {
|
|
5
|
+
AudioRenderer.render = (context, range, file, tempoMap, playMode, waveformOffset, gain, { top, bottom }, contentColor, { rawStart, resultStart, resultEnd }, clip = true) => {
|
|
6
|
+
if (file.peaks.isEmpty()) {
|
|
7
|
+
return;
|
|
8
|
+
}
|
|
9
|
+
const peaks = file.peaks.unwrap();
|
|
10
|
+
const durationInSeconds = file.endInSeconds - file.startInSeconds;
|
|
11
|
+
const numFrames = peaks.numFrames;
|
|
12
|
+
const numberOfChannels = peaks.numChannels;
|
|
13
|
+
const ht = bottom - top;
|
|
14
|
+
const peaksHeight = Math.floor((ht - 4) / numberOfChannels);
|
|
15
|
+
const scale = dbToGain(-gain);
|
|
16
|
+
const segments = [];
|
|
17
|
+
if (playMode.nonEmpty()) {
|
|
18
|
+
const { warpMarkers } = playMode.unwrap();
|
|
19
|
+
const markers = warpMarkers.asArray();
|
|
20
|
+
if (markers.length < 2) {
|
|
21
|
+
return;
|
|
22
|
+
}
|
|
23
|
+
const first = markers[0];
|
|
24
|
+
const second = markers[1];
|
|
25
|
+
const secondLast = markers[markers.length - 2];
|
|
26
|
+
const last = markers[markers.length - 1];
|
|
27
|
+
const firstRate = (second.seconds - first.seconds) /
|
|
28
|
+
(second.position - first.position);
|
|
29
|
+
const lastRate = (last.seconds - secondLast.seconds) /
|
|
30
|
+
(last.position - secondLast.position);
|
|
31
|
+
const addSegment = (posStart, posEnd, audioStart, audioEnd, outside) => {
|
|
32
|
+
if (posStart >= posEnd) {
|
|
33
|
+
return;
|
|
34
|
+
}
|
|
35
|
+
if (posStart > range.unitMax || posEnd < range.unitMin) {
|
|
36
|
+
return;
|
|
37
|
+
}
|
|
38
|
+
const clippedStart = Math.max(posStart, range.unitMin - range.unitPadding);
|
|
39
|
+
const clippedEnd = Math.min(posEnd, range.unitMax);
|
|
40
|
+
if (clippedStart >= clippedEnd) {
|
|
41
|
+
return;
|
|
42
|
+
}
|
|
43
|
+
const t0 = (clippedStart - posStart) / (posEnd - posStart);
|
|
44
|
+
const t1 = (clippedEnd - posStart) / (posEnd - posStart);
|
|
45
|
+
let aStart = audioStart + t0 * (audioEnd - audioStart) + waveformOffset;
|
|
46
|
+
let aEnd = audioStart + t1 * (audioEnd - audioStart) + waveformOffset;
|
|
47
|
+
let x0 = range.unitToX(clippedStart) * devicePixelRatio;
|
|
48
|
+
let x1 = range.unitToX(clippedEnd) * devicePixelRatio;
|
|
49
|
+
if (aStart < 0.0) {
|
|
50
|
+
const ratio = -aStart / (aEnd - aStart);
|
|
51
|
+
x0 = x0 + ratio * (x1 - x0);
|
|
52
|
+
aStart = 0.0;
|
|
53
|
+
}
|
|
54
|
+
if (aEnd > durationInSeconds) {
|
|
55
|
+
const ratio = (aEnd - durationInSeconds) / (aEnd - aStart);
|
|
56
|
+
x1 = x1 - ratio * (x1 - x0);
|
|
57
|
+
aEnd = durationInSeconds;
|
|
58
|
+
}
|
|
59
|
+
if (aStart >= aEnd) {
|
|
60
|
+
return;
|
|
61
|
+
}
|
|
62
|
+
segments.push({
|
|
63
|
+
x0,
|
|
64
|
+
x1,
|
|
65
|
+
u0: (aStart / durationInSeconds) * numFrames,
|
|
66
|
+
u1: (aEnd / durationInSeconds) * numFrames,
|
|
67
|
+
outside
|
|
68
|
+
});
|
|
69
|
+
};
|
|
70
|
+
const handleSegment = (segmentStart, segmentEnd, audioStartSeconds, audioEndSeconds) => {
|
|
71
|
+
if (segmentStart >= segmentEnd) {
|
|
72
|
+
return;
|
|
73
|
+
}
|
|
74
|
+
if (clip) {
|
|
75
|
+
if (segmentEnd <= resultStart || segmentStart >= resultEnd) {
|
|
76
|
+
return;
|
|
77
|
+
}
|
|
78
|
+
const clippedStart = Math.max(segmentStart, resultStart);
|
|
79
|
+
const clippedEnd = Math.min(segmentEnd, resultEnd);
|
|
80
|
+
const t0 = (clippedStart - segmentStart) / (segmentEnd - segmentStart);
|
|
81
|
+
const t1 = (clippedEnd - segmentStart) / (segmentEnd - segmentStart);
|
|
82
|
+
const aStart = audioStartSeconds + t0 * (audioEndSeconds - audioStartSeconds);
|
|
83
|
+
const aEnd = audioStartSeconds + t1 * (audioEndSeconds - audioStartSeconds);
|
|
84
|
+
addSegment(clippedStart, clippedEnd, aStart, aEnd, false);
|
|
85
|
+
}
|
|
86
|
+
else {
|
|
87
|
+
const rate = (audioEndSeconds - audioStartSeconds) / (segmentEnd - segmentStart);
|
|
88
|
+
// Before audible
|
|
89
|
+
if (segmentStart < resultStart) {
|
|
90
|
+
const endPos = Math.min(segmentEnd, resultStart);
|
|
91
|
+
const aEnd = audioStartSeconds + (endPos - segmentStart) * rate;
|
|
92
|
+
addSegment(segmentStart, endPos, audioStartSeconds, aEnd, true);
|
|
93
|
+
}
|
|
94
|
+
// Audible
|
|
95
|
+
if (segmentEnd > resultStart && segmentStart < resultEnd) {
|
|
96
|
+
const startPos = Math.max(segmentStart, resultStart);
|
|
97
|
+
const endPos = Math.min(segmentEnd, resultEnd);
|
|
98
|
+
const aStart = audioStartSeconds + (startPos - segmentStart) * rate;
|
|
99
|
+
const aEnd = audioStartSeconds + (endPos - segmentStart) * rate;
|
|
100
|
+
addSegment(startPos, endPos, aStart, aEnd, false);
|
|
101
|
+
}
|
|
102
|
+
// After audible
|
|
103
|
+
if (segmentEnd > resultEnd) {
|
|
104
|
+
const startPos = Math.max(segmentStart, resultEnd);
|
|
105
|
+
const aStart = audioStartSeconds + (startPos - segmentStart) * rate;
|
|
106
|
+
addSegment(startPos, segmentEnd, aStart, audioEndSeconds, true);
|
|
107
|
+
}
|
|
108
|
+
}
|
|
109
|
+
};
|
|
110
|
+
const visibleLocalStart = (clip ? resultStart : range.unitMin) - rawStart;
|
|
111
|
+
const visibleLocalEnd = (clip ? resultEnd : range.unitMax) - rawStart;
|
|
112
|
+
// With positive offset, audio from file start appears BEFORE first.position
|
|
113
|
+
// With negative offset, audio from file end appears AFTER last.position
|
|
114
|
+
const extraNeededBefore = waveformOffset > 0 ? waveformOffset / firstRate : 0;
|
|
115
|
+
const extraNeededAfter = waveformOffset < 0 ? -waveformOffset / lastRate : 0;
|
|
116
|
+
const extrapolateStartLocal = Math.min(visibleLocalStart, first.position - extraNeededBefore);
|
|
117
|
+
const extrapolateEndLocal = Math.max(visibleLocalEnd, last.position + extraNeededAfter);
|
|
118
|
+
// Extrapolate before the first warp marker
|
|
119
|
+
if (extrapolateStartLocal < first.position) {
|
|
120
|
+
const audioStart = first.seconds +
|
|
121
|
+
(extrapolateStartLocal - first.position) * firstRate;
|
|
122
|
+
handleSegment(rawStart + extrapolateStartLocal, rawStart + first.position, audioStart, first.seconds);
|
|
123
|
+
}
|
|
124
|
+
// Interior warp segments - only iterate visible range
|
|
125
|
+
const startIndex = Math.max(0, warpMarkers.floorLastIndex(visibleLocalStart));
|
|
126
|
+
for (let i = startIndex; i < markers.length - 1; i++) {
|
|
127
|
+
const w0 = markers[i];
|
|
128
|
+
if (w0.position > visibleLocalEnd) {
|
|
129
|
+
break;
|
|
130
|
+
}
|
|
131
|
+
const w1 = markers[i + 1];
|
|
132
|
+
handleSegment(rawStart + w0.position, rawStart + w1.position, w0.seconds, w1.seconds);
|
|
133
|
+
}
|
|
134
|
+
// Extrapolate after the last warp marker
|
|
135
|
+
if (extrapolateEndLocal > last.position) {
|
|
136
|
+
const audioEnd = last.seconds + (extrapolateEndLocal - last.position) * lastRate;
|
|
137
|
+
handleSegment(rawStart + last.position, rawStart + extrapolateEndLocal, last.seconds, audioEnd);
|
|
138
|
+
}
|
|
139
|
+
}
|
|
140
|
+
else {
|
|
141
|
+
// Non-stretch mode - audio plays at 100% original speed
|
|
142
|
+
// Audio time = elapsed timeline seconds since rawStart + waveformOffset
|
|
143
|
+
// Use rawStart (not resultStart) because resultStart is clipped to viewport
|
|
144
|
+
const regionStartSeconds = tempoMap.ppqnToSeconds(rawStart);
|
|
145
|
+
// Use absolute time conversion so it works for positions before AND after rawStart
|
|
146
|
+
const audioTimeAt = (ppqn) => tempoMap.ppqnToSeconds(ppqn) - regionStartSeconds + waveformOffset;
|
|
147
|
+
// Fixed step size for consistent rendering across zoom levels
|
|
148
|
+
const addSegmentDirect = (ppqnStart, ppqnEnd, audioStart, audioEnd, outside) => {
|
|
149
|
+
if (ppqnStart >= ppqnEnd) {
|
|
150
|
+
return;
|
|
151
|
+
}
|
|
152
|
+
if (ppqnEnd < range.unitMin - range.unitPadding ||
|
|
153
|
+
ppqnStart > range.unitMax) {
|
|
154
|
+
return;
|
|
155
|
+
}
|
|
156
|
+
const clippedStart = Math.max(ppqnStart, range.unitMin - range.unitPadding);
|
|
157
|
+
const clippedEnd = Math.min(ppqnEnd, range.unitMax);
|
|
158
|
+
if (clippedStart >= clippedEnd) {
|
|
159
|
+
return;
|
|
160
|
+
}
|
|
161
|
+
// Interpolate audio times for clipped range
|
|
162
|
+
const t0 = (clippedStart - ppqnStart) / (ppqnEnd - ppqnStart);
|
|
163
|
+
const t1 = (clippedEnd - ppqnStart) / (ppqnEnd - ppqnStart);
|
|
164
|
+
let aStart = audioStart + t0 * (audioEnd - audioStart);
|
|
165
|
+
let aEnd = audioStart + t1 * (audioEnd - audioStart);
|
|
166
|
+
let x0 = range.unitToX(clippedStart) * devicePixelRatio;
|
|
167
|
+
let x1 = range.unitToX(clippedEnd) * devicePixelRatio;
|
|
168
|
+
if (aStart < 0) {
|
|
169
|
+
const ratio = -aStart / (aEnd - aStart);
|
|
170
|
+
x0 += ratio * (x1 - x0);
|
|
171
|
+
aStart = 0;
|
|
172
|
+
}
|
|
173
|
+
if (aEnd > durationInSeconds) {
|
|
174
|
+
const ratio = (aEnd - durationInSeconds) / (aEnd - aStart);
|
|
175
|
+
x1 -= ratio * (x1 - x0);
|
|
176
|
+
aEnd = durationInSeconds;
|
|
177
|
+
}
|
|
178
|
+
if (aStart >= aEnd) {
|
|
179
|
+
return;
|
|
180
|
+
}
|
|
181
|
+
const u0 = (aStart / durationInSeconds) * numFrames;
|
|
182
|
+
const u1 = (aEnd / durationInSeconds) * numFrames;
|
|
183
|
+
if (u0 < u1 && x1 - x0 >= 1) {
|
|
184
|
+
segments.push({ x0, x1, u0, u1, outside });
|
|
185
|
+
}
|
|
186
|
+
};
|
|
187
|
+
// Similar to stretch mode: handle clipping at region boundaries
|
|
188
|
+
const handleTempoSegment = (segStart, segEnd, audioStart, audioEnd) => {
|
|
189
|
+
if (segStart >= segEnd) {
|
|
190
|
+
return;
|
|
191
|
+
}
|
|
192
|
+
if (clip) {
|
|
193
|
+
if (segEnd <= resultStart || segStart >= resultEnd) {
|
|
194
|
+
return;
|
|
195
|
+
}
|
|
196
|
+
const clippedStart = Math.max(segStart, resultStart);
|
|
197
|
+
const clippedEnd = Math.min(segEnd, resultEnd);
|
|
198
|
+
const t0 = (clippedStart - segStart) / (segEnd - segStart);
|
|
199
|
+
const t1 = (clippedEnd - segStart) / (segEnd - segStart);
|
|
200
|
+
const aStart = audioStart + t0 * (audioEnd - audioStart);
|
|
201
|
+
const aEnd = audioStart + t1 * (audioEnd - audioStart);
|
|
202
|
+
addSegmentDirect(clippedStart, clippedEnd, aStart, aEnd, false);
|
|
203
|
+
}
|
|
204
|
+
else {
|
|
205
|
+
const rate = (audioEnd - audioStart) / (segEnd - segStart);
|
|
206
|
+
// Before audible region
|
|
207
|
+
if (segStart < resultStart) {
|
|
208
|
+
const endPos = Math.min(segEnd, resultStart);
|
|
209
|
+
const aEnd = audioStart + (endPos - segStart) * rate;
|
|
210
|
+
addSegmentDirect(segStart, endPos, audioStart, aEnd, true);
|
|
211
|
+
}
|
|
212
|
+
// Audible region
|
|
213
|
+
if (segEnd > resultStart && segStart < resultEnd) {
|
|
214
|
+
const startPos = Math.max(segStart, resultStart);
|
|
215
|
+
const endPos = Math.min(segEnd, resultEnd);
|
|
216
|
+
const aStart = audioStart + (startPos - segStart) * rate;
|
|
217
|
+
const aEnd = audioStart + (endPos - segStart) * rate;
|
|
218
|
+
addSegmentDirect(startPos, endPos, aStart, aEnd, false);
|
|
219
|
+
}
|
|
220
|
+
// After audible region
|
|
221
|
+
if (segEnd > resultEnd) {
|
|
222
|
+
const startPos = Math.max(segStart, resultEnd);
|
|
223
|
+
const aStart = audioStart + (startPos - segStart) * rate;
|
|
224
|
+
addSegmentDirect(startPos, segEnd, aStart, audioEnd, true);
|
|
225
|
+
}
|
|
226
|
+
}
|
|
227
|
+
};
|
|
228
|
+
// Calculate iteration bounds
|
|
229
|
+
// Where does audioTime = 0? Solve: ppqnToSeconds(ppqn) - regionStartSeconds + waveformOffset = 0
|
|
230
|
+
const audioStartPPQN = tempoMap.secondsToPPQN(regionStartSeconds - waveformOffset);
|
|
231
|
+
// Where does audioTime = durationInSeconds?
|
|
232
|
+
const audioEndPPQN = tempoMap.secondsToPPQN(regionStartSeconds - waveformOffset + durationInSeconds);
|
|
233
|
+
// Determine visible iteration range (include padding on the left for smooth scrolling)
|
|
234
|
+
const iterStart = clip
|
|
235
|
+
? Math.max(resultStart, range.unitMin - range.unitPadding)
|
|
236
|
+
: Math.max(Math.min(audioStartPPQN, resultStart), range.unitMin - range.unitPadding);
|
|
237
|
+
const iterEnd = clip
|
|
238
|
+
? Math.min(resultEnd, range.unitMax + TempoChangeGrid)
|
|
239
|
+
: Math.min(Math.max(audioEndPPQN, resultEnd), range.unitMax + TempoChangeGrid);
|
|
240
|
+
// Dynamic step size: ensure each step is at least 1 device pixel wide
|
|
241
|
+
const minStepSize = range.unitsPerPixel * devicePixelRatio;
|
|
242
|
+
const stepSize = Math.max(TempoChangeGrid, Math.ceil(minStepSize / TempoChangeGrid) * TempoChangeGrid);
|
|
243
|
+
// Align to grid for consistent rendering across zoom levels
|
|
244
|
+
let currentPPQN = Math.floor(iterStart / stepSize) * stepSize;
|
|
245
|
+
// Compute initial audio time once, then increment (avoid O(n) ppqnToSeconds calls per step)
|
|
246
|
+
let currentAudioTime = audioTimeAt(currentPPQN);
|
|
247
|
+
while (currentPPQN < iterEnd) {
|
|
248
|
+
const nextPPQN = currentPPQN + stepSize;
|
|
249
|
+
// Incremental: get tempo at the current position and compute step duration
|
|
250
|
+
const stepSeconds = PPQN.pulsesToSeconds(stepSize, tempoMap.getTempoAt(currentPPQN));
|
|
251
|
+
const nextAudioTime = currentAudioTime + stepSeconds;
|
|
252
|
+
// Skip if entirely outside audio file range
|
|
253
|
+
if (nextAudioTime > 0 && currentAudioTime < durationInSeconds) {
|
|
254
|
+
handleTempoSegment(currentPPQN, nextPPQN, currentAudioTime, nextAudioTime);
|
|
255
|
+
}
|
|
256
|
+
currentPPQN = nextPPQN;
|
|
257
|
+
currentAudioTime = nextAudioTime;
|
|
258
|
+
}
|
|
259
|
+
}
|
|
260
|
+
context.fillStyle = contentColor;
|
|
261
|
+
for (const { x0, x1, u0, u1, outside } of segments) {
|
|
262
|
+
context.globalAlpha = outside && !clip ? 0.25 : 1.0;
|
|
263
|
+
for (let channel = 0; channel < numberOfChannels; channel++) {
|
|
264
|
+
PeaksPainter.renderBlocks(context, peaks, channel, {
|
|
265
|
+
u0,
|
|
266
|
+
u1,
|
|
267
|
+
v0: -scale,
|
|
268
|
+
v1: +scale,
|
|
269
|
+
x0,
|
|
270
|
+
x1,
|
|
271
|
+
y0: 3 + top + channel * peaksHeight,
|
|
272
|
+
y1: 3 + top + (channel + 1) * peaksHeight
|
|
273
|
+
});
|
|
274
|
+
}
|
|
275
|
+
}
|
|
276
|
+
};
|
|
277
|
+
})(AudioRenderer || (AudioRenderer = {}));
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"env.d.ts","sourceRoot":"","sources":["../../../src/ui/renderer/env.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,WAAW,GAAG;IACtB,GAAG,EAAE,MAAM,CAAA;IACX,MAAM,EAAE,MAAM,CAAA;CACjB,CAAA"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import { FadingEnvelope } from "@opendaw/lib-dsp";
|
|
2
|
+
import { TimelineRange } from "../../index";
|
|
3
|
+
import { RegionBound } from "./env";
|
|
4
|
+
export declare namespace AudioFadingRenderer {
|
|
5
|
+
const render: (context: CanvasRenderingContext2D, range: TimelineRange, fading: FadingEnvelope.Config, { top, bottom }: RegionBound, startPPQN: number, endPPQN: number, color: string) => void;
|
|
6
|
+
}
|
|
7
|
+
//# sourceMappingURL=fading.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"fading.d.ts","sourceRoot":"","sources":["../../../src/ui/renderer/fading.ts"],"names":[],"mappings":"AACA,OAAO,EAAC,cAAc,EAAC,MAAM,kBAAkB,CAAA;AAC/C,OAAO,EAAC,aAAa,EAAC,MAAM,aAAa,CAAA;AACzC,OAAO,EAAC,WAAW,EAAC,MAAM,OAAO,CAAA;AAEjC,yBAAiB,mBAAmB,CAAC;IAC1B,MAAM,MAAM,GAAI,SAAS,wBAAwB,EACjC,OAAO,aAAa,EACpB,QAAQ,cAAc,CAAC,MAAM,EAC7B,iBAAe,WAAW,EAC1B,WAAW,MAAM,EACjB,SAAS,MAAM,EACf,OAAO,MAAM,KAAG,IAkDtC,CAAA;CACJ"}
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
import { Curve, TAU } from "@opendaw/lib-std";
|
|
2
|
+
export var AudioFadingRenderer;
|
|
3
|
+
(function (AudioFadingRenderer) {
|
|
4
|
+
AudioFadingRenderer.render = (context, range, fading, { top, bottom }, startPPQN, endPPQN, color) => {
|
|
5
|
+
const { inSlope: fadeInSlope, outSlope: fadeOutSlope } = fading;
|
|
6
|
+
const duration = endPPQN - startPPQN;
|
|
7
|
+
const totalFading = fading.in + fading.out;
|
|
8
|
+
const scale = totalFading > duration ? duration / totalFading : 1.0;
|
|
9
|
+
const fadeIn = fading.in * scale;
|
|
10
|
+
const fadeOut = fading.out * scale;
|
|
11
|
+
context.strokeStyle = color;
|
|
12
|
+
context.fillStyle = "rgba(0,0,0,0.25)";
|
|
13
|
+
context.lineWidth = devicePixelRatio;
|
|
14
|
+
if (fadeIn > 0) {
|
|
15
|
+
const fadeInEndPPQN = startPPQN + fadeIn;
|
|
16
|
+
const x0 = range.unitToX(startPPQN) * devicePixelRatio;
|
|
17
|
+
const x1 = range.unitToX(fadeInEndPPQN) * devicePixelRatio;
|
|
18
|
+
const xn = x1 - x0;
|
|
19
|
+
const path = new Path2D();
|
|
20
|
+
path.moveTo(x0, bottom);
|
|
21
|
+
let x = x0;
|
|
22
|
+
Curve.run(fadeInSlope, xn, bottom, top, y => path.lineTo(++x, y));
|
|
23
|
+
path.lineTo(x1, top);
|
|
24
|
+
context.stroke(path);
|
|
25
|
+
path.lineTo(x0, top);
|
|
26
|
+
path.lineTo(x0, bottom);
|
|
27
|
+
context.fill(path);
|
|
28
|
+
}
|
|
29
|
+
if (fadeOut > 0) {
|
|
30
|
+
const x0 = range.unitToX(endPPQN - fadeOut) * devicePixelRatio;
|
|
31
|
+
const x1 = range.unitToX(endPPQN) * devicePixelRatio;
|
|
32
|
+
const xn = x1 - x0;
|
|
33
|
+
const path = new Path2D();
|
|
34
|
+
path.moveTo(x0, top);
|
|
35
|
+
let x = x0;
|
|
36
|
+
Curve.run(fadeOutSlope, xn, top, bottom, y => path.lineTo(++x, y));
|
|
37
|
+
path.lineTo(x1, bottom);
|
|
38
|
+
context.strokeStyle = color;
|
|
39
|
+
context.stroke(path);
|
|
40
|
+
path.lineTo(x1, top);
|
|
41
|
+
path.lineTo(x0, top);
|
|
42
|
+
context.fill(path);
|
|
43
|
+
}
|
|
44
|
+
const handleRadius = 1.5 * devicePixelRatio;
|
|
45
|
+
const x0 = Math.max(range.unitToX(startPPQN + fadeIn), range.unitToX(startPPQN)) * devicePixelRatio;
|
|
46
|
+
const x1 = Math.min(range.unitToX(endPPQN - fadeOut), range.unitToX(endPPQN)) * devicePixelRatio;
|
|
47
|
+
context.fillStyle = color;
|
|
48
|
+
context.beginPath();
|
|
49
|
+
context.arc(x0, top, handleRadius, 0, TAU);
|
|
50
|
+
context.fill();
|
|
51
|
+
context.beginPath();
|
|
52
|
+
context.arc(x1, top, handleRadius, 0, TAU);
|
|
53
|
+
context.fill();
|
|
54
|
+
};
|
|
55
|
+
})(AudioFadingRenderer || (AudioFadingRenderer = {}));
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/ui/renderer/index.ts"],"names":[],"mappings":"AAAA,cAAc,SAAS,CAAA;AACvB,cAAc,OAAO,CAAA;AACrB,cAAc,UAAU,CAAA;AACxB,cAAc,SAAS,CAAA;AACvB,cAAc,SAAS,CAAA"}
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import { NoteRegionBoxAdapter } from "@opendaw/studio-adapters";
import { LoopableRegion } from "@opendaw/lib-dsp";
import { RegionBound, TimelineRange } from "../../index";
export declare namespace NotesRenderer {
    /**
     * Draws the notes of a note region as small filled bars onto a timeline canvas.
     *
     * @param context - target 2D canvas context
     * @param range - maps timeline units (PPQN) to x pixel coordinates
     * @param region - adapter providing the region's note event collection
     * @param bound - vertical extent (`top`/`bottom`) of the region lane in device pixels
     * @param contentColor - fill color used for the note bars
     * @param cycle - loop-cycle window describing the currently visible slice of the region
     */
    const render: (context: CanvasRenderingContext2D, range: TimelineRange, region: NoteRegionBoxAdapter, { top, bottom }: RegionBound, contentColor: string, { rawStart, regionStart, resultStart, resultEnd }: LoopableRegion.LoopCycle) => void;
}
//# sourceMappingURL=notes.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"notes.d.ts","sourceRoot":"","sources":["../../../src/ui/renderer/notes.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,oBAAoB,EAAC,MAAM,0BAA0B,CAAA;AAC7D,OAAO,EAAC,cAAc,EAAC,MAAM,kBAAkB,CAAA;AAC/C,OAAO,EAAC,WAAW,EAAE,aAAa,EAAC,MAAM,aAAa,CAAA;AAEtD,yBAAiB,aAAa,CAAC;IACpB,MAAM,MAAM,GAAI,SAAS,wBAAwB,EACjC,OAAO,aAAa,EACpB,QAAQ,oBAAoB,EAC5B,iBAAe,WAAW,EAC1B,cAAc,MAAM,EACpB,mDAAiD,cAAc,CAAC,SAAS,SAiB/F,CAAA;CACJ"}
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
export var NotesRenderer;
|
|
2
|
+
(function (NotesRenderer) {
|
|
3
|
+
NotesRenderer.render = (context, range, region, { top, bottom }, contentColor, { rawStart, regionStart, resultStart, resultEnd }) => {
|
|
4
|
+
const collection = region.optCollection.unwrap();
|
|
5
|
+
const height = bottom - top;
|
|
6
|
+
context.fillStyle = contentColor;
|
|
7
|
+
const padding = 8;
|
|
8
|
+
const noteHeight = 5;
|
|
9
|
+
const searchStart = Math.floor(resultStart - rawStart);
|
|
10
|
+
const searchEnd = Math.floor(resultEnd - rawStart);
|
|
11
|
+
for (const note of collection.events.iterateRange(searchStart - collection.maxDuration, searchEnd)) {
|
|
12
|
+
const position = rawStart + note.position;
|
|
13
|
+
if (position < regionStart) {
|
|
14
|
+
continue;
|
|
15
|
+
}
|
|
16
|
+
const complete = Math.min(rawStart + note.complete, resultEnd);
|
|
17
|
+
const x0 = Math.floor(range.unitToX(position) * devicePixelRatio);
|
|
18
|
+
const x1 = Math.floor(range.unitToX(complete) * devicePixelRatio);
|
|
19
|
+
const y = top + padding + Math.floor(note.normalizedPitch() * (height - (padding * 2 + noteHeight)));
|
|
20
|
+
context.fillRect(x0, y, Math.max(1, x1 - x0), noteHeight);
|
|
21
|
+
}
|
|
22
|
+
};
|
|
23
|
+
})(NotesRenderer || (NotesRenderer = {}));
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import { LoopableRegion, ValueEvent } from "@opendaw/lib-dsp";
import { Func, unitValue } from "@opendaw/lib-std";
import { TimelineRange } from "../../index";
export declare namespace ValueStreamRenderer {
    /**
     * Strokes and fills a stream of value events (automation) as a connected
     * path on a timeline canvas, clipped to the given loop-cycle window.
     *
     * @param context - target 2D canvas context
     * @param range - maps timeline units to x pixel coordinates
     * @param generator - ordered iterator over the value events to draw
     * @param valueToY - maps a normalized value to a y pixel coordinate
     * @param contentColor - stroke/fill color
     * @param alphaFill - global alpha used for the area fill under the curve
     * @param anchor - normalized value defining the baseline of the area fill
     * @param cycle - loop-cycle window; `rawStart` is used as the unit offset
     */
    const render: (context: CanvasRenderingContext2D, range: TimelineRange, generator: IterableIterator<ValueEvent>, valueToY: Func<unitValue, number>, contentColor: string, alphaFill: unitValue, anchor: unitValue, { resultStart, resultEnd, rawStart: delta }: LoopableRegion.LoopCycle) => void;
}
//# sourceMappingURL=value.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"value.d.ts","sourceRoot":"","sources":["../../../src/ui/renderer/value.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,cAAc,EAAE,UAAU,EAAC,MAAM,kBAAkB,CAAA;AAC3D,OAAO,EAA2B,IAAI,EAAE,SAAS,EAAC,MAAM,kBAAkB,CAAA;AAC1E,OAAO,EAAC,aAAa,EAAC,MAAM,aAAa,CAAA;AAEzC,yBAAiB,mBAAmB,CAAC;IAC1B,MAAM,MAAM,GAAI,SAAS,wBAAwB,EACjC,OAAO,aAAa,EACpB,WAAW,gBAAgB,CAAC,UAAU,CAAC,EACvC,UAAU,IAAI,CAAC,SAAS,EAAE,MAAM,CAAC,EACjC,cAAc,MAAM,EACpB,WAAW,SAAS,EACpB,QAAQ,SAAS,EACjB,6CAA2C,cAAc,CAAC,SAAS,SAuFzF,CAAA;CACJ"}
|
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
import { asDefined, assert, Curve } from "@opendaw/lib-std";
export var ValueStreamRenderer;
(function (ValueStreamRenderer) {
    /**
     * Renders a stream of value events (automation data) as a stroked line with
     * a translucent area fill down to the anchor value, clipped to the visible
     * loop-cycle window [resultStart, resultEnd].
     *
     * Segment shapes between consecutive events depend on the interpolation
     * type of the earlier event: "none" holds the value and jumps, "linear"
     * draws a straight line (interpolated at the window edges), and "curve"
     * samples Curve.valueAt every 4 pixels.
     */
    ValueStreamRenderer.render = (context, range, generator, valueToY, contentColor, alphaFill, anchor, { resultStart, resultEnd, rawStart: delta }) => {
        // pull the first event; an empty stream draws nothing
        const { done, value } = generator.next();
        if (done) {
            return;
        }
        // positions in the stream are relative; delta shifts them into timeline units
        const unitToX = (unit) => range.unitToX(unit + delta) * devicePixelRatio;
        const path = new Path2D();
        // visible window expressed in stream-local units
        const windowMin = resultStart - delta;
        const windowMax = resultEnd - delta;
        const xMin = unitToX(windowMin);
        const xMax = unitToX(windowMax);
        let notMoved = true; // makes sure we start with a moveTo command
        let prev = asDefined(value);
        for (const next of generator) {
            assert(prev !== next, "iterator error");
            const { position: p0, value: v0, interpolation } = prev;
            const { position: p1, value: v1 } = next;
            const type = interpolation.type;
            const x0 = unitToX(p0);
            const x1 = unitToX(p1);
            const y0 = valueToY(v0);
            const y1 = valueToY(v1);
            // zero-length segments degenerate to a hold+jump regardless of type
            if (type === "none" || p1 - p0 === 0) { // hold and jumps to the next event's value
                if (notMoved) {
                    if (p1 > windowMax) {
                        break;
                    } // leave the rest for after the loop
                    path.moveTo(xMin, y0); // move pen to window min
                    path.lineTo(x1, y0); // line to next event
                    notMoved = false;
                }
                if (x1 > xMax) {
                    break;
                }
                path.lineTo(x1, y0); // hold value to the next event
                path.lineTo(x1, y1); // jump to the next event value
            }
            else if (type === "linear") {
                // slope of the straight segment in value-per-unit
                const ratio = (v1 - v0) / (p1 - p0);
                if (notMoved) {
                    // interpolate the value at the left window edge when the
                    // first event lies before it
                    path.moveTo(xMin, valueToY(p0 < windowMin ? v0 + ratio * (windowMin - p0) : v0)); // move pen to window min
                    if (p0 > windowMin) {
                        path.lineTo(x0, y0);
                    } // line to first event
                    notMoved = false;
                }
                if (p1 > windowMax) {
                    path.lineTo(xMax, valueToY(v0 + ratio * (windowMax - p0))); // line to window max
                }
                else {
                    path.lineTo(x1, y1); // line to next event
                }
            }
            else if (type === "curve") {
                // clamp the curved segment to the visible window
                const cx0 = Math.max(x0, xMin);
                const cx1 = Math.min(x1, xMax);
                // steps is given in pixels so the curve is evaluated in screen space
                const definition = { slope: interpolation.slope, steps: x1 - x0, y0, y1 };
                if (notMoved) {
                    if (p0 > windowMin) {
                        path.moveTo(xMin, y0); // move to window edge
                        path.lineTo(x0, y0); // draw the line to the first event
                    }
                    else {
                        // start mid-curve at the clamped left edge
                        path.moveTo(cx0, Curve.valueAt(definition, cx0 - x0));
                    }
                    notMoved = false;
                }
                // TODO We can optimise this by walking the Curve.coefficients
                // sample every 4 device pixels, then land exactly on the right edge
                for (let x = cx0; x <= cx1; x += 4) {
                    path.lineTo(x, Curve.valueAt(definition, x - x0));
                }
                path.lineTo(cx1, Curve.valueAt(definition, cx1 - x0));
            }
            prev = next;
        }
        if (notMoved) {
            // we have not moved, so let's draw a straight line from min to max to respect the sole event
            path.moveTo(xMin, valueToY(prev.value));
            path.lineTo(xMax, valueToY(prev.value));
        }
        else if (prev.position < windowMax) {
            // hold value to the window max
            path.lineTo(xMax, valueToY(prev.value));
        }
        // baseline of the area fill, one device pixel below the anchor value
        const yMin = valueToY(anchor) + devicePixelRatio;
        const style = contentColor;
        context.fillStyle = style;
        context.strokeStyle = style;
        context.beginPath();
        // stroke the open line first, then close the path down to the anchor
        // baseline and fill it with reduced alpha
        context.stroke(path);
        path.lineTo(xMax, yMin);
        path.lineTo(xMin, yMin);
        path.closePath();
        context.globalAlpha = alphaFill;
        context.fill(path);
        context.globalAlpha = 1.00;
    };
})(ValueStreamRenderer || (ValueStreamRenderer = {}));
|