@omnimedia/omnitool 1.1.0-53 → 1.1.0-56

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. package/package.json +1 -1
  2. package/s/demo/demo.bundle.ts +5 -3
  3. package/s/demo/routines/transcode-test.ts +2 -2
  4. package/s/demo/routines/transitions-test.ts +2 -2
  5. package/s/demo/routines/waveform-test.ts +30 -5
  6. package/s/driver/driver.ts +26 -5
  7. package/s/driver/fns/schematic.ts +2 -0
  8. package/s/driver/fns/work.ts +41 -20
  9. package/s/index.html.ts +1 -2
  10. package/s/timeline/parts/waveform/parts/collect.ts +72 -0
  11. package/s/timeline/parts/waveform/parts/render.ts +45 -0
  12. package/s/timeline/parts/waveform/parts/types.ts +24 -0
  13. package/s/timeline/parts/waveform/waveform.ts +152 -0
  14. package/s/timeline/parts/waveform.ts +0 -61
  15. package/s/timeline/renderers/export/parts/cursor.ts +75 -56
  16. package/s/timeline/renderers/export/parts/produce-video.ts +2 -3
  17. package/s/timeline/renderers/player/parts/playback.ts +33 -6
  18. package/s/timeline/renderers/player/player.ts +2 -14
  19. package/s/timeline/renderers/renderers.test.ts +1 -3
  20. package/x/demo/demo.bundle.js +2 -0
  21. package/x/demo/demo.bundle.js.map +1 -1
  22. package/x/demo/demo.bundle.min.js +11 -84
  23. package/x/demo/demo.bundle.min.js.map +4 -4
  24. package/x/demo/routines/transcode-test.js +2 -2
  25. package/x/demo/routines/transcode-test.js.map +1 -1
  26. package/x/demo/routines/transitions-test.js +2 -2
  27. package/x/demo/routines/transitions-test.js.map +1 -1
  28. package/x/demo/routines/waveform-test.js +24 -4
  29. package/x/demo/routines/waveform-test.js.map +1 -1
  30. package/x/driver/driver.d.ts +14 -2
  31. package/x/driver/driver.js +26 -5
  32. package/x/driver/driver.js.map +1 -1
  33. package/x/driver/driver.worker.bundle.min.js +1 -1
  34. package/x/driver/driver.worker.bundle.min.js.map +3 -3
  35. package/x/driver/fns/host.d.ts +2 -0
  36. package/x/driver/fns/schematic.d.ts +2 -0
  37. package/x/driver/fns/work.d.ts +2 -0
  38. package/x/driver/fns/work.js +33 -20
  39. package/x/driver/fns/work.js.map +1 -1
  40. package/x/index.html +3 -3
  41. package/x/index.html.js +1 -1
  42. package/x/tests.bundle.min.js +16 -16
  43. package/x/tests.bundle.min.js.map +4 -4
  44. package/x/tests.html +1 -1
  45. package/x/timeline/parts/waveform/parts/collect.d.ts +11 -0
  46. package/x/timeline/parts/waveform/parts/collect.js +56 -0
  47. package/x/timeline/parts/waveform/parts/collect.js.map +1 -0
  48. package/x/timeline/parts/waveform/parts/render.d.ts +5 -0
  49. package/x/timeline/parts/waveform/parts/render.js +29 -0
  50. package/x/timeline/parts/waveform/parts/render.js.map +1 -0
  51. package/x/timeline/parts/waveform/parts/types.d.ts +21 -0
  52. package/x/timeline/parts/waveform/parts/types.js +2 -0
  53. package/x/timeline/parts/waveform/parts/types.js.map +1 -0
  54. package/x/timeline/parts/waveform/waveform.d.ts +17 -0
  55. package/x/timeline/parts/waveform/waveform.js +125 -0
  56. package/x/timeline/parts/waveform/waveform.js.map +1 -0
  57. package/x/timeline/parts/waveform.d.ts +1 -9
  58. package/x/timeline/parts/waveform.js +1 -48
  59. package/x/timeline/parts/waveform.js.map +1 -1
  60. package/x/timeline/renderers/export/parts/cursor.d.ts +4 -5
  61. package/x/timeline/renderers/export/parts/cursor.js +68 -50
  62. package/x/timeline/renderers/export/parts/cursor.js.map +1 -1
  63. package/x/timeline/renderers/export/parts/produce-video.js +2 -3
  64. package/x/timeline/renderers/export/parts/produce-video.js.map +1 -1
  65. package/x/timeline/renderers/player/parts/playback.d.ts +9 -5
  66. package/x/timeline/renderers/player/parts/playback.js +25 -6
  67. package/x/timeline/renderers/player/parts/playback.js.map +1 -1
  68. package/x/timeline/renderers/player/player.js +2 -9
  69. package/x/timeline/renderers/player/player.js.map +1 -1
  70. package/x/timeline/renderers/renderers.test.js +1 -2
  71. package/x/timeline/renderers/renderers.test.js.map +1 -1
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@omnimedia/omnitool",
3
- "version": "1.1.0-53",
3
+ "version": "1.1.0-56",
4
4
  "description": "open source video processing tools",
5
5
  "license": "MIT",
6
6
  "author": "Przemysław Gałęzki",
@@ -26,8 +26,11 @@ let exportState: {timeline: TimelineFile; omni: Omni} | null = null
26
26
  }
27
27
 
28
28
  const setProgress = (card: HTMLElement, state: "idle" | "running" | "done") => {
29
- const progress = card.querySelector(".progress") as HTMLProgressElement
30
- const status = card.querySelector(".status") as HTMLSpanElement
29
+ const progress = card.querySelector(".progress") as HTMLProgressElement | null
30
+ const status = card.querySelector(".status") as HTMLSpanElement | null
31
+
32
+ if (!progress || !status)
33
+ return
31
34
 
32
35
  if (state === "running") {
33
36
  progress.removeAttribute("value")
@@ -124,4 +127,3 @@ exportButton.addEventListener("click", async () => {
124
127
  await exportState.omni.render(exportState.timeline)
125
128
  setProgress(exportCard, "done")
126
129
  })
127
-
@@ -38,8 +38,8 @@ export function setupTranscodeTest(driver: Driver, source: DecoderSource) {
38
38
  const audio = driver.decodeAudio({source})
39
39
 
40
40
  const {readable, done} = driver.encode({
41
- video,
42
- audio,
41
+ video: video.readable,
42
+ audio: audio.readable,
43
43
  config: {
44
44
  audio: {codec: "opus", bitrate: 128000},
45
45
  video: {codec: "vp9", bitrate: 1000000}
@@ -30,8 +30,8 @@ export async function setupTransitionsTest(driver: Driver, source: DecoderSource
30
30
  }
31
31
  })
32
32
 
33
- await driver.encode({
34
- video,
33
+ driver.encode({
34
+ video: video.readable,
35
35
  config: {
36
36
  audio: {codec: "opus", bitrate: 128000},
37
37
  video: {codec: "vp9", bitrate: 1000000}
@@ -1,14 +1,39 @@
1
+
1
2
  import {Driver} from "../../driver/driver.js"
2
- import {Waveform} from "../../timeline/parts/waveform.js"
3
+ import {Waveform} from "../../timeline/parts/waveform/waveform.js"
3
4
 
4
5
  export async function waveformTest(driver: Driver, source: File, root: HTMLElement) {
5
- const container = root.querySelector(".waveform-canvas") as HTMLElement
6
+ const container = root.querySelector(".waveform-canvas") as HTMLDivElement
6
7
  const widthSlider = root.querySelector(".width") as HTMLInputElement
8
+
7
9
  container.replaceChildren()
8
- const waveform = await Waveform.init(driver, source, container)
10
+ container.style.position = "relative"
11
+ container.style.height = "96px"
12
+ container.style.overflow = "hidden"
13
+
14
+ const waveform = await Waveform.init(driver, source, {
15
+ tileHeight: 96,
16
+ onChange: () => renderTiles()
17
+ })
9
18
 
10
- widthSlider.oninput = () => {
19
+ const renderTiles = () => {
11
20
  const width = +widthSlider.value
12
- waveform.width = width
21
+ const zoom = pixelsPerSecond(width, waveform.duration)
22
+ waveform.zoom = zoom
23
+ container.style.width = `${width}px`
24
+ container.replaceChildren(...[...waveform.getTiles().values()].map(tile => {
25
+ tile.canvas.style.position = "absolute"
26
+ tile.canvas.style.top = "0"
27
+ tile.canvas.style.left = `${tile.startTime * pixelsPerSecond(width, waveform.duration)}px`
28
+ tile.canvas.style.height = "100%"
29
+ return tile.canvas
30
+ }))
13
31
  }
32
+
33
+ widthSlider.oninput = renderTiles
34
+ waveform.range = [0, waveform.duration]
35
+ }
36
+
37
+ function pixelsPerSecond(width: number, duration: number) {
38
+ return duration > 0 ? width / duration : 0
14
39
  }
@@ -58,33 +58,54 @@ export class Driver {
58
58
 
59
59
  decodeVideo(input: DecoderInput) {
60
60
  let lastFrame: VideoFrame | null = null
61
+ const {port1, port2} = new MessageChannel()
61
62
  const videoTransform = new TransformStream<VideoFrame, VideoFrame>({
62
63
  async transform(chunk, controller) {
63
64
  const frame = await input.onFrame?.(chunk) ?? chunk
64
- // below code is to prevent mem leaks and hardware accelerated decoder stall
65
65
  lastFrame?.close()
66
66
  controller.enqueue(frame)
67
67
  lastFrame = frame
68
68
  }
69
69
  })
70
- this.thread.work.decodeVideo[tune]({transfer: [videoTransform.writable]})({
70
+ this.thread.work.decodeVideo[tune]({transfer: [videoTransform.writable, port2]})({
71
71
  source: input.source,
72
+ cancel: port2,
72
73
  video: videoTransform.writable,
73
74
  start: input.start,
74
75
  end: input.end
75
76
  })
76
- return videoTransform.readable
77
+ return {
78
+ readable: videoTransform.readable,
79
+ /**
80
+ * use this to stop decoding (premature interruption)
81
+ * */
82
+ cancel() {
83
+ port1.postMessage("close")
84
+ port1.close()
85
+ }
86
+ }
77
87
  }
78
88
 
79
89
  decodeAudio(input: DecoderInput) {
80
90
  const audioTransform = new TransformStream<AudioData, AudioData>()
81
- this.thread.work.decodeAudio[tune]({transfer: [audioTransform.writable]})({
91
+ const {port1, port2} = new MessageChannel()
92
+ this.thread.work.decodeAudio[tune]({transfer: [audioTransform.writable, port2]})({
82
93
  source: input.source,
94
+ cancel: port2,
83
95
  audio: audioTransform.writable,
84
96
  start: input.start,
85
97
  end: input.end
86
98
  })
87
- return audioTransform.readable
99
+ return {
100
+ readable: audioTransform.readable,
101
+ /**
102
+ * use this to stop decoding (premature interruption)
103
+ * */
104
+ cancel() {
105
+ port1.postMessage("close")
106
+ port1.close()
107
+ }
108
+ }
88
109
  }
89
110
 
90
111
  encode({audio, video, config}: EncoderInput) {
@@ -14,6 +14,7 @@ export type DriverSchematic = AsSchematic<{
14
14
 
15
15
  decodeAudio(input: {
16
16
  source: DecoderSource
17
+ cancel: MessagePort
17
18
  audio: WritableStream<AudioData>
18
19
  start?: number
19
20
  end?: number
@@ -21,6 +22,7 @@ export type DriverSchematic = AsSchematic<{
21
22
 
22
23
  decodeVideo(input: {
23
24
  source: DecoderSource
25
+ cancel: MessagePort
24
26
  video: WritableStream<VideoFrame>
25
27
  start?: number
26
28
  end?: number
@@ -20,7 +20,7 @@ export const setupDriverWork = (
20
20
  await shell.host.world()
21
21
  },
22
22
 
23
- async decodeAudio({source, audio, start, end}) {
23
+ async decodeAudio({source, audio, start, end, cancel}) {
24
24
  const input = new Input({
25
25
  source: await loadSource(source),
26
26
  formats: ALL_FORMATS
@@ -30,19 +30,29 @@ export const setupDriverWork = (
30
30
  const audioDecodable = await audioTrack?.canDecode()
31
31
  const audioWriter = audio.getWriter()
32
32
 
33
- if (audioDecodable && audioTrack) {
34
- const sink = new AudioSampleSink(audioTrack)
35
- for await (const sample of sink.samples(start, end)) {
36
- const frame = sample.toAudioData()
37
- await audioWriter.write(frame)
38
- sample.close()
39
- frame.close()
40
- }
41
- await audioWriter.close()
33
+ if(!audioDecodable || !audioTrack)
34
+ return
35
+
36
+ const sink = new AudioSampleSink(audioTrack)
37
+ const samples = sink.samples(start, end)
38
+
39
+ cancel.onmessage = async () => {
40
+ samples.return()
41
+ input.dispose()
42
+ cancel.close()
42
43
  }
44
+
45
+ for await (const sample of samples) {
46
+ const frame = sample.toAudioData()
47
+ sample.close()
48
+ await audioWriter.write(frame)
49
+ frame.close()
50
+ }
51
+
52
+ await audioWriter.close()
43
53
  },
44
54
 
45
- async decodeVideo({source, video, start, end}) {
55
+ async decodeVideo({source, video, start, end, cancel}) {
46
56
  const input = new Input({
47
57
  source: await loadSource(source),
48
58
  formats: ALL_FORMATS
@@ -52,16 +62,26 @@ export const setupDriverWork = (
52
62
  const videoDecodable = await videoTrack?.canDecode()
53
63
  const videoWriter = video.getWriter()
54
64
 
55
- if (videoDecodable && videoTrack) {
56
- const sink = new VideoSampleSink(videoTrack)
57
- for await (const sample of sink.samples(start, end)) {
58
- const frame = sample.toVideoFrame()
59
- await videoWriter.write(frame)
60
- sample.close()
61
- frame.close()
62
- }
63
- await videoWriter.close()
65
+ if(!videoDecodable || !videoTrack)
66
+ return
67
+
68
+ const sink = new VideoSampleSink(videoTrack)
69
+ const samples = sink.samples(start, end)
70
+
71
+ cancel.onmessage = async () => {
72
+ samples.return()
73
+ input.dispose()
74
+ cancel.close()
64
75
  }
76
+
77
+ for await (const sample of samples) {
78
+ const frame = sample.toVideoFrame()
79
+ sample.close()
80
+ await videoWriter.write(frame)
81
+ frame.close()
82
+ }
83
+
84
+ await videoWriter.close()
65
85
  },
66
86
 
67
87
  async encode({video, audio, config, writable}) {
@@ -78,6 +98,7 @@ export const setupDriverWork = (
78
98
  const sample = new VideoSample(frame)
79
99
  await videoSource.add(sample)
80
100
  sample.close()
101
+ frame.close()
81
102
  }
82
103
  }
83
104
 
package/s/index.html.ts CHANGED
@@ -85,7 +85,7 @@ export default ssg.page(import.meta.url, async orb => ({
85
85
  </div>
86
86
  <div class="waveform-controls">
87
87
  <label>width</label>
88
- <input class="width" type="range" min="100" max="1000000" value="1000" />
88
+ <input class="width" type="range" min="100" max="1000000" value="1000" step="100" />
89
89
  </div>
90
90
  <div class="waveform-canvas"></div>
91
91
  </article>
@@ -134,4 +134,3 @@ export default ssg.page(import.meta.url, async orb => ({
134
134
  </section>
135
135
  `,
136
136
  }))
137
-
@@ -0,0 +1,72 @@
1
+
2
+ import {Driver} from "../../../../driver/driver.js"
3
+ import {DecoderSource} from "../../../../driver/fns/schematic.js"
4
+
5
+ export const PEAK_LEVELS = [2048, 1024, 512, 256, 128, 64, 32] as const
6
+
7
+ export async function collectPeakLevels(driver: Driver, source: DecoderSource) {
8
+ const duration = (await driver.getAudioDuration(source)) ?? 0
9
+ const readable = driver.decodeAudio({source}).readable
10
+ const finestSamplesPerPeak = PEAK_LEVELS[PEAK_LEVELS.length - 1]
11
+ const finestPeaks: number[] = []
12
+
13
+ let currentMax = 0
14
+ let sampleCount = 0
15
+ let sampleRate = 0
16
+
17
+ for await (const audioData of readable) {
18
+ sampleRate ||= audioData.sampleRate
19
+
20
+ const frames = audioData.numberOfFrames
21
+ const plane = new Float32Array(frames)
22
+ audioData.copyTo(plane, {planeIndex: 0})
23
+
24
+ for (let i = 0; i < plane.length; i++) {
25
+ const amplitude = Math.abs(plane[i]!)
26
+ if (amplitude > currentMax) currentMax = amplitude
27
+
28
+ sampleCount++
29
+ if (sampleCount >= finestSamplesPerPeak) {
30
+ finestPeaks.push(currentMax)
31
+ currentMax = 0
32
+ sampleCount = 0
33
+ }
34
+ }
35
+
36
+ audioData.close()
37
+ }
38
+
39
+ if (sampleCount > 0) finestPeaks.push(currentMax)
40
+
41
+ const base = new Float32Array(finestPeaks)
42
+ const levels = PEAK_LEVELS.map(samplesPerPeak => {
43
+ const factor = Math.max(1, Math.round(samplesPerPeak / finestSamplesPerPeak))
44
+ const peaks = factor === 1 ? base : downsampleMax(base, factor)
45
+ return {
46
+ samplesPerPeak,
47
+ peaks,
48
+ peaksPerSecond: sampleRate > 0 ? sampleRate / samplesPerPeak : 0,
49
+ }
50
+ })
51
+
52
+ return {duration, levels}
53
+ }
54
+
55
+ function downsampleMax(peaks: Float32Array, factor: number) {
56
+ const downsampled = new Float32Array(Math.ceil(peaks.length / factor))
57
+
58
+ for (let i = 0; i < downsampled.length; i++) {
59
+ let maxPeak = 0
60
+ const start = i * factor
61
+ const end = Math.min(start + factor, peaks.length)
62
+
63
+ for (let j = start; j < end; j++) {
64
+ if (peaks[j]! > maxPeak) maxPeak = peaks[j]!
65
+ }
66
+
67
+ downsampled[i] = maxPeak
68
+ }
69
+
70
+ return downsampled
71
+ }
72
+
@@ -0,0 +1,45 @@
1
+
2
+ export function renderTile(
3
+ peaks: Float32Array,
4
+ opts: {
5
+ width: number
6
+ height: number
7
+ color: string
8
+ }
9
+ ) {
10
+ const dpr = typeof window !== "undefined" ? window.devicePixelRatio || 1 : 1
11
+ const canvas = document.createElement("canvas")
12
+ canvas.width = opts.width * dpr
13
+ canvas.height = opts.height * dpr
14
+ canvas.style.width = `${opts.width}px`
15
+ canvas.style.height = `${opts.height}px`
16
+
17
+ const ctx = canvas.getContext("2d")
18
+ if (!ctx) return canvas
19
+
20
+ ctx.scale(dpr, dpr)
21
+ ctx.fillStyle = opts.color
22
+
23
+ const centerY = opts.height / 2
24
+ const columns = Math.max(1, opts.width)
25
+ const peaksPerPixel = peaks.length / columns
26
+
27
+ for (let px = 0; px < columns; px++) {
28
+ const startIndex = Math.floor(px * peaksPerPixel)
29
+ const endIndex = Math.max(
30
+ startIndex + 1,
31
+ Math.floor((px + 1) * peaksPerPixel)
32
+ )
33
+
34
+ let maxPeak = 0
35
+ for (let i = startIndex; i < endIndex && i < peaks.length; i++) {
36
+ if (peaks[i]! > maxPeak) maxPeak = peaks[i]!
37
+ }
38
+
39
+ const barHeight = maxPeak * opts.height
40
+ ctx.fillRect(px, centerY - barHeight / 2, 1, barHeight)
41
+ }
42
+
43
+ return canvas
44
+ }
45
+
@@ -0,0 +1,24 @@
1
+ export interface WaveformTileData {
2
+ startTime: number
3
+ endTime: number
4
+ peaks: Float32Array
5
+ canvas: HTMLCanvasElement
6
+ }
7
+
8
+ export interface WaveformOptions {
9
+ tileSize?: number
10
+ zoom?: number
11
+ tileWidth?: number
12
+ tileHeight?: number
13
+ preloadMargin?: number
14
+ color?: string
15
+ onChange?: (tiles: WaveformTileData[]) => void
16
+ }
17
+
18
+ export type WaveformTimeRange = [start: number, end: number]
19
+
20
+ export type WaveformPeakLevel = {
21
+ samplesPerPeak: number
22
+ peaks: Float32Array
23
+ peaksPerSecond: number
24
+ }
@@ -0,0 +1,152 @@
1
+
2
+ import {renderTile} from "./parts/render.js"
3
+ import {Driver} from "../../../driver/driver.js"
4
+ import {collectPeakLevels} from "./parts/collect.js"
5
+ import {DecoderSource} from "../../../driver/fns/schematic.js"
6
+ import {WaveformOptions, WaveformPeakLevel, WaveformTileData, WaveformTimeRange} from "./parts/types.js"
7
+
8
+ const MAX_TILE_WIDTH = 4096
9
+
10
+ export class Waveform {
11
+ #tiles = new Map<number, WaveformTileData>()
12
+ #activeRange: WaveformTimeRange = [0, 0]
13
+
14
+ #zoom
15
+ #levels
16
+ #onChange
17
+ #updateQueued = false
18
+
19
+ readonly color
20
+ readonly duration
21
+ readonly tileSize
22
+ readonly tileHeight
23
+ readonly preloadMargin
24
+
25
+ private constructor(levels: WaveformPeakLevel[], duration: number, options: WaveformOptions) {
26
+ this.#levels = levels
27
+ this.duration = duration
28
+ this.tileSize = options.tileSize ?? 1
29
+ this.#zoom = options.zoom ?? ((options.tileWidth ?? 256) / this.tileSize)
30
+ this.tileHeight = options.tileHeight ?? 96
31
+ this.preloadMargin = options.preloadMargin ?? 2
32
+ this.color = options.color ?? "rgb(3, 148, 129)"
33
+ this.#onChange = options.onChange
34
+ }
35
+
36
+ static async init(driver: Driver, source: DecoderSource, options: WaveformOptions = {}) {
37
+ const {duration, levels} = await collectPeakLevels(driver, source)
38
+ return new Waveform(levels, duration, options)
39
+ }
40
+
41
+ set zoom(value: number) {
42
+ const next = Math.max(1, value)
43
+ if (next === this.#zoom)
44
+ return
45
+
46
+ this.#zoom = next
47
+ this.#tiles.clear()
48
+ this.#queueUpdate()
49
+ }
50
+
51
+ get zoom() {
52
+ return this.#zoom
53
+ }
54
+
55
+ #computeActiveRange([start, end]: WaveformTimeRange, margin = 1): WaveformTimeRange {
56
+ const visibleSize = end - start
57
+ return [
58
+ Math.max(0, start - visibleSize * margin),
59
+ Math.min(this.duration, end + visibleSize * margin),
60
+ ]
61
+ }
62
+
63
+ set range(visibleRange: WaveformTimeRange) {
64
+ const [visibleStart, visibleEnd] = visibleRange
65
+ const visibleSize = visibleEnd - visibleStart
66
+ const [activeStart, activeEnd] = this.#activeRange
67
+
68
+ const leftTrigger = activeStart + visibleSize
69
+ const rightTrigger = activeEnd - visibleSize
70
+
71
+ if (visibleStart >= leftTrigger && visibleEnd <= rightTrigger) return
72
+
73
+ this.#activeRange = this.#computeActiveRange(visibleRange, this.preloadMargin)
74
+ this.#queueUpdate()
75
+ }
76
+
77
+ #queueUpdate() {
78
+ if (this.#updateQueued) return
79
+ this.#updateQueued = true
80
+
81
+ queueMicrotask(() => {
82
+ this.#updateQueued = false
83
+ this.#generateTiles()
84
+ })
85
+ }
86
+
87
+ #generateTiles() {
88
+ const [rangeStart, rangeEnd] = this.#activeRange
89
+ const neededStarts = new Set<number>()
90
+ const level = this.#levelForZoom()
91
+
92
+ const firstStart = Math.max(0, Math.floor(rangeStart / this.tileSize) * this.tileSize)
93
+ const lastStart = Math.min(this.duration, rangeEnd)
94
+
95
+ for (let startTime = firstStart; startTime <= lastStart; startTime += this.tileSize) {
96
+ neededStarts.add(startTime)
97
+ }
98
+
99
+ for (const startTime of neededStarts) {
100
+ if (!this.#tiles.has(startTime)) {
101
+ const endTime = Math.min(startTime + this.tileSize, this.duration)
102
+ this.#tiles.set(startTime, this.#buildTileData(startTime, endTime, level))
103
+ }
104
+ }
105
+
106
+ for (const startTime of this.#tiles.keys()) {
107
+ if (!neededStarts.has(startTime)) this.#tiles.delete(startTime)
108
+ }
109
+
110
+ this.#emit()
111
+ }
112
+
113
+ #buildTileData(startTime: number, endTime: number, level: WaveformPeakLevel): WaveformTileData {
114
+ const peaks = this.#slicePeaks(level, startTime, endTime)
115
+ return {
116
+ startTime,
117
+ endTime,
118
+ peaks,
119
+ canvas: renderTile(peaks, {
120
+ width: this.#tilePixelWidth(startTime, endTime),
121
+ height: this.tileHeight,
122
+ color: this.color,
123
+ }),
124
+ }
125
+ }
126
+
127
+ #levelForZoom() {
128
+ return this.#levels.find(level => level.peaksPerSecond >= this.#zoom)
129
+ ?? this.#levels[this.#levels.length - 1]!
130
+ }
131
+
132
+ #slicePeaks(level: WaveformPeakLevel, startTime: number, endTime: number) {
133
+ if (!level.peaksPerSecond) return new Float32Array()
134
+ const from = Math.max(0, Math.floor(startTime * level.peaksPerSecond))
135
+ const to = Math.max(from + 1, Math.min(level.peaks.length, Math.ceil(endTime * level.peaksPerSecond)))
136
+ return level.peaks.slice(from, to)
137
+ }
138
+
139
+ #tilePixelWidth(startTime: number, endTime: number) {
140
+ return Math.min(MAX_TILE_WIDTH, Math.max(1, Math.ceil((endTime - startTime) * this.#zoom)))
141
+ }
142
+
143
+ #emit() {
144
+ if (!this.#onChange) return
145
+ this.#onChange([...this.#tiles.values()].sort((a, b) => a.startTime - b.startTime))
146
+ }
147
+
148
+ getTiles() {
149
+ return this.#tiles
150
+ }
151
+ }
152
+
@@ -1,61 +0,0 @@
1
- import WaveSurfer from "wavesurfer.js"
2
-
3
- import {Driver} from "../../driver/driver.js"
4
- import {DecoderSource} from "../../driver/fns/schematic.js"
5
-
6
- export class Waveform {
7
- wavesurfer: WaveSurfer
8
-
9
- constructor(peaks: number[], container: HTMLElement, duration: number) {
10
- this.wavesurfer = WaveSurfer.create({
11
- container,
12
- waveColor: 'rgb(200, 0, 200)',
13
- progressColor: 'rgb(100, 0, 100)',
14
- barWidth: 10,
15
- barRadius: 10,
16
- barGap: 2,
17
- peaks: [peaks],
18
- duration
19
- })
20
- }
21
-
22
- static async init(driver: Driver, source: DecoderSource, container: HTMLElement) {
23
- const reader = driver.decodeAudio({source}).getReader()
24
-
25
- const peaks: number[] = []
26
- let buffer: number[] = []
27
- const samplesPerPeak = 1024
28
- const duration = await driver.getAudioDuration(source)
29
-
30
- while (true) {
31
- const {done, value: audioData} = await reader.read()
32
- if (done) break
33
-
34
- const frames = audioData.numberOfFrames
35
- const plane = new Float32Array(frames)
36
- audioData.copyTo(plane, {planeIndex: 0}) // Use left channel only
37
-
38
- for (let i = 0; i < plane.length; i++) {
39
- buffer.push(plane[i])
40
- if (buffer.length >= samplesPerPeak) {
41
- const chunk = buffer.splice(0, samplesPerPeak)
42
- const min = Math.min(...chunk)
43
- const max = Math.max(...chunk)
44
- peaks.push(min, max)
45
- }
46
- }
47
-
48
- audioData.close()
49
- }
50
-
51
- return new Waveform(peaks, container, duration ?? 0)
52
- }
53
-
54
- // set zoom(value: number) {
55
- // this.wavesurfer.zoom(value)
56
- // }
57
-
58
- set width(value: number) {
59
- this.wavesurfer.setOptions({width: value})
60
- }
61
- }