loukai-app 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +558 -0
- package/bin/loukai.js +32 -0
- package/package.json +243 -0
- package/src/main/appState.js +250 -0
- package/src/main/audioEngine.js +478 -0
- package/src/main/creator/conversionService.js +503 -0
- package/src/main/creator/downloadManager.js +1128 -0
- package/src/main/creator/ffmpegService.js +487 -0
- package/src/main/creator/installLogger.js +51 -0
- package/src/main/creator/keyDetection.js +212 -0
- package/src/main/creator/llmService.js +370 -0
- package/src/main/creator/lrclibService.js +340 -0
- package/src/main/creator/python/crepe_runner.py +189 -0
- package/src/main/creator/python/demucs_runner.py +158 -0
- package/src/main/creator/python/whisper_runner.py +172 -0
- package/src/main/creator/pythonRunner.js +268 -0
- package/src/main/creator/stemBuilder.js +491 -0
- package/src/main/creator/systemChecker.js +474 -0
- package/src/main/handlers/appHandlers.js +45 -0
- package/src/main/handlers/audioHandlers.js +33 -0
- package/src/main/handlers/autotuneHandlers.js +28 -0
- package/src/main/handlers/canvasHandlers.js +84 -0
- package/src/main/handlers/creatorHandlers.js +159 -0
- package/src/main/handlers/editorHandlers.js +98 -0
- package/src/main/handlers/effectsHandlers.js +100 -0
- package/src/main/handlers/fileHandlers.js +45 -0
- package/src/main/handlers/index.js +78 -0
- package/src/main/handlers/libraryHandlers.js +96 -0
- package/src/main/handlers/mixerHandlers.js +64 -0
- package/src/main/handlers/playerHandlers.js +39 -0
- package/src/main/handlers/preferencesHandlers.js +46 -0
- package/src/main/handlers/queueHandlers.js +81 -0
- package/src/main/handlers/rendererHandlers.js +63 -0
- package/src/main/handlers/settingsHandlers.js +42 -0
- package/src/main/handlers/webServerHandlers.js +105 -0
- package/src/main/main.js +2351 -0
- package/src/main/preload.js +252 -0
- package/src/main/settingsManager.js +139 -0
- package/src/main/statePersistence.js +193 -0
- package/src/main/utils/pathValidator.js +112 -0
- package/src/main/webServer.js +2535 -0
- package/src/native/autotune.js +417 -0
- package/src/renderer/adapters/ElectronBridge.js +677 -0
- package/src/renderer/canvas.html +80 -0
- package/src/renderer/components/App.jsx +303 -0
- package/src/renderer/components/AppRoot.jsx +37 -0
- package/src/renderer/components/AudioDeviceSettings.jsx +145 -0
- package/src/renderer/components/EffectsPanelWrapper.jsx +267 -0
- package/src/renderer/components/MixerTab.jsx +233 -0
- package/src/renderer/components/MixerTabWrapper.jsx +31 -0
- package/src/renderer/components/PortalSelect.jsx +239 -0
- package/src/renderer/components/QueueTab.jsx +116 -0
- package/src/renderer/components/RequestsListWrapper.jsx +78 -0
- package/src/renderer/components/ServerTab.jsx +472 -0
- package/src/renderer/components/SongInfoBarWrapper.jsx +77 -0
- package/src/renderer/components/StatusBar.jsx +92 -0
- package/src/renderer/components/TabNavigation.jsx +77 -0
- package/src/renderer/components/TransportControlsWrapper.jsx +69 -0
- package/src/renderer/components/creator/CreateTab.jsx +1236 -0
- package/src/renderer/dist/assets/kaiPlayer-CoMx__a_.js +2 -0
- package/src/renderer/dist/assets/kaiPlayer-CoMx__a_.js.map +1 -0
- package/src/renderer/dist/assets/microphoneEngine-BaCUhhQc.js +2 -0
- package/src/renderer/dist/assets/microphoneEngine-BaCUhhQc.js.map +1 -0
- package/src/renderer/dist/assets/player-DVrqp7N5.js +3 -0
- package/src/renderer/dist/assets/player-DVrqp7N5.js.map +1 -0
- package/src/renderer/dist/assets/songLoaders-BaTgGib4.js +2 -0
- package/src/renderer/dist/assets/songLoaders-BaTgGib4.js.map +1 -0
- package/src/renderer/dist/assets/webrtcManager-BhCHWceK.js +2 -0
- package/src/renderer/dist/assets/webrtcManager-BhCHWceK.js.map +1 -0
- package/src/renderer/dist/js/autoTuneWorklet.js +224 -0
- package/src/renderer/dist/js/micPitchDetectorWorklet.js +137 -0
- package/src/renderer/dist/js/musicAnalysisWorklet.js +216 -0
- package/src/renderer/dist/js/phaseVocoderWorklet.js +341 -0
- package/src/renderer/dist/js/soundtouch-worklet.js +1395 -0
- package/src/renderer/dist/renderer.css +1 -0
- package/src/renderer/dist/renderer.js +62 -0
- package/src/renderer/dist/renderer.js.map +1 -0
- package/src/renderer/dist/renderer.woff2 +0 -0
- package/src/renderer/hooks/useKeyboardShortcuts.js +154 -0
- package/src/renderer/index.html +24 -0
- package/src/renderer/index.html.backup +372 -0
- package/src/renderer/js/PlayerInterface.js +267 -0
- package/src/renderer/js/autoTuneWorklet.js +224 -0
- package/src/renderer/js/butterchurnVerify.js +46 -0
- package/src/renderer/js/canvas-app.js +114 -0
- package/src/renderer/js/cdgPlayer.js +685 -0
- package/src/renderer/js/kaiPlayer.js +1200 -0
- package/src/renderer/js/karaokeRenderer.js +3392 -0
- package/src/renderer/js/micPitchDetectorWorklet.js +137 -0
- package/src/renderer/js/microphoneEngine.js +656 -0
- package/src/renderer/js/musicAnalysisWorklet.js +216 -0
- package/src/renderer/js/phaseVocoderWorklet.js +341 -0
- package/src/renderer/js/player.js +232 -0
- package/src/renderer/js/referencePitchTracker.js +130 -0
- package/src/renderer/js/songLoaders.js +334 -0
- package/src/renderer/js/soundtouch-worklet.js +1395 -0
- package/src/renderer/js/webrtcManager.js +511 -0
- package/src/renderer/lib/butterchurn.min.js +6739 -0
- package/src/renderer/lib/butterchurnPresets.min.js +1 -0
- package/src/renderer/lib/cdgraphics-wrapper.js +16 -0
- package/src/renderer/lib/cdgraphics.js +299 -0
- package/src/renderer/public/js/autoTuneWorklet.js +224 -0
- package/src/renderer/public/js/micPitchDetectorWorklet.js +137 -0
- package/src/renderer/public/js/musicAnalysisWorklet.js +216 -0
- package/src/renderer/public/js/phaseVocoderWorklet.js +341 -0
- package/src/renderer/public/js/soundtouch-worklet.js +1395 -0
- package/src/renderer/react-entry.jsx +44 -0
- package/src/renderer/styles/tailwind.css +106 -0
- package/src/renderer/utils/qrCodeGenerator.js +98 -0
- package/src/renderer/vite.config.js +31 -0
- package/src/shared/adapters/BridgeInterface.js +195 -0
- package/src/shared/components/EffectsPanel.jsx +177 -0
- package/src/shared/components/LibraryPanel.jsx +701 -0
- package/src/shared/components/LineDetailCanvas.jsx +167 -0
- package/src/shared/components/LyricLine.jsx +505 -0
- package/src/shared/components/LyricRejection.jsx +84 -0
- package/src/shared/components/LyricSuggestion.jsx +80 -0
- package/src/shared/components/LyricsEditorCanvas.jsx +271 -0
- package/src/shared/components/MixerPanel.jsx +94 -0
- package/src/shared/components/PlayerControls.jsx +206 -0
- package/src/shared/components/PortalSelect.jsx +239 -0
- package/src/shared/components/QueueList.jsx +365 -0
- package/src/shared/components/QuickSearch.jsx +126 -0
- package/src/shared/components/RequestsList.jsx +121 -0
- package/src/shared/components/SongEditor.jsx +1362 -0
- package/src/shared/components/SongInfoBar.jsx +81 -0
- package/src/shared/components/ThemeToggle.jsx +106 -0
- package/src/shared/components/Toast.jsx +30 -0
- package/src/shared/components/VisualizationSettings.jsx +243 -0
- package/src/shared/constants.js +95 -0
- package/src/shared/context/BridgeContext.jsx +32 -0
- package/src/shared/contexts/AudioContext.jsx +37 -0
- package/src/shared/contexts/PlayerContext.jsx +66 -0
- package/src/shared/contexts/SettingsContext.jsx +50 -0
- package/src/shared/defaults.js +158 -0
- package/src/shared/formatUtils.js +59 -0
- package/src/shared/formatUtils.test.js +207 -0
- package/src/shared/hooks/useAppState.js +97 -0
- package/src/shared/hooks/useAudioEngine.js +264 -0
- package/src/shared/hooks/usePlayer.js +89 -0
- package/src/shared/hooks/useSettingsPersistence.js +74 -0
- package/src/shared/hooks/useWebRTC.js +118 -0
- package/src/shared/ipcContracts.js +299 -0
- package/src/shared/package.json +3 -0
- package/src/shared/services/creatorService.js +373 -0
- package/src/shared/services/creatorService.test.js +413 -0
- package/src/shared/services/editorService.js +213 -0
- package/src/shared/services/editorService.test.js +219 -0
- package/src/shared/services/effectsService.js +271 -0
- package/src/shared/services/effectsService.test.js +418 -0
- package/src/shared/services/libraryService.js +438 -0
- package/src/shared/services/libraryService.test.js +474 -0
- package/src/shared/services/mixerService.js +172 -0
- package/src/shared/services/mixerService.test.js +399 -0
- package/src/shared/services/playerService.js +221 -0
- package/src/shared/services/playerService.test.js +357 -0
- package/src/shared/services/preferencesService.js +219 -0
- package/src/shared/services/queueService.js +226 -0
- package/src/shared/services/queueService.test.js +430 -0
- package/src/shared/services/requestsService.js +155 -0
- package/src/shared/services/requestsService.test.js +362 -0
- package/src/shared/services/serverSettingsService.js +151 -0
- package/src/shared/services/settingsService.js +257 -0
- package/src/shared/services/settingsService.test.js +295 -0
- package/src/shared/state/StateManager.js +263 -0
- package/src/shared/utils/audio.js +42 -0
- package/src/shared/utils/format.js +32 -0
- package/src/shared/utils/lyricsUtils.js +162 -0
- package/src/test/setup.js +40 -0
- package/src/utils/cdgLoader.js +180 -0
- package/src/utils/m4aLoader.js +333 -0
- package/src/web/App.jsx +578 -0
- package/src/web/adapters/WebBridge.js +428 -0
- package/src/web/components/PlayerSettingsPanel.jsx +231 -0
- package/src/web/components/SongSearch.jsx +180 -0
- package/src/web/dist/assets/index-0H-RnRrV.js +51 -0
- package/src/web/dist/assets/index-0H-RnRrV.js.map +1 -0
- package/src/web/dist/assets/index-DYW2zB0u.css +1 -0
- package/src/web/dist/index.html +15 -0
- package/src/web/index.html +14 -0
- package/src/web/main.jsx +10 -0
- package/src/web/package-lock.json +1765 -0
- package/src/web/pages/SongRequestPage.jsx +619 -0
- package/src/web/styles/tailwind.css +68 -0
- package/src/web/vite.config.js +27 -0
- package/static/fonts/material-icons.woff2 +0 -0
- package/static/images/butterchurn-screenshots/Aderrasi - Potion of Spirits.png +0 -0
- package/static/images/butterchurn-screenshots/Aderrasi - Songflower _Moss Posy_.png +0 -0
- package/static/images/butterchurn-screenshots/Aderrasi - Storm of the Eye _Thunder_ - mash0000 - quasi pseudo meta concentrics.png +0 -0
- package/static/images/butterchurn-screenshots/Aderrasi _ Geiss - Airhandler _Kali Mix_ - Canvas Mix.png +0 -0
- package/static/images/butterchurn-screenshots/An AdamFX n Martin Infusion 2 flexi - Why The Sky Looks Diffrent Today - AdamFx n Martin Infusion - Tack Tile Disfunction B.png +0 -0
- package/static/images/butterchurn-screenshots/Cope - The Neverending Explosion of Red Liquid Fire.png +0 -0
- package/static/images/butterchurn-screenshots/proton lights __Krash_s beat code_ _Phat_remix02b.png +0 -0
- package/static/images/butterchurn-screenshots/Eo_S_ _ Phat - cubetrace - v2.png +0 -0
- package/static/images/butterchurn-screenshots/Eo_S_ _ Zylot - skylight _Stained Glass Majesty mix_.png +0 -0
- package/static/images/butterchurn-screenshots/Flexi - alien fish pond.png +0 -0
- package/static/images/butterchurn-screenshots/Flexi - area 51.png +0 -0
- package/static/images/butterchurn-screenshots/Flexi - infused with the spiral.png +0 -0
- package/static/images/butterchurn-screenshots/Flexi - mindblob _shiny mix_.png +0 -0
- package/static/images/butterchurn-screenshots/Flexi - mindblob mix.png +0 -0
- package/static/images/butterchurn-screenshots/Flexi - predator-prey-spirals.png +0 -0
- package/static/images/butterchurn-screenshots/Flexi - smashing fractals _acid etching mix_.png +0 -0
- package/static/images/butterchurn-screenshots/Flexi - truly soft piece of software - this is generic texturing _Jelly_ .png +0 -0
- package/static/images/butterchurn-screenshots/Flexi _ Martin - astral projection.png +0 -0
- package/static/images/butterchurn-screenshots/Flexi _ Martin - cascading decay swing.png +0 -0
- package/static/images/butterchurn-screenshots/Flexi _ amandio c - piercing 05 - Kopie _2_ - Kopie.png +0 -0
- package/static/images/butterchurn-screenshots/Flexi _ stahlregen - jelly showoff parade.png +0 -0
- package/static/images/butterchurn-screenshots/Flexi_ fishbrain_ Geiss _ Martin - tokamak witchery.png +0 -0
- package/static/images/butterchurn-screenshots/Flexi_ martin _ geiss - dedicated to the sherwin maxawow.png +0 -0
- package/static/images/butterchurn-screenshots/Fumbling_Foo _ Flexi_ Martin_ Orb_ Unchained - Star Nova v7b.png +0 -0
- package/static/images/butterchurn-screenshots/Geiss - Cauldron - painterly 2 _saturation remix_.png +0 -0
- package/static/images/butterchurn-screenshots/Geiss - Reaction Diffusion 2.png +0 -0
- package/static/images/butterchurn-screenshots/Geiss - Spiral Artifact.png +0 -0
- package/static/images/butterchurn-screenshots/Geiss - Thumb Drum.png +0 -0
- package/static/images/butterchurn-screenshots/Geiss _ Flexi _ Martin - disconnected.png +0 -0
- package/static/images/butterchurn-screenshots/Geiss_ Flexi _ Stahlregen - Thumbdrum Tokamak _crossfiring aftermath jelly mashup_.png +0 -0
- package/static/images/butterchurn-screenshots/Goody - The Wild Vort.png +0 -0
- package/static/images/butterchurn-screenshots/Idiot - Star Of Annon.png +0 -0
- package/static/images/butterchurn-screenshots/Krash _ Illusion - Spiral Movement.png +0 -0
- package/static/images/butterchurn-screenshots/Martin - QBikal - Surface Turbulence IIb.png +0 -0
- package/static/images/butterchurn-screenshots/Martin - acid wiring.png +0 -0
- package/static/images/butterchurn-screenshots/Martin - charisma.png +0 -0
- package/static/images/butterchurn-screenshots/Martin - liquid arrows.png +0 -0
- package/static/images/butterchurn-screenshots/Milk Artist At our Best - FED - SlowFast Ft AdamFX n Martin - HD CosmoFX.png +0 -0
- package/static/images/butterchurn-screenshots/ORB - Waaa.png +0 -0
- package/static/images/butterchurn-screenshots/Phat_fiShbRaiN_Eo_S_Mandala_Chasers_remix.png +0 -0
- package/static/images/butterchurn-screenshots/Rovastar - Oozing Resistance.png +0 -0
- package/static/images/butterchurn-screenshots/Rovastar _ Loadus _ Geiss - FractalDrop _Triple Mix_.png +0 -0
- package/static/images/butterchurn-screenshots/TonyMilkdrop - Leonardo Da Vinci_s Balloon _Flexi - merry-go-round _ techstyle_.png +0 -0
- package/static/images/butterchurn-screenshots/TonyMilkdrop - Magellan_s Nebula _Flexi - you enter first _ multiverse_.png +0 -0
- package/static/images/butterchurn-screenshots/Unchained - Rewop.png +0 -0
- package/static/images/butterchurn-screenshots/Unchained - Unified Drag 2.png +0 -0
- package/static/images/butterchurn-screenshots/Unchained _ Rovastar - Wormhole Pillars _Hall of Shadows mix_.png +0 -0
- package/static/images/butterchurn-screenshots/Zylot - Paint Spill _Music Reactive Paint Mix_.png +0 -0
- package/static/images/butterchurn-screenshots/Zylot - Star Ornament.png +0 -0
- package/static/images/butterchurn-screenshots/Zylot - True Visionary _Final Mix_.png +0 -0
- package/static/images/butterchurn-screenshots/_Aderrasi - Wanderer in Curved Space - mash0000 - faclempt kibitzing meshuggana schmaltz _Geiss color mix_.png +0 -0
- package/static/images/butterchurn-screenshots/_Geiss - Artifact 01.png +0 -0
- package/static/images/butterchurn-screenshots/_Geiss - Desert Rose 2.png +0 -0
- package/static/images/butterchurn-screenshots/_Geiss - untitled.png +0 -0
- package/static/images/butterchurn-screenshots/_Mig_049.png +0 -0
- package/static/images/butterchurn-screenshots/_Mig_085.png +0 -0
- package/static/images/butterchurn-screenshots/_Rovastar _ Geiss - Hurricane Nightmare _Posterize Mix_.png +0 -0
- package/static/images/butterchurn-screenshots/___ Royal - Mashup _197_.png +0 -0
- package/static/images/butterchurn-screenshots/___ Royal - Mashup _220_.png +0 -0
- package/static/images/butterchurn-screenshots/___ Royal - Mashup _431_.png +0 -0
- package/static/images/butterchurn-screenshots/cope _ martin - mother-of-pearl.png +0 -0
- package/static/images/butterchurn-screenshots/fiShbRaiN _ Flexi - witchcraft 2_0.png +0 -0
- package/static/images/butterchurn-screenshots/flexi - bouncing balls _double mindblob neon mix_.png +0 -0
- package/static/images/butterchurn-screenshots/flexi - mom_ why the sky looks different today.png +0 -0
- package/static/images/butterchurn-screenshots/flexi - patternton_ district of media_ capitol of the united abstractions of fractopia.png +0 -0
- package/static/images/butterchurn-screenshots/flexi - swing out on the spiral.png +0 -0
- package/static/images/butterchurn-screenshots/flexi - what is the matrix.png +0 -0
- package/static/images/butterchurn-screenshots/flexi _ amandio c - organic _random mashup_.png +0 -0
- package/static/images/butterchurn-screenshots/flexi _ amandio c - organic12-3d-2_milk.png +0 -0
- package/static/images/butterchurn-screenshots/flexi _ fishbrain - neon mindblob grafitti.png +0 -0
- package/static/images/butterchurn-screenshots/flexi _ geiss - pogo cubes vs_ tokamak vs_ game of life _stahls jelly 4_5 finish_.png +0 -0
- package/static/images/butterchurn-screenshots/high-altitude basket unraveling - singh grooves nitrogen argon nz_.png +0 -0
- package/static/images/butterchurn-screenshots/martin - The Bridge of Khazad-Dum.png +0 -0
- package/static/images/butterchurn-screenshots/martin - angel flight.png +0 -0
- package/static/images/butterchurn-screenshots/martin - another kind of groove.png +0 -0
- package/static/images/butterchurn-screenshots/martin - bombyx mori.png +0 -0
- package/static/images/butterchurn-screenshots/martin - castle in the air.png +0 -0
- package/static/images/butterchurn-screenshots/martin - chain breaker.png +0 -0
- package/static/images/butterchurn-screenshots/martin - disco mix 4.png +0 -0
- package/static/images/butterchurn-screenshots/martin - extreme heat.png +0 -0
- package/static/images/butterchurn-screenshots/martin - frosty caves 2.png +0 -0
- package/static/images/butterchurn-screenshots/martin - fruit machine.png +0 -0
- package/static/images/butterchurn-screenshots/martin - ghost city.png +0 -0
- package/static/images/butterchurn-screenshots/martin - glass corridor.png +0 -0
- package/static/images/butterchurn-screenshots/martin - infinity _2010 update_.png +0 -0
- package/static/images/butterchurn-screenshots/martin - mandelbox explorer - high speed demo version.png +0 -0
- package/static/images/butterchurn-screenshots/martin - mucus cervix.png +0 -0
- package/static/images/butterchurn-screenshots/martin - reflections on black tiles.png +0 -0
- package/static/images/butterchurn-screenshots/martin - stormy sea _2010 update_.png +0 -0
- package/static/images/butterchurn-screenshots/martin - witchcraft reloaded.png +0 -0
- package/static/images/butterchurn-screenshots/martin _ flexi - diamond cutter _prismaticvortex_com_ - camille - i wish i wish i wish i was constrained.png +0 -0
- package/static/images/butterchurn-screenshots/martin _shadow harlequins shape code_ - fata morgana.png +0 -0
- package/static/images/butterchurn-screenshots/martin_ flexi_ fishbrain _ sto - enterstate _random mashup_.png +0 -0
- package/static/images/butterchurn-screenshots/sawtooth grin roam.png +0 -0
- package/static/images/butterchurn-screenshots/shifter - dark tides bdrv mix 2.png +0 -0
- package/static/images/butterchurn-screenshots/suksma - Rovastar - Sunflower Passion _Enlightment Mix__Phat_edit _ flexi und martin shaders - circumflex in character classes in regular expression.png +0 -0
- package/static/images/butterchurn-screenshots/suksma - heretical crosscut playpen.png +0 -0
- package/static/images/butterchurn-screenshots/suksma - uninitialized variabowl _hydroponic chronic_.png +0 -0
- package/static/images/butterchurn-screenshots/suksma - vector exp 1 - couldn_t not.png +0 -0
- package/static/images/butterchurn-screenshots/yin - 191 - Temporal singularities.png +0 -0
- package/static/images/logo-512.png +0 -0
- package/static/images/logo.png +0 -0
- package/static/loukai-logo.png +0 -0
- package/static/screenshot-generator.html +610 -0
|
@@ -0,0 +1,3392 @@
|
|
|
1
|
+
// TODO: State should be passed to renderer instead of accessing globals
|
|
2
|
+
|
|
3
|
+
export class KaraokeRenderer {
|
|
4
|
+
/**
 * Builds a karaoke renderer bound to an existing <canvas> element.
 *
 * Looks up the canvas by id, initializes all rendering/audio/visualization
 * state to inert defaults, then wires up the canvas, the Butterchurn effects
 * layer, responsive resizing, and the animation loop.
 *
 * @param {string} canvasId - DOM id of the target canvas element. If no such
 *   element exists the constructor returns early and the instance is left
 *   unusable (this.ctx etc. are never set) — callers should verify the canvas
 *   exists before constructing. TODO confirm callers handle this case.
 */
constructor(canvasId) {
  this.canvas = document.getElementById(canvasId);

  // Bail out silently when the canvas is missing; no further fields are set.
  if (!this.canvas) {
    return;
  }

  this.ctx = this.canvas.getContext('2d');
  this.lyrics = null;
  this.songDuration = 0;
  this.currentTime = 0;
  this.animationFrame = null;
  this.isPlaying = false;

  // Time interpolation for smooth progress bar (60fps): the host app reports
  // time periodically; these two fields let the render loop extrapolate
  // between reports.
  this.lastReportedTime = 0;
  this.lastReportedTimestamp = performance.now();

  // Animation tracking for backup singers.
  this.backupAnimations = new Map(); // lineIndex -> { alpha, fadeDirection, lastStateChange }

  // Callback for when current line's singer changes (for backup:PA feature).
  this.onSingerChange = null; // function(singer) - called when active line's singer changes
  this.lastActiveSinger = null; // Track last singer to detect changes

  // Lyric transition animations.
  this.lyricTransitions = new Map(); // Track lyrics moving from upcoming to active
  this.hiddenDuringTransition = new Set(); // Track lines that should be hidden during transitions

  // Performance optimization - cache expensive calculations so the active
  // line is only recomputed when playback time moves meaningfully.
  this.cachedCurrentLine = -1;
  this.lastTimeForLineCalculation = -1;
  this.lineCalculationTolerance = 0.1; // Only recalculate if time changed by 0.1s

  // Track upcoming lyric positioning.
  this.lockedUpcomingIndex = null;
  this.lastActiveLyricsBottom = null; // Save the Y position after drawing active lyrics

  // Frame rate optimization.
  this.frameCount = 0;
  this.maxFPS = 30; // Reduce from 60fps to 30fps for better performance
  this.frameSkip = 2; // Skip every other frame

  // Microphone input for waveform.
  this.micStream = null;
  this.audioContext = null;
  this.analyser = null;
  this.micDataArray = null;
  this.waveformData = new Uint8Array(1440).fill(128); // 6 seconds at 240Hz (1440 pixels) - mic rolling buffer (128 = silence)
  this.micGainNode = null; // For routing mic to speakers
  this.inputDevice = 'default'; // Stored input device ID from preferences

  // Waveform preferences (will be set from main app).
  this.waveformPreferences = {
    enableWaveforms: false,
    micToSpeakers: true,
    enableMic: true,
    enableEffects: true,
    overlayOpacity: 0.7,
    showUpcomingLyrics: true,
  };

  // FPS and performance tracking.
  this.fpsHistory = [];
  this.lastFrameTime = performance.now();
  this.frameUpdateTime = 0;

  // WebGL effects system (populated by setupAdvancedVisualizations).
  this.effectsCanvas = null;
  this.effectsGL = null;
  this.musicAnalyser = null;
  this.musicFrequencyData = null;

  // Advanced visualization libraries.
  this.butterchurn = null;
  this.currentPreset = null;
  this.presetList = [];
  this.effectType = 'butterchurn';
  // Note: Butterchurn now uses PA analyser from kaiPlayer, no separate context needed.

  // AudioWorklet for efficient analysis.
  this.musicWorkletNode = null;
  this.cachedAnalysis = { energy: 0, bass: 0, mid: 0, treble: 0, centroid: 0 };
  this.workletAvailable = false;
  this.musicAudioBuffer = null;
  this.musicSourceNode = null;
  this.vocalsWaveformData = new Uint8Array(1920).fill(128); // 8 seconds at 240Hz (1920 pixels) - vocals rendering array (128 = silence)
  this.zeroPadding = new Uint8Array(1920).fill(128); // Center value array for concatenation (128 = silence)
  this.waveformDataIndex = 0;

  // Vocals track waveform.
  this.vocalsAudioBuffer = null;
  this.vocalsWaveformMaxLength = 480; // 8 seconds at 60fps
  this.vocalsAnalyser = null;
  this.vocalsSource = null;
  this.preCalculatedVocalsWaveform = null;

  // Debug audio level monitoring.
  this.lastAudioDebugTime = 0;
  this.audioDebugInterval = 1000; // Log every 1 second for testing
  this.lastConditionsDebugTime = 0;

  // QR code for server URL.
  this.qrCodeCanvas = null;
  this.showQrCode = false;
  this.serverUrl = null;

  // Queue display.
  this.queueItems = [];
  this.displayQueue = true;

  // Karaoke visual settings scaled for 1080p (canvas backing store is fixed
  // at 1920x1080; see setupCanvas).
  this.settings = {
    fontSize: 80, // Scaled up for 1080p (was 40 for ~800px)
    fontFamily: 'bold Arial, sans-serif',
    lineHeight: 140, // Increased spacing between lines
    textColor: '#ffffff',
    activeColor: '#00BFFF', // Light blue for active lines (easier to read)
    upcomingColor: '#888888', // Gray for upcoming lines
    backupColor: '#DAA520', // Golden color for backup singer lines
    lyricTransitionDuration: 0.3, // Animation duration in seconds (300ms)
    lyricTransitionStartBefore: 0.3, // Start animation this many seconds before active (300ms)
    backupActiveColor: '#FFD700', // Brighter gold when active
    // Singer type colors
    singerBColor: '#EF4444', // Red for Singer B (duet partner)
    duetColor: '#22C55E', // Green for duet (both singers)
    backupPAColor: '#FFA500', // Orange for backup:PA (brighter than gold backup)
    backgroundColor: '#1a1a1a',
    shadowColor: '#000000',
    linesVisible: 1, // Show only current line
    maxWidth: 0.9, // 90% of canvas width for text
    progressBarHeight: 30, // Taller progress bar
    progressBarColor: '#007acc',
    progressBarBg: '#333333',
    progressBarMargin: 100, // More space between progress bar and lyrics

    // Backup singer animation settings
    backupFadeDuration: 0.8, // seconds to fade in/out
    backupMaxAlpha: 0.5, // maximum opacity for backup singers (50% - subtle background feel)
    backupMinAlpha: 0.0, // minimum opacity (fully transparent)
    backupAnimationEasing: 'ease-out', // animation curve

    // Microphone waveform settings
    waveformHeight: 80, // Height of the waveform area
    waveformColor: '#00ff00', // Green waveform
    waveformBackgroundColor: '#333333',
    waveformCurrentPosition: 0.75, // Position of current time (75% from left)

    // Vocals waveform settings
    vocalsWaveformHeight: 60, // Slightly smaller than mic waveform
    vocalsWaveformColor: '#00bfff', // Blue waveform for vocals to match lyrics
    vocalsWaveformGap: 10, // Gap between vocals and mic waveforms
  };

  this.setupCanvas();
  this.setupAdvancedVisualizations();
  this.setupResponsiveCanvas();
  this.startAnimation();
}
|
|
163
|
+
|
|
164
|
+
setupAdvancedVisualizations() {
|
|
165
|
+
// Create offscreen canvas for effects
|
|
166
|
+
this.effectsCanvas = document.createElement('canvas');
|
|
167
|
+
this.effectsCanvas.width = 1920;
|
|
168
|
+
this.effectsCanvas.height = 1080;
|
|
169
|
+
|
|
170
|
+
try {
|
|
171
|
+
// Try to load Butterchurn (Milkdrop visualizations) from global variables
|
|
172
|
+
if (typeof window !== 'undefined' && window.butterchurn && window.butterchurnPresets) {
|
|
173
|
+
this.effectsGL =
|
|
174
|
+
this.effectsCanvas.getContext('webgl2') || this.effectsCanvas.getContext('webgl');
|
|
175
|
+
if (this.effectsGL) {
|
|
176
|
+
// Try different API patterns for Butterchurn
|
|
177
|
+
let butterchurnAPI = null;
|
|
178
|
+
if (typeof window.butterchurn.createVisualizer === 'function') {
|
|
179
|
+
butterchurnAPI = window.butterchurn;
|
|
180
|
+
} else if (
|
|
181
|
+
window.butterchurn.default &&
|
|
182
|
+
typeof window.butterchurn.default.createVisualizer === 'function'
|
|
183
|
+
) {
|
|
184
|
+
butterchurnAPI = window.butterchurn.default;
|
|
185
|
+
} else if (typeof window.butterchurn === 'function') {
|
|
186
|
+
// Maybe it's a constructor function
|
|
187
|
+
butterchurnAPI = window.butterchurn;
|
|
188
|
+
}
|
|
189
|
+
|
|
190
|
+
if (!butterchurnAPI || typeof butterchurnAPI.createVisualizer !== 'function') {
|
|
191
|
+
console.error(
|
|
192
|
+
'Butterchurn createVisualizer not found. Available methods:',
|
|
193
|
+
butterchurnAPI ? Object.keys(butterchurnAPI) : 'none'
|
|
194
|
+
);
|
|
195
|
+
throw new Error('Butterchurn API not compatible');
|
|
196
|
+
}
|
|
197
|
+
|
|
198
|
+
// Create waveform audio context if needed (used as dummy for butterchurn)
|
|
199
|
+
if (!this.waveformAudioContext) {
|
|
200
|
+
this.waveformAudioContext = new (window.AudioContext || window.webkitAudioContext)();
|
|
201
|
+
}
|
|
202
|
+
|
|
203
|
+
// Initialize Butterchurn with the correct API: createVisualizer(audioContext, canvas, options)
|
|
204
|
+
// Note: Audio comes from PA analyser via setVisualizationAnalyser(), not from this context
|
|
205
|
+
this.butterchurn = butterchurnAPI.createVisualizer(
|
|
206
|
+
this.waveformAudioContext,
|
|
207
|
+
this.effectsCanvas,
|
|
208
|
+
{
|
|
209
|
+
width: 1920,
|
|
210
|
+
height: 1080,
|
|
211
|
+
mesh_width: 128, // Lower for performance
|
|
212
|
+
mesh_height: 72, // Lower for performance
|
|
213
|
+
fps: 30, // Match our target framerate
|
|
214
|
+
}
|
|
215
|
+
);
|
|
216
|
+
|
|
217
|
+
// If we already have music loaded, decode it for Butterchurn
|
|
218
|
+
this.tryDecodeStoredAudioForButterchurn();
|
|
219
|
+
|
|
220
|
+
// Get available presets
|
|
221
|
+
this.presetList = Object.keys(window.butterchurnPresets.getPresets());
|
|
222
|
+
|
|
223
|
+
// Load highly reactive presets that respond strongly to audio
|
|
224
|
+
const defaultPresets = [
|
|
225
|
+
'Rovastar - Fractopia', // Very reactive to bass/drums
|
|
226
|
+
'Rovastar - Altars Of Madness (Krash Mix)', // High energy response
|
|
227
|
+
'Rovastar - Tunnel Runner', // Fast visual response
|
|
228
|
+
'martin - disco ball', // Classic reactive preset
|
|
229
|
+
'Krash - The Neverending Explosion', // Explosive audio response
|
|
230
|
+
'flexi - mindblob mix', // Good bass response
|
|
231
|
+
'Rovastar - Crystal High', // Sharp audio reactions
|
|
232
|
+
'martin - being & time', // Dynamic movement
|
|
233
|
+
];
|
|
234
|
+
|
|
235
|
+
// Only load a default preset if no preset is currently selected
|
|
236
|
+
// This prevents auto-resetting effects when new songs start
|
|
237
|
+
if (!this.currentPreset) {
|
|
238
|
+
const startPreset =
|
|
239
|
+
defaultPresets.find((p) => this.presetList.includes(p)) || this.presetList[0];
|
|
240
|
+
if (startPreset) {
|
|
241
|
+
const presetData = window.butterchurnPresets.getPresets()[startPreset];
|
|
242
|
+
this.butterchurn.loadPreset(presetData, 0.0); // 0 second transition
|
|
243
|
+
this.currentPreset = startPreset;
|
|
244
|
+
}
|
|
245
|
+
} else {
|
|
246
|
+
// If we already have a current preset, reload it to maintain continuity
|
|
247
|
+
if (this.presetList.includes(this.currentPreset)) {
|
|
248
|
+
const presetData = window.butterchurnPresets.getPresets()[this.currentPreset];
|
|
249
|
+
this.butterchurn.loadPreset(presetData, 0.0);
|
|
250
|
+
}
|
|
251
|
+
}
|
|
252
|
+
|
|
253
|
+
this.effectType = 'butterchurn';
|
|
254
|
+
|
|
255
|
+
// Check if we have music loaded but no Butterchurn buffer yet
|
|
256
|
+
if (this.musicAudioBuffer && !this.butterchurnAudioBuffer) {
|
|
257
|
+
// We need to get the original audio data, but AudioBuffer doesn't give us access
|
|
258
|
+
// This is a complex issue that requires storing the original ArrayBuffer
|
|
259
|
+
// For now, let's add a flag to trigger re-loading
|
|
260
|
+
this.needsButterchurnAudioDecode = true;
|
|
261
|
+
}
|
|
262
|
+
}
|
|
263
|
+
} else {
|
|
264
|
+
console.warn('Butterchurn libraries not available, effects disabled');
|
|
265
|
+
throw new Error('Butterchurn not available');
|
|
266
|
+
}
|
|
267
|
+
} catch (error) {
|
|
268
|
+
console.error('Butterchurn failed to load, effects disabled:', error);
|
|
269
|
+
this.effectType = 'disabled';
|
|
270
|
+
}
|
|
271
|
+
|
|
272
|
+
if (!this.effectsGL) {
|
|
273
|
+
console.warn('WebGL not available, effects disabled');
|
|
274
|
+
}
|
|
275
|
+
}
|
|
276
|
+
|
|
277
|
+
setupCanvas() {
|
|
278
|
+
// Canvas size is ALWAYS 1920x1080 (1080p)
|
|
279
|
+
// CSS controls how it stretches to fit the container
|
|
280
|
+
this.canvas.width = 1920;
|
|
281
|
+
this.canvas.height = 1080;
|
|
282
|
+
|
|
283
|
+
// Set default font
|
|
284
|
+
this.ctx.font = `${this.settings.fontSize}px ${this.settings.fontFamily}`;
|
|
285
|
+
this.ctx.textAlign = 'left';
|
|
286
|
+
this.ctx.textBaseline = 'middle';
|
|
287
|
+
}
|
|
288
|
+
|
|
289
|
+
setupResponsiveCanvas() {
|
|
290
|
+
// Get container reference for ResizeObserver
|
|
291
|
+
const container = this.canvas.parentElement;
|
|
292
|
+
|
|
293
|
+
// Function to maintain 16:9 aspect ratio (1920:1080) while scaling to fit container
|
|
294
|
+
const resizeCanvas = () => {
|
|
295
|
+
if (!container) return;
|
|
296
|
+
|
|
297
|
+
const containerRect = container.getBoundingClientRect();
|
|
298
|
+
const containerWidth = containerRect.width;
|
|
299
|
+
const containerHeight = containerRect.height;
|
|
300
|
+
|
|
301
|
+
// Skip if container has no dimensions yet
|
|
302
|
+
if (containerWidth === 0 || containerHeight === 0) {
|
|
303
|
+
// Schedule another attempt after DOM settles
|
|
304
|
+
setTimeout(() => resizeCanvas(), 100);
|
|
305
|
+
return;
|
|
306
|
+
}
|
|
307
|
+
|
|
308
|
+
// 16:9 aspect ratio (1920/1080 = 1.7777...)
|
|
309
|
+
const aspectRatio = 16 / 9;
|
|
310
|
+
|
|
311
|
+
let displayWidth, displayHeight;
|
|
312
|
+
|
|
313
|
+
// Calculate size that fits container while maintaining aspect ratio
|
|
314
|
+
if (containerWidth / containerHeight > aspectRatio) {
|
|
315
|
+
// Container is wider than 16:9, fit by height
|
|
316
|
+
displayHeight = containerHeight;
|
|
317
|
+
displayWidth = displayHeight * aspectRatio;
|
|
318
|
+
} else {
|
|
319
|
+
// Container is taller than 16:9, fit by width
|
|
320
|
+
displayWidth = containerWidth;
|
|
321
|
+
displayHeight = displayWidth / aspectRatio;
|
|
322
|
+
}
|
|
323
|
+
|
|
324
|
+
// Set CSS size to maintain proportions
|
|
325
|
+
this.canvas.style.width = displayWidth + 'px';
|
|
326
|
+
this.canvas.style.height = displayHeight + 'px';
|
|
327
|
+
};
|
|
328
|
+
|
|
329
|
+
// Initial resize
|
|
330
|
+
resizeCanvas();
|
|
331
|
+
|
|
332
|
+
// Double-check sizing after DOM fully settles
|
|
333
|
+
setTimeout(() => resizeCanvas(), 100);
|
|
334
|
+
requestAnimationFrame(() => resizeCanvas());
|
|
335
|
+
|
|
336
|
+
// Resize on window resize
|
|
337
|
+
window.addEventListener('resize', resizeCanvas);
|
|
338
|
+
|
|
339
|
+
// Watch container for size changes (e.g., sidebar drawer open/close)
|
|
340
|
+
// This catches layout changes that don't trigger window resize events
|
|
341
|
+
if (container && typeof ResizeObserver !== 'undefined') {
|
|
342
|
+
this.resizeObserver = new ResizeObserver(() => {
|
|
343
|
+
resizeCanvas();
|
|
344
|
+
});
|
|
345
|
+
this.resizeObserver.observe(container);
|
|
346
|
+
}
|
|
347
|
+
|
|
348
|
+
// Store reference to remove listener on destroy
|
|
349
|
+
this.resizeHandler = resizeCanvas;
|
|
350
|
+
}
|
|
351
|
+
|
|
352
|
+
setSongMetadata(metadata) {
|
|
353
|
+
// Store song metadata for display when not playing
|
|
354
|
+
this.songMetadata = metadata || {};
|
|
355
|
+
}
|
|
356
|
+
|
|
357
|
+
/**
|
|
358
|
+
* Set server URL and generate QR code
|
|
359
|
+
* @param {string} url - Server URL
|
|
360
|
+
* @param {boolean} show - Whether to show QR code
|
|
361
|
+
*/
|
|
362
|
+
async setServerQRCode(url, show) {
|
|
363
|
+
this.serverUrl = url;
|
|
364
|
+
this.showQrCode = show;
|
|
365
|
+
|
|
366
|
+
if (url && show) {
|
|
367
|
+
try {
|
|
368
|
+
// Dynamically import QR code generator
|
|
369
|
+
const { generateQRCodeCanvas } = await import('../utils/qrCodeGenerator.js');
|
|
370
|
+
this.qrCodeCanvas = await generateQRCodeCanvas(url, 150);
|
|
371
|
+
} catch (error) {
|
|
372
|
+
console.error('Error generating QR code:', error);
|
|
373
|
+
this.qrCodeCanvas = null;
|
|
374
|
+
}
|
|
375
|
+
} else {
|
|
376
|
+
this.qrCodeCanvas = null;
|
|
377
|
+
}
|
|
378
|
+
}
|
|
379
|
+
|
|
380
|
+
/**
|
|
381
|
+
* Set queue items and display setting
|
|
382
|
+
* @param {Array} queue - Array of queue items with title, artist, requester
|
|
383
|
+
* @param {boolean} display - Whether to display queue
|
|
384
|
+
*/
|
|
385
|
+
setQueueDisplay(queue, display) {
|
|
386
|
+
this.queueItems = queue || [];
|
|
387
|
+
this.displayQueue = display !== false;
|
|
388
|
+
}
|
|
389
|
+
|
|
390
|
+
loadLyrics(lyricsData, songDuration = 0) {
|
|
391
|
+
// Store original lyrics data for outro detection
|
|
392
|
+
this.originalLyricsData = lyricsData || [];
|
|
393
|
+
// Store filtered lyrics for display
|
|
394
|
+
this.lyrics = this.parseLyricsData(lyricsData);
|
|
395
|
+
this.songDuration = songDuration;
|
|
396
|
+
}
|
|
397
|
+
|
|
398
|
+
parseLyricsData(data) {
|
|
399
|
+
if (!data || !Array.isArray(data)) return [];
|
|
400
|
+
|
|
401
|
+
// Filter out disabled lines for playback (backup lines are still included)
|
|
402
|
+
const enabledData = data.filter((line) => line.disabled !== true);
|
|
403
|
+
|
|
404
|
+
return enabledData
|
|
405
|
+
.map((line, index) => {
|
|
406
|
+
if (typeof line === 'object' && line !== null) {
|
|
407
|
+
const words = this.parseWordsFromLine(line);
|
|
408
|
+
const text = line.text || line.lyrics || line.content || line.lyric || '';
|
|
409
|
+
// Support new singer field format (backup, backup:PA, B, duet, etc.)
|
|
410
|
+
// Falls back to legacy backup boolean for compatibility
|
|
411
|
+
const singer = line.singer || (line.backup === true ? 'backup' : null);
|
|
412
|
+
const isBackup = singer?.startsWith('backup') || false;
|
|
413
|
+
return {
|
|
414
|
+
id: index,
|
|
415
|
+
startTime: line.start || line.time || line.start_time || index * 3,
|
|
416
|
+
endTime: line.end || line.end_time || (line.start || line.time || index * 3) + 3,
|
|
417
|
+
text: text,
|
|
418
|
+
words: words,
|
|
419
|
+
singer: singer, // New: singer field (null, 'A', 'B', 'backup', 'backup:PA', 'duet')
|
|
420
|
+
isBackup: isBackup, // Derived from singer field for backward compatibility
|
|
421
|
+
};
|
|
422
|
+
} else {
|
|
423
|
+
// Simple string - create word timing estimates
|
|
424
|
+
const text = line || '';
|
|
425
|
+
const words = this.estimateWordTiming(text, index * 3);
|
|
426
|
+
return {
|
|
427
|
+
id: index,
|
|
428
|
+
startTime: index * 3,
|
|
429
|
+
endTime: index * 3 + 3,
|
|
430
|
+
text: text,
|
|
431
|
+
words: words,
|
|
432
|
+
isBackup: false,
|
|
433
|
+
};
|
|
434
|
+
}
|
|
435
|
+
})
|
|
436
|
+
.filter((line) => line.text.trim().length > 0);
|
|
437
|
+
}
|
|
438
|
+
|
|
439
|
+
parseWordsFromLine(line) {
|
|
440
|
+
// If the line has word-level timing data, use it
|
|
441
|
+
if (line.words && Array.isArray(line.words)) {
|
|
442
|
+
return line.words.map((word) => ({
|
|
443
|
+
text: word.t || word.text || word.word || '', // word.word for Whisper output
|
|
444
|
+
startTime: word.s || word.start || word.startTime || 0,
|
|
445
|
+
endTime: word.e || word.end || word.endTime || 0,
|
|
446
|
+
}));
|
|
447
|
+
}
|
|
448
|
+
|
|
449
|
+
// Otherwise estimate word timing
|
|
450
|
+
const text = line.text || line.lyrics || line.content || line.lyric || '';
|
|
451
|
+
const startTime = line.start || line.time || line.start_time || 0;
|
|
452
|
+
const endTime = line.end || line.end_time || startTime + 3;
|
|
453
|
+
const duration = endTime - startTime;
|
|
454
|
+
|
|
455
|
+
return this.estimateWordTiming(text, startTime, duration);
|
|
456
|
+
}
|
|
457
|
+
|
|
458
|
+
estimateWordTiming(text, startTime, duration = 3) {
|
|
459
|
+
const words = text.split(/\s+/).filter((w) => w.length > 0);
|
|
460
|
+
if (words.length === 0) return [];
|
|
461
|
+
|
|
462
|
+
const wordDuration = duration / words.length;
|
|
463
|
+
|
|
464
|
+
return words.map((word, index) => ({
|
|
465
|
+
text: word,
|
|
466
|
+
startTime: startTime + index * wordDuration,
|
|
467
|
+
endTime: startTime + (index + 1) * wordDuration,
|
|
468
|
+
}));
|
|
469
|
+
}
|
|
470
|
+
|
|
471
|
+
setCurrentTime(time) {
|
|
472
|
+
const oldTime = this.currentTime;
|
|
473
|
+
this.currentTime = time;
|
|
474
|
+
|
|
475
|
+
// Track for interpolation
|
|
476
|
+
this.lastReportedTime = time;
|
|
477
|
+
this.lastReportedTimestamp = performance.now();
|
|
478
|
+
|
|
479
|
+
// If time jumped significantly and we're playing, restart music analysis from new position
|
|
480
|
+
if (this.isPlaying && Math.abs(time - oldTime) > 1.0) {
|
|
481
|
+
// 1 second threshold - restart butterchurn analysis to stay in sync
|
|
482
|
+
// Stop first, then delay restart slightly to ensure it happens after PA sources restart
|
|
483
|
+
this.stopMusicAnalysis();
|
|
484
|
+
|
|
485
|
+
// Delay restart by a small amount to allow PA audio sources to stabilize first
|
|
486
|
+
setTimeout(() => {
|
|
487
|
+
if (this.isPlaying) {
|
|
488
|
+
this.startMusicAnalysis();
|
|
489
|
+
}
|
|
490
|
+
}, 50); // 50ms delay
|
|
491
|
+
}
|
|
492
|
+
}
|
|
493
|
+
|
|
494
|
+
/**
|
|
495
|
+
* Get interpolated current time for smooth 60fps progress bars
|
|
496
|
+
* When playing, calculates time based on elapsed time since last update
|
|
497
|
+
*/
|
|
498
|
+
getInterpolatedTime() {
|
|
499
|
+
if (!this.isPlaying) {
|
|
500
|
+
return this.currentTime;
|
|
501
|
+
}
|
|
502
|
+
|
|
503
|
+
// Calculate elapsed time since last report
|
|
504
|
+
const now = performance.now();
|
|
505
|
+
const elapsed = (now - this.lastReportedTimestamp) / 1000; // Convert to seconds
|
|
506
|
+
const interpolated = this.lastReportedTime + elapsed;
|
|
507
|
+
|
|
508
|
+
// Don't exceed song duration
|
|
509
|
+
return Math.min(interpolated, this.songDuration || Infinity);
|
|
510
|
+
}
|
|
511
|
+
|
|
512
|
+
/**
 * Set the analyser node for butterchurn visualization
 * This is provided by the PA audio context from kaiPlayer
 *
 * If the existing butterchurn instance was created against a different
 * AudioContext than the analyser's, it is destroyed and recreated on the
 * PA context first (Web Audio nodes cannot be connected across contexts),
 * then the analyser is connected via connectAudio().
 *
 * @param {AnalyserNode} analyserNode - The PA analyser node
 */
setVisualizationAnalyser(analyserNode) {
  if (!analyserNode) return;

  try {
    console.log('🎨 Connecting butterchurn to PA analyser');

    // Check if butterchurn was created with a different context
    // If so, recreate it with the PA context from the analyser
    const paContext = analyserNode.context;

    if (this.butterchurn && this.butterchurn.audioContext !== paContext) {
      console.log('⚠️ Butterchurn context mismatch - recreating with PA context');

      // Destroy old butterchurn
      if (this.butterchurn.destroy) {
        this.butterchurn.destroy();
      }

      // Get butterchurn API
      // (the library may expose createVisualizer directly or under .default,
      // depending on how the bundle was loaded)
      let butterchurnAPI = null;
      if (typeof window.butterchurn.createVisualizer === 'function') {
        butterchurnAPI = window.butterchurn;
      } else if (
        window.butterchurn.default &&
        typeof window.butterchurn.default.createVisualizer === 'function'
      ) {
        butterchurnAPI = window.butterchurn.default;
      }

      if (butterchurnAPI) {
        // Recreate with PA context
        // (same 1080p / reduced-mesh / 30fps options used at initial creation)
        this.butterchurn = butterchurnAPI.createVisualizer(paContext, this.effectsCanvas, {
          width: 1920,
          height: 1080,
          mesh_width: 128,
          mesh_height: 72,
          fps: 30,
        });

        // Reload current preset if available
        // (0.0 transition: restore instantly, no crossfade)
        if (this.currentPreset && window.butterchurnPresets) {
          const presets = window.butterchurnPresets.getPresets();
          if (presets[this.currentPreset]) {
            this.butterchurn.loadPreset(presets[this.currentPreset], 0.0);
          }
        }
      }
    }

    // Connect analyser to butterchurn
    if (this.butterchurn) {
      this.butterchurn.connectAudio(analyserNode);
      console.log('✅ Butterchurn connected to PA analyser successfully');
    }
  } catch (error) {
    console.error('❌ Failed to connect butterchurn to analyser:', error);
    console.error(' Error type:', error.name);
    console.error(' Error message:', error.message);
  }
}
|
|
577
|
+
|
|
578
|
+
// setMusicAudio() removed - butterchurn now uses PA analyser from kaiPlayer
|
|
579
|
+
// Connected via setVisualizationAnalyser() during song load
|
|
580
|
+
|
|
581
|
+
reinitializeButterchurn() {
|
|
582
|
+
try {
|
|
583
|
+
// Destroy the old Butterchurn instance if it exists
|
|
584
|
+
if (this.butterchurn && this.butterchurn.destroy) {
|
|
585
|
+
this.butterchurn.destroy();
|
|
586
|
+
}
|
|
587
|
+
|
|
588
|
+
// Clear old references
|
|
589
|
+
this.butterchurn = null;
|
|
590
|
+
|
|
591
|
+
// Get the Butterchurn API
|
|
592
|
+
let butterchurnAPI = null;
|
|
593
|
+
if (typeof window.butterchurn.createVisualizer === 'function') {
|
|
594
|
+
butterchurnAPI = window.butterchurn;
|
|
595
|
+
} else if (
|
|
596
|
+
window.butterchurn.default &&
|
|
597
|
+
typeof window.butterchurn.default.createVisualizer === 'function'
|
|
598
|
+
) {
|
|
599
|
+
butterchurnAPI = window.butterchurn.default;
|
|
600
|
+
} else if (typeof window.butterchurn === 'function') {
|
|
601
|
+
butterchurnAPI = window.butterchurn;
|
|
602
|
+
}
|
|
603
|
+
|
|
604
|
+
if (!butterchurnAPI || typeof butterchurnAPI.createVisualizer !== 'function') {
|
|
605
|
+
console.error('Butterchurn createVisualizer not found during reinit');
|
|
606
|
+
return;
|
|
607
|
+
}
|
|
608
|
+
|
|
609
|
+
// Create fresh Butterchurn instance (uses waveformAudioContext as dummy)
|
|
610
|
+
// Note: Audio comes from PA analyser via setVisualizationAnalyser(), not from this context
|
|
611
|
+
if (!this.waveformAudioContext) {
|
|
612
|
+
this.waveformAudioContext = new (window.AudioContext || window.webkitAudioContext)();
|
|
613
|
+
}
|
|
614
|
+
|
|
615
|
+
this.butterchurn = butterchurnAPI.createVisualizer(
|
|
616
|
+
this.waveformAudioContext,
|
|
617
|
+
this.effectsCanvas,
|
|
618
|
+
{
|
|
619
|
+
width: 1920,
|
|
620
|
+
height: 1080,
|
|
621
|
+
mesh_width: 128,
|
|
622
|
+
mesh_height: 72,
|
|
623
|
+
fps: 30,
|
|
624
|
+
}
|
|
625
|
+
);
|
|
626
|
+
|
|
627
|
+
// Reload presets
|
|
628
|
+
if (window.butterchurnPresets && window.butterchurnPresets.getPresets) {
|
|
629
|
+
this.presetList = Object.keys(window.butterchurnPresets.getPresets());
|
|
630
|
+
|
|
631
|
+
// Restore the current preset if it exists, otherwise load a reactive preset
|
|
632
|
+
if (this.currentPreset && this.presetList.includes(this.currentPreset)) {
|
|
633
|
+
const presetData = window.butterchurnPresets.getPresets()[this.currentPreset];
|
|
634
|
+
this.butterchurn.loadPreset(presetData, 0.0);
|
|
635
|
+
} else {
|
|
636
|
+
// Only load a reactive preset if no current preset exists
|
|
637
|
+
const reactivePresets = [
|
|
638
|
+
'Geiss - Pulse Vertex v1.02',
|
|
639
|
+
'Rovastar & Geiss - Dynamic Noise v2.0',
|
|
640
|
+
'martin - volume bar spectrogram v1.0',
|
|
641
|
+
];
|
|
642
|
+
|
|
643
|
+
for (const preset of reactivePresets) {
|
|
644
|
+
if (this.presetList.includes(preset)) {
|
|
645
|
+
this.butterchurn.loadPreset(window.butterchurnPresets.getPresets()[preset], 0.0);
|
|
646
|
+
this.currentPreset = preset;
|
|
647
|
+
break;
|
|
648
|
+
}
|
|
649
|
+
}
|
|
650
|
+
}
|
|
651
|
+
}
|
|
652
|
+
} catch (error) {
|
|
653
|
+
console.error('Failed to reinitialize Butterchurn:', error);
|
|
654
|
+
}
|
|
655
|
+
}
|
|
656
|
+
|
|
657
|
+
async setVocalsAudio(audioData) {
|
|
658
|
+
try {
|
|
659
|
+
if (!this.waveformAudioContext) {
|
|
660
|
+
this.waveformAudioContext = new (window.AudioContext || window.webkitAudioContext)();
|
|
661
|
+
}
|
|
662
|
+
|
|
663
|
+
// Ensure audioData is an ArrayBuffer
|
|
664
|
+
let arrayBuffer;
|
|
665
|
+
if (audioData instanceof ArrayBuffer) {
|
|
666
|
+
arrayBuffer = audioData;
|
|
667
|
+
} else if (audioData && audioData.buffer instanceof ArrayBuffer) {
|
|
668
|
+
// Handle Node.js Buffer-like objects (which have a .buffer property)
|
|
669
|
+
arrayBuffer = audioData.buffer.slice(
|
|
670
|
+
audioData.byteOffset,
|
|
671
|
+
audioData.byteOffset + audioData.byteLength
|
|
672
|
+
);
|
|
673
|
+
} else if (audioData instanceof Uint8Array) {
|
|
674
|
+
// Handle Uint8Array
|
|
675
|
+
arrayBuffer = audioData.buffer.slice(
|
|
676
|
+
audioData.byteOffset,
|
|
677
|
+
audioData.byteOffset + audioData.byteLength
|
|
678
|
+
);
|
|
679
|
+
} else {
|
|
680
|
+
return; // Unexpected audio data type
|
|
681
|
+
}
|
|
682
|
+
|
|
683
|
+
// Decode the audio data
|
|
684
|
+
this.vocalsAudioBuffer = await this.waveformAudioContext.decodeAudioData(arrayBuffer);
|
|
685
|
+
|
|
686
|
+
// Pre-calculate waveform for smooth animation
|
|
687
|
+
this.preCalculateVocalsWaveform();
|
|
688
|
+
} catch {
|
|
689
|
+
// Failed to load vocals audio for waveform
|
|
690
|
+
}
|
|
691
|
+
}
|
|
692
|
+
|
|
693
|
+
/**
 * Prepare music-frequency analysis for the visualizer.
 *
 * Stores the decoded waveform buffer for UI use, optionally decodes the
 * raw bytes again for the shared Butterchurn context (AudioBuffers
 * cannot be shared across AudioContexts), and sets up either an
 * AudioWorklet-based analyzer (preferred) or a main-thread AnalyserNode
 * fallback. Neither node is connected here — connection happens when
 * analysis starts.
 *
 * @param {AudioBuffer} waveformBuffer - Already-decoded buffer for UI visualization.
 * @param {ArrayBuffer} arrayBuffer - Raw encoded bytes, re-decoded for the Butterchurn context.
 */
async setupMusicAnalysis(waveformBuffer, arrayBuffer) {
  if (!this.effectsGL) return;

  try {
    // Use Butterchurn's AudioContext for live music analysis (for sync with playback)
    const analysisContext = this.butterchurnAudioContext || this.waveformAudioContext;

    // Store waveform buffer for UI visualization
    this.musicAudioBuffer = waveformBuffer;

    // If we have the shared Butterchurn context, decode audio for it too (if not already done)
    if (this.butterchurnAudioContext && !this.butterchurnAudioBuffer && arrayBuffer) {
      try {
        // slice(0): decodeAudioData detaches its input, so decode a copy.
        this.butterchurnAudioBuffer = await this.butterchurnAudioContext.decodeAudioData(
          arrayBuffer.slice(0)
        );
      } catch (error) {
        console.warn('Failed to decode audio for Butterchurn context:', error);
      }
    }

    // Try to use AudioWorklet for better performance (use the analysis context)
    try {
      await analysisContext.audioWorklet.addModule('./js/musicAnalysisWorklet.js');
      this.workletAvailable = true;
    } catch {
      console.warn('AudioWorklet not available, falling back to AnalyserNode');
      this.workletAvailable = false;
    }

    if (this.workletAvailable) {
      // Create the worklet node but don't connect it yet
      this.musicWorkletNode = new AudioWorkletNode(analysisContext, 'music-analysis-processor');

      // Listen for analysis results
      // (cachedAnalysis is what analyzeMusicFrequencies() returns on the worklet path)
      this.musicWorkletNode.port.onmessage = (event) => {
        if (event.data.type === 'analysis') {
          this.cachedAnalysis = event.data.data;
        }
      };
    } else {
      // Create analyser but don't connect it yet
      this.musicAnalyser = analysisContext.createAnalyser();
      this.musicAnalyser.fftSize = 512;
      this.musicAnalyser.smoothingTimeConstant = 0.8;
      this.musicFrequencyData = new Uint8Array(this.musicAnalyser.frequencyBinCount);
    }
  } catch (error) {
    console.warn('Failed to setup music analysis:', error);
  }
}
|
|
744
|
+
|
|
745
|
+
async tryDecodeStoredAudioForButterchurn() {
|
|
746
|
+
// If we already have music loaded but Butterchurn doesn't have the audio buffer
|
|
747
|
+
if (
|
|
748
|
+
this.butterchurn &&
|
|
749
|
+
this.butterchurnAudioContext &&
|
|
750
|
+
this.originalAudioArrayBuffer &&
|
|
751
|
+
!this.butterchurnAudioBuffer
|
|
752
|
+
) {
|
|
753
|
+
try {
|
|
754
|
+
this.butterchurnAudioBuffer = await this.butterchurnAudioContext.decodeAudioData(
|
|
755
|
+
this.originalAudioArrayBuffer.slice(0)
|
|
756
|
+
);
|
|
757
|
+
|
|
758
|
+
// Also update the music buffer reference for UI if we don't have it yet
|
|
759
|
+
if (!this.musicAudioBuffer && this.waveformAudioContext) {
|
|
760
|
+
try {
|
|
761
|
+
this.musicAudioBuffer = await this.waveformAudioContext.decodeAudioData(
|
|
762
|
+
this.originalAudioArrayBuffer.slice(0)
|
|
763
|
+
);
|
|
764
|
+
} catch (error) {
|
|
765
|
+
console.warn('Failed to decode audio for waveform:', error);
|
|
766
|
+
}
|
|
767
|
+
}
|
|
768
|
+
} catch (error) {
|
|
769
|
+
console.warn('Failed to decode stored audio for Butterchurn context:', error);
|
|
770
|
+
}
|
|
771
|
+
}
|
|
772
|
+
}
|
|
773
|
+
|
|
774
|
+
analyzeMusicFrequencies() {
|
|
775
|
+
// Use cached results from AudioWorklet if available
|
|
776
|
+
if (this.workletAvailable && this.musicWorkletNode) {
|
|
777
|
+
return this.cachedAnalysis;
|
|
778
|
+
}
|
|
779
|
+
|
|
780
|
+
// Fallback to traditional analysis
|
|
781
|
+
if (!this.musicAnalyser || !this.musicFrequencyData) {
|
|
782
|
+
return { energy: 0, bass: 0, mid: 0, treble: 0, centroid: 0 };
|
|
783
|
+
}
|
|
784
|
+
|
|
785
|
+
// Get frequency data (expensive operation on main thread)
|
|
786
|
+
this.musicAnalyser.getByteFrequencyData(this.musicFrequencyData);
|
|
787
|
+
|
|
788
|
+
const binCount = this.musicFrequencyData.length;
|
|
789
|
+
const bassEnd = Math.floor(binCount * 0.1); // 0-10% (bass)
|
|
790
|
+
const midEnd = Math.floor(binCount * 0.4); // 10-40% (mids)
|
|
791
|
+
// 40-100% is treble
|
|
792
|
+
|
|
793
|
+
let bassSum = 0,
|
|
794
|
+
midSum = 0,
|
|
795
|
+
trebleSum = 0,
|
|
796
|
+
totalEnergy = 0;
|
|
797
|
+
let weightedSum = 0; // for spectral centroid
|
|
798
|
+
|
|
799
|
+
for (let i = 0; i < binCount; i++) {
|
|
800
|
+
const value = this.musicFrequencyData[i] / 255.0; // Normalize to 0-1
|
|
801
|
+
totalEnergy += value;
|
|
802
|
+
weightedSum += value * i;
|
|
803
|
+
|
|
804
|
+
if (i < bassEnd) {
|
|
805
|
+
bassSum += value;
|
|
806
|
+
} else if (i < midEnd) {
|
|
807
|
+
midSum += value;
|
|
808
|
+
} else {
|
|
809
|
+
trebleSum += value;
|
|
810
|
+
}
|
|
811
|
+
}
|
|
812
|
+
|
|
813
|
+
// Calculate averages
|
|
814
|
+
const bassAvg = bassEnd > 0 ? bassSum / bassEnd : 0;
|
|
815
|
+
const midAvg = midEnd - bassEnd > 0 ? midSum / (midEnd - bassEnd) : 0;
|
|
816
|
+
const trebleAvg = binCount - midEnd > 0 ? trebleSum / (binCount - midEnd) : 0;
|
|
817
|
+
const energyAvg = binCount > 0 ? totalEnergy / binCount : 0;
|
|
818
|
+
|
|
819
|
+
// Calculate spectral centroid (normalized)
|
|
820
|
+
const centroid = totalEnergy > 0 ? weightedSum / totalEnergy / binCount : 0;
|
|
821
|
+
|
|
822
|
+
return {
|
|
823
|
+
energy: Math.min(energyAvg * 20, 1.0), // Scale for visual effects
|
|
824
|
+
bass: Math.min(bassAvg * 30, 1.0),
|
|
825
|
+
mid: Math.min(midAvg * 25, 1.0),
|
|
826
|
+
treble: Math.min(trebleAvg * 20, 1.0),
|
|
827
|
+
centroid: centroid,
|
|
828
|
+
};
|
|
829
|
+
}
|
|
830
|
+
|
|
831
|
+
renderWebGLEffects() {
|
|
832
|
+
if (!this.effectsGL) {
|
|
833
|
+
return;
|
|
834
|
+
}
|
|
835
|
+
|
|
836
|
+
// Clear effects canvas if disabled
|
|
837
|
+
if (!this.waveformPreferences.enableEffects) {
|
|
838
|
+
const gl = this.effectsGL;
|
|
839
|
+
gl.clearColor(0, 0, 0, 1);
|
|
840
|
+
gl.clear(gl.COLOR_BUFFER_BIT);
|
|
841
|
+
return;
|
|
842
|
+
}
|
|
843
|
+
|
|
844
|
+
const _gl = this.effectsGL;
|
|
845
|
+
const analysis = this.analyzeMusicFrequencies();
|
|
846
|
+
|
|
847
|
+
// if (Math.random() < 0.01) { // Debug occasionally
|
|
848
|
+
// }
|
|
849
|
+
|
|
850
|
+
// Use Butterchurn for background effects
|
|
851
|
+
if (this.effectType === 'butterchurn' && this.butterchurn) {
|
|
852
|
+
try {
|
|
853
|
+
// Convert our analysis data to Butterchurn's expected format
|
|
854
|
+
const audioData = {
|
|
855
|
+
timeArray: new Uint8Array(1024), // Time domain data (not used much)
|
|
856
|
+
freqArray: new Uint8Array(1024), // Frequency domain data
|
|
857
|
+
};
|
|
858
|
+
|
|
859
|
+
// Fill frequency data based on our analysis
|
|
860
|
+
// Butterchurn expects 0-255 values
|
|
861
|
+
const bassLevel = Math.floor(analysis.bass * 255);
|
|
862
|
+
const midLevel = Math.floor(analysis.mid * 255);
|
|
863
|
+
const trebleLevel = Math.floor(analysis.treble * 255);
|
|
864
|
+
|
|
865
|
+
// Distribute frequency data across the array
|
|
866
|
+
for (let i = 0; i < 1024; i++) {
|
|
867
|
+
if (i < 341) {
|
|
868
|
+
// Bass frequencies (0-33% of spectrum)
|
|
869
|
+
audioData.freqArray[i] = bassLevel;
|
|
870
|
+
} else if (i < 682) {
|
|
871
|
+
// Mid frequencies (33-66% of spectrum)
|
|
872
|
+
audioData.freqArray[i] = midLevel;
|
|
873
|
+
} else {
|
|
874
|
+
// Treble frequencies (66-100% of spectrum)
|
|
875
|
+
audioData.freqArray[i] = trebleLevel;
|
|
876
|
+
}
|
|
877
|
+
}
|
|
878
|
+
|
|
879
|
+
// Render Butterchurn frame
|
|
880
|
+
this.butterchurn.render();
|
|
881
|
+
|
|
882
|
+
// Debug audio levels periodically
|
|
883
|
+
this.debugAudioLevels();
|
|
884
|
+
|
|
885
|
+
// Ensure Butterchurn source is running if we're playing but source is missing
|
|
886
|
+
if (
|
|
887
|
+
this.isPlaying &&
|
|
888
|
+
this.butterchurnAnalyser &&
|
|
889
|
+
!this.butterchurnSourceNode &&
|
|
890
|
+
this.butterchurnAudioBuffer
|
|
891
|
+
) {
|
|
892
|
+
this.startMusicAnalysis();
|
|
893
|
+
} else if (
|
|
894
|
+
this.isPlaying &&
|
|
895
|
+
!this.butterchurnSourceNode &&
|
|
896
|
+
this.musicAudioBuffer &&
|
|
897
|
+
!this.butterchurnAudioBuffer
|
|
898
|
+
) {
|
|
899
|
+
// Fix missing Butterchurn audio buffer - decode the music for Butterchurn
|
|
900
|
+
try {
|
|
901
|
+
// Get the original audio data from musicAudioBuffer
|
|
902
|
+
// We need to re-decode since AudioBuffer can't be transferred between contexts
|
|
903
|
+
// This is a limitation - we'd need the original ArrayBuffer, but for now let's skip this complex case
|
|
904
|
+
} catch (error) {
|
|
905
|
+
console.warn('Failed to decode audio for Butterchurn:', error);
|
|
906
|
+
}
|
|
907
|
+
} else if (this.isPlaying && !this.butterchurnSourceNode) {
|
|
908
|
+
// Debug why auto-fix isn't triggering (limit frequency)
|
|
909
|
+
const now = performance.now();
|
|
910
|
+
if (now - this.lastConditionsDebugTime > 2000) {
|
|
911
|
+
this.lastConditionsDebugTime = now;
|
|
912
|
+
}
|
|
913
|
+
}
|
|
914
|
+
} catch (error) {
|
|
915
|
+
console.error('Butterchurn render failed:', error);
|
|
916
|
+
}
|
|
917
|
+
}
|
|
918
|
+
}
|
|
919
|
+
|
|
920
|
+
// Preset management methods
|
|
921
|
+
switchToNextPreset() {
|
|
922
|
+
if (this.effectType === 'butterchurn' && this.butterchurn && this.presetList.length) {
|
|
923
|
+
const currentIndex = this.presetList.indexOf(this.currentPreset);
|
|
924
|
+
let nextIndex = (currentIndex + 1) % this.presetList.length;
|
|
925
|
+
|
|
926
|
+
// Skip disabled effects
|
|
927
|
+
const maxAttempts = this.presetList.length;
|
|
928
|
+
let attempts = 0;
|
|
929
|
+
while (attempts < maxAttempts && this.isEffectDisabled(this.presetList[nextIndex])) {
|
|
930
|
+
nextIndex = (nextIndex + 1) % this.presetList.length;
|
|
931
|
+
attempts++;
|
|
932
|
+
}
|
|
933
|
+
|
|
934
|
+
// Only switch if we found an enabled effect
|
|
935
|
+
if (!this.isEffectDisabled(this.presetList[nextIndex])) {
|
|
936
|
+
this.switchToPreset(this.presetList[nextIndex]);
|
|
937
|
+
}
|
|
938
|
+
}
|
|
939
|
+
}
|
|
940
|
+
|
|
941
|
+
switchToPreviousPreset() {
|
|
942
|
+
if (this.effectType === 'butterchurn' && this.butterchurn && this.presetList.length) {
|
|
943
|
+
const currentIndex = this.presetList.indexOf(this.currentPreset);
|
|
944
|
+
let prevIndex = currentIndex <= 0 ? this.presetList.length - 1 : currentIndex - 1;
|
|
945
|
+
|
|
946
|
+
// Skip disabled effects
|
|
947
|
+
const maxAttempts = this.presetList.length;
|
|
948
|
+
let attempts = 0;
|
|
949
|
+
while (attempts < maxAttempts && this.isEffectDisabled(this.presetList[prevIndex])) {
|
|
950
|
+
prevIndex = prevIndex <= 0 ? this.presetList.length - 1 : prevIndex - 1;
|
|
951
|
+
attempts++;
|
|
952
|
+
}
|
|
953
|
+
|
|
954
|
+
// Only switch if we found an enabled effect
|
|
955
|
+
if (!this.isEffectDisabled(this.presetList[prevIndex])) {
|
|
956
|
+
this.switchToPreset(this.presetList[prevIndex]);
|
|
957
|
+
}
|
|
958
|
+
}
|
|
959
|
+
}
|
|
960
|
+
|
|
961
|
+
switchToPreset(presetName, transitionTime = 2.0) {
|
|
962
|
+
if (
|
|
963
|
+
this.effectType !== 'butterchurn' ||
|
|
964
|
+
!this.butterchurn ||
|
|
965
|
+
!this.presetList.includes(presetName)
|
|
966
|
+
) {
|
|
967
|
+
console.warn('Cannot switch to preset:', presetName);
|
|
968
|
+
return;
|
|
969
|
+
}
|
|
970
|
+
|
|
971
|
+
try {
|
|
972
|
+
const presetData = window.butterchurnPresets.getPresets()[presetName];
|
|
973
|
+
this.butterchurn.loadPreset(presetData, transitionTime);
|
|
974
|
+
this.currentPreset = presetName;
|
|
975
|
+
} catch (error) {
|
|
976
|
+
console.error('Failed to switch preset:', error);
|
|
977
|
+
}
|
|
978
|
+
}
|
|
979
|
+
|
|
980
|
+
isEffectDisabled(_effectName) {
|
|
981
|
+
// TODO: Get disabled effects from Context/props instead
|
|
982
|
+
return false;
|
|
983
|
+
}
|
|
984
|
+
|
|
985
|
+
setButterchurnPreset(presetData, transitionTime = 1.0) {
|
|
986
|
+
if (this.effectType !== 'butterchurn' || !this.butterchurn) {
|
|
987
|
+
console.warn('Cannot set butterchurn preset - butterchurn not active');
|
|
988
|
+
return false;
|
|
989
|
+
}
|
|
990
|
+
|
|
991
|
+
try {
|
|
992
|
+
this.butterchurn.loadPreset(presetData, transitionTime);
|
|
993
|
+
// Find the preset name for tracking
|
|
994
|
+
if (window.butterchurnPresets) {
|
|
995
|
+
const allPresets = window.butterchurnPresets.getPresets();
|
|
996
|
+
for (const [name, preset] of Object.entries(allPresets)) {
|
|
997
|
+
if (preset === presetData) {
|
|
998
|
+
this.currentPreset = name;
|
|
999
|
+
break;
|
|
1000
|
+
}
|
|
1001
|
+
}
|
|
1002
|
+
}
|
|
1003
|
+
return true;
|
|
1004
|
+
} catch (error) {
|
|
1005
|
+
console.error('Failed to set butterchurn preset:', error);
|
|
1006
|
+
return false;
|
|
1007
|
+
}
|
|
1008
|
+
}
|
|
1009
|
+
|
|
1010
|
+
getAvailablePresets() {
|
|
1011
|
+
return this.presetList;
|
|
1012
|
+
}
|
|
1013
|
+
|
|
1014
|
+
getCurrentPreset() {
|
|
1015
|
+
return this.currentPreset;
|
|
1016
|
+
}
|
|
1017
|
+
|
|
1018
|
+
switchEffectType(type) {
|
|
1019
|
+
if (type === 'butterchurn') {
|
|
1020
|
+
this.effectType = type;
|
|
1021
|
+
}
|
|
1022
|
+
}
|
|
1023
|
+
|
|
1024
|
+
setPlaying(playing) {
|
|
1025
|
+
this.isPlaying = playing;
|
|
1026
|
+
|
|
1027
|
+
// Start/stop microphone capture based on playing state
|
|
1028
|
+
if (playing) {
|
|
1029
|
+
this.startMicrophoneCapture();
|
|
1030
|
+
this.startMusicAnalysis();
|
|
1031
|
+
} else {
|
|
1032
|
+
this.stopMicrophoneCapture();
|
|
1033
|
+
this.stopMusicAnalysis();
|
|
1034
|
+
}
|
|
1035
|
+
}
|
|
1036
|
+
|
|
1037
|
+
/**
 * Start analysis-only playback of the loaded track so Butterchurn can react
 * to the music. The source node is never connected to the context
 * destination, so this produces NO audible output — actual playback is
 * handled elsewhere. No-op when there is no music buffer or no renderer.
 */
startMusicAnalysis() {
  if (!this.musicAudioBuffer) {
    return;
  }
  if (!this.effectsGL && !this.butterchurn) {
    return;
  }

  // NOTE: the Butterchurn context is used for silent analysis only.
  // The waveform context is used only for UI visualization (no playback).

  try {
    // Stop any existing analysis
    this.stopMusicAnalysis();

    // Start offline audio analysis for Butterchurn (NO PLAYBACK)
    if (this.butterchurn && this.butterchurnAudioBuffer && this.butterchurnAudioContext) {
      try {
        // Stop any existing Butterchurn source
        if (this.butterchurnSourceNode) {
          this.butterchurnSourceNode.disconnect();
          this.butterchurnSourceNode = null;
        }

        // Create new Butterchurn source node for ANALYSIS ONLY
        this.butterchurnSourceNode = this.butterchurnAudioContext.createBufferSource();
        this.butterchurnSourceNode.buffer = this.butterchurnAudioBuffer;

        // IMPORTANT: DO NOT connect to destination - this eliminates audio playback!
        // Connect only to analysers for frequency analysis (no audio output)

        // Connect to debug analyser for monitoring
        if (this.butterchurnAnalyser) {
          this.butterchurnSourceNode.connect(this.butterchurnAnalyser);
        }

        // Connect to Butterchurn's internal audio processing
        // Butterchurn should have its own analyser that we can connect to
        if (this.butterchurn && this.butterchurn.connectAudio) {
          // Create dedicated analyser for Butterchurn visualization
          this.butterchurnVisualAnalyser = this.butterchurnAudioContext.createAnalyser();
          this.butterchurnVisualAnalyser.fftSize = 2048;
          this.butterchurnVisualAnalyser.smoothingTimeConstant = 0.8;

          // Connect audio source to Butterchurn's analyser
          this.butterchurnSourceNode.connect(this.butterchurnVisualAnalyser);

          // Give Butterchurn the analyser for visualization
          this.butterchurn.connectAudio(this.butterchurnVisualAnalyser);
        }

        // Start from current time position (analysis only, no audio output)
        // Ensure offset is never negative to avoid RangeError
        const startOffset = Math.max(0, this.currentTime);

        // Track when we started and from what offset for drift detection
        this.butterchurnStartTime = this.butterchurnAudioContext.currentTime;
        this.butterchurnStartOffset = startOffset;

        this.butterchurnSourceNode.start(0, startOffset);
      } catch (error) {
        // Non-fatal: the visualizer just won't be music-reactive.
        console.warn('Failed to start Butterchurn offline analysis:', error);
      }
    }
  } catch (error) {
    console.error('Failed to start music analysis:', error);
  }
}
|
|
1105
|
+
|
|
1106
|
+
stopMusicAnalysis() {
|
|
1107
|
+
// Note: We no longer create musicSourceNode from waveform context.
|
|
1108
|
+
// Only Butterchurn context is used for analysis.
|
|
1109
|
+
|
|
1110
|
+
// Stop Butterchurn source (AudioBufferSourceNode can only be used once)
|
|
1111
|
+
if (this.butterchurnSourceNode) {
|
|
1112
|
+
try {
|
|
1113
|
+
this.butterchurnSourceNode.stop();
|
|
1114
|
+
this.butterchurnSourceNode.disconnect();
|
|
1115
|
+
} catch {
|
|
1116
|
+
// Source may already be stopped
|
|
1117
|
+
}
|
|
1118
|
+
this.butterchurnSourceNode = null;
|
|
1119
|
+
}
|
|
1120
|
+
|
|
1121
|
+
// Reset tracking variables
|
|
1122
|
+
this.butterchurnStartTime = 0;
|
|
1123
|
+
this.butterchurnStartOffset = 0;
|
|
1124
|
+
|
|
1125
|
+
// Clear any stored analyser references that Butterchurn might be holding
|
|
1126
|
+
if (this.butterchurnVisualAnalyser) {
|
|
1127
|
+
this.butterchurnVisualAnalyser = null;
|
|
1128
|
+
}
|
|
1129
|
+
|
|
1130
|
+
// Clear cached analysis when stopped
|
|
1131
|
+
this.cachedAnalysis = { energy: 0, bass: 0, mid: 0, treble: 0, centroid: 0 };
|
|
1132
|
+
}
|
|
1133
|
+
|
|
1134
|
+
/**
 * Throttled audio-level debug readout. Samples the Butterchurn analyser's
 * byte frequency data and, if a #statusText element exists, shows the
 * average/max bin values and the audio context state in the status bar.
 */
debugAudioLevels() {
  // Throttle: skip until audioDebugInterval ms have elapsed since last run.
  const now = performance.now();
  if (now - this.lastAudioDebugTime < this.audioDebugInterval) return;
  this.lastAudioDebugTime = now;

  if (this.butterchurnAnalyser && this.butterchurnFrequencyData) {
    this.butterchurnAnalyser.getByteFrequencyData(this.butterchurnFrequencyData);
    const sum = this.butterchurnFrequencyData.reduce((a, b) => a + b, 0);
    const average = sum / this.butterchurnFrequencyData.length;
    const max = Math.max(...this.butterchurnFrequencyData);

    // Leftover fragment from a removed console.log call:
    // `Source: ${this.butterchurnSourceNode ? 'active' : 'none'}`);

    // Also update status bar for visual feedback
    const statusText = document.getElementById('statusText');
    if (statusText) {
      statusText.textContent = `Audio: Avg=${average.toFixed(0)} Max=${max} State=${this.butterchurnAudioContext ? this.butterchurnAudioContext.state : 'none'}`;
    }
  }
}
|
|
1154
|
+
|
|
1155
|
+
/**
 * Open the configured microphone for waveform visualization ONLY.
 * Builds a small analysis graph (media-stream source -> analyser, and a
 * 0.5-gain node that is intentionally never routed to speakers). Audible
 * mic routing to PA/IEM outputs is handled by kaiPlayer, not here.
 * Best-effort: any failure (permission denied, missing device) is ignored.
 */
async startMicrophoneCapture() {
  // Respect the user preference; capture is opt-in.
  if (!this.waveformPreferences.enableMic) return;

  try {
    // Use stored input device from preferences; fall back to the default mic.
    const constraints = {
      audio: this.inputDevice
        ? {
            deviceId: { exact: this.inputDevice },
          }
        : true,
    };

    this.micStream = await navigator.mediaDevices.getUserMedia(constraints);
    this.audioContext = new (window.AudioContext || window.webkitAudioContext)();
    this.analyser = this.audioContext.createAnalyser();

    const source = this.audioContext.createMediaStreamSource(this.micStream);
    source.connect(this.analyser);

    // Create gain node for analysis only - NEVER route to speakers
    // (kaiPlayer handles actual microphone audio routing to PA/IEM outputs)
    this.micGainNode = this.audioContext.createGain();
    this.micGainNode.gain.setValueAtTime(0.5, this.audioContext.currentTime);
    source.connect(this.micGainNode);

    // DO NOT connect to speakers - this audioContext uses default output device
    // and would bypass PA routing. kaiPlayer handles all mic-to-speaker routing.

    // Small FFT: only 128 time-domain bins are needed for the scrolling waveform.
    this.analyser.fftSize = 256;
    this.micDataArray = new Uint8Array(this.analyser.frequencyBinCount);

    // Give the microphone a moment to stabilize before processing
    await new Promise((resolve) => setTimeout(resolve, 150));

    // Microphone capture started
  } catch {
    // Could not start microphone capture — intentionally silent (best-effort).
  }
}
|
|
1195
|
+
|
|
1196
|
+
stopMicrophoneCapture() {
|
|
1197
|
+
if (this.micStream) {
|
|
1198
|
+
this.micStream.getTracks().forEach((track) => track.stop());
|
|
1199
|
+
this.micStream = null;
|
|
1200
|
+
}
|
|
1201
|
+
|
|
1202
|
+
if (this.micGainNode) {
|
|
1203
|
+
this.micGainNode.disconnect();
|
|
1204
|
+
this.micGainNode = null;
|
|
1205
|
+
}
|
|
1206
|
+
|
|
1207
|
+
if (this.audioContext) {
|
|
1208
|
+
this.audioContext.close();
|
|
1209
|
+
this.audioContext = null;
|
|
1210
|
+
}
|
|
1211
|
+
|
|
1212
|
+
this.analyser = null;
|
|
1213
|
+
this.micDataArray = null;
|
|
1214
|
+
this.waveformData.fill(128); // Fill with center value to avoid flatline
|
|
1215
|
+
}
|
|
1216
|
+
|
|
1217
|
+
preCalculateVocalsWaveform() {
|
|
1218
|
+
if (!this.vocalsAudioBuffer) return;
|
|
1219
|
+
|
|
1220
|
+
const channelData = this.vocalsAudioBuffer.getChannelData(0);
|
|
1221
|
+
const sampleRate = this.vocalsAudioBuffer.sampleRate;
|
|
1222
|
+
const duration = this.vocalsAudioBuffer.duration;
|
|
1223
|
+
|
|
1224
|
+
// Create waveform data at 240 samples per second (pixel resolution)
|
|
1225
|
+
const samplesPerWaveformPoint = sampleRate / 240; // 240Hz
|
|
1226
|
+
const totalPoints = Math.floor(duration * 240);
|
|
1227
|
+
this.preCalculatedVocalsWaveform = new Uint8Array(totalPoints);
|
|
1228
|
+
|
|
1229
|
+
for (let i = 0; i < duration * 240; i++) {
|
|
1230
|
+
const startSample = Math.floor(i * samplesPerWaveformPoint);
|
|
1231
|
+
const endSample = Math.min(Math.floor((i + 1) * samplesPerWaveformPoint), channelData.length);
|
|
1232
|
+
|
|
1233
|
+
// Get peak value for this time segment (preserves waveform shape better than RMS)
|
|
1234
|
+
let maxVal = 0;
|
|
1235
|
+
let minVal = 0;
|
|
1236
|
+
for (let j = startSample; j < endSample; j++) {
|
|
1237
|
+
maxVal = Math.max(maxVal, channelData[j]);
|
|
1238
|
+
minVal = Math.min(minVal, channelData[j]);
|
|
1239
|
+
}
|
|
1240
|
+
|
|
1241
|
+
// Use the larger absolute value to preserve the waveform peaks
|
|
1242
|
+
const peak = Math.max(Math.abs(maxVal), Math.abs(minVal));
|
|
1243
|
+
|
|
1244
|
+
// Store as signed value: 128 is center, >128 is positive, <128 is negative
|
|
1245
|
+
// Determine sign based on which peak was larger
|
|
1246
|
+
const signedValue =
|
|
1247
|
+
Math.abs(maxVal) > Math.abs(minVal)
|
|
1248
|
+
? 128 + Math.floor(peak * 127) // Positive peak
|
|
1249
|
+
: 128 - Math.floor(peak * 127); // Negative peak
|
|
1250
|
+
|
|
1251
|
+
this.preCalculatedVocalsWaveform[i] = Math.max(0, Math.min(255, signedValue));
|
|
1252
|
+
}
|
|
1253
|
+
}
|
|
1254
|
+
|
|
1255
|
+
/**
 * Advance the scrolling mic waveform buffer (`this.waveformData`, 1440
 * entries at 240 samples/sec = 6 seconds). Shifts existing samples left by
 * the playback time elapsed since the last update and fills the vacated
 * right edge with fresh time-domain data from the mic analyser.
 */
updateWaveformData() {
  if (!this.analyser || !this.micDataArray) return;

  // Get time domain data from microphone (actual waveform)
  this.analyser.getByteTimeDomainData(this.micDataArray);

  // Calculate how many samples to shift based on time elapsed
  if (!this.lastMicTime) {
    this.lastMicTime = this.currentTime;
  }
  const timeElapsed = this.currentTime - this.lastMicTime;
  const samplesToShift = Math.floor(timeElapsed * 240);

  if (samplesToShift > 0) {
    // Shift array left by the number of samples elapsed
    for (let i = 0; i < 1440 - samplesToShift; i++) {
      this.waveformData[i] = this.waveformData[i + samplesToShift];
    }

    // Fill the right side with actual waveform data
    // Sample multiple points from the audio buffer to create smooth waveform
    const samplesPerPoint = Math.floor(this.micDataArray.length / samplesToShift);
    for (let i = 0; i < samplesToShift; i++) {
      const bufferIndex = Math.min(i * samplesPerPoint, this.micDataArray.length - 1);
      // Use actual waveform value (already 0-255)
      this.waveformData[1440 - samplesToShift + i] = this.micDataArray[bufferIndex];
    }

    // Only advance the clock once at least one sample was shifted, so
    // sub-sample time deltas accumulate instead of being lost.
    this.lastMicTime = this.currentTime;
  }
}
|
|
1286
|
+
|
|
1287
|
+
/**
 * Refresh `this.vocalsWaveformData` (1920 points = 8 seconds at 240 Hz)
 * as a window into the pre-calculated vocals envelope: 6 seconds behind
 * the current time through 2 seconds ahead. Out-of-range regions are
 * padded with 128 (the center/silence value).
 */
updateVocalsWaveformData() {
  // Update vocals rendering array by slicing from source and padding with
  // the 128 center value.
  if (this.preCalculatedVocalsWaveform) {
    const startIndex = Math.floor((this.currentTime - 6) * 240); // 6 seconds back
    const endIndex = startIndex + 1920; // 8 seconds total

    if (startIndex >= 0 && endIndex <= this.preCalculatedVocalsWaveform.length) {
      // Simple case: copy directly
      for (let i = 0; i < 1920; i++) {
        this.vocalsWaveformData[i] = this.preCalculatedVocalsWaveform[startIndex + i];
      }
    } else {
      // Edge cases: slice and concatenate with center-value padding
      const validStart = Math.max(0, startIndex);
      const validEnd = Math.min(this.preCalculatedVocalsWaveform.length, endIndex);
      const leftPadding = validStart - startIndex;
      const rightPadding = endIndex - validEnd;

      let destIndex = 0;

      // Left padding (128 = silence center line)
      for (let i = 0; i < leftPadding; i++) {
        this.vocalsWaveformData[destIndex++] = 128;
      }

      // Valid source data
      for (let i = validStart; i < validEnd; i++) {
        this.vocalsWaveformData[destIndex++] = this.preCalculatedVocalsWaveform[i];
      }

      // Right padding (128 = silence center line)
      for (let i = 0; i < rightPadding; i++) {
        this.vocalsWaveformData[destIndex++] = 128;
      }
    }
  }
}
|
|
1324
|
+
|
|
1325
|
+
/**
 * Throttle waveform updates to the 240 Hz sample rate of the waveform
 * buffers, independent of the render frame rate. The first call only
 * initializes timing state and performs no update.
 */
updateWaveformDataAtFixedRate() {
  const now = Date.now();

  // Initialize timing if needed
  if (!this.lastMicUpdateTime) {
    this.lastMicUpdateTime = now;
    this.micUpdateInterval = 1000 / 240; // 240Hz = 4.17ms intervals
    return;
  }

  // Only update once at least one 240Hz interval has passed
  if (now - this.lastMicUpdateTime >= this.micUpdateInterval) {
    this.updateWaveformData();
    this.updateVocalsWaveformData();
    this.lastMicUpdateTime = now;
  }
}
|
|
1342
|
+
|
|
1343
|
+
/**
 * Draw the pre-calculated vocals waveform strip above the mic strip,
 * with a white current-position marker and a gray center line.
 * Skipped unless playing and waveforms are enabled.
 * @param {number} width - Canvas width in px (1920).
 * @param {number} height - Canvas height in px (1080).
 */
drawVocalsWaveform(width, height) {
  if (!this.isPlaying || !this.waveformPreferences.enableWaveforms) return;

  // If mic is disabled but waveforms are enabled, only show vocals
  if (!this.waveformPreferences.enableMic && this.waveformPreferences.enableWaveforms) {
    // Draw vocals waveform but show it where mic would be
    // (intentionally empty: placeholder for future repositioning logic)
  }

  // Layout: vocals strip sits above the mic strip with a configurable gap.
  const vocalsHeight = this.settings.vocalsWaveformHeight;
  const micHeight = this.settings.waveformHeight;
  const gap = this.settings.vocalsWaveformGap;
  const vocalsY = height - micHeight - gap - vocalsHeight - 20;
  const currentPositionX = width * this.settings.waveformCurrentPosition;

  // Direct pixel-to-data mapping for vocals (1920 pixels = 1920 data points)
  this.ctx.strokeStyle = this.settings.vocalsWaveformColor;
  this.ctx.lineWidth = 2;
  this.ctx.beginPath();

  const centerY = vocalsY + vocalsHeight / 2;
  let firstPoint = true;

  // Direct pixel-to-data mapping, left to right
  for (let x = 0; x < width; x++) {
    // Convert byte data (0-255) to waveform position (-1 to 1), centered at 128
    const normalized = (this.vocalsWaveformData[x] - 128) / 128;
    const y = centerY + normalized * vocalsHeight * 1.5; // Increased amplitude

    if (firstPoint) {
      this.ctx.moveTo(x, y);
      firstPoint = false;
    } else {
      this.ctx.lineTo(x, y);
    }
  }

  this.ctx.stroke();

  // Draw current position indicator (vertical white line)
  this.ctx.strokeStyle = '#ffffff';
  this.ctx.lineWidth = 1;
  this.ctx.beginPath();
  this.ctx.moveTo(currentPositionX, vocalsY);
  this.ctx.lineTo(currentPositionX, vocalsY + vocalsHeight);
  this.ctx.stroke();

  // Draw center line (gray horizontal zero line)
  this.ctx.strokeStyle = '#666666';
  this.ctx.lineWidth = 1;
  this.ctx.beginPath();
  this.ctx.moveTo(0, centerY);
  this.ctx.lineTo(width, centerY);
  this.ctx.stroke();
}
|
|
1397
|
+
|
|
1398
|
+
// Control methods for preferences
|
|
1399
|
+
/**
 * Preference setter: master switch for drawing the waveform strips.
 * @param {boolean} enabled
 */
setWaveformsEnabled(enabled) {
  this.waveformPreferences.enableWaveforms = enabled;
}
|
|
1402
|
+
|
|
1403
|
+
/**
 * Preference setter: record the mic-to-speakers setting for sync only.
 * @param {boolean} enabled
 */
setMicToSpeakers(enabled) {
  this.waveformPreferences.micToSpeakers = enabled;

  // karaokeRenderer mic is ONLY for waveform visualization, NOT audio routing
  // kaiPlayer handles all microphone-to-speaker routing to PA/IEM outputs
  // This setting is stored for preferences sync but not used by karaokeRenderer
}
|
|
1410
|
+
|
|
1411
|
+
setMicEnabled(enabled) {
|
|
1412
|
+
this.waveformPreferences.enableMic = enabled;
|
|
1413
|
+
|
|
1414
|
+
if (enabled && this.isPlaying) {
|
|
1415
|
+
this.startMicrophoneCapture();
|
|
1416
|
+
} else if (!enabled) {
|
|
1417
|
+
this.stopMicrophoneCapture();
|
|
1418
|
+
// Clear the waveform buffer so we don't render a flatline
|
|
1419
|
+
this.waveformData.fill(128); // Fill with center value (128 = silence)
|
|
1420
|
+
}
|
|
1421
|
+
}
|
|
1422
|
+
|
|
1423
|
+
setEffectsEnabled(enabled) {
|
|
1424
|
+
this.waveformPreferences.enableEffects = enabled;
|
|
1425
|
+
|
|
1426
|
+
// Clear effects canvas when disabled
|
|
1427
|
+
if (!enabled && this.effectsGL) {
|
|
1428
|
+
this.effectsGL.clearColor(0, 0, 0, 1);
|
|
1429
|
+
this.effectsGL.clear(this.effectsGL.COLOR_BUFFER_BIT);
|
|
1430
|
+
}
|
|
1431
|
+
}
|
|
1432
|
+
|
|
1433
|
+
/**
 * Draw the live microphone waveform strip at the bottom of the canvas,
 * with a white current-position marker and a gray center line. Skipped
 * unless playing, waveforms enabled, and a live mic stream exists.
 * @param {number} width - Canvas width in px (1920).
 * @param {number} height - Canvas height in px (1080).
 */
drawMicrophoneWaveform(width, height) {
  if (!this.isPlaying || !this.waveformPreferences.enableWaveforms) return;

  // Only draw mic waveform if mic is enabled AND we have actual mic data
  if (!this.waveformPreferences.enableMic || !this.micStream || !this.analyser) return;

  const waveformHeight = this.settings.waveformHeight;
  const waveformY = height - waveformHeight - 20;
  const currentPositionX = width * this.settings.waveformCurrentPosition;

  // The mic buffer holds 1440 points (6 seconds at 240 Hz); it is drawn
  // 1 point per pixel, so the trace ends at x=1440 — presumably aligned
  // with waveformCurrentPosition (1440/1920 = 0.75); verify against settings.
  this.ctx.strokeStyle = this.settings.waveformColor;
  this.ctx.lineWidth = 2;
  this.ctx.beginPath();

  const centerY = waveformY + waveformHeight / 2;
  let firstPoint = true;

  // Direct pixel-to-data mapping for mic, left to right
  for (let x = 0; x < 1440; x++) {
    // Convert byte data (0-255) to waveform position (-1 to 1)
    const normalized = (this.waveformData[x] - 128) / 128;
    const y = centerY + normalized * waveformHeight * 1.5; // Increased amplitude

    if (firstPoint) {
      this.ctx.moveTo(x, y);
      firstPoint = false;
    } else {
      this.ctx.lineTo(x, y);
    }
  }

  this.ctx.stroke();

  // Draw current position indicator (vertical white line)
  this.ctx.strokeStyle = '#ffffff';
  this.ctx.lineWidth = 1;
  this.ctx.beginPath();
  this.ctx.moveTo(currentPositionX, waveformY);
  this.ctx.lineTo(currentPositionX, waveformY + waveformHeight);
  this.ctx.stroke();

  // Draw center line (gray horizontal zero line)
  this.ctx.strokeStyle = '#666666';
  this.ctx.lineWidth = 1;
  this.ctx.beginPath();
  this.ctx.moveTo(0, centerY);
  this.ctx.lineTo(width, centerY);
  this.ctx.stroke();
}
|
|
1483
|
+
|
|
1484
|
+
/**
 * Kick off the requestAnimationFrame render loop. Each frame records the
 * inter-frame delta into a rolling 60-sample FPS history and times the
 * draw() call into `this.frameUpdateTime` for the profiler.
 */
startAnimation() {
  const animate = (currentTime) => {
    // Track actual FPS by measuring time between frames
    // NOTE(review): on the very first frame lastFrameTime may be stale/0,
    // producing one outlier FPS sample — harmless given the rolling average.
    const deltaTime = currentTime - this.lastFrameTime;
    this.lastFrameTime = currentTime;

    // Add to FPS history (keep last 60 samples for 1-second average)
    this.fpsHistory.push(1000 / deltaTime);
    if (this.fpsHistory.length > 60) {
      this.fpsHistory.shift();
    }

    // Time the full frame including updates
    const frameStart = performance.now();

    this.draw();

    // Track time spent in updates vs rendering
    this.frameUpdateTime = performance.now() - frameStart;

    this.animationFrame = requestAnimationFrame(animate);
  };
  this.animationFrame = requestAnimationFrame(animate);
}
|
|
1508
|
+
|
|
1509
|
+
stopAnimation() {
|
|
1510
|
+
if (this.animationFrame) {
|
|
1511
|
+
cancelAnimationFrame(this.animationFrame);
|
|
1512
|
+
this.animationFrame = null;
|
|
1513
|
+
}
|
|
1514
|
+
}
|
|
1515
|
+
|
|
1516
|
+
/**
 * Render one full frame: background, WebGL effects composite, contrast
 * overlay, waveform strips, then one of — song info + QR + queue (idle),
 * QR + queue (no lyrics), instrumental intro/outro screens, or the
 * active lyric display / instrumental progress bar.
 * Profiling timestamps are sampled every 120th frame.
 */
draw() {
  const width = this.canvas.width; // Always 1920
  const height = this.canvas.height; // Always 1080

  this.frameCount++;

  // Performance profiling - sample every 120 frames (~2s at 60fps)
  const shouldProfile = this.frameCount % 120 === 0;
  const frameStart = shouldProfile ? performance.now() : 0;

  // Update microphone waveform data, throttled to the fixed 240Hz rate
  this.updateWaveformDataAtFixedRate();

  // Clear canvas with dark background
  this.ctx.fillStyle = '#000000';
  this.ctx.fillRect(0, 0, width, height);

  // Render WebGL effects to offscreen canvas
  const effectsStart = shouldProfile ? performance.now() : 0;
  this.renderWebGLEffects();
  const effectsEnd = shouldProfile ? performance.now() : 0;

  // Composite WebGL effects onto main canvas at full opacity
  if (this.effectsCanvas) {
    this.ctx.save();
    this.ctx.globalAlpha = 1.0;
    this.ctx.drawImage(this.effectsCanvas, 0, 0);
    this.ctx.restore();
  }

  // Add dark overlay for text contrast (let effects show through)
  this.ctx.save();
  this.ctx.globalAlpha = this.waveformPreferences.overlayOpacity; // Configurable opacity dark overlay for better text readability
  this.ctx.fillStyle = '#000000';
  this.ctx.fillRect(0, 0, width, height);
  this.ctx.restore();

  // Draw waveforms at the bottom
  const waveformsStart = shouldProfile ? performance.now() : 0;
  this.drawVocalsWaveform(width, height);
  const vocalsEnd = shouldProfile ? performance.now() : 0;
  this.drawMicrophoneWaveform(width, height);
  const micEnd = shouldProfile ? performance.now() : 0;

  // Show song info when loaded but not playing
  if (!this.isPlaying && this.songMetadata) {
    this.drawSongInfo(width, height, this.songMetadata);
    // Draw QR code AFTER song info
    this.drawQRCodeOverlay(width, height);
    // Draw queue display AFTER QR code
    this.drawQueueDisplay(width, height);
    return;
  }

  if (!this.lyrics || this.lyrics.length === 0) {
    // Draw QR code when no lyrics
    this.drawQRCodeOverlay(width, height);
    // Draw queue display AFTER QR code
    this.drawQueueDisplay(width, height);
    return;
  }

  // Check for instrumental intro first
  if (this.isInInstrumentalIntro()) {
    this.drawInstrumentalIntro(width, height);
    // QR code not shown during playback
    return;
  }

  // Check for instrumental outro (just show clean ending, no progress bar)
  if (this.isInInstrumentalOutro()) {
    this.drawInstrumentalOutro(width, height);
    // QR code not shown during playback
    return;
  }

  // Find current line
  const currentLineIndex = this.findCurrentLine();

  // Check for singer change (for backup:PA feature)
  this.checkSingerChange(currentLineIndex);

  if (currentLineIndex >= 0 && currentLineIndex < this.lyrics.length) {
    // Check if we're in an instrumental gap first
    const isInInstrumentalGap = this.isInInstrumentalGap(currentLineIndex);

    if (isInInstrumentalGap) {
      // During instrumental sections, only show the progress bar and upcoming lyrics
      this.drawInstrumentalProgressBar(currentLineIndex, width, height);
    } else {
      // Normal lyric display - show all active lines (main + backup)
      this.drawActiveLines(width, height);
    }
  } else {
    // No current main line found - check if we should show progress bar during backup-only periods
    this.drawBackupOnlyProgressBar(width, height);
  }

  // Performance profiling output with real FPS tracking
  if (shouldProfile) {
    const frameEnd = performance.now();
    // Per-phase timings (underscore-prefixed: computed for future logging)
    const _clearTime = effectsStart - frameStart;
    const _effectsTime = effectsEnd - effectsStart;
    const _vocalsTime = vocalsEnd - waveformsStart;
    const _micTime = micEnd - vocalsEnd;
    const _lyricsTime = frameEnd - micEnd;
    const _renderTime = frameEnd - frameStart;

    // Calculate actual FPS average
    const _avgFPS =
      this.fpsHistory.length > 0
        ? this.fpsHistory.reduce((a, b) => a + b, 0) / this.fpsHistory.length
        : 0;

    // Calculate frame budget utilization
    const targetFrameTime = 1000 / 60; // 16.67ms for 60fps
    const _budgetUsed = (this.frameUpdateTime / targetFrameTime) * 100;

    // Log as separate lines to avoid console truncation
  }
}
|
|
1637
|
+
|
|
1638
|
+
/**
|
|
1639
|
+
* Draw QR code in bottom left corner (only when not playing)
|
|
1640
|
+
*/
|
|
1641
|
+
drawQRCodeOverlay(width, height) {
|
|
1642
|
+
// Only show when not playing
|
|
1643
|
+
if (!this.showQrCode || !this.qrCodeCanvas || this.isPlaying) {
|
|
1644
|
+
return;
|
|
1645
|
+
}
|
|
1646
|
+
|
|
1647
|
+
const padding = 20;
|
|
1648
|
+
const qrSize = 150;
|
|
1649
|
+
const x = padding; // Bottom left instead of right
|
|
1650
|
+
const y = height - qrSize - padding;
|
|
1651
|
+
|
|
1652
|
+
// Draw white background with shadow
|
|
1653
|
+
this.ctx.save();
|
|
1654
|
+
this.ctx.shadowColor = 'rgba(0, 0, 0, 0.5)';
|
|
1655
|
+
this.ctx.shadowBlur = 10;
|
|
1656
|
+
this.ctx.shadowOffsetX = 2;
|
|
1657
|
+
this.ctx.shadowOffsetY = 2;
|
|
1658
|
+
this.ctx.fillStyle = '#FFFFFF';
|
|
1659
|
+
this.ctx.fillRect(x - 10, y - 10, qrSize + 20, qrSize + 20);
|
|
1660
|
+
this.ctx.restore();
|
|
1661
|
+
|
|
1662
|
+
// Draw QR code
|
|
1663
|
+
this.ctx.drawImage(this.qrCodeCanvas, x, y, qrSize, qrSize);
|
|
1664
|
+
}
|
|
1665
|
+
|
|
1666
|
+
/**
 * Draw the "Next up:" queue panel in the bottom-right corner: up to three
 * upcoming songs on a rounded, shadowed, semi-transparent card, sized to
 * the widest measured line. Shown only while idle with a non-empty queue.
 * @param {number} width - Canvas width in px.
 * @param {number} height - Canvas height in px.
 */
drawQueueDisplay(width, height) {
  // Only show when setting is enabled and queue has items
  if (!this.displayQueue || !this.queueItems || this.queueItems.length === 0 || this.isPlaying) {
    return;
  }

  const padding = 120; // Move further from edge (left)
  const bottomPadding = 80; // Move up from bottom
  const rightX = width - padding;
  const lineHeight = 64;
  const labelFontSize = 48;
  const songFontSize = 40;

  this.ctx.save();

  // Calculate text dimensions for background
  this.ctx.font = `bold ${labelFontSize}px sans-serif`;
  const labelText = 'Next up:';
  const labelWidth = this.ctx.measureText(labelText).width;

  // Measure all song texts and prepare data (first 3 queue entries only)
  let maxWidth = labelWidth;
  const songData = this.queueItems.slice(0, 3).map((item) => {
    // Queue items may carry the title directly or nested under `song`.
    const title = item.title || item.song?.title || 'Unknown';
    const singer = item.requester || item.singer || '';

    // Measure title
    this.ctx.font = `${songFontSize}px sans-serif`;
    const titleWidth = this.ctx.measureText(title).width;

    // Measure singer if present
    let singerWidth = 0;
    if (singer) {
      const singerText = ` - ${singer}`;
      singerWidth = this.ctx.measureText(singerText).width;
    }

    const totalWidth = titleWidth + singerWidth;
    maxWidth = Math.max(maxWidth, totalWidth);

    return { title, singer };
  });

  // Calculate background dimensions
  const bgWidth = maxWidth + 30;
  const bgHeight = lineHeight + songData.length * lineHeight + 20;
  const bgX = rightX - bgWidth;
  const bgY = height - bgHeight - bottomPadding;

  // Draw semi-transparent background with shadow and rounded corners
  this.ctx.shadowColor = 'rgba(0, 0, 0, 0.5)';
  this.ctx.shadowBlur = 10;
  this.ctx.shadowOffsetX = 2;
  this.ctx.shadowOffsetY = 2;
  this.ctx.fillStyle = 'rgba(0, 0, 0, 0.7)';

  // Draw rounded rectangle (manual path; roundRect not used here)
  const radius = 10;
  this.ctx.beginPath();
  this.ctx.moveTo(bgX + radius, bgY);
  this.ctx.lineTo(bgX + bgWidth - radius, bgY);
  this.ctx.quadraticCurveTo(bgX + bgWidth, bgY, bgX + bgWidth, bgY + radius);
  this.ctx.lineTo(bgX + bgWidth, bgY + bgHeight - radius);
  this.ctx.quadraticCurveTo(
    bgX + bgWidth,
    bgY + bgHeight,
    bgX + bgWidth - radius,
    bgY + bgHeight
  );
  this.ctx.lineTo(bgX + radius, bgY + bgHeight);
  this.ctx.quadraticCurveTo(bgX, bgY + bgHeight, bgX, bgY + bgHeight - radius);
  this.ctx.lineTo(bgX, bgY + radius);
  this.ctx.quadraticCurveTo(bgX, bgY, bgX + radius, bgY);
  this.ctx.closePath();
  this.ctx.fill();

  // Disable the shadow for the text passes below.
  this.ctx.shadowColor = 'transparent';

  // Draw "Next up:" label in blue
  this.ctx.font = `bold ${labelFontSize}px sans-serif`;
  this.ctx.fillStyle = '#3B82F6'; // blue accent (Tailwind blue-500)
  this.ctx.textAlign = 'left';
  this.ctx.fillText(labelText, bgX + 15, bgY + labelFontSize + 10);

  // Draw queue items
  this.ctx.font = `${songFontSize}px sans-serif`;
  songData.forEach((item, index) => {
    const textY = bgY + labelFontSize + 10 + (index + 1) * lineHeight;
    const textX = bgX + 15;

    // Draw title in white
    this.ctx.fillStyle = '#FFFFFF';
    this.ctx.fillText(item.title, textX, textY);

    // Draw singer in yellow if present and not "KJ"
    if (item.singer) {
      const titleWidth = this.ctx.measureText(item.title).width;
      const isKJ = item.singer.toUpperCase() === 'KJ';
      this.ctx.fillStyle = isKJ ? '#FFFFFF' : '#FCD34D'; // yellow-300 for non-KJ singers
      this.ctx.fillText(` - ${item.singer}`, textX + titleWidth, textY);
    }
  });

  this.ctx.restore();
}
|
|
1774
|
+
|
|
1775
|
+
findCurrentLine() {
|
|
1776
|
+
// Use cached result if time hasn't changed much
|
|
1777
|
+
if (
|
|
1778
|
+
Math.abs(this.currentTime - this.lastTimeForLineCalculation) < this.lineCalculationTolerance
|
|
1779
|
+
) {
|
|
1780
|
+
return this.cachedCurrentLine;
|
|
1781
|
+
}
|
|
1782
|
+
|
|
1783
|
+
// Find current main singer line (exclude backup singers for progress tracking)
|
|
1784
|
+
this.cachedCurrentLine = this.findCurrentMainLine();
|
|
1785
|
+
this.lastTimeForLineCalculation = this.currentTime;
|
|
1786
|
+
return this.cachedCurrentLine;
|
|
1787
|
+
}
|
|
1788
|
+
|
|
1789
|
+
findCurrentMainLine() {
|
|
1790
|
+
if (!this.lyrics) return -1;
|
|
1791
|
+
|
|
1792
|
+
for (let i = 0; i < this.lyrics.length; i++) {
|
|
1793
|
+
const line = this.lyrics[i];
|
|
1794
|
+
// Only consider main singer lines for progress tracking
|
|
1795
|
+
if (
|
|
1796
|
+
!line.isBackup &&
|
|
1797
|
+
this.currentTime >= line.startTime &&
|
|
1798
|
+
this.currentTime <= line.endTime
|
|
1799
|
+
) {
|
|
1800
|
+
return i;
|
|
1801
|
+
}
|
|
1802
|
+
}
|
|
1803
|
+
|
|
1804
|
+
// Find the closest upcoming main singer line
|
|
1805
|
+
for (let i = 0; i < this.lyrics.length; i++) {
|
|
1806
|
+
if (!this.lyrics[i].isBackup && this.currentTime < this.lyrics[i].startTime) {
|
|
1807
|
+
// Find the previous main singer line (not backup)
|
|
1808
|
+
for (let j = i - 1; j >= 0; j--) {
|
|
1809
|
+
if (!this.lyrics[j].isBackup) {
|
|
1810
|
+
return j;
|
|
1811
|
+
}
|
|
1812
|
+
}
|
|
1813
|
+
// No previous main singer line found, return -1 to trigger progress bar
|
|
1814
|
+
return -1;
|
|
1815
|
+
}
|
|
1816
|
+
}
|
|
1817
|
+
|
|
1818
|
+
// Find the last main singer line
|
|
1819
|
+
for (let i = this.lyrics.length - 1; i >= 0; i--) {
|
|
1820
|
+
if (!this.lyrics[i].isBackup) {
|
|
1821
|
+
return i;
|
|
1822
|
+
}
|
|
1823
|
+
}
|
|
1824
|
+
|
|
1825
|
+
return -1;
|
|
1826
|
+
}
|
|
1827
|
+
|
|
1828
|
+
/**
|
|
1829
|
+
* Check if the current active line's singer has changed and notify via callback.
|
|
1830
|
+
* This is used for the backup:PA feature to route vocals to PA when needed.
|
|
1831
|
+
*/
|
|
1832
|
+
checkSingerChange() {
|
|
1833
|
+
if (!this.onSingerChange || !this.lyrics) return;
|
|
1834
|
+
|
|
1835
|
+
// Find ANY line we're currently in (including backup lines)
|
|
1836
|
+
// This is different from findCurrentMainLine which excludes backup
|
|
1837
|
+
let currentSinger = null;
|
|
1838
|
+
for (let i = 0; i < this.lyrics.length; i++) {
|
|
1839
|
+
const line = this.lyrics[i];
|
|
1840
|
+
if (this.currentTime >= line.startTime && this.currentTime <= line.endTime) {
|
|
1841
|
+
if (line.singer) {
|
|
1842
|
+
currentSinger = line.singer;
|
|
1843
|
+
break; // Use first matching line with singer
|
|
1844
|
+
}
|
|
1845
|
+
}
|
|
1846
|
+
}
|
|
1847
|
+
|
|
1848
|
+
// Trigger callback when singer changes
|
|
1849
|
+
if (currentSinger !== this.lastActiveSinger) {
|
|
1850
|
+
this.lastActiveSinger = currentSinger;
|
|
1851
|
+
this.onSingerChange(currentSinger);
|
|
1852
|
+
}
|
|
1853
|
+
}
|
|
1854
|
+
|
|
1855
|
+
drawCurrentLyricLine(currentLineIndex, canvasWidth, canvasHeight) {
|
|
1856
|
+
if (currentLineIndex < 0 || currentLineIndex >= this.lyrics.length) return;
|
|
1857
|
+
|
|
1858
|
+
const line = this.lyrics[currentLineIndex];
|
|
1859
|
+
|
|
1860
|
+
// Set up font
|
|
1861
|
+
this.ctx.font = `${this.settings.fontSize}px ${this.settings.fontFamily}`;
|
|
1862
|
+
this.ctx.textAlign = 'center';
|
|
1863
|
+
this.ctx.fillStyle = this.settings.activeColor; // Light blue for current line
|
|
1864
|
+
|
|
1865
|
+
// Get text from line (KAI format may have different text fields)
|
|
1866
|
+
let text = '';
|
|
1867
|
+
if (line.text) {
|
|
1868
|
+
text = line.text;
|
|
1869
|
+
} else if (line.words && line.words.length > 0) {
|
|
1870
|
+
// If we have words array, join them
|
|
1871
|
+
text = line.words.map((w) => w.text || w.word || w).join(' ');
|
|
1872
|
+
}
|
|
1873
|
+
|
|
1874
|
+
if (text && text.trim() !== '') {
|
|
1875
|
+
// Handle long text with proper wrapping
|
|
1876
|
+
const maxWidth = canvasWidth * 0.9;
|
|
1877
|
+
const words = text.split(' ');
|
|
1878
|
+
const lines = [];
|
|
1879
|
+
let currentLine = '';
|
|
1880
|
+
|
|
1881
|
+
for (const word of words) {
|
|
1882
|
+
const testLine = currentLine ? currentLine + ' ' + word : word;
|
|
1883
|
+
const testWidth = this.ctx.measureText(testLine).width;
|
|
1884
|
+
|
|
1885
|
+
if (testWidth <= maxWidth) {
|
|
1886
|
+
currentLine = testLine;
|
|
1887
|
+
} else {
|
|
1888
|
+
if (currentLine) {
|
|
1889
|
+
lines.push(currentLine);
|
|
1890
|
+
currentLine = word;
|
|
1891
|
+
} else {
|
|
1892
|
+
// Single word is too long, just add it anyway
|
|
1893
|
+
lines.push(word);
|
|
1894
|
+
}
|
|
1895
|
+
}
|
|
1896
|
+
}
|
|
1897
|
+
|
|
1898
|
+
if (currentLine) {
|
|
1899
|
+
lines.push(currentLine);
|
|
1900
|
+
}
|
|
1901
|
+
|
|
1902
|
+
// Draw each line centered vertically
|
|
1903
|
+
const lineSpacing = this.settings.lineHeight * 0.9;
|
|
1904
|
+
const totalHeight = lines.length * lineSpacing;
|
|
1905
|
+
let currentY = canvasHeight / 2 - totalHeight / 2 + lineSpacing;
|
|
1906
|
+
|
|
1907
|
+
lines.forEach((line) => {
|
|
1908
|
+
this.drawTextWithBackground(line, canvasWidth / 2, currentY);
|
|
1909
|
+
currentY += lineSpacing;
|
|
1910
|
+
});
|
|
1911
|
+
}
|
|
1912
|
+
}
|
|
1913
|
+
|
|
1914
|
+
/**
 * Render every lyric line active at the current playback time: main-singer
 * lines centred (shifted up), backup-singer lines anchored to the bottom,
 * plus any lyrics mid-transition and, optionally, the upcoming-lyric preview.
 * @param {number} canvasWidth - Canvas width in pixels.
 * @param {number} canvasHeight - Canvas height in pixels.
 * @param {boolean} [skipUpcoming=false] - When true, suppress the upcoming
 *   lyrics preview (used when the caller has already drawn its own preview).
 */
drawActiveLines(canvasWidth, canvasHeight, skipUpcoming = false) {
  if (!this.lyrics) return;

  // Update backup singer animations first
  this.updateBackupAnimations();

  // Find all active lines at current time (both main and backup singers)
  const activeLines = [];
  // Use interpolated time for precise 60fps lyric timing
  const now = this.getInterpolatedTime();

  for (let i = 0; i < this.lyrics.length; i++) {
    const line = this.lyrics[i];

    // Skip lines that are currently transitioning (prevents double rendering)
    if (this.lyricTransitions.has(i)) {
      continue;
    }

    // Skip lines that are hidden during transitions (prevents overlap)
    if (this.hiddenDuringTransition.has(i)) {
      continue;
    }

    if (!line.isDisabled && now >= line.startTime && now <= line.endTime) {
      // Shallow copy so the original lyric object is never mutated;
      // `index` lets later stages look up transition/animation state.
      activeLines.push({ ...line, index: i });
    }
  }

  // Separate main and backup singers
  const mainLines = activeLines.filter((line) => !line.isBackup);
  const backupLines = activeLines.filter((line) => line.isBackup);

  // Calculate vertical positioning for main lines only (backup renders at bottom separately)
  const totalMainLines = Math.max(1, mainLines.length);
  const lineSpacing = this.settings.lineHeight * 1.2;
  const totalHeight = totalMainLines * lineSpacing;
  let currentY = canvasHeight / 2 - totalHeight / 2 + lineSpacing - 180; // Move up by 180 pixels for more room below

  // Draw main singer lines
  mainLines.forEach((line) => {
    const nextY = this.drawSingleLine(line, canvasWidth, currentY, false); // false = main singer
    currentY = nextY || currentY + lineSpacing; // Use returned Y or fallback to old spacing
  });

  // Draw backup singer lines at bottom of screen (fixed position)
  // This keeps them out of the way and makes them feel less jarring
  if (backupLines.length > 0) {
    const bottomPadding = 10; // Distance from bottom of screen

    // Pre-calculate total height needed for backup lines (accounting for text wrapping)
    const maxWidth = canvasWidth * 0.9;
    let totalBackupHeight = 0;

    // Set font for measurement (italic for backup)
    this.ctx.font = `italic ${this.settings.fontSize}px ${this.settings.fontFamily}`;

    backupLines.forEach((line) => {
      const text = line.text || '';
      const words = text.split(' ');
      let wrappedLineCount = 1;
      let currentLine = '';

      // NOTE(review): a single word wider than maxWidth does not bump the
      // count here, while drawSingleLine does render it as its own row —
      // heights can disagree for extreme words; confirm intended.
      for (const word of words) {
        const testLine = currentLine ? currentLine + ' ' + word : word;
        const testWidth = this.ctx.measureText(testLine).width;

        if (testWidth <= maxWidth) {
          currentLine = testLine;
        } else {
          if (currentLine) {
            wrappedLineCount++;
            currentLine = word;
          }
        }
      }

      totalBackupHeight += wrappedLineCount * this.settings.lineHeight * 0.8;
    });

    // Position so bottom of backup content is at bottomPadding from screen bottom
    let backupY = canvasHeight - bottomPadding - totalBackupHeight;

    backupLines.forEach((line) => {
      const animation = this.backupAnimations.get(line.index);
      const alpha = animation ? animation.alpha : this.settings.backupMaxAlpha;
      const nextY = this.drawSingleLine(line, canvasWidth, backupY, true, alpha); // true = backup singer
      backupY = nextY || backupY + this.settings.lineHeight * 0.8;
    });
  }

  // Save the bottom position after drawing active lyrics (only if there were active lyrics)
  const hasActiveLyrics = activeLines.length > 0;
  if (hasActiveLyrics) {
    this.lastActiveLyricsBottom = currentY;
  }

  // Calculate upcoming position for both animations and drawing
  // If there are transitioning lyrics but no active lyrics, estimate position based on transitions
  let upcomingY = (this.lastActiveLyricsBottom || currentY) + 10;

  // If we have transitioning lyrics but no active lyrics (e.g. intro just ended),
  // position upcoming below the transitioning lyric's current position
  if (!hasActiveLyrics && this.lyricTransitions.size > 0) {
    // Find the lowest transitioning lyric position
    let lowestTransitionY = 0;
    for (const [_lineIndex, transition] of this.lyricTransitions.entries()) {
      // Linear interpolation of the transition's current Y by its progress.
      const transitionY =
        transition.startY + (transition.endY - transition.startY) * transition.progress;
      lowestTransitionY = Math.max(lowestTransitionY, transitionY);
    }
    if (lowestTransitionY > 0) {
      upcomingY = lowestTransitionY + this.settings.lineHeight * 1.5; // Position below with spacing
    }
  }

  // Check for lyrics transitioning from upcoming to active and start animations
  this.startTransitionAnimations(activeLines, upcomingY);

  // Draw transitioning lyrics (animating from upcoming to active)
  for (const [lineIndex, transition] of this.lyricTransitions.entries()) {
    const lyricLine = this.lyrics[lineIndex];
    if (lyricLine) {
      this.drawTransitioningLine(lyricLine, canvasWidth, transition);
    }
  }

  // Draw upcoming lyrics if enabled and not skipped (positioned dynamically after current lyrics)
  // Show upcoming lyrics when:
  // 1. Not currently skipping upcoming display
  // 2. Setting is enabled
  // 3. No active transitions (prevents flash during transitions)
  // The upcoming lyric will show in gray below the current lyric and animate up when it becomes active
  if (
    !skipUpcoming &&
    this.waveformPreferences.showUpcomingLyrics &&
    this.lyricTransitions.size === 0
  ) {
    this.drawUpcomingLyrics(canvasWidth, canvasHeight, upcomingY);
  }
}
|
|
2055
|
+
|
|
2056
|
+
/**
|
|
2057
|
+
* Get the display color for a line based on its singer type.
|
|
2058
|
+
* @param {object} line - The lyric line with optional singer field
|
|
2059
|
+
* @returns {string} - The hex color to use for the line
|
|
2060
|
+
*/
|
|
2061
|
+
getSingerColor(line) {
|
|
2062
|
+
const singer = line.singer;
|
|
2063
|
+
if (!singer) return this.settings.activeColor; // Default lead (A)
|
|
2064
|
+
|
|
2065
|
+
if (singer === 'B') return this.settings.singerBColor;
|
|
2066
|
+
if (singer === 'duet') return this.settings.duetColor;
|
|
2067
|
+
if (singer === 'backup:PA') return this.settings.backupPAColor;
|
|
2068
|
+
if (singer === 'backup') return this.settings.backupActiveColor;
|
|
2069
|
+
|
|
2070
|
+
// For any other value (like 'A'), use default active color
|
|
2071
|
+
return this.settings.activeColor;
|
|
2072
|
+
}
|
|
2073
|
+
|
|
2074
|
+
/**
|
|
2075
|
+
* Get the prefix icon for a singer type.
|
|
2076
|
+
* @param {object} line - The lyric line with optional singer field
|
|
2077
|
+
* @returns {string} - The prefix string (icon or empty)
|
|
2078
|
+
*/
|
|
2079
|
+
getSingerPrefix(line) {
|
|
2080
|
+
const singer = line.singer;
|
|
2081
|
+
// backup:PA uses brighter color instead of prefix (audio is the indicator)
|
|
2082
|
+
if (singer === 'backup') return '♪ ';
|
|
2083
|
+
return '';
|
|
2084
|
+
}
|
|
2085
|
+
|
|
2086
|
+
/**
 * Draw one lyric line (word-wrapped and horizontally centred) at the given
 * vertical position, styled by singer type: italic plus configurable alpha
 * for backup singers, singer-specific colour, optional prefix glyph on the
 * first wrapped row, and a glow colour for tagged singers.
 * @param {object} line - Lyric line (text or words[], optional singer tag).
 * @param {number} canvasWidth - Canvas width in pixels.
 * @param {number} yPosition - Baseline Y of the first wrapped row.
 * @param {boolean} isBackup - True to render in backup-singer style.
 * @param {number} [alpha=1.0] - Opacity applied only when isBackup is true.
 * @returns {number|null} Bottom Y after the last wrapped row, or null when
 *   the line had no drawable text (caller falls back to fixed spacing).
 */
drawSingleLine(line, canvasWidth, yPosition, isBackup, alpha = 1.0) {
  // Set up font (italic for backup singers)
  if (isBackup) {
    this.ctx.font = `italic ${this.settings.fontSize}px ${this.settings.fontFamily}`;
  } else {
    this.ctx.font = `${this.settings.fontSize}px ${this.settings.fontFamily}`;
  }
  this.ctx.textAlign = 'center';

  // Save context for alpha manipulation
  this.ctx.save();

  // Apply alpha for backup singers
  if (isBackup) {
    this.ctx.globalAlpha = alpha;
  }

  // Choose colors based on singer type (uses new singer field if available)
  this.ctx.fillStyle = this.getSingerColor(line);

  // Get text from line (either a plain text field or a words array)
  let text = '';
  if (line.text) {
    text = line.text;
  } else if (line.words && line.words.length > 0) {
    text = line.words.map((w) => w.text || w.word || w).join(' ');
  }

  if (text && text.trim() !== '') {
    // Handle long text with proper wrapping (greedy, 90% of canvas width)
    const maxWidth = canvasWidth * 0.9;
    const words = text.split(' ');
    const lines = [];
    let currentLine = '';

    for (const word of words) {
      const testLine = currentLine ? currentLine + ' ' + word : word;
      const testWidth = this.ctx.measureText(testLine).width;

      if (testWidth <= maxWidth) {
        currentLine = testLine;
      } else {
        if (currentLine) {
          lines.push(currentLine);
          currentLine = word;
        } else {
          // Single over-wide word: emit it unbroken as its own row.
          lines.push(word);
        }
      }
    }

    if (currentLine) {
      lines.push(currentLine);
    }

    // Draw each wrapped line, tracking the bottom edge for the caller.
    let finalY = yPosition;
    const prefix = this.getSingerPrefix(line);
    // Get glow color for non-lead singers (helps identify whose line it is)
    const glowColor = line.singer ? this.getSingerColor(line) : null;
    lines.forEach((textLine, index) => {
      const adjustedY = yPosition + index * this.settings.lineHeight * 0.8;
      finalY = adjustedY + this.settings.lineHeight * 0.8; // Bottom of this line

      // Add visual indicator prefix for first line only (based on singer type)
      if (index === 0 && prefix) {
        const prefixedText = `${prefix}${textLine}`;
        this.drawTextWithBackground(prefixedText, canvasWidth / 2, adjustedY, glowColor);
      } else {
        this.drawTextWithBackground(textLine, canvasWidth / 2, adjustedY, glowColor);
      }
    });

    // Restore context (removes alpha changes)
    this.ctx.restore();

    // Return the bottom Y position after all wrapped lines
    return finalY;
  }

  // Empty/blank text: restore context (removes alpha changes) and signal
  // the caller to use its fallback spacing.
  this.ctx.restore();

  return null;
}
|
|
2172
|
+
|
|
2173
|
+
/**
 * Advance the fade-in/out alpha animation state for every backup-singer
 * line. Each backup line gets an entry in this.backupAnimations keyed by
 * lyric index, holding { alpha, fadeDirection, lastStateChange }. Alpha
 * eases (cubic ease-out) between backupMinAlpha and backupMaxAlpha over
 * backupFadeDuration, driven by whether the line is active at the current
 * interpolated time. Entries for non-backup, disabled, or removed lines
 * are cleaned up.
 */
updateBackupAnimations() {
  if (!this.lyrics) return;

  // Use interpolated time for smooth 60fps backup singer fade animations
  const now = this.getInterpolatedTime();
  const _frameDelta = 16; // Assuming 60fps (16ms per frame)

  for (let i = 0; i < this.lyrics.length; i++) {
    const line = this.lyrics[i];

    // Skip non-backup or disabled lines (and drop any stale animation)
    if (!line.isBackup || line.isDisabled) {
      this.backupAnimations.delete(i);
      continue;
    }

    const isActive = now >= line.startTime && now <= line.endTime;
    // Lazily create the animation record at the minimum (faded-out) alpha.
    const animation = this.backupAnimations.get(i) || {
      alpha: this.settings.backupMinAlpha,
      fadeDirection: 0, // 0 = stable, 1 = fading in, -1 = fading out
      lastStateChange: now,
    };

    // Determine if we need to change fade direction
    const targetAlpha = isActive ? this.settings.backupMaxAlpha : this.settings.backupMinAlpha;
    let newFadeDirection = 0;

    if (isActive && animation.alpha < this.settings.backupMaxAlpha) {
      newFadeDirection = 1; // Fade in
    } else if (!isActive && animation.alpha > this.settings.backupMinAlpha) {
      newFadeDirection = -1; // Fade out
    }

    // Update fade direction if it changed (restarts the easing clock)
    if (newFadeDirection !== animation.fadeDirection) {
      animation.fadeDirection = newFadeDirection;
      animation.lastStateChange = now;
    }

    // Calculate alpha based on fade direction
    if (animation.fadeDirection !== 0) {
      const elapsed = now - animation.lastStateChange;
      const progress = Math.min(elapsed / this.settings.backupFadeDuration, 1.0);

      // Apply easing (simple ease-out)
      const easedProgress = 1 - Math.pow(1 - progress, 3);

      // NOTE(review): alpha is always interpolated from the min/max
      // endpoints, not from the alpha at the moment the direction flipped —
      // reversing a fade mid-animation can therefore jump visibly; confirm
      // this is the intended behaviour.
      if (animation.fadeDirection === 1) {
        // Fading in
        animation.alpha =
          this.settings.backupMinAlpha +
          (this.settings.backupMaxAlpha - this.settings.backupMinAlpha) * easedProgress;
      } else {
        // Fading out
        animation.alpha =
          this.settings.backupMaxAlpha -
          (this.settings.backupMaxAlpha - this.settings.backupMinAlpha) * easedProgress;
      }

      // Stop fading when complete
      if (progress >= 1.0) {
        animation.fadeDirection = 0;
        animation.alpha = targetAlpha;
      }
    }

    // Store the updated animation
    this.backupAnimations.set(i, animation);
  }

  // Clean up animations for lines that no longer exist
  for (const [lineIndex] of this.backupAnimations) {
    if (lineIndex >= this.lyrics.length) {
      this.backupAnimations.delete(lineIndex);
    }
  }
}
|
|
2250
|
+
|
|
2251
|
+
wrapWordsToLines(words, maxWidth) {
|
|
2252
|
+
const lines = [];
|
|
2253
|
+
let currentLine = [];
|
|
2254
|
+
let currentWidth = 0;
|
|
2255
|
+
|
|
2256
|
+
words.forEach((word, index) => {
|
|
2257
|
+
const wordWidth = this.ctx.measureText(word.text).width;
|
|
2258
|
+
const spaceWidth = index > 0 ? this.settings.wordSpacing : 0;
|
|
2259
|
+
const totalWidth = currentWidth + spaceWidth + wordWidth;
|
|
2260
|
+
|
|
2261
|
+
if (totalWidth <= maxWidth || currentLine.length === 0) {
|
|
2262
|
+
// Add word to current line
|
|
2263
|
+
currentLine.push(word);
|
|
2264
|
+
currentWidth = totalWidth;
|
|
2265
|
+
} else {
|
|
2266
|
+
// Start new line
|
|
2267
|
+
if (currentLine.length > 0) {
|
|
2268
|
+
lines.push(currentLine);
|
|
2269
|
+
}
|
|
2270
|
+
currentLine = [word];
|
|
2271
|
+
currentWidth = wordWidth;
|
|
2272
|
+
}
|
|
2273
|
+
});
|
|
2274
|
+
|
|
2275
|
+
if (currentLine.length > 0) {
|
|
2276
|
+
lines.push(currentLine);
|
|
2277
|
+
}
|
|
2278
|
+
|
|
2279
|
+
return lines;
|
|
2280
|
+
}
|
|
2281
|
+
|
|
2282
|
+
drawWordLine(words, centerX, y, maxWidth, isCurrentLine) {
|
|
2283
|
+
// Calculate total width of this line
|
|
2284
|
+
const totalWidth = words.reduce((width, word, index) => {
|
|
2285
|
+
const wordWidth = this.ctx.measureText(word.text).width;
|
|
2286
|
+
const spacing = index < words.length - 1 ? this.settings.wordSpacing : 0;
|
|
2287
|
+
return width + wordWidth + spacing;
|
|
2288
|
+
}, 0);
|
|
2289
|
+
|
|
2290
|
+
// Start position for centering
|
|
2291
|
+
let x = centerX - totalWidth / 2;
|
|
2292
|
+
|
|
2293
|
+
words.forEach((word, _index) => {
|
|
2294
|
+
const isActiveWord =
|
|
2295
|
+
isCurrentLine && this.currentTime >= word.startTime && this.currentTime <= word.endTime;
|
|
2296
|
+
|
|
2297
|
+
// Set color
|
|
2298
|
+
this.ctx.fillStyle = isActiveWord
|
|
2299
|
+
? this.settings.activeColor
|
|
2300
|
+
: isCurrentLine
|
|
2301
|
+
? '#CCCCCC'
|
|
2302
|
+
: this.settings.textColor;
|
|
2303
|
+
|
|
2304
|
+
// Draw word
|
|
2305
|
+
this.ctx.textAlign = 'left';
|
|
2306
|
+
this.drawTextWithBackground(word.text, x, y);
|
|
2307
|
+
|
|
2308
|
+
// Draw bouncing ball for active word
|
|
2309
|
+
if (isActiveWord && isCurrentLine) {
|
|
2310
|
+
this.drawBouncingBall(x, word, y);
|
|
2311
|
+
}
|
|
2312
|
+
|
|
2313
|
+
// Move to next word position
|
|
2314
|
+
const wordWidth = this.ctx.measureText(word.text).width;
|
|
2315
|
+
x += wordWidth + this.settings.wordSpacing;
|
|
2316
|
+
});
|
|
2317
|
+
}
|
|
2318
|
+
|
|
2319
|
+
isInInstrumentalIntro() {
|
|
2320
|
+
if (!this.lyrics || this.lyrics.length === 0) return false;
|
|
2321
|
+
|
|
2322
|
+
const now = this.getInterpolatedTime();
|
|
2323
|
+
const firstLine = this.lyrics[0];
|
|
2324
|
+
|
|
2325
|
+
if (!firstLine) return false;
|
|
2326
|
+
|
|
2327
|
+
// Check if we're before the first lyric starts
|
|
2328
|
+
return now < firstLine.startTime;
|
|
2329
|
+
}
|
|
2330
|
+
|
|
2331
|
+
isInInstrumentalOutro() {
|
|
2332
|
+
if (!this.lyrics || this.lyrics.length === 0 || !this.songDuration) return false;
|
|
2333
|
+
|
|
2334
|
+
const now = this.getInterpolatedTime();
|
|
2335
|
+
// Find the last enabled main singer line (not backup, not disabled)
|
|
2336
|
+
let lastMainLine = null;
|
|
2337
|
+
for (let i = this.lyrics.length - 1; i >= 0; i--) {
|
|
2338
|
+
const line = this.lyrics[i];
|
|
2339
|
+
if (!line.isBackup && !line.isDisabled) {
|
|
2340
|
+
lastMainLine = line;
|
|
2341
|
+
break;
|
|
2342
|
+
}
|
|
2343
|
+
}
|
|
2344
|
+
|
|
2345
|
+
if (!lastMainLine) return false;
|
|
2346
|
+
|
|
2347
|
+
// Check if we're after the last main singer line and there's enough outro time
|
|
2348
|
+
const outroLength = this.songDuration - lastMainLine.endTime;
|
|
2349
|
+
return now > lastMainLine.endTime && outroLength > 0;
|
|
2350
|
+
}
|
|
2351
|
+
|
|
2352
|
+
getLastMainSingerLine() {
|
|
2353
|
+
if (!this.lyrics) return null;
|
|
2354
|
+
|
|
2355
|
+
// Find the last enabled main singer line (not backup, not disabled)
|
|
2356
|
+
for (let i = this.lyrics.length - 1; i >= 0; i--) {
|
|
2357
|
+
const line = this.lyrics[i];
|
|
2358
|
+
if (!line.isBackup && !line.isDisabled) {
|
|
2359
|
+
return line;
|
|
2360
|
+
}
|
|
2361
|
+
}
|
|
2362
|
+
return null;
|
|
2363
|
+
}
|
|
2364
|
+
|
|
2365
|
+
isInInstrumentalGap(currentLineIndex) {
|
|
2366
|
+
if (!this.lyrics || currentLineIndex < 0) return false;
|
|
2367
|
+
|
|
2368
|
+
const now = this.getInterpolatedTime();
|
|
2369
|
+
const currentLine = this.lyrics[currentLineIndex];
|
|
2370
|
+
|
|
2371
|
+
// Find the NEXT MAIN SINGER line (skip backup singers)
|
|
2372
|
+
let nextMainLine = null;
|
|
2373
|
+
for (let i = currentLineIndex + 1; i < this.lyrics.length; i++) {
|
|
2374
|
+
if (!this.lyrics[i].isBackup && !this.lyrics[i].isDisabled) {
|
|
2375
|
+
nextMainLine = this.lyrics[i];
|
|
2376
|
+
break;
|
|
2377
|
+
}
|
|
2378
|
+
}
|
|
2379
|
+
|
|
2380
|
+
if (!currentLine || !nextMainLine) return false;
|
|
2381
|
+
|
|
2382
|
+
const currentLineEnd = currentLine.endTime;
|
|
2383
|
+
const nextLineStart = nextMainLine.startTime;
|
|
2384
|
+
const gapDuration = nextLineStart - currentLineEnd;
|
|
2385
|
+
|
|
2386
|
+
// Only consider it an instrumental gap if it's longer than 5 seconds
|
|
2387
|
+
if (gapDuration <= 5) return false;
|
|
2388
|
+
|
|
2389
|
+
// Check if we're currently in the gap between main singers
|
|
2390
|
+
return now >= currentLineEnd && now <= nextLineStart;
|
|
2391
|
+
}
|
|
2392
|
+
|
|
2393
|
+
isInMainSingerInstrumentalGap() {
|
|
2394
|
+
if (!this.lyrics) return { isInGap: false };
|
|
2395
|
+
|
|
2396
|
+
const now = this.getInterpolatedTime();
|
|
2397
|
+
|
|
2398
|
+
// Find the last main singer line that has ended
|
|
2399
|
+
let lastMainLine = null;
|
|
2400
|
+
let lastMainLineIndex = -1;
|
|
2401
|
+
for (let i = 0; i < this.lyrics.length; i++) {
|
|
2402
|
+
const line = this.lyrics[i];
|
|
2403
|
+
if (!line.isBackup && !line.isDisabled && now >= line.endTime) {
|
|
2404
|
+
lastMainLine = line;
|
|
2405
|
+
lastMainLineIndex = i;
|
|
2406
|
+
}
|
|
2407
|
+
}
|
|
2408
|
+
|
|
2409
|
+
if (!lastMainLine) return { isInGap: false };
|
|
2410
|
+
|
|
2411
|
+
// Find the next main singer line that hasn't started yet
|
|
2412
|
+
let nextMainLine = null;
|
|
2413
|
+
for (let i = lastMainLineIndex + 1; i < this.lyrics.length; i++) {
|
|
2414
|
+
const line = this.lyrics[i];
|
|
2415
|
+
if (!line.isBackup && !line.isDisabled && now < line.startTime) {
|
|
2416
|
+
nextMainLine = line;
|
|
2417
|
+
break;
|
|
2418
|
+
}
|
|
2419
|
+
}
|
|
2420
|
+
|
|
2421
|
+
if (!nextMainLine) return { isInGap: false };
|
|
2422
|
+
|
|
2423
|
+
const gapDuration = nextMainLine.startTime - lastMainLine.endTime;
|
|
2424
|
+
|
|
2425
|
+
// Only consider it an instrumental gap if it's longer than 5 seconds
|
|
2426
|
+
if (gapDuration <= 5) return { isInGap: false };
|
|
2427
|
+
|
|
2428
|
+
// Check if we're currently in the gap between main singers
|
|
2429
|
+
const isInGap = now >= lastMainLine.endTime && now <= nextMainLine.startTime;
|
|
2430
|
+
|
|
2431
|
+
return {
|
|
2432
|
+
isInGap,
|
|
2433
|
+
lastMainLineIndex,
|
|
2434
|
+
nextMainLine,
|
|
2435
|
+
gapProgress: isInGap ? (now - lastMainLine.endTime) / gapDuration : 0,
|
|
2436
|
+
};
|
|
2437
|
+
}
|
|
2438
|
+
|
|
2439
|
+
/**
 * During a long (>5s) instrumental gap after the given line, draw a
 * progress bar across the top, a preview of the next main-singer lyric
 * below it, and any active backup lines (with the regular upcoming-lyrics
 * preview suppressed, since this method draws its own).
 * @param {number} currentLineIndex - Index of the line the gap follows.
 * @param {number} canvasWidth - Canvas width in pixels.
 * @param {number} canvasHeight - Canvas height in pixels.
 */
drawInstrumentalProgressBar(currentLineIndex, canvasWidth, canvasHeight) {
  if (!this.lyrics || currentLineIndex < 0) return;

  // Use interpolated time for smooth 60fps progress bar
  const now = this.getInterpolatedTime();
  const currentLine = this.lyrics[currentLineIndex];

  // Find the next main singer line (skip backup singers and disabled lines)
  let nextMainLine = null;
  // let nextMainLineIndex = -1; // Reserved for future use
  for (let i = currentLineIndex + 1; i < this.lyrics.length; i++) {
    if (!this.lyrics[i].isBackup && !this.lyrics[i].isDisabled) {
      nextMainLine = this.lyrics[i];
      // nextMainLineIndex = i;
      break;
    }
  }

  if (!currentLine || !nextMainLine) return;

  // Check if we're in an instrumental section (between current line end and next main line start)
  const currentLineEnd = currentLine.endTime;
  const nextLineStart = nextMainLine.startTime;
  const gapDuration = nextLineStart - currentLineEnd;

  // Only show progress bar for instrumental gaps longer than 5 seconds
  if (gapDuration <= 5) return;

  // Are we currently in the instrumental gap?
  if (now >= currentLineEnd && now <= nextLineStart) {
    // We're in the instrumental section - show progress bar and upcoming lyrics
    const gapProgress = (now - currentLineEnd) / gapDuration;
    const _timeRemaining = nextLineStart - now;

    // Draw progress bar at top
    const barWidth = canvasWidth * 0.8;
    const barX = (canvasWidth - barWidth) / 2;
    const barY = 80;

    const _barInfo = this.drawProgressBar(
      barX,
      barY,
      barWidth,
      undefined,
      gapProgress,
      canvasWidth
    );

    // Draw upcoming lyrics preview below progress bar with proper spacing
    this.drawUpcomingLyricsPreview(
      nextMainLine,
      canvasWidth,
      canvasHeight,
      gapProgress,
      barY + this.settings.progressBarMargin
    );

    // Draw any active backup singers during the instrumental gap (but skip upcoming lyrics since we already showed them)
    this.drawActiveLines(canvasWidth, canvasHeight, true); // true = skip upcoming lyrics
  }
}
|
|
2500
|
+
|
|
2501
|
+
drawInstrumentalIntro(canvasWidth, canvasHeight) {
|
|
2502
|
+
if (!this.lyrics || this.lyrics.length === 0) return;
|
|
2503
|
+
|
|
2504
|
+
// Use interpolated time for smooth 60fps progress bar
|
|
2505
|
+
const now = this.getInterpolatedTime();
|
|
2506
|
+
const firstLine = this.lyrics[0];
|
|
2507
|
+
|
|
2508
|
+
if (!firstLine) return;
|
|
2509
|
+
|
|
2510
|
+
const introDuration = firstLine.startTime;
|
|
2511
|
+
const introProgress = now / introDuration;
|
|
2512
|
+
|
|
2513
|
+
// Draw progress bar at top
|
|
2514
|
+
const barWidth = canvasWidth * 0.8;
|
|
2515
|
+
const barX = (canvasWidth - barWidth) / 2;
|
|
2516
|
+
const barY = 80;
|
|
2517
|
+
|
|
2518
|
+
const _barInfo = this.drawProgressBar(
|
|
2519
|
+
barX,
|
|
2520
|
+
barY,
|
|
2521
|
+
barWidth,
|
|
2522
|
+
undefined,
|
|
2523
|
+
introProgress,
|
|
2524
|
+
canvasWidth
|
|
2525
|
+
);
|
|
2526
|
+
|
|
2527
|
+
// Calculate where the upcoming lyric is being drawn
|
|
2528
|
+
const upcomingY = barY + this.settings.progressBarMargin;
|
|
2529
|
+
|
|
2530
|
+
// Lock the first lyric as upcoming during intro
|
|
2531
|
+
if (this.lockedUpcomingIndex !== 0) {
|
|
2532
|
+
this.lockedUpcomingIndex = 0;
|
|
2533
|
+
}
|
|
2534
|
+
|
|
2535
|
+
// Check if we should start the transition animation (0.3s before first lyric starts)
|
|
2536
|
+
// This ensures smooth transition from intro preview to active lyric
|
|
2537
|
+
this.startTransitionAnimations([], upcomingY);
|
|
2538
|
+
|
|
2539
|
+
// Draw transitioning lyrics if animation has started
|
|
2540
|
+
for (const [lineIndex, transition] of this.lyricTransitions.entries()) {
|
|
2541
|
+
const lyricLine = this.lyrics[lineIndex];
|
|
2542
|
+
if (lyricLine) {
|
|
2543
|
+
this.drawTransitioningLine(lyricLine, canvasWidth, transition);
|
|
2544
|
+
}
|
|
2545
|
+
}
|
|
2546
|
+
|
|
2547
|
+
// Draw upcoming first lyrics with proper spacing (if not transitioning)
|
|
2548
|
+
if (!this.lyricTransitions.has(0)) {
|
|
2549
|
+
this.drawUpcomingLyricsPreview(
|
|
2550
|
+
firstLine,
|
|
2551
|
+
canvasWidth,
|
|
2552
|
+
canvasHeight,
|
|
2553
|
+
introProgress,
|
|
2554
|
+
upcomingY
|
|
2555
|
+
);
|
|
2556
|
+
}
|
|
2557
|
+
}
|
|
2558
|
+
|
|
2559
|
+
drawInstrumentalOutro(canvasWidth, canvasHeight) {
|
|
2560
|
+
// Find the last main singer line to calculate outro progress
|
|
2561
|
+
const lastMainLine = this.getLastMainSingerLine();
|
|
2562
|
+
if (!lastMainLine) return;
|
|
2563
|
+
|
|
2564
|
+
// Use interpolated time for smooth 60fps progress bar
|
|
2565
|
+
const currentTime = this.getInterpolatedTime();
|
|
2566
|
+
const outroStartTime = lastMainLine.endTime;
|
|
2567
|
+
const outroLength = this.songDuration - outroStartTime;
|
|
2568
|
+
const outroProgress = Math.max(0, Math.min(1, (currentTime - outroStartTime) / outroLength));
|
|
2569
|
+
|
|
2570
|
+
// Draw progress bar at top
|
|
2571
|
+
const _barInfo = this.drawProgressBar(
|
|
2572
|
+
undefined,
|
|
2573
|
+
undefined,
|
|
2574
|
+
undefined,
|
|
2575
|
+
undefined,
|
|
2576
|
+
outroProgress,
|
|
2577
|
+
canvasWidth
|
|
2578
|
+
);
|
|
2579
|
+
|
|
2580
|
+
// Show outro message below progress bar
|
|
2581
|
+
this.ctx.fillStyle = this.settings.textColor;
|
|
2582
|
+
this.ctx.font = `${this.settings.fontSize}px ${this.settings.fontFamily}`;
|
|
2583
|
+
this.ctx.textAlign = 'center';
|
|
2584
|
+
|
|
2585
|
+
const centerX = canvasWidth / 2;
|
|
2586
|
+
const centerY = canvasHeight / 2;
|
|
2587
|
+
|
|
2588
|
+
this.drawTextWithBackground('♫ Instrumental Outro ♫', centerX, centerY);
|
|
2589
|
+
}
|
|
2590
|
+
|
|
2591
|
+
drawBackupOnlyProgressBar(canvasWidth, canvasHeight) {
|
|
2592
|
+
if (!this.lyrics) return;
|
|
2593
|
+
|
|
2594
|
+
// Use interpolated time for smooth 60fps progress bar
|
|
2595
|
+
const now = this.getInterpolatedTime();
|
|
2596
|
+
|
|
2597
|
+
// Check if there's an active main singer at interpolated time
|
|
2598
|
+
// (prevents flash when interpolated time is ahead of reported time)
|
|
2599
|
+
for (let i = 0; i < this.lyrics.length; i++) {
|
|
2600
|
+
const line = this.lyrics[i];
|
|
2601
|
+
if (!line.isBackup && !line.isDisabled && now >= line.startTime && now <= line.endTime) {
|
|
2602
|
+
// There's an active main singer, don't show backup-only progress bar
|
|
2603
|
+
return;
|
|
2604
|
+
}
|
|
2605
|
+
}
|
|
2606
|
+
|
|
2607
|
+
// Find the next main singer line
|
|
2608
|
+
let nextMainLine = null;
|
|
2609
|
+
for (let i = 0; i < this.lyrics.length; i++) {
|
|
2610
|
+
if (
|
|
2611
|
+
!this.lyrics[i].isBackup &&
|
|
2612
|
+
!this.lyrics[i].isDisabled &&
|
|
2613
|
+
now < this.lyrics[i].startTime
|
|
2614
|
+
) {
|
|
2615
|
+
nextMainLine = this.lyrics[i];
|
|
2616
|
+
break;
|
|
2617
|
+
}
|
|
2618
|
+
}
|
|
2619
|
+
|
|
2620
|
+
if (!nextMainLine) return;
|
|
2621
|
+
|
|
2622
|
+
// Find when the backup-only period started (either song start or end of last main line)
|
|
2623
|
+
let gapStart = 0;
|
|
2624
|
+
for (let i = this.lyrics.length - 1; i >= 0; i--) {
|
|
2625
|
+
if (!this.lyrics[i].isBackup && !this.lyrics[i].isDisabled && this.lyrics[i].endTime <= now) {
|
|
2626
|
+
gapStart = this.lyrics[i].endTime;
|
|
2627
|
+
break;
|
|
2628
|
+
}
|
|
2629
|
+
}
|
|
2630
|
+
|
|
2631
|
+
const gapDuration = nextMainLine.startTime - gapStart;
|
|
2632
|
+
|
|
2633
|
+
// Only show progress bar for gaps longer than 5 seconds
|
|
2634
|
+
if (gapDuration <= 5) return;
|
|
2635
|
+
|
|
2636
|
+
// Calculate progress
|
|
2637
|
+
const gapProgress = (now - gapStart) / gapDuration;
|
|
2638
|
+
|
|
2639
|
+
// Draw progress bar at top
|
|
2640
|
+
const barWidth = canvasWidth * 0.8;
|
|
2641
|
+
const barX = (canvasWidth - barWidth) / 2;
|
|
2642
|
+
const barY = 80;
|
|
2643
|
+
|
|
2644
|
+
const _barInfo = this.drawProgressBar(
|
|
2645
|
+
barX,
|
|
2646
|
+
barY,
|
|
2647
|
+
barWidth,
|
|
2648
|
+
undefined,
|
|
2649
|
+
gapProgress,
|
|
2650
|
+
canvasWidth
|
|
2651
|
+
);
|
|
2652
|
+
|
|
2653
|
+
// Draw upcoming main lyrics preview
|
|
2654
|
+
this.drawUpcomingLyricsPreview(
|
|
2655
|
+
nextMainLine,
|
|
2656
|
+
canvasWidth,
|
|
2657
|
+
canvasHeight,
|
|
2658
|
+
gapProgress,
|
|
2659
|
+
barY + this.settings.progressBarMargin
|
|
2660
|
+
);
|
|
2661
|
+
|
|
2662
|
+
// Still render any active backup singers below the progress bar
|
|
2663
|
+
this.drawActiveLines(canvasWidth, canvasHeight);
|
|
2664
|
+
}
|
|
2665
|
+
|
|
2666
|
+
drawUpcomingLyricsPreview(nextLine, canvasWidth, canvasHeight, progress, startY) {
|
|
2667
|
+
if (!nextLine) return;
|
|
2668
|
+
|
|
2669
|
+
// Get text from line (handle different KAI formats)
|
|
2670
|
+
let text = '';
|
|
2671
|
+
if (nextLine.text) {
|
|
2672
|
+
text = nextLine.text;
|
|
2673
|
+
} else if (nextLine.words && nextLine.words.length > 0) {
|
|
2674
|
+
text = nextLine.words.map((w) => w.text || w.word || w).join(' ');
|
|
2675
|
+
}
|
|
2676
|
+
|
|
2677
|
+
if (!text || text.trim() === '') return;
|
|
2678
|
+
|
|
2679
|
+
// Set font for upcoming lyrics (same size as current line)
|
|
2680
|
+
this.ctx.font = `${this.settings.fontSize}px ${this.settings.fontFamily}`;
|
|
2681
|
+
this.ctx.textAlign = 'center';
|
|
2682
|
+
|
|
2683
|
+
// Determine color based on readiness
|
|
2684
|
+
const isReady = progress >= 1.0;
|
|
2685
|
+
this.ctx.fillStyle = isReady ? this.settings.activeColor : this.settings.upcomingColor;
|
|
2686
|
+
|
|
2687
|
+
// Handle long text with proper wrapping
|
|
2688
|
+
const maxWidth = canvasWidth * 0.9;
|
|
2689
|
+
const words = text.split(' ');
|
|
2690
|
+
const lines = [];
|
|
2691
|
+
let currentLine = '';
|
|
2692
|
+
|
|
2693
|
+
for (const word of words) {
|
|
2694
|
+
const testLine = currentLine ? currentLine + ' ' + word : word;
|
|
2695
|
+
const testWidth = this.ctx.measureText(testLine).width;
|
|
2696
|
+
|
|
2697
|
+
if (testWidth <= maxWidth) {
|
|
2698
|
+
currentLine = testLine;
|
|
2699
|
+
} else {
|
|
2700
|
+
if (currentLine) {
|
|
2701
|
+
lines.push(currentLine);
|
|
2702
|
+
currentLine = word;
|
|
2703
|
+
} else {
|
|
2704
|
+
// Single word is too long, just add it anyway
|
|
2705
|
+
lines.push(word);
|
|
2706
|
+
}
|
|
2707
|
+
}
|
|
2708
|
+
}
|
|
2709
|
+
|
|
2710
|
+
if (currentLine) {
|
|
2711
|
+
lines.push(currentLine);
|
|
2712
|
+
}
|
|
2713
|
+
|
|
2714
|
+
// Draw each line below the progress bar (consistent with other functions)
|
|
2715
|
+
const lineSpacing = this.settings.lineHeight * 0.8;
|
|
2716
|
+
let currentY = startY + 60; // Start below the progress bar with some padding
|
|
2717
|
+
|
|
2718
|
+
lines.forEach((line) => {
|
|
2719
|
+
this.drawTextWithBackground(line, canvasWidth / 2, currentY);
|
|
2720
|
+
currentY += lineSpacing;
|
|
2721
|
+
});
|
|
2722
|
+
}
|
|
2723
|
+
|
|
2724
|
+
wrapWordsToLinesPreview(words, maxWidth) {
|
|
2725
|
+
const lines = [];
|
|
2726
|
+
let currentLine = [];
|
|
2727
|
+
let currentWidth = 0;
|
|
2728
|
+
|
|
2729
|
+
words.forEach((word, index) => {
|
|
2730
|
+
const wordWidth = this.ctx.measureText(word).width;
|
|
2731
|
+
const spaceWidth = index > 0 ? this.settings.wordSpacing : 0;
|
|
2732
|
+
const totalWidth = currentWidth + spaceWidth + wordWidth;
|
|
2733
|
+
|
|
2734
|
+
if (totalWidth <= maxWidth || currentLine.length === 0) {
|
|
2735
|
+
currentLine.push(word);
|
|
2736
|
+
currentWidth = totalWidth;
|
|
2737
|
+
} else {
|
|
2738
|
+
if (currentLine.length > 0) {
|
|
2739
|
+
lines.push(currentLine);
|
|
2740
|
+
}
|
|
2741
|
+
currentLine = [word];
|
|
2742
|
+
currentWidth = wordWidth;
|
|
2743
|
+
}
|
|
2744
|
+
});
|
|
2745
|
+
|
|
2746
|
+
if (currentLine.length > 0) {
|
|
2747
|
+
lines.push(currentLine);
|
|
2748
|
+
}
|
|
2749
|
+
|
|
2750
|
+
return lines;
|
|
2751
|
+
}
|
|
2752
|
+
|
|
2753
|
+
drawWordLinePreview(words, centerX, y, maxWidth, textColor, isReady) {
|
|
2754
|
+
// Calculate total width of this line
|
|
2755
|
+
const totalWidth = words.reduce((width, word, index) => {
|
|
2756
|
+
const wordWidth = this.ctx.measureText(word).width;
|
|
2757
|
+
const spacing = index < words.length - 1 ? this.settings.wordSpacing : 0;
|
|
2758
|
+
return width + wordWidth + spacing;
|
|
2759
|
+
}, 0);
|
|
2760
|
+
|
|
2761
|
+
// Start position for centering
|
|
2762
|
+
let x = centerX - totalWidth / 2;
|
|
2763
|
+
|
|
2764
|
+
words.forEach((word, _index) => {
|
|
2765
|
+
this.ctx.fillStyle = textColor;
|
|
2766
|
+
this.ctx.textAlign = 'left';
|
|
2767
|
+
|
|
2768
|
+
// Add subtle glow effect when ready
|
|
2769
|
+
if (isReady) {
|
|
2770
|
+
this.ctx.save();
|
|
2771
|
+
this.ctx.shadowColor = this.settings.activeColor;
|
|
2772
|
+
this.ctx.shadowBlur = 4;
|
|
2773
|
+
this.ctx.fillText(word, x, y);
|
|
2774
|
+
this.ctx.restore();
|
|
2775
|
+
} else {
|
|
2776
|
+
this.ctx.fillText(word, x, y);
|
|
2777
|
+
}
|
|
2778
|
+
|
|
2779
|
+
// Move to next word position
|
|
2780
|
+
const wordWidth = this.ctx.measureText(word).width;
|
|
2781
|
+
x += wordWidth + this.settings.wordSpacing;
|
|
2782
|
+
});
|
|
2783
|
+
}
|
|
2784
|
+
|
|
2785
|
+
drawTextWithBackground(text, x, y, glowColor = null) {
|
|
2786
|
+
// Measure text dimensions properly
|
|
2787
|
+
const metrics = this.ctx.measureText(text);
|
|
2788
|
+
const textWidth = metrics.width;
|
|
2789
|
+
const textAscent = metrics.actualBoundingBoxAscent || this.settings.fontSize * 0.8;
|
|
2790
|
+
const textDescent = metrics.actualBoundingBoxDescent || this.settings.fontSize * 0.2;
|
|
2791
|
+
const textHeight = textAscent + textDescent;
|
|
2792
|
+
|
|
2793
|
+
// Calculate background rectangle with padding (10% larger)
|
|
2794
|
+
const padding = 12;
|
|
2795
|
+
const extraSize = 0.1; // 10% bigger
|
|
2796
|
+
const bgWidth = (textWidth + padding * 2) * (1 + extraSize);
|
|
2797
|
+
const bgHeight = (textHeight + padding) * (1 + extraSize);
|
|
2798
|
+
|
|
2799
|
+
// Center the larger background around the text
|
|
2800
|
+
const bgX = x - bgWidth / 2;
|
|
2801
|
+
const bgY = y - textAscent - (bgHeight - textHeight) / 2;
|
|
2802
|
+
const borderRadius = 12;
|
|
2803
|
+
|
|
2804
|
+
// Draw rounded background
|
|
2805
|
+
this.ctx.save();
|
|
2806
|
+
this.ctx.fillStyle = 'rgba(0, 0, 0, 0.65)';
|
|
2807
|
+
this.ctx.beginPath();
|
|
2808
|
+
this.ctx.roundRect(bgX, bgY, bgWidth, bgHeight, borderRadius);
|
|
2809
|
+
this.ctx.fill();
|
|
2810
|
+
this.ctx.restore();
|
|
2811
|
+
|
|
2812
|
+
// Draw main text with optional glow for singer identification
|
|
2813
|
+
this.ctx.save();
|
|
2814
|
+
if (glowColor) {
|
|
2815
|
+
this.ctx.shadowColor = glowColor;
|
|
2816
|
+
this.ctx.shadowBlur = 12;
|
|
2817
|
+
this.ctx.shadowOffsetX = 0;
|
|
2818
|
+
this.ctx.shadowOffsetY = 0;
|
|
2819
|
+
}
|
|
2820
|
+
this.ctx.fillText(text, x, y);
|
|
2821
|
+
this.ctx.restore();
|
|
2822
|
+
}
|
|
2823
|
+
|
|
2824
|
+
drawProgressBar(x, y, width, height, progress, canvasWidth) {
|
|
2825
|
+
// Default positioning if not provided
|
|
2826
|
+
const barX = x !== undefined ? x : 50;
|
|
2827
|
+
const barY = y !== undefined ? y : 50;
|
|
2828
|
+
const barWidth = width !== undefined ? width : canvasWidth - 100;
|
|
2829
|
+
const barHeight = height !== undefined ? height : this.settings.progressBarHeight;
|
|
2830
|
+
|
|
2831
|
+
// Progress bar background
|
|
2832
|
+
this.ctx.fillStyle = this.settings.progressBarBg;
|
|
2833
|
+
this.ctx.fillRect(barX, barY, barWidth, barHeight);
|
|
2834
|
+
|
|
2835
|
+
// Progress fill
|
|
2836
|
+
this.ctx.fillStyle = this.settings.progressBarColor;
|
|
2837
|
+
this.ctx.fillRect(barX, barY, barWidth * Math.max(0, Math.min(1, progress)), barHeight);
|
|
2838
|
+
|
|
2839
|
+
return { barX, barY, barWidth, barHeight };
|
|
2840
|
+
}
|
|
2841
|
+
|
|
2842
|
+
drawBouncingBall(x, word, lineY) {
|
|
2843
|
+
// Calculate ball position based on progress through word
|
|
2844
|
+
const progress = (this.currentTime - word.startTime) / (word.endTime - word.startTime);
|
|
2845
|
+
const wordWidth = this.ctx.measureText(word.text).width;
|
|
2846
|
+
|
|
2847
|
+
const ballX = x + progress * wordWidth;
|
|
2848
|
+
const ballY = lineY - 30 + Math.sin(progress * Math.PI * 4) * 5; // Bouncing effect
|
|
2849
|
+
|
|
2850
|
+
// Draw ball
|
|
2851
|
+
this.ctx.save();
|
|
2852
|
+
this.ctx.fillStyle = this.settings.ballColor;
|
|
2853
|
+
this.ctx.beginPath();
|
|
2854
|
+
this.ctx.arc(ballX, ballY, this.settings.ballSize, 0, Math.PI * 2);
|
|
2855
|
+
this.ctx.fill();
|
|
2856
|
+
this.ctx.restore();
|
|
2857
|
+
}
|
|
2858
|
+
|
|
2859
|
+
// Draw song info when loaded but not playing
|
|
2860
|
+
drawSongInfo(width, height, songData) {
|
|
2861
|
+
const ctx = this.ctx;
|
|
2862
|
+
ctx.save();
|
|
2863
|
+
|
|
2864
|
+
// Get song info from various possible locations
|
|
2865
|
+
const title =
|
|
2866
|
+
songData.title ||
|
|
2867
|
+
songData.metadata?.title ||
|
|
2868
|
+
songData.name?.replace('.kai', '') ||
|
|
2869
|
+
'Unknown Title';
|
|
2870
|
+
const artist = songData.artist || songData.metadata?.artist || 'Unknown Artist';
|
|
2871
|
+
const requester = songData.requester;
|
|
2872
|
+
|
|
2873
|
+
// Position higher on canvas (35% from top instead of centered)
|
|
2874
|
+
const centerX = width / 2;
|
|
2875
|
+
const centerY = height * 0.35;
|
|
2876
|
+
|
|
2877
|
+
// Draw title
|
|
2878
|
+
ctx.fillStyle = '#ffffff';
|
|
2879
|
+
ctx.font = 'bold 72px Arial, sans-serif';
|
|
2880
|
+
ctx.textAlign = 'center';
|
|
2881
|
+
ctx.textBaseline = 'middle';
|
|
2882
|
+
|
|
2883
|
+
// Add text shadow for better visibility
|
|
2884
|
+
ctx.shadowColor = 'rgba(0, 0, 0, 0.8)';
|
|
2885
|
+
ctx.shadowBlur = 4;
|
|
2886
|
+
ctx.shadowOffsetX = 2;
|
|
2887
|
+
ctx.shadowOffsetY = 2;
|
|
2888
|
+
|
|
2889
|
+
ctx.fillText(title, centerX, centerY - 50);
|
|
2890
|
+
|
|
2891
|
+
// Draw artist and singer on same line
|
|
2892
|
+
ctx.font = '48px Arial, sans-serif';
|
|
2893
|
+
const artistY = centerY + 50;
|
|
2894
|
+
|
|
2895
|
+
if (requester && requester.toUpperCase() !== 'KJ') {
|
|
2896
|
+
// Measure artist text to position singer to the right
|
|
2897
|
+
const artistText = artist;
|
|
2898
|
+
const singerText = ` - ${requester}`;
|
|
2899
|
+
|
|
2900
|
+
ctx.fillStyle = '#cccccc';
|
|
2901
|
+
const artistWidth = ctx.measureText(artistText).width;
|
|
2902
|
+
const singerWidth = ctx.measureText(singerText).width;
|
|
2903
|
+
const totalWidth = artistWidth + singerWidth;
|
|
2904
|
+
|
|
2905
|
+
// Draw centered as a group
|
|
2906
|
+
const startX = centerX - totalWidth / 2;
|
|
2907
|
+
|
|
2908
|
+
ctx.fillText(artistText, startX, artistY);
|
|
2909
|
+
|
|
2910
|
+
// Draw singer in yellow
|
|
2911
|
+
ctx.fillStyle = '#FCD34D'; // yellow-300 for non-KJ singers
|
|
2912
|
+
ctx.fillText(singerText, startX + artistWidth, artistY);
|
|
2913
|
+
} else {
|
|
2914
|
+
// Just artist, centered
|
|
2915
|
+
ctx.fillStyle = '#cccccc';
|
|
2916
|
+
ctx.fillText(artist, centerX, artistY);
|
|
2917
|
+
}
|
|
2918
|
+
|
|
2919
|
+
ctx.restore();
|
|
2920
|
+
}
|
|
2921
|
+
|
|
2922
|
+
destroy() {
|
|
2923
|
+
if (this.animationFrame) {
|
|
2924
|
+
cancelAnimationFrame(this.animationFrame);
|
|
2925
|
+
}
|
|
2926
|
+
|
|
2927
|
+
// Clean up resize listener and observer
|
|
2928
|
+
if (this.resizeHandler) {
|
|
2929
|
+
window.removeEventListener('resize', this.resizeHandler);
|
|
2930
|
+
}
|
|
2931
|
+
if (this.resizeObserver) {
|
|
2932
|
+
this.resizeObserver.disconnect();
|
|
2933
|
+
this.resizeObserver = null;
|
|
2934
|
+
}
|
|
2935
|
+
|
|
2936
|
+
// Destroy Butterchurn instance and ALL related components
|
|
2937
|
+
if (this.butterchurn && this.butterchurn.destroy) {
|
|
2938
|
+
this.butterchurn.destroy();
|
|
2939
|
+
}
|
|
2940
|
+
this.butterchurn = null;
|
|
2941
|
+
|
|
2942
|
+
// Disconnect and clean up all Butterchurn audio nodes
|
|
2943
|
+
if (this.butterchurnSourceNode) {
|
|
2944
|
+
this.butterchurnSourceNode.disconnect();
|
|
2945
|
+
this.butterchurnSourceNode = null;
|
|
2946
|
+
}
|
|
2947
|
+
|
|
2948
|
+
// Close Butterchurn audio context
|
|
2949
|
+
if (this.butterchurnAudioContext) {
|
|
2950
|
+
this.butterchurnAudioContext.close();
|
|
2951
|
+
this.butterchurnAudioContext = null;
|
|
2952
|
+
}
|
|
2953
|
+
|
|
2954
|
+
// Clear all Butterchurn-related properties
|
|
2955
|
+
this.butterchurnAnalyser = null;
|
|
2956
|
+
this.butterchurnVisualAnalyser = null;
|
|
2957
|
+
this.butterchurnFrequencyData = null;
|
|
2958
|
+
this.butterchurnAudioBuffer = null;
|
|
2959
|
+
this.effectsCanvas = null;
|
|
2960
|
+
|
|
2961
|
+
// Close waveform audio context
|
|
2962
|
+
if (this.audioContext) {
|
|
2963
|
+
this.audioContext.close();
|
|
2964
|
+
this.audioContext = null;
|
|
2965
|
+
}
|
|
2966
|
+
|
|
2967
|
+
// Stop microphone capture properly
|
|
2968
|
+
this.stopMicrophoneCapture();
|
|
2969
|
+
|
|
2970
|
+
// Clear WebGL context if available
|
|
2971
|
+
if (this.gl) {
|
|
2972
|
+
this.gl = null;
|
|
2973
|
+
}
|
|
2974
|
+
}
|
|
2975
|
+
|
|
2976
|
+
/**
 * Full teardown-and-rebuild of the renderer while preserving the user's
 * waveform preferences. Used when the canvas or audio pipeline must be
 * reconstructed from scratch. The order below matters: preferences are
 * snapshotted before destroy(), state is reset, then setup runs, and the
 * microphone is restarted last (after a settling delay).
 */
reinitialize() {
  // Store current preferences (destroy()/reset below would otherwise lose them)
  const currentPreferences = { ...this.waveformPreferences };

  // Destroy everything
  this.destroy();

  // Reset state variables
  this.lyrics = null;
  this.songDuration = 0;
  this.currentTime = 0;
  this.isPlaying = false;
  this.cachedCurrentLine = -1;
  this.lastTimeForLineCalculation = -1;
  this.backupAnimations.clear();
  this.lastActiveSinger = null; // Reset for backup:PA feature

  // Restore preferences
  this.waveformPreferences = currentPreferences;

  // Reinitialize everything
  this.setupCanvas();
  this.setupAdvancedVisualizations();
  this.setupResponsiveCanvas();
  this.startAnimation();

  // Restart microphone if it was enabled (with delay to prevent issues)
  if (this.waveformPreferences.enableMic) {
    setTimeout(async () => {
      // Ensure the input device selection is properly restored before starting
      await this.ensureInputDeviceSelection();
      this.startMicrophoneCapture();
    }, 200); // Extra delay after reinitialize to let everything settle
  }
}
|
|
3011
|
+
|
|
3012
|
+
async ensureInputDeviceSelection() {
|
|
3013
|
+
try {
|
|
3014
|
+
// Load saved input device preference from settings API
|
|
3015
|
+
if (window.kaiAPI.settings) {
|
|
3016
|
+
const prefs = await window.kaiAPI.settings.get('devicePreferences');
|
|
3017
|
+
if (prefs && prefs.input && prefs.input.id) {
|
|
3018
|
+
this.inputDevice = prefs.input.id;
|
|
3019
|
+
}
|
|
3020
|
+
}
|
|
3021
|
+
} catch (error) {
|
|
3022
|
+
console.warn('Failed to load input device preference:', error);
|
|
3023
|
+
}
|
|
3024
|
+
}
|
|
3025
|
+
|
|
3026
|
+
/**
 * Toggle the upcoming-lyrics preview on or off.
 *
 * @param {boolean} enabled - Whether to show the upcoming-lyrics preview.
 */
setShowUpcomingLyrics(enabled) {
  this.waveformPreferences.showUpcomingLyrics = enabled;
}
|
|
3029
|
+
|
|
3030
|
+
/**
 * Draw the next main-singer lyric as a dimmed preview at startY.
 *
 * Maintains this.lockedUpcomingIndex so the previewed line stays stable:
 * once a line is chosen it remains the preview until it actually becomes
 * active (and is not mid-transition), preventing flicker between candidate
 * lines. Interacts closely with this.lyricTransitions — a line that is
 * animating into place is never drawn here, and no new preview is chosen
 * while any transition is in flight.
 *
 * @param {number} canvasWidth - Current canvas width in pixels.
 * @param {number} canvasHeight - Current canvas height in pixels (unused here).
 * @param {number} startY - Y coordinate where the preview is drawn.
 */
drawUpcomingLyrics(canvasWidth, canvasHeight, startY) {
  if (!this.lyrics) return;

  // Use interpolated time for precise upcoming lyric detection
  const now = this.getInterpolatedTime();
  const maxTimeAhead = 5.0; // Only show lyrics up to 5 seconds ahead

  // Check if current locked upcoming has become active - only then clear it
  if (this.lockedUpcomingIndex !== null && this.lockedUpcomingIndex !== undefined) {
    const lockedLine = this.lyrics[this.lockedUpcomingIndex];

    // Don't clear if this lyric is currently transitioning (prevents second lyric from flashing)
    const isTransitioning = this.lyricTransitions.has(this.lockedUpcomingIndex);

    // Only clear if the line doesn't exist or has actually become active AND is not transitioning
    if (!lockedLine || (now >= lockedLine.startTime && !isTransitioning)) {
      this.lockedUpcomingIndex = null;
    }
  }

  // If no locked upcoming or it became active, find the next one
  // BUT don't search if there are any transitioning lyrics (wait for them to finish)
  if (
    (this.lockedUpcomingIndex === null || this.lockedUpcomingIndex === undefined) &&
    this.lyricTransitions.size === 0
  ) {
    // Find the next upcoming lyric that starts after now
    let nextUpcomingIndex = null;
    let closestStartTime = Infinity;

    for (let i = 0; i < this.lyrics.length; i++) {
      const line = this.lyrics[i];
      // Candidate must be an enabled main line starting within the window,
      // and earlier than any candidate found so far.
      if (
        !line.isDisabled &&
        !line.isBackup &&
        line.startTime > now &&
        line.startTime <= now + maxTimeAhead &&
        line.startTime < closestStartTime
      ) {
        nextUpcomingIndex = i;
        closestStartTime = line.startTime;
      }
    }

    this.lockedUpcomingIndex = nextUpcomingIndex;
  }

  // If still no upcoming, return
  if (this.lockedUpcomingIndex === null || this.lockedUpcomingIndex === undefined) return;

  const lockedLine = this.lyrics[this.lockedUpcomingIndex];
  if (!lockedLine) {
    this.lockedUpcomingIndex = null;
    return;
  }

  // Double-check it's not active (this shouldn't happen with the logic above)
  if (now >= lockedLine.startTime) {
    this.lockedUpcomingIndex = null;
    return;
  }

  // Don't draw if this lyric is currently transitioning/animating
  if (this.lyricTransitions.has(this.lockedUpcomingIndex)) {
    return;
  }

  // Draw the locked upcoming lyric in dimmed grey
  this.ctx.save();
  this.ctx.font = `${this.settings.fontSize}px ${this.settings.fontFamily}`;
  this.ctx.textAlign = 'center';
  this.ctx.fillStyle = '#999999';
  this.ctx.globalAlpha = 0.8;

  const currentY = startY;

  // Get text from line (handle different KAI formats)
  let text = '';
  if (lockedLine.text) {
    text = lockedLine.text;
  } else if (lockedLine.words && lockedLine.words.length > 0) {
    text = lockedLine.words.map((w) => w.text || w.word || w).join(' ');
  }

  // Get glow color for non-lead singers (so you know whose line is coming up)
  const glowColor = lockedLine.singer ? this.getSingerColor(lockedLine) : null;

  if (text) {
    this.drawWrappedText(text, canvasWidth / 2, currentY, canvasWidth * 0.9, glowColor);
  }

  this.ctx.restore();
}
|
|
3123
|
+
|
|
3124
|
+
drawWrappedText(text, x, y, maxWidth, glowColor = null) {
|
|
3125
|
+
const words = text.split(' ');
|
|
3126
|
+
let currentLine = '';
|
|
3127
|
+
let linesRendered = 0;
|
|
3128
|
+
const lineHeight = this.settings.fontSize * 1.2; // Match font size with some line spacing
|
|
3129
|
+
|
|
3130
|
+
for (let i = 0; i < words.length; i++) {
|
|
3131
|
+
const testLine = currentLine + (currentLine ? ' ' : '') + words[i];
|
|
3132
|
+
const testWidth = this.ctx.measureText(testLine).width;
|
|
3133
|
+
|
|
3134
|
+
if (testWidth > maxWidth && currentLine) {
|
|
3135
|
+
// Draw current line and start new line
|
|
3136
|
+
this.drawTextWithBackground(currentLine, x, y, glowColor);
|
|
3137
|
+
y += lineHeight;
|
|
3138
|
+
linesRendered++;
|
|
3139
|
+
currentLine = words[i];
|
|
3140
|
+
} else {
|
|
3141
|
+
currentLine = testLine;
|
|
3142
|
+
}
|
|
3143
|
+
}
|
|
3144
|
+
|
|
3145
|
+
// Draw the final line
|
|
3146
|
+
if (currentLine) {
|
|
3147
|
+
this.drawTextWithBackground(currentLine, x, y, glowColor);
|
|
3148
|
+
linesRendered++;
|
|
3149
|
+
}
|
|
3150
|
+
|
|
3151
|
+
return linesRendered;
|
|
3152
|
+
}
|
|
3153
|
+
|
|
3154
|
+
/**
 * Advance the slide-in transition state machine for one frame.
 *
 * Finds the single next upcoming main line within a 5-second window; when it
 * is 0.5s or less from becoming active, registers a transition that animates
 * it from where the preview was drawn up to the active-lyric position. Then
 * advances progress on all registered transitions and deletes finished ones.
 *
 * @param {Array} currentActiveLines - Currently active lines (unused here,
 *   kept for call-site symmetry with startTransitionAnimations).
 * @param {number} now - Current (interpolated) playback time in seconds.
 * @param {number} currentActiveEndY - Bottom Y of the active-lyric area, used
 *   as a fallback start position for the animation.
 */
updateLyricTransitions(currentActiveLines, now, currentActiveEndY) {
  // Get current upcoming lyrics (at most one — only the very next is tracked)
  const upcomingLines = [];
  if (this.lyrics) {
    for (let i = 0; i < this.lyrics.length; i++) {
      const line = this.lyrics[i];
      if (
        !line.isDisabled &&
        !line.isBackup &&
        line.startTime > now &&
        line.startTime <= now + 5.0
      ) {
        upcomingLines.push({ ...line, index: i });
        break; // Only track the very next one for transitions
      }
    }
  }

  // Check for lyrics that should start transitioning (0.5 seconds before they become active)
  for (const upcomingLine of upcomingLines) {
    const timeToStart = upcomingLine.startTime - now;
    if (timeToStart <= 0.5 && timeToStart > 0 && !this.lyricTransitions.has(upcomingLine.index)) {
      // Use the EXACT position where the upcoming line was just displayed,
      // so the animation starts seamlessly from the preview.
      let upcomingPosition;
      if (
        this.lastUpcomingDisplayY !== null &&
        this.lastUpcomingLineIndex === upcomingLine.index
      ) {
        upcomingPosition = this.lastUpcomingDisplayY;
      } else {
        // Fallback calculation when the preview position wasn't recorded
        upcomingPosition = currentActiveEndY + 50;
      }

      const activePosition = this.canvas.height / 2 - 180; // Where active is shown (higher up)

      this.lyricTransitions.set(upcomingLine.index, {
        startTime: now,
        duration: this.settings.lyricTransitionDuration,
        progress: 0,
        startY: upcomingPosition, // EXACT position where it was displayed
        endY: activePosition, // Higher on screen (lower Y value)
      });
    }
  }

  // Update existing transitions
  for (const [lineIndex, transition] of this.lyricTransitions.entries()) {
    const elapsed = now - transition.startTime;

    // Update progress (let transitions complete naturally)
    const newProgress = Math.min(1.0, elapsed / transition.duration);
    transition.progress = newProgress;

    // Remove completed transitions
    if (transition.progress >= 1.0) {
      this.lyricTransitions.delete(lineIndex);
    }
  }
}
|
|
3214
|
+
|
|
3215
|
+
/**
 * Start and advance the locked upcoming lyric's slide-in animation.
 *
 * When the locked upcoming line is within lyricTransitionStartBefore seconds
 * of becoming active, registers a transition from the preview position
 * (upcomingY) to the computed active position, and hides any active main
 * lines that would end mid-transition (to avoid overlap). Then advances all
 * transitions and prunes the hidden-line set.
 *
 * @param {Array} activeLines - Currently active lines, used to compute the
 *   active-lyric Y position (must match drawActiveLines' layout math).
 * @param {number} upcomingY - Y where the upcoming preview is drawn; the
 *   animation's start position.
 */
startTransitionAnimations(activeLines, upcomingY) {
  // Use interpolated time for smooth 60fps lyric slide animations
  const now = this.getInterpolatedTime();

  // Check if locked upcoming lyric should start animating
  if (this.lockedUpcomingIndex !== null && this.lockedUpcomingIndex !== undefined) {
    const upcomingLine = this.lyrics[this.lockedUpcomingIndex];
    if (upcomingLine) {
      const timeUntilActive = upcomingLine.startTime - now;

      // Start animation before the lyric becomes active
      if (timeUntilActive <= this.settings.lyricTransitionStartBefore && timeUntilActive > 0) {
        // Only start if not already animating
        if (!this.lyricTransitions.has(this.lockedUpcomingIndex)) {
          // Calculate active position (same as drawActiveLines)
          const canvasHeight = this.canvas.height;
          const lineSpacing = this.settings.lineHeight * 1.2;

          // Check if there are any current main lines (to calculate proper position)
          const currentMainLines = activeLines.filter((line) => !line.isBackup);
          const totalLines = Math.max(1, currentMainLines.length);
          const totalHeight = totalLines * lineSpacing;

          // This matches the exact calculation in drawActiveLines
          const activeY = canvasHeight / 2 - totalHeight / 2 + lineSpacing - 180;

          // Start the transition
          this.lyricTransitions.set(this.lockedUpcomingIndex, {
            startTime: now,
            duration: this.settings.lyricTransitionDuration,
            progress: 0,
            startY: upcomingY, // Where upcoming lyric is drawn (no offset)
            endY: activeY,
          });

          // Hide any currently active main lines that will overlap with this transition.
          // Check all currently active lines (not just the filtered activeLines array)
          for (let i = 0; i < this.lyrics.length; i++) {
            const line = this.lyrics[i];
            if (!line.isDisabled && !line.isBackup && i !== this.lockedUpcomingIndex) {
              // Check if this line is currently active
              if (now >= line.startTime && now <= line.endTime) {
                const timeUntilEnd = line.endTime - now;
                // If active line ends during the transition period, hide it immediately
                if (timeUntilEnd > 0 && timeUntilEnd <= this.settings.lyricTransitionDuration) {
                  this.hiddenDuringTransition.add(i);
                }
              }
            }
          }
        }
      }
    }
  }

  // Update existing transitions
  for (const [lineIndex, transition] of this.lyricTransitions.entries()) {
    const elapsed = now - transition.startTime;

    // Update progress
    transition.progress = Math.min(1.0, elapsed / transition.duration);

    // Remove completed transitions (let them finish naturally)
    if (transition.progress >= 1.0) {
      this.lyricTransitions.delete(lineIndex);
    }
  }

  // Clean up hidden lines that are no longer active
  for (const hiddenIndex of this.hiddenDuringTransition) {
    const line = this.lyrics[hiddenIndex];
    if (line && (now > line.endTime || now < line.startTime)) {
      this.hiddenDuringTransition.delete(hiddenIndex);
    }
  }
}
|
|
3291
|
+
|
|
3292
|
+
drawTransitioningLine(line, canvasWidth, transition) {
|
|
3293
|
+
// Simple linear interpolation for position - THIS IS THE ANIMATION
|
|
3294
|
+
const currentY =
|
|
3295
|
+
transition.startY + (transition.endY - transition.startY) * transition.progress;
|
|
3296
|
+
|
|
3297
|
+
// Get the singer's target color (instead of hardcoded blue)
|
|
3298
|
+
const targetColorHex = this.getSingerColor(line);
|
|
3299
|
+
const endColor = this.hexToRgb(targetColorHex);
|
|
3300
|
+
|
|
3301
|
+
// Interpolate color from upcoming grey to singer's active color
|
|
3302
|
+
const startColor = { r: 136, g: 136, b: 136 }; // #888888 (upcoming grey from settings)
|
|
3303
|
+
|
|
3304
|
+
const r = Math.round(startColor.r + (endColor.r - startColor.r) * transition.progress);
|
|
3305
|
+
const g = Math.round(startColor.g + (endColor.g - startColor.g) * transition.progress);
|
|
3306
|
+
const b = Math.round(startColor.b + (endColor.b - startColor.b) * transition.progress);
|
|
3307
|
+
|
|
3308
|
+
// Interpolate alpha
|
|
3309
|
+
const alpha = 0.8 + (1.0 - 0.8) * transition.progress;
|
|
3310
|
+
|
|
3311
|
+
// Set up context for animated line
|
|
3312
|
+
this.ctx.save();
|
|
3313
|
+
this.ctx.font = `${this.settings.fontSize}px ${this.settings.fontFamily}`;
|
|
3314
|
+
this.ctx.textAlign = 'center';
|
|
3315
|
+
this.ctx.fillStyle = `rgba(${r}, ${g}, ${b}, ${alpha})`;
|
|
3316
|
+
|
|
3317
|
+
// Get text from line
|
|
3318
|
+
let text = '';
|
|
3319
|
+
if (line.text) {
|
|
3320
|
+
text = line.text;
|
|
3321
|
+
} else if (line.words && line.words.length > 0) {
|
|
3322
|
+
text = line.words.map((w) => w.text || w.word || w).join(' ');
|
|
3323
|
+
}
|
|
3324
|
+
|
|
3325
|
+
// Get glow color for non-lead singers during transition
|
|
3326
|
+
const glowColor = line.singer ? targetColorHex : null;
|
|
3327
|
+
|
|
3328
|
+
if (text) {
|
|
3329
|
+
// Handle word wrapping during transition to prevent layout jumps
|
|
3330
|
+
const maxWidth = canvasWidth * 0.9;
|
|
3331
|
+
const words = text.split(' ');
|
|
3332
|
+
const lines = [];
|
|
3333
|
+
let currentLine = '';
|
|
3334
|
+
|
|
3335
|
+
for (const word of words) {
|
|
3336
|
+
const testLine = currentLine ? currentLine + ' ' + word : word;
|
|
3337
|
+
const testWidth = this.ctx.measureText(testLine).width;
|
|
3338
|
+
|
|
3339
|
+
if (testWidth <= maxWidth) {
|
|
3340
|
+
currentLine = testLine;
|
|
3341
|
+
} else {
|
|
3342
|
+
if (currentLine) {
|
|
3343
|
+
lines.push(currentLine);
|
|
3344
|
+
currentLine = word;
|
|
3345
|
+
} else {
|
|
3346
|
+
lines.push(word);
|
|
3347
|
+
}
|
|
3348
|
+
}
|
|
3349
|
+
}
|
|
3350
|
+
|
|
3351
|
+
if (currentLine) {
|
|
3352
|
+
lines.push(currentLine);
|
|
3353
|
+
}
|
|
3354
|
+
|
|
3355
|
+
// Draw each wrapped line (match drawSingleLine spacing)
|
|
3356
|
+
const lineHeight = this.settings.lineHeight * 0.8;
|
|
3357
|
+
lines.forEach((textLine, index) => {
|
|
3358
|
+
const adjustedY = currentY + index * lineHeight;
|
|
3359
|
+
this.drawTextWithBackground(textLine, canvasWidth / 2, adjustedY, glowColor);
|
|
3360
|
+
});
|
|
3361
|
+
}
|
|
3362
|
+
|
|
3363
|
+
this.ctx.restore();
|
|
3364
|
+
}
|
|
3365
|
+
|
|
3366
|
+
// Helper to convert hex color to RGB object
|
|
3367
|
+
hexToRgb(hex) {
|
|
3368
|
+
// Default to cyan if invalid
|
|
3369
|
+
const defaultColor = { r: 0, g: 191, b: 255 };
|
|
3370
|
+
if (!hex || typeof hex !== 'string') return defaultColor;
|
|
3371
|
+
|
|
3372
|
+
// Remove # if present
|
|
3373
|
+
hex = hex.replace(/^#/, '');
|
|
3374
|
+
|
|
3375
|
+
// Parse 3 or 6 digit hex
|
|
3376
|
+
if (hex.length === 3) {
|
|
3377
|
+
hex = hex[0] + hex[0] + hex[1] + hex[1] + hex[2] + hex[2];
|
|
3378
|
+
}
|
|
3379
|
+
|
|
3380
|
+
if (hex.length !== 6) return defaultColor;
|
|
3381
|
+
|
|
3382
|
+
const num = parseInt(hex, 16);
|
|
3383
|
+
return {
|
|
3384
|
+
r: (num >> 16) & 255,
|
|
3385
|
+
g: (num >> 8) & 255,
|
|
3386
|
+
b: num & 255,
|
|
3387
|
+
};
|
|
3388
|
+
}
|
|
3389
|
+
}
|
|
3390
|
+
|
|
3391
|
+
// Export removed - KaraokeRenderer is used by PlayerController
|
|
3392
|
+
// No longer attached to window global
|