create-definedmotion 0.2.0 → 0.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +2 -2
- package/template/src/assets/audio/testing_shadow_glow_song.mp3 +0 -0
- package/template/src/assets/for_tests/svg/gravity_text.svg +38 -0
- package/template/src/assets/for_tests/svg/grip_figure.svg +28 -0
- package/template/src/entry.ts +1 -1
- package/template/src/example_scenes/dependencyScene.ts +2 -4
- package/template/src/example_scenes/fourierSeriesScene.ts +7 -8
- package/template/src/example_scenes/keyboardScene.ts +3 -5
- package/template/src/example_scenes/latex_text_transitions_scene.ts +146 -0
- package/template/src/example_scenes/tests/animations/camera_movements/test_2d_camera_centers_labels.ts +53 -0
- package/template/src/example_scenes/tests/animations/camera_movements/test_2d_camera_hits_markers.ts +40 -0
- package/template/src/example_scenes/tests/animations/camera_movements/test_camera_rotate_quaternion.ts +17 -0
- package/template/src/example_scenes/tests/animations/camera_movements/test_camera_waypoints_sequential.ts +29 -0
- package/template/src/example_scenes/tests/animations/camera_movements/test_fly_camera_waypoints_verifiable.ts +87 -0
- package/template/src/example_scenes/tests/animations/camera_movements/test_zoom_perspective_sequential.ts +17 -0
- package/template/src/example_scenes/tests/animations/latex/test_latex_blue_particle_transition.ts +82 -0
- package/template/src/example_scenes/tests/animations/latex/test_latex_highlight_animation.ts +64 -0
- package/template/src/example_scenes/tests/animations/latex/test_latex_mark_animation.ts +42 -0
- package/template/src/example_scenes/tests/animations/latex/test_latex_particle_transition.ts +48 -0
- package/template/src/example_scenes/tests/animations/latex/test_latex_particle_transition_complex.ts +65 -0
- package/template/src/example_scenes/tests/animations/latex/test_latex_particle_transition_super_complex.ts +86 -0
- package/template/src/example_scenes/tests/animations/latex/test_with_environment_latex_particle_transition.ts +80 -0
- package/template/src/example_scenes/tests/animations/latex/test_write_latex_animation.ts +28 -0
- package/template/src/example_scenes/tests/animations/latex/test_write_latex_animation_2.ts +34 -0
- package/template/src/example_scenes/tests/animations/latex/test_write_latex_animation_3.ts +34 -0
- package/template/src/example_scenes/tests/audio/test_long_audio.ts +11 -0
- package/template/src/example_scenes/tests/audio/test_many_short_sounds.ts +50 -0
- package/template/src/example_scenes/tests/environment/test_hdri_performance.ts +14 -0
- package/template/src/example_scenes/tests/svg/test_basic_latex_query.ts +59 -0
- package/template/src/example_scenes/tests/svg/test_basic_svg.ts +11 -0
- package/template/src/example_scenes/tests/svg/test_colored_latex_to_svg.ts +42 -0
- package/template/src/example_scenes/tests/svg/test_complex_latex_to_svg.ts +22 -0
- package/template/src/example_scenes/tests/svg/test_latex_to_svg.ts +17 -0
- package/template/src/example_scenes/tests/svg/test_material_on_latex.ts +43 -0
- package/template/src/example_scenes/tests/svg/test_query_latex_variables.ts +66 -0
- package/template/src/example_scenes/tests/svg/test_regular_text_latex.ts +21 -0
- package/template/src/example_scenes/tests/svg/test_super_complex_latex_to_svg.ts +98 -0
- package/template/src/example_scenes/tests/svg/test_transition_svgs.ts +33 -0
- package/template/src/example_scenes/tests/svg/test_update_svg_object.ts +19 -0
- package/template/src/example_scenes/tests/svg/test_yellow_grip_symbol_svg.ts +11 -0
- package/template/src/example_scenes/tutorials/medium1.ts +3 -5
- package/template/src/example_scenes/vectorField.ts +2 -4
- package/template/src/example_scenes/visulizingFunctions.ts +3 -5
- package/template/src/main/rendering.ts +38 -21
- package/template/src/renderer/src/App.svelte +40 -12
- package/template/src/renderer/src/lib/animation/animations.ts +141 -88
- package/template/src/renderer/src/lib/animation/captureCanvas.ts +1 -15
- package/template/src/renderer/src/lib/animation/latexMarkAndHighlight.ts +349 -0
- package/template/src/renderer/src/lib/animation/latexTransitionsAndWrite.ts +558 -0
- package/template/src/renderer/src/lib/audio/manager.ts +185 -0
- package/template/src/renderer/src/lib/rendering/hdri.ts +273 -0
- package/template/src/renderer/src/lib/rendering/lighting3d.ts +0 -105
- package/template/src/renderer/src/lib/rendering/setup.ts +7 -1
- package/template/src/renderer/src/lib/rendering/svg/latexSVGQueries.ts +44 -0
- package/template/src/renderer/src/lib/rendering/svg/latexToSVG.ts +132 -0
- package/template/src/renderer/src/lib/rendering/svg/svgObjectHelpers.ts +59 -0
- package/template/src/renderer/src/lib/rendering/svg/svgRendering.ts +120 -0
- package/template/src/renderer/src/lib/scene/sceneClass.ts +93 -31
- package/template/src/renderer/src/lib/audio/loader.ts +0 -104
- package/template/src/renderer/src/lib/rendering/materials.ts +0 -6
- package/template/src/renderer/src/lib/rendering/protocols.ts +0 -21
- package/template/src/renderer/src/lib/rendering/svg/drawing.ts +0 -213
- package/template/src/renderer/src/lib/rendering/svg/parsing.ts +0 -717
- package/template/src/renderer/src/lib/rendering/svg/rastered.ts +0 -42
- package/template/src/renderer/src/lib/rendering/svgObjects.ts +0 -1137
|
@@ -0,0 +1,185 @@
|
|
|
1
|
+
// audio/loader.ts
|
|
2
|
+
// Describes one audio clip scheduled on the scene timeline.
export interface AudioInScene {
  // Path/URL of the audio file; also the lookup key into the decoded cache.
  audioPath: string
  // Gain applied when the clip plays (1 = unity).
  volume: number
  // Timeline tick (frame) at which the clip starts playing.
  atFrame: number
}
|
|
7
|
+
|
|
8
|
+
// Decoded audio buffers keyed by path; populated by loadAllAudio().
let loadedAudio = new Map<string, AudioBuffer>()
// Paths queued for loading (Set deduplicates repeat registrations).
let registeredAudios = new Set<string>()

// A single shared AudioContext (webkit fallback kept for older Safari).
const audioContext = new (window.AudioContext || (window as any).webkitAudioContext)()

// ---- Internal tracking of playing sounds ----
type PlayingId = string

// Book-keeping for one currently playing (or paused) sound instance.
interface PlayingSound {
  id: PlayingId
  path: string
  // Active source node, or null while the sound is paused.
  source: AudioBufferSourceNode | null
  gain: GainNode
  volume: number
  // timeline info
  startTick: number // tick in your scene when this sound originally "started"
  startedCtxTime: number // audioContext.currentTime when we started this source
  offsetAtStartSec: number // offset in the buffer where this source began
  durationSec: number
}

// Every sound that is currently playing or paused.
let active: PlayingSound[] = []
// Random base-36 id; collisions are unlikely (not cryptographically unique).
const newId = () => Math.random().toString(36).slice(2)
|
|
32
|
+
|
|
33
|
+
// ----------------- Public API -----------------
|
|
34
|
+
|
|
35
|
+
export const registerAudio = (audioPath: string) => {
|
|
36
|
+
registeredAudios.add(audioPath)
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
export const loadAllAudio = async (): Promise<void> => {
|
|
40
|
+
const tasks: Promise<void>[] = []
|
|
41
|
+
for (const path of registeredAudios) {
|
|
42
|
+
if (!loadedAudio.has(path)) {
|
|
43
|
+
const p = fetch(path)
|
|
44
|
+
.then(r => {
|
|
45
|
+
if (!r.ok) throw new Error(`Failed to load ${path}: ${r.status} ${r.statusText}`)
|
|
46
|
+
return r.arrayBuffer()
|
|
47
|
+
})
|
|
48
|
+
.then(buf => audioContext.decodeAudioData(buf))
|
|
49
|
+
.then(decoded => { loadedAudio.set(path, decoded) })
|
|
50
|
+
.catch(err => console.error('Audio load error', path, err))
|
|
51
|
+
tasks.push(p)
|
|
52
|
+
}
|
|
53
|
+
}
|
|
54
|
+
await Promise.all(tasks)
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
export const cleanupAudioData = async (): Promise<void> => {
|
|
58
|
+
stopAll()
|
|
59
|
+
loadedAudio.clear()
|
|
60
|
+
registeredAudios.clear()
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
// Fire immediately at offset 0 (used when the “tick” with a play event is reached)
|
|
64
|
+
export const playAudio = (audioPath: string, volume: number = 1): PlayingId | null => {
|
|
65
|
+
return startBufferAtOffset(audioPath, 0, volume, /*startTick*/ 0)
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
// Called when the timeline jumps to a specific tick.
|
|
69
|
+
// Rebuilds audio as if we had been playing since their respective start ticks.
|
|
70
|
+
export const seekToTick = (
|
|
71
|
+
tick: number,
|
|
72
|
+
planned: Map<number, AudioInScene[]>,
|
|
73
|
+
timelineFPS: number
|
|
74
|
+
) => {
|
|
75
|
+
stopAll()
|
|
76
|
+
|
|
77
|
+
for (const [startTick, list] of planned.entries()) {
|
|
78
|
+
if (startTick > tick) continue
|
|
79
|
+
for (const item of list) {
|
|
80
|
+
const buf = loadedAudio.get(item.audioPath)
|
|
81
|
+
if (!buf) continue
|
|
82
|
+
const elapsedTicks = tick - startTick
|
|
83
|
+
const offsetSec = Math.max(0, elapsedTicks / timelineFPS)
|
|
84
|
+
if (offsetSec < buf.duration) {
|
|
85
|
+
startBufferAtOffset(item.audioPath, offsetSec, item.volume, startTick)
|
|
86
|
+
}
|
|
87
|
+
}
|
|
88
|
+
}
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
// Pause = capture offsets, stop sources; Resume = recreate new sources at captured offsets
|
|
92
|
+
export const pauseAll = () => {
|
|
93
|
+
// capture and stop
|
|
94
|
+
for (const p of active) {
|
|
95
|
+
if (!p.source) continue
|
|
96
|
+
const playedSec = audioContext.currentTime - p.startedCtxTime
|
|
97
|
+
const accumulated = p.offsetAtStartSec + playedSec
|
|
98
|
+
// replace with a paused placeholder (no source)
|
|
99
|
+
try { p.source.stop(0) } catch {}
|
|
100
|
+
p.source.disconnect()
|
|
101
|
+
p.source = null
|
|
102
|
+
p.offsetAtStartSec = accumulated
|
|
103
|
+
}
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
export const resumeAll = () => {
|
|
107
|
+
for (const p of active) {
|
|
108
|
+
if (p.source) continue
|
|
109
|
+
// restart from stored offset
|
|
110
|
+
rearmSource(p, p.offsetAtStartSec)
|
|
111
|
+
}
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
export const stopAll = () => {
|
|
115
|
+
for (const p of active) {
|
|
116
|
+
try { p.source?.stop(0) } catch {}
|
|
117
|
+
try { p.source?.disconnect() } catch {}
|
|
118
|
+
try { p.gain.disconnect() } catch {}
|
|
119
|
+
}
|
|
120
|
+
active = []
|
|
121
|
+
}
|
|
122
|
+
|
|
123
|
+
// -------- helpers --------
|
|
124
|
+
|
|
125
|
+
function startBufferAtOffset(
|
|
126
|
+
audioPath: string,
|
|
127
|
+
offsetSec: number,
|
|
128
|
+
volume: number,
|
|
129
|
+
startTick: number
|
|
130
|
+
): PlayingId | null {
|
|
131
|
+
const buf = loadedAudio.get(audioPath)
|
|
132
|
+
if (!buf) {
|
|
133
|
+
console.warn(`Audio not loaded: ${audioPath}`)
|
|
134
|
+
return null
|
|
135
|
+
}
|
|
136
|
+
const id = newId()
|
|
137
|
+
const gain = audioContext.createGain()
|
|
138
|
+
gain.gain.value = volume
|
|
139
|
+
gain.connect(audioContext.destination)
|
|
140
|
+
|
|
141
|
+
const node = audioContext.createBufferSource()
|
|
142
|
+
node.buffer = buf
|
|
143
|
+
node.connect(gain)
|
|
144
|
+
|
|
145
|
+
const startedCtxTime = audioContext.currentTime
|
|
146
|
+
node.start(0, offsetSec)
|
|
147
|
+
|
|
148
|
+
const playing: PlayingSound = {
|
|
149
|
+
id,
|
|
150
|
+
path: audioPath,
|
|
151
|
+
source: node,
|
|
152
|
+
gain,
|
|
153
|
+
volume,
|
|
154
|
+
startTick,
|
|
155
|
+
startedCtxTime,
|
|
156
|
+
offsetAtStartSec: offsetSec,
|
|
157
|
+
durationSec: buf.duration
|
|
158
|
+
}
|
|
159
|
+
|
|
160
|
+
// remove from active when ends
|
|
161
|
+
node.onended = () => {
|
|
162
|
+
active = active.filter(a => a.id !== id)
|
|
163
|
+
try { node.disconnect() } catch {}
|
|
164
|
+
try { gain.disconnect() } catch {}
|
|
165
|
+
}
|
|
166
|
+
|
|
167
|
+
active.push(playing)
|
|
168
|
+
return id
|
|
169
|
+
}
|
|
170
|
+
|
|
171
|
+
function rearmSource(p: PlayingSound, offsetSec: number) {
|
|
172
|
+
const buf = loadedAudio.get(p.path)
|
|
173
|
+
if (!buf) return
|
|
174
|
+
const node = audioContext.createBufferSource()
|
|
175
|
+
node.buffer = buf
|
|
176
|
+
node.connect(p.gain)
|
|
177
|
+
p.startedCtxTime = audioContext.currentTime
|
|
178
|
+
p.offsetAtStartSec = offsetSec
|
|
179
|
+
p.source = node
|
|
180
|
+
node.onended = () => {
|
|
181
|
+
active = active.filter(a => a.id !== p.id)
|
|
182
|
+
try { node.disconnect() } catch {}
|
|
183
|
+
}
|
|
184
|
+
node.start(0, offsetSec)
|
|
185
|
+
}
|
|
@@ -0,0 +1,273 @@
|
|
|
1
|
+
// src/renderer/lib/rendering/hdri.ts
|
|
2
|
+
|
|
3
|
+
import * as THREE from 'three';
|
|
4
|
+
import { RGBELoader } from 'three/examples/jsm/loaders/RGBELoader.js';
|
|
5
|
+
import { AnimatedScene } from '../scene/sceneClass';
|
|
6
|
+
|
|
7
|
+
import _photoStudio1 from '$assets/hdri/photo-studio1.hdr?url';
|
|
8
|
+
import _photoStudio2 from '$assets/hdri/photo-studio2.hdr?url';
|
|
9
|
+
import _photoStudio3 from '$assets/hdri/photo-studio3.hdr?url';
|
|
10
|
+
import _outdoor1 from '$assets/hdri/outdoor1.hdr?url';
|
|
11
|
+
import _indoor1 from '$assets/hdri/indoor1.hdr?url';
|
|
12
|
+
import _metro1 from '$assets/hdri/metro1.hdr?url';
|
|
13
|
+
|
|
14
|
+
import vert_blur_hdri from '../shaders/hdri_blur/vert.glsl?raw';
|
|
15
|
+
import frag_blur_hdri from '../shaders/hdri_blur/frag.glsl?raw';
|
|
16
|
+
|
|
17
|
+
// ---------------------------------------------------------------------------
|
|
18
|
+
// Public types & enums
|
|
19
|
+
// ---------------------------------------------------------------------------
|
|
20
|
+
|
|
21
|
+
// Bundled HDRI environment maps. Each member's value is the bundler-resolved
// asset URL; `as any` coerces it because string enum members must otherwise
// be literal strings.
export enum HDRIs {
  photoStudio1 = _photoStudio1 as any,
  photoStudio2 = _photoStudio2 as any,
  photoStudio3 = _photoStudio3 as any,
  outdoor1 = _outdoor1 as any,
  indoor1 = _indoor1 as any,
  metro1 = _metro1 as any
}
|
|
29
|
+
|
|
30
|
+
/**
 * Lightweight description of a loaded HDRI.
 * The actual heavy GPU setup happens when you call addHDRI().
 */
export interface HDRIData {
  // Decoded equirectangular HDR texture (FloatType).
  texture: THREE.DataTexture;
  // Blur strength to bake into the background texture (0 = none).
  blurAmount: number;
}
|
|
38
|
+
|
|
39
|
+
// ---------------------------------------------------------------------------
|
|
40
|
+
// Internal caches
|
|
41
|
+
// ---------------------------------------------------------------------------
|
|
42
|
+
|
|
43
|
+
// Reused loader (FloatType keeps full HDR precision when decoding).
const rgbeLoader = new RGBELoader().setDataType(THREE.FloatType);

// path -> DataTexture promise (so we only load + decode once per file)
const sourceCache = new Map<string, Promise<THREE.DataTexture>>();

// (rendererId|textureUUID) -> envMap (PMREM output)
const envMapCache = new Map<string, THREE.Texture>();

// (rendererId|textureUUID|blur|opacity) -> blurred background texture
const blurCache = new Map<string, THREE.Texture>();

// Monotonic id handed out to renderers on first use (see getRendererId).
let rendererIdCounter = 1;
|
|
56
|
+
function getRendererId(renderer: THREE.WebGLRenderer): number {
|
|
57
|
+
const r = renderer as any;
|
|
58
|
+
if (!r.__dmHdriId) {
|
|
59
|
+
r.__dmHdriId = rendererIdCounter++;
|
|
60
|
+
}
|
|
61
|
+
return r.__dmHdriId;
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
function envKey(renderer: THREE.WebGLRenderer, tex: THREE.Texture): string {
|
|
65
|
+
return `${getRendererId(renderer)}|${tex.uuid}`;
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
function blurKey(
|
|
69
|
+
renderer: THREE.WebGLRenderer,
|
|
70
|
+
tex: THREE.Texture,
|
|
71
|
+
blurAmount: number,
|
|
72
|
+
opacity: number
|
|
73
|
+
): string {
|
|
74
|
+
return `${getRendererId(renderer)}|${tex.uuid}|${blurAmount.toFixed(3)}|${opacity.toFixed(3)}`;
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
// ---------------------------------------------------------------------------
|
|
78
|
+
// 1) Load HDRI file at module scope (no renderer needed)
|
|
79
|
+
// ---------------------------------------------------------------------------
|
|
80
|
+
|
|
81
|
+
/**
|
|
82
|
+
* Load and decode the HDRI once (per path). Intended for top-level usage:
|
|
83
|
+
*
|
|
84
|
+
* const hdriData = await loadHDRIData(HDRIs.outdoor1, 2, 1);
|
|
85
|
+
*
|
|
86
|
+
* Then you can reuse hdriData across many AnimatedScene instances.
|
|
87
|
+
*/
|
|
88
|
+
export const loadHDRIData = async (
|
|
89
|
+
path: HDRIs | string,
|
|
90
|
+
blurAmount: number,
|
|
91
|
+
): Promise<HDRIData> => {
|
|
92
|
+
const key = String(path);
|
|
93
|
+
|
|
94
|
+
let texturePromise = sourceCache.get(key);
|
|
95
|
+
if (!texturePromise) {
|
|
96
|
+
texturePromise = new Promise<THREE.DataTexture>((resolve, reject) => {
|
|
97
|
+
rgbeLoader.load(
|
|
98
|
+
key as any,
|
|
99
|
+
(texture) => {
|
|
100
|
+
// Equirectangular HDR setup
|
|
101
|
+
texture.mapping = THREE.EquirectangularReflectionMapping;
|
|
102
|
+
texture.magFilter = THREE.LinearFilter;
|
|
103
|
+
texture.minFilter = THREE.LinearFilter;
|
|
104
|
+
texture.generateMipmaps = false;
|
|
105
|
+
resolve(texture);
|
|
106
|
+
},
|
|
107
|
+
undefined,
|
|
108
|
+
(error) => {
|
|
109
|
+
console.error('Error loading HDRI:', error);
|
|
110
|
+
reject(error);
|
|
111
|
+
}
|
|
112
|
+
);
|
|
113
|
+
});
|
|
114
|
+
sourceCache.set(key, texturePromise);
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
const texture = await texturePromise;
|
|
118
|
+
|
|
119
|
+
return {
|
|
120
|
+
texture,
|
|
121
|
+
blurAmount,
|
|
122
|
+
};
|
|
123
|
+
};
|
|
124
|
+
|
|
125
|
+
// ---------------------------------------------------------------------------
|
|
126
|
+
// 2) One-off blur pass: render full-screen quad into a render target
|
|
127
|
+
// ---------------------------------------------------------------------------
|
|
128
|
+
|
|
129
|
+
function blurHDRITexture(
|
|
130
|
+
renderer: THREE.WebGLRenderer,
|
|
131
|
+
source: THREE.DataTexture,
|
|
132
|
+
blurAmount: number,
|
|
133
|
+
opacity: number
|
|
134
|
+
): THREE.Texture {
|
|
135
|
+
if (blurAmount <= 0.0001) {
|
|
136
|
+
// No blur required – reuse the original texture directly.
|
|
137
|
+
return source;
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
const width = source.image.width;
|
|
141
|
+
const height = source.image.height;
|
|
142
|
+
|
|
143
|
+
const target = new THREE.WebGLRenderTarget(width, height, {
|
|
144
|
+
type: THREE.FloatType,
|
|
145
|
+
depthBuffer: false,
|
|
146
|
+
stencilBuffer: false,
|
|
147
|
+
magFilter: THREE.LinearFilter,
|
|
148
|
+
minFilter: THREE.LinearFilter
|
|
149
|
+
});
|
|
150
|
+
|
|
151
|
+
const scene = new THREE.Scene();
|
|
152
|
+
const camera = new THREE.OrthographicCamera(-1, 1, 1, -1, 0, 1);
|
|
153
|
+
|
|
154
|
+
const material = new THREE.ShaderMaterial({
|
|
155
|
+
uniforms: {
|
|
156
|
+
uTexture: { value: source },
|
|
157
|
+
uBlurAmount: { value: blurAmount },
|
|
158
|
+
uTextureSize: { value: new THREE.Vector2(width, height) },
|
|
159
|
+
sigma: { value: 3.0 },
|
|
160
|
+
opacity: { value: opacity },
|
|
161
|
+
uSaturation: { value: 1.0 } // kept for future color-tweak usage
|
|
162
|
+
},
|
|
163
|
+
vertexShader: vert_blur_hdri,
|
|
164
|
+
fragmentShader: frag_blur_hdri,
|
|
165
|
+
depthTest: false,
|
|
166
|
+
depthWrite: false
|
|
167
|
+
});
|
|
168
|
+
|
|
169
|
+
const quad = new THREE.Mesh(new THREE.PlaneGeometry(2, 2), material);
|
|
170
|
+
scene.add(quad);
|
|
171
|
+
|
|
172
|
+
const prevTarget = renderer.getRenderTarget();
|
|
173
|
+
|
|
174
|
+
renderer.setRenderTarget(target);
|
|
175
|
+
renderer.render(scene, camera);
|
|
176
|
+
renderer.setRenderTarget(prevTarget);
|
|
177
|
+
|
|
178
|
+
quad.geometry.dispose();
|
|
179
|
+
material.dispose();
|
|
180
|
+
|
|
181
|
+
// The render target holds the baked blur texture;
|
|
182
|
+
// we keep the target alive as long as the texture is in use.
|
|
183
|
+
return target.texture;
|
|
184
|
+
}
|
|
185
|
+
|
|
186
|
+
// ---------------------------------------------------------------------------
|
|
187
|
+
// 3) Cached envMap + blurred background texture getters
|
|
188
|
+
// ---------------------------------------------------------------------------
|
|
189
|
+
|
|
190
|
+
function getEnvMap(renderer: THREE.WebGLRenderer, tex: THREE.DataTexture): THREE.Texture {
|
|
191
|
+
const key = envKey(renderer, tex);
|
|
192
|
+
const cached = envMapCache.get(key);
|
|
193
|
+
if (cached) return cached;
|
|
194
|
+
|
|
195
|
+
const pmremGenerator = new THREE.PMREMGenerator(renderer);
|
|
196
|
+
pmremGenerator.compileEquirectangularShader();
|
|
197
|
+
const envMap = pmremGenerator.fromEquirectangular(tex).texture;
|
|
198
|
+
pmremGenerator.dispose();
|
|
199
|
+
|
|
200
|
+
envMapCache.set(key, envMap);
|
|
201
|
+
return envMap;
|
|
202
|
+
}
|
|
203
|
+
|
|
204
|
+
function getBlurredBackgroundTexture(
|
|
205
|
+
renderer: THREE.WebGLRenderer,
|
|
206
|
+
hdriData: HDRIData,
|
|
207
|
+
opacity: number
|
|
208
|
+
): THREE.Texture {
|
|
209
|
+
const { texture, blurAmount } = hdriData;
|
|
210
|
+
const key = blurKey(renderer, texture, blurAmount, opacity);
|
|
211
|
+
const cached = blurCache.get(key);
|
|
212
|
+
if (cached) return cached;
|
|
213
|
+
|
|
214
|
+
const blurred = blurHDRITexture(renderer, texture, blurAmount, opacity);
|
|
215
|
+
blurCache.set(key, blurred);
|
|
216
|
+
return blurred;
|
|
217
|
+
}
|
|
218
|
+
|
|
219
|
+
// ---------------------------------------------------------------------------
|
|
220
|
+
// 4) Public: attach HDRI to an AnimatedScene
|
|
221
|
+
// ---------------------------------------------------------------------------
|
|
222
|
+
|
|
223
|
+
/**
|
|
224
|
+
* Apply a previously loaded HDRI to the scene:
|
|
225
|
+
* - Sets scene.environment using a cached PMREM envMap.
|
|
226
|
+
* - Creates a big background sphere using a baked, blurred texture.
|
|
227
|
+
*
|
|
228
|
+
* The heavy work (file IO, decode, blur) is cached and reused.
|
|
229
|
+
*/
|
|
230
|
+
export async function addHDRI(
|
|
231
|
+
scene: AnimatedScene,
|
|
232
|
+
hdriData: HDRIData,
|
|
233
|
+
lightingIntensity = 1.0,
|
|
234
|
+
opacity: number = 1.0,
|
|
235
|
+
): Promise<THREE.Mesh> {
|
|
236
|
+
const { renderer } = scene;
|
|
237
|
+
|
|
238
|
+
// Cached per renderer + texture:
|
|
239
|
+
const envMap = getEnvMap(renderer, hdriData.texture);
|
|
240
|
+
|
|
241
|
+
// Cached per renderer + texture + blurAmount + opacity:
|
|
242
|
+
const backgroundTexture = getBlurredBackgroundTexture(renderer, hdriData, opacity);
|
|
243
|
+
|
|
244
|
+
// Background sphere
|
|
245
|
+
const geometry = new THREE.SphereGeometry(scene.farLimitRender / 2, 40, 40);
|
|
246
|
+
|
|
247
|
+
const material = new THREE.MeshBasicMaterial({
|
|
248
|
+
map: backgroundTexture,
|
|
249
|
+
side: THREE.BackSide,
|
|
250
|
+
transparent: opacity < 1,
|
|
251
|
+
opacity: opacity
|
|
252
|
+
});
|
|
253
|
+
|
|
254
|
+
const backgroundSphere = new THREE.Mesh(geometry, material);
|
|
255
|
+
backgroundSphere.renderOrder = -1;
|
|
256
|
+
scene.scene.add(backgroundSphere);
|
|
257
|
+
|
|
258
|
+
// Environment for PBR materials etc.
|
|
259
|
+
scene.scene.environment = envMap;
|
|
260
|
+
scene.scene.environmentIntensity = lightingIntensity;
|
|
261
|
+
|
|
262
|
+
return backgroundSphere;
|
|
263
|
+
}
|
|
264
|
+
|
|
265
|
+
// ---------------------------------------------------------------------------
|
|
266
|
+
// (Optional) cache clearing helpers if you ever need them
|
|
267
|
+
// ---------------------------------------------------------------------------
|
|
268
|
+
|
|
269
|
+
export function clearHDRICaches(): void {
|
|
270
|
+
sourceCache.clear();
|
|
271
|
+
envMapCache.clear();
|
|
272
|
+
blurCache.clear();
|
|
273
|
+
}
|
|
@@ -230,111 +230,6 @@ export function addSceneLighting(
|
|
|
230
230
|
return lights
|
|
231
231
|
}
|
|
232
232
|
|
|
233
|
-
/**
|
|
234
|
-
* Creates a scene with standard grid, axes, and lighting
|
|
235
|
-
*
|
|
236
|
-
* @param scene - The Three.js scene to set up
|
|
237
|
-
* @param lightingOptions - Optional lighting configuration
|
|
238
|
-
* @returns The created lights collection
|
|
239
|
-
*/
|
|
240
|
-
export function setupStandardScene(
|
|
241
|
-
scene: THREE.Scene,
|
|
242
|
-
lightingOptions: LightingOptions = {}
|
|
243
|
-
): LightCollection {
|
|
244
|
-
// Add grid
|
|
245
|
-
const gridHelper = new THREE.GridHelper(10, 10)
|
|
246
|
-
scene.add(gridHelper)
|
|
247
|
-
|
|
248
|
-
// Add axes
|
|
249
|
-
const axesHelper = new THREE.AxesHelper(5)
|
|
250
|
-
scene.add(axesHelper)
|
|
251
|
-
|
|
252
|
-
// Add lights and return them
|
|
253
|
-
return addSceneLighting(scene, lightingOptions)
|
|
254
|
-
}
|
|
255
|
-
|
|
256
|
-
export interface HDRIData {
|
|
257
|
-
texture: THREE.DataTexture
|
|
258
|
-
material: THREE.Material
|
|
259
|
-
}
|
|
260
|
-
|
|
261
|
-
export const loadHDRIData = async (
|
|
262
|
-
path: HDRIs | string,
|
|
263
|
-
blurAmount: number,
|
|
264
|
-
opacity: number = 1
|
|
265
|
-
): Promise<HDRIData> => {
|
|
266
|
-
const rgbeLoader: RGBELoader = new RGBELoader()
|
|
267
|
-
|
|
268
|
-
return new Promise((resolve, reject) => {
|
|
269
|
-
rgbeLoader.setDataType(THREE.FloatType).load(
|
|
270
|
-
path as any,
|
|
271
|
-
(texture: THREE.DataTexture): void => {
|
|
272
|
-
const blurredMaterial = new THREE.ShaderMaterial({
|
|
273
|
-
uniforms: {
|
|
274
|
-
uTexture: { value: texture },
|
|
275
|
-
uBlurAmount: { value: blurAmount }, // Increase for more blur
|
|
276
|
-
uTextureSize: {
|
|
277
|
-
value: new THREE.Vector2(texture.image.width, texture.image.height)
|
|
278
|
-
},
|
|
279
|
-
sigma: { value: 3.0 }, // Add this uniform
|
|
280
|
-
opacity: { value: opacity }
|
|
281
|
-
},
|
|
282
|
-
vertexShader: vert_blur_hdri,
|
|
283
|
-
fragmentShader: frag_blur_hdri,
|
|
284
|
-
side: THREE.BackSide,
|
|
285
|
-
transparent: true
|
|
286
|
-
})
|
|
287
|
-
|
|
288
|
-
resolve({
|
|
289
|
-
texture,
|
|
290
|
-
material: blurredMaterial
|
|
291
|
-
})
|
|
292
|
-
},
|
|
293
|
-
(_) => {},
|
|
294
|
-
// Optional error callback
|
|
295
|
-
(error): void => {
|
|
296
|
-
console.error('Error loading HDRI:', error)
|
|
297
|
-
reject(error)
|
|
298
|
-
}
|
|
299
|
-
)
|
|
300
|
-
})
|
|
301
|
-
}
|
|
302
|
-
|
|
303
|
-
export enum HDRIs {
|
|
304
|
-
photoStudio1 = _photoStudio1 as any,
|
|
305
|
-
photoStudio2 = _photoStudio2 as any,
|
|
306
|
-
photoStudio3 = _photoStudio3 as any,
|
|
307
|
-
outdoor1 = _outdoor1 as any,
|
|
308
|
-
indoor1 = _indoor1 as any,
|
|
309
|
-
metro1 = _metro1 as any
|
|
310
|
-
}
|
|
311
|
-
|
|
312
|
-
export async function addHDRI(scene: AnimatedScene, hdriData: HDRIData, lightingIntensity = 1.0) {
|
|
313
|
-
// Create PMREM Generator for converting equirectangular HDRI to cubemap
|
|
314
|
-
const pmremGenerator: THREE.PMREMGenerator = new THREE.PMREMGenerator(scene.renderer)
|
|
315
|
-
pmremGenerator.compileEquirectangularShader()
|
|
316
|
-
|
|
317
|
-
// Process the HDRI texture
|
|
318
|
-
const envMap: THREE.Texture = pmremGenerator.fromEquirectangular(hdriData.texture).texture
|
|
319
|
-
|
|
320
|
-
// Create background sphere
|
|
321
|
-
const geometry = new THREE.SphereGeometry(scene.farLimitRender / 2, 40, 40)
|
|
322
|
-
|
|
323
|
-
const backgroundSphere = new THREE.Mesh(geometry, hdriData.material)
|
|
324
|
-
backgroundSphere.renderOrder = -1 // Render before other objects
|
|
325
|
-
|
|
326
|
-
// Attach to camera
|
|
327
|
-
scene.scene.add(backgroundSphere)
|
|
328
|
-
|
|
329
|
-
// Apply to scene environment (for reflections)
|
|
330
|
-
scene.scene.environment = envMap
|
|
331
|
-
|
|
332
|
-
scene.scene.environmentIntensity = lightingIntensity
|
|
333
|
-
|
|
334
|
-
// Clean up resources
|
|
335
|
-
// texture.dispose()
|
|
336
|
-
pmremGenerator.dispose()
|
|
337
|
-
}
|
|
338
233
|
|
|
339
234
|
export function addBackgroundGradient({
|
|
340
235
|
scene,
|
|
@@ -1,7 +1,13 @@
|
|
|
1
1
|
import * as THREE from 'three'
|
|
2
|
-
import type { SceneComponents } from './protocols'
|
|
3
2
|
import { OrbitControls } from 'three/examples/jsm/controls/OrbitControls.js'
|
|
4
3
|
|
|
4
|
+
// Core Three.js handles produced by createScene().
export interface SceneComponents {
  // Either projection type is accepted by the rest of the pipeline.
  camera: THREE.PerspectiveCamera | THREE.OrthographicCamera
  renderer: THREE.WebGLRenderer
  scene: THREE.Scene
  controls: OrbitControls
}
|
|
10
|
+
|
|
5
11
|
export const createScene = (
|
|
6
12
|
container: HTMLElement,
|
|
7
13
|
pixelsWidth: number,
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
import * as THREE from 'three';
|
|
2
|
+
import { CONTENT_NAME } from './svgRendering';
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
// Result of looking up all meshes tagged with one LaTeX CSS class name.
export interface ClassQueryResult {
  className: string; // e.g. "variable"
  meshes: THREE.Mesh[]; // all meshes that belong to this class
  box: THREE.Box3; // combined bounding box (in world space)
  center: THREE.Vector3; // center of that box (world space)
}
|
|
11
|
+
|
|
12
|
+
export function queryLaTeXClass(
|
|
13
|
+
root: THREE.Object3D,
|
|
14
|
+
className: string
|
|
15
|
+
): ClassQueryResult | null {
|
|
16
|
+
// Try to use the inner content group if it exists
|
|
17
|
+
const content =
|
|
18
|
+
(root.getObjectByName(CONTENT_NAME) as THREE.Group | null) ??
|
|
19
|
+
(root as THREE.Group);
|
|
20
|
+
|
|
21
|
+
const meshes: THREE.Mesh[] = [];
|
|
22
|
+
|
|
23
|
+
// Make sure transforms are current
|
|
24
|
+
root.updateMatrixWorld(true);
|
|
25
|
+
|
|
26
|
+
content.traverse(obj => {
|
|
27
|
+
const mesh = obj as THREE.Mesh;
|
|
28
|
+
// @ts-ignore: runtime check
|
|
29
|
+
if (!mesh.isMesh) return;
|
|
30
|
+
const classes: string[] | undefined = mesh.userData.dmClasses;
|
|
31
|
+
if (!classes) return;
|
|
32
|
+
if (classes.includes(className)) {
|
|
33
|
+
meshes.push(mesh);
|
|
34
|
+
}
|
|
35
|
+
});
|
|
36
|
+
|
|
37
|
+
if (!meshes.length) return null;
|
|
38
|
+
|
|
39
|
+
const box = new THREE.Box3();
|
|
40
|
+
for (const m of meshes) box.expandByObject(m);
|
|
41
|
+
const center = box.getCenter(new THREE.Vector3());
|
|
42
|
+
|
|
43
|
+
return { className, meshes, box, center };
|
|
44
|
+
}
|