three-zoo 0.4.7 → 0.5.1
This diff compares publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions exactly as they appear in their public registries.
- package/README.md +78 -64
- package/dist/DualFovCamera.d.ts +142 -0
- package/dist/GeometryHasher.d.ts +1 -17
- package/dist/SceneTraversal.d.ts +104 -10
- package/dist/Sun.d.ts +59 -12
- package/dist/index.d.ts +1 -4
- package/dist/index.js +353 -376
- package/dist/index.js.map +1 -1
- package/dist/index.min.js +1 -1
- package/dist/index.min.js.map +1 -1
- package/package.json +15 -7
- package/dist/BiFovCamera.d.ts +0 -53
- package/dist/Bounds.d.ts +0 -23
- package/dist/InstanceAssembler.d.ts +0 -26
- package/dist/SceneProcessor.d.ts +0 -31
package/dist/index.js
CHANGED
@@ -1,102 +1,157 @@
-import { PerspectiveCamera, MathUtils, Vector3,
+import { PerspectiveCamera, MathUtils, Vector3, Mesh, BufferAttribute, AnimationMixer, DirectionalLight, Box3, Spherical, RGBAFormat } from 'three';
 
+/** Default horizontal field of view in degrees */
 const DEFAULT_HORIZONTAL_FOV = 90;
+/** Default vertical field of view in degrees */
 const DEFAULT_VERTICAL_FOV = 90;
+/** Default aspect ratio (width/height) */
 const DEFAULT_ASPECT = 1;
+/** Default near clipping plane distance */
 const DEFAULT_NEAR = 1;
+/** Default far clipping plane distance */
 const DEFAULT_FAR = 1000;
+/** Minimum allowed field of view in degrees */
 const MIN_FOV = 1;
+/** Maximum allowed field of view in degrees */
 const MAX_FOV = 179;
 /**
 * A camera that supports independent horizontal and vertical FOV settings.
 * Extends Three.js PerspectiveCamera to allow separate control over horizontal
 * and vertical fields of view.
 */
-class
+class DualFovCamera extends PerspectiveCamera {
 /**
-*
-*
-* @param
-* @param
-* @param
+* Creates a new DualFovCamera instance with independent horizontal and vertical FOV control.
+*
+* @param horizontalFov - Horizontal field of view in degrees. Must be between 1° and 179°. Defaults to 90°.
+* @param verticalFov - Vertical field of view in degrees. Must be between 1° and 179°. Defaults to 90°.
+* @param aspect - Camera aspect ratio (width/height). Defaults to 1.
+* @param near - Near clipping plane distance. Must be greater than 0. Defaults to 1.
+* @param far - Far clipping plane distance. Must be greater than near plane. Defaults to 1000.
 */
 constructor(horizontalFov = DEFAULT_HORIZONTAL_FOV, verticalFov = DEFAULT_VERTICAL_FOV, aspect = DEFAULT_ASPECT, near = DEFAULT_NEAR, far = DEFAULT_FAR) {
 super(verticalFov, aspect, near, far);
-this.
-this.
+this._private_horizontalFovInternal = horizontalFov;
+this._private_verticalFovInternal = verticalFov;
 this.updateProjectionMatrix();
 }
-/**
+/**
+* Gets the current horizontal field of view in degrees.
+*
+* @returns The horizontal FOV value between 1° and 179°
+*/
 get horizontalFov() {
-return this.
+return this._private_horizontalFovInternal;
 }
-/**
+/**
+* Gets the current vertical field of view in degrees.
+*
+* @returns The vertical FOV value between 1° and 179°
+*/
 get verticalFov() {
-return this.
+return this._private_verticalFovInternal;
 }
-/**
+/**
+* Sets the horizontal field of view in degrees.
+*
+* @param value - The horizontal FOV value in degrees. Will be clamped between 1° and 179°.
+*/
 set horizontalFov(value) {
-this.
+this._private_horizontalFovInternal = MathUtils.clamp(value, MIN_FOV, MAX_FOV);
 this.updateProjectionMatrix();
 }
-/**
+/**
+* Sets the vertical field of view in degrees.
+*
+* @param value - The vertical FOV value in degrees. Will be clamped between 1° and 179°.
+*/
 set verticalFov(value) {
-this.
+this._private_verticalFovInternal = MathUtils.clamp(value, MIN_FOV, MAX_FOV);
 this.updateProjectionMatrix();
 }
 /**
-*
-*
-* @param
+* Updates both horizontal and vertical field of view values simultaneously.
+*
+* @param horizontal - Horizontal FOV in degrees. Will be clamped between 1° and 179°.
+* @param vertical - Vertical FOV in degrees. Will be clamped between 1° and 179°.
 */
 setFov(horizontal, vertical) {
-this.
-this.
+this._private_horizontalFovInternal = MathUtils.clamp(horizontal, MIN_FOV, MAX_FOV);
+this._private_verticalFovInternal = MathUtils.clamp(vertical, MIN_FOV, MAX_FOV);
 this.updateProjectionMatrix();
 }
 /**
-*
-*
+* Copies the field of view settings from another DualFovCamera instance.
+*
+* @param source - The DualFovCamera instance to copy FOV settings from.
 */
 copyFovSettings(source) {
-this.
-this.
+this._private_horizontalFovInternal = source.horizontalFov;
+this._private_verticalFovInternal = source.verticalFov;
 this.updateProjectionMatrix();
 }
 /**
-* Updates the projection matrix based on FOV settings and aspect ratio.
-*
-*
+* Updates the projection matrix based on current FOV settings and aspect ratio.
+*
+* The behavior differs based on orientation:
+* - **Landscape mode (aspect > 1)**: Preserves horizontal FOV, calculates vertical FOV
+* - **Portrait mode (aspect ≤ 1)**: Preserves vertical FOV, calculates horizontal FOV
+*
+* This method is automatically called when FOV values or aspect ratio changes.
+*
+* @override
 */
 updateProjectionMatrix() {
 if (this.aspect > 1) {
 // Landscape orientation: preserve horizontal FOV
-const radians = MathUtils.degToRad(this.
+const radians = MathUtils.degToRad(this._private_horizontalFovInternal);
 this.fov = MathUtils.radToDeg(Math.atan(Math.tan(radians / 2) / this.aspect) * 2);
 }
 else {
 // Portrait orientation: preserve vertical FOV
-this.fov = this.
+this.fov = this._private_verticalFovInternal;
 }
 super.updateProjectionMatrix();
 }
-/**
-
+/**
+* Gets the actual horizontal field of view after aspect ratio adjustments.
+*
+* In landscape mode, this returns the set horizontal FOV.
+* In portrait mode, this calculates the actual horizontal FOV based on the vertical FOV and aspect ratio.
+*
+* @returns The actual horizontal FOV in degrees
+*/
+getActualHorizontalFov() {
 if (this.aspect >= 1) {
-return this.
+return this._private_horizontalFovInternal;
 }
-const verticalRadians = MathUtils.degToRad(this.
+const verticalRadians = MathUtils.degToRad(this._private_verticalFovInternal);
 return MathUtils.radToDeg(Math.atan(Math.tan(verticalRadians / 2) * this.aspect) * 2);
 }
-/**
-
+/**
+* Gets the actual vertical field of view after aspect ratio adjustments.
+*
+* In portrait mode, this returns the set vertical FOV.
+* In landscape mode, this calculates the actual vertical FOV based on the horizontal FOV and aspect ratio.
+*
+* @returns The actual vertical FOV in degrees
+*/
+getActualVerticalFov() {
 if (this.aspect < 1) {
-return this.
+return this._private_verticalFovInternal;
 }
-const horizontalRadians = MathUtils.degToRad(this.
+const horizontalRadians = MathUtils.degToRad(this._private_horizontalFovInternal);
 return MathUtils.radToDeg(Math.atan(Math.tan(horizontalRadians / 2) / this.aspect) * 2);
 }
-
+/**
+* Adjusts the vertical field of view to fit all specified points within the camera's view.
+*
+* This method calculates the required vertical FOV to ensure all provided vertices
+* are visible within the vertical bounds of the camera's frustum.
+*
+* @param vertices - Array of 3D points (in world coordinates) that should fit within the camera's vertical view
+*/
+fitVerticalFovToPoints(vertices) {
 const up = new Vector3(0, 1, 0).applyQuaternion(this.quaternion);
 let maxVerticalAngle = 0;
 for (const vertex of vertices) {
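A minimal usage sketch of the DualFovCamera API shown in the hunk above. The `three-zoo` import specifier and the concrete FOV, aspect, and clipping values are illustrative assumptions rather than values taken from the package:

```js
import { DualFovCamera } from 'three-zoo';

// 100° horizontal and 60° vertical FOV, landscape aspect ratio.
const camera = new DualFovCamera(100, 60, 16 / 9, 0.1, 500);

// In landscape orientation (aspect > 1) the horizontal FOV is preserved
// and the effective vertical FOV is derived from it.
console.log(camera.getActualVerticalFov());

// Both values are clamped to the 1°-179° range and the projection
// matrix is refreshed automatically.
camera.setFov(120, 90);

// Switching to a portrait aspect makes the vertical FOV authoritative.
camera.aspect = 9 / 16;
camera.updateProjectionMatrix();
console.log(camera.getActualHorizontalFov());
```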
@@ -109,11 +164,19 @@ class BiFovCamera extends PerspectiveCamera {
 }
 }
 const requiredFov = MathUtils.radToDeg(2 * maxVerticalAngle);
-this.
+this._private_verticalFovInternal = MathUtils.clamp(requiredFov, MIN_FOV, MAX_FOV);
 this.updateProjectionMatrix();
 }
-
-
+/**
+* Adjusts the vertical field of view to fit a bounding box within the camera's view.
+*
+* This method calculates the required vertical FOV to ensure the entire bounding box
+* is visible within the vertical bounds of the camera's frustum.
+*
+* @param box - The 3D bounding box (in world coordinates) that should fit within the camera's vertical view
+*/
+fitVerticalFovToBox(box) {
+this.fitVerticalFovToPoints([
 new Vector3(box.min.x, box.min.y, box.min.z),
 new Vector3(box.min.x, box.min.y, box.max.z),
 new Vector3(box.min.x, box.max.y, box.min.z),
@@ -124,7 +187,16 @@ class BiFovCamera extends PerspectiveCamera {
 new Vector3(box.max.x, box.max.y, box.max.z),
 ]);
 }
-
+/**
+* Adjusts the vertical field of view to fit a skinned mesh within the camera's view.
+*
+* This method updates the mesh's skeleton, applies bone transformations to all vertices,
+* and then calculates the required vertical FOV to ensure the entire deformed mesh
+* is visible within the vertical bounds of the camera's frustum.
+*
+* @param skinnedMesh - The skinned mesh (with active skeleton) that should fit within the camera's vertical view
+*/
+fitVerticalFovToMesh(skinnedMesh) {
 skinnedMesh.updateWorldMatrix(true, true);
 skinnedMesh.skeleton.update();
 const bakedGeometry = skinnedMesh.geometry;
@@ -136,9 +208,22 @@ class BiFovCamera extends PerspectiveCamera {
 skinnedMesh.applyBoneTransform(i, target);
 points.push(target.clone());
 }
-this.
+this.fitVerticalFovToPoints(points);
 }
-
+/**
+* Points the camera to look at the center of mass of a skinned mesh.
+*
+* This method updates the mesh's skeleton, applies bone transformations to all vertices,
+* calculates the center of mass using a clustering algorithm, and then orients the camera
+* to look at that point.
+*
+* The center of mass calculation uses an iterative clustering approach to find the
+* main concentration of vertices, which provides better results than a simple average
+* for complex meshes.
+*
+* @param skinnedMesh - The skinned mesh (with active skeleton) whose center of mass should be the camera's target
+*/
+lookAtMeshCenterOfMass(skinnedMesh) {
 skinnedMesh.updateWorldMatrix(true, true);
 skinnedMesh.skeleton.update();
 const bakedGeometry = skinnedMesh.geometry;
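The fit helpers documented above can be sketched as follows; the box extents and camera placement are made-up values for illustration, and the `three-zoo` specifier is an assumption:

```js
import { Box3, Vector3 } from 'three';
import { DualFovCamera } from 'three-zoo';

const camera = new DualFovCamera(90, 60, 9 / 16, 0.1, 100);
camera.position.set(0, 1.5, 6);
camera.lookAt(0, 1, 0);
camera.updateProjectionMatrix();

// Recompute the vertical FOV so every corner of the box lies inside
// the vertical extent of the frustum.
const bounds = new Box3(new Vector3(-1, 0, -1), new Vector3(1, 2, 1));
camera.fitVerticalFovToBox(bounds);

// For an animated character, fitVerticalFovToMesh(skinnedMesh) and
// lookAtMeshCenterOfMass(skinnedMesh) operate on the posed
// (bone-transformed) vertices of a THREE.SkinnedMesh instead of a box.
```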
@@ -150,6 +235,13 @@ class BiFovCamera extends PerspectiveCamera {
 skinnedMesh.applyBoneTransform(i, target);
 points.push(target.clone());
 }
+/**
+* Finds the main cluster center of a set of 3D points using iterative refinement.
+*
+* @param points - Array of 3D points to cluster
+* @param iterations - Number of refinement iterations to perform
+* @returns The calculated center point of the main cluster
+*/
 const findMainCluster = (points, iterations = 3) => {
 if (points.length === 0) {
 return new Vector3();
@@ -174,116 +266,45 @@ class BiFovCamera extends PerspectiveCamera {
 const centerOfMass = findMainCluster(points);
 this.lookAt(centerOfMass);
 }
+/**
+* Creates a deep copy of this DualFovCamera instance.
+*
+* The cloned camera will have identical FOV settings, position, rotation,
+* and all other camera properties.
+*
+* @returns A new DualFovCamera instance that is an exact copy of this one
+* @override
+*/
 clone() {
-const camera = new
+const camera = new DualFovCamera(this._private_horizontalFovInternal, this._private_verticalFovInternal, this.aspect, this.near, this.far);
 camera.copy(this, true);
 return camera;
 }
 }
 
 /**
-*
-
-class
-
-
-
-
-
-this.setFromObject(object);
-}
-}
-/** Width (x-axis length) */
-get width() {
-return this.max.x - this.min.x;
-}
-/** Height (y-axis length) */
-get height() {
-return this.max.y - this.min.y;
-}
-/** Depth (z-axis length) */
-get depth() {
-return this.max.z - this.min.z;
-}
-/** Length of the box's diagonal */
-get diagonal() {
-return this.tempVector3A.subVectors(this.max, this.min).length();
-}
-setFromSkinnedMesh(skinnedMesh) {
-skinnedMesh.updateWorldMatrix(true, true);
-skinnedMesh.skeleton.update();
-const geometry = skinnedMesh.geometry;
-const position = geometry.attributes["position"];
-const target = new Vector3();
-const points = [];
-for (let i = 0; i < position.count; i++) {
-target.fromBufferAttribute(position, i);
-skinnedMesh.applyBoneTransform(i, target);
-points.push(target.clone());
-}
-this.setFromPoints(points);
-return this;
-}
-/** Volume (width * height * depth) */
-getVolume() {
-return this.width * this.height * this.depth;
-}
-/** Surface area (sum of all six faces) */
-getSurfaceArea() {
-const w = this.width;
-const h = this.height;
-const d = this.depth;
-return 2 * (w * h + h * d + d * w);
-}
-}
-
-const POSITION_COMPONENT_COUNT = 3;
-const NORMAL_COMPONENT_COUNT = 3;
-/**
-* Internal utility to identify identical geometries.
-* @internal
+* Utility class for finding and modifying objects in a Three.js scene graph.
+*
+* This class provides static methods for traversing Three.js scene hierarchies,
+* searching for specific objects or materials, and performing batch operations
+* on collections of scene objects.
+*
+* All methods perform depth-first traversal of the scene graph starting from
+* the provided root object and recursively processing all children.
 */
-class GeometryHasher {
-/**
-* Creates a hash for a geometry based on its vertex data.
-* Vertices that differ by less than tolerance are considered the same.
-*
-* @param geometry - Geometry to hash
-* @param tolerance - How close vertices need to be to count as identical
-* @returns Hash string that's the same for matching geometries
-* @internal
-*/
-static getGeometryHash(geometry, tolerance) {
-const position = geometry.attributes["position"];
-const positionArray = position.array;
-const positionHashParts = [];
-// Sample vertex positions with tolerance
-for (let i = 0; i < positionArray.length; i += POSITION_COMPONENT_COUNT) {
-const x = Math.round(positionArray[i] / tolerance);
-const y = Math.round(positionArray[i + 1] / tolerance);
-const z = Math.round(positionArray[i + 2] / tolerance);
-positionHashParts.push(`${x},${y},${z}`);
-}
-// Hash normal data if available
-const normal = geometry.attributes["normal"];
-const normalHashParts = [];
-const normalArray = normal.array;
-for (let i = 0; i < normalArray.length; i += NORMAL_COMPONENT_COUNT) {
-const x = Math.round(normalArray[i] / tolerance);
-const y = Math.round(normalArray[i + 1] / tolerance);
-const z = Math.round(normalArray[i + 2] / tolerance);
-normalHashParts.push(`${x},${y},${z}`);
-}
-// Combine position and normal hashes
-const positionHash = positionHashParts.join("|");
-const normalHash = normalHashParts.join("|");
-return `${positionHash}#${normalHash}`;
-}
-}
-
-/** Find and modify objects in a Three.js scene */
 class SceneTraversal {
-/**
+/**
+* Finds the first object in the scene hierarchy with an exact name match.
+*
+* Performs a depth-first search through the scene graph starting from the provided
+* root object. Returns the first object encountered whose name property exactly
+* matches the search string.
+*
+* @param object - The root Object3D to start searching from
+* @param name - The exact name to search for (case-sensitive)
+* @returns The first matching Object3D, or null if no match is found
+
+*/
 static getObjectByName(object, name) {
 if (object.name === name) {
 return object;
@@ -296,7 +317,19 @@ class SceneTraversal {
 }
 return null;
 }
-/**
+/**
+* Finds the first material in the scene hierarchy with an exact name match.
+*
+* Performs a depth-first search through the scene graph, examining materials
+* attached to Mesh objects. Handles both single materials and material arrays.
+* Returns the first material encountered whose name property exactly matches
+* the search string.
+*
+* @param object - The root Object3D to start searching from
+* @param name - The exact material name to search for (case-sensitive)
+* @returns The first matching Material, or null if no match is found
+
+*/
 static getMaterialByName(object, name) {
 if (object instanceof Mesh) {
 if (Array.isArray(object.material)) {
@@ -318,7 +351,19 @@ class SceneTraversal {
 }
 return null;
 }
-/**
+/**
+* Processes all objects of a specific type in the scene hierarchy.
+*
+* Performs a depth-first traversal and executes the provided callback function
+* for every object that is an instance of the specified type. This is useful
+* for batch operations on specific object types (e.g., all lights, all meshes, etc.).
+*
+* @template T - The type of objects to process
+* @param object - The root Object3D to start searching from
+* @param type - The constructor/class to filter by (e.g., DirectionalLight, Mesh)
+* @param callback - Function to execute for each matching object instance
+
+*/
 static enumerateObjectsByType(object, type, callback) {
 if (object instanceof type) {
 callback(object);
@@ -327,7 +372,17 @@ class SceneTraversal {
 SceneTraversal.enumerateObjectsByType(child, type, callback);
 }
 }
-/**
+/**
+* Processes all materials found in mesh objects within the scene hierarchy.
+*
+* Performs a depth-first traversal, finding all Mesh objects and executing
+* the provided callback function for each material. Handles both single
+* materials and material arrays properly.
+*
+* @param object - The root Object3D to start searching from
+* @param callback - Function to execute for each material found
+
+*/
 static enumerateMaterials(object, callback) {
 if (object instanceof Mesh) {
 if (Array.isArray(object.material)) {
@@ -343,7 +398,18 @@ class SceneTraversal {
 SceneTraversal.enumerateMaterials(child, callback);
 }
 }
-/**
+/**
+* Finds all objects in the scene hierarchy that match the specified filter criteria.
+*
+* Performs a depth-first search and collects all objects that either match
+* a regular expression pattern (applied to the object's name) or satisfy
+* a custom predicate function.
+*
+* @param object - The root Object3D to start searching from
+* @param filter - Either a RegExp to test against object names, or a predicate function
+* @returns Array of all matching Object3D instances
+
+*/
 static filterObjects(object, filter) {
 let result = [];
 if (typeof filter === "function") {
@@ -361,7 +427,18 @@ class SceneTraversal {
 }
 return result;
 }
-/**
+/**
+* Finds all materials in the scene hierarchy whose names match a regular expression pattern.
+*
+* Performs a depth-first search through all Mesh objects and collects materials
+* whose name property matches the provided regular expression. Handles both
+* single materials and material arrays properly.
+*
+* @param object - The root Object3D to start searching from
+* @param name - Regular expression pattern to test against material names
+* @returns Array of all matching Material instances
+
+*/
 static filterMaterials(object, name) {
 let result = [];
 if (object instanceof Mesh) {
@@ -383,194 +460,42 @@ class SceneTraversal {
 }
 return result;
 }
-/**
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-* Only processes meshes that:
-* - Have no children
-* - Pass the filter function (if any)
-* - Share geometry with at least one other mesh
-*
-* @param container - Object containing meshes to process
-* @param options - Optional settings
-*/
-static assemble(container, options = {}) {
-var _a, _b;
-const dictionary = new Map();
-const instances = [];
-const tolerance = (_a = options.geometryTolerance) !== null && _a !== void 0 ? _a : DEFAULT_TOLERANCE;
-const geometryHashes = new Map();
-SceneTraversal.enumerateObjectsByType(container, Mesh, (child) => {
-var _a;
-if (child.children.length === 0 &&
-(!options.filter || options.filter(child))) {
-const materials = Array.isArray(child.material)
-? child.material
-: [child.material];
-let geometryHash = geometryHashes.get(child.geometry.uuid);
-if (!geometryHash) {
-geometryHash = GeometryHasher.getGeometryHash(child.geometry, tolerance);
-geometryHashes.set(child.geometry.uuid, geometryHash);
-}
-const materialKey = materials.map((m) => m.uuid).join(",");
-const compositeKey = `${geometryHash}|${materialKey}`;
-const entry = (_a = dictionary.get(compositeKey)) !== null && _a !== void 0 ? _a : {
-meshes: [],
-materials,
-castShadow: false,
-receiveShadow: false,
-};
-if (child.castShadow) {
-entry.castShadow = true;
-}
-if (child.receiveShadow) {
-entry.receiveShadow = true;
+/**
+* Finds all objects (mesh users) that use materials with names matching a regular expression pattern.
+*
+* Performs a depth-first search through all Mesh objects and collects the mesh objects
+* whose materials have names that match the provided regular expression. This is useful
+* for finding all objects that use specific material types or naming patterns.
+*
+* @param object - The root Object3D to start searching from
+* @param materialName - Regular expression pattern to test against material names
+* @returns Array of all Mesh objects that use materials with matching names
+*/
+static findMaterialUsers(object, materialName) {
+let result = [];
+if (object instanceof Mesh) {
+let hasMatchingMaterial = false;
+if (Array.isArray(object.material)) {
+for (const material of object.material) {
+if (material.name && materialName.test(material.name)) {
+hasMatchingMaterial = true;
+break;
+}
 }
-entry.meshes.push(child);
-dictionary.set(compositeKey, entry);
 }
-
-
-
-
-}
-const { meshes, materials, castShadow, receiveShadow } = descriptor;
-const sortedMeshes = meshes.sort((a, b) => a.name.localeCompare(b.name));
-const defaultMesh = sortedMeshes[0];
-const instancedMesh = new InstancedMesh(defaultMesh.geometry, materials.length === 1 ? materials[0] : materials, sortedMeshes.length);
-instancedMesh.name = defaultMesh.name;
-instancedMesh.castShadow = castShadow;
-instancedMesh.receiveShadow = receiveShadow;
-for (let i = 0; i < sortedMeshes.length; i++) {
-const mesh = sortedMeshes[i];
-mesh.updateWorldMatrix(true, false);
-instancedMesh.setMatrixAt(i, mesh.matrixWorld);
-instancedMesh.userData[mesh.uuid] = mesh.userData;
+else {
+if (object.material.name && materialName.test(object.material.name)) {
+hasMatchingMaterial = true;
+}
 }
-
-
-for (const mesh of sortedMeshes) {
-(_b = mesh.parent) === null || _b === void 0 ? void 0 : _b.remove(mesh);
+if (hasMatchingMaterial) {
+result.push(object);
 }
 }
-
-
-}
-}
-}
-
-/**
-* Clones the given 3D object and its descendants, ensuring that any `SkinnedMesh` instances are
-* correctly associated with their bones. Bones are also cloned, and must be descendants of the
-* object passed to this method. Other data, like geometries and materials, are reused by reference.
-*
-* @param {Object3D} source - The 3D object to clone.
-* @return {Object3D} The cloned 3D object.
-*/
-function clone( source ) {
-
-const sourceLookup = new Map();
-const cloneLookup = new Map();
-
-const clone = source.clone();
-
-parallelTraverse( source, clone, function ( sourceNode, clonedNode ) {
-
-sourceLookup.set( clonedNode, sourceNode );
-cloneLookup.set( sourceNode, clonedNode );
-
-} );
-
-clone.traverse( function ( node ) {
-
-if ( ! node.isSkinnedMesh ) return;
-
-const clonedMesh = node;
-const sourceMesh = sourceLookup.get( node );
-const sourceBones = sourceMesh.skeleton.bones;
-
-clonedMesh.skeleton = sourceMesh.skeleton.clone();
-clonedMesh.bindMatrix.copy( sourceMesh.bindMatrix );
-
-clonedMesh.skeleton.bones = sourceBones.map( function ( bone ) {
-
-return cloneLookup.get( bone );
-
-} );
-
-clonedMesh.bind( clonedMesh.skeleton, clonedMesh.bindMatrix );
-
-} );
-
-return clone;
-
-}
-
-function parallelTraverse( a, b, callback ) {
-
-callback( a, b );
-
-for ( let i = 0; i < a.children.length; i ++ ) {
-
-parallelTraverse( a.children[ i ], b.children[ i ], callback );
-
-}
-
-}
-
-/** Post-processes a scene based on name patterns */
-class SceneProcessor {
-/**
-* Process a scene to set up materials and shadows.
-*
-* @param object - Scene to process
-* @param options - How to process the scene
-* @returns Processed scene root objects
-*/
-static process(object, options) {
-const container = options.cloneAsset !== false ? clone(object) : object;
-if (options.assembleInstances !== false) {
-InstanceAssembler.assemble(container);
+for (const child of object.children) {
+result = result.concat(SceneTraversal.findMaterialUsers(child, materialName));
 }
-
-var _a, _b, _c, _d, _e, _f, _g;
-material.transparent =
-(_b = (_a = options.transparentMaterialRegExp) === null || _a === void 0 ? void 0 : _a.test(material.name)) !== null && _b !== void 0 ? _b : false;
-material.depthWrite = !((_d = (_c = options.noDepthWriteMaterialRegExp) === null || _c === void 0 ? void 0 : _c.test(material.name)) !== null && _d !== void 0 ? _d : false);
-material.alphaTest = ((_e = options.alphaTestMaterialRegExp) === null || _e === void 0 ? void 0 : _e.test(material.name))
-? 0.5
-: 0;
-material.alphaHash =
-(_g = (_f = options.alphaHashMaterialRegExp) === null || _f === void 0 ? void 0 : _f.test(material.name)) !== null && _g !== void 0 ? _g : false;
-material.side = FrontSide;
-material.forceSinglePass = true;
-material.depthTest = true;
-});
-SceneTraversal.enumerateObjectsByType(container, Mesh, (child) => {
-var _a, _b, _c, _d;
-child.castShadow = (_b = (_a = options.castShadowRegExp) === null || _a === void 0 ? void 0 : _a.test(child.name)) !== null && _b !== void 0 ? _b : false;
-child.receiveShadow =
-(_d = (_c = options.receiveShadowRegExp) === null || _c === void 0 ? void 0 : _c.test(child.name)) !== null && _d !== void 0 ? _d : false;
-});
-return container.children;
+return result;
 }
 }
 
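A short sketch of the SceneTraversal helpers, including the findMaterialUsers method added in the hunk above. The scene contents, object names, and name patterns are assumptions for illustration, as is the `three-zoo` import specifier:

```js
import { Mesh, Scene } from 'three';
import { SceneTraversal } from 'three-zoo';

const scene = new Scene(); // populated elsewhere, e.g. from a loaded glTF asset

// Exact, case-sensitive lookups.
const body = SceneTraversal.getObjectByName(scene, 'Body');
const paint = SceneTraversal.getMaterialByName(scene, 'CarPaint');

// Batch operations over a type or over every material.
SceneTraversal.enumerateObjectsByType(scene, Mesh, (mesh) => {
  mesh.castShadow = true;
});
SceneTraversal.enumerateMaterials(scene, (material) => {
  material.needsUpdate = true;
});

// Collect matches by RegExp or predicate.
const wheels = SceneTraversal.filterObjects(scene, /^Wheel_/);
const glassMaterials = SceneTraversal.filterMaterials(scene, /glass/i);
const glassMeshes = SceneTraversal.findMaterialUsers(scene, /glass/i);
```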
@@ -625,75 +550,118 @@ class SkinnedMeshBaker {
 }
 }
 
+/** Number of color channels in RGBA format */
 const RGBA_CHANNEL_COUNT = 4;
+/** Number of color channels in RGB format */
 const RGB_CHANNEL_COUNT = 3;
+/** Red channel weight for luminance calculation (ITU-R BT.709) */
 const LUMINANCE_R = 0.2126;
+/** Green channel weight for luminance calculation (ITU-R BT.709) */
 const LUMINANCE_G = 0.7152;
+/** Blue channel weight for luminance calculation (ITU-R BT.709) */
 const LUMINANCE_B = 0.0722;
-/**
+/**
+* A directional light with spherical positioning controls and advanced shadow mapping.
+*
+* Extends Three.js DirectionalLight to provide intuitive spherical coordinate control
+* (distance, elevation, azimuth) and automatic shadow map configuration for bounding boxes.
+* Also supports automatic sun direction calculation from HDR environment maps.
+*/
 class Sun extends DirectionalLight {
 constructor() {
 super(...arguments);
-/** Internal vectors to avoid garbage collection */
-this.
-this.
-this.
-this.
-this.
-this.
-this.
-this.
-this.
-this.
-}
-/**
+/** Internal vectors to avoid garbage collection during calculations */
+this._private_tempVector3D0 = new Vector3();
+this._private_tempVector3D1 = new Vector3();
+this._private_tempVector3D2 = new Vector3();
+this._private_tempVector3D3 = new Vector3();
+this._private_tempVector3D4 = new Vector3();
+this._private_tempVector3D5 = new Vector3();
+this._private_tempVector3D6 = new Vector3();
+this._private_tempVector3D7 = new Vector3();
+this._private_tempBox3 = new Box3();
+this._private_tempSpherical = new Spherical();
+}
+/**
+* Gets the distance from the light to its target (origin).
+*
+* @returns The distance in world units
+*/
 get distance() {
 return this.position.length();
 }
-/**
+/**
+* Gets the elevation angle (vertical angle from the horizontal plane).
+*
+* @returns The elevation angle in radians (0 = horizontal, π/2 = directly above)
+*/
 get elevation() {
-return this.
+return this._private_tempSpherical.setFromVector3(this.position).phi;
 }
-/**
+/**
+* Gets the azimuth angle (horizontal rotation around the target).
+*
+* @returns The azimuth angle in radians (0 = positive X axis, π/2 = positive Z axis)
+*/
 get azimuth() {
-return this.
+return this._private_tempSpherical.setFromVector3(this.position).theta;
 }
-/**
+/**
+* Sets the distance while preserving current elevation and azimuth angles.
+*
+* @param value - The new distance in world units
+*/
 set distance(value) {
-this.
-this.position.setFromSphericalCoords(value, this.
+this._private_tempSpherical.setFromVector3(this.position);
+this.position.setFromSphericalCoords(value, this._private_tempSpherical.phi, this._private_tempSpherical.theta);
 }
-/**
+/**
+* Sets the elevation angle while preserving current distance and azimuth.
+*
+* @param value - The new elevation angle in radians (0 = horizontal, π/2 = directly above)
+*/
 set elevation(value) {
-this.
-this.position.setFromSphericalCoords(this.
+this._private_tempSpherical.setFromVector3(this.position);
+this.position.setFromSphericalCoords(this._private_tempSpherical.radius, value, this._private_tempSpherical.theta);
 }
-/**
+/**
+* Sets the azimuth angle while preserving current distance and elevation.
+*
+* @param value - The new azimuth angle in radians (0 = positive X axis, π/2 = positive Z axis)
+*/
 set azimuth(value) {
-this.
-this.position.setFromSphericalCoords(this.
+this._private_tempSpherical.setFromVector3(this.position);
+this.position.setFromSphericalCoords(this._private_tempSpherical.radius, this._private_tempSpherical.phi, value);
 }
-/**
-
+/**
+* Configures the shadow camera to optimally cover a bounding box.
+*
+* This method automatically adjusts the directional light's shadow camera frustum
+* to perfectly encompass the provided bounding box, ensuring efficient shadow map
+* usage and eliminating shadow clipping issues.
+*
+* @param box3 - The 3D bounding box to cover with shadows
+*/
+configureShadowsForBoundingBox(box3) {
 const camera = this.shadow.camera;
 this.target.updateWorldMatrix(true, false);
-this.lookAt(this.target.getWorldPosition(this.
+this.lookAt(this.target.getWorldPosition(this._private_tempVector3D0));
 this.updateWorldMatrix(true, false);
 const points = [
-this.
-this.
-this.
-this.
-this.
-this.
-this.
-this.
+this._private_tempVector3D0.set(box3.min.x, box3.min.y, box3.min.z),
+this._private_tempVector3D1.set(box3.min.x, box3.min.y, box3.max.z),
+this._private_tempVector3D2.set(box3.min.x, box3.max.y, box3.min.z),
+this._private_tempVector3D3.set(box3.min.x, box3.max.y, box3.max.z),
+this._private_tempVector3D4.set(box3.max.x, box3.min.y, box3.min.z),
+this._private_tempVector3D5.set(box3.max.x, box3.min.y, box3.max.z),
+this._private_tempVector3D6.set(box3.max.x, box3.max.y, box3.min.z),
+this._private_tempVector3D7.set(box3.max.x, box3.max.y, box3.max.z),
 ];
 const inverseMatrix = this.matrixWorld.clone().invert();
 for (const point of points) {
 point.applyMatrix4(inverseMatrix);
 }
-const newBox3 = this.
+const newBox3 = this._private_tempBox3.setFromPoints(points);
 camera.left = newBox3.min.x;
 camera.bottom = newBox3.min.y;
 camera.near = -newBox3.max.z;
@@ -703,8 +671,17 @@ class Sun extends DirectionalLight {
 camera.updateWorldMatrix(true, false);
 camera.updateProjectionMatrix();
 }
-/**
-
+/**
+* Sets the sun's direction based on the brightest point in an HDR environment map.
+*
+* This method analyzes an HDR texture to find the pixel with the highest luminance
+* value and positions the sun to shine from that direction. This is useful for
+* creating realistic lighting that matches HDR environment maps.
+*
+* @param texture - The HDR texture to analyze (must have image data available)
+* @param distance - The distance to place the sun from the origin (defaults to 1)
+*/
+setDirectionFromHDRTexture(texture, distance = 1) {
 const data = texture.image.data;
 const width = texture.image.width;
 const height = texture.image.height;
@@ -734,5 +711,5 @@ class Sun extends DirectionalLight {
 }
 }
 
-export {
+export { DualFovCamera, SceneTraversal, SkinnedMeshBaker, Sun };
 //# sourceMappingURL=index.js.map
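The Sun export above can be wired up roughly as follows; the color, intensity, angles, and box extents are illustrative assumptions, as is the `three-zoo` import specifier:

```js
import { Box3, Scene, Vector3 } from 'three';
import { Sun } from 'three-zoo';

const scene = new Scene();
const sun = new Sun(0xffffff, 3); // DirectionalLight constructor arguments
sun.castShadow = true;
scene.add(sun, sun.target);

// Spherical positioning: distance in world units, angles in radians
// (see the elevation/azimuth accessors documented above).
sun.distance = 50;
sun.elevation = Math.PI / 4;
sun.azimuth = Math.PI / 2;

// Fit the shadow camera frustum to the scene's bounding box.
const sceneBounds = new Box3(new Vector3(-10, 0, -10), new Vector3(10, 10, 10));
sun.configureShadowsForBoundingBox(sceneBounds);

// Alternatively, aim the light at the brightest pixel of an HDR
// environment map loaded elsewhere (e.g. with RGBELoader):
// sun.setDirectionFromHDRTexture(hdrTexture, 50);
```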