three-zoo 0.4.2 → 0.4.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
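At a glance, 0.4.4 mostly reworks call signatures and documentation in this file: InstanceAssembler.assemble and SceneProcessor.process now take the container or asset as a positional argument rather than an options field, SceneProcessor matches names against regular expressions only (the *MaterialNames / *MeshNames options become *Expressions), SceneTraversal.filterObjects accepts a predicate function as well as a RegExp, and BiFovCamera and Bounds gain helpers for fitting bounding boxes and skinned meshes. A minimal migration sketch follows; the 'three-zoo' import path, the assumption that these classes are exported from the package entry point, and the surrounding scene setup are illustrative and not taken from the package's documentation.

// Usage sketch inferred from this diff; the 'three-zoo' import path, the exported
// names, and the example patterns are assumptions for illustration only.
import { Scene } from 'three';
import { BiFovCamera, Bounds, InstanceAssembler, SceneProcessor, SceneTraversal } from 'three-zoo';

const scene = new Scene(); // placeholder scene that already contains some meshes
const camera = new BiFovCamera(90, 60, 16 / 9);

// 0.4.2: InstanceAssembler.assemble({ container: scene, geometryTolerance: 1e-6 });
// 0.4.4: the container is positional and the options bag is optional.
InstanceAssembler.assemble(scene, { geometryTolerance: 1e-6 });

// 0.4.2: SceneProcessor.process({ asset: scene, transparentMaterialNames: ['Glass'], ... });
// 0.4.4: the asset is positional and every name option takes regular expressions.
const roots = SceneProcessor.process(scene, {
  transparentMaterialExpressions: [/glass/i],
  castShadowExpressions: [/./],
  receiveShadwoExpressions: [/floor/i], // note: this option name ships misspelled in 0.4.4
});

// filterObjects now accepts a predicate in addition to a RegExp.
const visible = SceneTraversal.filterObjects(scene, (object) => object.visible);

// New in 0.4.4: Bounds can be built directly from an object, and the camera can
// fit its vertical FOV to a bounding box or a skinned mesh.
const bounds = new Bounds(scene);
camera.fitBoxVerticalFov(bounds);
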
package/dist/index.js CHANGED
@@ -1,31 +1,24 @@
- import { PerspectiveCamera, MathUtils, Box3, Vector3, Mesh, InstancedMesh, FrontSide, BufferAttribute, AnimationMixer, DirectionalLight, Spherical, RGBAFormat } from 'three';
+ import { PerspectiveCamera, MathUtils, Vector3, Box3, Mesh, InstancedMesh, FrontSide, BufferAttribute, AnimationMixer, DirectionalLight, Spherical, RGBAFormat } from 'three';

- /**
- * Default camera settings
- */
  const DEFAULT_HORIZONTAL_FOV = 90;
  const DEFAULT_VERTICAL_FOV = 90;
  const DEFAULT_ASPECT = 1;
  const DEFAULT_NEAR = 1;
  const DEFAULT_FAR = 1000;
+ const MIN_FOV = 1;
+ const MAX_FOV = 179;
  /**
- * BiFovCamera - A specialized PerspectiveCamera that supports independent horizontal and vertical FOV settings
- *
- * This camera extends Three.js PerspectiveCamera to provide better control over the field of view,
- * allowing separate horizontal and vertical FOV values. The camera automatically adjusts its projection
- * matrix based on the aspect ratio to maintain proper perspective.
- *
- * @extends PerspectiveCamera
+ * A camera that supports independent horizontal and vertical FOV settings.
+ * Extends Three.js PerspectiveCamera to allow separate control over horizontal
+ * and vertical fields of view.
  */
  class BiFovCamera extends PerspectiveCamera {
  /**
- * Creates a new BiFovCamera instance
- *
- * @param horizontalFov - Horizontal field of view in degrees (default: 90)
- * @param verticalFov - Vertical field of view in degrees (default: 90)
- * @param aspect - Aspect ratio (width/height) of the viewport (default: 1)
- * @param near - Near clipping plane distance (default: 1)
- * @param far - Far clipping plane distance (default: 1000)
+ * @param horizontalFov - Horizontal FOV in degrees (90° default)
+ * @param verticalFov - Vertical FOV in degrees (90° default)
+ * @param aspect - Width/height ratio (1 default)
+ * @param near - Near clipping plane (1 default)
+ * @param far - Far clipping plane (1000 default)
  */
  constructor(horizontalFov = DEFAULT_HORIZONTAL_FOV, verticalFov = DEFAULT_VERTICAL_FOV, aspect = DEFAULT_ASPECT, near = DEFAULT_NEAR, far = DEFAULT_FAR) {
  super(verticalFov, aspect, near, far);
@@ -33,47 +26,37 @@ class BiFovCamera extends PerspectiveCamera {
  this.verticalFovInternal = verticalFov;
  this.updateProjectionMatrix();
  }
- /**
- * Gets the horizontal field of view in degrees
- */
+ /** Current horizontal FOV in degrees */
  get horizontalFov() {
  return this.horizontalFovInternal;
  }
- /**
- * Gets the vertical field of view in degrees
- */
+ /** Current vertical FOV in degrees */
  get verticalFov() {
  return this.verticalFovInternal;
  }
- /**
- * Sets the horizontal field of view in degrees
- * @param value - The new horizontal FOV value
- */
+ /** Set horizontal FOV in degrees (clamped between 1° and 179°) */
  set horizontalFov(value) {
- this.horizontalFovInternal = MathUtils.clamp(value, 1, 179);
+ this.horizontalFovInternal = MathUtils.clamp(value, MIN_FOV, MAX_FOV);
  this.updateProjectionMatrix();
  }
- /**
- * Sets the vertical field of view in degrees
- * @param value - The new vertical FOV value
- */
+ /** Set vertical FOV in degrees (clamped between 1° and 179°) */
  set verticalFov(value) {
- this.verticalFovInternal = MathUtils.clamp(value, 1, 179);
+ this.verticalFovInternal = MathUtils.clamp(value, MIN_FOV, MAX_FOV);
  this.updateProjectionMatrix();
  }
  /**
- * Updates both horizontal and vertical FOV simultaneously
- * @param horizontal - New horizontal FOV in degrees
- * @param vertical - New vertical FOV in degrees
+ * Update both horizontal and vertical FOV
+ * @param horizontal - Horizontal FOV in degrees
+ * @param vertical - Vertical FOV in degrees
  */
  setFov(horizontal, vertical) {
- this.horizontalFovInternal = MathUtils.clamp(horizontal, 1, 179);
- this.verticalFovInternal = MathUtils.clamp(vertical, 1, 179);
+ this.horizontalFovInternal = MathUtils.clamp(horizontal, MIN_FOV, MAX_FOV);
+ this.verticalFovInternal = MathUtils.clamp(vertical, MIN_FOV, MAX_FOV);
  this.updateProjectionMatrix();
  }
  /**
- * Copies FOV settings from another BiFovCamera
- * @param source - The camera to copy settings from
+ * Copy FOV settings from another BiFovCamera
+ * @param source - Camera to copy from
  */
  copyFovSettings(source) {
  this.horizontalFovInternal = source.horizontalFov;
@@ -81,12 +64,12 @@ class BiFovCamera extends PerspectiveCamera {
  this.updateProjectionMatrix();
  }
  /**
- * Updates the projection matrix based on current FOV settings and aspect ratio
- * For aspect ratios >= 1 (landscape), horizontal FOV is preserved
- * For aspect ratios < 1 (portrait), vertical FOV is preserved
+ * Updates the projection matrix based on FOV settings and aspect ratio.
+ * In landscape: preserves horizontal FOV
+ * In portrait: preserves vertical FOV
  */
  updateProjectionMatrix() {
- if (this.aspect >= 1) {
+ if (this.aspect > 1) {
  // Landscape orientation: preserve horizontal FOV
  const radians = MathUtils.degToRad(this.horizontalFovInternal);
  this.fov = MathUtils.radToDeg(Math.atan(Math.tan(radians / 2) / this.aspect) * 2);
@@ -97,9 +80,7 @@ class BiFovCamera extends PerspectiveCamera {
  }
  super.updateProjectionMatrix();
  }
- /**
- * Returns the actual horizontal FOV after aspect ratio adjustments
- */
+ /** Get actual horizontal FOV after aspect ratio adjustments */
  getEffectiveHorizontalFov() {
  if (this.aspect >= 1) {
  return this.horizontalFovInternal;
@@ -107,9 +88,7 @@ class BiFovCamera extends PerspectiveCamera {
  const verticalRadians = MathUtils.degToRad(this.verticalFovInternal);
  return MathUtils.radToDeg(Math.atan(Math.tan(verticalRadians / 2) * this.aspect) * 2);
  }
- /**
- * Returns the actual vertical FOV after aspect ratio adjustments
- */
+ /** Get actual vertical FOV after aspect ratio adjustments */
  getEffectiveVerticalFov() {
  if (this.aspect < 1) {
  return this.verticalFovInternal;
@@ -117,9 +96,84 @@ class BiFovCamera extends PerspectiveCamera {
  const horizontalRadians = MathUtils.degToRad(this.horizontalFovInternal);
  return MathUtils.radToDeg(Math.atan(Math.tan(horizontalRadians / 2) / this.aspect) * 2);
  }
- /**
- * Creates a clone of this camera with the same properties
- */
+ fitPointsVerticalFov(vertices) {
+ const up = new Vector3(0, 1, 0).applyQuaternion(this.quaternion);
+ let maxVerticalAngle = 0;
+ for (const vertex of vertices) {
+ const vertexToCam = this.position.clone().sub(vertex);
+ const vertexDirection = vertexToCam.normalize();
+ const verticalAngle = Math.asin(Math.abs(vertexDirection.dot(up))) *
+ Math.sign(vertexDirection.dot(up));
+ if (Math.abs(verticalAngle) > maxVerticalAngle) {
+ maxVerticalAngle = Math.abs(verticalAngle);
+ }
+ }
+ const requiredFov = MathUtils.radToDeg(2 * maxVerticalAngle);
+ this.verticalFovInternal = MathUtils.clamp(requiredFov, MIN_FOV, MAX_FOV);
+ this.updateProjectionMatrix();
+ }
+ fitBoxVerticalFov(box) {
+ this.fitPointsVerticalFov([
+ new Vector3(box.min.x, box.min.y, box.min.z),
+ new Vector3(box.min.x, box.min.y, box.max.z),
+ new Vector3(box.min.x, box.max.y, box.min.z),
+ new Vector3(box.min.x, box.max.y, box.max.z),
+ new Vector3(box.max.x, box.min.y, box.min.z),
+ new Vector3(box.max.x, box.min.y, box.max.z),
+ new Vector3(box.max.x, box.max.y, box.min.z),
+ new Vector3(box.max.x, box.max.y, box.max.z),
+ ]);
+ }
+ fitSkinnedMeshVerticalFov(skinnedMesh) {
+ skinnedMesh.updateWorldMatrix(true, true);
+ skinnedMesh.skeleton.update();
+ const bakedGeometry = skinnedMesh.geometry;
+ const position = bakedGeometry.attributes["position"];
+ const target = new Vector3();
+ const points = [];
+ for (let i = 0; i < position.count; i++) {
+ target.fromBufferAttribute(position, i);
+ skinnedMesh.applyBoneTransform(i, target);
+ points.push(target.clone());
+ }
+ this.fitPointsVerticalFov(points);
+ }
+ lookAtCenterOfMass(skinnedMesh) {
+ skinnedMesh.updateWorldMatrix(true, true);
+ skinnedMesh.skeleton.update();
+ const bakedGeometry = skinnedMesh.geometry;
+ const position = bakedGeometry.attributes.position;
+ const target = new Vector3();
+ const points = [];
+ for (let i = 0; i < position.count; i++) {
+ target.fromBufferAttribute(position, i);
+ skinnedMesh.applyBoneTransform(i, target);
+ points.push(target.clone());
+ }
+ const findMainCluster = (points, iterations = 3) => {
+ if (points.length === 0) {
+ return new Vector3();
+ }
+ let center = points[Math.floor(points.length / 2)].clone();
+ for (let i = 0; i < iterations; i++) {
+ let total = new Vector3();
+ let count = 0;
+ for (const point of points) {
+ if (point.distanceTo(center) < point.distanceTo(total) ||
+ count === 0) {
+ total.add(point);
+ count++;
+ }
+ }
+ if (count > 0) {
+ center = total.divideScalar(count);
+ }
+ }
+ return center;
+ };
+ const centerOfMass = findMainCluster(points);
+ this.lookAt(centerOfMass);
+ }
  clone() {
  const camera = new BiFovCamera(this.horizontalFovInternal, this.verticalFovInternal, this.aspect, this.near, this.far);
  camera.copy(this, true);
@@ -127,44 +181,54 @@ class BiFovCamera extends PerspectiveCamera {
  }
  }

+ /**
+ * Box3 with additional convenience methods for width, height, depth, etc.
+ */
  class Bounds extends Box3 {
- constructor() {
- super(...arguments);
+ constructor(object) {
+ super();
+ /** Temporary vector for calculations */
  this.tempVector3A = new Vector3();
+ if (object) {
+ this.setFromObject(object);
+ }
  }
- /**
- * Gets the width (x-axis length) of the bounding box
- */
+ /** Width (x-axis length) */
  get width() {
  return this.max.x - this.min.x;
  }
- /**
- * Gets the height (y-axis length) of the bounding box
- */
+ /** Height (y-axis length) */
  get height() {
  return this.max.y - this.min.y;
  }
- /**
- * Gets the depth (z-axis length) of the bounding box
- */
+ /** Depth (z-axis length) */
  get depth() {
  return this.max.z - this.min.z;
  }
- /**
- * Gets the length of the box's diagonal
- */
+ /** Length of the box's diagonal */
  get diagonal() {
  return this.tempVector3A.subVectors(this.max, this.min).length();
  }
- /**
- * Gets the volume of the bounding box
- */
+ setFromSkinnedMesh(skinnedMesh) {
+ skinnedMesh.updateWorldMatrix(true, true);
+ skinnedMesh.skeleton.update();
+ const geometry = skinnedMesh.geometry;
+ const position = geometry.attributes["position"];
+ const target = new Vector3();
+ const points = [];
+ for (let i = 0; i < position.count; i++) {
+ target.fromBufferAttribute(position, i);
+ skinnedMesh.applyBoneTransform(i, target);
+ points.push(target.clone());
+ }
+ this.setFromPoints(points);
+ return this;
+ }
+ /** Volume (width * height * depth) */
  getVolume() {
  return this.width * this.height * this.depth;
  }
- /**
- * Gets the surface area of the bounding box
- */
+ /** Surface area (sum of all six faces) */
  getSurfaceArea() {
  const w = this.width;
  const h = this.height;
@@ -173,73 +237,53 @@ class Bounds extends Box3 {
  }
  }

+ const POSITION_COMPONENT_COUNT = 3;
+ const NORMAL_COMPONENT_COUNT = 3;
  /**
- * Utility class for comparing and hashing BufferGeometry instances with tolerance support.
+ * Internal utility to identify identical geometries.
+ * @internal
  */
  class GeometryHasher {
  /**
- * Generates a consistent hash for a BufferGeometry based on its contents and tolerance.
+ * Creates a hash for a geometry based on its vertex data.
+ * Vertices that differ by less than tolerance are considered the same.
  *
- * @param geometry - The geometry to hash
- * @param tolerance - Precision level for number comparison (values within tolerance are considered equal)
- * @returns A string hash that will be identical for geometrically equivalent geometries
+ * @param geometry - Geometry to hash
+ * @param tolerance - How close vertices need to be to count as identical
+ * @returns Hash string that's the same for matching geometries
+ * @internal
  */
- static getGeometryHash(geometry, tolerance = 1e-6) {
- const hashParts = [];
- // Process attributes
- const attributes = geometry.attributes;
- const attributeNames = Object.keys(attributes).sort(); // Sort for consistent order
- for (const name of attributeNames) {
- const attribute = attributes[name];
- hashParts.push(`${name}:${attribute.itemSize}:${this.getAttributeHash(attribute, tolerance)}`);
+ static getGeometryHash(geometry, tolerance) {
+ const position = geometry.attributes["position"];
+ const positionArray = position.array;
+ const positionHashParts = [];
+ // Sample vertex positions with tolerance
+ for (let i = 0; i < positionArray.length; i += POSITION_COMPONENT_COUNT) {
+ const x = Math.round(positionArray[i] / tolerance);
+ const y = Math.round(positionArray[i + 1] / tolerance);
+ const z = Math.round(positionArray[i + 2] / tolerance);
+ positionHashParts.push(`${x},${y},${z}`);
  }
- // Process index if present
- if (geometry.index) {
- hashParts.push(`index:${this.getAttributeHash(geometry.index, tolerance)}`);
+ // Hash normal data if available
+ const normal = geometry.attributes["normal"];
+ const normalHashParts = [];
+ const normalArray = normal.array;
+ for (let i = 0; i < normalArray.length; i += NORMAL_COMPONENT_COUNT) {
+ const x = Math.round(normalArray[i] / tolerance);
+ const y = Math.round(normalArray[i + 1] / tolerance);
+ const z = Math.round(normalArray[i + 2] / tolerance);
+ normalHashParts.push(`${x},${y},${z}`);
  }
- return hashParts.join("|");
- }
- /**
- * Compares two BufferGeometry instances for approximate equality.
- * Early exit if UUIDs match (same object or cloned geometry).
- */
- static compare(firstGeometry, secondGeometry, tolerance = 1e-6) {
- if (firstGeometry.uuid === secondGeometry.uuid) {
- return true;
- }
- // Use hash comparison for consistent results
- return (this.getGeometryHash(firstGeometry, tolerance) ===
- this.getGeometryHash(secondGeometry, tolerance));
- }
- /**
- * Generates a hash for a buffer attribute with tolerance.
- */
- static getAttributeHash(attribute, tolerance) {
- const array = attribute.array;
- const itemSize = "itemSize" in attribute ? attribute.itemSize : 1;
- const hashParts = [];
- // Group values by their "tolerance buckets"
- for (let i = 0; i < array.length; i += itemSize) {
- const itemValues = [];
- for (let j = 0; j < itemSize; j++) {
- const val = array[i + j];
- // Round to nearest tolerance multiple to group similar values
- itemValues.push(Math.round(val / tolerance) * tolerance);
- }
- hashParts.push(itemValues.join(","));
- }
- return hashParts.join(";");
- }
- /**
- * Compares two buffer attributes with tolerance.
- */
- static compareBufferAttributes(firstAttribute, secondAttribute, tolerance) {
- return (this.getAttributeHash(firstAttribute, tolerance) ===
- this.getAttributeHash(secondAttribute, tolerance));
+ // Combine position and normal hashes
+ const positionHash = positionHashParts.join("|");
+ const normalHash = normalHashParts.join("|");
+ return `${positionHash}#${normalHash}`;
  }
  }

+ /** Find and modify objects in a Three.js scene */
  class SceneTraversal {
+ /** Find first object with exact name match */
  static getObjectByName(object, name) {
  if (object.name === name) {
  return object;
@@ -252,6 +296,7 @@ class SceneTraversal {
  }
  return null;
  }
+ /** Find first material with exact name match */
  static getMaterialByName(object, name) {
  if (object instanceof Mesh) {
  if (Array.isArray(object.material)) {
@@ -273,6 +318,7 @@ class SceneTraversal {
  }
  return null;
  }
+ /** Process all objects of a specific type */
  static enumerateObjectsByType(object, type, callback) {
  if (object instanceof type) {
  callback(object);
@@ -281,6 +327,7 @@ class SceneTraversal {
  SceneTraversal.enumerateObjectsByType(child, type, callback);
  }
  }
+ /** Process all materials in meshes */
  static enumerateMaterials(object, callback) {
  if (object instanceof Mesh) {
  if (Array.isArray(object.material)) {
@@ -296,16 +343,25 @@ class SceneTraversal {
  SceneTraversal.enumerateMaterials(child, callback);
  }
  }
- static filterObjects(object, name) {
+ /** Find all objects whose names match a pattern */
+ static filterObjects(object, filter) {
  let result = [];
- if (object.name && name.test(object.name)) {
- result.push(object);
+ if (typeof filter === "function") {
+ if (filter(object)) {
+ result.push(object);
+ }
+ }
+ else {
+ if (object.name && filter.test(object.name)) {
+ result.push(object);
+ }
  }
  for (const child of object.children) {
- result = result.concat(SceneTraversal.filterObjects(child, name));
+ result = result.concat(SceneTraversal.filterObjects(child, filter));
  }
  return result;
  }
+ /** Find all materials whose names match a pattern */
  static filterMaterials(object, name) {
  let result = [];
  if (object instanceof Mesh) {
@@ -327,41 +383,58 @@ class SceneTraversal {
  }
  return result;
  }
- static setShadowRecursive(object, castShadow = true, receiveShadow = true) {
+ /** Set shadow properties on meshes */
+ static setShadowRecursive(object, castShadow = true, receiveShadow = true, filter) {
  if (object instanceof Mesh || "isMesh" in object) {
  object.castShadow = castShadow;
  object.receiveShadow = receiveShadow;
  }
  for (const child of object.children) {
- SceneTraversal.setShadowRecursive(child, castShadow, receiveShadow);
+ SceneTraversal.setShadowRecursive(child, castShadow, receiveShadow, filter);
  }
  }
  }

+ const MIN_INSTANCE_COUNT = 2;
+ const DEFAULT_TOLERANCE = 1e-6;
+ /**
+ * Combines identical meshes into instanced versions for better performance.
+ * Meshes are considered identical if they share the same geometry and materials.
+ */
  class InstanceAssembler {
- static assemble(options) {
+ /**
+ * Find meshes that can be instanced and combine them.
+ * Only processes meshes that:
+ * - Have no children
+ * - Pass the filter function (if any)
+ * - Share geometry with at least one other mesh
+ *
+ * @param container - Object containing meshes to process
+ * @param options - Optional settings
+ */
+ static assemble(container, options = {}) {
  var _a, _b;
  const dictionary = new Map();
- const instancedMeshes = [];
- const tolerance = (_a = options.geometryTolerance) !== null && _a !== void 0 ? _a : 1e-6;
- const geometryHashCache = new Map();
- SceneTraversal.enumerateObjectsByType(options.container, Mesh, (child) => {
+ const instances = [];
+ const tolerance = (_a = options.geometryTolerance) !== null && _a !== void 0 ? _a : DEFAULT_TOLERANCE;
+ const geometryHashes = new Map();
+ SceneTraversal.enumerateObjectsByType(container, Mesh, (child) => {
  var _a;
  if (child.children.length === 0 &&
  (!options.filter || options.filter(child))) {
  const materials = Array.isArray(child.material)
  ? child.material
  : [child.material];
- let geometryHash = geometryHashCache.get(child.geometry.uuid);
+ let geometryHash = geometryHashes.get(child.geometry.uuid);
  if (!geometryHash) {
  geometryHash = GeometryHasher.getGeometryHash(child.geometry, tolerance);
- geometryHashCache.set(child.geometry.uuid, geometryHash);
+ geometryHashes.set(child.geometry.uuid, geometryHash);
  }
  const materialKey = materials.map((m) => m.uuid).join(",");
  const compositeKey = `${geometryHash}|${materialKey}`;
  const entry = (_a = dictionary.get(compositeKey)) !== null && _a !== void 0 ? _a : {
  meshes: [],
- materials: materials,
+ materials,
  castShadow: false,
  receiveShadow: false,
  };
@@ -376,7 +449,7 @@ class InstanceAssembler {
  }
  });
  for (const descriptor of dictionary.values()) {
- if (descriptor.meshes.length < 2) {
+ if (descriptor.meshes.length < MIN_INSTANCE_COUNT) {
  continue;
  }
  const { meshes, materials, castShadow, receiveShadow } = descriptor;
@@ -393,63 +466,74 @@ class InstanceAssembler {
  instancedMesh.userData[mesh.uuid] = mesh.userData;
  }
  instancedMesh.instanceMatrix.needsUpdate = true;
- instancedMeshes.push(instancedMesh);
+ instances.push(instancedMesh);
  for (const mesh of sortedMeshes) {
  (_b = mesh.parent) === null || _b === void 0 ? void 0 : _b.remove(mesh);
  }
  }
- if (instancedMeshes.length > 0) {
- options.container.add(...instancedMeshes);
+ if (instances.length > 0) {
+ container.add(...instances);
  }
  }
  }

+ /** Post-processes a scene based on name patterns */
  class SceneProcessor {
- static process(options) {
- const container = options.asset.clone();
- InstanceAssembler.assemble({ container: container });
+ /**
+ * Process a scene to set up materials and shadows.
+ *
+ * @param asset - Scene to process
+ * @param options - How to process the scene
+ * @returns Processed scene root objects
+ */
+ static process(asset, options) {
+ const container = options.cloneAsset !== false ? asset.clone() : asset;
+ if (options.assembleInstances !== false) {
+ InstanceAssembler.assemble(container);
+ }
  SceneTraversal.enumerateMaterials(container, (material) => {
- material.transparent = SceneProcessor.matchesAny(material.name, options.transparentMaterialNames);
- material.depthWrite = !SceneProcessor.matchesAny(material.name, options.noDepthWriteMaterialNames);
+ material.transparent = SceneProcessor.matchesAny(material.name, options.transparentMaterialExpressions);
+ material.depthWrite = !SceneProcessor.matchesAny(material.name, options.noDepthWriteMaterialExpressions);
  material.side = FrontSide;
  material.forceSinglePass = true;
  material.depthTest = true;
  });
  SceneTraversal.enumerateObjectsByType(container, Mesh, (child) => {
- child.castShadow = SceneProcessor.matchesAny(child.name, options.castShadowMeshNames);
- child.receiveShadow = SceneProcessor.matchesAny(child.name, options.receiveShadowMeshNames);
+ child.castShadow = SceneProcessor.matchesAny(child.name, options.castShadowExpressions);
+ child.receiveShadow = SceneProcessor.matchesAny(child.name, options.receiveShadwoExpressions);
  });
  return container.children;
  }
- static matchesAny(value, patterns = []) {
- return patterns.some((p) => typeof p === "string" ? value === p : p.test(value));
+ /** Does the string match any of the patterns? */
+ static matchesAny(value, expressions = []) {
+ return expressions.some((p) => p.test(value));
  }
  }

- /**
- * Utilities for baking poses and animations from SkinnedMesh into a regular static Mesh.
- */
+ /** Number of components per vertex */
+ const COMPONENT_COUNT = 3;
+ /** Convert skinned meshes to regular static meshes */
  class SkinnedMeshBaker {
  /**
- * Bakes the current pose of a SkinnedMesh into a regular geometry.
- * Transforms all vertices according to the current skeleton state.
+ * Convert a skinned mesh to a regular mesh in its current pose.
+ * The resulting mesh will have no bones but look identical.
  *
- * @param skinnedMesh - SkinnedMesh from which to bake the geometry
- * @returns A new Mesh with positions corresponding to the current bone positions
+ * @param skinnedMesh - Mesh to convert
+ * @returns Static mesh with baked vertex positions
  */
  static bakePose(skinnedMesh) {
  const bakedGeometry = skinnedMesh.geometry.clone();
  const position = bakedGeometry.attributes["position"];
- const newPositions = new Float32Array(position.count * 3);
+ const newPositions = new Float32Array(position.count * COMPONENT_COUNT);
  const target = new Vector3();
  for (let i = 0; i < position.count; i++) {
  target.fromBufferAttribute(position, i);
  skinnedMesh.applyBoneTransform(i, target);
- newPositions[i * 3 + 0] = target.x;
- newPositions[i * 3 + 1] = target.y;
- newPositions[i * 3 + 2] = target.z;
+ newPositions[i * COMPONENT_COUNT + 0] = target.x;
+ newPositions[i * COMPONENT_COUNT + 1] = target.y;
+ newPositions[i * COMPONENT_COUNT + 2] = target.z;
  }
- bakedGeometry.setAttribute("position", new BufferAttribute(newPositions, 3));
+ bakedGeometry.setAttribute("position", new BufferAttribute(newPositions, COMPONENT_COUNT));
  bakedGeometry.computeVertexNormals();
  bakedGeometry.deleteAttribute("skinIndex");
  bakedGeometry.deleteAttribute("skinWeight");
@@ -458,13 +542,13 @@ class SkinnedMeshBaker {
  return mesh;
  }
  /**
- * Bakes a SkinnedMesh in a specific pose derived from an AnimationClip at the given timestamp.
+ * Bake a single frame from an animation into a static mesh.
  *
- * @param armature - The parent object (typically an armature from GLTF) containing the bones
- * @param skinnedMesh - The SkinnedMesh to be baked
- * @param timeOffset - The animation time in seconds to set
- * @param clip - The animation clip
- * @returns A new Mesh with geometry matching the specified animation frame
+ * @param armature - Root object with bones (usually from GLTF)
+ * @param skinnedMesh - Mesh to convert
+ * @param timeOffset - Time in seconds within the animation
+ * @param clip - Animation to get the pose from
+ * @returns Static mesh with baked vertex positions
  */
  static bakeAnimationFrame(armature, skinnedMesh, timeOffset, clip) {
  const mixer = new AnimationMixer(armature);
@@ -477,22 +561,16 @@ class SkinnedMeshBaker {
  }
  }

- /**
- * Sun extends Three.js DirectionalLight to provide a specialized light source that simulates
- * sunlight with advanced positioning and shadow controls.
- *
- * Features:
- * - Spherical coordinate control (distance, elevation, azimuth)
- * - Automatic shadow map configuration based on bounding boxes
- * - HDR environment map-based positioning
- * - Efficient temporary vector management for calculations
- *
- * @extends DirectionalLight
- */
+ const RGBA_CHANNEL_COUNT = 4;
+ const RGB_CHANNEL_COUNT = 3;
+ const LUMINANCE_R = 0.2126;
+ const LUMINANCE_G = 0.7152;
+ const LUMINANCE_B = 0.0722;
+ /** A directional light with spherical positioning controls */
  class Sun extends DirectionalLight {
  constructor() {
  super(...arguments);
- // Temporary vectors for calculations to avoid garbage collection
+ /** Internal vectors to avoid garbage collection */
  this.tempVector3D0 = new Vector3();
  this.tempVector3D1 = new Vector3();
  this.tempVector3D2 = new Vector3();
@@ -504,57 +582,34 @@ class Sun extends DirectionalLight {
  this.tempBox3 = new Box3();
  this.tempSpherical = new Spherical();
  }
- /**
- * Gets the distance of the sun from its target (radius in spherical coordinates)
- * @returns The distance in world units
- */
+ /** Distance from the light to its target */
  get distance() {
  return this.position.length();
  }
- /**
- * Gets the elevation angle of the sun (phi in spherical coordinates)
- * @returns The elevation in radians
- */
+ /** Vertical angle from the ground in radians */
  get elevation() {
  return this.tempSpherical.setFromVector3(this.position).phi;
  }
- /**
- * Gets the azimuth angle of the sun (theta in spherical coordinates)
- * @returns The azimuth in radians
- */
+ /** Horizontal angle around the target in radians */
  get azimuth() {
  return this.tempSpherical.setFromVector3(this.position).theta;
  }
- /**
- * Sets the distance of the sun from its target while maintaining current angles
- * @param value - The new distance in world units
- */
+ /** Set distance while keeping current angles */
  set distance(value) {
  this.tempSpherical.setFromVector3(this.position);
  this.position.setFromSphericalCoords(value, this.tempSpherical.phi, this.tempSpherical.theta);
  }
- /**
- * Sets the elevation angle of the sun while maintaining current distance and azimuth
- * @param value - The new elevation in radians
- */
+ /** Set elevation while keeping current distance and azimuth */
  set elevation(value) {
  this.tempSpherical.setFromVector3(this.position);
  this.position.setFromSphericalCoords(this.tempSpherical.radius, value, this.tempSpherical.theta);
  }
- /**
- * Sets the azimuth angle of the sun while maintaining current distance and elevation
- * @param value - The new azimuth in radians
- */
+ /** Set azimuth while keeping current distance and elevation */
  set azimuth(value) {
  this.tempSpherical.setFromVector3(this.position);
  this.position.setFromSphericalCoords(this.tempSpherical.radius, this.tempSpherical.phi, value);
  }
- /**
- * Configures the shadow camera's frustum to encompass the given bounding box
- * This ensures that shadows are cast correctly for objects within the box
- *
- * @param box3 - The bounding box to configure shadows for
- */
+ /** Configure shadows to cover all corners of a bounding box */
  setShadowMapFromBox3(box3) {
  const camera = this.shadow.camera;
  this.target.updateWorldMatrix(true, false);
@@ -584,33 +639,26 @@ class Sun extends DirectionalLight {
  camera.updateWorldMatrix(true, false);
  camera.updateProjectionMatrix();
  }
- /**
- * Sets the sun's direction based on the brightest point in an HDR texture
- * This is useful for matching the sun's position to an environment map
- *
- * @param texture - The HDR texture to analyze (must be loaded and have valid image data)
- * @param distance - Optional distance to position the sun from its target (default: 1)
- */
+ /** Set light direction based on brightest point in an HDR texture */
  setDirectionFromHDR(texture, distance = 1) {
  const data = texture.image.data;
  const width = texture.image.width;
  const height = texture.image.height;
  let maxLuminance = 0;
  let maxIndex = 0;
- // Find the brightest pixel in the HDR texture
- const step = texture.format === RGBAFormat ? 4 : 3;
+ // Find brightest pixel
+ const step = texture.format === RGBAFormat ? RGBA_CHANNEL_COUNT : RGB_CHANNEL_COUNT;
  for (let i = 0; i < data.length; i += step) {
  const r = data[i];
  const g = data[i + 1];
  const b = data[i + 2];
- // Calculate luminance using the Rec. 709 coefficients
- const luminance = 0.2126 * r + 0.7152 * g + 0.0722 * b;
+ const luminance = LUMINANCE_R * r + LUMINANCE_G * g + LUMINANCE_B * b;
  if (luminance > maxLuminance) {
  maxLuminance = luminance;
  maxIndex = i;
  }
  }
- // Convert pixel coordinates to spherical coordinates
+ // Convert to spherical coordinates
  const pixelIndex = maxIndex / step;
  const x = pixelIndex % width;
  const y = Math.floor(pixelIndex / width);