three-zoo 0.4.2 → 0.4.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +110 -0
- package/dist/BiFovCamera.d.ts +23 -45
- package/dist/Bounds.d.ts +12 -18
- package/dist/GeometryHasher.d.ts +9 -19
- package/dist/InstanceAssembler.d.ts +21 -6
- package/dist/SceneProcessor.d.ts +24 -9
- package/dist/SceneTraversal.d.ts +11 -3
- package/dist/SkinnedMeshBaker.d.ts +11 -13
- package/dist/Sun.d.ts +10 -49
- package/dist/index.js +172 -223
- package/dist/index.js.map +1 -1
- package/dist/index.min.js +1 -1
- package/dist/index.min.js.map +1 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
@@ -1,31 +1,24 @@
 import { PerspectiveCamera, MathUtils, Box3, Vector3, Mesh, InstancedMesh, FrontSide, BufferAttribute, AnimationMixer, DirectionalLight, Spherical, RGBAFormat } from 'three';
 
-/**
- * Default camera settings
- */
 const DEFAULT_HORIZONTAL_FOV = 90;
 const DEFAULT_VERTICAL_FOV = 90;
 const DEFAULT_ASPECT = 1;
 const DEFAULT_NEAR = 1;
 const DEFAULT_FAR = 1000;
+const MIN_FOV = 1;
+const MAX_FOV = 179;
 /**
- *
- *
- *
- * allowing separate horizontal and vertical FOV values. The camera automatically adjusts its projection
- * matrix based on the aspect ratio to maintain proper perspective.
- *
- * @extends PerspectiveCamera
+ * A camera that supports independent horizontal and vertical FOV settings.
+ * Extends Three.js PerspectiveCamera to allow separate control over horizontal
+ * and vertical fields of view.
  */
 class BiFovCamera extends PerspectiveCamera {
     /**
-     *
-     *
-     * @param
-     * @param
-     * @param
-     * @param near - Near clipping plane distance (default: 1)
-     * @param far - Far clipping plane distance (default: 1000)
+     * @param horizontalFov - Horizontal FOV in degrees (90° default)
+     * @param verticalFov - Vertical FOV in degrees (90° default)
+     * @param aspect - Width/height ratio (1 default)
+     * @param near - Near clipping plane (1 default)
+     * @param far - Far clipping plane (1000 default)
      */
     constructor(horizontalFov = DEFAULT_HORIZONTAL_FOV, verticalFov = DEFAULT_VERTICAL_FOV, aspect = DEFAULT_ASPECT, near = DEFAULT_NEAR, far = DEFAULT_FAR) {
         super(verticalFov, aspect, near, far);
@@ -33,47 +26,37 @@ class BiFovCamera extends PerspectiveCamera {
         this.verticalFovInternal = verticalFov;
         this.updateProjectionMatrix();
     }
-    /**
-     * Gets the horizontal field of view in degrees
-     */
+    /** Current horizontal FOV in degrees */
     get horizontalFov() {
         return this.horizontalFovInternal;
     }
-    /**
-     * Gets the vertical field of view in degrees
-     */
+    /** Current vertical FOV in degrees */
     get verticalFov() {
         return this.verticalFovInternal;
    }
-    /**
-     * Sets the horizontal field of view in degrees
-     * @param value - The new horizontal FOV value
-     */
+    /** Set horizontal FOV in degrees (clamped between 1° and 179°) */
     set horizontalFov(value) {
-        this.horizontalFovInternal = MathUtils.clamp(value,
+        this.horizontalFovInternal = MathUtils.clamp(value, MIN_FOV, MAX_FOV);
         this.updateProjectionMatrix();
     }
-    /**
-     * Sets the vertical field of view in degrees
-     * @param value - The new vertical FOV value
-     */
+    /** Set vertical FOV in degrees (clamped between 1° and 179°) */
     set verticalFov(value) {
-        this.verticalFovInternal = MathUtils.clamp(value,
+        this.verticalFovInternal = MathUtils.clamp(value, MIN_FOV, MAX_FOV);
         this.updateProjectionMatrix();
     }
     /**
-     *
-     * @param horizontal -
-     * @param vertical -
+     * Update both horizontal and vertical FOV
+     * @param horizontal - Horizontal FOV in degrees
+     * @param vertical - Vertical FOV in degrees
      */
     setFov(horizontal, vertical) {
-        this.horizontalFovInternal = MathUtils.clamp(horizontal,
-        this.verticalFovInternal = MathUtils.clamp(vertical,
+        this.horizontalFovInternal = MathUtils.clamp(horizontal, MIN_FOV, MAX_FOV);
+        this.verticalFovInternal = MathUtils.clamp(vertical, MIN_FOV, MAX_FOV);
         this.updateProjectionMatrix();
     }
     /**
-     *
-     * @param source -
+     * Copy FOV settings from another BiFovCamera
+     * @param source - Camera to copy from
      */
     copyFovSettings(source) {
         this.horizontalFovInternal = source.horizontalFov;
@@ -81,9 +64,9 @@ class BiFovCamera extends PerspectiveCamera {
         this.updateProjectionMatrix();
     }
     /**
-     * Updates the projection matrix based on
-     *
-     *
+     * Updates the projection matrix based on FOV settings and aspect ratio.
+     * In landscape: preserves horizontal FOV
+     * In portrait: preserves vertical FOV
      */
     updateProjectionMatrix() {
         if (this.aspect >= 1) {
@@ -97,9 +80,7 @@ class BiFovCamera extends PerspectiveCamera {
         }
         super.updateProjectionMatrix();
     }
-    /**
-     * Returns the actual horizontal FOV after aspect ratio adjustments
-     */
+    /** Get actual horizontal FOV after aspect ratio adjustments */
     getEffectiveHorizontalFov() {
         if (this.aspect >= 1) {
             return this.horizontalFovInternal;
@@ -107,9 +88,7 @@ class BiFovCamera extends PerspectiveCamera {
         const verticalRadians = MathUtils.degToRad(this.verticalFovInternal);
         return MathUtils.radToDeg(Math.atan(Math.tan(verticalRadians / 2) * this.aspect) * 2);
     }
-    /**
-     * Returns the actual vertical FOV after aspect ratio adjustments
-     */
+    /** Get actual vertical FOV after aspect ratio adjustments */
     getEffectiveVerticalFov() {
         if (this.aspect < 1) {
             return this.verticalFovInternal;
@@ -117,9 +96,7 @@ class BiFovCamera extends PerspectiveCamera {
         const horizontalRadians = MathUtils.degToRad(this.horizontalFovInternal);
         return MathUtils.radToDeg(Math.atan(Math.tan(horizontalRadians / 2) / this.aspect) * 2);
     }
-    /**
-     * Creates a clone of this camera with the same properties
-     */
+    /** Create a clone of this camera */
     clone() {
         const camera = new BiFovCamera(this.horizontalFovInternal, this.verticalFovInternal, this.aspect, this.near, this.far);
         camera.copy(this, true);
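For reference, a minimal usage sketch of the BiFovCamera API touched above. The class and method names come from this diff; the `three-zoo` import path is an assumption, since only the dist output is shown here.

```ts
import { BiFovCamera } from 'three-zoo'; // assumed export path

// Horizontal and vertical FOV are controlled independently.
const camera = new BiFovCamera(100, 60, window.innerWidth / window.innerHeight);

// 0.4.3 clamps the setters to the new MIN_FOV/MAX_FOV range (1°–179°).
camera.horizontalFov = 200;   // stored as 179
camera.setFov(45, 30);        // sets both and refreshes the projection matrix

// Effective values account for the current aspect ratio.
console.log(camera.getEffectiveHorizontalFov(), camera.getEffectiveVerticalFov());
```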
@@ -127,44 +104,39 @@ class BiFovCamera extends PerspectiveCamera {
     }
 }
 
+/**
+ * Box3 with additional convenience methods for width, height, depth, etc.
+ */
 class Bounds extends Box3 {
-    constructor() {
-        super(
+    constructor(object) {
+        super();
+        /** Temporary vector for calculations */
         this.tempVector3A = new Vector3();
+        if (object) {
+            this.setFromObject(object);
+        }
     }
-    /**
-     * Gets the width (x-axis length) of the bounding box
-     */
+    /** Width (x-axis length) */
     get width() {
         return this.max.x - this.min.x;
     }
-    /**
-     * Gets the height (y-axis length) of the bounding box
-     */
+    /** Height (y-axis length) */
     get height() {
         return this.max.y - this.min.y;
     }
-    /**
-     * Gets the depth (z-axis length) of the bounding box
-     */
+    /** Depth (z-axis length) */
     get depth() {
         return this.max.z - this.min.z;
     }
-    /**
-     * Gets the length of the box's diagonal
-     */
+    /** Length of the box's diagonal */
     get diagonal() {
         return this.tempVector3A.subVectors(this.max, this.min).length();
     }
-    /**
-     * Gets the volume of the bounding box
-     */
+    /** Volume (width * height * depth) */
     getVolume() {
         return this.width * this.height * this.depth;
     }
-    /**
-     * Gets the surface area of the bounding box
-     */
+    /** Surface area (sum of all six faces) */
     getSurfaceArea() {
         const w = this.width;
         const h = this.height;
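A short sketch of the new optional `Bounds` constructor argument introduced in this hunk (import path assumed, as above):

```ts
import { BoxGeometry, Mesh, MeshStandardMaterial } from 'three';
import { Bounds } from 'three-zoo'; // assumed export path

const mesh = new Mesh(new BoxGeometry(2, 1, 3), new MeshStandardMaterial());

// 0.4.3 accepts an object directly and calls setFromObject() for you;
// 0.4.2 required constructing an empty box first.
const bounds = new Bounds(mesh);
console.log(bounds.width, bounds.height, bounds.depth, bounds.diagonal, bounds.getVolume());
```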
@@ -173,73 +145,53 @@ class Bounds extends Box3 {
     }
 }
 
+const POSITION_COMPONENT_COUNT = 3;
+const NORMAL_COMPONENT_COUNT = 3;
 /**
- *
+ * Internal utility to identify identical geometries.
+ * @internal
  */
 class GeometryHasher {
     /**
-     *
+     * Creates a hash for a geometry based on its vertex data.
+     * Vertices that differ by less than tolerance are considered the same.
      *
-     * @param geometry -
-     * @param tolerance -
-     * @returns
-
-    static getGeometryHash(geometry, tolerance = 1e-6) {
-        const hashParts = [];
-        // Process attributes
-        const attributes = geometry.attributes;
-        const attributeNames = Object.keys(attributes).sort(); // Sort for consistent order
-        for (const name of attributeNames) {
-            const attribute = attributes[name];
-            hashParts.push(`${name}:${attribute.itemSize}:${this.getAttributeHash(attribute, tolerance)}`);
-        }
-        // Process index if present
-        if (geometry.index) {
-            hashParts.push(`index:${this.getAttributeHash(geometry.index, tolerance)}`);
-        }
-        return hashParts.join("|");
-    }
-    /**
-     * Compares two BufferGeometry instances for approximate equality.
-     * Early exit if UUIDs match (same object or cloned geometry).
+     * @param geometry - Geometry to hash
+     * @param tolerance - How close vertices need to be to count as identical
+     * @returns Hash string that's the same for matching geometries
+     * @internal
      */
-    static
-
-
+    static getGeometryHash(geometry, tolerance) {
+        const position = geometry.attributes["position"];
+        const positionArray = position.array;
+        const positionHashParts = [];
+        // Sample vertex positions with tolerance
+        for (let i = 0; i < positionArray.length; i += POSITION_COMPONENT_COUNT) {
+            const x = Math.round(positionArray[i] / tolerance);
+            const y = Math.round(positionArray[i + 1] / tolerance);
+            const z = Math.round(positionArray[i + 2] / tolerance);
+            positionHashParts.push(`${x},${y},${z}`);
         }
-        //
-
-
-
-
-
-
-
-
-        const itemSize = "itemSize" in attribute ? attribute.itemSize : 1;
-        const hashParts = [];
-        // Group values by their "tolerance buckets"
-        for (let i = 0; i < array.length; i += itemSize) {
-            const itemValues = [];
-            for (let j = 0; j < itemSize; j++) {
-                const val = array[i + j];
-                // Round to nearest tolerance multiple to group similar values
-                itemValues.push(Math.round(val / tolerance) * tolerance);
-            }
-            hashParts.push(itemValues.join(","));
+        // Hash normal data if available
+        const normal = geometry.attributes["normal"];
+        const normalHashParts = [];
+        const normalArray = normal.array;
+        for (let i = 0; i < normalArray.length; i += NORMAL_COMPONENT_COUNT) {
+            const x = Math.round(normalArray[i] / tolerance);
+            const y = Math.round(normalArray[i + 1] / tolerance);
+            const z = Math.round(normalArray[i + 2] / tolerance);
+            normalHashParts.push(`${x},${y},${z}`);
         }
-
-
-
-
-     */
-    static compareBufferAttributes(firstAttribute, secondAttribute, tolerance) {
-        return (this.getAttributeHash(firstAttribute, tolerance) ===
-            this.getAttributeHash(secondAttribute, tolerance));
+        // Combine position and normal hashes
+        const positionHash = positionHashParts.join("|");
+        const normalHash = normalHashParts.join("|");
+        return `${positionHash}#${normalHash}`;
     }
 }
 
+/** Find and modify objects in a Three.js scene */
 class SceneTraversal {
+    /** Find first object with exact name match */
     static getObjectByName(object, name) {
         if (object.name === name) {
             return object;
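The rewritten hash no longer walks every attribute; it buckets position and normal components with `Math.round(value / tolerance)`. A standalone illustration of that bucketing (not package API; `GeometryHasher` is marked `@internal`):

```ts
const tolerance = 1e-6;
const bucket = (v: number) => Math.round(v / tolerance);

// Values that differ by much less than the tolerance usually fall into the
// same bucket, so geometries that differ only by float noise hash identically.
console.log(bucket(0.1000001) === bucket(0.1000004)); // true
console.log(bucket(0.1) === bucket(0.2));             // false
// Caveat of any rounding scheme: two values straddling a bucket boundary can
// still land in different buckets even when they are closer than the tolerance.
```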
@@ -252,6 +204,7 @@ class SceneTraversal {
         }
         return null;
     }
+    /** Find first material with exact name match */
     static getMaterialByName(object, name) {
         if (object instanceof Mesh) {
             if (Array.isArray(object.material)) {
@@ -273,6 +226,7 @@ class SceneTraversal {
         }
         return null;
     }
+    /** Process all objects of a specific type */
     static enumerateObjectsByType(object, type, callback) {
         if (object instanceof type) {
             callback(object);
@@ -281,6 +235,7 @@ class SceneTraversal {
             SceneTraversal.enumerateObjectsByType(child, type, callback);
         }
     }
+    /** Process all materials in meshes */
     static enumerateMaterials(object, callback) {
         if (object instanceof Mesh) {
             if (Array.isArray(object.material)) {
@@ -296,6 +251,7 @@ class SceneTraversal {
             SceneTraversal.enumerateMaterials(child, callback);
         }
     }
+    /** Find all objects whose names match a pattern */
     static filterObjects(object, name) {
         let result = [];
         if (object.name && name.test(object.name)) {
@@ -306,6 +262,7 @@ class SceneTraversal {
         }
         return result;
     }
+    /** Find all materials whose names match a pattern */
     static filterMaterials(object, name) {
         let result = [];
         if (object instanceof Mesh) {
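A quick sketch of the SceneTraversal helpers documented in these hunks (import path assumed):

```ts
import { MeshStandardMaterial, Scene } from 'three';
import { SceneTraversal } from 'three-zoo'; // assumed export path

const scene = new Scene(); // placeholder for a loaded scene graph

const door = SceneTraversal.getObjectByName(scene, 'Door');    // exact match or null
const glass = SceneTraversal.filterMaterials(scene, /glass/i); // regex match
SceneTraversal.enumerateMaterials(scene, (material) => {
  if (material instanceof MeshStandardMaterial) {
    material.envMapIntensity = 0.5;
  }
});
```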
@@ -327,41 +284,58 @@ class SceneTraversal {
         }
         return result;
     }
-
+    /** Set shadow properties on meshes */
+    static setShadowRecursive(object, castShadow = true, receiveShadow = true, filter) {
         if (object instanceof Mesh || "isMesh" in object) {
             object.castShadow = castShadow;
             object.receiveShadow = receiveShadow;
         }
         for (const child of object.children) {
-            SceneTraversal.setShadowRecursive(child, castShadow, receiveShadow);
+            SceneTraversal.setShadowRecursive(child, castShadow, receiveShadow, filter);
         }
     }
 }
 
+const MIN_INSTANCE_COUNT = 2;
+const DEFAULT_TOLERANCE = 1e-6;
+/**
+ * Combines identical meshes into instanced versions for better performance.
+ * Meshes are considered identical if they share the same geometry and materials.
+ */
 class InstanceAssembler {
-
+    /**
+     * Find meshes that can be instanced and combine them.
+     * Only processes meshes that:
+     * - Have no children
+     * - Pass the filter function (if any)
+     * - Share geometry with at least one other mesh
+     *
+     * @param container - Object containing meshes to process
+     * @param options - Optional settings
+     */
+    static assemble(container, options = {}) {
         var _a, _b;
         const dictionary = new Map();
-        const
-        const tolerance = (_a = options.geometryTolerance) !== null && _a !== void 0 ? _a :
-        const
-        SceneTraversal.enumerateObjectsByType(
+        const instances = [];
+        const tolerance = (_a = options.geometryTolerance) !== null && _a !== void 0 ? _a : DEFAULT_TOLERANCE;
+        const geometryHashes = new Map();
+        SceneTraversal.enumerateObjectsByType(container, Mesh, (child) => {
            var _a;
            if (child.children.length === 0 &&
                (!options.filter || options.filter(child))) {
                const materials = Array.isArray(child.material)
                    ? child.material
                    : [child.material];
-                let geometryHash =
+                let geometryHash = geometryHashes.get(child.geometry.uuid);
                if (!geometryHash) {
                    geometryHash = GeometryHasher.getGeometryHash(child.geometry, tolerance);
-
+                    geometryHashes.set(child.geometry.uuid, geometryHash);
                }
                const materialKey = materials.map((m) => m.uuid).join(",");
                const compositeKey = `${geometryHash}|${materialKey}`;
                const entry = (_a = dictionary.get(compositeKey)) !== null && _a !== void 0 ? _a : {
                    meshes: [],
-                    materials
+                    materials,
                    castShadow: false,
                    receiveShadow: false,
                };
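A sketch of the new `InstanceAssembler.assemble` entry point; the `filter` and `geometryTolerance` options are the ones visible in this hunk, and the import path and filter signature are assumptions:

```ts
import { Mesh, Scene } from 'three';
import { InstanceAssembler } from 'three-zoo'; // assumed export path

const scene = new Scene();
// ...populate the scene with meshes, some of which share geometry and materials...

InstanceAssembler.assemble(scene, {
  geometryTolerance: 1e-6,                            // defaults to DEFAULT_TOLERANCE (1e-6)
  filter: (mesh: Mesh) => !mesh.name.startsWith('ui_'), // leave filtered meshes untouched
});
// Groups smaller than MIN_INSTANCE_COUNT (2) are skipped; the rest are replaced
// by InstancedMesh objects that are added back to the container.
```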
@@ -376,7 +350,7 @@ class InstanceAssembler {
             }
         });
         for (const descriptor of dictionary.values()) {
-            if (descriptor.meshes.length <
+            if (descriptor.meshes.length < MIN_INSTANCE_COUNT) {
                 continue;
             }
             const { meshes, materials, castShadow, receiveShadow } = descriptor;
@@ -393,63 +367,74 @@ class InstanceAssembler {
                 instancedMesh.userData[mesh.uuid] = mesh.userData;
             }
             instancedMesh.instanceMatrix.needsUpdate = true;
-
+            instances.push(instancedMesh);
             for (const mesh of sortedMeshes) {
                 (_b = mesh.parent) === null || _b === void 0 ? void 0 : _b.remove(mesh);
             }
         }
-        if (
-
+        if (instances.length > 0) {
+            container.add(...instances);
         }
     }
 }
 
+/** Post-processes a scene based on name patterns */
 class SceneProcessor {
-
-
-
+    /**
+     * Process a scene to set up materials and shadows.
+     *
+     * @param asset - Scene to process
+     * @param options - How to process the scene
+     * @returns Processed scene root objects
+     */
+    static process(asset, options) {
+        const container = options.cloneAsset !== false ? asset.clone() : asset;
+        if (options.assembleInstances !== false) {
+            InstanceAssembler.assemble(container);
+        }
         SceneTraversal.enumerateMaterials(container, (material) => {
-            material.transparent = SceneProcessor.matchesAny(material.name, options.
-            material.depthWrite = !SceneProcessor.matchesAny(material.name, options.
+            material.transparent = SceneProcessor.matchesAny(material.name, options.transparentMaterialExpressions);
+            material.depthWrite = !SceneProcessor.matchesAny(material.name, options.noDepthWriteMaterialExpressions);
             material.side = FrontSide;
             material.forceSinglePass = true;
             material.depthTest = true;
         });
         SceneTraversal.enumerateObjectsByType(container, Mesh, (child) => {
-            child.castShadow = SceneProcessor.matchesAny(child.name, options.
-            child.receiveShadow = SceneProcessor.matchesAny(child.name, options.
+            child.castShadow = SceneProcessor.matchesAny(child.name, options.castShadowExpressions);
+            child.receiveShadow = SceneProcessor.matchesAny(child.name, options.receiveShadwoExpressions);
         });
         return container.children;
     }
-
-
+    /** Does the string match any of the patterns? */
+    static matchesAny(value, expressions = []) {
+        return expressions.some((p) => p.test(value));
     }
 }
 
-/**
-
- */
+/** Number of components per vertex */
+const COMPONENT_COUNT = 3;
+/** Convert skinned meshes to regular static meshes */
 class SkinnedMeshBaker {
     /**
-     *
-     *
+     * Convert a skinned mesh to a regular mesh in its current pose.
+     * The resulting mesh will have no bones but look identical.
      *
-     * @param skinnedMesh -
-     * @returns
+     * @param skinnedMesh - Mesh to convert
+     * @returns Static mesh with baked vertex positions
      */
     static bakePose(skinnedMesh) {
         const bakedGeometry = skinnedMesh.geometry.clone();
         const position = bakedGeometry.attributes["position"];
-        const newPositions = new Float32Array(position.count *
+        const newPositions = new Float32Array(position.count * COMPONENT_COUNT);
         const target = new Vector3();
         for (let i = 0; i < position.count; i++) {
             target.fromBufferAttribute(position, i);
             skinnedMesh.applyBoneTransform(i, target);
-            newPositions[i *
-            newPositions[i *
-            newPositions[i *
+            newPositions[i * COMPONENT_COUNT + 0] = target.x;
+            newPositions[i * COMPONENT_COUNT + 1] = target.y;
+            newPositions[i * COMPONENT_COUNT + 2] = target.z;
         }
-        bakedGeometry.setAttribute("position", new BufferAttribute(newPositions,
+        bakedGeometry.setAttribute("position", new BufferAttribute(newPositions, COMPONENT_COUNT));
         bakedGeometry.computeVertexNormals();
         bakedGeometry.deleteAttribute("skinIndex");
         bakedGeometry.deleteAttribute("skinWeight");
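For orientation, a sketch of `SceneProcessor.process` using the option names visible in this hunk (option shapes and import path are assumptions; note that the receive-shadow option is spelled `receiveShadwoExpressions` in this version of the source):

```ts
import { Group } from 'three';
import { SceneProcessor } from 'three-zoo'; // assumed export path

const asset = new Group(); // placeholder for a loaded asset root

const roots = SceneProcessor.process(asset, {
  cloneAsset: true,                          // default behaviour: work on a clone
  assembleInstances: true,                   // default behaviour: run InstanceAssembler first
  transparentMaterialExpressions: [/glass/i],
  noDepthWriteMaterialExpressions: [/decal/i],
  castShadowExpressions: [/.+/],
  receiveShadwoExpressions: [/floor/i],      // option name as spelled in the source
});
```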
@@ -458,13 +443,13 @@ class SkinnedMeshBaker {
         return mesh;
     }
     /**
-     *
+     * Bake a single frame from an animation into a static mesh.
      *
-     * @param armature -
-     * @param skinnedMesh -
-     * @param timeOffset -
-     * @param clip -
-     * @returns
+     * @param armature - Root object with bones (usually from GLTF)
+     * @param skinnedMesh - Mesh to convert
+     * @param timeOffset - Time in seconds within the animation
+     * @param clip - Animation to get the pose from
+     * @returns Static mesh with baked vertex positions
      */
     static bakeAnimationFrame(armature, skinnedMesh, timeOffset, clip) {
         const mixer = new AnimationMixer(armature);
@@ -477,22 +462,16 @@ class SkinnedMeshBaker {
     }
 }
 
-/**
-
-
-
-
-
- * - Automatic shadow map configuration based on bounding boxes
- * - HDR environment map-based positioning
- * - Efficient temporary vector management for calculations
- *
- * @extends DirectionalLight
- */
+const RGBA_CHANNEL_COUNT = 4;
+const RGB_CHANNEL_COUNT = 3;
+const LUMINANCE_R = 0.2126;
+const LUMINANCE_G = 0.7152;
+const LUMINANCE_B = 0.0722;
+/** A directional light with spherical positioning controls */
 class Sun extends DirectionalLight {
     constructor() {
         super(...arguments);
-
+        /** Internal vectors to avoid garbage collection */
         this.tempVector3D0 = new Vector3();
         this.tempVector3D1 = new Vector3();
         this.tempVector3D2 = new Vector3();
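A sketch of the two SkinnedMeshBaker entry points documented above (import path assumed; the declared variables stand in for a parsed asset):

```ts
import { AnimationClip, Object3D, SkinnedMesh } from 'three';
import { SkinnedMeshBaker } from 'three-zoo'; // assumed export path

declare const armature: Object3D;   // root object containing the bones
declare const skinned: SkinnedMesh; // mesh to convert
declare const clip: AnimationClip;  // animation to sample

// Freeze the current pose into a static mesh (bones removed, appearance kept).
const posed = SkinnedMeshBaker.bakePose(skinned);

// Or sample a pose at a given time (seconds) within an animation clip.
const frame = SkinnedMeshBaker.bakeAnimationFrame(armature, skinned, 0.5, clip);
```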
@@ -504,57 +483,34 @@ class Sun extends DirectionalLight {
         this.tempBox3 = new Box3();
         this.tempSpherical = new Spherical();
     }
-    /**
-     * Gets the distance of the sun from its target (radius in spherical coordinates)
-     * @returns The distance in world units
-     */
+    /** Distance from the light to its target */
     get distance() {
         return this.position.length();
     }
-    /**
-     * Gets the elevation angle of the sun (phi in spherical coordinates)
-     * @returns The elevation in radians
-     */
+    /** Vertical angle from the ground in radians */
     get elevation() {
         return this.tempSpherical.setFromVector3(this.position).phi;
     }
-    /**
-     * Gets the azimuth angle of the sun (theta in spherical coordinates)
-     * @returns The azimuth in radians
-     */
+    /** Horizontal angle around the target in radians */
     get azimuth() {
         return this.tempSpherical.setFromVector3(this.position).theta;
     }
-    /**
-     * Sets the distance of the sun from its target while maintaining current angles
-     * @param value - The new distance in world units
-     */
+    /** Set distance while keeping current angles */
     set distance(value) {
         this.tempSpherical.setFromVector3(this.position);
         this.position.setFromSphericalCoords(value, this.tempSpherical.phi, this.tempSpherical.theta);
     }
-    /**
-     * Sets the elevation angle of the sun while maintaining current distance and azimuth
-     * @param value - The new elevation in radians
-     */
+    /** Set elevation while keeping current distance and azimuth */
     set elevation(value) {
         this.tempSpherical.setFromVector3(this.position);
         this.position.setFromSphericalCoords(this.tempSpherical.radius, value, this.tempSpherical.theta);
     }
-    /**
-     * Sets the azimuth angle of the sun while maintaining current distance and elevation
-     * @param value - The new azimuth in radians
-     */
+    /** Set azimuth while keeping current distance and elevation */
     set azimuth(value) {
         this.tempSpherical.setFromVector3(this.position);
         this.position.setFromSphericalCoords(this.tempSpherical.radius, this.tempSpherical.phi, value);
     }
-    /**
-     * Configures the shadow camera's frustum to encompass the given bounding box
-     * This ensures that shadows are cast correctly for objects within the box
-     *
-     * @param box3 - The bounding box to configure shadows for
-     */
+    /** Configure shadows to cover all corners of a bounding box */
     setShadowMapFromBox3(box3) {
         const camera = this.shadow.camera;
         this.target.updateWorldMatrix(true, false);
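The spherical accessors above can be driven directly; a short sketch (Sun forwards its constructor arguments to DirectionalLight via `super(...arguments)`; import path assumed):

```ts
import { MathUtils } from 'three';
import { Sun } from 'three-zoo'; // assumed export path

const sun = new Sun(0xffffff, 3);
sun.distance = 50;                        // radius from the target
sun.elevation = MathUtils.degToRad(35);   // phi in spherical coordinates (radians)
sun.azimuth = MathUtils.degToRad(120);    // theta in spherical coordinates (radians)
```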
@@ -584,33 +540,26 @@ class Sun extends DirectionalLight {
         camera.updateWorldMatrix(true, false);
         camera.updateProjectionMatrix();
     }
-    /**
-     * Sets the sun's direction based on the brightest point in an HDR texture
-     * This is useful for matching the sun's position to an environment map
-     *
-     * @param texture - The HDR texture to analyze (must be loaded and have valid image data)
-     * @param distance - Optional distance to position the sun from its target (default: 1)
-     */
+    /** Set light direction based on brightest point in an HDR texture */
     setDirectionFromHDR(texture, distance = 1) {
         const data = texture.image.data;
         const width = texture.image.width;
         const height = texture.image.height;
         let maxLuminance = 0;
         let maxIndex = 0;
-        // Find
-        const step = texture.format === RGBAFormat ?
+        // Find brightest pixel
+        const step = texture.format === RGBAFormat ? RGBA_CHANNEL_COUNT : RGB_CHANNEL_COUNT;
         for (let i = 0; i < data.length; i += step) {
             const r = data[i];
             const g = data[i + 1];
             const b = data[i + 2];
-
-            const luminance = 0.2126 * r + 0.7152 * g + 0.0722 * b;
+            const luminance = LUMINANCE_R * r + LUMINANCE_G * g + LUMINANCE_B * b;
             if (luminance > maxLuminance) {
                 maxLuminance = luminance;
                 maxIndex = i;
             }
         }
-        // Convert
+        // Convert to spherical coordinates
         const pixelIndex = maxIndex / step;
         const x = pixelIndex % width;
         const y = Math.floor(pixelIndex / width);