three-gpu-pathtracer 0.0.1 → 0.0.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36) hide show
  1. package/LICENSE +21 -21
  2. package/README.md +678 -386
  3. package/build/index.module.js +3166 -1690
  4. package/build/index.module.js.map +1 -1
  5. package/build/index.umd.cjs +3176 -1692
  6. package/build/index.umd.cjs.map +1 -1
  7. package/package.json +60 -57
  8. package/src/core/DynamicPathTracingSceneGenerator.js +106 -0
  9. package/src/core/MaterialReducer.js +256 -256
  10. package/src/core/PathTracingRenderer.js +125 -28
  11. package/src/core/PathTracingSceneGenerator.js +52 -46
  12. package/src/core/PhysicalCamera.js +28 -0
  13. package/src/index.js +25 -21
  14. package/src/materials/AlphaDisplayMaterial.js +48 -0
  15. package/src/materials/AmbientOcclusionMaterial.js +197 -197
  16. package/src/materials/BlendMaterial.js +67 -0
  17. package/src/materials/LambertPathTracingMaterial.js +285 -285
  18. package/src/materials/MaterialBase.js +56 -56
  19. package/src/materials/PhysicalPathTracingMaterial.js +684 -370
  20. package/src/shader/shaderEnvMapSampling.js +67 -0
  21. package/src/shader/shaderGGXFunctions.js +108 -107
  22. package/src/shader/shaderMaterialSampling.js +345 -333
  23. package/src/shader/shaderStructs.js +131 -30
  24. package/src/shader/shaderUtils.js +246 -140
  25. package/src/uniforms/EquirectHdrInfoUniform.js +263 -0
  26. package/src/uniforms/MaterialsTexture.js +251 -0
  27. package/src/uniforms/PhysicalCameraUniform.js +36 -0
  28. package/src/uniforms/RenderTarget2DArray.js +93 -80
  29. package/src/utils/BlurredEnvMapGenerator.js +113 -0
  30. package/src/utils/GeometryPreparationUtils.js +194 -172
  31. package/src/utils/UVUnwrapper.js +101 -101
  32. package/src/workers/PathTracingSceneWorker.js +40 -0
  33. package/src/uniforms/EquirectPdfUniform.js +0 -132
  34. package/src/uniforms/MaterialStructArrayUniform.js +0 -18
  35. package/src/uniforms/MaterialStructUniform.js +0 -94
  36. package/src/viewers/PathTracingViewer.js +0 -259
@@ -1,25 +1,163 @@
1
- import { Color, Vector2, WebGLRenderTarget, RGBAFormat, FloatType, BufferAttribute, WebGLArrayRenderTarget, UnsignedByteType, LinearFilter, RepeatWrapping, MeshBasicMaterial, NoToneMapping, ShaderMaterial, Matrix4, Matrix3 } from 'three';
1
+ import { ShaderMaterial, NoBlending, NormalBlending, Color, Vector2, WebGLRenderTarget, RGBAFormat, FloatType, BufferAttribute, Mesh, BufferGeometry, PerspectiveCamera, DataTexture, ClampToEdgeWrapping, DoubleSide, BackSide, FrontSide, WebGLArrayRenderTarget, UnsignedByteType, LinearFilter, RepeatWrapping, MeshBasicMaterial, NoToneMapping, Source, HalfFloatType, DataUtils, RedFormat, PMREMGenerator, EquirectangularReflectionMapping, Matrix4, Matrix3 } from 'three';
2
2
  import { FullScreenQuad } from 'three/examples/jsm/postprocessing/Pass.js';
3
- import { SAH, MeshBVHUniformStruct, FloatVertexAttributeTexture, UIntVertexAttributeTexture, shaderStructs, shaderIntersectFunction } from 'three-mesh-bvh';
4
- import { GenerateMeshBVHWorker } from 'three-mesh-bvh/src/workers/GenerateMeshBVHWorker.js';
3
+ import { StaticGeometryGenerator, SAH, MeshBVH, MeshBVHUniformStruct, FloatVertexAttributeTexture, UIntVertexAttributeTexture, shaderStructs, shaderIntersectFunction } from 'three-mesh-bvh';
5
4
  import { mergeVertices, mergeBufferGeometries } from 'three/examples/jsm/utils/BufferGeometryUtils.js';
6
5
 
6
+ class MaterialBase extends ShaderMaterial {
7
+
8
+ constructor( shader ) {
9
+
10
+ super( shader );
11
+
12
+ for ( const key in this.uniforms ) {
13
+
14
+ Object.defineProperty( this, key, {
15
+
16
+ get() {
17
+
18
+ return this.uniforms[ key ].value;
19
+
20
+ },
21
+
22
+ set( v ) {
23
+
24
+ this.uniforms[ key ].value = v;
25
+
26
+ }
27
+
28
+ } );
29
+
30
+ }
31
+
32
+ }
33
+
34
+ // sets the given named define value and sets "needsUpdate" to true if it's different
35
+ setDefine( name, value = undefined ) {
36
+
37
+ if ( value === undefined || value === null ) {
38
+
39
+ if ( name in this.defines ) {
40
+
41
+ delete this.defines[ name ];
42
+ this.needsUpdate = true;
43
+
44
+ }
45
+
46
+ } else {
47
+
48
+ if ( this.defines[ name ] !== value ) {
49
+
50
+ this.defines[ name ] = value;
51
+ this.needsUpdate = true;
52
+
53
+ }
54
+
55
+ }
56
+
57
+ }
58
+
59
+ }
60
+
61
+ class BlendMaterial extends MaterialBase {
62
+
63
+ constructor( parameters ) {
64
+
65
+ super( {
66
+
67
+ blending: NoBlending,
68
+
69
+ uniforms: {
70
+
71
+ target1: { value: null },
72
+ target2: { value: null },
73
+ opacity: { value: 1.0 },
74
+
75
+ },
76
+
77
+ vertexShader: /* glsl */`
78
+
79
+ varying vec2 vUv;
80
+
81
+ void main() {
82
+
83
+ vUv = uv;
84
+ gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
85
+
86
+ }`,
87
+
88
+ fragmentShader: /* glsl */`
89
+
90
+ uniform float opacity;
91
+
92
+ uniform sampler2D target1;
93
+ uniform sampler2D target2;
94
+
95
+ varying vec2 vUv;
96
+
97
+ void main() {
98
+
99
+ vec4 color1 = texture2D( target1, vUv );
100
+ vec4 color2 = texture2D( target2, vUv );
101
+
102
+ float invOpacity = 1.0 - opacity;
103
+ float totalAlpha = color1.a * invOpacity + color2.a * opacity;
104
+
105
+ if ( color1.a != 0.0 || color2.a != 0.0 ) {
106
+
107
+ gl_FragColor.rgb = color1.rgb * ( invOpacity * color1.a / totalAlpha ) + color2.rgb * ( opacity * color2.a / totalAlpha );
108
+ gl_FragColor.a = totalAlpha;
109
+
110
+ } else {
111
+
112
+ gl_FragColor = vec4( 0.0 );
113
+
114
+ }
115
+
116
+ }`
117
+
118
+ } );
119
+
120
+ this.setValues( parameters );
121
+
122
+ }
123
+
124
+ }
125
+
7
126
  function* renderTask() {
8
127
 
9
- const { _fsQuad, _renderer, target, camera, material } = this;
128
+ const {
129
+ _renderer,
130
+ _fsQuad,
131
+ _blendQuad,
132
+ _primaryTarget,
133
+ _blendTargets,
134
+ alpha,
135
+ camera,
136
+ material,
137
+ } = this;
138
+
139
+ const blendMaterial = _blendQuad.material;
140
+ let [ blendTarget1, blendTarget2 ] = _blendTargets;
141
+
10
142
  while ( true ) {
11
143
 
12
- material.opacity = 1 / ( this.samples + 1 );
13
- material.seed ++;
144
+ if ( alpha ) {
145
+
146
+ blendMaterial.opacity = 1 / ( this.samples + 1 );
147
+ material.blending = NoBlending;
148
+ material.opacity = 1;
149
+
150
+ } else {
151
+
152
+ material.opacity = 1 / ( this.samples + 1 );
153
+ material.blending = NormalBlending;
154
+
155
+ }
14
156
 
15
- const w = target.width;
16
- const h = target.height;
17
- camera.setViewOffset(
18
- w, h,
19
- Math.random() - 0.5, Math.random() - 0.5,
20
- w, h,
21
- );
22
- camera.updateProjectionMatrix();
157
+ const w = _primaryTarget.width;
158
+ const h = _primaryTarget.height;
159
+ material.resolution.set( w, h );
160
+ material.seed ++;
23
161
 
24
162
  const tx = this.tiles.x || 1;
25
163
  const ty = this.tiles.y || 1;
@@ -31,12 +169,15 @@ function* renderTask() {
31
169
 
32
170
  material.cameraWorldMatrix.copy( camera.matrixWorld );
33
171
  material.invProjectionMatrix.copy( camera.projectionMatrixInverse );
172
+ // An orthographic projection matrix will always have the bottom right element == 1
173
+ // And a perspective projection matrix will always have the bottom right element == 0
174
+ material.isOrthographicCamera = camera.projectionMatrix.elements[ 15 ] > 0;
34
175
 
35
176
  const ogRenderTarget = _renderer.getRenderTarget();
36
177
  const ogAutoClear = _renderer.autoClear;
37
178
 
38
179
  // three.js renderer takes values relative to the current pixel ratio
39
- _renderer.setRenderTarget( target );
180
+ _renderer.setRenderTarget( _primaryTarget );
40
181
  _renderer.setScissorTest( true );
41
182
  _renderer.setScissor(
42
183
  dprInv * Math.ceil( x * w / tx ),
@@ -50,6 +191,17 @@ function* renderTask() {
50
191
  _renderer.setRenderTarget( ogRenderTarget );
51
192
  _renderer.autoClear = ogAutoClear;
52
193
 
194
+ if ( alpha ) {
195
+
196
+ blendMaterial.target1 = blendTarget1.texture;
197
+ blendMaterial.target2 = _primaryTarget.texture;
198
+
199
+ _renderer.setRenderTarget( blendTarget2 );
200
+ _blendQuad.render( _renderer );
201
+ _renderer.setRenderTarget( ogRenderTarget );
202
+
203
+ }
204
+
53
205
  this.samples += ( 1 / totalTiles );
54
206
 
55
207
  yield;
@@ -58,6 +210,8 @@ function* renderTask() {
58
210
 
59
211
  }
60
212
 
213
+ [ blendTarget1, blendTarget2 ] = [ blendTarget2, blendTarget1 ];
214
+
61
215
  this.samples = Math.round( this.samples );
62
216
 
63
217
  }
@@ -79,43 +233,104 @@ class PathTracingRenderer {
79
233
 
80
234
  }
81
235
 
236
+ get target() {
237
+
238
+ return this._alpha ? this._blendTargets[ 1 ] : this._primaryTarget;
239
+
240
+ }
241
+
242
+ set alpha( v ) {
243
+
244
+ if ( ! v ) {
245
+
246
+ this._blendTargets[ 0 ].dispose();
247
+ this._blendTargets[ 1 ].dispose();
248
+
249
+ }
250
+
251
+ this._alpha = v;
252
+ this.reset();
253
+
254
+ }
255
+
256
+ get alpha() {
257
+
258
+ return this._alpha;
259
+
260
+ }
261
+
82
262
  constructor( renderer ) {
83
263
 
84
264
  this.camera = null;
85
265
  this.tiles = new Vector2( 1, 1 );
86
- this.target = new WebGLRenderTarget( 1, 1, {
87
- format: RGBAFormat,
88
- type: FloatType,
89
- } );
266
+
90
267
  this.samples = 0;
91
268
  this.stableNoise = false;
92
269
  this._renderer = renderer;
270
+ this._alpha = false;
93
271
  this._fsQuad = new FullScreenQuad( null );
272
+ this._blendQuad = new FullScreenQuad( new BlendMaterial() );
94
273
  this._task = null;
95
274
 
275
+ this._primaryTarget = new WebGLRenderTarget( 1, 1, {
276
+ format: RGBAFormat,
277
+ type: FloatType,
278
+ } );
279
+ this._blendTargets = [
280
+ new WebGLRenderTarget( 1, 1, {
281
+ format: RGBAFormat,
282
+ type: FloatType,
283
+ } ),
284
+ new WebGLRenderTarget( 1, 1, {
285
+ format: RGBAFormat,
286
+ type: FloatType,
287
+ } ),
288
+ ];
289
+
96
290
  }
97
291
 
98
292
  setSize( w, h ) {
99
293
 
100
- this.target.setSize( w, h );
294
+ this._primaryTarget.setSize( w, h );
295
+ this._blendTargets[ 0 ].setSize( w, h );
296
+ this._blendTargets[ 1 ].setSize( w, h );
101
297
  this.reset();
102
298
 
103
299
  }
104
300
 
301
+ dispose() {
302
+
303
+ this._primaryTarget.dispose();
304
+ this._blendTargets[ 0 ].dispose();
305
+ this._blendTargets[ 1 ].dispose();
306
+
307
+ this._fsQuad.dispose();
308
+ this._blendQuad.dispose();
309
+ this._task = null;
310
+
311
+ }
312
+
105
313
  reset() {
106
314
 
107
- const renderer = this._renderer;
108
- const target = this.target;
109
- const ogRenderTarget = renderer.getRenderTarget();
110
- const ogClearAlpha = renderer.getClearAlpha();
111
- renderer.getClearColor( ogClearColor );
315
+ const { _renderer, _primaryTarget, _blendTargets } = this;
316
+ const ogRenderTarget = _renderer.getRenderTarget();
317
+ const ogClearAlpha = _renderer.getClearAlpha();
318
+ _renderer.getClearColor( ogClearColor );
112
319
 
113
- renderer.setRenderTarget( target );
114
- renderer.setClearColor( 0, 0 );
115
- renderer.clearColor();
320
+ _renderer.setRenderTarget( _primaryTarget );
321
+ _renderer.setClearColor( 0, 0 );
322
+ _renderer.clearColor();
323
+
324
+ _renderer.setRenderTarget( _blendTargets[ 0 ] );
325
+ _renderer.setClearColor( 0, 0 );
326
+ _renderer.clearColor();
116
327
 
117
- renderer.setClearColor( ogClearColor, ogClearAlpha );
118
- renderer.setRenderTarget( ogRenderTarget );
328
+ _renderer.setRenderTarget( _blendTargets[ 1 ] );
329
+ _renderer.setClearColor( 0, 0 );
330
+ _renderer.clearColor();
331
+
332
+ _renderer.setClearColor( ogClearColor, ogClearAlpha );
333
+ _renderer.setRenderTarget( ogRenderTarget );
119
334
 
120
335
  this.samples = 0;
121
336
  this._task = null;
@@ -142,1684 +357,2945 @@ class PathTracingRenderer {
142
357
 
143
358
  }
144
359
 
145
- function getGroupMaterialIndicesAttribute( geometry, materials, allMaterials ) {
146
-
147
- if ( ! Array.isArray( materials ) ) {
148
-
149
- materials = [ materials ];
150
-
151
- }
152
-
153
- const vertCount = geometry.attributes.position.count;
154
- const materialArray = new Uint8Array( vertCount );
155
- let groups = geometry.groups;
156
- if ( groups.length === 0 ) {
157
-
158
- groups = [ { count: vertCount, start: 0, materialIndex: 0 } ];
159
-
160
- }
161
-
162
- for ( let i = 0; i < groups.length; i ++ ) {
163
-
164
- const group = groups[ i ];
165
- const { count, start } = group;
166
- const endCount = Math.min( count, vertCount - start );
167
- const mat = materials[ group.materialIndex ];
168
- const materialIndex = allMaterials.indexOf( mat );
169
-
170
- for ( let j = 0; j < endCount; j ++ ) {
171
-
172
- materialArray[ start + j ] = materialIndex;
173
-
174
- }
175
-
176
- }
177
-
178
- return new BufferAttribute( materialArray, 1, false );
179
-
180
- }
181
-
182
- function mergeMeshes( meshes, options = {} ) {
183
-
184
- options = { attributes: null, cloneGeometry: true, ...options };
185
-
186
- const transformedGeometry = [];
187
- const materialSet = new Set();
188
- for ( let i = 0, l = meshes.length; i < l; i ++ ) {
189
-
190
- // save any materials
191
- const mesh = meshes[ i ];
192
- if ( mesh.visible === false ) continue;
193
-
194
- if ( Array.isArray( mesh.material ) ) {
195
-
196
- mesh.material.forEach( m => materialSet.add( m ) );
197
-
198
- } else {
199
-
200
- materialSet.add( mesh.material );
201
-
202
- }
203
-
204
- }
205
-
206
- const materials = Array.from( materialSet );
207
- for ( let i = 0, l = meshes.length; i < l; i ++ ) {
208
-
209
- // ensure the matrix world is up to date
210
- const mesh = meshes[ i ];
211
- if ( mesh.visible === false ) continue;
212
-
213
- mesh.updateMatrixWorld();
214
-
215
- // apply the matrix world to the geometry
216
- const originalGeometry = meshes[ i ].geometry;
217
- let geometry = options.cloneGeometry ? originalGeometry.clone() : originalGeometry;
218
- geometry.applyMatrix4( mesh.matrixWorld );
219
-
220
- const attrs = options.attributes;
221
- if ( ! geometry.attributes.normal && ( attrs && attrs.includes( 'normal' ) ) ) {
222
-
223
- geometry.computeVertexNormals();
224
-
225
- }
226
-
227
- if ( ! geometry.attributes.uv && ( attrs && attrs.includes( 'uv' ) ) ) {
228
-
229
- const vertCount = geometry.attributes.position.count;
230
- geometry.setAttribute( 'uv', new BufferAttribute( new Float32Array( vertCount * 2 ), 2, false ) );
231
-
232
- }
233
-
234
- if ( ! geometry.attributes.tangent && ( attrs && attrs.includes( 'tangent' ) ) ) {
235
-
236
- if ( mesh.material.normalMap ) {
237
-
238
- // computeTangents requires an index buffer
239
- if ( geometry.index === null ) {
240
-
241
- geometry = mergeVertices( geometry );
242
-
243
- }
244
-
245
- geometry.computeTangents();
246
-
247
- } else {
248
-
249
- const vertCount = geometry.attributes.position.count;
250
- geometry.setAttribute( 'tangent', new BufferAttribute( new Float32Array( vertCount * 4 ), 4, false ) );
251
-
252
- }
253
-
254
- }
255
-
256
- if ( ! geometry.index ) {
257
-
258
- // TODO: compute a typed array
259
- const indexCount = geometry.attributes.position.count;
260
- const array = new Array( indexCount );
261
- for ( let i = 0; i < indexCount; i ++ ) {
262
-
263
- array[ i ] = i;
264
-
265
- }
266
-
267
- geometry.setIndex( array );
268
-
269
- }
270
-
271
- // trim any unneeded attributes
272
- if ( options.attributes ) {
273
-
274
- for ( const key in geometry.attributes ) {
275
-
276
- if ( ! options.attributes.includes( key ) ) {
277
-
278
- geometry.deleteAttribute( key );
279
-
280
- }
281
-
282
- }
283
-
284
- }
285
-
286
- // create the material index attribute
287
- const materialIndexAttribute = getGroupMaterialIndicesAttribute( geometry, mesh.material, materials );
288
- geometry.setAttribute( 'materialIndex', materialIndexAttribute );
289
-
290
- transformedGeometry.push( geometry );
291
-
292
- }
293
-
294
- const textureSet = new Set();
295
- materials.forEach( material => {
296
-
297
- for ( const key in material ) {
298
-
299
- const value = material[ key ];
300
- if ( value && value.isTexture ) {
301
-
302
- textureSet.add( value );
303
-
304
- }
305
-
306
- }
307
-
308
- } );
309
-
310
- const geometry = mergeBufferGeometries( transformedGeometry, false );
311
- const textures = Array.from( textureSet );
312
- return { geometry, materials, textures };
313
-
314
- }
360
+ function getGroupMaterialIndicesAttribute( geometry, materials, allMaterials ) {
315
361
 
316
- class PathTracingSceneGenerator {
317
-
318
- constructor() {
319
-
320
- this.bvhGenerator = new GenerateMeshBVHWorker();
321
-
322
- }
323
-
324
- async generate( scene, options = {} ) {
325
-
326
- const { bvhGenerator } = this;
327
- const meshes = [];
328
-
329
- scene.traverse( c => {
330
-
331
- if ( c.isMesh ) {
332
-
333
- meshes.push( c );
334
-
335
- }
336
-
337
- } );
338
-
339
- const { geometry, materials, textures } = mergeMeshes( meshes, { attributes: [ 'position', 'normal', 'tangent', 'uv' ] } );
340
- const bvhPromise = bvhGenerator.generate( geometry, { strategy: SAH, ...options, maxLeafTris: 1 } );
341
-
342
- return {
343
- scene,
344
- materials,
345
- textures,
346
- bvh: await bvhPromise,
347
- };
348
-
349
- }
350
-
351
- dispose() {
352
-
353
- this.bvhGenerator.terminate();
354
-
355
- }
356
-
357
- }
362
+ const indexAttr = geometry.index;
363
+ const posAttr = geometry.attributes.position;
364
+ const vertCount = posAttr.count;
365
+ const materialArray = new Uint8Array( vertCount );
366
+ const totalCount = indexAttr ? indexAttr.count : vertCount;
367
+ let groups = geometry.groups;
368
+ if ( groups.length === 0 ) {
369
+
370
+ groups = [ { count: totalCount, start: 0, materialIndex: 0 } ];
371
+
372
+ }
373
+
374
+ for ( let i = 0; i < groups.length; i ++ ) {
375
+
376
+ const group = groups[ i ];
377
+ const start = group.start;
378
+ const count = group.count;
379
+ const endCount = Math.min( count, totalCount - start );
380
+
381
+ const mat = Array.isArray( materials ) ? materials[ group.materialIndex ] : materials;
382
+ const materialIndex = allMaterials.indexOf( mat );
383
+
384
+ for ( let j = 0; j < endCount; j ++ ) {
385
+
386
+ let index = start + j;
387
+ if ( indexAttr ) {
388
+
389
+ index = indexAttr.getX( index );
390
+
391
+ }
392
+
393
+ materialArray[ index ] = materialIndex;
394
+
395
+ }
396
+
397
+ }
398
+
399
+ return new BufferAttribute( materialArray, 1, false );
358
400
 
359
- // https://github.com/gkjohnson/webxr-sandbox/blob/main/skinned-mesh-batching/src/MaterialReducer.js
360
-
361
- function isTypedArray( arr ) {
362
-
363
- return arr.buffer instanceof ArrayBuffer && 'BYTES_PER_ELEMENT' in arr;
364
-
365
- }
366
-
367
- class MaterialReducer {
368
-
369
- constructor() {
370
-
371
- const ignoreKeys = new Set();
372
- ignoreKeys.add( 'uuid' );
373
-
374
- this.ignoreKeys = ignoreKeys;
375
- this.shareTextures = true;
376
- this.textures = [];
377
- this.materials = [];
378
-
379
- }
380
-
381
- areEqual( objectA, objectB ) {
382
-
383
- const keySet = new Set();
384
- const traverseSet = new Set();
385
- const ignoreKeys = this.ignoreKeys;
386
-
387
- const traverse = ( a, b ) => {
388
-
389
- if ( a === b ) {
390
-
391
- return true;
392
-
393
- }
394
-
395
- if ( a && b && a instanceof Object && b instanceof Object ) {
396
-
397
- if ( traverseSet.has( a ) || traverseSet.has( b ) ) {
398
-
399
- throw new Error( 'MaterialReducer: Material is recursive.' );
400
-
401
- }
402
-
403
- const aIsElement = a instanceof Element;
404
- const bIsElement = b instanceof Element;
405
- if ( aIsElement || bIsElement ) {
406
-
407
- if ( aIsElement !== bIsElement || ! ( a instanceof Image ) || ! ( b instanceof Image ) ) {
408
-
409
- return false;
410
-
411
- }
412
-
413
- return a.src === b.src;
414
-
415
- }
416
-
417
- const aIsImageBitmap = a instanceof ImageBitmap;
418
- const bIsImageBitmap = b instanceof ImageBitmap;
419
- if ( aIsImageBitmap || bIsImageBitmap ) {
420
-
421
- return false;
422
-
423
- }
424
-
425
- if ( a.equals ) {
426
-
427
- return a.equals( b );
428
-
429
- }
430
-
431
- const aIsTypedArray = isTypedArray( a );
432
- const bIsTypedArray = isTypedArray( b );
433
- if ( aIsTypedArray || bIsTypedArray ) {
434
-
435
- if ( aIsTypedArray !== bIsTypedArray || a.constructor !== b.constructor || a.length !== b.length ) {
436
-
437
- return false;
438
-
439
- }
440
-
441
- for ( let i = 0, l = a.length; i < l; i ++ ) {
442
-
443
- if ( a[ i ] !== b[ i ] ) return false;
444
-
445
- }
446
-
447
- return true;
448
-
449
- }
450
-
451
- traverseSet.add( a );
452
- traverseSet.add( b );
453
-
454
- keySet.clear();
455
- for ( const key in a ) {
456
-
457
- if ( ! a.hasOwnProperty( key ) || a[ key ] instanceof Function || ignoreKeys.has( key ) ) {
458
-
459
- continue;
460
-
461
- }
462
-
463
- keySet.add( key );
464
-
465
- }
466
-
467
- for ( const key in b ) {
468
-
469
- if ( ! b.hasOwnProperty( key ) || b[ key ] instanceof Function || ignoreKeys.has( key ) ) {
470
-
471
- continue;
472
-
473
- }
474
-
475
- keySet.add( key );
476
-
477
- }
478
-
479
- const keys = Array.from( keySet.values() );
480
- let result = true;
481
- for ( const i in keys ) {
482
-
483
- const key = keys[ i ];
484
- if ( ignoreKeys.has( key ) ) {
485
-
486
- continue;
487
-
488
- }
489
-
490
- result = traverse( a[ key ], b[ key ] );
491
- if ( ! result ) {
492
-
493
- break;
494
-
495
- }
496
-
497
- }
498
-
499
- traverseSet.delete( a );
500
- traverseSet.delete( b );
501
- return result;
502
-
503
- }
504
-
505
- return false;
506
-
507
- };
508
-
509
- return traverse( objectA, objectB );
510
-
511
- }
512
-
513
- process( object ) {
514
-
515
- const { textures, materials } = this;
516
- let replaced = 0;
517
-
518
- const processMaterial = material => {
519
-
520
- // Check if another material matches this one
521
- let foundMaterial = null;
522
- for ( const i in materials ) {
523
-
524
- const otherMaterial = materials[ i ];
525
- if ( this.areEqual( material, otherMaterial ) ) {
526
-
527
- foundMaterial = otherMaterial;
528
-
529
- }
530
-
531
- }
532
-
533
- if ( foundMaterial ) {
534
-
535
- replaced ++;
536
- return foundMaterial;
537
-
538
- } else {
539
-
540
- materials.push( material );
541
-
542
- if ( this.shareTextures ) {
543
-
544
- // See if there's another texture that matches the ones on this material
545
- for ( const key in material ) {
546
-
547
- if ( ! material.hasOwnProperty( key ) ) continue;
548
-
549
- const value = material[ key ];
550
- if ( value && value.isTexture && value.image instanceof Image ) {
551
-
552
- let foundTexture = null;
553
- for ( const i in textures ) {
554
-
555
- const texture = textures[ i ];
556
- if ( this.areEqual( texture, value ) ) {
557
-
558
- foundTexture = texture;
559
- break;
560
-
561
- }
562
-
563
- }
564
-
565
- if ( foundTexture ) {
566
-
567
- material[ key ] = foundTexture;
568
-
569
- } else {
570
-
571
- textures.push( value );
572
-
573
- }
574
-
575
- }
576
-
577
- }
578
-
579
- }
580
-
581
- return material;
582
-
583
- }
584
-
585
- };
586
-
587
- object.traverse( c => {
588
-
589
- if ( c.isMesh && c.material ) {
590
-
591
- const material = c.material;
592
- if ( Array.isArray( material ) ) {
593
-
594
- for ( let i = 0; i < material.length; i ++ ) {
595
-
596
- material[ i ] = processMaterial( material[ i ] );
597
-
598
- }
599
-
600
- } else {
601
-
602
- c.material = processMaterial( material );
603
-
604
- }
605
-
606
- }
607
-
608
- } );
609
-
610
- return { replaced, retained: materials.length };
611
-
612
- }
613
-
614
401
  }
615
402
 
616
- class MaterialStructUniform {
617
-
618
- constructor() {
619
-
620
- this.init();
621
-
622
- }
623
-
624
- init() {
625
-
626
- this.color = new Color( 0xffffff );
627
- this.map = - 1;
628
-
629
- this.metalness = 1.0;
630
- this.metalnessMap = - 1;
631
-
632
- this.roughness = 1.0;
633
- this.roughnessMap = - 1;
634
-
635
- this.ior = 1.0;
636
- this.transmission = 0.0;
637
- this.transmissionMap = - 1;
638
-
639
- this.emissive = new Color( 0 );
640
- this.emissiveIntensity = 1.0;
641
- this.emissiveMap = - 1;
642
-
643
- this.normalMap = - 1;
644
- this.normalScale = new Vector2( 1, 1 );
645
-
646
- this.opacity = 1.0;
647
- this.alphaTest = 0.0;
648
-
649
- // TODO: Clearcoat
650
-
651
- // TODO: Sheen
652
-
653
- }
654
-
655
- updateFrom( material, textures = [] ) {
656
-
657
- this.init();
658
-
659
- // color
660
- if ( 'color' in material ) this.color.copy( material.color );
661
- else material.color.set( 0xffffff );
662
-
663
- this.map = textures.indexOf( material.map );
664
-
665
- // metalness
666
- if ( 'metalness' in material ) this.metalness = material.metalness;
667
- else this.metalness = 1.0;
668
-
669
- this.metalnessMap = textures.indexOf( material.metalnessMap );
670
-
671
- // roughness
672
- if ( 'roughness' in material ) this.roughness = material.roughness;
673
- else this.roughness = 1.0;
674
-
675
- this.roughnessMap = textures.indexOf( material.roughnessMap );
676
-
677
- // transmission
678
- if ( 'ior' in material ) this.ior = material.ior;
679
- else this.ior = 1.0;
680
-
681
- if ( 'transmission' in material ) this.transmission = material.transmission;
682
- else this.transmission = 0.0;
683
-
684
- if ( 'transmissionMap' in material ) this.transmissionMap = textures.indexOf( material.transmissionMap );
685
-
686
- // emission
687
- if ( 'emissive' in material ) this.emissive.copy( material.emissive );
688
- else this.emissive.set( 0 );
689
-
690
- if ( 'emissiveIntensity' in material ) this.emissiveIntensity = material.emissiveIntensity;
691
- else this.emissiveIntensity = 1.0;
692
-
693
- this.emissiveMap = textures.indexOf( material.emissiveMap );
694
-
695
- // normals
696
- this.normalMap = textures.indexOf( material.normalMap );
697
- if ( 'normalScale' in material ) this.normalScale.copy( material.normalScale );
698
- else this.normalScale.set( 1, 1 );
699
-
700
- // opacity
701
- this.opacity = material.opacity;
702
-
703
- // alpha test
704
- this.alphaTest = material.alphaTest;
705
-
706
- }
707
-
403
+ function trimToAttributes( geometry, attributes ) {
404
+
405
+ // trim any unneeded attributes
406
+ if ( attributes ) {
407
+
408
+ for ( const key in geometry.attributes ) {
409
+
410
+ if ( ! attributes.includes( key ) ) {
411
+
412
+ geometry.deleteAttribute( key );
413
+
414
+ }
415
+
416
+ }
417
+
418
+ }
419
+
708
420
  }
709
421
 
710
- class MaterialStructArrayUniform extends Array {
711
-
712
- updateFrom( materials, textures ) {
713
-
714
- while ( this.length > materials.length ) this.pop();
715
- while ( this.length < materials.length ) this.push( new MaterialStructUniform() );
716
-
717
- for ( let i = 0, l = this.length; i < l; i ++ ) {
718
-
719
- this[ i ].updateFrom( materials[ i ], textures );
720
-
721
- }
722
-
723
- }
724
-
422
+ function setCommonAttributes( geometry, options ) {
423
+
424
+ const { attributes = [], normalMapRequired = false } = options;
425
+
426
+ if ( ! geometry.attributes.normal && ( attributes && attributes.includes( 'normal' ) ) ) {
427
+
428
+ geometry.computeVertexNormals();
429
+
430
+ }
431
+
432
+ if ( ! geometry.attributes.uv && ( attributes && attributes.includes( 'uv' ) ) ) {
433
+
434
+ const vertCount = geometry.attributes.position.count;
435
+ geometry.setAttribute( 'uv', new BufferAttribute( new Float32Array( vertCount * 2 ), 2, false ) );
436
+
437
+ }
438
+
439
+ if ( ! geometry.attributes.tangent && ( attributes && attributes.includes( 'tangent' ) ) ) {
440
+
441
+ if ( normalMapRequired ) {
442
+
443
+ // computeTangents requires an index buffer
444
+ if ( geometry.index === null ) {
445
+
446
+ geometry = mergeVertices( geometry );
447
+
448
+ }
449
+
450
+ geometry.computeTangents();
451
+
452
+ } else {
453
+
454
+ const vertCount = geometry.attributes.position.count;
455
+ geometry.setAttribute( 'tangent', new BufferAttribute( new Float32Array( vertCount * 4 ), 4, false ) );
456
+
457
+ }
458
+
459
+ }
460
+
461
+ if ( ! geometry.index ) {
462
+
463
+ // TODO: compute a typed array
464
+ const indexCount = geometry.attributes.position.count;
465
+ const array = new Array( indexCount );
466
+ for ( let i = 0; i < indexCount; i ++ ) {
467
+
468
+ array[ i ] = i;
469
+
470
+ }
471
+
472
+ geometry.setIndex( array );
473
+
474
+ }
475
+
725
476
  }
726
477
 
727
- const prevColor = new Color();
728
- class RenderTarget2DArray extends WebGLArrayRenderTarget {
729
-
730
- constructor( ...args ) {
731
-
732
- super( ...args );
733
-
734
- const tex = this.texture;
735
- tex.format = RGBAFormat;
736
- tex.type = UnsignedByteType;
737
- tex.minFilter = LinearFilter;
738
- tex.magFilter = LinearFilter;
739
- tex.wrapS = RepeatWrapping;
740
- tex.wrapT = RepeatWrapping;
741
- tex.setTextures = ( ...args ) => {
742
-
743
- this.setTextures( ...args );
744
-
745
- };
746
-
747
- const fsQuad = new FullScreenQuad( new MeshBasicMaterial() );
748
- this.fsQuad = fsQuad;
749
-
750
- }
751
-
752
- setTextures( renderer, width, height, textures ) {
753
-
754
- // save previous renderer state
755
- const prevRenderTarget = renderer.getRenderTarget();
756
- const prevToneMapping = renderer.toneMapping;
757
- const prevAlpha = renderer.getClearAlpha();
758
- renderer.getClearColor( prevColor );
759
-
760
- // resize the render target
761
- const depth = textures.length;
762
- this.setSize( width, height, depth );
763
- renderer.setClearColor( 0, 0 );
764
- renderer.toneMapping = NoToneMapping;
765
-
766
- // render each texture into each layer of the target
767
- const fsQuad = this.fsQuad;
768
- for ( let i = 0, l = depth; i < l; i ++ ) {
769
-
770
- const texture = textures[ i ];
771
- fsQuad.material.map = texture;
772
- fsQuad.material.transparent = true;
773
-
774
- renderer.setRenderTarget( this, i );
775
- fsQuad.render( renderer );
776
-
777
- }
778
-
779
- // reset the renderer
780
- fsQuad.material.map = null;
781
- renderer.setClearColor( prevColor, prevAlpha );
782
- renderer.setRenderTarget( prevRenderTarget );
783
- renderer.toneMapping = prevToneMapping;
784
-
785
- }
786
-
787
- dispose() {
788
-
789
- super.dispose();
790
- this.fsQuad.dispose();
791
-
792
- }
793
-
478
+ function mergeMeshes( meshes, options = {} ) {
479
+
480
+ options = { attributes: null, cloneGeometry: true, ...options };
481
+
482
+ const transformedGeometry = [];
483
+ const materialSet = new Set();
484
+ for ( let i = 0, l = meshes.length; i < l; i ++ ) {
485
+
486
+ // save any materials
487
+ const mesh = meshes[ i ];
488
+ if ( mesh.visible === false ) continue;
489
+
490
+ if ( Array.isArray( mesh.material ) ) {
491
+
492
+ mesh.material.forEach( m => materialSet.add( m ) );
493
+
494
+ } else {
495
+
496
+ materialSet.add( mesh.material );
497
+
498
+ }
499
+
500
+ }
501
+
502
+ const materials = Array.from( materialSet );
503
+ for ( let i = 0, l = meshes.length; i < l; i ++ ) {
504
+
505
+ // ensure the matrix world is up to date
506
+ const mesh = meshes[ i ];
507
+ if ( mesh.visible === false ) continue;
508
+
509
+ mesh.updateMatrixWorld();
510
+
511
+ // apply the matrix world to the geometry
512
+ const originalGeometry = meshes[ i ].geometry;
513
+ const geometry = options.cloneGeometry ? originalGeometry.clone() : originalGeometry;
514
+ geometry.applyMatrix4( mesh.matrixWorld );
515
+
516
+ // ensure our geometry has common attributes
517
+ setCommonAttributes( geometry, {
518
+ attributes: options.attributes,
519
+ normalMapRequired: ! ! mesh.material.normalMap,
520
+ } );
521
+ trimToAttributes( geometry, options.attributes );
522
+
523
+ // create the material index attribute
524
+ const materialIndexAttribute = getGroupMaterialIndicesAttribute( geometry, mesh.material, materials );
525
+ geometry.setAttribute( 'materialIndex', materialIndexAttribute );
526
+
527
+ transformedGeometry.push( geometry );
528
+
529
+ }
530
+
531
+ const textureSet = new Set();
532
+ materials.forEach( material => {
533
+
534
+ for ( const key in material ) {
535
+
536
+ const value = material[ key ];
537
+ if ( value && value.isTexture ) {
538
+
539
+ textureSet.add( value );
540
+
541
+ }
542
+
543
+ }
544
+
545
+ } );
546
+
547
+ const geometry = mergeBufferGeometries( transformedGeometry, false );
548
+ const textures = Array.from( textureSet );
549
+ return { geometry, materials, textures };
550
+
794
551
  }
795
552
 
796
- class MaterialBase extends ShaderMaterial {
797
-
798
- constructor( shader ) {
799
-
800
- super( shader );
801
-
802
- for ( const key in this.uniforms ) {
803
-
804
- Object.defineProperty( this, key, {
805
-
806
- get() {
807
-
808
- return this.uniforms[ key ].value;
809
-
810
- },
811
-
812
- set( v ) {
813
-
814
- this.uniforms[ key ].value = v;
815
-
816
- }
817
-
818
- } );
819
-
820
- }
821
-
822
- }
823
-
824
- // sets the given named define value and sets "needsUpdate" to true if it's different
825
- setDefine( name, value = undefined ) {
826
-
827
- if ( value === undefined || value === null ) {
828
-
829
- if ( name in this.defines ) {
830
-
831
- delete this.defines[ name ];
832
- this.needsUpdate = true;
833
-
834
- }
835
-
836
- } else {
837
-
838
- if ( this.defines[ name ] !== value ) {
839
-
840
- this.defines[ name ] = value;
841
- this.needsUpdate = true;
842
-
843
- }
844
-
845
- }
846
-
847
- }
848
-
553
+ class PathTracingSceneGenerator {
554
+
555
+ prepScene( scene ) {
556
+
557
+ const meshes = [];
558
+ scene.traverse( c => {
559
+
560
+ if ( c.isSkinnedMesh || c.isMesh && c.morphTargetInfluences ) {
561
+
562
+ const generator = new StaticGeometryGenerator( c );
563
+ generator.applyWorldTransforms = false;
564
+ const mesh = new Mesh(
565
+ generator.generate(),
566
+ c.material,
567
+ );
568
+ mesh.matrixWorld.copy( c.matrixWorld );
569
+ mesh.matrix.copy( c.matrixWorld );
570
+ mesh.matrix.decompose( mesh.position, mesh.quaternion, mesh.scale );
571
+ meshes.push( mesh );
572
+
573
+ } else if ( c.isMesh ) {
574
+
575
+ meshes.push( c );
576
+
577
+ }
578
+
579
+ } );
580
+
581
+ return mergeMeshes( meshes, {
582
+ attributes: [ 'position', 'normal', 'tangent', 'uv' ],
583
+ } );
584
+
585
+ }
586
+
587
+ generate( scene, options = {} ) {
588
+
589
+ const { materials, textures, geometry } = this.prepScene( scene );
590
+ const bvhOptions = { strategy: SAH, ...options, maxLeafTris: 1 };
591
+ return {
592
+ scene,
593
+ materials,
594
+ textures,
595
+ bvh: new MeshBVH( geometry, bvhOptions ),
596
+ };
597
+
598
+ }
599
+
849
600
  }
850
601
 
851
- const shaderMaterialStructs = /* glsl */ `
852
-
853
- struct Material {
854
-
855
- vec3 color;
856
- int map;
857
-
858
- float metalness;
859
- int metalnessMap;
860
-
861
- float roughness;
862
- int roughnessMap;
863
-
864
- float ior;
865
- float transmission;
866
- int transmissionMap;
867
-
868
- vec3 emissive;
869
- float emissiveIntensity;
870
- int emissiveMap;
871
-
872
- int normalMap;
873
- vec2 normalScale;
874
-
875
- float opacity;
876
- float alphaTest;
877
-
878
- };
879
-
880
- `;
602
+ class DynamicPathTracingSceneGenerator {
881
603
 
882
- const shaderGGXFunctions = /* glsl */`
883
- // The GGX functions provide sampling and distribution information for normals as output so
884
- // in order to get probability of scatter direction the half vector must be computed and provided.
885
- // [0] https://www.cs.cornell.edu/~srm/publications/EGSR07-btdf.pdf
886
- // [1] https://hal.archives-ouvertes.fr/hal-01509746/document
887
- // [2] http://jcgt.org/published/0007/04/01/
888
- // [4] http://jcgt.org/published/0003/02/03/
889
-
890
- // trowbridge-reitz === GGX === GTR
891
-
892
- vec3 ggxDirection( vec3 incidentDir, float roughnessX, float roughnessY, float random1, float random2 ) {
893
-
894
- // TODO: try GGXVNDF implementation from reference [2], here. Needs to update ggxDistribution
895
- // function below, as well
896
-
897
- // Implementation from reference [1]
898
- // stretch view
899
- vec3 V = normalize( vec3( roughnessX * incidentDir.x, roughnessY * incidentDir.y, incidentDir.z ) );
900
-
901
- // orthonormal basis
902
- vec3 T1 = ( V.z < 0.9999 ) ? normalize( cross( V, vec3( 0.0, 0.0, 1.0 ) ) ) : vec3( 1.0, 0.0, 0.0 );
903
- vec3 T2 = cross( T1, V );
904
-
905
- // sample point with polar coordinates (r, phi)
906
- float a = 1.0 / ( 1.0 + V.z );
907
- float r = sqrt( random1 );
908
- float phi = ( random2 < a ) ? random2 / a * PI : PI + ( random2 - a ) / ( 1.0 - a ) * PI;
909
- float P1 = r * cos( phi );
910
- float P2 = r * sin( phi ) * ( ( random2 < a ) ? 1.0 : V.z );
911
-
912
- // compute normal
913
- vec3 N = P1 * T1 + P2 * T2 + V * sqrt( max( 0.0, 1.0 - P1 * P1 - P2 * P2 ) );
914
-
915
- // unstretch
916
- N = normalize( vec3( roughnessX * N.x, roughnessY * N.y, max( 0.0, N.z ) ) );
917
-
918
- return N;
919
-
920
- }
921
-
922
- // Below are PDF and related functions for use in a Monte Carlo path tracer
923
- // as specified in Appendix B of the following paper
924
- // See equation (2) from reference [2]
925
- float ggxLamda( float theta, float roughness ) {
926
-
927
- float tanTheta = tan( theta );
928
- float tanTheta2 = tanTheta * tanTheta;
929
- float alpha2 = roughness * roughness;
930
-
931
- float numerator = - 1.0 + sqrt( 1.0 + alpha2 * tanTheta2 );
932
- return numerator / 2.0;
933
-
934
- }
935
-
936
- // See equation (2) from reference [2]
937
- float ggxShadowMaskG1( float theta, float roughness ) {
938
-
939
- return 1.0 / ( 1.0 + ggxLamda( theta, roughness ) );
940
-
941
- }
942
-
943
- // See equation (125) from reference [4]
944
- float ggxShadowMaskG2( vec3 wi, vec3 wo, float roughness ) {
945
-
946
- float incidentTheta = acos( wi.z );
947
- float scatterTheta = acos( wo.z );
948
- return 1.0 / ( 1.0 + ggxLamda( incidentTheta, roughness ) + ggxLamda( scatterTheta, roughness ) );
949
-
950
- }
951
-
952
- float ggxDistribution( vec3 halfVector, float roughness ) {
953
-
954
- // See equation (33) from reference [0]
955
- float a2 = roughness * roughness;
956
- float cosTheta = halfVector.z;
957
- float cosTheta4 = pow( cosTheta, 4.0 );
958
-
959
- if ( cosTheta == 0.0 ) return 0.0;
960
-
961
- float theta = acos( halfVector.z );
962
- float tanTheta = tan( theta );
963
- float tanTheta2 = pow( tanTheta, 2.0 );
964
-
965
- float denom = PI * cosTheta4 * pow( a2 + tanTheta2, 2.0 );
966
- return a2 / denom;
967
-
968
- // See equation (1) from reference [2]
969
- // const { x, y, z } = halfVector;
970
- // const a2 = roughness * roughness;
971
- // const mult = x * x / a2 + y * y / a2 + z * z;
972
- // const mult2 = mult * mult;
973
-
974
- // return 1.0 / Math.PI * a2 * mult2;
975
-
976
- }
977
-
978
- // See equation (3) from reference [2]
979
- float ggxPDF( vec3 wi, vec3 halfVector, float roughness ) {
980
-
981
- float incidentTheta = acos( wi.z );
982
- float D = ggxDistribution( halfVector, roughness );
983
- float G1 = ggxShadowMaskG1( incidentTheta, roughness );
984
-
985
- return D * G1 * max( 0.0, dot( wi, halfVector ) ) / wi.z;
986
-
987
- }
988
- `;
604
+ get initialized() {
989
605
 
990
- const shaderMaterialSampling = /* glsl */`
991
-
992
- struct SurfaceRec {
993
- vec3 normal;
994
- vec3 faceNormal;
995
- bool frontFace;
996
- float roughness;
997
- float filteredRoughness;
998
- float metalness;
999
- vec3 color;
1000
- vec3 emission;
1001
- float transmission;
1002
- float ior;
1003
- };
1004
-
1005
- struct SampleRec {
1006
- float pdf;
1007
- vec3 direction;
1008
- vec3 color;
1009
- };
1010
-
1011
- ${ shaderGGXFunctions }
1012
-
1013
- // diffuse
1014
- float diffusePDF( vec3 wo, vec3 wi, SurfaceRec surf ) {
1015
-
1016
- // https://raytracing.github.io/books/RayTracingTheRestOfYourLife.html#lightscattering/thescatteringpdf
1017
- float cosValue = wi.z;
1018
- return cosValue / PI;
1019
-
1020
- }
1021
-
1022
- vec3 diffuseDirection( vec3 wo, SurfaceRec surf ) {
1023
-
1024
- vec3 lightDirection = randDirection();
1025
- lightDirection.z += 1.0;
1026
- lightDirection = normalize( lightDirection );
1027
-
1028
- return lightDirection;
1029
-
1030
- }
1031
-
1032
- vec3 diffuseColor( vec3 wo, vec3 wi, SurfaceRec surf ) {
1033
-
1034
- // TODO: scale by 1 - F here
1035
- // note on division by PI
1036
- // https://seblagarde.wordpress.com/2012/01/08/pi-or-not-to-pi-in-game-lighting-equation/
1037
- float metalFactor = ( 1.0 - surf.metalness ) * wi.z / ( PI * PI );
1038
- float transmissionFactor = 1.0 - surf.transmission;
1039
- return surf.color * metalFactor * transmissionFactor;
1040
-
1041
- }
1042
-
1043
- // specular
1044
- float specularPDF( vec3 wo, vec3 wi, SurfaceRec surf ) {
1045
-
1046
- // See equation (17) in http://jcgt.org/published/0003/02/03/
1047
- float filteredRoughness = surf.filteredRoughness;
1048
- vec3 halfVector = getHalfVector( wi, wo );
1049
- return ggxPDF( wi, halfVector, filteredRoughness ) / ( 4.0 * dot( wi, halfVector ) );
1050
-
1051
- }
1052
-
1053
- vec3 specularDirection( vec3 wo, SurfaceRec surf ) {
1054
-
1055
- // sample ggx vndf distribution which gives a new normal
1056
- float filteredRoughness = surf.filteredRoughness;
1057
- vec3 halfVector = ggxDirection(
1058
- wo,
1059
- filteredRoughness,
1060
- filteredRoughness,
1061
- rand(),
1062
- rand()
1063
- );
1064
-
1065
- // apply to new ray by reflecting off the new normal
1066
- return - reflect( wo, halfVector );
1067
-
1068
- }
1069
-
1070
- vec3 specularColor( vec3 wo, vec3 wi, SurfaceRec surf ) {
1071
-
1072
- // if roughness is set to 0 then D === NaN which results in black pixels
1073
- float metalness = surf.metalness;
1074
- float ior = surf.ior;
1075
- bool frontFace = surf.frontFace;
1076
- float filteredRoughness = surf.filteredRoughness;
1077
-
1078
- vec3 halfVector = getHalfVector( wo, wi );
1079
- float iorRatio = frontFace ? 1.0 / ior : ior;
1080
- float G = ggxShadowMaskG2( wi, wo, filteredRoughness );
1081
- float D = ggxDistribution( halfVector, filteredRoughness );
1082
-
1083
- float F = schlickFresnelFromIor( dot( wi, halfVector ), iorRatio );
1084
- float cosTheta = min( wo.z, 1.0 );
1085
- float sinTheta = sqrt( 1.0 - cosTheta * cosTheta );
1086
- bool cannotRefract = iorRatio * sinTheta > 1.0;
1087
- if ( cannotRefract ) {
1088
-
1089
- F = 1.0;
1090
-
1091
- }
1092
-
1093
- vec3 color = mix( vec3( 1.0 ), surf.color, metalness );
1094
- color = mix( color, vec3( 1.0 ), F );
1095
- color *= G * D / ( 4.0 * abs( wi.z * wo.z ) );
1096
- color *= mix( F, 1.0, metalness );
1097
- color *= wi.z; // scale the light by the direction the light is coming in from
1098
-
1099
- return color;
1100
-
1101
- }
1102
-
1103
- /*
1104
- // transmission
1105
- function transmissionPDF( wo, wi, material, surf ) {
1106
-
1107
- // See section 4.2 in https://www.cs.cornell.edu/~srm/publications/EGSR07-btdf.pdf
1108
-
1109
- const { roughness, ior } = material;
1110
- const { frontFace } = hit;
1111
- const ratio = frontFace ? ior : 1 / ior;
1112
- const minRoughness = Math.max( roughness, MIN_ROUGHNESS );
1113
-
1114
- halfVector.set( 0, 0, 0 ).addScaledVector( wi, ratio ).addScaledVector( wo, 1.0 ).normalize().multiplyScalar( - 1 );
1115
-
1116
- const denom = Math.pow( ratio * halfVector.dot( wi ) + 1.0 * halfVector.dot( wo ), 2.0 );
1117
- return ggxPDF( wo, halfVector, minRoughness ) / denom;
1118
-
1119
- }
1120
-
1121
- function transmissionDirection( wo, hit, material, lightDirection ) {
1122
-
1123
- const { roughness, ior } = material;
1124
- const { frontFace } = hit;
1125
- const ratio = frontFace ? 1 / ior : ior;
1126
- const minRoughness = Math.max( roughness, MIN_ROUGHNESS );
1127
-
1128
- // sample ggx vndf distribution which gives a new normal
1129
- ggxDirection(
1130
- wo,
1131
- minRoughness,
1132
- minRoughness,
1133
- Math.random(),
1134
- Math.random(),
1135
- halfVector,
1136
- );
1137
-
1138
- // apply to new ray by reflecting off the new normal
1139
- tempDir.copy( wo ).multiplyScalar( - 1 );
1140
- refract( tempDir, halfVector, ratio, lightDirection );
1141
-
1142
- }
1143
-
1144
- function transmissionColor( wo, wi, material, hit, colorTarget ) {
1145
-
1146
- const { metalness, transmission } = material;
1147
- colorTarget
1148
- .copy( material.color )
1149
- .multiplyScalar( ( 1.0 - metalness ) * wo.z )
1150
- .multiplyScalar( transmission );
1151
-
1152
- }
1153
- */
1154
-
1155
- // TODO: This is just using a basic cosine-weighted specular distribution with an
1156
- // incorrect PDF value at the moment. Update it to correctly use a GGX distribution
1157
- float transmissionPDF( vec3 wo, vec3 wi, SurfaceRec surf ) {
1158
-
1159
- float ior = surf.ior;
1160
- bool frontFace = surf.frontFace;
1161
-
1162
- float ratio = frontFace ? 1.0 / ior : ior;
1163
- float cosTheta = min( wo.z, 1.0 );
1164
- float sinTheta = sqrt( 1.0 - cosTheta * cosTheta );
1165
- float reflectance = schlickFresnelFromIor( cosTheta, ratio );
1166
- bool cannotRefract = ratio * sinTheta > 1.0;
1167
- if ( cannotRefract ) {
1168
-
1169
- return 0.0;
1170
-
1171
- }
1172
-
1173
- return 1.0 / ( 1.0 - reflectance );
1174
-
1175
- }
1176
-
1177
- vec3 transmissionDirection( vec3 wo, SurfaceRec surf ) {
1178
-
1179
- float roughness = surf.roughness;
1180
- float ior = surf.ior;
1181
- bool frontFace = surf.frontFace;
1182
- float ratio = frontFace ? 1.0 / ior : ior;
1183
-
1184
- vec3 lightDirection = refract( - wo, vec3( 0.0, 0.0, 1.0 ), ratio );
1185
- lightDirection += randDirection() * roughness;
1186
- return normalize( lightDirection );
1187
-
1188
- }
1189
-
1190
- vec3 transmissionColor( vec3 wo, vec3 wi, SurfaceRec surf ) {
1191
-
1192
- float metalness = surf.metalness;
1193
- float transmission = surf.transmission;
1194
-
1195
- vec3 color = surf.color;
1196
- color *= ( 1.0 - metalness );
1197
- color *= transmission;
1198
-
1199
- return color;
1200
-
1201
- }
1202
-
1203
- float bsdfPdf( vec3 wo, vec3 wi, SurfaceRec surf ) {
1204
-
1205
- float ior = surf.ior;
1206
- float metalness = surf.metalness;
1207
- float transmission = surf.transmission;
1208
- bool frontFace = surf.frontFace;
1209
-
1210
- float ratio = frontFace ? 1.0 / ior : ior;
1211
- float cosTheta = min( wo.z, 1.0 );
1212
- float sinTheta = sqrt( 1.0 - cosTheta * cosTheta );
1213
- float reflectance = schlickFresnelFromIor( cosTheta, ratio );
1214
- bool cannotRefract = ratio * sinTheta > 1.0;
1215
- if ( cannotRefract ) {
1216
-
1217
- reflectance = 1.0;
1218
-
1219
- }
1220
-
1221
- float spdf = 0.0;
1222
- float dpdf = 0.0;
1223
- float tpdf = 0.0;
1224
-
1225
- if ( wi.z < 0.0 ) {
1226
-
1227
- tpdf = transmissionPDF( wo, wi, surf );
1228
-
1229
- } else {
1230
-
1231
- spdf = specularPDF( wo, wi, surf );
1232
- dpdf = diffusePDF( wo, wi, surf );
1233
-
1234
- }
1235
-
1236
- float transSpecularProb = mix( reflectance, 1.0, metalness );
1237
- float diffSpecularProb = 0.5 + 0.5 * metalness;
1238
- float pdf =
1239
- spdf * transmission * transSpecularProb
1240
- + tpdf * transmission * ( 1.0 - transSpecularProb )
1241
- + spdf * ( 1.0 - transmission ) * diffSpecularProb
1242
- + dpdf * ( 1.0 - transmission ) * ( 1.0 - diffSpecularProb );
1243
-
1244
- return pdf;
1245
-
1246
- }
1247
-
1248
- vec3 bsdfColor( vec3 wo, vec3 wi, SurfaceRec surf ) {
1249
-
1250
- vec3 color = vec3( 0.0 );
1251
- if ( wi.z < 0.0 ) {
1252
-
1253
- color = transmissionColor( wo, wi, surf );
1254
-
1255
- } else {
1256
-
1257
- color = diffuseColor( wo, wi, surf );
1258
- color *= 1.0 - surf.transmission;
1259
-
1260
- color += specularColor( wo, wi, surf );
1261
-
1262
- }
1263
-
1264
- return color;
1265
-
1266
- }
1267
-
1268
- SampleRec bsdfSample( vec3 wo, SurfaceRec surf ) {
1269
-
1270
- float ior = surf.ior;
1271
- float metalness = surf.metalness;
1272
- float transmission = surf.transmission;
1273
- bool frontFace = surf.frontFace;
1274
-
1275
- float ratio = frontFace ? 1.0 / ior : ior;
1276
- float cosTheta = min( wo.z, 1.0 );
1277
- float sinTheta = sqrt( 1.0 - cosTheta * cosTheta );
1278
- float reflectance = schlickFresnelFromIor( cosTheta, ratio );
1279
- bool cannotRefract = ratio * sinTheta > 1.0;
1280
- if ( cannotRefract ) {
1281
-
1282
- reflectance = 1.0;
1283
-
1284
- }
1285
-
1286
- SampleRec result;
1287
- if ( rand() < transmission ) {
1288
-
1289
- float specularProb = mix( reflectance, 1.0, metalness );
1290
- if ( rand() < specularProb ) {
1291
-
1292
- result.direction = specularDirection( wo, surf );
1293
-
1294
- } else {
1295
-
1296
- result.direction = transmissionDirection( wo, surf );
1297
-
1298
- }
1299
-
1300
- } else {
1301
-
1302
- float specularProb = 0.5 + 0.5 * metalness;
1303
- if ( rand() < specularProb ) {
1304
-
1305
- result.direction = specularDirection( wo, surf );
1306
-
1307
- } else {
1308
-
1309
- result.direction = diffuseDirection( wo, surf );
1310
-
1311
- }
1312
-
1313
- }
1314
-
1315
- result.pdf = bsdfPdf( wo, result.direction, surf );
1316
- result.color = bsdfColor( wo, result.direction, surf );
1317
- return result;
1318
-
1319
- }
1320
- `;
606
+ return Boolean( this.bvh );
1321
607
 
1322
- const shaderUtils = /* glsl */`
1323
-
1324
- // https://google.github.io/filament/Filament.md.html#materialsystem/diffusebrdf
1325
- float schlickFresnel( float cosine, float f0 ) {
1326
-
1327
- return f0 + ( 1.0 - f0 ) * pow( 1.0 - cosine, 5.0 );
1328
-
1329
- }
1330
-
1331
- // https://raytracing.github.io/books/RayTracingInOneWeekend.html#dielectrics/schlickapproximation
1332
- float schlickFresnelFromIor( float cosine, float iorRatio ) {
1333
-
1334
- // Schlick approximation
1335
- float r_0 = pow( ( 1.0 - iorRatio ) / ( 1.0 + iorRatio ), 2.0 );
1336
- return schlickFresnel( cosine, r_0 );
1337
-
1338
- }
1339
-
1340
- // forms a basis with the normal vector as Z
1341
- mat3 getBasisFromNormal( vec3 normal ) {
1342
-
1343
- vec3 other;
1344
- if ( abs( normal.x ) > 0.5 ) {
1345
-
1346
- other = vec3( 0.0, 1.0, 0.0 );
1347
-
1348
- } else {
1349
-
1350
- other = vec3( 1.0, 0.0, 0.0 );
1351
-
1352
- }
1353
-
1354
- vec3 ortho = normalize( cross( normal, other ) );
1355
- vec3 ortho2 = normalize( cross( normal, ortho ) );
1356
- return mat3( ortho2, ortho, normal );
1357
-
1358
- }
1359
-
1360
- vec3 getHalfVector( vec3 a, vec3 b ) {
1361
-
1362
- return normalize( a + b );
1363
-
1364
- }
1365
-
1366
- // The discrepancy between interpolated surface normal and geometry normal can cause issues when a ray
1367
- // is cast that is on the top side of the geometry normal plane but below the surface normal plane. If
1368
- // we find a ray like that we ignore it to avoid artifacts.
1369
- // This function returns if the direction is on the same side of both planes.
1370
- bool isDirectionValid( vec3 direction, vec3 surfaceNormal, vec3 geometryNormal ) {
1371
-
1372
- bool aboveSurfaceNormal = dot( direction, surfaceNormal ) > 0.0;
1373
- bool aboveGeometryNormal = dot( direction, geometryNormal ) > 0.0;
1374
- return aboveSurfaceNormal == aboveGeometryNormal;
1375
-
1376
- }
1377
-
1378
- vec3 getHemisphereSample( vec3 n, vec2 uv ) {
1379
-
1380
- // https://www.rorydriscoll.com/2009/01/07/better-sampling/
1381
- // https://graphics.pixar.com/library/OrthonormalB/paper.pdf
1382
- float sign = n.z == 0.0 ? 1.0 : sign( n.z );
1383
- float a = - 1.0 / ( sign + n.z );
1384
- float b = n.x * n.y * a;
1385
- vec3 b1 = vec3( 1.0 + sign * n.x * n.x * a, sign * b, - sign * n.x );
1386
- vec3 b2 = vec3( b, sign + n.y * n.y * a, - n.y );
1387
-
1388
- float r = sqrt( uv.x );
1389
- float theta = 2.0 * PI * uv.y;
1390
- float x = r * cos( theta );
1391
- float y = r * sin( theta );
1392
- return x * b1 + y * b2 + sqrt( 1.0 - uv.x ) * n;
1393
-
1394
- }
1395
-
1396
- // https://www.shadertoy.com/view/wltcRS
1397
- uvec4 s0;
1398
-
1399
- void rng_initialize(vec2 p, int frame) {
1400
-
1401
- // white noise seed
1402
- s0 = uvec4( p, uint( frame ), uint( p.x ) + uint( p.y ) );
1403
-
1404
- }
1405
-
1406
- // https://www.pcg-random.org/
1407
- void pcg4d( inout uvec4 v ) {
1408
-
1409
- v = v * 1664525u + 1013904223u;
1410
- v.x += v.y * v.w;
1411
- v.y += v.z * v.x;
1412
- v.z += v.x * v.y;
1413
- v.w += v.y * v.z;
1414
- v = v ^ ( v >> 16u );
1415
- v.x += v.y*v.w;
1416
- v.y += v.z*v.x;
1417
- v.z += v.x*v.y;
1418
- v.w += v.y*v.z;
1419
-
1420
- }
1421
-
1422
- float rand() {
1423
-
1424
- pcg4d(s0);
1425
- return float( s0.x ) / float( 0xffffffffu );
1426
-
1427
- }
1428
-
1429
- vec2 rand2() {
1430
-
1431
- pcg4d( s0 );
1432
- return vec2( s0.xy ) / float(0xffffffffu);
1433
-
1434
- }
1435
-
1436
- vec3 rand3() {
1437
-
1438
- pcg4d(s0);
1439
- return vec3( s0.xyz ) / float( 0xffffffffu );
1440
-
1441
- }
1442
-
1443
- vec4 rand4() {
1444
-
1445
- pcg4d(s0);
1446
- return vec4(s0)/float(0xffffffffu);
1447
-
1448
- }
1449
-
1450
- // https://github.com/mrdoob/three.js/blob/dev/src/math/Vector3.js#L724
1451
- vec3 randDirection() {
1452
-
1453
- vec2 r = rand2();
1454
- float u = ( r.x - 0.5 ) * 2.0;
1455
- float t = r.y * PI * 2.0;
1456
- float f = sqrt( 1.0 - u * u );
1457
-
1458
- return vec3( f * cos( t ), f * sin( t ), u );
1459
-
1460
- }
1461
- `;
608
+ }
609
+
610
+ constructor( scene ) {
611
+
612
+ this.scene = scene;
613
+ this.bvh = null;
614
+ this.geometry = new BufferGeometry();
615
+ this.materials = null;
616
+ this.textures = null;
617
+ this.staticGeometryGenerator = new StaticGeometryGenerator( scene );
618
+
619
+ }
620
+
621
+ reset() {
622
+
623
+ this.bvh = null;
624
+ this.geometry.dispose();
625
+ this.geometry = new BufferGeometry();
626
+ this.materials = null;
627
+ this.textures = null;
628
+ this.staticGeometryGenerator = new StaticGeometryGenerator( this.scene );
629
+
630
+ }
631
+
632
+ dispose() {}
633
+
634
+ generate() {
635
+
636
+ const { scene, staticGeometryGenerator, geometry } = this;
637
+ if ( this.bvh === null ) {
638
+
639
+ const attributes = [ 'position', 'normal', 'tangent', 'uv' ];
640
+ scene.traverse( c => {
641
+
642
+ if ( c.isMesh ) {
643
+
644
+ const normalMapRequired = ! ! c.material.normalMap;
645
+ setCommonAttributes( c.geometry, { attributes, normalMapRequired } );
646
+
647
+ }
648
+
649
+ } );
650
+
651
+ const textureSet = new Set();
652
+ const materials = staticGeometryGenerator.getMaterials();
653
+ materials.forEach( material => {
654
+
655
+ for ( const key in material ) {
656
+
657
+ const value = material[ key ];
658
+ if ( value && value.isTexture ) {
659
+
660
+ textureSet.add( value );
661
+
662
+ }
663
+
664
+ }
665
+
666
+ } );
667
+
668
+ staticGeometryGenerator.attributes = attributes;
669
+ staticGeometryGenerator.generate( geometry );
670
+
671
+ const materialIndexAttribute = getGroupMaterialIndicesAttribute( geometry, materials, materials );
672
+ geometry.setAttribute( 'materialIndex', materialIndexAttribute );
673
+ geometry.clearGroups();
674
+
675
+ this.bvh = new MeshBVH( geometry );
676
+ this.materials = materials;
677
+ this.textures = Array.from( textureSet );
678
+
679
+ return {
680
+ bvh: this.bvh,
681
+ materials: this.materials,
682
+ textures: this.textures,
683
+ scene,
684
+ };
685
+
686
+ } else {
687
+
688
+ const { bvh } = this;
689
+ staticGeometryGenerator.generate( geometry );
690
+ bvh.refit();
691
+ return {
692
+ bvh: this.bvh,
693
+ materials: this.materials,
694
+ textures: this.textures,
695
+ scene,
696
+ };
697
+
698
+ }
699
+
700
+ }
701
+
702
+
703
+ }
704
+
705
+ // https://github.com/gkjohnson/webxr-sandbox/blob/main/skinned-mesh-batching/src/MaterialReducer.js
706
+
707
+ function isTypedArray( arr ) {
708
+
709
+ return arr.buffer instanceof ArrayBuffer && 'BYTES_PER_ELEMENT' in arr;
710
+
711
+ }
712
+
713
+ class MaterialReducer {
714
+
715
+ constructor() {
716
+
717
+ const ignoreKeys = new Set();
718
+ ignoreKeys.add( 'uuid' );
719
+
720
+ this.ignoreKeys = ignoreKeys;
721
+ this.shareTextures = true;
722
+ this.textures = [];
723
+ this.materials = [];
724
+
725
+ }
726
+
727
+ areEqual( objectA, objectB ) {
728
+
729
+ const keySet = new Set();
730
+ const traverseSet = new Set();
731
+ const ignoreKeys = this.ignoreKeys;
732
+
733
+ const traverse = ( a, b ) => {
734
+
735
+ if ( a === b ) {
736
+
737
+ return true;
738
+
739
+ }
740
+
741
+ if ( a && b && a instanceof Object && b instanceof Object ) {
742
+
743
+ if ( traverseSet.has( a ) || traverseSet.has( b ) ) {
744
+
745
+ throw new Error( 'MaterialReducer: Material is recursive.' );
746
+
747
+ }
748
+
749
+ const aIsElement = a instanceof Element;
750
+ const bIsElement = b instanceof Element;
751
+ if ( aIsElement || bIsElement ) {
752
+
753
+ if ( aIsElement !== bIsElement || ! ( a instanceof Image ) || ! ( b instanceof Image ) ) {
754
+
755
+ return false;
756
+
757
+ }
758
+
759
+ return a.src === b.src;
760
+
761
+ }
762
+
763
+ const aIsImageBitmap = a instanceof ImageBitmap;
764
+ const bIsImageBitmap = b instanceof ImageBitmap;
765
+ if ( aIsImageBitmap || bIsImageBitmap ) {
766
+
767
+ return false;
768
+
769
+ }
770
+
771
+ if ( a.equals ) {
772
+
773
+ return a.equals( b );
774
+
775
+ }
776
+
777
+ const aIsTypedArray = isTypedArray( a );
778
+ const bIsTypedArray = isTypedArray( b );
779
+ if ( aIsTypedArray || bIsTypedArray ) {
780
+
781
+ if ( aIsTypedArray !== bIsTypedArray || a.constructor !== b.constructor || a.length !== b.length ) {
782
+
783
+ return false;
784
+
785
+ }
786
+
787
+ for ( let i = 0, l = a.length; i < l; i ++ ) {
788
+
789
+ if ( a[ i ] !== b[ i ] ) return false;
790
+
791
+ }
792
+
793
+ return true;
794
+
795
+ }
796
+
797
+ traverseSet.add( a );
798
+ traverseSet.add( b );
799
+
800
+ keySet.clear();
801
+ for ( const key in a ) {
802
+
803
+ if ( ! a.hasOwnProperty( key ) || a[ key ] instanceof Function || ignoreKeys.has( key ) ) {
804
+
805
+ continue;
806
+
807
+ }
808
+
809
+ keySet.add( key );
810
+
811
+ }
812
+
813
+ for ( const key in b ) {
814
+
815
+ if ( ! b.hasOwnProperty( key ) || b[ key ] instanceof Function || ignoreKeys.has( key ) ) {
816
+
817
+ continue;
818
+
819
+ }
820
+
821
+ keySet.add( key );
822
+
823
+ }
824
+
825
+ const keys = Array.from( keySet.values() );
826
+ let result = true;
827
+ for ( const i in keys ) {
828
+
829
+ const key = keys[ i ];
830
+ if ( ignoreKeys.has( key ) ) {
831
+
832
+ continue;
833
+
834
+ }
835
+
836
+ result = traverse( a[ key ], b[ key ] );
837
+ if ( ! result ) {
838
+
839
+ break;
840
+
841
+ }
842
+
843
+ }
844
+
845
+ traverseSet.delete( a );
846
+ traverseSet.delete( b );
847
+ return result;
848
+
849
+ }
850
+
851
+ return false;
852
+
853
+ };
854
+
855
+ return traverse( objectA, objectB );
856
+
857
+ }
858
+
859
+ process( object ) {
860
+
861
+ const { textures, materials } = this;
862
+ let replaced = 0;
863
+
864
+ const processMaterial = material => {
865
+
866
+ // Check if another material matches this one
867
+ let foundMaterial = null;
868
+ for ( const i in materials ) {
869
+
870
+ const otherMaterial = materials[ i ];
871
+ if ( this.areEqual( material, otherMaterial ) ) {
872
+
873
+ foundMaterial = otherMaterial;
874
+
875
+ }
876
+
877
+ }
878
+
879
+ if ( foundMaterial ) {
880
+
881
+ replaced ++;
882
+ return foundMaterial;
883
+
884
+ } else {
885
+
886
+ materials.push( material );
887
+
888
+ if ( this.shareTextures ) {
889
+
890
+ // See if there's another texture that matches the ones on this material
891
+ for ( const key in material ) {
892
+
893
+ if ( ! material.hasOwnProperty( key ) ) continue;
894
+
895
+ const value = material[ key ];
896
+ if ( value && value.isTexture && value.image instanceof Image ) {
897
+
898
+ let foundTexture = null;
899
+ for ( const i in textures ) {
900
+
901
+ const texture = textures[ i ];
902
+ if ( this.areEqual( texture, value ) ) {
903
+
904
+ foundTexture = texture;
905
+ break;
906
+
907
+ }
908
+
909
+ }
910
+
911
+ if ( foundTexture ) {
912
+
913
+ material[ key ] = foundTexture;
914
+
915
+ } else {
916
+
917
+ textures.push( value );
918
+
919
+ }
920
+
921
+ }
922
+
923
+ }
924
+
925
+ }
926
+
927
+ return material;
928
+
929
+ }
930
+
931
+ };
932
+
933
+ object.traverse( c => {
934
+
935
+ if ( c.isMesh && c.material ) {
936
+
937
+ const material = c.material;
938
+ if ( Array.isArray( material ) ) {
939
+
940
+ for ( let i = 0; i < material.length; i ++ ) {
941
+
942
+ material[ i ] = processMaterial( material[ i ] );
943
+
944
+ }
945
+
946
+ } else {
947
+
948
+ c.material = processMaterial( material );
949
+
950
+ }
951
+
952
+ }
953
+
954
+ } );
955
+
956
+ return { replaced, retained: materials.length };
957
+
958
+ }
959
+
960
+ }
961
+
962
+ class PhysicalCamera extends PerspectiveCamera {
963
+
964
+ set bokehSize( size ) {
965
+
966
+ this.fStop = this.getFocalLength() / size;
967
+
968
+ }
969
+
970
+ get bokehSize() {
971
+
972
+ return this.getFocalLength() / this.fStop;
973
+
974
+ }
975
+
976
+ constructor( ...args ) {
977
+
978
+ super( ...args );
979
+ this.fStop = 1.4;
980
+ this.apertureBlades = 0;
981
+ this.apertureRotation = 0;
982
+ this.focusDistance = 25;
983
+ this.anamorphicRatio = 1;
984
+
985
+ }
986
+
987
+ }
988
+
989
+ const MATERIAL_PIXELS = 19;
990
+ const MATERIAL_STRIDE = MATERIAL_PIXELS * 4;
991
+
992
+ class MaterialsTexture extends DataTexture {
993
+
994
+ constructor() {
995
+
996
+ super( new Float32Array( 4 ), 1, 1 );
997
+
998
+ this.format = RGBAFormat;
999
+ this.type = FloatType;
1000
+ this.wrapS = ClampToEdgeWrapping;
1001
+ this.wrapT = ClampToEdgeWrapping;
1002
+ this.generateMipmaps = false;
1003
+
1004
+ }
1005
+
1006
+ setCastShadow( materialIndex, cast ) {
1007
+
1008
+ // invert the shadow value so we default to "true" when initializing a material
1009
+ const array = this.image.data;
1010
+ const index = materialIndex * MATERIAL_STRIDE + 6 * 4 + 0;
1011
+ array[ index ] = ! cast ? 1 : 0;
1012
+
1013
+ }
1014
+
1015
+ getCastShadow( materialIndex ) {
1016
+
1017
+ const array = this.image.data;
1018
+ const index = materialIndex * MATERIAL_STRIDE + 6 * 4 + 0;
1019
+ return ! Boolean( array[ index ] );
1020
+
1021
+ }
1022
+
1023
+ setSide( materialIndex, side ) {
1024
+
1025
+ const array = this.image.data;
1026
+ const index = materialIndex * MATERIAL_STRIDE + 5 * 4 + 2;
1027
+ switch ( side ) {
1028
+
1029
+ case FrontSide:
1030
+ array[ index ] = 1;
1031
+ break;
1032
+ case BackSide:
1033
+ array[ index ] = - 1;
1034
+ break;
1035
+ case DoubleSide:
1036
+ array[ index ] = 0;
1037
+ break;
1038
+
1039
+ }
1040
+
1041
+ }
1042
+
1043
+ getSide( materialIndex ) {
1044
+
1045
+ const array = this.image.data;
1046
+ const index = materialIndex * MATERIAL_STRIDE + 5 * 4 + 2;
1047
+ switch ( array[ index ] ) {
1048
+
1049
+ case 0:
1050
+ return DoubleSide;
1051
+ case 1:
1052
+ return FrontSide;
1053
+ case - 1:
1054
+ return BackSide;
1055
+
1056
+ }
1057
+
1058
+ return 0;
1059
+
1060
+ }
1061
+
1062
+ setMatte( materialIndex, matte ) {
1063
+
1064
+ const array = this.image.data;
1065
+ const index = materialIndex * MATERIAL_STRIDE + 5 * 4 + 3;
1066
+ array[ index ] = matte ? 1 : 0;
1067
+
1068
+ }
1069
+
1070
+ getMatte( materialIndex ) {
1071
+
1072
+ const array = this.image.data;
1073
+ const index = materialIndex * MATERIAL_STRIDE + 5 * 4 + 3;
1074
+ return Boolean( array[ index ] );
1075
+
1076
+ }
1077
+
1078
	/**
	 * Packs the given materials into this data texture so the path tracing shader can
	 * read per-material parameters. Each material occupies MATERIAL_PIXELS RGBA texels
	 * written in a fixed order (color, metalness/roughness, ior/transmission, emission,
	 * normals, alpha, side/matte/shadow flags, then six 2x4-float texture transforms)
	 * that must stay in sync with the shader-side material reading code.
	 *
	 * @param {Array<Object>} materials - materials to serialize, in material-index order.
	 * @param {Array<Object>} textures - texture list used to resolve texture references
	 *   to indices; a missing texture is encoded as -1.
	 */
	updateFrom( materials, textures ) {

		// index of the texture in `textures`, or `def` when the material has no such key
		function getTexture( material, key, def = - 1 ) {

			return key in material ? textures.indexOf( material[ key ] ) : def;

		}

		// material field value, or `def` when the material has no such key
		function getField( material, key, def ) {

			return key in material ? material[ key ] : def;

		}

		/**
		 * Writes the 3x3 uv transform of the given texture into the array as two rows of
		 * four floats (the fourth float of each row is padding). When the texture is
		 * missing nothing is written but the space is still reserved.
		 *
		 * @param {Object} material
		 * @param {string} textureKey
		 * @param {Float32Array} array
		 * @param {number} offset
		 * @returns {number} number of floats occupied by texture transform matrix (always 8)
		 */
		function writeTextureMatrixToArray( material, textureKey, array, offset ) {

			// check if texture exists
			if ( material[ textureKey ] && material[ textureKey ].isTexture ) {

				const elements = material[ textureKey ].matrix.elements;

				let i = 0;

				// first row
				array[ offset + i ++ ] = elements[ 0 ];
				array[ offset + i ++ ] = elements[ 3 ];
				array[ offset + i ++ ] = elements[ 6 ];
				i ++;

				// second row
				array[ offset + i ++ ] = elements[ 1 ];
				array[ offset + i ++ ] = elements[ 4 ];
				array[ offset + i ++ ] = elements[ 7 ];
				i ++;

			}

			return 8;

		}

		// resize the backing texture to the smallest square that fits every material
		let index = 0;
		const pixelCount = materials.length * MATERIAL_PIXELS;
		const dimension = Math.ceil( Math.sqrt( pixelCount ) );

		if ( this.image.width !== dimension ) {

			this.dispose();

			this.image.data = new Float32Array( dimension * dimension * 4 );
			this.image.width = dimension;
			this.image.height = dimension;

		}

		const floatArray = this.image.data;

		// on some devices (Google Pixel 6) the "floatBitsToInt" function does not work correctly so we
		// can't encode texture ids that way.
		// const intArray = new Int32Array( floatArray.buffer );

		for ( let i = 0, l = materials.length; i < l; i ++ ) {

			const m = materials[ i ];

			// color
			floatArray[ index ++ ] = m.color.r;
			floatArray[ index ++ ] = m.color.g;
			floatArray[ index ++ ] = m.color.b;
			floatArray[ index ++ ] = getTexture( m, 'map' );

			// metalness & roughness
			floatArray[ index ++ ] = getField( m, 'metalness', 0.0 );
			floatArray[ index ++ ] = textures.indexOf( m.metalnessMap );
			floatArray[ index ++ ] = getField( m, 'roughness', 0.0 );
			floatArray[ index ++ ] = textures.indexOf( m.roughnessMap );

			// transmission & emissiveIntensity
			floatArray[ index ++ ] = getField( m, 'ior', 1.0 );
			floatArray[ index ++ ] = getField( m, 'transmission', 0.0 );
			floatArray[ index ++ ] = getTexture( m, 'transmissionMap' );
			floatArray[ index ++ ] = getField( m, 'emissiveIntensity', 0.0 );

			// emission
			if ( 'emissive' in m ) {

				floatArray[ index ++ ] = m.emissive.r;
				floatArray[ index ++ ] = m.emissive.g;
				floatArray[ index ++ ] = m.emissive.b;

			} else {

				floatArray[ index ++ ] = 0.0;
				floatArray[ index ++ ] = 0.0;
				floatArray[ index ++ ] = 0.0;

			}

			floatArray[ index ++ ] = getTexture( m, 'emissiveMap' );

			// normals
			floatArray[ index ++ ] = getTexture( m, 'normalMap' );
			if ( 'normalScale' in m ) {

				floatArray[ index ++ ] = m.normalScale.x;
				floatArray[ index ++ ] = m.normalScale.y;

			} else {

				floatArray[ index ++ ] = 1;
				floatArray[ index ++ ] = 1;

			}

			floatArray[ index ++ ] = getTexture( m, 'alphaMap' );

			// side & matte
			floatArray[ index ++ ] = m.opacity;
			floatArray[ index ++ ] = m.alphaTest;
			// the side and matte texels are written separately via setSide / setMatte so
			// the slots are only skipped here to preserve any previously set values
			index ++; // side
			index ++; // matte

			index ++; // shadow
			index ++;
			index ++;
			index ++;

			// map transform
			index += writeTextureMatrixToArray( m, 'map', floatArray, index );

			// metalnessMap transform
			index += writeTextureMatrixToArray( m, 'metalnessMap', floatArray, index );

			// roughnessMap transform
			index += writeTextureMatrixToArray( m, 'roughnessMap', floatArray, index );

			// transmissionMap transform
			index += writeTextureMatrixToArray( m, 'transmissionMap', floatArray, index );

			// emissiveMap transform
			index += writeTextureMatrixToArray( m, 'emissiveMap', floatArray, index );

			// normalMap transform
			index += writeTextureMatrixToArray( m, 'normalMap', floatArray, index );

		}

		this.needsUpdate = true;

	}
1236
+
1237
+ }
1238
+
1239
// scratch color used to save / restore the renderer clear color in RenderTarget2DArray.setTextures
const prevColor = new Color();
1240
// A 2D array render target that copies a list of textures into its layers so the
// path tracer can sample all scene textures through a single sampler2DArray.
class RenderTarget2DArray extends WebGLArrayRenderTarget {

	constructor( ...args ) {

		super( ...args );

		const tex = this.texture;
		tex.format = RGBAFormat;
		tex.type = UnsignedByteType;
		tex.minFilter = LinearFilter;
		tex.magFilter = LinearFilter;
		tex.wrapS = RepeatWrapping;
		tex.wrapT = RepeatWrapping;
		// forward so callers holding only the texture can refresh the layers
		tex.setTextures = ( ...args ) => {

			this.setTextures( ...args );

		};

		const fsQuad = new FullScreenQuad( new MeshBasicMaterial() );
		this.fsQuad = fsQuad;

	}

	/**
	 * Renders each texture of `textures` into one layer of this array target.
	 *
	 * @param {Object} renderer - the WebGLRenderer to copy with.
	 * @param {number} width - layer width in pixels.
	 * @param {number} height - layer height in pixels.
	 * @param {Array<Object>} textures - textures to copy, one per layer.
	 */
	setTextures( renderer, width, height, textures ) {

		// save previous renderer state
		const prevRenderTarget = renderer.getRenderTarget();
		const prevToneMapping = renderer.toneMapping;
		const prevAlpha = renderer.getClearAlpha();
		renderer.getClearColor( prevColor );

		// resize the render target and ensure we don't have an empty texture
		// render target depth must be >= 1 to avoid unbound texture error on android devices
		const depth = textures.length || 1;
		this.setSize( width, height, depth );
		renderer.setClearColor( 0, 0 );
		renderer.toneMapping = NoToneMapping;

		// render each texture into each layer of the target
		const fsQuad = this.fsQuad;
		for ( let i = 0, l = depth; i < l; i ++ ) {

			const texture = textures[ i ];
			if ( texture ) {

				// revert to default texture transform before rendering, saving the
				// caller's matrixAutoUpdate setting rather than clobbering it
				const prevMatrixAutoUpdate = texture.matrixAutoUpdate;
				texture.matrixAutoUpdate = false;
				texture.matrix.identity();

				fsQuad.material.map = texture;
				fsQuad.material.transparent = true;

				renderer.setRenderTarget( this, i );
				fsQuad.render( renderer );

				// restore custom texture transform
				texture.updateMatrix();
				texture.matrixAutoUpdate = prevMatrixAutoUpdate;

			}

		}

		// reset the renderer
		fsQuad.material.map = null;
		renderer.setClearColor( prevColor, prevAlpha );
		renderer.setRenderTarget( prevRenderTarget );
		renderer.toneMapping = prevToneMapping;

	}

	// Releases GPU resources. FullScreenQuad.dispose only releases the quad
	// geometry, so the MeshBasicMaterial must be disposed explicitly to avoid
	// leaking its shader program.
	dispose() {

		super.dispose();
		this.fsQuad.material.dispose();
		this.fsQuad.dispose();

	}

}
1320
+
1321
// Lower-bound binary search over the window array[ offset .. offset + count ): returns
// the smallest index (relative to `offset`) whose element is >= targetValue, or `count`
// when every element in the window is below the target.
function binarySearchFindClosestIndexOf( array, targetValue, offset = 0, count = array.length ) {

	let lo = 0;
	let hi = count;

	while ( lo < hi ) {

		const mid = ( lo + hi ) >> 1;

		if ( array[ offset + mid ] < targetValue ) {

			// everything up to and including mid is below the target
			lo = mid + 1;

		} else {

			// mid itself may be the answer so keep it in range
			hi = mid;

		}

	}

	return lo;

}
1347
+
1348
// Converts linear RGB components to relative luminance using the Rec. 709 weights.
// https://en.wikipedia.org/wiki/Relative_luminance
function colorToLuminance( r, g, b ) {

	const R_WEIGHT = 0.2126;
	const G_WEIGHT = 0.7152;
	const B_WEIGHT = 0.0722;

	return R_WEIGHT * r + G_WEIGHT * g + B_WEIGHT * b;

}
1354
+
1355
+ // ensures the data is all floating point values and flipY is false
1356
// Returns a copy of the given environment map whose data is all floating point
// values and whose flipY flag is false, so the CDF computation can assume a
// consistent, upright float layout. The input texture is not modified.
function preprocessEnvMap( envMap ) {

	const map = envMap.clone();
	map.source = new Source( { ...map.image } );
	const { width, height, data } = map.image;

	// TODO: is there a simple way to avoid cloning and adjusting the env map data here?
	// convert the data from half float uint 16 arrays to float arrays for cdf computation
	let newData = data;
	if ( map.type === HalfFloatType ) {

		newData = new Float32Array( data.length );

		// use an indexed loop rather than for...in: for...in yields string keys and
		// enumerates any extra enumerable properties, which is both slower and fragile
		for ( let i = 0, l = data.length; i < l; i ++ ) {

			newData[ i ] = DataUtils.fromHalfFloat( data[ i ] );

		}

		map.image.data = newData;
		map.type = FloatType;

	}

	// remove any y flipping for cdf computation
	if ( map.flipY ) {

		const ogData = newData;
		newData = newData.slice();
		for ( let y = 0; y < height; y ++ ) {

			for ( let x = 0; x < width; x ++ ) {

				const newY = height - y - 1;
				const ogIndex = 4 * ( y * width + x );
				const newIndex = 4 * ( newY * width + x );

				newData[ newIndex + 0 ] = ogData[ ogIndex + 0 ];
				newData[ newIndex + 1 ] = ogData[ ogIndex + 1 ];
				newData[ newIndex + 2 ] = ogData[ ogIndex + 2 ];
				newData[ newIndex + 3 ] = ogData[ ogIndex + 3 ];

			}

		}

		map.flipY = false;
		map.image.data = newData;

	}

	return map;

}
1409
+
1410
// Builds and owns the data textures needed to importance sample an equirectangular
// HDR environment map: a marginal (row) CDF, a per-row conditional (column) CDF,
// the preprocessed map itself, and the map's total luminance sum.
class EquirectHdrInfoUniform {

	constructor() {

		// Stores a map of [0, 1] value -> cumulative importance row & pdf
		// used to sampling a random value to a relevant row to sample from
		const marginalWeights = new DataTexture();
		marginalWeights.type = FloatType;
		marginalWeights.format = RedFormat;
		marginalWeights.minFilter = LinearFilter;
		marginalWeights.magFilter = LinearFilter;
		marginalWeights.generateMipmaps = false;

		// Stores a map of [0, 1] value -> cumulative importance column & pdf
		// used to sampling a random value to a relevant pixel to sample from
		const conditionalWeights = new DataTexture();
		conditionalWeights.type = FloatType;
		conditionalWeights.format = RedFormat;
		conditionalWeights.minFilter = LinearFilter;
		conditionalWeights.magFilter = LinearFilter;
		conditionalWeights.generateMipmaps = false;

		// store the total sum in a 1x1 tex since some android mobile devices have issues
		// storing large values in structs.
		const totalSumTex = new DataTexture();
		totalSumTex.type = FloatType;
		totalSumTex.format = RedFormat;
		totalSumTex.minFilter = LinearFilter;
		totalSumTex.magFilter = LinearFilter;
		totalSumTex.generateMipmaps = false;

		this.marginalWeights = marginalWeights;
		this.conditionalWeights = conditionalWeights;
		this.totalSum = totalSumTex;
		this.map = null;

	}

	// Releases all owned textures, including the preprocessed copy of the env map.
	dispose() {

		this.marginalWeights.dispose();
		this.conditionalWeights.dispose();
		this.totalSum.dispose();
		if ( this.map ) this.map.dispose();

	}

	/**
	 * Recomputes the sampling distributions from the given HDR texture. The input
	 * is not modified - a float, non-flipped copy is created and stored as this.map.
	 *
	 * @param {Object} hdr - equirectangular HDR texture to build the CDFs from.
	 */
	updateFrom( hdr ) {

		// https://github.com/knightcrawler25/GLSL-PathTracer/blob/3c6fd9b6b3da47cd50c527eeb45845eef06c55c3/src/loaders/hdrloader.cpp
		// https://pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Sampling_Light_Sources#InfiniteAreaLights
		const map = preprocessEnvMap( hdr );
		map.wrapS = RepeatWrapping;
		map.wrapT = RepeatWrapping;

		const { width, height, data } = map.image;

		// "conditional" = "pixel relative to row pixels sum"
		// "marginal" = "row relative to row sum"

		// track the importance of any given pixel in the image by tracking its weight relative to other pixels in the image
		const pdfConditional = new Float32Array( width * height );
		const cdfConditional = new Float32Array( width * height );

		const pdfMarginal = new Float32Array( height );
		const cdfMarginal = new Float32Array( height );

		let totalSumValue = 0.0;
		let cumulativeWeightMarginal = 0.0;
		for ( let y = 0; y < height; y ++ ) {

			let cumulativeRowWeight = 0.0;
			for ( let x = 0; x < width; x ++ ) {

				const i = y * width + x;
				const r = data[ 4 * i + 0 ];
				const g = data[ 4 * i + 1 ];
				const b = data[ 4 * i + 2 ];

				// the probability of the pixel being selected in this row is the
				// scale of the luminance relative to the rest of the pixels.
				// TODO: this should also account for the solid angle of the pixel when sampling
				const weight = colorToLuminance( r, g, b );
				cumulativeRowWeight += weight;
				totalSumValue += weight;

				pdfConditional[ i ] = weight;
				cdfConditional[ i ] = cumulativeRowWeight;

			}

			// can happen if the row is all black
			if ( cumulativeRowWeight !== 0 ) {

				// scale the pdf and cdf to [0.0, 1.0]
				for ( let i = y * width, l = y * width + width; i < l; i ++ ) {

					pdfConditional[ i ] /= cumulativeRowWeight;
					cdfConditional[ i ] /= cumulativeRowWeight;

				}

			}

			cumulativeWeightMarginal += cumulativeRowWeight;

			// compute the marginal pdf and cdf along the height of the map.
			pdfMarginal[ y ] = cumulativeRowWeight;
			cdfMarginal[ y ] = cumulativeWeightMarginal;

		}

		// can happen if the texture is all black
		if ( cumulativeWeightMarginal !== 0 ) {

			// scale the marginal pdf and cdf to [0.0, 1.0]
			for ( let i = 0, l = pdfMarginal.length; i < l; i ++ ) {

				pdfMarginal[ i ] /= cumulativeWeightMarginal;
				cdfMarginal[ i ] /= cumulativeWeightMarginal;

			}

		}

		// compute a sorted index of distributions and the probabilities along them for both
		// the marginal and conditional data. These will be used to sample with a random number
		// to retrieve a uv value to sample in the environment map.
		// These values continually increase so it's okay to interpolate between them.
		const marginalDataArray = new Float32Array( height );
		const conditionalDataArray = new Float32Array( width * height );

		for ( let i = 0; i < height; i ++ ) {

			const dist = ( i + 1 ) / height;
			const row = binarySearchFindClosestIndexOf( cdfMarginal, dist );

			marginalDataArray[ i ] = row / height;

		}

		for ( let y = 0; y < height; y ++ ) {

			for ( let x = 0; x < width; x ++ ) {

				const i = y * width + x;
				const dist = ( x + 1 ) / width;
				const col = binarySearchFindClosestIndexOf( cdfConditional, dist, y * width, width );

				conditionalDataArray[ i ] = col / width;

			}

		}

		// release the previous textures before assigning the freshly computed data
		this.dispose();

		const { marginalWeights, conditionalWeights, totalSum } = this;
		marginalWeights.image = { width: height, height: 1, data: marginalDataArray };
		marginalWeights.needsUpdate = true;

		conditionalWeights.image = { width, height, data: conditionalDataArray };
		conditionalWeights.needsUpdate = true;

		totalSum.image = { width: 1, height: 1, data: new Float32Array( [ totalSumValue ] ) };
		totalSum.needsUpdate = true;

		this.map = map;

	}

}
1582
+
1583
// Plain uniform holder for the depth-of-field parameters the shader-side
// PhysicalCamera struct expects.
class PhysicalCameraUniform {

	constructor() {

		// defaults describe a pinhole camera with no depth-of-field effect
		this.bokehSize = 0;
		this.apertureBlades = 0;
		this.apertureRotation = 0;
		this.focusDistance = 10;
		this.anamorphicRatio = 1;

	}

	// Copies the depth-of-field settings from the camera when it is a
	// PhysicalCamera; otherwise resets every field to its default value.
	updateFrom( camera ) {

		const isPhysical = camera instanceof PhysicalCamera;

		this.bokehSize = isPhysical ? camera.bokehSize : 0;
		this.apertureBlades = isPhysical ? camera.apertureBlades : 0;
		this.apertureRotation = isPhysical ? camera.apertureRotation : 0;
		this.focusDistance = isPhysical ? camera.focusDistance : 10;
		this.anamorphicRatio = isPhysical ? camera.anamorphicRatio : 1;

	}

}
1618
+
1619
// Shared GLSL utility chunk injected into the path tracing shaders: fresnel helpers,
// normal-basis construction, hemisphere / sphere / aperture sampling, a pcg4d-based
// RNG, luminance, and equirect uv <-> direction mapping.
const shaderUtils = /* glsl */`

// https://google.github.io/filament/Filament.md.html#materialsystem/diffusebrdf
float schlickFresnel( float cosine, float f0 ) {

	return f0 + ( 1.0 - f0 ) * pow( 1.0 - cosine, 5.0 );

}

// https://raytracing.github.io/books/RayTracingInOneWeekend.html#dielectrics/schlickapproximation
float schlickFresnelFromIor( float cosine, float iorRatio ) {

	// Schlick approximation
	float r_0 = pow( ( 1.0 - iorRatio ) / ( 1.0 + iorRatio ), 2.0 );
	return schlickFresnel( cosine, r_0 );

}

// forms a basis with the normal vector as Z
mat3 getBasisFromNormal( vec3 normal ) {

	vec3 other;
	if ( abs( normal.x ) > 0.5 ) {

		other = vec3( 0.0, 1.0, 0.0 );

	} else {

		other = vec3( 1.0, 0.0, 0.0 );

	}

	vec3 ortho = normalize( cross( normal, other ) );
	vec3 ortho2 = normalize( cross( normal, ortho ) );
	return mat3( ortho2, ortho, normal );

}

vec3 getHalfVector( vec3 a, vec3 b ) {

	return normalize( a + b );

}

// The discrepancy between interpolated surface normal and geometry normal can cause issues when a ray
// is cast that is on the top side of the geometry normal plane but below the surface normal plane. If
// we find a ray like that we ignore it to avoid artifacts.
// This function returns if the direction is on the same side of both planes.
bool isDirectionValid( vec3 direction, vec3 surfaceNormal, vec3 geometryNormal ) {

	bool aboveSurfaceNormal = dot( direction, surfaceNormal ) > 0.0;
	bool aboveGeometryNormal = dot( direction, geometryNormal ) > 0.0;
	return aboveSurfaceNormal == aboveGeometryNormal;

}

vec3 getHemisphereSample( vec3 n, vec2 uv ) {

	// https://www.rorydriscoll.com/2009/01/07/better-sampling/
	// https://graphics.pixar.com/library/OrthonormalB/paper.pdf
	float sign = n.z == 0.0 ? 1.0 : sign( n.z );
	float a = - 1.0 / ( sign + n.z );
	float b = n.x * n.y * a;
	vec3 b1 = vec3( 1.0 + sign * n.x * n.x * a, sign * b, - sign * n.x );
	vec3 b2 = vec3( b, sign + n.y * n.y * a, - n.y );

	float r = sqrt( uv.x );
	float theta = 2.0 * PI * uv.y;
	float x = r * cos( theta );
	float y = r * sin( theta );
	return x * b1 + y * b2 + sqrt( 1.0 - uv.x ) * n;

}

// https://www.shadertoy.com/view/wltcRS
uvec4 s0;

void rng_initialize(vec2 p, int frame) {

	// white noise seed
	s0 = uvec4( p, uint( frame ), uint( p.x ) + uint( p.y ) );

}

// https://www.pcg-random.org/
void pcg4d( inout uvec4 v ) {

	v = v * 1664525u + 1013904223u;
	v.x += v.y * v.w;
	v.y += v.z * v.x;
	v.z += v.x * v.y;
	v.w += v.y * v.z;
	v = v ^ ( v >> 16u );
	v.x += v.y*v.w;
	v.y += v.z*v.x;
	v.z += v.x*v.y;
	v.w += v.y*v.z;

}

// returns [ 0, 1 ]
float rand() {

	pcg4d(s0);
	return float( s0.x ) / float( 0xffffffffu );

}

vec2 rand2() {

	pcg4d( s0 );
	return vec2( s0.xy ) / float(0xffffffffu);

}

vec3 rand3() {

	pcg4d(s0);
	return vec3( s0.xyz ) / float( 0xffffffffu );

}

vec4 rand4() {

	pcg4d(s0);
	return vec4(s0)/float(0xffffffffu);

}

// https://github.com/mrdoob/three.js/blob/dev/src/math/Vector3.js#L724
vec3 randDirection() {

	vec2 r = rand2();
	float u = ( r.x - 0.5 ) * 2.0;
	float t = r.y * PI * 2.0;
	float f = sqrt( 1.0 - u * u );

	return vec3( f * cos( t ), f * sin( t ), u );

}

vec2 triangleSample( vec2 a, vec2 b, vec2 c ) {

	// get the edges of the triangle and the diagonal across the
	// center of the parallelogram
	vec2 e1 = a - b;
	vec2 e2 = c - b;
	vec2 diag = normalize( e1 + e2 );

	// pick a random point in the parallelogram
	vec2 r = rand2();
	if ( r.x + r.y > 1.0 ) {

		r = vec2( 1.0 ) - r;

	}

	return e1 * r.x + e2 * r.y;

}

// samples an aperture shape with the given number of sides. 0 means circle
vec2 sampleAperture( int blades ) {

	if ( blades == 0 ) {

		vec2 r = rand2();
		float angle = 2.0 * PI * r.x;
		float radius = sqrt( rand() );
		return vec2( cos( angle ), sin( angle ) ) * radius;

	} else {

		blades = max( blades, 3 );

		vec3 r = rand3();
		float anglePerSegment = 2.0 * PI / float( blades );
		float segment = floor( float( blades ) * r.x );

		float angle1 = anglePerSegment * segment;
		float angle2 = angle1 + anglePerSegment;
		vec2 a = vec2( sin( angle1 ), cos( angle1 ) );
		vec2 b = vec2( 0.0, 0.0 );
		vec2 c = vec2( sin( angle2 ), cos( angle2 ) );

		return triangleSample( a, b, c );

	}

}

float colorToLuminance( vec3 color ) {

	// https://en.wikipedia.org/wiki/Relative_luminance
	return 0.2126 * color.r + 0.7152 * color.g + 0.0722 * color.b;

}

// ray sampling x and z are swapped to align with expected background view
vec2 equirectDirectionToUv( vec3 direction ) {

	// from Spherical.setFromCartesianCoords
	vec2 uv = vec2( atan( direction.z, direction.x ), acos( direction.y ) );
	uv /= vec2( 2.0 * PI, PI );

	// apply adjustments to get values in range [0, 1] and y right side up
	uv.x += 0.5;
	uv.y = 1.0 - uv.y;
	return uv;

}

vec3 equirectUvToDirection( vec2 uv ) {

	// undo above adjustments
	uv.x -= 0.5;
	uv.y = 1.0 - uv.y;

	// from Vector3.setFromSphericalCoords
	float theta = uv.x * 2.0 * PI;
	float phi = uv.y * PI;

	float sinPhi = sin( phi );

	return vec3( sinPhi * cos( theta ), cos( phi ), sinPhi * sin( theta ) );

}

// Fast arccos approximation used to remove banding artifacts caused by numerical errors in acos.
// This is a cubic Lagrange interpolating polynomial for x = [-1, -1/2, 0, 1/2, 1].
// For more information see: https://github.com/gkjohnson/three-gpu-pathtracer/pull/171#issuecomment-1152275248
float acosApprox( float x ) {

	x = clamp( x, -1.0, 1.0 );
	return ( - 0.69813170079773212 * x * x - 0.87266462599716477 ) * x + 1.5707963267948966;

}

// An acos with input values bound to the range [-1, 1].
float acosSafe( float x ) {

	return acos( clamp( x, -1.0, 1.0 ) );

}

`;
1865
+
1866
// Fullscreen material that samples a PMREM (cube-uv packed) texture along
// equirectangular directions; used by BlurredEnvMapGenerator to bake a blurred
// equirect environment map.
class PMREMCopyMaterial extends MaterialBase {

	constructor() {

		super( {

			uniforms: {

				// PMREM-packed environment texture to copy from
				envMap: { value: null },
				// blur amount used as the textureCubeUV roughness level
				blur: { value: 0 },

			},

			vertexShader: /* glsl */`

				varying vec2 vUv;
				void main() {

					vUv = uv;
					gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );

				}

			`,

			fragmentShader: /* glsl */`

				#include <common>
				#include <cube_uv_reflection_fragment>

				${ shaderUtils }

				uniform sampler2D envMap;
				uniform float blur;
				varying vec2 vUv;
				void main() {

					vec3 rayDirection = equirectUvToDirection( vUv );
					gl_FragColor = textureCubeUV( envMap, rayDirection, blur );

				}

			`,

		} );

	}

}
1913
+
1914
// Produces pre-blurred equirectangular environment maps by rendering a PMREM mip
// level back into a flat equirect texture and reading the pixels back to the CPU.
class BlurredEnvMapGenerator {

	constructor( renderer ) {

		this.renderer = renderer;
		this.pmremGenerator = new PMREMGenerator( renderer );
		this.copyQuad = new FullScreenQuad( new PMREMCopyMaterial() );
		this.renderTarget = new WebGLRenderTarget( 1, 1, { type: FloatType, format: RGBAFormat } );

	}

	// Releases all GPU resources owned by the generator. The copy quad material is
	// disposed explicitly because FullScreenQuad.dispose only releases its geometry.
	dispose() {

		this.pmremGenerator.dispose();
		this.copyQuad.dispose();
		this.copyQuad.material.dispose();
		this.renderTarget.dispose();

	}

	/**
	 * Returns a new equirectangular DataTexture containing `texture` blurred by the
	 * given amount (used as the PMREM roughness level).
	 *
	 * @param {Object} texture - equirectangular environment texture to blur.
	 * @param {number} blur - blur amount passed to the copy material.
	 * @returns {Object} float RGBA DataTexture with equirect mapping.
	 */
	generate( texture, blur ) {

		const { pmremGenerator, renderTarget, copyQuad, renderer } = this;

		// get the pmrem target
		const pmremTarget = pmremGenerator.fromEquirectangular( texture );

		// set up the material
		const { width, height } = texture.image;
		renderTarget.setSize( width, height );
		copyQuad.material.envMap = pmremTarget.texture;
		copyQuad.material.blur = blur;

		// render
		const prevRenderTarget = renderer.getRenderTarget();
		const prevClear = renderer.autoClear;

		renderer.setRenderTarget( renderTarget );
		renderer.autoClear = true;
		copyQuad.render( renderer );

		renderer.setRenderTarget( prevRenderTarget );
		renderer.autoClear = prevClear;

		// read the data back
		const buffer = new Float32Array( width * height * 4 );
		renderer.readRenderTargetPixels( renderTarget, 0, 0, width, height, buffer );

		// release the intermediate pmrem target now that its contents have been read
		// back - the caller owns targets returned by fromEquirectangular, so skipping
		// this leaks a GPU render target on every call
		copyQuad.material.envMap = null;
		pmremTarget.dispose();

		const result = new DataTexture( buffer, width, height, RGBAFormat, FloatType );
		result.minFilter = texture.minFilter;
		result.magFilter = texture.magFilter;
		result.wrapS = texture.wrapS;
		result.wrapT = texture.wrapT;
		result.mapping = EquirectangularReflectionMapping;
		result.needsUpdate = true;

		return result;

	}

}
1974
+
1975
// GLSL struct definitions and texel-fetch readers for the camera, environment, and
// per-material data packed on the JS side (MaterialsTexture and the uniform classes).
// The field order in readMaterialInfo must stay in sync with MaterialsTexture.updateFrom.
const shaderMaterialStructs = /* glsl */ `

struct PhysicalCamera {

	float focusDistance;
	float anamorphicRatio;
	float bokehSize;
	int apertureBlades;
	float apertureRotation;

};

struct EquirectHdrInfo {

	sampler2D marginalWeights;
	sampler2D conditionalWeights;
	sampler2D map;
	sampler2D totalSum;

};

struct Material {

	vec3 color;
	int map;

	float metalness;
	int metalnessMap;

	float roughness;
	int roughnessMap;

	float ior;
	float transmission;
	int transmissionMap;

	float emissiveIntensity;
	vec3 emissive;
	int emissiveMap;

	int normalMap;
	vec2 normalScale;

	int alphaMap;

	bool castShadow;
	float opacity;
	float alphaTest;

	float side;
	bool matte;

	mat3 mapTransform;
	mat3 metalnessMapTransform;
	mat3 roughnessMapTransform;
	mat3 transmissionMapTransform;
	mat3 emissiveMapTransform;
	mat3 normalMapTransform;

};

mat3 readTextureTransform( sampler2D tex, uint index ) {

	mat3 textureTransform;

	vec4 row1 = texelFetch1D( tex, index );
	vec4 row2 = texelFetch1D( tex, index + 1u );

	textureTransform[0] = vec3(row1.r, row2.r, 0.0);
	textureTransform[1] = vec3(row1.g, row2.g, 0.0);
	textureTransform[2] = vec3(row1.b, row2.b, 1.0);

	return textureTransform;

}

Material readMaterialInfo( sampler2D tex, uint index ) {

	uint i = index * 19u;

	vec4 s0 = texelFetch1D( tex, i + 0u );
	vec4 s1 = texelFetch1D( tex, i + 1u );
	vec4 s2 = texelFetch1D( tex, i + 2u );
	vec4 s3 = texelFetch1D( tex, i + 3u );
	vec4 s4 = texelFetch1D( tex, i + 4u );
	vec4 s5 = texelFetch1D( tex, i + 5u );
	vec4 s6 = texelFetch1D( tex, i + 6u );

	Material m;
	m.color = s0.rgb;
	m.map = int( round( s0.a ) );

	m.metalness = s1.r;
	m.metalnessMap = int( round( s1.g ) );
	m.roughness = s1.b;
	m.roughnessMap = int( round( s1.a ) );

	m.ior = s2.r;
	m.transmission = s2.g;
	m.transmissionMap = int( round( s2.b ) );
	m.emissiveIntensity = s2.a;

	m.emissive = s3.rgb;
	m.emissiveMap = int( round( s3.a ) );

	m.normalMap = int( round( s4.r ) );
	m.normalScale = s4.gb;

	m.alphaMap = int( round( s4.a ) );

	m.opacity = s5.r;
	m.alphaTest = s5.g;
	m.side = s5.b;
	m.matte = bool( s5.a );

	m.castShadow = ! bool( s6.r );

	uint firstTextureTransformIdx = i + 7u;

	m.mapTransform = m.map == - 1 ? mat3( 0 ) : readTextureTransform( tex, firstTextureTransformIdx);
	m.metalnessMapTransform = m.metalnessMap == - 1 ? mat3( 0 ) : readTextureTransform( tex, firstTextureTransformIdx + 2u );
	m.roughnessMapTransform = m.roughnessMap == - 1 ? mat3( 0 ) : readTextureTransform( tex, firstTextureTransformIdx + 4u );
	m.transmissionMapTransform = m.transmissionMap == - 1 ? mat3( 0 ) : readTextureTransform( tex, firstTextureTransformIdx + 6u );
	m.emissiveMapTransform = m.emissiveMap == - 1 ? mat3( 0 ) : readTextureTransform( tex, firstTextureTransformIdx + 8u );
	m.normalMapTransform = m.normalMap == - 1 ? mat3( 0 ) : readTextureTransform( tex, firstTextureTransformIdx + 10u );

	return m;

}

`;
2106
+
2107
+ const shaderGGXFunctions = /* glsl */`
2108
+ // The GGX functions provide sampling and distribution information for normals as output so
2109
+ // in order to get probability of scatter direction the half vector must be computed and provided.
2110
+ // [0] https://www.cs.cornell.edu/~srm/publications/EGSR07-btdf.pdf
2111
+ // [1] https://hal.archives-ouvertes.fr/hal-01509746/document
2112
+ // [2] http://jcgt.org/published/0007/04/01/
2113
+ // [4] http://jcgt.org/published/0003/02/03/
2114
+
2115
+ // trowbridge-reitz === GGX === GTR
2116
+
2117
+ vec3 ggxDirection( vec3 incidentDir, float roughnessX, float roughnessY, float random1, float random2 ) {
2118
+
2119
+ // TODO: try GGXVNDF implementation from reference [2], here. Needs to update ggxDistribution
2120
+ // function below, as well
2121
+
2122
+ // Implementation from reference [1]
2123
+ // stretch view
2124
+ vec3 V = normalize( vec3( roughnessX * incidentDir.x, roughnessY * incidentDir.y, incidentDir.z ) );
2125
+
2126
+ // orthonormal basis
2127
+ vec3 T1 = ( V.z < 0.9999 ) ? normalize( cross( V, vec3( 0.0, 0.0, 1.0 ) ) ) : vec3( 1.0, 0.0, 0.0 );
2128
+ vec3 T2 = cross( T1, V );
2129
+
2130
+ // sample point with polar coordinates (r, phi)
2131
+ float a = 1.0 / ( 1.0 + V.z );
2132
+ float r = sqrt( random1 );
2133
+ float phi = ( random2 < a ) ? random2 / a * PI : PI + ( random2 - a ) / ( 1.0 - a ) * PI;
2134
+ float P1 = r * cos( phi );
2135
+ float P2 = r * sin( phi ) * ( ( random2 < a ) ? 1.0 : V.z );
2136
+
2137
+ // compute normal
2138
+ vec3 N = P1 * T1 + P2 * T2 + V * sqrt( max( 0.0, 1.0 - P1 * P1 - P2 * P2 ) );
2139
+
2140
+ // unstretch
2141
+ N = normalize( vec3( roughnessX * N.x, roughnessY * N.y, max( 0.0, N.z ) ) );
2142
+
2143
+ return N;
2144
+
2145
+ }
2146
+
2147
+ // Below are PDF and related functions for use in a Monte Carlo path tracer
2148
+ // as specified in Appendix B of the following paper
2149
+ // See equation (2) from reference [2]
2150
+ float ggxLamda( float theta, float roughness ) {
2151
+
2152
+ float tanTheta = tan( theta );
2153
+ float tanTheta2 = tanTheta * tanTheta;
2154
+ float alpha2 = roughness * roughness;
2155
+
2156
+ float numerator = - 1.0 + sqrt( 1.0 + alpha2 * tanTheta2 );
2157
+ return numerator / 2.0;
2158
+
2159
+ }
2160
+
2161
+ // See equation (2) from reference [2]
2162
+ float ggxShadowMaskG1( float theta, float roughness ) {
2163
+
2164
+ return 1.0 / ( 1.0 + ggxLamda( theta, roughness ) );
2165
+
2166
+ }
2167
+
2168
+ // See equation (125) from reference [4]
2169
// Smith height-correlated shadowing-masking term G2 for an incoming and
// outgoing direction pair in tangent space ( z = cos( theta ) ).
float ggxShadowMaskG2( vec3 wi, vec3 wo, float roughness ) {

	float lambdaIn = ggxLamda( acos( wi.z ), roughness );
	float lambdaOut = ggxLamda( acos( wo.z ), roughness );

	return 1.0 / ( 1.0 + lambdaIn + lambdaOut );

}
2176
+
2177
// GGX normal distribution function D evaluated for a tangent-space half
// vector ( halfVector.z = cos( theta_h ) ) and the given roughness.
float ggxDistribution( vec3 halfVector, float roughness ) {

	// See equation (33) from reference [0]
	float a2 = roughness * roughness;
	// clamp alpha^2 away from zero so perfectly smooth surfaces don't
	// produce a degenerate ( NaN / infinite ) distribution
	a2 = max( EPSILON, a2 );
	float cosTheta = halfVector.z;
	float cosTheta4 = pow( cosTheta, 4.0 );

	// grazing half vectors contribute nothing and would divide by zero below
	if ( cosTheta == 0.0 ) return 0.0;

	float theta = acosSafe( halfVector.z );
	float tanTheta = tan( theta );
	float tanTheta2 = pow( tanTheta, 2.0 );

	float denom = PI * cosTheta4 * pow( a2 + tanTheta2, 2.0 );
	return ( a2 / denom );

	// See equation (1) from reference [2]
	// const { x, y, z } = halfVector;
	// const a2 = roughness * roughness;
	// const mult = x * x / a2 + y * y / a2 + z * z;
	// const mult2 = mult * mult;

	// return 1.0 / Math.PI * a2 * mult2;

}
2203
+
2204
+ // See equation (3) from reference [2]
2205
// PDF of a half vector sampled from the visible normal distribution for the
// incident direction wi: D * G1 * max( 0, wi . h ) / wi.z.
// NOTE(review): wi.z == 0 would divide by zero here — callers appear to only
// evaluate this for directions above the surface; confirm.
float ggxPDF( vec3 wi, vec3 halfVector, float roughness ) {

	float incidentTheta = acos( wi.z );
	float D = ggxDistribution( halfVector, roughness );
	float G1 = ggxShadowMaskG1( incidentTheta, roughness );

	return D * G1 * max( 0.0, dot( wi, halfVector ) ) / wi.z;

}
2214
+ `;
2215
+
2216
+ const shaderMaterialSampling = /* glsl */`
2217
+
2218
// Shading inputs for the hit surface, resolved from material + textures.
struct SurfaceRec {
	vec3 normal;              // shading normal (normal-mapped)
	vec3 faceNormal;          // geometric triangle normal
	bool frontFace;           // whether the front side was hit (or no transmission)
	float roughness;          // raw material roughness
	float filteredRoughness;  // roughness clamped up by accumulated path roughness
	float metalness;          // metallic factor
	vec3 color;               // base color / albedo
	vec3 emission;            // emissive radiance
	float transmission;       // transmissive factor
	float ior;                // index of refraction
};
2230
+
2231
// Result of sampling the BSDF for a new bounce direction.
struct SampleRec {
	float specularPdf;  // portion of the pdf attributable to the specular lobe
	float pdf;          // total pdf of the sampled direction
	vec3 direction;     // sampled direction in the surface tangent frame
	vec3 color;         // BSDF throughput for the sampled direction
};
2237
+
2238
+ ${ shaderGGXFunctions }
2239
+
2240
+ // diffuse
2241
// PDF of the cosine-weighted hemisphere sample: cos( theta ) / PI.
// https://raytracing.github.io/books/RayTracingTheRestOfYourLife.html#lightscattering/thescatteringpdf
float diffusePDF( vec3 wo, vec3 wi, SurfaceRec surf ) {

	return wi.z / PI;

}
2248
+
2249
// Draw a cosine-weighted direction on the upper hemisphere by offsetting a
// uniform unit-sphere sample along +z and renormalizing.
vec3 diffuseDirection( vec3 wo, SurfaceRec surf ) {

	vec3 sampleDir = randDirection();
	sampleDir.z += 1.0;

	return normalize( sampleDir );

}
2258
+
2259
// Diffuse lobe contribution: albedo scaled by cos( theta ), excluding
// metallic and transmissive portions of the surface.
vec3 diffuseColor( vec3 wo, vec3 wi, SurfaceRec surf ) {

	// TODO: scale by 1 - F here
	// note on division by PI
	// https://seblagarde.wordpress.com/2012/01/08/pi-or-not-to-pi-in-game-lighting-equation/
	// NOTE(review): this divides by PI twice; a Lambert BRDF times cos( theta )
	// divides by PI once — confirm the extra 1 / PI is intentional.
	float metalFactor = ( 1.0 - surf.metalness ) * wi.z / ( PI * PI );
	// NOTE(review): a ( 1 - transmission ) factor is also applied by some
	// callers — verify it is not double counted.
	float transmissionFactor = 1.0 - surf.transmission;
	return surf.color * metalFactor * transmissionFactor;

}
2269
+
2270
+ // specular
2271
// PDF of the specular lobe: VNDF half-vector pdf converted to a reflected
// direction pdf via the reflection Jacobian 1 / ( 4 * wi . h ).
// See equation (17) in http://jcgt.org/published/0003/02/03/
float specularPDF( vec3 wo, vec3 wi, SurfaceRec surf ) {

	vec3 halfVector = getHalfVector( wi, wo );
	float halfVectorPdf = ggxPDF( wi, halfVector, surf.filteredRoughness );

	return halfVectorPdf / ( 4.0 * dot( wi, halfVector ) );

}
2279
+
2280
// Sample a specular bounce: draw a microfacet normal from the GGX VNDF
// distribution and reflect the outgoing direction about it.
vec3 specularDirection( vec3 wo, SurfaceRec surf ) {

	float r = surf.filteredRoughness;
	vec3 microfacetNormal = ggxDirection( wo, r, r, rand(), rand() );

	return - reflect( wo, microfacetNormal );

}
2296
+
2297
// Specular lobe contribution: Cook-Torrance style G * D / ( 4 |wi.z wo.z| )
// with Schlick Fresnel, tinted by base color for metals, scaled by cos.
vec3 specularColor( vec3 wo, vec3 wi, SurfaceRec surf ) {

	// if roughness is set to 0 then D === NaN which results in black pixels
	float metalness = surf.metalness;
	float ior = surf.ior;
	bool frontFace = surf.frontFace;
	float filteredRoughness = surf.filteredRoughness;

	vec3 halfVector = getHalfVector( wo, wi );
	// ior ratio flips depending on whether the ray enters or exits the surface
	float iorRatio = frontFace ? 1.0 / ior : ior;
	float G = ggxShadowMaskG2( wi, wo, filteredRoughness );
	float D = ggxDistribution( halfVector, filteredRoughness );

	float F = schlickFresnelFromIor( dot( wi, halfVector ), iorRatio );
	// total internal reflection: beyond the critical angle everything reflects
	float cosTheta = min( wo.z, 1.0 );
	float sinTheta = sqrt( 1.0 - cosTheta * cosTheta );
	bool cannotRefract = iorRatio * sinTheta > 1.0;
	if ( cannotRefract ) {

		F = 1.0;

	}

	// metals tint the reflection by the base color; dielectrics reflect white
	vec3 color = mix( vec3( 1.0 ), surf.color, metalness );
	color = mix( color, vec3( 1.0 ), F );
	color *= G * D / ( 4.0 * abs( wi.z * wo.z ) );
	// dielectric reflection strength follows Fresnel; metals always reflect
	color *= mix( F, 1.0, metalness );
	color *= wi.z; // scale the light by the direction the light is coming in from

	return color;

}
2329
+
2330
+ /*
2331
+ // transmission
2332
+ function transmissionPDF( wo, wi, material, surf ) {
2333
+
2334
+ // See section 4.2 in https://www.cs.cornell.edu/~srm/publications/EGSR07-btdf.pdf
2335
+
2336
+ const { roughness, ior } = material;
2337
+ const { frontFace } = hit;
2338
+ const ratio = frontFace ? ior : 1 / ior;
2339
+ const minRoughness = Math.max( roughness, MIN_ROUGHNESS );
2340
+
2341
+ halfVector.set( 0, 0, 0 ).addScaledVector( wi, ratio ).addScaledVector( wo, 1.0 ).normalize().multiplyScalar( - 1 );
2342
+
2343
+ const denom = Math.pow( ratio * halfVector.dot( wi ) + 1.0 * halfVector.dot( wo ), 2.0 );
2344
+ return ggxPDF( wo, halfVector, minRoughness ) / denom;
2345
+
2346
+ }
2347
+
2348
+ function transmissionDirection( wo, hit, material, lightDirection ) {
2349
+
2350
+ const { roughness, ior } = material;
2351
+ const { frontFace } = hit;
2352
+ const ratio = frontFace ? 1 / ior : ior;
2353
+ const minRoughness = Math.max( roughness, MIN_ROUGHNESS );
2354
+
2355
+ // sample ggx vndf distribution which gives a new normal
2356
+ ggxDirection(
2357
+ wo,
2358
+ minRoughness,
2359
+ minRoughness,
2360
+ Math.random(),
2361
+ Math.random(),
2362
+ halfVector,
2363
+ );
2364
+
2365
+ // apply to new ray by reflecting off the new normal
2366
+ tempDir.copy( wo ).multiplyScalar( - 1 );
2367
+ refract( tempDir, halfVector, ratio, lightDirection );
2368
+
2369
+ }
2370
+
2371
+ function transmissionColor( wo, wi, material, hit, colorTarget ) {
2372
+
2373
+ const { metalness, transmission } = material;
2374
+ colorTarget
2375
+ .copy( material.color )
2376
+ .multiplyScalar( ( 1.0 - metalness ) * wo.z )
2377
+ .multiplyScalar( transmission );
2378
+
2379
+ }
2380
+ */
2381
+
2382
+ // TODO: This is just using a basic cosine-weighted specular distribution with an
2383
+ // incorrect PDF value at the moment. Update it to correctly use a GGX distribution
2384
// PDF of the transmission lobe. Per the TODO above this is a placeholder
// pdf for the roughness-perturbed refraction sample, not a GGX-derived one.
float transmissionPDF( vec3 wo, vec3 wi, SurfaceRec surf ) {

	float ior = surf.ior;
	bool frontFace = surf.frontFace;

	float ratio = frontFace ? 1.0 / ior : ior;
	float cosTheta = min( wo.z, 1.0 );
	float sinTheta = sqrt( 1.0 - cosTheta * cosTheta );
	float reflectance = schlickFresnelFromIor( cosTheta, ratio );
	// total internal reflection: refraction is impossible, pdf is zero
	bool cannotRefract = ratio * sinTheta > 1.0;
	if ( cannotRefract ) {

		return 0.0;

	}

	// normalize by the probability that the refraction branch was chosen
	return 1.0 / ( 1.0 - reflectance );

}
2403
+
2404
// Sample a transmission bounce: refract through the surface normal and
// perturb by roughness to approximate rough transmission.
vec3 transmissionDirection( vec3 wo, SurfaceRec surf ) {

	// flip the ior ratio when exiting the surface
	float ratio = surf.frontFace ? 1.0 / surf.ior : surf.ior;
	vec3 refracted = refract( - wo, vec3( 0.0, 0.0, 1.0 ), ratio );

	refracted += randDirection() * surf.roughness;

	return normalize( refracted );

}
2416
+
2417
// Transmission lobe contribution: the base color scaled down by metalness
// ( metals don't transmit ) and by the transmission amount.
vec3 transmissionColor( vec3 wo, vec3 wi, SurfaceRec surf ) {

	vec3 tint = surf.color;
	tint *= ( 1.0 - surf.metalness );
	tint *= surf.transmission;

	return tint;

}
2429
+
2430
// Total pdf of direction wi for the combined BSDF: the lobe pdfs weighted by
// the same lobe-selection probabilities used in bsdfSample. Also outputs the
// specular-only portion ( used for the shadow-ray heuristic ).
float bsdfPdf( vec3 wo, vec3 wi, SurfaceRec surf, out float specularPdf ) {

	float ior = surf.ior;
	float metalness = surf.metalness;
	float transmission = surf.transmission;
	bool frontFace = surf.frontFace;

	float ratio = frontFace ? 1.0 / ior : ior;
	float cosTheta = min( wo.z, 1.0 );
	float sinTheta = sqrt( 1.0 - cosTheta * cosTheta );
	float reflectance = schlickFresnelFromIor( cosTheta, ratio );
	// total internal reflection forces the specular branch
	bool cannotRefract = ratio * sinTheta > 1.0;
	if ( cannotRefract ) {

		reflectance = 1.0;

	}

	float spdf = 0.0;
	float dpdf = 0.0;
	float tpdf = 0.0;

	// directions below the surface can only come from transmission;
	// directions above from the specular or diffuse lobes
	if ( wi.z < 0.0 ) {

		tpdf = transmissionPDF( wo, wi, surf );

	} else {

		spdf = specularPDF( wo, wi, surf );
		dpdf = diffusePDF( wo, wi, surf );

	}

	// lobe-selection probabilities — must match bsdfSample's branching
	float transSpecularProb = mix( reflectance, 1.0, metalness );
	float diffSpecularProb = 0.5 + 0.5 * metalness;
	float pdf =
		spdf * transmission * transSpecularProb
		+ tpdf * transmission * ( 1.0 - transSpecularProb )
		+ spdf * ( 1.0 - transmission ) * diffSpecularProb
		+ dpdf * ( 1.0 - transmission ) * ( 1.0 - diffSpecularProb );

	// retrieve specular rays for the shadows flag
	specularPdf = spdf * transmission * transSpecularProb + spdf * ( 1.0 - transmission ) * diffSpecularProb;

	return pdf;

}
2477
+
2478
// Combined BSDF color for direction wi: directions below the surface
// ( wi.z < 0 ) are transmission; directions above are diffuse + specular.
vec3 bsdfColor( vec3 wo, vec3 wi, SurfaceRec surf ) {

	vec3 color = vec3( 0.0 );
	if ( wi.z < 0.0 ) {

		color = transmissionColor( wo, wi, surf );

	} else {

		// diffuseColor already scales by ( 1.0 - surf.transmission )
		// internally ( its transmissionFactor ), so it must not be applied
		// again here — doing so attenuated transmissive surfaces' diffuse
		// contribution by ( 1 - t )^2 instead of ( 1 - t ).
		color = diffuseColor( wo, wi, surf );

		color += specularColor( wo, wi, surf );

	}

	return color;

}
2497
+
2498
// Evaluate the BSDF color and pdf for a given direction pair. The specular
// pdf out-value of bsdfPdf is not needed by callers of this helper.
float bsdfResult( vec3 wo, vec3 wi, SurfaceRec surf, out vec3 color ) {

	float unusedSpecularPdf;
	color = bsdfColor( wo, wi, surf );

	return bsdfPdf( wo, wi, surf, unusedSpecularPdf );

}
2505
+
2506
// Sample a new bounce direction from the combined BSDF: choose a lobe by
// transmission / Fresnel / metalness probabilities, draw a direction from it,
// then evaluate the full pdf and color for that direction.
SampleRec bsdfSample( vec3 wo, SurfaceRec surf ) {

	float ior = surf.ior;
	float metalness = surf.metalness;
	float transmission = surf.transmission;
	bool frontFace = surf.frontFace;

	float ratio = frontFace ? 1.0 / ior : ior;
	float cosTheta = min( wo.z, 1.0 );
	float sinTheta = sqrt( 1.0 - cosTheta * cosTheta );
	float reflectance = schlickFresnelFromIor( cosTheta, ratio );
	// total internal reflection: always reflect
	bool cannotRefract = ratio * sinTheta > 1.0;
	if ( cannotRefract ) {

		reflectance = 1.0;

	}

	SampleRec result;
	if ( rand() < transmission ) {

		// transmissive branch: reflect with Fresnel probability ( metals
		// always reflect ), otherwise refract through the surface
		float specularProb = mix( reflectance, 1.0, metalness );
		if ( rand() < specularProb ) {

			result.direction = specularDirection( wo, surf );

		} else {

			result.direction = transmissionDirection( wo, surf );

		}

	} else {

		// opaque branch: split between specular and diffuse, biased toward
		// specular as metalness increases
		float specularProb = 0.5 + 0.5 * metalness;
		if ( rand() < specularProb ) {

			result.direction = specularDirection( wo, surf );

		} else {

			result.direction = diffuseDirection( wo, surf );

		}

	}

	// pdf / color are evaluated over all lobes regardless of which was sampled
	result.pdf = bsdfPdf( wo, result.direction, surf, result.specularPdf );
	result.color = bsdfColor( wo, result.direction, surf );
	return result;

}
2558
+ `;
2559
+
2560
+ const shaderEnvMapSampling = /* glsl */`
2561
+
2562
// Look up the environment radiance for a world-space direction in an
// equirectangular map.
vec3 sampleEquirectEnvMapColor( vec3 direction, sampler2D map ) {

	vec2 uv = equirectDirectionToUv( direction );
	return texture2D( map, uv ).rgb;

}
2567
+
2568
// Solid-angle pdf factor of an equirect texel for the given direction,
// accounting for the sin( theta ) stretching of the parameterization.
float envMapDirectionPdf( vec3 direction ) {

	float theta = equirectDirectionToUv( direction ).y * PI;
	float sinTheta = sin( theta );

	// guard the poles, where sin( theta ) collapses to zero
	if ( sinTheta == 0.0 ) {

		return 0.0;

	}

	return 1.0 / ( 2.0 * PI * PI * sinTheta );

}
2582
+
2583
// Evaluate the environment map in the given direction, outputting its color
// and returning the pdf of that direction under luminance-weighted
// importance sampling of the map.
float envMapSample( vec3 direction, EquirectHdrInfo info, out vec3 color ) {

	// radiance for the direction
	vec2 uv = equirectDirectionToUv( direction );
	color = texture2D( info.map, uv ).rgb;

	// probability of picking this texel is its luminance over the map total
	float totalSum = texture2D( info.totalSum, vec2( 0.0 ) ).r;
	float lum = colorToLuminance( color );
	ivec2 resolution = textureSize( info.map, 0 );
	float texelPdf = lum / totalSum;

	return float( resolution.x * resolution.y ) * texelPdf * envMapDirectionPdf( direction );

}
2596
+
2597
// Importance sample the environment map: draw a uv via the precomputed
// marginal / conditional CDF textures, output the resulting direction and
// color, and return the pdf of that sample.
float randomEnvMapSample( EquirectHdrInfo info, out vec3 color, out vec3 direction ) {

	// sample env map cdf: marginal picks the row ( v ), conditional picks the
	// column ( u ) within that row
	vec2 r = rand2();
	float v = texture2D( info.marginalWeights, vec2( r.x, 0.0 ) ).x;
	float u = texture2D( info.conditionalWeights, vec2( r.y, v ) ).x;
	vec2 uv = vec2( u, v );

	vec3 derivedDirection = equirectUvToDirection( uv );
	direction = derivedDirection;
	color = texture2D( info.map, uv ).rgb;

	// pdf of the texel is its luminance over the map total; combined with the
	// per-texel solid-angle factor
	float totalSum = texture2D( info.totalSum, vec2( 0.0 ) ).r;
	float lum = colorToLuminance( color );
	ivec2 resolution = textureSize( info.map, 0 );
	float pdf = lum / totalSum;

	return float( resolution.x * resolution.y ) * pdf * envMapDirectionPdf( direction );

}
2617
+
2618
// Power heuristic ( beta = 2 ) weight for multiple importance sampling:
// a^2 / ( a^2 + b^2 ). See Veach, "Robust Monte Carlo Methods for Light
// Transport Simulation", chapter 9.
// NOTE: previously bb was computed as "a * b", which algebraically collapses
// the expression to the balance heuristic a / ( a + b ) and contradicts the
// squared-term naming; use b * b for the intended power heuristic.
float misHeuristic( float a, float b ) {

	float aa = a * a;
	float bb = b * b;
	return aa / ( bb + aa );

}
2625
+
2626
+ `;
2627
+
2628
+ class PhysicalPathTracingMaterial extends MaterialBase {
2629
+
2630
	onBeforeRender() {

		// Toggle the depth-of-field feature define from the bokeh size so the
		// DOF shader path can be compiled out entirely when it is disabled.
		// NOTE(review): "physicalCamera" is declared as a uniform in the
		// constructor; this relies on the material exposing uniforms as
		// instance properties — confirm against MaterialBase.
		this.setDefine( 'FEATURE_DOF', this.physicalCamera.bokehSize === 0 ? 0 : 1 );

	}
2635
+
2636
+ constructor( parameters ) {
2637
+
2638
+ super( {
2639
+
2640
+ transparent: true,
2641
+ depthWrite: false,
2642
+
2643
+ defines: {
2644
+ FEATURE_MIS: 1,
2645
+ FEATURE_DOF: 1,
2646
+ FEATURE_GRADIENT_BG: 0,
2647
+ TRANSPARENT_TRAVERSALS: 5,
2648
+ },
2649
+
2650
+ uniforms: {
2651
+ resolution: { value: new Vector2() },
2652
+
2653
+ bounces: { value: 3 },
2654
+ physicalCamera: { value: new PhysicalCameraUniform() },
2655
+
2656
+ bvh: { value: new MeshBVHUniformStruct() },
2657
+ normalAttribute: { value: new FloatVertexAttributeTexture() },
2658
+ tangentAttribute: { value: new FloatVertexAttributeTexture() },
2659
+ uvAttribute: { value: new FloatVertexAttributeTexture() },
2660
+ materialIndexAttribute: { value: new UIntVertexAttributeTexture() },
2661
+ materials: { value: new MaterialsTexture() },
2662
+ textures: { value: new RenderTarget2DArray().texture },
2663
+ cameraWorldMatrix: { value: new Matrix4() },
2664
+ invProjectionMatrix: { value: new Matrix4() },
2665
+ isOrthographicCamera: { value: true },
2666
+ backgroundBlur: { value: 0.0 },
2667
+ environmentIntensity: { value: 2.0 },
2668
+ environmentRotation: { value: new Matrix3() },
2669
+ envMapInfo: { value: new EquirectHdrInfoUniform() },
2670
+
2671
+ seed: { value: 0 },
2672
+ opacity: { value: 1 },
2673
+ filterGlossyFactor: { value: 0.0 },
2674
+
2675
+ bgGradientTop: { value: new Color( 0x111111 ) },
2676
+ bgGradientBottom: { value: new Color( 0x000000 ) },
2677
+ backgroundAlpha: { value: 1.0 },
2678
+ },
2679
+
2680
+ vertexShader: /* glsl */`
2681
+
2682
+ varying vec2 vUv;
2683
+ void main() {
2684
+
2685
+ vec4 mvPosition = vec4( position, 1.0 );
2686
+ mvPosition = modelViewMatrix * mvPosition;
2687
+ gl_Position = projectionMatrix * mvPosition;
2688
+
2689
+ vUv = uv;
2690
+
2691
+ }
2692
+
2693
+ `,
2694
+
2695
+ fragmentShader: /* glsl */`
2696
+ #define RAY_OFFSET 1e-4
2697
+
2698
+ precision highp isampler2D;
2699
+ precision highp usampler2D;
2700
+ precision highp sampler2DArray;
2701
+ vec4 envMapTexelToLinear( vec4 a ) { return a; }
2702
+ #include <common>
2703
+
2704
+ ${ shaderStructs }
2705
+ ${ shaderIntersectFunction }
2706
+ ${ shaderMaterialStructs }
2707
+
2708
+ ${ shaderUtils }
2709
+ ${ shaderMaterialSampling }
2710
+ ${ shaderEnvMapSampling }
2711
+
2712
+ uniform mat3 environmentRotation;
2713
+ uniform float backgroundBlur;
2714
+ uniform float backgroundAlpha;
2715
+
2716
+ #if FEATURE_GRADIENT_BG
2717
+
2718
+ uniform vec3 bgGradientTop;
2719
+ uniform vec3 bgGradientBottom;
2720
+
2721
+ #endif
2722
+
2723
+ #if FEATURE_DOF
2724
+
2725
+ uniform PhysicalCamera physicalCamera;
2726
+
2727
+ #endif
2728
+
2729
+ uniform vec2 resolution;
2730
+ uniform int bounces;
2731
+ uniform mat4 cameraWorldMatrix;
2732
+ uniform mat4 invProjectionMatrix;
2733
+ uniform bool isOrthographicCamera;
2734
+ uniform sampler2D normalAttribute;
2735
+ uniform sampler2D tangentAttribute;
2736
+ uniform sampler2D uvAttribute;
2737
+ uniform usampler2D materialIndexAttribute;
2738
+ uniform BVH bvh;
2739
+ uniform float environmentIntensity;
2740
+ uniform float filterGlossyFactor;
2741
+ uniform int seed;
2742
+ uniform float opacity;
2743
+ uniform sampler2D materials;
2744
+
2745
+ uniform EquirectHdrInfo envMapInfo;
2746
+
2747
+ uniform sampler2DArray textures;
2748
+ varying vec2 vUv;
2749
+
2750
+ vec3 sampleBackground( vec3 direction ) {
2751
+
2752
+ #if FEATURE_GRADIENT_BG
2753
+
2754
+ direction = normalize( direction + randDirection() * 0.05 );
2755
+
2756
+ float value = ( direction.y + 1.0 ) / 2.0;
2757
+ value = pow( value, 2.0 );
2758
+
2759
+ return mix( bgGradientBottom, bgGradientTop, value );
2760
+
2761
+ #else
2762
+
2763
+ vec3 sampleDir = normalize( direction + getHemisphereSample( direction, rand2() ) * 0.5 * backgroundBlur );
2764
+ return environmentIntensity * sampleEquirectEnvMapColor( sampleDir, envMapInfo.map );
2765
+
2766
+ #endif
2767
+
2768
+ }
2769
+
2770
+ // step through multiple surface hits and accumulate color attenuation based on transmissive surfaces
2771
+ bool attenuateHit( BVH bvh, vec3 rayOrigin, vec3 rayDirection, int traversals, bool isShadowRay, out vec3 color ) {
2772
+
2773
+ // hit results
2774
+ uvec4 faceIndices = uvec4( 0u );
2775
+ vec3 faceNormal = vec3( 0.0, 0.0, 1.0 );
2776
+ vec3 barycoord = vec3( 0.0 );
2777
+ float side = 1.0;
2778
+ float dist = 0.0;
2779
+
2780
+ color = vec3( 1.0 );
2781
+
2782
+ for ( int i = 0; i < traversals; i ++ ) {
2783
+
2784
+ if ( bvhIntersectFirstHit( bvh, rayOrigin, rayDirection, faceIndices, faceNormal, barycoord, side, dist ) ) {
2785
+
2786
+ // TODO: attenuate the contribution based on the PDF of the resulting ray including refraction values
2787
+ // Should be able to work using the material BSDF functions which will take into account specularity, etc.
2788
+ // TODO: should we account for emissive surfaces here?
2789
+
2790
+ vec2 uv = textureSampleBarycoord( uvAttribute, barycoord, faceIndices.xyz ).xy;
2791
+ uint materialIndex = uTexelFetch1D( materialIndexAttribute, faceIndices.x ).r;
2792
+ Material material = readMaterialInfo( materials, materialIndex );
2793
+
2794
+ // adjust the ray to the new surface
2795
+ bool isBelowSurface = dot( rayDirection, faceNormal ) < 0.0;
2796
+ vec3 point = rayOrigin + rayDirection * dist;
2797
+ vec3 absPoint = abs( point );
2798
+ float maxPoint = max( absPoint.x, max( absPoint.y, absPoint.z ) );
2799
+ rayOrigin = point + faceNormal * ( maxPoint + 1.0 ) * ( isBelowSurface ? - RAY_OFFSET : RAY_OFFSET );
2800
+
2801
+ if ( ! material.castShadow && isShadowRay ) {
2802
+
2803
+ continue;
2804
+
2805
+ }
2806
+
2807
+ // Opacity Test
2808
+
2809
+ // albedo
2810
+ vec4 albedo = vec4( material.color, material.opacity );
2811
+ if ( material.map != - 1 ) {
2812
+
2813
+ vec3 uvPrime = material.mapTransform * vec3( uv, 1 );
2814
+ albedo *= texture2D( textures, vec3( uvPrime.xy, material.map ) );
2815
+
2816
+ }
2817
+
2818
+ // alphaMap
2819
+ if ( material.alphaMap != -1 ) {
2820
+
2821
+ albedo.a *= texture2D( textures, vec3( uv, material.alphaMap ) ).x;
2822
+
2823
+ }
2824
+
2825
+ // transmission
2826
+ float transmission = material.transmission;
2827
+ if ( material.transmissionMap != - 1 ) {
2828
+
2829
+ vec3 uvPrime = material.transmissionMapTransform * vec3( uv, 1 );
2830
+ transmission *= texture2D( textures, vec3( uvPrime.xy, material.transmissionMap ) ).r;
2831
+
2832
+ }
2833
+
2834
+ // metalness
2835
+ float metalness = material.metalness;
2836
+ if ( material.metalnessMap != - 1 ) {
2837
+
2838
+ vec3 uvPrime = material.metalnessMapTransform * vec3( uv, 1 );
2839
+ metalness *= texture2D( textures, vec3( uvPrime.xy, material.metalnessMap ) ).b;
2840
+
2841
+ }
2842
+
2843
+ float alphaTest = material.alphaTest;
2844
+ bool useAlphaTest = alphaTest != 0.0;
2845
+ float transmissionFactor = ( 1.0 - metalness ) * transmission;
2846
+ if (
2847
+ transmissionFactor < rand() && ! (
2848
+ // material sidedness
2849
+ material.side != 0.0 && side == material.side
2850
+
2851
+ // alpha test
2852
+ || useAlphaTest && albedo.a < alphaTest
2853
+
2854
+ // opacity
2855
+ || ! useAlphaTest && albedo.a < rand()
2856
+ )
2857
+ ) {
2858
+
2859
+ return true;
2860
+
2861
+ }
2862
+
2863
+ // only attenuate on the way in
2864
+ if ( isBelowSurface ) {
2865
+
2866
+ color *= mix( vec3( 1.0 ), albedo.rgb, transmissionFactor );
2867
+
2868
+ }
2869
+
2870
+ } else {
2871
+
2872
+ return false;
2873
+
2874
+ }
2875
+
2876
+ }
2877
+
2878
+ return true;
2879
+
2880
+ }
2881
+
2882
+ // returns whether the ray hit anything, not just the first surface. Could be optimized to not check the full hierarchy.
2883
+ bool anyHit( BVH bvh, vec3 rayOrigin, vec3 rayDirection ) {
2884
+
2885
+ uvec4 faceIndices = uvec4( 0u );
2886
+ vec3 faceNormal = vec3( 0.0, 0.0, 1.0 );
2887
+ vec3 barycoord = vec3( 0.0 );
2888
+ float side = 1.0;
2889
+ float dist = 0.0;
2890
+ return bvhIntersectFirstHit( bvh, rayOrigin, rayDirection, faceIndices, faceNormal, barycoord, side, dist );
2891
+
2892
+ }
2893
+
2894
+ // tentFilter from Peter Shirley's 'Realistic Ray Tracing (2nd Edition)' book, pg. 60
2895
+ // erichlof/THREE.js-PathTracing-Renderer/
2896
+ float tentFilter( float x ) {
2897
+
2898
+ return x < 0.5 ? sqrt( 2.0 * x ) - 1.0 : 1.0 - sqrt( 2.0 - ( 2.0 * x ) );
2899
+
2900
+ }
2901
+
2902
+ vec3 ndcToRayOrigin( vec2 coord ) {
2903
+
2904
+ vec4 rayOrigin4 = cameraWorldMatrix * invProjectionMatrix * vec4( coord, - 1.0, 1.0 );
2905
+ return rayOrigin4.xyz / rayOrigin4.w;
2906
+ }
2907
+
2908
+ void main() {
2909
+
2910
+ rng_initialize( gl_FragCoord.xy, seed );
2911
+
2912
+ // get [-1, 1] normalized device coordinates
2913
+ vec2 ndc = 2.0 * vUv - vec2( 1.0 );
2914
+
2915
+ vec3 ss00 = ndcToRayOrigin( vec2( - 1.0, - 1.0 ) );
2916
+ vec3 ss01 = ndcToRayOrigin( vec2( - 1.0, 1.0 ) );
2917
+ vec3 ss10 = ndcToRayOrigin( vec2( 1.0, - 1.0 ) );
2918
+
2919
+ vec3 ssdX = ( ss10 - ss00 ) / resolution.x;
2920
+ vec3 ssdY = ( ss01 - ss00 ) / resolution.y;
2921
+
2922
+ // Jitter the camera ray by finding a new subpixel point to point to from the camera origin
2923
+ // This is better than just jittering the camera position since it actually results in divergent
2924
+ // rays providing better coverage for the pixel
2925
+ vec3 rayOrigin = ndcToRayOrigin( ndc ) + tentFilter( rand() ) * ssdX + tentFilter( rand() ) * ssdY;
2926
+
2927
+ vec3 rayDirection;
2928
+
2929
+ if ( isOrthographicCamera ) {
2930
+
2931
+ rayDirection = ( cameraWorldMatrix * vec4( 0.0, 0.0, -1.0, 0.0 ) ).xyz;
2932
+ rayDirection = normalize( rayDirection );
2933
+
2934
+ } else {
2935
+
2936
+ vec3 cameraOrigin = ( cameraWorldMatrix * vec4( 0.0, 0.0, 0.0, 1.0 ) ).xyz;
2937
+ rayDirection = normalize( rayOrigin - cameraOrigin );
2938
+
2939
+ }
2940
+
2941
+ #if FEATURE_DOF
2942
+ {
2943
+
2944
+ // depth of field
2945
+ vec3 focalPoint = rayOrigin + normalize( rayDirection ) * physicalCamera.focusDistance;
2946
+
2947
+ // get the aperture sample
2948
+ vec2 apertureSample = sampleAperture( physicalCamera.apertureBlades ) * physicalCamera.bokehSize * 0.5 * 1e-3;
2949
+
2950
+ // rotate the aperture shape
2951
+ float ac = cos( physicalCamera.apertureRotation );
2952
+ float as = sin( physicalCamera.apertureRotation );
2953
+ apertureSample = vec2(
2954
+ apertureSample.x * ac - apertureSample.y * as,
2955
+ apertureSample.x * as + apertureSample.y * ac
2956
+ );
2957
+ apertureSample.x *= saturate( physicalCamera.anamorphicRatio );
2958
+ apertureSample.y *= saturate( 1.0 / physicalCamera.anamorphicRatio );
2959
+
2960
+ // create the new ray
2961
+ rayOrigin += ( cameraWorldMatrix * vec4( apertureSample, 0.0, 0.0 ) ).xyz;
2962
+ rayDirection = focalPoint - rayOrigin;
2963
+
2964
+ }
2965
+ #endif
2966
+ rayDirection = normalize( rayDirection );
2967
+
2968
+ // inverse environment rotation
2969
+ mat3 invEnvironmentRotation = inverse( environmentRotation );
2970
+
2971
+ // final color
2972
+ gl_FragColor = vec4( 0.0 );
2973
+ gl_FragColor.a = 1.0;
2974
+
2975
+ // hit results
2976
+ uvec4 faceIndices = uvec4( 0u );
2977
+ vec3 faceNormal = vec3( 0.0, 0.0, 1.0 );
2978
+ vec3 barycoord = vec3( 0.0 );
2979
+ float side = 1.0;
2980
+ float dist = 0.0;
2981
+
2982
+ // path tracing state
2983
+ float accumulatedRoughness = 0.0;
2984
+ bool transmissiveRay = true;
2985
+ int transparentTraversals = TRANSPARENT_TRAVERSALS;
2986
+ vec3 throughputColor = vec3( 1.0 );
2987
+ SampleRec sampleRec;
2988
+ int i;
2989
+ bool isShadowRay = false;
2990
+
2991
+ for ( i = 0; i < bounces; i ++ ) {
2992
+
2993
+ if ( ! bvhIntersectFirstHit( bvh, rayOrigin, rayDirection, faceIndices, faceNormal, barycoord, side, dist ) ) {
2994
+
2995
+ if ( i == 0 || transmissiveRay ) {
2996
+
2997
+ gl_FragColor.rgb += sampleBackground( environmentRotation * rayDirection ) * throughputColor;
2998
+ gl_FragColor.a = backgroundAlpha;
2999
+
3000
+ } else {
3001
+
3002
+ #if FEATURE_MIS
3003
+
3004
+ // get the PDF of the hit envmap point
3005
+ vec3 envColor;
3006
+ float envPdf = envMapSample( environmentRotation * rayDirection, envMapInfo, envColor );
3007
+
3008
+ // and weight the contribution
3009
+ float misWeight = misHeuristic( sampleRec.pdf, envPdf );
3010
+ gl_FragColor.rgb += environmentIntensity * envColor * throughputColor * misWeight;
3011
+
3012
+ #else
3013
+
3014
+ gl_FragColor.rgb +=
3015
+ environmentIntensity *
3016
+ sampleEquirectEnvMapColor( environmentRotation * rayDirection, envMapInfo.map ) *
3017
+ throughputColor;
3018
+
3019
+ #endif
3020
+
3021
+ }
3022
+ break;
3023
+
3024
+ }
3025
+
3026
+ uint materialIndex = uTexelFetch1D( materialIndexAttribute, faceIndices.x ).r;
3027
+ Material material = readMaterialInfo( materials, materialIndex );
3028
+
3029
+ if ( material.matte && i == 0 ) {
3030
+
3031
+ gl_FragColor = vec4( 0.0 );
3032
+ break;
3033
+
3034
+ }
3035
+
3036
+ // if we've determined that this is a shadow ray and we've hit an item with no shadow casting
3037
+ // then skip it
3038
+ if ( ! material.castShadow && isShadowRay ) {
3039
+
3040
+ vec3 point = rayOrigin + rayDirection * dist;
3041
+ vec3 absPoint = abs( point );
3042
+ float maxPoint = max( absPoint.x, max( absPoint.y, absPoint.z ) );
3043
+ rayOrigin = point - ( maxPoint + 1.0 ) * faceNormal * RAY_OFFSET;
3044
+
3045
+ continue;
3046
+
3047
+ }
3048
+
3049
+ vec2 uv = textureSampleBarycoord( uvAttribute, barycoord, faceIndices.xyz ).xy;
3050
+ // albedo
3051
+ vec4 albedo = vec4( material.color, material.opacity );
3052
+ if ( material.map != - 1 ) {
3053
+
3054
+ vec3 uvPrime = material.mapTransform * vec3( uv, 1 );
3055
+ albedo *= texture2D( textures, vec3( uvPrime.xy, material.map ) );
3056
+ }
3057
+
3058
+ // alphaMap
3059
+ if ( material.alphaMap != -1 ) {
3060
+
3061
+ albedo.a *= texture2D( textures, vec3( uv, material.alphaMap ) ).x;
3062
+
3063
+ }
3064
+
3065
+ // possibly skip this sample if it's transparent, alpha test is enabled, or we hit the wrong material side
3066
+ // and it's single sided.
3067
+ // - alpha test is disabled when it === 0
3068
+ // - the material sidedness test is complicated because we want light to pass through the back side but still
3069
+ // be able to see the front side. This boolean checks if the side we hit is the front side on the first ray
3070
+ // and we're rendering the other then we skip it. Do the opposite on subsequent bounces to get incoming light.
3071
+ float alphaTest = material.alphaTest;
3072
+ bool useAlphaTest = alphaTest != 0.0;
3073
+ bool isFirstHit = i == 0;
3074
+ if (
3075
+ // material sidedness
3076
+ material.side != 0.0 && ( side != material.side ) == isFirstHit
3077
+
3078
+ // alpha test
3079
+ || useAlphaTest && albedo.a < alphaTest
3080
+
3081
+ // opacity
3082
+ || ! useAlphaTest && albedo.a < rand()
3083
+ ) {
3084
+
3085
+ vec3 point = rayOrigin + rayDirection * dist;
3086
+ vec3 absPoint = abs( point );
3087
+ float maxPoint = max( absPoint.x, max( absPoint.y, absPoint.z ) );
3088
+ rayOrigin = point - ( maxPoint + 1.0 ) * faceNormal * RAY_OFFSET;
3089
+
3090
+ // only allow a limited number of transparency discards otherwise we could
3091
+ // crash the context with too long a loop.
3092
+ i -= sign( transparentTraversals );
3093
+ transparentTraversals -= sign( transparentTraversals );
3094
+ continue;
3095
+
3096
+ }
3097
+
3098
+ // fetch the interpolated smooth normal
3099
+ vec3 normal = normalize( textureSampleBarycoord(
3100
+ normalAttribute,
3101
+ barycoord,
3102
+ faceIndices.xyz
3103
+ ).xyz );
3104
+
3105
+ // roughness
3106
+ float roughness = material.roughness;
3107
+ if ( material.roughnessMap != - 1 ) {
3108
+
3109
+ vec3 uvPrime = material.roughnessMapTransform * vec3( uv, 1 );
3110
+ roughness *= texture2D( textures, vec3( uvPrime.xy, material.roughnessMap ) ).g;
3111
+
3112
+ }
3113
+
3114
+ // metalness
3115
+ float metalness = material.metalness;
3116
+ if ( material.metalnessMap != - 1 ) {
3117
+
3118
+ vec3 uvPrime = material.metalnessMapTransform * vec3( uv, 1 );
3119
+ metalness *= texture2D( textures, vec3( uvPrime.xy, material.metalnessMap ) ).b;
3120
+
3121
+ }
3122
+
3123
+ // emission
3124
+ vec3 emission = material.emissiveIntensity * material.emissive;
3125
+ if ( material.emissiveMap != - 1 ) {
3126
+
3127
+ vec3 uvPrime = material.emissiveMapTransform * vec3( uv, 1 );
3128
+ emission *= texture2D( textures, vec3( uvPrime.xy, material.emissiveMap ) ).xyz;
3129
+
3130
+ }
3131
+
3132
+ // transmission
3133
+ float transmission = material.transmission;
3134
+ if ( material.transmissionMap != - 1 ) {
3135
+
3136
+ vec3 uvPrime = material.transmissionMapTransform * vec3( uv, 1 );
3137
+ transmission *= texture2D( textures, vec3( uvPrime.xy, material.transmissionMap ) ).r;
3138
+
3139
+ }
3140
+
3141
+ // normal
3142
+ if ( material.normalMap != - 1 ) {
3143
+
3144
+ vec4 tangentSample = textureSampleBarycoord(
3145
+ tangentAttribute,
3146
+ barycoord,
3147
+ faceIndices.xyz
3148
+ );
3149
+
3150
+ // some provided tangents can be malformed (0, 0, 0) causing the normal to be degenerate
3151
+ // resulting in NaNs and slow path tracing.
3152
+ if ( length( tangentSample.xyz ) > 0.0 ) {
3153
+
3154
+ vec3 tangent = normalize( tangentSample.xyz );
3155
+ vec3 bitangent = normalize( cross( normal, tangent ) * tangentSample.w );
3156
+ mat3 vTBN = mat3( tangent, bitangent, normal );
3157
+
3158
+ vec3 uvPrime = material.normalMapTransform * vec3( uv, 1 );
3159
+ vec3 texNormal = texture2D( textures, vec3( uvPrime.xy, material.normalMap ) ).xyz * 2.0 - 1.0;
3160
+ texNormal.xy *= material.normalScale;
3161
+ normal = vTBN * texNormal;
3162
+
3163
+ }
3164
+
3165
+ }
3166
+
3167
+ normal *= side;
3168
+
3169
+ SurfaceRec surfaceRec;
3170
+ surfaceRec.normal = normal;
3171
+ surfaceRec.faceNormal = faceNormal;
3172
+ surfaceRec.transmission = transmission;
3173
+ surfaceRec.ior = material.ior;
3174
+ surfaceRec.emission = emission;
3175
+ surfaceRec.metalness = metalness;
3176
+ surfaceRec.color = albedo.rgb;
3177
+ surfaceRec.roughness = roughness;
3178
+
3179
+ // frontFace is used to determine transmissive properties and PDF. If no transmission is used
3180
+ // then we can just always assume this is a front face.
3181
+ surfaceRec.frontFace = side == 1.0 || transmission == 0.0;
3182
+
3183
+ // Compute the filtered roughness value to use during specular reflection computations.
3184
+ // The accumulated roughness value is scaled by a user setting and a "magic value" of 5.0.
3185
+ // If we're exiting something transmissive then scale the factor down significantly so we can retain
3186
+ // sharp internal reflections
3187
+ surfaceRec.filteredRoughness = clamp( max( surfaceRec.roughness, accumulatedRoughness * filterGlossyFactor * 5.0 ), 0.0, 1.0 );
3188
+
3189
+ mat3 normalBasis = getBasisFromNormal( surfaceRec.normal );
3190
+ mat3 invBasis = inverse( normalBasis );
3191
+
3192
+ vec3 outgoing = - normalize( invBasis * rayDirection );
3193
+ sampleRec = bsdfSample( outgoing, surfaceRec );
3194
+
3195
+ float specRayPdf = specularPDF( outgoing, sampleRec.direction, surfaceRec );
3196
+ isShadowRay = sampleRec.specularPdf < rand();
3197
+
3198
+ // adjust the hit point by the surface normal by a factor of some offset and the
3199
+ // maximum component-wise value of the current point to accommodate floating point
3200
+ // error as values increase.
3201
+ vec3 point = rayOrigin + rayDirection * dist;
3202
+ vec3 absPoint = abs( point );
3203
+ float maxPoint = max( absPoint.x, max( absPoint.y, absPoint.z ) );
3204
+ rayDirection = normalize( normalBasis * sampleRec.direction );
3205
+
3206
+ bool isBelowSurface = dot( rayDirection, faceNormal ) < 0.0;
3207
+ rayOrigin = point + faceNormal * ( maxPoint + 1.0 ) * ( isBelowSurface ? - RAY_OFFSET : RAY_OFFSET );
3208
+
3209
+ // direct env map sampling
3210
+ #if FEATURE_MIS
3211
+ {
3212
+
3213
+ // find a sample in the environment map to include in the contribution
3214
+ vec3 envColor, envDirection;
3215
+ float envPdf = randomEnvMapSample( envMapInfo, envColor, envDirection );
3216
+ envDirection = invEnvironmentRotation * envDirection;
3217
+
3218
+ // this env sampling is not set up for transmissive sampling and yields overly bright
3219
+ // results so we ignore the sample in this case.
3220
+ // TODO: this should be improved but how? The env samples could traverse a few layers?
3221
+ bool isSampleBelowSurface = dot( faceNormal, envDirection ) < 0.0;
3222
+ if ( isSampleBelowSurface ) {
3223
+
3224
+ envPdf = 0.0;
3225
+
3226
+ }
3227
+
3228
+ // check if a ray could even reach the surface
3229
+ vec3 attenuatedColor;
3230
+ if (
3231
+ envPdf > 0.0 &&
3232
+ isDirectionValid( envDirection, normal, faceNormal ) &&
3233
+ ! attenuateHit( bvh, rayOrigin, envDirection, bounces - i, isShadowRay, attenuatedColor )
3234
+ ) {
3235
+
3236
+ // get the material pdf
3237
+ vec3 sampleColor;
3238
+ float envMaterialPdf = bsdfResult( outgoing, normalize( invBasis * envDirection ), surfaceRec, sampleColor );
3239
+ if ( envMaterialPdf > 0.0 ) {
3240
+
3241
+ // weight the direct light contribution
3242
+ float misWeight = misHeuristic( envPdf, envMaterialPdf );
3243
+ gl_FragColor.rgb += attenuatedColor * environmentIntensity * envColor * throughputColor * sampleColor * misWeight / envPdf;
3244
+
3245
+ }
3246
+
3247
+ }
3248
+
3249
+ }
3250
+ #endif
3251
+
3252
+ // accumulate a roughness value to offset diffuse, specular, diffuse rays that have high contribution
3253
+ // to a single pixel resulting in fireflies
3254
+ if ( ! isBelowSurface ) {
3255
+
3256
+ // determine if this is a rough normal or not by checking how far off straight up it is
3257
+ vec3 halfVector = normalize( outgoing + sampleRec.direction );
3258
+ accumulatedRoughness += sin( acosApprox( halfVector.z ) );
3259
+ transmissiveRay = false;
3260
+
3261
+ }
3262
+
3263
+ // accumulate color
3264
+ gl_FragColor.rgb += ( emission * throughputColor );
3265
+
3266
+ // skip the sample if our PDF or ray is impossible
3267
+ if ( sampleRec.pdf <= 0.0 || ! isDirectionValid( rayDirection, normal, faceNormal) ) {
3268
+
3269
+ break;
3270
+
3271
+ }
3272
+
3273
+ throughputColor *= sampleRec.color / sampleRec.pdf;
3274
+
3275
+ // discard the sample if there are any NaNs
3276
+ if ( any( isnan( throughputColor ) ) || any( isinf( throughputColor ) ) ) {
3277
+
3278
+ break;
3279
+
3280
+ }
3281
+
3282
+ }
3283
+
3284
+ gl_FragColor.a *= opacity;
3285
+
3286
+ }
3287
+
3288
+ `
3289
+
3290
+ } );
3291
+
3292
+ this.setValues( parameters );
3293
+
3294
+ }
1462
3295
 
1463
- class PhysicalPathTracingMaterial extends MaterialBase {
1464
-
1465
- // three.js relies on this field to add env map functions and defines
1466
- get envMap() {
1467
-
1468
- return this.environmentMap;
1469
-
1470
- }
1471
-
1472
- constructor( parameters ) {
1473
-
1474
- super( {
1475
-
1476
- transparent: true,
1477
- depthWrite: false,
1478
-
1479
- defines: {
1480
- BOUNCES: 3,
1481
- TRANSPARENT_TRAVERSALS: 5,
1482
- MATERIAL_LENGTH: 0,
1483
- GRADIENT_BG: 0,
1484
- },
1485
-
1486
- uniforms: {
1487
- bvh: { value: new MeshBVHUniformStruct() },
1488
- normalAttribute: { value: new FloatVertexAttributeTexture() },
1489
- tangentAttribute: { value: new FloatVertexAttributeTexture() },
1490
- uvAttribute: { value: new FloatVertexAttributeTexture() },
1491
- materialIndexAttribute: { value: new UIntVertexAttributeTexture() },
1492
- materials: { value: new MaterialStructArrayUniform() },
1493
- textures: { value: new RenderTarget2DArray().texture },
1494
- cameraWorldMatrix: { value: new Matrix4() },
1495
- invProjectionMatrix: { value: new Matrix4() },
1496
- environmentBlur: { value: 0.2 },
1497
- environmentIntensity: { value: 2.0 },
1498
- environmentMap: { value: null },
1499
- environmentRotation: { value: new Matrix3() },
1500
- seed: { value: 0 },
1501
- opacity: { value: 1 },
1502
- filterGlossyFactor: { value: 0.0 },
1503
-
1504
- gradientTop: { value: new Color( 0xbfd8ff ) },
1505
- gradientBottom: { value: new Color( 0xffffff ) },
1506
-
1507
- bgGradientTop: { value: new Color( 0x111111 ) },
1508
- bgGradientBottom: { value: new Color( 0x000000 ) },
1509
- },
1510
-
1511
- vertexShader: /* glsl */`
1512
-
1513
- varying vec2 vUv;
1514
- void main() {
1515
-
1516
- vec4 mvPosition = vec4( position, 1.0 );
1517
- mvPosition = modelViewMatrix * mvPosition;
1518
- gl_Position = projectionMatrix * mvPosition;
1519
-
1520
- vUv = uv;
1521
-
1522
- }
1523
-
1524
- `,
1525
-
1526
- fragmentShader: /* glsl */`
1527
- #define RAY_OFFSET 1e-5
1528
-
1529
- precision highp isampler2D;
1530
- precision highp usampler2D;
1531
- precision highp sampler2DArray;
1532
- vec4 envMapTexelToLinear( vec4 a ) { return a; }
1533
- #include <common>
1534
- #include <cube_uv_reflection_fragment>
1535
-
1536
- ${ shaderStructs }
1537
- ${ shaderIntersectFunction }
1538
- ${ shaderMaterialStructs }
1539
-
1540
- ${ shaderUtils }
1541
- ${ shaderMaterialSampling }
1542
-
1543
- #ifdef USE_ENVMAP
1544
-
1545
- uniform float environmentBlur;
1546
- uniform sampler2D environmentMap;
1547
- uniform mat3 environmentRotation;
1548
-
1549
- #else
1550
-
1551
- uniform vec3 gradientTop;
1552
- uniform vec3 gradientBottom;
1553
-
1554
- #endif
1555
-
1556
- #if GRADIENT_BG
1557
-
1558
- uniform vec3 bgGradientTop;
1559
- uniform vec3 bgGradientBottom;
1560
-
1561
- #endif
1562
-
1563
- uniform mat4 cameraWorldMatrix;
1564
- uniform mat4 invProjectionMatrix;
1565
- uniform sampler2D normalAttribute;
1566
- uniform sampler2D tangentAttribute;
1567
- uniform sampler2D uvAttribute;
1568
- uniform usampler2D materialIndexAttribute;
1569
- uniform BVH bvh;
1570
- uniform float environmentIntensity;
1571
- uniform float filterGlossyFactor;
1572
- uniform int seed;
1573
- uniform float opacity;
1574
- uniform Material materials[ MATERIAL_LENGTH ];
1575
- uniform sampler2DArray textures;
1576
- varying vec2 vUv;
1577
-
1578
- void main() {
1579
-
1580
- rng_initialize( gl_FragCoord.xy, seed );
1581
-
1582
- // get [-1, 1] normalized device coordinates
1583
- vec2 ndc = 2.0 * vUv - vec2( 1.0 );
1584
- vec3 rayOrigin, rayDirection;
1585
- ndcToCameraRay( ndc, cameraWorldMatrix, invProjectionMatrix, rayOrigin, rayDirection );
1586
-
1587
- // Lambertian render
1588
- gl_FragColor = vec4( 0.0 );
1589
-
1590
- vec3 throughputColor = vec3( 1.0 );
1591
-
1592
- // hit results
1593
- uvec4 faceIndices = uvec4( 0u );
1594
- vec3 faceNormal = vec3( 0.0, 0.0, 1.0 );
1595
- vec3 barycoord = vec3( 0.0 );
1596
- float side = 1.0;
1597
- float dist = 0.0;
1598
- float accumulatedRoughness = 0.0;
1599
- int i;
1600
- int transparentTraversals = TRANSPARENT_TRAVERSALS;
1601
- for ( i = 0; i < BOUNCES; i ++ ) {
1602
-
1603
- if ( ! bvhIntersectFirstHit( bvh, rayOrigin, rayDirection, faceIndices, faceNormal, barycoord, side, dist ) ) {
1604
-
1605
- #if GRADIENT_BG
1606
-
1607
- if ( i == 0 ) {
1608
-
1609
- rayDirection = normalize( rayDirection + randDirection() * 0.05 );
1610
- float value = ( rayDirection.y + 1.0 ) / 2.0;
1611
-
1612
- value = pow( value, 2.0 );
1613
-
1614
- gl_FragColor = vec4( mix( bgGradientBottom, bgGradientTop, value ), 1.0 );
1615
- break;
1616
-
1617
- }
1618
-
1619
- #endif
1620
-
1621
- #ifdef USE_ENVMAP
1622
-
1623
- vec3 skyColor = textureCubeUV( environmentMap, environmentRotation * rayDirection, environmentBlur ).rgb;
1624
-
1625
- #else
1626
-
1627
- rayDirection = normalize( rayDirection );
1628
- float value = ( rayDirection.y + 1.0 ) / 2.0;
1629
- vec3 skyColor = mix( gradientBottom, gradientTop, value );
1630
-
1631
- #endif
1632
-
1633
- gl_FragColor += vec4( skyColor * throughputColor * environmentIntensity, 1.0 );
1634
-
1635
- break;
1636
-
1637
- }
1638
-
1639
- uint materialIndex = uTexelFetch1D( materialIndexAttribute, faceIndices.x ).r;
1640
- Material material = materials[ materialIndex ];
1641
-
1642
- vec2 uv = textureSampleBarycoord( uvAttribute, barycoord, faceIndices.xyz ).xy;
1643
-
1644
- // albedo
1645
- vec4 albedo = vec4( material.color, material.opacity );
1646
- if ( material.map != - 1 ) {
1647
-
1648
- albedo *= texture2D( textures, vec3( uv, material.map ) );
1649
-
1650
- }
1651
-
1652
- // possibly skip this sample if it's transparent or alpha test is enabled
1653
- // alpha test is disabled when it === 0
1654
- float alphaTest = material.alphaTest;
1655
- bool useAlphaTest = alphaTest != 0.0;
1656
- if (
1657
- useAlphaTest && albedo.a < alphaTest
1658
- || ! useAlphaTest && albedo.a < rand()
1659
- ) {
1660
-
1661
- vec3 point = rayOrigin + rayDirection * dist;
1662
- rayOrigin += rayDirection * dist - faceNormal * RAY_OFFSET;
1663
-
1664
- // only allow a limited number of transparency discards otherwise we could
1665
- // crash the context with too long a loop.
1666
- i -= sign( transparentTraversals );
1667
- transparentTraversals -= sign( transparentTraversals );
1668
- continue;
1669
-
1670
- }
1671
-
1672
- // fetch the interpolated smooth normal
1673
- vec3 normal = normalize( textureSampleBarycoord(
1674
- normalAttribute,
1675
- barycoord,
1676
- faceIndices.xyz
1677
- ).xyz );
1678
-
1679
- // roughness
1680
- float roughness = material.roughness;
1681
- if ( material.roughnessMap != - 1 ) {
1682
-
1683
- roughness *= texture2D( textures, vec3( uv, material.roughnessMap ) ).g;
1684
-
1685
- }
1686
-
1687
- // metalness
1688
- float metalness = material.metalness;
1689
- if ( material.metalnessMap != - 1 ) {
1690
-
1691
- metalness *= texture2D( textures, vec3( uv, material.metalnessMap ) ).b;
1692
-
1693
- }
1694
-
1695
- // emission
1696
- vec3 emission = material.emissiveIntensity * material.emissive;
1697
- if ( material.emissiveMap != - 1 ) {
1698
-
1699
- emission *= texture2D( textures, vec3( uv, material.emissiveMap ) ).xyz;
1700
-
1701
- }
1702
-
1703
- // transmission
1704
- float transmission = material.transmission;
1705
- if ( material.transmissionMap != - 1 ) {
1706
-
1707
- transmission *= texture2D( textures, vec3( uv, material.transmissionMap ) ).r;
1708
-
1709
- }
1710
-
1711
- // normal
1712
- if ( material.normalMap != - 1 ) {
1713
-
1714
- vec4 tangentSample = textureSampleBarycoord(
1715
- tangentAttribute,
1716
- barycoord,
1717
- faceIndices.xyz
1718
- );
1719
-
1720
- // some provided tangents can be malformed (0, 0, 0) causing the normal to be degenerate
1721
- // resulting in NaNs and slow path tracing.
1722
- if ( length( tangentSample.xyz ) > 0.0 ) {
1723
-
1724
- vec3 tangent = normalize( tangentSample.xyz );
1725
- vec3 bitangent = normalize( cross( normal, tangent ) * tangentSample.w );
1726
- mat3 vTBN = mat3( tangent, bitangent, normal );
1727
-
1728
- vec3 texNormal = texture2D( textures, vec3( uv, material.normalMap ) ).xyz * 2.0 - 1.0;
1729
- texNormal.xy *= material.normalScale;
1730
- normal = vTBN * texNormal;
1731
-
1732
- }
1733
-
1734
- }
1735
-
1736
- normal *= side;
1737
-
1738
- SurfaceRec surfaceRec;
1739
- surfaceRec.normal = normal;
1740
- surfaceRec.faceNormal = faceNormal;
1741
- surfaceRec.frontFace = side == 1.0;
1742
- surfaceRec.transmission = transmission;
1743
- surfaceRec.ior = material.ior;
1744
- surfaceRec.emission = emission;
1745
- surfaceRec.metalness = metalness;
1746
- surfaceRec.color = albedo.rgb;
1747
- surfaceRec.roughness = roughness;
1748
-
1749
- // Compute the filtered roughness value to use during specular reflection computations. A minimum
1750
- // value of 1e-6 is needed because the GGX functions do not work with a roughness value of 0 and
1751
- // the accumulated roughness value is scaled by a user setting and a "magic value" of 5.0.
1752
- // If we're exiting something transmissive then scale the factor down significantly so we can retain
1753
- // sharp internal reflections
1754
- surfaceRec.filteredRoughness = clamp(
1755
- max( surfaceRec.roughness, accumulatedRoughness * filterGlossyFactor * 5.0 ),
1756
- 1e-3,
1757
- 1.0
1758
- );
1759
-
1760
- mat3 normalBasis = getBasisFromNormal( surfaceRec.normal );
1761
- mat3 invBasis = inverse( normalBasis );
1762
-
1763
- vec3 outgoing = - normalize( invBasis * rayDirection );
1764
- SampleRec sampleRec = bsdfSample( outgoing, surfaceRec );
1765
-
1766
- // adjust the hit point by the surface normal by a factor of some offset and the
1767
- // maximum component-wise value of the current point to accommodate floating point
1768
- // error as values increase.
1769
- vec3 point = rayOrigin + rayDirection * dist;
1770
- vec3 absPoint = abs( point );
1771
- float maxPoint = max( absPoint.x, max( absPoint.y, absPoint.z ) );
1772
- rayDirection = normalize( normalBasis * sampleRec.direction );
1773
-
1774
- bool isBelowSurface = dot( rayDirection, faceNormal ) < 0.0;
1775
- rayOrigin = point + faceNormal * ( maxPoint + 1.0 ) * ( isBelowSurface ? - RAY_OFFSET : RAY_OFFSET );
1776
-
1777
- // accumulate a roughness value to offset diffuse, specular, diffuse rays that have high contribution
1778
- // to a single pixel resulting in fireflies
1779
- if ( ! isBelowSurface ) {
1780
-
1781
- // determine if this is a rough normal or not by checking how far off straight up it is
1782
- vec3 halfVector = normalize( outgoing + sampleRec.direction );
1783
- accumulatedRoughness += sin( acos( halfVector.z ) );
1784
-
1785
- }
1786
-
1787
- // accumulate color
1788
- gl_FragColor.rgb += ( emission * throughputColor );
1789
-
1790
- // skip the sample if our PDF or ray is impossible
1791
- if ( sampleRec.pdf <= 0.0 || ! isDirectionValid( rayDirection, normal, faceNormal) ) {
1792
-
1793
- break;
1794
-
1795
- }
1796
-
1797
- throughputColor *= sampleRec.color / sampleRec.pdf;
1798
-
1799
- // discard the sample if there are any NaNs
1800
- if ( any( isnan( throughputColor ) ) || any( isinf( throughputColor ) ) ) {
1801
-
1802
- break;
1803
-
1804
- }
1805
-
1806
- }
1807
-
1808
- gl_FragColor.a = opacity;
1809
-
1810
- }
1811
-
1812
- `
1813
-
1814
- } );
1815
-
1816
- this.setValues( parameters );
1817
-
1818
- }
1819
-
1820
3296
  }
1821
3297
 
1822
3298
  // core
1823
3299
 
1824
- export { MaterialBase, MaterialReducer, MaterialStructArrayUniform, MaterialStructUniform, PathTracingRenderer, PathTracingSceneGenerator, PhysicalPathTracingMaterial, RenderTarget2DArray, mergeMeshes, shaderMaterialSampling, shaderMaterialStructs, shaderUtils };
3300
+ export { BlurredEnvMapGenerator, DynamicPathTracingSceneGenerator, EquirectHdrInfoUniform, MaterialBase, MaterialReducer, MaterialsTexture, PathTracingRenderer, PathTracingSceneGenerator, PhysicalCamera, PhysicalCameraUniform, PhysicalPathTracingMaterial, RenderTarget2DArray, getGroupMaterialIndicesAttribute, mergeMeshes, setCommonAttributes, shaderMaterialSampling, shaderMaterialStructs, shaderUtils, trimToAttributes };
1825
3301
  //# sourceMappingURL=index.module.js.map