three-gpu-pathtracer 0.0.8 → 0.0.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,5 +1,6 @@
1
- import { ShaderMaterial, NoBlending, NormalBlending, Color, Vector2, WebGLRenderTarget, RGBAFormat, FloatType, BufferAttribute, Mesh, BufferGeometry, PerspectiveCamera, Camera, SpotLight, RectAreaLight, Spherical, DataTexture, EquirectangularReflectionMapping, RepeatWrapping, ClampToEdgeWrapping, LinearFilter, Vector3, DoubleSide, BackSide, FrontSide, WebGLArrayRenderTarget, UnsignedByteType, MeshBasicMaterial, NoToneMapping, Source, HalfFloatType, DataUtils, RedFormat, Matrix4, Quaternion, Loader, MathUtils, FileLoader, PMREMGenerator, Vector4, DataArrayTexture } from 'three';
2
- import { FullScreenQuad } from 'three/examples/jsm/postprocessing/Pass.js';
1
+ import { ShaderMaterial, NoBlending, Vector2, WebGLRenderTarget, FloatType, RGBAFormat, NearestFilter, NormalBlending, Color, BufferAttribute, Mesh, BufferGeometry, PerspectiveCamera, Camera, SpotLight, RectAreaLight, Spherical, DataTexture, EquirectangularReflectionMapping, RepeatWrapping, ClampToEdgeWrapping, LinearFilter, Vector3, DoubleSide, BackSide, FrontSide, WebGLArrayRenderTarget, UnsignedByteType, MeshBasicMaterial, NoToneMapping, Source, HalfFloatType, DataUtils, RedFormat, Matrix4, Quaternion, Loader, MathUtils, FileLoader, PMREMGenerator, Vector4, DataArrayTexture } from 'three';
2
+ import { FullScreenQuad as FullScreenQuad$1 } from 'three/examples/jsm/postprocessing/Pass.js';
3
+ import { FullScreenQuad } from 'three/examples/jsm/postprocessing/Pass';
3
4
  import { StaticGeometryGenerator, SAH, MeshBVH, FloatVertexAttributeTexture, MeshBVHUniformStruct, UIntVertexAttributeTexture, shaderStructs, shaderIntersectFunction } from 'three-mesh-bvh';
4
5
  import { mergeVertices, mergeBufferGeometries } from 'three/examples/jsm/utils/BufferGeometryUtils.js';
5
6
 
@@ -123,6 +124,339 @@ class BlendMaterial extends MaterialBase {
123
124
 
124
125
  }
125
126
 
127
+ // References
128
+ // - https://jcgt.org/published/0009/04/01/
129
+ // - Code from https://www.shadertoy.com/view/WtGyDm
130
+
131
+ // functions to generate multi-dimensional variants of the same functions
132
+ // to support 1, 2, 3, and 4 dimensional sobol sampling.
133
+ function generateSobolFunctionVariants( dim = 1 ) {
134
+
135
+ let type = 'uint';
136
+ if ( dim > 1 ) {
137
+
138
+ type = 'uvec' + dim;
139
+
140
+ }
141
+
142
+ return /* glsl */`
143
+ ${ type } sobolReverseBits( ${ type } x ) {
144
+
145
+ x = ( ( ( x & 0xaaaaaaaau ) >> 1 ) | ( ( x & 0x55555555u ) << 1 ) );
146
+ x = ( ( ( x & 0xccccccccu ) >> 2 ) | ( ( x & 0x33333333u ) << 2 ) );
147
+ x = ( ( ( x & 0xf0f0f0f0u ) >> 4 ) | ( ( x & 0x0f0f0f0fu ) << 4 ) );
148
+ x = ( ( ( x & 0xff00ff00u ) >> 8 ) | ( ( x & 0x00ff00ffu ) << 8 ) );
149
+ return ( ( x >> 16 ) | ( x << 16 ) );
150
+
151
+ }
152
+
153
+ ${ type } sobolHashCombine( uint seed, ${ type } v ) {
154
+
155
+ return seed ^ ( v + ${ type }( ( seed << 6 ) + ( seed >> 2 ) ) );
156
+
157
+ }
158
+
159
+ ${ type } sobolLaineKarrasPermutation( ${ type } x, ${ type } seed ) {
160
+
161
+ x += seed;
162
+ x ^= x * 0x6c50b47cu;
163
+ x ^= x * 0xb82f1e52u;
164
+ x ^= x * 0xc7afe638u;
165
+ x ^= x * 0x8d22f6e6u;
166
+ return x;
167
+
168
+ }
169
+
170
+ ${ type } nestedUniformScrambleBase2( ${ type } x, ${ type } seed ) {
171
+
172
+ x = sobolLaineKarrasPermutation( x, seed );
173
+ x = sobolReverseBits( x );
174
+ return x;
175
+
176
+ }
177
+ `;
178
+
179
+ }
180
+
181
+ function generateSobolSampleFunctions( dim = 1 ) {
182
+
183
+ let utype = 'uint';
184
+ let vtype = 'float';
185
+ let num = '';
186
+ let components = '.r';
187
+ let combineValues = '1u';
188
+ if ( dim > 1 ) {
189
+
190
+ utype = 'uvec' + dim;
191
+ vtype = 'vec' + dim;
192
+ num = dim + '';
193
+ if ( dim === 2 ) {
194
+
195
+ components = '.rg';
196
+ combineValues = 'uvec2( 1u, 2u )';
197
+
198
+ } else if ( dim === 3 ) {
199
+
200
+ components = '.rgb';
201
+ combineValues = 'uvec3( 1u, 2u, 3u )';
202
+
203
+ } else {
204
+
205
+ components = '';
206
+ combineValues = 'uvec4( 1u, 2u, 3u, 4u )';
207
+
208
+ }
209
+
210
+ }
211
+
212
+ return /* glsl */`
213
+
214
+ ${ vtype } sobol${ num }( int effect ) {
215
+
216
+ uint seed = sobolGetSeed( sobolBounceIndex, uint( effect ) );
217
+ uint index = sobolPathIndex;
218
+
219
+ uint shuffle_seed = sobolHashCombine( seed, 0u );
220
+ uint shuffled_index = nestedUniformScrambleBase2( sobolReverseBits( index ), shuffle_seed );
221
+ ${ vtype } sobol_pt = sobolGetTexturePoint( shuffled_index )${ components };
222
+ ${ utype } result = ${ utype }( sobol_pt * 16777216.0 );
223
+
224
+ ${ utype } seed2 = sobolHashCombine( seed, ${ combineValues } );
225
+ result = nestedUniformScrambleBase2( result, seed2 );
226
+
227
+ return SOBOL_FACTOR * ${ vtype }( result >> 8 );
228
+
229
+ }
230
+ `;
231
+
232
+ }
233
+
234
+ const shaderSobolCommon = /* glsl */`
235
+
236
+ // Utils
237
+ const float SOBOL_FACTOR = 1.0 / 16777216.0;
238
+ const uint SOBOL_MAX_POINTS = 256u * 256u;
239
+
240
+ ${ generateSobolFunctionVariants( 1 ) }
241
+ ${ generateSobolFunctionVariants( 2 ) }
242
+ ${ generateSobolFunctionVariants( 3 ) }
243
+ ${ generateSobolFunctionVariants( 4 ) }
244
+
245
+ uint sobolHash( uint x ) {
246
+
247
+ // finalizer from murmurhash3
248
+ x ^= x >> 16;
249
+ x *= 0x85ebca6bu;
250
+ x ^= x >> 13;
251
+ x *= 0xc2b2ae35u;
252
+ x ^= x >> 16;
253
+ return x;
254
+
255
+ }
256
+
257
+ `;
258
+
259
+ const shaderSobolGeneration = /* glsl */`
260
+
261
+ const uint SOBOL_DIRECTIONS_1[ 32 ] = uint[ 32 ](
262
+ 0x80000000u, 0xc0000000u, 0xa0000000u, 0xf0000000u,
263
+ 0x88000000u, 0xcc000000u, 0xaa000000u, 0xff000000u,
264
+ 0x80800000u, 0xc0c00000u, 0xa0a00000u, 0xf0f00000u,
265
+ 0x88880000u, 0xcccc0000u, 0xaaaa0000u, 0xffff0000u,
266
+ 0x80008000u, 0xc000c000u, 0xa000a000u, 0xf000f000u,
267
+ 0x88008800u, 0xcc00cc00u, 0xaa00aa00u, 0xff00ff00u,
268
+ 0x80808080u, 0xc0c0c0c0u, 0xa0a0a0a0u, 0xf0f0f0f0u,
269
+ 0x88888888u, 0xccccccccu, 0xaaaaaaaau, 0xffffffffu
270
+ );
271
+
272
+ const uint SOBOL_DIRECTIONS_2[ 32 ] = uint[ 32 ](
273
+ 0x80000000u, 0xc0000000u, 0x60000000u, 0x90000000u,
274
+ 0xe8000000u, 0x5c000000u, 0x8e000000u, 0xc5000000u,
275
+ 0x68800000u, 0x9cc00000u, 0xee600000u, 0x55900000u,
276
+ 0x80680000u, 0xc09c0000u, 0x60ee0000u, 0x90550000u,
277
+ 0xe8808000u, 0x5cc0c000u, 0x8e606000u, 0xc5909000u,
278
+ 0x6868e800u, 0x9c9c5c00u, 0xeeee8e00u, 0x5555c500u,
279
+ 0x8000e880u, 0xc0005cc0u, 0x60008e60u, 0x9000c590u,
280
+ 0xe8006868u, 0x5c009c9cu, 0x8e00eeeeu, 0xc5005555u
281
+ );
282
+
283
+ const uint SOBOL_DIRECTIONS_3[ 32 ] = uint[ 32 ](
284
+ 0x80000000u, 0xc0000000u, 0x20000000u, 0x50000000u,
285
+ 0xf8000000u, 0x74000000u, 0xa2000000u, 0x93000000u,
286
+ 0xd8800000u, 0x25400000u, 0x59e00000u, 0xe6d00000u,
287
+ 0x78080000u, 0xb40c0000u, 0x82020000u, 0xc3050000u,
288
+ 0x208f8000u, 0x51474000u, 0xfbea2000u, 0x75d93000u,
289
+ 0xa0858800u, 0x914e5400u, 0xdbe79e00u, 0x25db6d00u,
290
+ 0x58800080u, 0xe54000c0u, 0x79e00020u, 0xb6d00050u,
291
+ 0x800800f8u, 0xc00c0074u, 0x200200a2u, 0x50050093u
292
+ );
293
+
294
+ const uint SOBOL_DIRECTIONS_4[ 32 ] = uint[ 32 ](
295
+ 0x80000000u, 0x40000000u, 0x20000000u, 0xb0000000u,
296
+ 0xf8000000u, 0xdc000000u, 0x7a000000u, 0x9d000000u,
297
+ 0x5a800000u, 0x2fc00000u, 0xa1600000u, 0xf0b00000u,
298
+ 0xda880000u, 0x6fc40000u, 0x81620000u, 0x40bb0000u,
299
+ 0x22878000u, 0xb3c9c000u, 0xfb65a000u, 0xddb2d000u,
300
+ 0x78022800u, 0x9c0b3c00u, 0x5a0fb600u, 0x2d0ddb00u,
301
+ 0xa2878080u, 0xf3c9c040u, 0xdb65a020u, 0x6db2d0b0u,
302
+ 0x800228f8u, 0x400b3cdcu, 0x200fb67au, 0xb00ddb9du
303
+ );
304
+
305
+ uint getMaskedSobol( uint index, uint directions[ 32 ] ) {
306
+
307
+ uint X = 0u;
308
+ for ( int bit = 0; bit < 32; bit ++ ) {
309
+
310
+ uint mask = ( index >> bit ) & 1u;
311
+ X ^= mask * directions[ bit ];
312
+
313
+ }
314
+ return X;
315
+
316
+ }
317
+
318
+ vec4 generateSobolPoint( uint index ) {
319
+
320
+ if ( index >= SOBOL_MAX_POINTS ) {
321
+
322
+ return vec4( 0.0 );
323
+
324
+ }
325
+
326
+ // NOTE: this sobol "direction" is also available but we can't write out 5 components
327
+ // uint x = index & 0x00ffffffu;
328
+ uint x = sobolReverseBits( getMaskedSobol( index, SOBOL_DIRECTIONS_1 ) ) & 0x00ffffffu;
329
+ uint y = sobolReverseBits( getMaskedSobol( index, SOBOL_DIRECTIONS_2 ) ) & 0x00ffffffu;
330
+ uint z = sobolReverseBits( getMaskedSobol( index, SOBOL_DIRECTIONS_3 ) ) & 0x00ffffffu;
331
+ uint w = sobolReverseBits( getMaskedSobol( index, SOBOL_DIRECTIONS_4 ) ) & 0x00ffffffu;
332
+
333
+ return vec4( x, y, z, w ) * SOBOL_FACTOR;
334
+
335
+ }
336
+
337
+ `;
338
+
339
+ const shaderSobolSampling = /* glsl */`
340
+
341
+ // Seeds
342
+ uniform sampler2D sobolTexture;
343
+ uint sobolPixelIndex;
344
+ uint sobolPathIndex;
345
+ uint sobolBounceIndex;
346
+
347
+ uint sobolGetSeed( uint bounce, uint effect ) {
348
+
349
+ return sobolHash(
350
+ sobolHashCombine(
351
+ sobolHashCombine(
352
+ sobolHash( bounce ),
353
+ sobolPixelIndex
354
+ ),
355
+ effect
356
+ )
357
+ );
358
+
359
+ }
360
+
361
+ vec4 sobolGetTexturePoint( uint index ) {
362
+
363
+ if ( index >= SOBOL_MAX_POINTS ) {
364
+
365
+ index = index % SOBOL_MAX_POINTS;
366
+
367
+ }
368
+
369
+ uvec2 dim = uvec2( textureSize( sobolTexture, 0 ).xy );
370
+ uint y = index / dim.x;
371
+ uint x = index - y * dim.x;
372
+ vec2 uv = vec2( x, y ) / vec2( dim );
373
+ return texture( sobolTexture, uv );
374
+
375
+ }
376
+
377
+ ${ generateSobolSampleFunctions( 1 ) }
378
+ ${ generateSobolSampleFunctions( 2 ) }
379
+ ${ generateSobolSampleFunctions( 3 ) }
380
+ ${ generateSobolSampleFunctions( 4 ) }
381
+
382
+ `;
383
+
384
+ class SobolNumbersMaterial extends MaterialBase {
385
+
386
+ constructor() {
387
+
388
+ super( {
389
+
390
+ blending: NoBlending,
391
+
392
+ uniforms: {
393
+
394
+ resolution: { value: new Vector2() },
395
+
396
+ },
397
+
398
+ vertexShader: /* glsl */`
399
+
400
+ varying vec2 vUv;
401
+ void main() {
402
+
403
+ vUv = uv;
404
+ gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
405
+
406
+ }
407
+ `,
408
+
409
+ fragmentShader: /* glsl */`
410
+
411
+ ${ shaderSobolCommon }
412
+ ${ shaderSobolGeneration }
413
+
414
+ varying vec2 vUv;
415
+ uniform vec2 resolution;
416
+ void main() {
417
+
418
+ uint index = uint( gl_FragCoord.y ) * uint( resolution.x ) + uint( gl_FragCoord.x );
419
+ gl_FragColor = generateSobolPoint( index );
420
+
421
+ }
422
+ `,
423
+
424
+ } );
425
+
426
+ }
427
+
428
+ }
429
+
430
+ class SobolNumberMapGenerator {
431
+
432
+ generate( renderer, dimensions = 256 ) {
433
+
434
+ const target = new WebGLRenderTarget( dimensions, dimensions, {
435
+
436
+ type: FloatType,
437
+ format: RGBAFormat,
438
+ minFilter: NearestFilter,
439
+ magFilter: NearestFilter,
440
+ generateMipmaps: false,
441
+
442
+ } );
443
+
444
+ const ogTarget = renderer.getRenderTarget();
445
+ renderer.setRenderTarget( target );
446
+
447
+ const quad = new FullScreenQuad( new SobolNumbersMaterial() );
448
+ quad.material.resolution.set( dimensions, dimensions );
449
+ quad.render( renderer );
450
+
451
+ renderer.setRenderTarget( ogTarget );
452
+ quad.dispose();
453
+
454
+ return target;
455
+
456
+ }
457
+
458
+ }
459
+
126
460
  function* renderTask() {
127
461
 
128
462
  const {
@@ -131,6 +465,7 @@ function* renderTask() {
131
465
  _blendQuad,
132
466
  _primaryTarget,
133
467
  _blendTargets,
468
+ _sobolTarget,
134
469
  alpha,
135
470
  camera,
136
471
  material,
@@ -157,6 +492,7 @@ function* renderTask() {
157
492
  const w = _primaryTarget.width;
158
493
  const h = _primaryTarget.height;
159
494
  material.resolution.set( w, h );
495
+ material.sobolTexture = _sobolTarget.texture;
160
496
  material.seed ++;
161
497
 
162
498
  const tilesX = this.tiles.x || 1;
@@ -300,11 +636,12 @@ class PathTracingRenderer {
300
636
 
301
637
  this._renderer = renderer;
302
638
  this._alpha = false;
303
- this._fsQuad = new FullScreenQuad( null );
304
- this._blendQuad = new FullScreenQuad( new BlendMaterial() );
639
+ this._fsQuad = new FullScreenQuad$1( null );
640
+ this._blendQuad = new FullScreenQuad$1( new BlendMaterial() );
305
641
  this._task = null;
306
642
  this._currentTile = 0;
307
643
 
644
+ this._sobolTarget = new SobolNumberMapGenerator().generate( renderer );
308
645
  this._primaryTarget = new WebGLRenderTarget( 1, 1, {
309
646
  format: RGBAFormat,
310
647
  type: FloatType,
@@ -336,6 +673,7 @@ class PathTracingRenderer {
336
673
  this._primaryTarget.dispose();
337
674
  this._blendTargets[ 0 ].dispose();
338
675
  this._blendTargets[ 1 ].dispose();
676
+ this._sobolTarget.dispose();
339
677
 
340
678
  this._fsQuad.dispose();
341
679
  this._blendQuad.dispose();
@@ -1201,6 +1539,14 @@ class GradientEquirectTexture extends ProceduralEquirectTexture {
1201
1539
 
1202
1540
  }
1203
1541
 
1542
+ // we must hash the texture using the encoding as well as the source to determine uniqueness because,
1543
+ // when rendering each texture to the texture array, the textures must have a consistent color space.
1544
+ function getTextureHash( t ) {
1545
+
1546
+ return `${ t.source.uuid }:${ t.encoding }`;
1547
+
1548
+ }
1549
+
1204
1550
  // reduce the set of textures to just those with a unique source while retaining
1205
1551
  // the order of the textures.
1206
1552
  function reduceTexturesToUniqueSources( textures ) {
@@ -1210,9 +1556,10 @@ function reduceTexturesToUniqueSources( textures ) {
1210
1556
  for ( let i = 0, l = textures.length; i < l; i ++ ) {
1211
1557
 
1212
1558
  const tex = textures[ i ];
1213
- if ( ! sourceSet.has( tex.source ) ) {
1559
+ const hash = getTextureHash( tex );
1560
+ if ( ! sourceSet.has( hash ) ) {
1214
1561
 
1215
- sourceSet.add( tex.source );
1562
+ sourceSet.add( hash );
1216
1563
  result.push( tex );
1217
1564
 
1218
1565
  }
@@ -1283,8 +1630,8 @@ class MaterialsTexture extends DataTexture {
1283
1630
 
1284
1631
  if ( key in material && material[ key ] ) {
1285
1632
 
1286
- const source = material[ key ].source;
1287
- return uniqueTextures.findIndex( tex => tex.source === source );
1633
+ const hash = getTextureHash( material[ key ] );
1634
+ return uniqueTextureLookup[ hash ];
1288
1635
 
1289
1636
  } else {
1290
1637
 
@@ -1374,6 +1721,12 @@ class MaterialsTexture extends DataTexture {
1374
1721
 
1375
1722
  // get the list of textures with unique sources
1376
1723
  const uniqueTextures = reduceTexturesToUniqueSources( textures );
1724
+ const uniqueTextureLookup = {};
1725
+ for ( let i = 0, l = uniqueTextures.length; i < l; i ++ ) {
1726
+
1727
+ uniqueTextureLookup[ getTextureHash( uniqueTextures[ i ] ) ] = i;
1728
+
1729
+ }
1377
1730
 
1378
1731
  if ( image.width !== dimension ) {
1379
1732
 
@@ -1405,9 +1758,9 @@ class MaterialsTexture extends DataTexture {
1405
1758
  // sample 1
1406
1759
  // metalness & roughness
1407
1760
  floatArray[ index ++ ] = getField( m, 'metalness', 0.0 );
1408
- floatArray[ index ++ ] = uniqueTextures.indexOf( m.metalnessMap );
1761
+ floatArray[ index ++ ] = getTexture( m, 'metalnessMap' );
1409
1762
  floatArray[ index ++ ] = getField( m, 'roughness', 0.0 );
1410
- floatArray[ index ++ ] = uniqueTextures.indexOf( m.roughnessMap );
1763
+ floatArray[ index ++ ] = getTexture( m, 'roughnessMap' );
1411
1764
 
1412
1765
  // sample 2
1413
1766
  // transmission & emissiveIntensity
@@ -1585,7 +1938,7 @@ class MaterialsTexture extends DataTexture {
1585
1938
  // sample 14
1586
1939
  index ++; // matte
1587
1940
  index ++; // shadow
1588
- floatArray[ index ++ ] = Number( m.vertexColors ); // vertexColors
1941
+ floatArray[ index ++ ] = Number( m.vertexColors ) | ( Number( m.flatShading ) << 1 ); // vertexColors & flatShading
1589
1942
  floatArray[ index ++ ] = Number( m.transparent ); // transparent
1590
1943
 
1591
1944
  // map transform 15
@@ -1661,7 +2014,7 @@ class RenderTarget2DArray extends WebGLArrayRenderTarget {
1661
2014
 
1662
2015
  };
1663
2016
 
1664
- const fsQuad = new FullScreenQuad( new MeshBasicMaterial() );
2017
+ const fsQuad = new FullScreenQuad$1( new MeshBasicMaterial() );
1665
2018
  this.fsQuad = fsQuad;
1666
2019
 
1667
2020
  }
@@ -2024,6 +2377,8 @@ const LIGHT_PIXELS = 6;
2024
2377
  const RECT_AREA_LIGHT = 0;
2025
2378
  const CIRC_AREA_LIGHT = 1;
2026
2379
  const SPOT_LIGHT = 2;
2380
+ const DIR_LIGHT = 3;
2381
+ const POINT_LIGHT = 4;
2027
2382
  class LightsInfoUniformStruct {
2028
2383
 
2029
2384
  constructor() {
@@ -2082,8 +2437,24 @@ class LightsInfoUniformStruct {
2082
2437
 
2083
2438
  // type
2084
2439
  let type = RECT_AREA_LIGHT;
2085
- if ( l.isRectAreaLight && l.isCircular ) type = CIRC_AREA_LIGHT;
2086
- else if ( l.isSpotLight ) type = SPOT_LIGHT;
2440
+ if ( l.isRectAreaLight && l.isCircular ) {
2441
+
2442
+ type = CIRC_AREA_LIGHT;
2443
+
2444
+ } else if ( l.isSpotLight ) {
2445
+
2446
+ type = SPOT_LIGHT;
2447
+
2448
+ } else if ( l.isDirectionalLight ) {
2449
+
2450
+ type = DIR_LIGHT;
2451
+
2452
+ } else if ( l.isPointLight ) {
2453
+
2454
+ type = POINT_LIGHT;
2455
+
2456
+ }
2457
+
2087
2458
  floatArray[ baseIndex + ( index ++ ) ] = type;
2088
2459
 
2089
2460
  // sample 2
@@ -2170,6 +2541,33 @@ class LightsInfoUniformStruct {
2170
2541
  // iesProfile
2171
2542
  floatArray[ baseIndex + ( index ++ ) ] = iesTextures.indexOf( l.iesTexture );
2172
2543
 
2544
+ } else if ( l.isPointLight ) {
2545
+
2546
+ const worldPosition = l.getWorldPosition( u );
2547
+ floatArray[ baseIndex + ( index ++ ) ] = worldPosition.x;
2548
+ floatArray[ baseIndex + ( index ++ ) ] = worldPosition.y;
2549
+ floatArray[ baseIndex + ( index ++ ) ] = worldPosition.z;
2550
+ index ++;
2551
+
2552
+ // sample 4
2553
+ index += 4;
2554
+
2555
+ // sample 5
2556
+ index += 2;
2557
+
2558
+ floatArray[ baseIndex + ( index ++ ) ] = l.decay;
2559
+ floatArray[ baseIndex + ( index ++ ) ] = l.distance;
2560
+
2561
+ } else if ( l.isDirectionalLight ) {
2562
+
2563
+ const worldPosition = l.getWorldPosition( u );
2564
+ const targetPosition = l.target.getWorldPosition( v );
2565
+
2566
+ target.subVectors( worldPosition, targetPosition ).normalize();
2567
+ floatArray[ baseIndex + ( index ++ ) ] = target.x;
2568
+ floatArray[ baseIndex + ( index ++ ) ] = target.y;
2569
+ floatArray[ baseIndex + ( index ++ ) ] = target.z;
2570
+
2173
2571
  }
2174
2572
 
2175
2573
  }
@@ -2519,7 +2917,7 @@ class IESProfilesTexture extends WebGLArrayRenderTarget {
2519
2917
 
2520
2918
  };
2521
2919
 
2522
- const fsQuad = new FullScreenQuad( new MeshBasicMaterial() );
2920
+ const fsQuad = new FullScreenQuad$1( new MeshBasicMaterial() );
2523
2921
  this.fsQuad = fsQuad;
2524
2922
 
2525
2923
  this.iesLoader = new IESLoader();
@@ -2718,74 +3116,7 @@ const shaderUtils = /* glsl */`
2718
3116
 
2719
3117
  }
2720
3118
 
2721
- // https://www.shadertoy.com/view/wltcRS
2722
- uvec4 s0;
2723
-
2724
- void rng_initialize(vec2 p, int frame) {
2725
-
2726
- // white noise seed
2727
- s0 = uvec4( p, uint( frame ), uint( p.x ) + uint( p.y ) );
2728
-
2729
- }
2730
-
2731
- // https://www.pcg-random.org/
2732
- void pcg4d( inout uvec4 v ) {
2733
-
2734
- v = v * 1664525u + 1013904223u;
2735
- v.x += v.y * v.w;
2736
- v.y += v.z * v.x;
2737
- v.z += v.x * v.y;
2738
- v.w += v.y * v.z;
2739
- v = v ^ ( v >> 16u );
2740
- v.x += v.y*v.w;
2741
- v.y += v.z*v.x;
2742
- v.z += v.x*v.y;
2743
- v.w += v.y*v.z;
2744
-
2745
- }
2746
-
2747
- // returns [ 0, 1 ]
2748
- float rand() {
2749
-
2750
- pcg4d(s0);
2751
- return float( s0.x ) / float( 0xffffffffu );
2752
-
2753
- }
2754
-
2755
- vec2 rand2() {
2756
-
2757
- pcg4d( s0 );
2758
- return vec2( s0.xy ) / float(0xffffffffu);
2759
-
2760
- }
2761
-
2762
- vec3 rand3() {
2763
-
2764
- pcg4d(s0);
2765
- return vec3( s0.xyz ) / float( 0xffffffffu );
2766
-
2767
- }
2768
-
2769
- vec4 rand4() {
2770
-
2771
- pcg4d(s0);
2772
- return vec4(s0)/float(0xffffffffu);
2773
-
2774
- }
2775
-
2776
- // https://github.com/mrdoob/three.js/blob/dev/src/math/Vector3.js#L724
2777
- vec3 randDirection() {
2778
-
2779
- vec2 r = rand2();
2780
- float u = ( r.x - 0.5 ) * 2.0;
2781
- float t = r.y * PI * 2.0;
2782
- float f = sqrt( 1.0 - u * u );
2783
-
2784
- return vec3( f * cos( t ), f * sin( t ), u );
2785
-
2786
- }
2787
-
2788
- vec2 triangleSample( vec2 a, vec2 b, vec2 c ) {
3119
+ vec2 sampleTriangle( vec2 a, vec2 b, vec2 c, vec2 r ) {
2789
3120
 
2790
3121
  // get the edges of the triangle and the diagonal across the
2791
3122
  // center of the parallelogram
@@ -2793,8 +3124,7 @@ const shaderUtils = /* glsl */`
2793
3124
  vec2 e2 = c - b;
2794
3125
  vec2 diag = normalize( e1 + e2 );
2795
3126
 
2796
- // pick a random point in the parallelogram
2797
- vec2 r = rand2();
3127
+ // pick the point in the parallelogram
2798
3128
  if ( r.x + r.y > 1.0 ) {
2799
3129
 
2800
3130
  r = vec2( 1.0 ) - r;
@@ -2805,40 +3135,48 @@ const shaderUtils = /* glsl */`
2805
3135
 
2806
3136
  }
2807
3137
 
2808
- // samples an aperture shape with the given number of sides. 0 means circle
2809
- vec2 sampleAperture( int blades ) {
3138
+ vec2 sampleCircle( vec2 uv ) {
2810
3139
 
2811
- if ( blades == 0 ) {
3140
+ float angle = 2.0 * PI * uv.x;
3141
+ float radius = sqrt( uv.y );
3142
+ return vec2( cos( angle ), sin( angle ) ) * radius;
2812
3143
 
2813
- vec2 r = rand2();
2814
- float angle = 2.0 * PI * r.x;
2815
- float radius = sqrt( rand() );
2816
- return vec2( cos( angle ), sin( angle ) ) * radius;
3144
+ }
2817
3145
 
2818
- } else {
3146
+ vec3 sampleSphere( vec2 uv ) {
2819
3147
 
2820
- blades = max( blades, 3 );
3148
+ float u = ( uv.x - 0.5 ) * 2.0;
3149
+ float t = uv.y * PI * 2.0;
3150
+ float f = sqrt( 1.0 - u * u );
2821
3151
 
2822
- vec3 r = rand3();
2823
- float anglePerSegment = 2.0 * PI / float( blades );
2824
- float segment = floor( float( blades ) * r.x );
3152
+ return vec3( f * cos( t ), f * sin( t ), u );
2825
3153
 
2826
- float angle1 = anglePerSegment * segment;
2827
- float angle2 = angle1 + anglePerSegment;
2828
- vec2 a = vec2( sin( angle1 ), cos( angle1 ) );
2829
- vec2 b = vec2( 0.0, 0.0 );
2830
- vec2 c = vec2( sin( angle2 ), cos( angle2 ) );
3154
+ }
2831
3155
 
2832
- return triangleSample( a, b, c );
3156
+ vec2 sampleRegularNGon( int sides, vec3 uvw ) {
2833
3157
 
2834
- }
3158
+ sides = max( sides, 3 );
3159
+
3160
+ vec3 r = uvw;
3161
+ float anglePerSegment = 2.0 * PI / float( sides );
3162
+ float segment = floor( float( sides ) * r.x );
3163
+
3164
+ float angle1 = anglePerSegment * segment;
3165
+ float angle2 = angle1 + anglePerSegment;
3166
+ vec2 a = vec2( sin( angle1 ), cos( angle1 ) );
3167
+ vec2 b = vec2( 0.0, 0.0 );
3168
+ vec2 c = vec2( sin( angle2 ), cos( angle2 ) );
3169
+
3170
+ return sampleTriangle( a, b, c, r.yz );
2835
3171
 
2836
3172
  }
2837
3173
 
2838
- float colorToLuminance( vec3 color ) {
3174
+ // samples an aperture shape with the given number of sides. 0 means circle
3175
+ vec2 sampleAperture( int blades, vec3 uvw ) {
2839
3176
 
2840
- // https://en.wikipedia.org/wiki/Relative_luminance
2841
- return 0.2126 * color.r + 0.7152 * color.g + 0.0722 * color.b;
3177
+ return blades == 0 ?
3178
+ sampleCircle( uvw.xy ) :
3179
+ sampleRegularNGon( blades, uvw );
2842
3180
 
2843
3181
  }
2844
3182
 
@@ -2919,6 +3257,17 @@ const shaderUtils = /* glsl */`
2919
3257
 
2920
3258
  }
2921
3259
 
3260
+ vec2 rotateVector( vec2 v, float t ) {
3261
+
3262
+ float ac = cos( t );
3263
+ float as = sin( t );
3264
+ return vec2(
3265
+ v.x * ac - v.y * as,
3266
+ v.x * as + v.y * ac
3267
+ );
3268
+
3269
+ }
3270
+
2922
3271
  // Finds the point where the ray intersects the plane defined by u and v and checks if this point
2923
3272
  // falls in the bounds of the rectangle on that same plane.
2924
3273
  // Plane intersection: https://lousodrome.net/blog/light/2020/07/03/intersection-of-a-ray-and-a-plane/
@@ -2987,6 +3336,13 @@ const shaderUtils = /* glsl */`
2987
3336
 
2988
3337
  }
2989
3338
 
3339
+ // tentFilter from Peter Shirley's 'Realistic Ray Tracing (2nd Edition)' book, pg. 60
3340
+ // erichlof/THREE.js-PathTracing-Renderer/
3341
+ float tentFilter( float x ) {
3342
+
3343
+ return x < 0.5 ? sqrt( 2.0 * x ) - 1.0 : 1.0 - sqrt( 2.0 - ( 2.0 * x ) );
3344
+
3345
+ }
2990
3346
  `;
2991
3347
 
2992
3348
  class PMREMCopyMaterial extends MaterialBase {
@@ -3043,7 +3399,7 @@ class BlurredEnvMapGenerator {
3043
3399
 
3044
3400
  this.renderer = renderer;
3045
3401
  this.pmremGenerator = new PMREMGenerator( renderer );
3046
- this.copyQuad = new FullScreenQuad( new PMREMCopyMaterial() );
3402
+ this.copyQuad = new FullScreenQuad$1( new PMREMCopyMaterial() );
3047
3403
  this.renderTarget = new WebGLRenderTarget( 1, 1, { type: FloatType, format: RGBAFormat } );
3048
3404
 
3049
3405
  }
@@ -3566,6 +3922,7 @@ const shaderMaterialStructs = /* glsl */ `
3566
3922
  int sheenRoughnessMap;
3567
3923
 
3568
3924
  bool vertexColors;
3925
+ bool flatShading;
3569
3926
  bool transparent;
3570
3927
 
3571
3928
  mat3 mapTransform;
@@ -3678,7 +4035,8 @@ const shaderMaterialStructs = /* glsl */ `
3678
4035
 
3679
4036
  m.matte = bool( s14.r );
3680
4037
  m.castShadow = ! bool( s14.g );
3681
- m.vertexColors = bool( s14.b );
4038
+ m.vertexColors = bool( int( s14.b ) & 1 );
4039
+ m.flatShading = bool( int( s14.b ) & 2 );
3682
4040
  m.transparent = bool( s14.a );
3683
4041
 
3684
4042
  uint firstTextureTransformIdx = i + 15u;
@@ -3710,6 +4068,8 @@ const shaderLightStruct = /* glsl */ `
3710
4068
  #define RECT_AREA_LIGHT_TYPE 0
3711
4069
  #define CIRC_AREA_LIGHT_TYPE 1
3712
4070
  #define SPOT_LIGHT_TYPE 2
4071
+ #define DIR_LIGHT_TYPE 3
4072
+ #define POINT_LIGHT_TYPE 4
3713
4073
 
3714
4074
  struct LightsInfo {
3715
4075
 
@@ -3761,7 +4121,7 @@ const shaderLightStruct = /* glsl */ `
3761
4121
  l.v = s3.rgb;
3762
4122
  l.area = s3.a;
3763
4123
 
3764
- if ( l.type == SPOT_LIGHT_TYPE ) {
4124
+ if ( l.type == SPOT_LIGHT_TYPE || l.type == POINT_LIGHT_TYPE ) {
3765
4125
 
3766
4126
  vec4 s4 = texelFetch1D( tex, i + 4u );
3767
4127
  vec4 s5 = texelFetch1D( tex, i + 5u );
@@ -3814,14 +4174,14 @@ const shaderGGXFunctions = /* glsl */`
3814
4174
 
3815
4175
  // trowbridge-reitz === GGX === GTR
3816
4176
 
3817
- vec3 ggxDirection( vec3 incidentDir, float roughnessX, float roughnessY, float random1, float random2 ) {
4177
+ vec3 ggxDirection( vec3 incidentDir, vec2 roughness, vec2 uv ) {
3818
4178
 
3819
4179
  // TODO: try GGXVNDF implementation from reference [2], here. Needs to update ggxDistribution
3820
4180
  // function below, as well
3821
4181
 
3822
4182
  // Implementation from reference [1]
3823
4183
  // stretch view
3824
- vec3 V = normalize( vec3( roughnessX * incidentDir.x, roughnessY * incidentDir.y, incidentDir.z ) );
4184
+ vec3 V = normalize( vec3( roughness * incidentDir.xy, incidentDir.z ) );
3825
4185
 
3826
4186
  // orthonormal basis
3827
4187
  vec3 T1 = ( V.z < 0.9999 ) ? normalize( cross( V, vec3( 0.0, 0.0, 1.0 ) ) ) : vec3( 1.0, 0.0, 0.0 );
@@ -3829,16 +4189,16 @@ vec3 ggxDirection( vec3 incidentDir, float roughnessX, float roughnessY, float r
3829
4189
 
3830
4190
  // sample point with polar coordinates (r, phi)
3831
4191
  float a = 1.0 / ( 1.0 + V.z );
3832
- float r = sqrt( random1 );
3833
- float phi = ( random2 < a ) ? random2 / a * PI : PI + ( random2 - a ) / ( 1.0 - a ) * PI;
4192
+ float r = sqrt( uv.x );
4193
+ float phi = ( uv.y < a ) ? uv.y / a * PI : PI + ( uv.y - a ) / ( 1.0 - a ) * PI;
3834
4194
  float P1 = r * cos( phi );
3835
- float P2 = r * sin( phi ) * ( ( random2 < a ) ? 1.0 : V.z );
4195
+ float P2 = r * sin( phi ) * ( ( uv.y < a ) ? 1.0 : V.z );
3836
4196
 
3837
4197
  // compute normal
3838
4198
  vec3 N = P1 * T1 + P2 * T2 + V * sqrt( max( 0.0, 1.0 - P1 * P1 - P2 * P2 ) );
3839
4199
 
3840
4200
  // unstretch
3841
- N = normalize( vec3( roughnessX * N.x, roughnessY * N.y, max( 0.0, N.z ) ) );
4201
+ N = normalize( vec3( roughness * N.xy, max( 0.0, N.z ) ) );
3842
4202
 
3843
4203
  return N;
3844
4204
 
@@ -4226,7 +4586,7 @@ float diffuseEval( vec3 wo, vec3 wi, vec3 wh, SurfaceRec surf, out vec3 color )
4226
4586
 
4227
4587
  vec3 diffuseDirection( vec3 wo, SurfaceRec surf ) {
4228
4588
 
4229
- vec3 lightDirection = randDirection();
4589
+ vec3 lightDirection = sampleSphere( sobol2( 11 ) );
4230
4590
  lightDirection.z += 1.0;
4231
4591
  lightDirection = normalize( lightDirection );
4232
4592
 
@@ -4280,10 +4640,8 @@ vec3 specularDirection( vec3 wo, SurfaceRec surf ) {
4280
4640
  float filteredRoughness = surf.filteredRoughness;
4281
4641
  vec3 halfVector = ggxDirection(
4282
4642
  wo,
4283
- filteredRoughness,
4284
- filteredRoughness,
4285
- rand(),
4286
- rand()
4643
+ vec2( filteredRoughness ),
4644
+ sobol2( 12 )
4287
4645
  );
4288
4646
 
4289
4647
  // apply to new ray by reflecting off the new normal
@@ -4320,10 +4678,8 @@ vec3 transmissionDirection( vec3 wo, SurfaceRec surf ) {
4320
4678
  // sample ggx vndf distribution which gives a new normal
4321
4679
  vec3 halfVector = ggxDirection(
4322
4680
  wo,
4323
- filteredRoughness,
4324
- filteredRoughness,
4325
- rand(),
4326
- rand()
4681
+ vec2( filteredRoughness ),
4682
+ sobol2( 13 )
4327
4683
  );
4328
4684
 
4329
4685
 
@@ -4363,7 +4719,7 @@ vec3 transmissionDirection( vec3 wo, SurfaceRec surf ) {
4363
4719
 
4364
4720
  float roughness = surf.roughness;
4365
4721
  float eta = surf.eta;
4366
- vec3 halfVector = normalize( vec3( 0.0, 0.0, 1.0 ) + randDirection() * roughness );
4722
+ vec3 halfVector = normalize( vec3( 0.0, 0.0, 1.0 ) + sampleSphere( sobol2( 13 ) ) * roughness );
4367
4723
  vec3 lightDirection = refract( normalize( - wo ), halfVector, eta );
4368
4724
 
4369
4725
  if ( surf.thinFilm ) {
@@ -4411,10 +4767,8 @@ vec3 clearcoatDirection( vec3 wo, SurfaceRec surf ) {
4411
4767
  float filteredClearcoatRoughness = surf.filteredClearcoatRoughness;
4412
4768
  vec3 halfVector = ggxDirection(
4413
4769
  wo,
4414
- filteredClearcoatRoughness,
4415
- filteredClearcoatRoughness,
4416
- rand(),
4417
- rand()
4770
+ vec2( filteredClearcoatRoughness ),
4771
+ sobol2( 14 )
4418
4772
  );
4419
4773
 
4420
4774
  // apply to new ray by reflecting off the new normal
@@ -4603,7 +4957,7 @@ SampleRec bsdfSample( vec3 wo, vec3 clearcoatWo, mat3 normalBasis, mat3 invBasis
4603
4957
  vec3 wi;
4604
4958
  vec3 clearcoatWi;
4605
4959
 
4606
- float r = rand();
4960
+ float r = sobol( 15 );
4607
4961
  if ( r <= cdf[0] ) { // diffuse
4608
4962
 
4609
4963
  wi = diffuseDirection( wo, surf );
@@ -4659,13 +5013,13 @@ float envMapDirectionPdf( vec3 direction ) {
4659
5013
 
4660
5014
  }
4661
5015
 
4662
- float envMapSample( vec3 direction, EquirectHdrInfo info, out vec3 color ) {
5016
+ float sampleEnvMap( EquirectHdrInfo info, vec3 direction, out vec3 color ) {
4663
5017
 
4664
5018
  vec2 uv = equirectDirectionToUv( direction );
4665
5019
  color = texture2D( info.map, uv ).rgb;
4666
5020
 
4667
5021
  float totalSum = info.totalSumWhole + info.totalSumDecimal;
4668
- float lum = colorToLuminance( color );
5022
+ float lum = luminance( color );
4669
5023
  ivec2 resolution = textureSize( info.map, 0 );
4670
5024
  float pdf = lum / totalSum;
4671
5025
 
@@ -4673,10 +5027,9 @@ float envMapSample( vec3 direction, EquirectHdrInfo info, out vec3 color ) {
4673
5027
 
4674
5028
  }
4675
5029
 
4676
- float randomEnvMapSample( EquirectHdrInfo info, out vec3 color, out vec3 direction ) {
5030
+ float sampleEnvMapProbability( EquirectHdrInfo info, vec2 r, out vec3 color, out vec3 direction ) {
4677
5031
 
4678
5032
  // sample env map cdf
4679
- vec2 r = rand2();
4680
5033
  float v = texture2D( info.marginalWeights, vec2( r.x, 0.0 ) ).x;
4681
5034
  float u = texture2D( info.conditionalWeights, vec2( r.y, v ) ).x;
4682
5035
  vec2 uv = vec2( u, v );
@@ -4686,7 +5039,7 @@ float randomEnvMapSample( EquirectHdrInfo info, out vec3 color, out vec3 directi
4686
5039
  color = texture2D( info.map, uv ).rgb;
4687
5040
 
4688
5041
  float totalSum = info.totalSumWhole + info.totalSumDecimal;
4689
- float lum = colorToLuminance( color );
5042
+ float lum = luminance( color );
4690
5043
  ivec2 resolution = textureSize( info.map, 0 );
4691
5044
  float pdf = lum / totalSum;
4692
5045
 
@@ -4765,6 +5118,7 @@ LightSampleRec lightsClosestHit( sampler2D lights, uint lightCount, vec3 rayOrig
4765
5118
 
4766
5119
  float dist;
4767
5120
 
5121
+ // MIS / light intersection is not supported for punctual lights.
4768
5122
  if(
4769
5123
  ( light.type == RECT_AREA_LIGHT_TYPE && intersectsRectangle( light.position, normal, u, v, rayOrigin, rayDirection, dist ) ) ||
4770
5124
  ( light.type == CIRC_AREA_LIGHT_TYPE && intersectsCircle( light.position, normal, u, v, rayOrigin, rayDirection, dist ) )
@@ -4783,38 +5137,6 @@ LightSampleRec lightsClosestHit( sampler2D lights, uint lightCount, vec3 rayOrig
4783
5137
 
4784
5138
  }
4785
5139
 
4786
- } else if ( light.type == SPOT_LIGHT_TYPE ) {
4787
-
4788
- // TODO: forward path tracing sampling needs to be made consistent with direct light sampling logic
4789
- // float radius = light.radius;
4790
- // vec3 lightNormal = normalize( cross( light.u, light.v ) );
4791
- // float angle = acos( light.coneCos );
4792
- // float angleTan = tan( angle );
4793
- // float startDistance = radius / max( angleTan, EPSILON );
4794
-
4795
- // u = light.u / radius;
4796
- // v = light.v / radius;
4797
-
4798
- // if (
4799
- // intersectsCircle( light.position - normal * startDistance, normal, u, v, rayOrigin, rayDirection, dist ) &&
4800
- // ( dist < lightSampleRec.dist || ! lightSampleRec.hit )
4801
- // ) {
4802
-
4803
- // float cosTheta = dot( rayDirection, normal );
4804
- // float spotAttenuation = light.iesProfile != - 1 ?
4805
- // getPhotometricAttenuation( iesProfiles, light.iesProfile, rayDirection, normal, u, v )
4806
- // : getSpotAttenuation( light.coneCos, light.penumbraCos, cosTheta );
4807
-
4808
- // float distanceAttenuation = getDistanceAttenuation( dist, light.distance, light.decay );
4809
-
4810
- // lightSampleRec.hit = true;
4811
- // lightSampleRec.dist = dist;
4812
- // lightSampleRec.direction = rayDirection;
4813
- // lightSampleRec.emission = light.color * light.intensity * distanceAttenuation * spotAttenuation;
4814
- // lightSampleRec.pdf = ( dist * dist ) / ( light.area * cosTheta );
4815
-
4816
- // }
4817
-
4818
5140
  }
4819
5141
 
4820
5142
  }
@@ -4823,7 +5145,7 @@ LightSampleRec lightsClosestHit( sampler2D lights, uint lightCount, vec3 rayOrig
4823
5145
 
4824
5146
  }
4825
5147
 
4826
- LightSampleRec randomAreaLightSample( Light light, vec3 rayOrigin ) {
5148
+ LightSampleRec randomAreaLightSample( Light light, vec3 rayOrigin, vec2 ruv ) {
4827
5149
 
4828
5150
  LightSampleRec lightSampleRec;
4829
5151
  lightSampleRec.hit = true;
@@ -4835,13 +5157,13 @@ LightSampleRec randomAreaLightSample( Light light, vec3 rayOrigin ) {
4835
5157
  if( light.type == RECT_AREA_LIGHT_TYPE ) {
4836
5158
 
4837
5159
  // rectangular area light
4838
- randomPos = light.position + light.u * ( rand() - 0.5 ) + light.v * ( rand() - 0.5 );
5160
+ randomPos = light.position + light.u * ( ruv.x - 0.5 ) + light.v * ( ruv.y - 0.5 );
4839
5161
 
4840
- } else if( light.type == 1 ) {
5162
+ } else if( light.type == CIRC_AREA_LIGHT_TYPE ) {
4841
5163
 
4842
5164
  // circular area light
4843
- float r = 0.5 * sqrt( rand() );
4844
- float theta = rand() * 2.0 * PI;
5165
+ float r = 0.5 * sqrt( ruv.x );
5166
+ float theta = ruv.y * 2.0 * PI;
4845
5167
  float x = r * cos( theta );
4846
5168
  float y = r * sin( theta );
4847
5169
 
@@ -4863,10 +5185,10 @@ LightSampleRec randomAreaLightSample( Light light, vec3 rayOrigin ) {
4863
5185
 
4864
5186
  }
4865
5187
 
4866
- LightSampleRec randomSpotLightSample( Light light, sampler2DArray iesProfiles, vec3 rayOrigin ) {
5188
+ LightSampleRec randomSpotLightSample( Light light, sampler2DArray iesProfiles, vec3 rayOrigin, vec2 ruv ) {
4867
5189
 
4868
- float radius = light.radius * sqrt( rand() );
4869
- float theta = rand() * 2.0 * PI;
5190
+ float radius = light.radius * sqrt( ruv.x );
5191
+ float theta = ruv.y * 2.0 * PI;
4870
5192
  float x = radius * cos( theta );
4871
5193
  float y = radius * sin( theta );
4872
5194
 
@@ -4887,8 +5209,8 @@ LightSampleRec randomSpotLightSample( Light light, sampler2DArray iesProfiles, v
4887
5209
  float cosTheta = dot( direction, normal );
4888
5210
 
4889
5211
  float spotAttenuation = light.iesProfile != - 1 ?
4890
- getPhotometricAttenuation( iesProfiles, light.iesProfile, direction, normal, u, v )
4891
- : getSpotAttenuation( light.coneCos, light.penumbraCos, cosTheta );
5212
+ getPhotometricAttenuation( iesProfiles, light.iesProfile, direction, normal, u, v ) :
5213
+ getSpotAttenuation( light.coneCos, light.penumbraCos, cosTheta );
4892
5214
 
4893
5215
  float distanceAttenuation = getDistanceAttenuation( dist, light.distance, light.decay );
4894
5216
  LightSampleRec lightSampleRec;
@@ -4897,30 +5219,59 @@ LightSampleRec randomSpotLightSample( Light light, sampler2DArray iesProfiles, v
4897
5219
  lightSampleRec.dist = dist;
4898
5220
  lightSampleRec.direction = direction;
4899
5221
  lightSampleRec.emission = light.color * light.intensity * distanceAttenuation * spotAttenuation;
4900
-
4901
- // TODO: this makes the result consistent between MIS and non MIS paths but at radius 0 the pdf is infinite
4902
- // and the intensity of the light is not correct
4903
5222
  lightSampleRec.pdf = 1.0;
4904
- // lightSampleRec.pdf = lightDistSq / ( light.area * cosTheta );
4905
5223
 
4906
5224
  return lightSampleRec;
4907
5225
 
4908
5226
  }
4909
5227
 
4910
- LightSampleRec randomLightSample( sampler2D lights, sampler2DArray iesProfiles, uint lightCount, vec3 rayOrigin ) {
5228
+ LightSampleRec randomLightSample( sampler2D lights, sampler2DArray iesProfiles, uint lightCount, vec3 rayOrigin, vec3 ruv ) {
4911
5229
 
4912
5230
  // pick a random light
4913
- uint l = uint( rand() * float( lightCount ) );
5231
+ uint l = uint( ruv.x * float( lightCount ) );
4914
5232
  Light light = readLightInfo( lights, l );
4915
5233
 
4916
5234
  if ( light.type == SPOT_LIGHT_TYPE ) {
4917
5235
 
4918
- return randomSpotLightSample( light, iesProfiles, rayOrigin );
5236
+ return randomSpotLightSample( light, iesProfiles, rayOrigin, ruv.yz );
5237
+
5238
+ } else if ( light.type == POINT_LIGHT_TYPE ) {
5239
+
5240
+ vec3 lightRay = light.u - rayOrigin;
5241
+ float lightDist = length( lightRay );
5242
+ float cutoffDistance = light.distance;
5243
+ float distanceFalloff = 1.0 / max( pow( lightDist, light.decay ), 0.01 );
5244
+ if ( cutoffDistance > 0.0 ) {
5245
+
5246
+ distanceFalloff *= pow2( saturate( 1.0 - pow4( lightDist / cutoffDistance ) ) );
5247
+
5248
+ }
5249
+
5250
+ LightSampleRec rec;
5251
+ rec.hit = true;
5252
+ rec.direction = normalize( lightRay );
5253
+ rec.dist = length( lightRay );
5254
+ rec.pdf = 1.0;
5255
+ rec.emission = light.color * light.intensity * distanceFalloff;
5256
+ rec.type = light.type;
5257
+ return rec;
5258
+
5259
+ } else if ( light.type == DIR_LIGHT_TYPE ) {
5260
+
5261
+ LightSampleRec rec;
5262
+ rec.hit = true;
5263
+ rec.dist = 1e10;
5264
+ rec.direction = light.u;
5265
+ rec.pdf = 1.0;
5266
+ rec.emission = light.color * light.intensity;
5267
+ rec.type = light.type;
5268
+
5269
+ return rec;
4919
5270
 
4920
5271
  } else {
4921
5272
 
4922
5273
  // sample the light
4923
- return randomAreaLightSample( light, rayOrigin );
5274
+ return randomAreaLightSample( light, rayOrigin, ruv.yz );
4924
5275
 
4925
5276
  }
4926
5277
 
@@ -4953,6 +5304,64 @@ const shaderLayerTexelFetchFunctions = /*glsl */`
4953
5304
 
4954
5305
  `;
4955
5306
 
5307
+ const shaderRandFunctions = /* glsl */`
5308
+
5309
+ // https://www.shadertoy.com/view/wltcRS
5310
+ uvec4 WHITE_NOISE_SEED;
5311
+
5312
+ void rng_initialize( vec2 p, int frame ) {
5313
+
5314
+ // white noise seed
5315
+ WHITE_NOISE_SEED = uvec4( p, uint( frame ), uint( p.x ) + uint( p.y ) );
5316
+
5317
+ }
5318
+
5319
+ // https://www.pcg-random.org/
5320
+ void pcg4d( inout uvec4 v ) {
5321
+
5322
+ v = v * 1664525u + 1013904223u;
5323
+ v.x += v.y * v.w;
5324
+ v.y += v.z * v.x;
5325
+ v.z += v.x * v.y;
5326
+ v.w += v.y * v.z;
5327
+ v = v ^ ( v >> 16u );
5328
+ v.x += v.y*v.w;
5329
+ v.y += v.z*v.x;
5330
+ v.z += v.x*v.y;
5331
+ v.w += v.y*v.z;
5332
+
5333
+ }
5334
+
5335
+ // returns [ 0, 1 ]
5336
+ float rand() {
5337
+
5338
+ pcg4d( WHITE_NOISE_SEED );
5339
+ return float( WHITE_NOISE_SEED.x ) / float( 0xffffffffu );
5340
+
5341
+ }
5342
+
5343
+ vec2 rand2() {
5344
+
5345
+ pcg4d( WHITE_NOISE_SEED );
5346
+ return vec2( WHITE_NOISE_SEED.xy ) / float(0xffffffffu);
5347
+
5348
+ }
5349
+
5350
+ vec3 rand3() {
5351
+
5352
+ pcg4d( WHITE_NOISE_SEED );
5353
+ return vec3( WHITE_NOISE_SEED.xyz ) / float( 0xffffffffu );
5354
+
5355
+ }
5356
+
5357
+ vec4 rand4() {
5358
+
5359
+ pcg4d( WHITE_NOISE_SEED );
5360
+ return vec4( WHITE_NOISE_SEED ) / float( 0xffffffffu );
5361
+
5362
+ }
5363
+ `;
5364
+
4956
5365
  function copyArrayToArray( fromArray, fromStride, toArray, toStride, offset ) {
4957
5366
 
4958
5367
  if ( fromStride > toStride ) {
@@ -5212,6 +5621,7 @@ class PhysicalPathTracingMaterial extends MaterialBase {
5212
5621
  filterGlossyFactor: { value: 0.0 },
5213
5622
 
5214
5623
  backgroundAlpha: { value: 1.0 },
5624
+ sobolTexture: { value: null },
5215
5625
  },
5216
5626
 
5217
5627
  vertexShader: /* glsl */`
@@ -5238,6 +5648,9 @@ class PhysicalPathTracingMaterial extends MaterialBase {
5238
5648
  vec4 envMapTexelToLinear( vec4 a ) { return a; }
5239
5649
  #include <common>
5240
5650
 
5651
+ ${ shaderRandFunctions }
5652
+ ${ shaderSobolCommon }
5653
+ ${ shaderSobolSampling }
5241
5654
  ${ shaderStructs }
5242
5655
  ${ shaderIntersectFunction }
5243
5656
  ${ shaderMaterialStructs }
@@ -5298,9 +5711,9 @@ class PhysicalPathTracingMaterial extends MaterialBase {
5298
5711
 
5299
5712
  }
5300
5713
 
5301
- vec3 sampleBackground( vec3 direction ) {
5714
+ vec3 sampleBackground( vec3 direction, vec2 uv ) {
5302
5715
 
5303
- vec3 sampleDir = normalize( direction + getHemisphereSample( direction, rand2() ) * 0.5 * backgroundBlur );
5716
+ vec3 sampleDir = normalize( direction + getHemisphereSample( direction, uv ) * 0.5 * backgroundBlur );
5304
5717
 
5305
5718
  #if FEATURE_BACKGROUND_MAP
5306
5719
 
@@ -5326,6 +5739,8 @@ class PhysicalPathTracingMaterial extends MaterialBase {
5326
5739
 
5327
5740
  color = vec3( 1.0 );
5328
5741
 
5742
+ // TODO: we should be using sobol sampling here instead of rand but the sobol bounce and path indices need to be incremented
5743
+ // and then reset.
5329
5744
  for ( int i = 0; i < traversals; i ++ ) {
5330
5745
 
5331
5746
  if ( bvhIntersectFirstHit( bvh, rayOrigin, rayDirection, faceIndices, faceNormal, barycoord, side, dist ) ) {
@@ -5452,14 +5867,6 @@ class PhysicalPathTracingMaterial extends MaterialBase {
5452
5867
 
5453
5868
  }
5454
5869
 
5455
- // tentFilter from Peter Shirley's 'Realistic Ray Tracing (2nd Edition)' book, pg. 60
5456
- // erichlof/THREE.js-PathTracing-Renderer/
5457
- float tentFilter( float x ) {
5458
-
5459
- return x < 0.5 ? sqrt( 2.0 * x ) - 1.0 : 1.0 - sqrt( 2.0 - ( 2.0 * x ) );
5460
-
5461
- }
5462
-
5463
5870
  vec3 ndcToRayOrigin( vec2 coord ) {
5464
5871
 
5465
5872
  vec4 rayOrigin4 = cameraWorldMatrix * invProjectionMatrix * vec4( coord, - 1.0, 1.0 );
@@ -5471,13 +5878,13 @@ class PhysicalPathTracingMaterial extends MaterialBase {
5471
5878
  vec2 ssd = vec2( 1.0 ) / resolution;
5472
5879
 
5473
5880
  // Jitter the camera ray by finding a uv coordinate at a random sample
5474
- // around this pixel's UV coordinate
5475
- vec2 jitteredUv = vUv + vec2( tentFilter( rand() ) * ssd.x, tentFilter( rand() ) * ssd.y );
5881
+ // around this pixel's UV coordinate for AA
5882
+ vec2 ruv = sobol2( 0 );
5883
+ vec2 jitteredUv = vUv + vec2( tentFilter( ruv.x ) * ssd.x, tentFilter( ruv.y ) * ssd.y );
5476
5884
 
5477
5885
  #if CAMERA_TYPE == 2
5478
5886
 
5479
5887
  // Equirectangular projection
5480
-
5481
5888
  vec4 rayDirection4 = vec4( equirectUvToDirection( jitteredUv ), 0.0 );
5482
5889
  vec4 rayOrigin4 = vec4( 0.0, 0.0, 0.0, 1.0 );
5483
5890
 
@@ -5491,20 +5898,17 @@ class PhysicalPathTracingMaterial extends MaterialBase {
5491
5898
 
5492
5899
  // get [- 1, 1] normalized device coordinates
5493
5900
  vec2 ndc = 2.0 * jitteredUv - vec2( 1.0 );
5494
-
5495
5901
  rayOrigin = ndcToRayOrigin( ndc );
5496
5902
 
5497
5903
  #if CAMERA_TYPE == 1
5498
5904
 
5499
5905
  // Orthographic projection
5500
-
5501
5906
  rayDirection = ( cameraWorldMatrix * vec4( 0.0, 0.0, - 1.0, 0.0 ) ).xyz;
5502
5907
  rayDirection = normalize( rayDirection );
5503
5908
 
5504
5909
  #else
5505
5910
 
5506
5911
  // Perspective projection
5507
-
5508
5912
  rayDirection = normalize( mat3(cameraWorldMatrix) * ( invProjectionMatrix * vec4( ndc, 0.0, 1.0 ) ).xyz );
5509
5913
 
5510
5914
  #endif
@@ -5518,17 +5922,17 @@ class PhysicalPathTracingMaterial extends MaterialBase {
5518
5922
  vec3 focalPoint = rayOrigin + normalize( rayDirection ) * physicalCamera.focusDistance;
5519
5923
 
5520
5924
  // get the aperture sample
5521
- vec2 apertureSample = sampleAperture( physicalCamera.apertureBlades ) * physicalCamera.bokehSize * 0.5 * 1e-3;
5925
+ // if blades === 0 then we assume a circle
5926
+ vec3 shapeUVW= sobol3( 1 );
5927
+ int blades = physicalCamera.apertureBlades;
5928
+ float anamorphicRatio = physicalCamera.anamorphicRatio;
5929
+ vec2 apertureSample = blades == 0 ? sampleCircle( shapeUVW.xy ) : sampleRegularNGon( blades, shapeUVW );
5930
+ apertureSample *= physicalCamera.bokehSize * 0.5 * 1e-3;
5522
5931
 
5523
5932
  // rotate the aperture shape
5524
- float ac = cos( physicalCamera.apertureRotation );
5525
- float as = sin( physicalCamera.apertureRotation );
5526
- apertureSample = vec2(
5527
- apertureSample.x * ac - apertureSample.y * as,
5528
- apertureSample.x * as + apertureSample.y * ac
5529
- );
5530
- apertureSample.x *= saturate( physicalCamera.anamorphicRatio );
5531
- apertureSample.y *= saturate( 1.0 / physicalCamera.anamorphicRatio );
5933
+ apertureSample =
5934
+ rotateVector( apertureSample, physicalCamera.apertureRotation ) *
5935
+ saturate( vec2( anamorphicRatio, 1.0 / anamorphicRatio ) );
5532
5936
 
5533
5937
  // create the new ray
5534
5938
  rayOrigin += ( cameraWorldMatrix * vec4( apertureSample, 0.0, 0.0 ) ).xyz;
@@ -5544,6 +5948,8 @@ class PhysicalPathTracingMaterial extends MaterialBase {
5544
5948
  void main() {
5545
5949
 
5546
5950
  rng_initialize( gl_FragCoord.xy, seed );
5951
+ sobolPixelIndex = ( uint( gl_FragCoord.x ) << 16 ) | ( uint( gl_FragCoord.y ) );
5952
+ sobolPathIndex = uint( seed );
5547
5953
 
5548
5954
  vec3 rayDirection;
5549
5955
  vec3 rayOrigin;
@@ -5577,6 +5983,8 @@ class PhysicalPathTracingMaterial extends MaterialBase {
5577
5983
 
5578
5984
  for ( i = 0; i < bounces; i ++ ) {
5579
5985
 
5986
+ sobolBounceIndex ++;
5987
+
5580
5988
  bool hit = bvhIntersectFirstHit( bvh, rayOrigin, rayDirection, faceIndices, faceNormal, barycoord, side, dist );
5581
5989
 
5582
5990
  LightSampleRec lightHit = lightsClosestHit( lights.tex, lights.count, rayOrigin, rayDirection );
@@ -5591,9 +5999,8 @@ class PhysicalPathTracingMaterial extends MaterialBase {
5591
5999
 
5592
6000
  #if FEATURE_MIS
5593
6001
 
5594
- // NOTE: we skip MIS for spotlights since we haven't fixed the forward
5595
- // path tracing code path, yet
5596
- if ( lightHit.type == SPOT_LIGHT_TYPE ) {
6002
+ // NOTE: we skip MIS for punctual lights since they are not supported in forward PT case
6003
+ if ( lightHit.type == SPOT_LIGHT_TYPE || lightHit.type == DIR_LIGHT_TYPE || lightHit.type == POINT_LIGHT_TYPE ) {
5597
6004
 
5598
6005
  gl_FragColor.rgb += lightHit.emission * throughputColor;
5599
6006
 
@@ -5620,7 +6027,7 @@ class PhysicalPathTracingMaterial extends MaterialBase {
5620
6027
 
5621
6028
  if ( i == 0 || transmissiveRay ) {
5622
6029
 
5623
- gl_FragColor.rgb += sampleBackground( envRotation3x3 * rayDirection ) * throughputColor;
6030
+ gl_FragColor.rgb += sampleBackground( envRotation3x3 * rayDirection, sobol2( 2 ) ) * throughputColor;
5624
6031
  gl_FragColor.a = backgroundAlpha;
5625
6032
 
5626
6033
  } else {
@@ -5629,7 +6036,7 @@ class PhysicalPathTracingMaterial extends MaterialBase {
5629
6036
 
5630
6037
  // get the PDF of the hit envmap point
5631
6038
  vec3 envColor;
5632
- float envPdf = envMapSample( envRotation3x3 * rayDirection, envMapInfo, envColor );
6039
+ float envPdf = sampleEnvMap( envMapInfo, envRotation3x3 * rayDirection, envColor );
5633
6040
  envPdf /= float( lights.count + 1u );
5634
6041
 
5635
6042
  // and weight the contribution
@@ -5714,7 +6121,7 @@ class PhysicalPathTracingMaterial extends MaterialBase {
5714
6121
  || useAlphaTest && albedo.a < alphaTest
5715
6122
 
5716
6123
  // opacity
5717
- || material.transparent && ! useAlphaTest && albedo.a < rand()
6124
+ || material.transparent && ! useAlphaTest && albedo.a < sobol( 3 )
5718
6125
  ) {
5719
6126
 
5720
6127
  vec3 point = rayOrigin + rayDirection * dist;
@@ -5775,6 +6182,15 @@ class PhysicalPathTracingMaterial extends MaterialBase {
5775
6182
  }
5776
6183
 
5777
6184
  // normal
6185
+ if ( material.flatShading ) {
6186
+
6187
+ // if we're rendering a flat shaded object then use the face normals - the face normal
6188
+ // is provided based on the side the ray hits the mesh so flip it to align with the
6189
+ // interpolated vertex normals.
6190
+ normal = faceNormal * side;
6191
+
6192
+ }
6193
+
5778
6194
  vec3 baseNormal = normal;
5779
6195
  if ( material.normalMap != - 1 ) {
5780
6196
 
@@ -5957,7 +6373,7 @@ class PhysicalPathTracingMaterial extends MaterialBase {
5957
6373
  vec3 clearcoatOutgoing = - normalize( clearcoatInvBasis * rayDirection );
5958
6374
  sampleRec = bsdfSample( outgoing, clearcoatOutgoing, normalBasis, invBasis, clearcoatNormalBasis, clearcoatInvBasis, surfaceRec );
5959
6375
 
5960
- isShadowRay = sampleRec.specularPdf < rand();
6376
+ isShadowRay = sampleRec.specularPdf < sobol( 4 );
5961
6377
 
5962
6378
  // adjust the hit point by the surface normal by a factor of some offset and the
5963
6379
  // maximum component-wise value of the current point to accommodate floating point
@@ -5974,10 +6390,10 @@ class PhysicalPathTracingMaterial extends MaterialBase {
5974
6390
  #if FEATURE_MIS
5975
6391
 
5976
6392
  // uniformly pick a light or environment map
5977
- if( rand() > 1.0 / float( lights.count + 1u ) ) {
6393
+ if( sobol( 5 ) > 1.0 / float( lights.count + 1u ) ) {
5978
6394
 
5979
6395
  // sample a light or environment
5980
- LightSampleRec lightSampleRec = randomLightSample( lights.tex, iesProfiles, lights.count, rayOrigin );
6396
+ LightSampleRec lightSampleRec = randomLightSample( lights.tex, iesProfiles, lights.count, rayOrigin, sobol3( 6 ) );
5981
6397
 
5982
6398
  bool isSampleBelowSurface = dot( faceNormal, lightSampleRec.direction ) < 0.0;
5983
6399
  if ( isSampleBelowSurface ) {
@@ -6001,7 +6417,7 @@ class PhysicalPathTracingMaterial extends MaterialBase {
6001
6417
 
6002
6418
  // weight the direct light contribution
6003
6419
  float lightPdf = lightSampleRec.pdf / float( lights.count + 1u );
6004
- float misWeight = misHeuristic( lightPdf, lightMaterialPdf );
6420
+ float misWeight = lightSampleRec.type == SPOT_LIGHT_TYPE || lightSampleRec.type == DIR_LIGHT_TYPE || lightSampleRec.type == POINT_LIGHT_TYPE ? 1.0 : misHeuristic( lightPdf, lightMaterialPdf );
6005
6421
  gl_FragColor.rgb += lightSampleRec.emission * throughputColor * sampleColor * misWeight / lightPdf;
6006
6422
 
6007
6423
  }
@@ -6012,7 +6428,7 @@ class PhysicalPathTracingMaterial extends MaterialBase {
6012
6428
 
6013
6429
  // find a sample in the environment map to include in the contribution
6014
6430
  vec3 envColor, envDirection;
6015
- float envPdf = randomEnvMapSample( envMapInfo, envColor, envDirection );
6431
+ float envPdf = sampleEnvMapProbability( envMapInfo, sobol2( 7 ), envColor, envDirection );
6016
6432
  envDirection = invEnvRotation3x3 * envDirection;
6017
6433
 
6018
6434
  // this env sampling is not set up for transmissive sampling and yields overly bright