three-gpu-pathtracer 0.0.11 → 0.0.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. package/LICENSE +21 -21
  2. package/README.md +886 -886
  3. package/build/index.module.js +6478 -6470
  4. package/build/index.module.js.map +1 -1
  5. package/build/index.umd.cjs +6473 -6465
  6. package/build/index.umd.cjs.map +1 -1
  7. package/package.json +72 -69
  8. package/src/core/DynamicPathTracingSceneGenerator.js +119 -119
  9. package/src/core/MaterialReducer.js +256 -256
  10. package/src/core/PathTracingRenderer.js +275 -275
  11. package/src/core/PathTracingSceneGenerator.js +69 -69
  12. package/src/index.js +39 -39
  13. package/src/materials/AlphaDisplayMaterial.js +48 -48
  14. package/src/materials/AmbientOcclusionMaterial.js +199 -199
  15. package/src/materials/BlendMaterial.js +67 -67
  16. package/src/materials/DenoiseMaterial.js +142 -142
  17. package/src/materials/GraphMaterial.js +243 -243
  18. package/src/materials/LambertPathTracingMaterial.js +285 -285
  19. package/src/materials/MaterialBase.js +56 -56
  20. package/src/materials/PhysicalPathTracingMaterial.js +982 -982
  21. package/src/objects/EquirectCamera.js +13 -13
  22. package/src/objects/PhysicalCamera.js +28 -28
  23. package/src/objects/PhysicalSpotLight.js +14 -14
  24. package/src/objects/ShapedAreaLight.js +12 -12
  25. package/src/shader/shaderBvhAnyHit.js +76 -0
  26. package/src/shader/shaderEnvMapSampling.js +58 -58
  27. package/src/shader/shaderGGXFunctions.js +100 -100
  28. package/src/shader/shaderIridescenceFunctions.js +130 -130
  29. package/src/shader/shaderLayerTexelFetchFunctions.js +25 -25
  30. package/src/shader/shaderLightSampling.js +229 -229
  31. package/src/shader/shaderMaterialSampling.js +506 -498
  32. package/src/shader/shaderRandFunctions.js +57 -57
  33. package/src/shader/shaderSheenFunctions.js +98 -98
  34. package/src/shader/shaderSobolSampling.js +256 -256
  35. package/src/shader/shaderStructs.js +325 -325
  36. package/src/shader/shaderUtils.js +361 -361
  37. package/src/textures/GradientEquirectTexture.js +35 -35
  38. package/src/textures/ProceduralEquirectTexture.js +75 -75
  39. package/src/uniforms/AttributesTextureArray.js +35 -35
  40. package/src/uniforms/EquirectHdrInfoUniform.js +259 -259
  41. package/src/uniforms/FloatAttributeTextureArray.js +169 -169
  42. package/src/uniforms/IESProfilesTexture.js +100 -100
  43. package/src/uniforms/LightsInfoUniformStruct.js +207 -207
  44. package/src/uniforms/MaterialsTexture.js +426 -426
  45. package/src/uniforms/PhysicalCameraUniform.js +36 -36
  46. package/src/uniforms/RenderTarget2DArray.js +97 -97
  47. package/src/uniforms/utils.js +30 -30
  48. package/src/utils/BlurredEnvMapGenerator.js +116 -116
  49. package/src/utils/GeometryPreparationUtils.js +214 -214
  50. package/src/utils/IESLoader.js +325 -325
  51. package/src/utils/SobolNumberMapGenerator.js +80 -80
  52. package/src/utils/UVUnwrapper.js +101 -101
  53. package/src/workers/PathTracingSceneWorker.js +42 -42
@@ -1,982 +1,982 @@
1
- import { Matrix4, Vector2 } from 'three';
2
- import { MaterialBase } from './MaterialBase.js';
3
- import {
4
- MeshBVHUniformStruct, UIntVertexAttributeTexture,
5
- shaderStructs, shaderIntersectFunction,
6
- } from 'three-mesh-bvh';
7
- import { shaderMaterialStructs, shaderLightStruct } from '../shader/shaderStructs.js';
8
- import { MaterialsTexture } from '../uniforms/MaterialsTexture.js';
9
- import { RenderTarget2DArray } from '../uniforms/RenderTarget2DArray.js';
10
- import { shaderMaterialSampling } from '../shader/shaderMaterialSampling.js';
11
- import { shaderEnvMapSampling } from '../shader/shaderEnvMapSampling.js';
12
- import { shaderLightSampling } from '../shader/shaderLightSampling.js';
13
- import { shaderSobolCommon, shaderSobolSampling } from '../shader/shaderSobolSampling.js';
14
- import { shaderUtils } from '../shader/shaderUtils.js';
15
- import { shaderLayerTexelFetchFunctions } from '../shader/shaderLayerTexelFetchFunctions.js';
16
- import { shaderRandFunctions } from '../shader/shaderRandFunctions.js';
17
- import { PhysicalCameraUniform } from '../uniforms/PhysicalCameraUniform.js';
18
- import { EquirectHdrInfoUniform } from '../uniforms/EquirectHdrInfoUniform.js';
19
- import { LightsInfoUniformStruct } from '../uniforms/LightsInfoUniformStruct.js';
20
- import { IESProfilesTexture } from '../uniforms/IESProfilesTexture.js';
21
- import { AttributesTextureArray } from '../uniforms/AttributesTextureArray.js';
22
-
23
- export class PhysicalPathTracingMaterial extends MaterialBase {
24
-
25
- onBeforeRender() {
26
-
27
- this.setDefine( 'FEATURE_DOF', this.physicalCamera.bokehSize === 0 ? 0 : 1 );
28
- this.setDefine( 'FEATURE_BACKGROUND_MAP', this.backgroundMap ? 1 : 0 );
29
-
30
- }
31
-
32
- constructor( parameters ) {
33
-
34
- super( {
35
-
36
- transparent: true,
37
- depthWrite: false,
38
-
39
- defines: {
40
- FEATURE_MIS: 1,
41
- FEATURE_DOF: 1,
42
- FEATURE_BACKGROUND_MAP: 0,
43
- TRANSPARENT_TRAVERSALS: 5,
44
- // 0 = Perspective
45
- // 1 = Orthographic
46
- // 2 = Equirectangular
47
- CAMERA_TYPE: 0,
48
-
49
- ATTR_NORMAL: 0,
50
- ATTR_TANGENT: 1,
51
- ATTR_UV: 2,
52
- ATTR_COLOR: 3,
53
- },
54
-
55
- uniforms: {
56
- resolution: { value: new Vector2() },
57
-
58
- bounces: { value: 3 },
59
- physicalCamera: { value: new PhysicalCameraUniform() },
60
-
61
- bvh: { value: new MeshBVHUniformStruct() },
62
- attributesArray: { value: new AttributesTextureArray() },
63
- materialIndexAttribute: { value: new UIntVertexAttributeTexture() },
64
- materials: { value: new MaterialsTexture() },
65
- textures: { value: new RenderTarget2DArray().texture },
66
- lights: { value: new LightsInfoUniformStruct() },
67
- iesProfiles: { value: new IESProfilesTexture().texture },
68
- cameraWorldMatrix: { value: new Matrix4() },
69
- invProjectionMatrix: { value: new Matrix4() },
70
- backgroundBlur: { value: 0.0 },
71
- environmentIntensity: { value: 1.0 },
72
- environmentRotation: { value: new Matrix4() },
73
- envMapInfo: { value: new EquirectHdrInfoUniform() },
74
- backgroundMap: { value: null },
75
-
76
- seed: { value: 0 },
77
- opacity: { value: 1 },
78
- filterGlossyFactor: { value: 0.0 },
79
-
80
- backgroundAlpha: { value: 1.0 },
81
- sobolTexture: { value: null },
82
- },
83
-
84
- vertexShader: /* glsl */`
85
-
86
- varying vec2 vUv;
87
- void main() {
88
-
89
- vec4 mvPosition = vec4( position, 1.0 );
90
- mvPosition = modelViewMatrix * mvPosition;
91
- gl_Position = projectionMatrix * mvPosition;
92
-
93
- vUv = uv;
94
-
95
- }
96
-
97
- `,
98
-
99
- fragmentShader: /* glsl */`
100
- #define RAY_OFFSET 1e-4
101
-
102
- precision highp isampler2D;
103
- precision highp usampler2D;
104
- precision highp sampler2DArray;
105
- vec4 envMapTexelToLinear( vec4 a ) { return a; }
106
- #include <common>
107
-
108
- ${ shaderRandFunctions }
109
- ${ shaderSobolCommon }
110
- ${ shaderSobolSampling }
111
- ${ shaderStructs }
112
- ${ shaderIntersectFunction }
113
- ${ shaderMaterialStructs }
114
- ${ shaderLightStruct }
115
-
116
- ${ shaderLayerTexelFetchFunctions }
117
- ${ shaderUtils }
118
- ${ shaderMaterialSampling }
119
- ${ shaderEnvMapSampling }
120
-
121
- uniform mat4 environmentRotation;
122
- uniform float backgroundBlur;
123
- uniform float backgroundAlpha;
124
-
125
- #if FEATURE_BACKGROUND_MAP
126
-
127
- uniform sampler2D backgroundMap;
128
-
129
- #endif
130
-
131
- #if FEATURE_DOF
132
-
133
- uniform PhysicalCamera physicalCamera;
134
-
135
- #endif
136
-
137
- uniform vec2 resolution;
138
- uniform int bounces;
139
- uniform mat4 cameraWorldMatrix;
140
- uniform mat4 invProjectionMatrix;
141
- uniform sampler2DArray attributesArray;
142
- uniform usampler2D materialIndexAttribute;
143
- uniform BVH bvh;
144
- uniform float environmentIntensity;
145
- uniform float filterGlossyFactor;
146
- uniform int seed;
147
- uniform float opacity;
148
- uniform sampler2D materials;
149
- uniform LightsInfo lights;
150
- uniform sampler2DArray iesProfiles;
151
-
152
- ${ shaderLightSampling }
153
-
154
- uniform EquirectHdrInfo envMapInfo;
155
-
156
- uniform sampler2DArray textures;
157
- varying vec2 vUv;
158
-
159
- float applyFilteredGlossy( float roughness, float accumulatedRoughness ) {
160
-
161
- return clamp(
162
- max(
163
- roughness,
164
- accumulatedRoughness * filterGlossyFactor * 5.0 ),
165
- 0.0,
166
- 1.0
167
- );
168
-
169
- }
170
-
171
- vec3 sampleBackground( vec3 direction, vec2 uv ) {
172
-
173
- vec3 sampleDir = normalize( direction + getHemisphereSample( direction, uv ) * 0.5 * backgroundBlur );
174
-
175
- #if FEATURE_BACKGROUND_MAP
176
-
177
- return sampleEquirectEnvMapColor( sampleDir, backgroundMap );
178
-
179
- #else
180
-
181
- return environmentIntensity * sampleEquirectEnvMapColor( sampleDir, envMapInfo.map );
182
-
183
- #endif
184
-
185
- }
186
-
187
- // step through multiple surface hits and accumulate color attenuation based on transmissive surfaces
188
- bool attenuateHit( BVH bvh, vec3 rayOrigin, vec3 rayDirection, int traversals, bool isShadowRay, out vec3 color ) {
189
-
190
- // hit results
191
- uvec4 faceIndices = uvec4( 0u );
192
- vec3 faceNormal = vec3( 0.0, 0.0, 1.0 );
193
- vec3 barycoord = vec3( 0.0 );
194
- float side = 1.0;
195
- float dist = 0.0;
196
-
197
- color = vec3( 1.0 );
198
-
199
- // TODO: we should be using sobol sampling here instead of rand but the sobol bounce and path indices need to be incremented
200
- // and then reset.
201
- for ( int i = 0; i < traversals; i ++ ) {
202
-
203
- if ( bvhIntersectFirstHit( bvh, rayOrigin, rayDirection, faceIndices, faceNormal, barycoord, side, dist ) ) {
204
-
205
- // TODO: attenuate the contribution based on the PDF of the resulting ray including refraction values
206
- // Should be able to work using the material BSDF functions which will take into account specularity, etc.
207
- // TODO: should we account for emissive surfaces here?
208
-
209
- vec2 uv = textureSampleBarycoord( attributesArray, ATTR_UV, barycoord, faceIndices.xyz ).xy;
210
- vec4 vertexColor = textureSampleBarycoord( attributesArray, ATTR_COLOR, barycoord, faceIndices.xyz );
211
-
212
- uint materialIndex = uTexelFetch1D( materialIndexAttribute, faceIndices.x ).r;
213
- Material material = readMaterialInfo( materials, materialIndex );
214
-
215
- // adjust the ray to the new surface
216
- bool isBelowSurface = dot( rayDirection, faceNormal ) < 0.0;
217
- vec3 point = rayOrigin + rayDirection * dist;
218
- vec3 absPoint = abs( point );
219
- float maxPoint = max( absPoint.x, max( absPoint.y, absPoint.z ) );
220
- rayOrigin = point + faceNormal * ( maxPoint + 1.0 ) * ( isBelowSurface ? - RAY_OFFSET : RAY_OFFSET );
221
-
222
- if ( ! material.castShadow && isShadowRay ) {
223
-
224
- continue;
225
-
226
- }
227
-
228
- // Opacity Test
229
-
230
- // albedo
231
- vec4 albedo = vec4( material.color, material.opacity );
232
- if ( material.map != - 1 ) {
233
-
234
- vec3 uvPrime = material.mapTransform * vec3( uv, 1 );
235
- albedo *= texture2D( textures, vec3( uvPrime.xy, material.map ) );
236
-
237
- }
238
-
239
- if ( material.vertexColors ) {
240
-
241
- albedo *= vertexColor;
242
-
243
- }
244
-
245
- // alphaMap
246
- if ( material.alphaMap != - 1 ) {
247
-
248
- albedo.a *= texture2D( textures, vec3( uv, material.alphaMap ) ).x;
249
-
250
- }
251
-
252
- // transmission
253
- float transmission = material.transmission;
254
- if ( material.transmissionMap != - 1 ) {
255
-
256
- vec3 uvPrime = material.transmissionMapTransform * vec3( uv, 1 );
257
- transmission *= texture2D( textures, vec3( uvPrime.xy, material.transmissionMap ) ).r;
258
-
259
- }
260
-
261
- // metalness
262
- float metalness = material.metalness;
263
- if ( material.metalnessMap != - 1 ) {
264
-
265
- vec3 uvPrime = material.metalnessMapTransform * vec3( uv, 1 );
266
- metalness *= texture2D( textures, vec3( uvPrime.xy, material.metalnessMap ) ).b;
267
-
268
- }
269
-
270
- float alphaTest = material.alphaTest;
271
- bool useAlphaTest = alphaTest != 0.0;
272
- float transmissionFactor = ( 1.0 - metalness ) * transmission;
273
- if (
274
- transmissionFactor < rand() && ! (
275
- // material sidedness
276
- material.side != 0.0 && side == material.side
277
-
278
- // alpha test
279
- || useAlphaTest && albedo.a < alphaTest
280
-
281
- // opacity
282
- || material.transparent && ! useAlphaTest && albedo.a < rand()
283
- )
284
- ) {
285
-
286
- return true;
287
-
288
- }
289
-
290
- if ( side == 1.0 && isBelowSurface ) {
291
-
292
- // only attenuate by surface color on the way in
293
- color *= mix( vec3( 1.0 ), albedo.rgb, transmissionFactor );
294
-
295
- } else if ( side == - 1.0 ) {
296
-
297
- // attenuate by medium once we hit the opposite side of the model
298
- color *= transmissionAttenuation( dist, material.attenuationColor, material.attenuationDistance );
299
-
300
- }
301
-
302
- } else {
303
-
304
- return false;
305
-
306
- }
307
-
308
- }
309
-
310
- return true;
311
-
312
- }
313
-
314
- // returns whether the ray hit anything before a certain distance, not just the first surface. Could be optimized to not check the full hierarchy.
315
- bool anyCloserHit( BVH bvh, vec3 rayOrigin, vec3 rayDirection, float maxDist ) {
316
-
317
- uvec4 faceIndices = uvec4( 0u );
318
- vec3 faceNormal = vec3( 0.0, 0.0, 1.0 );
319
- vec3 barycoord = vec3( 0.0 );
320
- float side = 1.0;
321
- float dist = 0.0;
322
- bool hit = bvhIntersectFirstHit( bvh, rayOrigin, rayDirection, faceIndices, faceNormal, barycoord, side, dist );
323
- return hit && dist < maxDist;
324
-
325
- }
326
-
327
- vec3 ndcToRayOrigin( vec2 coord ) {
328
-
329
- vec4 rayOrigin4 = cameraWorldMatrix * invProjectionMatrix * vec4( coord, - 1.0, 1.0 );
330
- return rayOrigin4.xyz / rayOrigin4.w;
331
- }
332
-
333
- void getCameraRay( out vec3 rayDirection, out vec3 rayOrigin ) {
334
-
335
- vec2 ssd = vec2( 1.0 ) / resolution;
336
-
337
- // Jitter the camera ray by finding a uv coordinate at a random sample
338
- // around this pixel's UV coordinate for AA
339
- vec2 ruv = sobol2( 0 );
340
- vec2 jitteredUv = vUv + vec2( tentFilter( ruv.x ) * ssd.x, tentFilter( ruv.y ) * ssd.y );
341
-
342
- #if CAMERA_TYPE == 2
343
-
344
- // Equirectangular projection
345
- vec4 rayDirection4 = vec4( equirectUvToDirection( jitteredUv ), 0.0 );
346
- vec4 rayOrigin4 = vec4( 0.0, 0.0, 0.0, 1.0 );
347
-
348
- rayDirection4 = cameraWorldMatrix * rayDirection4;
349
- rayOrigin4 = cameraWorldMatrix * rayOrigin4;
350
-
351
- rayDirection = normalize( rayDirection4.xyz );
352
- rayOrigin = rayOrigin4.xyz / rayOrigin4.w;
353
-
354
- #else
355
-
356
- // get [- 1, 1] normalized device coordinates
357
- vec2 ndc = 2.0 * jitteredUv - vec2( 1.0 );
358
- rayOrigin = ndcToRayOrigin( ndc );
359
-
360
- #if CAMERA_TYPE == 1
361
-
362
- // Orthographic projection
363
- rayDirection = ( cameraWorldMatrix * vec4( 0.0, 0.0, - 1.0, 0.0 ) ).xyz;
364
- rayDirection = normalize( rayDirection );
365
-
366
- #else
367
-
368
- // Perspective projection
369
- rayDirection = normalize( mat3(cameraWorldMatrix) * ( invProjectionMatrix * vec4( ndc, 0.0, 1.0 ) ).xyz );
370
-
371
- #endif
372
-
373
- #endif
374
-
375
- #if FEATURE_DOF
376
- {
377
-
378
- // depth of field
379
- vec3 focalPoint = rayOrigin + normalize( rayDirection ) * physicalCamera.focusDistance;
380
-
381
- // get the aperture sample
382
- // if blades === 0 then we assume a circle
383
- vec3 shapeUVW= sobol3( 1 );
384
- int blades = physicalCamera.apertureBlades;
385
- float anamorphicRatio = physicalCamera.anamorphicRatio;
386
- vec2 apertureSample = blades == 0 ? sampleCircle( shapeUVW.xy ) : sampleRegularNGon( blades, shapeUVW );
387
- apertureSample *= physicalCamera.bokehSize * 0.5 * 1e-3;
388
-
389
- // rotate the aperture shape
390
- apertureSample =
391
- rotateVector( apertureSample, physicalCamera.apertureRotation ) *
392
- saturate( vec2( anamorphicRatio, 1.0 / anamorphicRatio ) );
393
-
394
- // create the new ray
395
- rayOrigin += ( cameraWorldMatrix * vec4( apertureSample, 0.0, 0.0 ) ).xyz;
396
- rayDirection = focalPoint - rayOrigin;
397
-
398
- }
399
- #endif
400
-
401
- rayDirection = normalize( rayDirection );
402
-
403
- }
404
-
405
- void main() {
406
-
407
- rng_initialize( gl_FragCoord.xy, seed );
408
- sobolPixelIndex = ( uint( gl_FragCoord.x ) << 16 ) | ( uint( gl_FragCoord.y ) );
409
- sobolPathIndex = uint( seed );
410
-
411
- vec3 rayDirection;
412
- vec3 rayOrigin;
413
-
414
- getCameraRay( rayDirection, rayOrigin );
415
-
416
- // inverse environment rotation
417
- mat3 envRotation3x3 = mat3( environmentRotation );
418
- mat3 invEnvRotation3x3 = inverse( envRotation3x3 );
419
-
420
- // final color
421
- gl_FragColor = vec4( 0.0 );
422
- gl_FragColor.a = 1.0;
423
-
424
- // hit results
425
- uvec4 faceIndices = uvec4( 0u );
426
- vec3 faceNormal = vec3( 0.0, 0.0, 1.0 );
427
- vec3 barycoord = vec3( 0.0 );
428
- float side = 1.0;
429
- float dist = 0.0;
430
-
431
- // path tracing state
432
- float accumulatedRoughness = 0.0;
433
- float accumulatedClearcoatRoughness = 0.0;
434
- bool transmissiveRay = true;
435
- int transparentTraversals = TRANSPARENT_TRAVERSALS;
436
- vec3 throughputColor = vec3( 1.0 );
437
- SampleRec sampleRec;
438
- int i;
439
- bool isShadowRay = false;
440
-
441
- for ( i = 0; i < bounces; i ++ ) {
442
-
443
- sobolBounceIndex ++;
444
-
445
- bool hit = bvhIntersectFirstHit( bvh, rayOrigin, rayDirection, faceIndices, faceNormal, barycoord, side, dist );
446
-
447
- LightSampleRec lightHit = lightsClosestHit( lights.tex, lights.count, rayOrigin, rayDirection );
448
-
449
- if ( lightHit.hit && ( lightHit.dist < dist || !hit ) ) {
450
-
451
- if ( i == 0 || transmissiveRay ) {
452
-
453
- gl_FragColor.rgb += lightHit.emission * throughputColor;
454
-
455
- } else {
456
-
457
- #if FEATURE_MIS
458
-
459
- // NOTE: we skip MIS for punctual lights since they are not supported in forward PT case
460
- if ( lightHit.type == SPOT_LIGHT_TYPE || lightHit.type == DIR_LIGHT_TYPE || lightHit.type == POINT_LIGHT_TYPE ) {
461
-
462
- gl_FragColor.rgb += lightHit.emission * throughputColor;
463
-
464
- } else {
465
-
466
- // weight the contribution
467
- float misWeight = misHeuristic( sampleRec.pdf, lightHit.pdf / float( lights.count + 1u ) );
468
- gl_FragColor.rgb += lightHit.emission * throughputColor * misWeight;
469
-
470
- }
471
-
472
- #else
473
-
474
- gl_FragColor.rgb += lightHit.emission * throughputColor;
475
-
476
- #endif
477
-
478
- }
479
- break;
480
-
481
- }
482
-
483
- if ( ! hit ) {
484
-
485
- if ( i == 0 || transmissiveRay ) {
486
-
487
- gl_FragColor.rgb += sampleBackground( envRotation3x3 * rayDirection, sobol2( 2 ) ) * throughputColor;
488
- gl_FragColor.a = backgroundAlpha;
489
-
490
- } else {
491
-
492
- #if FEATURE_MIS
493
-
494
- // get the PDF of the hit envmap point
495
- vec3 envColor;
496
- float envPdf = sampleEnvMap( envMapInfo, envRotation3x3 * rayDirection, envColor );
497
- envPdf /= float( lights.count + 1u );
498
-
499
- // and weight the contribution
500
- float misWeight = misHeuristic( sampleRec.pdf, envPdf );
501
- gl_FragColor.rgb += environmentIntensity * envColor * throughputColor * misWeight;
502
-
503
- #else
504
-
505
- gl_FragColor.rgb +=
506
- environmentIntensity *
507
- sampleEquirectEnvMapColor( envRotation3x3 * rayDirection, envMapInfo.map ) *
508
- throughputColor;
509
-
510
- #endif
511
-
512
- }
513
- break;
514
-
515
- }
516
-
517
- uint materialIndex = uTexelFetch1D( materialIndexAttribute, faceIndices.x ).r;
518
- Material material = readMaterialInfo( materials, materialIndex );
519
-
520
- if ( material.matte && i == 0 ) {
521
-
522
- gl_FragColor = vec4( 0.0 );
523
- break;
524
-
525
- }
526
-
527
- // if we've determined that this is a shadow ray and we've hit an item with no shadow casting
528
- // then skip it
529
- if ( ! material.castShadow && isShadowRay ) {
530
-
531
- vec3 point = rayOrigin + rayDirection * dist;
532
- vec3 absPoint = abs( point );
533
- float maxPoint = max( absPoint.x, max( absPoint.y, absPoint.z ) );
534
- rayOrigin = point - ( maxPoint + 1.0 ) * faceNormal * RAY_OFFSET;
535
-
536
- continue;
537
-
538
- }
539
-
540
- // uv coord for textures
541
- vec2 uv = textureSampleBarycoord( attributesArray, ATTR_UV, barycoord, faceIndices.xyz ).xy;
542
- vec4 vertexColor = textureSampleBarycoord( attributesArray, ATTR_COLOR, barycoord, faceIndices.xyz );
543
-
544
- // albedo
545
- vec4 albedo = vec4( material.color, material.opacity );
546
- if ( material.map != - 1 ) {
547
-
548
- vec3 uvPrime = material.mapTransform * vec3( uv, 1 );
549
- albedo *= texture2D( textures, vec3( uvPrime.xy, material.map ) );
550
- }
551
-
552
- if ( material.vertexColors ) {
553
-
554
- albedo *= vertexColor;
555
-
556
- }
557
-
558
- // alphaMap
559
- if ( material.alphaMap != - 1 ) {
560
-
561
- albedo.a *= texture2D( textures, vec3( uv, material.alphaMap ) ).x;
562
-
563
- }
564
-
565
- // possibly skip this sample if it's transparent, alpha test is enabled, or we hit the wrong material side
566
- // and it's single sided.
567
- // - alpha test is disabled when it === 0
568
- // - the material sidedness test is complicated because we want light to pass through the back side but still
569
- // be able to see the front side. This boolean checks if the side we hit is the front side on the first ray
570
- // and we're rendering the other then we skip it. Do the opposite on subsequent bounces to get incoming light.
571
- float alphaTest = material.alphaTest;
572
- bool useAlphaTest = alphaTest != 0.0;
573
- if (
574
- // material sidedness
575
- material.side != 0.0 && side != material.side
576
-
577
- // alpha test
578
- || useAlphaTest && albedo.a < alphaTest
579
-
580
- // opacity
581
- || material.transparent && ! useAlphaTest && albedo.a < sobol( 3 )
582
- ) {
583
-
584
- vec3 point = rayOrigin + rayDirection * dist;
585
- vec3 absPoint = abs( point );
586
- float maxPoint = max( absPoint.x, max( absPoint.y, absPoint.z ) );
587
- rayOrigin = point - ( maxPoint + 1.0 ) * faceNormal * RAY_OFFSET;
588
-
589
- // only allow a limited number of transparency discards otherwise we could
590
- // crash the context with too long a loop.
591
- i -= sign( transparentTraversals );
592
- transparentTraversals -= sign( transparentTraversals );
593
- continue;
594
-
595
- }
596
-
597
- // fetch the interpolated smooth normal
598
- vec3 normal = normalize( textureSampleBarycoord(
599
- attributesArray,
600
- ATTR_NORMAL,
601
- barycoord,
602
- faceIndices.xyz
603
- ).xyz );
604
-
605
- // roughness
606
- float roughness = material.roughness;
607
- if ( material.roughnessMap != - 1 ) {
608
-
609
- vec3 uvPrime = material.roughnessMapTransform * vec3( uv, 1 );
610
- roughness *= texture2D( textures, vec3( uvPrime.xy, material.roughnessMap ) ).g;
611
-
612
- }
613
-
614
- // metalness
615
- float metalness = material.metalness;
616
- if ( material.metalnessMap != - 1 ) {
617
-
618
- vec3 uvPrime = material.metalnessMapTransform * vec3( uv, 1 );
619
- metalness *= texture2D( textures, vec3( uvPrime.xy, material.metalnessMap ) ).b;
620
-
621
- }
622
-
623
- // emission
624
- vec3 emission = material.emissiveIntensity * material.emissive;
625
- if ( material.emissiveMap != - 1 ) {
626
-
627
- vec3 uvPrime = material.emissiveMapTransform * vec3( uv, 1 );
628
- emission *= texture2D( textures, vec3( uvPrime.xy, material.emissiveMap ) ).xyz;
629
-
630
- }
631
-
632
- // transmission
633
- float transmission = material.transmission;
634
- if ( material.transmissionMap != - 1 ) {
635
-
636
- vec3 uvPrime = material.transmissionMapTransform * vec3( uv, 1 );
637
- transmission *= texture2D( textures, vec3( uvPrime.xy, material.transmissionMap ) ).r;
638
-
639
- }
640
-
641
- // normal
642
- if ( material.flatShading ) {
643
-
644
- // if we're rendering a flat shaded object then use the face normals - the face normal
645
- // is provided based on the side the ray hits the mesh so flip it to align with the
646
- // interpolated vertex normals.
647
- normal = faceNormal * side;
648
-
649
- }
650
-
651
- vec3 baseNormal = normal;
652
- if ( material.normalMap != - 1 ) {
653
-
654
- vec4 tangentSample = textureSampleBarycoord(
655
- attributesArray,
656
- ATTR_TANGENT,
657
- barycoord,
658
- faceIndices.xyz
659
- );
660
-
661
- // some provided tangents can be malformed (0, 0, 0) causing the normal to be degenerate
662
- // resulting in NaNs and slow path tracing.
663
- if ( length( tangentSample.xyz ) > 0.0 ) {
664
-
665
- vec3 tangent = normalize( tangentSample.xyz );
666
- vec3 bitangent = normalize( cross( normal, tangent ) * tangentSample.w );
667
- mat3 vTBN = mat3( tangent, bitangent, normal );
668
-
669
- vec3 uvPrime = material.normalMapTransform * vec3( uv, 1 );
670
- vec3 texNormal = texture2D( textures, vec3( uvPrime.xy, material.normalMap ) ).xyz * 2.0 - 1.0;
671
- texNormal.xy *= material.normalScale;
672
- normal = vTBN * texNormal;
673
-
674
- }
675
-
676
- }
677
-
678
- normal *= side;
679
-
680
- // clearcoat
681
- float clearcoat = material.clearcoat;
682
- if ( material.clearcoatMap != - 1 ) {
683
-
684
- vec3 uvPrime = material.clearcoatMapTransform * vec3( uv, 1 );
685
- clearcoat *= texture2D( textures, vec3( uvPrime.xy, material.clearcoatMap ) ).r;
686
-
687
- }
688
-
689
- // clearcoatRoughness
690
- float clearcoatRoughness = material.clearcoatRoughness;
691
- if ( material.clearcoatRoughnessMap != - 1 ) {
692
-
693
- vec3 uvPrime = material.clearcoatRoughnessMapTransform * vec3( uv, 1 );
694
- clearcoatRoughness *= texture2D( textures, vec3( uvPrime.xy, material.clearcoatRoughnessMap ) ).g;
695
-
696
- }
697
-
698
- // clearcoatNormal
699
- vec3 clearcoatNormal = baseNormal;
700
- if ( material.clearcoatNormalMap != - 1 ) {
701
-
702
- vec4 tangentSample = textureSampleBarycoord(
703
- attributesArray,
704
- ATTR_TANGENT,
705
- barycoord,
706
- faceIndices.xyz
707
- );
708
-
709
- // some provided tangents can be malformed (0, 0, 0) causing the normal to be degenerate
710
- // resulting in NaNs and slow path tracing.
711
- if ( length( tangentSample.xyz ) > 0.0 ) {
712
-
713
- vec3 tangent = normalize( tangentSample.xyz );
714
- vec3 bitangent = normalize( cross( clearcoatNormal, tangent ) * tangentSample.w );
715
- mat3 vTBN = mat3( tangent, bitangent, clearcoatNormal );
716
-
717
- vec3 uvPrime = material.clearcoatNormalMapTransform * vec3( uv, 1 );
718
- vec3 texNormal = texture2D( textures, vec3( uvPrime.xy, material.clearcoatNormalMap ) ).xyz * 2.0 - 1.0;
719
- texNormal.xy *= material.clearcoatNormalScale;
720
- clearcoatNormal = vTBN * texNormal;
721
-
722
- }
723
-
724
- }
725
-
726
- clearcoatNormal *= side;
727
-
728
- // sheenColor
729
- vec3 sheenColor = material.sheenColor;
730
- if ( material.sheenColorMap != - 1 ) {
731
-
732
- vec3 uvPrime = material.sheenColorMapTransform * vec3( uv, 1 );
733
- sheenColor *= texture2D( textures, vec3( uvPrime.xy, material.sheenColorMap ) ).rgb;
734
-
735
- }
736
-
737
- // sheenRoughness
738
- float sheenRoughness = material.sheenRoughness;
739
- if ( material.sheenRoughnessMap != - 1 ) {
740
-
741
- vec3 uvPrime = material.sheenRoughnessMapTransform * vec3( uv, 1 );
742
- sheenRoughness *= texture2D( textures, vec3( uvPrime.xy, material.sheenRoughnessMap ) ).a;
743
-
744
- }
745
-
746
- // iridescence
747
- float iridescence = material.iridescence;
748
- if ( material.iridescenceMap != - 1 ) {
749
-
750
- vec3 uvPrime = material.iridescenceMapTransform * vec3( uv, 1 );
751
- iridescence *= texture2D( textures, vec3( uvPrime.xy, material.iridescenceMap ) ).r;
752
-
753
- }
754
-
755
- // iridescence thickness
756
- float iridescenceThickness = material.iridescenceThicknessMaximum;
757
- if ( material.iridescenceThicknessMap != - 1 ) {
758
-
759
- vec3 uvPrime = material.iridescenceThicknessMapTransform * vec3( uv, 1 );
760
- float iridescenceThicknessSampled = texture2D( textures, vec3( uvPrime.xy, material.iridescenceThicknessMap ) ).g;
761
- iridescenceThickness = mix( material.iridescenceThicknessMinimum, material.iridescenceThicknessMaximum, iridescenceThicknessSampled );
762
-
763
- }
764
-
765
- iridescence = iridescenceThickness == 0.0 ? 0.0 : iridescence;
766
-
767
- // specular color
768
- vec3 specularColor = material.specularColor;
769
- if ( material.specularColorMap != - 1 ) {
770
-
771
- vec3 uvPrime = material.specularColorMapTransform * vec3( uv, 1 );
772
- specularColor *= texture2D( textures, vec3( uvPrime.xy, material.specularColorMap ) ).rgb;
773
-
774
- }
775
-
776
- // specular intensity
777
- float specularIntensity = material.specularIntensity;
778
- if ( material.specularIntensityMap != - 1 ) {
779
-
780
- vec3 uvPrime = material.specularIntensityMapTransform * vec3( uv, 1 );
781
- specularIntensity *= texture2D( textures, vec3( uvPrime.xy, material.specularIntensityMap ) ).a;
782
-
783
- }
784
-
785
- SurfaceRec surfaceRec;
786
- surfaceRec.normal = normal;
787
- surfaceRec.faceNormal = faceNormal;
788
- surfaceRec.transmission = transmission;
789
- surfaceRec.ior = material.ior;
790
- surfaceRec.emission = emission;
791
- surfaceRec.metalness = metalness;
792
- surfaceRec.color = albedo.rgb;
793
- surfaceRec.clearcoat = clearcoat;
794
- surfaceRec.sheenColor = sheenColor;
795
- surfaceRec.iridescence = iridescence;
796
- surfaceRec.iridescenceIor = material.iridescenceIor;
797
- surfaceRec.iridescenceThickness = iridescenceThickness;
798
- surfaceRec.specularColor = specularColor;
799
- surfaceRec.specularIntensity = specularIntensity;
800
- surfaceRec.attenuationColor = material.attenuationColor;
801
- surfaceRec.attenuationDistance = material.attenuationDistance;
802
-
803
- // apply perceptual roughness factor from gltf
804
- // https://registry.khronos.org/glTF/specs/2.0/glTF-2.0.html#microfacet-surfaces
805
- surfaceRec.roughness = roughness * roughness;
806
- surfaceRec.clearcoatRoughness = clearcoatRoughness * clearcoatRoughness;
807
- surfaceRec.sheenRoughness = sheenRoughness * sheenRoughness;
808
-
809
- // frontFace is used to determine transmissive properties and PDF. If no transmission is used
810
- // then we can just always assume this is a front face.
811
- surfaceRec.frontFace = side == 1.0 || transmission == 0.0;
812
- surfaceRec.eta = material.thinFilm || surfaceRec.frontFace ? 1.0 / material.ior : material.ior;
813
- surfaceRec.f0 = iorRatioToF0( surfaceRec.eta );
814
- surfaceRec.thinFilm = material.thinFilm;
815
-
816
- // Compute the filtered roughness value to use during specular reflection computations.
817
- // The accumulated roughness value is scaled by a user setting and a "magic value" of 5.0.
818
- // If we're exiting something transmissive then scale the factor down significantly so we can retain
819
- // sharp internal reflections
820
- surfaceRec.filteredRoughness = applyFilteredGlossy( surfaceRec.roughness, accumulatedRoughness );
821
- surfaceRec.filteredClearcoatRoughness = applyFilteredGlossy( surfaceRec.clearcoatRoughness, accumulatedClearcoatRoughness );
822
-
823
- mat3 normalBasis = getBasisFromNormal( surfaceRec.normal );
824
- mat3 invBasis = inverse( normalBasis );
825
-
826
- mat3 clearcoatNormalBasis = getBasisFromNormal( clearcoatNormal );
827
- mat3 clearcoatInvBasis = inverse( clearcoatNormalBasis );
828
-
829
- vec3 outgoing = - normalize( invBasis * rayDirection );
830
- vec3 clearcoatOutgoing = - normalize( clearcoatInvBasis * rayDirection );
831
- sampleRec = bsdfSample( outgoing, clearcoatOutgoing, normalBasis, invBasis, clearcoatNormalBasis, clearcoatInvBasis, surfaceRec );
832
-
833
- isShadowRay = sampleRec.specularPdf < sobol( 4 );
834
-
835
- // adjust the hit point by the surface normal by a factor of some offset and the
836
- // maximum component-wise value of the current point to accommodate floating point
837
- // error as values increase.
838
- vec3 point = rayOrigin + rayDirection * dist;
839
- vec3 absPoint = abs( point );
840
- float maxPoint = max( absPoint.x, max( absPoint.y, absPoint.z ) );
841
- rayDirection = normalize( normalBasis * sampleRec.direction );
842
-
843
- bool isBelowSurface = dot( rayDirection, faceNormal ) < 0.0;
844
- rayOrigin = point + faceNormal * ( maxPoint + 1.0 ) * ( isBelowSurface ? - RAY_OFFSET : RAY_OFFSET );
845
-
846
- // direct env map sampling
847
- #if FEATURE_MIS
848
-
849
- // uniformly pick a light or environment map
850
- if( sobol( 5 ) > 1.0 / float( lights.count + 1u ) ) {
851
-
852
- // sample a light or environment
853
- LightSampleRec lightSampleRec = randomLightSample( lights.tex, iesProfiles, lights.count, rayOrigin, sobol3( 6 ) );
854
-
855
- bool isSampleBelowSurface = dot( faceNormal, lightSampleRec.direction ) < 0.0;
856
- if ( isSampleBelowSurface ) {
857
-
858
- lightSampleRec.pdf = 0.0;
859
-
860
- }
861
-
862
- // check if a ray could even reach the light area
863
- if (
864
- lightSampleRec.pdf > 0.0 &&
865
- isDirectionValid( lightSampleRec.direction, normal, faceNormal ) &&
866
- ! anyCloserHit( bvh, rayOrigin, lightSampleRec.direction, lightSampleRec.dist )
867
- ) {
868
-
869
- // get the material pdf
870
- vec3 sampleColor;
871
- float lightMaterialPdf = bsdfResult( outgoing, clearcoatOutgoing, normalize( invBasis * lightSampleRec.direction ), normalize( clearcoatInvBasis * lightSampleRec.direction ), surfaceRec, sampleColor );
872
- bool isValidSampleColor = all( greaterThanEqual( sampleColor, vec3( 0.0 ) ) );
873
- if ( lightMaterialPdf > 0.0 && isValidSampleColor ) {
874
-
875
- // weight the direct light contribution
876
- float lightPdf = lightSampleRec.pdf / float( lights.count + 1u );
877
- float misWeight = lightSampleRec.type == SPOT_LIGHT_TYPE || lightSampleRec.type == DIR_LIGHT_TYPE || lightSampleRec.type == POINT_LIGHT_TYPE ? 1.0 : misHeuristic( lightPdf, lightMaterialPdf );
878
- gl_FragColor.rgb += lightSampleRec.emission * throughputColor * sampleColor * misWeight / lightPdf;
879
-
880
- }
881
-
882
- }
883
-
884
- } else {
885
-
886
- // find a sample in the environment map to include in the contribution
887
- vec3 envColor, envDirection;
888
- float envPdf = sampleEnvMapProbability( envMapInfo, sobol2( 7 ), envColor, envDirection );
889
- envDirection = invEnvRotation3x3 * envDirection;
890
-
891
- // this env sampling is not set up for transmissive sampling and yields overly bright
892
- // results so we ignore the sample in this case.
893
- // TODO: this should be improved but how? The env samples could traverse a few layers?
894
- bool isSampleBelowSurface = dot( faceNormal, envDirection ) < 0.0;
895
- if ( isSampleBelowSurface ) {
896
-
897
- envPdf = 0.0;
898
-
899
- }
900
-
901
- // check if a ray could even reach the surface
902
- vec3 attenuatedColor;
903
- if (
904
- envPdf > 0.0 &&
905
- isDirectionValid( envDirection, normal, faceNormal ) &&
906
- ! attenuateHit( bvh, rayOrigin, envDirection, bounces - i, isShadowRay, attenuatedColor )
907
- ) {
908
-
909
- // get the material pdf
910
- vec3 sampleColor;
911
- float envMaterialPdf = bsdfResult( outgoing, clearcoatOutgoing, normalize( invBasis * envDirection ), normalize( clearcoatInvBasis * envDirection ), surfaceRec, sampleColor );
912
- bool isValidSampleColor = all( greaterThanEqual( sampleColor, vec3( 0.0 ) ) );
913
- if ( envMaterialPdf > 0.0 && isValidSampleColor ) {
914
-
915
- // weight the direct light contribution
916
- envPdf /= float( lights.count + 1u );
917
- float misWeight = misHeuristic( envPdf, envMaterialPdf );
918
- gl_FragColor.rgb += attenuatedColor * environmentIntensity * envColor * throughputColor * sampleColor * misWeight / envPdf;
919
-
920
- }
921
-
922
- }
923
-
924
- }
925
- #endif
926
-
927
- // accumulate a roughness value to offset diffuse, specular, diffuse rays that have high contribution
928
- // to a single pixel resulting in fireflies
929
- if ( ! isBelowSurface ) {
930
-
931
- // determine if this is a rough normal or not by checking how far off straight up it is
932
- vec3 halfVector = normalize( outgoing + sampleRec.direction );
933
- accumulatedRoughness += sin( acosApprox( halfVector.z ) );
934
-
935
- vec3 clearcoatHalfVector = normalize( clearcoatOutgoing + sampleRec.clearcoatDirection );
936
- accumulatedClearcoatRoughness += sin( acosApprox( clearcoatHalfVector.z ) );
937
-
938
- transmissiveRay = false;
939
-
940
- }
941
-
942
- // accumulate color
943
- gl_FragColor.rgb += ( emission * throughputColor );
944
-
945
- // skip the sample if our PDF or ray is impossible
946
- if ( sampleRec.pdf <= 0.0 || ! isDirectionValid( rayDirection, normal, faceNormal) ) {
947
-
948
- break;
949
-
950
- }
951
-
952
- throughputColor *= sampleRec.color / sampleRec.pdf;
953
-
954
- // attenuate the throughput color by the medium color
955
- if ( side == - 1.0 ) {
956
-
957
- throughputColor *= transmissionAttenuation( dist, surfaceRec.attenuationColor, surfaceRec.attenuationDistance );
958
-
959
- }
960
-
961
- // discard the sample if there are any NaNs
962
- if ( any( isnan( throughputColor ) ) || any( isinf( throughputColor ) ) ) {
963
-
964
- break;
965
-
966
- }
967
-
968
- }
969
-
970
- gl_FragColor.a *= opacity;
971
-
972
- }
973
-
974
- `
975
-
976
- } );
977
-
978
- this.setValues( parameters );
979
-
980
- }
981
-
982
- }
1
+ import { Matrix4, Vector2 } from 'three';
2
+ import { MaterialBase } from './MaterialBase.js';
3
+ import {
4
+ MeshBVHUniformStruct, UIntVertexAttributeTexture,
5
+ shaderStructs, shaderIntersectFunction,
6
+ } from 'three-mesh-bvh';
7
+ import { shaderMaterialStructs, shaderLightStruct } from '../shader/shaderStructs.js';
8
+ import { MaterialsTexture } from '../uniforms/MaterialsTexture.js';
9
+ import { RenderTarget2DArray } from '../uniforms/RenderTarget2DArray.js';
10
+ import { shaderMaterialSampling } from '../shader/shaderMaterialSampling.js';
11
+ import { shaderEnvMapSampling } from '../shader/shaderEnvMapSampling.js';
12
+ import { shaderLightSampling } from '../shader/shaderLightSampling.js';
13
+ import { shaderSobolCommon, shaderSobolSampling } from '../shader/shaderSobolSampling.js';
14
+ import { shaderUtils } from '../shader/shaderUtils.js';
15
+ import { shaderLayerTexelFetchFunctions } from '../shader/shaderLayerTexelFetchFunctions.js';
16
+ import { shaderRandFunctions } from '../shader/shaderRandFunctions.js';
17
+ import { PhysicalCameraUniform } from '../uniforms/PhysicalCameraUniform.js';
18
+ import { EquirectHdrInfoUniform } from '../uniforms/EquirectHdrInfoUniform.js';
19
+ import { LightsInfoUniformStruct } from '../uniforms/LightsInfoUniformStruct.js';
20
+ import { IESProfilesTexture } from '../uniforms/IESProfilesTexture.js';
21
+ import { AttributesTextureArray } from '../uniforms/AttributesTextureArray.js';
22
+
23
+ export class PhysicalPathTracingMaterial extends MaterialBase {
24
+
25
+ onBeforeRender() {
26
+
27
+ this.setDefine( 'FEATURE_DOF', this.physicalCamera.bokehSize === 0 ? 0 : 1 );
28
+ this.setDefine( 'FEATURE_BACKGROUND_MAP', this.backgroundMap ? 1 : 0 );
29
+
30
+ }
31
+
32
+ constructor( parameters ) {
33
+
34
+ super( {
35
+
36
+ transparent: true,
37
+ depthWrite: false,
38
+
39
+ defines: {
40
+ FEATURE_MIS: 1,
41
+ FEATURE_DOF: 1,
42
+ FEATURE_BACKGROUND_MAP: 0,
43
+ TRANSPARENT_TRAVERSALS: 5,
44
+ // 0 = Perspective
45
+ // 1 = Orthographic
46
+ // 2 = Equirectangular
47
+ CAMERA_TYPE: 0,
48
+
49
+ ATTR_NORMAL: 0,
50
+ ATTR_TANGENT: 1,
51
+ ATTR_UV: 2,
52
+ ATTR_COLOR: 3,
53
+ },
54
+
55
+ uniforms: {
56
+ resolution: { value: new Vector2() },
57
+
58
+ bounces: { value: 3 },
59
+ physicalCamera: { value: new PhysicalCameraUniform() },
60
+
61
+ bvh: { value: new MeshBVHUniformStruct() },
62
+ attributesArray: { value: new AttributesTextureArray() },
63
+ materialIndexAttribute: { value: new UIntVertexAttributeTexture() },
64
+ materials: { value: new MaterialsTexture() },
65
+ textures: { value: new RenderTarget2DArray().texture },
66
+ lights: { value: new LightsInfoUniformStruct() },
67
+ iesProfiles: { value: new IESProfilesTexture().texture },
68
+ cameraWorldMatrix: { value: new Matrix4() },
69
+ invProjectionMatrix: { value: new Matrix4() },
70
+ backgroundBlur: { value: 0.0 },
71
+ environmentIntensity: { value: 1.0 },
72
+ environmentRotation: { value: new Matrix4() },
73
+ envMapInfo: { value: new EquirectHdrInfoUniform() },
74
+ backgroundMap: { value: null },
75
+
76
+ seed: { value: 0 },
77
+ opacity: { value: 1 },
78
+ filterGlossyFactor: { value: 0.0 },
79
+
80
+ backgroundAlpha: { value: 1.0 },
81
+ sobolTexture: { value: null },
82
+ },
83
+
84
+ vertexShader: /* glsl */`
85
+
86
+ varying vec2 vUv;
87
+ void main() {
88
+
89
+ vec4 mvPosition = vec4( position, 1.0 );
90
+ mvPosition = modelViewMatrix * mvPosition;
91
+ gl_Position = projectionMatrix * mvPosition;
92
+
93
+ vUv = uv;
94
+
95
+ }
96
+
97
+ `,
98
+
99
+ fragmentShader: /* glsl */`
100
+ #define RAY_OFFSET 1e-4
101
+
102
+ precision highp isampler2D;
103
+ precision highp usampler2D;
104
+ precision highp sampler2DArray;
105
+ vec4 envMapTexelToLinear( vec4 a ) { return a; }
106
+ #include <common>
107
+
108
+ ${ shaderRandFunctions }
109
+ ${ shaderSobolCommon }
110
+ ${ shaderSobolSampling }
111
+ ${ shaderStructs }
112
+ ${ shaderIntersectFunction }
113
+ ${ shaderMaterialStructs }
114
+ ${ shaderLightStruct }
115
+
116
+ ${ shaderLayerTexelFetchFunctions }
117
+ ${ shaderUtils }
118
+ ${ shaderMaterialSampling }
119
+ ${ shaderEnvMapSampling }
120
+
121
+ uniform mat4 environmentRotation;
122
+ uniform float backgroundBlur;
123
+ uniform float backgroundAlpha;
124
+
125
+ #if FEATURE_BACKGROUND_MAP
126
+
127
+ uniform sampler2D backgroundMap;
128
+
129
+ #endif
130
+
131
+ #if FEATURE_DOF
132
+
133
+ uniform PhysicalCamera physicalCamera;
134
+
135
+ #endif
136
+
137
+ uniform vec2 resolution;
138
+ uniform int bounces;
139
+ uniform mat4 cameraWorldMatrix;
140
+ uniform mat4 invProjectionMatrix;
141
+ uniform sampler2DArray attributesArray;
142
+ uniform usampler2D materialIndexAttribute;
143
+ uniform BVH bvh;
144
+ uniform float environmentIntensity;
145
+ uniform float filterGlossyFactor;
146
+ uniform int seed;
147
+ uniform float opacity;
148
+ uniform sampler2D materials;
149
+ uniform LightsInfo lights;
150
+ uniform sampler2DArray iesProfiles;
151
+
152
+ ${ shaderLightSampling }
153
+
154
+ uniform EquirectHdrInfo envMapInfo;
155
+
156
+ uniform sampler2DArray textures;
157
+ varying vec2 vUv;
158
+
159
+ float applyFilteredGlossy( float roughness, float accumulatedRoughness ) {
160
+
161
+ return clamp(
162
+ max(
163
+ roughness,
164
+ accumulatedRoughness * filterGlossyFactor * 5.0 ),
165
+ 0.0,
166
+ 1.0
167
+ );
168
+
169
+ }
170
+
171
+ vec3 sampleBackground( vec3 direction, vec2 uv ) {
172
+
173
+ vec3 sampleDir = normalize( direction + getHemisphereSample( direction, uv ) * 0.5 * backgroundBlur );
174
+
175
+ #if FEATURE_BACKGROUND_MAP
176
+
177
+ return sampleEquirectEnvMapColor( sampleDir, backgroundMap );
178
+
179
+ #else
180
+
181
+ return environmentIntensity * sampleEquirectEnvMapColor( sampleDir, envMapInfo.map );
182
+
183
+ #endif
184
+
185
+ }
186
+
187
+ // step through multiple surface hits and accumulate color attenuation based on transmissive surfaces
188
+ bool attenuateHit( BVH bvh, vec3 rayOrigin, vec3 rayDirection, int traversals, bool isShadowRay, out vec3 color ) {
189
+
190
+ // hit results
191
+ uvec4 faceIndices = uvec4( 0u );
192
+ vec3 faceNormal = vec3( 0.0, 0.0, 1.0 );
193
+ vec3 barycoord = vec3( 0.0 );
194
+ float side = 1.0;
195
+ float dist = 0.0;
196
+
197
+ color = vec3( 1.0 );
198
+
199
+ // TODO: we should be using sobol sampling here instead of rand but the sobol bounce and path indices need to be incremented
200
+ // and then reset.
201
+ for ( int i = 0; i < traversals; i ++ ) {
202
+
203
+ if ( bvhIntersectFirstHit( bvh, rayOrigin, rayDirection, faceIndices, faceNormal, barycoord, side, dist ) ) {
204
+
205
+ // TODO: attenuate the contribution based on the PDF of the resulting ray including refraction values
206
+ // Should be able to work using the material BSDF functions which will take into account specularity, etc.
207
+ // TODO: should we account for emissive surfaces here?
208
+
209
+ vec2 uv = textureSampleBarycoord( attributesArray, ATTR_UV, barycoord, faceIndices.xyz ).xy;
210
+ vec4 vertexColor = textureSampleBarycoord( attributesArray, ATTR_COLOR, barycoord, faceIndices.xyz );
211
+
212
+ uint materialIndex = uTexelFetch1D( materialIndexAttribute, faceIndices.x ).r;
213
+ Material material = readMaterialInfo( materials, materialIndex );
214
+
215
+ // adjust the ray to the new surface
216
+ bool isBelowSurface = dot( rayDirection, faceNormal ) < 0.0;
217
+ vec3 point = rayOrigin + rayDirection * dist;
218
+ vec3 absPoint = abs( point );
219
+ float maxPoint = max( absPoint.x, max( absPoint.y, absPoint.z ) );
220
+ rayOrigin = point + faceNormal * ( maxPoint + 1.0 ) * ( isBelowSurface ? - RAY_OFFSET : RAY_OFFSET );
221
+
222
+ if ( ! material.castShadow && isShadowRay ) {
223
+
224
+ continue;
225
+
226
+ }
227
+
228
+ // Opacity Test
229
+
230
+ // albedo
231
+ vec4 albedo = vec4( material.color, material.opacity );
232
+ if ( material.map != - 1 ) {
233
+
234
+ vec3 uvPrime = material.mapTransform * vec3( uv, 1 );
235
+ albedo *= texture2D( textures, vec3( uvPrime.xy, material.map ) );
236
+
237
+ }
238
+
239
+ if ( material.vertexColors ) {
240
+
241
+ albedo *= vertexColor;
242
+
243
+ }
244
+
245
+ // alphaMap
246
+ if ( material.alphaMap != - 1 ) {
247
+
248
+ albedo.a *= texture2D( textures, vec3( uv, material.alphaMap ) ).x;
249
+
250
+ }
251
+
252
+ // transmission
253
+ float transmission = material.transmission;
254
+ if ( material.transmissionMap != - 1 ) {
255
+
256
+ vec3 uvPrime = material.transmissionMapTransform * vec3( uv, 1 );
257
+ transmission *= texture2D( textures, vec3( uvPrime.xy, material.transmissionMap ) ).r;
258
+
259
+ }
260
+
261
+ // metalness
262
+ float metalness = material.metalness;
263
+ if ( material.metalnessMap != - 1 ) {
264
+
265
+ vec3 uvPrime = material.metalnessMapTransform * vec3( uv, 1 );
266
+ metalness *= texture2D( textures, vec3( uvPrime.xy, material.metalnessMap ) ).b;
267
+
268
+ }
269
+
270
+ float alphaTest = material.alphaTest;
271
+ bool useAlphaTest = alphaTest != 0.0;
272
+ float transmissionFactor = ( 1.0 - metalness ) * transmission;
273
+ if (
274
+ transmissionFactor < rand() && ! (
275
+ // material sidedness
276
+ material.side != 0.0 && side == material.side
277
+
278
+ // alpha test
279
+ || useAlphaTest && albedo.a < alphaTest
280
+
281
+ // opacity
282
+ || material.transparent && ! useAlphaTest && albedo.a < rand()
283
+ )
284
+ ) {
285
+
286
+ return true;
287
+
288
+ }
289
+
290
+ if ( side == 1.0 && isBelowSurface ) {
291
+
292
+ // only attenuate by surface color on the way in
293
+ color *= mix( vec3( 1.0 ), albedo.rgb, transmissionFactor );
294
+
295
+ } else if ( side == - 1.0 ) {
296
+
297
+ // attenuate by medium once we hit the opposite side of the model
298
+ color *= transmissionAttenuation( dist, material.attenuationColor, material.attenuationDistance );
299
+
300
+ }
301
+
302
+ } else {
303
+
304
+ return false;
305
+
306
+ }
307
+
308
+ }
309
+
310
+ return true;
311
+
312
+ }
313
+
314
+ // returns whether the ray hit anything before a certain distance, not just the first surface. Could be optimized to not check the full hierarchy.
315
+ bool anyCloserHit( BVH bvh, vec3 rayOrigin, vec3 rayDirection, float maxDist ) {
316
+
317
+ uvec4 faceIndices = uvec4( 0u );
318
+ vec3 faceNormal = vec3( 0.0, 0.0, 1.0 );
319
+ vec3 barycoord = vec3( 0.0 );
320
+ float side = 1.0;
321
+ float dist = 0.0;
322
+ bool hit = bvhIntersectFirstHit( bvh, rayOrigin, rayDirection, faceIndices, faceNormal, barycoord, side, dist );
323
+ return hit && dist < maxDist;
324
+
325
+ }
326
+
327
+ vec3 ndcToRayOrigin( vec2 coord ) {
328
+
329
+ vec4 rayOrigin4 = cameraWorldMatrix * invProjectionMatrix * vec4( coord, - 1.0, 1.0 );
330
+ return rayOrigin4.xyz / rayOrigin4.w;
331
+ }
332
+
333
+ void getCameraRay( out vec3 rayDirection, out vec3 rayOrigin ) {
334
+
335
+ vec2 ssd = vec2( 1.0 ) / resolution;
336
+
337
+ // Jitter the camera ray by finding a uv coordinate at a random sample
338
+ // around this pixel's UV coordinate for AA
339
+ vec2 ruv = sobol2( 0 );
340
+ vec2 jitteredUv = vUv + vec2( tentFilter( ruv.x ) * ssd.x, tentFilter( ruv.y ) * ssd.y );
341
+
342
+ #if CAMERA_TYPE == 2
343
+
344
+ // Equirectangular projection
345
+ vec4 rayDirection4 = vec4( equirectUvToDirection( jitteredUv ), 0.0 );
346
+ vec4 rayOrigin4 = vec4( 0.0, 0.0, 0.0, 1.0 );
347
+
348
+ rayDirection4 = cameraWorldMatrix * rayDirection4;
349
+ rayOrigin4 = cameraWorldMatrix * rayOrigin4;
350
+
351
+ rayDirection = normalize( rayDirection4.xyz );
352
+ rayOrigin = rayOrigin4.xyz / rayOrigin4.w;
353
+
354
+ #else
355
+
356
+ // get [- 1, 1] normalized device coordinates
357
+ vec2 ndc = 2.0 * jitteredUv - vec2( 1.0 );
358
+ rayOrigin = ndcToRayOrigin( ndc );
359
+
360
+ #if CAMERA_TYPE == 1
361
+
362
+ // Orthographic projection
363
+ rayDirection = ( cameraWorldMatrix * vec4( 0.0, 0.0, - 1.0, 0.0 ) ).xyz;
364
+ rayDirection = normalize( rayDirection );
365
+
366
+ #else
367
+
368
+ // Perspective projection
369
+ rayDirection = normalize( mat3(cameraWorldMatrix) * ( invProjectionMatrix * vec4( ndc, 0.0, 1.0 ) ).xyz );
370
+
371
+ #endif
372
+
373
+ #endif
374
+
375
+ #if FEATURE_DOF
376
+ {
377
+
378
+ // depth of field
379
+ vec3 focalPoint = rayOrigin + normalize( rayDirection ) * physicalCamera.focusDistance;
380
+
381
+ // get the aperture sample
382
+ // if blades === 0 then we assume a circle
383
+ vec3 shapeUVW= sobol3( 1 );
384
+ int blades = physicalCamera.apertureBlades;
385
+ float anamorphicRatio = physicalCamera.anamorphicRatio;
386
+ vec2 apertureSample = blades == 0 ? sampleCircle( shapeUVW.xy ) : sampleRegularNGon( blades, shapeUVW );
387
+ apertureSample *= physicalCamera.bokehSize * 0.5 * 1e-3;
388
+
389
+ // rotate the aperture shape
390
+ apertureSample =
391
+ rotateVector( apertureSample, physicalCamera.apertureRotation ) *
392
+ saturate( vec2( anamorphicRatio, 1.0 / anamorphicRatio ) );
393
+
394
+ // create the new ray
395
+ rayOrigin += ( cameraWorldMatrix * vec4( apertureSample, 0.0, 0.0 ) ).xyz;
396
+ rayDirection = focalPoint - rayOrigin;
397
+
398
+ }
399
+ #endif
400
+
401
+ rayDirection = normalize( rayDirection );
402
+
403
+ }
404
+
405
+ void main() {
406
+
407
+ rng_initialize( gl_FragCoord.xy, seed );
408
+ sobolPixelIndex = ( uint( gl_FragCoord.x ) << 16 ) | ( uint( gl_FragCoord.y ) );
409
+ sobolPathIndex = uint( seed );
410
+
411
+ vec3 rayDirection;
412
+ vec3 rayOrigin;
413
+
414
+ getCameraRay( rayDirection, rayOrigin );
415
+
416
+ // inverse environment rotation
417
+ mat3 envRotation3x3 = mat3( environmentRotation );
418
+ mat3 invEnvRotation3x3 = inverse( envRotation3x3 );
419
+
420
+ // final color
421
+ gl_FragColor = vec4( 0.0 );
422
+ gl_FragColor.a = 1.0;
423
+
424
+ // hit results
425
+ uvec4 faceIndices = uvec4( 0u );
426
+ vec3 faceNormal = vec3( 0.0, 0.0, 1.0 );
427
+ vec3 barycoord = vec3( 0.0 );
428
+ float side = 1.0;
429
+ float dist = 0.0;
430
+
431
+ // path tracing state
432
+ float accumulatedRoughness = 0.0;
433
+ float accumulatedClearcoatRoughness = 0.0;
434
+ bool transmissiveRay = true;
435
+ int transparentTraversals = TRANSPARENT_TRAVERSALS;
436
+ vec3 throughputColor = vec3( 1.0 );
437
+ SampleRec sampleRec;
438
+ int i;
439
+ bool isShadowRay = false;
440
+
441
+ for ( i = 0; i < bounces; i ++ ) {
442
+
443
+ sobolBounceIndex ++;
444
+
445
+ bool hit = bvhIntersectFirstHit( bvh, rayOrigin, rayDirection, faceIndices, faceNormal, barycoord, side, dist );
446
+
447
+ LightSampleRec lightHit = lightsClosestHit( lights.tex, lights.count, rayOrigin, rayDirection );
448
+
449
+ if ( lightHit.hit && ( lightHit.dist < dist || !hit ) ) {
450
+
451
+ if ( i == 0 || transmissiveRay ) {
452
+
453
+ gl_FragColor.rgb += lightHit.emission * throughputColor;
454
+
455
+ } else {
456
+
457
+ #if FEATURE_MIS
458
+
459
+ // NOTE: we skip MIS for punctual lights since they are not supported in forward PT case
460
+ if ( lightHit.type == SPOT_LIGHT_TYPE || lightHit.type == DIR_LIGHT_TYPE || lightHit.type == POINT_LIGHT_TYPE ) {
461
+
462
+ gl_FragColor.rgb += lightHit.emission * throughputColor;
463
+
464
+ } else {
465
+
466
+ // weight the contribution
467
+ float misWeight = misHeuristic( sampleRec.pdf, lightHit.pdf / float( lights.count + 1u ) );
468
+ gl_FragColor.rgb += lightHit.emission * throughputColor * misWeight;
469
+
470
+ }
471
+
472
+ #else
473
+
474
+ gl_FragColor.rgb += lightHit.emission * throughputColor;
475
+
476
+ #endif
477
+
478
+ }
479
+ break;
480
+
481
+ }
482
+
483
+ if ( ! hit ) {
484
+
485
+ if ( i == 0 || transmissiveRay ) {
486
+
487
+ gl_FragColor.rgb += sampleBackground( envRotation3x3 * rayDirection, sobol2( 2 ) ) * throughputColor;
488
+ gl_FragColor.a = backgroundAlpha;
489
+
490
+ } else {
491
+
492
+ #if FEATURE_MIS
493
+
494
+ // get the PDF of the hit envmap point
495
+ vec3 envColor;
496
+ float envPdf = sampleEnvMap( envMapInfo, envRotation3x3 * rayDirection, envColor );
497
+ envPdf /= float( lights.count + 1u );
498
+
499
+ // and weight the contribution
500
+ float misWeight = misHeuristic( sampleRec.pdf, envPdf );
501
+ gl_FragColor.rgb += environmentIntensity * envColor * throughputColor * misWeight;
502
+
503
+ #else
504
+
505
+ gl_FragColor.rgb +=
506
+ environmentIntensity *
507
+ sampleEquirectEnvMapColor( envRotation3x3 * rayDirection, envMapInfo.map ) *
508
+ throughputColor;
509
+
510
+ #endif
511
+
512
+ }
513
+ break;
514
+
515
+ }
516
+
517
+ uint materialIndex = uTexelFetch1D( materialIndexAttribute, faceIndices.x ).r;
518
+ Material material = readMaterialInfo( materials, materialIndex );
519
+
520
+ if ( material.matte && i == 0 ) {
521
+
522
+ gl_FragColor = vec4( 0.0 );
523
+ break;
524
+
525
+ }
526
+
527
+ // if we've determined that this is a shadow ray and we've hit an item with no shadow casting
528
+ // then skip it
529
+ if ( ! material.castShadow && isShadowRay ) {
530
+
531
+ vec3 point = rayOrigin + rayDirection * dist;
532
+ vec3 absPoint = abs( point );
533
+ float maxPoint = max( absPoint.x, max( absPoint.y, absPoint.z ) );
534
+ rayOrigin = point - ( maxPoint + 1.0 ) * faceNormal * RAY_OFFSET;
535
+
536
+ continue;
537
+
538
+ }
539
+
540
+ // uv coord for textures
541
+ vec2 uv = textureSampleBarycoord( attributesArray, ATTR_UV, barycoord, faceIndices.xyz ).xy;
542
+ vec4 vertexColor = textureSampleBarycoord( attributesArray, ATTR_COLOR, barycoord, faceIndices.xyz );
543
+
544
+ // albedo
545
+ vec4 albedo = vec4( material.color, material.opacity );
546
+ if ( material.map != - 1 ) {
547
+
548
+ vec3 uvPrime = material.mapTransform * vec3( uv, 1 );
549
+ albedo *= texture2D( textures, vec3( uvPrime.xy, material.map ) );
550
+ }
551
+
552
+ if ( material.vertexColors ) {
553
+
554
+ albedo *= vertexColor;
555
+
556
+ }
557
+
558
+ // alphaMap
559
+ if ( material.alphaMap != - 1 ) {
560
+
561
+ albedo.a *= texture2D( textures, vec3( uv, material.alphaMap ) ).x;
562
+
563
+ }
564
+
565
+ // possibly skip this sample if it's transparent, alpha test is enabled, or we hit the wrong material side
566
+ // and it's single sided.
567
+ // - alpha test is disabled when it === 0
568
+ // - the material sidedness test is complicated because we want light to pass through the back side but still
569
+ // be able to see the front side. This boolean checks if the side we hit is the front side on the first ray
570
+ // and we're rendering the other then we skip it. Do the opposite on subsequent bounces to get incoming light.
571
+ float alphaTest = material.alphaTest;
572
+ bool useAlphaTest = alphaTest != 0.0;
573
+ if (
574
+ // material sidedness
575
+ material.side != 0.0 && side != material.side
576
+
577
+ // alpha test
578
+ || useAlphaTest && albedo.a < alphaTest
579
+
580
+ // opacity
581
+ || material.transparent && ! useAlphaTest && albedo.a < sobol( 3 )
582
+ ) {
583
+
584
+ vec3 point = rayOrigin + rayDirection * dist;
585
+ vec3 absPoint = abs( point );
586
+ float maxPoint = max( absPoint.x, max( absPoint.y, absPoint.z ) );
587
+ rayOrigin = point - ( maxPoint + 1.0 ) * faceNormal * RAY_OFFSET;
588
+
589
+ // only allow a limited number of transparency discards otherwise we could
590
+ // crash the context with too long a loop.
591
+ i -= sign( transparentTraversals );
592
+ transparentTraversals -= sign( transparentTraversals );
593
+ continue;
594
+
595
+ }
596
+
597
+ // fetch the interpolated smooth normal
598
+ vec3 normal = normalize( textureSampleBarycoord(
599
+ attributesArray,
600
+ ATTR_NORMAL,
601
+ barycoord,
602
+ faceIndices.xyz
603
+ ).xyz );
604
+
605
+ // roughness
606
+ float roughness = material.roughness;
607
+ if ( material.roughnessMap != - 1 ) {
608
+
609
+ vec3 uvPrime = material.roughnessMapTransform * vec3( uv, 1 );
610
+ roughness *= texture2D( textures, vec3( uvPrime.xy, material.roughnessMap ) ).g;
611
+
612
+ }
613
+
614
+ // metalness
615
+ float metalness = material.metalness;
616
+ if ( material.metalnessMap != - 1 ) {
617
+
618
+ vec3 uvPrime = material.metalnessMapTransform * vec3( uv, 1 );
619
+ metalness *= texture2D( textures, vec3( uvPrime.xy, material.metalnessMap ) ).b;
620
+
621
+ }
622
+
623
+ // emission
624
+ vec3 emission = material.emissiveIntensity * material.emissive;
625
+ if ( material.emissiveMap != - 1 ) {
626
+
627
+ vec3 uvPrime = material.emissiveMapTransform * vec3( uv, 1 );
628
+ emission *= texture2D( textures, vec3( uvPrime.xy, material.emissiveMap ) ).xyz;
629
+
630
+ }
631
+
632
+ // transmission
633
+ float transmission = material.transmission;
634
+ if ( material.transmissionMap != - 1 ) {
635
+
636
+ vec3 uvPrime = material.transmissionMapTransform * vec3( uv, 1 );
637
+ transmission *= texture2D( textures, vec3( uvPrime.xy, material.transmissionMap ) ).r;
638
+
639
+ }
640
+
641
+ // normal
642
+ if ( material.flatShading ) {
643
+
644
+ // if we're rendering a flat shaded object then use the face normals - the face normal
645
+ // is provided based on the side the ray hits the mesh so flip it to align with the
646
+ // interpolated vertex normals.
647
+ normal = faceNormal * side;
648
+
649
+ }
650
+
651
+ vec3 baseNormal = normal;
652
+ if ( material.normalMap != - 1 ) {
653
+
654
+ vec4 tangentSample = textureSampleBarycoord(
655
+ attributesArray,
656
+ ATTR_TANGENT,
657
+ barycoord,
658
+ faceIndices.xyz
659
+ );
660
+
661
+ // some provided tangents can be malformed (0, 0, 0) causing the normal to be degenerate
662
+ // resulting in NaNs and slow path tracing.
663
+ if ( length( tangentSample.xyz ) > 0.0 ) {
664
+
665
+ vec3 tangent = normalize( tangentSample.xyz );
666
+ vec3 bitangent = normalize( cross( normal, tangent ) * tangentSample.w );
667
+ mat3 vTBN = mat3( tangent, bitangent, normal );
668
+
669
+ vec3 uvPrime = material.normalMapTransform * vec3( uv, 1 );
670
+ vec3 texNormal = texture2D( textures, vec3( uvPrime.xy, material.normalMap ) ).xyz * 2.0 - 1.0;
671
+ texNormal.xy *= material.normalScale;
672
+ normal = vTBN * texNormal;
673
+
674
+ }
675
+
676
+ }
677
+
678
+ normal *= side;
679
+
680
+ // clearcoat
681
+ float clearcoat = material.clearcoat;
682
+ if ( material.clearcoatMap != - 1 ) {
683
+
684
+ vec3 uvPrime = material.clearcoatMapTransform * vec3( uv, 1 );
685
+ clearcoat *= texture2D( textures, vec3( uvPrime.xy, material.clearcoatMap ) ).r;
686
+
687
+ }
688
+
689
+ // clearcoatRoughness
690
+ float clearcoatRoughness = material.clearcoatRoughness;
691
+ if ( material.clearcoatRoughnessMap != - 1 ) {
692
+
693
+ vec3 uvPrime = material.clearcoatRoughnessMapTransform * vec3( uv, 1 );
694
+ clearcoatRoughness *= texture2D( textures, vec3( uvPrime.xy, material.clearcoatRoughnessMap ) ).g;
695
+
696
+ }
697
+
698
+ // clearcoatNormal
699
+ vec3 clearcoatNormal = baseNormal;
700
+ if ( material.clearcoatNormalMap != - 1 ) {
701
+
702
+ vec4 tangentSample = textureSampleBarycoord(
703
+ attributesArray,
704
+ ATTR_TANGENT,
705
+ barycoord,
706
+ faceIndices.xyz
707
+ );
708
+
709
+ // some provided tangents can be malformed (0, 0, 0) causing the normal to be degenerate
710
+ // resulting in NaNs and slow path tracing.
711
+ if ( length( tangentSample.xyz ) > 0.0 ) {
712
+
713
+ vec3 tangent = normalize( tangentSample.xyz );
714
+ vec3 bitangent = normalize( cross( clearcoatNormal, tangent ) * tangentSample.w );
715
+ mat3 vTBN = mat3( tangent, bitangent, clearcoatNormal );
716
+
717
+ vec3 uvPrime = material.clearcoatNormalMapTransform * vec3( uv, 1 );
718
+ vec3 texNormal = texture2D( textures, vec3( uvPrime.xy, material.clearcoatNormalMap ) ).xyz * 2.0 - 1.0;
719
+ texNormal.xy *= material.clearcoatNormalScale;
720
+ clearcoatNormal = vTBN * texNormal;
721
+
722
+ }
723
+
724
+ }
725
+
726
+ clearcoatNormal *= side;
727
+
728
+ // sheenColor
729
+ vec3 sheenColor = material.sheenColor;
730
+ if ( material.sheenColorMap != - 1 ) {
731
+
732
+ vec3 uvPrime = material.sheenColorMapTransform * vec3( uv, 1 );
733
+ sheenColor *= texture2D( textures, vec3( uvPrime.xy, material.sheenColorMap ) ).rgb;
734
+
735
+ }
736
+
737
+ // sheenRoughness
738
+ float sheenRoughness = material.sheenRoughness;
739
+ if ( material.sheenRoughnessMap != - 1 ) {
740
+
741
+ vec3 uvPrime = material.sheenRoughnessMapTransform * vec3( uv, 1 );
742
+ sheenRoughness *= texture2D( textures, vec3( uvPrime.xy, material.sheenRoughnessMap ) ).a;
743
+
744
+ }
745
+
746
+ // iridescence
747
+ float iridescence = material.iridescence;
748
+ if ( material.iridescenceMap != - 1 ) {
749
+
750
+ vec3 uvPrime = material.iridescenceMapTransform * vec3( uv, 1 );
751
+ iridescence *= texture2D( textures, vec3( uvPrime.xy, material.iridescenceMap ) ).r;
752
+
753
+ }
754
+
755
+ // iridescence thickness
756
+ float iridescenceThickness = material.iridescenceThicknessMaximum;
757
+ if ( material.iridescenceThicknessMap != - 1 ) {
758
+
759
+ vec3 uvPrime = material.iridescenceThicknessMapTransform * vec3( uv, 1 );
760
+ float iridescenceThicknessSampled = texture2D( textures, vec3( uvPrime.xy, material.iridescenceThicknessMap ) ).g;
761
+ iridescenceThickness = mix( material.iridescenceThicknessMinimum, material.iridescenceThicknessMaximum, iridescenceThicknessSampled );
762
+
763
+ }
764
+
765
+ iridescence = iridescenceThickness == 0.0 ? 0.0 : iridescence;
766
+
767
+ // specular color
768
+ vec3 specularColor = material.specularColor;
769
+ if ( material.specularColorMap != - 1 ) {
770
+
771
+ vec3 uvPrime = material.specularColorMapTransform * vec3( uv, 1 );
772
+ specularColor *= texture2D( textures, vec3( uvPrime.xy, material.specularColorMap ) ).rgb;
773
+
774
+ }
775
+
776
+ // specular intensity
777
+ float specularIntensity = material.specularIntensity;
778
+ if ( material.specularIntensityMap != - 1 ) {
779
+
780
+ vec3 uvPrime = material.specularIntensityMapTransform * vec3( uv, 1 );
781
+ specularIntensity *= texture2D( textures, vec3( uvPrime.xy, material.specularIntensityMap ) ).a;
782
+
783
+ }
784
+
785
+ SurfaceRec surfaceRec;
786
+ surfaceRec.normal = normal;
787
+ surfaceRec.faceNormal = faceNormal;
788
+ surfaceRec.transmission = transmission;
789
+ surfaceRec.ior = material.ior;
790
+ surfaceRec.emission = emission;
791
+ surfaceRec.metalness = metalness;
792
+ surfaceRec.color = albedo.rgb;
793
+ surfaceRec.clearcoat = clearcoat;
794
+ surfaceRec.sheenColor = sheenColor;
795
+ surfaceRec.iridescence = iridescence;
796
+ surfaceRec.iridescenceIor = material.iridescenceIor;
797
+ surfaceRec.iridescenceThickness = iridescenceThickness;
798
+ surfaceRec.specularColor = specularColor;
799
+ surfaceRec.specularIntensity = specularIntensity;
800
+ surfaceRec.attenuationColor = material.attenuationColor;
801
+ surfaceRec.attenuationDistance = material.attenuationDistance;
802
+
803
+ // apply perceptual roughness factor from gltf
804
+ // https://registry.khronos.org/glTF/specs/2.0/glTF-2.0.html#microfacet-surfaces
805
+ surfaceRec.roughness = roughness * roughness;
806
+ surfaceRec.clearcoatRoughness = clearcoatRoughness * clearcoatRoughness;
807
+ surfaceRec.sheenRoughness = sheenRoughness * sheenRoughness;
808
+
809
+ // frontFace is used to determine transmissive properties and PDF. If no transmission is used
810
+ // then we can just always assume this is a front face.
811
+ surfaceRec.frontFace = side == 1.0 || transmission == 0.0;
812
+ surfaceRec.eta = material.thinFilm || surfaceRec.frontFace ? 1.0 / material.ior : material.ior;
813
+ surfaceRec.f0 = iorRatioToF0( surfaceRec.eta );
814
+ surfaceRec.thinFilm = material.thinFilm;
815
+
816
+ // Compute the filtered roughness value to use during specular reflection computations.
817
+ // The accumulated roughness value is scaled by a user setting and a "magic value" of 5.0.
818
+ // If we're exiting something transmissive then scale the factor down significantly so we can retain
819
+ // sharp internal reflections
820
+ surfaceRec.filteredRoughness = applyFilteredGlossy( surfaceRec.roughness, accumulatedRoughness );
821
+ surfaceRec.filteredClearcoatRoughness = applyFilteredGlossy( surfaceRec.clearcoatRoughness, accumulatedClearcoatRoughness );
822
+
823
+ mat3 normalBasis = getBasisFromNormal( surfaceRec.normal );
824
+ mat3 invBasis = inverse( normalBasis );
825
+
826
+ mat3 clearcoatNormalBasis = getBasisFromNormal( clearcoatNormal );
827
+ mat3 clearcoatInvBasis = inverse( clearcoatNormalBasis );
828
+
829
+ vec3 outgoing = - normalize( invBasis * rayDirection );
830
+ vec3 clearcoatOutgoing = - normalize( clearcoatInvBasis * rayDirection );
831
+ sampleRec = bsdfSample( outgoing, clearcoatOutgoing, normalBasis, invBasis, clearcoatNormalBasis, clearcoatInvBasis, surfaceRec );
832
+
833
+ isShadowRay = sampleRec.specularPdf < sobol( 4 );
834
+
835
+ // adjust the hit point by the surface normal by a factor of some offset and the
836
+ // maximum component-wise value of the current point to accommodate floating point
837
+ // error as values increase.
838
+ vec3 point = rayOrigin + rayDirection * dist;
839
+ vec3 absPoint = abs( point );
840
+ float maxPoint = max( absPoint.x, max( absPoint.y, absPoint.z ) );
841
+ rayDirection = normalize( normalBasis * sampleRec.direction );
842
+
843
+ bool isBelowSurface = dot( rayDirection, faceNormal ) < 0.0;
844
+ rayOrigin = point + faceNormal * ( maxPoint + 1.0 ) * ( isBelowSurface ? - RAY_OFFSET : RAY_OFFSET );
845
+
846
+ // direct env map sampling
847
+ #if FEATURE_MIS
848
+
849
+ // uniformly pick a light or environment map
850
+ if( sobol( 5 ) > 1.0 / float( lights.count + 1u ) ) {
851
+
852
+ // sample a light or environment
853
+ LightSampleRec lightSampleRec = randomLightSample( lights.tex, iesProfiles, lights.count, rayOrigin, sobol3( 6 ) );
854
+
855
+ bool isSampleBelowSurface = dot( faceNormal, lightSampleRec.direction ) < 0.0;
856
+ if ( isSampleBelowSurface ) {
857
+
858
+ lightSampleRec.pdf = 0.0;
859
+
860
+ }
861
+
862
+ // check if a ray could even reach the light area
863
+ if (
864
+ lightSampleRec.pdf > 0.0 &&
865
+ isDirectionValid( lightSampleRec.direction, normal, faceNormal ) &&
866
+ ! anyCloserHit( bvh, rayOrigin, lightSampleRec.direction, lightSampleRec.dist )
867
+ ) {
868
+
869
+ // get the material pdf
870
+ vec3 sampleColor;
871
+ float lightMaterialPdf = bsdfResult( outgoing, clearcoatOutgoing, normalize( invBasis * lightSampleRec.direction ), normalize( clearcoatInvBasis * lightSampleRec.direction ), surfaceRec, sampleColor );
872
+ bool isValidSampleColor = all( greaterThanEqual( sampleColor, vec3( 0.0 ) ) );
873
+ if ( lightMaterialPdf > 0.0 && isValidSampleColor ) {
874
+
875
+ // weight the direct light contribution
876
+ float lightPdf = lightSampleRec.pdf / float( lights.count + 1u );
877
+ float misWeight = lightSampleRec.type == SPOT_LIGHT_TYPE || lightSampleRec.type == DIR_LIGHT_TYPE || lightSampleRec.type == POINT_LIGHT_TYPE ? 1.0 : misHeuristic( lightPdf, lightMaterialPdf );
878
+ gl_FragColor.rgb += lightSampleRec.emission * throughputColor * sampleColor * misWeight / lightPdf;
879
+
880
+ }
881
+
882
+ }
883
+
884
+ } else {
885
+
886
+ // find a sample in the environment map to include in the contribution
887
+ vec3 envColor, envDirection;
888
+ float envPdf = sampleEnvMapProbability( envMapInfo, sobol2( 7 ), envColor, envDirection );
889
+ envDirection = invEnvRotation3x3 * envDirection;
890
+
891
+ // this env sampling is not set up for transmissive sampling and yields overly bright
892
+ // results so we ignore the sample in this case.
893
+ // TODO: this should be improved but how? The env samples could traverse a few layers?
894
+ bool isSampleBelowSurface = dot( faceNormal, envDirection ) < 0.0;
895
+ if ( isSampleBelowSurface ) {
896
+
897
+ envPdf = 0.0;
898
+
899
+ }
900
+
901
+ // check if a ray could even reach the surface
902
+ vec3 attenuatedColor;
903
+ if (
904
+ envPdf > 0.0 &&
905
+ isDirectionValid( envDirection, normal, faceNormal ) &&
906
+ ! attenuateHit( bvh, rayOrigin, envDirection, bounces - i, isShadowRay, attenuatedColor )
907
+ ) {
908
+
909
+ // get the material pdf
910
+ vec3 sampleColor;
911
+ float envMaterialPdf = bsdfResult( outgoing, clearcoatOutgoing, normalize( invBasis * envDirection ), normalize( clearcoatInvBasis * envDirection ), surfaceRec, sampleColor );
912
+ bool isValidSampleColor = all( greaterThanEqual( sampleColor, vec3( 0.0 ) ) );
913
+ if ( envMaterialPdf > 0.0 && isValidSampleColor ) {
914
+
915
+ // weight the direct light contribution
916
+ envPdf /= float( lights.count + 1u );
917
+ float misWeight = misHeuristic( envPdf, envMaterialPdf );
918
+ gl_FragColor.rgb += attenuatedColor * environmentIntensity * envColor * throughputColor * sampleColor * misWeight / envPdf;
919
+
920
+ }
921
+
922
+ }
923
+
924
+ }
925
+ #endif
926
+
927
+ // accumulate a roughness value to offset diffuse, specular, diffuse rays that have high contribution
928
+ // to a single pixel resulting in fireflies
929
+ if ( ! isBelowSurface ) {
930
+
931
+ // determine if this is a rough normal or not by checking how far off straight up it is
932
+ vec3 halfVector = normalize( outgoing + sampleRec.direction );
933
+ accumulatedRoughness += sin( acosApprox( halfVector.z ) );
934
+
935
+ vec3 clearcoatHalfVector = normalize( clearcoatOutgoing + sampleRec.clearcoatDirection );
936
+ accumulatedClearcoatRoughness += sin( acosApprox( clearcoatHalfVector.z ) );
937
+
938
+ transmissiveRay = false;
939
+
940
+ }
941
+
942
+ // accumulate color
943
+ gl_FragColor.rgb += ( emission * throughputColor );
944
+
945
+ // skip the sample if our PDF or ray is impossible
946
+ if ( sampleRec.pdf <= 0.0 || ! isDirectionValid( rayDirection, normal, faceNormal) ) {
947
+
948
+ break;
949
+
950
+ }
951
+
952
+ throughputColor *= sampleRec.color / sampleRec.pdf;
953
+
954
+ // attenuate the throughput color by the medium color
955
+ if ( side == - 1.0 ) {
956
+
957
+ throughputColor *= transmissionAttenuation( dist, surfaceRec.attenuationColor, surfaceRec.attenuationDistance );
958
+
959
+ }
960
+
961
+ // discard the sample if there are any NaNs
962
+ if ( any( isnan( throughputColor ) ) || any( isinf( throughputColor ) ) ) {
963
+
964
+ break;
965
+
966
+ }
967
+
968
+ }
969
+
970
+ gl_FragColor.a *= opacity;
971
+
972
+ }
973
+
974
+ `
975
+
976
+ } );
977
+
978
+ this.setValues( parameters );
979
+
980
+ }
981
+
982
+ }