@takram/three-geospatial-effects 0.0.1-alpha.6 → 0.0.1-alpha.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/build/r3f.cjs.map +1 -1
- package/build/r3f.js +1 -1
- package/build/r3f.js.map +1 -1
- package/build/shared.cjs +16 -16
- package/build/shared.cjs.map +1 -1
- package/build/shared.js +169 -185
- package/build/shared.js.map +1 -1
- package/package.json +18 -2
- package/src/DepthEffect.ts +7 -15
- package/src/LensFlareEffect.ts +1 -5
- package/src/NormalEffect.ts +9 -29
- package/src/r3f/Depth.tsx +1 -1
- package/types/DepthEffect.d.ts +1 -2
- package/types/NormalEffect.d.ts +2 -4
- package/types/r3f/Depth.d.ts +2 -1
package/build/shared.js
CHANGED
@@ -1,11 +1,8 @@
-
-
-
-import {
-
-import { resolveIncludes as b } from "@takram/three-geospatial";
-import { depth as B, turbo as F, packing as C, transform as O } from "@takram/three-geospatial/shaders";
-const N = `#include "core/depth"
+import { BlendFunction as v, Effect as f, EffectAttribute as d, RenderPass as A, Resolution as h, ShaderPass as g, MipmapBlurPass as P, KawaseBlurPass as z, KernelSize as D } from "postprocessing";
+import { Uniform as s, ShaderLib as l, HalfFloatType as m, ShaderMaterial as U, NoBlending as S, Vector2 as T, WebGLRenderTarget as w, Matrix4 as x } from "three";
+import { define as p, resolveIncludes as b } from "@takram/three-geospatial";
+import { turbo as M, depth as B, packing as C, transform as F } from "@takram/three-geospatial/shaders";
+const O = `#include "core/depth"
 #include "core/turbo"
 
 uniform float near;
@@ -24,40 +21,40 @@ void mainImage(const vec4 inputColor, const vec2 uv, out vec4 outputColor) {
 
 outputColor = vec4(color, inputColor.a);
 }
-
-
+`;
+var N = Object.defineProperty, L = (o, e, t, i) => {
+for (var n = void 0, r = o.length - 1, a; r >= 0; r--)
+(a = o[r]) && (n = a(e, t, n) || n);
+return n && N(e, t, n), n;
+};
+const j = {
+blendFunction: v.SRC,
 useTurbo: !1,
 near: 1,
 far: 1e3
 };
-class
+class G extends f {
 constructor(e) {
-const { blendFunction: t, useTurbo:
-...
+const { blendFunction: t, useTurbo: i, near: n, far: r } = {
+...j,
 ...e
 };
 super(
 "DepthEffect",
-b(
-core: { depth: B, turbo:
+b(O, {
+core: { depth: B, turbo: M }
 }),
 {
 blendFunction: t,
-attributes:
+attributes: d.DEPTH,
 uniforms: new Map(
 Object.entries({
-near: new s(
-far: new s(
+near: new s(n),
+far: new s(r)
 })
 )
 }
-), this.useTurbo =
-}
-get useTurbo() {
-return this.defines.has("USE_TURBO");
-}
-set useTurbo(e) {
-this.useTurbo !== e && (e ? this.defines.set("USE_TURBO", "1") : this.defines.delete("USE_TURBO"), this.setChanged());
+), this.useTurbo = i;
 }
 get near() {
 return this.uniforms.get("near").value;
@@ -72,30 +69,33 @@ class se extends h {
 this.uniforms.get("far").value = e;
 }
 }
-
+L([
+p("USE_TURBO")
+], G.prototype, "useTurbo");
+const H = `#define DITHERING
 
 #include <dithering_pars_fragment>
 
 void mainImage(const vec4 inputColor, const vec2 uv, out vec4 outputColor) {
 outputColor = vec4(saturate(dithering(inputColor.rgb)), inputColor.a);
 }
-`,
-blendFunction:
+`, $ = {
+blendFunction: v.NORMAL
 };
-class
+class ue extends f {
 constructor(e) {
 const { blendFunction: t } = {
-
+...$,
 ...e
 };
-super("DitheringEffect",
+super("DitheringEffect", H, {
 blendFunction: t
 });
 }
 }
 const R = Symbol("SETUP");
-function
-const e =
+function I(o) {
+const e = o.vertexShader.replace(
 /* glsl */
 "#include <fog_pars_vertex>",
 /* glsl */
@@ -125,13 +125,13 @@ function H(r) {
 vViewPosition = - mvPosition.xyz;
 `
 );
-
+o.vertexShader = /* glsl */
 `
 #undef FLAT_SHADED
 varying vec3 vViewPosition;
 ${e}
 `;
-const t =
+const t = o.fragmentShader.replace(
 /#ifndef FLAT_SHADED\s+varying vec3 vNormal;\s+#endif/m,
 /* glsl */
 "#include <normal_pars_fragment>"
@@ -153,17 +153,17 @@ function H(r) {
 #include <normal_fragment_maps>
 `
 );
-return
+return o.fragmentShader = /* glsl */
 `
 #undef FLAT_SHADED
 varying vec3 vViewPosition;
 ${t}
-`,
+`, o;
 }
-function
-if (
-return
-e === "basic" &&
+function c(o, { type: e } = {}) {
+if (o[R] === !0)
+return o;
+e === "basic" && I(o);
 const t = e === "physical" ? (
 /* glsl */
 `
@@ -183,7 +183,7 @@ function v(r, { type: e } = {}) {
 );
 `
 );
-return
+return o.fragmentShader = /* glsl */
 `
 layout(location = 1) out vec4 outputBuffer1;
 
@@ -192,7 +192,7 @@ function v(r, { type: e } = {}) {
 #endif // !defined(USE_ENVMAP)
 
 ${C}
-${
+${o.fragmentShader.replace(
 /}\s*$/m,
 // Assume the last curly brace is of main()
 /* glsl */
@@ -201,25 +201,23 @@ function v(r, { type: e } = {}) {
 }
 `
 )}
-`,
+`, o[R] = !0, o;
 }
-function
-
+function V() {
+c(l.lambert), c(l.phong), c(l.basic, { type: "basic" }), c(l.standard, { type: "physical" }), c(l.physical, { type: "physical" });
 }
-class
-constructor(
-super(n,
-u(this, "geometryTexture");
-this.geometryTexture = t.texture.clone(), this.geometryTexture.isRenderTargetTexture = !0, this.geometryTexture.type = m, I();
+class le extends A {
+constructor(e, t, i, n) {
+super(t, i, n), this.geometryTexture = e.texture.clone(), this.geometryTexture.isRenderTargetTexture = !0, this.geometryTexture.type = m, V();
 }
-render(
-
+render(e, t, i, n, r) {
+t != null && (t.textures[1] = this.geometryTexture), super.render(e, t, null), t != null && (t.textures.length = 1);
 }
-setSize(
-this.geometryTexture.image.width =
+setSize(e, t) {
+this.geometryTexture.image.width = e, this.geometryTexture.image.height = t;
 }
 }
-const
+const k = `#include <common>
 
 uniform sampler2D inputBuffer;
 
@@ -297,7 +295,7 @@ void main() {
 float scale = saturate(smoothstep(thresholdLevel, thresholdLevel + thresholdRange, l));
 gl_FragColor = vec4(color * scale, 1.0);
 }
-`,
+`, W = `uniform vec2 texelSize;
 
 out vec2 vCenterUv1;
 out vec2 vCenterUv2;
@@ -331,36 +329,36 @@ void main() {
 
 gl_Position = vec4(position.xy, 1.0, 1.0);
 }
-`,
+`, K = {
 thresholdLevel: 10,
 thresholdRange: 1
 };
-class
+class Z extends U {
 constructor(e) {
 const {
 inputBuffer: t = null,
-thresholdLevel:
-thresholdRange:
-...
+thresholdLevel: i,
+thresholdRange: n,
+...r
 } = {
-
+...K,
 ...e
 };
 super({
 name: "DownsampleThresholdMaterial",
-fragmentShader:
-vertexShader:
-blending:
+fragmentShader: k,
+vertexShader: W,
+blending: S,
 toneMapped: !1,
 depthWrite: !1,
 depthTest: !1,
-...
+...r,
 uniforms: {
 inputBuffer: new s(t),
-texelSize: new s(new
-thresholdLevel: new s(
-thresholdRange: new s(
-...
+texelSize: new s(new T()),
+thresholdLevel: new s(i),
+thresholdRange: new s(n),
+...r.uniforms
 }
 });
 }
@@ -386,7 +384,7 @@ class W extends U {
 this.uniforms.thresholdRange.value = e;
 }
 }
-const
+const Q = `#include <common>
 
 #define SQRT_2 (0.7071067811865476)
 
@@ -459,7 +457,7 @@ void main() {
 gl_FragColor += sampleHalos(haloAmount);
 }
 
-`,
+`, X = `uniform vec2 texelSize;
 
 out vec2 vUv;
 out vec2 vAspectRatio;
@@ -469,37 +467,37 @@ void main() {
 vAspectRatio = vec2(texelSize.x / texelSize.y, 1.0);
 gl_Position = vec4(position.xy, 1.0, 1.0);
 }
-`,
+`, Y = {
 ghostAmount: 1e-3,
 haloAmount: 1e-3,
 chromaticAberration: 10
 };
-class
+class q extends U {
 constructor(e) {
 const {
 inputBuffer: t = null,
-ghostAmount:
-haloAmount:
-chromaticAberration:
+ghostAmount: i,
+haloAmount: n,
+chromaticAberration: r,
 ...a
 } = {
-...
+...Y,
 ...e
 };
 super({
 name: "LensFlareFeaturesMaterial",
-fragmentShader:
-vertexShader:
-blending:
+fragmentShader: Q,
+vertexShader: X,
+blending: S,
 toneMapped: !1,
 depthWrite: !1,
 depthTest: !1,
 uniforms: {
 inputBuffer: new s(t),
-texelSize: new s(new
-ghostAmount: new s(
-haloAmount: new s(
-chromaticAberration: new s(
+texelSize: new s(new T()),
+ghostAmount: new s(i),
+haloAmount: new s(n),
+chromaticAberration: new s(r),
 ...a.uniforms
 }
 });
@@ -532,7 +530,7 @@ class X extends U {
 this.uniforms.chromaticAberration.value = e;
 }
 }
-const
+const J = `uniform sampler2D bloomBuffer;
 uniform sampler2D featuresBuffer;
 uniform float intensity;
 
@@ -541,30 +539,30 @@ void mainImage(const vec4 inputColor, const vec2 uv, out vec4 outputColor) {
 vec3 features = texture(featuresBuffer, uv).rgb;
 outputColor = vec4(inputColor.rgb + (bloom + features) * intensity, inputColor.a);
 }
-`,
-blendFunction:
+`, ee = {
+blendFunction: v.NORMAL,
 resolutionScale: 0.5,
-width:
-height:
+width: h.AUTO_SIZE,
+height: h.AUTO_SIZE,
 intensity: 5e-3
 };
-class
-constructor(
+class ce extends f {
+constructor(e) {
 const {
-blendFunction:
-resolutionScale:
-width:
-height:
-resolutionX:
-resolutionY:
-intensity:
+blendFunction: t,
+resolutionScale: i,
+width: n,
+height: r,
+resolutionX: a = n,
+resolutionY: u = r,
+intensity: E
 } = {
-...
-...
+...ee,
+...e
 };
-super("LensFlareEffect",
-blendFunction:
-attributes:
+super("LensFlareEffect", J, {
+blendFunction: t,
+attributes: d.CONVOLUTION,
 uniforms: new Map(
 Object.entries({
 bloomBuffer: new s(null),
@@ -572,20 +570,9 @@ class le extends h {
 intensity: new s(1)
 })
 )
-})
-u(this, "resolution");
-u(this, "renderTarget1");
-u(this, "renderTarget2");
-u(this, "thresholdMaterial");
-u(this, "thresholdPass");
-u(this, "blurPass");
-u(this, "preBlurPass");
-u(this, "featuresMaterial");
-u(this, "featuresPass");
-u(this, "onResolutionChange", () => {
+}), this.onResolutionChange = () => {
 this.setSize(this.resolution.baseWidth, this.resolution.baseHeight);
-}
-this.renderTarget1 = new w(1, 1, {
+}, this.renderTarget1 = new w(1, 1, {
 depthBuffer: !1,
 stencilBuffer: !1,
 type: m
@@ -593,50 +580,47 @@ class le extends h {
 depthBuffer: !1,
 stencilBuffer: !1,
 type: m
-}), this.renderTarget2.texture.name = "LensFlare.Target2", this.thresholdMaterial = new
-kernelSize:
-}), this.featuresMaterial = new
+}), this.renderTarget2.texture.name = "LensFlare.Target2", this.thresholdMaterial = new Z(), this.thresholdPass = new g(this.thresholdMaterial), this.blurPass = new P(), this.blurPass.levels = 8, this.preBlurPass = new z({
+kernelSize: D.SMALL
+}), this.featuresMaterial = new q(), this.featuresPass = new g(this.featuresMaterial), this.uniforms.get("bloomBuffer").value = this.blurPass.texture, this.uniforms.get("featuresBuffer").value = this.renderTarget1.texture, this.resolution = new h(
 this,
-
-
-
-), this.resolution.addEventListener(
-
-
-), this.
-}
-
-this.thresholdPass.
-}
-
-
-
-
-
-o.setBaseSize(t, n);
-const { width: i, height: a } = o;
-this.renderTarget1.setSize(i, a), this.renderTarget2.setSize(i, a), this.thresholdMaterial.setSize(i, a), this.blurPass.setSize(i, a), this.preBlurPass.setSize(i, a), this.featuresMaterial.setSize(i, a);
+a,
+u,
+i
+), this.resolution.addEventListener("change", this.onResolutionChange), this.intensity = E;
+}
+initialize(e, t, i) {
+this.thresholdPass.initialize(e, t, i), this.blurPass.initialize(e, t, i), this.preBlurPass.initialize(e, t, i), this.featuresPass.initialize(e, t, i);
+}
+update(e, t, i) {
+this.thresholdPass.render(e, t, this.renderTarget1), this.blurPass.render(e, this.renderTarget1, null), this.preBlurPass.render(e, this.renderTarget1, this.renderTarget2), this.featuresPass.render(e, this.renderTarget2, this.renderTarget1);
+}
+setSize(e, t) {
+const i = this.resolution;
+i.setBaseSize(e, t);
+const { width: n, height: r } = i;
+this.renderTarget1.setSize(n, r), this.renderTarget2.setSize(n, r), this.thresholdMaterial.setSize(n, r), this.blurPass.setSize(n, r), this.preBlurPass.setSize(n, r), this.featuresMaterial.setSize(n, r);
 }
 get intensity() {
 return this.uniforms.get("intensity").value;
 }
-set intensity(
-this.uniforms.get("intensity").value =
+set intensity(e) {
+this.uniforms.get("intensity").value = e;
 }
 get thresholdLevel() {
 return this.thresholdMaterial.thresholdLevel;
 }
-set thresholdLevel(
-this.thresholdMaterial.thresholdLevel =
+set thresholdLevel(e) {
+this.thresholdMaterial.thresholdLevel = e;
 }
 get thresholdRange() {
 return this.thresholdMaterial.thresholdRange;
 }
-set thresholdRange(
-this.thresholdMaterial.thresholdRange =
+set thresholdRange(e) {
+this.thresholdMaterial.thresholdRange = e;
 }
 }
-const
+const te = `#include "core/depth"
 #include "core/packing"
 #include "core/transform"
 
@@ -677,43 +661,49 @@ void mainImage(const vec4 inputColor, const vec2 uv, out vec4 outputColor) {
 
 outputColor = vec4(normal * 0.5 + 0.5, inputColor.a);
 }
-
-
+`;
+var ne = Object.defineProperty, _ = (o, e, t, i) => {
+for (var n = void 0, r = o.length - 1, a; r >= 0; r--)
+(a = o[r]) && (n = a(e, t, n) || n);
+return n && ne(e, t, n), n;
+};
+const re = {
+blendFunction: v.SRC,
 octEncoded: !1,
 reconstructFromDepth: !1
 };
-class
+class y extends f {
 constructor(e, t) {
 const {
-blendFunction:
-normalBuffer:
-octEncoded:
+blendFunction: i,
+normalBuffer: n = null,
+octEncoded: r,
 reconstructFromDepth: a
 } = {
-...
+...re,
 ...t
 };
 super(
 "NormalEffect",
-b(
+b(te, {
 core: {
 depth: B,
 packing: C,
-transform:
+transform: F
 }
 }),
 {
-blendFunction:
-attributes:
+blendFunction: i,
+attributes: d.DEPTH,
 uniforms: new Map(
 Object.entries({
-normalBuffer: new s(
+normalBuffer: new s(n),
 projectionMatrix: new s(new x()),
 inverseProjectionMatrix: new s(new x())
 })
 )
 }
-), this.camera = e, e != null && (this.mainCamera = e), this.octEncoded =
+), this.camera = e, e != null && (this.mainCamera = e), this.octEncoded = r, this.reconstructFromDepth = a;
 }
 get mainCamera() {
 return this.camera;
@@ -721,9 +711,9 @@ class ce extends h {
 set mainCamera(e) {
 this.camera = e;
 }
-update(e, t,
-const
-
+update(e, t, i) {
+const n = this.uniforms, r = n.get("projectionMatrix"), a = n.get("inverseProjectionMatrix"), u = this.camera;
+u != null && (r.value.copy(u.projectionMatrix), a.value.copy(u.projectionMatrixInverse));
 }
 get normalBuffer() {
 return this.uniforms.get("normalBuffer").value;
@@ -731,29 +721,23 @@ class ce extends h {
 set normalBuffer(e) {
 this.uniforms.get("normalBuffer").value = e;
 }
-get octEncoded() {
-return this.defines.has("OCT_ENCODED");
-}
-set octEncoded(e) {
-e !== this.octEncoded && (e ? this.defines.set("OCT_ENCODED", "1") : this.defines.delete("OCT_ENCODED"), this.setChanged());
-}
-get reconstructFromDepth() {
-return this.defines.has("RECONSTRUCT_FROM_DEPTH");
-}
-set reconstructFromDepth(e) {
-e !== this.reconstructFromDepth && (e ? this.defines.set("RECONSTRUCT_FROM_DEPTH", "1") : this.defines.delete("RECONSTRUCT_FROM_DEPTH"), this.setChanged());
-}
 }
+_([
+p("OCT_ENCODED")
+], y.prototype, "octEncoded");
+_([
+p("RECONSTRUCT_FROM_DEPTH")
+], y.prototype, "reconstructFromDepth");
 export {
-
-
-
-
-
-
-
-
-
-
+G as D,
+le as G,
+ce as L,
+y as N,
+$ as a,
+ue as b,
+j as d,
+ee as l,
+re as n,
+V as s
 };
 //# sourceMappingURL=shared.js.map
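
The most visible source-level change in this diff is that the hand-written accessor pairs for useTurbo (DepthEffect) and octEncoded / reconstructFromDepth (NormalEffect) were removed in favor of a define decorator now imported from @takram/three-geospatial (minified to p and applied through the compiled L / _ helpers above). The decorator's real implementation is not part of this diff; the TypeScript below is only a sketch inferred from the removed accessors, showing roughly what such a decorator would have to do: toggle a GLSL define and request a shader recompile. The EffectLike shape and the define body here are assumptions, not the library's code.

// Hedged sketch, not the library source: a legacy property decorator that
// reproduces the accessors removed above (a boolean property backed by a
// GLSL define on a postprocessing Effect).
interface EffectLike {
  // The members the removed accessors relied on.
  readonly defines: Map<string, string>;
  setChanged(): void;
}

function define(name: string) {
  return (prototype: EffectLike, propertyKey: string): void => {
    Object.defineProperty(prototype, propertyKey, {
      get(this: EffectLike): boolean {
        // The property reads true while the define is present.
        return this.defines.has(name);
      },
      set(this: EffectLike, value: boolean) {
        // Only touch the define (and trigger a recompile) when the value changes.
        if (value !== this.defines.has(name)) {
          if (value) {
            this.defines.set(name, "1");
          } else {
            this.defines.delete(name);
          }
          this.setChanged();
        }
      },
      configurable: true
    });
  };
}

// Applied the same way the compiled helper calls above apply it, for example:
// define("USE_TURBO")(DepthEffect.prototype, "useTurbo");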