@plasius/gpu-particles 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +46 -0
- package/LICENSE +203 -0
- package/README.md +82 -0
- package/dist/effects/fire/physics.job.wgsl +84 -0
- package/dist/effects/fire/prelude.wgsl +41 -0
- package/dist/effects/fire/render.job.wgsl +8 -0
- package/dist/effects/firework/prelude.wgsl +41 -0
- package/dist/effects/firework/render.job.wgsl +8 -0
- package/dist/effects/firework/update.job.wgsl +154 -0
- package/dist/effects/rain/prelude.wgsl +35 -0
- package/dist/effects/rain/render.job.wgsl +8 -0
- package/dist/effects/rain/update.job.wgsl +51 -0
- package/dist/effects/snow/prelude.wgsl +35 -0
- package/dist/effects/snow/render.job.wgsl +8 -0
- package/dist/effects/snow/update.job.wgsl +53 -0
- package/dist/effects/sparks/prelude.wgsl +41 -0
- package/dist/effects/sparks/render.job.wgsl +8 -0
- package/dist/effects/sparks/update.job.wgsl +53 -0
- package/dist/effects/text/layout.job.wgsl +57 -0
- package/dist/effects/text/prelude.wgsl +39 -0
- package/dist/effects/text/render.job.wgsl +8 -0
- package/dist/index.cjs +286 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.js +244 -0
- package/dist/index.js.map +1 -0
- package/legal/CLA-REGISTRY.csv +2 -0
- package/legal/CLA.md +22 -0
- package/legal/CORPORATE_CLA.md +57 -0
- package/legal/INDIVIDUAL_CLA.md +91 -0
- package/package.json +74 -0
- package/src/effects/fire/physics.job.wgsl +84 -0
- package/src/effects/fire/prelude.wgsl +41 -0
- package/src/effects/fire/render.job.wgsl +8 -0
- package/src/effects/firework/prelude.wgsl +41 -0
- package/src/effects/firework/render.job.wgsl +8 -0
- package/src/effects/firework/update.job.wgsl +154 -0
- package/src/effects/rain/prelude.wgsl +35 -0
- package/src/effects/rain/render.job.wgsl +8 -0
- package/src/effects/rain/update.job.wgsl +51 -0
- package/src/effects/snow/prelude.wgsl +35 -0
- package/src/effects/snow/render.job.wgsl +8 -0
- package/src/effects/snow/update.job.wgsl +53 -0
- package/src/effects/sparks/prelude.wgsl +41 -0
- package/src/effects/sparks/render.job.wgsl +8 -0
- package/src/effects/sparks/update.job.wgsl +53 -0
- package/src/effects/text/layout.job.wgsl +57 -0
- package/src/effects/text/prelude.wgsl +39 -0
- package/src/effects/text/render.job.wgsl +8 -0
- package/src/index.js +251 -0
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
// Rain effect shared WGSL definitions.
//
// Particle packing convention (shared with the update job):
//   pos.xy = position, pos.z = life, pos.w = per-particle seed (stored as f32)
//   vel.xy = velocity, vel.zw = unused for rain

struct Particle {
    pos: vec4<f32>,
    vel: vec4<f32>,
};

struct EffectParams {
    time: f32,
    dt: f32,
    intensity: f32,
    spawn_radius: f32,
    bounds_min: vec2<f32>,
    bounds_max: vec2<f32>,
    origin: vec2<f32>,
    drift: vec2<f32>,
};

@group(1) @binding(0) var<storage, read_write> particles: array<Particle>;
@group(1) @binding(1) var<uniform> effect_params: EffectParams;

// Integer finalizer hash (lowbias32-style avalanche) used as the PRNG core.
fn hash_u32(x: u32) -> u32 {
    var v = x;
    v = v ^ (v >> 16u);
    v = v * 0x7feb352du;
    v = v ^ (v >> 15u);
    v = v * 0x846ca68bu;
    v = v ^ (v >> 16u);
    return v;
}

// Uniform random value in [0, 1) derived from the low 24 bits of the hash
// (24 bits fit exactly in an f32 mantissa, so the division is lossless).
fn rand01(seed: u32) -> f32 {
    let v = hash_u32(seed) & 0x00ffffffu;
    return f32(v) / 16777216.0;
}
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
// Rain update job.

// Uniform random value in [-1, 1).
fn rand_signed(seed: u32) -> f32 {
    return rand01(seed) * 2.0 - 1.0;
}

// Re-seed a raindrop near the emitter origin with a fresh downward velocity.
fn respawn_rain(seed: u32) -> Particle {
    let x = effect_params.origin.x + rand_signed(seed) * effect_params.spawn_radius;
    let y = effect_params.origin.y + rand01(seed ^ 0x9e3779b9u) * 0.1;
    let speed = 0.7 + rand01(seed ^ 0x85ebca6bu) * 1.2;
    let vel = vec2<f32>(effect_params.drift.x * 0.2, -speed);
    // pos.z = life (fresh drops start at 1.0), pos.w = stored seed.
    return Particle(vec4<f32>(vec2<f32>(x, y), 1.0, f32(seed)), vec4<f32>(vel, 0.0, 0.0));
}

// Entry point invoked by the job dispatcher; payload word 0 carries the
// particle index this invocation owns.
fn process_job(job_index: u32, job_type: u32, payload_words: u32) {
    if (payload_words == 0u) {
        return;
    }
    let particle_index = payload_word(job_index, 0u);
    if (particle_index >= arrayLength(&particles)) {
        return;
    }

    var particle = particles[particle_index];
    var life = particle.pos.z;
    let seed = u32(particle.pos.w) ^ (particle_index * 97u);

    // Dead particle: respawn immediately and skip integration this frame.
    if (life <= 0.0) {
        particle = respawn_rain(seed + u32(effect_params.time * 400.0));
        particles[particle_index] = particle;
        return;
    }

    var pos = particle.pos.xy;
    var vel = particle.vel.xy;
    // Wind: drift accelerates drops horizontally, scaled by the frame delta.
    vel.x = vel.x + effect_params.drift.x * effect_params.dt * 0.3;
    pos = pos + vel * effect_params.dt;

    // Kill drops once they fall below the lower bound (small margin).
    if (pos.y < effect_params.bounds_min.y - 0.1) {
        life = -1.0;
    }

    particle.pos = vec4<f32>(pos, life, f32(seed));
    particle.vel = vec4<f32>(vel, particle.vel.z, particle.vel.w);

    // Respawn immediately when this frame killed the drop, using a different
    // salt so it does not mirror the early-out respawn above.
    if (life <= 0.0) {
        particle = respawn_rain(seed + 13u + u32(effect_params.time * 700.0));
    }

    particles[particle_index] = particle;
}
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
// Snow effect shared WGSL definitions.
//
// Particle packing convention (shared with the update job):
//   pos.xy = position, pos.z = life, pos.w = per-particle seed (stored as f32)
//   vel.xy = velocity, vel.zw = unused for snow

struct Particle {
    pos: vec4<f32>,
    vel: vec4<f32>,
};

struct EffectParams {
    time: f32,
    dt: f32,
    intensity: f32,
    spawn_radius: f32,
    bounds_min: vec2<f32>,
    bounds_max: vec2<f32>,
    origin: vec2<f32>,
    drift: vec2<f32>,
};

@group(1) @binding(0) var<storage, read_write> particles: array<Particle>;
@group(1) @binding(1) var<uniform> effect_params: EffectParams;

// Integer finalizer hash (lowbias32-style avalanche) used as the PRNG core.
fn hash_u32(x: u32) -> u32 {
    var v = x;
    v = v ^ (v >> 16u);
    v = v * 0x7feb352du;
    v = v ^ (v >> 15u);
    v = v * 0x846ca68bu;
    v = v ^ (v >> 16u);
    return v;
}

// Uniform random value in [0, 1) from the low 24 bits of the hash
// (exactly representable in an f32 mantissa).
fn rand01(seed: u32) -> f32 {
    let v = hash_u32(seed) & 0x00ffffffu;
    return f32(v) / 16777216.0;
}
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
// Snow update job.

// Uniform random value in [-1, 1).
fn rand_signed(seed: u32) -> f32 {
    return rand01(seed) * 2.0 - 1.0;
}

// Re-seed a snowflake near the emitter with a slow fall speed and a small
// random lateral drift.
fn respawn_snow(seed: u32) -> Particle {
    let x = effect_params.origin.x + rand_signed(seed) * effect_params.spawn_radius;
    let y = effect_params.origin.y + rand01(seed ^ 0x9e3779b9u) * 0.1;
    let speed = 0.12 + rand01(seed ^ 0x85ebca6bu) * 0.2;
    let drift = rand_signed(seed ^ 0x27d4eb2du) * 0.05;
    let vel = vec2<f32>(drift + effect_params.drift.x * 0.2, -speed);
    return Particle(vec4<f32>(vec2<f32>(x, y), 1.0, f32(seed)), vec4<f32>(vel, 0.0, 0.0));
}

// Entry point invoked by the job dispatcher; payload word 0 carries the
// particle index this invocation owns.
fn process_job(job_index: u32, job_type: u32, payload_words: u32) {
    if (payload_words == 0u) {
        return;
    }
    let particle_index = payload_word(job_index, 0u);
    if (particle_index >= arrayLength(&particles)) {
        return;
    }

    var particle = particles[particle_index];
    var life = particle.pos.z;
    let seed = u32(particle.pos.w) ^ (particle_index * 67u);

    // Dead flake: respawn immediately and skip integration this frame.
    if (life <= 0.0) {
        particle = respawn_snow(seed + u32(effect_params.time * 200.0));
        particles[particle_index] = particle;
        return;
    }

    var pos = particle.pos.xy;
    var vel = particle.vel.xy;
    // Sinusoidal side-to-side sway, phase-offset per particle by the seed.
    // NOTE(review): the sway term is added to velocity each frame WITHOUT a
    // dt factor (unlike the adjacent drift term), so the sway accumulation is
    // frame-rate dependent — confirm whether this is intentional tuning.
    let sway = sin(effect_params.time * 0.8 + f32(seed) * 0.01) * 0.02;
    vel.x = vel.x + sway + effect_params.drift.x * effect_params.dt * 0.4;
    pos = pos + vel * effect_params.dt;

    // Kill flakes once they fall below the lower bound (small margin).
    if (pos.y < effect_params.bounds_min.y - 0.1) {
        life = -1.0;
    }

    particle.pos = vec4<f32>(pos, life, f32(seed));
    particle.vel = vec4<f32>(vel, particle.vel.z, particle.vel.w);

    // Respawn with a different salt so it does not mirror the early-out path.
    if (life <= 0.0) {
        particle = respawn_snow(seed + 29u + u32(effect_params.time * 300.0));
    }

    particles[particle_index] = particle;
}
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
// Sparks effect shared WGSL definitions.
//
// Particle packing convention (shared with the update job):
//   pos.xy = position, pos.z = life, pos.w = per-particle seed (stored as f32)
//   vel.xy = velocity, vel.z = spawn flag, vel.w = unused

struct Particle {
    pos: vec4<f32>,
    vel: vec4<f32>,
};

struct EffectParams {
    time: f32,
    dt: f32,
    intensity: f32,
    spawn_radius: f32,
    bounds_min: vec2<f32>,
    bounds_max: vec2<f32>,
    origin: vec2<f32>,
    drift: vec2<f32>,
};

@group(1) @binding(0) var<storage, read_write> particles: array<Particle>;
@group(1) @binding(1) var<uniform> effect_params: EffectParams;

const TWO_PI: f32 = 6.2831853;

// Integer finalizer hash (lowbias32-style avalanche) used as the PRNG core.
fn hash_u32(x: u32) -> u32 {
    var v = x;
    v = v ^ (v >> 16u);
    v = v * 0x7feb352du;
    v = v ^ (v >> 15u);
    v = v * 0x846ca68bu;
    v = v ^ (v >> 16u);
    return v;
}

// Uniform random value in [0, 1) from the low 24 bits of the hash.
fn rand01(seed: u32) -> f32 {
    let v = hash_u32(seed) & 0x00ffffffu;
    return f32(v) / 16777216.0;
}

// Uniform random value in [-1, 1).
fn rand_signed(seed: u32) -> f32 {
    return rand01(seed) * 2.0 - 1.0;
}
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
// Sparks update job.

// Re-seed a spark: random position inside the spawn disc, velocity pointing
// radially outward from the origin at a random speed, short random lifetime.
fn respawn_spark(seed: u32) -> Particle {
    let angle = rand01(seed) * TWO_PI;
    let speed = 0.5 + rand01(seed ^ 0x9e3779b9u) * 1.6;
    let radius = effect_params.spawn_radius * rand01(seed ^ 0x85ebca6bu);
    let offset = vec2<f32>(cos(angle), sin(angle)) * radius;
    let pos = effect_params.origin + offset;
    let vel = vec2<f32>(cos(angle), sin(angle)) * speed;
    let life = 0.3 + rand01(seed ^ 0x27d4eb2du) * 0.7;
    return Particle(vec4<f32>(pos, life, f32(seed)), vec4<f32>(vel, 1.0, 0.0));
}

// Entry point invoked by the job dispatcher; payload word 0 carries the
// particle index this invocation owns.
fn process_job(job_index: u32, job_type: u32, payload_words: u32) {
    if (payload_words == 0u) {
        return;
    }
    let particle_index = payload_word(job_index, 0u);
    if (particle_index >= arrayLength(&particles)) {
        return;
    }

    var particle = particles[particle_index];
    var life = particle.pos.z;
    let seed = u32(particle.pos.w) ^ (particle_index * 131u);

    // Dead spark: respawn immediately and skip integration this frame.
    if (life <= 0.0) {
        particle = respawn_spark(seed + u32(effect_params.time * 500.0));
        particles[particle_index] = particle;
        return;
    }

    var vel = particle.vel.xy;
    var pos = particle.pos.xy;
    // Gravity plus wind drift, both scaled by the frame delta.
    vel.y = vel.y - 1.4 * effect_params.dt + effect_params.drift.y * effect_params.dt;
    vel.x = vel.x + effect_params.drift.x * effect_params.dt;
    // NOTE(review): per-frame multiplicative damping is frame-rate dependent
    // (a dt-based pow/exp would be invariant) — confirm intended tuning.
    vel = vel * 0.985;
    pos = pos + vel * effect_params.dt;
    // Sparks burn out quickly regardless of position.
    life = life - effect_params.dt * 1.5;

    // Kill sparks that leave the vertical bounds (small margin); horizontal
    // escape is bounded by the lifetime decay above.
    if (pos.y < effect_params.bounds_min.y - 0.2 || pos.y > effect_params.bounds_max.y + 0.2) {
        life = -1.0;
    }

    particle.pos = vec4<f32>(pos, life, f32(seed));
    particle.vel = vec4<f32>(vel, particle.vel.z, particle.vel.w);

    // Respawn with a different salt so it does not mirror the early-out path.
    if (life <= 0.0) {
        particle = respawn_spark(seed + 19u + u32(effect_params.time * 700.0));
    }

    particles[particle_index] = particle;
}
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
// Text layout job.

// Re-seed a text particle: spawned near the origin with an upward launch,
// vel.w carries a random display value in [0, 9999].
fn respawn_text(seed: u32) -> Particle {
    let x = effect_params.origin.x + rand_signed(seed) * effect_params.spawn_radius;
    let y = effect_params.origin.y + rand01(seed ^ 0x9e3779b9u) * 0.05;
    let value = f32(hash_u32(seed) % 10000u);
    let lateral = rand_signed(seed ^ 0x85ebca6bu) * 0.12;
    let upward = 0.55 + rand01(seed ^ 0x27d4eb2du) * 0.45;
    let vel = vec2<f32>(lateral, upward);
    let life = 2.2 + rand01(seed ^ 0x51a3c1u) * 1.2;
    return Particle(vec4<f32>(vec2<f32>(x, y), life, f32(seed)), vec4<f32>(vel, 0.0, value));
}

// Entry point invoked by the job dispatcher; payload word 0 carries the
// particle index this invocation owns.
fn process_job(job_index: u32, job_type: u32, payload_words: u32) {
    if (payload_words == 0u) {
        return;
    }
    let particle_index = payload_word(job_index, 0u);
    if (particle_index >= arrayLength(&particles)) {
        return;
    }

    var particle = particles[particle_index];
    var life = particle.pos.z;
    let seed = u32(particle.pos.w) ^ (particle_index * 113u);

    // Dead particle: unlike the other effects, respawn only occasionally
    // (~1.5% chance per frame) so text particles appear in sparse bursts.
    if (life <= 0.0) {
        let burst = rand01(seed ^ u32(effect_params.time * 120.0));
        if (burst > 0.985) {
            particle = respawn_text(seed + u32(effect_params.time * 600.0));
            particles[particle_index] = particle;
        }
        return;
    }

    var pos = particle.pos.xy;
    var vel = particle.vel.xy;
    // Fan outward: accelerate away from the origin's x, plus a weak wind
    // term, gravity, and mild damping.
    let dir = select(-1.0, 1.0, pos.x >= effect_params.origin.x);
    vel.x = vel.x + dir * 0.18 * effect_params.dt;
    vel.x = vel.x + effect_params.drift.x * effect_params.dt * 0.05;
    vel.y = vel.y - 0.95 * effect_params.dt;
    vel = vel * 0.995;
    pos = pos + vel * effect_params.dt;

    // Kill particles that leave the bottom or either side (small margin).
    if (pos.y < effect_params.bounds_min.y - 0.1 || pos.x < effect_params.bounds_min.x - 0.1 || pos.x > effect_params.bounds_max.x + 0.1) {
        life = -1.0;
    }

    particle.pos = vec4<f32>(pos, life, f32(seed));
    particle.vel = vec4<f32>(vel, particle.vel.z, particle.vel.w);

    // Respawn with a different salt so it does not mirror the burst path.
    if (life <= 0.0) {
        particle = respawn_text(seed + 37u + u32(effect_params.time * 500.0));
    }

    particles[particle_index] = particle;
}
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
// Text overlay effect shared WGSL definitions.
//
// Particle packing convention (shared with the layout job):
//   pos.xy = position, pos.z = life, pos.w = per-particle seed (stored as f32)
//   vel.xy = velocity, vel.w = display value

struct Particle {
    pos: vec4<f32>,
    vel: vec4<f32>,
};

struct EffectParams {
    time: f32,
    dt: f32,
    intensity: f32,
    spawn_radius: f32,
    bounds_min: vec2<f32>,
    bounds_max: vec2<f32>,
    origin: vec2<f32>,
    drift: vec2<f32>,
};

@group(1) @binding(0) var<storage, read_write> particles: array<Particle>;
@group(1) @binding(1) var<uniform> effect_params: EffectParams;

// Integer finalizer hash (lowbias32-style avalanche) used as the PRNG core.
fn hash_u32(x: u32) -> u32 {
    var v = x;
    v = v ^ (v >> 16u);
    v = v * 0x7feb352du;
    v = v ^ (v >> 15u);
    v = v * 0x846ca68bu;
    v = v ^ (v >> 16u);
    return v;
}

// Uniform random value in [0, 1) from the low 24 bits of the hash.
fn rand01(seed: u32) -> f32 {
    let v = hash_u32(seed) & 0x00ffffffu;
    return f32(v) / 16777216.0;
}

// Uniform random value in [-1, 1).
fn rand_signed(seed: u32) -> f32 {
    return rand01(seed) * 2.0 - 1.0;
}
|
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,286 @@
|
|
|
1
|
+
// esbuild CommonJS interop helpers (generated bundler prelude).
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Install lazy, enumerable getters on `target` for every named export.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties from `from` onto `to` as getters, skipping `except`
// and anything already present.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wrap a CJS module for ESM consumers (unused in this entry, kept verbatim).
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/index.js
var index_exports = {};
__export(index_exports, {
  defaultParticleEffect: () => defaultParticleEffect,
  getParticleEffect: () => getParticleEffect,
  loadParticleEffectJobWgsl: () => loadParticleEffectJobWgsl,
  loadParticleEffectJobs: () => loadParticleEffectJobs,
  loadParticleEffectPreludeWgsl: () => loadParticleEffectPreludeWgsl,
  loadParticleJobs: () => loadParticleJobs,
  loadParticlePhysicsJobWgsl: () => loadParticlePhysicsJobWgsl,
  loadParticlePreludeWgsl: () => loadParticlePreludeWgsl,
  loadParticleRenderJobWgsl: () => loadParticleRenderJobWgsl,
  particleEffectNames: () => particleEffectNames,
  particleEffects: () => particleEffects,
  particleJobLabels: () => particleJobLabels,
  particleJobs: () => particleJobs,
  particlePhysicsJobWgslUrl: () => particlePhysicsJobWgslUrl,
  particlePreludeWgslUrl: () => particlePreludeWgslUrl,
  particleRenderJobWgslUrl: () => particleRenderJobWgslUrl
});
module.exports = __toCommonJS(index_exports);
|
|
50
|
+
// Resolve a base URL for locating packaged WGSL assets relative to this
// module. Fix: removed the dead `if (false) { return new URL("./index.js",
// void 0); }` branch left over from bundling `import.meta.url` — it was
// unreachable, and would have thrown (invalid base) if ever reached.
var baseUrl = (() => {
  // Node CommonJS: derive a file:// URL from this module's real filename.
  if (typeof __filename !== "undefined" && typeof require !== "undefined") {
    const { pathToFileURL } = require("url");
    return pathToFileURL(__filename);
  }
  // Fallback for environments without __filename/require: anchor on the
  // current working directory when a process object exists, else a bare
  // file root.
  const base = typeof process !== "undefined" && process.cwd ? `file://${process.cwd()}/` : "file:///";
  return new URL("./index.js", base);
})();
|
|
61
|
+
// Static catalogue of shipped effects. Each spec names its shared prelude
// file and the WGSL job files, keyed by job role, relative to
// ./effects/<name>/ under baseUrl.
var effectSpecs = {
  fire: {
    prelude: "prelude.wgsl",
    jobs: { physics: "physics.job.wgsl", render: "render.job.wgsl" }
  },
  sparks: {
    prelude: "prelude.wgsl",
    jobs: { update: "update.job.wgsl", render: "render.job.wgsl" }
  },
  text: {
    prelude: "prelude.wgsl",
    jobs: { layout: "layout.job.wgsl", render: "render.job.wgsl" }
  },
  rain: {
    prelude: "prelude.wgsl",
    jobs: { update: "update.job.wgsl", render: "render.job.wgsl" }
  },
  snow: {
    prelude: "prelude.wgsl",
    jobs: { update: "update.job.wgsl", render: "render.job.wgsl" }
  },
  firework: {
    prelude: "prelude.wgsl",
    jobs: { update: "update.job.wgsl", render: "render.job.wgsl" }
  }
};
|
|
105
|
+
// Build a runtime effect descriptor from a static spec: resolves the prelude
// and each job file to absolute URLs and attaches a stable job label of the
// form "particles.<effect>.<job>".
function buildEffect(name, spec) {
  const effectDir = `./effects/${name}/`;
  const preludeUrl = new URL(effectDir + spec.prelude, baseUrl);
  const jobs = Object.entries(spec.jobs).map(([key, file]) => {
    const label = `particles.${name}.${key}`;
    return { key, label, url: new URL(effectDir + file, baseUrl), sourceName: label };
  });
  return { name, preludeUrl, jobs };
}
|
|
122
|
+
// Frozen registry of all built effect descriptors, keyed by effect name.
var particleEffects = Object.freeze(
  Object.fromEntries(
    Object.entries(effectSpecs).map(([name, spec]) => [name, buildEffect(name, spec)])
  )
);
// Frozen list of the available effect names, in declaration order.
var particleEffectNames = Object.freeze(Object.keys(particleEffects));
// Effect used by the legacy single-effect API below.
var defaultParticleEffect = "fire";
|
|
134
|
+
// Look up a job descriptor by key on an effect; throws listing the
// available keys when no job matches.
function getEffectJob(effect, key) {
  const match = effect.jobs.find((entry) => entry.key === key);
  if (match) {
    return match;
  }
  const available = effect.jobs.map((entry) => entry.key).join(", ");
  throw new Error(
    `Unknown job "${key}" for effect "${effect.name}". Available: ${available}.`
  );
}
|
|
144
|
+
// Resolve an effect descriptor by name (defaults to the legacy default
// effect); throws listing the known names when the effect is unknown.
function getParticleEffect(name = defaultParticleEffect) {
  const effect = particleEffects[name];
  if (effect) {
    return effect;
  }
  const available = particleEffectNames.join(", ");
  throw new Error(`Unknown particle effect "${name}". Available: ${available}.`);
}
|
|
152
|
+
// Precomputed descriptors for the default effect, backing the legacy
// single-effect exports below.
var defaultEffect = getParticleEffect(defaultParticleEffect);
var defaultPhysicsJob = getEffectJob(defaultEffect, "physics");
var defaultRenderJob = getEffectJob(defaultEffect, "render");
// Legacy URL exports for the default effect's sources.
var particlePreludeWgslUrl = defaultEffect.preludeUrl;
var particlePhysicsJobWgslUrl = defaultPhysicsJob.url;
var particleRenderJobWgslUrl = defaultRenderJob.url;
// Legacy label map for the default effect's jobs.
var particleJobLabels = {
  physics: defaultPhysicsJob.label,
  render: defaultRenderJob.label
};
// Legacy job list (fresh objects so callers cannot mutate the descriptors).
var particleJobs = [defaultPhysicsJob, defaultRenderJob].map(
  ({ label, url, sourceName }) => ({ label, url, sourceName })
);
|
|
174
|
+
// Guard against a common mis-serve: a dev server returning an HTML page
// (index fallback / 404 page) instead of the WGSL file. Only the first 200
// characters are inspected.
function assertNotHtmlWgsl(source, context) {
  const sample = source.slice(0, 200).toLowerCase();
  const looksLikeHtml = ["<!doctype", "<html", "<meta"].some((marker) => sample.includes(marker));
  if (looksLikeHtml) {
    const label = context ? ` for ${context}` : "";
    throw new Error(
      `Expected WGSL${label} but received HTML. Check the URL or server root.`
    );
  }
}
|
|
183
|
+
// Load a WGSL source from inline text or a URL.
//   - options.wgsl: inline source; returned as-is after the HTML guard.
//   - options.url: string or URL, resolved against options.base ?? baseUrl.
//   - options.fetcher: fetch-like function (defaults to globalThis.fetch);
//     file: URLs (or a missing fetcher) fall back to fs read via dynamic
//     import so the Node path carries no static dependency.
// Returns null when neither wgsl nor url is provided.
async function loadWgslSource(options = {}) {
  const { wgsl, url, fetcher = globalThis.fetch, base } = options ?? {};
  if (typeof wgsl === "string") {
    assertNotHtmlWgsl(wgsl, "inline WGSL");
    return wgsl;
  }
  if (!url) {
    return null;
  }
  const resolved = url instanceof URL ? url : new URL(url, base ?? baseUrl);
  if (!fetcher || resolved.protocol === "file:") {
    // Filesystem path: read directly rather than fetching.
    const { readFile } = await import("fs/promises");
    const { fileURLToPath } = await import("url");
    const fileSource = await readFile(fileURLToPath(resolved), "utf8");
    assertNotHtmlWgsl(fileSource, resolved.href);
    return fileSource;
  }
  const response = await fetcher(resolved);
  if (!response.ok) {
    const status = "status" in response ? response.status : "unknown";
    const statusText = "statusText" in response ? response.statusText : "";
    const detail = statusText ? `${status} ${statusText}` : `${status}`;
    throw new Error(`Failed to load WGSL (${detail})`);
  }
  const fetched = await response.text();
  assertNotHtmlWgsl(fetched, resolved.href);
  return fetched;
}
|
|
211
|
+
// Fetch an effect's prelude WGSL; throws when the loader yields no string.
async function loadEffectPrelude(effect, fetcher) {
  const source = await loadWgslSource({ url: effect.preludeUrl, fetcher });
  if (typeof source === "string") {
    return source;
  }
  throw new Error(`Failed to load ${effect.name} prelude WGSL source.`);
}
|
|
218
|
+
// Fetch one job's WGSL for an effect; throws when the loader yields no string.
async function loadEffectJob(effect, job, fetcher) {
  const source = await loadWgslSource({ url: job.url, fetcher });
  if (typeof source === "string") {
    return source;
  }
  throw new Error(
    `Failed to load ${effect.name} job "${job.key}" WGSL source.`
  );
}
|
|
227
|
+
// Public API: load the prelude WGSL for a named effect.
async function loadParticleEffectPreludeWgsl(effectName, options = {}) {
  const { fetcher } = options ?? {};
  return loadEffectPrelude(getParticleEffect(effectName), fetcher);
}
// Public API: load a single job's WGSL for a named effect.
async function loadParticleEffectJobWgsl(effectName, jobKey, options = {}) {
  const { fetcher } = options ?? {};
  const effect = getParticleEffect(effectName);
  return loadEffectJob(effect, getEffectJob(effect, jobKey), fetcher);
}
// Public API: load the prelude and every job source for a named effect.
// Job sources are fetched concurrently; result order matches effect.jobs.
async function loadParticleEffectJobs(effectName, options = {}) {
  const { fetcher } = options ?? {};
  const effect = getParticleEffect(effectName);
  const preludeWgsl = await loadEffectPrelude(effect, fetcher);
  const jobSources = await Promise.all(
    effect.jobs.map((job) => loadEffectJob(effect, job, fetcher))
  );
  const jobs = effect.jobs.map((job, index) => ({
    wgsl: jobSources[index],
    label: job.label,
    sourceName: job.sourceName
  }));
  return { preludeWgsl, jobs };
}
|
|
252
|
+
// Legacy single-effect API: the four loaders below operate on the default
// effect ("fire") only.
async function loadParticlePreludeWgsl(options = {}) {
  const { fetcher } = options ?? {};
  return loadEffectPrelude(defaultEffect, fetcher);
}
async function loadParticlePhysicsJobWgsl(options = {}) {
  const { fetcher } = options ?? {};
  return loadEffectJob(defaultEffect, defaultPhysicsJob, fetcher);
}
async function loadParticleRenderJobWgsl(options = {}) {
  const { fetcher } = options ?? {};
  return loadEffectJob(defaultEffect, defaultRenderJob, fetcher);
}
async function loadParticleJobs(options = {}) {
  return loadParticleEffectJobs(defaultParticleEffect, options);
}
|
|
267
|
+
// Annotate the CommonJS export names for ESM import in node:
|
|
268
|
+
// Annotate the CommonJS export names for ESM import in node. The `0 &&`
// guard means this never executes; Node's cjs-module-lexer parses the
// object literal statically to discover named exports, so keep it verbatim.
0 && (module.exports = {
  defaultParticleEffect,
  getParticleEffect,
  loadParticleEffectJobWgsl,
  loadParticleEffectJobs,
  loadParticleEffectPreludeWgsl,
  loadParticleJobs,
  loadParticlePhysicsJobWgsl,
  loadParticlePreludeWgsl,
  loadParticleRenderJobWgsl,
  particleEffectNames,
  particleEffects,
  particleJobLabels,
  particleJobs,
  particlePhysicsJobWgslUrl,
  particlePreludeWgslUrl,
  particleRenderJobWgslUrl
});
//# sourceMappingURL=index.cjs.map
|