@ludicon/spark.js 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +115 -0
- package/dist/index.esm.js +1129 -0
- package/dist/spark_astc_rgb-ylbf30mQ.js +4 -0
- package/dist/spark_astc_rgba-C4NuyfHw.js +4 -0
- package/dist/spark_bc1_rgb-CRQwJRCp.js +4 -0
- package/dist/spark_bc3_rgba-CyRcvC8t.js +4 -0
- package/dist/spark_bc4_r-BSB9VB_w.js +4 -0
- package/dist/spark_bc5_rg-NX_OBH9I.js +4 -0
- package/dist/spark_bc7_rgb-CYdL55pE.js +4 -0
- package/dist/spark_bc7_rgba-BFgOyqos.js +4 -0
- package/dist/spark_eac_r-BFwH430b.js +4 -0
- package/dist/spark_eac_rg--Gm5Gzmk.js +4 -0
- package/dist/spark_etc2_rgb-CWjBHhHQ.js +4 -0
- package/dist/spark_etc2_rgba-BRX5DwNI.js +4 -0
- package/dist/utils-BybjJ-PV.js +4 -0
- package/package.json +74 -0
|
@@ -0,0 +1,1129 @@
|
|
|
1
|
+
// Bundler-generated map of WGSL shader paths to lazy dynamic-import loaders.
// Each entry resolves to a module whose default export is the shader source
// string (see the sibling dist/spark_*-<hash>.js chunks).
const modules = /* @__PURE__ */ Object.assign({ "./spark_astc_rgb.wgsl": () => import("./spark_astc_rgb-ylbf30mQ.js"), "./spark_astc_rgba.wgsl": () => import("./spark_astc_rgba-C4NuyfHw.js"), "./spark_bc1_rgb.wgsl": () => import("./spark_bc1_rgb-CRQwJRCp.js"), "./spark_bc3_rgba.wgsl": () => import("./spark_bc3_rgba-CyRcvC8t.js"), "./spark_bc4_r.wgsl": () => import("./spark_bc4_r-BSB9VB_w.js"), "./spark_bc5_rg.wgsl": () => import("./spark_bc5_rg-NX_OBH9I.js"), "./spark_bc7_rgb.wgsl": () => import("./spark_bc7_rgb-CYdL55pE.js"), "./spark_bc7_rgba.wgsl": () => import("./spark_bc7_rgba-BFgOyqos.js"), "./spark_eac_r.wgsl": () => import("./spark_eac_r-BFwH430b.js"), "./spark_eac_rg.wgsl": () => import("./spark_eac_rg--Gm5Gzmk.js"), "./spark_etc2_rgb.wgsl": () => import("./spark_etc2_rgb-CWjBHhHQ.js"), "./spark_etc2_rgba.wgsl": () => import("./spark_etc2_rgba-BRX5DwNI.js"), "./utils.wgsl": () => import("./utils-BybjJ-PV.js") });
|
|
2
|
+
// Eagerly resolve every lazy shader module at load time (top-level await),
// producing a lookup table of "<name>.wgsl" -> WGSL source string.
const shaders = Object.fromEntries(
  await Promise.all(
    Object.entries(modules).map(async ([path, load]) => {
      const { default: source } = await load();
      return [path.replace("./", ""), source];
    })
  )
);
|
|
10
|
+
// Enumeration of the block-compression formats spark.js knows about.
// The numeric values are indices into the Spark* lookup tables below;
// the commented-out entries are reserved slots for formats that are not
// implemented, which is why the numbering has gaps.
const SparkFormat = {
  ASTC_4x4_RGB: 0,
  ASTC_4x4_RGBA: 1,
  // ASTC_4x4_RGBM: 2,
  // ASTC_6x6_RGB: 3,
  EAC_R: 4,
  EAC_RG: 5,
  ETC2_RGB: 6,
  ETC2_RGBA: 7,
  // ETC2_RGBM: 8,
  BC1_RGB: 9,
  BC3_RGBA: 10,
  // BC3_YCoCg: 11,
  // BC3_RGBM: 12,
  BC4_R: 13,
  BC5_RG: 14,
  // BC6H_RGB: 15,
  BC7_RGB: 16,
  BC7_RGBA: 17
};
|
|
30
|
+
// Canonical user-facing name for each SparkFormat value
// (index == SparkFormat constant). Reserved/unimplemented slots are null.
const SparkFormatName = [
  /* 0 */ "astc-4x4-rgb",   // ASTC_4x4_RGB
  /* 1 */ "astc-4x4-rgba",  // ASTC_4x4_RGBA
  /* 2 */ null,
  /* 3 */ null,
  /* 4 */ "eac-r",          // EAC_R
  /* 5 */ "eac-rg",         // EAC_RG
  /* 6 */ "etc2-rgb",       // ETC2_RGB
  /* 7 */ "etc2-rgba",      // ETC2_RGBA
  /* 8 */ null,
  /* 9 */ "bc1-rgb",        // BC1_RGB
  /* 10 */ "bc3-rgba",      // BC3_RGBA
  /* 11 */ null,
  /* 12 */ null,
  /* 13 */ "bc4-r",         // BC4_R
  /* 14 */ "bc5-rg",        // BC5_RG
  /* 15 */ null,
  /* 16 */ "bc7-rgb",       // BC7_RGB
  /* 17 */ "bc7-rgba"       // BC7_RGBA
];
|
|
80
|
+
// Compute-shader source file (key into `shaders`) implementing the encoder
// for each SparkFormat value; null for reserved/unimplemented slots.
const SparkShaderFiles = [
  /* 0 */ "spark_astc_rgb.wgsl",   // ASTC_4x4_RGB
  /* 1 */ "spark_astc_rgba.wgsl",  // ASTC_4x4_RGBA
  /* 2 */ null,
  /* 3 */ null,
  /* 4 */ "spark_eac_r.wgsl",      // EAC_R
  /* 5 */ "spark_eac_rg.wgsl",     // EAC_RG
  /* 6 */ "spark_etc2_rgb.wgsl",   // ETC2_RGB
  /* 7 */ "spark_etc2_rgba.wgsl",  // ETC2_RGBA
  /* 8 */ null,
  /* 9 */ "spark_bc1_rgb.wgsl",    // BC1_RGB
  /* 10 */ "spark_bc3_rgba.wgsl",  // BC3_RGBA
  /* 11 */ null,
  /* 12 */ null,
  /* 13 */ "spark_bc4_r.wgsl",     // BC4_R
  /* 14 */ "spark_bc5_rg.wgsl",    // BC5_RG
  /* 15 */ null,
  /* 16 */ "spark_bc7_rgb.wgsl",   // BC7_RGB
  /* 17 */ "spark_bc7_rgba.wgsl"   // BC7_RGBA
];
|
|
130
|
+
// Compressed bytes per 4x4 texel block for each SparkFormat value
// (0 for reserved slots). Used to size the output buffer/copies.
const SparkBlockSize = [
  /* 0 */ 16,  // ASTC_4x4_RGB
  /* 1 */ 16,  // ASTC_4x4_RGBA
  /* 2 */ 0,
  /* 3 */ 0,
  /* 4 */ 8,   // EAC_R
  /* 5 */ 16,  // EAC_RG
  /* 6 */ 8,   // ETC2_RGB
  /* 7 */ 16,  // ETC2_RGBA
  /* 8 */ 0,
  /* 9 */ 8,   // BC1_RGB
  /* 10 */ 16, // BC3_RGBA
  /* 11 */ 0,
  /* 12 */ 0,
  /* 13 */ 8,  // BC4_R
  /* 14 */ 16, // BC5_RG
  /* 15 */ 0,
  /* 16 */ 16, // BC7_RGB
  /* 17 */ 16  // BC7_RGBA
];
|
|
180
|
+
// Compression ratio per SparkFormat value — apparently uncompressed bytes of
// the stored channels per compressed byte (e.g. 64-byte RGBA 4x4 tile / 8-byte
// BC1 block = 8); 0 for reserved slots. Reported by enumerateSupportedFormats().
const SparkFormatRatio = [
  /* 0 */ 4,  // ASTC_4x4_RGB
  /* 1 */ 4,  // ASTC_4x4_RGBA
  /* 2 */ 0,
  /* 3 */ 0,
  /* 4 */ 2,  // EAC_R
  /* 5 */ 2,  // EAC_RG
  /* 6 */ 8,  // ETC2_RGB
  /* 7 */ 4,  // ETC2_RGBA
  /* 8 */ 0,
  /* 9 */ 8,  // BC1_RGB
  /* 10 */ 4, // BC3_RGBA
  /* 11 */ 0,
  /* 12 */ 0,
  /* 13 */ 2, // BC4_R
  /* 14 */ 2, // BC5_RG
  /* 15 */ 0,
  /* 16 */ 4, // BC7_RGB
  /* 17 */ 4  // BC7_RGBA
];
|
|
230
|
+
// Lookup from user-facing format name to SparkFormat value, including
// short aliases and WebGPU texture-format names (so callers may pass either
// spelling). Frozen to prevent accidental mutation of the shared table.
const SparkFormatMap = Object.freeze({
  "astc-4x4-rgb": SparkFormat.ASTC_4x4_RGB,
  "astc-4x4-rgba": SparkFormat.ASTC_4x4_RGBA,
  "eac-r": SparkFormat.EAC_R,
  "eac-rg": SparkFormat.EAC_RG,
  "etc2-rgb": SparkFormat.ETC2_RGB,
  "etc2-rgba": SparkFormat.ETC2_RGBA,
  "bc1-rgb": SparkFormat.BC1_RGB,
  "bc3-rgba": SparkFormat.BC3_RGBA,
  "bc4-r": SparkFormat.BC4_R,
  "bc5-rg": SparkFormat.BC5_RG,
  "bc7-rgb": SparkFormat.BC7_RGB,
  "bc7-rgba": SparkFormat.BC7_RGBA,
  // aliases:
  "astc-rgb": SparkFormat.ASTC_4x4_RGB,
  "astc-rgba": SparkFormat.ASTC_4x4_RGBA,
  // webgpu aliases:
  "bc1-rgba-unorm": SparkFormat.BC1_RGB,
  "bc1-rgba-unorm-srgb": SparkFormat.BC1_RGB,
  "bc3-rgba-unorm": SparkFormat.BC3_RGBA,
  "bc3-rgba-unorm-srgb": SparkFormat.BC3_RGBA,
  "bc4-r-unorm": SparkFormat.BC4_R,
  "bc5-rg-unorm": SparkFormat.BC5_RG,
  "bc7-rgba-unorm": SparkFormat.BC7_RGBA,
  "bc7-rgba-unorm-srgb": SparkFormat.BC7_RGBA,
  "etc2-rgb8unorm": SparkFormat.ETC2_RGB,
  "etc2-rgb8unorm-srgb": SparkFormat.ETC2_RGB,
  "etc2-rgba8unorm": SparkFormat.ETC2_RGBA,
  "etc2-rgba8unorm-srgb": SparkFormat.ETC2_RGBA,
  "eac-r11unorm": SparkFormat.EAC_R,
  "eac-rg11unorm": SparkFormat.EAC_RG,
  "astc-4x4-unorm": SparkFormat.ASTC_4x4_RGBA,
  "astc-4x4-unorm-srgb": SparkFormat.ASTC_4x4_RGBA
});
|
|
264
|
+
// WebGPU texture format used to store each SparkFormat's output
// (index == SparkFormat constant); null for reserved slots. RGB-only spark
// formats still map onto the RGBA WebGPU formats (WebGPU has no RGB variants).
const SparkWebGPUFormats = [
  /* 0 */ "astc-4x4-unorm",  // ASTC_4x4_RGB
  /* 1 */ "astc-4x4-unorm",  // ASTC_4x4_RGBA
  /* 2 */ null,
  /* 3 */ null,
  /* 4 */ "eac-r11unorm",    // EAC_R
  /* 5 */ "eac-rg11unorm",   // EAC_RG
  /* 6 */ "etc2-rgb8unorm",  // ETC2_RGB
  /* 7 */ "etc2-rgba8unorm", // ETC2_RGBA
  /* 8 */ null,
  /* 9 */ "bc1-rgba-unorm",  // BC1_RGB
  /* 10 */ "bc3-rgba-unorm", // BC3_RGBA
  /* 11 */ null,
  /* 12 */ null,
  /* 13 */ "bc4-r-unorm",    // BC4_R
  /* 14 */ "bc5-rg-unorm",   // BC5_RG
  /* 15 */ null,
  /* 16 */ "bc7-rgba-unorm", // BC7_RGB
  /* 17 */ "bc7-rgba-unorm"  // BC7_RGBA
];
|
|
314
|
+
// Whether each SparkFormat stores color (RGB) data — consulted when deciding
// if sRGB storage applies (see encodeTexture). false for one/two-channel data
// formats, null for reserved slots.
const SparkFormatIsRGB = [
  /* 0 */ true,   // ASTC_4x4_RGB
  /* 1 */ true,   // ASTC_4x4_RGBA
  /* 2 */ null,
  /* 3 */ null,
  /* 4 */ false,  // EAC_R
  /* 5 */ false,  // EAC_RG
  /* 6 */ true,   // ETC2_RGB
  /* 7 */ true,   // ETC2_RGBA
  /* 8 */ null,
  /* 9 */ true,   // BC1_RGB
  /* 10 */ true,  // BC3_RGBA
  /* 11 */ null,
  /* 12 */ null,
  /* 13 */ false, // BC4_R
  /* 14 */ false, // BC5_RG
  /* 15 */ null,
  /* 16 */ true,  // BC7_RGB
  /* 17 */ true   // BC7_RGBA
];
|
|
364
|
+
/**
 * Throw an Error carrying `message` when `condition` is falsy.
 * @param {*} condition - Value to check for truthiness.
 * @param {string} message - Error message used when the check fails.
 * @throws {Error} When `condition` is falsy.
 */
function assert(condition, message) {
  if (condition) return;
  throw new Error(message);
}
|
|
369
|
+
/**
 * Check whether `device` is a WebGPU GPUDevice.
 * Safe in environments without WebGPU: the typeof guard keeps the
 * `instanceof` check from throwing when the GPUDevice global is absent.
 * @param {*} device - Candidate value.
 * @returns {boolean} True when `device` is a GPUDevice instance.
 */
function isWebGPU(device) {
  // Strict comparison (was loose `!=`) per modern JS convention.
  return typeof GPUDevice !== "undefined" && device instanceof GPUDevice;
}
|
|
372
|
+
/**
 * Best-effort detection of iOS/iPadOS devices.
 *
 * NOTE(review): `navigator.platform` is deprecated and may be frozen by some
 * browsers; kept here because it still distinguishes older iOS devices.
 *
 * @param {Navigator} [nav=navigator] - Navigator to inspect (injectable for tests).
 * @param {Document} [doc=document] - Document to inspect (injectable for tests).
 * @returns {boolean} True when the environment looks like iOS/iPadOS.
 */
function isIOS(nav = navigator, doc = document) {
  const applePlatforms = ["iPad Simulator", "iPhone Simulator", "iPod Simulator", "iPad", "iPhone", "iPod"];
  if (applePlatforms.includes(nav.platform)) {
    return true;
  }
  // iPad on iOS 13+ reports a Mac platform string, but desktop Macs
  // do not expose touch events on the document.
  return nav.userAgent.includes("Mac") && "ontouchend" in doc;
}
|
|
376
|
+
/**
 * Extract the WebKit "Safari/<version>" token from a user agent string.
 *
 * NOTE(review): many non-Safari browsers (e.g. Chrome) also carry a
 * `Safari/` token, so this reports the WebKit build number rather than
 * proving the browser is Safari; callers only use it as a coarse gate.
 *
 * @param {string} [ua=navigator.userAgent] - User agent to parse (injectable for tests).
 * @returns {?number} Parsed version number, or null when no Safari token is present.
 */
function getSafariVersion(ua = navigator.userAgent) {
  const match = ua.match(/Safari\/(\d+(\.\d+)?)/);
  return match && Number.parseFloat(match[1]);
}
|
|
380
|
+
/**
 * Extract the "Firefox/<version>" token from a user agent string.
 * @param {string} [ua=navigator.userAgent] - User agent to parse (injectable for tests).
 * @returns {?number} Parsed version number, or null when no Firefox token is present.
 */
function getFirefoxVersion(ua = navigator.userAgent) {
  const match = ua.match(/Firefox\/(\d+(\.\d+)?)/);
  return match && Number.parseFloat(match[1]);
}
|
|
384
|
+
/**
 * Determine which SparkFormat values the given device can consume, based on
 * the texture-compression features the WebGPU device was created with.
 * @param {GPUDevice} device - Device whose `features` set is inspected.
 * @returns {Set<number>} Set of supported SparkFormat values.
 */
function detectWebGPUFormats(device) {
  // Feature name -> SparkFormat values that feature unlocks.
  const featureFormats = {
    "texture-compression-bc": [
      SparkFormat.BC1_RGB,
      SparkFormat.BC3_RGBA,
      SparkFormat.BC4_R,
      SparkFormat.BC5_RG,
      SparkFormat.BC7_RGB,
      SparkFormat.BC7_RGBA
    ],
    "texture-compression-etc2": [SparkFormat.ETC2_RGB, SparkFormat.ETC2_RGBA, SparkFormat.EAC_R, SparkFormat.EAC_RG],
    "texture-compression-astc": [SparkFormat.ASTC_4x4_RGB, SparkFormat.ASTC_4x4_RGBA]
  };
  const available = Object.entries(featureFormats)
    .filter(([feature]) => device.features.has(feature))
    .flatMap(([, formats]) => formats);
  return new Set(available);
}
|
|
407
|
+
/**
 * Read the RGBA pixel bytes of an image by rasterizing it into a 2D canvas.
 * @param {HTMLImageElement|HTMLCanvasElement} image - Decoded source image.
 * @returns {Uint8Array} Tightly packed RGBA8 pixel data, row-major.
 */
function imageToByteArray(image) {
  const { width, height } = image;
  const canvas = document.createElement("canvas");
  canvas.width = width;
  canvas.height = height;
  const context = canvas.getContext("2d");
  context.drawImage(image, 0, 0);
  const { data } = context.getImageData(0, 0, width, height);
  return new Uint8Array(data.buffer);
}
|
|
416
|
+
/**
 * Load an image from a URL with CORS enabled.
 * @param {string} url - Image URL or data URI.
 * @returns {Promise<HTMLImageElement>} Resolves with the decoded image element;
 *   rejects with an Error (original error event attached as `cause`) on failure.
 */
function loadImage(url) {
  return new Promise(function(resolve, reject) {
    const image = new Image();
    image.crossOrigin = "anonymous";
    image.onload = function() {
      resolve(image);
    };
    image.onerror = function(event) {
      // Reject with a real Error (was the raw error event) so callers get a
      // message and stack trace; the event is preserved via `cause`.
      reject(new Error(`Failed to load image: ${url}`, { cause: event }));
    };
    image.src = url;
  });
}
|
|
427
|
+
// WebGPU requires `bytesPerRow` in buffer<->texture copies to be a multiple of 256.
const BYTES_PER_ROW_ALIGNMENT = 256;
// Smallest mip dimension generated; block formats encode 4x4 texel blocks.
const MIN_MIP_SIZE = 4;
|
|
429
|
+
/**
 * Compute the mip-chain layout for a block-compressed texture.
 *
 * Each mip level occupies a range inside a single output buffer whose row
 * pitch is padded up to BYTES_PER_ROW_ALIGNMENT (a WebGPU copy requirement).
 *
 * @param {number} w - Level-0 width in texels.
 * @param {number} h - Level-0 height in texels.
 * @param {number} blockSize - Compressed bytes per 4x4 block of the target format.
 * @param {boolean} mipmaps - When truthy, lay out the full mip chain down to MIN_MIP_SIZE.
 * @returns {{mipmapCount: number, outputSize: number, bufferRanges: Array<{offset: number, size: number, alignedSize: number, w: number, h: number, bw: number, bh: number, bytesPerRow: number}>}}
 *   Per-level buffer ranges plus total buffer size in bytes.
 */
function computeMipmapLayout(w, h, blockSize, mipmaps) {
  let mipmapCount = 0;
  let offset = 0;
  const bufferRanges = [];
  do {
    // Level dimensions in 4x4 blocks.
    const bw = Math.ceil(w / 4);
    const bh = Math.ceil(h / 4);
    // Row pitch padded to the required copy alignment.
    const bytesPerRow = Math.ceil(bw * blockSize / BYTES_PER_ROW_ALIGNMENT) * BYTES_PER_ROW_ALIGNMENT;
    const alignedSize = bh * bytesPerRow;
    mipmapCount++;
    // Fix: also expose `size` — consumers bind { offset, size } buffer ranges,
    // and the previous object only carried `alignedSize`, so the binding size
    // silently came through as undefined (binding the rest of the buffer).
    bufferRanges.push({ offset, size: alignedSize, alignedSize, w, h, bw, bh, bytesPerRow });
    offset += alignedSize;
    w = Math.max(1, Math.floor(w / 2));
    h = Math.max(1, Math.floor(h / 2));
  } while (mipmaps && (w >= MIN_MIP_SIZE || h >= MIN_MIP_SIZE));
  return { mipmapCount, outputSize: offset, bufferRanges };
}
|
|
446
|
+
class Spark {
|
|
447
|
+
#device;
|
|
448
|
+
#supportedFormats;
|
|
449
|
+
#pipelines = [];
|
|
450
|
+
#supportsFloat16;
|
|
451
|
+
#mipmapPipeline;
|
|
452
|
+
#resizePipeline;
|
|
453
|
+
#flipYPipeline;
|
|
454
|
+
#detectChannelCountPipeline;
|
|
455
|
+
#defaultSampler;
|
|
456
|
+
#srgbUniform;
|
|
457
|
+
#noSrgbUniform;
|
|
458
|
+
#querySet;
|
|
459
|
+
#queryBuffer;
|
|
460
|
+
#queryReadbackBuffer;
|
|
461
|
+
/**
|
|
462
|
+
* Initialize the encoder by detecting available compression formats.
|
|
463
|
+
* @param {GPUDevice} device - WebGPU device.
|
|
464
|
+
* @returns {Promise<void>} Resolves when initialization is complete.
|
|
465
|
+
*/
|
|
466
|
+
static async create(device) {
|
|
467
|
+
const instance = new Spark();
|
|
468
|
+
await instance.#init(device);
|
|
469
|
+
return instance;
|
|
470
|
+
}
|
|
471
|
+
/**
|
|
472
|
+
* Returns a list of supported texture compression format names.
|
|
473
|
+
*
|
|
474
|
+
* This function checks a predefined list of common GPU compression formats
|
|
475
|
+
* (ASTC, ETC2, EAC, BCn) and filters it based on the formats actually supported
|
|
476
|
+
* by the current device as determined by `Spark.supportedFormats`.
|
|
477
|
+
*
|
|
478
|
+
* @returns {string[]} An array of format names (e.g., "bc1-rgb", "astc-4x4-rgba") that are supported on the current platform.
|
|
479
|
+
*
|
|
480
|
+
* @example
|
|
481
|
+
* const spark = await Spark.create(device);
|
|
482
|
+
* const formats = spark.enumerateSupportedFormats();
|
|
483
|
+
* console.log("Supported formats:", formats);
|
|
484
|
+
*/
|
|
485
|
+
enumerateSupportedFormats() {
|
|
486
|
+
const formats = [
|
|
487
|
+
"astc-4x4-rgb",
|
|
488
|
+
"astc-4x4-rgba",
|
|
489
|
+
"eac-r",
|
|
490
|
+
"eac-rg",
|
|
491
|
+
"etc2-rgb",
|
|
492
|
+
"etc2-rgba",
|
|
493
|
+
"bc1-rgb",
|
|
494
|
+
"bc3-rgba",
|
|
495
|
+
"bc4-r",
|
|
496
|
+
"bc5-rg",
|
|
497
|
+
"bc7-rgb",
|
|
498
|
+
"bc7-rgba"
|
|
499
|
+
];
|
|
500
|
+
const supported = [];
|
|
501
|
+
for (const format of formats) {
|
|
502
|
+
const sparkFormat = SparkFormatMap[format];
|
|
503
|
+
if (this.#isFormatSupported(sparkFormat)) {
|
|
504
|
+
const ratio = SparkFormatRatio[sparkFormat];
|
|
505
|
+
supported.push({ format, ratio });
|
|
506
|
+
}
|
|
507
|
+
}
|
|
508
|
+
return supported;
|
|
509
|
+
}
|
|
510
|
+
/**
|
|
511
|
+
* Determines the set of WebGPU features to request when initializing the device.
|
|
512
|
+
*
|
|
513
|
+
* This function inspects the given `adapter` to see which texture compression and shader
|
|
514
|
+
* features are available, and returns a list of those that are both supported and safe to enable.
|
|
515
|
+
*
|
|
516
|
+
* @param {GPUAdapter} adapter - The WebGPU adapter returned from `navigator.gpu.requestAdapter()`.
|
|
517
|
+
* @returns {string[]} An array of WebGPU feature names to request during `adapter.requestDevice()`.
|
|
518
|
+
*
|
|
519
|
+
* @example
|
|
520
|
+
* // Create device using the features required by spark.js
|
|
521
|
+
* const adapter = await navigator.gpu.requestAdapter()
|
|
522
|
+
* const requiredFeatures = Spark.getRequiredFeatures(adapter)
|
|
523
|
+
* const device = await adapter.requestDevice({ requiredFeatures })
|
|
524
|
+
*
|
|
525
|
+
* // Create spark object for the given device.
|
|
526
|
+
* const spark = Spark.create(device)
|
|
527
|
+
*/
|
|
528
|
+
static getRequiredFeatures(adapter) {
|
|
529
|
+
const features = [];
|
|
530
|
+
const IOS = isIOS();
|
|
531
|
+
if (!IOS && adapter.features.has("texture-compression-bc")) {
|
|
532
|
+
features.push("texture-compression-bc");
|
|
533
|
+
}
|
|
534
|
+
if (adapter.features.has("texture-compression-etc2")) {
|
|
535
|
+
features.push("texture-compression-etc2");
|
|
536
|
+
}
|
|
537
|
+
if (adapter.features.has("texture-compression-astc")) {
|
|
538
|
+
features.push("texture-compression-astc");
|
|
539
|
+
}
|
|
540
|
+
if (adapter.features.has("shader-f16")) {
|
|
541
|
+
features.push("shader-f16");
|
|
542
|
+
}
|
|
543
|
+
if (adapter.features.has("timestamp-query")) {
|
|
544
|
+
features.push("timestamp-query");
|
|
545
|
+
}
|
|
546
|
+
return features;
|
|
547
|
+
}
|
|
548
|
+
/**
|
|
549
|
+
* Try to determine the best compression options automatically. Do not use this in production, this is
|
|
550
|
+
* for the convenience of the spark.js image viewer only.
|
|
551
|
+
*
|
|
552
|
+
* @param {string | HTMLImageElement | HTMLCanvasElement | Blob | ArrayBuffer | GPUTexture} source - Image input.
|
|
553
|
+
* @param {Object} options - Encoding options.
|
|
554
|
+
* @returns {Object} - Recommended encoding options with an explicit encoding format.
|
|
555
|
+
*/
|
|
556
|
+
async selectPreferredOptions(source, options = {}) {
|
|
557
|
+
if (options.format == void 0 || options.format == "auto") {
|
|
558
|
+
const image = source instanceof Image || source instanceof GPUTexture ? source : await loadImage(source);
|
|
559
|
+
const format = await this.#getBestMatchingFormat(options, image);
|
|
560
|
+
options.format = SparkFormatName[format];
|
|
561
|
+
if (image instanceof GPUTexture) {
|
|
562
|
+
if (image.format.endsWith("-srgb")) options.srgb = true;
|
|
563
|
+
}
|
|
564
|
+
if (format == SparkFormat.EAC_RG || format == SparkFormat.BC5_RG) {
|
|
565
|
+
options.normal = true;
|
|
566
|
+
}
|
|
567
|
+
}
|
|
568
|
+
return options;
|
|
569
|
+
}
|
|
570
|
+
/**
|
|
571
|
+
* Load an image and transcode it to a compressed GPU texture.
|
|
572
|
+
* @param {GPUtexture | string | HTMLImageElement | HTMLCanvasElement | Blob | ArrayBuffer} source - Image input.
|
|
573
|
+
* @param {Object} options - Optional encoding options.
|
|
574
|
+
* @param {string} options.format - Desired block compression format (auto-detect by default).
|
|
575
|
+
* @param {boolean} options.generateMipmaps | options.mips - Whether to generate mipmaps (false by default).
|
|
576
|
+
* @param {boolean} options.srgb - Whether to store as sRGB. This also affects mipmap generation (false by default).
|
|
577
|
+
* @param {boolean} options.normal - Interpret the image as a normal map. Affects mipmap generation (false by default).
|
|
578
|
+
* @param {boolean} options.flipY - Flip image vertically.
|
|
579
|
+
* @returns {Promise<GPUTexture>} - A promise resolving to a GPU texture.
|
|
580
|
+
*/
|
|
581
|
+
async encodeTexture(source, options = {}) {
|
|
582
|
+
assert(this.#device, "Spark is not initialized");
|
|
583
|
+
const image = source instanceof Image || source instanceof GPUTexture ? source : await loadImage(source);
|
|
584
|
+
console.log("Loaded image", image);
|
|
585
|
+
const format = await this.#getBestMatchingFormat(options, image);
|
|
586
|
+
const width = Math.ceil(image.width / 4) * 4;
|
|
587
|
+
const height = Math.ceil(image.height / 4) * 4;
|
|
588
|
+
const blockSize = SparkBlockSize[format];
|
|
589
|
+
const mipmaps = options.generateMipmaps || options.mips;
|
|
590
|
+
const { mipmapCount, outputSize, bufferRanges } = computeMipmapLayout(width, height, blockSize, mipmaps);
|
|
591
|
+
const srgb = (options.srgb || options.format?.endsWith("srgb")) && SparkFormatIsRGB[format];
|
|
592
|
+
const webgpuFormat = SparkWebGPUFormats[format] + (srgb ? "-srgb" : "");
|
|
593
|
+
const viewFormats = srgb ? ["rgba8unorm", "rgba8unorm-srgb"] : ["rgba8unorm"];
|
|
594
|
+
console.time("create input texture");
|
|
595
|
+
let inputUsage = GPUTextureUsage.TEXTURE_BINDING | GPUTextureUsage.COPY_DST | GPUTextureUsage.STORAGE_BINDING;
|
|
596
|
+
const needsProcessing = options.flipY || width != image.width || height != image.height;
|
|
597
|
+
if (!needsProcessing && !(image instanceof GPUTexture)) {
|
|
598
|
+
inputUsage |= GPUTextureUsage.RENDER_ATTACHMENT;
|
|
599
|
+
}
|
|
600
|
+
const commandEncoder = this.#device.createCommandEncoder();
|
|
601
|
+
if (this.#querySet && typeof commandEncoder.writeTimestamp === "function") {
|
|
602
|
+
commandEncoder.writeTimestamp(this.#querySet, 0);
|
|
603
|
+
}
|
|
604
|
+
let inputTexture;
|
|
605
|
+
if (needsProcessing || !(image instanceof GPUTexture && !mipmaps)) {
|
|
606
|
+
inputTexture = this.#device.createTexture({
|
|
607
|
+
size: [width, height, 1],
|
|
608
|
+
mipLevelCount: mipmapCount,
|
|
609
|
+
format: "rgba8unorm",
|
|
610
|
+
usage: inputUsage,
|
|
611
|
+
viewFormats
|
|
612
|
+
});
|
|
613
|
+
}
|
|
614
|
+
let tmpTexture;
|
|
615
|
+
if (needsProcessing) {
|
|
616
|
+
if (image instanceof GPUTexture) {
|
|
617
|
+
this.#processInputTexture(commandEncoder, image, inputTexture, width, height, srgb, options.flipY);
|
|
618
|
+
} else {
|
|
619
|
+
tmpTexture = this.#device.createTexture({
|
|
620
|
+
size: [image.width, image.height, 1],
|
|
621
|
+
mipLevelCount: 1,
|
|
622
|
+
format: "rgba8unorm",
|
|
623
|
+
// RENDER_ATTACHMENT usage is necessary for copyExternalImageToTexture
|
|
624
|
+
usage: GPUTextureUsage.TEXTURE_BINDING | GPUTextureUsage.COPY_DST | GPUTextureUsage.RENDER_ATTACHMENT,
|
|
625
|
+
viewFormats
|
|
626
|
+
});
|
|
627
|
+
this.#device.queue.copyExternalImageToTexture(
|
|
628
|
+
{ source: image },
|
|
629
|
+
{ texture: tmpTexture },
|
|
630
|
+
{ width: image.width, height: image.height }
|
|
631
|
+
);
|
|
632
|
+
this.#processInputTexture(commandEncoder, tmpTexture, inputTexture, width, height, srgb, options.flipY);
|
|
633
|
+
}
|
|
634
|
+
} else {
|
|
635
|
+
if (image instanceof GPUTexture) {
|
|
636
|
+
if (mipmaps) {
|
|
637
|
+
commandEncoder.copyTextureToTexture({ texture: image }, { texture: inputTexture }, { width, height });
|
|
638
|
+
} else {
|
|
639
|
+
inputTexture = image;
|
|
640
|
+
}
|
|
641
|
+
} else {
|
|
642
|
+
this.#device.queue.copyExternalImageToTexture({ source: image }, { texture: inputTexture }, { width, height });
|
|
643
|
+
}
|
|
644
|
+
}
|
|
645
|
+
if (mipmaps) {
|
|
646
|
+
this.#generateMipmaps(commandEncoder, inputTexture, mipmapCount, width, height, srgb);
|
|
647
|
+
}
|
|
648
|
+
console.timeEnd("create input texture");
|
|
649
|
+
const outputTexture = this.#device.createTexture({
|
|
650
|
+
size: [width, height, 1],
|
|
651
|
+
mipLevelCount: mipmapCount,
|
|
652
|
+
format: webgpuFormat,
|
|
653
|
+
usage: GPUTextureUsage.TEXTURE_BINDING | GPUTextureUsage.COPY_DST
|
|
654
|
+
});
|
|
655
|
+
const outputBuffer = this.#device.createBuffer({
|
|
656
|
+
size: outputSize,
|
|
657
|
+
usage: GPUBufferUsage.STORAGE | GPUBufferUsage.COPY_SRC
|
|
658
|
+
});
|
|
659
|
+
console.time("loadPipeline");
|
|
660
|
+
const pipeline = await this.#loadPipeline(format);
|
|
661
|
+
console.timeEnd("loadPipeline");
|
|
662
|
+
console.time("dispatch compute shader");
|
|
663
|
+
let args = {};
|
|
664
|
+
if (this.#querySet && typeof commandEncoder.writeTimestamp !== "function") {
|
|
665
|
+
args = {
|
|
666
|
+
writeTimestamps: {
|
|
667
|
+
querySet: this.#querySet,
|
|
668
|
+
beginningOfPassWriteIndex: 0,
|
|
669
|
+
endOfPassWriteIndex: 1
|
|
670
|
+
}
|
|
671
|
+
};
|
|
672
|
+
}
|
|
673
|
+
const pass = commandEncoder.beginComputePass(args);
|
|
674
|
+
pass.setPipeline(pipeline);
|
|
675
|
+
for (let m = 0; m < mipmapCount; m++) {
|
|
676
|
+
const bindGroup = this.#device.createBindGroup({
|
|
677
|
+
layout: pipeline.getBindGroupLayout(0),
|
|
678
|
+
entries: [
|
|
679
|
+
{
|
|
680
|
+
binding: 0,
|
|
681
|
+
resource: inputTexture.createView({
|
|
682
|
+
baseMipLevel: m,
|
|
683
|
+
mipLevelCount: 1
|
|
684
|
+
})
|
|
685
|
+
},
|
|
686
|
+
{
|
|
687
|
+
binding: 1,
|
|
688
|
+
resource: this.#defaultSampler
|
|
689
|
+
},
|
|
690
|
+
{
|
|
691
|
+
binding: 2,
|
|
692
|
+
resource: {
|
|
693
|
+
buffer: outputBuffer,
|
|
694
|
+
offset: bufferRanges[m].offset,
|
|
695
|
+
size: bufferRanges[m].size
|
|
696
|
+
}
|
|
697
|
+
}
|
|
698
|
+
]
|
|
699
|
+
});
|
|
700
|
+
pass.setBindGroup(0, bindGroup);
|
|
701
|
+
pass.dispatchWorkgroups(Math.ceil(bufferRanges[m].bw / 16), Math.ceil(bufferRanges[m].bh / 16));
|
|
702
|
+
}
|
|
703
|
+
pass.end();
|
|
704
|
+
for (let m = 0; m < mipmapCount; m++) {
|
|
705
|
+
commandEncoder.copyBufferToTexture(
|
|
706
|
+
{
|
|
707
|
+
buffer: outputBuffer,
|
|
708
|
+
offset: bufferRanges[m].offset,
|
|
709
|
+
bytesPerRow: bufferRanges[m].bytesPerRow,
|
|
710
|
+
rowsPerImage: bufferRanges[m].bh
|
|
711
|
+
},
|
|
712
|
+
{
|
|
713
|
+
texture: outputTexture,
|
|
714
|
+
mipLevel: m
|
|
715
|
+
},
|
|
716
|
+
{
|
|
717
|
+
width: bufferRanges[m].bw * 4,
|
|
718
|
+
height: bufferRanges[m].bh * 4,
|
|
719
|
+
depthOrArrayLayers: 1
|
|
720
|
+
}
|
|
721
|
+
);
|
|
722
|
+
}
|
|
723
|
+
if (this.#querySet && typeof commandEncoder.writeTimestamp === "function") {
|
|
724
|
+
commandEncoder.writeTimestamp(this.#querySet, 1);
|
|
725
|
+
}
|
|
726
|
+
this.#device.queue.submit([commandEncoder.finish()]);
|
|
727
|
+
console.timeEnd("dispatch compute shader");
|
|
728
|
+
tmpTexture?.destroy();
|
|
729
|
+
if (inputTexture != image) {
|
|
730
|
+
inputTexture?.destroy();
|
|
731
|
+
}
|
|
732
|
+
outputBuffer?.destroy();
|
|
733
|
+
return outputTexture;
|
|
734
|
+
}
|
|
735
|
+
/**
|
|
736
|
+
* Returns the time (in milliseconds) it took to perform the most recent `encodeTexture()` call.
|
|
737
|
+
*
|
|
738
|
+
* This function resolves GPU timestamp queries that were recorded before and after the
|
|
739
|
+
* compression dispatch in `encodeTexture()`. It waits for the GPU to finish processing,
|
|
740
|
+
* reads back the timestamps, and computes the elapsed GPU time.
|
|
741
|
+
*
|
|
742
|
+
* Must be called *after* `encodeTexture()` has been invoked and submitted.
|
|
743
|
+
*
|
|
744
|
+
* @async
|
|
745
|
+
* @returns {Promise<number>} Elapsed GPU time in milliseconds.
|
|
746
|
+
*
|
|
747
|
+
* @example
|
|
748
|
+
* await spark.encodeTexture(...);
|
|
749
|
+
* const elapsed = await spark.getTimeElapsed();
|
|
750
|
+
* console.log(`Encode took ${elapsed.toFixed(2)} ms`);
|
|
751
|
+
*
|
|
752
|
+
* @throws {Error} If the GPU work has not been submitted, or if timestamp queries fail.
|
|
753
|
+
*/
|
|
754
|
+
async getTimeElapsed() {
|
|
755
|
+
if (!this.#querySet) {
|
|
756
|
+
return 0;
|
|
757
|
+
}
|
|
758
|
+
const commandEncoder = this.#device.createCommandEncoder();
|
|
759
|
+
commandEncoder.resolveQuerySet(this.#querySet, 0, 2, this.#queryBuffer, 0);
|
|
760
|
+
commandEncoder.copyBufferToBuffer(this.#queryBuffer, this.#queryReadbackBuffer, 16);
|
|
761
|
+
this.#device.queue.submit([commandEncoder.finish()]);
|
|
762
|
+
await this.#device.queue.onSubmittedWorkDone();
|
|
763
|
+
await this.#queryReadbackBuffer.mapAsync(GPUMapMode.READ);
|
|
764
|
+
const arrayBuffer = this.#queryReadbackBuffer.getMappedRange();
|
|
765
|
+
const timestamps = new BigUint64Array(arrayBuffer);
|
|
766
|
+
const t0 = timestamps[0];
|
|
767
|
+
const t1 = timestamps[1];
|
|
768
|
+
this.#queryReadbackBuffer.unmap();
|
|
769
|
+
const elapsedNanoseconds = Number(t1 - t0);
|
|
770
|
+
const elapsedMilliseconds = elapsedNanoseconds / 1e6;
|
|
771
|
+
return elapsedMilliseconds;
|
|
772
|
+
}
|
|
773
|
+
async #init(device) {
|
|
774
|
+
assert(device, "device is required");
|
|
775
|
+
assert(isWebGPU(device), "device is not a WebGPU device");
|
|
776
|
+
this.#device = device;
|
|
777
|
+
this.#supportedFormats = detectWebGPUFormats(this.#device);
|
|
778
|
+
this.#defaultSampler = this.#device.createSampler({
|
|
779
|
+
magFilter: "linear",
|
|
780
|
+
minFilter: "linear"
|
|
781
|
+
});
|
|
782
|
+
this.#srgbUniform = this.#device.createBuffer({
|
|
783
|
+
size: 4,
|
|
784
|
+
usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST
|
|
785
|
+
});
|
|
786
|
+
this.#noSrgbUniform = this.#device.createBuffer({
|
|
787
|
+
size: 4,
|
|
788
|
+
usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST
|
|
789
|
+
});
|
|
790
|
+
this.#device.queue.writeBuffer(this.#srgbUniform, 0, new Uint32Array([1]));
|
|
791
|
+
this.#device.queue.writeBuffer(this.#noSrgbUniform, 0, new Uint32Array([0]));
|
|
792
|
+
if (this.#device.features.has("timestamp-query")) {
|
|
793
|
+
const webkitVersion = getSafariVersion();
|
|
794
|
+
const firefoxVersion = getFirefoxVersion();
|
|
795
|
+
if ((!webkitVersion || webkitVersion >= 26) && !firefoxVersion) {
|
|
796
|
+
this.#querySet = this.#device.createQuerySet({ type: "timestamp", count: 2 });
|
|
797
|
+
this.#queryBuffer = this.#device.createBuffer({
|
|
798
|
+
size: 16,
|
|
799
|
+
// 2 timestamps × 8 bytes each
|
|
800
|
+
usage: GPUBufferUsage.COPY_SRC | GPUBufferUsage.QUERY_RESOLVE
|
|
801
|
+
});
|
|
802
|
+
this.#queryReadbackBuffer = this.#device.createBuffer({
|
|
803
|
+
size: 16,
|
|
804
|
+
// 2 timestamps × 8 bytes each
|
|
805
|
+
usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ
|
|
806
|
+
});
|
|
807
|
+
}
|
|
808
|
+
}
|
|
809
|
+
this.#supportsFloat16 = this.#device.features.has("shader-f16");
|
|
810
|
+
await this.#loadUtilPipelines();
|
|
811
|
+
for (const format of this.#supportedFormats) {
|
|
812
|
+
if (!this.#pipelines[format]) {
|
|
813
|
+
await this.#loadPipeline(format);
|
|
814
|
+
}
|
|
815
|
+
}
|
|
816
|
+
}
|
|
817
|
+
async #loadUtilPipelines() {
|
|
818
|
+
const shaderModule = this.#device.createShaderModule({
|
|
819
|
+
code: shaders["utils.wgsl"],
|
|
820
|
+
label: "utils"
|
|
821
|
+
});
|
|
822
|
+
if (typeof shaderModule.compilationInfo == "function") {
|
|
823
|
+
const info = await shaderModule.compilationInfo();
|
|
824
|
+
if (info.messages.some((msg) => msg.type == "error")) {
|
|
825
|
+
console.error("WGSL compilation errors:");
|
|
826
|
+
for (const msg of info.messages) {
|
|
827
|
+
console.error(msg);
|
|
828
|
+
}
|
|
829
|
+
throw new Error("Shader compilation failed");
|
|
830
|
+
}
|
|
831
|
+
}
|
|
832
|
+
this.#mipmapPipeline = await this.#device.createComputePipelineAsync({
|
|
833
|
+
layout: "auto",
|
|
834
|
+
compute: {
|
|
835
|
+
module: shaderModule,
|
|
836
|
+
entryPoint: "mipmap"
|
|
837
|
+
}
|
|
838
|
+
});
|
|
839
|
+
this.#resizePipeline = await this.#device.createComputePipelineAsync({
|
|
840
|
+
layout: "auto",
|
|
841
|
+
compute: {
|
|
842
|
+
module: shaderModule,
|
|
843
|
+
entryPoint: "resize"
|
|
844
|
+
}
|
|
845
|
+
});
|
|
846
|
+
this.#flipYPipeline = await this.#device.createComputePipelineAsync({
|
|
847
|
+
layout: "auto",
|
|
848
|
+
compute: {
|
|
849
|
+
module: shaderModule,
|
|
850
|
+
entryPoint: "flipy"
|
|
851
|
+
}
|
|
852
|
+
});
|
|
853
|
+
this.#detectChannelCountPipeline = await this.#device.createComputePipelineAsync({
|
|
854
|
+
layout: "auto",
|
|
855
|
+
compute: {
|
|
856
|
+
module: shaderModule,
|
|
857
|
+
entryPoint: "detect_channel_count"
|
|
858
|
+
}
|
|
859
|
+
});
|
|
860
|
+
}
|
|
861
|
+
async #loadPipeline(format) {
|
|
862
|
+
if (this.#pipelines[format]) {
|
|
863
|
+
return this.#pipelines[format];
|
|
864
|
+
}
|
|
865
|
+
const pipelinePromise = (async () => {
|
|
866
|
+
const shaderFile = SparkShaderFiles[format];
|
|
867
|
+
assert(shaderFile, `No shader available for format ${SparkFormatName[format]}`);
|
|
868
|
+
let shaderCode = shaders[shaderFile];
|
|
869
|
+
if (!this.#supportsFloat16) {
|
|
870
|
+
shaderCode = shaderCode.replace(/^enable f16;\s*/m, "").replace(/\bf16\b/g, "f32").replace(/\bvec([234])h\b/g, "vec$1f").replace(/\bmat([234]x[234])h/g, "mat$1f").replace(/\b(\d*\.\d+|\d+\.)h\b/g, "$1");
|
|
871
|
+
}
|
|
872
|
+
const shaderModule = this.#device.createShaderModule({
|
|
873
|
+
code: shaderCode,
|
|
874
|
+
label: SparkFormatName[format]
|
|
875
|
+
});
|
|
876
|
+
if (typeof shaderModule.getCompilationInfo == "function") {
|
|
877
|
+
const info = await shaderModule.getCompilationInfo();
|
|
878
|
+
if (info.messages.some((msg) => msg.type == "error")) {
|
|
879
|
+
console.error("WGSL compilation errors:");
|
|
880
|
+
for (const msg of info.messages) {
|
|
881
|
+
console.error(msg);
|
|
882
|
+
}
|
|
883
|
+
throw new Error("Shader compilation failed");
|
|
884
|
+
}
|
|
885
|
+
}
|
|
886
|
+
const pipeline = await this.#device.createComputePipelineAsync({
|
|
887
|
+
layout: "auto",
|
|
888
|
+
compute: {
|
|
889
|
+
module: shaderModule,
|
|
890
|
+
entryPoint: "main"
|
|
891
|
+
}
|
|
892
|
+
});
|
|
893
|
+
return pipeline;
|
|
894
|
+
})();
|
|
895
|
+
this.#pipelines[format] = pipelinePromise;
|
|
896
|
+
return pipelinePromise;
|
|
897
|
+
}
|
|
898
|
+
#isFormatSupported(format) {
|
|
899
|
+
return this.#supportedFormats.has(format);
|
|
900
|
+
}
|
|
901
|
+
async #getBestMatchingFormat(options, image) {
|
|
902
|
+
if (!options.format || options.format == "auto") {
|
|
903
|
+
if (options.alpha) {
|
|
904
|
+
if (this.#isFormatSupported(SparkFormat.BC7_RGBA)) return SparkFormat.BC7_RGBA;
|
|
905
|
+
if (this.#isFormatSupported(SparkFormat.ASTC_4x4_RGBA)) return SparkFormat.ASTC_4x4_RGBA;
|
|
906
|
+
if (this.#isFormatSupported(SparkFormat.BC3_RGBA)) return SparkFormat.BC3_RGBA;
|
|
907
|
+
if (this.#isFormatSupported(SparkFormat.ETC2_RGBA)) return SparkFormat.ETC2_RGBA;
|
|
908
|
+
} else if (options.srgb) {
|
|
909
|
+
if (this.#isFormatSupported(SparkFormat.BC7_RGB)) return SparkFormat.BC7_RGB;
|
|
910
|
+
if (this.#isFormatSupported(SparkFormat.ASTC_4x4_RGB)) return SparkFormat.ASTC_4x4_RGB;
|
|
911
|
+
if (this.#isFormatSupported(SparkFormat.BC1_RGB)) return SparkFormat.BC1_RGB;
|
|
912
|
+
if (this.#isFormatSupported(SparkFormat.ETC2_RGB)) return SparkFormat.ETC2_RGB;
|
|
913
|
+
} else {
|
|
914
|
+
let channelCount;
|
|
915
|
+
if (image instanceof GPUTexture) {
|
|
916
|
+
if (image.format == "r8unorm" || image.format == "r16unorm") channelCount = 1;
|
|
917
|
+
else if (image.format == "rg8unorm" || image.format == "rg16unorm") channelCount = 2;
|
|
918
|
+
else {
|
|
919
|
+
channelCount = await this.#detectChannelCountGPU(image);
|
|
920
|
+
}
|
|
921
|
+
} else {
|
|
922
|
+
const buffer = imageToByteArray(image);
|
|
923
|
+
channelCount = this.#detectChannelCount(buffer);
|
|
924
|
+
}
|
|
925
|
+
if (channelCount == 4) {
|
|
926
|
+
if (this.#isFormatSupported(SparkFormat.BC7_RGBA)) return SparkFormat.BC7_RGBA;
|
|
927
|
+
if (this.#isFormatSupported(SparkFormat.ASTC_4x4_RGBA)) return SparkFormat.ASTC_4x4_RGBA;
|
|
928
|
+
if (this.#isFormatSupported(SparkFormat.BC3_RGBA)) return SparkFormat.BC3_RGBA;
|
|
929
|
+
if (this.#isFormatSupported(SparkFormat.ETC2_RGBA)) return SparkFormat.ETC2_RGBA;
|
|
930
|
+
} else if (channelCount == 3) {
|
|
931
|
+
if (this.#isFormatSupported(SparkFormat.BC7_RGB)) return SparkFormat.BC7_RGB;
|
|
932
|
+
if (this.#isFormatSupported(SparkFormat.ASTC_4x4_RGB)) return SparkFormat.ASTC_4x4_RGB;
|
|
933
|
+
if (this.#isFormatSupported(SparkFormat.BC1_RGB)) return SparkFormat.BC1_RGB;
|
|
934
|
+
if (this.#isFormatSupported(SparkFormat.ETC2_RGB)) return SparkFormat.ETC2_RGB;
|
|
935
|
+
} else if (channelCount == 2) {
|
|
936
|
+
if (this.#isFormatSupported(SparkFormat.BC5_RG)) return SparkFormat.BC5_RG;
|
|
937
|
+
if (this.#isFormatSupported(SparkFormat.EAC_RG)) return SparkFormat.EAC_RG;
|
|
938
|
+
} else if (channelCount == 1) {
|
|
939
|
+
if (this.#isFormatSupported(SparkFormat.BC4_R)) return SparkFormat.BC4_R;
|
|
940
|
+
if (this.#isFormatSupported(SparkFormat.EAC_R)) return SparkFormat.EAC_R;
|
|
941
|
+
}
|
|
942
|
+
}
|
|
943
|
+
throw new Error("No supported format found.");
|
|
944
|
+
}
|
|
945
|
+
if (SparkFormatMap[options.format] != void 0 && this.#isFormatSupported(SparkFormatMap[options.format])) {
|
|
946
|
+
return SparkFormatMap[options.format];
|
|
947
|
+
}
|
|
948
|
+
const preferenceOrder = [
|
|
949
|
+
"bc4-r",
|
|
950
|
+
"bc5-rg",
|
|
951
|
+
"bc7-rgb",
|
|
952
|
+
"bc7-rgba",
|
|
953
|
+
"bc1-rgb",
|
|
954
|
+
"bc3-rgba",
|
|
955
|
+
"astc-rgb",
|
|
956
|
+
"astc-4x4-rgb",
|
|
957
|
+
"astc-rgba",
|
|
958
|
+
"astc-4x4-rgba",
|
|
959
|
+
"eac-r",
|
|
960
|
+
"eac-rg",
|
|
961
|
+
"etc2-rgb",
|
|
962
|
+
"etc2-rgba"
|
|
963
|
+
];
|
|
964
|
+
for (const key of preferenceOrder) {
|
|
965
|
+
if (key.includes(options.format) && this.#isFormatSupported(SparkFormatMap[key])) {
|
|
966
|
+
return SparkFormatMap[key];
|
|
967
|
+
}
|
|
968
|
+
}
|
|
969
|
+
throw new Error(`Unsupported format: ${options.format}`);
|
|
970
|
+
}
|
|
971
|
+
#detectChannelCount(imageData) {
|
|
972
|
+
let opaque = true;
|
|
973
|
+
let grayscale = true;
|
|
974
|
+
let invalidNormalCount = 0;
|
|
975
|
+
const count = Math.min(1024 * 128, imageData.length);
|
|
976
|
+
for (let i = 0; i < count; i += 4) {
|
|
977
|
+
const r = imageData[i] / 255;
|
|
978
|
+
const g = imageData[i + 1] / 255;
|
|
979
|
+
const b = imageData[i + 2] / 255;
|
|
980
|
+
const a = imageData[i + 3];
|
|
981
|
+
if (a < 255) opaque = false;
|
|
982
|
+
if (r != g || g != b) grayscale = false;
|
|
983
|
+
const x = 2 * r - 1;
|
|
984
|
+
const y = 2 * g - 1;
|
|
985
|
+
const z = 2 * b - 1;
|
|
986
|
+
const len2 = x * x + y * y + z * z;
|
|
987
|
+
const len = Math.sqrt(len2);
|
|
988
|
+
if (Math.abs(len - 1) > 0.2 || z < -0.1) invalidNormalCount += 1;
|
|
989
|
+
}
|
|
990
|
+
if (!opaque) return 4;
|
|
991
|
+
if (grayscale) return 1;
|
|
992
|
+
if (4 * invalidNormalCount < count) return 2;
|
|
993
|
+
return 3;
|
|
994
|
+
}
|
|
995
|
+
// GPU-side variant of #detectChannelCount for images already resident in a
// GPUTexture. Runs the detect_channel_count compute kernel, which
// accumulates three u32 counters (non-opaque pixels, non-grayscale pixels,
// invalid-normal pixels), then reads them back to classify the image as
// 4 / 1 / 2 / 3 channels — same decision ladder as the CPU path.
async #detectChannelCountGPU(texture) {
  // 3 u32 counters written by the shader.
  const counterSize = 12;
  const counterBuffer = this.#device.createBuffer({
    size: counterSize,
    usage: GPUBufferUsage.STORAGE | GPUBufferUsage.COPY_SRC
  });
  // Staging buffer for CPU readback of the counters.
  const readbackBuffer = this.#device.createBuffer({
    size: counterSize,
    usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ
  });
  const bindGroup = this.#device.createBindGroup({
    layout: this.#detectChannelCountPipeline.getBindGroupLayout(0),
    entries: [
      { binding: 0, resource: texture.createView() },
      { binding: 1, resource: { buffer: counterBuffer } }
    ]
  });
  const encoder = this.#device.createCommandEncoder();
  const pass = encoder.beginComputePass();
  pass.setPipeline(this.#detectChannelCountPipeline);
  pass.setBindGroup(0, bindGroup);
  const { width, height } = texture;
  // One 8x8 workgroup per 8x8 tile of pixels.
  const dispatchX = Math.ceil(width / 8);
  const dispatchY = Math.ceil(height / 8);
  pass.dispatchWorkgroups(dispatchX, dispatchY);
  pass.end();
  // Copy counters into the mappable staging buffer within the same submission.
  encoder.copyBufferToBuffer(counterBuffer, 0, readbackBuffer, 0, counterSize);
  this.#device.queue.submit([encoder.finish()]);
  await this.#device.queue.onSubmittedWorkDone();
  await readbackBuffer.mapAsync(GPUMapMode.READ);
  const view = new Uint32Array(readbackBuffer.getMappedRange());
  // Zero counter means "no pixel violated the property".
  const opaque = view[0] == 0;
  const grayscale = view[1] == 0;
  const invalidNormalCount = view[2];
  readbackBuffer.unmap();
  readbackBuffer.destroy();
  counterBuffer.destroy();
  if (!opaque) return 4;
  if (grayscale) return 1;
  // Normal map when fewer than 25% of pixels fail the unit-normal test.
  if (4 * invalidNormalCount < width * height) return 2;
  return 3;
}
|
|
1037
|
+
// Apply scaling and flipY transform.
|
|
1038
|
+
async #processInputTexture(encoder, inputTexture, outputTexture, width, height, srgb, flipY) {
|
|
1039
|
+
const pass = encoder.beginComputePass();
|
|
1040
|
+
const pipeline = flipY ? this.#flipYPipeline : this.#resizePipeline;
|
|
1041
|
+
pass.setPipeline(pipeline);
|
|
1042
|
+
const bindGroup = this.#device.createBindGroup({
|
|
1043
|
+
layout: pipeline.getBindGroupLayout(0),
|
|
1044
|
+
entries: [
|
|
1045
|
+
{
|
|
1046
|
+
binding: 0,
|
|
1047
|
+
resource: inputTexture.createView({
|
|
1048
|
+
baseMipLevel: 0,
|
|
1049
|
+
mipLevelCount: 1,
|
|
1050
|
+
format: srgb ? "rgba8unorm-srgb" : "rgba8unorm",
|
|
1051
|
+
usage: GPUTextureUsage.TEXTURE_BINDING
|
|
1052
|
+
})
|
|
1053
|
+
},
|
|
1054
|
+
{
|
|
1055
|
+
binding: 1,
|
|
1056
|
+
resource: outputTexture.createView({
|
|
1057
|
+
baseMipLevel: 0,
|
|
1058
|
+
mipLevelCount: 1,
|
|
1059
|
+
dimension: "2d",
|
|
1060
|
+
format: "rgba8unorm",
|
|
1061
|
+
usage: GPUTextureUsage.STORAGE_BINDING
|
|
1062
|
+
})
|
|
1063
|
+
},
|
|
1064
|
+
{
|
|
1065
|
+
binding: 2,
|
|
1066
|
+
resource: this.#defaultSampler
|
|
1067
|
+
},
|
|
1068
|
+
{
|
|
1069
|
+
binding: 3,
|
|
1070
|
+
resource: { buffer: srgb ? this.#srgbUniform : this.#noSrgbUniform }
|
|
1071
|
+
}
|
|
1072
|
+
]
|
|
1073
|
+
});
|
|
1074
|
+
pass.setBindGroup(0, bindGroup);
|
|
1075
|
+
pass.dispatchWorkgroups(Math.ceil(width / 8), Math.ceil(height / 8));
|
|
1076
|
+
pass.end();
|
|
1077
|
+
}
|
|
1078
|
+
async #generateMipmaps(encoder, texture, mipmapCount, width, height, srgb) {
|
|
1079
|
+
const pass = encoder.beginComputePass();
|
|
1080
|
+
pass.setPipeline(this.#mipmapPipeline);
|
|
1081
|
+
let w = width;
|
|
1082
|
+
let h = height;
|
|
1083
|
+
for (let i = 0; i < mipmapCount - 1; i++) {
|
|
1084
|
+
w = Math.max(1, Math.floor(w / 2));
|
|
1085
|
+
h = Math.max(1, Math.floor(h / 2));
|
|
1086
|
+
this.#generateMipLevel(pass, texture, i, i + 1, w, h, srgb);
|
|
1087
|
+
}
|
|
1088
|
+
pass.end();
|
|
1089
|
+
}
|
|
1090
|
+
#generateMipLevel(pass, texture, srcLevel, dstLevel, width, height, srgb) {
|
|
1091
|
+
const bindGroup = this.#device.createBindGroup({
|
|
1092
|
+
layout: this.#mipmapPipeline.getBindGroupLayout(0),
|
|
1093
|
+
entries: [
|
|
1094
|
+
{
|
|
1095
|
+
binding: 0,
|
|
1096
|
+
resource: texture.createView({
|
|
1097
|
+
baseMipLevel: srcLevel,
|
|
1098
|
+
mipLevelCount: 1,
|
|
1099
|
+
format: srgb ? "rgba8unorm-srgb" : "rgba8unorm",
|
|
1100
|
+
usage: GPUTextureUsage.TEXTURE_BINDING
|
|
1101
|
+
})
|
|
1102
|
+
},
|
|
1103
|
+
{
|
|
1104
|
+
binding: 1,
|
|
1105
|
+
resource: texture.createView({
|
|
1106
|
+
baseMipLevel: dstLevel,
|
|
1107
|
+
mipLevelCount: 1,
|
|
1108
|
+
dimension: "2d",
|
|
1109
|
+
format: "rgba8unorm",
|
|
1110
|
+
usage: GPUTextureUsage.STORAGE_BINDING
|
|
1111
|
+
})
|
|
1112
|
+
},
|
|
1113
|
+
{
|
|
1114
|
+
binding: 2,
|
|
1115
|
+
resource: this.#defaultSampler
|
|
1116
|
+
},
|
|
1117
|
+
{
|
|
1118
|
+
binding: 3,
|
|
1119
|
+
resource: { buffer: srgb ? this.#srgbUniform : this.#noSrgbUniform }
|
|
1120
|
+
}
|
|
1121
|
+
]
|
|
1122
|
+
});
|
|
1123
|
+
pass.setBindGroup(0, bindGroup);
|
|
1124
|
+
pass.dispatchWorkgroups(Math.ceil(width / 8), Math.ceil(height / 8));
|
|
1125
|
+
}
|
|
1126
|
+
}
|
|
1127
|
+
export {
|
|
1128
|
+
Spark
|
|
1129
|
+
};
|