@plasius/gpu-worker 0.1.0 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +47 -1
- package/README.md +86 -9
- package/dist/index.cjs +493 -8
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +481 -9
- package/dist/index.js.map +1 -1
- package/dist/worker.wgsl +23 -120
- package/package.json +3 -2
- package/src/index.js +533 -8
- package/src/worker.wgsl +23 -120
package/src/index.js
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { loadQueueWgsl } from "@plasius/gpu-lock-free-queue";
|
|
1
|
+
import { loadQueueWgsl as loadQueueWgslRaw } from "@plasius/gpu-lock-free-queue";
|
|
2
2
|
|
|
3
3
|
export const workerWgslUrl = (() => {
|
|
4
4
|
if (typeof __IMPORT_META_URL__ !== "undefined") {
|
|
@@ -15,13 +15,538 @@ export const workerWgslUrl = (() => {
|
|
|
15
15
|
return new URL("./worker.wgsl", base);
|
|
16
16
|
})();
|
|
17
17
|
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
18
|
+
// Module-level registry of jobs added via loadJobWgsl(); consumed by
// assembleWorkerWgsl() when the caller passes no explicit `jobs` list.
const jobRegistry = [];
// Monotonic counter assigning the next job_type id handed out by loadJobWgsl().
let nextJobType = 0;
|
|
20
|
+
|
|
21
|
+
/**
 * Resolve WGSL source text from the given options.
 *
 * Resolution order: inline `wgsl` string → `url` (fetched, or read from disk
 * for file: URLs) → null when neither is provided.
 *
 * @param {object} [options]
 * @param {string} [options.wgsl] - Inline WGSL source; returned as-is after an HTML sanity check.
 * @param {string|URL} [options.url] - Location of the WGSL file.
 * @param {Function} [options.fetcher] - fetch-compatible function; defaults to globalThis.fetch.
 * @param {string|URL} [options.baseUrl] - Base used to resolve a relative `url`.
 * @returns {Promise<string|null>} The WGSL text, or null when no source was given.
 * @throws {Error} When the response is not ok, no fetcher exists for a non-file
 *   URL, or the payload looks like HTML (e.g. a dev server's index fallback).
 */
async function loadWgslSource(options = {}) {
  const { wgsl, url, fetcher = globalThis.fetch, baseUrl } = options ?? {};
  if (typeof wgsl === "string") {
    assertNotHtmlWgsl(wgsl, "inline WGSL");
    return wgsl;
  }
  if (!url) {
    return null;
  }
  const resolved = url instanceof URL ? url : new URL(url, baseUrl);
  // Did the caller explicitly supply a fetcher (as opposed to the
  // globalThis.fetch default)? Explicit fetchers are honored even for file:
  // URLs so test doubles keep working.
  const customFetcher = (options ?? {}).fetcher != null;
  if (resolved.protocol === "file:" && !customFetcher) {
    // FIX: global fetch rejects the file: scheme (undici in Node and
    // browsers alike). Previously the fs path was taken only when no fetch
    // global existed, so file: URLs failed on Node >= 18 where
    // globalThis.fetch is always defined. Always read file: URLs from disk
    // unless the caller provided their own fetcher.
    const { readFile } = await import("fs/promises");
    const { fileURLToPath } = await import("url");
    const source = await readFile(fileURLToPath(resolved), "utf8");
    assertNotHtmlWgsl(source, resolved.href);
    return source;
  }
  if (!fetcher) {
    throw new Error("No fetcher available for non-file WGSL URL.");
  }
  const response = await fetcher(resolved);
  if (!response.ok) {
    // Build a best-effort status string; custom fetchers may omit fields.
    const status = "status" in response ? response.status : "unknown";
    const statusText = "statusText" in response ? response.statusText : "";
    const detail = statusText ? `${status} ${statusText}` : `${status}`;
    throw new Error(`Failed to load WGSL (${detail})`);
  }
  const source = await response.text();
  assertNotHtmlWgsl(source, resolved.href);
  return source;
}
|
|
52
|
+
|
|
53
|
+
// Remove both block (/* ... */) and line (// ...) comments from WGSL text.
function stripComments(source) {
  const withoutBlockComments = source.replace(/\/\*[\s\S]*?\*\//g, "");
  return withoutBlockComments.replace(/\/\/.*$/gm, "");
}
|
|
58
|
+
|
|
59
|
+
// Split WGSL text into a flat list of identifier and punctuation tokens.
// Anything that is neither an identifier nor one of {}();<>,:= is dropped.
function tokenize(source) {
  const matches = source.match(/[A-Za-z_][A-Za-z0-9_]*|[{}();<>,:=]/g);
  return matches === null ? [] : matches;
}
|
|
22
62
|
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
63
|
+
// True when the token has WGSL identifier form: leading letter/underscore
// followed by letters, digits, or underscores.
function isIdentifier(token) {
  const identifierPattern = /^[A-Za-z_][A-Za-z0-9_]*$/;
  return identifierPattern.test(token);
}
|
|
66
|
+
|
|
67
|
+
// Given a token stream positioned just after `var`/`let`/`const`/`override`,
// skip an optional <...> template list (e.g. var<storage, read_write>) and
// return the token that follows — the declared name.
function readNameAfterType(tokens, startIndex) {
  let cursor = startIndex;
  if (tokens[cursor] === "<") {
    cursor += 1;
    let nesting = 1;
    while (cursor < tokens.length) {
      const tok = tokens[cursor];
      cursor += 1;
      if (tok === "<") {
        nesting += 1;
      } else if (tok === ">") {
        nesting -= 1;
        if (nesting === 0) {
          break;
        }
      }
    }
  }
  return tokens[cursor];
}
|
|
83
|
+
|
|
84
|
+
// Collect the names declared at module scope in a WGSL source: functions,
// structs, aliases, and var/let/const/override globals. Declarations nested
// inside braces (function bodies, struct bodies) are ignored via depth
// tracking. Returns [{ kind, name }].
function scanModuleNames(source) {
  const tokens = tokenize(stripComments(source));
  const found = [];
  let braceDepth = 0;
  for (let pos = 0; pos < tokens.length; pos += 1) {
    const tok = tokens[pos];
    if (tok === "{") {
      braceDepth += 1;
      continue;
    }
    if (tok === "}") {
      // Clamp at zero so unbalanced input cannot flip nesting negative.
      braceDepth = Math.max(0, braceDepth - 1);
      continue;
    }
    if (braceDepth !== 0) {
      continue;
    }
    if (tok === "fn" || tok === "struct" || tok === "alias") {
      // Name immediately follows the keyword.
      const candidate = tokens[pos + 1];
      if (isIdentifier(candidate)) {
        found.push({ kind: tok, name: candidate });
      }
      continue;
    }
    if (tok === "var" || tok === "let" || tok === "const" || tok === "override") {
      // var may carry a template list (var<storage, read_write>) before the name.
      const candidate = readNameAfterType(tokens, pos + 1);
      if (isIdentifier(candidate)) {
        found.push({ kind: tok, name: candidate });
      }
    }
  }
  return found;
}
|
|
132
|
+
|
|
133
|
+
// Build a name -> [{ kind, module }] index across several WGSL modules
// ({ name, source } records), used for clash detection.
function buildNameIndex(modules) {
  const byName = new Map();
  for (const mod of modules) {
    for (const decl of scanModuleNames(mod.source)) {
      if (!byName.has(decl.name)) {
        byName.set(decl.name, []);
      }
      byName.get(decl.name).push({ kind: decl.kind, module: mod.name });
    }
  }
  return byName;
}
|
|
144
|
+
|
|
145
|
+
// Throw a readable report when the same identifier is declared at module
// scope in more than one of the given WGSL modules.
function assertNoNameClashes(modules) {
  const duplicated = [...buildNameIndex(modules).entries()].filter(
    ([, entries]) => entries.length > 1
  );
  if (duplicated.length === 0) {
    return;
  }
  const report = ["WGSL debug: identifier clashes detected:"];
  for (const [name, entries] of duplicated) {
    const where = entries
      .map((entry) => `${entry.module} (${entry.kind})`)
      .join(", ");
    report.push(`- ${name}: ${where}`);
  }
  throw new Error(report.join("\n"));
}
|
|
165
|
+
|
|
166
|
+
// Reject payloads that look like an HTML page — the usual symptom of a dev
// server answering a missing .wgsl path with its index.html fallback. Only
// the first 200 characters are inspected.
function assertNotHtmlWgsl(source, context) {
  const head = source.slice(0, 200).toLowerCase();
  const htmlMarkers = ["<!doctype", "<html", "<meta"];
  if (!htmlMarkers.some((marker) => head.includes(marker))) {
    return;
  }
  const label = context ? ` for ${context}` : "";
  throw new Error(
    `Expected WGSL${label} but received HTML. Check the URL or server root.`
  );
}
|
|
179
|
+
|
|
180
|
+
// Rewrite every standalone occurrence of `process_job` in the WGSL source to
// the given name (word boundaries keep e.g. `process_jobx` untouched).
function renameProcessJob(source, name) {
  const pattern = /\bprocess_job\b/g;
  return source.replace(pattern, name);
}
|
|
183
|
+
|
|
184
|
+
// Older queue WGSL declared `JobMeta`; current consumers expect `JobDesc`.
// Return a rename map when the legacy name appears in the source, else null.
function getQueueCompatMap(source) {
  const usesLegacyName = /\bJobMeta\b/.test(source);
  if (usesLegacyName) {
    return [{ from: /\bJobMeta\b/g, to: "JobDesc" }];
  }
  return null;
}
|
|
190
|
+
|
|
191
|
+
// Apply each { from, to } rewrite of the compat map to the source, in order.
// A null/empty map returns the source unchanged.
function applyCompatMap(source, map) {
  if (!map?.length) {
    return source;
  }
  return map.reduce((text, { from, to }) => text.replace(from, to), source);
}
|
|
201
|
+
|
|
202
|
+
// Coerce job entries into a uniform { jobType, wgsl, label, sourceName }
// shape. Bare strings become jobs keyed by their array index; objects must
// carry a `wgsl` string. Throws on duplicate job types.
function normalizeJobs(jobs) {
  const shaped = jobs.map((entry, index) => {
    if (typeof entry === "string") {
      return {
        jobType: index,
        wgsl: entry,
        label: `job_${index}`,
        sourceName: `job-${index}`,
      };
    }
    if (!entry || typeof entry.wgsl !== "string") {
      throw new Error("Job entries must provide WGSL source strings.");
    }
    const jobType = entry.jobType ?? index;
    const label = entry.label ?? `job_${jobType}`;
    const sourceName = entry.sourceName ?? entry.label ?? `job-${jobType}`;
    return { jobType, wgsl: entry.wgsl, label, sourceName };
  });
  const usedTypes = new Set();
  for (const { jobType } of shaped) {
    if (usedTypes.has(jobType)) {
      throw new Error(`Duplicate job_type detected: ${jobType}`);
    }
    usedTypes.add(jobType);
  }
  return shaped;
}
|
|
233
|
+
|
|
234
|
+
// Emit the WGSL process_job() dispatcher routing on job_type to each job's
// renamed entry function. With no jobs, the dispatcher is a no-op.
function buildProcessJobDispatch(jobs) {
  const header =
    "fn process_job(job_index: u32, job_type: u32, payload_words: u32) {";
  if (jobs.length === 0) {
    return [header, "  return;", "}"].join("\n");
  }
  const branches = jobs.flatMap((job, idx) => [
    `  ${idx === 0 ? "if" : "else if"} (job_type == ${job.jobType}u) {`,
    `    ${job.entryName}(job_index, job_type, payload_words);`,
    "  }",
  ]);
  return [header, ...branches, "}"].join("\n");
}
|
|
254
|
+
|
|
255
|
+
/**
 * Load the package's worker WGSL shell (worker.wgsl by default).
 *
 * @param {object} [options]
 * @param {string|URL} [options.url] - Override location; defaults to the bundled worker.wgsl.
 * @param {Function} [options.fetcher] - fetch-compatible loader.
 * @returns {Promise<string>} The worker WGSL text.
 * @throws {Error} When no WGSL text could be loaded.
 */
export async function loadWorkerWgsl(options = {}) {
  const { url = workerWgslUrl, fetcher } = options ?? {};
  const text = await loadWgslSource({ url, fetcher, baseUrl: workerWgslUrl });
  if (typeof text !== "string") {
    throw new Error("Failed to load worker WGSL source.");
  }
  return text;
}
|
|
267
|
+
|
|
268
|
+
/**
 * Load the lock-free queue WGSL, applying the JobMeta→JobDesc compatibility
 * rename unless `queueCompat` is false. Remaining options are forwarded to
 * the underlying @plasius/gpu-lock-free-queue loader.
 *
 * @returns {Promise<string>} Queue WGSL text (possibly compat-rewritten).
 * @throws {Error} When loading fails or the payload looks like HTML.
 */
export async function loadQueueWgsl(options = {}) {
  const { queueCompat = true, ...rest } = options ?? {};
  const raw = await loadQueueWgslRaw(rest);
  if (typeof raw !== "string") {
    throw new Error("Failed to load queue WGSL source.");
  }
  assertNotHtmlWgsl(raw, rest?.url ? String(rest.url) : "queue WGSL");
  if (!queueCompat) {
    return raw;
  }
  return applyCompatMap(raw, getQueueCompatMap(raw));
}
|
|
281
|
+
|
|
282
|
+
/**
 * Load a job's WGSL (inline string or URL) and register it in the module
 * job registry used by assembleWorkerWgsl() when no explicit job list is
 * passed. Job type ids are assigned sequentially.
 *
 * @param {object} [options]
 * @param {string} [options.wgsl] - Inline WGSL source.
 * @param {string|URL} [options.url] - WGSL location, resolved against the worker URL.
 * @param {Function} [options.fetcher] - fetch-compatible loader.
 * @param {string} [options.label] - Human-readable label; also used as sourceName.
 * @returns {Promise<number>} The assigned job type id.
 * @throws {Error} When neither a WGSL string nor a URL yields source text.
 */
export async function loadJobWgsl(options = {}) {
  const { wgsl, url, fetcher, label } = options ?? {};
  const text = await loadWgslSource({
    wgsl,
    url,
    fetcher,
    baseUrl: workerWgslUrl,
  });
  if (typeof text !== "string") {
    throw new Error("loadJobWgsl requires a WGSL string or URL.");
  }
  const jobType = nextJobType;
  nextJobType += 1;
  jobRegistry.push({
    jobType,
    wgsl: text,
    label: label ?? `job_${jobType}`,
    sourceName: label ?? `job-${jobType}`,
  });
  return jobType;
}
|
|
303
|
+
|
|
304
|
+
/**
 * Assemble the final worker shader: queue WGSL + optional prelude + per-job
 * WGSL (each job's process_job renamed to a unique entry) + a generated
 * process_job dispatcher + the worker body, concatenated in that order.
 *
 * @param {string} [workerWgsl] - Worker body source; loaded from worker.wgsl when omitted.
 * @param {object} [options]
 * @param {string} [options.queueWgsl] - Queue source; fetched via loadQueueWgslRaw when omitted.
 * @param {string|URL} [options.queueUrl] - Queue location forwarded to the raw loader.
 * @param {string} [options.preludeWgsl] - Shared WGSL placed before the job blocks.
 * @param {string|URL} [options.preludeUrl] - Prelude location (used when preludeWgsl omitted).
 * @param {Function} [options.fetcher] - fetch-compatible loader for all URL loads.
 * @param {Array} [options.jobs] - Job entries; defaults to the module jobRegistry.
 * @param {boolean} [options.debug] - When true, scan all parts for identifier clashes.
 * @param {boolean} [options.queueCompat=true] - Apply the JobMeta→JobDesc rename.
 * @returns {Promise<string>} The concatenated WGSL program text.
 * @throws {Error} On load failures, jobs missing process_job, duplicate job
 *   types, or (debug only) identifier clashes.
 */
export async function assembleWorkerWgsl(workerWgsl, options = {}) {
  const {
    queueWgsl,
    queueUrl,
    preludeWgsl,
    preludeUrl,
    fetcher,
    jobs,
    debug,
    queueCompat = true,
  } = options ?? {};
  // Queue source is loaded raw; the compat map is derived from it below and
  // then applied uniformly to queue, prelude, job, and worker sources so all
  // parts agree on the JobDesc name.
  const rawQueueSource =
    queueWgsl ?? (await loadQueueWgslRaw({ url: queueUrl, fetcher }));
  const bodyRaw = workerWgsl ?? (await loadWorkerWgsl({ fetcher }));
  const compatMap = queueCompat ? getQueueCompatMap(rawQueueSource) : null;
  const queueSource = applyCompatMap(rawQueueSource, compatMap);
  const preludeRaw =
    preludeWgsl ??
    (preludeUrl
      ? await loadWgslSource({ url: preludeUrl, fetcher, baseUrl: workerWgslUrl })
      : "");
  // Only fail when a prelude was actually requested but did not load.
  if ((preludeWgsl || preludeUrl) && typeof preludeRaw !== "string") {
    throw new Error("Failed to load prelude WGSL source.");
  }
  const preludeSource =
    typeof preludeRaw === "string" && preludeRaw.length > 0
      ? applyCompatMap(preludeRaw, compatMap)
      : "";
  const body = applyCompatMap(bodyRaw, compatMap);
  // `jobs: []` deliberately overrides the registry with "no jobs";
  // only an omitted option falls back to the module jobRegistry.
  const jobList = normalizeJobs(
    typeof jobs === "undefined" ? jobRegistry : jobs
  );
  if (!jobList || jobList.length === 0) {
    // No jobs: queue + worker body only (no dispatcher, no prelude).
    return `${queueSource}\n\n${body}`;
  }
  // Each job must define process_job(); it is renamed to a unique entry
  // (process_job__<type>) so the generated dispatcher can route to it.
  const rewrittenJobs = jobList.map((job) => {
    const source = applyCompatMap(job.wgsl, compatMap);
    const hasProcessJob = /\bfn\s+process_job\b/.test(source);
    if (!hasProcessJob) {
      throw new Error(
        `Job ${job.sourceName} is missing a process_job() entry function.`
      );
    }
    const entryName = `process_job__${job.jobType}`;
    const renamed = renameProcessJob(source, entryName);
    return { ...job, entryName, wgsl: renamed };
  });
  const dispatch = buildProcessJobDispatch(rewrittenJobs);
  // In debug mode, scan every constituent module for module-scope name
  // clashes before concatenation, so errors point at the offending files.
  const modulesForDebug = debug
    ? [
        { name: "queue.wgsl", source: queueSource },
        ...(preludeSource
          ? [{ name: "jobs.prelude.wgsl", source: preludeSource }]
          : []),
        ...rewrittenJobs.map((job) => ({
          name: job.sourceName,
          source: job.wgsl,
        })),
        { name: "jobs.dispatch.wgsl", source: dispatch },
        { name: "worker.wgsl", source: body },
      ]
    : null;
  if (modulesForDebug) {
    assertNoNameClashes(modulesForDebug);
  }
  const jobBlocks = rewrittenJobs
    .map((job) => `// Job ${job.jobType}: ${job.label}\n${job.wgsl}`)
    .join("\n\n");
  const preludeBlock = preludeSource ? `${preludeSource}\n\n` : "";
  return `${queueSource}\n\n${preludeBlock}${jobBlocks}\n\n${dispatch}\n\n${body}`;
}
|
|
375
|
+
|
|
376
|
+
// Coerce a workgroup spec — a bare number or an [x, y, z] array — into a
// full 3-tuple. Missing array entries default to [0, 1, 1] (x=0 means the
// dispatch is skipped by the caller). Anything else is rejected.
function normalizeWorkgroups(value, label) {
  if (typeof value === "number") {
    return [value, 1, 1];
  }
  if (!Array.isArray(value)) {
    throw new Error(`Invalid workgroup count for ${label}.`);
  }
  const [x = 0, y = 1, z = 1] = value;
  return [x, y, z];
}
|
|
386
|
+
|
|
387
|
+
// Resolve a workgroup spec that may be a thunk (called each tick), a value,
// or null/undefined (-> null, meaning "not specified").
function resolveWorkgroups(value, label) {
  if (value == null) {
    return null;
  }
  const concrete = typeof value === "function" ? value() : value;
  return normalizeWorkgroups(concrete, label);
}
|
|
396
|
+
|
|
397
|
+
// Bind each truthy group at its array index on the compute pass; null/holes
// in the array leave that slot untouched.
function setBindGroups(pass, bindGroups) {
  if (!bindGroups) {
    return;
  }
  bindGroups.forEach((group, slot) => {
    if (!group) {
      return;
    }
    pass.setBindGroup(slot, group);
  });
}
|
|
407
|
+
|
|
408
|
+
// Number of worker workgroups needed so that maxJobs invocations fit, given
// the shader's workgroup size. maxJobs may be a thunk re-evaluated per call.
// Always dispatches at least one workgroup.
function computeWorkerWorkgroups(maxJobs, workgroupSize) {
  const jobCount =
    typeof maxJobs === "function" ? Number(maxJobs()) : Number(maxJobs);
  if (!Number.isFinite(jobCount) || jobCount <= 0) {
    throw new Error("maxJobsPerDispatch must be a positive number.");
  }
  const perGroup = Number(workgroupSize);
  if (!Number.isFinite(perGroup) || perGroup <= 0) {
    throw new Error("workgroupSize must be a positive number.");
  }
  return Math.max(1, Math.ceil(jobCount / perGroup));
}
|
|
420
|
+
|
|
421
|
+
/**
 * Create a start/stop-able loop that, on each tick, encodes one compute pass:
 * the worker pipeline first, then every job pipeline, and submits it to the
 * device queue.
 *
 * @param {object} options
 * @param {GPUDevice} options.device - Required WebGPU device.
 * @param {object} options.worker - Required; { pipeline, bindGroups?, workgroups?/workgroupCount?/dispatch? }.
 * @param {Array}  [options.jobs=[]] - Per-job { pipeline, bindGroups?, workgroups?/workgroupCount?/dispatch }.
 * @param {number} [options.workgroupSize=64] - Shader workgroup size; used when the worker gives no explicit count.
 * @param {number|Function} [options.maxJobsPerDispatch] - Jobs per tick (or thunk); required unless the worker has an explicit count.
 * @param {number} [options.rateHz] - Fixed tick rate; when absent, rAF (or a 0ms timer) paces the loop.
 * @param {string} [options.label] - Label attached to the compute pass.
 * @param {Function} [options.onTick] - Called after each successful submit.
 * @param {Function} [options.onError] - Receives tick errors; without it, errors are rethrown.
 * @returns {{start: Function, stop: Function, tick: Function, running: boolean}}
 * @throws {Error} Immediately when device or worker.pipeline is missing.
 */
export function createWorkerLoop(options = {}) {
  const {
    device,
    worker,
    jobs = [],
    workgroupSize = 64,
    maxJobsPerDispatch,
    rateHz,
    label,
    onTick,
    onError,
  } = options ?? {};

  if (!device) {
    throw new Error("createWorkerLoop requires a GPUDevice.");
  }
  if (!worker || !worker.pipeline) {
    throw new Error("createWorkerLoop requires a worker pipeline.");
  }

  // Scheduler state shared by the closures below.
  let running = false;
  let handle = null;        // current timer or rAF handle
  let usingRaf = false;     // which cancel API `stop()` must use
  const intervalMs =
    Number.isFinite(rateHz) && rateHz > 0 ? 1000 / rateHz : null;

  // Encode and submit one compute pass (worker dispatch + all job dispatches).
  const tick = () => {
    try {
      const encoder = device.createCommandEncoder();
      const pass = encoder.beginComputePass(
        label ? { label } : undefined
      );

      pass.setPipeline(worker.pipeline);
      setBindGroups(pass, worker.bindGroups);

      // Accept any of three property spellings for an explicit count.
      const explicitWorkerGroups =
        resolveWorkgroups(worker.workgroups, "worker") ??
        resolveWorkgroups(worker.workgroupCount, "worker") ??
        resolveWorkgroups(worker.dispatch, "worker");

      // Fall back to deriving the count from maxJobsPerDispatch/workgroupSize.
      const workerGroups = explicitWorkerGroups
        ? explicitWorkerGroups
        : [computeWorkerWorkgroups(maxJobsPerDispatch, workgroupSize), 1, 1];

      // x == 0 means "skip the dispatch this tick".
      if (workerGroups[0] > 0) {
        pass.dispatchWorkgroups(...workerGroups);
      }

      jobs.forEach((job, index) => {
        if (!job || !job.pipeline) {
          throw new Error(`Job pipeline missing at index ${index}.`);
        }
        pass.setPipeline(job.pipeline);
        setBindGroups(pass, job.bindGroups);
        const groups = resolveWorkgroups(
          job.workgroups ?? job.workgroupCount ?? job.dispatch,
          `job ${index}`
        );
        if (!groups) {
          throw new Error(`Job ${index} requires a workgroup count.`);
        }
        if (groups[0] > 0) {
          pass.dispatchWorkgroups(...groups);
        }
      });

      pass.end();
      device.queue.submit([encoder.finish()]);

      if (onTick) {
        onTick();
      }
    } catch (err) {
      // With an onError handler the loop keeps its scheduling alive;
      // otherwise the error propagates to the scheduler/caller.
      if (onError) {
        onError(err);
        return;
      }
      throw err;
    }
  };

  // Run a tick, then re-arm: fixed-rate timer when rateHz was given,
  // otherwise requestAnimationFrame (with a 0ms timer fallback for Node).
  const scheduleNext = () => {
    if (!running) {
      return;
    }
    if (intervalMs != null) {
      tick();
      usingRaf = false;
      handle = setTimeout(scheduleNext, intervalMs);
      return;
    }
    tick();
    if (typeof requestAnimationFrame === "function") {
      usingRaf = true;
      handle = requestAnimationFrame(scheduleNext);
    } else {
      usingRaf = false;
      handle = setTimeout(scheduleNext, 0);
    }
  };

  // Idempotent: calling start() while running is a no-op.
  const start = () => {
    if (running) {
      return;
    }
    running = true;
    scheduleNext();
  };

  // Stops future ticks and cancels whichever handle type is pending.
  const stop = () => {
    running = false;
    if (handle == null) {
      return;
    }
    if (usingRaf && typeof cancelAnimationFrame === "function") {
      cancelAnimationFrame(handle);
    } else {
      clearTimeout(handle);
    }
    handle = null;
  };

  return {
    start,
    stop,
    tick, // exposed for manual single-step driving
    get running() {
      return running;
    },
  };
}
|
package/src/worker.wgsl
CHANGED
|
@@ -1,134 +1,37 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
};
|
|
13
|
-
|
|
14
|
-
@group(1) @binding(0) var<storage, read_write> framebuffer: array<u32>;
|
|
15
|
-
@group(1) @binding(1) var<uniform> render: RenderParams;
|
|
16
|
-
|
|
17
|
-
fn hit_sphere(center: vec3<f32>, radius: f32, origin: vec3<f32>, dir: vec3<f32>) -> f32 {
|
|
18
|
-
let oc = origin - center;
|
|
19
|
-
let a = dot(dir, dir);
|
|
20
|
-
let b = 2.0 * dot(oc, dir);
|
|
21
|
-
let c = dot(oc, oc) - radius * radius;
|
|
22
|
-
let disc = b * b - 4.0 * a * c;
|
|
23
|
-
if (disc < 0.0) {
|
|
24
|
-
return -1.0;
|
|
25
|
-
}
|
|
26
|
-
let sq = sqrt(disc);
|
|
27
|
-
let t0 = (-b - sq) / (2.0 * a);
|
|
28
|
-
if (t0 > 0.001) {
|
|
29
|
-
return t0;
|
|
1
|
+
// Minimal GPU worker entry point.
//
// This file is intended to be concatenated with the lock-free queue WGSL
// via assembleWorkerWgsl(). It only handles dequeue and dispatches to a
// user hook. Replace this file (or provide your own WGSL) to implement
// real workloads.

// Read one payload word for a job from the queue's output payload buffer.
// Out-of-range reads (word_index >= stride, or stride 0) yield 0u.
// NOTE(review): `params` and `output_payloads` are expected to be declared
// by the queue WGSL this file is concatenated with — confirm against that
// module.
fn payload_word(job_index: u32, word_index: u32) -> u32 {
  let stride = params.output_stride;
  if (stride == 0u || word_index >= stride) {
    return 0u;
  }
  let base = job_index * stride;
  return output_payloads[base + word_index];
}

// process_job(job_index, job_type, payload_words) must be defined by the
// job WGSL that you concatenate before this file.

@compute @workgroup_size(64)
fn worker_main(@builtin(global_invocation_id) gid: vec3<u32>) {
  let idx = gid.x;
  let job_count = dequeue_job_count();
  if (idx >= job_count) {
    return;
  }
  if (!queue_config_valid()) {
    return;
  }
  // Attempt to claim one job; 0u means this slot was not won.
  let ok = dequeue(idx);
  if (ok == 0u) {
    return;
  }
  let job_info = output_jobs[idx];
  process_job(idx, job_info.job_type, job_info.payload_words);
}
|