@quake2ts/test-utils 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +454 -0
- package/dist/index.cjs +5432 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +2150 -0
- package/dist/index.d.ts +2150 -0
- package/dist/index.js +5165 -0
- package/dist/index.js.map +1 -0
- package/package.json +82 -0
- package/src/client/helpers/hud.ts +114 -0
- package/src/client/helpers/prediction.ts +136 -0
- package/src/client/helpers/view.ts +201 -0
- package/src/client/mocks/console.ts +75 -0
- package/src/client/mocks/download.ts +48 -0
- package/src/client/mocks/input.ts +246 -0
- package/src/client/mocks/network.ts +148 -0
- package/src/client/mocks/state.ts +148 -0
- package/src/e2e/network.ts +47 -0
- package/src/e2e/playwright.ts +90 -0
- package/src/e2e/visual.ts +172 -0
- package/src/engine/helpers/pipeline-test-template.ts +113 -0
- package/src/engine/helpers/webgpu-rendering.ts +251 -0
- package/src/engine/mocks/assets.ts +129 -0
- package/src/engine/mocks/audio.ts +152 -0
- package/src/engine/mocks/buffers.ts +88 -0
- package/src/engine/mocks/lighting.ts +64 -0
- package/src/engine/mocks/particles.ts +76 -0
- package/src/engine/mocks/renderer.ts +218 -0
- package/src/engine/mocks/webgl.ts +267 -0
- package/src/engine/mocks/webgpu.ts +262 -0
- package/src/engine/rendering.ts +103 -0
- package/src/game/factories.ts +204 -0
- package/src/game/helpers/physics.ts +171 -0
- package/src/game/helpers/save.ts +232 -0
- package/src/game/helpers.ts +310 -0
- package/src/game/mocks/ai.ts +67 -0
- package/src/game/mocks/combat.ts +61 -0
- package/src/game/mocks/items.ts +166 -0
- package/src/game/mocks.ts +105 -0
- package/src/index.ts +93 -0
- package/src/server/helpers/bandwidth.ts +127 -0
- package/src/server/helpers/multiplayer.ts +158 -0
- package/src/server/helpers/snapshot.ts +241 -0
- package/src/server/mockNetDriver.ts +106 -0
- package/src/server/mockTransport.ts +50 -0
- package/src/server/mocks/commands.ts +93 -0
- package/src/server/mocks/connection.ts +139 -0
- package/src/server/mocks/master.ts +97 -0
- package/src/server/mocks/physics.ts +32 -0
- package/src/server/mocks/state.ts +162 -0
- package/src/server/mocks/transport.ts +161 -0
- package/src/setup/audio.ts +118 -0
- package/src/setup/browser.ts +249 -0
- package/src/setup/canvas.ts +142 -0
- package/src/setup/node.ts +21 -0
- package/src/setup/storage.ts +60 -0
- package/src/setup/timing.ts +142 -0
- package/src/setup/webgl.ts +8 -0
- package/src/setup/webgpu.ts +113 -0
- package/src/shared/bsp.ts +145 -0
- package/src/shared/collision.ts +64 -0
- package/src/shared/factories.ts +88 -0
- package/src/shared/math.ts +65 -0
- package/src/shared/mocks.ts +243 -0
- package/src/shared/pak-loader.ts +45 -0
- package/src/visual/snapshots.ts +292 -0
|
@@ -0,0 +1,172 @@
|
|
|
1
|
+
import { Canvas, Image } from '@napi-rs/canvas';
|
|
2
|
+
import fs from 'fs/promises';
|
|
3
|
+
import path from 'path';
|
|
4
|
+
|
|
5
|
+
/**
 * Result of comparing two screenshot buffers.
 */
export interface VisualDiff {
  // Fraction of differing units in [0, 1]. Note: compareScreenshots computes
  // this over raw encoded-buffer bytes, not decoded pixels.
  diffPercentage: number;
  // Optional rendered diff image; currently never populated by this module.
  diffImage?: Buffer;
}
|
|
9
|
+
|
|
10
|
+
/**
|
|
11
|
+
* Captures a screenshot of the game using Playwright.
|
|
12
|
+
* @param page Playwright Page object
|
|
13
|
+
* @param name Output filename (without extension)
|
|
14
|
+
*/
|
|
15
|
+
export async function captureGameScreenshot(page: any, name: string): Promise<Buffer> {
|
|
16
|
+
return await page.screenshot({ path: `${name}.png` });
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
/**
|
|
20
|
+
* Saves a canvas to a PNG file.
|
|
21
|
+
* Compatible with both JSDOM/HTMLCanvasElement and @napi-rs/canvas.
|
|
22
|
+
*/
|
|
23
|
+
export async function takeScreenshot(canvas: Canvas | HTMLCanvasElement, filepath: string): Promise<void> {
|
|
24
|
+
let buffer: Buffer;
|
|
25
|
+
|
|
26
|
+
// Handle @napi-rs/canvas Canvas object
|
|
27
|
+
if ('toBuffer' in canvas && typeof canvas.toBuffer === 'function') {
|
|
28
|
+
buffer = canvas.toBuffer('image/png');
|
|
29
|
+
}
|
|
30
|
+
// Handle JSDOM HTMLCanvasElement (if backed by node-canvas or similar)
|
|
31
|
+
else if ('toDataURL' in canvas) {
|
|
32
|
+
const dataUrl = canvas.toDataURL('image/png');
|
|
33
|
+
const base64 = dataUrl.replace(/^data:image\/png;base64,/, '');
|
|
34
|
+
buffer = Buffer.from(base64, 'base64');
|
|
35
|
+
} else {
|
|
36
|
+
throw new Error('Unsupported canvas type for screenshot');
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
await fs.mkdir(path.dirname(filepath), { recursive: true });
|
|
40
|
+
await fs.writeFile(filepath, buffer);
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
/**
 * Compares a canvas state against a baseline image file.
 * Returns true if they match (currently a strict pixel match).
 * If the baseline does not exist, it saves the current state as baseline and returns true.
 *
 * Comparison strategy:
 *   1. Fast path: byte-equality of the encoded PNG buffers.
 *   2. Slow path: decode both images onto scratch canvases and compare RGBA
 *      values pixel by pixel (any channel difference counts as a diff pixel).
 * A dimension mismatch between baseline and current fails immediately.
 */
export async function compareScreenshot(canvas: Canvas | HTMLCanvasElement, baselinePath: string): Promise<boolean> {
  try {
    await fs.access(baselinePath);
  } catch {
    // Baseline doesn't exist, save current as baseline
    console.warn(`Baseline not found at ${baselinePath}, saving current as baseline.`);
    await takeScreenshot(canvas, baselinePath);
    return true;
  }

  // Load baseline
  const baselineBuffer = await fs.readFile(baselinePath);
  const baselineImage = new Image();
  // @napi-rs/canvas Image accepts a Buffer assigned to src — presumably decoded
  // eagerly so width/height are valid immediately below; TODO confirm.
  baselineImage.src = baselineBuffer;

  // Get dimensions (assume match for now, or fail)
  const width = baselineImage.width;
  const height = baselineImage.height;

  // Get current image data
  // We need to draw both to canvases to get pixel data easily with @napi-rs/canvas
  // If input is already a canvas, we can use it.

  // Helper to get buffer from input canvas
  let currentBuffer: Buffer;
  if ('toBuffer' in canvas && typeof canvas.toBuffer === 'function') {
    // @napi-rs/canvas path: direct PNG encode.
    currentBuffer = canvas.toBuffer('image/png');
  } else if ('toDataURL' in canvas) {
    // HTMLCanvasElement path: decode the base64 payload of the data URL.
    const dataUrl = canvas.toDataURL('image/png');
    currentBuffer = Buffer.from(dataUrl.replace(/^data:image\/png;base64,/, ''), 'base64');
  } else {
    throw new Error('Unsupported canvas type');
  }

  // Simple Buffer comparison first (fastest)
  if (baselineBuffer.equals(currentBuffer)) {
    return true;
  }

  // If buffers differ, it could be metadata or compression. Do pixel check.
  // Note: Creating a new canvas to draw the image onto for pixel access
  // This requires the 'Canvas' constructor which we imported.
  const baselineCanvas = new Canvas(width, height);
  const ctx = baselineCanvas.getContext('2d');
  ctx.drawImage(baselineImage, 0, 0);
  const baselineData = ctx.getImageData(0, 0, width, height).data;

  // Load current buffer to image to draw (handles JSDOM/napi differences uniformally)
  const currentImage = new Image();
  currentImage.src = currentBuffer;

  if (currentImage.width !== width || currentImage.height !== height) {
    console.error(`Dimension mismatch: Baseline ${width}x${height} vs Current ${currentImage.width}x${currentImage.height}`);
    return false;
  }

  const currentCanvas = new Canvas(width, height);
  const ctx2 = currentCanvas.getContext('2d');
  ctx2.drawImage(currentImage, 0, 0);
  const currentData = ctx2.getImageData(0, 0, width, height).data;

  let diffPixels = 0;
  const totalPixels = width * height;

  // Simple pixel diff: data is a flat RGBA array, 4 bytes per pixel.
  for (let i = 0; i < baselineData.length; i += 4) {
    if (baselineData[i] !== currentData[i] || // R
        baselineData[i+1] !== currentData[i+1] || // G
        baselineData[i+2] !== currentData[i+2] || // B
        baselineData[i+3] !== currentData[i+3]) { // A
      diffPixels++;
    }
  }

  if (diffPixels > 0) {
    console.error(`Visual regression: ${diffPixels} pixels differ (${(diffPixels/totalPixels*100).toFixed(2)}%)`);
    // Save diff image? (Optional, skipping for now)
    return false;
  }

  return true;
}
|
|
130
|
+
|
|
131
|
+
/**
|
|
132
|
+
* Compares two screenshots (Buffers).
|
|
133
|
+
* Uses simple buffer check. Kept for backward compatibility or direct buffer comparison.
|
|
134
|
+
*/
|
|
135
|
+
export function compareScreenshots(baseline: Buffer, current: Buffer, threshold: number = 0.01): VisualDiff {
|
|
136
|
+
// Basic length check first
|
|
137
|
+
if (baseline.length !== current.length) {
|
|
138
|
+
return { diffPercentage: 1.0 };
|
|
139
|
+
}
|
|
140
|
+
|
|
141
|
+
let diffPixels = 0;
|
|
142
|
+
const totalPixels = baseline.length; // Approximate bytes
|
|
143
|
+
|
|
144
|
+
for (let i = 0; i < baseline.length; i++) {
|
|
145
|
+
if (baseline[i] !== current[i]) {
|
|
146
|
+
diffPixels++;
|
|
147
|
+
}
|
|
148
|
+
}
|
|
149
|
+
|
|
150
|
+
const diffPercentage = diffPixels / totalPixels;
|
|
151
|
+
|
|
152
|
+
return {
|
|
153
|
+
diffPercentage,
|
|
154
|
+
};
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
/**
 * A named visual regression scenario: a scene identifier plus an async
 * setup routine to run before capture.
 */
export interface VisualScenario {
  // Identifier for the scene; used to name this scenario.
  sceneName: string;
  // Async preparation hook run before capture. In createVisualTestScenario
  // this is currently a no-op placeholder.
  setup: () => Promise<void>;
}
|
|
161
|
+
|
|
162
|
+
/**
|
|
163
|
+
* Creates a visual test scenario.
|
|
164
|
+
*/
|
|
165
|
+
export function createVisualTestScenario(sceneName: string): VisualScenario {
|
|
166
|
+
return {
|
|
167
|
+
sceneName,
|
|
168
|
+
setup: async () => {
|
|
169
|
+
// Setup scene logic
|
|
170
|
+
}
|
|
171
|
+
};
|
|
172
|
+
}
|
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
import { expect } from 'vitest';
|
|
2
|
+
import {
|
|
3
|
+
createRenderTestSetup,
|
|
4
|
+
renderAndCapture,
|
|
5
|
+
createComputeTestSetup,
|
|
6
|
+
runComputeAndReadback,
|
|
7
|
+
RenderTestSetup,
|
|
8
|
+
ComputeTestSetup
|
|
9
|
+
} from './webgpu-rendering.js';
|
|
10
|
+
|
|
11
|
+
/**
 * GPU buffers describing a piece of renderable test geometry.
 */
export interface GeometryBuffers {
  // Vertex data bound at slot 0 of the render pass.
  vertexBuffer: GPUBuffer;
  // Optional index buffer; when present, drawing is indexed (uint16 assumed
  // by testPipelineRendering).
  indexBuffer?: GPUBuffer;
  // Number of vertices drawn when no index buffer is supplied.
  vertexCount: number;
  // Number of indices drawn when indexBuffer is set; treated as 0 if omitted.
  indexCount?: number;
}
|
|
17
|
+
|
|
18
|
+
/**
 * Template for testing a rendering pipeline.
 *
 * Creates a 256x256 headless render setup, builds the caller's pipeline and
 * geometry, renders one frame, and asserts on the captured RGBA pixels.
 * Cleanup always runs, even when an expectation throws.
 *
 * @param name Scenario label. NOTE(review): currently unused in the body —
 *   consider folding it into failure messages.
 * @param createPipeline Builds the render pipeline under test.
 * @param setupGeometry Allocates vertex/index buffers for the test geometry.
 * @param expectedOutput Optional exact RGBA expectation (256*256*4 bytes);
 *   when omitted, only the captured buffer size is verified.
 */
export async function testPipelineRendering(
  name: string,
  createPipeline: (device: GPUDevice) => GPURenderPipeline,
  setupGeometry: (device: GPUDevice) => GeometryBuffers,
  expectedOutput?: Uint8ClampedArray
) {
  const setup = await createRenderTestSetup(256, 256);

  try {
    const pipeline = createPipeline(setup.context.device);
    const geometry = setupGeometry(setup.context.device);

    const pixels = await renderAndCapture(setup, (pass) => {
      pass.setPipeline(pipeline);
      pass.setVertexBuffer(0, geometry.vertexBuffer);
      if (geometry.indexBuffer) {
        pass.setIndexBuffer(geometry.indexBuffer, 'uint16'); // Assuming uint16 for simplicity
        // NOTE(review): an omitted indexCount draws 0 indices (nothing).
        pass.drawIndexed(geometry.indexCount || 0);
      } else {
        pass.draw(geometry.vertexCount);
      }
    });

    if (expectedOutput) {
      expect(pixels).toEqual(expectedOutput);
    } else {
      // At least verify we got pixels
      expect(pixels.length).toBe(256 * 256 * 4);
    }
  } finally {
    await setup.cleanup();
  }
}
|
|
54
|
+
|
|
55
|
+
/**
|
|
56
|
+
* Template for testing compute shaders
|
|
57
|
+
*/
|
|
58
|
+
export async function testComputeShader(
|
|
59
|
+
name: string,
|
|
60
|
+
createComputePipeline: (device: GPUDevice) => GPUComputePipeline,
|
|
61
|
+
inputData: Float32Array,
|
|
62
|
+
expectedOutput?: Float32Array
|
|
63
|
+
) {
|
|
64
|
+
const setup = await createComputeTestSetup(inputData.byteLength);
|
|
65
|
+
const { device } = setup.context;
|
|
66
|
+
|
|
67
|
+
try {
|
|
68
|
+
const pipeline = createComputePipeline(device);
|
|
69
|
+
|
|
70
|
+
// Initialize input data
|
|
71
|
+
const stagingBuffer = device.createBuffer({
|
|
72
|
+
size: inputData.byteLength,
|
|
73
|
+
usage: GPUBufferUsage.COPY_SRC | GPUBufferUsage.MAP_WRITE,
|
|
74
|
+
mappedAtCreation: true
|
|
75
|
+
});
|
|
76
|
+
new Float32Array(stagingBuffer.getMappedRange()).set(inputData);
|
|
77
|
+
stagingBuffer.unmap();
|
|
78
|
+
|
|
79
|
+
// Copy input to output buffer for processing
|
|
80
|
+
const encoder = device.createCommandEncoder();
|
|
81
|
+
encoder.copyBufferToBuffer(stagingBuffer, 0, setup.outputBuffer, 0, inputData.byteLength);
|
|
82
|
+
device.queue.submit([encoder.finish()]);
|
|
83
|
+
|
|
84
|
+
// Create a bind group for the storage buffer
|
|
85
|
+
const bindGroup = device.createBindGroup({
|
|
86
|
+
layout: pipeline.getBindGroupLayout(0),
|
|
87
|
+
entries: [
|
|
88
|
+
{
|
|
89
|
+
binding: 0,
|
|
90
|
+
resource: {
|
|
91
|
+
buffer: setup.outputBuffer
|
|
92
|
+
}
|
|
93
|
+
}
|
|
94
|
+
]
|
|
95
|
+
});
|
|
96
|
+
|
|
97
|
+
const resultBuffer = await runComputeAndReadback(setup, (pass) => {
|
|
98
|
+
pass.setPipeline(pipeline);
|
|
99
|
+
pass.setBindGroup(0, bindGroup);
|
|
100
|
+
pass.dispatchWorkgroups(Math.ceil(inputData.length / 64));
|
|
101
|
+
});
|
|
102
|
+
|
|
103
|
+
if (expectedOutput) {
|
|
104
|
+
const floatResult = new Float32Array(resultBuffer);
|
|
105
|
+
expect(floatResult).toEqual(expectedOutput);
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
stagingBuffer.destroy();
|
|
109
|
+
|
|
110
|
+
} finally {
|
|
111
|
+
await setup.cleanup();
|
|
112
|
+
}
|
|
113
|
+
}
|
|
@@ -0,0 +1,251 @@
|
|
|
1
|
+
import { initHeadlessWebGPU, WebGPUContextState } from '../../setup/webgpu.js';
|
|
2
|
+
|
|
3
|
+
/**
 * Interface for render test setup.
 */
export interface RenderTestSetup {
  // Engine-compatible WebGPU context (adapter/device/queue/format).
  context: WebGPUContextState;
  // Offscreen rgba8unorm color target (render attachment + copy source).
  renderTarget: GPUTexture;
  // View of renderTarget used as the render pass color attachment.
  renderTargetView: GPUTextureView;
  // Pre-created encoder; renderAndCapture finishes it, so it is single-use.
  commandEncoder: GPUCommandEncoder;
  // Destroys the render target and tears down the headless context.
  cleanup: () => Promise<void>;
  // Render target dimensions in pixels.
  width: number;
  height: number;
}
|
|
15
|
+
|
|
16
|
+
/**
|
|
17
|
+
* Creates a setup for testing rendering pipelines.
|
|
18
|
+
* Initializes a headless WebGPU context, a render target texture, and a command encoder.
|
|
19
|
+
*/
|
|
20
|
+
export async function createRenderTestSetup(
|
|
21
|
+
width: number = 256,
|
|
22
|
+
height: number = 256
|
|
23
|
+
): Promise<RenderTestSetup> {
|
|
24
|
+
const setup = await initHeadlessWebGPU();
|
|
25
|
+
const { device } = setup;
|
|
26
|
+
|
|
27
|
+
// Create a render target texture
|
|
28
|
+
// We use RGBA8Unorm for easy readback and standard rendering
|
|
29
|
+
const renderTarget = device.createTexture({
|
|
30
|
+
size: { width, height, depthOrArrayLayers: 1 },
|
|
31
|
+
format: 'rgba8unorm',
|
|
32
|
+
usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.COPY_SRC,
|
|
33
|
+
});
|
|
34
|
+
|
|
35
|
+
const renderTargetView = renderTarget.createView();
|
|
36
|
+
|
|
37
|
+
const commandEncoder = device.createCommandEncoder();
|
|
38
|
+
|
|
39
|
+
// We need to create a context state object compatible with our engine expectations
|
|
40
|
+
const context: WebGPUContextState = {
|
|
41
|
+
adapter: setup.adapter,
|
|
42
|
+
device: setup.device,
|
|
43
|
+
queue: setup.device.queue,
|
|
44
|
+
format: 'rgba8unorm',
|
|
45
|
+
};
|
|
46
|
+
|
|
47
|
+
return {
|
|
48
|
+
context,
|
|
49
|
+
renderTarget,
|
|
50
|
+
renderTargetView,
|
|
51
|
+
commandEncoder,
|
|
52
|
+
width,
|
|
53
|
+
height,
|
|
54
|
+
cleanup: async () => {
|
|
55
|
+
renderTarget.destroy();
|
|
56
|
+
await setup.cleanup();
|
|
57
|
+
}
|
|
58
|
+
};
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
/**
 * Captures texture content to Uint8ClampedArray (RGBA).
 * Creates its own CommandEncoder and submits immediately.
 *
 * The texture is copied into a mappable buffer whose rows are padded to the
 * 256-byte alignment copyTextureToBuffer requires; the padding is stripped
 * so the returned array is tightly packed (width * height * 4 bytes).
 * Assumes a 4-byte-per-pixel format such as rgba8unorm — TODO confirm if
 * used with other formats.
 */
export async function captureTexture(
  device: GPUDevice,
  texture: GPUTexture,
  width: number,
  height: number
): Promise<Uint8ClampedArray> {
  const commandEncoder = device.createCommandEncoder();

  // Create a buffer to read back the texture data
  // Bytes per row must be a multiple of 256 for copyTextureToBuffer
  const bytesPerPixel = 4;
  const unpaddedBytesPerRow = width * bytesPerPixel;
  const align = 256;
  // Round the row size up to the alignment. (The Math.max is defensive: the
  // ceiling term already dominates for any non-negative width.)
  const paddedBytesPerRow = Math.max(
    bytesPerPixel * width,
    Math.ceil((bytesPerPixel * width) / align) * align
  );

  const bufferSize = paddedBytesPerRow * height;

  const readbackBuffer = device.createBuffer({
    size: bufferSize,
    usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ,
  });

  // Copy texture to buffer
  commandEncoder.copyTextureToBuffer(
    {
      texture: texture,
    },
    {
      buffer: readbackBuffer,
      bytesPerRow: paddedBytesPerRow,
    },
    {
      width,
      height,
      depthOrArrayLayers: 1,
    }
  );

  device.queue.submit([commandEncoder.finish()]);

  // mapAsync resolves only after the submitted copy completes.
  await readbackBuffer.mapAsync(GPUMapMode.READ);

  const arrayBuffer = readbackBuffer.getMappedRange();

  // Create a new buffer to hold the tightly packed data
  const output = new Uint8ClampedArray(width * height * 4);

  // Copy row by row to remove padding
  const srcBytes = new Uint8Array(arrayBuffer);
  for (let y = 0; y < height; y++) {
    const srcOffset = y * paddedBytesPerRow;
    const dstOffset = y * unpaddedBytesPerRow;
    output.set(srcBytes.subarray(srcOffset, srcOffset + unpaddedBytesPerRow), dstOffset);
  }

  // Release the mapping before destroying; output already holds a copy.
  readbackBuffer.unmap();
  readbackBuffer.destroy();

  return output;
}
|
|
128
|
+
|
|
129
|
+
/**
 * Helper to render and capture the output as pixel data.
 * It manages the render pass, submission, and buffer readback.
 *
 * Note: this finishes and submits `setup.commandEncoder`, and encoders
 * cannot be reused after finish() — so a given RenderTestSetup supports
 * only one renderAndCapture call.
 */
export async function renderAndCapture(
  setup: RenderTestSetup,
  renderFn: (pass: GPURenderPassEncoder) => void
): Promise<Uint8ClampedArray> {
  const { device, queue } = setup.context;
  const { renderTargetView, commandEncoder, width, height } = setup;

  // Begin render pass — clears the target to transparent black before drawing.
  const passEncoder = commandEncoder.beginRenderPass({
    colorAttachments: [
      {
        view: renderTargetView,
        clearValue: { r: 0, g: 0, b: 0, a: 0 },
        loadOp: 'clear',
        storeOp: 'store',
      },
    ],
  });

  // Invoke user render function
  renderFn(passEncoder);

  // End pass
  passEncoder.end();

  // Submit the render commands
  queue.submit([commandEncoder.finish()]);

  // Capture the texture (using a new encoder)
  return captureTexture(device, setup.renderTarget, width, height);
}
|
|
164
|
+
|
|
165
|
+
/**
 * Interface for compute test setup.
 */
export interface ComputeTestSetup {
  // Engine-compatible WebGPU context (adapter/device/queue/format).
  context: WebGPUContextState;
  // Storage buffer the compute shader writes into; COPY_SRC enables readback.
  outputBuffer: GPUBuffer;
  // Pre-created encoder; runComputeAndReadback finishes it, so it is single-use.
  commandEncoder: GPUCommandEncoder;
  // Destroys the output buffer and tears down the headless context.
  cleanup: () => Promise<void>;
  // Size of outputBuffer in bytes.
  outputSize: number;
}
|
|
175
|
+
|
|
176
|
+
/**
|
|
177
|
+
* Creates a setup for testing compute shaders.
|
|
178
|
+
*/
|
|
179
|
+
export async function createComputeTestSetup(
|
|
180
|
+
outputSize: number
|
|
181
|
+
): Promise<ComputeTestSetup> {
|
|
182
|
+
const setup = await initHeadlessWebGPU();
|
|
183
|
+
const { device } = setup;
|
|
184
|
+
|
|
185
|
+
// Create output buffer (storage and copy source)
|
|
186
|
+
const outputBuffer = device.createBuffer({
|
|
187
|
+
size: outputSize,
|
|
188
|
+
usage: GPUBufferUsage.STORAGE | GPUBufferUsage.COPY_SRC,
|
|
189
|
+
});
|
|
190
|
+
|
|
191
|
+
const commandEncoder = device.createCommandEncoder();
|
|
192
|
+
|
|
193
|
+
const context: WebGPUContextState = {
|
|
194
|
+
adapter: setup.adapter,
|
|
195
|
+
device: setup.device,
|
|
196
|
+
queue: setup.device.queue,
|
|
197
|
+
format: 'rgba8unorm',
|
|
198
|
+
};
|
|
199
|
+
|
|
200
|
+
return {
|
|
201
|
+
context,
|
|
202
|
+
outputBuffer,
|
|
203
|
+
commandEncoder,
|
|
204
|
+
outputSize,
|
|
205
|
+
cleanup: async () => {
|
|
206
|
+
outputBuffer.destroy();
|
|
207
|
+
await setup.cleanup();
|
|
208
|
+
}
|
|
209
|
+
};
|
|
210
|
+
}
|
|
211
|
+
|
|
212
|
+
/**
 * Helper to run a compute pass and read back the output buffer.
 *
 * Records the caller's compute commands plus a copy of `outputBuffer` into a
 * staging buffer, submits them, and returns a detached copy of the staged
 * bytes. Note: this finishes `setup.commandEncoder`, so a given
 * ComputeTestSetup supports only one runComputeAndReadback call.
 */
export async function runComputeAndReadback(
  setup: ComputeTestSetup,
  computeFn: (pass: GPUComputePassEncoder) => void
): Promise<ArrayBuffer> {
  const { device, queue } = setup.context;
  const { outputBuffer, commandEncoder, outputSize } = setup;

  const passEncoder = commandEncoder.beginComputePass();
  computeFn(passEncoder);
  passEncoder.end();

  // Create staging buffer for readback
  const stagingBuffer = device.createBuffer({
    size: outputSize,
    usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ,
  });

  // Copy the post-dispatch output into the mappable staging buffer.
  commandEncoder.copyBufferToBuffer(
    outputBuffer,
    0,
    stagingBuffer,
    0,
    outputSize
  );

  queue.submit([commandEncoder.finish()]);

  // mapAsync resolves once the submitted work (dispatch + copy) completes.
  await stagingBuffer.mapAsync(GPUMapMode.READ);

  const mappedRange = stagingBuffer.getMappedRange();
  const result = mappedRange.slice(0); // Copy data so it survives unmap/destroy

  stagingBuffer.unmap();
  stagingBuffer.destroy();

  return result;
}
|
|
@@ -0,0 +1,129 @@
|
|
|
1
|
+
import { vi } from 'vitest';
|
|
2
|
+
import {
|
|
3
|
+
AssetManager,
|
|
4
|
+
Md2Model,
|
|
5
|
+
Md3Model,
|
|
6
|
+
BspMap,
|
|
7
|
+
PreparedTexture
|
|
8
|
+
} from '@quake2ts/engine';
|
|
9
|
+
|
|
10
|
+
export function createMockAssetManager(overrides?: Partial<AssetManager>): AssetManager {
|
|
11
|
+
return {
|
|
12
|
+
textures: {
|
|
13
|
+
get: vi.fn(),
|
|
14
|
+
set: vi.fn(),
|
|
15
|
+
has: vi.fn(),
|
|
16
|
+
clear: vi.fn(),
|
|
17
|
+
memoryUsage: 0
|
|
18
|
+
} as any,
|
|
19
|
+
audio: {
|
|
20
|
+
load: vi.fn(),
|
|
21
|
+
get: vi.fn(),
|
|
22
|
+
clearAll: vi.fn()
|
|
23
|
+
} as any,
|
|
24
|
+
loadTexture: vi.fn().mockResolvedValue({} as PreparedTexture),
|
|
25
|
+
registerTexture: vi.fn(),
|
|
26
|
+
loadSound: vi.fn().mockResolvedValue({} as any),
|
|
27
|
+
loadMd2Model: vi.fn().mockResolvedValue({} as Md2Model),
|
|
28
|
+
getMd2Model: vi.fn(),
|
|
29
|
+
loadMd3Model: vi.fn().mockResolvedValue({} as Md3Model),
|
|
30
|
+
getMd3Model: vi.fn(),
|
|
31
|
+
loadSprite: vi.fn().mockResolvedValue({} as any),
|
|
32
|
+
loadMap: vi.fn().mockResolvedValue({} as BspMap),
|
|
33
|
+
getMap: vi.fn(),
|
|
34
|
+
loadPalette: vi.fn().mockResolvedValue(undefined),
|
|
35
|
+
isAssetLoaded: vi.fn().mockReturnValue(true),
|
|
36
|
+
listFiles: vi.fn().mockReturnValue([]),
|
|
37
|
+
resetForLevelChange: vi.fn(),
|
|
38
|
+
getMemoryUsage: vi.fn().mockReturnValue({ textures: 0, audio: 0 }),
|
|
39
|
+
clearCache: vi.fn(),
|
|
40
|
+
preloadAssets: vi.fn().mockResolvedValue(undefined),
|
|
41
|
+
queueLoad: vi.fn().mockImplementation((path) => Promise.resolve({} as any)),
|
|
42
|
+
...overrides
|
|
43
|
+
} as unknown as AssetManager;
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
export function createMockTexture(width: number = 1, height: number = 1, data?: Uint8Array): PreparedTexture {
|
|
47
|
+
return {
|
|
48
|
+
width,
|
|
49
|
+
height,
|
|
50
|
+
data: data || new Uint8Array(width * height * 4).fill(255),
|
|
51
|
+
format: 0, // RGBA
|
|
52
|
+
name: 'mock_texture',
|
|
53
|
+
uploaded: false
|
|
54
|
+
} as unknown as PreparedTexture;
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
export function createMockMd2Model(overrides?: Partial<Md2Model>): Md2Model {
|
|
58
|
+
return {
|
|
59
|
+
header: {
|
|
60
|
+
skinWidth: 0,
|
|
61
|
+
skinHeight: 0,
|
|
62
|
+
frameSize: 0,
|
|
63
|
+
numSkins: 0,
|
|
64
|
+
numVertices: 0,
|
|
65
|
+
numSt: 0,
|
|
66
|
+
numTriangles: 0,
|
|
67
|
+
numGlCmds: 0,
|
|
68
|
+
numFrames: 0,
|
|
69
|
+
offsetSkins: 0,
|
|
70
|
+
offsetSt: 0,
|
|
71
|
+
offsetTriangles: 0,
|
|
72
|
+
offsetFrames: 0,
|
|
73
|
+
offsetGlCmds: 0,
|
|
74
|
+
offsetEnd: 0
|
|
75
|
+
},
|
|
76
|
+
skins: [],
|
|
77
|
+
texCoords: [],
|
|
78
|
+
triangles: [],
|
|
79
|
+
frames: [],
|
|
80
|
+
glCommands: new Int32Array(0),
|
|
81
|
+
...overrides
|
|
82
|
+
} as Md2Model;
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
export function createMockMd3Model(overrides?: Partial<Md3Model>): Md3Model {
|
|
86
|
+
return {
|
|
87
|
+
header: {
|
|
88
|
+
ident: 0,
|
|
89
|
+
version: 0,
|
|
90
|
+
name: '',
|
|
91
|
+
flags: 0,
|
|
92
|
+
numFrames: 0,
|
|
93
|
+
numTags: 0,
|
|
94
|
+
numSurfaces: 0,
|
|
95
|
+
numSkins: 0,
|
|
96
|
+
offsetFrames: 0,
|
|
97
|
+
offsetTags: 0,
|
|
98
|
+
offsetSurfaces: 0,
|
|
99
|
+
offsetEnd: 0
|
|
100
|
+
},
|
|
101
|
+
frames: [],
|
|
102
|
+
tags: [],
|
|
103
|
+
surfaces: [],
|
|
104
|
+
...overrides
|
|
105
|
+
} as Md3Model;
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
export function createMockBspMap(overrides?: Partial<BspMap>): BspMap {
|
|
109
|
+
return {
|
|
110
|
+
version: 38,
|
|
111
|
+
entities: [],
|
|
112
|
+
planes: [],
|
|
113
|
+
vertices: [],
|
|
114
|
+
visibility: new Uint8Array(0),
|
|
115
|
+
nodes: [],
|
|
116
|
+
texInfo: [],
|
|
117
|
+
faces: [],
|
|
118
|
+
lightmaps: [],
|
|
119
|
+
leafs: [],
|
|
120
|
+
leafFaces: [],
|
|
121
|
+
leafBrushes: [],
|
|
122
|
+
edges: [],
|
|
123
|
+
faceEdges: [],
|
|
124
|
+
models: [],
|
|
125
|
+
brushes: [],
|
|
126
|
+
brushSides: [],
|
|
127
|
+
...overrides
|
|
128
|
+
} as BspMap;
|
|
129
|
+
}
|