ushman-characterize 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/AGENTS.md +110 -0
- package/CHANGELOG.md +41 -0
- package/LICENSE.md +21 -0
- package/README.md +193 -0
- package/bin/ushman-characterize +19 -0
- package/dist/babel-config.d.ts +7 -0
- package/dist/babel-config.d.ts.map +1 -0
- package/dist/babel-config.js +17 -0
- package/dist/capture-server.d.ts +31 -0
- package/dist/capture-server.d.ts.map +1 -0
- package/dist/capture-server.js +199 -0
- package/dist/capture.d.ts +97 -0
- package/dist/capture.d.ts.map +1 -0
- package/dist/capture.js +620 -0
- package/dist/cli/logger.d.ts +7 -0
- package/dist/cli/logger.d.ts.map +1 -0
- package/dist/cli/logger.js +14 -0
- package/dist/cli/parse-flags.d.ts +8 -0
- package/dist/cli/parse-flags.d.ts.map +1 -0
- package/dist/cli/parse-flags.js +60 -0
- package/dist/cli.d.ts +39 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +439 -0
- package/dist/constants.d.ts +20 -0
- package/dist/constants.d.ts.map +1 -0
- package/dist/constants.js +19 -0
- package/dist/dedupe-contract.d.ts +26 -0
- package/dist/dedupe-contract.d.ts.map +1 -0
- package/dist/dedupe-contract.js +12 -0
- package/dist/default-export.d.ts +6 -0
- package/dist/default-export.d.ts.map +1 -0
- package/dist/default-export.js +52 -0
- package/dist/format-contract.d.ts +25 -0
- package/dist/format-contract.d.ts.map +1 -0
- package/dist/format-contract.js +96 -0
- package/dist/function-utils.d.ts +6 -0
- package/dist/function-utils.d.ts.map +1 -0
- package/dist/function-utils.js +22 -0
- package/dist/generate-replay.d.ts +18 -0
- package/dist/generate-replay.d.ts.map +1 -0
- package/dist/generate-replay.js +158 -0
- package/dist/index.d.ts +13 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +11 -0
- package/dist/instrument.d.ts +39 -0
- package/dist/instrument.d.ts.map +1 -0
- package/dist/instrument.js +605 -0
- package/dist/ledger.d.ts +19 -0
- package/dist/ledger.d.ts.map +1 -0
- package/dist/ledger.js +50 -0
- package/dist/puppeteer-harness.d.ts +74 -0
- package/dist/puppeteer-harness.d.ts.map +1 -0
- package/dist/puppeteer-harness.js +248 -0
- package/dist/purity-classifier.d.ts +28 -0
- package/dist/purity-classifier.d.ts.map +1 -0
- package/dist/purity-classifier.js +363 -0
- package/dist/rebind.d.ts +26 -0
- package/dist/rebind.d.ts.map +1 -0
- package/dist/rebind.js +356 -0
- package/dist/replay-report.d.ts +18 -0
- package/dist/replay-report.d.ts.map +1 -0
- package/dist/replay-report.js +12 -0
- package/dist/scene.d.ts +24 -0
- package/dist/scene.d.ts.map +1 -0
- package/dist/scene.js +235 -0
- package/dist/schema-types.d.ts +40 -0
- package/dist/schema-types.d.ts.map +1 -0
- package/dist/schema-types.js +32 -0
- package/dist/seed-scaffolds.d.ts +31 -0
- package/dist/seed-scaffolds.d.ts.map +1 -0
- package/dist/seed-scaffolds.js +96 -0
- package/dist/shared.d.ts +36 -0
- package/dist/shared.d.ts.map +1 -0
- package/dist/shared.js +390 -0
- package/dist/state-dag.d.ts +5 -0
- package/dist/state-dag.d.ts.map +1 -0
- package/dist/state-dag.js +27 -0
- package/dist/stub-pure.d.ts +57 -0
- package/dist/stub-pure.d.ts.map +1 -0
- package/dist/stub-pure.js +987 -0
- package/dist/time.d.ts +3 -0
- package/dist/time.d.ts.map +1 -0
- package/dist/time.js +10 -0
- package/dist/trace-format.d.ts +24 -0
- package/dist/trace-format.d.ts.map +1 -0
- package/dist/trace-format.js +213 -0
- package/dist/trace-serializer.d.ts +94 -0
- package/dist/trace-serializer.d.ts.map +1 -0
- package/dist/trace-serializer.js +607 -0
- package/dist/tracer-runtime.d.ts +25 -0
- package/dist/tracer-runtime.d.ts.map +1 -0
- package/dist/tracer-runtime.js +291 -0
- package/dist/types.d.ts +13 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +0 -0
- package/dist/workspace-paths.d.ts +64 -0
- package/dist/workspace-paths.d.ts.map +1 -0
- package/dist/workspace-paths.js +288 -0
- package/package.json +86 -0
package/dist/rebind.js
ADDED
|
@@ -0,0 +1,356 @@
|
|
|
1
|
+
import { readdir } from 'node:fs/promises';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import generate from '@babel/generator';
|
|
4
|
+
import traverse from '@babel/traverse';
|
|
5
|
+
import * as t from '@babel/types';
|
|
6
|
+
import { parseModuleAst } from "./babel-config.js";
|
|
7
|
+
import { recordCharacterizeValidatorResult, writeCharacterizeReport } from "./ledger.js";
|
|
8
|
+
import { relativeImportPath } from "./shared.js";
|
|
9
|
+
import { assertV4Workspace, pathExists } from "./workspace-paths.js";
|
|
10
|
+
const GENERATED_REPLAY_HEADER = '// AUTO-GENERATED by ushman-characterize generate-replay.';
|
|
11
|
+
const GENERATED_MODULE_SEGMENT = '.lab/characterize/modules';
|
|
12
|
+
// Name a specifier is imported under: an identifier's `name`, or the raw
// string value for `import { "x" as y }` string-literal imports.
const importSpecifierName = (specifier) => {
    const { imported } = specifier;
    return t.isIdentifier(imported) ? imported.name : imported.value;
};
|
|
13
|
+
// Recursively gather every `*.test.ts` file beneath `root`, returned in a
// deterministic locale-sorted order. A missing or unreadable directory is
// treated as empty rather than raising.
const collectReplayTests = async (root) => {
    const dirents = await readdir(root, { withFileTypes: true }).catch(() => []);
    const collected = [];
    for (const dirent of dirents) {
        const fullPath = path.join(root, dirent.name);
        if (dirent.isDirectory()) {
            const nested = await collectReplayTests(fullPath);
            collected.push(...nested);
        } else if (dirent.isFile() && dirent.name.endsWith('.test.ts')) {
            collected.push(fullPath);
        }
    }
    return collected.sort((a, b) => a.localeCompare(b));
};
|
|
28
|
+
// Normalize a rewrite preview into a plain { after, before, filePath } record,
// dropping any extra properties the caller may have passed along.
const buildPreview = ({ after, before, filePath }) => {
    return { after, before, filePath };
};
|
|
33
|
+
// A replay source counts as "generated" only when it begins with the
// auto-generated marker header emitted by generate-replay.
const isGeneratedReplayFile = (source) => {
    return source.startsWith(GENERATED_REPLAY_HEADER);
};
|
|
34
|
+
// Convert an OS-specific path fragment into forward-slash form and strip any
// leading "./" (one or more slashes) so fragments compare consistently.
const normalizePathFragment = (value) => {
    const posix = value.split(path.sep).join('/');
    return posix.replace(/^\.\/+/u, '');
};
|
|
38
|
+
// Parse a raw "<symbol>=<target>" CLI pair on its first "=". Both sides must
// be non-empty; anything else (no "=", empty symbol, empty target) is an
// invalid --map value.
const splitMapping = (pair) => {
    const eq = pair.indexOf('=');
    const hasSymbol = eq > 0;
    const hasTarget = eq !== pair.length - 1;
    if (!hasSymbol || !hasTarget) {
        throw new Error(`Invalid --map value: ${pair}`);
    }
    return { symbol: pair.slice(0, eq), target: pair.slice(eq + 1) };
};
|
|
48
|
+
// Split an optional "<workspace-module>:<symbol>" selector on its LAST ":".
// A missing or degenerate separator (leading/trailing ":") means the selector
// is a bare, unscoped symbol. Module fragments are normalized to POSIX form.
const splitScopedSelector = (selector) => {
    const colon = selector.lastIndexOf(':');
    const scoped = colon > 0 && colon !== selector.length - 1;
    if (!scoped) {
        return { symbol: selector, workspaceModule: null };
    }
    const moduleFragment = selector.slice(0, colon);
    return {
        symbol: selector.slice(colon + 1),
        workspaceModule: normalizePathFragment(moduleFragment),
    };
};
|
|
61
|
+
/**
 * Parse a raw "<selector>=<target>" pair into a rebind mapping. The selector
 * may be scoped ("module:symbol"); when it is, the canonical selector string
 * is rebuilt from the normalized module fragment so equivalent spellings
 * collapse to one key.
 */
export const parseRebindMapping = (pair) => {
    const { symbol: rawSelector, target } = splitMapping(pair);
    const { symbol, workspaceModule } = splitScopedSelector(rawSelector);
    const selector = workspaceModule === null ? symbol : `${workspaceModule}:${symbol}`;
    return { selector, symbol, target, workspaceModule };
};
|
|
73
|
+
/**
 * Resolve a rebind target to an absolute path, rejecting targets that escape
 * the workspace root (via `..` or an absolute relative result) or that do not
 * exist on disk.
 */
const resolveValidatedTargetPath = async ({ targetPath, workspaceRoot }) => {
    const resolvedRoot = path.resolve(workspaceRoot);
    const absoluteTarget = path.isAbsolute(targetPath)
        ? path.resolve(targetPath)
        : path.resolve(resolvedRoot, targetPath);
    const relative = path.relative(resolvedRoot, absoluteTarget);
    const escapesRoot = relative.startsWith('..') || path.isAbsolute(relative);
    if (escapesRoot) {
        throw new Error(`Rebind target path escapes the workspace root: ${targetPath}`);
    }
    if (!(await pathExists(absoluteTarget))) {
        throw new Error(`Rebind target path does not exist: ${targetPath}`);
    }
    return absoluteTarget;
};
|
|
86
|
+
// Workspace-relative, forward-slash form of a (possibly cwd-relative) path.
const toWorkspaceRelativePath = ({ filePath, workspaceRoot }) => {
    const relative = path.relative(path.resolve(workspaceRoot), path.resolve(filePath));
    return normalizePathFragment(relative);
};
// Resolve an import specifier found in `filePath` to the workspace-relative
// id of the module it points at.
const resolveWorkspaceModule = ({ filePath, importSource, workspaceRoot }) => {
    const importedFile = path.resolve(path.dirname(filePath), importSource);
    return toWorkspaceRelativePath({ filePath: importedFile, workspaceRoot });
};
|
|
91
|
+
/**
 * Collect the value-import specifiers that a generated replay test pulls from
 * generated characterization modules (import sources containing
 * `.lab/characterize/modules`). Files without the generated-replay header
 * yield no specifiers. Each entry records the file, the imported and local
 * names, and the workspace-relative id of the generated module.
 */
const collectGeneratedImportSpecifiers = ({ filePath, source, workspaceRoot, }) => {
    if (!isGeneratedReplayFile(source)) {
        return [];
    }
    const ast = parseModuleAst({
        source,
        sourcePath: filePath,
    });
    const specifiers = [];
    traverse(ast, {
        ImportDeclaration(importPath) {
            // Type-only declarations never need rebinding.
            if (importPath.node.importKind === 'type') {
                return;
            }
            // Only imports pointing into the generated module tree matter.
            if (!importPath.node.source.value.includes(GENERATED_MODULE_SEGMENT)) {
                return;
            }
            const workspaceModule = resolveWorkspaceModule({
                filePath,
                importSource: importPath.node.source.value,
                workspaceRoot,
            });
            for (const specifier of importPath.node.specifiers) {
                // Skip default/namespace specifiers and inline type imports.
                if (!t.isImportSpecifier(specifier) || specifier.importKind === 'type') {
                    continue;
                }
                specifiers.push({
                    filePath,
                    importedName: importSpecifierName(specifier),
                    localName: specifier.local.name,
                    workspaceModule,
                });
            }
        },
    });
    return specifiers;
};
|
|
128
|
+
/**
 * Resolve the CLI mappings against the generated imports actually present in
 * `files`. Targets referenced by at least one generated import are validated
 * (must exist inside the workspace); unused targets are resolved without
 * validation so they do not fail a run they cannot affect. Unscoped selectors
 * that match symbols imported from more than one generated module are
 * rejected as ambiguous.
 */
const resolveMappings = async ({ files, mappings, workspaceRoot, }) => {
    const importedSpecifiers = (await Promise.all(files.map(async (filePath) => collectGeneratedImportSpecifiers({
        filePath,
        source: await Bun.file(filePath).text(),
        workspaceRoot,
    })))).flat();
    // symbol (imported OR local name) -> set of workspace modules it came from
    const symbolMatches = new Map();
    for (const specifier of importedSpecifiers) {
        for (const key of [specifier.importedName, specifier.localName]) {
            const matches = symbolMatches.get(key) ?? new Set();
            matches.add(specifier.workspaceModule);
            symbolMatches.set(key, matches);
        }
    }
    const resolvedMappings = await Promise.all([...mappings.entries()].map(async ([selector, target]) => {
        const parsed = parseRebindMapping(`${selector}=${target}`);
        // Only validate targets some generated import will actually use.
        const usedByGeneratedImport = importedSpecifiers.some((specifier) => (parsed.workspaceModule === null || parsed.workspaceModule === specifier.workspaceModule) &&
            (parsed.symbol === specifier.importedName || parsed.symbol === specifier.localName));
        return {
            ...parsed,
            absoluteTarget: usedByGeneratedImport
                ? await resolveValidatedTargetPath({
                    targetPath: parsed.target,
                    workspaceRoot,
                })
                : path.resolve(workspaceRoot, parsed.target),
        };
    }));
    for (const mapping of resolvedMappings) {
        // Scoped selectors are unambiguous by construction.
        if (mapping.workspaceModule) {
            continue;
        }
        const modules = symbolMatches.get(mapping.symbol);
        if (modules && modules.size > 1) {
            throw new Error(`Rebind mapping "${mapping.symbol}" is ambiguous across generated modules (${[...modules].sort().join(', ')}). Use <workspace-module>:${mapping.symbol}=... instead.`);
        }
    }
    return resolvedMappings;
};
|
|
167
|
+
// Find the mapping for an imported symbol. A mapping scoped to the same
// workspace module wins over a global (unscoped) one; either kind may match
// on the imported name or the local alias.
const lookupResolvedMapping = ({ localName, mappings, importedName, workspaceModule }) => {
    const matchesSymbol = (mapping) =>
        mapping.symbol === importedName || mapping.symbol === localName;
    const scoped = mappings.find(
        (mapping) => mapping.workspaceModule === workspaceModule && matchesSymbol(mapping),
    );
    if (scoped !== undefined) {
        return scoped;
    }
    return mappings.find(
        (mapping) => mapping.workspaceModule === null && matchesSymbol(mapping),
    );
};
|
|
170
|
+
// Append a deep clone of `specifier` to the group keyed by `source`,
// recording the source in `declarationOrder` the first time it appears so
// emitted declarations keep discovery order.
const appendGroupedSpecifier = ({ declarationOrder, groupedSpecifiers, source, specifier }) => {
    const clone = t.cloneNode(specifier, true);
    const group = groupedSpecifiers.get(source);
    if (group === undefined) {
        groupedSpecifiers.set(source, [clone]);
        declarationOrder.push(source);
    } else {
        group.push(clone);
    }
};
|
|
179
|
+
/**
 * Partition one import declaration's specifiers by their rewrite destination.
 * Default/namespace specifiers and inline type specifiers always stay with
 * the original source; value specifiers move to a matching mapping's target
 * (expressed relative to the test file). Returns the grouped specifiers plus
 * the order in which each destination source was first used.
 */
const groupImportSpecifiersBySource = ({ filePath, mappings, specifiers, sourceValue, workspaceModule, }) => {
    const groupedSpecifiers = new Map();
    const declarationOrder = [];
    for (const specifier of specifiers) {
        if (!t.isImportSpecifier(specifier)) {
            // Default / namespace imports cannot be rebound; keep them in place.
            appendGroupedSpecifier({
                declarationOrder,
                groupedSpecifiers,
                source: sourceValue,
                specifier,
            });
            continue;
        }
        if (specifier.importKind === 'type') {
            // Inline type imports stay with the generated module.
            appendGroupedSpecifier({
                declarationOrder,
                groupedSpecifiers,
                source: sourceValue,
                specifier,
            });
            continue;
        }
        const importedName = importSpecifierName(specifier);
        const mapping = lookupResolvedMapping({
            importedName,
            localName: specifier.local.name,
            mappings,
            workspaceModule,
        });
        appendGroupedSpecifier({
            declarationOrder,
            groupedSpecifiers,
            // Rebound specifiers import from the mapping's target relative to
            // this file; unmatched specifiers keep the original source.
            source: mapping
                ? relativeImportPath({
                    fromFile: filePath,
                    toFile: mapping.absoluteTarget,
                })
                : sourceValue,
            specifier,
        });
    }
    return {
        declarationOrder,
        groupedSpecifiers,
    };
};
|
|
225
|
+
// Materialize one import declaration per grouped source, preserving the
// order in which each source was first discovered.
const buildReplacementDeclarations = ({ declarationOrder, groupedSpecifiers }) => {
    return declarationOrder.map((declarationSource) => {
        const specifiers = groupedSpecifiers.get(declarationSource) ?? [];
        return t.importDeclaration(specifiers, t.stringLiteral(declarationSource));
    });
};
|
|
226
|
+
/**
 * Rewrite every generated-module import in one replay test source. Returns
 * null when the file is not a generated replay file or when no import was
 * changed; otherwise returns the regenerated code plus a before/after import
 * preview. The output is re-parsed before being returned so broken code is
 * never handed back to the caller.
 */
const rewriteImportDeclarations = ({ filePath, mappings, source, workspaceRoot, }) => {
    if (!isGeneratedReplayFile(source)) {
        return null;
    }
    const ast = parseModuleAst({
        source,
        sourcePath: filePath,
    });
    const beforeImports = [];
    const afterImports = [];
    let changed = false;
    traverse(ast, {
        ImportDeclaration(importPath) {
            if (importPath.node.importKind === 'type') {
                return;
            }
            const sourceValue = importPath.node.source.value;
            if (!sourceValue.includes(GENERATED_MODULE_SEGMENT)) {
                return;
            }
            const workspaceModule = resolveWorkspaceModule({
                filePath,
                importSource: sourceValue,
                workspaceRoot,
            });
            const { declarationOrder, groupedSpecifiers } = groupImportSpecifiersBySource({
                filePath,
                mappings,
                sourceValue,
                specifiers: importPath.node.specifiers,
                workspaceModule,
            });
            // A single group still pointing at the original source means no
            // specifier was rebound; leave this declaration untouched.
            if (declarationOrder.length === 1 && declarationOrder[0] === sourceValue) {
                return;
            }
            beforeImports.push(generate(importPath.node, { comments: false }).code);
            const replacementDeclarations = buildReplacementDeclarations({
                declarationOrder,
                groupedSpecifiers,
            });
            afterImports.push(replacementDeclarations
                .map((declaration) => generate(declaration, { comments: false }).code)
                .join('\n'));
            importPath.replaceWithMultiple(replacementDeclarations);
            // Skip the freshly inserted declarations so they are not revisited.
            importPath.skip();
            changed = true;
        },
    });
    if (!changed) {
        return null;
    }
    const code = generate(ast, { comments: true, compact: false }).code;
    // Round-trip parse: fail loudly here rather than writing broken output.
    parseModuleAst({
        source: code,
        sourcePath: filePath,
    });
    return {
        code,
        preview: buildPreview({
            after: afterImports.join('\n'),
            before: beforeImports.join('\n'),
            filePath,
        }),
    };
};
|
|
291
|
+
/**
 * Rewrite generated replay imports to point at extracted source modules.
 *
 * Walks every test under `tests/replay`, resolves the CLI mappings against
 * the imports actually present, rewrites matching files, and records both a
 * characterize report and a validator ledger entry. Mutating runs require
 * `yes=true`; dry runs compute previews without writing anything.
 */
export const rewriteReplayImports = async ({ dryRun = false, mappings, workspaceRoot, yes = false, }) => {
    await assertV4Workspace(workspaceRoot);
    // Guard: in-place mutation needs explicit consent unless previewing.
    if (!dryRun && !yes) {
        throw new Error('rewriteReplayImports modifies files in place. Re-run with yes=true or use dryRun=true.');
    }
    const replayRoot = path.join(workspaceRoot, 'tests', 'replay');
    const files = await collectReplayTests(replayRoot);
    const resolvedMappings = await resolveMappings({
        files,
        mappings,
        workspaceRoot,
    });
    const touched = [];
    const previews = [];
    for (const filePath of files) {
        const source = await Bun.file(filePath).text();
        const rewritten = rewriteImportDeclarations({
            filePath,
            mappings: resolvedMappings,
            source,
            workspaceRoot,
        });
        if (!rewritten) {
            continue;
        }
        touched.push(filePath);
        previews.push(rewritten.preview);
        if (!dryRun) {
            // Normalize output to exactly one trailing newline.
            await Bun.write(filePath, `${rewritten.code.trimEnd()}\n`);
        }
    }
    const reportPath = await writeCharacterizeReport({
        payload: {
            dryRun,
            files: touched,
            mappingCount: mappings.size,
            previews,
            yes,
        },
        prefix: dryRun ? 'rebind-dry-run' : 'rebind',
        workspaceRoot,
    });
    await recordCharacterizeValidatorResult({
        affectedFiles: touched.map((filePath) => toWorkspaceRelativePath({
            filePath,
            workspaceRoot,
        })),
        metrics: {
            dryRun,
            mappingCount: mappings.size,
            previewCount: previews.length,
            touchedFiles: touched.length,
        },
        resultPath: reportPath,
        summary: `${dryRun ? 'rebind dry-run previewed' : 'rebind rewrote'} ${touched.length} replay test file(s)`,
        verdict: 'green',
        workspaceRoot,
    });
    return {
        files: touched,
        previews,
    };
};
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import { VERIFY_REPORT_SCHEMA_NAME, VERIFY_REPORT_SCHEMA_VERSION } from './constants.ts';
|
|
2
|
+
export type VerifyReport = {
|
|
3
|
+
readonly exitCode: number;
|
|
4
|
+
readonly filter: null | string;
|
|
5
|
+
readonly generatedAt: string;
|
|
6
|
+
readonly mode: 'lenient' | 'strict';
|
|
7
|
+
readonly schemaName: typeof VERIFY_REPORT_SCHEMA_NAME;
|
|
8
|
+
readonly schemaVersion: typeof VERIFY_REPORT_SCHEMA_VERSION;
|
|
9
|
+
readonly success: boolean;
|
|
10
|
+
readonly workspaceRoot: string;
|
|
11
|
+
};
|
|
12
|
+
export declare const createVerifyReport: ({ exitCode, filter, mode, workspaceRoot, }: {
|
|
13
|
+
readonly exitCode: number;
|
|
14
|
+
readonly filter: null | string;
|
|
15
|
+
readonly mode: "lenient" | "strict";
|
|
16
|
+
readonly workspaceRoot: string;
|
|
17
|
+
}) => VerifyReport;
|
|
18
|
+
//# sourceMappingURL=replay-report.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"replay-report.d.ts","sourceRoot":"","sources":["../src/replay-report.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,yBAAyB,EAAE,4BAA4B,EAAE,MAAM,gBAAgB,CAAC;AAGzF,MAAM,MAAM,YAAY,GAAG;IACvB,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC;IAC1B,QAAQ,CAAC,MAAM,EAAE,IAAI,GAAG,MAAM,CAAC;IAC/B,QAAQ,CAAC,WAAW,EAAE,MAAM,CAAC;IAC7B,QAAQ,CAAC,IAAI,EAAE,SAAS,GAAG,QAAQ,CAAC;IACpC,QAAQ,CAAC,UAAU,EAAE,OAAO,yBAAyB,CAAC;IACtD,QAAQ,CAAC,aAAa,EAAE,OAAO,4BAA4B,CAAC;IAC5D,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B,QAAQ,CAAC,aAAa,EAAE,MAAM,CAAC;CAClC,CAAC;AAEF,eAAO,MAAM,kBAAkB,GAAI,4CAKhC;IACC,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC;IAC1B,QAAQ,CAAC,MAAM,EAAE,IAAI,GAAG,MAAM,CAAC;IAC/B,QAAQ,CAAC,IAAI,EAAE,SAAS,GAAG,QAAQ,CAAC;IACpC,QAAQ,CAAC,aAAa,EAAE,MAAM,CAAC;CAClC,KAAG,YASF,CAAC"}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import { VERIFY_REPORT_SCHEMA_NAME, VERIFY_REPORT_SCHEMA_VERSION } from "./constants.js";
|
|
2
|
+
import { nowIso } from "./time.js";
|
|
3
|
+
// Build a schema-tagged verify report stamped with the current time;
// `success` simply mirrors a zero exit code.
export const createVerifyReport = ({ exitCode, filter, mode, workspaceRoot }) => {
    const success = exitCode === 0;
    return {
        exitCode,
        filter,
        generatedAt: nowIso(),
        mode,
        schemaName: VERIFY_REPORT_SCHEMA_NAME,
        schemaVersion: VERIFY_REPORT_SCHEMA_VERSION,
        success,
        workspaceRoot,
    };
};
|
package/dist/scene.d.ts
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import { canonicalizeSceneTree } from './trace-format.ts';
|
|
2
|
+
export declare const scaffoldSceneCharacterizationTests: ({ dryRun, workspaceRoot, }: {
|
|
3
|
+
readonly dryRun?: boolean;
|
|
4
|
+
readonly workspaceRoot: string;
|
|
5
|
+
}) => Promise<{
|
|
6
|
+
stale: string[];
|
|
7
|
+
written: string[];
|
|
8
|
+
}>;
|
|
9
|
+
export { canonicalizeSceneTree };
|
|
10
|
+
type PopulateSmokeScaffoldsOptions = {
|
|
11
|
+
readonly workspaceDir: string;
|
|
12
|
+
};
|
|
13
|
+
export type PopulateSmokeScaffoldsResult = {
|
|
14
|
+
readonly sidecarPath: string;
|
|
15
|
+
readonly skipped: readonly string[];
|
|
16
|
+
readonly toolingGaps: readonly string[];
|
|
17
|
+
readonly written: readonly string[];
|
|
18
|
+
};
|
|
19
|
+
/**
|
|
20
|
+
* Populate seed-time `tests/smoke/<state>.test.ts` scaffolds once the shared smoke harness
|
|
21
|
+
* and parity baseline PNGs exist. Operator-edited scaffolds are preserved by sidecar hash.
|
|
22
|
+
*/
|
|
23
|
+
export declare const populateSmokeScaffolds: ({ workspaceDir, }: PopulateSmokeScaffoldsOptions) => Promise<PopulateSmokeScaffoldsResult>;
|
|
24
|
+
//# sourceMappingURL=scene.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"scene.d.ts","sourceRoot":"","sources":["../src/scene.ts"],"names":[],"mappings":"AAYA,OAAO,EAAE,qBAAqB,EAAsD,MAAM,mBAAmB,CAAC;AA2B9G,eAAO,MAAM,kCAAkC,GAAU,4BAGtD;IACC,QAAQ,CAAC,MAAM,CAAC,EAAE,OAAO,CAAC;IAC1B,QAAQ,CAAC,aAAa,EAAE,MAAM,CAAC;CAClC;;;EA8DA,CAAC;AAEF,OAAO,EAAE,qBAAqB,EAAE,CAAC;AAEjC,KAAK,6BAA6B,GAAG;IACjC,QAAQ,CAAC,YAAY,EAAE,MAAM,CAAC;CACjC,CAAC;AAEF,MAAM,MAAM,4BAA4B,GAAG;IACvC,QAAQ,CAAC,WAAW,EAAE,MAAM,CAAC;IAC7B,QAAQ,CAAC,OAAO,EAAE,SAAS,MAAM,EAAE,CAAC;IACpC,QAAQ,CAAC,WAAW,EAAE,SAAS,MAAM,EAAE,CAAC;IACxC,QAAQ,CAAC,OAAO,EAAE,SAAS,MAAM,EAAE,CAAC;CACvC,CAAC;AA6CF;;;GAGG;AACH,eAAO,MAAM,sBAAsB,GAAU,mBAE1C,6BAA6B,KAAG,OAAO,CAAC,4BAA4B,CAkItE,CAAC"}
|
package/dist/scene.js
ADDED
|
@@ -0,0 +1,235 @@
|
|
|
1
|
+
import { mkdir, readdir } from 'node:fs/promises';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import { recordCharacterizeToolingGap, recordCharacterizeValidatorResult } from "./ledger.js";
|
|
4
|
+
import { hashSeedScaffoldContent, listFilesRecursive, loadSeedFingerprintSidecar, PARITY_BASELINE_RELATIVE_DIR, toWorkspaceRelativePath, updateSeedFingerprintSidecar, } from "./seed-scaffolds.js";
|
|
5
|
+
import { relativeImportPath } from "./shared.js";
|
|
6
|
+
import { canonicalizeSceneTree, ensureWorkspaceTraceHarness, workspaceHarnessPaths } from "./trace-format.js";
|
|
7
|
+
import { assertV4Workspace, atomicWriteText } from "./workspace-paths.js";
|
|
8
|
+
// Lowercase `value` and collapse every run of non-alphanumeric characters
// into a single hyphen, trimming hyphens from both ends.
const slugify = (value) => {
    const lowered = value.trim().toLowerCase();
    const hyphenated = lowered.replace(/[^a-z0-9]+/gu, '-');
    return hyphenated.replace(/^-+|-+$/gu, '');
};
|
|
13
|
+
// Load `screenshots/index.json` for the workspace. A missing index file
// means no captures; a present file contributes its `captures` array
// (defaulting to empty when the key is absent).
const readScreenshotsIndex = async (workspaceRoot) => {
    const indexPath = path.join(workspaceRoot, 'screenshots', 'index.json');
    const file = Bun.file(indexPath);
    if (!(await file.exists())) {
        return [];
    }
    const parsed = await file.json();
    return parsed.captures ?? [];
};
|
|
21
|
+
// JSON-escape a value so it can be embedded safely in generated source text.
const quote = (value) => {
    return JSON.stringify(value);
};
|
|
22
|
+
/**
 * Generate one `tests/scene/<state>.test.ts` per screenshot capture that has a
 * `scene-heavy.json` baseline. Each generated test shells out to the
 * `ushman-characterize capture` CLI and compares the canonicalized live scene
 * tree against the stored baseline. Returns the written test paths plus any
 * stale `.test.ts` files that no longer correspond to a capture.
 * NOTE(review): interior indentation of the emitted template is reconstructed
 * from a whitespace-mangled diff view — confirm against the published tarball.
 */
export const scaffoldSceneCharacterizationTests = async ({ dryRun = false, workspaceRoot, }) => {
    await assertV4Workspace(workspaceRoot);
    const screenshots = await readScreenshotsIndex(workspaceRoot);
    const testsDir = path.join(workspaceRoot, 'tests', 'scene');
    const sceneCaptureDir = workspaceHarnessPaths(workspaceRoot).sceneFixturesDir;
    await mkdir(testsDir, { recursive: true });
    await mkdir(sceneCaptureDir, { recursive: true });
    await ensureWorkspaceTraceHarness(workspaceRoot);
    const written = [];
    for (const capture of screenshots) {
        // Fall back to the folder name when the label slugifies to nothing.
        const stateId = slugify(capture.label) || capture.folderName;
        const baselinePath = path.join(workspaceRoot, 'screenshots', capture.folderName, 'scene-heavy.json');
        // Captures without a scene baseline cannot be characterized; skip them.
        if (!(await Bun.file(baselinePath).exists())) {
            continue;
        }
        const filePath = path.join(testsDir, `${stateId}.test.ts`);
        const harnessImport = relativeImportPath({
            fromFile: filePath,
            toFile: workspaceHarnessPaths(workspaceRoot).harnessFile,
        });
        const sceneOutputPath = path.join(sceneCaptureDir, `${stateId}.json`);
        const source = `// AUTO-GENERATED by ushman-characterize stub-states. Do not hand-edit.
import { expect, test } from 'bun:test';
import { canonicalizeSceneTree } from ${quote(harnessImport)};

const baseline = JSON.parse(await Bun.file(${quote(baselinePath)}).text());
const outputPath = ${quote(sceneOutputPath)};

const captureLive = async () => {
  const result = Bun.spawnSync(
    ['ushman-characterize', 'capture', '.', '--states=${stateId}', '--scene-only'],
    {
      cwd: process.cwd(),
      env: process.env,
      stderr: 'pipe',
      stdout: 'pipe',
    },
  );
  expect(result.exitCode, new TextDecoder().decode(result.stderr)).toBe(0);
  return JSON.parse(await Bun.file(outputPath).text());
};

test(${quote(`${stateId} scene tree matches baseline`)}, async () => {
  const live = await captureLive();
  expect(canonicalizeSceneTree(live)).toEqual(canonicalizeSceneTree(baseline));
});
`;
        if (!dryRun) {
            await Bun.write(filePath, source);
        }
        written.push(filePath);
    }
    // Any remaining .test.ts not written this run belongs to a removed state.
    const stale = (await readdir(testsDir).catch(() => []))
        .filter((entry) => entry.endsWith('.test.ts'))
        .filter((entry) => !written.some((filePath) => path.basename(filePath) === entry));
    return {
        stale,
        written,
    };
};
|
|
82
|
+
export { canonicalizeSceneTree };
|
|
83
|
+
// Harness modules that must exist before smoke scaffolds can be populated.
const REQUIRED_SMOKE_HARNESS_FILES = ['config.ts', 'diff.ts', 'drive.ts'];
// Render the populated smoke test body for one state id. The baseline path
// is normalized to forward slashes so generated sources are OS-independent.
const renderPopulatedSmokeTestSource = (stateId) => {
    const baselineRelative = path
        .join(PARITY_BASELINE_RELATIVE_DIR, `${stateId}.png`)
        .split(path.sep)
        .join('/');
    const lines = [
        '// AUTO-GENERATED by ushman-characterize populateSmokeScaffolds. Do not hand-edit.',
        "import { expect, test } from 'bun:test';",
        "import { SMOKE_STATES } from './config.ts';",
        "import { diffSmokeStates } from './diff.ts';",
        "import { captureSmokeStates } from './drive.ts';",
        '',
        `const STATE_ID = ${quote(stateId)};`,
        `const BASELINE_PATH = ${quote(baselineRelative)};`,
        '',
        `test('${stateId} smoke state matches its baseline', async () => {`,
        ' expect(await Bun.file(BASELINE_PATH).exists()).toBe(true);',
        ' const state = SMOKE_STATES.find((entry) => entry.id === STATE_ID);',
        ' expect(state).toBeDefined();',
        ' await captureSmokeStates();',
        ' const diffResults = await diffSmokeStates();',
        ' const result = diffResults.find((entry) => entry.id === STATE_ID);',
        ' expect(result).toBeDefined();',
        ' expect(result!.ratio).toBeLessThanOrEqual(state!.diffThreshold);',
        '});',
        '',
    ];
    return lines.join('\n');
};
|
|
106
|
+
// One-line human summary: the written count always appears; skipped and
// tooling-gap counts appear only when non-zero.
const summarizePopulateSmokeScaffolds = ({ skipped, toolingGaps, written }) => {
    const parts = [`populated ${written.length} smoke scaffold file(s)`];
    if (skipped.length > 0) {
        parts.push(`skipped ${skipped.length} operator-managed file(s)`);
    }
    if (toolingGaps.length > 0) {
        parts.push(`${toolingGaps.length} tooling gap(s)`);
    }
    return parts.join('; ');
};
|
|
113
|
+
/**
 * Populate seed-time `tests/smoke/<state>.test.ts` scaffolds once the shared smoke harness
 * and parity baseline PNGs exist. Operator-edited scaffolds are preserved by sidecar hash.
 *
 * Flow: verify the workspace, load the fingerprint sidecar, list candidate
 * per-state test files, then (1) abort with a yellow verdict if any required
 * harness module is missing, (2) skip files the sidecar does not track or
 * whose hash no longer matches (operator edits), (3) write populated bodies
 * for states whose baseline PNG exists, and finally update the sidecar and
 * record a validator ledger entry.
 */
export const populateSmokeScaffolds = async ({ workspaceDir, }) => {
    await assertV4Workspace(workspaceDir);
    const sidecar = await loadSeedFingerprintSidecar(workspaceDir);
    const testsDir = path.join(workspaceDir, 'tests', 'smoke');
    // Candidates: every per-state test file except the shared smoke.test.ts.
    const smokeFiles = (await listFilesRecursive({
        dir: testsDir,
        suffix: '.test.ts',
    })).filter((filePath) => path.basename(filePath) !== 'smoke.test.ts');
    const nextFingerprints = {};
    const skipped = [];
    const toolingGaps = [];
    const written = [];
    // All shared harness modules must exist before any population happens.
    for (const requiredFile of REQUIRED_SMOKE_HARNESS_FILES) {
        const requiredFilePath = path.join(testsDir, requiredFile);
        if (!(await Bun.file(requiredFilePath).exists())) {
            const gap = `Missing smoke harness file ${toWorkspaceRelativePath({
                filePath: requiredFilePath,
                workspaceDir,
            })}. Re-run \`ushman go --stage=seed\` to restore the smoke harness before retrying population.`;
            toolingGaps.push(gap);
            await recordCharacterizeToolingGap({
                body: gap,
                summary: `missing smoke harness file ${requiredFile}`,
                workspaceRoot: workspaceDir,
            });
        }
    }
    // Harness gaps abort early: yellow verdict, nothing written.
    if (toolingGaps.length > 0) {
        await recordCharacterizeValidatorResult({
            metrics: {
                skippedCount: skipped.length,
                toolingGapCount: toolingGaps.length,
                writtenCount: written.length,
            },
            summary: summarizePopulateSmokeScaffolds({
                skipped,
                toolingGaps,
                written,
            }),
            verdict: 'yellow',
            workspaceRoot: workspaceDir,
        });
        return {
            sidecarPath: sidecar.filePath,
            skipped,
            toolingGaps,
            written,
        };
    }
    for (const filePath of smokeFiles) {
        const relativeTestPath = toWorkspaceRelativePath({
            filePath,
            workspaceDir,
        });
        const trackedHash = sidecar.scaffolds[relativeTestPath];
        // Untracked scaffolds cannot be safely overwritten.
        if (trackedHash === undefined) {
            const gap = `Cannot manage ${relativeTestPath} because tests/.seed-fingerprints.json does not track it. Re-run \`ushman go --stage=seed\` for this workspace, or add the scaffold to the sidecar before retrying population.`;
            toolingGaps.push(gap);
            await recordCharacterizeToolingGap({
                body: gap,
                summary: `untracked smoke scaffold ${relativeTestPath}`,
                workspaceRoot: workspaceDir,
            });
            continue;
        }
        const currentSource = await Bun.file(filePath).text();
        const currentHash = hashSeedScaffoldContent(currentSource);
        // Hash drift means an operator edited the scaffold; leave it alone.
        if (trackedHash !== currentHash) {
            skipped.push(filePath);
            continue;
        }
        const stateId = path.basename(filePath, '.test.ts');
        const baselinePath = path.join(workspaceDir, PARITY_BASELINE_RELATIVE_DIR, `${stateId}.png`);
        if (!(await Bun.file(baselinePath).exists())) {
            const gap = `Missing smoke baseline for ${relativeTestPath}. Expected ${baselinePath}.`;
            toolingGaps.push(gap);
            await recordCharacterizeToolingGap({
                body: `${gap}\nRun \`ushman-characterize stub-states --regen-stale\` after regenerating parity baselines for this workspace.`,
                summary: `missing smoke baseline for ${relativeTestPath}`,
                workspaceRoot: workspaceDir,
            });
            continue;
        }
        const nextSource = renderPopulatedSmokeTestSource(stateId);
        // Ensure exactly one trailing newline on the written scaffold.
        await atomicWriteText(filePath, `${nextSource.endsWith('\n') ? nextSource : `${nextSource}\n`}`);
        written.push(filePath);
        nextFingerprints[relativeTestPath] = hashSeedScaffoldContent(nextSource);
    }
    // Only touch the sidecar when something was actually rewritten.
    const sidecarPath = Object.keys(nextFingerprints).length > 0
        ? await updateSeedFingerprintSidecar({
            entries: nextFingerprints,
            workspaceDir,
        })
        : sidecar.filePath;
    await recordCharacterizeValidatorResult({
        affectedFiles: written.map((filePath) => toWorkspaceRelativePath({
            filePath,
            workspaceDir,
        })),
        metrics: {
            skippedCount: skipped.length,
            toolingGapCount: toolingGaps.length,
            writtenCount: written.length,
        },
        summary: summarizePopulateSmokeScaffolds({
            skipped,
            toolingGaps,
            written,
        }),
        verdict: toolingGaps.length > 0 ? 'yellow' : 'green',
        workspaceRoot: workspaceDir,
    });
    return {
        sidecarPath,
        skipped,
        toolingGaps,
        written,
    };
};
|