browser-metro 1.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bundler.d.ts +34 -0
- package/dist/bundler.js +320 -0
- package/dist/dependency-graph.d.ts +22 -0
- package/dist/dependency-graph.js +128 -0
- package/dist/fs.d.ts +20 -0
- package/dist/fs.js +107 -0
- package/dist/hmr-runtime.d.ts +14 -0
- package/dist/hmr-runtime.js +231 -0
- package/dist/incremental-bundler.d.ts +71 -0
- package/dist/incremental-bundler.js +646 -0
- package/dist/index.d.ts +11 -0
- package/dist/index.js +9 -0
- package/dist/module-cache.d.ts +22 -0
- package/dist/module-cache.js +31 -0
- package/dist/plugins/data-bx-path.d.ts +2 -0
- package/dist/plugins/data-bx-path.js +197 -0
- package/dist/resolver.d.ts +18 -0
- package/dist/resolver.js +84 -0
- package/dist/source-map.d.ts +36 -0
- package/dist/source-map.js +186 -0
- package/dist/transforms/react-refresh.d.ts +5 -0
- package/dist/transforms/react-refresh.js +92 -0
- package/dist/transforms/typescript.d.ts +2 -0
- package/dist/transforms/typescript.js +20 -0
- package/dist/types.d.ts +99 -0
- package/dist/types.js +1 -0
- package/dist/utils.d.ts +31 -0
- package/dist/utils.js +208 -0
- package/package.json +22 -0
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import { VirtualFS } from "./fs.js";
import { BundlerConfig } from "./types.js";
/**
 * Full-build bundler: transforms project sources through the plugin +
 * transformer pipeline, fetches pre-bundled npm packages, and emits a
 * single executable bundle string with an inline combined source map.
 */
export declare class Bundler {
    private fs;
    private resolver;
    private config;
    private plugins;
    constructor(fs: VirtualFS, config: BundlerConfig);
    /** Read tsconfig.json "compilerOptions.paths" from the VirtualFS */
    private static readTsconfigPaths;
    /** Run the full pre-transform -> Sucrase -> post-transform pipeline */
    private runTransform;
    /** Build a resolve callback that consults plugins then falls back to default resolution */
    private makeResolveTarget;
    /** Collect module aliases from all plugins */
    private getModuleAliases;
    /** Collect module shims from all plugins */
    private getShimModules;
    /** Transform a single file using the configured transformer */
    transformFile(filename: string, src: string): string;
    /** Bundle starting from the entry file, returning executable code */
    bundle(entryFile: string): Promise<string>;
    /** Read dependency versions from the project's package.json */
    private getPackageVersions;
    /** Resolve an npm specifier to a versioned form using package.json versions.
     * Priority: user's package.json > transitive dep versions from manifests > bare name */
    private resolveNpmSpecifier;
    /** Fetch a pre-bundled npm package from the package server */
    private fetchPackage;
    /** Build the module map by walking the dependency graph */
    private buildModuleMap;
    /** Emit the final bundle string */
    private emitBundle;
}
|
package/dist/bundler.js
ADDED
|
@@ -0,0 +1,320 @@
|
|
|
1
|
+
import { Resolver } from "./resolver.js";
|
|
2
|
+
import { buildCombinedSourceMap, countNewlines, inlineSourceMap, shiftSourceMapOrigLines, } from "./source-map.js";
|
|
3
|
+
import { findRequires, rewriteRequires, buildBundlePreamble } from "./utils.js";
|
|
4
|
+
/**
 * Full-build bundler: transforms project sources through the plugin +
 * transformer pipeline, fetches pre-bundled npm packages from the package
 * server, and emits a single executable bundle string.
 */
export class Bundler {
    /** Virtual file system containing the project sources. */
    fs;
    /** Path resolver (extensions, tsconfig paths, npm-package detection). */
    resolver;
    /** Bundler configuration: transformer, server, env, plugins, ... */
    config;
    /** Active plugin list (empty array when config.plugins is absent). */
    plugins;
    constructor(fs, config) {
        this.fs = fs;
        this.config = config;
        const paths = Bundler.readTsconfigPaths(fs);
        this.resolver = new Resolver(fs, { ...config.resolver, ...(paths && { paths }) });
        this.plugins = config.plugins ?? [];
    }
    /** Read tsconfig.json "compilerOptions.paths" from the VirtualFS */
    static readTsconfigPaths(fs) {
        const raw = fs.read("/tsconfig.json");
        if (!raw)
            return null;
        try {
            const tsconfig = JSON.parse(raw);
            const paths = tsconfig?.compilerOptions?.paths;
            return paths && typeof paths === "object" ? paths : null;
        }
        catch {
            // Malformed tsconfig.json: proceed with no path mappings.
            return null;
        }
    }
    /**
     * Run the full pre-transform -> Sucrase -> post-transform pipeline.
     * Counts lines added by plugins before and after the core transform so
     * the source map can be shifted to keep pointing at the original source.
     */
    runTransform(filename, src) {
        const originalLines = countNewlines(src);
        // Pre-transform hooks
        for (const plugin of this.plugins) {
            if (plugin.transformSource) {
                const result = plugin.transformSource({ src, filename });
                if (result)
                    src = result.src;
            }
        }
        const preTransformAddedLines = countNewlines(src) - originalLines;
        // Core transform (Sucrase)
        const transformResult = this.config.transformer.transform({ src, filename });
        let code = transformResult.code;
        let sourceMap = transformResult.sourceMap;
        // Post-transform hooks -- track line additions for source map offset
        const linesBeforePost = countNewlines(code);
        for (const plugin of this.plugins) {
            if (plugin.transformOutput) {
                const result = plugin.transformOutput({ code, filename });
                if (result)
                    code = result.code;
            }
        }
        // Adjust source map for plugin modifications
        if (sourceMap) {
            // Post-transform: shift generated lines for prepended output lines
            const postAddedLines = countNewlines(code) - linesBeforePost;
            if (postAddedLines > 0) {
                sourceMap = {
                    ...sourceMap,
                    // One ';' per generated line in the v3 "mappings" string.
                    mappings: ";".repeat(postAddedLines) + sourceMap.mappings,
                };
            }
            // Pre-transform: shift origLine back so mappings point to original source
            if (preTransformAddedLines > 0) {
                sourceMap = shiftSourceMapOrigLines(sourceMap, -preTransformAddedLines);
            }
        }
        return { code, sourceMap };
    }
    /** Build a resolve callback that consults plugins then falls back to default resolution */
    makeResolveTarget(fromFile) {
        return (target) => {
            // Let plugins resolve first
            for (const plugin of this.plugins) {
                if (plugin.resolveRequest) {
                    const result = plugin.resolveRequest({ fromFile }, target);
                    if (result !== null)
                        return result;
                }
            }
            // Default resolution: skip npm packages, resolve local paths
            if (this.resolver.isNpmPackage(target))
                return null;
            const resolved = this.resolver.resolvePath(fromFile, target);
            const actual = this.resolver.resolveFile(resolved);
            return actual ?? null;
        };
    }
    /** Collect module aliases from all plugins (later plugins win on conflicts). */
    getModuleAliases() {
        const aliases = {};
        for (const plugin of this.plugins) {
            if (plugin.moduleAliases) {
                Object.assign(aliases, plugin.moduleAliases());
            }
        }
        return aliases;
    }
    /** Collect module shims from all plugins (later plugins win on conflicts). */
    getShimModules() {
        const shims = {};
        for (const plugin of this.plugins) {
            if (plugin.shimModules) {
                Object.assign(shims, plugin.shimModules());
            }
        }
        return shims;
    }
    /** Transform a single file using the configured transformer */
    transformFile(filename, src) {
        return this.runTransform(filename, src).code;
    }
    /** Bundle starting from the entry file, returning executable code */
    async bundle(entryFile) {
        const { moduleMap, sourceMapMap } = await this.buildModuleMap(entryFile);
        return this.emitBundle(moduleMap, sourceMapMap, entryFile);
    }
    /** Read dependency versions from the project's package.json */
    getPackageVersions() {
        const raw = this.fs.read("/package.json");
        if (!raw)
            return {};
        try {
            const pkg = JSON.parse(raw);
            return pkg.dependencies || {};
        }
        catch {
            // Malformed package.json: no known versions.
            return {};
        }
    }
    /** Resolve an npm specifier to a versioned form using package.json versions.
     * Priority: user's package.json > transitive dep versions from manifests > bare name */
    resolveNpmSpecifier(specifier, versions, transitiveDepsVersions) {
        // Extract base package name: "@scope/pkg/sub" -> "@scope/pkg", "lodash/fp" -> "lodash"
        let baseName;
        if (specifier.startsWith("@")) {
            const parts = specifier.split("/");
            baseName = parts[0] + "/" + parts[1];
        }
        else {
            baseName = specifier.split("/")[0];
        }
        const version = versions[baseName] || (transitiveDepsVersions && transitiveDepsVersions[baseName]);
        if (!version)
            return specifier;
        const subpath = specifier.slice(baseName.length); // e.g. "/client" or ""
        return baseName + "@" + version + subpath;
    }
    /** Fetch a pre-bundled npm package from the package server */
    async fetchPackage(specifier) {
        const url = this.config.server.packageServerUrl + "/pkg/" + specifier;
        const res = await fetch(url);
        if (!res.ok) {
            // Include a truncated body excerpt to aid debugging; body read is best-effort.
            const body = await res.text().catch(() => "");
            throw new Error("Failed to fetch package '" + specifier + "' (HTTP " + res.status + ")" + (body ? ": " + body.slice(0, 200) : ""));
        }
        const code = await res.text();
        let externals = {};
        const externalsHeader = res.headers.get("X-Externals");
        if (externalsHeader) {
            try {
                externals = JSON.parse(externalsHeader);
            }
            catch { }
        }
        return { code, externals };
    }
    /** Build the module map by walking the dependency graph */
    async buildModuleMap(entryFile) {
        const moduleMap = {};
        const sourceMapMap = {};
        const visited = {};
        const npmPackages = {};
        const versions = this.getPackageVersions();
        const transitiveDepsVersions = {};
        const walk = async (filePath) => {
            if (visited[filePath])
                return;
            visited[filePath] = true;
            // Asset files get a stub module that exports the filename (or a real URL for external assets)
            if (this.resolver.isAssetFile(filePath)) {
                if (this.fs.isExternalAsset(filePath) && this.config.assetPublicPath) {
                    const assetUrl = this.config.assetPublicPath + filePath;
                    moduleMap[filePath] = "module.exports = { uri: " + JSON.stringify(assetUrl) + " };";
                }
                else {
                    moduleMap[filePath] = "module.exports = " + JSON.stringify(filePath) + ";";
                }
                return;
            }
            const source = this.fs.read(filePath);
            // FIX: compare against undefined so an empty (zero-byte) file bundles
            // as an empty module instead of being misreported as missing ("" is falsy).
            if (source === undefined) {
                throw new Error("File not found: " + filePath);
            }
            // Transform the file (TS -> JS, JSX -> JS, etc.)
            const { code: transformed, sourceMap } = this.runTransform(filePath, source);
            if (sourceMap)
                sourceMapMap[filePath] = sourceMap;
            const rewritten = rewriteRequires(transformed, filePath, this.makeResolveTarget(filePath));
            moduleMap[filePath] = rewritten;
            const deps = findRequires(rewritten);
            for (let i = 0; i < deps.length; i++) {
                const dep = deps[i];
                if (this.resolver.isNpmPackage(dep)) {
                    npmPackages[dep] = true;
                }
                else {
                    const resolved = this.resolver.resolveFile(dep);
                    if (resolved) {
                        await walk(resolved);
                    }
                }
            }
        };
        await walk(entryFile);
        // Process module aliases: swap sources for targets in the fetch list
        const aliases = this.getModuleAliases();
        for (const [from, to] of Object.entries(aliases)) {
            delete npmPackages[from];
            npmPackages[to] = true;
        }
        // Collect shims: these replace npm packages with inline code
        const shims = this.getShimModules();
        for (const name of Object.keys(shims)) {
            delete npmPackages[name];
        }
        // Fetch npm packages in parallel, then resolve any transitive deps (subpath requires etc.)
        const skipNames = new Set([...Object.keys(aliases), ...Object.keys(shims)]);
        const knownNpm = new Set(Object.keys(npmPackages));
        let toFetch = [...knownNpm];
        while (toFetch.length > 0) {
            const fetches = toFetch.map((name) => {
                const versionedSpecifier = this.resolveNpmSpecifier(name, versions, transitiveDepsVersions);
                return this.fetchPackage(versionedSpecifier).then(({ code, externals }) => {
                    moduleMap[name] = code;
                    // Merge externals into transitive versions (don't overwrite existing entries)
                    for (const [dep, ver] of Object.entries(externals)) {
                        if (!transitiveDepsVersions[dep]) {
                            transitiveDepsVersions[dep] = ver;
                        }
                    }
                });
            });
            await Promise.all(fetches);
            // Discover any npm deps from fetched packages not yet known
            toFetch = [];
            for (const name of knownNpm) {
                for (const dep of findRequires(moduleMap[name])) {
                    if (this.resolver.isNpmPackage(dep) && !knownNpm.has(dep) && !skipNames.has(dep)) {
                        knownNpm.add(dep);
                        toFetch.push(dep);
                    }
                }
            }
        }
        // Inject alias shim modules: require("react-native") → re-exports react-native-web
        for (const [from, to] of Object.entries(aliases)) {
            moduleMap[from] = 'module.exports = require("' + to + '");';
        }
        // Inject inline shim modules
        for (const [name, code] of Object.entries(shims)) {
            moduleMap[name] = code;
        }
        return { moduleMap, sourceMapMap };
    }
    /**
     * Emit the final bundle string: preamble + CommonJS-style runtime IIFE
     * over the module map, followed by an inline combined source map when
     * any module produced one.
     */
    emitBundle(moduleMap, sourceMapMap, entryFile) {
        const preamble = buildBundlePreamble(this.config.env, this.config.routerShim);
        const runtimeStr = "(function(modules) {\n" +
            "  var cache = {};\n" +
            "  function require(id) {\n" +
            "    if (cache[id]) return cache[id].exports;\n" +
            "    if (!modules[id]) throw new Error('Module not found: ' + id);\n" +
            "    var module = cache[id] = { exports: {} };\n" +
            "    modules[id].call(module.exports, module, module.exports, require);\n" +
            "    return module.exports;\n" +
            "  }\n" +
            "  require(" +
            JSON.stringify(entryFile) +
            ");\n" +
            "})({\n";
        const headerStr = preamble + runtimeStr;
        const ids = Object.keys(moduleMap);
        const moduleEntries = ids
            .map((id) => {
            return (JSON.stringify(id) +
                ": function(module, exports, require) {\n" +
                moduleMap[id] +
                "\n}");
        })
            .join(",\n\n");
        let bundle = headerStr + moduleEntries + "\n});\n";
        // Build combined source map: track the generated line offset of each
        // module body inside the emitted bundle text.
        let lineOffset = countNewlines(headerStr);
        const inputs = [];
        for (let i = 0; i < ids.length; i++) {
            const id = ids[i];
            if (i > 0)
                lineOffset += 2; // ",\n\n"
            lineOffset += 1; // wrapper function line
            if (sourceMapMap[id]) {
                const sourceContent = this.fs.read(id);
                if (sourceContent) {
                    inputs.push({
                        sourceFile: id,
                        sourceContent,
                        map: sourceMapMap[id],
                        generatedLineOffset: lineOffset,
                    });
                }
            }
            lineOffset += countNewlines(moduleMap[id]);
            lineOffset += 1; // "\n}"
        }
        if (inputs.length > 0) {
            bundle += inlineSourceMap(buildCombinedSourceMap(inputs)) + "\n";
        }
        return bundle;
    }
}
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
/**
 * Bidirectional module dependency graph used by the incremental bundler
 * and HMR: forward edges drive reachability, reverse edges drive
 * invalidation of dependents.
 */
export declare class DependencyGraph {
    /** Forward edges: module -> its dependencies */
    private nodes;
    /** Reverse edges: module -> modules that depend on it */
    private reverseDeps;
    /** Add or update a module's dependencies. Diffs against old deps to keep reverse edges in sync. */
    setModule(id: string, localDeps: string[], npmDeps: string[]): void;
    /** Remove a module and the reverse edges recorded for its deps; no-op if unknown. */
    removeModule(id: string): void;
    /** True if the module was registered via setModule and not removed. */
    hasModule(id: string): boolean;
    /** Get direct reverse dependencies (modules that import this one) */
    getDependents(id: string): Set<string>;
    /** Get all modules transitively affected by changes to the given set (BFS upward) */
    getAffectedModules(ids: Set<string>): Set<string>;
    /** Find modules unreachable from the entry file (BFS downward) */
    findOrphans(entryFile: string): Set<string>;
    /** Get the local deps of a module */
    getLocalDeps(id: string): Set<string>;
    /** Get the npm deps of a module */
    getNpmDeps(id: string): Set<string>;
    /** Get a serializable map of module -> its reverse deps (for embedding in HMR runtime) */
    getReverseDepsMap(): Record<string, string[]>;
}
|
|
@@ -0,0 +1,128 @@
|
|
|
1
|
+
/**
 * Bidirectional module dependency graph: forward edges (module -> deps)
 * for reachability, reverse edges (module -> dependents) for invalidation.
 */
export class DependencyGraph {
    /** Forward edges: module id -> { localDeps, npmDeps } sets. */
    nodes = new Map();
    /** Reverse edges: module id -> set of module ids that import it. */
    reverseDeps = new Map();
    /** Add or update a module's dependencies. Diffs against old deps to keep reverse edges in sync. */
    setModule(id, localDeps, npmDeps) {
        const nextLocal = new Set(localDeps);
        const nextNpm = new Set(npmDeps);
        const prev = this.nodes.get(id);
        if (prev) {
            // Drop reverse edges for deps this module no longer references.
            for (const stale of [...prev.localDeps, ...prev.npmDeps]) {
                if (!nextLocal.has(stale) && !nextNpm.has(stale)) {
                    this.reverseDeps.get(stale)?.delete(id);
                }
            }
        }
        // Register this module as a dependent of every current dep.
        for (const dep of [...nextLocal, ...nextNpm]) {
            let dependents = this.reverseDeps.get(dep);
            if (!dependents) {
                dependents = new Set();
                this.reverseDeps.set(dep, dependents);
            }
            dependents.add(id);
        }
        this.nodes.set(id, { localDeps: nextLocal, npmDeps: nextNpm });
    }
    /** Remove a module and the reverse edges recorded for its deps; no-op if unknown. */
    removeModule(id) {
        const node = this.nodes.get(id);
        if (!node)
            return;
        // Detach this module from the reverse-edge sets of everything it imported.
        for (const dep of [...node.localDeps, ...node.npmDeps]) {
            this.reverseDeps.get(dep)?.delete(id);
        }
        this.reverseDeps.delete(id);
        this.nodes.delete(id);
    }
    /** True if the module was registered via setModule and not removed. */
    hasModule(id) {
        return this.nodes.has(id);
    }
    /** Get direct reverse dependencies (modules that import this one) */
    getDependents(id) {
        return this.reverseDeps.get(id) ?? new Set();
    }
    /** Get all modules transitively affected by changes to the given set (BFS upward) */
    getAffectedModules(ids) {
        const affected = new Set();
        const pending = [...ids];
        // Index-based queue; each id is processed at most once.
        for (let i = 0; i < pending.length; i++) {
            const current = pending[i];
            if (affected.has(current))
                continue;
            affected.add(current);
            for (const dependent of this.reverseDeps.get(current) ?? []) {
                if (!affected.has(dependent))
                    pending.push(dependent);
            }
        }
        return affected;
    }
    /** Find modules unreachable from the entry file (BFS downward) */
    findOrphans(entryFile) {
        const reachable = new Set();
        const pending = [entryFile];
        for (let i = 0; i < pending.length; i++) {
            const current = pending[i];
            if (reachable.has(current))
                continue;
            reachable.add(current);
            for (const dep of this.nodes.get(current)?.localDeps ?? []) {
                if (!reachable.has(dep))
                    pending.push(dep);
            }
        }
        // Anything registered but never reached is an orphan.
        return new Set([...this.nodes.keys()].filter((id) => !reachable.has(id)));
    }
    /** Get the local deps of a module */
    getLocalDeps(id) {
        return this.nodes.get(id)?.localDeps ?? new Set();
    }
    /** Get the npm deps of a module */
    getNpmDeps(id) {
        return this.nodes.get(id)?.npmDeps ?? new Set();
    }
    /** Get a serializable map of module -> its reverse deps (for embedding in HMR runtime) */
    getReverseDepsMap() {
        return Object.fromEntries([...this.reverseDeps]
            .filter(([, dependents]) => dependents.size > 0)
            .map(([id, dependents]) => [id, [...dependents]]));
    }
}
|
package/dist/fs.d.ts
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import { FileMap, FileChange } from "./types.js";
/**
 * In-memory file system backing the bundler. Paths are absolute
 * ("/App.tsx"); entries carry content plus an external-asset flag.
 */
export declare class VirtualFS {
    private files;
    constructor(files: FileMap);
    /** Return file content, or undefined when the path is unknown. */
    read(path: string): string | undefined;
    /** Create or overwrite a (non-external) file. */
    write(path: string, content: string): void;
    /** Delete a file; returns true if it existed. */
    delete(path: string): boolean;
    exists(path: string): boolean;
    /** Check if a file is an external asset (binary file served from public/) */
    isExternalAsset(path: string): boolean;
    /** List all known file paths. */
    list(): string[];
    /** Pick the entry file from common candidates, falling back to package.json "main". */
    getEntryFile(): string | null;
    /** Read the "main" field from package.json, if present */
    getPackageMain(): string | null;
    /** Snapshot the current files as a plain FileMap copy. */
    toFileMap(): FileMap;
    /** Compare current state against an incoming FileMap, return list of changes */
    diff(newFiles: FileMap): FileChange[];
    /** Replace all files at once */
    replaceAll(files: FileMap): void;
}
|
package/dist/fs.js
ADDED
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
/**
 * In-memory file system backing the bundler. Paths are absolute
 * ("/App.tsx"); each entry holds { content, isExternal }.
 */
export class VirtualFS {
    /** Path -> entry map; copied defensively on construction/replace. */
    files;
    constructor(files) {
        this.files = { ...files };
    }
    /** Return file content, or undefined when the path is unknown. */
    read(path) {
        return this.files[path]?.content;
    }
    /** Create or overwrite a (non-external) file. */
    write(path, content) {
        this.files[path] = { content, isExternal: false };
    }
    /** Delete a file; returns true if it existed. */
    delete(path) {
        if (!(path in this.files))
            return false;
        delete this.files[path];
        return true;
    }
    exists(path) {
        return path in this.files;
    }
    /** Check if a file is an external asset (binary file served from public/) */
    isExternalAsset(path) {
        return this.files[path]?.isExternal === true;
    }
    list() {
        return Object.keys(this.files);
    }
    /** Pick the entry file from common candidates, then package.json "main". */
    getEntryFile() {
        // Candidate order matters: index.* before App.*, .js before .ts/.tsx/.jsx.
        const candidates = ["index", "App"].flatMap((stem) => [".js", ".ts", ".tsx", ".jsx"].map((ext) => "/" + stem + ext));
        const found = candidates.find((c) => this.exists(c));
        if (found)
            return found;
        // Check package.json "main" field for local entry
        const pkgJson = this.read("/package.json");
        if (pkgJson) {
            try {
                const pkg = JSON.parse(pkgJson);
                if (typeof pkg.main === "string" && (pkg.main.startsWith(".") || pkg.main.startsWith("/"))) {
                    // Normalize "./x" or "x" to "/x".
                    const base = pkg.main.replace(/^\.\//, "/").replace(/^([^/])/, "/$1");
                    const resolved = [base, base + ".js", base + ".ts", base + ".tsx", base + ".jsx"].find((p) => this.exists(p));
                    if (resolved)
                        return resolved;
                }
            }
            catch { }
        }
        return null;
    }
    /** Read the "main" field from package.json, if present */
    getPackageMain() {
        const pkgJson = this.read("/package.json");
        if (!pkgJson)
            return null;
        try {
            const main = JSON.parse(pkgJson).main;
            return typeof main === "string" ? main : null;
        }
        catch {
            return null;
        }
    }
    /** Snapshot the current files as a plain copy. */
    toFileMap() {
        return { ...this.files };
    }
    /** Compare current state against an incoming FileMap, return list of changes */
    diff(newFiles) {
        const changes = [];
        // Creates and content updates, in the incoming map's key order.
        for (const [path, entry] of Object.entries(newFiles)) {
            if (!(path in this.files)) {
                changes.push({ path, type: "create" });
            }
            else if (this.files[path].content !== entry.content) {
                changes.push({ path, type: "update" });
            }
        }
        // Deletes: anything we have that the incoming map lacks.
        for (const path of Object.keys(this.files)) {
            if (!(path in newFiles)) {
                changes.push({ path, type: "delete" });
            }
        }
        return changes;
    }
    /** Replace all files at once */
    replaceAll(files) {
        this.files = { ...files };
    }
}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
/**
 * HMR-capable bundle runtime template.
 *
 * Called at emit time as an IIFE with:
 *   modules - the module map object
 *   reverseDeps - JSON reverse dependency map
 *   entryId - entry file path string
 *   reactRefreshEnabled - boolean
 *
 * NOTE(review): the template body is a single pre-built string; do not edit
 * it piecemeal — it is runtime code injected verbatim into emitted bundles.
 */
export declare const HMR_RUNTIME_TEMPLATE = "(function(modules, reverseDeps, entryId, reactRefreshEnabled) {\n  var cache = {};\n  var hotState = {};\n\n  function initHot(id) {\n    if (hotState[id]) return hotState[id];\n    var state = {\n      acceptCallbacks: [],\n      disposeCallbacks: [],\n      data: {},\n      declined: false,\n      accepted: false\n    };\n    hotState[id] = state;\n    return state;\n  }\n\n  function createHotAPI(id) {\n    var state = initHot(id);\n    return {\n      accept: function(cb) {\n        state.accepted = true;\n        if (cb) state.acceptCallbacks.push(cb);\n      },\n      dispose: function(cb) {\n        if (cb) state.disposeCallbacks.push(cb);\n      },\n      decline: function() {\n        state.declined = true;\n      },\n      get data() {\n        return state.data;\n      }\n    };\n  }\n\n  function require(id) {\n    if (cache[id]) return cache[id].exports;\n    if (!modules[id]) throw new Error('Module not found: ' + id);\n    var module = cache[id] = { exports: {}, hot: createHotAPI(id) };\n    modules[id].call(module.exports, module, module.exports, require);\n    return module.exports;\n  }\n\n  // Walk reverse deps to find nearest module.hot.accept() boundary\n  function findAcceptBoundary(changedId) {\n    var visited = {};\n    var queue = [changedId];\n    var boundaries = [];\n\n    while (queue.length > 0) {\n      var current = queue.shift();\n      if (visited[current]) continue;\n      visited[current] = true;\n\n      var state = hotState[current];\n      if (state && state.declined) {\n        return null;\n      }\n      if (state && state.accepted) {\n        boundaries.push(current);\n        continue;\n      }\n      // Walk upward through reverse deps\n      var parents = reverseDeps[current];\n      if (!parents || parents.length === 0) {\n        return null;\n      }\n      for (var i = 0; i < parents.length; i++) {\n        queue.push(parents[i]);\n      }\n    }\n\n    return boundaries.length > 0 ? boundaries : null;\n  }\n\n  function applyUpdate(updatedModules, removedModules, newReverseDeps) {\n    // Merge updated reverse deps so boundary walk sees new modules\n    if (newReverseDeps) {\n      for (var key in newReverseDeps) {\n        reverseDeps[key] = newReverseDeps[key];\n      }\n    }\n\n    // Phase 1: Check boundaries FIRST, before any state mutation.\n    // The dispose phase used to reset accepted=false here, which wiped\n    // out the accept registrations before the boundary walk could see them.\n    var needsReload = false;\n    var modulesToReExecute = new Set();\n\n    for (var uid in updatedModules) {\n      var boundaries = findAcceptBoundary(uid);\n      if (!boundaries) {\n        needsReload = true;\n        break;\n      }\n      for (var b = 0; b < boundaries.length; b++) {\n        modulesToReExecute.add(boundaries[b]);\n      }\n      modulesToReExecute.add(uid);\n    }\n\n    if (needsReload) {\n      console.warn('[HMR] No accept boundary found, requesting full reload');\n      window.parent.postMessage({ type: 'hmr-full-reload' }, '*');\n      return;\n    }\n\n    // Phase 2: Run dispose callbacks and reset state for modules about to be re-executed\n    modulesToReExecute.forEach(function(id) {\n      var state = hotState[id];\n      if (state) {\n        var newData = {};\n        for (var i = 0; i < state.disposeCallbacks.length; i++) {\n          state.disposeCallbacks[i](newData);\n        }\n        state.data = newData;\n        state.disposeCallbacks = [];\n        state.acceptCallbacks = [];\n        state.accepted = false;\n      }\n    });\n\n    // Phase 3: Update module factories\n    for (var mid in updatedModules) {\n      modules[mid] = new Function('module', 'exports', 'require', updatedModules[mid]);\n    }\n\n    // Phase 4: Remove deleted modules\n    for (var r = 0; r < removedModules.length; r++) {\n      var rmId = removedModules[r];\n      delete modules[rmId];\n      delete cache[rmId];\n      delete hotState[rmId];\n    }\n\n    // Phase 5: Re-execute modules. Clear ALL caches first so that nested\n    // require() calls during re-execution pick up the new factories rather\n    // than stale cached exports (e.g. entry requiring a dependency that\n    // hasn't been re-executed yet in this loop).\n    modulesToReExecute.forEach(function(execId) {\n      delete cache[execId];\n    });\n    modulesToReExecute.forEach(function(execId) {\n      try {\n        require(execId);\n      } catch(err) {\n        console.error('[HMR] Error re-executing module ' + execId + ':', err);\n        window.parent.postMessage({ type: 'hmr-full-reload' }, '*');\n      }\n    });\n\n    // Phase 6: React Refresh -- tell React to re-render with new component definitions\n    if (reactRefreshEnabled && window.__REACT_REFRESH_RUNTIME__) {\n      try {\n        window.__REACT_REFRESH_RUNTIME__.performReactRefresh();\n      } catch(err) {\n        console.error('[HMR] React Refresh error:', err);\n      }\n    }\n\n    console.log('[HMR] Updated ' + Object.keys(updatedModules).length + ' module(s)');\n  }\n\n  // Expose HMR API globally\n  window.__BUNDLER_HMR__ = {\n    applyUpdate: applyUpdate,\n    modules: modules,\n    cache: cache,\n    hotState: hotState\n  };\n\n  // Listen for HMR updates from parent window\n  window.addEventListener('message', function(e) {\n    var data = e.data;\n    if (data && data.type === 'hmr-update') {\n      applyUpdate(data.updatedModules || {}, data.removedModules || [], data.reverseDepsMap);\n    }\n  });\n\n  // Initialize React Refresh runtime BEFORE executing the entry module,\n  // so $RefreshReg$ and $RefreshSig$ are available when component modules load.\n  if (reactRefreshEnabled) {\n    try {\n      var RefreshRuntime = require('react-refresh/runtime');\n      RefreshRuntime.injectIntoGlobalHook(window);\n      window.__REACT_REFRESH_RUNTIME__ = RefreshRuntime;\n      window.$RefreshReg$ = function() {};\n      window.$RefreshSig$ = function() { return function(type) { return type; }; };\n    } catch(e) {\n      console.warn('[HMR] React Refresh runtime not available:', e.message || e);\n    }\n  }\n\n  // Execute entry module\n  require(entryId);\n})";
/**
 * Emit an HMR-capable bundle string.
 *
 * @param moduleMap      module id -> module factory source
 * @param entryFile      id of the entry module executed first
 * @param reverseDepsMap module id -> ids of modules that import it
 * @param reactRefresh   whether the runtime wires up React Refresh
 * @param env            optional env map — presumably fed to the bundle preamble (cf. Bundler.emitBundle); verify in implementation
 * @param routerShim     optional router-shim flag — same caveat as env
 */
export declare function emitHmrBundle(moduleMap: Record<string, string>, entryFile: string, reverseDepsMap: Record<string, string[]>, reactRefresh: boolean, env?: Record<string, string>, routerShim?: boolean): string;
|