@zenbujs/core 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +11 -0
- package/dist/advice-config-CjgkEf2E.mjs +135 -0
- package/dist/advice-config-Cy133IQP.mjs +2 -0
- package/dist/advice-runtime.d.mts +35 -0
- package/dist/advice-runtime.mjs +131 -0
- package/dist/advice.d.mts +36 -0
- package/dist/advice.mjs +2 -0
- package/dist/base-window-BUt8pwbw.mjs +94 -0
- package/dist/base-window-DEIAk618.mjs +2 -0
- package/dist/build-config-pbv0w4oN.mjs +17 -0
- package/dist/build-electron-B4Gd0Gi4.mjs +516 -0
- package/dist/build-source-_q1n1zTV.mjs +162 -0
- package/dist/chunk-Dm34NbLt.mjs +6 -0
- package/dist/cli/bin.d.mts +1 -0
- package/dist/cli/bin.mjs +88 -0
- package/dist/cli/build.d.mts +53 -0
- package/dist/cli/build.mjs +48 -0
- package/dist/cli-BLbQQIVB.mjs +8054 -0
- package/dist/config-CdVrW85P.mjs +59 -0
- package/dist/config-LK73dJmO.mjs +2 -0
- package/dist/db-ByKPbnP6.mjs +2 -0
- package/dist/db-DhuAJrye.mjs +531 -0
- package/dist/db.d.mts +16 -0
- package/dist/db.mjs +16 -0
- package/dist/dev-BuqklM0k.mjs +85 -0
- package/dist/env-bootstrap-BtVME-CU.d.mts +16 -0
- package/dist/env-bootstrap-rj7I-59x.mjs +53 -0
- package/dist/env-bootstrap.d.mts +2 -0
- package/dist/env-bootstrap.mjs +2 -0
- package/dist/http-IBcLzbYu.mjs +2 -0
- package/dist/index-Bhlbyrn7.d.mts +63 -0
- package/dist/index-CPZ5d6Hl.d.mts +442 -0
- package/dist/index-FtE8MXJ_.d.mts +1 -0
- package/dist/index.d.mts +6 -0
- package/dist/index.mjs +5 -0
- package/dist/launcher.mjs +173 -0
- package/dist/link-6roQ7Cn6.mjs +580 -0
- package/dist/loaders/zenbu.d.mts +22 -0
- package/dist/loaders/zenbu.mjs +267 -0
- package/dist/log-CyKv8hQg.mjs +20 -0
- package/dist/mirror-sync-CodOnwkD.mjs +332 -0
- package/dist/monorepo-CmGPHsVm.mjs +119 -0
- package/dist/node-D4M19_mV.mjs +5 -0
- package/dist/node-loader.d.mts +17 -0
- package/dist/node-loader.mjs +33 -0
- package/dist/pause-DvAUNmKn.mjs +52 -0
- package/dist/publish-source-BVgB62Zj.mjs +131 -0
- package/dist/react.d.mts +76 -0
- package/dist/react.mjs +291 -0
- package/dist/registry-Dh_e7HU1.d.mts +61 -0
- package/dist/registry.d.mts +2 -0
- package/dist/registry.mjs +1 -0
- package/dist/reloader-BCkLjDhS.mjs +2 -0
- package/dist/reloader-lLAJ3lqg.mjs +164 -0
- package/dist/renderer-host-Bg8QdeeH.mjs +1508 -0
- package/dist/renderer-host-DpvBPTHJ.mjs +2 -0
- package/dist/rpc-BwwQK6hD.mjs +71 -0
- package/dist/rpc-CqitnyR4.mjs +2 -0
- package/dist/rpc.d.mts +2 -0
- package/dist/rpc.mjs +2 -0
- package/dist/runtime-CjqDr8Yf.d.mts +109 -0
- package/dist/runtime-DUFKDIe4.mjs +409 -0
- package/dist/runtime.d.mts +2 -0
- package/dist/runtime.mjs +2 -0
- package/dist/schema-CIg4GzHQ.mjs +100 -0
- package/dist/schema-DMoSkwUx.d.mts +62 -0
- package/dist/schema-dGK6qkfR.mjs +28 -0
- package/dist/schema.d.mts +2 -0
- package/dist/schema.mjs +2 -0
- package/dist/server-BXwZEQ-n.mjs +66 -0
- package/dist/server-DjrZUbbu.mjs +2 -0
- package/dist/services/default.d.mts +11 -0
- package/dist/services/default.mjs +22 -0
- package/dist/services/index.d.mts +276 -0
- package/dist/services/index.mjs +7 -0
- package/dist/setup-gate-BeD6WS6d.mjs +110 -0
- package/dist/setup-gate-BqOzm7zp.d.mts +4 -0
- package/dist/setup-gate.d.mts +2 -0
- package/dist/setup-gate.mjs +2 -0
- package/dist/src-pELM4_iH.mjs +376 -0
- package/dist/trace-DCB7qFzT.mjs +10 -0
- package/dist/transform-DJH3vN4b.mjs +84041 -0
- package/dist/transport-BMSzG2-F.mjs +1045 -0
- package/dist/view-registry-BualWgAf.mjs +2 -0
- package/dist/vite-plugins-Bh3SCOw-.mjs +331 -0
- package/dist/vite.d.mts +68 -0
- package/dist/vite.mjs +2 -0
- package/dist/window-CM2a9Kyc.mjs +2 -0
- package/dist/window-CmmpCVX6.mjs +156 -0
- package/dist/write-9dRFczGJ.mjs +1248 -0
- package/migrations/0000_migration.ts +34 -0
- package/migrations/meta/0000_snapshot.json +18 -0
- package/migrations/meta/_journal.json +10 -0
- package/package.json +124 -0
|
@@ -0,0 +1,267 @@
|
|
|
1
|
+
import { createRequire } from "node:module";
|
|
2
|
+
import fs from "node:fs";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import { fileURLToPath, pathToFileURL } from "node:url";
|
|
5
|
+
//#region src/loaders/zenbu.ts
|
|
6
|
+
const verbose = process.env.ZENBU_VERBOSE === "1";
|
|
7
|
+
// @parcel/watcher is CommonJS; load it via a module-scoped require().
const { subscribe } = createRequire(import.meta.url)("@parcel/watcher");
// Hook for registering watcher handles with the pause/shutdown machinery;
// stays a no-op if the optional pause module is unavailable.
let registerWatcherClosable = () => {};
try {
  const pause = await import("../pause-DvAUNmKn.mjs");
  registerWatcherClosable = typeof pause.registerWatcherClosable === "function" ? pause.registerWatcherClosable : registerWatcherClosable;
} catch {}
const loaderName = "zenbu-loader";
// MessagePort handed over by initialize(); null until the host opts into tracing.
let tracePort = null;
// Cumulative hook timings, flushed and reset on each "flush" trace message.
const stats = {
  resolveCount: 0,
  resolveMs: 0,
  loadCount: 0,
  loadMs: 0
};
// barrel URL -> { hot, globs } so directory events can invalidate barrels.
const barrels = /* @__PURE__ */ new Map();
// directory -> closable watcher handle (one watcher per directory).
const dirWatchers = /* @__PURE__ */ new Map();
|
|
23
|
+
/**
 * Loader hook: wires up the optional trace MessagePort passed via loader data.
 * On every "flush" message the accumulated resolve/load stats are posted back
 * and reset to zero. No-op when no tracePort was provided.
 */
function initialize(data) {
  const port = data?.tracePort;
  if (!port) return;
  tracePort = port;
  port.on("message", (msg) => {
    if (msg !== "flush") return;
    try {
      // Best-effort: the port may already be closed on the other side.
      tracePort?.postMessage({
        name: loaderName,
        ...stats
      });
    } catch {}
    // Each flush reports a fresh window of measurements.
    stats.resolveCount = 0;
    stats.resolveMs = 0;
    stats.loadCount = 0;
    stats.loadMs = 0;
  });
  // Don't keep the process alive just for tracing.
  tracePort.unref?.();
}
|
|
41
|
+
/**
 * Parse a JSONC (JSON-with-comments) string: strip // and block comments and
 * trailing commas, then JSON.parse() the remainder.
 *
 * BUG FIX: the previous implementation removed trailing commas with a plain
 * regex over the whole text, which also mangled ",]" / ",}" sequences INSIDE
 * string literals. Both passes are now string-aware.
 *
 * @param {string} str raw JSONC text
 * @returns {unknown} the parsed value
 * @throws {SyntaxError} if the remaining text is not valid JSON
 */
function parseJsonc(str) {
  // Pass 1: drop comments, copying string literals (incl. escapes) verbatim.
  let withoutComments = "";
  let i = 0;
  while (i < str.length) {
    if (str[i] === "\"") {
      let j = i + 1;
      while (j < str.length) {
        if (str[j] === "\\") j += 2;
        else if (str[j] === "\"") {
          j++;
          break;
        } else j++;
      }
      withoutComments += str.slice(i, j);
      i = j;
    } else if (str[i] === "/" && str[i + 1] === "/") {
      i += 2;
      while (i < str.length && str[i] !== "\n") i++;
    } else if (str[i] === "/" && str[i + 1] === "*") {
      i += 2;
      while (i < str.length && !(str[i] === "*" && str[i + 1] === "/")) i++;
      i += 2;
    } else {
      withoutComments += str[i];
      i++;
    }
  }
  // Pass 2: remove trailing commas, again skipping string literals.
  let result = "";
  i = 0;
  while (i < withoutComments.length) {
    const ch = withoutComments[i];
    if (ch === "\"") {
      let j = i + 1;
      while (j < withoutComments.length) {
        if (withoutComments[j] === "\\") j += 2;
        else if (withoutComments[j] === "\"") {
          j++;
          break;
        } else j++;
      }
      result += withoutComments.slice(i, j);
      i = j;
    } else if (ch === ",") {
      // Look past whitespace: a comma directly before ] or } is dropped.
      let j = i + 1;
      while (j < withoutComments.length && /\s/.test(withoutComments[j])) j++;
      if (withoutComments[j] === "]" || withoutComments[j] === "}") {
        i++;
      } else {
        result += ch;
        i++;
      }
    } else {
      result += ch;
      i++;
    }
  }
  return JSON.parse(result);
}
|
|
66
|
+
/**
 * Compile a simple filename glob (only `*` is a wildcard) into an anchored RegExp.
 *
 * BUG FIX: previously only `.` was escaped, so filenames containing other regex
 * metacharacters (`+`, `(`, `)`, `?`, `$`, …) mismatched or produced invalid
 * patterns. All metacharacters except `*` are now escaped literally.
 *
 * @param {string} filePattern e.g. "*.service.mjs"
 * @returns {RegExp} anchored matcher for a basename
 */
function globRegex(filePattern) {
  const escaped = filePattern.replace(/[.+^${}()|[\]\\?]/g, "\\$&");
  return new RegExp(`^${escaped.replace(/\*/g, ".*")}$`);
}
|
|
69
|
+
/**
 * Expand a single-directory glob pattern (wildcard only in the basename) into
 * absolute file paths. Returns [] when the directory does not exist.
 */
function expandGlob(pattern) {
  const dir = path.dirname(pattern);
  if (!fs.existsSync(dir)) return [];
  const matcher = globRegex(path.basename(pattern));
  const results = [];
  for (const name of fs.readdirSync(dir)) {
    if (matcher.test(name)) results.push(path.resolve(dir, name));
  }
  return results;
}
|
|
75
|
+
/**
 * Snapshot the set of basenames in `dir` that match `regex`.
 * Returns an empty Set when the directory is missing or unreadable.
 */
function snapshotDir(dir, regex) {
  if (!fs.existsSync(dir)) return new Set();
  try {
    const names = fs.readdirSync(dir);
    return new Set(names.filter((name) => regex.test(name)));
  } catch {
    // Directory vanished or became unreadable between check and read.
    return new Set();
  }
}
|
|
83
|
+
/**
 * React to a file-system event in `dir`: when a watched glob's match set
 * changed (a matching file was added or removed), refresh the snapshot and
 * invalidate the owning barrel module so it regenerates its imports.
 * @param {string} dir absolute directory the event occurred in
 * @param {string} filename basename of the changed entry
 */
function handleDirEvent(dir, filename) {
  for (const entry of barrels.values()) {
    for (const glob of entry.globs) {
      if (glob.dir !== dir) continue;
      if (!glob.regex.test(filename)) continue;
      const nextSnapshot = snapshotDir(dir, glob.regex);
      // Only invalidate on membership changes (add/remove), not content edits.
      const changed = nextSnapshot.size !== glob.snapshot.size || [...nextSnapshot].some((file) => !glob.snapshot.has(file));
      if (!changed) continue;
      glob.snapshot = nextSnapshot;
      try {
        entry.hot?.invalidate?.();
        // BUG FIX: the interpolation was emitted as the literal "$(unknown)";
        // log the actual filename that triggered the invalidation.
        if (verbose) console.log(`[zenbu-loader] invalidated barrel (${filename} added/removed in ${dir})`);
      } catch (err) {
        console.error("[zenbu-loader] invalidate failed:", err);
      }
      // One invalidation per barrel is enough; continue with the next barrel.
      break;
    }
  }
}
|
|
99
|
+
// Start (at most once per directory) a @parcel/watcher subscription on `dir`.
// Events for direct children are forwarded to handleDirEvent; the watcher is
// wrapped in a closable handle so pause/shutdown logic can tear it down even
// while the async subscribe() is still in flight.
function ensureDirWatcher(dir) {
  if (dirWatchers.has(dir)) return;
  if (!fs.existsSync(dir)) return;
  let subscription = null;
  let closed = false;
  subscribe(dir, (err, events) => {
    if (err) return;
    for (const event of events) {
      // @parcel/watcher reports recursively; only react to direct children.
      if (path.dirname(event.path) !== dir) continue;
      handleDirEvent(dir, path.basename(event.path));
    }
  }).then((sub) => {
    // close() may have run while subscribe() was pending — undo immediately.
    if (closed) sub.unsubscribe().catch(() => {});
    else subscription = sub;
  }).catch((err) => {
    console.error(`[zenbu-loader] subscribe failed for ${dir}:`, err);
  });
  const closable = { close: () => {
    closed = true;
    if (subscription) return subscription.unsubscribe().catch(() => {});
  } };
  registerWatcherClosable(closable);
  dirWatchers.set(dir, closable);
}
|
|
123
|
+
/** Render module specifiers as side-effect `import "…"` statements, one per line. */
function buildSource(imports) {
  let source = "";
  for (const specifier of imports) {
    source += `import ${JSON.stringify(specifier)}\n`;
  }
  return source;
}
|
|
126
|
+
/**
 * Read the `plugins` array (string entries only, deduplicated, order kept)
 * from a JSONC config file. Returns [] when absent or malformed.
 */
function readPluginList(configPath) {
  const config = parseJsonc(fs.readFileSync(configPath, "utf8"));
  const plugins = config && typeof config === "object" ? config.plugins : void 0;
  if (!Array.isArray(plugins)) return [];
  const unique = new Set();
  for (const plugin of plugins) {
    if (typeof plugin === "string") unique.add(plugin);
  }
  return [...unique];
}
|
|
131
|
+
/**
 * Build the virtual root module for zenbu:plugins — one `zenbu:barrel?...`
 * import per configured plugin manifest, plus a self-accepting HMR hook.
 * The config file itself is the only watch path.
 */
function buildPluginRoot(configPath) {
  const specifiers = readPluginList(configPath).map(
    (manifestPath) => `zenbu:barrel?manifest=${encodeURIComponent(path.resolve(manifestPath))}`
  );
  const source = `${buildSource(specifiers)}import.meta.hot?.accept()\n`;
  return {
    source,
    watchPaths: new Set([configPath])
  };
}
|
|
137
|
+
// Generate a barrel module for a service manifest (JSONC with a `services`
// string array). Each entry is resolved relative to the manifest's directory:
//  - entries containing "*" are expanded now and tracked via snapshot + watcher
//  - .json/.jsonc entries become nested zenbu:barrel imports
//  - anything else is imported directly as a file URL
function buildBarrel(manifestPath) {
  const manifest = parseJsonc(fs.readFileSync(manifestPath, "utf8"));
  const baseDir = path.dirname(manifestPath);
  const entries = manifest && typeof manifest === "object" && Array.isArray(manifest.services) ? manifest.services.filter((entry) => typeof entry === "string") : [];
  const imports = [];
  // The manifest itself is always watched so edits regenerate the barrel.
  const watchPaths = new Set([manifestPath]);
  const globs = [];
  for (const entry of entries) {
    const resolved = path.resolve(baseDir, entry);
    if (resolved.includes("*")) {
      const dir = path.dirname(resolved);
      const regex = globRegex(path.basename(resolved));
      watchPaths.add(dir);
      // Snapshot current matches; handleDirEvent diffs against this to detect
      // added/removed files later.
      globs.push({
        dir,
        regex,
        snapshot: snapshotDir(dir, regex)
      });
      for (const file of expandGlob(resolved)) imports.push(pathToFileURL(file).href);
    } else if (resolved.endsWith(".json") || resolved.endsWith(".jsonc")) imports.push(`zenbu:barrel?manifest=${encodeURIComponent(resolved)}`);
    else imports.push(pathToFileURL(resolved).href);
  }
  return {
    // Self-accept so HMR invalidation of the barrel doesn't bubble further up.
    source: `${buildSource(imports)}import.meta.hot?.accept()\n`,
    watchPaths,
    globs
  };
}
|
|
165
|
+
/**
 * Absolute path of the @zenbujs/core package root. Walks up from two levels
 * above this file's directory (the expected root for dist/loaders/) until a
 * package.json named "@zenbujs/core" is found; falls back to the initial
 * two-levels-up guess when the walk finds nothing.
 */
const CORE_PACKAGE_ROOT_FOR_LOADER = (() => {
  const here = path.dirname(fileURLToPath(import.meta.url));
  const fallback = path.resolve(here, "..", "..");
  for (let dir = fallback; dir !== path.dirname(dir); dir = path.dirname(dir)) {
    const pkg = path.join(dir, "package.json");
    if (!fs.existsSync(pkg)) continue;
    try {
      if (JSON.parse(fs.readFileSync(pkg, "utf8")).name === "@zenbujs/core") return dir;
    } catch {
      // Unreadable or invalid package.json — keep walking up.
    }
  }
  return fallback;
})();
|
|
177
|
+
// Cached `exports` map of @zenbujs/core's package.json; null until first success.
let coreExportsCache = null;
/**
 * Lazily read the `exports` field from @zenbujs/core's package.json.
 * Returns null when the file is missing/invalid or lacks an exports map;
 * a successful read is cached for subsequent calls.
 */
function getCoreExports() {
  if (coreExportsCache) return coreExportsCache;
  try {
    const pkgPath = path.join(CORE_PACKAGE_ROOT_FOR_LOADER, "package.json");
    const pkg = JSON.parse(fs.readFileSync(pkgPath, "utf8"));
    if (pkg.name === "@zenbujs/core" && pkg.exports) {
      coreExportsCache = pkg.exports;
      return coreExportsCache;
    }
  } catch {
    // Missing or unparsable package.json — treated as "no exports map".
  }
  return null;
}
|
|
190
|
+
/**
 * Map an "@zenbujs/core[/subpath]" specifier to an absolute file path via the
 * package's `exports` map. Returns null when the map is unavailable or the
 * subpath isn't exported.
 */
function resolveCoreSubpath(specifier) {
  const exportsMap = getCoreExports();
  if (!exportsMap) return null;
  // "@zenbujs/core/".length === 14 — convert the specifier into an exports key.
  const key = specifier === "@zenbujs/core" ? "." : "./" + specifier.slice(14);
  const entry = exportsMap[key];
  if (!entry) return null;
  let file;
  if (typeof entry === "string") {
    file = entry;
  } else {
    // Conditional exports: prefer "import", then "default".
    file = entry.import ?? entry.default ?? null;
  }
  if (!file) return null;
  return path.resolve(CORE_PACKAGE_ROOT_FOR_LOADER, file);
}
|
|
199
|
+
/**
 * Node module-resolution hook. Short-circuits three specifier families —
 * the advice runtime alias, @zenbujs/core subpaths (via the exports map),
 * and virtual "zenbu:" URLs — and defers everything else to nextResolve.
 * Count/latency stats are accumulated regardless of outcome.
 */
function resolve(specifier, context, nextResolve) {
  const startedAt = Date.now();
  try {
    if (specifier === "@zenbu/advice/runtime") {
      const url = new URL("../advice-runtime.mjs", import.meta.url).href;
      return { url, shortCircuit: true };
    }
    if (specifier === "@zenbujs/core" || specifier.startsWith("@zenbujs/core/")) {
      const resolved = resolveCoreSubpath(specifier);
      if (resolved) {
        return { url: pathToFileURL(resolved).href, shortCircuit: true };
      }
      // Fall through: let the default resolver try node_modules.
    }
    if (specifier.startsWith("zenbu:")) {
      return { url: specifier, shortCircuit: true };
    }
    return nextResolve(specifier, context);
  } finally {
    stats.resolveCount += 1;
    stats.resolveMs += Date.now() - startedAt;
  }
}
|
|
223
|
+
// Core of the load hook: materializes the two virtual module families.
//  - zenbu:plugins?config=...   → root module importing one barrel per plugin
//  - zenbu:barrel?manifest=...  → barrel importing each service in a manifest
// Everything else falls through to nextLoad. Barrel globs additionally get a
// directory watcher so added/removed files re-invalidate the barrel.
function loadImpl(url, context, nextLoad) {
  if (url.startsWith("zenbu:plugins?")) {
    const params = new URL(url).searchParams;
    const configPath = decodeURIComponent(params.get("config") ?? "");
    const { source, watchPaths } = buildPluginRoot(configPath);
    // Register HMR file watches when the host exposes context.hot.watch.
    if (context.hot?.watch) for (const watchPath of watchPaths) context.hot.watch(pathToFileURL(watchPath));
    if (verbose) console.log(`[zenbu-loader] generated plugin root for ${path.basename(configPath)} (${source.split("\n").filter(Boolean).length} imports, ${watchPaths.size} watches)`);
    return {
      format: "module",
      source,
      shortCircuit: true
    };
  }
  if (url.startsWith("zenbu:barrel?")) {
    const params = new URL(url).searchParams;
    const manifestPath = decodeURIComponent(params.get("manifest") ?? "");
    const { source, watchPaths, globs } = buildBarrel(manifestPath);
    if (context.hot?.watch) for (const watchPath of watchPaths) context.hot.watch(pathToFileURL(watchPath));
    if (context.hot) {
      // Remember the hot handle + glob snapshots so handleDirEvent can
      // invalidate this barrel when a matching file appears/disappears.
      barrels.set(url, {
        hot: context.hot,
        globs
      });
      for (const glob of globs) ensureDirWatcher(glob.dir);
    }
    if (verbose) console.log(`[zenbu-loader] generated barrel for ${path.basename(manifestPath)} (${source.split("\n").filter(Boolean).length} imports, ${watchPaths.size} watches, ${globs.length} globs)`);
    return {
      format: "module",
      source,
      shortCircuit: true
    };
  }
  return nextLoad(url, context);
}
|
|
257
|
+
/** Node load hook: delegates to loadImpl while accumulating count/latency stats. */
function load(url, context, nextLoad) {
  const startedAt = Date.now();
  try {
    return loadImpl(url, context, nextLoad);
  } finally {
    stats.loadCount += 1;
    stats.loadMs += Date.now() - startedAt;
  }
}
|
|
266
|
+
//#endregion
|
|
267
|
+
export { initialize, load, resolve };
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
//#region src/shared/log.ts
|
|
2
|
+
const verbose = process.env.ZENBU_VERBOSE === "1";
|
|
3
|
+
/**
 * Create a tagged logger. The returned function logs at info level and also
 * carries .verbose (gated on ZENBU_VERBOSE=1), .warn and .error methods, all
 * prefixed with "[tag]".
 */
function createLogger(tag) {
  const prefix = `[${tag}]`;
  const log = (...args) => console.log(prefix, ...args);
  log.verbose = (...args) => {
    if (verbose) console.log(prefix, ...args);
  };
  log.warn = (...args) => console.warn(prefix, ...args);
  log.error = (...args) => console.error(prefix, ...args);
  return log;
}
|
|
19
|
+
//#endregion
|
|
20
|
+
export { createLogger as t };
|
|
@@ -0,0 +1,332 @@
|
|
|
1
|
+
import { n as resolveBuildConfig } from "./build-config-pbv0w4oN.mjs";
|
|
2
|
+
import { createRequire } from "node:module";
|
|
3
|
+
import fs from "node:fs";
|
|
4
|
+
import os from "node:os";
|
|
5
|
+
import path from "node:path";
|
|
6
|
+
import { pathToFileURL } from "node:url";
|
|
7
|
+
import crypto from "node:crypto";
|
|
8
|
+
import fsp from "node:fs/promises";
|
|
9
|
+
import { execFile } from "node:child_process";
|
|
10
|
+
import { promisify } from "node:util";
|
|
11
|
+
//#region src/cli/lib/load-build-config.ts
|
|
12
|
+
// require() bound to this module, used to load optional CommonJS deps (tsx).
const localRequire = createRequire(import.meta.url);
// Build-config filenames probed in priority order by findBuildConfig().
const CONFIG_NAMES = [
  "zenbu.build.ts",
  "zenbu.build.mts",
  "zenbu.build.js",
  "zenbu.build.mjs"
];
|
|
19
|
+
/**
 * Locate the project's zenbu.build config file.
 * @param {string} projectDir directory to probe
 * @returns {string} path of the first existing candidate (CONFIG_NAMES order)
 * @throws {Error} when none of the candidate names exist
 */
function findBuildConfig(projectDir) {
  const found = CONFIG_NAMES
    .map((name) => path.join(projectDir, name))
    .find((candidate) => fs.existsSync(candidate));
  if (found) return found;
  throw new Error(`No zenbu.build config found at ${projectDir}. Expected one of: ${CONFIG_NAMES.join(", ")}`);
}
|
|
26
|
+
// Memoized one-shot registration of tsx's ESM hooks (lets us import .ts configs).
let tsxRegistered = null;
function ensureTsxRegistered() {
  if (!tsxRegistered) {
    tsxRegistered = (async () => {
      try {
        const tsxApi = localRequire("tsx/esm/api");
        if (typeof tsxApi.register === "function") tsxApi.register();
      } catch {
        // tsx not installed — .ts configs won't load; JS configs still work.
      }
    })();
  }
  return tsxRegistered;
}
|
|
37
|
+
/**
 * Import a zenbu.build config module (TypeScript supported via tsx) and
 * normalize it through resolveBuildConfig.
 * @param {string} configPath path to the config file
 * @returns {Promise<object>} the resolved build config
 * @throws {Error} when the module doesn't export a config with an `include` array
 */
async function loadBuildConfig(configPath) {
  await ensureTsxRegistered();
  const moduleUrl = pathToFileURL(path.resolve(configPath)).href;
  const mod = await import(moduleUrl);
  // Accept both `export default defineBuildConfig(...)` and bare named exports.
  const config = mod.default ?? mod;
  if (!config || !Array.isArray(config.include)) {
    throw new Error(`${path.basename(configPath)} must export a config object (via defineBuildConfig) with an 'include' array.`);
  }
  return resolveBuildConfig(config);
}
|
|
44
|
+
//#endregion
|
|
45
|
+
//#region src/cli/lib/mirror-sync.ts
|
|
46
|
+
const execFile$1 = promisify(execFile);
// Matches the "[synced from <sha>]" trailer stamped into mirror commits.
const SYNCED_FROM_RE = /\[synced from ([0-9a-f]{7,40})\]/;
/**
 * Run `git <args>` in `cwd` and return stdout. Terminal prompts are disabled
 * so auth failures error out instead of hanging; stdout buffer is 64 MiB.
 */
async function git(cwd, args) {
  const options = {
    cwd,
    env: {
      ...process.env,
      GIT_TERMINAL_PROMPT: "0"
    },
    maxBuffer: 64 * 1024 * 1024
  };
  const { stdout } = await execFile$1("git", args, options);
  return stdout;
}
|
|
59
|
+
/** Like git(), but resolves to null instead of rejecting on any failure. */
async function gitTry(cwd, args) {
  try {
    const stdout = await git(cwd, args);
    return stdout;
  } catch {
    return null;
  }
}
|
|
66
|
+
// Clone the mirror's `branch` (depth 1) into a fresh temp dir. If the clone
// fails — e.g. the remote is empty or the branch doesn't exist — fall back to
// an empty local repo wired to `origin`, flagged isEmpty: true. Callers own
// the returned dir and must remove it when done.
async function shallowCloneMirror(mirrorUrl, branch) {
  const dir = await fsp.mkdtemp(path.join(os.tmpdir(), "zenbu-mirror-"));
  try {
    await git(dir, [
      "clone",
      "--depth",
      "1",
      "--branch",
      branch,
      mirrorUrl,
      "."
    ]);
    return {
      dir,
      isEmpty: false
    };
  } catch {
    // Clone failed: start from an empty repo on the requested branch so init()
    // can seed it; the remote is attached for the eventual push.
    await git(dir, [
      "init",
      "-b",
      branch
    ]);
    await git(dir, [
      "remote",
      "add",
      "origin",
      mirrorUrl
    ]);
    return {
      dir,
      isEmpty: true
    };
  }
}
|
|
100
|
+
/**
 * Read the source SHA recorded in HEAD's "[synced from <sha>]" trailer.
 * Returns null when the repo has no HEAD (empty repo) or no trailer exists.
 */
async function readHeadSyncedFromSha(repoDir) {
  const head = await gitTry(repoDir, ["rev-parse", "HEAD"]);
  if (!head) return null;
  const message = await git(repoDir, ["log", "-1", "--format=%B"]);
  const match = SYNCED_FROM_RE.exec(message);
  return match ? match[1] : null;
}
|
|
109
|
+
/** Delete every top-level entry of `repoDir` except the .git directory. */
async function emptyTrackedTree(repoDir) {
  for (const entry of await fsp.readdir(repoDir, { withFileTypes: true })) {
    if (entry.name === ".git") continue;
    await fsp.rm(path.join(repoDir, entry.name), {
      recursive: true,
      force: true
    });
  }
}
|
|
120
|
+
/**
 * Recursively copy `source` into `dest`, preserving symlinks as links and
 * skipping ".sha" entries (the staging checksum file) at every level.
 */
async function copyTree(source, dest) {
  for (const entry of await fsp.readdir(source, { withFileTypes: true })) {
    if (entry.name === ".sha") continue;
    const from = path.join(source, entry.name);
    const to = path.join(dest, entry.name);
    if (entry.isDirectory()) {
      await fsp.mkdir(to, { recursive: true });
      await copyTree(from, to);
      continue;
    }
    if (entry.isSymbolicLink()) {
      // Recreate the link itself rather than copying its target's contents.
      const link = await fsp.readlink(from);
      await fsp.symlink(link, to);
      continue;
    }
    await fsp.copyFile(from, to);
  }
}
|
|
135
|
+
// Stage everything in `repoDir`, commit with a "[synced from <sourceSha>]"
// trailer, and push HEAD to origin/<branch>. Returns { pushed: false } without
// committing when the working tree is clean. Author/committer identity may be
// overridden via options; terminal prompts stay disabled throughout.
async function commitAndPush(repoDir, branch, sourceSha, options) {
  await git(repoDir, ["add", "-A"]);
  // Nothing staged → report the current HEAD (null for an empty repo).
  if (!(await git(repoDir, ["status", "--porcelain"])).trim()) return {
    pushed: false,
    sha: (await gitTry(repoDir, ["rev-parse", "HEAD"]))?.trim() ?? null,
    reason: "no changes"
  };
  const env = {
    ...process.env,
    GIT_TERMINAL_PROMPT: "0"
  };
  if (options.authorName) {
    env.GIT_AUTHOR_NAME = options.authorName;
    env.GIT_COMMITTER_NAME = options.authorName;
  }
  if (options.authorEmail) {
    env.GIT_AUTHOR_EMAIL = options.authorEmail;
    env.GIT_COMMITTER_EMAIL = options.authorEmail;
  }
  await execFile$1("git", [
    "commit",
    "-m",
    // Commit body carries the machine-readable sync trailer.
    `${options.commitTitle ?? `chore: sync source ${sourceSha.slice(0, 7)}`}\n\n[synced from ${sourceSha}]\n`
  ], {
    cwd: repoDir,
    env
  });
  await execFile$1("git", [
    "push",
    "origin",
    `HEAD:${branch}`
  ], {
    cwd: repoDir,
    env
  });
  return {
    pushed: true,
    sha: (await git(repoDir, ["rev-parse", "HEAD"])).trim()
  };
}
|
|
175
|
+
/**
 * Seed an empty mirror with the staged content. Fails if the mirror's HEAD
 * already has a `[synced from <sha>]` trailer (unless `force`).
 * @param options.mirrorUrl  git URL of the mirror remote
 * @param options.branch     target branch (defaults to "main")
 * @param options.staging    local dir whose contents become the commit
 * @param options.sourceSha  source SHA recorded in the commit trailer
 * @returns `{ status: "pushed" | "noop", mirrorSha?, reason? }`
 */
async function init(options) {
  const branch = options.branch ?? "main";
  const { dir: workDir } = await shallowCloneMirror(options.mirrorUrl, branch);
  try {
    const existing = await readHeadSyncedFromSha(workDir);
    if (existing && !options.force) throw new Error(`mirror already initialized (HEAD has [synced from ${existing.slice(0, 7)}]); use \`zen publish:source push\` instead, or pass --force`);
    // Replace the clone's tracked tree wholesale with the staged content.
    await emptyTrackedTree(workDir);
    await copyTree(options.staging, workDir);
    const result = await commitAndPush(workDir, branch, options.sourceSha, {
      commitTitle: options.commitTitle,
      authorName: options.authorName,
      authorEmail: options.authorEmail
    });
    if (!result.pushed) return {
      status: "noop",
      reason: result.reason
    };
    return {
      status: "pushed",
      mirrorSha: result.sha ?? void 0
    };
  } finally {
    // Always discard the temp clone, even on failure.
    await fsp.rm(workDir, {
      recursive: true,
      force: true
    });
  }
}
|
|
207
|
+
/**
 * Snapshot the staging dir into a new mirror commit. Refuses if the mirror
 * has no `[synced from]` trailer. No-ops if the existing trailer's source SHA
 * already matches `sourceSha` (mirror is up to date).
 * @returns `{ status: "pushed" | "noop", mirrorSha?, reason? }`
 */
async function push(options) {
  const branch = options.branch ?? "main";
  const { dir: workDir, isEmpty } = await shallowCloneMirror(options.mirrorUrl, branch);
  try {
    if (isEmpty) throw new Error("mirror is empty; run `zen publish:source init` first");
    const lastSynced = await readHeadSyncedFromSha(workDir);
    if (!lastSynced) throw new Error("mirror not initialized (HEAD has no [synced from] trailer); run `zen publish:source init` first");
    // Trailer already points at this source commit — nothing to sync.
    if (lastSynced === options.sourceSha) return {
      status: "noop",
      reason: "already up to date"
    };
    // Replace the clone's tracked tree wholesale with the staged content.
    await emptyTrackedTree(workDir);
    await copyTree(options.staging, workDir);
    const result = await commitAndPush(workDir, branch, options.sourceSha, {
      commitTitle: options.commitTitle,
      authorName: options.authorName,
      authorEmail: options.authorEmail
    });
    if (!result.pushed) return {
      status: "noop",
      reason: result.reason
    };
    return {
      status: "pushed",
      mirrorSha: result.sha ?? void 0
    };
  } finally {
    // Always discard the temp clone, even on failure.
    await fsp.rm(workDir, {
      recursive: true,
      force: true
    });
  }
}
|
|
245
|
+
/**
 * Initialize a local git working tree at `dir` whose `origin` is `mirrorUrl`
 * and whose HEAD is a fresh commit of `dir`'s contents. Used by `build:desktop`
 * to make the seed inside the .app a real git repo, so the user's apps-dir
 * after first launch can later `git pull` from the mirror.
 *
 * The commit SHA produced here is local-only; on the user's machine, after a
 * future `zen pull`, they'll fast-forward to whatever's actually on the
 * mirror.
 */
async function initSeedRepo(options) {
  const branch = options.branch ?? "main";
  const env = {
    ...process.env,
    GIT_TERMINAL_PROMPT: "0"
  };
  // Reuse an existing repo if present; otherwise init on the target branch.
  if (!fs.existsSync(path.join(options.dir, ".git"))) await execFile$1("git", [
    "init",
    "-b",
    branch
  ], {
    cwd: options.dir,
    env
  });
  // Re-point origin unconditionally: remove (ignoring "no such remote"), then add.
  await execFile$1("git", [
    "remote",
    "remove",
    "origin"
  ], {
    cwd: options.dir,
    env
  }).catch(() => {});
  await execFile$1("git", [
    "remote",
    "add",
    "origin",
    options.mirrorUrl
  ], {
    cwd: options.dir,
    env
  });
  await execFile$1("git", ["add", "-A"], {
    cwd: options.dir,
    env
  });
  // Fixed local identity so the seed commit doesn't depend on user git config.
  const localEnv = {
    ...env,
    GIT_AUTHOR_NAME: "zenbu",
    GIT_COMMITTER_NAME: "zenbu",
    GIT_AUTHOR_EMAIL: "zenbu@local",
    GIT_COMMITTER_EMAIL: "zenbu@local"
  };
  // --allow-empty: still record the trailer even if the tree has no changes.
  await execFile$1("git", [
    "commit",
    "--allow-empty",
    "-m",
    `seed ${options.sourceSha.slice(0, 7)}\n\n[synced from ${options.sourceSha}]\n`
  ], {
    cwd: options.dir,
    env: localEnv
  });
}
|
|
307
|
+
/**
 * Hash a directory tree's content: SHA-256 over sorted "relpath\0bytes\0"
 * records (.git excluded). Used by `build:source` for the .sha file.
 * @param {string} dir root directory to hash
 * @returns {Promise<string>} hex digest
 */
async function hashDir(dir) {
  const files = [];
  await collectFiles(dir, dir, files);
  // Sorted order makes the digest independent of readdir ordering.
  files.sort();
  const hash = crypto.createHash("sha256");
  for (const rel of files) {
    hash.update(rel);
    hash.update("\0");
    hash.update(await fsp.readFile(path.join(dir, rel)));
    hash.update("\0");
  }
  return hash.digest("hex");
}
|
|
322
|
+
/**
 * Recursively append to `out` the root-relative, forward-slash paths of all
 * regular files under `dir`, skipping any ".git" entry at any depth.
 */
async function collectFiles(root, dir, out) {
  for (const entry of await fsp.readdir(dir, { withFileTypes: true })) {
    if (entry.name === ".git") continue;
    const absolute = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      await collectFiles(root, absolute, out);
    } else if (entry.isFile()) {
      // Normalize separators so the listing is platform-independent.
      const relative = path.relative(root, absolute).split(path.sep).join("/");
      out.push(relative);
    }
  }
}
|
|
331
|
+
//#endregion
|
|
332
|
+
export { findBuildConfig as a, push as i, init as n, loadBuildConfig as o, initSeedRepo as r, hashDir as t };
|