@interfere/next 9.0.1 → 10.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +33 -5
- package/dist/config.d.mts +24 -5
- package/dist/config.d.mts.map +1 -1
- package/dist/config.mjs +38 -28
- package/dist/config.mjs.map +1 -1
- package/dist/instrument-client.d.mts +14 -3
- package/dist/instrument-client.d.mts.map +1 -1
- package/dist/instrument-client.mjs +7 -9
- package/dist/instrument-client.mjs.map +1 -1
- package/dist/instrumentation-client.d.mts +1 -0
- package/dist/instrumentation-client.mjs +22 -0
- package/dist/instrumentation-client.mjs.map +1 -0
- package/dist/instrumentation.d.mts +134 -0
- package/dist/instrumentation.d.mts.map +1 -0
- package/dist/instrumentation.edge.d.mts +35 -0
- package/dist/instrumentation.edge.d.mts.map +1 -0
- package/dist/instrumentation.edge.mjs +34 -0
- package/dist/instrumentation.edge.mjs.map +1 -0
- package/dist/instrumentation.mjs +165 -0
- package/dist/instrumentation.mjs.map +1 -0
- package/dist/internal/build/configure-build.d.mts +1 -2
- package/dist/internal/build/configure-build.d.mts.map +1 -1
- package/dist/internal/build/configure-build.mjs +10 -2
- package/dist/internal/build/configure-build.mjs.map +1 -1
- package/dist/internal/build/detect-bundler.d.mts +6 -0
- package/dist/internal/build/detect-bundler.d.mts.map +1 -0
- package/dist/internal/build/detect-bundler.mjs +9 -0
- package/dist/internal/build/detect-bundler.mjs.map +1 -0
- package/dist/internal/build/pipeline.d.mts +15 -1
- package/dist/internal/build/pipeline.d.mts.map +1 -1
- package/dist/internal/build/pipeline.mjs +28 -13
- package/dist/internal/build/pipeline.mjs.map +1 -1
- package/dist/internal/build/release/destinations/index.d.mts +14 -0
- package/dist/internal/build/release/destinations/index.d.mts.map +1 -0
- package/dist/internal/build/release/destinations/index.mjs +13 -0
- package/dist/internal/build/release/destinations/index.mjs.map +1 -0
- package/dist/internal/build/release/destinations/vercel.mjs.map +1 -1
- package/dist/internal/build/release/git.d.mts +13 -0
- package/dist/internal/build/release/git.d.mts.map +1 -1
- package/dist/internal/build/release/git.mjs +13 -2
- package/dist/internal/build/release/git.mjs.map +1 -1
- package/dist/internal/build/release/index.d.mts +2 -1
- package/dist/internal/build/release/index.d.mts.map +1 -1
- package/dist/internal/build/release/index.mjs +4 -5
- package/dist/internal/build/release/index.mjs.map +1 -1
- package/dist/internal/build/release/sources/github.mjs.map +1 -1
- package/dist/internal/build/release/sources/index.d.mts +21 -0
- package/dist/internal/build/release/sources/index.d.mts.map +1 -0
- package/dist/internal/build/release/sources/index.mjs +20 -0
- package/dist/internal/build/release/sources/index.mjs.map +1 -0
- package/dist/internal/build/source-maps/discover-turbopack.d.mts +32 -0
- package/dist/internal/build/source-maps/discover-turbopack.d.mts.map +1 -0
- package/dist/internal/build/source-maps/discover-turbopack.mjs +68 -0
- package/dist/internal/build/source-maps/discover-turbopack.mjs.map +1 -0
- package/dist/internal/build/source-maps/discover-webpack.d.mts +53 -0
- package/dist/internal/build/source-maps/discover-webpack.d.mts.map +1 -0
- package/dist/internal/build/source-maps/discover-webpack.mjs +112 -0
- package/dist/internal/build/source-maps/discover-webpack.mjs.map +1 -0
- package/dist/internal/build/source-maps/discover.d.mts +28 -10
- package/dist/internal/build/source-maps/discover.d.mts.map +1 -1
- package/dist/internal/build/source-maps/discover.mjs +22 -83
- package/dist/internal/build/source-maps/discover.mjs.map +1 -1
- package/dist/internal/build/source-maps/index.d.mts +2 -24
- package/dist/internal/build/source-maps/index.d.mts.map +1 -1
- package/dist/internal/build/source-maps/index.mjs +13 -23
- package/dist/internal/build/source-maps/index.mjs.map +1 -1
- package/dist/internal/build/source-maps/paths.d.mts +28 -0
- package/dist/internal/build/source-maps/paths.d.mts.map +1 -0
- package/dist/internal/build/source-maps/paths.mjs +49 -0
- package/dist/internal/build/source-maps/paths.mjs.map +1 -0
- package/dist/internal/build/source-maps/upload.d.mts +46 -0
- package/dist/internal/build/source-maps/upload.d.mts.map +1 -0
- package/dist/internal/build/source-maps/upload.mjs +134 -0
- package/dist/internal/build/source-maps/upload.mjs.map +1 -0
- package/dist/internal/build/value-injection-loader.mjs.map +1 -1
- package/dist/internal/env.d.mts +11 -2
- package/dist/internal/env.d.mts.map +1 -1
- package/dist/internal/env.mjs +12 -3
- package/dist/internal/env.mjs.map +1 -1
- package/dist/internal/logger.d.mts +9 -1
- package/dist/internal/logger.d.mts.map +1 -1
- package/dist/internal/logger.mjs +10 -2
- package/dist/internal/logger.mjs.map +1 -1
- package/dist/internal/release-slug.d.mts +25 -0
- package/dist/internal/release-slug.d.mts.map +1 -0
- package/dist/internal/release-slug.mjs +32 -0
- package/dist/internal/release-slug.mjs.map +1 -0
- package/dist/internal/route/handle-get.d.mts +14 -1
- package/dist/internal/route/handle-get.d.mts.map +1 -1
- package/dist/internal/route/handle-get.mjs +35 -14
- package/dist/internal/route/handle-get.mjs.map +1 -1
- package/dist/internal/route/handle-post.d.mts +11 -0
- package/dist/internal/route/handle-post.d.mts.map +1 -1
- package/dist/internal/route/handle-post.mjs +11 -50
- package/dist/internal/route/handle-post.mjs.map +1 -1
- package/dist/internal/route/proxy.d.mts +21 -1
- package/dist/internal/route/proxy.d.mts.map +1 -1
- package/dist/internal/route/proxy.mjs +61 -16
- package/dist/internal/route/proxy.mjs.map +1 -1
- package/dist/internal/server/capture.d.mts +2 -2
- package/dist/internal/server/capture.d.mts.map +1 -1
- package/dist/internal/server/capture.mjs +71 -37
- package/dist/internal/server/capture.mjs.map +1 -1
- package/dist/internal/server/console-bridge.d.mts +19 -0
- package/dist/internal/server/console-bridge.d.mts.map +1 -0
- package/dist/internal/server/console-bridge.mjs +112 -0
- package/dist/internal/server/console-bridge.mjs.map +1 -0
- package/dist/internal/server/id-generator.d.mts +38 -0
- package/dist/internal/server/id-generator.d.mts.map +1 -0
- package/dist/internal/server/id-generator.mjs +68 -0
- package/dist/internal/server/id-generator.mjs.map +1 -0
- package/dist/internal/server/instrumentation-options.d.mts +86 -0
- package/dist/internal/server/instrumentation-options.d.mts.map +1 -0
- package/dist/internal/server/instrumentation-options.mjs +1 -0
- package/dist/internal/server/remote-config.mjs +2 -2
- package/dist/internal/server/remote-config.mjs.map +1 -1
- package/dist/internal/server/trace-meta.d.mts +34 -0
- package/dist/internal/server/trace-meta.d.mts.map +1 -0
- package/dist/internal/server/trace-meta.mjs +41 -0
- package/dist/internal/server/trace-meta.mjs.map +1 -0
- package/dist/internal/server/traceparent.d.mts +16 -0
- package/dist/internal/server/traceparent.d.mts.map +1 -0
- package/dist/internal/server/traceparent.mjs +26 -0
- package/dist/internal/server/traceparent.mjs.map +1 -0
- package/dist/internal/server/types.d.mts +1 -7
- package/dist/internal/server/types.d.mts.map +1 -1
- package/dist/internal/setup-warnings.d.mts +17 -0
- package/dist/internal/setup-warnings.d.mts.map +1 -0
- package/dist/internal/setup-warnings.mjs +45 -0
- package/dist/internal/setup-warnings.mjs.map +1 -0
- package/dist/package.mjs +1 -1
- package/dist/provider.d.mts +23 -2
- package/dist/provider.d.mts.map +1 -0
- package/dist/provider.mjs +23 -1
- package/dist/provider.mjs.map +1 -0
- package/dist/route-handler.d.mts +7 -2
- package/dist/route-handler.d.mts.map +1 -1
- package/dist/route-handler.mjs +11 -9
- package/dist/route-handler.mjs.map +1 -1
- package/dist/server.d.mts +2 -2
- package/dist/server.mjs +2 -2
- package/package.json +73 -20
- package/dist/internal/route/sw-script.d.mts +0 -4
- package/dist/internal/route/sw-script.d.mts.map +0 -1
- package/dist/internal/route/sw-script.mjs +0 -38
- package/dist/internal/route/sw-script.mjs.map +0 -1
- package/dist/internal/server/dedupe.d.mts +0 -5
- package/dist/internal/server/dedupe.d.mts.map +0 -1
- package/dist/internal/server/dedupe.mjs +0 -11
- package/dist/internal/server/dedupe.mjs.map +0 -1
- package/dist/internal/server/envelope.d.mts +0 -14
- package/dist/internal/server/envelope.d.mts.map +0 -1
- package/dist/internal/server/envelope.mjs +0 -59
- package/dist/internal/server/envelope.mjs.map +0 -1
- package/dist/internal/server/normalize-request.d.mts +0 -7
- package/dist/internal/server/normalize-request.d.mts.map +0 -1
- package/dist/internal/server/normalize-request.mjs +0 -50
- package/dist/internal/server/normalize-request.mjs.map +0 -1
- package/dist/internal/server/runtime.d.mts +0 -14
- package/dist/internal/server/runtime.d.mts.map +0 -1
- package/dist/internal/server/runtime.mjs +0 -18
- package/dist/internal/server/runtime.mjs.map +0 -1
- package/dist/internal/server/transport.d.mts +0 -12
- package/dist/internal/server/transport.d.mts.map +0 -1
- package/dist/internal/server/transport.mjs +0 -17
- package/dist/internal/server/transport.mjs.map +0 -1
|
@@ -1,29 +1,7 @@
|
|
|
1
1
|
import { SourceMapFile } from "./discover.mjs";
|
|
2
|
+
import { uploadSourceMaps } from "./upload.mjs";
|
|
2
3
|
|
|
3
4
|
//#region src/internal/build/source-maps/index.d.ts
|
|
4
|
-
declare function buildUploadBody(discovered: {
|
|
5
|
-
files: SourceMapFile[];
|
|
6
|
-
mapping: Record<string, string>;
|
|
7
|
-
sourceFileCount: number;
|
|
8
|
-
}): {
|
|
9
|
-
body: {
|
|
10
|
-
files: {
|
|
11
|
-
fileName: string;
|
|
12
|
-
content: Blob;
|
|
13
|
-
}[];
|
|
14
|
-
metadata: {
|
|
15
|
-
sourceMapToGenerated: Record<string, string>;
|
|
16
|
-
hashes: {
|
|
17
|
-
[k: string]: string;
|
|
18
|
-
};
|
|
19
|
-
debugIds: {
|
|
20
|
-
[k: string]: string;
|
|
21
|
-
};
|
|
22
|
-
sourceFileCount: number;
|
|
23
|
-
};
|
|
24
|
-
};
|
|
25
|
-
totalBytes: number;
|
|
26
|
-
};
|
|
27
5
|
declare function cleanupSourceMaps(files: SourceMapFile[]): Promise<void>;
|
|
28
6
|
//#endregion
|
|
29
|
-
export {
|
|
7
|
+
export { cleanupSourceMaps, uploadSourceMaps };
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.d.mts","names":[],"sources":["../../../../src/internal/build/source-maps/index.ts"],"mappings":"
|
|
1
|
+
{"version":3,"file":"index.d.mts","names":[],"sources":["../../../../src/internal/build/source-maps/index.ts"],"mappings":";;;;iBAKsB,iBAAA,CAAkB,KAAA,EAAO,aAAA,KAAkB,OAAA"}
|
|
@@ -1,28 +1,18 @@
|
|
|
1
|
+
import { log } from "../../logger.mjs";
|
|
2
|
+
import { uploadSourceMaps } from "./upload.mjs";
|
|
1
3
|
import { unlink } from "node:fs/promises";
|
|
2
4
|
//#region src/internal/build/source-maps/index.ts
|
|
3
|
-
function buildUploadBody(discovered) {
|
|
4
|
-
let totalBytes = 0;
|
|
5
|
-
return {
|
|
6
|
-
body: {
|
|
7
|
-
files: discovered.files.map((file) => {
|
|
8
|
-
totalBytes += file.content.length;
|
|
9
|
-
return {
|
|
10
|
-
fileName: file.path,
|
|
11
|
-
content: new Blob([file.content], { type: "application/json" })
|
|
12
|
-
};
|
|
13
|
-
}),
|
|
14
|
-
metadata: {
|
|
15
|
-
sourceMapToGenerated: discovered.mapping,
|
|
16
|
-
hashes: Object.fromEntries(discovered.files.map((f) => [f.path, f.hash])),
|
|
17
|
-
debugIds: Object.fromEntries(discovered.files.map((f) => [f.path, f.debugId])),
|
|
18
|
-
sourceFileCount: discovered.sourceFileCount
|
|
19
|
-
}
|
|
20
|
-
},
|
|
21
|
-
totalBytes
|
|
22
|
-
};
|
|
23
|
-
}
|
|
24
5
|
/**
 * Deletes uploaded source-map files from disk. A missing file (ENOENT) is
 * ignored — it just means the upload-then-cleanup race lost. Any other
 * unlink failure (permissions, IO, EBUSY) means the map is stranded on the
 * customer's CDN, so all such failures are collected and surfaced as one
 * aggregated warning instead of failing the build.
 */
async function cleanupSourceMaps(files) {
	const settled = await Promise.allSettled(files.map((file) => unlink(file.absolute)));
	const leftovers = settled.flatMap((outcome, index) => {
		if (outcome.status !== "rejected") return [];
		const reason = outcome.reason;
		if (reason.code === "ENOENT") return [];
		const target = files[index]?.absolute ?? "<unknown>";
		return [`${target}: ${reason.code ?? reason.message ?? String(reason)}`];
	});
	if (leftovers.length > 0) log.warn("Source-map cleanup left files on disk", leftovers);
}
|
|
27
17
|
//#endregion
|
|
28
|
-
export {
|
|
18
|
+
export { cleanupSourceMaps, uploadSourceMaps };
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.mjs","names":[],"sources":["../../../../src/internal/build/source-maps/index.ts"],"sourcesContent":["import { unlink } from \"node:fs/promises\";\n\nimport type { SourceMapFile } from \"./discover.js\";\n\nexport function
|
|
1
|
+
{"version":3,"file":"index.mjs","names":[],"sources":["../../../../src/internal/build/source-maps/index.ts"],"sourcesContent":["import { unlink } from \"node:fs/promises\";\n\nimport { log } from \"../../logger.js\";\nimport type { SourceMapFile } from \"./discover.js\";\n\nexport async function cleanupSourceMaps(files: SourceMapFile[]): Promise<void> {\n // ENOENT is the only failure we silently absorb — a missing file just\n // means the upload-then-cleanup race lost. Anything else (perm, IO,\n // EBUSY) is a real signal: the source map is now stranded on the\n // customer's CDN and we surface it via a single aggregated warning so\n // an operator can investigate without the build failing outright.\n const results = await Promise.allSettled(files.map((f) => unlink(f.absolute)));\n const failures: string[] = [];\n for (const [i, r] of results.entries()) {\n if (r.status !== \"rejected\") {\n continue;\n }\n const err = r.reason as NodeJS.ErrnoException;\n if (err.code === \"ENOENT\") {\n continue;\n }\n const file = files[i];\n failures.push(\n `${file?.absolute ?? \"<unknown>\"}: ${err.code ?? err.message ?? String(err)}`\n );\n }\n if (failures.length > 0) {\n log.warn(\"Source-map cleanup left files on disk\", failures);\n }\n}\n\nexport { uploadSourceMaps } from \"./upload.js\";\n"],"mappings":";;;;AAKA,eAAsB,kBAAkB,OAAuC;CAM7E,MAAM,UAAU,MAAM,QAAQ,WAAW,MAAM,KAAK,MAAM,OAAO,EAAE,SAAS,CAAC,CAAC;CAC9E,MAAM,WAAqB,EAAE;CAC7B,KAAK,MAAM,CAAC,GAAG,MAAM,QAAQ,SAAS,EAAE;EACtC,IAAI,EAAE,WAAW,YACf;EAEF,MAAM,MAAM,EAAE;EACd,IAAI,IAAI,SAAS,UACf;EAEF,MAAM,OAAO,MAAM;EACnB,SAAS,KACP,GAAG,MAAM,YAAY,YAAY,IAAI,IAAI,QAAQ,IAAI,WAAW,OAAO,IAAI,GAC5E;;CAEH,IAAI,SAAS,SAAS,GACpB,IAAI,KAAK,yCAAyC,SAAS"}
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
//#region src/internal/build/source-maps/paths.d.ts
|
|
2
|
+
/**
|
|
3
|
+
* Strips Windows path separators and leading `./` segments so the rest of
|
|
4
|
+
* the discovery code can assume POSIX-style paths relative to `projectDir`.
|
|
5
|
+
*/
|
|
6
|
+
declare function normalizeDistDir(distDir?: string): string;
|
|
7
|
+
/**
|
|
8
|
+
* Resolves an absolute filesystem path for the build's `distDir`. Accepts
|
|
9
|
+
* either an absolute path (passed through) or a path relative to
|
|
10
|
+
* `projectDir`.
|
|
11
|
+
*/
|
|
12
|
+
declare function resolveDistDir(projectDir: string, distDir: string): string;
|
|
13
|
+
/**
|
|
14
|
+
* Maps an on-disk path (relative to `projectDir`) to the URL path Next.js
|
|
15
|
+
* will serve it at. Both `_next/` and `.next/` shapes resolve to the public
|
|
16
|
+
* `_next/` prefix the browser sees in `//# sourceMappingURL=...` comments.
|
|
17
|
+
*/
|
|
18
|
+
declare function toPublicPath(rel: string, distDir: string): string;
|
|
19
|
+
/**
|
|
20
|
+
* Recursively collects files under `<absDistDir>/static` and
|
|
21
|
+
* `<absDistDir>/server` matching the given suffix. Both subtrees are
|
|
22
|
+
* optional — partial builds (e.g. server-only chunks during ISR) commonly
|
|
23
|
+
* leave one missing, so a missing directory is treated as empty rather
|
|
24
|
+
* than fatal.
|
|
25
|
+
*/
|
|
26
|
+
declare function walkDistTrees(absDistDir: string, suffix: ".js" | ".js.map"): Promise<string[]>;
|
|
27
|
+
//#endregion
|
|
28
|
+
export { normalizeDistDir, resolveDistDir, toPublicPath, walkDistTrees };
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"paths.d.mts","names":[],"sources":["../../../../src/internal/build/source-maps/paths.ts"],"mappings":";;AAUA;;;iBAAgB,gBAAA,CAAiB,OAAA;;AASjC;;;;iBAAgB,cAAA,CAAe,UAAA,UAAoB,OAAA;AASnD;;;;;AAAA,iBAAgB,YAAA,CAAa,GAAA,UAAa,OAAA;;;;;;;;iBAkBpB,aAAA,CACpB,UAAA,UACA,MAAA,sBACC,OAAA"}
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
import { isAbsolute, join, resolve } from "node:path";
|
|
2
|
+
import { readdir } from "node:fs/promises";
|
|
3
|
+
//#region src/internal/build/source-maps/paths.ts
|
|
4
|
+
const NEXT_PUBLIC_PREFIX = "_next/";
|
|
5
|
+
const NEXT_DIST_PREFIX = ".next/";
|
|
6
|
+
/**
|
|
7
|
+
* Strips Windows path separators and leading `./` segments so the rest of
|
|
8
|
+
* the discovery code can assume POSIX-style paths relative to `projectDir`.
|
|
9
|
+
*/
|
|
10
|
+
function normalizeDistDir(distDir = ".next") {
|
|
11
|
+
return distDir.replaceAll("\\", "/").replace(/^(\.\/)+/, "");
|
|
12
|
+
}
|
|
13
|
+
/**
|
|
14
|
+
* Resolves an absolute filesystem path for the build's `distDir`. Accepts
|
|
15
|
+
* either an absolute path (passed through) or a path relative to
|
|
16
|
+
* `projectDir`.
|
|
17
|
+
*/
|
|
18
|
+
/**
 * Returns an absolute filesystem path for the build's `distDir`: an
 * already-absolute path passes through untouched, a relative one is
 * resolved against `projectDir`.
 */
function resolveDistDir(projectDir, distDir) {
	if (isAbsolute(distDir)) return distDir;
	return resolve(projectDir, distDir);
}
|
|
21
|
+
/**
|
|
22
|
+
* Maps an on-disk path (relative to `projectDir`) to the URL path Next.js
|
|
23
|
+
* will serve it at. Both `_next/` and `.next/` shapes resolve to the public
|
|
24
|
+
* `_next/` prefix the browser sees in `//# sourceMappingURL=...` comments.
|
|
25
|
+
*/
|
|
26
|
+
/**
 * Maps an on-disk path (relative to `projectDir`) to the URL path Next.js
 * will serve it at. Both `<distDir>/...` and `.next/...` shapes resolve to
 * the public `_next/` prefix the browser sees in `//# sourceMappingURL=...`
 * comments; anything else is returned unchanged (normalized to `/`).
 */
function toPublicPath(rel, distDir) {
	const publicPrefix = "_next/";
	const distPrefix = ".next/";
	const p = rel.replaceAll("\\", "/");
	if (p.startsWith(`${distDir}/`)) return `${publicPrefix}${p.slice(distDir.length + 1)}`;
	// Derive the slice offset from the prefix itself instead of the bundler's
	// hard-coded `6`, so the two can never drift apart.
	if (p.startsWith(distPrefix)) return `${publicPrefix}${p.slice(distPrefix.length)}`;
	return p;
}
|
|
32
|
+
/**
|
|
33
|
+
* Recursively collects files under `<absDistDir>/static` and
|
|
34
|
+
* `<absDistDir>/server` matching the given suffix. Both subtrees are
|
|
35
|
+
* optional — partial builds (e.g. server-only chunks during ISR) commonly
|
|
36
|
+
* leave one missing, so a missing directory is treated as empty rather
|
|
37
|
+
* than fatal.
|
|
38
|
+
*/
|
|
39
|
+
/**
 * Recursively collects files under `<absDistDir>/static` and
 * `<absDistDir>/server` whose names end with `suffix`. Either subtree may
 * be absent (partial builds commonly omit one), in which case it simply
 * contributes no entries instead of failing the walk.
 */
async function walkDistTrees(absDistDir, suffix) {
	const collect = async (subtree) => {
		const root = join(absDistDir, subtree);
		const entries = await readdir(root, { recursive: true }).catch(() => []);
		const matches = [];
		for (const entry of entries) {
			if (typeof entry === "string" && entry.endsWith(suffix)) matches.push(join(root, entry));
		}
		return matches;
	};
	const perTree = await Promise.all(["static", "server"].map((subtree) => collect(subtree)));
	return perTree.flat();
}
|
|
48
|
+
//#endregion
|
|
49
|
+
export { normalizeDistDir, resolveDistDir, toPublicPath, walkDistTrees };
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"paths.mjs","names":[],"sources":["../../../../src/internal/build/source-maps/paths.ts"],"sourcesContent":["import { readdir } from \"node:fs/promises\";\nimport { isAbsolute, join, resolve } from \"node:path\";\n\nconst NEXT_PUBLIC_PREFIX = \"_next/\";\nconst NEXT_DIST_PREFIX = \".next/\";\n\n/**\n * Strips Windows path separators and leading `./` segments so the rest of\n * the discovery code can assume POSIX-style paths relative to `projectDir`.\n */\nexport function normalizeDistDir(distDir = \".next\"): string {\n return distDir.replaceAll(\"\\\\\", \"/\").replace(/^(\\.\\/)+/, \"\");\n}\n\n/**\n * Resolves an absolute filesystem path for the build's `distDir`. Accepts\n * either an absolute path (passed through) or a path relative to\n * `projectDir`.\n */\nexport function resolveDistDir(projectDir: string, distDir: string): string {\n return isAbsolute(distDir) ? distDir : resolve(projectDir, distDir);\n}\n\n/**\n * Maps an on-disk path (relative to `projectDir`) to the URL path Next.js\n * will serve it at. Both `_next/` and `.next/` shapes resolve to the public\n * `_next/` prefix the browser sees in `//# sourceMappingURL=...` comments.\n */\nexport function toPublicPath(rel: string, distDir: string): string {\n const p = rel.replaceAll(\"\\\\\", \"/\");\n if (p.startsWith(`${distDir}/`)) {\n return `${NEXT_PUBLIC_PREFIX}${p.slice(distDir.length + 1)}`;\n }\n if (p.startsWith(NEXT_DIST_PREFIX)) {\n return `${NEXT_PUBLIC_PREFIX}${p.slice(NEXT_DIST_PREFIX.length)}`;\n }\n return p;\n}\n\n/**\n * Recursively collects files under `<absDistDir>/static` and\n * `<absDistDir>/server` matching the given suffix. Both subtrees are\n * optional — partial builds (e.g. 
server-only chunks during ISR) commonly\n * leave one missing, so a missing directory is treated as empty rather\n * than fatal.\n */\nexport async function walkDistTrees(\n absDistDir: string,\n suffix: \".js\" | \".js.map\"\n): Promise<string[]> {\n const subtrees = [\"static\", \"server\"] as const;\n\n const results = await Promise.all(\n subtrees.map(async (subtree) => {\n const root = join(absDistDir, subtree);\n const entries = await readdir(root, { recursive: true }).catch(\n () => [] as string[]\n );\n const out: string[] = [];\n for (const entry of entries) {\n if (typeof entry === \"string\" && entry.endsWith(suffix)) {\n out.push(join(root, entry));\n }\n }\n return out;\n })\n );\n\n return results.flat();\n}\n"],"mappings":";;;AAGA,MAAM,qBAAqB;AAC3B,MAAM,mBAAmB;;;;;AAMzB,SAAgB,iBAAiB,UAAU,SAAiB;CAC1D,OAAO,QAAQ,WAAW,MAAM,IAAI,CAAC,QAAQ,YAAY,GAAG;;;;;;;AAQ9D,SAAgB,eAAe,YAAoB,SAAyB;CAC1E,OAAO,WAAW,QAAQ,GAAG,UAAU,QAAQ,YAAY,QAAQ;;;;;;;AAQrE,SAAgB,aAAa,KAAa,SAAyB;CACjE,MAAM,IAAI,IAAI,WAAW,MAAM,IAAI;CACnC,IAAI,EAAE,WAAW,GAAG,QAAQ,GAAG,EAC7B,OAAO,GAAG,qBAAqB,EAAE,MAAM,QAAQ,SAAS,EAAE;CAE5D,IAAI,EAAE,WAAW,iBAAiB,EAChC,OAAO,GAAG,qBAAqB,EAAE,MAAM,EAAwB;CAEjE,OAAO;;;;;;;;;AAUT,eAAsB,cACpB,YACA,QACmB;CAmBnB,QAAO,MAhBe,QAAQ,IAC5B,CAHgB,UAAU,SAGlB,CAAC,IAAI,OAAO,YAAY;EAC9B,MAAM,OAAO,KAAK,YAAY,QAAQ;EACtC,MAAM,UAAU,MAAM,QAAQ,MAAM,EAAE,WAAW,MAAM,CAAC,CAAC,YACjD,EAAE,CACT;EACD,MAAM,MAAgB,EAAE;EACxB,KAAK,MAAM,SAAS,SAClB,IAAI,OAAO,UAAU,YAAY,MAAM,SAAS,OAAO,EACrD,IAAI,KAAK,KAAK,MAAM,MAAM,CAAC;EAG/B,OAAO;GACP,CACH,EAEc,MAAM"}
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
import { DiscoveryResult } from "./discover.mjs";
|
|
2
|
+
import { ReleaseSlug } from "@interfere/types/releases/slug";
|
|
3
|
+
import { HTTPClient } from "@interfere/sdk";
|
|
4
|
+
import { ManifestBundler } from "@interfere/types/data/source-maps";
|
|
5
|
+
|
|
6
|
+
//#region src/internal/build/source-maps/upload.d.ts
|
|
7
|
+
interface UploadResult {
|
|
8
|
+
fileCount: number;
|
|
9
|
+
totalBytes: number;
|
|
10
|
+
}
|
|
11
|
+
interface UploadParams {
|
|
12
|
+
apiUrl: string;
|
|
13
|
+
bundler: ManifestBundler;
|
|
14
|
+
discovered: DiscoveryResult;
|
|
15
|
+
httpClient: HTTPClient;
|
|
16
|
+
releaseSlug: ReleaseSlug;
|
|
17
|
+
}
|
|
18
|
+
/**
|
|
19
|
+
* Drives the three-step presigned-URL flow:
|
|
20
|
+
*
|
|
21
|
+
* 1. POST `/v1/releases/:slug/source-maps/sign` with per-file metadata.
|
|
22
|
+
* 2. PUT each file's content directly to R2 in parallel (concurrency
|
|
23
|
+
* bounded so a build with 1000+ chunks doesn't open 1000 sockets).
|
|
24
|
+
* 3. POST `/v1/releases/:slug/source-maps/complete` to materialize the
|
|
25
|
+
* manifest and update the release row.
|
|
26
|
+
*
|
|
27
|
+
* The collector never sees source-map bytes — uploads scale with R2
|
|
28
|
+
* limits, not Cloudflare worker / load-balancer body-size caps. The
|
|
29
|
+
* legacy multipart endpoint is still wired for older SDKs.
|
|
30
|
+
*/
|
|
31
|
+
declare function uploadSourceMaps({
|
|
32
|
+
apiUrl,
|
|
33
|
+
bundler,
|
|
34
|
+
discovered,
|
|
35
|
+
httpClient,
|
|
36
|
+
releaseSlug
|
|
37
|
+
}: UploadParams): Promise<UploadResult>;
|
|
38
|
+
interface SourceMapRichness {
|
|
39
|
+
hasSourcesContent: boolean;
|
|
40
|
+
hasNames: boolean;
|
|
41
|
+
hasFile: boolean;
|
|
42
|
+
mappingsPresent: boolean;
|
|
43
|
+
}
|
|
44
|
+
declare function extractRichness(content: string): SourceMapRichness;
|
|
45
|
+
//#endregion
|
|
46
|
+
export { UploadResult, extractRichness, uploadSourceMaps };
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"upload.d.mts","names":[],"sources":["../../../../src/internal/build/source-maps/upload.ts"],"mappings":";;;;;;UAciB,YAAA;EACf,SAAA;EACA,UAAA;AAAA;AAAA,UAGQ,YAAA;EACR,MAAA;EACA,OAAA,EAAS,eAAA;EACT,UAAA,EAAY,eAAA;EACZ,UAAA,EAAY,UAAA;EACZ,WAAA,EAAa,WAAA;AAAA;;;;;;;;;;;;;;iBAgBO,gBAAA,CAAA;EACpB,MAAA;EACA,OAAA;EACA,UAAA;EACA,UAAA;EACA;AAAA,GACC,YAAA,GAAe,OAAA,CAAQ,YAAA;AAAA,UAoEhB,iBAAA;EACR,iBAAA;EACA,QAAA;EACA,OAAA;EACA,eAAA;AAAA;AAAA,iBAyBc,eAAA,CAAgB,OAAA,WAAkB,iBAAA"}
|
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
//#region src/internal/build/source-maps/upload.ts
|
|
2
|
+
// Upper bound on simultaneous presigned PUTs so a build with 1000+ chunks
// doesn't open 1000 sockets at once.
const PUT_CONCURRENCY = 12;
/**
 * Runs the three-step presigned-URL upload flow:
 *
 * 1. POST `/v1/releases/:slug/source-maps/sign` with per-file metadata.
 * 2. PUT each file's content straight to R2, bounded by PUT_CONCURRENCY.
 * 3. POST `/v1/releases/:slug/source-maps/complete` to materialize the
 *    manifest and update the release row.
 *
 * The collector never sees source-map bytes — uploads scale with R2
 * limits, not Cloudflare worker / load-balancer body-size caps. The
 * legacy multipart endpoint is still wired for older SDKs.
 */
async function uploadSourceMaps({ apiUrl, bundler, discovered, httpClient, releaseSlug }) {
	const byPath = new Map(discovered.files.map((file) => [file.path, file]));
	const slug = encodeURIComponent(releaseSlug);
	const signBody = { files: discovered.files.map((file) => ({
		path: file.path,
		sizeBytes: byteLengthOf(file.content),
		...extractRichness(file.content)
	})) };
	const signed = await postJson(httpClient, new URL(`/v1/releases/${slug}/source-maps/sign`, apiUrl), signBody);
	let totalBytes = 0;
	await mapWithConcurrency(signed.uploads, PUT_CONCURRENCY, async (upload) => {
		const file = byPath.get(upload.path);
		// A path we never offered in the sign request is a server-side bug;
		// fail loudly rather than uploading nothing for it.
		if (!file) throw new Error(`Sign response referenced unknown path "${upload.path}"`);
		totalBytes += byteLengthOf(file.content);
		await putToR2(upload.presignedUrl, file.content);
	});
	const completeBody = {
		files: discovered.files.map((file) => ({
			path: file.path,
			hash: file.hash,
			debugId: file.debugId,
			chunkUrl: file.chunkUrl
		})),
		sourceFileCount: discovered.sourceFileCount,
		bundler
	};
	const completed = await postJson(httpClient, new URL(`/v1/releases/${slug}/source-maps/complete`, apiUrl), completeBody);
	return {
		fileCount: completed.fileCount,
		totalBytes
	};
}
|
|
46
|
+
// UTF-8 byte length of a string (String#length counts UTF-16 code units,
// which undercounts multi-byte characters).
function byteLengthOf(content) {
	const encoded = new TextEncoder().encode(content);
	return encoded.length;
}
|
|
49
|
+
// Shared all-false report used when a payload isn't valid JSON or isn't an
// object — nothing useful can be claimed about such a "map".
const EMPTY_RICHNESS = {
	hasSourcesContent: false,
	hasNames: false,
	hasFile: false,
	mappingsPresent: false
};
/**
 * Inspects one flat (non-indexed) source-map object and reports which
 * debugging-relevant fields carry real data: non-empty `sourcesContent`,
 * `names`, `file`, and `mappings`.
 */
function leafRichness(map) {
	return {
		hasSourcesContent: Array.isArray(map.sourcesContent) && map.sourcesContent.length > 0,
		hasNames: Array.isArray(map.names) && map.names.length > 0,
		hasFile: typeof map.file === "string" && map.file.length > 0,
		mappingsPresent: typeof map.mappings === "string" && map.mappings.length > 0
	};
}
|
|
67
|
+
/**
 * Classifies a source-map payload by which debugging-relevant fields are
 * populated. Handles both flat maps and indexed maps (with `sections`,
 * whose per-section richness is OR-merged); invalid JSON or a non-object
 * payload yields the shared all-false report.
 */
function extractRichness(content) {
	let parsed;
	try {
		parsed = JSON.parse(content);
	} catch {
		return EMPTY_RICHNESS;
	}
	// JSON.parse can legally yield null/number/string/boolean — none of
	// those are a map we can inspect.
	if (!parsed || typeof parsed !== "object") return EMPTY_RICHNESS;
	const top = parsed;
	const sections = top.sections;
	if (!Array.isArray(sections)) return leafRichness(top);
	// Indexed map: `file` lives at the top level; everything else is
	// OR-merged across the per-section maps.
	const topFile = top.file;
	const merged = {
		hasSourcesContent: false,
		hasNames: false,
		hasFile: typeof topFile === "string" && topFile.length > 0,
		mappingsPresent: false
	};
	for (const section of sections) {
		const inner = section?.map;
		if (!inner || typeof inner !== "object") continue;
		const report = leafRichness(inner);
		merged.hasSourcesContent ||= report.hasSourcesContent;
		merged.hasNames ||= report.hasNames;
		merged.mappingsPresent ||= report.mappingsPresent;
	}
	return merged;
}
|
|
94
|
+
/**
 * POSTs `body` as JSON through the SDK's HTTP client and returns the
 * parsed JSON response. A non-2xx status throws, with the response text
 * (best-effort) folded into the error message for diagnostics.
 */
async function postJson(httpClient, url, body) {
	const request = new Request(url, {
		method: "POST",
		headers: {
			"content-type": "application/json",
			accept: "application/json"
		},
		body: JSON.stringify(body)
	});
	const response = await httpClient.request(request);
	if (response.ok) return await response.json();
	const detail = await response.text().catch(() => "");
	throw new Error(`Source-map API request failed: POST ${url.pathname} -> ${response.status} ${detail}`);
}
|
|
109
|
+
/**
 * PUTs a source map's content to its presigned R2 URL. Throws on any
 * non-2xx status, attaching the response text (best-effort) so the caller
 * can surface what the storage layer rejected.
 */
async function putToR2(presignedUrl, content) {
	const response = await fetch(presignedUrl, {
		method: "PUT",
		headers: { "content-type": "application/json" },
		body: content
	});
	if (response.ok) return;
	const detail = await response.text().catch(() => "");
	throw new Error(`Source-map upload to R2 failed: ${response.status} ${detail}`);
}
|
|
120
|
+
/**
 * Runs `fn(item, index)` over `items` with at most `concurrency` calls in
 * flight: a fixed pool of workers pulls the next index from a shared
 * cursor until the list is exhausted. A worker also stops if it draws an
 * `undefined` slot (preserved quirk of the original implementation).
 */
async function mapWithConcurrency(items, concurrency, fn) {
	let next = 0;
	const drain = async () => {
		for (;;) {
			const index = next;
			next += 1;
			if (index >= items.length) return;
			const value = items[index];
			if (value === void 0) return;
			await fn(value, index);
		}
	};
	const poolSize = Math.min(concurrency, items.length);
	await Promise.all(Array.from({ length: poolSize }, () => drain()));
}
|
|
133
|
+
//#endregion
|
|
134
|
+
export { extractRichness, uploadSourceMaps };
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"upload.mjs","names":[],"sources":["../../../../src/internal/build/source-maps/upload.ts"],"sourcesContent":["import type { HTTPClient } from \"@interfere/sdk\";\nimport type { ReleaseSlug } from \"@interfere/types/releases/slug\";\nimport type {\n CompleteSourceMapsRequest,\n CompleteSourceMapsResponse,\n ManifestBundler,\n SignSourceMapsRequest,\n SignSourceMapsResponse,\n} from \"@interfere/types/data/source-maps\";\n\nimport type { DiscoveryResult } from \"./discover.js\";\n\nconst PUT_CONCURRENCY = 12;\n\nexport interface UploadResult {\n fileCount: number;\n totalBytes: number;\n}\n\ninterface UploadParams {\n apiUrl: string;\n bundler: ManifestBundler;\n discovered: DiscoveryResult;\n httpClient: HTTPClient;\n releaseSlug: ReleaseSlug;\n}\n\n/**\n * Drives the three-step presigned-URL flow:\n *\n * 1. POST `/v1/releases/:slug/source-maps/sign` with per-file metadata.\n * 2. PUT each file's content directly to R2 in parallel (concurrency\n * bounded so a build with 1000+ chunks doesn't open 1000 sockets).\n * 3. POST `/v1/releases/:slug/source-maps/complete` to materialize the\n * manifest and update the release row.\n *\n * The collector never sees source-map bytes — uploads scale with R2\n * limits, not Cloudflare worker / load-balancer body-size caps. 
The\n * legacy multipart endpoint is still wired for older SDKs.\n */\nexport async function uploadSourceMaps({\n apiUrl,\n bundler,\n discovered,\n httpClient,\n releaseSlug,\n}: UploadParams): Promise<UploadResult> {\n const filesByPath = new Map(discovered.files.map((f) => [f.path, f]));\n\n const signRequest = {\n files: discovered.files.map((file) => ({\n path: file.path,\n sizeBytes: byteLengthOf(file.content),\n ...extractRichness(file.content),\n })),\n } satisfies SignSourceMapsRequest;\n\n const signResponse = await postJson<SignSourceMapsResponse>(\n httpClient,\n new URL(\n `/v1/releases/${encodeURIComponent(releaseSlug)}/source-maps/sign`,\n apiUrl\n ),\n signRequest\n );\n\n let totalBytes = 0;\n\n await mapWithConcurrency(\n signResponse.uploads,\n PUT_CONCURRENCY,\n async (upload) => {\n const file = filesByPath.get(upload.path);\n if (!file) {\n throw new Error(\n `Sign response referenced unknown path \"${upload.path}\"`\n );\n }\n\n totalBytes += byteLengthOf(file.content);\n await putToR2(upload.presignedUrl, file.content);\n }\n );\n\n const completeRequest = {\n files: discovered.files.map((file) => ({\n path: file.path,\n hash: file.hash,\n debugId: file.debugId,\n chunkUrl: file.chunkUrl,\n })),\n sourceFileCount: discovered.sourceFileCount,\n bundler,\n } satisfies CompleteSourceMapsRequest;\n\n const completeResponse = await postJson<CompleteSourceMapsResponse>(\n httpClient,\n new URL(\n `/v1/releases/${encodeURIComponent(releaseSlug)}/source-maps/complete`,\n apiUrl\n ),\n completeRequest\n );\n\n return {\n fileCount: completeResponse.fileCount,\n totalBytes,\n };\n}\n\nfunction byteLengthOf(content: string) {\n return new TextEncoder().encode(content).byteLength;\n}\n\ninterface SourceMapRichness {\n hasSourcesContent: boolean;\n hasNames: boolean;\n hasFile: boolean;\n mappingsPresent: boolean;\n}\n\nconst EMPTY_RICHNESS: SourceMapRichness = {\n hasSourcesContent: false,\n hasNames: false,\n hasFile: false,\n mappingsPresent: 
false,\n};\n\nfunction leafRichness(map: Record<string, unknown>): SourceMapRichness {\n const sourcesContent = map[\"sourcesContent\"];\n const names = map[\"names\"];\n const file = map[\"file\"];\n const mappings = map[\"mappings\"];\n\n return {\n hasSourcesContent:\n Array.isArray(sourcesContent) && sourcesContent.length > 0,\n hasNames: Array.isArray(names) && names.length > 0,\n hasFile: typeof file === \"string\" && file.length > 0,\n mappingsPresent: typeof mappings === \"string\" && mappings.length > 0,\n };\n}\n\nexport function extractRichness(content: string): SourceMapRichness {\n let parsed: unknown;\n try {\n parsed = JSON.parse(content);\n } catch {\n return EMPTY_RICHNESS;\n }\n\n if (!(parsed && typeof parsed === \"object\")) {\n return EMPTY_RICHNESS;\n }\n\n const top = parsed as Record<string, unknown>;\n const sections = top[\"sections\"];\n\n if (!Array.isArray(sections)) {\n return leafRichness(top);\n }\n\n const merged: SourceMapRichness = {\n hasSourcesContent: false,\n hasNames: false,\n hasFile: typeof top[\"file\"] === \"string\" && (top[\"file\"] as string).length > 0,\n mappingsPresent: false,\n };\n\n for (const section of sections) {\n const inner = (section as { map?: unknown } | null)?.map;\n if (!(inner && typeof inner === \"object\")) {\n continue;\n }\n const r = leafRichness(inner as Record<string, unknown>);\n merged.hasSourcesContent ||= r.hasSourcesContent;\n merged.hasNames ||= r.hasNames;\n merged.mappingsPresent ||= r.mappingsPresent;\n }\n\n return merged;\n}\n\nasync function postJson<T>(\n httpClient: HTTPClient,\n url: URL,\n body: unknown\n): Promise<T> {\n const response = await httpClient.request(\n new Request(url, {\n method: \"POST\",\n headers: {\n \"content-type\": \"application/json\",\n accept: \"application/json\",\n },\n body: JSON.stringify(body),\n })\n );\n\n if (!response.ok) {\n const detail = await response.text().catch(() => \"\");\n throw new Error(\n `Source-map API request failed: POST 
${url.pathname} -> ${response.status} ${detail}`\n );\n }\n\n return (await response.json()) as T;\n}\n\nasync function putToR2(presignedUrl: string, content: string) {\n const response = await fetch(presignedUrl, {\n method: \"PUT\",\n headers: { \"content-type\": \"application/json\" },\n body: content,\n });\n\n if (!response.ok) {\n const detail = await response.text().catch(() => \"\");\n throw new Error(\n `Source-map upload to R2 failed: ${response.status} ${detail}`\n );\n }\n}\n\nasync function mapWithConcurrency<T>(\n items: T[],\n concurrency: number,\n fn: (item: T, index: number) => Promise<void>\n): Promise<void> {\n let cursor = 0;\n\n async function worker() {\n while (true) {\n const i = cursor++;\n if (i >= items.length) {\n return;\n }\n const item = items[i];\n // Index is bounded by `items.length` above; the assertion is\n // narrowing for noUncheckedIndexedAccess, not a runtime check.\n if (item === undefined) {\n return;\n }\n await fn(item, i);\n }\n }\n\n await Promise.all(\n Array.from({ length: Math.min(concurrency, items.length) }, () => worker())\n 
);\n}\n"],"mappings":";AAYA,MAAM,kBAAkB;;;;;;;;;;;;;;AA4BxB,eAAsB,iBAAiB,EACrC,QACA,SACA,YACA,YACA,eACsC;CACtC,MAAM,cAAc,IAAI,IAAI,WAAW,MAAM,KAAK,MAAM,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC;CAErE,MAAM,cAAc,EAClB,OAAO,WAAW,MAAM,KAAK,UAAU;EACrC,MAAM,KAAK;EACX,WAAW,aAAa,KAAK,QAAQ;EACrC,GAAG,gBAAgB,KAAK,QAAQ;EACjC,EAAE,EACJ;CAED,MAAM,eAAe,MAAM,SACzB,YACA,IAAI,IACF,gBAAgB,mBAAmB,YAAY,CAAC,oBAChD,OACD,EACD,YACD;CAED,IAAI,aAAa;CAEjB,MAAM,mBACJ,aAAa,SACb,iBACA,OAAO,WAAW;EAChB,MAAM,OAAO,YAAY,IAAI,OAAO,KAAK;EACzC,IAAI,CAAC,MACH,MAAM,IAAI,MACR,0CAA0C,OAAO,KAAK,GACvD;EAGH,cAAc,aAAa,KAAK,QAAQ;EACxC,MAAM,QAAQ,OAAO,cAAc,KAAK,QAAQ;GAEnD;CAED,MAAM,kBAAkB;EACtB,OAAO,WAAW,MAAM,KAAK,UAAU;GACrC,MAAM,KAAK;GACX,MAAM,KAAK;GACX,SAAS,KAAK;GACd,UAAU,KAAK;GAChB,EAAE;EACH,iBAAiB,WAAW;EAC5B;EACD;CAWD,OAAO;EACL,YAAW,MAVkB,SAC7B,YACA,IAAI,IACF,gBAAgB,mBAAmB,YAAY,CAAC,wBAChD,OACD,EACD,gBACD,EAG6B;EAC5B;EACD;;AAGH,SAAS,aAAa,SAAiB;CACrC,OAAO,IAAI,aAAa,CAAC,OAAO,QAAQ,CAAC;;AAU3C,MAAM,iBAAoC;CACxC,mBAAmB;CACnB,UAAU;CACV,SAAS;CACT,iBAAiB;CAClB;AAED,SAAS,aAAa,KAAiD;CACrE,MAAM,iBAAiB,IAAI;CAC3B,MAAM,QAAQ,IAAI;CAClB,MAAM,OAAO,IAAI;CACjB,MAAM,WAAW,IAAI;CAErB,OAAO;EACL,mBACE,MAAM,QAAQ,eAAe,IAAI,eAAe,SAAS;EAC3D,UAAU,MAAM,QAAQ,MAAM,IAAI,MAAM,SAAS;EACjD,SAAS,OAAO,SAAS,YAAY,KAAK,SAAS;EACnD,iBAAiB,OAAO,aAAa,YAAY,SAAS,SAAS;EACpE;;AAGH,SAAgB,gBAAgB,SAAoC;CAClE,IAAI;CACJ,IAAI;EACF,SAAS,KAAK,MAAM,QAAQ;SACtB;EACN,OAAO;;CAGT,IAAI,EAAE,UAAU,OAAO,WAAW,WAChC,OAAO;CAGT,MAAM,MAAM;CACZ,MAAM,WAAW,IAAI;CAErB,IAAI,CAAC,MAAM,QAAQ,SAAS,EAC1B,OAAO,aAAa,IAAI;CAG1B,MAAM,SAA4B;EAChC,mBAAmB;EACnB,UAAU;EACV,SAAS,OAAO,IAAI,YAAY,YAAa,IAAI,QAAmB,SAAS;EAC7E,iBAAiB;EAClB;CAED,KAAK,MAAM,WAAW,UAAU;EAC9B,MAAM,QAAS,SAAsC;EACrD,IAAI,EAAE,SAAS,OAAO,UAAU,WAC9B;EAEF,MAAM,IAAI,aAAa,MAAiC;EACxD,OAAO,sBAAsB,EAAE;EAC/B,OAAO,aAAa,EAAE;EACtB,OAAO,oBAAoB,EAAE;;CAG/B,OAAO;;AAGT,eAAe,SACb,YACA,KACA,MACY;CACZ,MAAM,WAAW,MAAM,WAAW,QAChC,IAAI,QAAQ,KAAK;EACf,QAAQ;EACR,SAAS;GACP,gBAAgB;GAChB,QAAQ;GACT;EACD,MAAM,KAAK,UAAU,KAAK;EAC3B,CAAC,CACH;CAED,IAAI,CAAC,SAAS,IAAI;EAChB
,MAAM,SAAS,MAAM,SAAS,MAAM,CAAC,YAAY,GAAG;EACpD,MAAM,IAAI,MACR,uCAAuC,IAAI,SAAS,MAAM,SAAS,OAAO,GAAG,SAC9E;;CAGH,OAAQ,MAAM,SAAS,MAAM;;AAG/B,eAAe,QAAQ,cAAsB,SAAiB;CAC5D,MAAM,WAAW,MAAM,MAAM,cAAc;EACzC,QAAQ;EACR,SAAS,EAAE,gBAAgB,oBAAoB;EAC/C,MAAM;EACP,CAAC;CAEF,IAAI,CAAC,SAAS,IAAI;EAChB,MAAM,SAAS,MAAM,SAAS,MAAM,CAAC,YAAY,GAAG;EACpD,MAAM,IAAI,MACR,mCAAmC,SAAS,OAAO,GAAG,SACvD;;;AAIL,eAAe,mBACb,OACA,aACA,IACe;CACf,IAAI,SAAS;CAEb,eAAe,SAAS;EACtB,OAAO,MAAM;GACX,MAAM,IAAI;GACV,IAAI,KAAK,MAAM,QACb;GAEF,MAAM,OAAO,MAAM;GAGnB,IAAI,SAAS,KAAA,GACX;GAEF,MAAM,GAAG,MAAM,EAAE;;;CAIrB,MAAM,QAAQ,IACZ,MAAM,KAAK,EAAE,QAAQ,KAAK,IAAI,aAAa,MAAM,OAAO,EAAE,QAAQ,QAAQ,CAAC,CAC5E"}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"value-injection-loader.mjs","names":[],"sources":["../../../src/internal/build/value-injection-loader.ts"],"sourcesContent":["import type { InterfereInjectedValues } from \"./configure-build.js\";\n\nexport interface ValueInjectionLoaderOptions {\n readonly serializedValues?: string;\n readonly values?: Partial<InterfereInjectedValues>;\n}\n\ninterface LoaderThis<T> {\n getOptions(): T;\n}\n\nexport default function valueInjectionLoader(\n this: LoaderThis<ValueInjectionLoaderOptions>,\n userCode: string\n): string {\n const values = resolveValues(this.getOptions());\n const lines = Object.entries(values)\n .filter(([, value]) => value !== undefined)\n .map(([key, value]) => {\n const escapedKey = key.replace(/\\\\/g, \"\\\\\\\\\").replace(/\"/g, '\\\\\"');\n return `globalThis[\"${escapedKey}\"] = ${JSON.stringify(value)};`;\n });\n\n if (lines.length === 0) {\n return userCode;\n }\n\n return `${lines.join(\"\\n\")}\\n${userCode}`;\n}\n\nfunction resolveValues(\n options: ValueInjectionLoaderOptions\n): Record<string, unknown> {\n if (typeof options.serializedValues === \"string\") {\n try {\n const parsed = JSON.parse(options.serializedValues);\n if (isRecord(parsed)) {\n return parsed;\n }\n return {};\n } catch {\n return {};\n }\n }\n\n return options.values ?? {};\n}\n\nfunction isRecord(value: unknown): value is Record<string, unknown> {\n return typeof value === \"object\" && value !== null && !Array.isArray(value);\n}\n"],"mappings":";AAWA,SAAwB,qBAEtB,UACQ;CACR,MAAM,SAAS,cAAc,KAAK,YAAY,CAAC;CAC/C,MAAM,QAAQ,OAAO,QAAQ,OAAO,CACjC,QAAQ,GAAG,WAAW,UAAU,KAAA,EAAU,CAC1C,KAAK,CAAC,KAAK,WAAW;
|
|
1
|
+
{"version":3,"file":"value-injection-loader.mjs","names":[],"sources":["../../../src/internal/build/value-injection-loader.ts"],"sourcesContent":["import type { InterfereInjectedValues } from \"./configure-build.js\";\n\nexport interface ValueInjectionLoaderOptions {\n readonly serializedValues?: string;\n readonly values?: Partial<InterfereInjectedValues>;\n}\n\ninterface LoaderThis<T> {\n getOptions(): T;\n}\n\nexport default function valueInjectionLoader(\n this: LoaderThis<ValueInjectionLoaderOptions>,\n userCode: string\n): string {\n const values = resolveValues(this.getOptions());\n const lines = Object.entries(values)\n .filter(([, value]) => value !== undefined)\n .map(([key, value]) => {\n const escapedKey = key.replace(/\\\\/g, \"\\\\\\\\\").replace(/\"/g, '\\\\\"');\n return `globalThis[\"${escapedKey}\"] = ${JSON.stringify(value)};`;\n });\n\n if (lines.length === 0) {\n return userCode;\n }\n\n return `${lines.join(\"\\n\")}\\n${userCode}`;\n}\n\nfunction resolveValues(\n options: ValueInjectionLoaderOptions\n): Record<string, unknown> {\n if (typeof options.serializedValues === \"string\") {\n try {\n const parsed = JSON.parse(options.serializedValues);\n if (isRecord(parsed)) {\n return parsed;\n }\n return {};\n } catch {\n return {};\n }\n }\n\n return options.values ?? 
{};\n}\n\nfunction isRecord(value: unknown): value is Record<string, unknown> {\n return typeof value === \"object\" && value !== null && !Array.isArray(value);\n}\n"],"mappings":";AAWA,SAAwB,qBAEtB,UACQ;CACR,MAAM,SAAS,cAAc,KAAK,YAAY,CAAC;CAC/C,MAAM,QAAQ,OAAO,QAAQ,OAAO,CACjC,QAAQ,GAAG,WAAW,UAAU,KAAA,EAAU,CAC1C,KAAK,CAAC,KAAK,WAAW;EAErB,OAAO,eADY,IAAI,QAAQ,OAAO,OAAO,CAAC,QAAQ,MAAM,OAC5B,CAAC,OAAO,KAAK,UAAU,MAAM,CAAC;GAC9D;CAEJ,IAAI,MAAM,WAAW,GACnB,OAAO;CAGT,OAAO,GAAG,MAAM,KAAK,KAAK,CAAC,IAAI;;AAGjC,SAAS,cACP,SACyB;CACzB,IAAI,OAAO,QAAQ,qBAAqB,UACtC,IAAI;EACF,MAAM,SAAS,KAAK,MAAM,QAAQ,iBAAiB;EACnD,IAAI,SAAS,OAAO,EAClB,OAAO;EAET,OAAO,EAAE;SACH;EACN,OAAO,EAAE;;CAIb,OAAO,QAAQ,UAAU,EAAE;;AAG7B,SAAS,SAAS,OAAkD;CAClE,OAAO,OAAO,UAAU,YAAY,UAAU,QAAQ,CAAC,MAAM,QAAQ,MAAM"}
|
package/dist/internal/env.d.mts
CHANGED
|
@@ -11,7 +11,16 @@ interface InterfereEnv {
|
|
|
11
11
|
readonly destinationId: string | null;
|
|
12
12
|
};
|
|
13
13
|
}
|
|
14
|
-
|
|
14
|
+
/**
|
|
15
|
+
* Server-side gate for the proxy route handler, the `captureError` /
|
|
16
|
+
* `onRequestError` helpers, and the remote-config fetcher. Distinct
|
|
17
|
+
* from the browser-side `isEnabledByEnvironment` (in
|
|
18
|
+
* `@interfere/react/internal/kernel`) because the server has different
|
|
19
|
+
* env conventions: `NEXT_PUBLIC_INTERFERE_FORCE_ENABLE` is the dev
|
|
20
|
+
* opt-in, and there's no "unknown runtime, default enabled" case the
|
|
21
|
+
* way the browser SDK has for plain-browser / Vite hosts.
|
|
22
|
+
*/
|
|
23
|
+
declare function isEnabledOnServer(): boolean;
|
|
15
24
|
declare function readInterfereEnv(): InterfereEnv;
|
|
16
25
|
//#endregion
|
|
17
|
-
export { InterfereEnv,
|
|
26
|
+
export { InterfereEnv, isEnabledOnServer, readInterfereEnv };
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"env.d.mts","names":[],"sources":["../../src/internal/env.ts"],"mappings":";;;UAKiB,YAAA;EAAA,SACN,MAAA;EAAA,SACA,MAAA;EAAA,SACA,WAAA;EAAA,SACA,eAAA,EAAiB,OAAA,CAAQ,GAAA;EAAA,SACzB,OAAA;IAAA,SACE,QAAA;IAAA,SACA,aAAA;EAAA;AAAA
|
|
1
|
+
{"version":3,"file":"env.d.mts","names":[],"sources":["../../src/internal/env.ts"],"mappings":";;;UAKiB,YAAA;EAAA,SACN,MAAA;EAAA,SACA,MAAA;EAAA,SACA,WAAA;EAAA,SACA,eAAA,EAAiB,OAAA,CAAQ,GAAA;EAAA,SACzB,OAAA;IAAA,SACE,QAAA;IAAA,SACA,aAAA;EAAA;AAAA;;;;;;;;AAab;;iBAAgB,iBAAA,CAAA;AAAA,iBAOA,gBAAA,CAAA,GAAoB,YAAA"}
|
package/dist/internal/env.mjs
CHANGED
|
@@ -1,8 +1,17 @@
|
|
|
1
|
-
import { parseEnvValue } from "@interfere/types/sdk/env";
|
|
2
1
|
import { API_URL } from "@interfere/constants/api";
|
|
2
|
+
import { parseEnvValue } from "@interfere/types/sdk/env";
|
|
3
3
|
import { normalizeEnv } from "@interfere/types/sdk/runtime";
|
|
4
4
|
//#region src/internal/env.ts
|
|
5
|
-
|
|
5
|
+
/**
|
|
6
|
+
* Server-side gate for the proxy route handler, the `captureError` /
|
|
7
|
+
* `onRequestError` helpers, and the remote-config fetcher. Distinct
|
|
8
|
+
* from the browser-side `isEnabledByEnvironment` (in
|
|
9
|
+
* `@interfere/react/internal/kernel`) because the server has different
|
|
10
|
+
* env conventions: `NEXT_PUBLIC_INTERFERE_FORCE_ENABLE` is the dev
|
|
11
|
+
* opt-in, and there's no "unknown runtime, default enabled" case the
|
|
12
|
+
* way the browser SDK has for plain-browser / Vite hosts.
|
|
13
|
+
*/
|
|
14
|
+
function isEnabledOnServer() {
|
|
6
15
|
if (process.env["NODE_ENV"] === "production") return true;
|
|
7
16
|
return !!process.env["NEXT_PUBLIC_INTERFERE_FORCE_ENABLE"];
|
|
8
17
|
}
|
|
@@ -20,4 +29,4 @@ function readInterfereEnv() {
|
|
|
20
29
|
};
|
|
21
30
|
}
|
|
22
31
|
//#endregion
|
|
23
|
-
export {
|
|
32
|
+
export { isEnabledOnServer, readInterfereEnv };
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"env.mjs","names":[],"sources":["../../src/internal/env.ts"],"sourcesContent":["import { API_URL } from \"@interfere/constants/api\";\nimport { parseEnvValue } from \"@interfere/types/sdk/env\";\nimport type { Env } from \"@interfere/types/sdk/runtime\";\nimport { normalizeEnv } from \"@interfere/types/sdk/runtime\";\n\nexport interface InterfereEnv {\n readonly apiKey: string | null;\n readonly apiUrl: string;\n readonly nextRuntime: string | null;\n readonly nodeEnvironment: Exclude<Env, null>;\n readonly release: {\n readonly sourceId: string | null;\n readonly destinationId: string | null;\n };\n}\n\nexport function
|
|
1
|
+
{"version":3,"file":"env.mjs","names":[],"sources":["../../src/internal/env.ts"],"sourcesContent":["import { API_URL } from \"@interfere/constants/api\";\nimport { parseEnvValue } from \"@interfere/types/sdk/env\";\nimport type { Env } from \"@interfere/types/sdk/runtime\";\nimport { normalizeEnv } from \"@interfere/types/sdk/runtime\";\n\nexport interface InterfereEnv {\n readonly apiKey: string | null;\n readonly apiUrl: string;\n readonly nextRuntime: string | null;\n readonly nodeEnvironment: Exclude<Env, null>;\n readonly release: {\n readonly sourceId: string | null;\n readonly destinationId: string | null;\n };\n}\n\n/**\n * Server-side gate for the proxy route handler, the `captureError` /\n * `onRequestError` helpers, and the remote-config fetcher. Distinct\n * from the browser-side `isEnabledByEnvironment` (in\n * `@interfere/react/internal/kernel`) because the server has different\n * env conventions: `NEXT_PUBLIC_INTERFERE_FORCE_ENABLE` is the dev\n * opt-in, and there's no \"unknown runtime, default enabled\" case the\n * way the browser SDK has for plain-browser / Vite hosts.\n */\nexport function isEnabledOnServer(): boolean {\n if (process.env[\"NODE_ENV\"] === \"production\") {\n return true;\n }\n return !!process.env[\"NEXT_PUBLIC_INTERFERE_FORCE_ENABLE\"];\n}\n\nexport function readInterfereEnv(): InterfereEnv {\n const nodeEnvironment = normalizeEnv(process.env[\"NODE_ENV\"]) ?? \"production\";\n\n return {\n apiKey: parseEnvValue(process.env[\"INTERFERE_API_KEY\"]),\n apiUrl: parseEnvValue(process.env[\"INTERFERE_API_URL\"]) ?? 
API_URL,\n nextRuntime: parseEnvValue(process.env[\"NEXT_RUNTIME\"]),\n nodeEnvironment,\n release: {\n sourceId: parseEnvValue(process.env[\"NEXT_PUBLIC_INTERFERE_BUILD_ID\"]),\n destinationId: parseEnvValue(\n process.env[\"NEXT_PUBLIC_INTERFERE_RELEASE_ID\"]\n ),\n },\n };\n}\n"],"mappings":";;;;;;;;;;;;;AAyBA,SAAgB,oBAA6B;CAC3C,IAAI,QAAQ,IAAI,gBAAgB,cAC9B,OAAO;CAET,OAAO,CAAC,CAAC,QAAQ,IAAI;;AAGvB,SAAgB,mBAAiC;CAC/C,MAAM,kBAAkB,aAAa,QAAQ,IAAI,YAAY,IAAI;CAEjE,OAAO;EACL,QAAQ,cAAc,QAAQ,IAAI,qBAAqB;EACvD,QAAQ,cAAc,QAAQ,IAAI,qBAAqB,IAAI;EAC3D,aAAa,cAAc,QAAQ,IAAI,gBAAgB;EACvD;EACA,SAAS;GACP,UAAU,cAAc,QAAQ,IAAI,kCAAkC;GACtE,eAAe,cACb,QAAQ,IAAI,oCACb;GACF;EACF"}
|
|
@@ -1,4 +1,12 @@
|
|
|
1
1
|
//#region src/internal/logger.d.ts
|
|
2
|
+
/**
|
|
3
|
+
* Thrown by `log.fatal` in test environments where `process.exit` is not
|
|
4
|
+
* appropriate. Tagged so callers' catch blocks can re-throw fatal errors and
|
|
5
|
+
* keep test-env behavior aligned with the production hard-exit.
|
|
6
|
+
*/
|
|
7
|
+
declare class FatalError extends Error {
|
|
8
|
+
readonly name = "FatalError";
|
|
9
|
+
}
|
|
2
10
|
declare const log: {
|
|
3
11
|
info: (title: string, lines: string[]) => void;
|
|
4
12
|
warn: (title: string, lines: string[]) => void;
|
|
@@ -6,4 +14,4 @@ declare const log: {
|
|
|
6
14
|
fatal(title: string, lines: string[]): never;
|
|
7
15
|
};
|
|
8
16
|
//#endregion
|
|
9
|
-
export { log };
|
|
17
|
+
export { FatalError, log };
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"logger.d.mts","names":[],"sources":["../../src/internal/logger.ts"],"mappings":";
|
|
1
|
+
{"version":3,"file":"logger.d.mts","names":[],"sources":["../../src/internal/logger.ts"],"mappings":";;AAuDA;;;;cAAa,UAAA,SAAmB,KAAA;EAAA,SACZ,IAAA;AAAA;AAAA,cA8BP,GAAA;wBACS,KAAA;wBACA,KAAA;yBACC,KAAA;uBACF,KAAA;AAAA"}
|
package/dist/internal/logger.mjs
CHANGED
|
@@ -35,6 +35,14 @@ const consoleMethods = {
|
|
|
35
35
|
function isTestEnv() {
|
|
36
36
|
return Boolean(process.env["VITEST"] || process.env["VITEST_WORKER_ID"]);
|
|
37
37
|
}
|
|
38
|
+
/**
|
|
39
|
+
* Thrown by `log.fatal` in test environments where `process.exit` is not
|
|
40
|
+
* appropriate. Tagged so callers' catch blocks can re-throw fatal errors and
|
|
41
|
+
* keep test-env behavior aligned with the production hard-exit.
|
|
42
|
+
*/
|
|
43
|
+
var FatalError = class extends Error {
|
|
44
|
+
name = "FatalError";
|
|
45
|
+
};
|
|
38
46
|
function emit(level, title, lines) {
|
|
39
47
|
if (isTestEnv()) return;
|
|
40
48
|
const style = styles[level];
|
|
@@ -52,9 +60,9 @@ const log = {
|
|
|
52
60
|
error: (title, lines) => emit("error", title, lines),
|
|
53
61
|
fatal(title, lines) {
|
|
54
62
|
emit("fatal", title, lines);
|
|
55
|
-
if (isTestEnv()) throw new
|
|
63
|
+
if (isTestEnv()) throw new FatalError(title);
|
|
56
64
|
process.exit(1);
|
|
57
65
|
}
|
|
58
66
|
};
|
|
59
67
|
//#endregion
|
|
60
|
-
export { log };
|
|
68
|
+
export { FatalError, log };
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"logger.mjs","names":[],"sources":["../../src/internal/logger.ts"],"sourcesContent":["import chalk from \"chalk\";\n\ntype LogLevel = \"info\" | \"warn\" | \"error\" | \"fatal\";\n\nconst styles = {\n info: {\n prefix: `${chalk.whiteBright.bold(\"❖\")}`,\n text: chalk.cyan.bold,\n content: chalk.white,\n prependLevel: false,\n },\n warn: {\n prefix: `${chalk.yellow.bold(\"⚠\")} `,\n text: chalk.yellow.bold,\n content: chalk.yellowBright,\n prependLevel: false,\n },\n error: {\n prefix: `${chalk.red.bold(\"⨯\")} `,\n text: chalk.red.bold,\n content: chalk.redBright,\n prependLevel: true,\n },\n fatal: {\n prefix: `${chalk.red.bold(\"⨯\")} `,\n text: chalk.red.bold,\n content: chalk.redBright,\n prependLevel: true,\n },\n} satisfies Record<\n LogLevel,\n {\n prefix: string;\n text: typeof chalk.bold;\n content: typeof chalk;\n prependLevel: boolean;\n }\n>;\n\nconst consoleMethods = {\n info: \"log\",\n warn: \"warn\",\n error: \"error\",\n fatal: \"error\",\n} satisfies Record<LogLevel, string>;\n\nfunction isTestEnv() {\n return Boolean(process.env[\"VITEST\"] || process.env[\"VITEST_WORKER_ID\"]);\n}\n\nfunction emit(level: LogLevel, title: string, lines: string[]) {\n if (isTestEnv()) {\n return;\n }\n\n const style = styles[level];\n const method = consoleMethods[level] as keyof Console;\n const fn = globalThis.console[method];\n if (typeof fn !== \"function\") {\n return;\n }\n\n const invoke = (...args: unknown[]) =>\n Reflect.apply(fn, globalThis.console, args);\n\n const prependLevel = style.prependLevel\n ? `[${style.text(level.toUpperCase())}] `\n : \"\";\n\n invoke(`${prependLevel} ${chalk.white(\"Interfere →\")} ${style.text(title)}`);\n\n for (const [i, line] of lines.entries()) {\n const connector = i === lines.length - 1 ? 
\"└\" : \"├\";\n invoke(`${prependLevel} ${connector} ${style.content(line)}`);\n }\n}\n\nexport const log = {\n info: (title: string, lines: string[]) => emit(\"info\", title, lines),\n warn: (title: string, lines: string[]) => emit(\"warn\", title, lines),\n error: (title: string, lines: string[]) => emit(\"error\", title, lines),\n fatal(title: string, lines: string[]): never {\n emit(\"fatal\", title, lines);\n\n if (isTestEnv()) {\n throw new
|
|
1
|
+
{"version":3,"file":"logger.mjs","names":[],"sources":["../../src/internal/logger.ts"],"sourcesContent":["import chalk from \"chalk\";\n\ntype LogLevel = \"info\" | \"warn\" | \"error\" | \"fatal\";\n\nconst styles = {\n info: {\n prefix: `${chalk.whiteBright.bold(\"❖\")}`,\n text: chalk.cyan.bold,\n content: chalk.white,\n prependLevel: false,\n },\n warn: {\n prefix: `${chalk.yellow.bold(\"⚠\")} `,\n text: chalk.yellow.bold,\n content: chalk.yellowBright,\n prependLevel: false,\n },\n error: {\n prefix: `${chalk.red.bold(\"⨯\")} `,\n text: chalk.red.bold,\n content: chalk.redBright,\n prependLevel: true,\n },\n fatal: {\n prefix: `${chalk.red.bold(\"⨯\")} `,\n text: chalk.red.bold,\n content: chalk.redBright,\n prependLevel: true,\n },\n} satisfies Record<\n LogLevel,\n {\n prefix: string;\n text: typeof chalk.bold;\n content: typeof chalk;\n prependLevel: boolean;\n }\n>;\n\nconst consoleMethods = {\n info: \"log\",\n warn: \"warn\",\n error: \"error\",\n fatal: \"error\",\n} satisfies Record<LogLevel, string>;\n\nfunction isTestEnv() {\n return Boolean(process.env[\"VITEST\"] || process.env[\"VITEST_WORKER_ID\"]);\n}\n\n/**\n * Thrown by `log.fatal` in test environments where `process.exit` is not\n * appropriate. Tagged so callers' catch blocks can re-throw fatal errors and\n * keep test-env behavior aligned with the production hard-exit.\n */\nexport class FatalError extends Error {\n override readonly name = \"FatalError\";\n}\n\nfunction emit(level: LogLevel, title: string, lines: string[]) {\n if (isTestEnv()) {\n return;\n }\n\n const style = styles[level];\n const method = consoleMethods[level] as keyof Console;\n const fn = globalThis.console[method];\n if (typeof fn !== \"function\") {\n return;\n }\n\n const invoke = (...args: unknown[]) =>\n Reflect.apply(fn, globalThis.console, args);\n\n const prependLevel = style.prependLevel\n ? 
`[${style.text(level.toUpperCase())}] `\n : \"\";\n\n invoke(`${prependLevel} ${chalk.white(\"Interfere →\")} ${style.text(title)}`);\n\n for (const [i, line] of lines.entries()) {\n const connector = i === lines.length - 1 ? \"└\" : \"├\";\n invoke(`${prependLevel} ${connector} ${style.content(line)}`);\n }\n}\n\nexport const log = {\n info: (title: string, lines: string[]) => emit(\"info\", title, lines),\n warn: (title: string, lines: string[]) => emit(\"warn\", title, lines),\n error: (title: string, lines: string[]) => emit(\"error\", title, lines),\n fatal(title: string, lines: string[]): never {\n emit(\"fatal\", title, lines);\n\n if (isTestEnv()) {\n throw new FatalError(title);\n }\n\n process.exit(1);\n },\n};\n"],"mappings":";;AAIA,MAAM,SAAS;CACb,MAAM;EACJ,QAAQ,GAAG,MAAM,YAAY,KAAK,IAAI;EACtC,MAAM,MAAM,KAAK;EACjB,SAAS,MAAM;EACf,cAAc;EACf;CACD,MAAM;EACJ,QAAQ,GAAG,MAAM,OAAO,KAAK,IAAI,CAAC;EAClC,MAAM,MAAM,OAAO;EACnB,SAAS,MAAM;EACf,cAAc;EACf;CACD,OAAO;EACL,QAAQ,GAAG,MAAM,IAAI,KAAK,IAAI,CAAC;EAC/B,MAAM,MAAM,IAAI;EAChB,SAAS,MAAM;EACf,cAAc;EACf;CACD,OAAO;EACL,QAAQ,GAAG,MAAM,IAAI,KAAK,IAAI,CAAC;EAC/B,MAAM,MAAM,IAAI;EAChB,SAAS,MAAM;EACf,cAAc;EACf;CACF;AAUD,MAAM,iBAAiB;CACrB,MAAM;CACN,MAAM;CACN,OAAO;CACP,OAAO;CACR;AAED,SAAS,YAAY;CACnB,OAAO,QAAQ,QAAQ,IAAI,aAAa,QAAQ,IAAI,oBAAoB;;;;;;;AAQ1E,IAAa,aAAb,cAAgC,MAAM;CACpC,OAAyB;;AAG3B,SAAS,KAAK,OAAiB,OAAe,OAAiB;CAC7D,IAAI,WAAW,EACb;CAGF,MAAM,QAAQ,OAAO;CACrB,MAAM,SAAS,eAAe;CAC9B,MAAM,KAAK,WAAW,QAAQ;CAC9B,IAAI,OAAO,OAAO,YAChB;CAGF,MAAM,UAAU,GAAG,SACjB,QAAQ,MAAM,IAAI,WAAW,SAAS,KAAK;CAE7C,MAAM,eAAe,MAAM,eACvB,IAAI,MAAM,KAAK,MAAM,aAAa,CAAC,CAAC,MACpC;CAEJ,OAAO,GAAG,aAAa,GAAG,MAAM,MAAM,cAAc,CAAC,GAAG,MAAM,KAAK,MAAM,GAAG;CAE5E,KAAK,MAAM,CAAC,GAAG,SAAS,MAAM,SAAS,EAErC,OAAO,GAAG,aAAa,GADL,MAAM,MAAM,SAAS,IAAI,MAAM,IACb,GAAG,MAAM,QAAQ,KAAK,GAAG;;AAIjE,MAAa,MAAM;CACjB,OAAO,OAAe,UAAoB,KAAK,QAAQ,OAAO,MAAM;CACpE,OAAO,OAAe,UAAoB,KAAK,QAAQ,OAAO,MAAM;CACpE,QAAQ,OAAe,UAAoB,KAAK,SAAS,OAAO,MAAM;CACtE,MAAM,OAAe,OAAwB;EAC3C,KAAK,SAAS,OAAO,MAAM;E
AE3B,IAAI,WAAW,EACb,MAAM,IAAI,WAAW,MAAM;EAG7B,QAAQ,KAAK,EAAE;;CAElB"}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import { ReleaseSlug } from "@interfere/types/releases/slug";
|
|
2
|
+
|
|
3
|
+
//#region src/internal/release-slug.d.ts
|
|
4
|
+
/**
|
|
5
|
+
* Walks the same env keys at build time (`withInterfere`) and at runtime
|
|
6
|
+
* (server-side `register()`), then falls back to `git rev-parse HEAD`. Both
|
|
7
|
+
* call sites resolving the same SHA → both derive the same `release.slug`,
|
|
8
|
+
* so server and client spans agree by construction.
|
|
9
|
+
*
|
|
10
|
+
* Override path for non-CI builds: set `INTERFERE_SOURCE_ID` (or any other
|
|
11
|
+
* key in `releaseSourceIdEnvKeys`) on both the build env and the runtime
|
|
12
|
+
* env. The `interfere.buildId` next.config knob was removed in 10.0 because
|
|
13
|
+
* it only worked at build time and caused server/client slug drift.
|
|
14
|
+
*
|
|
15
|
+
* `runGitCommand` (`node:child_process`) keeps this module on the Node side
|
|
16
|
+
* of `@interfere/next`'s dual entry — the edge entrypoint
|
|
17
|
+
* (`instrumentation.edge.ts`) intentionally doesn't import this file.
|
|
18
|
+
*/
|
|
19
|
+
declare function resolveCommitSha(): string | null;
|
|
20
|
+
declare function resolveReleaseSlug(): {
|
|
21
|
+
commitSha: string | null;
|
|
22
|
+
slug: ReleaseSlug | null;
|
|
23
|
+
};
|
|
24
|
+
//#endregion
|
|
25
|
+
export { resolveCommitSha, resolveReleaseSlug };
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"release-slug.d.mts","names":[],"sources":["../../src/internal/release-slug.ts"],"mappings":";;;;;AAwBA;;;;;AAOA;;;;;;;;iBAPgB,gBAAA,CAAA;AAAA,iBAOA,kBAAA,CAAA;EACd,SAAA;EACA,IAAA,EAAM,WAAA;AAAA"}
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
import { runGitCommand } from "./build/release/git.mjs";
|
|
2
|
+
import { readFirstEnvValue } from "@interfere/types/sdk/env";
|
|
3
|
+
import { releaseSourceIdEnvKeys } from "@interfere/types/integrations";
|
|
4
|
+
import { deriveReleaseSlug } from "@interfere/types/releases/slug";
|
|
5
|
+
//#region src/internal/release-slug.ts
|
|
6
|
+
/**
|
|
7
|
+
* Walks the same env keys at build time (`withInterfere`) and at runtime
|
|
8
|
+
* (server-side `register()`), then falls back to `git rev-parse HEAD`. Both
|
|
9
|
+
* call sites resolving the same SHA → both derive the same `release.slug`,
|
|
10
|
+
* so server and client spans agree by construction.
|
|
11
|
+
*
|
|
12
|
+
* Override path for non-CI builds: set `INTERFERE_SOURCE_ID` (or any other
|
|
13
|
+
* key in `releaseSourceIdEnvKeys`) on both the build env and the runtime
|
|
14
|
+
* env. The `interfere.buildId` next.config knob was removed in 10.0 because
|
|
15
|
+
* it only worked at build time and caused server/client slug drift.
|
|
16
|
+
*
|
|
17
|
+
* `runGitCommand` (`node:child_process`) keeps this module on the Node side
|
|
18
|
+
* of `@interfere/next`'s dual entry — the edge entrypoint
|
|
19
|
+
* (`instrumentation.edge.ts`) intentionally doesn't import this file.
|
|
20
|
+
*/
|
|
21
|
+
function resolveCommitSha() {
|
|
22
|
+
return readFirstEnvValue(process.env, releaseSourceIdEnvKeys) ?? runGitCommand("git rev-parse HEAD");
|
|
23
|
+
}
|
|
24
|
+
function resolveReleaseSlug() {
|
|
25
|
+
const commitSha = resolveCommitSha();
|
|
26
|
+
return {
|
|
27
|
+
commitSha,
|
|
28
|
+
slug: commitSha ? deriveReleaseSlug(commitSha) : null
|
|
29
|
+
};
|
|
30
|
+
}
|
|
31
|
+
//#endregion
|
|
32
|
+
export { resolveCommitSha, resolveReleaseSlug };
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"release-slug.mjs","names":[],"sources":["../../src/internal/release-slug.ts"],"sourcesContent":["import { releaseSourceIdEnvKeys } from \"@interfere/types/integrations\";\nimport {\n deriveReleaseSlug,\n type ReleaseSlug,\n} from \"@interfere/types/releases/slug\";\nimport { readFirstEnvValue } from \"@interfere/types/sdk/env\";\n\nimport { runGitCommand } from \"./build/release/git.js\";\n\n/**\n * Walks the same env keys at build time (`withInterfere`) and at runtime\n * (server-side `register()`), then falls back to `git rev-parse HEAD`. Both\n * call sites resolving the same SHA → both derive the same `release.slug`,\n * so server and client spans agree by construction.\n *\n * Override path for non-CI builds: set `INTERFERE_SOURCE_ID` (or any other\n * key in `releaseSourceIdEnvKeys`) on both the build env and the runtime\n * env. The `interfere.buildId` next.config knob was removed in 10.0 because\n * it only worked at build time and caused server/client slug drift.\n *\n * `runGitCommand` (`node:child_process`) keeps this module on the Node side\n * of `@interfere/next`'s dual entry — the edge entrypoint\n * (`instrumentation.edge.ts`) intentionally doesn't import this file.\n */\nexport function resolveCommitSha(): string | null {\n return (\n readFirstEnvValue(process.env, releaseSourceIdEnvKeys) ??\n runGitCommand(\"git rev-parse HEAD\")\n );\n}\n\nexport function resolveReleaseSlug(): {\n commitSha: string | null;\n slug: ReleaseSlug | null;\n} {\n const commitSha = resolveCommitSha();\n return {\n commitSha,\n slug: commitSha ? deriveReleaseSlug(commitSha) : null,\n };\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAwBA,SAAgB,mBAAkC;CAChD,OACE,kBAAkB,QAAQ,KAAK,uBAAuB,IACtD,cAAc,qBAAqB;;AAIvC,SAAgB,qBAGd;CACA,MAAM,YAAY,kBAAkB;CACpC,OAAO;EACL;EACA,MAAM,YAAY,kBAAkB,UAAU,GAAG;EAClD"}
|