@tyndall/shared 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +29 -0
- package/dist/cache-root.d.ts +10 -0
- package/dist/cache-root.d.ts.map +1 -0
- package/dist/cache-root.js +4 -0
- package/dist/fs.d.ts +7 -0
- package/dist/fs.d.ts.map +1 -0
- package/dist/fs.js +37 -0
- package/dist/hashing.d.ts +9 -0
- package/dist/hashing.d.ts.map +1 -0
- package/dist/hashing.js +65 -0
- package/dist/index.d.ts +14 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +7 -0
- package/dist/logger.d.ts +21 -0
- package/dist/logger.d.ts.map +1 -0
- package/dist/logger.js +103 -0
- package/dist/module-graph.d.ts +26 -0
- package/dist/module-graph.d.ts.map +1 -0
- package/dist/module-graph.js +231 -0
- package/dist/runtime-info.d.ts +3 -0
- package/dist/runtime-info.d.ts.map +1 -0
- package/dist/runtime-info.js +5 -0
- package/dist/timing.d.ts +7 -0
- package/dist/timing.d.ts.map +1 -0
- package/dist/timing.js +14 -0
- package/package.json +23 -0
package/README.md
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
# @tyndall/shared
|
|
2
|
+
|
|
3
|
+
## Overview
|
|
4
|
+
Shared utility package for hashing, file operations, timing, cache keys, and platform metadata.
|
|
5
|
+
|
|
6
|
+
## Responsibilities
|
|
7
|
+
- Provide deterministic hashing and stable serialization helpers
|
|
8
|
+
- Provide safe file and cache utility primitives
|
|
9
|
+
- Keep reusable cross-package helpers independent of framework layers
|
|
10
|
+
- Provide shared logger primitives with severity filtering and readable level-based formatting
|
|
11
|
+
|
|
12
|
+
## Public API Highlights
|
|
13
|
+
- hash and normalizePath
|
|
14
|
+
- readFileSafe and writeFileAtomic
|
|
15
|
+
- computeCacheRootKey and resolveCacheRoot
|
|
16
|
+
|
|
17
|
+
## Development
|
|
18
|
+
- Build: bun run --filter @tyndall/shared build
|
|
19
|
+
- Test (from workspace root): bun test
|
|
20
|
+
|
|
21
|
+
## Documentation
|
|
22
|
+
- Package specification: [spec.md](./spec.md)
|
|
23
|
+
- Package architecture: [architecture.md](./architecture.md)
|
|
24
|
+
- Package changes: [CHANGELOG.md](./CHANGELOG.md)
|
|
25
|
+
|
|
26
|
+
## Maintenance Rules
|
|
27
|
+
- Keep this document aligned with implemented package behavior.
|
|
28
|
+
- Update spec.md and architecture.md whenever package contracts or design boundaries change.
|
|
29
|
+
- Record user-visible package changes in CHANGELOG.md.
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
export interface CacheRootKeyInput {
|
|
2
|
+
projectId: string;
|
|
3
|
+
hyperVersion: string;
|
|
4
|
+
bunVersion: string;
|
|
5
|
+
uiAdapter: string;
|
|
6
|
+
osArch: string;
|
|
7
|
+
}
|
|
8
|
+
export declare const computeCacheRootKey: (input: CacheRootKeyInput) => string;
|
|
9
|
+
export declare const resolveCacheRoot: (cacheDir: string, rootKey: string) => string;
|
|
10
|
+
//# sourceMappingURL=cache-root.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"cache-root.d.ts","sourceRoot":"","sources":["../src/cache-root.ts"],"names":[],"mappings":"AAGA,MAAM,WAAW,iBAAiB;IAChC,SAAS,EAAE,MAAM,CAAC;IAClB,YAAY,EAAE,MAAM,CAAC;IACrB,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,MAAM,CAAC;IAClB,MAAM,EAAE,MAAM,CAAC;CAChB;AAED,eAAO,MAAM,mBAAmB,GAAI,OAAO,iBAAiB,KAAG,MACjC,CAAC;AAE/B,eAAO,MAAM,gBAAgB,GAAI,UAAU,MAAM,EAAE,SAAS,MAAM,KAAG,MAC5C,CAAC"}
|
package/dist/fs.d.ts
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
export type WriteFileAtomicOptions = {
|
|
2
|
+
encoding?: BufferEncoding;
|
|
3
|
+
mode?: number;
|
|
4
|
+
};
|
|
5
|
+
export declare const readFileSafe: (filePath: string, encoding?: BufferEncoding) => Promise<string | Buffer | null>;
|
|
6
|
+
export declare const writeFileAtomic: (filePath: string, data: string | Buffer | Uint8Array, options?: WriteFileAtomicOptions) => Promise<void>;
|
|
7
|
+
//# sourceMappingURL=fs.d.ts.map
|
package/dist/fs.d.ts.map
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"fs.d.ts","sourceRoot":"","sources":["../src/fs.ts"],"names":[],"mappings":"AAIA,MAAM,MAAM,sBAAsB,GAAG;IACnC,QAAQ,CAAC,EAAE,cAAc,CAAC;IAC1B,IAAI,CAAC,EAAE,MAAM,CAAC;CACf,CAAC;AAEF,eAAO,MAAM,YAAY,GACvB,UAAU,MAAM,EAChB,WAAW,cAAc,KACxB,OAAO,CAAC,MAAM,GAAG,MAAM,GAAG,IAAI,CAYhC,CAAC;AAEF,eAAO,MAAM,eAAe,GAC1B,UAAU,MAAM,EAChB,MAAM,MAAM,GAAG,MAAM,GAAG,UAAU,EAClC,UAAS,sBAA2B,KACnC,OAAO,CAAC,IAAI,CAmBd,CAAC"}
|
package/dist/fs.js
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
import { mkdir, readFile, rename, rm, writeFile } from "node:fs/promises";
|
|
2
|
+
import { dirname } from "node:path";
|
|
3
|
+
import { randomUUID } from "node:crypto";
|
|
4
|
+
/**
 * Read a file, resolving to `null` when it does not exist.
 *
 * Resolves to a string when `encoding` is provided, otherwise a Buffer.
 * Any error other than ENOENT (permissions, I/O failure, …) is rethrown.
 */
export const readFileSafe = async (filePath, encoding) => {
    try {
        return encoding
            ? await readFile(filePath, encoding)
            : await readFile(filePath);
    }
    catch (error) {
        const missing = error.code === "ENOENT";
        if (!missing) {
            throw error;
        }
        return null;
    }
};
|
|
18
|
+
/**
 * Atomically write `data` to `filePath`.
 *
 * Writes to a uniquely named temp file in the target directory, then renames
 * it over the destination so readers never observe a partially written file.
 * Parent directories are created as needed.
 *
 * On platforms where rename cannot replace an existing file (Windows may
 * surface EEXIST/EPERM), the destination is removed and the rename retried.
 * The temp file is always removed when the write ultimately fails.
 */
export const writeFileAtomic = async (filePath, data, options = {}) => {
    await mkdir(dirname(filePath), { recursive: true });
    // Unique name: pid + timestamp + uuid avoids collisions between concurrent writers.
    const tempPath = `${filePath}.${process.pid}.${Date.now()}.${randomUUID()}.tmp`;
    await writeFile(tempPath, data, options);
    try {
        await rename(tempPath, filePath);
    }
    catch (error) {
        const code = error.code;
        if (code !== "EEXIST" && code !== "EPERM") {
            await rm(tempPath, { force: true });
            throw error;
        }
        try {
            // Windows cannot rename over an existing file: drop it and retry once.
            await rm(filePath, { force: true });
            await rename(tempPath, filePath);
        }
        catch (retryError) {
            // Fix: previously the temp file leaked when the retry rename failed.
            await rm(tempPath, { force: true });
            throw retryError;
        }
    }
};
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
export type HashInput = string | ArrayBuffer | Uint8Array;
|
|
2
|
+
export declare const normalizeLineEndings: (value: string) => string;
|
|
3
|
+
export declare const normalizePath: (value: string) => string;
|
|
4
|
+
export declare const stableStringify: (value: unknown) => string;
|
|
5
|
+
export declare const hash: (input: HashInput) => string;
|
|
6
|
+
export declare const moduleContentHash: (content: string) => string;
|
|
7
|
+
export declare const configHash: (config: unknown) => string;
|
|
8
|
+
export declare const paramsKey: (params: unknown) => string;
|
|
9
|
+
//# sourceMappingURL=hashing.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"hashing.d.ts","sourceRoot":"","sources":["../src/hashing.ts"],"names":[],"mappings":"AAEA,MAAM,MAAM,SAAS,GAAG,MAAM,GAAG,WAAW,GAAG,UAAU,CAAC;AAiC1D,eAAO,MAAM,oBAAoB,GAAI,OAAO,MAAM,KAAG,MACtB,CAAC;AAEhC,eAAO,MAAM,aAAa,GAAI,OAAO,MAAM,KAAG,MAAmC,CAAC;AA4ClF,eAAO,MAAM,eAAe,GAAI,OAAO,OAAO,KAAG,MACT,CAAC;AAEzC,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAIvC,CAAC;AAEF,eAAO,MAAM,iBAAiB,GAAI,SAAS,MAAM,KAAG,MACf,CAAC;AAEtC,eAAO,MAAM,UAAU,GAAI,QAAQ,OAAO,KAAG,MAAuC,CAAC;AAErF,eAAO,MAAM,SAAS,GAAI,QAAQ,OAAO,KAAG,MAAiC,CAAC"}
|
package/dist/hashing.js
ADDED
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
import { createHash } from "node:crypto";
|
|
2
|
+
// Shared text encoder; hashing always operates on UTF-8 bytes.
const encoder = new TextEncoder();

/**
 * True for objects created via `{}` / `new Object()` / `Object.create(null)`.
 * Class instances, arrays, Maps, etc. are excluded so they can be handled
 * explicitly by the canonicalizer below.
 */
const isPlainObject = (value) => {
    if (!value || typeof value !== "object") {
        return false;
    }
    const proto = Object.getPrototypeOf(value);
    return proto === Object.prototype || proto === null;
};

/** Coerce any supported HashInput into a Uint8Array for the digest. */
const toHashBuffer = (input) => {
    if (typeof input === "string") {
        return encoder.encode(input);
    }
    if (input instanceof ArrayBuffer) {
        return new Uint8Array(input);
    }
    if (input instanceof Uint8Array) {
        return input;
    }
    // Defensive fallback for untyped callers: hash the string form.
    return encoder.encode(String(input));
};

/** Normalize CRLF / lone CR to LF so hashes are line-ending independent. */
export const normalizeLineEndings = (value) => value.replace(/\r\n?/g, "\n");

/** Normalize Windows path separators to forward slashes. */
export const normalizePath = (value) => value.replace(/\\/g, "/");

/**
 * Recursively convert `value` into a canonical, JSON-serializable form:
 * plain-object keys sorted, Map/Set/Date flattened deterministically,
 * bigints stringified, `undefined` mapped to `null`.
 */
const stableSortValue = (value) => {
    if (value === null || typeof value !== "object") {
        if (typeof value === "bigint") {
            return value.toString();
        }
        if (typeof value === "undefined") {
            return null;
        }
        return value;
    }
    if (Array.isArray(value)) {
        return value.map((entry) => stableSortValue(entry));
    }
    if (value instanceof Date) {
        return value.toISOString();
    }
    if (value instanceof Map) {
        // Sort by stringified key so insertion order does not affect output.
        const entries = Array.from(value.entries()).sort(([a], [b]) => String(a).localeCompare(String(b)));
        return entries.map(([key, val]) => [String(key), stableSortValue(val)]);
    }
    if (value instanceof Set) {
        const entries = Array.from(value.values()).map((entry) => stableSortValue(entry));
        return entries.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
    }
    if (isPlainObject(value)) {
        const sorted = {};
        for (const key of Object.keys(value).sort()) {
            sorted[key] = stableSortValue(value[key]);
        }
        return sorted;
    }
    // Non-plain objects (class instances, boxed primitives): reduce to plain
    // data via a JSON round-trip, then canonicalize the result as well.
    // Fix: the round-tripped value was previously returned as-is, which let
    // property insertion order leak into the supposedly stable output.
    return stableSortValue(JSON.parse(JSON.stringify(value)));
};

/** Deterministic JSON serialization (stable across key/entry ordering). */
export const stableStringify = (value) => JSON.stringify(stableSortValue(value));

/** SHA-256 hex digest of the input. */
export const hash = (input) => {
    const hashInstance = createHash("sha256");
    hashInstance.update(toHashBuffer(input));
    return hashInstance.digest("hex");
};

/** Hash of module source text, insensitive to line-ending style. */
export const moduleContentHash = (content) => hash(normalizeLineEndings(content));

/** Hash of an arbitrary config value via stable serialization. */
export const configHash = (config) => hash(stableStringify(config));

/** Stable cache-key string for arbitrary params. */
export const paramsKey = (params) => stableStringify(params);
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
export { hash, moduleContentHash, configHash, paramsKey, normalizeLineEndings, normalizePath, stableStringify, } from "./hashing.js";
|
|
2
|
+
export type { HashInput } from "./hashing.js";
|
|
3
|
+
export { createLogger } from "./logger.js";
|
|
4
|
+
export type { Logger, LoggerOptions, LogLevel } from "./logger.js";
|
|
5
|
+
export { readFileSafe, writeFileAtomic } from "./fs.js";
|
|
6
|
+
export type { WriteFileAtomicOptions } from "./fs.js";
|
|
7
|
+
export { measure } from "./timing.js";
|
|
8
|
+
export type { MeasureResult } from "./timing.js";
|
|
9
|
+
export { computeCacheRootKey, resolveCacheRoot } from "./cache-root.js";
|
|
10
|
+
export type { CacheRootKeyInput } from "./cache-root.js";
|
|
11
|
+
export { getBunVersion, getOsArch } from "./runtime-info.js";
|
|
12
|
+
export { buildModuleGraphSnapshot, computeGraphKey, readModuleGraphCache, writeModuleGraphCache, DEFAULT_IMPORT_EXTENSIONS, } from "./module-graph.js";
|
|
13
|
+
export type { ModuleGraphSnapshot, ModuleGraphCache } from "./module-graph.js";
|
|
14
|
+
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,IAAI,EACJ,iBAAiB,EACjB,UAAU,EACV,SAAS,EACT,oBAAoB,EACpB,aAAa,EACb,eAAe,GAChB,MAAM,cAAc,CAAC;AACtB,YAAY,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAE9C,OAAO,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAC3C,YAAY,EAAE,MAAM,EAAE,aAAa,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AAEnE,OAAO,EAAE,YAAY,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AACxD,YAAY,EAAE,sBAAsB,EAAE,MAAM,SAAS,CAAC;AAEtD,OAAO,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AACtC,YAAY,EAAE,aAAa,EAAE,MAAM,aAAa,CAAC;AAEjD,OAAO,EAAE,mBAAmB,EAAE,gBAAgB,EAAE,MAAM,iBAAiB,CAAC;AACxE,YAAY,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AAEzD,OAAO,EAAE,aAAa,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAE7D,OAAO,EACL,wBAAwB,EACxB,eAAe,EACf,oBAAoB,EACpB,qBAAqB,EACrB,yBAAyB,GAC1B,MAAM,mBAAmB,CAAC;AAC3B,YAAY,EAAE,mBAAmB,EAAE,gBAAgB,EAAE,MAAM,mBAAmB,CAAC"}
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
export { hash, moduleContentHash, configHash, paramsKey, normalizeLineEndings, normalizePath, stableStringify, } from "./hashing.js";
|
|
2
|
+
export { createLogger } from "./logger.js";
|
|
3
|
+
export { readFileSafe, writeFileAtomic } from "./fs.js";
|
|
4
|
+
export { measure } from "./timing.js";
|
|
5
|
+
export { computeCacheRootKey, resolveCacheRoot } from "./cache-root.js";
|
|
6
|
+
export { getBunVersion, getOsArch } from "./runtime-info.js";
|
|
7
|
+
export { buildModuleGraphSnapshot, computeGraphKey, readModuleGraphCache, writeModuleGraphCache, DEFAULT_IMPORT_EXTENSIONS, } from "./module-graph.js";
|
package/dist/logger.d.ts
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
export type LogLevel = "debug" | "info" | "warn" | "error" | "fatal";
|
|
2
|
+
export interface LoggerOptions {
|
|
3
|
+
level?: LogLevel;
|
|
4
|
+
json?: boolean;
|
|
5
|
+
prefix?: string;
|
|
6
|
+
color?: boolean;
|
|
7
|
+
meta?: Record<string, unknown>;
|
|
8
|
+
sink?: (level: LogLevel, line: string) => void;
|
|
9
|
+
time?: () => string;
|
|
10
|
+
}
|
|
11
|
+
export interface Logger {
|
|
12
|
+
level: LogLevel;
|
|
13
|
+
child: (meta: Record<string, unknown>) => Logger;
|
|
14
|
+
debug: (message: string, meta?: Record<string, unknown>) => void;
|
|
15
|
+
info: (message: string, meta?: Record<string, unknown>) => void;
|
|
16
|
+
warn: (message: string, meta?: Record<string, unknown>) => void;
|
|
17
|
+
error: (message: string, meta?: Record<string, unknown>) => void;
|
|
18
|
+
fatal: (message: string, meta?: Record<string, unknown>) => void;
|
|
19
|
+
}
|
|
20
|
+
export declare const createLogger: (options?: LoggerOptions) => Logger;
|
|
21
|
+
//# sourceMappingURL=logger.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"logger.d.ts","sourceRoot":"","sources":["../src/logger.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,QAAQ,GAAG,OAAO,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,GAAG,OAAO,CAAC;AAErE,MAAM,WAAW,aAAa;IAC5B,KAAK,CAAC,EAAE,QAAQ,CAAC;IACjB,IAAI,CAAC,EAAE,OAAO,CAAC;IACf,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,KAAK,CAAC,EAAE,OAAO,CAAC;IAChB,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAC/B,IAAI,CAAC,EAAE,CAAC,KAAK,EAAE,QAAQ,EAAE,IAAI,EAAE,MAAM,KAAK,IAAI,CAAC;IAC/C,IAAI,CAAC,EAAE,MAAM,MAAM,CAAC;CACrB;AAED,MAAM,WAAW,MAAM;IACrB,KAAK,EAAE,QAAQ,CAAC;IAChB,KAAK,EAAE,CAAC,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,KAAK,MAAM,CAAC;IACjD,KAAK,EAAE,CAAC,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,KAAK,IAAI,CAAC;IACjE,IAAI,EAAE,CAAC,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,KAAK,IAAI,CAAC;IAChE,IAAI,EAAE,CAAC,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,KAAK,IAAI,CAAC;IAChE,KAAK,EAAE,CAAC,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,KAAK,IAAI,CAAC;IACjE,KAAK,EAAE,CAAC,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,KAAK,IAAI,CAAC;CAClE;AAoFD,eAAO,MAAM,YAAY,GAAI,UAAS,aAAkB,KAAG,MA0C1D,CAAC"}
|
package/dist/logger.js
ADDED
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
// Numeric severity ranks; a message is emitted when its rank is at or above
// the logger's configured level.
const levelOrder = {
    debug: 10,
    info: 20,
    warn: 30,
    error: 40,
    fatal: 50,
};

// Default timestamp provider (ISO-8601, UTC).
const defaultTime = () => new Date().toISOString();

// Raw ANSI escape sequences used for level colorization.
const ANSI = {
    reset: "\u001b[0m",
    gray: "\u001b[90m",
    cyan: "\u001b[36m",
    yellow: "\u001b[33m",
    red: "\u001b[31m",
    fatal: "\u001b[97;41m",
};

// Per-level color lookup.
const levelColor = {
    debug: ANSI.gray,
    info: ANSI.cyan,
    warn: ANSI.yellow,
    error: ANSI.red,
    fatal: ANSI.fatal,
};

// Route errors/fatals to stderr, warnings through console.warn, the rest to stdout.
const defaultSink = (level, line) => {
    switch (level) {
        case "fatal":
        case "error":
            console.error(line);
            break;
        case "warn":
            console.warn(line);
            break;
        default:
            console.log(line);
    }
};

// Color only on an interactive terminal, and never when NO_COLOR or CI is set.
const shouldUseColor = () => {
    const { NO_COLOR, CI } = process.env;
    if (NO_COLOR || CI) {
        return false;
    }
    return Boolean(process.stdout.isTTY);
};

// Wrap a level label in its ANSI color when color output is enabled.
const colorizeLevel = (level, label, enabled) =>
    enabled ? `${levelColor[level]}${label}${ANSI.reset}` : label;

// Render one log line, either as a JSON object or as human-readable text.
const formatLine = (level, message, meta, options) => {
    const payload = {
        level,
        msg: message,
        time: options.time(),
        ...meta,
    };
    if (options.json) {
        return JSON.stringify(payload);
    }
    const prefix = options.prefix ? `[${options.prefix}] ` : "";
    const levelLabel = colorizeLevel(level, level.toUpperCase(), options.color);
    const head = `${prefix}${levelLabel}: ${message}`;
    return Object.keys(meta).length === 0 ? head : `${head} ${JSON.stringify(meta)}`;
};

/**
 * Create a leveled logger. Messages below the configured level are dropped.
 * `child(meta)` derives a logger that merges extra metadata into every line
 * while sharing the parent's level, sink, formatting, and clock.
 */
export const createLogger = (options = {}) => {
    // Resolve every option once so all child loggers share the same settings.
    const baseOptions = {
        level: options.level ?? "info",
        json: options.json ?? false,
        prefix: options.prefix ?? "",
        color: options.color ?? shouldUseColor(),
        meta: options.meta ?? {},
        sink: options.sink ?? defaultSink,
        time: options.time ?? defaultTime,
    };
    const formatOptions = {
        json: baseOptions.json,
        prefix: baseOptions.prefix,
        color: baseOptions.color,
        time: baseOptions.time,
    };
    const makeLogger = (meta) => {
        const emit = (level, message, extra) => {
            if (levelOrder[level] < levelOrder[baseOptions.level]) {
                return;
            }
            const merged = { ...meta, ...(extra ?? {}) };
            baseOptions.sink(level, formatLine(level, message, merged, formatOptions));
        };
        return {
            level: baseOptions.level,
            child: (childMeta) => makeLogger({ ...meta, ...childMeta }),
            debug: (message, extra) => emit("debug", message, extra),
            info: (message, extra) => emit("info", message, extra),
            warn: (message, extra) => emit("warn", message, extra),
            error: (message, extra) => emit("error", message, extra),
            fatal: (message, extra) => emit("fatal", message, extra),
        };
    };
    return makeLogger(baseOptions.meta);
};
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
export type ModuleGraphSnapshot = {
|
|
2
|
+
moduleGraph: Record<string, string[]>;
|
|
3
|
+
routeToModules: Record<string, string[]>;
|
|
4
|
+
moduleToRoutes: Record<string, string[]>;
|
|
5
|
+
};
|
|
6
|
+
export declare const DEFAULT_IMPORT_EXTENSIONS: string[];
|
|
7
|
+
type RouteEntry = {
|
|
8
|
+
id: string;
|
|
9
|
+
filePath: string;
|
|
10
|
+
};
|
|
11
|
+
export declare const buildModuleGraphSnapshot: (routes: RouteEntry[], options?: {
|
|
12
|
+
extensions?: string[];
|
|
13
|
+
}) => ModuleGraphSnapshot;
|
|
14
|
+
export type ModuleGraphCache = ModuleGraphSnapshot & {
|
|
15
|
+
graphKey: string;
|
|
16
|
+
generatedAt: number;
|
|
17
|
+
};
|
|
18
|
+
export declare const readModuleGraphCache: (cacheDir: string, graphKey: string) => Promise<ModuleGraphSnapshot | null>;
|
|
19
|
+
export declare const writeModuleGraphCache: (cacheDir: string, graphKey: string, snapshot: ModuleGraphSnapshot) => Promise<void>;
|
|
20
|
+
export declare const computeGraphKey: (input: {
|
|
21
|
+
lockfileHash: string;
|
|
22
|
+
tsconfigHash: string;
|
|
23
|
+
bundlerOptionsHash: string;
|
|
24
|
+
}) => string;
|
|
25
|
+
export {};
|
|
26
|
+
//# sourceMappingURL=module-graph.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"module-graph.d.ts","sourceRoot":"","sources":["../src/module-graph.ts"],"names":[],"mappings":"AAKA,MAAM,MAAM,mBAAmB,GAAG;IAChC,WAAW,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,EAAE,CAAC,CAAC;IACtC,cAAc,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,EAAE,CAAC,CAAC;IACzC,cAAc,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,EAAE,CAAC,CAAC;CAC1C,CAAC;AAEF,eAAO,MAAM,yBAAyB,UASrC,CAAC;AAEF,KAAK,UAAU,GAAG;IAChB,EAAE,EAAE,MAAM,CAAC;IACX,QAAQ,EAAE,MAAM,CAAC;CAClB,CAAC;AAsIF,eAAO,MAAM,wBAAwB,GACnC,QAAQ,UAAU,EAAE,EACpB,UAAS;IAAE,UAAU,CAAC,EAAE,MAAM,EAAE,CAAA;CAAO,KACtC,mBA6BF,CAAC;AAEF,MAAM,MAAM,gBAAgB,GAAG,mBAAmB,GAAG;IACnD,QAAQ,EAAE,MAAM,CAAC;IACjB,WAAW,EAAE,MAAM,CAAC;CACrB,CAAC;AAkDF,eAAO,MAAM,oBAAoB,GAC/B,UAAU,MAAM,EAChB,UAAU,MAAM,KACf,OAAO,CAAC,mBAAmB,GAAG,IAAI,CAiBpC,CAAC;AAEF,eAAO,MAAM,qBAAqB,GAChC,UAAU,MAAM,EAChB,UAAU,MAAM,EAChB,UAAU,mBAAmB,KAC5B,OAAO,CAAC,IAAI,CA0Cd,CAAC;AAEF,eAAO,MAAM,eAAe,GAAI,OAAO;IACrC,YAAY,EAAE,MAAM,CAAC;IACrB,YAAY,EAAE,MAAM,CAAC;IACrB,kBAAkB,EAAE,MAAM,CAAC;CAC5B,KAAG,MAAsC,CAAC"}
|
|
@@ -0,0 +1,231 @@
|
|
|
1
|
+
import { readFileSync, statSync } from "node:fs";
|
|
2
|
+
import { dirname, extname, join, resolve } from "node:path";
|
|
3
|
+
import { hash, normalizePath, stableStringify } from "./hashing.js";
|
|
4
|
+
import { readFileSafe, writeFileAtomic } from "./fs.js";
|
|
5
|
+
// Extensions tried when resolving extensionless relative imports, in priority
// order: TypeScript first, then the JavaScript / module-suffix variants.
export const DEFAULT_IMPORT_EXTENSIONS = [".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs", ".mts", ".cts"];
|
|
15
|
+
/**
 * Scan source text for import-like specifiers: static import/export-from,
 * bare `import "x"`, dynamic `import("x")`, and CommonJS `require("x")`.
 * Regex-based — a fast approximation, not a full parser (strings or comments
 * that look like imports may produce false positives).
 */
const extractImportSpecifiers = (source) => {
    const regex = /(?:import|export)\s+[^'"]*?from\s*['"]([^'"]+)['"]|import\s*['"]([^'"]+)['"]|import\(\s*['"]([^'"]+)['"]\s*\)|require\(\s*['"]([^'"]+)['"]\s*\)/g;
    const found = [];
    for (const match of source.matchAll(regex)) {
        // Exactly one capture group is populated per alternative.
        const specifier = match[1] ?? match[2] ?? match[3] ?? match[4];
        if (specifier) {
            found.push(specifier);
        }
    }
    return found;
};
|
|
27
|
+
/**
 * Resolve a relative import specifier to an existing file on disk.
 *
 * Resolution order:
 *   1. If the specifier already carries an extension, check that exact file.
 *   2. Otherwise try each candidate extension in order.
 *   3. Finally treat the candidate as a directory and look for index.<ext>.
 * Returns the resolved absolute path, or null when nothing matches.
 */
const resolveModulePath = (baseDir, specifier, extensions) => {
    const candidate = resolve(baseDir, specifier);
    const probe = (filePath) => {
        try {
            const stats = statSync(filePath);
            if (stats.isFile()) {
                return filePath;
            }
            if (stats.isDirectory()) {
                // Directory import: fall back to index.<ext> inside it.
                for (const ext of extensions) {
                    const indexPath = join(filePath, `index${ext}`);
                    try {
                        if (statSync(indexPath).isFile()) {
                            return indexPath;
                        }
                    }
                    catch {
                        // This index candidate is missing; try the next extension.
                    }
                }
            }
        }
        catch {
            // Path does not exist at all.
        }
        return null;
    };
    if (extname(candidate)) {
        return probe(candidate);
    }
    for (const ext of extensions) {
        const withExtension = probe(`${candidate}${ext}`);
        if (withExtension) {
            return withExtension;
        }
    }
    return probe(candidate);
};
|
|
67
|
+
/**
 * Depth-first walk from the entry files, following relative imports only,
 * producing an adjacency map of normalized absolute module paths to their
 * sorted, de-duplicated direct dependencies.
 *
 * Unreadable files contribute an empty dependency list instead of failing
 * the whole walk. Bare (package) specifiers are ignored.
 */
const buildModuleGraph = (entryPaths, extensions) => {
    const graph = {};
    const seen = new Set();
    const pending = entryPaths.map((entry) => normalizePath(resolve(entry)));
    while (pending.length > 0) {
        const modulePath = pending.pop();
        if (!modulePath || seen.has(modulePath)) {
            continue;
        }
        seen.add(modulePath);
        const deps = [];
        try {
            const source = readFileSync(modulePath, "utf-8");
            const baseDir = dirname(modulePath);
            for (const specifier of extractImportSpecifiers(source)) {
                if (!specifier.startsWith(".")) {
                    continue; // Skip bare package imports.
                }
                const resolved = resolveModulePath(baseDir, specifier, extensions);
                if (resolved) {
                    const normalized = normalizePath(resolved);
                    deps.push(normalized);
                    if (!seen.has(normalized)) {
                        pending.push(normalized);
                    }
                }
            }
        }
        catch {
            // Unreadable module: record it with no dependencies.
            deps.length = 0;
        }
        graph[modulePath] = [...new Set(deps)].sort((a, b) => a.localeCompare(b));
    }
    return graph;
};
|
|
105
|
+
/**
 * Return every module reachable from `entry` (inclusive) through the
 * adjacency map, as a lexicographically sorted list of normalized paths.
 * Cycles are handled via the visited set.
 */
const collectTransitiveModules = (entry, graph) => {
    const root = normalizePath(resolve(entry));
    const reached = new Set();
    const pending = [root];
    while (pending.length > 0) {
        const current = pending.pop();
        if (!current || reached.has(current)) {
            continue;
        }
        reached.add(current);
        for (const dep of graph[current] ?? []) {
            if (!reached.has(dep)) {
                pending.push(dep);
            }
        }
    }
    return [...reached].sort((a, b) => a.localeCompare(b));
};
|
|
124
|
+
/**
 * Build the full dependency snapshot for a set of route entry files:
 *   - moduleGraph: direct dependencies per module
 *   - routeToModules: every module (transitively) reachable from each route
 *   - moduleToRoutes: inverse mapping, with sorted route-id lists
 */
export const buildModuleGraphSnapshot = (routes, options = {}) => {
    const extensions = options.extensions ?? DEFAULT_IMPORT_EXTENSIONS;
    const entryPaths = [...new Set(routes.map((route) => normalizePath(resolve(route.filePath))))];
    const moduleGraph = buildModuleGraph(entryPaths, extensions);
    const routeToModules = {};
    const routesByModule = new Map();
    for (const route of routes) {
        const modules = collectTransitiveModules(route.filePath, moduleGraph);
        routeToModules[route.id] = modules;
        for (const modulePath of modules) {
            let bucket = routesByModule.get(modulePath);
            if (!bucket) {
                bucket = new Set();
                routesByModule.set(modulePath, bucket);
            }
            bucket.add(route.id);
        }
    }
    const moduleToRoutes = {};
    for (const [modulePath, routeIds] of routesByModule) {
        moduleToRoutes[modulePath] = [...routeIds].sort((a, b) => a.localeCompare(b));
    }
    return {
        moduleGraph,
        routeToModules,
        moduleToRoutes,
    };
};
|
|
149
|
+
// Cache layout: <cacheDir>/graph/{moduleGraph,routeToModules,moduleToRoutes}.json
const GRAPH_DIR = "graph";
const MODULE_GRAPH_FILE = "moduleGraph.json";
const ROUTE_TO_MODULES_FILE = "routeToModules.json";
const MODULE_TO_ROUTES_FILE = "moduleToRoutes.json";

/** Sort keys and each value list so serialized cache output is deterministic. */
const normalizeRecord = (record) => {
    const normalized = {};
    for (const key of Object.keys(record).sort((a, b) => a.localeCompare(b))) {
        normalized[key] = [...record[key]].sort((a, b) => a.localeCompare(b));
    }
    return normalized;
};

/** Assemble the cache payload with a fresh generation timestamp. */
const ensureCachePayload = (graphKey, snapshot) => ({
    graphKey,
    generatedAt: Date.now(),
    moduleGraph: normalizeRecord(snapshot.moduleGraph),
    routeToModules: normalizeRecord(snapshot.routeToModules),
    moduleToRoutes: normalizeRecord(snapshot.moduleToRoutes),
});

/**
 * Parse one cache file and extract `field`, returning null on any problem:
 * missing contents, malformed JSON, a stale graphKey, or a non-object field.
 */
const parseCacheFile = (raw, expectedGraphKey, field) => {
    if (!raw) {
        return null;
    }
    try {
        const parsed = JSON.parse(raw.toString());
        if (parsed.graphKey !== expectedGraphKey) {
            return null;
        }
        const value = parsed[field];
        return value && typeof value === "object" ? value : null;
    }
    catch {
        return null;
    }
};
|
|
187
|
+
/**
 * Load a cached module-graph snapshot, returning null unless all three cache
 * files exist, parse cleanly, and carry the expected graphKey.
 */
export const readModuleGraphCache = async (cacheDir, graphKey) => {
    const graphDir = join(cacheDir, GRAPH_DIR);
    const load = async (fileName, field) =>
        parseCacheFile(await readFileSafe(join(graphDir, fileName)), graphKey, field);
    const [moduleGraph, routeToModules, moduleToRoutes] = await Promise.all([
        load(MODULE_GRAPH_FILE, "moduleGraph"),
        load(ROUTE_TO_MODULES_FILE, "routeToModules"),
        load(MODULE_TO_ROUTES_FILE, "moduleToRoutes"),
    ]);
    if (!moduleGraph || !routeToModules || !moduleToRoutes) {
        return null;
    }
    return { moduleGraph, routeToModules, moduleToRoutes };
};
|
|
202
|
+
/**
 * Persist a module-graph snapshot as three cache files (one per section),
 * each stamped with the graphKey and a shared generation timestamp.
 */
export const writeModuleGraphCache = async (cacheDir, graphKey, snapshot) => {
    const graphDir = join(cacheDir, GRAPH_DIR);
    const payload = ensureCachePayload(graphKey, snapshot);
    // One serializer for all three sections keeps the file format consistent
    // (previously the header was hand-repeated for each payload).
    const serialize = (section) => JSON.stringify({
        graphKey: payload.graphKey,
        generatedAt: payload.generatedAt,
        ...section,
    }, null, 2);
    // Important: use atomic writes to avoid partially written cache files.
    await Promise.all([
        writeFileAtomic(join(graphDir, MODULE_GRAPH_FILE), serialize({ moduleGraph: payload.moduleGraph }), { encoding: "utf-8" }),
        writeFileAtomic(join(graphDir, ROUTE_TO_MODULES_FILE), serialize({ routeToModules: payload.routeToModules }), { encoding: "utf-8" }),
        writeFileAtomic(join(graphDir, MODULE_TO_ROUTES_FILE), serialize({ moduleToRoutes: payload.moduleToRoutes }), { encoding: "utf-8" }),
    ]);
};

/** Deterministic cache key derived from the lockfile/tsconfig/bundler hashes. */
export const computeGraphKey = (input) => hash(stableStringify(input));
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"runtime-info.d.ts","sourceRoot":"","sources":["../src/runtime-info.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,aAAa,QAAO,MAGhC,CAAC;AAEF,eAAO,MAAM,SAAS,QAAO,MAA+C,CAAC"}
|
package/dist/timing.d.ts
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"timing.d.ts","sourceRoot":"","sources":["../src/timing.ts"],"names":[],"mappings":"AAEA,MAAM,MAAM,aAAa,CAAC,CAAC,IAAI;IAC7B,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,CAAC,CAAC;CACX,CAAC;AAEF,eAAO,MAAM,OAAO,GAAI,CAAC,EACvB,MAAM,MAAM,EACZ,IAAI,MAAM,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,KACvB,aAAa,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC,CAa7C,CAAC"}
|
package/dist/timing.js
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import { performance } from "node:perf_hooks";
|
|
2
|
+
/**
 * Time a (possibly async) function.
 *
 * Returns `{ name, durationMs, result }`. When `fn` returns a promise-like
 * value, the measurement covers its settlement and a promise of the same
 * shape is produced. Exceptions and rejections propagate unchanged.
 */
export const measure = (name, fn) => {
    const start = performance.now();
    const finish = (result) => ({
        name,
        durationMs: performance.now() - start,
        result,
    });
    const value = fn();
    // Duck-type thenables (not just native Promises) so promises from other
    // realms or custom promise libraries are timed correctly too.
    if (value && typeof value.then === "function") {
        return value.then((result) => finish(result));
    }
    return finish(value);
};
|
package/package.json
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@tyndall/shared",
|
|
3
|
+
"version": "0.0.1",
|
|
4
|
+
"publishConfig": {
|
|
5
|
+
"access": "public"
|
|
6
|
+
},
|
|
7
|
+
"type": "module",
|
|
8
|
+
"main": "dist/index.js",
|
|
9
|
+
"types": "dist/index.d.ts",
|
|
10
|
+
"exports": {
|
|
11
|
+
".": {
|
|
12
|
+
"types": "./dist/index.d.ts",
|
|
13
|
+
"bun": "./src/index.ts",
|
|
14
|
+
"default": "./dist/index.js"
|
|
15
|
+
}
|
|
16
|
+
},
|
|
17
|
+
"files": [
|
|
18
|
+
"dist"
|
|
19
|
+
],
|
|
20
|
+
"scripts": {
|
|
21
|
+
"build": "tsc -p tsconfig.json"
|
|
22
|
+
}
|
|
23
|
+
}
|