@oh-my-pi/pi-utils 13.19.0 → 14.0.3

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "type": "module",
3
3
  "name": "@oh-my-pi/pi-utils",
4
- "version": "13.19.0",
4
+ "version": "14.0.3",
5
5
  "description": "Shared utilities for pi packages",
6
6
  "homepage": "https://github.com/can1357/oh-my-pi",
7
7
  "author": "Can Boluk",
@@ -23,16 +23,22 @@
23
23
  "main": "./src/index.ts",
24
24
  "types": "./src/index.ts",
25
25
  "scripts": {
26
- "check": "tsgo -p tsconfig.json",
27
- "test": "bun test"
26
+ "check": "biome check . && bun run check:types",
27
+ "check:types": "tsgo -p tsconfig.json --noEmit",
28
+ "lint": "biome lint .",
29
+ "test": "bun test",
30
+ "fix": "biome check --write --unsafe .",
31
+ "fmt": "biome format --write ."
28
32
  },
29
33
  "dependencies": {
30
34
  "beautiful-mermaid": "^1.1",
35
+ "handlebars": "^4.7.9",
31
36
  "winston": "^3.19",
32
37
  "winston-daily-rotate-file": "^5.0"
33
38
  },
34
39
  "devDependencies": {
35
- "@types/bun": "^1.3"
40
+ "@types/bun": "^1.3",
41
+ "@oh-my-pi/pi-natives": "14.0.3"
36
42
  },
37
43
  "engines": {
38
44
  "bun": ">=1.3.7"
@@ -48,6 +54,7 @@
48
54
  "./*": {
49
55
  "types": "./src/*.ts",
50
56
  "import": "./src/*.ts"
51
- }
57
+ },
58
+ "./*.js": "./src/*.ts"
52
59
  }
53
60
  }
@@ -0,0 +1,118 @@
1
+ import { YAML } from "bun";
2
+ import { truncate } from "./format";
3
+ import * as logger from "./logger";
4
+
5
+ function stripHtmlComments(content: string): string {
6
+ return content.replace(/<!--[\s\S]*?-->/g, "");
7
+ }
8
+
9
+ /** Convert kebab-case to camelCase (e.g. "thinking-level" -> "thinkingLevel") */
10
+ function kebabToCamel(key: string): string {
11
+ return key.replace(/-([a-z])/g, (_, c) => c.toUpperCase());
12
+ }
13
+
14
+ /** Recursively normalize object keys from kebab-case to camelCase */
15
+ function normalizeKeys<T>(obj: T): T {
16
+ if (obj === null || typeof obj !== "object") {
17
+ return obj;
18
+ }
19
+ if (Array.isArray(obj)) {
20
+ return obj.map(normalizeKeys) as T;
21
+ }
22
+ const result: Record<string, unknown> = {};
23
+ for (const [key, value] of Object.entries(obj as Record<string, unknown>)) {
24
+ const normalizedKey = kebabToCamel(key);
25
+ result[normalizedKey] = normalizeKeys(value);
26
+ }
27
+ return result as T;
28
+ }
29
+
30
+ export class FrontmatterError extends Error {
31
+ constructor(
32
+ error: Error,
33
+ readonly source?: unknown,
34
+ ) {
35
+ super(`Failed to parse YAML frontmatter (${source}): ${error.message}`, { cause: error });
36
+ this.name = "FrontmatterError";
37
+ }
38
+
39
+ toString(): string {
40
+ // Format the error with its message, the source (if present), and the most relevant stack trace
41
+ const details: string[] = [this.message];
42
+ if (this.source !== undefined) {
43
+ details.push(`Source: ${JSON.stringify(this.source)}`);
44
+ }
45
+ if (this.cause && typeof this.cause === "object" && "stack" in this.cause && this.cause.stack) {
46
+ details.push(`Stack:\n${this.cause.stack}`);
47
+ } else if (this.stack) {
48
+ details.push(`Stack:\n${this.stack}`);
49
+ }
50
+ return details.join("\n\n");
51
+ }
52
+ }
53
+
54
+ export interface FrontmatterOptions {
55
+ /** Source of the content (alias: source) */
56
+ location?: unknown;
57
+ /** Source of the content (alias for location) */
58
+ source?: unknown;
59
+ /** Fallback frontmatter values */
60
+ fallback?: Record<string, unknown>;
61
+ /** Normalize line endings and strip HTML comments before parsing (default: true) */
62
+ normalize?: boolean;
63
+ /** Level of error handling */
64
+ level?: "off" | "warn" | "fatal";
65
+ }
66
+
67
+ /**
68
+ * Parse YAML frontmatter from markdown content
69
+ * Returns { frontmatter, body } where body has frontmatter stripped
70
+ */
71
+ export function parseFrontmatter(
72
+ content: string,
73
+ options?: FrontmatterOptions,
74
+ ): { frontmatter: Record<string, unknown>; body: string } {
75
+ const { location, source, fallback, normalize = true, level = "warn" } = options ?? {};
76
+ const loc = location ?? source;
77
+ const frontmatter: Record<string, unknown> = { ...fallback };
78
+
79
+ const normalized = normalize ? stripHtmlComments(content.replace(/\r\n/g, "\n").replace(/\r/g, "\n")) : content;
80
+ if (!normalized.startsWith("---")) {
81
+ return { frontmatter, body: normalized };
82
+ }
83
+
84
+ const endIndex = normalized.indexOf("\n---", 3);
85
+ if (endIndex === -1) {
86
+ return { frontmatter, body: normalized };
87
+ }
88
+
89
+ const metadata = normalized.slice(4, endIndex);
90
+ const body = normalized.slice(endIndex + 4).trim();
91
+
92
+ try {
93
+ // Replace tabs with spaces for YAML compatibility before parsing
94
+ const loaded = YAML.parse(metadata.replaceAll("\t", " ")) as Record<string, unknown> | null;
95
+ return { frontmatter: normalizeKeys({ ...frontmatter, ...loaded }), body };
96
+ } catch (error) {
97
+ const err = new FrontmatterError(
98
+ error instanceof Error ? error : new Error(`YAML: ${error}`),
99
+ loc ?? `Inline '${truncate(content, 64)}'`,
100
+ );
101
+ if (level === "warn" || level === "fatal") {
102
+ logger.warn("Failed to parse YAML frontmatter", { err: err.toString() });
103
+ }
104
+ if (level === "fatal") {
105
+ throw err;
106
+ }
107
+
108
+ // Simple YAML parsing - just key: value pairs
109
+ for (const line of metadata.split("\n")) {
110
+ const match = line.match(/^([\w-]+):\s*(.*)$/);
111
+ if (match) {
112
+ frontmatter[match[1]] = match[2].trim();
113
+ }
114
+ }
115
+
116
+ return { frontmatter: normalizeKeys(frontmatter) as Record<string, unknown>, body };
117
+ }
118
+ }
package/src/index.ts CHANGED
@@ -4,19 +4,44 @@ export * from "./color";
4
4
  export * from "./dirs";
5
5
  export * from "./env";
6
6
  export * from "./format";
7
+ export * from "./frontmatter";
7
8
  export * from "./fs-error";
8
9
  export * from "./glob";
9
10
  export * from "./hook-fetch";
10
- export * from "./indent";
11
11
  export * from "./json";
12
12
  export * as logger from "./logger";
13
13
  export * from "./mermaid-ascii";
14
+ export * from "./mime";
15
+ export * from "./peek-file";
14
16
  export * as postmortem from "./postmortem";
15
17
  export * as procmgr from "./procmgr";
16
18
  export { setNativeKillTree } from "./procmgr";
19
+ export * as prompt from "./prompt";
17
20
  export * as ptree from "./ptree";
18
21
  export { AbortError, ChildProcess, Exception, NonZeroExitError } from "./ptree";
19
22
  export * from "./snowflake";
20
23
  export * from "./stream";
21
24
  export * from "./temp";
22
25
  export * from "./type-guards";
26
+ export * from "./which";
27
+
28
+ function isPlainObject(val: object): val is Record<string, unknown> {
29
+ return Object.getPrototypeOf(val) === Object.prototype || Array.isArray(val);
30
+ }
31
+
32
+ export function structuredCloneJSON<T>(value: T): T {
33
+ // Primitives, null, and undefined are immutable: return as-is
34
+ if (!value || typeof value !== "object") {
35
+ return value;
36
+ }
37
+
38
+ // deep clone
39
+ if (isPlainObject(value)) {
40
+ try {
41
+ return structuredClone(value);
42
+ } catch {
43
+ // might still fail due to nested structures
44
+ }
45
+ }
46
+ return JSON.parse(JSON.stringify(value)) as T;
47
+ }
package/src/logger.ts CHANGED
@@ -5,7 +5,6 @@
5
5
  * Each log entry includes process.pid for traceability.
6
6
  */
7
7
  import * as fs from "node:fs";
8
- import { RingBuffer } from "@oh-my-pi/pi-utils/ring";
9
8
  import winston from "winston";
10
9
  import DailyRotateFile from "winston-daily-rotate-file";
11
10
  import { getLogsDir } from "./dirs";
@@ -58,29 +57,6 @@ const winstonLogger = winston.createLogger({
58
57
  exitOnError: false,
59
58
  });
60
59
 
61
- /**
62
- * Centralized logger for omp.
63
- *
64
- * Logs to ~/.omp/logs/omp.YYYY-MM-DD.log with size-based rotation.
65
- * Safe for concurrent access from multiple omp instances.
66
- *
67
- * @example
68
- * ```typescript
69
- * import { logger } from "@oh-my-pi/pi-utils";
70
- *
71
- * logger.error("MCP request failed", { url, method });
72
- * logger.warn("Theme file invalid, using fallback", { path });
73
- * logger.debug("LSP fallback triggered", { reason });
74
- * ```
75
- */
76
- export interface Logger {
77
- error(message: string, context?: Record<string, unknown>): void;
78
- warn(message: string, context?: Record<string, unknown>): void;
79
- debug(message: string, context?: Record<string, unknown>): void;
80
- time<T>(op: string, fn: () => T): T;
81
- timeAsync<T>(op: string, fn: () => PromiseLike<T>): Promise<T>;
82
- }
83
-
84
60
  /**
85
61
  * Log an error message.
86
62
  * @param message - The message to log.
@@ -122,85 +98,107 @@ export function debug(message: string, context?: Record<string, unknown>): void
122
98
 
123
99
  const LOGGED_TIMING_THRESHOLD_MS = 5;
124
100
 
125
- const longOpBuffer = new RingBuffer<[op: string, duration: number]>(1000);
126
- let longOpRecord = false;
101
+ /** Sequential wall-clock markers (next marker closes the previous segment). */
102
+ let gTimings: [op: string, ts: number][] = [];
127
103
 
128
- function logTiming(op: string, duration: number): void {
129
- duration = Math.round(duration * 100) / 100;
130
- if (duration > LOGGED_TIMING_THRESHOLD_MS) {
131
- warn(`${op} done`, { duration, op });
132
- if (longOpRecord) {
133
- longOpBuffer.push([op, duration]);
134
- }
135
- } else {
136
- debug(`${op} done`, { duration, op });
137
- }
138
- }
104
+ /** Await-accurate durations (safe for parallel work; sums can overlap). */
105
+ let gAsyncSpans: [op: string, durationMs: number][] = [];
106
+
107
+ /** Whether to record timings. */
108
+ let gRecordTimings = false;
139
109
 
140
110
  /**
141
- * Print all collected long operation timings to stderr.
142
- * To be called at the end of a startup or timing window.
111
+ * Print collected timings to stderr.
112
+ * Wall segments are gaps between consecutive {@link time} markers only; they are wrong when
113
+ * concurrent code also calls {@link time} (e.g. parallel capability loads). Use {@link timeAsync}
114
+ * for those awaits instead.
143
115
  */
144
116
  export function printTimings(): void {
145
- // Use stderr for timings output, do not use logger (see AGENTS.md).
146
- console.error("\n--- Startup Timings ---");
147
- let totalDuration = 0;
148
- for (const [op, duration] of longOpBuffer) {
149
- console.error(` ${op}: ${duration}ms`);
150
- totalDuration += duration;
117
+ if (!gRecordTimings || gTimings.length === 0) {
118
+ console.error("\n--- Startup Timings ---\n(no markers)\n");
119
+ return;
151
120
  }
152
- console.error(` TOTAL: ${totalDuration}ms`);
121
+
122
+ const endTs = performance.now();
123
+ gTimings.push(["(end)", endTs]);
124
+
125
+ console.error("\n--- Startup timings (wall segments between time() markers) ---");
126
+ const firstTs = gTimings[0][1];
127
+ for (let i = 0; i < gTimings.length - 1; i++) {
128
+ const [op, ts] = gTimings[i];
129
+ const [, nextTs] = gTimings[i + 1];
130
+ const dur = nextTs - ts;
131
+ if (dur > LOGGED_TIMING_THRESHOLD_MS) {
132
+ console.error(` ${op}: ${dur}ms`);
133
+ }
134
+ }
135
+ console.error(` span (first marker → end): ${endTs - firstTs}ms`);
136
+
137
+ if (gAsyncSpans.length > 0) {
138
+ console.error("\n--- Async (await-accurate; parallel spans may overlap) ---");
139
+ for (const [op, dur] of gAsyncSpans) {
140
+ if (dur > LOGGED_TIMING_THRESHOLD_MS) {
141
+ console.error(` ${op}: ${dur}ms`);
142
+ }
143
+ }
144
+ }
145
+
153
146
  console.error("------------------------\n");
147
+
148
+ gTimings.pop();
154
149
  }
155
150
 
156
151
  /**
157
- * Begin recording long operation timings.
158
- * Typically called at the beginning of startup.
152
+ * Begin recording startup timings. Seeds the timeline so the first segment is meaningful.
159
153
  */
160
154
  export function startTiming(): void {
161
- longOpBuffer.clear();
162
- longOpRecord = true;
155
+ gTimings = [["(startup)", performance.now()]];
156
+ gAsyncSpans = [];
157
+ gRecordTimings = true;
163
158
  }
164
159
 
165
160
  /**
166
- * End timing window and print all timings.
167
- * Disables further buffering until next startTiming().
161
+ * End timing window and clear buffers.
168
162
  */
169
163
  export function endTiming(): void {
170
- longOpBuffer.clear();
171
- longOpRecord = false;
164
+ gTimings = [];
165
+ gAsyncSpans = [];
166
+ gRecordTimings = false;
172
167
  }
173
168
 
174
- /**
175
- * Time a synchronous operation and log the duration.
176
- * @param op - The operation name.
177
- * @param fn - The function to time.
178
- * @returns The result of the function.
179
- */
180
- export function time<T, A extends unknown[]>(op: string, fn: (...args: A) => T, ...args: A): T {
181
- const start = performance.now();
182
- try {
183
- return fn(...args);
184
- } finally {
185
- logTiming(op, performance.now() - start);
169
+ function recordAsyncSpan(op: string, start: number): void {
170
+ const dur = performance.now() - start;
171
+ if (dur > LOGGED_TIMING_THRESHOLD_MS) {
172
+ gAsyncSpans.push([op, dur]);
186
173
  }
187
174
  }
188
175
 
189
176
  /**
190
- * Time an asynchronous operation and log the duration.
191
- * @param op - The operation name.
192
- * @param fn - The function to time.
193
- * @returns The result of the function.
177
+ * Wall-clock segment boundary: duration for this label runs until the next {@link time} call.
178
+ * Do not use across `await` when other tasks may call {@link time}; use {@link timeAsync} for the awaited work.
194
179
  */
195
- export async function timeAsync<R, A extends unknown[]>(
196
- op: string,
197
- fn: (...args: A) => R,
198
- ...args: A
199
- ): Promise<Awaited<R>> {
200
- const start = performance.now();
201
- try {
202
- return await fn(...args);
203
- } finally {
204
- logTiming(op, performance.now() - start);
180
+ export function time(op: string): void;
181
+ export function time<T, A extends unknown[]>(op: string, fn: (...args: A) => T, ...args: A): T;
182
+ export function time<T, A extends unknown[]>(op: string, fn?: (...args: A) => T, ...args: A): T | undefined {
183
+ if (fn === undefined) {
184
+ if (gRecordTimings) {
185
+ gTimings.push([op, performance.now()]);
186
+ }
187
+ return undefined as T;
188
+ } else if (gRecordTimings) {
189
+ const start = performance.now();
190
+ try {
191
+ const result = fn(...args);
192
+ if (result instanceof Promise) {
193
+ return result.finally(recordAsyncSpan.bind(null, op, start)) as T;
194
+ }
195
+ recordAsyncSpan(op, start);
196
+ return result;
197
+ } catch (error) {
198
+ recordAsyncSpan(op, start);
199
+ throw error;
200
+ }
201
+ } else {
202
+ return fn(...args);
205
203
  }
206
204
  }
package/src/mime.ts ADDED
@@ -0,0 +1,159 @@
1
+ import { peekFile, peekFileSync } from "./peek-file";
2
+
3
+ const DEFAULT_IMAGE_METADATA_HEADER_BYTES = 256 * 1024;
4
+
5
+ const PNG_MAGIC = Buffer.from([0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a]);
6
+ const JPEG_MAGIC = Buffer.from([0xff, 0xd8, 0xff]);
7
+ const WEBP_RIFF_MAGIC = Buffer.from([0x52, 0x49, 0x46, 0x46]);
8
+ const WEBP_MAGIC = Buffer.from([0x57, 0x45, 0x42, 0x50]);
9
+ const PNG_IHDR = Buffer.from("IHDR");
10
+ const GIF87A = Buffer.from("GIF87a");
11
+ const GIF89A = Buffer.from("GIF89a");
12
+ const WEBP_VP8X = Buffer.from("VP8X");
13
+ const WEBP_VP8L = Buffer.from("VP8L");
14
+ const WEBP_VP8 = Buffer.from("VP8 ");
15
+
16
+ export const SUPPORTED_IMAGE_MIME_TYPES = new Set(["image/png", "image/jpeg", "image/gif", "image/webp"]);
17
+
18
+ export type ImageMetadata =
19
+ | { mimeType: "image/png"; width?: number; height?: number; channels?: number; hasAlpha?: boolean }
20
+ | { mimeType: "image/jpeg"; width?: number; height?: number; channels?: number; hasAlpha?: false }
21
+ | { mimeType: "image/gif"; width?: number; height?: number; channels?: 3; hasAlpha?: never }
22
+ | { mimeType: "image/webp"; width?: number; height?: number; channels?: number; hasAlpha?: boolean };
23
+
24
+ function magicEquals(header: Uint8Array, offset: number, magic: Buffer): boolean {
25
+ if (header.length < offset + magic.length) {
26
+ return false;
27
+ }
28
+ return magic.equals(header.subarray(offset, offset + magic.length));
29
+ }
30
+
31
+ function parsePngMetadata(header: Uint8Array): ImageMetadata | null {
32
+ if (!magicEquals(header, 0, PNG_MAGIC)) return null;
33
+ if (!magicEquals(header, 12, PNG_IHDR)) return { mimeType: "image/png" };
34
+ if (header.length < 26) return { mimeType: "image/png" };
35
+
36
+ const view = new DataView(header.buffer, header.byteOffset, header.byteLength);
37
+ const width = view.getUint32(16, false);
38
+ const height = view.getUint32(20, false);
39
+ const colorType = view.getUint8(25);
40
+ if (colorType === 0) return { mimeType: "image/png", width, height, channels: 1, hasAlpha: false };
41
+ if (colorType === 2) return { mimeType: "image/png", width, height, channels: 3, hasAlpha: false };
42
+ if (colorType === 3) return { mimeType: "image/png", width, height, channels: 3 };
43
+ if (colorType === 4) return { mimeType: "image/png", width, height, channels: 2, hasAlpha: true };
44
+ if (colorType === 6) return { mimeType: "image/png", width, height, channels: 4, hasAlpha: true };
45
+ return { mimeType: "image/png", width, height };
46
+ }
47
+
48
+ function parseJpegMetadata(header: Uint8Array): ImageMetadata | null {
49
+ if (!magicEquals(header, 0, JPEG_MAGIC)) return null;
50
+ if (header.length < 4) return { mimeType: "image/jpeg" };
51
+
52
+ const view = new DataView(header.buffer, header.byteOffset, header.byteLength);
53
+ let offset = 2;
54
+ while (offset + 9 < header.length) {
55
+ if (header[offset] !== 0xff) {
56
+ offset += 1;
57
+ continue;
58
+ }
59
+
60
+ let markerOffset = offset + 1;
61
+ while (markerOffset < header.length && header[markerOffset] === 0xff) {
62
+ markerOffset += 1;
63
+ }
64
+ if (markerOffset >= header.length) break;
65
+
66
+ const marker = header[markerOffset];
67
+ const segmentOffset = markerOffset + 1;
68
+ if (marker === 0xd8 || marker === 0xd9 || marker === 0x01 || (marker >= 0xd0 && marker <= 0xd7)) {
69
+ offset = segmentOffset;
70
+ continue;
71
+ }
72
+ if (segmentOffset + 1 >= header.length) break;
73
+
74
+ const segmentLength = view.getUint16(segmentOffset, false);
75
+ if (segmentLength < 2) break;
76
+
77
+ const isStartOfFrame = marker >= 0xc0 && marker <= 0xcf && marker !== 0xc4 && marker !== 0xc8 && marker !== 0xcc;
78
+ if (isStartOfFrame) {
79
+ if (segmentOffset + 7 >= header.length) break;
80
+ const height = view.getUint16(segmentOffset + 3, false);
81
+ const width = view.getUint16(segmentOffset + 5, false);
82
+ const channels = header[segmentOffset + 7];
83
+ return {
84
+ mimeType: "image/jpeg",
85
+ width,
86
+ height,
87
+ channels: Number.isFinite(channels) ? channels : undefined,
88
+ hasAlpha: false,
89
+ };
90
+ }
91
+
92
+ offset = segmentOffset + segmentLength;
93
+ }
94
+
95
+ return { mimeType: "image/jpeg" };
96
+ }
97
+
98
+ function parseGifMetadata(header: Uint8Array): ImageMetadata | null {
99
+ if (!magicEquals(header, 0, GIF87A) && !magicEquals(header, 0, GIF89A)) return null;
100
+ if (header.length < 10) return { mimeType: "image/gif" };
101
+ const view = new DataView(header.buffer, header.byteOffset, header.byteLength);
102
+ return {
103
+ mimeType: "image/gif",
104
+ width: view.getUint16(6, true),
105
+ height: view.getUint16(8, true),
106
+ channels: 3,
107
+ };
108
+ }
109
+
110
+ function parseWebpMetadata(header: Uint8Array): ImageMetadata | null {
111
+ if (!magicEquals(header, 0, WEBP_RIFF_MAGIC)) return null;
112
+ if (!magicEquals(header, 8, WEBP_MAGIC)) return null;
113
+ if (header.length < 30) return { mimeType: "image/webp" };
114
+
115
+ if (magicEquals(header, 12, WEBP_VP8X)) {
116
+ const hasAlpha = (header[20] & 0x10) !== 0;
117
+ const width = (header[24] | (header[25] << 8) | (header[26] << 16)) + 1;
118
+ const height = (header[27] | (header[28] << 8) | (header[29] << 16)) + 1;
119
+ return { mimeType: "image/webp", width, height, channels: hasAlpha ? 4 : 3, hasAlpha };
120
+ }
121
+
122
+ const view = new DataView(header.buffer, header.byteOffset, header.byteLength);
123
+ if (magicEquals(header, 12, WEBP_VP8L)) {
124
+ if (header.length < 25) return { mimeType: "image/webp" };
125
+ const bits = view.getUint32(21, true);
126
+ const width = (bits & 0x3fff) + 1;
127
+ const height = ((bits >> 14) & 0x3fff) + 1;
128
+ const hasAlpha = ((bits >> 28) & 0x1) === 1;
129
+ return { mimeType: "image/webp", width, height, channels: hasAlpha ? 4 : 3, hasAlpha };
130
+ }
131
+
132
+ if (magicEquals(header, 12, WEBP_VP8)) {
133
+ const width = view.getUint16(26, true) & 0x3fff;
134
+ const height = view.getUint16(28, true) & 0x3fff;
135
+ return { mimeType: "image/webp", width, height, channels: 3, hasAlpha: false };
136
+ }
137
+
138
+ return { mimeType: "image/webp" };
139
+ }
140
+
141
+ export function parseImageMetadata(header: Uint8Array): ImageMetadata | null {
142
+ return (
143
+ parsePngMetadata(header) ?? parseJpegMetadata(header) ?? parseGifMetadata(header) ?? parseWebpMetadata(header)
144
+ );
145
+ }
146
+
147
+ export function readImageMetadataSync(
148
+ filePath: string,
149
+ maxBytes = DEFAULT_IMAGE_METADATA_HEADER_BYTES,
150
+ ): ImageMetadata | null {
151
+ return peekFileSync(filePath, maxBytes, parseImageMetadata);
152
+ }
153
+
154
+ export function readImageMetadata(
155
+ filePath: string,
156
+ maxBytes = DEFAULT_IMAGE_METADATA_HEADER_BYTES,
157
+ ): Promise<ImageMetadata | null> {
158
+ return peekFile(filePath, maxBytes, parseImageMetadata);
159
+ }
@@ -0,0 +1,114 @@
1
+ /**
2
+ * Read the first `maxBytes` of a file (offset 0) and pass that slice to `op`.
3
+ *
4
+ * Buffers are reused to avoid allocating on every peek: sync uses one growable
5
+ * `Uint8Array`; async uses a small fixed pool of `Buffer`s with a bounded wait
6
+ * queue, falling back to a fresh allocation when the pool and queue are saturated
7
+ * or when `maxBytes` exceeds the pool slot size.
8
+ */
9
+ import * as fs from "node:fs";
10
+
11
+ /** Async pool slot size; larger peeks allocate ad hoc. */
12
+ const POOLED_BUFFER_SIZE = 512;
13
+ const ASYNC_POOL_SIZE = 10;
14
+ /** Cap waiter queue so heavy concurrency does not queue unbounded; overflow uses alloc. */
15
+ const MAX_ASYNC_WAITERS = 4;
16
+ const INITIAL_SYNC_BUFFER_SIZE = 1024;
17
+ const EMPTY_BUFFER = Buffer.alloc(0);
18
+
19
+ const asyncPool = Array.from({ length: ASYNC_POOL_SIZE }, () => Buffer.allocUnsafe(POOLED_BUFFER_SIZE));
20
+ const availableAsyncPoolIndexes = Array.from({ length: ASYNC_POOL_SIZE }, (_, index) => index);
21
+ const asyncPoolWaiters: Array<(index: number) => void> = [];
22
+ let syncPool = new Uint8Array(INITIAL_SYNC_BUFFER_SIZE);
23
+
24
+ /** Returns a pool slot index, or `-1` when the caller should use a standalone buffer. */
25
+ function acquireAsyncPoolIndex(): Promise<number> | number {
26
+ const index = availableAsyncPoolIndexes.pop();
27
+ if (index !== undefined) {
28
+ return index;
29
+ }
30
+ if (asyncPoolWaiters.length >= MAX_ASYNC_WAITERS) {
31
+ return -1;
32
+ }
33
+ const { promise, resolve } = Promise.withResolvers<number>();
34
+ asyncPoolWaiters.push(resolve);
35
+ return promise;
36
+ }
37
+
38
+ function releaseAsyncPoolIndex(index: number): void {
39
+ if (index < 0) {
40
+ return;
41
+ }
42
+ const waiter = asyncPoolWaiters.shift();
43
+ if (waiter) {
44
+ waiter(index);
45
+ return;
46
+ }
47
+ availableAsyncPoolIndexes.push(index);
48
+ }
49
+
50
+ async function withAsyncPoolBuffer<T>(maxBytes: number, op: (buffer: Buffer) => Promise<T>): Promise<T> {
51
+ if (maxBytes <= 0) {
52
+ return op(EMPTY_BUFFER);
53
+ }
54
+ if (maxBytes > POOLED_BUFFER_SIZE) {
55
+ return op(Buffer.allocUnsafe(maxBytes));
56
+ }
57
+
58
+ const poolIndex = await acquireAsyncPoolIndex();
59
+ const buffer = poolIndex >= 0 ? asyncPool[poolIndex] : Buffer.allocUnsafe(maxBytes);
60
+ try {
61
+ return await op(buffer.subarray(0, maxBytes));
62
+ } finally {
63
+ releaseAsyncPoolIndex(poolIndex);
64
+ }
65
+ }
66
+
67
+ function withSyncPoolBuffer<T>(maxBytes: number, op: (buffer: Uint8Array) => T): T {
68
+ if (maxBytes <= 0) {
69
+ return op(EMPTY_BUFFER);
70
+ }
71
+ if (maxBytes > syncPool.byteLength) {
72
+ syncPool = new Uint8Array(maxBytes + (maxBytes >> 1));
73
+ }
74
+ return op(syncPool.subarray(0, maxBytes));
75
+ }
76
+
77
+ /**
78
+ * Synchronously reads up to `maxBytes` from the start of `filePath` and returns `op(header)`.
79
+ * If the file is shorter, `header` is only the bytes actually read.
80
+ */
81
+ export function peekFileSync<T>(filePath: string, maxBytes: number, op: (header: Uint8Array) => T): T {
82
+ if (maxBytes <= 0) {
83
+ return op(EMPTY_BUFFER);
84
+ }
85
+
86
+ const fileHandle = fs.openSync(filePath, "r");
87
+ try {
88
+ return withSyncPoolBuffer(maxBytes, buffer => {
89
+ const bytesRead = fs.readSync(fileHandle, buffer, 0, buffer.byteLength, 0);
90
+ return op(buffer.subarray(0, bytesRead));
91
+ });
92
+ } finally {
93
+ fs.closeSync(fileHandle);
94
+ }
95
+ }
96
+
97
+ /**
98
+ * Like {@link peekFileSync} but uses async I/O.
99
+ */
100
+ export async function peekFile<T>(filePath: string, maxBytes: number, op: (header: Uint8Array) => T): Promise<T> {
101
+ if (maxBytes <= 0) {
102
+ return op(EMPTY_BUFFER);
103
+ }
104
+
105
+ const fileHandle = await fs.promises.open(filePath, "r");
106
+ try {
107
+ return await withAsyncPoolBuffer(maxBytes, async buffer => {
108
+ const { bytesRead } = await fileHandle.read(buffer, 0, buffer.byteLength, 0);
109
+ return op(buffer.subarray(0, bytesRead));
110
+ });
111
+ } finally {
112
+ await fileHandle.close();
113
+ }
114
+ }