@f5xc-salesdemos/pi-utils 14.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +60 -0
- package/src/abortable.ts +85 -0
- package/src/async.ts +50 -0
- package/src/cli.ts +432 -0
- package/src/color.ts +204 -0
- package/src/dirs.ts +425 -0
- package/src/env.ts +84 -0
- package/src/format.ts +106 -0
- package/src/frontmatter.ts +118 -0
- package/src/fs-error.ts +56 -0
- package/src/glob.ts +189 -0
- package/src/hook-fetch.ts +30 -0
- package/src/index.ts +47 -0
- package/src/json.ts +10 -0
- package/src/logger.ts +204 -0
- package/src/mermaid-ascii.ts +31 -0
- package/src/mime.ts +159 -0
- package/src/peek-file.ts +114 -0
- package/src/postmortem.ts +197 -0
- package/src/procmgr.ts +326 -0
- package/src/prompt.ts +401 -0
- package/src/ptree.ts +386 -0
- package/src/ring.ts +169 -0
- package/src/snowflake.ts +136 -0
- package/src/stream.ts +316 -0
- package/src/temp.ts +77 -0
- package/src/type-guards.ts +11 -0
- package/src/which.ts +230 -0
package/src/stream.ts
ADDED
|
@@ -0,0 +1,316 @@
|
|
|
1
|
+
import { createAbortableStream } from "./abortable";
|
|
2
|
+
|
|
3
|
+
// LF byte (0x0a): the only line delimiter used for JSONL framing below.
const LF = 0x0a;

// Normalized result shape for Bun.JSONL.parseChunk.
// NOTE(review): field semantics are inferred from the call sites in this file
// (`read` is used as an offset for the unconsumed remainder, `done` gates
// whether a remainder exists) — confirm against Bun.JSONL.parseChunk docs.
type JsonlChunkResult = {
  // JSON values successfully parsed out of the chunk.
  values: unknown[];
  // Parse error encountered after `values` were produced, if any.
  error: unknown;
  // How far into the chunk the parser consumed.
  read: number;
  // True when the chunk ended cleanly with no trailing partial line.
  done: boolean;
};
|
|
10
|
+
|
|
11
|
+
function parseJsonlChunkCompat(input: Uint8Array, beg?: number, end?: number): JsonlChunkResult;
|
|
12
|
+
function parseJsonlChunkCompat(input: string): JsonlChunkResult;
|
|
13
|
+
function parseJsonlChunkCompat(input: Uint8Array | string, beg?: number, end?: number): JsonlChunkResult {
|
|
14
|
+
if (typeof input === "string") {
|
|
15
|
+
const { values, error, read, done } = Bun.JSONL.parseChunk(input);
|
|
16
|
+
return { values, error, read, done };
|
|
17
|
+
}
|
|
18
|
+
const start = beg ?? 0;
|
|
19
|
+
const stop = end ?? input.length;
|
|
20
|
+
const { values, error, read, done } = Bun.JSONL.parseChunk(input, start, stop);
|
|
21
|
+
return { values, error, read, done };
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
export async function* readLines(stream: ReadableStream<Uint8Array>, signal?: AbortSignal): AsyncGenerator<Uint8Array> {
|
|
25
|
+
const buffer = new ConcatSink();
|
|
26
|
+
const source = createAbortableStream(stream, signal);
|
|
27
|
+
try {
|
|
28
|
+
for await (const chunk of source) {
|
|
29
|
+
for (const line of buffer.appendAndFlushLines(chunk)) {
|
|
30
|
+
yield line;
|
|
31
|
+
}
|
|
32
|
+
}
|
|
33
|
+
if (!buffer.isEmpty) {
|
|
34
|
+
const tail = buffer.flush();
|
|
35
|
+
if (tail) {
|
|
36
|
+
buffer.clear();
|
|
37
|
+
yield tail;
|
|
38
|
+
}
|
|
39
|
+
}
|
|
40
|
+
} catch (err) {
|
|
41
|
+
// Abort errors are expected — just stop the generator.
|
|
42
|
+
if (signal?.aborted) return;
|
|
43
|
+
throw err;
|
|
44
|
+
}
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
export async function* readJsonl<T>(stream: ReadableStream<Uint8Array>, signal?: AbortSignal): AsyncGenerator<T> {
|
|
48
|
+
const buffer = new ConcatSink();
|
|
49
|
+
const source = createAbortableStream(stream, signal);
|
|
50
|
+
try {
|
|
51
|
+
for await (const chunk of source) {
|
|
52
|
+
yield* buffer.pullJSONL<T>(chunk, 0, chunk.length);
|
|
53
|
+
}
|
|
54
|
+
if (!buffer.isEmpty) {
|
|
55
|
+
const tail = buffer.flush();
|
|
56
|
+
if (tail) {
|
|
57
|
+
buffer.clear();
|
|
58
|
+
const { values, error, done } = parseJsonlChunkCompat(tail, 0, tail.length);
|
|
59
|
+
if (values.length > 0) {
|
|
60
|
+
yield* values as T[];
|
|
61
|
+
}
|
|
62
|
+
if (error) throw error;
|
|
63
|
+
if (!done) {
|
|
64
|
+
throw new Error("JSONL stream ended unexpectedly");
|
|
65
|
+
}
|
|
66
|
+
}
|
|
67
|
+
}
|
|
68
|
+
} catch (err) {
|
|
69
|
+
// Abort errors are expected — just stop the generator.
|
|
70
|
+
if (signal?.aborted) return;
|
|
71
|
+
throw err;
|
|
72
|
+
}
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
// =============================================================================
|
|
76
|
+
// SSE (Server-Sent Events)
|
|
77
|
+
// =============================================================================
|
|
78
|
+
|
|
79
|
+
/** Byte lookup table: 1 = whitespace, 0 = not. */
// Used below to trim trailing \r / spaces off SSE lines and to skip the
// optional whitespace after the "data:" field prefix.
const WS = new Uint8Array(256);
WS[0x09] = 1; // tab
WS[0x0a] = 1; // LF
WS[0x0d] = 1; // CR
WS[0x20] = 1; // space
|
|
86
|
+
const createPattern = (prefix: string) => {
|
|
87
|
+
const pre = Buffer.from(prefix, "utf-8");
|
|
88
|
+
return {
|
|
89
|
+
strip(buf: Uint8Array): number | null {
|
|
90
|
+
const n = pre.length;
|
|
91
|
+
if (buf.length < n) return null;
|
|
92
|
+
if (pre.equals(buf.subarray(0, n))) {
|
|
93
|
+
return n;
|
|
94
|
+
}
|
|
95
|
+
return null;
|
|
96
|
+
},
|
|
97
|
+
};
|
|
98
|
+
};
|
|
99
|
+
|
|
100
|
+
const PAT_DATA = createPattern("data:");
|
|
101
|
+
|
|
102
|
+
const PAT_DONE = createPattern("[DONE]");
|
|
103
|
+
|
|
104
|
+
/**
 * Growable byte accumulator used for line framing and incremental JSONL
 * parsing. Backed by a single `Buffer` that doubles on growth.
 *
 * Aliasing contract: `flush()` and the generators yield views INTO the
 * internal buffer or into the caller's chunk — consumers must copy or finish
 * using a yielded value before the next append/reset invalidates it.
 */
class ConcatSink {
  // Backing storage; undefined until first append. Capacity may exceed #length.
  #space?: Buffer;
  // Number of valid bytes currently held in #space.
  #length = 0;

  // Grow (or return) the backing buffer so it can hold at least `size` bytes,
  // preserving the currently-held bytes. Doubles capacity to amortize copies.
  #ensureCapacity(size: number): Buffer {
    const space = this.#space;
    if (space && space.length >= size) return space;
    const nextSize = space ? Math.max(size, space.length * 2) : size;
    const next = Buffer.allocUnsafe(nextSize);
    if (space && this.#length > 0) {
      space.copy(next, 0, 0, this.#length);
    }
    this.#space = next;
    return next;
  }

  // Append `chunk` after the currently-held bytes.
  append(chunk: Uint8Array) {
    const n = chunk.length;
    if (!n) return;
    const offset = this.#length;
    const space = this.#ensureCapacity(offset + n);
    space.set(chunk, offset);
    this.#length += n;
  }

  // Replace the held bytes with a copy of `chunk` (keeps the backing buffer).
  reset(chunk: Uint8Array) {
    const n = chunk.length;
    if (!n) {
      this.#length = 0;
      return;
    }
    const space = this.#ensureCapacity(n);
    space.set(chunk, 0);
    this.#length = n;
  }

  // True when no bytes are buffered.
  get isEmpty(): boolean {
    return this.#length === 0;
  }

  // View of the held bytes (NOT a copy — see aliasing contract above),
  // or undefined when empty. Does not clear; callers pair with clear().
  flush(): Uint8Array | undefined {
    if (!this.#length) return undefined;
    return this.#space!.subarray(0, this.#length);
  }

  // Drop the held bytes without releasing the backing buffer.
  clear() {
    this.#length = 0;
  }

  // Split `chunk` on LF, yielding each complete line (prior partial-line bytes
  // are prepended). Bytes after the last LF stay buffered for the next chunk.
  *appendAndFlushLines(chunk: Uint8Array) {
    let pos = 0;
    while (pos < chunk.length) {
      const nl = chunk.indexOf(LF, pos);
      if (nl === -1) {
        // No newline left in this chunk: stash the remainder and wait.
        this.append(chunk.subarray(pos));
        return;
      }
      const suffix = chunk.subarray(pos, nl);
      pos = nl + 1;
      if (this.isEmpty) {
        // Whole line lives inside `chunk`: yield a view without copying.
        yield suffix;
      } else {
        // Complete the buffered partial line, yield it, then reset.
        this.append(suffix);
        const payload = this.flush();
        if (payload) {
          yield payload;
          this.clear();
        }
      }
    }
  }

  // Feed `chunk[beg..end)` through Bun's JSONL parser, yielding each parsed
  // value and buffering any trailing partial record across calls.
  *pullJSONL<T>(chunk: Uint8Array, beg: number, end: number) {
    if (this.isEmpty) {
      // Fast path: parse directly from the caller's chunk, no copy.
      const { values, error, read, done } = parseJsonlChunkCompat(chunk, beg, end);
      if (values.length > 0) {
        yield* values as T[];
      }
      if (error) throw error;
      if (done) return;
      // NOTE(review): treats `read` as an absolute offset into `chunk` even
      // when beg > 0 — confirm against Bun.JSONL.parseChunk semantics.
      this.reset(chunk.subarray(read, end));
      return;
    }

    // Slow path: append the new slice to the buffered partial record first.
    const offset = this.#length;
    const n = end - beg;
    const total = offset + n;
    const space = this.#ensureCapacity(total);
    space.set(chunk.subarray(beg, end), offset);
    this.#length = total;

    const { values, error, read, done } = parseJsonlChunkCompat(space.subarray(0, total), 0, total);
    if (values.length > 0) {
      yield* values as T[];
    }
    if (error) throw error;
    if (done) {
      this.#length = 0;
      return;
    }
    // Shift the unconsumed tail to the front of the backing buffer.
    const rem = total - read;
    if (rem < total) {
      space.copyWithin(0, read, total);
    }
    this.#length = rem;
  }
}
|
|
210
|
+
|
|
211
|
+
// Sentinel thrown from processLine when the "[DONE]" marker is seen; caught
// below and converted into normal generator completion.
const kDoneError = new Error("SSE stream done");

/**
 * Stream parsed JSON objects from SSE `data:` lines.
 *
 * Non-`data:` lines and blank lines are ignored; trailing CR/whitespace is
 * stripped; the OpenAI-style `[DONE]` marker ends the stream. Aborting via
 * `signal` stops the generator silently.
 *
 * @example
 * ```ts
 * for await (const obj of readSseJson(response.body!)) {
 *   console.log(obj);
 * }
 * ```
 */
export async function* readSseJson<T>(stream: ReadableStream<Uint8Array>, signal?: AbortSignal): AsyncGenerator<T> {
  const lineBuffer = new ConcatSink(); // frames raw bytes into lines
  const jsonBuffer = new ConcatSink(); // accumulates JSON split across data: lines

  // createAbortableStream makes the stream abort-aware: when the signal fires
  // the source is cancelled and iteration errors, so for-await-of exits
  // without manual reader/listener management.
  stream = createAbortableStream(stream, signal);
  try {
    const processLine = function* (line: Uint8Array) {
      // Strip trailing spaces including \r.
      let end = line.length;
      while (end && WS[line[end - 1]]) {
        --end;
      }
      if (!end) return; // blank line

      const trimmed = end === line.length ? line : line.subarray(0, end);

      // Check "data:" prefix and optional space afterwards.
      let beg = PAT_DATA.strip(trimmed);
      if (beg === null) return;
      while (beg < end && WS[trimmed[beg]]) {
        ++beg;
      }
      if (beg >= end) return;

      // Fast-path: the OpenAI-style done marker isn't JSON.
      // The length check ensures the payload is exactly "[DONE]".
      const donePrefix = PAT_DONE.strip(trimmed.subarray(beg, end));
      if (donePrefix !== null && donePrefix === end - beg) {
        throw kDoneError;
      }

      yield* jsonBuffer.pullJSONL<T>(trimmed, beg, end);
    };
    for await (const chunk of stream) {
      for (const line of lineBuffer.appendAndFlushLines(chunk)) {
        yield* processLine(line);
      }
    }
    // Handle a final line that wasn't newline-terminated.
    if (!lineBuffer.isEmpty) {
      const tail = lineBuffer.flush();
      if (tail) {
        lineBuffer.clear();
        yield* processLine(tail);
      }
    }
  } catch (err) {
    if (err === kDoneError) return;
    // Abort errors are expected — just stop the generator.
    if (signal?.aborted) return;
    throw err;
  }
  // Leftover JSON bytes mean a record was truncated mid-stream.
  if (!jsonBuffer.isEmpty) {
    throw new Error("SSE stream ended unexpectedly");
  }
}
|
|
280
|
+
|
|
281
|
+
/**
|
|
282
|
+
* Parse a complete JSONL string, skipping malformed lines instead of throwing.
|
|
283
|
+
*
|
|
284
|
+
* Uses `Bun.JSONL.parseChunk` internally. On parse errors, the malformed
|
|
285
|
+
* region is skipped up to the next newline and parsing continues.
|
|
286
|
+
*
|
|
287
|
+
* @example
|
|
288
|
+
* ```ts
|
|
289
|
+
* const entries = parseJsonlLenient<MyType>(fileContents);
|
|
290
|
+
* ```
|
|
291
|
+
*/
|
|
292
|
+
export function parseJsonlLenient<T>(buffer: string): T[] {
|
|
293
|
+
let entries: T[] | undefined;
|
|
294
|
+
|
|
295
|
+
while (buffer.length > 0) {
|
|
296
|
+
const { values, error, read, done } = parseJsonlChunkCompat(buffer);
|
|
297
|
+
if (values.length > 0) {
|
|
298
|
+
const ext = values as T[];
|
|
299
|
+
if (!entries) {
|
|
300
|
+
entries = ext;
|
|
301
|
+
} else {
|
|
302
|
+
entries.push(...ext);
|
|
303
|
+
}
|
|
304
|
+
}
|
|
305
|
+
if (error) {
|
|
306
|
+
const nextNewline = buffer.indexOf("\n", read);
|
|
307
|
+
if (nextNewline === -1) break;
|
|
308
|
+
buffer = buffer.substring(nextNewline + 1);
|
|
309
|
+
continue;
|
|
310
|
+
}
|
|
311
|
+
if (read === 0) break;
|
|
312
|
+
buffer = buffer.substring(read);
|
|
313
|
+
if (done) break;
|
|
314
|
+
}
|
|
315
|
+
return entries ?? [];
|
|
316
|
+
}
|
package/src/temp.ts
ADDED
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
import * as fs from "node:fs";
|
|
2
|
+
import * as os from "node:os";
|
|
3
|
+
import * as path from "node:path";
|
|
4
|
+
|
|
5
|
+
export class TempDir {
|
|
6
|
+
#path: string;
|
|
7
|
+
private constructor(path: string) {
|
|
8
|
+
this.#path = path;
|
|
9
|
+
}
|
|
10
|
+
|
|
11
|
+
static createSync(prefix?: string): TempDir {
|
|
12
|
+
return new TempDir(fs.mkdtempSync(normalizePrefix(prefix)));
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
static async create(prefix?: string): Promise<TempDir> {
|
|
16
|
+
return new TempDir(await fs.promises.mkdtemp(normalizePrefix(prefix)));
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
#removePromise: Promise<void> | null = null;
|
|
20
|
+
|
|
21
|
+
path(): string {
|
|
22
|
+
return this.#path;
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
absolute(): string {
|
|
26
|
+
return path.resolve(this.#path);
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
remove(): Promise<void> {
|
|
30
|
+
if (this.#removePromise) {
|
|
31
|
+
return this.#removePromise;
|
|
32
|
+
}
|
|
33
|
+
const removePromise = fs.promises.rm(this.#path, { recursive: true, force: true });
|
|
34
|
+
this.#removePromise = removePromise;
|
|
35
|
+
return removePromise;
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
removeSync(): void {
|
|
39
|
+
fs.rmSync(this.#path, { recursive: true, force: true });
|
|
40
|
+
this.#removePromise = Promise.resolve();
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
toString(): string {
|
|
44
|
+
return this.#path;
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
join(...paths: string[]): string {
|
|
48
|
+
return path.join(this.#path, ...paths);
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
async [Symbol.asyncDispose](): Promise<void> {
|
|
52
|
+
try {
|
|
53
|
+
await this.remove();
|
|
54
|
+
} catch {
|
|
55
|
+
// Ignore cleanup errors
|
|
56
|
+
}
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
[Symbol.dispose](): void {
|
|
60
|
+
try {
|
|
61
|
+
this.removeSync();
|
|
62
|
+
} catch {
|
|
63
|
+
// Ignore cleanup errors
|
|
64
|
+
}
|
|
65
|
+
}
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
const kTempDir = os.tmpdir();
|
|
69
|
+
|
|
70
|
+
function normalizePrefix(prefix?: string): string {
|
|
71
|
+
if (!prefix) {
|
|
72
|
+
return `${kTempDir}${path.sep}pi-temp-`;
|
|
73
|
+
} else if (prefix.startsWith("@")) {
|
|
74
|
+
return path.join(kTempDir, prefix.slice(1));
|
|
75
|
+
}
|
|
76
|
+
return prefix;
|
|
77
|
+
}
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
export function isRecord(value: unknown): value is Record<string, unknown> {
|
|
2
|
+
return !!value && typeof value === "object" && !Array.isArray(value);
|
|
3
|
+
}
|
|
4
|
+
|
|
5
|
+
export function asRecord(value: unknown): Record<string, unknown> | null {
|
|
6
|
+
return isRecord(value) ? value : null;
|
|
7
|
+
}
|
|
8
|
+
|
|
9
|
+
export function toError(value: unknown): Error {
|
|
10
|
+
return value instanceof Error ? value : new Error(String(value));
|
|
11
|
+
}
|
package/src/which.ts
ADDED
|
@@ -0,0 +1,230 @@
|
|
|
1
|
+
// OS-agnostic "which" helper with robust macOS toolchain lookup and flexible cache control.
|
|
2
|
+
//
|
|
3
|
+
// - Falls back to macOS Xcode/CLT toolchain directories if standard `Bun.which()` fails on Darwin.
|
|
4
|
+
// Resolves the active developer directory via $DEVELOPER_DIR / /var/db/xcode_select_link symlink
|
|
5
|
+
// to avoid spawning xcrun subprocesses.
|
|
6
|
+
// - Supports four cache modes (`none`, `fresh`, `ro`, `cached`) for control over discovery cost and determinism.
|
|
7
|
+
// - Computes a stable cache key from command + options to avoid redundant lookups within a process.
|
|
8
|
+
// - Returns path to resolved binary or null if not found.
|
|
9
|
+
//
|
|
10
|
+
|
|
11
|
+
import * as fs from "node:fs";
|
|
12
|
+
import * as os from "node:os";
|
|
13
|
+
import * as path from "node:path";
|
|
14
|
+
|
|
15
|
+
// Tools shipped by Xcode / Command Line Tools that callers actually look up.
|
|
16
|
+
// Keeps the set small so darwinWhich can fast-reject non-Xcode commands without
|
|
17
|
+
// touching the filesystem. Only needs entries for binaries that live *exclusively*
|
|
18
|
+
// in toolchain dirs (not on a typical $PATH).
|
|
19
|
+
const XCODE_BINS = new Set([
|
|
20
|
+
// Compilers & driver aliases
|
|
21
|
+
"clang",
|
|
22
|
+
"clang++",
|
|
23
|
+
"gcc",
|
|
24
|
+
"g++",
|
|
25
|
+
"cc",
|
|
26
|
+
"c++",
|
|
27
|
+
"cpp",
|
|
28
|
+
"c89",
|
|
29
|
+
"c99",
|
|
30
|
+
"swift",
|
|
31
|
+
"swiftc",
|
|
32
|
+
"swift-frontend",
|
|
33
|
+
// Language servers (LSP)
|
|
34
|
+
"clangd",
|
|
35
|
+
"sourcekit-lsp",
|
|
36
|
+
// Linker & archive tools
|
|
37
|
+
"ld",
|
|
38
|
+
"ld-classic",
|
|
39
|
+
"ar",
|
|
40
|
+
"ranlib",
|
|
41
|
+
"libtool",
|
|
42
|
+
"as",
|
|
43
|
+
"lipo",
|
|
44
|
+
"install_name_tool",
|
|
45
|
+
"codesign_allocate",
|
|
46
|
+
// Build utilities
|
|
47
|
+
"make",
|
|
48
|
+
"gnumake",
|
|
49
|
+
"m4",
|
|
50
|
+
"flex",
|
|
51
|
+
"bison",
|
|
52
|
+
"yacc",
|
|
53
|
+
"lex",
|
|
54
|
+
// VCS (CLT ships git)
|
|
55
|
+
"git",
|
|
56
|
+
"git-receive-pack",
|
|
57
|
+
"git-upload-pack",
|
|
58
|
+
"git-upload-archive",
|
|
59
|
+
"git-shell",
|
|
60
|
+
"scalar",
|
|
61
|
+
// Debugger
|
|
62
|
+
"lldb",
|
|
63
|
+
"lldb-dap",
|
|
64
|
+
// Binary inspection
|
|
65
|
+
"nm",
|
|
66
|
+
"otool",
|
|
67
|
+
"objdump",
|
|
68
|
+
"strings",
|
|
69
|
+
"strip",
|
|
70
|
+
"size",
|
|
71
|
+
"dsymutil",
|
|
72
|
+
"dwarfdump",
|
|
73
|
+
"lipo",
|
|
74
|
+
"vtool",
|
|
75
|
+
// Clang tooling
|
|
76
|
+
"clang-format",
|
|
77
|
+
"swift-format",
|
|
78
|
+
]);
|
|
79
|
+
|
|
80
|
+
// Prefixes for versioned binaries (e.g. python3.9, pip3.12, pydoc3.9, 2to3-3.9)
|
|
81
|
+
const XCODE_BIN_PREFIXES = ["python", "pip", "pydoc", "2to3"];
|
|
82
|
+
|
|
83
|
+
function isXcodeBin(command: string): boolean {
|
|
84
|
+
if (XCODE_BINS.has(command)) return true;
|
|
85
|
+
for (const prefix of XCODE_BIN_PREFIXES) {
|
|
86
|
+
if (command.startsWith(prefix)) return true;
|
|
87
|
+
}
|
|
88
|
+
return false;
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
// Resolve the active Xcode developer directory once, without spawning any process.
|
|
92
|
+
// Priority: $DEVELOPER_DIR env → /var/db/xcode_select_link symlink → common fallback paths.
|
|
93
|
+
function getDeveloperDirs(): string | null {
|
|
94
|
+
// 1. Explicit env override
|
|
95
|
+
const envDir = process.env.DEVELOPER_DIR;
|
|
96
|
+
if (envDir && fs.existsSync(envDir)) {
|
|
97
|
+
return envDir;
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
// 2. xcode-select stores the active path as a symlink
|
|
101
|
+
try {
|
|
102
|
+
return fs.readlinkSync("/var/db/xcode_select_link");
|
|
103
|
+
} catch {
|
|
104
|
+
// symlink may not exist on minimal installs
|
|
105
|
+
}
|
|
106
|
+
// 3. Common locations
|
|
107
|
+
for (const candidate of ["/Applications/Xcode.app/Contents/Developer", "/Library/Developer/CommandLineTools"]) {
|
|
108
|
+
if (fs.existsSync(candidate)) {
|
|
109
|
+
return candidate;
|
|
110
|
+
}
|
|
111
|
+
}
|
|
112
|
+
return null;
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
// Build the list of extra toolchain bin directories to check on macOS.
|
|
116
|
+
// Computed lazily once from the resolved developer directory.
|
|
117
|
+
let macosToolPaths: Map<string, string> | undefined;
|
|
118
|
+
function getMacosToolPaths(): Map<string, string> {
|
|
119
|
+
if (macosToolPaths) return macosToolPaths;
|
|
120
|
+
const paths: string[] = [
|
|
121
|
+
// Always check Command Line Tools (may be independent of Xcode)
|
|
122
|
+
"/Library/Developer/CommandLineTools/usr/bin",
|
|
123
|
+
];
|
|
124
|
+
const devDir = getDeveloperDirs();
|
|
125
|
+
if (devDir) {
|
|
126
|
+
paths.push(path.join(devDir, "usr/bin"), path.join(devDir, "Toolchains/XcodeDefault.xctoolchain/usr/bin"));
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
// Deduplicate (e.g. devDir may already be CommandLineTools)
|
|
130
|
+
macosToolPaths = new Map<string, string>();
|
|
131
|
+
for (const dir of Array.from(new Set(paths))) {
|
|
132
|
+
try {
|
|
133
|
+
for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
|
|
134
|
+
if (entry.isFile() || entry.isSymbolicLink()) {
|
|
135
|
+
if (macosToolPaths.has(entry.name)) {
|
|
136
|
+
continue;
|
|
137
|
+
}
|
|
138
|
+
macosToolPaths.set(entry.name, path.join(dir, entry.name));
|
|
139
|
+
}
|
|
140
|
+
}
|
|
141
|
+
} catch {
|
|
142
|
+
// dir doesn't exist or isn't readable
|
|
143
|
+
}
|
|
144
|
+
}
|
|
145
|
+
return macosToolPaths;
|
|
146
|
+
}
|
|
147
|
+
|
|
148
|
+
// Map: cache key -> resolved binary path or null (not found)
const toolCache = new Map<string | bigint, string | null>();

/**
 * Cache policy for which lookups.
 */
export const enum WhichCachePolicy {
  /**
   * Use cached result if available; otherwise look up and cache.
   */
  Cached = 0,
  /**
   * Bypass cache and perform a new lookup; the result is not cached.
   */
  Bypass,
  /**
   * Always perform a new lookup and update the cache with the result.
   */
  Fresh,
  /**
   * Read-only, serves from cache if present, but doesn't write.
   */
  ReadOnly,
}

// Extension: additional cache policy for tool path lookup
export interface WhichOptions extends Bun.WhichOptions {
  /**
   * Cache policy for the lookup.
   * Defaults to `WhichCachePolicy.Cached` (see `$which`).
   */
  cache?: WhichCachePolicy;
}
|
|
181
|
+
|
|
182
|
+
// Darwin-specific "which" shim: consult Xcode/CLT toolchain directories after $PATH.
|
|
183
|
+
// Uses cached directory listings instead of per-command existsSync or xcrun subprocesses.
|
|
184
|
+
function darwinWhich(command: string, _options?: Bun.WhichOptions): string | null {
|
|
185
|
+
const regular = Bun.which(command);
|
|
186
|
+
if (regular) return regular;
|
|
187
|
+
if (isXcodeBin(command)) {
|
|
188
|
+
return getMacosToolPaths().get(command) ?? null;
|
|
189
|
+
}
|
|
190
|
+
return null;
|
|
191
|
+
}
|
|
192
|
+
|
|
193
|
+
// Which function that incorporates Darwin Xcode logic if platform reports as 'darwin'
|
|
194
|
+
export const whichFresh = os.platform() === "darwin" ? darwinWhich : Bun.which;
|
|
195
|
+
|
|
196
|
+
// Derive stable cache key from command and lookup options
|
|
197
|
+
function cacheKey(command: string, options?: Bun.WhichOptions): string | bigint {
|
|
198
|
+
if (!options) return command;
|
|
199
|
+
if (!options.cwd && !options.PATH) return command;
|
|
200
|
+
let h = Bun.hash.xxHash64(command);
|
|
201
|
+
if (options.cwd) h = Bun.hash.xxHash64(options.cwd, h);
|
|
202
|
+
if (options.PATH) h = Bun.hash.xxHash64(options.PATH, h);
|
|
203
|
+
return h;
|
|
204
|
+
}
|
|
205
|
+
|
|
206
|
+
/**
|
|
207
|
+
* Locate binary on PATH (with flexible caching).
|
|
208
|
+
*
|
|
209
|
+
* @param command - Binary name to resolve
|
|
210
|
+
* @param options - Bun.WhichOptions plus `cache` control
|
|
211
|
+
* @returns Filesystem path if found, else null
|
|
212
|
+
*/
|
|
213
|
+
export function $which(command: string, options?: WhichOptions): string | null {
|
|
214
|
+
const cachePolicy = options?.cache ?? WhichCachePolicy.Cached;
|
|
215
|
+
let key: string | bigint | undefined;
|
|
216
|
+
|
|
217
|
+
if (cachePolicy !== WhichCachePolicy.Bypass) {
|
|
218
|
+
key = cacheKey(command, options);
|
|
219
|
+
if (cachePolicy !== WhichCachePolicy.Fresh) {
|
|
220
|
+
const cached = toolCache.get(key);
|
|
221
|
+
if (cached !== undefined) return cached;
|
|
222
|
+
}
|
|
223
|
+
}
|
|
224
|
+
|
|
225
|
+
const result = whichFresh(command, options);
|
|
226
|
+
if (key != null && cachePolicy !== WhichCachePolicy.ReadOnly) {
|
|
227
|
+
toolCache.set(key, result);
|
|
228
|
+
}
|
|
229
|
+
return result;
|
|
230
|
+
}
|