@cowsea2012/distill 0.1.31 → 0.1.33

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/bin/distill.js +14 -168
  2. package/dist/cli.js +1138 -0
  3. package/package.json +3 -11
package/bin/distill.js CHANGED
@@ -1,179 +1,25 @@
1
1
  #!/usr/bin/env node
2
2
 
3
- const { spawn } = require("node:child_process");
3
+ const fs = require("node:fs");
4
4
  const path = require("node:path");
5
- const { createRequire } = require("node:module");
6
5
 
7
- const requireFromHere = createRequire(__filename);
8
6
  const cliPackage = require(path.join(__dirname, "..", "package.json"));
7
+ const entryCandidates = [
8
+ path.join(__dirname, "..", "dist", "cli.js"),
9
+ path.resolve(__dirname, "..", "..", "..", ".dist", "npm", "cli.js")
10
+ ];
9
11
 
10
- function getPlatformPackageName(target) {
11
- const optionalDependencies = Object.keys(cliPackage.optionalDependencies ?? {});
12
- return optionalDependencies.find((packageName) => packageName.endsWith(`-${target}`));
13
- }
14
-
15
- function resolveBinaryPath() {
16
- const target = `${process.platform}-${process.arch}`;
17
- const packageName = getPlatformPackageName(target);
18
-
19
- if (!packageName) {
20
- console.error(
21
- `[distill] Unsupported platform: ${process.platform}/${process.arch}.`
22
- );
23
- process.exit(1);
12
+ function resolveEntryPath() {
13
+ for (const candidate of entryCandidates) {
14
+ if (fs.existsSync(candidate)) {
15
+ return candidate;
16
+ }
24
17
  }
25
18
 
26
- try {
27
- const packageJsonPath = requireFromHere.resolve(`${packageName}/package.json`);
28
- return path.join(path.dirname(packageJsonPath), "bin", "distill");
29
- } catch (error) {
30
- console.error(
31
- `[distill] Missing platform package ${packageName}. Reinstall ${cliPackage.name} for this platform.`
32
- );
33
- process.exit(1);
34
- }
35
- }
36
-
37
- const PROGRESS_PREFIX = "__DISTILL_PROGRESS__:";
38
- const PROGRESS_FRAMES = ["-", "\\", "|", "/"];
39
- const PROGRESS_DOT_FRAMES = ["", ".", "..", "...", "..", "."];
40
- const PROGRESS_LABELS = {
41
- collecting: "distill: waiting",
42
- summarizing: "distill: summarizing"
43
- };
44
-
45
- const binPath = resolveBinaryPath();
46
- const progressWriter = process.stderr.isTTY ? process.stderr : process.stdout.isTTY ? process.stdout : null;
47
- let progressPhase = "collecting";
48
- let progressFrame = 0;
49
- let progressTimer = null;
50
- let progressVisible = false;
51
- let childStderrBuffer = "";
52
-
53
- function renderProgress() {
54
- if (!progressWriter) {
55
- return;
56
- }
57
-
58
- const frame = PROGRESS_FRAMES[progressFrame % PROGRESS_FRAMES.length];
59
- const dots =
60
- PROGRESS_DOT_FRAMES[
61
- Math.floor(progressFrame / PROGRESS_FRAMES.length) % PROGRESS_DOT_FRAMES.length
62
- ];
63
- progressFrame += 1;
64
- progressWriter.write(
65
- `\r\u001b[2K${frame} ${PROGRESS_LABELS[progressPhase] || PROGRESS_LABELS.collecting}${dots}`
19
+ console.error(
20
+ `[distill] Missing bundled CLI entrypoint. Rebuild or reinstall ${cliPackage.name}.`
66
21
  );
67
- progressVisible = true;
68
- }
69
-
70
- function startProgress() {
71
- if (!progressWriter || progressTimer) {
72
- return;
73
- }
74
-
75
- renderProgress();
76
- progressTimer = setInterval(renderProgress, 120);
77
- }
78
-
79
- function stopProgress() {
80
- if (progressTimer) {
81
- clearInterval(progressTimer);
82
- progressTimer = null;
83
- }
84
-
85
- if (progressVisible && progressWriter) {
86
- progressWriter.write("\r\u001b[2K");
87
- progressVisible = false;
88
- }
89
- }
90
-
91
- function handleChildStderrLine(line) {
92
- if (!line) {
93
- return;
94
- }
95
-
96
- if (!line.startsWith(PROGRESS_PREFIX)) {
97
- stopProgress();
98
- process.stderr.write(`${line}\n`);
99
- return;
100
- }
101
-
102
- if (line === `${PROGRESS_PREFIX}stop`) {
103
- stopProgress();
104
- return;
105
- }
106
-
107
- if (line.startsWith(`${PROGRESS_PREFIX}phase:`)) {
108
- progressPhase = line.slice(`${PROGRESS_PREFIX}phase:`.length) || "collecting";
109
- progressFrame = 0;
110
- renderProgress();
111
- }
112
- }
113
-
114
- function flushChildStderr(force = false) {
115
- if (!force && !childStderrBuffer.includes("\n")) {
116
- return;
117
- }
118
-
119
- const parts = childStderrBuffer.split("\n");
120
- childStderrBuffer = force ? "" : parts.pop() || "";
121
-
122
- for (const line of parts) {
123
- handleChildStderrLine(line);
124
- }
125
-
126
- if (force && childStderrBuffer) {
127
- handleChildStderrLine(childStderrBuffer);
128
- childStderrBuffer = "";
129
- }
130
- }
131
-
132
- const child = spawn(binPath, process.argv.slice(2), {
133
- stdio: ["inherit", "pipe", "pipe"],
134
- env: {
135
- ...process.env,
136
- DISTILL_PROGRESS_PROTOCOL: "stderr"
137
- }
138
- });
139
-
140
- startProgress();
141
-
142
- child.stdout.on("data", (chunk) => {
143
- stopProgress();
144
- process.stdout.write(chunk);
145
- });
146
-
147
- child.stderr.on("data", (chunk) => {
148
- childStderrBuffer += chunk.toString("utf8");
149
- flushChildStderr();
150
- });
151
-
152
- const forwardSignal = (signal) => {
153
- if (!child.killed) {
154
- child.kill(signal);
155
- }
156
- };
157
-
158
- ["SIGINT", "SIGTERM", "SIGHUP"].forEach((signal) => {
159
- process.on(signal, () => forwardSignal(signal));
160
- });
161
-
162
- child.on("error", (error) => {
163
- stopProgress();
164
- console.error(`[distill] Failed to launch native binary: ${error.message}`);
165
22
  process.exit(1);
166
- });
167
-
168
- child.on("exit", (code, signal) => {
169
- flushChildStderr(true);
170
- stopProgress();
171
-
172
- if (signal) {
173
- process.removeAllListeners(signal);
174
- process.kill(process.pid, signal);
175
- return;
176
- }
23
+ }
177
24
 
178
- process.exit(code ?? 1);
179
- });
25
+ require(resolveEntryPath());
package/dist/cli.js ADDED
@@ -0,0 +1,1138 @@
1
+ var __create = Object.create;
2
+ var __getProtoOf = Object.getPrototypeOf;
3
+ var __defProp = Object.defineProperty;
4
+ var __getOwnPropNames = Object.getOwnPropertyNames;
5
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
6
+ function __accessProp(key) {
7
+ return this[key];
8
+ }
9
+ var __toESMCache_node;
10
+ var __toESMCache_esm;
11
+ var __toESM = (mod, isNodeMode, target) => {
12
+ var canCache = mod != null && typeof mod === "object";
13
+ if (canCache) {
14
+ var cache = isNodeMode ? __toESMCache_node ??= new WeakMap : __toESMCache_esm ??= new WeakMap;
15
+ var cached = cache.get(mod);
16
+ if (cached)
17
+ return cached;
18
+ }
19
+ target = mod != null ? __create(__getProtoOf(mod)) : {};
20
+ const to = isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target;
21
+ for (let key of __getOwnPropNames(mod))
22
+ if (!__hasOwnProp.call(to, key))
23
+ __defProp(to, key, {
24
+ get: __accessProp.bind(mod, key),
25
+ enumerable: true
26
+ });
27
+ if (canCache)
28
+ cache.set(mod, to);
29
+ return to;
30
+ };
31
+ // packages/cli/package.json
32
+ var package_default = {
33
+ name: "@cowsea2012/distill",
34
+ version: "0.1.33",
35
+ description: "Compress command output for downstream LLMs.",
36
+ license: "MIT",
37
+ bin: {
38
+ distill: "bin/distill.js"
39
+ },
40
+ files: [
41
+ "bin/distill.js",
42
+ "dist/cli.js",
43
+ "README.md"
44
+ ],
45
+ engines: {
46
+ node: ">=18"
47
+ },
48
+ publishConfig: {
49
+ access: "public"
50
+ }
51
+ };
52
+
53
+ // src/omlx-config.ts
54
+ var import_node_fs = require("node:fs");
55
+ var import_node_path = __toESM(require("node:path"));
56
+ var DEFAULT_OMLX_BASE_URL = "http://127.0.0.1:8000/v1";
57
+ var DEFAULT_OMLX_MODEL = "auto";
58
+ function trimString(value) {
59
+ if (typeof value !== "string") {
60
+ return;
61
+ }
62
+ const trimmed = value.trim();
63
+ return trimmed ? trimmed : undefined;
64
+ }
65
+ function resolveOmlxHome(env) {
66
+ const explicit = trimString(env.OMLX_HOME);
67
+ if (explicit) {
68
+ return explicit;
69
+ }
70
+ const home = trimString(env.HOME);
71
+ return home ? import_node_path.default.join(home, ".omlx") : undefined;
72
+ }
73
+ function resolveSettingsPath(env) {
74
+ const explicit = trimString(env.OMLX_SETTINGS_PATH);
75
+ if (explicit) {
76
+ return explicit;
77
+ }
78
+ const omlxHome = resolveOmlxHome(env);
79
+ return omlxHome ? import_node_path.default.join(omlxHome, "settings.json") : undefined;
80
+ }
81
+ function readSettings(env) {
82
+ const settingsPath = resolveSettingsPath(env);
83
+ if (!settingsPath || !import_node_fs.existsSync(settingsPath)) {
84
+ return;
85
+ }
86
+ try {
87
+ const raw = import_node_fs.readFileSync(settingsPath, "utf8");
88
+ const parsed = JSON.parse(raw);
89
+ return parsed && typeof parsed === "object" ? parsed : undefined;
90
+ } catch {
91
+ return;
92
+ }
93
+ }
94
+ function visibleSubdirectories(dirPath) {
95
+ try {
96
+ return import_node_fs.readdirSync(dirPath, { withFileTypes: true }).filter((entry) => !entry.name.startsWith(".") && entry.isDirectory()).map((entry) => entry.name).sort((left, right) => left.localeCompare(right));
97
+ } catch {
98
+ return [];
99
+ }
100
+ }
101
+ function directoryLooksLikeOrganizationFolder(dirPath) {
102
+ try {
103
+ const entries = import_node_fs.readdirSync(dirPath, { withFileTypes: true }).filter((entry) => !entry.name.startsWith("."));
104
+ return entries.length > 0 && entries.every((entry) => entry.isDirectory());
105
+ } catch {
106
+ return false;
107
+ }
108
+ }
109
+ function findDefaultModelName(modelDirs) {
110
+ for (const modelDir of modelDirs) {
111
+ const topLevel = visibleSubdirectories(modelDir);
112
+ for (const candidate of topLevel) {
113
+ const candidatePath = import_node_path.default.join(modelDir, candidate);
114
+ if (directoryLooksLikeOrganizationFolder(candidatePath)) {
115
+ const nested = visibleSubdirectories(candidatePath)[0];
116
+ if (nested) {
117
+ return nested;
118
+ }
119
+ }
120
+ return candidate;
121
+ }
122
+ }
123
+ return;
124
+ }
125
+ function resolveModelDirs(env, settings) {
126
+ const envDir = trimString(env.OMLX_MODEL_DIR);
127
+ if (envDir) {
128
+ return [envDir];
129
+ }
130
+ const configuredDir = trimString(settings?.model?.model_dir);
131
+ if (configuredDir) {
132
+ return [configuredDir];
133
+ }
134
+ const configuredDirs = (settings?.model?.model_dirs ?? []).map((entry) => trimString(entry)).filter((entry) => Boolean(entry));
135
+ if (configuredDirs.length > 0) {
136
+ return configuredDirs;
137
+ }
138
+ const omlxHome = resolveOmlxHome(env);
139
+ return omlxHome ? [import_node_path.default.join(omlxHome, "models")] : [];
140
+ }
141
+ function resolveOmlxDefaults(env) {
142
+ const settings = readSettings(env);
143
+ const host = trimString(settings?.server?.host);
144
+ const port = typeof settings?.server?.port === "number" && Number.isFinite(settings.server.port) ? settings.server.port : undefined;
145
+ const model = findDefaultModelName(resolveModelDirs(env, settings));
146
+ return {
147
+ apiKey: trimString(env.OMLX_API_KEY) ?? trimString(settings?.auth?.api_key),
148
+ baseUrl: host && port ? `http://${host}:${port}/v1` : undefined,
149
+ model
150
+ };
151
+ }
152
+
153
+ // src/config.ts
154
+ var DISTILL_VERSION = package_default.version;
155
+ var DEFAULT_PROVIDER = "omlx";
156
+ var DEFAULT_OLLAMA_MODEL = "qwen3.5:2b";
157
+ var DEFAULT_OPENAI_MODEL = "gpt-4.1-mini";
158
+ var DEFAULT_MODEL = DEFAULT_OMLX_MODEL;
159
+ var DEFAULT_OLLAMA_HOST = "http://127.0.0.1:11434";
160
+ var DEFAULT_HOST = DEFAULT_OMLX_BASE_URL;
161
+ var DEFAULT_OPENAI_BASE_URL = "https://api.openai.com/v1";
162
+ var DEFAULT_TIMEOUT_MS = 90000;
163
+ var DEFAULT_IDLE_MS = 1200;
164
+ var DEFAULT_INTERACTIVE_GAP_MS = 180;
165
+ var DEFAULT_PROGRESS_FRAME_MS = 120;
166
+
167
+ class UsageError extends Error {
168
+ exitCode = 2;
169
+ constructor(message) {
170
+ super(message);
171
+ this.name = "UsageError";
172
+ }
173
+ }
174
+ function readFlagValue(argv, index, name) {
175
+ const current = argv[index];
176
+ const inline = current.slice(name.length + 1);
177
+ if (inline.length > 0) {
178
+ return { value: inline, nextIndex: index };
179
+ }
180
+ const next = argv[index + 1];
181
+ if (!next) {
182
+ throw new UsageError(`Missing value for ${name}.`);
183
+ }
184
+ return { value: next, nextIndex: index + 1 };
185
+ }
186
+ function coerceTimeout(input) {
187
+ const value = Number(input ?? DEFAULT_TIMEOUT_MS);
188
+ if (!Number.isFinite(value) || value <= 0) {
189
+ throw new UsageError("Timeout must be a positive number.");
190
+ }
191
+ return Math.floor(value);
192
+ }
193
+ function parseBoolean(input, name) {
194
+ const value = input.trim().toLowerCase();
195
+ switch (value) {
196
+ case "true":
197
+ case "1":
198
+ case "yes":
199
+ case "on":
200
+ return true;
201
+ case "false":
202
+ case "0":
203
+ case "no":
204
+ case "off":
205
+ return false;
206
+ default:
207
+ throw new UsageError(`${name} must be true or false.`);
208
+ }
209
+ }
210
+ function normalizeHost(input) {
211
+ const value = (input ?? DEFAULT_HOST).trim();
212
+ if (!value) {
213
+ throw new UsageError("Host cannot be empty.");
214
+ }
215
+ return value.endsWith("/") ? value.slice(0, -1) : value;
216
+ }
217
+ function parseProvider(input) {
218
+ const value = input.trim().toLowerCase();
219
+ if (value === "ollama" || value === "openai" || value === "omlx") {
220
+ return value;
221
+ }
222
+ throw new UsageError(`Provider must be "ollama", "openai", or "omlx".`);
223
+ }
224
+ function resolveRuntimeDefaults(env, persisted) {
225
+ const omlxDefaults = resolveOmlxDefaults(env);
226
+ const provider = parseProvider(env.DISTILL_PROVIDER ?? persisted.provider ?? DEFAULT_PROVIDER);
227
+ const model = env.DISTILL_MODEL ?? persisted.model ?? (provider === "openai" ? DEFAULT_OPENAI_MODEL : provider === "ollama" ? DEFAULT_OLLAMA_MODEL : omlxDefaults.model ?? DEFAULT_MODEL);
228
+ const host = normalizeHost(provider === "openai" ? env.OPENAI_BASE_URL ?? persisted.host ?? DEFAULT_OPENAI_BASE_URL : provider === "ollama" ? env.OLLAMA_HOST ?? persisted.host ?? DEFAULT_OLLAMA_HOST : env.OMLX_BASE_URL ?? persisted.host ?? omlxDefaults.baseUrl ?? DEFAULT_HOST);
229
+ const apiKey = provider === "openai" ? env.OPENAI_API_KEY ?? persisted.apiKey ?? "" : provider === "omlx" ? env.OMLX_API_KEY ?? persisted.apiKey ?? omlxDefaults.apiKey ?? "" : "";
230
+ const timeoutMs = coerceTimeout(env.DISTILL_TIMEOUT_MS ?? String(persisted.timeoutMs ?? DEFAULT_TIMEOUT_MS));
231
+ const thinking = parseBoolean(env.DISTILL_THINKING ?? String(persisted.thinking ?? false), "Thinking");
232
+ return {
233
+ provider,
234
+ model,
235
+ host,
236
+ apiKey,
237
+ timeoutMs,
238
+ thinking
239
+ };
240
+ }
241
+ function parseConfigCommand(argv) {
242
+ if (argv.length === 1) {
243
+ return { kind: "configShow" };
244
+ }
245
+ const key = argv[1];
246
+ if (!["provider", "model", "host", "api-key", "timeout-ms", "thinking"].includes(key)) {
247
+ throw new UsageError(`Unknown config key: ${argv[1]}`);
248
+ }
249
+ if (argv.length === 2) {
250
+ return { kind: "configGet", key };
251
+ }
252
+ const rawValue = argv.slice(2).join(" ").trim();
253
+ if (!rawValue) {
254
+ throw new UsageError(`Missing value for config key ${key}.`);
255
+ }
256
+ if (key === "provider") {
257
+ return {
258
+ kind: "configSet",
259
+ key,
260
+ value: parseProvider(rawValue)
261
+ };
262
+ }
263
+ if (key === "thinking") {
264
+ return {
265
+ kind: "configSet",
266
+ key,
267
+ value: parseBoolean(rawValue, "Thinking")
268
+ };
269
+ }
270
+ if (key === "timeout-ms") {
271
+ return {
272
+ kind: "configSet",
273
+ key,
274
+ value: coerceTimeout(rawValue)
275
+ };
276
+ }
277
+ if (key === "host") {
278
+ return {
279
+ kind: "configSet",
280
+ key,
281
+ value: normalizeHost(rawValue)
282
+ };
283
+ }
284
+ return {
285
+ kind: "configSet",
286
+ key,
287
+ value: rawValue
288
+ };
289
+ }
290
+ function parseCommand(argv, env, persisted = {}) {
291
+ if (argv[0] === "config") {
292
+ return parseConfigCommand(argv);
293
+ }
294
+ if (argv.length === 1 && (argv[0] === "--help" || argv[0] === "-h")) {
295
+ return { kind: "help" };
296
+ }
297
+ if (argv.length === 1 && (argv[0] === "--version" || argv[0] === "-v")) {
298
+ return { kind: "version" };
299
+ }
300
+ const defaults = resolveRuntimeDefaults(env, persisted);
301
+ let provider = defaults.provider;
302
+ let model = defaults.model;
303
+ let host = defaults.host;
304
+ let apiKey = defaults.apiKey;
305
+ let timeoutMs = defaults.timeoutMs;
306
+ let thinking = defaults.thinking;
307
+ const questionParts = [];
308
+ for (let index = 0;index < argv.length; index += 1) {
309
+ const token = argv[index];
310
+ if (token === "--") {
311
+ questionParts.push(...argv.slice(index + 1));
312
+ break;
313
+ }
314
+ if (token === "--provider" || token.startsWith("--provider=")) {
315
+ const parsed = readFlagValue(argv, index, "--provider");
316
+ provider = parseProvider(parsed.value);
317
+ index = parsed.nextIndex;
318
+ continue;
319
+ }
320
+ if (token === "--model" || token.startsWith("--model=")) {
321
+ const parsed = readFlagValue(argv, index, "--model");
322
+ model = parsed.value;
323
+ index = parsed.nextIndex;
324
+ continue;
325
+ }
326
+ if (token === "--host" || token.startsWith("--host=")) {
327
+ const parsed = readFlagValue(argv, index, "--host");
328
+ host = parsed.value;
329
+ index = parsed.nextIndex;
330
+ continue;
331
+ }
332
+ if (token === "--api-key" || token.startsWith("--api-key=")) {
333
+ const parsed = readFlagValue(argv, index, "--api-key");
334
+ apiKey = parsed.value;
335
+ index = parsed.nextIndex;
336
+ continue;
337
+ }
338
+ if (token === "--timeout-ms" || token.startsWith("--timeout-ms=")) {
339
+ const parsed = readFlagValue(argv, index, "--timeout-ms");
340
+ timeoutMs = coerceTimeout(parsed.value);
341
+ index = parsed.nextIndex;
342
+ continue;
343
+ }
344
+ if (token === "--thinking" || token.startsWith("--thinking=")) {
345
+ const parsed = readFlagValue(argv, index, "--thinking");
346
+ thinking = parseBoolean(parsed.value, "Thinking");
347
+ index = parsed.nextIndex;
348
+ continue;
349
+ }
350
+ if (token.startsWith("-")) {
351
+ throw new UsageError(`Unknown flag: ${token}`);
352
+ }
353
+ questionParts.push(token);
354
+ }
355
+ const question = questionParts.join(" ").trim();
356
+ if (!question) {
357
+ throw new UsageError("A question is required.");
358
+ }
359
+ if (provider === "openai" && !apiKey) {
360
+ throw new UsageError("An API key is required for the openai provider. Set OPENAI_API_KEY or use --api-key.");
361
+ }
362
+ return {
363
+ kind: "run",
364
+ config: {
365
+ question,
366
+ provider,
367
+ model,
368
+ host: normalizeHost(host),
369
+ apiKey,
370
+ timeoutMs,
371
+ thinking
372
+ }
373
+ };
374
+ }
375
+ function formatUsage() {
376
+ return [
377
+ "Usage:",
378
+ ' cmd 2>&1 | distill "question"',
379
+ ' distill config model "Qwen3.5-2B"',
380
+ " distill config thinking false",
381
+ " distill config provider omlx",
382
+ "",
383
+ "Options:",
384
+ ` --provider <name> LLM provider: ollama, openai, or omlx (default: ${DEFAULT_PROVIDER})`,
385
+ ` --model <name> Model name (default: ${DEFAULT_MODEL}; omlx auto-detects local models)`,
386
+ " --host <url> API base URL (provider-specific; omlx defaults to local settings)",
387
+ " --api-key <key> API key for openai or omlx (env: OPENAI_API_KEY / OMLX_API_KEY)",
388
+ ` --timeout-ms <ms> Request timeout in milliseconds (default: ${DEFAULT_TIMEOUT_MS})`,
389
+ " --thinking <bool> Enable or disable model thinking (default: false)",
390
+ " --help Show usage",
391
+ " --version Show version",
392
+ "",
393
+ "omlx defaults:",
394
+ " Reads ~/.omlx/settings.json when available for host and api key."
395
+ ].join(`
396
+ `);
397
+ }
398
+
399
+ // src/ollama.ts
400
+ async function requestOllama({
401
+ host,
402
+ model,
403
+ prompt,
404
+ timeoutMs,
405
+ thinking,
406
+ fetchImpl = fetch
407
+ }) {
408
+ const controller = new AbortController;
409
+ const timeout = setTimeout(() => controller.abort(), timeoutMs);
410
+ try {
411
+ const url = new URL("/api/generate", `${host}/`);
412
+ const response = await fetchImpl(url, {
413
+ method: "POST",
414
+ headers: {
415
+ "content-type": "application/json"
416
+ },
417
+ body: JSON.stringify({
418
+ model,
419
+ prompt,
420
+ stream: false,
421
+ think: thinking,
422
+ options: {
423
+ temperature: 0.1,
424
+ num_predict: 80
425
+ }
426
+ }),
427
+ signal: controller.signal
428
+ });
429
+ if (!response.ok) {
430
+ throw new Error(`Ollama request failed with ${response.status}.`);
431
+ }
432
+ const rawText = await response.text();
433
+ let payload;
434
+ try {
435
+ payload = JSON.parse(rawText);
436
+ } catch {
437
+ throw new Error("Ollama returned invalid JSON.");
438
+ }
439
+ if (typeof payload !== "object" || payload === null || typeof payload.response !== "string") {
440
+ throw new Error("Ollama returned an invalid response payload.");
441
+ }
442
+ const output = payload.response.trim();
443
+ if (!output) {
444
+ throw new Error("Ollama returned an empty response.");
445
+ }
446
+ return output;
447
+ } finally {
448
+ clearTimeout(timeout);
449
+ }
450
+ }
451
+
452
+ // src/openai.ts
453
+ function buildAuthHeaders(apiKey) {
454
+ const key = apiKey.trim();
455
+ return key ? { authorization: `Bearer ${key}` } : {};
456
+ }
457
+ async function resolveModelName(baseUrl, apiKey, fetchImpl, signal, requestedModel) {
458
+ const model = requestedModel.trim();
459
+ if (model && model !== "auto") {
460
+ return model;
461
+ }
462
+ const response = await fetchImpl(new URL("/v1/models", `${baseUrl}/`), {
463
+ method: "GET",
464
+ headers: buildAuthHeaders(apiKey),
465
+ signal
466
+ });
467
+ if (!response.ok) {
468
+ throw new Error(`OpenAI-compatible model discovery failed with ${response.status}.`);
469
+ }
470
+ const rawText = await response.text();
471
+ let payload;
472
+ try {
473
+ payload = JSON.parse(rawText);
474
+ } catch {
475
+ throw new Error("OpenAI-compatible model discovery returned invalid JSON.");
476
+ }
477
+ const models = Array.isArray(payload.data) ? payload.data.map((entry) => typeof entry.id === "string" ? entry.id.trim() : "").filter(Boolean) : [];
478
+ if (models.length === 0) {
479
+ throw new Error("OpenAI-compatible model discovery returned no models.");
480
+ }
481
+ return models[0];
482
+ }
483
+ async function requestOpenAI({
484
+ baseUrl,
485
+ apiKey,
486
+ model,
487
+ prompt,
488
+ timeoutMs,
489
+ fetchImpl = fetch
490
+ }) {
491
+ const controller = new AbortController;
492
+ const timeout = setTimeout(() => controller.abort(), timeoutMs);
493
+ try {
494
+ const resolvedModel = await resolveModelName(baseUrl, apiKey, fetchImpl, controller.signal, model);
495
+ const url = new URL("/v1/chat/completions", `${baseUrl}/`);
496
+ const response = await fetchImpl(url, {
497
+ method: "POST",
498
+ headers: {
499
+ "content-type": "application/json",
500
+ ...buildAuthHeaders(apiKey)
501
+ },
502
+ body: JSON.stringify({
503
+ model: resolvedModel,
504
+ messages: [{ role: "user", content: prompt }],
505
+ temperature: 0.1,
506
+ max_tokens: 200
507
+ }),
508
+ signal: controller.signal
509
+ });
510
+ if (!response.ok) {
511
+ throw new Error(`OpenAI request failed with ${response.status}.`);
512
+ }
513
+ const rawText = await response.text();
514
+ let payload;
515
+ try {
516
+ payload = JSON.parse(rawText);
517
+ } catch {
518
+ throw new Error("OpenAI returned invalid JSON.");
519
+ }
520
+ if (typeof payload !== "object" || payload === null || !Array.isArray(payload.choices) || payload.choices.length === 0) {
521
+ throw new Error("OpenAI returned an invalid response payload.");
522
+ }
523
+ const choice = payload.choices[0];
524
+ const content = choice?.message?.content?.trim();
525
+ if (!content) {
526
+ throw new Error("OpenAI returned an empty response.");
527
+ }
528
+ return content;
529
+ } finally {
530
+ clearTimeout(timeout);
531
+ }
532
+ }
533
+
534
+ // src/prompt.ts
535
+ function buildBatchPrompt(question, input) {
536
+ return [
537
+ "You compress command output for another paid language model.",
538
+ "Rules:",
539
+ "- Answer only what the question asks.",
540
+ "- Use the same language as the question.",
541
+ "- No markdown.",
542
+ "- Keep the answer extremely short (but complete) unless explicitly asked to elaborate or not summarize.",
543
+ "- Prefer one sentence. Never exceed three short lines.",
544
+ "- Never ask for more input.",
545
+ '- If the command output is insufficient, reply only with "distill: Insufficient information to output anything." in the same language as the question.',
546
+ "- If the source is already shorter than your answer would be, prefer a minimal answer or reuse the source wording.",
547
+ "",
548
+ `Question: ${question}`,
549
+ "",
550
+ "Command output:",
551
+ input
552
+ ].join(`
553
+ `);
554
+ }
555
+ function buildWatchPrompt(question, previousCycle, currentCycle) {
556
+ return [
557
+ "You compare two consecutive watch-mode command cycles for another paid language model.",
558
+ "Rules:",
559
+ "- Answer only what the question asks.",
560
+ "- Focus on what changed from the previous cycle to the current cycle.",
561
+ "- Use the same language as the question.",
562
+ "- No markdown.",
563
+ "- Keep the answer extremely short (but complete) unless explicitly asked to elaborate or not summarize.",
564
+ "- Prefer one sentence. Never exceed three short lines.",
565
+ '- If nothing relevant changed, reply only with "No relevant change." in the same language as the question.',
566
+ "- Never ask for more input.",
567
+ "",
568
+ `Question: ${question}`,
569
+ "",
570
+ "Previous cycle:",
571
+ previousCycle,
572
+ "",
573
+ "Current cycle:",
574
+ currentCycle
575
+ ].join(`
576
+ `);
577
+ }
578
+
579
+ // src/summarizer.ts
580
+ function requestLLM(config, prompt, fetchImpl) {
581
+ if (config.provider === "openai" || config.provider === "omlx") {
582
+ return requestOpenAI({
583
+ baseUrl: config.host,
584
+ apiKey: config.apiKey,
585
+ model: config.model,
586
+ prompt,
587
+ timeoutMs: config.timeoutMs,
588
+ fetchImpl
589
+ });
590
+ }
591
+ return requestOllama({
592
+ host: config.host,
593
+ model: config.model,
594
+ prompt,
595
+ timeoutMs: config.timeoutMs,
596
+ thinking: config.thinking,
597
+ fetchImpl
598
+ });
599
+ }
600
+ function createSummarizer(config, fetchImpl) {
601
+ return {
602
+ summarizeBatch(input) {
603
+ return requestLLM(config, buildBatchPrompt(config.question, input), fetchImpl);
604
+ },
605
+ summarizeWatch(previousCycle, currentCycle) {
606
+ return requestLLM(config, buildWatchPrompt(config.question, previousCycle, currentCycle), fetchImpl);
607
+ }
608
+ };
609
+ }
610
+
611
+ // src/text.ts
612
+ var ANSI_PATTERN = /\u001B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])/g;
613
+ var PROMPT_PATTERN = /(?:\[[Yy]\/[Nn]\]|\[[Nn]\/[Yy]\]|\([Yy]\/[Nn]\)|\([Nn]\/[Yy]\)|password:|passphrase:|continue\?|proceed\?)\s*$/i;
614
+ function normalizeForModel(input) {
615
+ return input.replace(/\r\n/g, `
616
+ `).replace(/\r/g, `
617
+ `).replace(ANSI_PATTERN, "").replace(/[ \t]+\n/g, `
618
+ `).replace(/\n{3,}/g, `
619
+
620
+ `).trim();
621
+ }
622
+ function hasPromptLikeTail(input) {
623
+ const tail = input.slice(-256);
624
+ return PROMPT_PATTERN.test(tail.trimEnd());
625
+ }
626
+ function hasRedrawSignal(input) {
627
+ return input.includes("\r") || input.includes("\x1B[2J") || input.includes("\x1Bc");
628
+ }
629
+ function structuralSignature(input) {
630
+ return normalizeForModel(input).split(`
631
+ `).map((line) => line.toLowerCase().replace(/\b\d+\b/g, "#").replace(/[0-9a-f]{7,}/g, "<hex>").replace(/\s+/g, " ").trim()).filter(Boolean).slice(0, 24);
632
+ }
633
+ function structuralSimilarity(a, b) {
634
+ const left = structuralSignature(a);
635
+ const right = structuralSignature(b);
636
+ if (left.length === 0 || right.length === 0) {
637
+ return 0;
638
+ }
639
+ const leftSet = new Set(left);
640
+ const rightSet = new Set(right);
641
+ let overlap = 0;
642
+ for (const value of leftSet) {
643
+ if (rightSet.has(value)) {
644
+ overlap += 1;
645
+ }
646
+ }
647
+ return 2 * overlap / (leftSet.size + rightSet.size);
648
+ }
649
+ function looksLikeBadDistillation(source, candidate) {
650
+ const normalizedSource = normalizeForModel(source);
651
+ const normalizedCandidate = normalizeForModel(candidate);
652
+ if (!normalizedCandidate) {
653
+ return true;
654
+ }
655
+ const lowerCandidate = normalizedCandidate.toLowerCase();
656
+ if (lowerCandidate.includes("please provide") || lowerCandidate.includes("wish summarized") || lowerCandidate.includes("provided command output")) {
657
+ return true;
658
+ }
659
+ if (normalizedSource.length >= 1024) {
660
+ return normalizedCandidate.length >= normalizedSource.length * 0.8;
661
+ }
662
+ if (normalizedSource.length > 0) {
663
+ return normalizedCandidate === normalizedSource || normalizedCandidate.length > normalizedSource.length + 40;
664
+ }
665
+ return false;
666
+ }
667
+ function ensureTrailingNewline(text) {
668
+ return text.endsWith(`
669
+ `) ? text : `${text}
670
+ `;
671
+ }
672
+
673
+ // src/stream-distiller.ts
674
+ var PROGRESS_FRAMES = ["-", "\\", "|", "/"];
675
+ var PROGRESS_DOT_FRAMES = ["", ".", "..", "...", "..", "."];
676
+ var PROGRESS_LABELS = {
677
+ collecting: "distill: waiting",
678
+ summarizing: "distill: summarizing"
679
+ };
680
+
681
+ class DistillSession {
682
+ summarizer;
683
+ stdout;
684
+ isTTY;
685
+ progress;
686
+ onProgressPhase;
687
+ onProgressStop;
688
+ idleMs;
689
+ interactiveGapMs;
690
+ progressFrameMs;
691
+ rawBuffers = [];
692
+ completedBursts = [];
693
+ currentBurstBuffers = [];
694
+ mode = "undecided";
695
+ progressPhase = "collecting";
696
+ sawRedraw = false;
697
+ idleTimer = null;
698
+ interactiveTimer = null;
699
+ progressTimer = null;
700
+ queue = Promise.resolve();
701
+ nextBurstId = 1;
702
+ renderedPairs = new Set;
703
+ emittedWatchOutput = false;
704
+ passthrough = false;
705
+ progressVisible = false;
706
+ progressFrameIndex = 0;
707
+ lastProgressRenderAt = 0;
708
+ constructor(options) {
709
+ this.summarizer = options.summarizer;
710
+ this.stdout = options.stdout;
711
+ this.isTTY = options.isTTY;
712
+ this.progress = options.progress ?? null;
713
+ this.onProgressPhase = options.onProgressPhase ?? null;
714
+ this.onProgressStop = options.onProgressStop ?? null;
715
+ this.idleMs = options.idleMs ?? DEFAULT_IDLE_MS;
716
+ this.interactiveGapMs = options.interactiveGapMs ?? DEFAULT_INTERACTIVE_GAP_MS;
717
+ this.progressFrameMs = options.progressFrameMs ?? DEFAULT_PROGRESS_FRAME_MS;
718
+ this.onProgressPhase?.(this.progressPhase);
719
+ this.startProgress();
720
+ }
721
+ push(chunk) {
722
+ if (chunk.length === 0) {
723
+ return;
724
+ }
725
+ if (this.passthrough) {
726
+ this.stdout.write(chunk);
727
+ return;
728
+ }
729
+ if (this.mode !== "watch") {
730
+ this.rawBuffers.push(chunk);
731
+ }
732
+ this.currentBurstBuffers.push(chunk);
733
+ this.sawRedraw ||= hasRedrawSignal(chunk.toString("utf8"));
734
+ this.restartIdleTimer();
735
+ this.restartInteractiveTimer();
736
+ this.renderProgressIfDue();
737
+ }
738
// Stdin reached EOF: flush output according to the mode we settled on.
async end() {
  this.clearTimers();
  if (this.passthrough) {
    // Interactive passthrough already streamed everything; just clean up.
    this.stopProgress(true);
    return;
  }
  this.closeCurrentBurst();
  if (this.mode === "watch") {
    // Render the final burst pair (if not already rendered) and wait for
    // the serialized render queue to drain.
    this.scheduleLatestWatchRender();
    await this.queue;
    return;
  }
  // Batch mode: summarize the whole captured transcript at once.
  const rawInput = Buffer.concat(this.rawBuffers).toString("utf8");
  if (!rawInput) {
    this.stopProgress(true);
    return;
  }
  try {
    this.setProgressPhase("summarizing");
    const summary = await this.summarizer.summarizeBatch(normalizeForModel(rawInput));
    if (looksLikeBadDistillation(rawInput, summary)) {
      // Summary looks unfaithful to the input: emit the raw capture instead.
      this.stopProgress(true);
      this.stdout.write(Buffer.concat(this.rawBuffers));
      return;
    }
    this.stopProgress(true);
    this.stdout.write(ensureTrailingNewline(summary.trim()));
  } catch {
    // Summarizer failures degrade to raw passthrough rather than crashing.
    this.stopProgress(true);
    this.stdout.write(Buffer.concat(this.rawBuffers));
  }
}
+ restartIdleTimer() {
771
+ if (this.idleTimer) {
772
+ clearTimeout(this.idleTimer);
773
+ }
774
+ this.idleTimer = setTimeout(() => {
775
+ this.closeCurrentBurst();
776
+ if (this.mode === "undecided" && this.shouldPromoteToWatch()) {
777
+ this.promoteToWatch();
778
+ this.scheduleLatestWatchRender();
779
+ }
780
+ }, this.idleMs);
781
+ }
782
+ restartInteractiveTimer() {
783
+ if (this.mode !== "undecided") {
784
+ return;
785
+ }
786
+ if (this.interactiveTimer) {
787
+ clearTimeout(this.interactiveTimer);
788
+ }
789
+ const tail = this.getTail();
790
+ if (!hasPromptLikeTail(tail)) {
791
+ return;
792
+ }
793
+ this.interactiveTimer = setTimeout(() => {
794
+ if (this.mode !== "undecided") {
795
+ return;
796
+ }
797
+ if (!hasPromptLikeTail(this.getTail())) {
798
+ return;
799
+ }
800
+ this.mode = "interactive";
801
+ this.passthrough = true;
802
+ this.clearTimers();
803
+ this.stopProgress(true);
804
+ this.stdout.write(Buffer.concat(this.rawBuffers));
805
+ }, this.interactiveGapMs);
806
+ }
807
+ clearTimers() {
808
+ if (this.idleTimer) {
809
+ clearTimeout(this.idleTimer);
810
+ this.idleTimer = null;
811
+ }
812
+ if (this.interactiveTimer) {
813
+ clearTimeout(this.interactiveTimer);
814
+ this.interactiveTimer = null;
815
+ }
816
+ }
817
+ startProgress() {
818
+ if (!this.progress || this.progressFrameMs <= 0 || this.progressTimer) {
819
+ return;
820
+ }
821
+ this.renderProgress();
822
+ this.progressTimer = setInterval(() => {
823
+ if (this.progressTimer === null || this.mode === "watch" || this.passthrough) {
824
+ return;
825
+ }
826
+ this.renderProgress();
827
+ }, this.progressFrameMs);
828
+ }
829
+ setProgressPhase(phase) {
830
+ if (this.progressPhase === phase) {
831
+ return;
832
+ }
833
+ this.progressPhase = phase;
834
+ this.progressFrameIndex = 0;
835
+ this.onProgressPhase?.(phase);
836
+ this.renderProgress();
837
+ }
838
+ renderProgressIfDue() {
839
+ if (!this.progress || this.mode === "watch" || this.passthrough) {
840
+ return;
841
+ }
842
+ if (Date.now() - this.lastProgressRenderAt < this.progressFrameMs) {
843
+ return;
844
+ }
845
+ this.renderProgress();
846
+ }
847
+ renderProgress() {
848
+ if (!this.progress) {
849
+ return;
850
+ }
851
+ const frame = PROGRESS_FRAMES[this.progressFrameIndex % PROGRESS_FRAMES.length];
852
+ const dots = PROGRESS_DOT_FRAMES[Math.floor(this.progressFrameIndex / PROGRESS_FRAMES.length) % PROGRESS_DOT_FRAMES.length];
853
+ this.progressFrameIndex += 1;
854
+ this.lastProgressRenderAt = Date.now();
855
+ this.progress.write(`\r\x1B[2K${frame} ${PROGRESS_LABELS[this.progressPhase]}${dots}`);
856
+ this.progressVisible = true;
857
+ }
858
+ stopProgress(clearLine = false) {
859
+ if (this.progressTimer) {
860
+ clearInterval(this.progressTimer);
861
+ this.progressTimer = null;
862
+ }
863
+ this.onProgressStop?.();
864
+ if (!clearLine || !this.progressVisible || !this.progress) {
865
+ return;
866
+ }
867
+ this.progress.write("\r\x1B[2K");
868
+ this.progressVisible = false;
869
+ }
870
+ closeCurrentBurst() {
871
+ if (this.currentBurstBuffers.length === 0 || this.passthrough) {
872
+ return;
873
+ }
874
+ const raw = Buffer.concat(this.currentBurstBuffers).toString("utf8");
875
+ this.currentBurstBuffers = [];
876
+ if (!raw) {
877
+ return;
878
+ }
879
+ this.completedBursts.push({
880
+ id: this.nextBurstId,
881
+ raw,
882
+ normalized: normalizeForModel(raw)
883
+ });
884
+ this.nextBurstId += 1;
885
+ }
886
+ shouldPromoteToWatch() {
887
+ if (this.completedBursts.length < 2) {
888
+ return false;
889
+ }
890
+ const previous = this.completedBursts[this.completedBursts.length - 2];
891
+ const current = this.completedBursts[this.completedBursts.length - 1];
892
+ const similarity = structuralSimilarity(previous.raw, current.raw);
893
+ return this.sawRedraw || similarity >= 0.55;
894
+ }
895
+ promoteToWatch() {
896
+ if (this.mode === "watch") {
897
+ return;
898
+ }
899
+ this.mode = "watch";
900
+ this.rawBuffers.length = 0;
901
+ this.clearTimers();
902
+ this.stopProgress(true);
903
+ }
904
+ scheduleLatestWatchRender() {
905
+ if (this.completedBursts.length < 2) {
906
+ return;
907
+ }
908
+ const previous = this.completedBursts[this.completedBursts.length - 2];
909
+ const current = this.completedBursts[this.completedBursts.length - 1];
910
+ const key = `${previous.id}:${current.id}`;
911
+ if (this.renderedPairs.has(key)) {
912
+ return;
913
+ }
914
+ this.renderedPairs.add(key);
915
+ this.queue = this.queue.then(async () => {
916
+ try {
917
+ const summary = await this.summarizer.summarizeWatch(previous.normalized, current.normalized);
918
+ if (looksLikeBadDistillation(current.raw, summary)) {
919
+ this.renderWatchFallback(current.raw);
920
+ return;
921
+ }
922
+ this.renderWatchSummary(summary.trim());
923
+ this.trimWatchHistory();
924
+ } catch {
925
+ this.renderWatchFallback(current.raw);
926
+ }
927
+ });
928
+ }
929
+ renderWatchSummary(summary) {
930
+ const output = ensureTrailingNewline(summary);
931
+ if (this.isTTY) {
932
+ this.stdout.write(`\x1B[2J\x1B[H${output}`);
933
+ this.emittedWatchOutput = true;
934
+ return;
935
+ }
936
+ if (this.emittedWatchOutput) {
937
+ this.stdout.write(`
938
+ `);
939
+ }
940
+ this.stdout.write(output);
941
+ this.emittedWatchOutput = true;
942
+ }
943
+ renderWatchFallback(raw) {
944
+ this.mode = "interactive";
945
+ this.passthrough = true;
946
+ this.stopProgress(true);
947
+ this.stdout.write(raw);
948
+ }
949
+ getTail() {
950
+ const tailBuffers = [];
951
+ let remaining = 256;
952
+ for (let index = this.rawBuffers.length - 1;index >= 0 && remaining > 0; index -= 1) {
953
+ const chunk = this.rawBuffers[index];
954
+ if (chunk.length <= remaining) {
955
+ tailBuffers.unshift(chunk);
956
+ remaining -= chunk.length;
957
+ continue;
958
+ }
959
+ tailBuffers.unshift(chunk.subarray(chunk.length - remaining));
960
+ remaining = 0;
961
+ }
962
+ return Buffer.concat(tailBuffers).toString("utf8");
963
+ }
964
+ trimWatchHistory() {
965
+ if (this.mode !== "watch" || this.completedBursts.length <= 2) {
966
+ return;
967
+ }
968
+ this.completedBursts.splice(0, this.completedBursts.length - 2);
969
+ }
970
+ }
971
+
972
+ // src/user-config.ts
973
+ var import_promises = require("node:fs/promises");
974
+ var import_node_path2 = __toESM(require("node:path"));
975
// Resolve the distill config directory: $XDG_CONFIG_HOME/distill when set,
// otherwise $HOME/.config/distill. Throws when neither variable is usable.
function resolveConfigBaseDir(env) {
  const xdgHome = env.XDG_CONFIG_HOME?.trim();
  if (xdgHome) {
    return import_node_path2.default.join(xdgHome, "distill");
  }
  const homeDir = env.HOME?.trim();
  if (!homeDir) {
    throw new Error("Could not resolve a home directory for distill config.");
  }
  return import_node_path2.default.join(homeDir, ".config", "distill");
}
// Resolve the config file path. DISTILL_CONFIG_PATH overrides the derived
// XDG/HOME location entirely; the fallback (and its possible throw for a
// missing home directory) is only evaluated when no override is set.
function resolveConfigPath(env) {
  const override = env.DISTILL_CONFIG_PATH?.trim();
  return override || import_node_path2.default.join(resolveConfigBaseDir(env), "config.json");
}
// Load the persisted JSON config. A missing file yields an empty config;
// other filesystem errors and JSON syntax errors propagate to the caller.
async function readPersistedConfig(env) {
  const configPath = resolveConfigPath(env);
  let raw;
  try {
    raw = await import_promises.readFile(configPath, "utf8");
  } catch (error) {
    if (error.code === "ENOENT") {
      return {};
    }
    throw error;
  }
  const parsed = JSON.parse(raw);
  // Guard against a file containing e.g. "null" or a bare scalar.
  return parsed && typeof parsed === "object" ? parsed : {};
}
// Serialize the config with a 2-space indent and trailing newline, creating
// the config directory on demand.
async function writePersistedConfig(env, config) {
  const configPath = resolveConfigPath(env);
  const directory = import_node_path2.default.dirname(configPath);
  await import_promises.mkdir(directory, { recursive: true });
  const body = `${JSON.stringify(config, null, 2)}\n`;
  await import_promises.writeFile(configPath, body);
}
// Read-modify-write one persisted setting, coercing the value to the field's
// type. Unrecognized keys fall through to the model field, mirroring
// getPersistedConfigValue. Returns the updated config object.
async function setPersistedConfigValue(env, key, value) {
  const current = await readPersistedConfig(env);
  switch (key) {
    case "provider":
      current.provider = String(value);
      break;
    case "timeout-ms":
      current.timeoutMs = Number(value);
      break;
    case "thinking":
      current.thinking = Boolean(value);
      break;
    case "host":
      current.host = String(value);
      break;
    case "api-key":
      current.apiKey = String(value);
      break;
    default:
      current.model = String(value);
  }
  await writePersistedConfig(env, current);
  return current;
}
// Map a CLI key name to its persisted-config property. Unrecognized keys
// fall through to the model field, mirroring setPersistedConfigValue.
function getPersistedConfigValue(config, key) {
  switch (key) {
    case "provider":
      return config.provider;
    case "timeout-ms":
      return config.timeoutMs;
    case "thinking":
      return config.thinking;
    case "host":
      return config.host;
    case "api-key":
      return config.apiKey;
    default:
      return config.model;
  }
}
+
1052
+ // src/cli.ts
1053
// CLI entry point: dispatch the parsed command, or distill stdin.
// Returns the process exit code.
async function run() {
  // Persisted config seeds defaults for flag parsing.
  const persisted = await readPersistedConfig(process.env);
  const command = parseCommand(process.argv.slice(2), process.env, persisted);
  if (command.kind === "help") {
    process.stdout.write(`${formatUsage()}
`);
    return 0;
  }
  if (command.kind === "version") {
    process.stdout.write(`${DISTILL_VERSION}
`);
    return 0;
  }
  if (command.kind === "configShow") {
    // One key=value line per setting; the API key is masked.
    process.stdout.write([
      `path=${resolveConfigPath(process.env)}`,
      `provider=${persisted.provider ?? ""}`,
      `model=${persisted.model ?? ""}`,
      `host=${persisted.host ?? ""}`,
      `api-key=${persisted.apiKey ? "***" : ""}`,
      `timeout-ms=${persisted.timeoutMs ?? ""}`,
      `thinking=${persisted.thinking ?? ""}`
    ].join(`
`) + `
`);
    return 0;
  }
  if (command.kind === "configGet") {
    const value = getPersistedConfigValue(persisted, command.key);
    process.stdout.write(`${value ?? ""}
`);
    return 0;
  }
  if (command.kind === "configSet") {
    await setPersistedConfigValue(process.env, command.key, command.value);
    process.stdout.write(`${command.key}=${String(command.value)}
`);
    return 0;
  }
  // From here on we are distilling: stdin must be piped, not a terminal.
  if (process.stdin.isTTY) {
    throw new UsageError("stdin is required.");
  }
  // DISTILL_PROGRESS_PROTOCOL=stderr swaps the inline spinner for a
  // machine-readable "__DISTILL_PROGRESS__" line protocol on stderr.
  const progressProtocol = process.env.DISTILL_PROGRESS_PROTOCOL === "stderr";
  // Spinner target: prefer a TTY stderr, then a TTY stdout, else none.
  const progress = progressProtocol ? undefined : process.stderr.isTTY ? process.stderr : process.stdout.isTTY ? process.stdout : undefined;
  const emitProgressPhase = progressProtocol ? (phase) => {
    process.stderr.write(`__DISTILL_PROGRESS__:phase:${phase}
`);
  } : undefined;
  const emitProgressStop = progressProtocol ? () => {
    process.stderr.write(`__DISTILL_PROGRESS__:stop
`);
  } : undefined;
  const session = new DistillSession({
    summarizer: createSummarizer(command.config),
    stdout: process.stdout,
    isTTY: Boolean(process.stdout.isTTY),
    progress,
    onProgressPhase: emitProgressPhase,
    onProgressStop: emitProgressStop
  });
  // Feed stdin into the session until EOF, then flush the final render.
  await new Promise((resolve, reject) => {
    process.stdin.on("data", (chunk) => {
      session.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
    });
    process.stdin.on("end", resolve);
    process.stdin.on("error", reject);
    process.stdin.resume();
  });
  await session.end();
  return 0;
}
run().then((code) => {
  process.exit(code);
}).catch((error) => {
  // Usage problems print the message plus the usage text and use the
  // error's own exit code; anything else is a generic failure (exit 1).
  if (error instanceof UsageError) {
    process.stderr.write(`${error.message}\n\n${formatUsage()}\n`);
    process.exit(error.exitCode);
  }
  const message = error instanceof Error ? `${error.message}\n` : "Unexpected error.\n";
  process.stderr.write(message);
  process.exit(1);
});
package/package.json CHANGED
@@ -1,27 +1,19 @@
1
1
  {
2
2
  "name": "@cowsea2012/distill",
3
- "version": "0.1.31",
3
+ "version": "0.1.33",
4
4
  "description": "Compress command output for downstream LLMs.",
5
5
  "license": "MIT",
6
6
  "bin": {
7
7
  "distill": "bin/distill.js"
8
8
  },
9
- "scripts": {
10
- "prepack": "node ../../scripts/prepack-check.js"
11
- },
12
9
  "files": [
13
- "bin",
10
+ "bin/distill.js",
11
+ "dist/cli.js",
14
12
  "README.md"
15
13
  ],
16
14
  "engines": {
17
15
  "node": ">=18"
18
16
  },
19
- "optionalDependencies": {
20
- "@cowsea2012/distill-darwin-arm64": "0.1.31",
21
- "@cowsea2012/distill-darwin-x64": "0.1.31",
22
- "@cowsea2012/distill-linux-arm64": "0.1.31",
23
- "@cowsea2012/distill-linux-x64": "0.1.31"
24
- },
25
17
  "publishConfig": {
26
18
  "access": "public"
27
19
  }