@llmist/cli 15.15.0 → 15.16.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.js CHANGED
@@ -19,6 +19,7 @@ var DEFAULT_MODEL = "openai:gpt-5-nano";
 var OPTION_FLAGS = {
   model: "-m, --model <identifier>",
   systemPrompt: "-s, --system <prompt>",
+  systemPromptFile: "--system-file <path>",
   temperature: "-t, --temperature <value>",
   maxTokens: "--max-tokens <count>",
   maxIterations: "-i, --max-iterations <count>",
@@ -60,6 +61,7 @@ var OPTION_FLAGS = {
 var OPTION_DESCRIPTIONS = {
   model: "Model identifier, e.g. openai:gpt-5-nano or anthropic:claude-sonnet-4-5.",
   systemPrompt: "Optional system prompt prepended to the conversation.",
+  systemPromptFile: "Path to file containing system prompt (alternative to --system).",
   temperature: "Sampling temperature between 0 and 2.",
   maxTokens: "Maximum number of output tokens requested from the model.",
   maxIterations: "Maximum number of agent loop iterations before exiting.",
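
Note: the two hunks above register a new --system-file <path> flag alongside the existing -s, --system <prompt>. A hypothetical invocation (the binary name and file path are illustrative, not taken from this diff):

  llmist complete --system-file ./prompt.txt "Summarize the input"
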
@@ -106,7 +108,7 @@ import { Command, InvalidArgumentError as InvalidArgumentError2 } from "commander";
 // package.json
 var package_default = {
   name: "@llmist/cli",
-  version: "15.15.0",
+  version: "15.16.0",
   description: "CLI for llmist - run LLM agents from the command line",
   type: "module",
   main: "dist/cli.js",
@@ -162,7 +164,7 @@ var package_default = {
     node: ">=22.0.0"
   },
   dependencies: {
-    llmist: "^15.15.0",
+    llmist: "^15.16.0",
     "@unblessed/node": "^1.0.0-alpha.23",
     chalk: "^5.6.2",
     commander: "^12.1.0",
@@ -176,7 +178,7 @@ var package_default = {
     zod: "^4.1.12"
   },
   devDependencies: {
-    "@llmist/testing": "^15.15.0",
+    "@llmist/testing": "^15.16.0",
     "@types/diff": "^8.0.0",
     "@types/js-yaml": "^4.0.9",
     "@types/marked-terminal": "^6.1.1",
@@ -1443,6 +1445,17 @@ async function checkFileSize(absolutePath, filePath, maxSize) {
     );
   }
 }
+async function readSystemPromptFile(filePath, options = {}) {
+  const absolutePath = resolve2(filePath);
+  const maxFileSize = options.maxFileSize ?? DEFAULT_MAX_FILE_SIZE;
+  try {
+    await checkFileSize(absolutePath, filePath, maxFileSize);
+    return await readFile(absolutePath, "utf-8");
+  } catch (error) {
+    const message = error instanceof Error ? error.message : String(error);
+    throw new Error(`Failed to read system prompt file "${filePath}": ${message}`);
+  }
+}
 async function readImageFile(filePath, options = {}) {
   const absolutePath = resolve2(filePath);
   const maxFileSize = options.maxFileSize ?? DEFAULT_MAX_FILE_SIZE;
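
Note: readSystemPromptFile follows the same pattern as the readImageFile function beneath it: resolve the path, enforce the shared DEFAULT_MAX_FILE_SIZE limit via checkFileSize, read as UTF-8, and wrap any failure in one descriptive error. A minimal sketch of what a caller observes on failure (the path is illustrative):

  try {
    await readSystemPromptFile("./missing.txt");
  } catch (error) {
    // error.message begins: Failed to read system prompt file "./missing.txt": ...
    console.error(error.message);
  }
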
@@ -3961,7 +3974,7 @@ async function executeAction(action, env) {
 
 // src/option-helpers.ts
 function addCompleteOptions(cmd, defaults) {
-  return cmd.option(OPTION_FLAGS.model, OPTION_DESCRIPTIONS.model, defaults?.model ?? DEFAULT_MODEL).option(OPTION_FLAGS.systemPrompt, OPTION_DESCRIPTIONS.systemPrompt, defaults?.system).option(
+  return cmd.option(OPTION_FLAGS.model, OPTION_DESCRIPTIONS.model, defaults?.model ?? DEFAULT_MODEL).option(OPTION_FLAGS.systemPrompt, OPTION_DESCRIPTIONS.systemPrompt, defaults?.system).option(OPTION_FLAGS.systemPromptFile, OPTION_DESCRIPTIONS.systemPromptFile).option(
     OPTION_FLAGS.temperature,
     OPTION_DESCRIPTIONS.temperature,
     createNumericParser({ label: "Temperature", min: 0, max: 2 }),
@@ -4022,7 +4035,7 @@ function addAgentOptions(cmd, defaults) {
     value
   ];
   const defaultGadgets = defaults?.gadgets ?? defaults?.gadget ?? [];
-  return cmd.option(OPTION_FLAGS.model, OPTION_DESCRIPTIONS.model, defaults?.model ?? DEFAULT_MODEL).option(OPTION_FLAGS.systemPrompt, OPTION_DESCRIPTIONS.systemPrompt, defaults?.system).option(
+  return cmd.option(OPTION_FLAGS.model, OPTION_DESCRIPTIONS.model, defaults?.model ?? DEFAULT_MODEL).option(OPTION_FLAGS.systemPrompt, OPTION_DESCRIPTIONS.systemPrompt, defaults?.system).option(OPTION_FLAGS.systemPromptFile, OPTION_DESCRIPTIONS.systemPromptFile).option(
    OPTION_FLAGS.temperature,
    OPTION_DESCRIPTIONS.temperature,
    createNumericParser({ label: "Temperature", min: 0, max: 2 }),
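
Note: both option builders chain .option(OPTION_FLAGS.systemPromptFile, OPTION_DESCRIPTIONS.systemPromptFile) with no default value. Commander camel-cases multi-word long flags, so --system-file surfaces as options.systemFile in the handlers below; a minimal standalone sketch (not from this bundle):

  import { Command } from "commander";
  const program = new Command()
    .option("--system-file <path>", "Path to file containing system prompt");
  program.parse(["node", "cli", "--system-file", "./prompt.txt"]);
  console.log(program.opts().systemFile); // "./prompt.txt"
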
@@ -8874,8 +8887,15 @@ ${ctx.gadgetName} requires interactive approval. Run in a terminal to approve.`
   if (retryConfig) {
     builder.withRetry(retryConfig);
   }
-  if (options.system) {
-    builder.withSystem(options.system);
+  let systemPrompt = options.system;
+  if (options.systemFile) {
+    if (options.system) {
+      throw new Error("Cannot use both --system and --system-file options");
+    }
+    systemPrompt = await readSystemPromptFile(options.systemFile);
+  }
+  if (systemPrompt) {
+    builder.withSystem(systemPrompt);
   }
   if (options.maxIterations !== void 0) {
     builder.withMaxIterations(options.maxIterations);
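
Note: this guard makes --system and --system-file mutually exclusive rather than silently preferring one. The identical block reappears in executeComplete in the next hunk; factored out, a shared helper might look like this (hypothetical, not present in this diff):

  async function resolveSystemPrompt(options) {
    if (options.systemFile) {
      if (options.system) {
        throw new Error("Cannot use both --system and --system-file options");
      }
      return readSystemPromptFile(options.systemFile);
    }
    return options.system;
  }
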
@@ -9097,8 +9117,15 @@ async function executeComplete(promptArg, options, env) {
   const client = env.createClient();
   const model = resolveModel2(options.model);
   const builder = new LLMMessageBuilder();
-  if (options.system) {
-    builder.addSystem(options.system);
+  let systemPrompt = options.system;
+  if (options.systemFile) {
+    if (options.system) {
+      throw new Error("Cannot use both --system and --system-file options");
+    }
+    systemPrompt = await readSystemPromptFile(options.systemFile);
+  }
+  if (systemPrompt) {
+    builder.addSystem(systemPrompt);
   }
   if (options.image || options.audio) {
     const parts = [text2(prompt)];
@@ -9152,39 +9179,54 @@ async function executeComplete(promptArg, options, env) {
   const printer = new StreamPrinter(env.stdout);
   const stderrTTY = env.stderr.isTTY === true;
   const progress = new StreamProgress(env.stderr, stderrTTY, client.modelRegistry);
-  const estimatedInputTokens = Math.round(prompt.length / FALLBACK_CHARS_PER_TOKEN2);
+  let estimatedInputTokens = 0;
+  for (const msg of messages) {
+    if (typeof msg.content === "string") {
+      estimatedInputTokens += msg.content.length;
+    } else if (Array.isArray(msg.content)) {
+      for (const part of msg.content) {
+        if (part.type === "text") {
+          estimatedInputTokens += part.text.length;
+        }
+      }
+    }
+  }
+  estimatedInputTokens = Math.round(estimatedInputTokens / FALLBACK_CHARS_PER_TOKEN2);
   progress.startCall(model, estimatedInputTokens);
   let finishReason;
   let usage;
   let accumulatedResponse = "";
-  for await (const chunk of stream) {
-    if (chunk.usage) {
-      usage = chunk.usage;
-      if (chunk.usage.inputTokens) {
-        progress.setInputTokens(chunk.usage.inputTokens, false);
+  try {
+    for await (const chunk of stream) {
+      if (chunk.usage) {
+        usage = chunk.usage;
+        if (chunk.usage.inputTokens) {
+          progress.setInputTokens(chunk.usage.inputTokens, false);
+        }
+        if (chunk.usage.outputTokens) {
+          progress.setOutputTokens(chunk.usage.outputTokens, false);
+        }
       }
-      if (chunk.usage.outputTokens) {
-        progress.setOutputTokens(chunk.usage.outputTokens, false);
+      if (chunk.thinking?.content) {
+        if (stderrTTY && !options.quiet) {
+          progress.pause();
+          env.stderr.write(`\x1B[2m${chunk.thinking.content}\x1B[0m`);
+        }
       }
-    }
-    if (chunk.thinking?.content) {
-      if (stderrTTY && !options.quiet) {
+      if (chunk.text) {
         progress.pause();
-        env.stderr.write(`\x1B[2m${chunk.thinking.content}\x1B[0m`);
+        accumulatedResponse += chunk.text;
+        progress.update(accumulatedResponse.length);
+        printer.write(chunk.text);
+      }
+      if (chunk.finishReason !== void 0) {
+        finishReason = chunk.finishReason;
       }
     }
-    if (chunk.text) {
-      progress.pause();
-      accumulatedResponse += chunk.text;
-      progress.update(accumulatedResponse.length);
-      printer.write(chunk.text);
-    }
-    if (chunk.finishReason !== void 0) {
-      finishReason = chunk.finishReason;
-    }
+    progress.endCall(usage);
+  } finally {
+    progress.complete();
   }
-  progress.endCall(usage);
-  progress.complete();
   printer.ensureNewline();
   if (llmLogDir) {
     const filename = "0001.response";
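
Note: two behavioral fixes ride along in this last hunk. First, the pre-call token estimate now sums the text of every message rather than only the user prompt, so a large file-loaded system prompt is reflected in the progress display; extracted as a standalone function the new logic is equivalent to the sketch below (the value of FALLBACK_CHARS_PER_TOKEN2 is not visible in this diff, so charsPerToken = 4 is an assumed placeholder). Second, the streaming loop is wrapped in try/finally so progress.complete() tears down the stderr progress display even when the stream throws mid-iteration, while progress.endCall(usage) still runs only on success.

  // Equivalent of the new estimator, as a standalone function.
  // charsPerToken = 4 is an assumption; the bundle divides by FALLBACK_CHARS_PER_TOKEN2.
  function estimateInputTokens(messages, charsPerToken = 4) {
    let chars = 0;
    for (const msg of messages) {
      if (typeof msg.content === "string") {
        chars += msg.content.length;
      } else if (Array.isArray(msg.content)) {
        for (const part of msg.content) {
          if (part.type === "text") chars += part.text.length;
        }
      }
    }
    return Math.round(chars / charsPerToken);
  }
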