llmist 1.2.0 → 1.3.1

This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
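The main change in this release is automatic context compaction for agent conversations, plus an expanded testing toolkit. A minimal usage sketch assembled from the builder and agent methods added below; the `LLMist` import path and the `'sonnet'` model shortcut are taken from the diff's own JSDoc examples, so treat the exact entry point and finalizer as assumptions rather than confirmed API:

```typescript
import { LLMist } from "llmist"; // import path assumed; not shown in this diff

// Compaction is enabled by default; withCompaction() only customizes it.
const agent = await LLMist.createAgent()
  .withModel("sonnet")
  .withCompaction({
    triggerThresholdPercent: 70, // compact once ~70% of the context window is in use
    targetPercent: 40,           // aim for ~40% of the window after compaction
    preserveRecentTurns: 10,     // keep the 10 most recent turns verbatim
    strategy: "hybrid",          // or "sliding-window" / "summarization"
    onCompaction: (event) =>
      console.log(`Compacted via ${event.strategy}: ${event.tokensBefore} -> ${event.tokensAfter} tokens`),
  })
  .ask("..."); // follows the @example blocks in the diff below

// Manual compaction and statistics, as exposed on the Agent in this release.
const event = await agent.compact();
const stats = agent.getCompactionStats();
if (event && stats) {
  console.log(`${stats.totalCompactions} compactions, ${stats.totalTokensSaved} tokens saved`);
}
```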
@@ -1112,6 +1112,417 @@ var init_output_viewer = __esm({
  }
  });
 
+ // src/agent/compaction/config.ts
+ function resolveCompactionConfig(config = {}) {
+ const trigger = config.triggerThresholdPercent ?? DEFAULT_COMPACTION_CONFIG.triggerThresholdPercent;
+ const target = config.targetPercent ?? DEFAULT_COMPACTION_CONFIG.targetPercent;
+ if (target >= trigger) {
+ console.warn(
+ `[llmist/compaction] targetPercent (${target}) should be less than triggerThresholdPercent (${trigger}) to be effective.`
+ );
+ }
+ const strategy = config.strategy ?? DEFAULT_COMPACTION_CONFIG.strategy;
+ const strategyName = typeof strategy === "object" && "name" in strategy ? strategy.name : strategy;
+ return {
+ enabled: config.enabled ?? DEFAULT_COMPACTION_CONFIG.enabled,
+ strategy: strategyName,
+ triggerThresholdPercent: trigger,
+ targetPercent: target,
+ preserveRecentTurns: config.preserveRecentTurns ?? DEFAULT_COMPACTION_CONFIG.preserveRecentTurns,
+ summarizationModel: config.summarizationModel,
+ summarizationPrompt: config.summarizationPrompt ?? DEFAULT_SUMMARIZATION_PROMPT,
+ onCompaction: config.onCompaction
+ };
+ }
+ var DEFAULT_COMPACTION_CONFIG, DEFAULT_SUMMARIZATION_PROMPT;
+ var init_config = __esm({
+ "src/agent/compaction/config.ts"() {
+ "use strict";
+ DEFAULT_COMPACTION_CONFIG = {
+ enabled: true,
+ strategy: "hybrid",
+ triggerThresholdPercent: 80,
+ targetPercent: 50,
+ preserveRecentTurns: 5
+ };
+ DEFAULT_SUMMARIZATION_PROMPT = `Summarize this conversation history concisely, preserving:
+ 1. Key decisions made and their rationale
+ 2. Important facts and data discovered
+ 3. Errors encountered and how they were resolved
+ 4. Current task context and goals
+
+ Format as a brief narrative paragraph, not bullet points.
+ Previous conversation:`;
+ }
+ });
+
+ // src/agent/compaction/strategy.ts
+ function groupIntoTurns(messages) {
+ const turns = [];
+ let currentTurn = [];
+ for (const msg of messages) {
+ if (msg.role === "user" && currentTurn.length > 0) {
+ turns.push({
+ messages: currentTurn,
+ tokenEstimate: estimateTurnTokens(currentTurn)
+ });
+ currentTurn = [msg];
+ } else {
+ currentTurn.push(msg);
+ }
+ }
+ if (currentTurn.length > 0) {
+ turns.push({
+ messages: currentTurn,
+ tokenEstimate: estimateTurnTokens(currentTurn)
+ });
+ }
+ return turns;
+ }
+ function estimateTurnTokens(messages) {
+ return Math.ceil(messages.reduce((sum, msg) => sum + (msg.content?.length ?? 0), 0) / 4);
+ }
+ function flattenTurns(turns) {
+ return turns.flatMap((turn) => turn.messages);
+ }
+ var init_strategy = __esm({
+ "src/agent/compaction/strategy.ts"() {
+ "use strict";
+ }
+ });
+
+ // src/agent/compaction/strategies/sliding-window.ts
+ var TRUNCATION_MARKER_TEMPLATE, SlidingWindowStrategy;
+ var init_sliding_window = __esm({
+ "src/agent/compaction/strategies/sliding-window.ts"() {
+ "use strict";
+ init_strategy();
+ TRUNCATION_MARKER_TEMPLATE = "[Previous conversation truncated. Removed {count} turn(s) to fit context window.]";
+ SlidingWindowStrategy = class {
+ name = "sliding-window";
+ async compact(messages, config, context) {
+ const turns = groupIntoTurns(messages);
+ const preserveCount = Math.min(config.preserveRecentTurns, turns.length);
+ if (turns.length <= preserveCount) {
+ return {
+ messages,
+ strategyName: this.name,
+ metadata: {
+ originalCount: messages.length,
+ compactedCount: messages.length,
+ tokensBefore: context.currentTokens,
+ tokensAfter: context.currentTokens
+ }
+ };
+ }
+ const turnsToKeep = turns.slice(-preserveCount);
+ const turnsRemoved = turns.length - preserveCount;
+ const truncationMarker = {
+ role: "user",
+ content: TRUNCATION_MARKER_TEMPLATE.replace("{count}", turnsRemoved.toString())
+ };
+ const compactedMessages = [truncationMarker, ...flattenTurns(turnsToKeep)];
+ const tokensAfter = Math.ceil(
+ compactedMessages.reduce((sum, msg) => sum + (msg.content?.length ?? 0), 0) / 4
+ );
+ return {
+ messages: compactedMessages,
+ strategyName: this.name,
+ metadata: {
+ originalCount: messages.length,
+ compactedCount: compactedMessages.length,
+ tokensBefore: context.currentTokens,
+ tokensAfter
+ }
+ };
+ }
+ };
+ }
+ });
+
+ // src/agent/compaction/strategies/summarization.ts
+ var SummarizationStrategy;
+ var init_summarization = __esm({
+ "src/agent/compaction/strategies/summarization.ts"() {
+ "use strict";
+ init_strategy();
+ SummarizationStrategy = class {
+ name = "summarization";
+ async compact(messages, config, context) {
+ const turns = groupIntoTurns(messages);
+ const preserveCount = Math.min(config.preserveRecentTurns, turns.length);
+ if (turns.length <= preserveCount) {
+ return {
+ messages,
+ strategyName: this.name,
+ metadata: {
+ originalCount: messages.length,
+ compactedCount: messages.length,
+ tokensBefore: context.currentTokens,
+ tokensAfter: context.currentTokens
+ }
+ };
+ }
+ const turnsToSummarize = turns.slice(0, -preserveCount);
+ const turnsToKeep = turns.slice(-preserveCount);
+ const conversationToSummarize = this.formatTurnsForSummary(flattenTurns(turnsToSummarize));
+ const summary = await this.generateSummary(conversationToSummarize, config, context);
+ const summaryMessage = {
+ role: "user",
+ content: `[Previous conversation summary]
+ ${summary}
+ [End of summary - conversation continues below]`
+ };
+ const compactedMessages = [summaryMessage, ...flattenTurns(turnsToKeep)];
+ const tokensAfter = Math.ceil(
+ compactedMessages.reduce((sum, msg) => sum + (msg.content?.length ?? 0), 0) / 4
+ );
+ return {
+ messages: compactedMessages,
+ summary,
+ strategyName: this.name,
+ metadata: {
+ originalCount: messages.length,
+ compactedCount: compactedMessages.length,
+ tokensBefore: context.currentTokens,
+ tokensAfter
+ }
+ };
+ }
+ /**
+ * Formats messages into a readable conversation format for summarization.
+ */
+ formatTurnsForSummary(messages) {
+ return messages.map((msg) => {
+ const role = msg.role.charAt(0).toUpperCase() + msg.role.slice(1);
+ return `${role}: ${msg.content}`;
+ }).join("\n\n");
+ }
+ /**
+ * Generates a summary using the configured LLM.
+ */
+ async generateSummary(conversation, config, context) {
+ const model = config.summarizationModel ?? context.model;
+ const prompt = `${config.summarizationPrompt}
+
+ ${conversation}`;
+ const response = await context.client.complete(prompt, {
+ model,
+ temperature: 0.3
+ // Low temperature for factual summarization
+ });
+ return response.trim();
+ }
+ };
+ }
+ });
+
+ // src/agent/compaction/strategies/hybrid.ts
+ var MIN_TURNS_FOR_SUMMARIZATION, HybridStrategy;
+ var init_hybrid = __esm({
+ "src/agent/compaction/strategies/hybrid.ts"() {
+ "use strict";
+ init_strategy();
+ init_sliding_window();
+ init_summarization();
+ MIN_TURNS_FOR_SUMMARIZATION = 3;
+ HybridStrategy = class {
+ name = "hybrid";
+ slidingWindow = new SlidingWindowStrategy();
+ summarization = new SummarizationStrategy();
+ async compact(messages, config, context) {
+ const turns = groupIntoTurns(messages);
+ const preserveCount = Math.min(config.preserveRecentTurns, turns.length);
+ if (turns.length <= preserveCount) {
+ return {
+ messages,
+ strategyName: this.name,
+ metadata: {
+ originalCount: messages.length,
+ compactedCount: messages.length,
+ tokensBefore: context.currentTokens,
+ tokensAfter: context.currentTokens
+ }
+ };
+ }
+ const turnsToSummarize = turns.length - preserveCount;
+ if (turnsToSummarize < MIN_TURNS_FOR_SUMMARIZATION) {
+ return this.slidingWindow.compact(messages, config, context);
+ }
+ return this.summarization.compact(messages, config, context);
+ }
+ };
+ }
+ });
+
+ // src/agent/compaction/strategies/index.ts
+ var init_strategies = __esm({
+ "src/agent/compaction/strategies/index.ts"() {
+ "use strict";
+ init_sliding_window();
+ init_summarization();
+ init_hybrid();
+ }
+ });
+
+ // src/agent/compaction/manager.ts
+ function createStrategy(name) {
+ switch (name) {
+ case "sliding-window":
+ return new SlidingWindowStrategy();
+ case "summarization":
+ return new SummarizationStrategy();
+ case "hybrid":
+ return new HybridStrategy();
+ default:
+ throw new Error(`Unknown compaction strategy: ${name}`);
+ }
+ }
+ var CompactionManager;
+ var init_manager = __esm({
+ "src/agent/compaction/manager.ts"() {
+ "use strict";
+ init_config();
+ init_strategies();
+ CompactionManager = class {
+ client;
+ model;
+ config;
+ strategy;
+ modelLimits;
+ // Statistics
+ totalCompactions = 0;
+ totalTokensSaved = 0;
+ lastTokenCount = 0;
+ constructor(client, model, config = {}) {
+ this.client = client;
+ this.model = model;
+ this.config = resolveCompactionConfig(config);
+ if (typeof config.strategy === "object" && "compact" in config.strategy) {
+ this.strategy = config.strategy;
+ } else {
+ this.strategy = createStrategy(this.config.strategy);
+ }
+ }
+ /**
+ * Check if compaction is needed and perform it if so.
+ *
+ * @param conversation - The conversation manager to compact
+ * @param iteration - Current agent iteration (for event metadata)
+ * @returns CompactionEvent if compaction was performed, null otherwise
+ */
+ async checkAndCompact(conversation, iteration) {
+ if (!this.config.enabled) {
+ return null;
+ }
+ if (!this.modelLimits) {
+ this.modelLimits = this.client.modelRegistry.getModelLimits(this.model);
+ if (!this.modelLimits) {
+ return null;
+ }
+ }
+ if (!this.client.countTokens) {
+ return null;
+ }
+ const messages = conversation.getMessages();
+ const currentTokens = await this.client.countTokens(this.model, messages);
+ this.lastTokenCount = currentTokens;
+ const usagePercent = currentTokens / this.modelLimits.contextWindow * 100;
+ if (usagePercent < this.config.triggerThresholdPercent) {
+ return null;
+ }
+ const historyMessages = conversation.getHistoryMessages();
+ const baseMessages = conversation.getBaseMessages();
+ const historyTokens = await this.client.countTokens(this.model, historyMessages);
+ const baseTokens = await this.client.countTokens(this.model, baseMessages);
+ return this.compact(conversation, iteration, {
+ historyMessages,
+ baseMessages,
+ historyTokens,
+ baseTokens,
+ currentTokens: historyTokens + baseTokens
+ });
+ }
+ /**
+ * Force compaction regardless of threshold.
+ *
+ * @param conversation - The conversation manager to compact
+ * @param iteration - Current agent iteration (for event metadata). Use -1 for manual compaction.
+ * @param precomputed - Optional pre-computed token counts (passed from checkAndCompact for efficiency)
+ * @returns CompactionEvent with compaction details
+ */
+ async compact(conversation, iteration, precomputed) {
+ if (!this.modelLimits) {
+ this.modelLimits = this.client.modelRegistry.getModelLimits(this.model);
+ if (!this.modelLimits) {
+ return null;
+ }
+ }
+ const historyMessages = precomputed?.historyMessages ?? conversation.getHistoryMessages();
+ const baseMessages = precomputed?.baseMessages ?? conversation.getBaseMessages();
+ const historyTokens = precomputed?.historyTokens ?? await this.client.countTokens(this.model, historyMessages);
+ const baseTokens = precomputed?.baseTokens ?? await this.client.countTokens(this.model, baseMessages);
+ const currentTokens = precomputed?.currentTokens ?? historyTokens + baseTokens;
+ const targetTotalTokens = Math.floor(
+ this.modelLimits.contextWindow * this.config.targetPercent / 100
+ );
+ const targetHistoryTokens = Math.max(0, targetTotalTokens - baseTokens);
+ const result = await this.strategy.compact(historyMessages, this.config, {
+ currentTokens: historyTokens,
+ targetTokens: targetHistoryTokens,
+ modelLimits: this.modelLimits,
+ client: this.client,
+ model: this.config.summarizationModel ?? this.model
+ });
+ conversation.replaceHistory(result.messages);
+ const afterTokens = await this.client.countTokens(this.model, conversation.getMessages());
+ const tokensSaved = currentTokens - afterTokens;
+ this.totalCompactions++;
+ this.totalTokensSaved += tokensSaved;
+ this.lastTokenCount = afterTokens;
+ const event = {
+ strategy: result.strategyName,
+ tokensBefore: currentTokens,
+ tokensAfter: afterTokens,
+ messagesBefore: historyMessages.length + baseMessages.length,
+ messagesAfter: result.messages.length + baseMessages.length,
+ summary: result.summary,
+ iteration
+ };
+ if (this.config.onCompaction) {
+ try {
+ this.config.onCompaction(event);
+ } catch (err) {
+ console.warn("[llmist/compaction] onCompaction callback error:", err);
+ }
+ }
+ return event;
+ }
+ /**
+ * Get compaction statistics.
+ */
+ getStats() {
+ const contextWindow = this.modelLimits?.contextWindow ?? 0;
+ return {
+ totalCompactions: this.totalCompactions,
+ totalTokensSaved: this.totalTokensSaved,
+ currentUsage: {
+ tokens: this.lastTokenCount,
+ percent: contextWindow > 0 ? this.lastTokenCount / contextWindow * 100 : 0
+ },
+ contextWindow
+ };
+ }
+ /**
+ * Check if compaction is enabled.
+ */
+ isEnabled() {
+ return this.config.enabled;
+ }
+ };
+ }
+ });
+
  // src/agent/gadget-output-store.ts
  var import_node_crypto, GadgetOutputStore;
  var init_gadget_output_store = __esm({
@@ -1214,10 +1625,16 @@ var init_conversation_manager = __esm({
  baseMessages;
  initialMessages;
  historyBuilder;
+ startPrefix;
+ endPrefix;
+ argPrefix;
  constructor(baseMessages, initialMessages, options = {}) {
  this.baseMessages = baseMessages;
  this.initialMessages = initialMessages;
  this.historyBuilder = new LLMMessageBuilder();
+ this.startPrefix = options.startPrefix;
+ this.endPrefix = options.endPrefix;
+ this.argPrefix = options.argPrefix;
  if (options.startPrefix && options.endPrefix) {
  this.historyBuilder.withPrefixes(options.startPrefix, options.endPrefix, options.argPrefix);
  }
@@ -1234,6 +1651,25 @@ var init_conversation_manager = __esm({
  getMessages() {
  return [...this.baseMessages, ...this.initialMessages, ...this.historyBuilder.build()];
  }
+ getHistoryMessages() {
+ return this.historyBuilder.build();
+ }
+ getBaseMessages() {
+ return [...this.baseMessages, ...this.initialMessages];
+ }
+ replaceHistory(newHistory) {
+ this.historyBuilder = new LLMMessageBuilder();
+ if (this.startPrefix && this.endPrefix) {
+ this.historyBuilder.withPrefixes(this.startPrefix, this.endPrefix, this.argPrefix);
+ }
+ for (const msg of newHistory) {
+ if (msg.role === "user") {
+ this.historyBuilder.addUser(msg.content);
+ } else if (msg.role === "assistant") {
+ this.historyBuilder.addAssistant(msg.content);
+ }
+ }
+ }
  };
  }
  });
@@ -2780,6 +3216,7 @@ var init_agent = __esm({
  init_model_shortcuts();
  init_output_viewer();
  init_logger();
+ init_manager();
  init_gadget_output_store();
  init_agent_internal_key();
  init_conversation_manager();
@@ -2810,6 +3247,8 @@ var init_agent = __esm({
  outputStore;
  outputLimitEnabled;
  outputLimitCharLimit;
+ // Context compaction
+ compactionManager;
  /**
  * Creates a new Agent instance.
  * @internal This constructor is private. Use LLMist.createAgent() or AgentBuilder instead.
@@ -2869,6 +3308,14 @@ var init_agent = __esm({
  if (options.userPrompt) {
  this.conversation.addUserMessage(options.userPrompt);
  }
+ const compactionEnabled = options.compactionConfig?.enabled ?? true;
+ if (compactionEnabled) {
+ this.compactionManager = new CompactionManager(
+ this.client,
+ this.model,
+ options.compactionConfig
+ );
+ }
  }
  /**
  * Get the gadget registry for this agent.
@@ -2891,6 +3338,53 @@ var init_agent = __esm({
  getRegistry() {
  return this.registry;
  }
+ /**
+ * Manually trigger context compaction.
+ *
+ * Forces compaction regardless of threshold. Useful for:
+ * - Pre-emptive context management before expected long operations
+ * - Testing compaction behavior
+ *
+ * @returns CompactionEvent if compaction was performed, null if not configured or no history
+ *
+ * @example
+ * ```typescript
+ * const agent = await LLMist.createAgent()
+ * .withModel('sonnet')
+ * .withCompaction()
+ * .ask('...');
+ *
+ * // Manually compact before a long operation
+ * const event = await agent.compact();
+ * if (event) {
+ * console.log(`Saved ${event.tokensBefore - event.tokensAfter} tokens`);
+ * }
+ * ```
+ */
+ async compact() {
+ if (!this.compactionManager) {
+ return null;
+ }
+ return this.compactionManager.compact(this.conversation, -1);
+ }
+ /**
+ * Get compaction statistics.
+ *
+ * @returns CompactionStats if compaction is enabled, null otherwise
+ *
+ * @example
+ * ```typescript
+ * const stats = agent.getCompactionStats();
+ * if (stats) {
+ * console.log(`Total compactions: ${stats.totalCompactions}`);
+ * console.log(`Tokens saved: ${stats.totalTokensSaved}`);
+ * console.log(`Current usage: ${stats.currentUsage.percent.toFixed(1)}%`);
+ * }
+ * ```
+ */
+ getCompactionStats() {
+ return this.compactionManager?.getStats() ?? null;
+ }
  /**
  * Run the agent loop.
  * Clean, simple orchestration - all complexity is in StreamProcessor.
@@ -2911,6 +3405,30 @@ var init_agent = __esm({
  while (currentIteration < this.maxIterations) {
  this.logger.debug("Starting iteration", { iteration: currentIteration });
  try {
+ if (this.compactionManager) {
+ const compactionEvent = await this.compactionManager.checkAndCompact(
+ this.conversation,
+ currentIteration
+ );
+ if (compactionEvent) {
+ this.logger.info("Context compacted", {
+ strategy: compactionEvent.strategy,
+ tokensBefore: compactionEvent.tokensBefore,
+ tokensAfter: compactionEvent.tokensAfter
+ });
+ yield { type: "compaction", event: compactionEvent };
+ await this.safeObserve(async () => {
+ if (this.hooks.observers?.onCompaction) {
+ await this.hooks.observers.onCompaction({
+ iteration: currentIteration,
+ event: compactionEvent,
+ stats: this.compactionManager.getStats(),
+ logger: this.logger
+ });
+ }
+ });
+ }
+ }
  let llmOptions = {
  model: this.model,
  messages: this.conversation.getMessages(),
@@ -2930,6 +3448,7 @@ var init_agent = __esm({
  if (this.hooks.controllers?.beforeLLMCall) {
  const context = {
  iteration: currentIteration,
+ maxIterations: this.maxIterations,
  options: llmOptions,
  logger: this.logger
  };
@@ -2994,12 +3513,17 @@ var init_agent = __esm({
  });
  let finalMessage = result.finalMessage;
  if (this.hooks.controllers?.afterLLMCall) {
+ const gadgetCallCount = result.outputs.filter(
+ (output) => output.type === "gadget_result"
+ ).length;
  const context = {
  iteration: currentIteration,
+ maxIterations: this.maxIterations,
  options: llmOptions,
  finishReason: result.finishReason,
  usage: result.usage,
  finalMessage: result.finalMessage,
+ gadgetCallCount,
  logger: this.logger
  };
  const action = await this.hooks.controllers.afterLLMCall(context);
@@ -3261,6 +3785,7 @@ var init_builder = __esm({
  defaultGadgetTimeoutMs;
  gadgetOutputLimit;
  gadgetOutputLimitPercent;
+ compactionConfig;
  constructor(client) {
  this.client = client;
  }
@@ -3656,6 +4181,57 @@ var init_builder = __esm({
  this.gadgetOutputLimitPercent = percent;
  return this;
  }
+ /**
+ * Configure context compaction.
+ *
+ * Context compaction automatically manages conversation history to prevent
+ * context window overflow in long-running agent conversations.
+ *
+ * @param config - Compaction configuration options
+ * @returns This builder for chaining
+ *
+ * @example
+ * ```typescript
+ * // Custom thresholds
+ * .withCompaction({
+ * triggerThresholdPercent: 70,
+ * targetPercent: 40,
+ * preserveRecentTurns: 10,
+ * })
+ *
+ * // Different strategy
+ * .withCompaction({
+ * strategy: 'sliding-window',
+ * })
+ *
+ * // With callback
+ * .withCompaction({
+ * onCompaction: (event) => {
+ * console.log(`Saved ${event.tokensBefore - event.tokensAfter} tokens`);
+ * }
+ * })
+ * ```
+ */
+ withCompaction(config) {
+ this.compactionConfig = { ...config, enabled: config.enabled ?? true };
+ return this;
+ }
+ /**
+ * Disable context compaction.
+ *
+ * By default, compaction is enabled. Use this method to explicitly disable it.
+ *
+ * @returns This builder for chaining
+ *
+ * @example
+ * ```typescript
+ * .withoutCompaction() // Disable automatic compaction
+ * ```
+ */
+ withoutCompaction() {
+ this.compactionConfig = { enabled: false };
+ return this;
+ }
  /**
  * Add a synthetic gadget call to the conversation history.
  *
@@ -3771,7 +4347,8 @@ ${endPrefix}`
  shouldContinueAfterError: this.shouldContinueAfterError,
  defaultGadgetTimeoutMs: this.defaultGadgetTimeoutMs,
  gadgetOutputLimit: this.gadgetOutputLimit,
- gadgetOutputLimitPercent: this.gadgetOutputLimitPercent
+ gadgetOutputLimitPercent: this.gadgetOutputLimitPercent,
+ compactionConfig: this.compactionConfig
  };
  return new Agent(AGENT_INTERNAL_KEY, options);
  }
@@ -3873,7 +4450,8 @@ ${endPrefix}`
  shouldContinueAfterError: this.shouldContinueAfterError,
  defaultGadgetTimeoutMs: this.defaultGadgetTimeoutMs,
  gadgetOutputLimit: this.gadgetOutputLimit,
- gadgetOutputLimitPercent: this.gadgetOutputLimitPercent
+ gadgetOutputLimitPercent: this.gadgetOutputLimitPercent,
+ compactionConfig: this.compactionConfig
  };
  return new Agent(AGENT_INTERNAL_KEY, options);
  }
@@ -5858,19 +6436,44 @@ var init_client = __esm({
  var testing_exports = {};
  __export(testing_exports, {
  MockBuilder: () => MockBuilder,
+ MockConversationManager: () => MockConversationManager,
  MockGadgetBuilder: () => MockGadgetBuilder,
  MockManager: () => MockManager,
+ MockPromptRecorder: () => MockPromptRecorder,
  MockProviderAdapter: () => MockProviderAdapter,
+ collectOutput: () => collectOutput,
+ collectStream: () => collectStream,
+ collectStreamText: () => collectStreamText,
+ createAssistantMessage: () => createAssistantMessage,
+ createConversation: () => createConversation,
+ createConversationWithGadgets: () => createConversationWithGadgets,
+ createEmptyStream: () => createEmptyStream,
+ createErrorStream: () => createErrorStream,
+ createLargeConversation: () => createLargeConversation,
+ createMinimalConversation: () => createMinimalConversation,
  createMockAdapter: () => createMockAdapter,
  createMockClient: () => createMockClient,
+ createMockConversationManager: () => createMockConversationManager,
  createMockGadget: () => createMockGadget,
+ createMockPrompt: () => createMockPrompt,
+ createMockReadable: () => createMockReadable,
  createMockStream: () => createMockStream,
+ createMockWritable: () => createMockWritable,
+ createSystemMessage: () => createSystemMessage,
+ createTestEnvironment: () => createTestEnvironment,
+ createTestStream: () => createTestStream,
  createTextMockStream: () => createTextMockStream,
+ createTextStream: () => createTextStream,
+ createUserMessage: () => createUserMessage,
+ estimateTokens: () => estimateTokens,
+ getBufferedOutput: () => getBufferedOutput,
  getMockManager: () => getMockManager,
+ getStreamFinalChunk: () => getStreamFinalChunk,
  mockGadget: () => mockGadget,
  mockLLM: () => mockLLM,
  testGadget: () => testGadget,
- testGadgetBatch: () => testGadgetBatch
+ testGadgetBatch: () => testGadgetBatch,
+ waitFor: () => waitFor
  });
  module.exports = __toCommonJS(testing_exports);
 
@@ -6779,21 +7382,477 @@ var MockGadgetBuilder = class {
  function mockGadget() {
  return new MockGadgetBuilder();
  }
+
+ // src/testing/stream-helpers.ts
+ function createTestStream(chunks) {
+ return async function* () {
+ for (const chunk of chunks) {
+ yield chunk;
+ }
+ }();
+ }
+ function createTextStream(text, options) {
+ return async function* () {
+ if (options?.delayMs) {
+ await sleep2(options.delayMs);
+ }
+ const chunkSize = options?.chunkSize ?? text.length;
+ const chunks = [];
+ for (let i = 0; i < text.length; i += chunkSize) {
+ chunks.push(text.slice(i, i + chunkSize));
+ }
+ for (let i = 0; i < chunks.length; i++) {
+ const isLast = i === chunks.length - 1;
+ const chunk = { text: chunks[i] };
+ if (isLast) {
+ chunk.finishReason = options?.finishReason ?? "stop";
+ const inputTokens = Math.ceil(text.length / 4);
+ const outputTokens = Math.ceil(text.length / 4);
+ chunk.usage = options?.usage ?? {
+ inputTokens,
+ outputTokens,
+ totalTokens: inputTokens + outputTokens
+ };
+ }
+ yield chunk;
+ if (options?.chunkDelayMs && !isLast) {
+ await sleep2(options.chunkDelayMs);
+ }
+ }
+ }();
+ }
+ async function collectStream(stream2) {
+ const chunks = [];
+ for await (const chunk of stream2) {
+ chunks.push(chunk);
+ }
+ return chunks;
+ }
+ async function collectStreamText(stream2) {
+ let text = "";
+ for await (const chunk of stream2) {
+ text += chunk.text ?? "";
+ }
+ return text;
+ }
+ async function getStreamFinalChunk(stream2) {
+ let lastChunk;
+ for await (const chunk of stream2) {
+ lastChunk = chunk;
+ }
+ return lastChunk;
+ }
+ function createEmptyStream() {
+ return async function* () {
+ }();
+ }
+ function createErrorStream(chunksBeforeError, error) {
+ return async function* () {
+ for (const chunk of chunksBeforeError) {
+ yield chunk;
+ }
+ throw error;
+ }();
+ }
+ function sleep2(ms) {
+ return new Promise((resolve) => setTimeout(resolve, ms));
+ }
+
+ // src/testing/conversation-fixtures.ts
+ function createConversation(turnCount, options) {
+ const messages = [];
+ const userPrefix = options?.userPrefix ?? "User message";
+ const assistantPrefix = options?.assistantPrefix ?? "Assistant response";
+ const contentLength = options?.contentLength ?? 100;
+ for (let i = 0; i < turnCount; i++) {
+ const padding = " ".repeat(Math.max(0, contentLength - 30));
+ messages.push({
+ role: "user",
+ content: `${userPrefix} ${i + 1}: This is turn ${i + 1} of the conversation.${padding}`
+ });
+ messages.push({
+ role: "assistant",
+ content: `${assistantPrefix} ${i + 1}: I acknowledge turn ${i + 1}.${padding}`
+ });
+ }
+ return messages;
+ }
+ function createConversationWithGadgets(turnCount, gadgetCallsPerTurn = 1, options) {
+ const messages = [];
+ const gadgetNames = options?.gadgetNames ?? ["search", "calculate", "read"];
+ const contentLength = options?.contentLength ?? 50;
+ let gadgetIndex = 0;
+ for (let turn = 0; turn < turnCount; turn++) {
+ messages.push({
+ role: "user",
+ content: `User request ${turn + 1}${"x".repeat(contentLength)}`
+ });
+ for (let g = 0; g < gadgetCallsPerTurn; g++) {
+ const gadgetName = gadgetNames[gadgetIndex % gadgetNames.length];
+ gadgetIndex++;
+ messages.push({
+ role: "assistant",
+ content: `!!!GADGET_START:${gadgetName}
+ !!!ARG:query
+ test query ${turn}-${g}
+ !!!GADGET_END`
+ });
+ messages.push({
+ role: "user",
+ content: `Result: Gadget ${gadgetName} returned result for query ${turn}-${g}`
+ });
+ }
+ messages.push({
+ role: "assistant",
+ content: `Final response for turn ${turn + 1}${"y".repeat(contentLength)}`
+ });
+ }
+ return messages;
+ }
+ function estimateTokens(messages) {
+ return Math.ceil(
+ messages.reduce((sum, msg) => sum + (msg.content?.length ?? 0), 0) / 4
+ );
+ }
+ function createUserMessage(content) {
+ return { role: "user", content };
+ }
+ function createAssistantMessage(content) {
+ return { role: "assistant", content };
+ }
+ function createSystemMessage(content) {
+ return { role: "system", content };
+ }
+ function createMinimalConversation() {
+ return [
+ { role: "user", content: "Hello" },
+ { role: "assistant", content: "Hi there!" }
+ ];
+ }
+ function createLargeConversation(targetTokens, options) {
+ const tokensPerTurn = options?.tokensPerTurn ?? 200;
+ const turnsNeeded = Math.ceil(targetTokens / tokensPerTurn);
+ const charsPerMessage = Math.floor(tokensPerTurn * 4 / 2);
+ return createConversation(turnsNeeded, {
+ contentLength: charsPerMessage
+ });
+ }
+
+ // src/testing/mock-conversation.ts
+ var MockConversationManager = class {
+ history;
+ baseMessages;
+ replacementHistory;
+ replaceHistoryCallCount = 0;
+ addedMessages = [];
+ constructor(history = [], baseMessages = []) {
+ this.history = [...history];
+ this.baseMessages = [...baseMessages];
+ }
+ addUserMessage(content) {
+ const msg = { role: "user", content };
+ this.history.push(msg);
+ this.addedMessages.push(msg);
+ }
+ addAssistantMessage(content) {
+ const msg = { role: "assistant", content };
+ this.history.push(msg);
+ this.addedMessages.push(msg);
+ }
+ addGadgetCall(gadgetName, parameters, result) {
+ const assistantMsg = {
+ role: "assistant",
+ content: `!!!GADGET_START:${gadgetName}
+ ${JSON.stringify(parameters)}
+ !!!GADGET_END`
+ };
+ const resultMsg = {
+ role: "user",
+ content: `Result: ${result}`
+ };
+ this.history.push(assistantMsg);
+ this.history.push(resultMsg);
+ this.addedMessages.push(assistantMsg);
+ this.addedMessages.push(resultMsg);
+ }
+ getMessages() {
+ return [...this.baseMessages, ...this.history];
+ }
+ getHistoryMessages() {
+ return [...this.history];
+ }
+ getBaseMessages() {
+ return [...this.baseMessages];
+ }
+ replaceHistory(newHistory) {
+ this.replacementHistory = [...newHistory];
+ this.history = [...newHistory];
+ this.replaceHistoryCallCount++;
+ }
+ // ============================================
+ // Test Helper Methods
+ // ============================================
+ /**
+ * Check if replaceHistory was called.
+ */
+ wasReplaceHistoryCalled() {
+ return this.replaceHistoryCallCount > 0;
+ }
+ /**
+ * Get the number of times replaceHistory was called.
+ */
+ getReplaceHistoryCallCount() {
+ return this.replaceHistoryCallCount;
+ }
+ /**
+ * Get the most recent history passed to replaceHistory.
+ * Returns undefined if replaceHistory was never called.
+ */
+ getReplacementHistory() {
+ return this.replacementHistory;
+ }
+ /**
+ * Get all messages that were added via add* methods.
+ */
+ getAddedMessages() {
+ return [...this.addedMessages];
+ }
+ /**
+ * Reset all tracking state while preserving the conversation.
+ */
+ resetTracking() {
+ this.replacementHistory = void 0;
+ this.replaceHistoryCallCount = 0;
+ this.addedMessages = [];
+ }
+ /**
+ * Completely reset the mock to initial state.
+ * Note: baseMessages cannot be changed after construction.
+ */
+ reset(history = []) {
+ this.history = [...history];
+ this.resetTracking();
+ }
+ /**
+ * Set the history directly (for test setup).
+ */
+ setHistory(messages) {
+ this.history = [...messages];
+ }
+ /**
+ * Get the current history length.
+ */
+ getHistoryLength() {
+ return this.history.length;
+ }
+ /**
+ * Get total message count (base + history).
+ */
+ getTotalMessageCount() {
+ return this.baseMessages.length + this.history.length;
+ }
+ };
+ function createMockConversationManager(turnCount, baseMessages = []) {
+ const history = [];
+ for (let i = 0; i < turnCount; i++) {
+ history.push({
+ role: "user",
+ content: `User message ${i + 1}: This is turn ${i + 1} of the conversation.`
+ });
+ history.push({
+ role: "assistant",
+ content: `Assistant response ${i + 1}: I acknowledge turn ${i + 1}.`
+ });
+ }
+ return new MockConversationManager(history, baseMessages);
+ }
+
+ // src/testing/cli-helpers.ts
+ var import_node_stream = require("stream");
+ function createTestEnvironment(options = {}) {
+ const stdin = createMockReadable(options.stdin);
+ const stdout = new import_node_stream.PassThrough();
+ const stderr = new import_node_stream.PassThrough();
+ let exitCode;
+ return {
+ stdin,
+ stdout,
+ stderr,
+ isTTY: options.isTTY ?? false,
+ argv: options.argv ?? ["node", "llmist"],
+ env: { ...filterDefinedEnv(process.env), ...options.env },
+ get exitCode() {
+ return exitCode;
+ },
+ setExitCode: (code) => {
+ exitCode = code;
+ }
+ };
+ }
+ function createMockReadable(input) {
+ if (!input) {
+ const stream3 = new import_node_stream.Readable({ read() {
+ } });
+ stream3.push(null);
+ return stream3;
+ }
+ const content = Array.isArray(input) ? `${input.join("\n")}
+ ` : input;
+ const stream2 = new import_node_stream.Readable({ read() {
+ } });
+ stream2.push(content);
+ stream2.push(null);
+ return stream2;
+ }
+ function createMockWritable() {
+ const chunks = [];
+ const stream2 = new import_node_stream.Writable({
+ write(chunk, _encoding, callback) {
+ chunks.push(Buffer.from(chunk));
+ callback();
+ }
+ });
+ stream2.getData = () => Buffer.concat(chunks).toString("utf8");
+ return stream2;
+ }
+ async function collectOutput(stream2, timeout = 5e3) {
+ return new Promise((resolve, reject) => {
+ const chunks = [];
+ const timeoutId = setTimeout(() => {
+ resolve(Buffer.concat(chunks).toString("utf8"));
+ }, timeout);
+ stream2.on("data", (chunk) => {
+ chunks.push(Buffer.from(chunk));
+ });
+ stream2.on("end", () => {
+ clearTimeout(timeoutId);
+ resolve(Buffer.concat(chunks).toString("utf8"));
+ });
+ stream2.on("error", (err) => {
+ clearTimeout(timeoutId);
+ reject(err);
+ });
+ });
+ }
+ function getBufferedOutput(stream2) {
+ const chunks = [];
+ for (; ; ) {
+ const chunk = stream2.read();
+ if (chunk === null) break;
+ chunks.push(chunk);
+ }
+ return Buffer.concat(chunks).toString("utf8");
+ }
+ function createMockPrompt(responses) {
+ let index = 0;
+ return async (_question) => {
+ if (index >= responses.length) {
+ throw new Error(`Mock prompt exhausted: no response for question ${index + 1}`);
+ }
+ return responses[index++];
+ };
+ }
+ var MockPromptRecorder = class {
+ responses;
+ index = 0;
+ questions = [];
+ constructor(responses) {
+ this.responses = responses;
+ }
+ /**
+ * The prompt function to use in tests.
+ */
+ prompt = async (question) => {
+ this.questions.push(question);
+ if (this.index >= this.responses.length) {
+ throw new Error(`Mock prompt exhausted after ${this.index} questions`);
+ }
+ return this.responses[this.index++];
+ };
+ /**
+ * Get all questions that were asked.
+ */
+ getQuestions() {
+ return [...this.questions];
+ }
+ /**
+ * Get the number of questions asked.
+ */
+ getQuestionCount() {
+ return this.questions.length;
+ }
+ /**
+ * Reset the recorder state.
+ */
+ reset(newResponses) {
+ this.index = 0;
+ this.questions = [];
+ if (newResponses) {
+ this.responses = newResponses;
+ }
+ }
+ };
+ async function waitFor(condition, timeout = 5e3, interval = 50) {
+ const startTime = Date.now();
+ while (!condition()) {
+ if (Date.now() - startTime > timeout) {
+ throw new Error(`waitFor timed out after ${timeout}ms`);
+ }
+ await sleep3(interval);
+ }
+ }
+ function sleep3(ms) {
+ return new Promise((resolve) => setTimeout(resolve, ms));
+ }
+ function filterDefinedEnv(env) {
+ const result = {};
+ for (const [key, value] of Object.entries(env)) {
+ if (value !== void 0) {
+ result[key] = value;
+ }
+ }
+ return result;
+ }
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
  MockBuilder,
+ MockConversationManager,
  MockGadgetBuilder,
  MockManager,
+ MockPromptRecorder,
  MockProviderAdapter,
+ collectOutput,
+ collectStream,
+ collectStreamText,
+ createAssistantMessage,
+ createConversation,
+ createConversationWithGadgets,
+ createEmptyStream,
+ createErrorStream,
+ createLargeConversation,
+ createMinimalConversation,
  createMockAdapter,
  createMockClient,
+ createMockConversationManager,
  createMockGadget,
+ createMockPrompt,
+ createMockReadable,
  createMockStream,
+ createMockWritable,
+ createSystemMessage,
+ createTestEnvironment,
+ createTestStream,
  createTextMockStream,
+ createTextStream,
+ createUserMessage,
+ estimateTokens,
+ getBufferedOutput,
  getMockManager,
+ getStreamFinalChunk,
  mockGadget,
  mockLLM,
  testGadget,
- testGadgetBatch
+ testGadgetBatch,
+ waitFor
  });
  //# sourceMappingURL=index.cjs.map
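The release also expands the testing bundle with stream, conversation, and CLI helpers (the `testing_exports` additions above). A small sketch exercising a few of the new exports; the `llmist/testing` import path is an assumption inferred from the bundle name, not confirmed by this diff:

```typescript
import {
  createTextStream,
  collectStreamText,
  createMockConversationManager,
  waitFor,
} from "llmist/testing"; // assumed subpath for the testing bundle

async function demo(): Promise<void> {
  // Stream helpers: emit a text in 5-character chunks, then reassemble it.
  const stream = createTextStream("hello from the mock stream", { chunkSize: 5 });
  const text = await collectStreamText(stream);

  // Conversation fixture with replaceHistory tracking, e.g. for compaction tests.
  const conversation = createMockConversationManager(3);
  conversation.replaceHistory([{ role: "user" as const, content: "compacted summary" }]);
  console.log(text, conversation.wasReplaceHistoryCalled()); // -> "... true"

  // Polling helper: resolve once the condition holds, or throw after 1s.
  await waitFor(() => conversation.getReplaceHistoryCallCount() > 0, 1000);
}

demo();
```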