@elizaos/agent 2.0.0-alpha.156 → 2.0.0-alpha.161

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. package/package.json +5 -5
  2. package/packages/agent/src/config/types.eliza.d.ts +2 -0
  3. package/packages/agent/src/config/types.eliza.d.ts.map +1 -1
  4. package/packages/typescript/src/features/autonomy/execution-facade.d.ts.map +1 -1
  5. package/packages/typescript/src/features/autonomy/execution-facade.js +27 -9
  6. package/packages/typescript/src/optimization/ab-analysis.d.ts +3 -0
  7. package/packages/typescript/src/optimization/ab-analysis.d.ts.map +1 -0
  8. package/packages/typescript/src/optimization/ab-analysis.js +8 -0
  9. package/packages/typescript/src/optimization-root-dir.d.ts +3 -0
  10. package/packages/typescript/src/optimization-root-dir.d.ts.map +1 -0
  11. package/packages/typescript/src/optimization-root-dir.js +9 -0
  12. package/packages/typescript/src/runtime.d.ts +41 -3
  13. package/packages/typescript/src/runtime.d.ts.map +1 -1
  14. package/packages/typescript/src/runtime.js +700 -30
  15. package/packages/typescript/src/services/message.d.ts +1 -0
  16. package/packages/typescript/src/services/message.d.ts.map +1 -1
  17. package/packages/typescript/src/services/message.js +328 -222
  18. package/packages/typescript/src/streaming-context.d.ts +9 -0
  19. package/packages/typescript/src/streaming-context.d.ts.map +1 -1
  20. package/packages/typescript/src/streaming-context.js +45 -0
  21. package/packages/typescript/src/trajectory-context.d.ts +6 -0
  22. package/packages/typescript/src/trajectory-context.d.ts.map +1 -1
  23. package/packages/typescript/src/types/events.d.ts +18 -1
  24. package/packages/typescript/src/types/events.d.ts.map +1 -1
  25. package/packages/typescript/src/types/events.js +2 -0
  26. package/packages/typescript/src/types/index.d.ts +4 -0
  27. package/packages/typescript/src/types/index.d.ts.map +1 -1
  28. package/packages/typescript/src/types/index.js +4 -0
  29. package/packages/typescript/src/types/pipeline-hooks.d.ts +234 -0
  30. package/packages/typescript/src/types/pipeline-hooks.d.ts.map +1 -0
  31. package/packages/typescript/src/types/pipeline-hooks.js +111 -0
  32. package/packages/typescript/src/types/prompt-optimization-hooks.d.ts +41 -0
  33. package/packages/typescript/src/types/prompt-optimization-hooks.d.ts.map +1 -0
  34. package/packages/typescript/src/types/prompt-optimization-hooks.js +1 -0
  35. package/packages/typescript/src/types/prompt-optimization-score-card.d.ts +22 -0
  36. package/packages/typescript/src/types/prompt-optimization-score-card.d.ts.map +1 -0
  37. package/packages/typescript/src/types/prompt-optimization-score-card.js +72 -0
  38. package/packages/typescript/src/types/prompt-optimization-trace.d.ts +53 -0
  39. package/packages/typescript/src/types/prompt-optimization-trace.d.ts.map +1 -0
  40. package/packages/typescript/src/types/prompt-optimization-trace.js +19 -0
  41. package/packages/typescript/src/types/runtime.d.ts +40 -1
  42. package/packages/typescript/src/types/runtime.d.ts.map +1 -1
@@ -2,19 +2,23 @@ import { v4 as uuidv4 } from "uuid";
2
2
  import Handlebars from "handlebars";
3
3
  import { withCanonicalActionDocs, withCanonicalEvaluatorDocs, } from "./action-docs";
4
4
  import { parseActionParams, validateActionParams } from "./actions";
5
+ import { createBasicCapabilitiesPlugin, } from "./features/basic-capabilities/index";
5
6
  import { ensureConnection as ensureConnectionStandalone } from "./connection";
6
7
  import { InMemoryDatabaseAdapter } from "./database/inMemoryAdapter";
7
- import { createBasicCapabilitiesPlugin, } from "./features/basic-capabilities/index";
8
8
  import { createLogger } from "./logger";
9
+ import { simpleHash } from "./optimization/ab-analysis";
10
+ import { getOptimizationRootDir } from "./optimization-root-dir";
9
11
  import { installRuntimePluginLifecycle } from "./plugin-lifecycle";
10
12
  import { getNativeRuntimeFeaturePlugin, nativeRuntimeFeatureDefaults, nativeRuntimeFeaturePluginNames, resolveNativeRuntimeFeatureFromPluginName, } from "./plugins/native-features";
11
13
  import { BM25 } from "./search";
12
14
  import { redactWithSecrets } from "./security/redact.js";
13
15
  import { DefaultMessageService } from "./services/message";
14
16
  import { decryptSecret, getSalt } from "./settings";
15
- import { getStreamingContext, runWithStreamingContext, } from "./streaming-context";
17
+ import { getStreamingContext, runInsideModelStreamChunkDelivery, runWithStreamingContext, } from "./streaming-context";
16
18
  import { getTrajectoryContext, setTrajectoryPurpose, } from "./trajectory-context";
17
19
  import { ChannelType, EventType, getModelFallbackChain, ModelType, } from "./types";
20
+ import { afterMemoryPersistedPipelineHookContext, modelStreamChunkPipelineHookContext, modelStreamEndPipelineHookContext, outgoingPipelineHookContext, PIPELINE_HOOK_DEBUG_LOG_MS, PIPELINE_HOOK_ERROR_LOG_MS, PIPELINE_HOOK_WARN_MS, pipelineHookMetricRoomId, postModelPipelineHookContext, preModelPipelineHookContext, resolvePipelineHookSpec, sortPipelineHooksByPosition, } from "./types/pipeline-hooks";
21
+ import { ScoreCard } from "./types/prompt-optimization-score-card";
18
22
  import { parseJSONObjectFromText, parseKeyValueXml, stringToUuid, } from "./utils";
19
23
  import { parseBooleanValue } from "./utils/boolean";
20
24
  import { BufferUtils } from "./utils/buffer";
@@ -56,6 +60,12 @@ const TOON_HEADER_PATTERN = /^TOON(?:\s+DOCUMENT)?[:\s-]*$/i;
56
60
  const TOON_FIELD_PATTERN = /^[A-Za-z_][A-Za-z0-9_.-]*(?:\[[^\]\n]*\])?(?:\{[^\n]*\})?:/m;
57
61
  const XML_LIKE_PATTERN = /<[/!?A-Za-z_][^>\n]*>/;
58
62
  const JSON_OBJECT_KEY_PATTERN = /(?:["'][^"'\n]+["']|[A-Za-z_][A-Za-z0-9_-]*)\s*:/;
63
/**
 * Normalize outgoing message text to a string.
 * `null`/`undefined` become the empty string; every other value is stringified.
 */
function coerceOutgoingMessageText(text) {
    // `== null` deliberately matches both null and undefined.
    return text == null ? "" : String(text);
}
59
69
  function resolveDynamicPromptModelType(modelType, modelSize) {
60
70
  if (modelType) {
61
71
  return modelType;
@@ -106,6 +116,17 @@ export class AgentRuntime {
106
116
  taskWorkers = new Map();
107
117
  sendHandlers = new Map();
108
118
  eventHandlers = new Map();
119
+ /**
120
+ * In-flight execution traces keyed by trace.id (unique uuid).
121
+ * A single run can produce multiple DPE calls; each gets its own trace.
122
+ * `runToTraces` maps runId -> set of trace ids for enrichment lookup.
123
+ */
124
+ activeTraces = new Map();
125
+ runToTraces = new Map();
126
+ /** Optional DPE-side prompt optimization I/O (merge, registry, baseline/failure traces). */
127
+ promptOptimizationHooks = null;
128
+ pipelineHookEntries = [];
129
+ pipelineHookIdToIndex = new Map();
109
130
  // A map of all plugins available to the runtime, keyed by name, for dependency resolution.
110
131
  allAvailablePlugins = new Map();
111
132
  // The initial list of plugins specified by the character configuration.
@@ -352,6 +373,245 @@ export class AgentRuntime {
352
373
  isTrajectoriesEnabled() {
353
374
  return this.hasNativeRuntimeFeature("trajectories");
354
375
  }
376
+ hooksForPhase(phase) {
377
+ return this.pipelineHookEntries.filter((e) => e.phase === phase);
378
+ }
379
+ upsertPipelineHook(entry) {
380
+ const existing = this.pipelineHookIdToIndex.get(entry.id);
381
+ if (existing !== undefined) {
382
+ this.pipelineHookEntries[existing] = entry;
383
+ return;
384
+ }
385
+ this.pipelineHookIdToIndex.set(entry.id, this.pipelineHookEntries.length);
386
+ this.pipelineHookEntries.push(entry);
387
+ }
388
+ async invokePipelineHooks(phase, ctx, logLabel, pipelineHookTelemetry = true) {
389
+ const hooks = sortPipelineHooksByPosition(this.hooksForPhase(phase));
390
+ if (!hooks.length) {
391
+ return;
392
+ }
393
+ const runtime = this;
394
+ const roomId = pipelineHookMetricRoomId(ctx);
395
+ const runOne = async (entry) => {
396
+ const t0 = performance.now();
397
+ let errorMessage;
398
+ try {
399
+ await entry.handler(runtime, ctx);
400
+ }
401
+ catch (error) {
402
+ errorMessage = error instanceof Error ? error.message : String(error);
403
+ this.logger.error({
404
+ src: "agent",
405
+ agentId: this.agentId,
406
+ hookId: entry.id,
407
+ phase: entry.phase,
408
+ error: errorMessage,
409
+ }, `${logLabel} threw; continuing`);
410
+ }
411
+ {
412
+ const durationMs = Math.round(performance.now() - t0);
413
+ if (!pipelineHookTelemetry) {
414
+ const baseLite = {
415
+ src: "pipeline_hook",
416
+ agentId: this.agentId,
417
+ hookId: entry.id,
418
+ phase,
419
+ roomId,
420
+ durationMs,
421
+ };
422
+ if (durationMs >= PIPELINE_HOOK_WARN_MS) {
423
+ this.logger.warn(baseLite, `PIPELINE HOOK SLOW (${durationMs}ms): ${entry.id} phase=${phase}`);
424
+ }
425
+ if (durationMs >= PIPELINE_HOOK_ERROR_LOG_MS) {
426
+ this.logger.error(baseLite, `PIPELINE HOOK VERY SLOW (${durationMs}ms): ${entry.id} phase=${phase}`);
427
+ }
428
+ }
429
+ else {
430
+ const slow = durationMs >= PIPELINE_HOOK_WARN_MS;
431
+ const baseFields = {
432
+ src: "pipeline_hook",
433
+ agentId: this.agentId,
434
+ hookId: entry.id,
435
+ phase,
436
+ roomId,
437
+ durationMs,
438
+ };
439
+ if (durationMs >= PIPELINE_HOOK_DEBUG_LOG_MS) {
440
+ this.logger.debug(baseFields, "Pipeline hook timing");
441
+ }
442
+ if (slow) {
443
+ this.logger.warn(baseFields, `PIPELINE HOOK SLOW (${durationMs}ms): ${entry.id} phase=${phase}`);
444
+ }
445
+ if (durationMs >= PIPELINE_HOOK_ERROR_LOG_MS) {
446
+ this.logger.error(baseFields, `PIPELINE HOOK VERY SLOW (${durationMs}ms): ${entry.id} phase=${phase}`);
447
+ }
448
+ try {
449
+ await this.emitEvent(EventType.PIPELINE_HOOK_METRIC, {
450
+ phase,
451
+ hookId: entry.id,
452
+ durationMs,
453
+ roomId,
454
+ slow,
455
+ ...(errorMessage !== undefined ? { error: errorMessage } : {}),
456
+ });
457
+ }
458
+ catch (metricError) {
459
+ this.logger.debug({
460
+ src: "pipeline_hook",
461
+ agentId: this.agentId,
462
+ hookId: entry.id,
463
+ phase,
464
+ error: metricError instanceof Error
465
+ ? metricError.message
466
+ : String(metricError),
467
+ }, "PIPELINE_HOOK_METRIC listener failed");
468
+ }
469
+ }
470
+ }
471
+ };
472
+ if (phase === "parallel_with_should_respond" ||
473
+ phase === "model_stream_chunk") {
474
+ await Promise.all(hooks.map((h) => runOne(h)));
475
+ return;
476
+ }
477
+ const mutators = hooks.filter((h) => h.mutatesPrimary);
478
+ const serialReaders = hooks.filter((h) => !h.mutatesPrimary && h.schedule === "serial");
479
+ const concurrentReaders = hooks.filter((h) => !h.mutatesPrimary && h.schedule === "concurrent");
480
+ for (const h of mutators) {
481
+ await runOne(h);
482
+ }
483
+ for (const h of serialReaders) {
484
+ await runOne(h);
485
+ }
486
+ await Promise.all(concurrentReaders.map((h) => runOne(h)));
487
+ }
488
+ registerPipelineHook(spec) {
489
+ this.upsertPipelineHook(resolvePipelineHookSpec(spec));
490
+ }
491
+ unregisterPipelineHook(id) {
492
+ const idx = this.pipelineHookIdToIndex.get(id);
493
+ if (idx === undefined) {
494
+ return;
495
+ }
496
+ this.pipelineHookEntries.splice(idx, 1);
497
+ this.pipelineHookIdToIndex.clear();
498
+ for (let i = 0; i < this.pipelineHookEntries.length; i++) {
499
+ const e = this.pipelineHookEntries[i];
500
+ this.pipelineHookIdToIndex.set(e.id, i);
501
+ }
502
+ }
503
+ /**
504
+ * Run pipeline hooks for a phase (skip metadata, ordering, and outgoing redact).
505
+ * @param pipelineHookTelemetry When false, skips debug logs / `PIPELINE_HOOK_METRIC` per hook
506
+ * (still logs warn/error for slow hooks). Defaults to false for `model_stream_chunk` only.
507
+ */
508
+ async applyPipelineHooks(phase, ctx, pipelineHookTelemetry) {
509
+ if (ctx.phase !== phase) {
510
+ throw new Error(`applyPipelineHooks: phase mismatch (expected ${phase}, ctx.phase=${ctx.phase})`);
511
+ }
512
+ const hookTelemetry = pipelineHookTelemetry !== undefined
513
+ ? pipelineHookTelemetry
514
+ : phase !== "model_stream_chunk";
515
+ const hasHooks = this.hooksForPhase(phase).length > 0;
516
+ switch (phase) {
517
+ case "incoming_before_compose": {
518
+ if (!hasHooks) {
519
+ return;
520
+ }
521
+ const c = ctx;
522
+ const md = c.message.content?.metadata;
523
+ const meta = typeof md === "object" && md !== null
524
+ ? md
525
+ : null;
526
+ if (meta?.skipIncomingMessageHooks === true) {
527
+ return;
528
+ }
529
+ const messageId = c.message.id;
530
+ await this.invokePipelineHooks(phase, c, "Incoming pipeline hook", hookTelemetry);
531
+ if (messageId) {
532
+ this.stateCache.delete(messageId);
533
+ this.stateCache.delete(`${messageId}_action_results`);
534
+ }
535
+ return;
536
+ }
537
+ case "pre_should_respond": {
538
+ if (!hasHooks) {
539
+ return;
540
+ }
541
+ const c = ctx;
542
+ const md = c.message.content?.metadata;
543
+ const meta = typeof md === "object" && md !== null
544
+ ? md
545
+ : null;
546
+ if (meta?.skipPreShouldRespondHooks === true) {
547
+ return;
548
+ }
549
+ await this.invokePipelineHooks(phase, c, "Pre-should-respond pipeline hook", hookTelemetry);
550
+ return;
551
+ }
552
+ case "parallel_with_should_respond": {
553
+ if (!hasHooks) {
554
+ return;
555
+ }
556
+ const c = ctx;
557
+ const md = c.message.content?.metadata;
558
+ const meta = typeof md === "object" && md !== null
559
+ ? md
560
+ : null;
561
+ if (meta?.skipParallelWithShouldRespondHooks === true) {
562
+ return;
563
+ }
564
+ await this.invokePipelineHooks(phase, c, "Parallel should-respond pipeline hook", hookTelemetry);
565
+ return;
566
+ }
567
+ case "outgoing_before_deliver": {
568
+ const c = ctx;
569
+ if (hasHooks) {
570
+ await this.invokePipelineHooks(phase, c, "Outgoing pipeline hook", hookTelemetry);
571
+ }
572
+ c.content.text = this.redactSecrets(coerceOutgoingMessageText(c.content.text));
573
+ return;
574
+ }
575
+ case "pre_model":
576
+ case "post_model": {
577
+ if (!hasHooks) {
578
+ return;
579
+ }
580
+ await this.invokePipelineHooks(phase, ctx, phase === "pre_model"
581
+ ? "Pre-model pipeline hook"
582
+ : "Post-model pipeline hook", hookTelemetry);
583
+ return;
584
+ }
585
+ case "after_memory_persisted": {
586
+ if (!hasHooks) {
587
+ return;
588
+ }
589
+ const c = ctx;
590
+ const md = c.memory.content?.metadata;
591
+ const meta = typeof md === "object" && md !== null
592
+ ? md
593
+ : null;
594
+ if (meta?.skipAfterMemoryPersistedHooks === true) {
595
+ return;
596
+ }
597
+ await this.invokePipelineHooks(phase, c, "After-memory-persisted pipeline hook", hookTelemetry);
598
+ return;
599
+ }
600
+ case "model_stream_chunk":
601
+ case "model_stream_end": {
602
+ if (!hasHooks) {
603
+ return;
604
+ }
605
+ await this.invokePipelineHooks(phase, ctx, phase === "model_stream_chunk"
606
+ ? "Model stream chunk pipeline hook"
607
+ : "Model stream end pipeline hook", hookTelemetry);
608
+ return;
609
+ }
610
+ default: {
611
+ throw new Error(`Unknown pipeline hook phase: ${String(phase)}`);
612
+ }
613
+ }
614
+ }
355
615
  async registerPlugin(plugin) {
356
616
  if (!plugin.name) {
357
617
  // Ensure plugin.name is defined
@@ -465,14 +725,13 @@ export class AgentRuntime {
465
725
  }
466
726
  if (pluginToRegister.routes) {
467
727
  for (const route of pluginToRegister.routes) {
728
+ // namespace plugin name infront of paths
468
729
  const routePath = route.path.startsWith("/")
469
730
  ? route.path
470
731
  : `/${route.path}`;
471
732
  this.routes.push({
472
733
  ...route,
473
- path: route.rawPath
474
- ? routePath
475
- : `/${pluginToRegister.name}${routePath}`,
734
+ path: `/${pluginToRegister.name}${routePath}`,
476
735
  });
477
736
  }
478
737
  }
@@ -1005,6 +1264,125 @@ export class AgentRuntime {
1005
1264
  // Default to true (check should respond is enabled)
1006
1265
  return true;
1007
1266
  }
1267
+ getOptimizationDir() {
1268
+ const setting = this.getSetting("OPTIMIZATION_DIR");
1269
+ return getOptimizationRootDir(typeof setting === "string" ? setting : null);
1270
+ }
1271
+ registerPromptOptimizationHooks(hooks) {
1272
+ this.promptOptimizationHooks = hooks;
1273
+ }
1274
+ getPromptOptimizationHooks() {
1275
+ return this.promptOptimizationHooks;
1276
+ }
1277
+ resolveProviderModelString(resolvedModelType, optionsModel, effectiveModelId) {
1278
+ if (effectiveModelId)
1279
+ return effectiveModelId;
1280
+ if (optionsModel)
1281
+ return optionsModel;
1282
+ const slotToSetting = {
1283
+ TEXT_NANO: "NANO_MODEL",
1284
+ TEXT_MINI: "MINI_MODEL",
1285
+ TEXT_SMALL: "SMALL_MODEL",
1286
+ TEXT_LARGE: "LARGE_MODEL",
1287
+ TEXT_MEGA: "MEGA_MODEL",
1288
+ RESPONSE_HANDLER: "RESPONSE_HANDLER_MODEL",
1289
+ ACTION_PLANNER: "ACTION_PLANNER_MODEL",
1290
+ REASONING_SMALL: "REASONING_SMALL_MODEL",
1291
+ REASONING_LARGE: "REASONING_LARGE_MODEL",
1292
+ TEXT_COMPLETION: "COMPLETION_MODEL",
1293
+ };
1294
+ const providerPrefixes = ["OLLAMA_", "OPENAI_", "ANTHROPIC_", ""];
1295
+ for (const candidate of getModelFallbackChain(resolvedModelType)) {
1296
+ const settingKey = slotToSetting[candidate];
1297
+ if (!settingKey)
1298
+ continue;
1299
+ for (const prefix of providerPrefixes) {
1300
+ const val = this.getSetting(`${prefix}${settingKey}`);
1301
+ if (typeof val === "string" && val)
1302
+ return val;
1303
+ }
1304
+ }
1305
+ return resolvedModelType;
1306
+ }
1307
+ enrichTrace(runId, signal) {
1308
+ const traceIds = this.runToTraces.get(runId);
1309
+ if (!traceIds)
1310
+ return;
1311
+ const targetTraceId = signal.traceId;
1312
+ for (const tid of traceIds) {
1313
+ if (targetTraceId && tid !== targetTraceId)
1314
+ continue;
1315
+ const trace = this.activeTraces.get(tid);
1316
+ if (!trace)
1317
+ continue;
1318
+ trace.scoreCard.signals.push(signal);
1319
+ const card = ScoreCard.fromJSON(trace.scoreCard);
1320
+ trace.scoreCard.compositeScore = card.composite();
1321
+ trace.enrichedAt = Date.now();
1322
+ }
1323
+ }
1324
+ getActiveTrace(runId) {
1325
+ const traceIds = this.runToTraces.get(runId);
1326
+ if (!traceIds)
1327
+ return undefined;
1328
+ let latest;
1329
+ for (const tid of traceIds) {
1330
+ const t = this.activeTraces.get(tid);
1331
+ if (t)
1332
+ latest = t;
1333
+ }
1334
+ return latest;
1335
+ }
1336
+ getActiveTracesForRun(runId) {
1337
+ const traceIds = this.runToTraces.get(runId);
1338
+ if (!traceIds)
1339
+ return [];
1340
+ const traces = [];
1341
+ for (const tid of traceIds) {
1342
+ const t = this.activeTraces.get(tid);
1343
+ if (t)
1344
+ traces.push(t);
1345
+ }
1346
+ return traces;
1347
+ }
1348
+ deleteActiveTrace(runId) {
1349
+ const traceIds = this.runToTraces.get(runId);
1350
+ if (traceIds) {
1351
+ for (const tid of traceIds) {
1352
+ this.activeTraces.delete(tid);
1353
+ }
1354
+ this.runToTraces.delete(runId);
1355
+ }
1356
+ }
1357
+ deleteActiveTraceById(traceId) {
1358
+ this.activeTraces.delete(traceId);
1359
+ for (const [rid, tids] of this.runToTraces) {
1360
+ if (tids.delete(traceId) && tids.size === 0) {
1361
+ this.runToTraces.delete(rid);
1362
+ }
1363
+ }
1364
+ }
1365
+ static ACTIVE_TRACE_TTL_MS = 5 * 60 * 1000;
1366
+ activeTraceTtlPurgeCounter = 0;
1367
+ purgeStaleActiveTraces() {
1368
+ const now = Date.now();
1369
+ const ttl = AgentRuntime.ACTIVE_TRACE_TTL_MS;
1370
+ for (const [id, t] of this.activeTraces) {
1371
+ if (now - t.createdAt <= ttl)
1372
+ continue;
1373
+ this.activeTraces.delete(id);
1374
+ for (const [rid, tids] of this.runToTraces) {
1375
+ tids.delete(id);
1376
+ if (tids.size === 0)
1377
+ this.runToTraces.delete(rid);
1378
+ }
1379
+ }
1380
+ }
1381
+ maybeRunActiveTraceTTLPurge() {
1382
+ if (++this.activeTraceTtlPurgeCounter % 100 !== 0)
1383
+ return;
1384
+ this.purgeStaleActiveTraces();
1385
+ }
1008
1386
  /**
1009
1387
  * Get the messaging adapter if available
1010
1388
  *
@@ -1415,10 +1793,34 @@ export class AgentRuntime {
1415
1793
  const textToStream = currentFilter.push(chunk);
1416
1794
  if (textToStream && onStreamChunk) {
1417
1795
  filteredAccumulated += textToStream;
1796
+ await this.applyPipelineHooks("model_stream_chunk", modelStreamChunkPipelineHookContext({
1797
+ source: "process_actions",
1798
+ chunk: textToStream,
1799
+ messageId: msgId,
1800
+ roomId: message.roomId,
1801
+ runId,
1802
+ responseId: responseMessageId,
1803
+ accumulated: filteredAccumulated,
1804
+ }));
1418
1805
  await onStreamChunk(textToStream, msgId, filteredAccumulated);
1419
1806
  }
1420
1807
  },
1421
1808
  onStreamEnd: () => {
1809
+ const textSnapshot = filteredAccumulated;
1810
+ void this.applyPipelineHooks("model_stream_end", modelStreamEndPipelineHookContext({
1811
+ source: "process_actions",
1812
+ roomId: message.roomId,
1813
+ runId,
1814
+ responseId: responseMessageId,
1815
+ messageId: responseMessageId,
1816
+ text: textSnapshot,
1817
+ })).catch((err) => {
1818
+ this.logger.debug({
1819
+ src: "agent",
1820
+ agentId: this.agentId,
1821
+ error: err instanceof Error ? err.message : String(err),
1822
+ }, "model_stream_end pipeline hook failed");
1823
+ });
1422
1824
  // Reset filter and local accumulator for next useModel call
1423
1825
  currentFilter = null;
1424
1826
  filteredAccumulated = "";
@@ -1558,10 +1960,13 @@ export class AgentRuntime {
1558
1960
  }
1559
1961
  if (callback) {
1560
1962
  for (const content of storedCallbackData) {
1561
- // Redact any secrets from callback content before sending
1562
- if (content.text) {
1563
- content.text = this.redactSecrets(content.text);
1564
- }
1963
+ await this.applyPipelineHooks("outgoing_before_deliver", outgoingPipelineHookContext(content, {
1964
+ source: "action",
1965
+ roomId: message.roomId,
1966
+ message,
1967
+ actionName: action.name,
1968
+ responseId: content.responseId,
1969
+ }));
1565
1970
  await callback(content);
1566
1971
  }
1567
1972
  }
@@ -2018,6 +2423,8 @@ export class AgentRuntime {
2018
2423
  }));
2019
2424
  if (trajectoryStepId && trajLogger) {
2020
2425
  const userText = typeof message.content?.text === "string" ? message.content.text : "";
2426
+ const trajCtx = getTrajectoryContext();
2427
+ const providerTraceId = this.getActiveTrace(this.getCurrentRunId())?.id;
2021
2428
  for (const r of providerData) {
2022
2429
  try {
2023
2430
  const textLen = typeof r.text === "string" ? r.text.length : 0;
@@ -2027,6 +2434,10 @@ export class AgentRuntime {
2027
2434
  data: { textLength: textLen },
2028
2435
  purpose: "compose_state",
2029
2436
  query: { message: userText.slice(0, 2000) },
2437
+ runId: trajCtx?.runId,
2438
+ roomId: trajCtx?.roomId,
2439
+ messageId: trajCtx?.messageId,
2440
+ executionTraceId: providerTraceId,
2030
2441
  });
2031
2442
  }
2032
2443
  catch {
@@ -2696,11 +3107,20 @@ export class AgentRuntime {
2696
3107
  paramsAsStreaming.stream = shouldStream;
2697
3108
  delete paramsAsStreaming.onStreamChunk;
2698
3109
  }
2699
- const response = await handler(this, modelParams);
3110
+ await this.invokePipelineHooks("pre_model", preModelPipelineHookContext({
3111
+ requestedModelType: String(modelType),
3112
+ resolvedModelKey,
3113
+ provider: resolvedModel?.provider ?? provider,
3114
+ roomId: getTrajectoryContext()?.roomId,
3115
+ params: modelParams,
3116
+ }), "Pre-model pipeline hook");
3117
+ const rawResponse = await handler(this, modelParams);
3118
+ const resultRef = { current: rawResponse };
3119
+ const modelOutToTrajectoryString = (v) => typeof v === "string" ? v : JSON.stringify(v);
2700
3120
  // Stream: broadcast to callbacks if streaming
2701
3121
  if (shouldStream &&
2702
3122
  (paramsChunk || ctxChunk) &&
2703
- isTextStreamResult(response)) {
3123
+ isTextStreamResult(rawResponse)) {
2704
3124
  // WHY undefined for accumulated: raw LLM tokens have no field-level
2705
3125
  // extraction — accumulated text is only meaningful after an XML
2706
3126
  // extractor (ValidationStreamExtractor) has parsed and isolated a
@@ -2708,25 +3128,61 @@ export class AgentRuntime {
2708
3128
  // accumulated data get it from the extractor's onChunk bridge in
2709
3129
  // dynamicPromptExecFromState, not from the raw token loop.
2710
3130
  let fullText = "";
2711
- for await (const chunk of response.textStream) {
3131
+ for await (const chunk of rawResponse.textStream) {
2712
3132
  if (abortSignal?.aborted)
2713
3133
  break;
2714
3134
  fullText += chunk;
2715
- if (paramsChunk)
2716
- await paramsChunk(chunk, msgId, undefined);
2717
- if (ctxChunk)
2718
- await ctxChunk(chunk, msgId, undefined);
3135
+ const trajStream = getTrajectoryContext();
3136
+ await this.invokePipelineHooks("model_stream_chunk", modelStreamChunkPipelineHookContext({
3137
+ source: "use_model",
3138
+ chunk,
3139
+ messageId: msgId,
3140
+ roomId: trajStream?.roomId ??
3141
+ this.currentRoomId ??
3142
+ this.agentId,
3143
+ runId: this.getCurrentRunId(),
3144
+ ...(trajStream?.messageId
3145
+ ? { responseId: trajStream.messageId }
3146
+ : {}),
3147
+ accumulated: fullText,
3148
+ }), "Model stream chunk (useModel)", false);
3149
+ await runInsideModelStreamChunkDelivery(async () => {
3150
+ if (paramsChunk)
3151
+ await paramsChunk(chunk, msgId, undefined);
3152
+ if (ctxChunk)
3153
+ await ctxChunk(chunk, msgId, undefined);
3154
+ });
2719
3155
  }
3156
+ const trajStreamEnd = getTrajectoryContext();
3157
+ await this.invokePipelineHooks("model_stream_end", modelStreamEndPipelineHookContext({
3158
+ source: "use_model",
3159
+ roomId: trajStreamEnd?.roomId ??
3160
+ this.currentRoomId ??
3161
+ this.agentId,
3162
+ runId: this.getCurrentRunId(),
3163
+ messageId: msgId ?? trajStreamEnd?.messageId,
3164
+ text: fullText,
3165
+ }), "Model stream end (useModel)", true);
2720
3166
  // Signal stream end to allow context to reset state between useModel calls
2721
3167
  const streamingCtxEnd = getStreamingContext();
2722
3168
  const ctxEnd = streamingCtxEnd?.onStreamEnd;
2723
3169
  if (ctxEnd)
2724
3170
  ctxEnd();
2725
- // Log the completed stream
3171
+ resultRef.current = fullText;
2726
3172
  const elapsedTime = (typeof performance !== "undefined" &&
2727
3173
  typeof performance.now === "function"
2728
3174
  ? performance.now()
2729
3175
  : Date.now()) - startTime;
3176
+ await this.invokePipelineHooks("post_model", postModelPipelineHookContext({
3177
+ requestedModelType: String(modelType),
3178
+ resolvedModelKey,
3179
+ provider: resolvedModel?.provider ?? provider,
3180
+ roomId: getTrajectoryContext()?.roomId,
3181
+ durationMs: Math.round(elapsedTime),
3182
+ params: modelParams,
3183
+ result: resultRef,
3184
+ streaming: true,
3185
+ }), "Post-model pipeline hook");
2730
3186
  this.logger.trace({
2731
3187
  src: "agent",
2732
3188
  agentId: this.agentId,
@@ -2734,7 +3190,7 @@ export class AgentRuntime {
2734
3190
  duration: Number(elapsedTime.toFixed(2)),
2735
3191
  streaming: true,
2736
3192
  }, "Model output (stream with callback complete)");
2737
- this.logModelCall(String(modelType), resolvedModelKey, params, promptContent, elapsedTime, resolvedModel?.provider ?? provider, fullText);
3193
+ this.logModelCall(String(modelType), resolvedModelKey, params, promptContent, elapsedTime, resolvedModel?.provider ?? provider, resultRef.current);
2738
3194
  // Optional trajectory logging: associate model calls with current trajectory step
2739
3195
  // Skip during initialization to avoid deadlock (_ensureServiceStarted awaits initPromise)
2740
3196
  if (!this.initResolver) {
@@ -2749,6 +3205,7 @@ export class AgentRuntime {
2749
3205
  const maxTokensRaw = isPlainObject(modelParams)
2750
3206
  ? modelParams.maxTokens
2751
3207
  : undefined;
3208
+ const activeTrace = this.getActiveTrace(this.getCurrentRunId());
2752
3209
  trajLogger.logLlmCall({
2753
3210
  stepId,
2754
3211
  model: String(resolvedModelKey),
@@ -2756,12 +3213,17 @@ export class AgentRuntime {
2756
3213
  ? this.character.system
2757
3214
  : "",
2758
3215
  userPrompt: promptContent ?? "",
2759
- response: fullText,
3216
+ response: modelOutToTrajectoryString(resultRef.current),
2760
3217
  temperature: typeof tempRaw === "number" ? tempRaw : 0,
2761
3218
  maxTokens: typeof maxTokensRaw === "number" ? maxTokensRaw : 0,
2762
3219
  purpose: trajCtx?.purpose ?? "action",
2763
3220
  actionType: "runtime.useModel",
2764
3221
  latencyMs: Math.max(0, Math.round(elapsedTime)),
3222
+ modelSlot: String(modelType),
3223
+ runId: trajCtx?.runId,
3224
+ roomId: trajCtx?.roomId,
3225
+ messageId: trajCtx?.messageId,
3226
+ executionTraceId: activeTrace?.id,
2765
3227
  });
2766
3228
  }
2767
3229
  }
@@ -2769,20 +3231,29 @@ export class AgentRuntime {
2769
3231
  // Trajectory logging must never break core model flow.
2770
3232
  }
2771
3233
  }
2772
- return fullText;
3234
+ return resultRef.current;
2773
3235
  }
2774
3236
  const elapsedTime = (typeof performance !== "undefined" &&
2775
3237
  typeof performance.now === "function"
2776
3238
  ? performance.now()
2777
3239
  : Date.now()) - startTime;
2778
- // Log timing / response (keep debug log if useful)
3240
+ await this.invokePipelineHooks("post_model", postModelPipelineHookContext({
3241
+ requestedModelType: String(modelType),
3242
+ resolvedModelKey,
3243
+ provider: resolvedModel?.provider ?? provider,
3244
+ roomId: getTrajectoryContext()?.roomId,
3245
+ durationMs: Math.round(elapsedTime),
3246
+ params: modelParams,
3247
+ result: resultRef,
3248
+ streaming: false,
3249
+ }), "Post-model pipeline hook");
2779
3250
  this.logger.trace({
2780
3251
  src: "agent",
2781
3252
  agentId: this.agentId,
2782
3253
  model: resolvedModelKey,
2783
3254
  duration: Number(elapsedTime.toFixed(2)),
2784
3255
  }, "Model output");
2785
- this.logModelCall(String(modelType), resolvedModelKey, params, promptContent, elapsedTime, resolvedModel?.provider ?? provider, response);
3256
+ this.logModelCall(String(modelType), resolvedModelKey, params, promptContent, elapsedTime, resolvedModel?.provider ?? provider, resultRef.current);
2786
3257
  // Optional trajectory logging: associate model calls with current trajectory step
2787
3258
  // Skip during initialization to avoid deadlock (_ensureServiceStarted awaits initPromise)
2788
3259
  if (!this.initResolver) {
@@ -2797,6 +3268,7 @@ export class AgentRuntime {
2797
3268
  const maxTokensRaw = isPlainObject(modelParams)
2798
3269
  ? modelParams.maxTokens
2799
3270
  : undefined;
3271
+ const activeTrace = this.getActiveTrace(this.getCurrentRunId());
2800
3272
  trajLogger.logLlmCall({
2801
3273
  stepId,
2802
3274
  model: String(resolvedModelKey),
@@ -2804,14 +3276,17 @@ export class AgentRuntime {
2804
3276
  ? this.character.system
2805
3277
  : "",
2806
3278
  userPrompt: promptContent ?? "",
2807
- response: typeof response === "string"
2808
- ? response
2809
- : JSON.stringify(response),
3279
+ response: modelOutToTrajectoryString(resultRef.current),
2810
3280
  temperature: typeof tempRaw === "number" ? tempRaw : 0,
2811
3281
  maxTokens: typeof maxTokensRaw === "number" ? maxTokensRaw : 0,
2812
3282
  purpose: trajCtx2?.purpose ?? "action",
2813
3283
  actionType: "runtime.useModel",
2814
3284
  latencyMs: Math.max(0, Math.round(elapsedTime)),
3285
+ modelSlot: String(modelType),
3286
+ runId: trajCtx2?.runId,
3287
+ roomId: trajCtx2?.roomId,
3288
+ messageId: trajCtx2?.messageId,
3289
+ executionTraceId: activeTrace?.id,
2815
3290
  });
2816
3291
  }
2817
3292
  }
@@ -2819,7 +3294,7 @@ export class AgentRuntime {
2819
3294
  // Trajectory logging must never break core model flow.
2820
3295
  }
2821
3296
  }
2822
- return response;
3297
+ return resultRef.current;
2823
3298
  }
2824
3299
  /**
2825
3300
  * Simplified text generation with optional character context.
@@ -2982,7 +3457,8 @@ export class AgentRuntime {
2982
3457
  * 2. Streaming with safety: Enables streaming while detecting truncation
2983
3458
  * 3. Performance tracking: Tracks success/failure rates per model+schema
2984
3459
  */
2985
- async dynamicPromptExecFromState({ state, params, schema, options = {}, }) {
3460
+ async dynamicPromptExecFromState({ state: stateArg, params, schema, options = {}, }) {
3461
+ const state = stateArg ?? { values: {}, data: {}, text: "" };
2986
3462
  // Validate schema input
2987
3463
  if (!schema || schema.length === 0) {
2988
3464
  this.logger.error("dynamicPromptExecFromState: schema must have at least one entry");
@@ -3050,9 +3526,44 @@ export class AgentRuntime {
3050
3526
  let extractor;
3051
3527
  let contextLevel = defaultContextCheckLevel;
3052
3528
  const perFieldCodes = new Map();
3529
+ let traceModelId;
3530
+ let tracePromptKey;
3531
+ let traceVariant = "baseline";
3532
+ let traceArtifactVersion;
3533
+ const traceStartTime = Date.now();
3534
+ const optimizationHooks = this.getPromptOptimizationHooks();
3535
+ if (optimizationHooks) {
3536
+ traceModelId = this.resolveProviderModelString(resolvedModelType, options.model);
3537
+ const schemaHash = this.buildSchemaMetricKey(schema)
3538
+ .split("")
3539
+ .reduce((h, c) => ((h * 31) ^ c.charCodeAt(0)) >>> 0, 5381)
3540
+ .toString(16)
3541
+ .slice(0, 8);
3542
+ tracePromptKey = options.promptName ?? schemaHash;
3543
+ }
3053
3544
  while (currentRetry <= maxRetries) {
3054
3545
  const template = params.prompt;
3055
3546
  const templateStr = typeof template === "function" ? template({ state }) : template;
3547
+ let finalTemplateStr = templateStr;
3548
+ if (optimizationHooks &&
3549
+ traceModelId &&
3550
+ tracePromptKey &&
3551
+ currentRetry === 0) {
3552
+ try {
3553
+ const merged = await optimizationHooks.mergePromptTemplate(this, {
3554
+ baselineTemplate: templateStr,
3555
+ modelId: traceModelId,
3556
+ modelSlot: resolvedModelType,
3557
+ promptKey: tracePromptKey,
3558
+ });
3559
+ finalTemplateStr = merged.template;
3560
+ traceVariant = merged.variant;
3561
+ traceArtifactVersion = merged.artifactVersion;
3562
+ }
3563
+ catch (optErr) {
3564
+ this.logger.warn({ error: optErr }, "Optimization artifact lookup failed");
3565
+ }
3566
+ }
3056
3567
  // Get keys from state (excluding text, values, data)
3057
3568
  const stateKeys = Object.keys(state);
3058
3569
  const filteredKeys = stateKeys.filter((key) => !["text", "values", "data"].includes(key));
@@ -3061,7 +3572,7 @@ export class AgentRuntime {
3061
3572
  return acc;
3062
3573
  }, {});
3063
3574
  const templateContext = { ...filteredState, ...state.values };
3064
- const outputSegments = this.renderPromptTemplateSegments(templateStr, templateContext, state);
3575
+ const outputSegments = this.renderPromptTemplateSegments(finalTemplateStr, templateContext, state);
3065
3576
  const output = outputSegments.map((segment) => segment.content).join("");
3066
3577
  // Process format options
3067
3578
  const hasNestedSchema = this.schemaHasNestedStructure(schema);
@@ -3386,6 +3897,10 @@ ${section_end}`;
3386
3897
  responseContent = this.normalizeStructuredResponse(responseContent);
3387
3898
  // Validate response
3388
3899
  let allGood = true;
3900
+ let schemaValidation = {
3901
+ missingPaths: [],
3902
+ invalidPaths: [],
3903
+ };
3389
3904
  if (!responseContent) {
3390
3905
  validationIssues.push("No structured output could be parsed from the model response.");
3391
3906
  this.logger.warn(`dynamicPromptExecFromState parse problem: ${cleanResponse}`);
@@ -3445,7 +3960,7 @@ ${section_end}`;
3445
3960
  delete responseContent.two_end_code;
3446
3961
  }
3447
3962
  }
3448
- const schemaValidation = this.validateResponseAgainstSchema(responseContent, schema);
3963
+ schemaValidation = this.validateResponseAgainstSchema(responseContent, schema);
3449
3964
  if (schemaValidation.missingPaths.length > 0 ||
3450
3965
  schemaValidation.invalidPaths.length > 0) {
3451
3966
  if (schemaValidation.missingPaths.length > 0) {
@@ -3497,6 +4012,94 @@ ${section_end}`;
3497
4012
  this.logger.debug(`dynamicPromptExecFromState success [${modelSchemaKey}]: ${outputTokenEst} tokens`);
3498
4013
  // Clean up smart retry context from state
3499
4014
  delete state._smartRetryContext;
4015
+ if (optimizationHooks && traceModelId && tracePromptKey) {
4016
+ try {
4017
+ const scoreCard = new ScoreCard();
4018
+ scoreCard.add({
4019
+ source: "dpe",
4020
+ kind: "parseSuccess",
4021
+ value: 1.0,
4022
+ reason: "Structured output parsed successfully",
4023
+ });
4024
+ const schemaOk = schemaValidation.missingPaths.length === 0 &&
4025
+ schemaValidation.invalidPaths.length === 0;
4026
+ scoreCard.add({
4027
+ source: "dpe",
4028
+ kind: "schemaValid",
4029
+ value: schemaOk ? 1.0 : 0.0,
4030
+ reason: schemaOk
4031
+ ? "Response matched schema paths"
4032
+ : `Schema issues: missing [${schemaValidation.missingPaths.join(", ")}]; invalid [${schemaValidation.invalidPaths.join(", ")}]`,
4033
+ });
4034
+ scoreCard.add({
4035
+ source: "dpe",
4036
+ kind: "retriesUsed",
4037
+ value: Math.max(0, 1.0 - currentRetry / Math.max(maxRetries, 1)),
4038
+ reason: `Succeeded on attempt ${currentRetry + 1} of ${maxRetries + 1}`,
4039
+ });
4040
+ scoreCard.add({
4041
+ source: "dpe",
4042
+ kind: "tokenEfficiency",
4043
+ value: Math.min(1.0, 500 / Math.max(outputTokenEst, 1)),
4044
+ reason: `Estimated output tokens ${outputTokenEst} vs reference 500`,
4045
+ });
4046
+ const templateHashInput = typeof params.prompt === "string"
4047
+ ? params.prompt
4048
+ : tracePromptKey;
4049
+ const computedTemplateHash = simpleHash(templateHashInput);
4050
+ const trace = {
4051
+ id: uuidv4(),
4052
+ traceVersion: 1,
4053
+ type: "trace",
4054
+ promptKey: tracePromptKey,
4055
+ modelSlot: resolvedModelType,
4056
+ modelId: traceModelId,
4057
+ runId: this.getCurrentRunId?.() ?? undefined,
4058
+ templateHash: computedTemplateHash,
4059
+ schemaFingerprint: schemaKey,
4060
+ artifactVersion: traceArtifactVersion,
4061
+ variant: traceVariant,
4062
+ parseSuccess: true,
4063
+ schemaValid: schemaValidation.missingPaths.length === 0 &&
4064
+ schemaValidation.invalidPaths.length === 0,
4065
+ validationCodesMatched: true,
4066
+ retriesUsed: currentRetry,
4067
+ tokenEstimate: outputTokenEst,
4068
+ latencyMs: Date.now() - traceStartTime,
4069
+ response: responseContent,
4070
+ scoreCard: scoreCard.toJSON(),
4071
+ createdAt: Date.now(),
4072
+ };
4073
+ this.maybeRunActiveTraceTTLPurge();
4074
+ const runId = trace.runId;
4075
+ if (runId) {
4076
+ this.activeTraces.set(trace.id, trace);
4077
+ if (!this.runToTraces.has(runId)) {
4078
+ this.runToTraces.set(runId, new Set());
4079
+ }
4080
+ this.runToTraces.get(runId)?.add(trace.id);
4081
+ }
4082
+ void optimizationHooks
4083
+ .persistRegistryEntry(this, {
4084
+ promptKey: tracePromptKey,
4085
+ schemaFingerprint: schemaKey,
4086
+ templateHash: computedTemplateHash,
4087
+ promptTemplate: typeof params.prompt === "string" ? params.prompt : "",
4088
+ schema: JSON.parse(JSON.stringify(schema)),
4089
+ })
4090
+ .catch((err) => {
4091
+ this.logger.warn({ error: err, src: "dpe" }, "Failed to write prompt optimization registry");
4092
+ });
4093
+ void optimizationHooks
4094
+ .appendBaselineTrace(this, { trace })
4095
+ .catch((err) => {
4096
+ this.logger.warn("Failed to write optimization trace", err);
4097
+ });
4098
+ }
4099
+ catch (traceErr) {
4100
+ this.logger.warn({ error: traceErr }, "Failed to build optimization trace");
4101
+ }
4102
+ }
3500
4103
  this.clearStructuredOutputFailureState(state);
3501
4104
  return responseContent;
3502
4105
  }
@@ -3602,6 +4205,71 @@ ${section_end}`;
3602
4205
  else {
3603
4206
  this.logger.error(finalFailureMessage, finalFailureSummary);
3604
4207
  }
4208
+ if (optimizationHooks && traceModelId && tracePromptKey) {
4209
+ try {
4210
+ this.purgeStaleActiveTraces();
4211
+ const scoreCard = new ScoreCard();
4212
+ scoreCard.add({
4213
+ source: "dpe",
4214
+ kind: "parseSuccess",
4215
+ value: 0.0,
4216
+ reason: `No valid parse after ${maxRetries} retries`,
4217
+ });
4218
+ scoreCard.add({
4219
+ source: "dpe",
4220
+ kind: "schemaValid",
4221
+ value: 0.0,
4222
+ reason: "Parse or validation never succeeded",
4223
+ });
4224
+ scoreCard.add({
4225
+ source: "dpe",
4226
+ kind: "retriesUsed",
4227
+ value: 0.0,
4228
+ reason: "All retry attempts exhausted",
4229
+ });
4230
+ const failTemplateHash = simpleHash(typeof params.prompt === "string" ? params.prompt : tracePromptKey);
4231
+ const trace = {
4232
+ id: uuidv4(),
4233
+ traceVersion: 1,
4234
+ type: "trace",
4235
+ promptKey: tracePromptKey,
4236
+ modelSlot: resolvedModelType,
4237
+ modelId: traceModelId,
4238
+ runId: this.getCurrentRunId?.() ?? undefined,
4239
+ templateHash: failTemplateHash,
4240
+ schemaFingerprint: schemaKey,
4241
+ artifactVersion: traceArtifactVersion,
4242
+ variant: traceVariant,
4243
+ parseSuccess: false,
4244
+ schemaValid: false,
4245
+ validationCodesMatched: false,
4246
+ retriesUsed: maxRetries,
4247
+ tokenEstimate: 0,
4248
+ latencyMs: Date.now() - traceStartTime,
4249
+ scoreCard: scoreCard.toJSON(),
4250
+ createdAt: Date.now(),
4251
+ };
4252
+ void optimizationHooks
4253
+ .persistRegistryEntry(this, {
4254
+ promptKey: tracePromptKey,
4255
+ schemaFingerprint: schemaKey,
4256
+ templateHash: failTemplateHash,
4257
+ promptTemplate: typeof params.prompt === "string" ? params.prompt : "",
4258
+ schema: JSON.parse(JSON.stringify(schema)),
4259
+ })
4260
+ .catch((err) => {
4261
+ this.logger.warn({ error: err, src: "dpe" }, "Failed to write prompt optimization registry");
4262
+ });
4263
+ void optimizationHooks
4264
+ .appendFailureTrace(this, { trace })
4265
+ .catch((err) => {
4266
+ this.logger.warn("Failed to write failure trace", err);
4267
+ });
4268
+ }
4269
+ catch (traceErr) {
4270
+ this.logger.warn({ error: traceErr }, "Failed to build failure trace");
4271
+ }
4272
+ }
3605
4273
  // Clean up smart retry context from state
3606
4274
  delete state._smartRetryContext;
3607
4275
  if (lastStructuredFailure) {
@@ -5099,7 +5767,9 @@ ${section_end}`;
5099
5767
  const ids = await this.adapter.createMemories([
5100
5768
  { memory, tableName, unique },
5101
5769
  ]);
5102
- return ids[0];
5770
+ const memoryId = ids[0];
5771
+ await this.applyPipelineHooks("after_memory_persisted", afterMemoryPersistedPipelineHookContext(memory, tableName, memoryId));
5772
+ return memoryId;
5103
5773
  }
5104
5774
  async updateMemory(memory) {
5105
5775
  await this.adapter.updateMemories([memory]);