@molroo-io/sdk 0.5.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (293) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +251 -0
  3. package/dist/cjs/api-client.d.ts +23 -0
  4. package/dist/cjs/api-client.d.ts.map +1 -0
  5. package/dist/cjs/api-client.js +55 -0
  6. package/dist/cjs/defaults/index.d.ts +8 -0
  7. package/dist/cjs/defaults/index.d.ts.map +1 -0
  8. package/dist/cjs/defaults/index.js +30 -0
  9. package/dist/cjs/defaults/persona.json +17 -0
  10. package/dist/cjs/embedding/cloudflare.d.ts +15 -0
  11. package/dist/cjs/embedding/cloudflare.d.ts.map +1 -0
  12. package/dist/cjs/embedding/cloudflare.js +16 -0
  13. package/dist/cjs/embedding/cohere.d.ts +8 -0
  14. package/dist/cjs/embedding/cohere.d.ts.map +1 -0
  15. package/dist/cjs/embedding/cohere.js +31 -0
  16. package/dist/cjs/embedding/index.d.ts +9 -0
  17. package/dist/cjs/embedding/index.d.ts.map +1 -0
  18. package/dist/cjs/embedding/index.js +11 -0
  19. package/dist/cjs/embedding/local.d.ts +6 -0
  20. package/dist/cjs/embedding/local.d.ts.map +1 -0
  21. package/dist/cjs/embedding/local.js +28 -0
  22. package/dist/cjs/embedding/openai.d.ts +9 -0
  23. package/dist/cjs/embedding/openai.d.ts.map +1 -0
  24. package/dist/cjs/embedding/openai.js +26 -0
  25. package/dist/cjs/errors.d.ts +17 -0
  26. package/dist/cjs/errors.d.ts.map +1 -0
  27. package/dist/cjs/errors.js +21 -0
  28. package/dist/cjs/events/console.d.ts +25 -0
  29. package/dist/cjs/events/console.d.ts.map +1 -0
  30. package/dist/cjs/events/console.js +41 -0
  31. package/dist/cjs/events/types.d.ts +28 -0
  32. package/dist/cjs/events/types.d.ts.map +1 -0
  33. package/dist/cjs/events/types.js +13 -0
  34. package/dist/cjs/events/webhook.d.ts +30 -0
  35. package/dist/cjs/events/webhook.d.ts.map +1 -0
  36. package/dist/cjs/events/webhook.js +79 -0
  37. package/dist/cjs/generate/persona.d.ts +16 -0
  38. package/dist/cjs/generate/persona.d.ts.map +1 -0
  39. package/dist/cjs/generate/persona.js +42 -0
  40. package/dist/cjs/generate/prompt.d.ts +7 -0
  41. package/dist/cjs/generate/prompt.d.ts.map +1 -0
  42. package/dist/cjs/generate/prompt.js +41 -0
  43. package/dist/cjs/generate/schema.d.ts +32 -0
  44. package/dist/cjs/generate/schema.d.ts.map +1 -0
  45. package/dist/cjs/generate/schema.js +54 -0
  46. package/dist/cjs/generated/index.d.ts +2 -0
  47. package/dist/cjs/generated/index.d.ts.map +1 -0
  48. package/dist/cjs/generated/index.js +2 -0
  49. package/dist/cjs/index.d.ts +66 -0
  50. package/dist/cjs/index.d.ts.map +1 -0
  51. package/dist/cjs/index.js +69 -0
  52. package/dist/cjs/llm/adapter.d.ts +61 -0
  53. package/dist/cjs/llm/adapter.d.ts.map +1 -0
  54. package/dist/cjs/llm/adapter.js +2 -0
  55. package/dist/cjs/llm/resolve.d.ts +28 -0
  56. package/dist/cjs/llm/resolve.d.ts.map +1 -0
  57. package/dist/cjs/llm/resolve.js +20 -0
  58. package/dist/cjs/llm/schema.d.ts +60 -0
  59. package/dist/cjs/llm/schema.d.ts.map +1 -0
  60. package/dist/cjs/llm/schema.js +72 -0
  61. package/dist/cjs/llm/types.d.ts +24 -0
  62. package/dist/cjs/llm/types.d.ts.map +1 -0
  63. package/dist/cjs/llm/types.js +2 -0
  64. package/dist/cjs/llm/vercel-ai/adapter.d.ts +29 -0
  65. package/dist/cjs/llm/vercel-ai/adapter.d.ts.map +1 -0
  66. package/dist/cjs/llm/vercel-ai/adapter.js +234 -0
  67. package/dist/cjs/llm/vercel-ai/config.d.ts +9 -0
  68. package/dist/cjs/llm/vercel-ai/config.d.ts.map +1 -0
  69. package/dist/cjs/llm/vercel-ai/config.js +2 -0
  70. package/dist/cjs/llm/vercel-ai/index.d.ts +9 -0
  71. package/dist/cjs/llm/vercel-ai/index.d.ts.map +1 -0
  72. package/dist/cjs/llm/vercel-ai/index.js +13 -0
  73. package/dist/cjs/memory/cloudflare/index.d.ts +3 -0
  74. package/dist/cjs/memory/cloudflare/index.d.ts.map +1 -0
  75. package/dist/cjs/memory/cloudflare/index.js +5 -0
  76. package/dist/cjs/memory/cloudflare/vectorize.d.ts +62 -0
  77. package/dist/cjs/memory/cloudflare/vectorize.d.ts.map +1 -0
  78. package/dist/cjs/memory/cloudflare/vectorize.js +55 -0
  79. package/dist/cjs/memory/in-memory-semantic.d.ts +16 -0
  80. package/dist/cjs/memory/in-memory-semantic.d.ts.map +1 -0
  81. package/dist/cjs/memory/in-memory-semantic.js +57 -0
  82. package/dist/cjs/memory/in-memory.d.ts +46 -0
  83. package/dist/cjs/memory/in-memory.d.ts.map +1 -0
  84. package/dist/cjs/memory/in-memory.js +115 -0
  85. package/dist/cjs/memory/pinecone/index.d.ts +7 -0
  86. package/dist/cjs/memory/pinecone/index.d.ts.map +1 -0
  87. package/dist/cjs/memory/pinecone/index.js +8 -0
  88. package/dist/cjs/memory/pinecone/memory-adapter.d.ts +62 -0
  89. package/dist/cjs/memory/pinecone/memory-adapter.d.ts.map +1 -0
  90. package/dist/cjs/memory/pinecone/memory-adapter.js +220 -0
  91. package/dist/cjs/memory/pinecone/semantic.d.ts +44 -0
  92. package/dist/cjs/memory/pinecone/semantic.d.ts.map +1 -0
  93. package/dist/cjs/memory/pinecone/semantic.js +90 -0
  94. package/dist/cjs/memory/recall.d.ts +58 -0
  95. package/dist/cjs/memory/recall.d.ts.map +1 -0
  96. package/dist/cjs/memory/recall.js +220 -0
  97. package/dist/cjs/memory/semantic.d.ts +24 -0
  98. package/dist/cjs/memory/semantic.d.ts.map +1 -0
  99. package/dist/cjs/memory/semantic.js +2 -0
  100. package/dist/cjs/memory/sqlite/index.d.ts +3 -0
  101. package/dist/cjs/memory/sqlite/index.d.ts.map +1 -0
  102. package/dist/cjs/memory/sqlite/index.js +5 -0
  103. package/dist/cjs/memory/sqlite/memory-adapter.d.ts +58 -0
  104. package/dist/cjs/memory/sqlite/memory-adapter.d.ts.map +1 -0
  105. package/dist/cjs/memory/sqlite/memory-adapter.js +336 -0
  106. package/dist/cjs/memory/sqlite/schema.d.ts +4 -0
  107. package/dist/cjs/memory/sqlite/schema.d.ts.map +1 -0
  108. package/dist/cjs/memory/sqlite/schema.js +91 -0
  109. package/dist/cjs/memory/supabase/index.d.ts +7 -0
  110. package/dist/cjs/memory/supabase/index.d.ts.map +1 -0
  111. package/dist/cjs/memory/supabase/index.js +8 -0
  112. package/dist/cjs/memory/supabase/memory-adapter.d.ts +67 -0
  113. package/dist/cjs/memory/supabase/memory-adapter.d.ts.map +1 -0
  114. package/dist/cjs/memory/supabase/memory-adapter.js +335 -0
  115. package/dist/cjs/memory/supabase/semantic.d.ts +44 -0
  116. package/dist/cjs/memory/supabase/semantic.d.ts.map +1 -0
  117. package/dist/cjs/memory/supabase/semantic.js +72 -0
  118. package/dist/cjs/memory/types.d.ts +231 -0
  119. package/dist/cjs/memory/types.d.ts.map +1 -0
  120. package/dist/cjs/memory/types.js +12 -0
  121. package/dist/cjs/persona.d.ts +326 -0
  122. package/dist/cjs/persona.d.ts.map +1 -0
  123. package/dist/cjs/persona.js +824 -0
  124. package/dist/cjs/types.d.ts +263 -0
  125. package/dist/cjs/types.d.ts.map +1 -0
  126. package/dist/cjs/types.js +15 -0
  127. package/dist/cjs/world/client.d.ts +36 -0
  128. package/dist/cjs/world/client.d.ts.map +1 -0
  129. package/dist/cjs/world/client.js +59 -0
  130. package/dist/cjs/world/errors.d.ts +9 -0
  131. package/dist/cjs/world/errors.d.ts.map +1 -0
  132. package/dist/cjs/world/errors.js +15 -0
  133. package/dist/cjs/world/index.d.ts +10 -0
  134. package/dist/cjs/world/index.d.ts.map +1 -0
  135. package/dist/cjs/world/index.js +16 -0
  136. package/dist/cjs/world/types.d.ts +101 -0
  137. package/dist/cjs/world/types.d.ts.map +1 -0
  138. package/dist/cjs/world/types.js +8 -0
  139. package/dist/cjs/world/village.d.ts +75 -0
  140. package/dist/cjs/world/village.d.ts.map +1 -0
  141. package/dist/cjs/world/village.js +278 -0
  142. package/dist/cjs/world/world-persona.d.ts +182 -0
  143. package/dist/cjs/world/world-persona.d.ts.map +1 -0
  144. package/dist/cjs/world/world-persona.js +192 -0
  145. package/dist/cjs/world/world.d.ts +41 -0
  146. package/dist/cjs/world/world.d.ts.map +1 -0
  147. package/dist/cjs/world/world.js +91 -0
  148. package/dist/esm/api-client.d.ts +23 -0
  149. package/dist/esm/api-client.d.ts.map +1 -0
  150. package/dist/esm/api-client.js +48 -0
  151. package/dist/esm/defaults/index.d.ts +8 -0
  152. package/dist/esm/defaults/index.d.ts.map +1 -0
  153. package/dist/esm/defaults/index.js +23 -0
  154. package/dist/esm/defaults/persona.json +17 -0
  155. package/dist/esm/embedding/cloudflare.d.ts +15 -0
  156. package/dist/esm/embedding/cloudflare.d.ts.map +1 -0
  157. package/dist/esm/embedding/cloudflare.js +13 -0
  158. package/dist/esm/embedding/cohere.d.ts +8 -0
  159. package/dist/esm/embedding/cohere.d.ts.map +1 -0
  160. package/dist/esm/embedding/cohere.js +28 -0
  161. package/dist/esm/embedding/index.d.ts +9 -0
  162. package/dist/esm/embedding/index.d.ts.map +1 -0
  163. package/dist/esm/embedding/index.js +4 -0
  164. package/dist/esm/embedding/local.d.ts +6 -0
  165. package/dist/esm/embedding/local.d.ts.map +1 -0
  166. package/dist/esm/embedding/local.js +25 -0
  167. package/dist/esm/embedding/openai.d.ts +9 -0
  168. package/dist/esm/embedding/openai.d.ts.map +1 -0
  169. package/dist/esm/embedding/openai.js +23 -0
  170. package/dist/esm/errors.d.ts +17 -0
  171. package/dist/esm/errors.d.ts.map +1 -0
  172. package/dist/esm/errors.js +17 -0
  173. package/dist/esm/events/console.d.ts +25 -0
  174. package/dist/esm/events/console.d.ts.map +1 -0
  175. package/dist/esm/events/console.js +37 -0
  176. package/dist/esm/events/types.d.ts +28 -0
  177. package/dist/esm/events/types.d.ts.map +1 -0
  178. package/dist/esm/events/types.js +12 -0
  179. package/dist/esm/events/webhook.d.ts +30 -0
  180. package/dist/esm/events/webhook.d.ts.map +1 -0
  181. package/dist/esm/events/webhook.js +75 -0
  182. package/dist/esm/generate/persona.d.ts +16 -0
  183. package/dist/esm/generate/persona.d.ts.map +1 -0
  184. package/dist/esm/generate/persona.js +39 -0
  185. package/dist/esm/generate/prompt.d.ts +7 -0
  186. package/dist/esm/generate/prompt.d.ts.map +1 -0
  187. package/dist/esm/generate/prompt.js +38 -0
  188. package/dist/esm/generate/schema.d.ts +32 -0
  189. package/dist/esm/generate/schema.d.ts.map +1 -0
  190. package/dist/esm/generate/schema.js +51 -0
  191. package/dist/esm/generated/index.d.ts +2 -0
  192. package/dist/esm/generated/index.d.ts.map +1 -0
  193. package/dist/esm/generated/index.js +1 -0
  194. package/dist/esm/index.d.ts +66 -0
  195. package/dist/esm/index.d.ts.map +1 -0
  196. package/dist/esm/index.js +49 -0
  197. package/dist/esm/llm/adapter.d.ts +61 -0
  198. package/dist/esm/llm/adapter.d.ts.map +1 -0
  199. package/dist/esm/llm/adapter.js +1 -0
  200. package/dist/esm/llm/resolve.d.ts +28 -0
  201. package/dist/esm/llm/resolve.d.ts.map +1 -0
  202. package/dist/esm/llm/resolve.js +17 -0
  203. package/dist/esm/llm/schema.d.ts +60 -0
  204. package/dist/esm/llm/schema.d.ts.map +1 -0
  205. package/dist/esm/llm/schema.js +69 -0
  206. package/dist/esm/llm/types.d.ts +24 -0
  207. package/dist/esm/llm/types.d.ts.map +1 -0
  208. package/dist/esm/llm/types.js +1 -0
  209. package/dist/esm/llm/vercel-ai/adapter.d.ts +29 -0
  210. package/dist/esm/llm/vercel-ai/adapter.d.ts.map +1 -0
  211. package/dist/esm/llm/vercel-ai/adapter.js +196 -0
  212. package/dist/esm/llm/vercel-ai/config.d.ts +9 -0
  213. package/dist/esm/llm/vercel-ai/config.d.ts.map +1 -0
  214. package/dist/esm/llm/vercel-ai/config.js +1 -0
  215. package/dist/esm/llm/vercel-ai/index.d.ts +9 -0
  216. package/dist/esm/llm/vercel-ai/index.d.ts.map +1 -0
  217. package/dist/esm/llm/vercel-ai/index.js +8 -0
  218. package/dist/esm/memory/cloudflare/index.d.ts +3 -0
  219. package/dist/esm/memory/cloudflare/index.d.ts.map +1 -0
  220. package/dist/esm/memory/cloudflare/index.js +1 -0
  221. package/dist/esm/memory/cloudflare/vectorize.d.ts +62 -0
  222. package/dist/esm/memory/cloudflare/vectorize.d.ts.map +1 -0
  223. package/dist/esm/memory/cloudflare/vectorize.js +51 -0
  224. package/dist/esm/memory/in-memory-semantic.d.ts +16 -0
  225. package/dist/esm/memory/in-memory-semantic.d.ts.map +1 -0
  226. package/dist/esm/memory/in-memory-semantic.js +53 -0
  227. package/dist/esm/memory/in-memory.d.ts +46 -0
  228. package/dist/esm/memory/in-memory.d.ts.map +1 -0
  229. package/dist/esm/memory/in-memory.js +111 -0
  230. package/dist/esm/memory/pinecone/index.d.ts +7 -0
  231. package/dist/esm/memory/pinecone/index.d.ts.map +1 -0
  232. package/dist/esm/memory/pinecone/index.js +3 -0
  233. package/dist/esm/memory/pinecone/memory-adapter.d.ts +62 -0
  234. package/dist/esm/memory/pinecone/memory-adapter.d.ts.map +1 -0
  235. package/dist/esm/memory/pinecone/memory-adapter.js +216 -0
  236. package/dist/esm/memory/pinecone/semantic.d.ts +44 -0
  237. package/dist/esm/memory/pinecone/semantic.d.ts.map +1 -0
  238. package/dist/esm/memory/pinecone/semantic.js +86 -0
  239. package/dist/esm/memory/recall.d.ts +58 -0
  240. package/dist/esm/memory/recall.d.ts.map +1 -0
  241. package/dist/esm/memory/recall.js +215 -0
  242. package/dist/esm/memory/semantic.d.ts +24 -0
  243. package/dist/esm/memory/semantic.d.ts.map +1 -0
  244. package/dist/esm/memory/semantic.js +1 -0
  245. package/dist/esm/memory/sqlite/index.d.ts +3 -0
  246. package/dist/esm/memory/sqlite/index.d.ts.map +1 -0
  247. package/dist/esm/memory/sqlite/index.js +1 -0
  248. package/dist/esm/memory/sqlite/memory-adapter.d.ts +58 -0
  249. package/dist/esm/memory/sqlite/memory-adapter.d.ts.map +1 -0
  250. package/dist/esm/memory/sqlite/memory-adapter.js +296 -0
  251. package/dist/esm/memory/sqlite/schema.d.ts +4 -0
  252. package/dist/esm/memory/sqlite/schema.d.ts.map +1 -0
  253. package/dist/esm/memory/sqlite/schema.js +86 -0
  254. package/dist/esm/memory/supabase/index.d.ts +7 -0
  255. package/dist/esm/memory/supabase/index.d.ts.map +1 -0
  256. package/dist/esm/memory/supabase/index.js +3 -0
  257. package/dist/esm/memory/supabase/memory-adapter.d.ts +67 -0
  258. package/dist/esm/memory/supabase/memory-adapter.d.ts.map +1 -0
  259. package/dist/esm/memory/supabase/memory-adapter.js +331 -0
  260. package/dist/esm/memory/supabase/semantic.d.ts +44 -0
  261. package/dist/esm/memory/supabase/semantic.d.ts.map +1 -0
  262. package/dist/esm/memory/supabase/semantic.js +68 -0
  263. package/dist/esm/memory/types.d.ts +231 -0
  264. package/dist/esm/memory/types.d.ts.map +1 -0
  265. package/dist/esm/memory/types.js +9 -0
  266. package/dist/esm/persona.d.ts +326 -0
  267. package/dist/esm/persona.d.ts.map +1 -0
  268. package/dist/esm/persona.js +787 -0
  269. package/dist/esm/types.d.ts +263 -0
  270. package/dist/esm/types.d.ts.map +1 -0
  271. package/dist/esm/types.js +11 -0
  272. package/dist/esm/world/client.d.ts +36 -0
  273. package/dist/esm/world/client.d.ts.map +1 -0
  274. package/dist/esm/world/client.js +52 -0
  275. package/dist/esm/world/errors.d.ts +9 -0
  276. package/dist/esm/world/errors.d.ts.map +1 -0
  277. package/dist/esm/world/errors.js +11 -0
  278. package/dist/esm/world/index.d.ts +10 -0
  279. package/dist/esm/world/index.d.ts.map +1 -0
  280. package/dist/esm/world/index.js +8 -0
  281. package/dist/esm/world/types.d.ts +101 -0
  282. package/dist/esm/world/types.d.ts.map +1 -0
  283. package/dist/esm/world/types.js +7 -0
  284. package/dist/esm/world/village.d.ts +75 -0
  285. package/dist/esm/world/village.d.ts.map +1 -0
  286. package/dist/esm/world/village.js +274 -0
  287. package/dist/esm/world/world-persona.d.ts +182 -0
  288. package/dist/esm/world/world-persona.d.ts.map +1 -0
  289. package/dist/esm/world/world-persona.js +188 -0
  290. package/dist/esm/world/world.d.ts +41 -0
  291. package/dist/esm/world/world.d.ts.map +1 -0
  292. package/dist/esm/world/world.js +87 -0
  293. package/package.json +207 -0
@@ -0,0 +1,787 @@
1
+ import { resolveLLM } from './llm/resolve';
2
+ import { isMemoryConfig } from './memory/types';
3
+ import { recallMemories, buildMemoryBlock, stripMetaTags } from './memory/recall';
4
+ import { MolrooApiError } from './errors';
5
+ import { createApiClient, unwrap } from './api-client';
6
/**
 * Clamp appraisal values to engine-valid ranges after LLM generation.
 *
 * Bipolar dimensions (relevance, congruence, agency, norms, standards) are
 * bounded to [-1, 1]; unipolar ones (expectedness, controllability,
 * adjustment, urgency) to [0, 1]. Optional dimensions fall back to neutral
 * defaults when the LLM omitted them.
 */
function clampAppraisal(a) {
    // Math.min/Math.max keeps NaN propagation for missing required fields,
    // matching the engine's expectations.
    const clamp = (value, lo, hi) => Math.max(lo, Math.min(hi, value));
    const bipolar = (v) => clamp(v, -1, 1);
    const unipolar = (v) => clamp(v, 0, 1);
    return {
        goal_relevance: bipolar(a.goal_relevance),
        goal_congruence: bipolar(a.goal_congruence),
        expectedness: unipolar(a.expectedness),
        controllability: unipolar(a.controllability),
        agency: bipolar(a.agency),
        norm_compatibility: bipolar(a.norm_compatibility),
        internal_standards: bipolar(a.internal_standards ?? 0),
        adjustment_potential: unipolar(a.adjustment_potential ?? 0.5),
        urgency: unipolar(a.urgency ?? 0.5),
    };
}
21
/**
 * Build a system prompt section describing the interlocutor.
 *
 * Emits an `## About <name>` heading, the optional description, then one
 * `### <title>` subsection per entry in `ctx.extensions`.
 */
function buildInterlocutorBlock(ctx) {
    const lines = [`## About ${ctx.name}`];
    if (ctx.description) {
        lines.push(ctx.description);
    }
    for (const [title, content] of Object.entries(ctx.extensions ?? {})) {
        lines.push(`### ${title}`, content);
    }
    return lines.join('\n');
}
34
+ // ── MolrooPersona ──
35
+ /**
36
+ * SDK client for interacting with a standalone molroo persona instance.
37
+ *
38
+ * @example
39
+ * ```typescript
40
+ * // Single adapter (recommended)
41
+ * const persona = await MolrooPersona.create(
42
+ * { baseUrl: 'https://api.molroo.io', apiKey: 'key', llm,
43
+ * memory: new SqliteMemoryAdapter({ dbPath: './memory.db' }) },
44
+ * { identity: { name: 'Sera' }, personality: { O: 0.8, C: 0.6, E: 0.7 } },
45
+ * );
46
+ *
47
+ * // Split adapters (advanced)
48
+ * const persona = await MolrooPersona.create(
49
+ * { baseUrl: 'https://api.molroo.io', apiKey: 'key', llm,
50
+ * memory: { episodes: episodeStore, semantic: vectorStore, embedding: embedProvider } },
51
+ * { identity: { name: 'Sera' }, personality: { O: 0.8, C: 0.6, E: 0.7 } },
52
+ * );
53
+ * ```
54
+ */
55
+ export class MolrooPersona {
56
+ constructor(config) {
57
+ this.client = createApiClient(config.baseUrl, config.apiKey);
58
+ this._personaId = config.personaId || '';
59
+ this.llm = config.llm ?? null;
60
+ this.engineLlm = config.engineLlm ?? null;
61
+ this.events = config.events ?? null;
62
+ // Memory infrastructure
63
+ if (config.memory) {
64
+ if (isMemoryConfig(config.memory)) {
65
+ // Split adapter path (MemoryConfig)
66
+ this.memoryAdapter = null;
67
+ this.episodeStore = config.memory.episodes;
68
+ this.semanticStore = config.memory.semantic ?? null;
69
+ this.embeddingProvider = config.memory.embedding ?? null;
70
+ this.memoryRecallConfig = config.memory.recall;
71
+ }
72
+ else {
73
+ // Single adapter path (MemoryAdapter)
74
+ this.memoryAdapter = config.memory;
75
+ this.episodeStore = null;
76
+ this.semanticStore = null;
77
+ this.embeddingProvider = null;
78
+ this.memoryRecallConfig = config.recall;
79
+ }
80
+ }
81
+ else {
82
+ this.memoryAdapter = null;
83
+ this.episodeStore = null;
84
+ this.semanticStore = null;
85
+ this.embeddingProvider = null;
86
+ this.memoryRecallConfig = undefined;
87
+ }
88
+ }
89
+ // ── Properties ──
90
    /**
     * Unique identifier for this persona instance.
     * Empty string when no persona has been created or connected yet
     * (the constructor falls back to '' when `personaId` is absent).
     */
    get id() {
        return this._personaId;
    }
94
    /**
     * Unique identifier for this persona instance (alias for {@link id}).
     * Empty string when no persona has been created or connected yet.
     */
    get personaId() {
        return this._personaId;
    }
98
+ // ── Static Factory Methods ──
99
    /**
     * Create a new persona on the molroo API and return a connected instance.
     *
     * @param config - API connection config and optional LLM adapter.
     *   Accepts a full {@link LLMAdapter} or a shorthand config object:
     *   ```typescript
     *   llm: { provider: 'openai', apiKey: '...' }
     *   ```
     * @param personaConfig - Persona identity, personality, and goals.
     * @returns A connected MolrooPersona instance.
     */
    static async create(config, personaConfig) {
        const client = createApiClient(config.baseUrl, config.apiKey);
        // Fresh Idempotency-Key per call — presumably lets the server
        // dedupe retried creation requests (NOTE(review): confirm server
        // honours this header).
        const { data } = await client.POST('/personas', {
            body: { config: personaConfig },
            headers: { 'Idempotency-Key': crypto.randomUUID() },
        });
        // Resolve both adapters concurrently; shorthand configs become
        // full adapters, missing ones stay undefined.
        const [llm, engineLlm] = await Promise.all([
            config.llm ? resolveLLM(config.llm) : undefined,
            config.engineLlm ? resolveLLM(config.engineLlm) : undefined,
        ]);
        // unwrap() throws on an error envelope, so personaId is safe to read.
        const result = unwrap(data);
        const persona = new MolrooPersona({ ...config, llm, engineLlm, personaId: result.personaId });
        return persona;
    }
124
    /**
     * Connect to an existing persona by ID.
     *
     * Verifies the persona exists and fetches its configuration.
     *
     * @param config - API connection config and optional LLM adapter.
     *   Accepts a full {@link LLMAdapter} or a shorthand config object:
     *   ```typescript
     *   llm: { provider: 'openai', apiKey: '...' }
     *   ```
     * @param personaId - The ID of the persona to connect to.
     * @returns A connected MolrooPersona instance.
     */
    static async connect(config, personaId) {
        // Resolve both adapters concurrently (shorthand config → adapter).
        const [llm, engineLlm] = await Promise.all([
            config.llm ? resolveLLM(config.llm) : undefined,
            config.engineLlm ? resolveLLM(config.engineLlm) : undefined,
        ]);
        const persona = new MolrooPersona({ ...config, llm, engineLlm, personaId });
        // Verify persona exists — the GET is awaited purely for its
        // error path; a missing persona surfaces here rather than on
        // the first chat()/perceive() call.
        await persona.client.GET('/personas/{id}', {
            params: { path: { id: personaId } },
        });
        return persona;
    }
149
+ /**
150
+ * List all personas for the authenticated tenant.
151
+ *
152
+ * @param config - API connection config (baseUrl + apiKey).
153
+ * @returns List of persona summaries with optional pagination cursor.
154
+ */
155
+ static async listPersonas(config) {
156
+ const client = createApiClient(config.baseUrl, config.apiKey);
157
+ const { data } = await client.GET('/personas');
158
+ return unwrap(data);
159
+ }
160
+ // ── Runtime ──
161
    /**
     * Send a message to the persona and get an emotion-processed response.
     *
     * Internally converts the SDK-friendly options into the API wire format
     * (`{ event: PerceiveEvent, context?: PerceiveContext }`).
     *
     * @param message - The message/description being perceived.
     * @param options - Optional `type`, `from` (string or interlocutor
     *   object), `payload`, `appraisal`, `stimulus`, `priorEpisodes`,
     *   `relationshipContext`, and `skipMemory`.
     * @returns The unwrapped engine response.
     */
    async perceive(message, options) {
        const eventType = options?.type ?? 'chat_message';
        // `from` may be a plain name or an interlocutor object with a .name.
        const sourceName = typeof options?.from === 'string'
            ? options.from
            : options?.from?.name;
        const event = {
            type: eventType,
            timestamp: Date.now(),
            sourceEntity: sourceName,
            payload: { message, ...options?.payload },
            appraisal: options?.appraisal,
            stimulus: options?.stimulus,
        };
        // Only send a context object when there is something in it.
        const context = (options?.priorEpisodes || options?.relationshipContext)
            ? {
                priorEpisodes: options?.priorEpisodes,
                relationshipContext: options?.relationshipContext,
            }
            : undefined;
        const { data } = await this.client.POST('/personas/{id}/perceive', {
            params: { path: { id: this._personaId } },
            body: { event, context },
        });
        const response = unwrap(data);
        // Save episode to memory (unless caller opts out)
        if (!options?.skipMemory && response.memoryEpisode) {
            // Tag episode with event type so recall can distinguish chat vs events
            if (!response.memoryEpisode.type) {
                response.memoryEpisode.type = eventType;
            }
            // Fire-and-forget: deliberately not awaited so perceive()
            // returns without waiting on storage. NOTE(review): assumes
            // postPerceivePipeline handles its own rejections — confirm.
            this.postPerceivePipeline(response);
        }
        return response;
    }
201
    /**
     * Fire a non-chat event and process through the emotion engine.
     *
     * Unlike chat(), this does not involve LLM generation — it directly
     * sends an event to the perceive endpoint with a required appraisal vector.
     * The resulting episode is saved to memory and can be recalled during chat().
     *
     * @param type - Event type identifier (e.g. 'attack', 'gift', 'rest').
     * @param description - Human-readable event description (used as message context).
     * @param options - Must include `appraisal`. Optionally `from`, `stimulus`, `payload`.
     * @returns Emotion engine response.
     *
     * @example
     * ```typescript
     * await persona.event('attack', 'goblin attacks with sword', {
     *   from: 'goblin',
     *   appraisal: {
     *     goal_relevance: 0.8,
     *     goal_congruence: -0.9,
     *     expectedness: 0.3,
     *     controllability: 0.4,
     *     agency: -0.6,
     *     norm_compatibility: -0.5,
     *   },
     * });
     * ```
     */
    async event(type, description, options) {
        // Thin delegate: `type` overrides any type in options.
        return this.perceive(description, { ...options, type });
    }
231
    /**
     * High-level chat: getState → LLM generate → perceive with appraisal.
     *
     * 1. Calls `getPromptContext()` for the server-assembled system prompt
     * 2. Recalls episodic + semantic memories from client-side stores
     * 3. Sends to LLM adapter for text generation + appraisal
     * 4. Sends the appraisal to the API via `perceive()` for emotion computation
     * 5. Runs post-chat pipeline (episode save, reflection, events)
     *
     * Requires {@link LLMAdapter}. Without one, use {@link perceive} directly
     * for emotion-only interaction.
     *
     * @param message - The user's chat message.
     * @param options - Optional `from` (string or interlocutor object),
     *   `history`, `consumerSuffix`, `onToolCall`.
     * @returns `{ text, response }` — the generated reply plus the engine response.
     */
    async chat(message, options) {
        const fromOption = options?.from ?? 'user';
        const from = typeof fromOption === 'string' ? fromOption : fromOption.name;
        // Only an object `from` carries a describable interlocutor; a plain
        // string name yields null and no "About" block.
        const interlocutor = typeof fromOption === 'object' ? fromOption : null;
        const llm = this.requireLLM();
        // 1. Fetch server-assembled system prompt
        const ctx = await this.getPromptContext(options?.consumerSuffix, from);
        let systemPrompt = ctx.systemPrompt;
        const hasTools = (ctx.tools?.length ?? 0) > 0;
        // 2. Recall memories (episodic + semantic + reflections)
        const recalled = await recallMemories(message, {
            memoryAdapter: this.memoryAdapter,
            episodeStore: this.episodeStore,
            semanticStore: this.semanticStore,
            embeddingProvider: this.embeddingProvider,
            config: this.memoryRecallConfig,
            sourceEntity: from,
            reflectionEntity: this._personaId,
        });
        if (interlocutor) {
            systemPrompt += '\n\n' + buildInterlocutorBlock(interlocutor);
        }
        const memoryBlock = buildMemoryBlock(recalled.episodic, recalled.semantic, recalled.reflections);
        if (memoryBlock) {
            systemPrompt += '\n\n' + memoryBlock;
        }
        // 3. LLM Generate (SDK side, user's API key)
        const messages = [];
        if (options?.history) {
            messages.push(...options.history.map(h => ({ role: h.role, content: h.content })));
        }
        messages.push({ role: 'user', content: message });
        let responseText;
        let appraisal;
        let earlyPerceiveResponse;
        // Split mode: engineLlm handles appraisal, primary llm handles response text
        if (this.engineLlm && this.engineLlm !== llm) {
            // Lazy import keeps the schema module off the hot path when
            // split mode is unused.
            const { AppraisalVectorSchema } = await import('./llm/schema');
            const appraisalInstruction = [
                '',
                '## Appraisal Task',
                "Evaluate how RECEIVING the user's message affects this persona emotionally.",
                'The event you are appraising is the message itself arriving — not the described situation.',
                "When the user shares their own experience (e.g. loss, success), appraise through the persona's relational goals: caring about the speaker makes their suffering relevant and incongruent.",
                "Rate each dimension from the persona's subjective perspective.",
                'An insult should produce negative goal_congruence and norm_compatibility.',
                'A compliment should produce positive values. Neutral small talk should be near zero.',
            ].join('\n');
            // 3a. engineLlm → appraisal (based on prior emotional state)
            // Only send last few messages for appraisal context (full history is wasteful)
            const appraisalMessages = messages.length <= 5 ? messages : messages.slice(-5);
            const { object: appraisalResult } = await this.engineLlm.generateObject({
                system: systemPrompt + appraisalInstruction,
                messages: appraisalMessages,
                schema: AppraisalVectorSchema,
            });
            appraisal = clampAppraisal(appraisalResult);
            // 3b. perceive with user message → engine updates emotion before response generation
            earlyPerceiveResponse = await this.perceive(message, {
                from,
                appraisal,
                priorEpisodes: recalled.episodic.length > 0 ? recalled.episodic : undefined,
                skipMemory: true, // chat() runs its own postChatPipeline
            });
            // 3c. Fetch updated prompt reflecting new emotional state
            let updatedPrompt = systemPrompt;
            try {
                const updatedCtx = await this.getPromptContext(options?.consumerSuffix, from);
                updatedPrompt = updatedCtx.systemPrompt;
                // Re-append the same interlocutor/memory blocks so the
                // refreshed prompt has the same structure as the original.
                if (interlocutor) {
                    updatedPrompt += '\n\n' + buildInterlocutorBlock(interlocutor);
                }
                if (memoryBlock) {
                    updatedPrompt += '\n\n' + memoryBlock;
                }
            }
            catch {
                // Use original prompt if updated fetch fails
            }
            // 3d. primary llm → response (with updated emotional state in prompt)
            const { text } = await llm.generateText({
                system: updatedPrompt,
                messages,
            });
            responseText = text;
        }
        else if (hasTools) {
            // Combined mode with tool-use: LLM can request memory searches
            const result = await this.generateWithToolLoop(llm, systemPrompt, messages, options?.onToolCall);
            responseText = result.text;
            appraisal = clampAppraisal(result.appraisal);
        }
        else {
            // Combined mode (default): single LLM call for response + appraisal
            const { LLMResponseSchema } = await import('./llm/schema');
            const { object: llmResult } = await llm.generateObject({
                system: systemPrompt,
                messages,
                schema: LLMResponseSchema,
            });
            responseText = llmResult.response;
            // Neutral fallback vector when the LLM omitted the appraisal.
            appraisal = clampAppraisal(llmResult.appraisal ?? {
                goal_relevance: 0,
                goal_congruence: 0,
                expectedness: 0.5,
                controllability: 0.5,
                agency: 0,
                norm_compatibility: 0,
                internal_standards: 0,
                adjustment_potential: 0.5,
                urgency: 0.5,
            });
        }
        // 4. Send to API for emotion processing (skip if already done in split mode)
        let response;
        if (earlyPerceiveResponse) {
            response = earlyPerceiveResponse;
        }
        else {
            response = await this.perceive(responseText, {
                from,
                appraisal,
                priorEpisodes: recalled.episodic.length > 0 ? recalled.episodic : undefined,
                skipMemory: true, // chat() runs its own postChatPipeline
            });
        }
        // Inject LLM-generated text
        response.text = responseText;
        // 5. Post-chat pipeline (fire-and-forget: episode save, reflection, events)
        // NOTE(review): not awaited and no .catch here — assumes
        // postChatPipeline handles its own rejections; confirm.
        this.postChatPipeline(response);
        return { text: responseText, response };
    }
375
    /**
     * Advance persona time by the specified number of seconds.
     *
     * @param seconds - Number of seconds to advance.
     * @returns Any pending events that were processed.
     */
    async tick(seconds) {
        const { data } = await this.client.POST('/personas/{id}/tick', {
            params: { path: { id: this._personaId } },
            body: { seconds },
        });
        // unwrap() throws on an error envelope.
        return unwrap(data);
    }
388
    /**
     * Directly set the persona's emotion state in VAD space.
     *
     * @param vad - Partial VAD values to set (V: -1..1, A: 0..1, D: -1..1).
     *   Response body is discarded; errors surface as a rejected promise.
     */
    async setEmotion(vad) {
        await this.client.POST('/personas/{id}/emotion', {
            params: { path: { id: this._personaId } },
            body: { vad },
        });
    }
399
+ // ── State ──
400
+ /**
401
+ * Get the current emotional and psychological state of the persona.
402
+ *
403
+ * @returns Current emotion, mood, somatic, and narrative state.
404
+ */
405
+ async getState() {
406
+ const { data } = await this.client.GET('/personas/{id}/state', {
407
+ params: { path: { id: this._personaId } },
408
+ });
409
+ return unwrap(data);
410
+ }
411
+ /**
412
+ * Get a full snapshot of the persona's internal state.
413
+ *
414
+ * @returns Complete persona snapshot for backup/restore.
415
+ */
416
+ async getSnapshot() {
417
+ const { data } = await this.client.GET('/personas/{id}/snapshot', {
418
+ params: { path: { id: this._personaId } },
419
+ });
420
+ return unwrap(data);
421
+ }
422
+ /**
423
+ * Restore the persona's internal state from a snapshot.
424
+ *
425
+ * @param snapshot - The snapshot to restore.
426
+ */
427
+ async putSnapshot(snapshot) {
428
+ await this.client.PUT('/personas/{id}/snapshot', {
429
+ params: { path: { id: this._personaId } },
430
+ body: { snapshot: snapshot },
431
+ });
432
+ }
433
+ // ── Config ──
434
+ /**
435
+ * Patch the persona's configuration (identity, personality, goals).
436
+ *
437
+ * @param updates - Configuration updates to apply.
438
+ */
439
+ async patch(updates) {
440
+ await this.client.PATCH('/personas/{id}', {
441
+ params: { path: { id: this._personaId } },
442
+ body: updates,
443
+ });
444
+ }
445
+ // ── Lifecycle ──
446
+ /** Soft-delete this persona. Can be restored with {@link restore}. */
447
+ async destroy() {
448
+ if (this._personaId) {
449
+ await this.client.DELETE('/personas/{id}', {
450
+ params: { path: { id: this._personaId } },
451
+ });
452
+ }
453
+ }
454
+ /** Restore a previously soft-deleted persona. */
455
+ async restore() {
456
+ if (this._personaId) {
457
+ await this.client.POST('/personas/{id}/restore', {
458
+ params: { path: { id: this._personaId } },
459
+ });
460
+ }
461
+ }
462
+ // ── Private Helpers ──
463
+ /** Whether memory is available (either single adapter or split stores). */
464
+ get hasMemory() {
465
+ return this.memoryAdapter !== null || this.episodeStore !== null;
466
+ }
467
+ // ── Post-Chat Pipeline (fire-and-forget) ──
468
+ /**
469
+ * Lightweight pipeline for perceive()-only interactions (non-chat events).
470
+ * Saves the episode and emits events, but skips LLM reflection.
471
+ */
472
+ postPerceivePipeline(response) {
473
+ if (!response.memoryEpisode)
474
+ return;
475
+ if (this.memoryAdapter) {
476
+ // Single adapter path — adapter handles semantic indexing internally
477
+ this.memoryAdapter.saveEpisode(response.memoryEpisode).catch(() => { });
478
+ }
479
+ else if (this.episodeStore) {
480
+ // Split adapter path
481
+ this.episodeStore.saveEpisode(response.memoryEpisode).catch(() => { });
482
+ if (this.semanticStore && this.embeddingProvider && response.memoryEpisode.context) {
483
+ const ep = response.memoryEpisode;
484
+ this.embeddingProvider
485
+ .embed(stripMetaTags(ep.context))
486
+ .then((embedding) => {
487
+ this.semanticStore.index({
488
+ id: ep.id,
489
+ embedding,
490
+ metadata: {
491
+ type: 'episode',
492
+ sourceEntity: ep.sourceEntity,
493
+ timestamp: ep.timestamp,
494
+ importance: ep.importance,
495
+ episodeType: ep.type,
496
+ },
497
+ });
498
+ })
499
+ .catch(() => { });
500
+ }
501
+ }
502
+ // Event emission
503
+ if (this.events) {
504
+ this.emitResponseEvents(response, Date.now()).catch(() => { });
505
+ }
506
+ }
507
+ postChatPipeline(response) {
508
+ const now = Date.now();
509
+ if (this.memoryAdapter) {
510
+ // Single adapter path — adapter handles semantic indexing internally
511
+ if (response.memoryEpisode) {
512
+ this.memoryAdapter.saveEpisode(response.memoryEpisode).catch(() => { });
513
+ }
514
+ // Reflection generation
515
+ if ((this.engineLlm || this.llm) && response.reflectionPrompt) {
516
+ this.handleReflection(response.reflectionPrompt, response.emotion.vad).catch(() => { });
517
+ }
518
+ }
519
+ else {
520
+ // Split adapter path
521
+ // 1. Episode storage + semantic indexing
522
+ if (this.episodeStore && response.memoryEpisode) {
523
+ this.episodeStore.saveEpisode(response.memoryEpisode).catch(() => { });
524
+ if (this.semanticStore && this.embeddingProvider && response.memoryEpisode.context) {
525
+ const ep = response.memoryEpisode;
526
+ this.embeddingProvider
527
+ .embed(stripMetaTags(ep.context))
528
+ .then((embedding) => {
529
+ this.semanticStore.index({
530
+ id: ep.id,
531
+ embedding,
532
+ metadata: {
533
+ type: 'episode',
534
+ sourceEntity: ep.sourceEntity,
535
+ timestamp: ep.timestamp,
536
+ importance: ep.importance,
537
+ episodeType: ep.type,
538
+ },
539
+ });
540
+ })
541
+ .catch(() => { });
542
+ }
543
+ }
544
+ // 2. Reflection generation (requires LLM + episodeStore)
545
+ if (this.episodeStore && (this.engineLlm || this.llm) && response.reflectionPrompt) {
546
+ this.handleReflection(response.reflectionPrompt, response.emotion.vad).catch(() => { });
547
+ }
548
+ }
549
+ // 3. Event emission
550
+ if (this.events) {
551
+ this.emitResponseEvents(response, now).catch(() => { });
552
+ }
553
+ }
554
+ async handleReflection(prompt, emotionVad) {
555
+ const reflectionLlm = this.engineLlm ?? this.llm;
556
+ if (!reflectionLlm)
557
+ return;
558
+ if (!this.memoryAdapter && !this.episodeStore)
559
+ return;
560
+ const { text } = await reflectionLlm.generateText({
561
+ system: prompt.system,
562
+ messages: [{ role: 'user', content: prompt.user }],
563
+ });
564
+ const reflection = {
565
+ id: `ref-${Date.now()}`,
566
+ timestamp: Date.now(),
567
+ sourceEntity: this._personaId,
568
+ content: text,
569
+ trigger: 'interaction',
570
+ emotionSnapshot: emotionVad ?? { V: 0, A: 0, D: 0 },
571
+ };
572
+ if (this.memoryAdapter) {
573
+ // Single adapter path — save reflection if supported
574
+ if (this.memoryAdapter.saveReflection) {
575
+ await this.memoryAdapter.saveReflection(reflection);
576
+ }
577
+ }
578
+ else if (this.episodeStore) {
579
+ // Split adapter path
580
+ await this.episodeStore.saveReflection(reflection);
581
+ if (this.semanticStore && this.embeddingProvider) {
582
+ try {
583
+ const embedding = await this.embeddingProvider.embed(text);
584
+ await this.semanticStore.index({
585
+ id: reflection.id,
586
+ embedding,
587
+ metadata: {
588
+ type: 'reflection',
589
+ sourceEntity: this._personaId,
590
+ timestamp: reflection.timestamp,
591
+ },
592
+ });
593
+ }
594
+ catch {
595
+ // semantic indexing is optional
596
+ }
597
+ }
598
+ }
599
+ await this.events?.emit({
600
+ type: 'reflection_generated',
601
+ personaId: this._personaId,
602
+ timestamp: Date.now(),
603
+ payload: { trigger: 'interaction' },
604
+ });
605
+ }
606
+ async emitResponseEvents(response, now) {
607
+ if (!this.events)
608
+ return;
609
+ const batch = [];
610
+ if (response.emotion) {
611
+ batch.push({
612
+ type: 'emotion_changed',
613
+ personaId: this._personaId,
614
+ timestamp: now,
615
+ payload: {
616
+ vad: response.emotion.vad,
617
+ primary: response.emotion.discrete.primary,
618
+ intensity: response.emotion.discrete.intensity,
619
+ },
620
+ });
621
+ }
622
+ if (response.socialUpdates?.length) {
623
+ batch.push({
624
+ type: 'relationship_changed',
625
+ personaId: this._personaId,
626
+ timestamp: now,
627
+ payload: { updates: response.socialUpdates },
628
+ });
629
+ }
630
+ if (response.memoryEpisode) {
631
+ batch.push({
632
+ type: 'memory_consolidated',
633
+ personaId: this._personaId,
634
+ timestamp: now,
635
+ payload: { episodeId: response.memoryEpisode.id },
636
+ });
637
+ }
638
+ if (response.stageTransition) {
639
+ batch.push({ type: 'stage_transition', personaId: this._personaId, timestamp: now, payload: {} });
640
+ }
641
+ if (response.maskExposure) {
642
+ batch.push({ type: 'mask_exposure', personaId: this._personaId, timestamp: now, payload: response.maskExposure });
643
+ }
644
+ if (response.goalChanges) {
645
+ batch.push({ type: 'goal_changed', personaId: this._personaId, timestamp: now, payload: response.goalChanges });
646
+ }
647
+ if (batch.length === 0)
648
+ return;
649
+ if (this.events.emitBatch) {
650
+ await this.events.emitBatch(batch);
651
+ }
652
+ else {
653
+ for (const event of batch) {
654
+ await this.events.emit(event);
655
+ }
656
+ }
657
+ }
658
+ buildAppraisalHint(appraisal) {
659
+ return [
660
+ '## Emotional Evaluation (already computed — embody this, do not describe it)',
661
+ `Goal relevance: ${appraisal.goal_relevance.toFixed(2)}, congruence: ${appraisal.goal_congruence.toFixed(2)}`,
662
+ `Expectedness: ${appraisal.expectedness.toFixed(2)}, controllability: ${appraisal.controllability.toFixed(2)}`,
663
+ `Agency: ${appraisal.agency.toFixed(2)}, norm compatibility: ${appraisal.norm_compatibility.toFixed(2)}`,
664
+ ].join('\n');
665
+ }
666
+ requireLLM() {
667
+ if (!this.llm) {
668
+ throw new MolrooApiError('LLM adapter is required for chat(). Provide llm option, or use perceive() directly.', 'LLM_NOT_CONFIGURED', 400);
669
+ }
670
+ return this.llm;
671
+ }
672
+ async getPromptContext(consumerSuffix, sourceEntity) {
673
+ const { data } = await this.client.POST('/personas/{id}/prompt-context', {
674
+ params: { path: { id: this._personaId } },
675
+ body: {
676
+ ...(consumerSuffix ? { consumerSuffix } : {}),
677
+ ...(sourceEntity ? { sourceEntity } : {}),
678
+ },
679
+ });
680
+ return unwrap(data);
681
+ }
682
+ /**
683
+ * Search persona's episodic memory via the API.
684
+ * Used internally by the tool-use loop.
685
+ */
686
+ async searchMemory(query, options) {
687
+ const { data } = await this.client.POST('/personas/{id}/memory/search', {
688
+ params: { path: { id: this._personaId } },
689
+ body: {
690
+ query,
691
+ ...(options?.topK ? { topK: options.topK } : {}),
692
+ ...(options?.minImportance ? { minImportance: options.minImportance } : {}),
693
+ },
694
+ });
695
+ const result = unwrap(data);
696
+ return result.episodes;
697
+ }
698
+ /**
699
+ * LLM generation loop with tool-use support for standalone persona.
700
+ * Uses LLMResponseWithToolsSchema which allows the LLM to request
701
+ * memory searches via the search_memory field. Capped at 3 iterations.
702
+ */
703
+ async generateWithToolLoop(llm, system, messages, onToolCall) {
704
+ const MAX_TOOL_ITERATIONS = 3;
705
+ const { LLMResponseWithToolsSchema, LLMResponseSchema } = await import('./llm/schema');
706
+ let currentSystem = system;
707
+ for (let iteration = 0; iteration < MAX_TOOL_ITERATIONS; iteration++) {
708
+ const { object: llmResult } = await llm.generateObject({
709
+ system: currentSystem,
710
+ messages,
711
+ schema: LLMResponseWithToolsSchema,
712
+ });
713
+ // If no tool call requested, return the response
714
+ if (!llmResult.search_memory) {
715
+ const text = llmResult.response;
716
+ const appraisal = llmResult.appraisal ?? {
717
+ goal_relevance: 0,
718
+ goal_congruence: 0,
719
+ expectedness: 0.5,
720
+ controllability: 0.5,
721
+ agency: 0,
722
+ norm_compatibility: 0,
723
+ internal_standards: 0,
724
+ adjustment_potential: 0.5,
725
+ urgency: 0.5,
726
+ };
727
+ return { text, appraisal };
728
+ }
729
+ // Execute memory search tool call
730
+ const query = llmResult.search_memory;
731
+ let episodes = [];
732
+ try {
733
+ episodes = await this.searchMemory(query);
734
+ }
735
+ catch {
736
+ // Memory search failed — continue without results
737
+ }
738
+ // Notify callback if provided
739
+ if (onToolCall) {
740
+ onToolCall({
741
+ name: 'search_memory',
742
+ args: { query },
743
+ result: episodes,
744
+ });
745
+ }
746
+ // Inject memory search results into system prompt for next iteration
747
+ const resultBlock = episodes.length > 0
748
+ ? `\n\n## Memory Search Results (query: "${query}")\n${episodes.map(ep => {
749
+ const ts = ep.timestamp ? new Date(ep.timestamp).toISOString() : 'unknown';
750
+ const source = ep.sourceEntity ?? 'unknown';
751
+ const context = ep.context ?? 'no context';
752
+ return `- [${ts}] ${source}: ${context}`;
753
+ }).join('\n')}`
754
+ : `\n\n## Memory Search Results (query: "${query}")\nNo matching memories found.`;
755
+ currentSystem = currentSystem + resultBlock;
756
+ }
757
+ // Exhausted tool iterations — make a final call without tool schema
758
+ const { object: finalResult } = await llm.generateObject({
759
+ system: currentSystem,
760
+ messages,
761
+ schema: LLMResponseSchema,
762
+ });
763
+ const text = finalResult.response;
764
+ const appraisal = finalResult.appraisal ?? {
765
+ goal_relevance: 0,
766
+ goal_congruence: 0,
767
+ expectedness: 0.5,
768
+ controllability: 0.5,
769
+ agency: 0,
770
+ norm_compatibility: 0,
771
+ internal_standards: 0,
772
+ adjustment_potential: 0.5,
773
+ urgency: 0.5,
774
+ };
775
+ return { text, appraisal };
776
+ }
777
+ buildContextBlock(state, from) {
778
+ const parts = [];
779
+ if (from) {
780
+ parts.push(`Speaking with: ${from}.`);
781
+ }
782
+ if (state.narrative) {
783
+ parts.push(`Narrative tone: ${state.narrative.tone.toFixed(2)}, agency: ${state.narrative.agency.toFixed(2)}`);
784
+ }
785
+ return parts.join('\n');
786
+ }
787
+ }