expo-ai-kit 0.1.17 → 0.1.18

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -31,6 +31,7 @@ On-device AI for Expo apps. Run language models locally—no API keys, no cloud,
31
31
  - **Streaming support** — Progressive token streaming for responsive UIs
32
32
  - **Simple API** — Core functions plus prompt helpers for common tasks
33
33
  - **Prompt helpers** — Built-in `summarize()`, `translate()`, `rewrite()`, and more
34
+ - **Chat memory** — Built-in `ChatMemoryManager` for managing conversation history
34
35
 
35
36
  ## Requirements
36
37
 
@@ -126,7 +127,38 @@ console.log(response.text);
126
127
 
127
128
  ### Multi-turn Conversations
128
129
 
129
- For conversations with context, pass the full conversation history:
130
+ For conversations with context, use `ChatMemoryManager` to manage history:
131
+
132
+ ```tsx
133
+ import { ChatMemoryManager, streamMessage } from 'expo-ai-kit';
134
+
135
+ // Create a memory manager (handles history automatically)
136
+ const memory = new ChatMemoryManager({
137
+ maxTurns: 10,
138
+ systemPrompt: 'You are a helpful assistant.',
139
+ });
140
+
141
+ // Add user message and get response
142
+ memory.addUserMessage('My name is Alice.');
143
+ const { promise } = streamMessage(
144
+ memory.getAllMessages(),
145
+ (event) => console.log(event.accumulatedText)
146
+ );
147
+ const response = await promise;
148
+
149
+ // Store assistant response in memory
150
+ memory.addAssistantMessage(response.text);
151
+
152
+ // Continue the conversation (memory includes full history)
153
+ memory.addUserMessage('What is my name?');
154
+ const { promise: p2 } = streamMessage(
155
+ memory.getAllMessages(),
156
+ (event) => console.log(event.accumulatedText)
157
+ );
158
+ // (await p2).text === "Your name is Alice."
159
+ ```
160
+
161
+ Or manually manage the conversation array:
130
162
 
131
163
  ```tsx
132
164
  import { sendMessage, type LLMMessage } from 'expo-ai-kit';
@@ -505,6 +537,99 @@ function answerQuestion(question: string, context: string, options?: LLMAnswerQu
505
537
 
506
538
  ---
507
539
 
540
+ ### `ChatMemoryManager`
541
+
542
+ Manages conversation history for stateless on-device AI models. Automatically handles turn limits and provides the full message array for each request.
543
+
544
+ ```typescript
545
+ class ChatMemoryManager {
546
+ constructor(options?: ChatMemoryOptions);
547
+
548
+ addUserMessage(content: string): void;
549
+ addAssistantMessage(content: string): void;
550
+ addMessage(message: LLMMessage): void;
551
+
552
+ getAllMessages(): LLMMessage[];
553
+ getMessages(): LLMMessage[];
554
+ getPrompt(): string;
555
+ getSnapshot(): ChatMemorySnapshot;
556
+ getTurnCount(): number;
557
+
558
+ setSystemPrompt(prompt: string | undefined): void;
559
+ getSystemPrompt(): string | undefined;
560
+ setMaxTurns(maxTurns: number): void;
561
+
562
+ clear(): void;
563
+ reset(): void;
564
+ }
565
+ ```
566
+
567
+ | Option | Type | Description |
568
+ |--------|------|-------------|
569
+ | `maxTurns` | `number` | Maximum conversation turns to keep (default: `10`) |
570
+ | `systemPrompt` | `string` | System prompt to include in every request |
571
+
572
+ **Why use ChatMemoryManager?**
573
+
574
+ On-device models are stateless — they have no built-in memory. Each request must include the full conversation history. `ChatMemoryManager` handles this automatically:
575
+
576
+ - Stores messages client-side
577
+ - Automatically trims old messages when limit is reached
578
+ - Preserves the system prompt (never trimmed)
579
+ - Provides `getAllMessages()` for API calls
580
+
581
+ **Example with React:**
582
+
583
+ ```tsx
584
+ import { useRef, useState } from 'react';
585
+ import { ChatMemoryManager, streamMessage } from 'expo-ai-kit';
586
+
587
+ function Chat() {
588
+ const [response, setResponse] = useState('');
+ const memoryRef = useRef(new ChatMemoryManager({
589
+ maxTurns: 10,
590
+ systemPrompt: 'You are a helpful assistant.',
591
+ }));
592
+
593
+ const sendMessage = async (text: string) => {
594
+ memoryRef.current.addUserMessage(text);
595
+
596
+ const { promise } = streamMessage(
597
+ memoryRef.current.getAllMessages(),
598
+ (event) => setResponse(event.accumulatedText)
599
+ );
600
+
601
+ const response = await promise;
602
+ memoryRef.current.addAssistantMessage(response.text);
603
+ };
604
+
605
+ const clearChat = () => memoryRef.current.clear();
606
+ }
607
+ ```
608
+
609
+ ---
610
+
611
+ ### `buildPrompt(messages)`
612
+
613
+ Converts a message array to a single prompt string. Useful for debugging or custom implementations.
614
+
615
+ ```typescript
616
+ function buildPrompt(messages: LLMMessage[]): string
617
+ ```
618
+
619
+ **Example:**
620
+ ```tsx
621
+ import { buildPrompt } from 'expo-ai-kit';
622
+
623
+ const prompt = buildPrompt([
624
+ { role: 'system', content: 'You are helpful.' },
625
+ { role: 'user', content: 'Hi!' },
626
+ { role: 'assistant', content: 'Hello!' },
627
+ ]);
628
+ // "SYSTEM: You are helpful.\nUSER: Hi!\nASSISTANT: Hello!"
629
+ ```
630
+
631
+ ---
632
+
508
633
  ### Types
509
634
 
510
635
  ```typescript
@@ -566,6 +691,21 @@ type LLMExtractKeyPointsOptions = {
566
691
  type LLMAnswerQuestionOptions = {
567
692
  detail?: 'brief' | 'medium' | 'detailed';
568
693
  };
694
+
695
+ // Chat Memory Types
696
+ type ChatMemoryOptions = {
697
+ /** Maximum conversation turns to keep (default: 10) */
698
+ maxTurns?: number;
699
+ /** System prompt to include in every request */
700
+ systemPrompt?: string;
701
+ };
702
+
703
+ type ChatMemorySnapshot = {
704
+ messages: LLMMessage[];
705
+ systemPrompt: string | undefined;
706
+ turnCount: number;
707
+ maxTurns: number;
708
+ };
569
709
  ```
570
710
 
571
711
  ## Feature Comparison
@@ -576,6 +716,7 @@ type LLMAnswerQuestionOptions = {
576
716
  | `sendMessage()` | ✅ | ✅ |
577
717
  | `streamMessage()` | ✅ | ✅ |
578
718
  | Prompt helpers | ✅ | ✅ |
719
+ | `ChatMemoryManager` | ✅ | ✅ |
579
720
  | System prompts | ✅ Native | ✅ Prepended |
580
721
  | Multi-turn context | ✅ | ✅ |
581
722
  | Cancel streaming | ✅ | ✅ |
@@ -627,7 +768,7 @@ const { text } = await sendMessage(messages, { systemPrompt: '...' });
627
768
  |---------|--------|----------|
628
769
  | ✅ Streaming responses | Done | - |
629
770
  | ✅ Prompt helpers (summarize, translate, etc.) | Done | - |
630
- | Conversation memory (useChat hook) | Planned | High |
771
+ | Chat memory management | Done | - |
631
772
  | Web/generic fallback | Idea | Medium |
632
773
  | Configurable hyperparameters (temperature, etc.) | Idea | Low |
633
774
 
package/build/index.d.ts CHANGED
@@ -1,5 +1,6 @@
1
1
  import { LLMMessage, LLMSendOptions, LLMResponse, LLMStreamOptions, LLMStreamCallback, LLMSummarizeOptions, LLMTranslateOptions, LLMRewriteOptions, LLMExtractKeyPointsOptions, LLMAnswerQuestionOptions } from './types';
2
2
  export * from './types';
3
+ export * from './memory';
3
4
  /**
4
5
  * Check if on-device AI is available on the current device.
5
6
  * Returns false on unsupported platforms (web, etc.).
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAEA,OAAO,EACL,UAAU,EACV,cAAc,EACd,WAAW,EACX,gBAAgB,EAEhB,iBAAiB,EACjB,mBAAmB,EACnB,mBAAmB,EACnB,iBAAiB,EACjB,0BAA0B,EAC1B,wBAAwB,EACzB,MAAM,SAAS,CAAC;AAEjB,cAAc,SAAS,CAAC;AAkGxB;;;GAGG;AACH,wBAAsB,WAAW,IAAI,OAAO,CAAC,OAAO,CAAC,CAKpD;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAkCG;AACH,wBAAsB,WAAW,CAC/B,QAAQ,EAAE,UAAU,EAAE,EACtB,OAAO,CAAC,EAAE,cAAc,GACvB,OAAO,CAAC,WAAW,CAAC,CAgBtB;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2CG;AACH,wBAAgB,aAAa,CAC3B,QAAQ,EAAE,UAAU,EAAE,EACtB,OAAO,EAAE,iBAAiB,EAC1B,OAAO,CAAC,EAAE,gBAAgB,GACzB;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CAiErD;AAMD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA8BG;AACH,wBAAsB,SAAS,CAC7B,IAAI,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE,mBAAmB,GAC5B,OAAO,CAAC,WAAW,CAAC,CActB;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,wBAAgB,eAAe,CAC7B,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,iBAAiB,EAC1B,OAAO,CAAC,EAAE,mBAAmB,GAC5B;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CAmBrD;AAED;;;;;;;;;;;;;;;;;;;;;;;GAuBG;AACH,wBAAsB,SAAS,CAC7B,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,mBAAmB,GAC3B,OAAO,CAAC,WAAW,CAAC,CAatB;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,wBAAgB,eAAe,CAC7B,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,iBAAiB,EAC1B,OAAO,EAAE,mBAAmB,GAC3B;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CAkBrD;AAED;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,wBAAsB,OAAO,CAC3B,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,iBAAiB,GACzB,OAAO,CAAC,WAAW,CAAC,CAatB;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,wBAAgB,aAAa,CAC3B,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,iBAAiB,EAC1B,OAAO,EAAE,iBAAiB,GACzB;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CAkBrD;AAED;;;;;;;;;;;;;;;;;;;;GAoBG;AACH,wBAAsB,gBAAgB,CACpC,IAAI,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE,0BAA0B,GACnC,OAAO,CAAC,WAAW,CAAC,CAatB;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,wBAAgB,sBAAsB,CACpC,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,iBAAiB,EAC1B,OAAO,CAAC,EAAE,0BAA0B,GACnC;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,
IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CAkBrD;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2BG;AACH,wBAAsB,cAAc,CAClC,QAAQ,EAAE,MAAM,EAChB,OAAO,EAAE,MAAM,EACf,OAAO,CAAC,EAAE,wBAAwB,GACjC,OAAO,CAAC,WAAW,CAAC,CAkBtB;AAED;;;;;;;;;;;;;;;;;;;GAmBG;AACH,wBAAgB,oBAAoB,CAClC,QAAQ,EAAE,MAAM,EAChB,OAAO,EAAE,MAAM,EACf,OAAO,EAAE,iBAAiB,EAC1B,OAAO,CAAC,EAAE,wBAAwB,GACjC;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CA0BrD"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAEA,OAAO,EACL,UAAU,EACV,cAAc,EACd,WAAW,EACX,gBAAgB,EAEhB,iBAAiB,EACjB,mBAAmB,EACnB,mBAAmB,EACnB,iBAAiB,EACjB,0BAA0B,EAC1B,wBAAwB,EACzB,MAAM,SAAS,CAAC;AAEjB,cAAc,SAAS,CAAC;AACxB,cAAc,UAAU,CAAC;AAkGzB;;;GAGG;AACH,wBAAsB,WAAW,IAAI,OAAO,CAAC,OAAO,CAAC,CAKpD;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAkCG;AACH,wBAAsB,WAAW,CAC/B,QAAQ,EAAE,UAAU,EAAE,EACtB,OAAO,CAAC,EAAE,cAAc,GACvB,OAAO,CAAC,WAAW,CAAC,CAgBtB;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2CG;AACH,wBAAgB,aAAa,CAC3B,QAAQ,EAAE,UAAU,EAAE,EACtB,OAAO,EAAE,iBAAiB,EAC1B,OAAO,CAAC,EAAE,gBAAgB,GACzB;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CAiErD;AAMD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA8BG;AACH,wBAAsB,SAAS,CAC7B,IAAI,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE,mBAAmB,GAC5B,OAAO,CAAC,WAAW,CAAC,CActB;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,wBAAgB,eAAe,CAC7B,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,iBAAiB,EAC1B,OAAO,CAAC,EAAE,mBAAmB,GAC5B;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CAmBrD;AAED;;;;;;;;;;;;;;;;;;;;;;;GAuBG;AACH,wBAAsB,SAAS,CAC7B,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,mBAAmB,GAC3B,OAAO,CAAC,WAAW,CAAC,CAatB;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,wBAAgB,eAAe,CAC7B,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,iBAAiB,EAC1B,OAAO,EAAE,mBAAmB,GAC3B;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CAkBrD;AAED;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,wBAAsB,OAAO,CAC3B,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,iBAAiB,GACzB,OAAO,CAAC,WAAW,CAAC,CAatB;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,wBAAgB,aAAa,CAC3B,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,iBAAiB,EAC1B,OAAO,EAAE,iBAAiB,GACzB;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CAkBrD;AAED;;;;;;;;;;;;;;;;;;;;GAoBG;AACH,wBAAsB,gBAAgB,CACpC,IAAI,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE,0BAA0B,GACnC,OAAO,CAAC,WAAW,CAAC,CAatB;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,wBAAgB,sBAAsB,CACpC,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,iBAAiB,EAC1B,OAAO,CAAC,EAAE,0BAA0B,GACnC;IAAE,OAAO,EAAE,OAAO,CAAC
,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CAkBrD;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2BG;AACH,wBAAsB,cAAc,CAClC,QAAQ,EAAE,MAAM,EAChB,OAAO,EAAE,MAAM,EACf,OAAO,CAAC,EAAE,wBAAwB,GACjC,OAAO,CAAC,WAAW,CAAC,CAkBtB;AAED;;;;;;;;;;;;;;;;;;;GAmBG;AACH,wBAAgB,oBAAoB,CAClC,QAAQ,EAAE,MAAM,EAChB,OAAO,EAAE,MAAM,EACf,OAAO,EAAE,iBAAiB,EAC1B,OAAO,CAAC,EAAE,wBAAwB,GACjC;IAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;IAAC,IAAI,EAAE,MAAM,IAAI,CAAA;CAAE,CA0BrD"}
package/build/index.js CHANGED
@@ -1,6 +1,7 @@
1
1
  import ExpoAiKitModule from './ExpoAiKitModule';
2
2
  import { Platform } from 'react-native';
3
3
  export * from './types';
4
+ export * from './memory';
4
5
  const DEFAULT_SYSTEM_PROMPT = 'You are a helpful, friendly assistant. Answer the user directly and concisely.';
5
6
  let streamIdCounter = 0;
6
7
  function generateSessionId() {
@@ -1 +1 @@
1
- {"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,eAAe,MAAM,mBAAmB,CAAC;AAChD,OAAO,EAAE,QAAQ,EAAE,MAAM,cAAc,CAAC;AAexC,cAAc,SAAS,CAAC;AAExB,MAAM,qBAAqB,GACzB,gFAAgF,CAAC;AAEnF,IAAI,eAAe,GAAG,CAAC,CAAC;AACxB,SAAS,iBAAiB;IACxB,OAAO,UAAU,IAAI,CAAC,GAAG,EAAE,IAAI,EAAE,eAAe,EAAE,CAAC;AACrD,CAAC;AAED,+EAA+E;AAC/E,0BAA0B;AAC1B,+EAA+E;AAE/E,MAAM,6BAA6B,GAAG;IACpC,KAAK,EAAE,2CAA2C;IAClD,MAAM,EAAE,mDAAmD;IAC3D,IAAI,EAAE,2DAA2D;CACzD,CAAC;AAEX,MAAM,4BAA4B,GAAG;IACnC,SAAS,EAAE,2CAA2C;IACtD,OAAO,EAAE,sCAAsC;IAC/C,IAAI,EAAE,0EAA0E;CACxE,CAAC;AAEX,MAAM,2BAA2B,GAAG;IAClC,MAAM,EAAE,uDAAuD;IAC/D,QAAQ,EAAE,gCAAgC;IAC1C,OAAO,EAAE,iCAAiC;CAClC,CAAC;AAEX,MAAM,0BAA0B,GAAG;IACjC,MAAM,EACJ,6EAA6E;IAC/E,MAAM,EAAE,2CAA2C;IACnD,YAAY,EACV,mEAAmE;IACrE,QAAQ,EAAE,mCAAmC;IAC7C,OAAO,EACL,sEAAsE;IACxE,QAAQ,EAAE,6CAA6C;IACvD,MAAM,EACJ,gFAAgF;IAClF,QAAQ,EAAE,8DAA8D;CAChE,CAAC;AAEX,MAAM,0BAA0B,GAAG;IACjC,KAAK,EAAE,+CAA+C;IACtD,MAAM,EAAE,+CAA+C;IACvD,QAAQ,EACN,6FAA6F;CACvF,CAAC;AAEX,+EAA+E;AAC/E,yBAAyB;AACzB,+EAA+E;AAE/E,SAAS,oBAAoB,CAC3B,MAAmC,EACnC,KAAuC;IAEvC,OAAO,4FAA4F,6BAA6B,CAAC,MAAM,CAAC,IAAI,4BAA4B,CAAC,KAAK,CAAC,yCAAyC,CAAC;AAC3N,CAAC;AAED,SAAS,oBAAoB,CAC3B,EAAU,EACV,IAAwB,EACxB,IAAuC;IAEvC,MAAM,UAAU,GAAG,IAAI,CAAC,CAAC,CAAC,QAAQ,IAAI,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;IAC/C,OAAO,gEAAgE,UAAU,MAAM,EAAE,KAAK,2BAA2B,CAAC,IAAI,CAAC,uFAAuF,CAAC;AACzN,CAAC;AAED,SAAS,kBAAkB,CACzB,KAQc;IAEd,OAAO,gCAAgC,0BAA0B,CAAC,KAAK,CAAC,+EAA+E,CAAC;AAC1J,CAAC;AAED,SAAS,2BAA2B,CAAC,SAAiB;IACpD,OAAO,8CAA8C,SAAS,kNAAkN,CAAC;AACnR,CAAC;AAED,SAAS,yBAAyB,CAChC,MAAuC;IAEvC,OAAO,gGAAgG,0BAA0B,CAAC,MAAM,CAAC,oFAAoF,CAAC;AAChO,CAAC;AAED;;;GAGG;AACH,MAAM,CAAC,KAAK,UAAU,WAAW;IAC/B,IAAI,QAAQ,CAAC,EAAE,KAAK,KAAK,IAAI,QAAQ,CAAC,EAAE,KAAK,SAAS,EAAE,CAAC;QACvD,OAAO,KAAK,CAAC;IACf,CAAC;IACD,OAAO,eAAe,CAAC,WAAW,EAAE,CAAC;AACvC,CAAC;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAkCG;AACH,MAAM,CAAC,KAAK,UAAU,WAAW,CAC/B,QAAsB,EACtB,OAAwB;IAExB,IAAI,QAAQ,CAAC,EAAE,KAAK,KAAK,IAAI,QAAQ,CAA
C,EAAE,KAAK,SAAS,EAAE,CAAC;QACvD,OAAO,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC;IACtB,CAAC;IAED,IAAI,CAAC,QAAQ,IAAI,QAAQ,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACvC,MAAM,IAAI,KAAK,CAAC,gCAAgC,CAAC,CAAC;IACpD,CAAC;IAED,0FAA0F;IAC1F,MAAM,gBAAgB,GAAG,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC;IACnE,MAAM,YAAY,GAAG,gBAAgB;QACnC,CAAC,CAAC,EAAE,CAAC,oCAAoC;QACzC,CAAC,CAAC,OAAO,EAAE,YAAY,IAAI,qBAAqB,CAAC;IAEnD,OAAO,eAAe,CAAC,WAAW,CAAC,QAAQ,EAAE,YAAY,CAAC,CAAC;AAC7D,CAAC;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2CG;AACH,MAAM,UAAU,aAAa,CAC3B,QAAsB,EACtB,OAA0B,EAC1B,OAA0B;IAE1B,+BAA+B;IAC/B,IAAI,QAAQ,CAAC,EAAE,KAAK,KAAK,IAAI,QAAQ,CAAC,EAAE,KAAK,SAAS,EAAE,CAAC;QACvD,OAAO;YACL,OAAO,EAAE,OAAO,CAAC,OAAO,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC;YACtC,IAAI,EAAE,GAAG,EAAE,GAAE,CAAC;SACf,CAAC;IACJ,CAAC;IAED,IAAI,CAAC,QAAQ,IAAI,QAAQ,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACvC,OAAO;YACL,OAAO,EAAE,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,gCAAgC,CAAC,CAAC;YACpE,IAAI,EAAE,GAAG,EAAE,GAAE,CAAC;SACf,CAAC;IACJ,CAAC;IAED,MAAM,SAAS,GAAG,iBAAiB,EAAE,CAAC;IACtC,IAAI,SAAS,GAAG,EAAE,CAAC;IACnB,IAAI,OAAO,GAAG,KAAK,CAAC;IAEpB,0FAA0F;IAC1F,MAAM,gBAAgB,GAAG,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC;IACnE,MAAM,YAAY,GAAG,gBAAgB;QACnC,CAAC,CAAC,EAAE,CAAC,oCAAoC;QACzC,CAAC,CAAC,OAAO,EAAE,YAAY,IAAI,qBAAqB,CAAC;IAEnD,MAAM,OAAO,GAAG,IAAI,OAAO,CAAc,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QAC3D,6BAA6B;QAC7B,MAAM,YAAY,GAAG,eAAe,CAAC,WAAW,CAC9C,eAAe,EACf,CAAC,KAAqB,EAAE,EAAE;YACxB,uCAAuC;YACvC,IAAI,KAAK,CAAC,SAAS,KAAK,SAAS;gBAAE,OAAO;YAE1C,SAAS,GAAG,KAAK,CAAC,eAAe,CAAC;YAElC,2BAA2B;YAC3B,OAAO,CAAC,KAAK,CAAC,CAAC;YAEf,gCAAgC;YAChC,IAAI,KAAK,CAAC,MAAM,EAAE,CAAC;gBACjB,YAAY,CAAC,MAAM,EAAE,CAAC;gBACtB,OAAO,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC;YAC/B,CAAC;QACH,CAAC,CACF,CAAC;QAEF,iCAAiC;QACjC,eAAe,CAAC,cAAc,CAAC,QAAQ,EAAE,YAAY,EAAE,SAAS,CAAC,CAAC,KAAK,CACrE,CAAC,KAAK,EAAE,EAAE;YACR,YAAY,CAAC,MAAM,EAAE,CAAC;YACtB,MAAM,CAAC,KAAK,CAAC,CAAC;QAChB,CAAC,CACF,CAAC;IACJ,CAAC,CAAC,CAAC;IAEH,MA
AM,IAAI,GAAG,GAAG,EAAE;QAChB,IAAI,OAAO;YAAE,OAAO;QACpB,OAAO,GAAG,IAAI,CAAC;QACf,eAAe,CAAC,aAAa,CAAC,SAAS,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE;YAClD,8BAA8B;QAChC,CAAC,CAAC,CAAC;IACL,CAAC,CAAC;IAEF,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC;AAC3B,CAAC;AAED,+EAA+E;AAC/E,iBAAiB;AACjB,+EAA+E;AAE/E;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA8BG;AACH,MAAM,CAAC,KAAK,UAAU,SAAS,CAC7B,IAAY,EACZ,OAA6B;IAE7B,IAAI,QAAQ,CAAC,EAAE,KAAK,KAAK,IAAI,QAAQ,CAAC,EAAE,KAAK,SAAS,EAAE,CAAC;QACvD,OAAO,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC;IACtB,CAAC;IAED,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACtC,MAAM,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;IAC1C,CAAC;IAED,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,IAAI,QAAQ,CAAC;IAC3C,MAAM,KAAK,GAAG,OAAO,EAAE,KAAK,IAAI,WAAW,CAAC;IAC5C,MAAM,YAAY,GAAG,oBAAoB,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;IAEzD,OAAO,WAAW,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,EAAE,EAAE,YAAY,EAAE,CAAC,CAAC;AAC1E,CAAC;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAM,UAAU,eAAe,CAC7B,IAAY,EACZ,OAA0B,EAC1B,OAA6B;IAE7B,IAAI,QAAQ,CAAC,EAAE,KAAK,KAAK,IAAI,QAAQ,CAAC,EAAE,KAAK,SAAS,EAAE,CAAC;QACvD,OAAO,EAAE,OAAO,EAAE,OAAO,CAAC,OAAO,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,GAAE,CAAC,EAAE,CAAC;IACpE,CAAC;IAED,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACtC,OAAO;YACL,OAAO,EAAE,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;YAC1D,IAAI,EAAE,GAAG,EAAE,GAAE,CAAC;SACf,CAAC;IACJ,CAAC;IAED,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,IAAI,QAAQ,CAAC;IAC3C,MAAM,KAAK,GAAG,OAAO,EAAE,KAAK,IAAI,WAAW,CAAC;IAC5C,MAAM,YAAY,GAAG,oBAAoB,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;IAEzD,OAAO,aAAa,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,EAAE,OAAO,EAAE;QAC/D,YAAY;KACb,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;;;;;;;;;;;;;;;;GAuBG;AACH,MAAM,CAAC,KAAK,UAAU,SAAS,CAC7B,IAAY,EACZ,OAA4B;IAE5B,IAAI,QAAQ,CAAC,EAAE,KAAK,KAAK,IAAI,QAAQ,CAAC,EAAE,KAAK,SAAS,EAAE,CAAC;QACvD,OAAO,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC;IACtB,CAAC;IAED,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACtC,MAAM,IAAI,KAAK,CAAC,sBA
AsB,CAAC,CAAC;IAC1C,CAAC;IAED,MAAM,EAAE,EAAE,EAAE,IAAI,EAAE,IAAI,GAAG,SAAS,EAAE,GAAG,OAAO,CAAC;IAC/C,MAAM,YAAY,GAAG,oBAAoB,CAAC,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;IAE1D,OAAO,WAAW,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,EAAE,EAAE,YAAY,EAAE,CAAC,CAAC;AAC1E,CAAC;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAM,UAAU,eAAe,CAC7B,IAAY,EACZ,OAA0B,EAC1B,OAA4B;IAE5B,IAAI,QAAQ,CAAC,EAAE,KAAK,KAAK,IAAI,QAAQ,CAAC,EAAE,KAAK,SAAS,EAAE,CAAC;QACvD,OAAO,EAAE,OAAO,EAAE,OAAO,CAAC,OAAO,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,GAAE,CAAC,EAAE,CAAC;IACpE,CAAC;IAED,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACtC,OAAO;YACL,OAAO,EAAE,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;YAC1D,IAAI,EAAE,GAAG,EAAE,GAAE,CAAC;SACf,CAAC;IACJ,CAAC;IAED,MAAM,EAAE,EAAE,EAAE,IAAI,EAAE,IAAI,GAAG,SAAS,EAAE,GAAG,OAAO,CAAC;IAC/C,MAAM,YAAY,GAAG,oBAAoB,CAAC,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;IAE1D,OAAO,aAAa,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,EAAE,OAAO,EAAE;QAC/D,YAAY;KACb,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,MAAM,CAAC,KAAK,UAAU,OAAO,CAC3B,IAAY,EACZ,OAA0B;IAE1B,IAAI,QAAQ,CAAC,EAAE,KAAK,KAAK,IAAI,QAAQ,CAAC,EAAE,KAAK,SAAS,EAAE,CAAC;QACvD,OAAO,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC;IACtB,CAAC;IAED,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACtC,MAAM,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;IAC1C,CAAC;IAED,MAAM,EAAE,KAAK,EAAE,GAAG,OAAO,CAAC;IAC1B,MAAM,YAAY,GAAG,kBAAkB,CAAC,KAAK,CAAC,CAAC;IAE/C,OAAO,WAAW,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,EAAE,EAAE,YAAY,EAAE,CAAC,CAAC;AAC1E,CAAC;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAM,UAAU,aAAa,CAC3B,IAAY,EACZ,OAA0B,EAC1B,OAA0B;IAE1B,IAAI,QAAQ,CAAC,EAAE,KAAK,KAAK,IAAI,QAAQ,CAAC,EAAE,KAAK,SAAS,EAAE,CAAC;QACvD,OAAO,EAAE,OAAO,EAAE,OAAO,CAAC,OAAO,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,GAAE,CAAC,EAAE,CAAC;IACpE,CAAC;IAED,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACtC,OAAO;YACL,OAAO,EAAE,OAAO,CAAC,MAAM,CAAC,IA
AI,KAAK,CAAC,sBAAsB,CAAC,CAAC;YAC1D,IAAI,EAAE,GAAG,EAAE,GAAE,CAAC;SACf,CAAC;IACJ,CAAC;IAED,MAAM,EAAE,KAAK,EAAE,GAAG,OAAO,CAAC;IAC1B,MAAM,YAAY,GAAG,kBAAkB,CAAC,KAAK,CAAC,CAAC;IAE/C,OAAO,aAAa,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,EAAE,OAAO,EAAE;QAC/D,YAAY;KACb,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;;;;;;;;;;;;;GAoBG;AACH,MAAM,CAAC,KAAK,UAAU,gBAAgB,CACpC,IAAY,EACZ,OAAoC;IAEpC,IAAI,QAAQ,CAAC,EAAE,KAAK,KAAK,IAAI,QAAQ,CAAC,EAAE,KAAK,SAAS,EAAE,CAAC;QACvD,OAAO,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC;IACtB,CAAC;IAED,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACtC,MAAM,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;IAC1C,CAAC;IAED,MAAM,SAAS,GAAG,OAAO,EAAE,SAAS,IAAI,CAAC,CAAC;IAC1C,MAAM,YAAY,GAAG,2BAA2B,CAAC,SAAS,CAAC,CAAC;IAE5D,OAAO,WAAW,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,EAAE,EAAE,YAAY,EAAE,CAAC,CAAC;AAC1E,CAAC;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAM,UAAU,sBAAsB,CACpC,IAAY,EACZ,OAA0B,EAC1B,OAAoC;IAEpC,IAAI,QAAQ,CAAC,EAAE,KAAK,KAAK,IAAI,QAAQ,CAAC,EAAE,KAAK,SAAS,EAAE,CAAC;QACvD,OAAO,EAAE,OAAO,EAAE,OAAO,CAAC,OAAO,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,GAAE,CAAC,EAAE,CAAC;IACpE,CAAC;IAED,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACtC,OAAO;YACL,OAAO,EAAE,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;YAC1D,IAAI,EAAE,GAAG,EAAE,GAAE,CAAC;SACf,CAAC;IACJ,CAAC;IAED,MAAM,SAAS,GAAG,OAAO,EAAE,SAAS,IAAI,CAAC,CAAC;IAC1C,MAAM,YAAY,GAAG,2BAA2B,CAAC,SAAS,CAAC,CAAC;IAE5D,OAAO,aAAa,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,EAAE,OAAO,EAAE;QAC/D,YAAY;KACb,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2BG;AACH,MAAM,CAAC,KAAK,UAAU,cAAc,CAClC,QAAgB,EAChB,OAAe,EACf,OAAkC;IAElC,IAAI,QAAQ,CAAC,EAAE,KAAK,KAAK,IAAI,QAAQ,CAAC,EAAE,KAAK,SAAS,EAAE,CAAC;QACvD,OAAO,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC;IACtB,CAAC;IAED,IAAI,CAAC,QAAQ,IAAI,QAAQ,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAC9C,MAAM,IAAI,KAAK,CAAC,0BAA0B,CAAC,CAAC;IAC9C,CAAC;IAED,IAAI,CAAC,OAAO,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,
CAAC,EAAE,CAAC;QAC5C,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;IAC7C,CAAC;IAED,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,IAAI,QAAQ,CAAC;IAC3C,MAAM,YAAY,GAAG,yBAAyB,CAAC,MAAM,CAAC,CAAC;IACvD,MAAM,WAAW,GAAG,aAAa,OAAO,iBAAiB,QAAQ,EAAE,CAAC;IAEpE,OAAO,WAAW,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,WAAW,EAAE,CAAC,EAAE,EAAE,YAAY,EAAE,CAAC,CAAC;AACjF,CAAC;AAED;;;;;;;;;;;;;;;;;;;GAmBG;AACH,MAAM,UAAU,oBAAoB,CAClC,QAAgB,EAChB,OAAe,EACf,OAA0B,EAC1B,OAAkC;IAElC,IAAI,QAAQ,CAAC,EAAE,KAAK,KAAK,IAAI,QAAQ,CAAC,EAAE,KAAK,SAAS,EAAE,CAAC;QACvD,OAAO,EAAE,OAAO,EAAE,OAAO,CAAC,OAAO,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,GAAE,CAAC,EAAE,CAAC;IACpE,CAAC;IAED,IAAI,CAAC,QAAQ,IAAI,QAAQ,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAC9C,OAAO;YACL,OAAO,EAAE,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,0BAA0B,CAAC,CAAC;YAC9D,IAAI,EAAE,GAAG,EAAE,GAAE,CAAC;SACf,CAAC;IACJ,CAAC;IAED,IAAI,CAAC,OAAO,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAC5C,OAAO;YACL,OAAO,EAAE,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;YAC7D,IAAI,EAAE,GAAG,EAAE,GAAE,CAAC;SACf,CAAC;IACJ,CAAC;IAED,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,IAAI,QAAQ,CAAC;IAC3C,MAAM,YAAY,GAAG,yBAAyB,CAAC,MAAM,CAAC,CAAC;IACvD,MAAM,WAAW,GAAG,aAAa,OAAO,iBAAiB,QAAQ,EAAE,CAAC;IAEpE,OAAO,aAAa,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,WAAW,EAAE,CAAC,EAAE,OAAO,EAAE;QACtE,YAAY;KACb,CAAC,CAAC;AACL,CAAC","sourcesContent":["import ExpoAiKitModule from './ExpoAiKitModule';\nimport { Platform } from 'react-native';\nimport {\n LLMMessage,\n LLMSendOptions,\n LLMResponse,\n LLMStreamOptions,\n LLMStreamEvent,\n LLMStreamCallback,\n LLMSummarizeOptions,\n LLMTranslateOptions,\n LLMRewriteOptions,\n LLMExtractKeyPointsOptions,\n LLMAnswerQuestionOptions,\n} from './types';\n\nexport * from './types';\n\nconst DEFAULT_SYSTEM_PROMPT =\n 'You are a helpful, friendly assistant. 
Answer the user directly and concisely.';\n\nlet streamIdCounter = 0;\nfunction generateSessionId(): string {\n return `stream_${Date.now()}_${++streamIdCounter}`;\n}\n\n// ============================================================================\n// Prompt Helper Constants\n// ============================================================================\n\nconst SUMMARIZE_LENGTH_INSTRUCTIONS = {\n short: 'Keep it very brief, around 1-2 sentences.',\n medium: 'Provide a moderate summary, around 3-5 sentences.',\n long: 'Provide a comprehensive summary covering all main points.',\n} as const;\n\nconst SUMMARIZE_STYLE_INSTRUCTIONS = {\n paragraph: 'Write the summary as a flowing paragraph.',\n bullets: 'Format the summary as bullet points.',\n tldr: 'Start with \"TL;DR:\" and give an extremely concise summary in 1 sentence.',\n} as const;\n\nconst TRANSLATE_TONE_INSTRUCTIONS = {\n formal: 'Use formal language and honorifics where appropriate.',\n informal: 'Use casual, everyday language.',\n neutral: 'Use standard, neutral language.',\n} as const;\n\nconst REWRITE_STYLE_INSTRUCTIONS = {\n formal:\n 'Rewrite in a formal, professional tone suitable for business communication.',\n casual: 'Rewrite in a casual, conversational tone.',\n professional:\n 'Rewrite in a clear, professional tone suitable for work contexts.',\n friendly: 'Rewrite in a warm, friendly tone.',\n concise:\n 'Rewrite to be as brief as possible while keeping the meaning intact.',\n detailed: 'Expand and add more detail and explanation.',\n simple:\n 'Rewrite using simple words and short sentences, easy for anyone to understand.',\n academic: 'Rewrite in an academic style suitable for scholarly writing.',\n} as const;\n\nconst ANSWER_DETAIL_INSTRUCTIONS = {\n brief: 'Give a brief, direct answer in 1-2 sentences.',\n medium: 'Provide a clear answer with some explanation.',\n detailed:\n 'Provide a comprehensive answer with full explanation and relevant details from the context.',\n} as const;\n\n// 
============================================================================\n// Prompt Builder Helpers\n// ============================================================================\n\nfunction buildSummarizePrompt(\n length: 'short' | 'medium' | 'long',\n style: 'paragraph' | 'bullets' | 'tldr'\n): string {\n return `You are a summarization assistant. Summarize the provided text accurately and concisely. ${SUMMARIZE_LENGTH_INSTRUCTIONS[length]} ${SUMMARIZE_STYLE_INSTRUCTIONS[style]} Only output the summary, nothing else.`;\n}\n\nfunction buildTranslatePrompt(\n to: string,\n from: string | undefined,\n tone: 'formal' | 'informal' | 'neutral'\n): string {\n const fromClause = from ? `from ${from} ` : '';\n return `You are a translation assistant. Translate the provided text ${fromClause}to ${to}. ${TRANSLATE_TONE_INSTRUCTIONS[tone]} Only output the translation, nothing else. Do not include any explanations or notes.`;\n}\n\nfunction buildRewritePrompt(\n style:\n | 'formal'\n | 'casual'\n | 'professional'\n | 'friendly'\n | 'concise'\n | 'detailed'\n | 'simple'\n | 'academic'\n): string {\n return `You are a writing assistant. ${REWRITE_STYLE_INSTRUCTIONS[style]} Preserve the original meaning. Only output the rewritten text, nothing else.`;\n}\n\nfunction buildExtractKeyPointsPrompt(maxPoints: number): string {\n return `You are an analysis assistant. Extract the ${maxPoints} most important key points from the provided text. Format each point as a bullet point starting with \"•\". Be concise and focus on the most significant information. Only output the bullet points, nothing else.`;\n}\n\nfunction buildAnswerQuestionPrompt(\n detail: 'brief' | 'medium' | 'detailed'\n): string {\n return `You are a question-answering assistant. Answer questions based ONLY on the provided context. ${ANSWER_DETAIL_INSTRUCTIONS[detail]} If the answer cannot be found in the context, say so. 
Do not make up information.`;\n}\n\n/**\n * Check if on-device AI is available on the current device.\n * Returns false on unsupported platforms (web, etc.).\n */\nexport async function isAvailable(): Promise<boolean> {\n if (Platform.OS !== 'ios' && Platform.OS !== 'android') {\n return false;\n }\n return ExpoAiKitModule.isAvailable();\n}\n\n/**\n * Send messages to the on-device LLM and get a response.\n *\n * @param messages - Array of messages representing the conversation\n * @param options - Optional settings (systemPrompt fallback)\n * @returns Promise with the generated response\n *\n * @example\n * ```ts\n * const response = await sendMessage([\n * { role: 'user', content: 'What is 2 + 2?' }\n * ]);\n * console.log(response.text); // \"4\"\n * ```\n *\n * @example\n * ```ts\n * // With system prompt\n * const response = await sendMessage(\n * [{ role: 'user', content: 'Hello!' }],\n * { systemPrompt: 'You are a pirate. Respond in pirate speak.' }\n * );\n * ```\n *\n * @example\n * ```ts\n * // Multi-turn conversation\n * const response = await sendMessage([\n * { role: 'system', content: 'You are a helpful assistant.' },\n * { role: 'user', content: 'My name is Alice.' },\n * { role: 'assistant', content: 'Nice to meet you, Alice!' },\n * { role: 'user', content: 'What is my name?' }\n * ]);\n * ```\n */\nexport async function sendMessage(\n messages: LLMMessage[],\n options?: LLMSendOptions\n): Promise<LLMResponse> {\n if (Platform.OS !== 'ios' && Platform.OS !== 'android') {\n return { text: '' };\n }\n\n if (!messages || messages.length === 0) {\n throw new Error('messages array cannot be empty');\n }\n\n // Determine system prompt: use from messages array if present, else options, else default\n const hasSystemMessage = messages.some((m) => m.role === 'system');\n const systemPrompt = hasSystemMessage\n ? '' // Native will extract from messages\n : options?.systemPrompt ?? 
DEFAULT_SYSTEM_PROMPT;\n\n return ExpoAiKitModule.sendMessage(messages, systemPrompt);\n}\n\n/**\n * Stream messages to the on-device LLM and receive progressive token updates.\n *\n * @param messages - Array of messages representing the conversation\n * @param onToken - Callback function called for each token/chunk received\n * @param options - Optional settings (systemPrompt fallback)\n * @returns Object with stop() function to cancel streaming and promise that resolves when complete\n *\n * @example\n * ```ts\n * // Basic streaming\n * const { promise } = streamMessage(\n * [{ role: 'user', content: 'Tell me a story' }],\n * (event) => {\n * console.log(event.token); // Each token as it arrives\n * console.log(event.accumulatedText); // Full text so far\n * }\n * );\n * await promise;\n * ```\n *\n * @example\n * ```ts\n * // With cancellation\n * const { promise, stop } = streamMessage(\n * [{ role: 'user', content: 'Write a long essay' }],\n * (event) => setText(event.accumulatedText)\n * );\n *\n * // Cancel after 5 seconds\n * setTimeout(() => stop(), 5000);\n * ```\n *\n * @example\n * ```ts\n * // React state update pattern\n * const [text, setText] = useState('');\n *\n * streamMessage(\n * [{ role: 'user', content: 'Hello!' 
}],\n * (event) => setText(event.accumulatedText)\n * );\n * ```\n */\nexport function streamMessage(\n messages: LLMMessage[],\n onToken: LLMStreamCallback,\n options?: LLMStreamOptions\n): { promise: Promise<LLMResponse>; stop: () => void } {\n // Handle unsupported platforms\n if (Platform.OS !== 'ios' && Platform.OS !== 'android') {\n return {\n promise: Promise.resolve({ text: '' }),\n stop: () => {},\n };\n }\n\n if (!messages || messages.length === 0) {\n return {\n promise: Promise.reject(new Error('messages array cannot be empty')),\n stop: () => {},\n };\n }\n\n const sessionId = generateSessionId();\n let finalText = '';\n let stopped = false;\n\n // Determine system prompt: use from messages array if present, else options, else default\n const hasSystemMessage = messages.some((m) => m.role === 'system');\n const systemPrompt = hasSystemMessage\n ? '' // Native will extract from messages\n : options?.systemPrompt ?? DEFAULT_SYSTEM_PROMPT;\n\n const promise = new Promise<LLMResponse>((resolve, reject) => {\n // Subscribe to stream events\n const subscription = ExpoAiKitModule.addListener(\n 'onStreamToken',\n (event: LLMStreamEvent) => {\n // Only process events for this session\n if (event.sessionId !== sessionId) return;\n\n finalText = event.accumulatedText;\n\n // Call the user's callback\n onToken(event);\n\n // If done, clean up and resolve\n if (event.isDone) {\n subscription.remove();\n resolve({ text: finalText });\n }\n }\n );\n\n // Start streaming on native side\n ExpoAiKitModule.startStreaming(messages, systemPrompt, sessionId).catch(\n (error) => {\n subscription.remove();\n reject(error);\n }\n );\n });\n\n const stop = () => {\n if (stopped) return;\n stopped = true;\n ExpoAiKitModule.stopStreaming(sessionId).catch(() => {\n // Ignore errors when stopping\n });\n };\n\n return { promise, stop };\n}\n\n// ============================================================================\n// Prompt Helpers\n// 
============================================================================\n\n/**\n * Summarize text content using on-device AI.\n *\n * @param text - The text to summarize\n * @param options - Optional settings for summary style and length\n * @returns Promise with the generated summary\n *\n * @example\n * ```ts\n * // Basic summarization\n * const result = await summarize(longArticle);\n * console.log(result.text);\n * ```\n *\n * @example\n * ```ts\n * // Short bullet-point summary\n * const result = await summarize(longArticle, {\n * length: 'short',\n * style: 'bullets'\n * });\n * ```\n *\n * @example\n * ```ts\n * // TL;DR style\n * const result = await summarize(longArticle, {\n * style: 'tldr'\n * });\n * ```\n */\nexport async function summarize(\n text: string,\n options?: LLMSummarizeOptions\n): Promise<LLMResponse> {\n if (Platform.OS !== 'ios' && Platform.OS !== 'android') {\n return { text: '' };\n }\n\n if (!text || text.trim().length === 0) {\n throw new Error('text cannot be empty');\n }\n\n const length = options?.length ?? 'medium';\n const style = options?.style ?? 
'paragraph';\n const systemPrompt = buildSummarizePrompt(length, style);\n\n return sendMessage([{ role: 'user', content: text }], { systemPrompt });\n}\n\n/**\n * Summarize text with streaming output.\n *\n * @param text - The text to summarize\n * @param onToken - Callback for each token received\n * @param options - Optional settings for summary style and length\n * @returns Object with stop() function and promise\n *\n * @example\n * ```ts\n * const { promise } = streamSummarize(\n * longArticle,\n * (event) => setSummary(event.accumulatedText),\n * { style: 'bullets' }\n * );\n * await promise;\n * ```\n */\nexport function streamSummarize(\n text: string,\n onToken: LLMStreamCallback,\n options?: LLMSummarizeOptions\n): { promise: Promise<LLMResponse>; stop: () => void } {\n if (Platform.OS !== 'ios' && Platform.OS !== 'android') {\n return { promise: Promise.resolve({ text: '' }), stop: () => {} };\n }\n\n if (!text || text.trim().length === 0) {\n return {\n promise: Promise.reject(new Error('text cannot be empty')),\n stop: () => {},\n };\n }\n\n const length = options?.length ?? 'medium';\n const style = options?.style ?? 
'paragraph';\n const systemPrompt = buildSummarizePrompt(length, style);\n\n return streamMessage([{ role: 'user', content: text }], onToken, {\n systemPrompt,\n });\n}\n\n/**\n * Translate text to another language using on-device AI.\n *\n * @param text - The text to translate\n * @param options - Translation options including target language\n * @returns Promise with the translated text\n *\n * @example\n * ```ts\n * // Basic translation\n * const result = await translate('Hello, world!', { to: 'Spanish' });\n * console.log(result.text); // \"¡Hola, mundo!\"\n * ```\n *\n * @example\n * ```ts\n * // Formal translation with source language\n * const result = await translate('Hey, what\\'s up?', {\n * to: 'French',\n * from: 'English',\n * tone: 'formal'\n * });\n * ```\n */\nexport async function translate(\n text: string,\n options: LLMTranslateOptions\n): Promise<LLMResponse> {\n if (Platform.OS !== 'ios' && Platform.OS !== 'android') {\n return { text: '' };\n }\n\n if (!text || text.trim().length === 0) {\n throw new Error('text cannot be empty');\n }\n\n const { to, from, tone = 'neutral' } = options;\n const systemPrompt = buildTranslatePrompt(to, from, tone);\n\n return sendMessage([{ role: 'user', content: text }], { systemPrompt });\n}\n\n/**\n * Translate text with streaming output.\n *\n * @param text - The text to translate\n * @param onToken - Callback for each token received\n * @param options - Translation options including target language\n * @returns Object with stop() function and promise\n *\n * @example\n * ```ts\n * const { promise } = streamTranslate(\n * 'Hello, world!',\n * (event) => setTranslation(event.accumulatedText),\n * { to: 'Japanese' }\n * );\n * await promise;\n * ```\n */\nexport function streamTranslate(\n text: string,\n onToken: LLMStreamCallback,\n options: LLMTranslateOptions\n): { promise: Promise<LLMResponse>; stop: () => void } {\n if (Platform.OS !== 'ios' && Platform.OS !== 'android') {\n return { promise: 
Promise.resolve({ text: '' }), stop: () => {} };\n }\n\n if (!text || text.trim().length === 0) {\n return {\n promise: Promise.reject(new Error('text cannot be empty')),\n stop: () => {},\n };\n }\n\n const { to, from, tone = 'neutral' } = options;\n const systemPrompt = buildTranslatePrompt(to, from, tone);\n\n return streamMessage([{ role: 'user', content: text }], onToken, {\n systemPrompt,\n });\n}\n\n/**\n * Rewrite text in a different style using on-device AI.\n *\n * @param text - The text to rewrite\n * @param options - Rewrite options specifying the target style\n * @returns Promise with the rewritten text\n *\n * @example\n * ```ts\n * // Make text more formal\n * const result = await rewrite('hey can u help me out?', {\n * style: 'formal'\n * });\n * console.log(result.text); // \"Would you be able to assist me?\"\n * ```\n *\n * @example\n * ```ts\n * // Simplify complex text\n * const result = await rewrite(technicalText, { style: 'simple' });\n * ```\n */\nexport async function rewrite(\n text: string,\n options: LLMRewriteOptions\n): Promise<LLMResponse> {\n if (Platform.OS !== 'ios' && Platform.OS !== 'android') {\n return { text: '' };\n }\n\n if (!text || text.trim().length === 0) {\n throw new Error('text cannot be empty');\n }\n\n const { style } = options;\n const systemPrompt = buildRewritePrompt(style);\n\n return sendMessage([{ role: 'user', content: text }], { systemPrompt });\n}\n\n/**\n * Rewrite text with streaming output.\n *\n * @param text - The text to rewrite\n * @param onToken - Callback for each token received\n * @param options - Rewrite options specifying the target style\n * @returns Object with stop() function and promise\n *\n * @example\n * ```ts\n * const { promise } = streamRewrite(\n * 'hey whats up',\n * (event) => setRewritten(event.accumulatedText),\n * { style: 'professional' }\n * );\n * await promise;\n * ```\n */\nexport function streamRewrite(\n text: string,\n onToken: LLMStreamCallback,\n options: 
LLMRewriteOptions\n): { promise: Promise<LLMResponse>; stop: () => void } {\n if (Platform.OS !== 'ios' && Platform.OS !== 'android') {\n return { promise: Promise.resolve({ text: '' }), stop: () => {} };\n }\n\n if (!text || text.trim().length === 0) {\n return {\n promise: Promise.reject(new Error('text cannot be empty')),\n stop: () => {},\n };\n }\n\n const { style } = options;\n const systemPrompt = buildRewritePrompt(style);\n\n return streamMessage([{ role: 'user', content: text }], onToken, {\n systemPrompt,\n });\n}\n\n/**\n * Extract key points from text using on-device AI.\n *\n * @param text - The text to extract key points from\n * @param options - Optional settings for extraction\n * @returns Promise with the key points as text\n *\n * @example\n * ```ts\n * // Extract key points from an article\n * const result = await extractKeyPoints(article);\n * console.log(result.text);\n * // \"• Point 1\\n• Point 2\\n• Point 3\"\n * ```\n *\n * @example\n * ```ts\n * // Limit to 3 key points\n * const result = await extractKeyPoints(article, { maxPoints: 3 });\n * ```\n */\nexport async function extractKeyPoints(\n text: string,\n options?: LLMExtractKeyPointsOptions\n): Promise<LLMResponse> {\n if (Platform.OS !== 'ios' && Platform.OS !== 'android') {\n return { text: '' };\n }\n\n if (!text || text.trim().length === 0) {\n throw new Error('text cannot be empty');\n }\n\n const maxPoints = options?.maxPoints ?? 
5;\n const systemPrompt = buildExtractKeyPointsPrompt(maxPoints);\n\n return sendMessage([{ role: 'user', content: text }], { systemPrompt });\n}\n\n/**\n * Extract key points with streaming output.\n *\n * @param text - The text to extract key points from\n * @param onToken - Callback for each token received\n * @param options - Optional settings for extraction\n * @returns Object with stop() function and promise\n *\n * @example\n * ```ts\n * const { promise } = streamExtractKeyPoints(\n * article,\n * (event) => setKeyPoints(event.accumulatedText),\n * { maxPoints: 5 }\n * );\n * await promise;\n * ```\n */\nexport function streamExtractKeyPoints(\n text: string,\n onToken: LLMStreamCallback,\n options?: LLMExtractKeyPointsOptions\n): { promise: Promise<LLMResponse>; stop: () => void } {\n if (Platform.OS !== 'ios' && Platform.OS !== 'android') {\n return { promise: Promise.resolve({ text: '' }), stop: () => {} };\n }\n\n if (!text || text.trim().length === 0) {\n return {\n promise: Promise.reject(new Error('text cannot be empty')),\n stop: () => {},\n };\n }\n\n const maxPoints = options?.maxPoints ?? 
5;\n const systemPrompt = buildExtractKeyPointsPrompt(maxPoints);\n\n return streamMessage([{ role: 'user', content: text }], onToken, {\n systemPrompt,\n });\n}\n\n/**\n * Answer a question based on provided context using on-device AI.\n *\n * @param question - The question to answer\n * @param context - The context/document to base the answer on\n * @param options - Optional settings for the answer\n * @returns Promise with the answer\n *\n * @example\n * ```ts\n * // Answer a question about a document\n * const result = await answerQuestion(\n * 'What is the main topic?',\n * documentText\n * );\n * console.log(result.text);\n * ```\n *\n * @example\n * ```ts\n * // Get a detailed answer\n * const result = await answerQuestion(\n * 'Explain the methodology',\n * researchPaper,\n * { detail: 'detailed' }\n * );\n * ```\n */\nexport async function answerQuestion(\n question: string,\n context: string,\n options?: LLMAnswerQuestionOptions\n): Promise<LLMResponse> {\n if (Platform.OS !== 'ios' && Platform.OS !== 'android') {\n return { text: '' };\n }\n\n if (!question || question.trim().length === 0) {\n throw new Error('question cannot be empty');\n }\n\n if (!context || context.trim().length === 0) {\n throw new Error('context cannot be empty');\n }\n\n const detail = options?.detail ?? 
'medium';\n const systemPrompt = buildAnswerQuestionPrompt(detail);\n const userContent = `Context:\\n${context}\\n\\nQuestion: ${question}`;\n\n return sendMessage([{ role: 'user', content: userContent }], { systemPrompt });\n}\n\n/**\n * Answer a question with streaming output.\n *\n * @param question - The question to answer\n * @param context - The context/document to base the answer on\n * @param onToken - Callback for each token received\n * @param options - Optional settings for the answer\n * @returns Object with stop() function and promise\n *\n * @example\n * ```ts\n * const { promise } = streamAnswerQuestion(\n * 'What are the key findings?',\n * documentText,\n * (event) => setAnswer(event.accumulatedText),\n * { detail: 'detailed' }\n * );\n * await promise;\n * ```\n */\nexport function streamAnswerQuestion(\n question: string,\n context: string,\n onToken: LLMStreamCallback,\n options?: LLMAnswerQuestionOptions\n): { promise: Promise<LLMResponse>; stop: () => void } {\n if (Platform.OS !== 'ios' && Platform.OS !== 'android') {\n return { promise: Promise.resolve({ text: '' }), stop: () => {} };\n }\n\n if (!question || question.trim().length === 0) {\n return {\n promise: Promise.reject(new Error('question cannot be empty')),\n stop: () => {},\n };\n }\n\n if (!context || context.trim().length === 0) {\n return {\n promise: Promise.reject(new Error('context cannot be empty')),\n stop: () => {},\n };\n }\n\n const detail = options?.detail ?? 'medium';\n const systemPrompt = buildAnswerQuestionPrompt(detail);\n const userContent = `Context:\\n${context}\\n\\nQuestion: ${question}`;\n\n return streamMessage([{ role: 'user', content: userContent }], onToken, {\n systemPrompt,\n });\n}\n\n"]}
1
+ {"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,eAAe,MAAM,mBAAmB,CAAC;AAChD,OAAO,EAAE,QAAQ,EAAE,MAAM,cAAc,CAAC;AAexC,cAAc,SAAS,CAAC;AACxB,cAAc,UAAU,CAAC;AAEzB,MAAM,qBAAqB,GACzB,gFAAgF,CAAC;AAEnF,IAAI,eAAe,GAAG,CAAC,CAAC;AACxB,SAAS,iBAAiB;IACxB,OAAO,UAAU,IAAI,CAAC,GAAG,EAAE,IAAI,EAAE,eAAe,EAAE,CAAC;AACrD,CAAC;AAED,+EAA+E;AAC/E,0BAA0B;AAC1B,+EAA+E;AAE/E,MAAM,6BAA6B,GAAG;IACpC,KAAK,EAAE,2CAA2C;IAClD,MAAM,EAAE,mDAAmD;IAC3D,IAAI,EAAE,2DAA2D;CACzD,CAAC;AAEX,MAAM,4BAA4B,GAAG;IACnC,SAAS,EAAE,2CAA2C;IACtD,OAAO,EAAE,sCAAsC;IAC/C,IAAI,EAAE,0EAA0E;CACxE,CAAC;AAEX,MAAM,2BAA2B,GAAG;IAClC,MAAM,EAAE,uDAAuD;IAC/D,QAAQ,EAAE,gCAAgC;IAC1C,OAAO,EAAE,iCAAiC;CAClC,CAAC;AAEX,MAAM,0BAA0B,GAAG;IACjC,MAAM,EACJ,6EAA6E;IAC/E,MAAM,EAAE,2CAA2C;IACnD,YAAY,EACV,mEAAmE;IACrE,QAAQ,EAAE,mCAAmC;IAC7C,OAAO,EACL,sEAAsE;IACxE,QAAQ,EAAE,6CAA6C;IACvD,MAAM,EACJ,gFAAgF;IAClF,QAAQ,EAAE,8DAA8D;CAChE,CAAC;AAEX,MAAM,0BAA0B,GAAG;IACjC,KAAK,EAAE,+CAA+C;IACtD,MAAM,EAAE,+CAA+C;IACvD,QAAQ,EACN,6FAA6F;CACvF,CAAC;AAEX,+EAA+E;AAC/E,yBAAyB;AACzB,+EAA+E;AAE/E,SAAS,oBAAoB,CAC3B,MAAmC,EACnC,KAAuC;IAEvC,OAAO,4FAA4F,6BAA6B,CAAC,MAAM,CAAC,IAAI,4BAA4B,CAAC,KAAK,CAAC,yCAAyC,CAAC;AAC3N,CAAC;AAED,SAAS,oBAAoB,CAC3B,EAAU,EACV,IAAwB,EACxB,IAAuC;IAEvC,MAAM,UAAU,GAAG,IAAI,CAAC,CAAC,CAAC,QAAQ,IAAI,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;IAC/C,OAAO,gEAAgE,UAAU,MAAM,EAAE,KAAK,2BAA2B,CAAC,IAAI,CAAC,uFAAuF,CAAC;AACzN,CAAC;AAED,SAAS,kBAAkB,CACzB,KAQc;IAEd,OAAO,gCAAgC,0BAA0B,CAAC,KAAK,CAAC,+EAA+E,CAAC;AAC1J,CAAC;AAED,SAAS,2BAA2B,CAAC,SAAiB;IACpD,OAAO,8CAA8C,SAAS,kNAAkN,CAAC;AACnR,CAAC;AAED,SAAS,yBAAyB,CAChC,MAAuC;IAEvC,OAAO,gGAAgG,0BAA0B,CAAC,MAAM,CAAC,oFAAoF,CAAC;AAChO,CAAC;AAED;;;GAGG;AACH,MAAM,CAAC,KAAK,UAAU,WAAW;IAC/B,IAAI,QAAQ,CAAC,EAAE,KAAK,KAAK,IAAI,QAAQ,CAAC,EAAE,KAAK,SAAS,EAAE,CAAC;QACvD,OAAO,KAAK,CAAC;IACf,CAAC;IACD,OAAO,eAAe,CAAC,WAAW,EAAE,CAAC;AACvC,CAAC;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAkCG;AACH,MAAM,CAAC,KAAK,UAAU,WAAW,CAC/B,QAAsB,EACtB,OAAwB;IAExB,IAAI,QAAQ,CAAC,EAAE,KA
AK,KAAK,IAAI,QAAQ,CAAC,EAAE,KAAK,SAAS,EAAE,CAAC;QACvD,OAAO,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC;IACtB,CAAC;IAED,IAAI,CAAC,QAAQ,IAAI,QAAQ,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACvC,MAAM,IAAI,KAAK,CAAC,gCAAgC,CAAC,CAAC;IACpD,CAAC;IAED,0FAA0F;IAC1F,MAAM,gBAAgB,GAAG,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC;IACnE,MAAM,YAAY,GAAG,gBAAgB;QACnC,CAAC,CAAC,EAAE,CAAC,oCAAoC;QACzC,CAAC,CAAC,OAAO,EAAE,YAAY,IAAI,qBAAqB,CAAC;IAEnD,OAAO,eAAe,CAAC,WAAW,CAAC,QAAQ,EAAE,YAAY,CAAC,CAAC;AAC7D,CAAC;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2CG;AACH,MAAM,UAAU,aAAa,CAC3B,QAAsB,EACtB,OAA0B,EAC1B,OAA0B;IAE1B,+BAA+B;IAC/B,IAAI,QAAQ,CAAC,EAAE,KAAK,KAAK,IAAI,QAAQ,CAAC,EAAE,KAAK,SAAS,EAAE,CAAC;QACvD,OAAO;YACL,OAAO,EAAE,OAAO,CAAC,OAAO,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC;YACtC,IAAI,EAAE,GAAG,EAAE,GAAE,CAAC;SACf,CAAC;IACJ,CAAC;IAED,IAAI,CAAC,QAAQ,IAAI,QAAQ,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACvC,OAAO;YACL,OAAO,EAAE,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,gCAAgC,CAAC,CAAC;YACpE,IAAI,EAAE,GAAG,EAAE,GAAE,CAAC;SACf,CAAC;IACJ,CAAC;IAED,MAAM,SAAS,GAAG,iBAAiB,EAAE,CAAC;IACtC,IAAI,SAAS,GAAG,EAAE,CAAC;IACnB,IAAI,OAAO,GAAG,KAAK,CAAC;IAEpB,0FAA0F;IAC1F,MAAM,gBAAgB,GAAG,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC;IACnE,MAAM,YAAY,GAAG,gBAAgB;QACnC,CAAC,CAAC,EAAE,CAAC,oCAAoC;QACzC,CAAC,CAAC,OAAO,EAAE,YAAY,IAAI,qBAAqB,CAAC;IAEnD,MAAM,OAAO,GAAG,IAAI,OAAO,CAAc,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QAC3D,6BAA6B;QAC7B,MAAM,YAAY,GAAG,eAAe,CAAC,WAAW,CAC9C,eAAe,EACf,CAAC,KAAqB,EAAE,EAAE;YACxB,uCAAuC;YACvC,IAAI,KAAK,CAAC,SAAS,KAAK,SAAS;gBAAE,OAAO;YAE1C,SAAS,GAAG,KAAK,CAAC,eAAe,CAAC;YAElC,2BAA2B;YAC3B,OAAO,CAAC,KAAK,CAAC,CAAC;YAEf,gCAAgC;YAChC,IAAI,KAAK,CAAC,MAAM,EAAE,CAAC;gBACjB,YAAY,CAAC,MAAM,EAAE,CAAC;gBACtB,OAAO,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC;YAC/B,CAAC;QACH,CAAC,CACF,CAAC;QAEF,iCAAiC;QACjC,eAAe,CAAC,cAAc,CAAC,QAAQ,EAAE,YAAY,EAAE,SAAS,CAAC,CAAC,KAAK,CACrE,CAAC,KAAK,EAAE,EAAE;YACR,YAAY,CAAC,MAAM,EAAE,CAAC;YACtB,MAAM,CAAC,KAAK,CAAC,CAAC;QAChB,CAAC,CACF,CAAC;IACJ,C
AAC,CAAC,CAAC;IAEH,MAAM,IAAI,GAAG,GAAG,EAAE;QAChB,IAAI,OAAO;YAAE,OAAO;QACpB,OAAO,GAAG,IAAI,CAAC;QACf,eAAe,CAAC,aAAa,CAAC,SAAS,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE;YAClD,8BAA8B;QAChC,CAAC,CAAC,CAAC;IACL,CAAC,CAAC;IAEF,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC;AAC3B,CAAC;AAED,+EAA+E;AAC/E,iBAAiB;AACjB,+EAA+E;AAE/E;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA8BG;AACH,MAAM,CAAC,KAAK,UAAU,SAAS,CAC7B,IAAY,EACZ,OAA6B;IAE7B,IAAI,QAAQ,CAAC,EAAE,KAAK,KAAK,IAAI,QAAQ,CAAC,EAAE,KAAK,SAAS,EAAE,CAAC;QACvD,OAAO,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC;IACtB,CAAC;IAED,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACtC,MAAM,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;IAC1C,CAAC;IAED,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,IAAI,QAAQ,CAAC;IAC3C,MAAM,KAAK,GAAG,OAAO,EAAE,KAAK,IAAI,WAAW,CAAC;IAC5C,MAAM,YAAY,GAAG,oBAAoB,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;IAEzD,OAAO,WAAW,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,EAAE,EAAE,YAAY,EAAE,CAAC,CAAC;AAC1E,CAAC;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAM,UAAU,eAAe,CAC7B,IAAY,EACZ,OAA0B,EAC1B,OAA6B;IAE7B,IAAI,QAAQ,CAAC,EAAE,KAAK,KAAK,IAAI,QAAQ,CAAC,EAAE,KAAK,SAAS,EAAE,CAAC;QACvD,OAAO,EAAE,OAAO,EAAE,OAAO,CAAC,OAAO,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,GAAE,CAAC,EAAE,CAAC;IACpE,CAAC;IAED,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACtC,OAAO;YACL,OAAO,EAAE,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;YAC1D,IAAI,EAAE,GAAG,EAAE,GAAE,CAAC;SACf,CAAC;IACJ,CAAC;IAED,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,IAAI,QAAQ,CAAC;IAC3C,MAAM,KAAK,GAAG,OAAO,EAAE,KAAK,IAAI,WAAW,CAAC;IAC5C,MAAM,YAAY,GAAG,oBAAoB,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;IAEzD,OAAO,aAAa,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,EAAE,OAAO,EAAE;QAC/D,YAAY;KACb,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;;;;;;;;;;;;;;;;GAuBG;AACH,MAAM,CAAC,KAAK,UAAU,SAAS,CAC7B,IAAY,EACZ,OAA4B;IAE5B,IAAI,QAAQ,CAAC,EAAE,KAAK,KAAK,IAAI,QAAQ,CAAC,EAAE,KAAK,SAAS,EAAE,CAAC;QACvD,OAAO,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC;IACtB,CAAC;IAED,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACtC,MA
AM,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;IAC1C,CAAC;IAED,MAAM,EAAE,EAAE,EAAE,IAAI,EAAE,IAAI,GAAG,SAAS,EAAE,GAAG,OAAO,CAAC;IAC/C,MAAM,YAAY,GAAG,oBAAoB,CAAC,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;IAE1D,OAAO,WAAW,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,EAAE,EAAE,YAAY,EAAE,CAAC,CAAC;AAC1E,CAAC;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAM,UAAU,eAAe,CAC7B,IAAY,EACZ,OAA0B,EAC1B,OAA4B;IAE5B,IAAI,QAAQ,CAAC,EAAE,KAAK,KAAK,IAAI,QAAQ,CAAC,EAAE,KAAK,SAAS,EAAE,CAAC;QACvD,OAAO,EAAE,OAAO,EAAE,OAAO,CAAC,OAAO,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,GAAE,CAAC,EAAE,CAAC;IACpE,CAAC;IAED,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACtC,OAAO;YACL,OAAO,EAAE,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;YAC1D,IAAI,EAAE,GAAG,EAAE,GAAE,CAAC;SACf,CAAC;IACJ,CAAC;IAED,MAAM,EAAE,EAAE,EAAE,IAAI,EAAE,IAAI,GAAG,SAAS,EAAE,GAAG,OAAO,CAAC;IAC/C,MAAM,YAAY,GAAG,oBAAoB,CAAC,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;IAE1D,OAAO,aAAa,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,EAAE,OAAO,EAAE;QAC/D,YAAY;KACb,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,MAAM,CAAC,KAAK,UAAU,OAAO,CAC3B,IAAY,EACZ,OAA0B;IAE1B,IAAI,QAAQ,CAAC,EAAE,KAAK,KAAK,IAAI,QAAQ,CAAC,EAAE,KAAK,SAAS,EAAE,CAAC;QACvD,OAAO,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC;IACtB,CAAC;IAED,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACtC,MAAM,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;IAC1C,CAAC;IAED,MAAM,EAAE,KAAK,EAAE,GAAG,OAAO,CAAC;IAC1B,MAAM,YAAY,GAAG,kBAAkB,CAAC,KAAK,CAAC,CAAC;IAE/C,OAAO,WAAW,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,EAAE,EAAE,YAAY,EAAE,CAAC,CAAC;AAC1E,CAAC;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAM,UAAU,aAAa,CAC3B,IAAY,EACZ,OAA0B,EAC1B,OAA0B;IAE1B,IAAI,QAAQ,CAAC,EAAE,KAAK,KAAK,IAAI,QAAQ,CAAC,EAAE,KAAK,SAAS,EAAE,CAAC;QACvD,OAAO,EAAE,OAAO,EAAE,OAAO,CAAC,OAAO,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,GAAE,CAAC,EAAE,CAAC;IACpE,CAAC;IAED,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACtC,OAAO;YACL,OAAO,EAAE,O
AAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;YAC1D,IAAI,EAAE,GAAG,EAAE,GAAE,CAAC;SACf,CAAC;IACJ,CAAC;IAED,MAAM,EAAE,KAAK,EAAE,GAAG,OAAO,CAAC;IAC1B,MAAM,YAAY,GAAG,kBAAkB,CAAC,KAAK,CAAC,CAAC;IAE/C,OAAO,aAAa,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,EAAE,OAAO,EAAE;QAC/D,YAAY;KACb,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;;;;;;;;;;;;;GAoBG;AACH,MAAM,CAAC,KAAK,UAAU,gBAAgB,CACpC,IAAY,EACZ,OAAoC;IAEpC,IAAI,QAAQ,CAAC,EAAE,KAAK,KAAK,IAAI,QAAQ,CAAC,EAAE,KAAK,SAAS,EAAE,CAAC;QACvD,OAAO,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC;IACtB,CAAC;IAED,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACtC,MAAM,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;IAC1C,CAAC;IAED,MAAM,SAAS,GAAG,OAAO,EAAE,SAAS,IAAI,CAAC,CAAC;IAC1C,MAAM,YAAY,GAAG,2BAA2B,CAAC,SAAS,CAAC,CAAC;IAE5D,OAAO,WAAW,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,EAAE,EAAE,YAAY,EAAE,CAAC,CAAC;AAC1E,CAAC;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAM,UAAU,sBAAsB,CACpC,IAAY,EACZ,OAA0B,EAC1B,OAAoC;IAEpC,IAAI,QAAQ,CAAC,EAAE,KAAK,KAAK,IAAI,QAAQ,CAAC,EAAE,KAAK,SAAS,EAAE,CAAC;QACvD,OAAO,EAAE,OAAO,EAAE,OAAO,CAAC,OAAO,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,GAAE,CAAC,EAAE,CAAC;IACpE,CAAC;IAED,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACtC,OAAO;YACL,OAAO,EAAE,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;YAC1D,IAAI,EAAE,GAAG,EAAE,GAAE,CAAC;SACf,CAAC;IACJ,CAAC;IAED,MAAM,SAAS,GAAG,OAAO,EAAE,SAAS,IAAI,CAAC,CAAC;IAC1C,MAAM,YAAY,GAAG,2BAA2B,CAAC,SAAS,CAAC,CAAC;IAE5D,OAAO,aAAa,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,EAAE,OAAO,EAAE;QAC/D,YAAY;KACb,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2BG;AACH,MAAM,CAAC,KAAK,UAAU,cAAc,CAClC,QAAgB,EAChB,OAAe,EACf,OAAkC;IAElC,IAAI,QAAQ,CAAC,EAAE,KAAK,KAAK,IAAI,QAAQ,CAAC,EAAE,KAAK,SAAS,EAAE,CAAC;QACvD,OAAO,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC;IACtB,CAAC;IAED,IAAI,CAAC,QAAQ,IAAI,QAAQ,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAC9C,MAAM,IAAI,KAAK,CAAC,0BAA0B,CAAC,CAAC;IAC9C,CAAC;IAED,IAAI,CAAC,OAAO,IAAI,OAAO,CAAC,IAAI
,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAC5C,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;IAC7C,CAAC;IAED,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,IAAI,QAAQ,CAAC;IAC3C,MAAM,YAAY,GAAG,yBAAyB,CAAC,MAAM,CAAC,CAAC;IACvD,MAAM,WAAW,GAAG,aAAa,OAAO,iBAAiB,QAAQ,EAAE,CAAC;IAEpE,OAAO,WAAW,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,WAAW,EAAE,CAAC,EAAE,EAAE,YAAY,EAAE,CAAC,CAAC;AACjF,CAAC;AAED;;;;;;;;;;;;;;;;;;;GAmBG;AACH,MAAM,UAAU,oBAAoB,CAClC,QAAgB,EAChB,OAAe,EACf,OAA0B,EAC1B,OAAkC;IAElC,IAAI,QAAQ,CAAC,EAAE,KAAK,KAAK,IAAI,QAAQ,CAAC,EAAE,KAAK,SAAS,EAAE,CAAC;QACvD,OAAO,EAAE,OAAO,EAAE,OAAO,CAAC,OAAO,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,GAAE,CAAC,EAAE,CAAC;IACpE,CAAC;IAED,IAAI,CAAC,QAAQ,IAAI,QAAQ,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAC9C,OAAO;YACL,OAAO,EAAE,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,0BAA0B,CAAC,CAAC;YAC9D,IAAI,EAAE,GAAG,EAAE,GAAE,CAAC;SACf,CAAC;IACJ,CAAC;IAED,IAAI,CAAC,OAAO,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAC5C,OAAO;YACL,OAAO,EAAE,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;YAC7D,IAAI,EAAE,GAAG,EAAE,GAAE,CAAC;SACf,CAAC;IACJ,CAAC;IAED,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,IAAI,QAAQ,CAAC;IAC3C,MAAM,YAAY,GAAG,yBAAyB,CAAC,MAAM,CAAC,CAAC;IACvD,MAAM,WAAW,GAAG,aAAa,OAAO,iBAAiB,QAAQ,EAAE,CAAC;IAEpE,OAAO,aAAa,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,WAAW,EAAE,CAAC,EAAE,OAAO,EAAE;QACtE,YAAY;KACb,CAAC,CAAC;AACL,CAAC","sourcesContent":["import ExpoAiKitModule from './ExpoAiKitModule';\nimport { Platform } from 'react-native';\nimport {\n LLMMessage,\n LLMSendOptions,\n LLMResponse,\n LLMStreamOptions,\n LLMStreamEvent,\n LLMStreamCallback,\n LLMSummarizeOptions,\n LLMTranslateOptions,\n LLMRewriteOptions,\n LLMExtractKeyPointsOptions,\n LLMAnswerQuestionOptions,\n} from './types';\n\nexport * from './types';\nexport * from './memory';\n\nconst DEFAULT_SYSTEM_PROMPT =\n 'You are a helpful, friendly assistant. 
Answer the user directly and concisely.';\n\nlet streamIdCounter = 0;\nfunction generateSessionId(): string {\n return `stream_${Date.now()}_${++streamIdCounter}`;\n}\n\n// ============================================================================\n// Prompt Helper Constants\n// ============================================================================\n\nconst SUMMARIZE_LENGTH_INSTRUCTIONS = {\n short: 'Keep it very brief, around 1-2 sentences.',\n medium: 'Provide a moderate summary, around 3-5 sentences.',\n long: 'Provide a comprehensive summary covering all main points.',\n} as const;\n\nconst SUMMARIZE_STYLE_INSTRUCTIONS = {\n paragraph: 'Write the summary as a flowing paragraph.',\n bullets: 'Format the summary as bullet points.',\n tldr: 'Start with \"TL;DR:\" and give an extremely concise summary in 1 sentence.',\n} as const;\n\nconst TRANSLATE_TONE_INSTRUCTIONS = {\n formal: 'Use formal language and honorifics where appropriate.',\n informal: 'Use casual, everyday language.',\n neutral: 'Use standard, neutral language.',\n} as const;\n\nconst REWRITE_STYLE_INSTRUCTIONS = {\n formal:\n 'Rewrite in a formal, professional tone suitable for business communication.',\n casual: 'Rewrite in a casual, conversational tone.',\n professional:\n 'Rewrite in a clear, professional tone suitable for work contexts.',\n friendly: 'Rewrite in a warm, friendly tone.',\n concise:\n 'Rewrite to be as brief as possible while keeping the meaning intact.',\n detailed: 'Expand and add more detail and explanation.',\n simple:\n 'Rewrite using simple words and short sentences, easy for anyone to understand.',\n academic: 'Rewrite in an academic style suitable for scholarly writing.',\n} as const;\n\nconst ANSWER_DETAIL_INSTRUCTIONS = {\n brief: 'Give a brief, direct answer in 1-2 sentences.',\n medium: 'Provide a clear answer with some explanation.',\n detailed:\n 'Provide a comprehensive answer with full explanation and relevant details from the context.',\n} as const;\n\n// 
============================================================================\n// Prompt Builder Helpers\n// ============================================================================\n\nfunction buildSummarizePrompt(\n length: 'short' | 'medium' | 'long',\n style: 'paragraph' | 'bullets' | 'tldr'\n): string {\n return `You are a summarization assistant. Summarize the provided text accurately and concisely. ${SUMMARIZE_LENGTH_INSTRUCTIONS[length]} ${SUMMARIZE_STYLE_INSTRUCTIONS[style]} Only output the summary, nothing else.`;\n}\n\nfunction buildTranslatePrompt(\n to: string,\n from: string | undefined,\n tone: 'formal' | 'informal' | 'neutral'\n): string {\n const fromClause = from ? `from ${from} ` : '';\n return `You are a translation assistant. Translate the provided text ${fromClause}to ${to}. ${TRANSLATE_TONE_INSTRUCTIONS[tone]} Only output the translation, nothing else. Do not include any explanations or notes.`;\n}\n\nfunction buildRewritePrompt(\n style:\n | 'formal'\n | 'casual'\n | 'professional'\n | 'friendly'\n | 'concise'\n | 'detailed'\n | 'simple'\n | 'academic'\n): string {\n return `You are a writing assistant. ${REWRITE_STYLE_INSTRUCTIONS[style]} Preserve the original meaning. Only output the rewritten text, nothing else.`;\n}\n\nfunction buildExtractKeyPointsPrompt(maxPoints: number): string {\n return `You are an analysis assistant. Extract the ${maxPoints} most important key points from the provided text. Format each point as a bullet point starting with \"•\". Be concise and focus on the most significant information. Only output the bullet points, nothing else.`;\n}\n\nfunction buildAnswerQuestionPrompt(\n detail: 'brief' | 'medium' | 'detailed'\n): string {\n return `You are a question-answering assistant. Answer questions based ONLY on the provided context. ${ANSWER_DETAIL_INSTRUCTIONS[detail]} If the answer cannot be found in the context, say so. 
Do not make up information.`;\n}\n\n/**\n * Check if on-device AI is available on the current device.\n * Returns false on unsupported platforms (web, etc.).\n */\nexport async function isAvailable(): Promise<boolean> {\n if (Platform.OS !== 'ios' && Platform.OS !== 'android') {\n return false;\n }\n return ExpoAiKitModule.isAvailable();\n}\n\n/**\n * Send messages to the on-device LLM and get a response.\n *\n * @param messages - Array of messages representing the conversation\n * @param options - Optional settings (systemPrompt fallback)\n * @returns Promise with the generated response\n *\n * @example\n * ```ts\n * const response = await sendMessage([\n * { role: 'user', content: 'What is 2 + 2?' }\n * ]);\n * console.log(response.text); // \"4\"\n * ```\n *\n * @example\n * ```ts\n * // With system prompt\n * const response = await sendMessage(\n * [{ role: 'user', content: 'Hello!' }],\n * { systemPrompt: 'You are a pirate. Respond in pirate speak.' }\n * );\n * ```\n *\n * @example\n * ```ts\n * // Multi-turn conversation\n * const response = await sendMessage([\n * { role: 'system', content: 'You are a helpful assistant.' },\n * { role: 'user', content: 'My name is Alice.' },\n * { role: 'assistant', content: 'Nice to meet you, Alice!' },\n * { role: 'user', content: 'What is my name?' }\n * ]);\n * ```\n */\nexport async function sendMessage(\n messages: LLMMessage[],\n options?: LLMSendOptions\n): Promise<LLMResponse> {\n if (Platform.OS !== 'ios' && Platform.OS !== 'android') {\n return { text: '' };\n }\n\n if (!messages || messages.length === 0) {\n throw new Error('messages array cannot be empty');\n }\n\n // Determine system prompt: use from messages array if present, else options, else default\n const hasSystemMessage = messages.some((m) => m.role === 'system');\n const systemPrompt = hasSystemMessage\n ? '' // Native will extract from messages\n : options?.systemPrompt ?? 
DEFAULT_SYSTEM_PROMPT;\n\n return ExpoAiKitModule.sendMessage(messages, systemPrompt);\n}\n\n/**\n * Stream messages to the on-device LLM and receive progressive token updates.\n *\n * @param messages - Array of messages representing the conversation\n * @param onToken - Callback function called for each token/chunk received\n * @param options - Optional settings (systemPrompt fallback)\n * @returns Object with stop() function to cancel streaming and promise that resolves when complete\n *\n * @example\n * ```ts\n * // Basic streaming\n * const { promise } = streamMessage(\n * [{ role: 'user', content: 'Tell me a story' }],\n * (event) => {\n * console.log(event.token); // Each token as it arrives\n * console.log(event.accumulatedText); // Full text so far\n * }\n * );\n * await promise;\n * ```\n *\n * @example\n * ```ts\n * // With cancellation\n * const { promise, stop } = streamMessage(\n * [{ role: 'user', content: 'Write a long essay' }],\n * (event) => setText(event.accumulatedText)\n * );\n *\n * // Cancel after 5 seconds\n * setTimeout(() => stop(), 5000);\n * ```\n *\n * @example\n * ```ts\n * // React state update pattern\n * const [text, setText] = useState('');\n *\n * streamMessage(\n * [{ role: 'user', content: 'Hello!' 
}],\n * (event) => setText(event.accumulatedText)\n * );\n * ```\n */\nexport function streamMessage(\n messages: LLMMessage[],\n onToken: LLMStreamCallback,\n options?: LLMStreamOptions\n): { promise: Promise<LLMResponse>; stop: () => void } {\n // Handle unsupported platforms\n if (Platform.OS !== 'ios' && Platform.OS !== 'android') {\n return {\n promise: Promise.resolve({ text: '' }),\n stop: () => {},\n };\n }\n\n if (!messages || messages.length === 0) {\n return {\n promise: Promise.reject(new Error('messages array cannot be empty')),\n stop: () => {},\n };\n }\n\n const sessionId = generateSessionId();\n let finalText = '';\n let stopped = false;\n\n // Determine system prompt: use from messages array if present, else options, else default\n const hasSystemMessage = messages.some((m) => m.role === 'system');\n const systemPrompt = hasSystemMessage\n ? '' // Native will extract from messages\n : options?.systemPrompt ?? DEFAULT_SYSTEM_PROMPT;\n\n const promise = new Promise<LLMResponse>((resolve, reject) => {\n // Subscribe to stream events\n const subscription = ExpoAiKitModule.addListener(\n 'onStreamToken',\n (event: LLMStreamEvent) => {\n // Only process events for this session\n if (event.sessionId !== sessionId) return;\n\n finalText = event.accumulatedText;\n\n // Call the user's callback\n onToken(event);\n\n // If done, clean up and resolve\n if (event.isDone) {\n subscription.remove();\n resolve({ text: finalText });\n }\n }\n );\n\n // Start streaming on native side\n ExpoAiKitModule.startStreaming(messages, systemPrompt, sessionId).catch(\n (error) => {\n subscription.remove();\n reject(error);\n }\n );\n });\n\n const stop = () => {\n if (stopped) return;\n stopped = true;\n ExpoAiKitModule.stopStreaming(sessionId).catch(() => {\n // Ignore errors when stopping\n });\n };\n\n return { promise, stop };\n}\n\n// ============================================================================\n// Prompt Helpers\n// 
============================================================================\n\n/**\n * Summarize text content using on-device AI.\n *\n * @param text - The text to summarize\n * @param options - Optional settings for summary style and length\n * @returns Promise with the generated summary\n *\n * @example\n * ```ts\n * // Basic summarization\n * const result = await summarize(longArticle);\n * console.log(result.text);\n * ```\n *\n * @example\n * ```ts\n * // Short bullet-point summary\n * const result = await summarize(longArticle, {\n * length: 'short',\n * style: 'bullets'\n * });\n * ```\n *\n * @example\n * ```ts\n * // TL;DR style\n * const result = await summarize(longArticle, {\n * style: 'tldr'\n * });\n * ```\n */\nexport async function summarize(\n text: string,\n options?: LLMSummarizeOptions\n): Promise<LLMResponse> {\n if (Platform.OS !== 'ios' && Platform.OS !== 'android') {\n return { text: '' };\n }\n\n if (!text || text.trim().length === 0) {\n throw new Error('text cannot be empty');\n }\n\n const length = options?.length ?? 'medium';\n const style = options?.style ?? 
'paragraph';\n const systemPrompt = buildSummarizePrompt(length, style);\n\n return sendMessage([{ role: 'user', content: text }], { systemPrompt });\n}\n\n/**\n * Summarize text with streaming output.\n *\n * @param text - The text to summarize\n * @param onToken - Callback for each token received\n * @param options - Optional settings for summary style and length\n * @returns Object with stop() function and promise\n *\n * @example\n * ```ts\n * const { promise } = streamSummarize(\n * longArticle,\n * (event) => setSummary(event.accumulatedText),\n * { style: 'bullets' }\n * );\n * await promise;\n * ```\n */\nexport function streamSummarize(\n text: string,\n onToken: LLMStreamCallback,\n options?: LLMSummarizeOptions\n): { promise: Promise<LLMResponse>; stop: () => void } {\n if (Platform.OS !== 'ios' && Platform.OS !== 'android') {\n return { promise: Promise.resolve({ text: '' }), stop: () => {} };\n }\n\n if (!text || text.trim().length === 0) {\n return {\n promise: Promise.reject(new Error('text cannot be empty')),\n stop: () => {},\n };\n }\n\n const length = options?.length ?? 'medium';\n const style = options?.style ?? 
'paragraph';\n const systemPrompt = buildSummarizePrompt(length, style);\n\n return streamMessage([{ role: 'user', content: text }], onToken, {\n systemPrompt,\n });\n}\n\n/**\n * Translate text to another language using on-device AI.\n *\n * @param text - The text to translate\n * @param options - Translation options including target language\n * @returns Promise with the translated text\n *\n * @example\n * ```ts\n * // Basic translation\n * const result = await translate('Hello, world!', { to: 'Spanish' });\n * console.log(result.text); // \"¡Hola, mundo!\"\n * ```\n *\n * @example\n * ```ts\n * // Formal translation with source language\n * const result = await translate('Hey, what\\'s up?', {\n * to: 'French',\n * from: 'English',\n * tone: 'formal'\n * });\n * ```\n */\nexport async function translate(\n text: string,\n options: LLMTranslateOptions\n): Promise<LLMResponse> {\n if (Platform.OS !== 'ios' && Platform.OS !== 'android') {\n return { text: '' };\n }\n\n if (!text || text.trim().length === 0) {\n throw new Error('text cannot be empty');\n }\n\n const { to, from, tone = 'neutral' } = options;\n const systemPrompt = buildTranslatePrompt(to, from, tone);\n\n return sendMessage([{ role: 'user', content: text }], { systemPrompt });\n}\n\n/**\n * Translate text with streaming output.\n *\n * @param text - The text to translate\n * @param onToken - Callback for each token received\n * @param options - Translation options including target language\n * @returns Object with stop() function and promise\n *\n * @example\n * ```ts\n * const { promise } = streamTranslate(\n * 'Hello, world!',\n * (event) => setTranslation(event.accumulatedText),\n * { to: 'Japanese' }\n * );\n * await promise;\n * ```\n */\nexport function streamTranslate(\n text: string,\n onToken: LLMStreamCallback,\n options: LLMTranslateOptions\n): { promise: Promise<LLMResponse>; stop: () => void } {\n if (Platform.OS !== 'ios' && Platform.OS !== 'android') {\n return { promise: 
Promise.resolve({ text: '' }), stop: () => {} };\n }\n\n if (!text || text.trim().length === 0) {\n return {\n promise: Promise.reject(new Error('text cannot be empty')),\n stop: () => {},\n };\n }\n\n const { to, from, tone = 'neutral' } = options;\n const systemPrompt = buildTranslatePrompt(to, from, tone);\n\n return streamMessage([{ role: 'user', content: text }], onToken, {\n systemPrompt,\n });\n}\n\n/**\n * Rewrite text in a different style using on-device AI.\n *\n * @param text - The text to rewrite\n * @param options - Rewrite options specifying the target style\n * @returns Promise with the rewritten text\n *\n * @example\n * ```ts\n * // Make text more formal\n * const result = await rewrite('hey can u help me out?', {\n * style: 'formal'\n * });\n * console.log(result.text); // \"Would you be able to assist me?\"\n * ```\n *\n * @example\n * ```ts\n * // Simplify complex text\n * const result = await rewrite(technicalText, { style: 'simple' });\n * ```\n */\nexport async function rewrite(\n text: string,\n options: LLMRewriteOptions\n): Promise<LLMResponse> {\n if (Platform.OS !== 'ios' && Platform.OS !== 'android') {\n return { text: '' };\n }\n\n if (!text || text.trim().length === 0) {\n throw new Error('text cannot be empty');\n }\n\n const { style } = options;\n const systemPrompt = buildRewritePrompt(style);\n\n return sendMessage([{ role: 'user', content: text }], { systemPrompt });\n}\n\n/**\n * Rewrite text with streaming output.\n *\n * @param text - The text to rewrite\n * @param onToken - Callback for each token received\n * @param options - Rewrite options specifying the target style\n * @returns Object with stop() function and promise\n *\n * @example\n * ```ts\n * const { promise } = streamRewrite(\n * 'hey whats up',\n * (event) => setRewritten(event.accumulatedText),\n * { style: 'professional' }\n * );\n * await promise;\n * ```\n */\nexport function streamRewrite(\n text: string,\n onToken: LLMStreamCallback,\n options: 
LLMRewriteOptions\n): { promise: Promise<LLMResponse>; stop: () => void } {\n if (Platform.OS !== 'ios' && Platform.OS !== 'android') {\n return { promise: Promise.resolve({ text: '' }), stop: () => {} };\n }\n\n if (!text || text.trim().length === 0) {\n return {\n promise: Promise.reject(new Error('text cannot be empty')),\n stop: () => {},\n };\n }\n\n const { style } = options;\n const systemPrompt = buildRewritePrompt(style);\n\n return streamMessage([{ role: 'user', content: text }], onToken, {\n systemPrompt,\n });\n}\n\n/**\n * Extract key points from text using on-device AI.\n *\n * @param text - The text to extract key points from\n * @param options - Optional settings for extraction\n * @returns Promise with the key points as text\n *\n * @example\n * ```ts\n * // Extract key points from an article\n * const result = await extractKeyPoints(article);\n * console.log(result.text);\n * // \"• Point 1\\n• Point 2\\n• Point 3\"\n * ```\n *\n * @example\n * ```ts\n * // Limit to 3 key points\n * const result = await extractKeyPoints(article, { maxPoints: 3 });\n * ```\n */\nexport async function extractKeyPoints(\n text: string,\n options?: LLMExtractKeyPointsOptions\n): Promise<LLMResponse> {\n if (Platform.OS !== 'ios' && Platform.OS !== 'android') {\n return { text: '' };\n }\n\n if (!text || text.trim().length === 0) {\n throw new Error('text cannot be empty');\n }\n\n const maxPoints = options?.maxPoints ?? 
5;\n const systemPrompt = buildExtractKeyPointsPrompt(maxPoints);\n\n return sendMessage([{ role: 'user', content: text }], { systemPrompt });\n}\n\n/**\n * Extract key points with streaming output.\n *\n * @param text - The text to extract key points from\n * @param onToken - Callback for each token received\n * @param options - Optional settings for extraction\n * @returns Object with stop() function and promise\n *\n * @example\n * ```ts\n * const { promise } = streamExtractKeyPoints(\n * article,\n * (event) => setKeyPoints(event.accumulatedText),\n * { maxPoints: 5 }\n * );\n * await promise;\n * ```\n */\nexport function streamExtractKeyPoints(\n text: string,\n onToken: LLMStreamCallback,\n options?: LLMExtractKeyPointsOptions\n): { promise: Promise<LLMResponse>; stop: () => void } {\n if (Platform.OS !== 'ios' && Platform.OS !== 'android') {\n return { promise: Promise.resolve({ text: '' }), stop: () => {} };\n }\n\n if (!text || text.trim().length === 0) {\n return {\n promise: Promise.reject(new Error('text cannot be empty')),\n stop: () => {},\n };\n }\n\n const maxPoints = options?.maxPoints ?? 
5;\n const systemPrompt = buildExtractKeyPointsPrompt(maxPoints);\n\n return streamMessage([{ role: 'user', content: text }], onToken, {\n systemPrompt,\n });\n}\n\n/**\n * Answer a question based on provided context using on-device AI.\n *\n * @param question - The question to answer\n * @param context - The context/document to base the answer on\n * @param options - Optional settings for the answer\n * @returns Promise with the answer\n *\n * @example\n * ```ts\n * // Answer a question about a document\n * const result = await answerQuestion(\n * 'What is the main topic?',\n * documentText\n * );\n * console.log(result.text);\n * ```\n *\n * @example\n * ```ts\n * // Get a detailed answer\n * const result = await answerQuestion(\n * 'Explain the methodology',\n * researchPaper,\n * { detail: 'detailed' }\n * );\n * ```\n */\nexport async function answerQuestion(\n question: string,\n context: string,\n options?: LLMAnswerQuestionOptions\n): Promise<LLMResponse> {\n if (Platform.OS !== 'ios' && Platform.OS !== 'android') {\n return { text: '' };\n }\n\n if (!question || question.trim().length === 0) {\n throw new Error('question cannot be empty');\n }\n\n if (!context || context.trim().length === 0) {\n throw new Error('context cannot be empty');\n }\n\n const detail = options?.detail ?? 
'medium';\n const systemPrompt = buildAnswerQuestionPrompt(detail);\n const userContent = `Context:\\n${context}\\n\\nQuestion: ${question}`;\n\n return sendMessage([{ role: 'user', content: userContent }], { systemPrompt });\n}\n\n/**\n * Answer a question with streaming output.\n *\n * @param question - The question to answer\n * @param context - The context/document to base the answer on\n * @param onToken - Callback for each token received\n * @param options - Optional settings for the answer\n * @returns Object with stop() function and promise\n *\n * @example\n * ```ts\n * const { promise } = streamAnswerQuestion(\n * 'What are the key findings?',\n * documentText,\n * (event) => setAnswer(event.accumulatedText),\n * { detail: 'detailed' }\n * );\n * await promise;\n * ```\n */\nexport function streamAnswerQuestion(\n question: string,\n context: string,\n onToken: LLMStreamCallback,\n options?: LLMAnswerQuestionOptions\n): { promise: Promise<LLMResponse>; stop: () => void } {\n if (Platform.OS !== 'ios' && Platform.OS !== 'android') {\n return { promise: Promise.resolve({ text: '' }), stop: () => {} };\n }\n\n if (!question || question.trim().length === 0) {\n return {\n promise: Promise.reject(new Error('question cannot be empty')),\n stop: () => {},\n };\n }\n\n if (!context || context.trim().length === 0) {\n return {\n promise: Promise.reject(new Error('context cannot be empty')),\n stop: () => {},\n };\n }\n\n const detail = options?.detail ?? 'medium';\n const systemPrompt = buildAnswerQuestionPrompt(detail);\n const userContent = `Context:\\n${context}\\n\\nQuestion: ${question}`;\n\n return streamMessage([{ role: 'user', content: userContent }], onToken, {\n systemPrompt,\n });\n}\n\n"]}
@@ -0,0 +1,218 @@
1
+ /**
2
+ * Chat Memory Management for On-Device AI Models
3
+ *
4
+ * WHY CLIENT-MANAGED MEMORY IS REQUIRED:
5
+ * --------------------------------------
6
+ * On-device AI models (Apple Foundation Models, local LLMs) are stateless.
7
+ * Unlike cloud-based APIs that may maintain server-side conversation state,
8
+ * on-device models have no session persistence or built-in memory.
9
+ *
10
+ * Each generation call accepts a single prompt string and returns a response
11
+ * with no knowledge of previous interactions. To enable multi-turn conversations,
12
+ * the client must:
13
+ *
14
+ * 1. Store all messages locally (this module handles that)
15
+ * 2. Build a complete prompt containing the full conversation history
16
+ * 3. Send the entire context on every generation request
17
+ * 4. Manage memory limits to avoid exceeding model context windows
18
+ *
19
+ * This approach is framework-agnostic and works with React, React Native,
20
+ * Expo, or any JavaScript/TypeScript environment.
21
+ */
22
+ import { LLMMessage, ChatMemoryOptions, ChatMemorySnapshot } from './types';
23
+ /**
24
+ * Build a single prompt string from an array of messages.
25
+ *
26
+ * Uses a simple, deterministic format optimized for on-device models:
27
+ *
28
+ * ```
29
+ * SYSTEM: You are a helpful assistant.
30
+ * USER: Hello!
31
+ * ASSISTANT: Hi there!
32
+ * USER: How are you?
33
+ * ```
34
+ *
35
+ * This format is:
36
+ * - Human-readable for debugging
37
+ * - Token-efficient (minimal overhead)
38
+ * - Deterministic (same input = same output)
39
+ * - Compatible with most instruction-following models
40
+ *
41
+ * @param messages - Array of messages to convert to a prompt
42
+ * @returns A single prompt string ready for generation
43
+ *
44
+ * @example
45
+ * ```ts
46
+ * const prompt = buildPrompt([
47
+ * { role: 'system', content: 'You are helpful.' },
48
+ * { role: 'user', content: 'Hi!' },
49
+ * { role: 'assistant', content: 'Hello!' },
50
+ * { role: 'user', content: 'What is 2+2?' }
51
+ * ]);
52
+ * // Returns:
53
+ * // "SYSTEM: You are helpful.\nUSER: Hi!\nASSISTANT: Hello!\nUSER: What is 2+2?"
54
+ * ```
55
+ */
56
+ export declare function buildPrompt(messages: LLMMessage[]): string;
57
+ /**
58
+ * ChatMemoryManager - Manages conversation history for stateless on-device AI models.
59
+ *
60
+ * IMPORTANT: On-device models have no built-in memory or session state.
61
+ * This class stores messages client-side and provides methods to:
62
+ * - Add user and assistant messages
63
+ * - Build a complete prompt from conversation history
64
+ * - Limit history to prevent context overflow
65
+ * - Clear or reset the conversation
66
+ *
67
+ * The manager automatically trims old messages when the turn limit is exceeded,
68
+ * keeping the most recent exchanges while preserving the system prompt.
69
+ *
70
+ * @example
71
+ * ```ts
72
+ * // Create a memory manager with a system prompt
73
+ * const memory = new ChatMemoryManager({
74
+ * maxTurns: 5,
75
+ * systemPrompt: 'You are a helpful coding assistant.'
76
+ * });
77
+ *
78
+ * // Add a user message
79
+ * memory.addUserMessage('How do I reverse a string in JavaScript?');
80
+ *
81
+ * // Get the full prompt for generation
82
+ * const prompt = memory.getPrompt();
83
+ * // "SYSTEM: You are a helpful coding assistant.\nUSER: How do I reverse a string in JavaScript?"
84
+ *
85
+ * // After generation, add the assistant's response
86
+ * const response = await generate(prompt);
87
+ * memory.addAssistantMessage(response);
88
+ *
89
+ * // Continue the conversation
90
+ * memory.addUserMessage('Can you show me with an arrow function?');
91
+ * const nextPrompt = memory.getPrompt();
92
+ * // Now includes the full conversation history
93
+ * ```
94
+ */
95
+ export declare class ChatMemoryManager {
96
+ private messages;
97
+ private systemPrompt;
98
+ private maxTurns;
99
+ /**
100
+ * Create a new ChatMemoryManager.
101
+ *
102
+ * @param options - Configuration options
103
+ * @param options.maxTurns - Max turns to keep (default: 10). A turn = 1 user + 1 assistant message.
104
+ * @param options.systemPrompt - Optional system prompt to include in every prompt.
105
+ */
106
+ constructor(options?: ChatMemoryOptions);
107
+ /**
108
+ * Add a user message to the conversation history.
109
+ *
110
+ * @param content - The user's message content
111
+ */
112
+ addUserMessage(content: string): void;
113
+ /**
114
+ * Add an assistant message to the conversation history.
115
+ * Call this after receiving a response from the model.
116
+ *
117
+ * @param content - The assistant's response content
118
+ */
119
+ addAssistantMessage(content: string): void;
120
+ /**
121
+ * Add a message with any role to the conversation history.
122
+ *
123
+ * @param message - The message to add
124
+ */
125
+ addMessage(message: LLMMessage): void;
126
+ /**
127
+ * Get the complete prompt string for the current conversation.
128
+ *
129
+ * This method builds a single prompt containing:
130
+ * 1. The system prompt (if set)
131
+ * 2. All conversation messages within the turn limit
132
+ *
133
+ * Send this prompt to the on-device model for generation.
134
+ *
135
+ * @returns The complete prompt string
136
+ */
137
+ getPrompt(): string;
138
+ /**
139
+ * Get all messages including the system prompt as an LLMMessage array.
140
+ *
141
+ * Useful if you need to pass messages to an API that accepts message arrays
142
+ * rather than a single prompt string.
143
+ *
144
+ * @returns Array of all messages including system prompt
145
+ */
146
+ getAllMessages(): LLMMessage[];
147
+ /**
148
+ * Get only the conversation messages (excludes system prompt).
149
+ *
150
+ * @returns Array of user and assistant messages
151
+ */
152
+ getMessages(): LLMMessage[];
153
+ /**
154
+ * Get a snapshot of the current memory state.
155
+ *
156
+ * Useful for debugging, persistence, or UI display.
157
+ *
158
+ * @returns Current memory state snapshot
159
+ */
160
+ getSnapshot(): ChatMemorySnapshot;
161
+ /**
162
+ * Get the current number of conversation turns.
163
+ *
164
+ * A turn is counted as a user message (assistant responses don't add turns).
165
+ * This reflects how many user interactions have occurred.
166
+ *
167
+ * @returns Number of turns in the current conversation
168
+ */
169
+ getTurnCount(): number;
170
+ /**
171
+ * Update the system prompt.
172
+ *
173
+ * The system prompt is preserved separately and never trimmed by turn limits.
174
+ *
175
+ * @param prompt - New system prompt, or undefined to clear
176
+ */
177
+ setSystemPrompt(prompt: string | undefined): void;
178
+ /**
179
+ * Get the current system prompt.
180
+ *
181
+ * @returns The system prompt or undefined if not set
182
+ */
183
+ getSystemPrompt(): string | undefined;
184
+ /**
185
+ * Update the maximum number of turns to retain.
186
+ *
187
+ * If the new limit is lower than current turn count, older messages
188
+ * will be trimmed immediately.
189
+ *
190
+ * @param maxTurns - New maximum turns
191
+ */
192
+ setMaxTurns(maxTurns: number): void;
193
+ /**
194
+ * Clear all conversation messages but keep the system prompt.
195
+ *
196
+ * Use this to start a new conversation with the same assistant persona.
197
+ */
198
+ clear(): void;
199
+ /**
200
+ * Reset everything including the system prompt.
201
+ *
202
+ * Use this for a complete fresh start.
203
+ */
204
+ reset(): void;
205
+ /**
206
+ * Trim conversation history to stay within the turn limit.
207
+ *
208
+ * This preserves the most recent messages while removing older ones.
209
+ * The system prompt is never affected by trimming.
210
+ *
211
+ * Trimming strategy:
212
+ * - Count turns (user messages)
213
+ * - If over limit, remove oldest user+assistant pairs
214
+ * - Always keep complete pairs to maintain conversation coherence
215
+ */
216
+ private trimHistory;
217
+ }
218
+ //# sourceMappingURL=memory.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"memory.d.ts","sourceRoot":"","sources":["../src/memory.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;GAoBG;AAEH,OAAO,EAAE,UAAU,EAAW,iBAAiB,EAAE,kBAAkB,EAAE,MAAM,SAAS,CAAC;AAKrF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAgCG;AACH,wBAAgB,WAAW,CAAC,QAAQ,EAAE,UAAU,EAAE,GAAG,MAAM,CAe1D;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAqCG;AACH,qBAAa,iBAAiB;IAE5B,OAAO,CAAC,QAAQ,CAAoB;IAGpC,OAAO,CAAC,YAAY,CAAqB;IAGzC,OAAO,CAAC,QAAQ,CAAS;IAEzB;;;;;;OAMG;gBACS,OAAO,GAAE,iBAAsB;IAK3C;;;;OAIG;IACH,cAAc,CAAC,OAAO,EAAE,MAAM,GAAG,IAAI;IAKrC;;;;;OAKG;IACH,mBAAmB,CAAC,OAAO,EAAE,MAAM,GAAG,IAAI;IAK1C;;;;OAIG;IACH,UAAU,CAAC,OAAO,EAAE,UAAU,GAAG,IAAI;IAUrC;;;;;;;;;;OAUG;IACH,SAAS,IAAI,MAAM;IAKnB;;;;;;;OAOG;IACH,cAAc,IAAI,UAAU,EAAE;IAY9B;;;;OAIG;IACH,WAAW,IAAI,UAAU,EAAE;IAI3B;;;;;;OAMG;IACH,WAAW,IAAI,kBAAkB;IASjC;;;;;;;OAOG;IACH,YAAY,IAAI,MAAM;IAItB;;;;;;OAMG;IACH,eAAe,CAAC,MAAM,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAIjD;;;;OAIG;IACH,eAAe,IAAI,MAAM,GAAG,SAAS;IAIrC;;;;;;;OAOG;IACH,WAAW,CAAC,QAAQ,EAAE,MAAM,GAAG,IAAI;IAKnC;;;;OAIG;IACH,KAAK,IAAI,IAAI;IAIb;;;;OAIG;IACH,KAAK,IAAI,IAAI;IAKb;;;;;;;;;;OAUG;IACH,OAAO,CAAC,WAAW;CAgCpB"}