agentic-api 1.0.3 → 1.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -27,7 +27,8 @@ Super simple API for intelligent agent orchestration with automatic sequences an
27
27
  - Applications requiring specialized agent orchestration
28
28
  - Projects needing reliable content extraction
29
29
 
30
- [![](https://mermaid.ink/img/pako:eNpVkd1OwjAUx1-lOYmJJkDYhxvswoRB0AtvdCQmbsTU7bAt2dqltCogz2K89uV8BMs-AM9F199__3N6TruDmCcIHqwK_h5nVEiymEWM6Aikpsuw_iyvGm0S3gpEiWLZ8OwyjOD36-eb3KmSsgg6Y7Ou1WsqaJWRAMVbHuPLJEUm1yRsmTTcFjtEkguMZc4ZuX88qb4RBrTAc6NvhlO1lrxEQQJVVVw3efbXCh8Ul8cMZEnEmu00fFrMlx3V05F-_4ZM2glr8I1_ZP4jq0u-uCBzLmLdAS3ylGGphyEZF_mWM0mLxuQbpD84K-KbLVstWw3Puqptwuec5oUS-EmmbeOH00_q0Q890JdQ0jzRz7g7aBHITDcTgae3Ca6oKmQEEdtrK1WSBxsWgyeFwh4IrtIMvBUt1ppUlVCJs5zqZyuPakXZM-dll5KKw1Ftur5ZFFOumATPdN3aDN4OPsC7HpkDwzRcczw0DEtHDzbgucPBtWPajuXYY9twbGffg21dfTgYufb4PPZ_wB3E1Q?type=png)](https://mermaid.live/edit#pako:eNpVkd1OwjAUx1-lOYmJJkDYhxvswoRB0AtvdCQmbsTU7bAt2dqltCogz2K89uV8BMs-AM9F199__3N6TruDmCcIHqwK_h5nVEiymEWM6Aikpsuw_iyvGm0S3gpEiWLZ8OwyjOD36-eb3KmSsgg6Y7Ou1WsqaJWRAMVbHuPLJEUm1yRsmTTcFjtEkguMZc4ZuX88qb4RBrTAc6NvhlO1lrxEQQJVVVw3efbXCh8Ul8cMZEnEmu00fFrMlx3V05F-_4ZM2glr8I1_ZP4jq0u-uCBzLmLdAS3ylGGphyEZF_mWM0mLxuQbpD84K-KbLVstWw3Puqptwuec5oUS-EmmbeOH00_q0Q890JdQ0jzRz7g7aBHITDcTgae3Ca6oKmQEEdtrK1WSBxsWgyeFwh4IrtIMvBUt1ppUlVCJs5zqZyuPakXZM-dll5KKw1Ftur5ZFFOumATPdN3aDN4OPsC7HpkDwzRcczw0DEtHDzbgucPBtWPajuXYY9twbGffg21dfTgYufb4PPZ_wB3E1Q)
30
+
31
+ [![](https://mermaid.ink/img/pako:eNpVkcluwjAQhl_FGgmJSoDIQoAcKhEQ7aGXNkiVmqDKTYYkUmJHxu5GeZaq575cH6EmC8sc7PnG_4xn7B1EPEZwYZPztyilQpLVImREmy81dYNqW1_VsVlwIxAlinXNi24Qwt_37w-5VQVlIbTCet2ql0TQMiU-itcswudZgkxuSdAwqbkpdrA4ExjJjDNy93CKekbg0xzPhZ4ZzNVW8gIF8VVZct3k2akV3CsujxnI4pDV7jx4XC3XLVXTkX7_msyaCSvwjAsyL8hqkzsdsuQi0h3QPEsYFnoYknKRfXImaV6LPIP0B2dFPLNhq2Gr5kVbtUn4WtIsVwK_yLxp_HD7KXrUQw_0IxQ0i_U37g6xEGSqmwnB1W6MG6pyGULI9lpKleT-B4vAlUJhDwRXSQruhuZbTaqMqcRFRvW3Fa2kpOyJ8yMm4nBTk60fFsWcKybBHVVScHfwrmFiDgzTGJvToWFY2nrwAe54OBg5pu1Yjj21Dcd29j34rGoPB5OxPT23_T90g8Qf?type=png)](https://mermaid.live/edit#pako:eNpVkcluwjAQhl_FGgmJSoDIQoAcKhEQ7aGXNkiVmqDKTYYkUmJHxu5GeZaq575cH6EmC8sc7PnG_4xn7B1EPEZwYZPztyilQpLVImREmy81dYNqW1_VsVlwIxAlinXNi24Qwt_37w-5VQVlIbTCet2ql0TQMiU-itcswudZgkxuSdAwqbkpdrA4ExjJjDNy93CKekbg0xzPhZ4ZzNVW8gIF8VVZct3k2akV3CsujxnI4pDV7jx4XC3XLVXTkX7_msyaCSvwjAsyL8hqkzsdsuQi0h3QPEsYFnoYknKRfXImaV6LPIP0B2dFPLNhq2Gr5kVbtUn4WtIsVwK_yLxp_HD7KXrUQw_0IxQ0i_U37g6xEGSqmwnB1W6MG6pyGULI9lpKleT-B4vAlUJhDwRXSQruhuZbTaqMqcRFRvW3Fa2kpOyJ8yMm4nBTk60fFsWcKybBHVVScHfwrmFiDgzTGJvToWFY2nrwAe54OBg5pu1Yjj21Dcd29j34rGoPB5OxPT23_T90g8Qf)
31
32
 
32
33
  ## 📦 Installation
33
34
 
@@ -150,6 +151,51 @@ Agent transfer is automatically managed with:
150
151
  - Conversation context preservation
151
152
  - Automatic system instruction updates
152
153
 
154
+ ## 💾 Pull-based Data Digestion
155
+
156
+ This feature enables agents to process large documents chunk by chunk. It's designed for scenarios where the entire document cannot fit into the agent's context window.
157
+
158
+ - **Chunked Processing**: The `pullContentDigestor` tool allows an agent to request and process content in manageable chunks.
159
+ - **Stateful Digestion**: The agent receives the current chunk and relevant instructions, including the previous processing results, to maintain context throughout the digestion of the entire document.
160
+ - **Flexible Templates**: Different processing templates (e.g., "facts", "compress", "semantic", "minutes") can be applied to each chunk, guiding the agent on how to digest the information.
161
+ - **EOF Handling**: The tool signals the end of the document with an "EOF" message, allowing the agent to finalize its processing.
162
+
163
+ ```typescript
164
+ // Example of how an agent might use the pullContentDigestor tool
165
+
166
+ // Agent receives a call to process a large document
167
+ // Initial call to the tool
168
+ const initialChunkData = await pullContentDigestor({
169
+ path: "path/to/large/document.pdf",
170
+ template: "facts", // or any other relevant template
171
+ position: 0 // Initial position
172
+ });
173
+
174
+ let currentChunk = initialChunkData.content;
175
+ let nextPosition = initialChunkData.nextPosition;
176
+ let accumulatedResults = ""; // Agent will accumulate results here
177
+
178
+ while (!currentChunk.includes("EOF")) {
179
+ // Process the currentChunk based on instructions and template
180
+ // The agent would use its LLM capabilities here,
181
+ // using the provided instructions, previous results, and the new chunk.
182
+ const processingResult = await processMyChunk(currentChunk, initialChunkData.instructions, accumulatedResults);
183
+ accumulatedResults += processingResult; // Append to overall result
184
+
185
+ // Request the next chunk
186
+ const nextChunkData = await pullContentDigestor({
187
+ path: "path/to/large/document.pdf",
188
+ template: "facts",
189
+ position: nextPosition
190
+ });
191
+ currentChunk = nextChunkData.content;
192
+ nextPosition = nextChunkData.nextPosition;
193
+ }
194
+
195
+ // Final processing of accumulatedResults
196
+ const finalDigestedDocument = finalizeProcessing(accumulatedResults);
197
+ ```
198
+
153
199
  ## 🧪 Testing
154
200
 
155
201
  ```bash
@@ -1,7 +1,17 @@
1
- import { AgenticContext, Tool } from "../types";
1
+ import { AgenticCache, AgenticContext, Tool } from "../types";
2
2
  export declare const Digestor: {
3
- createTools: () => {
4
- pullContentDigestor: (args?: any, session?: AgenticContext) => Promise<void>;
3
+ createTools: (cache: AgenticCache, chunkSize: number, templates: {
4
+ [key: string]: string;
5
+ }) => {
6
+ pullContentDigestor: (args?: any, session?: AgenticContext) => Promise<{
7
+ content: string;
8
+ role?: undefined;
9
+ name?: undefined;
10
+ } | {
11
+ role: string;
12
+ name: string;
13
+ content: string;
14
+ }>;
5
15
  toolPullContentDigestor: Tool;
6
16
  };
7
17
  };
@@ -1,23 +1,24 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.Digestor = void 0;
4
+ const fs_1 = require("fs");
4
5
  const promptPullContentDigestor = `
5
- This tool allows you to load the content of a large file, chunk by chunk, for progressive processing.
6
+ This tool allows you to load the content of a large content, chunk by chunk, for progressive processing.
6
7
 
7
8
  # Usage Instructions:
8
9
  - On each call, the tool returns the next chunk of the specified file, along with instructions for processing this content.
9
- - The "position" parameter indicates where to resume reading (e.g., the last 20 characters of the previous chunk).
10
- - When all content has been processed, the tool returns "EOF" to signal the end of the file.
10
+ - The "position" parameter indicates where to resume reading.
11
+ - When all content has been processed, the tool returns "\nEOF" to signal the end of the file.
11
12
 
12
13
  # Parameters:
13
- - path (str): Absolute or relative path of the file to read.
14
- - template (enum): Indicates the type of processing to apply to the chunk (e.g., "facts", "compress", "semantic", "minutes").
15
- - position (str): Allows you to continue reading from the last known position (e.g., last 20 characters of the previous chunk).
14
+ - path (str): Absolute,relative, or inline content.
15
+ - template (enum): Indicates the type of processing to apply to the chunk (e.g., "facts", "compress", "semantic", "minutes", etc).
16
+ - position (str): Allows you to continue reading from the last known position.
16
17
 
17
18
  # Best Practices:
18
- - Use this tool to process files that are too large to be read in a single pass.
19
- - Never skip a chunk: process each piece in order.
20
- - Stop calling the tool as soon as "EOF" is returned.
19
+ - Use this tool to process contents that are too large to be read in a single pass.
20
+ - Never skip a chunk: process each piece in order and always use the last position parameter.
21
+ - Stop calling the tool as soon as "EOF" (or "\nEOF") is returned.
21
22
  `;
22
23
  const toolPullContentDigestor = {
23
24
  type: "function",
@@ -30,16 +31,16 @@ const toolPullContentDigestor = {
30
31
  properties: {
31
32
  path: {
32
33
  type: "string",
33
- description: "Absolute or relative path of the file to read."
34
+ description: "Absolute or relative path, or a unique identifier of the file to read (e.g., 'https:....html', 'file://...', 'https://.../file.pdf')."
34
35
  },
35
36
  template: {
36
37
  type: "string",
37
38
  description: "Type of processing to apply to the chunk (e.g., 'facts', 'compress', 'semantic', 'minutes').",
38
- enum: ["facts", "compress", "semantic", "minutes"]
39
+ enum: ["facts", "compress", "semantic", "minutes", "custom"]
39
40
  },
40
41
  position: {
41
- type: "string",
42
- description: "Allows you to continue reading from the last 20 characters of the previous chunk."
42
+ type: "number",
43
+ description: "position of the last loaded chunk. (0 for the first call)"
43
44
  }
44
45
  },
45
46
  required: ["path", "template", "position"],
@@ -47,11 +48,98 @@ const toolPullContentDigestor = {
47
48
  }
48
49
  }
49
50
  };
50
- const createTools = () => {
51
+ /**
52
+ * Loads a content chunk from a file or URL.
53
+ *
54
+ * @param {string} path - The path to the content file or URL.
55
+ * @param {number} position - The position in the content to start loading from.
56
+ * @param {number} chunkSize - The size of each content chunk to load.
57
+ * @param {AgenticCache} cache - The cache instance used to store and retrieve content chunks.
58
+ * @param {string} userid - The user ID for caching purposes.
59
+ * @returns {Promise<Content>} A promise that resolves to the content chunk.
60
+ */
61
+ const loadContentChunk = async (path, position, chunkSize, cache, userid) => {
62
+ //FIXME: multiple users access to the cache with different position!
63
+ const loader = async (path, cache, userid) => {
64
+ const cacheKey = `content:${path}`;
65
+ const value = await cache.get(cacheKey);
66
+ if (value) {
67
+ return value;
68
+ }
69
+ if ((0, fs_1.existsSync)(path)) {
70
+ const fileContent = (0, fs_1.readFileSync)(path, "utf8");
71
+ const value = { content: fileContent, position: { [userid]: 0 }, key: cacheKey };
72
+ await cache.set(cacheKey, value);
73
+ return value;
74
+ }
75
+ else if (path.startsWith("http://") || path.startsWith("https://")) {
76
+ const response = await fetch(path);
77
+ const fileContent = await response.text();
78
+ const value = { content: fileContent, position: { [userid]: 0 }, key: cacheKey };
79
+ await cache.set(cacheKey, value);
80
+ return value;
81
+ }
82
+ //
83
+ // if not a file or url, return the path as content
84
+ return { content: path, position: { [userid]: 0 }, key: cacheKey };
85
+ };
86
+ //
87
+ // load asset content
88
+ const value = await loader(path, cache, userid);
89
+ const hasNextLN = value.content.indexOf('\n', value.position[userid] + chunkSize - 2);
90
+ // Determine the start position for the next chunk.
91
+ // If there is no next newline, use the current position; otherwise, start after the next newline.
92
+ const startPos = value.position[userid];
93
+ const nextPos = (hasNextLN == -1) ? startPos + chunkSize : startPos + hasNextLN + 1;
94
+ value.position[userid] = startPos + nextPos;
95
+ await cache.set(value.key, value);
96
+ if (startPos >= value.content.length) {
97
+ return { content: 'EOF', position: nextPos };
98
+ }
99
+ if (startPos + chunkSize + 1 > value.content.length) {
100
+ return { content: value.content.slice(startPos) + "\nEOF", position: nextPos };
101
+ }
102
+ const endPos = Math.min(startPos + chunkSize, value.content.length);
103
+ return { content: value.content.slice(startPos, endPos), position: endPos };
104
+ };
105
+ /**
106
+ * Creates tools for chunked content loading and processing.
107
+ *
108
+ * @param {AgenticCache} cache - The cache instance used to store and retrieve content chunks.
109
+ * @param {number} chunkSize - The size of each content chunk to load.
110
+ * @param {{[key: string]: string}} templates - An object mapping template names to their instruction strings.
111
+ * @returns {{ pullContentDigestor: Function, toolPullContentDigestor: any }} An object containing the pullContentDigestor function and its tool definition.
112
+ */
113
+ const createTools = (cache, chunkSize, templates) => {
51
114
  const pullContentDigestor = async (args, session) => {
52
115
  const { path, template, position } = args;
53
- // const {memory} = session;
54
- // const {content,title} = await parse(html, temp);
116
+ const userid = session?.user?.id;
117
+ const chunk = await loadContentChunk(path, position, chunkSize, cache, userid);
118
+ if (!chunk || !chunk.content) {
119
+ throw new Error("Load content chunk failed");
120
+ }
121
+ //
122
+ // check template and ask for a valid template name
123
+ if (!templates[template]) {
124
+ return { content: 'Missing template name, recall with a valid template name' };
125
+ }
126
+ //
127
+ // get the prompt for the template
128
+ const promptPullContentDigestorFollowUp = templates[template] || '';
129
+ // Construit le message formaté pour l'agent LLM
130
+ console.log("-- Digest DBG chunk", position, chunk);
131
+ const templateInstructions = !position ? `${promptPullContentDigestorFollowUp}\nTemplate: ${template}\nChunk:\n` : '';
132
+ const content = JSON.stringify({
133
+ instructions: `${templateInstructions}`,
134
+ chunk: `${chunk.content}`,
135
+ nextPosition: chunk.position,
136
+ eof: chunk.content.includes('EOF')
137
+ });
138
+ return {
139
+ role: "assistant",
140
+ name: "pullContentDigestor",
141
+ content
142
+ };
55
143
  };
56
144
  return { pullContentDigestor, toolPullContentDigestor };
57
145
  };
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,45 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ const execute_1 = require("../execute");
4
+ const digestor_1 = require("./digestor");
5
+ // Implémentation simple du cache pour les tests
6
+ class TestCache {
7
+ constructor() {
8
+ this.cache = new Map();
9
+ }
10
+ async get(key) {
11
+ return this.cache.get(key) || null;
12
+ }
13
+ async set(key, value, ttl) {
14
+ this.cache.set(key, value);
15
+ return value;
16
+ }
17
+ }
18
+ describe("Digestor Agent", () => {
19
+ it("should merge two chunks of 50 characters", async () => {
20
+ // Création d'un contenu de test de 100 caractères
21
+ const testContent = "A".repeat(50) + "B".repeat(50);
22
+ // Configuration de l'agent avec une instruction simple de fusion
23
+ const agentConfig = {
24
+ name: "chunkMerger",
25
+ publicDescription: "Agent that merges chunks of text",
26
+ instructions: `You are a simple agent that merges chunks of text.
27
+ When you receive a chunk, you should:
28
+ 1. If it's the first chunk, store it
29
+ 2. If it's the second chunk, merge it with the first one
30
+ 3. If you receive "EOF", return the final merged result`,
31
+ tools: [digestor_1.Digestor.createTools(new TestCache(), 50).toolPullContentDigestor]
32
+ };
33
+ // Mock du cache pour simuler le contenu
34
+ const cache = new TestCache();
35
+ await cache.set("content:test.txt", testContent);
36
+ // Exécution de l'agent
37
+ const result = await (0, execute_1.executeAgentSet)([agentConfig], {
38
+ messages: [],
39
+ cache,
40
+ user: { id: "test-user" }
41
+ });
42
+ // Vérification du résultat
43
+ expect(result.messages[result.messages.length - 1].content).toBe("A".repeat(50) + "B".repeat(50));
44
+ });
45
+ });
@@ -58,7 +58,7 @@ function sendFeedback(params) {
58
58
  usage,
59
59
  state
60
60
  };
61
- console.log('--- DBG feedback --:', description);
61
+ //console.log('--- DBG feedback --:',description);
62
62
  //
63
63
  // send agent state and description
64
64
  stdout.write(`\n<step>${JSON.stringify(feedback)}</step>\n`);
@@ -75,6 +75,10 @@ export interface AgenticMemorySession {
75
75
  messages: any[];
76
76
  usage: Usage;
77
77
  }
78
+ export type AgenticCache = {
79
+ get: <T>(key: string) => Promise<T | null>;
80
+ set: <T>(key: string, value: T, ttl?: number) => Promise<T>;
81
+ };
78
82
  export interface AgenticContext {
79
83
  memory?: AgenticMemorySession;
80
84
  user: UserNano;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "agentic-api",
3
- "version": "1.0.3",
3
+ "version": "1.0.5",
4
4
  "description": "API pour l'orchestration d'agents intelligents avec séquences et escalades automatiques",
5
5
  "main": "dist/src/index.js",
6
6
  "types": "dist/src/index.d.ts",