agentic-api 2.0.585 → 2.0.592

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,130 @@
1
+ export type JobStatus = 'planned' | 'running' | 'done' | 'failed';
2
+ export type TaskStatus = 'todo' | 'doing' | 'done' | 'failed';
3
+ export interface TaskSpec {
4
+ id: string;
5
+ title: string;
6
+ type?: string;
7
+ dependsOn?: string[];
8
+ input?: any;
9
+ acceptance?: string[];
10
+ }
11
+ export interface TaskResult {
12
+ taskId: string;
13
+ ok: boolean;
14
+ summary: string;
15
+ data?: any;
16
+ artifacts?: {
17
+ kind: string;
18
+ ref: string;
19
+ meta?: any;
20
+ }[];
21
+ error?: string;
22
+ }
23
+ export interface JobPlan {
24
+ jobId: string;
25
+ goal: string;
26
+ beneficiary?: string;
27
+ tasks: TaskSpec[];
28
+ }
29
+ export interface ReducedJobMemory {
30
+ memory: string;
31
+ index: Record<string, any>;
32
+ statusLine?: string;
33
+ }
34
+ export interface JobRunnerState {
35
+ jobId: string;
36
+ plan?: JobPlan;
37
+ jobStatus: JobStatus;
38
+ taskStatus: Record<string, TaskStatus>;
39
+ lastMemory: ReducedJobMemory | null;
40
+ lastError?: string;
41
+ }
42
+ export type PlannerFn = (input: any, seedMemory?: ReducedJobMemory | null) => Promise<JobPlan>;
43
+ export type ExecutorFn = (task: TaskSpec, memory: ReducedJobMemory | null) => Promise<TaskResult>;
44
+ export type ReducerFn = (previous: ReducedJobMemory | null, task: TaskSpec, result: TaskResult) => Promise<ReducedJobMemory>;
45
+ export type JobEventType = 'job_created' | 'plan_ready' | 'task_started' | 'task_done' | 'memory_reduced' | 'job_done' | 'job_failed';
46
+ export interface JobEvent<T = any> {
47
+ type: JobEventType;
48
+ at: string;
49
+ payload?: T;
50
+ }
51
+ export interface JobRunnerConfig {
52
+ planner: PlannerFn;
53
+ executor: ExecutorFn;
54
+ reducer: ReducerFn;
55
+ /**
56
+ * Directory where snapshots and events are persisted.
57
+ * If not provided, persistence is disabled.
58
+ */
59
+ storeDir?: string;
60
+ /**
61
+ * Max attempts per task when schema validation fails or executor throws.
62
+ * (Logical failure with ok=false does not trigger retry)
63
+ */
64
+ maxAttempts?: number;
65
+ onEvent?: (event: JobEvent) => void;
66
+ }
67
+ export interface RunJobOptions {
68
+ jobId?: string;
69
+ seedMemory?: ReducedJobMemory | null;
70
+ resume?: boolean;
71
+ }
72
+ export interface JobRunSuccess {
73
+ ok: true;
74
+ jobId: string;
75
+ plan: JobPlan;
76
+ finalSummary: string;
77
+ artifactsIndex: Record<string, any>;
78
+ memory: ReducedJobMemory | null;
79
+ taskStatus: Record<string, TaskStatus>;
80
+ snapshotFile?: string;
81
+ eventsFile?: string;
82
+ }
83
+ export interface JobRunFailure {
84
+ ok: false;
85
+ jobId: string;
86
+ plan?: JobPlan;
87
+ failedTaskId?: string;
88
+ error: string;
89
+ errorSummary: {
90
+ taskId?: string;
91
+ taskTitle?: string;
92
+ nature: string;
93
+ progress?: string;
94
+ nextAction: string;
95
+ };
96
+ memory: ReducedJobMemory | null;
97
+ taskStatus: Record<string, TaskStatus>;
98
+ snapshotFile?: string;
99
+ eventsFile?: string;
100
+ }
101
+ export type JobRunOutcome = JobRunSuccess | JobRunFailure;
102
+ export declare class JobRunner {
103
+ private readonly storeDir?;
104
+ private readonly maxAttempts;
105
+ private readonly planner;
106
+ private readonly executor;
107
+ private readonly reducer;
108
+ private readonly onEvent?;
109
+ constructor(config: JobRunnerConfig);
110
+ /**
111
+ * Run a job end-to-end (Plan → Execute → Reduce) with persistence and retries.
112
+ */
113
+ run(input: any, options?: RunJobOptions): Promise<JobRunOutcome>;
114
+ private planJob;
115
+ private initTaskStatus;
116
+ private nextReadyTask;
117
+ private executeWithRetry;
118
+ private reduceWithRetry;
119
+ private handleFailure;
120
+ private validateJobPlan;
121
+ private validateTaskResult;
122
+ private validateMemory;
123
+ private persistState;
124
+ private loadState;
125
+ private ensureStoreDir;
126
+ private snapshotPath;
127
+ private eventsPath;
128
+ private logEvent;
129
+ private emit;
130
+ }
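Taken together, these declarations describe a plan → execute → reduce loop driven by three user-supplied callbacks. A minimal usage sketch, assuming the package re-exports these symbols from its root; the planner, executor and reducer bodies below are hypothetical placeholders, not part of the package:

```typescript
import {
  JobRunner, JobPlan, TaskSpec, TaskResult, ReducedJobMemory
} from 'agentic-api';

// Hypothetical planner: a single task derived from the input goal.
const planner = async (input: { goal: string }): Promise<JobPlan> => ({
  jobId: '',                                  // empty: JobRunner substitutes a random UUID
  goal: input.goal,
  tasks: [{ id: 't1', title: 'Collect data' }]
});

// Hypothetical executor: always succeeds with a short summary.
const executor = async (task: TaskSpec): Promise<TaskResult> => ({
  taskId: task.id,
  ok: true,
  summary: `Completed: ${task.title}`
});

// Hypothetical reducer: fold each result into the running memory.
const reducer = async (
  previous: ReducedJobMemory | null,
  task: TaskSpec,
  result: TaskResult
): Promise<ReducedJobMemory> => ({
  memory: `${previous?.memory ?? ''}\n${result.summary}`.trim(),
  index: { ...(previous?.index ?? {}), [task.id]: result.summary }
});

const runner = new JobRunner({
  planner,
  executor,
  reducer,
  storeDir: './jobs',                         // enables <jobId>.json snapshots and .events.jsonl logs
  maxAttempts: 2,
  onEvent: (e) => console.log(e.type)
});

const outcome = await runner.run({ goal: 'Summarize quarterly results' });
if (outcome.ok) console.log(outcome.finalSummary);
```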
@@ -0,0 +1,339 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.JobRunner = void 0;
7
+ const crypto_1 = require("crypto");
8
+ const fs_1 = require("fs");
9
+ const path_1 = __importDefault(require("path"));
10
+ class JobRunner {
11
+ constructor(config) {
12
+ this.planner = config.planner;
13
+ this.executor = config.executor;
14
+ this.reducer = config.reducer;
15
+ this.storeDir = config.storeDir;
16
+ this.maxAttempts = config.maxAttempts || 2;
17
+ this.onEvent = config.onEvent;
18
+ }
19
+ /**
20
+ * Run a job end-to-end (Plan → Execute → Reduce) with persistence and retries.
21
+ */
22
+ async run(input, options) {
23
+ const seedMemory = options?.seedMemory ?? null;
24
+ const jobId = options?.jobId || (0, crypto_1.randomUUID)();
25
+ let state = null;
26
+ if (options?.resume && this.storeDir) {
27
+ state = await this.loadState(jobId);
28
+ }
29
+ if (!state) {
30
+ const plan = await this.planJob(jobId, input, seedMemory);
31
+ state = {
32
+ jobId: plan.jobId,
33
+ plan,
34
+ jobStatus: 'running',
35
+ taskStatus: this.initTaskStatus(plan.tasks),
36
+ lastMemory: seedMemory || null
37
+ };
38
+ await this.persistState(state);
39
+ this.emit({ type: 'job_created', at: new Date().toISOString(), payload: { jobId: state.jobId } });
40
+ }
41
+ if (!state.plan) {
42
+ return {
43
+ ok: false,
44
+ jobId,
45
+ error: 'Missing plan in restored state',
46
+ errorSummary: {
47
+ nature: 'state_corruption',
48
+ nextAction: 'Re-run planner to regenerate plan'
49
+ },
50
+ memory: state.lastMemory,
51
+ taskStatus: state.taskStatus
52
+ };
53
+ }
54
+ this.emit({ type: 'plan_ready', at: new Date().toISOString(), payload: { jobId: state.jobId, tasks: state.plan.tasks.length } });
55
+ const plan = state.plan;
56
+ state.jobStatus = 'running';
57
+ while (true) {
58
+ const nextTask = this.nextReadyTask(plan.tasks, state.taskStatus);
59
+ if (!nextTask)
60
+ break;
61
+ state.taskStatus[nextTask.id] = 'doing';
62
+ await this.persistState(state);
63
+ const execResult = await this.executeWithRetry(nextTask, state.lastMemory);
64
+ if (!execResult.valid) {
65
+ return await this.handleFailure(state, plan, nextTask, execResult.error || 'Execution failed');
66
+ }
67
+ const result = execResult.result;
68
+ this.emit({ type: 'task_done', at: new Date().toISOString(), payload: { jobId: plan.jobId, taskId: nextTask.id, ok: result.ok } });
69
+ const reduced = await this.reduceWithRetry(state.lastMemory, nextTask, result);
70
+ if (!reduced.valid) {
71
+ return await this.handleFailure(state, plan, nextTask, reduced.error || 'Reducer failed');
72
+ }
73
+ state.lastMemory = reduced.memory;
74
+ state.taskStatus[nextTask.id] = result.ok ? 'done' : 'failed';
75
+ await this.persistState(state);
76
+ this.emit({ type: 'memory_reduced', at: new Date().toISOString(), payload: { jobId: plan.jobId, taskId: nextTask.id } });
77
+ if (!result.ok) {
78
+ return await this.handleFailure(state, plan, nextTask, result.error || 'Task reported logical failure');
79
+ }
80
+ }
81
+ state.jobStatus = 'done';
82
+ await this.persistState(state);
83
+ this.emit({ type: 'job_done', at: new Date().toISOString(), payload: { jobId: plan.jobId } });
84
+ return {
85
+ ok: true,
86
+ jobId: plan.jobId,
87
+ plan,
88
+ finalSummary: state.lastMemory?.memory || '',
89
+ artifactsIndex: state.lastMemory?.index || {},
90
+ memory: state.lastMemory,
91
+ taskStatus: state.taskStatus,
92
+ snapshotFile: this.snapshotPath(plan.jobId),
93
+ eventsFile: this.eventsPath(plan.jobId)
94
+ };
95
+ }
96
+ async planJob(jobId, input, seedMemory) {
97
+ const plan = await this.planner(input, seedMemory);
98
+ const finalPlan = {
99
+ ...plan,
100
+ jobId: plan.jobId || jobId
101
+ };
102
+ this.validateJobPlan(finalPlan, true);
103
+ return finalPlan;
104
+ }
105
+ initTaskStatus(tasks) {
106
+ return tasks.reduce((acc, task) => {
107
+ acc[task.id] = 'todo';
108
+ return acc;
109
+ }, {});
110
+ }
111
+ nextReadyTask(tasks, status) {
112
+ return tasks.find(task => {
113
+ if (status[task.id] !== 'todo')
114
+ return false;
115
+ if (!task.dependsOn || task.dependsOn.length === 0)
116
+ return true;
117
+ return task.dependsOn.every(dep => status[dep] === 'done');
118
+ }) || null;
119
+ }
120
+ async executeWithRetry(task, memory) {
121
+ for (let attempt = 1; attempt <= this.maxAttempts; attempt++) {
122
+ this.emit({ type: 'task_started', at: new Date().toISOString(), payload: { taskId: task.id, attempt } });
123
+ try {
124
+ const result = await this.executor(task, memory);
125
+ const errors = this.validateTaskResult(result);
126
+ if (errors.length === 0) {
127
+ return { valid: true, result, attempts: attempt };
128
+ }
129
+ if (attempt >= this.maxAttempts) {
130
+ return { valid: false, error: `TaskResult validation failed: ${errors.join(', ')}`, attempts: attempt };
131
+ }
132
+ }
133
+ catch (err) {
134
+ if (attempt >= this.maxAttempts) {
135
+ return { valid: false, error: err?.message || 'Executor error', attempts: attempt };
136
+ }
137
+ }
138
+ }
139
+ return { valid: false, error: 'Unknown execution failure', attempts: this.maxAttempts };
140
+ }
141
+ async reduceWithRetry(previous, task, result) {
142
+ for (let attempt = 1; attempt <= this.maxAttempts; attempt++) {
143
+ try {
144
+ const nextMemory = await this.reducer(previous, task, result);
145
+ const errors = this.validateMemory(nextMemory);
146
+ if (errors.length === 0) {
147
+ return { valid: true, memory: nextMemory };
148
+ }
149
+ if (attempt >= this.maxAttempts) {
150
+ return { valid: false, error: `Reducer validation failed: ${errors.join(', ')}` };
151
+ }
152
+ }
153
+ catch (err) {
154
+ if (attempt >= this.maxAttempts) {
155
+ return { valid: false, error: err?.message || 'Reducer error' };
156
+ }
157
+ }
158
+ }
159
+ return { valid: false, error: 'Unknown reducer failure' };
160
+ }
161
+ async handleFailure(state, plan, task, reason) {
162
+ state.jobStatus = 'failed';
163
+ state.taskStatus[task.id] = state.taskStatus[task.id] === 'done' ? 'done' : 'failed';
164
+ state.lastError = reason;
165
+ await this.persistState(state);
166
+ const errorSummary = {
167
+ taskId: task.id,
168
+ taskTitle: task.title,
169
+ nature: reason,
170
+ progress: state.lastMemory?.memory,
171
+ nextAction: 'Inspect executor/reducer outputs or planner schema, then retry'
172
+ };
173
+ this.emit({ type: 'job_failed', at: new Date().toISOString(), payload: { jobId: plan.jobId, taskId: task.id, reason } });
174
+ return {
175
+ ok: false,
176
+ jobId: plan.jobId,
177
+ plan,
178
+ failedTaskId: task.id,
179
+ error: reason,
180
+ errorSummary,
181
+ memory: state.lastMemory,
182
+ taskStatus: state.taskStatus,
183
+ snapshotFile: this.snapshotPath(plan.jobId),
184
+ eventsFile: this.eventsPath(plan.jobId)
185
+ };
186
+ }
187
+ validateJobPlan(plan, throwOnError = false) {
188
+ const errors = [];
189
+ if (!plan || typeof plan !== 'object') {
190
+ errors.push('plan must be an object');
191
+ }
192
+ if (!plan.jobId || typeof plan.jobId !== 'string' || !plan.jobId.trim()) {
193
+ errors.push('jobId is required');
194
+ }
195
+ if (!plan.goal || typeof plan.goal !== 'string' || !plan.goal.trim()) {
196
+ errors.push('goal is required');
197
+ }
198
+ if (!Array.isArray(plan.tasks) || plan.tasks.length === 0) {
199
+ errors.push('tasks must be a non-empty array');
200
+ }
201
+ else {
202
+ plan.tasks.forEach((task, idx) => {
203
+ if (!task || typeof task !== 'object') {
204
+ errors.push(`task[${idx}] must be an object`);
205
+ return;
206
+ }
207
+ if (!task.id || typeof task.id !== 'string' || !task.id.trim()) {
208
+ errors.push(`task[${idx}].id is required`);
209
+ }
210
+ if (!task.title || typeof task.title !== 'string' || !task.title.trim()) {
211
+ errors.push(`task[${idx}].title is required`);
212
+ }
213
+ if (task.dependsOn && !Array.isArray(task.dependsOn)) {
214
+ errors.push(`task[${idx}].dependsOn must be an array if provided`);
215
+ }
216
+ if (task.acceptance && !Array.isArray(task.acceptance)) {
217
+ errors.push(`task[${idx}].acceptance must be an array if provided`);
218
+ }
219
+ });
220
+ }
221
+ if (throwOnError && errors.length) {
222
+ throw new Error(`Invalid JobPlan: ${errors.join(', ')}`);
223
+ }
224
+ return errors;
225
+ }
226
+ validateTaskResult(result) {
227
+ const errors = [];
228
+ if (!result || typeof result !== 'object') {
229
+ errors.push('TaskResult must be an object');
230
+ return errors;
231
+ }
232
+ if (!result.taskId || typeof result.taskId !== 'string' || !result.taskId.trim()) {
233
+ errors.push('taskId is required');
234
+ }
235
+ if (typeof result.ok !== 'boolean') {
236
+ errors.push('ok must be boolean');
237
+ }
238
+ if (!result.summary || typeof result.summary !== 'string') {
239
+ errors.push('summary is required');
240
+ }
241
+ if (result.artifacts && !Array.isArray(result.artifacts)) {
242
+ errors.push('artifacts must be an array if provided');
243
+ }
244
+ else if (Array.isArray(result.artifacts)) {
245
+ result.artifacts.forEach((artifact, idx) => {
246
+ if (!artifact || typeof artifact !== 'object') {
247
+ errors.push(`artifacts[${idx}] must be an object`);
248
+ return;
249
+ }
250
+ if (!artifact.kind || !artifact.ref) {
251
+ errors.push(`artifacts[${idx}] requires kind and ref`);
252
+ }
253
+ });
254
+ }
255
+ return errors;
256
+ }
257
+ validateMemory(memory) {
258
+ const errors = [];
259
+ if (!memory || typeof memory !== 'object') {
260
+ errors.push('memory must be an object');
261
+ return errors;
262
+ }
263
+ if (typeof memory.memory !== 'string') {
264
+ errors.push('memory.memory must be a string');
265
+ }
266
+ if (!memory.index || typeof memory.index !== 'object') {
267
+ errors.push('memory.index must be an object');
268
+ }
269
+ if (memory.statusLine && typeof memory.statusLine !== 'string') {
270
+ errors.push('memory.statusLine must be a string if provided');
271
+ }
272
+ return errors;
273
+ }
274
+ async persistState(state) {
275
+ if (!this.storeDir)
276
+ return;
277
+ await this.ensureStoreDir();
278
+ const file = this.snapshotPath(state.jobId);
279
+ if (!file)
280
+ return;
281
+ const snapshot = JSON.stringify(state, null, 2);
282
+ await fs_1.promises.writeFile(file, snapshot, 'utf-8');
283
+ }
284
+ async loadState(jobId) {
285
+ if (!this.storeDir)
286
+ return null;
287
+ const file = this.snapshotPath(jobId);
288
+ if (!file)
289
+ return null;
290
+ try {
291
+ const data = await fs_1.promises.readFile(file, 'utf-8');
292
+ return JSON.parse(data);
293
+ }
294
+ catch {
295
+ return null;
296
+ }
297
+ }
298
+ async ensureStoreDir() {
299
+ if (!this.storeDir)
300
+ return;
301
+ await fs_1.promises.mkdir(this.storeDir, { recursive: true });
302
+ }
303
+ snapshotPath(jobId) {
304
+ if (!this.storeDir)
305
+ return undefined;
306
+ return path_1.default.join(this.storeDir, `${jobId}.json`);
307
+ }
308
+ eventsPath(jobId) {
309
+ if (!this.storeDir)
310
+ return undefined;
311
+ return path_1.default.join(this.storeDir, `${jobId}.events.jsonl`);
312
+ }
313
+ async logEvent(jobId, event) {
314
+ if (!this.storeDir)
315
+ return;
316
+ await this.ensureStoreDir();
317
+ const file = this.eventsPath(jobId);
318
+ if (!file)
319
+ return;
320
+ const line = JSON.stringify(event);
321
+ await fs_1.promises.appendFile(file, `${line}\n`, 'utf-8');
322
+ }
323
+ emit(event) {
324
+ if (!event.at) {
325
+ event.at = new Date().toISOString();
326
+ }
327
+ if (this.onEvent) {
328
+ this.onEvent(event);
329
+ }
330
+ // Persist events only when a jobId is present in the payload and a storeDir is configured
331
+ const jobId = (event.payload && event.payload.jobId) || undefined;
332
+ if (jobId && this.storeDir) {
333
+ this.logEvent(jobId, event).catch(() => {
334
+ /* silent */
335
+ });
336
+ }
337
+ }
338
+ }
339
+ exports.JobRunner = JobRunner;
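Because persistState writes a `<storeDir>/<jobId>.json` snapshot after every task, a job can be resumed instead of replanned. A short sketch under the same assumptions as the previous example (a `runner` constructed with a storeDir):

```typescript
// First run: the snapshot is written to ./jobs/job-42.json as tasks complete.
await runner.run({ goal: 'Index the corpus' }, { jobId: 'job-42' });

// Later (for example after a process restart): reload that snapshot instead of replanning.
// Tasks already marked 'done' are skipped; only the remaining 'todo' tasks are executed.
const resumed = await runner.run({ goal: 'Index the corpus' }, { jobId: 'job-42', resume: true });
console.log(resumed.ok ? resumed.finalSummary : resumed.errorSummary.nextAction);
```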
@@ -12,13 +12,23 @@ export interface AgentReducerConfig {
12
12
  /** Default agent name to use if task doesn't specify one */
13
13
  defaultAgent: string;
14
14
  }
15
+ /**
16
+ * Options for MapLLM constructor
17
+ */
18
+ export interface MapLLMOptions {
19
+ /** Whether to execute a final reduce pass after all chunks (default: true) */
20
+ finalReduce?: boolean;
21
+ /** Threshold in bytes to trigger automatic intermediate reduce (optional) */
22
+ reduceThresholdBytes?: number;
23
+ }
15
24
  /**
16
25
  * MapLLM - Main orchestrator for hierarchical reduce
17
26
  */
18
27
  export declare class MapLLM {
19
28
  private loader;
20
29
  private agentConfig?;
21
- constructor(loader: NativeLoader);
30
+ private readonly options;
31
+ constructor(loader: NativeLoader, options?: MapLLMOptions);
22
32
  /**
23
33
  * Checks whether the loader provides agents (TaskListLoader)
24
34
  */
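The new MapLLMOptions only tune the reduce loop; the defaults reproduce the previous behaviour (final reduce enabled, no auto-reduce). A sketch, assuming NativeLoader and MapLLM are re-exported from the package root:

```typescript
import { MapLLM, MapLLMOptions, NativeLoader } from 'agentic-api';

declare const loader: NativeLoader;   // any loader, e.g. the StringNativeLoader used further below

const options: MapLLMOptions = {
  finalReduce: false,                 // skip the final reduce pass over the accumulator
  reduceThresholdBytes: 8192          // force an intermediate reduce once acc grows past ~8 KB
};

const mapper = new MapLLM(loader, options);
```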
@@ -5,13 +5,17 @@
5
5
  Object.defineProperty(exports, "__esModule", { value: true });
6
6
  exports.MapLLM = void 0;
7
7
  const execute_1 = require("../execute");
8
- const llm_1 = require("../llm");
9
8
  /**
10
9
  * MapLLM - Orchestrateur principal pour le reduce hiérarchique
11
10
  */
12
11
  class MapLLM {
13
- constructor(loader) {
12
+ constructor(loader, options) {
14
13
  this.loader = loader;
14
+ // Default options
15
+ this.options = {
16
+ finalReduce: options?.finalReduce ?? true,
17
+ reduceThresholdBytes: options?.reduceThresholdBytes ?? 0
18
+ };
15
19
  //
16
20
  // ✅ Si pas d'agentConfig fourni, essayer d'extraire depuis le loader
17
21
  if (this.hasAgents(loader)) {
@@ -53,11 +57,7 @@ class MapLLM {
53
57
  let position = 0;
54
58
  let totalChunkSize = 0;
55
59
  let totalReduce = 0;
56
- const model = (0, execute_1.modelConfig)(result.model);
57
- const openai = (0, llm_1.llmInstance)();
58
- const llm = Object.assign({}, model);
59
- llm.stream = false;
60
- delete llm.stream_options;
60
+ const modelName = result.model || 'LOW-fast';
61
61
  //
62
62
  // maxIterations is set by the callback
63
63
  while (!result.maxIterations) {
@@ -118,35 +118,23 @@ class MapLLM {
118
118
  }
119
119
  else {
120
120
  //
121
- // ══════════════════════════════════════
122
- // DOCUMENT MODE: direct openai.chat
123
- // ══════════════════════════════════════
124
- const messages = isFirstChunk ? [
125
- { role: "system", content: config.digestPrompt },
126
- { role: "user", content: chunk.content }
127
- ] : [
128
- { role: "system", content: config.digestPrompt },
129
- { role: "assistant", content: accContent },
130
- { role: "user", content: chunk.content }
121
+ // ══════════════════════════════════════════════════════════
122
+ // DOCUMENT MODE: executeQuery() with the unified Responses API
123
+ // ══════════════════════════════════════════════════════════
124
+ const messages = isFirstChunk ? [] : [
125
+ { role: "assistant", content: accContent }
131
126
  ];
132
- llm.messages = messages;
133
- //
134
- // Configure structured output if format is specified
135
- if (result.format) {
136
- llm.response_format = {
137
- type: "json_schema",
138
- json_schema: {
139
- name: result.format.name,
140
- schema: result.format.schema,
141
- strict: result.format.strict ?? true
142
- }
143
- };
144
- }
145
- const chat = await openai.chat.completions.create(llm);
146
- const digestMessage = chat.choices[0]?.message;
147
- //
148
- // Parse JSON if structured output is enabled
149
- digestContent = digestMessage.content || '';
127
+ const execResult = await (0, execute_1.executeQuery)({
128
+ query: chunk.content,
129
+ model: modelName,
130
+ instructions: config.digestPrompt,
131
+ messages,
132
+ schema: result.format ? result.format.schema : undefined,
133
+ verbose: verbose,
134
+ stdout: init.stdout || execute_1.DummyWritable
135
+ });
136
+ // executeQuery returns content - parse if structured output is enabled
137
+ digestContent = execResult.content;
150
138
  if (result.format && digestContent) {
151
139
  try {
152
140
  digestContent = JSON.parse(digestContent);
@@ -169,31 +157,31 @@ class MapLLM {
169
157
  }
170
158
  break;
171
159
  }
172
- // Reduce decision driven by the callback
173
- if (!result.continue) {
160
+ // Auto-reduce if accumulator exceeds threshold (if configured)
161
+ const accSize = typeof result.acc === 'string' ? result.acc.length : JSON.stringify(result.acc).length;
162
+ const shouldAutoReduce = this.options.reduceThresholdBytes > 0 && accSize > this.options.reduceThresholdBytes;
163
+ // Reduce decision driven by the callback or by the auto-threshold
164
+ if (!result.continue && !shouldAutoReduce) {
174
165
  continue;
175
166
  }
176
- const accForReduce = typeof result.acc === 'string' ? result.acc : JSON.stringify(result.acc);
177
- llm.messages = [
178
- { role: "system", content: config.reducePrompt },
179
- { role: "user", content: accForReduce }
180
- ];
181
- // Configure structured output if format is specified
182
- if (result.format) {
183
- llm.response_format = {
184
- type: "json_schema",
185
- json_schema: {
186
- name: result.format.name,
187
- schema: result.format.schema,
188
- strict: result.format.strict ?? true
189
- }
190
- };
167
+ if (verbose && shouldAutoReduce) {
168
+ console.log(`🔄 Auto-reduce triggered: acc size ${accSize} > threshold ${this.options.reduceThresholdBytes}`);
191
169
  }
192
- const reduce = await openai.chat.completions.create(llm);
193
- const reduceMessage = reduce.choices[0]?.message;
170
+ const accForReduce = typeof result.acc === 'string' ? result.acc : JSON.stringify(result.acc);
171
+ //
172
+ // Intermediate reduce avec executeQuery
173
+ const reduceResult = await (0, execute_1.executeQuery)({
174
+ query: accForReduce,
175
+ model: modelName,
176
+ instructions: config.reducePrompt,
177
+ messages: [],
178
+ schema: result.format ? result.format.schema : undefined,
179
+ verbose: verbose,
180
+ stdout: init.stdout || execute_1.DummyWritable
181
+ });
194
182
  //
195
183
  // should not happen
196
- if (!reduceMessage.content) {
184
+ if (!reduceResult.content) {
197
185
  continue;
198
186
  }
199
187
  // 3. Reduce with system - Update result.acc (replace)
@@ -201,15 +189,15 @@ class MapLLM {
201
189
  // Parse JSON if structured output is enabled
202
190
  if (result.format) {
203
191
  try {
204
- result.acc = JSON.parse(reduceMessage.content);
192
+ result.acc = JSON.parse(reduceResult.content);
205
193
  }
206
194
  catch (e) {
207
- console.warn('Failed to parse reduce result as JSON:', reduceMessage.content);
208
- result.acc = reduceMessage.content;
195
+ console.warn('Failed to parse reduce result as JSON:', reduceResult.content);
196
+ result.acc = reduceResult.content;
209
197
  }
210
198
  }
211
199
  else {
212
- result.acc = reduceMessage.content;
200
+ result.acc = reduceResult.content;
213
201
  }
214
202
  if (verbose) {
215
203
  console.log(`✅ Reduce ${result.metadata?.iterations} processed (${chunk.content.length} chars)`);
@@ -224,38 +212,40 @@ class MapLLM {
224
212
  throw new Error(`Failed to process chunk ${result.metadata?.iterations}: ${error}`);
225
213
  }
226
214
  }
227
- // Final reduce
228
- const finalAccContent = typeof result.acc === 'string' ? result.acc : JSON.stringify(result.acc);
229
- const messages = [
230
- { role: "system", content: config.reducePrompt },
231
- { role: "user", content: finalAccContent }
232
- ];
233
- llm.messages = messages;
234
- // Configure structured output if format is specified
235
- if (result.format) {
236
- llm.response_format = {
237
- type: "json_schema",
238
- json_schema: {
239
- name: result.format.name,
240
- schema: result.format.schema,
241
- strict: result.format.strict ?? true
215
+ // Final reduce (optional, controlled by options.finalReduce)
216
+ if (this.options.finalReduce) {
217
+ const finalAccContent = typeof result.acc === 'string' ? result.acc : JSON.stringify(result.acc);
218
+ //
219
+ // Final reduce with executeQuery
220
+ const finalResult = await (0, execute_1.executeQuery)({
221
+ query: finalAccContent,
222
+ model: modelName,
223
+ instructions: config.reducePrompt,
224
+ messages: [],
225
+ schema: result.format ? result.format.schema : undefined,
226
+ verbose: verbose,
227
+ stdout: init.stdout || execute_1.DummyWritable
228
+ });
229
+ const finalContent = finalResult.content || '';
230
+ // Parse JSON if structured output is enabled
231
+ if (result.format && finalContent) {
232
+ try {
233
+ result.acc = JSON.parse(finalContent);
234
+ }
235
+ catch (e) {
236
+ console.warn('Failed to parse final result as JSON:', finalContent);
237
+ result.acc = finalContent;
242
238
  }
243
- };
244
- }
245
- const reduce = await openai.chat.completions.create(llm);
246
- const finalContent = reduce.choices[0]?.message.content || '';
247
- // Parse JSON if structured output is enabled
248
- if (result.format && finalContent) {
249
- try {
250
- result.acc = JSON.parse(finalContent);
251
239
  }
252
- catch (e) {
253
- console.warn('Failed to parse final result as JSON:', finalContent);
240
+ else {
254
241
  result.acc = finalContent;
255
242
  }
243
+ if (verbose) {
244
+ console.log('🎯 Final reduce completed');
245
+ }
256
246
  }
257
- else {
258
- result.acc = finalContent;
247
+ else if (verbose) {
248
+ console.log('⏭️ Final reduce skipped (finalReduce=false)');
259
249
  }
260
250
  const endTime = Date.now();
261
251
  const processingTimeMs = endTime - startTime;
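The auto-reduce decision above is purely size-based: nothing happens unless a positive reduceThresholdBytes is configured. A worked illustration of the same check (size is measured as string length, exactly as in the implementation; the accumulator values are invented):

```typescript
const reduceThresholdBytes = 4096;   // from MapLLMOptions; 0 (the default) disables auto-reduce

function shouldAutoReduce(acc: string | Record<string, unknown>): boolean {
  // Size is measured as string length, mirroring the check in MapLLM above.
  const accSize = typeof acc === 'string' ? acc.length : JSON.stringify(acc).length;
  return reduceThresholdBytes > 0 && accSize > reduceThresholdBytes;
}

shouldAutoReduce({ facts: ['site A scraped', 'site B pending'] });  // ~45 chars → false, keep accumulating
shouldAutoReduce('x'.repeat(10_000));                               // 10000 chars → true, intermediate reduce
```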
@@ -1,3 +1,4 @@
1
1
  export * from './reducer.core';
2
2
  export * from './reducer.loaders';
3
3
  export * from './reducer.types';
4
+ export * from './reducer.factory';
@@ -0,0 +1,46 @@
1
+ /**
2
+ * Factory to create a ReducerFn compatible with JobRunner using MapLLM
3
+ */
4
+ import type { StructuredOutputFormat } from './reducer.types';
5
+ import type { ReducerFn } from './job.runner';
6
+ /**
7
+ * Options for createMapLLMReducer factory
8
+ */
9
+ export interface CreateMapLLMReducerOptions {
10
+ /** Prompt for digesting task + result into facts */
11
+ digestPrompt: string;
12
+ /** Prompt for reducing/fusing with previous memory */
13
+ reducePrompt: string;
14
+ /** Custom JSON schema for ReducedJobMemory (optional, uses default if not provided) */
15
+ format?: StructuredOutputFormat;
16
+ /** Model to use (default: 'LOW') */
17
+ model?: string;
18
+ /** Whether to execute final reduce pass (default: true) */
19
+ finalReduce?: boolean;
20
+ /** Threshold in bytes to trigger auto intermediate reduce (optional) */
21
+ reduceThresholdBytes?: number;
22
+ /** Enable verbose logging (default: false) */
23
+ verbose?: boolean;
24
+ }
25
+ /**
26
+ * Creates a ReducerFn compatible with JobRunner that uses MapLLM internally.
27
+ *
28
+ * This factory bridges JobRunner and MapLLM, allowing LLM-powered reduction
29
+ * with structured outputs while keeping both modules independent.
30
+ *
31
+ * @example
32
+ * ```typescript
33
+ * const reducer = createMapLLMReducer({
34
+ * digestPrompt: "Analyze this task result and extract key facts...",
35
+ * reducePrompt: "Merge with previous memory to produce updated canonical memory...",
36
+ * model: 'LOW'
37
+ * });
38
+ *
39
+ * const runner = new JobRunner({
40
+ * planner: myPlanner,
41
+ * executor: myExecutor,
42
+ * reducer: reducer // ← ReducerFn compatible
43
+ * });
44
+ * ```
45
+ */
46
+ export declare function createMapLLMReducer(options: CreateMapLLMReducerOptions): ReducerFn;
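Beyond the basic wiring shown in the doc comment above, the remaining options are forwarded to MapLLM. A sketch with placeholder prompts; the planner and executor are application-specific and only declared here:

```typescript
import { createMapLLMReducer, JobRunner, PlannerFn, ExecutorFn } from 'agentic-api';

const reducer = createMapLLMReducer({
  digestPrompt: 'Extract durable facts from this task result...',
  reducePrompt: 'Merge the facts into the canonical job memory...',
  model: 'LOW',                  // default
  finalReduce: true,             // default; forwarded to MapLLMOptions.finalReduce
  reduceThresholdBytes: 16_384,  // forwarded to MapLLMOptions.reduceThresholdBytes
  verbose: true
});

declare const planner: PlannerFn;    // application-specific
declare const executor: ExecutorFn;  // application-specific

const runner = new JobRunner({ planner, executor, reducer });
```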
@@ -0,0 +1,154 @@
1
+ "use strict";
2
+ /**
3
+ * Factory to create a ReducerFn compatible with JobRunner using MapLLM
4
+ */
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.createMapLLMReducer = createMapLLMReducer;
7
+ const reducer_core_1 = require("./reducer.core");
8
+ const reducer_loaders_1 = require("./reducer.loaders");
9
+ /**
10
+ * Default JSON schema for ReducedJobMemory
11
+ * Note: For strict mode, all properties must be in required array and
12
+ * all nested objects need additionalProperties: false
13
+ */
14
+ const DEFAULT_MEMORY_SCHEMA = {
15
+ name: 'ReducedJobMemory',
16
+ schema: {
17
+ type: 'object',
18
+ properties: {
19
+ memory: {
20
+ type: 'string',
21
+ description: 'Short canonical memory summarizing progress and key facts'
22
+ },
23
+ index: {
24
+ type: 'object',
25
+ description: 'Stable references: artifact IDs, data tables, error traces',
26
+ properties: {
27
+ artifacts: {
28
+ type: 'array',
29
+ items: { type: 'string' },
30
+ description: 'List of artifact references'
31
+ },
32
+ taskIds: {
33
+ type: 'array',
34
+ items: { type: 'string' },
35
+ description: 'List of completed task IDs'
36
+ },
37
+ errors: {
38
+ type: 'array',
39
+ items: { type: 'string' },
40
+ description: 'List of error messages'
41
+ }
42
+ },
43
+ required: ['artifacts', 'taskIds', 'errors'],
44
+ additionalProperties: false
45
+ },
46
+ statusLine: {
47
+ type: 'string',
48
+ description: 'UI progress line'
49
+ }
50
+ },
51
+ required: ['memory', 'index', 'statusLine'],
52
+ additionalProperties: false
53
+ },
54
+ strict: true
55
+ };
56
+ /**
57
+ * Creates a ReducerFn compatible with JobRunner that uses MapLLM internally.
58
+ *
59
+ * This factory bridges JobRunner and MapLLM, allowing LLM-powered reduction
60
+ * with structured outputs while keeping both modules independent.
61
+ *
62
+ * @example
63
+ * ```typescript
64
+ * const reducer = createMapLLMReducer({
65
+ * digestPrompt: "Analyze this task result and extract key facts...",
66
+ * reducePrompt: "Merge with previous memory to produce updated canonical memory...",
67
+ * model: 'LOW'
68
+ * });
69
+ *
70
+ * const runner = new JobRunner({
71
+ * planner: myPlanner,
72
+ * executor: myExecutor,
73
+ * reducer: reducer // ← ReducerFn compatible
74
+ * });
75
+ * ```
76
+ */
77
+ function createMapLLMReducer(options) {
78
+ const { digestPrompt, reducePrompt, format = DEFAULT_MEMORY_SCHEMA, model = 'LOW', finalReduce = true, reduceThresholdBytes, verbose = false } = options;
79
+ return async (previous, task, result) => {
80
+ // Serialize context for reduction
81
+ const context = JSON.stringify({
82
+ previousMemory: previous,
83
+ task: {
84
+ id: task.id,
85
+ title: task.title,
86
+ type: task.type
87
+ },
88
+ result: {
89
+ taskId: result.taskId,
90
+ ok: result.ok,
91
+ summary: result.summary,
92
+ error: result.error,
93
+ artifacts: result.artifacts,
94
+ // Include data if small enough, otherwise just note its presence
95
+ data: result.data && JSON.stringify(result.data).length < 2000
96
+ ? result.data
97
+ : (result.data ? '[data truncated]' : undefined)
98
+ }
99
+ }, null, 2);
100
+ // Create loader with single-chunk strategy (the context is already compact)
101
+ const loader = new reducer_loaders_1.StringNativeLoader(context, { type: 'paragraphs', size: 10 });
102
+ // Create MapLLM with options
103
+ const mapllmOptions = {
104
+ finalReduce,
105
+ reduceThresholdBytes
106
+ };
107
+ const mapper = new reducer_core_1.MapLLM(loader, mapllmOptions);
108
+ // Config for MapLLM
109
+ const config = {
110
+ digestPrompt,
111
+ reducePrompt
112
+ };
113
+ // Callback: accumulate structured output
114
+ const callback = (res, current) => {
115
+ // If current is already an object (structured output), use it directly
116
+ if (typeof current === 'object' && current !== null) {
117
+ res.acc = current;
118
+ }
119
+ else if (typeof current === 'string') {
120
+ // Try to parse as JSON
121
+ try {
122
+ res.acc = JSON.parse(current);
123
+ }
124
+ catch {
125
+ // Fallback: wrap in memory field
126
+ res.acc = {
127
+ memory: current,
128
+ index: res.acc?.index || {}
129
+ };
130
+ }
131
+ }
132
+ return res;
133
+ };
134
+ // Init with previous memory or empty
135
+ const init = {
136
+ acc: previous || { memory: '', index: {} },
137
+ config,
138
+ format,
139
+ model,
140
+ verbose
141
+ };
142
+ // Execute MapLLM reduce
143
+ const out = await mapper.reduce(callback, init);
144
+ // Validate and return
145
+ const result_acc = out.acc;
146
+ // Ensure required fields exist
147
+ const reducedMemory = {
148
+ memory: typeof result_acc.memory === 'string' ? result_acc.memory : JSON.stringify(result_acc.memory || ''),
149
+ index: typeof result_acc.index === 'object' ? result_acc.index : {},
150
+ statusLine: result_acc.statusLine
151
+ };
152
+ return reducedMemory;
153
+ };
154
+ }
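With the default schema above (strict mode, all fields required), a successful reduction is expected to yield an object shaped like the following; the concrete values are invented for illustration:

```typescript
import type { ReducedJobMemory } from 'agentic-api';

const example: ReducedJobMemory = {
  memory: 'Scraped 3 of 4 sources; source B hit a rate limit on the first attempt and was retried.',
  index: {
    artifacts: ['report-draft.md'],
    taskIds: ['t1', 't2'],
    errors: ['source B: HTTP 429 on first attempt']
  },
  statusLine: '2/4 tasks done'
};
```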
@@ -17,3 +17,4 @@ Object.defineProperty(exports, "__esModule", { value: true });
17
17
  __exportStar(require("./reducer.core"), exports);
18
18
  __exportStar(require("./reducer.loaders"), exports);
19
19
  __exportStar(require("./reducer.types"), exports);
20
+ __exportStar(require("./reducer.factory"), exports);
@@ -14,6 +14,7 @@ export * from './scrapper';
14
14
  export * from './agents/reducer';
15
15
  export * from './agents/semantic';
16
16
  export * from './agents/system';
17
+ export * from './agents/job.runner';
17
18
  export * from './rag';
18
19
  export * from './usecase';
19
20
  export * from './rules';
package/dist/src/index.js CHANGED
@@ -51,6 +51,7 @@ __exportStar(require("./scrapper"), exports);
51
51
  __exportStar(require("./agents/reducer"), exports);
52
52
  __exportStar(require("./agents/semantic"), exports);
53
53
  __exportStar(require("./agents/system"), exports);
54
+ __exportStar(require("./agents/job.runner"), exports);
54
55
  // RAG Library
55
56
  __exportStar(require("./rag"), exports);
56
57
  // Usecase
@@ -47,6 +47,9 @@ export declare class RAGManager {
47
47
  /**
48
48
  * Notifies loaded embeddings that they should update
49
49
  *
50
+ * This method is public so that applications can notify
51
+ * active consumers of the RAG (tools, search) that it has been updated.
52
+ *
50
53
  * @param name Name of the RAG to update
51
54
  * @param opts Options de mise à jour (action: 'rename' pour renommer un document)
52
55
  *
@@ -59,18 +62,22 @@ export declare class RAGManager {
59
62
  *
60
63
  * @example
61
64
  * ```typescript
62
- * // After a build
63
- * this.notifyUpdate('procedures-stable');
65
+ * // After an external build
66
+ * ragManager.notifyUpdate('procedures-stable');
64
67
  *
65
68
  * // After a document rename
66
- * this.notifyUpdate('procedures-stable', {
69
+ * ragManager.notifyUpdate('procedures-stable', {
67
70
  * action: 'rename',
68
71
  * oldFile: 'old.md',
69
72
  * newFile: 'new.md'
70
73
  * });
71
74
  * ```
72
75
  */
73
- private notifyUpdate;
76
+ notifyUpdate(name: string, opts?: {
77
+ action?: 'rename';
78
+ oldFile?: string;
79
+ newFile?: string;
80
+ }): void;
74
81
  /**
75
82
  * Generates an archive name with a YYYMMDD timestamp
76
83
  */
@@ -213,6 +213,9 @@ class RAGManager {
213
213
  /**
214
214
  * Notifies loaded embeddings that they should update
215
215
  *
216
+ * Cette méthode est publique pour permettre aux applications de notifier
217
+ * les utilisateurs actifs du RAG (tools, search) qu'il a été mis à jour.
218
+ *
216
219
  * @param name Nom du RAG à mettre à jour
217
220
  * @param opts Options de mise à jour (action: 'rename' pour renommer un document)
218
221
  *
@@ -225,11 +228,11 @@ class RAGManager {
225
228
  *
226
229
  * @example
227
230
  * ```typescript
228
- * // Après un build
229
- * this.notifyUpdate('procedures-stable');
231
+ * // After an external build
232
+ * ragManager.notifyUpdate('procedures-stable');
230
233
  *
231
234
  * // After a document rename
232
- * this.notifyUpdate('procedures-stable', {
235
+ * ragManager.notifyUpdate('procedures-stable', {
233
236
  * action: 'rename',
234
237
  * oldFile: 'old.md',
235
238
  * newFile: 'new.md'
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "agentic-api",
3
- "version": "2.0.585",
3
+ "version": "2.0.592",
4
4
  "description": "API pour l'orchestration d'agents intelligents avec séquences et escalades automatiques",
5
5
  "main": "dist/src/index.js",
6
6
  "types": "dist/src/index.d.ts",