@agentionai/agents 0.3.0-beta
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +517 -0
- package/dist/agents/Agent.d.ts +29 -0
- package/dist/agents/Agent.js +28 -0
- package/dist/agents/AgentConfig.d.ts +118 -0
- package/dist/agents/AgentConfig.js +3 -0
- package/dist/agents/AgentEvent.d.ts +18 -0
- package/dist/agents/AgentEvent.js +26 -0
- package/dist/agents/BaseAgent.d.ts +82 -0
- package/dist/agents/BaseAgent.js +121 -0
- package/dist/agents/anthropic/ClaudeAgent.d.ts +46 -0
- package/dist/agents/anthropic/ClaudeAgent.js +262 -0
- package/dist/agents/errors/AgentError.d.ts +47 -0
- package/dist/agents/errors/AgentError.js +74 -0
- package/dist/agents/google/GeminiAgent.d.ts +63 -0
- package/dist/agents/google/GeminiAgent.js +395 -0
- package/dist/agents/mistral/MistralAgent.d.ts +47 -0
- package/dist/agents/mistral/MistralAgent.js +313 -0
- package/dist/agents/model-types.d.ts +30 -0
- package/dist/agents/model-types.js +8 -0
- package/dist/agents/openai/OpenAiAgent.d.ts +48 -0
- package/dist/agents/openai/OpenAiAgent.js +338 -0
- package/dist/chunkers/Chunker.d.ts +53 -0
- package/dist/chunkers/Chunker.js +174 -0
- package/dist/chunkers/RecursiveChunker.d.ts +52 -0
- package/dist/chunkers/RecursiveChunker.js +166 -0
- package/dist/chunkers/TextChunker.d.ts +27 -0
- package/dist/chunkers/TextChunker.js +50 -0
- package/dist/chunkers/TokenChunker.d.ts +60 -0
- package/dist/chunkers/TokenChunker.js +176 -0
- package/dist/chunkers/index.d.ts +6 -0
- package/dist/chunkers/index.js +14 -0
- package/dist/chunkers/types.d.ts +95 -0
- package/dist/chunkers/types.js +3 -0
- package/dist/graph/AgentGraph.d.ts +99 -0
- package/dist/graph/AgentGraph.js +115 -0
- package/dist/graph/BaseExecutor.d.ts +86 -0
- package/dist/graph/BaseExecutor.js +61 -0
- package/dist/graph/GraphMetrics.d.ts +143 -0
- package/dist/graph/GraphMetrics.js +264 -0
- package/dist/graph/MapExecutor.d.ts +39 -0
- package/dist/graph/MapExecutor.js +123 -0
- package/dist/graph/ParallelExecutor.d.ts +51 -0
- package/dist/graph/ParallelExecutor.js +103 -0
- package/dist/graph/Pipeline.d.ts +44 -0
- package/dist/graph/Pipeline.js +109 -0
- package/dist/graph/RouterExecutor.d.ts +89 -0
- package/dist/graph/RouterExecutor.js +209 -0
- package/dist/graph/SequentialExecutor.d.ts +44 -0
- package/dist/graph/SequentialExecutor.js +115 -0
- package/dist/graph/VotingSystem.d.ts +54 -0
- package/dist/graph/VotingSystem.js +106 -0
- package/dist/history/History.d.ts +107 -0
- package/dist/history/History.js +166 -0
- package/dist/history/RedisHistory.d.ts +27 -0
- package/dist/history/RedisHistory.js +55 -0
- package/dist/history/transformers.d.ts +102 -0
- package/dist/history/transformers.js +415 -0
- package/dist/history/types.d.ts +130 -0
- package/dist/history/types.js +55 -0
- package/dist/index.d.ts +16 -0
- package/dist/index.js +48 -0
- package/dist/ingestion/IngestionPipeline.d.ts +86 -0
- package/dist/ingestion/IngestionPipeline.js +266 -0
- package/dist/ingestion/index.d.ts +3 -0
- package/dist/ingestion/index.js +7 -0
- package/dist/ingestion/types.d.ts +74 -0
- package/dist/ingestion/types.js +3 -0
- package/dist/team/Team.d.ts +46 -0
- package/dist/team/Team.js +104 -0
- package/dist/tools/Tool.d.ts +75 -0
- package/dist/tools/Tool.js +137 -0
- package/dist/vectorstore/Embeddings.d.ts +67 -0
- package/dist/vectorstore/Embeddings.js +54 -0
- package/dist/vectorstore/LanceDBVectorStore.d.ts +149 -0
- package/dist/vectorstore/LanceDBVectorStore.js +338 -0
- package/dist/vectorstore/OpenAIEmbeddings.d.ts +45 -0
- package/dist/vectorstore/OpenAIEmbeddings.js +109 -0
- package/dist/vectorstore/VectorStore.d.ts +255 -0
- package/dist/vectorstore/VectorStore.js +216 -0
- package/dist/vectorstore/index.d.ts +28 -0
- package/dist/vectorstore/index.js +35 -0
- package/dist/viz/VizConfig.d.ts +54 -0
- package/dist/viz/VizConfig.js +100 -0
- package/dist/viz/VizReporter.d.ts +127 -0
- package/dist/viz/VizReporter.js +595 -0
- package/dist/viz/index.d.ts +31 -0
- package/dist/viz/index.js +51 -0
- package/dist/viz/types.d.ts +105 -0
- package/dist/viz/types.js +7 -0
- package/package.json +109 -0
- package/readme.md +1 -0
|
// package/dist/ingestion/IngestionPipeline.d.ts
import { Chunk, ChunkOptions } from "../chunkers/types";
import { Chunker } from "../chunkers/Chunker";
import { Embeddings } from "../vectorstore/Embeddings";
import { VectorStore } from "../vectorstore/VectorStore";
import { IngestionOptions, IngestionResult, DocumentInput } from "./types";
/**
 * Pipeline for ingesting documents into a vector store.
 * Orchestrates the flow: chunk → batch embed → store
 *
 * @example
 * ```typescript
 * const pipeline = new IngestionPipeline(
 *   new RecursiveChunker({ chunkSize: 1000, chunkOverlap: 100 }),
 *   new OpenAIEmbeddings(),
 *   vectorStore
 * );
 *
 * const result = await pipeline.ingest(documentText, {
 *   sourceId: 'doc-123',
 *   sourcePath: '/docs/readme.md',
 *   batchSize: 50,
 *   onProgress: ({ phase, processed, total }) => {
 *     console.log(`${phase}: ${processed}/${total}`);
 *   }
 * });
 *
 * console.log(`Stored ${result.chunksStored} chunks in ${result.duration}ms`);
 * ```
 */
export declare class IngestionPipeline {
    // Splits raw document text into chunks.
    private chunker;
    // Produces embedding vectors for chunk contents (batched).
    private embeddings;
    // Destination store for the embedded chunks.
    private store;
    constructor(chunker: Chunker, embeddings: Embeddings, store: VectorStore);
    /**
     * Ingest a single document into the vector store.
     *
     * @param text - The document text to ingest
     * @param options - Chunk options and ingestion options
     * @returns Result of the ingestion operation
     */
    ingest(text: string, options?: ChunkOptions & IngestionOptions): Promise<IngestionResult>;
    /**
     * Ingest multiple documents into the vector store.
     * All documents are chunked first; the combined chunks are then embedded
     * and stored together, so batch boundaries can span documents.
     *
     * @param documents - Array of documents with their options
     * @param options - Ingestion options
     * @returns Aggregated result of all ingestions
     */
    ingestMany(documents: DocumentInput[], options?: IngestionOptions): Promise<IngestionResult>;
    /**
     * Ingest pre-chunked data into the vector store.
     * Useful when chunking is done separately.
     *
     * @param chunks - Array of chunks to ingest
     * @param options - Ingestion options
     * @returns Result of the ingestion operation
     */
    ingestChunks(chunks: Chunk[], options?: IngestionOptions): Promise<IngestionResult>;
    /**
     * Process chunks through embedding and storage (phases 2 and 3).
     */
    private processChunks;
    /**
     * Filter out chunks that already exist in the store (by hash).
     * Checks the store for existing documents with the same content hash.
     */
    private filterDuplicates;
    /**
     * Emit a progress event if callback is provided.
     */
    private emitProgress;
    /**
     * Get the chunker used by this pipeline.
     */
    getChunker(): Chunker;
    /**
     * Get the embeddings provider used by this pipeline.
     */
    getEmbeddings(): Embeddings;
    /**
     * Get the vector store used by this pipeline.
     */
    getStore(): VectorStore;
}
//# sourceMappingURL=IngestionPipeline.d.ts.map
|
@@ -0,0 +1,266 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.IngestionPipeline = void 0;
|
|
4
|
+
/**
|
|
5
|
+
* Pipeline for ingesting documents into a vector store.
|
|
6
|
+
* Orchestrates the flow: chunk → batch embed → store
|
|
7
|
+
*
|
|
8
|
+
* @example
|
|
9
|
+
* ```typescript
|
|
10
|
+
* const pipeline = new IngestionPipeline(
|
|
11
|
+
* new RecursiveChunker({ chunkSize: 1000, chunkOverlap: 100 }),
|
|
12
|
+
* new OpenAIEmbeddings(),
|
|
13
|
+
* vectorStore
|
|
14
|
+
* );
|
|
15
|
+
*
|
|
16
|
+
* const result = await pipeline.ingest(documentText, {
|
|
17
|
+
* sourceId: 'doc-123',
|
|
18
|
+
* sourcePath: '/docs/readme.md',
|
|
19
|
+
* batchSize: 50,
|
|
20
|
+
* onProgress: ({ phase, processed, total }) => {
|
|
21
|
+
* console.log(`${phase}: ${processed}/${total}`);
|
|
22
|
+
* }
|
|
23
|
+
* });
|
|
24
|
+
*
|
|
25
|
+
* console.log(`Stored ${result.chunksStored} chunks in ${result.duration}ms`);
|
|
26
|
+
* ```
|
|
27
|
+
*/
|
|
28
|
+
class IngestionPipeline {
|
|
29
|
+
constructor(chunker, embeddings, store) {
|
|
30
|
+
this.chunker = chunker;
|
|
31
|
+
this.embeddings = embeddings;
|
|
32
|
+
this.store = store;
|
|
33
|
+
}
|
|
34
|
+
/**
|
|
35
|
+
* Ingest a single document into the vector store.
|
|
36
|
+
*
|
|
37
|
+
* @param text - The document text to ingest
|
|
38
|
+
* @param options - Chunk options and ingestion options
|
|
39
|
+
* @returns Result of the ingestion operation
|
|
40
|
+
*/
|
|
41
|
+
async ingest(text, options) {
|
|
42
|
+
const startTime = Date.now();
|
|
43
|
+
const chunkOptions = {
|
|
44
|
+
sourceId: options?.sourceId,
|
|
45
|
+
sourcePath: options?.sourcePath,
|
|
46
|
+
metadata: options?.metadata,
|
|
47
|
+
};
|
|
48
|
+
const ingestionOptions = {
|
|
49
|
+
batchSize: options?.batchSize,
|
|
50
|
+
onProgress: options?.onProgress,
|
|
51
|
+
onError: options?.onError,
|
|
52
|
+
skipDuplicates: options?.skipDuplicates,
|
|
53
|
+
};
|
|
54
|
+
// Phase 1: Chunking
|
|
55
|
+
this.emitProgress(ingestionOptions.onProgress, {
|
|
56
|
+
phase: "chunking",
|
|
57
|
+
processed: 0,
|
|
58
|
+
total: 1,
|
|
59
|
+
});
|
|
60
|
+
const chunks = await this.chunker.chunk(text, chunkOptions);
|
|
61
|
+
this.emitProgress(ingestionOptions.onProgress, {
|
|
62
|
+
phase: "chunking",
|
|
63
|
+
processed: 1,
|
|
64
|
+
total: 1,
|
|
65
|
+
});
|
|
66
|
+
// Process the chunks
|
|
67
|
+
return this.processChunks(chunks, ingestionOptions, startTime);
|
|
68
|
+
}
|
|
69
|
+
/**
|
|
70
|
+
* Ingest multiple documents into the vector store.
|
|
71
|
+
*
|
|
72
|
+
* @param documents - Array of documents with their options
|
|
73
|
+
* @param options - Ingestion options
|
|
74
|
+
* @returns Aggregated result of all ingestions
|
|
75
|
+
*/
|
|
76
|
+
async ingestMany(documents, options) {
|
|
77
|
+
const startTime = Date.now();
|
|
78
|
+
// Phase 1: Chunk all documents
|
|
79
|
+
this.emitProgress(options?.onProgress, {
|
|
80
|
+
phase: "chunking",
|
|
81
|
+
processed: 0,
|
|
82
|
+
total: documents.length,
|
|
83
|
+
});
|
|
84
|
+
const allChunks = [];
|
|
85
|
+
for (let i = 0; i < documents.length; i++) {
|
|
86
|
+
const doc = documents[i];
|
|
87
|
+
const chunks = await this.chunker.chunk(doc.text, doc.options);
|
|
88
|
+
allChunks.push(...chunks);
|
|
89
|
+
this.emitProgress(options?.onProgress, {
|
|
90
|
+
phase: "chunking",
|
|
91
|
+
processed: i + 1,
|
|
92
|
+
total: documents.length,
|
|
93
|
+
});
|
|
94
|
+
}
|
|
95
|
+
// Process all chunks together
|
|
96
|
+
return this.processChunks(allChunks, options ?? {}, startTime);
|
|
97
|
+
}
|
|
98
|
+
/**
|
|
99
|
+
* Ingest pre-chunked data into the vector store.
|
|
100
|
+
* Useful when chunking is done separately.
|
|
101
|
+
*
|
|
102
|
+
* @param chunks - Array of chunks to ingest
|
|
103
|
+
* @param options - Ingestion options
|
|
104
|
+
* @returns Result of the ingestion operation
|
|
105
|
+
*/
|
|
106
|
+
async ingestChunks(chunks, options) {
|
|
107
|
+
const startTime = Date.now();
|
|
108
|
+
return this.processChunks(chunks, options ?? {}, startTime);
|
|
109
|
+
}
|
|
110
|
+
/**
|
|
111
|
+
* Process chunks through embedding and storage.
|
|
112
|
+
*/
|
|
113
|
+
async processChunks(chunks, options, startTime) {
|
|
114
|
+
const { batchSize = 100, onProgress, onError, skipDuplicates = false, } = options;
|
|
115
|
+
const result = {
|
|
116
|
+
success: true,
|
|
117
|
+
chunksProcessed: chunks.length,
|
|
118
|
+
chunksSkipped: 0,
|
|
119
|
+
chunksStored: 0,
|
|
120
|
+
errors: [],
|
|
121
|
+
duration: 0,
|
|
122
|
+
};
|
|
123
|
+
if (chunks.length === 0) {
|
|
124
|
+
result.duration = Date.now() - startTime;
|
|
125
|
+
return result;
|
|
126
|
+
}
|
|
127
|
+
// Filter duplicates if enabled
|
|
128
|
+
let chunksToProcess = chunks;
|
|
129
|
+
if (skipDuplicates) {
|
|
130
|
+
chunksToProcess = await this.filterDuplicates(chunks);
|
|
131
|
+
result.chunksSkipped = chunks.length - chunksToProcess.length;
|
|
132
|
+
}
|
|
133
|
+
if (chunksToProcess.length === 0) {
|
|
134
|
+
result.duration = Date.now() - startTime;
|
|
135
|
+
return result;
|
|
136
|
+
}
|
|
137
|
+
// Calculate batches
|
|
138
|
+
const totalBatches = Math.ceil(chunksToProcess.length / batchSize);
|
|
139
|
+
// Phase 2 & 3: Embed and store in batches
|
|
140
|
+
for (let batchIndex = 0; batchIndex < totalBatches; batchIndex++) {
|
|
141
|
+
const batchStart = batchIndex * batchSize;
|
|
142
|
+
const batchEnd = Math.min(batchStart + batchSize, chunksToProcess.length);
|
|
143
|
+
const batch = chunksToProcess.slice(batchStart, batchEnd);
|
|
144
|
+
// Embed batch
|
|
145
|
+
this.emitProgress(onProgress, {
|
|
146
|
+
phase: "embedding",
|
|
147
|
+
processed: batchStart,
|
|
148
|
+
total: chunksToProcess.length,
|
|
149
|
+
currentBatch: batchIndex + 1,
|
|
150
|
+
totalBatches,
|
|
151
|
+
});
|
|
152
|
+
let embeddings;
|
|
153
|
+
try {
|
|
154
|
+
embeddings = await this.embeddings.embed(batch.map((c) => c.content));
|
|
155
|
+
}
|
|
156
|
+
catch (error) {
|
|
157
|
+
// Handle embedding error for entire batch
|
|
158
|
+
for (const chunk of batch) {
|
|
159
|
+
if (onError) {
|
|
160
|
+
const action = onError(error, chunk);
|
|
161
|
+
if (action === "abort") {
|
|
162
|
+
result.success = false;
|
|
163
|
+
result.duration = Date.now() - startTime;
|
|
164
|
+
return result;
|
|
165
|
+
}
|
|
166
|
+
}
|
|
167
|
+
result.errors.push({ chunk, error: error });
|
|
168
|
+
}
|
|
169
|
+
continue;
|
|
170
|
+
}
|
|
171
|
+
// Create embedded documents
|
|
172
|
+
const embeddedDocs = batch.map((chunk, i) => ({
|
|
173
|
+
id: chunk.id,
|
|
174
|
+
content: chunk.content,
|
|
175
|
+
metadata: chunk.metadata,
|
|
176
|
+
embedding: embeddings[i],
|
|
177
|
+
}));
|
|
178
|
+
// Store batch
|
|
179
|
+
this.emitProgress(onProgress, {
|
|
180
|
+
phase: "storing",
|
|
181
|
+
processed: batchStart,
|
|
182
|
+
total: chunksToProcess.length,
|
|
183
|
+
currentBatch: batchIndex + 1,
|
|
184
|
+
totalBatches,
|
|
185
|
+
});
|
|
186
|
+
try {
|
|
187
|
+
await this.store.addEmbeddedDocuments(embeddedDocs);
|
|
188
|
+
result.chunksStored += embeddedDocs.length;
|
|
189
|
+
}
|
|
190
|
+
catch (error) {
|
|
191
|
+
// Try storing one by one to identify problematic chunks
|
|
192
|
+
for (let i = 0; i < embeddedDocs.length; i++) {
|
|
193
|
+
try {
|
|
194
|
+
await this.store.addEmbeddedDocuments([embeddedDocs[i]]);
|
|
195
|
+
result.chunksStored++;
|
|
196
|
+
}
|
|
197
|
+
catch (chunkError) {
|
|
198
|
+
const chunk = batch[i];
|
|
199
|
+
if (onError) {
|
|
200
|
+
const action = onError(chunkError, chunk);
|
|
201
|
+
if (action === "abort") {
|
|
202
|
+
result.success = false;
|
|
203
|
+
result.duration = Date.now() - startTime;
|
|
204
|
+
return result;
|
|
205
|
+
}
|
|
206
|
+
}
|
|
207
|
+
result.errors.push({ chunk, error: chunkError });
|
|
208
|
+
}
|
|
209
|
+
}
|
|
210
|
+
}
|
|
211
|
+
}
|
|
212
|
+
// Final progress
|
|
213
|
+
this.emitProgress(onProgress, {
|
|
214
|
+
phase: "storing",
|
|
215
|
+
processed: chunksToProcess.length,
|
|
216
|
+
total: chunksToProcess.length,
|
|
217
|
+
currentBatch: totalBatches,
|
|
218
|
+
totalBatches,
|
|
219
|
+
});
|
|
220
|
+
result.duration = Date.now() - startTime;
|
|
221
|
+
return result;
|
|
222
|
+
}
|
|
223
|
+
/**
|
|
224
|
+
* Filter out chunks that already exist in the store (by hash).
|
|
225
|
+
* Checks the store for existing documents with the same content hash.
|
|
226
|
+
*/
|
|
227
|
+
async filterDuplicates(chunks) {
|
|
228
|
+
if (chunks.length === 0) {
|
|
229
|
+
return chunks;
|
|
230
|
+
}
|
|
231
|
+
// Extract all hashes from chunks
|
|
232
|
+
const hashes = chunks.map((chunk) => chunk.metadata.hash);
|
|
233
|
+
// Check which hashes already exist in the store
|
|
234
|
+
const existingHashes = await this.store.getByHashes(hashes);
|
|
235
|
+
// Filter out chunks whose hashes exist
|
|
236
|
+
return chunks.filter((chunk) => !existingHashes.has(chunk.metadata.hash));
|
|
237
|
+
}
|
|
238
|
+
/**
|
|
239
|
+
* Emit a progress event if callback is provided.
|
|
240
|
+
*/
|
|
241
|
+
emitProgress(callback, event) {
|
|
242
|
+
if (callback) {
|
|
243
|
+
callback(event);
|
|
244
|
+
}
|
|
245
|
+
}
|
|
246
|
+
/**
|
|
247
|
+
* Get the chunker used by this pipeline.
|
|
248
|
+
*/
|
|
249
|
+
getChunker() {
|
|
250
|
+
return this.chunker;
|
|
251
|
+
}
|
|
252
|
+
/**
|
|
253
|
+
* Get the embeddings provider used by this pipeline.
|
|
254
|
+
*/
|
|
255
|
+
getEmbeddings() {
|
|
256
|
+
return this.embeddings;
|
|
257
|
+
}
|
|
258
|
+
/**
|
|
259
|
+
* Get the vector store used by this pipeline.
|
|
260
|
+
*/
|
|
261
|
+
getStore() {
|
|
262
|
+
return this.store;
|
|
263
|
+
}
|
|
264
|
+
}
|
|
265
|
+
exports.IngestionPipeline = IngestionPipeline;
|
|
266
|
+
//# sourceMappingURL=IngestionPipeline.js.map
|
|
// package/dist/ingestion/index.js
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.IngestionPipeline = void 0;
// Pipeline
// Re-exported through a getter so the exported binding stays live
// (standard tsc CommonJS output for `export { IngestionPipeline }`).
var IngestionPipeline_1 = require("./IngestionPipeline");
Object.defineProperty(exports, "IngestionPipeline", { enumerable: true, get: function () { return IngestionPipeline_1.IngestionPipeline; } });
//# sourceMappingURL=index.js.map
// package/dist/ingestion/types.d.ts
import { Chunk, ChunkOptions } from "../chunkers/types";
/**
 * Progress event emitted during ingestion.
 */
export interface IngestionProgressEvent {
    /** Current phase of ingestion */
    phase: "chunking" | "embedding" | "storing";
    /** Number of items processed in this phase */
    processed: number;
    /** Total number of items in this phase */
    total: number;
    /** Current batch number, 1-based (for embedding/storing phases) */
    currentBatch?: number;
    /** Total number of batches */
    totalBatches?: number;
}
/**
 * Options for the ingestion pipeline.
 */
export interface IngestionOptions {
    /**
     * Number of chunks to process per embedding batch.
     * Larger batches are more efficient but use more memory.
     * @default 100
     */
    batchSize?: number;
    /**
     * Callback for progress updates.
     */
    onProgress?: (event: IngestionProgressEvent) => void;
    /**
     * Error handling strategy.
     * - 'skip': Skip the failed chunk and continue
     * - 'abort': Stop the entire ingestion process
     * @returns The action to take
     */
    onError?: (error: Error, chunk: Chunk) => "skip" | "abort";
    /**
     * Whether to skip chunks with hashes that already exist in the store.
     * Requires the store to support hash-based lookup.
     * @default false
     */
    skipDuplicates?: boolean;
}
/**
 * Result of an ingestion operation.
 */
export interface IngestionResult {
    /** Whether the ingestion completed without aborting */
    success: boolean;
    /** Total number of chunks that were processed */
    chunksProcessed: number;
    /** Number of chunks skipped (duplicates or filtered) */
    chunksSkipped: number;
    /** Number of chunks successfully stored */
    chunksStored: number;
    /** Array of errors encountered during ingestion */
    errors: Array<{
        chunk: Chunk;
        error: Error;
    }>;
    /** Total time taken in milliseconds */
    duration: number;
}
/**
 * Document input for batch ingestion.
 */
export interface DocumentInput {
    /** The text content to ingest */
    text: string;
    /** Options for this specific document */
    options?: ChunkOptions;
}
//# sourceMappingURL=types.d.ts.map
// package/dist/team/Team.d.ts
import { Tool } from "../tools/Tool";
import { BaseAgent, BaseAgentConfig } from "../agents/BaseAgent";
import EventEmitter from "node:events";
export type TeamConfig = {
    name: string;
    agents: BaseAgent[];
    leadAgent: BaseAgent;
    delegationPrompt?: string;
};
/**
 * A team consists of a number of agents.
 *
 * Todo:
 * [ ] events from a team for tools and agents.
 * [x] add a team lead agent who delegates work
 * [x] Option for team members to work together. Tool.fromAgent
 * [ ] add agents purely from text config
 * [ ] keep track of the number of tokens being used
 */
export declare class Team extends EventEmitter {
    // All tools available to the team (member-delegation tools plus
    // every tool owned by any member agent).
    protected tools: Tool<any>[];
    // The agent that receives tasks and delegates to members.
    protected leadAgent: BaseAgent;
    protected agents: BaseAgent[];
    // System prompt given to the lead agent describing how to delegate.
    protected delegationPrompt: string;
    protected name: string;
    constructor(config: TeamConfig);
    private get memberAgents();
    private setupTeam;
    addAgent(newAgent: BaseAgent): void;
    /**
     * Execute a task with the team by delegating to the lead agent
     * @param input The input for the task
     * @returns The result from the lead agent
     */
    execute(input: string): Promise<any>;
    /**
     * Get the lead agent
     */
    getLeadAgent(): BaseAgent;
    /**
     * Get all tools available to the team
     */
    getTools(): Tool<any>[];
    fromConfig(config: BaseAgentConfig[]): void;
}
//# sourceMappingURL=Team.d.ts.map
// package/dist/team/Team.js
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.Team = void 0;
const Tool_1 = require("../tools/Tool");
const node_events_1 = __importDefault(require("node:events"));
const Agent_1 = require("../agents/Agent");
/**
 * A team consists of a number of agents.
 *
 * Todo:
 * [ ] events from a team for tools and agents.
 * [x] add a team lead agent who delegates work
 * [x] Option for team members to work together. Tool.fromAgent
 * [ ] add agents purely from text config
 * [ ] keep track of the number of tokens being used
 */
class Team extends node_events_1.default {
    constructor(config) {
        super();
        this.tools = [];
        this.agents = [];
        this.leadAgent = config.leadAgent;
        this.agents = config.agents || [];
        this.name = config.name;
        this.delegationPrompt =
            config.delegationPrompt ||
                "You are the team leader. You can delegate tasks to your team members when appropriate. " +
                    "Analyze the task and decide whether to handle it yourself or delegate to a team member with the right expertise.";
        this.setupTeam();
        // Re-emit every tool-result event on the team itself so listeners
        // only have to subscribe once.
        this.tools.forEach((tool) => tool.addListener(Tool_1.ToolResultEvent.RESULT, (...args) => {
            this.emit(Tool_1.ToolResultEvent.RESULT, ...args);
        }));
    }
    // Every agent except the lead (compared by id, not identity).
    get memberAgents() {
        return this.agents.filter((agent) => agent.getId() !== this.leadAgent.getId());
    }
    setupTeam() {
        // Convert member agents to tools that the lead agent can use
        const memberTools = this.memberAgents.map((agent) => Tool_1.Tool.fromAgent(agent, `Delegate to this team member`));
        this.tools.push(...memberTools);
        this.leadAgent.addTools(memberTools);
        // Collect all tools from all agents (deduplicated by identity).
        this.agents.forEach((agent) => {
            agent.getTools().forEach((tool) => {
                if (!this.tools.includes(tool)) {
                    this.tools.push(tool);
                }
            });
        });
        // Emit initial events
        this.emit("leadAgentSet", this.leadAgent);
        this.emit("memberAgentsUpdated", [...this.memberAgents]);
        this.emit("toolsUpdated", [...this.tools]);
    }
    /**
     * Add an agent to the team if it is not already a member.
     * BUG FIX: the original condition was inverted — it pushed the agent only
     * when another agent with the SAME id already existed, so genuinely new
     * agents were never added and duplicates were. Now the agent is added
     * only when absent.
     *
     * NOTE(review): this does not create a delegation tool for the new agent
     * (setupTeam is not re-run) — confirm whether that is intended.
     */
    addAgent(newAgent) {
        if (!this.agents.some((agent) => agent.getId() === newAgent.getId())) {
            this.agents.push(newAgent);
        }
        this.emit("agentsUpdated", [...this.agents]);
    }
    /**
     * Execute a task with the team by delegating to the lead agent
     * @param input The input for the task
     * @returns The result from the lead agent
     */
    async execute(input) {
        try {
            const result = await this.leadAgent.execute(input);
            return result;
        }
        catch (error) {
            // Rethrow so callers decide how to handle task failure.
            throw error;
        }
    }
    /**
     * Get the lead agent
     */
    getLeadAgent() {
        return this.leadAgent;
    }
    /**
     * Get all tools available to the team (defensive copy).
     */
    getTools() {
        return [...this.tools];
    }
    // Build and add agents from plain config objects via Agent.create.
    fromConfig(config) {
        config.forEach((agentConfig) => {
            this.addAgent(Agent_1.Agent.create(agentConfig));
        });
    }
}
exports.Team = Team;
//# sourceMappingURL=Team.js.map
// package/dist/tools/Tool.d.ts
import EventEmitter from "events";
import { BaseAgent, AgentVendor } from "../agents/BaseAgent";
// JSON-schema-style description of a tool's input object.
export interface ToolInputSchema {
    type: "object";
    properties: Record<string, any>;
    required?: string[] | undefined;
}
// Wire-format tool definition (snake_case `input_schema`) as sent to LLM vendors.
export interface ToolDefinition {
    name: string;
    description: string;
    input_schema: {
        type: "object";
        properties: Record<string, any>;
        required?: string[];
    };
}
export interface ToolConfig<T> {
    name: string;
    description: string;
    inputSchema: ToolInputSchema;
    // The function run when the tool is invoked; receives the parsed input
    // and the optional context object supplied at construction.
    execute: (input: any, context?: Record<string, any> | null) => Promise<T>;
    context?: Record<string, any>;
}
// Event emitted when a tool is about to execute; listeners may cancel
// execution via preventDefault().
export declare class ToolEvent {
    target: Tool<any>;
    input: Record<string, any>;
    id: string;
    agentId: string;
    agentName: string;
    static EXECUTE: string;
    private defaultPrevented;
    constructor(target: Tool<any>, input: Record<string, any>, id: string, agentId: string, agentName: string);
    preventDefault(): void;
    get isDefaultPrevented(): boolean;
}
// Event emitted after a tool has produced a result.
export declare class ToolResultEvent extends ToolEvent {
    target: Tool<any>;
    input: Record<string, any>;
    id: string;
    result: any;
    agentId: string;
    agentName: string;
    eventName: string;
    static RESULT: string;
    constructor(target: Tool<any>, input: Record<string, any>, id: string, result: any, agentId: string, agentName: string);
}
/**
 * Tools are used to retrieve additional information for LLMs, so they can provide better results. Examples could be
 * Retrieving weather information, stock prices or specific price information.
 *
 * @param T Generic. Format of the tool result
 */
export declare class Tool<T> extends EventEmitter {
    protected executeFn: (input: unknown, context: Record<string, any> | null) => Promise<T>;
    name: string;
    protected description: string;
    protected context: Record<string, any> | null;
    protected schema: ToolInputSchema;
    /**
     * Agents can act as assistants to other agents. This static method creates a tool
     * that delegates its input to the given agent.
     * @param agent The agent that will act as an assistant
     * @param description The description of the tool presented to the delegating agent
     * @returns Tool
     */
    static fromAgent(agent: BaseAgent, description: string): Tool<string>;
    constructor(config: ToolConfig<T>);
    execute(agentId: string, agentName: string, input: Record<string, any>, id: string, agentModel?: string, agentVendor?: AgentVendor): Promise<T>;
    getPrompt(_vendor?: string): {
        name: string;
        description: string;
        input_schema: ToolInputSchema;
    };
}
//# sourceMappingURL=Tool.d.ts.map