opc-agent 0.7.0 → 0.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/channels/email.d.ts +69 -0
- package/dist/channels/email.js +118 -0
- package/dist/channels/slack.d.ts +62 -0
- package/dist/channels/slack.js +107 -0
- package/dist/channels/wechat.d.ts +62 -0
- package/dist/channels/wechat.js +104 -0
- package/dist/cli.js +45 -17
- package/dist/core/analytics-engine.d.ts +51 -0
- package/dist/core/analytics-engine.js +186 -0
- package/dist/core/cache.d.ts +47 -0
- package/dist/core/cache.js +156 -0
- package/dist/core/compose.d.ts +35 -0
- package/dist/core/compose.js +49 -0
- package/dist/core/orchestrator.d.ts +68 -0
- package/dist/core/orchestrator.js +145 -0
- package/dist/core/rate-limiter.d.ts +47 -0
- package/dist/core/rate-limiter.js +92 -0
- package/dist/i18n/index.d.ts +6 -1
- package/dist/i18n/index.js +86 -0
- package/dist/index.d.ts +24 -0
- package/dist/index.js +39 -1
- package/dist/templates/data-analyst.d.ts +53 -0
- package/dist/templates/data-analyst.js +70 -0
- package/dist/templates/teacher.d.ts +58 -0
- package/dist/templates/teacher.js +78 -0
- package/dist/testing/index.d.ts +37 -0
- package/dist/testing/index.js +176 -0
- package/dist/tools/calculator.d.ts +7 -0
- package/dist/tools/calculator.js +70 -0
- package/dist/tools/datetime.d.ts +7 -0
- package/dist/tools/datetime.js +159 -0
- package/dist/tools/json-transform.d.ts +7 -0
- package/dist/tools/json-transform.js +184 -0
- package/dist/tools/text-analysis.d.ts +8 -0
- package/dist/tools/text-analysis.js +113 -0
- package/docs/.vitepress/config.ts +92 -0
- package/docs/api/cli.md +48 -0
- package/docs/api/sdk.md +80 -0
- package/docs/guide/configuration.md +79 -0
- package/docs/guide/deployment.md +42 -0
- package/docs/guide/testing.md +84 -0
- package/docs/index.md +27 -0
- package/docs/zh/api/oad-schema.md +3 -0
- package/docs/zh/guide/concepts.md +28 -0
- package/docs/zh/guide/configuration.md +39 -0
- package/docs/zh/guide/deployment.md +3 -0
- package/docs/zh/guide/getting-started.md +58 -0
- package/docs/zh/guide/templates.md +22 -0
- package/docs/zh/guide/testing.md +18 -0
- package/docs/zh/index.md +27 -0
- package/package.json +7 -3
- package/src/channels/email.ts +177 -0
- package/src/channels/slack.ts +160 -0
- package/src/channels/wechat.ts +149 -0
- package/src/cli.ts +45 -19
- package/src/core/analytics-engine.ts +186 -0
- package/src/core/cache.ts +141 -0
- package/src/core/compose.ts +77 -0
- package/src/core/orchestrator.ts +215 -0
- package/src/core/rate-limiter.ts +128 -0
- package/src/i18n/index.ts +87 -1
- package/src/index.ts +28 -0
- package/src/templates/data-analyst.ts +70 -0
- package/src/templates/teacher.ts +79 -0
- package/src/testing/index.ts +181 -0
- package/src/tools/calculator.ts +73 -0
- package/src/tools/datetime.ts +149 -0
- package/src/tools/json-transform.ts +187 -0
- package/src/tools/text-analysis.ts +116 -0
|
@@ -0,0 +1,186 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
|
+
exports.AnalyticsEngine = void 0;
|
|
37
|
+
/**
|
|
38
|
+
* Analytics Engine - Persistent analytics with JSON file storage.
|
|
39
|
+
* Tracks every message, LLM call, tool use, and error with timestamps.
|
|
40
|
+
*/
|
|
41
|
+
const fs = __importStar(require("fs"));
|
|
42
|
+
const path = __importStar(require("path"));
|
|
43
|
+
class AnalyticsEngine {
    // Absolute path of the `data/` directory that holds the analytics file.
    dataDir;
    // Full path of the persisted JSON event log.
    eventsFile;
    // In-memory event list; mirrored to disk on every track() call.
    events = [];
    /**
     * @param {string} [dataDir='.'] - Base directory; events persist in `<dataDir>/data/analytics.json`.
     */
    constructor(dataDir = '.') {
        this.dataDir = path.resolve(dataDir, 'data');
        this.eventsFile = path.join(this.dataDir, 'analytics.json');
        this.load();
    }
    /** Load persisted events; any read/parse failure resets to an empty log. */
    load() {
        try {
            if (fs.existsSync(this.eventsFile)) {
                const raw = fs.readFileSync(this.eventsFile, 'utf-8');
                const parsed = JSON.parse(raw);
                // Fix: guard against a corrupted/foreign file. Anything that is not
                // an array would make later .filter()/.slice() calls throw.
                this.events = Array.isArray(parsed) ? parsed : [];
            }
        }
        catch {
            this.events = [];
        }
    }
    /** Persist the event log, trimming it to the most recent 10000 events. */
    save() {
        if (!fs.existsSync(this.dataDir)) {
            fs.mkdirSync(this.dataDir, { recursive: true });
        }
        // Keep last 10000 events to prevent unbounded growth
        if (this.events.length > 10000) {
            this.events = this.events.slice(-10000);
        }
        fs.writeFileSync(this.eventsFile, JSON.stringify(this.events, null, 2));
    }
    /**
     * Record a raw event and persist immediately.
     * @param {string} type - Event type ('message' | 'llm_call' | 'tool_use' | 'error').
     * @param {object} data - Arbitrary event payload.
     */
    track(type, data) {
        this.events.push({ type, timestamp: Date.now(), data });
        this.save();
    }
    /** Record one handled user message. */
    trackMessage(userId, responseTimeMs, tokensIn, tokensOut) {
        this.track('message', { userId, responseTimeMs, tokensIn, tokensOut });
    }
    /** Record one LLM API call. */
    trackLLMCall(provider, model, tokensIn, tokensOut, latencyMs) {
        this.track('llm_call', { provider, model, tokensIn, tokensOut, latencyMs });
    }
    /** Record one tool invocation. */
    trackToolUse(toolName, success, latencyMs) {
        this.track('tool_use', { toolName, success, latencyMs });
    }
    /** Record an error with optional free-form context. */
    trackError(error, context) {
        this.track('error', { error, context });
    }
    /**
     * Aggregate statistics over the given time window (defaults to all history).
     * @param {number} [fromTs] - Window start (epoch ms, inclusive); defaults to 0.
     * @param {number} [toTs] - Window end (epoch ms, inclusive); defaults to now.
     * @returns {object} Totals, averages, token counts, top skills/errors, and per-day message counts.
     */
    getStats(fromTs, toTs) {
        const now = Date.now();
        const from = fromTs ?? 0;
        const to = toTs ?? now;
        const filtered = this.events.filter(e => e.timestamp >= from && e.timestamp <= to);
        const messages = filtered.filter(e => e.type === 'message');
        const llmCalls = filtered.filter(e => e.type === 'llm_call');
        const toolUses = filtered.filter(e => e.type === 'tool_use');
        const errors = filtered.filter(e => e.type === 'error');
        // Avg response time
        const totalResponseTime = messages.reduce((sum, e) => sum + (e.data.responseTimeMs ?? 0), 0);
        const avgResponseTimeMs = messages.length > 0 ? Math.round(totalResponseTime / messages.length) : 0;
        // Total tokens
        const totalTokensIn = llmCalls.reduce((sum, e) => sum + (e.data.tokensIn ?? 0), 0);
        const totalTokensOut = llmCalls.reduce((sum, e) => sum + (e.data.tokensOut ?? 0), 0);
        // Top skills (from tool_use)
        const skillCounts = {};
        for (const e of toolUses) {
            const name = e.data.toolName ?? 'unknown';
            skillCounts[name] = (skillCounts[name] ?? 0) + 1;
        }
        const topSkills = Object.entries(skillCounts)
            .sort((a, b) => b[1] - a[1])
            .slice(0, 10)
            .map(([name, count]) => ({ name, count }));
        // Top errors
        const errorCounts = {};
        for (const e of errors) {
            const msg = e.data.error ?? 'unknown';
            errorCounts[msg] = (errorCounts[msg] ?? 0) + 1;
        }
        const topErrors = Object.entries(errorCounts)
            .sort((a, b) => b[1] - a[1])
            .slice(0, 10)
            .map(([message, count]) => ({ message, count }));
        // Messages per day
        const messagesPerDay = {};
        for (const e of messages) {
            const day = new Date(e.timestamp).toISOString().slice(0, 10);
            messagesPerDay[day] = (messagesPerDay[day] ?? 0) + 1;
        }
        return {
            totalMessages: messages.length,
            totalLLMCalls: llmCalls.length,
            totalToolUses: toolUses.length,
            totalErrors: errors.length,
            avgResponseTimeMs,
            totalTokens: { input: totalTokensIn, output: totalTokensOut, total: totalTokensIn + totalTokensOut },
            topSkills,
            topErrors,
            messagesPerDay,
            period: { from, to },
        };
    }
    /** Return the most recent `limit` events (default 50). */
    getRecentEvents(limit = 50) {
        return this.events.slice(-limit);
    }
    /** Drop all events and persist the empty log. */
    clear() {
        this.events = [];
        this.save();
    }
    /**
     * Format stats for CLI display.
     */
    static formatStats(stats) {
        const lines = [];
        lines.push('');
        lines.push('══════════════════════════════════════════');
        lines.push('  OPC Agent Analytics');
        lines.push('══════════════════════════════════════════');
        lines.push('');
        lines.push(`  📨 Messages:       ${stats.totalMessages}`);
        lines.push(`  🤖 LLM Calls:      ${stats.totalLLMCalls}`);
        lines.push(`  🔧 Tool Uses:      ${stats.totalToolUses}`);
        lines.push(`  ❌ Errors:         ${stats.totalErrors}`);
        lines.push(`  ⏱  Avg Response:   ${stats.avgResponseTimeMs}ms`);
        lines.push(`  🪙 Tokens:         ${stats.totalTokens.total} (in: ${stats.totalTokens.input}, out: ${stats.totalTokens.output})`);
        lines.push('');
        if (stats.topSkills.length > 0) {
            lines.push('  Top Skills:');
            for (const s of stats.topSkills.slice(0, 5)) {
                lines.push(`    • ${s.name}: ${s.count}`);
            }
            lines.push('');
        }
        if (stats.topErrors.length > 0) {
            lines.push('  Top Errors:');
            for (const e of stats.topErrors.slice(0, 3)) {
                lines.push(`    • ${e.message}: ${e.count}`);
            }
            lines.push('');
        }
        lines.push('──────────────────────────────────────────');
        return lines.join('\n');
    }
}
|
|
185
|
+
exports.AnalyticsEngine = AnalyticsEngine;
|
|
186
|
+
//# sourceMappingURL=analytics-engine.js.map
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
/** A single cached LLM response together with its bookkeeping metadata. */
export interface CacheEntry {
    /** Cache key (hash of the request; see LLMCache.makeKey). */
    key: string;
    /** The cached response text. */
    value: string;
    /** Creation time (epoch milliseconds); used for expiry and eviction ordering. */
    createdAt: number;
    /** Time-to-live for this entry, in milliseconds. */
    ttlMs: number;
    /** Number of times this entry has been served from cache. */
    hits: number;
}
/** Construction options for LLMCache; every field has a default. */
export interface CacheConfig {
    /** When false, get/set become no-ops. */
    enabled: boolean;
    /** Default time-to-live for new entries, in milliseconds. */
    ttlMs: number;
    /** Maximum number of entries kept; the oldest is evicted at capacity. */
    maxEntries: number;
    /** Base directory; entries persist under `<dataDir>/data/cache.json`. */
    dataDir: string;
}
/** Caching layer for LLM responses: TTL expiry, oldest-entry eviction, JSON persistence. */
export declare class LLMCache {
    private cache;
    private config;
    private filePath;
    private stats;
    constructor(config?: Partial<CacheConfig>);
    private load;
    private save;
    private isExpired;
    /**
     * Generate a cache key from messages and system prompt.
     */
    static makeKey(messages: Array<{
        role: string;
        content: string;
    }>, systemPrompt?: string): string;
    /**
     * Get a cached response. Returns null if not found or expired.
     */
    get(key: string): string | null;
    /**
     * Set a cached response.
     */
    set(key: string, value: string, ttlMs?: number): void;
    /** Hit/miss/eviction counters plus current size and a formatted hit rate. */
    getStats(): {
        hits: number;
        misses: number;
        evictions: number;
        size: number;
        hitRate: string;
    };
    /** Remove all entries and reset the counters. */
    clear(): void;
}
//# sourceMappingURL=cache.d.ts.map
|
|
@@ -0,0 +1,156 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
|
+
exports.LLMCache = void 0;
|
|
37
|
+
/**
|
|
38
|
+
* Caching Layer - Cache LLM responses with configurable TTL.
|
|
39
|
+
* Hash-based key from input messages + system prompt.
|
|
40
|
+
*/
|
|
41
|
+
const fs = __importStar(require("fs"));
|
|
42
|
+
const path = __importStar(require("path"));
|
|
43
|
+
const crypto = __importStar(require("crypto"));
|
|
44
|
+
class LLMCache {
    cache = new Map();
    config;
    filePath;
    stats = { hits: 0, misses: 0, evictions: 0 };
    /**
     * @param {object} [config] - Partial configuration; unset fields fall back to
     *   defaults (enabled, 1-hour TTL, 1000 entries, data dir '.').
     */
    constructor(config) {
        const given = config ?? {};
        this.config = {
            enabled: given.enabled ?? true,
            ttlMs: given.ttlMs ?? 3600_000, // 1 hour default
            maxEntries: given.maxEntries ?? 1000,
            dataDir: given.dataDir ?? '.',
        };
        this.filePath = path.join(this.config.dataDir, 'data', 'cache.json');
        this.load();
    }
    /** Restore still-fresh entries from disk; missing or unreadable files yield an empty cache. */
    load() {
        try {
            if (!fs.existsSync(this.filePath)) {
                return;
            }
            const stored = JSON.parse(fs.readFileSync(this.filePath, 'utf-8'));
            for (const entry of stored) {
                if (!this.isExpired(entry)) {
                    this.cache.set(entry.key, entry);
                }
            }
        }
        catch {
            // ignore
        }
    }
    /** Write every entry to disk, creating the data directory on first use. */
    save() {
        const dir = path.dirname(this.filePath);
        if (!fs.existsSync(dir)) {
            fs.mkdirSync(dir, { recursive: true });
        }
        fs.writeFileSync(this.filePath, JSON.stringify([...this.cache.values()]));
    }
    /** An entry is stale once its age exceeds its TTL. */
    isExpired(entry) {
        const ageMs = Date.now() - entry.createdAt;
        return ageMs > entry.ttlMs;
    }
    /**
     * Generate a cache key from messages and system prompt.
     */
    static makeKey(messages, systemPrompt) {
        const normalized = messages.map(({ role, content }) => ({ role, content }));
        const payload = JSON.stringify({ systemPrompt, messages: normalized });
        return crypto.createHash('sha256').update(payload).digest('hex').slice(0, 16);
    }
    /**
     * Get a cached response. Returns null if not found or expired.
     */
    get(key) {
        if (!this.config.enabled) {
            return null;
        }
        const entry = this.cache.get(key);
        if (entry !== undefined && !this.isExpired(entry)) {
            entry.hits++;
            this.stats.hits++;
            return entry.value;
        }
        if (entry !== undefined) {
            // Stale entry: drop it and count the eviction before recording the miss.
            this.cache.delete(key);
            this.stats.evictions++;
        }
        this.stats.misses++;
        return null;
    }
    /**
     * Set a cached response.
     */
    set(key, value, ttlMs) {
        if (!this.config.enabled) {
            return;
        }
        if (this.cache.size >= this.config.maxEntries) {
            this.evictOldest();
        }
        const entry = {
            key,
            value,
            createdAt: Date.now(),
            ttlMs: ttlMs ?? this.config.ttlMs,
            hits: 0,
        };
        this.cache.set(key, entry);
        this.save();
    }
    /** Remove the entry with the earliest createdAt to make room for a new one. */
    evictOldest() {
        let victimKey = null;
        let victimTime = Infinity;
        for (const [k, entry] of this.cache) {
            if (entry.createdAt < victimTime) {
                victimTime = entry.createdAt;
                victimKey = k;
            }
        }
        if (victimKey) {
            this.cache.delete(victimKey);
            this.stats.evictions++;
        }
    }
    /** Hit/miss/eviction counters plus current size and a formatted hit rate. */
    getStats() {
        const lookups = this.stats.hits + this.stats.misses;
        const hitRate = lookups > 0
            ? `${((this.stats.hits / lookups) * 100).toFixed(1)}%`
            : '0%';
        return { ...this.stats, size: this.cache.size, hitRate };
    }
    /** Empty the cache, reset counters, and persist the empty state. */
    clear() {
        this.cache.clear();
        this.stats = { hits: 0, misses: 0, evictions: 0 };
        this.save();
    }
}
|
|
155
|
+
exports.LLMCache = LLMCache;
|
|
156
|
+
//# sourceMappingURL=cache.js.map
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import type { AgentContext, Message } from './types';
|
|
2
|
+
/**
|
|
3
|
+
* Agent Composition — v0.8.0
|
|
4
|
+
* Combine multiple agents into a pipeline: Agent A output → Agent B input.
|
|
5
|
+
* Configurable in OAD: `compose: [agent-a, agent-b]`
|
|
6
|
+
*/
|
|
7
|
+
/** Async function that turns an incoming message into a response, given the agent context. */
export type AgentHandler = (context: AgentContext, message: Message) => Promise<Message>;
/** A named agent that can participate in a pipeline. */
export interface ComposableAgent {
    /** Unique agent identifier (passed to transform callbacks and used in timeout errors). */
    id: string;
    /** Human-readable agent name. */
    name: string;
    /** Handler invoked with the previous stage's output message. */
    handler: AgentHandler;
}
/** Optional behavior tweaks for a pipeline run. */
export interface ComposeOptions {
    /** Stop pipeline if any agent returns empty content */
    stopOnEmpty?: boolean;
    /** Transform output between agents */
    transform?: (output: Message, nextAgentId: string) => Message;
    /** Timeout per agent in ms */
    timeoutMs?: number;
}
/** Sequential agent pipeline: each stage's output becomes the next stage's input. */
export declare class AgentPipeline {
    private agents;
    private options;
    constructor(agents: ComposableAgent[], options?: ComposeOptions);
    /** Run the pipeline sequentially: each agent's output becomes the next agent's input */
    execute(context: AgentContext, initialMessage: Message): Promise<Message>;
    /** Get the pipeline agent IDs in order */
    getAgentIds(): string[];
}
/**
 * Create a pipeline from an array of composable agents.
 * Usage in OAD: `compose: [agent-a, agent-b, agent-c]`
 */
export declare function compose(agents: ComposableAgent[], options?: ComposeOptions): AgentPipeline;
//# sourceMappingURL=compose.d.ts.map
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.AgentPipeline = void 0;
|
|
4
|
+
exports.compose = compose;
|
|
5
|
+
class AgentPipeline {
    // Agents executed in array order.
    agents = [];
    // Pipeline behavior tweaks (stopOnEmpty, transform, timeoutMs).
    options;
    /**
     * @param {Array} agents - Composable agents run in order.
     * @param {object} [options={}] - Optional ComposeOptions.
     */
    constructor(agents, options = {}) {
        this.agents = agents;
        this.options = options;
    }
    /** Run the pipeline sequentially: each agent's output becomes the next agent's input */
    async execute(context, initialMessage) {
        let currentMessage = initialMessage;
        for (const agent of this.agents) {
            if (this.options.stopOnEmpty && !currentMessage.content.trim()) {
                break;
            }
            // Apply transform if provided
            if (this.options.transform) {
                currentMessage = this.options.transform(currentMessage, agent.id);
            }
            currentMessage = await this.runAgent(agent, context, currentMessage);
        }
        return currentMessage;
    }
    /**
     * Execute one agent, optionally racing it against `options.timeoutMs`.
     * Fix: the original never cleared the timeout timer, so every fast agent left
     * a live timer pending for up to `timeoutMs` (keeping the event loop alive).
     * @throws {Error} `Agent <id> timed out` when the handler exceeds the timeout.
     */
    async runAgent(agent, context, message) {
        if (!this.options.timeoutMs) {
            return agent.handler(context, message);
        }
        let timer;
        try {
            return await Promise.race([
                agent.handler(context, message),
                new Promise((_, reject) => {
                    timer = setTimeout(() => reject(new Error(`Agent ${agent.id} timed out`)), this.options.timeoutMs);
                }),
            ]);
        }
        finally {
            clearTimeout(timer);
        }
    }
    /** Get the pipeline agent IDs in order */
    getAgentIds() {
        return this.agents.map((a) => a.id);
    }
}
|
|
41
|
+
exports.AgentPipeline = AgentPipeline;
|
|
42
|
+
/**
 * Create a pipeline from an array of composable agents.
 * Usage in OAD: `compose: [agent-a, agent-b, agent-c]`
 * @param {Array} agents - Agents executed in order.
 * @param {object} [options] - Optional pipeline behavior tweaks.
 * @returns {AgentPipeline} A ready-to-execute pipeline.
 */
function compose(agents, options) {
    const pipeline = new AgentPipeline(agents, options);
    return pipeline;
}
|
|
49
|
+
//# sourceMappingURL=compose.js.map
|
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
import { EventEmitter } from 'events';
|
|
2
|
+
import type { AgentContext, Message } from './types';
|
|
3
|
+
/**
|
|
4
|
+
* Multi-Agent Orchestrator — v0.8.0
|
|
5
|
+
* Routes messages to specialized sub-agents, supports parallel execution and handoffs.
|
|
6
|
+
*/
|
|
7
|
+
/** A routable sub-agent registered with the orchestrator. */
export interface AgentNode {
    /** Unique agent identifier. */
    id: string;
    /** Human-readable agent name. */
    name: string;
    /** Short description of what this agent does. */
    description: string;
    /** Patterns or intents this agent handles */
    routes: string[];
    /** Function that processes a message and returns a response */
    handler: (context: AgentContext, message: Message) => Promise<Message>;
    /** Priority for routing conflicts (higher wins) */
    priority?: number;
}
/** A named execution plan: sequential steps, a parallel fan-out, and/or a content router. */
export interface OrchestratorWorkflow {
    name: string;
    description?: string;
    /** Ordered list of agent IDs for sequential execution */
    steps?: string[];
    /** List of agent IDs for parallel execution */
    parallel?: string[];
    /** Router config: auto-route based on message content */
    router?: {
        agents: string[];
        /** Agent ID used when no router agent matches. */
        fallback?: string;
    };
}
/** A request to transfer the conversation from one agent to another. */
export interface HandoffRequest {
    fromAgent: string;
    toAgent: string;
    /** Conversation context; its last message is delivered to the target agent. */
    context: AgentContext;
    reason: string;
}
/** Orchestrator construction options. */
export interface OrchestratorConfig {
    agents: AgentNode[];
    workflows?: OrchestratorWorkflow[];
    /** Workflow run by process() when set; otherwise messages are routed to a single agent. */
    defaultWorkflow?: string;
    /** Maximum number of agents executed concurrently (default 5). */
    maxParallel?: number;
}
/** Multi-agent orchestrator: routes, sequences, parallelizes, and hands off messages. */
export declare class Orchestrator extends EventEmitter {
    private agents;
    private workflows;
    private defaultWorkflow?;
    private maxParallel;
    constructor(config: OrchestratorConfig);
    /** Register a new agent node */
    registerAgent(agent: AgentNode): void;
    /** Unregister an agent */
    unregisterAgent(id: string): void;
    /** Route a message to the best-matching agent */
    route(message: Message): AgentNode | undefined;
    /** Execute a single agent */
    executeAgent(agentId: string, context: AgentContext, message: Message): Promise<Message>;
    /** Run multiple agents in parallel */
    executeParallel(agentIds: string[], context: AgentContext, message: Message): Promise<Map<string, Message>>;
    /** Execute a named workflow */
    executeWorkflow(workflowName: string, context: AgentContext, message: Message): Promise<Message[]>;
    /** Hand off conversation from one agent to another */
    handoff(request: HandoffRequest): Promise<Message>;
    /** Process an incoming message using the default workflow or routing */
    process(context: AgentContext, message: Message): Promise<Message[]>;
    getAgents(): AgentNode[];
    getWorkflows(): OrchestratorWorkflow[];
}
//# sourceMappingURL=orchestrator.d.ts.map
|
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.Orchestrator = void 0;
|
|
4
|
+
const events_1 = require("events");
|
|
5
|
+
class Orchestrator extends events_1.EventEmitter {
    // Registered agents keyed by id.
    agents = new Map();
    // Named workflows keyed by name.
    workflows = new Map();
    // Workflow used by process() when set.
    defaultWorkflow;
    // Max concurrent agents per parallel batch.
    maxParallel;
    /**
     * @param {object} config - OrchestratorConfig: agents, optional workflows,
     *   defaultWorkflow name, and maxParallel (default 5).
     */
    constructor(config) {
        super();
        this.maxParallel = config.maxParallel ?? 5;
        this.defaultWorkflow = config.defaultWorkflow;
        for (const agent of config.agents) {
            this.agents.set(agent.id, agent);
        }
        for (const wf of config.workflows ?? []) {
            this.workflows.set(wf.name, wf);
        }
    }
    /** Register a new agent node */
    registerAgent(agent) {
        this.agents.set(agent.id, agent);
        this.emit('agent:registered', agent.id);
    }
    /** Unregister an agent */
    unregisterAgent(id) {
        this.agents.delete(id);
        this.emit('agent:unregistered', id);
    }
    /** Route a message to the best-matching agent (highest priority wins) */
    route(message) {
        const content = message.content.toLowerCase();
        let bestMatch;
        let bestPriority = -1;
        for (const agent of this.agents.values()) {
            for (const route of agent.routes) {
                if (this.matchesRoute(route, content)) {
                    const priority = agent.priority ?? 0;
                    if (priority > bestPriority) {
                        bestMatch = agent;
                        bestPriority = priority;
                    }
                    break;
                }
            }
        }
        return bestMatch;
    }
    /**
     * True when `route` matches `content` as a case-insensitive substring or as
     * a regular expression.
     * Fix: the original called `new RegExp(route, 'i')` unguarded, so a route
     * that is a plain keyword but not valid regex (e.g. "c++") threw a
     * SyntaxError and crashed all routing.
     */
    matchesRoute(route, content) {
        if (content.includes(route.toLowerCase())) {
            return true;
        }
        try {
            return new RegExp(route, 'i').test(content);
        }
        catch {
            return false; // route is not a valid regex — treat as non-matching keyword
        }
    }
    /** Execute a single agent */
    async executeAgent(agentId, context, message) {
        const agent = this.agents.get(agentId);
        if (!agent)
            throw new Error(`Agent not found: ${agentId}`);
        this.emit('agent:execute', agentId, message);
        const result = await agent.handler(context, message);
        this.emit('agent:complete', agentId, result);
        return result;
    }
    /** Run multiple agents in parallel */
    async executeParallel(agentIds, context, message) {
        const results = new Map();
        const batches = [];
        // Batch by maxParallel
        for (let i = 0; i < agentIds.length; i += this.maxParallel) {
            batches.push(agentIds.slice(i, i + this.maxParallel));
        }
        for (const batch of batches) {
            const promises = batch.map(async (id) => {
                const result = await this.executeAgent(id, context, message);
                results.set(id, result);
            });
            await Promise.all(promises);
        }
        return results;
    }
    /** Execute a named workflow */
    async executeWorkflow(workflowName, context, message) {
        const wf = this.workflows.get(workflowName);
        if (!wf)
            throw new Error(`Workflow not found: ${workflowName}`);
        const results = [];
        // Sequential steps
        if (wf.steps) {
            let currentMessage = message;
            for (const agentId of wf.steps) {
                const result = await this.executeAgent(agentId, context, currentMessage);
                results.push(result);
                currentMessage = result; // chain output → next input
            }
        }
        // Parallel execution
        if (wf.parallel) {
            const parallelResults = await this.executeParallel(wf.parallel, context, message);
            results.push(...parallelResults.values());
        }
        // Router-based
        if (wf.router) {
            const matched = this.route(message);
            const targetId = matched && wf.router.agents.includes(matched.id)
                ? matched.id
                : wf.router.fallback;
            if (targetId) {
                const result = await this.executeAgent(targetId, context, message);
                results.push(result);
            }
        }
        return results;
    }
    /** Hand off conversation from one agent to another */
    async handoff(request) {
        this.emit('agent:handoff', request);
        const { toAgent, context } = request;
        const lastMessage = context.messages[context.messages.length - 1];
        if (!lastMessage)
            throw new Error('No message in context for handoff');
        return this.executeAgent(toAgent, context, lastMessage);
    }
    /** Process an incoming message using the default workflow or routing */
    async process(context, message) {
        if (this.defaultWorkflow) {
            return this.executeWorkflow(this.defaultWorkflow, context, message);
        }
        // Fallback: route to single agent
        const agent = this.route(message);
        if (agent) {
            const result = await this.executeAgent(agent.id, context, message);
            return [result];
        }
        return [{
                id: `orch-${Date.now()}`,
                role: 'assistant',
                content: 'No agent available to handle this request.',
                timestamp: Date.now(),
            }];
    }
    /** All registered agents. */
    getAgents() {
        return Array.from(this.agents.values());
    }
    /** All configured workflows. */
    getWorkflows() {
        return Array.from(this.workflows.values());
    }
}
|
|
144
|
+
exports.Orchestrator = Orchestrator;
|
|
145
|
+
//# sourceMappingURL=orchestrator.js.map
|