agentdb 1.0.0 → 1.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +104 -0
- package/README.md +5 -5
- package/bin/agentdb.js +296 -65
- package/dist/mcp/learning/core/experience-buffer.d.ts +61 -0
- package/dist/mcp/learning/core/experience-buffer.d.ts.map +1 -0
- package/dist/mcp/learning/core/experience-buffer.js +175 -0
- package/dist/mcp/learning/core/experience-buffer.js.map +1 -0
- package/dist/mcp/learning/core/experience-buffer.mjs +170 -0
- package/dist/mcp/learning/core/experience-recorder.d.ts +40 -0
- package/dist/mcp/learning/core/experience-recorder.d.ts.map +1 -0
- package/dist/mcp/learning/core/experience-recorder.js +200 -0
- package/dist/mcp/learning/core/experience-recorder.js.map +1 -0
- package/dist/mcp/learning/core/experience-recorder.mjs +195 -0
- package/dist/mcp/learning/core/learning-manager.d.ts +66 -0
- package/dist/mcp/learning/core/learning-manager.d.ts.map +1 -0
- package/dist/mcp/learning/core/learning-manager.js +252 -0
- package/dist/mcp/learning/core/learning-manager.js.map +1 -0
- package/dist/mcp/learning/core/learning-manager.mjs +247 -0
- package/dist/mcp/learning/core/policy-optimizer.d.ts +53 -0
- package/dist/mcp/learning/core/policy-optimizer.d.ts.map +1 -0
- package/dist/mcp/learning/core/policy-optimizer.js +251 -0
- package/dist/mcp/learning/core/policy-optimizer.js.map +1 -0
- package/dist/mcp/learning/core/policy-optimizer.mjs +246 -0
- package/dist/mcp/learning/core/reward-estimator.d.ts +44 -0
- package/dist/mcp/learning/core/reward-estimator.d.ts.map +1 -0
- package/dist/mcp/learning/core/reward-estimator.js +158 -0
- package/dist/mcp/learning/core/reward-estimator.js.map +1 -0
- package/dist/mcp/learning/core/reward-estimator.mjs +153 -0
- package/dist/mcp/learning/core/session-manager.d.ts +63 -0
- package/dist/mcp/learning/core/session-manager.d.ts.map +1 -0
- package/dist/mcp/learning/core/session-manager.js +202 -0
- package/dist/mcp/learning/core/session-manager.js.map +1 -0
- package/dist/mcp/learning/core/session-manager.mjs +197 -0
- package/dist/mcp/learning/index.d.ts +19 -0
- package/dist/mcp/learning/index.d.ts.map +1 -0
- package/dist/mcp/learning/index.js +30 -0
- package/dist/mcp/learning/index.js.map +1 -0
- package/dist/mcp/learning/index.mjs +19 -0
- package/dist/mcp/learning/tools/mcp-learning-tools.d.ts +369 -0
- package/dist/mcp/learning/tools/mcp-learning-tools.d.ts.map +1 -0
- package/dist/mcp/learning/tools/mcp-learning-tools.js +361 -0
- package/dist/mcp/learning/tools/mcp-learning-tools.js.map +1 -0
- package/dist/mcp/learning/tools/mcp-learning-tools.mjs +356 -0
- package/dist/mcp/learning/types/index.d.ts +138 -0
- package/dist/mcp/learning/types/index.d.ts.map +1 -0
- package/dist/mcp/learning/types/index.js +6 -0
- package/dist/mcp/learning/types/index.js.map +1 -0
- package/dist/mcp/learning/types/index.mjs +4 -0
- package/dist/mcp-server.d.ts +2 -0
- package/dist/mcp-server.d.ts.map +1 -1
- package/dist/mcp-server.js +72 -4
- package/dist/mcp-server.js.map +1 -1
- package/dist/mcp-server.mjs +72 -4
- package/dist/wasm/sql-wasm-debug.js +6989 -0
- package/dist/wasm/sql-wasm-debug.wasm +0 -0
- package/dist/wasm/sql-wasm.js +188 -0
- package/dist/wasm/sql-wasm.wasm +0 -0
- package/dist/wasm-loader.d.ts.map +1 -1
- package/dist/wasm-loader.js +5 -2
- package/dist/wasm-loader.js.map +1 -1
- package/dist/wasm-loader.mjs +5 -2
- package/examples/mcp-learning-example.ts +220 -0
- package/package.json +26 -5
|
@@ -0,0 +1,175 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* ExperienceBuffer - Manages experience replay buffer with prioritization
|
|
4
|
+
*/
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.ExperienceBuffer = void 0;
|
|
7
|
+
class ExperienceBuffer {
    /**
     * @param {number} [maxSize=10000] - Maximum number of experiences retained in memory.
     */
    constructor(maxSize = 10000) {
        this.buffer = [];
        this.priorities = new Map();
        this.maxSize = maxSize;
    }
    /**
     * Add an experience to the buffer and record its sampling priority.
     * Prunes the lowest-priority entries once the buffer exceeds maxSize.
     * @param {object} experience - Must carry `reward`, `timestamp`, and `metadata`
     *   (with optional `metadata.actionId` used as the priority key).
     */
    add(experience) {
        this.buffer.push(experience);
        // Calculate priority based on reward and recency
        const priority = this.calculatePriority(experience);
        const actionId = experience.metadata.actionId || experience.timestamp.toString();
        this.priorities.set(actionId, priority);
        // Prune if buffer exceeds max size
        if (this.buffer.length > this.maxSize) {
            this.prune();
        }
    }
    /**
     * Sample a uniformly random batch (with replacement).
     * @param {number} batchSize
     * @returns {object[]} min(batchSize, buffer length) experiences; [] when empty.
     */
    sample(batchSize) {
        if (this.buffer.length === 0) {
            return [];
        }
        const samples = [];
        const size = Math.min(batchSize, this.buffer.length);
        for (let i = 0; i < size; i++) {
            const idx = Math.floor(Math.random() * this.buffer.length);
            samples.push(this.buffer[idx]);
        }
        return samples;
    }
    /**
     * Sample a batch with prioritized experience replay.
     * @param {number} batchSize
     * @param {number} [alpha=0.6] - Priority exponent (0 = uniform, 1 = fully proportional).
     * @returns {object[]} Exactly min(batchSize, buffer length) experiences.
     */
    samplePrioritized(batchSize, alpha = 0.6) {
        if (this.buffer.length === 0) {
            return [];
        }
        const size = Math.min(batchSize, this.buffer.length);
        const samples = [];
        // Calculate probability distribution based on priorities
        const priorities = this.buffer.map((exp) => {
            const actionId = exp.metadata.actionId || exp.timestamp.toString();
            const priority = this.priorities.get(actionId) || 1.0;
            return Math.pow(priority, alpha);
        });
        const totalPriority = priorities.reduce((sum, p) => sum + p, 0);
        const probabilities = priorities.map((p) => p / totalPriority);
        // Sample by inverse-CDF over the cumulative distribution
        for (let i = 0; i < size; i++) {
            const rand = Math.random();
            let cumulative = 0;
            // BUGFIX: floating-point rounding can leave the cumulative sum just
            // below 1; previously no bucket matched in that case and the sample
            // was silently dropped, returning fewer than `size` items. Fall back
            // to the last experience so the batch is always full.
            let chosen = this.buffer[this.buffer.length - 1];
            for (let j = 0; j < probabilities.length; j++) {
                cumulative += probabilities[j];
                if (rand <= cumulative) {
                    chosen = this.buffer[j];
                    break;
                }
            }
            samples.push(chosen);
        }
        return samples;
    }
    /**
     * Get the most recently added experiences (insertion order preserved).
     * @param {number} count
     */
    getRecent(count) {
        const start = Math.max(0, this.buffer.length - count);
        return this.buffer.slice(start);
    }
    /**
     * Get the highest-reward experiences, best first.
     * @param {number} count
     */
    getTopRewarded(count) {
        const sorted = [...this.buffer].sort((a, b) => b.reward - a.reward);
        return sorted.slice(0, count);
    }
    /**
     * Get all experiences whose metadata.taskType matches.
     * @param {string} taskType
     */
    getByTaskType(taskType) {
        return this.buffer.filter((exp) => exp.metadata.taskType === taskType);
    }
    /**
     * Summary statistics over the buffered rewards and task types.
     * @returns {{size: number, avgReward: number, maxReward: number, minReward: number, taskDistribution: Object<string, number>}}
     */
    getStats() {
        if (this.buffer.length === 0) {
            return {
                size: 0,
                avgReward: 0,
                maxReward: 0,
                minReward: 0,
                taskDistribution: {},
            };
        }
        const rewards = this.buffer.map((exp) => exp.reward);
        const avgReward = rewards.reduce((sum, r) => sum + r, 0) / rewards.length;
        const maxReward = Math.max(...rewards);
        const minReward = Math.min(...rewards);
        const taskDistribution = {};
        for (const exp of this.buffer) {
            const taskType = exp.metadata.taskType;
            taskDistribution[taskType] = (taskDistribution[taskType] || 0) + 1;
        }
        return {
            size: this.buffer.length,
            avgReward,
            maxReward,
            minReward,
            taskDistribution,
        };
    }
    /**
     * Remove all experiences and priorities.
     */
    clear() {
        this.buffer = [];
        this.priorities.clear();
    }
    /**
     * Current number of buffered experiences.
     */
    size() {
        return this.buffer.length;
    }
    /**
     * Calculate priority for an experience: 0.7 * |reward| plus 0.3 * a recency
     * factor that decays as (now - timestamp) grows (1e6 ms scale).
     */
    calculatePriority(experience) {
        // Priority based on:
        // 1. Reward magnitude (higher reward = higher priority)
        // 2. Recency (more recent = higher priority)
        const rewardComponent = Math.abs(experience.reward);
        const recencyComponent = 1.0 / (1.0 + (Date.now() - experience.timestamp) / 1000000);
        // Simple priority: weighted sum
        return rewardComponent * 0.7 + recencyComponent * 0.3;
    }
    /**
     * Prune the buffer back to maxSize by dropping the lowest-priority entries.
     */
    prune() {
        // Rank experiences by stored priority (unknown priority sorts last).
        const withPriorities = this.buffer.map((exp) => {
            const actionId = exp.metadata.actionId || exp.timestamp.toString();
            const priority = this.priorities.get(actionId) || 0;
            return { experience: exp, priority };
        });
        withPriorities.sort((a, b) => b.priority - a.priority);
        // Keep top maxSize experiences
        const kept = withPriorities
            .slice(0, this.maxSize)
            .map((item) => item.experience);
        // BUGFIX: re-sort survivors chronologically; the priority sort above
        // would otherwise reorder the buffer and break getRecent(), which
        // relies on insertion (timestamp) order.
        kept.sort((a, b) => a.timestamp - b.timestamp);
        this.buffer = kept;
        // Clean up priorities map for evicted experiences.
        const validActionIds = new Set(this.buffer.map((exp) => exp.metadata.actionId || exp.timestamp.toString()));
        for (const actionId of this.priorities.keys()) {
            if (!validActionIds.has(actionId)) {
                this.priorities.delete(actionId);
            }
        }
    }
}
|
|
174
|
+
// Bind the class declared above to the CommonJS export surface.
exports.ExperienceBuffer = ExperienceBuffer;
//# sourceMappingURL=experience-buffer.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"experience-buffer.js","sourceRoot":"","sources":["../../../../src/mcp/learning/core/experience-buffer.ts"],"names":[],"mappings":";AAAA;;GAEG;;;AAIH,MAAa,gBAAgB;IAK3B,YAAY,UAAkB,KAAK;QAJ3B,WAAM,GAAiB,EAAE,CAAC;QAE1B,eAAU,GAAwB,IAAI,GAAG,EAAE,CAAC;QAGlD,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;IACzB,CAAC;IAED;;OAEG;IACH,GAAG,CAAC,UAAsB;QACxB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;QAE7B,iDAAiD;QACjD,MAAM,QAAQ,GAAG,IAAI,CAAC,iBAAiB,CAAC,UAAU,CAAC,CAAC;QACpD,MAAM,QAAQ,GAAG,UAAU,CAAC,QAAQ,CAAC,QAAQ,IAAI,UAAU,CAAC,SAAS,CAAC,QAAQ,EAAE,CAAC;QACjF,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC;QAExC,mCAAmC;QACnC,IAAI,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,OAAO,EAAE,CAAC;YACtC,IAAI,CAAC,KAAK,EAAE,CAAC;QACf,CAAC;IACH,CAAC;IAED;;OAEG;IACH,MAAM,CAAC,SAAiB;QACtB,IAAI,IAAI,CAAC,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YAC7B,OAAO,EAAE,CAAC;QACZ,CAAC;QAED,MAAM,OAAO,GAAiB,EAAE,CAAC;QACjC,MAAM,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;QAErD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,EAAE,CAAC,EAAE,EAAE,CAAC;YAC9B,MAAM,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;YAC3D,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;QACjC,CAAC;QAED,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;OAEG;IACH,iBAAiB,CAAC,SAAiB,EAAE,QAAgB,GAAG;QACtD,IAAI,IAAI,CAAC,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YAC7B,OAAO,EAAE,CAAC;QACZ,CAAC;QAED,MAAM,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;QACrD,MAAM,OAAO,GAAiB,EAAE,CAAC;QAEjC,yDAAyD;QACzD,MAAM,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE;YACzC,MAAM,QAAQ,GAAG,GAAG,CAAC,QAAQ,CAAC,QAAQ,IAAI,GAAG,CAAC,SAAS,CAAC,QAAQ,EAAE,CAAC;YACnE,MAAM,QAAQ,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,GAAG,CAAC;YACtD,OAAO,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;QACnC,CAAC,CAAC,CAAC;QAEH,MAAM,aAAa,GAAG,UAAU,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,CAAC,EAAE,EAAE,CAAC,GAAG,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC;QAChE,MAAM,aAAa,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC,
CAAC,EAAE,EAAE,CAAC,CAAC,GAAG,aAAa,CAAC,CAAC;QAE/D,wCAAwC;QACxC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,EAAE,CAAC,EAAE,EAAE,CAAC;YAC9B,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC;YAC3B,IAAI,UAAU,GAAG,CAAC,CAAC;YAEnB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,aAAa,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;gBAC9C,UAAU,IAAI,aAAa,CAAC,CAAC,CAAC,CAAC;gBAC/B,IAAI,IAAI,IAAI,UAAU,EAAE,CAAC;oBACvB,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;oBAC7B,MAAM;gBACR,CAAC;YACH,CAAC;QACH,CAAC;QAED,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;OAEG;IACH,SAAS,CAAC,KAAa;QACrB,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,KAAK,CAAC,CAAC;QACtD,OAAO,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAClC,CAAC;IAED;;OAEG;IACH,cAAc,CAAC,KAAa;QAC1B,MAAM,MAAM,GAAG,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC;QACpE,OAAO,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC;IAChC,CAAC;IAED;;OAEG;IACH,aAAa,CAAC,QAAgB;QAC5B,OAAO,IAAI,CAAC,MAAM,CAAC,MAAM,CACvB,CAAC,GAAG,EAAE,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,QAAQ,KAAK,QAAQ,CAC5C,CAAC;IACJ,CAAC;IAED;;OAEG;IACH,QAAQ;QAON,IAAI,IAAI,CAAC,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YAC7B,OAAO;gBACL,IAAI,EAAE,CAAC;gBACP,SAAS,EAAE,CAAC;gBACZ,SAAS,EAAE,CAAC;gBACZ,SAAS,EAAE,CAAC;gBACZ,gBAAgB,EAAE,EAAE;aACrB,CAAC;QACJ,CAAC;QAED,MAAM,OAAO,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;QACrD,MAAM,SAAS,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,CAAC,EAAE,EAAE,CAAC,GAAG,GAAG,CAAC,EAAE,CAAC,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC;QAC1E,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,OAAO,CAAC,CAAC;QACvC,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,OAAO,CAAC,CAAC;QAEvC,MAAM,gBAAgB,GAA2B,EAAE,CAAC;QACpD,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;YAC9B,MAAM,QAAQ,GAAG,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC;YACvC,gBAAgB,CAAC,QAAQ,CAAC,GAAG,CAAC,gBAAgB,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC;QACrE,CAAC;QAED,OAAO;YACL,IAAI,EAAE,IAAI,CAAC,MAAM,CAAC,MAAM;YACxB,SAAS;YACT,SAAS;YACT,SAAS;YACT
,gBAAgB;SACjB,CAAC;IACJ,CAAC;IAED;;OAEG;IACH,KAAK;QACH,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC;QACjB,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;IAC1B,CAAC;IAED;;OAEG;IACH,IAAI;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC;IAC5B,CAAC;IAED;;OAEG;IACK,iBAAiB,CAAC,UAAsB;QAC9C,qBAAqB;QACrB,wDAAwD;QACxD,6CAA6C;QAC7C,oDAAoD;QAEpD,MAAM,eAAe,GAAG,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;QACpD,MAAM,gBAAgB,GAAG,GAAG,GAAG,CAAC,GAAG,GAAG,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,UAAU,CAAC,SAAS,CAAC,GAAG,OAAO,CAAC,CAAC;QAErF,gCAAgC;QAChC,OAAO,eAAe,GAAG,GAAG,GAAG,gBAAgB,GAAG,GAAG,CAAC;IACxD,CAAC;IAED;;OAEG;IACK,KAAK;QACX,+CAA+C;QAC/C,MAAM,cAAc,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE;YAC7C,MAAM,QAAQ,GAAG,GAAG,CAAC,QAAQ,CAAC,QAAQ,IAAI,GAAG,CAAC,SAAS,CAAC,QAAQ,EAAE,CAAC;YACnE,MAAM,QAAQ,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;YACpD,OAAO,EAAE,UAAU,EAAE,GAAG,EAAE,QAAQ,EAAE,CAAC;QACvC,CAAC,CAAC,CAAC;QAEH,gCAAgC;QAChC,cAAc,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,QAAQ,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC;QAEvD,+BAA+B;QAC/B,IAAI,CAAC,MAAM,GAAG,cAAc;aACzB,KAAK,CAAC,CAAC,EAAE,IAAI,CAAC,OAAO,CAAC;aACtB,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;QAElC,0BAA0B;QAC1B,MAAM,cAAc,GAAG,IAAI,GAAG,CAC5B,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,QAAQ,IAAI,GAAG,CAAC,SAAS,CAAC,QAAQ,EAAE,CAAC,CAC5E,CAAC;QAEF,KAAK,MAAM,QAAQ,IAAI,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,EAAE,CAAC;YAC9C,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC;gBAClC,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;YACnC,CAAC;QACH,CAAC;IACH,CAAC;CACF;AAjND,4CAiNC"}
|
|
@@ -0,0 +1,170 @@
|
|
|
1
|
+
/**
 * ExperienceBuffer - Manages experience replay buffer with prioritization
 */
export class ExperienceBuffer {
    /**
     * @param {number} [maxSize=10000] - Maximum number of experiences retained in memory.
     */
    constructor(maxSize = 10000) {
        this.buffer = [];
        this.priorities = new Map();
        this.maxSize = maxSize;
    }
    /**
     * Add an experience to the buffer and record its sampling priority.
     * Prunes the lowest-priority entries once the buffer exceeds maxSize.
     * @param {object} experience - Must carry `reward`, `timestamp`, and `metadata`
     *   (with optional `metadata.actionId` used as the priority key).
     */
    add(experience) {
        this.buffer.push(experience);
        // Calculate priority based on reward and recency
        const priority = this.calculatePriority(experience);
        const actionId = experience.metadata.actionId || experience.timestamp.toString();
        this.priorities.set(actionId, priority);
        // Prune if buffer exceeds max size
        if (this.buffer.length > this.maxSize) {
            this.prune();
        }
    }
    /**
     * Sample a uniformly random batch (with replacement).
     * @param {number} batchSize
     * @returns {object[]} min(batchSize, buffer length) experiences; [] when empty.
     */
    sample(batchSize) {
        if (this.buffer.length === 0) {
            return [];
        }
        const samples = [];
        const size = Math.min(batchSize, this.buffer.length);
        for (let i = 0; i < size; i++) {
            const idx = Math.floor(Math.random() * this.buffer.length);
            samples.push(this.buffer[idx]);
        }
        return samples;
    }
    /**
     * Sample a batch with prioritized experience replay.
     * @param {number} batchSize
     * @param {number} [alpha=0.6] - Priority exponent (0 = uniform, 1 = fully proportional).
     * @returns {object[]} Exactly min(batchSize, buffer length) experiences.
     */
    samplePrioritized(batchSize, alpha = 0.6) {
        if (this.buffer.length === 0) {
            return [];
        }
        const size = Math.min(batchSize, this.buffer.length);
        const samples = [];
        // Calculate probability distribution based on priorities
        const priorities = this.buffer.map((exp) => {
            const actionId = exp.metadata.actionId || exp.timestamp.toString();
            const priority = this.priorities.get(actionId) || 1.0;
            return Math.pow(priority, alpha);
        });
        const totalPriority = priorities.reduce((sum, p) => sum + p, 0);
        const probabilities = priorities.map((p) => p / totalPriority);
        // Sample by inverse-CDF over the cumulative distribution
        for (let i = 0; i < size; i++) {
            const rand = Math.random();
            let cumulative = 0;
            // BUGFIX: floating-point rounding can leave the cumulative sum just
            // below 1; previously no bucket matched in that case and the sample
            // was silently dropped, returning fewer than `size` items. Fall back
            // to the last experience so the batch is always full.
            let chosen = this.buffer[this.buffer.length - 1];
            for (let j = 0; j < probabilities.length; j++) {
                cumulative += probabilities[j];
                if (rand <= cumulative) {
                    chosen = this.buffer[j];
                    break;
                }
            }
            samples.push(chosen);
        }
        return samples;
    }
    /**
     * Get the most recently added experiences (insertion order preserved).
     * @param {number} count
     */
    getRecent(count) {
        const start = Math.max(0, this.buffer.length - count);
        return this.buffer.slice(start);
    }
    /**
     * Get the highest-reward experiences, best first.
     * @param {number} count
     */
    getTopRewarded(count) {
        const sorted = [...this.buffer].sort((a, b) => b.reward - a.reward);
        return sorted.slice(0, count);
    }
    /**
     * Get all experiences whose metadata.taskType matches.
     * @param {string} taskType
     */
    getByTaskType(taskType) {
        return this.buffer.filter((exp) => exp.metadata.taskType === taskType);
    }
    /**
     * Summary statistics over the buffered rewards and task types.
     * @returns {{size: number, avgReward: number, maxReward: number, minReward: number, taskDistribution: Object<string, number>}}
     */
    getStats() {
        if (this.buffer.length === 0) {
            return {
                size: 0,
                avgReward: 0,
                maxReward: 0,
                minReward: 0,
                taskDistribution: {},
            };
        }
        const rewards = this.buffer.map((exp) => exp.reward);
        const avgReward = rewards.reduce((sum, r) => sum + r, 0) / rewards.length;
        const maxReward = Math.max(...rewards);
        const minReward = Math.min(...rewards);
        const taskDistribution = {};
        for (const exp of this.buffer) {
            const taskType = exp.metadata.taskType;
            taskDistribution[taskType] = (taskDistribution[taskType] || 0) + 1;
        }
        return {
            size: this.buffer.length,
            avgReward,
            maxReward,
            minReward,
            taskDistribution,
        };
    }
    /**
     * Remove all experiences and priorities.
     */
    clear() {
        this.buffer = [];
        this.priorities.clear();
    }
    /**
     * Current number of buffered experiences.
     */
    size() {
        return this.buffer.length;
    }
    /**
     * Calculate priority for an experience: 0.7 * |reward| plus 0.3 * a recency
     * factor that decays as (now - timestamp) grows (1e6 ms scale).
     */
    calculatePriority(experience) {
        // Priority based on:
        // 1. Reward magnitude (higher reward = higher priority)
        // 2. Recency (more recent = higher priority)
        const rewardComponent = Math.abs(experience.reward);
        const recencyComponent = 1.0 / (1.0 + (Date.now() - experience.timestamp) / 1000000);
        // Simple priority: weighted sum
        return rewardComponent * 0.7 + recencyComponent * 0.3;
    }
    /**
     * Prune the buffer back to maxSize by dropping the lowest-priority entries.
     */
    prune() {
        // Rank experiences by stored priority (unknown priority sorts last).
        const withPriorities = this.buffer.map((exp) => {
            const actionId = exp.metadata.actionId || exp.timestamp.toString();
            const priority = this.priorities.get(actionId) || 0;
            return { experience: exp, priority };
        });
        withPriorities.sort((a, b) => b.priority - a.priority);
        // Keep top maxSize experiences
        const kept = withPriorities
            .slice(0, this.maxSize)
            .map((item) => item.experience);
        // BUGFIX: re-sort survivors chronologically; the priority sort above
        // would otherwise reorder the buffer and break getRecent(), which
        // relies on insertion (timestamp) order.
        kept.sort((a, b) => a.timestamp - b.timestamp);
        this.buffer = kept;
        // Clean up priorities map for evicted experiences.
        const validActionIds = new Set(this.buffer.map((exp) => exp.metadata.actionId || exp.timestamp.toString()));
        for (const actionId of this.priorities.keys()) {
            if (!validActionIds.has(actionId)) {
                this.priorities.delete(actionId);
            }
        }
    }
}
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
/**
 * ExperienceRecorder - Captures and stores learning experiences
 */
import type { SQLiteVectorDB } from '../../../core/vector-db.js';
import type { Experience, ExecutionContext, State, Outcome } from '../types/index.js';
export declare class ExperienceRecorder {
    private db;
    private rewardEstimator;
    private actionCounter;
    constructor(db: SQLiteVectorDB);
    /**
     * Record a tool execution as a learning experience.
     * Computes a reward for the outcome, captures before/after state, and
     * persists the resulting experience to the vector database.
     * @param toolName - Name of the executed tool.
     * @param args - Arguments the tool was invoked with.
     * @param result - Raw tool result (currently not embedded in the state).
     * @param context - Execution context (session, user, task type, terminality).
     * @param outcome - Observed outcome (success flag, timing, token usage).
     * @returns The stored Experience.
     */
    recordToolExecution(toolName: string, args: any, result: any, context: ExecutionContext, outcome: Outcome): Promise<Experience>;
    /**
     * Capture current state representation (task, tools, history + embedding).
     */
    private captureState;
    /**
     * Generate vector embedding for state.
     * NOTE(review): implementation uses a simple normalized character-hash
     * embedding (768 dims), not a learned model.
     */
    private generateStateEmbedding;
    /**
     * Store experience in vector database.
     */
    private storeExperience;
    /**
     * Retrieve experiences whose state embedding is most similar to `state`.
     * @param state - Query state; an embedding is generated if missing.
     * @param k - Number of neighbors to return (default 10).
     */
    retrieveSimilarExperiences(state: State, k?: number): Promise<Experience[]>;
    /**
     * Get experiences recorded under a given session.
     */
    getSessionExperiences(sessionId: string): Promise<Experience[]>;
    /**
     * Update experience with feedback.
     * NOTE(review): implementation currently only logs the feedback.
     */
    updateExperienceReward(actionId: string, feedbackReward: number): Promise<void>;
}
//# sourceMappingURL=experience-recorder.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"experience-recorder.d.ts","sourceRoot":"","sources":["../../../../src/mcp/learning/core/experience-recorder.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,4BAA4B,CAAC;AACjE,OAAO,KAAK,EACV,UAAU,EACV,gBAAgB,EAEhB,KAAK,EACL,OAAO,EACR,MAAM,mBAAmB,CAAC;AAG3B,qBAAa,kBAAkB;IAC7B,OAAO,CAAC,EAAE,CAAiB;IAC3B,OAAO,CAAC,eAAe,CAAkB;IACzC,OAAO,CAAC,aAAa,CAAa;gBAEtB,EAAE,EAAE,cAAc;IAK9B;;OAEG;IACG,mBAAmB,CACvB,QAAQ,EAAE,MAAM,EAChB,IAAI,EAAE,GAAG,EACT,MAAM,EAAE,GAAG,EACX,OAAO,EAAE,gBAAgB,EACzB,OAAO,EAAE,OAAO,GACf,OAAO,CAAC,UAAU,CAAC;IAyCtB;;OAEG;YACW,YAAY;IAmB1B;;OAEG;YACW,sBAAsB;IA4BpC;;OAEG;YACW,eAAe;IA4B7B;;OAEG;IACG,0BAA0B,CAC9B,KAAK,EAAE,KAAK,EACZ,CAAC,GAAE,MAAW,GACb,OAAO,CAAC,UAAU,EAAE,CAAC;IAgCxB;;OAEG;IACG,qBAAqB,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,EAAE,CAAC;IAkCrE;;OAEG;IACG,sBAAsB,CAC1B,QAAQ,EAAE,MAAM,EAChB,cAAc,EAAE,MAAM,GACrB,OAAO,CAAC,IAAI,CAAC;CAKjB"}
|
|
@@ -0,0 +1,200 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* ExperienceRecorder - Captures and stores learning experiences
|
|
4
|
+
*/
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.ExperienceRecorder = void 0;
|
|
7
|
+
const reward_estimator_js_1 = require("./reward-estimator.js");
|
|
8
|
+
class ExperienceRecorder {
    /**
     * @param {SQLiteVectorDB} db - Vector database used to persist experiences.
     */
    constructor(db) {
        this.actionCounter = 0;
        this.db = db;
        this.rewardEstimator = new reward_estimator_js_1.RewardEstimator();
    }
    /**
     * Record a tool execution as a learning experience.
     * Computes a reward, captures before/after state, and persists the result.
     * @param {string} toolName - Name of the executed tool.
     * @param {any} args - Arguments the tool was invoked with.
     * @param {any} result - Raw tool result (not currently embedded in state).
     * @param {object} context - Execution context (sessionId, userId, taskType, ...).
     * @param {object} outcome - Observed outcome (success, executionTime, tokensUsed).
     * @returns {Promise<object>} The stored experience.
     */
    async recordToolExecution(toolName, args, result, context, outcome) {
        const state = await this.captureState(context);
        const action = {
            tool: toolName,
            params: args,
            timestamp: Date.now(),
        };
        const reward = await this.rewardEstimator.calculateReward(outcome, context);
        const nextState = await this.captureState({
            ...context,
            isTerminal: outcome.success || !!outcome.error,
        });
        const experience = {
            state,
            action,
            reward: reward.combined,
            nextState,
            done: context.isTerminal,
            timestamp: Date.now(),
            metadata: {
                userId: context.userId,
                sessionId: context.sessionId,
                taskType: context.taskType,
                actionId: `action_${this.actionCounter++}`,
                rewardBreakdown: reward,
                outcome: {
                    success: outcome.success,
                    executionTime: outcome.executionTime,
                    tokensUsed: outcome.tokensUsed,
                },
            },
        };
        // Store experience in vector database
        await this.storeExperience(experience);
        return experience;
    }
    /**
     * Capture current state representation (task description, available tools,
     * prior actions) and attach a vector embedding for similarity search.
     */
    async captureState(context) {
        const state = {
            taskDescription: context.metadata?.taskDescription || '',
            availableTools: context.metadata?.availableTools || [],
            previousActions: context.metadata?.previousActions || [],
            constraints: context.metadata?.constraints,
            context: {
                sessionId: context.sessionId,
                taskType: context.taskType,
                timestamp: context.timestamp,
            },
        };
        // Generate state embedding for similarity search
        state.embedding = await this.generateStateEmbedding(state);
        return state;
    }
    /**
     * Generate a 768-dimensional, L2-normalized character-hash embedding.
     * Simple placeholder — in production use a proper embedding model.
     */
    async generateStateEmbedding(state) {
        const text = JSON.stringify({
            task: state.taskDescription,
            tools: state.availableTools,
            type: state.context?.taskType,
        });
        // Bucket character codes into 768 dimensions.
        const embedding = new Float32Array(768);
        for (let i = 0; i < text.length; i++) {
            const index = text.charCodeAt(i) % 768;
            embedding[index] += 1;
        }
        // Normalize to unit length (skip if the text was empty).
        const magnitude = Math.sqrt(embedding.reduce((sum, val) => sum + val * val, 0));
        if (magnitude > 0) {
            for (let i = 0; i < embedding.length; i++) {
                embedding[i] /= magnitude;
            }
        }
        return embedding;
    }
    /**
     * Store experience in the vector database, flattened into embedding + metadata.
     * @throws {Error} If the state has no embedding.
     */
    async storeExperience(experience) {
        if (!experience.state.embedding) {
            throw new Error('State embedding is required');
        }
        await this.db.insert({
            embedding: Array.from(experience.state.embedding),
            metadata: {
                type: 'learning_experience',
                sessionId: experience.metadata.sessionId,
                userId: experience.metadata.userId,
                taskType: experience.metadata.taskType,
                actionId: experience.metadata.actionId,
                action: experience.action,
                reward: experience.reward,
                done: experience.done,
                timestamp: experience.timestamp,
                state: {
                    taskDescription: experience.state.taskDescription,
                    availableTools: experience.state.availableTools,
                    previousActionsCount: experience.state.previousActions.length,
                },
                outcome: experience.metadata.outcome,
                rewardBreakdown: experience.metadata.rewardBreakdown,
            },
        });
    }
    /**
     * Rehydrate a vector-DB search result into an Experience object.
     * Shared by retrieveSimilarExperiences and getSessionExperiences
     * (previously duplicated inline in both methods).
     */
    resultToExperience(result) {
        return {
            state: {
                taskDescription: result.metadata.state.taskDescription,
                availableTools: result.metadata.state.availableTools,
                previousActions: [],
                embedding: result.embedding,
            },
            action: result.metadata.action,
            reward: result.metadata.reward,
            nextState: {
                taskDescription: '',
                availableTools: [],
                previousActions: [],
            },
            done: result.metadata.done,
            timestamp: result.metadata.timestamp,
            metadata: {
                userId: result.metadata.userId,
                sessionId: result.metadata.sessionId,
                taskType: result.metadata.taskType,
                actionId: result.metadata.actionId,
            },
        };
    }
    /**
     * Retrieve the k experiences most similar to the given state.
     * Generates an embedding for the state if it does not already have one.
     */
    async retrieveSimilarExperiences(state, k = 10) {
        if (!state.embedding) {
            state.embedding = await this.generateStateEmbedding(state);
        }
        const results = await this.db.search(Array.from(state.embedding), k);
        return results.map((result) => this.resultToExperience(result));
    }
    /**
     * Get experiences recorded under a given session.
     * NOTE(review): workaround — searches with a zero vector (capped at 1000
     * results) and filters client-side; a metadata-indexed query would be
     * both complete and cheaper.
     */
    async getSessionExperiences(sessionId) {
        const allResults = await this.db.search(Array(768).fill(0), 1000);
        const sessionResults = allResults.filter((result) => result.metadata.sessionId === sessionId);
        return sessionResults.map((result) => this.resultToExperience(result));
    }
    /**
     * Update experience with feedback.
     * TODO: persist the adjusted reward; currently only logs the feedback.
     */
    async updateExperienceReward(actionId, feedbackReward) {
        console.log(`Updated reward for ${actionId}: ${feedbackReward}`);
    }
}
|
|
199
|
+
// CommonJS export surface: consumers import the recorder class only.
exports.ExperienceRecorder = ExperienceRecorder;
//# sourceMappingURL=experience-recorder.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"experience-recorder.js","sourceRoot":"","sources":["../../../../src/mcp/learning/core/experience-recorder.ts"],"names":[],"mappings":";AAAA;;GAEG;;;AAUH,+DAAwD;AAExD,MAAa,kBAAkB;IAK7B,YAAY,EAAkB;QAFtB,kBAAa,GAAW,CAAC,CAAC;QAGhC,IAAI,CAAC,EAAE,GAAG,EAAE,CAAC;QACb,IAAI,CAAC,eAAe,GAAG,IAAI,qCAAe,EAAE,CAAC;IAC/C,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,mBAAmB,CACvB,QAAgB,EAChB,IAAS,EACT,MAAW,EACX,OAAyB,EACzB,OAAgB;QAEhB,MAAM,KAAK,GAAG,MAAM,IAAI,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;QAC/C,MAAM,MAAM,GAAW;YACrB,IAAI,EAAE,QAAQ;YACd,MAAM,EAAE,IAAI;YACZ,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;SACtB,CAAC;QAEF,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,eAAe,CAAC,eAAe,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC5E,MAAM,SAAS,GAAG,MAAM,IAAI,CAAC,YAAY,CAAC;YACxC,GAAG,OAAO;YACV,UAAU,EAAE,OAAO,CAAC,OAAO,IAAI,CAAC,CAAC,OAAO,CAAC,KAAK;SAC/C,CAAC,CAAC;QAEH,MAAM,UAAU,GAAe;YAC7B,KAAK;YACL,MAAM;YACN,MAAM,EAAE,MAAM,CAAC,QAAQ;YACvB,SAAS;YACT,IAAI,EAAE,OAAO,CAAC,UAAU;YACxB,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;YACrB,QAAQ,EAAE;gBACR,MAAM,EAAE,OAAO,CAAC,MAAM;gBACtB,SAAS,EAAE,OAAO,CAAC,SAAS;gBAC5B,QAAQ,EAAE,OAAO,CAAC,QAAQ;gBAC1B,QAAQ,EAAE,UAAU,IAAI,CAAC,aAAa,EAAE,EAAE;gBAC1C,eAAe,EAAE,MAAM;gBACvB,OAAO,EAAE;oBACP,OAAO,EAAE,OAAO,CAAC,OAAO;oBACxB,aAAa,EAAE,OAAO,CAAC,aAAa;oBACpC,UAAU,EAAE,OAAO,CAAC,UAAU;iBAC/B;aACF;SACF,CAAC;QAEF,sCAAsC;QACtC,MAAM,IAAI,CAAC,eAAe,CAAC,UAAU,CAAC,CAAC;QAEvC,OAAO,UAAU,CAAC;IACpB,CAAC;IAED;;OAEG;IACK,KAAK,CAAC,YAAY,CAAC,OAAyB;QAClD,MAAM,KAAK,GAAU;YACnB,eAAe,EAAE,OAAO,CAAC,QAAQ,EAAE,eAAe,IAAI,EAAE;YACxD,cAAc,EAAE,OAAO,CAAC,QAAQ,EAAE,cAAc,IAAI,EAAE;YACtD,eAAe,EAAE,OAAO,CAAC,QAAQ,EAAE,eAAe,IAAI,EAAE;YACxD,WAAW,EAAE,OAAO,CAAC,QAAQ,EAAE,WAAW;YAC1C,OAAO,EAAE;gBACP,SAAS,EAAE,OAAO,CAAC,SAAS;gBAC5B,QAAQ,EAAE,OAAO,CAAC,QAAQ;gBAC1B,SAAS,EAAE,OAAO,CAAC,SAAS;aAC7B;SACF,CAAC;QAEF,iDAAiD;QACjD,KAAK,CAAC,SAAS,GAAG,MAAM,IAAI,CAAC,sBAAsB,CAAC,KAAK,CAAC,CAAC;QAE3D,OAAO,KAAK,CAAC;IACf,CAAC;IAED;;OAEG;IACK,KAAK,CAAC,sBAAsB,CAAC,KAAY;QAC/C,4EAA4E;QAC5E,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC;YAC1B,IAAI,EAAE,KAAK,CAAC,eAAe;YAC3B,KAAK,EA
AE,KAAK,CAAC,cAAc;YAC3B,IAAI,EAAE,KAAK,CAAC,OAAO,EAAE,QAAQ;SAC9B,CAAC,CAAC;QAEH,wDAAwD;QACxD,MAAM,SAAS,GAAG,IAAI,YAAY,CAAC,GAAG,CAAC,CAAC;QACxC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YACrC,MAAM,KAAK,GAAG,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC;YACvC,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;QACxB,CAAC;QAED,YAAY;QACZ,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CACzB,SAAS,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CAAC,GAAG,GAAG,GAAG,GAAG,GAAG,EAAE,CAAC,CAAC,CACnD,CAAC;QACF,IAAI,SAAS,GAAG,CAAC,EAAE,CAAC;YAClB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;gBAC1C,SAAS,CAAC,CAAC,CAAC,IAAI,SAAS,CAAC;YAC5B,CAAC;QACH,CAAC;QAED,OAAO,SAAS,CAAC;IACnB,CAAC;IAED;;OAEG;IACK,KAAK,CAAC,eAAe,CAAC,UAAsB;QAClD,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC;YAChC,MAAM,IAAI,KAAK,CAAC,6BAA6B,CAAC,CAAC;QACjD,CAAC;QAED,MAAM,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC;YACnB,SAAS,EAAE,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,SAAS,CAAC;YACjD,QAAQ,EAAE;gBACR,IAAI,EAAE,qBAAqB;gBAC3B,SAAS,EAAE,UAAU,CAAC,QAAQ,CAAC,SAAS;gBACxC,MAAM,EAAE,UAAU,CAAC,QAAQ,CAAC,MAAM;gBAClC,QAAQ,EAAE,UAAU,CAAC,QAAQ,CAAC,QAAQ;gBACtC,QAAQ,EAAE,UAAU,CAAC,QAAQ,CAAC,QAAQ;gBACtC,MAAM,EAAE,UAAU,CAAC,MAAM;gBACzB,MAAM,EAAE,UAAU,CAAC,MAAM;gBACzB,IAAI,EAAE,UAAU,CAAC,IAAI;gBACrB,SAAS,EAAE,UAAU,CAAC,SAAS;gBAC/B,KAAK,EAAE;oBACL,eAAe,EAAE,UAAU,CAAC,KAAK,CAAC,eAAe;oBACjD,cAAc,EAAE,UAAU,CAAC,KAAK,CAAC,cAAc;oBAC/C,oBAAoB,EAAE,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM;iBAC9D;gBACD,OAAO,EAAE,UAAU,CAAC,QAAQ,CAAC,OAAO;gBACpC,eAAe,EAAE,UAAU,CAAC,QAAQ,CAAC,eAAe;aACrD;SACF,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,0BAA0B,CAC9B,KAAY,EACZ,IAAY,EAAE;QAEd,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC;YACrB,KAAK,CAAC,SAAS,GAAG,MAAM,IAAI,CAAC,sBAAsB,CAAC,KAAK,CAAC,CAAC;QAC7D,CAAC;QAED,MAAM,OAAO,GAAG,MAAM,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC;QAErE,OAAO,OAAO,CAAC,GAAG,CAAC,CAAC,MAAW,EAAE,EAAE,CAAC,CAAC;YACnC,KAAK,EAAE;gBACL,eAAe,EAAE,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,eAAe;gB
ACtD,cAAc,EAAE,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,cAAc;gBACpD,eAAe,EAAE,EAAE;gBACnB,SAAS,EAAE,MAAM,CAAC,SAAS;aAC5B;YACD,MAAM,EAAE,MAAM,CAAC,QAAQ,CAAC,MAAM;YAC9B,MAAM,EAAE,MAAM,CAAC,QAAQ,CAAC,MAAM;YAC9B,SAAS,EAAE;gBACT,eAAe,EAAE,EAAE;gBACnB,cAAc,EAAE,EAAE;gBAClB,eAAe,EAAE,EAAE;aACpB;YACD,IAAI,EAAE,MAAM,CAAC,QAAQ,CAAC,IAAI;YAC1B,SAAS,EAAE,MAAM,CAAC,QAAQ,CAAC,SAAS;YACpC,QAAQ,EAAE;gBACR,MAAM,EAAE,MAAM,CAAC,QAAQ,CAAC,MAAM;gBAC9B,SAAS,EAAE,MAAM,CAAC,QAAQ,CAAC,SAAS;gBACpC,QAAQ,EAAE,MAAM,CAAC,QAAQ,CAAC,QAAQ;gBAClC,QAAQ,EAAE,MAAM,CAAC,QAAQ,CAAC,QAAQ;aACnC;SACF,CAAC,CAAC,CAAC;IACN,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,qBAAqB,CAAC,SAAiB;QAC3C,2BAA2B;QAC3B,MAAM,UAAU,GAAG,MAAM,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;QAElE,uBAAuB;QACvB,MAAM,cAAc,GAAG,UAAU,CAAC,MAAM,CACtC,CAAC,MAAW,EAAE,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,SAAS,KAAK,SAAS,CACzD,CAAC;QAEF,OAAO,cAAc,CAAC,GAAG,CAAC,CAAC,MAAW,EAAE,EAAE,CAAC,CAAC;YAC1C,KAAK,EAAE;gBACL,eAAe,EAAE,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,eAAe;gBACtD,cAAc,EAAE,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,cAAc;gBACpD,eAAe,EAAE,EAAE;gBACnB,SAAS,EAAE,MAAM,CAAC,SAAS;aAC5B;YACD,MAAM,EAAE,MAAM,CAAC,QAAQ,CAAC,MAAM;YAC9B,MAAM,EAAE,MAAM,CAAC,QAAQ,CAAC,MAAM;YAC9B,SAAS,EAAE;gBACT,eAAe,EAAE,EAAE;gBACnB,cAAc,EAAE,EAAE;gBAClB,eAAe,EAAE,EAAE;aACpB;YACD,IAAI,EAAE,MAAM,CAAC,QAAQ,CAAC,IAAI;YAC1B,SAAS,EAAE,MAAM,CAAC,QAAQ,CAAC,SAAS;YACpC,QAAQ,EAAE;gBACR,MAAM,EAAE,MAAM,CAAC,QAAQ,CAAC,MAAM;gBAC9B,SAAS,EAAE,MAAM,CAAC,QAAQ,CAAC,SAAS;gBACpC,QAAQ,EAAE,MAAM,CAAC,QAAQ,CAAC,QAAQ;gBAClC,QAAQ,EAAE,MAAM,CAAC,QAAQ,CAAC,QAAQ;aACnC;SACF,CAAC,CAAC,CAAC;IACN,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,sBAAsB,CAC1B,QAAgB,EAChB,cAAsB;QAEtB,oEAAoE;QACpE,+BAA+B;QAC/B,OAAO,CAAC,GAAG,CAAC,sBAAsB,QAAQ,KAAK,cAAc,EAAE,CAAC,CAAC;IACnE,CAAC;CACF;AAtOD,gDAsOC"}
|