@arcanea/guardian-evolution 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/algorithms/a2c.d.ts +86 -0
- package/dist/algorithms/a2c.d.ts.map +1 -0
- package/dist/algorithms/a2c.js +361 -0
- package/dist/algorithms/a2c.js.map +1 -0
- package/dist/algorithms/curiosity.d.ts +82 -0
- package/dist/algorithms/curiosity.d.ts.map +1 -0
- package/dist/algorithms/curiosity.js +392 -0
- package/dist/algorithms/curiosity.js.map +1 -0
- package/dist/algorithms/decision-transformer.d.ts +82 -0
- package/dist/algorithms/decision-transformer.d.ts.map +1 -0
- package/dist/algorithms/decision-transformer.js +415 -0
- package/dist/algorithms/decision-transformer.js.map +1 -0
- package/dist/algorithms/dqn.d.ts +72 -0
- package/dist/algorithms/dqn.d.ts.map +1 -0
- package/dist/algorithms/dqn.js +303 -0
- package/dist/algorithms/dqn.js.map +1 -0
- package/dist/algorithms/index.d.ts +32 -0
- package/dist/algorithms/index.d.ts.map +1 -0
- package/dist/algorithms/index.js +74 -0
- package/dist/algorithms/index.js.map +1 -0
- package/dist/algorithms/ppo.d.ts +72 -0
- package/dist/algorithms/ppo.d.ts.map +1 -0
- package/dist/algorithms/ppo.js +331 -0
- package/dist/algorithms/ppo.js.map +1 -0
- package/dist/algorithms/q-learning.d.ts +77 -0
- package/dist/algorithms/q-learning.d.ts.map +1 -0
- package/dist/algorithms/q-learning.js +259 -0
- package/dist/algorithms/q-learning.js.map +1 -0
- package/dist/algorithms/sarsa.d.ts +82 -0
- package/dist/algorithms/sarsa.d.ts.map +1 -0
- package/dist/algorithms/sarsa.js +297 -0
- package/dist/algorithms/sarsa.js.map +1 -0
- package/dist/index.d.ts +118 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +201 -0
- package/dist/index.js.map +1 -0
- package/dist/modes/balanced.d.ts +60 -0
- package/dist/modes/balanced.d.ts.map +1 -0
- package/dist/modes/balanced.js +234 -0
- package/dist/modes/balanced.js.map +1 -0
- package/dist/modes/batch.d.ts +82 -0
- package/dist/modes/batch.d.ts.map +1 -0
- package/dist/modes/batch.js +316 -0
- package/dist/modes/batch.js.map +1 -0
- package/dist/modes/edge.d.ts +85 -0
- package/dist/modes/edge.d.ts.map +1 -0
- package/dist/modes/edge.js +310 -0
- package/dist/modes/edge.js.map +1 -0
- package/dist/modes/index.d.ts +55 -0
- package/dist/modes/index.d.ts.map +1 -0
- package/dist/modes/index.js +83 -0
- package/dist/modes/index.js.map +1 -0
- package/dist/modes/real-time.d.ts +58 -0
- package/dist/modes/real-time.d.ts.map +1 -0
- package/dist/modes/real-time.js +196 -0
- package/dist/modes/real-time.js.map +1 -0
- package/dist/modes/research.d.ts +79 -0
- package/dist/modes/research.d.ts.map +1 -0
- package/dist/modes/research.js +389 -0
- package/dist/modes/research.js.map +1 -0
- package/dist/pattern-learner.d.ts +117 -0
- package/dist/pattern-learner.d.ts.map +1 -0
- package/dist/pattern-learner.js +603 -0
- package/dist/pattern-learner.js.map +1 -0
- package/dist/reasoning-bank.d.ts +259 -0
- package/dist/reasoning-bank.d.ts.map +1 -0
- package/dist/reasoning-bank.js +993 -0
- package/dist/reasoning-bank.js.map +1 -0
- package/dist/reasoningbank-adapter.d.ts +168 -0
- package/dist/reasoningbank-adapter.d.ts.map +1 -0
- package/dist/reasoningbank-adapter.js +463 -0
- package/dist/reasoningbank-adapter.js.map +1 -0
- package/dist/sona-integration.d.ts +168 -0
- package/dist/sona-integration.d.ts.map +1 -0
- package/dist/sona-integration.js +316 -0
- package/dist/sona-integration.js.map +1 -0
- package/dist/sona-manager.d.ts +147 -0
- package/dist/sona-manager.d.ts.map +1 -0
- package/dist/sona-manager.js +695 -0
- package/dist/sona-manager.js.map +1 -0
- package/dist/types.d.ts +431 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +11 -0
- package/dist/types.js.map +1 -0
- package/package.json +47 -0
|
@@ -0,0 +1,303 @@
|
|
|
1
|
+
/**
 * Deep Q-Network (DQN)
 *
 * Implements DQN with enhancements:
 * - Experience replay
 * - Target network
 * - Double DQN (optional)
 * - Dueling architecture (optional)
 * - Epsilon-greedy exploration
 *
 * Performance Target: <10ms per update step
 */
/**
 * Default DQN configuration.
 *
 * Merged under any caller-supplied overrides by the DQNAlgorithm
 * constructor (`{ ...DEFAULT_DQN_CONFIG, ...config }`).
 */
export const DEFAULT_DQN_CONFIG = {
    algorithm: 'dqn',            // algorithm identifier tag
    learningRate: 0.0001,        // SGD-with-momentum step size
    gamma: 0.99,                 // discount factor for TD targets
    entropyCoef: 0,              // unused by value-based DQN; kept for config parity
    valueLossCoef: 1,            // unused by value-based DQN; kept for config parity
    maxGradNorm: 10,             // per-element gradient clip bound
    epochs: 1,                   // passes per update call
    miniBatchSize: 32,           // replay samples per update
    bufferSize: 10000,           // circular replay buffer capacity
    explorationInitial: 1.0,     // starting epsilon (fully random)
    explorationFinal: 0.01,      // floor for epsilon after decay
    explorationDecay: 10000,     // steps over which epsilon decays linearly
    targetUpdateFreq: 100,       // update steps between target-network syncs
    doubleDQN: true,             // decouple action selection from evaluation
    duelingNetwork: false,       // dueling architecture toggle (not used by this network)
};
|
|
33
|
+
/**
 * DQN Algorithm Implementation
 *
 * A two-layer MLP Q-network (inputDim -> 64 -> numActions, no biases)
 * trained by semi-gradient TD with experience replay, a periodically
 * synchronized target network, optional Double DQN target selection,
 * and linearly decaying epsilon-greedy exploration.
 */
export class DQNAlgorithm {
    // Effective configuration: DEFAULT_DQN_CONFIG overlaid with constructor overrides.
    config;
    // Q-network weights: [w1 (inputDim*hidden), w2 (hidden*numActions)],
    // each a flat Float32Array indexed as w[in * fanOut + out].
    qWeights;
    // Snapshot of qWeights used for TD targets; refreshed every
    // config.targetUpdateFreq update steps.
    targetWeights;
    // Optimizer state: one momentum buffer (EMA of gradients) per weight layer.
    qMomentum;
    // Replay buffer of {state, action, reward, nextState, done} transitions
    // (circular once config.bufferSize is reached).
    buffer = [];
    // Next overwrite position in the circular buffer.
    bufferIdx = 0;
    // Current epsilon for epsilon-greedy action selection.
    epsilon;
    // Number of update() calls; drives target syncing and epsilon decay.
    stepCount = 0;
    // Size of the discrete action space; string actions are hashed into
    // [0, numActions) by hashAction.
    numActions = 4;
    // State vector dimensionality — presumably an embedding size; states
    // longer than this are truncated in forward(). TODO confirm with callers.
    inputDim = 768;
    // Statistics
    updateCount = 0;
    avgLoss = 0;
    constructor(config = {}) {
        this.config = { ...DEFAULT_DQN_CONFIG, ...config };
        this.epsilon = this.config.explorationInitial;
        // Initialize Q-network (2 hidden layers)
        this.qWeights = this.initializeNetwork();
        // Target network starts as an exact copy of the online network.
        this.targetWeights = this.copyNetwork(this.qWeights);
        this.qMomentum = this.qWeights.map(w => new Float32Array(w.length));
    }
    /**
     * Add experience from trajectory.
     *
     * Converts each trajectory step into a (state, action, reward,
     * nextState, done) transition and appends it to the replay buffer,
     * overwriting the oldest entries once the buffer is full. The last
     * step of a trajectory is always marked done.
     */
    addExperience(trajectory) {
        for (let i = 0; i < trajectory.steps.length; i++) {
            const step = trajectory.steps[i];
            const nextStep = i < trajectory.steps.length - 1
                ? trajectory.steps[i + 1]
                : null;
            const experience = {
                state: step.stateBefore,
                // String actions are hashed into a discrete action index.
                action: this.hashAction(step.action),
                reward: step.reward,
                nextState: step.stateAfter,
                done: nextStep === null,
            };
            // Add to circular buffer
            if (this.buffer.length < this.config.bufferSize) {
                this.buffer.push(experience);
            }
            else {
                this.buffer[this.bufferIdx] = experience;
            }
            this.bufferIdx = (this.bufferIdx + 1) % this.config.bufferSize;
        }
    }
    /**
     * Perform one DQN update step on a sampled mini-batch.
     * Target: <10ms (a console warning is emitted when exceeded).
     *
     * Returns { loss, epsilon } — loss is the mean squared TD error of
     * the batch (0 when the buffer is still too small to sample).
     */
    update() {
        const startTime = performance.now();
        // Not enough experience yet: no-op until a full mini-batch exists.
        if (this.buffer.length < this.config.miniBatchSize) {
            return { loss: 0, epsilon: this.epsilon };
        }
        // Sample mini-batch
        const batch = this.sampleBatch();
        // Compute TD targets
        let totalLoss = 0;
        const gradients = this.qWeights.map(w => new Float32Array(w.length));
        for (const exp of batch) {
            // Current Q-values
            const qValues = this.forward(exp.state, this.qWeights);
            const currentQ = qValues[exp.action];
            // Target Q-value
            let targetQ;
            if (exp.done) {
                // Terminal transition: no bootstrapped future value.
                targetQ = exp.reward;
            }
            else {
                if (this.config.doubleDQN) {
                    // Double DQN: use online network to select action, target to evaluate
                    const nextQOnline = this.forward(exp.nextState, this.qWeights);
                    const bestAction = this.argmax(nextQOnline);
                    const nextQTarget = this.forward(exp.nextState, this.targetWeights);
                    targetQ = exp.reward + this.config.gamma * nextQTarget[bestAction];
                }
                else {
                    // Standard DQN
                    const nextQ = this.forward(exp.nextState, this.targetWeights);
                    targetQ = exp.reward + this.config.gamma * Math.max(...nextQ);
                }
            }
            // TD error (gradients carry tdError * dQ/dw, so applying them with
            // a positive step moves Q(s, a) toward the target — gradient
            // descent on squared TD error with the sign folded in).
            const tdError = targetQ - currentQ;
            const loss = tdError * tdError;
            totalLoss += loss;
            // Accumulate gradients
            this.accumulateGradients(gradients, exp.state, exp.action, tdError);
        }
        // Apply gradients
        this.applyGradients(gradients, batch.length);
        // Update target network periodically
        this.stepCount++;
        if (this.stepCount % this.config.targetUpdateFreq === 0) {
            this.targetWeights = this.copyNetwork(this.qWeights);
        }
        // Decay exploration linearly from explorationInitial down to the
        // explorationFinal floor over explorationDecay update steps.
        this.epsilon = Math.max(this.config.explorationFinal, this.config.explorationInitial - this.stepCount / this.config.explorationDecay);
        this.updateCount++;
        this.avgLoss = totalLoss / batch.length;
        const elapsed = performance.now() - startTime;
        if (elapsed > 10) {
            console.warn(`DQN update exceeded target: ${elapsed.toFixed(2)}ms > 10ms`);
        }
        return {
            loss: this.avgLoss,
            epsilon: this.epsilon,
        };
    }
    /**
     * Get action using epsilon-greedy: with probability epsilon (and only
     * when `explore` is true) a uniformly random action index, otherwise
     * the greedy argmax of the online network's Q-values.
     */
    getAction(state, explore = true) {
        if (explore && Math.random() < this.epsilon) {
            return Math.floor(Math.random() * this.numActions);
        }
        const qValues = this.forward(state, this.qWeights);
        return this.argmax(qValues);
    }
    /**
     * Get the online network's Q-values for a state (length numActions).
     */
    getQValues(state) {
        return this.forward(state, this.qWeights);
    }
    /**
     * Get statistics: update count, current replay-buffer fill, epsilon,
     * last batch's mean TD loss, and the step counter.
     */
    getStats() {
        return {
            updateCount: this.updateCount,
            bufferSize: this.buffer.length,
            epsilon: this.epsilon,
            avgLoss: this.avgLoss,
            stepCount: this.stepCount,
        };
    }
    // ==========================================================================
    // Private Methods
    // ==========================================================================
    // Build the weight arrays for the 2-layer MLP, using He-style scaling
    // (sqrt(2/fanIn)) on uniform noise in [-0.5, 0.5).
    initializeNetwork() {
        // Simple 2-layer network: input -> hidden -> output
        const hiddenDim = 64;
        const weights = [];
        // Layer 1: input_dim -> hidden
        const w1 = new Float32Array(this.inputDim * hiddenDim);
        const scale1 = Math.sqrt(2 / this.inputDim);
        for (let i = 0; i < w1.length; i++) {
            w1[i] = (Math.random() - 0.5) * scale1;
        }
        weights.push(w1);
        // Layer 2: hidden -> num_actions
        const w2 = new Float32Array(hiddenDim * this.numActions);
        const scale2 = Math.sqrt(2 / hiddenDim);
        for (let i = 0; i < w2.length; i++) {
            w2[i] = (Math.random() - 0.5) * scale2;
        }
        weights.push(w2);
        return weights;
    }
    // Deep-copy all weight layers (used to snapshot the target network).
    copyNetwork(weights) {
        return weights.map(w => new Float32Array(w));
    }
    // Forward pass through the given weight set; returns Q-values
    // (Float32Array of length numActions). States longer than inputDim
    // are truncated; shorter states simply contribute fewer terms.
    forward(state, weights) {
        const hiddenDim = 64;
        // Layer 1: ReLU(W1 * x)
        const hidden = new Float32Array(hiddenDim);
        for (let h = 0; h < hiddenDim; h++) {
            let sum = 0;
            for (let i = 0; i < Math.min(state.length, this.inputDim); i++) {
                sum += state[i] * weights[0][i * hiddenDim + h];
            }
            hidden[h] = Math.max(0, sum); // ReLU
        }
        // Layer 2: W2 * hidden (no activation for Q-values)
        const output = new Float32Array(this.numActions);
        for (let a = 0; a < this.numActions; a++) {
            let sum = 0;
            for (let h = 0; h < hiddenDim; h++) {
                sum += hidden[h] * weights[1][h * this.numActions + a];
            }
            output[a] = sum;
        }
        return output;
    }
    // Accumulate d(Q(state, action))/dw scaled by tdError into `gradients`,
    // by re-running the hidden layer and backpropagating through the ReLU.
    // Only the selected action's output column receives layer-2 gradient.
    accumulateGradients(gradients, state, action, tdError) {
        const hiddenDim = 64;
        // Forward pass to get hidden activations
        const hidden = new Float32Array(hiddenDim);
        for (let h = 0; h < hiddenDim; h++) {
            let sum = 0;
            for (let i = 0; i < Math.min(state.length, this.inputDim); i++) {
                sum += state[i] * this.qWeights[0][i * hiddenDim + h];
            }
            hidden[h] = Math.max(0, sum);
        }
        // Gradient for layer 2 (only for selected action)
        for (let h = 0; h < hiddenDim; h++) {
            gradients[1][h * this.numActions + action] += hidden[h] * tdError;
        }
        // Gradient for layer 1 (backprop through ReLU)
        for (let h = 0; h < hiddenDim; h++) {
            if (hidden[h] > 0) { // ReLU gradient
                const grad = tdError * this.qWeights[1][h * this.numActions + action];
                for (let i = 0; i < Math.min(state.length, this.inputDim); i++) {
                    gradients[0][i * hiddenDim + h] += state[i] * grad;
                }
            }
        }
    }
    // Apply accumulated gradients with per-element clipping and momentum.
    // NOTE(review): clipping is per-element against maxGradNorm, not a true
    // global-norm clip despite the config name.
    applyGradients(gradients, batchSize) {
        // Average the batch gradient by folding batchSize into the step size.
        const lr = this.config.learningRate / batchSize;
        const beta = 0.9;
        for (let layer = 0; layer < gradients.length; layer++) {
            for (let i = 0; i < gradients[layer].length; i++) {
                // Gradient clipping
                const grad = Math.max(Math.min(gradients[layer][i], this.config.maxGradNorm), -this.config.maxGradNorm);
                // Momentum update
                this.qMomentum[layer][i] = beta * this.qMomentum[layer][i] + (1 - beta) * grad;
                this.qWeights[layer][i] += lr * this.qMomentum[layer][i];
            }
        }
    }
    // Sample up to miniBatchSize distinct transitions uniformly at random
    // (without replacement, via a Set of indices).
    sampleBatch() {
        const batch = [];
        const indices = new Set();
        while (indices.size < this.config.miniBatchSize && indices.size < this.buffer.length) {
            indices.add(Math.floor(Math.random() * this.buffer.length));
        }
        for (const idx of indices) {
            batch.push(this.buffer[idx]);
        }
        return batch;
    }
    // Deterministically hash an action string into [0, numActions).
    // Distinct actions may collide — acceptable for this fixed action space.
    hashAction(action) {
        let hash = 0;
        for (let i = 0; i < action.length; i++) {
            hash = (hash * 31 + action.charCodeAt(i)) % this.numActions;
        }
        return hash;
    }
    // Index of the maximum value (first occurrence on ties).
    argmax(values) {
        let maxIdx = 0;
        let maxVal = values[0];
        for (let i = 1; i < values.length; i++) {
            if (values[i] > maxVal) {
                maxVal = values[i];
                maxIdx = i;
            }
        }
        return maxIdx;
    }
}
|
|
297
|
+
/**
 * Factory function.
 *
 * Constructs a DQNAlgorithm; `config` is an optional partial
 * configuration merged over DEFAULT_DQN_CONFIG by the constructor.
 */
export function createDQN(config) {
    return new DQNAlgorithm(config);
}
|
|
303
|
+
//# sourceMappingURL=dqn.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"dqn.js","sourceRoot":"","sources":["../../src/algorithms/dqn.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;GAWG;AAQH;;GAEG;AACH,MAAM,CAAC,MAAM,kBAAkB,GAAc;IAC3C,SAAS,EAAE,KAAK;IAChB,YAAY,EAAE,MAAM;IACpB,KAAK,EAAE,IAAI;IACX,WAAW,EAAE,CAAC;IACd,aAAa,EAAE,CAAC;IAChB,WAAW,EAAE,EAAE;IACf,MAAM,EAAE,CAAC;IACT,aAAa,EAAE,EAAE;IACjB,UAAU,EAAE,KAAK;IACjB,kBAAkB,EAAE,GAAG;IACvB,gBAAgB,EAAE,IAAI;IACtB,gBAAgB,EAAE,KAAK;IACvB,gBAAgB,EAAE,GAAG;IACrB,SAAS,EAAE,IAAI;IACf,cAAc,EAAE,KAAK;CACtB,CAAC;AAaF;;GAEG;AACH,MAAM,OAAO,YAAY;IACf,MAAM,CAAY;IAE1B,oBAAoB;IACZ,QAAQ,CAAiB;IACzB,aAAa,CAAiB;IAEtC,kBAAkB;IACV,SAAS,CAAiB;IAElC,2BAA2B;IACnB,MAAM,GAAoB,EAAE,CAAC;IAC7B,SAAS,GAAG,CAAC,CAAC;IAEtB,cAAc;IACN,OAAO,CAAS;IAChB,SAAS,GAAG,CAAC,CAAC;IAEtB,oBAAoB;IACZ,UAAU,GAAG,CAAC,CAAC;IACf,QAAQ,GAAG,GAAG,CAAC;IAEvB,aAAa;IACL,WAAW,GAAG,CAAC,CAAC;IAChB,OAAO,GAAG,CAAC,CAAC;IAEpB,YAAY,SAA6B,EAAE;QACzC,IAAI,CAAC,MAAM,GAAG,EAAE,GAAG,kBAAkB,EAAE,GAAG,MAAM,EAAE,CAAC;QACnD,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,MAAM,CAAC,kBAAkB,CAAC;QAE9C,yCAAyC;QACzC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,iBAAiB,EAAE,CAAC;QACzC,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QACrD,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,YAAY,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;IACtE,CAAC;IAED;;OAEG;IACH,aAAa,CAAC,UAAsB;QAClC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YACjD,MAAM,IAAI,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;YACjC,MAAM,QAAQ,GAAG,CAAC,GAAG,UAAU,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC;gBAC9C,CAAC,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC;gBACzB,CAAC,CAAC,IAAI,CAAC;YAET,MAAM,UAAU,GAAkB;gBAChC,KAAK,EAAE,IAAI,CAAC,WAAW;gBACvB,MAAM,EAAE,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,MAAM,CAAC;gBACpC,MAAM,EAAE,IAAI,CAAC,MAAM;gBACnB,SAAS,EAAE,IAAI,CAAC,UAAU;gBAC1B,IAAI,EAAE,QAAQ,KAAK,IAAI;aACxB,CAAC;YAEF,yBAAyB;YACzB,IAAI,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,UAAU,EAAE,CAAC;gBAChD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;YAC/B,CAAC;iBAAM,CAAC;gBAC
N,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,GAAG,UAAU,CAAC;YAC3C,CAAC;YACD,IAAI,CAAC,SAAS,GAAG,CAAC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC;QACjE,CAAC;IACH,CAAC;IAED;;;OAGG;IACH,MAAM;QACJ,MAAM,SAAS,GAAG,WAAW,CAAC,GAAG,EAAE,CAAC;QAEpC,IAAI,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,aAAa,EAAE,CAAC;YACnD,OAAO,EAAE,IAAI,EAAE,CAAC,EAAE,OAAO,EAAE,IAAI,CAAC,OAAO,EAAE,CAAC;QAC5C,CAAC;QAED,oBAAoB;QACpB,MAAM,KAAK,GAAG,IAAI,CAAC,WAAW,EAAE,CAAC;QAEjC,qBAAqB;QACrB,IAAI,SAAS,GAAG,CAAC,CAAC;QAClB,MAAM,SAAS,GAAG,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,YAAY,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;QAErE,KAAK,MAAM,GAAG,IAAI,KAAK,EAAE,CAAC;YACxB,mBAAmB;YACnB,MAAM,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;YACvD,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;YAErC,iBAAiB;YACjB,IAAI,OAAe,CAAC;YACpB,IAAI,GAAG,CAAC,IAAI,EAAE,CAAC;gBACb,OAAO,GAAG,GAAG,CAAC,MAAM,CAAC;YACvB,CAAC;iBAAM,CAAC;gBACN,IAAI,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE,CAAC;oBAC1B,sEAAsE;oBACtE,MAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC/D,MAAM,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC;oBAC5C,MAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;oBACpE,OAAO,GAAG,GAAG,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,GAAG,WAAW,CAAC,UAAU,CAAC,CAAC;gBACrE,CAAC;qBAAM,CAAC;oBACN,eAAe;oBACf,MAAM,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;oBAC9D,OAAO,GAAG,GAAG,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC;gBAChE,CAAC;YACH,CAAC;YAED,WAAW;YACX,MAAM,OAAO,GAAG,OAAO,GAAG,QAAQ,CAAC;YACnC,MAAM,IAAI,GAAG,OAAO,GAAG,OAAO,CAAC;YAC/B,SAAS,IAAI,IAAI,CAAC;YAElB,uBAAuB;YACvB,IAAI,CAAC,mBAAmB,CAAC,SAAS,EAAE,GAAG,CAAC,KAAK,EAAE,GAAG,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACtE,CAAC;QAED,kBAAkB;QAClB,IAAI,CAAC,cAAc,CAAC,SAAS,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;QAE7C,qCAAqC;QACrC,IAAI,CAAC,SAAS,EAAE,CAAC;QACjB,IAAI,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,gBAAgB,KAAK,CAA
C,EAAE,CAAC;YACxD,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QACvD,CAAC;QAED,oBAAoB;QACpB,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,GAAG,CACrB,IAAI,CAAC,MAAM,CAAC,gBAAgB,EAC5B,IAAI,CAAC,MAAM,CAAC,kBAAkB,GAAG,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,gBAAgB,CAC/E,CAAC;QAEF,IAAI,CAAC,WAAW,EAAE,CAAC;QACnB,IAAI,CAAC,OAAO,GAAG,SAAS,GAAG,KAAK,CAAC,MAAM,CAAC;QAExC,MAAM,OAAO,GAAG,WAAW,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC;QAC9C,IAAI,OAAO,GAAG,EAAE,EAAE,CAAC;YACjB,OAAO,CAAC,IAAI,CAAC,+BAA+B,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC;QAC7E,CAAC;QAED,OAAO;YACL,IAAI,EAAE,IAAI,CAAC,OAAO;YAClB,OAAO,EAAE,IAAI,CAAC,OAAO;SACtB,CAAC;IACJ,CAAC;IAED;;OAEG;IACH,SAAS,CAAC,KAAmB,EAAE,UAAmB,IAAI;QACpD,IAAI,OAAO,IAAI,IAAI,CAAC,MAAM,EAAE,GAAG,IAAI,CAAC,OAAO,EAAE,CAAC;YAC5C,OAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,IAAI,CAAC,UAAU,CAAC,CAAC;QACrD,CAAC;QAED,MAAM,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;QACnD,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;IAC9B,CAAC;IAED;;OAEG;IACH,UAAU,CAAC,KAAmB;QAC5B,OAAO,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;IAC5C,CAAC;IAED;;OAEG;IACH,QAAQ;QACN,OAAO;YACL,WAAW,EAAE,IAAI,CAAC,WAAW;YAC7B,UAAU,EAAE,IAAI,CAAC,MAAM,CAAC,MAAM;YAC9B,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,SAAS,EAAE,IAAI,CAAC,SAAS;SAC1B,CAAC;IACJ,CAAC;IAED,6EAA6E;IAC7E,kBAAkB;IAClB,6EAA6E;IAErE,iBAAiB;QACvB,oDAAoD;QACpD,MAAM,SAAS,GAAG,EAAE,CAAC;QACrB,MAAM,OAAO,GAAmB,EAAE,CAAC;QAEnC,+BAA+B;QAC/B,MAAM,EAAE,GAAG,IAAI,YAAY,CAAC,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC,CAAC;QACvD,MAAM,MAAM,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC;QAC5C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,EAAE,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YACnC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,GAAG,CAAC,GAAG,MAAM,CAAC;QACzC,CAAC;QACD,OAAO,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;QAEjB,iCAAiC;QACjC,MAAM,EAAE,GAAG,IAAI,YAAY,CAAC,SAAS,GAAG,IAAI,CAAC,UAAU,CAAC,CAAC;QACzD,MAAM,MAAM,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,SAAS,CAAC,CAAC;QACxC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,C
AAC,GAAG,EAAE,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YACnC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,GAAG,CAAC,GAAG,MAAM,CAAC;QACzC,CAAC;QACD,OAAO,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;QAEjB,OAAO,OAAO,CAAC;IACjB,CAAC;IAEO,WAAW,CAAC,OAAuB;QACzC,OAAO,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC;IAC/C,CAAC;IAEO,OAAO,CAAC,KAAmB,EAAE,OAAuB;QAC1D,MAAM,SAAS,GAAG,EAAE,CAAC;QAErB,wBAAwB;QACxB,MAAM,MAAM,GAAG,IAAI,YAAY,CAAC,SAAS,CAAC,CAAC;QAC3C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,EAAE,CAAC,EAAE,EAAE,CAAC;YACnC,IAAI,GAAG,GAAG,CAAC,CAAC;YACZ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;gBAC/D,GAAG,IAAI,KAAK,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,SAAS,GAAG,CAAC,CAAC,CAAC;YAClD,CAAC;YACD,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,OAAO;QACvC,CAAC;QAED,oDAAoD;QACpD,MAAM,MAAM,GAAG,IAAI,YAAY,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;QACjD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,UAAU,EAAE,CAAC,EAAE,EAAE,CAAC;YACzC,IAAI,GAAG,GAAG,CAAC,CAAC;YACZ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,EAAE,CAAC,EAAE,EAAE,CAAC;gBACnC,GAAG,IAAI,MAAM,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC;YACzD,CAAC;YACD,MAAM,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC;QAClB,CAAC;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IAEO,mBAAmB,CACzB,SAAyB,EACzB,KAAmB,EACnB,MAAc,EACd,OAAe;QAEf,MAAM,SAAS,GAAG,EAAE,CAAC;QAErB,yCAAyC;QACzC,MAAM,MAAM,GAAG,IAAI,YAAY,CAAC,SAAS,CAAC,CAAC;QAC3C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,EAAE,CAAC,EAAE,EAAE,CAAC;YACnC,IAAI,GAAG,GAAG,CAAC,CAAC;YACZ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;gBAC/D,GAAG,IAAI,KAAK,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,SAAS,GAAG,CAAC,CAAC,CAAC;YACxD,CAAC;YACD,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC;QAC/B,CAAC;QAED,kDAAkD;QAClD,KAAK,I
AAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,EAAE,CAAC,EAAE,EAAE,CAAC;YACnC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,UAAU,GAAG,MAAM,CAAC,IAAI,MAAM,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC;QACpE,CAAC;QAED,+CAA+C;QAC/C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,EAAE,CAAC,EAAE,EAAE,CAAC;YACnC,IAAI,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,gBAAgB;gBACnC,MAAM,IAAI,GAAG,OAAO,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,UAAU,GAAG,MAAM,CAAC,CAAC;gBACtE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;oBAC/D,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,SAAS,GAAG,CAAC,CAAC,IAAI,KAAK,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC;gBACrD,CAAC;YACH,CAAC;QACH,CAAC;IACH,CAAC;IAEO,cAAc,CAAC,SAAyB,EAAE,SAAiB;QACjE,MAAM,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,YAAY,GAAG,SAAS,CAAC;QAChD,MAAM,IAAI,GAAG,GAAG,CAAC;QAEjB,KAAK,IAAI,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,SAAS,CAAC,MAAM,EAAE,KAAK,EAAE,EAAE,CAAC;YACtD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,KAAK,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;gBACjD,oBAAoB;gBACpB,MAAM,IAAI,GAAG,IAAI,CAAC,GAAG,CACnB,IAAI,CAAC,GAAG,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,EACtD,CAAC,IAAI,CAAC,MAAM,CAAC,WAAW,CACzB,CAAC;gBAEF,kBAAkB;gBAClB,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,IAAI,CAAC;gBAC/E,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,GAAG,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;YAC3D,CAAC;QACH,CAAC;IACH,CAAC;IAEO,WAAW;QACjB,MAAM,KAAK,GAAoB,EAAE,CAAC;QAClC,MAAM,OAAO,GAAG,IAAI,GAAG,EAAU,CAAC;QAElC,OAAO,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,aAAa,IAAI,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC;YACrF,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC;QAC9D,CAAC;QAED,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE,CAAC;YAC1B,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;QAC/B,CAAC;QAED,OAAO,KAAK
,CAAC;IACf,CAAC;IAEO,UAAU,CAAC,MAAc;QAC/B,IAAI,IAAI,GAAG,CAAC,CAAC;QACb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YACvC,IAAI,GAAG,CAAC,IAAI,GAAG,EAAE,GAAG,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,UAAU,CAAC;QAC9D,CAAC;QACD,OAAO,IAAI,CAAC;IACd,CAAC;IAEO,MAAM,CAAC,MAAoB;QACjC,IAAI,MAAM,GAAG,CAAC,CAAC;QACf,IAAI,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;QACvB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YACvC,IAAI,MAAM,CAAC,CAAC,CAAC,GAAG,MAAM,EAAE,CAAC;gBACvB,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;gBACnB,MAAM,GAAG,CAAC,CAAC;YACb,CAAC;QACH,CAAC;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;CACF;AAED;;GAEG;AACH,MAAM,UAAU,SAAS,CAAC,MAA2B;IACnD,OAAO,IAAI,YAAY,CAAC,MAAM,CAAC,CAAC;AAClC,CAAC"}
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
/**
 * RL Algorithms Index
 *
 * Exports all reinforcement learning algorithm implementations.
 */
// PPO - Proximal Policy Optimization
export { PPOAlgorithm, createPPO, DEFAULT_PPO_CONFIG, } from './ppo.js';
export type { PPOConfig } from '../types.js';
// DQN - Deep Q-Network
export { DQNAlgorithm, createDQN, DEFAULT_DQN_CONFIG, } from './dqn.js';
export type { DQNConfig } from '../types.js';
// A2C - Advantage Actor-Critic
export { A2CAlgorithm, createA2C, DEFAULT_A2C_CONFIG, } from './a2c.js';
export type { A2CConfig } from './a2c.js';
// Decision Transformer
export { DecisionTransformer, createDecisionTransformer, DEFAULT_DT_CONFIG, } from './decision-transformer.js';
export type { DecisionTransformerConfig } from '../types.js';
// Q-Learning (Tabular)
export { QLearning, createQLearning, DEFAULT_QLEARNING_CONFIG, } from './q-learning.js';
export type { QLearningConfig } from './q-learning.js';
// SARSA
export { SARSAAlgorithm, createSARSA, DEFAULT_SARSA_CONFIG, } from './sarsa.js';
export type { SARSAConfig } from './sarsa.js';
// Curiosity-Driven Exploration
export { CuriosityModule, createCuriosity, DEFAULT_CURIOSITY_CONFIG, } from './curiosity.js';
export type { CuriosityConfig } from '../types.js';
/**
 * Algorithm factory
 */
import type { RLAlgorithm, RLConfig } from '../types.js';
/**
 * Create an RL algorithm by name.
 *
 * Returns the matching algorithm instance; the static return type is
 * `unknown`, so callers narrow to the concrete algorithm class.
 * Throws for an unrecognized algorithm name.
 */
export declare function createAlgorithm(algorithm: RLAlgorithm, config?: Partial<RLConfig>): unknown;
/**
 * Get default configuration for an algorithm.
 *
 * Throws for an unrecognized algorithm name.
 */
export declare function getDefaultConfig(algorithm: RLAlgorithm): RLConfig;
|
|
32
|
+
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/algorithms/index.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAGH,OAAO,EACL,YAAY,EACZ,SAAS,EACT,kBAAkB,GACnB,MAAM,UAAU,CAAC;AAClB,YAAY,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAG7C,OAAO,EACL,YAAY,EACZ,SAAS,EACT,kBAAkB,GACnB,MAAM,UAAU,CAAC;AAClB,YAAY,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAG7C,OAAO,EACL,YAAY,EACZ,SAAS,EACT,kBAAkB,GACnB,MAAM,UAAU,CAAC;AAClB,YAAY,EAAE,SAAS,EAAE,MAAM,UAAU,CAAC;AAG1C,OAAO,EACL,mBAAmB,EACnB,yBAAyB,EACzB,iBAAiB,GAClB,MAAM,2BAA2B,CAAC;AACnC,YAAY,EAAE,yBAAyB,EAAE,MAAM,aAAa,CAAC;AAG7D,OAAO,EACL,SAAS,EACT,eAAe,EACf,wBAAwB,GACzB,MAAM,iBAAiB,CAAC;AACzB,YAAY,EAAE,eAAe,EAAE,MAAM,iBAAiB,CAAC;AAGvD,OAAO,EACL,cAAc,EACd,WAAW,EACX,oBAAoB,GACrB,MAAM,YAAY,CAAC;AACpB,YAAY,EAAE,WAAW,EAAE,MAAM,YAAY,CAAC;AAG9C,OAAO,EACL,eAAe,EACf,eAAe,EACf,wBAAwB,GACzB,MAAM,gBAAgB,CAAC;AACxB,YAAY,EAAE,eAAe,EAAE,MAAM,aAAa,CAAC;AAEnD;;GAEG;AACH,OAAO,KAAK,EAAE,WAAW,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AASzD;;GAEG;AACH,wBAAgB,eAAe,CAAC,SAAS,EAAE,WAAW,EAAE,MAAM,CAAC,EAAE,OAAO,CAAC,QAAQ,CAAC,GAAG,OAAO,CAoB3F;AAED;;GAEG;AACH,wBAAgB,gBAAgB,CAAC,SAAS,EAAE,WAAW,GAAG,QAAQ,CAmBjE"}
|
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
/**
 * RL Algorithms Index
 *
 * Exports all reinforcement learning algorithm implementations.
 */
// PPO - Proximal Policy Optimization
export { PPOAlgorithm, createPPO, DEFAULT_PPO_CONFIG, } from './ppo.js';
// DQN - Deep Q-Network
export { DQNAlgorithm, createDQN, DEFAULT_DQN_CONFIG, } from './dqn.js';
// A2C - Advantage Actor-Critic
export { A2CAlgorithm, createA2C, DEFAULT_A2C_CONFIG, } from './a2c.js';
// Decision Transformer
export { DecisionTransformer, createDecisionTransformer, DEFAULT_DT_CONFIG, } from './decision-transformer.js';
// Q-Learning (Tabular)
export { QLearning, createQLearning, DEFAULT_QLEARNING_CONFIG, } from './q-learning.js';
// SARSA
export { SARSAAlgorithm, createSARSA, DEFAULT_SARSA_CONFIG, } from './sarsa.js';
// Curiosity-Driven Exploration
export { CuriosityModule, createCuriosity, DEFAULT_CURIOSITY_CONFIG, } from './curiosity.js';
import { createPPO, DEFAULT_PPO_CONFIG } from './ppo.js';
import { createDQN, DEFAULT_DQN_CONFIG } from './dqn.js';
import { createA2C, DEFAULT_A2C_CONFIG } from './a2c.js';
import { createDecisionTransformer, DEFAULT_DT_CONFIG } from './decision-transformer.js';
import { createQLearning, DEFAULT_QLEARNING_CONFIG } from './q-learning.js';
import { createSARSA, DEFAULT_SARSA_CONFIG } from './sarsa.js';
import { createCuriosity, DEFAULT_CURIOSITY_CONFIG } from './curiosity.js';
// Dispatch tables keyed by algorithm name. A Map (rather than a plain
// object) avoids accidental hits on inherited Object.prototype keys.
const ALGORITHM_FACTORIES = new Map([
    ['ppo', createPPO],
    ['dqn', createDQN],
    ['a2c', createA2C],
    ['decision-transformer', createDecisionTransformer],
    ['q-learning', createQLearning],
    ['sarsa', createSARSA],
    ['curiosity', createCuriosity],
]);
const ALGORITHM_DEFAULTS = new Map([
    ['ppo', DEFAULT_PPO_CONFIG],
    ['dqn', DEFAULT_DQN_CONFIG],
    ['a2c', DEFAULT_A2C_CONFIG],
    ['decision-transformer', DEFAULT_DT_CONFIG],
    ['q-learning', DEFAULT_QLEARNING_CONFIG],
    ['sarsa', DEFAULT_SARSA_CONFIG],
    ['curiosity', DEFAULT_CURIOSITY_CONFIG],
]);
/**
 * Create an RL algorithm by name.
 * Throws for an unrecognized algorithm name.
 */
export function createAlgorithm(algorithm, config) {
    // Each factory validates/merges its own config shape.
    const factory = ALGORITHM_FACTORIES.get(algorithm);
    if (factory === undefined) {
        throw new Error(`Unknown algorithm: ${algorithm}`);
    }
    return factory(config);
}
/**
 * Get default configuration for an algorithm.
 * Returns a shallow copy so callers may mutate it freely.
 * Throws for an unrecognized algorithm name.
 */
export function getDefaultConfig(algorithm) {
    const defaults = ALGORITHM_DEFAULTS.get(algorithm);
    if (defaults === undefined) {
        throw new Error(`Unknown algorithm: ${algorithm}`);
    }
    return { ...defaults };
}
|
|
74
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/algorithms/index.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,qCAAqC;AACrC,OAAO,EACL,YAAY,EACZ,SAAS,EACT,kBAAkB,GACnB,MAAM,UAAU,CAAC;AAGlB,uBAAuB;AACvB,OAAO,EACL,YAAY,EACZ,SAAS,EACT,kBAAkB,GACnB,MAAM,UAAU,CAAC;AAGlB,+BAA+B;AAC/B,OAAO,EACL,YAAY,EACZ,SAAS,EACT,kBAAkB,GACnB,MAAM,UAAU,CAAC;AAGlB,uBAAuB;AACvB,OAAO,EACL,mBAAmB,EACnB,yBAAyB,EACzB,iBAAiB,GAClB,MAAM,2BAA2B,CAAC;AAGnC,uBAAuB;AACvB,OAAO,EACL,SAAS,EACT,eAAe,EACf,wBAAwB,GACzB,MAAM,iBAAiB,CAAC;AAGzB,QAAQ;AACR,OAAO,EACL,cAAc,EACd,WAAW,EACX,oBAAoB,GACrB,MAAM,YAAY,CAAC;AAGpB,+BAA+B;AAC/B,OAAO,EACL,eAAe,EACf,eAAe,EACf,wBAAwB,GACzB,MAAM,gBAAgB,CAAC;AAOxB,OAAO,EAAE,SAAS,EAAE,kBAAkB,EAAE,MAAM,UAAU,CAAC;AACzD,OAAO,EAAE,SAAS,EAAE,kBAAkB,EAAE,MAAM,UAAU,CAAC;AACzD,OAAO,EAAE,SAAS,EAAE,kBAAkB,EAAE,MAAM,UAAU,CAAC;AACzD,OAAO,EAAE,yBAAyB,EAAE,iBAAiB,EAAE,MAAM,2BAA2B,CAAC;AACzF,OAAO,EAAE,eAAe,EAAE,wBAAwB,EAAE,MAAM,iBAAiB,CAAC;AAC5E,OAAO,EAAE,WAAW,EAAE,oBAAoB,EAAE,MAAM,YAAY,CAAC;AAC/D,OAAO,EAAE,eAAe,EAAE,wBAAwB,EAAE,MAAM,gBAAgB,CAAC;AAE3E;;GAEG;AACH,MAAM,UAAU,eAAe,CAAC,SAAsB,EAAE,MAA0B;IAChF,oEAAoE;IACpE,QAAQ,SAAS,EAAE,CAAC;QAClB,KAAK,KAAK;YACR,OAAO,SAAS,CAAC,MAAyC,CAAC,CAAC;QAC9D,KAAK,KAAK;YACR,OAAO,SAAS,CAAC,MAAyC,CAAC,CAAC;QAC9D,KAAK,KAAK;YACR,OAAO,SAAS,CAAC,MAAyC,CAAC,CAAC;QAC9D,KAAK,sBAAsB;YACzB,OAAO,yBAAyB,CAAC,MAAyD,CAAC,CAAC;QAC9F,KAAK,YAAY;YACf,OAAO,eAAe,CAAC,MAA+C,CAAC,CAAC;QAC1E,KAAK,OAAO;YACV,OAAO,WAAW,CAAC,MAA2C,CAAC,CAAC;QAClE,KAAK,WAAW;YACd,OAAO,eAAe,CAAC,MAA+C,CAAC,CAAC;QAC1E;YACE,MAAM,IAAI,KAAK,CAAC,sBAAsB,SAAS,EAAE,CAAC,CAAC;IACvD,CAAC;AACH,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,gBAAgB,CAAC,SAAsB;IACrD,QAAQ,SAAS,EAAE,CAAC;QAClB,KAAK,KAAK;YACR,OAAO,EAAE,GAAG,kBAAkB,EAAE,CAAC;QACnC,KAAK,KAAK;YACR,OAAO,EAAE,GAAG,kBAAkB,EAAE,CAAC;QACnC,KAAK,KAAK;YACR,OAAO,EAAE,GAAG,kBAAkB,EAAE,CAAC;QACnC,KAAK,sBAAsB;YACzB,OAAO,EAAE,GAAG,iBAAiB,EAAE,CAAC;QAClC,KAAK,YAAY;YACf,OAAO,EAAE,GAAG,wBAAwB,EAAE,CAAC;QACzC,KAAK,OAAO;YACV,OAAO,EAAE,GAAG,oBAAoB,EAAE,CAAC;QACrC,KAAK,WAAW
;YACd,OAAO,EAAE,GAAG,wBAAwB,EAAE,CAAC;QACzC;YACE,MAAM,IAAI,KAAK,CAAC,sBAAsB,SAAS,EAAE,CAAC,CAAC;IACvD,CAAC;AACH,CAAC"}
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Proximal Policy Optimization (PPO)
|
|
3
|
+
*
|
|
4
|
+
* Implements PPO algorithm for stable policy learning with:
|
|
5
|
+
* - Clipped surrogate objective
|
|
6
|
+
* - GAE (Generalized Advantage Estimation)
|
|
7
|
+
* - Value function clipping
|
|
8
|
+
* - Entropy bonus
|
|
9
|
+
*
|
|
10
|
+
* Performance Target: <10ms per update step
|
|
11
|
+
*/
|
|
12
|
+
import type { PPOConfig, Trajectory } from '../types.js';
|
|
13
|
+
/**
 * Default PPO configuration.
 *
 * Shared exported constant — take a copy (e.g. spread) before mutating.
 * The concrete field values live in the compiled ppo.js, not in this
 * declaration file.
 */
export declare const DEFAULT_PPO_CONFIG: PPOConfig;
|
|
17
|
+
/**
 * PPO Algorithm Implementation — public surface of the compiled ppo.js.
 *
 * Declaration file only: method bodies live in the emitted JavaScript.
 * Per the file header, the implementation uses a clipped surrogate
 * objective, GAE advantages, value clipping and an entropy bonus.
 */
export declare class PPOAlgorithm {
    /** Resolved PPOConfig used by this instance. */
    private config;
    /** Actor (policy) parameters. */
    private policyWeights;
    /** Critic (value function) parameters. */
    private valueWeights;
    /** Optimizer momentum state for the policy — presumably SGD+momentum; confirm in ppo.js. */
    private policyMomentum;
    /** Optimizer momentum state for the critic. */
    private valueMomentum;
    /** Experience buffer filled by addExperience() and consumed by update(). */
    private buffer;
    /** Diagnostics — NOTE(review): assumed to feed getStats(); confirm in ppo.js. */
    private updateCount;
    private totalLoss;
    private approxKL;
    private clipFraction;
    /**
     * @param config Partial overrides; unspecified fields presumably fall
     *   back to DEFAULT_PPO_CONFIG — confirm against ppo.js.
     */
    constructor(config?: Partial<PPOConfig>);
    /**
     * Add experience from a completed trajectory to the internal buffer.
     */
    addExperience(trajectory: Trajectory): void;
    /**
     * Perform one PPO update over buffered experience.
     * Performance target: <10ms.
     *
     * @returns Policy loss, value loss and entropy for this update.
     */
    update(): {
        policyLoss: number;
        valueLoss: number;
        entropy: number;
    };
    /**
     * Get an action from the policy for `state`.
     *
     * @returns The action index, its log-probability, and the value
     *   estimate for `state` (presumably from the critic — confirm in ppo.js).
     */
    getAction(state: Float32Array): {
        action: number;
        logProb: number;
        value: number;
    };
    /**
     * Get diagnostic statistics — presumably losses, approximate KL and
     * clip fraction, matching the private fields above; confirm in ppo.js.
     */
    getStats(): Record<string, number>;
    // Internal helpers (bodies in ppo.js). Names suggest value/logit
    // computation, softmax sampling, GAE/return computation and
    // mini-batch updates — not verifiable from this declaration file.
    private computeValue;
    private computeLogits;
    private computeLogProb;
    private hashAction;
    private softmax;
    private sampleAction;
    private computeGAE;
    private computeReturns;
    private shuffleBuffer;
    private updateMiniBatch;
}
|
|
68
|
+
/**
 * Factory function for PPOAlgorithm.
 *
 * @param config Optional partial configuration, presumably forwarded to
 *   the PPOAlgorithm constructor — confirm in ppo.js.
 * @returns A new PPOAlgorithm instance.
 */
export declare function createPPO(config?: Partial<PPOConfig>): PPOAlgorithm;
|
|
72
|
+
//# sourceMappingURL=ppo.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"ppo.d.ts","sourceRoot":"","sources":["../../src/algorithms/ppo.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;GAUG;AAEH,OAAO,KAAK,EACV,SAAS,EACT,UAAU,EAEX,MAAM,aAAa,CAAC;AAErB;;GAEG;AACH,eAAO,MAAM,kBAAkB,EAAE,SAahC,CAAC;AAeF;;GAEG;AACH,qBAAa,YAAY;IACvB,OAAO,CAAC,MAAM,CAAY;IAG1B,OAAO,CAAC,aAAa,CAAe;IACpC,OAAO,CAAC,YAAY,CAAe;IAGnC,OAAO,CAAC,cAAc,CAAe;IACrC,OAAO,CAAC,aAAa,CAAe;IAGpC,OAAO,CAAC,MAAM,CAAuB;IAGrC,OAAO,CAAC,WAAW,CAAK;IACxB,OAAO,CAAC,SAAS,CAAK;IACtB,OAAO,CAAC,QAAQ,CAAK;IACrB,OAAO,CAAC,YAAY,CAAK;gBAEb,MAAM,GAAE,OAAO,CAAC,SAAS,CAAM;IAkB3C;;OAEG;IACH,aAAa,CAAC,UAAU,EAAE,UAAU,GAAG,IAAI;IAkC3C;;;OAGG;IACH,MAAM,IAAI;QAAE,UAAU,EAAE,MAAM,CAAC;QAAC,SAAS,EAAE,MAAM,CAAC;QAAC,OAAO,EAAE,MAAM,CAAA;KAAE;IAkEpE;;OAEG;IACH,SAAS,CAAC,KAAK,EAAE,YAAY,GAAG;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,OAAO,EAAE,MAAM,CAAC;QAAC,KAAK,EAAE,MAAM,CAAA;KAAE;IAYlF;;OAEG;IACH,QAAQ,IAAI,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC;IAclC,OAAO,CAAC,YAAY;IAQpB,OAAO,CAAC,aAAa;IAcrB,OAAO,CAAC,cAAc;IAOtB,OAAO,CAAC,UAAU;IASlB,OAAO,CAAC,OAAO;IAiBf,OAAO,CAAC,YAAY;IAUpB,OAAO,CAAC,UAAU;IAclB,OAAO,CAAC,cAAc;IAYtB,OAAO,CAAC,aAAa;IAOrB,OAAO,CAAC,eAAe;CA8FxB;AAED;;GAEG;AACH,wBAAgB,SAAS,CAAC,MAAM,CAAC,EAAE,OAAO,CAAC,SAAS,CAAC,GAAG,YAAY,CAEnE"}
|