agentic-qe 2.2.0 → 2.2.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (112)
  1. package/.claude/agents/qe-security-scanner.md +26 -0
  2. package/.claude/skills/agentic-quality-engineering/SKILL.md +4 -4
  3. package/.claude/skills/cicd-pipeline-qe-orchestrator/README.md +14 -11
  4. package/.claude/skills/skills-manifest.json +2 -2
  5. package/CHANGELOG.md +75 -0
  6. package/README.md +92 -214
  7. package/dist/agents/BaseAgent.d.ts +5 -1
  8. package/dist/agents/BaseAgent.d.ts.map +1 -1
  9. package/dist/agents/BaseAgent.js +32 -17
  10. package/dist/agents/BaseAgent.js.map +1 -1
  11. package/dist/agents/index.js +3 -3
  12. package/dist/agents/index.js.map +1 -1
  13. package/dist/cli/commands/improve/index.d.ts +8 -1
  14. package/dist/cli/commands/improve/index.d.ts.map +1 -1
  15. package/dist/cli/commands/improve/index.js +18 -16
  16. package/dist/cli/commands/improve/index.js.map +1 -1
  17. package/dist/cli/commands/learn/index.d.ts +10 -2
  18. package/dist/cli/commands/learn/index.d.ts.map +1 -1
  19. package/dist/cli/commands/learn/index.js +99 -63
  20. package/dist/cli/commands/learn/index.js.map +1 -1
  21. package/dist/cli/commands/patterns/index.d.ts +8 -1
  22. package/dist/cli/commands/patterns/index.d.ts.map +1 -1
  23. package/dist/cli/commands/patterns/index.js +79 -45
  24. package/dist/cli/commands/patterns/index.js.map +1 -1
  25. package/dist/cli/commands/routing/index.d.ts +5 -0
  26. package/dist/cli/commands/routing/index.d.ts.map +1 -1
  27. package/dist/cli/commands/routing/index.js +11 -10
  28. package/dist/cli/commands/routing/index.js.map +1 -1
  29. package/dist/cli/init/agents.d.ts +1 -1
  30. package/dist/cli/init/agents.js +2 -2
  31. package/dist/cli/init/database-init.d.ts +7 -0
  32. package/dist/cli/init/database-init.d.ts.map +1 -1
  33. package/dist/cli/init/database-init.js +29 -48
  34. package/dist/cli/init/database-init.js.map +1 -1
  35. package/dist/core/memory/HNSWVectorMemory.d.ts +261 -0
  36. package/dist/core/memory/HNSWVectorMemory.d.ts.map +1 -0
  37. package/dist/core/memory/HNSWVectorMemory.js +647 -0
  38. package/dist/core/memory/HNSWVectorMemory.js.map +1 -0
  39. package/dist/core/memory/SwarmMemoryManager.d.ts +7 -0
  40. package/dist/core/memory/SwarmMemoryManager.d.ts.map +1 -1
  41. package/dist/core/memory/SwarmMemoryManager.js +9 -0
  42. package/dist/core/memory/SwarmMemoryManager.js.map +1 -1
  43. package/dist/core/memory/index.d.ts +2 -0
  44. package/dist/core/memory/index.d.ts.map +1 -1
  45. package/dist/core/memory/index.js +11 -1
  46. package/dist/core/memory/index.js.map +1 -1
  47. package/dist/learning/ExplainableLearning.d.ts +191 -0
  48. package/dist/learning/ExplainableLearning.d.ts.map +1 -0
  49. package/dist/learning/ExplainableLearning.js +441 -0
  50. package/dist/learning/ExplainableLearning.js.map +1 -0
  51. package/dist/learning/GossipPatternSharingProtocol.d.ts +228 -0
  52. package/dist/learning/GossipPatternSharingProtocol.d.ts.map +1 -0
  53. package/dist/learning/GossipPatternSharingProtocol.js +590 -0
  54. package/dist/learning/GossipPatternSharingProtocol.js.map +1 -0
  55. package/dist/learning/LearningEngine.d.ts +4 -4
  56. package/dist/learning/LearningEngine.d.ts.map +1 -1
  57. package/dist/learning/LearningEngine.js +20 -13
  58. package/dist/learning/LearningEngine.js.map +1 -1
  59. package/dist/learning/PerformanceOptimizer.d.ts +268 -0
  60. package/dist/learning/PerformanceOptimizer.d.ts.map +1 -0
  61. package/dist/learning/PerformanceOptimizer.js +552 -0
  62. package/dist/learning/PerformanceOptimizer.js.map +1 -0
  63. package/dist/learning/PrivacyManager.d.ts +197 -0
  64. package/dist/learning/PrivacyManager.d.ts.map +1 -0
  65. package/dist/learning/PrivacyManager.js +551 -0
  66. package/dist/learning/PrivacyManager.js.map +1 -0
  67. package/dist/learning/TransferLearningManager.d.ts +212 -0
  68. package/dist/learning/TransferLearningManager.d.ts.map +1 -0
  69. package/dist/learning/TransferLearningManager.js +497 -0
  70. package/dist/learning/TransferLearningManager.js.map +1 -0
  71. package/dist/learning/algorithms/MAMLMetaLearner.d.ts +218 -0
  72. package/dist/learning/algorithms/MAMLMetaLearner.d.ts.map +1 -0
  73. package/dist/learning/algorithms/MAMLMetaLearner.js +532 -0
  74. package/dist/learning/algorithms/MAMLMetaLearner.js.map +1 -0
  75. package/dist/learning/algorithms/index.d.ts +4 -1
  76. package/dist/learning/algorithms/index.d.ts.map +1 -1
  77. package/dist/learning/algorithms/index.js +7 -1
  78. package/dist/learning/algorithms/index.js.map +1 -1
  79. package/dist/learning/index.d.ts +8 -0
  80. package/dist/learning/index.d.ts.map +1 -1
  81. package/dist/learning/index.js +17 -1
  82. package/dist/learning/index.js.map +1 -1
  83. package/dist/mcp/server-instructions.d.ts +1 -1
  84. package/dist/mcp/server-instructions.js +1 -1
  85. package/dist/providers/HybridRouter.d.ts +217 -0
  86. package/dist/providers/HybridRouter.d.ts.map +1 -0
  87. package/dist/providers/HybridRouter.js +679 -0
  88. package/dist/providers/HybridRouter.js.map +1 -0
  89. package/dist/providers/index.d.ts +1 -0
  90. package/dist/providers/index.d.ts.map +1 -1
  91. package/dist/providers/index.js +7 -1
  92. package/dist/providers/index.js.map +1 -1
  93. package/dist/telemetry/LearningTelemetry.d.ts +190 -0
  94. package/dist/telemetry/LearningTelemetry.d.ts.map +1 -0
  95. package/dist/telemetry/LearningTelemetry.js +403 -0
  96. package/dist/telemetry/LearningTelemetry.js.map +1 -0
  97. package/dist/telemetry/index.d.ts +1 -0
  98. package/dist/telemetry/index.d.ts.map +1 -1
  99. package/dist/telemetry/index.js +20 -2
  100. package/dist/telemetry/index.js.map +1 -1
  101. package/dist/telemetry/instrumentation/agent.d.ts +1 -1
  102. package/dist/telemetry/instrumentation/agent.js +1 -1
  103. package/dist/telemetry/instrumentation/index.d.ts +1 -1
  104. package/dist/telemetry/instrumentation/index.js +1 -1
  105. package/dist/utils/math.d.ts +11 -0
  106. package/dist/utils/math.d.ts.map +1 -0
  107. package/dist/utils/math.js +16 -0
  108. package/dist/utils/math.js.map +1 -0
  109. package/docs/reference/agents.md +1 -1
  110. package/docs/reference/skills.md +3 -3
  111. package/docs/reference/usage.md +4 -4
  112. package/package.json +14 -1
@@ -0,0 +1,441 @@
1
"use strict";
/**
 * ExplainableLearning - Explainable AI for Reinforcement Learning Decisions
 *
 * Provides human-readable explanations for RL agent decisions, tracking:
 * - Action selection rationale (Q-values, exploration vs exploitation)
 * - Confidence scores based on experience history
 * - Contributing experiences that influenced decisions
 * - Decision factors and alternative actions
 *
 * Supports transparency and trust in agent decision-making for issue #118
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.ExplainableLearning = void 0;
const Logger_1 = require("../utils/Logger");
/**
 * ExplainableLearning - Generates explanations for RL decisions
 */
class ExplainableLearning {
    constructor() {
        this.logger = Logger_1.Logger.getInstance();
        this.logger.info('ExplainableLearning initialized');
    }
    /**
     * Generate explanation for action selection
     *
     * @param state - Current task state
     * @param selectedAction - The action that was selected
     * @param availableActions - All available actions
     * @param qValues - Q-values for state-action pairs (keyed by encoded action)
     * @param explorationRate - Current exploration rate
     * @param experiences - Historical experiences for this agent
     * @param wasExploration - Whether this was an exploration decision
     * @returns Complete action explanation
     */
    explainAction(state, selectedAction, availableActions, qValues, explorationRate, experiences, wasExploration) {
        const selectedActionKey = this.encodeAction(selectedAction);
        const selectedQValue = qValues.get(selectedActionKey) ?? 0;
        // Determine decision type
        const decisionType = wasExploration ? 'exploration' : 'exploitation';
        // Calculate confidence based on experience
        const confidence = this.calculateConfidence(state, selectedAction, experiences);
        // Generate reasoning
        const reasoning = this.generateReasoning(decisionType, selectedQValue, confidence, explorationRate, availableActions.length, experiences);
        // Identify alternatives — pass the real current state so alternative
        // confidences are computed against it (previously a hard-coded dummy
        // state was used, making alternative confidence values meaningless)
        const alternatives = this.identifyAlternatives(selectedAction, availableActions, qValues, experiences, decisionType, state);
        // Find contributing experiences
        const contributingExperiences = this.findContributingExperiences(state, selectedAction, experiences);
        // Analyze decision factors
        const decisionFactors = this.analyzeDecisionFactors(state, selectedAction, selectedQValue, explorationRate, confidence, wasExploration);
        return {
            selectedAction,
            decisionType,
            qValue: selectedQValue,
            confidence,
            reasoning,
            alternatives,
            contributingExperiences,
            decisionFactors,
            timestamp: new Date()
        };
    }
    /**
     * Calculate confidence score based on experience history
     * Higher confidence when:
     * - More experiences with similar states
     * - Higher success rate
     * - More recent positive experiences
     *
     * @returns Confidence in [0.1, 0.95]; 0.1 when no matching experience exists
     */
    calculateConfidence(state, action, experiences) {
        const stats = this.getExperienceStats(state, action, experiences);
        if (stats.totalCount === 0) {
            return 0.1; // Low confidence with no experience
        }
        // Base confidence from experience count (logarithmic scale, capped at 0.4)
        const experienceConfidence = Math.min(0.4, Math.log10(stats.totalCount + 1) / 2);
        // Success rate confidence (up to 0.3)
        const successConfidence = stats.successCount > 0
            ? (stats.successCount / stats.totalCount) * 0.3
            : 0;
        // Recent performance confidence (up to 0.3; negative performance ignored)
        const recentConfidence = Math.max(0, stats.recentPerformance) * 0.3;
        return Math.min(0.95, experienceConfidence + successConfidence + recentConfidence);
    }
    /**
     * Get experience statistics for state-action pair
     * Matches on exact encoded state and action keys.
     */
    getExperienceStats(state, action, experiences) {
        const stateKey = this.encodeState(state);
        const actionKey = this.encodeAction(action);
        // Find similar experiences
        const similarExperiences = experiences.filter(exp => {
            const expStateKey = this.encodeState(exp.state);
            const expActionKey = this.encodeAction(exp.action);
            // Exact match for now (could use similarity threshold)
            return expStateKey === stateKey && expActionKey === actionKey;
        });
        const totalCount = similarExperiences.length;
        const successCount = similarExperiences.filter(exp => exp.reward > 0).length;
        const averageReward = totalCount > 0
            ? similarExperiences.reduce((sum, exp) => sum + exp.reward, 0) / totalCount
            : 0;
        // Recent performance (last 10 experiences)
        const recentExperiences = similarExperiences.slice(-10);
        const recentPerformance = recentExperiences.length > 0
            ? recentExperiences.reduce((sum, exp) => sum + exp.reward, 0) / recentExperiences.length
            : 0;
        return {
            totalCount,
            successCount,
            averageReward,
            recentPerformance
        };
    }
    /**
     * Generate human-readable reasoning for the decision
     */
    generateReasoning(decisionType, qValue, confidence, explorationRate, numActions, experiences) {
        if (decisionType === 'exploration') {
            const explorationPercent = (explorationRate * 100).toFixed(1);
            return `Explored this action (confidence: ${confidence.toFixed(2)}) to gather more information. ` +
                `Current exploration rate is ${explorationPercent}%, balancing learning with exploitation.`;
        }
        // Exploitation
        const qValueFormatted = qValue.toFixed(3);
        const confidencePercent = (confidence * 100).toFixed(0);
        const experienceCount = experiences.length;
        if (qValue > 0.5) {
            return `Selected action with highest Q-value (${qValueFormatted}) among ${numActions} alternatives. ` +
                `High confidence (${confidencePercent}%) based on ${experienceCount} past experiences with strong positive results.`;
        }
        else if (qValue > 0) {
            return `Selected action with Q-value ${qValueFormatted} (moderate positive expectation). ` +
                `Medium confidence (${confidencePercent}%) from ${experienceCount} experiences. More data will improve decision quality.`;
        }
        else {
            return `Selected best available action (Q-value: ${qValueFormatted}) among ${numActions} options. ` +
                `Lower confidence (${confidencePercent}%) suggests limited experience in this state.`;
        }
    }
    /**
     * Identify alternative actions and explain why they weren't selected
     *
     * @param state - Current task state used for alternative-confidence scoring.
     *                Defaults to a neutral state for backward compatibility with
     *                callers that do not supply it.
     * @returns Top 3 alternatives, sorted by Q-value descending
     */
    identifyAlternatives(selectedAction, availableActions, qValues, experiences, decisionType, state = { taskComplexity: 0, requiredCapabilities: [], contextFeatures: {}, previousAttempts: 0, availableResources: 1 }) {
        const selectedActionKey = this.encodeAction(selectedAction);
        const alternatives = [];
        for (const action of availableActions) {
            const actionKey = this.encodeAction(action);
            // Skip the selected action
            if (actionKey === selectedActionKey) {
                continue;
            }
            const qValue = qValues.get(actionKey) ?? 0;
            const confidence = this.calculateConfidence(state, action, experiences);
            // Determine why it wasn't selected
            let reason;
            if (decisionType === 'exploration') {
                reason = 'Random exploration selected different action';
            }
            else {
                const selectedQValue = qValues.get(selectedActionKey) ?? 0;
                if (qValue < selectedQValue) {
                    reason = `Lower Q-value (${qValue.toFixed(3)} vs ${selectedQValue.toFixed(3)})`;
                }
                else {
                    reason = 'Similar Q-value but other action selected during exploitation';
                }
            }
            alternatives.push({
                action,
                qValue,
                confidence,
                reason
            });
        }
        // Sort by Q-value descending
        alternatives.sort((a, b) => b.qValue - a.qValue);
        // Return top 3 alternatives
        return alternatives.slice(0, 3);
    }
    /**
     * Find experiences that contributed to this decision
     * Includes experiences with the same encoded action and state similarity > 0.5.
     *
     * @returns Top 5 contributing experiences, ordered by similarity then recency
     */
    findContributingExperiences(state, action, experiences) {
        const actionKey = this.encodeAction(action);
        const contributingExps = [];
        for (const exp of experiences) {
            const expActionKey = this.encodeAction(exp.action);
            // Calculate similarity
            const similarity = this.calculateStateSimilarity(state, exp.state);
            // Include if same action and similar state
            if (expActionKey === actionKey && similarity > 0.5) {
                contributingExps.push({
                    experienceId: exp.taskId,
                    taskType: exp.taskType,
                    reward: exp.reward,
                    similarity,
                    timestamp: exp.timestamp
                });
            }
        }
        // Sort by similarity; ties (within 0.1) broken by recency
        contributingExps.sort((a, b) => {
            const similarityDiff = b.similarity - a.similarity;
            if (Math.abs(similarityDiff) > 0.1) {
                return similarityDiff;
            }
            return b.timestamp.getTime() - a.timestamp.getTime();
        });
        // Return top 5
        return contributingExps.slice(0, 5);
    }
    /**
     * Calculate similarity between two states (0-1)
     * Averages per-feature similarities: complexity, resources, attempts,
     * and Jaccard overlap of required capabilities (when either set is non-empty).
     */
    calculateStateSimilarity(state1, state2) {
        let similarity = 0;
        let factors = 0;
        // Task complexity similarity
        similarity += 1 - Math.abs(state1.taskComplexity - state2.taskComplexity);
        factors++;
        // Available resources similarity
        similarity += 1 - Math.abs(state1.availableResources - state2.availableResources);
        factors++;
        // Previous attempts similarity (a difference of 5+ attempts scores 0)
        const attemptDiff = Math.abs(state1.previousAttempts - state2.previousAttempts);
        similarity += Math.max(0, 1 - attemptDiff / 5);
        factors++;
        // Required capabilities overlap (Jaccard index)
        const capabilities1 = new Set(state1.requiredCapabilities);
        const capabilities2 = new Set(state2.requiredCapabilities);
        const intersection = new Set([...capabilities1].filter(x => capabilities2.has(x)));
        const union = new Set([...capabilities1, ...capabilities2]);
        if (union.size > 0) {
            similarity += intersection.size / union.size;
            factors++;
        }
        return factors > 0 ? similarity / factors : 0;
    }
    /**
     * Analyze decision factors
     *
     * @returns Factors sorted by impact descending
     */
    analyzeDecisionFactors(state, action, qValue, explorationRate, confidence, wasExploration) {
        const factors = [];
        // Q-value factor
        factors.push({
            name: 'Q-Value',
            value: qValue.toFixed(3),
            impact: wasExploration ? 0.3 : 0.9,
            description: 'Expected cumulative reward for this state-action pair'
        });
        // Exploration rate factor
        factors.push({
            name: 'Exploration Rate',
            value: `${(explorationRate * 100).toFixed(1)}%`,
            impact: wasExploration ? 0.9 : 0.1,
            description: 'Probability of selecting random action for exploration'
        });
        // Confidence factor
        factors.push({
            name: 'Confidence',
            value: `${(confidence * 100).toFixed(0)}%`,
            impact: 0.7,
            description: 'Based on number and quality of similar past experiences'
        });
        // Task complexity factor
        factors.push({
            name: 'Task Complexity',
            value: state.taskComplexity.toFixed(2),
            impact: 0.5,
            description: 'Complexity of current task (0=simple, 1=complex)'
        });
        // Resource availability factor
        factors.push({
            name: 'Available Resources',
            value: `${(state.availableResources * 100).toFixed(0)}%`,
            impact: 0.4,
            description: 'Resources available for task execution'
        });
        // Strategy factor
        factors.push({
            name: 'Strategy',
            value: action.strategy,
            impact: 0.8,
            description: 'Selected execution strategy'
        });
        // Parallelization factor
        factors.push({
            name: 'Parallelization',
            value: `${(action.parallelization * 100).toFixed(0)}%`,
            impact: 0.5,
            description: 'Degree of parallel execution'
        });
        // Sort by impact
        factors.sort((a, b) => b.impact - a.impact);
        return factors;
    }
    /**
     * Export explanation in structured format
     */
    exportStructured(explanation, state, availableActions, explorationRate) {
        return {
            explanation,
            state,
            availableActions,
            explorationRate
        };
    }
    /**
     * Export explanation in natural language format
     */
    exportNaturalLanguage(explanation) {
        const summary = this.generateSummary(explanation);
        const details = this.generateDetails(explanation);
        const metrics = this.generateMetrics(explanation);
        const recommendations = this.generateRecommendations(explanation);
        return {
            summary,
            details,
            metrics,
            recommendations
        };
    }
    /**
     * Generate summary sentence
     */
    generateSummary(explanation) {
        const action = explanation.selectedAction.strategy;
        const type = explanation.decisionType;
        const confidence = (explanation.confidence * 100).toFixed(0);
        if (type === 'exploration') {
            return `Explored "${action}" strategy with ${confidence}% confidence to gather more experience.`;
        }
        else {
            return `Selected "${action}" strategy with ${confidence}% confidence based on Q-value of ${explanation.qValue.toFixed(3)}.`;
        }
    }
    /**
     * Generate detailed explanation paragraphs
     */
    generateDetails(explanation) {
        const details = [];
        // Main reasoning
        details.push(explanation.reasoning);
        // Contributing experiences
        if (explanation.contributingExperiences.length > 0) {
            const exp = explanation.contributingExperiences[0];
            const successRate = explanation.contributingExperiences.filter(e => e.reward > 0).length /
                explanation.contributingExperiences.length;
            details.push(`This decision is based on ${explanation.contributingExperiences.length} similar past experiences ` +
                `with a ${(successRate * 100).toFixed(0)}% success rate. The most similar experience was from ` +
                `"${exp.taskType}" which had a reward of ${exp.reward.toFixed(2)}.`);
        }
        else {
            details.push('This decision is based on limited historical data. As the agent gains more experience, ' +
                'decision quality will improve.');
        }
        // Top decision factors
        const topFactors = explanation.decisionFactors.slice(0, 3);
        if (topFactors.length > 0) {
            const factorList = topFactors.map(f => `${f.name} (${f.value})`).join(', ');
            details.push(`Key decision factors: ${factorList}.`);
        }
        // Alternatives
        if (explanation.alternatives.length > 0) {
            const alt = explanation.alternatives[0];
            details.push(`The next best alternative was "${alt.action.strategy}" with Q-value ${alt.qValue.toFixed(3)}. ` +
                `It wasn't selected because: ${alt.reason}.`);
        }
        return details;
    }
    /**
     * Generate key metrics
     */
    generateMetrics(explanation) {
        const metrics = {};
        metrics['Decision Type'] = explanation.decisionType === 'exploration' ? 'Exploration' : 'Exploitation';
        metrics['Q-Value'] = explanation.qValue.toFixed(3);
        metrics['Confidence'] = `${(explanation.confidence * 100).toFixed(0)}%`;
        metrics['Strategy'] = explanation.selectedAction.strategy;
        metrics['Similar Experiences'] = explanation.contributingExperiences.length.toString();
        if (explanation.contributingExperiences.length > 0) {
            const successCount = explanation.contributingExperiences.filter(e => e.reward > 0).length;
            metrics['Success Rate'] = `${((successCount / explanation.contributingExperiences.length) * 100).toFixed(0)}%`;
        }
        return metrics;
    }
    /**
     * Generate recommendations for user
     */
    generateRecommendations(explanation) {
        const recommendations = [];
        // Low confidence recommendation
        if (explanation.confidence < 0.3) {
            recommendations.push('Low confidence detected. Consider providing feedback to help the agent learn faster.');
        }
        // Limited experience recommendation
        if (explanation.contributingExperiences.length < 3) {
            recommendations.push('Limited experience in this scenario. The agent will improve with more similar tasks.');
        }
        // Exploration recommendation
        if (explanation.decisionType === 'exploration') {
            recommendations.push('This was an exploratory action. If it performs well, it will be favored in future decisions.');
        }
        // High confidence recommendation
        if (explanation.confidence > 0.8) {
            recommendations.push('High confidence in this decision based on extensive past experience.');
        }
        // Alternative suggestion
        if (explanation.alternatives.length > 0) {
            const alt = explanation.alternatives[0];
            if (Math.abs(alt.qValue - explanation.qValue) < 0.1) {
                recommendations.push(`Alternative strategy "${alt.action.strategy}" has similar expected performance and could also work well.`);
            }
        }
        return recommendations;
    }
    /**
     * Encode state to string key (matches LearningEngine encoding)
     * Features are normalized, rounded to one decimal place, and joined with commas.
     */
    encodeState(state) {
        const features = [
            state.taskComplexity,
            state.requiredCapabilities.length / 10,
            state.previousAttempts / 5,
            state.availableResources,
            state.timeConstraint ? Math.min(state.timeConstraint / 300000, 1) : 1
        ];
        return features.map(f => Math.round(f * 10) / 10).join(',');
    }
    /**
     * Encode action to string key (matches LearningEngine encoding)
     */
    encodeAction(action) {
        return `${action.strategy}:${action.parallelization.toFixed(1)}:${action.retryPolicy}`;
    }
}
exports.ExplainableLearning = ExplainableLearning;
//# sourceMappingURL=ExplainableLearning.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"ExplainableLearning.js","sourceRoot":"","sources":["../../src/learning/ExplainableLearning.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;GAUG;;;AAEH,4CAAyC;AA0IzC;;GAEG;AACH,MAAa,mBAAmB;IAG9B;QACE,IAAI,CAAC,MAAM,GAAG,eAAM,CAAC,WAAW,EAAE,CAAC;QACnC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,iCAAiC,CAAC,CAAC;IACtD,CAAC;IAED;;;;;;;;;;;OAWG;IACH,aAAa,CACX,KAAgB,EAChB,cAA2B,EAC3B,gBAA+B,EAC/B,OAA4B,EAC5B,eAAuB,EACvB,WAA6B,EAC7B,cAAuB;QAEvB,MAAM,iBAAiB,GAAG,IAAI,CAAC,YAAY,CAAC,cAAc,CAAC,CAAC;QAC5D,MAAM,cAAc,GAAG,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,IAAI,CAAC,CAAC;QAE3D,0BAA0B;QAC1B,MAAM,YAAY,GAAiB,cAAc,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,cAAc,CAAC;QAEnF,2CAA2C;QAC3C,MAAM,UAAU,GAAG,IAAI,CAAC,mBAAmB,CACzC,KAAK,EACL,cAAc,EACd,WAAW,CACZ,CAAC;QAEF,qBAAqB;QACrB,MAAM,SAAS,GAAG,IAAI,CAAC,iBAAiB,CACtC,YAAY,EACZ,cAAc,EACd,UAAU,EACV,eAAe,EACf,gBAAgB,CAAC,MAAM,EACvB,WAAW,CACZ,CAAC;QAEF,wBAAwB;QACxB,MAAM,YAAY,GAAG,IAAI,CAAC,oBAAoB,CAC5C,cAAc,EACd,gBAAgB,EAChB,OAAO,EACP,WAAW,EACX,YAAY,CACb,CAAC;QAEF,gCAAgC;QAChC,MAAM,uBAAuB,GAAG,IAAI,CAAC,2BAA2B,CAC9D,KAAK,EACL,cAAc,EACd,WAAW,CACZ,CAAC;QAEF,2BAA2B;QAC3B,MAAM,eAAe,GAAG,IAAI,CAAC,sBAAsB,CACjD,KAAK,EACL,cAAc,EACd,cAAc,EACd,eAAe,EACf,UAAU,EACV,cAAc,CACf,CAAC;QAEF,OAAO;YACL,cAAc;YACd,YAAY;YACZ,MAAM,EAAE,cAAc;YACtB,UAAU;YACV,SAAS;YACT,YAAY;YACZ,uBAAuB;YACvB,eAAe;YACf,SAAS,EAAE,IAAI,IAAI,EAAE;SACtB,CAAC;IACJ,CAAC;IAED;;;;;;OAMG;IACK,mBAAmB,CACzB,KAAgB,EAChB,MAAmB,EACnB,WAA6B;QAE7B,MAAM,KAAK,GAAG,IAAI,CAAC,kBAAkB,CAAC,KAAK,EAAE,MAAM,EAAE,WAAW,CAAC,CAAC;QAElE,IAAI,KAAK,CAAC,UAAU,KAAK,CAAC,EAAE,CAAC;YAC3B,OAAO,GAAG,CAAC,CAAC,oCAAoC;QAClD,CAAC;QAED,4DAA4D;QAC5D,MAAM,oBAAoB,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;QAEjF,0BAA0B;QAC1B,MAAM,iBAAiB,GAAG,KAAK,CAAC,YAAY,GAAG,CAAC;YAC9C,CAAC,CAAC,CAAC,KAAK,CAAC,YAAY,GAAG,KAAK,CAAC,UAAU,CAAC,GAAG,GAAG;YAC/C,CAAC,CAAC,CAAC,CAAC;QAEN,gCAAgC;QAChC,MAAM,gBAAgB,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,KAAK,CAAC,iBAAiB,CAAC,GAAG,GAAG,CAAC;QAEpE,OAAO,IAA
I,CAAC,GAAG,CAAC,IAAI,EAAE,oBAAoB,GAAG,iBAAiB,GAAG,gBAAgB,CAAC,CAAC;IACrF,CAAC;IAED;;OAEG;IACK,kBAAkB,CACxB,KAAgB,EAChB,MAAmB,EACnB,WAA6B;QAE7B,MAAM,QAAQ,GAAG,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;QACzC,MAAM,SAAS,GAAG,IAAI,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC;QAE5C,2BAA2B;QAC3B,MAAM,kBAAkB,GAAG,WAAW,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE;YAClD,MAAM,WAAW,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;YAChD,MAAM,YAAY,GAAG,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;YAEnD,uDAAuD;YACvD,OAAO,WAAW,KAAK,QAAQ,IAAI,YAAY,KAAK,SAAS,CAAC;QAChE,CAAC,CAAC,CAAC;QAEH,MAAM,UAAU,GAAG,kBAAkB,CAAC,MAAM,CAAC;QAC7C,MAAM,YAAY,GAAG,kBAAkB,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC;QAE7E,MAAM,aAAa,GAAG,UAAU,GAAG,CAAC;YAClC,CAAC,CAAC,kBAAkB,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CAAC,GAAG,GAAG,GAAG,CAAC,MAAM,EAAE,CAAC,CAAC,GAAG,UAAU;YAC3E,CAAC,CAAC,CAAC,CAAC;QAEN,2CAA2C;QAC3C,MAAM,iBAAiB,GAAG,kBAAkB,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC;QACxD,MAAM,iBAAiB,GAAG,iBAAiB,CAAC,MAAM,GAAG,CAAC;YACpD,CAAC,CAAC,iBAAiB,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CAAC,GAAG,GAAG,GAAG,CAAC,MAAM,EAAE,CAAC,CAAC,GAAG,iBAAiB,CAAC,MAAM;YACxF,CAAC,CAAC,CAAC,CAAC;QAEN,OAAO;YACL,UAAU;YACV,YAAY;YACZ,aAAa;YACb,iBAAiB;SAClB,CAAC;IACJ,CAAC;IAED;;OAEG;IACK,iBAAiB,CACvB,YAA0B,EAC1B,MAAc,EACd,UAAkB,EAClB,eAAuB,EACvB,UAAkB,EAClB,WAA6B;QAE7B,IAAI,YAAY,KAAK,aAAa,EAAE,CAAC;YACnC,MAAM,kBAAkB,GAAG,CAAC,eAAe,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;YAC9D,OAAO,qCAAqC,UAAU,CAAC,OAAO,CAAC,CAAC,CAAC,gCAAgC;gBAC/F,+BAA+B,kBAAkB,0CAA0C,CAAC;QAChG,CAAC;QAED,eAAe;QACf,MAAM,eAAe,GAAG,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;QAC1C,MAAM,iBAAiB,GAAG,CAAC,UAAU,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;QACxD,MAAM,eAAe,GAAG,WAAW,CAAC,MAAM,CAAC;QAE3C,IAAI,MAAM,GAAG,GAAG,EAAE,CAAC;YACjB,OAAO,yCAAyC,eAAe,WAAW,UAAU,iBAAiB;gBACnG,oBAAoB,iBAAiB,eAAe,eAAe,iDAAiD,CAAC;QACzH,CAAC;aAAM,IAAI,MAAM,GAAG,CAAC,EAAE,CAAC;YACtB,OAAO,gCAAgC,eAAe,oCAAoC;gBACxF,sBAAsB,iBAAiB,WAAW,eAAe,wDAAwD,CAAC;QAC9H,CAAC;aAAM,CAAC;YACN,OAAO,4CAA4C,
eAAe,WAAW,UAAU,YAAY;gBACjG,qBAAqB,iBAAiB,+CAA+C,CAAC;QAC1F,CAAC;IACH,CAAC;IAED;;OAEG;IACK,oBAAoB,CAC1B,cAA2B,EAC3B,gBAA+B,EAC/B,OAA4B,EAC5B,WAA6B,EAC7B,YAA0B;QAE1B,MAAM,iBAAiB,GAAG,IAAI,CAAC,YAAY,CAAC,cAAc,CAAC,CAAC;QAC5D,MAAM,YAAY,GAAwB,EAAE,CAAC;QAE7C,KAAK,MAAM,MAAM,IAAI,gBAAgB,EAAE,CAAC;YACtC,MAAM,SAAS,GAAG,IAAI,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC;YAE5C,2BAA2B;YAC3B,IAAI,SAAS,KAAK,iBAAiB,EAAE,CAAC;gBACpC,SAAS;YACX,CAAC;YAED,MAAM,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;YAC3C,MAAM,UAAU,GAAG,IAAI,CAAC,mBAAmB,CACzC,EAAE,cAAc,EAAE,CAAC,EAAE,oBAAoB,EAAE,EAAE,EAAE,eAAe,EAAE,EAAE,EAAE,gBAAgB,EAAE,CAAC,EAAE,kBAAkB,EAAE,CAAC,EAAE,EAChH,MAAM,EACN,WAAW,CACZ,CAAC;YAEF,mCAAmC;YACnC,IAAI,MAAc,CAAC;YACnB,IAAI,YAAY,KAAK,aAAa,EAAE,CAAC;gBACnC,MAAM,GAAG,8CAA8C,CAAC;YAC1D,CAAC;iBAAM,CAAC;gBACN,MAAM,cAAc,GAAG,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,IAAI,CAAC,CAAC;gBAC3D,IAAI,MAAM,GAAG,cAAc,EAAE,CAAC;oBAC5B,MAAM,GAAG,kBAAkB,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,cAAc,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC;gBAClF,CAAC;qBAAM,CAAC;oBACN,MAAM,GAAG,+DAA+D,CAAC;gBAC3E,CAAC;YACH,CAAC;YAED,YAAY,CAAC,IAAI,CAAC;gBAChB,MAAM;gBACN,MAAM;gBACN,UAAU;gBACV,MAAM;aACP,CAAC,CAAC;QACL,CAAC;QAED,6BAA6B;QAC7B,YAAY,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC;QAEjD,4BAA4B;QAC5B,OAAO,YAAY,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IAClC,CAAC;IAED;;OAEG;IACK,2BAA2B,CACjC,KAAgB,EAChB,MAAmB,EACnB,WAA6B;QAE7B,MAAM,QAAQ,GAAG,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;QACzC,MAAM,SAAS,GAAG,IAAI,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC;QAE5C,MAAM,gBAAgB,GAA6B,EAAE,CAAC;QAEtD,KAAK,MAAM,GAAG,IAAI,WAAW,EAAE,CAAC;YAC9B,MAAM,WAAW,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;YAChD,MAAM,YAAY,GAAG,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;YAEnD,uBAAuB;YACvB,MAAM,UAAU,GAAG,IAAI,CAAC,wBAAwB,CAAC,KAAK,EAAE,GAAG,CAAC,KAAK,CAAC,CAAC;YAEnE,2CAA2C;YAC3C,IAAI,YAAY,KAAK,SAAS,IAAI,UAAU,GAAG,GAAG,EAAE,CAAC;gBACnD,gBAAgB,CAAC,IAAI,CAAC;oBACpB,YAAY,EAAE,GAAG,CAAC,MAAM;oBACxB,QAAQ,EAAE,GAAG,CAAC,QAAQ;oBACtB,M
AAM,EAAE,GAAG,CAAC,MAAM;oBAClB,UAAU;oBACV,SAAS,EAAE,GAAG,CAAC,SAAS;iBACzB,CAAC,CAAC;YACL,CAAC;QACH,CAAC;QAED,iCAAiC;QACjC,gBAAgB,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;YAC7B,MAAM,cAAc,GAAG,CAAC,CAAC,UAAU,GAAG,CAAC,CAAC,UAAU,CAAC;YACnD,IAAI,IAAI,CAAC,GAAG,CAAC,cAAc,CAAC,GAAG,GAAG,EAAE,CAAC;gBACnC,OAAO,cAAc,CAAC;YACxB,CAAC;YACD,OAAO,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE,CAAC;QACvD,CAAC,CAAC,CAAC;QAEH,eAAe;QACf,OAAO,gBAAgB,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IACtC,CAAC;IAED;;OAEG;IACK,wBAAwB,CAAC,MAAiB,EAAE,MAAiB;QACnE,IAAI,UAAU,GAAG,CAAC,CAAC;QACnB,IAAI,OAAO,GAAG,CAAC,CAAC;QAEhB,6BAA6B;QAC7B,UAAU,IAAI,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,cAAc,GAAG,MAAM,CAAC,cAAc,CAAC,CAAC;QAC1E,OAAO,EAAE,CAAC;QAEV,iCAAiC;QACjC,UAAU,IAAI,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,kBAAkB,GAAG,MAAM,CAAC,kBAAkB,CAAC,CAAC;QAClF,OAAO,EAAE,CAAC;QAEV,+BAA+B;QAC/B,MAAM,WAAW,GAAG,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,gBAAgB,GAAG,MAAM,CAAC,gBAAgB,CAAC,CAAC;QAChF,UAAU,IAAI,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,GAAG,WAAW,GAAG,CAAC,CAAC,CAAC;QAC/C,OAAO,EAAE,CAAC;QAEV,gCAAgC;QAChC,MAAM,aAAa,GAAG,IAAI,GAAG,CAAC,MAAM,CAAC,oBAAoB,CAAC,CAAC;QAC3D,MAAM,aAAa,GAAG,IAAI,GAAG,CAAC,MAAM,CAAC,oBAAoB,CAAC,CAAC;QAC3D,MAAM,YAAY,GAAG,IAAI,GAAG,CAAC,CAAC,GAAG,aAAa,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;QACnF,MAAM,KAAK,GAAG,IAAI,GAAG,CAAC,CAAC,GAAG,aAAa,EAAE,GAAG,aAAa,CAAC,CAAC,CAAC;QAE5D,IAAI,KAAK,CAAC,IAAI,GAAG,CAAC,EAAE,CAAC;YACnB,UAAU,IAAI,YAAY,CAAC,IAAI,GAAG,KAAK,CAAC,IAAI,CAAC;YAC7C,OAAO,EAAE,CAAC;QACZ,CAAC;QAED,OAAO,OAAO,GAAG,CAAC,CAAC,CAAC,CAAC,UAAU,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC;IAChD,CAAC;IAED;;OAEG;IACK,sBAAsB,CAC5B,KAAgB,EAChB,MAAmB,EACnB,MAAc,EACd,eAAuB,EACvB,UAAkB,EAClB,cAAuB;QAEvB,MAAM,OAAO,GAAqB,EAAE,CAAC;QAErC,iBAAiB;QACjB,OAAO,CAAC,IAAI,CAAC;YACX,IAAI,EAAE,SAAS;YACf,KAAK,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC;YACxB,MAAM,EAAE,cAAc,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG;YAClC,WAAW,EAAE,uDAAuD;SACrE,CAAC,CAAC;QAEH,0BAA0B;QAC1B,OA
AO,CAAC,IAAI,CAAC;YACX,IAAI,EAAE,kBAAkB;YACxB,KAAK,EAAE,GAAG,CAAC,eAAe,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG;YAC/C,MAAM,EAAE,cAAc,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG;YAClC,WAAW,EAAE,wDAAwD;SACtE,CAAC,CAAC;QAEH,oBAAoB;QACpB,OAAO,CAAC,IAAI,CAAC;YACX,IAAI,EAAE,YAAY;YAClB,KAAK,EAAE,GAAG,CAAC,UAAU,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG;YAC1C,MAAM,EAAE,GAAG;YACX,WAAW,EAAE,yDAAyD;SACvE,CAAC,CAAC;QAEH,yBAAyB;QACzB,OAAO,CAAC,IAAI,CAAC;YACX,IAAI,EAAE,iBAAiB;YACvB,KAAK,EAAE,KAAK,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC,CAAC;YACtC,MAAM,EAAE,GAAG;YACX,WAAW,EAAE,kDAAkD;SAChE,CAAC,CAAC;QAEH,+BAA+B;QAC/B,OAAO,CAAC,IAAI,CAAC;YACX,IAAI,EAAE,qBAAqB;YAC3B,KAAK,EAAE,GAAG,CAAC,KAAK,CAAC,kBAAkB,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG;YACxD,MAAM,EAAE,GAAG;YACX,WAAW,EAAE,wCAAwC;SACtD,CAAC,CAAC;QAEH,kBAAkB;QAClB,OAAO,CAAC,IAAI,CAAC;YACX,IAAI,EAAE,UAAU;YAChB,KAAK,EAAE,MAAM,CAAC,QAAQ;YACtB,MAAM,EAAE,GAAG;YACX,WAAW,EAAE,6BAA6B;SAC3C,CAAC,CAAC;QAEH,yBAAyB;QACzB,OAAO,CAAC,IAAI,CAAC;YACX,IAAI,EAAE,iBAAiB;YACvB,KAAK,EAAE,GAAG,CAAC,MAAM,CAAC,eAAe,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG;YACtD,MAAM,EAAE,GAAG;YACX,WAAW,EAAE,8BAA8B;SAC5C,CAAC,CAAC;QAEH,iBAAiB;QACjB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC;QAE5C,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;OAEG;IACH,gBAAgB,CACd,WAA8B,EAC9B,KAAgB,EAChB,gBAA+B,EAC/B,eAAuB;QAEvB,OAAO;YACL,WAAW;YACX,KAAK;YACL,gBAAgB;YAChB,eAAe;SAChB,CAAC;IACJ,CAAC;IAED;;OAEG;IACH,qBAAqB,CAAC,WAA8B;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC;QAClD,MAAM,eAAe,GAAG,IAAI,CAAC,uBAAuB,CAAC,WAAW,CAAC,CAAC;QAElE,OAAO;YACL,OAAO;YACP,OAAO;YACP,OAAO;YACP,eAAe;SAChB,CAAC;IACJ,CAAC;IAED;;OAEG;IACK,eAAe,CAAC,WAA8B;QACpD,MAAM,MAAM,GAAG,WAAW,CAAC,cAAc,CAAC,QAAQ,CAAC;QACnD,MAAM,IAAI,GAAG,WAAW,CAAC,YAAY,CAAC;QACtC,MAAM,UAAU,GAAG,CAAC,WAAW,CAAC,UAAU,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;QAE7D,IAAI,IAAI,KA
AK,aAAa,EAAE,CAAC;YAC3B,OAAO,aAAa,MAAM,mBAAmB,UAAU,yCAAyC,CAAC;QACnG,CAAC;aAAM,CAAC;YACN,OAAO,aAAa,MAAM,mBAAmB,UAAU,oCAAoC,WAAW,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC;QAC9H,CAAC;IACH,CAAC;IAED;;OAEG;IACK,eAAe,CAAC,WAA8B;QACpD,MAAM,OAAO,GAAa,EAAE,CAAC;QAE7B,iBAAiB;QACjB,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;QAEpC,2BAA2B;QAC3B,IAAI,WAAW,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACnD,MAAM,GAAG,GAAG,WAAW,CAAC,uBAAuB,CAAC,CAAC,CAAC,CAAC;YACnD,MAAM,WAAW,GAAG,WAAW,CAAC,uBAAuB,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,MAAM;gBACtF,WAAW,CAAC,uBAAuB,CAAC,MAAM,CAAC;YAE7C,OAAO,CAAC,IAAI,CACV,6BAA6B,WAAW,CAAC,uBAAuB,CAAC,MAAM,4BAA4B;gBACnG,UAAU,CAAC,WAAW,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,uDAAuD;gBAC/F,IAAI,GAAG,CAAC,QAAQ,2BAA2B,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,CACpE,CAAC;QACJ,CAAC;aAAM,CAAC;YACN,OAAO,CAAC,IAAI,CACV,yFAAyF;gBACzF,gCAAgC,CACjC,CAAC;QACJ,CAAC;QAED,uBAAuB;QACvB,MAAM,UAAU,GAAG,WAAW,CAAC,eAAe,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QAC3D,IAAI,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YAC1B,MAAM,UAAU,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,KAAK,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YAC5E,OAAO,CAAC,IAAI,CAAC,yBAAyB,UAAU,GAAG,CAAC,CAAC;QACvD,CAAC;QAED,eAAe;QACf,IAAI,WAAW,CAAC,YAAY,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACxC,MAAM,GAAG,GAAG,WAAW,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC;YACxC,OAAO,CAAC,IAAI,CACV,kCAAkC,GAAG,CAAC,MAAM,CAAC,QAAQ,kBAAkB,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI;gBAChG,+BAA+B,GAAG,CAAC,MAAM,GAAG,CAC7C,CAAC;QACJ,CAAC;QAED,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;OAEG;IACK,eAAe,CAAC,WAA8B;QACpD,MAAM,OAAO,GAA2B,EAAE,CAAC;QAE3C,OAAO,CAAC,eAAe,CAAC,GAAG,WAAW,CAAC,YAAY,KAAK,aAAa,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,cAAc,CAAC;QACvG,OAAO,CAAC,SAAS,CAAC,GAAG,WAAW,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;QACnD,OAAO,CAAC,YAAY,CAAC,GAAG,GAAG,CAAC,WAAW,CAAC,UAAU,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC;QACxE,OAAO,CAAC,UAAU,CAAC,GAAG,WAAW,CAAC,cAAc,CAAC,QAAQ,CAAC;QAC1D,OAAO,CAAC,qBAAq
B,CAAC,GAAG,WAAW,CAAC,uBAAuB,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC;QAEvF,IAAI,WAAW,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACnD,MAAM,YAAY,GAAG,WAAW,CAAC,uBAAuB,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC;YAC1F,OAAO,CAAC,cAAc,CAAC,GAAG,GAAG,CAAC,CAAC,YAAY,GAAG,WAAW,CAAC,uBAAuB,CAAC,MAAM,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC;QACjH,CAAC;QAED,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;OAEG;IACK,uBAAuB,CAAC,WAA8B;QAC5D,MAAM,eAAe,GAAa,EAAE,CAAC;QAErC,gCAAgC;QAChC,IAAI,WAAW,CAAC,UAAU,GAAG,GAAG,EAAE,CAAC;YACjC,eAAe,CAAC,IAAI,CAClB,sFAAsF,CACvF,CAAC;QACJ,CAAC;QAED,oCAAoC;QACpC,IAAI,WAAW,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACnD,eAAe,CAAC,IAAI,CAClB,sFAAsF,CACvF,CAAC;QACJ,CAAC;QAED,6BAA6B;QAC7B,IAAI,WAAW,CAAC,YAAY,KAAK,aAAa,EAAE,CAAC;YAC/C,eAAe,CAAC,IAAI,CAClB,8FAA8F,CAC/F,CAAC;QACJ,CAAC;QAED,iCAAiC;QACjC,IAAI,WAAW,CAAC,UAAU,GAAG,GAAG,EAAE,CAAC;YACjC,eAAe,CAAC,IAAI,CAClB,sEAAsE,CACvE,CAAC;QACJ,CAAC;QAED,yBAAyB;QACzB,IAAI,WAAW,CAAC,YAAY,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACxC,MAAM,GAAG,GAAG,WAAW,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC;YACxC,IAAI,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,GAAG,WAAW,CAAC,MAAM,CAAC,GAAG,GAAG,EAAE,CAAC;gBACpD,eAAe,CAAC,IAAI,CAClB,yBAAyB,GAAG,CAAC,MAAM,CAAC,QAAQ,8DAA8D,CAC3G,CAAC;YACJ,CAAC;QACH,CAAC;QAED,OAAO,eAAe,CAAC;IACzB,CAAC;IAED;;OAEG;IACK,WAAW,CAAC,KAAgB;QAClC,MAAM,QAAQ,GAAG;YACf,KAAK,CAAC,cAAc;YACpB,KAAK,CAAC,oBAAoB,CAAC,MAAM,GAAG,EAAE;YACtC,KAAK,CAAC,gBAAgB,GAAG,CAAC;YAC1B,KAAK,CAAC,kBAAkB;YACxB,KAAK,CAAC,cAAc,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,cAAc,GAAG,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;SACtE,CAAC;QACF,OAAO,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC9D,CAAC;IAED;;OAEG;IACK,YAAY,CAAC,MAAmB;QACtC,OAAO,GAAG,MAAM,CAAC,QAAQ,IAAI,MAAM,CAAC,eAAe,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC,WAAW,EAAE,CAAC;IACzF,CAAC;CACF;AA9kBD,kDA8kBC"}