tracelattice 1.2.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +24 -0
- package/README.md +112 -0
- package/dist/ServerConfig.d.ts +229 -0
- package/dist/ServerConfig.d.ts.map +1 -0
- package/dist/ServerConfig.js +121 -0
- package/dist/ServerConfig.js.map +1 -0
- package/dist/__tests__/base-registry.test.d.ts +2 -0
- package/dist/__tests__/base-registry.test.d.ts.map +1 -0
- package/dist/__tests__/base-transport-cov.test.d.ts +2 -0
- package/dist/__tests__/base-transport-cov.test.d.ts.map +1 -0
- package/dist/__tests__/base-transport.test.d.ts +2 -0
- package/dist/__tests__/base-transport.test.d.ts.map +1 -0
- package/dist/__tests__/config-loader.test.d.ts +2 -0
- package/dist/__tests__/config-loader.test.d.ts.map +1 -0
- package/dist/__tests__/connection-pool-cov.test.d.ts +2 -0
- package/dist/__tests__/connection-pool-cov.test.d.ts.map +1 -0
- package/dist/__tests__/connection-pool.test.d.ts +2 -0
- package/dist/__tests__/connection-pool.test.d.ts.map +1 -0
- package/dist/__tests__/container.test.d.ts +2 -0
- package/dist/__tests__/container.test.d.ts.map +1 -0
- package/dist/__tests__/crud.test.d.ts +2 -0
- package/dist/__tests__/crud.test.d.ts.map +1 -0
- package/dist/__tests__/discovery-cache.test.d.ts +2 -0
- package/dist/__tests__/discovery-cache.test.d.ts.map +1 -0
- package/dist/__tests__/errors.test.d.ts +2 -0
- package/dist/__tests__/errors.test.d.ts.map +1 -0
- package/dist/__tests__/factories.test.d.ts +2 -0
- package/dist/__tests__/factories.test.d.ts.map +1 -0
- package/dist/__tests__/health-checker-cov.test.d.ts +2 -0
- package/dist/__tests__/health-checker-cov.test.d.ts.map +1 -0
- package/dist/__tests__/health-checker.test.d.ts +2 -0
- package/dist/__tests__/health-checker.test.d.ts.map +1 -0
- package/dist/__tests__/helpers/factories.d.ts +36 -0
- package/dist/__tests__/helpers/factories.d.ts.map +1 -0
- package/dist/__tests__/helpers/index.d.ts +3 -0
- package/dist/__tests__/helpers/index.d.ts.map +1 -0
- package/dist/__tests__/helpers/timers.d.ts +4 -0
- package/dist/__tests__/helpers/timers.d.ts.map +1 -0
- package/dist/__tests__/history-manager.test.d.ts +2 -0
- package/dist/__tests__/history-manager.test.d.ts.map +1 -0
- package/dist/__tests__/http-helpers-cov.test.d.ts +2 -0
- package/dist/__tests__/http-helpers-cov.test.d.ts.map +1 -0
- package/dist/__tests__/http-transport-cov.test.d.ts +2 -0
- package/dist/__tests__/http-transport-cov.test.d.ts.map +1 -0
- package/dist/__tests__/http-transport.test.d.ts +2 -0
- package/dist/__tests__/http-transport.test.d.ts.map +1 -0
- package/dist/__tests__/input-normalizer.test.d.ts +8 -0
- package/dist/__tests__/input-normalizer.test.d.ts.map +1 -0
- package/dist/__tests__/integration.test.d.ts +2 -0
- package/dist/__tests__/integration.test.d.ts.map +1 -0
- package/dist/__tests__/lib-server.test.d.ts +2 -0
- package/dist/__tests__/lib-server.test.d.ts.map +1 -0
- package/dist/__tests__/memory-persistence.test.d.ts +2 -0
- package/dist/__tests__/memory-persistence.test.d.ts.map +1 -0
- package/dist/__tests__/metrics-integration.test.d.ts +2 -0
- package/dist/__tests__/metrics-integration.test.d.ts.map +1 -0
- package/dist/__tests__/persistence.test.d.ts +2 -0
- package/dist/__tests__/persistence.test.d.ts.map +1 -0
- package/dist/__tests__/reasoning-integration.test.d.ts +11 -0
- package/dist/__tests__/reasoning-integration.test.d.ts.map +1 -0
- package/dist/__tests__/reasoning-types.test.d.ts +2 -0
- package/dist/__tests__/reasoning-types.test.d.ts.map +1 -0
- package/dist/__tests__/request-context.test.d.ts +2 -0
- package/dist/__tests__/request-context.test.d.ts.map +1 -0
- package/dist/__tests__/sanitize.test.d.ts +2 -0
- package/dist/__tests__/sanitize.test.d.ts.map +1 -0
- package/dist/__tests__/schema.test.d.ts +2 -0
- package/dist/__tests__/schema.test.d.ts.map +1 -0
- package/dist/__tests__/sequentialthinking-tools.test.d.ts +2 -0
- package/dist/__tests__/sequentialthinking-tools.test.d.ts.map +1 -0
- package/dist/__tests__/server-config.test.d.ts +2 -0
- package/dist/__tests__/server-config.test.d.ts.map +1 -0
- package/dist/__tests__/skill-discovery.test.d.ts +2 -0
- package/dist/__tests__/skill-discovery.test.d.ts.map +1 -0
- package/dist/__tests__/skill-registry.test.d.ts +2 -0
- package/dist/__tests__/skill-registry.test.d.ts.map +1 -0
- package/dist/__tests__/skill-watcher.test.d.ts +2 -0
- package/dist/__tests__/skill-watcher.test.d.ts.map +1 -0
- package/dist/__tests__/sqlite-persistence.test.d.ts +2 -0
- package/dist/__tests__/sqlite-persistence.test.d.ts.map +1 -0
- package/dist/__tests__/sse-transport-cov.test.d.ts +2 -0
- package/dist/__tests__/sse-transport-cov.test.d.ts.map +1 -0
- package/dist/__tests__/sse-transport.test.d.ts +2 -0
- package/dist/__tests__/sse-transport.test.d.ts.map +1 -0
- package/dist/__tests__/streamable-http-cov.test.d.ts +2 -0
- package/dist/__tests__/streamable-http-cov.test.d.ts.map +1 -0
- package/dist/__tests__/streamable-http-transport.test.d.ts +2 -0
- package/dist/__tests__/streamable-http-transport.test.d.ts.map +1 -0
- package/dist/__tests__/structured-logger.test.d.ts +2 -0
- package/dist/__tests__/structured-logger.test.d.ts.map +1 -0
- package/dist/__tests__/thought-evaluator.test.d.ts +2 -0
- package/dist/__tests__/thought-evaluator.test.d.ts.map +1 -0
- package/dist/__tests__/thought-formatter.test.d.ts +2 -0
- package/dist/__tests__/thought-formatter.test.d.ts.map +1 -0
- package/dist/__tests__/thought-processor.test.d.ts +8 -0
- package/dist/__tests__/thought-processor.test.d.ts.map +1 -0
- package/dist/__tests__/tool-registry-cov.test.d.ts +2 -0
- package/dist/__tests__/tool-registry-cov.test.d.ts.map +1 -0
- package/dist/__tests__/tool-registry.test.d.ts +2 -0
- package/dist/__tests__/tool-registry.test.d.ts.map +1 -0
- package/dist/__tests__/tool-watcher.test.d.ts +2 -0
- package/dist/__tests__/tool-watcher.test.d.ts.map +1 -0
- package/dist/__tests__/worker-manager-cov.test.d.ts +2 -0
- package/dist/__tests__/worker-manager-cov.test.d.ts.map +1 -0
- package/dist/__tests__/worker-manager.test.d.ts +2 -0
- package/dist/__tests__/worker-manager.test.d.ts.map +1 -0
- package/dist/cache/DiscoveryCache.d.ts +269 -0
- package/dist/cache/DiscoveryCache.d.ts.map +1 -0
- package/dist/cache/DiscoveryCache.js +100 -0
- package/dist/cache/DiscoveryCache.js.map +1 -0
- package/dist/cli.d.ts +3 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +114 -0
- package/dist/cli.js.map +1 -0
- package/dist/cluster/WorkerManager.d.ts +166 -0
- package/dist/cluster/WorkerManager.d.ts.map +1 -0
- package/dist/cluster/WorkerManager.js +202 -0
- package/dist/cluster/WorkerManager.js.map +1 -0
- package/dist/cluster/worker.d.ts +11 -0
- package/dist/cluster/worker.d.ts.map +1 -0
- package/dist/cluster/worker.js +36 -0
- package/dist/cluster/worker.js.map +1 -0
- package/dist/config/ConfigLoader.d.ts +224 -0
- package/dist/config/ConfigLoader.d.ts.map +1 -0
- package/dist/config/ConfigLoader.js +85 -0
- package/dist/config/ConfigLoader.js.map +1 -0
- package/dist/context/RequestContext.d.ts +61 -0
- package/dist/context/RequestContext.d.ts.map +1 -0
- package/dist/context/RequestContext.js +17 -0
- package/dist/context/RequestContext.js.map +1 -0
- package/dist/contracts/index.d.ts +10 -0
- package/dist/contracts/index.d.ts.map +1 -0
- package/dist/contracts/index.js +1 -0
- package/dist/contracts/interfaces.d.ts +107 -0
- package/dist/contracts/interfaces.d.ts.map +1 -0
- package/dist/contracts/interfaces.js +1 -0
- package/dist/core/HistoryManager.d.ts +514 -0
- package/dist/core/HistoryManager.d.ts.map +1 -0
- package/dist/core/HistoryManager.js +331 -0
- package/dist/core/HistoryManager.js.map +1 -0
- package/dist/core/IHistoryManager.d.ts +100 -0
- package/dist/core/IHistoryManager.d.ts.map +1 -0
- package/dist/core/IHistoryManager.js +1 -0
- package/dist/core/InputNormalizer.d.ts +139 -0
- package/dist/core/InputNormalizer.d.ts.map +1 -0
- package/dist/core/InputNormalizer.js +101 -0
- package/dist/core/InputNormalizer.js.map +1 -0
- package/dist/core/ThoughtEvaluator.d.ts +127 -0
- package/dist/core/ThoughtEvaluator.d.ts.map +1 -0
- package/dist/core/ThoughtEvaluator.js +346 -0
- package/dist/core/ThoughtEvaluator.js.map +1 -0
- package/dist/core/ThoughtFormatter.d.ts +133 -0
- package/dist/core/ThoughtFormatter.d.ts.map +1 -0
- package/dist/core/ThoughtFormatter.js +70 -0
- package/dist/core/ThoughtFormatter.js.map +1 -0
- package/dist/core/ThoughtProcessor.d.ts +218 -0
- package/dist/core/ThoughtProcessor.d.ts.map +1 -0
- package/dist/core/ThoughtProcessor.js +205 -0
- package/dist/core/ThoughtProcessor.js.map +1 -0
- package/dist/core/reasoning.d.ts +169 -0
- package/dist/core/reasoning.d.ts.map +1 -0
- package/dist/core/reasoning.js +1 -0
- package/dist/core/step.d.ts +45 -0
- package/dist/core/step.d.ts.map +1 -0
- package/dist/core/step.js +1 -0
- package/dist/core/thought.d.ts +190 -0
- package/dist/core/thought.d.ts.map +1 -0
- package/dist/core/thought.js +1 -0
- package/dist/di/Container.d.ts +226 -0
- package/dist/di/Container.d.ts.map +1 -0
- package/dist/di/Container.js +96 -0
- package/dist/di/Container.js.map +1 -0
- package/dist/di/ServiceRegistry.d.ts +32 -0
- package/dist/di/ServiceRegistry.d.ts.map +1 -0
- package/dist/di/ServiceRegistry.js +1 -0
- package/dist/errors.d.ts +482 -0
- package/dist/errors.d.ts.map +1 -0
- package/dist/errors.js +108 -0
- package/dist/errors.js.map +1 -0
- package/dist/health/HealthChecker.d.ts +73 -0
- package/dist/health/HealthChecker.d.ts.map +1 -0
- package/dist/health/HealthChecker.js +69 -0
- package/dist/health/HealthChecker.js.map +1 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +1 -0
- package/dist/lib.d.ts +205 -0
- package/dist/lib.d.ts.map +1 -0
- package/dist/lib.js +219 -0
- package/dist/lib.js.map +1 -0
- package/dist/logger/NullLogger.d.ts +154 -0
- package/dist/logger/NullLogger.d.ts.map +1 -0
- package/dist/logger/NullLogger.js +24 -0
- package/dist/logger/NullLogger.js.map +1 -0
- package/dist/logger/StructuredLogger.d.ts +327 -0
- package/dist/logger/StructuredLogger.d.ts.map +1 -0
- package/dist/logger/StructuredLogger.js +72 -0
- package/dist/logger/StructuredLogger.js.map +1 -0
- package/dist/metrics/__tests__/metrics.test.d.ts +2 -0
- package/dist/metrics/__tests__/metrics.test.d.ts.map +1 -0
- package/dist/metrics/metrics.impl.d.ts +252 -0
- package/dist/metrics/metrics.impl.d.ts.map +1 -0
- package/dist/metrics/metrics.impl.js +197 -0
- package/dist/metrics/metrics.impl.js.map +1 -0
- package/dist/persistence/FilePersistence.d.ts +66 -0
- package/dist/persistence/FilePersistence.d.ts.map +1 -0
- package/dist/persistence/FilePersistence.js +132 -0
- package/dist/persistence/FilePersistence.js.map +1 -0
- package/dist/persistence/MemoryPersistence.d.ts +68 -0
- package/dist/persistence/MemoryPersistence.d.ts.map +1 -0
- package/dist/persistence/MemoryPersistence.js +51 -0
- package/dist/persistence/MemoryPersistence.js.map +1 -0
- package/dist/persistence/PersistenceBackend.d.ts +69 -0
- package/dist/persistence/PersistenceBackend.d.ts.map +1 -0
- package/dist/persistence/PersistenceBackend.js +1 -0
- package/dist/persistence/PersistenceFactory.d.ts +21 -0
- package/dist/persistence/PersistenceFactory.d.ts.map +1 -0
- package/dist/persistence/PersistenceFactory.js +25 -0
- package/dist/persistence/PersistenceFactory.js.map +1 -0
- package/dist/persistence/SqlitePersistence.d.ts +60 -0
- package/dist/persistence/SqlitePersistence.d.ts.map +1 -0
- package/dist/persistence/SqlitePersistence.js +136 -0
- package/dist/persistence/SqlitePersistence.js.map +1 -0
- package/dist/pool/ConnectionPool.d.ts +215 -0
- package/dist/pool/ConnectionPool.d.ts.map +1 -0
- package/dist/pool/ConnectionPool.js +187 -0
- package/dist/pool/ConnectionPool.js.map +1 -0
- package/dist/registry/BaseRegistry.d.ts +203 -0
- package/dist/registry/BaseRegistry.d.ts.map +1 -0
- package/dist/registry/BaseRegistry.js +165 -0
- package/dist/registry/BaseRegistry.js.map +1 -0
- package/dist/registry/SkillRegistry.d.ts +69 -0
- package/dist/registry/SkillRegistry.d.ts.map +1 -0
- package/dist/registry/SkillRegistry.js +88 -0
- package/dist/registry/SkillRegistry.js.map +1 -0
- package/dist/registry/ToolRegistry.d.ts +69 -0
- package/dist/registry/ToolRegistry.d.ts.map +1 -0
- package/dist/registry/ToolRegistry.js +93 -0
- package/dist/registry/ToolRegistry.js.map +1 -0
- package/dist/sanitize.d.ts +63 -0
- package/dist/sanitize.d.ts.map +1 -0
- package/dist/sanitize.js +14 -0
- package/dist/sanitize.js.map +1 -0
- package/dist/schema.d.ts +531 -0
- package/dist/schema.d.ts.map +1 -0
- package/dist/schema.js +204 -0
- package/dist/schema.js.map +1 -0
- package/dist/telemetry/Telemetry.d.ts +36 -0
- package/dist/telemetry/Telemetry.d.ts.map +1 -0
- package/dist/telemetry/Telemetry.js +68 -0
- package/dist/telemetry/Telemetry.js.map +1 -0
- package/dist/telemetry/__tests__/Telemetry.test.d.ts +2 -0
- package/dist/telemetry/__tests__/Telemetry.test.d.ts.map +1 -0
- package/dist/transport/BaseTransport.d.ts +184 -0
- package/dist/transport/BaseTransport.d.ts.map +1 -0
- package/dist/transport/BaseTransport.js +200 -0
- package/dist/transport/BaseTransport.js.map +1 -0
- package/dist/transport/HttpHelpers.d.ts +60 -0
- package/dist/transport/HttpHelpers.d.ts.map +1 -0
- package/dist/transport/HttpHelpers.js +50 -0
- package/dist/transport/HttpHelpers.js.map +1 -0
- package/dist/transport/HttpTransport.d.ts +134 -0
- package/dist/transport/HttpTransport.d.ts.map +1 -0
- package/dist/transport/HttpTransport.js +175 -0
- package/dist/transport/HttpTransport.js.map +1 -0
- package/dist/transport/SseTransport.d.ts +133 -0
- package/dist/transport/SseTransport.d.ts.map +1 -0
- package/dist/transport/SseTransport.js +318 -0
- package/dist/transport/SseTransport.js.map +1 -0
- package/dist/transport/StreamableHttpTransport.d.ts +224 -0
- package/dist/transport/StreamableHttpTransport.d.ts.map +1 -0
- package/dist/transport/StreamableHttpTransport.js +407 -0
- package/dist/transport/StreamableHttpTransport.js.map +1 -0
- package/dist/types/disposable.d.ts +22 -0
- package/dist/types/disposable.d.ts.map +1 -0
- package/dist/types/disposable.js +1 -0
- package/dist/types/server-config.d.ts +32 -0
- package/dist/types/server-config.d.ts.map +1 -0
- package/dist/types/server-config.js +1 -0
- package/dist/types/skill.d.ts +69 -0
- package/dist/types/skill.d.ts.map +1 -0
- package/dist/types/skill.js +1 -0
- package/dist/types/tool.d.ts +68 -0
- package/dist/types/tool.d.ts.map +1 -0
- package/dist/types/tool.js +1 -0
- package/dist/watchers/SkillWatcher.d.ts +132 -0
- package/dist/watchers/SkillWatcher.d.ts.map +1 -0
- package/dist/watchers/SkillWatcher.js +73 -0
- package/dist/watchers/SkillWatcher.js.map +1 -0
- package/dist/watchers/ToolWatcher.d.ts +109 -0
- package/dist/watchers/ToolWatcher.d.ts.map +1 -0
- package/dist/watchers/ToolWatcher.js +71 -0
- package/dist/watchers/ToolWatcher.js.map +1 -0
- package/package.json +95 -0
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
/**
 * LRU (Least Recently Used) cache with TTL support for discovery results.
 *
 * Entries are stored in a Map whose insertion order tracks recency: a hit
 * re-inserts the entry at the end (most recently used), and when `maxSize`
 * is reached the first entry (least recently used) is evicted. Entries also
 * expire `ttl` milliseconds after their last access — note the TTL is
 * *sliding*: `get()` refreshes `entry.timestamp`, so a frequently-read key
 * never expires. Expiration is checked lazily in `get()`/`has()`, and
 * optionally swept by a background interval (`cleanupInterval`).
 *
 * Not thread-safe; do not share across concurrent mutation without
 * external synchronization.
 */
class DiscoveryCache {
	/** Map<string, {data, timestamp, accessCount}>; insertion order = LRU order. */
	_cache;
	/** Maximum number of entries before LRU eviction begins. */
	_maxSize;
	/** Time-to-live in milliseconds for cache entries. */
	_ttl;
	/** Optional metrics sink with a counter(name, value, labels, help?) method. */
	_metrics;
	/** Background sweep timer; null when no cleanupInterval was configured. */
	_cleanupTimer = null;

	/**
	 * @param {object} [options]
	 * @param {number} [options.maxSize=100] - Max entries before LRU eviction.
	 * @param {number} [options.ttl=300000] - Entry TTL in ms (default 5 minutes).
	 * @param {number} [options.cleanupInterval] - If > 0, sweep expired entries
	 *   on this interval (ms). The timer is unref'd so it never keeps the
	 *   process alive.
	 * @param {object} [options.metrics] - Optional metrics collector.
	 */
	constructor(options = {}) {
		this._cache = new Map();
		this._maxSize = options.maxSize ?? 100;
		this._ttl = options.ttl ?? 300000; // 5 minutes default
		this._metrics = options.metrics;
		if (options.cleanupInterval && options.cleanupInterval > 0) {
			this._cleanupTimer = setInterval(() => {
				this._cleanupExpired();
			}, options.cleanupInterval);
			// unref() may be absent in non-Node environments; guard before calling.
			if (this._cleanupTimer.unref) this._cleanupTimer.unref();
		}
	}

	/** Removes every entry older than the TTL. Called by the sweep timer. */
	_cleanupExpired() {
		const now = Date.now();
		for (const [key, entry] of this._cache.entries()) {
			const age = now - entry.timestamp;
			if (age > this._ttl) {
				this._cache.delete(key);
				this._metrics?.counter('cache_eviction_total', 1, {
					cause: 'ttl_cleanup'
				});
			}
		}
	}

	/**
	 * Retrieves a cached value, refreshing its recency and sliding TTL.
	 *
	 * Expired entries are deleted on access. A hit re-inserts the entry at
	 * the end of the Map so it becomes the most recently used.
	 *
	 * @param {string} key - Cache key.
	 * @returns {Array|null} The cached data array, or null if absent/expired.
	 */
	get(key) {
		const entry = this._cache.get(key);
		if (!entry) {
			this._metrics?.counter('cache_miss_total', 1, {}, 'Total discovery cache misses');
			return null;
		}
		const now = Date.now();
		const age = now - entry.timestamp;
		// Lazy TTL check: expired entries are evicted here rather than eagerly.
		if (age > this._ttl) {
			this._cache.delete(key);
			this._metrics?.counter('cache_eviction_total', 1, {
				cause: 'ttl'
			}, 'Total cache evictions');
			this._metrics?.counter('cache_miss_total', 1, {}, 'Total discovery cache misses');
			return null;
		}
		// Refresh access metadata (sliding TTL) and move to MRU position.
		entry.accessCount++;
		entry.timestamp = now;
		this._cache.delete(key);
		this._cache.set(key, entry);
		this._metrics?.counter('cache_hit_total', 1, {}, 'Total discovery cache hits');
		return entry.data;
	}

	/**
	 * Stores a value, evicting the least recently used entry when the cache
	 * is full and `key` is new. Overwriting an existing key never evicts.
	 *
	 * @param {string} key - Cache key.
	 * @param {Array} data - Data array to cache.
	 */
	set(key, data) {
		if (this._cache.size >= this._maxSize && !this._cache.has(key)) {
			// First Map entry is the least recently used.
			const lruKey = this._cache.keys().next().value;
			// Compare against undefined — a falsy key like '' must still be
			// evicted, otherwise the cache grows past maxSize.
			if (lruKey !== undefined) {
				this._cache.delete(lruKey);
				this._metrics?.counter('cache_eviction_total', 1, {
					cause: 'lru'
				}, 'Total cache evictions');
			}
		}
		this._cache.set(key, {
			data,
			timestamp: Date.now(),
			accessCount: 0
		});
	}

	/**
	 * Reports whether a key is present and unexpired. Unlike `get()`, this
	 * does not delete expired entries and does not refresh recency.
	 *
	 * @param {string} key - Cache key.
	 * @returns {boolean} true if the key exists and is within its TTL.
	 */
	has(key) {
		const entry = this._cache.get(key);
		if (!entry) return false;
		const age = Date.now() - entry.timestamp;
		return age <= this._ttl;
	}

	/**
	 * Explicitly removes one entry (e.g. when a watched path changes).
	 * @param {string} key - Cache key to drop.
	 */
	invalidate(key) {
		this._cache.delete(key);
	}

	/** Removes all entries, resetting the cache to empty. */
	clear() {
		this._cache.clear();
	}

	/** Stops the background sweep timer, if one was started. Idempotent. */
	dispose() {
		if (this._cleanupTimer) {
			clearInterval(this._cleanupTimer);
			this._cleanupTimer = null;
		}
	}

	/**
	 * @returns {number} Current number of entries (expired-but-unswept
	 *   entries are counted until touched).
	 */
	size() {
		return this._cache.size;
	}

	/**
	 * @returns {{size: number, keys: string[]}} Snapshot of cache size and
	 *   all stored keys, for monitoring/debugging.
	 */
	getStats() {
		return {
			size: this._cache.size,
			keys: Array.from(this._cache.keys())
		};
	}
}
export { DiscoveryCache };
|
|
99
|
+
|
|
100
|
+
//# sourceMappingURL=DiscoveryCache.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"cache/DiscoveryCache.js","sources":["../../src/cache/DiscoveryCache.ts"],"sourcesContent":["import type { IMetrics } from '../contracts/index.js';\n\n/**\n * LRU Cache for tool/skill discovery results with TTL support.\n *\n * This module provides a generic Least Recently Used (LRU) cache with time-to-live\n * (TTL) support. It's designed to cache discovery results to avoid repeated expensive\n * operations like filesystem scanning.\n *\n * @module cache\n */\n\n/**\n * A cache entry containing data and metadata for LRU tracking.\n *\n * @template T - The type of data being cached (typically Tool or Skill)\n *\n * @example\n * ```typescript\n * const entry: CacheEntry<string> = {\n * data: ['tool1', 'tool2'],\n * timestamp: 1705550000000,\n * accessCount: 5\n * };\n * ```\n */\nexport interface CacheEntry<T> {\n\t/** The cached data array. */\n\tdata: T[];\n\n\t/** Unix timestamp (milliseconds) when the entry was created/last accessed. */\n\ttimestamp: number;\n\n\t/** Number of times this entry has been accessed (for LRU tracking). 
*/\n\taccessCount: number;\n}\n\n/**\n * Configuration options for creating a `DiscoveryCache` instance.\n *\n * @example\n * ```typescript\n * const options: DiscoveryCacheOptions = {\n * maxSize: 200,\n * ttl: 600000 // 10 minutes\n * };\n * ```\n */\nexport interface DiscoveryCacheOptions {\n\t/**\n\t * Maximum number of cache entries before LRU eviction begins.\n\t * @default 100\n\t */\n\tmaxSize?: number;\n\n\t/**\n\t * Time-to-live for cache entries in milliseconds.\n\t * Entries older than this are considered expired.\n\t * @default 300000 (5 minutes)\n\t */\n\tttl?: number;\n\n\tcleanupInterval?: number;\n\tmetrics?: IMetrics;\n}\n\n/**\n * LRU (Least Recently Used) cache with TTL support for caching discovery results.\n *\n * This cache implements an LRU eviction policy where entries that haven't been\n * accessed recently are removed first when the cache reaches maximum capacity.\n * Additionally, entries expire after a configurable TTL period.\n *\n * @remarks\n * **Eviction Policy:**\n * - When `maxSize` is reached, the least recently used entry is removed\n * - An entry's \"recent use\" is tracked by its position in the underlying Map\n * - Accessing an entry moves it to the \"most recently used\" position\n * - Setting an existing key also updates its position\n *\n * **Expiration:**\n * - Entries older than `ttl` milliseconds are automatically removed on access\n * - Expiration is checked lazily (when `get()` or `has()` is called)\n *\n * **Thread Safety:**\n * - This implementation is not thread-safe and should not be shared\n * across asynchronous operations without proper synchronization\n *\n * @template T - The type of data being cached\n *\n * @example\n * ```typescript\n * const cache = new DiscoveryCache<string>({\n * maxSize: 100,\n * ttl: 300000 // 5 minutes\n * });\n *\n * // Store discovery results\n * cache.set('.claude/skills', ['commit', 'pdf', 'test']);\n *\n * // Retrieve from cache\n * const skills = cache.get('.claude/skills');\n 
* if (skills) {\n * console.log('Cached skills:', skills);\n * } else {\n * console.log('Not cached or expired');\n * }\n *\n * // Check if cached and valid\n * if (cache.has('.claude/skills')) {\n * console.log('Skills are cached and fresh');\n * }\n *\n * // Manually invalidate\n * cache.invalidate('.claude/skills');\n *\n * // Clear all cache\n * cache.clear();\n *\n * // Get statistics\n * const stats = cache.getStats();\n * console.log(`Cache size: ${stats.size}, keys: ${stats.keys.join(', ')}`);\n * ```\n */\nexport class DiscoveryCache<T> {\n\t/** Internal Map storing cache entries. Insertion order tracks LRU status. */\n\tprivate _cache: Map<string, CacheEntry<T>>;\n\n\t/** Maximum number of entries before eviction begins. */\n\tprivate _maxSize: number;\n\n\t/** Time-to-live in milliseconds for cache entries. */\n\tprivate _ttl: number;\n\tprivate _metrics?: IMetrics;\n\tprivate _cleanupTimer: NodeJS.Timeout | null = null;\n\n\t/**\n\t * Creates a new DiscoveryCache instance.\n\t *\n\t * @param options - Configuration options for the cache\n\t *\n\t * @example\n\t * ```typescript\n\t * // Default configuration (100 entries, 5 minute TTL)\n\t * const cache1 = new DiscoveryCache();\n\t *\n\t * // Custom configuration\n\t * const cache2 = new DiscoveryCache({\n\t * maxSize: 200,\n\t * ttl: 600000 // 10 minutes\n\t * });\n\t * ```\n\t */\n\tconstructor(options: DiscoveryCacheOptions = {}) {\n\t\tthis._cache = new Map();\n\t\tthis._maxSize = options.maxSize ?? 100;\n\t\tthis._ttl = options.ttl ?? 
300000; // 5 minutes default\n\t\tthis._metrics = options.metrics;\n\n\t\tif (options.cleanupInterval && options.cleanupInterval > 0) {\n\t\t\tthis._cleanupTimer = setInterval(() => {\n\t\t\t\tthis._cleanupExpired();\n\t\t\t}, options.cleanupInterval);\n\t\t\tif (this._cleanupTimer.unref) this._cleanupTimer.unref();\n\t\t}\n\t}\n\n\tprivate _cleanupExpired(): void {\n\t\tconst now = Date.now();\n\t\tfor (const [key, entry] of this._cache.entries()) {\n\t\t\tconst age = now - entry.timestamp;\n\t\t\tif (age > this._ttl) {\n\t\t\t\tthis._cache.delete(key);\n\t\t\t\tthis._metrics?.counter('cache_eviction_total', 1, { cause: 'ttl_cleanup' });\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t * Retrieves a value from the cache by key.\n\t *\n\t * Returns the cached data if the key exists and the entry hasn't expired.\n\t * Updates the entry's access time and moves it to the most-recently-used position.\n\t * Returns null if the key doesn't exist or the entry has expired (expired entries\n\t * are automatically deleted).\n\t *\n\t * @param key - The cache key to retrieve\n\t * @returns The cached data array, or null if not found or expired\n\t *\n\t * @example\n\t * ```typescript\n\t * const tools = cache.get('/path/to/tools');\n\t * if (tools) {\n\t * console.log('Found cached tools:', tools);\n\t * } else {\n\t * console.log('Tools not cached or expired, need to discover');\n\t * }\n\t * ```\n\t */\n\tget(key: string): T[] | null {\n\t\tconst entry = this._cache.get(key);\n\t\tif (!entry) {\n\t\t\tthis._metrics?.counter('cache_miss_total', 1, {}, 'Total discovery cache misses');\n\t\t\treturn null;\n\t\t}\n\n\t\tconst now = Date.now();\n\t\tconst age = now - entry.timestamp;\n\n\t\t// Check TTL\n\t\tif (age > this._ttl) {\n\t\t\tthis._cache.delete(key);\n\t\t\tthis._metrics?.counter('cache_eviction_total', 1, { cause: 'ttl' }, 'Total cache evictions');\n\t\t\tthis._metrics?.counter('cache_miss_total', 1, {}, 'Total discovery cache misses');\n\t\t\treturn null;\n\t\t}\n\n\t\t// Update 
access metadata for LRU\n\t\tentry.accessCount++;\n\t\tentry.timestamp = now;\n\n\t\t// Move to end (most recently used)\n\t\tthis._cache.delete(key);\n\t\tthis._cache.set(key, entry);\n\t\tthis._metrics?.counter('cache_hit_total', 1, {}, 'Total discovery cache hits');\n\n\t\treturn entry.data;\n\t}\n\n\t/**\n\t * Stores a value in the cache with the given key.\n\t *\n\t * If the cache is at maximum capacity and this is a new key, the least\n\t * recently used entry is evicted before storing the new value. If the key\n\t * already exists, its value and access time are updated.\n\t *\n\t * @param key - The cache key to store under\n\t * @param data - The data array to cache\n\t *\n\t * @example\n\t * ```typescript\n\t * const discoveredTools = ['Read', 'Write', 'Bash', 'Grep'];\n\t * cache.set('/usr/local/tools', discoveredTools);\n\t * ```\n\t */\n\tset(key: string, data: T[]): void {\n\t\t// Enforce max size with LRU eviction\n\t\tif (this._cache.size >= this._maxSize && !this._cache.has(key)) {\n\t\t\t// Remove least recently used (first entry)\n\t\t\tconst lruKey = this._cache.keys().next().value;\n\t\t\tif (lruKey) {\n\t\t\t\tthis._cache.delete(lruKey);\n\t\t\t\tthis._metrics?.counter(\n\t\t\t\t\t'cache_eviction_total',\n\t\t\t\t\t1,\n\t\t\t\t\t{ cause: 'lru' },\n\t\t\t\t\t'Total cache evictions'\n\t\t\t\t);\n\t\t\t}\n\t\t}\n\n\t\tthis._cache.set(key, {\n\t\t\tdata,\n\t\t\ttimestamp: Date.now(),\n\t\t\taccessCount: 0,\n\t\t});\n\t}\n\n\t/**\n\t * Checks if a key exists in the cache and hasn't expired.\n\t *\n\t * Returns true only if the key exists and the entry is within its TTL.\n\t * Expired entries are not automatically removed by this method (use `get()`\n\t * for automatic expiration cleanup).\n\t *\n\t * @param key - The cache key to check\n\t * @returns true if the key exists and hasn't expired, false otherwise\n\t *\n\t * @example\n\t * ```typescript\n\t * if (cache.has('/path/to/skills')) {\n\t * // Key exists and is fresh\n\t * const skills = 
cache.get('/path/to/skills')!;\n\t * } else {\n\t * // Need to discover skills\n\t * }\n\t * ```\n\t */\n\thas(key: string): boolean {\n\t\tconst entry = this._cache.get(key);\n\t\tif (!entry) return false;\n\n\t\tconst age = Date.now() - entry.timestamp;\n\t\treturn age <= this._ttl;\n\t}\n\n\t/**\n\t * Removes a specific entry from the cache.\n\t *\n\t * This is useful for explicit invalidation when cached data is known\n\t * to be stale due to external changes.\n\t *\n\t * @param key - The cache key to invalidate\n\t *\n\t * @example\n\t * ```typescript\n\t * // Invalidate cache when files change\n\t * watcher.on('change', (path) => {\n\t * cache.invalidate(path);\n\t * });\n\t * ```\n\t */\n\tinvalidate(key: string): void {\n\t\tthis._cache.delete(key);\n\t}\n\n\t/**\n\t * Removes all entries from the cache.\n\t *\n\t * This completely resets the cache to an empty state.\n\t *\n\t * @example\n\t * ```typescript\n\t * // Clear cache before running tests\n\t * cache.clear();\n\t * ```\n\t */\n\tclear(): void {\n\t\tthis._cache.clear();\n\t}\n\n\tdispose(): void {\n\t\tif (this._cleanupTimer) {\n\t\t\tclearInterval(this._cleanupTimer);\n\t\t\tthis._cleanupTimer = null;\n\t\t}\n\t}\n\n\t/**\n\t * Gets the current number of entries in the cache.\n\t *\n\t * @returns The number of cached entries\n\t *\n\t * @example\n\t * ```typescript\n\t * console.log(`Cache contains ${cache.size()} entries`);\n\t * if (cache.size() >= maxSize) {\n\t * console.log('Cache is at capacity');\n\t * }\n\t * ```\n\t */\n\tsize(): number {\n\t\treturn this._cache.size;\n\t}\n\n\t/**\n\t * Gets cache statistics including size and all cached keys.\n\t *\n\t * Useful for monitoring and debugging cache behavior.\n\t *\n\t * @returns An object with cache size and array of all keys\n\t *\n\t * @example\n\t * ```typescript\n\t * const stats = cache.getStats();\n\t * console.log(`Cache stats:`);\n\t * console.log(` Size: ${stats.size}`);\n\t * console.log(` Keys: ${stats.keys.join(', ')}`);\n\t * 
```\n\t */\n\tgetStats(): { size: number; keys: string[] } {\n\t\treturn {\n\t\t\tsize: this._cache.size,\n\t\t\tkeys: Array.from(this._cache.keys()),\n\t\t};\n\t}\n}\n"],"names":["DiscoveryCache","options","Map","setInterval","now","Date","key","entry","age","data","lruKey","clearInterval","Array"],"mappings":"AA4HO,MAAMA;IAEJ,OAAmC;IAGnC,SAAiB;IAGjB,KAAa;IACb,SAAoB;IACpB,gBAAuC,KAAK;IAmBpD,YAAYC,UAAiC,CAAC,CAAC,CAAE;QAChD,IAAI,CAAC,MAAM,GAAG,IAAIC;QAClB,IAAI,CAAC,QAAQ,GAAGD,QAAQ,OAAO,IAAI;QACnC,IAAI,CAAC,IAAI,GAAGA,QAAQ,GAAG,IAAI;QAC3B,IAAI,CAAC,QAAQ,GAAGA,QAAQ,OAAO;QAE/B,IAAIA,QAAQ,eAAe,IAAIA,QAAQ,eAAe,GAAG,GAAG;YAC3D,IAAI,CAAC,aAAa,GAAGE,YAAY;gBAChC,IAAI,CAAC,eAAe;YACrB,GAAGF,QAAQ,eAAe;YAC1B,IAAI,IAAI,CAAC,aAAa,CAAC,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,KAAK;QACvD;IACD;IAEQ,kBAAwB;QAC/B,MAAMG,MAAMC,KAAK,GAAG;QACpB,KAAK,MAAM,CAACC,KAAKC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC,OAAO,GAAI;YACjD,MAAMC,MAAMJ,MAAMG,MAAM,SAAS;YACjC,IAAIC,MAAM,IAAI,CAAC,IAAI,EAAE;gBACpB,IAAI,CAAC,MAAM,CAAC,MAAM,CAACF;gBACnB,IAAI,CAAC,QAAQ,EAAE,QAAQ,wBAAwB,GAAG;oBAAE,OAAO;gBAAc;YAC1E;QACD;IACD;IAuBA,IAAIA,GAAW,EAAc;QAC5B,MAAMC,QAAQ,IAAI,CAAC,MAAM,CAAC,GAAG,CAACD;QAC9B,IAAI,CAACC,OAAO;YACX,IAAI,CAAC,QAAQ,EAAE,QAAQ,oBAAoB,GAAG,CAAC,GAAG;YAClD,OAAO;QACR;QAEA,MAAMH,MAAMC,KAAK,GAAG;QACpB,MAAMG,MAAMJ,MAAMG,MAAM,SAAS;QAGjC,IAAIC,MAAM,IAAI,CAAC,IAAI,EAAE;YACpB,IAAI,CAAC,MAAM,CAAC,MAAM,CAACF;YACnB,IAAI,CAAC,QAAQ,EAAE,QAAQ,wBAAwB,GAAG;gBAAE,OAAO;YAAM,GAAG;YACpE,IAAI,CAAC,QAAQ,EAAE,QAAQ,oBAAoB,GAAG,CAAC,GAAG;YAClD,OAAO;QACR;QAGAC,MAAM,WAAW;QACjBA,MAAM,SAAS,GAAGH;QAGlB,IAAI,CAAC,MAAM,CAAC,MAAM,CAACE;QACnB,IAAI,CAAC,MAAM,CAAC,GAAG,CAACA,KAAKC;QACrB,IAAI,CAAC,QAAQ,EAAE,QAAQ,mBAAmB,GAAG,CAAC,GAAG;QAEjD,OAAOA,MAAM,IAAI;IAClB;IAkBA,IAAID,GAAW,EAAEG,IAAS,EAAQ;QAEjC,IAAI,IAAI,CAAC,MAAM,CAAC,IAAI,IAAI,IAAI,CAAC,QAAQ,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAACH,MAAM;YAE/D,MAAMI,SAAS,IAAI,CAAC,MAAM,CAAC,IAAI,GAAG,IAAI,GAAG,KAAK;YAC9C,IAAIA,QAAQ;gBACX,IAAI,CAAC,MAAM,CAAC,MAAM,CAACA;gBACnB,IAAI,CAAC,QAAQ,EAAE,QACd,wBACA,GACA;oBAAE,OA
AO;gBAAM,GACf;YAEF;QACD;QAEA,IAAI,CAAC,MAAM,CAAC,GAAG,CAACJ,KAAK;YACpBG;YACA,WAAWJ,KAAK,GAAG;YACnB,aAAa;QACd;IACD;IAsBA,IAAIC,GAAW,EAAW;QACzB,MAAMC,QAAQ,IAAI,CAAC,MAAM,CAAC,GAAG,CAACD;QAC9B,IAAI,CAACC,OAAO,OAAO;QAEnB,MAAMC,MAAMH,KAAK,GAAG,KAAKE,MAAM,SAAS;QACxC,OAAOC,OAAO,IAAI,CAAC,IAAI;IACxB;IAkBA,WAAWF,GAAW,EAAQ;QAC7B,IAAI,CAAC,MAAM,CAAC,MAAM,CAACA;IACpB;IAaA,QAAc;QACb,IAAI,CAAC,MAAM,CAAC,KAAK;IAClB;IAEA,UAAgB;QACf,IAAI,IAAI,CAAC,aAAa,EAAE;YACvBK,cAAc,IAAI,CAAC,aAAa;YAChC,IAAI,CAAC,aAAa,GAAG;QACtB;IACD;IAeA,OAAe;QACd,OAAO,IAAI,CAAC,MAAM,CAAC,IAAI;IACxB;IAiBA,WAA6C;QAC5C,OAAO;YACN,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI;YACtB,MAAMC,MAAM,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI;QAClC;IACD;AACD"}
|
package/dist/cli.d.ts
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"cli.d.ts","sourceRoot":"","sources":["../src/cli.ts"],"names":[],"mappings":""}
|
package/dist/cli.js
ADDED
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
#!/usr/bin/env bun
|
|
2
|
+
import*as e from"./lib.js";import*as t from"node:crypto";import*as s from"node:http";import*as i from"node:url";import*as n from"valibot";import{ValibotJsonSchemaAdapter as o}from"@tmcp/adapter-valibot";import{StdioTransport as r}from"@tmcp/transport-stdio";import{readFileSync as a}from"node:fs";import{dirname as l,join as c}from"node:path";import{McpServer as h}from"tmcp";import{AsyncLocalStorage as p}from"node:async_hooks";var d={472(e,t,s){s.d(t,{createConnectionPool:()=>h});class i extends Error{code;constructor(e,t){super(e),this.code=t,this.name="SequentialThinkingError",Error.captureStackTrace(this,this.constructor)}}class n extends i{constructor(e){super(`Session '${e}' is not active`,"SESSION_NOT_ACTIVE"),this.name="SessionNotActiveError"}}class o extends i{constructor(e){super(`Session not found: ${e}`,"SESSION_NOT_FOUND"),this.name="SessionNotFoundError"}}class r extends i{constructor(e){super(`Max sessions (${e}) reached. Wait for a session to close or increase maxSessions.`,"MAX_SESSIONS_REACHED"),this.name="MaxSessionsReachedError"}}class a extends i{constructor(){super("ConnectionPool has been terminated","POOL_TERMINATED"),this.name="PoolTerminatedError"}}class l{_server;_id;_createdAt;_lastActivityAt;_isActiveValue;_timeout;_cleanupTimer=null;_logger;constructor(e,t,s,i){this._server=t,this._id=e,this._createdAt=Date.now(),this._lastActivityAt=this._createdAt,this._isActiveValue=!0,this._timeout=s,this._logger=i,this._startTimeout()}get isActive(){return this._isActiveValue}async process(e){if(!this.isActive)throw new n(this._id);return this._lastActivityAt=Date.now(),this._resetTimeout(),this._server.processThought(e)}getInfo(){return{id:this._id,server:this._server,createdAt:this._createdAt,lastActivityAt:this._lastActivityAt,isActive:this.isActive}}isTimedOut(){return Date.now()-this._lastActivityAt>this._timeout}async 
close(){this._isActiveValue=!1,this._cleanupTimer&&(clearTimeout(this._cleanupTimer),this._cleanupTimer=null),this._server.stop()}_startTimeout(){this._cleanupTimer&&clearTimeout(this._cleanupTimer),this._cleanupTimer=setTimeout(()=>{this.isTimedOut()&&(this._logger.warn(`Session ${this._id} timed out, closing`),this.close().catch(e=>{this._logger.error(`Error closing timed out session ${this._id}:`,e)}))},this._timeout)}_resetTimeout(){this._startTimeout()}}class c{_sessions=new Map;_createSessionLock=null;_maxSessions;_sessionTimeout;_autoCleanup;_cleanupInterval;_cleanupTimerId=null;_terminated=!1;_logger;_serverFactory;constructor(e={}){this._maxSessions=e.maxSessions??100,this._sessionTimeout=e.sessionTimeout??3e5,this._autoCleanup=e.autoCleanup??!0,this._cleanupInterval=e.cleanupInterval??6e4,this._serverFactory=e.serverFactory??null,this._logger=e.logger??this._createNoopLogger(),this._autoCleanup&&this._startCleanup()}_createNoopLogger(){return{info:()=>{},warn:()=>{},error:()=>{},debug:()=>{},setLevel:()=>{},getLevel:()=>"info"}}async createSession(){let e;for(;this._createSessionLock;)await this._createSessionLock;if(this._terminated)throw new a;if(this._sessions.size>=this._maxSessions)throw new r(this._maxSessions);if(!this._serverFactory)throw Error("ConnectionPool requires a serverFactory option to create sessions");this._createSessionLock=new Promise(t=>{e=t});try{let e=`session_${Date.now()}_${Math.random().toString(36).substring(2,11)}`,t=await this._serverFactory(),s=new l(e,t,this._sessionTimeout,this._logger);return this._sessions.set(e,s),this._logger.info(`Created session ${e} (${this._sessions.size}/${this._maxSessions} active sessions)`),e}finally{e(),this._createSessionLock=null}}async process(e,t){let s=this._sessions.get(e);if(!s)throw new o(e);return s.process(t)}async closeSession(e){let t=this._sessions.get(e);if(!t)throw new o(e);await t.close(),this._sessions.delete(e),this._logger.info(`Closed session ${e} 
(${this._sessions.size}/${this._maxSessions} active sessions)`)}getSessionInfo(e){var t;return null==(t=this._sessions.get(e))?void 0:t.getInfo()}getActiveSessions(){return Array.from(this._sessions.values()).filter(e=>e.isActive).map(e=>e.getInfo())}getStats(){let e=this.getActiveSessions();return{totalSessions:this._sessions.size,activeSessions:e.length,maxSessions:this._maxSessions,cleanupEnabled:this._autoCleanup,sessionTimeout:this._sessionTimeout}}_startCleanup(){null!==this._cleanupTimerId&&clearInterval(this._cleanupTimerId),this._cleanupTimerId=setInterval(()=>{this._cleanupTimedOutSessions()},this._cleanupInterval)}_cleanupTimedOutSessions(){let e=0;for(let[t,s]of this._sessions.entries())s.isTimedOut()&&(s.close().catch(e=>{this._logger.error(`Error closing timed out session ${t}:`,e)}),this._sessions.delete(t),e++);e>0&&this._logger.info(`Cleaned ${e} timed-out sessions (${this._sessions.size}/${this._maxSessions} active sessions)`)}async terminate(){if(this._terminated)return;this._terminated=!0,null!==this._cleanupTimerId&&(clearInterval(this._cleanupTimerId),this._cleanupTimerId=null);let e=Array.from(this._sessions.values()).map(e=>e.close().catch(t=>{this._logger.error(`Error closing session ${e.getInfo().id}:`,t)}));await Promise.all(e),this._sessions.clear(),this._logger.info("ConnectionPool terminated")}async dispose(){await this.terminate()}isRunning(){return!this._terminated}}function h(e){return new c(e)}},555(e,t,s){s.d(t,{Uv:()=>d,ZK:()=>h,iV:()=>p});var i=s(821);let n=`A detailed tool for dynamic and reflective problem-solving through thoughts.
|
|
3
|
+
This tool helps analyze problems through a flexible thinking process that can adapt and evolve.
|
|
4
|
+
Each thought can build on, question, or revise previous insights as understanding deepens.
|
|
5
|
+
|
|
6
|
+
IMPORTANT: This server facilitates sequential thinking with MCP tool coordination and skill recommendations. The LLM analyzes available tools and skills to make intelligent recommendations, which are then tracked and organized by this server.
|
|
7
|
+
|
|
8
|
+
When to use this tool:
|
|
9
|
+
- Breaking down complex problems into steps
|
|
10
|
+
- Planning and design with room for revision
|
|
11
|
+
- Analysis that might need course correction
|
|
12
|
+
- Problems where the full scope might not be clear initially
|
|
13
|
+
- Problems that require a multi-step solution
|
|
14
|
+
- Tasks that need to maintain context over multiple steps
|
|
15
|
+
- Situations where irrelevant information needs to be filtered out
|
|
16
|
+
- When you need guidance on which tools to use and in what order
|
|
17
|
+
- When you need guidance on which skills to invoke for specific workflows
|
|
18
|
+
|
|
19
|
+
Key features:
|
|
20
|
+
- You can adjust total_thoughts up or down as you progress
|
|
21
|
+
- You can question or revise previous thoughts
|
|
22
|
+
- You can add more thoughts even after reaching what seemed like the end
|
|
23
|
+
- You can express uncertainty and explore alternative approaches
|
|
24
|
+
- Not every thought needs to build linearly - you can branch or backtrack
|
|
25
|
+
- Generates a solution hypothesis
|
|
26
|
+
- Verifies the hypothesis based on the Chain of Thought steps
|
|
27
|
+
- Recommends appropriate tools for each step
|
|
28
|
+
- Recommends appropriate skills alongside tools
|
|
29
|
+
- Provides rationale for tool recommendations
|
|
30
|
+
- Suggests tool execution order and parameters
|
|
31
|
+
- Tracks previous recommendations and remaining steps
|
|
32
|
+
|
|
33
|
+
Parameters explained:
|
|
34
|
+
- available_mcp_tools: (Optional) Array of MCP tool names that are available for use (e.g., ["mcp-omnisearch", "mcp-turso-cloud"])
|
|
35
|
+
- available_skills: (Optional) Array of skill names that are available for use (e.g., ["commit", "review-pr", "pdf"])
|
|
36
|
+
- thought: Your current thinking step, which can include:
|
|
37
|
+
* Regular analytical steps
|
|
38
|
+
* Revisions of previous thoughts
|
|
39
|
+
* Questions about previous decisions
|
|
40
|
+
* Realizations about needing more analysis
|
|
41
|
+
* Changes in approach
|
|
42
|
+
* Hypothesis generation
|
|
43
|
+
* Hypothesis verification
|
|
44
|
+
* Tool recommendations and rationale
|
|
45
|
+
- next_thought_needed: True if you need more thinking, even if at what seemed like the end
|
|
46
|
+
- thought_number: Current number in sequence (can go beyond initial total if needed)
|
|
47
|
+
- total_thoughts: Current estimate of thoughts needed (can be adjusted up/down)
|
|
48
|
+
- is_revision: A boolean indicating if this thought revises previous thinking
|
|
49
|
+
- revises_thought: If is_revision is true, which thought number is being reconsidered
|
|
50
|
+
- branch_from_thought: If branching, which thought number is the branching point
|
|
51
|
+
- branch_id: Identifier for the current branch (if any)
|
|
52
|
+
- needs_more_thoughts: If reaching end but realizing more thoughts needed
|
|
53
|
+
- current_step: Current step recommendation, including:
|
|
54
|
+
* step_description: What needs to be done
|
|
55
|
+
* recommended_tools: (CRITICAL: PLURAL - "recommended_tools" with an 's') Tools recommended for this step - MUST be an array.
|
|
56
|
+
* recommended_skills: (CRITICAL: PLURAL - "recommended_skills" with an 's') Skills recommended for this step (optional) - MUST be an array.
|
|
57
|
+
* expected_outcome: What to expect from this step
|
|
58
|
+
* next_step_conditions: Conditions to consider for the next step
|
|
59
|
+
- previous_steps: Steps already recommended (each step MUST use "recommended_tools" PLURAL)
|
|
60
|
+
- remaining_steps: High-level descriptions of upcoming steps
|
|
61
|
+
|
|
62
|
+
Reasoning Enhancement Parameters:
|
|
63
|
+
- thought_type: Thought purpose: 'regular' (default), 'hypothesis', 'verification', 'critique', 'synthesis', 'meta'
|
|
64
|
+
- quality_score: Self-assessed quality of this thought (0-1)
|
|
65
|
+
- confidence: Confidence in this thought's correctness (0-1)
|
|
66
|
+
- hypothesis_id: Links hypothesis to verification (alphanumeric, hyphens, underscores)
|
|
67
|
+
- verification_target: For 'verification'/'critique' types, the thought_number being evaluated
|
|
68
|
+
- synthesis_sources: For 'synthesis' type, the thought_numbers being combined
|
|
69
|
+
- merge_from_thoughts: Thought numbers from other branches merged (graph reasoning)
|
|
70
|
+
- merge_branch_ids: Branch IDs merged into current context
|
|
71
|
+
- meta_observation: Observation about reasoning process (with thought_type 'meta')
|
|
72
|
+
- reasoning_depth: How deep to reason: 'shallow' (quick), 'moderate' (default), 'deep' (thorough)
|
|
73
|
+
- session_id: (Optional) Unique identifier to scope thought history, branches, and statistics to an isolated session. When provided, all state is scoped to this session ID. When omitted, uses shared global state (backward compatible). Format: alphanumeric, hyphens, underscores, 1-100 chars.
|
|
74
|
+
- reset_state: (Optional) When true, clears all state for the target session (or global state if no session_id) before processing the current thought. Use this to start a fresh reasoning chain without accumulated state from previous chains.
|
|
75
|
+
|
|
76
|
+
Response Enrichment:
|
|
77
|
+
- When reasoning fields are set, response includes confidence_signals (depth, revision/branch count, type distribution, avg confidence, structural_quality, quality_components) and reasoning_stats (hypothesis tracking)
|
|
78
|
+
- confidence_signals.structural_quality: Composite 0-1 score — weighted geometric mean of type_diversity (0.3), verification_coverage (0.3), depth_efficiency (0.2), confidence_stability (0.2). All components floored at 0.01 to prevent collapse.
|
|
79
|
+
- confidence_signals.quality_components: Individual metrics — type_diversity (Shannon entropy/log₂(6)), verification_coverage (verified/total hypotheses, 1.0 if none), depth_efficiency (max(chain_depth, branch_count+1)/total, branching rewarded), confidence_stability (1 - stddev(confidence), default 0.5)
|
|
80
|
+
- reasoning_hints: (Conditional) Array of actionable hint strings from cross-thought pattern analysis. Only warning-severity patterns produce hints. Max 3 hints per response, with 3-thought cooldown per pattern per session. Present only when warnings are detected.
|
|
81
|
+
- Detected patterns (internal, not in response): consecutive_without_verification (3+ regular thoughts without verification), unverified_hypothesis (hypothesis without verification within 3 thoughts), no_alternatives_explored (5+ thoughts with no critique/branches), monotonic_type (4+ consecutive same type), confidence_drift (3+ consecutive decreasing confidence), healthy_verification (hypothesis verified within 3 thoughts — info only)
|
|
82
|
+
You should:
|
|
83
|
+
1. Start with an initial estimate of needed thoughts, but be ready to adjust
|
|
84
|
+
2. Feel free to question or revise previous thoughts
|
|
85
|
+
3. Don't hesitate to add more thoughts if needed, even at the "end"
|
|
86
|
+
4. Express uncertainty when present
|
|
87
|
+
5. Mark thoughts that revise previous thinking or branch into new paths
|
|
88
|
+
6. Ignore information that is irrelevant to the current step
|
|
89
|
+
7. Generate a solution hypothesis when appropriate
|
|
90
|
+
8. Verify the hypothesis based on the Chain of Thought steps
|
|
91
|
+
9. Consider available tools that could help with the current step
|
|
92
|
+
10. Provide clear rationale for tool recommendations
|
|
93
|
+
11. Suggest specific tool parameters when appropriate
|
|
94
|
+
12. Consider alternative tools for each step
|
|
95
|
+
13. Track progress through the recommended steps
|
|
96
|
+
14. Consider available skills that provide workflows for complex tasks
|
|
97
|
+
15. Coordinate skill invocation with tool recommendations (skills may call tools)
|
|
98
|
+
16. Provide a single, ideally correct answer as the final output
|
|
99
|
+
17. Only set next_thought_needed to false when truly done and a satisfactory answer is reached
|
|
100
|
+
18. Classify your reasoning steps using thought_type for better analytics and self-awareness
|
|
101
|
+
19. Use hypothesis → verification chains to test solutions before committing
|
|
102
|
+
20. Self-assess quality and confidence to track reasoning reliability
|
|
103
|
+
21. Use merge_from_thoughts to combine insights from multiple reasoning branches
|
|
104
|
+
22. Use session_id to isolate independent reasoning chains from each other
|
|
105
|
+
23. Use reset_state: true when starting a completely new analysis to avoid statistical contamination from previous chains`,o=i.object({tool_name:i.pipe(i.string(),i.description("Name of the tool being recommended")),confidence:i.pipe(i.number(),i.minValue(0),i.maxValue(1),i.description("0-1 indicating confidence in recommendation")),rationale:i.pipe(i.string(),i.description("Why this tool is recommended")),priority:i.optional(i.pipe(i.number(),i.description("Order in the recommendation sequence (default: 999)"))),suggested_inputs:i.optional(i.pipe(i.record(i.string(),i.unknown()),i.description("Optional suggested parameters"))),alternatives:i.optional(i.pipe(i.array(i.string()),i.description("Alternative tools that could be used")))}),r=i.object({skill_name:i.pipe(i.string(),i.description("Name of the skill being recommended")),confidence:i.optional(i.pipe(i.number(),i.minValue(0),i.maxValue(1),i.description("0-1 indicating confidence in recommendation (default: 0.5)"))),rationale:i.optional(i.pipe(i.string(),i.description("Why this skill is recommended (default: empty string)"))),priority:i.optional(i.pipe(i.number(),i.description("Order in the recommendation sequence (default: 999)"))),alternatives:i.optional(i.pipe(i.array(i.string()),i.description("Alternative skills that could be used"))),allowed_tools:i.optional(i.pipe(i.array(i.string()),i.description("Tools this skill is allowed to use (from skill frontmatter)"))),user_invocable:i.optional(i.pipe(i.boolean(),i.description("Whether this skill can be user-invoked")))}),a=i.object({step_description:i.pipe(i.string(),i.description("What needs to be done")),recommended_tools:i.pipe(i.array(o),i.description("Tools recommended for this step")),recommended_skills:i.optional(i.pipe(i.array(r),i.description("Skills recommended for this step"))),expected_outcome:i.pipe(i.string(),i.description("What to expect from this step")),next_step_conditions:i.optional(i.pipe(i.array(i.string()),i.description("Conditions to 
consider for the next step")))}),l=i.object({tool_name:i.pipe(i.string(),i.description("Name of the tool being recommended")),rationale:i.optional(i.pipe(i.string(),i.description("Why this tool is recommended (default: empty string)"))),confidence:i.optional(i.pipe(i.number(),i.minValue(0),i.maxValue(1),i.description("0-1 indicating confidence in recommendation (default: 0.5)"))),priority:i.optional(i.pipe(i.number(),i.description("Order in the recommendation sequence (default: 999)"))),suggested_inputs:i.optional(i.pipe(i.record(i.string(),i.unknown()),i.description("Optional suggested parameters"))),alternatives:i.optional(i.pipe(i.array(i.string()),i.description("Alternative tools that could be used")))}),c=i.object({step_description:i.pipe(i.string(),i.description("What needs to be done")),recommended_tools:i.pipe(i.array(l),i.description("Tools recommended for this step")),recommended_skills:i.optional(i.pipe(i.array(r),i.description("Skills recommended for this step"))),expected_outcome:i.optional(i.pipe(i.string(),i.description("What to expect from this step (default: empty string)"))),next_step_conditions:i.optional(i.pipe(i.array(i.string()),i.description("Conditions to consider for the next step")))}),h=i.object({available_mcp_tools:i.optional(i.pipe(i.array(i.string()),i.description('Array of MCP tool names available for use (e.g., ["mcp-omnisearch", "mcp-turso-cloud"])'))),available_skills:i.optional(i.pipe(i.array(i.string()),i.description('Array of skill names available for use (e.g., ["commit", "review-pr", "pdf"])'))),thought:i.pipe(i.string(),i.description("Your current thinking step")),next_thought_needed:i.optional(i.pipe(i.boolean(),i.description("Whether another thought step is needed (defaults to true if not provided)"))),thought_number:i.pipe(i.number(),i.minValue(1),i.description("Current thought number")),total_thoughts:i.pipe(i.number(),i.minValue(1),i.description("Estimated total thoughts 
needed")),is_revision:i.optional(i.pipe(i.boolean(),i.description("Whether this revises previous thinking"))),revises_thought:i.optional(i.pipe(i.number(),i.minValue(1),i.description("Which thought is being reconsidered"))),branch_from_thought:i.optional(i.pipe(i.number(),i.minValue(1),i.description("Branching point thought number"))),branch_id:i.optional(i.pipe(i.string(),i.regex(/^[a-zA-Z0-9_-]+$/,"Branch ID must contain only letters, numbers, hyphens, and underscores"),i.minLength(1),i.maxLength(50),i.description("Branch identifier (alphanumeric, hyphens, underscores only, max 50 chars)"))),needs_more_thoughts:i.optional(i.pipe(i.boolean(),i.description("If more thoughts are needed"))),current_step:i.optional(i.pipe(a,i.description("Current step recommendation"))),previous_steps:i.optional(i.pipe(i.array(c),i.description("Steps already recommended (lenient schema - allows partial data with defaults)"))),remaining_steps:i.optional(i.pipe(i.array(i.string()),i.description("High-level descriptions of upcoming steps"))),thought_type:i.optional(i.pipe(i.picklist(["regular","hypothesis","verification","critique","synthesis","meta"]),i.description("Classified purpose: regular, hypothesis, verification, critique, synthesis, meta"))),quality_score:i.optional(i.pipe(i.number(),i.minValue(0),i.maxValue(1),i.description("Self-assessed quality score (0-1)"))),confidence:i.optional(i.pipe(i.number(),i.minValue(0),i.maxValue(1),i.description("Explicit confidence in correctness (0-1)"))),hypothesis_id:i.optional(i.pipe(i.string(),i.regex(/^[a-zA-Z0-9_-]+$/,"Hypothesis ID must contain only letters, numbers, hyphens, and underscores"),i.minLength(1),i.maxLength(50),i.description("Identifier linking hypothesis to verification thoughts"))),verification_target:i.optional(i.pipe(i.number(),i.minValue(1),i.description("Thought number being verified or critiqued"))),synthesis_sources:i.optional(i.pipe(i.array(i.pipe(i.number(),i.minValue(1))),i.description("Thought numbers being 
synthesized"))),merge_from_thoughts:i.optional(i.pipe(i.array(i.pipe(i.number(),i.minValue(1))),i.description("Thought numbers from other branches being merged (DAG)"))),merge_branch_ids:i.optional(i.pipe(i.array(i.pipe(i.string(),i.regex(/^[a-zA-Z0-9_-]+$/),i.maxLength(50))),i.description("Branch IDs being merged into current context"))),meta_observation:i.optional(i.pipe(i.string(),i.description("Metacognitive observation about reasoning process"))),reasoning_depth:i.optional(i.pipe(i.picklist(["shallow","moderate","deep"]),i.description("Effort signal: how deep reasoning should go"))),session_id:i.optional(i.pipe(i.string(),i.regex(/^[a-zA-Z0-9_-]+$/,"Session ID must contain only letters, numbers, hyphens, and underscores"),i.minLength(1),i.maxLength(100),i.description("Optional session identifier for state isolation. When provided, thought history, branches, and statistics are scoped to this session. Omitting preserves global behavior."))),reset_state:i.optional(i.pipe(i.boolean(),i.description("When true, clears all state for the target session before processing this thought. 
The thought is then processed as the first in a fresh session.")))}),p={name:"sequentialthinking_tools",description:n,inputSchema:{}},d=i.object({jsonrpc:i.pipe(i.string(),i.literal("2.0"),i.description('JSON-RPC protocol version (must be "2.0")')),method:i.pipe(i.string(),i.minLength(1),i.description("Method name to invoke")),params:i.optional(i.pipe(i.union([i.object({}),i.array(i.unknown())]),i.description("Method parameters (object or array)"))),id:i.optional(i.pipe(i.union([i.string(),i.number(),i.null()]),i.description("Request ID (omit for notifications)")))})},434(e,t,s){s.d(t,{j:()=>r});class i{_level="info";info(e,t){}warn(e,t){}error(e,t){}debug(e,t){}setLevel(e){this._level=e}getLevel(){return this._level}}let n=new Set(["session","sessionId","client","clientId"]),o=/^[a-zA-Z0-9_-]+$/;class r{_port;_host;_corsOrigin;_enableCors;_rateLimitEnabled;_maxRequestsPerMinute;_allowedHosts;_rateLimitMap=new Map;_rateLimitCleanupIntervalId=null;_wasHostExplicitlySet;_isShuttingDown=!1;_logger;_healthChecker;constructor(e={}){this._port=e.port??9108,this._host=e.host??"127.0.0.1",this._wasHostExplicitlySet=void 0!==e.host,this._corsOrigin=e.corsOrigin??"*",this._enableCors=e.enableCors??!0,this._rateLimitEnabled=e.enableRateLimit??!0,this._maxRequestsPerMinute=e.maxRequestsPerMinute??100,this._allowedHosts=this._buildAllowedHosts(e.allowedHosts),this._isShuttingDown=!1,this._logger=e.logger??new i,this._healthChecker=e.healthChecker??null,this._rateLimitEnabled&&this._startRateLimitCleanup()}get serverUrl(){let e=this._wasHostExplicitlySet||"127.0.0.1"!==this._host?this._host:"localhost";return`http://${e}:${this._port}`}validateSessionId(e){return!(e.length>64)&&o.test(e)}sanitizeQueryParams(e){let t={};for(let[s,i]of e.searchParams.entries())n.has(s)&&(t[s]=i);return t}checkRateLimit(e){if(!this._rateLimitEnabled)return!1;let t=Date.now();this._cleanupExpiredRateLimitEntries(t);let 
s=this._rateLimitMap.get(e);return!s||t>s.resetTime?(this._rateLimitMap.set(e,{count:1,resetTime:t+6e4}),!1):s.count>=this._maxRequestsPerMinute||(s.count++,!1)}_cleanupExpiredRateLimitEntries(e=Date.now()){for(let[t,s]of this._rateLimitMap.entries())s.resetTime<=e&&this._rateLimitMap.delete(t)}_startRateLimitCleanup(){null!==this._rateLimitCleanupIntervalId&&clearInterval(this._rateLimitCleanupIntervalId),this._rateLimitCleanupIntervalId=setInterval(()=>{this._cleanupExpiredRateLimitEntries()},6e4)}_stopRateLimitCleanup(){null!==this._rateLimitCleanupIntervalId&&(clearInterval(this._rateLimitCleanupIntervalId),this._rateLimitCleanupIntervalId=null)}getClientIp(e){let t=e.headers["x-forwarded-for"];return t&&"string"==typeof t?t.split(",")[0].trim():e.socket.remoteAddress||"unknown"}validateCorsOrigin(e){if("*"===this._corsOrigin)return!0;let t=e.headers.origin;if(!t||this._corsOrigin===t)return!0;if(this._corsOrigin.includes("*")){let e=this._corsOrigin.replace(/[.+?^${}()|[\]\\]/g,"\\$&").replace(/\*/g,"[a-zA-Z0-9.-]*");return RegExp(`^${e}$`).test(t)}return!1}setCorsHeaders(e){this._enableCors&&(e.setHeader("Access-Control-Allow-Origin",this._corsOrigin),e.setHeader("Access-Control-Allow-Methods","GET, POST, OPTIONS"),e.setHeader("Access-Control-Allow-Headers","Content-Type"))}validateHostHeader(e){let t=e.headers.host;if(!t)return!0;let s=t.split(":")[0].trim().toLowerCase();return!!s&&(0===this._allowedHosts.size||this._allowedHosts.has(s))}_buildAllowedHosts(e){if(e&&e.length>0)return new Set(e.map(e=>e.toLowerCase().trim()).filter(Boolean));let t=this._host.toLowerCase(),s=["localhost","127.0.0.1","::1"];return new Set(s.includes(t)||"0.0.0.0"===t||"::"===t?s:[t])}log(e,t,s){"info"===e?this._logger.info(t,s):"warn"===e?this._logger.warn(t,s):this._logger.error(t,s)}isShuttingDown(){return this._isShuttingDown}handleHealthEndpoint(e,t){let 
s={status:"healthy",...t};this._healthChecker&&(s.liveness=this._healthChecker.checkLiveness()),e.writeHead(200,{"Content-Type":"application/json"}),e.end(JSON.stringify(s))}async handleReadinessEndpoint(e){if(this._healthChecker){let t=await this._healthChecker.checkReadiness(),s="ok"===t.status?200:503;e.writeHead(s,{"Content-Type":"application/json"}),e.end(JSON.stringify(t))}else e.writeHead(200,{"Content-Type":"application/json"}),e.end(JSON.stringify({status:"ok",timestamp:new Date().toISOString(),components:{}}))}handleMetricsEndpoint(e,t){if(!t){e.writeHead(404,{"Content-Type":"text/plain"}),e.end("Not Found");return}e.writeHead(200,{"Content-Type":"text/plain; version=0.0.4; charset=utf-8"}),e.end(t())}}},504(e,t,s){s.d(t,{SseTransport:()=>l});var i=s(316),n=s(61),o=s(821),r=s(555),a=s(434);class l extends a.j{_server;_path;_clients=new Set;_clientSessionMap=new Map;_messageQueue=new Map;_metrics;_connectionPool;constructor(e={}){super(e),this._path=e.path??"/sse",this._metrics=e.metrics,this._connectionPool=e.connectionPool,this._updateActiveConnectionsMetric(),this._server=(0,i.createServer)((e,t)=>this._handleRequest(e,t))}async connect(e){return this._mcpServer=e,new Promise(e=>{this._server.listen(this._port,this._host,()=>{this.log("info",`SSE transport listening on http://${this._host}:${this._port}`),e()})})}_mcpServer=null;async _handleRequest(e,t){var s,i,o,r,a;let l=Date.now(),c=e.url||"/",h=e.method||"GET";if(null==(s=this._metrics)||s.counter("http_requests_total",1,{transport:"sse",method:h,path:c},"Total HTTP requests"),t.once("finish",()=>{var e;let t=(Date.now()-l)/1e3;null==(e=this._metrics)||e.histogram("http_request_duration_seconds",t,{transport:"sse",path:c})}),!this.validateHostHeader(e)){null==(i=this._metrics)||i.counter("http_request_errors_total",1,{transport:"sse",error_type:"forbidden"},"Total HTTP request errors"),t.writeHead(403,{"Content-Type":"application/json"}),t.end(JSON.stringify({error:"Forbidden - invalid host 
header"}));return}let p=new n.URL(e.url||"",`http://${e.headers.host}`),d=this.getClientIp(e);if(this.checkRateLimit(d)){null==(o=this._metrics)||o.counter("http_request_errors_total",1,{transport:"sse",error_type:"rate_limit"},"Total HTTP request errors"),t.writeHead(429,{"Content-Type":"application/json","Retry-After":"60"}),t.end(JSON.stringify({error:"Too many requests"}));return}if(!this.validateCorsOrigin(e)){null==(r=this._metrics)||r.counter("http_request_errors_total",1,{transport:"sse",error_type:"forbidden"},"Total HTTP request errors"),t.writeHead(403,{"Content-Type":"application/json"}),t.end(JSON.stringify({error:"Forbidden - invalid origin"}));return}this.setCorsHeaders(t);let u=this.sanitizeQueryParams(p);if(u.session||u.sessionId){let e=u.session??u.sessionId;if(!this.validateSessionId(e)){null==(a=this._metrics)||a.counter("http_request_errors_total",1,{transport:"sse",error_type:"validation"},"Total HTTP request errors"),t.writeHead(400,{"Content-Type":"application/json"}),t.end(JSON.stringify({error:"Invalid session ID format"}));return}}if(this._enableCors&&"OPTIONS"===e.method){t.writeHead(204),t.end();return}p.pathname===this._path&&"GET"===e.method?await this._handleSseConnection(e,t,u):p.pathname===`${this._path}/message`&&"POST"===e.method?await this._handleMessage(e,t,u):"/health"===p.pathname?this._handleHealthCheck(t):"/ready"===p.pathname?await this._handleReadinessCheck(t):(t.writeHead(404,{"Content-Type":"text/plain"}),t.end("Not Found"))}_handleHealthCheck(e){let t={status:"healthy",clients:this._clients.size};this._connectionPool&&(t.pool=this._connectionPool.getStats()),this._healthChecker&&(t.liveness=this._healthChecker.checkLiveness()),e.writeHead(200,{"Content-Type":"application/json"}),e.end(JSON.stringify(t))}async _handleReadinessCheck(e){if(this._healthChecker){let t=await this._healthChecker.checkReadiness(),s="ok"===t.status?200:503;e.writeHead(s,{"Content-Type":"application/json"}),e.end(JSON.stringify(t))}else 
e.writeHead(200,{"Content-Type":"application/json"}),e.end(JSON.stringify({status:"ok",timestamp:new Date().toISOString(),components:{}}))}async _handleSseConnection(e,t,s){let i;if(t.writeHead(200,{"Content-Type":"text/event-stream","Cache-Control":"no-cache",Connection:"keep-alive"}),this._connectionPool){let e=s.session??s.sessionId;if(e&&this._connectionPool.getSessionInfo(e))i=e;else try{i=await this._connectionPool.createSession()}catch(e){t.write(`event: error
|
|
106
|
+
`),t.write(`data: ${JSON.stringify({error:e instanceof Error?e.message:"Failed to create session"})}
|
|
107
|
+
|
|
108
|
+
`),t.end();return}this._clientSessionMap.set(t,i),this._updatePoolMetrics()}let n={timestamp:Date.now()};i&&(n.sessionId=i),this._sendSseEvent(t,"connected",n),this._clients.add(t),this._updateActiveConnectionsMetric(),e.on("close",()=>{this._clients.delete(t),this._clientSessionMap.delete(t),this._updateActiveConnectionsMetric()});let o=this._generateClientId(),r=this._messageQueue.get(o);if(r){for(let e of r)this._sendSseEvent(t,"message",e);this._messageQueue.delete(o)}}async _handleMessage(e,t,s){var i,n,a;let l="";for await(let t of e)l+=t.toString();try{let e=JSON.parse(l),s=(0,o.safeParse)(r.Uv,e);if(!s.success){null==(i=this._metrics)||i.counter("http_request_errors_total",1,{transport:"sse",error_type:"validation"},"Total HTTP request errors"),t.writeHead(200,{"Content-Type":"application/json"}),t.end(JSON.stringify({jsonrpc:"2.0",id:(null==e?void 0:e.id)??null,error:{code:-32600,message:"Invalid Request",data:s.issues}}));return}if(this._mcpServer){let s=await this._mcpServer.receive(e,{sessionInfo:{}});t.writeHead(200,{"Content-Type":"application/json"}),s?t.end(JSON.stringify(s)):t.end(JSON.stringify({jsonrpc:"2.0",id:(null==e?void 0:e.id)??null,result:null}))}else null==(n=this._metrics)||n.counter("http_request_errors_total",1,{transport:"sse",error_type:"server_not_ready"},"Total HTTP request errors"),t.writeHead(503,{"Content-Type":"application/json"}),t.end(JSON.stringify({error:"Server not ready"}))}catch{null==(a=this._metrics)||a.counter("http_request_errors_total",1,{transport:"sse",error_type:"parse_error"},"Total HTTP request errors"),t.writeHead(400,{"Content-Type":"application/json"}),t.end(JSON.stringify({error:"Invalid JSON"}))}}_sendSseEvent(e,t,s){try{e.write(`event: ${t}
|
|
109
|
+
`),e.write(`data: ${JSON.stringify(s)}
|
|
110
|
+
|
|
111
|
+
`)}catch{this._clients.delete(e),this._updateActiveConnectionsMetric()}}_updateActiveConnectionsMetric(){var e;null==(e=this._metrics)||e.gauge("sse_active_connections",this._clients.size,{},"Current active SSE connections")}_updatePoolMetrics(){if(!this._connectionPool||!this._metrics)return;let e=this._connectionPool.getStats();this._metrics.gauge("sse_pool_active_sessions",e.activeSessions,{},"Active sessions in connection pool"),this._metrics.gauge("sse_pool_total_sessions",e.totalSessions,{},"Total sessions in connection pool"),this._metrics.gauge("sse_pool_max_sessions",e.maxSessions,{},"Maximum sessions in connection pool")}broadcast(e,t){for(let s of this._clients)this._sendSseEvent(s,e,t)}_generateClientId(){return`client_${Date.now()}_${Math.random().toString(36).substring(2,11)}`}get clientCount(){return this._clients.size}get connectionPool(){return this._connectionPool}async stop(e){return this._isShuttingDown=!0,this._stopRateLimitCleanup(),this._connectionPool&&await this._connectionPool.terminate(),new Promise(e=>{for(let e of this._clients)try{e.end()}catch{}this._clients.clear(),this._clientSessionMap.clear(),this._updateActiveConnectionsMetric(),this._server.close(()=>{this.log("info","SSE transport stopped"),e()})})}}},661(e,t,s){s.d(t,{StreamableHttpTransport:()=>l});var i=s(561),n=s(316),o=s(821),r=s(555),a=s(434);class l extends a.j{_server=null;_mcpServer=null;_path;_stateful;_sessionIdGenerator;_sessions=new Map;_requestCount=0;_activeRequests=0;_bodySizeLimitEnabled;_maxBodySize;_requestTimeout;_metrics;_metricsProvider;constructor(e={}){super(e),this._path=e.path??"/mcp",this._stateful=e.stateful??!0,this._sessionIdGenerator=e.sessionIdGenerator??(()=>(0,i.randomUUID)()),this._bodySizeLimitEnabled=e.enableBodySizeLimit??!0,this._maxBodySize=e.maxBodySize??0xa00000,this._requestTimeout=e.requestTimeout??3e4,this._metrics=e.metrics,this._metricsProvider=e.metricsProvider??null}get clientCount(){return 
this._stateful?this._sessions.size:this._activeRequests}get requestCount(){return this._requestCount}async connect(e){return this._mcpServer=e,this._server=(0,n.createServer)((e,t)=>this._handleRequest(e,t)),new Promise(e=>{this._server.listen(this._port,this._host,()=>{this.log("info",`Streamable HTTP transport listening on http://${this._host}:${this._port}`),e()})})}async _handleRequest(e,t){var s,i;let n=Date.now();if(null==(s=this._metrics)||s.counter("streamable_http_requests_total",1,{},"Total Streamable HTTP transport requests"),t.once("finish",()=>{var e;let t=(Date.now()-n)/1e3;null==(e=this._metrics)||e.histogram("streamable_http_request_duration_seconds",t,{})}),!this.validateHostHeader(e))return void this._sendJsonRpcError(t,403,-32e3,"Forbidden - invalid host header");if(this.isShuttingDown())return void this._sendJsonRpcError(t,503,-32603,"Server is shutting down");let o=this.getClientIp(e);if(this.checkRateLimit(o)){t.setHeader("Retry-After","60"),this._sendJsonRpcError(t,429,-32e3,"Too many requests");return}if(!this.validateCorsOrigin(e))return void this._sendJsonRpcError(t,403,-32e3,"Forbidden - invalid origin");if(this.setCorsHeaders(t),"OPTIONS"===e.method){t.setHeader("Access-Control-Allow-Headers","Content-Type, Mcp-Session-Id"),t.writeHead(204),t.end();return}let r=(null==(i=e.url)?void 0:i.split("?")[0])??"/";"GET"===e.method&&"/metrics"===r?this._handleMetrics(t):"GET"===e.method&&"/health"===r?this._handleHealthCheck(t):"GET"===e.method&&"/ready"===r?await this._handleReadinessCheck(t):r===this._path?"POST"===e.method?await this._handleMcpPost(e,t):"GET"===e.method?this._handleMcpGet(e,t):(t.writeHead(405,{"Content-Type":"application/json",Allow:"GET, POST"}),t.end(JSON.stringify({jsonrpc:"2.0",id:null,error:{code:-32601,message:"Method not allowed"}}))):this._sendJsonRpcError(t,404,-32601,"Not Found")}_sendJsonRpcError(e,t,s,i,n=null,o){let 
r={code:s,message:i};o&&Object.assign(r,o),e.writeHead(t,{"Content-Type":"application/json"}),e.end(JSON.stringify({jsonrpc:"2.0",id:n,error:r}))}async _handleMcpPost(e,t){this._requestCount++,this._activeRequests++;let s=setTimeout(()=>{this._activeRequests--,this._sendJsonRpcError(t,500,-32603,"Request timeout")},this._requestTimeout);try{let i,n,a=await this._readRequestBody(e);if(null===a){clearTimeout(s),this._activeRequests--,this._sendJsonRpcError(t,413,-32e3,"Request body too large");return}try{i=JSON.parse(a)}catch{clearTimeout(s),this._activeRequests--,this._sendJsonRpcError(t,200,-32700,"Parse error");return}let l=(0,o.safeParse)(r.Uv,i);if(!l.success){clearTimeout(s),this._activeRequests--,this._sendJsonRpcError(t,200,-32600,"Invalid Request",(null==i?void 0:i.id)??null,{data:l.issues});return}if(this._stateful){let i=this._resolveSession(e,t);if(!1===i){clearTimeout(s),this._activeRequests--;return}n=i}if(!this._mcpServer){clearTimeout(s),this._activeRequests--,this._sendJsonRpcError(t,503,-32603,"Server not ready",(null==i?void 0:i.id)??null);return}let c=await this._mcpServer.receive(i,{sessionInfo:{}});clearTimeout(s),this._activeRequests--;let h={"Content-Type":"application/json"};n&&(h["Mcp-Session-Id"]=n),c?(t.writeHead(200,h),t.end(JSON.stringify(c))):(n&&t.setHeader("Mcp-Session-Id",n),t.writeHead(202),t.end())}catch(e){clearTimeout(s),this._activeRequests--,this._sendJsonRpcError(t,200,-32603,"Internal error",null,{data:e instanceof Error?e.message:String(e)})}}_handleMcpGet(e,t){if(!this._stateful){t.writeHead(405,{"Content-Type":"application/json",Allow:"POST"}),t.end(JSON.stringify({jsonrpc:"2.0",id:null,error:{code:-32601,message:"GET not supported in stateless mode"}}));return}let s=this._getSessionIdFromHeader(e);if(!s){t.writeHead(400,{"Content-Type":"application/json"}),t.end(JSON.stringify({jsonrpc:"2.0",id:null,error:{code:-32600,message:"Missing Mcp-Session-Id header"}}));return}let 
i=this._sessions.get(s);if(!i){t.writeHead(404,{"Content-Type":"application/json"}),t.end(JSON.stringify({jsonrpc:"2.0",id:null,error:{code:-32001,message:"Session not found"}}));return}t.writeHead(200,{"Content-Type":"text/event-stream","Cache-Control":"no-cache",Connection:"keep-alive","Mcp-Session-Id":s}),this._sendSseEvent(t,"connected",{sessionId:s,timestamp:Date.now()}),i.notificationStreams.add(t),i.lastActivityAt=Date.now(),this._updateSessionMetrics(),e.on("close",()=>{i.notificationStreams.delete(t),this._updateSessionMetrics()})}_resolveSession(e,t){let s=this._getSessionIdFromHeader(e);if(s){if(!this.validateSessionId(s))return t.writeHead(400,{"Content-Type":"application/json"}),t.end(JSON.stringify({jsonrpc:"2.0",id:null,error:{code:-32600,message:"Invalid Mcp-Session-Id format"}})),!1;let e=this._sessions.get(s);return e?(e.lastActivityAt=Date.now(),s):(t.writeHead(404,{"Content-Type":"application/json"}),t.end(JSON.stringify({jsonrpc:"2.0",id:null,error:{code:-32001,message:"Session not found"}})),!1)}let i=this._sessionIdGenerator(),n={id:i,createdAt:Date.now(),lastActivityAt:Date.now(),notificationStreams:new Set};return this._sessions.set(i,n),this.log("info",`New session created: ${i}`),this._updateSessionMetrics(),i}_getSessionIdFromHeader(e){let t=e.headers["mcp-session-id"];if("string"==typeof t&&t.length>0)return t}async _readRequestBody(e){let t="",s=0;for await(let i of e){let e="string"==typeof i?i:i.toString();if(s+=e.length,this._bodySizeLimitEnabled&&s>this._maxBodySize)return null;t+=e}return t}_sendSseEvent(e,t,s){try{e.write(`event: ${t}
|
|
112
|
+
`),e.write(`data: ${JSON.stringify(s)}
|
|
113
|
+
|
|
114
|
+
`)}catch{}}broadcastToSession(e,t,s){let i=this._sessions.get(e);if(i)for(let e of i.notificationStreams)this._sendSseEvent(e,t,s)}_handleMetrics(e){if(!this._metricsProvider){e.writeHead(404,{"Content-Type":"text/plain"}),e.end("Not Found");return}e.writeHead(200,{"Content-Type":"text/plain; version=0.0.4; charset=utf-8"}),e.end(this._metricsProvider())}_handleHealthCheck(e){let t={status:"healthy",requests:this._requestCount,sessions:this._sessions.size,transport:"streamable-http"};this._healthChecker&&(t.liveness=this._healthChecker.checkLiveness()),e.writeHead(200,{"Content-Type":"application/json"}),e.end(JSON.stringify(t))}async _handleReadinessCheck(e){if(this._healthChecker){let t=await this._healthChecker.checkReadiness(),s="ok"===t.status?200:503;e.writeHead(s,{"Content-Type":"application/json"}),e.end(JSON.stringify(t))}else e.writeHead(200,{"Content-Type":"application/json"}),e.end(JSON.stringify({status:"ok",timestamp:new Date().toISOString(),components:{}}))}_updateSessionMetrics(){var e,t;null==(e=this._metrics)||e.gauge("streamable_http_active_sessions",this._sessions.size,{},"Active Streamable HTTP sessions");let s=0;for(let e of this._sessions.values())s+=e.notificationStreams.size;null==(t=this._metrics)||t.gauge("streamable_http_notification_streams",s,{},"Active SSE notification streams")}async stop(e){this._isShuttingDown=!0,this._stopRateLimitCleanup();let t=e??3e4;for(let e of this._sessions.values()){for(let t of e.notificationStreams)try{t.end()}catch{}e.notificationStreams.clear()}return this._sessions.clear(),new Promise(e=>{if(!this._server){this.log("info","Streamable HTTP transport stopped (no server)"),e();return}let s=setTimeout(()=>{this.log("warn","Streamable HTTP transport force-closing after timeout"),e()},t);this._server.close(()=>{clearTimeout(s),this.log("info","Streamable HTTP transport stopped"),e()})})}}},65(t){t.exports=e},561(e){e.exports=t},316(e){e.exports=s},61(e){e.exports=i},821(e){e.exports=n}},u={};function 
_(e){var t=u[e];if(void 0!==t)return t.exports;var s=u[e]={exports:{}};return d[e](s,s.exports,_),s.exports}_.d=(e,t)=>{for(var s in t)_.o(t,s)&&!_.o(e,s)&&Object.defineProperty(e,s,{enumerable:!0,get:t[s]})},_.o=(e,t)=>Object.prototype.hasOwnProperty.call(e,t),(()=>{var e=_(61),t=_(65);_(561);let s=new p;class i{_level;_context;_pretty;static LEVEL_PRIORITY={debug:0,info:1,warn:2,error:3};constructor(e={}){this._level=e.level??"info",this._context=e.context??"SequentialThinking",this._pretty=e.pretty??!0}shouldLog(e){return i.LEVEL_PRIORITY[e]>=i.LEVEL_PRIORITY[this._level]}format(e){if(this._pretty){let t=e.meta?` ${JSON.stringify(e.meta)}`:"",s=e.requestId?` [${e.requestId}]`:"";return`[${e.timestamp}] [${e.level.toUpperCase()}]${e.context?` [${e.context}]`:""}${s} ${e.message}${t}`}return JSON.stringify(e)}log(e,t,i){var n;if(!this.shouldLog(e))return;let o=null==(n=s.getStore())?void 0:n.requestId,r={level:e,message:t,timestamp:new Date().toISOString(),context:this._context,meta:i,...o?{requestId:o}:{}};console.error(this.format(r))}debug(e,t){this.log("debug",e,t)}info(e,t){this.log("info",e,t)}warn(e,t){this.log("warn",e,t)}error(e,t){this.log("error",e,t)}createChild(e){return new i({level:this._level,context:`${this._context}:${e}`,pretty:this._pretty})}setLevel(e){this._level=e}getLevel(){return this._level}}var n=_(555);let{name:d,version:u}=JSON.parse(a(c(l((0,e.fileURLToPath)(import.meta.url)),"../package.json"),"utf-8")),m=process.argv.slice(2);async function g(e,t){var s;let{SseTransport:i}=await Promise.resolve().then(_.bind(_,504)),{createConnectionPool:n}=await 
Promise.resolve().then(_.bind(_,472)),o=parseInt(process.env.SSE_PORT||"3000",10),r=process.env.SSE_HOST||"localhost",a=t.getContainer().resolve("Metrics"),l="false"!==process.env.SSE_ENABLE_POOL,c=parseInt(process.env.SSE_MAX_SESSIONS||"100",10),h=parseInt(process.env.SSE_SESSION_TIMEOUT||"300000",10),p=l?n({maxSessions:c,sessionTimeout:h,logger:t._logger,serverFactory:async()=>{let{createServer:e}=await Promise.resolve().then(_.bind(_,65));return await e({autoDiscover:!0})}}):void 0,d=new i({port:o,host:r,corsOrigin:process.env.CORS_ORIGIN||"*",enableCors:"false"!==process.env.ENABLE_CORS,allowedHosts:null==(s=process.env.ALLOWED_HOSTS)?void 0:s.split(",").map(e=>e.trim()),metrics:a,connectionPool:p});await d.connect(e),y(async()=>{await d.stop(),await t.stop()}),t._logger.info(`Sequential Thinking MCP Server running on SSE transport at http://${r}:${o}`)}async function f(e,t){var s;let{StreamableHttpTransport:i}=await Promise.resolve().then(_.bind(_,661)),n=parseInt(process.env.STREAMABLE_HTTP_PORT||process.env.SSE_PORT||"3000",10),o=process.env.STREAMABLE_HTTP_HOST||process.env.SSE_HOST||"localhost",r=t.getContainer().resolve("Metrics"),a="false"!==process.env.STREAMABLE_HTTP_STATEFUL,l=new i({port:n,host:o,corsOrigin:process.env.CORS_ORIGIN||"*",enableCors:"false"!==process.env.ENABLE_CORS,allowedHosts:null==(s=process.env.ALLOWED_HOSTS)?void 0:s.split(",").map(e=>e.trim()),metrics:r,stateful:a});await l.connect(e),y(async()=>{await l.stop(),await t.stop()}),t._logger.info(`Sequential Thinking MCP Server running on Streamable HTTP transport at http://${o}:${n}`)}async function v(e,t){new r(e).listen();let s=async()=>{let e=setTimeout(()=>{t._logger.error("Graceful shutdown timed out after 30s - forcing exit"),process.exit(1)},3e4).unref();try{await t.stop(),clearTimeout(e),process.exit(0)}catch(e){t._logger.error("Error during shutdown",{error:e instanceof Error?e.message:String(e)}),process.exit(1)}};process.once("SIGINT",()=>void 
s()),process.once("SIGTERM",()=>void s()),t._logger.info("Sequential Thinking MCP Server running on stdio")}function y(e){process.once("SIGINT",()=>{e().then(()=>process.exit(0)).catch(()=>process.exit(1))}),process.once("SIGTERM",()=>{e().then(()=>process.exit(0)).catch(()=>process.exit(1))})}(m.includes("--version")||m.includes("-v"))&&(console.log(`${d} v${u}`),process.exit(0)),(async function(){let e=new h({name:d,version:u,description:"Semantic Sequential Thinking MCP Server"},{adapter:new o,capabilities:{tools:{listChanged:!0}}}),s=await (0,t.initializeServer)();e.tool({name:"sequentialthinking_tools",description:n.iV.description,schema:n.ZK},async e=>s.processThought(e));let i=process.env.TRANSPORT_TYPE||"stdio";"sse"===i?await g(e,s):"streamable-http"===i?await f(e,s):await v(e,s)})().catch(e=>{new i({level:"error",context:"SequentialThinking",pretty:!0}).error("Fatal error running server",{error:e instanceof Error?e.message:String(e)}),process.exit(1)})})();
|
package/dist/cli.js.map
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"cli.js","sources":["../src/cli.ts"],"sourcesContent":["#!/usr/bin/env bun\n\n// CLI entry point for tracelattice MCP server.\n// This file handles CLI argument parsing, transport selection, and signal handlers.\n// For library usage, import from './lib.js' or './index.js' instead.\n\nimport { ValibotJsonSchemaAdapter } from '@tmcp/adapter-valibot';\nimport { StdioTransport } from '@tmcp/transport-stdio';\nimport { readFileSync } from 'node:fs';\nimport { dirname, join } from 'node:path';\nimport { fileURLToPath } from 'node:url';\nimport { McpServer } from 'tmcp';\nimport type { ToolAwareSequentialThinkingServer } from './lib.js';\nimport { initializeServer } from './lib.js';\nimport { StructuredLogger } from './logger/StructuredLogger.js';\nimport { Metrics } from './metrics/metrics.impl.js';\nimport { SEQUENTIAL_THINKING_TOOL, SequentialThinkingSchema } from './schema.js';\n\n// Get version from package.json\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = dirname(__filename);\nconst package_json = JSON.parse(readFileSync(join(__dirname, '../package.json'), 'utf-8'));\nconst { name, version } = package_json;\n// Handle CLI arguments\nconst args = process.argv.slice(2);\nconst shouldShowVersion = args.includes('--version') || args.includes('-v');\n\nif (shouldShowVersion) {\n\tconsole.log(`${name} v${version}`);\n\tprocess.exit(0);\n}\nasync function main() {\n\tconst adapter = new ValibotJsonSchemaAdapter();\n\tconst server = new McpServer(\n\t\t{\n\t\t\tname,\n\t\t\tversion,\n\t\t\tdescription: 'Semantic Sequential Thinking MCP Server',\n\t\t},\n\t\t{\n\t\t\tadapter,\n\t\t\tcapabilities: {\n\t\t\t\ttools: { listChanged: true },\n\t\t\t},\n\t\t}\n\t);\n\n\tconst thinkingServer = await initializeServer();\n\n\tserver.tool(\n\t\t{\n\t\t\tname: 'sequentialthinking_tools',\n\t\t\tdescription: SEQUENTIAL_THINKING_TOOL.description,\n\t\t\tschema: SequentialThinkingSchema,\n\t\t},\n\t\tasync (input) => {\n\t\t\treturn 
thinkingServer.processThought(input);\n\t\t}\n\t);\n\n\tconst transportType = process.env.TRANSPORT_TYPE || 'stdio';\n\tif (transportType === 'sse') {\n\t\tawait startSseTransport(server, thinkingServer);\n\t} else if (transportType === 'streamable-http') {\n\t\tawait startStreamableHttpTransport(server, thinkingServer);\n\t} else {\n\t\tawait startStdioTransport(server, thinkingServer);\n\t}\n}\n/**\n * Start SSE transport for multi-user support\n */\nasync function startSseTransport(server: McpServer, thinkingServer: ToolAwareSequentialThinkingServer): Promise<void> {\n const { SseTransport } = await import('./transport/SseTransport.js');\n const { createConnectionPool } = await import('./pool/ConnectionPool.js');\n const port = parseInt(process.env.SSE_PORT || '3000', 10);\n const host = process.env.SSE_HOST || 'localhost';\n const transportMetrics = thinkingServer.getContainer().resolve<Metrics>('Metrics');\n const enablePool = process.env.SSE_ENABLE_POOL !== 'false';\n const maxSessions = parseInt(process.env.SSE_MAX_SESSIONS || '100', 10);\n const sessionTimeout = parseInt(process.env.SSE_SESSION_TIMEOUT || '300000', 10);\n const connectionPool = enablePool\n ? 
createConnectionPool({\n maxSessions,\n sessionTimeout,\n logger: thinkingServer['_logger'],\n serverFactory: async () => {\n const { createServer: createThinkingServer } = await import('./lib.js');\n const sessionServer = await createThinkingServer({ autoDiscover: true });\n return sessionServer;\n },\n })\n : undefined;\n const sseTransport = new SseTransport({\n port,\n host,\n corsOrigin: process.env.CORS_ORIGIN || '*',\n enableCors: process.env.ENABLE_CORS !== 'false',\n allowedHosts: process.env.ALLOWED_HOSTS?.split(',').map((hostValue) => hostValue.trim()),\n metrics: transportMetrics,\n connectionPool,\n });\n // Connect the SSE transport\n await sseTransport.connect(server);\n const shutdown = async (): Promise<void> => {\n await sseTransport.stop();\n await thinkingServer.stop();\n };\n registerShutdownHandlers(shutdown);\n thinkingServer['_logger'].info(\n `Sequential Thinking MCP Server running on SSE transport at http://${host}:${port}`\n );\n}\n/**\n * Start Streamable HTTP transport (MCP spec recommended)\n */\nasync function startStreamableHttpTransport(server: McpServer, thinkingServer: ToolAwareSequentialThinkingServer): Promise<void> {\n const { StreamableHttpTransport } = await import('./transport/StreamableHttpTransport.js');\n const port = parseInt(process.env.STREAMABLE_HTTP_PORT || process.env.SSE_PORT || '3000', 10);\n const host = process.env.STREAMABLE_HTTP_HOST || process.env.SSE_HOST || 'localhost';\n const transportMetrics = thinkingServer.getContainer().resolve<Metrics>('Metrics');\n const stateful = process.env.STREAMABLE_HTTP_STATEFUL !== 'false';\n const streamableTransport = new StreamableHttpTransport({\n\t port,\n host,\n corsOrigin: process.env.CORS_ORIGIN || '*',\n enableCors: process.env.ENABLE_CORS !== 'false',\n allowedHosts: process.env.ALLOWED_HOSTS?.split(',').map((hostValue) => hostValue.trim()),\n\t metrics: transportMetrics,\n\t stateful,\n });\n // Connect the Streamable HTTP transport\n await 
streamableTransport.connect(server);\n const shutdown = async (): Promise<void> => {\n await streamableTransport.stop();\n await thinkingServer.stop();\n };\n registerShutdownHandlers(shutdown);\n thinkingServer['_logger'].info(\n `Sequential Thinking MCP Server running on Streamable HTTP transport at http://${host}:${port}`\n );\n}\n/**\n * Start stdio transport (default, single-user)\n */\nasync function startStdioTransport(server: McpServer, thinkingServer: ToolAwareSequentialThinkingServer): Promise<void> {\n const transport = new StdioTransport(server);\n transport.listen();\n const shutdown = async (): Promise<void> => {\n const forceExit = setTimeout(() => {\n thinkingServer['_logger'].error('Graceful shutdown timed out after 30s - forcing exit');\n process.exit(1);\n }, 30_000).unref(); // 30s timeout, don't keep process alive\n try {\n await thinkingServer.stop();\n clearTimeout(forceExit);\n process.exit(0);\n } catch (error) {\n thinkingServer['_logger'].error('Error during shutdown', {\n error: error instanceof Error ? error.message : String(error),\n });\n process.exit(1);\n }\n };\n // Register signal handlers ONCE (fixes double-registration bug)\n process.once('SIGINT', () => void shutdown());\n process.once('SIGTERM', () => void shutdown());\n thinkingServer['_logger'].info('Sequential Thinking MCP Server running on stdio');\n}\n/**\n * Register shutdown signal handlers for a common pattern\n */\nfunction registerShutdownHandlers(shutdown: () => Promise<void>): void {\n process.once('SIGINT', () => {\n shutdown()\n .then(() => process.exit(0))\n .catch(() => process.exit(1));\n });\n process.once('SIGTERM', () => {\n shutdown()\n .then(() => process.exit(0))\n .catch(() => process.exit(1));\n });\n}\nmain().catch((error) => {\n const logger = new StructuredLogger({\n level: 'error',\n context: 'SequentialThinking',\n pretty: true,\n });\n logger.error('Fatal error running server', {\n error: error instanceof Error ? 
error.message : String(error),\n });\n process.exit(1);\n});\n"],"names":["__filename","fileURLToPath","__dirname","dirname","package_json","JSON","readFileSync","join","name","version","args","process","shouldShowVersion","console","main","adapter","ValibotJsonSchemaAdapter","server","McpServer","thinkingServer","initializeServer","SEQUENTIAL_THINKING_TOOL","SequentialThinkingSchema","input","transportType","startSseTransport","startStreamableHttpTransport","startStdioTransport","SseTransport","createConnectionPool","port","parseInt","host","transportMetrics","enablePool","maxSessions","sessionTimeout","connectionPool","createThinkingServer","sessionServer","undefined","sseTransport","hostValue","shutdown","registerShutdownHandlers","StreamableHttpTransport","stateful","streamableTransport","transport","StdioTransport","forceExit","setTimeout","clearTimeout","error","Error","String","logger","StructuredLogger"],"mappings":";;;;;;;;;;AAmBA,MAAMA,eAAaC,cAAc,YAAY,GAAG;AAChD,MAAMC,cAAYC,QAAQH;AAC1B,MAAMI,eAAeC,KAAK,KAAK,CAACC,aAAaC,KAAKL,aAAW,oBAAoB;AACjF,MAAM,EAAEM,MAAAA,QAAI,EAAEC,SAAAA,OAAO,EAAE,GAAGL;AAE1B,MAAMM,OAAOC,QAAQ,IAAI,CAAC,KAAK,CAAC;AAChC,MAAMC,oBAAoBF,KAAK,QAAQ,CAAC,gBAAgBA,KAAK,QAAQ,CAAC;AAEtE,IAAIE,mBAAmB;IACtBC,QAAQ,GAAG,CAAC,GAAGL,SAAK,EAAE,EAAEC,SAAS;IACjCE,QAAQ,IAAI,CAAC;AACd;AACA,eAAeG;IACd,MAAMC,UAAU,IAAIC;IACpB,MAAMC,SAAS,IAAIC,UAClB;QACCV,MAAAA;QACAC,SAAAA;QACA,aAAa;IACd,GACA;QACCM;QACA,cAAc;YACb,OAAO;gBAAE,aAAa;YAAK;QAC5B;IACD;IAGD,MAAMI,iBAAiB,MAAMC;IAE7BH,OAAO,IAAI,CACV;QACC,MAAM;QACN,aAAaI,yBAAyB,WAAW;QACjD,QAAQC;IACT,GACA,OAAOC,QACCJ,eAAe,cAAc,CAACI;IAIvC,MAAMC,gBAAgBb,QAAQ,GAAG,CAAC,cAAc,IAAI;IACpD,IAAIa,AAAkB,UAAlBA,eACH,MAAMC,kBAAkBR,QAAQE;SAC1B,IAAIK,AAAkB,sBAAlBA,eACV,MAAME,6BAA6BT,QAAQE;SAE3C,MAAMQ,oBAAoBV,QAAQE;AAEpC;AAIA,eAAeM,kBAAkBR,MAAiB,EAAEE,cAAiD;IACjG,MAAM,EAAES,YAAY,EAAE,GAAG,MAAM,MAAM,CAAC;IACtC,MAAM,EAAEC,oBAAoB,EAAE,GAAG,MAAM,MAAM,CAAC;IAC9C,MAAMC,OAAOC,SAASpB,QAAQ,GAAG,CAAC,QAAQ,IAAI,QAAQ;IACtD,MAAMqB,OAAOrB,QAAQ,GAAG,C
AAC,QAAQ,IAAI;IACrC,MAAMsB,mBAAmBd,eAAe,YAAY,GAAG,OAAO,CAAU;IACxE,MAAMe,aAAavB,AAAgC,YAAhCA,QAAQ,GAAG,CAAC,eAAe;IAC9C,MAAMwB,cAAcJ,SAASpB,QAAQ,GAAG,CAAC,gBAAgB,IAAI,OAAO;IACpE,MAAMyB,iBAAiBL,SAASpB,QAAQ,GAAG,CAAC,mBAAmB,IAAI,UAAU;IAC7E,MAAM0B,iBAAiBH,aACjBL,qBAAqB;QACnBM;QACAC;QACA,QAAQjB,cAAc,CAAC,UAAU;QACjC,eAAe;YACX,MAAM,EAAE,cAAcmB,oBAAoB,EAAE,GAAG,MAAM,MAAM,CAAC;YAC5D,MAAMC,gBAAgB,MAAMD,qBAAqB;gBAAE,cAAc;YAAK;YACtE,OAAOC;QACX;IACF,KACAC;IACN,MAAMC,eAAe,IAAIb,aAAa;QAClCE;QACAE;QACA,YAAYrB,QAAQ,GAAG,CAAC,WAAW,IAAI;QACvC,YAAYA,AAA4B,YAA5BA,QAAQ,GAAG,CAAC,WAAW;QACnC,cAAcA,QAAQ,GAAG,CAAC,aAAa,EAAE,MAAM,KAAK,IAAI,CAAC+B,YAAcA,UAAU,IAAI;QACrF,SAAST;QACTI;IACJ;IAEA,MAAMI,aAAa,OAAO,CAACxB;IAC3B,MAAM0B,WAAW;QACb,MAAMF,aAAa,IAAI;QACvB,MAAMtB,eAAe,IAAI;IAC7B;IACAyB,yBAAyBD;IACzBxB,cAAc,CAAC,UAAU,CAAC,IAAI,CAC1B,CAAC,kEAAkE,EAAEa,KAAK,CAAC,EAAEF,MAAM;AAE3F;AAIA,eAAeJ,6BAA6BT,MAAiB,EAAEE,cAAiD;IAC5G,MAAM,EAAE0B,uBAAuB,EAAE,GAAG,MAAM,MAAM,CAAC;IACjD,MAAMf,OAAOC,SAASpB,QAAQ,GAAG,CAAC,oBAAoB,IAAIA,QAAQ,GAAG,CAAC,QAAQ,IAAI,QAAQ;IAC1F,MAAMqB,OAAOrB,QAAQ,GAAG,CAAC,oBAAoB,IAAIA,QAAQ,GAAG,CAAC,QAAQ,IAAI;IACzE,MAAMsB,mBAAmBd,eAAe,YAAY,GAAG,OAAO,CAAU;IACxE,MAAM2B,WAAWnC,AAAyC,YAAzCA,QAAQ,GAAG,CAAC,wBAAwB;IACrD,MAAMoC,sBAAsB,IAAIF,wBAAwB;QACvDf;QACGE;QACA,YAAYrB,QAAQ,GAAG,CAAC,WAAW,IAAI;QACvC,YAAYA,AAA4B,YAA5BA,QAAQ,GAAG,CAAC,WAAW;QACnC,cAAcA,QAAQ,GAAG,CAAC,aAAa,EAAE,MAAM,KAAK,IAAI,CAAC+B,YAAcA,UAAU,IAAI;QACxF,SAAST;QACTa;IACD;IAEA,MAAMC,oBAAoB,OAAO,CAAC9B;IAClC,MAAM0B,WAAW;QACb,MAAMI,oBAAoB,IAAI;QAC9B,MAAM5B,eAAe,IAAI;IAC7B;IACAyB,yBAAyBD;IACzBxB,cAAc,CAAC,UAAU,CAAC,IAAI,CAC1B,CAAC,8EAA8E,EAAEa,KAAK,CAAC,EAAEF,MAAM;AAEvG;AAIA,eAAeH,oBAAoBV,MAAiB,EAAEE,cAAiD;IACnG,MAAM6B,YAAY,IAAIC,eAAehC;IACrC+B,UAAU,MAAM;IAChB,MAAML,WAAW;QACb,MAAMO,YAAYC,WAAW;YACzBhC,cAAc,CAAC,UAAU,CAAC,KAAK,CAAC;YAChCR,QAAQ,IAAI,CAAC;QACjB,GAAG,OAAQ,KAAK;QAChB,IAAI;YACA,MAAMQ,eAAe,IAAI;YACzBiC,aAAaF;YACbvC,QAAQ,IAAI,CAAC;QACjB,EAAE,OAAO0C,OAAO;YACZlC,cAAc,CAAC,UAAU,CAAC,KAAK,CAAC,yBAAyB;gBACrD,OAAOkC,iBAAiBC,QAAQD,M
AAM,OAAO,GAAGE,OAAOF;YAC3D;YACA1C,QAAQ,IAAI,CAAC;QACjB;IACJ;IAEAA,QAAQ,IAAI,CAAC,UAAU,IAAM,KAAKgC;IAClChC,QAAQ,IAAI,CAAC,WAAW,IAAM,KAAKgC;IACnCxB,cAAc,CAAC,UAAU,CAAC,IAAI,CAAC;AACnC;AAIA,SAASyB,yBAAyBD,QAA6B;IAC3DhC,QAAQ,IAAI,CAAC,UAAU;QACnBgC,WACK,IAAI,CAAC,IAAMhC,QAAQ,IAAI,CAAC,IACxB,KAAK,CAAC,IAAMA,QAAQ,IAAI,CAAC;IAClC;IACAA,QAAQ,IAAI,CAAC,WAAW;QACpBgC,WACK,IAAI,CAAC,IAAMhC,QAAQ,IAAI,CAAC,IACxB,KAAK,CAAC,IAAMA,QAAQ,IAAI,CAAC;IAClC;AACJ;AACAG,OAAO,KAAK,CAAC,CAACuC;IACV,MAAMG,SAAS,IAAIC,iBAAiB;QAChC,OAAO;QACP,SAAS;QACT,QAAQ;IACZ;IACAD,OAAO,KAAK,CAAC,8BAA8B;QACvC,OAAOH,iBAAiBC,QAAQD,MAAM,OAAO,GAAGE,OAAOF;IAC3D;IACA1C,QAAQ,IAAI,CAAC;AACjB"}
|
|
@@ -0,0 +1,166 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Multi-process Architecture for parallel thought processing.
|
|
3
|
+
*
|
|
4
|
+
* This module provides a WorkerManager that uses Node.js worker threads
|
|
5
|
+
* to distribute thought processing across multiple CPU cores, enabling
|
|
6
|
+
* horizontal scaling and improved performance.
|
|
7
|
+
*
|
|
8
|
+
* @example
|
|
9
|
+
* ```typescript
|
|
10
|
+
* const manager = new WorkerManager({
|
|
11
|
+
* maxWorkers: 4,
|
|
12
|
+
* workerScript: './dist/worker.js'
|
|
13
|
+
* });
|
|
14
|
+
* await manager.start();
|
|
15
|
+
*
|
|
16
|
+
* const result = await manager.processThought({ thought: 'test', thought_number: 1, total_thoughts: 1 });
|
|
17
|
+
* ```
|
|
18
|
+
*/
|
|
19
|
+
import type { ThoughtData } from '../core/thought.js';
|
|
20
|
+
import type { Logger } from '../logger/StructuredLogger.js';
|
|
21
|
+
import type { IDisposable } from '../types/disposable.js';
|
|
22
|
+
export interface WorkerManagerOptions {
|
|
23
|
+
/**
|
|
24
|
+
* Maximum number of worker processes to spawn
|
|
25
|
+
* @default Number of CPU cores
|
|
26
|
+
*/
|
|
27
|
+
maxWorkers?: number;
|
|
28
|
+
/**
|
|
29
|
+
* Path to the worker script
|
|
30
|
+
* @default './dist/worker.js'
|
|
31
|
+
*/
|
|
32
|
+
workerScript?: string;
|
|
33
|
+
/**
|
|
34
|
+
* Timeout for worker responses in milliseconds
|
|
35
|
+
* @default 30000 (30 seconds)
|
|
36
|
+
*/
|
|
37
|
+
workerTimeout?: number;
|
|
38
|
+
/**
|
|
39
|
+
* Enable worker health monitoring and auto-restart
|
|
40
|
+
* @default true
|
|
41
|
+
*/
|
|
42
|
+
enableHealthCheck?: boolean;
|
|
43
|
+
/**
|
|
44
|
+
* Health check interval in milliseconds
|
|
45
|
+
* @default 60000 (1 minute)
|
|
46
|
+
*/
|
|
47
|
+
healthCheckInterval?: number;
|
|
48
|
+
/**
|
|
49
|
+
* Maximum number of retries for a failed worker
|
|
50
|
+
* @default 3
|
|
51
|
+
*/
|
|
52
|
+
maxRetries?: number;
|
|
53
|
+
/**
|
|
54
|
+
* Logger instance
|
|
55
|
+
*/
|
|
56
|
+
logger?: Logger;
|
|
57
|
+
}
|
|
58
|
+
export interface WorkerMessage {
|
|
59
|
+
type: 'process-thought' | 'health-check' | 'terminate';
|
|
60
|
+
requestId?: string;
|
|
61
|
+
input?: unknown;
|
|
62
|
+
}
|
|
63
|
+
export interface WorkerResponse {
|
|
64
|
+
type: 'result' | 'error' | 'health';
|
|
65
|
+
requestId?: string;
|
|
66
|
+
result?: unknown;
|
|
67
|
+
error?: string;
|
|
68
|
+
}
|
|
69
|
+
/**
|
|
70
|
+
* WorkerManager manages a pool of worker processes for parallel thought processing.
|
|
71
|
+
*
|
|
72
|
+
* Each worker runs in a separate process and can process thoughts independently.
|
|
73
|
+
* The manager distributes incoming requests across available workers.
|
|
74
|
+
*/
|
|
75
|
+
export declare class WorkerManager implements IDisposable {
|
|
76
|
+
private _workers;
|
|
77
|
+
private _nextWorkerId;
|
|
78
|
+
private _maxWorkers;
|
|
79
|
+
private _workerScript;
|
|
80
|
+
private _workerTimeout;
|
|
81
|
+
private _enableHealthCheck;
|
|
82
|
+
private _healthCheckInterval;
|
|
83
|
+
private _maxRetries;
|
|
84
|
+
private _activeRequests;
|
|
85
|
+
private _workerRetryCount;
|
|
86
|
+
private _healthCheckTimer;
|
|
87
|
+
private _nextWorkerIndex;
|
|
88
|
+
private _terminated;
|
|
89
|
+
private _logger;
|
|
90
|
+
constructor(options?: WorkerManagerOptions);
|
|
91
|
+
/**
|
|
92
|
+
* Create a no-op logger when none is provided.
|
|
93
|
+
*/
|
|
94
|
+
private _createNoopLogger;
|
|
95
|
+
/**
|
|
96
|
+
* Start the worker manager and spawn all worker processes.
|
|
97
|
+
*/
|
|
98
|
+
start(): Promise<void>;
|
|
99
|
+
/**
|
|
100
|
+
* Spawn a single worker process.
|
|
101
|
+
*/
|
|
102
|
+
private _spawnWorker;
|
|
103
|
+
/**
|
|
104
|
+
* Handle incoming messages from workers.
|
|
105
|
+
*/
|
|
106
|
+
private _handleWorkerMessage;
|
|
107
|
+
/**
|
|
108
|
+
* Handle worker errors.
|
|
109
|
+
*/
|
|
110
|
+
private _handleWorkerError;
|
|
111
|
+
/**
|
|
112
|
+
* Handle worker exit.
|
|
113
|
+
*/
|
|
114
|
+
private _handleWorkerExit;
|
|
115
|
+
/**
|
|
116
|
+
* Start periodic health checks for all workers.
|
|
117
|
+
*/
|
|
118
|
+
private _startHealthCheck;
|
|
119
|
+
/**
|
|
120
|
+
* Process a thought using an available worker.
|
|
121
|
+
*
|
|
122
|
+
* @param input - The thought data to process
|
|
123
|
+
* @returns Promise with the processing result
|
|
124
|
+
*/
|
|
125
|
+
processThought(input: ThoughtData): Promise<unknown>;
|
|
126
|
+
/**
|
|
127
|
+
* Get statistics about the worker pool.
|
|
128
|
+
*/
|
|
129
|
+
getStats(): {
|
|
130
|
+
activeWorkers: number;
|
|
131
|
+
activeRequests: number;
|
|
132
|
+
maxWorkers: number;
|
|
133
|
+
healthCheckEnabled: boolean;
|
|
134
|
+
};
|
|
135
|
+
/**
|
|
136
|
+
* Terminate all workers and stop the health check.
|
|
137
|
+
*/
|
|
138
|
+
terminate(): Promise<void>;
|
|
139
|
+
/**
|
|
140
|
+
* Dispose of the worker manager, releasing all resources.
|
|
141
|
+
* Implements the IDisposable interface.
|
|
142
|
+
* Delegates to terminate() for backward compatibility.
|
|
143
|
+
*/
|
|
144
|
+
dispose(): Promise<void>;
|
|
145
|
+
/**
|
|
146
|
+
* Check if worker manager is running.
|
|
147
|
+
*/
|
|
148
|
+
isRunning(): boolean;
|
|
149
|
+
}
|
|
150
|
+
/**
|
|
151
|
+
* Create a WorkerManager with the given options.
|
|
152
|
+
*
|
|
153
|
+
* @param options - Worker manager configuration
|
|
154
|
+
* @returns A configured WorkerManager
|
|
155
|
+
*
|
|
156
|
+
* @example
|
|
157
|
+
* ```typescript
|
|
158
|
+
* const manager = createWorkerManager({
|
|
159
|
+
* maxWorkers: 4,
|
|
160
|
+
* workerScript: './dist/worker.js'
|
|
161
|
+
* });
|
|
162
|
+
* await manager.start();
|
|
163
|
+
* ```
|
|
164
|
+
*/
|
|
165
|
+
export declare function createWorkerManager(options?: WorkerManagerOptions): WorkerManager;
|
|
166
|
+
//# sourceMappingURL=WorkerManager.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"WorkerManager.d.ts","sourceRoot":"","sources":["../../src/cluster/WorkerManager.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;GAiBG;AAOH,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,oBAAoB,CAAC;AACtD,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,+BAA+B,CAAC;AAC5D,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AAE1D,MAAM,WAAW,oBAAoB;IACpC;;;OAGG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IAEpB;;;OAGG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IAEtB;;;OAGG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;IAEvB;;;OAGG;IACH,iBAAiB,CAAC,EAAE,OAAO,CAAC;IAE5B;;;OAGG;IACH,mBAAmB,CAAC,EAAE,MAAM,CAAC;IAE7B;;;OAGG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IAEpB;;OAEG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,WAAW,aAAa;IAC7B,IAAI,EAAE,iBAAiB,GAAG,cAAc,GAAG,WAAW,CAAC;IACvD,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,KAAK,CAAC,EAAE,OAAO,CAAC;CAChB;AAED,MAAM,WAAW,cAAc;IAC9B,IAAI,EAAE,QAAQ,GAAG,OAAO,GAAG,QAAQ,CAAC;IACpC,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,MAAM,CAAC,EAAE,OAAO,CAAC;IACjB,KAAK,CAAC,EAAE,MAAM,CAAC;CACf;AAED;;;;;GAKG;AACH,qBAAa,aAAc,YAAW,WAAW;IAChD,OAAO,CAAC,QAAQ,CAAkC;IAClD,OAAO,CAAC,aAAa,CAAK;IAC1B,OAAO,CAAC,WAAW,CAAS;IAC5B,OAAO,CAAC,aAAa,CAAS;IAC9B,OAAO,CAAC,cAAc,CAAS;IAC/B,OAAO,CAAC,kBAAkB,CAAU;IACpC,OAAO,CAAC,oBAAoB,CAAS;IACrC,OAAO,CAAC,WAAW,CAAS;IAC5B,OAAO,CAAC,eAAe,CAAqD;IAC5E,OAAO,CAAC,iBAAiB,CAAkC;IAC3D,OAAO,CAAC,iBAAiB,CAA+B;IACxD,OAAO,CAAC,gBAAgB,CAAK;IAE7B,OAAO,CAAC,WAAW,CAAS;IAC5B,OAAO,CAAC,OAAO,CAAS;gBAEZ,OAAO,GAAE,oBAAyB;IAW9C;;OAEG;IACH,OAAO,CAAC,iBAAiB;IAWzB;;OAEG;IACG,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAuB5B;;OAEG;YACW,YAAY;IA8B1B;;OAEG;IACH,OAAO,CAAC,oBAAoB;IAmB5B;;OAEG;IACH,OAAO,CAAC,kBAAkB;IA6C1B;;OAEG;IACH,OAAO,CAAC,iBAAiB;IAYzB;;OAEG;IACH,OAAO,CAAC,iBAAiB;IAYzB;;;;;OAKG;IACG,cAAc,CAAC,KAAK,EAAE,WAAW,GAAG,OAAO,CAAC,OAAO,CAAC;IAyD1D;;OAEG;IACH,QAAQ,IAAI;QACX,aAAa,EAAE,MAAM,CAAC;QACtB,cAAc,EAAE,MAAM,CAAC;QACvB,UAAU,EAAE,MAAM,CAAC;QACnB,kBAAkB,EAAE,OAAO,CAAC;KAC5B;IASD;;OAEG;IACG,SAAS,IAAI,OAAO,CAAC,IAAI,CAAC;IA8BhC;;;;OAIG;IACG,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC;IAI9B;;OAEG;IACH,SAAS,IAAI,OAAO;CAGpB;AAED;;;;;;;;;;;;;;GAcG;AACH,wBAAgB,mBAAmB
,CAAC,OAAO,CAAC,EAAE,oBAAoB,GAAG,aAAa,CAEjF"}
|