agentic-flow 1.9.4 → 1.10.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +246 -0
- package/dist/proxy/adaptive-proxy.js +224 -0
- package/dist/proxy/anthropic-to-gemini.js +2 -2
- package/dist/proxy/http2-proxy-optimized.js +191 -0
- package/dist/proxy/http2-proxy.js +381 -0
- package/dist/proxy/http3-proxy-old.js +331 -0
- package/dist/proxy/http3-proxy.js +51 -0
- package/dist/proxy/websocket-proxy.js +406 -0
- package/dist/utils/adaptive-pool-sizing.js +414 -0
- package/dist/utils/auth.js +52 -0
- package/dist/utils/circular-rate-limiter.js +391 -0
- package/dist/utils/compression-middleware.js +149 -0
- package/dist/utils/connection-pool.js +184 -0
- package/dist/utils/dynamic-compression.js +298 -0
- package/dist/utils/http2-multiplexing.js +319 -0
- package/dist/utils/lazy-auth.js +311 -0
- package/dist/utils/rate-limiter.js +48 -0
- package/dist/utils/response-cache.js +211 -0
- package/dist/utils/server-push.js +251 -0
- package/dist/utils/streaming-optimizer.js +141 -0
- package/dist/utils/zero-copy-buffer.js +286 -0
- package/docs/.claude-flow/metrics/performance.json +3 -3
- package/docs/.claude-flow/metrics/task-metrics.json +3 -3
- package/docs/DOCKER-VERIFICATION.md +207 -0
- package/docs/ISSUE-55-VALIDATION.md +171 -0
- package/docs/NPX_AGENTDB_SETUP.md +175 -0
- package/docs/OPTIMIZATIONS.md +460 -0
- package/docs/PHASE2-IMPLEMENTATION-SUMMARY.md +275 -0
- package/docs/PHASE2-PHASE3-COMPLETE-SUMMARY.md +453 -0
- package/docs/PHASE3-IMPLEMENTATION-SUMMARY.md +357 -0
- package/docs/PUBLISH_GUIDE.md +438 -0
- package/docs/README.md +217 -0
- package/docs/RELEASE-v1.10.0-COMPLETE.md +382 -0
- package/docs/archive/.agentdb-instructions.md +66 -0
- package/docs/archive/AGENT-BOOSTER-STATUS.md +292 -0
- package/docs/archive/CHANGELOG-v1.3.0.md +120 -0
- package/docs/archive/COMPLETION_REPORT_v1.7.1.md +335 -0
- package/docs/archive/IMPLEMENTATION_SUMMARY_v1.7.1.md +241 -0
- package/docs/archive/SUPABASE-INTEGRATION-COMPLETE.md +357 -0
- package/docs/archive/TESTING_QUICK_START.md +223 -0
- package/docs/archive/TOOL-EMULATION-INTEGRATION-ISSUE.md +669 -0
- package/docs/archive/VALIDATION_v1.7.1.md +234 -0
- package/docs/issues/ISSUE-xenova-transformers-dependency.md +380 -0
- package/docs/releases/PUBLISH_CHECKLIST_v1.10.0.md +396 -0
- package/docs/releases/PUBLISH_SUMMARY_v1.7.1.md +198 -0
- package/docs/releases/RELEASE_NOTES_v1.10.0.md +464 -0
- package/docs/releases/RELEASE_NOTES_v1.7.0.md +297 -0
- package/docs/releases/RELEASE_v1.7.1.md +327 -0
- package/package.json +1 -1
- package/scripts/claude +31 -0
- package/validation/docker-npm-validation.sh +170 -0
- package/validation/simple-npm-validation.sh +131 -0
- package/validation/test-gemini-exclusiveMinimum-fix.ts +142 -0
- package/validation/test-gemini-models.ts +200 -0
- package/validation/validate-v1.10.0-docker.sh +296 -0
- package/wasm/reasoningbank/reasoningbank_wasm_bg.js +2 -2
- package/wasm/reasoningbank/reasoningbank_wasm_bg.wasm +0 -0
- package/docs/INDEX.md +0 -279
- package/docs/guides/.claude-flow/metrics/agent-metrics.json +0 -1
- package/docs/guides/.claude-flow/metrics/performance.json +0 -9
- package/docs/guides/.claude-flow/metrics/task-metrics.json +0 -10
- package/docs/router/.claude-flow/metrics/agent-metrics.json +0 -1
- package/docs/router/.claude-flow/metrics/performance.json +0 -9
- package/docs/router/.claude-flow/metrics/task-metrics.json +0 -10
- /package/docs/{TEST-V1.7.8.Dockerfile → docker-tests/TEST-V1.7.8.Dockerfile} +0 -0
- /package/docs/{TEST-V1.7.9-NODE20.Dockerfile → docker-tests/TEST-V1.7.9-NODE20.Dockerfile} +0 -0
- /package/docs/{TEST-V1.7.9.Dockerfile → docker-tests/TEST-V1.7.9.Dockerfile} +0 -0
- /package/docs/{v1.7.1-QUICK-START.md → guides/QUICK-START-v1.7.1.md} +0 -0
- /package/docs/{INTEGRATION-COMPLETE.md → integration-docs/INTEGRATION-COMPLETE.md} +0 -0
- /package/docs/{LANDING-PAGE-PROVIDER-CONTENT.md → providers/LANDING-PAGE-PROVIDER-CONTENT.md} +0 -0
- /package/docs/{PROVIDER-FALLBACK-GUIDE.md → providers/PROVIDER-FALLBACK-GUIDE.md} +0 -0
- /package/docs/{PROVIDER-FALLBACK-SUMMARY.md → providers/PROVIDER-FALLBACK-SUMMARY.md} +0 -0
- /package/docs/{QUIC_FINAL_STATUS.md → quic/QUIC_FINAL_STATUS.md} +0 -0
- /package/docs/{README_QUIC_PHASE1.md → quic/README_QUIC_PHASE1.md} +0 -0
- /package/docs/{AGENTDB_TESTING.md → testing/AGENTDB_TESTING.md} +0 -0
|
@@ -0,0 +1,184 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Connection Pool for HTTP/2 and HTTP/3 Proxies
|
|
3
|
+
* Provides connection reuse to reduce latency by 20-30%
|
|
4
|
+
*/
|
|
5
|
+
import http2 from 'http2';
|
|
6
|
+
import { logger } from './logger.js';
|
|
7
|
+
export class ConnectionPool {
    /**
     * Reusable HTTP/2 client sessions keyed by host authority.
     * Each entry is an array of { session, host, busy, createdAt, lastUsed }.
     */
    pools = new Map();
    /** Resolved pool configuration (maxSize, maxIdleTime, acquireTimeout). */
    config;
    /** Timer handle for the periodic cleanup sweep. */
    cleanupInterval;
    /**
     * Per-host count of connections currently being established, so that
     * concurrent acquire() calls cannot overshoot maxSize while awaiting
     * createConnection() (the original size check raced across the await).
     */
    pendingCreates = new Map();
    /**
     * @param {{maxSize?: number, maxIdleTime?: number, acquireTimeout?: number}} [config]
     */
    constructor(config = {}) {
        this.config = {
            maxSize: config.maxSize || 10,
            maxIdleTime: config.maxIdleTime || 60000, // 60 seconds
            acquireTimeout: config.acquireTimeout || 5000 // 5 seconds
        };
        // Cleanup expired connections every 30 seconds. unref() (when
        // available) so this timer alone does not keep the process alive.
        this.cleanupInterval = setInterval(() => {
            this.cleanup();
        }, 30000);
        this.cleanupInterval.unref?.();
    }
    /**
     * Acquire a session for `host`: reuse an idle pooled connection when
     * possible, create a new one while under maxSize, otherwise wait up to
     * acquireTimeout for another caller to release one.
     * @param {string} host - Target authority (e.g. "https://api.example.com").
     * @returns {Promise<object>} a connected HTTP/2 client session
     */
    async acquire(host) {
        const pool = this.pools.get(host) || [];
        const now = Date.now();
        // Find an idle, non-expired, still-open connection
        const idle = pool.find(c => !c.busy &&
            !this.isExpired(c, now) &&
            !c.session.closed &&
            !c.session.destroyed);
        if (idle) {
            idle.busy = true;
            idle.lastUsed = now;
            logger.debug('Reusing pooled connection', { host, poolSize: pool.length });
            return idle.session;
        }
        // Create a new connection if under the limit. In-flight creations
        // are counted too, so concurrent acquire() calls cannot exceed
        // maxSize while awaiting createConnection().
        const pending = this.pendingCreates.get(host) || 0;
        if (pool.length + pending < this.config.maxSize) {
            this.pendingCreates.set(host, pending + 1);
            let session;
            try {
                session = await this.createConnection(host);
            }
            finally {
                const remaining = (this.pendingCreates.get(host) || 1) - 1;
                if (remaining > 0)
                    this.pendingCreates.set(host, remaining);
                else
                    this.pendingCreates.delete(host);
            }
            const conn = {
                session,
                host,
                busy: true,
                createdAt: now,
                lastUsed: Date.now()
            };
            // Re-read the map: another acquire may have registered an array
            // for this host while we were connecting; pushing into the live
            // array avoids clobbering its connections with a stale one.
            const live = this.pools.get(host) || pool;
            live.push(conn);
            this.pools.set(host, live);
            logger.debug('Created new pooled connection', {
                host,
                poolSize: live.length,
                maxSize: this.config.maxSize
            });
            return session;
        }
        // Pool is at capacity: wait for a connection to be released.
        logger.debug('Pool full, waiting for connection', { host, poolSize: pool.length });
        return this.waitForConnection(host);
    }
    /**
     * Return a session to the pool so another caller can reuse it.
     * No-op if the session is not tracked for this host.
     */
    async release(session, host) {
        const pool = this.pools.get(host);
        if (!pool)
            return;
        const conn = pool.find(c => c.session === session);
        if (conn) {
            conn.busy = false;
            conn.lastUsed = Date.now();
            logger.debug('Released pooled connection', { host });
        }
    }
    /**
     * Open a new HTTP/2 session to `host`. Resolves once connected,
     * rejects on a connection-phase error. After connecting, the rejecting
     * 'error' handler is swapped for a persistent logging one so a later
     * session error is handled instead of crashing the process with an
     * unhandled 'error' event.
     */
    async createConnection(host) {
        return new Promise((resolve, reject) => {
            const session = http2.connect(host, {
                maxSessionMemory: 10 // 10MB per session
            });
            const onConnectError = (error) => {
                logger.error('HTTP/2 session error', { host, error: error.message });
                reject(error);
            };
            session.once('error', onConnectError);
            session.once('connect', () => {
                logger.info('HTTP/2 session connected', { host });
                session.removeListener('error', onConnectError);
                // Keep an error handler attached for the session's lifetime;
                // the 'close' handler below removes it from the pool.
                session.on('error', (error) => {
                    logger.error('HTTP/2 session error', { host, error: error.message });
                });
                resolve(session);
            });
            // Cleanup on session close
            session.once('close', () => {
                this.removeConnection(session, host);
            });
        });
    }
    /**
     * Poll every 100ms for a released connection, failing with a timeout
     * error after config.acquireTimeout milliseconds.
     */
    async waitForConnection(host) {
        const startTime = Date.now();
        return new Promise((resolve, reject) => {
            const checkInterval = setInterval(() => {
                const pool = this.pools.get(host);
                if (!pool) {
                    clearInterval(checkInterval);
                    reject(new Error('Pool disappeared'));
                    return;
                }
                const now = Date.now();
                const available = pool.find(c => !c.busy &&
                    !this.isExpired(c, now) &&
                    !c.session.closed &&
                    !c.session.destroyed);
                if (available) {
                    clearInterval(checkInterval);
                    available.busy = true;
                    available.lastUsed = now;
                    resolve(available.session);
                    return;
                }
                if (now - startTime > this.config.acquireTimeout) {
                    clearInterval(checkInterval);
                    reject(new Error('Connection acquire timeout'));
                }
            }, 100); // Check every 100ms
        });
    }
    /** True when the connection has sat idle longer than maxIdleTime. */
    isExpired(conn, now) {
        return (now - conn.lastUsed) > this.config.maxIdleTime;
    }
    /** Drop a (closed) session from the pool; delete empty host entries. */
    removeConnection(session, host) {
        const pool = this.pools.get(host);
        if (!pool)
            return;
        const index = pool.findIndex(c => c.session === session);
        if (index !== -1) {
            pool.splice(index, 1);
            logger.debug('Removed closed connection from pool', { host, poolSize: pool.length });
        }
        if (pool.length === 0) {
            this.pools.delete(host);
        }
    }
    /**
     * Periodic sweep: close and drop expired or already-closed connections,
     * deleting host entries that become empty.
     */
    cleanup() {
        const now = Date.now();
        let removed = 0;
        for (const [host, pool] of this.pools.entries()) {
            const before = pool.length;
            // Remove expired and closed connections
            const active = pool.filter(c => {
                if (this.isExpired(c, now) || c.session.closed || c.session.destroyed) {
                    if (!c.session.closed) {
                        c.session.close();
                    }
                    return false;
                }
                return true;
            });
            removed += before - active.length;
            if (active.length === 0) {
                this.pools.delete(host);
            }
            else {
                this.pools.set(host, active);
            }
        }
        if (removed > 0) {
            logger.debug('Cleaned up expired connections', { removed });
        }
    }
    /** Stop the cleanup timer and close every pooled session. */
    destroy() {
        clearInterval(this.cleanupInterval);
        for (const [host, pool] of this.pools.entries()) {
            for (const conn of pool) {
                if (!conn.session.closed) {
                    conn.session.close();
                }
            }
        }
        this.pools.clear();
        this.pendingCreates.clear();
        logger.info('Connection pool destroyed');
    }
    /** Per-host counts of total/busy/idle pooled connections. */
    getStats() {
        const stats = {};
        for (const [host, pool] of this.pools.entries()) {
            const busy = pool.filter(c => c.busy).length;
            stats[host] = {
                total: pool.length,
                busy,
                idle: pool.length - busy
            };
        }
        return stats;
    }
}
|
|
@@ -0,0 +1,298 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Dynamic Compression based on CPU
|
|
3
|
+
* Adaptive compression levels based on CPU availability
|
|
4
|
+
* Phase 3 Optimization
|
|
5
|
+
*/
|
|
6
|
+
import * as os from 'os';
import * as zlib from 'zlib';
import { performance } from 'perf_hooks';
|
|
8
|
+
/**
 * Compression level tables per algorithm.
 * Each entry pairs a zlib/brotli quality level with a rough relative
 * CPU cost and expected compression ratio (both on a 1-10 scale),
 * used by DynamicCompressionManager to trade CPU for bandwidth.
 */
export const COMPRESSION_LEVELS = {
    gzip: [
        { name: 'fastest', level: 1, cpuCost: 1, compressionRatio: 3 },
        { name: 'fast', level: 3, cpuCost: 3, compressionRatio: 5 },
        { name: 'default', level: 6, cpuCost: 6, compressionRatio: 7 },
        { name: 'best', level: 9, cpuCost: 10, compressionRatio: 9 }
    ],
    brotli: [
        { name: 'fastest', level: 1, cpuCost: 2, compressionRatio: 4 },
        { name: 'fast', level: 4, cpuCost: 4, compressionRatio: 6 },
        { name: 'default', level: 6, cpuCost: 7, compressionRatio: 8 },
        { name: 'best', level: 11, cpuCost: 10, compressionRatio: 10 }
    ]
};
|
|
25
|
+
/**
|
|
26
|
+
* Dynamic Compression Manager
|
|
27
|
+
* Adjusts compression levels based on CPU load
|
|
28
|
+
*/
|
|
29
|
+
export class DynamicCompressionManager {
    /** Resolved configuration (see constructor). */
    config;
    /** Running compression statistics (see resetStats for the shape). */
    stats;
    /** Index into the active level table; starts at 'fast'. */
    currentLevelIndex = 1;
    /** Rolling window (max 10) of recent CPU-usage samples, percent. */
    cpuSamples = [];
    /** Timer handle for adaptive CPU monitoring (when enabled). */
    monitorInterval;
    /** Number of completed compress() calls, for the running time average. */
    compressionCount = 0;
    /** Previous cumulative os.cpus() counters, for delta-based usage. */
    prevCpuTimes = null;
    /**
     * @param {{enabled: boolean, minSize?: number, algorithm?: 'gzip'|'brotli'|'deflate',
     *          adaptive?: boolean, cpuThresholdHigh?: number, cpuThresholdLow?: number,
     *          checkInterval?: number}} config
     */
    constructor(config) {
        this.config = {
            enabled: config.enabled,
            minSize: config.minSize || 1024,
            algorithm: config.algorithm || 'gzip',
            adaptive: config.adaptive !== false,
            cpuThresholdHigh: config.cpuThresholdHigh || 70,
            cpuThresholdLow: config.cpuThresholdLow || 30,
            checkInterval: config.checkInterval || 5000
        };
        this.stats = {
            totalBytes: 0,
            compressedBytes: 0,
            compressionRatio: 1,
            avgCompressionTime: 0,
            currentLevel: this.getCurrentLevel().level,
            levelChanges: 0,
            cpuAdjustments: 0
        };
        if (this.config.adaptive) {
            this.startCPUMonitoring();
        }
    }
    /**
     * Compress data with the current adaptive level.
     * Returns the input unchanged when compression is disabled, the payload
     * is below minSize, or compression fails (deliberate best-effort
     * fallback: serving uncompressed beats failing the request).
     * @param {Buffer} data
     * @returns {Promise<Buffer>}
     */
    async compress(data) {
        if (!this.config.enabled || data.length < this.config.minSize) {
            return data;
        }
        const startTime = performance.now();
        const level = this.getCurrentLevel();
        let compressed;
        try {
            if (this.config.algorithm === 'brotli') {
                compressed = await this.compressBrotli(data, level.level);
            }
            else if (this.config.algorithm === 'deflate') {
                compressed = await this.compressDeflate(data, level.level);
            }
            else {
                compressed = await this.compressGzip(data, level.level);
            }
            // Update statistics
            const compressionTime = performance.now() - startTime;
            this.updateStats(data.length, compressed.length, compressionTime);
            return compressed;
        }
        catch (error) {
            // Fallback to uncompressed
            return data;
        }
    }
    /**
     * Compress with gzip at the given zlib level.
     */
    compressGzip(data, level) {
        return new Promise((resolve, reject) => {
            zlib.gzip(data, { level }, (err, result) => {
                if (err)
                    reject(err);
                else
                    resolve(result);
            });
        });
    }
    /**
     * Compress with brotli at the given quality level.
     */
    compressBrotli(data, level) {
        return new Promise((resolve, reject) => {
            zlib.brotliCompress(data, {
                params: {
                    [zlib.constants.BROTLI_PARAM_QUALITY]: level
                }
            }, (err, result) => {
                if (err)
                    reject(err);
                else
                    resolve(result);
            });
        });
    }
    /**
     * Compress with deflate at the given zlib level.
     */
    compressDeflate(data, level) {
        return new Promise((resolve, reject) => {
            zlib.deflate(data, { level }, (err, result) => {
                if (err)
                    reject(err);
                else
                    resolve(result);
            });
        });
    }
    /**
     * Level table for the configured algorithm. 'deflate' shares the gzip
     * table (same zlib 1-9 level semantics); previously an algorithm with
     * no table entry made getCurrentLevel() throw on undefined.
     */
    getLevels() {
        return COMPRESSION_LEVELS[this.config.algorithm] || COMPRESSION_LEVELS.gzip;
    }
    /**
     * Get the currently selected compression level entry.
     */
    getCurrentLevel() {
        const levels = this.getLevels();
        return levels[this.currentLevelIndex] || levels[1];
    }
    /**
     * Adjust the compression level based on average CPU usage:
     * step down when above cpuThresholdHigh, up when below cpuThresholdLow.
     */
    adjustLevel(cpuUsage) {
        const levels = this.getLevels();
        const oldIndex = this.currentLevelIndex;
        if (cpuUsage > this.config.cpuThresholdHigh && this.currentLevelIndex > 0) {
            // CPU high, reduce compression level
            this.currentLevelIndex--;
            this.stats.cpuAdjustments++;
        }
        else if (cpuUsage < this.config.cpuThresholdLow && this.currentLevelIndex < levels.length - 1) {
            // CPU low, increase compression level
            this.currentLevelIndex++;
            this.stats.cpuAdjustments++;
        }
        if (oldIndex !== this.currentLevelIndex) {
            this.stats.levelChanges++;
            this.stats.currentLevel = this.getCurrentLevel().level;
        }
    }
    /**
     * Start periodic CPU sampling and level adaptation.
     * The interval is unref'd so it never keeps the process alive by itself.
     */
    startCPUMonitoring() {
        this.monitorInterval = setInterval(() => {
            const cpuUsage = this.getCPUUsage();
            this.cpuSamples.push(cpuUsage);
            // Keep last 10 samples
            if (this.cpuSamples.length > 10) {
                this.cpuSamples.shift();
            }
            // Calculate average CPU
            const avgCPU = this.cpuSamples.reduce((a, b) => a + b, 0) / this.cpuSamples.length;
            // Adjust compression level
            this.adjustLevel(avgCPU);
        }, this.config.checkInterval);
        this.monitorInterval.unref?.();
    }
    /**
     * Get CPU usage percentage (0-100) since the previous call.
     * Uses deltas of the cumulative os.cpus() counters; the raw counters
     * accumulate since boot, so without a delta this would report a
     * lifetime average instead of current load. (The original also called
     * require('os'), which throws in an ES module — os is imported now.)
     */
    getCPUUsage() {
        const cpus = os.cpus();
        let totalIdle = 0;
        let totalTick = 0;
        for (const cpu of cpus) {
            for (const type in cpu.times) {
                totalTick += cpu.times[type];
            }
            totalIdle += cpu.times.idle;
        }
        const prev = this.prevCpuTimes;
        this.prevCpuTimes = { idle: totalIdle, tick: totalTick };
        const idleDiff = prev ? totalIdle - prev.idle : totalIdle;
        const tickDiff = prev ? totalTick - prev.tick : totalTick;
        if (tickDiff <= 0) {
            return 0;
        }
        const usage = 100 - ~~(100 * idleDiff / tickDiff);
        return Math.max(0, Math.min(100, usage));
    }
    /**
     * Fold one compression result into the running statistics.
     * The time average uses an explicit call counter; the old code derived
     * the count as totalBytes / originalSize, which is only correct when
     * every payload has the same size.
     */
    updateStats(originalSize, compressedSize, time) {
        this.stats.totalBytes += originalSize;
        this.stats.compressedBytes += compressedSize;
        this.stats.compressionRatio = this.stats.compressedBytes > 0
            ? this.stats.totalBytes / this.stats.compressedBytes
            : 1;
        this.compressionCount++;
        const n = this.compressionCount;
        this.stats.avgCompressionTime =
            (this.stats.avgCompressionTime * (n - 1) + time) / n;
    }
    /**
     * Get statistics plus the current level name and average CPU usage.
     */
    getStats() {
        const level = this.getCurrentLevel();
        const avgCPU = this.cpuSamples.length > 0
            ? this.cpuSamples.reduce((a, b) => a + b, 0) / this.cpuSamples.length
            : 0;
        return {
            ...this.stats,
            currentLevelName: level.name,
            cpuUsage: avgCPU
        };
    }
    /**
     * Get compression savings in bytes, percent, and megabytes.
     * Percent is 0 (not NaN) before any data has been compressed.
     */
    getSavings() {
        const byteSavings = this.stats.totalBytes - this.stats.compressedBytes;
        const percentSavings = this.stats.totalBytes > 0
            ? (byteSavings / this.stats.totalBytes) * 100
            : 0;
        const mbSaved = byteSavings / (1024 * 1024);
        return {
            byteSavings,
            percentSavings,
            mbSaved
        };
    }
    /**
     * Manually set the compression level by name ('fastest' | 'fast' |
     * 'default' | 'best'). Returns true if the name was recognized.
     */
    setLevel(levelName) {
        const levels = this.getLevels();
        const index = levels.findIndex(l => l.name === levelName);
        if (index !== -1) {
            this.currentLevelIndex = index;
            this.stats.currentLevel = levels[index].level;
            this.stats.levelChanges++;
            return true;
        }
        return false;
    }
    /**
     * Reset statistics (including the internal call counter).
     */
    resetStats() {
        this.compressionCount = 0;
        this.stats = {
            totalBytes: 0,
            compressedBytes: 0,
            compressionRatio: 1,
            avgCompressionTime: 0,
            currentLevel: this.getCurrentLevel().level,
            levelChanges: 0,
            cpuAdjustments: 0
        };
    }
    /**
     * Stop CPU monitoring.
     */
    destroy() {
        if (this.monitorInterval) {
            clearInterval(this.monitorInterval);
        }
    }
}
|
|
270
|
+
/**
 * Content-type aware compression gate: decide whether a response body is
 * worth compressing.
 *
 * @param {string|undefined} contentType - Value of the Content-Type header.
 * @param {number} size - Body size in bytes.
 * @param {number} [minSize=1024] - Bodies smaller than this are skipped.
 * @returns {boolean} true when the body is large enough and text-like.
 */
export function shouldCompress(contentType, size, minSize = 1024) {
    if (size < minSize) {
        return false;
    }
    // Missing/empty Content-Type: previously this threw a TypeError on
    // contentType.startsWith; treat it as non-compressible instead.
    if (!contentType) {
        return false;
    }
    const compressibleTypes = [
        'text/',
        'application/json',
        'application/javascript',
        'application/xml',
        'application/x-www-form-urlencoded'
    ];
    return compressibleTypes.some(type => contentType.startsWith(type));
}
|
|
286
|
+
/**
 * Score compression effectiveness.
 * Weighs the achieved compression ratio (70%) against the time spent
 * per megabyte of input (30%).
 *
 * @param {{avgCompressionTime: number, totalBytes: number, compressionRatio: number}} stats
 * @returns {{efficiency: number, timePerMB: number, ratioScore: number}}
 */
export function calculateCompressionEfficiency(stats) {
    const megabytes = stats.totalBytes / (1024 * 1024);
    // Guard against division by zero before any data was processed.
    const timePerMB = stats.avgCompressionTime / (megabytes || 1);
    const ratioScore = Math.min(100, (stats.compressionRatio - 1) * 10);
    const speedScore = 100 - Math.min(100, timePerMB);
    const efficiency = (ratioScore * 0.7) + (speedScore * 0.3);
    return {
        efficiency,
        timePerMB,
        ratioScore
    };
}
|