aicodeswitch 2.0.8 → 2.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +2 -0
- package/CLAUDE.md +6 -0
- package/bin/stop.js +1 -2
- package/dist/server/database.js +39 -0
- package/dist/server/main.js +12 -2
- package/dist/server/proxy-server.js +110 -17
- package/dist/server/transformers/chunk-collector.js +103 -18
- package/dist/server/transformers/streaming.js +220 -113
- package/dist/ui/assets/{index-BOY_bl12.js → index-d74w3Uye.js} +27 -27
- package/dist/ui/index.html +1 -1
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
package/CLAUDE.md
CHANGED

@@ -165,6 +165,12 @@ aicos version # Show current version information
 - Access logs: System access records
 - Error logs: Error and exception records
 
+### Usage Limits Auto-Sync
+- **Service-Level Limits**: API services can have token and request count limits configured
+- **Auto-Sync to Rules**: When an API service's usage limits are modified, all rules using that service are automatically updated with the new limits
+- **Inheritance Detection**: When editing a rule, the system detects if the rule's limits match the service's limits and displays them as "inherited" (read-only)
+- **Manual Override**: Rules can be configured with custom limits that differ from the service defaults
+
 ## Development Tips
 
 1. **Environment Variables**: Copy `.env.example` to `.env` and modify as needed
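
Read together, the bullets above describe matching rather than a stored flag: a rule whose limit fields equal its service's current limits is shown as inherited, and anything else counts as a manual override. A minimal sketch of that comparison, using hypothetical camelCase field names rather than identifiers taken from the package source:

    // Hypothetical helper: decides whether a rule's limits should render as
    // "inherited" (read-only) in the rule editor. Field names are assumptions
    // based on the CLAUDE.md wording above, not taken from the package.
    function limitsAreInherited(rule, service) {
        return rule.tokenLimit === service.tokenLimit &&
            rule.requestCountLimit === service.requestCountLimit;
    }

    // limitsAreInherited({ tokenLimit: 500, requestCountLimit: 100 },
    //                    { tokenLimit: 500, requestCountLimit: 100 }); // -> true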
package/bin/stop.js
CHANGED

@@ -18,9 +18,8 @@ const stop = async (options = {}) => {
 
     // Step 1: if the PID file exists, prefer stopping the server via the PID file
     if (fs.existsSync(PID_FILE)) {
+        const pid = parseInt(fs.readFileSync(PID_FILE, 'utf-8'), 10);
         try {
-            const pid = parseInt(fs.readFileSync(PID_FILE, 'utf-8'), 10);
-
             const processInfo = await getProcessInfo(pid);
             if (!silent) {
                 console.log('\n' + chalk.gray(`Process found: ${chalk.white(pid)} (${chalk.gray(processInfo)})`));
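
A plausible reason for hoisting the `pid` read above the `try` in the hunk above is scoping: a `const` declared inside `try` is not visible in a later `catch`, so error reporting there could not mention the PID. A minimal sketch of the resulting pattern; the `catch` body is illustrative, not taken from the package:

    const pid = parseInt(fs.readFileSync(PID_FILE, 'utf-8'), 10);
    try {
        process.kill(pid, 'SIGTERM'); // ask the server to shut down gracefully
    } catch (error) {
        // pid is still in scope here because it was declared before the try block
        console.error(`Failed to stop process ${pid}: ${error.message}`);
    }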
package/dist/server/database.js
CHANGED

@@ -515,8 +515,47 @@ class DatabaseManager {
         if (result.changes > 0 && process.env.NODE_ENV === 'development') {
             console.log(`[DB] Updated service ${id}: ${service.name} -> ${service.apiUrl}`);
         }
+        // If the update succeeded, check whether the usage limits of associated rules need to be synced
+        if (result.changes > 0) {
+            this.syncRulesWithServiceLimits(id, service);
+        }
         return result.changes > 0;
     }
+    /**
+     * Sync the usage limits of rules that use this service.
+     * When an API service's usage limits are modified, automatically update all rules that use the service.
+     */
+    syncRulesWithServiceLimits(serviceId, service) {
+        // Fetch all rules that use this service
+        const rules = this.db.prepare('SELECT id FROM rules WHERE target_service_id = ?').all(serviceId);
+        if (rules.length === 0) {
+            return; // No rules use this service, nothing to sync
+        }
+        const now = Date.now();
+        const ruleIds = rules.map(r => r.id);
+        // Sync token usage limits
+        if (service.enableTokenLimit !== undefined || service.tokenLimit !== undefined ||
+            service.tokenResetInterval !== undefined || service.tokenResetBaseTime !== undefined) {
+            // Fetch the service's current configuration
+            const currentService = this.db.prepare('SELECT enable_token_limit, token_limit, token_reset_interval, token_reset_base_time FROM api_services WHERE id = ?').get(serviceId);
+            if (currentService && currentService.enable_token_limit === 1) {
+                // Token limit is enabled, sync it to all rules
+                this.db.prepare('UPDATE rules SET token_limit = ?, reset_interval = ?, token_reset_base_time = ?, updated_at = ? WHERE target_service_id = ?').run(currentService.token_limit, currentService.token_reset_interval, currentService.token_reset_base_time, now, serviceId);
+                console.log(`[DB] Synced token limits for ${ruleIds.length} rule(s) using service ${serviceId}`);
+            }
+        }
+        // Sync request count limits
+        if (service.enableRequestLimit !== undefined || service.requestCountLimit !== undefined ||
+            service.requestResetInterval !== undefined || service.requestResetBaseTime !== undefined) {
+            // Fetch the service's current configuration
+            const currentService = this.db.prepare('SELECT enable_request_limit, request_count_limit, request_reset_interval, request_reset_base_time FROM api_services WHERE id = ?').get(serviceId);
+            if (currentService && currentService.enable_request_limit === 1) {
+                // Request count limit is enabled, sync it to all rules
+                this.db.prepare('UPDATE rules SET request_count_limit = ?, request_reset_interval = ?, request_reset_base_time = ?, updated_at = ? WHERE target_service_id = ?').run(currentService.request_count_limit, currentService.request_reset_interval, currentService.request_reset_base_time, now, serviceId);
+                console.log(`[DB] Synced request count limits for ${ruleIds.length} rule(s) using service ${serviceId}`);
+            }
+        }
+    }
     deleteAPIService(id) {
         const result = this.db.prepare('DELETE FROM api_services WHERE id = ?').run(id);
         return result.changes > 0;
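
Because `syncRulesWithServiceLimits` runs inside the service-update path, one service update fans out to every rule whose `target_service_id` points at it. A standalone sketch of that fan-out in plain JavaScript (object field names here are illustrative; the real code issues the `UPDATE rules ... WHERE target_service_id = ?` statements shown above):

    // Apply a service's token limit settings to every rule that targets it.
    function syncTokenLimits(rules, serviceId, service, now = Date.now()) {
        return rules.map(rule =>
            rule.targetServiceId === serviceId
                ? {
                    ...rule,
                    tokenLimit: service.tokenLimit,
                    resetInterval: service.tokenResetInterval,
                    tokenResetBaseTime: service.tokenResetBaseTime,
                    updatedAt: now,
                }
                : rule);
    }

    // Example: only rules targeting 'svc-1' pick up the new limit.
    // syncTokenLimits([{ id: 'r1', targetServiceId: 'svc-1' }], 'svc-1',
    //                 { tokenLimit: 500, tokenResetInterval: 86400000, tokenResetBaseTime: 0 });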
package/dist/server/main.js
CHANGED

@@ -69,8 +69,8 @@ function getProxyAgent() {
 }
 const app = (0, express_1.default)();
 app.use((0, cors_1.default)());
-app.use(express_1.default.json({ limit: '
-app.use(express_1.default.urlencoded({ extended: true }));
+app.use(express_1.default.json({ limit: 'Infinity' }));
+app.use(express_1.default.urlencoded({ extended: true, limit: 'Infinity' }));
 const asyncHandler = (handler) => (req, res, next) => {
     Promise.resolve(handler(req, res, next)).catch(next);
 };
@@ -1391,6 +1391,16 @@ app.use((err, _req, res, _next) => {
     console.error(err);
     res.status(500).json({ error: err.message || 'Internal server error' });
 });
+// Global uncaught-exception handling - keep the service from crashing
+process.on('uncaughtException', (error) => {
+    console.error('[Uncaught Exception] The service hit an uncaught exception:', error);
+    console.error('[Uncaught Exception] Stack trace:', error.stack);
+    // Do not exit the process; keep running
+});
+process.on('unhandledRejection', (reason) => {
+    console.error('[Unhandled Rejection] The service hit an unhandled Promise rejection:', reason);
+    // Do not exit the process; keep running
+});
 start().catch((error) => {
     console.error('Failed to start server:', error);
     process.exit(1);
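
The second main.js hunk adds process-wide safety nets so that a single bad request or broken stream cannot take the proxy down. A standalone sketch of the same pattern (log messages are illustrative); note that Node's documentation treats resuming after `uncaughtException` as a last resort, so this is a deliberate availability-over-strictness choice:

    // Keep the process alive on unexpected errors instead of crashing.
    process.on('uncaughtException', (error) => {
        console.error('[Uncaught Exception]', error.stack || error);
        // Deliberately no process.exit(): the proxy keeps serving requests.
    });
    process.on('unhandledRejection', (reason) => {
        console.error('[Unhandled Rejection]', reason);
    });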

package/dist/server/proxy-server.js
CHANGED

@@ -643,8 +643,8 @@ class ProxyServer {
             // Check and reset rules whose window has expired
             this.dbManager.checkAndResetRuleIfNeeded(rule.id);
             this.dbManager.checkAndResetRequestCountIfNeeded(rule.id);
-            // Check tokens
-            if (rule.tokenLimit && rule.totalTokensUsed !== undefined && rule.totalTokensUsed >= rule.tokenLimit) {
+            // Check the token limit (tokenLimit is in k, so multiply by 1000 to get the actual token count)
+            if (rule.tokenLimit && rule.totalTokensUsed !== undefined && rule.totalTokensUsed >= rule.tokenLimit * 1000) {
                 continue; // Skip rules that are over their limit
             }
             // Check the request count limit
@@ -666,8 +666,8 @@ class ProxyServer {
             // Check and reset rules whose window has expired
             this.dbManager.checkAndResetRuleIfNeeded(rule.id);
             this.dbManager.checkAndResetRequestCountIfNeeded(rule.id);
-            // Check tokens
-            if (rule.tokenLimit && rule.totalTokensUsed !== undefined && rule.totalTokensUsed >= rule.tokenLimit) {
+            // Check the token limit (tokenLimit is in k, so multiply by 1000 to get the actual token count)
+            if (rule.tokenLimit && rule.totalTokensUsed !== undefined && rule.totalTokensUsed >= rule.tokenLimit * 1000) {
                 continue; // Skip rules that are over their limit
             }
             // Check the request count limit
@@ -687,8 +687,8 @@ class ProxyServer {
             // Check and reset rules whose window has expired
             this.dbManager.checkAndResetRuleIfNeeded(rule.id);
             this.dbManager.checkAndResetRequestCountIfNeeded(rule.id);
-            // Check tokens
-            if (rule.tokenLimit && rule.totalTokensUsed !== undefined && rule.totalTokensUsed >= rule.tokenLimit) {
+            // Check the token limit (tokenLimit is in k, so multiply by 1000 to get the actual token count)
+            if (rule.tokenLimit && rule.totalTokensUsed !== undefined && rule.totalTokensUsed >= rule.tokenLimit * 1000) {
                 continue; // Skip rules that are over their limit
             }
             // Check the request count limit
@@ -729,9 +729,9 @@ class ProxyServer {
         // 5. Filter out rules that exceed their limits (only when there are multiple candidate rules)
         if (candidates.length > 1) {
             const filteredCandidates = candidates.filter(rule => {
-                // Check tokens
+                // Check the token limit (tokenLimit is in k, so multiply by 1000 to get the actual token count)
                 if (rule.tokenLimit && rule.totalTokensUsed !== undefined) {
-                    if (rule.totalTokensUsed >= rule.tokenLimit) {
+                    if (rule.totalTokensUsed >= rule.tokenLimit * 1000) {
                         return false;
                     }
                 }
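
The four hunks above make the same one-line fix: `tokenLimit` is configured in thousands of tokens (k) while `totalTokensUsed` counts raw tokens, so comparing them directly disabled rules roughly 1000x too early. A minimal sketch of the corrected check, with field names following the diff:

    // tokenLimit is configured in k (thousands of tokens); usage is counted in raw tokens.
    function isOverTokenLimit(rule) {
        if (!rule.tokenLimit || rule.totalTokensUsed === undefined) {
            return false; // no limit configured, or no usage recorded yet
        }
        return rule.totalTokensUsed >= rule.tokenLimit * 1000;
    }

    // Example: a 500k limit with 120,000 tokens used is still under the limit.
    // isOverTokenLimit({ tokenLimit: 500, totalTokensUsed: 120000 }); // -> false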
@@ -1535,11 +1535,49 @@ class ProxyServer {
                 streamChunksForLog = eventCollector.getChunks();
                 void finalizeLog(res.statusCode);
             });
-
+            // Listen for error events on res
+            res.on('error', (err) => {
+                console.error('[Proxy] Response stream error:', err);
+            });
+            (0, stream_1.pipeline)(response.data, parser, eventCollector, converter, serializer, res, (error) => __awaiter(this, void 0, void 0, function* () {
                 if (error) {
-
+                    console.error('[Proxy] Pipeline error for claude-code:', error);
+                    // Record it in the error log
+                    try {
+                        yield this.dbManager.addErrorLog({
+                            timestamp: Date.now(),
+                            method: req.method,
+                            path: req.path,
+                            statusCode: 500,
+                            errorMessage: error.message || 'Stream processing error',
+                            errorStack: error.stack,
+                            requestHeaders: this.normalizeHeaders(req.headers),
+                            requestBody: req.body ? JSON.stringify(req.body) : undefined,
+                        });
+                    }
+                    catch (logError) {
+                        console.error('[Proxy] Failed to log error:', logError);
+                    }
+                    // Try to send an error event to the client
+                    try {
+                        if (!res.writableEnded) {
+                            const errorEvent = `event: error\ndata: ${JSON.stringify({
+                                type: 'error',
+                                error: {
+                                    type: 'api_error',
+                                    message: 'Stream processing error occurred'
+                                }
+                            })}\n\n`;
+                            res.write(errorEvent);
+                            res.end();
+                        }
+                    }
+                    catch (writeError) {
+                        console.error('[Proxy] Failed to send error event:', writeError);
+                    }
+                    yield finalizeLog(500, error.message);
                 }
-            });
+            }));
             return;
         }
         if (targetType === 'codex' && this.isClaudeSource(sourceType)) {
@@ -1566,17 +1604,55 @@ class ProxyServer {
                 streamChunksForLog = eventCollector.getChunks();
                 void finalizeLog(res.statusCode);
             });
-
+            // Listen for error events on res
+            res.on('error', (err) => {
+                console.error('[Proxy] Response stream error:', err);
+            });
+            (0, stream_1.pipeline)(response.data, parser, eventCollector, converter, serializer, res, (error) => __awaiter(this, void 0, void 0, function* () {
                 if (error) {
-
+                    console.error('[Proxy] Pipeline error for codex:', error);
+                    // Record it in the error log
+                    try {
+                        yield this.dbManager.addErrorLog({
+                            timestamp: Date.now(),
+                            method: req.method,
+                            path: req.path,
+                            statusCode: 500,
+                            errorMessage: error.message || 'Stream processing error',
+                            errorStack: error.stack,
+                            requestHeaders: this.normalizeHeaders(req.headers),
+                            requestBody: req.body ? JSON.stringify(req.body) : undefined,
+                        });
+                    }
+                    catch (logError) {
+                        console.error('[Proxy] Failed to log error:', logError);
+                    }
+                    // Try to send an error event to the client
+                    try {
+                        if (!res.writableEnded) {
+                            const errorEvent = `data: ${JSON.stringify({
+                                error: 'Stream processing error occurred'
+                            })}\n\n`;
+                            res.write(errorEvent);
+                            res.end();
+                        }
+                    }
+                    catch (writeError) {
+                        console.error('[Proxy] Failed to send error event:', writeError);
+                    }
+                    yield finalizeLog(500, error.message);
                 }
-            });
+            }));
             return;
         }
         // Default stream handling (no conversion)
         const eventCollector = new chunk_collector_1.SSEEventCollectorTransform();
         responseHeadersForLog = this.normalizeResponseHeaders(responseHeaders);
         this.copyResponseHeaders(responseHeaders, res);
+        // Listen for error events on res
+        res.on('error', (err) => {
+            console.error('[Proxy] Response stream error:', err);
+        });
         res.on('finish', () => {
             streamChunksForLog = eventCollector.getChunks();
             // Try to extract usage info from the event collector
@@ -1586,11 +1662,28 @@ class ProxyServer {
             }
             void finalizeLog(res.statusCode);
         });
-        (0, stream_1.pipeline)(response.data, eventCollector, res, (error) => {
+        (0, stream_1.pipeline)(response.data, eventCollector, res, (error) => __awaiter(this, void 0, void 0, function* () {
             if (error) {
-
+                console.error('[Proxy] Pipeline error (default stream):', error);
+                // Record it in the error log
+                try {
+                    yield this.dbManager.addErrorLog({
+                        timestamp: Date.now(),
+                        method: req.method,
+                        path: req.path,
+                        statusCode: 500,
+                        errorMessage: error.message || 'Stream processing error',
+                        errorStack: error.stack,
+                        requestHeaders: this.normalizeHeaders(req.headers),
+                        requestBody: req.body ? JSON.stringify(req.body) : undefined,
+                    });
+                }
+                catch (logError) {
+                    console.error('[Proxy] Failed to log error:', logError);
+                }
+                yield finalizeLog(500, error.message);
             }
-        });
+        }));
         return;
     }
     let responseData = response.data;
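
The three streaming hunks above share one shape: the `pipeline` callback is wrapped in `__awaiter` so that, on error, the proxy records the failure in the error log, emits one final SSE error frame if the response is still writable, and finalizes the request log instead of dropping the stream silently. A compact standalone sketch of the client-facing half, assuming an Express-style `res`; the payload follows the claude-code branch of the diff:

    // Last-resort error frame for an interrupted SSE response.
    function sendStreamErrorEvent(res, message = 'Stream processing error occurred') {
        if (res.writableEnded) {
            return; // too late, the response has already been closed
        }
        const payload = JSON.stringify({ type: 'error', error: { type: 'api_error', message } });
        res.write(`event: error\ndata: ${payload}\n\n`); // SSE frame: event line, data line, blank line
        res.end();
    }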
@@ -8,20 +8,45 @@ const stream_1 = require("stream");
|
|
|
8
8
|
*/
|
|
9
9
|
class ChunkCollectorTransform extends stream_1.Transform {
|
|
10
10
|
constructor() {
|
|
11
|
-
super();
|
|
11
|
+
super({ writableObjectMode: true, readableObjectMode: true });
|
|
12
12
|
Object.defineProperty(this, "chunks", {
|
|
13
13
|
enumerable: true,
|
|
14
14
|
configurable: true,
|
|
15
15
|
writable: true,
|
|
16
16
|
value: []
|
|
17
17
|
});
|
|
18
|
+
Object.defineProperty(this, "errorEmitted", {
|
|
19
|
+
enumerable: true,
|
|
20
|
+
configurable: true,
|
|
21
|
+
writable: true,
|
|
22
|
+
value: false
|
|
23
|
+
});
|
|
24
|
+
this.on('error', (err) => {
|
|
25
|
+
console.error('[ChunkCollectorTransform] Stream error:', err);
|
|
26
|
+
this.errorEmitted = true;
|
|
27
|
+
});
|
|
18
28
|
}
|
|
19
29
|
_transform(chunk, _encoding, callback) {
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
30
|
+
if (this.errorEmitted) {
|
|
31
|
+
callback();
|
|
32
|
+
return;
|
|
33
|
+
}
|
|
34
|
+
try {
|
|
35
|
+
// 收集chunk数据 - 支持对象和Buffer/string
|
|
36
|
+
if (typeof chunk === 'object' && chunk !== null && !Buffer.isBuffer(chunk)) {
|
|
37
|
+
this.chunks.push(JSON.stringify(chunk));
|
|
38
|
+
}
|
|
39
|
+
else {
|
|
40
|
+
this.chunks.push(chunk.toString('utf8'));
|
|
41
|
+
}
|
|
42
|
+
// 将chunk传递给下一个stream
|
|
43
|
+
this.push(chunk);
|
|
44
|
+
callback();
|
|
45
|
+
}
|
|
46
|
+
catch (error) {
|
|
47
|
+
console.error('[ChunkCollectorTransform] Error in _transform:', error);
|
|
48
|
+
callback();
|
|
49
|
+
}
|
|
25
50
|
}
|
|
26
51
|
/**
|
|
27
52
|
* 获取收集的所有chunks
|
|
@@ -44,7 +69,7 @@ exports.ChunkCollectorTransform = ChunkCollectorTransform;
|
|
|
44
69
|
*/
|
|
45
70
|
class SSEEventCollectorTransform extends stream_1.Transform {
|
|
46
71
|
constructor() {
|
|
47
|
-
super();
|
|
72
|
+
super({ writableObjectMode: true, readableObjectMode: true });
|
|
48
73
|
Object.defineProperty(this, "buffer", {
|
|
49
74
|
enumerable: true,
|
|
50
75
|
configurable: true,
|
|
@@ -66,22 +91,82 @@ class SSEEventCollectorTransform extends stream_1.Transform {
|
|
|
66
91
|
writable: true,
|
|
67
92
|
value: []
|
|
68
93
|
});
|
|
94
|
+
Object.defineProperty(this, "errorEmitted", {
|
|
95
|
+
enumerable: true,
|
|
96
|
+
configurable: true,
|
|
97
|
+
writable: true,
|
|
98
|
+
value: false
|
|
99
|
+
});
|
|
100
|
+
this.on('error', (err) => {
|
|
101
|
+
console.error('[SSEEventCollectorTransform] Stream error:', err);
|
|
102
|
+
this.errorEmitted = true;
|
|
103
|
+
});
|
|
69
104
|
}
|
|
70
105
|
_transform(chunk, _encoding, callback) {
|
|
71
|
-
this.
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
106
|
+
if (this.errorEmitted) {
|
|
107
|
+
callback();
|
|
108
|
+
return;
|
|
109
|
+
}
|
|
110
|
+
try {
|
|
111
|
+
// 如果是对象(来自 SSEParserTransform),先转换为字符串格式进行处理
|
|
112
|
+
if (typeof chunk === 'object' && chunk !== null) {
|
|
113
|
+
const sseEvent = chunk;
|
|
114
|
+
const lines = [];
|
|
115
|
+
if (sseEvent.event)
|
|
116
|
+
lines.push(`event: ${sseEvent.event}`);
|
|
117
|
+
if (sseEvent.id)
|
|
118
|
+
lines.push(`id: ${sseEvent.id}`);
|
|
119
|
+
if (sseEvent.data !== undefined) {
|
|
120
|
+
if (typeof sseEvent.data === 'string') {
|
|
121
|
+
lines.push(`data: ${sseEvent.data}`);
|
|
122
|
+
}
|
|
123
|
+
else {
|
|
124
|
+
lines.push(`data: ${JSON.stringify(sseEvent.data)}`);
|
|
125
|
+
}
|
|
126
|
+
}
|
|
127
|
+
if (lines.length > 0) {
|
|
128
|
+
this.currentEvent.rawLines.push(...lines);
|
|
129
|
+
if (sseEvent.event)
|
|
130
|
+
this.currentEvent.event = sseEvent.event;
|
|
131
|
+
if (sseEvent.id)
|
|
132
|
+
this.currentEvent.id = sseEvent.id;
|
|
133
|
+
if (sseEvent.data !== undefined) {
|
|
134
|
+
const dataStr = typeof sseEvent.data === 'string' ? sseEvent.data : JSON.stringify(sseEvent.data);
|
|
135
|
+
this.currentEvent.dataLines.push(dataStr);
|
|
136
|
+
}
|
|
137
|
+
this.flushEvent();
|
|
138
|
+
}
|
|
139
|
+
// 将原始对象传递给下一个stream
|
|
140
|
+
this.push(chunk);
|
|
141
|
+
}
|
|
142
|
+
else {
|
|
143
|
+
// Buffer/string 模式
|
|
144
|
+
this.buffer += chunk.toString('utf8');
|
|
145
|
+
this.processBuffer();
|
|
146
|
+
// 将chunk传递给下一个stream
|
|
147
|
+
this.push(chunk);
|
|
148
|
+
}
|
|
149
|
+
callback();
|
|
150
|
+
}
|
|
151
|
+
catch (error) {
|
|
152
|
+
console.error('[SSEEventCollectorTransform] Error in _transform:', error);
|
|
153
|
+
callback();
|
|
154
|
+
}
|
|
76
155
|
}
|
|
77
156
|
_flush(callback) {
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
this.
|
|
157
|
+
try {
|
|
158
|
+
// 处理剩余的buffer
|
|
159
|
+
if (this.buffer.trim()) {
|
|
160
|
+
this.processBuffer();
|
|
161
|
+
}
|
|
162
|
+
// 刷新最后一个事件
|
|
163
|
+
this.flushEvent();
|
|
164
|
+
callback();
|
|
165
|
+
}
|
|
166
|
+
catch (error) {
|
|
167
|
+
console.error('[SSEEventCollectorTransform] Error in _flush:', error);
|
|
168
|
+
callback();
|
|
81
169
|
}
|
|
82
|
-
// 刷新最后一个事件
|
|
83
|
-
this.flushEvent();
|
|
84
|
-
callback();
|
|
85
170
|
}
|
|
86
171
|
processBuffer() {
|
|
87
172
|
const lines = this.buffer.split('\n');
|
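
Both collector constructors now pass `{ writableObjectMode: true, readableObjectMode: true }` to `Transform`, which is what lets them sit in a pipeline next to stages such as the SSE parser that emit parsed event objects instead of Buffers; `_transform` then branches on whether the incoming chunk is an object. A minimal standalone illustration of the same idea:

    const { Transform } = require('stream');

    // Object-mode pass-through that records a printable copy of every chunk,
    // whether the upstream stage emits objects or Buffers/strings.
    class RecordingPassThrough extends Transform {
        constructor() {
            super({ writableObjectMode: true, readableObjectMode: true });
            this.seen = [];
        }
        _transform(chunk, _encoding, callback) {
            const isObject = typeof chunk === 'object' && chunk !== null && !Buffer.isBuffer(chunk);
            this.seen.push(isObject ? JSON.stringify(chunk) : chunk.toString('utf8'));
            this.push(chunk); // forward the chunk unchanged
            callback();
        }
    }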