@ynhcj/xiaoyi 0.0.1-beta
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +207 -0
- package/dist/auth.d.ts +36 -0
- package/dist/auth.js +111 -0
- package/dist/channel.d.ts +189 -0
- package/dist/channel.js +354 -0
- package/dist/config-schema.d.ts +46 -0
- package/dist/config-schema.js +28 -0
- package/dist/file-download.d.ts +17 -0
- package/dist/file-download.js +69 -0
- package/dist/file-handler.d.ts +36 -0
- package/dist/file-handler.js +113 -0
- package/dist/index.d.ts +29 -0
- package/dist/index.js +49 -0
- package/dist/onboarding.d.ts +6 -0
- package/dist/onboarding.js +167 -0
- package/dist/push.d.ts +28 -0
- package/dist/push.js +135 -0
- package/dist/runtime.d.ts +191 -0
- package/dist/runtime.js +438 -0
- package/dist/types.d.ts +280 -0
- package/dist/types.js +8 -0
- package/dist/websocket.d.ts +219 -0
- package/dist/websocket.js +1068 -0
- package/dist/xiaoyi-media.d.ts +81 -0
- package/dist/xiaoyi-media.js +216 -0
- package/dist/xy-bot.d.ts +19 -0
- package/dist/xy-bot.js +277 -0
- package/dist/xy-client.d.ts +26 -0
- package/dist/xy-client.js +78 -0
- package/dist/xy-config.d.ts +18 -0
- package/dist/xy-config.js +37 -0
- package/dist/xy-formatter.d.ts +94 -0
- package/dist/xy-formatter.js +303 -0
- package/dist/xy-monitor.d.ts +17 -0
- package/dist/xy-monitor.js +194 -0
- package/dist/xy-parser.d.ts +49 -0
- package/dist/xy-parser.js +109 -0
- package/dist/xy-reply-dispatcher.d.ts +17 -0
- package/dist/xy-reply-dispatcher.js +308 -0
- package/dist/xy-tools/session-manager.d.ts +29 -0
- package/dist/xy-tools/session-manager.js +80 -0
- package/dist/xy-utils/config-manager.d.ts +26 -0
- package/dist/xy-utils/config-manager.js +61 -0
- package/dist/xy-utils/crypto.d.ts +8 -0
- package/dist/xy-utils/crypto.js +21 -0
- package/dist/xy-utils/logger.d.ts +6 -0
- package/dist/xy-utils/logger.js +37 -0
- package/dist/xy-utils/session.d.ts +34 -0
- package/dist/xy-utils/session.js +55 -0
- package/openclaw.plugin.json +9 -0
- package/package.json +73 -0
- package/xiaoyi.js +1 -0
|
@@ -0,0 +1,194 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.monitorXYProvider = monitorXYProvider;
|
|
4
|
+
const xy_config_js_1 = require("./xy-config.js");
|
|
5
|
+
const xy_client_js_1 = require("./xy-client.js");
|
|
6
|
+
const xy_bot_js_1 = require("./xy-bot.js");
|
|
7
|
+
/**
|
|
8
|
+
* Per-session serial queue that ensures messages from the same session are processed
|
|
9
|
+
* in arrival order while allowing different sessions to run concurrently.
|
|
10
|
+
* Following feishu/monitor.account.ts pattern.
|
|
11
|
+
*/
|
|
12
|
+
function createSessionQueue() {
|
|
13
|
+
const queues = new Map();
|
|
14
|
+
return (sessionId, task) => {
|
|
15
|
+
const prev = queues.get(sessionId) ?? Promise.resolve();
|
|
16
|
+
const next = prev.then(task, task);
|
|
17
|
+
queues.set(sessionId, next);
|
|
18
|
+
void next.finally(() => {
|
|
19
|
+
if (queues.get(sessionId) === next) {
|
|
20
|
+
queues.delete(sessionId);
|
|
21
|
+
}
|
|
22
|
+
});
|
|
23
|
+
return next;
|
|
24
|
+
};
|
|
25
|
+
}
|
|
26
|
+
/**
 * Monitor XY channel WebSocket connections.
 * Keeps the connection alive until abortSignal is triggered.
 *
 * @param {object} [opts] - Monitor options.
 * @param {object} opts.config - OpenClaw config; required (throws if missing).
 * @param {object} [opts.runtime] - Runtime env providing log/error; falls back to console.
 * @param {string} [opts.accountId] - Account id; defaults to "default".
 * @param {Function} [opts.setStatus] - Health/status reporter from the framework.
 * @param {AbortSignal} [opts.abortSignal] - Aborting it cleans up and resolves the promise.
 * @returns {Promise<void>} Resolves on abort, or early if the initial connect fails.
 * @throws {Error} If config is missing or the resolved XY account is disabled.
 */
async function monitorXYProvider(opts = {}) {
    const cfg = opts.config;
    if (!cfg) {
        throw new Error("Config is required for XY monitor");
    }
    const runtime = opts.runtime;
    // Fall back to console when no runtime logger is supplied.
    const log = runtime?.log ?? console.log;
    const error = runtime?.error ?? console.error;
    const account = (0, xy_config_js_1.resolveXYConfig)(cfg);
    if (!account.enabled) {
        throw new Error(`XY account is disabled`);
    }
    const accountId = opts.accountId ?? "default";
    // Create trackEvent function to report health to OpenClaw framework.
    // Both timestamps are stamped with "now" on every call.
    const trackEvent = opts.setStatus
        ? () => {
            opts.setStatus({ lastEventAt: Date.now(), lastInboundAt: Date.now() });
        }
        : undefined;
    // 🔍 Diagnose WebSocket managers before gateway start
    // console.log("🔍 [DIAGNOSTICS] Checking WebSocket managers before gateway start...");
    // diagnoseAllManagers();
    // Get WebSocket manager (cached)
    const wsManager = (0, xy_client_js_1.getXYWebSocketManager)(account);
    // // ✅ Set health event callback for heartbeat reporting
    // if (trackEvent) {
    //     wsManager.setHealthEventCallback(trackEvent);
    // }
    // Track logged servers to avoid duplicate logs
    const loggedServers = new Set();
    // Track active message processing to detect duplicates
    const activeMessages = new Set();
    // Create session queue for ordered message processing
    const enqueue = createSessionQueue();
    // Health check interval
    let healthCheckInterval = null;
    // NOTE(review): reject is never called; every failure path either resolves
    // or keeps the monitor running. Consider whether connect/handler errors
    // should ever reject.
    return new Promise((resolve, reject) => {
        // Event handlers (defined early so they can be referenced in cleanup)
        const messageHandler = (message, sessionId, serverId) => {
            // Dedup key: session + message id pair.
            const messageKey = `${sessionId}::${message.id}`;
            log(`[MONITOR-HANDLER] ####### messageHandler triggered: serverId=${serverId}, sessionId=${sessionId}, messageId=${message.id} #######`);
            // ✅ Report health: received a message
            trackEvent?.();
            // Check for duplicate message handling
            if (activeMessages.has(messageKey)) {
                error(`[MONITOR-HANDLER] ⚠️ WARNING: Duplicate message detected! messageKey=${messageKey}, this may cause duplicate dispatchers!`);
            }
            activeMessages.add(messageKey);
            log(`[MONITOR-HANDLER] 📝 Active messages count: ${activeMessages.size}, messageKey: ${messageKey}`);
            const task = async () => {
                try {
                    log(`[MONITOR-HANDLER] 🚀 Starting handleXYMessage for messageKey=${messageKey}`);
                    await (0, xy_bot_js_1.handleXYMessage)({
                        cfg,
                        runtime,
                        message,
                        accountId, // ✅ Pass accountId ("default")
                    });
                    log(`[MONITOR-HANDLER] ✅ Completed handleXYMessage for messageKey=${messageKey}`);
                }
                catch (err) {
                    // ✅ Only log error, don't re-throw to prevent gateway restart
                    error(`XY gateway: error handling message from ${serverId}: ${String(err)}`);
                }
                finally {
                    // Remove from active messages when done
                    activeMessages.delete(messageKey);
                    log(`[MONITOR-HANDLER] 🧹 Cleaned up messageKey=${messageKey}, remaining active: ${activeMessages.size}`);
                }
            };
            // Serialize per session; fire-and-forget with an explicit catch so a
            // queue failure cannot become an unhandled rejection.
            void enqueue(sessionId, task).catch((err) => {
                // Error already logged in task, this is for queue failures
                error(`XY gateway: queue processing failed for session ${sessionId}: ${String(err)}`);
                activeMessages.delete(messageKey);
            });
        };
        const connectedHandler = (serverId) => {
            // Log each server's connect only once per connected period.
            if (!loggedServers.has(serverId)) {
                log(`XY gateway: ${serverId} connected`);
                loggedServers.add(serverId);
            }
            // ✅ Report health: connection established
            trackEvent?.();
            opts.setStatus?.({ connected: true });
        };
        const disconnectedHandler = (serverId) => {
            // NOTE(review): uses console.warn instead of the runtime logger —
            // inconsistent with the rest of this function; confirm if intended.
            console.warn(`XY gateway: ${serverId} disconnected`);
            loggedServers.delete(serverId);
            // ✅ Report disconnection status (only if all servers disconnected)
            if (loggedServers.size === 0) {
                opts.setStatus?.({ connected: false });
            }
        };
        const errorHandler = (err, serverId) => {
            error(`XY gateway: ${serverId} error: ${String(err)}`);
        };
        // Tears down timers, listeners, the cached manager, and local state.
        // Safe to call once; called from both the abort path and the
        // already-aborted fast path.
        const cleanup = () => {
            log("XY gateway: cleaning up...");
            // // 🔍 Diagnose before cleanup
            // console.log("🔍 [DIAGNOSTICS] Checking WebSocket managers before cleanup...");
            // diagnoseAllManagers();
            // Stop health check interval
            if (healthCheckInterval) {
                clearInterval(healthCheckInterval);
                healthCheckInterval = null;
                console.log("⏸️ Stopped periodic health check");
            }
            // Remove event handlers to prevent duplicate calls on gateway restart
            wsManager.off("message", messageHandler);
            wsManager.off("connected", connectedHandler);
            wsManager.off("disconnected", disconnectedHandler);
            wsManager.off("error", errorHandler);
            // ✅ Disconnect the wsManager to prevent connection leaks
            // This is safe because each gateway lifecycle should have clean connections
            wsManager.disconnect();
            // ✅ Remove manager from cache to prevent reusing dirty state
            (0, xy_client_js_1.removeXYWebSocketManager)(account);
            loggedServers.clear();
            activeMessages.clear();
            log(`[MONITOR-HANDLER] 🧹 Cleanup complete, cleared active messages`);
            // // 🔍 Diagnose after cleanup
            // console.log("🔍 [DIAGNOSTICS] Checking WebSocket managers after cleanup...");
            // diagnoseAllManagers();
        };
        const handleAbort = () => {
            log("XY gateway: abort signal received, stopping");
            cleanup();
            log("XY gateway stopped");
            resolve();
        };
        // If the caller aborted before we even started, do a no-op lifecycle.
        if (opts.abortSignal?.aborted) {
            cleanup();
            resolve();
            return;
        }
        opts.abortSignal?.addEventListener("abort", handleAbort, { once: true });
        // Register event handlers (handlers are defined above in cleanup scope)
        wsManager.on("message", messageHandler);
        wsManager.on("connected", connectedHandler);
        wsManager.on("disconnected", disconnectedHandler);
        wsManager.on("error", errorHandler);
        // Start periodic health check (every 5 minutes)
        console.log("🏥 Starting periodic health check (every 5 minutes)...");
        healthCheckInterval = setInterval(() => {
            // NOTE(review): body is effectively a log-only placeholder; the
            // diagnostics/auto-cleanup calls are commented out.
            console.log("🏥 [HEALTH CHECK] Periodic WebSocket diagnostics...");
            // diagnoseAllManagers();
            // // Auto-cleanup orphan connections
            // const cleaned = cleanupOrphanConnections();
            // if (cleaned > 0) {
            //     console.log(`🧹 [HEALTH CHECK] Auto-cleaned ${cleaned} manager(s) with orphan connections`);
            // }
        }, 5 * 60 * 1000); // 5 minutes
        // Connect to WebSocket servers
        wsManager.connect()
            .then(() => {
                log("XY gateway: started successfully");
            })
            .catch((err) => {
                // Connection failed but don't reject - continue monitoring for reconnection
                error(`XY gateway: initial connection failed: ${String(err)}`);
                // Still resolve successfully so plugin starts
                // NOTE(review): resolving here ends the monitor promise while the
                // listeners/interval stay registered until abort; confirm the
                // framework tolerates an early-resolved monitor.
                resolve();
            });
    });
}
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
import type { A2AJsonRpcRequest, A2AMessagePart, A2ADataEvent } from "./types.js";
/**
 * Parsed message information extracted from A2A request.
 * Note: agentId is not extracted from message - it should come from config.
 */
export interface ParsedA2AMessage {
    /** Session identifier from request params. */
    sessionId: string;
    /** Task id taken from params.id (unique per conversation/task). */
    taskId: string;
    /** Globally unique message id taken from the top-level JSON-RPC request id. */
    messageId: string;
    /** Raw message parts (text / file / data); empty array when absent. */
    parts: A2AMessagePart[];
    /** JSON-RPC method name of the originating request. */
    method: string;
}
/**
 * Parse an A2A JSON-RPC request into structured message data.
 * Throws when params or its required fields (sessionId, message) are missing.
 */
export declare function parseA2AMessage(request: A2AJsonRpcRequest): ParsedA2AMessage;
/**
 * Extract text content from message parts.
 * Text parts are joined with newlines and trimmed.
 */
export declare function extractTextFromParts(parts: A2AMessagePart[]): string;
/**
 * Extract file parts from message parts.
 */
export declare function extractFileParts(parts: A2AMessagePart[]): Array<{
    name: string;
    mimeType: string;
    uri: string;
}>;
/**
 * Extract data events from message parts (for tool responses).
 */
export declare function extractDataEvents(parts: A2AMessagePart[]): A2ADataEvent[];
/**
 * Check if message is a clearContext request.
 * Accepts both "clearContext" and "clear_context" spellings.
 */
export declare function isClearContextMessage(method: string): boolean;
/**
 * Check if message is a tasks/cancel request.
 * Accepts both "tasks/cancel" and "tasks_cancel" spellings.
 */
export declare function isTasksCancelMessage(method: string): boolean;
/**
 * Extract push_id from message parts.
 * Looks for push_id in data parts under variables.systemVariables.push_id
 * Returns null when no non-empty string push_id is found.
 */
export declare function extractPushId(parts: A2AMessagePart[]): string | null;
/**
 * Validate A2A request structure.
 * Type guard: checks jsonrpc version, method, id, and params.
 */
export declare function validateA2ARequest(request: any): request is A2AJsonRpcRequest;
|
|
@@ -0,0 +1,109 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.parseA2AMessage = parseA2AMessage;
|
|
4
|
+
exports.extractTextFromParts = extractTextFromParts;
|
|
5
|
+
exports.extractFileParts = extractFileParts;
|
|
6
|
+
exports.extractDataEvents = extractDataEvents;
|
|
7
|
+
exports.isClearContextMessage = isClearContextMessage;
|
|
8
|
+
exports.isTasksCancelMessage = isTasksCancelMessage;
|
|
9
|
+
exports.extractPushId = extractPushId;
|
|
10
|
+
exports.validateA2ARequest = validateA2ARequest;
|
|
11
|
+
const logger_js_1 = require("./xy-utils/logger.js");
|
|
12
|
+
/**
 * Parse an A2A JSON-RPC request into structured message data.
 * Throws if params, sessionId, or message are missing.
 */
function parseA2AMessage(request) {
    const { method, params, id: requestId } = request;
    if (!params) {
        throw new Error("A2A request missing params");
    }
    const sessionId = params.sessionId;
    const message = params.message;
    if (!sessionId || !message) {
        throw new Error("A2A request params missing required fields");
    }
    return {
        sessionId,
        // Task ID from params (unique per conversation/task)
        taskId: params.id,
        // Global unique message sequence ID from top-level request
        messageId: requestId,
        parts: message.parts || [],
        method,
    };
}
|
|
32
|
+
/**
 * Extract text content from message parts.
 * Joins all text parts with newlines and trims the result.
 */
function extractTextFromParts(parts) {
    const pieces = [];
    for (const part of parts) {
        if (part.kind === "text") {
            pieces.push(part.text);
        }
    }
    return pieces.join("\n").trim();
}
|
|
41
|
+
/**
 * Extract file parts from message parts.
 * Returns the `file` payload of every part whose kind is "file".
 */
function extractFileParts(parts) {
    const files = [];
    for (const part of parts) {
        if (part.kind === "file") {
            files.push(part.file);
        }
    }
    return files;
}
|
|
49
|
+
/**
 * Extract data events from message parts (for tool responses).
 * Returns the `data.event` payload of every "data" part that carries one.
 *
 * Fix: the previous version accessed `part.data.event` unguarded and threw a
 * TypeError when a data part arrived without a `data` payload — a case the
 * sibling `extractPushId` explicitly guards against. Optional chaining makes
 * such parts fall through the `undefined` filter instead of crashing.
 */
function extractDataEvents(parts) {
    return parts
        .filter((part) => part.kind === "data")
        .map((part) => part.data?.event)
        .filter((event) => event !== undefined);
}
|
|
58
|
+
/**
 * Check if message is a clearContext request.
 * Accepts both the camelCase and snake_case method spellings.
 */
function isClearContextMessage(method) {
    switch (method) {
        case "clearContext":
        case "clear_context":
            return true;
        default:
            return false;
    }
}
|
|
64
|
+
/**
 * Check if message is a tasks/cancel request.
 * Accepts both the slash and underscore method spellings.
 */
function isTasksCancelMessage(method) {
    switch (method) {
        case "tasks/cancel":
        case "tasks_cancel":
            return true;
        default:
            return false;
    }
}
|
|
70
|
+
/**
 * Extract push_id from message parts.
 * Looks for push_id in data parts under variables.systemVariables.push_id
 * Returns the first non-empty string found, or null.
 */
function extractPushId(parts) {
    for (const part of parts) {
        // Only data parts that actually carry a payload can hold a push_id.
        if (part.kind !== "data" || !part.data) {
            continue;
        }
        const candidate = part.data.variables?.systemVariables?.push_id;
        if (typeof candidate === "string" && candidate) {
            return candidate;
        }
    }
    return null;
}
|
|
85
|
+
/**
 * Validate A2A request structure.
 * Type guard: checks jsonrpc version, method, id, and params; logs a warning
 * for each specific failure (except a non-object request, which fails silently).
 *
 * Fix: the previous `if (!request.id)` check rejected the numeric id 0 (and
 * the empty string), both of which are legal JSON-RPC 2.0 ids (the spec allows
 * String or Number). Only a truly absent id is rejected now.
 */
function validateA2ARequest(request) {
    if (!request || typeof request !== "object") {
        return false;
    }
    if (request.jsonrpc !== "2.0") {
        logger_js_1.logger.warn("Invalid JSON-RPC version:", request.jsonrpc);
        return false;
    }
    if (!request.method || typeof request.method !== "string") {
        logger_js_1.logger.warn("Missing or invalid method");
        return false;
    }
    // Reject only null/undefined ids; 0 and "" are valid JSON-RPC ids.
    if (request.id === undefined || request.id === null) {
        logger_js_1.logger.warn("Missing request id");
        return false;
    }
    if (!request.params || typeof request.params !== "object") {
        logger_js_1.logger.warn("Missing or invalid params");
        return false;
    }
    return true;
}
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import type { OpenClawConfig, RuntimeEnv } from "openclaw/dist/plugin-sdk/index.js";
// Local alias kept for historical naming; identical to OpenClawConfig.
type ClawdbotConfig = OpenClawConfig;
export interface CreateXYReplyDispatcherParams {
    /** Full OpenClaw plugin configuration. */
    cfg: ClawdbotConfig;
    /** Runtime env providing log/error hooks. */
    runtime: RuntimeEnv;
    /** Session identifier the dispatcher replies into. */
    sessionId: string;
    /** Task id of the conversation being answered. */
    taskId: string;
    /** Message id of the request being answered. */
    messageId: string;
    /** Account id used to resolve per-account reply settings. */
    accountId: string;
}
/**
 * Create a reply dispatcher for XY channel messages.
 * Follows feishu pattern with status updates and streaming support.
 * Runtime is expected to be validated before calling this function.
 *
 * Returns an object exposing { dispatcher, replyOptions, markDispatchIdle,
 * startStatusInterval, stopStatusInterval } (typed `any` in the compiled dist).
 */
export declare function createXYReplyDispatcher(params: CreateXYReplyDispatcherParams): any;
export {};
|
|
@@ -0,0 +1,308 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.createXYReplyDispatcher = createXYReplyDispatcher;
|
|
4
|
+
const runtime_js_1 = require("./runtime.js");
|
|
5
|
+
const xy_formatter_js_1 = require("./xy-formatter.js");
|
|
6
|
+
const xy_config_js_1 = require("./xy-config.js");
|
|
7
|
+
/**
 * Create a reply dispatcher for XY channel messages.
 * Follows feishu pattern with status updates and streaming support.
 * Runtime is expected to be validated before calling this function.
 *
 * @param {object} params - { cfg, runtime, sessionId, taskId, messageId, accountId }.
 * @returns {{ dispatcher, replyOptions, markDispatchIdle, startStatusInterval, stopStatusInterval }}
 *   The wrapped dispatcher plus interval controls the caller must drive
 *   (call startStatusInterval right after creation).
 */
function createXYReplyDispatcher(params) {
    const { cfg, runtime, sessionId, taskId, messageId, accountId } = params;
    const log = runtime?.log ?? console.log;
    const error = runtime?.error ?? console.error;
    // NOTE(review): the next five log lines duplicate the creation banner and
    // the stack-trace capture twice; likely leftover debugging.
    log(`[DISPATCHER-CREATE] ******* Creating dispatcher for session=${sessionId}, taskId=${taskId}, messageId=${messageId} *******`);
    log(`[DISPATCHER-CREATE] Stack trace:`, new Error().stack?.split('\n').slice(1, 4).join('\n'));
    log(`[DISPATCHER-CREATE] ======== Creating reply dispatcher ========`);
    log(`[DISPATCHER-CREATE] sessionId: ${sessionId}, taskId: ${taskId}, messageId: ${messageId}`);
    log(`[DISPATCHER-CREATE] Stack trace:`, new Error().stack?.split('\n').slice(1, 4).join('\n'));
    // Get OpenClaw PluginRuntime (not XiaoYiRuntime)
    const xiaoYiRuntime = (0, runtime_js_1.getXiaoYiRuntime)();
    const core = xiaoYiRuntime.getPluginRuntime();
    // Resolve configuration
    const config = (0, xy_config_js_1.resolveXYConfig)(cfg);
    // Reply prefix context: not imported at runtime to avoid openclaw/plugin-sdk
    // module resolution issues in the CJS require chain. For a bot-to-bot A2A
    // channel the response prefix (model name badge) is not needed.
    const prefixContext = { responsePrefix: undefined, responsePrefixContextProvider: undefined, onModelSelected: undefined };
    // Handle for the periodic "still working" status update (fires every
    // 30 seconds — see startStatusInterval below).
    let statusUpdateInterval = null;
    // Track if we've sent any response
    let hasSentResponse = false;
    // Track if we've sent the final empty message
    let finalSent = false;
    // Accumulate all text from deliver calls
    let accumulatedText = "";
    /**
     * Start the status update interval.
     * Call this immediately after creating the dispatcher.
     * Fires every 30 seconds until stopStatusInterval is called.
     */
    const startStatusInterval = () => {
        log(`[STATUS INTERVAL] Starting interval for session ${sessionId}, taskId=${taskId}`);
        statusUpdateInterval = setInterval(() => {
            log(`[STATUS INTERVAL] Triggering status update for session ${sessionId}, taskId=${taskId}`);
            // Fire-and-forget with explicit catch so a failed update never
            // becomes an unhandled rejection.
            void (0, xy_formatter_js_1.sendStatusUpdate)({
                config,
                sessionId,
                taskId,
                messageId,
                text: "任务正在处理中,请稍后~",
                state: "working",
            }).catch((err) => {
                error(`Failed to send status update:`, err);
            });
        }, 30000); // 30 seconds
    };
    /**
     * Stop the status update interval (idempotent).
     */
    const stopStatusInterval = () => {
        if (statusUpdateInterval) {
            log(`[STATUS INTERVAL] Stopping interval for session ${sessionId}, taskId=${taskId}`);
            clearInterval(statusUpdateInterval);
            statusUpdateInterval = null;
            log(`[STATUS INTERVAL] Stopped interval for session ${sessionId}, taskId=${taskId}`);
        }
    };
    const { dispatcher, replyOptions, markDispatchIdle } = core.channel.reply.createReplyDispatcherWithTyping({
        responsePrefix: prefixContext.responsePrefix,
        responsePrefixContextProvider: prefixContext.responsePrefixContextProvider,
        humanDelay: core.channel.reply.resolveHumanDelayConfig(cfg, accountId),
        onReplyStart: () => {
            log(`[REPLY START] Reply started for session ${sessionId}, taskId=${taskId}`);
            // Status update interval is now managed externally
        },
        // Accumulates reply text (sent as one final message in onIdle) and
        // streams each chunk as a reasoningText update for real-time display.
        deliver: async (payload, info) => {
            const text = payload.text ?? "";
            // 🔍 Debug logging
            log(`[DELIVER] sessionId=${sessionId}, info.kind=${info?.kind}, text.length=${text.length}, text="${text.slice(0, 200)}"`);
            log(`[DELIVER] payload keys: ${Object.keys(payload).join(", ")}`);
            if (payload.mediaUrls) {
                // NOTE(review): mediaUrls are logged but never forwarded anywhere.
                log(`[DELIVER] mediaUrls: ${payload.mediaUrls.length} files`);
            }
            try {
                // Skip empty messages
                if (!text.trim()) {
                    log(`[DELIVER SKIP] Empty text, skipping`);
                    return;
                }
                // Accumulate text instead of sending immediately
                accumulatedText += text;
                hasSentResponse = true;
                log(`[DELIVER ACCUMULATE] Accumulated text, current length=${accumulatedText.length}`);
                // Also stream text as reasoningText for real-time display
                await (0, xy_formatter_js_1.sendReasoningTextUpdate)({
                    config,
                    sessionId,
                    taskId,
                    messageId,
                    text,
                });
                log(`[DELIVER] ✅ Sent deliver text as reasoningText update`);
            }
            catch (deliverError) {
                // Swallow deliver errors so one failed chunk doesn't kill the reply.
                error(`Failed to deliver message:`, deliverError);
            }
        },
        onError: async (err, info) => {
            runtime.error?.(`xy: ${info.kind} reply failed: ${String(err)}`);
            // Stop status updates
            stopStatusInterval();
            // Send error status if we haven't sent any response yet
            if (!hasSentResponse) {
                try {
                    await (0, xy_formatter_js_1.sendStatusUpdate)({
                        config,
                        sessionId,
                        taskId,
                        messageId,
                        text: "处理失败,请稍后重试",
                        state: "failed",
                    });
                }
                catch (statusError) {
                    error(`Failed to send error status:`, statusError);
                }
            }
        },
        // Finalizes the reply: flushes accumulated text (success path) or sends
        // a failure status + error response (no text was ever delivered).
        onIdle: async () => {
            log(`[ON_IDLE] Reply idle for session ${sessionId}, hasSentResponse=${hasSentResponse}, finalSent=${finalSent}`);
            // Send accumulated text with append=false and final=true
            if (hasSentResponse && !finalSent) {
                log(`[ON_IDLE] Sending accumulated text, length=${accumulatedText.length}`);
                try {
                    // Send status update before final message
                    await (0, xy_formatter_js_1.sendStatusUpdate)({
                        config,
                        sessionId,
                        taskId,
                        messageId,
                        text: "任务处理已完成~",
                        state: "completed",
                    });
                    log(`[ON_IDLE] ✅ Sent completion status update`);
                    await (0, xy_formatter_js_1.sendA2AResponse)({
                        config,
                        sessionId,
                        taskId,
                        messageId,
                        text: accumulatedText,
                        append: false,
                        final: true,
                    });
                    finalSent = true;
                    log(`[ON_IDLE] Sent accumulated text`);
                }
                catch (err) {
                    error(`[ON_IDLE] Failed to send accumulated text:`, err);
                }
            }
            else {
                log(`[ON_IDLE] Skipping final message: hasSentResponse=${hasSentResponse}, finalSent=${finalSent}`);
                // Task was interrupted - send failure status and error response
                // NOTE(review): this branch also runs when finalSent is already
                // true (idle fired twice) — confirm a second failure response is
                // the intended behavior in that case.
                try {
                    await (0, xy_formatter_js_1.sendStatusUpdate)({
                        config,
                        sessionId,
                        taskId,
                        messageId,
                        text: "任务处理中断了~",
                        state: "failed",
                    });
                    log(`[ON_IDLE] ✅ Sent failure status update`);
                    await (0, xy_formatter_js_1.sendA2AResponse)({
                        config,
                        sessionId,
                        taskId,
                        messageId,
                        text: "任务执行异常,请重试~",
                        append: false,
                        final: true,
                    });
                    finalSent = true;
                    log(`[ON_IDLE] ✅ Sent error response`);
                }
                catch (err) {
                    error(`[ON_IDLE] Failed to send failure status and error response:`, err);
                }
            }
            // Stop status updates
            stopStatusInterval();
        },
        onCleanup: () => {
            log(`[ON_CLEANUP] Reply cleanup for session ${sessionId}, hasSentResponse=${hasSentResponse}, finalSent=${finalSent}`);
        },
    });
    return {
        dispatcher,
        replyOptions: {
            ...replyOptions,
            onModelSelected: prefixContext.onModelSelected,
            // 🔧 Tool execution start callback
            onToolStart: async ({ name, phase }) => {
                log(`[TOOL START] 🔧 Tool execution started/updated: name=${name}, phase=${phase}, session=${sessionId}, taskId=${taskId}`);
                if (phase === "start") {
                    const toolName = name || "unknown";
                    try {
                        await (0, xy_formatter_js_1.sendStatusUpdate)({
                            config,
                            sessionId,
                            taskId,
                            messageId,
                            text: `正在使用工具: ${toolName}...`,
                            state: "working",
                        });
                        log(`[TOOL START] ✅ Sent status update for tool start: ${toolName}`);
                    }
                    catch (err) {
                        error(`[TOOL START] ❌ Failed to send tool start status:`, err);
                    }
                }
            },
            // 🔧 Tool execution result callback
            onToolResult: async (payload) => {
                const text = payload.text ?? "";
                const hasMedia = Boolean(payload.mediaUrl || (payload.mediaUrls?.length ?? 0) > 0);
                log(`[TOOL RESULT] 🔧 Tool execution result received: session=${sessionId}, taskId=${taskId}`);
                log(`[TOOL RESULT] - text.length=${text.length}`);
                log(`[TOOL RESULT] - hasMedia=${hasMedia}`);
                log(`[TOOL RESULT] - isError=${payload.isError}`);
                if (text.length > 0) {
                    log(`[TOOL RESULT] - text preview: "${text.slice(0, 200)}"`);
                }
                try {
                    if (text.length > 0 || hasMedia) {
                        const resultText = text.length > 0 ? text : "工具执行完成";
                        await (0, xy_formatter_js_1.sendStatusUpdate)({
                            config,
                            sessionId,
                            taskId,
                            messageId,
                            text: resultText,
                            state: "working",
                        });
                        log(`[TOOL RESULT] ✅ Sent tool result as status update`);
                    }
                }
                catch (err) {
                    error(`[TOOL RESULT] ❌ Failed to send tool result status:`, err);
                }
            },
            // 🧠 Reasoning/thinking process streaming callback
            // NOTE(review): currently log-only; the forwarding call below is
            // commented out, so reasoning chunks are not sent anywhere.
            onReasoningStream: async (payload) => {
                const text = payload.text ?? "";
                log(`[REASONING STREAM] 🧠 Reasoning/thinking chunk received: session=${sessionId}, taskId=${taskId}`);
                log(`[REASONING STREAM] - text.length=${text.length}`);
                if (text.length > 0) {
                    log(`[REASONING STREAM] - text preview: "${text.slice(0, 200)}"`);
                }
                // try {
                //     if (text.length > 0) {
                //         await sendReasoningTextUpdate({
                //             config,
                //             sessionId,
                //             taskId,
                //             messageId,
                //             text,
                //         });
                //         log(`[REASONING STREAM] ✅ Sent reasoning chunk as reasoningText update`);
                //     }
                // } catch (err) {
                //     error(`[REASONING STREAM] ❌ Failed to send reasoning chunk reasoningText:`, err);
                // }
            },
            // 📝 Partial reply streaming callback (real-time preview)
            onPartialReply: async (payload) => {
                const text = payload.text ?? "";
                const hasMedia = Boolean(payload.mediaUrl || (payload.mediaUrls?.length ?? 0) > 0);
                log(`[PARTIAL REPLY] 📝 Partial reply chunk received: session=${sessionId}, taskId=${taskId}`);
                log(`[PARTIAL REPLY] - text.length=${text.length}`);
                log(`[PARTIAL REPLY] - hasMedia=${hasMedia}`);
                if (text.length > 0) {
                    log(`[PARTIAL REPLY] - text preview: "${text.slice(0, 200)}"`);
                }
                try {
                    if (text.length > 0) {
                        await (0, xy_formatter_js_1.sendReasoningTextUpdate)({
                            config,
                            sessionId,
                            taskId,
                            messageId,
                            text,
                            append: false,
                        });
                        log(`[PARTIAL REPLY] ✅ Sent partial reply as reasoningText update (append=false)`);
                    }
                }
                catch (err) {
                    error(`[PARTIAL REPLY] ❌ Failed to send partial reply reasoningText:`, err);
                }
            },
        },
        markDispatchIdle,
        startStatusInterval, // Expose this to be called immediately
        stopStatusInterval, // Expose this for manual control if needed
    };
}
|