@nextclaw/channel-plugin-feishu 0.2.29-beta.0 → 0.2.29-beta.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (114)
  1. package/dist/index.d.ts +23 -0
  2. package/dist/index.js +45 -0
  3. package/dist/src/accounts.js +141 -0
  4. package/dist/src/app-scope-checker.js +36 -0
  5. package/dist/src/async.js +34 -0
  6. package/dist/src/auth-errors.js +72 -0
  7. package/dist/src/bitable.js +495 -0
  8. package/dist/src/bot.d.ts +35 -0
  9. package/dist/src/bot.js +941 -0
  10. package/dist/src/calendar-calendar.js +54 -0
  11. package/dist/src/calendar-event-attendee.js +98 -0
  12. package/dist/src/calendar-event.js +193 -0
  13. package/dist/src/calendar-freebusy.js +40 -0
  14. package/dist/src/calendar-shared.js +23 -0
  15. package/dist/src/calendar.js +16 -0
  16. package/dist/src/card-action.js +49 -0
  17. package/dist/src/channel.d.ts +7 -0
  18. package/dist/src/channel.js +413 -0
  19. package/dist/src/chat-schema.js +25 -0
  20. package/dist/src/chat.js +87 -0
  21. package/dist/src/client.d.ts +16 -0
  22. package/dist/src/client.js +112 -0
  23. package/dist/src/config-schema.d.ts +357 -0
  24. package/dist/src/dedup.js +126 -0
  25. package/dist/src/device-flow.js +109 -0
  26. package/dist/src/directory.js +101 -0
  27. package/dist/src/doc-schema.js +148 -0
  28. package/dist/src/docx-batch-insert.js +104 -0
  29. package/dist/src/docx-color-text.js +80 -0
  30. package/dist/src/docx-table-ops.js +197 -0
  31. package/dist/src/docx.js +858 -0
  32. package/dist/src/domains.js +14 -0
  33. package/dist/src/drive-schema.js +41 -0
  34. package/dist/src/drive.js +126 -0
  35. package/dist/src/dynamic-agent.js +93 -0
  36. package/dist/src/external-keys.js +13 -0
  37. package/dist/src/feishu-fetch.js +12 -0
  38. package/dist/src/identity.js +92 -0
  39. package/dist/src/lark-ticket.js +11 -0
  40. package/dist/src/media.d.ts +75 -0
  41. package/dist/src/media.js +304 -0
  42. package/dist/src/mention.d.ts +52 -0
  43. package/dist/src/mention.js +82 -0
  44. package/dist/src/monitor.account.d.ts +1 -0
  45. package/dist/src/monitor.account.js +393 -0
  46. package/dist/src/monitor.d.ts +11 -0
  47. package/dist/src/monitor.js +58 -0
  48. package/dist/src/monitor.startup.js +24 -0
  49. package/dist/src/monitor.state.d.ts +1 -0
  50. package/dist/src/monitor.state.js +80 -0
  51. package/dist/src/monitor.transport.js +167 -0
  52. package/dist/src/nextclaw-sdk/account-id.js +15 -0
  53. package/dist/src/nextclaw-sdk/core-channel.js +150 -0
  54. package/dist/src/nextclaw-sdk/core-pairing.js +151 -0
  55. package/dist/src/nextclaw-sdk/dedupe.js +164 -0
  56. package/dist/src/nextclaw-sdk/feishu.d.ts +1 -0
  57. package/dist/src/nextclaw-sdk/feishu.js +14 -0
  58. package/dist/src/nextclaw-sdk/history.js +69 -0
  59. package/dist/src/nextclaw-sdk/network-body.js +180 -0
  60. package/dist/src/nextclaw-sdk/network-fetch.js +63 -0
  61. package/dist/src/nextclaw-sdk/network-webhook.js +126 -0
  62. package/dist/src/nextclaw-sdk/network.js +4 -0
  63. package/dist/src/nextclaw-sdk/runtime-store.js +21 -0
  64. package/dist/src/nextclaw-sdk/secrets-config.js +65 -0
  65. package/dist/src/nextclaw-sdk/secrets-core.d.ts +1 -0
  66. package/dist/src/nextclaw-sdk/secrets-core.js +68 -0
  67. package/dist/src/nextclaw-sdk/secrets-prompt.js +193 -0
  68. package/dist/src/nextclaw-sdk/secrets.d.ts +1 -0
  69. package/dist/src/nextclaw-sdk/secrets.js +4 -0
  70. package/dist/src/nextclaw-sdk/types.d.ts +242 -0
  71. package/dist/src/oauth.js +171 -0
  72. package/dist/src/onboarding.js +381 -0
  73. package/dist/src/outbound.js +150 -0
  74. package/dist/src/perm-schema.js +49 -0
  75. package/dist/src/perm.js +90 -0
  76. package/dist/src/policy.js +61 -0
  77. package/dist/src/post.js +160 -0
  78. package/dist/src/probe.d.ts +11 -0
  79. package/dist/src/probe.js +85 -0
  80. package/dist/src/raw-request.js +24 -0
  81. package/dist/src/reactions.d.ts +67 -0
  82. package/dist/src/reactions.js +91 -0
  83. package/dist/src/reply-dispatcher.js +250 -0
  84. package/dist/src/runtime.js +5 -0
  85. package/dist/src/secret-input.js +3 -0
  86. package/dist/src/send-result.js +12 -0
  87. package/dist/src/send-target.js +22 -0
  88. package/dist/src/send.d.ts +51 -0
  89. package/dist/src/send.js +265 -0
  90. package/dist/src/sheets-shared.js +193 -0
  91. package/dist/src/sheets.js +95 -0
  92. package/dist/src/streaming-card.js +263 -0
  93. package/dist/src/targets.js +39 -0
  94. package/dist/src/task-comment.js +76 -0
  95. package/dist/src/task-shared.js +13 -0
  96. package/dist/src/task-subtask.js +79 -0
  97. package/dist/src/task-task.js +144 -0
  98. package/dist/src/task-tasklist.js +136 -0
  99. package/dist/src/task.js +16 -0
  100. package/dist/src/token-store.js +154 -0
  101. package/dist/src/tool-account.js +65 -0
  102. package/dist/src/tool-result.js +18 -0
  103. package/dist/src/tool-scopes.js +62 -0
  104. package/dist/src/tools-config.js +30 -0
  105. package/dist/src/types.d.ts +43 -0
  106. package/dist/src/typing.js +145 -0
  107. package/dist/src/uat-client.js +102 -0
  108. package/dist/src/user-tool-client.js +132 -0
  109. package/dist/src/user-tool-helpers.js +110 -0
  110. package/dist/src/user-tool-result.js +10 -0
  111. package/dist/src/wiki-schema.js +45 -0
  112. package/dist/src/wiki.js +144 -0
  113. package/package.json +8 -4
  114. package/index.ts +0 -75
@@ -0,0 +1,858 @@
1
+ import { createFeishuToolClient, resolveFeishuToolAccount, resolveRegisteredFeishuToolsConfig } from "./tool-account.js";
2
+ import { getFeishuRuntime } from "./runtime.js";
3
+ import { FeishuDocSchema } from "./doc-schema.js";
4
+ import { cleanBlocksForDescendant, deleteTableColumns, deleteTableRows, insertTableColumn, insertTableRow, mergeTableCells } from "./docx-table-ops.js";
5
+ import { insertBlocksInBatches } from "./docx-batch-insert.js";
6
+ import { updateColorText } from "./docx-color-text.js";
7
+ import { Type } from "@sinclair/typebox";
8
+ import { existsSync, promises } from "node:fs";
9
+ import { basename, isAbsolute } from "node:path";
10
+ import { homedir } from "node:os";
11
+ //#region src/docx.ts
12
/** Wrap a payload as a tool result: pretty-printed JSON text plus the raw details. */
function json(data) {
  const text = JSON.stringify(data, null, 2);
  const content = [{ type: "text", text }];
  return { content, details: data };
}
21
/** Extract http(s) image URLs from markdown image syntax `![alt](url)`, in order. */
function extractImageUrls(markdown) {
  const urls = [];
  for (const m of markdown.matchAll(/!\[[^\]]*\]\(([^)]+)\)/g)) {
    const candidate = m[1].trim();
    if (candidate.startsWith("http://") || candidate.startsWith("https://")) {
      urls.push(candidate);
    }
  }
  return urls;
}
32
/** Human-readable names for the Feishu docx block_type codes handled in this module. */
const BLOCK_TYPE_NAMES = {
  1: "Page",
  2: "Text",
  3: "Heading1",
  4: "Heading2",
  5: "Heading3",
  12: "Bullet",
  13: "Ordered",
  14: "Code",
  15: "Quote",
  17: "Todo",
  18: "Bitable",
  21: "Diagram",
  22: "Divider",
  23: "File",
  27: "Image",
  30: "Sheet",
  31: "Table",
  32: "TableCell"
};
/** Block types dropped by cleanBlocksForInsert before Children-API insertion (31 Table, 32 TableCell). */
const UNSUPPORTED_CREATE_TYPES = new Set([31, 32]);
/**
 * Clean blocks for insertion (remove unsupported types).
 *
 * @param blocks - converted docx blocks
 * @returns {{cleaned: Array, skipped: string[]}} surviving blocks and the
 *   human-readable type names of every block that was dropped (one entry per
 *   dropped block, duplicates included).
 */
function cleanBlocksForInsert(blocks) {
  const skipped = [];
  const cleaned = blocks.filter((block) => {
    if (UNSUPPORTED_CREATE_TYPES.has(block.block_type)) {
      skipped.push(BLOCK_TYPE_NAMES[block.block_type] || `type_${block.block_type}`);
      return false;
    }
    return true;
  });
  // The previous implementation also stripped table.merge_info from type-31
  // blocks in a .map after this filter, but type 31 can never survive the
  // filter above, so that branch was unreachable and has been removed.
  return { cleaned, skipped };
}
77
/** Maximum recursion depth for the split-and-retry markdown conversion fallback. */
const MAX_CONVERT_RETRY_DEPTH = 8;
/**
 * Convert markdown through the Feishu convert API.
 * Throws with the API message on a non-zero response code.
 * @returns {{blocks: Array, firstLevelBlockIds: string[]}}
 */
async function convertMarkdown(client, markdown) {
  const request = { data: { content_type: "markdown", content: markdown } };
  const res = await client.docx.document.convert(request);
  if (res.code !== 0) throw new Error(res.msg);
  const payload = res.data ?? {};
  return {
    blocks: payload.blocks ?? [],
    firstLevelBlockIds: payload.first_level_block_ids ?? []
  };
}
89
/**
 * Order blocks so the first-level blocks come first, in the order given by
 * firstLevelIds, followed by every remaining block in its original order.
 * Ids in firstLevelIds with no matching block are dropped. Returns the input
 * array unchanged when firstLevelIds is empty or missing.
 */
function sortBlocksByFirstLevel(blocks, firstLevelIds) {
  if (!firstLevelIds || firstLevelIds.length === 0) return blocks;
  // Index blocks by id once (keeping the FIRST occurrence, matching the old
  // .find() semantics) instead of a linear find() per id — O(n) vs O(n^2).
  const byId = new Map();
  for (const b of blocks) {
    if (!byId.has(b.block_id)) byId.set(b.block_id, b);
  }
  const sorted = firstLevelIds.map((id) => byId.get(id)).filter(Boolean);
  const sortedIds = new Set(firstLevelIds);
  const remaining = blocks.filter((b) => !sortedIds.has(b.block_id));
  return [...sorted, ...remaining];
}
96
/**
 * Insert blocks under a parent via the Children create API, one request per
 * block. Table/TableCell blocks are removed first (see cleanBlocksForInsert);
 * their type names are reported in `skipped`.
 *
 * @param parentBlockId - target parent; defaults to the document root (docToken)
 * @param index - optional insert position; each block is placed at index+offset
 *   so the batch keeps its relative order. When omitted, `index` is not sent
 *   and placement is left to the API default.
 * @returns {{children: Array, skipped: string[]}} blocks reported created by
 *   the API plus the names of the dropped block types.
 * @throws Error with the API message on the first non-zero response code.
 */
async function insertBlocks(client, docToken, blocks, parentBlockId, index) {
  const { cleaned, skipped } = cleanBlocksForInsert(blocks);
  const blockId = parentBlockId ?? docToken;
  if (cleaned.length === 0) return {
    children: [],
    skipped
  };
  const allInserted = [];
  // One request per block; offset keeps the original ordering under `index`.
  for (const [offset, block] of cleaned.entries()) {
    const res = await client.docx.documentBlockChildren.create({
      path: {
        document_id: docToken,
        block_id: blockId
      },
      data: {
        children: [block],
        ...index !== void 0 ? { index: index + offset } : {}
      }
    });
    if (res.code !== 0) throw new Error(res.msg);
    allInserted.push(...res.data?.children ?? []);
  }
  return {
    children: allInserted,
    skipped
  };
}
123
/**
 * Split markdown into chunks at top-level headings (# or ##) to stay within
 * API content limits. Heading-like lines inside fenced code blocks are ignored.
 */
function splitMarkdownByHeadings(markdown) {
  const chunks = [];
  let current = [];
  let inFence = false;
  const flush = () => {
    if (current.length > 0) {
      chunks.push(current.join("\n"));
      current = [];
    }
  };
  for (const line of markdown.split("\n")) {
    if (/^(`{3,}|~{3,})/.test(line)) inFence = !inFence;
    if (!inFence && /^#{1,2}\s/.test(line)) flush();
    current.push(line);
  }
  flush();
  return chunks;
}
140
/**
 * Split markdown by size, preferring to break outside fenced code blocks when
 * possible. Falls back to bisecting by line count when line-based chunking
 * produced a single chunk; returns the input unchanged when it already fits.
 */
function splitMarkdownBySize(markdown, maxChars) {
  if (markdown.length <= maxChars) return [markdown];
  const lines = markdown.split("\n");
  const chunks = [];
  let buffer = [];
  let bufferedChars = 0;
  let insideFence = false;
  for (const line of lines) {
    if (/^(`{3,}|~{3,})/.test(line)) insideFence = !insideFence;
    const cost = line.length + 1; // +1 accounts for the joining newline
    const overflow = bufferedChars + cost > maxChars;
    if (buffer.length > 0 && overflow && !insideFence) {
      chunks.push(buffer.join("\n"));
      buffer = [];
      bufferedChars = 0;
    }
    buffer.push(line);
    bufferedChars += cost;
  }
  if (buffer.length > 0) chunks.push(buffer.join("\n"));
  if (chunks.length > 1) return chunks;
  // Could not break on line boundaries (e.g. one giant fenced block): bisect.
  const mid = Math.floor(lines.length / 2);
  if (mid <= 0 || mid >= lines.length) return [markdown];
  return [lines.slice(0, mid).join("\n"), lines.slice(mid).join("\n")];
}
166
/**
 * Convert markdown, and on ANY conversion error recursively split the input
 * roughly in half and retry each piece, concatenating the results in order.
 * Gives up (rethrowing the original error) once the recursion reaches
 * MAX_CONVERT_RETRY_DEPTH, the input is shorter than 2 chars, or the input
 * cannot be split further.
 */
async function convertMarkdownWithFallback(client, markdown, depth = 0) {
  try {
    return await convertMarkdown(client, markdown);
  } catch (error) {
    // Too deep or too small to split — surface the original failure.
    if (depth >= MAX_CONVERT_RETRY_DEPTH || markdown.length < 2) throw error;
    const chunks = splitMarkdownBySize(markdown, Math.max(256, Math.floor(markdown.length / 2)));
    if (chunks.length <= 1) throw error;
    const blocks = [];
    const firstLevelBlockIds = [];
    // Sequential conversion keeps block order stable across chunks.
    for (const chunk of chunks) {
      const converted = await convertMarkdownWithFallback(client, chunk, depth + 1);
      blocks.push(...converted.blocks);
      firstLevelBlockIds.push(...converted.firstLevelBlockIds);
    }
    return {
      blocks,
      firstLevelBlockIds
    };
  }
}
186
/**
 * Convert markdown in chunks to avoid document.convert content size limits.
 * Splits at top-level headings, converts each chunk (with the size-splitting
 * retry fallback), reorders each chunk so its first-level blocks lead, and
 * concatenates everything in chunk order.
 */
async function chunkedConvertMarkdown(client, markdown) {
  const chunks = splitMarkdownByHeadings(markdown);
  const allBlocks = [];
  const allFirstLevelBlockIds = [];
  for (const chunk of chunks) {
    const { blocks, firstLevelBlockIds } = await convertMarkdownWithFallback(client, chunk);
    // Per-chunk ordering: first-level blocks first, then the rest.
    const sorted = sortBlocksByFirstLevel(blocks, firstLevelBlockIds);
    allBlocks.push(...sorted);
    allFirstLevelBlockIds.push(...firstLevelBlockIds);
  }
  return {
    blocks: allBlocks,
    firstLevelBlockIds: allFirstLevelBlockIds
  };
}
202
/**
 * Insert blocks using the Descendant API (supports tables, nested lists, large docs).
 * Unlike the Children API, this supports block_type 31/32 (Table/TableCell).
 *
 * @param parentBlockId - Parent block to insert into (defaults to docToken = document root)
 * @param index - Position within parent's children (-1 = end, 0 = first)
 * @returns {{children: Array}} blocks the API reports as created; empty when
 *   cleaning left nothing to insert.
 * @throws Error carrying both the API message and code on failure.
 */
async function insertBlocksWithDescendant(client, docToken, blocks, firstLevelBlockIds, { parentBlockId = docToken, index = -1 } = {}) {
  const descendants = cleanBlocksForDescendant(blocks);
  if (descendants.length === 0) return { children: [] };
  const res = await client.docx.documentBlockDescendant.create({
    path: {
      document_id: docToken,
      block_id: parentBlockId
    },
    data: {
      // children_id names which descendants are direct children of the parent.
      children_id: firstLevelBlockIds,
      descendants,
      index
    }
  });
  if (res.code !== 0) throw new Error(`${res.msg} (code: ${res.code})`);
  return { children: res.data?.children ?? [] };
}
226
/**
 * Delete every direct child of the document root (everything except the Page
 * block, block_type 1). Returns the number of child blocks counted for deletion.
 * NOTE(review): documentBlock.list is read without pagination here, while
 * insertDoc pages through children with page_token — confirm very large
 * documents are fully cleared by a single list call.
 */
async function clearDocumentContent(client, docToken) {
  const existing = await client.docx.documentBlock.list({ path: { document_id: docToken } });
  if (existing.code !== 0) throw new Error(existing.msg);
  // Direct children of the root have parent_id === docToken; skip the Page block itself.
  const childIds = existing.data?.items?.filter((b) => b.parent_id === docToken && b.block_type !== 1).map((b) => b.block_id) ?? [];
  if (childIds.length > 0) {
    // batchDelete addresses the root's children by index range, not by id.
    const res = await client.docx.documentBlockChildren.batchDelete({
      path: {
        document_id: docToken,
        block_id: docToken
      },
      data: {
        start_index: 0,
        end_index: childIds.length
      }
    });
    if (res.code !== 0) throw new Error(res.msg);
  }
  return childIds.length;
}
245
/**
 * Upload an image buffer as drive media attached to a docx image block.
 * Returns the resulting file token; throws when the API yields none.
 */
async function uploadImageToDocx(client, blockId, imageBuffer, fileName, docToken) {
  const data = {
    file_name: fileName,
    parent_type: "docx_image",
    parent_node: blockId,
    size: imageBuffer.length,
    file: imageBuffer
  };
  if (docToken) {
    data.extra = JSON.stringify({ drive_route_token: docToken });
  }
  const result = await client.drive.media.uploadAll({ data });
  const fileToken = result?.file_token;
  if (!fileToken) throw new Error("Image upload failed: no file_token returned");
  return fileToken;
}
257
/** Fetch a remote image via the runtime's media fetcher and return its raw buffer. */
async function downloadImage(url, maxBytes) {
  const runtime = getFeishuRuntime();
  const fetched = await runtime.channel.media.fetchRemoteMedia({ url, maxBytes });
  return fetched.buffer;
}
263
/**
 * Resolve an upload source to a { buffer, fileName } pair.
 *
 * Exactly one source may be provided:
 *  - url: fetched through the runtime's remote-media fetcher
 *  - filePath: read from disk
 *  - imageInput: a data URI, a local path (~/, ./, ../, or absolute), or bare base64
 *
 * explicitFileName overrides the derived file name when given. Throws when
 * multiple sources are supplied, input is malformed, or the (estimated)
 * decoded size exceeds maxBytes.
 */
async function resolveUploadInput(url, filePath, maxBytes, explicitFileName, imageInput) {
  const inputSources = [
    url ? "url" : null,
    filePath ? "file_path" : null,
    imageInput ? "image" : null
  ].filter(Boolean);
  if (inputSources.length > 1) throw new Error(`Provide only one image source; got: ${inputSources.join(", ")}`);
  // Case 1: data URI — data:<mime>;base64,<payload>
  if (imageInput?.startsWith("data:")) {
    const commaIdx = imageInput.indexOf(",");
    if (commaIdx === -1) throw new Error("Invalid data URI: missing comma separator.");
    const header = imageInput.slice(0, commaIdx);
    const data = imageInput.slice(commaIdx + 1);
    if (!header.includes(";base64")) throw new Error("Invalid data URI: missing ';base64' marker. Expected format: data:image/png;base64,<base64data>");
    const trimmedData = data.trim();
    // NOTE: this alphabet check rejects payloads with embedded whitespace/newlines.
    if (trimmedData.length === 0 || !/^[A-Za-z0-9+/]+=*$/.test(trimmedData)) throw new Error(`Invalid data URI: base64 payload contains characters outside the standard alphabet.`);
    const ext = header.match(/data:([^;]+)/)?.[1]?.split("/")[1] ?? "png";
    // Estimate decoded size before allocating the buffer (4 base64 chars ~ 3 bytes).
    const estimatedBytes = Math.ceil(trimmedData.length * 3 / 4);
    if (estimatedBytes > maxBytes) throw new Error(`Image data URI exceeds limit: estimated ${estimatedBytes} bytes > ${maxBytes} bytes`);
    return {
      buffer: Buffer.from(trimmedData, "base64"),
      fileName: explicitFileName ?? `image.${ext}`
    };
  }
  // Case 2: imageInput that looks like a filesystem path.
  if (imageInput) {
    const candidate = imageInput.startsWith("~") ? imageInput.replace(/^~/, homedir()) : imageInput;
    const unambiguousPath = imageInput.startsWith("~") || imageInput.startsWith("./") || imageInput.startsWith("../");
    const absolutePath = isAbsolute(imageInput);
    // Precedence is unambiguousPath || (absolutePath && exists): an absolute
    // path that does not exist falls through to the explicit error below,
    // while an unambiguous (~, ./, ../) path is read unconditionally.
    if (unambiguousPath || absolutePath && existsSync(candidate)) {
      const buffer = await promises.readFile(candidate);
      if (buffer.length > maxBytes) throw new Error(`Local file exceeds limit: ${buffer.length} bytes > ${maxBytes} bytes`);
      return {
        buffer,
        fileName: explicitFileName ?? basename(candidate)
      };
    }
    if (absolutePath && !existsSync(candidate)) throw new Error(`File not found: "${candidate}". If you intended to pass image binary data, use a data URI instead: data:image/jpeg;base64,...`);
  }
  // Case 3: remaining imageInput is treated as bare base64.
  if (imageInput) {
    const trimmed = imageInput.trim();
    if (trimmed.length === 0 || !/^[A-Za-z0-9+/]+=*$/.test(trimmed)) throw new Error("Invalid base64: image input contains characters outside the standard base64 alphabet. Use a data URI (data:image/png;base64,...) or a local file path instead.");
    const estimatedBytes = Math.ceil(trimmed.length * 3 / 4);
    if (estimatedBytes > maxBytes) throw new Error(`Base64 image exceeds limit: estimated ${estimatedBytes} bytes > ${maxBytes} bytes`);
    const buffer = Buffer.from(trimmed, "base64");
    if (buffer.length === 0) throw new Error("Base64 image decoded to empty buffer; check the input.");
    return {
      buffer,
      fileName: explicitFileName ?? "image.png"
    };
  }
  // Case 4: url or filePath.
  if (!url && !filePath) throw new Error("Either url, file_path, or image (base64/data URI) must be provided");
  if (url && filePath) throw new Error("Provide only one of url or file_path");
  if (url) {
    // maxBytes enforcement for remote fetches is delegated to the runtime fetcher.
    const fetched = await getFeishuRuntime().channel.media.fetchRemoteMedia({
      url,
      maxBytes
    });
    const guessed = new URL(url).pathname.split("/").pop() || "upload.bin";
    return {
      buffer: fetched.buffer,
      fileName: explicitFileName || guessed
    };
  }
  const buffer = await promises.readFile(filePath);
  if (buffer.length > maxBytes) throw new Error(`Local file exceeds limit: ${buffer.length} bytes > ${maxBytes} bytes`);
  return {
    buffer,
    fileName: explicitFileName || basename(filePath)
  };
}
332
/**
 * Pair markdown image URLs (in order) with the inserted Image blocks
 * (block_type 27, in order), then download each URL, upload it as drive media,
 * and patch the block to reference the uploaded token. Best-effort: a failed
 * image is logged and skipped. Returns the number successfully processed.
 */
async function processImages(client, docToken, markdown, insertedBlocks, maxBytes) {
  const imageUrls = extractImageUrls(markdown);
  if (imageUrls.length === 0) return 0;
  const imageBlocks = insertedBlocks.filter((b) => b.block_type === 27);
  let processed = 0;
  // Positional pairing: extra urls or extra image blocks are simply ignored.
  for (let i = 0; i < Math.min(imageUrls.length, imageBlocks.length); i++) {
    const url = imageUrls[i];
    const blockId = imageBlocks[i].block_id;
    try {
      const fileToken = await uploadImageToDocx(client, blockId, await downloadImage(url, maxBytes), new URL(url).pathname.split("/").pop() || `image_${i}.png`, docToken);
      await client.docx.documentBlock.patch({
        path: {
          document_id: docToken,
          block_id: blockId
        },
        data: { replace_image: { token: fileToken } }
      });
      processed++;
    } catch (err) {
      // A failed image leaves its block unpatched but does not abort the write.
      console.error(`Failed to process image ${url}:`, err);
    }
  }
  return processed;
}
356
/**
 * Create an empty image block, resolve the image source (url / file path /
 * base64 / data URI), upload it as drive media, and patch the block to show
 * the uploaded image. Returns block id, file token, file name, and byte size.
 */
async function uploadImageBlock(client, docToken, maxBytes, url, filePath, parentBlockId, filename, index, imageInput) {
  // Step 1: create a placeholder image block at the requested position (-1 = append).
  const insertRes = await client.docx.documentBlockChildren.create({
    path: {
      document_id: docToken,
      block_id: parentBlockId ?? docToken
    },
    params: { document_revision_id: -1 },
    data: {
      children: [{
        block_type: 27,
        image: {}
      }],
      index: index ?? -1
    }
  });
  if (insertRes.code !== 0) throw new Error(`Failed to create image block: ${insertRes.msg}`);
  const imageBlockId = insertRes.data?.children?.find((b) => b.block_type === 27)?.block_id;
  if (!imageBlockId) throw new Error("Failed to create image block");
  // Step 2: resolve the source to a buffer and upload it bound to the new block.
  const upload = await resolveUploadInput(url, filePath, maxBytes, filename, imageInput);
  const fileToken = await uploadImageToDocx(client, imageBlockId, upload.buffer, upload.fileName, docToken);
  // Step 3: point the image block at the uploaded media.
  const patchRes = await client.docx.documentBlock.patch({
    path: {
      document_id: docToken,
      block_id: imageBlockId
    },
    data: { replace_image: { token: fileToken } }
  });
  if (patchRes.code !== 0) throw new Error(patchRes.msg);
  return {
    success: true,
    block_id: imageBlockId,
    file_token: fileToken,
    file_name: upload.fileName,
    size: upload.buffer.length
  };
}
392
/**
 * Upload a file for a document. Direct file-block creation is not supported by
 * the Feishu API (see the returned note), so the file is uploaded to drive
 * with the document as parent and the file_token is returned for reference.
 * A markdown-link placeholder block is inserted and then deleted along the way.
 */
async function uploadFileBlock(client, docToken, maxBytes, url, filePath, parentBlockId, filename) {
  const blockId = parentBlockId ?? docToken;
  const upload = await resolveUploadInput(url, filePath, maxBytes, filename);
  // Insert a temporary link block named after the file...
  const converted = await convertMarkdown(client, `[${upload.fileName}](https://example.com/placeholder)`);
  const { children: inserted } = await insertBlocks(client, docToken, sortBlocksByFirstLevel(converted.blocks, converted.firstLevelBlockIds), blockId);
  const placeholderBlock = inserted[0];
  if (!placeholderBlock?.block_id) throw new Error("Failed to create placeholder block for file upload");
  const parentId = placeholderBlock.parent_id ?? blockId;
  const childrenRes = await client.docx.documentBlockChildren.get({ path: {
    document_id: docToken,
    block_id: parentId
  } });
  if (childrenRes.code !== 0) throw new Error(childrenRes.msg);
  // ...then locate it among its siblings and delete it again (batchDelete
  // addresses children by index range, so the index must be looked up first).
  const placeholderIdx = (childrenRes.data?.items ?? []).findIndex((item) => item.block_id === placeholderBlock.block_id);
  if (placeholderIdx >= 0) {
    const deleteRes = await client.docx.documentBlockChildren.batchDelete({
      path: {
        document_id: docToken,
        block_id: parentId
      },
      data: {
        start_index: placeholderIdx,
        end_index: placeholderIdx + 1
      }
    });
    if (deleteRes.code !== 0) throw new Error(deleteRes.msg);
  }
  // Upload the actual file to drive, parented on the document itself.
  const fileToken = (await client.drive.media.uploadAll({ data: {
    file_name: upload.fileName,
    parent_type: "docx_file",
    parent_node: docToken,
    size: upload.buffer.length,
    file: upload.buffer
  } }))?.file_token;
  if (!fileToken) throw new Error("File upload failed: no file_token returned");
  return {
    success: true,
    file_token: fileToken,
    file_name: upload.fileName,
    size: upload.buffer.length,
    note: "File uploaded to drive. Use the file_token to reference it. Direct file block creation is not supported by the Feishu API."
  };
}
435
/**
 * block_type codes whose content is not represented in the document's
 * plain-text raw content (names per BLOCK_TYPE_NAMES); used by readDoc to
 * hint that list_blocks is needed for full content.
 */
const STRUCTURED_BLOCK_TYPES = new Set([
  14, // Code
  18, // Bitable
  21, // Diagram
  23, // File
  27, // Image
  30, // Sheet
  31, // Table
  32 // TableCell
]);
445
/**
 * Read a document: plain-text content, title/revision metadata, and per-type
 * block counts. Adds a hint when structured blocks (code, tables, images, ...)
 * exist, since those are absent from the plain text.
 * NOTE(review): only contentRes.code is checked — a failed document.get or
 * documentBlock.list silently yields undefined title/revision and empty block
 * stats; confirm this partial-success behavior is intended.
 */
async function readDoc(client, docToken) {
  // The three reads are independent, so they run in parallel.
  const [contentRes, infoRes, blocksRes] = await Promise.all([
    client.docx.document.rawContent({ path: { document_id: docToken } }),
    client.docx.document.get({ path: { document_id: docToken } }),
    client.docx.documentBlock.list({ path: { document_id: docToken } })
  ]);
  if (contentRes.code !== 0) throw new Error(contentRes.msg);
  const blocks = blocksRes.data?.items ?? [];
  const blockCounts = {};
  const structuredTypes = [];
  for (const b of blocks) {
    const type = b.block_type ?? 0;
    const name = BLOCK_TYPE_NAMES[type] || `type_${type}`;
    blockCounts[name] = (blockCounts[name] || 0) + 1;
    if (STRUCTURED_BLOCK_TYPES.has(type) && !structuredTypes.includes(name)) structuredTypes.push(name);
  }
  let hint;
  if (structuredTypes.length > 0) hint = `This document contains ${structuredTypes.join(", ")} which are NOT included in the plain text above. Use feishu_doc with action: "list_blocks" to get full content.`;
  return {
    title: infoRes.data?.document?.title,
    content: contentRes.data?.content,
    revision_id: infoRes.data?.document?.revision_id,
    block_count: blocks.length,
    block_types: blockCounts,
    ...hint && { hint }
  };
}
472
/**
 * Create a docx document and, unless options.grantToRequester === false,
 * grant "edit" permission to the requesting user. The grant is best-effort:
 * a missing requester id or an API failure is reported in the result fields
 * rather than thrown, so document creation itself never fails on it.
 */
async function createDoc(client, title, folderToken, options) {
  const res = await client.docx.document.create({ data: {
    title,
    folder_token: folderToken
  } });
  if (res.code !== 0) throw new Error(res.msg);
  const doc = res.data?.document;
  const docToken = doc?.document_id;
  if (!docToken) throw new Error("Document creation succeeded but no document_id was returned");
  // Grant defaults to ON; only an explicit false disables it.
  const shouldGrantToRequester = options?.grantToRequester !== false;
  const requesterOpenId = options?.requesterOpenId?.trim();
  const requesterPermType = "edit";
  let requesterPermissionAdded = false;
  let requesterPermissionSkippedReason;
  let requesterPermissionError;
  // Without a trusted requester open id the grant is skipped with a reason;
  // otherwise attempt it and capture any failure as a message.
  if (shouldGrantToRequester) if (!requesterOpenId) requesterPermissionSkippedReason = "trusted requester identity unavailable";
  else try {
    await client.drive.permissionMember.create({
      path: { token: docToken },
      params: {
        type: "docx",
        need_notification: false
      },
      data: {
        member_type: "openid",
        member_id: requesterOpenId,
        perm: requesterPermType
      }
    });
    requesterPermissionAdded = true;
  } catch (err) {
    // Best-effort: surface the failure in the result instead of failing creation.
    requesterPermissionError = err instanceof Error ? err.message : String(err);
  }
  return {
    document_id: docToken,
    title: doc?.title,
    url: `https://feishu.cn/docx/${docToken}`,
    // Grant-related fields are only present when a grant was requested.
    ...shouldGrantToRequester && {
      requester_permission_added: requesterPermissionAdded,
      ...requesterOpenId && { requester_open_id: requesterOpenId },
      requester_perm_type: requesterPermType,
      ...requesterPermissionSkippedReason && { requester_permission_skipped_reason: requesterPermissionSkippedReason },
      ...requesterPermissionError && { requester_permission_error: requesterPermissionError }
    }
  };
}
518
/**
 * Replace a document's entire content with the given markdown:
 * clear existing root children, convert markdown to blocks, insert them
 * (batched helper for >1000 blocks, otherwise one Descendant-API call), then
 * resolve remote images into the inserted image blocks.
 */
async function writeDoc(client, docToken, markdown, maxBytes, logger) {
  const deleted = await clearDocumentContent(client, docToken);
  logger?.info?.("feishu_doc: Converting markdown...");
  const { blocks, firstLevelBlockIds } = await chunkedConvertMarkdown(client, markdown);
  // Empty conversion result: the document is simply left cleared.
  if (blocks.length === 0) return {
    success: true,
    blocks_deleted: deleted,
    blocks_added: 0,
    images_processed: 0
  };
  logger?.info?.(`feishu_doc: Converted to ${blocks.length} blocks, inserting...`);
  const sortedBlocks = sortBlocksByFirstLevel(blocks, firstLevelBlockIds);
  const { children: inserted } = blocks.length > 1e3 ? await insertBlocksInBatches(client, docToken, sortedBlocks, firstLevelBlockIds, logger) : await insertBlocksWithDescendant(client, docToken, sortedBlocks, firstLevelBlockIds);
  const imagesProcessed = await processImages(client, docToken, markdown, inserted, maxBytes);
  logger?.info?.(`feishu_doc: Done (${blocks.length} blocks, ${imagesProcessed} images)`);
  return {
    success: true,
    blocks_deleted: deleted,
    blocks_added: blocks.length,
    images_processed: imagesProcessed
  };
}
540
/**
 * Append markdown content to a document: convert to blocks, insert after the
 * existing content (batched helper for >1000 blocks), then resolve remote
 * images. Throws when the markdown converts to zero blocks.
 */
async function appendDoc(client, docToken, markdown, maxBytes, logger) {
  logger?.info?.("feishu_doc: Converting markdown...");
  const { blocks, firstLevelBlockIds } = await chunkedConvertMarkdown(client, markdown);
  if (blocks.length === 0) throw new Error("Content is empty");
  logger?.info?.(`feishu_doc: Converted to ${blocks.length} blocks, inserting...`);
  const sortedBlocks = sortBlocksByFirstLevel(blocks, firstLevelBlockIds);
  const { children: inserted } = blocks.length > 1e3 ? await insertBlocksInBatches(client, docToken, sortedBlocks, firstLevelBlockIds, logger) : await insertBlocksWithDescendant(client, docToken, sortedBlocks, firstLevelBlockIds);
  const imagesProcessed = await processImages(client, docToken, markdown, inserted, maxBytes);
  logger?.info?.(`feishu_doc: Done (${blocks.length} blocks, ${imagesProcessed} images)`);
  return {
    success: true,
    blocks_added: blocks.length,
    images_processed: imagesProcessed,
    block_ids: inserted.map((b) => b.block_id)
  };
}
556
/**
 * Insert markdown content immediately after an existing block: resolve the
 * anchor block's parent, page through that parent's children to find the
 * anchor's position, and insert the converted blocks at anchor index + 1.
 */
async function insertDoc(client, docToken, markdown, afterBlockId, maxBytes, logger) {
  const blockInfo = await client.docx.documentBlock.get({ path: {
    document_id: docToken,
    block_id: afterBlockId
  } });
  if (blockInfo.code !== 0) throw new Error(blockInfo.msg);
  const parentId = blockInfo.data?.block?.parent_id ?? docToken;
  const items = [];
  let pageToken;
  // Page through ALL children of the parent so the anchor index is exact.
  do {
    const childrenRes = await client.docx.documentBlockChildren.get({
      path: {
        document_id: docToken,
        block_id: parentId
      },
      params: pageToken ? { page_token: pageToken } : {}
    });
    if (childrenRes.code !== 0) throw new Error(childrenRes.msg);
    items.push(...childrenRes.data?.items ?? []);
    pageToken = childrenRes.data?.page_token ?? void 0;
  } while (pageToken);
  const blockIndex = items.findIndex((item) => item.block_id === afterBlockId);
  if (blockIndex === -1) throw new Error(`after_block_id "${afterBlockId}" was not found among the children of parent block "${parentId}". Use list_blocks to verify the block ID.`);
  const insertIndex = blockIndex + 1;
  logger?.info?.("feishu_doc: Converting markdown...");
  const { blocks, firstLevelBlockIds } = await chunkedConvertMarkdown(client, markdown);
  if (blocks.length === 0) throw new Error("Content is empty");
  const sortedBlocks = sortBlocksByFirstLevel(blocks, firstLevelBlockIds);
  logger?.info?.(`feishu_doc: Converted to ${blocks.length} blocks, inserting at index ${insertIndex}...`);
  // >1000 blocks: batched helper at the target position; otherwise one Descendant-API call.
  const { children: inserted } = blocks.length > 1e3 ? await insertBlocksInBatches(client, docToken, sortedBlocks, firstLevelBlockIds, logger, parentId, insertIndex) : await insertBlocksWithDescendant(client, docToken, sortedBlocks, firstLevelBlockIds, {
    parentBlockId: parentId,
    index: insertIndex
  });
  const imagesProcessed = await processImages(client, docToken, markdown, inserted, maxBytes);
  logger?.info?.(`feishu_doc: Done (${blocks.length} blocks, ${imagesProcessed} images)`);
  return {
    success: true,
    blocks_added: blocks.length,
    images_processed: imagesProcessed,
    block_ids: inserted.map((b) => b.block_id)
  };
}
598
/**
 * Create an empty rowSize x columnSize table block under the given parent.
 * column_width, when provided, must contain exactly columnSize entries.
 * Returns the new table block id plus any cell block ids reported by the API.
 */
async function createTable(client, docToken, rowSize, columnSize, parentBlockId, columnWidth) {
  if (columnWidth && columnWidth.length !== columnSize) throw new Error("column_width length must equal column_size");
  const blockId = parentBlockId ?? docToken;
  const res = await client.docx.documentBlockChildren.create({
    path: {
      document_id: docToken,
      block_id: blockId
    },
    data: { children: [{
      block_type: 31,
      table: { property: {
        row_size: rowSize,
        column_size: columnSize,
        // column_width is optional; an empty array is omitted entirely.
        ...columnWidth && columnWidth.length > 0 ? { column_width: columnWidth } : {}
      } }
    }] }
  });
  if (res.code !== 0) throw new Error(res.msg);
  const tableBlock = (res.data?.children)?.find((b) => b.block_type === 31);
  const cells = tableBlock?.children ?? [];
  return {
    success: true,
    table_block_id: tableBlock?.block_id,
    row_size: rowSize,
    column_size: columnSize,
    table_cell_block_ids: cells.map((c) => c.block_id).filter(Boolean),
    raw_children_count: res.data?.children?.length ?? 0
  };
}
627
/**
 * Fill the cells of an existing table block with markdown content.
 *
 * For each (row, col) within both `values` and the table's real dimensions,
 * the cell's current children are deleted, the markdown is converted to
 * blocks, and the blocks are inserted into the cell. Cells are processed
 * sequentially so the remote mutations happen in a deterministic order.
 *
 * @param client       Feishu SDK client.
 * @param docToken     Document ID.
 * @param tableBlockId Block ID of an existing table (block_type 31).
 * @param values       2D array of markdown strings (row-major).
 * @returns `{ success, table_block_id, cells_written, table_size }`.
 * @throws {Error} on invalid input, a non-table block, missing cell IDs,
 *                 or any non-zero API response code.
 */
async function writeTableCells(client, docToken, tableBlockId, values) {
	if (!values.length || !values[0]?.length) throw new Error("values must be a non-empty 2D array");
	// Fetch the table block to learn its dimensions and cell block IDs.
	const tableRes = await client.docx.documentBlock.get({ path: {
		document_id: docToken,
		block_id: tableBlockId
	} });
	if (tableRes.code !== 0) throw new Error(tableRes.msg);
	const block = tableRes.data?.block;
	if (block?.block_type !== 31) throw new Error("table_block_id is not a table block");
	const rows = block.table?.property?.row_size;
	const cols = block.table?.property?.column_size;
	const cellIds = block.table?.cells ?? [];
	if (!rows || !cols || !cellIds.length) throw new Error("Table cell IDs unavailable from table block. Use list_blocks/get_block and pass explicit cell block IDs if needed.");
	// Clamp writes to the table's actual size; extra values are ignored.
	const rowLimit = Math.min(values.length, rows);
	let written = 0;
	for (let rowIdx = 0; rowIdx < rowLimit; rowIdx++) {
		const rowValues = values[rowIdx] ?? [];
		const colLimit = Math.min(rowValues.length, cols);
		for (let colIdx = 0; colIdx < colLimit; colIdx++) {
			// Cell IDs appear to be stored row-major — matches row*cols+col indexing.
			const cellId = cellIds[rowIdx * cols + colIdx];
			if (!cellId) continue;
			// Clear whatever the cell currently contains before writing.
			const listRes = await client.docx.documentBlockChildren.get({ path: {
				document_id: docToken,
				block_id: cellId
			} });
			if (listRes.code !== 0) throw new Error(listRes.msg);
			const current = listRes.data?.items ?? [];
			if (current.length > 0) {
				const delRes = await client.docx.documentBlockChildren.batchDelete({
					path: {
						document_id: docToken,
						block_id: cellId
					},
					data: {
						start_index: 0,
						end_index: current.length
					}
				});
				if (delRes.code !== 0) throw new Error(delRes.msg);
			}
			// Convert this cell's markdown and insert the resulting blocks.
			const { blocks, firstLevelBlockIds } = await convertMarkdown(client, rowValues[colIdx] ?? "");
			const ordered = sortBlocksByFirstLevel(blocks, firstLevelBlockIds);
			if (ordered.length > 0) await insertBlocks(client, docToken, ordered, cellId);
			written++;
		}
	}
	return {
		success: true,
		table_block_id: tableBlockId,
		cells_written: written,
		table_size: {
			rows,
			cols
		}
	};
}
684
/**
 * Convenience wrapper: create a table and immediately populate its cells.
 *
 * @param client        Feishu SDK client.
 * @param docToken      Document ID.
 * @param rowSize       Number of rows for the new table.
 * @param columnSize    Number of columns for the new table.
 * @param values        2D array of markdown strings (row-major).
 * @param parentBlockId Optional parent block for the table.
 * @param columnWidth   Optional per-column widths.
 * @returns Summary combining the created table ID and the cell-write count.
 * @throws {Error} if creation succeeds without yielding a table block ID,
 *                 or if either underlying step fails.
 */
async function createTableWithValues(client, docToken, rowSize, columnSize, values, parentBlockId, columnWidth) {
	const created = await createTable(client, docToken, rowSize, columnSize, parentBlockId, columnWidth);
	const tableBlockId = created.table_block_id;
	if (!tableBlockId) throw new Error("create_table succeeded but table_block_id is missing");
	const writeResult = await writeTableCells(client, docToken, tableBlockId, values);
	return {
		success: true,
		table_block_id: tableBlockId,
		row_size: rowSize,
		column_size: columnSize,
		cells_written: writeResult.cells_written
	};
}
695
/**
 * Replace the text content of a block with a single plain text run.
 *
 * @param client   Feishu SDK client.
 * @param docToken Document ID.
 * @param blockId  Target block ID.
 * @param content  New plain-text content for the block.
 * @returns `{ success: true, block_id }` on success.
 * @throws {Error} when the lookup or the patch returns a non-zero code.
 */
async function updateBlock(client, docToken, blockId, content) {
	// Existence check first: surfaces a clear API error before patching.
	const lookup = await client.docx.documentBlock.get({ path: {
		document_id: docToken,
		block_id: blockId
	} });
	if (lookup.code !== 0) throw new Error(lookup.msg);
	const patchRes = await client.docx.documentBlock.patch({
		path: {
			document_id: docToken,
			block_id: blockId
		},
		data: { update_text_elements: { elements: [{ text_run: { content } }] } }
	});
	if (patchRes.code !== 0) throw new Error(patchRes.msg);
	return {
		success: true,
		block_id: blockId
	};
}
714
/**
 * Delete a single block from a document.
 *
 * Resolves the block's parent, locates the block's absolute index among the
 * parent's children, then removes it via a one-element batchDelete.
 *
 * Fix: the children listing is a paged endpoint (the same call is paginated
 * with `page_token` elsewhere in this module), so a single unpaginated fetch
 * could miss blocks beyond the first page and throw a spurious
 * "Block not found". We now walk all pages while tracking the running offset
 * so `start_index` stays absolute.
 *
 * @param client   Feishu SDK client.
 * @param docToken Document ID.
 * @param blockId  Block to delete.
 * @returns `{ success: true, deleted_block_id }`.
 * @throws {Error} when any API call fails or the block cannot be located.
 */
async function deleteBlock(client, docToken, blockId) {
	const blockInfo = await client.docx.documentBlock.get({ path: {
		document_id: docToken,
		block_id: blockId
	} });
	if (blockInfo.code !== 0) throw new Error(blockInfo.msg);
	// Blocks directly under the document root report the doc token as parent.
	const parentId = blockInfo.data?.block?.parent_id ?? docToken;
	let index = -1;
	let offset = 0;
	let pageToken;
	do {
		const children = await client.docx.documentBlockChildren.get({
			path: {
				document_id: docToken,
				block_id: parentId
			},
			...pageToken ? { params: { page_token: pageToken } } : {}
		});
		if (children.code !== 0) throw new Error(children.msg);
		const items = children.data?.items ?? [];
		const found = items.findIndex((item) => item.block_id === blockId);
		if (found !== -1) {
			index = offset + found;
			break;
		}
		offset += items.length;
		pageToken = children.data?.page_token ?? void 0;
	} while (pageToken);
	if (index === -1) throw new Error("Block not found");
	const res = await client.docx.documentBlockChildren.batchDelete({
		path: {
			document_id: docToken,
			block_id: parentId
		},
		data: {
			start_index: index,
			end_index: index + 1
		}
	});
	if (res.code !== 0) throw new Error(res.msg);
	return {
		success: true,
		deleted_block_id: blockId
	};
}
744
/**
 * List every block in a document.
 *
 * Fix: `documentBlock.list` is a paged endpoint (this module paginates
 * sibling calls with `page_token` using the same do/while pattern); the
 * previous single call silently truncated documents longer than one page.
 * All pages are now accumulated before returning.
 *
 * @param client   Feishu SDK client.
 * @param docToken Document ID.
 * @returns `{ blocks }` — the document's full block list.
 * @throws {Error} when the API returns a non-zero code.
 */
async function listBlocks(client, docToken) {
	const blocks = [];
	let pageToken;
	do {
		const res = await client.docx.documentBlock.list({
			path: { document_id: docToken },
			...pageToken ? { params: { page_token: pageToken } } : {}
		});
		if (res.code !== 0) throw new Error(res.msg);
		blocks.push(...res.data?.items ?? []);
		pageToken = res.data?.page_token ?? void 0;
	} while (pageToken);
	return { blocks };
}
749
/**
 * Fetch a single block by ID.
 *
 * @param client   Feishu SDK client.
 * @param docToken Document ID.
 * @param blockId  Block ID to fetch.
 * @returns `{ block }` — the raw block payload from the API.
 * @throws {Error} when the API returns a non-zero code.
 */
async function getBlock(client, docToken, blockId) {
	const path = {
		document_id: docToken,
		block_id: blockId
	};
	const response = await client.docx.documentBlock.get({ path });
	if (response.code !== 0) throw new Error(response.msg);
	return { block: response.data?.block };
}
757
/**
 * List the app's permission scopes, split into granted vs pending.
 *
 * A scope counts as granted when `grant_status === 1`; everything else
 * (including unknown statuses) is reported as pending.
 *
 * @param client Feishu SDK client.
 * @returns `{ granted, pending, summary }` where each entry is `{ name, type }`.
 * @throws {Error} when the API returns a non-zero code.
 */
async function listAppScopes(client) {
	const res = await client.application.scope.list({});
	if (res.code !== 0) throw new Error(res.msg);
	const granted = [];
	const pending = [];
	// Single-pass partition; order within each bucket follows the API order.
	for (const scope of res.data?.scopes ?? []) {
		const entry = {
			name: scope.scope_name,
			type: scope.scope_type
		};
		(scope.grant_status === 1 ? granted : pending).push(entry);
	}
	return {
		granted,
		pending,
		summary: `${granted.length} granted, ${pending.length} pending`
	};
}
775
/**
 * Register the Feishu document tools (`feishu_doc`, `feishu_app_scopes`)
 * against the plugin API, gated by the resolved tools configuration.
 *
 * - Skips registration entirely when `api.config` is absent.
 * - `feishu_doc` dispatches a single `action` parameter to the document
 *   helpers in this module (read/write/table/upload/… operations).
 * - `feishu_app_scopes` exposes the scope listing for permission debugging.
 *
 * All tool errors are caught and returned as `{ error }` JSON payloads
 * rather than thrown, so the agent always receives a tool result.
 */
function registerFeishuDocTools(api) {
	if (!api.config) {
		api.logger.debug?.("feishu_doc: No config available, skipping doc tools");
		return;
	}
	const toolsCfg = resolveRegisteredFeishuToolsConfig(api.config);
	const registered = [];
	// Build a per-call client; executeParams may carry an account override.
	const getClient = (params, defaultAccountId) => createFeishuToolClient({
		api,
		executeParams: params,
		defaultAccountId
	});
	// Media size cap in bytes; account config `mediaMaxMb` defaults to 30 MB.
	const getMediaMaxBytes = (params, defaultAccountId) => (resolveFeishuToolAccount({
		api,
		executeParams: params,
		defaultAccountId
	}).config?.mediaMaxMb ?? 30) * 1024 * 1024;
	if (toolsCfg.doc) {
		api.registerTool((ctx) => {
			const defaultAccountId = ctx.agentAccountId;
			// Only trust the requester's open_id when the message arrived via
			// the Feishu channel itself; empty/whitespace IDs become undefined.
			const trustedRequesterOpenId = ctx.messageChannel === "feishu" ? ctx.requesterSenderId?.trim() || void 0 : void 0;
			return {
				name: "feishu_doc",
				label: "Feishu Doc",
				description: "Feishu document operations. Actions: read, write, append, insert, create, list_blocks, get_block, update_block, delete_block, create_table, write_table_cells, create_table_with_values, insert_table_row, insert_table_column, delete_table_rows, delete_table_columns, merge_table_cells, upload_image, upload_file, color_text",
				parameters: FeishuDocSchema,
				async execute(_toolCallId, params) {
					const p = params;
					try {
						const client = getClient(p, defaultAccountId);
						// Dispatch: each action maps 1:1 onto a helper in this module.
						switch (p.action) {
							case "read": return json(await readDoc(client, p.doc_token));
							case "write": return json(await writeDoc(client, p.doc_token, p.content, getMediaMaxBytes(p, defaultAccountId), api.logger));
							case "append": return json(await appendDoc(client, p.doc_token, p.content, getMediaMaxBytes(p, defaultAccountId), api.logger));
							case "insert": return json(await insertDoc(client, p.doc_token, p.content, p.after_block_id, getMediaMaxBytes(p, defaultAccountId), api.logger));
							case "create": return json(await createDoc(client, p.title, p.folder_token, {
								grantToRequester: p.grant_to_requester,
								requesterOpenId: trustedRequesterOpenId
							}));
							case "list_blocks": return json(await listBlocks(client, p.doc_token));
							case "get_block": return json(await getBlock(client, p.doc_token, p.block_id));
							case "update_block": return json(await updateBlock(client, p.doc_token, p.block_id, p.content));
							case "delete_block": return json(await deleteBlock(client, p.doc_token, p.block_id));
							case "create_table": return json(await createTable(client, p.doc_token, p.row_size, p.column_size, p.parent_block_id, p.column_width));
							case "write_table_cells": return json(await writeTableCells(client, p.doc_token, p.table_block_id, p.values));
							case "create_table_with_values": return json(await createTableWithValues(client, p.doc_token, p.row_size, p.column_size, p.values, p.parent_block_id, p.column_width));
							case "upload_image": return json(await uploadImageBlock(client, p.doc_token, getMediaMaxBytes(p, defaultAccountId), p.url, p.file_path, p.parent_block_id, p.filename, p.index, p.image));
							case "upload_file": return json(await uploadFileBlock(client, p.doc_token, getMediaMaxBytes(p, defaultAccountId), p.url, p.file_path, p.parent_block_id, p.filename));
							case "color_text": return json(await updateColorText(client, p.doc_token, p.block_id, p.content));
							case "insert_table_row": return json(await insertTableRow(client, p.doc_token, p.block_id, p.row_index));
							case "insert_table_column": return json(await insertTableColumn(client, p.doc_token, p.block_id, p.column_index));
							case "delete_table_rows": return json(await deleteTableRows(client, p.doc_token, p.block_id, p.row_start, p.row_count));
							case "delete_table_columns": return json(await deleteTableColumns(client, p.doc_token, p.block_id, p.column_start, p.column_count));
							case "merge_table_cells": return json(await mergeTableCells(client, p.doc_token, p.block_id, p.row_start, p.row_end, p.column_start, p.column_end));
							default: return json({ error: `Unknown action: ${p.action}` });
						}
					} catch (err) {
						// Tool errors are returned, not thrown, so the agent sees them.
						return json({ error: err instanceof Error ? err.message : String(err) });
					}
				}
			};
		}, { name: "feishu_doc" });
		registered.push("feishu_doc");
	}
	if (toolsCfg.scopes) {
		api.registerTool((ctx) => ({
			name: "feishu_app_scopes",
			label: "Feishu App Scopes",
			description: "List current app permissions (scopes). Use to debug permission issues or check available capabilities.",
			parameters: Type.Object({}),
			async execute() {
				try {
					return json(await listAppScopes(getClient(void 0, ctx.agentAccountId)));
				} catch (err) {
					return json({ error: err instanceof Error ? err.message : String(err) });
				}
			}
		}), { name: "feishu_app_scopes" });
		registered.push("feishu_app_scopes");
	}
	if (registered.length > 0) api.logger.info?.(`feishu_doc: Registered ${registered.join(", ")}`);
}
857
+ //#endregion
858
+ export { registerFeishuDocTools };