@bobotu/feishu-fork 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +922 -0
  3. package/index.ts +65 -0
  4. package/openclaw.plugin.json +10 -0
  5. package/package.json +72 -0
  6. package/skills/feishu-doc/SKILL.md +161 -0
  7. package/skills/feishu-doc/references/block-types.md +102 -0
  8. package/skills/feishu-drive/SKILL.md +96 -0
  9. package/skills/feishu-perm/SKILL.md +90 -0
  10. package/skills/feishu-task/SKILL.md +210 -0
  11. package/skills/feishu-wiki/SKILL.md +96 -0
  12. package/src/accounts.ts +140 -0
  13. package/src/bitable-tools/actions.ts +199 -0
  14. package/src/bitable-tools/common.ts +90 -0
  15. package/src/bitable-tools/index.ts +1 -0
  16. package/src/bitable-tools/meta.ts +80 -0
  17. package/src/bitable-tools/register.ts +195 -0
  18. package/src/bitable-tools/schemas.ts +221 -0
  19. package/src/bot.ts +1125 -0
  20. package/src/channel.ts +334 -0
  21. package/src/client.ts +114 -0
  22. package/src/config-schema.ts +237 -0
  23. package/src/dedup.ts +54 -0
  24. package/src/directory.ts +165 -0
  25. package/src/doc-tools/actions.ts +341 -0
  26. package/src/doc-tools/common.ts +33 -0
  27. package/src/doc-tools/index.ts +2 -0
  28. package/src/doc-tools/register.ts +90 -0
  29. package/src/doc-tools/schemas.ts +85 -0
  30. package/src/doc-write-service.ts +711 -0
  31. package/src/drive-tools/actions.ts +182 -0
  32. package/src/drive-tools/common.ts +18 -0
  33. package/src/drive-tools/index.ts +2 -0
  34. package/src/drive-tools/register.ts +71 -0
  35. package/src/drive-tools/schemas.ts +67 -0
  36. package/src/dynamic-agent.ts +135 -0
  37. package/src/external-keys.ts +19 -0
  38. package/src/media.ts +510 -0
  39. package/src/mention.ts +121 -0
  40. package/src/monitor.ts +323 -0
  41. package/src/onboarding.ts +449 -0
  42. package/src/outbound.ts +40 -0
  43. package/src/perm-tools/actions.ts +111 -0
  44. package/src/perm-tools/common.ts +18 -0
  45. package/src/perm-tools/index.ts +2 -0
  46. package/src/perm-tools/register.ts +65 -0
  47. package/src/perm-tools/schemas.ts +52 -0
  48. package/src/policy.ts +117 -0
  49. package/src/probe.ts +147 -0
  50. package/src/reactions.ts +160 -0
  51. package/src/reply-dispatcher.ts +240 -0
  52. package/src/runtime.ts +14 -0
  53. package/src/send.ts +391 -0
  54. package/src/streaming-card.ts +211 -0
  55. package/src/targets.ts +58 -0
  56. package/src/task-tools/actions.ts +590 -0
  57. package/src/task-tools/common.ts +18 -0
  58. package/src/task-tools/constants.ts +13 -0
  59. package/src/task-tools/index.ts +1 -0
  60. package/src/task-tools/register.ts +263 -0
  61. package/src/task-tools/schemas.ts +567 -0
  62. package/src/text/markdown-links.ts +104 -0
  63. package/src/tools-common/feishu-api.ts +184 -0
  64. package/src/tools-common/tool-context.ts +23 -0
  65. package/src/tools-common/tool-exec.ts +73 -0
  66. package/src/tools-config.ts +22 -0
  67. package/src/types.ts +79 -0
  68. package/src/typing.ts +75 -0
  69. package/src/wiki-tools/actions.ts +166 -0
  70. package/src/wiki-tools/common.ts +18 -0
  71. package/src/wiki-tools/index.ts +2 -0
  72. package/src/wiki-tools/register.ts +66 -0
  73. package/src/wiki-tools/schemas.ts +55 -0
@@ -0,0 +1,711 @@
1
+ import type * as Lark from "@larksuiteoapi/node-sdk";
2
+ import { Readable } from "stream";
3
+ import { getFeishuRuntime } from "./runtime.js";
4
+
5
// Human-readable names for Feishu docx block_type codes; used when reporting
// skipped/unsupported blocks back to the caller.
const BLOCK_TYPE_NAMES: Record<number, string> = {
  1: "Page",
  2: "Text",
  3: "Heading1",
  4: "Heading2",
  5: "Heading3",
  12: "Bullet",
  13: "Ordered",
  14: "Code",
  15: "Quote",
  17: "Todo",
  18: "Bitable",
  21: "Diagram",
  22: "Divider",
  23: "File",
  27: "Image",
  30: "Sheet",
  31: "Table",
  32: "TableCell",
};

// Block types that cannot be created via documentBlockChildren.create API
// (32 = TableCell: cells are generated by the API when the parent table is created).
const UNSUPPORTED_CREATE_TYPES = new Set([32]);

// Maximum content length for a single API call (empirical value based on Feishu API limits)
const MAX_CONTENT_LENGTH = 50000; // ~50KB
const MAX_BLOCKS_PER_INSERT = 50; // Maximum blocks per insert API call
32
+
33
/** Result of createDoc: identifiers and URL of the newly created document. */
export type CreateDocResult = {
  document_id?: string;
  title?: string;
  url: string;
};

/** Result of writeDoc (full replacement of document content). */
export type WriteDocResult = {
  success: true;
  blocks_deleted: number;
  blocks_added: number;
  images_processed: number;
  warning?: string; // set when some content was skipped or only partially copied
};

/** Result of appendDoc (content added after existing blocks). */
export type AppendDocResult = {
  success: true;
  blocks_added: number;
  images_processed: number;
  block_ids: string[]; // IDs of the newly appended blocks
  warning?: string;
};

/** Result of createAndWriteDoc (document creation plus full content write). */
export type CreateAndWriteDocResult = {
  success: true;
  document_id: string;
  title: string;
  url: string;
  import_method: "create_and_write";
  blocks_added: number;
  images_processed: number;
  warning?: string;
};

// Aggregate outcome of one insert pass: created blocks, skipped type names, warnings.
type InsertResult = { children: any[]; skipped: string[]; warnings: string[] };
67
+
68
+ /** Extract image URLs from markdown content */
69
+ function extractImageUrls(markdown: string): string[] {
70
+ const regex = /!\[[^\]]*\]\(([^)]+)\)/g;
71
+ const urls: string[] = [];
72
+ let match;
73
+ while ((match = regex.exec(markdown)) !== null) {
74
+ const url = match[1].trim();
75
+ if (url.startsWith("http://") || url.startsWith("https://")) {
76
+ urls.push(url);
77
+ }
78
+ }
79
+ return urls;
80
+ }
81
+
82
+ /**
83
+ * Reorder blocks according to firstLevelBlockIds from convertMarkdown API.
84
+ * The API returns blocks as a flat unordered array across all levels.
85
+ * firstLevelBlockIds provides the correct top-level document order.
86
+ */
87
+ function reorderBlocks(blocks: any[], firstLevelBlockIds: string[]): any[] {
88
+ if (!firstLevelBlockIds || firstLevelBlockIds.length === 0) return blocks;
89
+
90
+ const blockMap = new Map<string, any>();
91
+ for (const block of blocks) {
92
+ if (block.block_id) {
93
+ blockMap.set(block.block_id, block);
94
+ }
95
+ }
96
+
97
+ const ordered: any[] = [];
98
+ for (const id of firstLevelBlockIds) {
99
+ const block = blockMap.get(id);
100
+ if (block) {
101
+ ordered.push(block);
102
+ }
103
+ }
104
+
105
+ // If mapping unexpectedly fails, fall back to original to avoid hard data loss.
106
+ return ordered.length > 0 ? ordered : blocks;
107
+ }
108
+
109
+ /** Clean blocks for insertion (remove unsupported types and read-only fields) */
110
+ function cleanBlocksForInsert(blocks: any[]): { cleaned: any[]; skipped: string[] } {
111
+ const skipped: string[] = [];
112
+ const cleaned = blocks
113
+ .filter((block) => {
114
+ if (UNSUPPORTED_CREATE_TYPES.has(block.block_type)) {
115
+ const typeName = BLOCK_TYPE_NAMES[block.block_type] || `type_${block.block_type}`;
116
+ skipped.push(typeName);
117
+ return false;
118
+ }
119
+ return true;
120
+ })
121
+ .map((block) => {
122
+ const cleanedBlock = { ...block };
123
+ delete cleanedBlock.block_id;
124
+ delete cleanedBlock.parent_id;
125
+ delete cleanedBlock.children;
126
+
127
+ // Table cell IDs and merge metadata are not accepted in create payload.
128
+ if (cleanedBlock.block_type === 31 && cleanedBlock.table) {
129
+ const property = cleanedBlock.table.property ?? {};
130
+ const { merge_info, ...propertyRest } = property;
131
+ cleanedBlock.table = { property: propertyRest };
132
+ }
133
+
134
+ return cleanedBlock;
135
+ });
136
+ return { cleaned, skipped };
137
+ }
138
+
139
+ function buildBlockMap(blocks: any[]): Map<string, any> {
140
+ const map = new Map<string, any>();
141
+ for (const block of blocks) {
142
+ if (block.block_id) map.set(block.block_id, block);
143
+ }
144
+ return map;
145
+ }
146
+
147
+ function sleep(ms: number) {
148
+ return new Promise((resolve) => setTimeout(resolve, ms));
149
+ }
150
+
151
+ // Known transient/throughput-related Feishu codes observed across endpoints.
152
+ // Code matching is primary; message matching is fallback for undocumented new codes.
153
+ const RETRYABLE_CREATE_ERROR_CODES = new Set<number>([
154
+ 429, // HTTP-like throttle surfaces in some SDK wrappers
155
+ 1254290, // Too many requests
156
+ 1254291, // Write conflict
157
+ 1255040, // Request timeout
158
+ ]);
159
+
160
+ const RETRYABLE_MESSAGE_PATTERNS = [
161
+ /\brate\b/i,
162
+ /\bfrequency\b/i,
163
+ /\btoo many\b/i,
164
+ /\blimit\b/i,
165
+ /\bqps\b/i,
166
+ /频率/u,
167
+ /限流/u,
168
+ ];
169
+
170
+ function isRetryableCreateError(code?: number, msg?: string) {
171
+ if (!code || code === 0) return false;
172
+ if (RETRYABLE_CREATE_ERROR_CODES.has(code)) return true;
173
+ const text = msg ?? "";
174
+ return RETRYABLE_MESSAGE_PATTERNS.some((pattern) => pattern.test(text));
175
+ }
176
+
177
+ const CREATE_CHILDREN_RETRY_POLICY = {
178
+ maxAttempts: 4,
179
+ baseDelayMs: 250,
180
+ maxDelayMs: 2500,
181
+ jitterRatio: 0.2,
182
+ } as const;
183
+
184
+ function computeBackoffDelayMs(attempt: number, policy = CREATE_CHILDREN_RETRY_POLICY) {
185
+ const exp = Math.min(policy.maxDelayMs, policy.baseDelayMs * 2 ** (attempt - 1));
186
+ const jitter = exp * policy.jitterRatio;
187
+ const min = Math.max(0, exp - jitter);
188
+ const max = exp + jitter;
189
+ return Math.round(min + Math.random() * (max - min));
190
+ }
191
+
192
// Request/response types for docx.documentBlockChildren.create, derived from
// the SDK method signature so they track SDK upgrades automatically.
type CreateChildrenPayload = Parameters<Lark.Client["docx"]["documentBlockChildren"]["create"]>[0];
type CreateChildrenResponse = Awaited<
  ReturnType<Lark.Client["docx"]["documentBlockChildren"]["create"]>
>;
196
+
197
+ async function executeWithBackoff<T>(args: {
198
+ operationName: string;
199
+ operation: () => Promise<T>;
200
+ isSuccess: (result: T) => boolean;
201
+ shouldRetry: (result: T) => boolean;
202
+ getMessage: (result: T) => string | undefined;
203
+ policy?: typeof CREATE_CHILDREN_RETRY_POLICY;
204
+ }): Promise<T> {
205
+ const policy = args.policy ?? CREATE_CHILDREN_RETRY_POLICY;
206
+ let lastResult: T | undefined;
207
+
208
+ for (let attempt = 1; attempt <= policy.maxAttempts; attempt++) {
209
+ const result = await args.operation();
210
+ lastResult = result;
211
+
212
+ if (args.isSuccess(result)) return result;
213
+ if (!args.shouldRetry(result) || attempt === policy.maxAttempts) return result;
214
+
215
+ const delayMs = computeBackoffDelayMs(attempt, policy);
216
+ const msg = args.getMessage(result) ?? "unknown error";
217
+ console.warn(
218
+ `[feishu_doc] ${args.operationName} retry ${attempt}/${policy.maxAttempts - 1} after ${delayMs}ms: ${msg}`,
219
+ );
220
+ await sleep(delayMs);
221
+ }
222
+
223
+ return lastResult!;
224
+ }
225
+
226
+ async function createChildrenWithRetry(
227
+ client: Lark.Client,
228
+ payload: CreateChildrenPayload,
229
+ policy = CREATE_CHILDREN_RETRY_POLICY,
230
+ ) {
231
+ return executeWithBackoff<CreateChildrenResponse>({
232
+ operationName: "docx.documentBlockChildren.create",
233
+ operation: () => client.docx.documentBlockChildren.create(payload),
234
+ isSuccess: (res) => res.code === 0,
235
+ shouldRetry: (res) => isRetryableCreateError(res.code, res.msg),
236
+ getMessage: (res) => res.msg,
237
+ policy,
238
+ });
239
+ }
240
+
241
+ async function insertBlocks(
242
+ client: Lark.Client,
243
+ docToken: string,
244
+ blocks: any[],
245
+ parentBlockId?: string,
246
+ ): Promise<{ children: any[]; skipped: string[] }> {
247
+ const { cleaned, skipped } = cleanBlocksForInsert(blocks);
248
+ const blockId = parentBlockId ?? docToken;
249
+
250
+ if (cleaned.length === 0) {
251
+ return { children: [], skipped };
252
+ }
253
+
254
+ const res = await createChildrenWithRetry(client, {
255
+ path: { document_id: docToken, block_id: blockId },
256
+ data: { children: cleaned },
257
+ });
258
+ if (res.code !== 0) throw new Error(res.msg);
259
+ return { children: res.data?.children ?? [], skipped };
260
+ }
261
+
262
/**
 * Insert a table block, then copy each source cell's content into the
 * corresponding API-generated destination cell.
 *
 * The create API generates fresh table_cell blocks (source cell IDs are
 * stripped by cleanBlocksForInsert), so cell content must be added in a
 * second pass. Cells are matched positionally: source cell i -> created
 * cell i. Partial-copy conditions are reported as warnings, not errors.
 */
async function insertTableWithCells(
  client: Lark.Client,
  docToken: string,
  tableBlock: any,
  blockMap: Map<string, any>,
  parentBlockId?: string,
): Promise<InsertResult> {
  // Pass 1: create the bare table.
  const tableInsert = await insertBlocks(client, docToken, [tableBlock], parentBlockId);
  const insertedTable = tableInsert.children[0];

  if (!insertedTable || insertedTable.block_type !== 31) {
    return {
      children: tableInsert.children,
      skipped: tableInsert.skipped,
      warnings: ["Table block was not returned after create; skipped table cell content."],
    };
  }

  const srcCells: string[] = tableBlock.table?.cells ?? [];
  const dstCells: string[] = insertedTable.table?.cells ?? [];
  if (srcCells.length === 0) {
    // Source table carries no cell metadata; nothing to copy.
    return { children: tableInsert.children, skipped: tableInsert.skipped, warnings: [] };
  }

  if (dstCells.length === 0) {
    return {
      children: tableInsert.children,
      skipped: tableInsert.skipped,
      warnings: ["Table created but API did not return generated cells; table content may be empty."],
    };
  }

  const copiedChildren: any[] = [];
  const allSkipped = [...tableInsert.skipped];
  const warnings: string[] = [];
  let sourceCellsWithContent = 0;
  let copiedCellCount = 0;

  // Pass 2: copy cell children into the generated cells, index by index.
  const cellCount = Math.min(srcCells.length, dstCells.length);
  for (let i = 0; i < cellCount; i++) {
    const srcCellId = srcCells[i];
    const dstCellId = dstCells[i];
    const srcCell = blockMap.get(srcCellId);
    const srcChildIds: string[] = srcCell?.children ?? [];
    let srcChildBlocks = srcChildIds.map((id) => blockMap.get(id)).filter((b): b is any => Boolean(b));

    // Some convert payloads may carry plain text directly on table_cell.
    if (srcChildBlocks.length === 0 && srcCell?.text?.elements?.length) {
      srcChildBlocks = [{ block_type: 2, text: srcCell.text }];
    }
    if (srcChildBlocks.length === 0 && srcCell?.table_cell?.text?.elements?.length) {
      srcChildBlocks = [{ block_type: 2, text: srcCell.table_cell.text }];
    }

    if (srcChildBlocks.length === 0) continue; // genuinely empty cell
    sourceCellsWithContent++;

    const cellInsert = await insertBlocksInBatches(client, docToken, srcChildBlocks, dstCellId);
    copiedChildren.push(...cellInsert.children);
    allSkipped.push(...cellInsert.skipped);
    if (cellInsert.children.length > 0) copiedCellCount++;
  }

  // Surface mismatches/partial copies as warnings so the caller can report them.
  if (srcCells.length !== dstCells.length) {
    warnings.push(
      `Table cell count mismatch after create (source=${srcCells.length}, target=${dstCells.length}); content may be partially copied.`,
    );
  }
  if (sourceCellsWithContent > 0 && copiedCellCount < sourceCellsWithContent) {
    warnings.push(
      `Copied table cell content for ${copiedCellCount}/${sourceCellsWithContent} non-empty cells.`,
    );
  }

  return {
    children: [...tableInsert.children, ...copiedChildren],
    skipped: [...new Set(allSkipped)],
    warnings,
  };
}
342
+
343
/**
 * Insert blocks in batches to avoid API limits.
 *
 * Blocks are created in chunks of MAX_BLOCKS_PER_INSERT under parentBlockId
 * (or the document root). When a whole batch is rejected by the API, the
 * batch is retried one block at a time so a single problematic block does
 * not discard the rest; blocks that still fail are logged and dropped.
 * Transport-level errors (thrown, not returned) abort the whole operation.
 */
async function insertBlocksInBatches(
  client: Lark.Client,
  docToken: string,
  blocks: any[],
  parentBlockId?: string,
): Promise<{ children: any[]; skipped: string[] }> {
  const allInserted: any[] = [];
  const allSkipped: string[] = [];
  const blockId = parentBlockId ?? docToken;

  for (let i = 0; i < blocks.length; i += MAX_BLOCKS_PER_INSERT) {
    const batch = blocks.slice(i, i + MAX_BLOCKS_PER_INSERT);
    const { cleaned, skipped } = cleanBlocksForInsert(batch);
    allSkipped.push(...skipped);

    if (cleaned.length === 0) {
      continue;
    }

    try {
      const res = await createChildrenWithRetry(client, {
        path: { document_id: docToken, block_id: blockId },
        data: { children: cleaned },
      });

      if (res.code !== 0) {
        // If batch insert fails, try inserting one by one.
        console.warn(`[feishu_doc] Batch insert failed: ${res.msg}. Trying individual inserts...`);
        for (const block of cleaned) {
          try {
            const singleRes = await createChildrenWithRetry(client, {
              path: { document_id: docToken, block_id: blockId },
              data: { children: [block] },
            });
            if (singleRes.code === 0) {
              allInserted.push(...(singleRes.data?.children ?? []));
            } else {
              // Non-retryable per-block failure: log and continue with the rest.
              console.error(`[feishu_doc] Failed to insert block: ${singleRes.msg}`);
            }
          } catch (err) {
            console.error(`[feishu_doc] Error inserting block:`, err);
          }
        }
      } else {
        allInserted.push(...(res.data?.children ?? []));
      }
    } catch (err) {
      // Network/SDK exception: rethrow so the caller sees a hard failure.
      console.error(`[feishu_doc] Error in batch insert:`, err);
      throw err;
    }
  }

  return { children: allInserted, skipped: [...new Set(allSkipped)] };
}
400
+
401
+ async function insertBlocksPreservingTables(
402
+ client: Lark.Client,
403
+ docToken: string,
404
+ blocks: any[],
405
+ blockMap: Map<string, any>,
406
+ parentBlockId?: string,
407
+ ): Promise<InsertResult> {
408
+ const inserted: any[] = [];
409
+ const skipped: string[] = [];
410
+ const warnings: string[] = [];
411
+ const buffer: any[] = [];
412
+
413
+ const flushBuffer = async () => {
414
+ if (buffer.length === 0) return;
415
+ const res = await insertBlocksInBatches(client, docToken, buffer, parentBlockId);
416
+ inserted.push(...res.children);
417
+ skipped.push(...res.skipped);
418
+ buffer.length = 0;
419
+ };
420
+
421
+ for (const block of blocks) {
422
+ if (block.block_type === 31) {
423
+ await flushBuffer();
424
+ const tableRes = await insertTableWithCells(client, docToken, block, blockMap, parentBlockId);
425
+ inserted.push(...tableRes.children);
426
+ skipped.push(...tableRes.skipped);
427
+ warnings.push(...tableRes.warnings);
428
+ continue;
429
+ }
430
+ buffer.push(block);
431
+ }
432
+
433
+ await flushBuffer();
434
+
435
+ return {
436
+ children: inserted,
437
+ skipped: [...new Set(skipped)],
438
+ warnings: [...new Set(warnings)],
439
+ };
440
+ }
441
+
442
+ async function convertMarkdown(client: Lark.Client, markdown: string) {
443
+ const res = await client.docx.document.convert({
444
+ data: { content_type: "markdown", content: markdown },
445
+ });
446
+ if (res.code !== 0) throw new Error(res.msg);
447
+ return {
448
+ blocks: res.data?.blocks ?? [],
449
+ firstLevelBlockIds: res.data?.first_level_block_ids ?? [],
450
+ };
451
+ }
452
+
453
/**
 * Delete every direct child of the document root (everything except the Page
 * block itself). Returns the number of child blocks counted for deletion.
 *
 * NOTE(review): documentBlock.list may paginate for large documents; only the
 * first page of items is inspected here — confirm behavior for big docs.
 */
async function clearDocumentContent(client: Lark.Client, docToken: string) {
  const existing = await client.docx.documentBlock.list({
    path: { document_id: docToken },
  });
  if (existing.code !== 0) throw new Error(existing.msg);

  // Root-level children: parent is the document itself and type is not Page (1).
  const childIds =
    existing.data?.items
      ?.filter((b) => b.parent_id === docToken && b.block_type !== 1)
      .map((b) => b.block_id) ?? [];

  if (childIds.length > 0) {
    // batchDelete removes root children by index range [start_index, end_index).
    const res = await client.docx.documentBlockChildren.batchDelete({
      path: { document_id: docToken, block_id: docToken },
      data: { start_index: 0, end_index: childIds.length },
    });
    if (res.code !== 0) throw new Error(res.msg);
  }

  return childIds.length;
}
474
+
475
+ async function uploadImageToDocx(
476
+ client: Lark.Client,
477
+ blockId: string,
478
+ imageBuffer: Buffer,
479
+ fileName: string,
480
+ ): Promise<string> {
481
+ const res = await client.drive.media.uploadAll({
482
+ data: {
483
+ file_name: fileName,
484
+ parent_type: "docx_image",
485
+ parent_node: blockId,
486
+ size: imageBuffer.length,
487
+ file: Readable.from(imageBuffer) as any,
488
+ },
489
+ });
490
+
491
+ const fileToken = res?.file_token;
492
+ if (!fileToken) {
493
+ throw new Error("Image upload failed: no file_token returned");
494
+ }
495
+ return fileToken;
496
+ }
497
+
498
+ async function downloadImage(url: string, maxBytes: number): Promise<Buffer> {
499
+ const fetched = await getFeishuRuntime().channel.media.fetchRemoteMedia({ url, maxBytes });
500
+ return fetched.buffer;
501
+ }
502
+
503
/**
 * Download remote images referenced in the markdown and attach them to the
 * inserted Image blocks (block_type 27).
 *
 * URLs and image blocks are paired positionally: the i-th extracted URL fills
 * the i-th inserted Image block. NOTE(review): this assumes extraction order
 * matches insertion order and that no image blocks were dropped — verify for
 * documents where conversion skips images.
 *
 * Per-image failures are logged and skipped. Returns the count of images
 * successfully uploaded and patched.
 */
async function processImages(
  client: Lark.Client,
  docToken: string,
  markdown: string,
  insertedBlocks: any[],
  maxBytes: number,
): Promise<number> {
  const imageUrls = extractImageUrls(markdown);
  if (imageUrls.length === 0) return 0;

  const imageBlocks = insertedBlocks.filter((b) => b.block_type === 27);
  let processed = 0;

  for (let i = 0; i < Math.min(imageUrls.length, imageBlocks.length); i++) {
    const url = imageUrls[i];
    const blockId = imageBlocks[i].block_id;

    try {
      const buffer = await downloadImage(url, maxBytes);
      // Derive a file name from the URL path; fall back to a generated name.
      const urlPath = new URL(url).pathname;
      const fileName = urlPath.split("/").pop() || `image_${i}.png`;
      const fileToken = await uploadImageToDocx(client, blockId, buffer, fileName);

      // Point the placeholder image block at the uploaded media token.
      await client.docx.documentBlock.patch({
        path: { document_id: docToken, block_id: blockId },
        data: {
          replace_image: { token: fileToken },
        },
      });

      processed++;
    } catch (err) {
      console.error(`Failed to process image ${url}:`, err);
    }
  }

  return processed;
}
541
+
542
+ function ensureBlocksInserted(args: {
543
+ mode: "write" | "append";
544
+ markdown: string;
545
+ insertedCount: number;
546
+ skipped: string[];
547
+ warnings: string[];
548
+ }) {
549
+ if (args.markdown.trim().length === 0) {
550
+ return;
551
+ }
552
+ if (args.insertedCount > 0) {
553
+ return;
554
+ }
555
+
556
+ const details: string[] = [];
557
+ if (args.skipped.length > 0) details.push(`skipped=${args.skipped.join(", ")}`);
558
+ if (args.warnings.length > 0) details.push(`warnings=${args.warnings.join(" | ")}`);
559
+ const suffix = details.length > 0 ? ` (${details.join("; ")})` : "";
560
+ throw new Error(
561
+ `Document ${args.mode} produced zero inserted blocks for non-empty content${suffix}. Check markdown compatibility and granted scopes.`,
562
+ );
563
+ }
564
+
565
+ export async function createDoc(
566
+ client: Lark.Client,
567
+ title: string,
568
+ folderToken?: string,
569
+ ): Promise<CreateDocResult> {
570
+ const res = await client.docx.document.create({
571
+ data: { title, folder_token: folderToken },
572
+ });
573
+ if (res.code !== 0) throw new Error(res.msg);
574
+ const doc = res.data?.document;
575
+ return {
576
+ document_id: doc?.document_id,
577
+ title: doc?.title,
578
+ url: `https://feishu.cn/docx/${doc?.document_id}`,
579
+ };
580
+ }
581
+
582
/**
 * Replace the entire content of an existing document with the given markdown.
 *
 * Flow: delete all existing root children, convert markdown to blocks, insert
 * them in order (tables via the two-pass cell path), then upload remote images.
 *
 * NOTE(review): the document is cleared BEFORE conversion/insert; if a later
 * step throws, the document is left empty — confirm callers accept this.
 *
 * @param maxBytes maximum size allowed per downloaded remote image
 * @throws Error on API failure, when conversion yields no blocks for non-empty
 *   markdown, or when nothing could be inserted.
 */
export async function writeDoc(
  client: Lark.Client,
  docToken: string,
  markdown: string,
  maxBytes: number,
): Promise<WriteDocResult> {
  const deleted = await clearDocumentContent(client, docToken);

  // Oversized content is attempted anyway; warn so failures are diagnosable.
  if (markdown.length > MAX_CONTENT_LENGTH) {
    console.warn(
      `[feishu_doc] Content length (${markdown.length}) exceeds recommended limit (${MAX_CONTENT_LENGTH}). May cause API errors.`,
    );
  }

  const { blocks, firstLevelBlockIds } = await convertMarkdown(client, markdown);
  if (blocks.length === 0) {
    if (markdown.trim().length > 0) {
      throw new Error("Markdown conversion returned no blocks for non-empty content.");
    }
    // Empty markdown: clearing the document was the whole operation.
    return { success: true, blocks_deleted: deleted, blocks_added: 0, images_processed: 0 };
  }

  // Restore top-level order (convert returns a flat, unordered block list).
  const orderedBlocks = reorderBlocks(blocks, firstLevelBlockIds);
  const blockMap = buildBlockMap(blocks);
  const { children: inserted, skipped, warnings } = await insertBlocksPreservingTables(
    client,
    docToken,
    orderedBlocks,
    blockMap,
  );
  const imagesProcessed = await processImages(client, docToken, markdown, inserted, maxBytes);
  // Fail loudly if non-empty content produced zero blocks.
  ensureBlocksInserted({
    mode: "write",
    markdown,
    insertedCount: inserted.length,
    skipped,
    warnings,
  });

  // Aggregate user-facing warnings (skipped block types + table copy issues).
  const warningParts: string[] = [];
  if (skipped.length > 0) {
    warningParts.push(`Skipped unsupported block types: ${skipped.join(", ")}.`);
  }
  if (warnings.length > 0) {
    warningParts.push(...warnings);
  }

  return {
    success: true,
    blocks_deleted: deleted,
    blocks_added: inserted.length,
    images_processed: imagesProcessed,
    ...(warningParts.length > 0 && {
      warning: warningParts.join(" "),
    }),
  };
}
639
+
640
/**
 * Append markdown content to the end of an existing document.
 *
 * Unlike writeDoc, existing content is preserved: converted blocks are added
 * after the current root children, then remote images are uploaded.
 *
 * @param maxBytes maximum size allowed per downloaded remote image
 * @throws Error when the markdown converts to no blocks, on API failure, or
 *   when nothing could be inserted for non-empty content.
 */
export async function appendDoc(
  client: Lark.Client,
  docToken: string,
  markdown: string,
  maxBytes: number,
): Promise<AppendDocResult> {
  const { blocks, firstLevelBlockIds } = await convertMarkdown(client, markdown);
  if (blocks.length === 0) {
    throw new Error("Content is empty");
  }

  // Restore top-level order (convert returns a flat, unordered block list).
  const orderedBlocks = reorderBlocks(blocks, firstLevelBlockIds);
  const blockMap = buildBlockMap(blocks);
  const { children: inserted, skipped, warnings } = await insertBlocksPreservingTables(
    client,
    docToken,
    orderedBlocks,
    blockMap,
  );
  const imagesProcessed = await processImages(client, docToken, markdown, inserted, maxBytes);
  // Fail loudly if non-empty content produced zero blocks.
  ensureBlocksInserted({
    mode: "append",
    markdown,
    insertedCount: inserted.length,
    skipped,
    warnings,
  });

  // Aggregate user-facing warnings (skipped block types + table copy issues).
  const warningParts: string[] = [];
  if (skipped.length > 0) {
    warningParts.push(`Skipped unsupported block types: ${skipped.join(", ")}.`);
  }
  if (warnings.length > 0) {
    warningParts.push(...warnings);
  }

  return {
    success: true,
    blocks_added: inserted.length,
    images_processed: imagesProcessed,
    block_ids: inserted.map((b: any) => b.block_id),
    ...(warningParts.length > 0 && {
      warning: warningParts.join(" "),
    }),
  };
}
686
+
687
+ export async function createAndWriteDoc(
688
+ client: Lark.Client,
689
+ title: string,
690
+ markdown: string,
691
+ maxBytes: number,
692
+ folderToken?: string,
693
+ ): Promise<CreateAndWriteDocResult> {
694
+ const created = await createDoc(client, title, folderToken);
695
+ const docId = created.document_id;
696
+ if (!docId) {
697
+ throw new Error("Document created but no document_id returned");
698
+ }
699
+
700
+ const writeResult = await writeDoc(client, docId, markdown, maxBytes);
701
+ return {
702
+ success: true,
703
+ document_id: docId,
704
+ title: created.title ?? title,
705
+ url: created.url,
706
+ import_method: "create_and_write",
707
+ blocks_added: writeResult.blocks_added,
708
+ images_processed: writeResult.images_processed,
709
+ ...(writeResult.warning && { warning: writeResult.warning }),
710
+ };
711
+ }