@nextclaw/channel-plugin-feishu 0.2.29-beta.0 → 0.2.29-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (114)
  1. package/dist/index.d.ts +23 -0
  2. package/dist/index.js +45 -0
  3. package/dist/src/accounts.js +141 -0
  4. package/dist/src/app-scope-checker.js +36 -0
  5. package/dist/src/async.js +34 -0
  6. package/dist/src/auth-errors.js +72 -0
  7. package/dist/src/bitable.js +495 -0
  8. package/dist/src/bot.d.ts +35 -0
  9. package/dist/src/bot.js +941 -0
  10. package/dist/src/calendar-calendar.js +54 -0
  11. package/dist/src/calendar-event-attendee.js +98 -0
  12. package/dist/src/calendar-event.js +193 -0
  13. package/dist/src/calendar-freebusy.js +40 -0
  14. package/dist/src/calendar-shared.js +23 -0
  15. package/dist/src/calendar.js +16 -0
  16. package/dist/src/card-action.js +49 -0
  17. package/dist/src/channel.d.ts +7 -0
  18. package/dist/src/channel.js +413 -0
  19. package/dist/src/chat-schema.js +25 -0
  20. package/dist/src/chat.js +87 -0
  21. package/dist/src/client.d.ts +16 -0
  22. package/dist/src/client.js +112 -0
  23. package/dist/src/config-schema.d.ts +357 -0
  24. package/dist/src/dedup.js +126 -0
  25. package/dist/src/device-flow.js +109 -0
  26. package/dist/src/directory.js +101 -0
  27. package/dist/src/doc-schema.js +148 -0
  28. package/dist/src/docx-batch-insert.js +104 -0
  29. package/dist/src/docx-color-text.js +80 -0
  30. package/dist/src/docx-table-ops.js +197 -0
  31. package/dist/src/docx.js +858 -0
  32. package/dist/src/domains.js +14 -0
  33. package/dist/src/drive-schema.js +41 -0
  34. package/dist/src/drive.js +126 -0
  35. package/dist/src/dynamic-agent.js +93 -0
  36. package/dist/src/external-keys.js +13 -0
  37. package/dist/src/feishu-fetch.js +12 -0
  38. package/dist/src/identity.js +92 -0
  39. package/dist/src/lark-ticket.js +11 -0
  40. package/dist/src/media.d.ts +75 -0
  41. package/dist/src/media.js +304 -0
  42. package/dist/src/mention.d.ts +52 -0
  43. package/dist/src/mention.js +82 -0
  44. package/dist/src/monitor.account.d.ts +1 -0
  45. package/dist/src/monitor.account.js +393 -0
  46. package/dist/src/monitor.d.ts +11 -0
  47. package/dist/src/monitor.js +58 -0
  48. package/dist/src/monitor.startup.js +24 -0
  49. package/dist/src/monitor.state.d.ts +1 -0
  50. package/dist/src/monitor.state.js +80 -0
  51. package/dist/src/monitor.transport.js +167 -0
  52. package/dist/src/nextclaw-sdk/account-id.js +15 -0
  53. package/dist/src/nextclaw-sdk/core-channel.js +150 -0
  54. package/dist/src/nextclaw-sdk/core-pairing.js +151 -0
  55. package/dist/src/nextclaw-sdk/dedupe.js +164 -0
  56. package/dist/src/nextclaw-sdk/feishu.d.ts +1 -0
  57. package/dist/src/nextclaw-sdk/feishu.js +14 -0
  58. package/dist/src/nextclaw-sdk/history.js +69 -0
  59. package/dist/src/nextclaw-sdk/network-body.js +180 -0
  60. package/dist/src/nextclaw-sdk/network-fetch.js +63 -0
  61. package/dist/src/nextclaw-sdk/network-webhook.js +126 -0
  62. package/dist/src/nextclaw-sdk/network.js +4 -0
  63. package/dist/src/nextclaw-sdk/runtime-store.js +21 -0
  64. package/dist/src/nextclaw-sdk/secrets-config.js +65 -0
  65. package/dist/src/nextclaw-sdk/secrets-core.d.ts +1 -0
  66. package/dist/src/nextclaw-sdk/secrets-core.js +68 -0
  67. package/dist/src/nextclaw-sdk/secrets-prompt.js +193 -0
  68. package/dist/src/nextclaw-sdk/secrets.d.ts +1 -0
  69. package/dist/src/nextclaw-sdk/secrets.js +4 -0
  70. package/dist/src/nextclaw-sdk/types.d.ts +242 -0
  71. package/dist/src/oauth.js +171 -0
  72. package/dist/src/onboarding.js +381 -0
  73. package/dist/src/outbound.js +150 -0
  74. package/dist/src/perm-schema.js +49 -0
  75. package/dist/src/perm.js +90 -0
  76. package/dist/src/policy.js +61 -0
  77. package/dist/src/post.js +160 -0
  78. package/dist/src/probe.d.ts +11 -0
  79. package/dist/src/probe.js +85 -0
  80. package/dist/src/raw-request.js +24 -0
  81. package/dist/src/reactions.d.ts +67 -0
  82. package/dist/src/reactions.js +91 -0
  83. package/dist/src/reply-dispatcher.js +250 -0
  84. package/dist/src/runtime.js +5 -0
  85. package/dist/src/secret-input.js +3 -0
  86. package/dist/src/send-result.js +12 -0
  87. package/dist/src/send-target.js +22 -0
  88. package/dist/src/send.d.ts +51 -0
  89. package/dist/src/send.js +265 -0
  90. package/dist/src/sheets-shared.js +193 -0
  91. package/dist/src/sheets.js +95 -0
  92. package/dist/src/streaming-card.js +263 -0
  93. package/dist/src/targets.js +39 -0
  94. package/dist/src/task-comment.js +76 -0
  95. package/dist/src/task-shared.js +13 -0
  96. package/dist/src/task-subtask.js +79 -0
  97. package/dist/src/task-task.js +144 -0
  98. package/dist/src/task-tasklist.js +136 -0
  99. package/dist/src/task.js +16 -0
  100. package/dist/src/token-store.js +154 -0
  101. package/dist/src/tool-account.js +65 -0
  102. package/dist/src/tool-result.js +18 -0
  103. package/dist/src/tool-scopes.js +62 -0
  104. package/dist/src/tools-config.js +30 -0
  105. package/dist/src/types.d.ts +43 -0
  106. package/dist/src/typing.js +145 -0
  107. package/dist/src/uat-client.js +102 -0
  108. package/dist/src/user-tool-client.js +132 -0
  109. package/dist/src/user-tool-helpers.js +110 -0
  110. package/dist/src/user-tool-result.js +10 -0
  111. package/dist/src/wiki-schema.js +45 -0
  112. package/dist/src/wiki.js +144 -0
  113. package/package.json +8 -4
  114. package/index.ts +0 -75
@@ -0,0 +1,148 @@
1
import { Type } from "@sinclair/typebox";

//#region src/doc-schema.ts

// Properties shared by the "create_table" and "create_table_with_values"
// actions; spread into those union variants below.
const tableCreationProperties = {
	doc_token: Type.String({ description: "Document token" }),
	parent_block_id: Type.Optional(Type.String({ description: "Parent block ID (default: document root)" })),
	row_size: Type.Integer({
		description: "Table row count",
		minimum: 1
	}),
	column_size: Type.Integer({
		description: "Table column count",
		minimum: 1
	}),
	column_width: Type.Optional(Type.Array(Type.Number({ minimum: 1 }), { description: "Column widths in px (length should match column_size)" }))
};

// Input schema for the Feishu document tool: a discriminated union keyed on
// the "action" literal, one variant per supported operation. Validators pick
// the variant from "action" and check exactly that variant's fields.
const FeishuDocSchema = Type.Union([
	// Whole-document operations.
	Type.Object({
		action: Type.Literal("read"),
		doc_token: Type.String({ description: "Document token (extract from URL /docx/XXX)" })
	}),
	Type.Object({
		action: Type.Literal("write"),
		doc_token: Type.String({ description: "Document token" }),
		content: Type.String({ description: "Markdown content to write (replaces entire document content)" })
	}),
	Type.Object({
		action: Type.Literal("append"),
		doc_token: Type.String({ description: "Document token" }),
		content: Type.String({ description: "Markdown content to append to end of document" })
	}),
	Type.Object({
		action: Type.Literal("insert"),
		doc_token: Type.String({ description: "Document token" }),
		content: Type.String({ description: "Markdown content to insert" }),
		after_block_id: Type.String({ description: "Insert content after this block ID. Use list_blocks to find block IDs." })
	}),
	Type.Object({
		action: Type.Literal("create"),
		title: Type.String({ description: "Document title" }),
		folder_token: Type.Optional(Type.String({ description: "Target folder token (optional)" })),
		grant_to_requester: Type.Optional(Type.Boolean({ description: "Grant edit permission to the trusted requesting Feishu user from runtime context (default: true)." }))
	}),
	// Block-level operations.
	Type.Object({
		action: Type.Literal("list_blocks"),
		doc_token: Type.String({ description: "Document token" })
	}),
	Type.Object({
		action: Type.Literal("get_block"),
		doc_token: Type.String({ description: "Document token" }),
		block_id: Type.String({ description: "Block ID (from list_blocks)" })
	}),
	Type.Object({
		action: Type.Literal("update_block"),
		doc_token: Type.String({ description: "Document token" }),
		block_id: Type.String({ description: "Block ID (from list_blocks)" }),
		content: Type.String({ description: "New text content" })
	}),
	Type.Object({
		action: Type.Literal("delete_block"),
		doc_token: Type.String({ description: "Document token" }),
		block_id: Type.String({ description: "Block ID" })
	}),
	// Table operations.
	Type.Object({
		action: Type.Literal("create_table"),
		...tableCreationProperties
	}),
	Type.Object({
		action: Type.Literal("write_table_cells"),
		doc_token: Type.String({ description: "Document token" }),
		table_block_id: Type.String({ description: "Table block ID" }),
		values: Type.Array(Type.Array(Type.String()), {
			description: "2D matrix values[row][col] to write into table cells",
			minItems: 1
		})
	}),
	Type.Object({
		action: Type.Literal("create_table_with_values"),
		...tableCreationProperties,
		values: Type.Array(Type.Array(Type.String()), {
			description: "2D matrix values[row][col] to write into table cells",
			minItems: 1
		})
	}),
	Type.Object({
		action: Type.Literal("insert_table_row"),
		doc_token: Type.String({ description: "Document token" }),
		block_id: Type.String({ description: "Table block ID" }),
		row_index: Type.Optional(Type.Number({ description: "Row index to insert at (-1 for end, default: -1)" }))
	}),
	Type.Object({
		action: Type.Literal("insert_table_column"),
		doc_token: Type.String({ description: "Document token" }),
		block_id: Type.String({ description: "Table block ID" }),
		column_index: Type.Optional(Type.Number({ description: "Column index to insert at (-1 for end, default: -1)" }))
	}),
	Type.Object({
		action: Type.Literal("delete_table_rows"),
		doc_token: Type.String({ description: "Document token" }),
		block_id: Type.String({ description: "Table block ID" }),
		row_start: Type.Number({ description: "Start row index (0-based)" }),
		row_count: Type.Optional(Type.Number({ description: "Number of rows to delete (default: 1)" }))
	}),
	Type.Object({
		action: Type.Literal("delete_table_columns"),
		doc_token: Type.String({ description: "Document token" }),
		block_id: Type.String({ description: "Table block ID" }),
		column_start: Type.Number({ description: "Start column index (0-based)" }),
		column_count: Type.Optional(Type.Number({ description: "Number of columns to delete (default: 1)" }))
	}),
	Type.Object({
		action: Type.Literal("merge_table_cells"),
		doc_token: Type.String({ description: "Document token" }),
		block_id: Type.String({ description: "Table block ID" }),
		row_start: Type.Number({ description: "Start row index" }),
		row_end: Type.Number({ description: "End row index (exclusive)" }),
		column_start: Type.Number({ description: "Start column index" }),
		column_end: Type.Number({ description: "End column index (exclusive)" })
	}),
	// Media uploads: exactly one of url / file_path / image is expected by the
	// handler; the schema keeps all optional — NOTE(review): cross-field
	// exclusivity is presumably enforced at runtime, not here.
	Type.Object({
		action: Type.Literal("upload_image"),
		doc_token: Type.String({ description: "Document token" }),
		url: Type.Optional(Type.String({ description: "Remote image URL (http/https)" })),
		file_path: Type.Optional(Type.String({ description: "Local image file path" })),
		image: Type.Optional(Type.String({ description: "Image as data URI (data:image/png;base64,...) or plain base64 string. Use instead of url/file_path for DALL-E outputs, canvas screenshots, etc." })),
		parent_block_id: Type.Optional(Type.String({ description: "Parent block ID (default: document root)" })),
		filename: Type.Optional(Type.String({ description: "Optional filename override" })),
		index: Type.Optional(Type.Integer({
			minimum: 0,
			description: "Insert position (0-based index among siblings). Omit to append."
		}))
	}),
	Type.Object({
		action: Type.Literal("upload_file"),
		doc_token: Type.String({ description: "Document token" }),
		url: Type.Optional(Type.String({ description: "Remote file URL (http/https)" })),
		file_path: Type.Optional(Type.String({ description: "Local file path" })),
		parent_block_id: Type.Optional(Type.String({ description: "Parent block ID (default: document root)" })),
		filename: Type.Optional(Type.String({ description: "Optional filename override" }))
	}),
	// Rich-text coloring (markup parsed by docx-color-text.js).
	Type.Object({
		action: Type.Literal("color_text"),
		doc_token: Type.String({ description: "Document token" }),
		block_id: Type.String({ description: "Text block ID to update" }),
		content: Type.String({ description: "Text with color markup. Tags: [red], [green], [blue], [orange], [yellow], [purple], [grey], [bold], [bg:yellow]. Example: \"Revenue [green]+15%[/green] YoY\"" })
	})
]);

//#endregion
export { FeishuDocSchema };
@@ -0,0 +1,104 @@
1
+ import { cleanBlocksForDescendant } from "./docx-table-ops.js";
2
//#region src/docx-batch-insert.ts

// Feishu Descendant API accepts at most this many blocks per request.
const BATCH_SIZE = 1000;

/**
 * Gather a block and every block reachable from it, in pre-order.
 *
 * A `children` value may be either an array of child IDs or a single child ID
 * string; both forms are handled. Unknown IDs and already-seen IDs are skipped,
 * so shared or cyclic references cannot cause infinite recursion.
 *
 * @param blockMap - Map from block_id to block object
 * @param rootId - ID of the first-level block to start from
 * @returns Blocks in depth-first pre-order, starting with the root block
 */
function collectDescendants(blockMap, rootId) {
	const seen = new Set();
	const ordered = [];
	const walk = (id) => {
		if (seen.has(id)) return;
		seen.add(id);
		const block = blockMap.get(id);
		if (!block) return;
		ordered.push(block);
		const kids = block.children;
		if (typeof kids === "string") walk(kids);
		else if (Array.isArray(kids)) for (const kid of kids) walk(kid);
	};
	walk(rootId);
	return ordered;
}
24
/**
 * Insert one batch of blocks via the Descendant API.
 *
 * Blocks are cleaned for the Descendant API first; an empty cleaned batch is
 * a no-op that returns an empty list without calling the API.
 *
 * @param client - Feishu API client
 * @param docToken - Document ID
 * @param blocks - Blocks to insert (including nested descendants)
 * @param firstLevelBlockIds - IDs of the top-level blocks within `blocks`
 * @param parentBlockId - Parent block to insert into (defaults to docToken)
 * @param index - Position within the parent's children (-1 = end)
 * @returns The created children blocks reported by the API
 * @throws Error with the API message and code on a non-zero response code
 */
async function insertBatch(client, docToken, blocks, firstLevelBlockIds, parentBlockId = docToken, index = -1) {
	const descendants = cleanBlocksForDescendant(blocks);
	if (descendants.length === 0) return [];
	const path = {
		document_id: docToken,
		block_id: parentBlockId
	};
	const data = {
		children_id: firstLevelBlockIds,
		descendants,
		index
	};
	const res = await client.docx.documentBlockDescendant.create({ path, data });
	if (res.code !== 0) throw new Error(`${res.msg} (code: ${res.code})`);
	return res.data?.children ?? [];
}
47
/**
 * Insert blocks in batches for large documents (>1000 blocks).
 *
 * Batches are split so that BOTH the children_id AND the descendants arrays
 * stay under the BATCH_SIZE (1000 block) API limit per request.
 *
 * @param client - Feishu API client
 * @param docToken - Document ID
 * @param blocks - All blocks from Convert API
 * @param firstLevelBlockIds - IDs of top-level blocks to insert
 * @param logger - Optional logger for progress updates
 * @param parentBlockId - Parent block to insert into (defaults to docToken = document root)
 * @param startIndex - Starting position within parent (-1 = end). For multi-batch inserts,
 *   each batch advances this by the number of first-level IDs inserted so far.
 * @returns Inserted children blocks and any skipped block IDs
 * @throws Error when a single top-level block's subtree alone exceeds BATCH_SIZE,
 *   or when the underlying insertBatch call fails
 */
async function insertBlocksInBatches(client, docToken, blocks, firstLevelBlockIds, logger, parentBlockId = docToken, startIndex = -1) {
	const allChildren = [];
	const batches = [];
	let currentBatch = {
		firstLevelIds: [],
		blocks: []
	};
	const usedBlockIds = new Set();
	const blockMap = new Map();
	for (const block of blocks) blockMap.set(block.block_id, block);
	for (const firstLevelId of firstLevelBlockIds) {
		// Only count blocks not already claimed by an earlier first-level subtree.
		const newBlocks = collectDescendants(blockMap, firstLevelId).filter((b) => !usedBlockIds.has(b.block_id));
		// A single subtree larger than one request cannot be split across batches.
		if (newBlocks.length > BATCH_SIZE) throw new Error(`Block "${firstLevelId}" has ${newBlocks.length} descendants, which exceeds the Feishu API limit of ${BATCH_SIZE} blocks per request. Please split the content into smaller sections.`);
		// Start a new batch when this subtree would push the current one over the limit.
		if (currentBatch.blocks.length + newBlocks.length > BATCH_SIZE && currentBatch.blocks.length > 0) {
			batches.push(currentBatch);
			currentBatch = {
				firstLevelIds: [],
				blocks: []
			};
		}
		currentBatch.firstLevelIds.push(firstLevelId);
		for (const block of newBlocks) {
			currentBatch.blocks.push(block);
			usedBlockIds.add(block.block_id);
		}
	}
	if (currentBatch.blocks.length > 0) batches.push(currentBatch);
	let currentIndex = startIndex;
	for (let i = 0; i < batches.length; i++) {
		const batch = batches[i];
		logger?.info?.(`feishu_doc: Inserting batch ${i + 1}/${batches.length} (${batch.blocks.length} blocks)...`);
		const children = await insertBatch(client, docToken, batch.blocks, batch.firstLevelIds, parentBlockId, currentIndex);
		allChildren.push(...children);
		// When inserting at an explicit position, later batches go after the
		// first-level blocks already inserted; -1 (append) needs no adjustment.
		if (currentIndex !== -1) currentIndex += batch.firstLevelIds.length;
	}
	return {
		children: allChildren,
		skipped: []
	};
}
103
+ //#endregion
104
+ export { BATCH_SIZE, insertBlocksInBatches };
@@ -0,0 +1,80 @@
1
//#region src/docx-color-text.ts

// Named text colors mapped to the Feishu text_color enum values.
const TEXT_COLOR = {
	red: 1,
	orange: 2,
	yellow: 3,
	green: 4,
	blue: 5,
	purple: 6,
	grey: 7,
	gray: 7
};

// Named background colors mapped to the Feishu background_color enum values.
const BACKGROUND_COLOR = {
	red: 1,
	orange: 2,
	yellow: 3,
	green: 4,
	blue: 5,
	purple: 6,
	grey: 7,
	gray: 7
};

/**
 * Parse color markup into styled text segments.
 *
 * Supported forms:
 *   [red]text[/red]        → red text
 *   [bg:yellow]text[/bg]   → yellow background
 *   [bold]text[/bold]      → bold
 *   [green bold]text[/green] → green + bold
 *
 * Closing tags are matched leniently (any [/...] ends a styled run); text
 * outside recognized tags is emitted as unstyled segments.
 *
 * @param content - Raw text containing optional markup tags
 * @returns Segments of { text, textColor?, bgColor?, bold? }
 */
function parseColorMarkup(content) {
	const segments = [];
	const KNOWN = "(?:bg:[a-z]+|bold|red|orange|yellow|green|blue|purple|gr[ae]y)";
	const tagPattern = new RegExp(`\\[(${KNOWN}(?:\\s+${KNOWN})*)\\](.*?)\\[\\/(?:[^\\]]+)\\]|([^[]+|\\[)`, "gis");
	for (let match = tagPattern.exec(content); match !== null; match = tagPattern.exec(content)) {
		const plain = match[3];
		if (plain !== undefined) {
			// Untagged run (or a stray "[") — keep as unstyled text.
			if (plain) segments.push({ text: plain });
			continue;
		}
		const text = match[2];
		let textColor;
		let bgColor;
		let bold = false;
		for (const tag of match[1].toLowerCase().trim().split(/\s+/)) {
			if (tag.startsWith("bg:")) {
				const code = BACKGROUND_COLOR[tag.slice(3)];
				if (code) bgColor = code;
			} else if (tag === "bold") {
				bold = true;
			} else if (TEXT_COLOR[tag]) {
				textColor = TEXT_COLOR[tag];
			}
		}
		// Empty tagged runs produce no segment, matching untagged handling.
		if (text) segments.push(Object.assign({ text }, textColor ? { textColor } : null, bgColor ? { bgColor } : null, bold ? { bold } : null));
	}
	return segments;
}
52
/**
 * Update a text block's elements with colored segments parsed from markup.
 *
 * Parses `content` with parseColorMarkup and patches the block's text
 * elements, mapping segment styles to text_color / background_color / bold.
 *
 * @param client - Feishu API client
 * @param docToken - Document ID
 * @param blockId - Text block ID to update
 * @param content - Text with color markup (see parseColorMarkup)
 * @returns { success, segments, block } where segments is the parsed count
 * @throws Error with the API message and code on a non-zero response code
 */
async function updateColorText(client, docToken, blockId, content) {
	const segments = parseColorMarkup(content);
	const elements = segments.map((seg) => ({ text_run: {
		content: seg.text,
		text_element_style: {
			...seg.textColor && { text_color: seg.textColor },
			...seg.bgColor && { background_color: seg.bgColor },
			...seg.bold && { bold: true }
		}
	} }));
	const res = await client.docx.documentBlock.patch({
		path: {
			document_id: docToken,
			block_id: blockId
		},
		data: { update_text_elements: { elements } }
	});
	// Include the API error code for easier debugging (consistent with insertBatch).
	if (res.code !== 0) throw new Error(`${res.msg} (code: ${res.code})`);
	return {
		success: true,
		segments: segments.length,
		block: res.data?.block
	};
}
79
+ //#endregion
80
+ export { updateColorText };
@@ -0,0 +1,197 @@
1
//#region src/docx-table-ops.ts

// Per-column width bounds and fallback table width, all in pixels.
const MIN_COLUMN_WIDTH = 50;
const MAX_COLUMN_WIDTH = 400;
const DEFAULT_TABLE_WIDTH = 730;

/**
 * Calculate adaptive column widths based on cell content length.
 *
 * For each column the longest cell text (with characters above code point
 * 255 weighted 2x, since they render wider) determines a proportional share
 * of the table's total width. Each width is then clamped to
 * [MIN_COLUMN_WIDTH, MAX_COLUMN_WIDTH] and leftover space is spread evenly
 * across columns that can still grow.
 *
 * The total width is the sum of the original column_width values from the
 * Convert API, or DEFAULT_TABLE_WIDTH when none are present.
 *
 * @param blocks - Array of blocks from Convert API
 * @param tableBlockId - The block_id of the table block (block_type 31)
 * @returns Array of column widths in pixels; empty when no table is found
 */
function calculateAdaptiveColumnWidths(blocks, tableBlockId) {
	const table = blocks.find((b) => b.block_id === tableBlockId && b.block_type === 31);
	if (!table?.table?.property) return [];
	const { row_size, column_size, column_width: originalWidths } = table.table.property;
	const totalWidth = originalWidths?.length ? originalWidths.reduce((sum, w) => sum + w, 0) : DEFAULT_TABLE_WIDTH;
	const cellIds = table.children || [];
	const byId = new Map();
	for (const b of blocks) byId.set(b.block_id, b);
	const clampWidth = (w) => Math.max(MIN_COLUMN_WIDTH, Math.min(MAX_COLUMN_WIDTH, w));
	// Concatenate the text_run contents of every child block of a cell.
	const cellText = (cellId) => {
		const cell = byId.get(cellId);
		if (!cell?.children) return "";
		const childIds = Array.isArray(cell.children) ? cell.children : [cell.children];
		let text = "";
		for (const childId of childIds) {
			const child = byId.get(childId);
			if (!child?.text?.elements) continue;
			for (const elem of child.text.elements) {
				if (elem.text_run?.content) text += elem.text_run.content;
			}
		}
		return text;
	};
	// Weight wide (non-Latin-1) characters as 2 columns of width.
	const weightedLength = (text) => {
		let len = 0;
		for (const ch of text) len += ch.charCodeAt(0) > 255 ? 2 : 1;
		return len;
	};
	// Longest weighted cell text seen per column; cells are laid out row-major.
	const maxLengths = new Array(column_size).fill(0);
	for (let row = 0; row < row_size; row++) {
		for (let col = 0; col < column_size; col++) {
			const cellId = cellIds[row * column_size + col];
			if (!cellId) continue;
			const len = weightedLength(cellText(cellId));
			if (len > maxLengths[col]) maxLengths[col] = len;
		}
	}
	const totalLength = maxLengths.reduce((a, b) => a + b, 0);
	// All cells empty: split the width evenly.
	if (totalLength === 0) return new Array(column_size).fill(clampWidth(Math.floor(totalWidth / column_size)));
	const widths = maxLengths.map((len) => clampWidth(Math.round((len / totalLength) * totalWidth)));
	// Hand remaining space back to columns still under the max, evenly.
	let remaining = totalWidth - widths.reduce((a, b) => a + b, 0);
	while (remaining > 0) {
		const growable = [];
		for (let i = 0; i < widths.length; i++) {
			if (widths[i] < MAX_COLUMN_WIDTH) growable.push(i);
		}
		if (growable.length === 0) break;
		const perColumn = Math.floor(remaining / growable.length);
		if (perColumn === 0) break;
		for (const i of growable) {
			const add = Math.min(perColumn, MAX_COLUMN_WIDTH - widths[i]);
			widths[i] += add;
			remaining -= add;
		}
	}
	return widths;
}
80
/**
 * Clean blocks for the Descendant API, applying adaptive column widths.
 *
 * - Strips parent_id from every block
 * - Normalizes TableCell (block_type 32) children from string to array
 * - Rebuilds each Table (block_type 31) property from scratch, which drops
 *   read-only fields (cells, merge_info) that the API rejects
 * - Applies adaptive column_width computed from cell content
 *
 * @param blocks - Array of blocks from Convert API
 * @returns Cleaned blocks ready for the Descendant API
 */
function cleanBlocksForDescendant(blocks) {
	// Precompute widths per table so the map step stays simple.
	const widthsByTable = new Map();
	for (const block of blocks) {
		if (block.block_type === 31) widthsByTable.set(block.block_id, calculateAdaptiveColumnWidths(blocks, block.block_id));
	}
	return blocks.map((block) => {
		const { parent_id: _parentId, ...cleaned } = block;
		if (cleaned.block_type === 32 && typeof cleaned.children === "string") cleaned.children = [cleaned.children];
		if (cleaned.block_type === 31 && cleaned.table) {
			const { row_size, column_size } = cleaned.table.property || {};
			const property = { row_size, column_size };
			const widths = widthsByTable.get(block.block_id);
			if (widths?.length) property.column_width = widths;
			cleaned.table = { property };
		}
		return cleaned;
	});
}
113
/**
 * Insert a row into a table block via the document-block patch API.
 *
 * @param client - Feishu API client
 * @param docToken - Document ID
 * @param blockId - Table block ID
 * @param rowIndex - Row index to insert at (-1 appends at the end)
 * @returns { success, block } with the patched block when the API returns it
 * @throws Error with the API message and code on a non-zero response code
 */
async function insertTableRow(client, docToken, blockId, rowIndex = -1) {
	const res = await client.docx.documentBlock.patch({
		path: {
			document_id: docToken,
			block_id: blockId
		},
		data: { insert_table_row: { row_index: rowIndex } }
	});
	// Include the API error code for easier debugging (consistent with insertBatch).
	if (res.code !== 0) throw new Error(`${res.msg} (code: ${res.code})`);
	return {
		success: true,
		block: res.data?.block
	};
}
127
/**
 * Insert a column into a table block via the document-block patch API.
 *
 * @param client - Feishu API client
 * @param docToken - Document ID
 * @param blockId - Table block ID
 * @param columnIndex - Column index to insert at (-1 appends at the end)
 * @returns { success, block } with the patched block when the API returns it
 * @throws Error with the API message and code on a non-zero response code
 */
async function insertTableColumn(client, docToken, blockId, columnIndex = -1) {
	const res = await client.docx.documentBlock.patch({
		path: {
			document_id: docToken,
			block_id: blockId
		},
		data: { insert_table_column: { column_index: columnIndex } }
	});
	// Include the API error code for easier debugging (consistent with insertBatch).
	if (res.code !== 0) throw new Error(`${res.msg} (code: ${res.code})`);
	return {
		success: true,
		block: res.data?.block
	};
}
141
/**
 * Delete a contiguous range of rows from a table block.
 *
 * Translates (rowStart, rowCount) into the API's half-open
 * [row_start_index, row_end_index) range.
 *
 * @param client - Feishu API client
 * @param docToken - Document ID
 * @param blockId - Table block ID
 * @param rowStart - First row to delete (0-based)
 * @param rowCount - Number of rows to delete (default: 1)
 * @returns { success, rows_deleted, block }
 * @throws Error with the API message and code on a non-zero response code
 */
async function deleteTableRows(client, docToken, blockId, rowStart, rowCount = 1) {
	const res = await client.docx.documentBlock.patch({
		path: {
			document_id: docToken,
			block_id: blockId
		},
		data: { delete_table_rows: {
			row_start_index: rowStart,
			row_end_index: rowStart + rowCount
		} }
	});
	// Include the API error code for easier debugging (consistent with insertBatch).
	if (res.code !== 0) throw new Error(`${res.msg} (code: ${res.code})`);
	return {
		success: true,
		rows_deleted: rowCount,
		block: res.data?.block
	};
}
159
/**
 * Delete a contiguous range of columns from a table block.
 *
 * Translates (columnStart, columnCount) into the API's half-open
 * [column_start_index, column_end_index) range.
 *
 * @param client - Feishu API client
 * @param docToken - Document ID
 * @param blockId - Table block ID
 * @param columnStart - First column to delete (0-based)
 * @param columnCount - Number of columns to delete (default: 1)
 * @returns { success, columns_deleted, block }
 * @throws Error with the API message and code on a non-zero response code
 */
async function deleteTableColumns(client, docToken, blockId, columnStart, columnCount = 1) {
	const res = await client.docx.documentBlock.patch({
		path: {
			document_id: docToken,
			block_id: blockId
		},
		data: { delete_table_columns: {
			column_start_index: columnStart,
			column_end_index: columnStart + columnCount
		} }
	});
	// Include the API error code for easier debugging (consistent with insertBatch).
	if (res.code !== 0) throw new Error(`${res.msg} (code: ${res.code})`);
	return {
		success: true,
		columns_deleted: columnCount,
		block: res.data?.block
	};
}
177
/**
 * Merge a rectangular range of table cells.
 *
 * All indices are 0-based; row_end / column_end are exclusive, matching the
 * half-open ranges expected by the patch API.
 *
 * @param client - Feishu API client
 * @param docToken - Document ID
 * @param blockId - Table block ID
 * @param rowStart - Start row index
 * @param rowEnd - End row index (exclusive)
 * @param columnStart - Start column index
 * @param columnEnd - End column index (exclusive)
 * @returns { success, block }
 * @throws Error with the API message and code on a non-zero response code
 */
async function mergeTableCells(client, docToken, blockId, rowStart, rowEnd, columnStart, columnEnd) {
	const res = await client.docx.documentBlock.patch({
		path: {
			document_id: docToken,
			block_id: blockId
		},
		data: { merge_table_cells: {
			row_start_index: rowStart,
			row_end_index: rowEnd,
			column_start_index: columnStart,
			column_end_index: columnEnd
		} }
	});
	// Include the API error code for easier debugging (consistent with insertBatch).
	if (res.code !== 0) throw new Error(`${res.msg} (code: ${res.code})`);
	return {
		success: true,
		block: res.data?.block
	};
}
196
+ //#endregion
197
+ export { cleanBlocksForDescendant, deleteTableColumns, deleteTableRows, insertTableColumn, insertTableRow, mergeTableCells };