@openclaw/feishu 2026.5.2 → 2026.5.3-beta.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/accounts-Ba3-WP1z.js +423 -0
- package/dist/api.js +2280 -0
- package/dist/app-registration-B8qc1MCM.js +184 -0
- package/dist/audio-preflight.runtime-BPlzkO3l.js +7 -0
- package/dist/card-interaction-BfRLgvw_.js +96 -0
- package/dist/channel-CSD_Jt8I.js +1668 -0
- package/dist/channel-entry.js +22 -0
- package/dist/channel-plugin-api.js +2 -0
- package/dist/channel.runtime-DYsXcD36.js +700 -0
- package/dist/client-DBVoQL5w.js +157 -0
- package/dist/contract-api.js +9 -0
- package/dist/conversation-id-DWS3Ep2A.js +139 -0
- package/dist/directory.static-f3EeoRJd.js +44 -0
- package/dist/drive-C5eJLJr7.js +883 -0
- package/dist/index.js +68 -0
- package/dist/monitor-CT189QfR.js +60 -0
- package/dist/monitor.account-dJV2jO8C.js +4990 -0
- package/dist/monitor.state-DYM02ipp.js +100 -0
- package/dist/policy-D6c-wMPl.js +118 -0
- package/dist/probe-BNzzU_uR.js +149 -0
- package/dist/rolldown-runtime-DUslC3ob.js +14 -0
- package/dist/runtime-CG0DuRCy.js +8 -0
- package/dist/runtime-api.js +14 -0
- package/dist/secret-contract-Dm4Z_zQN.js +119 -0
- package/dist/secret-contract-api.js +2 -0
- package/dist/security-audit-DqJdocrN.js +11 -0
- package/dist/security-audit-shared-ByuMx9cJ.js +38 -0
- package/dist/security-contract-api.js +2 -0
- package/dist/send-DowxxbpH.js +1218 -0
- package/dist/session-conversation-B4nrW-vo.js +27 -0
- package/dist/session-key-api.js +2 -0
- package/dist/setup-api.js +2 -0
- package/dist/setup-entry.js +15 -0
- package/dist/subagent-hooks-C3UhPVLV.js +227 -0
- package/dist/subagent-hooks-api.js +23 -0
- package/dist/targets-JMFJRKSe.js +48 -0
- package/dist/thread-bindings-BmS6TLes.js +222 -0
- package/package.json +15 -6
- package/api.ts +0 -31
- package/channel-entry.ts +0 -20
- package/channel-plugin-api.ts +0 -1
- package/contract-api.ts +0 -16
- package/index.ts +0 -82
- package/runtime-api.ts +0 -55
- package/secret-contract-api.ts +0 -5
- package/security-contract-api.ts +0 -1
- package/session-key-api.ts +0 -1
- package/setup-api.ts +0 -3
- package/setup-entry.test.ts +0 -14
- package/setup-entry.ts +0 -13
- package/src/accounts.test.ts +0 -459
- package/src/accounts.ts +0 -326
- package/src/app-registration.ts +0 -331
- package/src/approval-auth.test.ts +0 -24
- package/src/approval-auth.ts +0 -25
- package/src/async.test.ts +0 -35
- package/src/async.ts +0 -104
- package/src/audio-preflight.runtime.ts +0 -9
- package/src/bitable.test.ts +0 -131
- package/src/bitable.ts +0 -762
- package/src/bot-content.ts +0 -474
- package/src/bot-group-name.test.ts +0 -108
- package/src/bot-runtime-api.ts +0 -12
- package/src/bot-sender-name.ts +0 -125
- package/src/bot.broadcast.test.ts +0 -463
- package/src/bot.card-action.test.ts +0 -577
- package/src/bot.checkBotMentioned.test.ts +0 -265
- package/src/bot.helpers.test.ts +0 -118
- package/src/bot.stripBotMention.test.ts +0 -126
- package/src/bot.test.ts +0 -3040
- package/src/bot.ts +0 -1559
- package/src/card-action.ts +0 -447
- package/src/card-interaction.test.ts +0 -129
- package/src/card-interaction.ts +0 -159
- package/src/card-test-helpers.ts +0 -47
- package/src/card-ux-approval.ts +0 -65
- package/src/card-ux-launcher.test.ts +0 -99
- package/src/card-ux-launcher.ts +0 -121
- package/src/card-ux-shared.ts +0 -33
- package/src/channel-runtime-api.ts +0 -16
- package/src/channel.runtime.ts +0 -47
- package/src/channel.test.ts +0 -959
- package/src/channel.ts +0 -1313
- package/src/chat-schema.ts +0 -25
- package/src/chat.test.ts +0 -196
- package/src/chat.ts +0 -188
- package/src/client.test.ts +0 -433
- package/src/client.ts +0 -290
- package/src/comment-dispatcher-runtime-api.ts +0 -6
- package/src/comment-dispatcher.test.ts +0 -169
- package/src/comment-dispatcher.ts +0 -107
- package/src/comment-handler-runtime-api.ts +0 -3
- package/src/comment-handler.test.ts +0 -486
- package/src/comment-handler.ts +0 -309
- package/src/comment-reaction.test.ts +0 -166
- package/src/comment-reaction.ts +0 -259
- package/src/comment-shared.test.ts +0 -182
- package/src/comment-shared.ts +0 -406
- package/src/comment-target.ts +0 -44
- package/src/config-schema.test.ts +0 -309
- package/src/config-schema.ts +0 -333
- package/src/conversation-id.test.ts +0 -18
- package/src/conversation-id.ts +0 -199
- package/src/dedup-runtime-api.ts +0 -1
- package/src/dedup.ts +0 -141
- package/src/directory.static.ts +0 -61
- package/src/directory.test.ts +0 -136
- package/src/directory.ts +0 -124
- package/src/doc-schema.ts +0 -182
- package/src/docx-batch-insert.test.ts +0 -91
- package/src/docx-batch-insert.ts +0 -223
- package/src/docx-color-text.ts +0 -154
- package/src/docx-table-ops.test.ts +0 -53
- package/src/docx-table-ops.ts +0 -316
- package/src/docx-types.ts +0 -38
- package/src/docx.account-selection.test.ts +0 -79
- package/src/docx.test.ts +0 -685
- package/src/docx.ts +0 -1616
- package/src/drive-schema.ts +0 -92
- package/src/drive.test.ts +0 -1219
- package/src/drive.ts +0 -829
- package/src/dynamic-agent.ts +0 -137
- package/src/event-types.ts +0 -45
- package/src/external-keys.test.ts +0 -20
- package/src/external-keys.ts +0 -19
- package/src/lifecycle.test-support.ts +0 -220
- package/src/media.test.ts +0 -900
- package/src/media.ts +0 -861
- package/src/mention-target.types.ts +0 -5
- package/src/mention.ts +0 -114
- package/src/message-action-contract.ts +0 -13
- package/src/monitor-state-runtime-api.ts +0 -7
- package/src/monitor-transport-runtime-api.ts +0 -7
- package/src/monitor.account.ts +0 -468
- package/src/monitor.acp-init-failure.lifecycle.test-support.ts +0 -219
- package/src/monitor.bot-identity.ts +0 -86
- package/src/monitor.bot-menu-handler.ts +0 -165
- package/src/monitor.bot-menu.lifecycle.test-support.ts +0 -224
- package/src/monitor.bot-menu.test.ts +0 -178
- package/src/monitor.broadcast.reply-once.lifecycle.test-support.ts +0 -264
- package/src/monitor.card-action.lifecycle.test-support.ts +0 -373
- package/src/monitor.cleanup.test.ts +0 -376
- package/src/monitor.comment-notice-handler.ts +0 -105
- package/src/monitor.comment.test.ts +0 -937
- package/src/monitor.comment.ts +0 -1386
- package/src/monitor.lifecycle.test.ts +0 -4
- package/src/monitor.message-handler.ts +0 -339
- package/src/monitor.reaction.lifecycle.test-support.ts +0 -68
- package/src/monitor.reaction.test.ts +0 -713
- package/src/monitor.startup.test.ts +0 -192
- package/src/monitor.startup.ts +0 -74
- package/src/monitor.state.defaults.test.ts +0 -46
- package/src/monitor.state.ts +0 -170
- package/src/monitor.synthetic-error.ts +0 -18
- package/src/monitor.test-mocks.ts +0 -45
- package/src/monitor.transport.ts +0 -424
- package/src/monitor.ts +0 -100
- package/src/monitor.webhook-e2e.test.ts +0 -272
- package/src/monitor.webhook-security.test.ts +0 -264
- package/src/monitor.webhook.test-helpers.ts +0 -116
- package/src/outbound-runtime-api.ts +0 -1
- package/src/outbound.test.ts +0 -935
- package/src/outbound.ts +0 -718
- package/src/perm-schema.ts +0 -52
- package/src/perm.ts +0 -170
- package/src/pins.ts +0 -108
- package/src/policy.test.ts +0 -334
- package/src/policy.ts +0 -236
- package/src/post.test.ts +0 -105
- package/src/post.ts +0 -275
- package/src/probe.test.ts +0 -275
- package/src/probe.ts +0 -166
- package/src/processing-claims.ts +0 -59
- package/src/qr-terminal.ts +0 -1
- package/src/reactions.ts +0 -123
- package/src/reasoning-preview.test.ts +0 -59
- package/src/reasoning-preview.ts +0 -20
- package/src/reply-dispatcher-runtime-api.ts +0 -7
- package/src/reply-dispatcher.test.ts +0 -1144
- package/src/reply-dispatcher.ts +0 -650
- package/src/runtime.ts +0 -9
- package/src/secret-contract.ts +0 -145
- package/src/secret-input.ts +0 -1
- package/src/security-audit-shared.ts +0 -69
- package/src/security-audit.test.ts +0 -61
- package/src/security-audit.ts +0 -1
- package/src/send-result.ts +0 -29
- package/src/send-target.test.ts +0 -80
- package/src/send-target.ts +0 -35
- package/src/send.reply-fallback.test.ts +0 -292
- package/src/send.test.ts +0 -550
- package/src/send.ts +0 -800
- package/src/sequential-key.test.ts +0 -72
- package/src/sequential-key.ts +0 -28
- package/src/sequential-queue.test.ts +0 -92
- package/src/sequential-queue.ts +0 -16
- package/src/session-conversation.ts +0 -42
- package/src/session-route.ts +0 -48
- package/src/setup-core.ts +0 -51
- package/src/setup-surface.test.ts +0 -174
- package/src/setup-surface.ts +0 -581
- package/src/streaming-card.test.ts +0 -190
- package/src/streaming-card.ts +0 -490
- package/src/subagent-hooks.test.ts +0 -603
- package/src/subagent-hooks.ts +0 -397
- package/src/targets.ts +0 -97
- package/src/test-support/lifecycle-test-support.ts +0 -453
- package/src/thread-bindings.test.ts +0 -143
- package/src/thread-bindings.ts +0 -330
- package/src/tool-account-routing.test.ts +0 -187
- package/src/tool-account.test.ts +0 -44
- package/src/tool-account.ts +0 -93
- package/src/tool-factory-test-harness.ts +0 -79
- package/src/tool-result.test.ts +0 -32
- package/src/tool-result.ts +0 -16
- package/src/tools-config.test.ts +0 -21
- package/src/tools-config.ts +0 -22
- package/src/types.ts +0 -104
- package/src/typing.test.ts +0 -144
- package/src/typing.ts +0 -214
- package/src/wiki-schema.ts +0 -55
- package/src/wiki.ts +0 -227
- package/subagent-hooks-api.ts +0 -31
- package/tsconfig.json +0 -16
package/dist/api.js
ADDED
|
@@ -0,0 +1,2280 @@
|
|
|
1
|
+
import { a as parseFeishuTargetId, i as parseFeishuDirectConversationId, n as buildFeishuModelOverrideParentCandidates, r as parseFeishuConversationId, t as buildFeishuConversationId } from "./conversation-id-DWS3Ep2A.js";
|
|
2
|
+
import { n as createFeishuThreadBindingManager, r as getFeishuThreadBindingManager, t as __testing } from "./thread-bindings-BmS6TLes.js";
|
|
3
|
+
import { n as handleFeishuSubagentEnded, r as handleFeishuSubagentSpawning, t as handleFeishuSubagentDeliveryTarget } from "./subagent-hooks-C3UhPVLV.js";
|
|
4
|
+
import { r as listEnabledFeishuAccounts } from "./accounts-Ba3-WP1z.js";
|
|
5
|
+
import { a as setFeishuNamedAccountEnabled, i as feishuSetupAdapter, n as feishuSetupWizard, r as runFeishuLogin, t as feishuPlugin } from "./channel-CSD_Jt8I.js";
|
|
6
|
+
import { t as getFeishuRuntime } from "./runtime-CG0DuRCy.js";
|
|
7
|
+
import { a as jsonToolResult, d as registerFeishuChatTools, f as createFeishuToolClient, m as resolveFeishuToolAccount, n as registerFeishuDriveTools, o as toolExecutionErrorResult, p as resolveAnyEnabledFeishuToolsConfig, s as unknownToolActionResult } from "./drive-C5eJLJr7.js";
|
|
8
|
+
import { normalizeLowercaseStringOrEmpty, normalizeOptionalString, readStringValue } from "openclaw/plugin-sdk/text-runtime";
|
|
9
|
+
import { existsSync } from "node:fs";
|
|
10
|
+
import { homedir } from "node:os";
|
|
11
|
+
import { basename, isAbsolute, resolve } from "node:path";
|
|
12
|
+
import { formatErrorMessage } from "openclaw/plugin-sdk/error-runtime";
|
|
13
|
+
import { Type } from "typebox";
|
|
14
|
+
import { createClackPrompter } from "openclaw/plugin-sdk/setup-runtime";
|
|
15
|
+
//#region extensions/feishu/src/doc-schema.ts
|
|
16
|
+
// Shared schema fields for the table-creation actions
// (create_table / create_table_with_values).
const tableCreationProperties = {
  doc_token: Type.String({ description: "Document token" }),
  parent_block_id: Type.Optional(Type.String({ description: "Parent block ID (default: document root)" })),
  // Both dimensions must be at least 1.
  row_size: Type.Integer({
    description: "Table row count",
    minimum: 1
  }),
  column_size: Type.Integer({
    description: "Table column count",
    minimum: 1
  }),
  column_width: Type.Optional(Type.Array(Type.Number({ minimum: 1 }), { description: "Column widths in px (length should match column_size)" }))
};
|
|
29
|
+
// Discriminated union over the `action` literal: every variant of the
// feishu_doc tool input. Descriptions are surfaced to the model as-is.
const FeishuDocSchema = Type.Union([
  // --- whole-document read/write ---
  Type.Object({
    action: Type.Literal("read"),
    doc_token: Type.String({ description: "Document token (extract from URL /docx/XXX)" })
  }),
  Type.Object({
    action: Type.Literal("write"),
    doc_token: Type.String({ description: "Document token" }),
    content: Type.String({ description: "Markdown content to write (replaces entire document content)" })
  }),
  Type.Object({
    action: Type.Literal("append"),
    doc_token: Type.String({ description: "Document token" }),
    content: Type.String({ description: "Markdown content to append to end of document" })
  }),
  Type.Object({
    action: Type.Literal("insert"),
    doc_token: Type.String({ description: "Document token" }),
    content: Type.String({ description: "Markdown content to insert" }),
    after_block_id: Type.String({ description: "Insert content after this block ID. Use list_blocks to find block IDs." })
  }),
  Type.Object({
    action: Type.Literal("create"),
    title: Type.String({ description: "Document title" }),
    folder_token: Type.Optional(Type.String({ description: "Target folder token (optional)" })),
    grant_to_requester: Type.Optional(Type.Boolean({ description: "Grant edit permission to the trusted requesting Feishu user from runtime context (default: true)." }))
  }),
  // --- block-level operations ---
  Type.Object({
    action: Type.Literal("list_blocks"),
    doc_token: Type.String({ description: "Document token" })
  }),
  Type.Object({
    action: Type.Literal("get_block"),
    doc_token: Type.String({ description: "Document token" }),
    block_id: Type.String({ description: "Block ID (from list_blocks)" })
  }),
  Type.Object({
    action: Type.Literal("update_block"),
    doc_token: Type.String({ description: "Document token" }),
    block_id: Type.String({ description: "Block ID (from list_blocks)" }),
    content: Type.String({ description: "New text content" })
  }),
  Type.Object({
    action: Type.Literal("delete_block"),
    doc_token: Type.String({ description: "Document token" }),
    block_id: Type.String({ description: "Block ID" })
  }),
  // --- table operations ---
  Type.Object({
    action: Type.Literal("create_table"),
    ...tableCreationProperties
  }),
  Type.Object({
    action: Type.Literal("write_table_cells"),
    doc_token: Type.String({ description: "Document token" }),
    table_block_id: Type.String({ description: "Table block ID" }),
    values: Type.Array(Type.Array(Type.String()), {
      description: "2D matrix values[row][col] to write into table cells",
      minItems: 1
    })
  }),
  Type.Object({
    action: Type.Literal("create_table_with_values"),
    ...tableCreationProperties,
    values: Type.Array(Type.Array(Type.String()), {
      description: "2D matrix values[row][col] to write into table cells",
      minItems: 1
    })
  }),
  Type.Object({
    action: Type.Literal("insert_table_row"),
    doc_token: Type.String({ description: "Document token" }),
    block_id: Type.String({ description: "Table block ID" }),
    row_index: Type.Optional(Type.Number({ description: "Row index to insert at (-1 for end, default: -1)" }))
  }),
  Type.Object({
    action: Type.Literal("insert_table_column"),
    doc_token: Type.String({ description: "Document token" }),
    block_id: Type.String({ description: "Table block ID" }),
    column_index: Type.Optional(Type.Number({ description: "Column index to insert at (-1 for end, default: -1)" }))
  }),
  Type.Object({
    action: Type.Literal("delete_table_rows"),
    doc_token: Type.String({ description: "Document token" }),
    block_id: Type.String({ description: "Table block ID" }),
    row_start: Type.Number({ description: "Start row index (0-based)" }),
    row_count: Type.Optional(Type.Number({ description: "Number of rows to delete (default: 1)" }))
  }),
  Type.Object({
    action: Type.Literal("delete_table_columns"),
    doc_token: Type.String({ description: "Document token" }),
    block_id: Type.String({ description: "Table block ID" }),
    column_start: Type.Number({ description: "Start column index (0-based)" }),
    column_count: Type.Optional(Type.Number({ description: "Number of columns to delete (default: 1)" }))
  }),
  Type.Object({
    action: Type.Literal("merge_table_cells"),
    doc_token: Type.String({ description: "Document token" }),
    block_id: Type.String({ description: "Table block ID" }),
    row_start: Type.Number({ description: "Start row index" }),
    row_end: Type.Number({ description: "End row index (exclusive)" }),
    column_start: Type.Number({ description: "Start column index" }),
    column_end: Type.Number({ description: "End column index (exclusive)" })
  }),
  // --- media uploads ---
  Type.Object({
    action: Type.Literal("upload_image"),
    doc_token: Type.String({ description: "Document token" }),
    url: Type.Optional(Type.String({ description: "Remote image URL (http/https)" })),
    file_path: Type.Optional(Type.String({ description: "Local image file path" })),
    image: Type.Optional(Type.String({ description: "Image as data URI (data:image/png;base64,...) or plain base64 string. Use instead of url/file_path for DALL-E outputs, canvas screenshots, etc." })),
    parent_block_id: Type.Optional(Type.String({ description: "Parent block ID (default: document root)" })),
    filename: Type.Optional(Type.String({ description: "Optional filename override" })),
    index: Type.Optional(Type.Integer({
      minimum: 0,
      description: "Insert position (0-based index among siblings). Omit to append."
    }))
  }),
  Type.Object({
    action: Type.Literal("upload_file"),
    doc_token: Type.String({ description: "Document token" }),
    url: Type.Optional(Type.String({ description: "Remote file URL (http/https)" })),
    file_path: Type.Optional(Type.String({ description: "Local file path" })),
    parent_block_id: Type.Optional(Type.String({ description: "Parent block ID (default: document root)" })),
    filename: Type.Optional(Type.String({ description: "Optional filename override" }))
  }),
  // --- rich-text coloring ---
  Type.Object({
    action: Type.Literal("color_text"),
    doc_token: Type.String({ description: "Document token" }),
    block_id: Type.String({ description: "Text block ID to update" }),
    content: Type.String({ description: "Text with color markup. Tags: [red], [green], [blue], [orange], [yellow], [purple], [grey], [bold], [bg:yellow]. Example: \"Revenue [green]+15%[/green] YoY\"" })
  })
]);
|
|
160
|
+
//#endregion
|
|
161
|
+
//#region extensions/feishu/src/docx-table-ops.ts
|
|
162
|
+
// Per-column pixel bounds applied by the adaptive-width algorithm below.
const MIN_COLUMN_WIDTH = 50;
const MAX_COLUMN_WIDTH = 400;
// Fallback total table width (px) when the Convert API supplies no
// column_width values.
const DEFAULT_TABLE_WIDTH = 730;
/**
 * Calculate adaptive column widths based on cell content length.
 *
 * Algorithm:
 * 1. For each column, find the max content length across all rows
 * 2. Weight CJK characters as 2x width (they render wider)
 * 3. Calculate proportional widths based on content length
 * 4. Apply min/max constraints
 * 5. Redistribute remaining space to fill total table width
 *
 * Total width is derived from the original column_width values returned
 * by the Convert API, ensuring tables match Feishu's expected dimensions.
 *
 * NOTE(review): this doc block describes calculateAdaptiveColumnWidths,
 * defined a few declarations below (bundler moved the helpers above it).
 *
 * @param blocks - Array of blocks from Convert API
 * @param tableBlockId - The block_id of the table block
 * @returns Array of column widths in pixels
 */
|
|
182
|
+
/**
 * Coerce a block's `children` field to an array of child block IDs.
 * The Convert API sometimes emits a single ID as a bare string; anything
 * else (undefined, numbers, ...) normalizes to an empty array.
 */
function normalizeChildBlockIds(children) {
  if (Array.isArray(children)) return children;
  if (typeof children === "string") return [children];
  return [];
}
|
|
186
|
+
/**
 * Return a shallow copy of `block` without its `parent_id` field.
 * The Descendant API computes parentage itself, so the field must not
 * be sent back.
 */
function omitParentId(block) {
  const { parent_id, ...rest } = block;
  return rest;
}
|
|
191
|
+
/**
 * Build a minimal table payload for the Descendant API: only the
 * row/column counts survive, plus adaptive column widths when provided.
 * Rebuilding `property` from scratch drops read-only fields the API rejects.
 */
function createDescendantTable(table, adaptiveWidths) {
  const source = table.property || {};
  const property = {
    row_size: source.row_size,
    column_size: source.column_size
  };
  if (adaptiveWidths && adaptiveWidths.length > 0) {
    property.column_width = adaptiveWidths;
  }
  return { property };
}
|
|
199
|
+
/**
 * Compute adaptive per-column pixel widths for the table with
 * `tableBlockId` (block_type 31). Content length drives proportions,
 * non-Latin-1 characters (CJK etc.) count double, and every column is
 * clamped to [MIN_COLUMN_WIDTH, MAX_COLUMN_WIDTH]. Leftover width after
 * clamping is redistributed across columns still below the maximum.
 * Returns [] when the table block or its dimensions cannot be resolved.
 */
function calculateAdaptiveColumnWidths(blocks, tableBlockId) {
  const tableBlock = blocks.find((b) => b.block_id === tableBlockId && b.block_type === 31);
  if (!tableBlock?.table?.property) return [];
  const { row_size, column_size, column_width: originalWidths } = tableBlock.table.property;
  if (!row_size || !column_size) return [];
  // Target width: sum of the Convert API's widths, or the global default.
  const totalWidth = originalWidths && originalWidths.length > 0
    ? originalWidths.reduce((sum, w) => sum + w, 0)
    : DEFAULT_TABLE_WIDTH;
  const cellIds = normalizeChildBlockIds(tableBlock.children);
  const byId = new Map();
  for (const b of blocks) {
    if (b.block_id) byId.set(b.block_id, b);
  }
  // Concatenated plain text of every text run inside one table cell.
  const cellText = (cellId) => {
    const parts = [];
    for (const childId of normalizeChildBlockIds(byId.get(cellId)?.children)) {
      const elements = byId.get(childId)?.text?.elements;
      if (!elements) continue;
      for (const el of elements) {
        if (el.text_run?.content) parts.push(el.text_run.content);
      }
    }
    return parts.join("");
  };
  // Weighted length: characters outside Latin-1 render roughly 2x wide.
  const weightedLength = (text) => {
    let total = 0;
    for (const ch of text) total += ch.charCodeAt(0) > 255 ? 2 : 1;
    return total;
  };
  // Per-column maximum weighted content length across all rows.
  // Cells are laid out row-major in the table's children list.
  const maxLengths = Array.from({ length: column_size }, () => 0);
  for (let row = 0; row < row_size; row++) {
    for (let col = 0; col < column_size; col++) {
      const cellId = cellIds[row * column_size + col];
      if (!cellId) continue;
      maxLengths[col] = Math.max(maxLengths[col], weightedLength(cellText(cellId)));
    }
  }
  const totalLength = maxLengths.reduce((sum, len) => sum + len, 0);
  if (totalLength === 0) {
    // Entirely empty table: split the width evenly, clamped to the range.
    const even = Math.max(MIN_COLUMN_WIDTH, Math.min(MAX_COLUMN_WIDTH, Math.floor(totalWidth / column_size)));
    return Array.from({ length: column_size }, () => even);
  }
  // Proportional width per column, clamped to [MIN, MAX].
  const widths = maxLengths.map((len) =>
    Math.max(MIN_COLUMN_WIDTH, Math.min(MAX_COLUMN_WIDTH, Math.round(len / totalLength * totalWidth)))
  );
  // Clamping can leave unused width; hand it out evenly to columns that
  // still have headroom, repeating until nothing meaningful remains.
  let remaining = totalWidth - widths.reduce((sum, w) => sum + w, 0);
  while (remaining > 0) {
    const growable = [];
    for (let i = 0; i < widths.length; i++) {
      if (widths[i] < MAX_COLUMN_WIDTH) growable.push(i);
    }
    if (growable.length === 0) break;
    const perColumn = Math.floor(remaining / growable.length);
    if (perColumn === 0) break;
    for (const i of growable) {
      const add = Math.min(perColumn, MAX_COLUMN_WIDTH - widths[i]);
      widths[i] += add;
      remaining -= add;
    }
  }
  return widths;
}
|
|
257
|
+
/**
 * Clean blocks for the Descendant API with adaptive column widths.
 *
 * - Removes parent_id from all blocks
 * - Fixes children type (string → array) for TableCell blocks (type 32)
 * - Rebuilds each Table (type 31) payload via createDescendantTable,
 *   which also drops read-only fields such as merge_info
 * - Calculates and applies adaptive column_width for tables
 *
 * @param blocks - Array of blocks from Convert API
 * @returns Cleaned blocks ready for Descendant API
 */
function cleanBlocksForDescendant(blocks) {
  // Pre-compute adaptive widths per table so the map pass stays simple.
  const widthsByTable = new Map();
  for (const block of blocks) {
    if (block.block_type === 31 && block.block_id) {
      widthsByTable.set(block.block_id, calculateAdaptiveColumnWidths(blocks, block.block_id));
    }
  }
  return blocks.map((block) => {
    const clean = omitParentId(block);
    if (clean.block_type === 32 && typeof clean.children === "string") {
      clean.children = [clean.children];
    }
    if (clean.block_type === 31 && clean.table) {
      const adaptive = block.block_id ? widthsByTable.get(block.block_id) : undefined;
      clean.table = createDescendantTable(clean.table, adaptive);
    }
    return clean;
  });
}
|
|
284
|
+
/**
 * Insert a row into a table block via the documentBlock patch API.
 *
 * @param rowIndex - Target row position; -1 (default) appends at the end.
 * @returns `{ success, block }` with the patched block from the response.
 * @throws Error carrying the API's `msg` when `code` is non-zero.
 */
async function insertTableRow(client, docToken, blockId, rowIndex = -1) {
  const res = await client.docx.documentBlock.patch({
    path: { document_id: docToken, block_id: blockId },
    data: { insert_table_row: { row_index: rowIndex } }
  });
  if (res.code !== 0) throw new Error(res.msg);
  return { success: true, block: res.data?.block };
}
|
|
298
|
+
/**
 * Insert a column into a table block via the documentBlock patch API.
 *
 * @param columnIndex - Target column position; -1 (default) appends at the end.
 * @returns `{ success, block }` with the patched block from the response.
 * @throws Error carrying the API's `msg` when `code` is non-zero.
 */
async function insertTableColumn(client, docToken, blockId, columnIndex = -1) {
  const res = await client.docx.documentBlock.patch({
    path: { document_id: docToken, block_id: blockId },
    data: { insert_table_column: { column_index: columnIndex } }
  });
  if (res.code !== 0) throw new Error(res.msg);
  return { success: true, block: res.data?.block };
}
|
|
312
|
+
/**
 * Delete `rowCount` rows starting at `rowStart` (0-based) from a table
 * block. The API takes a half-open [start, end) range, so end is
 * rowStart + rowCount.
 *
 * @returns `{ success, rows_deleted, block }`.
 * @throws Error carrying the API's `msg` when `code` is non-zero.
 */
async function deleteTableRows(client, docToken, blockId, rowStart, rowCount = 1) {
  const res = await client.docx.documentBlock.patch({
    path: { document_id: docToken, block_id: blockId },
    data: {
      delete_table_rows: {
        row_start_index: rowStart,
        row_end_index: rowStart + rowCount
      }
    }
  });
  if (res.code !== 0) throw new Error(res.msg);
  return { success: true, rows_deleted: rowCount, block: res.data?.block };
}
|
|
330
|
+
/**
 * Delete `columnCount` columns starting at `columnStart` (0-based) from a
 * table block. The API takes a half-open [start, end) range, so end is
 * columnStart + columnCount.
 *
 * @returns `{ success, columns_deleted, block }`.
 * @throws Error carrying the API's `msg` when `code` is non-zero.
 */
async function deleteTableColumns(client, docToken, blockId, columnStart, columnCount = 1) {
  const res = await client.docx.documentBlock.patch({
    path: { document_id: docToken, block_id: blockId },
    data: {
      delete_table_columns: {
        column_start_index: columnStart,
        column_end_index: columnStart + columnCount
      }
    }
  });
  if (res.code !== 0) throw new Error(res.msg);
  return { success: true, columns_deleted: columnCount, block: res.data?.block };
}
|
|
348
|
+
/**
 * Merge the rectangular cell range [rowStart, rowEnd) x
 * [columnStart, columnEnd) of a table block (end indices exclusive).
 *
 * @returns `{ success, block }` with the patched block from the response.
 * @throws Error carrying the API's `msg` when `code` is non-zero.
 */
async function mergeTableCells(client, docToken, blockId, rowStart, rowEnd, columnStart, columnEnd) {
  const res = await client.docx.documentBlock.patch({
    path: { document_id: docToken, block_id: blockId },
    data: {
      merge_table_cells: {
        row_start_index: rowStart,
        row_end_index: rowEnd,
        column_start_index: columnStart,
        column_end_index: columnEnd
      }
    }
  });
  if (res.code !== 0) throw new Error(res.msg);
  return { success: true, block: res.data?.block };
}
|
|
367
|
+
//#endregion
|
|
368
|
+
//#region extensions/feishu/src/docx-batch-insert.ts
|
|
369
|
+
// Feishu Descendant API hard limit on blocks per single request.
const BATCH_SIZE = 1000;
|
|
370
|
+
/**
 * Normalize a block's `children` to a string[] of IDs, or undefined when
 * there are none. A single bare-string child (readStringValue succeeds)
 * is wrapped in a one-element array.
 */
function normalizeChildIds$1(children) {
  if (Array.isArray(children)) return children;
  const single = readStringValue(children);
  return single ? [single] : undefined;
}
|
|
375
|
+
/**
 * Shallow-copy a block for the Descendant API payload, replacing
 * `children` with its normalized array form when one exists.
 */
function toDescendantBlock$1(block) {
  const normalized = normalizeChildIds$1(block.children);
  const copy = { ...block };
  if (normalized) copy.children = normalized;
  return copy;
}
|
|
382
|
+
/**
 * Collect all descendant blocks for a given first-level block ID.
 * Walks `children` links depth-first from `rootId`; a visited set guards
 * against cycles, and IDs missing from `blockMap` are skipped.
 */
function collectDescendants(blockMap, rootId) {
  const out = [];
  const seen = new Set();
  const walk = (id) => {
    if (seen.has(id)) return;
    seen.add(id);
    const block = blockMap.get(id);
    if (!block) return;
    out.push(block);
    const kids = block.children;
    if (Array.isArray(kids)) {
      for (const childId of kids) walk(childId);
    } else if (typeof kids === "string") {
      walk(kids);
    }
  };
  walk(rootId);
  return out;
}
|
|
402
|
+
/**
 * Insert a single batch of blocks using the Descendant API.
 *
 * @param parentBlockId - Parent block to insert into (defaults to docToken)
 * @param index - Position within parent's children (-1 = end)
 * @returns Inserted children blocks; [] when there is nothing to insert.
 * @throws Error including the API code when the call fails.
 */
async function insertBatch(client, docToken, blocks, firstLevelBlockIds, parentBlockId = docToken, index = -1) {
  const descendants = cleanBlocksForDescendant(blocks);
  if (descendants.length === 0) return [];
  const res = await client.docx.documentBlockDescendant.create({
    path: { document_id: docToken, block_id: parentBlockId },
    data: {
      children_id: firstLevelBlockIds,
      descendants: descendants.map(toDescendantBlock$1),
      index
    }
  });
  if (res.code !== 0) throw new Error(`${res.msg} (code: ${res.code})`);
  return res.data?.children ?? [];
}
|
|
425
|
+
/**
 * Insert blocks in batches for large documents (>1000 blocks).
 *
 * Batches are split to ensure BOTH children_id AND descendants
 * arrays stay under the 1000 block API limit.
 *
 * @param client - Feishu API client
 * @param docToken - Document ID
 * @param blocks - All blocks from Convert API
 * @param firstLevelBlockIds - IDs of top-level blocks to insert
 * @param logger - Optional logger for progress updates
 * @param parentBlockId - Parent block to insert into (defaults to docToken = document root)
 * @param startIndex - Starting position within parent (-1 = end). For multi-batch inserts,
 * each batch advances this by the number of first-level IDs inserted so far.
 * @returns Inserted children blocks and any skipped block IDs
 */
async function insertBlocksInBatches(client, docToken, blocks, firstLevelBlockIds, logger, parentBlockId = docToken, startIndex = -1) {
	// Hard API cap on blocks per Descendant-create request. Previously the
	// size checks compared against a literal 1e3 while the error message
	// interpolated BATCH_SIZE; both now use this constant so the reported
	// limit can never drift from the enforced one.
	const MAX_BLOCKS_PER_REQUEST = 1e3;
	const allChildren = [];
	const batches = [];
	let currentBatch = {
		firstLevelIds: [],
		blocks: []
	};
	const usedBlockIds = new Set();
	// Index blocks by id so each first-level subtree can be collected quickly.
	const blockMap = new Map();
	for (const block of blocks) if (block.block_id) blockMap.set(block.block_id, block);
	for (const firstLevelId of firstLevelBlockIds) {
		// Collect this subtree, skipping blocks already claimed by an earlier one.
		const newBlocks = collectDescendants(blockMap, firstLevelId).filter((b) => b.block_id && !usedBlockIds.has(b.block_id));
		// A single subtree that exceeds the cap cannot be split automatically.
		if (newBlocks.length > MAX_BLOCKS_PER_REQUEST) throw new Error(`Block "${firstLevelId}" has ${newBlocks.length} descendants, which exceeds the Feishu API limit of ${MAX_BLOCKS_PER_REQUEST} blocks per request. Please split the content into smaller sections.`);
		// Start a fresh batch when adding this subtree would overflow the cap.
		if (currentBatch.blocks.length + newBlocks.length > MAX_BLOCKS_PER_REQUEST && currentBatch.blocks.length > 0) {
			batches.push(currentBatch);
			currentBatch = {
				firstLevelIds: [],
				blocks: []
			};
		}
		currentBatch.firstLevelIds.push(firstLevelId);
		for (const block of newBlocks) {
			currentBatch.blocks.push(block);
			if (block.block_id) usedBlockIds.add(block.block_id);
		}
	}
	if (currentBatch.blocks.length > 0) batches.push(currentBatch);
	// Insert batches sequentially, advancing the insertion index so later
	// batches land after the first-level blocks of earlier ones.
	let currentIndex = startIndex;
	for (let i = 0; i < batches.length; i++) {
		const batch = batches[i];
		logger?.info?.(`feishu_doc: Inserting batch ${i + 1}/${batches.length} (${batch.blocks.length} blocks)...`);
		const children = await insertBatch(client, docToken, batch.blocks, batch.firstLevelIds, parentBlockId, currentIndex);
		allChildren.push(...children);
		if (currentIndex !== -1) currentIndex += batch.firstLevelIds.length;
	}
	return {
		children: allChildren,
		skipped: []
	};
}
|
|
481
|
+
//#endregion
|
|
482
|
+
//#region extensions/feishu/src/docx-color-text.ts
|
|
483
|
+
// Feishu text-style color codes (1-7); "grey" and "gray" are accepted synonyms.
const TEXT_COLOR = {
	red: 1,
	orange: 2,
	yellow: 3,
	green: 4,
	blue: 5,
	purple: 6,
	grey: 7,
	gray: 7
};
// Background palette uses the same color names and numeric codes as the
// foreground palette; derive it instead of repeating the literal.
const BACKGROUND_COLOR = { ...TEXT_COLOR };
|
|
503
|
+
/**
 * Parse color markup into styled segments.
 *
 * Supports:
 * [red]text[/red] → red text
 * [bg:yellow]text[/bg] → yellow background
 * [bold]text[/bold] → bold
 * [green bold]text[/green] → green + bold
 */
function parseColorMarkup(content) {
	const segments = [];
	const KNOWN = "(?:bg:[a-z]+|bold|red|orange|yellow|green|blue|purple|gr[ae]y)";
	const tagPattern = new RegExp(`\\[(${KNOWN}(?:\\s+${KNOWN})*)\\](.*?)\\[\\/(?:[^\\]]+)\\]|([^[]+|\\[)`, "gis");
	for (let match = tagPattern.exec(content); match !== null; match = tagPattern.exec(content)) {
		// Group 3 matched → plain text (or a stray "[") outside any tag pair.
		const plainText = match[3];
		if (plainText !== void 0) {
			if (plainText) segments.push({ text: plainText });
			continue;
		}
		// Groups 1/2 matched → a tagged span; apply every recognized tag.
		const text = match[2];
		const segment = { text };
		const tags = normalizeLowercaseStringOrEmpty(match[1]).split(/\s+/);
		for (const tag of tags) {
			if (tag.startsWith("bg:")) {
				const color = tag.slice(3);
				if (BACKGROUND_COLOR[color]) segment.bgColor = BACKGROUND_COLOR[color];
			} else if (tag === "bold") {
				segment.bold = true;
			} else if (TEXT_COLOR[tag]) {
				segment.textColor = TEXT_COLOR[tag];
			}
		}
		if (text) segments.push(segment);
	}
	return segments;
}
|
|
533
|
+
/**
 * Update a text block with colored segments parsed from color markup.
 *
 * @returns success flag, number of parsed segments, and the patched block
 * @throws when the patch request is rejected by the API
 */
async function updateColorText(client, docToken, blockId, content) {
	const segments = parseColorMarkup(content);
	const elements = segments.map((seg) => {
		const style = {
			...seg.textColor ? { text_color: seg.textColor } : {},
			...seg.bgColor ? { background_color: seg.bgColor } : {},
			...seg.bold ? { bold: true } : {}
		};
		return { text_run: {
			content: seg.text,
			text_element_style: style
		} };
	});
	const res = await client.docx.documentBlock.patch({
		path: {
			document_id: docToken,
			block_id: blockId
		},
		data: { update_text_elements: { elements } }
	});
	if (res.code !== 0) throw new Error(res.msg);
	return {
		success: true,
		segments: segments.length,
		block: res.data?.block
	};
}
|
|
560
|
+
//#endregion
|
|
561
|
+
//#region extensions/feishu/src/docx.ts
|
|
562
|
+
/** Wrap a value as a tool result: pretty-printed JSON text plus the raw value. */
function json$1(data) {
	const text = JSON.stringify(data, null, 2);
	return {
		content: [{
			type: "text",
			text
		}],
		details: data
	};
}
|
|
571
|
+
/**
 * Resolve the allowed local filesystem roots for doc tools.
 *
 * Returns undefined when the fs policy does not restrict access to the
 * workspace; returns an empty list (no allowed roots) when it does but no
 * workspace directory is configured.
 */
function resolveDocToolLocalRoots(ctx) {
	if (ctx.fsPolicy?.workspaceOnly !== true) {
		return;
	}
	const workspaceDir = ctx.workspaceDir?.trim();
	return workspaceDir ? [resolve(workspaceDir)] : [];
}
|
|
577
|
+
/** Extract http(s) image URLs from markdown image syntax `![alt](url)`. */
function extractImageUrls(markdown) {
	const imagePattern = /!\[[^\]]*\]\(([^)]+)\)/g;
	const found = [];
	let hit;
	while ((hit = imagePattern.exec(markdown)) !== null) {
		const candidate = hit[1].trim();
		// Only remote URLs are collected; local/relative paths are ignored.
		if (candidate.startsWith("http://") || candidate.startsWith("https://")) {
			found.push(candidate);
		}
	}
	return found;
}
|
|
588
|
+
/** Human-readable names for the Feishu docx block_type codes used here. */
const BLOCK_TYPE_NAMES = {
	1: "Page",
	2: "Text",
	3: "Heading1",
	4: "Heading2",
	5: "Heading3",
	12: "Bullet",
	13: "Ordered",
	14: "Code",
	15: "Quote",
	17: "Todo",
	18: "Bitable",
	21: "Diagram",
	22: "Divider",
	23: "File",
	27: "Image",
	30: "Sheet",
	31: "Table",
	32: "TableCell"
};
/** Block types the Children-create API cannot create (Table / TableCell). */
const UNSUPPORTED_CREATE_TYPES = new Set([31, 32]);
/**
 * Clean blocks for insertion (remove unsupported types).
 *
 * Filters out block types the Children API cannot create and reports their
 * human-readable names in `skipped`.
 *
 * @param blocks - Converted blocks to sanitize
 * @returns cleaned blocks plus the type names of the skipped blocks
 */
function cleanBlocksForInsert(blocks) {
	const skipped = [];
	const cleaned = blocks.filter((block) => {
		if (!UNSUPPORTED_CREATE_TYPES.has(block.block_type)) return true;
		const typeName = BLOCK_TYPE_NAMES[block.block_type] || `type_${block.block_type}`;
		skipped.push(typeName);
		return false;
	});
	// NOTE: a previous version also stripped `table.merge_info` from type-31
	// blocks in a .map after this filter — but type 31 is in
	// UNSUPPORTED_CREATE_TYPES, so the filter had already removed every such
	// block and that branch was unreachable dead code. It has been removed.
	return {
		cleaned,
		skipped
	};
}
|
|
630
|
+
const MAX_CONVERT_RETRY_DEPTH = 8;
|
|
631
|
+
async function convertMarkdown(client, markdown) {
|
|
632
|
+
const res = await client.docx.document.convert({ data: {
|
|
633
|
+
content_type: "markdown",
|
|
634
|
+
content: markdown
|
|
635
|
+
} });
|
|
636
|
+
if (res.code !== 0) throw new Error(res.msg);
|
|
637
|
+
return {
|
|
638
|
+
blocks: res.data?.blocks ?? [],
|
|
639
|
+
firstLevelBlockIds: res.data?.first_level_block_ids ?? []
|
|
640
|
+
};
|
|
641
|
+
}
|
|
642
|
+
/** Coerce a converted block's `children` field into a string-id array. */
function normalizeChildIds(children) {
	if (Array.isArray(children)) return children.filter((child) => typeof child === "string");
	if (typeof children === "string") return [children];
	return [];
}
/** Pass-through: the Children-create API accepts converted blocks as-is. */
function toCreateChildBlock(block) {
	return block;
}
/**
 * Prepare a block for the Descendant-create API, whose `children` field must
 * be an array of child block-id strings.
 *
 * Bug fix: the previous version spread `...block` LAST, so the raw
 * (possibly non-normalized) `children` value always overwrote the normalized
 * array and the normalization was a no-op. The base block is now spread
 * first and the normalized fields override it; an empty/invalid `children`
 * value is dropped entirely, matching the `children.length > 0` guard's
 * evident intent.
 */
function toDescendantBlock(block) {
	const children = normalizeChildIds(block.children);
	const { children: _rawChildren, ...rest } = block;
	return {
		...rest,
		...children.length > 0 ? { children } : {}
	};
}
|
|
658
|
+
/** Keep only object entries from an API-returned children list. */
function normalizeInsertedChildBlocks(children) {
	if (!Array.isArray(children)) return [];
	const result = [];
	for (const child of children) {
		if (typeof child === "object" && child !== null) result.push(child);
	}
	return result;
}
|
|
662
|
+
/**
 * Normalize a Convert-API block list into (a) a depth-first ordered array in
 * which each parent precedes its children and (b) the list of root block ids.
 *
 * The API-provided `firstLevelIds` is trusted when non-empty; otherwise roots
 * are inferred as blocks that are never referenced as anyone's child and whose
 * parent_id does not resolve within this batch.
 */
function normalizeConvertedBlockTree(blocks, firstLevelIds) {
	// Trivial cases: zero or one block needs no reordering.
	if (blocks.length <= 1) return {
		orderedBlocks: blocks,
		rootIds: blocks.length === 1 && typeof blocks[0]?.block_id === "string" ? [blocks[0].block_id] : []
	};
	// Index blocks by id and record original positions for stable ordering.
	const byId = /* @__PURE__ */ new Map();
	const originalOrder = /* @__PURE__ */ new Map();
	for (const [index, block] of blocks.entries()) if (typeof block?.block_id === "string") {
		byId.set(block.block_id, block);
		originalOrder.set(block.block_id, index);
	}
	// Any id referenced as someone's child cannot be a root.
	const childIds = /* @__PURE__ */ new Set();
	for (const block of blocks) for (const childId of normalizeChildIds(block?.children)) childIds.add(childId);
	// Infer top-level blocks: not referenced as a child AND the parent is
	// absent or not part of this batch; sort back into original order.
	// NOTE(review): Array.prototype.toSorted is ES2023 — requires Node >= 20.
	const inferredTopLevelIds = blocks.filter((block) => {
		const blockId = block?.block_id;
		if (typeof blockId !== "string") return false;
		const parentId = typeof block?.parent_id === "string" ? block.parent_id : "";
		return !childIds.has(blockId) && (!parentId || !byId.has(parentId));
	}).toSorted((a, b) => (originalOrder.get(a.block_id ?? "__missing__") ?? 0) - (originalOrder.get(b.block_id ?? "__missing__") ?? 0)).map((block) => block.block_id).filter((blockId) => typeof blockId === "string");
	// Prefer the API-provided root ids; de-duplicate and drop unknown ids.
	const rootIds = (firstLevelIds && firstLevelIds.length > 0 ? firstLevelIds : inferredTopLevelIds).filter((id, index, arr) => typeof id === "string" && byId.has(id) && arr.indexOf(id) === index);
	// Depth-first walk from each root so parents always precede children.
	const orderedBlocks = [];
	const visited = /* @__PURE__ */ new Set();
	const visit = (blockId) => {
		if (!byId.has(blockId) || visited.has(blockId)) return;
		visited.add(blockId);
		const block = byId.get(blockId);
		if (!block) return;
		orderedBlocks.push(block);
		for (const childId of normalizeChildIds(block?.children)) visit(childId);
	};
	for (const rootId of rootIds) visit(rootId);
	// Sweep up orphans: anything unreachable from a root is appended in
	// original order; blocks without a string id are passed through as-is.
	for (const block of blocks) if (typeof block?.block_id === "string") visit(block.block_id);
	else orderedBlocks.push(block);
	return {
		orderedBlocks,
		rootIds: rootIds.filter((id) => typeof id === "string")
	};
}
|
|
700
|
+
/**
 * Insert converted blocks one-by-one via the Children API.
 *
 * Blocks with unsupported types are filtered out and reported in `skipped`.
 * Blocks are inserted sequentially so that `index + offset` positioning
 * remains stable.
 *
 * @param parentBlockId - Target parent (defaults to the document root)
 * @param index - Optional position for the first block within the parent
 */
async function insertBlocks(client, docToken, blocks, parentBlockId, index) {
	const { cleaned, skipped } = cleanBlocksForInsert(blocks);
	const targetBlockId = parentBlockId ?? docToken;
	if (cleaned.length === 0) {
		return {
			children: [],
			skipped
		};
	}
	const inserted = [];
	for (const [offset, block] of cleaned.entries()) {
		const res = await client.docx.documentBlockChildren.create({
			path: {
				document_id: docToken,
				block_id: targetBlockId
			},
			data: {
				children: [toCreateChildBlock(block)],
				...index !== void 0 ? { index: index + offset } : {}
			}
		});
		if (res.code !== 0) throw new Error(res.msg);
		inserted.push(...res.data?.children ?? []);
	}
	return {
		children: inserted,
		skipped
	};
}
|
|
727
|
+
/** Split markdown into chunks at top-level headings (# or ##) to stay within API content limits */
function splitMarkdownByHeadings(markdown) {
	const chunks = [];
	let buffer = [];
	let insideFence = false;
	const flush = () => {
		if (buffer.length > 0) {
			chunks.push(buffer.join("\n"));
			buffer = [];
		}
	};
	for (const line of markdown.split("\n")) {
		// Track fenced code blocks so headings inside them are not split points.
		if (/^(`{3,}|~{3,})/.test(line)) insideFence = !insideFence;
		if (!insideFence && /^#{1,2}\s/.test(line)) flush();
		buffer.push(line);
	}
	flush();
	return chunks;
}
|
|
744
|
+
/** Split markdown by size, preferring to break outside fenced code blocks when possible */
function splitMarkdownBySize(markdown, maxChars) {
	if (markdown.length <= maxChars) return [markdown];
	const lines = markdown.split("\n");
	const chunks = [];
	let buffer = [];
	let bufferedChars = 0;
	let insideFence = false;
	for (const line of lines) {
		// Track fenced code blocks; never break inside one.
		if (/^(`{3,}|~{3,})/.test(line)) insideFence = !insideFence;
		const cost = line.length + 1;
		if (buffer.length > 0 && bufferedChars + cost > maxChars && !insideFence) {
			chunks.push(buffer.join("\n"));
			buffer = [];
			bufferedChars = 0;
		}
		buffer.push(line);
		bufferedChars += cost;
	}
	if (buffer.length > 0) chunks.push(buffer.join("\n"));
	if (chunks.length > 1) return chunks;
	// Fallback: no safe break point was found — split the lines in half.
	const midpoint = Math.floor(lines.length / 2);
	if (midpoint <= 0 || midpoint >= lines.length) return [markdown];
	return [lines.slice(0, midpoint).join("\n"), lines.slice(midpoint).join("\n")];
}
|
|
770
|
+
/**
 * Convert markdown, retrying on failure by splitting the content roughly in
 * half and converting each part recursively (up to MAX_CONVERT_RETRY_DEPTH
 * levels deep) — works around Convert-API content size limits.
 */
async function convertMarkdownWithFallback(client, markdown, depth = 0) {
	try {
		return await convertMarkdown(client, markdown);
	} catch (error) {
		const canSplit = depth < MAX_CONVERT_RETRY_DEPTH && markdown.length >= 2;
		if (!canSplit) throw error;
		const halves = splitMarkdownBySize(markdown, Math.max(256, Math.floor(markdown.length / 2)));
		if (halves.length <= 1) throw error;
		const blocks = [];
		const firstLevelBlockIds = [];
		for (const part of halves) {
			const converted = await convertMarkdownWithFallback(client, part, depth + 1);
			blocks.push(...converted.blocks);
			firstLevelBlockIds.push(...converted.firstLevelBlockIds);
		}
		return {
			blocks,
			firstLevelBlockIds
		};
	}
}
|
|
790
|
+
/** Convert markdown in chunks to avoid document.convert content size limits */
async function chunkedConvertMarkdown(client, markdown) {
	const allBlocks = [];
	const allRootIds = [];
	for (const chunk of splitMarkdownByHeadings(markdown)) {
		const converted = await convertMarkdownWithFallback(client, chunk);
		const normalized = normalizeConvertedBlockTree(converted.blocks, converted.firstLevelBlockIds);
		allBlocks.push(...normalized.orderedBlocks);
		allRootIds.push(...normalized.rootIds);
	}
	return {
		blocks: allBlocks,
		firstLevelBlockIds: allRootIds
	};
}
|
|
806
|
+
/**
 * Insert blocks using the Descendant API (supports tables, nested lists, large docs).
 * Unlike the Children API, this supports block_type 31/32 (Table/TableCell).
 *
 * @param parentBlockId - Parent block to insert into (defaults to docToken = document root)
 * @param index - Position within parent's children (-1 = end, 0 = first)
 */
async function insertBlocksWithDescendant(client, docToken, blocks, firstLevelBlockIds, { parentBlockId = docToken, index = -1 } = {}) {
	const prepared = cleanBlocksForDescendant(blocks);
	if (prepared.length === 0) {
		return { children: [] };
	}
	const res = await client.docx.documentBlockDescendant.create({
		path: {
			document_id: docToken,
			block_id: parentBlockId
		},
		data: {
			children_id: firstLevelBlockIds,
			descendants: prepared.map(toDescendantBlock),
			index
		}
	});
	if (res.code !== 0) {
		throw new Error(`${res.msg} (code: ${res.code})`);
	}
	return { children: res.data?.children ?? [] };
}
|
|
830
|
+
async function clearDocumentContent(client, docToken) {
|
|
831
|
+
const existing = await client.docx.documentBlock.list({ path: { document_id: docToken } });
|
|
832
|
+
if (existing.code !== 0) throw new Error(existing.msg);
|
|
833
|
+
const childIds = existing.data?.items?.filter((b) => b.parent_id === docToken && b.block_type !== 1).map((b) => b.block_id) ?? [];
|
|
834
|
+
if (childIds.length > 0) {
|
|
835
|
+
const res = await client.docx.documentBlockChildren.batchDelete({
|
|
836
|
+
path: {
|
|
837
|
+
document_id: docToken,
|
|
838
|
+
block_id: docToken
|
|
839
|
+
},
|
|
840
|
+
data: {
|
|
841
|
+
start_index: 0,
|
|
842
|
+
end_index: childIds.length
|
|
843
|
+
}
|
|
844
|
+
});
|
|
845
|
+
if (res.code !== 0) throw new Error(res.msg);
|
|
846
|
+
}
|
|
847
|
+
return childIds.length;
|
|
848
|
+
}
|
|
849
|
+
async function uploadImageToDocx(client, blockId, imageBuffer, fileName, docToken) {
|
|
850
|
+
const fileToken = (await client.drive.media.uploadAll({ data: {
|
|
851
|
+
file_name: fileName,
|
|
852
|
+
parent_type: "docx_image",
|
|
853
|
+
parent_node: blockId,
|
|
854
|
+
size: imageBuffer.length,
|
|
855
|
+
file: imageBuffer,
|
|
856
|
+
...docToken ? { extra: JSON.stringify({ drive_route_token: docToken }) } : {}
|
|
857
|
+
} }))?.file_token;
|
|
858
|
+
if (!fileToken) throw new Error("Image upload failed: no file_token returned");
|
|
859
|
+
return fileToken;
|
|
860
|
+
}
|
|
861
|
+
/** Fetch a remote image through the channel runtime, capped at maxBytes. */
async function downloadImage(url, maxBytes) {
	const media = await getFeishuRuntime().channel.media.fetchRemoteMedia({
		url,
		maxBytes
	});
	return media.buffer;
}
|
|
867
|
+
/**
 * Resolve an upload source into a `{ buffer, fileName }` pair.
 *
 * Accepts exactly one of:
 *  - `url`        — remote http(s) resource, fetched via the channel runtime
 *  - `filePath`   — local file, loaded via the media loader
 *  - `imageInput` — a data URI, raw base64 image bytes, or a local path string
 *
 * @param maxBytes - Upper bound on the decoded/fetched payload size
 * @param localRoots - Allowed local filesystem roots passed to the media loader
 * @param explicitFileName - Overrides the derived file name when provided
 * @throws when multiple sources are given, or validation/size checks fail
 */
async function resolveUploadInput(url, filePath, maxBytes, localRoots, explicitFileName, imageInput) {
	// Guard: at most one source may be supplied.
	const inputSources = [
		url ? "url" : null,
		filePath ? "file_path" : null,
		imageInput ? "image" : null
	].filter(Boolean);
	if (inputSources.length > 1) throw new Error(`Provide only one image source; got: ${inputSources.join(", ")}`);
	// Case 1: data URI — validate the header and decode the base64 payload.
	if (imageInput?.startsWith("data:")) {
		const commaIdx = imageInput.indexOf(",");
		if (commaIdx === -1) throw new Error("Invalid data URI: missing comma separator.");
		const header = imageInput.slice(0, commaIdx);
		const data = imageInput.slice(commaIdx + 1);
		if (!header.includes(";base64")) throw new Error("Invalid data URI: missing ';base64' marker. Expected format: data:image/png;base64,<base64data>");
		const trimmedData = data.trim();
		// NOTE(review): this pattern rejects URL-safe base64 (-, _) and any
		// internal whitespace; confirm that is the intended contract.
		if (trimmedData.length === 0 || !/^[A-Za-z0-9+/]+=*$/.test(trimmedData)) throw new Error(`Invalid data URI: base64 payload contains characters outside the standard alphabet.`);
		// Derive the extension from the MIME subtype (e.g. image/png → png).
		const ext = header.match(/data:([^;]+)/)?.[1]?.split("/")[1] ?? "png";
		// Base64 encodes 3 bytes per 4 characters — estimate before decoding.
		const estimatedBytes = Math.ceil(trimmedData.length * 3 / 4);
		if (estimatedBytes > maxBytes) throw new Error(`Image data URI exceeds limit: estimated ${estimatedBytes} bytes > ${maxBytes} bytes`);
		return {
			buffer: Buffer.from(trimmedData, "base64"),
			fileName: explicitFileName ?? `image.${ext}`
		};
	}
	// Case 2: imageInput that unambiguously looks like a local path
	// (~/, ./, ../ prefixes, or an absolute path that actually exists).
	if (imageInput) {
		const candidate = imageInput.startsWith("~") ? imageInput.replace(/^~/, homedir()) : imageInput;
		const unambiguousPath = imageInput.startsWith("~") || imageInput.startsWith("./") || imageInput.startsWith("../");
		const absolutePath = isAbsolute(imageInput);
		if (unambiguousPath || absolutePath && existsSync(candidate)) {
			const resolvedPath = resolve(candidate);
			return {
				buffer: (await getFeishuRuntime().media.loadWebMedia(resolvedPath, {
					maxBytes,
					optimizeImages: false,
					localRoots
				})).buffer,
				fileName: explicitFileName ?? basename(candidate)
			};
		}
		// An absolute path that does not exist is a user error, not base64.
		if (absolutePath && !existsSync(candidate)) throw new Error(`File not found: "${candidate}". If you intended to pass image binary data, use a data URI instead: data:image/jpeg;base64,...`);
	}
	// Case 3: imageInput treated as raw base64 image bytes.
	if (imageInput) {
		const trimmed = imageInput.trim();
		if (trimmed.length === 0 || !/^[A-Za-z0-9+/]+=*$/.test(trimmed)) throw new Error("Invalid base64: image input contains characters outside the standard base64 alphabet. Use a data URI (data:image/png;base64,...) or a local file path instead.");
		const estimatedBytes = Math.ceil(trimmed.length * 3 / 4);
		if (estimatedBytes > maxBytes) throw new Error(`Base64 image exceeds limit: estimated ${estimatedBytes} bytes > ${maxBytes} bytes`);
		const buffer = Buffer.from(trimmed, "base64");
		if (buffer.length === 0) throw new Error("Base64 image decoded to empty buffer; check the input.");
		return {
			buffer,
			fileName: explicitFileName ?? "image.png"
		};
	}
	if (!url && !filePath) throw new Error("Either url, file_path, or image (base64/data URI) must be provided");
	// NOTE(review): unreachable — supplying both url and filePath already
	// throws in the inputSources guard at the top of this function.
	if (url && filePath) throw new Error("Provide only one of url or file_path");
	// Case 4: remote URL — fetched via the channel runtime (enforces maxBytes).
	if (url) {
		const fetched = await getFeishuRuntime().channel.media.fetchRemoteMedia({
			url,
			maxBytes
		});
		// Fall back to a generic name when the URL path has no file component.
		const guessed = new URL(url).pathname.split("/").pop() || "upload.bin";
		return {
			buffer: fetched.buffer,
			fileName: explicitFileName || guessed
		};
	}
	// Case 5: explicit local file path via the media loader.
	const resolvedFilePath = resolve(filePath);
	return {
		buffer: (await getFeishuRuntime().media.loadWebMedia(resolvedFilePath, {
			maxBytes,
			optimizeImages: false,
			localRoots
		})).buffer,
		fileName: explicitFileName || basename(filePath)
	};
}
|
|
942
|
+
/**
 * Replace converted image placeholder blocks with uploaded copies of the
 * markdown's remote images, matched pairwise by order of appearance.
 * Individual failures are logged and skipped (best-effort).
 *
 * @returns the number of images successfully uploaded and patched
 */
async function processImages(client, docToken, markdown, insertedBlocks, maxBytes) {
	const urls = extractImageUrls(markdown);
	if (urls.length === 0) return 0;
	const imageBlocks = insertedBlocks.filter((b) => b.block_type === 27);
	const pairCount = Math.min(urls.length, imageBlocks.length);
	let processed = 0;
	for (let i = 0; i < pairCount; i++) {
		const url = urls[i];
		const blockId = imageBlocks[i]?.block_id;
		if (!blockId) continue;
		try {
			const buffer = await downloadImage(url, maxBytes);
			const fileName = new URL(url).pathname.split("/").pop() || `image_${i}.png`;
			const fileToken = await uploadImageToDocx(client, blockId, buffer, fileName, docToken);
			await client.docx.documentBlock.patch({
				path: {
					document_id: docToken,
					block_id: blockId
				},
				data: { replace_image: { token: fileToken } }
			});
			processed++;
		} catch (err) {
			console.error(`Failed to process image ${url}:`, err);
		}
	}
	return processed;
}
|
|
967
|
+
/**
 * Create an empty image block at the requested position, upload the image
 * bytes, and patch the block to reference the uploaded file.
 *
 * @returns block id, file token, name and byte size of the uploaded image
 */
async function uploadImageBlock(client, docToken, maxBytes, localRoots, url, filePath, parentBlockId, filename, index, imageInput) {
	// Step 1: create an empty image block (type 27) to receive the upload.
	const insertRes = await client.docx.documentBlockChildren.create({
		path: {
			document_id: docToken,
			block_id: parentBlockId ?? docToken
		},
		params: { document_revision_id: -1 },
		data: {
			children: [{
				block_type: 27,
				image: {}
			}],
			index: index ?? -1
		}
	});
	if (insertRes.code !== 0) throw new Error(`Failed to create image block: ${insertRes.msg}`);
	const created = insertRes.data?.children;
	const imageBlockId = created?.find((b) => b.block_type === 27)?.block_id;
	if (!imageBlockId) throw new Error("Failed to create image block");
	// Step 2: resolve the source and upload the bytes bound to the new block.
	const upload = await resolveUploadInput(url, filePath, maxBytes, localRoots, filename, imageInput);
	const fileToken = await uploadImageToDocx(client, imageBlockId, upload.buffer, upload.fileName, docToken);
	// Step 3: point the image block at the uploaded file.
	const patchRes = await client.docx.documentBlock.patch({
		path: {
			document_id: docToken,
			block_id: imageBlockId
		},
		data: { replace_image: { token: fileToken } }
	});
	if (patchRes.code !== 0) throw new Error(patchRes.msg);
	return {
		success: true,
		block_id: imageBlockId,
		file_token: fileToken,
		file_name: upload.fileName,
		size: upload.buffer.length
	};
}
|
|
1003
|
+
/**
 * Upload a file to drive storage associated with the document and return its
 * file_token (direct file-block creation is not supported — see `note` in the
 * return value).
 *
 * NOTE(review): a temporary markdown-link block is inserted and then deleted
 * before uploading — presumably to validate insertability at the target
 * position; confirm the intent before simplifying.
 */
async function uploadFileBlock(client, docToken, maxBytes, localRoots, url, filePath, parentBlockId, filename) {
	const blockId = parentBlockId ?? docToken;
	// Resolve the source (url or file path) into a buffer + file name.
	const upload = await resolveUploadInput(url, filePath, maxBytes, localRoots, filename);
	// Insert a temporary markdown link block as a placeholder.
	const converted = await convertMarkdown(client, `[${upload.fileName}](https://example.com/placeholder)`);
	const { orderedBlocks } = normalizeConvertedBlockTree(converted.blocks, converted.firstLevelBlockIds);
	const { children: inserted } = await insertBlocks(client, docToken, orderedBlocks, blockId);
	const placeholderBlock = inserted[0];
	if (!placeholderBlock?.block_id) throw new Error("Failed to create placeholder block for file upload");
	// Locate the placeholder among its parent's children and delete it again.
	const parentId = placeholderBlock.parent_id ?? blockId;
	const childrenRes = await client.docx.documentBlockChildren.get({ path: {
		document_id: docToken,
		block_id: parentId
	} });
	if (childrenRes.code !== 0) throw new Error(childrenRes.msg);
	const placeholderIdx = (childrenRes.data?.items ?? []).findIndex((item) => item.block_id === placeholderBlock.block_id);
	if (placeholderIdx >= 0) {
		const deleteRes = await client.docx.documentBlockChildren.batchDelete({
			path: {
				document_id: docToken,
				block_id: parentId
			},
			data: {
				start_index: placeholderIdx,
				end_index: placeholderIdx + 1
			}
		});
		if (deleteRes.code !== 0) throw new Error(deleteRes.msg);
	}
	// Upload the actual file bytes to drive, parented to the document.
	const fileToken = (await client.drive.media.uploadAll({ data: {
		file_name: upload.fileName,
		parent_type: "docx_file",
		parent_node: docToken,
		size: upload.buffer.length,
		file: upload.buffer
	} }))?.file_token;
	if (!fileToken) throw new Error("File upload failed: no file_token returned");
	return {
		success: true,
		file_token: fileToken,
		file_name: upload.fileName,
		size: upload.buffer.length,
		note: "File uploaded to drive. Use the file_token to reference it. Direct file block creation is not supported by the Feishu API."
	};
}
|
|
1047
|
+
/**
 * Block types whose content is not representable in the document's plain-text
 * raw content (used by readDoc to warn callers): Code (14), Bitable (18),
 * Diagram (21), File (23), Image (27), Sheet (30), Table (31), TableCell (32).
 * Names correspond to BLOCK_TYPE_NAMES.
 */
const STRUCTURED_BLOCK_TYPES = new Set([
	14,
	18,
	21,
	23,
	27,
	30,
	31,
	32
]);
|
|
1057
|
+
/**
 * Read a document: plain-text content, title, revision id, and per-type block
 * statistics. Adds a hint when structured blocks (tables, images, code, ...)
 * exist that the raw text cannot represent.
 *
 * NOTE(review): only the raw-content response code is checked; info/blocks
 * failures degrade silently to undefined/empty — matches prior behavior.
 */
async function readDoc(client, docToken) {
	const [contentRes, infoRes, blocksRes] = await Promise.all([
		client.docx.document.rawContent({ path: { document_id: docToken } }),
		client.docx.document.get({ path: { document_id: docToken } }),
		client.docx.documentBlock.list({ path: { document_id: docToken } })
	]);
	if (contentRes.code !== 0) throw new Error(contentRes.msg);
	const blocks = blocksRes.data?.items ?? [];
	const blockCounts = {};
	const structuredTypes = [];
	for (const block of blocks) {
		const type = block.block_type ?? 0;
		const name = BLOCK_TYPE_NAMES[type] || `type_${type}`;
		blockCounts[name] = (blockCounts[name] || 0) + 1;
		if (STRUCTURED_BLOCK_TYPES.has(type) && !structuredTypes.includes(name)) {
			structuredTypes.push(name);
		}
	}
	let hint;
	if (structuredTypes.length > 0) {
		hint = `This document contains ${structuredTypes.join(", ")} which are NOT included in the plain text above. Use feishu_doc with action: "list_blocks" to get full content.`;
	}
	const docInfo = infoRes.data?.document;
	return {
		title: docInfo?.title,
		content: contentRes.data?.content,
		revision_id: docInfo?.revision_id,
		block_count: blocks.length,
		block_types: blockCounts,
		...hint && { hint }
	};
}
|
|
1084
|
+
/**
 * Create a docx document and (best-effort) grant edit permission to the
 * requesting user so they can open what the bot created on their behalf.
 *
 * Permission-grant failures never fail document creation; they are reported
 * in the returned requester_permission_* fields instead.
 */
async function createDoc(client, title, folderToken, options) {
	const res = await client.docx.document.create({ data: {
		title,
		folder_token: folderToken
	} });
	if (res.code !== 0) throw new Error(res.msg);
	const doc = res.data?.document;
	const docToken = doc?.document_id;
	if (!docToken) throw new Error("Document creation succeeded but no document_id was returned");
	const shouldGrantToRequester = options?.grantToRequester !== false;
	const requesterOpenId = options?.requesterOpenId?.trim();
	const requesterPermType = "edit";
	let requesterPermissionAdded = false;
	let requesterPermissionSkippedReason;
	let requesterPermissionError;
	if (shouldGrantToRequester) {
		if (!requesterOpenId) {
			requesterPermissionSkippedReason = "trusted requester identity unavailable";
		} else {
			try {
				await client.drive.permissionMember.create({
					path: { token: docToken },
					params: {
						type: "docx",
						need_notification: false
					},
					data: {
						member_type: "openid",
						member_id: requesterOpenId,
						perm: requesterPermType
					}
				});
				requesterPermissionAdded = true;
			} catch (err) {
				requesterPermissionError = formatErrorMessage(err);
			}
		}
	}
	return {
		document_id: docToken,
		title: doc?.title,
		url: `https://feishu.cn/docx/${docToken}`,
		...shouldGrantToRequester && {
			requester_permission_added: requesterPermissionAdded,
			...requesterOpenId && { requester_open_id: requesterOpenId },
			requester_perm_type: requesterPermType,
			...requesterPermissionSkippedReason && { requester_permission_skipped_reason: requesterPermissionSkippedReason },
			...requesterPermissionError && { requester_permission_error: requesterPermissionError }
		}
	};
}
|
|
1130
|
+
/**
 * Replace the entire contents of a docx document with rendered markdown.
 *
 * Clears all existing blocks first, then converts the markdown and inserts
 * the resulting block tree. Returns counts of deleted/added blocks and
 * processed images.
 */
async function writeDoc(client, docToken, markdown, maxBytes, logger) {
  const removedCount = await clearDocumentContent(client, docToken);
  logger?.info?.("feishu_doc: Converting markdown...");
  const converted = await chunkedConvertMarkdown(client, markdown);
  const { blocks, firstLevelBlockIds } = converted;
  if (blocks.length === 0) {
    // Nothing to insert: the clear is the only effect.
    return {
      success: true,
      blocks_deleted: removedCount,
      blocks_added: 0,
      images_processed: 0
    };
  }
  logger?.info?.(`feishu_doc: Converted to ${blocks.length} blocks, inserting...`);
  const { orderedBlocks, rootIds } = normalizeConvertedBlockTree(blocks, firstLevelBlockIds);
  // Very large documents go through the batched insertion path.
  let insertResult;
  if (blocks.length > 1e3) {
    insertResult = await insertBlocksInBatches(client, docToken, orderedBlocks, rootIds, logger);
  } else {
    insertResult = await insertBlocksWithDescendant(client, docToken, orderedBlocks, rootIds);
  }
  const inserted = insertResult.children;
  const imagesProcessed = await processImages(client, docToken, markdown, inserted, maxBytes);
  logger?.info?.(`feishu_doc: Done (${blocks.length} blocks, ${imagesProcessed} images)`);
  return {
    success: true,
    blocks_deleted: removedCount,
    blocks_added: blocks.length,
    images_processed: imagesProcessed
  };
}
|
|
1152
|
+
/**
 * Append rendered markdown to the end of a document.
 * Throws when the markdown converts to zero blocks.
 */
async function appendDoc(client, docToken, markdown, maxBytes, logger) {
  logger?.info?.("feishu_doc: Converting markdown...");
  const { blocks, firstLevelBlockIds } = await chunkedConvertMarkdown(client, markdown);
  if (blocks.length === 0) throw new Error("Content is empty");
  logger?.info?.(`feishu_doc: Converted to ${blocks.length} blocks, inserting...`);
  const { orderedBlocks, rootIds } = normalizeConvertedBlockTree(blocks, firstLevelBlockIds);
  // Batched insertion keeps very large documents under per-request limits.
  const useBatches = blocks.length > 1e3;
  const insertResult = useBatches
    ? await insertBlocksInBatches(client, docToken, orderedBlocks, rootIds, logger)
    : await insertBlocksWithDescendant(client, docToken, orderedBlocks, rootIds);
  const inserted = insertResult.children;
  const imagesProcessed = await processImages(client, docToken, markdown, inserted, maxBytes);
  logger?.info?.(`feishu_doc: Done (${blocks.length} blocks, ${imagesProcessed} images)`);
  return {
    success: true,
    blocks_added: blocks.length,
    images_processed: imagesProcessed,
    block_ids: inserted.map((b) => b.block_id)
  };
}
|
|
1168
|
+
/**
 * Insert markdown content into a document directly after an existing block.
 *
 * Resolves the parent of `afterBlockId`, pages through ALL of the parent's
 * children to find the anchor's absolute index, converts the markdown, and
 * inserts the new blocks at index+1 (batched when the document is large).
 * Throws when the anchor block is not among its parent's children.
 */
async function insertDoc(client, docToken, markdown, afterBlockId, maxBytes, logger) {
  // Look up the anchor block to discover its parent (document root if absent).
  const blockInfo = await client.docx.documentBlock.get({ path: {
    document_id: docToken,
    block_id: afterBlockId
  } });
  if (blockInfo.code !== 0) throw new Error(blockInfo.msg);
  const parentId = blockInfo.data?.block?.parent_id ?? docToken;
  // Collect every child of the parent (paginated) so the index is absolute.
  const items = [];
  let pageToken;
  do {
    const childrenRes = await client.docx.documentBlockChildren.get({
      path: {
        document_id: docToken,
        block_id: parentId
      },
      params: pageToken ? { page_token: pageToken } : {}
    });
    if (childrenRes.code !== 0) throw new Error(childrenRes.msg);
    items.push(...childrenRes.data?.items ?? []);
    pageToken = childrenRes.data?.page_token ?? void 0;
  } while (pageToken);
  const blockIndex = items.findIndex((item) => item.block_id === afterBlockId);
  if (blockIndex === -1) throw new Error(`after_block_id "${afterBlockId}" was not found among the children of parent block "${parentId}". Use list_blocks to verify the block ID.`);
  // New content goes AFTER the anchor block.
  const insertIndex = blockIndex + 1;
  logger?.info?.("feishu_doc: Converting markdown...");
  const { blocks, firstLevelBlockIds } = await chunkedConvertMarkdown(client, markdown);
  if (blocks.length === 0) throw new Error("Content is empty");
  const { orderedBlocks, rootIds } = normalizeConvertedBlockTree(blocks, firstLevelBlockIds);
  logger?.info?.(`feishu_doc: Converted to ${blocks.length} blocks, inserting at index ${insertIndex}...`);
  // Batched path guards against per-request API limits on very large inserts.
  const { children: inserted } = blocks.length > 1e3 ? await insertBlocksInBatches(client, docToken, orderedBlocks, rootIds, logger, parentId, insertIndex) : await insertBlocksWithDescendant(client, docToken, orderedBlocks, rootIds, {
    parentBlockId: parentId,
    index: insertIndex
  });
  // Images referenced by the markdown are uploaded/patched after insertion.
  const imagesProcessed = await processImages(client, docToken, markdown, inserted, maxBytes);
  logger?.info?.(`feishu_doc: Done (${blocks.length} blocks, ${imagesProcessed} images)`);
  return {
    success: true,
    blocks_added: blocks.length,
    images_processed: imagesProcessed,
    block_ids: inserted.map((b) => b.block_id)
  };
}
|
|
1210
|
+
/**
 * Create an empty table block (block_type 31) under parentBlockId
 * (document root when omitted). When provided, columnWidth must contain
 * exactly one entry per column.
 */
async function createTable(client, docToken, rowSize, columnSize, parentBlockId, columnWidth) {
  if (columnWidth && columnWidth.length !== columnSize) throw new Error("column_width length must equal column_size");
  const targetBlockId = parentBlockId ?? docToken;
  const property = {
    row_size: rowSize,
    column_size: columnSize,
    ...(columnWidth && columnWidth.length > 0 ? { column_width: columnWidth } : {})
  };
  const res = await client.docx.documentBlockChildren.create({
    path: {
      document_id: docToken,
      block_id: targetBlockId
    },
    // block_type 31 = table
    data: { children: [{ block_type: 31, table: { property } }] }
  });
  if (res.code !== 0) throw new Error(res.msg);
  const tableBlock = res.data?.children?.find((b) => b.block_type === 31);
  const cells = normalizeInsertedChildBlocks(tableBlock?.children);
  return {
    success: true,
    table_block_id: tableBlock?.block_id,
    row_size: rowSize,
    column_size: columnSize,
    table_cell_block_ids: cells.map((c) => c.block_id).filter(Boolean),
    raw_children_count: res.data?.children?.length ?? 0
  };
}
|
|
1239
|
+
/**
 * Overwrite table cell contents with markdown-rendered blocks.
 *
 * `values` is a row-major 2D array of markdown strings. Writes are clipped
 * to the table's actual row/column counts; extra rows/columns are silently
 * ignored. Each targeted cell is cleared (its existing children deleted)
 * before the new content is inserted. Returns the number of cells processed.
 */
async function writeTableCells(client, docToken, tableBlockId, values) {
  if (!values.length || !values[0]?.length) throw new Error("values must be a non-empty 2D array");
  // Fetch the table block to learn its dimensions and flat cell ID list.
  const tableRes = await client.docx.documentBlock.get({ path: {
    document_id: docToken,
    block_id: tableBlockId
  } });
  if (tableRes.code !== 0) throw new Error(tableRes.msg);
  const tableBlock = tableRes.data?.block;
  // block_type 31 is a table block.
  if (tableBlock?.block_type !== 31) throw new Error("table_block_id is not a table block");
  const tableData = tableBlock.table;
  const rows = tableData?.property?.row_size;
  const cols = tableData?.property?.column_size;
  // Cell block IDs, row-major order.
  const cellIds = tableData?.cells ?? [];
  if (!rows || !cols || !cellIds.length) throw new Error("Table cell IDs unavailable from table block. Use list_blocks/get_block and pass explicit cell block IDs if needed.");
  const writeRows = Math.min(values.length, rows);
  let written = 0;
  for (let r = 0; r < writeRows; r++) {
    const rowValues = values[r] ?? [];
    const writeCols = Math.min(rowValues.length, cols);
    for (let c = 0; c < writeCols; c++) {
      // Row-major index into the flat cell ID list.
      const cellId = cellIds[r * cols + c];
      if (!cellId) continue;
      // Clear existing cell content before inserting the replacement.
      const childrenRes = await client.docx.documentBlockChildren.get({ path: {
        document_id: docToken,
        block_id: cellId
      } });
      if (childrenRes.code !== 0) throw new Error(childrenRes.msg);
      const existingChildren = childrenRes.data?.items ?? [];
      if (existingChildren.length > 0) {
        const delRes = await client.docx.documentBlockChildren.batchDelete({
          path: {
            document_id: docToken,
            block_id: cellId
          },
          data: {
            start_index: 0,
            end_index: existingChildren.length
          }
        });
        if (delRes.code !== 0) throw new Error(delRes.msg);
      }
      // Convert this cell's markdown and insert the resulting blocks.
      const converted = await convertMarkdown(client, rowValues[c] ?? "");
      const { orderedBlocks } = normalizeConvertedBlockTree(converted.blocks, converted.firstLevelBlockIds);
      if (orderedBlocks.length > 0) await insertBlocks(client, docToken, orderedBlocks, cellId);
      written++;
    }
  }
  return {
    success: true,
    table_block_id: tableBlockId,
    cells_written: written,
    table_size: {
      rows,
      cols
    }
  };
}
|
|
1296
|
+
/**
 * Convenience wrapper: create a table and immediately fill it with values.
 * Delegates to createTable and writeTableCells.
 */
async function createTableWithValues(client, docToken, rowSize, columnSize, values, parentBlockId, columnWidth) {
  const created = await createTable(client, docToken, rowSize, columnSize, parentBlockId, columnWidth);
  const tableBlockId = created.table_block_id;
  if (!tableBlockId) throw new Error("create_table succeeded but table_block_id is missing");
  const filled = await writeTableCells(client, docToken, tableBlockId, values);
  return {
    success: true,
    table_block_id: tableBlockId,
    row_size: rowSize,
    column_size: columnSize,
    cells_written: filled.cells_written
  };
}
|
|
1307
|
+
async function updateBlock(client, docToken, blockId, content) {
|
|
1308
|
+
const blockInfo = await client.docx.documentBlock.get({ path: {
|
|
1309
|
+
document_id: docToken,
|
|
1310
|
+
block_id: blockId
|
|
1311
|
+
} });
|
|
1312
|
+
if (blockInfo.code !== 0) throw new Error(blockInfo.msg);
|
|
1313
|
+
const res = await client.docx.documentBlock.patch({
|
|
1314
|
+
path: {
|
|
1315
|
+
document_id: docToken,
|
|
1316
|
+
block_id: blockId
|
|
1317
|
+
},
|
|
1318
|
+
data: { update_text_elements: { elements: [{ text_run: { content } }] } }
|
|
1319
|
+
});
|
|
1320
|
+
if (res.code !== 0) throw new Error(res.msg);
|
|
1321
|
+
return {
|
|
1322
|
+
success: true,
|
|
1323
|
+
block_id: blockId
|
|
1324
|
+
};
|
|
1325
|
+
}
|
|
1326
|
+
async function deleteBlock(client, docToken, blockId) {
|
|
1327
|
+
const blockInfo = await client.docx.documentBlock.get({ path: {
|
|
1328
|
+
document_id: docToken,
|
|
1329
|
+
block_id: blockId
|
|
1330
|
+
} });
|
|
1331
|
+
if (blockInfo.code !== 0) throw new Error(blockInfo.msg);
|
|
1332
|
+
const parentId = blockInfo.data?.block?.parent_id ?? docToken;
|
|
1333
|
+
const children = await client.docx.documentBlockChildren.get({ path: {
|
|
1334
|
+
document_id: docToken,
|
|
1335
|
+
block_id: parentId
|
|
1336
|
+
} });
|
|
1337
|
+
if (children.code !== 0) throw new Error(children.msg);
|
|
1338
|
+
const index = (children.data?.items ?? []).findIndex((item) => item.block_id === blockId);
|
|
1339
|
+
if (index === -1) throw new Error("Block not found");
|
|
1340
|
+
const res = await client.docx.documentBlockChildren.batchDelete({
|
|
1341
|
+
path: {
|
|
1342
|
+
document_id: docToken,
|
|
1343
|
+
block_id: parentId
|
|
1344
|
+
},
|
|
1345
|
+
data: {
|
|
1346
|
+
start_index: index,
|
|
1347
|
+
end_index: index + 1
|
|
1348
|
+
}
|
|
1349
|
+
});
|
|
1350
|
+
if (res.code !== 0) throw new Error(res.msg);
|
|
1351
|
+
return {
|
|
1352
|
+
success: true,
|
|
1353
|
+
deleted_block_id: blockId
|
|
1354
|
+
};
|
|
1355
|
+
}
|
|
1356
|
+
/**
 * List every block in a document.
 *
 * Fix: the original issued a single list call and returned only the first
 * page, silently truncating large documents. This version pages through the
 * results using the same page_token convention the rest of this file uses
 * (see insertDoc's children pagination).
 */
async function listBlocks(client, docToken) {
  const blocks = [];
  let pageToken;
  do {
    const res = await client.docx.documentBlock.list({
      path: { document_id: docToken },
      params: pageToken ? { page_token: pageToken } : {}
    });
    if (res.code !== 0) throw new Error(res.msg);
    blocks.push(...res.data?.items ?? []);
    pageToken = res.data?.page_token ?? void 0;
  } while (pageToken);
  return { blocks };
}
|
|
1361
|
+
async function getBlock(client, docToken, blockId) {
|
|
1362
|
+
const res = await client.docx.documentBlock.get({ path: {
|
|
1363
|
+
document_id: docToken,
|
|
1364
|
+
block_id: blockId
|
|
1365
|
+
} });
|
|
1366
|
+
if (res.code !== 0) throw new Error(res.msg);
|
|
1367
|
+
return { block: res.data?.block };
|
|
1368
|
+
}
|
|
1369
|
+
/**
 * List the app's permission scopes, partitioned into granted (grant_status
 * === 1) and pending, with a human-readable summary line.
 */
async function listAppScopes(client) {
  const res = await client.application.scope.list({});
  if (res.code !== 0) throw new Error(res.msg);
  const granted = [];
  const pending = [];
  for (const s of res.data?.scopes ?? []) {
    const entry = { name: s.scope_name, type: s.scope_type };
    if (s.grant_status === 1) granted.push(entry);
    else pending.push(entry);
  }
  return {
    granted,
    pending,
    summary: `${granted.length} granted, ${pending.length} pending`
  };
}
|
|
1387
|
+
/**
 * Register the Feishu document tools on the plugin API.
 *
 * Registers `feishu_doc` (document CRUD, tables, media) and
 * `feishu_app_scopes` (permission introspection), each gated on the merged
 * per-account tool toggles. No-ops when config is absent or no enabled
 * Feishu account exists.
 */
function registerFeishuDocTools(api) {
  if (!api.config) return;
  const accounts = listEnabledFeishuAccounts(api.config);
  if (accounts.length === 0) return;
  // Tool toggles merged across all enabled accounts.
  const toolsCfg = resolveAnyEnabledFeishuToolsConfig(accounts);
  const registered = [];
  // Build a per-call client, honoring an account override in the params.
  const getClient = (params, defaultAccountId) => createFeishuToolClient({
    api,
    executeParams: params,
    defaultAccountId
  });
  // Media size limit in bytes; account config is in MB (default 30 MB).
  const getMediaMaxBytes = (params, defaultAccountId) => (resolveFeishuToolAccount({
    api,
    executeParams: params,
    defaultAccountId
  }).config?.mediaMaxMb ?? 30) * 1024 * 1024;
  if (toolsCfg.doc) {
    api.registerTool((ctx) => {
      const defaultAccountId = ctx.agentAccountId;
      const mediaLocalRoots = resolveDocToolLocalRoots(ctx);
      // Only trust the requester identity when the message arrived via Feishu.
      const trustedRequesterOpenId = ctx.messageChannel === "feishu" ? normalizeOptionalString(ctx.requesterSenderId) : void 0;
      return {
        name: "feishu_doc",
        label: "Feishu Doc",
        description: "Feishu document operations. Actions: read, write, append, insert, create, list_blocks, get_block, update_block, delete_block, create_table, write_table_cells, create_table_with_values, insert_table_row, insert_table_column, delete_table_rows, delete_table_columns, merge_table_cells, upload_image, upload_file, color_text",
        parameters: FeishuDocSchema,
        async execute(_toolCallId, params) {
          const p = params;
          try {
            const client = getClient(p, defaultAccountId);
            // Dispatch on the validated discriminated action.
            switch (p.action) {
              case "read": return json$1(await readDoc(client, p.doc_token));
              case "write": return json$1(await writeDoc(client, p.doc_token, p.content, getMediaMaxBytes(p, defaultAccountId), api.logger));
              case "append": return json$1(await appendDoc(client, p.doc_token, p.content, getMediaMaxBytes(p, defaultAccountId), api.logger));
              case "insert": return json$1(await insertDoc(client, p.doc_token, p.content, p.after_block_id, getMediaMaxBytes(p, defaultAccountId), api.logger));
              case "create": return json$1(await createDoc(client, p.title, p.folder_token, {
                grantToRequester: p.grant_to_requester,
                requesterOpenId: trustedRequesterOpenId
              }));
              case "list_blocks": return json$1(await listBlocks(client, p.doc_token));
              case "get_block": return json$1(await getBlock(client, p.doc_token, p.block_id));
              case "update_block": return json$1(await updateBlock(client, p.doc_token, p.block_id, p.content));
              case "delete_block": return json$1(await deleteBlock(client, p.doc_token, p.block_id));
              case "create_table": return json$1(await createTable(client, p.doc_token, p.row_size, p.column_size, p.parent_block_id, p.column_width));
              case "write_table_cells": return json$1(await writeTableCells(client, p.doc_token, p.table_block_id, p.values));
              case "create_table_with_values": return json$1(await createTableWithValues(client, p.doc_token, p.row_size, p.column_size, p.values, p.parent_block_id, p.column_width));
              case "upload_image": return json$1(await uploadImageBlock(client, p.doc_token, getMediaMaxBytes(p, defaultAccountId), mediaLocalRoots, p.url, p.file_path, p.parent_block_id, p.filename, p.index, p.image));
              case "upload_file": return json$1(await uploadFileBlock(client, p.doc_token, getMediaMaxBytes(p, defaultAccountId), mediaLocalRoots, p.url, p.file_path, p.parent_block_id, p.filename));
              case "color_text": return json$1(await updateColorText(client, p.doc_token, p.block_id, p.content));
              case "insert_table_row": return json$1(await insertTableRow(client, p.doc_token, p.block_id, p.row_index));
              case "insert_table_column": return json$1(await insertTableColumn(client, p.doc_token, p.block_id, p.column_index));
              case "delete_table_rows": return json$1(await deleteTableRows(client, p.doc_token, p.block_id, p.row_start, p.row_count));
              case "delete_table_columns": return json$1(await deleteTableColumns(client, p.doc_token, p.block_id, p.column_start, p.column_count));
              case "merge_table_cells": return json$1(await mergeTableCells(client, p.doc_token, p.block_id, p.row_start, p.row_end, p.column_start, p.column_end));
              default: return json$1({ error: "Unknown action" });
            }
          } catch (err) {
            // Surface failures as a tool error payload rather than throwing.
            return json$1({ error: formatErrorMessage(err) });
          }
        }
      };
    }, { name: "feishu_doc" });
    registered.push("feishu_doc");
  }
  if (toolsCfg.scopes) {
    api.registerTool((ctx) => ({
      name: "feishu_app_scopes",
      label: "Feishu App Scopes",
      description: "List current app permissions (scopes). Use to debug permission issues or check available capabilities.",
      parameters: Type.Object({}),
      async execute() {
        try {
          return json$1(await listAppScopes(getClient(void 0, ctx.agentAccountId)));
        } catch (err) {
          return json$1({ error: formatErrorMessage(err) });
        }
      }
    }), { name: "feishu_app_scopes" });
    registered.push("feishu_app_scopes");
  }
}
|
|
1468
|
+
//#endregion
|
|
1469
|
+
//#region extensions/feishu/src/wiki-schema.ts
|
|
1470
|
+
// Parameter schema for the `feishu_wiki` tool — one union variant per action.
const FeishuWikiSchema = Type.Union([
  // List all knowledge spaces visible to the bot.
  Type.Object({ action: Type.Literal("spaces") }),
  // List nodes in a space (root level when parent_node_token is omitted).
  Type.Object({
    action: Type.Literal("nodes"),
    space_id: Type.String({ description: "Knowledge space ID" }),
    parent_node_token: Type.Optional(Type.String({ description: "Parent node token (optional, omit for root)" }))
  }),
  // Look up a single node by its wiki token.
  Type.Object({
    action: Type.Literal("get"),
    token: Type.String({ description: "Wiki node token (from URL /wiki/XXX)" })
  }),
  // NOTE: "search" is accepted by the schema, but the tool handler reports
  // it as unavailable and points callers at "nodes"/"get" instead.
  Type.Object({
    action: Type.Literal("search"),
    query: Type.String({ description: "Search query" }),
    space_id: Type.Optional(Type.String({ description: "Limit search to this space (optional)" }))
  }),
  // Create a new node (docx by default) in a space.
  Type.Object({
    action: Type.Literal("create"),
    space_id: Type.String({ description: "Knowledge space ID" }),
    title: Type.String({ description: "Node title" }),
    obj_type: Type.Optional(Type.Union([
      Type.Literal("docx"),
      Type.Literal("sheet"),
      Type.Literal("bitable")
    ], { description: "Object type (default: docx)" })),
    parent_node_token: Type.Optional(Type.String({ description: "Parent node token (optional, omit for root)" }))
  }),
  // Move a node within a space, or to another space.
  Type.Object({
    action: Type.Literal("move"),
    space_id: Type.String({ description: "Source knowledge space ID" }),
    node_token: Type.String({ description: "Node token to move" }),
    target_space_id: Type.Optional(Type.String({ description: "Target space ID (optional, same space if omitted)" })),
    target_parent_token: Type.Optional(Type.String({ description: "Target parent node token (optional, root if omitted)" }))
  }),
  // Rename a node's title.
  Type.Object({
    action: Type.Literal("rename"),
    space_id: Type.String({ description: "Knowledge space ID" }),
    node_token: Type.String({ description: "Node token to rename" }),
    title: Type.String({ description: "New title" })
  })
]);
|
|
1511
|
+
//#endregion
|
|
1512
|
+
//#region extensions/feishu/src/wiki.ts
|
|
1513
|
+
const WIKI_ACCESS_HINT = "To grant wiki access: Open wiki space → Settings → Members → Add the bot. See: https://open.feishu.cn/document/server-docs/docs/wiki-v2/wiki-qa#a40ad4ca";
|
|
1514
|
+
async function listSpaces(client) {
|
|
1515
|
+
const res = await client.wiki.space.list({});
|
|
1516
|
+
if (res.code !== 0) throw new Error(res.msg);
|
|
1517
|
+
const spaces = res.data?.items?.map((s) => ({
|
|
1518
|
+
space_id: s.space_id,
|
|
1519
|
+
name: s.name,
|
|
1520
|
+
description: s.description,
|
|
1521
|
+
visibility: s.visibility
|
|
1522
|
+
})) ?? [];
|
|
1523
|
+
return {
|
|
1524
|
+
spaces,
|
|
1525
|
+
...spaces.length === 0 && { hint: WIKI_ACCESS_HINT }
|
|
1526
|
+
};
|
|
1527
|
+
}
|
|
1528
|
+
/** List child nodes of a wiki space (root level when no parent token given). */
async function listNodes(client, spaceId, parentNodeToken) {
  const res = await client.wiki.spaceNode.list({
    path: { space_id: spaceId },
    params: { parent_node_token: parentNodeToken }
  });
  if (res.code !== 0) throw new Error(res.msg);
  const nodes = (res.data?.items ?? []).map((n) => ({
    node_token: n.node_token,
    obj_token: n.obj_token,
    obj_type: n.obj_type,
    title: n.title,
    has_child: n.has_child
  }));
  return { nodes };
}
|
|
1542
|
+
/** Fetch a wiki node's metadata by its token. */
async function getNode(client, token) {
  const res = await client.wiki.space.getNode({ params: { token } });
  if (res.code !== 0) throw new Error(res.msg);
  const {
    node_token,
    space_id,
    obj_token,
    obj_type,
    title,
    parent_node_token,
    has_child,
    creator,
    node_create_time: create_time
  } = res.data?.node ?? {};
  return {
    node_token,
    space_id,
    obj_token,
    obj_type,
    title,
    parent_node_token,
    has_child,
    creator,
    create_time
  };
}
|
|
1558
|
+
async function createNode(client, spaceId, title, objType, parentNodeToken) {
|
|
1559
|
+
const res = await client.wiki.spaceNode.create({
|
|
1560
|
+
path: { space_id: spaceId },
|
|
1561
|
+
data: {
|
|
1562
|
+
obj_type: objType || "docx",
|
|
1563
|
+
node_type: "origin",
|
|
1564
|
+
title,
|
|
1565
|
+
parent_node_token: parentNodeToken
|
|
1566
|
+
}
|
|
1567
|
+
});
|
|
1568
|
+
if (res.code !== 0) throw new Error(res.msg);
|
|
1569
|
+
const node = res.data?.node;
|
|
1570
|
+
return {
|
|
1571
|
+
node_token: node?.node_token,
|
|
1572
|
+
obj_token: node?.obj_token,
|
|
1573
|
+
obj_type: node?.obj_type,
|
|
1574
|
+
title: node?.title
|
|
1575
|
+
};
|
|
1576
|
+
}
|
|
1577
|
+
async function moveNode(client, spaceId, nodeToken, targetSpaceId, targetParentToken) {
|
|
1578
|
+
const res = await client.wiki.spaceNode.move({
|
|
1579
|
+
path: {
|
|
1580
|
+
space_id: spaceId,
|
|
1581
|
+
node_token: nodeToken
|
|
1582
|
+
},
|
|
1583
|
+
data: {
|
|
1584
|
+
target_space_id: targetSpaceId || spaceId,
|
|
1585
|
+
target_parent_token: targetParentToken
|
|
1586
|
+
}
|
|
1587
|
+
});
|
|
1588
|
+
if (res.code !== 0) throw new Error(res.msg);
|
|
1589
|
+
return {
|
|
1590
|
+
success: true,
|
|
1591
|
+
node_token: res.data?.node?.node_token
|
|
1592
|
+
};
|
|
1593
|
+
}
|
|
1594
|
+
/** Rename a wiki node's title. */
async function renameNode(client, spaceId, nodeToken, title) {
  const res = await client.wiki.spaceNode.updateTitle({
    path: { space_id: spaceId, node_token: nodeToken },
    data: { title }
  });
  if (res.code !== 0) throw new Error(res.msg);
  return { success: true, node_token: nodeToken, title };
}
|
|
1609
|
+
/**
 * Register the `feishu_wiki` tool on the plugin API. No-ops when config is
 * absent, no enabled Feishu account exists, or the wiki toggle is off.
 */
function registerFeishuWikiTools(api) {
  if (!api.config) return;
  const accounts = listEnabledFeishuAccounts(api.config);
  if (accounts.length === 0) return;
  if (!resolveAnyEnabledFeishuToolsConfig(accounts).wiki) return;
  api.registerTool((ctx) => {
    const defaultAccountId = ctx.agentAccountId;
    return {
      name: "feishu_wiki",
      label: "Feishu Wiki",
      description: "Feishu knowledge base operations. Actions: spaces, nodes, get, create, move, rename",
      parameters: FeishuWikiSchema,
      async execute(_toolCallId, params) {
        const p = params;
        try {
          // Per-call client; params may carry an account override.
          const client = createFeishuToolClient({
            api,
            executeParams: p,
            defaultAccountId
          });
          switch (p.action) {
            case "spaces": return jsonToolResult(await listSpaces(client));
            case "nodes": return jsonToolResult(await listNodes(client, p.space_id, p.parent_node_token));
            case "get": return jsonToolResult(await getNode(client, p.token));
            // "search" has no backing implementation; steer callers to nodes/get.
            case "search": return jsonToolResult({ error: "Search is not available. Use feishu_wiki with action: 'nodes' to browse or action: 'get' to lookup by token." });
            case "create": return jsonToolResult(await createNode(client, p.space_id, p.title, p.obj_type, p.parent_node_token));
            case "move": return jsonToolResult(await moveNode(client, p.space_id, p.node_token, p.target_space_id, p.target_parent_token));
            case "rename": return jsonToolResult(await renameNode(client, p.space_id, p.node_token, p.title));
            default: return unknownToolActionResult(p.action);
          }
        } catch (err) {
          // Surface API failures as a tool error payload rather than throwing.
          return toolExecutionErrorResult(err);
        }
      }
    };
  }, { name: "feishu_wiki" });
}
|
|
1646
|
+
//#endregion
|
|
1647
|
+
//#region extensions/feishu/src/perm-schema.ts
|
|
1648
|
+
// Drive object types accepted by the permission API.
const TokenType = Type.Union([
  Type.Literal("doc"),
  Type.Literal("docx"),
  Type.Literal("sheet"),
  Type.Literal("bitable"),
  Type.Literal("folder"),
  Type.Literal("file"),
  Type.Literal("wiki"),
  Type.Literal("mindnote")
]);
// Identifier kinds a permission member can be addressed by.
const MemberType = Type.Union([
  Type.Literal("email"),
  Type.Literal("openid"),
  Type.Literal("userid"),
  Type.Literal("unionid"),
  Type.Literal("openchat"),
  Type.Literal("opendepartmentid")
]);
// Permission levels grantable on a drive object.
const Permission = Type.Union([
  Type.Literal("view"),
  Type.Literal("edit"),
  Type.Literal("full_access")
]);
// Parameter schema for the `feishu_perm` tool — one union variant per action.
const FeishuPermSchema = Type.Union([
  // List current permission members on an object.
  Type.Object({
    action: Type.Literal("list"),
    token: Type.String({ description: "File token" }),
    type: TokenType
  }),
  // Grant a member a permission level.
  Type.Object({
    action: Type.Literal("add"),
    token: Type.String({ description: "File token" }),
    type: TokenType,
    member_type: MemberType,
    member_id: Type.String({ description: "Member ID (email, open_id, user_id, etc.)" }),
    perm: Permission
  }),
  // Revoke a member's access.
  Type.Object({
    action: Type.Literal("remove"),
    token: Type.String({ description: "File token" }),
    type: TokenType,
    member_type: MemberType,
    member_id: Type.String({ description: "Member ID to remove" })
  })
]);
|
|
1693
|
+
//#endregion
|
|
1694
|
+
//#region extensions/feishu/src/perm.ts
|
|
1695
|
+
/** List permission members on a drive object. */
async function listMembers(client, token, type) {
  const res = await client.drive.permissionMember.list({
    path: { token },
    params: { type }
  });
  if (res.code !== 0) throw new Error(res.msg);
  const members = (res.data?.items ?? []).map((m) => ({
    member_type: m.member_type,
    member_id: m.member_id,
    perm: m.perm,
    name: m.name
  }));
  return { members };
}
|
|
1708
|
+
async function addMember(client, token, type, memberType, memberId, perm) {
|
|
1709
|
+
const res = await client.drive.permissionMember.create({
|
|
1710
|
+
path: { token },
|
|
1711
|
+
params: {
|
|
1712
|
+
type,
|
|
1713
|
+
need_notification: false
|
|
1714
|
+
},
|
|
1715
|
+
data: {
|
|
1716
|
+
member_type: memberType,
|
|
1717
|
+
member_id: memberId,
|
|
1718
|
+
perm
|
|
1719
|
+
}
|
|
1720
|
+
});
|
|
1721
|
+
if (res.code !== 0) throw new Error(res.msg);
|
|
1722
|
+
return {
|
|
1723
|
+
success: true,
|
|
1724
|
+
member: res.data?.member
|
|
1725
|
+
};
|
|
1726
|
+
}
|
|
1727
|
+
async function removeMember(client, token, type, memberType, memberId) {
|
|
1728
|
+
const res = await client.drive.permissionMember.delete({
|
|
1729
|
+
path: {
|
|
1730
|
+
token,
|
|
1731
|
+
member_id: memberId
|
|
1732
|
+
},
|
|
1733
|
+
params: {
|
|
1734
|
+
type,
|
|
1735
|
+
member_type: memberType
|
|
1736
|
+
}
|
|
1737
|
+
});
|
|
1738
|
+
if (res.code !== 0) throw new Error(res.msg);
|
|
1739
|
+
return { success: true };
|
|
1740
|
+
}
|
|
1741
|
+
/**
 * Register the `feishu_perm` tool on the plugin API. No-ops when config is
 * absent, no enabled Feishu account exists, or the perm toggle is off.
 */
function registerFeishuPermTools(api) {
  if (!api.config) return;
  const accounts = listEnabledFeishuAccounts(api.config);
  if (accounts.length === 0) return;
  if (!resolveAnyEnabledFeishuToolsConfig(accounts).perm) return;
  api.registerTool((ctx) => {
    const defaultAccountId = ctx.agentAccountId;
    return {
      name: "feishu_perm",
      label: "Feishu Perm",
      description: "Feishu permission management. Actions: list, add, remove",
      parameters: FeishuPermSchema,
      async execute(_toolCallId, params) {
        const p = params;
        try {
          // Per-call client; params may carry an account override.
          const client = createFeishuToolClient({
            api,
            executeParams: p,
            defaultAccountId
          });
          switch (p.action) {
            case "list": return jsonToolResult(await listMembers(client, p.token, p.type));
            case "add": return jsonToolResult(await addMember(client, p.token, p.type, p.member_type, p.member_id, p.perm));
            case "remove": return jsonToolResult(await removeMember(client, p.token, p.type, p.member_type, p.member_id));
            default: return unknownToolActionResult(p.action);
          }
        } catch (err) {
          // Surface API failures as a tool error payload rather than throwing.
          return toolExecutionErrorResult(err);
        }
      }
    };
  }, { name: "feishu_perm" });
}
|
|
1774
|
+
//#endregion
|
|
1775
|
+
//#region extensions/feishu/src/bitable.ts
|
|
/**
 * Wrap arbitrary data as a tool result: a single pretty-printed JSON text
 * part for display, plus the raw value under `details`.
 */
function json(data) {
  const text = JSON.stringify(data, null, 2);
  return {
    content: [{ type: "text", text }],
    details: data
  };
}
1785
|
+
var LarkApiError = class extends Error {
|
|
1786
|
+
constructor(code, message, api, context) {
|
|
1787
|
+
super(`[${api}] code=${code} message=${message}`);
|
|
1788
|
+
this.name = "LarkApiError";
|
|
1789
|
+
this.code = code;
|
|
1790
|
+
this.api = api;
|
|
1791
|
+
this.context = context;
|
|
1792
|
+
}
|
|
1793
|
+
};
|
|
/**
 * Assert that a Lark API response succeeded (code 0); otherwise raise a
 * LarkApiError with sensible fallbacks for a missing code/msg.
 */
function ensureLarkSuccess(res, api, context) {
  if (res.code === 0) return;
  throw new LarkApiError(res.code ?? -1, res.msg ?? "unknown error", api, context);
}
/** Field type ID to human-readable name */
// Lookup table used by listFields/createField to label Bitable field types.
// Not exhaustive: callers fall back to `type_<id>` for IDs missing here.
const FIELD_TYPE_NAMES = {
  1: "Text",
  2: "Number",
  3: "SingleSelect",
  4: "MultiSelect",
  5: "DateTime",
  7: "Checkbox",
  11: "User",
  13: "Phone",
  15: "URL",
  17: "Attachment",
  18: "SingleLink",
  19: "Lookup",
  20: "Formula",
  21: "DuplexLink",
  22: "Location",
  23: "GroupChat",
  // 1001+ are system-managed field types.
  1001: "CreatedTime",
  1002: "ModifiedTime",
  1003: "CreatedUser",
  1004: "ModifiedUser",
  1005: "AutoNumber"
};
/**
 * Parse a Bitable URL and extract its tokens.
 * Recognizes `/wiki/<token>` (wiki node) and `/base/<token>` (direct app)
 * paths; an optional `?table=` query parameter becomes `tableId`.
 * Returns null for malformed URLs or unrecognized paths.
 */
function parseBitableUrl(url) {
  let parsed;
  try {
    parsed = new URL(url);
  } catch {
    return null;
  }
  const tableId = parsed.searchParams.get("table") ?? void 0;
  // Wiki links take precedence; their token must be resolved to an app_token later.
  const wiki = parsed.pathname.match(/\/wiki\/([A-Za-z0-9]+)/);
  if (wiki) {
    return { token: wiki[1], tableId, isWiki: true };
  }
  const base = parsed.pathname.match(/\/base\/([A-Za-z0-9]+)/);
  if (base) {
    return { token: base[1], tableId, isWiki: false };
  }
  return null;
}
/**
 * Resolve a wiki node token to the bitable app_token it wraps.
 * Throws when the node is missing or is not a bitable node.
 */
async function getAppTokenFromWiki(client, nodeToken) {
  const res = await client.wiki.space.getNode({ params: { token: nodeToken } });
  ensureLarkSuccess(res, "wiki.space.getNode", { nodeToken });
  const node = res.data?.node;
  if (!node) throw new Error("Node not found");
  if (node.obj_type === "bitable") return node.obj_token;
  throw new Error(`Node is not a bitable (type: ${node.obj_type})`);
}
/**
 * Get bitable metadata from a URL (handles both /base/ and /wiki/ forms).
 * Resolves wiki node tokens to app tokens, fetches the app's display name,
 * and — when the URL carries no table id — best-effort lists the tables so
 * the caller can pick one (listing failures are silently ignored).
 */
async function getBitableMeta(client, url) {
  const parsed = parseBitableUrl(url);
  if (!parsed) throw new Error("Invalid URL format. Expected /base/XXX or /wiki/XXX URL");
  const appToken = parsed.isWiki
    ? await getAppTokenFromWiki(client, parsed.token)
    : parsed.token;
  const res = await client.bitable.app.get({ path: { app_token: appToken } });
  ensureLarkSuccess(res, "bitable.app.get", { appToken });
  let tables = [];
  if (!parsed.tableId) {
    const tablesRes = await client.bitable.appTable.list({ path: { app_token: appToken } });
    if (tablesRes.code === 0) {
      tables = (tablesRes.data?.items ?? []).map((t) => ({
        table_id: t.table_id,
        name: t.name
      }));
    }
  }
  const hint = parsed.tableId
    ? `Use app_token="${appToken}" and table_id="${parsed.tableId}" for other bitable tools`
    : `Use app_token="${appToken}" for other bitable tools. Select a table_id from the tables list.`;
  return {
    app_token: appToken,
    table_id: parsed.tableId,
    name: res.data?.app?.name,
    url_type: parsed.isWiki ? "wiki" : "base",
    ...tables.length > 0 && { tables },
    hint
  };
}
/**
 * List all fields (columns) of a Bitable table.
 * Each field is annotated with a readable `type_name` resolved from
 * FIELD_TYPE_NAMES (unknown type ids become `type_<id>`); empty `property`
 * objects are omitted from the output.
 */
async function listFields(client, appToken, tableId) {
  const res = await client.bitable.appTableField.list({
    path: {
      app_token: appToken,
      table_id: tableId
    }
  });
  ensureLarkSuccess(res, "bitable.appTableField.list", { appToken, tableId });
  const items = res.data?.items ?? [];
  const fields = items.map((f) => ({
    field_id: f.field_id,
    field_name: f.field_name,
    type: f.type,
    type_name: FIELD_TYPE_NAMES[f.type ?? 0] || `type_${f.type}`,
    is_primary: f.is_primary,
    ...f.property && { property: f.property }
  }));
  return { fields, total: items.length };
}
/**
 * List records (rows) of a Bitable table with pagination.
 * Defaults `page_size` to 100; `page_token` is only forwarded when set.
 * Returns the page plus `has_more`/`page_token`/`total` for continuation.
 */
async function listRecords(client, appToken, tableId, pageSize, pageToken) {
  const query = {
    page_size: pageSize ?? 100,
    ...pageToken && { page_token: pageToken }
  };
  const res = await client.bitable.appTableRecord.list({
    path: {
      app_token: appToken,
      table_id: tableId
    },
    params: query
  });
  ensureLarkSuccess(res, "bitable.appTableRecord.list", { appToken, tableId, pageSize });
  const data = res.data;
  return {
    records: data?.items ?? [],
    has_more: data?.has_more ?? false,
    page_token: data?.page_token,
    total: data?.total
  };
}
/** Fetch one record by id from a Bitable table. */
async function getRecord(client, appToken, tableId, recordId) {
  const path = {
    app_token: appToken,
    table_id: tableId,
    record_id: recordId
  };
  const res = await client.bitable.appTableRecord.get({ path });
  ensureLarkSuccess(res, "bitable.appTableRecord.get", { appToken, tableId, recordId });
  return { record: res.data?.record };
}
/**
 * Create a new record (row) in a Bitable table.
 * `fields` is a map of field name -> value in the API's per-type format.
 */
async function createRecord(client, appToken, tableId, fields) {
  const path = {
    app_token: appToken,
    table_id: tableId
  };
  const res = await client.bitable.appTableRecord.create({ path, data: { fields } });
  ensureLarkSuccess(res, "bitable.appTableRecord.create", { appToken, tableId });
  return { record: res.data?.record };
}
/** Default field types created for new Bitable tables (to be cleaned up) */
// Type ids per FIELD_TYPE_NAMES: 3 = SingleSelect, 5 = DateTime, 17 = Attachment.
// cleanupNewBitable deletes non-primary fields of these types from a fresh table.
const DEFAULT_CLEANUP_FIELD_TYPES = new Set([
  3,
  5,
  17
]);
/**
 * True when a Bitable field value counts as "empty" for placeholder
 * detection: nullish, empty string, an array/object whose contents are all
 * empty, or a `{ text, type? }` shape whose text is blank. Any other
 * primitive (numbers, booleans, non-empty strings) is non-empty.
 */
function isDefaultEmptyBitableFieldValue(value) {
  if (value === void 0 || value === null || value === "") return true;
  if (Array.isArray(value)) {
    return value.every(isDefaultEmptyBitableFieldValue);
  }
  if (typeof value !== "object") return false;
  const record = value;
  const keys = Object.keys(record);
  if (keys.length === 0) return true;
  // Rich-text cells look like { text, type? }; only their text matters.
  const isTextShape = "text" in record && keys.every((key) => key === "text" || key === "type");
  if (isTextShape) {
    return record.text === void 0 || record.text === null || record.text === "";
  }
  return Object.values(record).every(isDefaultEmptyBitableFieldValue);
}
/**
 * True when a record's `fields` object is a freshly-created placeholder:
 * not a plain object at all, or every field value is empty.
 */
function isPlaceholderBitableRecord(fields) {
  const isPlainObject = fields && typeof fields === "object" && !Array.isArray(fields);
  if (!isPlainObject) return true;
  return Object.values(fields).every(isDefaultEmptyBitableFieldValue);
}
/** Clean up default placeholder rows and fields in a newly created Bitable table */
// Best-effort, never throws for individual failures: every sub-step is wrapped
// in try/catch and logged at debug level. Steps, in order:
//   1. Rename the primary field after the table (fall back to "Name" when the
//      table name exceeds 20 chars — presumably a Feishu field-name cap; TODO confirm)
//      and force it to type 1 (Text).
//   2. Delete the auto-created non-primary fields whose type is in
//      DEFAULT_CLEANUP_FIELD_TYPES (SingleSelect/DateTime/Attachment).
//   3. Delete placeholder rows (all-empty per isPlaceholderBitableRecord),
//      first via batchDelete, falling back to one-by-one deletes if the batch
//      call fails. NOTE(review): only the first 100 records are scanned
//      (single page, page_size 100, no pagination).
// Returns counts of deleted rows and renamed/deleted fields.
async function cleanupNewBitable(client, appToken, tableId, tableName, logger) {
  let cleanedRows = 0;
  let cleanedFields = 0;
  const fieldsRes = await client.bitable.appTableField.list({ path: {
    app_token: appToken,
    table_id: tableId
  } });
  if (fieldsRes.code === 0 && fieldsRes.data?.items) {
    // Step 1: rename the primary field and normalize it to Text (type 1).
    const primaryField = fieldsRes.data.items.find((f) => f.is_primary);
    if (primaryField?.field_id) try {
      const newFieldName = tableName.length <= 20 ? tableName : "Name";
      await client.bitable.appTableField.update({
        path: {
          app_token: appToken,
          table_id: tableId,
          field_id: primaryField.field_id
        },
        data: {
          field_name: newFieldName,
          type: 1
        }
      });
      cleanedFields++;
    } catch (err) {
      logger.debug(`Failed to rename primary field: ${String(err)}`);
    }
    // Step 2: drop the default non-primary fields Feishu seeds into new tables.
    const defaultFieldsToDelete = fieldsRes.data.items.filter((f) => !f.is_primary && DEFAULT_CLEANUP_FIELD_TYPES.has(f.type ?? 0));
    for (const field of defaultFieldsToDelete) if (field.field_id) try {
      await client.bitable.appTableField.delete({ path: {
        app_token: appToken,
        table_id: tableId,
        field_id: field.field_id
      } });
      cleanedFields++;
    } catch (err) {
      logger.debug(`Failed to delete default field ${field.field_name}: ${String(err)}`);
    }
  }
  // Step 3: find and delete all-empty placeholder rows (first page only).
  const recordsRes = await client.bitable.appTableRecord.list({
    path: {
      app_token: appToken,
      table_id: tableId
    },
    params: { page_size: 100 }
  });
  if (recordsRes.code === 0 && recordsRes.data?.items) {
    const emptyRecordIds = recordsRes.data.items.filter((r) => isPlaceholderBitableRecord(r.fields)).map((r) => r.record_id).filter((id) => Boolean(id));
    if (emptyRecordIds.length > 0) try {
      // Prefer one batch call; counts all ids as cleaned on success.
      await client.bitable.appTableRecord.batchDelete({
        path: {
          app_token: appToken,
          table_id: tableId
        },
        data: { records: emptyRecordIds }
      });
      cleanedRows = emptyRecordIds.length;
    } catch {
      // Batch failed — fall back to deleting rows individually, counting each.
      for (const recordId of emptyRecordIds) try {
        await client.bitable.appTableRecord.delete({ path: {
          app_token: appToken,
          table_id: tableId,
          record_id: recordId
        } });
        cleanedRows++;
      } catch (err) {
        logger.debug(`Failed to delete empty row ${recordId}: ${String(err)}`);
      }
    }
  }
  return {
    cleanedRows,
    cleanedFields
  };
}
/**
 * Create a new Bitable application, optionally inside a folder, then run a
 * non-critical cleanup pass (cleanupNewBitable) on its default table.
 * Cleanup failures are logged at debug level and never fail the creation.
 * Returns the new app/table tokens, cleanup counts, and a usage hint.
 */
async function createApp(client, name, folderToken, logger) {
  const res = await client.bitable.app.create({ data: {
    name,
    ...folderToken && { folder_token: folderToken }
  } });
  ensureLarkSuccess(res, "bitable.app.create", { name, folderToken });
  const appToken = res.data?.app?.app_token;
  if (!appToken) throw new Error("Failed to create Bitable: no app_token returned");
  // Fall back to a no-op logger when the caller did not provide one.
  const log = logger ?? {
    debug: () => {},
    warn: () => {}
  };
  let tableId;
  let cleanedRows = 0;
  let cleanedFields = 0;
  try {
    const tablesRes = await client.bitable.appTable.list({ path: { app_token: appToken } });
    const firstTable = tablesRes.code === 0 ? tablesRes.data?.items?.[0] : void 0;
    tableId = firstTable?.table_id ?? void 0;
    if (tableId) {
      const cleanup = await cleanupNewBitable(client, appToken, tableId, name, log);
      cleanedRows = cleanup.cleanedRows;
      cleanedFields = cleanup.cleanedFields;
    }
  } catch (err) {
    log.debug(`Cleanup failed (non-critical): ${String(err)}`);
  }
  const hint = tableId
    ? `Table created. Use app_token="${appToken}" and table_id="${tableId}" for other bitable tools.`
    : "Table created. Use feishu_bitable_get_meta to get table_id and field details.";
  return {
    app_token: appToken,
    table_id: tableId,
    name: res.data?.app?.name,
    url: res.data?.app?.url,
    cleaned_placeholder_rows: cleanedRows,
    cleaned_default_fields: cleanedFields,
    hint
  };
}
/**
 * Create a new field (column) in a Bitable table.
 * `property` (type-specific options) is only sent when provided. The result
 * echoes the created field with a readable `type_name`.
 */
async function createField(client, appToken, tableId, fieldName, fieldType, property) {
  const res = await client.bitable.appTableField.create({
    path: {
      app_token: appToken,
      table_id: tableId
    },
    data: {
      field_name: fieldName,
      type: fieldType,
      ...property && { property }
    }
  });
  ensureLarkSuccess(res, "bitable.appTableField.create", { appToken, tableId, fieldName, fieldType });
  const created = res.data?.field;
  return {
    field_id: created?.field_id,
    field_name: created?.field_name,
    type: created?.type,
    type_name: FIELD_TYPE_NAMES[created?.type ?? 0] || `type_${created?.type}`
  };
}
/** Update an existing record's field values in a Bitable table. */
async function updateRecord(client, appToken, tableId, recordId, fields) {
  const path = {
    app_token: appToken,
    table_id: tableId,
    record_id: recordId
  };
  const res = await client.bitable.appTableRecord.update({ path, data: { fields } });
  ensureLarkSuccess(res, "bitable.appTableRecord.update", { appToken, tableId, recordId });
  return { record: res.data?.record };
}
// TypeBox parameter schemas for the feishu_bitable_* tools below.
// Most tools share app_token + table_id; descriptions steer the model toward
// feishu_bitable_get_meta for token discovery.

// feishu_bitable_get_meta: resolve a /base/ or /wiki/ URL into tokens.
const GetMetaSchema = Type.Object({ url: Type.String({ description: "Bitable URL. Supports both formats: /base/XXX?table=YYY or /wiki/XXX?table=YYY" }) });
// feishu_bitable_list_fields.
const ListFieldsSchema = Type.Object({
  app_token: Type.String({ description: "Bitable app token (use feishu_bitable_get_meta to get from URL)" }),
  table_id: Type.String({ description: "Table ID (from URL: ?table=YYY)" })
});
// feishu_bitable_list_records: paginated, page size capped at 500.
const ListRecordsSchema = Type.Object({
  app_token: Type.String({ description: "Bitable app token (use feishu_bitable_get_meta to get from URL)" }),
  table_id: Type.String({ description: "Table ID (from URL: ?table=YYY)" }),
  page_size: Type.Optional(Type.Number({
    description: "Number of records per page (1-500, default 100)",
    minimum: 1,
    maximum: 500
  })),
  page_token: Type.Optional(Type.String({ description: "Pagination token from previous response" }))
});
// feishu_bitable_get_record.
const GetRecordSchema = Type.Object({
  app_token: Type.String({ description: "Bitable app token (use feishu_bitable_get_meta to get from URL)" }),
  table_id: Type.String({ description: "Table ID (from URL: ?table=YYY)" }),
  record_id: Type.String({ description: "Record ID to retrieve" })
});
// feishu_bitable_create_record: free-form field map; per-type value formats
// are documented in the description.
const CreateRecordSchema = Type.Object({
  app_token: Type.String({ description: "Bitable app token (use feishu_bitable_get_meta to get from URL)" }),
  table_id: Type.String({ description: "Table ID (from URL: ?table=YYY)" }),
  fields: Type.Record(Type.String(), Type.Any(), { description: "Field values keyed by field name. Format by type: Text='string', Number=123, SingleSelect='Option', MultiSelect=['A','B'], DateTime=timestamp_ms, User=[{id:'ou_xxx'}], URL={text:'Display',link:'https://...'}" })
});
// feishu_bitable_create_app.
const CreateAppSchema = Type.Object({
  name: Type.String({ description: "Name for the new Bitable application" }),
  folder_token: Type.Optional(Type.String({ description: "Optional folder token to place the Bitable in a specific folder" }))
});
// feishu_bitable_create_field: field_type ids mirror FIELD_TYPE_NAMES.
const CreateFieldSchema = Type.Object({
  app_token: Type.String({ description: "Bitable app token (use feishu_bitable_get_meta to get from URL, or feishu_bitable_create_app to create new)" }),
  table_id: Type.String({ description: "Table ID (from URL: ?table=YYY)" }),
  field_name: Type.String({ description: "Name for the new field" }),
  field_type: Type.Number({
    description: "Field type ID: 1=Text, 2=Number, 3=SingleSelect, 4=MultiSelect, 5=DateTime, 7=Checkbox, 11=User, 13=Phone, 15=URL, 17=Attachment, 18=SingleLink, 19=Lookup, 20=Formula, 21=DuplexLink, 22=Location, 23=GroupChat, 1001=CreatedTime, 1002=ModifiedTime, 1003=CreatedUser, 1004=ModifiedUser, 1005=AutoNumber",
    minimum: 1
  }),
  property: Type.Optional(Type.Record(Type.String(), Type.Any(), { description: "Field-specific properties (e.g., options for SingleSelect, format for Number)" }))
});
// feishu_bitable_update_record.
const UpdateRecordSchema = Type.Object({
  app_token: Type.String({ description: "Bitable app token (use feishu_bitable_get_meta to get from URL)" }),
  table_id: Type.String({ description: "Table ID (from URL: ?table=YYY)" }),
  record_id: Type.String({ description: "Record ID to update" }),
  fields: Type.Record(Type.String(), Type.Any(), { description: "Field values to update (same format as create_record)" })
});
/**
 * Register the feishu_bitable_* tool family (get_meta, list_fields,
 * list_records, get_record, create_record, update_record, create_app,
 * create_field). No-op unless the plugin is configured with at least one
 * enabled Feishu account. Every tool shares the same wrapper: build a
 * client, run the handler, serialize the result via json(), and convert
 * thrown errors into `{ error }` results.
 */
function registerFeishuBitableTools(api) {
  if (!api.config) return;
  if (listEnabledFeishuAccounts(api.config).length === 0) return;
  // Build an account-scoped client from the tool params + agent account.
  const getClient = (params, defaultAccountId) => createFeishuToolClient({
    api,
    executeParams: params,
    defaultAccountId
  });
  // Shared registration wrapper: uniform error handling + JSON serialization.
  const registerBitableTool = (spec) => {
    api.registerTool((ctx) => ({
      name: spec.name,
      label: spec.label,
      description: spec.description,
      parameters: spec.parameters,
      async execute(_toolCallId, rawParams) {
        try {
          const result = await spec.execute({
            params: rawParams,
            defaultAccountId: ctx.agentAccountId
          });
          return json(result);
        } catch (err) {
          return json({ error: formatErrorMessage(err) });
        }
      }
    }), { name: spec.name });
  };
  registerBitableTool({
    name: "feishu_bitable_get_meta",
    label: "Feishu Bitable Get Meta",
    description: "Parse a Bitable URL and get app_token, table_id, and table list. Use this first when given a /wiki/ or /base/ URL.",
    parameters: GetMetaSchema,
    execute: ({ params, defaultAccountId }) => getBitableMeta(getClient(params, defaultAccountId), params.url)
  });
  registerBitableTool({
    name: "feishu_bitable_list_fields",
    label: "Feishu Bitable List Fields",
    description: "List all fields (columns) in a Bitable table with their types and properties",
    parameters: ListFieldsSchema,
    execute: ({ params, defaultAccountId }) => listFields(getClient(params, defaultAccountId), params.app_token, params.table_id)
  });
  registerBitableTool({
    name: "feishu_bitable_list_records",
    label: "Feishu Bitable List Records",
    description: "List records (rows) from a Bitable table with pagination support",
    parameters: ListRecordsSchema,
    execute: ({ params, defaultAccountId }) => listRecords(getClient(params, defaultAccountId), params.app_token, params.table_id, params.page_size, params.page_token)
  });
  registerBitableTool({
    name: "feishu_bitable_get_record",
    label: "Feishu Bitable Get Record",
    description: "Get a single record by ID from a Bitable table",
    parameters: GetRecordSchema,
    execute: ({ params, defaultAccountId }) => getRecord(getClient(params, defaultAccountId), params.app_token, params.table_id, params.record_id)
  });
  registerBitableTool({
    name: "feishu_bitable_create_record",
    label: "Feishu Bitable Create Record",
    description: "Create a new record (row) in a Bitable table",
    parameters: CreateRecordSchema,
    execute: ({ params, defaultAccountId }) => createRecord(getClient(params, defaultAccountId), params.app_token, params.table_id, params.fields)
  });
  registerBitableTool({
    name: "feishu_bitable_update_record",
    label: "Feishu Bitable Update Record",
    description: "Update an existing record (row) in a Bitable table",
    parameters: UpdateRecordSchema,
    execute: ({ params, defaultAccountId }) => updateRecord(getClient(params, defaultAccountId), params.app_token, params.table_id, params.record_id, params.fields)
  });
  registerBitableTool({
    name: "feishu_bitable_create_app",
    label: "Feishu Bitable Create App",
    description: "Create a new Bitable (multidimensional table) application",
    parameters: CreateAppSchema,
    // createApp takes a logger; adapt the plugin logger's optional methods.
    execute: ({ params, defaultAccountId }) => createApp(getClient(params, defaultAccountId), params.name, params.folder_token, {
      debug: (msg) => api.logger.debug?.(msg),
      warn: (msg) => api.logger.warn?.(msg)
    })
  });
  registerBitableTool({
    name: "feishu_bitable_create_field",
    label: "Feishu Bitable Create Field",
    description: "Create a new field (column) in a Bitable table",
    parameters: CreateFieldSchema,
    execute: ({ params, defaultAccountId }) => createField(getClient(params, defaultAccountId), params.app_token, params.table_id, params.field_name, params.field_type, params.property)
  });
}
2276
|
+
//#endregion
|
|
2277
|
+
//#region extensions/feishu/api.ts
|
|
// Channel ids served by the Feishu session-binding adapter — presumably
// consumed by the host's session-binding registry; verify at the import site.
const feishuSessionBindingAdapterChannels = ["feishu"];
2279
|
+
//#endregion
|
|
2280
|
+
export { __testing, __testing as feishuThreadBindingTesting, buildFeishuConversationId, buildFeishuModelOverrideParentCandidates, createClackPrompter, createFeishuThreadBindingManager, feishuPlugin, feishuSessionBindingAdapterChannels, feishuSetupAdapter, feishuSetupWizard, getFeishuThreadBindingManager, handleFeishuSubagentDeliveryTarget, handleFeishuSubagentEnded, handleFeishuSubagentSpawning, parseFeishuConversationId, parseFeishuDirectConversationId, parseFeishuTargetId, registerFeishuBitableTools, registerFeishuChatTools, registerFeishuDocTools, registerFeishuDriveTools, registerFeishuPermTools, registerFeishuWikiTools, runFeishuLogin, setFeishuNamedAccountEnabled };
|