@superbenxxxh/feishu 2.0.0 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +423 -322
- package/index.ts +21 -13
- package/openclaw.plugin.json +10 -9
- package/package.json +2 -1
- package/src/bot.ts +35 -21
- package/src/channel.ts +13 -2
- package/src/config-schema.ts +31 -11
- package/src/doc-schema.ts +47 -0
- package/src/docx.ts +506 -0
- package/src/drive-schema.ts +46 -0
- package/src/drive.ts +201 -0
- package/src/perm-schema.ts +52 -0
- package/src/perm.ts +160 -0
- package/src/tools-config.ts +21 -0
- package/src/types.ts +13 -5
- package/src/wiki-schema.ts +55 -0
- package/src/wiki.ts +218 -0
package/src/docx.ts
ADDED
@@ -0,0 +1,506 @@
import { Type } from "@sinclair/typebox";
import type { OpenClawPluginApi } from "openclaw/plugin-sdk";
import { createFeishuClient } from "./client.js";
import type { FeishuConfig } from "./types.js";
import type * as Lark from "@larksuiteoapi/node-sdk";
import { Readable } from "stream";
import { FeishuDocSchema, type FeishuDocParams } from "./doc-schema.js";
import { resolveToolsConfig } from "./tools-config.js";

// ============ Helpers ============

function json(data: unknown) {
  return {
    content: [{ type: "text" as const, text: JSON.stringify(data, null, 2) }],
    details: data,
  };
}

function looksLikeMarkdownContent(text: string): boolean {
  if (text.includes("\n")) return true;
  if (/```/.test(text)) return true;
  if (/^#{1,6}\s+/m.test(text)) return true;
  if (/^\s*[-*+]\s+/m.test(text)) return true;
  if (/^\s*\d+\.\s+/m.test(text)) return true;
  if (/\|.+\|/.test(text)) return true;
  return false;
}

function deriveTitleFromContent(content: string): string {
  const firstLine =
    content
      .split(/\r?\n/)
      .map((line) => line.trim())
      .find((line) => line.length > 0) ?? "Untitled";
  return firstLine.replace(/^#{1,6}\s+/, "").slice(0, 80);
}

/** Extract image URLs from markdown content */
function extractImageUrls(markdown: string): string[] {
  const regex = /!\[[^\]]*\]\(([^)]+)\)/g;
  const urls: string[] = [];
  let match;
  while ((match = regex.exec(markdown)) !== null) {
    const url = match[1].trim();
    if (url.startsWith("http://") || url.startsWith("https://")) {
      urls.push(url);
    }
  }
  return urls;
}

const BLOCK_TYPE_NAMES: Record<number, string> = {
  1: "Page",
  2: "Text",
  3: "Heading1",
  4: "Heading2",
  5: "Heading3",
  12: "Bullet",
  13: "Ordered",
  14: "Code",
  15: "Quote",
  17: "Todo",
  18: "Bitable",
  21: "Diagram",
  22: "Divider",
  23: "File",
  27: "Image",
  30: "Sheet",
  31: "Table",
  32: "TableCell",
};

// Block types that cannot be created via documentBlockChildren.create API
const UNSUPPORTED_CREATE_TYPES = new Set([31, 32]);
const MAX_CHILDREN_PER_REQUEST = 50;

/** Clean blocks for insertion (remove unsupported types and read-only fields) */
function cleanBlocksForInsert(blocks: any[]): { cleaned: any[]; skipped: string[] } {
  const skipped: string[] = [];
  const cleaned = blocks
    .filter((block) => {
      if (UNSUPPORTED_CREATE_TYPES.has(block.block_type)) {
        const typeName = BLOCK_TYPE_NAMES[block.block_type] || `type_${block.block_type}`;
        skipped.push(typeName);
        return false;
      }
      return true;
    })
    .map((block) => {
      if (block.block_type === 31 && block.table?.merge_info) {
        const { merge_info, ...tableRest } = block.table;
        return { ...block, table: tableRest };
      }
      return block;
    });
  return { cleaned, skipped };
}

// ============ Core Functions ============

async function convertMarkdown(client: Lark.Client, markdown: string) {
  const res = await client.docx.document.convert({
    data: { content_type: "markdown", content: markdown },
  });
  if (res.code !== 0) throw new Error(res.msg);
  return {
    blocks: res.data?.blocks ?? [],
    firstLevelBlockIds: res.data?.first_level_block_ids ?? [],
  };
}

async function insertBlocks(
  client: Lark.Client,
  docToken: string,
  blocks: any[],
  parentBlockId?: string,
): Promise<{ children: any[]; skipped: string[] }> {
  const { cleaned, skipped } = cleanBlocksForInsert(blocks);
  const blockId = parentBlockId ?? docToken;

  if (cleaned.length === 0) {
    return { children: [], skipped };
  }

  const inserted: any[] = [];
  for (let i = 0; i < cleaned.length; i += MAX_CHILDREN_PER_REQUEST) {
    const batch = cleaned.slice(i, i + MAX_CHILDREN_PER_REQUEST);
    const res = await client.docx.documentBlockChildren.create({
      path: { document_id: docToken, block_id: blockId },
      data: { children: batch },
    });
    if (res.code !== 0) throw new Error(res.msg);
    inserted.push(...(res.data?.children ?? []));
  }

  return { children: inserted, skipped };
}

async function clearDocumentContent(client: Lark.Client, docToken: string) {
  const existing = await client.docx.documentBlock.list({
    path: { document_id: docToken },
  });
  if (existing.code !== 0) throw new Error(existing.msg);

  const childIds =
    existing.data?.items
      ?.filter((b) => b.parent_id === docToken && b.block_type !== 1)
      .map((b) => b.block_id) ?? [];

  if (childIds.length > 0) {
    const res = await client.docx.documentBlockChildren.batchDelete({
      path: { document_id: docToken, block_id: docToken },
      data: { start_index: 0, end_index: childIds.length },
    });
    if (res.code !== 0) throw new Error(res.msg);
  }

  return childIds.length;
}

async function uploadImageToDocx(
  client: Lark.Client,
  blockId: string,
  imageBuffer: Buffer,
  fileName: string,
): Promise<string> {
  const res = await client.drive.media.uploadAll({
    data: {
      file_name: fileName,
      parent_type: "docx_image",
      parent_node: blockId,
      size: imageBuffer.length,
      file: Readable.from(imageBuffer) as any,
    },
  });

  const fileToken = res?.file_token;
  if (!fileToken) {
    throw new Error("Image upload failed: no file_token returned");
  }
  return fileToken;
}

async function downloadImage(url: string): Promise<Buffer> {
  const response = await fetch(url);
  if (!response.ok) {
    throw new Error(`Failed to download image: ${response.status} ${response.statusText}`);
  }
  return Buffer.from(await response.arrayBuffer());
}

async function processImages(
  client: Lark.Client,
  docToken: string,
  markdown: string,
  insertedBlocks: any[],
): Promise<number> {
  const imageUrls = extractImageUrls(markdown);
  if (imageUrls.length === 0) return 0;

  const imageBlocks = insertedBlocks.filter((b) => b.block_type === 27);

  let processed = 0;
  for (let i = 0; i < Math.min(imageUrls.length, imageBlocks.length); i++) {
    const url = imageUrls[i];
    const blockId = imageBlocks[i].block_id;

    try {
      const buffer = await downloadImage(url);
      const urlPath = new URL(url).pathname;
      const fileName = urlPath.split("/").pop() || `image_${i}.png`;
      const fileToken = await uploadImageToDocx(client, blockId, buffer, fileName);

      await client.docx.documentBlock.patch({
        path: { document_id: docToken, block_id: blockId },
        data: {
          replace_image: { token: fileToken },
        },
      });

      processed++;
    } catch (err) {
      console.error(`Failed to process image ${url}:`, err);
    }
  }

  return processed;
}

// ============ Actions ============

const STRUCTURED_BLOCK_TYPES = new Set([14, 18, 21, 23, 27, 30, 31, 32]);

async function readDoc(client: Lark.Client, docToken: string) {
  const [contentRes, infoRes, blocksRes] = await Promise.all([
    client.docx.document.rawContent({ path: { document_id: docToken } }),
    client.docx.document.get({ path: { document_id: docToken } }),
    client.docx.documentBlock.list({ path: { document_id: docToken } }),
  ]);

  if (contentRes.code !== 0) throw new Error(contentRes.msg);

  const blocks = blocksRes.data?.items ?? [];
  const blockCounts: Record<string, number> = {};
  const structuredTypes: string[] = [];

  for (const b of blocks) {
    const type = b.block_type ?? 0;
    const name = BLOCK_TYPE_NAMES[type] || `type_${type}`;
    blockCounts[name] = (blockCounts[name] || 0) + 1;

    if (STRUCTURED_BLOCK_TYPES.has(type) && !structuredTypes.includes(name)) {
      structuredTypes.push(name);
    }
  }

  let hint: string | undefined;
  if (structuredTypes.length > 0) {
    hint = `This document contains ${structuredTypes.join(", ")} which are NOT included in the plain text above. Use feishu_doc with action: "list_blocks" to get full content.`;
  }

  return {
    title: infoRes.data?.document?.title,
    content: contentRes.data?.content,
    revision_id: infoRes.data?.document?.revision_id,
    block_count: blocks.length,
    block_types: blockCounts,
    ...(hint && { hint }),
  };
}

async function createDoc(client: Lark.Client, title: string, folderToken?: string) {
  const res = await client.docx.document.create({
    data: { title, folder_token: folderToken },
  });
  if (res.code !== 0) throw new Error(res.msg);
  const doc = res.data?.document;
  return {
    document_id: doc?.document_id,
    title: doc?.title,
    url: `https://feishu.cn/docx/${doc?.document_id}`,
  };
}

async function writeDoc(client: Lark.Client, docToken: string, markdown: string) {
  const deleted = await clearDocumentContent(client, docToken);

  const { blocks } = await convertMarkdown(client, markdown);
  if (blocks.length === 0) {
    return { success: true, blocks_deleted: deleted, blocks_added: 0, images_processed: 0 };
  }

  const { children: inserted, skipped } = await insertBlocks(client, docToken, blocks);
  const imagesProcessed = await processImages(client, docToken, markdown, inserted);

  return {
    success: true,
    blocks_deleted: deleted,
    blocks_added: inserted.length,
    images_processed: imagesProcessed,
    ...(skipped.length > 0 && {
      warning: `Skipped unsupported block types: ${skipped.join(", ")}. Tables are not supported via this API.`,
    }),
  };
}

async function appendDoc(client: Lark.Client, docToken: string, markdown: string) {
  const { blocks } = await convertMarkdown(client, markdown);
  if (blocks.length === 0) {
    throw new Error("Content is empty");
  }

  const { children: inserted, skipped } = await insertBlocks(client, docToken, blocks);
  const imagesProcessed = await processImages(client, docToken, markdown, inserted);

  return {
    success: true,
    blocks_added: inserted.length,
    images_processed: imagesProcessed,
    block_ids: inserted.map((b: any) => b.block_id),
    ...(skipped.length > 0 && {
      warning: `Skipped unsupported block types: ${skipped.join(", ")}. Tables are not supported via this API.`,
    }),
  };
}

async function updateBlock(
  client: Lark.Client,
  docToken: string,
  blockId: string,
  content: string,
) {
  const blockInfo = await client.docx.documentBlock.get({
    path: { document_id: docToken, block_id: blockId },
  });
  if (blockInfo.code !== 0) throw new Error(blockInfo.msg);

  const res = await client.docx.documentBlock.patch({
    path: { document_id: docToken, block_id: blockId },
    data: {
      update_text_elements: {
        elements: [{ text_run: { content } }],
      },
    },
  });
  if (res.code !== 0) throw new Error(res.msg);

  return { success: true, block_id: blockId };
}

async function deleteBlock(client: Lark.Client, docToken: string, blockId: string) {
  const blockInfo = await client.docx.documentBlock.get({
    path: { document_id: docToken, block_id: blockId },
  });
  if (blockInfo.code !== 0) throw new Error(blockInfo.msg);

  const parentId = blockInfo.data?.block?.parent_id ?? docToken;

  const children = await client.docx.documentBlockChildren.get({
    path: { document_id: docToken, block_id: parentId },
  });
  if (children.code !== 0) throw new Error(children.msg);

  const items = children.data?.items ?? [];
  const index = items.findIndex((item: any) => item.block_id === blockId);
  if (index === -1) throw new Error("Block not found");

  const res = await client.docx.documentBlockChildren.batchDelete({
    path: { document_id: docToken, block_id: parentId },
    data: { start_index: index, end_index: index + 1 },
  });
  if (res.code !== 0) throw new Error(res.msg);

  return { success: true, deleted_block_id: blockId };
}

async function listBlocks(client: Lark.Client, docToken: string) {
  const res = await client.docx.documentBlock.list({
    path: { document_id: docToken },
  });
  if (res.code !== 0) throw new Error(res.msg);

  return {
    blocks: res.data?.items ?? [],
  };
}

async function getBlock(client: Lark.Client, docToken: string, blockId: string) {
  const res = await client.docx.documentBlock.get({
    path: { document_id: docToken, block_id: blockId },
  });
  if (res.code !== 0) throw new Error(res.msg);

  return {
    block: res.data?.block,
  };
}

async function listAppScopes(client: Lark.Client) {
  const res = await client.application.scope.list({});
  if (res.code !== 0) throw new Error(res.msg);

  const scopes = res.data?.scopes ?? [];
  const granted = scopes.filter((s) => s.grant_status === 1);
  const pending = scopes.filter((s) => s.grant_status !== 1);

  return {
    granted: granted.map((s) => ({ name: s.scope_name, type: s.scope_type })),
    pending: pending.map((s) => ({ name: s.scope_name, type: s.scope_type })),
    summary: `${granted.length} granted, ${pending.length} pending`,
  };
}

// ============ Tool Registration ============

export function registerFeishuDocTools(api: OpenClawPluginApi) {
  const feishuCfg = api.config?.channels?.feishu as FeishuConfig | undefined;
  if (!feishuCfg?.appId || !feishuCfg?.appSecret) {
    api.logger.debug?.("feishu_doc: Feishu credentials not configured, skipping doc tools");
    return;
  }

  const toolsCfg = resolveToolsConfig(feishuCfg.tools);
  const getClient = () => createFeishuClient(feishuCfg);
  const registered: string[] = [];

  // Main document tool with action-based dispatch
  if (toolsCfg.doc) {
    api.registerTool(
      {
        name: "feishu_doc",
        label: "Feishu Doc",
        description:
          "Feishu document operations. Actions: read, write, append, create, list_blocks, get_block, update_block, delete_block",
        parameters: FeishuDocSchema,
        async execute(_toolCallId, params) {
          const p = params as FeishuDocParams;
          try {
            const client = getClient();
            switch (p.action) {
              case "read":
                return json(await readDoc(client, p.doc_token));
              case "write":
                return json(await writeDoc(client, p.doc_token, p.content));
              case "append":
                return json(await appendDoc(client, p.doc_token, p.content));
              case "create":
                if (looksLikeMarkdownContent(p.title)) {
                  const content = p.title;
                  const safeTitle = deriveTitleFromContent(content);
                  const created = await createDoc(client, safeTitle, p.folder_token);
                  if (created.document_id) {
                    await writeDoc(client, created.document_id, content);
                    return json({ ...created, warning: "Title looked like content; auto-wrote body." });
                  }
                  return json(created);
                }
                return json(await createDoc(client, p.title, p.folder_token));
              case "list_blocks":
                return json(await listBlocks(client, p.doc_token));
              case "get_block":
                return json(await getBlock(client, p.doc_token, p.block_id));
              case "update_block":
                return json(await updateBlock(client, p.doc_token, p.block_id, p.content));
              case "delete_block":
                return json(await deleteBlock(client, p.doc_token, p.block_id));
              default:
                return json({ error: `Unknown action: ${(p as any).action}` });
            }
          } catch (err) {
            return json({ error: err instanceof Error ? err.message : String(err) });
          }
        },
      },
      { name: "feishu_doc" },
    );
    registered.push("feishu_doc");
  }

  // Keep feishu_app_scopes as independent tool
  if (toolsCfg.scopes) {
    api.registerTool(
      {
        name: "feishu_app_scopes",
        label: "Feishu App Scopes",
        description:
          "List current app permissions (scopes). Use to debug permission issues or check available capabilities.",
        parameters: Type.Object({}),
        async execute() {
          try {
            const result = await listAppScopes(getClient());
            return json(result);
          } catch (err) {
            return json({ error: err instanceof Error ? err.message : String(err) });
          }
        },
      },
      { name: "feishu_app_scopes" },
    );
    registered.push("feishu_app_scopes");
  }

  if (registered.length > 0) {
    api.logger.info?.(`feishu_doc: Registered ${registered.join(", ")}`);
  }
}
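Note: the feishu_doc tool above dispatches on an `action` field, so each call carries the action name plus only the fields that action needs (the exact shape is defined by FeishuDocSchema in doc-schema.ts, not shown in this hunk). A minimal sketch of two hypothetical parameter payloads, with placeholder tokens:

// Sketch only: example payloads for the feishu_doc tool registered above (tokens are placeholders).
const appendParams = {
  action: "append",
  doc_token: "doxcnXXXXXXXX",                         // target document
  content: "## Weekly update\n- shipped docx tools",  // markdown, converted via docx.document.convert
};

const createParams = {
  action: "create",
  title: "Release notes",          // if this looks like markdown, a title is derived and the body auto-written
  folder_token: "fldcnXXXXXXXX",   // optional destination folder
};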
package/src/drive-schema.ts
ADDED
@@ -0,0 +1,46 @@
import { Type, type Static } from "@sinclair/typebox";

const FileType = Type.Union([
  Type.Literal("doc"),
  Type.Literal("docx"),
  Type.Literal("sheet"),
  Type.Literal("bitable"),
  Type.Literal("folder"),
  Type.Literal("file"),
  Type.Literal("mindnote"),
  Type.Literal("shortcut"),
]);

export const FeishuDriveSchema = Type.Union([
  Type.Object({
    action: Type.Literal("list"),
    folder_token: Type.Optional(
      Type.String({ description: "Folder token (optional, omit for root directory)" }),
    ),
  }),
  Type.Object({
    action: Type.Literal("info"),
    file_token: Type.String({ description: "File or folder token" }),
    type: FileType,
  }),
  Type.Object({
    action: Type.Literal("create_folder"),
    name: Type.String({ description: "Folder name" }),
    folder_token: Type.Optional(
      Type.String({ description: "Parent folder token (optional, omit for root)" }),
    ),
  }),
  Type.Object({
    action: Type.Literal("move"),
    file_token: Type.String({ description: "File token to move" }),
    type: FileType,
    folder_token: Type.String({ description: "Target folder token" }),
  }),
  Type.Object({
    action: Type.Literal("delete"),
    file_token: Type.String({ description: "File token to delete" }),
    type: FileType,
  }),
]);

export type FeishuDriveParams = Static<typeof FeishuDriveSchema>;
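Note: FeishuDriveSchema is a discriminated union on `action`, so a payload only needs the fields of its branch. A minimal sketch of values that satisfy the schema above, using the exported FeishuDriveParams type and placeholder tokens:

// Sketch only: example FeishuDriveParams payloads (tokens are placeholders).
const moveParams: FeishuDriveParams = {
  action: "move",
  file_token: "doxcnXXXXXXXX",    // file to move
  type: "docx",                   // one of the FileType literals
  folder_token: "fldcnXXXXXXXX",  // destination folder
};

const listRoot: FeishuDriveParams = { action: "list" }; // omit folder_token to list the root directory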