@yuuko1410/feishu-bitable 0.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/client.d.ts +33 -0
- package/lib/errors.d.ts +9 -0
- package/lib/index.cjs +454 -0
- package/lib/index.d.ts +6 -0
- package/lib/index.js +388 -0
- package/lib/types.d.ts +106 -0
- package/lib/utils.d.ts +24 -0
- package/package.json +53 -0
package/lib/client.d.ts
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
import * as lark from "@larksuiteoapi/node-sdk";
import type { BatchOperationOptions, BitableBatchUpdatePayload, BitableBatchUpdateResponse, BitableConstructorOptions, BitableInsertRecord, BitableRecord, BitableUpdateRecord, FetchAllRecordsOptions, UpdateRecordsOptions, UploadFileOptions } from "./types";
/**
 * Thin convenience wrapper around the Feishu/Lark Bitable (multi-dimensional
 * table) APIs: paged reads, chunked batch writes/deletes with retry and
 * bounded concurrency, and drive media upload/download.
 */
export declare class Bitable {
    /** Underlying Lark SDK client used for every API call. */
    readonly client: lark.Client;
    /** App token used when a method is not given one explicitly. */
    readonly defaultAppToken?: string;
    /** Maximum attempts per API call (includes the first try). */
    readonly maxRetries: number;
    /** Base delay between retries; multiplied by the attempt number. */
    readonly retryDelayMs: number;
    /** Concurrency used for chunked batch operations when not overridden. */
    readonly defaultConcurrency: number;
    /** Construct from an options object. */
    constructor(options?: BitableConstructorOptions);
    /** Legacy positional form: (appToken, appId, appSecret). */
    constructor(defaultAppToken?: string | null, appId?: string | null, appSecret?: string | null);
    /** Build a client from FEISHU_APP_ID / FEISHU_APP_SECRET / FEISHU_APP_TOKEN. */
    static fromEnv(env?: NodeJS.ProcessEnv): Bitable;
    /** Fetch every record of a table, following pagination to the end. */
    fetchAllRecords(tableId: string, options?: FetchAllRecordsOptions, appToken?: string): Promise<BitableRecord[]>;
    /** Insert records in chunks; resolves to one raw response per chunk. */
    insertList(tableId: string, records: BitableInsertRecord[], options?: BatchOperationOptions): Promise<Array<{
        code?: number;
        msg?: string;
        data?: unknown;
    }>>;
    /** Single batch-update call with a caller-built payload. */
    batchUpdateRecords(payload: BitableBatchUpdatePayload): Promise<BitableBatchUpdateResponse>;
    /** Update records in chunks; each record carries its own record_id. */
    updateRecords(tableId: string, records: BitableUpdateRecord[], options?: UpdateRecordsOptions): Promise<BitableBatchUpdateResponse[]>;
    /** Delete records by id in chunks; resolves to one raw response per chunk. */
    deleteList(tableId: string, recordIds: string[], options?: BatchOperationOptions): Promise<Array<{
        code?: number;
        msg?: string;
        data?: unknown;
    }>>;
    /** Upload a file to drive; switches to multipart upload for large files. */
    uploadFile(options: UploadFileOptions): Promise<{
        file_token?: string;
    }>;
    /** Download a drive media file into a Buffer. */
    downloadFile(fileToken: string, extra?: string): Promise<Buffer>;
    /** @deprecated misspelled alias kept for backward compatibility; use downloadFile. */
    downLoadFile(fileToken: string, extra?: string): Promise<Buffer>;
    private resolveConstructorOptions;
    private resolveAppToken;
    private withRetry;
}
|
package/lib/errors.d.ts
ADDED
package/lib/index.cjs
ADDED
|
@@ -0,0 +1,454 @@
|
|
|
1
|
+
// Bundler-generated CommonJS interop helpers (do not edit by hand).
var __create = Object.create;
var __getProtoOf = Object.getPrototypeOf;
var __defProp = Object.defineProperty;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Bound as a getter so re-exports stay live views onto the source module.
function __accessProp(key) {
  return this[key];
}
var __toESMCache_node;
var __toESMCache_esm;
// Wraps a CJS module in an ESM-shaped namespace object (adds `default`,
// re-exposes own properties as live getters). Results are memoized per mode.
var __toESM = (mod, isNodeMode, target) => {
  var canCache = mod != null && typeof mod === "object";
  if (canCache) {
    var cache = isNodeMode ? __toESMCache_node ??= new WeakMap : __toESMCache_esm ??= new WeakMap;
    var cached = cache.get(mod);
    if (cached)
      return cached;
  }
  target = mod != null ? __create(__getProtoOf(mod)) : {};
  const to = isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target;
  for (let key of __getOwnPropNames(mod))
    if (!__hasOwnProp.call(to, key))
      __defProp(to, key, {
        get: __accessProp.bind(mod, key),
        enumerable: true
      });
  if (canCache)
    cache.set(mod, to);
  return to;
};
// Converts an internal ESM export object into the `module.exports` shape,
// tagging it __esModule and mirroring exports as live getters (memoized).
var __toCommonJS = (from) => {
  var entry = (__moduleCache ??= new WeakMap).get(from), desc;
  if (entry)
    return entry;
  entry = __defProp({}, "__esModule", { value: true });
  if (from && typeof from === "object" || typeof from === "function") {
    for (var key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(entry, key))
        __defProp(entry, key, {
          get: __accessProp.bind(from, key),
          enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
        });
  }
  __moduleCache.set(from, entry);
  return entry;
};
var __moduleCache;
var __returnValue = (v) => v;
// Setter installed on exported bindings so assignments replace the getter.
function __exportSetter(name, newValue) {
  this[name] = __returnValue.bind(null, newValue);
}
// Defines every entry of `all` on `target` as a live, configurable accessor.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, {
      get: all[name],
      enumerable: true,
      configurable: true,
      set: __exportSetter.bind(all, name)
    });
};
|
|
62
|
+
|
|
63
|
+
// src/index.ts
|
|
64
|
+
// src/index.ts — public export surface of the package.
var exports_src = {};
__export(exports_src, {
  default: () => src_default,
  LoggerLevel: () => import_node_sdk.LoggerLevel,
  FeishuBitableError: () => FeishuBitableError,
  Domain: () => import_node_sdk.Domain,
  Bitable: () => Bitable,
  AppType: () => import_node_sdk.AppType
});
// Safe to export before Bitable/src_default are defined below: __export
// installs lazy getters, and the `var` bindings are hoisted.
module.exports = __toCommonJS(exports_src);

// src/client.ts
var lark = __toESM(require("@larksuiteoapi/node-sdk"));
|
|
77
|
+
|
|
78
|
+
// src/errors.ts
|
|
79
|
+
// src/errors.ts
// Error type thrown by every failure path in this package. Carries the
// Feishu API error code and the raw response (when available) for debugging.
class FeishuBitableError extends Error {
  code;
  details;
  constructor(message, options) {
    // Only forward `cause` to Error when one was actually provided.
    const errorOptions = options?.cause ? { cause: options.cause } : undefined;
    super(message, errorOptions);
    this.name = "FeishuBitableError";
    this.code = options?.code;
    this.details = options?.details;
  }
}
|
|
89
|
+
|
|
90
|
+
// src/utils.ts
|
|
91
|
+
// Matches a base64 data URL; capture 1 = MIME type, capture 2 = base64 payload.
var DATA_URL_PATTERN = /^data:([A-Za-z-+/]+\/[A-Za-z0-9.+-]+)?;base64,(.+)$/;
// MIME type -> file extension used when inferring an upload file name.
var MIME_EXTENSION_MAP = {
  "image/jpeg": "jpg",
  "image/jpg": "jpg",
  "image/png": "png",
  "image/gif": "gif",
  "image/webp": "webp",
  "application/pdf": "pdf",
  "text/plain": "txt"
};
// Upper bound used to clamp chunk/page sizes for batch record operations.
var FEISHU_BATCH_LIMIT = 500;
// Files at or under 20 MiB use single-shot upload; larger ones go multipart.
var FEISHU_SIMPLE_UPLOAD_LIMIT = 20 * 1024 * 1024;
|
|
103
|
+
// Splits `items` into consecutive slices of at most `chunkSize` elements,
// with the size clamped into [1, FEISHU_BATCH_LIMIT].
function chunkArray(items, chunkSize) {
  const size = Math.max(1, Math.min(chunkSize, FEISHU_BATCH_LIMIT));
  const chunks = [];
  let start = 0;
  while (start < items.length) {
    chunks.push(items.slice(start, start + size));
    start += size;
  }
  return chunks;
}
|
|
111
|
+
// Resolves after roughly `ms` milliseconds; used to pace retry attempts.
async function sleep(ms) {
  return new Promise((resolve) => setTimeout(resolve, ms));
}
|
|
114
|
+
// Maps `worker` over `items` with at most `concurrency` calls in flight.
// Results are returned in input order. A rejected worker rejects the whole run.
async function runWithConcurrency(items, concurrency, worker) {
  const output = new Array(items.length);
  let nextIndex = 0;
  // Each consumer repeatedly claims the next unprocessed index until none remain.
  const consume = async () => {
    for (;;) {
      const current = nextIndex++;
      if (current >= items.length) {
        break;
      }
      output[current] = await worker(items[current], current);
    }
  };
  const workerCount = Math.min(Math.max(1, concurrency), items.length);
  await Promise.all(Array.from({ length: workerCount }, () => consume()));
  return output;
}
|
|
130
|
+
// Validates a Feishu SDK response: throws FeishuBitableError when the
// response is missing or carries a non-zero error code; otherwise returns it.
function assertFeishuResponse(response, action) {
  if (!response) {
    throw new FeishuBitableError(`${action} failed: empty response from Feishu SDK`);
  }
  const hasErrorCode = typeof response.code === "number" && response.code !== 0;
  if (hasErrorCode) {
    const reason = response.msg ?? "unknown error";
    throw new FeishuBitableError(`${action} failed: ${reason}`, {
      code: response.code,
      details: response
    });
  }
  return response;
}
|
|
142
|
+
// Flattens common Feishu field encodings into plain JS values:
// single-element [{text}] arrays become the text, typed {type, value}
// wrappers become joined strings; anything else passes through untouched.
function normalizeFieldValue(value) {
  if (Array.isArray(value)) {
    const [first] = value;
    const isSingleText = value.length === 1 && isPlainObject(first) && typeof first.text === "string";
    return isSingleText ? first.text : value;
  }
  if (!isPlainObject(value)) {
    return value;
  }
  if (typeof value.type === "number" && "value" in value) {
    const inner = value.value;
    // type 1: rich-text segments — concatenate their text parts.
    if (value.type === 1 && Array.isArray(inner)) {
      return inner.map((segment) => isPlainObject(segment) && typeof segment.text === "string" ? segment.text : String(segment ?? "")).join("");
    }
    // types 2/3/1005: list-valued fields — join with commas.
    const isListType = value.type === 2 || value.type === 3 || value.type === 1005;
    if (isListType && Array.isArray(inner)) {
      return inner.join(",");
    }
  }
  return value;
}
// Builds a flat record object with record_id attached (empty string when
// the id is missing); optionally normalizes each field value first.
function normalizeRecord(recordId, fields, normalizeFields = true) {
  const body = normalizeFields ? normalizeRecordFields(fields) : fields;
  return { ...body, record_id: recordId ?? "" };
}
// Applies normalizeFieldValue to every field of a record.
function normalizeRecordFields(fields) {
  const normalized = {};
  for (const [key, value] of Object.entries(fields)) {
    normalized[key] = normalizeFieldValue(value);
  }
  return normalized;
}
|
|
171
|
+
// Separates a flat update record into its id and the remaining field values.
function splitUpdateRecord(record) {
  const { record_id, ...rest } = record;
  return { recordId: record_id, fields: rest };
}
|
|
175
|
+
// Coerces any supported upload input into a Node Buffer. Strings are
// treated as base64 (optionally wrapped in a data URL); binary containers
// are converted directly; file-like objects are read via arrayBuffer().
async function toBuffer(file) {
  if (typeof file === "string") {
    const dataUrl = DATA_URL_PATTERN.exec(file);
    // For a data URL use only the payload after the comma; otherwise the
    // whole string is assumed to be base64.
    const base64 = dataUrl ? dataUrl[2] : file;
    return Buffer.from(base64, "base64");
  }
  if (Buffer.isBuffer(file)) {
    return file;
  }
  if (file instanceof Uint8Array || file instanceof ArrayBuffer) {
    return Buffer.from(file);
  }
  const isBlob = typeof Blob !== "undefined" && file instanceof Blob;
  if (isBlob || isFileLike(file)) {
    return Buffer.from(await file.arrayBuffer());
  }
  throw new FeishuBitableError("Unsupported file input. Use base64 string, Buffer, Uint8Array, ArrayBuffer, Blob or Bun.file().");
}
|
|
198
|
+
// Picks a file name for an upload: the file-like object's own name when
// present, an extension derived from a data URL's MIME type, or `fallback`.
function inferFileName(file, fallback = "upload.bin") {
  if (isFileLike(file) && typeof file.name === "string" && file.name.trim()) {
    return file.name;
  }
  if (typeof file === "string") {
    const mime = DATA_URL_PATTERN.exec(file)?.[1];
    const extension = mime ? MIME_EXTENSION_MAP[mime] : undefined;
    if (extension) {
      return `upload.${extension}`;
    }
  }
  return fallback;
}
|
|
211
|
+
// Drains an async-iterable stream and concatenates its chunks into one Buffer.
async function readableToBuffer(stream) {
  const collected = [];
  for await (const piece of stream) {
    const chunk = Buffer.isBuffer(piece) ? piece : Buffer.from(piece);
    collected.push(chunk);
  }
  return Buffer.concat(collected);
}
|
|
218
|
+
// True for non-null, non-array objects (the "record-shaped" values this
// package inspects for text/typed-field wrappers).
function isPlainObject(value) {
  if (value === null || Array.isArray(value)) {
    return false;
  }
  return typeof value === "object";
}
|
|
221
|
+
// True for objects exposing an arrayBuffer() method (File, Blob, Bun.file()).
function isFileLike(value) {
  return isPlainObject(value) && typeof value.arrayBuffer === "function";
}
|
|
224
|
+
|
|
225
|
+
// src/client.ts
|
|
226
|
+
// High-level client for Feishu Bitable tables and drive media. Wraps the
// Lark SDK with app-token resolution, chunked batch operations, bounded
// concurrency, and linear-backoff retries.
class Bitable {
  client;
  defaultAppToken;
  maxRetries;
  retryDelayMs;
  defaultConcurrency;
  // Accepts either an options object or legacy positional
  // (appToken, appId, appSecret) arguments; see resolveConstructorOptions.
  constructor(optionsOrToken, appId, appSecret) {
    const options = this.resolveConstructorOptions(optionsOrToken, appId, appSecret);
    this.defaultAppToken = options.defaultAppToken;
    this.maxRetries = Math.max(1, options.maxRetries ?? 5);
    this.retryDelayMs = Math.max(100, options.retryDelayMs ?? 1000);
    this.defaultConcurrency = Math.max(1, options.defaultConcurrency ?? 1);
    // A pre-built SDK client can be injected (e.g. for testing); otherwise
    // one is created with self-build app type and the Feishu domain by default.
    this.client = options.sdkClient ?? new lark.Client({
      appId: options.appId,
      appSecret: options.appSecret,
      appType: options.appType ?? lark.AppType.SelfBuild,
      domain: options.domain ?? lark.Domain.Feishu
    });
  }
  // Builds a client purely from environment variables.
  static fromEnv(env = process.env) {
    return new Bitable({
      appId: env.FEISHU_APP_ID,
      appSecret: env.FEISHU_APP_SECRET,
      defaultAppToken: env.FEISHU_APP_TOKEN
    });
  }
  // Fetches every record of a table by following the SDK's search iterator
  // to exhaustion. Note: individual page fetches are NOT retried here.
  async fetchAllRecords(tableId, options = {}, appToken) {
    const token = this.resolveAppToken(appToken);
    // Page size is clamped into [1, FEISHU_BATCH_LIMIT].
    const pageSize = Math.max(1, Math.min(options.pageSize ?? FEISHU_BATCH_LIMIT, FEISHU_BATCH_LIMIT));
    const iterator = await this.client.bitable.v1.appTableRecord.searchWithIterator({
      path: {
        app_token: token,
        table_id: tableId
      },
      params: {
        page_size: pageSize,
        user_id_type: "open_id"
      },
      data: {
        view_id: options.viewId,
        field_names: options.fieldNames,
        filter: options.filter,
        sort: options.sort,
        automatic_fields: options.automaticFields
      }
    });
    const allRecords = [];
    for await (const page of iterator) {
      const items = page?.items ?? [];
      // Field normalization is on by default; pass normalizeFields: false
      // to receive raw field payloads.
      allRecords.push(...items.map((item) => normalizeRecord(item.record_id, item.fields, options.normalizeFields !== false)));
    }
    return allRecords;
  }
  // Inserts records in chunks (each retried independently); resolves to the
  // raw SDK response of every chunk, in chunk order.
  async insertList(tableId, records, options = {}) {
    if (records.length === 0) {
      return [];
    }
    const token = this.resolveAppToken(options.appToken);
    const chunks = chunkArray(records, options.chunkSize ?? FEISHU_BATCH_LIMIT);
    return runWithConcurrency(chunks, options.concurrency ?? this.defaultConcurrency, async (chunk) => this.withRetry("insert records", async () => assertFeishuResponse(await this.client.bitable.v1.appTableRecord.batchCreate({
      path: { app_token: token, table_id: tableId },
      data: {
        records: chunk.map((fields) => ({ fields }))
      }
    }), "insert records")));
  }
  // Single batch-update call with a fully caller-built payload; retried.
  async batchUpdateRecords(payload) {
    return this.withRetry("batch update records", async () => assertFeishuResponse(await this.client.bitable.v1.appTableRecord.batchUpdate(payload), "batch update records"));
  }
  // Updates records in chunks. Each input record is a flat object carrying
  // its own record_id alongside the field values.
  async updateRecords(tableId, records, options = {}) {
    if (records.length === 0) {
      return [];
    }
    const token = this.resolveAppToken(options.appToken);
    const chunks = chunkArray(records, options.chunkSize ?? FEISHU_BATCH_LIMIT);
    return runWithConcurrency(chunks, options.concurrency ?? this.defaultConcurrency, async (chunk) => {
      const payload = {
        path: {
          app_token: token,
          table_id: tableId
        },
        data: {
          records: chunk.map((record) => {
            const { recordId, fields } = splitUpdateRecord(record);
            return {
              record_id: recordId,
              fields
            };
          })
        }
      };
      // Only attach params when the caller set at least one of them, so the
      // request stays minimal otherwise.
      if (options.userIdType || options.ignoreConsistencyCheck !== undefined) {
        payload.params = {
          user_id_type: options.userIdType,
          ignore_consistency_check: options.ignoreConsistencyCheck
        };
      }
      return this.batchUpdateRecords(payload);
    });
  }
  // Deletes records by id in chunks; resolves to one raw response per chunk.
  async deleteList(tableId, recordIds, options = {}) {
    if (recordIds.length === 0) {
      return [];
    }
    const token = this.resolveAppToken(options.appToken);
    const chunks = chunkArray(recordIds, options.chunkSize ?? FEISHU_BATCH_LIMIT);
    return runWithConcurrency(chunks, options.concurrency ?? this.defaultConcurrency, async (chunk) => this.withRetry("delete records", async () => assertFeishuResponse(await this.client.bitable.v1.appTableRecord.batchDelete({
      path: { app_token: token, table_id: tableId },
      data: {
        records: chunk
      }
    }), "delete records")));
  }
  // Uploads a file to drive media: single-shot for files at or under the
  // simple-upload limit, prepare/part/finish multipart flow above it.
  async uploadFile(options) {
    const buffer = await toBuffer(options.file);
    const fileName = options.fileName ?? inferFileName(options.file);
    if (buffer.byteLength <= FEISHU_SIMPLE_UPLOAD_LIMIT) {
      // NOTE(review): uploadAll's result is returned as-is (with {} as a
      // null fallback) rather than passed through assertFeishuResponse.
      return await this.withRetry("upload file", async () => this.client.drive.v1.media.uploadAll({
        data: {
          file_name: fileName,
          parent_type: options.parentType,
          parent_node: options.parentNode,
          size: buffer.byteLength,
          extra: options.extra,
          file: buffer
        }
      })) ?? {};
    }
    const prepare = assertFeishuResponse(await this.withRetry("prepare multipart upload", async () => this.client.drive.v1.media.uploadPrepare({
      data: {
        file_name: fileName,
        parent_type: options.parentType,
        parent_node: options.parentNode,
        size: buffer.byteLength
      }
    })), "prepare multipart upload");
    const uploadId = prepare.data?.upload_id;
    const blockSize = prepare.data?.block_size;
    const blockNum = prepare.data?.block_num;
    if (!uploadId || !blockSize || !blockNum) {
      throw new FeishuBitableError("prepare multipart upload failed: missing upload metadata", {
        details: prepare
      });
    }
    // Parts are uploaded sequentially; each part is retried independently.
    for (let index = 0;index < blockNum; index++) {
      const start = index * blockSize;
      const end = Math.min(start + blockSize, buffer.byteLength);
      const chunk = buffer.subarray(start, end);
      await this.withRetry(`upload file chunk ${index + 1}/${blockNum}`, async () => this.client.drive.v1.media.uploadPart({
        data: {
          upload_id: uploadId,
          seq: index,
          size: chunk.byteLength,
          file: chunk
        }
      }));
    }
    const finish = assertFeishuResponse(await this.withRetry("finish multipart upload", async () => this.client.drive.v1.media.uploadFinish({
      data: {
        upload_id: uploadId,
        block_num: blockNum
      }
    })), "finish multipart upload");
    return {
      file_token: finish.data?.file_token
    };
  }
  // Downloads a drive media file and buffers its full contents in memory.
  async downloadFile(fileToken, extra) {
    const response = await this.withRetry("download file", async () => this.client.drive.v1.media.download({
      path: {
        file_token: fileToken
      },
      params: {
        extra
      }
    }));
    return readableToBuffer(response.getReadableStream());
  }
  // Deprecated misspelled alias kept for backward compatibility.
  async downLoadFile(fileToken, extra) {
    return this.downloadFile(fileToken, extra);
  }
  // Normalizes the two constructor call shapes into one options object and
  // fills missing credentials from environment variables; throws when appId
  // or appSecret cannot be resolved.
  resolveConstructorOptions(optionsOrToken, appIdArg, appSecretArg) {
    const objectMode = typeof optionsOrToken === "object" && optionsOrToken !== null && !Array.isArray(optionsOrToken);
    const options = objectMode ? optionsOrToken : {
      defaultAppToken: typeof optionsOrToken === "string" ? optionsOrToken : undefined,
      appId: appIdArg ?? undefined,
      appSecret: appSecretArg ?? undefined
    };
    const appId = options.appId ?? process.env.FEISHU_APP_ID;
    const appSecret = options.appSecret ?? process.env.FEISHU_APP_SECRET;
    if (!appId || !appSecret) {
      throw new FeishuBitableError("appId and appSecret are required. Pass them in the constructor or provide FEISHU_APP_ID and FEISHU_APP_SECRET.");
    }
    return {
      ...options,
      appId,
      appSecret,
      defaultAppToken: options.defaultAppToken ?? process.env.FEISHU_APP_TOKEN
    };
  }
  // Picks the effective app token: explicit override > instance default.
  resolveAppToken(overrideToken) {
    const token = overrideToken ?? this.defaultAppToken;
    if (!token) {
      throw new FeishuBitableError("appToken is required. Pass it to the constructor, method options, or FEISHU_APP_TOKEN.");
    }
    return token;
  }
  // Runs `task` up to maxRetries times with a linearly growing delay
  // (retryDelayMs * attempt); wraps the last error into FeishuBitableError.
  async withRetry(label, task) {
    let lastError;
    for (let attempt = 1;attempt <= this.maxRetries; attempt++) {
      try {
        return await task();
      } catch (error) {
        lastError = error;
        if (attempt === this.maxRetries) {
          break;
        }
        await sleep(this.retryDelayMs * attempt);
      }
    }
    throw new FeishuBitableError(`${label} failed after ${this.maxRetries} attempts`, {
      cause: lastError
    });
  }
}
|
|
451
|
+
|
|
452
|
+
// src/index.ts
|
|
453
|
+
// Referenced by the export getters installed near the top of the file;
// safe because `var` bindings are hoisted and the getters run lazily.
var import_node_sdk = require("@larksuiteoapi/node-sdk");
var src_default = Bitable;
|
package/lib/index.d.ts
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
// Public type surface of the package: the client class, its error type,
// selected SDK enums, and all option/record types.
import { Bitable } from "./client";
export { Bitable };
export { FeishuBitableError } from "./errors";
// Re-exported so consumers can configure the client without depending on
// the SDK package directly.
export { AppType, Domain, LoggerLevel } from "@larksuiteoapi/node-sdk";
export type { BatchOperationOptions, BitableBatchUpdatePayload, BitableBatchUpdateResponse, BitableConstructorOptions, BitableFieldValue, BitableFilterCondition, BitableFilterGroup, BitableInsertRecord, BitableLocationValue, BitableMemberValue, BitableRecord, BitableRecordFields, BitableSort, BitableTextValue, BitableUpdateRecord, FetchAllRecordsOptions, MediaParentType, UpdateRecordsOptions, UploadFileOptions, UploadableFile, } from "./types";
export default Bitable;
|
package/lib/index.js
ADDED
|
@@ -0,0 +1,388 @@
|
|
|
1
|
+
// src/client.ts
|
|
2
|
+
import * as lark from "@larksuiteoapi/node-sdk";
|
|
3
|
+
|
|
4
|
+
// src/errors.ts
|
|
5
|
+
// src/errors.ts (ESM build — keep in sync with index.cjs)
// Error type thrown by every failure path in this package. Carries the
// Feishu API error code and the raw response (when available) for debugging.
class FeishuBitableError extends Error {
  code;
  details;
  constructor(message, options) {
    // Only forward `cause` to Error when one was actually provided.
    const errorOptions = options?.cause ? { cause: options.cause } : undefined;
    super(message, errorOptions);
    this.name = "FeishuBitableError";
    this.code = options?.code;
    this.details = options?.details;
  }
}
|
|
15
|
+
|
|
16
|
+
// src/utils.ts
|
|
17
|
+
// NOTE(review): these helpers duplicate the ones bundled into index.cjs;
// keep both builds in sync when editing.
// Matches a base64 data URL; capture 1 = MIME type, capture 2 = base64 payload.
var DATA_URL_PATTERN = /^data:([A-Za-z-+/]+\/[A-Za-z0-9.+-]+)?;base64,(.+)$/;
// MIME type -> file extension used when inferring an upload file name.
var MIME_EXTENSION_MAP = {
  "image/jpeg": "jpg",
  "image/jpg": "jpg",
  "image/png": "png",
  "image/gif": "gif",
  "image/webp": "webp",
  "application/pdf": "pdf",
  "text/plain": "txt"
};
// Upper bound used to clamp chunk/page sizes for batch record operations.
var FEISHU_BATCH_LIMIT = 500;
// Files at or under 20 MiB use single-shot upload; larger ones go multipart.
var FEISHU_SIMPLE_UPLOAD_LIMIT = 20 * 1024 * 1024;
// Splits items into consecutive slices of at most chunkSize elements,
// with the size clamped into [1, FEISHU_BATCH_LIMIT].
function chunkArray(items, chunkSize) {
  const normalizedChunkSize = Math.max(1, Math.min(chunkSize, FEISHU_BATCH_LIMIT));
  const chunks = [];
  for (let index = 0;index < items.length; index += normalizedChunkSize) {
    chunks.push(items.slice(index, index + normalizedChunkSize));
  }
  return chunks;
}
// Resolves after roughly `ms` milliseconds; used to pace retry attempts.
async function sleep(ms) {
  await new Promise((resolve) => setTimeout(resolve, ms));
}
// Maps `worker` over `items` with at most `concurrency` calls in flight;
// results come back in input order.
async function runWithConcurrency(items, concurrency, worker) {
  const results = new Array(items.length);
  const normalizedConcurrency = Math.max(1, concurrency);
  let cursor = 0;
  // Each worker repeatedly claims the next unprocessed index until done.
  async function runWorker() {
    while (true) {
      const index = cursor++;
      if (index >= items.length) {
        return;
      }
      results[index] = await worker(items[index], index);
    }
  }
  await Promise.all(Array.from({ length: Math.min(normalizedConcurrency, items.length) }, () => runWorker()));
  return results;
}
// Throws FeishuBitableError when the SDK response is missing or carries a
// non-zero error code; otherwise returns the response unchanged.
function assertFeishuResponse(response, action) {
  if (!response) {
    throw new FeishuBitableError(`${action} failed: empty response from Feishu SDK`);
  }
  if (typeof response.code === "number" && response.code !== 0) {
    throw new FeishuBitableError(`${action} failed: ${response.msg ?? "unknown error"}`, {
      code: response.code,
      details: response
    });
  }
  return response;
}
// Flattens common Feishu field encodings ([{text}] arrays and typed
// {type, value} wrappers) into plain strings; other values pass through.
function normalizeFieldValue(value) {
  if (Array.isArray(value)) {
    if (value.length === 1 && isPlainObject(value[0]) && typeof value[0].text === "string") {
      return value[0].text;
    }
    return value;
  }
  if (!isPlainObject(value)) {
    return value;
  }
  if (typeof value.type === "number" && "value" in value) {
    // type 1: rich-text segments — concatenate their text parts.
    if (value.type === 1 && Array.isArray(value.value)) {
      return value.value.map((item) => isPlainObject(item) && typeof item.text === "string" ? item.text : String(item ?? "")).join("");
    }
    // types 2/3/1005: list-valued fields — join with commas.
    if ((value.type === 2 || value.type === 3 || value.type === 1005) && Array.isArray(value.value)) {
      return value.value.join(",");
    }
  }
  return value;
}
// Builds a flat record with record_id attached (empty string when missing);
// optionally normalizes each field value first.
function normalizeRecord(recordId, fields, normalizeFields = true) {
  return {
    ...normalizeFields ? normalizeRecordFields(fields) : fields,
    record_id: recordId ?? ""
  };
}
// Applies normalizeFieldValue to every field of a record.
function normalizeRecordFields(fields) {
  return Object.fromEntries(Object.entries(fields).map(([key, value]) => [key, normalizeFieldValue(value)]));
}
// Separates a flat update record into its id and the remaining field values.
function splitUpdateRecord(record) {
  const { record_id: recordId, ...fields } = record;
  return { recordId, fields };
}
// Coerces any supported upload input into a Node Buffer: base64 strings
// (optionally data URLs), binary containers, or file-like objects.
async function toBuffer(file) {
  if (typeof file === "string") {
    const dataUrlMatch = file.match(DATA_URL_PATTERN);
    const base64Payload = dataUrlMatch ? dataUrlMatch[2] : file;
    return Buffer.from(base64Payload, "base64");
  }
  if (Buffer.isBuffer(file)) {
    return file;
  }
  if (file instanceof Uint8Array) {
    return Buffer.from(file);
  }
  if (file instanceof ArrayBuffer) {
    return Buffer.from(file);
  }
  if (typeof Blob !== "undefined" && file instanceof Blob) {
    return Buffer.from(await file.arrayBuffer());
  }
  if (isFileLike(file)) {
    return Buffer.from(await file.arrayBuffer());
  }
  throw new FeishuBitableError("Unsupported file input. Use base64 string, Buffer, Uint8Array, ArrayBuffer, Blob or Bun.file().");
}
// Picks a file name for an upload: the file-like object's own name, an
// extension derived from a data URL's MIME type, or the fallback.
function inferFileName(file, fallback = "upload.bin") {
  if (isFileLike(file) && typeof file.name === "string" && file.name.trim()) {
    return file.name;
  }
  if (typeof file === "string") {
    const dataUrlMatch = file.match(DATA_URL_PATTERN);
    const mimeType = dataUrlMatch?.[1];
    if (mimeType && MIME_EXTENSION_MAP[mimeType]) {
      return `upload.${MIME_EXTENSION_MAP[mimeType]}`;
    }
  }
  return fallback;
}
// Drains an async-iterable stream and concatenates its chunks into a Buffer.
async function readableToBuffer(stream) {
  const chunks = [];
  for await (const chunk of stream) {
    chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
  }
  return Buffer.concat(chunks);
}
// True for non-null, non-array objects.
function isPlainObject(value) {
  return typeof value === "object" && value !== null && !Array.isArray(value);
}
// True for objects exposing an arrayBuffer() method (File, Blob, Bun.file()).
function isFileLike(value) {
  return isPlainObject(value) && typeof value.arrayBuffer === "function";
}
|
|
150
|
+
|
|
151
|
+
// src/client.ts
|
|
152
|
+
class Bitable {
|
|
153
|
+
client;
|
|
154
|
+
defaultAppToken;
|
|
155
|
+
maxRetries;
|
|
156
|
+
retryDelayMs;
|
|
157
|
+
defaultConcurrency;
|
|
158
|
+
constructor(optionsOrToken, appId, appSecret) {
|
|
159
|
+
const options = this.resolveConstructorOptions(optionsOrToken, appId, appSecret);
|
|
160
|
+
this.defaultAppToken = options.defaultAppToken;
|
|
161
|
+
this.maxRetries = Math.max(1, options.maxRetries ?? 5);
|
|
162
|
+
this.retryDelayMs = Math.max(100, options.retryDelayMs ?? 1000);
|
|
163
|
+
this.defaultConcurrency = Math.max(1, options.defaultConcurrency ?? 1);
|
|
164
|
+
this.client = options.sdkClient ?? new lark.Client({
|
|
165
|
+
appId: options.appId,
|
|
166
|
+
appSecret: options.appSecret,
|
|
167
|
+
appType: options.appType ?? lark.AppType.SelfBuild,
|
|
168
|
+
domain: options.domain ?? lark.Domain.Feishu
|
|
169
|
+
});
|
|
170
|
+
}
|
|
171
|
+
static fromEnv(env = process.env) {
|
|
172
|
+
return new Bitable({
|
|
173
|
+
appId: env.FEISHU_APP_ID,
|
|
174
|
+
appSecret: env.FEISHU_APP_SECRET,
|
|
175
|
+
defaultAppToken: env.FEISHU_APP_TOKEN
|
|
176
|
+
});
|
|
177
|
+
}
|
|
178
|
+
async fetchAllRecords(tableId, options = {}, appToken) {
|
|
179
|
+
const token = this.resolveAppToken(appToken);
|
|
180
|
+
const pageSize = Math.max(1, Math.min(options.pageSize ?? FEISHU_BATCH_LIMIT, FEISHU_BATCH_LIMIT));
|
|
181
|
+
const iterator = await this.client.bitable.v1.appTableRecord.searchWithIterator({
|
|
182
|
+
path: {
|
|
183
|
+
app_token: token,
|
|
184
|
+
table_id: tableId
|
|
185
|
+
},
|
|
186
|
+
params: {
|
|
187
|
+
page_size: pageSize,
|
|
188
|
+
user_id_type: "open_id"
|
|
189
|
+
},
|
|
190
|
+
data: {
|
|
191
|
+
view_id: options.viewId,
|
|
192
|
+
field_names: options.fieldNames,
|
|
193
|
+
filter: options.filter,
|
|
194
|
+
sort: options.sort,
|
|
195
|
+
automatic_fields: options.automaticFields
|
|
196
|
+
}
|
|
197
|
+
});
|
|
198
|
+
const allRecords = [];
|
|
199
|
+
for await (const page of iterator) {
|
|
200
|
+
const items = page?.items ?? [];
|
|
201
|
+
allRecords.push(...items.map((item) => normalizeRecord(item.record_id, item.fields, options.normalizeFields !== false)));
|
|
202
|
+
}
|
|
203
|
+
return allRecords;
|
|
204
|
+
}
|
|
205
|
+
async insertList(tableId, records, options = {}) {
|
|
206
|
+
if (records.length === 0) {
|
|
207
|
+
return [];
|
|
208
|
+
}
|
|
209
|
+
const token = this.resolveAppToken(options.appToken);
|
|
210
|
+
const chunks = chunkArray(records, options.chunkSize ?? FEISHU_BATCH_LIMIT);
|
|
211
|
+
return runWithConcurrency(chunks, options.concurrency ?? this.defaultConcurrency, async (chunk) => this.withRetry("insert records", async () => assertFeishuResponse(await this.client.bitable.v1.appTableRecord.batchCreate({
|
|
212
|
+
path: { app_token: token, table_id: tableId },
|
|
213
|
+
data: {
|
|
214
|
+
records: chunk.map((fields) => ({ fields }))
|
|
215
|
+
}
|
|
216
|
+
}), "insert records")));
|
|
217
|
+
}
|
|
218
|
+
async batchUpdateRecords(payload) {
|
|
219
|
+
return this.withRetry("batch update records", async () => assertFeishuResponse(await this.client.bitable.v1.appTableRecord.batchUpdate(payload), "batch update records"));
|
|
220
|
+
}
|
|
221
|
+
async updateRecords(tableId, records, options = {}) {
|
|
222
|
+
if (records.length === 0) {
|
|
223
|
+
return [];
|
|
224
|
+
}
|
|
225
|
+
const token = this.resolveAppToken(options.appToken);
|
|
226
|
+
const chunks = chunkArray(records, options.chunkSize ?? FEISHU_BATCH_LIMIT);
|
|
227
|
+
return runWithConcurrency(chunks, options.concurrency ?? this.defaultConcurrency, async (chunk) => {
|
|
228
|
+
const payload = {
|
|
229
|
+
path: {
|
|
230
|
+
app_token: token,
|
|
231
|
+
table_id: tableId
|
|
232
|
+
},
|
|
233
|
+
data: {
|
|
234
|
+
records: chunk.map((record) => {
|
|
235
|
+
const { recordId, fields } = splitUpdateRecord(record);
|
|
236
|
+
return {
|
|
237
|
+
record_id: recordId,
|
|
238
|
+
fields
|
|
239
|
+
};
|
|
240
|
+
})
|
|
241
|
+
}
|
|
242
|
+
};
|
|
243
|
+
if (options.userIdType || options.ignoreConsistencyCheck !== undefined) {
|
|
244
|
+
payload.params = {
|
|
245
|
+
user_id_type: options.userIdType,
|
|
246
|
+
ignore_consistency_check: options.ignoreConsistencyCheck
|
|
247
|
+
};
|
|
248
|
+
}
|
|
249
|
+
return this.batchUpdateRecords(payload);
|
|
250
|
+
});
|
|
251
|
+
}
|
|
252
|
+
async deleteList(tableId, recordIds, options = {}) {
|
|
253
|
+
if (recordIds.length === 0) {
|
|
254
|
+
return [];
|
|
255
|
+
}
|
|
256
|
+
const token = this.resolveAppToken(options.appToken);
|
|
257
|
+
const chunks = chunkArray(recordIds, options.chunkSize ?? FEISHU_BATCH_LIMIT);
|
|
258
|
+
return runWithConcurrency(chunks, options.concurrency ?? this.defaultConcurrency, async (chunk) => this.withRetry("delete records", async () => assertFeishuResponse(await this.client.bitable.v1.appTableRecord.batchDelete({
|
|
259
|
+
path: { app_token: token, table_id: tableId },
|
|
260
|
+
data: {
|
|
261
|
+
records: chunk
|
|
262
|
+
}
|
|
263
|
+
}), "delete records")));
|
|
264
|
+
}
|
|
265
|
+
/**
 * Upload a file to Feishu drive, choosing single-shot or multipart upload
 * based on size.
 *
 * Payloads of at most FEISHU_SIMPLE_UPLOAD_LIMIT bytes go through
 * `media.uploadAll` in one request; larger ones use the
 * prepare -> upload parts -> finish multipart flow. Every network call is
 * wrapped in the instance retry policy.
 *
 * @param {object} options - file, parentType, parentNode, optional
 *   fileName / extra (see UploadFileOptions).
 * @returns {Promise<object>} Object carrying the resulting `file_token`
 *   (the single-shot path returns the SDK result as-is).
 * @throws {FeishuBitableError} When the prepare step returns incomplete
 *   upload metadata, or when retries are exhausted.
 */
async uploadFile(options) {
  // Materialize the input (path / Blob / ArrayBuffer / ...) as a Buffer
  // first so the exact byte size is known up front.
  const buffer = await toBuffer(options.file);
  const fileName = options.fileName ?? inferFileName(options.file);
  if (buffer.byteLength <= FEISHU_SIMPLE_UPLOAD_LIMIT) {
    // Single-shot path. NOTE(review): unlike the multipart calls below,
    // this response is not passed through assertFeishuResponse — the SDK's
    // uploadAll appears to resolve with the data payload directly; the
    // `?? {}` guards against an undefined result. Confirm against the SDK.
    return await this.withRetry("upload file", async () => this.client.drive.v1.media.uploadAll({
      data: {
        file_name: fileName,
        parent_type: options.parentType,
        parent_node: options.parentNode,
        size: buffer.byteLength,
        extra: options.extra,
        file: buffer
      }
    })) ?? {};
  }
  // Multipart step 1: open an upload session and obtain its id, the block
  // size, and the number of blocks to send.
  const prepare = assertFeishuResponse(await this.withRetry("prepare multipart upload", async () => this.client.drive.v1.media.uploadPrepare({
    data: {
      file_name: fileName,
      parent_type: options.parentType,
      parent_node: options.parentNode,
      size: buffer.byteLength
    }
  })), "prepare multipart upload");
  const uploadId = prepare.data?.upload_id;
  const blockSize = prepare.data?.block_size;
  const blockNum = prepare.data?.block_num;
  // Refuse to proceed without complete session metadata.
  if (!uploadId || !blockSize || !blockNum) {
    throw new FeishuBitableError("prepare multipart upload failed: missing upload metadata", {
      details: prepare
    });
  }
  // Multipart step 2: send blocks sequentially; the final block may be
  // shorter than blockSize. subarray() views the buffer without copying.
  for (let index = 0;index < blockNum; index++) {
    const start = index * blockSize;
    const end = Math.min(start + blockSize, buffer.byteLength);
    const chunk = buffer.subarray(start, end);
    await this.withRetry(`upload file chunk ${index + 1}/${blockNum}`, async () => this.client.drive.v1.media.uploadPart({
      data: {
        upload_id: uploadId,
        seq: index,
        size: chunk.byteLength,
        file: chunk
      }
    }));
  }
  // Multipart step 3: close the session and collect the file token.
  const finish = assertFeishuResponse(await this.withRetry("finish multipart upload", async () => this.client.drive.v1.media.uploadFinish({
    data: {
      upload_id: uploadId,
      block_num: blockNum
    }
  })), "finish multipart upload");
  return {
    file_token: finish.data?.file_token
  };
}
|
|
319
|
+
async downloadFile(fileToken, extra) {
|
|
320
|
+
const response = await this.withRetry("download file", async () => this.client.drive.v1.media.download({
|
|
321
|
+
path: {
|
|
322
|
+
file_token: fileToken
|
|
323
|
+
},
|
|
324
|
+
params: {
|
|
325
|
+
extra
|
|
326
|
+
}
|
|
327
|
+
}));
|
|
328
|
+
return readableToBuffer(response.getReadableStream());
|
|
329
|
+
}
|
|
330
|
+
/**
 * @deprecated Misspelled alias kept for backward compatibility — use
 * `downloadFile` instead. Delegates unchanged.
 */
async downLoadFile(fileToken, extra) {
  return this.downloadFile(fileToken, extra);
}
|
|
333
|
+
resolveConstructorOptions(optionsOrToken, appIdArg, appSecretArg) {
|
|
334
|
+
const objectMode = typeof optionsOrToken === "object" && optionsOrToken !== null && !Array.isArray(optionsOrToken);
|
|
335
|
+
const options = objectMode ? optionsOrToken : {
|
|
336
|
+
defaultAppToken: typeof optionsOrToken === "string" ? optionsOrToken : undefined,
|
|
337
|
+
appId: appIdArg ?? undefined,
|
|
338
|
+
appSecret: appSecretArg ?? undefined
|
|
339
|
+
};
|
|
340
|
+
const appId = options.appId ?? process.env.FEISHU_APP_ID;
|
|
341
|
+
const appSecret = options.appSecret ?? process.env.FEISHU_APP_SECRET;
|
|
342
|
+
if (!appId || !appSecret) {
|
|
343
|
+
throw new FeishuBitableError("appId and appSecret are required. Pass them in the constructor or provide FEISHU_APP_ID and FEISHU_APP_SECRET.");
|
|
344
|
+
}
|
|
345
|
+
return {
|
|
346
|
+
...options,
|
|
347
|
+
appId,
|
|
348
|
+
appSecret,
|
|
349
|
+
defaultAppToken: options.defaultAppToken ?? process.env.FEISHU_APP_TOKEN
|
|
350
|
+
};
|
|
351
|
+
}
|
|
352
|
+
resolveAppToken(overrideToken) {
|
|
353
|
+
const token = overrideToken ?? this.defaultAppToken;
|
|
354
|
+
if (!token) {
|
|
355
|
+
throw new FeishuBitableError("appToken is required. Pass it to the constructor, method options, or FEISHU_APP_TOKEN.");
|
|
356
|
+
}
|
|
357
|
+
return token;
|
|
358
|
+
}
|
|
359
|
+
async withRetry(label, task) {
|
|
360
|
+
let lastError;
|
|
361
|
+
for (let attempt = 1;attempt <= this.maxRetries; attempt++) {
|
|
362
|
+
try {
|
|
363
|
+
return await task();
|
|
364
|
+
} catch (error) {
|
|
365
|
+
lastError = error;
|
|
366
|
+
if (attempt === this.maxRetries) {
|
|
367
|
+
break;
|
|
368
|
+
}
|
|
369
|
+
await sleep(this.retryDelayMs * attempt);
|
|
370
|
+
}
|
|
371
|
+
}
|
|
372
|
+
throw new FeishuBitableError(`${label} failed after ${this.maxRetries} attempts`, {
|
|
373
|
+
cause: lastError
|
|
374
|
+
});
|
|
375
|
+
}
|
|
376
|
+
}
|
|
377
|
+
|
|
378
|
+
// src/index.ts
// Public entry point: re-exports the Bitable client (also as the default
// export) together with the error class and pass-through SDK enums.
import { AppType as AppType2, Domain as Domain2, LoggerLevel } from "@larksuiteoapi/node-sdk";
var src_default = Bitable;
export {
  src_default as default,
  LoggerLevel,
  FeishuBitableError,
  Domain2 as Domain,
  Bitable,
  AppType2 as AppType
};
|
package/lib/types.d.ts
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
import type * as lark from "@larksuiteoapi/node-sdk";
|
|
2
|
+
/** Rich-text cell value: plain text with an optional hyperlink. */
export type BitableTextValue = {
  text?: string;
  link?: string;
};
/** Geographic location cell value (province / city / district split out). */
export type BitableLocationValue = {
  location?: string;
  pname?: string;
  cityname?: string;
  adname?: string;
  address?: string;
  name?: string;
  full_address?: string;
};
/** Person/member cell value. */
export type BitableMemberValue = {
  id?: string;
  name?: string;
  en_name?: string;
  email?: string;
  avatar_url?: string;
};
/** Attachment cell value keyed by its drive `file_token`. */
export type BitableAttachmentValue = {
  file_token?: string;
  name?: string;
  type?: string;
  size?: number;
  url?: string;
  tmp_url?: string;
};
/**
 * Union of every cell value shape a Bitable field may hold: primitives,
 * rich text, location, option/member/attachment arrays, or a generic
 * `{ type, value }` wrapper.
 */
export type BitableFieldValue = string | number | boolean | BitableTextValue | BitableLocationValue | Array<{
  id?: string;
  name?: string;
  avatar_url?: string;
}> | Array<string> | Array<BitableMemberValue> | Array<BitableAttachmentValue> | {
  type?: number;
  value?: unknown;
};
/** A record's field map: field name -> cell value. */
export type BitableRecordFields = Record<string, BitableFieldValue>;
|
|
39
|
+
/** A fetched record: its (possibly normalized) fields plus the Feishu record id. */
export type BitableRecord = Record<string, unknown> & {
  record_id: string;
};
/** Payload for inserting one record — fields only; the id is assigned server-side. */
export type BitableInsertRecord = BitableRecordFields;
/** Payload for updating one record: its fields plus the target `record_id`. */
export type BitableUpdateRecord = BitableRecordFields & {
  record_id: string;
};
/** Sort clause for record search; `desc` selects descending order. */
export type BitableSort = {
  field_name?: string;
  desc?: boolean;
};
/** Single filter predicate over one field. */
export type BitableFilterCondition = {
  field_name: string;
  operator: "is" | "isNot" | "contains" | "doesNotContain" | "isEmpty" | "isNotEmpty" | "isGreater" | "isGreaterEqual" | "isLess" | "isLessEqual" | "like" | "in";
  value?: string[];
};
/** Filter tree: conditions joined by and/or, with one optional level of nested groups. */
export type BitableFilterGroup = {
  conjunction: "and" | "or";
  conditions?: BitableFilterCondition[];
  children?: Array<{
    conjunction: "and" | "or";
    conditions?: BitableFilterCondition[];
  }>;
};
|
|
63
|
+
/** Allowed `parent_type` values for drive media uploads. */
export type MediaParentType = "doc_image" | "docx_image" | "sheet_image" | "doc_file" | "docx_file" | "sheet_file" | "vc_virtual_background" | "bitable_image" | "bitable_file" | "moments" | "ccm_import_open" | "calendar" | "base_global" | "lark_ai_media_analysis";
/** Anything uploadFile accepts: a string (interpreted by `toBuffer`), binary buffer/view, Blob, or Bun file. */
export type UploadableFile = string | Buffer | Uint8Array | ArrayBuffer | Blob | BunFileLike;
/** Structural stand-in for Bun's file object — avoids a hard dependency on Bun types. */
export type BunFileLike = {
  arrayBuffer(): Promise<ArrayBuffer>;
  name?: string;
};
|
|
69
|
+
/** Constructor options for Bitable; unset credentials fall back to FEISHU_* env vars. */
export interface BitableConstructorOptions {
  appId?: string;
  appSecret?: string;
  /** Default Bitable app token used when a method call does not supply one. */
  defaultAppToken?: string;
  appType?: lark.AppType;
  domain?: lark.Domain | string;
  /** Maximum attempts per API call (see Bitable's retry logic). */
  maxRetries?: number;
  /** Base retry delay; the wait grows linearly with the attempt number. */
  retryDelayMs?: number;
  /** Default number of chunks processed in parallel for batch operations. */
  defaultConcurrency?: number;
  /** Pre-built SDK client to reuse instead of constructing a new one. */
  sdkClient?: lark.Client;
}
/** Options for fetchAllRecords. */
export interface FetchAllRecordsOptions {
  viewId?: string;
  fieldNames?: string[];
  filter?: BitableFilterGroup;
  sort?: BitableSort[];
  /** Page size per request; clamped by the client into [1, FEISHU_BATCH_LIMIT]. */
  pageSize?: number;
  /** Set false to keep raw Feishu field values (default: normalize). */
  normalizeFields?: boolean;
  automaticFields?: boolean;
}
/** Shared options for batched insert/update/delete calls. */
export interface BatchOperationOptions {
  /** Overrides the instance default app token for this call. */
  appToken?: string;
  /** How many chunks are sent in parallel. */
  concurrency?: number;
  /** Records per API request. */
  chunkSize?: number;
}
/** Extra options for updateRecords, forwarded as query params when set. */
export interface UpdateRecordsOptions extends BatchOperationOptions {
  userIdType?: "user_id" | "union_id" | "open_id";
  ignoreConsistencyCheck?: boolean;
}
/** Options for uploadFile. */
export interface UploadFileOptions {
  file: UploadableFile;
  /** Defaults to a name inferred from `file` when omitted. */
  fileName?: string;
  parentType: MediaParentType;
  parentNode: string;
  extra?: string;
}
|
|
105
|
+
/** Request payload of the SDK's batchUpdate call, derived from its signature so it tracks SDK upgrades. */
export type BitableBatchUpdatePayload = NonNullable<Parameters<lark.Client["bitable"]["v1"]["appTableRecord"]["batchUpdate"]>[0]>;
/** Resolved response type of the SDK's batchUpdate call. */
export type BitableBatchUpdateResponse = Awaited<ReturnType<lark.Client["bitable"]["v1"]["appTableRecord"]["batchUpdate"]>>;
|
package/lib/utils.d.ts
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import { Readable } from "node:stream";
import type { BitableRecord, BitableRecordFields, UploadableFile } from "./types";
/** Maximum records per Feishu batch request (also the default page size). */
export declare const FEISHU_BATCH_LIMIT = 500;
/** Size threshold in bytes below which uploadFile uses the single-shot upload path. */
export declare const FEISHU_SIMPLE_UPLOAD_LIMIT: number;
/** Split `items` into consecutive chunks of at most `chunkSize` elements. */
export declare function chunkArray<T>(items: T[], chunkSize: number): T[][];
/** Promise-based delay of `ms` milliseconds. */
export declare function sleep(ms: number): Promise<void>;
/** Run `worker` over `items` with at most `concurrency` in flight; resolves with one result per item. */
export declare function runWithConcurrency<T, R>(items: T[], concurrency: number, worker: (item: T, index: number) => Promise<R>): Promise<R[]>;
/** Validate a Feishu response envelope — throws on a missing response or error code — and return it. */
export declare function assertFeishuResponse<T extends {
  code?: number;
  msg?: string;
  data?: unknown;
}>(response: T | null | undefined, action: string): T;
/** Normalize a single raw Feishu cell value into a plain JS value. */
export declare function normalizeFieldValue(value: unknown): unknown;
/** Build a BitableRecord from an API item; normalizes fields unless `normalizeFields` is false. */
export declare function normalizeRecord(recordId: string | undefined, fields: BitableRecordFields, normalizeFields?: boolean): BitableRecord;
/** Normalize every field value in a record's field map. */
export declare function normalizeRecordFields(fields: BitableRecordFields): Record<string, unknown>;
/** Separate a combined update record into its record id and remaining field map. */
export declare function splitUpdateRecord<T extends Record<string, unknown>>(record: T & {
  record_id: string;
}): {
  recordId: string;
  fields: Record<string, unknown>;
};
/** Convert any UploadableFile variant into a Node Buffer. */
export declare function toBuffer(file: UploadableFile): Promise<Buffer>;
/** Derive a file name from the upload source, falling back to `fallback`. */
export declare function inferFileName(file: UploadableFile, fallback?: string): string;
/** Drain a readable stream into a single Buffer. */
export declare function readableToBuffer(stream: Readable): Promise<Buffer>;
|
package/package.json
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@yuuko1410/feishu-bitable",
|
|
3
|
+
"version": "0.0.2",
|
|
4
|
+
"description": "基于 Bun + TypeScript + 飞书官方 SDK 的多维表格操作库",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "./lib/index.cjs",
|
|
7
|
+
"module": "./lib/index.js",
|
|
8
|
+
"types": "./lib/index.d.ts",
|
|
9
|
+
"exports": {
|
|
10
|
+
".": {
|
|
11
|
+
"types": "./lib/index.d.ts",
|
|
12
|
+
"import": "./lib/index.js",
|
|
13
|
+
"require": "./lib/index.cjs"
|
|
14
|
+
}
|
|
15
|
+
},
|
|
16
|
+
"scripts": {
|
|
17
|
+
"build": "bun build src/index.ts --outdir lib --target node --format esm --external @larksuiteoapi/node-sdk && bun build src/index.ts --outfile lib/index.cjs --target node --format cjs --external @larksuiteoapi/node-sdk && tsc -p tsconfig.build.json",
|
|
18
|
+
"check": "tsc -p tsconfig.json --noEmit",
|
|
19
|
+
"test": "bun test",
|
|
20
|
+
"prepublishOnly": "bun run build"
|
|
21
|
+
},
|
|
22
|
+
"keywords": [
|
|
23
|
+
"feishu",
|
|
24
|
+
"lark",
|
|
25
|
+
"bitable",
|
|
26
|
+
"multidimensional-table",
|
|
27
|
+
"飞书",
|
|
28
|
+
"多维表格",
|
|
29
|
+
"sdk",
|
|
30
|
+
"api",
|
|
31
|
+
"batch",
|
|
32
|
+
"upload",
|
|
33
|
+
"file"
|
|
34
|
+
],
|
|
35
|
+
"author": "yuuko1410",
|
|
36
|
+
"license": "MIT",
|
|
37
|
+
"dependencies": {
|
|
38
|
+
"@larksuiteoapi/node-sdk": "^1.35.0"
|
|
39
|
+
},
|
|
40
|
+
"devDependencies": {
|
|
41
|
+
"@types/node": "^25.5.0",
|
|
42
|
+
"bun-types": "^1.3.11",
|
|
43
|
+
"typescript": "^5.9.3"
|
|
44
|
+
},
|
|
45
|
+
"engines": {
|
|
46
|
+
"bun": ">=1.1.0",
|
|
47
|
+
"node": ">=18.0.0"
|
|
48
|
+
},
|
|
49
|
+
"files": [
|
|
50
|
+
"lib",
|
|
51
|
+
"README.md"
|
|
52
|
+
]
|
|
53
|
+
}
|