@kweaver-ai/kweaver-sdk 0.7.1 → 0.7.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. package/README.md +34 -4
  2. package/README.zh.md +27 -2
  3. package/dist/api/datasources.d.ts +7 -0
  4. package/dist/api/datasources.js +8 -0
  5. package/dist/api/skills.js +10 -8
  6. package/dist/api/toolboxes.d.ts +2 -0
  7. package/dist/api/toolboxes.js +2 -1
  8. package/dist/cli.js +65 -17
  9. package/dist/commands/auth.js +85 -10
  10. package/dist/commands/bkn-ops.d.ts +6 -1
  11. package/dist/commands/bkn-ops.js +202 -93
  12. package/dist/commands/bkn-utils.d.ts +26 -2
  13. package/dist/commands/bkn-utils.js +66 -9
  14. package/dist/commands/config.js +8 -0
  15. package/dist/commands/context-loader.js +112 -36
  16. package/dist/commands/dataflow.js +194 -20
  17. package/dist/commands/ds.d.ts +23 -1
  18. package/dist/commands/ds.js +135 -27
  19. package/dist/commands/import-csv.d.ts +0 -2
  20. package/dist/commands/import-csv.js +2 -4
  21. package/dist/commands/skill.js +26 -6
  22. package/dist/commands/tool.d.ts +1 -0
  23. package/dist/commands/tool.js +12 -0
  24. package/dist/config/stateless.d.ts +13 -0
  25. package/dist/config/stateless.js +20 -0
  26. package/dist/config/store.d.ts +1 -0
  27. package/dist/config/store.js +17 -0
  28. package/dist/resources/toolboxes.d.ts +2 -0
  29. package/dist/templates/bkn/document/manifest.json +12 -0
  30. package/dist/templates/bkn/document/template.json +757 -0
  31. package/dist/templates/dataflow/unstructured/manifest.json +11 -0
  32. package/dist/templates/dataflow/unstructured/template.json +63 -0
  33. package/dist/templates/dataset/document/manifest.json +10 -0
  34. package/dist/templates/dataset/document/template.json +23 -0
  35. package/dist/templates/dataset/document-content/manifest.json +10 -0
  36. package/dist/templates/dataset/document-content/template.json +29 -0
  37. package/dist/templates/dataset/document-element/manifest.json +10 -0
  38. package/dist/templates/dataset/document-element/template.json +21 -0
  39. package/dist/utils/skill-bundle.d.ts +5 -0
  40. package/dist/utils/skill-bundle.js +74 -0
  41. package/dist/utils/template-loader.d.ts +40 -0
  42. package/dist/utils/template-loader.js +129 -0
  43. package/package.json +2 -1
@@ -3,7 +3,7 @@ import { statSync } from "node:fs";
3
3
  import { glob } from "node:fs/promises";
4
4
  import { resolve as resolvePath } from "node:path";
5
5
  import { ensureValidToken, formatHttpError, with401RefreshRetry } from "../auth/oauth.js";
6
- import { testDatasource, createDatasource, listDatasources, getDatasource, deleteDatasource, listTablesWithColumns, } from "../api/datasources.js";
6
+ import { testDatasource, createDatasource, listDatasources, getDatasource, deleteDatasource, listTablesWithColumns, scanMetadata, } from "../api/datasources.js";
7
7
  import { formatCallOutput } from "./call.js";
8
8
  import { resolveBusinessDomain } from "../config/store.js";
9
9
  import { parseCsvFile, buildTableName, splitBatches, buildFieldMappings, buildDagBody, } from "./import-csv.js";
@@ -37,7 +37,10 @@ Subcommands:
37
37
  delete <id> [-y] Delete a datasource
38
38
  tables <id> [--keyword X] List tables with columns
39
39
  connect <db_type> <host> <port> <database> --account X --password Y [--schema Z] [--name N]
40
+ [--reuse-existing|--force-new]
40
41
  Test connectivity, register datasource, and discover tables.
42
+ By default reuses an existing ds with the same (type, host, port, database, account)
43
+ instead of creating a duplicate. --force-new always creates a new entry.
41
44
  import-csv <ds-id> --files <glob_or_list> [--table-prefix X] [--batch-size N]
42
45
  Import CSV files into datasource tables via dataflow API.`);
43
46
  return 0;
@@ -206,6 +209,52 @@ async function runDsTablesCommand(args) {
206
209
  console.log(formatCallOutput(body, pretty));
207
210
  return 0;
208
211
  }
212
+ export function findExistingDatasource(listBody, sig) {
213
+ const parsed = JSON.parse(listBody);
214
+ const entries = Array.isArray(parsed) ? parsed : (parsed.entries ?? []);
215
+ const tupleMatch = entries.find((e) => e.id &&
216
+ e.type === sig.type &&
217
+ e.bin_data?.host === sig.host &&
218
+ Number(e.bin_data?.port) === Number(sig.port) &&
219
+ e.bin_data?.database_name === sig.database &&
220
+ e.bin_data?.account === sig.account);
221
+ if (tupleMatch) {
222
+ return {
223
+ id: String(tupleMatch.id),
224
+ name: String(tupleMatch.name ?? ""),
225
+ matchedByName: tupleMatch.name === sig.name,
226
+ matchedByTuple: true,
227
+ };
228
+ }
229
+ if (sig.name) {
230
+ const nameMatch = entries.find((e) => e.id && e.name === sig.name);
231
+ if (nameMatch) {
232
+ return {
233
+ id: String(nameMatch.id),
234
+ name: String(nameMatch.name),
235
+ matchedByName: true,
236
+ matchedByTuple: false,
237
+ };
238
+ }
239
+ }
240
+ return undefined;
241
+ }
242
+ export function findDatasourceIdByName(listBody, name) {
243
+ const parsed = JSON.parse(listBody);
244
+ const entries = Array.isArray(parsed) ? parsed : (parsed.entries ?? []);
245
+ const hit = entries.find((e) => e.id && e.name === name);
246
+ return hit?.id ? String(hit.id) : undefined;
247
+ }
248
+ function isDuplicateNameError(err) {
249
+ if (!err || typeof err !== "object")
250
+ return false;
251
+ // HttpError.message is just "HTTP 400 ..."; the description lives in body.
252
+ const status = "status" in err ? Number(err.status) : NaN;
253
+ const body = "body" in err ? String(err.body) : "";
254
+ if (status !== 400)
255
+ return false;
256
+ return /数据源名称已存在|datasource name.*exist|already exists/i.test(body);
257
+ }
209
258
  async function runDsConnectCommand(args) {
210
259
  let dbType = "";
211
260
  let host = "";
@@ -215,6 +264,7 @@ async function runDsConnectCommand(args) {
215
264
  let password = "";
216
265
  let schema;
217
266
  let name;
267
+ let forceNew = false;
218
268
  for (let i = 0; i < args.length; i += 1) {
219
269
  const arg = args[i];
220
270
  if (arg === "--account" && args[i + 1]) {
@@ -233,6 +283,14 @@ async function runDsConnectCommand(args) {
233
283
  name = args[++i];
234
284
  continue;
235
285
  }
286
+ if (arg === "--force-new") {
287
+ forceNew = true;
288
+ continue;
289
+ }
290
+ if (arg === "--reuse-existing") {
291
+ forceNew = false;
292
+ continue;
293
+ }
236
294
  if (!arg.startsWith("-")) {
237
295
  if (!dbType)
238
296
  dbType = arg;
@@ -245,7 +303,7 @@ async function runDsConnectCommand(args) {
245
303
  }
246
304
  }
247
305
  if (!dbType || !host || !database || !account || !password) {
248
- console.error("Usage: kweaver ds connect <db_type> <host> <port> <database> --account X --password Y [--schema Z] [--name N]");
306
+ console.error("Usage: kweaver ds connect <db_type> <host> <port> <database> --account X --password Y [--schema Z] [--name N] [--reuse-existing|--force-new]");
249
307
  return 1;
250
308
  }
251
309
  if (Number.isNaN(port) || port < 1) {
@@ -254,6 +312,28 @@ async function runDsConnectCommand(args) {
254
312
  }
255
313
  const token = await ensureValidToken();
256
314
  const base = { baseUrl: token.baseUrl, accessToken: token.accessToken };
315
+ const dsName = name ?? database;
316
+ // Pre-flight dedup: connection-tuple match is the silent-orphan vector.
317
+ // Backend already rejects duplicate names with 400, but won't notice
318
+ // tuple collisions, so we own that check.
319
+ if (!forceNew) {
320
+ const listBody = await listDatasources({ ...base });
321
+ const hit = findExistingDatasource(listBody, {
322
+ type: dbType,
323
+ host,
324
+ port,
325
+ database,
326
+ account,
327
+ name: dsName,
328
+ });
329
+ if (hit) {
330
+ const why = hit.matchedByTuple
331
+ ? "matched by (type,host,port,database,account)"
332
+ : "matched by --name";
333
+ console.error(`Reusing existing datasource ${hit.id} (${hit.name}); ${why}. Use --force-new to override.`);
334
+ return printDsConnectOutput(base, hit.id);
335
+ }
336
+ }
257
337
  console.error("Testing connectivity ...");
258
338
  await testDatasource({
259
339
  ...base,
@@ -265,27 +345,45 @@ async function runDsConnectCommand(args) {
265
345
  password,
266
346
  schema,
267
347
  });
268
- const dsName = name ?? database;
269
- const createBody = await createDatasource({
270
- ...base,
271
- name: dsName,
272
- type: dbType,
273
- host,
274
- port,
275
- database,
276
- account,
277
- password,
278
- schema,
279
- });
280
- const dsId = extractDatasourceId(createBody);
348
+ let dsId = "";
349
+ try {
350
+ const createBody = await createDatasource({
351
+ ...base,
352
+ name: dsName,
353
+ type: dbType,
354
+ host,
355
+ port,
356
+ database,
357
+ account,
358
+ password,
359
+ schema,
360
+ });
361
+ dsId = extractDatasourceId(createBody);
362
+ }
363
+ catch (err) {
364
+ // Backend checks name uniqueness but not tuple. If we raced another caller
365
+ // (or tuple match got disabled by --force-new and the name still collides),
366
+ // turn the raw 400 into a useful pointer to the existing id.
367
+ if (isDuplicateNameError(err)) {
368
+ // Backend rejected the name; look it up specifically (not by tuple —
369
+ // sibling ds sharing the same connection would mislead the pointer).
370
+ const listBody = await listDatasources({ ...base });
371
+ const existingId = findDatasourceIdByName(listBody, dsName);
372
+ if (existingId) {
373
+ console.error(`Datasource name '${dsName}' already exists as ${existingId}. Re-run without --force-new to reuse it, or pick a different --name.`);
374
+ return 1;
375
+ }
376
+ }
377
+ throw err;
378
+ }
281
379
  if (!dsId) {
282
380
  console.error("Failed to get datasource ID from create response");
283
381
  return 1;
284
382
  }
285
- const tablesBody = await listTablesWithColumns({
286
- ...base,
287
- id: dsId,
288
- });
383
+ return printDsConnectOutput(base, dsId);
384
+ }
385
+ async function printDsConnectOutput(base, dsId) {
386
+ const tablesBody = await listTablesWithColumns({ ...base, id: dsId });
289
387
  const tables = JSON.parse(tablesBody);
290
388
  const output = {
291
389
  datasource_id: dsId,
@@ -306,7 +404,6 @@ Options:
306
404
  --files <s> CSV file paths (comma-separated or glob pattern, required)
307
405
  --table-prefix <s> Table name prefix (default: none)
308
406
  --batch-size <n> Rows per batch (default: 500, range: 1-10000)
309
- --recreate First batch uses overwrite (drop/recreate table) then append; use when schema changed
310
407
  -bd, --biz-domain Business domain (default: bd_public)`;
311
408
  export function parseImportCsvArgs(args) {
312
409
  let datasourceId = "";
@@ -314,7 +411,6 @@ export function parseImportCsvArgs(args) {
314
411
  let tablePrefix = "";
315
412
  let batchSize = 500;
316
413
  let businessDomain = "";
317
- let recreate = false;
318
414
  for (let i = 0; i < args.length; i += 1) {
319
415
  const arg = args[i];
320
416
  if (arg === "--help" || arg === "-h")
@@ -323,10 +419,6 @@ export function parseImportCsvArgs(args) {
323
419
  files = args[++i];
324
420
  continue;
325
421
  }
326
- if (arg === "--recreate") {
327
- recreate = true;
328
- continue;
329
- }
330
422
  if (arg === "--table-prefix" && args[i + 1]) {
331
423
  tablePrefix = args[++i];
332
424
  continue;
@@ -349,7 +441,7 @@ export function parseImportCsvArgs(args) {
349
441
  }
350
442
  if (!businessDomain)
351
443
  businessDomain = resolveBusinessDomain();
352
- return { datasourceId, files, tablePrefix, batchSize, businessDomain, recreate };
444
+ return { datasourceId, files, tablePrefix, batchSize, businessDomain };
353
445
  }
354
446
  export async function resolveFiles(pattern) {
355
447
  const parts = pattern.split(",").map((p) => p.trim()).filter(Boolean);
@@ -452,7 +544,6 @@ export async function runDsImportCsv(args) {
452
544
  tableExist,
453
545
  data: batch,
454
546
  fieldMappings,
455
- recreate: options.recreate,
456
547
  });
457
548
  const t0 = Date.now();
458
549
  process.stderr.write(`[${tableName}] batch ${batchLabel} (${rowCount} rows)... `);
@@ -487,6 +578,23 @@ export async function runDsImportCsv(args) {
487
578
  if (failed.length > 0) {
488
579
  console.error(`Failed tables: ${failed.join(", ")}`);
489
580
  }
581
+ // Refresh the platform metadata catalog so the freshly imported tables
582
+ // are visible to ds tables / bkn create-from-ds without manual scan.
583
+ // Best-effort: scan failures shouldn't mask a successful import.
584
+ if (succeeded.length > 0) {
585
+ process.stderr.write("Scanning datasource metadata ...\n");
586
+ try {
587
+ await scanMetadata({
588
+ ...base,
589
+ id: options.datasourceId,
590
+ dsType: datasourceType,
591
+ businessDomain: options.businessDomain,
592
+ });
593
+ }
594
+ catch (err) {
595
+ console.error(`Scan warning (continuing): ${formatHttpError(err)}`);
596
+ }
597
+ }
490
598
  return { code: failed.length > 0 ? 1 : 0, tables: succeeded, failed, tableColumns, sampleRows };
491
599
  }
492
600
  export async function runDsImportCsvCommand(args) {
@@ -19,8 +19,6 @@ export interface DagBodyOptions {
19
19
  tableExist: boolean;
20
20
  data: Array<Record<string, string | null>>;
21
21
  fieldMappings: FieldMapping[];
22
- /** When true on the first batch (`tableExist` false), use "insert" to force table recreation. */
23
- recreate?: boolean;
24
22
  }
25
23
  /**
26
24
  * Read a CSV file and return its headers and rows.
@@ -80,11 +80,9 @@ export function buildFieldMappings(headers) {
80
80
  * The DAG has two steps: a manual trigger and the database write.
81
81
  */
82
82
  export function buildDagBody(options) {
83
- const { datasourceId, datasourceType, tableName, tableExist, data, fieldMappings, recreate } = options;
83
+ const { datasourceId, datasourceType, tableName, tableExist, data, fieldMappings } = options;
84
84
  const ts = Date.now();
85
- // "insert" creates/replaces the table; "append" adds rows to an existing table.
86
- // With --recreate, use "insert" on first batch to force table recreation when schema changed.
87
- const operateType = tableExist ? "append" : recreate ? "insert" : "append";
85
+ const operateType = "append";
88
86
  const triggerStep = {
89
87
  id: "step-trigger",
90
88
  title: "Trigger",
@@ -1,8 +1,9 @@
1
- import { mkdirSync, readFileSync, writeFileSync } from "node:fs";
1
+ import { mkdirSync, readFileSync, statSync, writeFileSync } from "node:fs";
2
2
  import { basename, dirname, resolve } from "node:path";
3
3
  import { ensureValidToken, formatHttpError, with401RefreshRetry } from "../auth/oauth.js";
4
4
  import { resolveBusinessDomain } from "../config/store.js";
5
- import { deleteSkill, downloadSkill, fetchSkillContent, fetchSkillFile, getSkill, getSkillContentIndex, installSkillArchive, listSkillMarket, listSkills, readSkillFile, registerSkillContent, registerSkillZip, updateSkillStatus, } from "../api/skills.js";
5
+ import { deleteSkill, downloadSkill, fetchSkillContent, fetchSkillFile, getSkill, getSkillContentIndex, installSkillArchive, listSkillMarket, listSkills, readSkillFile, registerSkillZip, updateSkillStatus, } from "../api/skills.js";
6
+ import { bundleSkillDirectoryToZip, bundleSkillFileToZip } from "../utils/skill-bundle.js";
6
7
  function printSkillHelp(subcommand) {
7
8
  if (subcommand === "list") {
8
9
  console.log(`kweaver skill list [--name kw] [--source src] [--status status] [--create-user user]
@@ -20,7 +21,15 @@ function printSkillHelp(subcommand) {
20
21
  }
21
22
  if (subcommand === "register") {
22
23
  console.log(`kweaver skill register (--content-file <path> | --zip-file <path>)
23
- [--source src] [--extend-info json] [-bd value] [--pretty|--compact]`);
24
+ [--source src] [--extend-info json] [-bd value] [--pretty|--compact]
25
+
26
+ --content-file accepts either:
27
+ - a single file named SKILL.md (auto-bundled into a 1-file zip)
28
+ - a skill directory containing SKILL.md (bundled into a zip)
29
+ Both paths upload as multipart zip; the backend's file_type=content
30
+ registration is unreliable (publish-then-read returns 404) so the CLI
31
+ always goes through zip.
32
+ --zip-file accepts a pre-built .zip with SKILL.md at the archive root.`);
24
33
  return;
25
34
  }
26
35
  if (subcommand === "set-status" || subcommand === "status") {
@@ -54,6 +63,7 @@ Subcommands:
54
63
  market [--name kw] [--source src] [--page N] [--page-size N] [-bd value]
55
64
  get <skill-id> [-bd value]
56
65
  register --content-file <path> | --zip-file <path> [--source src] [--extend-info json]
66
+ (--content-file accepts a file named SKILL.md or a directory; both auto-zip)
57
67
  set-status <skill-id> <unpublish|published|offline> [-bd value]
58
68
  delete <skill-id> [-y] [-bd value]
59
69
  content <skill-id> [--raw] [--output file] [-bd value]
@@ -394,13 +404,23 @@ export async function runSkillCommand(args) {
394
404
  if (subcommand === "register") {
395
405
  const opts = parseSkillRegisterArgs(rest);
396
406
  if (opts.contentFile) {
397
- const content = readFileSync(resolve(opts.contentFile), "utf8");
398
- const result = await registerSkillContent({
407
+ // Always bundle into zip — the backend's file_type=content path
408
+ // doesn't write skill_file_index, so SKILL.md is unreachable
409
+ // after publish via /skills/:id/content. Going through zip
410
+ // (single SKILL.md or full directory) is the only path that
411
+ // produces a readable skill end-to-end.
412
+ const abs = resolve(opts.contentFile);
413
+ const stat = statSync(abs);
414
+ const bytes = stat.isDirectory()
415
+ ? await bundleSkillDirectoryToZip(abs)
416
+ : await bundleSkillFileToZip(abs);
417
+ const result = await registerSkillZip({
399
418
  ...token,
400
419
  businessDomain: opts.businessDomain,
401
- content,
402
420
  source: opts.source,
403
421
  extendInfo: opts.extendInfo,
422
+ filename: `${basename(abs).replace(/\.zip$/i, "")}.zip`,
423
+ bytes,
404
424
  });
405
425
  console.log(format(result, opts.pretty));
406
426
  return 0;
@@ -19,6 +19,7 @@ export interface ToolInvokeOptions {
19
19
  toolId: string;
20
20
  header?: Record<string, unknown>;
21
21
  query?: Record<string, unknown>;
22
+ path?: Record<string, unknown>;
22
23
  body?: unknown;
23
24
  bodyFile?: string;
24
25
  timeout?: number;
@@ -12,8 +12,10 @@ Subcommands:
12
12
  enable --toolbox <box-id> <tool-id>... Enable one or more tools
13
13
  disable --toolbox <box-id> <tool-id>... Disable one or more tools
14
14
  execute --toolbox <box-id> <tool-id> [--body '<json>'|--body-file <path>]
15
+ [--header|--query|--path '<json>']
15
16
  Invoke a published+enabled tool
16
17
  debug --toolbox <box-id> <tool-id> [--body '<json>'|--body-file <path>]
18
+ [--header|--query|--path '<json>']
17
19
  Invoke a tool (works on draft/disabled too)
18
20
 
19
21
  Options for execute/debug:
@@ -21,6 +23,9 @@ Options for execute/debug:
21
23
  (Authorization is auto-injected from current session
22
24
  when --header omits it; pass {} to send none)
23
25
  --query '<json>' Query params map forwarded to the downstream tool
26
+ --path '<json>' Path parameter map for OpenAPI path placeholders (e.g. {id})
27
+ (JSON object: quote id and UUID, e.g. key id for get_dataview_detail /
28
+ query_dataview_sql)
24
29
  --timeout <seconds> Per-call timeout (backend default applies when omitted)
25
30
 
26
31
  Common options:
@@ -241,6 +246,7 @@ export function parseToolInvokeArgs(args) {
241
246
  let pretty = true;
242
247
  let header;
243
248
  let query;
249
+ let path;
244
250
  let body;
245
251
  let bodyProvided = false;
246
252
  let bodyFile;
@@ -259,6 +265,10 @@ export function parseToolInvokeArgs(args) {
259
265
  query = parseJsonOption("--query", args[++i]);
260
266
  continue;
261
267
  }
268
+ if (a === "--path" && args[i + 1]) {
269
+ path = parseJsonOption("--path", args[++i]);
270
+ continue;
271
+ }
262
272
  if (a === "--body" && args[i + 1]) {
263
273
  const raw = args[++i];
264
274
  try {
@@ -312,6 +322,7 @@ export function parseToolInvokeArgs(args) {
312
322
  toolId,
313
323
  header,
314
324
  query,
325
+ path,
315
326
  body: bodyProvided ? body : undefined,
316
327
  bodyFile,
317
328
  timeout,
@@ -370,6 +381,7 @@ async function runToolInvoke(args, action) {
370
381
  toolId: opts.toolId,
371
382
  header,
372
383
  query: opts.query,
384
+ path: opts.path,
373
385
  body,
374
386
  timeout: opts.timeout,
375
387
  });
@@ -0,0 +1,13 @@
1
+ /**
2
+ * Stateless token mode: user passed --token on the CLI for this invocation.
3
+ *
4
+ * In stateless mode the CLI must not mutate ~/.kweaver/ — we error out from
5
+ * any command that would write tokens, sessions, or per-platform config.
6
+ *
7
+ * KWEAVER_TOKEN env (without --token flag) is NOT considered stateless: the
8
+ * env-var path predates this feature and keeps its existing semantics for
9
+ * backward compatibility. The cli.ts argv parser sets KWEAVER_TOKEN_SOURCE=flag
10
+ * only when --token was passed explicitly.
11
+ */
12
+ export declare function isStatelessTokenMode(): boolean;
13
+ export declare function assertNotStatelessForWrite(commandName: string): void;
@@ -0,0 +1,20 @@
1
+ /**
2
+ * Stateless token mode: user passed --token on the CLI for this invocation.
3
+ *
4
+ * In stateless mode the CLI must not mutate ~/.kweaver/ — we error out from
5
+ * any command that would write tokens, sessions, or per-platform config.
6
+ *
7
+ * KWEAVER_TOKEN env (without --token flag) is NOT considered stateless: the
8
+ * env-var path predates this feature and keeps its existing semantics for
9
+ * backward compatibility. The cli.ts argv parser sets KWEAVER_TOKEN_SOURCE=flag
10
+ * only when --token was passed explicitly.
11
+ */
12
+ export function isStatelessTokenMode() {
13
+ return process.env.KWEAVER_TOKEN_SOURCE === "flag";
14
+ }
15
+ export function assertNotStatelessForWrite(commandName) {
16
+ if (isStatelessTokenMode()) {
17
+ throw new Error(`Cannot run \`${commandName}\` with --token. The --token flag is for stateless invocations and ` +
18
+ `must not mutate ~/.kweaver/. Drop --token, or use \`kweaver auth login\` to obtain a saved session.`);
19
+ }
20
+ }
@@ -53,6 +53,7 @@ export interface PlatformSummary {
53
53
  /** Human-readable name persisted from /oauth2/userinfo at login time. */
54
54
  displayName?: string;
55
55
  }
56
+ export declare function getProfileName(): string | null;
56
57
  /** Extract userId from a TokenConfig (try idToken, then accessToken, fallback "default"). */
57
58
  export declare function extractUserId(token: TokenConfig): string;
58
59
  /** Get the active userId for a platform. */
@@ -29,6 +29,19 @@ const MCP_PATH = "/api/agent-retrieval/v1/mcp";
29
29
  function buildMcpUrl(baseUrl) {
30
30
  return baseUrl.replace(/\/+$/, "") + MCP_PATH;
31
31
  }
32
+ const PROFILE_NAME_RE = /^[A-Za-z0-9_-]{1,64}$/;
33
+ export function getProfileName() {
34
+ const raw = process.env.KWEAVER_PROFILE;
35
+ if (!raw)
36
+ return null;
37
+ const trimmed = raw.trim();
38
+ if (!trimmed)
39
+ return null;
40
+ if (!PROFILE_NAME_RE.test(trimmed)) {
41
+ throw new Error(`KWEAVER_PROFILE='${raw}' is invalid. Use 1-64 chars from [A-Za-z0-9_-].`);
42
+ }
43
+ return trimmed;
44
+ }
32
45
  function getConfigDirPath() {
33
46
  return process.env.KWEAVERC_CONFIG_DIR || join(homedir(), ".kweaver");
34
47
  }
@@ -36,6 +49,10 @@ function getPlatformsDirPath() {
36
49
  return join(getConfigDirPath(), "platforms");
37
50
  }
38
51
  function getStateFilePath() {
52
+ const profile = getProfileName();
53
+ if (profile) {
54
+ return join(getConfigDirPath(), "profiles", profile, "state.json");
55
+ }
39
56
  return join(getConfigDirPath(), "state.json");
40
57
  }
41
58
  function getLegacyClientFilePath() {
@@ -6,6 +6,8 @@ export interface InvokeToolArgs {
6
6
  * send no headers. */
7
7
  header?: Record<string, unknown>;
8
8
  query?: Record<string, unknown>;
9
+ /** Path parameters for OpenAPI `{name}` placeholders (e.g. `{ id: "<uuid>" }`). */
10
+ path?: Record<string, unknown>;
9
11
  body?: unknown;
10
12
  /** Per-call timeout in seconds (backend default applies when omitted). */
11
13
  timeout?: number;
@@ -0,0 +1,12 @@
1
+ {
2
+ "name": "document",
3
+ "type": "bkn",
4
+ "description": "文档知识网络",
5
+ "arguments": [
6
+ { "name": "name", "required": true, "description": "BKN 名称", "type": "string" },
7
+ { "name": "embedding_model_id", "required": true, "description": "向量化模型 ID", "type": "string" },
8
+ { "name": "content_dataset_id", "required": true, "description": "内容数据集 ID", "type": "string" },
9
+ { "name": "document_dataset_id", "required": true, "description": "文档数据集 ID", "type": "string" },
10
+ { "name": "element_dataset_id", "required": true, "description": "元素数据集 ID", "type": "string" }
11
+ ]
12
+ }