@kweaver-ai/kweaver-sdk 0.7.2 → 0.7.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +35 -1
- package/README.zh.md +26 -0
- package/bin/kweaver.js +12 -11
- package/dist/api/bkn-backend.d.ts +1 -0
- package/dist/api/bkn-backend.js +1 -1
- package/dist/api/bkn-metrics.d.ts +59 -0
- package/dist/api/bkn-metrics.js +129 -0
- package/dist/api/conversations.d.ts +47 -2
- package/dist/api/conversations.js +113 -17
- package/dist/api/datasources.d.ts +7 -0
- package/dist/api/datasources.js +51 -6
- package/dist/api/model-invocation.d.ts +58 -0
- package/dist/api/model-invocation.js +203 -0
- package/dist/api/models.d.ts +79 -0
- package/dist/api/models.js +183 -0
- package/dist/api/ontology-query-metrics.d.ts +14 -0
- package/dist/api/ontology-query-metrics.js +30 -0
- package/dist/api/toolboxes.d.ts +2 -0
- package/dist/api/toolboxes.js +2 -1
- package/dist/bundled-model-templates.d.ts +17 -0
- package/dist/bundled-model-templates.js +24 -0
- package/dist/cli.js +28 -2
- package/dist/client.d.ts +3 -0
- package/dist/client.js +5 -0
- package/dist/commands/agent.d.ts +7 -1
- package/dist/commands/agent.js +75 -21
- package/dist/commands/auth.js +42 -7
- package/dist/commands/bkn-metric.d.ts +1 -0
- package/dist/commands/bkn-metric.js +406 -0
- package/dist/commands/bkn-ops.d.ts +2 -1
- package/dist/commands/bkn-ops.js +75 -34
- package/dist/commands/bkn-utils.d.ts +55 -2
- package/dist/commands/bkn-utils.js +103 -9
- package/dist/commands/bkn.js +4 -0
- package/dist/commands/dataflow.js +194 -20
- package/dist/commands/ds.d.ts +0 -1
- package/dist/commands/ds.js +26 -10
- package/dist/commands/explore-chat.js +2 -2
- package/dist/commands/import-csv.d.ts +0 -2
- package/dist/commands/import-csv.js +2 -4
- package/dist/commands/model.d.ts +72 -0
- package/dist/commands/model.js +1315 -0
- package/dist/commands/tool.d.ts +1 -0
- package/dist/commands/tool.js +12 -0
- package/dist/config/store.d.ts +1 -0
- package/dist/config/store.js +17 -0
- package/dist/index.d.ts +9 -0
- package/dist/index.js +5 -0
- package/dist/resources/models.d.ts +40 -0
- package/dist/resources/models.js +88 -0
- package/dist/resources/toolboxes.d.ts +2 -0
- package/dist/templates/bkn/document/manifest.json +12 -0
- package/dist/templates/bkn/document/template.json +757 -0
- package/dist/templates/dataflow/unstructured/manifest.json +11 -0
- package/dist/templates/dataflow/unstructured/template.json +63 -0
- package/dist/templates/dataset/document/manifest.json +10 -0
- package/dist/templates/dataset/document/template.json +23 -0
- package/dist/templates/dataset/document-content/manifest.json +10 -0
- package/dist/templates/dataset/document-content/template.json +29 -0
- package/dist/templates/dataset/document-element/manifest.json +10 -0
- package/dist/templates/dataset/document-element/template.json +21 -0
- package/dist/templates/model/llm-basic.json +13 -0
- package/dist/templates/model/manifest.json +16 -0
- package/dist/templates/model/small-basic.json +6 -0
- package/dist/utils/template-loader.d.ts +40 -0
- package/dist/utils/template-loader.js +129 -0
- package/dist/utils/trace-views.d.ts +44 -0
- package/dist/utils/trace-views.js +425 -0
- package/package.json +3 -3
package/dist/commands/bkn-utils.d.ts
CHANGED

@@ -18,14 +18,67 @@ export declare function parseOntologyQueryFlags(args: string[]): {
     businessDomain: string;
 };
 export declare const DISPLAY_HINTS: string[];
-
+export interface PkCandidate {
+    name: string;
+    cardinality: number;
+}
+export interface PkDetectionResult {
+    /** Detected PK column name, or null when detection is not confident. */
+    pk: string | null;
+    /** All columns sorted by cardinality desc. Empty when no sample. */
+    candidates: PkCandidate[];
+    /** 0 when no sample data was provided. */
+    sampleSize: number;
+}
+export declare const PK_NAME_HINTS: string[];
+/**
+ * Detect primary key from a row sample. Returns null pk when no column has
+ * unique values across the sample — caller must fail-fast and prompt for --pk-map.
+ * Among columns that ARE fully unique, prefers PK-like names (id, *_id, pk).
+ */
 export declare function detectPrimaryKey(table: {
     name: string;
     columns: Array<{
         name: string;
         type: string;
     }>;
-}, rows?: Array<Record<string, string | null>>):
+}, rows?: Array<Record<string, string | null>>): PkDetectionResult;
+export interface PkResolution {
+    /** Resolved PK column name, or null when caller must fail-fast. */
+    pk: string | null;
+    /** Origin of the resolution — used by callers for messaging and warnings. */
+    source: "override" | "schema" | "sample" | "ambiguous";
+    /** For 'sample' source: cardinality candidates from `detectPrimaryKey`. */
+    candidates?: PkCandidate[];
+    /** For 'sample' source: rows seen, propagated for error formatting. */
+    sampleSize?: number;
+    /** For 'ambiguous' source: schema-declared composite PK columns. */
+    ambiguous?: string[];
+}
+/**
+ * Resolve a single PK for a BKN object type, in priority order:
+ * 1. caller-provided override (e.g. --pk-map)
+ * 2. schema-declared single PK from datasource metadata
+ * 3. sample-based detection (CSV / schemaless sources)
+ * Composite SQL PKs intentionally surface as `source: "ambiguous"` — BKN
+ * object types take a single PK, so the caller must pick via --pk-map.
+ */
+export declare function resolvePrimaryKey(table: {
+    name: string;
+    columns: Array<{
+        name: string;
+        type: string;
+        isPrimaryKey?: boolean;
+    }>;
+    primaryKeys?: string[];
+}, sampleRows?: Array<Record<string, string | null>>, override?: string | null): PkResolution;
+/** Format a user-facing error message when PK auto-detection fails. */
+export declare function formatPkDetectionError(tableName: string, result: PkDetectionResult): string;
+/**
+ * Parse --pk-map string into a Record<table, field>.
+ * Format: "<table>:<field>[,<table>:<field>...]". Throws on invalid input.
+ */
+export declare function parsePkMap(input: string): Record<string, string>;
 export declare function detectDisplayKey(table: {
     name: string;
     columns: Array<{

package/dist/commands/bkn-utils.js
CHANGED

@@ -68,18 +68,112 @@ export function parseOntologyQueryFlags(args) {
 }
 // ── Schema detection helpers ─────────────────────────────────────────────────
 export const DISPLAY_HINTS = ["name", "title", "label", "display_name", "description"];
-
+export const PK_NAME_HINTS = ["id", "_id", "pk"];
+/**
+ * Detect primary key from a row sample. Returns null pk when no column has
+ * unique values across the sample — caller must fail-fast and prompt for --pk-map.
+ * Among columns that ARE fully unique, prefers PK-like names (id, *_id, pk).
+ */
 export function detectPrimaryKey(table, rows) {
-    if (rows
-
-
-
-
-
+    if (!rows || rows.length === 0) {
+        return { pk: null, candidates: [], sampleSize: 0 };
+    }
+    const candidates = table.columns
+        .map((col) => {
+        const unique = new Set(rows.map((r) => r[col.name]));
+        return { name: col.name, cardinality: unique.size };
+    })
+        .sort((a, b) => b.cardinality - a.cardinality);
+    const fullCardinality = candidates.filter((c) => c.cardinality === rows.length);
+    if (fullCardinality.length === 0) {
+        return { pk: null, candidates, sampleSize: rows.length };
+    }
+    const named = fullCardinality.find((c) => {
+        const lower = c.name.toLowerCase();
+        return PK_NAME_HINTS.some((h) => lower === h || lower.endsWith(`_${h}`));
+    });
+    return {
+        pk: named?.name ?? fullCardinality[0].name,
+        candidates,
+        sampleSize: rows.length,
+    };
+}
+/**
+ * Resolve a single PK for a BKN object type, in priority order:
+ * 1. caller-provided override (e.g. --pk-map)
+ * 2. schema-declared single PK from datasource metadata
+ * 3. sample-based detection (CSV / schemaless sources)
+ * Composite SQL PKs intentionally surface as `source: "ambiguous"` — BKN
+ * object types take a single PK, so the caller must pick via --pk-map.
+ */
+export function resolvePrimaryKey(table, sampleRows, override) {
+    if (override) {
+        return { pk: override, source: "override" };
+    }
+    const schemaPks = collectSchemaPks(table);
+    if (schemaPks.length === 1) {
+        return { pk: schemaPks[0], source: "schema" };
+    }
+    if (schemaPks.length > 1) {
+        return { pk: null, source: "ambiguous", ambiguous: schemaPks };
+    }
+    const sample = detectPrimaryKey(table, sampleRows);
+    return {
+        pk: sample.pk,
+        source: "sample",
+        candidates: sample.candidates,
+        sampleSize: sample.sampleSize,
+    };
+}
+function collectSchemaPks(table) {
+    // Filter against the actual column list — schema metadata can drift (stale
+    // catalog, post-rename) and an unusable PK should fall through cleanly to
+    // sample/fail rather than poison downstream object-type creation.
+    const colNames = new Set(table.columns.map((c) => c.name));
+    if (Array.isArray(table.primaryKeys) && table.primaryKeys.length > 0) {
+        return table.primaryKeys.filter((n) => colNames.has(n));
+    }
+    return table.columns.filter((c) => c.isPrimaryKey === true).map((c) => c.name);
+}
+/** Format a user-facing error message when PK auto-detection fails. */
+export function formatPkDetectionError(tableName, result) {
+    const lines = [`Cannot auto-detect primary key for table '${tableName}'.`];
+    if (result.sampleSize === 0) {
+        lines.push(`  No sample data available — chain with 'kweaver ds import-csv' or use --pk-map.`);
+    }
+    else {
+        lines.push(`  No column has unique values in the ${result.sampleSize}-row sample.`);
+        lines.push(`  Top candidates by cardinality:`);
+        const top = result.candidates.slice(0, 5);
+        const maxNameLen = Math.max(...top.map((c) => c.name.length));
+        for (const c of top) {
+            lines.push(`    ${c.name.padEnd(maxNameLen)}  ${c.cardinality} unique`);
+        }
+    }
+    lines.push(``);
+    lines.push(`  Re-run with --pk-map to specify explicitly:`);
+    lines.push(`    --pk-map ${tableName}:<column>`);
+    return lines.join("\n");
+}
+/**
+ * Parse --pk-map string into a Record<table, field>.
+ * Format: "<table>:<field>[,<table>:<field>...]". Throws on invalid input.
+ */
+export function parsePkMap(input) {
+    const result = {};
+    for (const pair of input.split(",").map((s) => s.trim()).filter(Boolean)) {
+        const idx = pair.indexOf(":");
+        if (idx <= 0 || idx >= pair.length - 1) {
+            throw new Error(`Invalid --pk-map entry '${pair}'. Expected '<table>:<field>[,<table>:<field>...]'`);
+        }
+        const table = pair.slice(0, idx).trim();
+        const field = pair.slice(idx + 1).trim();
+        if (!table || !field) {
+            throw new Error(`Invalid --pk-map entry '${pair}'. Expected '<table>:<field>[,<table>:<field>...]'`);
         }
+        result[table] = field;
     }
-
-    return table.columns[0]?.name ?? "id";
+    return result;
 }
 export function detectDisplayKey(table, primaryKey) {
     for (const col of table.columns) {
package/dist/commands/bkn.js
CHANGED

@@ -7,6 +7,7 @@ import { resolveBusinessDomain } from "../config/store.js";
 import { runKnObjectTypeCommand, runKnRelationTypeCommand, runKnActionTypeCommand, runKnConceptGroupCommand, } from "./bkn-schema.js";
 import { runKnSubgraphCommand, runKnActionExecutionCommand, runKnActionLogCommand, runKnSearchCommand, runKnRelationTypePathsCommand, runKnResourcesCommand, } from "./bkn-query.js";
 import { runKnBuildCommand, runKnValidateCommand, runKnPushCommand, runKnPullCommand, runKnCreateFromDsCommand, runKnCreateFromCsvCommand, runKnActionScheduleCommand, runKnJobCommand, } from "./bkn-ops.js";
+import { runKnMetricCommand } from "./bkn-metric.js";
 // Re-export shared utils for backward compatibility (tests import from bkn.js)
 export { pollWithBackoff, parseOntologyQueryFlags, parseJsonObject, parseSearchAfterArray, confirmYes, DISPLAY_HINTS, detectPrimaryKey, detectDisplayKey, } from "./bkn-utils.js";
 // Re-export schema types and parse functions for backward compatibility

@@ -404,6 +405,7 @@ Subcommands:
   job list|get|tasks|delete <kn-id> ...
   relation-type-paths <kn-id> '<json>'     Query relation type paths between OTs
   resources                                List available resources
+  metric list|get|create|search|validate|update|delete|query|dry-run <kn-id> ...   BKN metrics (definitions + data)

 Use 'kweaver bkn <subcommand> --help' for subcommand options.`;
 export async function runKnCommand(args) {

@@ -463,6 +465,8 @@ export async function runKnCommand(args) {
             return runKnRelationTypePathsCommand(rest);
         if (subcommand === "resources")
             return runKnResourcesCommand(rest);
+        if (subcommand === "metric")
+            return runKnMetricCommand(rest);
         return Promise.resolve(-1);
     };
     try {
package/dist/commands/dataflow.js
CHANGED

@@ -7,6 +7,9 @@ import { ensureValidToken, formatHttpError, with401RefreshRetry } from "../auth/oauth.js";
 import { resolveBusinessDomain } from "../config/store.js";
 import { getDataflowLogsPage, listDataflowRuns, listDataflows, runDataflowWithFile, runDataflowWithRemoteUrl, } from "../api/dataflow2.js";
 import { createDataflow } from "../api/dataflow.js";
+import { createVegaResource } from "../api/vega.js";
+import { createKnowledgeNetwork } from "../api/knowledge-networks.js";
+import { loadTemplate, listTemplates, renderTemplate, generateSourceIdentifier, getTemplatesDir, } from "../utils/template-loader.js";
 function renderTable(rows) {
     if (rows.length === 0)
         return "";

@@ -101,26 +104,6 @@ export async function runDataflowCommand(args) {
         .strict()
         .fail((message, error) => {
         throw error ?? new Error(message);
-    })
-        .command("create <json>", "Create a new dataflow (DAG) from a JSON definition", (command) => command
-        .positional("json", {
-        type: "string",
-        describe: "JSON body string or @file-path to read from file",
-    })
-        .option("biz-domain", { alias: "bd", type: "string" }), async (argv) => {
-        exitCode = await with401RefreshRetry(async () => {
-            const base = await requireTokenAndBusinessDomain(argv.bizDomain);
-            let raw = argv.json;
-            if (raw.startsWith("@")) {
-                const filePath = raw.slice(1);
-                await access(filePath, constants.R_OK);
-                raw = (await readFile(filePath, "utf8")).toString();
-            }
-            const body = JSON.parse(raw);
-            const dagId = await createDataflow({ ...base, body });
-            console.log(JSON.stringify({ id: dagId }, null, 2));
-            return 0;
-        });
     })
         .command("list", "List all dataflows", (command) => command
         .option("biz-domain", {

@@ -280,6 +263,197 @@ export async function runDataflowCommand(args) {
             }
             return 0;
         });
+    })
+        .command("templates", "List all available templates", {
+        json: { type: "boolean", default: false, describe: "Output as JSON" },
+    }, (argv) => {
+        const templatesDir = getTemplatesDir();
+        return Promise.all([
+            listTemplates("dataset", templatesDir),
+            listTemplates("bkn", templatesDir),
+            listTemplates("dataflow", templatesDir),
+        ]).then(([datasetTemplates, bknTemplates, dataflowTemplates]) => {
+            if (argv.json) {
+                console.log(JSON.stringify({
+                    dataset: datasetTemplates,
+                    bkn: bknTemplates,
+                    dataflow: dataflowTemplates,
+                }, null, 2));
+            }
+            else {
+                console.log("Dataset Templates:");
+                for (const t of datasetTemplates) {
+                    console.log(`  - ${t.name.padEnd(18)} ${t.description}`);
+                }
+                console.log("");
+                console.log("BKN Templates:");
+                for (const t of bknTemplates) {
+                    console.log(`  - ${t.name.padEnd(18)} ${t.description}`);
+                }
+                console.log("");
+                console.log("Dataflow Templates:");
+                for (const t of dataflowTemplates) {
+                    console.log(`  - ${t.name.padEnd(18)} ${t.description}`);
+                }
+            }
+        });
+    })
+        .command("create-dataset", "Create a dataset from a template", (command) => command
+        .option("template", { type: "string", demandOption: true, describe: "Template name" })
+        .option("set", { type: "array", string: true, describe: "Set parameter (key=value), can be used multiple times" })
+        .option("json", { type: "boolean", default: false, describe: "Output as JSON" })
+        .option("biz-domain", { alias: "bd", type: "string" }), async (argv) => {
+        exitCode = await with401RefreshRetry(async () => {
+            const base = await requireTokenAndBusinessDomain(argv.bizDomain);
+            const templatesDir = getTemplatesDir();
+            // Parse --set arguments
+            const args = {};
+            if (argv.set) {
+                for (const item of argv.set) {
+                    const eqIdx = item.indexOf("=");
+                    if (eqIdx > 0) {
+                        const key = item.slice(0, eqIdx);
+                        const value = item.slice(eqIdx + 1);
+                        args[key] = value;
+                    }
+                }
+            }
+            // Load template
+            const loaded = await loadTemplate(argv.template, "dataset", templatesDir);
+            if (!loaded) {
+                console.error(`Template not found: ${argv.template}`);
+                return 1;
+            }
+            // Auto-generate source_identifier if not provided
+            if (!args["source_identifier"]) {
+                const prefixMap = {
+                    "document": "dataflow_document",
+                    "document-content": "dataflow_content",
+                    "document-element": "dataflow_element",
+                };
+                const prefix = prefixMap[loaded.manifest.name] || "dataflow";
+                args["source_identifier"] = generateSourceIdentifier(prefix);
+            }
+            // Render template
+            const rendered = renderTemplate(loaded.template, loaded.manifest, args);
+            // Create dataset via API
+            const response = await createVegaResource({
+                ...base,
+                body: JSON.stringify(rendered),
+            });
+            const result = JSON.parse(response);
+            if (argv.json) {
+                console.log(JSON.stringify({ success: true, id: result.id, name: args.name }, null, 2));
+            }
+            else {
+                console.log(`dataset created: id=${result.id}`);
+            }
+            return 0;
+        });
+    })
+        .command("create-bkn", "Create a BKN (knowledge network) from a template", (command) => command
+        .option("template", { type: "string", demandOption: true, describe: "Template name" })
+        .option("set", { type: "array", string: true, describe: "Set parameter (key=value), can be used multiple times" })
+        .option("json", { type: "boolean", default: false, describe: "Output as JSON" })
+        .option("biz-domain", { alias: "bd", type: "string" }), async (argv) => {
+        exitCode = await with401RefreshRetry(async () => {
+            const base = await requireTokenAndBusinessDomain(argv.bizDomain);
+            const templatesDir = getTemplatesDir();
+            // Parse --set arguments
+            const args = {};
+            if (argv.set) {
+                for (const item of argv.set) {
+                    const eqIdx = item.indexOf("=");
+                    if (eqIdx > 0) {
+                        const key = item.slice(0, eqIdx);
+                        const value = item.slice(eqIdx + 1);
+                        args[key] = value;
+                    }
+                }
+            }
+            // Load template
+            const loaded = await loadTemplate(argv.template, "bkn", templatesDir);
+            if (!loaded) {
+                console.error(`Template not found: ${argv.template}`);
+                return 1;
+            }
+            // Render template
+            const rendered = renderTemplate(loaded.template, loaded.manifest, args);
+            rendered.business_domain = base.businessDomain;
+            // Create BKN via API
+            const response = await createKnowledgeNetwork({
+                ...base,
+                body: JSON.stringify(rendered),
+                validate_dependency: false,
+            });
+            const result = JSON.parse(response);
+            if (argv.json) {
+                console.log(JSON.stringify({ success: true, id: result.id, name: args.name }, null, 2));
+            }
+            else {
+                console.log(`bkn created: id=${result.id}`);
+            }
+            return 0;
+        });
+    })
+        .command("create [json]", "Create a new dataflow (DAG) from a JSON definition or template", (command) => command
+        .positional("json", {
+        type: "string",
+        describe: "JSON body string or @file-path to read from file",
+    })
+        .option("template", { type: "string", describe: "Template name (use instead of json)" })
+        .option("set", { type: "array", string: true, describe: "Set parameter (key=value), can be used multiple times" })
+        .option("biz-domain", { alias: "bd", type: "string" })
+        .check((argv) => {
+        const hasJson = typeof argv.json === "string";
+        const hasTemplate = typeof argv.template === "string";
+        if (hasJson && hasTemplate) {
+            throw new Error("Cannot use both json and --template");
+        }
+        if (!hasJson && !hasTemplate) {
+            throw new Error("Either json or --template is required");
+        }
+        return true;
+    }), async (argv) => {
+        exitCode = await with401RefreshRetry(async () => {
+            const base = await requireTokenAndBusinessDomain(argv.bizDomain);
+            let body;
+            if (argv.template) {
+                // Use template
+                const templatesDir = getTemplatesDir();
+                // Parse --set arguments
+                const args = {};
+                if (argv.set) {
+                    for (const item of argv.set) {
+                        const eqIdx = item.indexOf("=");
+                        if (eqIdx > 0) {
+                            const key = item.slice(0, eqIdx);
+                            const value = item.slice(eqIdx + 1);
+                            args[key] = value;
+                        }
+                    }
+                }
+                const loaded = await loadTemplate(argv.template, "dataflow", templatesDir);
+                if (!loaded) {
+                    console.error(`Template not found: ${argv.template}`);
+                    return 1;
+                }
+                body = renderTemplate(loaded.template, loaded.manifest, args);
+            }
+            else {
+                // Use JSON
+                let raw = argv.json;
+                if (raw.startsWith("@")) {
+                    const filePath = raw.slice(1);
+                    await access(filePath, constants.R_OK);
+                    raw = (await readFile(filePath, "utf8")).toString();
+                }
+                body = JSON.parse(raw);
+            }
+            const dagId = await createDataflow({ ...base, body });
+            console.log(JSON.stringify({ id: dagId }, null, 2));
+            return 0;
+        });
     })
         .demandCommand(1);
     try {
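The template-backed commands (`create-dataset`, `create-bkn`, and the template branch of `create`) share the same plumbing: repeated `--set key=value` flags are folded into a record and handed to `renderTemplate`. A rough sketch of that flow (illustrative; the `name=my-flow` parameter is a made-up example, only the loader functions come from `utils/template-loader.js`):

import { loadTemplate, renderTemplate, getTemplatesDir } from "../utils/template-loader.js";

// Fold repeated --set key=value flags into a flat record (mirrors the CLI handlers above).
function parseSetFlags(set) {
    const args = {};
    for (const item of set ?? []) {
        const eqIdx = item.indexOf("=");
        if (eqIdx > 0) {
            args[item.slice(0, eqIdx)] = item.slice(eqIdx + 1);
        }
    }
    return args;
}

// e.g. `kweaver dataflow create --template unstructured --set name=my-flow`
const args = parseSetFlags(["name=my-flow"]);
const loaded = await loadTemplate("unstructured", "dataflow", getTemplatesDir());
if (loaded) {
    const body = renderTemplate(loaded.template, loaded.manifest, args);
    console.log(JSON.stringify(body, null, 2)); // DAG body ready for createDataflow
}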
package/dist/commands/ds.d.ts
CHANGED

@@ -34,7 +34,6 @@ export declare function parseImportCsvArgs(args: string[]): {
     tablePrefix: string;
     batchSize: number;
     businessDomain: string;
-    recreate: boolean;
 };
 export declare function resolveFiles(pattern: string): Promise<string[]>;
 export interface ImportCsvResult {
package/dist/commands/ds.js
CHANGED

@@ -3,7 +3,7 @@ import { statSync } from "node:fs";
 import { glob } from "node:fs/promises";
 import { resolve as resolvePath } from "node:path";
 import { ensureValidToken, formatHttpError, with401RefreshRetry } from "../auth/oauth.js";
-import { testDatasource, createDatasource, listDatasources, getDatasource, deleteDatasource, listTablesWithColumns, } from "../api/datasources.js";
+import { testDatasource, createDatasource, listDatasources, getDatasource, deleteDatasource, listTablesWithColumns, scanMetadata, } from "../api/datasources.js";
 import { formatCallOutput } from "./call.js";
 import { resolveBusinessDomain } from "../config/store.js";
 import { parseCsvFile, buildTableName, splitBatches, buildFieldMappings, buildDagBody, } from "./import-csv.js";

@@ -389,7 +389,13 @@ async function printDsConnectOutput(base, dsId) {
         datasource_id: dsId,
         tables: tables.map((t) => ({
             name: t.name,
-
+            ...(t.primaryKeys && t.primaryKeys.length > 0 ? { primary_keys: t.primaryKeys } : {}),
+            columns: t.columns.map((c) => ({
+                name: c.name,
+                type: c.type,
+                comment: c.comment,
+                ...(c.isPrimaryKey ? { is_primary_key: true } : {}),
+            })),
         })),
     };
     console.log(JSON.stringify(output, null, 2));

@@ -404,7 +410,6 @@ Options:
   --files <s>          CSV file paths (comma-separated or glob pattern, required)
   --table-prefix <s>   Table name prefix (default: none)
   --batch-size <n>     Rows per batch (default: 500, range: 1-10000)
-  --recreate           First batch uses overwrite (drop/recreate table) then append; use when schema changed
   -bd, --biz-domain    Business domain (default: bd_public)`;
 export function parseImportCsvArgs(args) {
     let datasourceId = "";

@@ -412,7 +417,6 @@ export function parseImportCsvArgs(args) {
     let tablePrefix = "";
     let batchSize = 500;
     let businessDomain = "";
-    let recreate = false;
     for (let i = 0; i < args.length; i += 1) {
         const arg = args[i];
         if (arg === "--help" || arg === "-h")

@@ -421,10 +425,6 @@ export function parseImportCsvArgs(args) {
             files = args[++i];
             continue;
         }
-        if (arg === "--recreate") {
-            recreate = true;
-            continue;
-        }
         if (arg === "--table-prefix" && args[i + 1]) {
             tablePrefix = args[++i];
             continue;

@@ -447,7 +447,7 @@ export function parseImportCsvArgs(args) {
     }
     if (!businessDomain)
         businessDomain = resolveBusinessDomain();
-    return { datasourceId, files, tablePrefix, batchSize, businessDomain
+    return { datasourceId, files, tablePrefix, batchSize, businessDomain };
 }
 export async function resolveFiles(pattern) {
     const parts = pattern.split(",").map((p) => p.trim()).filter(Boolean);

@@ -550,7 +550,6 @@ export async function runDsImportCsv(args) {
                 tableExist,
                 data: batch,
                 fieldMappings,
-                recreate: options.recreate,
             });
             const t0 = Date.now();
             process.stderr.write(`[${tableName}] batch ${batchLabel} (${rowCount} rows)... `);

@@ -585,6 +584,23 @@ export async function runDsImportCsv(args) {
     if (failed.length > 0) {
         console.error(`Failed tables: ${failed.join(", ")}`);
     }
+    // Refresh the platform metadata catalog so the freshly imported tables
+    // are visible to ds tables / bkn create-from-ds without manual scan.
+    // Best-effort: scan failures shouldn't mask a successful import.
+    if (succeeded.length > 0) {
+        process.stderr.write("Scanning datasource metadata ...\n");
+        try {
+            await scanMetadata({
+                ...base,
+                id: options.datasourceId,
+                dsType: datasourceType,
+                businessDomain: options.businessDomain,
+            });
+        }
+        catch (err) {
+            console.error(`Scan warning (continuing): ${formatHttpError(err)}`);
+        }
+    }
     return { code: failed.length > 0 ? 1 : 0, tables: succeeded, failed, tableColumns, sampleRows };
 }
 export async function runDsImportCsvCommand(args) {
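With these changes `kweaver ds connect` surfaces the schema-declared keys (`primary_keys` per table, `is_primary_key` per column), the same `t.primaryKeys` / `c.isPrimaryKey` metadata that the schema branch of `resolvePrimaryKey` reads, and a successful `ds import-csv` now ends with a best-effort metadata scan so the new tables appear without a manual rescan. Roughly what the connect output looks like for a table with one declared PK (all values invented for illustration; field names follow printDsConnectOutput above):

{
    "datasource_id": "ds-123",
    "tables": [
        {
            "name": "orders",
            "primary_keys": ["order_id"],
            "columns": [
                { "name": "order_id", "type": "varchar", "comment": "", "is_primary_key": true },
                { "name": "status", "type": "varchar", "comment": "order status" }
            ]
        }
    ]
}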
package/dist/commands/explore-chat.js
CHANGED

@@ -175,7 +175,7 @@ export function registerChatRoutes(getToken, businessDomain) {
                 return;
             }
             const t = await getToken();
-            const
+            const result = await getTracesByConversation({
                 baseUrl: t.baseUrl,
                 accessToken: t.accessToken,
                 agentId,

@@ -183,7 +183,7 @@ export function registerChatRoutes(getToken, businessDomain) {
                 businessDomain,
             });
             res.writeHead(200, { "Content-Type": "application/json; charset=utf-8" });
-            res.end(
+            res.end(JSON.stringify(result));
         }
         catch (error) {
             handleApiError(res, error);
package/dist/commands/import-csv.d.ts
CHANGED

@@ -19,8 +19,6 @@ export interface DagBodyOptions {
     tableExist: boolean;
     data: Array<Record<string, string | null>>;
     fieldMappings: FieldMapping[];
-    /** When true on the first batch (`tableExist` false), use "insert" to force table recreation. */
-    recreate?: boolean;
 }
 /**
  * Read a CSV file and return its headers and rows.
package/dist/commands/import-csv.js
CHANGED

@@ -80,11 +80,9 @@ export function buildFieldMappings(headers) {
  * The DAG has two steps: a manual trigger and the database write.
  */
 export function buildDagBody(options) {
-    const { datasourceId, datasourceType, tableName, tableExist, data, fieldMappings
+    const { datasourceId, datasourceType, tableName, tableExist, data, fieldMappings } = options;
     const ts = Date.now();
-
-    // With --recreate, use "insert" on first batch to force table recreation when schema changed.
-    const operateType = tableExist ? "append" : recreate ? "insert" : "append";
+    const operateType = "append";
     const triggerStep = {
         id: "step-trigger",
         title: "Trigger",
package/dist/commands/model.d.ts
ADDED

@@ -0,0 +1,72 @@
+export interface ModelGlobalParse {
+    rest: string[];
+    businessDomain: string;
+    mfManagerBaseUrl?: string;
+    mfApiBaseUrl?: string;
+    pretty: boolean;
+}
+/** Strip global flags; fill default business domain. */
+export declare function parseModelGlobalFlags(args: string[]): ModelGlobalParse;
+/** Sparse flags for model llm edit (after leading model_id). Exported for unit tests. */
+export interface ParsedLlmSparseEditFlags {
+    name?: string;
+    modelSeries?: string;
+    modelType?: string;
+    maxModelLen?: number;
+    quota?: boolean;
+    modelConfigFile?: string;
+    upstreamUrl?: string;
+    apiModel?: string;
+    apiKey?: string;
+    apiKeyFile?: string;
+}
+/** Exported for unit tests. */
+export declare function parsedLlmSparseEditHasUpdates(p: ParsedLlmSparseEditFlags): boolean;
+/** Exported for unit tests. */
+export declare function parseLlmSparseEditFlags(tail: string[]): ParsedLlmSparseEditFlags;
+/**
+ * Normalize GET /llm/get JSON into a body suitable for POST /llm/edit.
+ * Exported for unit tests.
+ */
+export declare function llmModelGetToEditBase(raw: unknown): Record<string, unknown>;
+/** Apply sparse llm edit flags onto a normalized record from GET. Exported for unit tests. */
+export declare function mergeLlmEditOntoExistingBase(base: Record<string, unknown>, p: ParsedLlmSparseEditFlags): Promise<Record<string, unknown>>;
+/**
+ * Returns registry **model_name** from ``GET /llm/get`` JSON when present. Exported for unit tests.
+ */
+export declare function llmGetRecordModelName(raw: unknown): string;
+/** Registry **model_name** from ``GET /small-model/get``. Exported for unit tests. */
+export declare function smallGetRecordModelName(raw: unknown): string;
+export interface ParsedSmallAddFlags {
+    name?: string;
+    modelType?: "embedding" | "reranker";
+    batchSize?: number;
+    maxTokens?: number;
+    embeddingDim?: number;
+    modelConfigFile?: string;
+    adapter?: boolean;
+    adapterCodeFile?: string;
+    bodyFile?: string;
+    /** Outbound HTTP API base or full path (stored in model_config.api_url). */
+    upstreamUrl?: string;
+    /** Third-party model id / deployment name (model_config.api_model). */
+    apiModel?: string;
+    /** Inline API secret — prefer --api-key-file (shell history risk). */
+    apiKey?: string;
+    apiKeyFile?: string;
+}
+/** True when sparse CLI flags should perform an edit (excluding --body-file). Exported for unit tests. */
+export declare function parsedSmallFlagsHasEditUpdates(p: ParsedSmallAddFlags): boolean;
+/**
+ * Normalize GET /small-model/get JSON into a body suitable for POST /small-model/edit.
+ * Exported for unit tests.
+ */
+export declare function smallModelGetToEditBase(raw: unknown): Record<string, unknown>;
+/**
+ * Apply sparse edit flags onto a normalized small-model record (from GET). Exported for unit tests.
+ */
+export declare function mergeSmallEditOntoExistingBase(base: Record<string, unknown>, p: ParsedSmallAddFlags): Promise<Record<string, unknown>>;
+/** Parse small-model add/edit flags from argv tail (after action). Exported for unit tests. */
+export declare function parseSmallAddFlags(tail: string[]): ParsedSmallAddFlags;
+export declare function buildSmallBodyFromFlags(p: ParsedSmallAddFlags, modelId?: string): Promise<Record<string, unknown>>;
+export declare function runModelCommand(args: string[]): Promise<number>;
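
The declarations outline a GET, merge, POST flow for sparse edits (`kweaver model llm edit <model_id> ...`): fetch the current record, normalize it into an edit-shaped body, then overlay only the flags the user passed. A rough composition sketch (hypothetical; the flag spellings, URL, and model id are placeholders, only the helper names come from model.d.ts above):

import { parseLlmSparseEditFlags, parsedLlmSparseEditHasUpdates, llmModelGetToEditBase, mergeLlmEditOntoExistingBase } from "./model.js";

// Hypothetical argv tail for `kweaver model llm edit m-42 --name renamed --max-model-len 8192`
// (the flag spellings are assumptions; only the helpers are declared in model.d.ts).
const parsed = parseLlmSparseEditFlags(["--name", "renamed", "--max-model-len", "8192"]);

if (parsedLlmSparseEditHasUpdates(parsed)) {
    // 1. GET the current record (placeholder URL; the real CLI goes through the mf-manager API).
    const current = await (await fetch("https://mf-manager.example.invalid/llm/get?model_id=m-42")).json();
    // 2. Normalize the GET payload into an /llm/edit-shaped body.
    const base = llmModelGetToEditBase(current);
    // 3. Overlay only the fields the user actually passed.
    const body = await mergeLlmEditOntoExistingBase(base, parsed);
    // 4. POST body to /llm/edit (omitted here).
    console.log(JSON.stringify(body, null, 2));
}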
|