@kweaver-ai/kweaver-sdk 0.4.10 → 0.4.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +61 -3
- package/README.zh.md +42 -1
- package/dist/api/dataflow.d.ts +78 -0
- package/dist/api/dataflow.js +135 -0
- package/dist/api/dataviews.d.ts +58 -1
- package/dist/api/dataviews.js +150 -1
- package/dist/auth/oauth.d.ts +6 -1
- package/dist/auth/oauth.js +240 -166
- package/dist/cli.js +13 -1
- package/dist/client.d.ts +12 -0
- package/dist/client.js +18 -0
- package/dist/commands/auth.js +36 -16
- package/dist/commands/bkn.js +214 -21
- package/dist/commands/dataview.d.ts +1 -0
- package/dist/commands/dataview.js +244 -0
- package/dist/commands/ds.d.ts +16 -0
- package/dist/commands/ds.js +204 -1
- package/dist/commands/import-csv.d.ts +47 -0
- package/dist/commands/import-csv.js +111 -0
- package/dist/config/store.d.ts +2 -0
- package/dist/config/tls-env.d.ts +8 -0
- package/dist/config/tls-env.js +22 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.js +2 -0
- package/dist/resources/dataflows.d.ts +17 -0
- package/dist/resources/dataflows.js +22 -0
- package/dist/resources/datasources.d.ts +52 -0
- package/dist/resources/datasources.js +54 -0
- package/dist/resources/dataviews.d.ts +28 -0
- package/dist/resources/dataviews.js +34 -0
- package/dist/resources/vega.d.ts +41 -0
- package/dist/resources/vega.js +80 -0
- package/package.json +2 -1
package/dist/commands/ds.js
CHANGED
|
@@ -1,8 +1,13 @@
|
|
|
1
1
|
import { createInterface } from "node:readline";
|
|
2
|
+
import { statSync } from "node:fs";
|
|
3
|
+
import { glob } from "node:fs/promises";
|
|
4
|
+
import { resolve as resolvePath } from "node:path";
|
|
2
5
|
import { ensureValidToken, formatHttpError, with401RefreshRetry } from "../auth/oauth.js";
|
|
3
6
|
import { testDatasource, createDatasource, listDatasources, getDatasource, deleteDatasource, listTablesWithColumns, } from "../api/datasources.js";
|
|
4
7
|
import { formatCallOutput } from "./call.js";
|
|
5
8
|
import { resolveBusinessDomain } from "../config/store.js";
|
|
9
|
+
import { parseCsvFile, buildTableName, splitBatches, buildFieldMappings, buildDagBody, } from "./import-csv.js";
|
|
10
|
+
import { executeDataflow } from "../api/dataflow.js";
|
|
6
11
|
function confirmYes(prompt) {
|
|
7
12
|
return new Promise((resolve) => {
|
|
8
13
|
const rl = createInterface({ input: process.stdin, output: process.stdout });
|
|
@@ -32,7 +37,9 @@ Subcommands:
|
|
|
32
37
|
delete <id> [-y] Delete a datasource
|
|
33
38
|
tables <id> [--keyword X] List tables with columns
|
|
34
39
|
connect <db_type> <host> <port> <database> --account X --password Y [--schema Z] [--name N]
|
|
35
|
-
Test connectivity, register datasource, and discover tables
|
|
40
|
+
Test connectivity, register datasource, and discover tables.
|
|
41
|
+
import-csv <ds-id> --files <glob_or_list> [--table-prefix X] [--batch-size N]
|
|
42
|
+
Import CSV files into datasource tables via dataflow API.`);
|
|
36
43
|
return 0;
|
|
37
44
|
}
|
|
38
45
|
const dispatch = () => {
|
|
@@ -46,6 +53,8 @@ Subcommands:
|
|
|
46
53
|
return runDsTablesCommand(rest);
|
|
47
54
|
if (subcommand === "connect")
|
|
48
55
|
return runDsConnectCommand(rest);
|
|
56
|
+
if (subcommand === "import-csv")
|
|
57
|
+
return runDsImportCsvCommand(rest);
|
|
49
58
|
return Promise.resolve(-1);
|
|
50
59
|
};
|
|
51
60
|
try {
|
|
@@ -288,3 +297,197 @@ async function runDsConnectCommand(args) {
|
|
|
288
297
|
console.log(JSON.stringify(output, null, 2));
|
|
289
298
|
return 0;
|
|
290
299
|
}
|
|
300
|
+
// ── import-csv ────────────────────────────────────────────────────────────────
|
|
301
|
+
const IMPORT_CSV_HELP = `kweaver ds import-csv <ds-id> --files <glob_or_list> [options]
|
|
302
|
+
|
|
303
|
+
Import CSV files into datasource tables via dataflow API.
|
|
304
|
+
|
|
305
|
+
Options:
|
|
306
|
+
--files <s> CSV file paths (comma-separated or glob pattern, required)
|
|
307
|
+
--table-prefix <s> Table name prefix (default: none)
|
|
308
|
+
--batch-size <n> Rows per batch (default: 500, range: 1-10000)
|
|
309
|
+
-bd, --biz-domain Business domain (default: bd_public)`;
|
|
310
|
+
/**
 * Parse CLI arguments for `kweaver ds import-csv`.
 *
 * Recognized: positional <ds-id>, --files <s>, --table-prefix <s>,
 * --batch-size <n> (1..10000), -bd/--biz-domain <s>, and -h/--help.
 * Throws Error("help") on -h/--help, and a descriptive Error on an
 * out-of-range --batch-size. Unknown flags and flags missing their value
 * are silently ignored (same as the historical behavior).
 */
export function parseImportCsvArgs(args) {
    const parsed = {
        datasourceId: "",
        files: "",
        tablePrefix: "",
        batchSize: 500,
        businessDomain: "",
    };
    for (let cursor = 0; cursor < args.length; cursor += 1) {
        const token = args[cursor];
        const next = args[cursor + 1];
        if (token === "--help" || token === "-h") {
            throw new Error("help");
        }
        if (token === "--files" && next) {
            parsed.files = next;
            cursor += 1;
        }
        else if (token === "--table-prefix" && next) {
            parsed.tablePrefix = next;
            cursor += 1;
        }
        else if (token === "--batch-size" && next) {
            const size = Number.parseInt(next, 10);
            if (Number.isNaN(size) || size < 1 || size > 10000) {
                throw new Error("--batch-size must be between 1 and 10000");
            }
            parsed.batchSize = size;
            cursor += 1;
        }
        else if ((token === "-bd" || token === "--biz-domain") && next) {
            parsed.businessDomain = next;
            cursor += 1;
        }
        else if (!token.startsWith("-") && !parsed.datasourceId) {
            // First bare token is the datasource id; later ones are ignored.
            parsed.datasourceId = token;
        }
    }
    if (!parsed.businessDomain) {
        parsed.businessDomain = resolveBusinessDomain();
    }
    return parsed;
}
|
|
348
|
+
export async function resolveFiles(pattern) {
|
|
349
|
+
const parts = pattern.split(",").map((p) => p.trim()).filter(Boolean);
|
|
350
|
+
const result = [];
|
|
351
|
+
for (const part of parts) {
|
|
352
|
+
if (part.includes("*") || part.includes("?")) {
|
|
353
|
+
const matched = [];
|
|
354
|
+
for await (const entry of glob(part)) {
|
|
355
|
+
const p = String(entry);
|
|
356
|
+
if (/\.csv$/i.test(p)) {
|
|
357
|
+
matched.push(resolvePath(p));
|
|
358
|
+
}
|
|
359
|
+
}
|
|
360
|
+
result.push(...matched);
|
|
361
|
+
}
|
|
362
|
+
else {
|
|
363
|
+
const abs = resolvePath(part);
|
|
364
|
+
statSync(abs); // throws if file does not exist
|
|
365
|
+
result.push(abs);
|
|
366
|
+
}
|
|
367
|
+
}
|
|
368
|
+
if (result.length === 0) {
|
|
369
|
+
throw new Error(`No CSV files matched: ${pattern}`);
|
|
370
|
+
}
|
|
371
|
+
return result;
|
|
372
|
+
}
|
|
373
|
+
/**
 * Implementation of `kweaver ds import-csv`.
 *
 * Pipeline:
 *   1. Parse CLI flags (prints help and returns code 0 on -h/--help).
 *   2. Resolve credentials and the CSV file list (comma list or glob).
 *   3. Parse every CSV up front, skipping unreadable/empty files with a
 *      warning on stderr.
 *   4. Import each parsed file in batches via the dataflow API; the first
 *      batch creates the table, later batches append (tableExist flag).
 *
 * Progress and warnings go to stderr; the final JSON summary goes to stdout.
 * Returns { code, tables, tableColumns, sampleRows }; `code` is 1 when any
 * table failed or nothing was importable.
 */
export async function runDsImportCsv(args) {
    let options;
    try {
        options = parseImportCsvArgs(args);
    }
    catch (error) {
        if (error instanceof Error && error.message === "help") {
            console.log(IMPORT_CSV_HELP);
            return { code: 0, tables: [], tableColumns: {}, sampleRows: {} };
        }
        throw error;
    }
    if (!options.datasourceId) {
        console.error("Usage: kweaver ds import-csv <ds-id> --files <glob_or_list> [options]");
        return { code: 1, tables: [], tableColumns: {}, sampleRows: {} };
    }
    if (!options.files) {
        console.error("Error: --files is required");
        return { code: 1, tables: [], tableColumns: {}, sampleRows: {} };
    }
    // 1. Get credentials
    const token = await ensureValidToken();
    const base = { baseUrl: token.baseUrl, accessToken: token.accessToken };
    // 2. Resolve glob / file list
    const filePaths = await resolveFiles(options.files);
    // 3. Get datasource type; the response's type field name varies by API
    //    version, so try the known aliases and fall back to "mysql".
    const dsBody = await getDatasource({ ...base, id: options.datasourceId, businessDomain: options.businessDomain });
    const dsData = JSON.parse(dsBody);
    const datasourceType = String(dsData.type ?? dsData.ds_type ?? dsData.data_type ?? "mysql");
    // Phase 1: parse all files before importing anything, so obviously broken
    // inputs are reported up front.
    const parsed = [];
    for (const filePath of filePaths) {
        const tableName = buildTableName(filePath, options.tablePrefix);
        let csvData;
        try {
            csvData = await parseCsvFile(filePath);
        }
        catch (err) {
            const msg = err instanceof Error ? err.message : String(err);
            console.error(`[${tableName}] skipping — parse error: ${msg}`);
            continue;
        }
        if (csvData.headers.length === 0) {
            console.error(`[${tableName}] skipping — no headers`);
            continue;
        }
        if (csvData.rows.length === 0) {
            console.error(`[${tableName}] skipping — no rows`);
            continue;
        }
        parsed.push({ filePath, tableName, headers: csvData.headers, rows: csvData.rows });
    }
    if (parsed.length === 0) {
        console.error("All files were skipped — nothing to import");
        return { code: 1, tables: [], tableColumns: {}, sampleRows: {} };
    }
    // Phase 2: Import each file in batches
    const succeeded = [];
    const failed = [];
    const tableColumns = {};
    const sampleRows = {};
    for (const { tableName, headers, rows } of parsed) {
        const batches = splitBatches(rows, options.batchSize);
        const fieldMappings = buildFieldMappings(headers);
        let batchFailed = false;
        for (let bIdx = 0; bIdx < batches.length; bIdx += 1) {
            const batch = batches[bIdx];
            // Only the first batch creates the table; later batches append.
            const tableExist = bIdx > 0;
            const batchLabel = `${bIdx + 1}/${batches.length}`;
            const rowCount = batch.length;
            const dagBody = buildDagBody({
                datasourceId: options.datasourceId,
                datasourceType,
                tableName,
                tableExist,
                data: batch,
                fieldMappings,
            });
            const t0 = Date.now();
            process.stderr.write(`[${tableName}] batch ${batchLabel} (${rowCount} rows)... `);
            try {
                await executeDataflow({
                    ...base,
                    businessDomain: options.businessDomain,
                    body: dagBody,
                });
                const elapsed = ((Date.now() - t0) / 1000).toFixed(1);
                process.stderr.write(`${elapsed}s\n`);
            }
            catch (err) {
                const msg = err instanceof Error ? err.message : String(err);
                process.stderr.write(`FAILED\n`);
                console.error(`[${tableName}] batch ${batchLabel} error: ${msg}`);
                batchFailed = true;
                // Abort this table's remaining batches; other tables still run.
                break;
            }
        }
        if (batchFailed) {
            failed.push(tableName);
        }
        else {
            succeeded.push(tableName);
            tableColumns[tableName] = headers;
            // Fix: use the rows already destructured for this entry instead of
            // re-scanning `parsed` with find() — the old lookup was O(n) and,
            // when two files produced the same table name, always captured the
            // FIRST file's rows even if a later file was the one imported.
            sampleRows[tableName] = rows.slice(0, 100);
        }
    }
    // Summary
    console.error(`\nImport complete: ${succeeded.length} succeeded, ${failed.length} failed.`);
    if (failed.length > 0) {
        console.error(`Failed tables: ${failed.join(", ")}`);
    }
    console.log(JSON.stringify({
        tables: succeeded,
        failed,
        summary: { succeeded: succeeded.length, failed: failed.length },
    }, null, 2));
    return { code: failed.length > 0 ? 1 : 0, tables: succeeded, tableColumns, sampleRows };
}
|
|
490
|
+
/**
 * CLI entry point for `kweaver ds import-csv`: runs the import and maps the
 * structured result to a process exit code.
 */
export async function runDsImportCsvCommand(args) {
    const { code } = await runDsImportCsv(args);
    return code;
}
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import type { DataflowCreateBody } from "../api/dataflow.js";
/** Parsed contents of a single CSV file. */
export interface CsvData {
    /** Header-row column names, in file order. */
    headers: string[];
    /** Data rows keyed by header name; empty cells become null. */
    rows: Array<Record<string, string | null>>;
}
/** Maps one CSV column onto one target table column. */
export interface FieldMapping {
    /** CSV-side column. */
    source: {
        name: string;
    };
    /** Database-side column; data_type is a SQL type string (e.g. "VARCHAR(512)"). */
    target: {
        name: string;
        data_type: string;
    };
}
/** Inputs for building the two-step CSV-import DAG (see buildDagBody). */
export interface DagBodyOptions {
    datasourceId: string;
    /** Datasource engine type (e.g. "mysql") reported by the datasource API. */
    datasourceType: string;
    /** Target table name (already prefixed — see buildTableName). */
    tableName: string;
    /** True when the table already exists, so this batch appends rather than creates. */
    tableExist: boolean;
    /** One batch of rows to write. */
    data: Array<Record<string, string | null>>;
    fieldMappings: FieldMapping[];
}
/**
 * Read a CSV file and return its headers and rows.
 * - Strips UTF-8 BOM if present
 * - Converts empty strings to null
 * - Throws on column count mismatch
 */
export declare function parseCsvFile(filePath: string): Promise<CsvData>;
/**
 * Derive a table name from a file path: strip .csv (case-insensitive) and prepend prefix.
 */
export declare function buildTableName(filePath: string, prefix: string): string;
/**
 * Split an array into chunks of at most `batchSize` elements.
 */
export declare function splitBatches<T>(rows: T[], batchSize: number): T[][];
/**
 * Build field mapping descriptors from CSV headers.
 * All target fields default to VARCHAR(512).
 */
export declare function buildFieldMappings(headers: string[]): FieldMapping[];
/**
 * Construct a DataflowCreateBody for a CSV → database write operation.
 * The DAG has two steps: a manual trigger and the database write.
 */
export declare function buildDagBody(options: DagBodyOptions): DataflowCreateBody;
|
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
import { readFile } from "node:fs/promises";
|
|
2
|
+
import { basename } from "node:path";
|
|
3
|
+
import { parse } from "csv-parse/sync";
|
|
4
|
+
// ── parseCsvFile ──────────────────────────────────────────────────────────────
/**
 * Read a CSV file and return its headers and rows.
 * - Strips UTF-8 BOM if present
 * - Converts empty strings to null
 * - Throws on column count mismatch (csv-parse raises when
 *   relax_column_count is false and a row is ragged)
 */
export async function parseCsvFile(filePath) {
    let content = await readFile(filePath, "utf8");
    // Strip UTF-8 BOM (readFile with "utf8" keeps the U+FEFF prefix)
    if (content.charCodeAt(0) === 0xfeff) {
        content = content.slice(1);
    }
    // Parse with columns:true to get key/value rows keyed by the header line
    const records = parse(content, {
        columns: true,
        skip_empty_lines: true,
        trim: true,
        relax_column_count: false,
    });
    // Header-only files yield zero records under columns:true, so re-parse
    // just the first physical row to recover the header names.
    // NOTE(review): if the file starts with a blank line this re-parse
    // (skip_empty_lines:false, to:1) reads that blank line as the header —
    // confirm whether header-only inputs can begin with an empty line.
    if (records.length === 0) {
        const headerRows = parse(content, {
            columns: false,
            skip_empty_lines: false,
            trim: true,
            to: 1,
        });
        const headers = (headerRows[0] ?? []);
        return { headers, rows: [] };
    }
    // Header order is taken from the first record's key order.
    const headers = Object.keys(records[0]);
    // Convert empty strings to null so empty cells become SQL NULLs downstream
    const rows = records.map((record) => {
        const row = {};
        for (const key of headers) {
            const val = record[key];
            row[key] = val === "" ? null : (val ?? null);
        }
        return row;
    });
    return { headers, rows };
}
|
|
47
|
+
// ── buildTableName ────────────────────────────────────────────────────────────
/**
 * Derive a table name from a file path: take the base name, strip one
 * trailing ".csv" extension (case-insensitive), and prepend `prefix`.
 */
export function buildTableName(filePath, prefix) {
    const fileName = basename(filePath);
    const stem = fileName.replace(/\.csv$/i, "");
    return `${prefix}${stem}`;
}
|
|
55
|
+
// ── splitBatches ──────────────────────────────────────────────────────────────
/**
 * Split an array into chunks of at most `batchSize` elements.
 *
 * Throws RangeError when `batchSize` is not a positive integer — without the
 * guard, a 0/negative/NaN step made `i += batchSize` loop forever (or spin
 * without terminating) on any non-empty input.
 */
export function splitBatches(rows, batchSize) {
    if (!Number.isInteger(batchSize) || batchSize < 1) {
        throw new RangeError(`batchSize must be a positive integer, got: ${batchSize}`);
    }
    const batches = [];
    for (let i = 0; i < rows.length; i += batchSize) {
        batches.push(rows.slice(i, i + batchSize));
    }
    return batches;
}
|
|
66
|
+
// ── buildFieldMappings ────────────────────────────────────────────────────────
/**
 * Build field mapping descriptors from CSV headers: each header maps a
 * like-named source column to a like-named target column.
 * All target fields default to VARCHAR(512).
 */
export function buildFieldMappings(headers) {
    const mappings = [];
    for (const columnName of headers) {
        mappings.push({
            source: { name: columnName },
            target: { name: columnName, data_type: "VARCHAR(512)" },
        });
    }
    return mappings;
}
|
|
77
|
+
// ── buildDagBody ──────────────────────────────────────────────────────────────
/**
 * Construct a DataflowCreateBody for a CSV → database write operation.
 * The DAG has two steps: a manual trigger and the database write.
 *
 * The timestamp suffix keeps DAG titles unique across repeated imports of
 * the same table.
 */
export function buildDagBody(options) {
    const { datasourceId, datasourceType, tableName, tableExist, data, fieldMappings } = options;
    const ts = Date.now();
    const triggerStep = {
        id: "step-trigger",
        title: "Trigger",
        // NOTE(review): this step uses "@trigger/manual" while trigger_config
        // below uses "@internal/trigger/manual" — confirm with the dataflow
        // API whether both spellings are intentional.
        operator: "@trigger/manual",
        parameters: {},
    };
    const writeStep = {
        id: "step-write",
        title: "Write to Database",
        operator: "@internal/database/write",
        parameters: {
            datasource_type: datasourceType,
            datasource_id: datasourceId,
            table_name: tableName,
            // false on the first batch (creates the table), true afterwards
            // (callers pass tableExist = bIdx > 0 in runDsImportCsv)
            table_exist: tableExist,
            // "append" so successive batches accumulate into the same table
            operate_type: "append",
            data,
            sync_model_fields: fieldMappings,
        },
    };
    return {
        title: `import-csv-${tableName}-${ts}`,
        description: `CSV import into table ${tableName}`,
        trigger_config: { operator: "@internal/trigger/manual" },
        steps: [triggerStep, writeStep],
    };
}
|
package/dist/config/store.d.ts
CHANGED
|
@@ -8,6 +8,8 @@ export interface TokenConfig {
|
|
|
8
8
|
refreshToken?: string;
|
|
9
9
|
idToken?: string;
|
|
10
10
|
obtainedAt: string;
|
|
11
|
+
/** When true, skip TLS certificate verification for this platform (saved by `kweaver auth --insecure`). */
|
|
12
|
+
tlsInsecure?: boolean;
|
|
11
13
|
}
|
|
12
14
|
/** OAuth2 client registration (per platform), used for refresh_token grant. */
|
|
13
15
|
export interface ClientConfig {
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
/**
 * When a platform was logged in with `--insecure`, the flag is stored on the token.
 * Apply Node TLS verification skip for this process so all `fetch` calls to that
 * platform succeed without per-request options.
 *
 * Also honors `KWEAVER_TLS_INSECURE=1` or `true` (development / scripting only).
 *
 * SECURITY: this sets NODE_TLS_REJECT_UNAUTHORIZED=0, which disables TLS
 * certificate verification for the entire process — every host, not only the
 * saved platform.
 */
export declare function applyTlsEnvFromSavedTokens(): void;
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import { getCurrentPlatform, loadTokenConfig } from "./store.js";
|
|
2
|
+
/**
 * When a platform was logged in with `--insecure`, the flag is stored on the token.
 * Apply Node TLS verification skip for this process so all `fetch` calls to that
 * platform succeed without per-request options.
 *
 * Also honors `KWEAVER_TLS_INSECURE=1` or `true` (development / scripting only).
 *
 * SECURITY: NODE_TLS_REJECT_UNAUTHORIZED=0 disables certificate verification
 * process-wide — for every host this process contacts, not only the saved
 * platform (see the Node.js CLI environment-variable documentation).
 */
export function applyTlsEnvFromSavedTokens() {
    // Explicit env override wins and short-circuits the saved-token lookup.
    if (process.env.KWEAVER_TLS_INSECURE === "1" || process.env.KWEAVER_TLS_INSECURE === "true") {
        process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
        return;
    }
    // Otherwise honor the per-platform flag persisted by `kweaver auth --insecure`.
    const platform = getCurrentPlatform();
    if (!platform) {
        return;
    }
    const token = loadTokenConfig(platform);
    if (token?.tlsInsecure) {
        process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
    }
}
|
package/dist/index.d.ts
CHANGED
|
@@ -49,6 +49,9 @@ export type { AgentConfig, AgentInput, AgentInputField, AgentOutput, AgentLlmCon
|
|
|
49
49
|
export { BknResource } from "./resources/bkn.js";
|
|
50
50
|
export { ConversationsResource } from "./resources/conversations.js";
|
|
51
51
|
export { ContextLoaderResource } from "./resources/context-loader.js";
|
|
52
|
+
export type { ViewField, DataView, CreateDataViewOptions, GetDataViewOptions, ListDataViewsOptions, DeleteDataViewOptions, FindDataViewOptions, } from "./api/dataviews.js";
|
|
53
|
+
export { parseDataView, createDataView, getDataView, listDataViews, deleteDataView, findDataView, } from "./api/dataviews.js";
|
|
54
|
+
export { DataViewsResource } from "./resources/dataviews.js";
|
|
52
55
|
export { HttpError, NetworkRequestError, fetchTextOrThrow } from "./utils/http.js";
|
|
53
56
|
export type { TokenConfig, ContextLoaderEntry, ContextLoaderConfig, } from "./config/store.js";
|
|
54
57
|
export { getConfigDir, getCurrentPlatform } from "./config/store.js";
|
package/dist/index.js
CHANGED
|
@@ -39,6 +39,8 @@ export { AgentsResource } from "./resources/agents.js";
|
|
|
39
39
|
export { BknResource } from "./resources/bkn.js";
|
|
40
40
|
export { ConversationsResource } from "./resources/conversations.js";
|
|
41
41
|
export { ContextLoaderResource } from "./resources/context-loader.js";
|
|
42
|
+
export { parseDataView, createDataView, getDataView, listDataViews, deleteDataView, findDataView, } from "./api/dataviews.js";
|
|
43
|
+
export { DataViewsResource } from "./resources/dataviews.js";
|
|
42
44
|
// ── HTTP utilities ────────────────────────────────────────────────────────────
|
|
43
45
|
export { HttpError, NetworkRequestError, fetchTextOrThrow } from "./utils/http.js";
|
|
44
46
|
export { getConfigDir, getCurrentPlatform } from "./config/store.js";
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import { type DataflowCreateBody, type DataflowResult } from "../api/dataflow.js";
import type { ClientContext } from "../client.js";
/**
 * Dataflow (DAG) operations bound to a ClientContext (base URL, auth token).
 */
export declare class DataflowsResource {
    private readonly ctx;
    constructor(ctx: ClientContext);
    /** Create a DAG from `body`; resolves to the new DAG id. */
    create(body: DataflowCreateBody): Promise<string>;
    /** Trigger a run of an existing DAG. */
    run(dagId: string): Promise<void>;
    /**
     * Poll a DAG for results. `interval`/`timeout` are presumably
     * milliseconds — TODO confirm in api/dataflow.js pollDataflowResults.
     */
    poll(dagId: string, opts?: {
        interval?: number;
        timeout?: number;
    }): Promise<DataflowResult>;
    /** Delete a DAG by id. */
    delete(dagId: string): Promise<void>;
    /**
     * One-shot convenience that creates the DAG and returns its results
     * (presumably create → run → poll — confirm in api/dataflow.js
     * executeDataflow).
     */
    execute(body: DataflowCreateBody, opts?: {
        interval?: number;
        timeout?: number;
    }): Promise<DataflowResult>;
}
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import { createDataflow, runDataflow, pollDataflowResults, deleteDataflow, executeDataflow, } from "../api/dataflow.js";
|
|
2
|
+
/**
 * Dataflow (DAG) operations bound to a client context. Every method forwards
 * to the matching function in ../api/dataflow.js with `ctx.base()`
 * (base URL / auth fields) spread into the request options.
 */
export class DataflowsResource {
    ctx;
    constructor(ctx) {
        this.ctx = ctx;
    }
    /** Create a DAG from `body`; resolves to the new DAG id. */
    async create(body) {
        const request = { ...this.ctx.base(), body };
        return createDataflow(request);
    }
    /** Trigger a run of an existing DAG. */
    async run(dagId) {
        const request = { ...this.ctx.base(), dagId };
        return runDataflow(request);
    }
    /** Poll a DAG for results; `opts` may carry interval/timeout overrides. */
    async poll(dagId, opts = {}) {
        const request = { ...this.ctx.base(), dagId, ...opts };
        return pollDataflowResults(request);
    }
    /** Delete a DAG by id. */
    async delete(dagId) {
        const request = { ...this.ctx.base(), dagId };
        return deleteDataflow(request);
    }
    /** One-shot convenience: create the DAG and return its results. */
    async execute(body, opts = {}) {
        const request = { ...this.ctx.base(), body, ...opts };
        return executeDataflow(request);
    }
}
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
import type { ClientContext } from "../client.js";
/**
 * Datasource operations bound to a ClientContext (base URL, auth token).
 * List/get results are parsed JSON whose exact shape depends on the server,
 * hence the `unknown` element types.
 */
export declare class DataSourcesResource {
    private readonly ctx;
    constructor(ctx: ClientContext);
    /** Test connectivity with the given connection parameters; rejects on failure. */
    test(opts: {
        type: string;
        host: string;
        port: number;
        database: string;
        account: string;
        password: string;
        schema?: string;
    }): Promise<void>;
    /** Register a new datasource; resolves to the parsed API response. */
    create(opts: {
        name: string;
        type: string;
        host: string;
        port: number;
        database: string;
        account: string;
        password: string;
        schema?: string;
        comment?: string;
    }): Promise<unknown>;
    /** List datasources, optionally filtered by `keyword` and/or `type`. */
    list(opts?: {
        keyword?: string;
        type?: string;
    }): Promise<unknown[]>;
    /** Fetch one datasource by id (parsed API response). */
    get(id: string): Promise<unknown>;
    /** Delete a datasource by id. */
    delete(id: string): Promise<void>;
    /** List table names; supports a keyword filter and limit/offset paging. */
    listTables(id: string, opts?: {
        keyword?: string;
        limit?: number;
        offset?: number;
    }): Promise<unknown[]>;
    /**
     * List tables together with their column metadata.
     * `autoScan` presumably triggers a metadata scan when none exists —
     * TODO confirm in api/datasources.js.
     */
    listTablesWithColumns(id: string, opts?: {
        keyword?: string;
        limit?: number;
        offset?: number;
        autoScan?: boolean;
    }): Promise<Array<{
        name: string;
        columns: Array<{
            name: string;
            type: string;
            comment?: string;
        }>;
    }>>;
    /** Start a metadata scan; resolves to the raw response body. */
    scanMetadata(id: string, opts?: {
        dsType?: string;
    }): Promise<string>;
}
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
import { testDatasource, createDatasource, listDatasources, getDatasource, deleteDatasource, listTables, listTablesWithColumns, scanMetadata, } from "../api/datasources.js";
|
|
2
|
+
/**
 * Normalize a parsed list payload: accept a bare array, or the first
 * non-nullish of the given keys on an object (mirrors the historical
 * `obj.entries ?? obj.data ?? ...` chain — a non-array value under the
 * first present key still yields []).
 */
function extractListItems(parsed, keys) {
    if (Array.isArray(parsed))
        return parsed;
    if (parsed && typeof parsed === "object") {
        const obj = parsed;
        for (const key of keys) {
            const value = obj[key];
            if (value !== null && value !== undefined) {
                return Array.isArray(value) ? value : [];
            }
        }
    }
    return [];
}
/**
 * Datasource operations bound to a client context. Each method forwards to
 * ../api/datasources.js with `ctx.base()` (base URL / auth) spread in, and
 * parses the raw JSON response where callers expect structured data.
 *
 * Consolidation: list() and listTables() previously duplicated the
 * payload-normalization logic inline; both now share extractListItems
 * (with their original, slightly different key orders preserved).
 */
export class DataSourcesResource {
    ctx;
    constructor(ctx) {
        this.ctx = ctx;
    }
    /** Test connectivity without registering the datasource; rejects on failure. */
    async test(opts) {
        await testDatasource({ ...this.ctx.base(), ...opts });
    }
    /** Register a new datasource; resolves to the parsed API response. */
    async create(opts) {
        const raw = await createDatasource({ ...this.ctx.base(), ...opts });
        return JSON.parse(raw);
    }
    /** List datasources, optionally filtered by keyword/type. */
    async list(opts = {}) {
        const raw = await listDatasources({ ...this.ctx.base(), ...opts });
        return extractListItems(JSON.parse(raw), ["entries", "data", "records"]);
    }
    /** Fetch one datasource by id (parsed API response). */
    async get(id) {
        const raw = await getDatasource({ ...this.ctx.base(), id });
        return JSON.parse(raw);
    }
    /** Delete a datasource by id. */
    async delete(id) {
        await deleteDatasource({ ...this.ctx.base(), id });
    }
    /** List table names; supports keyword filter and limit/offset paging. */
    async listTables(id, opts = {}) {
        const raw = await listTables({ ...this.ctx.base(), id, ...opts });
        return extractListItems(JSON.parse(raw), ["entries", "data"]);
    }
    /** List tables together with their column metadata (parsed JSON). */
    async listTablesWithColumns(id, opts = {}) {
        const raw = await listTablesWithColumns({ ...this.ctx.base(), id, ...opts });
        return JSON.parse(raw);
    }
    /** Start a metadata scan; resolves to the raw response body. */
    async scanMetadata(id, opts = {}) {
        return scanMetadata({ ...this.ctx.base(), id, ...opts });
    }
}
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
import type { DataView } from "../api/dataviews.js";
import type { ClientContext } from "../client.js";
/**
 * Data-view operations bound to a ClientContext (base URL, auth token).
 */
export declare class DataViewsResource {
    private readonly ctx;
    constructor(ctx: ClientContext);
    /** Create a data view over one datasource table; resolves to the new view's id. */
    create(opts: {
        name: string;
        datasourceId: string;
        table: string;
        /** Optional explicit field list; when omitted, fields are presumably derived server-side — confirm in api/dataviews.js. */
        fields?: Array<{
            name: string;
            type: string;
        }>;
    }): Promise<string>;
    /** Fetch a single data view by id. */
    get(id: string): Promise<DataView>;
    /** List data views, optionally filtered by datasource/type and capped at `limit`. */
    list(opts?: {
        datasourceId?: string;
        type?: string;
        limit?: number;
    }): Promise<DataView[]>;
    /**
     * Search views by name. `exact` requires a full-name match; `wait`
     * presumably retries until a match appears or `timeoutMs` elapses —
     * confirm the polling semantics in api/dataviews.js findDataView.
     */
    find(name: string, opts?: {
        datasourceId?: string;
        exact?: boolean;
        wait?: boolean;
        timeoutMs?: number;
    }): Promise<DataView[]>;
    /** Delete a data view by id. */
    delete(id: string): Promise<void>;
}
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import { createDataView, deleteDataView, findDataView, getDataView, listDataViews, } from "../api/dataviews.js";
|
|
2
|
+
/**
 * Data-view CRUD operations bound to a client context. Thin wrappers that
 * spread `ctx.base()` (base URL / auth) into each ../api/dataviews.js call.
 */
export class DataViewsResource {
    ctx;
    constructor(ctx) {
        this.ctx = ctx;
    }
    /** Create a data view; resolves to the new view's id. */
    async create(opts) {
        const request = { ...this.ctx.base(), ...opts };
        return createDataView(request);
    }
    /** Fetch a single data view by id. */
    async get(id) {
        const request = { ...this.ctx.base(), id };
        return getDataView(request);
    }
    /** List data views, optionally filtered by datasource/type and capped at `limit`. */
    async list({ datasourceId, type, limit } = {}) {
        return listDataViews({
            ...this.ctx.base(),
            datasourceId,
            type,
            limit,
        });
    }
    /** Search views by name; see api/dataviews.js findDataView for exact/wait semantics. */
    async find(name, opts) {
        const { datasourceId, exact, wait, timeoutMs } = opts ?? {};
        return findDataView({
            ...this.ctx.base(),
            name,
            datasourceId,
            exact,
            wait,
            timeoutMs,
        });
    }
    /** Delete a data view by id. */
    async delete(id) {
        await deleteDataView({ ...this.ctx.base(), id });
    }
}
|