@kweaver-ai/kweaver-sdk 0.4.9 → 0.4.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +28 -1
- package/README.zh.md +28 -1
- package/dist/api/dataflow.d.ts +78 -0
- package/dist/api/dataflow.js +135 -0
- package/dist/api/dataviews.js +49 -0
- package/dist/auth/oauth.d.ts +6 -1
- package/dist/auth/oauth.js +240 -166
- package/dist/cli.js +4 -2
- package/dist/client.js +2 -0
- package/dist/commands/auth.js +45 -15
- package/dist/commands/bkn.d.ts +16 -0
- package/dist/commands/bkn.js +213 -25
- package/dist/commands/ds.d.ts +16 -0
- package/dist/commands/ds.js +204 -1
- package/dist/commands/import-csv.d.ts +47 -0
- package/dist/commands/import-csv.js +111 -0
- package/dist/config/store.d.ts +2 -0
- package/dist/config/tls-env.d.ts +8 -0
- package/dist/config/tls-env.js +22 -0
- package/package.json +2 -1
package/dist/commands/import-csv.d.ts ADDED

```diff
@@ -0,0 +1,47 @@
+import type { DataflowCreateBody } from "../api/dataflow.js";
+export interface CsvData {
+    headers: string[];
+    rows: Array<Record<string, string | null>>;
+}
+export interface FieldMapping {
+    source: {
+        name: string;
+    };
+    target: {
+        name: string;
+        data_type: string;
+    };
+}
+export interface DagBodyOptions {
+    datasourceId: string;
+    datasourceType: string;
+    tableName: string;
+    tableExist: boolean;
+    data: Array<Record<string, string | null>>;
+    fieldMappings: FieldMapping[];
+}
+/**
+ * Read a CSV file and return its headers and rows.
+ * - Strips UTF-8 BOM if present
+ * - Converts empty strings to null
+ * - Throws on column count mismatch
+ */
+export declare function parseCsvFile(filePath: string): Promise<CsvData>;
+/**
+ * Derive a table name from a file path: strip .csv (case-insensitive) and prepend prefix.
+ */
+export declare function buildTableName(filePath: string, prefix: string): string;
+/**
+ * Split an array into chunks of at most `batchSize` elements.
+ */
+export declare function splitBatches<T>(rows: T[], batchSize: number): T[][];
+/**
+ * Build field mapping descriptors from CSV headers.
+ * All target fields default to VARCHAR(512).
+ */
+export declare function buildFieldMappings(headers: string[]): FieldMapping[];
+/**
+ * Construct a DataflowCreateBody for a CSV → database write operation.
+ * The DAG has two steps: a manual trigger and the database write.
+ */
+export declare function buildDagBody(options: DagBodyOptions): DataflowCreateBody;
```
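To make the declared helpers concrete before the implementation below, here is a small illustrative sketch. It is not part of the package: the deep import path and the sample values are assumptions, and it relies only on the behavior the doc comments above state.

```ts
// Illustrative only — the import path assumes these compiled helpers are
// reachable at this location inside the published package.
import { buildFieldMappings, splitBatches } from "@kweaver-ai/kweaver-sdk/dist/commands/import-csv.js";

const headers = ["id", "name", "email"];

// Per the declaration above, every target column defaults to VARCHAR(512):
const mappings = buildFieldMappings(headers);
// [{ source: { name: "id" }, target: { name: "id", data_type: "VARCHAR(512)" } }, ...]

// 2500 rows with batchSize 1000 split into chunks of 1000, 1000, 500:
const rows = Array.from({ length: 2500 }, (_, i) => ({ id: String(i), name: null, email: null }));
console.log(splitBatches(rows, 1000).map((b) => b.length)); // [1000, 1000, 500]
```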
package/dist/commands/import-csv.js ADDED

```diff
@@ -0,0 +1,111 @@
+import { readFile } from "node:fs/promises";
+import { basename } from "node:path";
+import { parse } from "csv-parse/sync";
+// ── parseCsvFile ──────────────────────────────────────────────────────────────
+/**
+ * Read a CSV file and return its headers and rows.
+ * - Strips UTF-8 BOM if present
+ * - Converts empty strings to null
+ * - Throws on column count mismatch
+ */
+export async function parseCsvFile(filePath) {
+    let content = await readFile(filePath, "utf8");
+    // Strip UTF-8 BOM
+    if (content.charCodeAt(0) === 0xfeff) {
+        content = content.slice(1);
+    }
+    // Parse with columns:true to get key/value rows
+    const records = parse(content, {
+        columns: true,
+        skip_empty_lines: true,
+        trim: true,
+        relax_column_count: false,
+    });
+    // If no records, parse just first row to extract headers
+    if (records.length === 0) {
+        const headerRows = parse(content, {
+            columns: false,
+            skip_empty_lines: false,
+            trim: true,
+            to: 1,
+        });
+        const headers = (headerRows[0] ?? []);
+        return { headers, rows: [] };
+    }
+    const headers = Object.keys(records[0]);
+    // Convert empty strings to null
+    const rows = records.map((record) => {
+        const row = {};
+        for (const key of headers) {
+            const val = record[key];
+            row[key] = val === "" ? null : (val ?? null);
+        }
+        return row;
+    });
+    return { headers, rows };
+}
+// ── buildTableName ────────────────────────────────────────────────────────────
+/**
+ * Derive a table name from a file path: strip .csv (case-insensitive) and prepend prefix.
+ */
+export function buildTableName(filePath, prefix) {
+    const base = basename(filePath).replace(/\.csv$/i, "");
+    return prefix + base;
+}
+// ── splitBatches ──────────────────────────────────────────────────────────────
+/**
+ * Split an array into chunks of at most `batchSize` elements.
+ */
+export function splitBatches(rows, batchSize) {
+    const batches = [];
+    for (let i = 0; i < rows.length; i += batchSize) {
+        batches.push(rows.slice(i, i + batchSize));
+    }
+    return batches;
+}
+// ── buildFieldMappings ────────────────────────────────────────────────────────
+/**
+ * Build field mapping descriptors from CSV headers.
+ * All target fields default to VARCHAR(512).
+ */
+export function buildFieldMappings(headers) {
+    return headers.map((name) => ({
+        source: { name },
+        target: { name, data_type: "VARCHAR(512)" },
+    }));
+}
+// ── buildDagBody ──────────────────────────────────────────────────────────────
+/**
+ * Construct a DataflowCreateBody for a CSV → database write operation.
+ * The DAG has two steps: a manual trigger and the database write.
+ */
+export function buildDagBody(options) {
+    const { datasourceId, datasourceType, tableName, tableExist, data, fieldMappings } = options;
+    const ts = Date.now();
+    const triggerStep = {
+        id: "step-trigger",
+        title: "Trigger",
+        operator: "@trigger/manual",
+        parameters: {},
+    };
+    const writeStep = {
+        id: "step-write",
+        title: "Write to Database",
+        operator: "@internal/database/write",
+        parameters: {
+            datasource_type: datasourceType,
+            datasource_id: datasourceId,
+            table_name: tableName,
+            table_exist: tableExist,
+            operate_type: "append",
+            data,
+            sync_model_fields: fieldMappings,
+        },
+    };
+    return {
+        title: `import-csv-${tableName}-${ts}`,
+        description: `CSV import into table ${tableName}`,
+        trigger_config: { operator: "@internal/trigger/manual" },
+        steps: [triggerStep, writeStep],
+    };
+}
```
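Taken together, the helpers compose into a simple import pipeline. The sketch below is illustrative rather than part of the package: `createDataflow` is a hypothetical stand-in for whatever client method in `dist/api/dataflow.js` actually submits a `DataflowCreateBody`, the datasource values are placeholders, and it assumes `table_exist` signals whether the target table already exists (so only the first batch creates it).

```ts
// Illustrative import path; adjust to however the package exposes these helpers.
import { parseCsvFile, buildTableName, buildFieldMappings, splitBatches, buildDagBody } from "./import-csv.js";

// Hypothetical submit function; the real client lives in dist/api/dataflow.js.
declare function createDataflow(body: unknown): Promise<void>;

async function importCsv(filePath: string): Promise<void> {
  const { headers, rows } = await parseCsvFile(filePath);
  const tableName = buildTableName(filePath, "import_"); // "data.csv" -> "import_data"
  const fieldMappings = buildFieldMappings(headers);

  // One dataflow per batch; assumption: the first batch creates the table,
  // later batches append into the now-existing table.
  for (const [i, data] of splitBatches(rows, 1000).entries()) {
    await createDataflow(buildDagBody({
      datasourceId: "ds-placeholder", // placeholder datasource id
      datasourceType: "mysql",        // placeholder datasource type
      tableName,
      tableExist: i > 0,
      data,
      fieldMappings,
    }));
  }
}
```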
package/dist/config/store.d.ts CHANGED

```diff
@@ -8,6 +8,8 @@ export interface TokenConfig {
     refreshToken?: string;
     idToken?: string;
     obtainedAt: string;
+    /** When true, skip TLS certificate verification for this platform (saved by `kweaver auth --insecure`). */
+    tlsInsecure?: boolean;
 }
 /** OAuth2 client registration (per platform), used for refresh_token grant. */
 export interface ClientConfig {
```
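For reference, a saved token record carrying the new flag would look roughly like this. The hunk only shows `TokenConfig` from line 8 onward, so earlier fields are omitted here and every value is invented:

```ts
import type { TokenConfig } from "./store.js";

// Partial because the diff does not show the interface's earlier fields.
const token: Partial<TokenConfig> = {
  refreshToken: "…",                 // invented placeholder
  idToken: "…",
  obtainedAt: new Date().toISOString(),
  tlsInsecure: true,                 // written by `kweaver auth --insecure`
};
```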
package/dist/config/tls-env.d.ts ADDED

```diff
@@ -0,0 +1,8 @@
+/**
+ * When a platform was logged in with `--insecure`, the flag is stored on the token.
+ * Apply Node TLS verification skip for this process so all `fetch` calls to that
+ * platform succeed without per-request options.
+ *
+ * Also honors `KWEAVER_TLS_INSECURE=1` or `true` (development / scripting only).
+ */
+export declare function applyTlsEnvFromSavedTokens(): void;
```
package/dist/config/tls-env.js ADDED

```diff
@@ -0,0 +1,22 @@
+import { getCurrentPlatform, loadTokenConfig } from "./store.js";
+/**
+ * When a platform was logged in with `--insecure`, the flag is stored on the token.
+ * Apply Node TLS verification skip for this process so all `fetch` calls to that
+ * platform succeed without per-request options.
+ *
+ * Also honors `KWEAVER_TLS_INSECURE=1` or `true` (development / scripting only).
+ */
+export function applyTlsEnvFromSavedTokens() {
+    if (process.env.KWEAVER_TLS_INSECURE === "1" || process.env.KWEAVER_TLS_INSECURE === "true") {
+        process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
+        return;
+    }
+    const platform = getCurrentPlatform();
+    if (!platform) {
+        return;
+    }
+    const token = loadTokenConfig(platform);
+    if (token?.tlsInsecure) {
+        process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
+    }
+}
```
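One behavioral note, grounded in how Node reads this variable: `NODE_TLS_REJECT_UNAUTHORIZED=0` disables certificate verification for every outgoing TLS connection in the process, not only for the flagged platform, which is presumably why the doc comment scopes it to development and scripting. A caller would therefore invoke the function once, early in CLI startup, before the first request. A minimal sketch (the surrounding bootstrap is hypothetical):

```ts
import { applyTlsEnvFromSavedTokens } from "./config/tls-env.js";

// Must run before the first fetch: Node consults NODE_TLS_REJECT_UNAUTHORIZED
// each time a TLS connection is established.
applyTlsEnvFromSavedTokens();

// ... hypothetical remainder of the CLI bootstrap (argument parsing, dispatch).
```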
package/package.json CHANGED

```diff
@@ -1,6 +1,6 @@
 {
   "name": "@kweaver-ai/kweaver-sdk",
-  "version": "0.4.9",
+  "version": "0.4.11",
   "description": "KWeaver TypeScript SDK — CLI tool and programmatic API for knowledge networks and Decision Agents.",
   "type": "module",
   "main": "./dist/index.js",
@@ -67,6 +67,7 @@
   "@kweaver-ai/bkn": "^0.1.0",
   "@playwright/test": "^1.58.2",
   "chardet": "^2.1.1",
+  "csv-parse": "^6.2.1",
   "iconv-lite": "^0.7.2",
   "ink": "^6.8.0",
   "ink-spinner": "^5.0.0",
```