@doufunao123/cognee-admin 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +84 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +1107 -0
- package/package.json +38 -0
package/README.md
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
# @doufunao123/cognee-admin
|
|
2
|
+
|
|
3
|
+
A lightweight npm thin client for the Cognee API. It mirrors the Rust `cognee-admin` client commands but ships as a Node.js CLI for `npx` usage.
|
|
4
|
+
|
|
5
|
+
## Install
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
npm install -g @doufunao123/cognee-admin
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
Or run directly:
|
|
12
|
+
|
|
13
|
+
```bash
|
|
14
|
+
npx @doufunao123/cognee-admin describe
|
|
15
|
+
```
|
|
16
|
+
|
|
17
|
+
## Auth Sharing
|
|
18
|
+
|
|
19
|
+
The npm CLI reads and writes the same auth file as the Rust CLI:
|
|
20
|
+
|
|
21
|
+
```json
|
|
22
|
+
{
|
|
23
|
+
"cognee_jwt": "eyJ...",
|
|
24
|
+
"admin_token": "ca_...",
|
|
25
|
+
"cognee_url": "https://cogneeapi.xiaomao.chat"
|
|
26
|
+
}
|
|
27
|
+
```
|
|
28
|
+
|
|
29
|
+
Location: `~/.config/cognee-admin/auth.json`
|
|
30
|
+
|
|
31
|
+
Resolution order:
|
|
32
|
+
|
|
33
|
+
1. `--cognee-url` / `--cognee-jwt`
|
|
34
|
+
2. `COGNEE_URL` / `COGNEE_JWT`
|
|
35
|
+
3. `~/.config/cognee-admin/auth.json`
|
|
36
|
+
4. Built-in default URL: `https://cogneeapi.xiaomao.chat`
|
|
37
|
+
|
|
38
|
+
## Usage
|
|
39
|
+
|
|
40
|
+
```bash
|
|
41
|
+
cognee-admin login --username alice --password secret
|
|
42
|
+
cognee-admin health --detailed
|
|
43
|
+
cognee-admin dataset list
|
|
44
|
+
cognee-admin data add --dataset docs "hello world"
|
|
45
|
+
cognee-admin data add-file --dataset docs ./notes.md
|
|
46
|
+
cognee-admin data add-dir --dataset docs --glob "*.md" ./content
|
|
47
|
+
cognee-admin cognify --dataset-name docs --background
|
|
48
|
+
cognee-admin search "agent memory" --datasets docs,notes --top-k 10
|
|
49
|
+
cognee-admin search history
|
|
50
|
+
cognee-admin config get
|
|
51
|
+
cognee-admin config set '{"llm":{"provider":"openai","model":"gpt-5.4","api_key":"sk-..."}}'
|
|
52
|
+
cognee-admin ontology list
|
|
53
|
+
cognee-admin ontology upload --key game ./ontology.owl
|
|
54
|
+
cognee-admin describe
|
|
55
|
+
```
|
|
56
|
+
|
|
57
|
+
## Output
|
|
58
|
+
|
|
59
|
+
JSON is the default output format:
|
|
60
|
+
|
|
61
|
+
```json
|
|
62
|
+
{
|
|
63
|
+
"ok": true,
|
|
64
|
+
"command": "dataset.list",
|
|
65
|
+
"data": {
|
|
66
|
+
"items": []
|
|
67
|
+
}
|
|
68
|
+
}
|
|
69
|
+
```
|
|
70
|
+
|
|
71
|
+
Use `--human` for readable output.
|
|
72
|
+
|
|
73
|
+
## Commands
|
|
74
|
+
|
|
75
|
+
- `health [--detailed] [--watch] [--interval N]`
|
|
76
|
+
- `login --username X --password Y`
|
|
77
|
+
- `dataset list|create|delete|delete-all|status|graph`
|
|
78
|
+
- `data add|add-file|add-dir|list|delete|raw|update`
|
|
79
|
+
- `cognify [--dataset-id X] [--dataset-name X] [--custom-prompt Y] [--custom-prompt-file path] [--background] [--chunks-per-batch N]`
|
|
80
|
+
- `search <query> [--search-type X] [--top-k N] [--datasets a,b] [--verbose]`
|
|
81
|
+
- `search history`
|
|
82
|
+
- `config get|set`
|
|
83
|
+
- `ontology upload|list`
|
|
84
|
+
- `describe [command]`
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,1107 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
// src/index.ts
|
|
4
|
+
import { Command as Command10 } from "commander";
|
|
5
|
+
|
|
6
|
+
// src/commands/cognify.ts
|
|
7
|
+
import { readFile as readFile2 } from "fs/promises";
|
|
8
|
+
import { Command } from "commander";
|
|
9
|
+
|
|
10
|
+
// src/errors.ts
|
|
11
|
+
// src/errors.ts
// Typed CLI error carrying a machine-readable code, the process exit code,
// and an optional remediation hint shown to the user.
var CogneeAdminError = class extends Error {
  code;
  exitCode;
  suggestion;
  /**
   * @param message human-readable description
   * @param options { code, exitCode?, suggestion? }; exitCode defaults to 1
   */
  constructor(message, { code, exitCode, suggestion }) {
    super(message);
    this.name = "CogneeAdminError";
    this.code = code;
    this.exitCode = exitCode ?? 1;
    this.suggestion = suggestion;
  }
};
// Configuration / local-environment problem (exit code 1).
function configError(message, suggestion) {
  const hint = suggestion ?? "Run cognee-admin health to verify configuration";
  return new CogneeAdminError(message, { code: "CONFIG_ERROR", exitCode: 1, suggestion: hint });
}
// Resource missing on the server (HTTP 404; exit code 1).
function notFoundError(message) {
  return new CogneeAdminError(message, { code: "NOT_FOUND", exitCode: 1 });
}
// Non-2xx response from the Cognee API (exit code 3).
function apiError(message, suggestion) {
  const hint = suggestion ?? "Check if Cognee is running: cognee-admin health";
  return new CogneeAdminError(message, { code: "COGNEE_API_ERROR", exitCode: 3, suggestion: hint });
}
// Transport-level failure (fetch rejected; exit code 3).
function httpClientError(message, suggestion) {
  const hint = suggestion ?? "Check network connectivity to Cognee API";
  return new CogneeAdminError(message, { code: "HTTP_CLIENT_ERROR", exitCode: 3, suggestion: hint });
}
// Unexpected internal failure (exit code 2).
function internalError(message) {
  return new CogneeAdminError(message, { code: "INTERNAL_ERROR", exitCode: 2 });
}
// Coerce any thrown value into a CogneeAdminError; passes ours through as-is.
function normalizeError(error2) {
  if (error2 instanceof CogneeAdminError) {
    return error2;
  }
  const message = error2 instanceof Error ? error2.message : String(error2);
  return internalError(message);
}
|
|
65
|
+
|
|
66
|
+
// src/client.ts
|
|
67
|
+
import { readFile } from "fs/promises";
|
|
68
|
+
import { basename } from "path";
|
|
69
|
+
/**
 * Thin HTTP wrapper around the Cognee REST API.
 *
 * Every method resolves with the parsed response body: JSON when parseable,
 * raw text otherwise, null for an empty body (see parseResponse). Failures
 * are raised as CogneeAdminError via httpClientError / notFoundError /
 * apiError, so callers can rely on code/exitCode/suggestion fields.
 */
var CogneeClient = class {
  /**
   * @param baseUrl API origin, e.g. "https://cogneeapi.xiaomao.chat"
   * @param jwt optional bearer token; when set it is attached to every request
   */
  constructor(baseUrl, jwt) {
    this.baseUrl = baseUrl;
    this.jwt = jwt;
  }
  // Verb helpers delegating to request().
  async get(path) {
    return this.request("GET", path);
  }
  async post(path, body) {
    return this.request("POST", path, { body });
  }
  async delete(path) {
    return this.request("DELETE", path);
  }
  /**
   * Upload local files to a dataset in a single multipart request.
   * Each file is appended under the "data" field using its base name.
   */
  async uploadFiles(datasetName, files) {
    const form = new FormData();
    form.set("datasetName", datasetName);
    for (const filePath of files) {
      form.append("data", await fileBlobFromPath(filePath), basename(filePath));
    }
    return this.request("POST", "/api/v1/add", { form });
  }
  // Basic liveness probe.
  async health() {
    return this.get("/health");
  }
  // Detailed health report.
  async healthDetailed() {
    return this.get("/health/detailed");
  }
  /**
   * Password login. Credentials are sent form-encoded (not JSON) because the
   * auth endpoint expects application/x-www-form-urlencoded.
   */
  async login(username, password) {
    const body = new URLSearchParams({ username, password });
    return this.request("POST", "/api/v1/auth/login", {
      body,
      headers: {
        "content-type": "application/x-www-form-urlencoded"
      }
    });
  }
  // --- Dataset endpoints ---
  async datasets() {
    return this.get("/api/v1/datasets");
  }
  async createDataset(name) {
    return this.post("/api/v1/datasets", { name });
  }
  async deleteDataset(id) {
    return this.delete(`/api/v1/datasets/${encodeURIComponent(id)}`);
  }
  // Deletes every dataset for the authenticated user.
  async deleteAllDatasets() {
    return this.delete("/api/v1/datasets");
  }
  async datasetStatus() {
    return this.get("/api/v1/datasets/status");
  }
  async datasetGraph(id) {
    return this.get(`/api/v1/datasets/${encodeURIComponent(id)}/graph`);
  }
  async datasetData(id) {
    return this.get(`/api/v1/datasets/${encodeURIComponent(id)}/data`);
  }
  // --- Data-item endpoints ---
  async deleteData(datasetId, dataId) {
    return this.delete(
      `/api/v1/datasets/${encodeURIComponent(datasetId)}/data/${encodeURIComponent(dataId)}`
    );
  }
  async rawData(datasetId, dataId) {
    return this.get(
      `/api/v1/datasets/${encodeURIComponent(datasetId)}/data/${encodeURIComponent(dataId)}/raw`
    );
  }
  /**
   * Add inline text content to a dataset; the text is wrapped in a
   * text/plain blob named "inline.txt" for the multipart upload.
   */
  async addData(datasetName, content) {
    const form = new FormData();
    form.set("datasetName", datasetName);
    form.append("data", new Blob([content], { type: "text/plain" }), "inline.txt");
    return this.request("POST", "/api/v1/add", { form });
  }
  // Single-file convenience wrapper over uploadFiles.
  async uploadFile(datasetName, filePath) {
    return this.uploadFiles(datasetName, [filePath]);
  }
  /**
   * Replace an existing data item's content with a new file. Target item is
   * addressed via query parameters, the new content travels as multipart.
   */
  async updateData(datasetId, dataId, filePath) {
    const form = new FormData();
    form.append("data", await fileBlobFromPath(filePath), basename(filePath));
    return this.request("PATCH", "/api/v1/update", {
      form,
      query: {
        dataset_id: datasetId,
        data_id: dataId
      }
    });
  }
  // Trigger the cognify pipeline; `options` is passed through as the JSON body.
  async cognify(options) {
    return this.post("/api/v1/cognify", options);
  }
  /**
   * Run a search. Defaults: search_type "INSIGHTS", top_k 5. The datasets
   * filter is only included when a non-empty list is supplied.
   */
  async search(query, options = {}) {
    return this.post("/api/v1/search", {
      query,
      search_type: options.search_type ?? "INSIGHTS",
      top_k: options.top_k ?? 5,
      ...options.datasets && options.datasets.length > 0 ? { datasets: options.datasets } : {}
    });
  }
  // GET on the search endpoint returns past queries.
  async searchHistory() {
    return this.get("/api/v1/search");
  }
  // --- Settings endpoints ---
  async getSettings() {
    return this.get("/api/v1/settings");
  }
  async saveSettings(settings) {
    return this.post("/api/v1/settings", settings);
  }
  // --- Ontology endpoints ---
  async uploadOntology(key, filePath) {
    const form = new FormData();
    form.set("ontology_key", key);
    form.append("ontology_file", await fileBlobFromPath(filePath), basename(filePath));
    return this.request("POST", "/api/v1/ontologies", { form });
  }
  async listOntologies() {
    return this.get("/api/v1/ontologies");
  }
  /**
   * Core request plumbing shared by all endpoints.
   *
   * Body selection: multipart form > URLSearchParams (form-encoded) >
   * JSON-stringified object (sets content-type). Adds the bearer header when
   * a JWT is configured. Network failures become HTTP_CLIENT_ERROR; non-2xx
   * responses become NOT_FOUND (404) or COGNEE_API_ERROR with a truncated
   * body preview.
   */
  async request(method, path, options = {}) {
    // Trailing slash keeps URL resolution from dropping the last base segment.
    const url = new URL(path, ensureTrailingSlash(this.baseUrl));
    if (options.query) {
      for (const [key, value] of Object.entries(options.query)) {
        url.searchParams.set(key, value);
      }
    }
    const headers = new Headers(options.headers);
    if (this.jwt) {
      headers.set("authorization", `Bearer ${this.jwt}`);
    }
    let body;
    if (options.form) {
      // FormData: fetch sets the multipart boundary header itself.
      body = options.form;
    } else if (options.body instanceof URLSearchParams) {
      body = options.body;
    } else if (options.body !== void 0) {
      headers.set("content-type", "application/json");
      body = JSON.stringify(options.body);
    }
    let response;
    try {
      response = await fetch(url, { method, headers, body });
    } catch (error2) {
      throw httpClientError(
        error2 instanceof Error ? error2.message : String(error2)
      );
    }
    // Read the body once as text, then best-effort JSON decode.
    const text = await response.text();
    const payload = parseResponse(text);
    if (!response.ok) {
      const preview = typeof payload === "string" ? payload : JSON.stringify(payload);
      if (response.status === 404) {
        throw notFoundError(`HTTP 404 - ${truncate(preview, 512)}`);
      }
      throw apiError(`HTTP ${response.status} - ${truncate(preview, 512)}`);
    }
    return payload;
  }
};
|
|
226
|
+
/**
 * Read a local file and wrap its bytes in a Blob suitable for FormData.
 * Throws CONFIG_ERROR when the file cannot be read. The buffer is sliced to
 * the Buffer's exact byte window so pooled-buffer neighbors are not leaked.
 */
async function fileBlobFromPath(filePath) {
  let bytes;
  try {
    bytes = await readFile(filePath);
  } catch (cause) {
    const reason = cause instanceof Error ? cause.message : String(cause);
    throw configError(`Failed to read file ${filePath}: ${reason}`);
  }
  const { buffer, byteOffset, byteLength } = bytes;
  return new Blob([buffer.slice(byteOffset, byteOffset + byteLength)]);
}
|
|
241
|
+
// Append "/" to a base URL when missing, so `new URL(path, base)` keeps the
// final base segment during resolution.
function ensureTrailingSlash(url) {
  if (url.endsWith("/")) {
    return url;
  }
  return `${url}/`;
}
// Best-effort response decoding: empty body -> null, valid JSON -> parsed
// value, anything else -> the raw text.
function parseResponse(text) {
  if (!text) {
    return null;
  }
  try {
    return JSON.parse(text);
  } catch {
    return text;
  }
}
// Cap a string at maxLength characters, appending a marker when cut.
function truncate(value, maxLength) {
  return value.length <= maxLength ? value : `${value.slice(0, maxLength)}...[truncated]`;
}
|
|
260
|
+
|
|
261
|
+
// src/config.ts
|
|
262
|
+
import { chmodSync, existsSync, mkdirSync, readFileSync, writeFileSync } from "fs";
|
|
263
|
+
import { homedir } from "os";
|
|
264
|
+
import { dirname, join } from "path";
|
|
265
|
+
|
|
266
|
+
// src/meta.ts
|
|
267
|
+
// CLI identity constants shared across commands.
var CLI_NAME = "cognee-admin";
// Keep in sync with package.json "version".
var CLI_VERSION = "0.2.0";
var CLI_DESCRIPTION = "Cognee knowledge engine management CLI";
// Final fallback endpoint when no --cognee-url flag, COGNEE_URL env var, or
// auth.json value is present (see resolveCogneeUrl).
var DEFAULT_COGNEE_URL = "https://cogneeapi.xiaomao.chat";
|
|
271
|
+
|
|
272
|
+
// src/config.ts
|
|
273
|
+
// Directory holding the shared CLI auth state (same location the Rust
// cognee-admin CLI uses, so the two tools share credentials).
function configDir() {
  const segments = [homedir(), ".config", "cognee-admin"];
  return join(...segments);
}
// Full path of the shared auth file.
function configPath() {
  return join(configDir(), "auth.json");
}
|
|
279
|
+
/**
 * Load the shared auth file, migrating the legacy single-`token` layout.
 *
 * Returns {} when the file is absent or blank. A legacy `token` is routed to
 * admin_token when it carries the "ca_" prefix, otherwise to cognee_jwt.
 * Parse failures (and malformed payloads) surface as CONFIG_ERROR.
 */
function loadAuthConfig() {
  const path = configPath();
  if (!existsSync(path)) {
    return {};
  }
  const content = readFileSync(path, "utf8");
  if (!content.trim()) {
    return {};
  }
  // The whole decode + shape inspection stays inside the try so any
  // malformed payload is reported as a parse failure, like before.
  try {
    const parsed = JSON.parse(content);
    if (hasCurrentAuthFields(parsed)) {
      const { cognee_jwt, admin_token, cognee_url } = parsed;
      return { cognee_jwt, admin_token, cognee_url };
    }
    const migrated = {
      cognee_url: parsed.cognee_url
    };
    if (parsed.token) {
      const slot = parsed.token.startsWith("ca_") ? "admin_token" : "cognee_jwt";
      migrated[slot] = parsed.token;
    }
    return migrated;
  } catch (cause) {
    const reason = cause instanceof Error ? cause.message : String(cause);
    throw configError(`Failed to parse auth config at ${path}: ${reason}`);
  }
}
|
|
314
|
+
// Persist the auth config, creating the directory as needed. The file holds
// credentials, so it is written and re-chmodded to 0o600 (owner read/write
// only; 0o600 === 384, the value the Rust CLI uses).
function saveAuthConfig(config) {
  const path = configPath();
  const serialized = `${JSON.stringify(config, null, 2)}\n`;
  mkdirSync(dirname(path), { recursive: true });
  writeFileSync(path, serialized, { mode: 0o600 });
  // chmod again in case the file already existed with looser permissions.
  chmodSync(path, 0o600);
}
|
|
320
|
+
// Resolve the API URL using the documented precedence:
// CLI flag > COGNEE_URL env var > auth.json > built-in default.
function resolveCogneeUrl(overrides) {
  const explicit = overrides?.cogneeUrl ?? process.env.COGNEE_URL;
  if (explicit != null) {
    return explicit;
  }
  return loadAuthConfig().cognee_url ?? DEFAULT_COGNEE_URL;
}
|
|
323
|
+
// Resolve the JWT with the same precedence as the URL:
// CLI flag > COGNEE_JWT env var > auth.json. May return undefined.
function resolveCogneeJwt(overrides) {
  const explicit = overrides?.cogneeJwt ?? process.env.COGNEE_JWT;
  if (explicit != null) {
    return explicit;
  }
  return loadAuthConfig().cognee_jwt;
}
|
|
326
|
+
// True when the parsed auth file already uses the current field layout
// (any of the three keys present, even with an undefined value), in which
// case no legacy-token migration is attempted.
function hasCurrentAuthFields(value) {
  return ["cognee_jwt", "admin_token", "cognee_url"].some((key) => Object.hasOwn(value, key));
}
|
|
329
|
+
|
|
330
|
+
// src/output.ts
|
|
331
|
+
// src/output.ts
// Build the JSON envelope for a successful command.
function success(command, data) {
  return { ok: true, command, data };
}
// Build the JSON envelope for a failed command; the suggestion key is only
// present when a truthy suggestion is supplied.
function error(command, code, message, suggestion) {
  const details = { code, message };
  if (suggestion) {
    details.suggestion = suggestion;
  }
  return { ok: false, command, error: details };
}
|
|
341
|
+
/**
 * Emit a result envelope. Default mode writes pretty-printed JSON to stdout;
 * --human mode prints a check/cross line (successes to stdout, failures and
 * their suggestion to stderr).
 */
function output(result, human = false) {
  if (!human) {
    process.stdout.write(`${JSON.stringify(result, null, 2)}\n`);
    return;
  }
  if (result.ok) {
    console.log(`\u2713 ${result.command}`);
    console.log(formatHuman(result.data));
    return;
  }
  console.error(`\u2717 ${result.command}: ${result.error.message}`);
  if (result.error.suggestion) {
    console.error(`  \u2192 ${result.error.suggestion}`);
  }
}
|
|
356
|
+
// Project a record down to the comma-separated field list from --fields.
// With no list, the data is returned untouched; unknown names are ignored.
function filterFields(data, fields) {
  if (!fields) {
    return data;
  }
  const wanted = new Set(fields.split(",").map((name) => name.trim()));
  const projected = {};
  for (const name of wanted) {
    if (name in data) {
      projected[name] = data[name];
    }
  }
  return projected;
}
|
|
367
|
+
/**
 * Render a value as indented plain text for --human output.
 * null/undefined -> "", scalars are padded, arrays indent each item two more
 * spaces, and nested objects get a "key:" header line above their body.
 */
function formatHuman(data, indent = 0) {
  if (data == null) {
    return "";
  }
  const pad = " ".repeat(indent);
  if (typeof data !== "object") {
    return pad + (typeof data === "string" ? data : String(data));
  }
  if (Array.isArray(data)) {
    return data.map((item) => formatHuman(item, indent + 2)).join("\n");
  }
  const lines = [];
  for (const [key, val] of Object.entries(data)) {
    if (val !== null && typeof val === "object") {
      lines.push(`${pad}${key}:\n${formatHuman(val, indent + 2)}`);
    } else {
      lines.push(`${pad}${key}: ${val}`);
    }
  }
  return lines.join("\n");
}
|
|
383
|
+
|
|
384
|
+
// src/commands/common.ts
|
|
385
|
+
// src/commands/common.ts
// Collect this command's options merged with every ancestor command's
// (commander's optsWithGlobals), so global flags like --human are visible.
function getGlobalOptions(command) {
  const merged = command.optsWithGlobals();
  return merged;
}
|
|
388
|
+
/**
 * Build the per-invocation execution context: an authenticated CogneeClient
 * plus the output flags (--human, --fields). Throws CONFIG_ERROR when auth
 * is required but no JWT can be resolved from flag, env, or auth.json.
 */
function createContext(command, requireAuth = true) {
  const globals = getGlobalOptions(command);
  const url = resolveCogneeUrl({ cogneeUrl: globals.cogneeUrl });
  const jwt = resolveCogneeJwt({ cogneeJwt: globals.cogneeJwt });
  const missingAuth = requireAuth && !jwt;
  if (missingAuth) {
    throw configError(
      "Cognee JWT is required. Use --cognee-jwt, set COGNEE_JWT, or run cognee-admin login.",
      "Run cognee-admin login --username <user> --password <pass>"
    );
  }
  const client = new CogneeClient(url, jwt);
  return {
    client,
    human: Boolean(globals.human),
    fields: globals.fields
  };
}
|
|
404
|
+
// Emit a success envelope, applying the --fields projection only when the
// payload is a plain record (arrays and scalars pass through unchanged).
function printSuccess(commandName, data, ctx) {
  const payload = isRecord(data) ? filterFields(data, ctx.fields) : data;
  output(success(commandName, payload), ctx.human);
}
|
|
408
|
+
// Normalize any thrown value, emit the error envelope, and terminate the
// process with the error's exit code. Never returns.
function printError(commandName, err, human = false) {
  const normalized = normalizeError(err);
  const { code, message, suggestion, exitCode } = normalized;
  output(error(commandName, code, message, suggestion), human);
  process.exit(exitCode);
}
|
|
416
|
+
// Public alias for loading the auth file shared with the Rust CLI.
function loadSharedAuthConfig() {
  return loadAuthConfig();
}
// True for plain object records; arrays and null are excluded.
function isRecord(value) {
  if (value === null || Array.isArray(value)) {
    return false;
  }
  return typeof value === "object";
}
|
|
422
|
+
|
|
423
|
+
// src/commands/cognify.ts
|
|
424
|
+
/**
 * Build the `cognify` command: triggers the cognification pipeline for one
 * dataset (by id or name) or, when neither is given, the server default.
 * Uses a regular function for the action so commander binds `this` to the
 * Command, which createContext reads global options from.
 */
function createCognifyCommand() {
  return new Command("cognify").description("Trigger cognification pipeline")
    .option("--dataset-id <id>", "Optional dataset ID to cognify")
    .option("--dataset-name <name>", "Optional dataset name to cognify")
    .option("--custom-prompt <prompt>", "Override the default cognify prompt")
    .option("--custom-prompt-file <path>", "Read custom prompt text from a file")
    .option("--background", "Run cognify in background mode")
    .option("--chunks-per-batch <count>", "Override chunks per batch for the pipeline")
    .action(
      async function(options) {
        const ctx = createContext(this);
        try {
          // The API takes a single dataset reference; id and name are
          // mutually exclusive, as are the two custom-prompt sources.
          if (options.datasetId && options.datasetName) {
            throw configError("use either --dataset-id or --dataset-name, not both");
          }
          if (options.customPrompt && options.customPromptFile) {
            throw configError("use either --custom-prompt or --custom-prompt-file, not both");
          }
          const datasets = options.datasetId ?? options.datasetName;
          const customPrompt = options.customPromptFile ? await readFile2(options.customPromptFile, "utf8") : options.customPrompt;
          // Commander delivers option values as strings; parse and validate.
          const chunksPerBatch = options.chunksPerBatch ? Number.parseInt(options.chunksPerBatch, 10) : void 0;
          if (options.chunksPerBatch && !Number.isFinite(chunksPerBatch)) {
            throw configError("--chunks-per-batch must be a valid integer");
          }
          // Optional keys are only included in the request body when set.
          const data = await ctx.client.cognify({
            ...datasets ? { datasets: [datasets] } : {},
            ...customPrompt ? { custom_prompt: customPrompt } : {},
            run_in_background: Boolean(options.background),
            ...typeof chunksPerBatch === "number" ? { chunks_per_batch: chunksPerBatch } : {}
          });
          printSuccess("cognify", data, ctx);
        } catch (error2) {
          printError("cognify", error2, ctx.human);
        }
      }
    );
}
|
|
454
|
+
|
|
455
|
+
// src/commands/config.ts
|
|
456
|
+
import { Command as Command2 } from "commander";
|
|
457
|
+
// src/commands/config.ts
/**
 * Build the `config` command group: `get` fetches the current Cognee
 * settings, `set` replaces them with a JSON document supplied as the
 * argument. Actions use regular functions so commander binds `this`.
 */
function createConfigCommand() {
  const command = new Command2("config").description("Cognee settings management");
  command.addCommand(
    new Command2("get").description("Get current settings").action(async function() {
      const ctx = createContext(this);
      try {
        printSuccess("config.get", await ctx.client.getSettings(), ctx);
      } catch (error2) {
        printError("config.get", error2, ctx.human);
      }
    })
  );
  command.addCommand(
    new Command2("set").description("Update settings").argument("<settings>", "Settings as JSON").action(async function(settings) {
      const ctx = createContext(this);
      try {
        const parsed = JSON.parse(settings);
        printSuccess("config.set", await ctx.client.saveSettings(parsed), ctx);
      } catch (error2) {
        if (error2 instanceof SyntaxError) {
          printError("config.set", configError(`Invalid JSON: ${error2.message}`), ctx.human);
          // Fix: stop here. Previously control fell through to the generic
          // printError below, so the SyntaxError branch only worked because
          // printError happens to call process.exit.
          return;
        }
        printError("config.set", error2, ctx.human);
      }
    })
  );
  return command;
}
|
|
485
|
+
|
|
486
|
+
// src/commands/data.ts
|
|
487
|
+
import { readdir } from "fs/promises";
|
|
488
|
+
import { basename as basename2, join as join2, relative } from "path";
|
|
489
|
+
import { Command as Command3 } from "commander";
|
|
490
|
+
// Number of files sent per multipart request by `data add-dir`.
var UPLOAD_BATCH_SIZE = 10;
/**
 * Build the `data` command group: add / add-file / add-dir / list / delete /
 * raw / update operations on data items within datasets. Actions use regular
 * functions so commander binds `this` for createContext.
 */
function createDataCommand() {
  const command = new Command3("data").description("Data management within datasets");
  command.addCommand(
    // data add: inline text content.
    new Command3("add").description("Add text data to a dataset").requiredOption("--dataset <name>", "Dataset name").argument("<content>", "Text content").action(async function(content, options) {
      const ctx = createContext(this);
      try {
        printSuccess("data.add", await ctx.client.addData(options.dataset, content), ctx);
      } catch (error2) {
        printError("data.add", error2, ctx.human);
      }
    })
  ).addCommand(
    // data add-file: one local file.
    new Command3("add-file").description("Upload a single file to a dataset").requiredOption("--dataset <name>", "Dataset name").argument("<path>", "File path to upload").action(async function(filePath, options) {
      const ctx = createContext(this);
      try {
        printSuccess("data.add-file", await ctx.client.uploadFile(options.dataset, filePath), ctx);
      } catch (error2) {
        printError("data.add-file", error2, ctx.human);
      }
    })
  ).addCommand(
    // data add-dir: recursive upload with glob filtering and batch+retry.
    new Command3("add-dir").description("Upload matching files from a directory in batches of 10").requiredOption("--dataset <name>", "Dataset name").option("--glob <pattern>", "Glob pattern used to filter files", "*").argument("<dir>", "Directory to scan recursively").action(async function(dir, options) {
      const ctx = createContext(this);
      try {
        const files = await collectMatchingFiles(dir, options.glob);
        if (files.length === 0) {
          throw notFoundError(`No files matched '${options.glob}' in ${dir}`);
        }
        let uploaded = 0;
        let failed = 0;
        const errors = [];
        const totalBatches = Math.ceil(files.length / UPLOAD_BATCH_SIZE);
        for (let index = 0; index < totalBatches; index += 1) {
          const start = index * UPLOAD_BATCH_SIZE;
          const batch = files.slice(start, start + UPLOAD_BATCH_SIZE);
          // Progress goes to stderr so stdout stays clean JSON.
          console.error(
            `[${index + 1}/${totalBatches}] Uploading ${batch.length} file(s) to ${options.dataset}`
          );
          try {
            await ctx.client.uploadFiles(options.dataset, batch);
            uploaded += batch.length;
            console.error(
              `[${index + 1}/${totalBatches}] Uploaded ${uploaded}/${files.length} file(s)`
            );
          } catch (batchError) {
            // On a failed batch, fall back to one-at-a-time uploads so a
            // single bad file does not sink its nine neighbors.
            console.error(
              `[${index + 1}/${totalBatches}] Batch upload failed, retrying individually`
            );
            // NOTE(review): the batch-level error is recorded even when the
            // individual retries later succeed, so `errors` can list entries
            // for files counted as uploaded.
            errors.push({ batch, error: errorMessage(batchError) });
            for (const filePath of batch) {
              try {
                await ctx.client.uploadFile(options.dataset, filePath);
                uploaded += 1;
                console.error(`  OK ${relative(dir, filePath) || basename2(filePath)}`);
              } catch (fileError) {
                failed += 1;
                errors.push({ path: filePath, error: errorMessage(fileError) });
                console.error(`  FAIL ${relative(dir, filePath) || basename2(filePath)}: ${errorMessage(fileError)}`);
              }
            }
          }
        }
        // Machine-readable summary returned as the command's data payload.
        const summary = {
          dataset: options.dataset,
          directory: dir,
          glob: options.glob,
          batch_size: UPLOAD_BATCH_SIZE,
          total: files.length,
          uploaded,
          failed,
          skipped: 0,
          errors
        };
        console.error(`Completed upload: success=${uploaded} failed=${failed} total=${files.length}`);
        printSuccess("data.add-dir", summary, ctx);
      } catch (error2) {
        printError("data.add-dir", error2, ctx.human);
      }
    })
  ).addCommand(
    // data list: items within one dataset.
    new Command3("list").description("List data in a dataset").argument("<datasetId>", "Dataset ID").action(async function(datasetId) {
      const ctx = createContext(this);
      try {
        printSuccess("data.list", await ctx.client.datasetData(datasetId), ctx);
      } catch (error2) {
        printError("data.list", error2, ctx.human);
      }
    })
  ).addCommand(
    // data delete: remove one item from a dataset.
    new Command3("delete").description("Delete data from a dataset").requiredOption("--dataset-id <id>", "Dataset ID").requiredOption("--data-id <id>", "Data ID").action(async function(options) {
      const ctx = createContext(this);
      try {
        printSuccess(
          "data.delete",
          await ctx.client.deleteData(options.datasetId, options.dataId),
          ctx
        );
      } catch (error2) {
        printError("data.delete", error2, ctx.human);
      }
    })
  ).addCommand(
    // data raw: fetch the item's raw content.
    new Command3("raw").description("Get raw data content").requiredOption("--dataset-id <id>", "Dataset ID").requiredOption("--data-id <id>", "Data ID").action(async function(options) {
      const ctx = createContext(this);
      try {
        printSuccess(
          "data.raw",
          await ctx.client.rawData(options.datasetId, options.dataId),
          ctx
        );
      } catch (error2) {
        printError("data.raw", error2, ctx.human);
      }
    })
  ).addCommand(
    // data update: replace an item's content with a new local file.
    new Command3("update").description("Replace an existing data item with a new file").requiredOption("--dataset-id <id>", "Dataset ID").requiredOption("--data-id <id>", "Data ID").argument("<path>", "File path to upload").action(async function(filePath, options) {
      const ctx = createContext(this);
      try {
        printSuccess(
          "data.update",
          await ctx.client.updateData(options.datasetId, options.dataId, filePath),
          ctx
        );
      } catch (error2) {
        printError("data.update", error2, ctx.human);
      }
    })
  );
  return command;
}
|
|
621
|
+
// Walk rootDir recursively and return the glob-matching file paths in a
// stable, locale-compared sort order.
async function collectMatchingFiles(rootDir, globPattern) {
  const matcher = createGlobMatcher(globPattern);
  const matches = [];
  await visit(rootDir, rootDir, matcher, matches);
  return matches.sort((left, right) => left.localeCompare(right));
}
|
|
628
|
+
/**
 * Recursive directory walk. Regular files whose root-relative path (or base
 * name) satisfies the matcher are appended to `files`; other entry types
 * (symlinks, sockets, ...) are skipped. Unreadable directories abort the
 * walk with CONFIG_ERROR.
 */
async function visit(rootDir, currentDir, matcher, files) {
  let entries;
  try {
    entries = await readdir(currentDir, { withFileTypes: true });
  } catch (cause) {
    const reason = cause instanceof Error ? cause.message : String(cause);
    throw configError(`Failed to read directory ${currentDir}: ${reason}`);
  }
  for (const entry of entries) {
    const fullPath = join2(currentDir, entry.name);
    if (entry.isDirectory()) {
      await visit(rootDir, fullPath, matcher, files);
    } else if (entry.isFile()) {
      // Normalize Windows separators so glob regexes can assume "/".
      const relativePath = relative(rootDir, fullPath).split("\\").join("/");
      if (matcher(relativePath, entry.name)) {
        files.push(fullPath);
      }
    }
  }
}
|
|
652
|
+
/**
 * Compile a glob into a predicate over (relativePath, baseName). Patterns
 * without a "/" also match against the bare file name, so "*.md" finds
 * markdown files at any depth. Empty/blank patterns raise CONFIG_ERROR.
 */
function createGlobMatcher(globPattern) {
  if (!globPattern || globPattern.trim() === "") {
    throw configError("Invalid glob pattern: pattern must not be empty");
  }
  const regex = globToRegExp(globPattern);
  const matchesBaseName = !globPattern.includes("/");
  return (relativePath, baseName) => {
    return regex.test(relativePath) || (matchesBaseName && regex.test(baseName));
  };
}
|
|
665
|
+
/**
 * Translate a glob into an anchored RegExp: "**" crosses directory
 * separators (".*"), "*" matches within one segment ("[^/]*"), "?" matches
 * one non-separator character; everything else is matched literally.
 */
function globToRegExp(globPattern) {
  const parts = ["^"];
  let i = 0;
  while (i < globPattern.length) {
    const char = globPattern[i];
    if (char === "*") {
      if (globPattern[i + 1] === "*") {
        parts.push(".*");
        i += 2;
      } else {
        parts.push("[^/]*");
        i += 1;
      }
      continue;
    }
    if (char === "?") {
      parts.push("[^/]");
    } else if (char === "/") {
      parts.push("/");
    } else {
      parts.push(escapeRegExp(char));
    }
    i += 1;
  }
  parts.push("$");
  return new RegExp(parts.join(""));
}
|
|
692
|
+
// Escape RegExp metacharacters so `value` matches itself literally
// when embedded in a regular expression.
function escapeRegExp(value) {
  const meta = /[.*+?^${}()|[\]\\]/g;
  return value.replace(meta, (match) => `\\${match}`);
}
|
|
695
|
+
// Extract a human-readable message from any thrown value:
// Error instances contribute their .message, anything else is stringified.
function errorMessage(error2) {
  if (error2 instanceof Error) {
    return error2.message;
  }
  return String(error2);
}
|
|
698
|
+
|
|
699
|
+
// src/commands/dataset.ts
|
|
700
|
+
import { Command as Command4 } from "commander";
|
|
701
|
+
// Build the `dataset` command tree: list/create/delete/delete-all/status/graph.
function createDatasetCommand() {
  // Shared action wrapper: every subcommand builds a context, awaits its
  // handler, and reports success or error with a uniform operation label.
  const run = (op, handler) => async function(...args) {
    const ctx = createContext(this);
    try {
      printSuccess(op, await handler(ctx, ...args), ctx);
    } catch (error2) {
      printError(op, error2, ctx.human);
    }
  };
  const command = new Command4("dataset").description("Dataset management");
  command.addCommand(
    new Command4("list")
      .description("List all datasets")
      .action(run("dataset.list", (ctx) => ctx.client.datasets()))
  );
  command.addCommand(
    new Command4("create")
      .description("Create a new dataset")
      .argument("<name>", "Dataset name")
      .action(run("dataset.create", (ctx, name) => ctx.client.createDataset(name)))
  );
  command.addCommand(
    new Command4("delete")
      .description("Delete a dataset")
      .argument("<id>", "Dataset ID")
      .action(run("dataset.delete", (ctx, id) => ctx.client.deleteDataset(id)))
  );
  command.addCommand(
    new Command4("delete-all")
      .description("Delete every dataset")
      .option("--yes", "Confirm destructive delete-all operation")
      .action(run("dataset.delete-all", async (ctx, options) => {
        // Destructive operation: require explicit confirmation.
        if (!options.yes) {
          throw configError("refusing to delete all datasets: use --yes to confirm");
        }
        return ctx.client.deleteAllDatasets();
      }))
  );
  command.addCommand(
    new Command4("status")
      .description("Show dataset processing status")
      .action(run("dataset.status", (ctx) => ctx.client.datasetStatus()))
  );
  command.addCommand(
    new Command4("graph")
      .description("Get dataset knowledge graph")
      .argument("<id>", "Dataset ID")
      .action(run("dataset.graph", (ctx, id) => ctx.client.datasetGraph(id)))
  );
  return command;
}
|
|
763
|
+
|
|
764
|
+
// src/commands/describe.ts
|
|
765
|
+
import { Command as Command5 } from "commander";
|
|
766
|
+
// Static command schema served by the `describe` command so agents and
// tooling can discover the CLI surface without parsing --help output.
// Top-level keys mirror the commands registered on the root program.
// Convention: params keyed "--flag" are options; bare keys are positional
// arguments. NOTE(review): this table is maintained by hand — keep it in
// sync with the commander definitions when commands change.
var SCHEMAS = {
  health: {
    description: "Check Cognee API health",
    params: {
      "--detailed": { type: "bool", default: false, description: "Include detailed system info" },
      "--watch": { type: "bool", default: false, description: "Continuously poll until Ctrl+C" },
      "--interval": { type: "u64", default: 30, description: "Polling interval in seconds for watch mode" }
    }
  },
  login: {
    description: "Authenticate with Cognee API",
    params: {
      "--username": { type: "string", required: true, description: "Login username" },
      "--password": { type: "string", required: true, description: "Login password" }
    }
  },
  dataset: {
    description: "Dataset management",
    subcommands: {
      list: { description: "List all datasets" },
      create: {
        description: "Create a new dataset",
        params: { name: { type: "string", required: true } }
      },
      delete: {
        description: "Delete a dataset",
        params: { id: { type: "string", required: true } }
      },
      "delete-all": {
        description: "Delete every dataset",
        params: {
          "--yes": {
            type: "bool",
            required: true,
            description: "Confirm destructive delete-all operation"
          }
        }
      },
      status: { description: "Show dataset processing status" },
      graph: {
        description: "Get dataset knowledge graph",
        params: { id: { type: "string", required: true } }
      }
    }
  },
  data: {
    description: "Data management within datasets",
    subcommands: {
      add: {
        description: "Add text data to a dataset",
        params: {
          "--dataset": { type: "string", required: true, description: "Dataset name" },
          content: { type: "string", required: true, description: "Text content" }
        }
      },
      "add-file": {
        description: "Upload a single file to a dataset",
        params: {
          "--dataset": { type: "string", required: true, description: "Dataset name" },
          path: { type: "string", required: true, description: "File path to upload" }
        }
      },
      "add-dir": {
        description: "Upload matching files from a directory in batches of 10",
        params: {
          "--dataset": { type: "string", required: true, description: "Dataset name" },
          "--glob": {
            type: "string",
            required: false,
            default: "*",
            description: "Glob pattern used to filter files"
          },
          dir: { type: "string", required: true, description: "Directory to scan recursively" }
        }
      },
      list: {
        description: "List data in a dataset",
        params: { datasetId: { type: "string", required: true } }
      },
      delete: {
        description: "Delete data from a dataset",
        params: {
          "--dataset-id": { type: "string", required: true },
          "--data-id": { type: "string", required: true }
        }
      },
      raw: {
        description: "Get raw data content",
        params: {
          "--dataset-id": { type: "string", required: true },
          "--data-id": { type: "string", required: true }
        }
      },
      update: {
        description: "Replace an existing data item with a new file",
        params: {
          "--dataset-id": { type: "string", required: true },
          "--data-id": { type: "string", required: true },
          path: { type: "string", required: true, description: "File path to upload" }
        }
      }
    }
  },
  cognify: {
    description: "Trigger cognification pipeline",
    params: {
      "--dataset-id": { type: "string", required: false, description: "Optional dataset ID to cognify" },
      "--dataset-name": { type: "string", required: false, description: "Optional dataset name to cognify" },
      "--custom-prompt": { type: "string", required: false, description: "Override the default cognify prompt" },
      "--custom-prompt-file": { type: "path", required: false, description: "Read custom prompt text from a file" },
      "--background": { type: "bool", default: false, description: "Run cognify in background mode" },
      "--chunks-per-batch": { type: "u32", required: false, description: "Override chunks per batch for the pipeline" }
    }
  },
  search: {
    description: "Search the knowledge base",
    params: {
      query: { type: "string", required: true, description: "Search query unless using the history subcommand" },
      "--search-type": { type: "string", default: "INSIGHTS", description: "Search type" },
      "--top-k": { type: "u32", default: 5, description: "Number of results" },
      "--datasets": { type: "string[]", required: false, description: "Comma-separated dataset names to filter by" },
      "--verbose": { type: "bool", default: false, description: "Include request metadata in the response" }
    },
    subcommands: {
      history: { description: "Fetch search history" }
    }
  },
  config: {
    description: "Cognee settings management",
    subcommands: {
      get: { description: "Get current settings" },
      set: {
        description: "Update settings",
        params: { settings: { type: "string", required: true, description: "Settings as JSON" } }
      }
    }
  },
  ontology: {
    description: "Ontology management",
    subcommands: {
      upload: {
        description: "Upload an ontology OWL file",
        params: {
          "--key": { type: "string", required: true, description: "Ontology key to store under" },
          file: { type: "path", required: true, description: "Path to the OWL file" }
        }
      },
      list: { description: "List uploaded ontologies" }
    }
  },
  describe: {
    description: "Self-describe available commands (JSON Schema)",
    params: {
      command: { type: "string", required: false, description: "Specific command to describe" }
    }
  }
};
|
|
923
|
+
// Build the `describe` command: emits the static SCHEMAS table (whole CLI
// when no argument is given, one command's schema otherwise) as a success
// envelope, even for unknown command names.
function createDescribeCommand() {
  return new Command5("describe")
    .description("Self-describe available commands (JSON Schema)")
    .argument("[command]", "Specific command to describe")
    .action(function(commandArg) {
      const globals = this.optsWithGlobals();
      let payload;
      if (!commandArg) {
        // No argument: dump the whole CLI surface.
        payload = {
          name: CLI_NAME,
          version: CLI_VERSION,
          description: CLI_DESCRIPTION,
          commands: SCHEMAS
        };
      } else {
        const schema = SCHEMAS[commandArg];
        payload = schema
          ? { command: commandArg, schema }
          : { error: `Unknown command: ${commandArg}`, available: Object.keys(SCHEMAS) };
      }
      output(success("describe", payload), globals.human);
    });
}
|
|
952
|
+
|
|
953
|
+
// src/commands/health.ts
|
|
954
|
+
import { Command as Command6 } from "commander";
|
|
955
|
+
// Build the `health` command: a one-shot health check, or with --watch a
// polling loop that repeats every --interval seconds until Ctrl+C.
function createHealthCommand() {
  return new Command6("health")
    .description("Check Cognee API health")
    .option("--detailed", "Include detailed system info")
    .option("--watch", "Continuously poll until Ctrl+C")
    .option("--interval <seconds>", "Polling interval in seconds for watch mode", "30")
    .action(async function(options) {
      const ctx = createContext(this);
      const interval = Number.parseInt(options.interval ?? "30", 10);
      if (!Number.isFinite(interval) || interval < 1) {
        printError("health", configError("--interval must be at least 1 second"), ctx.human);
        // Bug fix: previously execution fell through after reporting the
        // error, so watch mode ran with an invalid interval — sleep(NaN)
        // resolves immediately, degenerating into a zero-delay poll loop.
        return;
      }
      try {
        if (!options.watch) {
          // One-shot check; --detailed selects the richer endpoint.
          const data = options.detailed ? await ctx.client.healthDetailed() : await ctx.client.health();
          printSuccess("health", data, ctx);
          return;
        }
        // Watch mode: poll until SIGINT flips the stop flag. A SIGINT that
        // arrives mid-sleep takes effect at the next loop-condition check.
        let stopped = false;
        const onSigint = () => {
          stopped = true;
        };
        process.once("SIGINT", onSigint);
        while (!stopped) {
          const data = options.detailed ? await ctx.client.healthDetailed() : await ctx.client.health();
          printSuccess("health", data, ctx);
          await sleep(interval * 1e3);
        }
        // Harmless if the once-listener already fired and removed itself.
        process.removeListener("SIGINT", onSigint);
        printSuccess(
          "health",
          { watch_stopped: true, interval_secs: interval },
          ctx
        );
      } catch (error2) {
        printError("health", error2, ctx.human);
      }
    });
}
|
|
989
|
+
// Promise-based delay: resolves (with undefined) after `ms` milliseconds.
function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
|
|
992
|
+
|
|
993
|
+
// src/commands/login.ts
|
|
994
|
+
import { Command as Command7 } from "commander";
|
|
995
|
+
// Build the `login` command: authenticate against the API and persist the
// returned JWT (plus the resolved URL) into the shared auth config file.
function createLoginCommand() {
  return new Command7("login")
    .description("Authenticate with Cognee API")
    .requiredOption("--username <username>", "Login username")
    .requiredOption("--password <password>", "Login password")
    .action(async function(options) {
      const globals = getGlobalOptions(this);
      // NOTE(review): second argument to createContext presumably disables
      // the auth requirement for the login request itself — confirm.
      const ctx = createContext(this, false);
      try {
        const data = await ctx.client.login(options.username, options.password);
        let token;
        if (data !== null && typeof data === "object" && "access_token" in data) {
          token = data.access_token;
        }
        if (typeof token !== "string" || token.length === 0) {
          throw configError("No access_token in login response");
        }
        // Merge into the existing shared config so unrelated keys
        // (e.g. admin_token) survive the update.
        saveAuthConfig({
          ...loadSharedAuthConfig(),
          cognee_jwt: token,
          cognee_url: resolveCogneeUrl({ cogneeUrl: globals.cogneeUrl })
        });
        printSuccess("login", data, ctx);
      } catch (error2) {
        printError("login", error2, ctx.human);
      }
    });
}
|
|
1017
|
+
|
|
1018
|
+
// src/commands/ontology.ts
|
|
1019
|
+
import { Command as Command8 } from "commander";
|
|
1020
|
+
// Build the `ontology` command tree: upload an OWL file, list ontologies.
function createOntologyCommand() {
  const command = new Command8("ontology").description("Ontology management");
  const upload = new Command8("upload")
    .description("Upload an ontology OWL file")
    .requiredOption("--key <key>", "Ontology key to store under")
    .argument("<file>", "Path to the OWL file")
    .action(async function(file, options) {
      const ctx = createContext(this);
      try {
        const result = await ctx.client.uploadOntology(options.key, file);
        printSuccess("ontology.upload", result, ctx);
      } catch (error2) {
        printError("ontology.upload", error2, ctx.human);
      }
    });
  const list = new Command8("list")
    .description("List uploaded ontologies")
    .action(async function() {
      const ctx = createContext(this);
      try {
        const result = await ctx.client.listOntologies();
        printSuccess("ontology.list", result, ctx);
      } catch (error2) {
        printError("ontology.list", error2, ctx.human);
      }
    });
  command.addCommand(upload);
  command.addCommand(list);
  return command;
}
|
|
1044
|
+
|
|
1045
|
+
// src/commands/search.ts
|
|
1046
|
+
import { Command as Command9 } from "commander";
|
|
1047
|
+
// Build the `search` command plus the `search history` subcommand.
// The query argument is optional at the parser level only so that
// `search history` parses; a bare `search` with no query is rejected.
function createSearchCommand() {
  const command = new Command9("search")
    .description("Search the knowledge base")
    .argument("[query]", "Search query unless using the history subcommand")
    .option("--search-type <type>", "Search type", "INSIGHTS")
    .option("--top-k <count>", "Number of results", "5")
    .option("--datasets <names>", "Comma-separated dataset names to filter by")
    .option("--verbose", "Include request metadata in the response")
    .action(async function(query, options) {
      const ctx = createContext(this);
      try {
        if (!query) {
          throw configError("query is required unless using `search history`");
        }
        const topK = Number.parseInt(options.topK, 10);
        if (!Number.isFinite(topK) || topK < 1) {
          throw configError("--top-k must be a positive integer");
        }
        // Split the comma list, trimming and dropping empty segments.
        let datasets = [];
        if (options.datasets) {
          datasets = options.datasets.split(",").map((value) => value.trim()).filter(Boolean);
        }
        const response = await ctx.client.search(query, {
          search_type: options.searchType,
          top_k: topK,
          datasets
        });
        // --verbose wraps the response together with the request we sent.
        const data = options.verbose
          ? {
              request: {
                query,
                search_type: options.searchType,
                top_k: topK,
                datasets
              },
              response
            }
          : response;
        printSuccess("search", data, ctx);
      } catch (error2) {
        printError("search", error2, ctx.human);
      }
    });
  const history = new Command9("history")
    .description("Fetch search history")
    .action(async function() {
      const ctx = createContext(this);
      try {
        printSuccess("search.history", await ctx.client.searchHistory(), ctx);
      } catch (error2) {
        printError("search.history", error2, ctx.human);
      }
    });
  command.addCommand(history);
  return command;
}
|
|
1092
|
+
|
|
1093
|
+
// src/index.ts
|
|
1094
|
+
// Root CLI program. Global options here are visible to every subcommand
// via commander's optsWithGlobals().
var program = new Command10().name("cognee-admin").description("Cognee knowledge engine management CLI").version("0.2.0").option(
  "--cognee-url <url>",
  `Cognee API URL (default: $COGNEE_URL, auth config, or ${DEFAULT_COGNEE_URL})`
).option("--cognee-jwt <token>", "Cognee JWT for API authentication").option("--human", "Human-readable output instead of JSON").option("--fields <fields>", "Comma-separated list of output fields");
// Register every subcommand; this list is mirrored by the static SCHEMAS
// table that `describe` serves. Registration order is the --help order.
program.addCommand(createHealthCommand());
program.addCommand(createLoginCommand());
program.addCommand(createDatasetCommand());
program.addCommand(createDataCommand());
program.addCommand(createCognifyCommand());
program.addCommand(createSearchCommand());
program.addCommand(createConfigCommand());
program.addCommand(createOntologyCommand());
program.addCommand(createDescribeCommand());
// Top-level await is valid here: the bundle is an ES module
// (package.json declares "type": "module").
await program.parseAsync(process.argv);
|
package/package.json
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@doufunao123/cognee-admin",
|
|
3
|
+
"version": "0.2.0",
|
|
4
|
+
"description": "Cognee knowledge engine CLI client — search, upload, cognify",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"bin": {
|
|
7
|
+
"cognee-admin": "./dist/index.js"
|
|
8
|
+
},
|
|
9
|
+
"files": [
|
|
10
|
+
"dist",
|
|
11
|
+
"README.md"
|
|
12
|
+
],
|
|
13
|
+
"repository": {
|
|
14
|
+
"type": "git",
|
|
15
|
+
"url": "https://github.com/fran0220/agent-skills",
|
|
16
|
+
"directory": "cognee-admin/npm"
|
|
17
|
+
},
|
|
18
|
+
"author": "doufunao123",
|
|
19
|
+
"scripts": {
|
|
20
|
+
"build": "tsup src/index.ts --format esm --dts --clean",
|
|
21
|
+
"dev": "tsup src/index.ts --format esm --watch",
|
|
22
|
+
"test": "vitest run",
|
|
23
|
+
"lint": "tsc --noEmit",
|
|
24
|
+
"prepublishOnly": "npm run build"
|
|
25
|
+
},
|
|
26
|
+
"engines": {
|
|
27
|
+
"node": ">=20"
|
|
28
|
+
},
|
|
29
|
+
"dependencies": {
|
|
30
|
+
"commander": "^13.1.0"
|
|
31
|
+
},
|
|
32
|
+
"devDependencies": {
|
|
33
|
+
"@types/node": "^22.15.0",
|
|
34
|
+
"tsup": "^8.4.0",
|
|
35
|
+
"typescript": "^5.8.0",
|
|
36
|
+
"vitest": "^3.1.0"
|
|
37
|
+
}
|
|
38
|
+
}
|