@naisys/common-node 3.0.0-beta.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agentConfigLoader.js +80 -0
- package/dist/bearerToken.js +9 -0
- package/dist/customModelsLoader.js +39 -0
- package/dist/expandEnv.js +7 -0
- package/dist/hashToken.js +5 -0
- package/dist/hubCertVerification.js +20 -0
- package/dist/index.js +9 -0
- package/dist/logFileService.js +58 -0
- package/dist/migrationHelper.js +121 -0
- package/dist/sessionCookie.js +10 -0
- package/package.json +34 -0
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
import { adminAgentConfig, AgentConfigFileSchema } from "@naisys/common";
|
|
2
|
+
import * as fs from "fs";
|
|
3
|
+
import yaml from "js-yaml";
|
|
4
|
+
import * as path from "path";
|
|
5
|
+
/** Loads agent yaml configs from a file or directory path, returns a map of userId → UserEntry */
export function loadAgentConfigs(startupPath) {
  const configEntries = [];
  const usernameToPath = new Map();
  const resolvedPath = path.resolve(startupPath);

  // A directory is scanned recursively; a plain path is treated as one config file
  if (fs.statSync(resolvedPath).isDirectory()) {
    processDirectory(resolvedPath, undefined, configEntries, usernameToPath);
  } else {
    processFile(resolvedPath, undefined, configEntries, usernameToPath);
  }

  // Guarantee the built-in admin agent is always part of the roster
  const adminPresent = configEntries.some(
    (entry) => entry.username === adminAgentConfig.username,
  );
  if (!adminPresent) {
    configEntries.push({
      username: adminAgentConfig.username,
      leadEntryIndex: undefined,
      config: adminAgentConfig,
    });
  }

  // Assign 1-based sequential userIds; translate lead entry indexes to lead userIds
  const userMap = new Map();
  configEntries.forEach((entry, index) => {
    const userId = index + 1;
    userMap.set(userId, {
      userId,
      username: entry.username,
      enabled: true,
      leadUserId: entry.leadEntryIndex === undefined ? undefined : entry.leadEntryIndex + 1,
      config: entry.config,
    });
  });
  return userMap;
}
|
|
41
|
+
/**
 * Load every .yaml/.yml config file directly inside dirPath.
 *
 * Files are processed in sorted order: fs.readdirSync returns entries in a
 * platform-dependent order, and userIds are assigned sequentially from
 * configEntries, so sorting makes userId assignment deterministic.
 *
 * @param {string} dirPath - Directory to scan (non-recursive; subdirectories
 *   are only entered via processFile's matching-name rule).
 * @param {number|undefined} leadEntryIndex - Index of the lead entry for files
 *   found here, or undefined for top-level agents.
 * @param {Array} configEntries - Accumulator of loaded entries (mutated).
 * @param {Map} usernameToPath - username → file path map for duplicate detection (mutated).
 */
function processDirectory(dirPath, leadEntryIndex, configEntries, usernameToPath) {
  const files = fs.readdirSync(dirPath).sort();
  for (const file of files) {
    if (file.endsWith(".yaml") || file.endsWith(".yml")) {
      processFile(path.join(dirPath, file), leadEntryIndex, configEntries, usernameToPath);
    }
  }
}
|
|
49
|
+
/**
 * Parse, validate, and register a single agent yaml config file.
 *
 * After loading, if a sibling subdirectory matches the file's base name
 * (e.g. "bob.yaml" next to a "bob/" directory), its configs are loaded
 * recursively with this entry as their lead.
 *
 * @param {string} filePath - Path to the yaml config file.
 * @param {number|undefined} leadEntryIndex - Index of this agent's lead entry, if any.
 * @param {Array} configEntries - Accumulator of loaded entries (mutated).
 * @param {Map} usernameToPath - username → file path map for duplicate detection (mutated).
 * @throws {Error} If the file is unreadable, fails schema validation, or the
 *   username already appeared in a different file.
 */
function processFile(filePath, leadEntryIndex, configEntries, usernameToPath) {
  const absolutePath = path.resolve(filePath);
  try {
    const configYaml = fs.readFileSync(absolutePath, "utf8");
    const configObj = yaml.load(configYaml);
    const agentConfig = AgentConfigFileSchema.parse(configObj);
    const username = agentConfig.username;
    // Check for duplicate usernames from different files
    const existingPath = usernameToPath.get(username);
    if (existingPath && existingPath !== absolutePath) {
      throw new Error(`Duplicate username "${username}" found in multiple files:\n ${existingPath}\n ${absolutePath}`);
    }
    usernameToPath.set(username, absolutePath);
    const currentIndex = configEntries.length;
    configEntries.push({
      username,
      leadEntryIndex,
      config: agentConfig,
    });
    console.log(`Loaded user: ${username} from ${filePath}`);
    // Check for a subdirectory matching the filename (without extension)
    const ext = path.extname(absolutePath);
    const baseName = path.basename(absolutePath, ext);
    const subDir = path.join(path.dirname(absolutePath), baseName);
    if (fs.existsSync(subDir) && fs.statSync(subDir).isDirectory()) {
      processDirectory(subDir, currentIndex, configEntries, usernameToPath);
    }
  } catch (e) {
    // Keep the original error (and its stack) attached via `cause` instead of
    // only flattening it into the message string.
    throw new Error(`Failed to process agent config at ${filePath}: ${e}`, { cause: e });
  }
}
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
/**
 * Extract the API key from an Authorization: Bearer header value.
 * Returns undefined if the header is missing or not in Bearer format.
 */
export function extractBearerToken(authHeader) {
  const prefix = "Bearer ";
  if (authHeader == null || !authHeader.startsWith(prefix)) {
    return undefined;
  }
  return authHeader.substring(prefix.length);
}
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
import { CustomModelsFileSchema } from "@naisys/common";
|
|
2
|
+
import fs from "fs";
|
|
3
|
+
import yaml from "js-yaml";
|
|
4
|
+
import path from "path";
|
|
5
|
+
/**
 * Load custom model definitions from <folder>/custom-models.yaml.
 * Returns empty model lists when no folder is given or the file is absent.
 */
export function loadCustomModels(folder) {
  const emptyResult = { llmModels: [], imageModels: [] };
  if (!folder) {
    return emptyResult;
  }
  const filePath = path.join(folder, "custom-models.yaml");
  if (!fs.existsSync(filePath)) {
    return emptyResult;
  }
  // Validate file contents against the shared schema before returning
  const parsed = CustomModelsFileSchema.parse(yaml.load(fs.readFileSync(filePath, "utf-8")));
  return {
    llmModels: parsed.llmModels ?? [],
    imageModels: parsed.imageModels ?? [],
  };
}
|
|
21
|
+
/**
 * Validate and write custom model definitions to NAISYS_FOLDER/custom-models.yaml.
 * Empty or missing model arrays are omitted from the written file.
 * @throws {Error} If NAISYS_FOLDER is not set or validation fails.
 */
export function saveCustomModels(data) {
  const folder = process.env.NAISYS_FOLDER;
  if (!folder) {
    throw new Error("NAISYS_FOLDER environment variable is not set");
  }
  // Validate before writing
  CustomModelsFileSchema.parse(data);
  // Omit empty arrays from output
  const output = {};
  for (const key of ["llmModels", "imageModels"]) {
    const models = data[key];
    if (models && models.length > 0) {
      output[key] = models;
    }
  }
  const filePath = path.join(folder, "custom-models.yaml");
  fs.writeFileSync(filePath, yaml.dump(output, { lineWidth: -1 }), "utf-8");
}
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import os from "os";
|
|
2
|
+
/** Expand ~ to the user's home directory in NAISYS_FOLDER */
export function expandNaisysFolder() {
  const folder = process.env.NAISYS_FOLDER;
  // Only touch the env var when it is set and actually starts with a tilde
  if (folder !== undefined && folder.startsWith("~")) {
    process.env.NAISYS_FOLDER = folder.replace("~", os.homedir());
  }
}
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import { existsSync, readFileSync } from "fs";
|
|
2
|
+
import { join } from "path";
|
|
3
|
+
/**
 * Read the hub access key from the local cert file at NAISYS_FOLDER/cert/hub-access-key.
 * Returns undefined if the file does not exist.
 */
export function readHubAccessKeyFile() {
  const accessKeyPath = join(process.env.NAISYS_FOLDER || "", "cert", "hub-access-key");
  if (existsSync(accessKeyPath)) {
    return readFileSync(accessKeyPath, "utf-8").trim();
  }
  return undefined;
}
|
|
14
|
+
/**
 * Resolve the hub access key from environment variable or local cert file.
 * Returns undefined if neither is available.
 */
export function resolveHubAccessKey() {
  const envKey = process.env.HUB_ACCESS_KEY;
  if (envKey) {
    return envKey;
  }
  return readHubAccessKeyFile();
}
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
export * from "./agentConfigLoader.js";
|
|
2
|
+
export * from "./bearerToken.js";
|
|
3
|
+
export * from "./customModelsLoader.js";
|
|
4
|
+
export * from "./expandEnv.js";
|
|
5
|
+
export * from "./hashToken.js";
|
|
6
|
+
export * from "./hubCertVerification.js";
|
|
7
|
+
export * from "./logFileService.js";
|
|
8
|
+
export * from "./migrationHelper.js";
|
|
9
|
+
export * from "./sessionCookie.js";
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
import fsp from "node:fs/promises";
|
|
2
|
+
// Cap on how much of the file's tail is read in one call.
const MAX_READ_BYTES = 256 * 1024;

/**
 * Read the tail of a newline-delimited JSON log file and return up to
 * lineCount parsed entries, plus the current file size.
 *
 * Reads at most MAX_READ_BYTES from the end of the file. Malformed lines are
 * skipped; entries below minLevel (when given) are filtered out. Missing or
 * empty files yield { entries: [], fileSize: 0 }.
 */
export async function tailLogFile(filePath, lineCount, minLevel) {
  let stat;
  try {
    stat = await fsp.stat(filePath);
  } catch {
    return { entries: [], fileSize: 0 };
  }
  const fileSize = stat.size;
  if (fileSize === 0) {
    return { entries: [], fileSize: 0 };
  }

  // Read only the final window of the file
  const readSize = Math.min(fileSize, MAX_READ_BYTES);
  const position = fileSize - readSize;
  const buffer = Buffer.alloc(readSize);
  const handle = await fsp.open(filePath, "r");
  try {
    await handle.read(buffer, 0, readSize, position);
  } finally {
    await handle.close();
  }

  const lines = buffer
    .toString("utf-8")
    .split("\n")
    .filter((line) => line.trim().length > 0);
  // If we didn't read from the start, drop the first line (likely partial)
  if (position > 0 && lines.length > 0) {
    lines.shift();
  }

  // Standard pino-style fields handled explicitly; everything else goes into `detail`
  const OMIT_KEYS = new Set(["level", "time", "msg", "pid", "hostname"]);
  const entries = [];
  for (const line of lines) {
    try {
      const parsed = JSON.parse(line);
      const level = parsed.level ?? 30;
      if (minLevel != null && level < minLevel) {
        continue;
      }
      const extraPairs = Object.entries(parsed).filter(([key]) => !OMIT_KEYS.has(key));
      entries.push({
        level,
        time: parsed.time ?? 0,
        msg: parsed.msg ?? "",
        detail: extraPairs.length > 0 ? JSON.stringify(Object.fromEntries(extraPairs)) : undefined,
      });
    } catch {
      // skip malformed lines
    }
  }
  return { entries: entries.slice(-lineCount), fileSize };
}
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
import Database from "better-sqlite3";
|
|
2
|
+
import { exec } from "child_process";
|
|
3
|
+
import { existsSync, mkdirSync, unlinkSync } from "fs";
|
|
4
|
+
import { dirname, join, resolve } from "path";
|
|
5
|
+
import { promisify } from "util";
|
|
6
|
+
// Promise wrapper so `prisma migrate deploy` can be awaited below.
const execAsync = promisify(exec);
/**
 * Shared helper that runs `prisma migrate deploy` with a version-checked fast path.
 * Uses `better-sqlite3` directly (synchronous, no Prisma dependency) for the version check.
 *
 * @param {object} options
 * @param {string} options.packageDir - Package root containing prisma/schema.prisma;
 *   used as cwd for the migrate subprocess.
 * @param {string} options.databasePath - Path to the SQLite database file; its
 *   directory is created if missing.
 * @param {*} options.expectedVersion - Value expected in schema_version.version
 *   (compared with === for the fast path and > for the too-new check).
 * @param {object} [options.envOverrides] - Extra env vars merged over process.env
 *   for the migrate subprocess (applied last, so they win).
 * @throws {Error} If the stored version is newer than expectedVersion, or if the
 *   migrate subprocess fails for a reason other than recoverable stale WAL locks.
 */
export async function deployPrismaMigrations(options) {
  const { packageDir, databasePath, expectedVersion, envOverrides } = options;
  // Ensure database directory exists
  const databaseDir = dirname(databasePath);
  if (!existsSync(databaseDir)) {
    mkdirSync(databaseDir, { recursive: true });
  }
  let currentVersion;
  // Check version if database file already exists
  if (existsSync(databasePath)) {
    const db = new Database(databasePath);
    try {
      const row = db
        .prepare("SELECT version FROM schema_version WHERE id = 1")
        .get();
      currentVersion = row?.version;
    }
    catch {
      // "no such table" → treat as new DB, proceed with migration
    }
    // Switch from WAL to DELETE journal mode before closing. This merges any
    // pending WAL data and removes the -wal/-shm files entirely. Without this,
    // prisma migrate (a separate process) sees the leftover SHM file and fails
    // with "database is locked".
    try {
      db.pragma("journal_mode=DELETE");
    }
    catch {
      // Failed — another process may genuinely hold the lock
    }
    db.close();
    if (currentVersion === expectedVersion) {
      return; // Fast path — already at expected version
    }
  }
  // Log migration status
  if (currentVersion !== undefined) {
    if (currentVersion > expectedVersion) {
      throw new Error(`Database version ${currentVersion} is newer than expected ${expectedVersion}. Manual intervention required.`);
    }
    console.log(`Migrating database from version ${currentVersion} to ${expectedVersion}...`);
  }
  else {
    console.log(`Creating new database with schema version ${expectedVersion}...`);
  }
  // Run prisma migrate deploy
  const schemaPath = join(packageDir, "prisma", "schema.prisma");
  // Forward slashes so the path works in SQLite connection URLs on Windows too
  const absoluteDbPath = resolve(databasePath).replace(/\\/g, "/");
  let stdout;
  let stderr;
  try {
    ({ stdout, stderr } = await execAsync(`npx prisma migrate deploy --schema="${schemaPath}"`, {
      cwd: packageDir,
      env: {
        ...process.env,
        // Resolve to absolute so prisma.config.ts gets a correct path
        // regardless of this subprocess's cwd (which is packageDir)
        NAISYS_FOLDER: resolve(process.env.NAISYS_FOLDER || ""),
        ...envOverrides,
      },
    }));
  }
  catch (error) {
    const msg = error instanceof Error ? error.message : String(error);
    if (msg.includes("database is locked")) {
      // Stale WAL/SHM files from a crashed process — remove and retry
      const walPath = absoluteDbPath + "-wal";
      const shmPath = absoluteDbPath + "-shm";
      let removed = false;
      for (const staleFile of [walPath, shmPath]) {
        if (existsSync(staleFile)) {
          console.log(`Removing stale file: ${staleFile}`);
          unlinkSync(staleFile);
          removed = true;
        }
      }
      if (removed) {
        // One retry only; a second "database is locked" failure propagates.
        console.log("Retrying migration after removing stale WAL files...");
        ({ stdout, stderr } = await execAsync(`npx prisma migrate deploy --schema="${schemaPath}"`, {
          cwd: packageDir,
          env: {
            ...process.env,
            NAISYS_FOLDER: resolve(process.env.NAISYS_FOLDER || ""),
            ...envOverrides,
          },
        }));
      }
      else {
        // No stale files to clean up, so the lock is presumably held by a live process.
        throw new Error(`Database is locked: ${absoluteDbPath}\n` +
          `Another process may be using the database.`);
      }
    }
    else {
      throw error;
    }
  }
  if (stdout)
    console.log(stdout);
  // Suppress the routine "Loaded Prisma config" notice; surface anything else.
  if (stderr && !stderr.includes("Loaded Prisma config")) {
    console.error(stderr);
  }
  // Upsert schema_version row via raw SQL
  const db = new Database(absoluteDbPath);
  try {
    db.prepare("INSERT OR REPLACE INTO schema_version (id, version, updated) VALUES (1, ?, ?)").run(expectedVersion, new Date().toISOString());
  }
  finally {
    db.close();
  }
  console.log("Database migration completed.");
}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
export const SESSION_COOKIE_NAME = "naisys_session";

/**
 * Build the option bag for the session cookie.
 * maxAge is the remaining lifetime until expiresAt, in whole seconds
 * (negative if expiresAt is already in the past). The cookie is marked
 * Secure only when NODE_ENV is "production".
 */
export function sessionCookieOptions(expiresAt) {
  const remainingSeconds = Math.floor((expiresAt.getTime() - Date.now()) / 1000);
  return {
    path: "/",
    httpOnly: true,
    sameSite: "lax",
    secure: process.env.NODE_ENV === "production",
    maxAge: remainingSeconds,
  };
}
|
package/package.json
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@naisys/common-node",
|
|
3
|
+
"version": "3.0.0-beta.10",
|
|
4
|
+
"type": "module",
|
|
5
|
+
"description": "[internal] Node-only utilities for NAISYS",
|
|
6
|
+
"files": [
|
|
7
|
+
"dist",
|
|
8
|
+
"!dist/**/*.map",
|
|
9
|
+
"!dist/**/*.d.ts",
|
|
10
|
+
"!dist/**/*.d.ts.map"
|
|
11
|
+
],
|
|
12
|
+
"scripts": {
|
|
13
|
+
"clean": "rimraf dist",
|
|
14
|
+
"build": "tsc",
|
|
15
|
+
"npm:publish:dryrun": "npm publish --dry-run",
|
|
16
|
+
"npm:publish": "npm publish --access public"
|
|
17
|
+
},
|
|
18
|
+
"dependencies": {
|
|
19
|
+
"@naisys/common": "3.0.0-beta.10",
|
|
20
|
+
"better-sqlite3": "^12.6.2",
|
|
21
|
+
"js-yaml": "^4.1.1"
|
|
22
|
+
},
|
|
23
|
+
"devDependencies": {
|
|
24
|
+
"@types/better-sqlite3": "^7.6.13",
|
|
25
|
+
"@types/js-yaml": "^4.0.9",
|
|
26
|
+
"typescript": "^5.9.3"
|
|
27
|
+
},
|
|
28
|
+
"exports": {
|
|
29
|
+
".": {
|
|
30
|
+
"types": "./dist/index.d.ts",
|
|
31
|
+
"default": "./dist/index.js"
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
}
|