0nmcp 2.0.0 → 2.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +111 -47
- package/cli.js +274 -0
- package/index.js +8 -1
- package/lib/stats.json +1 -1
- package/package.json +18 -2
- package/vault/deed-collector.js +286 -0
- package/vault/deed-importer.js +277 -0
- package/vault/deed.js +319 -0
- package/vault/tools-deed.js +257 -0
|
@@ -0,0 +1,286 @@
|
|
|
1
|
+
// ============================================================
|
|
2
|
+
// 0nMCP — Vault: Deed Credential Collector
|
|
3
|
+
// ============================================================
|
|
4
|
+
// Collects credentials from multiple sources (files, dirs,
|
|
5
|
+
// manual entry) and auto-detects services using the engine
|
|
6
|
+
// mapper. Feeds into BusinessDeed.create().
|
|
7
|
+
//
|
|
8
|
+
// Patent Pending: US Provisional Patent Application #63/990,046
|
|
9
|
+
// ============================================================
|
|
10
|
+
|
|
11
|
+
import { existsSync, readdirSync, readFileSync } from "fs";
|
|
12
|
+
import { join, extname } from "path";
|
|
13
|
+
import { homedir } from "os";
|
|
14
|
+
import { parseFile, parseEnvString, parseJsonString, parseCsvString } from "../engine/parser.js";
|
|
15
|
+
import { mapEnvVars, groupByService } from "../engine/mapper.js";
|
|
16
|
+
import { verifyCredentials } from "../engine/validator.js";
|
|
17
|
+
|
|
18
|
+
// Default location for per-service .0n connection files (~/.0n/connections).
const CONNECTIONS_DIR = join(homedir(), ".0n", "connections");
|
|
19
|
+
|
|
20
|
+
// ── Collect from .0n connection files ───────────────────────
|
|
21
|
+
|
|
22
|
+
/**
 * Load credentials from ~/.0n/connections/*.0n files.
 *
 * Each connection file is JSON of the form:
 *   { $0n: { type, service }, credentials: { ... } }
 * Files that are unreadable or not valid JSON are skipped silently
 * (collection is best-effort).
 *
 * @param {string} [dir] - Connections directory (default: ~/.0n/connections/)
 * @returns {{ credentials: Object, services: string[] }}
 */
export function collectFromConnections(dir = CONNECTIONS_DIR) {
  const credentials = {};
  const services = [];

  if (!existsSync(dir)) return { credentials, services };

  const files = readdirSync(dir).filter((f) => f.endsWith(".0n"));

  for (const file of files) {
    try {
      const content = readFileSync(join(dir, file), "utf-8");
      const data = JSON.parse(content);

      // Prefer the declared service name; fall back to the file name with
      // only the trailing ".0n" extension removed. (replace(".0n", "")
      // would strip the FIRST ".0n" occurrence, corrupting names such as
      // "my.0n-backup.0n".)
      const service = data?.$0n?.service || file.slice(0, -3);
      if (data.credentials && typeof data.credentials === "object") {
        credentials[service] = data.credentials;
        services.push(service);
      }
    } catch {
      // Skip invalid files — collection is best-effort.
    }
  }

  return { credentials, services };
}
|
|
54
|
+
|
|
55
|
+
// ── Collect from file (auto-detect format) ──────────────────
|
|
56
|
+
|
|
57
|
+
/**
 * Collect credentials from a file on disk (.env, .json, or .csv — the
 * format is auto-detected by the engine parser).
 *
 * @param {string} filePath - Path to the credential file
 * @returns {{ credentials: Object, envVars: Object, services: string[], unmapped: Array }}
 */
export function collectFromFile(filePath) {
  return processEntries(parseFile(filePath).entries);
}
|
|
67
|
+
|
|
68
|
+
// ── Collect from string content ─────────────────────────────
|
|
69
|
+
|
|
70
|
+
/**
 * Collect credentials from raw .env-formatted text.
 *
 * @param {string} content - Raw .env file contents
 * @returns {{ credentials: Object, envVars: Object, services: string[], unmapped: Array }}
 */
export function collectFromEnvString(content) {
  return processEntries(parseEnvString(content));
}
|
|
79
|
+
|
|
80
|
+
/**
 * Collect credentials from raw JSON text.
 *
 * @param {string} content - Raw JSON string
 * @returns {{ credentials: Object, envVars: Object, services: string[], unmapped: Array }}
 */
export function collectFromJsonString(content) {
  return processEntries(parseJsonString(content));
}
|
|
89
|
+
|
|
90
|
+
/**
 * Collect credentials from raw CSV text.
 *
 * @param {string} content - Raw CSV string
 * @returns {{ credentials: Object, envVars: Object, services: string[], unmapped: Array }}
 */
export function collectFromCsvString(content) {
  return processEntries(parseCsvString(content));
}
|
|
99
|
+
|
|
100
|
+
// ── Collect from manual key-value pairs ─────────────────────
|
|
101
|
+
|
|
102
|
+
/**
 * Collect credentials from manually entered key-value pairs.
 *
 * Thin wrapper over the shared mapping pipeline — the pairs are passed
 * straight through to processEntries for service detection.
 *
 * @param {Array<{ key: string, value: string }>} pairs - Raw entries to map
 * @returns {{ credentials: Object, envVars: Object, services: string[], unmapped: Array }}
 */
export function collectFromManual(pairs) {
  return processEntries(pairs);
}
|
|
111
|
+
|
|
112
|
+
// ── Collect from pre-structured service credentials ─────────
|
|
113
|
+
|
|
114
|
+
/**
 * Collect from an already-structured credentials object, e.g.
 * { stripe: { apiKey: "sk_..." }, github: { token: "ghp_..." } }.
 * No mapping is performed — the object is passed through as-is and the
 * service list is derived from its keys.
 *
 * @param {Object} structured - Credentials keyed by service name
 * @returns {{ credentials: Object, services: string[] }}
 */
export function collectFromStructured(structured) {
  return {
    credentials: structured,
    services: Object.keys(structured),
  };
}
|
|
125
|
+
|
|
126
|
+
// ── Process entries through mapper ──────────────────────────
|
|
127
|
+
|
|
128
|
+
/**
 * Process raw key-value entries through the mapper pipeline.
 *
 * Maps env-style keys to known services, groups them per service, and
 * splits the input into structured credentials plus leftover env vars.
 *
 * @param {Array<{ key: string, value: string }>} entries - Raw entries
 * @returns {{ credentials: Object, envVars: Object, services: string[], unmapped: Array }}
 */
function processEntries(entries) {
  const { mapped, unmapped } = mapEnvVars(entries);
  const grouped = groupByService(mapped);

  // Index values by key once so per-service lookups below are O(1)
  // instead of a linear entries.find() per env var (O(n²) before).
  // First occurrence wins, matching the previous find() semantics.
  const valueByKey = new Map();
  for (const e of entries) {
    if (!valueByKey.has(e.key)) valueByKey.set(e.key, e.value);
  }

  // Build credentials and envVars.
  const credentials = {};
  const envVars = {};
  const services = [];

  for (const [service, group] of Object.entries(grouped)) {
    credentials[service] = group.credentials;
    services.push(service);
    for (const envVar of group.envVars) {
      if (valueByKey.has(envVar)) envVars[envVar] = valueByKey.get(envVar);
    }
  }

  // Unmapped entries are preserved as plain env vars.
  for (const entry of unmapped) {
    envVars[entry.key] = entry.value;
  }

  return { credentials, envVars, services, unmapped };
}
|
|
159
|
+
|
|
160
|
+
// ── Validate credentials live ───────────────────────────────
|
|
161
|
+
|
|
162
|
+
/**
 * Validate collected credentials against live API endpoints.
 *
 * Services are verified in parallel batches of 5 to bound concurrent
 * network requests. A verifier that throws is recorded as an invalid
 * result for that service (with the error message) rather than being
 * dropped, so summary counts always add up to summary.total. (The old
 * allSettled-based loop silently discarded rejected verifiers.)
 *
 * @param {Object} credentials - { service: { field: value } }
 * @returns {Promise<{ results: Object, summary: Object }>} Per-service
 *   validation results plus valid/invalid/skipped tallies.
 */
export async function validateCollected(credentials) {
  const results = {};
  const summary = { total: 0, valid: 0, invalid: 0, skipped: 0 };

  const entries = Object.entries(credentials);
  summary.total = entries.length;

  // Validate in parallel batches of 5.
  const batchSize = 5;
  for (let i = 0; i < entries.length; i += batchSize) {
    const batch = entries.slice(i, i + batchSize);
    const batchResults = await Promise.all(
      batch.map(([service, creds]) =>
        verifyCredentials(service, creds)
          .then((r) => ({ service, ...r }))
          // A throwing verifier must not make its service vanish from
          // the report — surface it as an invalid result instead.
          .catch((err) => ({ service, valid: false, error: err?.message ?? String(err) }))
      )
    );

    for (const r of batchResults) {
      results[r.service] = r;
      if (r.skipped) summary.skipped++;
      else if (r.valid) summary.valid++;
      else summary.invalid++;
    }
  }

  return { results, summary };
}
|
|
197
|
+
|
|
198
|
+
// ── Master collection function ──────────────────────────────
|
|
199
|
+
|
|
200
|
+
/**
 * Collect credentials from multiple sources at once.
 *
 * Results from every source are merged; later sources overwrite
 * per-field on service-key collisions.
 *
 * @param {Object} [sources]
 * @param {string} [sources.envFile] - Path to .env file
 * @param {string} [sources.jsonFile] - Path to JSON file
 * @param {string} [sources.csvFile] - Path to CSV file
 * @param {string} [sources.connectionsDir] - Path to connections dir
 * @param {Array} [sources.manual] - Manual key-value pairs
 * @param {Object} [sources.structured] - Pre-structured credentials
 * @param {boolean} [sources.validate] - Validate credentials live
 * @returns {Promise<Object>} Merged credentials, envVars, services,
 *   unmapped entries, credentialCount, and optional validation report.
 */
export async function collectCredentials(sources = {}) {
  const allCredentials = {};
  const allEnvVars = {};
  const allServices = new Set();
  const allUnmapped = [];

  // All file-based sources share the same parse-and-merge path
  // (parseFile auto-detects the format from the file itself).
  for (const filePath of [sources.envFile, sources.jsonFile, sources.csvFile]) {
    if (filePath && existsSync(filePath)) {
      mergeResults(allCredentials, allEnvVars, allServices, allUnmapped, collectFromFile(filePath));
    }
  }

  if (sources.connectionsDir || existsSync(CONNECTIONS_DIR)) {
    const dir = sources.connectionsDir || CONNECTIONS_DIR;
    // Connection files carry no envVars/unmapped; mergeResults tolerates that.
    mergeResults(allCredentials, allEnvVars, allServices, allUnmapped, collectFromConnections(dir));
  }

  if (Array.isArray(sources.manual)) {
    mergeResults(allCredentials, allEnvVars, allServices, allUnmapped, collectFromManual(sources.manual));
  }

  if (sources.structured && typeof sources.structured === "object") {
    mergeResults(allCredentials, allEnvVars, allServices, allUnmapped, collectFromStructured(sources.structured));
  }

  const output = {
    credentials: allCredentials,
    envVars: allEnvVars,
    services: Array.from(allServices),
    unmapped: allUnmapped,
    credentialCount: Object.values(allCredentials).reduce(
      (sum, svc) => sum + Object.keys(svc).length,
      0
    ),
  };

  // Optional live validation.
  if (sources.validate) {
    output.validation = await validateCollected(allCredentials);
  }

  return output;
}
|
|
274
|
+
|
|
275
|
+
// ── Merge helper ────────────────────────────────────────────
|
|
276
|
+
|
|
277
|
+
/**
 * Merge one collector's output into the running accumulators (in place).
 * Credentials merge shallowly per service (new fields win); env vars
 * are overlaid; unmapped entries are appended.
 *
 * @param {Object} allCreds - Accumulated credentials per service
 * @param {Object} allEnv - Accumulated env vars
 * @param {Set<string>} allServices - Accumulated service names
 * @param {Array} allUnmapped - Accumulated unmapped entries
 * @param {Object} result - A single collector's output
 */
function mergeResults(allCreds, allEnv, allServices, allUnmapped, result) {
  const incoming = result.credentials || {};
  for (const svc of Object.keys(incoming)) {
    allCreds[svc] = { ...allCreds[svc], ...incoming[svc] };
    allServices.add(svc);
  }
  Object.assign(allEnv, result.envVars || {});
  if (result.unmapped) allUnmapped.push(...result.unmapped);
}
|
|
@@ -0,0 +1,277 @@
|
|
|
1
|
+
// ============================================================
|
|
2
|
+
// 0nMCP — Vault: Deed Importer
|
|
3
|
+
// ============================================================
|
|
4
|
+
// Writes decrypted deed contents back to live system config.
|
|
5
|
+
// Generates .0n connection files, .env files, MCP platform
|
|
6
|
+
// configs, and restores workflows and AI brain data.
|
|
7
|
+
//
|
|
8
|
+
// Patent Pending: US Provisional Patent Application #63/990,046
|
|
9
|
+
// ============================================================
|
|
10
|
+
|
|
11
|
+
import { existsSync, mkdirSync, writeFileSync, readdirSync } from "fs";
|
|
12
|
+
import { join } from "path";
|
|
13
|
+
import { homedir } from "os";
|
|
14
|
+
|
|
15
|
+
// Base directory for all 0n state under the user's home directory.
const DOT_ON = join(homedir(), ".0n");
// Per-service .0n connection files.
const CONNECTIONS_DIR = join(DOT_ON, "connections");
// Saved workflow definitions.
const WORKFLOWS_DIR = join(DOT_ON, "workflows");
// AI brain data sections.
const BRAIN_DIR = join(DOT_ON, "brain");
|
|
19
|
+
|
|
20
|
+
/**
 * Create a directory (and any missing parents) if it does not already exist.
 * @param {string} dir - Directory path
 */
function ensureDir(dir) {
  if (existsSync(dir)) return;
  mkdirSync(dir, { recursive: true });
}
|
|
23
|
+
|
|
24
|
+
// ── Write .0n Connection Files ──────────────────────────────
|
|
25
|
+
|
|
26
|
+
/**
 * Write service credentials as .0n connection files.
 *
 * File names are sanitized the same way writeWorkflowFiles sanitizes
 * workflow names, so a service key containing path separators or other
 * unsafe characters cannot write outside the target directory. The
 * original (unsanitized) service name is preserved inside the file
 * under $0n.service, which is what the collector reads back.
 *
 * @param {Object} credentials - { service: { field: value } }
 * @param {string} dir - Target connections directory
 * @returns {{ written: string[], skipped: string[] }}
 */
function writeConnectionFiles(credentials, dir) {
  ensureDir(dir);
  const written = [];
  const skipped = [];

  for (const [service, creds] of Object.entries(credentials)) {
    try {
      const connection = {
        $0n: {
          type: "connection",
          version: "1.0.0",
          service,
          created: new Date().toISOString(),
          source: "business_deed_import",
        },
        credentials: creds,
      };

      // Sanitize the file name (consistent with writeWorkflowFiles) to
      // avoid path traversal via untrusted service keys.
      const safeName = service.replace(/[^a-zA-Z0-9_-]/g, "_");
      const filePath = join(dir, `${safeName}.0n`);
      writeFileSync(filePath, JSON.stringify(connection, null, 2));
      written.push(service);
    } catch (err) {
      skipped.push(`${service}: ${err.message}`);
    }
  }

  return { written, skipped };
}
|
|
61
|
+
|
|
62
|
+
// ── Generate .env File ──────────────────────────────────────
|
|
63
|
+
|
|
64
|
+
/**
 * Write environment variables to a .env file.
 *
 * Values containing whitespace or special characters are wrapped in
 * double quotes; backslashes and embedded double quotes are escaped so
 * the generated file parses back to the original values. (Previously a
 * value containing `"` was quoted without escaping, producing a corrupt
 * .env line.) Non-string values are coerced with String().
 *
 * @param {Object} envVars - { KEY: value }
 * @param {string} filePath - Output .env path
 * @returns {{ written: number, file: string }}
 */
function writeEnvFile(envVars, filePath) {
  const lines = [
    `# Generated by 0nMCP Business Deed Import`,
    `# ${new Date().toISOString()}`,
    "",
  ];

  for (const [key, rawValue] of Object.entries(envVars)) {
    const value = String(rawValue);
    // Quote values that contain spaces or special chars.
    if (/[\s#"'\\]/.test(value)) {
      // Escape backslashes first, then quotes, so quoting round-trips.
      const escaped = value.replace(/\\/g, "\\\\").replace(/"/g, '\\"');
      lines.push(`${key}="${escaped}"`);
    } else {
      lines.push(`${key}=${value}`);
    }
  }

  writeFileSync(filePath, lines.join("\n") + "\n");
  return { written: Object.keys(envVars).length, file: filePath };
}
|
|
87
|
+
|
|
88
|
+
// ── Write Workflow Files ────────────────────────────────────
|
|
89
|
+
|
|
90
|
+
/**
 * Write workflow definitions to ~/.0n/workflows/.
 *
 * Accepts a single workflow object or an array of them. Unnamed
 * workflows get a generated name that includes the item index, so two
 * unnamed workflows written in the same millisecond can no longer
 * overwrite each other. Invalid items are skipped silently
 * (best-effort import).
 *
 * @param {Array|Object} workflows - Workflow definitions
 * @param {string} dir - Target workflows directory
 * @returns {{ written: string[], count: number }}
 */
function writeWorkflowFiles(workflows, dir) {
  ensureDir(dir);
  const written = [];

  const items = Array.isArray(workflows) ? workflows : [workflows];

  for (let i = 0; i < items.length; i++) {
    const workflow = items[i];
    try {
      // Index-suffixed fallback keeps generated names unique within a batch.
      const name = workflow?.$0n?.name || workflow?.name || `workflow-${Date.now()}-${i}`;
      // String() guards against non-string name fields; sanitize for the filesystem.
      const safeName = String(name).replace(/[^a-zA-Z0-9_-]/g, "_");
      const filePath = join(dir, `${safeName}.0n`);
      writeFileSync(filePath, JSON.stringify(workflow, null, 2));
      written.push(safeName);
    } catch {
      // Skip invalid workflows.
    }
  }

  return { written, count: written.length };
}
|
|
117
|
+
|
|
118
|
+
// ── Write MCP Configs ───────────────────────────────────────
|
|
119
|
+
|
|
120
|
+
/**
 * Write MCP platform configurations to disk.
 *
 * Each entry under mcpConfigs.platforms becomes mcp-<platform>.json;
 * if a top-level "servers" key is present, the entire config object is
 * also written as mcp-config.json. Write failures are skipped silently.
 *
 * @param {Object} mcpConfigs - MCP configuration data
 * @param {string} targetDir - Target directory
 * @returns {{ written: string[] }}
 */
function writeMcpConfigs(mcpConfigs, targetDir) {
  const written = [];

  // Shared write-one-JSON-file path; failures are silently skipped.
  const writeJson = (fileName, payload, label) => {
    try {
      writeFileSync(join(targetDir, fileName), JSON.stringify(payload, null, 2));
      written.push(label);
    } catch {
      // Skip unwritable entries.
    }
  };

  const platforms = mcpConfigs.platforms;
  if (platforms && typeof platforms === "object") {
    for (const [platform, config] of Object.entries(platforms)) {
      writeJson(`mcp-${platform}.json`, config, platform);
    }
  }

  // Write the raw config as a whole if it declares servers.
  if (mcpConfigs.servers) {
    writeJson("mcp-config.json", mcpConfigs, "mcp-config");
  }

  return { written };
}
|
|
155
|
+
|
|
156
|
+
// ── Write AI Brain Data ─────────────────────────────────────
|
|
157
|
+
|
|
158
|
+
/**
 * Write AI brain data to ~/.0n/brain/, one JSON file per section.
 * Sections that fail to serialize or write are skipped silently.
 *
 * @param {Object} aiBrain - AI brain data keyed by section name
 * @param {string} dir - Target brain directory
 * @returns {{ written: string[] }}
 */
function writeBrainData(aiBrain, dir) {
  ensureDir(dir);
  const written = [];

  // Each brain section becomes its own <section>.json file.
  for (const section of Object.keys(aiBrain)) {
    try {
      writeFileSync(join(dir, `${section}.json`), JSON.stringify(aiBrain[section], null, 2));
      written.push(section);
    } catch {
      // Skip sections that cannot be written.
    }
  }

  return { written };
}
|
|
182
|
+
|
|
183
|
+
// ── Master Import Function ──────────────────────────────────
|
|
184
|
+
|
|
185
|
+
/**
 * Import all deed layers into the live system.
 *
 * Each layer is imported independently; a failure in one layer is
 * recorded in report.errors and does not abort the others. A layer is
 * skipped when absent or when decryption left an { error } marker in
 * its place.
 *
 * @param {Object} layers - Decrypted layer data
 * @param {string} [targetDir] - Base target directory (default: ~/.0n/)
 * @returns {Object} Import report (success, per-layer results, errors)
 */
export function importDeedToSystem(layers, targetDir = null) {
  const baseDir = targetDir || DOT_ON;
  const connectionsDir = join(baseDir, "connections");
  const workflowsDir = join(baseDir, "workflows");
  const brainDir = join(baseDir, "brain");

  ensureDir(baseDir);

  const report = {
    success: true,
    timestamp: new Date().toISOString(),
    connections: { written: [], skipped: [] },
    envFile: null,
    workflows: { written: [], count: 0 },
    mcpConfigs: { written: [] },
    brain: { written: [] },
    siteProfiles: null,
    deed: null,
    errors: [],
  };

  // Tolerate a null/undefined layers argument.
  const src = layers ?? {};

  // True when a layer is present, is an object, and is not an { error }
  // marker left by a failed decryption.
  const usable = (layer) => layer && typeof layer === "object" && !layer.error;

  // Run one import step, converting any thrown error into a report entry.
  const step = (label, fn) => {
    try {
      fn();
    } catch (err) {
      report.errors.push(`${label}: ${err.message}`);
    }
  };

  // 1. Import credentials as .0n connection files.
  if (usable(src.credentials)) {
    step("credentials", () => {
      report.connections = writeConnectionFiles(src.credentials, connectionsDir);
    });
  }

  // 2. Generate .env file from env_vars.
  if (usable(src.env_vars)) {
    step("env_vars", () => {
      report.envFile = writeEnvFile(src.env_vars, join(baseDir, ".env.deed"));
    });
  }

  // 3. Write workflows (may be an array, so no typeof-object check here).
  if (src.workflows && !src.workflows.error) {
    step("workflows", () => {
      report.workflows = writeWorkflowFiles(src.workflows, workflowsDir);
    });
  }

  // 4. Write MCP configs.
  if (usable(src.mcp_configs)) {
    step("mcp_configs", () => {
      report.mcpConfigs = writeMcpConfigs(src.mcp_configs, baseDir);
    });
  }

  // 5. Write site profiles as a single JSON snapshot.
  if (usable(src.site_profiles)) {
    step("site_profiles", () => {
      const profilePath = join(baseDir, "site-profiles.json");
      writeFileSync(profilePath, JSON.stringify(src.site_profiles, null, 2));
      report.siteProfiles = { file: profilePath };
    });
  }

  // 6. Write AI brain data.
  if (usable(src.ai_brain)) {
    step("ai_brain", () => {
      report.brain = writeBrainData(src.ai_brain, brainDir);
    });
  }

  // 7. Extract deed metadata from the audit trail.
  if (src.audit_trail?.deed) {
    report.deed = src.audit_trail.deed;
  }

  report.success = report.errors.length === 0;
  return report;
}
|