0nmcp 1.6.0 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +24 -23
- package/cli.js +667 -1
- package/command-runner.js +224 -0
- package/commands.js +115 -0
- package/connections.js +3 -1
- package/engine/app-builder.js +318 -0
- package/engine/app-server.js +471 -0
- package/engine/application.js +205 -0
- package/engine/bundler.js +13 -0
- package/engine/index.js +281 -3
- package/engine/operations.js +227 -0
- package/engine/scheduler.js +270 -0
- package/index.js +8 -1
- package/lib/badges.json +1 -1
- package/lib/stats.json +4 -3
- package/package.json +45 -6
- package/server.js +2 -2
- package/vault/container.js +479 -0
- package/vault/crypto-container.js +278 -0
- package/vault/escrow.js +227 -0
- package/vault/layers.js +254 -0
- package/vault/registry.js +159 -0
- package/vault/seal.js +74 -0
- package/vault/tools-container.js +356 -0
- package/workflow.js +36 -4
|
@@ -0,0 +1,224 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* ═══════════════════════════════════════════════════════════════════════════
|
|
3
|
+
* 0nMCP — Command Runner
|
|
4
|
+
* ═══════════════════════════════════════════════════════════════════════════
|
|
5
|
+
*
|
|
6
|
+
* Executes named RUNs (command aliases) defined in the SWITCH file.
|
|
7
|
+
*
|
|
8
|
+
* Pipeline commands run built-in actions sequentially:
|
|
9
|
+
* verify, platforms, list, serve, connect
|
|
10
|
+
*
|
|
11
|
+
* Workflow commands run .0n workflow files via the WorkflowRunner.
|
|
12
|
+
*
|
|
13
|
+
* ═══════════════════════════════════════════════════════════════════════════
|
|
14
|
+
*/
|
|
15
|
+
|
|
16
|
+
import path from 'path';
|
|
17
|
+
import os from 'os';
|
|
18
|
+
import fs from 'fs';
|
|
19
|
+
import { resolveInputs, parseCommandArgs } from './commands.js';
|
|
20
|
+
|
|
21
|
+
const DOT_ON_DIR = path.join(os.homedir(), '.0n');
|
|
22
|
+
|
|
23
|
+
// Colors
|
|
24
|
+
const c = {
|
|
25
|
+
reset: '\x1b[0m',
|
|
26
|
+
bright: '\x1b[1m',
|
|
27
|
+
red: '\x1b[31m',
|
|
28
|
+
green: '\x1b[32m',
|
|
29
|
+
yellow: '\x1b[33m',
|
|
30
|
+
blue: '\x1b[34m',
|
|
31
|
+
cyan: '\x1b[36m',
|
|
32
|
+
};
|
|
33
|
+
|
|
34
|
+
/**
 * Execute a named RUN (command alias) loaded from the SWITCH file.
 *
 * Prints a banner, dispatches on the definition's `type` field
 * ('pipeline' or 'workflow', defaulting to 'workflow'), reports elapsed
 * time on success, and exits the process with code 1 on an unknown type
 * or any thrown error.
 *
 * @param {string} alias - The command alias
 * @param {object} def - The command definition
 * @param {string[]} rawArgs - CLI arguments after the alias
 */
export async function runCommand(alias, def, rawArgs = []) {
  const commandType = def.type || 'workflow';
  const parsed = parseCommandArgs(rawArgs);

  console.log(`${c.bright}Running: ${c.cyan}${def.name || alias}${c.reset}`);
  if (def.description) console.log(` ${def.description}\n`);

  const startedAt = Date.now();

  try {
    if (commandType === 'pipeline') {
      await runPipeline(def, parsed.args, parsed.flags);
    } else if (commandType === 'workflow') {
      await runWorkflow(def, parsed.args, parsed.flags);
    } else {
      console.log(`${c.red}Unknown command type: ${commandType}${c.reset}`);
      process.exit(1);
    }

    const elapsed = Date.now() - startedAt;
    console.log(`\n${c.green}${c.bright}Done${c.reset} ${c.green}(${elapsed}ms)${c.reset}`);
  } catch (err) {
    console.log(`\n${c.red}${c.bright}Failed:${c.reset} ${c.red}${err.message}${c.reset}`);
    process.exit(1);
  }
}
|
|
69
|
+
|
|
70
|
+
/**
 * Execute a pipeline command: each step's built-in action runs in
 * order, with a `[n/total]` progress line before it and a ✓ line after.
 * Prints a warning and returns immediately when no steps are defined.
 */
async function runPipeline(def, args, flags) {
  const steps = def.steps || [];
  const total = steps.length;

  if (total === 0) {
    console.log(`${c.yellow}No steps defined in this command.${c.reset}`);
    return;
  }

  let position = 0;
  for (const step of steps) {
    position += 1;
    const action = step.action;
    const label = `[${position}/${total}]`;

    console.log(`${c.cyan}${label}${c.reset} ${step.description || action}...`);
    await executePipelineAction(action, step, args, flags);
    console.log(`${c.green} ✓${c.reset} ${action} complete\n`);
  }
}
|
|
93
|
+
|
|
94
|
+
/**
 * Execute a single built-in pipeline action.
 *
 * Supported actions: 'verify', 'platforms', 'list', 'serve'. Anything
 * else prints a yellow warning and is otherwise a no-op.
 * NOTE(review): the file header also lists a 'connect' action, but no
 * case for it exists here — confirm whether it is handled elsewhere.
 *
 * Action handlers are loaded lazily via dynamic import so unused
 * engine modules are never pulled in.
 *
 * @param {string} action - Built-in action name from the step definition
 * @param {object} step - Full step definition (may carry per-step config, e.g. port/host)
 * @param {string[]} args - Positional CLI args (not read by the current actions)
 * @param {Record<string, string>} flags - Named CLI flags (used by 'serve' for port/host overrides)
 */
async function executePipelineAction(action, step, args, flags) {
  switch (action) {
    case 'verify': {
      const { verifyAll } = await import('./engine/validator.js');
      const connectionsDir = path.join(DOT_ON_DIR, 'connections');

      if (!fs.existsSync(connectionsDir)) {
        console.log(` ${c.yellow}No connections found.${c.reset}`);
        return;
      }

      // Gather credentials from every unsealed .0n connection file;
      // unreadable or invalid files are skipped (best-effort).
      const files = fs.readdirSync(connectionsDir).filter(f => f.endsWith('.0n'));
      const connections = {};
      for (const file of files) {
        try {
          const data = JSON.parse(fs.readFileSync(path.join(connectionsDir, file), 'utf8'));
          if (data.$0n?.sealed) continue; // sealed vaults need a passphrase — cannot verify here
          connections[data.service] = { credentials: data.auth?.credentials || {} };
        } catch { /* skip */ }
      }

      if (Object.keys(connections).length === 0) {
        console.log(` ${c.yellow}No unsealed connections to verify.${c.reset}`);
        return;
      }

      // One ✓/✗ line per service, then a summary count.
      const { results, summary } = await verifyAll(connections);
      for (const [service, result] of Object.entries(results)) {
        const icon = result.valid ? `${c.green} ✓` : `${c.red} ✗`;
        const latency = result.latency_ms ? ` (${result.latency_ms}ms)` : '';
        console.log(`${icon}${c.reset} ${service}${latency}${result.error ? ` — ${result.error}` : ''}`);
      }
      console.log(` ${summary.valid}/${summary.total} valid`);
      break;
    }

    case 'platforms': {
      // Show each known platform and whether it is installed locally.
      const { getPlatformInfo } = await import('./engine/platforms.js');
      const info = getPlatformInfo();
      for (const p of info) {
        const status = p.installed ? `${c.green}installed${c.reset}` : `${c.blue}available${c.reset}`;
        console.log(` ${p.installed ? '●' : '○'} ${p.name} (${status})`);
      }
      break;
    }

    case 'list': {
      // List every .0n connection file under ~/.0n/connections.
      const connectionsDir = path.join(DOT_ON_DIR, 'connections');
      if (!fs.existsSync(connectionsDir)) {
        console.log(` ${c.yellow}No connections found.${c.reset}`);
        return;
      }

      const files = fs.readdirSync(connectionsDir).filter(f => f.endsWith('.0n'));
      for (const file of files) {
        try {
          const data = JSON.parse(fs.readFileSync(path.join(connectionsDir, file), 'utf8'));
          console.log(` ${c.green}●${c.reset} ${data.$0n?.name || data.service} (${file})`);
        } catch {
          // Unparseable file: still listed, but flagged as an error.
          console.log(` ${c.red}●${c.reset} ${file} (error)`);
        }
      }
      break;
    }

    case 'serve': {
      // Precedence for port/host: step config, then CLI flag, then default.
      const port = step.port || flags.port || 3001;
      const host = step.host || flags.host || '0.0.0.0';
      console.log(` Starting server on ${host}:${port}...`);
      const { startServer } = await import('./server.js');
      // NOTE(review): if startServer resolves only on shutdown, any
      // pipeline steps after 'serve' never run — confirm intended.
      await startServer({ port: Number(port), host: String(host) });
      break;
    }

    default:
      console.log(` ${c.yellow}Unknown pipeline action: ${action}${c.reset}`);
  }
}
|
|
175
|
+
|
|
176
|
+
/**
 * Execute a workflow command — runs a .0n workflow file via the
 * WorkflowRunner.
 *
 * Resolves CLI args/flags into workflow inputs (per the def.inputs
 * templates), runs the workflow, prints a result summary (step counts,
 * duration, outputs, per-step errors), and exits the process with code
 * 1 when no workflow is configured or the run fails.
 *
 * @param {object} def - Command definition; def.workflow names the workflow to run
 * @param {string[]} args - Positional CLI args fed to the input templates
 * @param {Record<string, string>} flags - Named CLI flags fed to the input templates
 */
async function runWorkflow(def, args, flags) {
  const workflowName = def.workflow;
  if (!workflowName) {
    console.log(`${c.red}No workflow specified in command definition.${c.reset}`);
    process.exit(1);
  }

  // Resolve inputs from CLI args/flags
  const inputs = def.inputs ? resolveInputs(def.inputs, args, flags) : {};

  // Load the heavier runtime pieces lazily, only when actually running.
  const { ConnectionManager } = await import('./connections.js');
  const { WorkflowRunner } = await import('./workflow.js');

  const connections = new ConnectionManager();
  const runner = new WorkflowRunner(connections);

  // Echo the resolved inputs so the user can see what the templates produced.
  if (Object.keys(inputs).length > 0) {
    console.log(`${c.bright}Inputs:${c.reset}`, JSON.stringify(inputs, null, 2));
    console.log('');
  }

  // NOTE(review): workflowName is passed as workflowPath — presumably
  // the runner resolves bare names against ~/.0n/workflows/; confirm.
  const result = await runner.run({ workflowPath: workflowName, inputs });

  if (result.success) {
    console.log(`${c.green}${c.bright}Workflow completed${c.reset}`);
  } else {
    console.log(`${c.red}${c.bright}Workflow failed${c.reset}`);
  }

  console.log(` Steps: ${result.stepsSuccessful}/${result.stepsExecuted} successful`);
  console.log(` Duration: ${result.duration}ms`);

  if (result.outputs && Object.keys(result.outputs).length > 0) {
    console.log(`\n${c.bright}Outputs:${c.reset}`);
    console.log(JSON.stringify(result.outputs, null, 2));
  }

  if (result.errors?.length > 0) {
    console.log(`\n${c.red}Errors:${c.reset}`);
    for (const err of result.errors) {
      console.log(` ${c.red}●${c.reset} ${err.service}.${err.action}: ${err.error}`);
    }
  }

  // Propagate failure to the shell.
  if (!result.success) process.exit(1);
}
|
package/commands.js
ADDED
|
@@ -0,0 +1,115 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* ═══════════════════════════════════════════════════════════════════════════
|
|
3
|
+
* 0nMCP — Named Runs / Command Aliases
|
|
4
|
+
* ═══════════════════════════════════════════════════════════════════════════
|
|
5
|
+
*
|
|
6
|
+
* Loads command aliases from the SWITCH file (~/.0n/0n-setup.0n) and makes
|
|
7
|
+
* them available as CLI commands: `0nmcp launch`, `0nmcp hello`, etc.
|
|
8
|
+
*
|
|
9
|
+
* Command types:
|
|
10
|
+
* - pipeline: Sequential built-in actions (verify, platforms, list, serve)
|
|
11
|
+
* - workflow: Run a named .0n workflow from ~/.0n/workflows/
|
|
12
|
+
*
|
|
13
|
+
* ═══════════════════════════════════════════════════════════════════════════
|
|
14
|
+
*/
|
|
15
|
+
|
|
16
|
+
import { readFileSync, existsSync } from 'fs';
|
|
17
|
+
import { join } from 'path';
|
|
18
|
+
import { homedir } from 'os';
|
|
19
|
+
|
|
20
|
+
const DOT_ON_DIR = join(homedir(), '.0n');
|
|
21
|
+
const DEFAULT_SWITCH = join(DOT_ON_DIR, '0n-setup.0n');
|
|
22
|
+
|
|
23
|
+
/**
 * Load commands from a SWITCH file.
 *
 * Reads the SWITCH file (JSON) and returns its `commands` object as a
 * Map of alias → definition. Every failure mode — missing file,
 * unreadable file, invalid JSON, or a missing/invalid `commands`
 * field — yields an empty Map, so callers never have to handle errors
 * (best-effort by design).
 *
 * @param {string} [switchPath] - Path to SWITCH file. Defaults to ~/.0n/0n-setup.0n
 * @returns {Map<string, object>} Map of alias → command definition
 */
export function loadCommands(switchPath) {
  const filePath = switchPath || DEFAULT_SWITCH;
  if (!existsSync(filePath)) return new Map();

  try {
    const raw = readFileSync(filePath, 'utf8');
    const data = JSON.parse(raw);
    // `commands` must be a plain object keyed by alias. Arrays also
    // satisfy `typeof === 'object'` but would produce meaningless
    // numeric-index aliases ("0", "1", ...), so reject them explicitly.
    if (!data.commands || typeof data.commands !== 'object' || Array.isArray(data.commands)) {
      return new Map();
    }
    return new Map(Object.entries(data.commands));
  } catch {
    // Unreadable or malformed SWITCH file — treat as "no commands".
    return new Map();
  }
}
|
|
41
|
+
|
|
42
|
+
/**
 * List all available commands with metadata.
 *
 * Each entry carries the alias plus display fields, with the same
 * fallbacks the runner uses (name → alias, type → 'workflow').
 *
 * @param {string} [switchPath]
 * @returns {Array<{alias: string, name: string, description: string, type: string}>}
 */
export function listCommands(switchPath) {
  const entries = [];
  for (const [alias, def] of loadCommands(switchPath)) {
    entries.push({
      alias,
      name: def.name || alias,
      description: def.description || '',
      type: def.type || 'workflow',
    });
  }
  return entries;
}
|
|
56
|
+
|
|
57
|
+
/**
 * Parse CLI arguments into positional args and named flags.
 *
 * A `--key value` pair becomes flags.key = value; a bare `--switch`
 * (followed by nothing, an empty string, or another `--flag`) becomes
 * flags.switch = 'true'; everything else is a positional argument.
 *
 * @param {string[]} rawArgs - Arguments after the command name
 * @returns {{ args: string[], flags: Record<string, string> }}
 */
export function parseCommandArgs(rawArgs) {
  const args = [];
  const flags = {};

  let i = 0;
  while (i < rawArgs.length) {
    const token = rawArgs[i];
    if (token.startsWith('--')) {
      const next = rawArgs[i + 1];
      // A truthy follower that is not itself a flag is this flag's value.
      const hasValue = Boolean(next) && !next.startsWith('--');
      flags[token.slice(2)] = hasValue ? next : 'true';
      i += hasValue ? 2 : 1;
    } else {
      args.push(token);
      i += 1;
    }
  }

  return { args, flags };
}
|
|
79
|
+
|
|
80
|
+
/**
 * Resolve template expressions in command inputs.
 * Supports {{args.N}}, {{flags.key}}, and literal values. A template
 * whose referenced arg/flag is absent resolves to the raw template
 * text, so the workflow can detect "not provided".
 *
 * @param {Record<string, string>} inputs - Input template map
 * @param {string[]} args - Positional CLI args
 * @param {Record<string, string>} flags - Named CLI flags
 * @returns {Record<string, unknown>}
 */
export function resolveInputs(inputs, args, flags) {
  const resolved = {};

  for (const [key, template] of Object.entries(inputs)) {
    if (typeof template !== 'string') {
      // Non-string values (numbers, booleans, objects) pass through untouched.
      resolved[key] = template;
      continue;
    }

    // {{args.N}} → the Nth positional argument.
    const argsMatch = template.match(/^\{\{args\.(\d+)\}\}$/);
    if (argsMatch) {
      // Explicit radix: never rely on parseInt's default.
      const idx = Number.parseInt(argsMatch[1], 10);
      resolved[key] = args[idx] ?? template;
      continue;
    }

    // {{flags.key}} → the named flag's value.
    const flagsMatch = template.match(/^\{\{flags\.(\w+)\}\}$/);
    if (flagsMatch) {
      resolved[key] = flags[flagsMatch[1]] ?? template;
      continue;
    }

    // Pass through literal values
    resolved[key] = template;
  }

  return resolved;
}
|
package/connections.js
CHANGED
|
@@ -18,6 +18,7 @@ const SNAPSHOTS_DIR = join(DOT_ON, "snapshots");
|
|
|
18
18
|
const HISTORY_DIR = join(DOT_ON, "history");
|
|
19
19
|
const CACHE_DIR = join(DOT_ON, "cache");
|
|
20
20
|
const PLUGINS_DIR = join(DOT_ON, "plugins");
|
|
21
|
+
const APPS_DIR = join(DOT_ON, "apps");
|
|
21
22
|
const CONFIG_FILE = join(DOT_ON, "config.json");
|
|
22
23
|
|
|
23
24
|
// Legacy path for migration
|
|
@@ -28,7 +29,7 @@ const LEGACY_FILE = join(LEGACY_DIR, "connections.json");
|
|
|
28
29
|
* Initialize the ~/.0n/ directory structure.
|
|
29
30
|
*/
|
|
30
31
|
export function initDotOn() {
|
|
31
|
-
const dirs = [DOT_ON, CONNECTIONS_DIR, WORKFLOWS_DIR, SNAPSHOTS_DIR, HISTORY_DIR, CACHE_DIR, PLUGINS_DIR];
|
|
32
|
+
const dirs = [DOT_ON, CONNECTIONS_DIR, WORKFLOWS_DIR, SNAPSHOTS_DIR, HISTORY_DIR, CACHE_DIR, PLUGINS_DIR, APPS_DIR];
|
|
32
33
|
for (const dir of dirs) {
|
|
33
34
|
if (!existsSync(dir)) {
|
|
34
35
|
mkdirSync(dir, { recursive: true });
|
|
@@ -331,3 +332,4 @@ export const CONNECTIONS_PATH = CONNECTIONS_DIR;
|
|
|
331
332
|
export const HISTORY_PATH = HISTORY_DIR;
|
|
332
333
|
export const WORKFLOWS_PATH = WORKFLOWS_DIR;
|
|
333
334
|
export const SNAPSHOTS_PATH = SNAPSHOTS_DIR;
|
|
335
|
+
export const APPS_PATH = APPS_DIR;
|
|
@@ -0,0 +1,318 @@
|
|
|
1
|
+
// ============================================================
|
|
2
|
+
// 0nMCP -Engine: Application Builder
|
|
3
|
+
// ============================================================
|
|
4
|
+
// Creates, opens, inspects, and validates .0n application
|
|
5
|
+
// bundles ($0n.type: "application"). Analogous to bundler.js
|
|
6
|
+
// but for the full Application Engine format.
|
|
7
|
+
//
|
|
8
|
+
// Patent Pending: US Provisional Patent Application #63/968,814
|
|
9
|
+
// ============================================================
|
|
10
|
+
|
|
11
|
+
import { readFileSync, writeFileSync, existsSync, mkdirSync } from "fs";
|
|
12
|
+
import { join, basename } from "path";
|
|
13
|
+
import { createHash } from "crypto";
|
|
14
|
+
import { homedir } from "os";
|
|
15
|
+
import { sealPortable, unsealPortable } from "./cipher-portable.js";
|
|
16
|
+
import { validateOperations } from "./operations.js";
|
|
17
|
+
|
|
18
|
+
const APPS_DIR = join(homedir(), ".0n", "apps");
|
|
19
|
+
|
|
20
|
+
/**
 * Compute the hex-encoded SHA-256 digest of a string (or Buffer).
 */
function sha256(data) {
  const hasher = createHash("sha256");
  hasher.update(data);
  return hasher.digest("hex");
}
|
|
26
|
+
|
|
27
|
+
/**
 * Create a .0n application bundle.
 *
 * Seals every connection's credentials and every environment secret
 * with the passphrase (portable AES-256-GCM via sealPortable), builds
 * a manifest with per-section SHA-256 checksums, and writes the
 * resulting JSON bundle to disk (default: ~/.0n/apps/<name>-<ts>.0n).
 *
 * @param {object} options
 * @param {string} options.name - Application name
 * @param {string} options.passphrase - Encryption passphrase (required)
 * @param {Record<string, object>} [options.connections] - { service: { credentials, authType?, ... } }
 * @param {Record<string, object>} [options.operations] - Operation definitions
 * @param {Record<string, object>} [options.workflows] - Workflow definitions
 * @param {Record<string, object>} [options.endpoints] - Endpoint definitions
 * @param {Record<string, object>} [options.automations] - Automation definitions
 * @param {object} [options.environment] - { variables, secrets, settings, feature_flags }
 * @param {string} [options.output] - Output file path (overrides the default apps dir)
 * @param {string} [options.description]
 * @param {string} [options.author]
 * @param {string} [options.version]
 * @returns {{ bundle: object, path: string, manifest: object }}
 * @throws {Error} When no passphrase is supplied.
 */
export function createApplication(options) {
  const {
    name = "0n Application",
    passphrase,
    connections = {},
    operations = {},
    workflows = {},
    endpoints = {},
    automations = {},
    environment = {},
    output,
    description = "",
    author = "",
    version = "1.0.0",
  } = options;

  if (!passphrase) {
    throw new Error("Passphrase is required to create an application bundle.");
  }

  const now = new Date().toISOString();

  // Seal connections: only metadata (service, auth type, credential key
  // names) is stored in the clear; credential values live in the vault.
  const bundleConnections = [];
  for (const [service, conn] of Object.entries(connections)) {
    // Accept either { credentials: {...} } or a bare credentials object.
    const credJson = JSON.stringify(conn.credentials || conn);
    const { sealed } = sealPortable(credJson, passphrase);

    bundleConnections.push({
      service,
      name: conn.name || service,
      environment: conn.environment || "production",
      auth_type: conn.authType || conn.auth_type || "api_key",
      credential_keys: Object.keys(conn.credentials || conn),
      sealed: true,
      vault: {
        data: sealed,
        algorithm: "aes-256-gcm",
        kdf: "pbkdf2-sha512-100k",
        portable: true,
      },
    });
  }

  // Seal environment secrets; plain variables/settings/flags stay clear.
  const bundleEnvironment = {
    variables: environment.variables || {},
    secrets: {},
    settings: environment.settings || {},
    feature_flags: environment.feature_flags || {},
  };

  if (environment.secrets) {
    for (const [key, val] of Object.entries(environment.secrets)) {
      const { sealed } = sealPortable(String(val), passphrase);
      bundleEnvironment.secrets[key] = sealed;
    }
  }

  // Build manifest
  // NOTE(review): generator says "0nmcp-engine/1.7.0" while the package
  // is at 2.0.0 — confirm this string is intentionally pinned.
  const manifest = {
    bundle_version: "1.0.0",
    generator: "0nmcp-engine/1.7.0",
    type: "application",
    connection_count: bundleConnections.length,
    workflow_count: Object.keys(workflows).length,
    operation_count: Object.keys(operations).length,
    endpoint_count: Object.keys(endpoints).length,
    automation_count: Object.keys(automations).length,
    services: bundleConnections.map(c => c.service),
    encryption: {
      method: "portable",
      algorithm: "aes-256-gcm",
      kdf: "pbkdf2-sha512-100k",
    },
  };

  // Assemble bundle
  const bundle = {
    $0n: {
      type: "application",
      version,
      name,
      description,
      author,
      created: now,
      updated: now,
    },
    connections: bundleConnections,
    environment: bundleEnvironment,
    operations,
    workflows,
    endpoints,
    automations,
    platforms: {},
    includes: [],
    manifest,
  };

  // Compute checksums. Assigned after `bundle` is built, but `manifest`
  // is held by reference, so the checksums do end up inside the bundle.
  manifest.checksums = {
    connections: `sha256:${sha256(JSON.stringify(bundleConnections))}`,
    operations: `sha256:${sha256(JSON.stringify(operations))}`,
    workflows: `sha256:${sha256(JSON.stringify(workflows))}`,
    endpoints: `sha256:${sha256(JSON.stringify(endpoints))}`,
    automations: `sha256:${sha256(JSON.stringify(automations))}`,
  };

  // Write to file. Default name: slugified app name + second-precision
  // ISO timestamp with ':' and '.' replaced so it is filesystem-safe.
  if (!existsSync(APPS_DIR)) mkdirSync(APPS_DIR, { recursive: true });
  const ts = now.replace(/[:.]/g, "-").slice(0, 19);
  const safeName = name.toLowerCase().replace(/[^a-z0-9]+/g, "-").replace(/-+$/, "");
  const outPath = output || join(APPS_DIR, `${safeName}-${ts}.0n`);
  // join(outPath, "..") resolves to the parent directory of outPath.
  const outDir = join(outPath, "..");
  if (!existsSync(outDir)) mkdirSync(outDir, { recursive: true });

  writeFileSync(outPath, JSON.stringify(bundle, null, 2));

  return { bundle, path: outPath, manifest };
}
|
|
165
|
+
|
|
166
|
+
/**
 * Open a .0n application bundle and return the parsed data.
 *
 * Verifies up front that the passphrase can decrypt every sealed
 * connection; throws if the file is not an application bundle or if
 * any sealed connection fails to decrypt. The returned bundle still
 * contains the sealed vault data (nothing is decrypted in place).
 *
 * @param {string} bundlePath - Path to .0n application file
 * @param {string} passphrase - Decryption passphrase
 * @returns {object} Parsed bundle
 */
export function openApplication(bundlePath, passphrase) {
  const bundle = JSON.parse(readFileSync(bundlePath, "utf-8"));

  const kind = bundle.$0n?.type;
  if (kind !== "application") {
    throw new Error(`Not a .0n application file. Type: ${kind || "unknown"}`);
  }

  // Prove the passphrase works for every sealed connection.
  for (const conn of bundle.connections || []) {
    const sealedData = conn.sealed ? conn.vault?.data : undefined;
    if (!sealedData) continue;
    try {
      unsealPortable(sealedData, passphrase);
    } catch {
      throw new Error(`Failed to decrypt connection "${conn.service}" -wrong passphrase.`);
    }
  }

  return bundle;
}
|
|
194
|
+
|
|
195
|
+
/**
 * Inspect an application bundle without a passphrase.
 *
 * Returns metadata only: identity fields, per-connection summaries
 * (service, name, sealed flag, credential key names — never values),
 * and the key lists of each section. No decryption is performed.
 *
 * @param {string} bundlePath
 * @returns {object} Application metadata
 */
export function inspectApplication(bundlePath) {
  const bundle = JSON.parse(readFileSync(bundlePath, "utf-8"));

  if (bundle.$0n?.type !== "application") {
    throw new Error(`Not a .0n application file. Type: ${bundle.$0n?.type || "unknown"}`);
  }

  const meta = bundle.$0n;
  const env = bundle.environment || {};

  // Only non-secret connection fields are surfaced.
  const summarizeConnection = (conn) => ({
    service: conn.service,
    name: conn.name,
    sealed: conn.sealed,
    credential_keys: conn.credential_keys,
  });

  return {
    name: meta.name,
    version: meta.version,
    description: meta.description,
    author: meta.author,
    created: meta.created,
    updated: meta.updated,
    connections: (bundle.connections || []).map(summarizeConnection),
    workflows: Object.keys(bundle.workflows || {}),
    operations: Object.keys(bundle.operations || {}),
    endpoints: Object.keys(bundle.endpoints || {}),
    automations: Object.keys(bundle.automations || {}),
    environment: {
      variables: Object.keys(env.variables || {}),
      secrets: Object.keys(env.secrets || {}),
      settings: env.settings || {},
      feature_flags: env.feature_flags || {},
    },
    manifest: bundle.manifest,
  };
}
|
|
235
|
+
|
|
236
|
+
/**
 * Validate an application bundle's cross-references.
 *
 * Checks that endpoints and automations point at workflows that exist,
 * that workflow steps point at defined operations, that schedule
 * automations carry a 5-field cron expression, and that operation
 * definitions pass the shared validator. Operations that no workflow
 * references produce warnings rather than errors.
 *
 * @param {object} bundle - Parsed application bundle
 * @returns {{ valid: boolean, errors: string[], warnings: string[] }}
 */
export function validateApplication(bundle) {
  const errors = [];
  const warnings = [];

  if (!bundle.$0n || bundle.$0n.type !== "application") {
    return { valid: false, errors: ["Not a valid .0n application bundle."], warnings };
  }

  const workflows = bundle.workflows || {};
  const operations = bundle.operations || {};
  const endpoints = bundle.endpoints || {};
  const automations = bundle.automations || {};

  // Operation definitions are checked by the shared validator.
  if (Object.keys(operations).length > 0) {
    const opValidation = validateOperations(operations);
    errors.push(...opValidation.errors);
  }

  // Endpoints must name an existing workflow unless they use a
  // built-in handler (e.g., "health").
  for (const [route, endpoint] of Object.entries(endpoints)) {
    if (endpoint.handler) continue;
    if (endpoint.workflow && !workflows[endpoint.workflow]) {
      errors.push(`Endpoint "${route}" references unknown workflow: "${endpoint.workflow}"`);
    }
  }

  // Automations must name an existing workflow; schedule automations
  // additionally need a 5-field cron expression.
  for (const [autoId, automation] of Object.entries(automations)) {
    if (automation.workflow && !workflows[automation.workflow]) {
      errors.push(`Automation "${autoId}" references unknown workflow: "${automation.workflow}"`);
    }

    const cron = automation.type === "schedule" ? automation.config?.cron : undefined;
    if (cron) {
      try {
        const fieldCount = cron.trim().split(/\s+/).length;
        if (fieldCount !== 5) {
          errors.push(`Automation "${autoId}" has invalid cron: must have 5 fields`);
        }
      } catch (err) {
        errors.push(`Automation "${autoId}" has invalid cron: ${err.message}`);
      }
    }
  }

  // Workflow steps must reference defined operations.
  for (const [workflowId, workflow] of Object.entries(workflows)) {
    if (!Array.isArray(workflow.steps)) {
      errors.push(`Workflow "${workflowId}" has no steps array`);
      continue;
    }

    for (const step of workflow.steps) {
      if (step.operation && !operations[step.operation]) {
        errors.push(`Workflow "${workflowId}" step "${step.id}" references unknown operation: "${step.operation}"`);
      }
    }
  }

  // Warn about operations no workflow ever uses.
  const referencedOps = new Set();
  for (const workflow of Object.values(workflows)) {
    for (const step of workflow.steps || []) {
      if (step.operation) referencedOps.add(step.operation);
    }
  }
  for (const opId of Object.keys(operations)) {
    if (!referencedOps.has(opId)) {
      warnings.push(`Operation "${opId}" is defined but never referenced by any workflow`);
    }
  }

  return { valid: errors.length === 0, errors, warnings };
}
|