nothumanallowed 14.0.2 → 14.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/constants.mjs +1 -1
- package/src/server/index.mjs +1 -0
- package/src/server/routes/connectors.mjs +238 -0
- package/src/ui-dist/assets/{index-B331y0W4.js → index-BNb4mQKL.js} +11 -11
- package/src/ui-dist/assets/index-C_jbotpR.css +1 -0
- package/src/ui-dist/index.html +2 -2
- package/src/ui-dist/assets/index-tVEci8V1.css +0 -1
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "nothumanallowed",
|
|
3
|
-
"version": "14.0.2",
|
|
3
|
+
"version": "14.0.3",
|
|
4
4
|
"description": "NotHumanAllowed — 38 AI agents, 80 tools, Studio (visual agentic workflows). Email, calendar, browser automation, screen capture, canvas, cron/heartbeat, Alexandria E2E messaging, GitHub, Notion, Slack, voice chat, free AI (Liara), 28 languages. Zero-dependency CLI.",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"bin": {
|
package/src/constants.mjs
CHANGED
|
@@ -5,7 +5,7 @@ import { fileURLToPath } from 'url';
|
|
|
5
5
|
const __filename = fileURLToPath(import.meta.url);
|
|
6
6
|
const __dirname = path.dirname(__filename);
|
|
7
7
|
|
|
8
|
-
export const VERSION = '14.0.2';
|
|
8
|
+
export const VERSION = '14.0.3';
|
|
9
9
|
export const BASE_URL = 'https://nothumanallowed.com/cli';
|
|
10
10
|
export const API_BASE = 'https://nothumanallowed.com/api/v1';
|
|
11
11
|
|
package/src/server/index.mjs
CHANGED
|
@@ -186,6 +186,7 @@ async function buildRouter() {
|
|
|
186
186
|
import('./routes/integrations.mjs'),
|
|
187
187
|
import('./routes/collab.mjs'),
|
|
188
188
|
import('./routes/google-auth.mjs'),
|
|
189
|
+
import('./routes/connectors.mjs'),
|
|
189
190
|
]);
|
|
190
191
|
for (const mod of mods) mod.register(router);
|
|
191
192
|
return router;
|
|
@@ -0,0 +1,238 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Connectors — visual workflow automation backend
|
|
3
|
+
* Workflows stored in ~/.nha/workflows/*.json
|
|
4
|
+
* Execution: each node runs a real NHA tool or AI call
|
|
5
|
+
*/
|
|
6
|
+
import fs from 'fs';
|
|
7
|
+
import path from 'path';
|
|
8
|
+
import { NHA_DIR } from '../../constants.mjs';
|
|
9
|
+
import { loadConfig } from '../../config.mjs';
|
|
10
|
+
import { sendJSON, sendError, parseBody } from '../index.mjs';
|
|
11
|
+
import { executeTool } from '../../services/tool-executor.mjs';
|
|
12
|
+
import { callLLM } from '../../services/llm.mjs';
|
|
13
|
+
|
|
14
|
+
const WORKFLOWS_DIR = path.join(NHA_DIR, 'workflows');
|
|
15
|
+
|
|
16
|
+
/** Ensure the on-disk workflows directory exists before any read/write. */
function ensureDir() {
  if (fs.existsSync(WORKFLOWS_DIR)) return;
  fs.mkdirSync(WORKFLOWS_DIR, { recursive: true });
}
|
|
19
|
+
|
|
20
|
+
/**
 * Load every parseable workflow JSON file from WORKFLOWS_DIR.
 * Files that fail to parse (or parse to a falsy value) are silently
 * skipped — listing is best-effort.
 *
 * @returns {Array<object>} parsed workflow objects
 */
function listWorkflows() {
  ensureDir();
  const workflows = [];
  for (const file of fs.readdirSync(WORKFLOWS_DIR)) {
    if (!file.endsWith('.json')) continue;
    try {
      const parsed = JSON.parse(fs.readFileSync(path.join(WORKFLOWS_DIR, file), 'utf-8'));
      // Falsy parses (e.g. a file containing just "null") are dropped,
      // matching the original .filter(Boolean) behavior.
      if (parsed) workflows.push(parsed);
    } catch {
      // Corrupt/partial file — skip it rather than fail the whole listing.
    }
  }
  return workflows;
}
|
|
30
|
+
|
|
31
|
+
/**
 * Persist a workflow object to WORKFLOWS_DIR/<id>.json (pretty-printed).
 *
 * Security: wf.id originates from client request bodies (POST/PUT routes),
 * so it is validated as a plain file basename first — previously an id like
 * "../../x" was joined into the path unchecked, letting a request write
 * "<anything>.json" outside the workflows directory (path traversal).
 *
 * @param {{id: string}} wf - workflow to write; wf.id names the file
 * @throws {Error} when wf.id is missing or not a safe file basename
 */
function saveWorkflow(wf) {
  const id = wf?.id;
  // Allow only plain basenames: word chars, dots, dashes — never "." / ".."
  // or anything containing a path separator.
  if (typeof id !== 'string' || id === '' || id === '.' || id === '..' || !/^[\w.-]+$/.test(id)) {
    throw new Error(`Invalid workflow id: ${String(id)}`);
  }
  ensureDir();
  fs.writeFileSync(path.join(WORKFLOWS_DIR, `${id}.json`), JSON.stringify(wf, null, 2));
}
|
|
35
|
+
|
|
36
|
+
/**
 * Delete a stored workflow definition by id.
 *
 * Security: rejects ids that could escape WORKFLOWS_DIR (path separators,
 * "..", empty) — previously id = "../../x" from the DELETE route unlinked
 * arbitrary "<anything>.json" files outside the workflows directory.
 *
 * @param {string} id - workflow id (plain basename: [A-Za-z0-9_.-])
 * @throws {Error} when the id is missing or not a safe file basename
 */
function deleteWorkflow(id) {
  // Allow only plain basenames: no separators, no parent-directory segments.
  if (typeof id !== 'string' || id === '' || id === '.' || id === '..' || !/^[\w.-]+$/.test(id)) {
    throw new Error(`Invalid workflow id: ${String(id)}`);
  }
  const p = path.join(WORKFLOWS_DIR, `${id}.json`);
  if (fs.existsSync(p)) fs.unlinkSync(p);
}
|
|
40
|
+
|
|
41
|
+
/**
 * Substitute {{varName}} placeholders in a string using a context map.
 *
 * Only the context's OWN properties are substituted: previously `ctx[k]`
 * also resolved inherited keys, so "{{constructor}}" / "{{toString}}"
 * leaked stringified Object.prototype members into the output. Unknown
 * placeholders and null/undefined values collapse to ''.
 *
 * @param {*} str - value to interpolate; returned unchanged when not a string
 * @param {Object<string, *>} ctx - placeholder name → replacement value
 * @returns {*} interpolated string, or `str` verbatim when non-string
 */
function interpolate(str, ctx) {
  if (typeof str !== 'string') return str;
  return str.replace(/\{\{(\w+)\}\}/g, (_, k) =>
    Object.hasOwn(ctx, k) ? ctx[k] ?? '' : ''
  );
}
|
|
46
|
+
|
|
47
|
+
/**
 * Return a shallow copy of `obj` with every top-level string value run
 * through interpolate(). Non-string values (including nested objects) are
 * copied untouched; non-object inputs are returned as-is.
 *
 * @param {object} obj - object whose string values get placeholder expansion
 * @param {Object<string, *>} ctx - placeholder name → replacement value
 * @returns {object} new object with interpolated string values
 */
function interpolateObj(obj, ctx) {
  if (!obj || typeof obj !== 'object') return obj;
  return Object.fromEntries(
    Object.entries(obj).map(([key, value]) => [
      key,
      typeof value === 'string' ? interpolate(value, ctx) : value,
    ])
  );
}
|
|
56
|
+
|
|
57
|
+
/**
 * Execute a single workflow node and return its string output.
 *
 * Node kinds, checked in this order:
 *  - 'ai'      → prompt (after {{var}} interpolation of node.config) sent to callLLM
 *  - action_*  → defId mapped onto a concrete NHA tool via executeTool
 *  - 'trigger' → entry point; forwards the current context output
 * Anything else yields ''.
 *
 * @param {{defId: string, config?: object}} node - node instance from the workflow
 * @param {{type: string}} nodeDef - node definition (kind metadata)
 * @param {{output?: string, input?: string}} ctx - accumulated run context ({{var}} source)
 * @param {object} config - loaded NHA config, forwarded to tools/LLM
 * @returns {Promise<string>} node output ('' when nothing applies)
 */
async function executeNode(node, nodeDef, ctx, config) {
  const cfg = interpolateObj(node.config ?? {}, ctx);

  // AI node: free-form prompt processed by the configured LLM.
  if (nodeDef.type === 'ai') {
    const prompt = cfg.prompt || `Process this: ${ctx.output || ''}`;
    const systemPrompt =
      cfg.systemPrompt ||
      'You are a helpful AI assistant. Process the input and return a concise result.';
    const result = await callLLM(config, systemPrompt, prompt);
    return result?.content || result || '';
  }

  // Action node: translate the defId into a [tool, args] invocation.
  // NOTE: `||` fallbacks mean an empty-string config value falls back too.
  const action = (() => {
    switch (node.defId) {
      case 'action_email':
        return ['gmail_send', { to: cfg.to, subject: cfg.subject || 'NHA Workflow', body: cfg.body || ctx.output }];
      case 'action_slack':
        return ['slack_message', { channel: cfg.channel || '#general', text: cfg.text || ctx.output }];
      case 'action_calendar':
        return ['calendar_create', { title: cfg.title || ctx.output, date: cfg.date || new Date().toISOString().split('T')[0], time: cfg.time || '09:00', duration: cfg.duration || '60' }];
      case 'action_task':
        return ['task_create', { title: cfg.title || ctx.output, priority: cfg.priority || 'medium' }];
      case 'action_drive':
        return ['drive_upload', { name: cfg.name || 'workflow-output.txt', content: cfg.content || ctx.output }];
      case 'action_notion':
        return ['notion_page', { title: cfg.title || 'Workflow Output', content: cfg.content || ctx.output }];
      case 'action_github':
        return ['github_issue', { repo: cfg.repo, title: cfg.title || ctx.output, body: cfg.body || '' }];
      case 'action_webhook':
        return ['fetch_url', { url: cfg.url, method: cfg.method || 'POST', body: cfg.body || ctx.output }];
      default:
        return null;
    }
  })();

  if (action) {
    const [tool, args] = action;
    try {
      return String((await executeTool(tool, args, config)) ?? '');
    } catch (e) {
      // Tool failures become visible step output instead of aborting the run.
      return `Error: ${e.message}`;
    }
  }

  // Trigger node: produces no work of its own — just forwards the run input.
  if (nodeDef.type === 'trigger') {
    return ctx.output || cfg.input || '';
  }

  return '';
}
|
|
101
|
+
|
|
102
|
+
/**
 * Run a workflow graph from its entry node(s) to completion (BFS order).
 *
 * Entry nodes are those with no incoming edge; each visited node executes at
 * most once (the `visited` set also guards against cycles — when branches
 * merge, the child runs with whichever parent context dequeued it first).
 * Per-node failures are captured in the step record rather than aborting.
 *
 * @param {{nodes: Array, nodeDefs?: Array, edges?: Array}} wf - workflow graph
 * @param {string} initialInput - seed value for ctx.input / ctx.output
 * @param {object} config - loaded NHA config, forwarded to executeNode
 * @returns {Promise<Array<{nodeId: string, output: string, error?: string}>>}
 *   ordered step results; [{ nodeId: '__error', ... }] when no entry node exists
 */
async function runWorkflow(wf, initialInput, config) {
  const steps = [];
  const nodesById = new Map(wf.nodes.map((n) => [n.id, n]));
  const defsById = new Map((wf.nodeDefs || []).map((d) => [d.id, d]));

  // Adjacency list: node id → ids of downstream nodes.
  const successors = new Map();
  for (const edge of wf.edges ?? []) {
    if (!successors.has(edge.from)) successors.set(edge.from, []);
    successors.get(edge.from).push(edge.to);
  }

  // Entry points: nodes no edge points at.
  const targets = new Set((wf.edges ?? []).map((e) => e.to));
  const roots = wf.nodes.filter((n) => !targets.has(n.id));
  if (roots.length === 0) return [{ nodeId: '__error', output: 'No start node found.' }];

  const seed = { output: initialInput || '', input: initialInput || '' };
  const queue = roots.map((n) => ({ nodeId: n.id, ctx: seed }));
  const visited = new Set();

  while (queue.length > 0) {
    const { nodeId, ctx } = queue.shift();
    if (visited.has(nodeId)) continue;
    visited.add(nodeId);

    const node = nodesById.get(nodeId);
    if (!node) continue;
    const nodeDef = defsById.get(node.defId);
    if (!nodeDef) continue;

    let output = '';
    let error = null;
    try {
      output = await executeNode(node, nodeDef, ctx, config);
    } catch (e) {
      error = e.message;
      output = '';
    }

    steps.push({ nodeId, nodeLabel: nodeDef.label, nodeIcon: nodeDef.icon, output, error });

    // Children see this node's result as both `output` and `<defId>_output`
    // (keyed by nodeDef.id, so nodes sharing a def share the context key).
    const childCtx = { ...ctx, output, [`${nodeDef.id}_output`]: output };
    for (const childId of successors.get(nodeId) ?? []) {
      queue.push({ nodeId: childId, ctx: childCtx });
    }
  }

  return steps;
}
|
|
156
|
+
|
|
157
|
+
/**
 * Register the connectors/workflows HTTP routes on the server router.
 *
 * Routes:
 *   GET    /api/workflows           — list all stored workflows
 *   POST   /api/workflows           — create (id auto-generated when absent)
 *   PUT    /api/workflows/:id       — update an existing workflow
 *   DELETE /api/workflows/:id       — delete by id
 *   POST   /api/workflows/:id/run   — execute and persist the run result
 *   GET    /api/workflows/:id/runs  — fetch the last persisted run
 *
 * Security: every id that ends up naming a file on disk is validated as a
 * plain basename first — previously `id` (from the URL or request body) was
 * joined into `path.join(WORKFLOWS_DIR, `${id}.json`)` unchecked, so
 * "../../<x>" could read, write, or delete arbitrary "*.json" files outside
 * ~/.nha/workflows (path traversal). Invalid ids now get a 400 response.
 *
 * @param {object} router - app router exposing get/post/put/delete(path, handler)
 */
export function register(router) {
  // A safe id is a non-empty file basename: word chars, dots, dashes — and
  // never "." / ".." or anything containing a path separator.
  const isSafeId = (id) =>
    typeof id === 'string' && id !== '' && id !== '.' && id !== '..' && /^[\w.-]+$/.test(id);

  // GET /api/workflows — list all workflows
  router.get('/api/workflows', async (req, res) => {
    try {
      sendJSON(res, 200, { workflows: listWorkflows() });
    } catch (e) {
      sendError(res, 500, e.message);
    }
  });

  // POST /api/workflows — create workflow (id generated when the client omits it)
  router.post('/api/workflows', async (req, res) => {
    try {
      const body = await parseBody(req);
      if (!body.id) body.id = `wf_${Date.now()}`;
      if (!isSafeId(body.id)) return sendError(res, 400, 'Invalid workflow id');
      body.createdAt = body.createdAt || new Date().toISOString();
      body.updatedAt = new Date().toISOString();
      saveWorkflow(body);
      sendJSON(res, 200, { ok: true, workflow: body });
    } catch (e) {
      sendError(res, 500, e.message);
    }
  });

  // PUT /api/workflows/:id — update workflow
  // NOTE(review): the stored filename comes from body.id, not the :id route
  // param — a mismatched body.id effectively renames the workflow; confirm intended.
  router.put('/api/workflows/:id', async (req, res) => {
    try {
      const body = await parseBody(req);
      if (!isSafeId(body.id)) return sendError(res, 400, 'Invalid workflow id');
      body.updatedAt = new Date().toISOString();
      saveWorkflow(body);
      sendJSON(res, 200, { ok: true, workflow: body });
    } catch (e) {
      sendError(res, 500, e.message);
    }
  });

  // DELETE /api/workflows/:id
  router.delete('/api/workflows/:id', async (req, res) => {
    const id = req.params?.id;
    if (!isSafeId(id)) return sendError(res, 400, 'Invalid workflow id');
    try {
      deleteWorkflow(id);
      sendJSON(res, 200, { ok: true });
    } catch (e) {
      sendError(res, 500, e.message);
    }
  });

  // POST /api/workflows/:id/run — execute workflow, persist result as lastRun
  router.post('/api/workflows/:id/run', async (req, res) => {
    const id = req.params?.id;
    if (!isSafeId(id)) return sendError(res, 400, 'Invalid workflow id');
    try {
      // Body is optional for runs — a bare POST runs with empty input.
      const body = await parseBody(req).catch(() => ({}));
      const wfPath = path.join(WORKFLOWS_DIR, `${id}.json`);
      if (!fs.existsSync(wfPath)) return sendError(res, 404, 'Workflow not found');

      const wf = JSON.parse(fs.readFileSync(wfPath, 'utf-8'));
      const config = loadConfig();
      const steps = await runWorkflow(wf, body.input || '', config);

      // Save last run result in workflow
      wf.lastRun = { at: new Date().toISOString(), steps };
      saveWorkflow(wf);

      sendJSON(res, 200, { ok: true, steps });
    } catch (e) {
      sendError(res, 500, e.message);
    }
  });

  // GET /api/workflows/:id/runs — last run results
  router.get('/api/workflows/:id/runs', async (req, res) => {
    const id = req.params?.id;
    if (!isSafeId(id)) return sendError(res, 400, 'Invalid workflow id');
    try {
      const wfPath = path.join(WORKFLOWS_DIR, `${id}.json`);
      if (!fs.existsSync(wfPath)) return sendError(res, 404, 'Workflow not found');
      const wf = JSON.parse(fs.readFileSync(wfPath, 'utf-8'));
      sendJSON(res, 200, { lastRun: wf.lastRun || null });
    } catch (e) {
      sendError(res, 500, e.message);
    }
  });
}
|