granclaw 0.0.1-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. package/LICENSE +21 -0
  2. package/README.md +61 -0
  3. package/bin/granclaw.js +2 -0
  4. package/dist/backend/agent/process.js +246 -0
  5. package/dist/backend/agent/runner-pi.js +993 -0
  6. package/dist/backend/agent/runner.js +334 -0
  7. package/dist/backend/agent/telegram-adapter.js +261 -0
  8. package/dist/backend/agent/telegram-http-client.js +133 -0
  9. package/dist/backend/agent-db.js +108 -0
  10. package/dist/backend/assets/stealth-extension/manifest.json +15 -0
  11. package/dist/backend/assets/stealth-extension/stealth.js +220 -0
  12. package/dist/backend/browser/session-manager.js +213 -0
  13. package/dist/backend/browser/stealth.js +140 -0
  14. package/dist/backend/browser-sessions.js +197 -0
  15. package/dist/backend/config.js +57 -0
  16. package/dist/backend/data-db.js +99 -0
  17. package/dist/backend/esm-import.js +25 -0
  18. package/dist/backend/index.js +53 -0
  19. package/dist/backend/lib/i18n-telegram.js +104 -0
  20. package/dist/backend/logs-db.js +51 -0
  21. package/dist/backend/messages-db.js +112 -0
  22. package/dist/backend/orchestrator/agent-manager.js +139 -0
  23. package/dist/backend/orchestrator/browser-live.js +533 -0
  24. package/dist/backend/orchestrator/server.js +1669 -0
  25. package/dist/backend/providers-config.js +138 -0
  26. package/dist/backend/routes/logs.js +20 -0
  27. package/dist/backend/scheduler.js +66 -0
  28. package/dist/backend/schedules-db.js +125 -0
  29. package/dist/backend/secrets-vault.js +33 -0
  30. package/dist/backend/takeover-messages.js +45 -0
  31. package/dist/backend/takeover-state.js +101 -0
  32. package/dist/backend/takeover-timeout.js +51 -0
  33. package/dist/backend/tasks-db.js +115 -0
  34. package/dist/backend/usage-scanner.js +109 -0
  35. package/dist/backend/workflows/runner.js +267 -0
  36. package/dist/backend/workflows-db.js +235 -0
  37. package/dist/backend/workspace-pool.js +189 -0
  38. package/dist/frontend/assets/index-CZcU3XNC.js +143 -0
  39. package/dist/frontend/assets/index-CkgRytfR.css +1 -0
  40. package/dist/frontend/browser-onboarding.png +0 -0
  41. package/dist/frontend/chat-history-options.html +304 -0
  42. package/dist/frontend/granclaw-logo.png +0 -0
  43. package/dist/frontend/index.html +36 -0
  44. package/dist/home.js +51 -0
  45. package/dist/index.js +159 -0
  46. package/package.json +58 -0
  47. package/templates/AGENT.onboarding.md +74 -0
  48. package/templates/SYSTEM.md +58 -0
  49. package/templates/agents.config.json +3 -0
  50. package/templates/skills/housekeeping/SKILL.md +202 -0
  51. package/templates/skills/memory/SKILL.md +109 -0
  52. package/templates/skills/schedules/SKILL.md +80 -0
  53. package/templates/skills/workflows/SKILL.md +315 -0
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 AITrace
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,61 @@
1
+ # granclaw
2
+
3
+ > A personal AI assistant you run on your own machine. Built on the Claude Code CLI.
4
+
5
+ ## Install
6
+
7
+ ```bash
8
+ npx granclaw
9
+ ```
10
+
11
+ or globally:
12
+
13
+ ```bash
14
+ npm i -g granclaw
15
+ granclaw
16
+ ```
17
+
18
+ Requires **Node 20+** and the [Claude Code CLI](https://claude.ai/download) on `PATH`.
19
+
20
+ ## Usage
21
+
22
+ ```
23
+ granclaw [start] [options]
24
+
25
+ Options:
26
+ --port <n> Listen on port n (default: 8787; env: PORT)
27
+ --home <path> GranClaw home directory (default: ~/.granclaw; env: GRANCLAW_HOME)
28
+ --version Print the version
29
+ --help Print this message
30
+ ```
31
+
32
+ On first run, GranClaw creates `~/.granclaw/` containing:
33
+
34
+ ```
35
+ agents.config.json ← your agents (empty by default)
36
+ data/ ← SQLite databases
37
+ workspaces/ ← per-agent working directories
38
+ logs/ ← CLI process logs
39
+ ```
40
+
41
+ Open the dashboard at <http://localhost:8787> and create your first agent.
42
+
43
+ ## What's inside
44
+
45
+ - **Streaming chat** with Claude Code, tokens live over WebSocket
46
+ - **Mission Control** kanban board every agent knows how to drive
47
+ - **Persistent browser sessions** — saved logins, DOM replay, screenshots
48
+ - **Obsidian-compatible vault** — each agent keeps its own plain-markdown brain
49
+ - **Secrets vault** — API keys injected as env vars only inside the agent process
50
+ - **Schedules** — cron-based scheduled tasks with per-agent isolation
51
+ - **Usage tracking** — token cost breakdown per agent and per model
52
+
53
+ ## Links
54
+
55
+ - 🌐 [granclaw.com](https://granclaw.com)
56
+ - 💻 [github.com/aitrace-dev/granclaw](https://github.com/aitrace-dev/granclaw)
57
+ - 🐛 [Issues](https://github.com/aitrace-dev/granclaw/issues)
58
+
59
+ ## License
60
+
61
+ MIT
@@ -0,0 +1,2 @@
1
#!/usr/bin/env node
// CLI entry shim: loads the compiled bundle and hands it the user's CLI args
// (argv minus the node binary and script path). All real logic lives in dist/.
require('../dist/index.js').main(process.argv.slice(2));
@@ -0,0 +1,246 @@
1
"use strict";
/**
 * agent/process.ts
 *
 * Standalone agent process — spawned by the orchestrator, one per agent.
 *
 * Architecture:
 *   WS server → receives messages → [BB evaluate] → enqueue()
 *   Queue worker (poll loop) → dequeueNext() → runAgent() → broadcastToChannel chunks
 *
 * WebSocket protocol:
 *   Client → Agent: { type: 'message', text: string, channelId?: string }
 *   Agent → Client: { type: 'queued' }
 *                   { type: 'chunk', chunk: StreamChunk }
 *                   { type: 'error', message: string }
 *                   { type: 'blocked', reason: string }
 */
// tsc-emitted interop helper: wraps a CommonJS module that lacks __esModule so
// that compiled `import x from '...'` statements resolve to { default: module }.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
require("dotenv/config");
const ws_1 = require("ws");
const crypto_1 = require("crypto");
const path_1 = __importDefault(require("path"));
const config_js_1 = require("../config.js");
const agent_db_js_1 = require("../agent-db.js");
const runner_pi_js_1 = require("./runner-pi.js");
const messages_db_js_1 = require("../messages-db.js");
const telegram_adapter_js_1 = require("./telegram-adapter.js");
const browser_sessions_js_1 = require("../browser-sessions.js");
const takeover_state_js_1 = require("../takeover-state.js");
const takeover_timeout_js_1 = require("../takeover-timeout.js");
// Identity and listen port are injected by the orchestrator when it spawns
// this process (see the header comment above).
const agentId = process.env.AGENT_ID;
const port = Number(process.env.AGENT_PORT);
// Fail fast on missing/invalid config. Note: NaN (non-numeric AGENT_PORT) and
// 0 are both falsy, so they also trip this guard.
if (!agentId || !port) {
    console.error('[agent/process] AGENT_ID and AGENT_PORT env vars are required');
    process.exit(1);
}
40
// ── Main ───────────────────────────────────────────────────────────────────────
/**
 * Process entry point: resolves the agent's config and workspace, starts the
 * per-agent WebSocket server, optionally starts the Telegram adapter, and runs
 * the job-queue poll loop. Exits the process if the agent id is unknown.
 */
function main() {
    const agent = (0, config_js_1.getAgent)(agentId);
    if (!agent) {
        console.error(`[agent/process] Agent "${agentId}" not found in agents.config.json`);
        process.exit(1);
    }
    // workspaceDir in config may be relative; anchor it at the repo root.
    const workspaceDir = path_1.default.resolve(config_js_1.REPO_ROOT, agent.workspaceDir);
    // Clean up stale 'processing' jobs from previous crashes/restarts
    const cleaned = (0, agent_db_js_1.cleanupStaleJobs)(workspaceDir);
    if (cleaned > 0)
        console.log(`[agent:${agentId}] cleaned up ${cleaned} stale processing jobs`);
    // ── WebSocket server ───────────────────────────────────────────────────────
    const wss = new ws_1.WebSocketServer({ port });
    // Map from channelId → set of WS clients subscribed to that channel
    const channelClients = new Map();
    // Get-or-create the subscriber set for a channel.
    function getChannelClients(channelId) {
        if (!channelClients.has(channelId))
            channelClients.set(channelId, new Set());
        return channelClients.get(channelId);
    }
    wss.on('connection', (ws) => {
        let clientChannelId = 'ui'; // default until client sends a message with channelId
        console.log(`[agent:${agentId}] client connected`);
        ws.on('message', (raw) => {
            let msg;
            try {
                msg = JSON.parse(raw.toString());
            }
            catch {
                ws.send(JSON.stringify({ type: 'error', message: 'invalid JSON' }));
                return;
            }
            if (msg.type === 'stop') {
                // Kill the currently running CLI process for this agent, if any.
                const stopped = (0, runner_pi_js_1.stopAgent)(agentId);
                console.log(`[agent:${agentId}] stop requested — ${stopped ? 'killed' : 'no active process'}`);
                ws.send(JSON.stringify({ type: 'stopped', killed: stopped }));
            }
            else if (msg.type === 'subscribe' && msg.channelId) {
                // Subscribe this WS client to a channel without sending a message.
                // Used by the frontend to receive live chunks from scheduled runs.
                // NOTE(review): subscribing does not remove the socket from any
                // channel set it joined earlier, so one socket can receive
                // broadcasts for several channels until it disconnects — confirm
                // this multi-channel fan-in is intended.
                clientChannelId = msg.channelId;
                getChannelClients(clientChannelId).add(ws);
            }
            else if (msg.type === 'message' && msg.text) {
                clientChannelId = msg.channelId ?? 'ui';
                getChannelClients(clientChannelId).add(ws);
                // Work is queued, not executed inline; the poll loop below picks
                // it up and streams chunks back to this channel's subscribers.
                (0, agent_db_js_1.enqueue)(workspaceDir, agentId, msg.text, clientChannelId);
                ws.send(JSON.stringify({ type: 'queued' }));
            }
        });
        ws.on('close', () => {
            // Remove from whichever channel set it was in
            for (const [id, set] of channelClients.entries()) {
                set.delete(ws);
                if (set.size === 0)
                    channelClients.delete(id);
            }
            console.log(`[agent:${agentId}] client disconnected`);
        });
    });
    console.log(`[agent:${agentId}] WS listening on ws://localhost:${port}`);
    // ── Telegram adapter ───────────────────────────────────────────────────────
    // Started automatically if TELEGRAM_BOT_TOKEN is set (via Secrets in the UI).
    // The user adds TELEGRAM_BOT_TOKEN as a secret → orchestrator injects it as
    // an env var when spawning this process → adapter picks it up here.
    let telegramAdapter = null;
    const telegramBotToken = process.env.TELEGRAM_BOT_TOKEN;
    if (telegramBotToken) {
        telegramAdapter = new telegram_adapter_js_1.TelegramAdapter(agentId, telegramBotToken, workspaceDir);
    }
    // ── Queue worker ───────────────────────────────────────────────────────────
    // Send `data` (JSON-serialized once) to every open socket on a channel.
    function broadcastToChannel(channelId, data) {
        const json = JSON.stringify(data);
        const targets = channelClients.get(channelId);
        if (!targets)
            return;
        for (const ws of targets) {
            if (ws.readyState === ws_1.WebSocket.OPEN)
                ws.send(json);
        }
    }
    // Track busy state per channel type so UI chat can run while workflows/schedules execute
    const busyChannels = new Set();
    // Collapse a channelId into its concurrency "lane": all wf-* channels share
    // one lane, 'schedule' is its own lane, everything else is per-channel.
    function channelType(channelId) {
        if (channelId.startsWith('wf-'))
            return 'workflow';
        if (channelId === 'schedule')
            return 'schedule';
        return channelId; // 'ui', 'telegram:xxx' — each is its own lane
    }
    // One poll-loop tick: dequeue at most one job whose lane is free, run it to
    // completion, then release the lane. Invoked every 300 ms; ticks can overlap
    // while a long run is awaited, but dequeueNext skips busy lanes and the
    // add-to-busyChannels happens synchronously after dequeue, so a lane never
    // runs two jobs at once.
    async function processNext() {
        const job = (0, agent_db_js_1.dequeueNext)(workspaceDir, agentId, busyChannels);
        if (!job)
            return;
        const lane = channelType(job.channelId);
        busyChannels.add(lane);
        try {
            const isTelegramJob = telegramAdapter !== null && job.channelId.startsWith('telegram:');
            // Save the prompt so it's visible in run history immediately
            try {
                (0, messages_db_js_1.saveMessage)({ id: (0, crypto_1.randomUUID)(), agentId: agentId, channelId: job.channelId, role: 'user', content: job.message });
            }
            catch { /* non-fatal */ }
            // Stream chunks directly to channel clients.
            //
            // tool_call rows are persisted to the DB the moment they arrive
            // (not batched at turn end). Reason: if a user leaves the chat view
            // mid-turn and navigates to /dashboard, ChatPage unmounts and loses
            // its in-memory streaming state. On return, it refetches history
            // from the DB — if tool_calls were still buffered in memory, the
            // user would see an empty chat while the agent was clearly still
            // working. Persisting as-they-happen makes the live state
            // refetchable. See regression A (view-switch-state.spec.ts).
            let fullResponse = '';
            let toolCallCount = 0;
            // Inject context message if a human takeover was pending
            let messageText = job.message;
            if ((0, takeover_state_js_1.hasTakeover)(agentId)) {
                (0, takeover_state_js_1.cancelTakeoverTimer)(agentId); // stop 10min timeout — entry stays for runner-pi to restore handle
                messageText =
                    `[User completed browser interaction]\n` +
                        `User said: "${job.message}"`;
            }
            await (0, runner_pi_js_1.runAgent)(agent, messageText, (chunk) => {
                broadcastToChannel(job.channelId, { type: 'chunk', chunk });
                if (chunk.type === 'text') {
                    fullResponse += chunk.text;
                    if (isTelegramJob) {
                        // isTelegramJob implies telegramAdapter !== null (checked above).
                        telegramAdapter.appendChunk(job.channelId, chunk.text);
                    }
                }
                if (chunk.type === 'tool_call') {
                    const tcString = `${chunk.tool}(${JSON.stringify(chunk.input)})`;
                    toolCallCount++;
                    try {
                        (0, messages_db_js_1.saveMessage)({
                            id: (0, crypto_1.randomUUID)(),
                            agentId: agentId,
                            channelId: job.channelId,
                            role: 'tool_call',
                            content: tcString,
                        });
                    }
                    catch { /* non-fatal — WAL/locking can fail under parallel writes */ }
                    if (isTelegramJob) {
                        // Live status update — appears in the user's chat as the
                        // acknowledgment message gets edited to show progress.
                        // Deliberately fire-and-forget (void): don't block streaming
                        // on a Telegram round-trip.
                        void telegramAdapter.appendToolStep(job.channelId, chunk.tool);
                    }
                }
            }, { channelId: job.channelId });
            // Persist the final assistant message. tool_call rows were already
            // saved one-by-one above, so no batch here.
            try {
                if (fullResponse) {
                    (0, messages_db_js_1.saveMessage)({
                        id: (0, crypto_1.randomUUID)(),
                        agentId: agentId,
                        channelId: job.channelId,
                        role: 'assistant',
                        content: fullResponse,
                        createdAt: Date.now() + toolCallCount, // ordered after the last tool_call
                    });
                }
            }
            catch { /* non-fatal */ }
            (0, agent_db_js_1.markDone)(workspaceDir, job.id);
            // Arm 10-minute timeout if the agent registered a takeover during this run
            if ((0, takeover_state_js_1.hasTakeover)(agentId)) {
                const timer = setTimeout(() => {
                    (0, takeover_timeout_js_1.handleTakeoverTimeout)(agentId, workspaceDir).catch((err) => {
                        console.error(`[agent:${agentId}] takeover timeout callback failed`, err);
                    });
                }, takeover_state_js_1.TAKEOVER_TIMEOUT_MS);
                (0, takeover_state_js_1.updateTakeoverTimer)(agentId, timer);
            }
            // Belt-and-suspenders: if the agent left a browser session open (e.g.
            // forgot to call close), finalize it so recordings don't stay "active"
            // forever and stream subscribers detach cleanly.
            // Skip if a human takeover is pending — browser session must stay alive
            if (!(0, takeover_state_js_1.hasTakeover)(agentId)) {
                (0, browser_sessions_js_1.forceCloseActiveSession)(agentId);
            }
            // Send the full reply back to Telegram once the turn is complete
            if (isTelegramJob) {
                await telegramAdapter.flushReply(job.channelId);
            }
        }
        catch (err) {
            const message = err instanceof Error ? err.message : String(err);
            (0, agent_db_js_1.markFailed)(workspaceDir, job.id);
            broadcastToChannel(job.channelId, { type: 'error', message });
            // isTelegramJob is scoped to the try block, so the telegram check is
            // re-derived here. channelId format is 'telegram:<chatId>'.
            if (telegramAdapter && job.channelId.startsWith('telegram:')) {
                const chatId = parseInt(job.channelId.split(':')[1], 10);
                if (!isNaN(chatId)) {
                    // Best-effort user-facing notice; swallow delivery failures.
                    await telegramAdapter.sendErrorMessage(chatId, 'Sorry, something went wrong. Please try again.').catch(() => { });
                }
            }
        }
        finally {
            // Always free the lane, success or failure, so the queue can advance.
            busyChannels.delete(lane);
        }
    }
    setInterval(() => { processNext().catch(console.error); }, 300);
}
main();