0agent 1.0.7 → 1.0.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/bin/0agent.js +211 -5
  2. package/dist/daemon.mjs +592 -153
  3. package/package.json +1 -1
package/bin/0agent.js CHANGED
@@ -86,6 +86,14 @@ switch (cmd) {
86
86
  showLogs(args.slice(1));
87
87
  break;
88
88
 
89
+ case 'team':
90
+ await runTeamCommand(args.slice(1));
91
+ break;
92
+
93
+ case 'serve':
94
+ await runServe(args.slice(1));
95
+ break;
96
+
89
97
  default:
90
98
  showHelp();
91
99
  break;
@@ -415,6 +423,7 @@ async function streamSession(sessionId) {
415
423
 
416
424
  return new Promise((resolve) => {
417
425
  const ws = new WS(`ws://localhost:4200/ws`);
426
+ let streaming = false; // true when mid-token-stream
418
427
 
419
428
  ws.on('open', () => {
420
429
  ws.send(JSON.stringify({ type: 'subscribe', topics: ['sessions'] }));
@@ -427,18 +436,30 @@ async function streamSession(sessionId) {
427
436
 
428
437
  switch (event.type) {
429
438
  case 'session.step':
430
- console.log(` › ${event.step}`);
439
+ // Newline before step if we were mid-stream
440
+ if (streaming) { process.stdout.write('\n'); streaming = false; }
441
+ console.log(` \x1b[2m›\x1b[0m ${event.step}`);
442
+ break;
443
+ case 'session.token':
444
+ // Token-by-token streaming — print without newline
445
+ if (!streaming) { process.stdout.write('\n '); streaming = true; }
446
+ process.stdout.write(event.token);
431
447
  break;
432
448
  case 'session.completed': {
433
- console.log('\n ✓ Done\n');
434
- const out = event.result?.output ?? event.result;
435
- if (out && typeof out === 'string') console.log(` ${out}\n`);
449
+ if (streaming) { process.stdout.write('\n'); streaming = false; }
450
+ // Show files written + commands run
451
+ const r = event.result ?? {};
452
+ if (r.files_written?.length) console.log(`\n \x1b[32m✓\x1b[0m Files: ${r.files_written.join(', ')}`);
453
+ if (r.commands_run?.length) console.log(` \x1b[32m✓\x1b[0m Commands run: ${r.commands_run.length}`);
454
+ if (r.tokens_used) console.log(` \x1b[2m${r.tokens_used} tokens · ${r.model}\x1b[0m`);
455
+ console.log('\n \x1b[32m✓ Done\x1b[0m\n');
436
456
  ws.close();
437
457
  resolve();
438
458
  break;
439
459
  }
440
460
  case 'session.failed':
441
- console.log(`\n ✗ Failed: ${event.error}\n`);
461
+ if (streaming) { process.stdout.write('\n'); streaming = false; }
462
+ console.log(`\n \x1b[31m✗ Failed:\x1b[0m ${event.error}\n`);
442
463
  ws.close();
443
464
  resolve();
444
465
  break;
@@ -601,6 +622,183 @@ function showLogs(logArgs) {
601
622
 
602
623
  // ─── Help ─────────────────────────────────────────────────────────────────
603
624
 
625
// ─── Team commands ────────────────────────────────────────────────────────────

/**
 * Handle `0agent team <subcommand>` CLI commands.
 *
 * Subcommands:
 *   create "<name>"              — create a team on the sync server, print the invite code
 *   join <CODE> [--server <url>] — join a team by invite code and record the membership
 *   list                         — list memberships recorded in ~/.0agent/teams.yaml
 *
 * @param {string[]} teamArgs - CLI args after `team`
 */
async function runTeamCommand(teamArgs) {
  const sub = teamArgs[0];
  const SYNC_URL = process.env['ZEROAGENT_SYNC'] ?? 'http://localhost:4201';

  switch (sub) {
    case 'create': {
      const name = teamArgs.slice(1).join(' ');
      if (!name) { console.log(' Usage: 0agent team create "<name>"'); break; }
      const res = await fetch(`${SYNC_URL}/api/teams`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          name,
          creator_entity_id: crypto.randomUUID(),
          creator_name: process.env['USER'] ?? 'User',
        }),
      }).catch(() => null);
      if (!res?.ok) { console.log(` Sync server not running. Start it with: 0agent serve`); break; }
      const team = await res.json();
      console.log(`\n ✓ Team created: ${team.name}`);
      console.log(` Invite code: \x1b[1m${team.invite_code}\x1b[0m`);
      console.log(`\n Share with teammates:`);
      console.log(` 0agent team join ${team.invite_code} --server ${SYNC_URL}\n`);
      break;
    }

    case 'join': {
      const code = teamArgs[1]?.toUpperCase();
      const serverIdx = teamArgs.indexOf('--server');
      // BUGFIX: `--server` passed without a value previously produced the
      // literal string "undefined" in the fetch URL; fall back to SYNC_URL.
      const serverUrl = (serverIdx >= 0 ? teamArgs[serverIdx + 1] : undefined) ?? SYNC_URL;
      if (!code) { console.log(' Usage: 0agent team join <CODE> [--server <url>]'); break; }
      const res = await fetch(`${serverUrl}/api/teams/by-code/${code}`).catch(() => null);
      if (!res?.ok) { console.log(` Invalid code or sync server unreachable: ${serverUrl}`); break; }
      const team = await res.json();
      // BUGFIX: this second fetch had no .catch(), so a network error between
      // the two requests crashed the CLI with an unhandled rejection.
      const joinRes = await fetch(`${serverUrl}/api/teams/${team.id}/join`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          entity_node_id: crypto.randomUUID(),
          name: process.env['USER'] ?? 'User',
        }),
      }).catch(() => null);
      if (!joinRes?.ok) { console.log(' Failed to join team.'); break; }
      // BUGFIX: persist the membership so `team list` can find it — previously
      // `join` never wrote teams.yaml, so `list` always reported no teams.
      try {
        const { readFileSync, writeFileSync, existsSync, mkdirSync } = await import('node:fs');
        const { resolve } = await import('node:path');
        const { homedir } = await import('node:os');
        const YAML = await import('yaml');
        const dir = resolve(homedir(), '.0agent');
        mkdirSync(dir, { recursive: true });
        const teamsPath = resolve(dir, 'teams.yaml');
        const config = existsSync(teamsPath) ? (YAML.parse(readFileSync(teamsPath, 'utf8')) ?? {}) : {};
        const memberships = config.memberships ?? [];
        if (!memberships.some((m) => m.invite_code === code && m.server_url === serverUrl)) {
          memberships.push({ team_name: team.name, invite_code: code, server_url: serverUrl });
        }
        config.memberships = memberships;
        writeFileSync(teamsPath, YAML.stringify(config), 'utf8');
      } catch {
        // Best-effort: the join succeeded server-side even if we cannot record it locally.
      }
      console.log(`\n ✓ Joined: ${team.name}`);
      console.log(` Members: ${team.members?.length ?? '?'}`);
      console.log(` Sync server: ${serverUrl}\n`);
      break;
    }

    case 'list': {
      // Show teams from local teams.yaml
      const { readFileSync, existsSync } = await import('node:fs');
      const { resolve } = await import('node:path');
      const { homedir } = await import('node:os');
      const teamsPath = resolve(homedir(), '.0agent', 'teams.yaml');
      if (!existsSync(teamsPath)) { console.log('\n No teams joined yet. Use: 0agent team join <CODE>\n'); break; }
      const YAML = await import('yaml');
      const config = YAML.parse(readFileSync(teamsPath, 'utf8'));
      console.log('\n Your teams:\n');
      for (const m of (config.memberships ?? [])) {
        const ago = m.last_synced_at ? `synced ${Math.round((Date.now() - m.last_synced_at) / 60000)}m ago` : 'never synced';
        console.log(` ${m.team_name.padEnd(24)} ${m.invite_code} ${ago}`);
        console.log(` ${' '.repeat(24)} ${m.server_url}`);
      }
      console.log();
      break;
    }

    default:
      console.log(' Usage: 0agent team create "<name>" | join <CODE> [--server <url>] | list');
  }
}
699
+
700
// ─── Serve command (sync server + optional tunnel) ────────────────────────────

/**
 * Start the bundled sync server, print local/LAN URLs, and optionally open a
 * public tunnel (cloudflared, then ngrok) when `--tunnel` is passed.
 * A bare numeric argument overrides the default port (4201).
 *
 * @param {string[]} serveArgs - CLI args after `serve`
 */
async function runServe(serveArgs) {
  const hasTunnel = serveArgs.includes('--tunnel');
  const port = parseInt(serveArgs.find(a => a.match(/^\d+$/)) ?? '4201', 10);

  console.log(`\n Starting 0agent sync server on port ${port}...\n`);

  // Find sync server entry point
  const { resolve, dirname } = await import('node:path');
  const { fileURLToPath } = await import('node:url');
  const { existsSync } = await import('node:fs');
  const { spawn } = await import('node:child_process');
  const { networkInterfaces } = await import('node:os');

  // BUGFIX: `new URL(import.meta.url).pathname` is not a valid filesystem path
  // on Windows (leading slash, percent-encoding); use fileURLToPath instead.
  const pkgRoot = resolve(dirname(fileURLToPath(import.meta.url)), '..');
  const serverScript = resolve(pkgRoot, 'packages', 'sync-server', 'src', 'index.ts');

  if (!existsSync(serverScript)) {
    console.log(' Sync server not found in package. Install with: npm install -g 0agent');
    return;
  }

  // Start sync server as a child of this process (inherits stdio, dies with us).
  const proc = spawn(process.execPath, ['--experimental-specifier-resolution=node', serverScript], {
    env: { ...process.env, SYNC_PORT: String(port), SYNC_HOST: '0.0.0.0' },
    stdio: 'inherit',
    detached: false,
  });

  // First non-internal IPv4 address.
  // BUGFIX: the original inner `break` only exited the inner loop, so a later
  // interface could overwrite an already-found address; labelled break fixes it.
  const nets = networkInterfaces();
  let lanIp = '127.0.0.1';
  scan: for (const iface of Object.values(nets)) {
    if (!iface) continue;
    for (const net of iface) {
      if (net.family === 'IPv4' && !net.internal) { lanIp = net.address; break scan; }
    }
  }

  // Give the server a moment to bind before printing URLs.
  await sleep(1500);

  const localUrl = `http://localhost:${port}`;
  const lanUrl = `http://${lanIp}:${port}`;

  console.log(`\n ✓ Sync server running`);
  console.log(` Local: ${localUrl}`);
  console.log(` LAN: ${lanUrl} ← share with teammates on same WiFi`);

  if (hasTunnel) {
    console.log('\n Opening public tunnel...');
    let tunnelUrl = null;

    // Try cloudflared first.
    try {
      const { execSync: es } = await import('node:child_process');
      es('which cloudflared', { stdio: 'ignore' });
      const cf = spawn('cloudflared', ['tunnel', '--url', localUrl], { stdio: ['ignore', 'pipe', 'pipe'] });
      cf.unref();
      tunnelUrl = await waitForTunnelUrl(cf, /https:\/\/[a-z0-9\-]+\.trycloudflare\.com/i, 12000);
    } catch {}

    // Fall back to ngrok.
    if (!tunnelUrl) {
      try {
        const { execSync: es } = await import('node:child_process');
        es('which ngrok', { stdio: 'ignore' });
        const ng = spawn('ngrok', ['http', String(port), '--log=stdout'], { stdio: ['ignore', 'pipe', 'pipe'] });
        ng.unref();
        tunnelUrl = await waitForTunnelUrl(ng, /https:\/\/[a-z0-9\-]+\.ngrok/i, 8000);
      } catch {}
    }

    if (tunnelUrl) {
      console.log(` Public: \x1b[1m${tunnelUrl}\x1b[0m ← share with anyone`);
      // BUGFIX: removed a dead `const code = Math.random()…` line — the value
      // was never used; real invite codes come from `0agent team create`.
      console.log(`\n Share this with teammates:`);
      console.log(` 0agent team join <CODE> --server ${tunnelUrl}\n`);
    } else {
      console.log(' No tunnel tool found. Install cloudflared: brew install cloudflared');
      console.log(' Using LAN only.');
    }
  }

  console.log('\n Press Ctrl+C to stop.\n');
  proc.on('close', () => process.exit(0));
}
+
787
/**
 * Watch a child process's stdout/stderr until `pattern` matches the combined
 * output, then resolve with the matched URL string. Resolves `null` on
 * timeout; never rejects.
 *
 * @param {{stdout?: any, stderr?: any}} proc - process whose output is scanned
 * @param {RegExp} pattern - regex that extracts the tunnel URL
 * @param {number} timeout - milliseconds to wait before giving up
 * @returns {Promise<string|null>}
 */
async function waitForTunnelUrl(proc, pattern, timeout) {
  return new Promise(resolve => {
    // BUGFIX: the original re-joined the entire chunk array on every data
    // event (O(n²) on chatty output like ngrok's log stream); accumulate a
    // single string instead.
    let buffered = '';
    const onData = d => {
      buffered += d.toString();
      const match = buffered.match(pattern);
      if (match) { cleanup(); resolve(match[0]); }
    };
    proc.stdout?.on('data', onData);
    proc.stderr?.on('data', onData);
    const timer = setTimeout(() => { cleanup(); resolve(null); }, timeout);
    const cleanup = () => { clearTimeout(timer); proc.stdout?.removeListener('data', onData); proc.stderr?.removeListener('data', onData); };
  });
}
+
604
802
  function showHelp() {
605
803
  console.log(`
606
804
  0agent — An agent that learns.
@@ -619,6 +817,13 @@ function showHelp() {
619
817
  0agent improve Self-improvement analysis
620
818
  0agent logs Tail daemon logs
621
819
 
820
+ Team collaboration:
821
+ 0agent serve Start sync server (LAN)
822
+ 0agent serve --tunnel Start sync server + public tunnel
823
+ 0agent team create "<name>" Create a team, get invite code
824
+ 0agent team join <CODE> Join a team by invite code
825
+ 0agent team list List your teams
826
+
622
827
  Dashboard:
623
828
  http://localhost:4200 Web UI (after starting daemon)
624
829
 
@@ -627,6 +832,7 @@ function showHelp() {
627
832
  0agent /research "Acme Corp funding"
628
833
  0agent /build --task next
629
834
  0agent /qa --url https://staging.myapp.com
835
+ 0agent serve --tunnel # then share the URL + 0agent team join <CODE>
630
836
  `);
631
837
  }
632
838
 
package/dist/daemon.mjs CHANGED
@@ -1,6 +1,6 @@
1
1
  // packages/daemon/src/ZeroAgentDaemon.ts
2
- import { writeFileSync as writeFileSync2, unlinkSync as unlinkSync2, existsSync as existsSync3, mkdirSync as mkdirSync2 } from "node:fs";
3
- import { resolve as resolve3 } from "node:path";
2
+ import { writeFileSync as writeFileSync3, unlinkSync as unlinkSync2, existsSync as existsSync4, mkdirSync as mkdirSync3 } from "node:fs";
3
+ import { resolve as resolve4 } from "node:path";
4
4
  import { homedir as homedir3 } from "node:os";
5
5
 
6
6
  // packages/core/src/graph/GraphNode.ts
@@ -1685,6 +1685,537 @@ var EntityScopedContextLoader = class {
1685
1685
  }
1686
1686
  };
1687
1687
 
1688
+ // packages/daemon/src/AgentExecutor.ts
1689
+ import { spawn } from "node:child_process";
1690
+ import { writeFileSync, readFileSync as readFileSync2, readdirSync, mkdirSync, existsSync as existsSync2 } from "node:fs";
1691
+ import { resolve as resolve2, dirname, relative } from "node:path";
1692
+
1693
+ // packages/daemon/src/LLMExecutor.ts
1694
// Tool definitions advertised to the LLM. Shape follows Anthropic's tools
// schema (name / description / JSON-Schema `input_schema`); the OpenAI code
// path re-maps these into `function`-type tools before sending.
var AGENT_TOOLS = [
  // Run an arbitrary shell command in the session's working directory.
  {
    name: "shell_exec",
    description: "Execute a shell command in the working directory. Use for running servers (with & for background), installing packages, running tests, git operations, etc. Returns stdout + stderr.",
    input_schema: {
      type: "object",
      properties: {
        command: { type: "string", description: "Shell command to execute" },
        timeout_ms: { type: "number", description: "Timeout in milliseconds (default 30000)" }
      },
      required: ["command"]
    }
  },
  // Create or overwrite a file; the executor creates parent directories.
  {
    name: "write_file",
    description: "Write content to a file. Creates parent directories if needed. Use for creating HTML, CSS, JS, config files, etc.",
    input_schema: {
      type: "object",
      properties: {
        path: { type: "string", description: "File path relative to working directory" },
        content: { type: "string", description: "Full file content to write" }
      },
      required: ["path", "content"]
    }
  },
  // Read a file (the executor truncates long files — see AgentExecutor.readFile).
  {
    name: "read_file",
    description: "Read a file's contents.",
    input_schema: {
      type: "object",
      properties: {
        path: { type: "string", description: "File path relative to working directory" }
      },
      required: ["path"]
    }
  },
  // List directory entries; `path` is optional and defaults to ".".
  {
    name: "list_dir",
    description: "List files and directories.",
    input_schema: {
      type: "object",
      properties: {
        path: { type: "string", description: 'Directory path relative to working directory (default: ".")' }
      }
    }
  }
];
1741
// Minimal multi-provider chat-completion client. Supports Anthropic, OpenAI
// (and OpenAI-compatible endpoints: xAI, Gemini), and Ollama. Streams text
// tokens through the optional `onToken` callback while accumulating any tool
// calls the model requests.
// NOTE(review): assumes a global `fetch` whose response body is a web
// ReadableStream (Node 18+) — confirm minimum supported Node version.
var LLMExecutor = class {
  // config: { provider, model, api_key?, base_url? }
  constructor(config) {
    this.config = config;
  }
  // Ollama needs no key; all other providers require a non-blank api_key.
  get isConfigured() {
    if (this.config.provider === "ollama") return true;
    return !!this.config.api_key?.trim();
  }
  // ─── Single completion (no tools, no streaming) ──────────────────────────
  // Convenience wrapper: tool list empty, no token callback.
  async complete(messages, system) {
    const res = await this.completeWithTools(messages, [], system, void 0);
    return { content: res.content, tokens_used: res.tokens_used, model: res.model };
  }
  // ─── Tool-calling completion with optional streaming ─────────────────────
  // Dispatch on provider; unknown providers fall back to the OpenAI wire format.
  async completeWithTools(messages, tools, system, onToken) {
    switch (this.config.provider) {
      case "anthropic":
        return this.anthropic(messages, tools, system, onToken);
      case "openai":
        return this.openai(messages, tools, system, onToken);
      case "xai":
        return this.openai(messages, tools, system, onToken, "https://api.x.ai/v1");
      case "gemini":
        return this.openai(messages, tools, system, onToken, "https://generativelanguage.googleapis.com/v1beta/openai");
      case "ollama":
        return this.ollama(messages, system, onToken);
      default:
        return this.openai(messages, tools, system, onToken);
    }
  }
  // ─── Anthropic ───────────────────────────────────────────────────────────
  // Translates the internal message shape to the Anthropic Messages API
  // (system prompt as a top-level field, tool results as user-role
  // `tool_result` blocks) and parses the SSE stream event-by-event.
  async anthropic(messages, tools, system, onToken) {
    // Explicit `system` argument wins over an embedded system message.
    const sysContent = system ?? messages.find((m) => m.role === "system")?.content;
    const filtered = messages.filter((m) => m.role !== "system");
    const anthropicMsgs = filtered.map((m) => {
      if (m.role === "tool") {
        // Anthropic expects tool results inside a user turn.
        return {
          role: "user",
          content: [{ type: "tool_result", tool_use_id: m.tool_call_id, content: m.content }]
        };
      }
      if (m.role === "assistant" && m.tool_calls?.length) {
        // Assistant turn with tool calls: text block (if any) + tool_use blocks.
        return {
          role: "assistant",
          content: [
            ...m.content ? [{ type: "text", text: m.content }] : [],
            ...m.tool_calls.map((tc) => ({
              type: "tool_use",
              id: tc.id,
              name: tc.name,
              input: tc.input
            }))
          ]
        };
      }
      return { role: m.role, content: m.content };
    });
    const body = {
      model: this.config.model,
      max_tokens: 8192,
      messages: anthropicMsgs,
      stream: true
    };
    if (sysContent) body.system = sysContent;
    if (tools.length > 0) {
      body.tools = tools.map((t) => ({
        name: t.name,
        description: t.description,
        input_schema: t.input_schema
      }));
    }
    const res = await fetch("https://api.anthropic.com/v1/messages", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "x-api-key": this.config.api_key,
        "anthropic-version": "2023-06-01"
      },
      body: JSON.stringify(body)
    });
    if (!res.ok) {
      const err = await res.text();
      throw new Error(`Anthropic ${res.status}: ${err}`);
    }
    // Stream-parse state.
    let textContent = "";
    let stopReason = "end_turn";
    let inputTokens = 0;
    let outputTokens = 0;
    let modelName = this.config.model;
    const toolCalls = [];
    // tool_use inputs arrive as partial JSON fragments keyed by block id.
    const toolInputBuffers = {};
    let currentToolId = "";
    const reader = res.body.getReader();
    const decoder = new TextDecoder();
    let buf = ""; // carries a partial SSE line across chunk boundaries
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      buf += decoder.decode(value, { stream: true });
      const lines = buf.split("\n");
      buf = lines.pop() ?? "";
      for (const line of lines) {
        if (!line.startsWith("data: ")) continue;
        const data = line.slice(6).trim();
        if (data === "[DONE]" || data === "") continue;
        let evt;
        try {
          evt = JSON.parse(data);
        } catch {
          continue; // skip malformed SSE payloads
        }
        const type = evt.type;
        if (type === "message_start") {
          // Input token count and resolved model name arrive up front.
          const usage = evt.message?.usage;
          inputTokens = usage?.input_tokens ?? 0;
          modelName = evt.message?.model ?? modelName;
        } else if (type === "content_block_start") {
          // A tool_use block opens; start buffering its JSON input.
          const block = evt.content_block;
          if (block?.type === "tool_use") {
            currentToolId = block.id;
            toolInputBuffers[currentToolId] = "";
            toolCalls.push({ id: currentToolId, name: block.name, input: {} });
          }
        } else if (type === "content_block_delta") {
          const delta = evt.delta;
          if (delta?.type === "text_delta") {
            const token = delta.text ?? "";
            textContent += token;
            if (onToken && token) onToken(token);
          } else if (delta?.type === "input_json_delta") {
            // Accumulate the partial JSON for the current tool call.
            toolInputBuffers[currentToolId] = (toolInputBuffers[currentToolId] ?? "") + (delta.partial_json ?? "");
          }
        } else if (type === "content_block_stop") {
          // Block finished: parse the buffered JSON into the tool call's input.
          if (currentToolId && toolInputBuffers[currentToolId]) {
            const tc = toolCalls.find((t) => t.id === currentToolId);
            if (tc) {
              try {
                tc.input = JSON.parse(toolInputBuffers[currentToolId]);
              } catch {
                // leave input as {} if the buffered JSON is malformed
              }
            }
          }
        } else if (type === "message_delta") {
          // Final usage + stop reason.
          const usage = evt.usage;
          outputTokens = usage?.output_tokens ?? 0;
          const stop = evt.delta?.stop_reason;
          if (stop === "tool_use") stopReason = "tool_use";
          else if (stop === "end_turn") stopReason = "end_turn";
          else if (stop === "max_tokens") stopReason = "max_tokens";
        }
      }
    }
    return {
      content: textContent,
      tool_calls: toolCalls.length > 0 ? toolCalls : null,
      stop_reason: stopReason,
      tokens_used: inputTokens + outputTokens,
      model: modelName
    };
  }
  // ─── OpenAI (also xAI, Gemini) ───────────────────────────────────────────
  // Chat Completions wire format with SSE streaming; tool-call fragments are
  // assembled per choice index. `baseUrl` lets compatible providers reuse it.
  async openai(messages, tools, system, onToken, baseUrl = "https://api.openai.com/v1") {
    const allMessages = [];
    const sysContent = system ?? messages.find((m) => m.role === "system")?.content;
    if (sysContent) allMessages.push({ role: "system", content: sysContent });
    for (const m of messages.filter((m2) => m2.role !== "system")) {
      if (m.role === "tool") {
        allMessages.push({ role: "tool", tool_call_id: m.tool_call_id, content: m.content });
      } else if (m.role === "assistant" && m.tool_calls?.length) {
        allMessages.push({
          role: "assistant",
          content: m.content || null,
          tool_calls: m.tool_calls.map((tc) => ({
            id: tc.id,
            type: "function",
            function: { name: tc.name, arguments: JSON.stringify(tc.input) }
          }))
        });
      } else {
        allMessages.push({ role: m.role, content: m.content });
      }
    }
    const body = {
      model: this.config.model,
      messages: allMessages,
      max_tokens: 8192,
      stream: true,
      // Ask for a trailing usage chunk so token counts survive streaming.
      stream_options: { include_usage: true }
    };
    if (tools.length > 0) {
      body.tools = tools.map((t) => ({
        type: "function",
        function: { name: t.name, description: t.description, parameters: t.input_schema }
      }));
    }
    const res = await fetch(`${this.config.base_url ?? baseUrl}/chat/completions`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "Authorization": `Bearer ${this.config.api_key}`
      },
      body: JSON.stringify(body)
    });
    if (!res.ok) {
      const err = await res.text();
      throw new Error(`OpenAI ${res.status}: ${err}`);
    }
    let textContent = "";
    let tokensUsed = 0;
    let modelName = this.config.model;
    let stopReason = "end_turn";
    // Tool-call fragments keyed by `index`; id/name arrive once, arguments stream.
    const toolCallMap = {};
    const reader = res.body.getReader();
    const decoder = new TextDecoder();
    let buf = ""; // carries a partial SSE line across chunk boundaries
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      buf += decoder.decode(value, { stream: true });
      const lines = buf.split("\n");
      buf = lines.pop() ?? "";
      for (const line of lines) {
        if (!line.startsWith("data: ")) continue;
        const data = line.slice(6).trim();
        if (data === "[DONE]") continue;
        let evt;
        try {
          evt = JSON.parse(data);
        } catch {
          continue; // skip malformed SSE payloads
        }
        modelName = evt.model ?? modelName;
        // Usage is read before the choices guard: the usage-only chunk has an
        // empty choices array and would otherwise be skipped.
        const usage = evt.usage;
        if (usage?.total_tokens) tokensUsed = usage.total_tokens;
        const choices = evt.choices;
        if (!choices?.length) continue;
        const delta = choices[0].delta;
        if (!delta) continue;
        const finish = choices[0].finish_reason;
        if (finish === "tool_calls") stopReason = "tool_use";
        else if (finish === "stop") stopReason = "end_turn";
        const token = delta.content;
        if (token) {
          textContent += token;
          if (onToken) onToken(token);
        }
        const toolCallDeltas = delta.tool_calls;
        if (toolCallDeltas) {
          for (const tc of toolCallDeltas) {
            const idx = tc.index;
            if (!toolCallMap[idx]) {
              toolCallMap[idx] = { id: "", name: "", args: "" };
            }
            const fn = tc.function;
            if (tc.id) toolCallMap[idx].id = tc.id;
            if (fn?.name) toolCallMap[idx].name = fn.name;
            if (fn?.arguments) toolCallMap[idx].args += fn.arguments;
          }
        }
      }
    }
    // Assemble completed tool calls; unparseable argument JSON yields input {}.
    const toolCalls = Object.values(toolCallMap).filter((tc) => tc.id && tc.name).map((tc) => {
      let input = {};
      try {
        input = JSON.parse(tc.args);
      } catch {
      }
      return { id: tc.id, name: tc.name, input };
    });
    return {
      content: textContent,
      tool_calls: toolCalls.length > 0 ? toolCalls : null,
      stop_reason: stopReason,
      tokens_used: tokensUsed,
      model: modelName
    };
  }
  // ─── Ollama (no streaming for simplicity) ────────────────────────────────
  // Single non-streamed request; no tool support. `onToken` receives the whole
  // reply at once so CLI streaming callers still see output.
  async ollama(messages, system, onToken) {
    const baseUrl = this.config.base_url ?? "http://localhost:11434";
    const allMessages = [];
    const sysContent = system ?? messages.find((m) => m.role === "system")?.content;
    if (sysContent) allMessages.push({ role: "system", content: sysContent });
    allMessages.push(...messages.filter((m) => m.role !== "system").map((m) => ({ role: m.role, content: m.content })));
    const res = await fetch(`${baseUrl}/api/chat`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ model: this.config.model, messages: allMessages, stream: false })
    });
    if (!res.ok) throw new Error(`Ollama error ${res.status}`);
    const data = await res.json();
    if (onToken) onToken(data.message.content);
    return { content: data.message.content, tool_calls: null, stop_reason: "end_turn", tokens_used: data.eval_count ?? 0, model: this.config.model };
  }
};
2036
+
2037
+ // packages/daemon/src/AgentExecutor.ts
2038
/**
 * Iterative tool-use agent loop: repeatedly asks the LLM for a completion with
 * AGENT_TOOLS available, executes requested tool calls (shell, file I/O) in a
 * sandboxed working directory, feeds results back, and stops on a final text
 * answer or after `maxIterations` rounds.
 */
var AgentExecutor = class {
  /**
   * @param llm     LLMExecutor-compatible client (must expose completeWithTools)
   * @param config  { cwd, max_iterations?, max_command_ms? }
   * @param onStep  (step: string) => void — progress/step callback
   * @param onToken (token: string) => void — streamed-token callback
   */
  constructor(llm, config, onStep, onToken) {
    this.llm = llm;
    this.config = config;
    this.onStep = onStep;
    this.onToken = onToken;
    this.cwd = config.cwd;
    this.maxIterations = config.max_iterations ?? 20;
    this.maxCommandMs = config.max_command_ms ?? 3e4;
  }
  cwd;            // working directory all tool paths are confined to
  maxIterations;  // hard cap on LLM round-trips
  maxCommandMs;   // default shell command timeout
  /**
   * Run `task` to completion.
   * @returns {{output, files_written, commands_run, tokens_used, model, iterations}}
   */
  async execute(task, systemContext) {
    const filesWritten = [];
    const commandsRun = [];
    let totalTokens = 0;
    let modelName = "";
    const systemPrompt = this.buildSystemPrompt(systemContext);
    const messages = [
      { role: "user", content: task }
    ];
    let finalOutput = "";
    for (let i = 0; i < this.maxIterations; i++) {
      this.onStep(i === 0 ? "Thinking\u2026" : "Continuing\u2026");
      let response;
      try {
        response = await this.llm.completeWithTools(
          messages,
          AGENT_TOOLS,
          systemPrompt,
          // Stream tokens; finalOutput is reset below after tool-use turns, so
          // only the last (non-tool) turn's text survives.
          (token) => {
            this.onToken(token);
            finalOutput += token;
          }
        );
      } catch (err) {
        const msg = err instanceof Error ? err.message : String(err);
        this.onStep(`LLM error: ${msg}`);
        finalOutput = `Error: ${msg}`;
        break;
      }
      totalTokens += response.tokens_used;
      modelName = response.model;
      // Final answer: no tool calls requested.
      if (response.stop_reason === "end_turn" || !response.tool_calls?.length) {
        if (!finalOutput && response.content) finalOutput = response.content;
        break;
      }
      finalOutput = "";
      messages.push({
        role: "assistant",
        content: response.content,
        tool_calls: response.tool_calls
      });
      // Execute each requested tool and append its result for the next turn.
      for (const tc of response.tool_calls) {
        this.onStep(`\u25B6 ${tc.name}(${this.summariseInput(tc.name, tc.input)})`);
        let result;
        try {
          result = await this.executeTool(tc.name, tc.input);
          if (tc.name === "write_file" && tc.input.path) {
            filesWritten.push(String(tc.input.path));
          }
          if (tc.name === "shell_exec" && tc.input.command) {
            commandsRun.push(String(tc.input.command));
          }
        } catch (err) {
          result = `Error: ${err instanceof Error ? err.message : String(err)}`;
        }
        this.onStep(` \u21B3 ${result.slice(0, 120)}${result.length > 120 ? "\u2026" : ""}`);
        messages.push({
          role: "tool",
          content: result,
          tool_call_id: tc.id
        });
      }
    }
    return {
      output: finalOutput || "(no output)",
      files_written: filesWritten,
      commands_run: commandsRun,
      tokens_used: totalTokens,
      model: modelName,
      iterations: messages.filter((m) => m.role === "assistant").length
    };
  }
  // ─── Tool execution ────────────────────────────────────────────────────────
  /** Dispatch a tool call by name; always returns a string result. */
  async executeTool(name, input) {
    switch (name) {
      case "shell_exec":
        return this.shellExec(
          String(input.command ?? ""),
          Number(input.timeout_ms ?? this.maxCommandMs)
        );
      case "write_file":
        return this.writeFile(String(input.path ?? ""), String(input.content ?? ""));
      case "read_file":
        return this.readFile(String(input.path ?? ""));
      case "list_dir":
        return this.listDir(input.path ? String(input.path) : void 0);
      default:
        return `Unknown tool: ${name}`;
    }
  }
  /** Run a shell command via bash; resolves with combined stdout+stderr (never rejects). */
  shellExec(command, timeoutMs) {
    return new Promise((resolveResult) => {
      const chunks = [];
      const proc = spawn("bash", ["-c", command], {
        cwd: this.cwd,
        env: { ...process.env, TERM: "dumb" },
        timeout: timeoutMs
      });
      proc.stdout.on("data", (d) => chunks.push(d.toString()));
      proc.stderr.on("data", (d) => chunks.push(d.toString()));
      proc.on("close", (code) => {
        const output = chunks.join("").trim();
        resolveResult(output || (code === 0 ? "(command completed, no output)" : `exit code ${code}`));
      });
      proc.on("error", (err) => {
        resolveResult(`Error: ${err.message}`);
      });
    });
  }
  /** Write a file inside cwd, creating parent directories. */
  writeFile(filePath, content) {
    const safe = this.safePath(filePath);
    if (!safe) return "Error: path outside working directory";
    mkdirSync(dirname(safe), { recursive: true });
    writeFileSync(safe, content, "utf8");
    const rel = relative(this.cwd, safe);
    return `Written: ${rel} (${content.length} bytes)`;
  }
  /** Read a file inside cwd; long files are truncated to 8000 chars. */
  readFile(filePath) {
    const safe = this.safePath(filePath);
    if (!safe) return "Error: path outside working directory";
    if (!existsSync2(safe)) return `File not found: ${filePath}`;
    const content = readFileSync2(safe, "utf8");
    return content.length > 8e3 ? content.slice(0, 8e3) + `
\u2026[truncated, ${content.length} total bytes]` : content;
  }
  /** List entries in a directory inside cwd (hides dotfiles and node_modules). */
  listDir(dirPath) {
    const safe = this.safePath(dirPath ?? ".");
    if (!safe) return "Error: path outside working directory";
    if (!existsSync2(safe)) return `Directory not found: ${dirPath}`;
    try {
      const entries = readdirSync(safe, { withFileTypes: true }).filter((e) => !e.name.startsWith(".") && e.name !== "node_modules").map((e) => `${e.isDirectory() ? "d" : "f"} ${e.name}`).join("\n");
      return entries || "(empty directory)";
    } catch (e) {
      return `Error: ${e instanceof Error ? e.message : String(e)}`;
    }
  }
  // ─── Helpers ───────────────────────────────────────────────────────────────
  /**
   * Resolve `p` against cwd and confine it to the working directory.
   * BUGFIX: the previous `resolved.startsWith(this.cwd)` prefix check also
   * accepted sibling paths such as "<cwd>-backup/…" (path-escape); compare via
   * path.relative instead — the path escapes iff it begins with a ".." segment.
   * @returns {string|null} absolute path, or null when it escapes cwd
   */
  safePath(p) {
    const resolved = resolve2(this.cwd, p);
    const rel = relative(this.cwd, resolved);
    const escapes = rel === ".." || rel.startsWith("../") || rel.startsWith("..\\");
    return escapes ? null : resolved;
  }
  /** Build the agent system prompt, optionally appending caller-supplied context. */
  buildSystemPrompt(extra) {
    const lines = [
      `You are 0agent, an AI software engineer. You can execute shell commands and manage files.`,
      `Working directory: ${this.cwd}`,
      ``,
      `Instructions:`,
      `- Use tools to actually accomplish tasks, don't just describe what to do`,
      `- For web servers: write the files, then start the server with & (background)`,
      `- For npm/node projects: check package.json first with read_file or list_dir`,
      `- After write_file, verify with read_file if needed`,
      `- After shell_exec, check output for errors and retry if needed`,
      `- Use relative paths from the working directory`,
      `- Be concise in your final response: state what was done and where to find it`
    ];
    if (extra) lines.push(``, `Context:`, extra);
    return lines.join("\n");
  }
  /** One-line human-readable summary of a tool call's input for step logs. */
  summariseInput(toolName, input) {
    if (toolName === "shell_exec") return `"${String(input.command ?? "").slice(0, 60)}"`;
    if (toolName === "write_file") return `"${input.path}"`;
    if (toolName === "read_file") return `"${input.path}"`;
    if (toolName === "list_dir") return `"${input.path ?? "."}"`;
    return JSON.stringify(input).slice(0, 60);
  }
};
2218
+
1688
2219
  // packages/daemon/src/SessionManager.ts
1689
2220
  var SessionManager = class {
1690
2221
  sessions = /* @__PURE__ */ new Map();
@@ -1692,11 +2223,13 @@ var SessionManager = class {
1692
2223
  eventBus;
1693
2224
  graph;
1694
2225
  llm;
2226
+ cwd;
1695
2227
  constructor(deps = {}) {
1696
2228
  this.inferenceEngine = deps.inferenceEngine;
1697
2229
  this.eventBus = deps.eventBus;
1698
2230
  this.graph = deps.graph;
1699
2231
  this.llm = deps.llm;
2232
+ this.cwd = deps.cwd ?? process.cwd();
1700
2233
  }
1701
2234
  /**
1702
2235
  * Create a new session with status 'pending'.
@@ -1864,30 +2397,36 @@ var SessionManager = class {
1864
2397
  } else {
1865
2398
  this.addStep(session.id, "No inference engine connected \u2014 executing task directly");
1866
2399
  }
1867
- this.addStep(session.id, "Calling LLM\u2026");
1868
- let output = "";
1869
2400
  if (this.llm?.isConfigured) {
1870
- try {
1871
- const systemPrompt = enrichedReq.context?.system_context ? String(enrichedReq.context.system_context) : `You are 0agent, a helpful AI assistant. Complete the user's task directly and concisely. If the task involves creating files, writing code, or running commands, provide the exact output needed.`;
1872
- const llmRes = await this.llm.complete([
1873
- { role: "user", content: enrichedReq.task }
1874
- ], systemPrompt);
1875
- output = llmRes.content;
1876
- this.addStep(session.id, `LLM responded (${llmRes.tokens_used} tokens, ${llmRes.model})`);
1877
- } catch (llmErr) {
1878
- const msg = llmErr instanceof Error ? llmErr.message : String(llmErr);
1879
- this.addStep(session.id, `LLM error: ${msg}`);
1880
- output = `Error calling LLM: ${msg}`;
2401
+ const executor = new AgentExecutor(
2402
+ this.llm,
2403
+ { cwd: this.cwd },
2404
+ // step callback emit session.step events
2405
+ (step) => this.addStep(session.id, step),
2406
+ // token callback → emit session.token events
2407
+ (token) => this.emit({ type: "session.token", session_id: session.id, token })
2408
+ );
2409
+ const systemContext = enrichedReq.context?.system_context ? String(enrichedReq.context.system_context) : void 0;
2410
+ const agentResult = await executor.execute(enrichedReq.task, systemContext);
2411
+ if (agentResult.files_written.length > 0) {
2412
+ this.addStep(session.id, `Files written: ${agentResult.files_written.join(", ")}`);
2413
+ }
2414
+ if (agentResult.commands_run.length > 0) {
2415
+ this.addStep(session.id, `Commands run: ${agentResult.commands_run.length}`);
1881
2416
  }
2417
+ this.addStep(session.id, `Done (${agentResult.tokens_used} tokens, ${agentResult.iterations} LLM turns)`);
2418
+ this.completeSession(session.id, {
2419
+ output: agentResult.output,
2420
+ files_written: agentResult.files_written,
2421
+ commands_run: agentResult.commands_run,
2422
+ tokens_used: agentResult.tokens_used,
2423
+ model: agentResult.model
2424
+ });
1882
2425
  } else {
1883
- output = session.plan?.reasoning ?? "No LLM configured \u2014 add API key to ~/.0agent/config.yaml";
1884
- this.addStep(session.id, "No LLM configured (set api_key in ~/.0agent/config.yaml)");
2426
+ const output = session.plan?.reasoning ?? "No LLM configured \u2014 add api_key to ~/.0agent/config.yaml";
2427
+ this.addStep(session.id, "No LLM API key configured");
2428
+ this.completeSession(session.id, { output });
1885
2429
  }
1886
- this.completeSession(session.id, {
1887
- output,
1888
- plan: session.plan ?? null,
1889
- steps: session.steps.length
1890
- });
1891
2430
  } catch (err) {
1892
2431
  const message = err instanceof Error ? err.message : String(err);
1893
2432
  this.failSession(session.id, message);
@@ -2130,7 +2669,7 @@ var BackgroundWorkers = class {
2130
2669
  };
2131
2670
 
2132
2671
  // packages/daemon/src/SkillRegistry.ts
2133
- import { readFileSync as readFileSync2, readdirSync, existsSync as existsSync2, writeFileSync, unlinkSync, mkdirSync } from "node:fs";
2672
+ import { readFileSync as readFileSync3, readdirSync as readdirSync2, existsSync as existsSync3, writeFileSync as writeFileSync2, unlinkSync, mkdirSync as mkdirSync2 } from "node:fs";
2134
2673
  import { join } from "node:path";
2135
2674
  import { homedir as homedir2 } from "node:os";
2136
2675
  import YAML2 from "yaml";
@@ -2153,11 +2692,11 @@ var SkillRegistry = class {
2153
2692
  this.loadFromDir(this.customDir, false);
2154
2693
  }
2155
2694
  loadFromDir(dir, isBuiltin) {
2156
- if (!existsSync2(dir)) return;
2157
- const files = readdirSync(dir).filter((f) => f.endsWith(".yaml") || f.endsWith(".yml"));
2695
+ if (!existsSync3(dir)) return;
2696
+ const files = readdirSync2(dir).filter((f) => f.endsWith(".yaml") || f.endsWith(".yml"));
2158
2697
  for (const file of files) {
2159
2698
  try {
2160
- const raw = readFileSync2(join(dir, file), "utf8");
2699
+ const raw = readFileSync3(join(dir, file), "utf8");
2161
2700
  const skill = YAML2.parse(raw);
2162
2701
  if (skill.name) {
2163
2702
  this.skills.set(skill.name, skill);
@@ -2192,9 +2731,9 @@ var SkillRegistry = class {
2192
2731
  if (this.builtinNames.has(name)) {
2193
2732
  throw new Error(`Cannot override built-in skill: ${name}`);
2194
2733
  }
2195
- mkdirSync(this.customDir, { recursive: true });
2734
+ mkdirSync2(this.customDir, { recursive: true });
2196
2735
  const filePath = join(this.customDir, `${name}.yaml`);
2197
- writeFileSync(filePath, yamlContent, "utf8");
2736
+ writeFileSync2(filePath, yamlContent, "utf8");
2198
2737
  const skill = YAML2.parse(yamlContent);
2199
2738
  this.skills.set(name, skill);
2200
2739
  return skill;
@@ -2207,7 +2746,7 @@ var SkillRegistry = class {
2207
2746
  throw new Error(`Cannot delete built-in skill: ${name}`);
2208
2747
  }
2209
2748
  const filePath = join(this.customDir, `${name}.yaml`);
2210
- if (existsSync2(filePath)) {
2749
+ if (existsSync3(filePath)) {
2211
2750
  unlinkSync(filePath);
2212
2751
  }
2213
2752
  this.skills.delete(name);
@@ -2220,8 +2759,8 @@ var SkillRegistry = class {
2220
2759
  // packages/daemon/src/HTTPServer.ts
2221
2760
  import { Hono as Hono8 } from "hono";
2222
2761
  import { serve } from "@hono/node-server";
2223
- import { readFileSync as readFileSync3 } from "node:fs";
2224
- import { resolve as resolve2, dirname } from "node:path";
2762
+ import { readFileSync as readFileSync4 } from "node:fs";
2763
+ import { resolve as resolve3, dirname as dirname2 } from "node:path";
2225
2764
  import { fileURLToPath } from "node:url";
2226
2765
 
2227
2766
  // packages/daemon/src/routes/health.ts
@@ -2476,15 +3015,15 @@ function skillRoutes(deps) {
2476
3015
  // packages/daemon/src/HTTPServer.ts
2477
3016
  function findGraphHtml() {
2478
3017
  const candidates = [
2479
- resolve2(dirname(fileURLToPath(import.meta.url)), "graph.html"),
3018
+ resolve3(dirname2(fileURLToPath(import.meta.url)), "graph.html"),
2480
3019
  // dev (src/)
2481
- resolve2(dirname(fileURLToPath(import.meta.url)), "..", "graph.html"),
3020
+ resolve3(dirname2(fileURLToPath(import.meta.url)), "..", "graph.html"),
2482
3021
  // bundled (dist/../)
2483
- resolve2(dirname(fileURLToPath(import.meta.url)), "..", "dist", "graph.html")
3022
+ resolve3(dirname2(fileURLToPath(import.meta.url)), "..", "dist", "graph.html")
2484
3023
  ];
2485
3024
  for (const p of candidates) {
2486
3025
  try {
2487
- readFileSync3(p);
3026
+ readFileSync4(p);
2488
3027
  return p;
2489
3028
  } catch {
2490
3029
  }
@@ -2508,7 +3047,7 @@ var HTTPServer = class {
2508
3047
  this.app.route("/api/skills", skillRoutes({ skillRegistry: deps.skillRegistry }));
2509
3048
  const serveGraph = (c) => {
2510
3049
  try {
2511
- const html = readFileSync3(GRAPH_HTML_PATH, "utf8");
3050
+ const html = readFileSync4(GRAPH_HTML_PATH, "utf8");
2512
3051
  return c.html(html);
2513
3052
  } catch {
2514
3053
  return c.html("<p>Graph UI not found. Run: pnpm build</p>");
@@ -2518,7 +3057,7 @@ var HTTPServer = class {
2518
3057
  this.app.get("/graph", serveGraph);
2519
3058
  }
2520
3059
  start() {
2521
- return new Promise((resolve5) => {
3060
+ return new Promise((resolve6) => {
2522
3061
  this.server = serve(
2523
3062
  {
2524
3063
  fetch: this.app.fetch,
@@ -2526,20 +3065,20 @@ var HTTPServer = class {
2526
3065
  hostname: this.deps.host
2527
3066
  },
2528
3067
  () => {
2529
- resolve5();
3068
+ resolve6();
2530
3069
  }
2531
3070
  );
2532
3071
  });
2533
3072
  }
2534
3073
  stop() {
2535
- return new Promise((resolve5, reject) => {
3074
+ return new Promise((resolve6, reject) => {
2536
3075
  if (!this.server) {
2537
- resolve5();
3076
+ resolve6();
2538
3077
  return;
2539
3078
  }
2540
3079
  this.server.close((err) => {
2541
3080
  if (err) reject(err);
2542
- else resolve5();
3081
+ else resolve6();
2543
3082
  });
2544
3083
  });
2545
3084
  }
@@ -2548,107 +3087,6 @@ var HTTPServer = class {
2548
3087
  }
2549
3088
  };
2550
3089
 
2551
- // packages/daemon/src/LLMExecutor.ts
2552
- var LLMExecutor = class {
2553
- constructor(config) {
2554
- this.config = config;
2555
- }
2556
- async complete(messages, system) {
2557
- switch (this.config.provider) {
2558
- case "anthropic":
2559
- return this.callAnthropic(messages, system);
2560
- case "openai":
2561
- return this.callOpenAI(messages, system);
2562
- case "xai":
2563
- return this.callOpenAI(messages, system, "https://api.x.ai/v1");
2564
- case "gemini":
2565
- return this.callOpenAI(messages, system, "https://generativelanguage.googleapis.com/v1beta/openai");
2566
- case "ollama":
2567
- return this.callOllama(messages, system);
2568
- default:
2569
- return this.callOpenAI(messages, system);
2570
- }
2571
- }
2572
- async callAnthropic(messages, system) {
2573
- const body = {
2574
- model: this.config.model,
2575
- max_tokens: 8192,
2576
- messages: messages.filter((m) => m.role !== "system").map((m) => ({ role: m.role, content: m.content }))
2577
- };
2578
- if (system) body.system = system;
2579
- else {
2580
- const sysMsg = messages.find((m) => m.role === "system");
2581
- if (sysMsg) body.system = sysMsg.content;
2582
- }
2583
- const res = await fetch("https://api.anthropic.com/v1/messages", {
2584
- method: "POST",
2585
- headers: {
2586
- "Content-Type": "application/json",
2587
- "x-api-key": this.config.api_key,
2588
- "anthropic-version": "2023-06-01"
2589
- },
2590
- body: JSON.stringify(body)
2591
- });
2592
- if (!res.ok) {
2593
- const err = await res.text();
2594
- throw new Error(`Anthropic API error ${res.status}: ${err}`);
2595
- }
2596
- const data = await res.json();
2597
- return {
2598
- content: data.content.filter((c) => c.type === "text").map((c) => c.text).join(""),
2599
- tokens_used: (data.usage?.input_tokens ?? 0) + (data.usage?.output_tokens ?? 0),
2600
- model: data.model
2601
- };
2602
- }
2603
- async callOpenAI(messages, system, baseUrl = "https://api.openai.com/v1") {
2604
- const allMessages = [];
2605
- const sysContent = system ?? messages.find((m) => m.role === "system")?.content;
2606
- if (sysContent) allMessages.push({ role: "system", content: sysContent });
2607
- allMessages.push(...messages.filter((m) => m.role !== "system").map((m) => ({ role: m.role, content: m.content })));
2608
- const res = await fetch(`${this.config.base_url ?? baseUrl}/chat/completions`, {
2609
- method: "POST",
2610
- headers: {
2611
- "Content-Type": "application/json",
2612
- "Authorization": `Bearer ${this.config.api_key}`
2613
- },
2614
- body: JSON.stringify({
2615
- model: this.config.model,
2616
- messages: allMessages,
2617
- max_tokens: 8192
2618
- })
2619
- });
2620
- if (!res.ok) {
2621
- const err = await res.text();
2622
- throw new Error(`OpenAI API error ${res.status}: ${err}`);
2623
- }
2624
- const data = await res.json();
2625
- return {
2626
- content: data.choices[0]?.message?.content ?? "",
2627
- tokens_used: data.usage?.total_tokens ?? 0,
2628
- model: data.model
2629
- };
2630
- }
2631
- async callOllama(messages, system) {
2632
- const baseUrl = this.config.base_url ?? "http://localhost:11434";
2633
- const allMessages = [];
2634
- const sysContent = system ?? messages.find((m) => m.role === "system")?.content;
2635
- if (sysContent) allMessages.push({ role: "system", content: sysContent });
2636
- allMessages.push(...messages.filter((m) => m.role !== "system"));
2637
- const res = await fetch(`${baseUrl}/api/chat`, {
2638
- method: "POST",
2639
- headers: { "Content-Type": "application/json" },
2640
- body: JSON.stringify({ model: this.config.model, messages: allMessages, stream: false })
2641
- });
2642
- if (!res.ok) throw new Error(`Ollama error ${res.status}`);
2643
- const data = await res.json();
2644
- return { content: data.message.content, tokens_used: data.eval_count ?? 0, model: this.config.model };
2645
- }
2646
- get isConfigured() {
2647
- if (this.config.provider === "ollama") return true;
2648
- return !!this.config.api_key?.trim();
2649
- }
2650
- };
2651
-
2652
3090
  // packages/daemon/src/ZeroAgentDaemon.ts
2653
3091
  var ZeroAgentDaemon = class {
2654
3092
  config = null;
@@ -2664,13 +3102,13 @@ var ZeroAgentDaemon = class {
2664
3102
  startedAt = 0;
2665
3103
  pidFilePath;
2666
3104
  constructor() {
2667
- this.pidFilePath = resolve3(homedir3(), ".0agent", "daemon.pid");
3105
+ this.pidFilePath = resolve4(homedir3(), ".0agent", "daemon.pid");
2668
3106
  }
2669
3107
  async start(opts) {
2670
3108
  this.config = await loadConfig(opts?.config_path);
2671
- const dotDir = resolve3(homedir3(), ".0agent");
2672
- if (!existsSync3(dotDir)) {
2673
- mkdirSync2(dotDir, { recursive: true });
3109
+ const dotDir = resolve4(homedir3(), ".0agent");
3110
+ if (!existsSync4(dotDir)) {
3111
+ mkdirSync3(dotDir, { recursive: true });
2674
3112
  }
2675
3113
  this.adapter = new SQLiteAdapter({ db_path: this.config.graph.db_path });
2676
3114
  this.graph = new KnowledgeGraph(this.adapter);
@@ -2698,7 +3136,8 @@ var ZeroAgentDaemon = class {
2698
3136
  inferenceEngine: this.inferenceEngine,
2699
3137
  eventBus: this.eventBus,
2700
3138
  graph: this.graph,
2701
- llm: llmExecutor
3139
+ llm: llmExecutor,
3140
+ cwd: process.env["ZEROAGENT_CWD"] ?? process.cwd()
2702
3141
  });
2703
3142
  this.backgroundWorkers = new BackgroundWorkers({
2704
3143
  graph: this.graph,
@@ -2720,7 +3159,7 @@ var ZeroAgentDaemon = class {
2720
3159
  getStatus: () => this.getStatus()
2721
3160
  });
2722
3161
  await this.httpServer.start();
2723
- writeFileSync2(this.pidFilePath, String(process.pid), "utf8");
3162
+ writeFileSync3(this.pidFilePath, String(process.pid), "utf8");
2724
3163
  console.log(
2725
3164
  `[0agent] Daemon started on ${this.config.server.host}:${this.config.server.port} (PID: ${process.pid})`
2726
3165
  );
@@ -2754,7 +3193,7 @@ var ZeroAgentDaemon = class {
2754
3193
  this.graph = null;
2755
3194
  }
2756
3195
  this.adapter = null;
2757
- if (existsSync3(this.pidFilePath)) {
3196
+ if (existsSync4(this.pidFilePath)) {
2758
3197
  try {
2759
3198
  unlinkSync2(this.pidFilePath);
2760
3199
  } catch {
@@ -2784,11 +3223,11 @@ var ZeroAgentDaemon = class {
2784
3223
  };
2785
3224
 
2786
3225
  // packages/daemon/src/start.ts
2787
- import { resolve as resolve4 } from "node:path";
3226
+ import { resolve as resolve5 } from "node:path";
2788
3227
  import { homedir as homedir4 } from "node:os";
2789
- import { existsSync as existsSync4 } from "node:fs";
2790
- var CONFIG_PATH = process.env["ZEROAGENT_CONFIG"] ?? resolve4(homedir4(), ".0agent", "config.yaml");
2791
- if (!existsSync4(CONFIG_PATH)) {
3228
+ import { existsSync as existsSync5 } from "node:fs";
3229
+ var CONFIG_PATH = process.env["ZEROAGENT_CONFIG"] ?? resolve5(homedir4(), ".0agent", "config.yaml");
3230
+ if (!existsSync5(CONFIG_PATH)) {
2792
3231
  console.error(`
2793
3232
  0agent is not initialised.
2794
3233
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "0agent",
3
- "version": "1.0.7",
3
+ "version": "1.0.9",
4
4
  "description": "A persistent, learning AI agent that runs on your machine. An agent that learns.",
5
5
  "private": false,
6
6
  "license": "Apache-2.0",