taskmonkey-cli 0.10.1 → 0.11.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/bin/tm.js CHANGED
@@ -1,6 +1,9 @@
1
1
  #!/usr/bin/env node
2
2
 
3
3
  import { Command } from 'commander';
4
+ import { readFileSync } from 'fs';
5
+ import { fileURLToPath } from 'url';
6
+ import { dirname, join } from 'path';
4
7
  import { login } from '../src/commands/login.js';
5
8
  import { testTool } from '../src/commands/test-tool.js';
6
9
  import { sync } from '../src/commands/sync.js';
@@ -15,12 +18,18 @@ import { testChat } from '../src/commands/test-chat.js';
15
18
  import { testConversations } from '../src/commands/test-conversations.js';
16
19
  import { optimizePrompt } from '../src/commands/optimize-prompt.js';
17
20
 
21
+ // Read version from package.json so `tm --version` is always in sync.
22
+ const __dirname = dirname(fileURLToPath(import.meta.url));
23
+ const pkgVersion = JSON.parse(
24
+ readFileSync(join(__dirname, '..', 'package.json'), 'utf8')
25
+ ).version;
26
+
18
27
  const program = new Command();
19
28
 
20
29
  program
21
30
  .name('tm')
22
31
  .description('TaskMonkey CLI — Remote dev tools for tenant config')
23
- .version('0.1.0');
32
+ .version(pkgVersion);
24
33
 
25
34
  program
26
35
  .command('login')
@@ -53,7 +62,8 @@ program
53
62
 
54
63
  program
55
64
  .command('watch')
56
- .description('Watch for file changes and auto-sync')
65
+ .description('Watch for file changes and auto-sync (streams server logs by default)')
66
+ .option('--no-logs', 'Disable live log streaming')
57
67
  .action(watch);
58
68
 
59
69
  program
@@ -71,8 +81,9 @@ program
71
81
 
72
82
  program
73
83
  .command('logs')
74
- .description('Stream server logs')
84
+ .description('Stream server logs (or clear them with --clear)')
75
85
  .option('-n, --lines <number>', 'Initial lines to show', '50')
86
+ .option('--clear', 'Truncate tools.log on the server and exit')
76
87
  .action(logs);
77
88
 
78
89
  program
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "taskmonkey-cli",
3
- "version": "0.10.1",
3
+ "version": "0.11.1",
4
4
  "description": "TaskMonkey CLI — Remote dev tools for tenant config editing and tool testing",
5
5
  "bin": {
6
6
  "tm": "./bin/tm.js",
@@ -1,63 +1,65 @@
1
1
  import chalk from 'chalk';
2
- import EventSource from 'eventsource';
3
- import { createClient } from '../lib/api.js';
4
- import { loadConfig } from '../config.js';
2
+ import { createClient, ensureFreshToken } from '../lib/api.js';
3
+ import { streamLogs } from '../lib/log-stream.js';
5
4
 
6
5
  export async function logs(options) {
7
- const config = loadConfig();
8
- if (!config) {
9
- console.error(chalk.red('Not logged in. Run `tm login` first.'));
6
+ let config;
7
+ try {
8
+ // Refresh the access token proactively before opening the SSE stream.
9
+ // fetch-based streaming has no retry-on-401 either, so a stale token
10
+ // would just make the stream fail silently a few minutes in.
11
+ config = await ensureFreshToken();
12
+ } catch (err) {
13
+ console.error(chalk.red(err.message));
10
14
  process.exit(1);
11
15
  }
12
16
 
13
- const lines = options.lines || 50;
14
- const url = `${config.server}/api/logs/stream?tenant=${config.tenant}&lines=${lines}`;
15
-
16
- console.log(chalk.cyan('📋 Streaming logs'), chalk.gray(`(${config.tenant})`));
17
- console.log(chalk.gray(' Ctrl+C to stop\n'));
18
-
19
- const es = new EventSource(url, {
20
- headers: { 'Authorization': `Bearer ${config.token}` },
21
- });
22
-
23
- es.onmessage = (e) => {
17
+ if (options.clear) {
24
18
  try {
25
- const data = JSON.parse(e.data);
26
- const line = data.line || '';
27
- const type = data.type || 'info';
28
-
29
- switch (type) {
30
- case 'error':
31
- console.log(chalk.red(line));
32
- break;
33
- case 'warning':
34
- console.log(chalk.yellow(line));
35
- break;
36
- case 'success':
37
- console.log(chalk.green(line));
38
- break;
39
- default:
40
- console.log(chalk.gray(line));
19
+ const client = createClient();
20
+ const res = await client.post('/api/logs/clear', {});
21
+ if (res.cleared) {
22
+ console.log(chalk.green('✓'), `cleared ${res.path}`);
23
+ } else {
24
+ console.log(chalk.gray('(nothing to clear — tools.log does not exist yet)'));
41
25
  }
42
- } catch {
43
- console.log(chalk.gray(e.data));
26
+ } catch (err) {
27
+ console.error(chalk.red(`Clear failed: ${err.message}`));
28
+ process.exit(1);
44
29
  }
45
- };
30
+ return;
31
+ }
46
32
 
47
- es.addEventListener('init_complete', () => {
48
- console.log(chalk.gray('--- live ---\n'));
49
- });
33
+ const lines = parseInt(options.lines || '50', 10);
50
34
 
51
- es.onerror = (err) => {
52
- if (es.readyState === EventSource.CLOSED) {
53
- console.log(chalk.gray('\nConnection closed.'));
54
- process.exit(0);
55
- }
56
- };
35
+ console.log(chalk.cyan('📋 Streaming logs'), chalk.gray(`(${config.tenant})`));
36
+ console.log(chalk.gray(' Ctrl+C to stop\n'));
57
37
 
38
+ const abort = new AbortController();
58
39
  process.on('SIGINT', () => {
59
- es.close();
40
+ abort.abort();
60
41
  console.log(chalk.gray('\nStopped.'));
61
42
  process.exit(0);
62
43
  });
44
+
45
+ try {
46
+ await streamLogs({
47
+ config,
48
+ lines,
49
+ signal: abort.signal,
50
+ onLine: (line, type) => {
51
+ switch (type) {
52
+ case 'error': console.log(chalk.red(line)); break;
53
+ case 'warning': console.log(chalk.yellow(line)); break;
54
+ case 'success': console.log(chalk.green(line)); break;
55
+ default: console.log(chalk.gray(line));
56
+ }
57
+ },
58
+ onInitComplete: () => console.log(chalk.gray('--- live ---\n')),
59
+ });
60
+ } catch (err) {
61
+ if (err.name === 'AbortError') return;
62
+ console.error(chalk.red(`Stream error: ${err.message}`));
63
+ process.exit(1);
64
+ }
63
65
  }
@@ -1,6 +1,6 @@
1
1
  import chalk from 'chalk';
2
2
  import EventSource from 'eventsource';
3
- import { loadConfig } from '../config.js';
3
+ import { ensureFreshToken } from '../lib/api.js';
4
4
 
5
5
  const BADGES = {
6
6
  running: chalk.bgYellow.black(' RUN '),
@@ -61,9 +61,13 @@ function renderExecution(exec) {
61
61
  }
62
62
 
63
63
  export async function monitor(options) {
64
- const config = loadConfig();
65
- if (!config) {
66
- console.error(chalk.red('Not logged in. Run `tm login` first.'));
64
+ let config;
65
+ try {
66
+ // Refresh proactively — EventSource cannot retry-on-401, so a stale
67
+ // token would let the stream die wordlessly after the JWT expires.
68
+ config = await ensureFreshToken();
69
+ } catch (err) {
70
+ console.error(chalk.red(err.message));
67
71
  process.exit(1);
68
72
  }
69
73
 
@@ -336,16 +336,18 @@ Tools können auf drei Arten definiert werden:
336
336
  \\$logger = \\$ctx['logger'];
337
337
  \\$logger->info("Starte...");
338
338
 
339
- // Andere Tools aufrufen
339
+ // Andere Tools aufrufen — DAS ist der richtige Weg, um an externe
340
+ // Daten zu kommen. Direkter HTTP-Client-Aufruf ist im Sandbox-
341
+ // Validator blockiert.
340
342
  \\$item = \\$ctx['tool']('searchItems', ['searchSku' => \\$args['sku']]);
341
-
342
- // API-Calls via Cake HTTP Client
343
- \\$api = \\$ctx['config']['apis']['jtl'];
344
- \\$http = new \\Cake\\Http\\Client(['timeout' => 60]);
345
- \\$response = \\$http->get(\\$api['base_url'] . '/items', [], ['headers' => \\$api['headers']]);
343
+ \\$stock = \\$ctx['tool']('getStockBySku', ['sku' => \\$args['sku']]);
346
344
 
347
345
  \\$logger->success("Fertig");
348
- return ['success' => true, 'data' => \\$response->getJson()];
346
+ return [
347
+ 'success' => true,
348
+ 'item' => \\$item,
349
+ 'stock' => \\$stock,
350
+ ];
349
351
  },
350
352
  'parameters' => [
351
353
  'type' => 'object',
@@ -378,24 +380,55 @@ Diese PHP-Klassen stehen als Handler zur Verfügung (Referenz: \`[ClassName, 'me
378
380
  Jeder Handler bekommt einen Kontext mit:
379
381
 
380
382
  \`\`\`php
381
- \\$ctx['config'] // Tenant-Konfiguration (APIs, Tools, etc.)
383
+ \\$ctx['config'] // Tenant-Konfiguration (APIs, Tools, etc.) — read-only
382
384
  \\$ctx['tenant'] // Tenant-Code (z.B. "bloomify")
383
385
  \\$ctx['logger'] // TaskLogger: ->info(), ->success(), ->error(), ->warning()
384
- \\$ctx['tool'] // Funktion zum Aufrufen anderer Tools: \\$ctx['tool']('toolName', \\$args)
386
+ \\$ctx['tool'] // Funktion zum Aufrufen anderer Tools: \\$ctx['tool']('name', \\$args)
385
387
  \\$ctx['chat_id'] // Chat-Session ID
386
388
  \\$ctx['testMode'] // true wenn im Simulationsmodus
389
+ \\$ctx['tmp_path'] // Tenant-eigener tmp/-Schreibpfad für generierte Dateien
390
+ \\$ctx['log_path'] // Tenant-eigener logs/-Pfad (TaskLogger schreibt selbst hierher)
387
391
  \`\`\`
388
392
 
389
- ## Verfügbare Vendor-Libraries
393
+ ## Sandbox / Code-Validator
394
+
395
+ CLI-synced Handler-Code läuft durch einen Token-basierten Validator
396
+ (\`TenantController::validateCode\`). Folgende Funktionen und Klassen
397
+ sind **blockiert**:
398
+
399
+ \`\`\`
400
+ shell_exec, exec, system, passthru, popen, proc_open
401
+ eval, assert, create_function, call_user_func, call_user_func_array
402
+ file_get_contents, file_put_contents, fopen, fwrite, unlink, mkdir,
403
+ glob, scandir, rmdir, rename, copy, symlink, chmod
404
+ curl_init, curl_exec, fsockopen
405
+ include, require, include_once, require_once
406
+ new PDO, new mysqli, new SplFileObject, new ZipArchive, new Phar,
407
+ new ReflectionFunction, new ReflectionMethod, new ReflectionClass,
408
+ new \\Cake\\Http\\Client, new \\Cake\\ORM\\TableRegistry,
409
+ new \\Cake\\Datasource\\ConnectionManager
410
+ Variable function calls (\`\\$f(...)\`) — Closure-Subskript wie
411
+ \`\\$ctx['tool'](...)\` ist erlaubt.
412
+ \`\`\`
413
+
414
+ Alles, was du davon brauchst, geht über \`\\$ctx['tool']()\`. Wenn dein
415
+ Handler etwas braucht, das nicht über \`\\$ctx\` exposed ist, melde es
416
+ als Bedarf — dann landet die Logik in einem \`_shared/\` Helper-Tool
417
+ auf Server-Seite.
418
+
419
+ ## Erlaubte PHP-Funktionen (Auswahl)
390
420
 
391
421
  \`\`\`php
392
- \\Cake\\Http\\Client // HTTP-Requests (GET, POST, etc.)
393
- \\Cake\\Cache\\Cache // Caching (read, write, delete)
394
- \\Cake\\ORM\\TableRegistry // Datenbank-Zugriff
395
- \\Cake\\Core\\Configure // App-Konfiguration lesen
396
- \\Google_Service_Sheets // Google Sheets API
397
- \\Google_Service_Drive // Google Drive API
398
- \\GuzzleHttp\\Client // Alternative HTTP-Library
422
+ // Daten:
423
+ array_*, count, json_encode, json_decode, str_*, preg_*
424
+ // Datum/Zeit:
425
+ date, time, mktime, strtotime, DateTime, DateInterval
426
+ // Bilder (GD-Erweiterung — schreibt nicht über file_put_contents):
427
+ imagepng, imagejpeg, imagecreatetruecolor, imagestring, …
428
+ // File-Inspektion (lesen, nicht schreiben):
429
+ filesize, filemtime, file_exists, is_dir, is_file
430
+ // Math:
431
+ round, floor, ceil, min, max, abs, intval, floatval
399
432
  \`\`\`
400
433
 
401
434
  ## Database Gateway (Remote-SQL)
@@ -1,15 +1,21 @@
1
- import { readdirSync, readFileSync } from 'fs';
2
- import { join, relative } from 'path';
1
+ import { readdirSync, readFileSync, mkdirSync, writeFileSync } from 'fs';
2
+ import { join, relative, dirname } from 'path';
3
3
  import chalk from 'chalk';
4
4
  import ora from 'ora';
5
5
  import { loadConfig } from '../config.js';
6
6
  import { createClient } from '../lib/api.js';
7
7
 
8
+ // Skip runtime dirs and the local CLI metadata when collecting files to upload.
9
+ const SKIP_TOP_LEVEL = new Set(['logs', 'tmp', '.claude', '.git', 'node_modules', 'docs', 'shared']);
10
+
8
11
  function collectFiles(dir, base = dir) {
9
12
  const files = {};
10
13
  const entries = readdirSync(dir, { withFileTypes: true });
11
14
 
12
15
  for (const entry of entries) {
16
+ if (dir === base && SKIP_TOP_LEVEL.has(entry.name)) {
17
+ continue;
18
+ }
13
19
  const fullPath = join(dir, entry.name);
14
20
  if (entry.isDirectory()) {
15
21
  Object.assign(files, collectFiles(fullPath, base));
@@ -22,6 +28,55 @@ function collectFiles(dir, base = dir) {
22
28
  return files;
23
29
  }
24
30
 
31
+ /**
32
+ * Pulls logs/ and tmp/ from the server into the local working copy so the
33
+ * developer can read them with normal file tools (cat, grep, Claude, etc.).
34
+ *
35
+ * Text files come back as utf-8 strings; binary files come back as base64
36
+ * under `binary_files`. We write both verbatim.
37
+ *
38
+ * Failures here are non-fatal — the sync itself was already successful.
39
+ */
40
+ export async function pullRuntime(tenantDir, { quiet = false } = {}) {
41
+ const client = createClient();
42
+ let runtime;
43
+ try {
44
+ runtime = await client.get('/api/tenant/runtime');
45
+ } catch (err) {
46
+ if (!quiet) {
47
+ console.log(chalk.gray(` (runtime pull skipped: ${err.message})`));
48
+ }
49
+ return 0;
50
+ }
51
+
52
+ let written = 0;
53
+ const writeOne = (relPath, content, isBase64) => {
54
+ const fullPath = join(tenantDir, relPath);
55
+ mkdirSync(dirname(fullPath), { recursive: true });
56
+ if (isBase64) {
57
+ writeFileSync(fullPath, Buffer.from(content, 'base64'));
58
+ } else {
59
+ writeFileSync(fullPath, content);
60
+ }
61
+ written++;
62
+ };
63
+
64
+ // Some servers serialize empty PHP arrays as JSON [] instead of {}.
65
+ // Treat plain arrays as empty since they have no path-keyed entries.
66
+ const asMap = (v) => (v && typeof v === 'object' && !Array.isArray(v)) ? v : {};
67
+ for (const [relPath, content] of Object.entries(asMap(runtime.files))) {
68
+ writeOne(relPath, content, false);
69
+ }
70
+ for (const [relPath, content] of Object.entries(asMap(runtime.binary_files))) {
71
+ writeOne(relPath, content, true);
72
+ }
73
+
74
+ if (written > 0 && !quiet) {
75
+ console.log(chalk.gray(` ↓ ${written} runtime files (logs/, tmp/)`));
76
+ }
77
+ return written;
78
+ }
79
+
25
80
  export async function sync(options = {}) {
26
81
  const config = loadConfig();
27
82
  if (!config) {
@@ -88,6 +143,9 @@ export async function sync(options = {}) {
88
143
  console.log(chalk.red(` ${err.path}: ${err.error}`));
89
144
  }
90
145
  }
146
+
147
+ // Mirror server-side logs/ and tmp/ into the working copy for debugging.
148
+ await pullRuntime(tenantDir);
91
149
  } catch (err) {
92
150
  spinner.fail(err.message);
93
151
  process.exit(1);
@@ -1,11 +1,12 @@
1
1
  import chokidar from 'chokidar';
2
2
  import chalk from 'chalk';
3
3
  import { loadConfig } from '../config.js';
4
- import { createClient } from '../lib/api.js';
4
+ import { createClient, ensureFreshToken } from '../lib/api.js';
5
5
  import { sync } from './sync.js';
6
+ import { streamLogs } from '../lib/log-stream.js';
6
7
  import { join, relative } from 'path';
7
8
 
8
- export async function watch() {
9
+ export async function watch(options = {}) {
9
10
  const config = loadConfig();
10
11
  if (!config) {
11
12
  console.error(chalk.red('Not logged in. Run `tm login` first.'));
@@ -13,16 +14,30 @@ export async function watch() {
13
14
  }
14
15
 
15
16
  const tenantDir = join(config._configDir, config.tenant_path || '.');
17
+ const showLogs = options.logs !== false; // on by default; disable with --no-logs
16
18
 
17
19
  console.log(chalk.cyan('👀 Watching'), tenantDir);
20
+ if (showLogs) {
21
+ console.log(chalk.gray(' Live logs: on (disable with --no-logs)'));
22
+ }
18
23
  console.log(chalk.gray(' Ctrl+C to stop\n'));
19
24
 
20
25
  let syncTimer = null;
21
26
  const pendingDeletes = [];
22
27
 
28
+ // Watch only .php files, and explicitly ignore the runtime dirs that
29
+ // sync() writes back into the working copy — otherwise pullRuntime would
30
+ // re-trigger sync in an endless loop.
23
31
  const watcher = chokidar.watch(join(tenantDir, '**/*.php'), {
24
32
  ignoreInitial: true,
25
33
  awaitWriteFinish: { stabilityThreshold: 300 },
34
+ ignored: [
35
+ /(^|[\/\\])logs[\/\\]/,
36
+ /(^|[\/\\])tmp[\/\\]/,
37
+ /(^|[\/\\])\.claude[\/\\]/,
38
+ /(^|[\/\\])\.git[\/\\]/,
39
+ /(^|[\/\\])node_modules[\/\\]/,
40
+ ],
26
41
  });
27
42
 
28
43
  watcher.on('add', (path) => {
@@ -68,9 +83,84 @@ export async function watch() {
68
83
  }, 500);
69
84
  }
70
85
 
86
+ // Live log streaming alongside the file watcher. Runs in the background;
87
+ // reconnects automatically if the server-side 5-minute stream window
88
+ // expires or the connection drops.
89
+ let logsAbort = null;
90
+ if (showLogs) {
91
+ runLogStreamForever(config).then((ctl) => { logsAbort = ctl; });
92
+ }
93
+
71
94
  process.on('SIGINT', () => {
72
95
  watcher.close();
96
+ if (logsAbort) logsAbort.abort();
73
97
  console.log(chalk.gray('\nStopped.'));
74
98
  process.exit(0);
75
99
  });
76
100
  }
101
+
102
+ /**
103
+ * Open the log stream and keep it open for as long as the watcher runs.
104
+ * Returns an AbortController so the SIGINT handler can tear it down.
105
+ *
106
+ * On disconnect we wait 1s and reconnect. On auth failure we refresh the
107
+ * token via ensureFreshToken() and retry. Errors are printed but never
108
+ * kill the watcher.
109
+ */
110
+ async function runLogStreamForever(initialConfig) {
111
+ const abort = new AbortController();
112
+ let config = initialConfig;
113
+ let firstConnect = true;
114
+
115
+ (async () => {
116
+ while (!abort.signal.aborted) {
117
+ try {
118
+ // Refresh token before each (re)connect — cheap and prevents
119
+ // silent auth drops on long-running sessions.
120
+ config = await ensureFreshToken();
121
+ } catch (err) {
122
+ console.error(chalk.red(` [logs] token refresh failed: ${err.message}`));
123
+ await sleep(5000);
124
+ continue;
125
+ }
126
+
127
+ try {
128
+ await streamLogs({
129
+ config,
130
+ lines: firstConnect ? 20 : 0,
131
+ signal: abort.signal,
132
+ onLine: (line, type) => {
133
+ const prefix = chalk.gray(' │ ');
134
+ switch (type) {
135
+ case 'error': console.log(prefix + chalk.red(line)); break;
136
+ case 'warning': console.log(prefix + chalk.yellow(line)); break;
137
+ case 'success': console.log(prefix + chalk.green(line)); break;
138
+ default: console.log(prefix + chalk.gray(line));
139
+ }
140
+ },
141
+ onInitComplete: () => {
142
+ if (firstConnect) {
143
+ console.log(chalk.gray(' │ --- live logs ---'));
144
+ firstConnect = false;
145
+ }
146
+ },
147
+ });
148
+ } catch (err) {
149
+ if (abort.signal.aborted) return;
150
+ // Server timeouts after 5 min are expected; don't spam.
151
+ if (!/HTTP 5|fetch failed|terminated|ECONN/i.test(err.message)) {
152
+ console.error(chalk.yellow(` [logs] ${err.message}`));
153
+ }
154
+ }
155
+
156
+ if (abort.signal.aborted) return;
157
+ await sleep(1000);
158
+ }
159
+ })();
160
+
161
+ return abort;
162
+ }
163
+
164
+ function sleep(ms) {
165
+ return new Promise((r) => setTimeout(r, ms));
166
+ }
package/src/lib/api.js CHANGED
@@ -99,3 +99,50 @@ export function createClient() {
99
99
  }
100
100
  return new ApiClient(config);
101
101
  }
102
+
103
+ /**
104
+ * Returns the current access token, refreshing it first if it appears to be
105
+ * close to expiry. Used by SSE commands (tm logs, tm monitor) which build
106
+ * their own EventSource and cannot use the ApiClient retry-on-401 path.
107
+ *
108
+ * Returns the (possibly refreshed) config object, with .token guaranteed
109
+ * fresh enough for at least the next few minutes.
110
+ */
111
+ export async function ensureFreshToken() {
112
+ const config = loadConfig();
113
+ if (!config) {
114
+ throw new Error('Not logged in. Run `tm login` first.');
115
+ }
116
+
117
+ // JWT exp lives in the payload. Decode without verifying — we just want
118
+ // to know roughly when it expires so we can refresh proactively.
119
+ const exp = decodeJwtExp(config.token);
120
+ const now = Math.floor(Date.now() / 1000);
121
+ const refreshThreshold = 60; // refresh if less than 60 s remain
122
+
123
+ if (exp === null || exp - now > refreshThreshold) {
124
+ return config;
125
+ }
126
+
127
+ // Try a refresh.
128
+ const client = new ApiClient(config);
129
+ const ok = await client._refresh();
130
+ if (!ok) {
131
+ throw new Error('Token expired and refresh failed. Run `tm login` again.');
132
+ }
133
+ return loadConfig();
134
+ }
135
+
136
+ function decodeJwtExp(token) {
137
+ if (!token) return null;
138
+ try {
139
+ const parts = token.split('.');
140
+ if (parts.length !== 3) return null;
141
+ // base64url → base64
142
+ const payload = parts[1].replace(/-/g, '+').replace(/_/g, '/');
143
+ const decoded = JSON.parse(Buffer.from(payload, 'base64').toString('utf8'));
144
+ return typeof decoded.exp === 'number' ? decoded.exp : null;
145
+ } catch {
146
+ return null;
147
+ }
148
+ }
@@ -0,0 +1,81 @@
1
+ /**
2
+ * Minimal SSE reader for /api/logs/stream.
3
+ *
4
+ * Why not the `eventsource` package: v2 of that package breaks against
5
+ * HTTP/2 origins (our Apache speaks h2), and v3 has a new API anyway. A
6
+ * hand-rolled parser is ~40 lines and avoids the dependency entirely.
7
+ *
8
+ * Protocol we actually receive:
9
+ * data: {"line":"...","type":"info"}\n\n
10
+ * event: init_complete\ndata: {}\n\n
11
+ * : heartbeat\n\n
12
+ *
13
+ * We only care about unnamed data events and the init_complete marker.
14
+ */
15
+ export async function streamLogs({ config, lines = 50, signal, onLine, onInitComplete }) {
16
+ const url = `${config.server}/api/logs/stream?tenant=${encodeURIComponent(config.tenant)}&lines=${lines}`;
17
+
18
+ const res = await fetch(url, {
19
+ headers: {
20
+ Authorization: `Bearer ${config.token}`,
21
+ Accept: 'text/event-stream',
22
+ },
23
+ signal,
24
+ });
25
+
26
+ if (!res.ok) {
27
+ throw new Error(`HTTP ${res.status} ${res.statusText}`);
28
+ }
29
+ if (!res.body) {
30
+ throw new Error('Response has no body (fetch streaming unsupported?)');
31
+ }
32
+
33
+ const reader = res.body.getReader();
34
+ const decoder = new TextDecoder('utf-8');
35
+ let buffer = '';
36
+
37
+ while (true) {
38
+ const { value, done } = await reader.read();
39
+ if (done) return;
40
+ buffer += decoder.decode(value, { stream: true });
41
+
42
+ // SSE events are separated by a blank line (\n\n). Split off complete
43
+ // events and leave the partial tail in the buffer.
44
+ let sep;
45
+ while ((sep = buffer.indexOf('\n\n')) !== -1) {
46
+ const raw = buffer.slice(0, sep);
47
+ buffer = buffer.slice(sep + 2);
48
+ handleEvent(raw, { onLine, onInitComplete });
49
+ }
50
+ }
51
+ }
52
+
53
+ function handleEvent(raw, { onLine, onInitComplete }) {
54
+ // Comments (heartbeats) start with ':'
55
+ if (raw.startsWith(':')) return;
56
+
57
+ let event = 'message';
58
+ const dataLines = [];
59
+ for (const line of raw.split('\n')) {
60
+ if (line.startsWith('event:')) {
61
+ event = line.slice(6).trim();
62
+ } else if (line.startsWith('data:')) {
63
+ dataLines.push(line.slice(5).replace(/^ /, ''));
64
+ }
65
+ }
66
+
67
+ if (event === 'init_complete') {
68
+ onInitComplete?.();
69
+ return;
70
+ }
71
+
72
+ if (dataLines.length === 0) return;
73
+ const data = dataLines.join('\n');
74
+ try {
75
+ const parsed = JSON.parse(data);
76
+ onLine?.(parsed.line ?? '', parsed.type ?? 'info');
77
+ } catch {
78
+ // Malformed JSON — pass through as info so nothing is lost.
79
+ onLine?.(data, 'info');
80
+ }
81
+ }