@kapeta/local-cluster-service 0.71.0 → 0.71.2

This diff shows the changes between these publicly released package versions as they appear in the public registry. It is provided for informational purposes only.
package/CHANGELOG.md CHANGED
@@ -1,3 +1,18 @@
+## [0.71.2](https://github.com/kapetacom/local-cluster-service/compare/v0.71.1...v0.71.2) (2024-09-18)
+
+
+### Bug Fixes
+
+* send initial screen errors too ([#253](https://github.com/kapetacom/local-cluster-service/issues/253)) ([386dabb](https://github.com/kapetacom/local-cluster-service/commit/386dabb0acb07384691242ecf4cc3081e0d04a05))
+
+## [0.71.1](https://github.com/kapetacom/local-cluster-service/compare/v0.71.0...v0.71.1) (2024-09-17)
+
+
+### Bug Fixes
+
+* emit all events except chunks from screen endpoints ([8e67b58](https://github.com/kapetacom/local-cluster-service/commit/8e67b58b6cc70c359556edfc61dd904254f9d3bc))
+* replace fetch retry handler library to avoid fetch failed ([59a4342](https://github.com/kapetacom/local-cluster-service/commit/59a4342203cd8efb706beba254284ce321fe313a))
+
 # [0.71.0](https://github.com/kapetacom/local-cluster-service/compare/v0.70.12...v0.71.0) (2024-09-16)
 
 
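Taken together, the 0.71.1 and 0.71.2 fixes amount to two changes in the screen endpoints: every queue event except the high-volume `FILE_CHUNK` events is now forwarded to the HTTP response, and errors raised while generating screens are forwarded as well instead of only being logged. A minimal sketch of that handler shape, assuming an Express `Response` and hypothetical `sendEvent`/`sendError` helpers standing in for the ones used in the diffs below:

```ts
import type { Response } from 'express';
import { EventEmitter } from 'node:events';

// Assumed event shape; the real StormEvent type lives in the package's ./events module.
type StormEvent = { type: string; [key: string]: unknown };

// Hypothetical helpers standing in for the package's own sendEvent/sendError.
declare function sendEvent(res: Response, event: StormEvent): void;
declare function sendError(err: unknown, res: Response): void;

function forwardQueueEvents(queue: EventEmitter, res: Response) {
    queue.on('event', (event: StormEvent) => {
        if (event.type === 'FILE_CHUNK') {
            // File chunks are too chatty to stream to the client; everything else goes through.
            return;
        }
        sendEvent(res, event);
    });
    queue.on('error', (err) => {
        // 0.71.2: screen errors are sent to the client instead of only being logged.
        sendError(err, res);
    });
}
```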
@@ -268,7 +268,7 @@ class PageQueue extends node_events_1.EventEmitter {
         });
         await screenStream.waitForDone();
         if (!pageEvent) {
-            throw new Error('No page was generated');
+            throw new Error('No page was generated for ' + prompt.name);
         }
         await this.processPageEventWithReferences(pageEvent);
     }
@@ -129,6 +129,12 @@ router.post('/ui/screen', async (req, res) => {
         console.error('Failed to process page', err);
         sendError(err, res);
     });
+    queue.on('event', (event) => {
+        if (event.type === 'FILE_CHUNK') {
+            return;
+        }
+        sendEvent(res, event);
+    });
     await queue.addPrompt(aiRequest, conversationId, true);
     await queue.wait();
     await Promise.allSettled(promises);
@@ -207,8 +213,11 @@ router.post('/:handle/ui/iterative', async (req, res) => {
         pageQueue.cancel();
     });
     pageQueue.on('page', (screenData) => sendPageEvent(landingPagesStream.getConversationId(), screenData, res));
-    pageQueue.on('event', (screenData) => {
-        sendEvent(res, screenData);
+    pageQueue.on('event', (event) => {
+        if (event.type === 'FILE_CHUNK') {
+            return;
+        }
+        sendEvent(res, event);
     });
     pageQueue.on('error', (err) => {
         console.error('Failed to process page', err);
@@ -360,8 +369,11 @@ router.post('/:handle/ui', async (req, res) => {
         queue.cancel();
     });
     queue.on('page', (screenData) => sendPageEvent(outerConversationId, screenData, res));
-    queue.on('event', (screenData) => {
-        sendEvent(res, screenData);
+    queue.on('event', (event) => {
+        if (event.type === 'FILE_CHUNK') {
+            return;
+        }
+        sendEvent(res, event);
     });
     queue.on('error', (err) => {
         console.error('Failed to process page', err);
@@ -382,6 +394,7 @@ router.post('/:handle/ui', async (req, res) => {
         })
             .catch((e) => {
             console.error('Failed to generate page for screen %s', screen.name, e);
+            sendError(e, res);
         });
     }
     if (userJourneysStream.isAborted()) {
@@ -417,10 +430,11 @@ router.post('/ui/edit', async (req, res) => {
             return promise;
         }
     });
-    queue.on('event', (data) => {
-        if (data.type === 'FILE_START' || data.type === 'FILE_DONE' || data.type === 'FILE_STATE') {
-            sendEvent(res, data);
+    queue.on('event', (event) => {
+        if (event.type === 'FILE_CHUNK') {
+            return;
         }
+        sendEvent(res, event);
     });
     queue.on('error', (err) => {
         console.error('Failed to process page', err);
@@ -1,3 +1,4 @@
+/// <reference types="node" />
 import { ConversationItem, ImplementAPIClientsRequest, StormFileImplementationPrompt, StormStream, StormUIImplementationPrompt, StormUIListPrompt } from './stream';
 import { Page, StormEventPageUrl } from './events';
 export declare const STORM_ID = "storm";
@@ -65,7 +66,7 @@ declare class StormClient {
     createUIShells(prompt: UIShellsPrompt, conversationId?: string): Promise<StormStream>;
     createUILandingPages(prompt: BasePromptRequest, conversationId?: string): Promise<StormStream>;
     createUIPage(prompt: UIPagePrompt, conversationId?: string, history?: ConversationItem[]): Promise<StormStream>;
-    voteUIPage(topic: string, conversationId: string, vote: -1 | 0 | 1, mainConversationId?: string): Promise<import("undici").Response>;
+    voteUIPage(topic: string, conversationId: string, vote: -1 | 0 | 1, mainConversationId?: string): Promise<Response>;
     getVoteUIPage(topic: string, conversationId: string, mainConversationId?: string): Promise<{
         vote: -1 | 0 | 1;
     }>;
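With undici removed, the declaration file now leans on the `Response` type that Node 18+ exposes for the built-in fetch (hence the added `/// <reference types="node" />`). A hedged usage sketch under that assumption; the client handle and its wiring are hypothetical, only the `voteUIPage` signature is taken from the diff:

```ts
// Hypothetical client handle; only the voteUIPage signature mirrors the declaration above.
declare const stormClient: {
    voteUIPage(topic: string, conversationId: string, vote: -1 | 0 | 1, mainConversationId?: string): Promise<Response>;
};

async function upvote(topic: string, conversationId: string): Promise<void> {
    // Response here is the global fetch Response from Node 18+, not import("undici").Response.
    const res: Response = await stormClient.voteUIPage(topic, conversationId, 1);
    if (!res.ok) {
        throw new Error(`Vote failed with status ${res.status}`);
    }
}
```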
@@ -13,22 +13,8 @@ const utils_1 = require("../utils/utils");
 const promises_1 = __importDefault(require("node:readline/promises"));
 const node_stream_1 = require("node:stream");
 const stream_1 = require("./stream");
-const undici_1 = require("undici");
-// Will only retry on error codes and GET requests by default
-// See https://github.com/nodejs/undici/blob/990df2c7e37cbe5bb44fe2f576dddeaeb5916590/docs/docs/api/RetryAgent.md
-const retryAgent = new undici_1.RetryAgent(new undici_1.Agent(), {
-    methods: [
-        // Added methods ↓ (not idempotent), but we want to retry on POST:
-        'POST',
-        // defaults below
-        'GET',
-        'HEAD',
-        'OPTIONS',
-        'PUT',
-        'DELETE',
-        'TRACE',
-    ],
-});
+const fetch_retry_1 = __importDefault(require("fetch-retry"));
+const fetchWithRetries = (0, fetch_retry_1.default)(global.fetch, { retries: 5, retryDelay: 10 });
 exports.STORM_ID = 'storm';
 exports.ConversationIdHeader = 'Conversation-Id';
 class StormClient {
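The retry change swaps undici's `RetryAgent`, which (as the removed comment notes) only retries on certain error codes and on GET by default, for the `fetch-retry` wrapper around the global `fetch`, which also retries when the request itself rejects with `fetch failed`. A small sketch of the same wiring; the `probe` helper is illustrative only, while the options mirror the ones in the diff:

```ts
import createFetchRetry from 'fetch-retry';

// Same options as the package uses: up to 5 retries, 10 ms between attempts.
// fetch-retry's default behaviour is to retry on network errors (rejected fetches).
const fetchWithRetries = createFetchRetry(global.fetch, {
    retries: 5,
    retryDelay: 10,
});

// Illustrative helper: a transient "fetch failed" (e.g. ECONNRESET) is retried
// instead of bubbling up to the caller immediately.
async function probe(url: string): Promise<number> {
    const response = await fetchWithRetries(url, { method: 'POST', body: '{}' });
    return response.status;
}
```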
@@ -54,7 +40,6 @@ class StormClient {
             method: method,
             body: JSON.stringify(body),
             headers,
-            dispatcher: retryAgent,
         };
     }
     async send(path, body, method = 'POST') {
@@ -65,7 +50,7 @@ class StormClient {
         });
         const abort = new AbortController();
         options.signal = abort.signal;
-        const response = await (0, undici_1.fetch)(options.url, options);
+        const response = await fetchWithRetries(options.url, options);
         if (response.status !== 200) {
             throw new Error(`Got error response from ${options.url}: ${response.status}\nContent: ${await response.text()}`);
         }
@@ -138,19 +123,19 @@ class StormClient {
             prompt: JSON.stringify({ topic, vote, mainConversationId }),
             conversationId,
         });
-        return (0, undici_1.fetch)(options.url, options);
+        return fetch(options.url, options);
     }
     async getVoteUIPage(topic, conversationId, mainConversationId) {
         const options = await this.createOptions('/v2/ui/get-vote', 'POST', {
             prompt: JSON.stringify({ topic, mainConversationId }),
             conversationId,
         });
-        const response = await (0, undici_1.fetch)(options.url, options);
+        const response = await fetch(options.url, options);
         return response.json();
     }
     async implementAPIClients(prompt) {
         const u = `${this._baseUrl}/v2/ui/implement-api-clients`;
-        const response = await (0, undici_1.fetch)(u, {
+        const response = await fetch(u, {
             method: 'POST',
             body: JSON.stringify({
                 fileName: prompt.fileName,
@@ -162,7 +147,7 @@ class StormClient {
     }
     async generatePrompt(pages) {
         const u = `${this._baseUrl}/v2/ui/prompt`;
-        const response = await (0, undici_1.fetch)(u, {
+        const response = await fetch(u, {
             method: 'POST',
             body: JSON.stringify({
                 pages: pages,
@@ -235,7 +220,7 @@ class StormClient {
             prompt: '',
             conversationId: conversationId,
         });
-        const response = await (0, undici_1.fetch)(options.url, options);
+        const response = await fetch(options.url, options);
         return response.text();
     }
 }
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
     "name": "@kapeta/local-cluster-service",
-    "version": "0.71.0",
+    "version": "0.71.2",
     "description": "Manages configuration, ports and service discovery for locally running Kapeta systems",
     "type": "commonjs",
     "exports": {
@@ -70,6 +70,7 @@
         "download-git-repo": "^3.0.2",
         "express": "4.17.1",
         "express-promise-router": "^4.1.1",
+        "fetch-retry": "^6.0.0",
         "fs-extra": "^11.1.0",
         "glob": "^7.1.6",
         "gunzip-maybe": "^1.4.2",
@@ -88,7 +89,6 @@
         "stream-json": "^1.8.0",
         "tar-stream": "^3.1.6",
         "typescript": "^5.1.6",
-        "undici": "^6.19.8",
         "uuid": "^9.0.1",
         "yaml": "^1.6.0"
     },
@@ -259,7 +259,7 @@ export class PageQueue extends EventEmitter {
         // Add safeguard to avoid generating images for nonsense URLs
         // Sometimes we get entries for Base URLs that will then cause issues on the filesystem
         // Example: https://www.kapeta.com/images/
-        const mimeType = mimetypes.lookup(prompt.url) as string | false;
+        const mimeType = mimetypes.lookup(prompt.url);
         if (!mimeType || !mimeType.startsWith('image/')) {
             console.warn('Skipping image reference of type %s for url %s', mimeType, prompt.url);
             return;
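The dropped cast works because, assuming the `mimetypes` import in the diff is the `mime-types` package, its `lookup()` is already typed as `string | false`. A hedged sketch of the same guard in isolation:

```ts
// Assumes the `mimetypes` import in the diff is the `mime-types` package,
// whose lookup() is typed as string | false out of the box.
import mimetypes from 'mime-types';

function isImageUrl(url: string): boolean {
    const mimeType = mimetypes.lookup(url); // string | false
    return mimeType !== false && mimeType.startsWith('image/');
}
```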
@@ -300,7 +300,7 @@ export class PageQueue extends EventEmitter {
 
         await screenStream.waitForDone();
         if (!pageEvent) {
-            throw new Error('No page was generated');
+            throw new Error('No page was generated for ' + prompt.name);
         }
         await this.processPageEventWithReferences(pageEvent);
     }
@@ -181,6 +181,13 @@ router.post('/ui/screen', async (req: KapetaBodyRequest, res: Response) => {
         sendError(err as any, res);
     });
 
+    queue.on('event', (event: StormEvent) => {
+        if (event.type === 'FILE_CHUNK') {
+            return;
+        }
+        sendEvent(res, event);
+    });
+
     await queue.addPrompt(aiRequest, conversationId, true);
 
     await queue.wait();
@@ -274,8 +281,11 @@ router.post('/:handle/ui/iterative', async (req: KapetaBodyRequest, res: Respons
         sendPageEvent(landingPagesStream.getConversationId(), screenData, res)
     );
 
-    pageQueue.on('event', (screenData: StormEvent) => {
-        sendEvent(res, screenData);
+    pageQueue.on('event', (event: StormEvent) => {
+        if (event.type === 'FILE_CHUNK') {
+            return;
+        }
+        sendEvent(res, event);
     });
 
     pageQueue.on('error', (err) => {
@@ -460,8 +470,11 @@ router.post('/:handle/ui', async (req: KapetaBodyRequest, res: Response) => {
 
     queue.on('page', (screenData: StormEventPage) => sendPageEvent(outerConversationId, screenData, res));
 
-    queue.on('event', (screenData: StormEvent) => {
-        sendEvent(res, screenData);
+    queue.on('event', (event: StormEvent) => {
+        if (event.type === 'FILE_CHUNK') {
+            return;
+        }
+        sendEvent(res, event);
     });
 
     queue.on('error', (err) => {
@@ -484,6 +497,7 @@ router.post('/:handle/ui', async (req: KapetaBodyRequest, res: Response) => {
             })
             .catch((e) => {
                 console.error('Failed to generate page for screen %s', screen.name, e);
+                sendError(e as any, res);
             });
     }
 
@@ -527,10 +541,11 @@ router.post('/ui/edit', async (req: KapetaBodyRequest, res: Response) => {
         }
     });
 
-    queue.on('event', (data) => {
-        if (data.type === 'FILE_START' || data.type === 'FILE_DONE' || data.type === 'FILE_STATE') {
-            sendEvent(res, data);
+    queue.on('event', (event) => {
+        if (event.type === 'FILE_CHUNK') {
+            return;
         }
+        sendEvent(res, event);
     });
 
     queue.on('error', (err) => {
@@ -16,23 +16,8 @@ import {
     StormUIListPrompt,
 } from './stream';
 import { Page, StormEventPageUrl } from './events';
-import { fetch, RequestInit, Agent, RetryAgent } from 'undici';
-
-// Will only retry on error codes and GET requests by default
-// See https://github.com/nodejs/undici/blob/990df2c7e37cbe5bb44fe2f576dddeaeb5916590/docs/docs/api/RetryAgent.md
-const retryAgent = new RetryAgent(new Agent(), {
-    methods: [
-        // Added methods ↓ (not idempotent), but we want to retry on POST:
-        'POST',
-        // defaults below
-        'GET',
-        'HEAD',
-        'OPTIONS',
-        'PUT',
-        'DELETE',
-        'TRACE',
-    ],
-});
+import createFetch from 'fetch-retry';
+const fetchWithRetries = createFetch(global.fetch, { retries: 5, retryDelay: 10 });
 
 export const STORM_ID = 'storm';
 
@@ -129,7 +114,6 @@ class StormClient {
             method: method,
             body: JSON.stringify(body),
             headers,
-            dispatcher: retryAgent,
         };
     }
 
@@ -148,7 +132,7 @@ class StormClient {
         const abort = new AbortController();
         options.signal = abort.signal;
 
-        const response = await fetch(options.url, options);
+        const response = await fetchWithRetries(options.url, options);
 
         if (response.status !== 200) {
             throw new Error(