@kapeta/local-cluster-service 0.76.0 → 0.76.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +15 -0
- package/dist/cjs/src/instanceManager.js +12 -0
- package/dist/cjs/src/instances/routes.js +4 -1
- package/dist/cjs/src/serviceManager.js +7 -2
- package/dist/cjs/src/storm/PageGenerator.js +3 -3
- package/dist/cjs/src/storm/routes.js +6 -4
- package/dist/esm/src/instanceManager.js +12 -0
- package/dist/esm/src/instances/routes.js +4 -1
- package/dist/esm/src/serviceManager.js +7 -2
- package/dist/esm/src/storm/PageGenerator.js +3 -3
- package/dist/esm/src/storm/routes.js +6 -4
- package/package.json +1 -1
- package/src/instanceManager.ts +16 -0
- package/src/instances/routes.ts +4 -1
- package/src/serviceManager.ts +6 -2
- package/src/storm/PageGenerator.ts +3 -3
- package/src/storm/routes.ts +6 -4
package/CHANGELOG.md
CHANGED
@@ -1,3 +1,18 @@
+## [0.76.2](https://github.com/kapetacom/local-cluster-service/compare/v0.76.1...v0.76.2) (2024-09-30)
+
+
+### Bug Fixes
+
+* gracefully handle missing docker ([973c2d3](https://github.com/kapetacom/local-cluster-service/commit/973c2d383b0315e7fd6aa376b248417f76bd8f62))
+* port conflict cant prevent server start ([0aac354](https://github.com/kapetacom/local-cluster-service/commit/0aac354742c3ccadd9da769a8802bea5b38df736))
+
+## [0.76.1](https://github.com/kapetacom/local-cluster-service/compare/v0.76.0...v0.76.1) (2024-09-30)
+
+
+### Bug Fixes
+
+* more handle logging ([80e8a4c](https://github.com/kapetacom/local-cluster-service/commit/80e8a4c50df9b40226e920ac28f8a7e843c72d73))
+
 # [0.76.0](https://github.com/kapetacom/local-cluster-service/compare/v0.75.0...v0.76.0) (2024-09-30)
 
 
package/dist/cjs/src/instanceManager.js
CHANGED
@@ -427,6 +427,10 @@ class InstanceManager {
         if (!blockAsset) {
             throw new Error('Block not found: ' + blockRef);
         }
+        const isAlive = containerManager_1.containerManager.isAlive();
+        if (!isAlive) {
+            throw new Error('Docker is not running or is not responding');
+        }
         if (checkForSingleton && (await this.isSingletonOperator(blockAsset))) {
             const instances = await this.getAllInstancesForKind(systemId, blockAsset.data.kind);
             if (instances.length > 1) {
@@ -584,6 +588,10 @@ class InstanceManager {
         //console.log('\n## Checking instances:');
         let changed = false;
         const all = [...this._instances];
+        if (!containerManager_1.containerManager.isAlive()) {
+            // No need to check anything if docker is not running
+            return;
+        }
         while (all.length > 0) {
             // Check a few instances at a time - docker doesn't like too many concurrent requests
             const chunk = all.splice(0, 30);
@@ -712,6 +720,10 @@ class InstanceManager {
     }
     async getExternalStatus(instance) {
         if (instance.type === types_1.InstanceType.DOCKER) {
+            if (!containerManager_1.containerManager.isAlive()) {
+                // Consider making this "unknown"
+                return types_1.InstanceStatus.STOPPED;
+            }
             const containerName = await (0, utils_1.getBlockInstanceContainerName)(instance.systemId, instance.instanceId);
             const container = await containerManager_1.containerManager.getContainerByName(containerName);
             if (!container) {
package/dist/cjs/src/instances/routes.js
CHANGED
@@ -73,13 +73,16 @@ router.post('/:systemId/:instanceId/start', async (req, res) => {
                taskId: result.id,
            });
        }
-        else {
+        else if (result) {
            res.status(202).send({
                ok: true,
                pid: result.pid,
                type: result.type,
            });
        }
+        else {
+            res.status(500).send({ ok: false, error: 'Failed to start instance' });
+        }
    }
    catch (e) {
        res.status(500).send({ ok: false, error: e.message });
package/dist/cjs/src/serviceManager.js
CHANGED
@@ -24,10 +24,15 @@ class ServiceManager {
        if (!this._systems) {
            this._systems = {};
        }
-        lodash_1.default.forEach(this._systems, (system) => {
+        lodash_1.default.forEach(this._systems, (system, systemId) => {
            lodash_1.default.forEach(system, (services) => {
                lodash_1.default.forEach(services, (portInfo) => {
-                    clusterService_1.clusterService.reservePort(portInfo.port);
+                    try {
+                        clusterService_1.clusterService.reservePort(portInfo.port);
+                    }
+                    catch (e) {
+                        console.warn('Failed to reserve port', systemId, portInfo.port, e);
+                    }
                });
            });
        });
package/dist/cjs/src/storm/PageGenerator.js
CHANGED
@@ -246,7 +246,7 @@ class PageQueue extends node_events_1.EventEmitter {
            console.warn('Skipping image reference of type %s for url %s', mimeType, prompt.url);
            return;
        }
-        const client = new stormClient_1.StormClient(this.systemId);
+        const client = new stormClient_1.StormClient(this.handle, this.systemId);
        this.images.set(prompt.url, prompt.description);
        const result = await client.createImage(`Create an image for the url "${prompt.url}" with this description: ${prompt.description}`.trim());
        let imageEvent = null;
@@ -263,7 +263,7 @@ class PageQueue extends node_events_1.EventEmitter {
        this.emit('image', imageEvent, prompt);
    }
    async generate(prompt, conversationId) {
-        const client = new stormClient_1.StormClient(
+        const client = new stormClient_1.StormClient(this.handle, this.systemId);
        const screenStream = await client.createUIPage(prompt, conversationId);
        let pageEvent = null;
        screenStream.on('data', (event) => {
@@ -281,7 +281,7 @@ class PageQueue extends node_events_1.EventEmitter {
        await this.processPageEventWithReferences(pageEvent);
    }
    async resolveReferences(content) {
-        const client = new stormClient_1.StormClient(
+        const client = new stormClient_1.StormClient(this.handle, this.systemId);
        const referenceStream = await client.classifyUIReferences(content);
        const references = [];
        referenceStream.on('data', (referenceData) => {
package/dist/cjs/src/storm/routes.js
CHANGED
@@ -226,8 +226,9 @@ router.delete('/ui/serve/:systemId', async (req, res) => {
    }
    res.status(200).json({ status: 'ok' });
});
-router.post('/ui/screen', async (req, res) => {
+router.post('/:handle/ui/screen', async (req, res) => {
    try {
+        const handle = req.params.handle;
        const conversationId = req.headers[stormClient_1.ConversationIdHeader.toLowerCase()];
        const systemId = req.headers[page_utils_1.SystemIdHeader.toLowerCase()];
        const aiRequest = JSON.parse(req.stringBody ?? '{}');
@@ -236,7 +237,7 @@ router.post('/ui/screen', async (req, res) => {
        res.set('Access-Control-Expose-Headers', stormClient_1.ConversationIdHeader);
        res.set(stormClient_1.ConversationIdHeader, conversationId);
        const parentConversationId = systemId ?? '';
-        const queue = new PageGenerator_1.PageQueue(
+        const queue = new PageGenerator_1.PageQueue(handle, parentConversationId, '', 5);
        onRequestAborted(req, res, () => {
            queue.cancel();
        });
@@ -539,13 +540,14 @@ router.post('/:handle/ui', async (req, res) => {
        }
    }
});
-router.post('/ui/edit', async (req, res) => {
+router.post('/:handle/ui/edit', async (req, res) => {
    try {
+        const handle = req.params.handle;
        const systemId = (req.headers[page_utils_1.SystemIdHeader.toLowerCase()] ||
            req.headers[stormClient_1.ConversationIdHeader.toLowerCase()]);
        const aiRequest = JSON.parse(req.stringBody ?? '{}');
        const storagePrefix = systemId ? systemId + '_' : 'mock_';
-        const queue = new PageGenerator_1.PageQueue(
+        const queue = new PageGenerator_1.PageQueue(handle, systemId, '', 5);
        onRequestAborted(req, res, () => {
            queue.cancel();
        });
package/dist/esm/src/instanceManager.js
CHANGED
@@ -427,6 +427,10 @@ class InstanceManager {
         if (!blockAsset) {
             throw new Error('Block not found: ' + blockRef);
         }
+        const isAlive = containerManager_1.containerManager.isAlive();
+        if (!isAlive) {
+            throw new Error('Docker is not running or is not responding');
+        }
         if (checkForSingleton && (await this.isSingletonOperator(blockAsset))) {
             const instances = await this.getAllInstancesForKind(systemId, blockAsset.data.kind);
             if (instances.length > 1) {
@@ -584,6 +588,10 @@ class InstanceManager {
         //console.log('\n## Checking instances:');
         let changed = false;
         const all = [...this._instances];
+        if (!containerManager_1.containerManager.isAlive()) {
+            // No need to check anything if docker is not running
+            return;
+        }
         while (all.length > 0) {
             // Check a few instances at a time - docker doesn't like too many concurrent requests
             const chunk = all.splice(0, 30);
@@ -712,6 +720,10 @@ class InstanceManager {
     }
     async getExternalStatus(instance) {
         if (instance.type === types_1.InstanceType.DOCKER) {
+            if (!containerManager_1.containerManager.isAlive()) {
+                // Consider making this "unknown"
+                return types_1.InstanceStatus.STOPPED;
+            }
             const containerName = await (0, utils_1.getBlockInstanceContainerName)(instance.systemId, instance.instanceId);
             const container = await containerManager_1.containerManager.getContainerByName(containerName);
             if (!container) {
package/dist/esm/src/instances/routes.js
CHANGED
@@ -73,13 +73,16 @@ router.post('/:systemId/:instanceId/start', async (req, res) => {
                taskId: result.id,
            });
        }
-        else {
+        else if (result) {
            res.status(202).send({
                ok: true,
                pid: result.pid,
                type: result.type,
            });
        }
+        else {
+            res.status(500).send({ ok: false, error: 'Failed to start instance' });
+        }
    }
    catch (e) {
        res.status(500).send({ ok: false, error: e.message });
package/dist/esm/src/serviceManager.js
CHANGED
@@ -24,10 +24,15 @@ class ServiceManager {
        if (!this._systems) {
            this._systems = {};
        }
-        lodash_1.default.forEach(this._systems, (system) => {
+        lodash_1.default.forEach(this._systems, (system, systemId) => {
            lodash_1.default.forEach(system, (services) => {
                lodash_1.default.forEach(services, (portInfo) => {
-                    clusterService_1.clusterService.reservePort(portInfo.port);
+                    try {
+                        clusterService_1.clusterService.reservePort(portInfo.port);
+                    }
+                    catch (e) {
+                        console.warn('Failed to reserve port', systemId, portInfo.port, e);
+                    }
                });
            });
        });
package/dist/esm/src/storm/PageGenerator.js
CHANGED
@@ -246,7 +246,7 @@ class PageQueue extends node_events_1.EventEmitter {
            console.warn('Skipping image reference of type %s for url %s', mimeType, prompt.url);
            return;
        }
-        const client = new stormClient_1.StormClient(this.systemId);
+        const client = new stormClient_1.StormClient(this.handle, this.systemId);
        this.images.set(prompt.url, prompt.description);
        const result = await client.createImage(`Create an image for the url "${prompt.url}" with this description: ${prompt.description}`.trim());
        let imageEvent = null;
@@ -263,7 +263,7 @@ class PageQueue extends node_events_1.EventEmitter {
        this.emit('image', imageEvent, prompt);
    }
    async generate(prompt, conversationId) {
-        const client = new stormClient_1.StormClient(
+        const client = new stormClient_1.StormClient(this.handle, this.systemId);
        const screenStream = await client.createUIPage(prompt, conversationId);
        let pageEvent = null;
        screenStream.on('data', (event) => {
@@ -281,7 +281,7 @@ class PageQueue extends node_events_1.EventEmitter {
        await this.processPageEventWithReferences(pageEvent);
    }
    async resolveReferences(content) {
-        const client = new stormClient_1.StormClient(
+        const client = new stormClient_1.StormClient(this.handle, this.systemId);
        const referenceStream = await client.classifyUIReferences(content);
        const references = [];
        referenceStream.on('data', (referenceData) => {
package/dist/esm/src/storm/routes.js
CHANGED
@@ -226,8 +226,9 @@ router.delete('/ui/serve/:systemId', async (req, res) => {
    }
    res.status(200).json({ status: 'ok' });
});
-router.post('/ui/screen', async (req, res) => {
+router.post('/:handle/ui/screen', async (req, res) => {
    try {
+        const handle = req.params.handle;
        const conversationId = req.headers[stormClient_1.ConversationIdHeader.toLowerCase()];
        const systemId = req.headers[page_utils_1.SystemIdHeader.toLowerCase()];
        const aiRequest = JSON.parse(req.stringBody ?? '{}');
@@ -236,7 +237,7 @@ router.post('/ui/screen', async (req, res) => {
        res.set('Access-Control-Expose-Headers', stormClient_1.ConversationIdHeader);
        res.set(stormClient_1.ConversationIdHeader, conversationId);
        const parentConversationId = systemId ?? '';
-        const queue = new PageGenerator_1.PageQueue(
+        const queue = new PageGenerator_1.PageQueue(handle, parentConversationId, '', 5);
        onRequestAborted(req, res, () => {
            queue.cancel();
        });
@@ -539,13 +540,14 @@ router.post('/:handle/ui', async (req, res) => {
        }
    }
});
-router.post('/ui/edit', async (req, res) => {
+router.post('/:handle/ui/edit', async (req, res) => {
    try {
+        const handle = req.params.handle;
        const systemId = (req.headers[page_utils_1.SystemIdHeader.toLowerCase()] ||
            req.headers[stormClient_1.ConversationIdHeader.toLowerCase()]);
        const aiRequest = JSON.parse(req.stringBody ?? '{}');
        const storagePrefix = systemId ? systemId + '_' : 'mock_';
-        const queue = new PageGenerator_1.PageQueue(
+        const queue = new PageGenerator_1.PageQueue(handle, systemId, '', 5);
        onRequestAborted(req, res, () => {
            queue.cancel();
        });
package/package.json
CHANGED
package/src/instanceManager.ts
CHANGED
@@ -575,6 +575,11 @@ export class InstanceManager {
            throw new Error('Block not found: ' + blockRef);
        }
 
+        const isAlive = containerManager.isAlive();
+        if (!isAlive) {
+            throw new Error('Docker is not running or is not responding');
+        }
+
        if (checkForSingleton && (await this.isSingletonOperator(blockAsset))) {
            const instances = await this.getAllInstancesForKind(systemId, blockAsset.data.kind);
            if (instances.length > 1) {
@@ -767,6 +772,12 @@ export class InstanceManager {
        //console.log('\n## Checking instances:');
        let changed = false;
        const all = [...this._instances];
+
+        if (!containerManager.isAlive()) {
+            // No need to check anything if docker is not running
+            return;
+        }
+
        while (all.length > 0) {
            // Check a few instances at a time - docker doesn't like too many concurrent requests
            const chunk = all.splice(0, 30);
@@ -932,6 +943,11 @@ export class InstanceManager {
 
    private async getExternalStatus(instance: InstanceInfo): Promise<InstanceStatus> {
        if (instance.type === InstanceType.DOCKER) {
+            if (!containerManager.isAlive()) {
+                // Consider making this "unknown"
+                return InstanceStatus.STOPPED;
+            }
+
            const containerName = await getBlockInstanceContainerName(instance.systemId, instance.instanceId);
            const container = await containerManager.getContainerByName(containerName);
            if (!container) {
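These three additions are the "gracefully handle missing docker" fix: block starts now fail fast with a clear error when the Docker daemon is unreachable, the background instance check bails out early, and Docker-backed instances report as stopped rather than erroring. A minimal TypeScript sketch of the same guard pattern, using stand-in types rather than the package's real ones:

// Sketch only: ContainerManagerLike stands in for the package's containerManager export.
interface ContainerManagerLike {
    isAlive(): boolean;
}

enum SketchInstanceStatus {
    STOPPED = 'stopped',
    RUNNING = 'running',
}

// Fail fast before doing any container work (mirrors the start-instance guard above).
function ensureDockerAvailable(docker: ContainerManagerLike): void {
    if (!docker.isAlive()) {
        throw new Error('Docker is not running or is not responding');
    }
}

// Status lookups degrade to STOPPED instead of throwing when Docker is down
// (mirrors getExternalStatus above; the real code notes it could also become "unknown").
function externalStatusGuard(docker: ContainerManagerLike): SketchInstanceStatus | null {
    return docker.isAlive() ? null : SketchInstanceStatus.STOPPED;
}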
package/src/instances/routes.ts
CHANGED
@@ -12,6 +12,7 @@ import { kapetaHeaders, KapetaRequest } from '../middleware/kapeta';
 import { stringBody } from '../middleware/stringBody';
 import { DesiredInstanceStatus, InstanceInfo, InstanceOwner, InstanceType, KapetaBodyRequest } from '../types';
 import { Task } from '../taskManager';
+import { containerManager } from '../containerManager';
 
 const router = Router();
 router.use('/', corsHandler);
@@ -76,12 +77,14 @@ router.post('/:systemId/:instanceId/start', async (req: Request, res: Response)
                ok: true,
                taskId: result.id,
            });
-        } else {
+        } else if (result) {
            res.status(202).send({
                ok: true,
                pid: result.pid,
                type: result.type,
            });
+        } else {
+            res.status(500).send({ ok: false, error: 'Failed to start instance' });
        }
    } catch (e: any) {
        res.status(500).send({ ok: false, error: e.message });
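With the new `else if (result)` branch, the start endpoint distinguishes three outcomes: a background task, a directly started process, and no result at all, which now returns an explicit 500 instead of falling through with an undefined result. A hedged sketch of that branching; the StartResult shape is illustrative, not the package's actual return type:

import type { Response } from 'express';

// Illustrative union: a task-based start, a direct process start, or nothing at all.
type StartResult = { id: string } | { pid: number; type: string } | undefined;

function sendStartResponse(res: Response, result: StartResult): void {
    if (result === undefined) {
        // New in 0.76.x: no result is reported as an explicit failure.
        res.status(500).send({ ok: false, error: 'Failed to start instance' });
    } else if ('id' in result) {
        // Start is handled asynchronously as a task.
        res.status(202).send({ ok: true, taskId: result.id });
    } else {
        // Start produced a local process right away.
        res.status(202).send({ ok: true, pid: result.pid, type: result.type });
    }
}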
package/src/serviceManager.ts
CHANGED
@@ -26,10 +26,14 @@ class ServiceManager {
            this._systems = {};
        }
 
-        _.forEach(this._systems, (system) => {
+        _.forEach(this._systems, (system, systemId) => {
            _.forEach(system, (services) => {
                _.forEach(services, (portInfo) => {
-                    clusterService.reservePort(portInfo.port);
+                    try {
+                        clusterService.reservePort(portInfo.port);
+                    } catch (e) {
+                        console.warn('Failed to reserve port', systemId, portInfo.port, e);
+                    }
                });
            });
        });
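This is the "port conflict cant prevent server start" fix from the changelog: restoring a previously saved port reservation that is already taken is now logged and skipped instead of being allowed to throw while the service boots. A small sketch of the idea, with ClusterServiceLike standing in for the package's clusterService:

// Sketch: restore saved reservations without letting a single conflict abort startup.
interface ClusterServiceLike {
    // Assumed to throw when the port is already reserved or otherwise unavailable.
    reservePort(port: number): void;
}

function restorePortReservations(cluster: ClusterServiceLike, systemId: string, ports: number[]): void {
    for (const port of ports) {
        try {
            cluster.reservePort(port);
        } catch (e) {
            // Logged and skipped; the remaining ports and the server start are unaffected.
            console.warn('Failed to reserve port', systemId, port, e);
        }
    }
}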
package/src/storm/PageGenerator.ts
CHANGED
@@ -274,7 +274,7 @@ export class PageQueue extends EventEmitter {
            return;
        }
 
-        const client = new StormClient(this.systemId);
+        const client = new StormClient(this.handle, this.systemId);
        this.images.set(prompt.url, prompt.description);
        const result = await client.createImage(
            `Create an image for the url "${prompt.url}" with this description: ${prompt.description}`.trim()
@@ -297,7 +297,7 @@ export class PageQueue extends EventEmitter {
    }
 
    public async generate(prompt: UIPagePrompt, conversationId: string) {
-        const client = new StormClient(
+        const client = new StormClient(this.handle, this.systemId);
        const screenStream = await client.createUIPage(prompt, conversationId);
        let pageEvent: StormEventPage | null = null;
        screenStream.on('data', (event: StormEvent) => {
@@ -317,7 +317,7 @@ export class PageQueue extends EventEmitter {
    }
 
    private async resolveReferences(content: string): Promise<ReferenceClassification[]> {
-        const client = new StormClient(
+        const client = new StormClient(this.handle, this.systemId);
        const referenceStream = await client.classifyUIReferences(content);
 
        const references: ReferenceClassification[] = [];
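Every StormClient constructed by PageQueue now receives the asset handle alongside the system id; the handle is presumably supplied through the PageQueue constructor, which the routes below now call as `new PageQueue(handle, systemId, '', 5)`. A rough shape implied by the call sites in this diff (not the package's exact signatures):

// Declarations sketched from the call sites in this diff only.
declare class StormClient {
    constructor(handle: string, systemId: string);
}

declare class PageQueue {
    // The roles of the last two arguments ('' and 5 at the call sites) are not visible here.
    constructor(handle: string, systemId: string, arg3: string, arg4: number);
}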
package/src/storm/routes.ts
CHANGED
@@ -293,8 +293,9 @@ router.delete('/ui/serve/:systemId', async (req: KapetaBodyRequest, res: Respons
    res.status(200).json({ status: 'ok' });
});
 
-router.post('/ui/screen', async (req: KapetaBodyRequest, res: Response) => {
+router.post('/:handle/ui/screen', async (req: KapetaBodyRequest, res: Response) => {
    try {
+        const handle = req.params.handle as string;
        const conversationId = req.headers[ConversationIdHeader.toLowerCase()] as string | undefined;
        const systemId = req.headers[SystemIdHeader.toLowerCase()] as string | undefined;
 
@@ -307,7 +308,7 @@ router.post('/ui/screen', async (req: KapetaBodyRequest, res: Response) => {
 
        const parentConversationId = systemId ?? '';
 
-        const queue = new PageQueue(
+        const queue = new PageQueue(handle, parentConversationId, '', 5);
        onRequestAborted(req, res, () => {
            queue.cancel();
        });
@@ -672,15 +673,16 @@ router.post('/:handle/ui', async (req: KapetaBodyRequest, res: Response) => {
    }
});
 
-router.post('/ui/edit', async (req: KapetaBodyRequest, res: Response) => {
+router.post('/:handle/ui/edit', async (req: KapetaBodyRequest, res: Response) => {
    try {
+        const handle = req.params.handle as string;
        const systemId = (req.headers[SystemIdHeader.toLowerCase()] ||
            req.headers[ConversationIdHeader.toLowerCase()]) as string | undefined;
 
        const aiRequest: StormContextRequest<UIPageEditRequest> = JSON.parse(req.stringBody ?? '{}');
        const storagePrefix = systemId ? systemId + '_' : 'mock_';
 
-        const queue = new PageQueue(
+        const queue = new PageQueue(handle, systemId!, '', 5);
 
        onRequestAborted(req, res, () => {
            queue.cancel();
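As with the existing /:handle/ui route, the screen and edit endpoints are now scoped by handle, and the handle is threaded into the PageQueue. A hedged client-side sketch of calling the renamed screen endpoint; the base URL and header name below are placeholders, not values taken from this diff (the real header is the package's exported SystemIdHeader constant):

// Sketch only: baseUrl must point at the running local-cluster-service storm router,
// and 'x-kapeta-system-id' is a hypothetical header name used here for illustration.
async function requestUiScreen(baseUrl: string, handle: string, systemId: string, request: unknown) {
    // 0.76.x: POST /:handle/ui/screen replaces POST /ui/screen (same pattern for /ui/edit).
    return fetch(`${baseUrl}/${encodeURIComponent(handle)}/ui/screen`, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            'x-kapeta-system-id': systemId,
        },
        body: JSON.stringify(request),
    });
}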