@lota-sdk/core 0.1.18 → 0.1.20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/infrastructure/schema/09_queue_job.surql +38 -0
- package/infrastructure/schema/10_autonomous_job.surql +44 -0
- package/package.json +2 -2
- package/src/ai-gateway/ai-gateway.ts +130 -21
- package/src/ai-gateway/cache-headers.ts +26 -1
- package/src/create-runtime.ts +10 -1
- package/src/db/base.service.ts +6 -1
- package/src/db/tables.ts +4 -0
- package/src/queues/autonomous-job.queue.ts +134 -0
- package/src/queues/document-processor.queue.ts +13 -2
- package/src/queues/index.ts +1 -0
- package/src/queues/memory-consolidation.queue.ts +22 -3
- package/src/queues/queue-factory.ts +33 -4
- package/src/runtime/chat-run-registry.ts +4 -0
- package/src/runtime/context-compaction.ts +100 -12
- package/src/runtime/memory-prompts-fact.ts +3 -1
- package/src/runtime/runtime-config.ts +1 -1
- package/src/runtime/runtime-worker-registry.ts +3 -0
- package/src/services/autonomous-job.service.ts +692 -0
- package/src/services/index.ts +2 -0
- package/src/services/plan-deadline.service.ts +6 -4
- package/src/services/queue-job.service.ts +356 -0
- package/src/services/workstream-message.service.ts +25 -14
- package/src/services/workstream-title.service.ts +1 -1
- package/src/services/workstream-turn-preparation.service.ts +22 -6
- package/src/services/workstream-turn.ts +11 -3
- package/src/services/workstream.service.ts +19 -2
- package/src/system-agents/context-compaction.agent.ts +2 -2
- package/src/system-agents/delegated-agent-factory.ts +2 -9
- package/src/system-agents/memory-reranker.agent.ts +2 -2
- package/src/system-agents/memory.agent.ts +2 -2
- package/src/system-agents/recent-activity-title-refiner.agent.ts +2 -2
- package/src/system-agents/regular-chat-memory-digest.agent.ts +2 -2
- package/src/system-agents/skill-extractor.agent.ts +2 -2
- package/src/system-agents/skill-manager.agent.ts +2 -2
- package/src/system-agents/title-generator.agent.ts +2 -2
- package/src/tools/research-topic.tool.ts +2 -2
- package/src/utils/date-time.ts +11 -0
- package/src/workers/utils/file-section-chunker.ts +1 -1
- package/src/workers/worker-utils.ts +35 -7
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
DEFINE TABLE IF NOT EXISTS queueJob SCHEMAFULL;
|
|
2
|
+
DEFINE FIELD IF NOT EXISTS queueName ON TABLE queueJob TYPE string;
|
|
3
|
+
DEFINE FIELD IF NOT EXISTS jobName ON TABLE queueJob TYPE string;
|
|
4
|
+
DEFINE FIELD IF NOT EXISTS bullmqJobId ON TABLE queueJob TYPE string;
|
|
5
|
+
DEFINE FIELD IF NOT EXISTS status ON TABLE queueJob TYPE string;
|
|
6
|
+
DEFINE FIELD IF NOT EXISTS data ON TABLE queueJob TYPE option<object> FLEXIBLE;
|
|
7
|
+
DEFINE FIELD IF NOT EXISTS options ON TABLE queueJob TYPE option<object> FLEXIBLE;
|
|
8
|
+
DEFINE FIELD IF NOT EXISTS context ON TABLE queueJob TYPE option<object> FLEXIBLE;
|
|
9
|
+
DEFINE FIELD IF NOT EXISTS deduplicationId ON TABLE queueJob TYPE option<string>;
|
|
10
|
+
DEFINE FIELD IF NOT EXISTS schedulerId ON TABLE queueJob TYPE option<string>;
|
|
11
|
+
DEFINE FIELD IF NOT EXISTS maxAttempts ON TABLE queueJob TYPE option<int>;
|
|
12
|
+
DEFINE FIELD IF NOT EXISTS attemptCount ON TABLE queueJob TYPE int DEFAULT 0;
|
|
13
|
+
DEFINE FIELD IF NOT EXISTS result ON TABLE queueJob TYPE option<object> FLEXIBLE;
|
|
14
|
+
DEFINE FIELD IF NOT EXISTS lastError ON TABLE queueJob TYPE option<object> FLEXIBLE;
|
|
15
|
+
DEFINE FIELD IF NOT EXISTS queuedAt ON TABLE queueJob TYPE datetime;
|
|
16
|
+
DEFINE FIELD IF NOT EXISTS startedAt ON TABLE queueJob TYPE option<datetime>;
|
|
17
|
+
DEFINE FIELD IF NOT EXISTS completedAt ON TABLE queueJob TYPE option<datetime>;
|
|
18
|
+
DEFINE FIELD IF NOT EXISTS failedAt ON TABLE queueJob TYPE option<datetime>;
|
|
19
|
+
DEFINE FIELD IF NOT EXISTS createdAt ON TABLE queueJob TYPE datetime DEFAULT time::now() READONLY;
|
|
20
|
+
DEFINE FIELD IF NOT EXISTS updatedAt ON TABLE queueJob TYPE datetime VALUE time::now();
|
|
21
|
+
|
|
22
|
+
DEFINE INDEX IF NOT EXISTS queueJobQueueStatusIdx ON TABLE queueJob COLUMNS queueName, status;
|
|
23
|
+
DEFINE INDEX IF NOT EXISTS queueJobQueueBullmqIdx ON TABLE queueJob COLUMNS queueName, bullmqJobId UNIQUE;
|
|
24
|
+
|
|
25
|
+
DEFINE TABLE IF NOT EXISTS queueJobAttempt SCHEMAFULL;
|
|
26
|
+
DEFINE FIELD IF NOT EXISTS queueJobId ON TABLE queueJobAttempt TYPE record<queueJob> REFERENCE ON DELETE CASCADE;
|
|
27
|
+
DEFINE FIELD IF NOT EXISTS attemptNumber ON TABLE queueJobAttempt TYPE int;
|
|
28
|
+
DEFINE FIELD IF NOT EXISTS status ON TABLE queueJobAttempt TYPE string;
|
|
29
|
+
DEFINE FIELD IF NOT EXISTS result ON TABLE queueJobAttempt TYPE option<object> FLEXIBLE;
|
|
30
|
+
DEFINE FIELD IF NOT EXISTS error ON TABLE queueJobAttempt TYPE option<object> FLEXIBLE;
|
|
31
|
+
DEFINE FIELD IF NOT EXISTS startedAt ON TABLE queueJobAttempt TYPE datetime;
|
|
32
|
+
DEFINE FIELD IF NOT EXISTS completedAt ON TABLE queueJobAttempt TYPE option<datetime>;
|
|
33
|
+
DEFINE FIELD IF NOT EXISTS durationMs ON TABLE queueJobAttempt TYPE option<int>;
|
|
34
|
+
DEFINE FIELD IF NOT EXISTS createdAt ON TABLE queueJobAttempt TYPE datetime DEFAULT time::now() READONLY;
|
|
35
|
+
DEFINE FIELD IF NOT EXISTS updatedAt ON TABLE queueJobAttempt TYPE datetime VALUE time::now();
|
|
36
|
+
|
|
37
|
+
DEFINE INDEX IF NOT EXISTS queueJobAttemptQueueIdx ON TABLE queueJobAttempt COLUMNS queueJobId;
|
|
38
|
+
DEFINE INDEX IF NOT EXISTS queueJobAttemptQueueNumberIdx ON TABLE queueJobAttempt COLUMNS queueJobId, attemptNumber UNIQUE;
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
DEFINE TABLE IF NOT EXISTS autonomousJob SCHEMAFULL;
|
|
2
|
+
DEFINE FIELD IF NOT EXISTS organizationId ON TABLE autonomousJob TYPE record<organization>;
|
|
3
|
+
DEFINE FIELD IF NOT EXISTS ownerUserId ON TABLE autonomousJob TYPE record<user>;
|
|
4
|
+
DEFINE FIELD IF NOT EXISTS ownerUserName ON TABLE autonomousJob TYPE option<string>;
|
|
5
|
+
DEFINE FIELD IF NOT EXISTS workstreamId ON TABLE autonomousJob TYPE record<workstream> REFERENCE ON DELETE CASCADE;
|
|
6
|
+
DEFINE FIELD IF NOT EXISTS agentId ON TABLE autonomousJob TYPE string;
|
|
7
|
+
DEFINE FIELD IF NOT EXISTS title ON TABLE autonomousJob TYPE string;
|
|
8
|
+
DEFINE FIELD IF NOT EXISTS prompt ON TABLE autonomousJob TYPE string;
|
|
9
|
+
DEFINE FIELD IF NOT EXISTS schedule ON TABLE autonomousJob TYPE object FLEXIBLE;
|
|
10
|
+
DEFINE FIELD IF NOT EXISTS status ON TABLE autonomousJob TYPE string;
|
|
11
|
+
DEFINE FIELD IF NOT EXISTS autoPauseThreshold ON TABLE autonomousJob TYPE int DEFAULT 3;
|
|
12
|
+
DEFINE FIELD IF NOT EXISTS consecutiveErrorCount ON TABLE autonomousJob TYPE int DEFAULT 0;
|
|
13
|
+
DEFINE FIELD IF NOT EXISTS lastRunStatus ON TABLE autonomousJob TYPE option<string>;
|
|
14
|
+
DEFINE FIELD IF NOT EXISTS lastRunAt ON TABLE autonomousJob TYPE option<datetime>;
|
|
15
|
+
DEFINE FIELD IF NOT EXISTS nextRunAt ON TABLE autonomousJob TYPE option<datetime>;
|
|
16
|
+
DEFINE FIELD IF NOT EXISTS linkedPlanSpecId ON TABLE autonomousJob TYPE option<record<planSpec>>;
|
|
17
|
+
DEFINE FIELD IF NOT EXISTS linkedPlanRunId ON TABLE autonomousJob TYPE option<record<planRun>>;
|
|
18
|
+
DEFINE FIELD IF NOT EXISTS lastError ON TABLE autonomousJob TYPE option<object> FLEXIBLE;
|
|
19
|
+
DEFINE FIELD IF NOT EXISTS createdAt ON TABLE autonomousJob TYPE datetime DEFAULT time::now() READONLY;
|
|
20
|
+
DEFINE FIELD IF NOT EXISTS updatedAt ON TABLE autonomousJob TYPE datetime VALUE time::now();
|
|
21
|
+
|
|
22
|
+
DEFINE INDEX IF NOT EXISTS autonomousJobOrgStatusIdx ON TABLE autonomousJob COLUMNS organizationId, status;
|
|
23
|
+
DEFINE INDEX IF NOT EXISTS autonomousJobWorkstreamIdx ON TABLE autonomousJob COLUMNS workstreamId;
|
|
24
|
+
DEFINE INDEX IF NOT EXISTS autonomousJobOwnerIdx ON TABLE autonomousJob COLUMNS ownerUserId;
|
|
25
|
+
|
|
26
|
+
DEFINE TABLE IF NOT EXISTS autonomousJobRun SCHEMAFULL;
|
|
27
|
+
DEFINE FIELD IF NOT EXISTS autonomousJobId ON TABLE autonomousJobRun TYPE record<autonomousJob> REFERENCE ON DELETE CASCADE;
|
|
28
|
+
DEFINE FIELD IF NOT EXISTS workstreamId ON TABLE autonomousJobRun TYPE record<workstream> REFERENCE ON DELETE CASCADE;
|
|
29
|
+
DEFINE FIELD IF NOT EXISTS queueJobId ON TABLE autonomousJobRun TYPE option<record<queueJob>>;
|
|
30
|
+
DEFINE FIELD IF NOT EXISTS status ON TABLE autonomousJobRun TYPE string;
|
|
31
|
+
DEFINE FIELD IF NOT EXISTS inputMessageId ON TABLE autonomousJobRun TYPE option<string>;
|
|
32
|
+
DEFINE FIELD IF NOT EXISTS assistantMessageIds ON TABLE autonomousJobRun TYPE array<string> DEFAULT [];
|
|
33
|
+
DEFINE FIELD IF NOT EXISTS summary ON TABLE autonomousJobRun TYPE option<string>;
|
|
34
|
+
DEFINE FIELD IF NOT EXISTS error ON TABLE autonomousJobRun TYPE option<object> FLEXIBLE;
|
|
35
|
+
DEFINE FIELD IF NOT EXISTS linkedPlanSpecId ON TABLE autonomousJobRun TYPE option<record<planSpec>>;
|
|
36
|
+
DEFINE FIELD IF NOT EXISTS linkedPlanRunId ON TABLE autonomousJobRun TYPE option<record<planRun>>;
|
|
37
|
+
DEFINE FIELD IF NOT EXISTS startedAt ON TABLE autonomousJobRun TYPE option<datetime>;
|
|
38
|
+
DEFINE FIELD IF NOT EXISTS completedAt ON TABLE autonomousJobRun TYPE option<datetime>;
|
|
39
|
+
DEFINE FIELD IF NOT EXISTS createdAt ON TABLE autonomousJobRun TYPE datetime DEFAULT time::now() READONLY;
|
|
40
|
+
DEFINE FIELD IF NOT EXISTS updatedAt ON TABLE autonomousJobRun TYPE datetime VALUE time::now();
|
|
41
|
+
|
|
42
|
+
DEFINE INDEX IF NOT EXISTS autonomousJobRunJobIdx ON TABLE autonomousJobRun COLUMNS autonomousJobId;
|
|
43
|
+
DEFINE INDEX IF NOT EXISTS autonomousJobRunQueueJobIdx ON TABLE autonomousJobRun COLUMNS queueJobId;
|
|
44
|
+
DEFINE INDEX IF NOT EXISTS autonomousJobRunStatusIdx ON TABLE autonomousJobRun COLUMNS status;
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@lota-sdk/core",
|
|
3
|
-
"version": "0.1.
|
|
3
|
+
"version": "0.1.20",
|
|
4
4
|
"type": "module",
|
|
5
5
|
"main": "./src/index.ts",
|
|
6
6
|
"types": "./src/index.ts",
|
|
@@ -32,7 +32,7 @@
|
|
|
32
32
|
"@chat-adapter/slack": "^4.23.0",
|
|
33
33
|
"@chat-adapter/state-ioredis": "^4.23.0",
|
|
34
34
|
"@logtape/logtape": "^2.0.5",
|
|
35
|
-
"@lota-sdk/shared": "0.1.
|
|
35
|
+
"@lota-sdk/shared": "0.1.20",
|
|
36
36
|
"@mendable/firecrawl-js": "^4.17.0",
|
|
37
37
|
"@surrealdb/node": "^3.0.3",
|
|
38
38
|
"ai": "^6.0.137",
|
|
@@ -5,6 +5,7 @@ import type { LanguageModelMiddleware } from 'ai'
|
|
|
5
5
|
|
|
6
6
|
import { getRuntimeConfig } from '../runtime/runtime-config'
|
|
7
7
|
import { isRecord, readString } from '../utils/string'
|
|
8
|
+
import { buildAiGatewayCacheHeaders } from './cache-headers'
|
|
8
9
|
|
|
9
10
|
type AiGatewayLanguageModel = Parameters<typeof wrapLanguageModel>[0]['model']
|
|
10
11
|
type AiGatewayExtraParams = Record<string, unknown>
|
|
@@ -17,15 +18,72 @@ type AiGatewayStreamResult = Awaited<ReturnType<WrapStreamOptions['doStream']>>
|
|
|
17
18
|
type AiGatewayGeneratedContent = AiGatewayGenerateResult['content'][number]
|
|
18
19
|
type AiGatewayStreamPart = AiGatewayStreamResult['stream'] extends ReadableStream<infer T> ? T : never
|
|
19
20
|
type AiGatewayConfig = { apiKey: string; baseURL: string }
|
|
21
|
+
type AiGatewayProviderOptions = NonNullable<AiGatewayCallOptions['providerOptions']>
|
|
20
22
|
|
|
21
23
|
const EXPECTED_GATEWAY_KEY_PREFIX = 'sk-bf-'
|
|
22
24
|
const AI_GATEWAY_VIRTUAL_KEY_HEADER = 'x-bf-vk'
|
|
23
25
|
const AI_GATEWAY_EXTRA_PARAMS_HEADER = 'x-bf-passthrough-extra-params'
|
|
24
26
|
const DEFAULT_AI_GATEWAY_URL = 'https://ai-gateway.gobrainy.ai' as const
|
|
27
|
+
const OPENAI_PROMPT_CACHE_RETENTION = '24h' as const
|
|
25
28
|
const OPENROUTER_RESPONSE_HEALING_EXTRA_PARAMS = {
|
|
26
29
|
plugins: [{ id: 'response-healing' }],
|
|
27
30
|
} as const satisfies AiGatewayExtraParams
|
|
28
31
|
|
|
32
|
+
function toAiGatewayCacheKeyPart(value: string): string {
|
|
33
|
+
const normalized = value
|
|
34
|
+
.trim()
|
|
35
|
+
.toLowerCase()
|
|
36
|
+
.replace(/[^a-z0-9:_-]+/g, '-')
|
|
37
|
+
.replace(/-+/g, '-')
|
|
38
|
+
return normalized.replace(/^-+|-+$/g, '') || 'request'
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
function mergeAiGatewayHeaders(
|
|
42
|
+
existingHeaders: AiGatewayCallOptions['headers'] | undefined,
|
|
43
|
+
additionalHeaders: Record<string, string>,
|
|
44
|
+
): Record<string, string> {
|
|
45
|
+
const merged = new Headers(existingHeaders as HeadersInit | undefined)
|
|
46
|
+
for (const [key, value] of Object.entries(additionalHeaders)) {
|
|
47
|
+
if (!merged.has(key)) {
|
|
48
|
+
merged.set(key, value)
|
|
49
|
+
}
|
|
50
|
+
}
|
|
51
|
+
return Object.fromEntries(merged.entries())
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
function parseAiGatewayJsonRequestBody(body: BodyInit | null | undefined): Record<string, unknown> | null {
|
|
55
|
+
if (typeof body !== 'string') return null
|
|
56
|
+
|
|
57
|
+
let parsed: unknown
|
|
58
|
+
try {
|
|
59
|
+
parsed = JSON.parse(body)
|
|
60
|
+
} catch {
|
|
61
|
+
return null
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
return isRecord(parsed) ? parsed : null
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
function isAiGatewayOpenAIModelRequest(body: BodyInit | null | undefined): boolean {
|
|
68
|
+
const parsed = parseAiGatewayJsonRequestBody(body)
|
|
69
|
+
return readString(parsed?.model)?.startsWith('openai/') ?? false
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
function hasAiGatewayPromptCacheRetention(body: BodyInit | null | undefined): boolean {
|
|
73
|
+
const parsed = parseAiGatewayJsonRequestBody(body)
|
|
74
|
+
return readString(parsed?.prompt_cache_retention) !== null
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
function withDefaultAiGatewayCacheHeaders(params: AiGatewayCallOptions, modelId: string): AiGatewayCallOptions {
|
|
78
|
+
return {
|
|
79
|
+
...params,
|
|
80
|
+
headers: mergeAiGatewayHeaders(
|
|
81
|
+
params.headers,
|
|
82
|
+
buildAiGatewayCacheHeaders(`model:${toAiGatewayCacheKeyPart(modelId)}`),
|
|
83
|
+
),
|
|
84
|
+
}
|
|
85
|
+
}
|
|
86
|
+
|
|
29
87
|
function normalizeAiGatewayUrl(value: string): string {
|
|
30
88
|
const trimmed = value.trim()
|
|
31
89
|
if (!trimmed) {
|
|
@@ -37,10 +95,10 @@ function normalizeAiGatewayUrl(value: string): string {
|
|
|
37
95
|
}
|
|
38
96
|
|
|
39
97
|
function readDirectEnvAiGatewayConfig(): AiGatewayConfig {
|
|
40
|
-
const apiKey = (process.env.
|
|
98
|
+
const apiKey = (process.env.AI_GATEWAY_KEY ?? '').trim()
|
|
41
99
|
if (!apiKey) {
|
|
42
100
|
throw new Error(
|
|
43
|
-
'[ai-gateway] Missing AI gateway key. Set
|
|
101
|
+
'[ai-gateway] Missing AI gateway key. Set AI_GATEWAY_KEY, or configure createLotaRuntime({ aiGateway: { key } }).',
|
|
44
102
|
)
|
|
45
103
|
}
|
|
46
104
|
|
|
@@ -234,20 +292,36 @@ function addAiGatewayReasoningRawChunks(
|
|
|
234
292
|
return { ...params, includeRawChunks: true }
|
|
235
293
|
}
|
|
236
294
|
|
|
295
|
+
export function normalizeAiGatewayChatProviderOptions(params: AiGatewayCallOptions): AiGatewayCallOptions {
|
|
296
|
+
const providerOptions = isRecord(params.providerOptions)
|
|
297
|
+
? ({ ...params.providerOptions } as AiGatewayProviderOptions)
|
|
298
|
+
: ({} as AiGatewayProviderOptions)
|
|
299
|
+
const openaiOptions = isRecord(providerOptions.openai)
|
|
300
|
+
? { ...providerOptions.openai }
|
|
301
|
+
: ({} as Record<string, unknown>)
|
|
302
|
+
|
|
303
|
+
if (openaiOptions.systemMessageMode === 'system') {
|
|
304
|
+
return params
|
|
305
|
+
}
|
|
306
|
+
|
|
307
|
+
return {
|
|
308
|
+
...params,
|
|
309
|
+
providerOptions: {
|
|
310
|
+
...providerOptions,
|
|
311
|
+
openai: {
|
|
312
|
+
...openaiOptions,
|
|
313
|
+
...(openaiOptions.systemMessageMode === 'remove' ? {} : { systemMessageMode: 'system' }),
|
|
314
|
+
},
|
|
315
|
+
},
|
|
316
|
+
}
|
|
317
|
+
}
|
|
318
|
+
|
|
237
319
|
export function injectAiGatewayExtraParamsRequestBody(
|
|
238
320
|
body: BodyInit | null | undefined,
|
|
239
321
|
extraParams: AiGatewayExtraParams,
|
|
240
322
|
): BodyInit | null | undefined {
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
let parsed: unknown
|
|
244
|
-
try {
|
|
245
|
-
parsed = JSON.parse(body)
|
|
246
|
-
} catch {
|
|
247
|
-
return body
|
|
248
|
-
}
|
|
249
|
-
|
|
250
|
-
if (!isRecord(parsed)) return body
|
|
323
|
+
const parsed = parseAiGatewayJsonRequestBody(body)
|
|
324
|
+
if (!parsed) return body
|
|
251
325
|
|
|
252
326
|
const mergedExtraParams = isRecord(parsed.extra_params)
|
|
253
327
|
? { ...parsed.extra_params, ...extraParams }
|
|
@@ -256,11 +330,35 @@ export function injectAiGatewayExtraParamsRequestBody(
|
|
|
256
330
|
return JSON.stringify({ ...parsed, extra_params: mergedExtraParams })
|
|
257
331
|
}
|
|
258
332
|
|
|
259
|
-
function
|
|
260
|
-
|
|
261
|
-
|
|
333
|
+
export function injectAiGatewayOpenAIPromptCacheRetentionRequestBody(
|
|
334
|
+
body: BodyInit | null | undefined,
|
|
335
|
+
): BodyInit | null | undefined {
|
|
336
|
+
const parsed = parseAiGatewayJsonRequestBody(body)
|
|
337
|
+
if (!parsed) return body
|
|
338
|
+
if (!readString(parsed.model)?.startsWith('openai/')) return body
|
|
339
|
+
if (readString(parsed.prompt_cache_retention) !== null) return body
|
|
340
|
+
|
|
341
|
+
return JSON.stringify({ ...parsed, prompt_cache_retention: OPENAI_PROMPT_CACHE_RETENTION })
|
|
342
|
+
}
|
|
262
343
|
|
|
263
|
-
|
|
344
|
+
function createAiGatewayFetch(extraParams?: AiGatewayExtraParams): typeof fetch {
|
|
345
|
+
const fetchWithMutations = (input: RequestInfo | URL, init?: RequestInit | BunFetchRequestInit) => {
|
|
346
|
+
const bodyWithPromptCacheRetention = injectAiGatewayOpenAIPromptCacheRetentionRequestBody(init?.body)
|
|
347
|
+
const body =
|
|
348
|
+
extraParams !== undefined
|
|
349
|
+
? injectAiGatewayExtraParamsRequestBody(bodyWithPromptCacheRetention, extraParams)
|
|
350
|
+
: bodyWithPromptCacheRetention
|
|
351
|
+
|
|
352
|
+
const headers = new Headers(init?.headers as HeadersInit | undefined)
|
|
353
|
+
if (extraParams !== undefined || (isAiGatewayOpenAIModelRequest(body) && hasAiGatewayPromptCacheRetention(body))) {
|
|
354
|
+
// Bifrost only forwards provider-specific extra params when passthrough is enabled.
|
|
355
|
+
headers.set(AI_GATEWAY_EXTRA_PARAMS_HEADER, 'true')
|
|
356
|
+
}
|
|
357
|
+
|
|
358
|
+
return globalThis.fetch(input, { ...init, headers, body })
|
|
359
|
+
}
|
|
360
|
+
|
|
361
|
+
return Object.assign(fetchWithMutations, { preconnect: globalThis.fetch.preconnect.bind(globalThis.fetch) })
|
|
264
362
|
}
|
|
265
363
|
|
|
266
364
|
function createAiGatewayProvider(extraParams?: AiGatewayExtraParams) {
|
|
@@ -274,9 +372,8 @@ function createAiGatewayProvider(extraParams?: AiGatewayExtraParams) {
|
|
|
274
372
|
apiKey,
|
|
275
373
|
headers: {
|
|
276
374
|
[AI_GATEWAY_VIRTUAL_KEY_HEADER]: apiKey,
|
|
277
|
-
...(extraParams ? { [AI_GATEWAY_EXTRA_PARAMS_HEADER]: 'true' } : {}),
|
|
278
375
|
},
|
|
279
|
-
|
|
376
|
+
fetch: createAiGatewayFetch(extraParams),
|
|
280
377
|
})
|
|
281
378
|
}
|
|
282
379
|
|
|
@@ -309,7 +406,8 @@ export function aiGatewayModel(modelId: string) {
|
|
|
309
406
|
model: getAiGatewayProvider()(modelId),
|
|
310
407
|
middleware: {
|
|
311
408
|
specificationVersion: 'v3',
|
|
312
|
-
transformParams: async ({ params, type }) =>
|
|
409
|
+
transformParams: async ({ params, type }) =>
|
|
410
|
+
withDefaultAiGatewayCacheHeaders(addAiGatewayReasoningRawChunks(params, type), modelId),
|
|
313
411
|
wrapStream: async ({ doStream, params }) => {
|
|
314
412
|
const result = await doStream()
|
|
315
413
|
if (!isReasoningEnabled(params)) return result
|
|
@@ -322,7 +420,15 @@ export function aiGatewayModel(modelId: string) {
|
|
|
322
420
|
}
|
|
323
421
|
|
|
324
422
|
export function aiGatewayOpenRouterResponseHealingModel(modelId: string) {
|
|
325
|
-
return withAiGatewayDevTools(
|
|
423
|
+
return withAiGatewayDevTools(
|
|
424
|
+
wrapLanguageModel({
|
|
425
|
+
model: getAiGatewayOpenRouterResponseHealingProvider()(modelId),
|
|
426
|
+
middleware: {
|
|
427
|
+
specificationVersion: 'v3',
|
|
428
|
+
transformParams: async ({ params }) => withDefaultAiGatewayCacheHeaders(params, modelId),
|
|
429
|
+
},
|
|
430
|
+
}),
|
|
431
|
+
)
|
|
326
432
|
}
|
|
327
433
|
|
|
328
434
|
export function aiGatewayChatModel(modelId: string) {
|
|
@@ -331,7 +437,10 @@ export function aiGatewayChatModel(modelId: string) {
|
|
|
331
437
|
model: getAiGatewayProvider().chat(modelId),
|
|
332
438
|
middleware: {
|
|
333
439
|
specificationVersion: 'v3',
|
|
334
|
-
transformParams: async ({ params, type }) =>
|
|
440
|
+
transformParams: async ({ params, type }) =>
|
|
441
|
+
normalizeAiGatewayChatProviderOptions(
|
|
442
|
+
withDefaultAiGatewayCacheHeaders(addAiGatewayReasoningRawChunks(params, type), modelId),
|
|
443
|
+
),
|
|
335
444
|
wrapGenerate: async ({ doGenerate }) => {
|
|
336
445
|
const result = await doGenerate()
|
|
337
446
|
|
|
@@ -1,8 +1,33 @@
|
|
|
1
1
|
const AI_GATEWAY_CACHE_KEY_HEADER = 'x-bf-cache-key'
|
|
2
2
|
const AI_GATEWAY_CACHE_TTL_HEADER = 'x-bf-cache-ttl'
|
|
3
|
+
const AI_GATEWAY_CACHE_THRESHOLD_HEADER = 'x-bf-cache-threshold'
|
|
4
|
+
const AI_GATEWAY_CACHE_TYPE_HEADER = 'x-bf-cache-type'
|
|
3
5
|
|
|
4
|
-
export
|
|
6
|
+
export const AI_GATEWAY_STRICT_SEMANTIC_CACHE_THRESHOLD = 0.975
|
|
7
|
+
|
|
8
|
+
export type AiGatewayCacheType = 'direct' | 'semantic'
|
|
9
|
+
|
|
10
|
+
export function buildAiGatewayCacheHeaders(
|
|
11
|
+
cacheKey: string,
|
|
12
|
+
ttl?: string,
|
|
13
|
+
threshold?: number,
|
|
14
|
+
cacheType?: AiGatewayCacheType,
|
|
15
|
+
): Record<string, string> {
|
|
5
16
|
const headers: Record<string, string> = { [AI_GATEWAY_CACHE_KEY_HEADER]: cacheKey }
|
|
6
17
|
if (ttl) headers[AI_GATEWAY_CACHE_TTL_HEADER] = ttl
|
|
18
|
+
if (typeof threshold === 'number') headers[AI_GATEWAY_CACHE_THRESHOLD_HEADER] = String(threshold)
|
|
19
|
+
if (cacheType) headers[AI_GATEWAY_CACHE_TYPE_HEADER] = cacheType
|
|
7
20
|
return headers
|
|
8
21
|
}
|
|
22
|
+
|
|
23
|
+
export function buildAiGatewayDirectCacheHeaders(cacheKey: string, ttl?: string): Record<string, string> {
|
|
24
|
+
return buildAiGatewayCacheHeaders(cacheKey, ttl, undefined, 'direct')
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
export function buildAiGatewayStrictSemanticCacheHeaders(
|
|
28
|
+
cacheKey: string,
|
|
29
|
+
ttl?: string,
|
|
30
|
+
threshold = AI_GATEWAY_STRICT_SEMANTIC_CACHE_THRESHOLD,
|
|
31
|
+
): Record<string, string> {
|
|
32
|
+
return buildAiGatewayCacheHeaders(cacheKey, ttl, threshold, 'semantic')
|
|
33
|
+
}
|
package/src/create-runtime.ts
CHANGED
|
@@ -29,6 +29,8 @@ import type { LotaRuntimeSocialChat } from './runtime/social-chat'
|
|
|
29
29
|
import { createSocialChatRuntime } from './runtime/social-chat'
|
|
30
30
|
import type { attachmentService } from './services/attachment.service'
|
|
31
31
|
import { attachmentService as attachmentServiceSingleton } from './services/attachment.service'
|
|
32
|
+
import type { autonomousJobService } from './services/autonomous-job.service'
|
|
33
|
+
import { autonomousJobService as autonomousJobServiceSingleton } from './services/autonomous-job.service'
|
|
32
34
|
import { coordinationRegistryService as coordinationRegistryServiceSingleton } from './services/coordination-registry.service'
|
|
33
35
|
import type { documentChunkService } from './services/document-chunk.service'
|
|
34
36
|
import { documentChunkService as documentChunkServiceSingleton } from './services/document-chunk.service'
|
|
@@ -112,6 +114,7 @@ export interface LotaRuntime {
|
|
|
112
114
|
redis: RedisConnectionManager
|
|
113
115
|
closeRedisConnection: () => Promise<void>
|
|
114
116
|
attachmentService: typeof attachmentService
|
|
117
|
+
autonomousJobService: typeof autonomousJobService
|
|
115
118
|
documentChunkService: typeof documentChunkService
|
|
116
119
|
generatedDocumentStorageService: typeof generatedDocumentStorageService
|
|
117
120
|
memoryService: typeof memoryService
|
|
@@ -382,6 +385,7 @@ export async function createLotaRuntime(config: LotaRuntimeConfig): Promise<Lota
|
|
|
382
385
|
redis: redisManager,
|
|
383
386
|
closeRedisConnection: async () => await redisManager.closeConnection(),
|
|
384
387
|
attachmentService: attachmentServiceSingleton,
|
|
388
|
+
autonomousJobService: autonomousJobServiceSingleton,
|
|
385
389
|
documentChunkService: documentChunkServiceSingleton,
|
|
386
390
|
generatedDocumentStorageService: generatedDocumentStorageServiceSingleton,
|
|
387
391
|
memoryService: memoryServiceSingleton,
|
|
@@ -458,8 +462,13 @@ function getBuiltInSchemaFiles(): URL[] {
|
|
|
458
462
|
new URL('../infrastructure/schema/01_memory.surql', import.meta.url),
|
|
459
463
|
new URL('../infrastructure/schema/02_execution_plan.surql', import.meta.url),
|
|
460
464
|
new URL('../infrastructure/schema/03_learned_skill.surql', import.meta.url),
|
|
461
|
-
new URL('../infrastructure/schema/05_recent_activity.surql', import.meta.url),
|
|
462
465
|
new URL('../infrastructure/schema/04_runtime_bootstrap.surql', import.meta.url),
|
|
466
|
+
new URL('../infrastructure/schema/05_recent_activity.surql', import.meta.url),
|
|
467
|
+
new URL('../infrastructure/schema/06_playbook.surql', import.meta.url),
|
|
468
|
+
new URL('../infrastructure/schema/07_institutional_memory.surql', import.meta.url),
|
|
469
|
+
new URL('../infrastructure/schema/08_quality_metrics.surql', import.meta.url),
|
|
470
|
+
new URL('../infrastructure/schema/09_queue_job.surql', import.meta.url),
|
|
471
|
+
new URL('../infrastructure/schema/10_autonomous_job.surql', import.meta.url),
|
|
463
472
|
]
|
|
464
473
|
}
|
|
465
474
|
|
package/src/db/base.service.ts
CHANGED
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
import type { z } from 'zod'
|
|
2
2
|
|
|
3
3
|
import { NotFoundError } from '../utils/errors'
|
|
4
|
+
import { ensureRecordId } from './record-id'
|
|
4
5
|
import { databaseService as defaultDatabaseService } from './service'
|
|
5
6
|
import type { SurrealDBService } from './service'
|
|
6
7
|
import type { DatabaseTable } from './tables'
|
|
@@ -16,7 +17,11 @@ export abstract class BaseService<T extends z.ZodType> {
|
|
|
16
17
|
}
|
|
17
18
|
|
|
18
19
|
async findById(id: unknown): Promise<z.infer<T> | null> {
|
|
19
|
-
return this.databaseService.findOne(
|
|
20
|
+
return this.databaseService.findOne(
|
|
21
|
+
this.table,
|
|
22
|
+
{ id: ensureRecordId(id as Parameters<typeof ensureRecordId>[0], this.table) },
|
|
23
|
+
this.schema,
|
|
24
|
+
)
|
|
20
25
|
}
|
|
21
26
|
|
|
22
27
|
async getById(id: unknown): Promise<z.infer<T>> {
|
package/src/db/tables.ts
CHANGED
|
@@ -29,6 +29,10 @@ export const TABLES = {
|
|
|
29
29
|
PLAYBOOK_VERSION: 'playbookVersion',
|
|
30
30
|
INSTITUTIONAL_MEMORY: 'institutionalMemory',
|
|
31
31
|
QUALITY_METRIC: 'qualityMetric',
|
|
32
|
+
QUEUE_JOB: 'queueJob',
|
|
33
|
+
QUEUE_JOB_ATTEMPT: 'queueJobAttempt',
|
|
34
|
+
AUTONOMOUS_JOB: 'autonomousJob',
|
|
35
|
+
AUTONOMOUS_JOB_RUN: 'autonomousJobRun',
|
|
32
36
|
} as const
|
|
33
37
|
|
|
34
38
|
export type DatabaseTable = (typeof TABLES)[keyof typeof TABLES] | (string & {})
|
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
import type { AutonomousJobSchedule } from '@lota-sdk/shared'
|
|
2
|
+
import type { Job } from 'bullmq'
|
|
3
|
+
|
|
4
|
+
import { serverLogger } from '../config/logger'
|
|
5
|
+
import { databaseService } from '../db/service'
|
|
6
|
+
import { autonomousJobService } from '../services/autonomous-job.service'
|
|
7
|
+
import { queueJobService } from '../services/queue-job.service'
|
|
8
|
+
import type { WorkerHandle } from '../workers/worker-utils'
|
|
9
|
+
import { DEFAULT_JOB_RETENTION } from '../workers/worker-utils'
|
|
10
|
+
import { createQueueFactory } from './queue-factory'
|
|
11
|
+
|
|
12
|
+
export interface AutonomousJobQueuePayload {
|
|
13
|
+
autonomousJobId: string
|
|
14
|
+
autonomousJobRunId?: string
|
|
15
|
+
trigger: 'scheduled' | 'manual'
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
export const AUTONOMOUS_JOB_QUEUE = 'autonomous-job'
|
|
19
|
+
|
|
20
|
+
const DEFAULT_AUTONOMOUS_JOB_OPTIONS = {
|
|
21
|
+
...DEFAULT_JOB_RETENTION,
|
|
22
|
+
attempts: 3,
|
|
23
|
+
backoff: { type: 'exponential', delay: 5_000 },
|
|
24
|
+
} as const
|
|
25
|
+
|
|
26
|
+
async function processAutonomousJob(
|
|
27
|
+
job: Job<AutonomousJobQueuePayload>,
|
|
28
|
+
): Promise<{ status: string; summary?: string }> {
|
|
29
|
+
await databaseService.connect()
|
|
30
|
+
return autonomousJobService.executeQueuedRun(job)
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
const autonomousJobQueue = createQueueFactory<AutonomousJobQueuePayload>({
|
|
34
|
+
name: AUTONOMOUS_JOB_QUEUE,
|
|
35
|
+
displayName: 'Autonomous job',
|
|
36
|
+
jobName: 'run-autonomous-job',
|
|
37
|
+
concurrency: 2,
|
|
38
|
+
defaultJobOptions: DEFAULT_AUTONOMOUS_JOB_OPTIONS,
|
|
39
|
+
processor: processAutonomousJob,
|
|
40
|
+
})
|
|
41
|
+
|
|
42
|
+
function buildAutonomousSchedulerId(autonomousJobId: string): string {
|
|
43
|
+
return `autonomous:${autonomousJobId}`
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
function encodeBullmqId(raw: string): string {
|
|
47
|
+
return Buffer.from(raw).toString('base64url')
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
export function buildAutonomousAtJobId(autonomousJobId: string): string {
|
|
51
|
+
return `autonomous-at-${encodeBullmqId(autonomousJobId)}`
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
export async function enqueueAutonomousJobRun(params: {
|
|
55
|
+
payload: AutonomousJobQueuePayload
|
|
56
|
+
delayMs?: number
|
|
57
|
+
jobId?: string
|
|
58
|
+
}): Promise<{ bullmqJobId: string; queueJobId: string }> {
|
|
59
|
+
const queuedJob = await autonomousJobQueue
|
|
60
|
+
.getQueue()
|
|
61
|
+
.add('run-autonomous-job', params.payload, {
|
|
62
|
+
...(typeof params.delayMs === 'number' ? { delay: Math.max(0, params.delayMs) } : {}),
|
|
63
|
+
...(params.jobId ? { jobId: params.jobId } : {}),
|
|
64
|
+
})
|
|
65
|
+
|
|
66
|
+
const queueJobId = await queueJobService.recordEnqueued({
|
|
67
|
+
queueName: AUTONOMOUS_JOB_QUEUE,
|
|
68
|
+
id: queuedJob.id,
|
|
69
|
+
name: queuedJob.name,
|
|
70
|
+
data: queuedJob.data,
|
|
71
|
+
opts: queuedJob.opts,
|
|
72
|
+
attemptsMade: queuedJob.attemptsMade,
|
|
73
|
+
timestamp: queuedJob.timestamp,
|
|
74
|
+
})
|
|
75
|
+
|
|
76
|
+
return { bullmqJobId: String(queuedJob.id), queueJobId }
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
export async function upsertAutonomousJobScheduler(params: {
|
|
80
|
+
autonomousJobId: string
|
|
81
|
+
schedule: Extract<AutonomousJobSchedule, { kind: 'cron' | 'every' }>
|
|
82
|
+
}): Promise<void> {
|
|
83
|
+
const repeatOpts =
|
|
84
|
+
params.schedule.kind === 'cron' ? { pattern: params.schedule.cron } : { every: params.schedule.intervalMs }
|
|
85
|
+
const queuedJob = await autonomousJobQueue
|
|
86
|
+
.getQueue()
|
|
87
|
+
.upsertJobScheduler(buildAutonomousSchedulerId(params.autonomousJobId), repeatOpts, {
|
|
88
|
+
name: 'run-autonomous-job',
|
|
89
|
+
data: { autonomousJobId: params.autonomousJobId, trigger: 'scheduled' },
|
|
90
|
+
opts: DEFAULT_AUTONOMOUS_JOB_OPTIONS,
|
|
91
|
+
})
|
|
92
|
+
|
|
93
|
+
await queueJobService.recordEnqueued({
|
|
94
|
+
queueName: AUTONOMOUS_JOB_QUEUE,
|
|
95
|
+
id: queuedJob.id,
|
|
96
|
+
name: queuedJob.name,
|
|
97
|
+
data: queuedJob.data,
|
|
98
|
+
opts: queuedJob.opts,
|
|
99
|
+
attemptsMade: queuedJob.attemptsMade,
|
|
100
|
+
timestamp: queuedJob.timestamp,
|
|
101
|
+
})
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
export async function removeAutonomousJobScheduler(autonomousJobId: string): Promise<void> {
|
|
105
|
+
await autonomousJobQueue.getQueue().removeJobScheduler(buildAutonomousSchedulerId(autonomousJobId))
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
export async function removeAutonomousAtJob(autonomousJobId: string): Promise<void> {
|
|
109
|
+
try {
|
|
110
|
+
await autonomousJobQueue.getQueue().remove(buildAutonomousAtJobId(autonomousJobId))
|
|
111
|
+
} catch {
|
|
112
|
+
// The delayed job may have already fired or never existed.
|
|
113
|
+
}
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
type AutonomousJobWorkerOptions = Parameters<typeof autonomousJobQueue.startWorker>[0]
|
|
117
|
+
|
|
118
|
+
export function startAutonomousJobWorker(options: AutonomousJobWorkerOptions = {}): WorkerHandle {
|
|
119
|
+
const handle = autonomousJobQueue.startWorker(options)
|
|
120
|
+
|
|
121
|
+
autonomousJobService.recoverActiveJobs().catch((error: unknown) => {
|
|
122
|
+
serverLogger.error`Autonomous job startup recovery failed: ${error}`
|
|
123
|
+
})
|
|
124
|
+
|
|
125
|
+
return handle
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
// Returns a worker handle for the autonomous-job queue.
// NOTE(review): despite the "get" name, every call invokes
// startAutonomousJobWorker(), which starts a worker and triggers background
// recovery — confirm callers expect this rather than a cached handle.
export function getAutonomousJobQueueHandle(): WorkerHandle {
  return startAutonomousJobWorker()
}
|
|
131
|
+
|
|
132
|
+
// When this module is run as the process entry point (import.meta.main —
// Bun/Deno semantics), start the worker immediately.
if (import.meta.main) {
  startAutonomousJobWorker()
}
|
|
@@ -2,6 +2,7 @@ import { Queue, Worker } from 'bullmq'
|
|
|
2
2
|
import type IORedis from 'ioredis'
|
|
3
3
|
|
|
4
4
|
import type { chatLogger } from '../config/logger'
|
|
5
|
+
import { queueJobService } from '../services/queue-job.service'
|
|
5
6
|
import {
|
|
6
7
|
attachWorkerEvents,
|
|
7
8
|
createWorkerShutdown,
|
|
@@ -92,8 +93,18 @@ export function createDocumentProcessorQueueRuntime<TJob extends DocumentProcess
|
|
|
92
93
|
}
|
|
93
94
|
|
|
94
95
|
return {
|
|
95
|
-
enqueue: async (job) =>
|
|
96
|
-
await getQueue().add(jobName, toQueueData(job), { jobId: buildDocumentProcessorJobId(job) })
|
|
96
|
+
enqueue: async (job) => {
|
|
97
|
+
const queuedJob = await getQueue().add(jobName, toQueueData(job), { jobId: buildDocumentProcessorJobId(job) })
|
|
98
|
+
await queueJobService.recordEnqueued({
|
|
99
|
+
queueName,
|
|
100
|
+
id: queuedJob.id,
|
|
101
|
+
name: queuedJob.name,
|
|
102
|
+
data: queuedJob.data,
|
|
103
|
+
opts: queuedJob.opts,
|
|
104
|
+
attemptsMade: queuedJob.attemptsMade,
|
|
105
|
+
timestamp: queuedJob.timestamp,
|
|
106
|
+
})
|
|
107
|
+
},
|
|
97
108
|
startWorker: (options = {}) => {
|
|
98
109
|
const { registerSignals = import.meta.main } = options
|
|
99
110
|
const worker = new Worker(queueName, params.getWorkerPath(), {
|
package/src/queues/index.ts
CHANGED
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import { queueJobService } from '../services/queue-job.service'
|
|
1
2
|
import { getWorkerPath, LONG_JOB_LOCK_DURATION_MS, LOW_JOB_RETENTION } from '../workers/worker-utils'
|
|
2
3
|
import { createQueueFactory } from './queue-factory'
|
|
3
4
|
|
|
@@ -6,7 +7,7 @@ export interface MemoryConsolidationJob {
|
|
|
6
7
|
}
|
|
7
8
|
|
|
8
9
|
// Recurring consolidation interval: once every 24 hours.
const MEMORY_CONSOLIDATION_INTERVAL_MS = 24 * 60 * 60 * 1000
// Stable scheduler id so repeated upserts replace the existing scheduler
// instead of creating duplicates.
const MEMORY_CONSOLIDATION_SCHEDULER_ID = 'memory-consolidation-recurring'
|
|
10
11
|
|
|
11
12
|
const memoryConsolidation = createQueueFactory<MemoryConsolidationJob>({
|
|
12
13
|
name: 'memory-consolidation',
|
|
@@ -23,9 +24,27 @@ export async function enqueueMemoryConsolidation(job: MemoryConsolidationJob = {
|
|
|
23
24
|
}
|
|
24
25
|
|
|
25
26
|
export async function scheduleRecurringConsolidation() {
|
|
26
|
-
await memoryConsolidation
|
|
27
|
+
const queuedJob = await memoryConsolidation
|
|
27
28
|
.getQueue()
|
|
28
|
-
.
|
|
29
|
+
.upsertJobScheduler(
|
|
30
|
+
MEMORY_CONSOLIDATION_SCHEDULER_ID,
|
|
31
|
+
{ every: MEMORY_CONSOLIDATION_INTERVAL_MS },
|
|
32
|
+
{
|
|
33
|
+
name: 'consolidate',
|
|
34
|
+
data: {},
|
|
35
|
+
opts: { ...LOW_JOB_RETENTION, attempts: 2, backoff: { type: 'exponential', delay: 5000 } },
|
|
36
|
+
},
|
|
37
|
+
)
|
|
38
|
+
|
|
39
|
+
await queueJobService.recordEnqueued({
|
|
40
|
+
queueName: 'memory-consolidation',
|
|
41
|
+
id: queuedJob.id,
|
|
42
|
+
name: queuedJob.name,
|
|
43
|
+
data: queuedJob.data,
|
|
44
|
+
opts: queuedJob.opts,
|
|
45
|
+
attemptsMade: queuedJob.attemptsMade,
|
|
46
|
+
timestamp: queuedJob.timestamp,
|
|
47
|
+
})
|
|
29
48
|
}
|
|
30
49
|
|
|
31
50
|
// Re-export the factory's worker starter under a queue-specific name.
export const startMemoryConsolidationWorker = memoryConsolidation.startWorker
|