@langgraph-js/pure-graph 1.4.0 → 1.4.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +158 -2
- package/dist/__vite-browser-external-DGN5jhtd.js +4 -0
- package/dist/__vite-browser-external-DGN5jhtd.js.map +1 -0
- package/dist/checkpoint-BHKV54sL.js +386 -0
- package/dist/checkpoint-BHKV54sL.js.map +1 -0
- package/dist/checkpoint-DxiUsHMy.js +13 -0
- package/dist/checkpoint-DxiUsHMy.js.map +1 -0
- package/dist/global.js +3 -0
- package/dist/index-DcXE-SZb.js +1264 -0
- package/dist/index-DcXE-SZb.js.map +1 -0
- package/dist/index.js.map +1 -0
- package/dist/queue-C6iEVbd2.js +120 -0
- package/dist/queue-C6iEVbd2.js.map +1 -0
- package/dist/storage/index.js +13 -7
- package/dist/storage/memory/threads.d.ts +1 -0
- package/dist/storage/memory/threads.js +3 -0
- package/dist/storage/pg/checkpoint.js +1 -1
- package/dist/storage/pg/threads.d.ts +1 -1
- package/dist/storage/pg/threads.js +0 -1
- package/dist/storage/sqlite/DB.js +1 -1
- package/dist/storage/sqlite/threads.d.ts +1 -1
- package/dist/storage/sqlite/threads.js +1 -2
- package/dist/threads/index.d.ts +1 -0
- package/dist/threads-BUgBiCiK.js +302 -0
- package/dist/threads-BUgBiCiK.js.map +1 -0
- package/dist/tsconfig.tsbuildinfo +1 -1
- package/package.json +5 -2
@@ -0,0 +1,1264 @@
+import { isBaseMessage } from '@langchain/core/messages';
+import { Command, Send, StateGraph } from '@langchain/langgraph';
+import { EventEmitter } from 'eventemitter3';
+import { load } from '@langchain/core/load';
+import { MemorySaver } from '@langchain/langgraph-checkpoint';
+
+const getLangGraphCommand = (command) => {
+  let goto = command.goto != null && !Array.isArray(command.goto) ? [command.goto] : command.goto;
+  return new Command({
+    goto: goto?.map((item) => {
+      if (typeof item !== "string") return new Send(item.node, item.input);
+      return item;
+    }),
+    update: command.update ?? void 0,
+    resume: command.resume
+  });
+};
+
+var LIMIT_REPLACE_NODE = "[...]";
+var CIRCULAR_REPLACE_NODE = "[Circular]";
+var arr = [];
+var replacerStack = [];
+function defaultOptions() {
+  return {
+    depthLimit: Number.MAX_SAFE_INTEGER,
+    edgesLimit: Number.MAX_SAFE_INTEGER
+  };
+}
+function stringify(obj, replacer, spacer, options) {
+  if (typeof options === "undefined") {
+    options = defaultOptions();
+  }
+  decirc(obj, "", 0, [], void 0, 0, options);
+  var res;
+  try {
+    if (replacerStack.length === 0) {
+      res = JSON.stringify(obj, replacer, spacer);
+    } else {
+      res = JSON.stringify(obj, replaceGetterValues(replacer), spacer);
+    }
+  } catch (_) {
+    return JSON.stringify("[unable to serialize, circular reference is too complex to analyze]");
+  } finally {
+    while (arr.length !== 0) {
+      var part = arr.pop();
+      if (part.length === 4) {
+        Object.defineProperty(part[0], part[1], part[3]);
+      } else {
+        part[0][part[1]] = part[2];
+      }
+    }
+  }
+  return res;
+}
+function setReplace(replace, val, k, parent) {
+  var propertyDescriptor = Object.getOwnPropertyDescriptor(parent, k);
+  if (propertyDescriptor.get !== void 0) {
+    if (propertyDescriptor.configurable) {
+      Object.defineProperty(parent, k, { value: replace });
+      arr.push([parent, k, val, propertyDescriptor]);
+    } else {
+      replacerStack.push([val, k, replace]);
+    }
+  } else {
+    parent[k] = replace;
+    arr.push([parent, k, val]);
+  }
+}
+function decirc(val, k, edgeIndex, stack, parent, depth, options) {
+  depth += 1;
+  var i;
+  if (typeof val === "object" && val !== null) {
+    for (i = 0; i < stack.length; i++) {
+      if (stack[i] === val) {
+        setReplace(CIRCULAR_REPLACE_NODE, val, k, parent);
+        return;
+      }
+    }
+    if (typeof options.depthLimit !== "undefined" && depth > options.depthLimit) {
+      setReplace(LIMIT_REPLACE_NODE, val, k, parent);
+      return;
+    }
+    if (typeof options.edgesLimit !== "undefined" && edgeIndex + 1 > options.edgesLimit) {
+      setReplace(LIMIT_REPLACE_NODE, val, k, parent);
+      return;
+    }
+    stack.push(val);
+    if (Array.isArray(val)) {
+      for (i = 0; i < val.length; i++) {
+        decirc(val[i], i, i, stack, val, depth, options);
+      }
+    } else {
+      var keys = Object.keys(val);
+      for (i = 0; i < keys.length; i++) {
+        var key = keys[i];
+        decirc(val[key], key, i, stack, val, depth, options);
+      }
+    }
+    stack.pop();
+  }
+}
+function replaceGetterValues(replacer) {
+  replacer = typeof replacer !== "undefined" ? replacer : function(k, v) {
+    return v;
+  };
+  return function(key, val) {
+    if (replacerStack.length > 0) {
+      for (var i = 0; i < replacerStack.length; i++) {
+        var part = replacerStack[i];
+        if (part[1] === key && part[0] === val) {
+          val = part[2];
+          replacerStack.splice(i, 1);
+          break;
+        }
+      }
+    }
+    return replacer.call(this, key, val);
+  };
+}
+
+function isLangChainSerializedObject(value) {
+  return value !== null && value.lc === 1 && value.type === "constructor" && Array.isArray(value.id);
+}
+async function _reviver(value) {
+  if (value && typeof value === "object") {
+    if (Array.isArray(value)) {
+      const revivedArray = await Promise.all(value.map((item) => _reviver(item)));
+      return revivedArray;
+    } else {
+      const revivedObj = {};
+      for (const [k, v] of Object.entries(value)) {
+        revivedObj[k] = await _reviver(v);
+      }
+      if (revivedObj.lc === 2 && revivedObj.type === "undefined") {
+        return void 0;
+      } else if (revivedObj.lc === 2 && revivedObj.type === "constructor" && Array.isArray(revivedObj.id)) {
+        try {
+          const constructorName = revivedObj.id[revivedObj.id.length - 1];
+          let constructor;
+          switch (constructorName) {
+            case "Set":
+              constructor = Set;
+              break;
+            case "Map":
+              constructor = Map;
+              break;
+            case "RegExp":
+              constructor = RegExp;
+              break;
+            case "Error":
+              constructor = Error;
+              break;
+            default:
+              return revivedObj;
+          }
+          if (revivedObj.method) {
+            return constructor[revivedObj.method](...revivedObj.args || []);
+          } else {
+            return new constructor(...revivedObj.args || []);
+          }
+        } catch (error) {
+          return revivedObj;
+        }
+      } else if (isLangChainSerializedObject(revivedObj)) {
+        return load(JSON.stringify(revivedObj));
+      }
+      return revivedObj;
+    }
+  }
+  return value;
+}
+function _encodeConstructorArgs(constructor, method, args, kwargs) {
+  return {
+    lc: 2,
+    type: "constructor",
+    id: [constructor.name],
+    method: method ?? null,
+    args: args ?? [],
+    kwargs: kwargs ?? {}
+  };
+}
+function _default(obj) {
+  if (obj === void 0) {
+    return {
+      lc: 2,
+      type: "undefined"
+    };
+  } else if (obj instanceof Set || obj instanceof Map) {
+    return _encodeConstructorArgs(obj.constructor, void 0, [Array.from(obj)]);
+  } else if (obj instanceof RegExp) {
+    return _encodeConstructorArgs(RegExp, void 0, [obj.source, obj.flags]);
+  } else if (obj instanceof Error) {
+    return _encodeConstructorArgs(obj.constructor, void 0, [obj.message]);
+  } else if (obj?.lg_name === "Send") {
+    return {
+      node: obj.node,
+      args: obj.args
+    };
+  } else {
+    return obj;
+  }
+}
+class JsonPlusSerializer {
+  _dumps(obj) {
+    const encoder = new TextEncoder();
+    return encoder.encode(
+      stringify(obj, (_, value) => {
+        return _default(value);
+      })
+    );
+  }
+  async dumpsTyped(obj) {
+    if (obj instanceof Uint8Array) {
+      return ["bytes", obj];
+    } else {
+      return ["json", this._dumps(obj)];
+    }
+  }
+  async _loads(data) {
+    const parsed = JSON.parse(data);
+    return _reviver(parsed);
+  }
+  async loadsTyped(type, data) {
+    if (type === "bytes") {
+      return typeof data === "string" ? new TextEncoder().encode(data) : data;
+    } else if (type === "json") {
+      return this._loads(typeof data === "string" ? data : new TextDecoder().decode(data));
+    } else {
+      throw new Error(`Unknown serialization type: ${type}`);
+    }
+  }
+}
+
+class BaseStreamQueue extends EventEmitter {
+  /**
+   * 构造函数
+   * Constructor
+   * @param compressMessages 是否压缩消息 / Whether to compress messages
+   */
+  constructor(id, compressMessages = true) {
+    super();
+    this.id = id;
+    this.compressMessages = compressMessages;
+  }
+  /** 序列化器实例 / Serializer instance */
+  serializer = new JsonPlusSerializer();
+  /**
+   * 编码数据为 Uint8Array
+   * Encode data to Uint8Array
+   * @param message 要编码的消息 / Message to encode
+   * @returns 编码后的 Uint8Array / Encoded Uint8Array
+   */
+  async encodeData(message) {
+    const [_, serializedMessage] = await this.serializer.dumpsTyped(message);
+    return serializedMessage;
+  }
+  /**
+   * 解码数据为 EventMessage
+   * Decode data to EventMessage
+   * @param serializedMessage 要解码的消息 / Message to decode
+   * @returns 解码后的 EventMessage / Decoded EventMessage
+   */
+  async decodeData(serializedMessage) {
+    const message = await this.serializer.loadsTyped("json", serializedMessage);
+    return message;
+  }
+}
+class StreamQueueManager {
+  /** 存储队列实例的 Map / Map storing queue instances */
+  queues = /* @__PURE__ */ new Map();
+  /** 默认是否压缩消息 / Default compress messages setting */
+  defaultCompressMessages;
+  /** 队列构造函数 / Queue constructor */
+  queueConstructor;
+  /**
+   * 构造函数
+   * Constructor
+   * @param queueConstructor 队列构造函数 / Queue constructor
+   * @param options 配置选项 / Configuration options
+   */
+  constructor(queueConstructor, options = {}) {
+    this.defaultCompressMessages = options.defaultCompressMessages ?? true;
+    this.queueConstructor = queueConstructor;
+  }
+  /**
+   * 创建指定 id 的队列
+   * Create queue with specified id
+   * @param id 队列 ID / Queue ID
+   * @param compressMessages 是否压缩消息 / Whether to compress messages
+   * @returns 创建的队列实例 / Created queue instance
+   */
+  createQueue(id, compressMessages) {
+    compressMessages ?? this.defaultCompressMessages;
+    this.queues.set(id, new this.queueConstructor(id));
+    return this.queues.get(id);
+  }
+  /**
+   * 获取或创建指定 id 的队列
+   * Get or create queue with specified id
+   * @param id 队列 ID / Queue ID
+   * @param compressMessages 是否压缩消息,默认为构造函数中的默认值 / Whether to compress messages, defaults to constructor default
+   * @returns StreamQueue 实例 / StreamQueue instance
+   */
+  getQueue(id) {
+    const queue = this.queues.get(id);
+    if (!queue) {
+      throw new Error(`Queue with id '${id}' does not exist`);
+    }
+    return queue;
+  }
+  /**
+   * 取消指定 id 的队列
+   * Cancel queue with specified id
+   * @param id 队列 ID / Queue ID
+   */
+  cancelQueue(id) {
+    const queue = this.queues.get(id);
+    if (queue) {
+      queue.cancel();
+      this.removeQueue(id);
+    }
+  }
+  /**
+   * 向指定 id 的队列推送数据
+   * Push data to queue with specified id
+   * @param id 队列 ID / Queue ID
+   * @param item 要推送的数据项 / Item to push
+   * @param compressMessages 是否压缩消息,默认为构造函数中的默认值 / Whether to compress messages, defaults to constructor default
+   */
+  async pushToQueue(id, item, compressMessages) {
+    const queue = this.getQueue(id);
+    await queue.push(item);
+  }
+  /**
+   * 获取指定 id 队列中的所有数据
+   * Get all data from queue with specified id
+   * @param id 队列 ID / Queue ID
+   * @returns 队列中的所有数据 / All data in the queue
+   */
+  async getQueueData(id) {
+    const queue = this.queues.get(id);
+    if (!queue) {
+      throw new Error(`Queue with id '${id}' does not exist`);
+    }
+    return await queue.getAll();
+  }
+  /**
+   * 清空指定 id 的队列
+   * Clear queue with specified id
+   * @param id 队列 ID / Queue ID
+   */
+  clearQueue(id) {
+    const queue = this.queues.get(id);
+    if (queue) {
+      queue.clear();
+    }
+  }
+  /**
+   * 删除指定 id 的队列
+   * Remove queue with specified id
+   * @param id 队列 ID / Queue ID
+   * @returns 是否成功删除 / Whether successfully deleted
+   */
+  removeQueue(id) {
+    setTimeout(() => {
+      return this.queues.delete(id);
+    }, 500);
+  }
+  /**
+   * 获取所有队列的 ID
+   * Get all queue IDs
+   * @returns 所有队列 ID 的数组 / Array of all queue IDs
+   */
+  getAllQueueIds() {
+    return Array.from(this.queues.keys());
+  }
+  /**
+   * 获取所有队列及其数据的快照
+   * Get snapshot of all queues and their data
+   * @returns 包含所有队列数据的结果对象 / Result object containing all queue data
+   */
+  async getAllQueuesData() {
+    const result = {};
+    for (const [id, queue] of this.queues) {
+      result[id] = await queue.getAll();
+    }
+    return result;
+  }
+  /**
+   * 清空所有队列
+   * Clear all queues
+   */
+  clearAllQueues() {
+    for (const queue of this.queues.values()) {
+      queue.clear();
+    }
+  }
+}
+
+class EventMessage {
+  event;
+  data;
+  id;
+  constructor(event, data) {
+    this.event = event;
+    this.data = data;
+  }
+}
+class CancelEventMessage extends EventMessage {
+  constructor() {
+    super("__system_cancel__", "user cancel this run");
+  }
+}
+class StreamEndEventMessage extends EventMessage {
+  constructor() {
+    super("__stream_end__", "stream end");
+  }
+}
+class StreamErrorEventMessage extends EventMessage {
+  constructor(error) {
+    super("__stream_error__", {
+      error: error.name,
+      message: error.message
+    });
+  }
+}
+
+class MemoryStreamQueue extends BaseStreamQueue {
+  data = [];
+  async push(item) {
+    const data = this.compressMessages ? await this.encodeData(item) : item;
+    this.data.push(data);
+    this.emit("dataChange", data);
+  }
+  onDataChange(listener) {
+    this.on("dataChange", async (item) => {
+      listener(this.compressMessages ? await this.decodeData(item) : item);
+    });
+    return () => this.off("dataChange", listener);
+  }
+  /**
+   * 异步生成器:支持 for await...of 方式消费队列数据
+   */
+  async *onDataReceive() {
+    let queue = [];
+    let pendingResolve = null;
+    let isStreamEnded = false;
+    const handleData = async (item) => {
+      const data = this.compressMessages ? await this.decodeData(item) : item;
+      queue.push(data);
+      if (data.event === "__stream_end__" || data.event === "__stream_error__" || data.event === "__stream_cancel__") {
+        setTimeout(() => {
+          isStreamEnded = true;
+          if (pendingResolve) {
+            pendingResolve();
+            pendingResolve = null;
+          }
+        }, 300);
+        if (data.event === "__stream_cancel__") {
+          this.cancel();
+        }
+      }
+      if (pendingResolve) {
+        pendingResolve();
+        pendingResolve = null;
+      }
+    };
+    this.on("dataChange", handleData);
+    try {
+      while (!isStreamEnded) {
+        if (queue.length > 0) {
+          for (const item of queue) {
+            yield item;
+          }
+          queue = [];
+        } else {
+          await new Promise((resolve) => {
+            pendingResolve = resolve;
+          });
+        }
+      }
+    } finally {
+      this.off("dataChange", handleData);
+    }
+  }
+  async getAll() {
+    return this.compressMessages ? await Promise.all(
+      this.data.map((i) => this.decodeData(i))
+    ) : this.data;
+  }
+  clear() {
+    this.data = [];
+  }
+  cancelSignal = new AbortController();
+  cancel() {
+    this.push(new CancelEventMessage());
+    this.cancelSignal.abort("user cancel this run");
+  }
+}
+
+const GRAPHS = {};
+async function registerGraph(graphId, graph) {
+  GRAPHS[graphId] = graph;
+}
+async function getGraph(graphId, config, options) {
+  if (!GRAPHS[graphId]) throw new Error(`Graph "${graphId}" not found`);
+  const compiled = typeof GRAPHS[graphId] === "function" ? await GRAPHS[graphId](config ?? { configurable: {} }) : GRAPHS[graphId];
+  if (typeof options?.checkpointer !== "undefined") {
+    compiled.checkpointer = options?.checkpointer ?? LangGraphGlobal.globalCheckPointer;
+  } else {
+    compiled.checkpointer = LangGraphGlobal.globalCheckPointer;
+  }
+  compiled.store = options?.store ?? void 0;
+  return compiled;
+}
+
+class MemoryThreadsManager {
+  threads = [];
+  async setup() {
+    return;
+  }
+  async create(payload) {
+    const threadId = payload?.threadId || crypto.randomUUID();
+    if (payload?.ifExists === "raise" && this.threads.some((t) => t.thread_id === threadId)) {
+      throw new Error(`Thread with ID ${threadId} already exists.`);
+    }
+    const thread = {
+      thread_id: threadId,
+      created_at: (/* @__PURE__ */ new Date()).toISOString(),
+      updated_at: (/* @__PURE__ */ new Date()).toISOString(),
+      metadata: payload?.metadata || {},
+      status: "idle",
+      values: null,
+      interrupts: {}
+    };
+    this.threads.push(thread);
+    return thread;
+  }
+  async search(query) {
+    let filteredThreads = [...this.threads];
+    if (query?.status) {
+      filteredThreads = filteredThreads.filter((t) => t.status === query.status);
+    }
+    if (query?.metadata) {
+      for (const key in query.metadata) {
+        if (Object.prototype.hasOwnProperty.call(query.metadata, key)) {
+          filteredThreads = filteredThreads.filter(
+            (t) => t.metadata && t.metadata[key] === query.metadata?.[key]
+          );
+        }
+      }
+    }
+    if (query?.sortBy) {
+      filteredThreads.sort((a, b) => {
+        let aValue;
+        let bValue;
+        switch (query.sortBy) {
+          case "created_at":
+            aValue = new Date(a.created_at).getTime();
+            bValue = new Date(b.created_at).getTime();
+            break;
+          case "updated_at":
+            aValue = new Date(a.updated_at).getTime();
+            bValue = new Date(b.updated_at).getTime();
+            break;
+          default:
+            return 0;
+        }
+        if (query.sortOrder === "desc") {
+          return bValue - aValue;
+        } else {
+          return aValue - bValue;
+        }
+      });
+    }
+    const offset = query?.offset || 0;
+    const limit = query?.limit || filteredThreads.length;
+    return filteredThreads.slice(offset, offset + limit);
+  }
+  async get(threadId) {
+    const thread = this.threads.find((t) => t.thread_id === threadId);
+    if (!thread) {
+      throw new Error(`Thread with ID ${threadId} not found.`);
+    }
+    return thread;
+  }
+  async set(threadId, thread) {
+    const index = this.threads.findIndex((t) => t.thread_id === threadId);
+    if (index === -1) {
+      throw new Error(`Thread with ID ${threadId} not found.`);
+    }
+    this.threads[index] = { ...this.threads[index], ...thread };
+  }
+  async delete(threadId) {
+    const initialLength = this.threads.length;
+    this.threads = this.threads.filter((t) => t.thread_id !== threadId);
+    if (this.threads.length === initialLength) {
+      throw new Error(`Thread with ID ${threadId} not found.`);
+    }
+  }
+  async updateState(threadId, thread) {
+    const index = this.threads.findIndex((t) => t.thread_id === threadId);
+    if (index === -1) {
+      throw new Error(`Thread with ID ${threadId} not found.`);
+    }
+    const targetThread = this.threads[index];
+    if (targetThread.status === "busy") {
+      throw new Error(`Thread with ID ${threadId} is busy, can't update state.`);
+    }
+    this.threads[index] = { ...targetThread, values: thread.values };
+    if (!targetThread.metadata?.graph_id) {
+      throw new Error(`Thread with ID ${threadId} has no graph_id.`);
+    }
+    const graphId = targetThread.metadata?.graph_id;
+    const config = {
+      configurable: {
+        thread_id: threadId,
+        graph_id: graphId
+      }
+    };
+    const graph = await getGraph(graphId, config);
+    const nextConfig = await graph.updateState(config, thread.values);
+    const graphState = await graph.getState(config);
+    await this.set(threadId, { values: JSON.parse(serialiseAsDict(graphState.values)) });
+    return nextConfig;
+  }
+  runs = [];
+  async createRun(threadId, assistantId, payload) {
+    const runId = crypto.randomUUID();
+    const run = {
+      run_id: runId,
+      thread_id: threadId,
+      assistant_id: assistantId,
+      created_at: (/* @__PURE__ */ new Date()).toISOString(),
+      updated_at: (/* @__PURE__ */ new Date()).toISOString(),
+      status: "pending",
+      metadata: payload?.metadata ?? {},
+      multitask_strategy: "reject"
+    };
+    this.runs.push(run);
+    return run;
+  }
+  async listRuns(threadId, options) {
+    let filteredRuns = [...this.runs];
+    if (options?.status) {
+      filteredRuns = filteredRuns.filter((r) => r.status === options.status);
+    }
+    if (options?.limit) {
+      filteredRuns = filteredRuns.slice(options.offset || 0, (options.offset || 0) + options.limit);
+    }
+    return filteredRuns;
+  }
+  async updateRun(runId, run) {
+    const index = this.runs.findIndex((r) => r.run_id === runId);
+    if (index === -1) {
+      throw new Error(`Run with ID ${runId} not found.`);
+    }
+    this.runs[index] = { ...this.runs[index], ...run };
+  }
+}
+
+class SQLiteThreadsManager {
+  db;
+  isSetup = false;
+  constructor(checkpointer) {
+    this.db = checkpointer.db;
+  }
+  async setup() {
+    if (this.isSetup) {
+      return;
+    }
+    this.db.exec(`
+      CREATE TABLE IF NOT EXISTS threads (
+        thread_id TEXT PRIMARY KEY,
+        created_at TEXT NOT NULL,
+        updated_at TEXT NOT NULL,
+        metadata TEXT NOT NULL DEFAULT '{}',
+        status TEXT NOT NULL DEFAULT 'idle',
+        [values] TEXT,
+        interrupts TEXT NOT NULL DEFAULT '{}'
+      )
+    `);
+    this.db.exec(`
+      CREATE TABLE IF NOT EXISTS runs (
+        run_id TEXT PRIMARY KEY,
+        thread_id TEXT NOT NULL,
+        assistant_id TEXT NOT NULL,
+        created_at TEXT NOT NULL,
+        updated_at TEXT NOT NULL,
+        status TEXT NOT NULL DEFAULT 'pending',
+        metadata TEXT NOT NULL DEFAULT '{}',
+        multitask_strategy TEXT NOT NULL DEFAULT 'reject',
+        FOREIGN KEY (thread_id) REFERENCES threads(thread_id) ON DELETE CASCADE
+      )
+    `);
+    this.db.exec(`CREATE INDEX IF NOT EXISTS idx_threads_status ON threads(status)`);
+    this.db.exec(`CREATE INDEX IF NOT EXISTS idx_threads_created_at ON threads(created_at)`);
+    this.db.exec(`CREATE INDEX IF NOT EXISTS idx_threads_updated_at ON threads(updated_at)`);
+    this.db.exec(`CREATE INDEX IF NOT EXISTS idx_runs_thread_id ON runs(thread_id)`);
+    this.db.exec(`CREATE INDEX IF NOT EXISTS idx_runs_status ON runs(status)`);
+    this.isSetup = true;
+  }
+  async create(payload) {
+    const threadId = payload?.threadId || crypto.randomUUID();
+    if (payload?.ifExists === "raise") {
+      const existingThread = this.db.prepare("SELECT thread_id FROM threads WHERE thread_id = ?").get(threadId);
+      if (existingThread) {
+        throw new Error(`Thread with ID ${threadId} already exists.`);
+      }
+    }
+    const now = (/* @__PURE__ */ new Date()).toISOString();
+    const metadata = JSON.stringify(payload?.metadata || {});
+    const interrupts = JSON.stringify({});
+    const thread = {
+      thread_id: threadId,
+      created_at: now,
+      updated_at: now,
+      metadata: payload?.metadata || {},
+      status: "idle",
+      values: null,
+      interrupts: {}
+    };
+    this.db.prepare(
+      `
+      INSERT INTO threads (thread_id, created_at, updated_at, metadata, status, [values], interrupts)
+      VALUES (?, ?, ?, ?, ?, ?, ?)
+    `
+    ).run(threadId, now, now, metadata, "idle", null, interrupts);
+    return thread;
+  }
+  async search(query) {
+    let sql = "SELECT * FROM threads";
+    const whereConditions = [];
+    const params = [];
+    if (query?.status) {
+      whereConditions.push("status = ?");
+      params.push(query.status);
+    }
+    if (query?.metadata) {
+      for (const [key, value] of Object.entries(query.metadata)) {
+        whereConditions.push(`json_extract(metadata, '$.${key}') = ?`);
+        params.push(JSON.stringify(value));
+      }
+    }
+    if (whereConditions.length > 0) {
+      sql += " WHERE " + whereConditions.join(" AND ");
+    }
+    if (query?.sortBy) {
+      sql += ` ORDER BY ${query.sortBy}`;
+      if (query.sortOrder === "desc") {
+        sql += " DESC";
+      } else {
+        sql += " ASC";
+      }
+    }
+    if (query?.limit) {
+      sql += ` LIMIT ${query.limit}`;
+      if (query?.offset) {
+        sql += ` OFFSET ${query.offset}`;
+      }
+    }
+    const rows = this.db.prepare(sql).all(...params);
+    return rows.map((row) => ({
+      thread_id: row.thread_id,
+      created_at: row.created_at,
+      updated_at: row.updated_at,
+      metadata: JSON.parse(row.metadata),
+      status: row.status,
+      values: row.values ? JSON.parse(row.values) : null,
+      interrupts: JSON.parse(row.interrupts)
+    }));
+  }
+  async get(threadId) {
+    const row = this.db.prepare("SELECT * FROM threads WHERE thread_id = ?").get(threadId);
+    if (!row) {
+      throw new Error(`Thread with ID ${threadId} not found.`);
+    }
+    return {
+      thread_id: row.thread_id,
+      created_at: row.created_at,
+      updated_at: row.updated_at,
+      metadata: JSON.parse(row.metadata),
+      status: row.status,
+      values: row.values ? JSON.parse(row.values) : null,
+      interrupts: JSON.parse(row.interrupts)
+    };
+  }
+  async set(threadId, thread) {
+    const existingThread = this.db.prepare("SELECT thread_id FROM threads WHERE thread_id = ?").get(threadId);
+    if (!existingThread) {
+      throw new Error(`Thread with ID ${threadId} not found.`);
+    }
+    const updateFields = [];
+    const values = [];
+    if (thread.metadata !== void 0) {
+      updateFields.push("metadata = ?");
+      values.push(JSON.stringify(thread.metadata));
+    }
+    if (thread.status !== void 0) {
+      updateFields.push("status = ?");
+      values.push(thread.status);
+    }
+    if (thread.values !== void 0) {
+      updateFields.push("[values] = ?");
+      values.push(thread.values ? JSON.stringify(thread.values) : null);
+    }
+    if (thread.interrupts !== void 0) {
+      updateFields.push("interrupts = ?");
+      values.push(JSON.stringify(thread.interrupts));
+    }
+    updateFields.push("updated_at = ?");
+    values.push((/* @__PURE__ */ new Date()).toISOString());
+    if (updateFields.length > 0) {
+      values.push(threadId);
+      this.db.prepare(
+        `
+        UPDATE threads
+        SET ${updateFields.join(", ")}
+        WHERE thread_id = ?
+      `
+      ).run(...values);
+    }
+  }
+  async updateState(threadId, thread) {
+    const row = this.db.prepare("SELECT * FROM threads WHERE thread_id = ?").get(threadId);
+    if (!row) {
+      throw new Error(`Thread with ID ${threadId} not found.`);
+    }
+    const targetThread = {
+      thread_id: row.thread_id,
+      created_at: row.created_at,
+      updated_at: row.updated_at,
+      metadata: JSON.parse(row.metadata),
+      status: row.status,
+      values: row.values ? JSON.parse(row.values) : null,
+      interrupts: JSON.parse(row.interrupts)
+    };
+    if (targetThread.status === "busy") {
+      throw new Error(`Thread with ID ${threadId} is busy, can't update state.`);
+    }
+    if (!targetThread.metadata?.graph_id) {
+      throw new Error(`Thread with ID ${threadId} has no graph_id.`);
+    }
+    const graphId = targetThread.metadata?.graph_id;
+    const config = {
+      configurable: {
+        thread_id: threadId,
+        graph_id: graphId
+      }
+    };
+    const graph = await getGraph(graphId, config);
+    const nextConfig = await graph.updateState(config, thread.values);
+    const graphState = await graph.getState(config);
+    await this.set(threadId, { values: JSON.parse(serialiseAsDict(graphState.values)) });
+    return nextConfig;
+  }
+  async delete(threadId) {
+    const result = this.db.prepare("DELETE FROM threads WHERE thread_id = ?").run(threadId);
+    if (result.changes === 0) {
+      throw new Error(`Thread with ID ${threadId} not found.`);
+    }
+  }
+  async createRun(threadId, assistantId, payload) {
+    const runId = crypto.randomUUID();
+    const now = (/* @__PURE__ */ new Date()).toISOString();
+    const metadata = JSON.stringify(payload?.metadata ?? {});
+    const run = {
+      run_id: runId,
+      thread_id: threadId,
+      assistant_id: assistantId,
+      created_at: now,
+      updated_at: now,
+      status: "pending",
+      metadata: payload?.metadata ?? {},
+      multitask_strategy: "reject"
+    };
+    this.db.prepare(
+      `
+      INSERT INTO runs (run_id, thread_id, assistant_id, created_at, updated_at, status, metadata, multitask_strategy)
+      VALUES (?, ?, ?, ?, ?, ?, ?, ?)
+    `
+    ).run(runId, threadId, assistantId, now, now, "pending", metadata, "reject");
+    return run;
+  }
+  async listRuns(threadId, options) {
+    let sql = "SELECT * FROM runs WHERE thread_id = ?";
+    const params = [threadId];
+    if (options?.status) {
+      sql += " AND status = ?";
+      params.push(options.status);
+    }
+    sql += " ORDER BY created_at DESC";
+    if (options?.limit) {
+      sql += ` LIMIT ${options.limit}`;
+      if (options?.offset) {
+        sql += ` OFFSET ${options.offset}`;
+      }
+    }
+    const rows = this.db.prepare(sql).all(...params);
+    return rows.map((row) => ({
+      run_id: row.run_id,
+      thread_id: row.thread_id,
+      assistant_id: row.assistant_id,
+      created_at: row.created_at,
+      updated_at: row.updated_at,
+      status: row.status,
+      metadata: JSON.parse(row.metadata),
+      multitask_strategy: row.multitask_strategy
+    }));
+  }
+  async updateRun(runId, run) {
+    const existingRun = this.db.prepare("SELECT run_id FROM runs WHERE run_id = ?").get(runId);
+    if (!existingRun) {
+      throw new Error(`Run with ID ${runId} not found.`);
+    }
+    const updateFields = [];
+    const values = [];
+    if (run.status !== void 0) {
+      updateFields.push("status = ?");
+      values.push(run.status);
+    }
+    if (run.metadata !== void 0) {
+      updateFields.push("metadata = ?");
+      values.push(JSON.stringify(run.metadata));
+    }
+    if (run.multitask_strategy !== void 0) {
+      updateFields.push("multitask_strategy = ?");
+      values.push(run.multitask_strategy);
+    }
+    updateFields.push("updated_at = ?");
+    values.push((/* @__PURE__ */ new Date()).toISOString());
+    if (updateFields.length > 0) {
+      values.push(runId);
+      this.db.prepare(
+        `
+        UPDATE runs
+        SET ${updateFields.join(", ")}
+        WHERE run_id = ?
+      `
+      ).run(...values);
+    }
+  }
+}
+
+const createCheckPointer = async () => {
+  if (process.env.REDIS_URL && process.env.CHECKPOINT_TYPE === "redis" || process.env.CHECKPOINT_TYPE === "shallow/redis") {
+    if (process.env.CHECKPOINT_TYPE === "redis") {
+      console.debug("LG | Using redis as checkpoint");
+      const { RedisSaver } = await import('@langchain/langgraph-checkpoint-redis');
+      return await RedisSaver.fromUrl(process.env.REDIS_URL, {
+        defaultTTL: 60,
+        // TTL in minutes
+        refreshOnRead: true
+      });
+    }
+    if (process.env.CHECKPOINT_TYPE === "shallow/redis") {
+      console.debug("LG | Using shallow redis as checkpoint");
+      const { ShallowRedisSaver } = await import('@langchain/langgraph-checkpoint-redis/shallow');
+      return await ShallowRedisSaver.fromUrl(process.env.REDIS_URL);
+    }
+  }
+  if (process.env.DATABASE_URL) {
+    console.debug("LG | Using postgres as checkpoint");
+    const { createPGCheckpoint } = await import('./checkpoint-DxiUsHMy.js');
+    return createPGCheckpoint();
+  }
+  if (process.env.SQLITE_DATABASE_URI) {
+    console.debug("LG | Using sqlite as checkpoint");
+    const { SqliteSaver } = await import('./checkpoint-BHKV54sL.js');
+    const db = SqliteSaver.fromConnString(process.env.SQLITE_DATABASE_URI);
+    return db;
+  }
+  return new MemorySaver();
+};
+const createMessageQueue = async () => {
+  let q;
+  if (process.env.REDIS_URL) {
+    console.debug("LG | Using redis as stream queue");
+    const { RedisStreamQueue } = await import('./queue-C6iEVbd2.js');
+    q = RedisStreamQueue;
+  } else {
+    q = MemoryStreamQueue;
+  }
+  return new StreamQueueManager(q);
+};
+const createThreadManager = async (config) => {
+  if (process.env.DATABASE_URL && config.checkpointer) {
+    const { PostgresThreadsManager } = await import('./threads-BUgBiCiK.js');
+    const threadsManager = new PostgresThreadsManager(config.checkpointer);
+    if (process.env.DATABASE_INIT === "true") {
+      await threadsManager.setup();
+    }
+    return threadsManager;
+  }
+  if (process.env.SQLITE_DATABASE_URI && config.checkpointer) {
+    const threadsManager = new SQLiteThreadsManager(config.checkpointer);
+    await threadsManager.setup();
+    return threadsManager;
+  }
+  return new MemoryThreadsManager();
+};
+
+const [globalMessageQueue, globalCheckPointer] = await Promise.all([createMessageQueue(), createCheckPointer()]);
+console.debug("LG | checkpointer created");
+const globalThreadsManager = await createThreadManager({
+  checkpointer: globalCheckPointer
+});
+console.debug("LG | threads manager created");
+console.debug("LG | global init done");
+class LangGraphGlobal {
+  static globalMessageQueue = globalMessageQueue;
+  static globalCheckPointer = globalCheckPointer;
+  static globalThreadsManager = globalThreadsManager;
+}
+
|
+
async function streamStateWithQueue(threads, run, queue, payload, options) {
|
|
1017
|
+
const kwargs = payload;
|
|
1018
|
+
const graphId = kwargs.config?.configurable?.graph_id;
|
|
1019
|
+
if (!graphId || typeof graphId !== "string") {
|
|
1020
|
+
throw new Error("Invalid or missing graph_id");
|
|
1021
|
+
}
|
|
1022
|
+
const graph = await options.getGraph(graphId, payload.config, {
|
|
1023
|
+
checkpointer: payload.temporary ? null : void 0
|
|
1024
|
+
});
|
|
1025
|
+
const userStreamMode = payload.stream_mode ?? [];
|
|
1026
|
+
const libStreamMode = /* @__PURE__ */ new Set([
|
|
1027
|
+
"values",
|
|
1028
|
+
...userStreamMode.filter((mode) => mode !== "events" && mode !== "messages-tuple")
|
|
1029
|
+
]);
|
|
1030
|
+
if (userStreamMode.includes("messages-tuple")) {
|
|
1031
|
+
libStreamMode.add("messages");
|
|
1032
|
+
}
|
|
1033
|
+
if (userStreamMode.includes("messages")) {
|
|
1034
|
+
libStreamMode.add("values");
|
|
1035
|
+
}
|
|
1036
|
+
await queue.push(
|
|
1037
|
+
new EventMessage("metadata", {
|
|
1038
|
+
run_id: run.run_id,
|
|
1039
|
+
attempt: options.attempt,
|
|
1040
|
+
graph_id: graphId
|
|
1041
|
+
})
|
|
1042
|
+
);
|
|
1043
|
+
const metadata = {
|
|
1044
|
+
...payload.config?.metadata,
|
|
1045
|
+
run_attempt: options.attempt
|
|
1046
|
+
};
|
|
1047
|
+
const events = graph.streamEvents(
|
|
1048
|
+
payload.command != null ? getLangGraphCommand(payload.command) : payload.input ?? null,
|
|
1049
|
+
{
|
|
1050
|
+
version: "v2",
|
|
1051
|
+
interruptAfter: payload.interrupt_after,
|
|
1052
|
+
interruptBefore: payload.interrupt_before,
|
|
1053
|
+
tags: payload.config?.tags,
|
|
1054
|
+
configurable: payload.config?.configurable,
|
|
1055
|
+
recursionLimit: payload.config?.recursionLimit,
|
|
1056
|
+
subgraphs: payload.stream_subgraphs,
|
|
1057
|
+
metadata,
|
|
1058
|
+
runId: run.run_id,
|
|
1059
|
+
streamMode: [...libStreamMode],
|
|
1060
|
+
signal: queue.cancelSignal.signal
|
|
1061
|
+
}
|
|
1062
|
+
);
|
|
1063
|
+
const messages = {};
|
|
1064
|
+
const completedIds = /* @__PURE__ */ new Set();
|
|
1065
|
+
try {
|
|
1066
|
+
for await (const event of events) {
|
|
1067
|
+
if (event.tags?.includes("langsmith:hidden")) continue;
|
|
1068
|
+
if (event.event === "on_chain_stream" && event.run_id === run.run_id) {
|
|
1069
|
+
const [ns, mode, chunk] = payload.stream_subgraphs ? event.data.chunk : [null, ...event.data.chunk];
|
|
1070
|
+
let data = chunk;
|
|
1071
|
+
if (mode === "messages") {
|
|
1072
|
+
if (userStreamMode.includes("messages-tuple")) {
|
|
1073
|
+
await queue.push(new EventMessage("messages", data));
|
|
1074
|
+
}
|
|
1075
|
+
} else if (userStreamMode.includes(mode)) {
|
|
1076
|
+
if (payload.stream_subgraphs && ns?.length) {
|
|
1077
|
+
await queue.push(new EventMessage(`${mode}|${ns.join("|")}`, data));
|
|
1078
|
+
} else {
|
|
1079
|
+
await queue.push(new EventMessage(mode, data));
|
|
1080
|
+
}
|
|
1081
|
+
}
|
|
1082
|
+
if (mode === "values") {
|
|
1083
|
+
await threads.set(run.thread_id, {
|
|
1084
|
+
values: data ? JSON.parse(serialiseAsDict(data)) : ""
|
|
1085
|
+
});
|
|
1086
|
+
}
|
|
1087
|
+
} else if (userStreamMode.includes("events")) {
|
|
1088
|
+
await queue.push(new EventMessage("events", event));
|
|
1089
|
+
}
|
|
1090
|
+
if (userStreamMode.includes("messages")) {
|
|
1091
|
+
if (event.event === "on_chain_stream" && event.run_id === run.run_id) {
|
|
1092
|
+
const newMessages = [];
|
|
1093
|
+
const [_, chunk] = event.data.chunk;
|
|
1094
|
+
let chunkMessages = [];
|
|
1095
|
+
if (typeof chunk === "object" && chunk != null && "messages" in chunk && !isBaseMessage(chunk)) {
|
|
1096
|
+
chunkMessages = chunk?.messages;
|
|
1097
|
+
}
|
|
1098
|
+
if (!Array.isArray(chunkMessages)) {
|
|
1099
|
+
chunkMessages = [chunkMessages];
|
|
1100
|
+
}
|
|
1101
|
+
for (const message of chunkMessages) {
|
|
1102
|
+
if (!message.id || completedIds.has(message.id)) continue;
|
|
1103
|
+
completedIds.add(message.id);
|
|
1104
|
+
newMessages.push(message);
|
|
1105
|
+
}
|
|
1106
|
+
if (newMessages.length > 0) {
|
|
1107
|
+
await queue.push(new EventMessage("messages/complete", newMessages));
|
|
1108
|
+
}
|
|
1109
|
+
} else if (event.event === "on_chat_model_stream" && !event.tags?.includes("nostream")) {
|
|
1110
|
+
const message = event.data.chunk;
|
|
1111
|
+
if (!message.id) continue;
|
|
1112
|
+
if (messages[message.id] == null) {
|
|
1113
|
+
messages[message.id] = message;
|
|
1114
|
+
await queue.push(
|
|
1115
|
+
new EventMessage("messages/metadata", {
|
|
1116
|
+
[message.id]: { metadata: event.metadata }
|
|
1117
|
+
})
|
|
1118
|
+
);
|
|
1119
|
+
} else {
|
|
1120
|
+
messages[message.id] = messages[message.id].concat(message);
|
|
1121
|
+
}
|
|
1122
|
+
await queue.push(new EventMessage("messages/partial", [messages[message.id]]));
|
|
1123
|
+
}
|
|
1124
|
+
}
|
|
1125
|
+
}
|
|
1126
|
+
} finally {
|
|
1127
|
+
await queue.push(new StreamEndEventMessage());
|
|
1128
|
+
}
|
|
1129
|
+
}
|
|
+const serialiseAsDict = (obj, indent = 2) => {
+  return JSON.stringify(
+    obj,
+    function(key, value) {
+      const rawValue = this[key];
+      if (rawValue != null && typeof rawValue === "object" && "toDict" in rawValue && typeof rawValue.toDict === "function") {
+        const { type, data } = rawValue.toDict();
+        return { ...data, type };
+      }
+      return value;
+    },
+    indent
+  );
+};
+async function* streamState(threads, run, payload, options) {
+  run = await run;
+  const queueId = run.run_id;
+  const threadId = run.thread_id;
+  try {
+    await threads.set(threadId, { status: "busy" });
+    await threads.updateRun(run.run_id, { status: "running" });
+    const queue = LangGraphGlobal.globalMessageQueue.createQueue(queueId);
+    const state = queue.onDataReceive();
+    streamStateWithQueue(threads, run, queue, payload, options).catch((error) => {
+      console.error("Queue task error:", error);
+      LangGraphGlobal.globalMessageQueue.pushToQueue(queueId, new StreamErrorEventMessage(error));
+    });
+    for await (const data of state) {
+      yield data;
+    }
+    await threads.updateRun(run.run_id, { status: "success" });
+  } catch (error) {
+    console.error("Stream error:", error);
+    await threads.updateRun(run.run_id, { status: "error" });
+    await threads.set(threadId, { status: "error" });
+  } finally {
+    await threads.set(threadId, { status: "idle" });
+    LangGraphGlobal.globalMessageQueue.removeQueue(queueId);
+  }
+}
+
+const AssistantEndpoint = {
+  async search(query) {
+    if (query?.graphId) {
+      return [
+        {
+          assistant_id: query.graphId,
+          graph_id: query.graphId,
+          config: {},
+          created_at: (/* @__PURE__ */ new Date()).toISOString(),
+          updated_at: (/* @__PURE__ */ new Date()).toISOString(),
+          metadata: {},
+          version: 1,
+          name: query.graphId,
+          description: ""
+        }
+      ];
+    }
+    return Object.entries(GRAPHS).map(
+      ([graphId, _]) => ({
+        assistant_id: graphId,
+        graph_id: graphId,
+        config: {},
+        metadata: {},
+        version: 1,
+        name: graphId,
+        description: "",
+        created_at: (/* @__PURE__ */ new Date()).toISOString(),
+        updated_at: (/* @__PURE__ */ new Date()).toISOString()
+      })
+    );
+  },
+  async getGraph(assistantId, options) {
+    const config = {};
+    const graph = await getGraph(assistantId, config);
+    const drawable = await graph.getGraphAsync({
+      ...config,
+      xray: options?.xray ?? void 0
+    });
+    return drawable.toJSON();
+  }
+};
+const createEndpoint = () => {
+  const threads = LangGraphGlobal.globalThreadsManager;
+  return {
+    assistants: AssistantEndpoint,
+    threads,
+    runs: {
+      list(threadId, options) {
+        return threads.listRuns(threadId, options);
+      },
+      async cancel(threadId, runId, wait, action) {
+        return LangGraphGlobal.globalMessageQueue.cancelQueue(runId);
+      },
+      async *stream(threadId, assistantId, payload) {
+        if (!payload.config) {
+          payload.config = {
+            configurable: {
+              graph_id: assistantId,
+              thread_id: threadId
+            }
+          };
+        }
+        for await (const data of streamState(
+          threads,
+          threads.createRun(threadId, assistantId, payload),
+          payload,
+          {
+            attempt: 0,
+            getGraph
+          }
+        )) {
+          yield data;
+        }
+      },
+      joinStream(threadId, runId, options) {
+        throw new Error("Function not implemented.");
+      }
+    }
+  };
+};
+
+const createEntrypointGraph = ({
+  stateSchema,
+  config,
+  graph
+}) => {
+  const name = graph.getName();
+  return new StateGraph(stateSchema, config).addNode(name, (state, config2) => graph.invoke(state, config2)).addEdge("__start__", name).addEdge(name, "__end__").compile({
+    name
+  });
+};
+
+export { AssistantEndpoint as A, BaseStreamQueue as B, CancelEventMessage as C, LangGraphGlobal as L, createEntrypointGraph as a, createEndpoint as c, getGraph as g, registerGraph as r, serialiseAsDict as s };
+//# sourceMappingURL=index-DcXE-SZb.js.map