@langgraph-js/pure-graph 2.2.9 → 2.3.0
This diff shows the contents of publicly available package versions that have been released to one of the supported registries, as they appear in those registries. It is provided for informational purposes only.
- package/dist/adapter/hono/index.js +3 -3
- package/dist/adapter/hono/index.js.map +1 -1
- package/dist/adapter/nextjs/index.js +2 -2
- package/dist/checkpoint-DdL-Wo1x.js +377 -0
- package/dist/checkpoint-DdL-Wo1x.js.map +1 -0
- package/dist/{createEndpoint-DHnODS67.js → createEndpoint-DJkcdyXQ.js} +2 -2
- package/dist/{createEndpoint-DHnODS67.js.map → createEndpoint-DJkcdyXQ.js.map} +1 -1
- package/dist/index.js +2 -2
- package/dist/index.js.map +1 -1
- package/dist/{queue-CwCW8m0q.js → queue-D1uesSTS.js} +2 -2
- package/dist/{queue-CwCW8m0q.js.map → queue-D1uesSTS.js.map} +1 -1
- package/dist/{router-AADuUgGy.js → router-B7j3i6t0.js} +3 -3
- package/dist/{router-AADuUgGy.js.map → router-B7j3i6t0.js.map} +1 -1
- package/dist/{stream-SJdqjfOM.js → stream-Ll8XU8_8.js} +34 -63
- package/dist/{stream-SJdqjfOM.js.map → stream-Ll8XU8_8.js.map} +1 -1
- package/package.json +2 -1
- package/dist/checkpoint-DnfHhpJw.js +0 -1418
- package/dist/checkpoint-DnfHhpJw.js.map +0 -1

@@ -1,1418 +0,0 @@
-import require$$0 from 'node:fs';
-import require$$1 from 'node:path';
-import require$$2 from 'node:util';
-import { BaseCheckpointSaver, TASKS, copyCheckpoint, maxChannelVersion } from '@langchain/langgraph-checkpoint';
-
⋮ 1,039 removed lines collapsed (Rollup-inlined copies of the better-sqlite3 driver and its dependencies: the commonjsRequire shim, util, SqliteError, file-uri-to-path, the bindings loader, the prepare/exec/close/loadExtension wrappers, transaction, pragma, backup, serialize, function, aggregate, table, inspect, and the Database constructor, exposed as libExports) ⋮
-const checkpointMetadataKeys = ["source", "step", "parents"];
-function validateKeys(keys) {
-  return keys;
-}
-const validCheckpointMetadataKeys = validateKeys(
-  checkpointMetadataKeys
-);
-function prepareSql(db, checkpointId) {
-  const sql = `
-    SELECT
-      thread_id,
-      checkpoint_ns,
-      checkpoint_id,
-      parent_checkpoint_id,
-      type,
-      checkpoint,
-      metadata,
-      (
-        SELECT
-          json_group_array(
-            json_object(
-              'task_id', pw.task_id,
-              'channel', pw.channel,
-              'type', pw.type,
-              'value', CAST(pw.value AS TEXT)
-            )
-          )
-        FROM writes as pw
-        WHERE pw.thread_id = checkpoints.thread_id
-          AND pw.checkpoint_ns = checkpoints.checkpoint_ns
-          AND pw.checkpoint_id = checkpoints.checkpoint_id
-      ) as pending_writes,
-      (
-        SELECT
-          json_group_array(
-            json_object(
-              'type', ps.type,
-              'value', CAST(ps.value AS TEXT)
-            )
-          )
-        FROM writes as ps
-        WHERE ps.thread_id = checkpoints.thread_id
-          AND ps.checkpoint_ns = checkpoints.checkpoint_ns
-          AND ps.checkpoint_id = checkpoints.parent_checkpoint_id
-          AND ps.channel = '${TASKS}'
-        ORDER BY ps.idx
-      ) as pending_sends
-    FROM checkpoints
-    WHERE thread_id = ? AND checkpoint_ns = ? ${checkpointId ? "AND checkpoint_id = ?" : "ORDER BY checkpoint_id DESC LIMIT 1"}`;
-  return db.prepare(sql);
-}
-class SqliteSaver extends BaseCheckpointSaver {
-  db;
-  isSetup;
-  withoutCheckpoint;
-  withCheckpoint;
-  constructor(db, serde) {
-    super(serde);
-    this.db = db;
-    this.isSetup = false;
-  }
-  static fromConnString(connStringOrLocalPath) {
-    return new SqliteSaver(new libExports.Database(connStringOrLocalPath));
-  }
-  setup() {
-    if (this.isSetup) {
-      return;
-    }
-    this.db.exec("PRAGMA journal_mode = WAL;");
-    this.db.exec(`
-      CREATE TABLE IF NOT EXISTS checkpoints (
-        thread_id TEXT NOT NULL,
-        checkpoint_ns TEXT NOT NULL DEFAULT '',
-        checkpoint_id TEXT NOT NULL,
-        parent_checkpoint_id TEXT,
-        type TEXT,
-        checkpoint BLOB,
-        metadata BLOB,
-        PRIMARY KEY (thread_id, checkpoint_ns, checkpoint_id)
-      );`);
-    this.db.exec(`
-      CREATE TABLE IF NOT EXISTS writes (
-        thread_id TEXT NOT NULL,
-        checkpoint_ns TEXT NOT NULL DEFAULT '',
-        checkpoint_id TEXT NOT NULL,
-        task_id TEXT NOT NULL,
-        idx INTEGER NOT NULL,
-        channel TEXT NOT NULL,
-        type TEXT,
-        value BLOB,
-        PRIMARY KEY (thread_id, checkpoint_ns, checkpoint_id, task_id, idx)
-      );`);
-    this.withoutCheckpoint = prepareSql(this.db, false);
-    this.withCheckpoint = prepareSql(this.db, true);
-    this.isSetup = true;
-  }
-  async getTuple(config) {
-    this.setup();
-    const { thread_id, checkpoint_ns = "", checkpoint_id } = config.configurable ?? {};
-    const args = [thread_id, checkpoint_ns];
-    if (checkpoint_id) args.push(checkpoint_id);
-    const stm = checkpoint_id ? this.withCheckpoint : this.withoutCheckpoint;
-    const row = stm.get(...args);
-    if (row === void 0 || row === null) return void 0;
-    let finalConfig = config;
-    if (!checkpoint_id) {
-      finalConfig = {
-        configurable: {
-          thread_id: row.thread_id,
-          checkpoint_ns,
-          checkpoint_id: row.checkpoint_id
-        }
-      };
-    }
-    if (finalConfig.configurable?.thread_id === void 0 || finalConfig.configurable?.checkpoint_id === void 0) {
-      throw new Error("Missing thread_id or checkpoint_id");
-    }
-    const pendingWrites = await Promise.all(
-      JSON.parse(row.pending_writes).map(async (write) => {
-        return [
-          write.task_id,
-          write.channel,
-          await this.serde.loadsTyped(write.type ?? "json", write.value ?? "")
-        ];
-      })
-    );
-    const checkpoint = await this.serde.loadsTyped(row.type ?? "json", row.checkpoint);
-    if (checkpoint.v < 4 && row.parent_checkpoint_id != null) {
-      await this.migratePendingSends(checkpoint, row.thread_id, row.parent_checkpoint_id);
-    }
-    return {
-      checkpoint,
-      config: finalConfig,
-      metadata: await this.serde.loadsTyped(row.type ?? "json", row.metadata),
-      parentConfig: row.parent_checkpoint_id ? {
-        configurable: {
-          thread_id: row.thread_id,
-          checkpoint_ns,
-          checkpoint_id: row.parent_checkpoint_id
-        }
-      } : void 0,
-      pendingWrites
-    };
-  }
-  async *list(config, options) {
-    const { limit, before, filter } = options ?? {};
-    this.setup();
-    const thread_id = config.configurable?.thread_id;
-    const checkpoint_ns = config.configurable?.checkpoint_ns;
-    let sql = `
-      SELECT
-        thread_id,
-        checkpoint_ns,
-        checkpoint_id,
-        parent_checkpoint_id,
-        type,
-        checkpoint,
-        metadata,
-        (
-          SELECT
-            json_group_array(
-              json_object(
-                'task_id', pw.task_id,
-                'channel', pw.channel,
-                'type', pw.type,
-                'value', CAST(pw.value AS TEXT)
-              )
-            )
-          FROM writes as pw
-          WHERE pw.thread_id = checkpoints.thread_id
-            AND pw.checkpoint_ns = checkpoints.checkpoint_ns
-            AND pw.checkpoint_id = checkpoints.checkpoint_id
-        ) as pending_writes,
-        (
-          SELECT
-            json_group_array(
-              json_object(
-                'type', ps.type,
-                'value', CAST(ps.value AS TEXT)
-              )
-            )
-          FROM writes as ps
-          WHERE ps.thread_id = checkpoints.thread_id
-            AND ps.checkpoint_ns = checkpoints.checkpoint_ns
-            AND ps.checkpoint_id = checkpoints.parent_checkpoint_id
-            AND ps.channel = '${TASKS}'
-          ORDER BY ps.idx
-        ) as pending_sends
-      FROM checkpoints
-    `;
-    const whereClause = [];
-    if (thread_id) {
-      whereClause.push("thread_id = ?");
-    }
-    if (checkpoint_ns !== void 0 && checkpoint_ns !== null) {
-      whereClause.push("checkpoint_ns = ?");
-    }
-    if (before?.configurable?.checkpoint_id !== void 0) {
-      whereClause.push("checkpoint_id < ?");
-    }
-    const sanitizedFilter = Object.fromEntries(
-      Object.entries(filter ?? {}).filter(
-        ([key, value]) => value !== void 0 && validCheckpointMetadataKeys.includes(key)
-      )
-    );
-    whereClause.push(
-      ...Object.entries(sanitizedFilter).map(([key]) => `jsonb(CAST(metadata AS TEXT))->'$.${key}' = ?`)
-    );
-    if (whereClause.length > 0) {
-      sql += `WHERE
-        ${whereClause.join(" AND\n ")}
-      `;
-    }
-    sql += "\nORDER BY checkpoint_id DESC";
-    if (limit) {
-      sql += ` LIMIT ${parseInt(limit, 10)}`;
-    }
-    const args = [
-      thread_id,
-      checkpoint_ns,
-      before?.configurable?.checkpoint_id,
-      ...Object.values(sanitizedFilter).map((value) => JSON.stringify(value))
-    ].filter((value) => value !== void 0 && value !== null);
-    const rows = this.db.prepare(sql).all(...args);
-    if (rows) {
-      for (const row of rows) {
-        const pendingWrites = await Promise.all(
-          JSON.parse(row.pending_writes).map(async (write) => {
-            return [
-              write.task_id,
-              write.channel,
-              await this.serde.loadsTyped(write.type ?? "json", write.value ?? "")
-            ];
-          })
-        );
-        const checkpoint = await this.serde.loadsTyped(row.type ?? "json", row.checkpoint);
-        if (checkpoint.v < 4 && row.parent_checkpoint_id != null) {
-          await this.migratePendingSends(checkpoint, row.thread_id, row.parent_checkpoint_id);
-        }
-        yield {
-          config: {
-            configurable: {
-              thread_id: row.thread_id,
-              checkpoint_ns: row.checkpoint_ns,
-              checkpoint_id: row.checkpoint_id
-            }
-          },
-          checkpoint,
-          metadata: await this.serde.loadsTyped(row.type ?? "json", row.metadata),
-          parentConfig: row.parent_checkpoint_id ? {
-            configurable: {
-              thread_id: row.thread_id,
-              checkpoint_ns: row.checkpoint_ns,
-              checkpoint_id: row.parent_checkpoint_id
-            }
-          } : void 0,
-          pendingWrites
-        };
-      }
-    }
-  }
-  async put(config, checkpoint, metadata) {
-    this.setup();
-    if (!config.configurable) {
-      throw new Error("Empty configuration supplied.");
-    }
-    const thread_id = config.configurable?.thread_id;
-    const checkpoint_ns = config.configurable?.checkpoint_ns ?? "";
-    const parent_checkpoint_id = config.configurable?.checkpoint_id;
-    if (!thread_id) {
-      throw new Error(`Missing "thread_id" field in passed "config.configurable".`);
-    }
-    const preparedCheckpoint = copyCheckpoint(checkpoint);
-    const [[type1, serializedCheckpoint], [type2, serializedMetadata]] = await Promise.all([
-      this.serde.dumpsTyped(preparedCheckpoint),
-      this.serde.dumpsTyped(metadata)
-    ]);
-    if (type1 !== type2) {
-      throw new Error("Failed to serialized checkpoint and metadata to the same type.");
-    }
-    const row = [
-      thread_id,
-      checkpoint_ns,
-      checkpoint.id,
-      parent_checkpoint_id,
-      type1,
-      serializedCheckpoint,
-      serializedMetadata
-    ];
-    this.db.prepare(
-      `INSERT OR REPLACE INTO checkpoints (thread_id, checkpoint_ns, checkpoint_id, parent_checkpoint_id, type, checkpoint, metadata) VALUES (?, ?, ?, ?, ?, ?, ?)`
-    ).run(...row);
-    return {
-      configurable: {
-        thread_id,
-        checkpoint_ns,
-        checkpoint_id: checkpoint.id
-      }
-    };
-  }
-  async putWrites(config, writes, taskId) {
-    this.setup();
-    if (!config.configurable) {
-      throw new Error("Empty configuration supplied.");
-    }
-    if (!config.configurable?.thread_id) {
-      throw new Error("Missing thread_id field in config.configurable.");
-    }
-    if (!config.configurable?.checkpoint_id) {
-      throw new Error("Missing checkpoint_id field in config.configurable.");
-    }
-    const stmt = this.db.prepare(`
-      INSERT OR REPLACE INTO writes
-      (thread_id, checkpoint_ns, checkpoint_id, task_id, idx, channel, type, value)
-      VALUES (?, ?, ?, ?, ?, ?, ?, ?)
-    `);
-    const transaction = this.db.transaction((rows2) => {
-      for (const row of rows2) {
-        stmt.run(...row);
-      }
-    });
-    const rows = await Promise.all(
-      writes.map(async (write, idx) => {
-        const [type, serializedWrite] = await this.serde.dumpsTyped(write[1]);
-        return [
-          config.configurable?.thread_id,
-          config.configurable?.checkpoint_ns,
-          config.configurable?.checkpoint_id,
-          taskId,
-          idx,
-          write[0],
-          type,
-          serializedWrite
-        ];
-      })
-    );
-    transaction(rows);
-  }
-  async deleteThread(threadId) {
-    const transaction = this.db.transaction(() => {
-      this.db.prepare(`DELETE FROM checkpoints WHERE thread_id = ?`).run(threadId);
-      this.db.prepare(`DELETE FROM writes WHERE thread_id = ?`).run(threadId);
-    });
-    transaction();
-  }
-  async migratePendingSends(checkpoint, threadId, parentCheckpointId) {
-    const { pending_sends } = this.db.prepare(
-      `
-      SELECT
-        checkpoint_id,
-        json_group_array(
-          json_object(
-            'type', ps.type,
-            'value', CAST(ps.value AS TEXT)
-          )
-        ) as pending_sends
-      FROM writes as ps
-      WHERE ps.thread_id = ?
-        AND ps.checkpoint_id = ?
-        AND ps.channel = '${TASKS}'
-      ORDER BY ps.idx
-      `
-    ).get(threadId, parentCheckpointId);
-    const mutableCheckpoint = checkpoint;
-    mutableCheckpoint.channel_values ??= {};
-    mutableCheckpoint.channel_values[TASKS] = await Promise.all(
-      JSON.parse(pending_sends).map(({ type, value }) => this.serde.loadsTyped(type, value))
-    );
-    mutableCheckpoint.channel_versions[TASKS] = Object.keys(checkpoint.channel_versions).length > 0 ? maxChannelVersion(...Object.values(checkpoint.channel_versions)) : this.getNextVersion(void 0);
-  }
-}
-
-export { SqliteSaver };
-//# sourceMappingURL=checkpoint-DnfHhpJw.js.map
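
For reference, the only export of the removed chunk is the SqliteSaver checkpoint saver reproduced above. A minimal usage sketch follows; the relative import path, the checkpoint literal, and the metadata values are illustrative assumptions (only fromConnString, put, and getTuple appear in the removed source), and better-sqlite3's native addon must be available for the bundled driver to load:

    // Minimal sketch, not verified against the removed build.
    import { SqliteSaver } from "./checkpoint-DnfHhpJw.js"; // hypothetical local import of the removed chunk

    // Open an in-process SQLite database via the bundled better-sqlite3 driver.
    const saver = SqliteSaver.fromConnString(":memory:");

    // Store one checkpoint for a thread, then read it back as a tuple.
    const config = { configurable: { thread_id: "thread-1", checkpoint_ns: "" } };
    const checkpoint = {
      v: 4,
      id: "checkpoint-1",
      ts: new Date().toISOString(),
      channel_values: {},
      channel_versions: {},
      versions_seen: {},
    };
    const saved = await saver.put(config, checkpoint, { source: "input", step: 0, parents: {} });
    const tuple = await saver.getTuple(saved);
    console.log(tuple?.checkpoint.id); // "checkpoint-1"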