@hotmeshio/hotmesh 0.5.1 → 0.5.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +9 -5
- package/build/package.json +16 -14
- package/build/services/hotmesh/index.d.ts +9 -11
- package/build/services/hotmesh/index.js +9 -11
- package/build/services/memflow/entity.d.ts +168 -4
- package/build/services/memflow/entity.js +177 -15
- package/build/services/memflow/workflow/index.d.ts +2 -4
- package/build/services/memflow/workflow/index.js +2 -4
- package/build/services/memflow/workflow/interruption.d.ts +6 -4
- package/build/services/memflow/workflow/interruption.js +6 -4
- package/build/services/memflow/workflow/waitFor.js +1 -0
- package/build/services/search/index.d.ts +10 -0
- package/build/services/search/providers/postgres/postgres.d.ts +12 -0
- package/build/services/search/providers/postgres/postgres.js +209 -0
- package/build/services/search/providers/redis/ioredis.d.ts +4 -0
- package/build/services/search/providers/redis/ioredis.js +13 -0
- package/build/services/search/providers/redis/redis.d.ts +4 -0
- package/build/services/search/providers/redis/redis.js +13 -0
- package/build/services/store/providers/postgres/kvsql.d.ts +13 -37
- package/build/services/store/providers/postgres/kvsql.js +2 -2
- package/build/services/store/providers/postgres/kvtypes/hash/basic.d.ts +16 -0
- package/build/services/store/providers/postgres/kvtypes/hash/basic.js +480 -0
- package/build/services/store/providers/postgres/kvtypes/hash/expire.d.ts +5 -0
- package/build/services/store/providers/postgres/kvtypes/hash/expire.js +33 -0
- package/build/services/store/providers/postgres/kvtypes/hash/index.d.ts +29 -0
- package/build/services/store/providers/postgres/kvtypes/hash/index.js +190 -0
- package/build/services/store/providers/postgres/kvtypes/hash/jsonb.d.ts +14 -0
- package/build/services/store/providers/postgres/kvtypes/hash/jsonb.js +699 -0
- package/build/services/store/providers/postgres/kvtypes/hash/scan.d.ts +10 -0
- package/build/services/store/providers/postgres/kvtypes/hash/scan.js +91 -0
- package/build/services/store/providers/postgres/kvtypes/hash/types.d.ts +19 -0
- package/build/services/store/providers/postgres/kvtypes/hash/types.js +2 -0
- package/build/services/store/providers/postgres/kvtypes/hash/utils.d.ts +18 -0
- package/build/services/store/providers/postgres/kvtypes/hash/utils.js +90 -0
- package/build/types/memflow.d.ts +1 -1
- package/build/types/meshdata.d.ts +1 -1
- package/package.json +16 -14
- package/build/services/store/providers/postgres/kvtypes/hash.d.ts +0 -60
- package/build/services/store/providers/postgres/kvtypes/hash.js +0 -1287
|
@@ -1,1287 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.hashModule = void 0;
|
|
4
|
-
const hashModule = (context) => ({
|
|
5
|
-
async hsetnx(key, field, value, multi, entity) {
|
|
6
|
-
const { sql, params } = this._hset(key, { [field]: value }, { nx: true, entity });
|
|
7
|
-
if (multi) {
|
|
8
|
-
multi.addCommand(sql, params, 'number');
|
|
9
|
-
return Promise.resolve(0);
|
|
10
|
-
}
|
|
11
|
-
else {
|
|
12
|
-
try {
|
|
13
|
-
const res = await context.pgClient.query(sql, params);
|
|
14
|
-
return res.rowCount;
|
|
15
|
-
}
|
|
16
|
-
catch (err) {
|
|
17
|
-
console.error('hsetnx error', err, sql, params);
|
|
18
|
-
return 0;
|
|
19
|
-
}
|
|
20
|
-
}
|
|
21
|
-
},
|
|
22
|
-
async hset(key, fields, options, multi) {
|
|
23
|
-
const { sql, params } = this._hset(key, fields, options);
|
|
24
|
-
if (multi) {
|
|
25
|
-
multi.addCommand(sql, params, 'number');
|
|
26
|
-
return Promise.resolve(0);
|
|
27
|
-
}
|
|
28
|
-
else {
|
|
29
|
-
try {
|
|
30
|
-
const res = await context.pgClient.query(sql, params);
|
|
31
|
-
// Check if this is a JSONB operation that returns a value
|
|
32
|
-
const isJsonbOperation = Object.keys(fields).some(k => k.startsWith('@context:') && k !== '@context');
|
|
33
|
-
// Special handling for @context:get operations
|
|
34
|
-
const isGetOperation = '@context:get' in fields;
|
|
35
|
-
if (isJsonbOperation && res.rows[0]?.new_value !== undefined) {
|
|
36
|
-
let returnValue;
|
|
37
|
-
try {
|
|
38
|
-
// Try to parse as JSON, fallback to string if it fails
|
|
39
|
-
returnValue = JSON.parse(res.rows[0].new_value);
|
|
40
|
-
}
|
|
41
|
-
catch {
|
|
42
|
-
returnValue = res.rows[0].new_value;
|
|
43
|
-
}
|
|
44
|
-
return returnValue;
|
|
45
|
-
}
|
|
46
|
-
return res.rowCount;
|
|
47
|
-
}
|
|
48
|
-
catch (err) {
|
|
49
|
-
console.error('hset error', err, sql, params);
|
|
50
|
-
return 0;
|
|
51
|
-
}
|
|
52
|
-
}
|
|
53
|
-
},
|
|
54
|
-
/**
|
|
55
|
-
* Derives the enumerated `type` value based on the field name when
|
|
56
|
-
* setting a field in a jobs table (a 'jobshash' table type).
|
|
57
|
-
*/
|
|
58
|
-
_deriveType(fieldName) {
|
|
59
|
-
if (fieldName === ':') {
|
|
60
|
-
return 'status';
|
|
61
|
-
}
|
|
62
|
-
else if (fieldName.startsWith('_')) {
|
|
63
|
-
return 'udata';
|
|
64
|
-
}
|
|
65
|
-
else if (fieldName.startsWith('-')) {
|
|
66
|
-
return fieldName.includes(',') ? 'hmark' : 'jmark';
|
|
67
|
-
}
|
|
68
|
-
else if (fieldName.length === 3) {
|
|
69
|
-
return 'jdata';
|
|
70
|
-
}
|
|
71
|
-
else if (fieldName.includes(',')) {
|
|
72
|
-
return 'adata';
|
|
73
|
-
}
|
|
74
|
-
else {
|
|
75
|
-
return 'other';
|
|
76
|
-
}
|
|
77
|
-
},
|
|
78
|
-
_hset(key, fields, options) {
|
|
79
|
-
const tableName = context.tableForKey(key, 'hash');
|
|
80
|
-
const isJobsTable = this.isJobsTable(tableName);
|
|
81
|
-
const fieldEntries = Object.entries(fields);
|
|
82
|
-
const isStatusOnly = fieldEntries.length === 1 && fieldEntries[0][0] === ':';
|
|
83
|
-
let targetTable = tableName; // Default table name
|
|
84
|
-
if (isJobsTable) {
|
|
85
|
-
if (isStatusOnly) {
|
|
86
|
-
// Target the jobs table directly when setting only the status field
|
|
87
|
-
targetTable = tableName;
|
|
88
|
-
}
|
|
89
|
-
else {
|
|
90
|
-
// For other fields, target the attributes table
|
|
91
|
-
targetTable = `${tableName}_attributes`;
|
|
92
|
-
}
|
|
93
|
-
}
|
|
94
|
-
const params = [];
|
|
95
|
-
let sql = '';
|
|
96
|
-
if (isJobsTable && isStatusOnly) {
|
|
97
|
-
if (options?.nx) {
|
|
98
|
-
// Use WHERE NOT EXISTS to enforce nx
|
|
99
|
-
sql = `
|
|
100
|
-
INSERT INTO ${targetTable} (id, key, status, entity)
|
|
101
|
-
SELECT gen_random_uuid(), $1, $2, $3
|
|
102
|
-
WHERE NOT EXISTS (
|
|
103
|
-
SELECT 1 FROM ${targetTable}
|
|
104
|
-
WHERE key = $1 AND is_live
|
|
105
|
-
)
|
|
106
|
-
RETURNING 1 as count
|
|
107
|
-
`;
|
|
108
|
-
params.push(key, fields[':'], options?.entity ?? null);
|
|
109
|
-
}
|
|
110
|
-
else {
|
|
111
|
-
// Update existing job or insert new one
|
|
112
|
-
sql = `
|
|
113
|
-
INSERT INTO ${targetTable} (id, key, status, entity)
|
|
114
|
-
VALUES (gen_random_uuid(), $1, $2, $3)
|
|
115
|
-
ON CONFLICT (key) WHERE is_live DO UPDATE SET status = EXCLUDED.status
|
|
116
|
-
RETURNING 1 as count
|
|
117
|
-
`;
|
|
118
|
-
params.push(key, fields[':'], options?.entity ?? null);
|
|
119
|
-
}
|
|
120
|
-
}
|
|
121
|
-
else if (isJobsTable && '@context' in fields) {
|
|
122
|
-
// Handle JSONB context updates - use the jobs table directly
|
|
123
|
-
const replayId = Object.keys(fields).find(k => k.includes('-') && k !== '@context');
|
|
124
|
-
if (options?.nx) {
|
|
125
|
-
if (replayId) {
|
|
126
|
-
sql = `
|
|
127
|
-
WITH inserted_job AS (
|
|
128
|
-
INSERT INTO ${tableName} (id, key, context)
|
|
129
|
-
SELECT gen_random_uuid(), $1, $2::jsonb
|
|
130
|
-
WHERE NOT EXISTS (
|
|
131
|
-
SELECT 1 FROM ${tableName}
|
|
132
|
-
WHERE key = $1 AND is_live
|
|
133
|
-
)
|
|
134
|
-
RETURNING id, context::text as new_value
|
|
135
|
-
),
|
|
136
|
-
replay_insert AS (
|
|
137
|
-
INSERT INTO ${tableName}_attributes (job_id, field, value, type)
|
|
138
|
-
SELECT id, $3, new_value, $4
|
|
139
|
-
FROM inserted_job
|
|
140
|
-
ON CONFLICT (job_id, field) DO UPDATE
|
|
141
|
-
SET value = EXCLUDED.value
|
|
142
|
-
RETURNING 1
|
|
143
|
-
)
|
|
144
|
-
SELECT new_value FROM inserted_job
|
|
145
|
-
`;
|
|
146
|
-
params.push(key, fields['@context'], replayId, this._deriveType(replayId));
|
|
147
|
-
}
|
|
148
|
-
else {
|
|
149
|
-
sql = `
|
|
150
|
-
INSERT INTO ${tableName} (id, key, context)
|
|
151
|
-
SELECT gen_random_uuid(), $1, $2::jsonb
|
|
152
|
-
WHERE NOT EXISTS (
|
|
153
|
-
SELECT 1 FROM ${tableName}
|
|
154
|
-
WHERE key = $1 AND is_live
|
|
155
|
-
)
|
|
156
|
-
RETURNING context::text as new_value
|
|
157
|
-
`;
|
|
158
|
-
params.push(key, fields['@context']);
|
|
159
|
-
}
|
|
160
|
-
}
|
|
161
|
-
else {
|
|
162
|
-
if (replayId) {
|
|
163
|
-
sql = `
|
|
164
|
-
WITH updated_job AS (
|
|
165
|
-
UPDATE ${tableName}
|
|
166
|
-
SET context = $2::jsonb
|
|
167
|
-
WHERE key = $1 AND is_live
|
|
168
|
-
RETURNING id, context::text as new_value
|
|
169
|
-
),
|
|
170
|
-
replay_insert AS (
|
|
171
|
-
INSERT INTO ${tableName}_attributes (job_id, field, value, type)
|
|
172
|
-
SELECT id, $3, new_value, $4
|
|
173
|
-
FROM updated_job
|
|
174
|
-
ON CONFLICT (job_id, field) DO UPDATE
|
|
175
|
-
SET value = EXCLUDED.value
|
|
176
|
-
RETURNING 1
|
|
177
|
-
)
|
|
178
|
-
SELECT new_value FROM updated_job
|
|
179
|
-
`;
|
|
180
|
-
params.push(key, fields['@context'], replayId, this._deriveType(replayId));
|
|
181
|
-
}
|
|
182
|
-
else {
|
|
183
|
-
sql = `
|
|
184
|
-
UPDATE ${tableName}
|
|
185
|
-
SET context = $2::jsonb
|
|
186
|
-
WHERE key = $1 AND is_live
|
|
187
|
-
RETURNING context::text as new_value
|
|
188
|
-
`;
|
|
189
|
-
params.push(key, fields['@context']);
|
|
190
|
-
}
|
|
191
|
-
}
|
|
192
|
-
}
|
|
193
|
-
else if (isJobsTable && '@context:merge' in fields) {
|
|
194
|
-
// Handle JSONB context merge - deep merge operation
|
|
195
|
-
const replayId = Object.keys(fields).find(k => k.includes('-') && k !== '@context:merge');
|
|
196
|
-
if (options?.nx) {
|
|
197
|
-
sql = `
|
|
198
|
-
INSERT INTO ${tableName} (id, key, context)
|
|
199
|
-
SELECT gen_random_uuid(), $1, $2::jsonb
|
|
200
|
-
WHERE NOT EXISTS (
|
|
201
|
-
SELECT 1 FROM ${tableName}
|
|
202
|
-
WHERE key = $1 AND is_live
|
|
203
|
-
)
|
|
204
|
-
RETURNING context::text as new_value
|
|
205
|
-
`;
|
|
206
|
-
params.push(key, fields['@context:merge']);
|
|
207
|
-
}
|
|
208
|
-
else {
|
|
209
|
-
if (replayId) {
|
|
210
|
-
// Store replay value and update context in one transaction with deep merge
|
|
211
|
-
sql = `
|
|
212
|
-
WITH updated_job AS (
|
|
213
|
-
UPDATE ${tableName}
|
|
214
|
-
SET context = (
|
|
215
|
-
WITH RECURSIVE deep_merge(original, new_data, result) AS (
|
|
216
|
-
-- Base case: start with the original and new data
|
|
217
|
-
SELECT
|
|
218
|
-
COALESCE(context, '{}'::jsonb) as original,
|
|
219
|
-
$2::jsonb as new_data,
|
|
220
|
-
COALESCE(context, '{}'::jsonb) as result
|
|
221
|
-
FROM ${tableName}
|
|
222
|
-
WHERE key = $1 AND is_live
|
|
223
|
-
),
|
|
224
|
-
merged_data AS (
|
|
225
|
-
SELECT
|
|
226
|
-
(
|
|
227
|
-
SELECT jsonb_object_agg(
|
|
228
|
-
key,
|
|
229
|
-
CASE
|
|
230
|
-
-- If both are objects, merge them recursively
|
|
231
|
-
WHEN jsonb_typeof(original -> key) = 'object' AND jsonb_typeof(new_data -> key) = 'object'
|
|
232
|
-
THEN (
|
|
233
|
-
WITH nested_keys AS (
|
|
234
|
-
SELECT unnest(ARRAY(SELECT jsonb_object_keys((original -> key) || (new_data -> key)))) as nested_key
|
|
235
|
-
)
|
|
236
|
-
SELECT jsonb_object_agg(
|
|
237
|
-
nested_key,
|
|
238
|
-
CASE
|
|
239
|
-
WHEN (new_data -> key) ? nested_key
|
|
240
|
-
THEN (new_data -> key) -> nested_key
|
|
241
|
-
ELSE (original -> key) -> nested_key
|
|
242
|
-
END
|
|
243
|
-
)
|
|
244
|
-
FROM nested_keys
|
|
245
|
-
)
|
|
246
|
-
-- If new data has this key, use new value
|
|
247
|
-
WHEN new_data ? key
|
|
248
|
-
THEN new_data -> key
|
|
249
|
-
-- Otherwise keep original value
|
|
250
|
-
ELSE original -> key
|
|
251
|
-
END
|
|
252
|
-
)
|
|
253
|
-
FROM (
|
|
254
|
-
SELECT unnest(ARRAY(SELECT jsonb_object_keys(original || new_data))) as key
|
|
255
|
-
) all_keys
|
|
256
|
-
) as merged_context
|
|
257
|
-
FROM deep_merge
|
|
258
|
-
)
|
|
259
|
-
SELECT merged_context FROM merged_data
|
|
260
|
-
)
|
|
261
|
-
WHERE key = $1 AND is_live
|
|
262
|
-
RETURNING id, context::text as new_value
|
|
263
|
-
),
|
|
264
|
-
replay_insert AS (
|
|
265
|
-
INSERT INTO ${tableName}_attributes (job_id, field, value, type)
|
|
266
|
-
SELECT id, $3, new_value, $4
|
|
267
|
-
FROM updated_job
|
|
268
|
-
ON CONFLICT (job_id, field) DO UPDATE
|
|
269
|
-
SET value = EXCLUDED.value
|
|
270
|
-
RETURNING 1
|
|
271
|
-
)
|
|
272
|
-
SELECT new_value FROM updated_job
|
|
273
|
-
`;
|
|
274
|
-
params.push(key, fields['@context:merge'], replayId, this._deriveType(replayId));
|
|
275
|
-
}
|
|
276
|
-
else {
|
|
277
|
-
sql = `
|
|
278
|
-
UPDATE ${tableName}
|
|
279
|
-
SET context = (
|
|
280
|
-
WITH merged_data AS (
|
|
281
|
-
SELECT
|
|
282
|
-
(
|
|
283
|
-
SELECT jsonb_object_agg(
|
|
284
|
-
key,
|
|
285
|
-
CASE
|
|
286
|
-
-- If both are objects, merge them recursively
|
|
287
|
-
WHEN jsonb_typeof(original -> key) = 'object' AND jsonb_typeof(new_data -> key) = 'object'
|
|
288
|
-
THEN (
|
|
289
|
-
WITH nested_keys AS (
|
|
290
|
-
SELECT unnest(ARRAY(SELECT jsonb_object_keys((original -> key) || (new_data -> key)))) as nested_key
|
|
291
|
-
)
|
|
292
|
-
SELECT jsonb_object_agg(
|
|
293
|
-
nested_key,
|
|
294
|
-
CASE
|
|
295
|
-
WHEN (new_data -> key) ? nested_key
|
|
296
|
-
THEN (new_data -> key) -> nested_key
|
|
297
|
-
ELSE (original -> key) -> nested_key
|
|
298
|
-
END
|
|
299
|
-
)
|
|
300
|
-
FROM nested_keys
|
|
301
|
-
)
|
|
302
|
-
-- If new data has this key, use new value
|
|
303
|
-
WHEN new_data ? key
|
|
304
|
-
THEN new_data -> key
|
|
305
|
-
-- Otherwise keep original value
|
|
306
|
-
ELSE original -> key
|
|
307
|
-
END
|
|
308
|
-
)
|
|
309
|
-
FROM (
|
|
310
|
-
SELECT unnest(ARRAY(SELECT jsonb_object_keys(original || new_data))) as key
|
|
311
|
-
) all_keys
|
|
312
|
-
) as merged_context
|
|
313
|
-
FROM (
|
|
314
|
-
SELECT
|
|
315
|
-
COALESCE(context, '{}'::jsonb) as original,
|
|
316
|
-
$2::jsonb as new_data
|
|
317
|
-
FROM ${tableName}
|
|
318
|
-
WHERE key = $1 AND is_live
|
|
319
|
-
) base_data
|
|
320
|
-
)
|
|
321
|
-
SELECT merged_context FROM merged_data
|
|
322
|
-
)
|
|
323
|
-
WHERE key = $1 AND is_live
|
|
324
|
-
RETURNING context::text as new_value
|
|
325
|
-
`;
|
|
326
|
-
params.push(key, fields['@context:merge']);
|
|
327
|
-
}
|
|
328
|
-
}
|
|
329
|
-
}
|
|
330
|
-
else if (isJobsTable && '@context:delete' in fields) {
|
|
331
|
-
// Handle JSONB context delete - remove path
|
|
332
|
-
const path = fields['@context:delete'];
|
|
333
|
-
const pathParts = path.split('.');
|
|
334
|
-
const replayId = Object.keys(fields).find(k => k.includes('-') && k !== '@context:delete');
|
|
335
|
-
if (pathParts.length === 1) {
|
|
336
|
-
// Simple key deletion
|
|
337
|
-
if (replayId) {
|
|
338
|
-
sql = `
|
|
339
|
-
WITH updated_job AS (
|
|
340
|
-
UPDATE ${tableName}
|
|
341
|
-
SET context = context - $2
|
|
342
|
-
WHERE key = $1 AND is_live
|
|
343
|
-
RETURNING id, context::text as new_value
|
|
344
|
-
),
|
|
345
|
-
replay_insert AS (
|
|
346
|
-
INSERT INTO ${tableName}_attributes (job_id, field, value, type)
|
|
347
|
-
SELECT id, $3, new_value, $4
|
|
348
|
-
FROM updated_job
|
|
349
|
-
ON CONFLICT (job_id, field) DO UPDATE
|
|
350
|
-
SET value = EXCLUDED.value
|
|
351
|
-
RETURNING 1
|
|
352
|
-
)
|
|
353
|
-
SELECT new_value FROM updated_job
|
|
354
|
-
`;
|
|
355
|
-
params.push(key, path, replayId, this._deriveType(replayId));
|
|
356
|
-
}
|
|
357
|
-
else {
|
|
358
|
-
sql = `
|
|
359
|
-
UPDATE ${tableName}
|
|
360
|
-
SET context = context - $2
|
|
361
|
-
WHERE key = $1 AND is_live
|
|
362
|
-
RETURNING context::text as new_value
|
|
363
|
-
`;
|
|
364
|
-
params.push(key, path);
|
|
365
|
-
}
|
|
366
|
-
}
|
|
367
|
-
else {
|
|
368
|
-
// Nested path deletion using jsonb_set with null to remove
|
|
369
|
-
if (replayId) {
|
|
370
|
-
sql = `
|
|
371
|
-
WITH updated_job AS (
|
|
372
|
-
UPDATE ${tableName}
|
|
373
|
-
SET context = context #- $2::text[]
|
|
374
|
-
WHERE key = $1 AND is_live
|
|
375
|
-
RETURNING id, context::text as new_value
|
|
376
|
-
),
|
|
377
|
-
replay_insert AS (
|
|
378
|
-
INSERT INTO ${tableName}_attributes (job_id, field, value, type)
|
|
379
|
-
SELECT id, $3, new_value, $4
|
|
380
|
-
FROM updated_job
|
|
381
|
-
ON CONFLICT (job_id, field) DO UPDATE
|
|
382
|
-
SET value = EXCLUDED.value
|
|
383
|
-
RETURNING 1
|
|
384
|
-
)
|
|
385
|
-
SELECT new_value FROM updated_job
|
|
386
|
-
`;
|
|
387
|
-
params.push(key, pathParts, replayId, this._deriveType(replayId));
|
|
388
|
-
}
|
|
389
|
-
else {
|
|
390
|
-
sql = `
|
|
391
|
-
UPDATE ${tableName}
|
|
392
|
-
SET context = context #- $2::text[]
|
|
393
|
-
WHERE key = $1 AND is_live
|
|
394
|
-
RETURNING context::text as new_value
|
|
395
|
-
`;
|
|
396
|
-
params.push(key, pathParts);
|
|
397
|
-
}
|
|
398
|
-
}
|
|
399
|
-
}
|
|
400
|
-
else if (isJobsTable && '@context:append' in fields) {
|
|
401
|
-
// Handle JSONB array append
|
|
402
|
-
const { path, value } = JSON.parse(fields['@context:append']);
|
|
403
|
-
const pathParts = path.split('.');
|
|
404
|
-
const replayId = Object.keys(fields).find(k => k.includes('-') && k !== '@context:append');
|
|
405
|
-
if (replayId) {
|
|
406
|
-
sql = `
|
|
407
|
-
WITH updated_job AS (
|
|
408
|
-
UPDATE ${tableName}
|
|
409
|
-
SET context = jsonb_set(
|
|
410
|
-
COALESCE(context, '{}'::jsonb),
|
|
411
|
-
$2::text[],
|
|
412
|
-
COALESCE(context #> $2::text[], '[]'::jsonb) || $3::jsonb,
|
|
413
|
-
true
|
|
414
|
-
)
|
|
415
|
-
WHERE key = $1 AND is_live
|
|
416
|
-
RETURNING id, (context #> $2::text[])::text as new_value
|
|
417
|
-
),
|
|
418
|
-
replay_insert AS (
|
|
419
|
-
INSERT INTO ${tableName}_attributes (job_id, field, value, type)
|
|
420
|
-
SELECT id, $4, new_value, $5
|
|
421
|
-
FROM updated_job
|
|
422
|
-
ON CONFLICT (job_id, field) DO UPDATE
|
|
423
|
-
SET value = EXCLUDED.value
|
|
424
|
-
RETURNING 1
|
|
425
|
-
)
|
|
426
|
-
SELECT new_value FROM updated_job
|
|
427
|
-
`;
|
|
428
|
-
params.push(key, pathParts, JSON.stringify([value]), replayId, this._deriveType(replayId));
|
|
429
|
-
}
|
|
430
|
-
else {
|
|
431
|
-
sql = `
|
|
432
|
-
UPDATE ${tableName}
|
|
433
|
-
SET context = jsonb_set(
|
|
434
|
-
COALESCE(context, '{}'::jsonb),
|
|
435
|
-
$2::text[],
|
|
436
|
-
COALESCE(context #> $2::text[], '[]'::jsonb) || $3::jsonb,
|
|
437
|
-
true
|
|
438
|
-
)
|
|
439
|
-
WHERE key = $1 AND is_live
|
|
440
|
-
RETURNING (context #> $2::text[])::text as new_value
|
|
441
|
-
`;
|
|
442
|
-
params.push(key, pathParts, JSON.stringify([value]));
|
|
443
|
-
}
|
|
444
|
-
}
|
|
445
|
-
else if (isJobsTable && '@context:prepend' in fields) {
|
|
446
|
-
// Handle JSONB array prepend
|
|
447
|
-
const { path, value } = JSON.parse(fields['@context:prepend']);
|
|
448
|
-
const pathParts = path.split('.');
|
|
449
|
-
const replayId = Object.keys(fields).find(k => k.includes('-') && k !== '@context:prepend');
|
|
450
|
-
if (replayId) {
|
|
451
|
-
sql = `
|
|
452
|
-
WITH updated_job AS (
|
|
453
|
-
UPDATE ${tableName}
|
|
454
|
-
SET context = jsonb_set(
|
|
455
|
-
COALESCE(context, '{}'::jsonb),
|
|
456
|
-
$2::text[],
|
|
457
|
-
$3::jsonb || COALESCE(context #> $2::text[], '[]'::jsonb),
|
|
458
|
-
true
|
|
459
|
-
)
|
|
460
|
-
WHERE key = $1 AND is_live
|
|
461
|
-
RETURNING id, (context #> $2::text[])::text as new_value
|
|
462
|
-
),
|
|
463
|
-
replay_insert AS (
|
|
464
|
-
INSERT INTO ${tableName}_attributes (job_id, field, value, type)
|
|
465
|
-
SELECT id, $4, new_value, $5
|
|
466
|
-
FROM updated_job
|
|
467
|
-
ON CONFLICT (job_id, field) DO UPDATE
|
|
468
|
-
SET value = EXCLUDED.value
|
|
469
|
-
RETURNING 1
|
|
470
|
-
)
|
|
471
|
-
SELECT new_value FROM updated_job
|
|
472
|
-
`;
|
|
473
|
-
params.push(key, pathParts, JSON.stringify([value]), replayId, this._deriveType(replayId));
|
|
474
|
-
}
|
|
475
|
-
else {
|
|
476
|
-
sql = `
|
|
477
|
-
UPDATE ${tableName}
|
|
478
|
-
SET context = jsonb_set(
|
|
479
|
-
COALESCE(context, '{}'::jsonb),
|
|
480
|
-
$2::text[],
|
|
481
|
-
$3::jsonb || COALESCE(context #> $2::text[], '[]'::jsonb),
|
|
482
|
-
true
|
|
483
|
-
)
|
|
484
|
-
WHERE key = $1 AND is_live
|
|
485
|
-
RETURNING (context #> $2::text[])::text as new_value
|
|
486
|
-
`;
|
|
487
|
-
params.push(key, pathParts, JSON.stringify([value]));
|
|
488
|
-
}
|
|
489
|
-
}
|
|
490
|
-
else if (isJobsTable && '@context:remove' in fields) {
|
|
491
|
-
// Handle JSONB array remove by index
|
|
492
|
-
const { path, index } = JSON.parse(fields['@context:remove']);
|
|
493
|
-
const pathParts = path.split('.');
|
|
494
|
-
const replayId = Object.keys(fields).find(k => k.includes('-') && k !== '@context:remove');
|
|
495
|
-
if (replayId) {
|
|
496
|
-
sql = `
|
|
497
|
-
WITH updated_job AS (
|
|
498
|
-
UPDATE ${tableName}
|
|
499
|
-
SET context = jsonb_set(
|
|
500
|
-
COALESCE(context, '{}'::jsonb),
|
|
501
|
-
$2::text[],
|
|
502
|
-
(
|
|
503
|
-
SELECT jsonb_agg(value)
|
|
504
|
-
FROM (
|
|
505
|
-
SELECT value, row_number() OVER () - 1 as idx
|
|
506
|
-
FROM jsonb_array_elements(COALESCE(context #> $2::text[], '[]'::jsonb))
|
|
507
|
-
) t
|
|
508
|
-
WHERE idx != $3
|
|
509
|
-
),
|
|
510
|
-
true
|
|
511
|
-
)
|
|
512
|
-
WHERE key = $1 AND is_live
|
|
513
|
-
RETURNING id, (context #> $2::text[])::text as new_value
|
|
514
|
-
),
|
|
515
|
-
replay_insert AS (
|
|
516
|
-
INSERT INTO ${tableName}_attributes (job_id, field, value, type)
|
|
517
|
-
SELECT id, $4, new_value, $5
|
|
518
|
-
FROM updated_job
|
|
519
|
-
ON CONFLICT (job_id, field) DO UPDATE
|
|
520
|
-
SET value = EXCLUDED.value
|
|
521
|
-
RETURNING 1
|
|
522
|
-
)
|
|
523
|
-
SELECT new_value FROM updated_job
|
|
524
|
-
`;
|
|
525
|
-
params.push(key, pathParts, index, replayId, this._deriveType(replayId));
|
|
526
|
-
}
|
|
527
|
-
else {
|
|
528
|
-
sql = `
|
|
529
|
-
UPDATE ${tableName}
|
|
530
|
-
SET context = jsonb_set(
|
|
531
|
-
COALESCE(context, '{}'::jsonb),
|
|
532
|
-
$2::text[],
|
|
533
|
-
(
|
|
534
|
-
SELECT jsonb_agg(value)
|
|
535
|
-
FROM (
|
|
536
|
-
SELECT value, row_number() OVER () - 1 as idx
|
|
537
|
-
FROM jsonb_array_elements(COALESCE(context #> $2::text[], '[]'::jsonb))
|
|
538
|
-
) t
|
|
539
|
-
WHERE idx != $3
|
|
540
|
-
),
|
|
541
|
-
true
|
|
542
|
-
)
|
|
543
|
-
WHERE key = $1 AND is_live
|
|
544
|
-
RETURNING (context #> $2::text[])::text as new_value
|
|
545
|
-
`;
|
|
546
|
-
params.push(key, pathParts, index);
|
|
547
|
-
}
|
|
548
|
-
}
|
|
549
|
-
else if (isJobsTable && '@context:increment' in fields) {
|
|
550
|
-
// Handle JSONB numeric increment
|
|
551
|
-
const { path, value } = JSON.parse(fields['@context:increment']);
|
|
552
|
-
const pathParts = path.split('.');
|
|
553
|
-
const replayId = Object.keys(fields).find(k => k.includes('-') && k !== '@context:increment');
|
|
554
|
-
if (replayId) {
|
|
555
|
-
sql = `
|
|
556
|
-
WITH updated_job AS (
|
|
557
|
-
UPDATE ${tableName}
|
|
558
|
-
SET context = jsonb_set(
|
|
559
|
-
COALESCE(context, '{}'::jsonb),
|
|
560
|
-
$2::text[],
|
|
561
|
-
to_jsonb((COALESCE((context #> $2::text[])::text::numeric, 0) + $3)::numeric),
|
|
562
|
-
true
|
|
563
|
-
)
|
|
564
|
-
WHERE key = $1 AND is_live
|
|
565
|
-
RETURNING id, (context #> $2::text[])::text as new_value
|
|
566
|
-
),
|
|
567
|
-
replay_insert AS (
|
|
568
|
-
INSERT INTO ${tableName}_attributes (job_id, field, value, type)
|
|
569
|
-
SELECT id, $4, new_value, $5
|
|
570
|
-
FROM updated_job
|
|
571
|
-
ON CONFLICT (job_id, field) DO UPDATE
|
|
572
|
-
SET value = EXCLUDED.value
|
|
573
|
-
RETURNING 1
|
|
574
|
-
)
|
|
575
|
-
SELECT new_value FROM updated_job
|
|
576
|
-
`;
|
|
577
|
-
params.push(key, pathParts, value, replayId, this._deriveType(replayId));
|
|
578
|
-
}
|
|
579
|
-
else {
|
|
580
|
-
sql = `
|
|
581
|
-
UPDATE ${tableName}
|
|
582
|
-
SET context = jsonb_set(
|
|
583
|
-
COALESCE(context, '{}'::jsonb),
|
|
584
|
-
$2::text[],
|
|
585
|
-
to_jsonb((COALESCE((context #> $2::text[])::text::numeric, 0) + $3)::numeric),
|
|
586
|
-
true
|
|
587
|
-
)
|
|
588
|
-
WHERE key = $1 AND is_live
|
|
589
|
-
RETURNING (context #> $2::text[])::text as new_value
|
|
590
|
-
`;
|
|
591
|
-
params.push(key, pathParts, value);
|
|
592
|
-
}
|
|
593
|
-
}
|
|
594
|
-
else if (isJobsTable && '@context:toggle' in fields) {
|
|
595
|
-
// Handle JSONB boolean toggle
|
|
596
|
-
const path = fields['@context:toggle'];
|
|
597
|
-
const pathParts = path.split('.');
|
|
598
|
-
const replayId = Object.keys(fields).find(k => k.includes('-') && k !== '@context:toggle');
|
|
599
|
-
if (replayId) {
|
|
600
|
-
sql = `
|
|
601
|
-
WITH updated_job AS (
|
|
602
|
-
UPDATE ${tableName}
|
|
603
|
-
SET context = jsonb_set(
|
|
604
|
-
COALESCE(context, '{}'::jsonb),
|
|
605
|
-
$2::text[],
|
|
606
|
-
to_jsonb(NOT COALESCE((context #> $2::text[])::text::boolean, false)),
|
|
607
|
-
true
|
|
608
|
-
)
|
|
609
|
-
WHERE key = $1 AND is_live
|
|
610
|
-
RETURNING id, (context #> $2::text[])::text as new_value
|
|
611
|
-
),
|
|
612
|
-
replay_insert AS (
|
|
613
|
-
INSERT INTO ${tableName}_attributes (job_id, field, value, type)
|
|
614
|
-
SELECT id, $3, new_value, $4
|
|
615
|
-
FROM updated_job
|
|
616
|
-
ON CONFLICT (job_id, field) DO UPDATE
|
|
617
|
-
SET value = EXCLUDED.value
|
|
618
|
-
RETURNING 1
|
|
619
|
-
)
|
|
620
|
-
SELECT new_value FROM updated_job
|
|
621
|
-
`;
|
|
622
|
-
params.push(key, pathParts, replayId, this._deriveType(replayId));
|
|
623
|
-
}
|
|
624
|
-
else {
|
|
625
|
-
sql = `
|
|
626
|
-
UPDATE ${tableName}
|
|
627
|
-
SET context = jsonb_set(
|
|
628
|
-
COALESCE(context, '{}'::jsonb),
|
|
629
|
-
$2::text[],
|
|
630
|
-
to_jsonb(NOT COALESCE((context #> $2::text[])::text::boolean, false)),
|
|
631
|
-
true
|
|
632
|
-
)
|
|
633
|
-
WHERE key = $1 AND is_live
|
|
634
|
-
RETURNING (context #> $2::text[])::text as new_value
|
|
635
|
-
`;
|
|
636
|
-
params.push(key, pathParts);
|
|
637
|
-
}
|
|
638
|
-
}
|
|
639
|
-
else if (isJobsTable && '@context:setIfNotExists' in fields) {
|
|
640
|
-
// Handle JSONB conditional set
|
|
641
|
-
const { path, value } = JSON.parse(fields['@context:setIfNotExists']);
|
|
642
|
-
const pathParts = path.split('.');
|
|
643
|
-
const replayId = Object.keys(fields).find(k => k.includes('-') && k !== '@context:setIfNotExists');
|
|
644
|
-
if (replayId) {
|
|
645
|
-
sql = `
|
|
646
|
-
WITH updated_job AS (
|
|
647
|
-
UPDATE ${tableName}
|
|
648
|
-
SET context = CASE
|
|
649
|
-
WHEN context #> $2::text[] IS NULL THEN
|
|
650
|
-
jsonb_set(COALESCE(context, '{}'::jsonb), $2::text[], $3::jsonb, true)
|
|
651
|
-
ELSE context
|
|
652
|
-
END
|
|
653
|
-
WHERE key = $1 AND is_live
|
|
654
|
-
RETURNING id, (context #> $2::text[])::text as new_value
|
|
655
|
-
),
|
|
656
|
-
replay_insert AS (
|
|
657
|
-
INSERT INTO ${tableName}_attributes (job_id, field, value, type)
|
|
658
|
-
SELECT id, $4, new_value, $5
|
|
659
|
-
FROM updated_job
|
|
660
|
-
ON CONFLICT (job_id, field) DO UPDATE
|
|
661
|
-
SET value = EXCLUDED.value
|
|
662
|
-
RETURNING 1
|
|
663
|
-
)
|
|
664
|
-
SELECT new_value FROM updated_job
|
|
665
|
-
`;
|
|
666
|
-
params.push(key, pathParts, JSON.stringify(value), replayId, this._deriveType(replayId));
|
|
667
|
-
}
|
|
668
|
-
else {
|
|
669
|
-
sql = `
|
|
670
|
-
UPDATE ${tableName}
|
|
671
|
-
SET context = CASE
|
|
672
|
-
WHEN context #> $2::text[] IS NULL THEN
|
|
673
|
-
jsonb_set(COALESCE(context, '{}'::jsonb), $2::text[], $3::jsonb, true)
|
|
674
|
-
ELSE context
|
|
675
|
-
END
|
|
676
|
-
WHERE key = $1 AND is_live
|
|
677
|
-
RETURNING (context #> $2::text[])::text as new_value
|
|
678
|
-
`;
|
|
679
|
-
params.push(key, pathParts, JSON.stringify(value));
|
|
680
|
-
}
|
|
681
|
-
}
|
|
682
|
-
else if (isJobsTable && Object.keys(fields).some(k => k.startsWith('@context:get:'))) {
|
|
683
|
-
// Handle JSONB path extraction for get operations
|
|
684
|
-
const getField = Object.keys(fields).find(k => k.startsWith('@context:get:'));
|
|
685
|
-
const pathKey = getField.replace('@context:get:', '');
|
|
686
|
-
const pathParts = JSON.parse(fields[getField]);
|
|
687
|
-
// Extract the specific path and store it as a temporary field
|
|
688
|
-
sql = `
|
|
689
|
-
INSERT INTO ${tableName}_attributes (job_id, field, value, type)
|
|
690
|
-
SELECT
|
|
691
|
-
job.id,
|
|
692
|
-
$2,
|
|
693
|
-
COALESCE((job.context #> $3::text[])::text, 'null'),
|
|
694
|
-
$4
|
|
695
|
-
FROM (
|
|
696
|
-
SELECT id, context FROM ${tableName} WHERE key = $1 AND is_live
|
|
697
|
-
) AS job
|
|
698
|
-
ON CONFLICT (job_id, field) DO UPDATE
|
|
699
|
-
SET value = COALESCE((
|
|
700
|
-
SELECT context #> $3::text[]
|
|
701
|
-
FROM ${tableName}
|
|
702
|
-
WHERE key = $1 AND is_live
|
|
703
|
-
)::text, 'null')
|
|
704
|
-
RETURNING 1 as count
|
|
705
|
-
`;
|
|
706
|
-
params.push(key, getField, pathParts, this._deriveType(getField));
|
|
707
|
-
}
|
|
708
|
-
else if (isJobsTable && '@context:get' in fields) {
|
|
709
|
-
// Handle JSONB context get operation with replay storage
|
|
710
|
-
const path = fields['@context:get'];
|
|
711
|
-
const replayId = Object.keys(fields).find(k => k.includes('-') && k !== '@context:get');
|
|
712
|
-
if (path === '') {
|
|
713
|
-
// Get entire context
|
|
714
|
-
if (replayId) {
|
|
715
|
-
sql = `
|
|
716
|
-
WITH job_data AS (
|
|
717
|
-
SELECT id, context::text as context_value
|
|
718
|
-
FROM ${tableName}
|
|
719
|
-
WHERE key = $1 AND is_live
|
|
720
|
-
),
|
|
721
|
-
replay_insert AS (
|
|
722
|
-
INSERT INTO ${tableName}_attributes (job_id, field, value, type)
|
|
723
|
-
SELECT id, $2, context_value, $3
|
|
724
|
-
FROM job_data
|
|
725
|
-
WHERE id IS NOT NULL
|
|
726
|
-
ON CONFLICT (job_id, field) DO UPDATE
|
|
727
|
-
SET value = EXCLUDED.value
|
|
728
|
-
RETURNING 1
|
|
729
|
-
)
|
|
730
|
-
SELECT context_value as new_value FROM job_data
|
|
731
|
-
`;
|
|
732
|
-
params.push(key, replayId, this._deriveType(replayId));
|
|
733
|
-
}
|
|
734
|
-
else {
|
|
735
|
-
sql = `
|
|
736
|
-
SELECT context::text as new_value
|
|
737
|
-
FROM ${tableName}
|
|
738
|
-
WHERE key = $1 AND is_live
|
|
739
|
-
`;
|
|
740
|
-
params.push(key);
|
|
741
|
-
}
|
|
742
|
-
}
|
|
743
|
-
else {
|
|
744
|
-
// Get specific path
|
|
745
|
-
const pathParts = path.split('.');
|
|
746
|
-
if (replayId) {
|
|
747
|
-
sql = `
|
|
748
|
-
WITH job_data AS (
|
|
749
|
-
SELECT id, COALESCE((context #> $2::text[])::text, 'null') as path_value
|
|
750
|
-
FROM ${tableName}
|
|
751
|
-
WHERE key = $1 AND is_live
|
|
752
|
-
),
|
|
753
|
-
replay_insert AS (
|
|
754
|
-
INSERT INTO ${tableName}_attributes (job_id, field, value, type)
|
|
755
|
-
SELECT id, $3, path_value, $4
|
|
756
|
-
FROM job_data
|
|
757
|
-
WHERE id IS NOT NULL
|
|
758
|
-
ON CONFLICT (job_id, field) DO UPDATE
|
|
759
|
-
SET value = EXCLUDED.value
|
|
760
|
-
RETURNING 1
|
|
761
|
-
)
|
|
762
|
-
SELECT path_value as new_value FROM job_data
|
|
763
|
-
`;
|
|
764
|
-
params.push(key, pathParts, replayId, this._deriveType(replayId));
|
|
765
|
-
}
|
|
766
|
-
else {
|
|
767
|
-
sql = `
|
|
768
|
-
SELECT COALESCE((context #> $2::text[])::text, 'null') as new_value
|
|
769
|
-
FROM ${tableName}
|
|
770
|
-
WHERE key = $1 AND is_live
|
|
771
|
-
`;
|
|
772
|
-
params.push(key, pathParts);
|
|
773
|
-
}
|
|
774
|
-
}
|
|
775
|
-
}
|
|
776
|
-
else if (isJobsTable) {
|
|
777
|
-
const schemaName = context.safeName(context.appId);
|
|
778
|
-
const conflictAction = options?.nx
|
|
779
|
-
? 'ON CONFLICT DO NOTHING'
|
|
780
|
-
: `ON CONFLICT (job_id, field) DO UPDATE SET value = EXCLUDED.value`;
|
|
781
|
-
const placeholders = fieldEntries
|
|
782
|
-
.map(([field, value], index) => {
|
|
783
|
-
const baseIndex = index * 3 + 2; // Adjusted baseIndex
|
|
784
|
-
params.push(field, value, this._deriveType(field));
|
|
785
|
-
return `($${baseIndex}, $${baseIndex + 1}, $${baseIndex + 2}::${schemaName}.type_enum)`;
|
|
786
|
-
})
|
|
787
|
-
.join(', ');
|
|
788
|
-
sql = `
|
|
789
|
-
INSERT INTO ${targetTable} (job_id, field, value, type)
|
|
790
|
-
SELECT
|
|
791
|
-
job.id,
|
|
792
|
-
vals.field,
|
|
793
|
-
vals.value,
|
|
794
|
-
vals.type
|
|
795
|
-
FROM (
|
|
796
|
-
SELECT id FROM ${tableName} WHERE key = $1 AND is_live
|
|
797
|
-
) AS job
|
|
798
|
-
CROSS JOIN (
|
|
799
|
-
VALUES ${placeholders}
|
|
800
|
-
) AS vals(field, value, type)
|
|
801
|
-
${conflictAction}
|
|
802
|
-
RETURNING 1 as count
|
|
803
|
-
`;
|
|
804
|
-
params.unshift(key); // Add key as first parameter
|
|
805
|
-
}
|
|
806
|
-
else {
|
|
807
|
-
// For non-jobs tables
|
|
808
|
-
const conflictAction = options?.nx
|
|
809
|
-
? 'ON CONFLICT DO NOTHING'
|
|
810
|
-
: `ON CONFLICT (key, field) DO UPDATE SET value = EXCLUDED.value`;
|
|
811
|
-
const placeholders = fieldEntries
|
|
812
|
-
.map(([field, value], index) => {
|
|
813
|
-
params.push(field, value);
|
|
814
|
-
return `($1, $${index * 2 + 2}, $${index * 2 + 3})`;
|
|
815
|
-
})
|
|
816
|
-
.join(', ');
|
|
817
|
-
sql = `
|
|
818
|
-
INSERT INTO ${targetTable} (key, field, value)
|
|
819
|
-
VALUES ${placeholders}
|
|
820
|
-
${conflictAction}
|
|
821
|
-
RETURNING 1 as count
|
|
822
|
-
`;
|
|
823
|
-
params.unshift(key); // Add key as the first parameter
|
|
824
|
-
}
|
|
825
|
-
return { sql, params };
|
|
826
|
-
},
|
|
827
|
-
async hget(key, field, multi) {
|
|
828
|
-
const { sql, params } = this._hget(key, field);
|
|
829
|
-
if (multi) {
|
|
830
|
-
multi.addCommand(sql, params, 'string', (rows) => {
|
|
831
|
-
return rows[0]?.value || null;
|
|
832
|
-
});
|
|
833
|
-
return Promise.resolve(null);
|
|
834
|
-
}
|
|
835
|
-
else {
|
|
836
|
-
const res = await context.pgClient.query(sql, params);
|
|
837
|
-
return res.rows[0]?.value || null;
|
|
838
|
-
}
|
|
839
|
-
},
|
|
840
|
-
/**
 * Builds the SELECT statement for a single hash field.
 *
 * Jobs tables store the reserved fields ':' (status) and '@' (context) as
 * columns on the live jobs row itself; every other field lives in the
 * companion `<table>_attributes` table keyed by the live job's id.
 * Non-jobs tables are plain key/field/value stores.
 *
 * @returns {{sql: string, params: Array}} statement + bind parameters
 */
_hget(key, field) {
  const tableName = context.tableForKey(key, 'hash');
  const isJobsTable = this.isJobsTable(tableName);
  const isStatusField = field === ':'; // ':' maps to the jobs.status column
  const isContextField = field === '@'; // '@' maps to the jobs.context column
  if (isJobsTable && isStatusField) {
    // Fetch status from jobs table (cast to text so all branches return strings)
    const sql = `
      SELECT status::text AS value
      FROM ${tableName}
      WHERE key = $1 AND is_live
    `;
    return { sql, params: [key] };
  }
  else if (isJobsTable && isContextField) {
    // Fetch context from jobs table
    const sql = `
      SELECT context::text AS value
      FROM ${tableName}
      WHERE key = $1 AND is_live
    `;
    return { sql, params: [key] };
  }
  else if (isJobsTable) {
    // Fetch a specific field from the attributes table for a job;
    // the subquery resolves the live job row's id for this key
    const sql = `
      SELECT value
      FROM ${tableName}_attributes
      WHERE job_id = (
        SELECT id FROM ${tableName}
        WHERE key = $1 AND is_live
      )
      AND field = $2
    `;
    return { sql, params: [key, field] };
  }
  else {
    // Non-jobs tables: direct key/field lookup; appendExpiryClause adds the
    // table's TTL predicate (if any) to the WHERE clause
    const baseQuery = `
      SELECT value
      FROM ${tableName}
      WHERE key = $1 AND field = $2
    `;
    const sql = context.appendExpiryClause(baseQuery, tableName);
    return { sql, params: [key, field] };
  }
},
|
|
887
|
-
async hdel(key, fields, multi) {
|
|
888
|
-
// Ensure fields is an array
|
|
889
|
-
if (!Array.isArray(fields)) {
|
|
890
|
-
fields = [fields];
|
|
891
|
-
}
|
|
892
|
-
const { sql, params } = this._hdel(key, fields);
|
|
893
|
-
if (multi) {
|
|
894
|
-
multi.addCommand(sql, params, 'number');
|
|
895
|
-
return Promise.resolve(0);
|
|
896
|
-
}
|
|
897
|
-
else {
|
|
898
|
-
const res = await context.pgClient.query(sql, params);
|
|
899
|
-
return Number(res.rows[0]?.count || 0);
|
|
900
|
-
}
|
|
901
|
-
},
|
|
902
|
-
/**
 * Builds the DELETE statement for one or more hash fields.
 *
 * For jobs tables the rows live in `<table>_attributes`, scoped to the live
 * job's id; for other tables the delete targets the table directly by key.
 * Both variants wrap the DELETE in a CTE so a single row with the deleted
 * count comes back.
 *
 * @param {string} key - hash key
 * @param {string[]} fields - field names to delete (already normalized)
 * @returns {{sql: string, params: Array}} statement + bind parameters
 */
_hdel(key, fields) {
  const tableName = context.tableForKey(key, 'hash');
  const isJobsTable = this.isJobsTable(tableName);
  const targetTable = isJobsTable ? `${tableName}_attributes` : tableName;
  // $1 is the key, so field placeholders start at $2
  const fieldPlaceholders = fields.map((_, i) => `$${i + 2}`).join(', ');
  const params = [key, ...fields];
  if (isJobsTable) {
    const sql = `
      WITH valid_job AS (
        SELECT id
        FROM ${tableName}
        WHERE key = $1 AND is_live
      ),
      deleted AS (
        DELETE FROM ${targetTable}
        WHERE job_id IN (SELECT id FROM valid_job) AND field IN (${fieldPlaceholders})
        RETURNING 1
      )
      SELECT COUNT(*) as count FROM deleted
    `;
    return { sql, params };
  }
  else {
    const sql = `
      WITH deleted AS (
        DELETE FROM ${targetTable}
        WHERE key = $1 AND field IN (${fieldPlaceholders})
        RETURNING 1
      )
      SELECT COUNT(*) as count FROM deleted
    `;
    return { sql, params };
  }
},
|
|
936
|
-
async hmget(key, fields, multi) {
|
|
937
|
-
const { sql, params } = this._hmget(key, fields);
|
|
938
|
-
const processRows = (rows) => {
|
|
939
|
-
let statusValue = null;
|
|
940
|
-
let contextValue = null;
|
|
941
|
-
const fieldValueMap = new Map();
|
|
942
|
-
for (const row of rows) {
|
|
943
|
-
if (row.field === 'status') {
|
|
944
|
-
statusValue = row.value;
|
|
945
|
-
fieldValueMap.set(':', row.value); // Map status to ':'
|
|
946
|
-
}
|
|
947
|
-
else if (row.field === 'context') {
|
|
948
|
-
contextValue = row.value;
|
|
949
|
-
fieldValueMap.set('@', row.value); // Map context to '@'
|
|
950
|
-
}
|
|
951
|
-
else if (row.field !== ':' && row.field !== '@') {
|
|
952
|
-
// Ignore old format fields
|
|
953
|
-
fieldValueMap.set(row.field, row.value);
|
|
954
|
-
}
|
|
955
|
-
}
|
|
956
|
-
// Ensure ':' and '@' are present in the map with their values
|
|
957
|
-
if (statusValue !== null) {
|
|
958
|
-
fieldValueMap.set(':', statusValue);
|
|
959
|
-
}
|
|
960
|
-
if (contextValue !== null) {
|
|
961
|
-
fieldValueMap.set('@', contextValue);
|
|
962
|
-
}
|
|
963
|
-
// Map requested fields to their values, or null if not present
|
|
964
|
-
return fields.map((field) => fieldValueMap.get(field) || null);
|
|
965
|
-
};
|
|
966
|
-
if (multi) {
|
|
967
|
-
multi.addCommand(sql, params, 'array', (rows) => {
|
|
968
|
-
return processRows(rows);
|
|
969
|
-
});
|
|
970
|
-
return Promise.resolve([]);
|
|
971
|
-
}
|
|
972
|
-
else {
|
|
973
|
-
try {
|
|
974
|
-
const res = await context.pgClient.query(sql, params);
|
|
975
|
-
return processRows(res.rows);
|
|
976
|
-
}
|
|
977
|
-
catch (err) {
|
|
978
|
-
console.error('hmget error', err, sql, params);
|
|
979
|
-
throw err;
|
|
980
|
-
}
|
|
981
|
-
}
|
|
982
|
-
},
|
|
983
|
-
/**
 * Builds the SELECT statement for multiple hash fields.
 *
 * For jobs tables the result UNIONs three sources: the status column, the
 * context column (both always included, regardless of `fields`), and any
 * matching rows from `<table>_attributes`. The caller (hmget) remaps
 * 'status'/'context' to ':'/'@' and aligns values with the requested fields.
 *
 * @param {string} key - hash key
 * @param {string[]} fields - attribute field names to match via ANY($2)
 * @returns {{sql: string, params: Array}} statement + bind parameters
 */
_hmget(key, fields) {
  const tableName = context.tableForKey(key, 'hash');
  const isJobsTable = this.isJobsTable(tableName);
  if (isJobsTable) {
    // NOTE(review): this branch checks `expired_at IS NULL OR expired_at > NOW()`
    // while sibling builders use the `is_live` predicate — confirm the two are
    // equivalent for this table.
    const sql = `
      WITH valid_job AS (
        SELECT id, status, context
        FROM ${tableName}
        WHERE key = $1
        AND (expired_at IS NULL OR expired_at > NOW())
        LIMIT 1
      ),
      job_fields AS (
        -- Include both status and context fields from jobs table
        SELECT
          'status' AS field,
          status::text AS value
        FROM valid_job

        UNION ALL

        SELECT
          'context' AS field,
          context::text AS value
        FROM valid_job

        UNION ALL

        -- Get attribute fields with proper type handling
        SELECT
          a.field,
          a.value
        FROM ${tableName}_attributes a
        JOIN valid_job j ON j.id = a.job_id
        WHERE a.field = ANY($2::text[])
      )
      SELECT field, value
      FROM job_fields
      ORDER BY field
    `;
    return { sql, params: [key, fields] };
  }
  else {
    // Non-job tables logic remains the same: direct key lookup with the
    // table's TTL predicate appended (if any)
    const baseQuery = `
      SELECT field, value
      FROM ${tableName}
      WHERE key = $1
      AND field = ANY($2::text[])
    `;
    const sql = context.appendExpiryClause(baseQuery, tableName);
    return { sql, params: [key, fields] };
  }
},
|
|
1037
|
-
async hgetall(key, multi) {
|
|
1038
|
-
const tableName = context.tableForKey(key, 'hash');
|
|
1039
|
-
const isJobsTable = this.isJobsTable(tableName);
|
|
1040
|
-
const { sql, params } = this._hgetall(key);
|
|
1041
|
-
const processRows = (rows) => {
|
|
1042
|
-
const result = {};
|
|
1043
|
-
for (const row of rows) {
|
|
1044
|
-
// Map status to ':' and context to '@'
|
|
1045
|
-
// Ignore old format fields
|
|
1046
|
-
if (isJobsTable) {
|
|
1047
|
-
if (row.field === 'status') {
|
|
1048
|
-
result[':'] = row.value;
|
|
1049
|
-
}
|
|
1050
|
-
else if (row.field === 'context') {
|
|
1051
|
-
result['@'] = row.value;
|
|
1052
|
-
}
|
|
1053
|
-
else if (row.field !== ':' && row.field !== '@') {
|
|
1054
|
-
result[row.field] = row.value;
|
|
1055
|
-
}
|
|
1056
|
-
}
|
|
1057
|
-
else {
|
|
1058
|
-
result[row.field] = row.value;
|
|
1059
|
-
}
|
|
1060
|
-
}
|
|
1061
|
-
return result;
|
|
1062
|
-
};
|
|
1063
|
-
if (multi) {
|
|
1064
|
-
multi.addCommand(sql, params, 'object', (rows) => {
|
|
1065
|
-
return processRows(rows);
|
|
1066
|
-
});
|
|
1067
|
-
return Promise.resolve({});
|
|
1068
|
-
}
|
|
1069
|
-
else {
|
|
1070
|
-
try {
|
|
1071
|
-
const res = await context.pgClient.query(sql, params);
|
|
1072
|
-
return processRows(res.rows);
|
|
1073
|
-
}
|
|
1074
|
-
catch (err) {
|
|
1075
|
-
console.error('hgetall error', err, sql, params);
|
|
1076
|
-
throw err;
|
|
1077
|
-
}
|
|
1078
|
-
}
|
|
1079
|
-
},
|
|
1080
|
-
/**
 * Builds the SELECT statement that returns every field/value pair for a key.
 *
 * Jobs tables yield the status and context columns (as 'status'/'context'
 * rows) UNIONed with all rows from `<table>_attributes` for the live job.
 * Non-jobs tables are a direct key scan with the TTL predicate appended.
 *
 * @param {string} key - hash key
 * @returns {{sql: string, params: Array}} statement + bind parameters
 */
_hgetall(key) {
  const tableName = context.tableForKey(key, 'hash');
  const isJobsTable = this.isJobsTable(tableName);
  if (isJobsTable) {
    const sql = `
      WITH valid_job AS (
        SELECT id, status, context
        FROM ${tableName}
        WHERE key = $1 AND is_live
      ),
      job_data AS (
        SELECT 'status' AS field, status::text AS value
        FROM ${tableName}
        WHERE key = $1 AND is_live

        UNION ALL

        SELECT 'context' AS field, context::text AS value
        FROM ${tableName}
        WHERE key = $1 AND is_live
      ),
      attribute_data AS (
        SELECT field, value
        FROM ${tableName}_attributes
        WHERE job_id IN (SELECT id FROM valid_job)
      )
      SELECT * FROM job_data
      UNION ALL
      SELECT * FROM attribute_data;
    `;
    return { sql, params: [key] };
  }
  else {
    // Non-job tables
    const sql = context.appendExpiryClause(`
      SELECT field, value
      FROM ${tableName}
      WHERE key = $1
    `, tableName);
    return { sql, params: [key] };
  }
},
|
|
1122
|
-
async hincrbyfloat(key, field, increment, multi) {
|
|
1123
|
-
const { sql, params } = this._hincrbyfloat(key, field, increment);
|
|
1124
|
-
if (multi) {
|
|
1125
|
-
multi.addCommand(sql, params, 'number', (rows) => {
|
|
1126
|
-
return parseFloat(rows[0].value);
|
|
1127
|
-
});
|
|
1128
|
-
return Promise.resolve(0);
|
|
1129
|
-
}
|
|
1130
|
-
else {
|
|
1131
|
-
const res = await context.pgClient.query(sql, params);
|
|
1132
|
-
return parseFloat(res.rows[0].value);
|
|
1133
|
-
}
|
|
1134
|
-
},
|
|
1135
|
-
/**
 * Builds the upsert statement that increments a numeric hash field.
 *
 * Three shapes:
 *  - jobs table, field ':'  -> UPDATE the status column in place
 *  - jobs table, other field -> upsert into `<table>_attributes`, adding the
 *    increment to the existing value (missing value treated as '0')
 *  - non-jobs table          -> upsert directly into the table by (key, field)
 * All variants RETURN the post-increment value as text.
 *
 * @param {string} key - hash key
 * @param {string} field - field name (':' is the jobs status column)
 * @param {number} increment - signed amount to add
 * @returns {{sql: string, params: Array}} statement + bind parameters
 */
_hincrbyfloat(key, field, increment) {
  const tableName = context.tableForKey(key, 'hash');
  const isJobsTable = this.isJobsTable(tableName);
  const isStatusField = field === ':';
  if (isJobsTable && isStatusField) {
    const sql = `
      UPDATE ${tableName}
      SET status = status + $2
      WHERE key = $1 AND is_live
      RETURNING status::text AS value
    `;
    return { sql, params: [key, increment] };
  }
  else if (isJobsTable) {
    // Upsert against the attributes table scoped to the live job's id
    const sql = `
      WITH valid_job AS (
        SELECT id
        FROM ${tableName}
        WHERE key = $1 AND is_live
      )
      INSERT INTO ${tableName}_attributes (job_id, field, value, type)
      SELECT id, $2, ($3::double precision)::text, $4
      FROM valid_job
      ON CONFLICT (job_id, field) DO UPDATE
      SET
        value = ((COALESCE(${tableName}_attributes.value, '0')::double precision) + $3::double precision)::text,
        type = EXCLUDED.type
      RETURNING value;
    `;
    return { sql, params: [key, field, increment, this._deriveType(field)] };
  }
  else {
    const sql = `
      INSERT INTO ${tableName} (key, field, value)
      VALUES ($1, $2, ($3)::text)
      ON CONFLICT (key, field) DO UPDATE
      SET value = ((COALESCE(${tableName}.value, '0')::double precision + $3::double precision)::text)
      RETURNING value
    `;
    return { sql, params: [key, field, increment] };
  }
},
|
|
1178
|
-
async hscan(key, cursor, count = 10, pattern, multi) {
|
|
1179
|
-
const { sql, params } = this._hscan(key, cursor, count, pattern);
|
|
1180
|
-
if (multi) {
|
|
1181
|
-
multi.addCommand(sql, params, 'object', (rows) => {
|
|
1182
|
-
const items = {};
|
|
1183
|
-
for (const row of rows) {
|
|
1184
|
-
items[row.field] = row.value;
|
|
1185
|
-
}
|
|
1186
|
-
const newCursor = rows.length < count ? 0 : Number(cursor) + rows.length;
|
|
1187
|
-
return { cursor: newCursor.toString(), items };
|
|
1188
|
-
});
|
|
1189
|
-
return Promise.resolve({ cursor: '0', items: {} });
|
|
1190
|
-
}
|
|
1191
|
-
else {
|
|
1192
|
-
const res = await context.pgClient.query(sql, params);
|
|
1193
|
-
const items = {};
|
|
1194
|
-
for (const row of res.rows) {
|
|
1195
|
-
items[row.field] = row.value;
|
|
1196
|
-
}
|
|
1197
|
-
const newCursor = res.rowCount < count ? 0 : Number(cursor) + res.rowCount;
|
|
1198
|
-
return { cursor: newCursor.toString(), items };
|
|
1199
|
-
}
|
|
1200
|
-
},
|
|
1201
|
-
/**
 * Builds the paged SELECT behind hscan: fields sorted alphabetically,
 * windowed with OFFSET (cursor) / LIMIT (count), optionally filtered by a
 * glob pattern translated to SQL LIKE ('*' -> '%').
 *
 * @param {string} key - hash key
 * @param {number|string} cursor - offset into the sorted field list
 * @param {number} count - page size
 * @param {string} [pattern] - optional glob field filter
 * @returns {{sql: string, params: Array}} statement + bind parameters
 */
_hscan(key, cursor, count, pattern) {
  const tableName = context.tableForKey(key, 'hash');
  const params = [key];
  // NOTE(review): this filters on an `expiry` column, while sibling builders
  // use appendExpiryClause / `expired_at` — confirm the column name matches
  // this table's schema.
  let sql = `
    SELECT field, value FROM ${tableName}
    WHERE key = $1 AND (expiry IS NULL OR expiry > NOW())
  `;
  let paramIndex = 2;
  if (pattern) {
    // Translate glob wildcards to SQL LIKE wildcards
    const sqlPattern = pattern.replace(/\*/g, '%');
    sql += ` AND field LIKE $${paramIndex}`;
    params.push(sqlPattern);
    paramIndex++;
  }
  sql += `
    ORDER BY field
    OFFSET $${paramIndex} LIMIT $${paramIndex + 1}
  `;
  params.push(cursor.toString());
  params.push(count.toString());
  return { sql, params };
},
|
|
1223
|
-
async expire(key, seconds, multi) {
|
|
1224
|
-
const { sql, params } = this._expire(key, seconds);
|
|
1225
|
-
if (multi) {
|
|
1226
|
-
multi.addCommand(sql, params, 'boolean');
|
|
1227
|
-
return Promise.resolve(true);
|
|
1228
|
-
}
|
|
1229
|
-
else {
|
|
1230
|
-
const res = await context.pgClient.query(sql, params);
|
|
1231
|
-
return res.rowCount > 0;
|
|
1232
|
-
}
|
|
1233
|
-
},
|
|
1234
|
-
_expire(key, seconds) {
|
|
1235
|
-
//only job tables are ever expired
|
|
1236
|
-
const tableName = context.tableForKey(key);
|
|
1237
|
-
const expiryTime = new Date(Date.now() + seconds * 1000);
|
|
1238
|
-
const sql = `
|
|
1239
|
-
UPDATE ${tableName}
|
|
1240
|
-
SET expired_at = $2
|
|
1241
|
-
WHERE key = $1 AND is_live
|
|
1242
|
-
RETURNING true as success
|
|
1243
|
-
`;
|
|
1244
|
-
const params = [key, expiryTime];
|
|
1245
|
-
return { sql, params };
|
|
1246
|
-
},
|
|
1247
|
-
async scan(cursor, count = 10, pattern, multi) {
|
|
1248
|
-
const { sql, params } = this._scan(cursor, count, pattern);
|
|
1249
|
-
if (multi) {
|
|
1250
|
-
multi.addCommand(sql, params, 'object', (rows) => {
|
|
1251
|
-
const keys = rows.map((row) => row.key);
|
|
1252
|
-
const newCursor = cursor + rows.length;
|
|
1253
|
-
return { cursor: newCursor, keys };
|
|
1254
|
-
});
|
|
1255
|
-
return Promise.resolve({ cursor: 0, keys: [] });
|
|
1256
|
-
}
|
|
1257
|
-
else {
|
|
1258
|
-
const res = await context.pgClient.query(sql, params);
|
|
1259
|
-
const keys = res.rows.map((row) => row.key);
|
|
1260
|
-
const newCursor = cursor + res.rowCount;
|
|
1261
|
-
return { cursor: newCursor, keys };
|
|
1262
|
-
}
|
|
1263
|
-
},
|
|
1264
|
-
/**
 * Builds the paged SELECT behind scan: non-expired job keys sorted
 * alphabetically, windowed with OFFSET (cursor) / LIMIT (count), optionally
 * filtered by a glob pattern translated to SQL LIKE ('*' -> '%').
 *
 * @param {number|string} cursor - offset into the sorted key list
 * @param {number} count - page size
 * @param {string} [pattern] - optional glob key filter
 * @returns {{sql: string, params: Array}} statement + bind parameters
 */
_scan(cursor, count, pattern) {
  // Synthesize a jobs-style key so tableForKey resolves the app's jobs table
  const tableName = context.tableForKey(`_:${context.appId}:j:_`);
  let sql = `
    SELECT key FROM ${tableName}
    WHERE (expired_at IS NULL OR expired_at > NOW())
  `;
  const params = [];
  if (pattern) {
    // Translate glob wildcards to SQL LIKE wildcards
    sql += ' AND key LIKE $1';
    params.push(pattern.replace(/\*/g, '%'));
  }
  sql += `
    ORDER BY key
    OFFSET $${params.length + 1} LIMIT $${params.length + 2}
  `;
  params.push(cursor.toString());
  params.push(count.toString());
  return { sql, params };
},
|
|
1283
|
-
isJobsTable(tableName) {
|
|
1284
|
-
return tableName.endsWith('jobs');
|
|
1285
|
-
},
|
|
1286
|
-
});
|
|
1287
|
-
exports.hashModule = hashModule;
|