@fjell/lib-sequelize 4.4.14 → 4.4.16
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between these versions as they appear in the public registry.
- package/dist/{types/Operations.d.ts → Operations.d.ts} +5 -5
- package/dist/{types/SequelizeLibraryFactory.d.ts → SequelizeLibraryFactory.d.ts} +4 -4
- package/dist/{types/contained → contained}/SequelizeLibrary.d.ts +4 -4
- package/dist/index.js +1488 -0
- package/dist/index.js.map +7 -0
- package/dist/logger.d.ts +2 -0
- package/dist/{types/ops → ops}/all.d.ts +3 -3
- package/dist/{types/ops → ops}/create.d.ts +3 -3
- package/dist/{types/ops → ops}/find.d.ts +3 -3
- package/dist/{types/ops → ops}/get.d.ts +1 -1
- package/dist/{types/ops → ops}/one.d.ts +3 -3
- package/dist/{types/ops → ops}/remove.d.ts +4 -3
- package/dist/{types/ops → ops}/update.d.ts +3 -3
- package/dist/{types/primary → primary}/SequelizeLibrary.d.ts +4 -4
- package/package.json +17 -20
- package/dist/cjs/AggregationBuilder.cjs +0 -65
- package/dist/cjs/Coordinate.cjs +0 -24
- package/dist/cjs/Definition.cjs +0 -25
- package/dist/cjs/EventCoordinator.cjs +0 -54
- package/dist/cjs/KeyMaster.cjs +0 -151
- package/dist/cjs/OperationContext.cjs +0 -161
- package/dist/cjs/Operations.cjs +0 -34
- package/dist/cjs/Options.cjs +0 -46
- package/dist/cjs/QueryBuilder.cjs +0 -296
- package/dist/cjs/ReferenceBuilder.cjs +0 -76
- package/dist/cjs/RowProcessor.cjs +0 -56
- package/dist/cjs/SequelizeLibrary.cjs +0 -56
- package/dist/cjs/SequelizeLibraryFactory.cjs +0 -25
- package/dist/cjs/contained/SequelizeLibrary.cjs +0 -31
- package/dist/cjs/contained/index.cjs +0 -11
- package/dist/cjs/index.cjs +0 -26
- package/dist/cjs/logger.cjs +0 -10
- package/dist/cjs/ops/all.cjs +0 -145
- package/dist/cjs/ops/create.cjs +0 -252
- package/dist/cjs/ops/find.cjs +0 -47
- package/dist/cjs/ops/get.cjs +0 -92
- package/dist/cjs/ops/one.cjs +0 -27
- package/dist/cjs/ops/remove.cjs +0 -114
- package/dist/cjs/ops/update.cjs +0 -120
- package/dist/cjs/primary/SequelizeLibrary.cjs +0 -41
- package/dist/cjs/primary/index.cjs +0 -11
- package/dist/cjs/util/general.cjs +0 -48
- package/dist/cjs/util/relationshipUtils.cjs +0 -117
- package/dist/es/AggregationBuilder.js +0 -61
- package/dist/es/Coordinate.js +0 -19
- package/dist/es/Definition.js +0 -21
- package/dist/es/EventCoordinator.js +0 -48
- package/dist/es/KeyMaster.js +0 -146
- package/dist/es/OperationContext.js +0 -155
- package/dist/es/Operations.js +0 -30
- package/dist/es/Options.js +0 -23
- package/dist/es/QueryBuilder.js +0 -290
- package/dist/es/ReferenceBuilder.js +0 -72
- package/dist/es/RowProcessor.js +0 -52
- package/dist/es/SequelizeLibrary.js +0 -32
- package/dist/es/SequelizeLibraryFactory.js +0 -21
- package/dist/es/contained/SequelizeLibrary.js +0 -26
- package/dist/es/contained/index.js +0 -2
- package/dist/es/index.js +0 -11
- package/dist/es/logger.js +0 -6
- package/dist/es/ops/all.js +0 -141
- package/dist/es/ops/create.js +0 -248
- package/dist/es/ops/find.js +0 -43
- package/dist/es/ops/get.js +0 -88
- package/dist/es/ops/one.js +0 -23
- package/dist/es/ops/remove.js +0 -110
- package/dist/es/ops/update.js +0 -116
- package/dist/es/primary/SequelizeLibrary.js +0 -36
- package/dist/es/primary/index.js +0 -2
- package/dist/es/util/general.js +0 -44
- package/dist/es/util/relationshipUtils.js +0 -112
- package/dist/index.cjs +0 -1853
- package/dist/index.cjs.map +0 -1
- package/dist/types/AggregationBuilder.d.ts +0 -5
- package/dist/types/EventCoordinator.d.ts +0 -6
- package/dist/types/KeyMaster.d.ts +0 -4
- package/dist/types/OperationContext.d.ts +0 -72
- package/dist/types/QueryBuilder.d.ts +0 -12
- package/dist/types/ReferenceBuilder.d.ts +0 -4
- package/dist/types/RowProcessor.d.ts +0 -6
- package/dist/types/logger.d.ts +0 -2
- package/dist/types/util/general.d.ts +0 -4
- package/dist/types/util/relationshipUtils.d.ts +0 -21
- package/dist/{types/Coordinate.d.ts → Coordinate.d.ts} +0 -0
- package/dist/{types/Definition.d.ts → Definition.d.ts} +0 -0
- package/dist/{types/Options.d.ts → Options.d.ts} +1 -1
- package/dist/{types/Registry.d.ts → Registry.d.ts} +0 -0
- package/dist/{types/SequelizeLibrary.d.ts → SequelizeLibrary.d.ts} +1 -1
- package/dist/{types/contained → contained}/index.d.ts +0 -0
- package/dist/{types/index.d.ts → index.d.ts} +0 -0
- package/dist/{types/primary → primary}/index.d.ts +0 -0
package/dist/index.cjs
DELETED
|
@@ -1,1853 +0,0 @@
|
|
|
1
|
-
'use strict';
|
|
2
|
-
|
|
3
|
-
Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
|
|
4
|
-
|
|
5
|
-
const Library = require('@fjell/lib');
|
|
6
|
-
const Logging = require('@fjell/logging');
|
|
7
|
-
const registry = require('@fjell/registry');
|
|
8
|
-
const core = require('@fjell/core');
|
|
9
|
-
const sequelize = require('sequelize');
|
|
10
|
-
require('deepmerge');
|
|
11
|
-
|
|
12
|
-
function _interopNamespaceDefault(e) {
|
|
13
|
-
const n = Object.create(null, { [Symbol.toStringTag]: { value: 'Module' } });
|
|
14
|
-
if (e) {
|
|
15
|
-
for (const k in e) {
|
|
16
|
-
if (k !== 'default') {
|
|
17
|
-
const d = Object.getOwnPropertyDescriptor(e, k);
|
|
18
|
-
Object.defineProperty(n, k, d.get ? d : {
|
|
19
|
-
enumerable: true,
|
|
20
|
-
get: () => e[k]
|
|
21
|
-
});
|
|
22
|
-
}
|
|
23
|
-
}
|
|
24
|
-
}
|
|
25
|
-
n.default = e;
|
|
26
|
-
return Object.freeze(n);
|
|
27
|
-
}
|
|
28
|
-
|
|
29
|
-
const Library__namespace = /*#__PURE__*/_interopNamespaceDefault(Library);
|
|
30
|
-
|
|
31
|
-
const DEFAULT_SEQUELIZE_OPTIONS = {
|
|
32
|
-
deleteOnRemove: false,
|
|
33
|
-
references: [],
|
|
34
|
-
aggregations: []
|
|
35
|
-
};
|
|
36
|
-
const createOptions = (sequelizeOptions)=>{
|
|
37
|
-
// Create the base lib options
|
|
38
|
-
const baseOptions = Library__namespace.createOptions(sequelizeOptions);
|
|
39
|
-
var _sequelizeOptions_deleteOnRemove, _sequelizeOptions_references, _sequelizeOptions_aggregations;
|
|
40
|
-
// Add Sequelize-specific defaults
|
|
41
|
-
const result = {
|
|
42
|
-
...baseOptions,
|
|
43
|
-
deleteOnRemove: (_sequelizeOptions_deleteOnRemove = sequelizeOptions === null || sequelizeOptions === void 0 ? void 0 : sequelizeOptions.deleteOnRemove) !== null && _sequelizeOptions_deleteOnRemove !== void 0 ? _sequelizeOptions_deleteOnRemove : DEFAULT_SEQUELIZE_OPTIONS.deleteOnRemove,
|
|
44
|
-
references: (_sequelizeOptions_references = sequelizeOptions === null || sequelizeOptions === void 0 ? void 0 : sequelizeOptions.references) !== null && _sequelizeOptions_references !== void 0 ? _sequelizeOptions_references : DEFAULT_SEQUELIZE_OPTIONS.references,
|
|
45
|
-
aggregations: (_sequelizeOptions_aggregations = sequelizeOptions === null || sequelizeOptions === void 0 ? void 0 : sequelizeOptions.aggregations) !== null && _sequelizeOptions_aggregations !== void 0 ? _sequelizeOptions_aggregations : DEFAULT_SEQUELIZE_OPTIONS.aggregations
|
|
46
|
-
};
|
|
47
|
-
return result;
|
|
48
|
-
};
|
|
49
|
-
|
|
50
|
-
const logger$1.default = Logging.getLogger('@fjell/lib-sequelize');
|
|
51
|
-
|
|
52
|
-
const logger$i = logger$1.default.get('Coordinate');
|
|
53
|
-
const SCOPE_SEQUELIZE = 'sequelize';
|
|
54
|
-
const createCoordinate = (kta, scopes)=>{
|
|
55
|
-
logger$i.debug('createCoordinate', {
|
|
56
|
-
kta,
|
|
57
|
-
scopes
|
|
58
|
-
});
|
|
59
|
-
const coordinate = registry.createCoordinate(kta, [
|
|
60
|
-
SCOPE_SEQUELIZE,
|
|
61
|
-
...scopes || []
|
|
62
|
-
]);
|
|
63
|
-
return coordinate;
|
|
64
|
-
};
|
|
65
|
-
|
|
66
|
-
const logger$h = logger$1.default.get('lib-sequelize', 'Definition');
|
|
67
|
-
const createDefinition = (kta, scopes, libOptions)=>{
|
|
68
|
-
logger$h.debug('createDefinition', {
|
|
69
|
-
kta,
|
|
70
|
-
scopes,
|
|
71
|
-
libOptions
|
|
72
|
-
});
|
|
73
|
-
const coordinate = createCoordinate(kta, scopes);
|
|
74
|
-
const options = createOptions(libOptions);
|
|
75
|
-
return {
|
|
76
|
-
coordinate,
|
|
77
|
-
options
|
|
78
|
-
};
|
|
79
|
-
};
|
|
80
|
-
|
|
81
|
-
//Recursive implementation of jSON.stringify;
|
|
82
|
-
const stringifyJSON = function(obj, visited = new Set()) {
|
|
83
|
-
const arrOfKeyVals = [];
|
|
84
|
-
const arrVals = [];
|
|
85
|
-
let objKeys = [];
|
|
86
|
-
/*********CHECK FOR PRIMITIVE TYPES**********/ if (typeof obj === 'number' || typeof obj === 'boolean' || obj === null) return '' + obj;
|
|
87
|
-
else if (typeof obj === 'string') return '"' + obj + '"';
|
|
88
|
-
/*********DETECT CIRCULAR REFERENCES**********/ if (obj instanceof Object && visited.has(obj)) {
|
|
89
|
-
return '"(circular)"';
|
|
90
|
-
} else if (Array.isArray(obj)) {
|
|
91
|
-
//check for empty array
|
|
92
|
-
if (obj[0] === undefined) return '[]';
|
|
93
|
-
else {
|
|
94
|
-
// Add array to visited before processing its elements
|
|
95
|
-
visited.add(obj);
|
|
96
|
-
obj.forEach(function(el) {
|
|
97
|
-
arrVals.push(stringifyJSON(el, visited));
|
|
98
|
-
});
|
|
99
|
-
return '[' + arrVals + ']';
|
|
100
|
-
}
|
|
101
|
-
} else if (obj instanceof Object) {
|
|
102
|
-
// Add object to visited before processing its properties
|
|
103
|
-
visited.add(obj);
|
|
104
|
-
//get object keys
|
|
105
|
-
objKeys = Object.keys(obj);
|
|
106
|
-
//set key output;
|
|
107
|
-
objKeys.forEach(function(key) {
|
|
108
|
-
const keyOut = '"' + key + '":';
|
|
109
|
-
const keyValOut = obj[key];
|
|
110
|
-
//skip functions and undefined properties
|
|
111
|
-
if (keyValOut instanceof Function || keyValOut === undefined) return; // Skip this entry entirely instead of pushing an empty string
|
|
112
|
-
else if (typeof keyValOut === 'string') arrOfKeyVals.push(keyOut + '"' + keyValOut + '"');
|
|
113
|
-
else if (typeof keyValOut === 'boolean' || typeof keyValOut === 'number' || keyValOut === null) arrOfKeyVals.push(keyOut + keyValOut);
|
|
114
|
-
else if (keyValOut instanceof Object) {
|
|
115
|
-
arrOfKeyVals.push(keyOut + stringifyJSON(keyValOut, visited));
|
|
116
|
-
}
|
|
117
|
-
});
|
|
118
|
-
return '{' + arrOfKeyVals + '}';
|
|
119
|
-
}
|
|
120
|
-
return '';
|
|
121
|
-
};
|
|
122
|
-
|
|
123
|
-
const logger$g = logger$1.default.get('sequelize', 'QueryBuilder');
|
|
124
|
-
const addDeleteQuery = (options, model)=>{
|
|
125
|
-
logger$g.default(`QueryBuilder adding delete query with options: ${stringifyJSON(options)}`);
|
|
126
|
-
if (model.getAttributes().deletedAt) {
|
|
127
|
-
options.where['deletedAt'] = {
|
|
128
|
-
[sequelize.Op.eq]: null
|
|
129
|
-
};
|
|
130
|
-
} else if (model.getAttributes().isDeleted) {
|
|
131
|
-
options.where['isDeleted'] = {
|
|
132
|
-
[sequelize.Op.eq]: false
|
|
133
|
-
};
|
|
134
|
-
}
|
|
135
|
-
return options;
|
|
136
|
-
};
|
|
137
|
-
const addEventQueries = (options, events, model)=>{
|
|
138
|
-
logger$g.default(`QueryBuilder adding event queries with options: ${stringifyJSON(options)}, events: ${stringifyJSON(events)}`);
|
|
139
|
-
Object.keys(events).forEach((key)=>{
|
|
140
|
-
if (!model.getAttributes()[`${key}At`]) {
|
|
141
|
-
throw new Error(`Event ${key} is not supported on this model, column ${key}At not found`);
|
|
142
|
-
}
|
|
143
|
-
let whereClauses = {};
|
|
144
|
-
const event = events[key];
|
|
145
|
-
if (event.start) {
|
|
146
|
-
whereClauses = {
|
|
147
|
-
...whereClauses,
|
|
148
|
-
[sequelize.Op.gte]: new Date(event.start)
|
|
149
|
-
};
|
|
150
|
-
}
|
|
151
|
-
if (event.end) {
|
|
152
|
-
whereClauses = {
|
|
153
|
-
...whereClauses,
|
|
154
|
-
[sequelize.Op.lt]: new Date(event.end)
|
|
155
|
-
};
|
|
156
|
-
}
|
|
157
|
-
if (event.by) {
|
|
158
|
-
if (!model.getAttributes()[`${key}By`]) {
|
|
159
|
-
throw new Error(`Event ${key} is not supported on this model, column ${key}By not found`);
|
|
160
|
-
}
|
|
161
|
-
whereClauses = {
|
|
162
|
-
...whereClauses,
|
|
163
|
-
[sequelize.Op.eq]: event.by
|
|
164
|
-
};
|
|
165
|
-
}
|
|
166
|
-
options.where[`${key}At`] = whereClauses;
|
|
167
|
-
});
|
|
168
|
-
return options;
|
|
169
|
-
};
|
|
170
|
-
// Add the references to the query
|
|
171
|
-
const addReferenceQueries = (options, references, model)=>{
|
|
172
|
-
logger$g.default(`QueryBuilder adding reference queries with options: ${stringifyJSON(options)}, references: ${stringifyJSON(references)}`);
|
|
173
|
-
Object.keys(references).forEach((key)=>{
|
|
174
|
-
logger$g.default(`QueryBuilder adding reference query for key: ${key}, references: ${stringifyJSON(references)}`);
|
|
175
|
-
if (!model.getAttributes()[`${key}Id`]) {
|
|
176
|
-
throw new Error(`Reference ${key} is not supported on this model, column ${key}Id not found`);
|
|
177
|
-
}
|
|
178
|
-
if (core.isPriKey(references[key])) {
|
|
179
|
-
const priKey = references[key];
|
|
180
|
-
if (priKey.pk == null || priKey.pk === '' || typeof priKey.pk === 'object' && Object.keys(priKey.pk).length === 0) {
|
|
181
|
-
logger$g.error(`Reference key '${key}' has invalid pk value: ${stringifyJSON(priKey.pk)}`, {
|
|
182
|
-
priKey,
|
|
183
|
-
references
|
|
184
|
-
});
|
|
185
|
-
throw new Error(`Reference key '${key}' has invalid pk value: ${stringifyJSON(priKey.pk)}`);
|
|
186
|
-
}
|
|
187
|
-
logger$g.trace(`[QueryBuilder] Setting reference where clause: ${key}Id = ${stringifyJSON(priKey.pk)} (type: ${typeof priKey.pk})`);
|
|
188
|
-
options.where[`${key}Id`] = {
|
|
189
|
-
[sequelize.Op.eq]: priKey.pk
|
|
190
|
-
};
|
|
191
|
-
} else if (core.isComKey(references[key])) {
|
|
192
|
-
throw new Error('ComKeys are not supported in Sequelize');
|
|
193
|
-
}
|
|
194
|
-
});
|
|
195
|
-
return options;
|
|
196
|
-
};
|
|
197
|
-
const addCompoundCondition = (options, compoundCondition, model)=>{
|
|
198
|
-
// Ensure options.where exists
|
|
199
|
-
options.where = options.where || {};
|
|
200
|
-
let compoundOp;
|
|
201
|
-
const compoundType = compoundCondition.compoundType;
|
|
202
|
-
if (compoundType === "AND") {
|
|
203
|
-
compoundOp = sequelize.Op.and;
|
|
204
|
-
} else {
|
|
205
|
-
compoundOp = sequelize.Op.or;
|
|
206
|
-
}
|
|
207
|
-
let conditions = {};
|
|
208
|
-
compoundCondition.conditions.forEach((condition)=>{
|
|
209
|
-
if (core.isCondition(condition)) {
|
|
210
|
-
conditions = addCondition(conditions, condition, model);
|
|
211
|
-
} else {
|
|
212
|
-
throw new Error('Nest Compound conditions not supported');
|
|
213
|
-
}
|
|
214
|
-
});
|
|
215
|
-
// Merge with existing where conditions instead of replacing
|
|
216
|
-
if (Object.keys(options.where).length > 0) {
|
|
217
|
-
// If there are existing conditions, wrap everything in an AND
|
|
218
|
-
options.where = {
|
|
219
|
-
[sequelize.Op.and]: [
|
|
220
|
-
options.where,
|
|
221
|
-
{
|
|
222
|
-
[compoundOp]: conditions
|
|
223
|
-
}
|
|
224
|
-
]
|
|
225
|
-
};
|
|
226
|
-
} else {
|
|
227
|
-
// If no existing conditions, just set the compound condition
|
|
228
|
-
options.where[compoundOp] = conditions;
|
|
229
|
-
}
|
|
230
|
-
return options;
|
|
231
|
-
};
|
|
232
|
-
const getSequelizeOperator = (operator)=>{
|
|
233
|
-
if (operator === '==') {
|
|
234
|
-
return sequelize.Op.eq;
|
|
235
|
-
} else if (operator === '<') {
|
|
236
|
-
return sequelize.Op.lt;
|
|
237
|
-
} else if (operator === '>') {
|
|
238
|
-
return sequelize.Op.gt;
|
|
239
|
-
} else if (operator === '<=') {
|
|
240
|
-
return sequelize.Op.lte;
|
|
241
|
-
} else if (operator === '>=') {
|
|
242
|
-
return sequelize.Op.gte;
|
|
243
|
-
} else if (operator === 'in') {
|
|
244
|
-
return sequelize.Op.in;
|
|
245
|
-
} else {
|
|
246
|
-
throw new Error(`Operator ${operator} not supported`);
|
|
247
|
-
}
|
|
248
|
-
};
|
|
249
|
-
const addAssociationCondition = (conditions, condition, model)=>{
|
|
250
|
-
const [associationName, attributeName] = condition.column.split('.', 2);
|
|
251
|
-
// Check if the association exists on the model
|
|
252
|
-
if (!model.associations || !model.associations[associationName]) {
|
|
253
|
-
throw new Error(`Association ${associationName} not found on model ${model.name}`);
|
|
254
|
-
}
|
|
255
|
-
const association = model.associations[associationName];
|
|
256
|
-
const associatedModel = association.target;
|
|
257
|
-
// Check if the attribute exists on the associated model
|
|
258
|
-
if (!associatedModel.getAttributes()[attributeName]) {
|
|
259
|
-
throw new Error(`Attribute ${attributeName} not found on associated model ${associatedModel.name} for association ${associationName}`);
|
|
260
|
-
}
|
|
261
|
-
// Use Sequelize's $association.attribute$ syntax for querying associated models
|
|
262
|
-
const sequelizeAssociationColumn = `$${associationName}.${attributeName}$`;
|
|
263
|
-
const conditionOp = getSequelizeOperator(condition.operator);
|
|
264
|
-
if (condition.value == null && condition.operator !== '==' && condition.operator !== 'in') {
|
|
265
|
-
logger$g.error(`Association condition for '${associationName}.${attributeName}' has undefined/null value`, {
|
|
266
|
-
condition
|
|
267
|
-
});
|
|
268
|
-
throw new Error(`Association condition for '${associationName}.${attributeName}' has undefined/null value`);
|
|
269
|
-
}
|
|
270
|
-
logger$g.trace(`[QueryBuilder] Setting association condition: ${sequelizeAssociationColumn} = ${stringifyJSON(condition.value)} (type: ${typeof condition.value})`);
|
|
271
|
-
conditions[sequelizeAssociationColumn] = {
|
|
272
|
-
[conditionOp]: condition.value
|
|
273
|
-
};
|
|
274
|
-
return conditions;
|
|
275
|
-
};
|
|
276
|
-
const addAttributeCondition = (conditions, condition, model)=>{
|
|
277
|
-
const conditionColumn = condition.column;
|
|
278
|
-
if (!model.getAttributes()[conditionColumn]) {
|
|
279
|
-
throw new Error(`Condition column ${conditionColumn} not found on model ${model.name}`);
|
|
280
|
-
}
|
|
281
|
-
const conditionOp = getSequelizeOperator(condition.operator);
|
|
282
|
-
if (condition.value == null && condition.operator !== '==' && condition.operator !== 'in') {
|
|
283
|
-
logger$g.error(`Attribute condition for '${conditionColumn}' has undefined/null value`, {
|
|
284
|
-
condition
|
|
285
|
-
});
|
|
286
|
-
throw new Error(`Attribute condition for '${conditionColumn}' has undefined/null value`);
|
|
287
|
-
}
|
|
288
|
-
logger$g.trace(`[QueryBuilder] Setting attribute condition: ${conditionColumn} = ${stringifyJSON(condition.value)} (type: ${typeof condition.value})`);
|
|
289
|
-
conditions[conditionColumn] = {
|
|
290
|
-
[conditionOp]: condition.value
|
|
291
|
-
};
|
|
292
|
-
return conditions;
|
|
293
|
-
};
|
|
294
|
-
const addCondition = (conditions, condition, model)=>{
|
|
295
|
-
const conditionColumn = condition.column;
|
|
296
|
-
// Check if this is an association query (contains a dot)
|
|
297
|
-
if (conditionColumn.includes('.')) {
|
|
298
|
-
return addAssociationCondition(conditions, condition, model);
|
|
299
|
-
}
|
|
300
|
-
// Handle regular column queries
|
|
301
|
-
return addAttributeCondition(conditions, condition, model);
|
|
302
|
-
};
|
|
303
|
-
const collectAssociationsFromConditions = (conditions)=>{
|
|
304
|
-
const associations = new Set();
|
|
305
|
-
const processObject = (obj)=>{
|
|
306
|
-
if (typeof obj === 'object' && obj !== null) {
|
|
307
|
-
// Check string keys
|
|
308
|
-
Object.keys(obj).forEach((key)=>{
|
|
309
|
-
// Check if this is an association reference ($association.attribute$)
|
|
310
|
-
if (typeof key === 'string' && key.startsWith('$') && key.endsWith('$') && key.includes('.')) {
|
|
311
|
-
const associationName = key.substring(1, key.indexOf('.'));
|
|
312
|
-
associations.add(associationName);
|
|
313
|
-
}
|
|
314
|
-
// Recursively process nested objects
|
|
315
|
-
if (typeof obj[key] === 'object') {
|
|
316
|
-
processObject(obj[key]);
|
|
317
|
-
}
|
|
318
|
-
});
|
|
319
|
-
// Also check Symbol keys (for compound conditions like Op.and, Op.or)
|
|
320
|
-
Object.getOwnPropertySymbols(obj).forEach((symbol)=>{
|
|
321
|
-
if (typeof obj[symbol] === 'object') {
|
|
322
|
-
processObject(obj[symbol]);
|
|
323
|
-
}
|
|
324
|
-
});
|
|
325
|
-
}
|
|
326
|
-
// Handle arrays (for compound conditions that might be arrays)
|
|
327
|
-
if (Array.isArray(obj)) {
|
|
328
|
-
obj.forEach((item)=>{
|
|
329
|
-
if (typeof item === 'object') {
|
|
330
|
-
processObject(item);
|
|
331
|
-
}
|
|
332
|
-
});
|
|
333
|
-
}
|
|
334
|
-
};
|
|
335
|
-
processObject(conditions);
|
|
336
|
-
return associations;
|
|
337
|
-
};
|
|
338
|
-
const addAssociationIncludes = (options, model)=>{
|
|
339
|
-
// Collect all association names used in conditions
|
|
340
|
-
const referencedAssociations = collectAssociationsFromConditions(options.where);
|
|
341
|
-
if (referencedAssociations.size > 0) {
|
|
342
|
-
options.include = options.include || [];
|
|
343
|
-
// Add each referenced association to the include array
|
|
344
|
-
referencedAssociations.forEach((associationName)=>{
|
|
345
|
-
// Check if this association is already included
|
|
346
|
-
const alreadyIncluded = options.include.some((inc)=>typeof inc === 'string' && inc === associationName || typeof inc === 'object' && inc.association === associationName);
|
|
347
|
-
if (!alreadyIncluded && model.associations && model.associations[associationName]) {
|
|
348
|
-
options.include.push({
|
|
349
|
-
model: model.associations[associationName].target,
|
|
350
|
-
as: associationName,
|
|
351
|
-
required: false // Use LEFT JOIN so records without associations are still returned
|
|
352
|
-
});
|
|
353
|
-
}
|
|
354
|
-
});
|
|
355
|
-
}
|
|
356
|
-
return options;
|
|
357
|
-
};
|
|
358
|
-
const buildQuery = (itemQuery, model)=>{
|
|
359
|
-
logger$g.default(`QueryBuilder build called with itemQuery: ${stringifyJSON(itemQuery)}`);
|
|
360
|
-
let options = {
|
|
361
|
-
where: {}
|
|
362
|
-
};
|
|
363
|
-
if (itemQuery.compoundCondition) {
|
|
364
|
-
logger$g.default(`QueryBuilder adding conditions: ${stringifyJSON(itemQuery.compoundCondition)}`);
|
|
365
|
-
options = addCompoundCondition(options, itemQuery.compoundCondition, model);
|
|
366
|
-
}
|
|
367
|
-
// If the model has a deletedAt column, we need to add a delete query
|
|
368
|
-
if (model.getAttributes().deletedAt || model.getAttributes().isDeleted) {
|
|
369
|
-
options = addDeleteQuery(options, model);
|
|
370
|
-
}
|
|
371
|
-
if (itemQuery.refs) {
|
|
372
|
-
options = addReferenceQueries(options, itemQuery.refs, model);
|
|
373
|
-
}
|
|
374
|
-
if (itemQuery.events) {
|
|
375
|
-
options = addEventQueries(options, itemQuery.events, model);
|
|
376
|
-
}
|
|
377
|
-
// TODO: Once we start to support Aggs on the server-side, we'll need to parse agg queries
|
|
378
|
-
// Apply a limit to the result set
|
|
379
|
-
if (itemQuery.limit) {
|
|
380
|
-
logger$g.default(`QueryBuilder applying limit: ${itemQuery.limit}`);
|
|
381
|
-
options.limit = itemQuery.limit;
|
|
382
|
-
}
|
|
383
|
-
// Apply an offset to the result set
|
|
384
|
-
if (itemQuery.offset) {
|
|
385
|
-
options.offset = itemQuery.offset;
|
|
386
|
-
}
|
|
387
|
-
// Add orderBy to the query
|
|
388
|
-
if (itemQuery.orderBy) {
|
|
389
|
-
itemQuery.orderBy.forEach((orderBy)=>{
|
|
390
|
-
if (!model.getAttributes()[orderBy.field]) {
|
|
391
|
-
throw new Error(`Order by field ${orderBy.field} not found on model ${model.name}`);
|
|
392
|
-
}
|
|
393
|
-
options.order = [
|
|
394
|
-
[
|
|
395
|
-
orderBy.field,
|
|
396
|
-
orderBy.direction
|
|
397
|
-
]
|
|
398
|
-
];
|
|
399
|
-
});
|
|
400
|
-
}
|
|
401
|
-
// Add includes for any associations referenced in conditions
|
|
402
|
-
options = addAssociationIncludes(options, model);
|
|
403
|
-
return options;
|
|
404
|
-
};
|
|
405
|
-
|
|
406
|
-
/* eslint-disable indent */ /**
|
|
407
|
-
* Helper function to build relationship chain includes
|
|
408
|
-
*/ const buildRelationshipChain = (targetModel, kta, currentIndex, targetIndex)=>{
|
|
409
|
-
// Build the association path and validate relationships exist
|
|
410
|
-
const associationParts = [];
|
|
411
|
-
const modelChain = [
|
|
412
|
-
targetModel
|
|
413
|
-
];
|
|
414
|
-
let currentModel = targetModel;
|
|
415
|
-
// Validate that all associations exist and build model chain
|
|
416
|
-
for(let i = currentIndex + 1; i <= targetIndex; i++){
|
|
417
|
-
const intermediateType = kta[i];
|
|
418
|
-
const associationName = intermediateType;
|
|
419
|
-
if (!currentModel.associations || !currentModel.associations[associationName]) {
|
|
420
|
-
return {
|
|
421
|
-
success: false
|
|
422
|
-
};
|
|
423
|
-
}
|
|
424
|
-
associationParts.push(associationName);
|
|
425
|
-
currentModel = currentModel.associations[associationName].target;
|
|
426
|
-
modelChain.push(currentModel);
|
|
427
|
-
}
|
|
428
|
-
// Build the full association path for the target field
|
|
429
|
-
const targetPrimaryKey = currentModel.primaryKeyAttribute || 'id';
|
|
430
|
-
const associationPath = `$${associationParts.join('.')}.${targetPrimaryKey}$`;
|
|
431
|
-
// Build nested includes structure iteratively (clearer than recursion)
|
|
432
|
-
let deepestInclude = null;
|
|
433
|
-
// Build from the deepest level back to the root
|
|
434
|
-
for(let i = targetIndex; i > currentIndex; i--){
|
|
435
|
-
const currentType = kta[i];
|
|
436
|
-
const modelIndex = i - currentIndex;
|
|
437
|
-
const includeObj = {
|
|
438
|
-
model: modelChain[modelIndex],
|
|
439
|
-
as: currentType,
|
|
440
|
-
required: true
|
|
441
|
-
};
|
|
442
|
-
if (deepestInclude) {
|
|
443
|
-
includeObj.include = [
|
|
444
|
-
deepestInclude
|
|
445
|
-
];
|
|
446
|
-
}
|
|
447
|
-
deepestInclude = includeObj;
|
|
448
|
-
}
|
|
449
|
-
const includes = deepestInclude ? [
|
|
450
|
-
deepestInclude
|
|
451
|
-
] : [];
|
|
452
|
-
return {
|
|
453
|
-
success: true,
|
|
454
|
-
path: associationPath,
|
|
455
|
-
includes
|
|
456
|
-
};
|
|
457
|
-
};
|
|
458
|
-
/**
|
|
459
|
-
* Helper function to build relationship path for a locator
|
|
460
|
-
* @param includeIsDirect Whether to include the isDirect flag in the result
|
|
461
|
-
*/ const relationshipUtils.buildRelationshipPath = (targetModel, locatorType, kta, includeIsDirect = false)=>{
|
|
462
|
-
// First check if the field exists directly
|
|
463
|
-
const directFieldName = `${locatorType}Id`;
|
|
464
|
-
const attributes = targetModel.getAttributes();
|
|
465
|
-
if (attributes && attributes[directFieldName]) {
|
|
466
|
-
const result = {
|
|
467
|
-
found: true
|
|
468
|
-
};
|
|
469
|
-
if (includeIsDirect) {
|
|
470
|
-
result.isDirect = true;
|
|
471
|
-
}
|
|
472
|
-
return result;
|
|
473
|
-
}
|
|
474
|
-
// If not direct, look for relationship path
|
|
475
|
-
const targetIndex = kta.indexOf(locatorType);
|
|
476
|
-
if (targetIndex === -1) {
|
|
477
|
-
const result = {
|
|
478
|
-
found: false
|
|
479
|
-
};
|
|
480
|
-
if (includeIsDirect) {
|
|
481
|
-
result.isDirect = false;
|
|
482
|
-
}
|
|
483
|
-
return result;
|
|
484
|
-
}
|
|
485
|
-
const currentIndex = 0; // We're always looking from the base model
|
|
486
|
-
if (targetIndex <= currentIndex) {
|
|
487
|
-
const result = {
|
|
488
|
-
found: false
|
|
489
|
-
};
|
|
490
|
-
if (includeIsDirect) {
|
|
491
|
-
result.isDirect = false;
|
|
492
|
-
}
|
|
493
|
-
return result;
|
|
494
|
-
}
|
|
495
|
-
const chainResult = buildRelationshipChain(targetModel, kta, currentIndex, targetIndex);
|
|
496
|
-
if (chainResult.success) {
|
|
497
|
-
const result = {
|
|
498
|
-
found: true,
|
|
499
|
-
path: chainResult.path,
|
|
500
|
-
includes: chainResult.includes
|
|
501
|
-
};
|
|
502
|
-
if (includeIsDirect) {
|
|
503
|
-
result.isDirect = false;
|
|
504
|
-
}
|
|
505
|
-
return result;
|
|
506
|
-
}
|
|
507
|
-
const result = {
|
|
508
|
-
found: false
|
|
509
|
-
};
|
|
510
|
-
if (includeIsDirect) {
|
|
511
|
-
result.isDirect = false;
|
|
512
|
-
}
|
|
513
|
-
return result;
|
|
514
|
-
};
|
|
515
|
-
|
|
516
|
-
const logger$f = logger$1.default.get('sequelize', 'KeyMaster');
|
|
517
|
-
// Helper function to extract location key value from item
|
|
518
|
-
const extractLocationKeyValue = (model, item, locatorType, kta)=>{
|
|
519
|
-
logger$f.default('Extracting location key value', {
|
|
520
|
-
locatorType,
|
|
521
|
-
kta
|
|
522
|
-
});
|
|
523
|
-
const relationshipInfo = relationshipUtils.buildRelationshipPath(model, locatorType, kta, true);
|
|
524
|
-
if (!relationshipInfo.found) {
|
|
525
|
-
throw new Error(`Location key '${locatorType}' cannot be resolved on model '${model.name}' or through its relationships.`);
|
|
526
|
-
}
|
|
527
|
-
if (relationshipInfo.isDirect) {
|
|
528
|
-
// Direct foreign key field
|
|
529
|
-
const foreignKeyField = `${locatorType}Id`;
|
|
530
|
-
const value = item[foreignKeyField];
|
|
531
|
-
if (typeof value === 'undefined' || value === null) {
|
|
532
|
-
throw new Error(`Direct foreign key field '${foreignKeyField}' is missing or null in item`);
|
|
533
|
-
}
|
|
534
|
-
return value;
|
|
535
|
-
} else {
|
|
536
|
-
// Need to traverse relationship hierarchy
|
|
537
|
-
// Find the path through the key type array
|
|
538
|
-
const locatorIndex = kta.indexOf(locatorType);
|
|
539
|
-
if (locatorIndex === -1) {
|
|
540
|
-
throw new Error(`Locator type '${locatorType}' not found in key type array`);
|
|
541
|
-
}
|
|
542
|
-
// Start from the current item (index 0 in kta)
|
|
543
|
-
let currentObject = item;
|
|
544
|
-
// Traverse through each intermediate relationship to reach the target
|
|
545
|
-
for(let i = 1; i < locatorIndex; i++){
|
|
546
|
-
const intermediateType = kta[i];
|
|
547
|
-
// Check if the intermediate relationship object is loaded
|
|
548
|
-
if (currentObject[intermediateType] && typeof currentObject[intermediateType] === 'object') {
|
|
549
|
-
currentObject = currentObject[intermediateType];
|
|
550
|
-
} else {
|
|
551
|
-
// Try the foreign key approach if the relationship object isn't loaded
|
|
552
|
-
const foreignKeyField = `${intermediateType}Id`;
|
|
553
|
-
if (typeof currentObject[foreignKeyField] !== 'undefined' && currentObject[foreignKeyField] !== null) {
|
|
554
|
-
// We have the foreign key but not the loaded object, we can't traverse further
|
|
555
|
-
throw new Error(`Intermediate relationship '${intermediateType}' is not loaded. Cannot traverse to '${locatorType}'. Either include the relationship in your query or ensure it's loaded.`);
|
|
556
|
-
}
|
|
557
|
-
throw new Error(`Intermediate relationship '${intermediateType}' is missing in the relationship chain. Expected path: ${kta.slice(0, locatorIndex + 1).join(' → ')}`);
|
|
558
|
-
}
|
|
559
|
-
}
|
|
560
|
-
// Now extract the target locator value from the current object
|
|
561
|
-
// First try to get it from the loaded relationship object
|
|
562
|
-
if (currentObject[locatorType] && typeof currentObject[locatorType] === 'object' && typeof currentObject[locatorType].id !== 'undefined') {
|
|
563
|
-
return currentObject[locatorType].id;
|
|
564
|
-
}
|
|
565
|
-
// If the relationship object isn't loaded, try the foreign key field
|
|
566
|
-
const foreignKeyField = `${locatorType}Id`;
|
|
567
|
-
if (typeof currentObject[foreignKeyField] !== 'undefined' && currentObject[foreignKeyField] !== null) {
|
|
568
|
-
return currentObject[foreignKeyField];
|
|
569
|
-
}
|
|
570
|
-
throw new Error(`Unable to extract location key for '${locatorType}'. Neither the relationship object nor direct foreign key is available. Traversal path: ${kta.slice(0, locatorIndex + 1).join(' → ')}`);
|
|
571
|
-
}
|
|
572
|
-
};
|
|
573
|
-
const removeKey = (item)=>{
|
|
574
|
-
logger$f.default('Removing Key', {
|
|
575
|
-
item
|
|
576
|
-
});
|
|
577
|
-
delete item.key;
|
|
578
|
-
return item;
|
|
579
|
-
};
|
|
580
|
-
// export const populateKey = <
|
|
581
|
-
// S extends string,
|
|
582
|
-
// L1 extends string = never,
|
|
583
|
-
// L2 extends string = never,
|
|
584
|
-
// L3 extends string = never,
|
|
585
|
-
// L4 extends string = never,
|
|
586
|
-
// L5 extends string = never
|
|
587
|
-
// >(
|
|
588
|
-
// item: ItemProperties<S, L1, L2, L3, L4, L5>,
|
|
589
|
-
// keyTypes: AllItemTypeArrays<S, L1, L2, L3, L4, L5>
|
|
590
|
-
// ): ItemProperties<S, L1, L2, L3, L4, L5> => {
|
|
591
|
-
// if (keyTypes.length === 1) {
|
|
592
|
-
// item.key = { kt: keyTypes[0], pk: item.id };
|
|
593
|
-
// delete item.id;
|
|
594
|
-
// } else if (keyTypes.length === 2) {
|
|
595
|
-
// item.key = {
|
|
596
|
-
// kt: keyTypes[0], pk: item.id,
|
|
597
|
-
// // TODO: Shouldn't this be inspecting the model to get the primary key type?
|
|
598
|
-
// loc: [{ kt: keyTypes[1], lk: item[keyTypes[1] + 'Id'] }],
|
|
599
|
-
// };
|
|
600
|
-
// delete item.id;
|
|
601
|
-
// delete item[keyTypes[1] + 'Id'];
|
|
602
|
-
// } else {
|
|
603
|
-
// throw new Error('Not implemented');
|
|
604
|
-
// }
|
|
605
|
-
// return item;
|
|
606
|
-
// }
|
|
607
|
-
const addKey = (model, item, keyTypes)=>{
|
|
608
|
-
logger$f.default('Adding Key', {
|
|
609
|
-
item
|
|
610
|
-
});
|
|
611
|
-
const key = {};
|
|
612
|
-
const modelClass = model.constructor;
|
|
613
|
-
const primaryKeyAttr = modelClass.primaryKeyAttribute;
|
|
614
|
-
if (Array.isArray(keyTypes) && keyTypes.length > 1) {
|
|
615
|
-
const type = [
|
|
616
|
-
...keyTypes
|
|
617
|
-
];
|
|
618
|
-
const pkType = type.shift();
|
|
619
|
-
Object.assign(key, {
|
|
620
|
-
kt: pkType,
|
|
621
|
-
pk: item[primaryKeyAttr]
|
|
622
|
-
});
|
|
623
|
-
// Build location keys for composite key
|
|
624
|
-
const locationKeys = [];
|
|
625
|
-
for (const locatorType of type){
|
|
626
|
-
try {
|
|
627
|
-
const lk = extractLocationKeyValue(modelClass, item, locatorType, keyTypes);
|
|
628
|
-
locationKeys.push({
|
|
629
|
-
kt: locatorType,
|
|
630
|
-
lk
|
|
631
|
-
});
|
|
632
|
-
} catch (error) {
|
|
633
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
634
|
-
logger$f.error(`Failed to extract location key for '${locatorType}'`, {
|
|
635
|
-
error: errorMessage,
|
|
636
|
-
item,
|
|
637
|
-
keyTypes
|
|
638
|
-
});
|
|
639
|
-
throw error;
|
|
640
|
-
}
|
|
641
|
-
}
|
|
642
|
-
Object.assign(key, {
|
|
643
|
-
loc: locationKeys
|
|
644
|
-
});
|
|
645
|
-
} else {
|
|
646
|
-
Object.assign(key, {
|
|
647
|
-
kt: keyTypes[0],
|
|
648
|
-
pk: item[primaryKeyAttr]
|
|
649
|
-
});
|
|
650
|
-
}
|
|
651
|
-
Object.assign(item, {
|
|
652
|
-
key
|
|
653
|
-
});
|
|
654
|
-
return item;
|
|
655
|
-
};
|
|
656
|
-
|
|
657
|
-
const logger$e = logger$1.default.get('sequelize', 'ReferenceBuilder');
|
|
658
|
-
const buildReference = async (item, referenceDefinition, registry, context)=>{
|
|
659
|
-
// Check if there is more than one key type
|
|
660
|
-
if (referenceDefinition.kta.length > 1) {
|
|
661
|
-
throw new Error("The ReferenceBuilder doesn't work with more than one key type yet");
|
|
662
|
-
}
|
|
663
|
-
// Check if dependencies exist
|
|
664
|
-
if (!registry) {
|
|
665
|
-
throw new Error("This model definition has a reference definition, but the registry is not present");
|
|
666
|
-
}
|
|
667
|
-
// Find the Library.Instance for the key type
|
|
668
|
-
const library = registry.get(referenceDefinition.kta);
|
|
669
|
-
if (!library) {
|
|
670
|
-
throw new Error("This model definition has a reference definition, but the dependency is not present");
|
|
671
|
-
}
|
|
672
|
-
// Check if the column value is null - if so, skip the reference
|
|
673
|
-
const columnValue = item[referenceDefinition.column];
|
|
674
|
-
if (columnValue == null) {
|
|
675
|
-
item[referenceDefinition.property] = null;
|
|
676
|
-
return item;
|
|
677
|
-
}
|
|
678
|
-
// Create a PriKey using the column value from item
|
|
679
|
-
const priKey = {
|
|
680
|
-
kt: referenceDefinition.kta[0],
|
|
681
|
-
pk: columnValue
|
|
682
|
-
};
|
|
683
|
-
let referencedItem;
|
|
684
|
-
if (context) {
|
|
685
|
-
// Check if we already have this item cached
|
|
686
|
-
if (context.isCached(priKey)) {
|
|
687
|
-
logger$e.default('Using cached reference', {
|
|
688
|
-
priKey,
|
|
689
|
-
property: referenceDefinition.property
|
|
690
|
-
});
|
|
691
|
-
referencedItem = context.getCached(priKey);
|
|
692
|
-
} else if (context.isInProgress(priKey)) {
|
|
693
|
-
logger$e.default('Circular dependency detected, creating reference placeholder', {
|
|
694
|
-
priKey,
|
|
695
|
-
property: referenceDefinition.property
|
|
696
|
-
});
|
|
697
|
-
// Create a minimal reference object with just the key to break the cycle
|
|
698
|
-
referencedItem = {
|
|
699
|
-
key: priKey
|
|
700
|
-
};
|
|
701
|
-
} else {
|
|
702
|
-
// Mark this key as in progress before loading
|
|
703
|
-
context.markInProgress(priKey);
|
|
704
|
-
try {
|
|
705
|
-
// Get the referenced item using the Library.Operations get method (context now managed internally)
|
|
706
|
-
referencedItem = await library.operations.get(priKey);
|
|
707
|
-
// Cache the result
|
|
708
|
-
context.setCached(priKey, referencedItem);
|
|
709
|
-
} finally{
|
|
710
|
-
// Always mark as complete, even if there was an error
|
|
711
|
-
context.markComplete(priKey);
|
|
712
|
-
}
|
|
713
|
-
}
|
|
714
|
-
} else {
|
|
715
|
-
// Fallback to original behavior if no context provided
|
|
716
|
-
referencedItem = await library.operations.get(priKey);
|
|
717
|
-
}
|
|
718
|
-
// TODO: In a Fjell-compliant implementation, this value should be stored in the ref property
|
|
719
|
-
// For now, we'll just populate the property directly
|
|
720
|
-
// Store the result in the property on item
|
|
721
|
-
item[referenceDefinition.property] = referencedItem;
|
|
722
|
-
return item;
|
|
723
|
-
};
|
|
724
|
-
|
|
725
|
-
function _define_property(obj, key, value) {
|
|
726
|
-
if (key in obj) {
|
|
727
|
-
Object.defineProperty(obj, key, {
|
|
728
|
-
value: value,
|
|
729
|
-
enumerable: true,
|
|
730
|
-
configurable: true,
|
|
731
|
-
writable: true
|
|
732
|
-
});
|
|
733
|
-
} else {
|
|
734
|
-
obj[key] = value;
|
|
735
|
-
}
|
|
736
|
-
return obj;
|
|
737
|
-
}
|
|
738
|
-
const logger$d = logger$1.default.get('sequelize', 'OperationContext');
|
|
739
|
-
/**
|
|
740
|
-
* Serialize an ItemKey to a string for use in sets and maps
|
|
741
|
-
*/ const OperationContext.serializeKey = (key)=>{
|
|
742
|
-
if ('pk' in key && 'kt' in key && !('loc' in key)) {
|
|
743
|
-
// PriKey
|
|
744
|
-
return `${key.kt}:${key.pk}`;
|
|
745
|
-
} else if ('pk' in key && 'kt' in key && 'loc' in key) {
|
|
746
|
-
// ComKey
|
|
747
|
-
const locStr = key.loc.map((l)=>`${l.kt}:${l.lk}`).join(',');
|
|
748
|
-
return `${key.kt}:${key.pk}|${locStr}`;
|
|
749
|
-
}
|
|
750
|
-
throw new Error(`Unsupported key type: ${JSON.stringify(key)}`);
|
|
751
|
-
};
|
|
752
|
-
/**
|
|
753
|
-
* Create a new OperationContext
|
|
754
|
-
*/ const createOperationContext = ()=>{
|
|
755
|
-
const inProgress = new Set();
|
|
756
|
-
const cache = new Map();
|
|
757
|
-
return {
|
|
758
|
-
inProgress,
|
|
759
|
-
cache,
|
|
760
|
-
markInProgress (key) {
|
|
761
|
-
const serialized = OperationContext.serializeKey(key);
|
|
762
|
-
logger$d.default('Marking key as in progress', {
|
|
763
|
-
key,
|
|
764
|
-
serialized
|
|
765
|
-
});
|
|
766
|
-
inProgress.add(serialized);
|
|
767
|
-
},
|
|
768
|
-
markComplete (key) {
|
|
769
|
-
const serialized = OperationContext.serializeKey(key);
|
|
770
|
-
logger$d.default('Marking key as complete', {
|
|
771
|
-
key,
|
|
772
|
-
serialized
|
|
773
|
-
});
|
|
774
|
-
inProgress.delete(serialized);
|
|
775
|
-
},
|
|
776
|
-
isInProgress (key) {
|
|
777
|
-
const serialized = OperationContext.serializeKey(key);
|
|
778
|
-
const result = inProgress.has(serialized);
|
|
779
|
-
logger$d.default('Checking if key is in progress', {
|
|
780
|
-
key,
|
|
781
|
-
serialized,
|
|
782
|
-
result
|
|
783
|
-
});
|
|
784
|
-
return result;
|
|
785
|
-
},
|
|
786
|
-
getCached (key) {
|
|
787
|
-
const serialized = OperationContext.serializeKey(key);
|
|
788
|
-
const result = cache.get(serialized);
|
|
789
|
-
logger$d.default('Getting cached item', {
|
|
790
|
-
key,
|
|
791
|
-
serialized,
|
|
792
|
-
found: !!result
|
|
793
|
-
});
|
|
794
|
-
return result;
|
|
795
|
-
},
|
|
796
|
-
setCached (key, item) {
|
|
797
|
-
const serialized = OperationContext.serializeKey(key);
|
|
798
|
-
logger$d.default('Caching item', {
|
|
799
|
-
key,
|
|
800
|
-
serialized
|
|
801
|
-
});
|
|
802
|
-
cache.set(serialized, item);
|
|
803
|
-
},
|
|
804
|
-
isCached (key) {
|
|
805
|
-
const serialized = OperationContext.serializeKey(key);
|
|
806
|
-
const result = cache.has(serialized);
|
|
807
|
-
logger$d.default('Checking if key is cached', {
|
|
808
|
-
key,
|
|
809
|
-
serialized,
|
|
810
|
-
result
|
|
811
|
-
});
|
|
812
|
-
return result;
|
|
813
|
-
}
|
|
814
|
-
};
|
|
815
|
-
};
|
|
816
|
-
/**
|
|
817
|
-
* Context Manager for sharing context across operations without changing public interfaces
|
|
818
|
-
*/ class ContextManager {
|
|
819
|
-
/**
|
|
820
|
-
* Set the current context for the current operation chain
|
|
821
|
-
*/ setCurrentContext(context) {
|
|
822
|
-
const contextId = Math.random().toString(36).substring(7);
|
|
823
|
-
this.contexts.set(contextId, context);
|
|
824
|
-
this.currentContextId = contextId;
|
|
825
|
-
logger$d.default('Set current context', {
|
|
826
|
-
contextId
|
|
827
|
-
});
|
|
828
|
-
return contextId;
|
|
829
|
-
}
|
|
830
|
-
/**
|
|
831
|
-
* Get the current context if one is set
|
|
832
|
-
*/ getCurrentContext() {
|
|
833
|
-
if (this.currentContextId) {
|
|
834
|
-
const context = this.contexts.get(this.currentContextId);
|
|
835
|
-
logger$d.default('Got current context', {
|
|
836
|
-
contextId: this.currentContextId,
|
|
837
|
-
found: !!context
|
|
838
|
-
});
|
|
839
|
-
return context;
|
|
840
|
-
}
|
|
841
|
-
return;
|
|
842
|
-
}
|
|
843
|
-
/**
|
|
844
|
-
* Clear the current context
|
|
845
|
-
*/ clearCurrentContext() {
|
|
846
|
-
if (this.currentContextId) {
|
|
847
|
-
logger$d.default('Clearing current context', {
|
|
848
|
-
contextId: this.currentContextId
|
|
849
|
-
});
|
|
850
|
-
this.contexts.delete(this.currentContextId);
|
|
851
|
-
this.currentContextId = null;
|
|
852
|
-
}
|
|
853
|
-
}
|
|
854
|
-
/**
|
|
855
|
-
* Execute a function with a specific context set as current
|
|
856
|
-
*/ async withContext(context, fn) {
|
|
857
|
-
const previousContextId = this.currentContextId;
|
|
858
|
-
this.setCurrentContext(context);
|
|
859
|
-
try {
|
|
860
|
-
return await fn();
|
|
861
|
-
} finally{
|
|
862
|
-
this.clearCurrentContext();
|
|
863
|
-
if (previousContextId) {
|
|
864
|
-
this.currentContextId = previousContextId;
|
|
865
|
-
}
|
|
866
|
-
}
|
|
867
|
-
}
|
|
868
|
-
constructor(){
|
|
869
|
-
_define_property(this, "contexts", new Map());
|
|
870
|
-
_define_property(this, "currentContextId", null);
|
|
871
|
-
}
|
|
872
|
-
}
|
|
873
|
-
// Global context manager instance
|
|
874
|
-
const OperationContext.contextManager = new ContextManager();
|
|
875
|
-
|
|
876
|
-
const logger$c = logger$1.default.get('sequelize', 'AggregationBuilder');
|
|
877
|
-
const buildAggregation = async (item, aggregationDefinition, registry, context)=>{
|
|
878
|
-
const location = core.ikToLKA(item.key);
|
|
879
|
-
// Get the library instance from the registry using the key type array
|
|
880
|
-
const libraryInstance = registry.get(aggregationDefinition.kta);
|
|
881
|
-
if (!libraryInstance) {
|
|
882
|
-
throw new Error(`Library instance not found for key type array: ${aggregationDefinition.kta.join(', ')}`);
|
|
883
|
-
}
|
|
884
|
-
// Create a cache key for this aggregation query
|
|
885
|
-
// This helps avoid running the same aggregation multiple times
|
|
886
|
-
const aggregationCacheKey = `${aggregationDefinition.kta.join('.')}_${aggregationDefinition.cardinality}_${OperationContext.serializeKey(item.key)}`;
|
|
887
|
-
if (context) {
|
|
888
|
-
// Check if this aggregation is already cached
|
|
889
|
-
if (context.cache.has(aggregationCacheKey)) {
|
|
890
|
-
const cachedResult = context.cache.get(aggregationCacheKey);
|
|
891
|
-
logger$c.default('Using cached aggregation result', {
|
|
892
|
-
aggregationCacheKey,
|
|
893
|
-
property: aggregationDefinition.property
|
|
894
|
-
});
|
|
895
|
-
item[aggregationDefinition.property] = cachedResult;
|
|
896
|
-
return item;
|
|
897
|
-
}
|
|
898
|
-
// Note: We don't check for circular dependencies here because:
|
|
899
|
-
// 1. Aggregations are location-based queries, not key-based references
|
|
900
|
-
// 2. They should be allowed to run during normal item processing
|
|
901
|
-
// 3. The main circular dependency concern is with references, not aggregations
|
|
902
|
-
}
|
|
903
|
-
// Execute aggregation within the current context to ensure context sharing
|
|
904
|
-
return OperationContext.contextManager.withContext(context || OperationContext.contextManager.getCurrentContext() || {
|
|
905
|
-
inProgress: new Set(),
|
|
906
|
-
cache: new Map()
|
|
907
|
-
}, async ()=>{
|
|
908
|
-
// Based on cardinality, use either one or all operation
|
|
909
|
-
if (aggregationDefinition.cardinality === 'one') {
|
|
910
|
-
// For one-to-one relationship, use the one operation
|
|
911
|
-
return libraryInstance.operations.one({}, location).then((result)=>{
|
|
912
|
-
if (context) {
|
|
913
|
-
context.cache.set(aggregationCacheKey, result);
|
|
914
|
-
}
|
|
915
|
-
item[aggregationDefinition.property] = result;
|
|
916
|
-
return item;
|
|
917
|
-
});
|
|
918
|
-
} else {
|
|
919
|
-
// For one-to-many relationship, use the all operation
|
|
920
|
-
return libraryInstance.operations.all({}, location).then((results)=>{
|
|
921
|
-
if (context) {
|
|
922
|
-
context.cache.set(aggregationCacheKey, results);
|
|
923
|
-
}
|
|
924
|
-
item[aggregationDefinition.property] = results;
|
|
925
|
-
return item;
|
|
926
|
-
});
|
|
927
|
-
}
|
|
928
|
-
});
|
|
929
|
-
};
|
|
930
|
-
|
|
931
|
-
const logger$b = logger$1.default.get("sequelize", "EventCoordinator");
|
|
932
|
-
//#endregion
|
|
933
|
-
const populateEvents = (item)=>{
|
|
934
|
-
const events = {
|
|
935
|
-
created: {
|
|
936
|
-
at: item.createdAt || null
|
|
937
|
-
},
|
|
938
|
-
updated: {
|
|
939
|
-
at: item.updatedAt || null
|
|
940
|
-
},
|
|
941
|
-
deleted: {
|
|
942
|
-
at: null
|
|
943
|
-
}
|
|
944
|
-
};
|
|
945
|
-
item.events = events;
|
|
946
|
-
return item;
|
|
947
|
-
};
|
|
948
|
-
const extractEvents = (item)=>{
|
|
949
|
-
logger$b.default('Extracting Events to database fields', {
|
|
950
|
-
item
|
|
951
|
-
});
|
|
952
|
-
if (item.events) {
|
|
953
|
-
var _item_events_created, _item_events_updated, _item_events_deleted;
|
|
954
|
-
if ((_item_events_created = item.events.created) === null || _item_events_created === void 0 ? void 0 : _item_events_created.at) {
|
|
955
|
-
item.createdAt = item.events.created.at;
|
|
956
|
-
}
|
|
957
|
-
if ((_item_events_updated = item.events.updated) === null || _item_events_updated === void 0 ? void 0 : _item_events_updated.at) {
|
|
958
|
-
item.updatedAt = item.events.updated.at;
|
|
959
|
-
}
|
|
960
|
-
if ((_item_events_deleted = item.events.deleted) === null || _item_events_deleted === void 0 ? void 0 : _item_events_deleted.at) {
|
|
961
|
-
item.deletedAt = item.events.deleted.at;
|
|
962
|
-
}
|
|
963
|
-
}
|
|
964
|
-
return item;
|
|
965
|
-
};
|
|
966
|
-
const removeEvents = (item)=>{
|
|
967
|
-
logger$b.default('Removing Events', {
|
|
968
|
-
item
|
|
969
|
-
});
|
|
970
|
-
delete item.events;
|
|
971
|
-
return item;
|
|
972
|
-
};
|
|
973
|
-
|
|
974
|
-
const logger$a = logger$1.default.get('sequelize', 'RowProcessor');
|
|
975
|
-
const processRow = async (row, keyTypes, referenceDefinitions, aggregationDefinitions, registry, context)=>{
|
|
976
|
-
logger$a.default('Processing Row', {
|
|
977
|
-
row
|
|
978
|
-
});
|
|
979
|
-
// Use provided context or create new one
|
|
980
|
-
const operationContext = context || createOperationContext();
|
|
981
|
-
// Process the row within the context to ensure all operations share the same context
|
|
982
|
-
return OperationContext.contextManager.withContext(operationContext, async ()=>{
|
|
983
|
-
let item = row.get({
|
|
984
|
-
plain: true
|
|
985
|
-
});
|
|
986
|
-
logger$a.default('Adding Key to Item with Key Types: %s', stringifyJSON(keyTypes));
|
|
987
|
-
item = addKey(row, item, keyTypes);
|
|
988
|
-
item = populateEvents(item);
|
|
989
|
-
logger$a.default('Key Added to Item: %s', stringifyJSON(item.key));
|
|
990
|
-
// Mark this item as in progress to detect circular references
|
|
991
|
-
operationContext.markInProgress(item.key);
|
|
992
|
-
try {
|
|
993
|
-
if (referenceDefinitions && referenceDefinitions.length > 0) {
|
|
994
|
-
for (const referenceDefinition of referenceDefinitions){
|
|
995
|
-
logger$a.default('Processing Reference for %s to %s', item.key.kt, stringifyJSON(referenceDefinition.kta));
|
|
996
|
-
item = await buildReference(item, referenceDefinition, registry, operationContext);
|
|
997
|
-
}
|
|
998
|
-
}
|
|
999
|
-
if (aggregationDefinitions && aggregationDefinitions.length > 0) {
|
|
1000
|
-
for (const aggregationDefinition of aggregationDefinitions){
|
|
1001
|
-
logger$a.default('Processing Aggregation for %s from %s', item.key.kt, stringifyJSON(aggregationDefinition.kta));
|
|
1002
|
-
item = await buildAggregation(item, aggregationDefinition, registry, operationContext);
|
|
1003
|
-
}
|
|
1004
|
-
}
|
|
1005
|
-
// Cache the fully processed item
|
|
1006
|
-
operationContext.setCached(item.key, item);
|
|
1007
|
-
} finally{
|
|
1008
|
-
// Mark this item as complete
|
|
1009
|
-
operationContext.markComplete(item.key);
|
|
1010
|
-
}
|
|
1011
|
-
logger$a.default('Processed Row: %j', stringifyJSON(item));
|
|
1012
|
-
return item;
|
|
1013
|
-
});
|
|
1014
|
-
};
|
|
1015
|
-
|
|
1016
|
-
const logger$9 = logger$1.default.get('sequelize', 'ops', 'all');
|
|
1017
|
-
// Helper function to merge includes avoiding duplicates
|
|
1018
|
-
const mergeIncludes$1 = (existingIncludes, newIncludes)=>{
|
|
1019
|
-
const mergedIncludes = [
|
|
1020
|
-
...existingIncludes
|
|
1021
|
-
];
|
|
1022
|
-
for (const newInclude of newIncludes){
|
|
1023
|
-
const existingIndex = mergedIncludes.findIndex((existing)=>existing.as === newInclude.as && existing.model === newInclude.model);
|
|
1024
|
-
if (existingIndex === -1) {
|
|
1025
|
-
mergedIncludes.push(newInclude);
|
|
1026
|
-
} else if (newInclude.include && mergedIncludes[existingIndex].include) {
|
|
1027
|
-
mergedIncludes[existingIndex].include = [
|
|
1028
|
-
...mergedIncludes[existingIndex].include,
|
|
1029
|
-
...newInclude.include
|
|
1030
|
-
];
|
|
1031
|
-
} else if (newInclude.include) {
|
|
1032
|
-
mergedIncludes[existingIndex].include = newInclude.include;
|
|
1033
|
-
}
|
|
1034
|
-
}
|
|
1035
|
-
return mergedIncludes;
|
|
1036
|
-
};
|
|
1037
|
-
const all.getAllOperation = (models, definition, registry)=>{
|
|
1038
|
-
const { coordinate, options: { references, aggregations } } = definition;
|
|
1039
|
-
//#region Query
|
|
1040
|
-
const all = async (itemQuery, locations)=>{
|
|
1041
|
-
var _options_include;
|
|
1042
|
-
logger$9.debug(`ALL operation called on ${models[0].name} with ${(locations === null || locations === void 0 ? void 0 : locations.length) || 0} location filters: ${(locations === null || locations === void 0 ? void 0 : locations.map((loc)=>`${loc.kt}=${loc.lk}`).join(', ')) || 'none'}`);
|
|
1043
|
-
const loc = locations || [];
|
|
1044
|
-
// @ts-ignore
|
|
1045
|
-
const model = models[0];
|
|
1046
|
-
// Build base query from itemQuery
|
|
1047
|
-
const options = buildQuery(itemQuery, model);
|
|
1048
|
-
// Handle location keys if present
|
|
1049
|
-
if (loc.length > 0) {
|
|
1050
|
-
const { kta } = coordinate;
|
|
1051
|
-
const directLocations = [];
|
|
1052
|
-
const hierarchicalLocations = [];
|
|
1053
|
-
const additionalIncludes = [];
|
|
1054
|
-
// Categorize location keys as direct or hierarchical
|
|
1055
|
-
for (const locKey of loc){
|
|
1056
|
-
const relationshipInfo = relationshipUtils.buildRelationshipPath(model, locKey.kt, kta, true);
|
|
1057
|
-
if (!relationshipInfo.found) {
|
|
1058
|
-
const errorMessage = `Location key '${locKey.kt}' cannot be resolved on model '${model.name}' or through its relationships.`;
|
|
1059
|
-
logger$9.error(errorMessage, {
|
|
1060
|
-
locations: loc,
|
|
1061
|
-
kta
|
|
1062
|
-
});
|
|
1063
|
-
throw new Error(errorMessage);
|
|
1064
|
-
}
|
|
1065
|
-
if (relationshipInfo.isDirect) {
|
|
1066
|
-
directLocations.push(locKey);
|
|
1067
|
-
} else {
|
|
1068
|
-
hierarchicalLocations.push(locKey);
|
|
1069
|
-
}
|
|
1070
|
-
}
|
|
1071
|
-
// Handle direct location keys (simple foreign key constraints)
|
|
1072
|
-
for (const locKey of directLocations){
|
|
1073
|
-
if (locKey.lk === undefined || locKey.lk == null || locKey.lk === '' || typeof locKey.lk === 'object' && Object.keys(locKey.lk).length === 0) {
|
|
1074
|
-
logger$9.error(`Location key '${locKey.kt}' has invalid lk value: ${stringifyJSON(locKey.lk)}`, {
|
|
1075
|
-
locKey,
|
|
1076
|
-
locations: loc
|
|
1077
|
-
});
|
|
1078
|
-
throw new Error(`Location key '${locKey.kt}' has invalid lk value: ${stringifyJSON(locKey.lk)}`);
|
|
1079
|
-
}
|
|
1080
|
-
const foreignKeyField = locKey.kt + 'Id';
|
|
1081
|
-
// Check if this field already has a condition from the itemQuery
|
|
1082
|
-
if (options.where[foreignKeyField]) {
|
|
1083
|
-
- logger$9.debug(`[ALL] Field ${foreignKeyField} already constrained by itemQuery, skipping location constraint to avoid conflicts`);
- continue; // Skip this location constraint to avoid conflicts
- }
- logger$9.trace(`[ALL] Setting direct location where clause: ${foreignKeyField} = ${stringifyJSON(locKey.lk)} (type: ${typeof locKey.lk})`);
- options.where[foreignKeyField] = {
- [sequelize.Op.eq]: locKey.lk
- };
- }
- // Handle hierarchical location keys (requires relationship traversal)
- for (const locKey of hierarchicalLocations){
- if (locKey.lk === undefined || locKey.lk == null || locKey.lk === '' || typeof locKey.lk === 'object' && Object.keys(locKey.lk).length === 0) {
- logger$9.error(`Hierarchical location key '${locKey.kt}' has invalid lk value: ${stringifyJSON(locKey.lk)}`, {
- locKey,
- locations: loc
- });
- throw new Error(`Hierarchical location key '${locKey.kt}' has invalid lk value: ${stringifyJSON(locKey.lk)}`);
- }
- const relationshipInfo = relationshipUtils.buildRelationshipPath(model, locKey.kt, kta);
- if (relationshipInfo.found && relationshipInfo.path) {
- // Check if this field already has a condition from the itemQuery
- if (options.where[relationshipInfo.path]) {
- logger$9.debug(`[ALL] Field ${relationshipInfo.path} already constrained by itemQuery, skipping hierarchical location constraint to avoid conflicts`);
- continue; // Skip this location constraint to avoid conflicts
- }
- // Add the relationship constraint using the path
- logger$9.trace(`[ALL] Setting hierarchical location where clause: ${relationshipInfo.path} = ${stringifyJSON(locKey.lk)} (type: ${typeof locKey.lk})`);
- options.where[relationshipInfo.path] = {
- [sequelize.Op.eq]: locKey.lk
- };
- // Add necessary includes for the relationship traversal
- if (relationshipInfo.includes) {
- additionalIncludes.push(...relationshipInfo.includes);
- }
- }
- }
- // Merge additional includes with existing includes
- if (additionalIncludes.length > 0) {
- const existingIncludes = options.include || [];
- options.include = mergeIncludes$1(existingIncludes, additionalIncludes);
- }
- }
- logger$9.default(`All query configured for ${model.name} with where fields: ${options.where ? Object.keys(options.where).join(', ') : 'none'}, includes: ${((_options_include = options.include) === null || _options_include === void 0 ? void 0 : _options_include.length) || 0}`);
- try {
- logger$9.trace(`[ALL] Executing ${model.name}.findAll() with options: ${JSON.stringify(options, null, 2)}`);
- } catch {
- // Fallback for cases where JSON.stringify fails on Sequelize operators
- logger$9.trace(`[ALL] Executing ${model.name}.findAll() with options containing non-serializable operators (${Object.keys(options.where || {}).length} where conditions)`);
- }
- const matchingItems = await model.findAll(options);
- // this.logger.default('Matching Items', { matchingItems });
- // Get the current context from context manager
- const context = OperationContext.contextManager.getCurrentContext();
- // TODO: Move this Up!
- const results = await Promise.all(matchingItems.map(async (row)=>{
- const processedRow = await processRow(row, coordinate.kta, references, aggregations, registry, context);
- return core.validateKeys(processedRow, coordinate.kta);
- }));
- logger$9.debug(`[ALL] Returning ${results.length} ${model.name} records`);
- return results;
- };
- return all;
- };
-
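For reference, the all operation shown above turns each direct location key into a simple equality on a `${kt}Id` column before calling Model.findAll. A minimal sketch of the options object it assembles, assuming a hypothetical model with a projectId attribute and a location key { kt: 'project', lk: 42 } (names and values are illustrative, not from this package):

    const { Op } = require('sequelize');

    // Equivalent of the where clause built for a direct location key { kt: 'project', lk: 42 }:
    const options = { where: { projectId: { [Op.eq]: 42 } } };
    // model.findAll(options) then returns the rows that processRow/validateKeys post-process.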
- const logger$8 = logger$1.default.get('sequelize', 'ops', 'create');
- // Helper function to translate PostgreSQL errors to meaningful messages
- function translateDatabaseError(error, itemData, modelName) {
- var _error_original, _error_original1, _error_original2;
- const originalMessage = error.message || '';
- const errorCode = (_error_original = error.original) === null || _error_original === void 0 ? void 0 : _error_original.code;
- const constraint = (_error_original1 = error.original) === null || _error_original1 === void 0 ? void 0 : _error_original1.constraint;
- const detail = (_error_original2 = error.original) === null || _error_original2 === void 0 ? void 0 : _error_original2.detail;
- logger$8.error('Database error during create operation', {
- errorCode,
- constraint,
- detail,
- originalMessage,
- modelName,
- itemData: JSON.stringify(itemData, null, 2)
- });
- // Handle specific PostgreSQL error codes
- switch(errorCode){
- case '23505':
- if (constraint) {
- return new Error(`Duplicate value violates unique constraint '${constraint}'. ${detail || ''}`);
- }
- return new Error(`Duplicate value detected. This record already exists. ${detail || ''}`);
- case '23503':
- if (constraint) {
- return new Error(`Foreign key constraint '${constraint}' violated. Referenced record does not exist. ${detail || ''}`);
- }
- return new Error(`Referenced record does not exist. Check that all related records are valid. ${detail || ''}`);
- case '23502':
- var _error_original3;
- const column = (_error_original3 = error.original) === null || _error_original3 === void 0 ? void 0 : _error_original3.column;
- if (column) {
- return new Error(`Required field '${column}' cannot be null`);
- }
- return new Error(`Required field is missing or null`);
- case '23514':
- if (constraint) {
- return new Error(`Check constraint '${constraint}' violated. ${detail || ''}`);
- }
- return new Error(`Data validation failed. Check constraint violated. ${detail || ''}`);
- case '22001':
- return new Error(`Data too long for field. Check string lengths. ${detail || ''}`);
- case '22003':
- return new Error(`Numeric value out of range. Check number values. ${detail || ''}`);
- case '42703':
- var _error_original4;
- const undefinedColumn = (_error_original4 = error.original) === null || _error_original4 === void 0 ? void 0 : _error_original4.column;
- if (undefinedColumn) {
- return new Error(`Column '${undefinedColumn}' does not exist in table '${modelName}'`);
- }
- return new Error(`Referenced column does not exist`);
- case '42P01':
- return new Error(`Table '${modelName}' does not exist`);
- default:
- // For unknown errors, provide the original message with context
- return new Error(`Database error in ${modelName}.create(): ${originalMessage}. Item data: ${JSON.stringify(itemData, null, 2)}`);
- }
- }
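For reference, translateDatabaseError keys off the PostgreSQL SQLSTATE code carried on error.original. A hedged sketch of the input shape it expects for a unique-constraint violation; the constraint name, column and values below are made up for illustration:

    // Shape of a Sequelize error as handled above (values are illustrative):
    const example = {
      message: 'Validation error',
      original: {
        code: '23505',                      // PostgreSQL unique_violation
        constraint: 'users_email_key',      // hypothetical constraint name
        detail: 'Key (email)=(a@b.c) already exists.'
      }
    };
    // translateDatabaseError(example, { email: 'a@b.c' }, 'User') would return an Error whose
    // message reads: "Duplicate value violates unique constraint 'users_email_key'. Key (email)=(a@b.c) already exists."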
- // Helper function to validate hierarchical chain exists
- async function validateHierarchicalChain(models, locKey, kta) {
- try {
- // Find the direct parent model that contains this locator
- const locatorIndex = kta.indexOf(locKey.kt);
- if (locatorIndex === -1) {
- throw new Error(`Locator type '${locKey.kt}' not found in kta array`);
- }
- // Get the model for this locator
- const locatorModel = models[locatorIndex] || models[0]; // Fallback to primary model
- // Build a query to validate the chain exists
- const chainResult = buildRelationshipChain(locatorModel, kta, locatorIndex, kta.length - 1);
- if (!chainResult.success) {
- // If we can't build a chain, just validate the record exists
- const record = await locatorModel.findByPk(locKey.lk);
- if (!record) {
- throw new Error(`Referenced ${locKey.kt} with id ${locKey.lk} does not exist`);
- }
- return;
- }
- // Validate that the chain exists
- const queryOptions = {
- where: {
- id: locKey.lk
- }
- };
- if (chainResult.includes && chainResult.includes.length > 0) {
- queryOptions.include = chainResult.includes;
- }
- const record = await locatorModel.findOne(queryOptions);
- if (!record) {
- throw new Error(`Referenced ${locKey.kt} with id ${locKey.lk} does not exist or chain is invalid`);
- }
- } catch (error) {
- // Add context to validation errors
- if (error.original) {
- throw translateDatabaseError(error, {
- locKey,
- kta
- }, locKey.kt);
- }
- throw error;
- }
- }
- const getCreateOperation = (models, definition, registry)=>{
- const create = async (item, options)=>{
- logger$8.debug(`CREATE operation called on ${models[0].name} with ${(options === null || options === void 0 ? void 0 : options.key) ? `key: pk=${options.key.pk}, loc=[${core.isComKey(options.key) ? options.key.loc.map((l)=>`${l.kt}=${l.lk}`).join(', ') : ''}]` : (options === null || options === void 0 ? void 0 : options.locations) ? `locations: ${options.locations.map((loc)=>`${loc.kt}=${loc.lk}`).join(', ')}` : 'no constraints'}`);
- logger$8.default(`Create configured for ${models[0].name} with ${Object.keys(item).length} item fields`);
- const { coordinate, options: { references, aggregations } } = definition;
- const { kta } = coordinate;
- // Get the primary model (first model in array)
- const model = models[0];
- const modelAttributes = model.getAttributes();
- // Validate that all item attributes exist on the model
- let itemData = {
- ...item
- };
- // TODO: We need the opposite of processRow, something to step down from fjell to database.
- itemData = extractEvents(itemData);
- itemData = removeEvents(itemData);
- // Validate that all item attributes exist on the model
- const invalidAttributes = [];
- for (const key of Object.keys(itemData)){
- if (!modelAttributes[key]) {
- invalidAttributes.push(key);
- }
- }
- if (invalidAttributes.length > 0) {
- const availableAttributes = Object.keys(modelAttributes).join(', ');
- throw new Error(`Invalid attributes for model '${model.name}': [${invalidAttributes.join(', ')}]. ` + `Available attributes: [${availableAttributes}]. ` + `Item data: ${JSON.stringify(itemData, null, 2)}`);
- }
- // Handle key options
- // If a key is supplied, assume its contents are to be assigned to the appropriate ids.
- // For most cases this will be null as key generation is often through autoIncrement.
- // If this is a CItem then the locations will be present.
- if (options === null || options === void 0 ? void 0 : options.key) {
- const key = options.key;
- if (core.isPriKey(key)) {
- // Set the primary key
- itemData.id = key.pk;
- } else if (core.isComKey(key)) {
- // Set primary key
- itemData.id = key.pk;
- // Process location keys - only set direct foreign keys, validate hierarchical chains
- const comKey = key;
- const directLocations = [];
- const hierarchicalLocations = [];
- // Categorize location keys as direct or hierarchical
- for (const locKey of comKey.loc){
- const relationshipInfo = relationshipUtils.buildRelationshipPath(model, locKey.kt, kta, true);
- if (!relationshipInfo.found) {
- const associations = model.associations ? Object.keys(model.associations) : [];
- const errorMessage = `Composite key locator '${locKey.kt}' cannot be resolved on model '${model.name}' or through its relationships. ` + `Available associations: [${associations.join(', ')}]. ` + `KTA: [${kta.join(', ')}]. ` + `Composite key: ${JSON.stringify(comKey, null, 2)}`;
- logger$8.error(errorMessage, {
- key: comKey,
- kta,
- associations
- });
- throw new Error(errorMessage);
- }
- if (relationshipInfo.isDirect) {
- directLocations.push(locKey);
- } else {
- hierarchicalLocations.push(locKey);
- }
- }
- // Set direct foreign keys
- for (const locKey of directLocations){
- if (locKey.lk == null || locKey.lk === '') {
- logger$8.error(`Composite key location '${locKey.kt}' has undefined/null lk value`, {
- locKey,
- key: comKey
- });
- throw new Error(`Composite key location '${locKey.kt}' has undefined/null lk value`);
- }
- const foreignKeyField = locKey.kt + 'Id';
- itemData[foreignKeyField] = locKey.lk;
- }
- // Validate hierarchical chains exist
- for (const locKey of hierarchicalLocations){
- await validateHierarchicalChain(models, locKey, kta);
- }
- }
- }
- // Handle locations options
- // This is the most frequent way relationship ids will be set
- if (options === null || options === void 0 ? void 0 : options.locations) {
- const directLocations = [];
- const hierarchicalLocations = [];
- // Categorize location keys as direct or hierarchical
- for (const locKey of options.locations){
- const relationshipInfo = relationshipUtils.buildRelationshipPath(model, locKey.kt, kta, true);
- if (!relationshipInfo.found) {
- const associations = model.associations ? Object.keys(model.associations) : [];
- const errorMessage = `Location key '${locKey.kt}' cannot be resolved on model '${model.name}' or through its relationships. ` + `Available associations: [${associations.join(', ')}]. ` + `KTA: [${kta.join(', ')}]. ` + `Locations: ${JSON.stringify(options.locations, null, 2)}`;
- logger$8.error(errorMessage, {
- locations: options.locations,
- kta,
- associations
- });
- throw new Error(errorMessage);
- }
- if (relationshipInfo.isDirect) {
- directLocations.push(locKey);
- } else {
- hierarchicalLocations.push(locKey);
- }
- }
- // Set direct foreign keys
- for (const locKey of directLocations){
- if (locKey.lk == null || locKey.lk === '') {
- logger$8.error(`Location option '${locKey.kt}' has undefined/null lk value`, {
- locKey,
- locations: options.locations
- });
- throw new Error(`Location option '${locKey.kt}' has undefined/null lk value`);
- }
- const foreignKeyField = locKey.kt + 'Id';
- itemData[foreignKeyField] = locKey.lk;
- }
- // Validate hierarchical chains exist
- for (const locKey of hierarchicalLocations){
- await validateHierarchicalChain(models, locKey, kta);
- }
- }
- // Create the record
- try {
- logger$8.trace(`[CREATE] Executing ${model.name}.create() with data: ${stringifyJSON(itemData)}`);
- const createdRecord = await model.create(itemData);
- // Add key and events
- const processedRecord = await processRow(createdRecord, kta, references, aggregations, registry);
- const result = core.validateKeys(processedRecord, kta);
- logger$8.debug(`[CREATE] Created ${model.name} with key: ${result.key ? JSON.stringify(result.key) : `id=${createdRecord.id}`}`);
- return result;
- } catch (error) {
- throw translateDatabaseError(error, itemData, model.name);
- }
- };
- return create;
- };
-
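For reference, options.locations is the usual way the create operation above attaches a contained item to its parents: a direct locator kt maps onto a `${kt}Id` column before Model.create runs. A hedged usage sketch, assuming a library instance built by the factories later in this file and a hypothetical 'task' contained under 'project':

    // `library` is assumed to come from createSequelizeLibrary(...); entity names are illustrative.
    async function createTask(library) {
      return library.operations.create(
        { name: 'Write docs' },
        { locations: [{ kt: 'project', lk: 42 }] }   // sets itemData.projectId = 42 before Task.create()
      );
    }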
- const logger$7 = logger$1.default.get('sequelize', 'ops', 'find');
- const getFindOperation = (models, definition, registry)=>{
- const { options: { finders, references, aggregations } } = definition;
- const find = async (finder, finderParams, locations)=>{
- logger$7.debug(`FIND operation called on ${models[0].name} with finder '${finder}' and ${(locations === null || locations === void 0 ? void 0 : locations.length) || 0} location filters: ${(locations === null || locations === void 0 ? void 0 : locations.map((loc)=>`${loc.kt}=${loc.lk}`).join(', ')) || 'none'}`);
- logger$7.default(`Find configured for ${models[0].name} using finder '${finder}' with ${Object.keys(finderParams).length} params`);
- // Note that we execute the createFinders function here because we want to make sure we're always getting the
- // most up to date methods.
- if (finders && finders[finder]) {
- const finderMethod = finders[finder];
- if (finderMethod) {
- logger$7.trace(`[FIND] Executing finder '${finder}' on ${models[0].name} with params: ${stringifyJSON(finderParams)}, locations: ${stringifyJSON(locations)}`);
- const results = await finderMethod(finderParams, locations);
- if (results && results.length > 0) {
- const processedResults = await Promise.all(results.map(async (row)=>{
- const processedRow = await processRow(row, definition.coordinate.kta, references, aggregations, registry);
- return core.validateKeys(processedRow, definition.coordinate.kta);
- }));
- logger$7.debug(`[FIND] Found ${processedResults.length} ${models[0].name} records using finder '${finder}'`);
- return processedResults;
- } else {
- logger$7.debug(`[FIND] Found 0 ${models[0].name} records using finder '${finder}'`);
- return [];
- }
- } else {
- logger$7.error(`Finder %s not found`, finder);
- throw new Error(`Finder ${finder} not found`);
- }
- } else {
- logger$7.error(`No finders have been defined for this lib`);
- throw new Error(`No finders found`);
- }
- };
- return find;
- };
-
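For reference, the find operation above dispatches to finder functions supplied through the library options; each finder receives (finderParams, locations) and returns Sequelize rows, which are then run through processRow and validateKeys. A hedged sketch of what such a finder might look like; the model and field names are stand-ins, and only the (params, locations) calling convention is taken from the code above:

    // `Task` stands in for a real Sequelize model supplied by the caller.
    const makeFinders = (Task) => ({
      byStatus: async (params, locations) => Task.findAll({ where: { status: params.status } })
    });
    // library.operations.find('byStatus', { status: 'open' }) would invoke it and post-process the rows.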
- const logger$6 = logger$1.default.get('sequelize', 'ops', 'get');
- // Helper function to process composite key and build query options
- const processCompositeKey$1 = (comKey, model, kta)=>{
- const where = {
- id: comKey.pk
- };
- const includes = [];
- for (const locator of comKey.loc){
- const relationshipInfo = relationshipUtils.buildRelationshipPath(model, locator.kt, kta);
- if (!relationshipInfo.found) {
- const errorMessage = `Composite key locator '${locator.kt}' cannot be resolved on model '${model.name}' or through its relationships.`;
- logger$6.error(errorMessage, {
- key: comKey,
- kta
- });
- throw new Error(errorMessage);
- }
- if (relationshipInfo.path) {
- // This requires a relationship traversal
- where[relationshipInfo.path] = locator.lk;
- if (relationshipInfo.includes) {
- includes.push(...relationshipInfo.includes);
- }
- } else {
- // This is a direct field
- const fieldName = `${locator.kt}Id`;
- where[fieldName] = locator.lk;
- }
- }
- const result = {
- where
- };
- if (includes.length > 0) {
- result.include = includes;
- }
- return result;
- };
- const getGetOperation = (models, definition, registry)=>{
- const { coordinate, options: { references, aggregations } } = definition;
- const { kta } = coordinate;
- const get = async (key)=>{
- if (!core.isValidItemKey(key)) {
- logger$6.error('Key for Get is not a valid ItemKey: %j', key);
- throw new Error('Key for Get is not a valid ItemKey');
- }
- logger$6.debug(`GET operation called on ${models[0].name} with ${core.isPriKey(key) ? `primary key: pk=${key.pk}` : `composite key: pk=${key.pk}, loc=[${key.loc.map((l)=>`${l.kt}=${l.lk}`).join(', ')}]`}`);
- logger$6.default(`Get configured for ${models[0].name} with ${core.isPriKey(key) ? 'primary' : 'composite'} key`);
- const itemKey = key;
- // @ts-ignore
- const model = models[0];
- let item;
- if (core.isPriKey(itemKey)) {
- // This is the easy case because we can just find the item by its primary key
- logger$6.trace(`[GET] Executing ${model.name}.findByPk() with pk: ${itemKey.pk}`);
- item = await model.findByPk(itemKey.pk);
- } else if (core.isComKey(itemKey)) {
- // This is a composite key, so we need to build a where clause based on the composite key's locators
- const comKey = itemKey;
- const queryOptions = processCompositeKey$1(comKey, model, kta);
- logger$6.default('Composite key query', {
- queryOptions
- });
- logger$6.trace(`[GET] Executing ${model.name}.findOne() with options: ${stringifyJSON(queryOptions)}`);
- item = await model.findOne(queryOptions);
- }
- if (!item) {
- throw new Library.NotFoundError('get', coordinate, key);
- } else {
- // Get the current context from context manager
- const context = OperationContext.contextManager.getCurrentContext();
- const result = core.validateKeys(await processRow(item, kta, references, aggregations, registry, context), kta);
- logger$6.debug(`[GET] Retrieved ${model.name} with key: ${result.key ? JSON.stringify(result.key) : `id=${item.id}`}`);
- return result;
- }
- };
- return get;
- };
-
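For reference, get accepts either a primary or a composite key; the composite form carries locator pairs that processCompositeKey$1 turns into `${kt}Id` fields or relationship paths. A hedged sketch of the two key shapes as the code above reads them; the exact types come from @fjell/core, so anything beyond pk, loc, kt and lk here is an assumption:

    // Primary key: only pk is consulted by the get operation above.
    const priKey = { kt: 'task', pk: 7 };
    // Composite key: pk plus locator pairs; a direct locator becomes a `${kt}Id` where field.
    const comKey = { kt: 'task', pk: 7, loc: [{ kt: 'project', lk: 42 }] };
    // library.operations.get(priKey)  -> model.findByPk(7)
    // library.operations.get(comKey)  -> model.findOne({ where: { id: 7, projectId: 42 } })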
- const logger$5 = logger$1.default.get('sequelize', 'ops', 'one');
- const getOneOperation = (models, definition, registry)=>{
- const one = async (itemQuery, locations = [])=>{
- logger$5.debug(`ONE operation called on ${models[0].name} with ${locations.length} location filters: ${locations.map((loc)=>`${loc.kt}=${loc.lk}`).join(', ') || 'none'}`);
- logger$5.default(`One configured for ${models[0].name} delegating to all operation`);
- const items = await all.getAllOperation(models, definition, registry)(itemQuery, locations);
- if (items.length > 0) {
- const result = items[0];
- logger$5.debug(`[ONE] Found ${models[0].name} record with key: ${result.key ? JSON.stringify(result.key) : 'unknown'}`);
- return result;
- } else {
- logger$5.debug(`[ONE] No ${models[0].name} record found`);
- return null;
- }
- };
- return one;
- };
-
- const logger$4 = logger$1.default.get('sequelize', 'ops', 'remove');
- // Helper function to process composite key and build query options
- const processCompositeKey = (comKey, model, kta)=>{
- const where = {
- id: comKey.pk
- };
- const includes = [];
- for (const locator of comKey.loc){
- const relationshipInfo = relationshipUtils.buildRelationshipPath(model, locator.kt, kta);
- if (!relationshipInfo.found) {
- const errorMessage = `Composite key locator '${locator.kt}' cannot be resolved on model '${model.name}' or through its relationships.`;
- logger$4.error(errorMessage, {
- key: comKey,
- kta
- });
- throw new Error(errorMessage);
- }
- if (relationshipInfo.path) {
- // This requires a relationship traversal
- where[relationshipInfo.path] = locator.lk;
- if (relationshipInfo.includes) {
- includes.push(...relationshipInfo.includes);
- }
- } else {
- // This is a direct field
- const fieldName = `${locator.kt}Id`;
- where[fieldName] = locator.lk;
- }
- }
- const result = {
- where
- };
- if (includes.length > 0) {
- result.include = includes;
- }
- return result;
- };
- const getRemoveOperation = (models, definition, // eslint-disable-next-line @typescript-eslint/no-unused-vars
- registry)=>{
- const { coordinate, options } = definition;
- const { kta } = coordinate;
- const remove = async (key)=>{
- if (!core.isValidItemKey(key)) {
- logger$4.error('Key for Remove is not a valid ItemKey: %j', key);
- throw new Error('Key for Remove is not a valid ItemKey');
- }
- logger$4.debug(`REMOVE operation called on ${models[0].name} with ${core.isPriKey(key) ? `primary key: pk=${key.pk}` : `composite key: pk=${key.pk}, loc=[${key.loc.map((l)=>`${l.kt}=${l.lk}`).join(', ')}]`}`);
- logger$4.default(`Remove configured for ${models[0].name} with ${core.isPriKey(key) ? 'primary' : 'composite'} key`);
- // @ts-ignore
- const model = models[0];
- let item;
- let returnItem;
- logger$4.debug('remove: %s', core.abbrevIK(key));
- if (core.isPriKey(key)) {
- logger$4.debug(`[REMOVE] Executing ${model.name}.findByPk() with pk: ${key.pk}`);
- item = await model.findByPk(key.pk);
- } else if (core.isComKey(key)) {
- // This is a composite key, so we need to build a where clause based on the composite key's locators
- const comKey = key;
- const queryOptions = processCompositeKey(comKey, model, kta);
- logger$4.default(`Remove composite key query for ${model.name} with where fields: ${queryOptions.where ? Object.keys(queryOptions.where).join(', ') : 'none'}`);
- logger$4.debug(`[REMOVE] Executing ${model.name}.findOne() with options: ${stringifyJSON(queryOptions)}`);
- item = await model.findOne(queryOptions);
- }
- if (!item) {
- throw new Error(`Item not found for removal with key: ${core.abbrevIK(key)}`);
- }
- const isDeletedAttribute = model.getAttributes().isDeleted;
- const deletedAtAttribute = model.getAttributes().deletedAt;
- if (isDeletedAttribute || deletedAtAttribute) {
- if (model.getAttributes().isDeleted) {
- item.isDeleted = true;
- }
- if (model.getAttributes().deletedAt) {
- item.deletedAt = new Date();
- }
- // Save the object
- logger$4.debug(`[REMOVE] Executing ${model.name}.save() for soft delete`);
- await (item === null || item === void 0 ? void 0 : item.save());
- returnItem = item === null || item === void 0 ? void 0 : item.get({
- plain: true
- });
- returnItem = addKey(item, returnItem, kta);
- returnItem = populateEvents(returnItem);
- } else if (options.deleteOnRemove) {
- logger$4.debug(`[REMOVE] Executing ${model.name}.destroy() for hard delete`);
- await (item === null || item === void 0 ? void 0 : item.destroy());
- returnItem = item === null || item === void 0 ? void 0 : item.get({
- plain: true
- });
- returnItem = addKey(item, returnItem, kta);
- returnItem = populateEvents(returnItem);
- } else {
- throw new Error('No deletedAt or isDeleted attribute found in model, and deleteOnRemove is not set');
- }
- logger$4.debug(`[REMOVE] Removed ${model.name} with key: ${returnItem.key ? JSON.stringify(returnItem.key) : `id=${item.id}`}`);
- return returnItem;
- };
- return remove;
- };
-
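For reference, the remove operation above soft-deletes whenever the model defines an isDeleted or deletedAt attribute, and only hard-deletes when the library options set deleteOnRemove. A hedged sketch of a model definition that opts into the soft-delete path; the model name, columns and connection string are illustrative only:

    const { Sequelize, DataTypes } = require('sequelize');

    const sequelize = new Sequelize('sqlite::memory:');   // connection string is a placeholder
    // A model with isDeleted/deletedAt attributes will be flagged and saved rather than destroyed:
    const Task = sequelize.define('Task', {
      name: DataTypes.STRING,
      isDeleted: { type: DataTypes.BOOLEAN, defaultValue: false },
      deletedAt: DataTypes.DATE
    });
    // Without either column, remove() throws unless the library options include deleteOnRemove: true.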
- const logger$3 = logger$1.default.get('sequelize', 'ops', 'update');
- // Helper function to merge includes avoiding duplicates
- const mergeIncludes = (existingIncludes, newIncludes)=>{
- const mergedIncludes = [
- ...existingIncludes
- ];
- for (const newInclude of newIncludes){
- const existingIndex = mergedIncludes.findIndex((existing)=>existing.as === newInclude.as && existing.model === newInclude.model);
- if (existingIndex === -1) {
- mergedIncludes.push(newInclude);
- } else if (newInclude.include && mergedIncludes[existingIndex].include) {
- mergedIncludes[existingIndex].include = [
- ...mergedIncludes[existingIndex].include,
- ...newInclude.include
- ];
- } else if (newInclude.include) {
- mergedIncludes[existingIndex].include = newInclude.include;
- }
- }
- return mergedIncludes;
- };
- const getUpdateOperation = (models, definition, registry)=>{
- const { options: { references, aggregations } } = definition;
- const update = async (key, item)=>{
- logger$3.debug(`UPDATE operation called on ${models[0].name} with ${core.isPriKey(key) ? `primary key: pk=${key.pk}` : `composite key: pk=${key.pk}, loc=[${key.loc.map((l)=>`${l.kt}=${l.lk}`).join(', ')}]`}`);
- const { coordinate } = definition;
- const { kta } = coordinate;
- logger$3.debug('update: %s, %j', core.abbrevIK(key), item);
- // Find the object we're updating
- // @ts-ignore
- const model = models[0];
- let response;
- if (core.isPriKey(key)) {
- // Find the model by using the PK
- const priKey = key;
- logger$3.trace(`[UPDATE] Executing ${model.name}.findByPk() with pk: ${priKey.pk}`);
- response = await model.findByPk(priKey.pk);
- } else if (core.isComKey(key)) {
- const comKey = key;
- // Build query options for composite key with multiple location keys
- const where = {
- id: comKey.pk
- };
- const additionalIncludes = [];
- // Process all location keys in the composite key
- for (const locator of comKey.loc){
- const relationshipInfo = relationshipUtils.buildRelationshipPath(model, locator.kt, kta, true);
- if (!relationshipInfo.found) {
- const errorMessage = `Composite key locator '${locator.kt}' cannot be resolved on model '${model.name}' or through its relationships.`;
- logger$3.error(errorMessage, {
- key: comKey,
- kta
- });
- throw new Error(errorMessage);
- }
- if (relationshipInfo.isDirect) {
- // Direct foreign key field
- const fieldName = `${locator.kt}Id`;
- where[fieldName] = locator.lk;
- } else if (relationshipInfo.path) {
- // Hierarchical relationship requiring traversal
- where[relationshipInfo.path] = {
- [sequelize.Op.eq]: locator.lk
- };
- // Add necessary includes for relationship traversal
- if (relationshipInfo.includes) {
- additionalIncludes.push(...relationshipInfo.includes);
- }
- }
- }
- // Build final query options
- const queryOptions = {
- where
- };
- if (additionalIncludes.length > 0) {
- queryOptions.include = mergeIncludes([], additionalIncludes);
- }
- logger$3.default(`Update composite key query for ${model.name} with where fields: ${queryOptions.where ? Object.keys(queryOptions.where).join(', ') : 'none'}`);
- logger$3.trace(`[UPDATE] Executing ${model.name}.findOne() with options: ${stringifyJSON(queryOptions)}`);
- response = await model.findOne(queryOptions);
- }
- if (response) {
- // Remove the key and events
- let updateProps = removeKey(item);
- // TODO: We need the opposite of processRow, something to step down from fjell to database.
- updateProps = extractEvents(updateProps);
- updateProps = removeEvents(updateProps);
- logger$3.default(`Update found ${model.name} record to modify`);
- logger$3.default(`Update properties configured: ${Object.keys(updateProps).join(', ')}`);
- // Update the object
- logger$3.trace(`[UPDATE] Executing ${model.name}.update() with properties: ${stringifyJSON(updateProps)}`);
- response = await response.update(updateProps);
- // Populate the key and events
- const processedItem = await processRow(response, kta, references, aggregations, registry);
- const returnItem = core.validateKeys(processedItem, kta);
- logger$3.debug(`[UPDATE] Updated ${model.name} with key: ${returnItem.key ? JSON.stringify(returnItem.key) : `id=${response.id}`}`);
- return returnItem;
- } else {
- throw new Library.NotFoundError('update', coordinate, key);
- }
- };
- return update;
- };
-
- const createOperations = (models, coordinate, registry, options)=>{
- const operations = {};
- // Create a definition-like object for backward compatibility with existing operation functions
- const definition = {
- coordinate,
- options
- };
- operations.all = all.getAllOperation(models, definition, registry);
- operations.one = getOneOperation(models, definition, registry);
- operations.create = getCreateOperation(models, definition, registry);
- operations.update = getUpdateOperation(models, definition, registry);
- operations.get = getGetOperation(models, definition, registry);
- operations.remove = getRemoveOperation(models, definition);
- operations.find = getFindOperation(models, definition, registry);
- operations.upsert = async ()=>{
- throw new Error('Not implemented');
- };
- return operations;
- };
-
- const logger$2 = logger$1.default.get("SequelizeLibrary");
- /**
- * Creates a new SequelizeLibrary that extends the fjell-lib Library
- * with Sequelize-specific functionality
- */ const createSequelizeLibrary$2 = (registry, coordinate, models, options)=>{
- logger$2.debug("createSequelizeLibrary", {
- coordinate,
- models,
- registry,
- options
- });
- // Create Sequelize-specific operations
- const operations = createOperations(models, coordinate, registry, options);
- // Create the base fjell-lib library
- const libLibrary = Library__namespace.createLibrary(registry, coordinate, operations, options);
- return {
- ...libLibrary,
- models
- };
- };
- /**
- * Type guard to check if an object is a SequelizeLibrary
- */ const isSequelizeLibrary = (library)=>{
- return library != null && library.coordinate != null && library.operations != null && library.options != null && library.registry != null && library.models != null && Array.isArray(library.models);
- };
-
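For reference, createSequelizeLibrary (exported at the bottom of this file) wires createOperations into a fjell-lib Library and attaches the models array, which is exactly what the isSequelizeLibrary type guard checks. A hedged usage sketch built only from the exports visible in this diff; the registry and model arguments are placeholders supplied by the caller:

    const {
      createCoordinate, createOptions, createSequelizeLibrary, isSequelizeLibrary
    } = require('@fjell/lib-sequelize');

    // `registry` is assumed to come from the fjell registry, `TaskModel` from sequelize.define(...).
    function buildLibrary(registry, TaskModel) {
      const coordinate = createCoordinate(['task'], []);
      const options = createOptions({});
      const library = createSequelizeLibrary(registry, coordinate, [TaskModel], options);
      console.log(isSequelizeLibrary(library));  // true: coordinate, operations, options, registry and models are present
      return library;
    }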
- const logger$1 = logger$1.default.get("InstanceFactory");
- /**
- * Factory function for creating Sequelize libraries
- * This extends the fjell-lib pattern by adding Sequelize-specific models
- */ const createSequelizeLibraryFactory = (models, options)=>{
- return (coordinate, context)=>{
- logger$1.debug("Creating Sequelize instance", {
- coordinate,
- registry: context.registry,
- models: models.map((m)=>m.name),
- options
- });
- return createSequelizeLibrary$2(context.registry, coordinate, models, options);
- };
- };
-
- function createSequelizeLibrary$1(keyTypes, models, libOptions = {}, scopes = [], registry) {
- // Create coordinate and options separately following new pattern
- const coordinate = createCoordinate(keyTypes, scopes);
- const options = createOptions(libOptions);
- // Create operations with the new signature
- const operations = createOperations(models, coordinate, registry, options);
- // Wrap operations for contained pattern
- const wrappedOperations = Library.Contained.wrapOperations(operations, options, coordinate, registry);
- return {
- coordinate,
- registry,
- operations: wrappedOperations,
- options,
- models
- };
- }
- // Legacy exports for backwards compatibility
- const createInstance$1 = createSequelizeLibrary$1;
-
- const index$1 = /*#__PURE__*/Object.freeze(/*#__PURE__*/Object.defineProperty({
- __proto__: null,
- createInstance: createInstance$1,
- createSequelizeLibrary: createSequelizeLibrary$1
- }, Symbol.toStringTag, { value: 'Module' }));
-
- const logger = logger$1.default.get('lib-sequelize', 'primary', 'instance');
- function createSequelizeLibrary(keyType, models, libOptions = {}, scopes = [], registry) {
- logger.debug('createSequelizeLibrary', {
- keyType,
- models,
- libOptions,
- scopes
- });
- // Create coordinate and options separately following new pattern
- const coordinate = createCoordinate([
- keyType
- ], scopes);
- const options = createOptions(libOptions);
- // Create operations with the new signature
- const operations = createOperations(models, coordinate, registry, options);
- // Wrap operations for primary pattern
- const wrappedOperations = Library.Primary.wrapOperations(operations, options, coordinate, registry);
- return {
- coordinate,
- registry,
- operations: wrappedOperations,
- options,
- models
- };
- }
- // Legacy exports for backwards compatibility
- const createInstance = createSequelizeLibrary;
-
- const index = /*#__PURE__*/Object.freeze(/*#__PURE__*/Object.defineProperty({
- __proto__: null,
- createInstance,
- createSequelizeLibrary
- }, Symbol.toStringTag, { value: 'Module' }));
-
- exports.Contained = index$1;
- exports.Primary = index;
- exports.SCOPE_SEQUELIZE = SCOPE_SEQUELIZE;
- exports.createCoordinate = createCoordinate;
- exports.createDefinition = createDefinition;
- exports.createOperations = createOperations;
- exports.createOptions = createOptions;
- exports.createSequelizeLibrary = createSequelizeLibrary$2;
- exports.createSequelizeLibraryFactory = createSequelizeLibraryFactory;
- exports.isSequelizeLibrary = isSequelizeLibrary;
- //# sourceMappingURL=index.cjs.map
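For reference, these CommonJS exports are the removed bundle's public surface. A hedged sketch of consuming the Primary helper defined just above; the key type, model and registry arguments are placeholders, and the empty scopes array simply takes the default shown in the signature:

    const { Primary } = require('@fjell/lib-sequelize');

    // Primary.createSequelizeLibrary(keyType, models, libOptions = {}, scopes = [], registry)
    // builds the coordinate and options, creates the Sequelize-backed operations,
    // and wraps them for the primary pattern.
    function buildPrimaryLibrary(TaskModel, registry) {
      return Primary.createSequelizeLibrary('task', [TaskModel], {}, [], registry);
    }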