@fjell/lib-sequelize 4.4.5 → 4.4.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/Coordinate.cjs +9 -22
- package/dist/cjs/Definition.cjs +5 -26
- package/dist/cjs/Instance.cjs +26 -10
- package/dist/cjs/InstanceFactory.cjs +25 -0
- package/dist/cjs/Operations.cjs +7 -2
- package/dist/cjs/Options.cjs +14 -7
- package/dist/cjs/QueryBuilder.cjs +31 -25
- package/dist/cjs/contained/Instance.cjs +15 -8
- package/dist/cjs/index.cjs +7 -4
- package/dist/cjs/ops/all.cjs +44 -20
- package/dist/cjs/ops/create.cjs +138 -40
- package/dist/cjs/ops/find.cjs +9 -7
- package/dist/cjs/ops/get.cjs +9 -5
- package/dist/cjs/ops/one.cjs +7 -6
- package/dist/cjs/ops/remove.cjs +10 -7
- package/dist/cjs/ops/update.cjs +10 -7
- package/dist/cjs/primary/Instance.cjs +16 -9
- package/dist/cjs/util/general.cjs +1 -5
- package/dist/es/Coordinate.js +9 -3
- package/dist/es/Definition.js +5 -7
- package/dist/es/Instance.js +26 -11
- package/dist/es/InstanceFactory.js +21 -0
- package/dist/es/Operations.js +7 -2
- package/dist/es/Options.js +14 -7
- package/dist/es/QueryBuilder.js +31 -25
- package/dist/es/contained/Instance.js +15 -8
- package/dist/es/index.js +4 -3
- package/dist/es/ops/all.js +44 -20
- package/dist/es/ops/create.js +139 -41
- package/dist/es/ops/find.js +9 -7
- package/dist/es/ops/get.js +9 -5
- package/dist/es/ops/one.js +7 -6
- package/dist/es/ops/remove.js +11 -8
- package/dist/es/ops/update.js +11 -8
- package/dist/es/primary/Instance.js +16 -9
- package/dist/es/util/general.js +2 -5
- package/dist/index.cjs +412 -216
- package/dist/index.cjs.map +1 -1
- package/dist/types/AggregationBuilder.d.ts +1 -1
- package/dist/types/Coordinate.d.ts +3 -2
- package/dist/types/Definition.d.ts +3 -3
- package/dist/types/Instance.d.ts +22 -2
- package/dist/types/InstanceFactory.d.ts +14 -0
- package/dist/types/Operations.d.ts +3 -2
- package/dist/types/Options.d.ts +1 -1
- package/dist/types/Registry.d.ts +6 -0
- package/dist/types/contained/Instance.d.ts +3 -3
- package/dist/types/index.d.ts +4 -1
- package/dist/types/primary/Instance.d.ts +2 -2
- package/package.json +23 -23
package/dist/cjs/ops/create.cjs
CHANGED
@@ -7,47 +7,114 @@ const logger$1 = require('../logger.cjs');
 const RowProcessor = require('../RowProcessor.cjs');
 const EventCoordinator = require('../EventCoordinator.cjs');
 const relationshipUtils = require('../util/relationshipUtils.cjs');
+const general = require('../util/general.cjs');
 
 const logger = logger$1.default.get('sequelize', 'ops', 'create');
+// Helper function to translate PostgreSQL errors to meaningful messages
+function translateDatabaseError(error, itemData, modelName) {
+    var _error_original, _error_original1, _error_original2;
+    const originalMessage = error.message || '';
+    const errorCode = (_error_original = error.original) === null || _error_original === void 0 ? void 0 : _error_original.code;
+    const constraint = (_error_original1 = error.original) === null || _error_original1 === void 0 ? void 0 : _error_original1.constraint;
+    const detail = (_error_original2 = error.original) === null || _error_original2 === void 0 ? void 0 : _error_original2.detail;
+    logger.error('Database error during create operation', {
+        errorCode,
+        constraint,
+        detail,
+        originalMessage,
+        modelName,
+        itemData: JSON.stringify(itemData, null, 2)
+    });
+    // Handle specific PostgreSQL error codes
+    switch(errorCode){
+        case '23505':
+            if (constraint) {
+                return new Error(`Duplicate value violates unique constraint '${constraint}'. ${detail || ''}`);
+            }
+            return new Error(`Duplicate value detected. This record already exists. ${detail || ''}`);
+        case '23503':
+            if (constraint) {
+                return new Error(`Foreign key constraint '${constraint}' violated. Referenced record does not exist. ${detail || ''}`);
+            }
+            return new Error(`Referenced record does not exist. Check that all related records are valid. ${detail || ''}`);
+        case '23502':
+            var _error_original3;
+            const column = (_error_original3 = error.original) === null || _error_original3 === void 0 ? void 0 : _error_original3.column;
+            if (column) {
+                return new Error(`Required field '${column}' cannot be null`);
+            }
+            return new Error(`Required field is missing or null`);
+        case '23514':
+            if (constraint) {
+                return new Error(`Check constraint '${constraint}' violated. ${detail || ''}`);
+            }
+            return new Error(`Data validation failed. Check constraint violated. ${detail || ''}`);
+        case '22001':
+            return new Error(`Data too long for field. Check string lengths. ${detail || ''}`);
+        case '22003':
+            return new Error(`Numeric value out of range. Check number values. ${detail || ''}`);
+        case '42703':
+            var _error_original4;
+            const undefinedColumn = (_error_original4 = error.original) === null || _error_original4 === void 0 ? void 0 : _error_original4.column;
+            if (undefinedColumn) {
+                return new Error(`Column '${undefinedColumn}' does not exist in table '${modelName}'`);
+            }
+            return new Error(`Referenced column does not exist`);
+        case '42P01':
+            return new Error(`Table '${modelName}' does not exist`);
+        default:
+            // For unknown errors, provide the original message with context
+            return new Error(`Database error in ${modelName}.create(): ${originalMessage}. Item data: ${JSON.stringify(itemData, null, 2)}`);
+    }
+}
 // Helper function to validate hierarchical chain exists
 async function validateHierarchicalChain(models, locKey, kta) {
-
-
-
-
-
-
-
-
-
-
-
-
+    try {
+        // Find the direct parent model that contains this locator
+        const locatorIndex = kta.indexOf(locKey.kt);
+        if (locatorIndex === -1) {
+            throw new Error(`Locator type '${locKey.kt}' not found in kta array`);
+        }
+        // Get the model for this locator
+        const locatorModel = models[locatorIndex] || models[0]; // Fallback to primary model
+        // Build a query to validate the chain exists
+        const chainResult = relationshipUtils.buildRelationshipChain(locatorModel, kta, locatorIndex, kta.length - 1);
+        if (!chainResult.success) {
+            // If we can't build a chain, just validate the record exists
+            const record = await locatorModel.findByPk(locKey.lk);
+            if (!record) {
+                throw new Error(`Referenced ${locKey.kt} with id ${locKey.lk} does not exist`);
+            }
+            return;
+        }
+        // Validate that the chain exists
+        const queryOptions = {
+            where: {
+                id: locKey.lk
+            }
+        };
+        if (chainResult.includes && chainResult.includes.length > 0) {
+            queryOptions.include = chainResult.includes;
+        }
+        const record = await locatorModel.findOne(queryOptions);
         if (!record) {
-            throw new Error(`Referenced ${locKey.kt} with id ${locKey.lk} does not exist`);
+            throw new Error(`Referenced ${locKey.kt} with id ${locKey.lk} does not exist or chain is invalid`);
         }
-
-
-
-
-
-
+    } catch (error) {
+        // Add context to validation errors
+        if (error.original) {
+            throw translateDatabaseError(error, {
+                locKey,
+                kta
+            }, locKey.kt);
        }
-
-        if (chainResult.includes && chainResult.includes.length > 0) {
-            queryOptions.include = chainResult.includes;
-        }
-        const record = await locatorModel.findOne(queryOptions);
-        if (!record) {
-            throw new Error(`Referenced ${locKey.kt} with id ${locKey.lk} does not exist or chain is invalid`);
+        throw error;
     }
 }
 const getCreateOperation = (models, definition, registry)=>{
     const create = async (item, options)=>{
-        logger.
-
-            options
-        });
+        logger.debug(`CREATE operation called on ${models[0].name} with ${(options === null || options === void 0 ? void 0 : options.key) ? `key: pk=${options.key.pk}, loc=[${core.isComKey(options.key) ? options.key.loc.map((l)=>`${l.kt}=${l.lk}`).join(', ') : ''}]` : (options === null || options === void 0 ? void 0 : options.locations) ? `locations: ${options.locations.map((loc)=>`${loc.kt}=${loc.lk}`).join(', ')}` : 'no constraints'}`);
+        logger.default(`Create configured for ${models[0].name} with ${Object.keys(item).length} item fields`);
         const { coordinate, options: { references, aggregations } } = definition;
         const { kta } = coordinate;
         // Get the primary model (first model in array)
@@ -60,11 +127,17 @@ const getCreateOperation = (models, definition, registry)=>{
         // TODO: We need the opposite of processRow, something to step down from fjell to database.
         itemData = EventCoordinator.extractEvents(itemData);
         itemData = EventCoordinator.removeEvents(itemData);
+        // Validate that all item attributes exist on the model
+        const invalidAttributes = [];
         for (const key of Object.keys(itemData)){
             if (!modelAttributes[key]) {
-
+                invalidAttributes.push(key);
             }
         }
+        if (invalidAttributes.length > 0) {
+            const availableAttributes = Object.keys(modelAttributes).join(', ');
+            throw new Error(`Invalid attributes for model '${model.name}': [${invalidAttributes.join(', ')}]. ` + `Available attributes: [${availableAttributes}]. ` + `Item data: ${JSON.stringify(itemData, null, 2)}`);
+        }
         // Handle key options
         // If a key is supplied, assume its contents are to be assigned to the appropriate ids.
         // For most cases this will be null as key generation is often through autoIncrement.
@@ -85,10 +158,12 @@ const getCreateOperation = (models, definition, registry)=>{
             for (const locKey of comKey.loc){
                 const relationshipInfo = relationshipUtils.buildRelationshipPath(model, locKey.kt, kta, true);
                 if (!relationshipInfo.found) {
-                    const
+                    const associations = model.associations ? Object.keys(model.associations) : [];
+                    const errorMessage = `Composite key locator '${locKey.kt}' cannot be resolved on model '${model.name}' or through its relationships. ` + `Available associations: [${associations.join(', ')}]. ` + `KTA: [${kta.join(', ')}]. ` + `Composite key: ${JSON.stringify(comKey, null, 2)}`;
                     logger.error(errorMessage, {
                         key: comKey,
-                        kta
+                        kta,
+                        associations
                     });
                     throw new Error(errorMessage);
                 }
@@ -100,6 +175,13 @@ const getCreateOperation = (models, definition, registry)=>{
             }
             // Set direct foreign keys
             for (const locKey of directLocations){
+                if (locKey.lk == null || locKey.lk === '') {
+                    logger.error(`Composite key location '${locKey.kt}' has undefined/null lk value`, {
+                        locKey,
+                        key: comKey
+                    });
+                    throw new Error(`Composite key location '${locKey.kt}' has undefined/null lk value`);
+                }
                 const foreignKeyField = locKey.kt + 'Id';
                 itemData[foreignKeyField] = locKey.lk;
             }
@@ -118,10 +200,12 @@ const getCreateOperation = (models, definition, registry)=>{
             for (const locKey of options.locations){
                 const relationshipInfo = relationshipUtils.buildRelationshipPath(model, locKey.kt, kta, true);
                 if (!relationshipInfo.found) {
-                    const
+                    const associations = model.associations ? Object.keys(model.associations) : [];
+                    const errorMessage = `Location key '${locKey.kt}' cannot be resolved on model '${model.name}' or through its relationships. ` + `Available associations: [${associations.join(', ')}]. ` + `KTA: [${kta.join(', ')}]. ` + `Locations: ${JSON.stringify(options.locations, null, 2)}`;
                     logger.error(errorMessage, {
                         locations: options.locations,
-                        kta
+                        kta,
+                        associations
                     });
                     throw new Error(errorMessage);
                 }
@@ -133,6 +217,13 @@ const getCreateOperation = (models, definition, registry)=>{
             }
             // Set direct foreign keys
             for (const locKey of directLocations){
+                if (locKey.lk == null || locKey.lk === '') {
+                    logger.error(`Location option '${locKey.kt}' has undefined/null lk value`, {
+                        locKey,
+                        locations: options.locations
+                    });
+                    throw new Error(`Location option '${locKey.kt}' has undefined/null lk value`);
+                }
                 const foreignKeyField = locKey.kt + 'Id';
                 itemData[foreignKeyField] = locKey.lk;
             }
@@ -142,13 +233,20 @@ const getCreateOperation = (models, definition, registry)=>{
             }
         }
         // Create the record
-
-
-
-
+        try {
+            logger.trace(`[CREATE] Executing ${model.name}.create() with data: ${general.stringifyJSON(itemData)}`);
+            const createdRecord = await model.create(itemData);
+            // Add key and events
+            const processedRecord = await RowProcessor.processRow(createdRecord, kta, references, aggregations, registry);
+            const result = core.validateKeys(processedRecord, kta);
+            logger.debug(`[CREATE] Created ${model.name} with key: ${result.key ? JSON.stringify(result.key) : `id=${createdRecord.id}`}`);
+            return result;
+        } catch (error) {
+            throw translateDatabaseError(error, itemData, model.name);
+        }
     };
     return create;
 };
 
 exports.getCreateOperation = getCreateOperation;
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,
+//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9u…
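The new translateDatabaseError helper means a failed create() now surfaces a readable message instead of a raw Sequelize/PostgreSQL error, and unknown attributes are rejected before the INSERT. A minimal caller-side sketch, assuming models, definition, and registry were assembled elsewhere (for example via this package's instance factories); the deep require path mirrors the file above and may not be exposed by the package's export map, and the widget fields are illustrative only:

const { getCreateOperation } = require('@fjell/lib-sequelize/dist/cjs/ops/create.cjs'); // illustrative deep path

async function createWidget(models, definition, registry) {
  const create = getCreateOperation(models, definition, registry);
  try {
    // Attributes not defined on the model now fail fast with the list of available attributes.
    return await create({ name: 'widget-1' });
  } catch (err) {
    // A PostgreSQL unique_violation (code 23505) now reads roughly:
    // "Duplicate value violates unique constraint '<constraint name>'. <detail>"
    console.error(err.message);
    throw err;
  }
}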
package/dist/cjs/ops/find.cjs
CHANGED
@@ -5,28 +5,30 @@ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
 const core = require('@fjell/core');
 const logger$1 = require('../logger.cjs');
 const RowProcessor = require('../RowProcessor.cjs');
+const general = require('../util/general.cjs');
 
 const logger = logger$1.default.get('sequelize', 'ops', 'find');
 const getFindOperation = (models, definition, registry)=>{
     const { options: { finders, references, aggregations } } = definition;
     const find = async (finder, finderParams, locations)=>{
-        logger.
-
-            finderParams,
-            locations
-        });
+        logger.debug(`FIND operation called on ${models[0].name} with finder '${finder}' and ${(locations === null || locations === void 0 ? void 0 : locations.length) || 0} location filters: ${(locations === null || locations === void 0 ? void 0 : locations.map((loc)=>`${loc.kt}=${loc.lk}`).join(', ')) || 'none'}`);
+        logger.default(`Find configured for ${models[0].name} using finder '${finder}' with ${Object.keys(finderParams).length} params`);
         // Note that we execute the createFinders function here because we want to make sure we're always getting the
         // most up to date methods.
         if (finders && finders[finder]) {
             const finderMethod = finders[finder];
             if (finderMethod) {
+                logger.trace(`[FIND] Executing finder '${finder}' on ${models[0].name} with params: ${general.stringifyJSON(finderParams)}, locations: ${general.stringifyJSON(locations)}`);
                 const results = await finderMethod(finderParams, locations);
                 if (results && results.length > 0) {
-
+                    const processedResults = await Promise.all(results.map(async (row)=>{
                         const processedRow = await RowProcessor.processRow(row, definition.coordinate.kta, references, aggregations, registry);
                         return core.validateKeys(processedRow, definition.coordinate.kta);
                     }));
+                    logger.debug(`[FIND] Found ${processedResults.length} ${models[0].name} records using finder '${finder}'`);
+                    return processedResults;
                 } else {
+                    logger.debug(`[FIND] Found 0 ${models[0].name} records using finder '${finder}'`);
                     return [];
                 }
             } else {
@@ -42,4 +44,4 @@ const getFindOperation = (models, definition, registry)=>{
 };
 
 exports.getFindOperation = getFindOperation;
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,
+//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9u…
package/dist/cjs/ops/get.cjs
CHANGED
@@ -8,6 +8,7 @@ const RowProcessor = require('../RowProcessor.cjs');
 const Library = require('@fjell/lib');
 const relationshipUtils = require('../util/relationshipUtils.cjs');
 const OperationContext = require('../OperationContext.cjs');
+const general = require('../util/general.cjs');
 
 const logger = logger$1.default.get('sequelize', 'ops', 'get');
 // Helper function to process composite key and build query options
@@ -50,19 +51,19 @@ const getGetOperation = (models, definition, registry)=>{
     const { coordinate, options: { references, aggregations } } = definition;
     const { kta } = coordinate;
     const get = async (key)=>{
-        logger.default('Get', {
-            key
-        });
         if (!core.isValidItemKey(key)) {
             logger.error('Key for Get is not a valid ItemKey: %j', key);
             throw new Error('Key for Get is not a valid ItemKey');
         }
+        logger.debug(`GET operation called on ${models[0].name} with ${core.isPriKey(key) ? `primary key: pk=${key.pk}` : `composite key: pk=${key.pk}, loc=[${key.loc.map((l)=>`${l.kt}=${l.lk}`).join(', ')}]`}`);
+        logger.default(`Get configured for ${models[0].name} with ${core.isPriKey(key) ? 'primary' : 'composite'} key`);
         const itemKey = key;
         // @ts-ignore
         const model = models[0];
         let item;
         if (core.isPriKey(itemKey)) {
             // This is the easy case because we can just find the item by its primary key
+            logger.trace(`[GET] Executing ${model.name}.findByPk() with pk: ${itemKey.pk}`);
             item = await model.findByPk(itemKey.pk);
         } else if (core.isComKey(itemKey)) {
             // This is a composite key, so we need to build a where clause based on the composite key's locators
@@ -71,6 +72,7 @@ const getGetOperation = (models, definition, registry)=>{
             logger.default('Composite key query', {
                 queryOptions
             });
+            logger.trace(`[GET] Executing ${model.name}.findOne() with options: ${general.stringifyJSON(queryOptions)}`);
             item = await model.findOne(queryOptions);
         }
         if (!item) {
@@ -78,11 +80,13 @@ const getGetOperation = (models, definition, registry)=>{
         } else {
             // Get the current context from context manager
             const context = OperationContext.contextManager.getCurrentContext();
-
+            const result = core.validateKeys(await RowProcessor.processRow(item, kta, references, aggregations, registry, context), kta);
+            logger.debug(`[GET] Retrieved ${model.name} with key: ${result.key ? JSON.stringify(result.key) : `id=${item.id}`}`);
+            return result;
         }
     };
     return get;
 };
 
 exports.getGetOperation = getGetOperation;
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,
+//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9u…
package/dist/cjs/ops/one.cjs
CHANGED
@@ -8,14 +8,15 @@ const all = require('./all.cjs');
 const logger = logger$1.default.get('sequelize', 'ops', 'one');
 const getOneOperation = (models, definition, registry)=>{
     const one = async (itemQuery, locations = [])=>{
-        logger.
-
-            locations
-        });
+        logger.debug(`ONE operation called on ${models[0].name} with ${locations.length} location filters: ${locations.map((loc)=>`${loc.kt}=${loc.lk}`).join(', ') || 'none'}`);
+        logger.default(`One configured for ${models[0].name} delegating to all operation`);
         const items = await all.getAllOperation(models, definition, registry)(itemQuery, locations);
         if (items.length > 0) {
-
+            const result = items[0];
+            logger.debug(`[ONE] Found ${models[0].name} record with key: ${result.key ? JSON.stringify(result.key) : 'unknown'}`);
+            return result;
         } else {
+            logger.debug(`[ONE] No ${models[0].name} record found`);
             return null;
         }
     };
@@ -23,4 +24,4 @@ const getOneOperation = (models, definition, registry)=>{
 };
 
 exports.getOneOperation = getOneOperation;
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,
+//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9u…
package/dist/cjs/ops/remove.cjs
CHANGED
@@ -7,6 +7,7 @@ const EventCoordinator = require('../EventCoordinator.cjs');
 const KeyMaster = require('../KeyMaster.cjs');
 const logger$1 = require('../logger.cjs');
 const relationshipUtils = require('../util/relationshipUtils.cjs');
+const general = require('../util/general.cjs');
 
 const logger = logger$1.default.get('sequelize', 'ops', 'remove');
 // Helper function to process composite key and build query options
@@ -50,27 +51,26 @@ registry)=>{
     const { coordinate, options } = definition;
     const { kta } = coordinate;
     const remove = async (key)=>{
-        logger.default('Remove', {
-            key
-        });
         if (!core.isValidItemKey(key)) {
             logger.error('Key for Remove is not a valid ItemKey: %j', key);
             throw new Error('Key for Remove is not a valid ItemKey');
         }
+        logger.debug(`REMOVE operation called on ${models[0].name} with ${core.isPriKey(key) ? `primary key: pk=${key.pk}` : `composite key: pk=${key.pk}, loc=[${key.loc.map((l)=>`${l.kt}=${l.lk}`).join(', ')}]`}`);
+        logger.default(`Remove configured for ${models[0].name} with ${core.isPriKey(key) ? 'primary' : 'composite'} key`);
         // @ts-ignore
         const model = models[0];
         let item;
         let returnItem;
         logger.debug('remove: %s', core.abbrevIK(key));
         if (core.isPriKey(key)) {
+            logger.debug(`[REMOVE] Executing ${model.name}.findByPk() with pk: ${key.pk}`);
             item = await model.findByPk(key.pk);
         } else if (core.isComKey(key)) {
             // This is a composite key, so we need to build a where clause based on the composite key's locators
             const comKey = key;
             const queryOptions = processCompositeKey(comKey, model, kta);
-            logger.default(
-
-            });
+            logger.default(`Remove composite key query for ${model.name} with where fields: ${queryOptions.where ? Object.keys(queryOptions.where).join(', ') : 'none'}`);
+            logger.debug(`[REMOVE] Executing ${model.name}.findOne() with options: ${general.stringifyJSON(queryOptions)}`);
             item = await model.findOne(queryOptions);
         }
         if (!item) {
@@ -86,6 +86,7 @@ registry)=>{
                 item.deletedAt = new Date();
             }
             // Save the object
+            logger.debug(`[REMOVE] Executing ${model.name}.save() for soft delete`);
             await (item === null || item === void 0 ? void 0 : item.save());
             returnItem = item === null || item === void 0 ? void 0 : item.get({
                 plain: true
@@ -93,6 +94,7 @@ registry)=>{
             returnItem = KeyMaster.addKey(item, returnItem, kta);
             returnItem = EventCoordinator.populateEvents(returnItem);
         } else if (options.deleteOnRemove) {
+            logger.debug(`[REMOVE] Executing ${model.name}.destroy() for hard delete`);
             await (item === null || item === void 0 ? void 0 : item.destroy());
             returnItem = item === null || item === void 0 ? void 0 : item.get({
                 plain: true
@@ -102,10 +104,11 @@ registry)=>{
         } else {
             throw new Error('No deletedAt or isDeleted attribute found in model, and deleteOnRemove is not set');
         }
+        logger.debug(`[REMOVE] Removed ${model.name} with key: ${returnItem.key ? JSON.stringify(returnItem.key) : `id=${item.id}`}`);
        return returnItem;
     };
     return remove;
 };
 
 exports.getRemoveOperation = getRemoveOperation;
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,
+//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9u…
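As the branches above show, remove() soft-deletes when the model carries a deletedAt or isDeleted attribute, hard-deletes via destroy() when deleteOnRemove is enabled in the library options, and otherwise throws. A minimal sketch of a plain Sequelize model that would take the soft-delete path; the model name, columns, and the sqlite connection are assumptions for illustration only:

const { Sequelize, DataTypes } = require('sequelize');

const sequelize = new Sequelize('sqlite::memory:');
const Task = sequelize.define('Task', {
  name: { type: DataTypes.STRING, allowNull: false },
  // Presence of this attribute is what steers remove() into the soft-delete branch:
  // it stamps deletedAt and calls save() instead of destroy().
  deletedAt: { type: DataTypes.DATE, allowNull: true }
});
// A model without deletedAt/isDeleted needs deleteOnRemove in the library options,
// otherwise remove() throws the error quoted in the diff above.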
package/dist/cjs/ops/update.cjs
CHANGED
@@ -7,10 +7,10 @@ const EventCoordinator = require('../EventCoordinator.cjs');
 const KeyMaster = require('../KeyMaster.cjs');
 const logger$1 = require('../logger.cjs');
 const RowProcessor = require('../RowProcessor.cjs');
-const general = require('../util/general.cjs');
 const Library = require('@fjell/lib');
 const sequelize = require('sequelize');
 const relationshipUtils = require('../util/relationshipUtils.cjs');
+const general = require('../util/general.cjs');
 
 const logger = logger$1.default.get('sequelize', 'ops', 'update');
 // Helper function to merge includes avoiding duplicates
@@ -36,6 +36,7 @@ const mergeIncludes = (existingIncludes, newIncludes)=>{
 const getUpdateOperation = (models, definition, registry)=>{
     const { options: { references, aggregations } } = definition;
     const update = async (key, item)=>{
+        logger.debug(`UPDATE operation called on ${models[0].name} with ${core.isPriKey(key) ? `primary key: pk=${key.pk}` : `composite key: pk=${key.pk}, loc=[${key.loc.map((l)=>`${l.kt}=${l.lk}`).join(', ')}]`}`);
         const { coordinate } = definition;
         const { kta } = coordinate;
         logger.debug('update: %s, %j', core.abbrevIK(key), item);
@@ -46,6 +47,7 @@ const getUpdateOperation = (models, definition, registry)=>{
         if (core.isPriKey(key)) {
             // Find the model by using the PK
             const priKey = key;
+            logger.trace(`[UPDATE] Executing ${model.name}.findByPk() with pk: ${priKey.pk}`);
             response = await model.findByPk(priKey.pk);
         } else if (core.isComKey(key)) {
             const comKey = key;
@@ -87,9 +89,8 @@ const getUpdateOperation = (models, definition, registry)=>{
             if (additionalIncludes.length > 0) {
                 queryOptions.include = mergeIncludes([], additionalIncludes);
             }
-            logger.default(
-
-            });
+            logger.default(`Update composite key query for ${model.name} with where fields: ${queryOptions.where ? Object.keys(queryOptions.where).join(', ') : 'none'}`);
+            logger.trace(`[UPDATE] Executing ${model.name}.findOne() with options: ${general.stringifyJSON(queryOptions)}`);
             response = await model.findOne(queryOptions);
         }
         if (response) {
@@ -98,13 +99,15 @@ const getUpdateOperation = (models, definition, registry)=>{
             // TODO: We need the opposite of processRow, something to step down from fjell to database.
             updateProps = EventCoordinator.extractEvents(updateProps);
             updateProps = EventCoordinator.removeEvents(updateProps);
-            logger.default(
-            logger.default(
+            logger.default(`Update found ${model.name} record to modify`);
+            logger.default(`Update properties configured: ${Object.keys(updateProps).join(', ')}`);
             // Update the object
+            logger.trace(`[UPDATE] Executing ${model.name}.update() with properties: ${general.stringifyJSON(updateProps)}`);
             response = await response.update(updateProps);
             // Populate the key and events
             const processedItem = await RowProcessor.processRow(response, kta, references, aggregations, registry);
             const returnItem = core.validateKeys(processedItem, kta);
+            logger.debug(`[UPDATE] Updated ${model.name} with key: ${returnItem.key ? JSON.stringify(returnItem.key) : `id=${response.id}`}`);
             return returnItem;
         } else {
             throw new Library.NotFoundError('update', coordinate, key);
@@ -114,4 +117,4 @@ const getUpdateOperation = (models, definition, registry)=>{
 };
 
 exports.getUpdateOperation = getUpdateOperation;
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,
+//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9u…
package/dist/cjs/primary/Instance.cjs
CHANGED
@@ -3,8 +3,9 @@
 Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
 
 const Library = require('@fjell/lib');
-const Definition = require('../Definition.cjs');
 const Operations = require('../Operations.cjs');
+const Options = require('../Options.cjs');
+const Coordinate = require('../Coordinate.cjs');
 const logger$1 = require('../logger.cjs');
 
 const logger = logger$1.default.get('lib-sequelize', 'primary', 'instance');
@@ -15,17 +16,23 @@ function createInstance(keyType, models, libOptions = {}, scopes = [], registry)
         libOptions,
         scopes
     });
-
+    // Create coordinate and options separately following new pattern
+    const coordinate = Coordinate.createCoordinate([
         keyType
-    ], scopes
-    const
+    ], scopes);
+    const options = Options.createOptions(libOptions);
+    // Create operations with the new signature
+    const operations = Operations.createOperations(models, coordinate, registry, options);
+    // Wrap operations for primary pattern
+    const wrappedOperations = Library.Primary.wrapOperations(operations, options, coordinate, registry);
     return {
-
-
-
-
+        coordinate,
+        registry,
+        operations: wrappedOperations,
+        options,
+        models
     };
 }
 
 exports.createInstance = createInstance;
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,
+//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9u…
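The primary createInstance no longer builds a Definition; it assembles a coordinate, options, and operations separately and wraps the operations with Library.Primary.wrapOperations. A rough usage sketch; the Sequelize model and registry are placeholders, and the deep require path is shown only to tie the example to the file above (how the public entry point re-exports createInstance is not visible in this diff):

const { createInstance } = require('@fjell/lib-sequelize/dist/cjs/primary/Instance.cjs'); // illustrative deep path

function buildTaskInstance(TaskModel, registry) {
  // keyType 'task', no extra libOptions or scopes
  const instance = createInstance('task', [TaskModel], {}, [], registry);
  // The returned object exposes the pieces it was assembled from:
  //   instance.coordinate  - key type array plus scopes (the 'sequelize' scope is injected)
  //   instance.options     - Options.createOptions(libOptions)
  //   instance.operations  - operations wrapped by Library.Primary.wrapOperations
  //   instance.models      - the Sequelize models passed in
  return instance;
}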
package/dist/cjs/util/general.cjs
CHANGED
@@ -2,9 +2,6 @@
 
 Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
 
-/* eslint-disable @typescript-eslint/no-unused-vars */ /* eslint-disable no-undefined */ const clean = (obj)=>{
-    return Object.fromEntries(Object.entries(obj).filter(([_, v])=>v !== undefined));
-};
 //Recursive implementation of jSON.stringify;
 const stringifyJSON = function(obj, visited = new Set()) {
     const arrOfKeyVals = [];
@@ -47,6 +44,5 @@ const stringifyJSON = function(obj, visited = new Set()) {
     return '';
 };
 
-exports.clean = clean;
 exports.stringifyJSON = stringifyJSON;
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,
+//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9u…
package/dist/es/Coordinate.js
CHANGED
@@ -1,8 +1,14 @@
-import
+import { createCoordinate as createCoordinate$1 } from '@fjell/registry';
+import LibLogger from './logger.js';
 
+const logger = LibLogger.get('Coordinate');
 const SCOPE_SEQUELIZE = 'sequelize';
 const createCoordinate = (kta, scopes)=>{
-
+    logger.debug('createCoordinate', {
+        kta,
+        scopes
+    });
+    const coordinate = createCoordinate$1(kta, [
         SCOPE_SEQUELIZE,
         ...scopes || []
     ]);
@@ -10,4 +16,4 @@ const createCoordinate = (kta, scopes)=>{
 };
 
 export { SCOPE_SEQUELIZE, createCoordinate };
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,
+//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9u…
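createCoordinate in the ES build now delegates to createCoordinate from @fjell/registry, logs its inputs, and always puts the 'sequelize' scope ahead of any caller-supplied scopes. A small sketch, assuming the function is imported straight from the built file (whether it is also re-exported from the package root is not visible in this diff):

import { createCoordinate, SCOPE_SEQUELIZE } from '@fjell/lib-sequelize/dist/es/Coordinate.js'; // illustrative deep path

const coordinate = createCoordinate(['task'], ['tenant-a']);
// Expected scope order: [SCOPE_SEQUELIZE, 'tenant-a'], i.e. ['sequelize', 'tenant-a']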
package/dist/es/Definition.js
CHANGED
@@ -1,10 +1,9 @@
-import * as Library from '@fjell/lib';
-import { createCoordinate } from './Coordinate.js';
 import { createOptions } from './Options.js';
 import LibLogger from './logger.js';
+import { createCoordinate } from './Coordinate.js';
 
 const logger = LibLogger.get('lib-sequelize', 'Definition');
-
+const createDefinition = (kta, scopes, libOptions)=>{
     logger.debug('createDefinition', {
         kta,
         scopes,
@@ -12,12 +11,11 @@ function createDefinition(kta, scopes, libOptions) {
     });
     const coordinate = createCoordinate(kta, scopes);
     const options = createOptions(libOptions);
-    const definition = Library.createDefinition(coordinate, options);
     return {
-
+        coordinate,
         options
     };
-}
+};
 
 export { createDefinition };
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,
+//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9u…
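createDefinition no longer delegates to Library.createDefinition from @fjell/lib; it now simply pairs the coordinate with the options. A minimal sketch of the resulting shape, with the same caveat that the deep import path is illustrative:

import { createDefinition } from '@fjell/lib-sequelize/dist/es/Definition.js'; // illustrative deep path

const definition = createDefinition(['task'], ['tenant-a'], {});
// definition.coordinate - built by createCoordinate, so the 'sequelize' scope is included
// definition.options    - built by createOptions(libOptions)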