@fjell/lib-sequelize 4.4.14 → 4.4.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{types/Operations.d.ts → Operations.d.ts} +5 -5
- package/dist/{types/SequelizeLibraryFactory.d.ts → SequelizeLibraryFactory.d.ts} +4 -4
- package/dist/{types/contained → contained}/SequelizeLibrary.d.ts +4 -4
- package/dist/index.js +1488 -0
- package/dist/index.js.map +7 -0
- package/dist/logger.d.ts +2 -0
- package/dist/{types/ops → ops}/all.d.ts +3 -3
- package/dist/{types/ops → ops}/create.d.ts +3 -3
- package/dist/{types/ops → ops}/find.d.ts +3 -3
- package/dist/{types/ops → ops}/get.d.ts +1 -1
- package/dist/{types/ops → ops}/one.d.ts +3 -3
- package/dist/{types/ops → ops}/remove.d.ts +4 -3
- package/dist/{types/ops → ops}/update.d.ts +3 -3
- package/dist/{types/primary → primary}/SequelizeLibrary.d.ts +4 -4
- package/package.json +17 -20
- package/dist/cjs/AggregationBuilder.cjs +0 -65
- package/dist/cjs/Coordinate.cjs +0 -24
- package/dist/cjs/Definition.cjs +0 -25
- package/dist/cjs/EventCoordinator.cjs +0 -54
- package/dist/cjs/KeyMaster.cjs +0 -151
- package/dist/cjs/OperationContext.cjs +0 -161
- package/dist/cjs/Operations.cjs +0 -34
- package/dist/cjs/Options.cjs +0 -46
- package/dist/cjs/QueryBuilder.cjs +0 -296
- package/dist/cjs/ReferenceBuilder.cjs +0 -76
- package/dist/cjs/RowProcessor.cjs +0 -56
- package/dist/cjs/SequelizeLibrary.cjs +0 -56
- package/dist/cjs/SequelizeLibraryFactory.cjs +0 -25
- package/dist/cjs/contained/SequelizeLibrary.cjs +0 -31
- package/dist/cjs/contained/index.cjs +0 -11
- package/dist/cjs/index.cjs +0 -26
- package/dist/cjs/logger.cjs +0 -10
- package/dist/cjs/ops/all.cjs +0 -145
- package/dist/cjs/ops/create.cjs +0 -252
- package/dist/cjs/ops/find.cjs +0 -47
- package/dist/cjs/ops/get.cjs +0 -92
- package/dist/cjs/ops/one.cjs +0 -27
- package/dist/cjs/ops/remove.cjs +0 -114
- package/dist/cjs/ops/update.cjs +0 -120
- package/dist/cjs/primary/SequelizeLibrary.cjs +0 -41
- package/dist/cjs/primary/index.cjs +0 -11
- package/dist/cjs/util/general.cjs +0 -48
- package/dist/cjs/util/relationshipUtils.cjs +0 -117
- package/dist/es/AggregationBuilder.js +0 -61
- package/dist/es/Coordinate.js +0 -19
- package/dist/es/Definition.js +0 -21
- package/dist/es/EventCoordinator.js +0 -48
- package/dist/es/KeyMaster.js +0 -146
- package/dist/es/OperationContext.js +0 -155
- package/dist/es/Operations.js +0 -30
- package/dist/es/Options.js +0 -23
- package/dist/es/QueryBuilder.js +0 -290
- package/dist/es/ReferenceBuilder.js +0 -72
- package/dist/es/RowProcessor.js +0 -52
- package/dist/es/SequelizeLibrary.js +0 -32
- package/dist/es/SequelizeLibraryFactory.js +0 -21
- package/dist/es/contained/SequelizeLibrary.js +0 -26
- package/dist/es/contained/index.js +0 -2
- package/dist/es/index.js +0 -11
- package/dist/es/logger.js +0 -6
- package/dist/es/ops/all.js +0 -141
- package/dist/es/ops/create.js +0 -248
- package/dist/es/ops/find.js +0 -43
- package/dist/es/ops/get.js +0 -88
- package/dist/es/ops/one.js +0 -23
- package/dist/es/ops/remove.js +0 -110
- package/dist/es/ops/update.js +0 -116
- package/dist/es/primary/SequelizeLibrary.js +0 -36
- package/dist/es/primary/index.js +0 -2
- package/dist/es/util/general.js +0 -44
- package/dist/es/util/relationshipUtils.js +0 -112
- package/dist/index.cjs +0 -1853
- package/dist/index.cjs.map +0 -1
- package/dist/types/AggregationBuilder.d.ts +0 -5
- package/dist/types/EventCoordinator.d.ts +0 -6
- package/dist/types/KeyMaster.d.ts +0 -4
- package/dist/types/OperationContext.d.ts +0 -72
- package/dist/types/QueryBuilder.d.ts +0 -12
- package/dist/types/ReferenceBuilder.d.ts +0 -4
- package/dist/types/RowProcessor.d.ts +0 -6
- package/dist/types/logger.d.ts +0 -2
- package/dist/types/util/general.d.ts +0 -4
- package/dist/types/util/relationshipUtils.d.ts +0 -21
- package/dist/{types/Coordinate.d.ts → Coordinate.d.ts} +0 -0
- package/dist/{types/Definition.d.ts → Definition.d.ts} +0 -0
- package/dist/{types/Options.d.ts → Options.d.ts} +1 -1
- package/dist/{types/Registry.d.ts → Registry.d.ts} +0 -0
- package/dist/{types/SequelizeLibrary.d.ts → SequelizeLibrary.d.ts} +1 -1
- package/dist/{types/contained → contained}/index.d.ts +0 -0
- package/dist/{types/index.d.ts → index.d.ts} +0 -0
- package/dist/{types/primary → primary}/index.d.ts +0 -0

package/dist/es/ReferenceBuilder.js
DELETED
@@ -1,72 +0,0 @@
-import LibLogger from './logger.js';
-
-const logger = LibLogger.get('sequelize', 'ReferenceBuilder');
-const buildReference = async (item, referenceDefinition, registry, context)=>{
-    // Check if there is more than one key type
-    if (referenceDefinition.kta.length > 1) {
-        throw new Error("The ReferenceBuilder doesn't work with more than one key type yet");
-    }
-    // Check if dependencies exist
-    if (!registry) {
-        throw new Error("This model definition has a reference definition, but the registry is not present");
-    }
-    // Find the Library.Instance for the key type
-    const library = registry.get(referenceDefinition.kta);
-    if (!library) {
-        throw new Error("This model definition has a reference definition, but the dependency is not present");
-    }
-    // Check if the column value is null - if so, skip the reference
-    const columnValue = item[referenceDefinition.column];
-    if (columnValue == null) {
-        item[referenceDefinition.property] = null;
-        return item;
-    }
-    // Create a PriKey using the column value from item
-    const priKey = {
-        kt: referenceDefinition.kta[0],
-        pk: columnValue
-    };
-    let referencedItem;
-    if (context) {
-        // Check if we already have this item cached
-        if (context.isCached(priKey)) {
-            logger.default('Using cached reference', {
-                priKey,
-                property: referenceDefinition.property
-            });
-            referencedItem = context.getCached(priKey);
-        } else if (context.isInProgress(priKey)) {
-            logger.default('Circular dependency detected, creating reference placeholder', {
-                priKey,
-                property: referenceDefinition.property
-            });
-            // Create a minimal reference object with just the key to break the cycle
-            referencedItem = {
-                key: priKey
-            };
-        } else {
-            // Mark this key as in progress before loading
-            context.markInProgress(priKey);
-            try {
-                // Get the referenced item using the Library.Operations get method (context now managed internally)
-                referencedItem = await library.operations.get(priKey);
-                // Cache the result
-                context.setCached(priKey, referencedItem);
-            } finally{
-                // Always mark as complete, even if there was an error
-                context.markComplete(priKey);
-            }
-        }
-    } else {
-        // Fallback to original behavior if no context provided
-        referencedItem = await library.operations.get(priKey);
-    }
-    // TODO: In a Fjell-compliant implementation, this value should be stored in the ref property
-    // For now, we'll just populate the property directly
-    // Store the result in the property on item
-    item[referenceDefinition.property] = referencedItem;
-    return item;
-};
-
-export { buildReference };
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiUmVmZXJlbmNlQnVpbGRlci5qcyIsInNvdXJjZXMiOltdLCJzb3VyY2VzQ29udGVudCI6W10sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiI7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7OzsifQ==

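Reading aid (not part of the diff): a minimal JavaScript sketch of the referenceDefinition shape that buildReference above appears to expect, inferred from its property accesses; the 'customer' names are hypothetical placeholders.

// Hypothetical example; only the kta, column, and property fields come from the code above.
const referenceDefinition = {
    kta: ['customer'],    // key type array; the builder supports a single key type
    column: 'customerId', // column on the row that holds the referenced primary key
    property: 'customer'  // property on the item that receives the referenced item
};
// buildReference builds { kt: kta[0], pk: item[column] }, resolves it through
// registry.get(kta).operations.get(priKey), and assigns the result to item[property].
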
package/dist/es/RowProcessor.js
DELETED
@@ -1,52 +0,0 @@
-import LibLogger from './logger.js';
-import { addKey } from './KeyMaster.js';
-import { buildReference } from './ReferenceBuilder.js';
-import { buildAggregation } from './AggregationBuilder.js';
-import { stringifyJSON } from './util/general.js';
-import { populateEvents } from './EventCoordinator.js';
-import { contextManager, createOperationContext } from './OperationContext.js';
-
-const logger = LibLogger.get('sequelize', 'RowProcessor');
-const processRow = async (row, keyTypes, referenceDefinitions, aggregationDefinitions, registry, context)=>{
-    logger.default('Processing Row', {
-        row
-    });
-    // Use provided context or create new one
-    const operationContext = context || createOperationContext();
-    // Process the row within the context to ensure all operations share the same context
-    return contextManager.withContext(operationContext, async ()=>{
-        let item = row.get({
-            plain: true
-        });
-        logger.default('Adding Key to Item with Key Types: %s', stringifyJSON(keyTypes));
-        item = addKey(row, item, keyTypes);
-        item = populateEvents(item);
-        logger.default('Key Added to Item: %s', stringifyJSON(item.key));
-        // Mark this item as in progress to detect circular references
-        operationContext.markInProgress(item.key);
-        try {
-            if (referenceDefinitions && referenceDefinitions.length > 0) {
-                for (const referenceDefinition of referenceDefinitions){
-                    logger.default('Processing Reference for %s to %s', item.key.kt, stringifyJSON(referenceDefinition.kta));
-                    item = await buildReference(item, referenceDefinition, registry, operationContext);
-                }
-            }
-            if (aggregationDefinitions && aggregationDefinitions.length > 0) {
-                for (const aggregationDefinition of aggregationDefinitions){
-                    logger.default('Processing Aggregation for %s from %s', item.key.kt, stringifyJSON(aggregationDefinition.kta));
-                    item = await buildAggregation(item, aggregationDefinition, registry, operationContext);
-                }
-            }
-            // Cache the fully processed item
-            operationContext.setCached(item.key, item);
-        } finally{
-            // Mark this item as complete
-            operationContext.markComplete(item.key);
-        }
-        logger.default('Processed Row: %j', stringifyJSON(item));
-        return item;
-    });
-};
-
-export { processRow };
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiUm93UHJvY2Vzc29yLmpzIiwic291cmNlcyI6W10sInNvdXJjZXNDb250ZW50IjpbXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6Ijs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7OzsifQ==

package/dist/es/SequelizeLibrary.js
DELETED
@@ -1,32 +0,0 @@
-import * as Library from '@fjell/lib';
-import { createOperations } from './Operations.js';
-import LibLogger from './logger.js';
-
-const logger = LibLogger.get("SequelizeLibrary");
-/**
- * Creates a new SequelizeLibrary that extends the fjell-lib Library
- * with Sequelize-specific functionality
- */ const createSequelizeLibrary = (registry, coordinate, models, options)=>{
-    logger.debug("createSequelizeLibrary", {
-        coordinate,
-        models,
-        registry,
-        options
-    });
-    // Create Sequelize-specific operations
-    const operations = createOperations(models, coordinate, registry, options);
-    // Create the base fjell-lib library
-    const libLibrary = Library.createLibrary(registry, coordinate, operations, options);
-    return {
-        ...libLibrary,
-        models
-    };
-};
-/**
- * Type guard to check if an object is a SequelizeLibrary
- */ const isSequelizeLibrary = (library)=>{
-    return library != null && library.coordinate != null && library.operations != null && library.options != null && library.registry != null && library.models != null && Array.isArray(library.models);
-};
-
-export { createSequelizeLibrary, isSequelizeLibrary };
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiU2VxdWVsaXplTGlicmFyeS5qcyIsInNvdXJjZXMiOltdLCJzb3VyY2VzQ29udGVudCI6W10sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiI7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7OyJ9

package/dist/es/SequelizeLibraryFactory.js
DELETED
@@ -1,21 +0,0 @@
-import { createSequelizeLibrary } from './SequelizeLibrary.js';
-import LibLogger from './logger.js';
-
-const logger = LibLogger.get("InstanceFactory");
-/**
- * Factory function for creating Sequelize libraries
- * This extends the fjell-lib pattern by adding Sequelize-specific models
- */ const createSequelizeLibraryFactory = (models, options)=>{
-    return (coordinate, context)=>{
-        logger.debug("Creating Sequelize instance", {
-            coordinate,
-            registry: context.registry,
-            models: models.map((m)=>m.name),
-            options
-        });
-        return createSequelizeLibrary(context.registry, coordinate, models, options);
-    };
-};
-
-export { createSequelizeLibraryFactory };
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiU2VxdWVsaXplTGlicmFyeUZhY3RvcnkuanMiLCJzb3VyY2VzIjpbXSwic291cmNlc0NvbnRlbnQiOltdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiOzs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7In0=

package/dist/es/contained/SequelizeLibrary.js
DELETED
@@ -1,26 +0,0 @@
-import { Contained } from '@fjell/lib';
-import { createOperations } from '../Operations.js';
-import { createOptions } from '../Options.js';
-import { createCoordinate } from '../Coordinate.js';
-
-function createSequelizeLibrary(keyTypes, models, libOptions = {}, scopes = [], registry) {
-    // Create coordinate and options separately following new pattern
-    const coordinate = createCoordinate(keyTypes, scopes);
-    const options = createOptions(libOptions);
-    // Create operations with the new signature
-    const operations = createOperations(models, coordinate, registry, options);
-    // Wrap operations for contained pattern
-    const wrappedOperations = Contained.wrapOperations(operations, options, coordinate, registry);
-    return {
-        coordinate,
-        registry,
-        operations: wrappedOperations,
-        options,
-        models
-    };
-}
-// Legacy exports for backwards compatibility
-const createInstance = createSequelizeLibrary;
-
-export { createInstance, createSequelizeLibrary };
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiU2VxdWVsaXplTGlicmFyeS5qcyIsInNvdXJjZXMiOltdLCJzb3VyY2VzQ29udGVudCI6W10sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiI7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7OyJ9

package/dist/es/index.js
DELETED
@@ -1,11 +0,0 @@
-export { createDefinition } from './Definition.js';
-export { createSequelizeLibrary, isSequelizeLibrary } from './SequelizeLibrary.js';
-export { createSequelizeLibraryFactory } from './SequelizeLibraryFactory.js';
-export { createOptions } from './Options.js';
-export { createOperations } from './Operations.js';
-import * as index from './contained/index.js';
-export { index as Contained };
-import * as index$1 from './primary/index.js';
-export { index$1 as Primary };
-export { SCOPE_SEQUELIZE, createCoordinate } from './Coordinate.js';
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VzIjpbXSwic291cmNlc0NvbnRlbnQiOltdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiOzs7Ozs7Ozs7In0=

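Reading aid (not part of the diff): a minimal usage sketch of the entry-point exports listed above, assuming the rebundled dist/index.js keeps the same API surface; registry and CustomerModel are hypothetical application-supplied values.

// Hypothetical wiring; signatures follow the deleted sources above.
import { createCoordinate, createOptions, createSequelizeLibrary } from '@fjell/lib-sequelize';

const buildCustomerLibrary = (registry, CustomerModel) => {
    const coordinate = createCoordinate(['customer'], []); // key type array and scopes
    const options = createOptions({});                     // library options (references, aggregations, ...)
    return createSequelizeLibrary(registry, coordinate, [CustomerModel], options);
};
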
package/dist/es/logger.js
DELETED
@@ -1,6 +0,0 @@
-import Logging from '@fjell/logging';
-
-const LibLogger = Logging.getLogger('@fjell/lib-sequelize');
-
-export { LibLogger as default };
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoibG9nZ2VyLmpzIiwic291cmNlcyI6W10sInNvdXJjZXNDb250ZW50IjpbXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6Ijs7Ozs7OyJ9

package/dist/es/ops/all.js
DELETED
@@ -1,141 +0,0 @@
-import { validateKeys } from '@fjell/core';
-import { buildQuery } from '../QueryBuilder.js';
-import LibLogger from '../logger.js';
-import { processRow } from '../RowProcessor.js';
-import { Op } from 'sequelize';
-import { buildRelationshipPath } from '../util/relationshipUtils.js';
-import { contextManager } from '../OperationContext.js';
-import { stringifyJSON } from '../util/general.js';
-
-const logger = LibLogger.get('sequelize', 'ops', 'all');
-// Helper function to merge includes avoiding duplicates
-const mergeIncludes = (existingIncludes, newIncludes)=>{
-    const mergedIncludes = [
-        ...existingIncludes
-    ];
-    for (const newInclude of newIncludes){
-        const existingIndex = mergedIncludes.findIndex((existing)=>existing.as === newInclude.as && existing.model === newInclude.model);
-        if (existingIndex === -1) {
-            mergedIncludes.push(newInclude);
-        } else if (newInclude.include && mergedIncludes[existingIndex].include) {
-            mergedIncludes[existingIndex].include = [
-                ...mergedIncludes[existingIndex].include,
-                ...newInclude.include
-            ];
-        } else if (newInclude.include) {
-            mergedIncludes[existingIndex].include = newInclude.include;
-        }
-    }
-    return mergedIncludes;
-};
-const getAllOperation = (models, definition, registry)=>{
-    const { coordinate, options: { references, aggregations } } = definition;
-    //#region Query
-    const all = async (itemQuery, locations)=>{
-        var _options_include;
-        logger.debug(`ALL operation called on ${models[0].name} with ${(locations === null || locations === void 0 ? void 0 : locations.length) || 0} location filters: ${(locations === null || locations === void 0 ? void 0 : locations.map((loc)=>`${loc.kt}=${loc.lk}`).join(', ')) || 'none'}`);
-        const loc = locations || [];
-        // @ts-ignore
-        const model = models[0];
-        // Build base query from itemQuery
-        const options = buildQuery(itemQuery, model);
-        // Handle location keys if present
-        if (loc.length > 0) {
-            const { kta } = coordinate;
-            const directLocations = [];
-            const hierarchicalLocations = [];
-            const additionalIncludes = [];
-            // Categorize location keys as direct or hierarchical
-            for (const locKey of loc){
-                const relationshipInfo = buildRelationshipPath(model, locKey.kt, kta, true);
-                if (!relationshipInfo.found) {
-                    const errorMessage = `Location key '${locKey.kt}' cannot be resolved on model '${model.name}' or through its relationships.`;
-                    logger.error(errorMessage, {
-                        locations: loc,
-                        kta
-                    });
-                    throw new Error(errorMessage);
-                }
-                if (relationshipInfo.isDirect) {
-                    directLocations.push(locKey);
-                } else {
-                    hierarchicalLocations.push(locKey);
-                }
-            }
-            // Handle direct location keys (simple foreign key constraints)
-            for (const locKey of directLocations){
-                if (locKey.lk === undefined || locKey.lk == null || locKey.lk === '' || typeof locKey.lk === 'object' && Object.keys(locKey.lk).length === 0) {
-                    logger.error(`Location key '${locKey.kt}' has invalid lk value: ${stringifyJSON(locKey.lk)}`, {
-                        locKey,
-                        locations: loc
-                    });
-                    throw new Error(`Location key '${locKey.kt}' has invalid lk value: ${stringifyJSON(locKey.lk)}`);
-                }
-                const foreignKeyField = locKey.kt + 'Id';
-                // Check if this field already has a condition from the itemQuery
-                if (options.where[foreignKeyField]) {
-                    logger.debug(`[ALL] Field ${foreignKeyField} already constrained by itemQuery, skipping location constraint to avoid conflicts`);
-                    continue; // Skip this location constraint to avoid conflicts
-                }
-                logger.trace(`[ALL] Setting direct location where clause: ${foreignKeyField} = ${stringifyJSON(locKey.lk)} (type: ${typeof locKey.lk})`);
-                options.where[foreignKeyField] = {
-                    [Op.eq]: locKey.lk
-                };
-            }
-            // Handle hierarchical location keys (requires relationship traversal)
-            for (const locKey of hierarchicalLocations){
-                if (locKey.lk === undefined || locKey.lk == null || locKey.lk === '' || typeof locKey.lk === 'object' && Object.keys(locKey.lk).length === 0) {
-                    logger.error(`Hierarchical location key '${locKey.kt}' has invalid lk value: ${stringifyJSON(locKey.lk)}`, {
-                        locKey,
-                        locations: loc
-                    });
-                    throw new Error(`Hierarchical location key '${locKey.kt}' has invalid lk value: ${stringifyJSON(locKey.lk)}`);
-                }
-                const relationshipInfo = buildRelationshipPath(model, locKey.kt, kta);
-                if (relationshipInfo.found && relationshipInfo.path) {
-                    // Check if this field already has a condition from the itemQuery
-                    if (options.where[relationshipInfo.path]) {
-                        logger.debug(`[ALL] Field ${relationshipInfo.path} already constrained by itemQuery, skipping hierarchical location constraint to avoid conflicts`);
-                        continue; // Skip this location constraint to avoid conflicts
-                    }
-                    // Add the relationship constraint using the path
-                    logger.trace(`[ALL] Setting hierarchical location where clause: ${relationshipInfo.path} = ${stringifyJSON(locKey.lk)} (type: ${typeof locKey.lk})`);
-                    options.where[relationshipInfo.path] = {
-                        [Op.eq]: locKey.lk
-                    };
-                    // Add necessary includes for the relationship traversal
-                    if (relationshipInfo.includes) {
-                        additionalIncludes.push(...relationshipInfo.includes);
-                    }
-                }
-            }
-            // Merge additional includes with existing includes
-            if (additionalIncludes.length > 0) {
-                const existingIncludes = options.include || [];
-                options.include = mergeIncludes(existingIncludes, additionalIncludes);
-            }
-        }
-        logger.default(`All query configured for ${model.name} with where fields: ${options.where ? Object.keys(options.where).join(', ') : 'none'}, includes: ${((_options_include = options.include) === null || _options_include === void 0 ? void 0 : _options_include.length) || 0}`);
-        try {
-            logger.trace(`[ALL] Executing ${model.name}.findAll() with options: ${JSON.stringify(options, null, 2)}`);
-        } catch {
-            // Fallback for cases where JSON.stringify fails on Sequelize operators
-            logger.trace(`[ALL] Executing ${model.name}.findAll() with options containing non-serializable operators (${Object.keys(options.where || {}).length} where conditions)`);
-        }
-        const matchingItems = await model.findAll(options);
-        // this.logger.default('Matching Items', { matchingItems });
-        // Get the current context from context manager
-        const context = contextManager.getCurrentContext();
-        // TODO: Move this Up!
-        const results = await Promise.all(matchingItems.map(async (row)=>{
-            const processedRow = await processRow(row, coordinate.kta, references, aggregations, registry, context);
-            return validateKeys(processedRow, coordinate.kta);
-        }));
-        logger.debug(`[ALL] Returning ${results.length} ${model.name} records`);
-        return results;
-    };
-    return all;
-};
-
-export { getAllOperation };
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiYWxsLmpzIiwic291cmNlcyI6W10sInNvdXJjZXNDb250ZW50IjpbXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6Ijs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7OyJ9

package/dist/es/ops/create.js
DELETED
@@ -1,248 +0,0 @@
-import { isComKey, isPriKey, validateKeys } from '@fjell/core';
-import LibLogger from '../logger.js';
-import { processRow } from '../RowProcessor.js';
-import { extractEvents, removeEvents } from '../EventCoordinator.js';
-import { buildRelationshipPath, buildRelationshipChain } from '../util/relationshipUtils.js';
-import { stringifyJSON } from '../util/general.js';
-
-const logger = LibLogger.get('sequelize', 'ops', 'create');
-// Helper function to translate PostgreSQL errors to meaningful messages
-function translateDatabaseError(error, itemData, modelName) {
-    var _error_original, _error_original1, _error_original2;
-    const originalMessage = error.message || '';
-    const errorCode = (_error_original = error.original) === null || _error_original === void 0 ? void 0 : _error_original.code;
-    const constraint = (_error_original1 = error.original) === null || _error_original1 === void 0 ? void 0 : _error_original1.constraint;
-    const detail = (_error_original2 = error.original) === null || _error_original2 === void 0 ? void 0 : _error_original2.detail;
-    logger.error('Database error during create operation', {
-        errorCode,
-        constraint,
-        detail,
-        originalMessage,
-        modelName,
-        itemData: JSON.stringify(itemData, null, 2)
-    });
-    // Handle specific PostgreSQL error codes
-    switch(errorCode){
-        case '23505':
-            if (constraint) {
-                return new Error(`Duplicate value violates unique constraint '${constraint}'. ${detail || ''}`);
-            }
-            return new Error(`Duplicate value detected. This record already exists. ${detail || ''}`);
-        case '23503':
-            if (constraint) {
-                return new Error(`Foreign key constraint '${constraint}' violated. Referenced record does not exist. ${detail || ''}`);
-            }
-            return new Error(`Referenced record does not exist. Check that all related records are valid. ${detail || ''}`);
-        case '23502':
-            var _error_original3;
-            const column = (_error_original3 = error.original) === null || _error_original3 === void 0 ? void 0 : _error_original3.column;
-            if (column) {
-                return new Error(`Required field '${column}' cannot be null`);
-            }
-            return new Error(`Required field is missing or null`);
-        case '23514':
-            if (constraint) {
-                return new Error(`Check constraint '${constraint}' violated. ${detail || ''}`);
-            }
-            return new Error(`Data validation failed. Check constraint violated. ${detail || ''}`);
-        case '22001':
-            return new Error(`Data too long for field. Check string lengths. ${detail || ''}`);
-        case '22003':
-            return new Error(`Numeric value out of range. Check number values. ${detail || ''}`);
-        case '42703':
-            var _error_original4;
-            const undefinedColumn = (_error_original4 = error.original) === null || _error_original4 === void 0 ? void 0 : _error_original4.column;
-            if (undefinedColumn) {
-                return new Error(`Column '${undefinedColumn}' does not exist in table '${modelName}'`);
-            }
-            return new Error(`Referenced column does not exist`);
-        case '42P01':
-            return new Error(`Table '${modelName}' does not exist`);
-        default:
-            // For unknown errors, provide the original message with context
-            return new Error(`Database error in ${modelName}.create(): ${originalMessage}. Item data: ${JSON.stringify(itemData, null, 2)}`);
-    }
-}
-// Helper function to validate hierarchical chain exists
-async function validateHierarchicalChain(models, locKey, kta) {
-    try {
-        // Find the direct parent model that contains this locator
-        const locatorIndex = kta.indexOf(locKey.kt);
-        if (locatorIndex === -1) {
-            throw new Error(`Locator type '${locKey.kt}' not found in kta array`);
-        }
-        // Get the model for this locator
-        const locatorModel = models[locatorIndex] || models[0]; // Fallback to primary model
-        // Build a query to validate the chain exists
-        const chainResult = buildRelationshipChain(locatorModel, kta, locatorIndex, kta.length - 1);
-        if (!chainResult.success) {
-            // If we can't build a chain, just validate the record exists
-            const record = await locatorModel.findByPk(locKey.lk);
-            if (!record) {
-                throw new Error(`Referenced ${locKey.kt} with id ${locKey.lk} does not exist`);
-            }
-            return;
-        }
-        // Validate that the chain exists
-        const queryOptions = {
-            where: {
-                id: locKey.lk
-            }
-        };
-        if (chainResult.includes && chainResult.includes.length > 0) {
-            queryOptions.include = chainResult.includes;
-        }
-        const record = await locatorModel.findOne(queryOptions);
-        if (!record) {
-            throw new Error(`Referenced ${locKey.kt} with id ${locKey.lk} does not exist or chain is invalid`);
-        }
-    } catch (error) {
-        // Add context to validation errors
-        if (error.original) {
-            throw translateDatabaseError(error, {
-                locKey,
-                kta
-            }, locKey.kt);
-        }
-        throw error;
-    }
-}
-const getCreateOperation = (models, definition, registry)=>{
-    const create = async (item, options)=>{
-        logger.debug(`CREATE operation called on ${models[0].name} with ${(options === null || options === void 0 ? void 0 : options.key) ? `key: pk=${options.key.pk}, loc=[${isComKey(options.key) ? options.key.loc.map((l)=>`${l.kt}=${l.lk}`).join(', ') : ''}]` : (options === null || options === void 0 ? void 0 : options.locations) ? `locations: ${options.locations.map((loc)=>`${loc.kt}=${loc.lk}`).join(', ')}` : 'no constraints'}`);
-        logger.default(`Create configured for ${models[0].name} with ${Object.keys(item).length} item fields`);
-        const { coordinate, options: { references, aggregations } } = definition;
-        const { kta } = coordinate;
-        // Get the primary model (first model in array)
-        const model = models[0];
-        const modelAttributes = model.getAttributes();
-        // Validate that all item attributes exist on the model
-        let itemData = {
-            ...item
-        };
-        // TODO: We need the opposite of processRow, something to step down from fjell to database.
-        itemData = extractEvents(itemData);
-        itemData = removeEvents(itemData);
-        // Validate that all item attributes exist on the model
-        const invalidAttributes = [];
-        for (const key of Object.keys(itemData)){
-            if (!modelAttributes[key]) {
-                invalidAttributes.push(key);
-            }
-        }
-        if (invalidAttributes.length > 0) {
-            const availableAttributes = Object.keys(modelAttributes).join(', ');
-            throw new Error(`Invalid attributes for model '${model.name}': [${invalidAttributes.join(', ')}]. ` + `Available attributes: [${availableAttributes}]. ` + `Item data: ${JSON.stringify(itemData, null, 2)}`);
-        }
-        // Handle key options
-        // If a key is supplied, assume its contents are to be assigned to the appropriate ids.
-        // For most cases this will be null as key generation is often through autoIncrement.
-        // If this is a CItem then the locations will be present.
-        if (options === null || options === void 0 ? void 0 : options.key) {
-            const key = options.key;
-            if (isPriKey(key)) {
-                // Set the primary key
-                itemData.id = key.pk;
-            } else if (isComKey(key)) {
-                // Set primary key
-                itemData.id = key.pk;
-                // Process location keys - only set direct foreign keys, validate hierarchical chains
-                const comKey = key;
-                const directLocations = [];
-                const hierarchicalLocations = [];
-                // Categorize location keys as direct or hierarchical
-                for (const locKey of comKey.loc){
-                    const relationshipInfo = buildRelationshipPath(model, locKey.kt, kta, true);
-                    if (!relationshipInfo.found) {
-                        const associations = model.associations ? Object.keys(model.associations) : [];
-                        const errorMessage = `Composite key locator '${locKey.kt}' cannot be resolved on model '${model.name}' or through its relationships. ` + `Available associations: [${associations.join(', ')}]. ` + `KTA: [${kta.join(', ')}]. ` + `Composite key: ${JSON.stringify(comKey, null, 2)}`;
-                        logger.error(errorMessage, {
-                            key: comKey,
-                            kta,
-                            associations
-                        });
-                        throw new Error(errorMessage);
-                    }
-                    if (relationshipInfo.isDirect) {
-                        directLocations.push(locKey);
-                    } else {
-                        hierarchicalLocations.push(locKey);
-                    }
-                }
-                // Set direct foreign keys
-                for (const locKey of directLocations){
-                    if (locKey.lk == null || locKey.lk === '') {
-                        logger.error(`Composite key location '${locKey.kt}' has undefined/null lk value`, {
-                            locKey,
-                            key: comKey
-                        });
-                        throw new Error(`Composite key location '${locKey.kt}' has undefined/null lk value`);
-                    }
-                    const foreignKeyField = locKey.kt + 'Id';
-                    itemData[foreignKeyField] = locKey.lk;
-                }
-                // Validate hierarchical chains exist
-                for (const locKey of hierarchicalLocations){
-                    await validateHierarchicalChain(models, locKey, kta);
-                }
-            }
-        }
-        // Handle locations options
-        // This is the most frequent way relationship ids will be set
-        if (options === null || options === void 0 ? void 0 : options.locations) {
-            const directLocations = [];
-            const hierarchicalLocations = [];
-            // Categorize location keys as direct or hierarchical
-            for (const locKey of options.locations){
-                const relationshipInfo = buildRelationshipPath(model, locKey.kt, kta, true);
-                if (!relationshipInfo.found) {
-                    const associations = model.associations ? Object.keys(model.associations) : [];
-                    const errorMessage = `Location key '${locKey.kt}' cannot be resolved on model '${model.name}' or through its relationships. ` + `Available associations: [${associations.join(', ')}]. ` + `KTA: [${kta.join(', ')}]. ` + `Locations: ${JSON.stringify(options.locations, null, 2)}`;
-                    logger.error(errorMessage, {
-                        locations: options.locations,
-                        kta,
-                        associations
-                    });
-                    throw new Error(errorMessage);
-                }
-                if (relationshipInfo.isDirect) {
-                    directLocations.push(locKey);
-                } else {
-                    hierarchicalLocations.push(locKey);
-                }
-            }
-            // Set direct foreign keys
-            for (const locKey of directLocations){
-                if (locKey.lk == null || locKey.lk === '') {
-                    logger.error(`Location option '${locKey.kt}' has undefined/null lk value`, {
-                        locKey,
-                        locations: options.locations
-                    });
-                    throw new Error(`Location option '${locKey.kt}' has undefined/null lk value`);
-                }
-                const foreignKeyField = locKey.kt + 'Id';
-                itemData[foreignKeyField] = locKey.lk;
-            }
-            // Validate hierarchical chains exist
-            for (const locKey of hierarchicalLocations){
-                await validateHierarchicalChain(models, locKey, kta);
-            }
-        }
-        // Create the record
-        try {
-            logger.trace(`[CREATE] Executing ${model.name}.create() with data: ${stringifyJSON(itemData)}`);
-            const createdRecord = await model.create(itemData);
-            // Add key and events
-            const processedRecord = await processRow(createdRecord, kta, references, aggregations, registry);
-            const result = validateKeys(processedRecord, kta);
-            logger.debug(`[CREATE] Created ${model.name} with key: ${result.key ? JSON.stringify(result.key) : `id=${createdRecord.id}`}`);
-            return result;
-        } catch (error) {
-            throw translateDatabaseError(error, itemData, model.name);
-        }
-    };
-    return create;
-};
-
-export { getCreateOperation };
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiY3JlYXRlLmpzIiwic291cmNlcyI6W10sInNvdXJjZXNDb250ZW50IjpbXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6Ijs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7In0=
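
Reading aid (not part of the diff): a hypothetical call sketch for the create operation above, assuming createOperations wires in getCreateOperation as the sources suggest; the 'customer' key type, the id 42, and the item fields are placeholders.

// Hypothetical usage; a direct location key { kt: 'customer', lk: 42 } is written to
// itemData.customerId before model.create() runs, while hierarchical locators are
// only validated against their relationship chain.
import { createOperations } from '@fjell/lib-sequelize';

const createWidget = async (models, coordinate, registry, options) => {
    const operations = createOperations(models, coordinate, registry, options);
    return operations.create({ name: 'Widget' }, { locations: [{ kt: 'customer', lk: 42 }] });
};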