@fjell/lib-sequelize 4.4.81 → 4.4.83
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/Coordinate.d.ts +1 -1
- package/dist/Coordinate.d.ts.map +1 -1
- package/dist/Definition.d.ts +1 -1
- package/dist/Definition.d.ts.map +1 -1
- package/dist/EventCoordinator.d.ts +1 -1
- package/dist/EventCoordinator.d.ts.map +1 -1
- package/dist/KeyMaster.d.ts +1 -1
- package/dist/KeyMaster.d.ts.map +1 -1
- package/dist/Operations.d.ts +1 -1
- package/dist/Operations.d.ts.map +1 -1
- package/dist/Options.d.ts +1 -1
- package/dist/Options.d.ts.map +1 -1
- package/dist/QueryBuilder.d.ts +1 -1
- package/dist/QueryBuilder.d.ts.map +1 -1
- package/dist/RowProcessor.d.ts +3 -3
- package/dist/RowProcessor.d.ts.map +1 -1
- package/dist/SequelizeLibrary.d.ts +1 -1
- package/dist/SequelizeLibrary.d.ts.map +1 -1
- package/dist/SequelizeLibraryFactory.d.ts +1 -1
- package/dist/SequelizeLibraryFactory.d.ts.map +1 -1
- package/dist/contained/SequelizeLibrary.d.ts +2 -3
- package/dist/contained/SequelizeLibrary.d.ts.map +1 -1
- package/dist/index.js +97 -73
- package/dist/index.js.map +3 -3
- package/dist/ops/all.d.ts +1 -2
- package/dist/ops/all.d.ts.map +1 -1
- package/dist/ops/create.d.ts +1 -1
- package/dist/ops/create.d.ts.map +1 -1
- package/dist/ops/find.d.ts +2 -2
- package/dist/ops/find.d.ts.map +1 -1
- package/dist/ops/get.d.ts +3 -3
- package/dist/ops/get.d.ts.map +1 -1
- package/dist/ops/one.d.ts +1 -1
- package/dist/ops/one.d.ts.map +1 -1
- package/dist/ops/remove.d.ts +1 -1
- package/dist/ops/remove.d.ts.map +1 -1
- package/dist/ops/update.d.ts +1 -1
- package/dist/ops/update.d.ts.map +1 -1
- package/dist/ops/upsert.d.ts +1 -1
- package/dist/ops/upsert.d.ts.map +1 -1
- package/dist/primary/SequelizeLibrary.d.ts +2 -3
- package/dist/primary/SequelizeLibrary.d.ts.map +1 -1
- package/dist/processing/AggsAdapter.d.ts +1 -1
- package/dist/processing/AggsAdapter.d.ts.map +1 -1
- package/dist/processing/ReferenceBuilder.d.ts +2 -3
- package/dist/processing/ReferenceBuilder.d.ts.map +1 -1
- package/dist/processing/RefsAdapter.d.ts +1 -1
- package/dist/processing/RefsAdapter.d.ts.map +1 -1
- package/package.json +7 -5
package/dist/index.js
CHANGED
@@ -55,14 +55,11 @@ import * as Library2 from "@fjell/lib";

 // src/ops/all.ts
 import { createAllWrapper } from "@fjell/core";
-import { validateKeys } from "@fjell/
+import { validateKeys } from "@fjell/validation";

 // src/QueryBuilder.ts
-import {
-
-isCondition,
-isPriKey
-} from "@fjell/core";
+import { isCondition } from "@fjell/types";
+import { isComKey, isPriKey } from "@fjell/core";
 import { Op } from "sequelize";

 // src/util/general.ts
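The import reshuffle above recurs throughout this diff: validateKeys now comes from @fjell/validation (the ops/update.ts hunk further down shows the old source as "@fjell/core/validation"), while the key and condition guards are split between @fjell/core and @fjell/types. Assembled from the added lines in this hunk, the new layout amounts to roughly the following (a sketch of the import statements only, not the surrounding bundle code):

// Import sources in dist/index.js after this change, per the "+" lines above
import { createAllWrapper, isComKey, isPriKey } from "@fjell/core";
import { isCondition } from "@fjell/types";
import { validateKeys } from "@fjell/validation";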
@@ -926,11 +923,11 @@ var removeEvents = (item) => {

 // src/processing/ReferenceBuilder.ts
 var buildSequelizeReference = async (item, referenceDefinition, registry, context) => {
-const
+const libLogger4 = logger_default.get("processing", "ReferenceBuilder");
 const isCompositeItem = referenceDefinition.kta.length > 1;
 const primaryKeyType = referenceDefinition.kta[0];
 if (isCompositeItem) {
-
+libLogger4.debug(
 "Detected composite item reference - will use ComKey with empty loc array",
 {
 kta: referenceDefinition.kta,
@@ -941,7 +938,7 @@ var buildSequelizeReference = async (item, referenceDefinition, registry, contex
 );
 }
 if (!registry) {
-
+libLogger4.error("Registry not provided for reference processing", {
 component: "lib-sequelize",
 subcomponent: "ReferenceBuilder",
 operation: "buildSequelizeReference",
@@ -956,7 +953,7 @@ var buildSequelizeReference = async (item, referenceDefinition, registry, contex
 }
 const library = registry.get(referenceDefinition.kta);
 if (!library) {
-
+libLogger4.error("Referenced library not found in registry", {
 component: "lib-sequelize",
 subcomponent: "ReferenceBuilder",
 operation: "buildSequelizeReference",
@@ -988,7 +985,7 @@ var buildSequelizeReference = async (item, referenceDefinition, registry, contex
 const columnName = referenceDefinition.locationColumns[i];
 const locValue = item[columnName];
 if (locValue == null) {
-
+libLogger4.warning(
 `Location column '${columnName}' is null/undefined for reference '${referenceDefinition.property}'. Falling back to empty loc array search.`
 );
 hasNullLocation = true;
@@ -1011,7 +1008,7 @@ var buildSequelizeReference = async (item, referenceDefinition, registry, contex
 pk: columnValue,
 loc
 };
-
+libLogger4.debug("Built full ComKey with location context", {
 itemKey,
 locationColumns: referenceDefinition.locationColumns,
 property: referenceDefinition.property
@@ -1023,12 +1020,12 @@ var buildSequelizeReference = async (item, referenceDefinition, registry, contex
 pk: columnValue,
 loc: []
 };
-
+libLogger4.default("Using empty loc array for composite item reference", {
 kta: referenceDefinition.kta,
 property: referenceDefinition.property
 });
 }
-
+libLogger4.default("Created reference key", {
 itemKey,
 isCompositeItem,
 hasLocationColumns: !!referenceDefinition.locationColumns,
@@ -1037,10 +1034,10 @@ var buildSequelizeReference = async (item, referenceDefinition, registry, contex
 let referencedItem;
 if (context) {
 if (context.isCached(itemKey)) {
-
+libLogger4.debug("Using cached reference", { itemKey, property: referenceDefinition.property });
 referencedItem = context.getCached(itemKey);
 } else if (context.isInProgress(itemKey)) {
-
+libLogger4.default("Circular dependency detected, creating reference placeholder", {
 itemKey,
 property: referenceDefinition.property
 });
@@ -1055,7 +1052,7 @@ var buildSequelizeReference = async (item, referenceDefinition, registry, contex
 referencedItem = await library.operations.get(itemKey);
 context.setCached(itemKey, referencedItem);
 } catch (error) {
-
+libLogger4.error("Failed to load reference", {
 component: "lib-sequelize",
 subcomponent: "ReferenceBuilder",
 operation: "buildSequelizeReference",
@@ -1767,7 +1764,7 @@ var getAllOperation = (models, definition, registry) => {

 // src/ops/create.ts
 import { createCreateWrapper, isComKey as isComKey3, isPriKey as isPriKey3 } from "@fjell/core";
-import { validateKeys as validateKeys2 } from "@fjell/
+import { validateKeys as validateKeys2 } from "@fjell/validation";
 var logger10 = logger_default.get("sequelize", "ops", "create");
 async function validateHierarchicalChain(models, locKey, kta) {
 const locatorIndex = kta.indexOf(locKey.kt);
@@ -1913,7 +1910,42 @@ var getCreateOperation = (models, definition, registry) => {
 logger10.trace(`[CREATE] Executing ${model.name}.create() with data: ${stringifyJSON(itemData)}`);
 queryMetrics.recordQuery(model.name);
 const createdRecord = await model.create(itemData);
-
+let recordToProcess = createdRecord;
+if (kta.length > 1) {
+const includesForKey = [];
+let currentInclude = null;
+for (let i = kta.length - 1; i > 0; i--) {
+const relationshipType = kta[i];
+const relationshipInfo = buildRelationshipPath(model, relationshipType, kta, true);
+if (relationshipInfo.found && !relationshipInfo.isDirect) {
+const intermediateType = kta[i - 1];
+const newInclude = {
+association: intermediateType,
+required: false
+};
+if (currentInclude) {
+newInclude.include = [currentInclude];
+}
+currentInclude = newInclude;
+}
+}
+if (currentInclude) {
+includesForKey.push(currentInclude);
+}
+if (includesForKey.length > 0) {
+logger10.debug(`[CREATE] Reloading ${model.name} with includes for key construction`, { includes: includesForKey });
+queryMetrics.recordQuery(model.name);
+const reloadedRecord = await model.findByPk(createdRecord.get("id"), {
+include: includesForKey
+});
+if (reloadedRecord) {
+recordToProcess = reloadedRecord;
+} else {
+logger10.warning(`[CREATE] Failed to reload ${model.name} after creation, using original record`);
+}
+}
+}
+const processedRecord = await processRow(recordToProcess, kta, references || [], aggregations || [], registry, void 0, void 0);
 const result = validateKeys2(processedRecord, kta);
 logger10.debug(`[CREATE] Created ${model.name} with key: ${result.key ? JSON.stringify(result.key) : `id=${createdRecord.id}`}`);
 return result;
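The functional change in this hunk is a post-create reload: when the key type array (kta) has more than one entry, the create path now builds a chain of non-required Sequelize includes and reloads the created row by primary key so the composite key can be derived from its associations. A rough illustration of what the loop yields, using hypothetical key types ["comment", "post", "author"] and assuming buildRelationshipPath reports "author" as reachable only through the model's direct "post" association:

// Hypothetical walk of kta = ["comment", "post", "author"] (names are illustrative, not from the package):
//   i = 2: "author" found but not direct -> include the intermediate type kta[1] = "post"
//   i = 1: "post" found and direct       -> no additional include added
const includesForKey = [
  { association: "post", required: false }
];
// The created row would then be reloaded with these includes before key processing:
// await model.findByPk(createdRecord.get("id"), { include: includesForKey });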
@@ -1941,7 +1973,7 @@ var getCreateOperation = (models, definition, registry) => {

 // src/ops/find.ts
 import { createFindWrapper } from "@fjell/core";
-import { validateKeys as validateKeys3 } from "@fjell/
+import { validateKeys as validateKeys3 } from "@fjell/validation";
 var logger11 = logger_default.get("sequelize", "ops", "find");
 var getFindOperation = (models, definition, registry) => {
 const { options: { finders, references, aggregations } } = definition;
@@ -2041,9 +2073,9 @@ import {
 isComKey as isComKey4,
 isPriKey as isPriKey4,
 isValidItemKey,
-
+NotFoundError
 } from "@fjell/core";
-import {
+import { validateKeys as validateKeys4 } from "@fjell/validation";
 var logger12 = logger_default.get("sequelize", "ops", "get");
 var processCompositeKey = (comKey, model, kta) => {
 const where = { id: comKey.pk };
@@ -2150,21 +2182,20 @@ var getGetOperation = (models, definition, registry) => {

 // src/ops/one.ts
 import { createOneWrapper } from "@fjell/core";
-var logger13 = logger_default.get("sequelize", "ops", "one");
 var getOneOperation = (models, definition, registry) => {
 return createOneWrapper(
 definition.coordinate,
 async (itemQuery, locations) => {
 const locs = locations ?? [];
-
-
+logger_default.debug(`ONE operation called on ${models[0].name} with ${locs.length} location filters: ${locs.map((loc) => `${loc.kt}=${loc.lk}`).join(", ") || "none"}`);
+logger_default.default(`One configured for ${models[0].name} delegating to all operation`);
 const result = await getAllOperation(models, definition, registry)(itemQuery ?? {}, locs, { limit: 1 });
 if (result.items.length > 0) {
 const item = result.items[0];
-
+logger_default.debug(`[ONE] Found ${models[0].name} record with key: ${item.key ? JSON.stringify(item.key) : "unknown"}`);
 return item;
 } else {
-
+logger_default.debug(`[ONE] No ${models[0].name} record found`);
 return null;
 }
 }
@@ -2172,9 +2203,8 @@ var getOneOperation = (models, definition, registry) => {
 };

 // src/ops/remove.ts
-import { abbrevIK, isComKey as isComKey5, isPriKey as isPriKey5, isValidItemKey as isValidItemKey2, createRemoveWrapper } from "@fjell/core";
-
-var logger14 = logger_default.get("sequelize", "ops", "remove");
+import { abbrevIK, isComKey as isComKey5, isPriKey as isPriKey5, isValidItemKey as isValidItemKey2, createRemoveWrapper, NotFoundError as NotFoundError2 } from "@fjell/core";
+var libLogger3 = logger_default.get("sequelize", "ops", "remove");
 var processCompositeKey2 = (comKey, model, kta) => {
 const where = { id: comKey.pk };
 const includes = [];
@@ -2182,7 +2212,7 @@ var processCompositeKey2 = (comKey, model, kta) => {
 const relationshipInfo = buildRelationshipPath(model, locator.kt, kta);
 if (!relationshipInfo.found) {
 const errorMessage = `Composite key locator '${locator.kt}' cannot be resolved on model '${model.name}' or through its relationships.`;
-
+libLogger3.error(errorMessage, { key: comKey, kta });
 throw new Error(errorMessage);
 }
 if (relationshipInfo.path) {
@@ -2209,25 +2239,25 @@ var getRemoveOperation = (models, definition, _registry) => {
 async (key) => {
 try {
 if (!isValidItemKey2(key)) {
-
+logger_default.error("Key for Remove is not a valid ItemKey: %j", key);
 throw new Error("Key for Remove is not a valid ItemKey");
 }
 const keyDescription = isPriKey5(key) ? `primary key: pk=${key.pk}` : `composite key: pk=${key.pk}, loc=[${key.loc.map((l) => `${l.kt}=${l.lk}`).join(", ")}]`;
-
-
+logger_default.debug(`REMOVE operation called on ${models[0].name} with ${keyDescription}`);
+logger_default.default(`Remove configured for ${models[0].name} with ${isPriKey5(key) ? "primary" : "composite"} key`);
 const model = models[0];
 let item;
 let returnItem;
-
+logger_default.debug("remove: %s", abbrevIK(key));
 if (isPriKey5(key)) {
-
+logger_default.debug(`[REMOVE] Executing ${model.name}.findByPk() with pk: ${key.pk}`);
 queryMetrics.recordQuery(model.name);
 item = await model.findByPk(key.pk);
 } else if (isComKey5(key)) {
 const comKey = key;
 const queryOptions = processCompositeKey2(comKey, model, kta);
-
-
+logger_default.default(`Remove composite key query for ${model.name} with where fields: ${queryOptions.where ? Object.keys(queryOptions.where).join(", ") : "none"}`);
+logger_default.debug(`[REMOVE] Executing ${model.name}.findOne() with options: ${stringifyJSON(queryOptions)}`);
 queryMetrics.recordQuery(model.name);
 item = await model.findOne(queryOptions);
 }
@@ -2247,14 +2277,14 @@
 if (model.getAttributes().deletedAt) {
 item.deletedAt = /* @__PURE__ */ new Date();
 }
-
+logger_default.debug(`[REMOVE] Executing ${model.name}.save() for soft delete`);
 queryMetrics.recordQuery(model.name);
 await item?.save();
 returnItem = item?.get({ plain: true });
 returnItem = addKey(item, returnItem, kta);
 returnItem = populateEvents(returnItem);
 } else if (options.deleteOnRemove) {
-
+logger_default.debug(`[REMOVE] Executing ${model.name}.destroy() for hard delete`);
 queryMetrics.recordQuery(model.name);
 await item?.destroy();
 returnItem = item?.get({ plain: true });
@@ -2263,7 +2293,7 @@
 } else {
 throw new Error("No deletedAt or isDeleted attribute found in model, and deleteOnRemove is not set");
 }
-
+logger_default.debug(`[REMOVE] Removed ${model.name} with key: ${returnItem.key ? JSON.stringify(returnItem.key) : `id=${item.id}`}`);
 const { references } = options;
 if (references && references.length > 0) {
 returnItem = addRefsToSequelizeItem(returnItem, references);
@@ -2278,16 +2308,11 @@ var getRemoveOperation = (models, definition, _registry) => {
 };

 // src/ops/update.ts
-import {
-
-createUpdateWrapper,
-isComKey as isComKey6,
-isPriKey as isPriKey6
-} from "@fjell/core";
-import { validateKeys as validateKeys5 } from "@fjell/core/validation";
+import { abbrevIK as abbrevIK2, createUpdateWrapper, isComKey as isComKey6, isPriKey as isPriKey6 } from "@fjell/core";
+import { validateKeys as validateKeys5 } from "@fjell/validation";
 import { NotFoundError as NotFoundError3 } from "@fjell/core";
 import { Op as Op3 } from "sequelize";
-var
+var logger13 = logger_default.get("sequelize", "ops", "update");
 var mergeIncludes2 = (existingIncludes, newIncludes) => {
 const mergedIncludes = [...existingIncludes];
 for (const newInclude of newIncludes) {
@@ -2319,15 +2344,15 @@ var getUpdateOperation = (models, definition, registry) => {
 );
 }
 const keyDescription = isPriKey6(key) ? `primary key: pk=${key.pk}` : `composite key: pk=${key.pk}, loc=[${key.loc.map((l) => `${l.kt}=${l.lk}`).join(", ")}]`;
-
+logger13.debug(`UPDATE operation called on ${models[0].name} with ${keyDescription}`, { options });
 const { coordinate } = definition;
 const { kta } = coordinate;
-
+logger13.debug("update: %s, %j", abbrevIK2(key), item);
 const model = models[0];
 let response;
 if (isPriKey6(key)) {
 const priKey = key;
-
+logger13.trace(`[UPDATE] Executing ${model.name}.findByPk() with pk: ${priKey.pk}`);
 queryMetrics.recordQuery(model.name);
 response = await model.findByPk(priKey.pk);
 } else if (isComKey6(key)) {
@@ -2338,7 +2363,7 @@
 const relationshipInfo = buildRelationshipPath(model, locator.kt, kta, true);
 if (!relationshipInfo.found) {
 const errorMessage = `Composite key locator '${locator.kt}' cannot be resolved on model '${model.name}' or through its relationships.`;
-
+logger13.error(errorMessage, { key: comKey, kta });
 throw new Error(errorMessage);
 }
 if (relationshipInfo.isDirect) {
@@ -2357,8 +2382,8 @@
 if (additionalIncludes.length > 0) {
 queryOptions.include = mergeIncludes2([], additionalIncludes);
 }
-
-
+logger13.default(`Update composite key query for ${model.name} with where fields: ${queryOptions.where ? Object.keys(queryOptions.where).join(", ") : "none"}`);
+logger13.trace(`[UPDATE] Executing ${model.name}.findOne() with options: ${stringifyJSON(queryOptions)}`);
 queryMetrics.recordQuery(model.name);
 response = await model.findOne(queryOptions);
 }
@@ -2378,14 +2403,14 @@
 if (aggregations && aggregations.length > 0) {
 updateProps = removeAggsFromItem(updateProps, aggregations);
 }
-
-
-
+logger13.default(`Update found ${model.name} record to modify`);
+logger13.default(`Update properties configured: ${Object.keys(updateProps).join(", ")}`);
+logger13.trace(`[UPDATE] Executing ${model.name}.update() with properties: ${stringifyJSON(updateProps)}`);
 queryMetrics.recordQuery(model.name);
 response = await response.update(updateProps);
 const processedItem = await processRow(response, kta, references || [], aggregations || [], registry, void 0, void 0);
 const returnItem = validateKeys5(processedItem, kta);
-
+logger13.debug(`[UPDATE] Updated ${model.name} with key: ${returnItem.key ? JSON.stringify(returnItem.key) : `id=${response.id}`}`);
 return returnItem;
 } catch (error) {
 if (error instanceof NotFoundError3) throw error;
@@ -2397,7 +2422,6 @@ var getUpdateOperation = (models, definition, registry) => {

 // src/ops/upsert.ts
 import { createUpsertWrapper, isValidItemKey as isValidItemKey3, NotFoundError as NotFoundError4 } from "@fjell/core";
-var logger16 = logger_default.get("sequelize", "ops", "upsert");
 var getUpsertOperation = (models, definition, registry) => {
 const get = getGetOperation(models, definition, registry);
 const update = getUpdateOperation(models, definition, registry);
@@ -2406,7 +2430,7 @@ var getUpsertOperation = (models, definition, registry) => {
 definition.coordinate,
 async (key, item, locations, options) => {
 if (!isValidItemKey3(key)) {
-
+logger_default.error("Invalid key for upsert operation", {
 operation: "upsert",
 model: models[0]?.name,
 key: stringifyJSON(key),
@@ -2417,19 +2441,19 @@ var getUpsertOperation = (models, definition, registry) => {
 });
 throw new Error(`Invalid key for upsert operation: ${stringifyJSON(key)}. Expected valid PriKey or ComKey structure.`);
 }
-
+logger_default.debug(`[UPSERT] Attempting upsert with key: ${stringifyJSON(key)}`, { options });
 let resultItem = null;
 try {
-
+logger_default.debug(`[UPSERT] Retrieving item by key: ${stringifyJSON(key)}`);
 resultItem = await get(key);
 } catch (error) {
 const isNotFound = error instanceof NotFoundError4 || error?.name === "NotFoundError" || error?.errorInfo?.code === "NOT_FOUND";
 if (isNotFound) {
-
+logger_default.debug(`[UPSERT] Item not found, creating new item with key: ${stringifyJSON(key)}, errorType: ${error?.name}, errorCode: ${error?.errorInfo?.code}`);
 const createOptions3 = locations ? { locations } : { key };
 resultItem = await create(item, createOptions3);
 } else {
-
+logger_default.error(`[UPSERT] Unexpected error during get operation`, {
 operation: "upsert",
 phase: "get-existing",
 model: models[0]?.name,
@@ -2447,9 +2471,9 @@
 if (!resultItem) {
 throw new Error(`Failed to retrieve or create item for key: ${stringifyJSON(key)}`);
 }
-
+logger_default.debug(`[UPSERT] Updating item with properties, key: ${stringifyJSON(key)}`, { options });
 resultItem = await update(resultItem.key, item, options);
-
+logger_default.debug(`[UPSERT] Item upserted successfully: ${stringifyJSON(resultItem)}`);
 return resultItem;
 }
 );
@@ -2478,9 +2502,9 @@ var createOperations = (models, coordinate, registry, options) => {
 };

 // src/SequelizeLibrary.ts
-var
+var logger14 = logger_default.get("SequelizeLibrary");
 var createSequelizeLibrary = (registry, coordinate, models, options) => {
-
+logger14.debug("createSequelizeLibrary", { coordinate, models, registry, options });
 const operations = createOperations(models, coordinate, registry, options);
 const wrappedOperations = Library3.wrapOperations(operations, options, coordinate, registry);
 const libLibrary = Library3.createLibrary(registry, coordinate, wrappedOperations, options);
@@ -2494,10 +2518,10 @@ var isSequelizeLibrary = (library) => {
 };

 // src/SequelizeLibraryFactory.ts
-var
+var logger15 = logger_default.get("InstanceFactory");
 var createSequelizeLibraryFactory = (models, options) => {
 return (coordinate, context) => {
-
+logger15.debug("Creating Sequelize instance", {
 coordinate,
 registry: context.registry,
 models: models.map((m) => m.name),
@@ -2542,9 +2566,9 @@ __export(primary_exports, {

 // src/primary/SequelizeLibrary.ts
 import { Primary } from "@fjell/lib";
-var
+var logger16 = logger_default.get("lib-sequelize", "primary", "library");
 function createSequelizeLibrary3(keyType, models, libOptions = {}, scopes = [], registry) {
-
+logger16.debug("createSequelizeLibrary", { keyType, models, libOptions, scopes });
 const coordinate = createCoordinate([keyType], scopes);
 const options = createOptions2(libOptions);
 const operations = createOperations(models, coordinate, registry, options);