@nocobase/plugin-flow-engine 2.0.0-alpha.8 → 2.1.0-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/externalVersion.js +9 -9
- package/dist/locale/de-DE.json +62 -0
- package/dist/locale/en-US.json +57 -45
- package/dist/locale/es-ES.json +62 -0
- package/dist/locale/fr-FR.json +62 -0
- package/dist/locale/hu-HU.json +62 -0
- package/dist/locale/id-ID.json +62 -0
- package/dist/locale/index.d.ts +114 -90
- package/dist/locale/it-IT.json +62 -0
- package/dist/locale/ja-JP.json +62 -0
- package/dist/locale/ko-KR.json +62 -0
- package/dist/locale/nl-NL.json +62 -0
- package/dist/locale/pt-BR.json +62 -0
- package/dist/locale/ru-RU.json +62 -0
- package/dist/locale/tr-TR.json +62 -0
- package/dist/locale/uk-UA.json +62 -0
- package/dist/locale/vi-VN.json +62 -0
- package/dist/locale/zh-CN.json +58 -46
- package/dist/locale/zh-TW.json +62 -0
- package/dist/node_modules/ses/package.json +1 -1
- package/dist/server/collections/flowsql.js +1 -0
- package/dist/server/index.d.ts +1 -0
- package/dist/server/index.js +6 -0
- package/dist/server/plugin.d.ts +0 -4
- package/dist/server/plugin.js +17 -19
- package/dist/server/repository.d.ts +4 -1
- package/dist/server/repository.js +120 -8
- package/dist/server/server.js +10 -3
- package/dist/server/variables/registry.js +141 -40
- package/dist/server/variables/selects.d.ts +19 -0
- package/dist/server/variables/selects.js +80 -0
- package/dist/server/variables/utils.js +13 -3
- package/package.json +2 -2
package/dist/server/repository.js
CHANGED

@@ -204,6 +204,9 @@ const _FlowModelRepository = class _FlowModelRepository extends import_database.
     }
     return this.doGetJsonSchema(uid2, options);
   }
+  static optionsToJson(options) {
+    return import_lodash.default.isPlainObject(options) ? options : JSON.parse(options);
+  }
   nodesToSchema(nodes, rootUid) {
     const nodeAttributeSanitize = (node) => {
       const schema = {
@@ -426,13 +429,122 @@ const _FlowModelRepository = class _FlowModelRepository extends import_database.
     await this.clearXUidPathCache(result["uid"], transaction2);
     return result;
   }
-  async duplicate(…
-  …
-  if (!(…
+  async duplicate(modelUid, options) {
+    let nodes = await this.findNodesById(modelUid, { ...options, includeAsyncNode: true });
+    if (!(nodes == null ? void 0 : nodes.length)) {
       return null;
     }
-    this.…
-    …
+    nodes = this.dedupeNodesForDuplicate(nodes, modelUid);
+    const uidMap = {};
+    for (const n of nodes) {
+      uidMap[n["uid"]] = (0, import_utils.uid)();
+    }
+    const sorted = [...nodes].sort((a, b) => {
+      if (a.depth !== b.depth) return a.depth - b.depth;
+      const ap = a.parent || "";
+      const bp = b.parent || "";
+      if (ap !== bp) return ap < bp ? -1 : 1;
+      const at = a.type || "";
+      const bt = b.type || "";
+      if (at !== bt) return at < bt ? -1 : 1;
+      const as = a.sort ?? 0;
+      const bs = b.sort ?? 0;
+      return as - bs;
+    });
+    for (const n of sorted) {
+      const oldUid = n["uid"];
+      const newUid = uidMap[oldUid];
+      const oldParentUid = n["parent"];
+      const newParentUid = uidMap[oldParentUid] ?? null;
+      const optionsObj = this.replaceStepParamsModelUids(
+        import_lodash.default.isPlainObject(n.options) ? n.options : JSON.parse(n.options),
+        uidMap
+      );
+      if (newParentUid) {
+        optionsObj.parent = newParentUid;
+        optionsObj.parentId = newParentUid;
+      }
+      const schemaNode = {
+        uid: newUid,
+        ["x-async"]: !!n.async,
+        ...optionsObj
+      };
+      if (newParentUid) {
+        schemaNode.childOptions = {
+          parentUid: newParentUid,
+          type: n.type,
+          position: "last"
+        };
+      }
+      await this.insertSingleNode(schemaNode, { transaction: options == null ? void 0 : options.transaction });
+    }
+    return this.findModelById(uidMap[modelUid], { ...options });
+  }
+  dedupeNodesForDuplicate(nodes, rootUid) {
+    if (!Array.isArray(nodes) || nodes.length <= 1) {
+      return nodes;
+    }
+    const rowsByUid = import_lodash.default.groupBy(nodes, "uid");
+    const uniqueUids = Object.keys(rowsByUid);
+    if (uniqueUids.length === nodes.length) {
+      return nodes;
+    }
+    const uidsInSubtree = new Set(uniqueUids);
+    const rootDepthByUid = /* @__PURE__ */ new Map();
+    for (const uid2 of uniqueUids) {
+      const rows = rowsByUid[uid2] || [];
+      const depths = rows.map((row) => Number((row == null ? void 0 : row.depth) ?? 0));
+      rootDepthByUid.set(uid2, depths.length ? Math.min(...depths) : 0);
+    }
+    const pickRowForUid = (uid2, rows) => {
+      if (!(rows == null ? void 0 : rows.length)) return null;
+      if (rows.length === 1) return rows[0];
+      if (uid2 === rootUid) return rows[0];
+      let bestRow = rows[0];
+      let bestParentRootDepth = -1;
+      for (const row of rows) {
+        const parentUid = row == null ? void 0 : row.parent;
+        if (!parentUid || !uidsInSubtree.has(parentUid)) {
+          continue;
+        }
+        const parentRootDepth = rootDepthByUid.get(parentUid) ?? -1;
+        if (parentRootDepth > bestParentRootDepth) {
+          bestParentRootDepth = parentRootDepth;
+          bestRow = row;
+        }
+      }
+      return bestRow;
+    };
+    const uidsInQueryOrder = [];
+    const seenUidsInQueryOrder = /* @__PURE__ */ new Set();
+    for (const row of nodes) {
+      const uid2 = row == null ? void 0 : row.uid;
+      if (!uid2 || seenUidsInQueryOrder.has(uid2)) continue;
+      seenUidsInQueryOrder.add(uid2);
+      uidsInQueryOrder.push(uid2);
+    }
+    return uidsInQueryOrder.map((uid2) => pickRowForUid(uid2, rowsByUid[uid2])).filter(Boolean);
+  }
+  replaceStepParamsModelUids(options, uidMap) {
+    const opts = options && typeof options === "object" ? options : {};
+    const replaceUidString = (v) => typeof v === "string" && uidMap[v] ? uidMap[v] : v;
+    const replaceInPlace = (val) => {
+      if (Array.isArray(val)) {
+        for (let i = 0; i < val.length; i++) {
+          val[i] = replaceInPlace(val[i]);
+        }
+        return val;
+      }
+      if (val && typeof val === "object") {
+        for (const k of Object.keys(val)) {
+          val[k] = replaceInPlace(val[k]);
+        }
+        return val;
+      }
+      return replaceUidString(val);
+    };
+    if (opts.stepParams) opts.stepParams = replaceInPlace(opts.stepParams);
+    return opts;
   }
   async insert(schema, options) {
     const nodes = _FlowModelRepository.schemaToSingleNodes(schema);
@@ -1025,11 +1137,11 @@ WHERE TreeTable.depth = 1 AND TreeTable.ancestor = :ancestor and TreeTable.sort
     const children = nodes.filter((n) => n.parent === rootUid);
     const subModels = {};
     for (const child of children) {
-      const { subKey, subType } = child.options;
+      const { subKey, subType } = this.optionsToJson(child.options);
       if (!subKey) continue;
       const model = _FlowModelRepository.nodesToModel(nodes, child["uid"]) || {
         uid: child["uid"],
-        ...child.options,
+        ...this.optionsToJson(child.options),
         sortIndex: child.sort
       };
       model.sortIndex = child.sort;
@@ -1055,7 +1167,7 @@ WHERE TreeTable.depth = 1 AND TreeTable.ancestor = :ancestor and TreeTable.sort
     }
     return {
       uid: rootNode["uid"],
-      ...rootNode.options,
+      ...this.optionsToJson(rootNode.options),
       ...Object.keys(filteredSubModels).length > 0 ? { subModels: filteredSubModels } : {}
     };
   }
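
Taken together, the new repository methods give flow models a server-side clone path: duplicate() re-keys every node in the subtree, replaceStepParamsModelUids() rewrites model uids referenced from stepParams, and optionsToJson() tolerates options stored either as JSON strings or as plain objects. A minimal usage sketch (the surrounding wiring is an assumption; only getRepository("flowModels") and duplicate() appear in this diff):

    // Sketch only: assumes `db` is a NocoBase Database instance with the
    // flowModels collection registered by this plugin.
    async function cloneFlowModel(db, sourceUid) {
      const repository = db.getRepository("flowModels");
      // Returns the duplicated model tree (with freshly generated uids),
      // or null when the source uid resolves to no nodes.
      return await repository.duplicate(sourceUid);
    }
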
package/dist/server/server.js
CHANGED

@@ -127,15 +127,22 @@ class PluginUISchemaStorageServer extends import_server.Plugin {
       name: "flowModels",
       actions: {
         findOne: async (ctx, next) => {
-          const { uid: uid2, parentId, subKey } = ctx.action.params;
+          const { uid: uid2, parentId, subKey, includeAsyncNode = false } = ctx.action.params;
           const repository = ctx.db.getRepository("flowModels");
           if (uid2) {
-            ctx.body = await repository.findModelById(uid2);
+            ctx.body = await repository.findModelById(uid2, { includeAsyncNode });
           } else if (parentId) {
-            ctx.body = await repository.findModelByParentId(parentId, { subKey });
+            ctx.body = await repository.findModelByParentId(parentId, { subKey, includeAsyncNode });
           }
           await next();
         },
+        duplicate: async (ctx, next) => {
+          const { uid: uid2 } = ctx.action.params;
+          const repository = ctx.db.getRepository("flowModels");
+          const duplicated = await repository.duplicate(uid2);
+          ctx.body = duplicated;
+          await next();
+        },
         move: async (ctx, next) => {
           const { sourceId, targetId, position } = ctx.action.params;
           const repository = ctx.db.getRepository("flowModels");
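
The resource layer now exposes that clone path as a flowModels:duplicate action and threads includeAsyncNode through findOne. A hedged client-side sketch using NocoBase's <resource>:<action> URL convention (the /api prefix, POST method, and { data } response envelope are assumptions, not shown in this diff):

    // Sketch only: hypothetical HTTP call against a running NocoBase server.
    async function duplicateFlowModelOverHttp(baseURL, token, uid) {
      const res = await fetch(`${baseURL}/api/flowModels:duplicate?uid=${encodeURIComponent(uid)}`, {
        method: "POST",
        headers: { Authorization: `Bearer ${token}` },
      });
      const body = await res.json();
      return body.data; // whatever repository.duplicate() returned as ctx.body
    }
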
package/dist/server/variables/registry.js
CHANGED

@@ -43,6 +43,7 @@ module.exports = __toCommonJS(registry_exports);
 var import_lodash = __toESM(require("lodash"));
 var import_contexts = require("../template/contexts");
 var import_utils = require("@nocobase/utils");
+var import_selects = require("./selects");
 class VariableRegistry {
   vars = /* @__PURE__ */ new Map();
   register(def) {
@@ -128,8 +129,8 @@ function inferSelectsFromUsage(paths = [], _params) {
   const generatedFields = fieldSet.size ? Array.from(fieldSet) : void 0;
   return { generatedAppends, generatedFields };
 }
-async function fetchRecordWithRequestCache(koaCtx, dataSourceKey, collection, filterByTk, fields, appends) {
-  var _a, _b, _c, _d;
+async function fetchRecordWithRequestCache(koaCtx, dataSourceKey, collection, filterByTk, fields, appends, preferFullRecord, associationName, sourceId) {
+  var _a, _b, _c, _d, _e;
   try {
     const log = (_b = (_a = koaCtx.app) == null ? void 0 : _a.logger) == null ? void 0 : _b.child({
       module: "plugin-flow-engine",
@@ -145,24 +146,34 @@ async function fetchRecordWithRequestCache(koaCtx, dataSourceKey, collection, fi
     const ds = koaCtx.app.dataSourceManager.get(dataSourceKey || "main");
     const cm = ds.collectionManager;
     if (!(cm == null ? void 0 : cm.db)) return void 0;
-    const repo = cm.db.getRepository(collection);
+    const repo = associationName && typeof sourceId !== "undefined" ? cm.db.getRepository(associationName, sourceId) : cm.db.getRepository(collection);
+    const modelInfo = (_c = repo.collection) == null ? void 0 : _c.model;
+    const pkAttr = modelInfo == null ? void 0 : modelInfo.primaryKeyAttribute;
+    const pkIsValid = pkAttr && (modelInfo == null ? void 0 : modelInfo.rawAttributes) && Object.prototype.hasOwnProperty.call(modelInfo.rawAttributes, pkAttr);
+    const fieldsWithPk = Array.isArray(fields) && fields.length > 0 && pkIsValid ? Array.from(/* @__PURE__ */ new Set([...fields, pkAttr])) : fields;
+    const cacheKeyFields = preferFullRecord && pkIsValid ? void 0 : Array.isArray(fieldsWithPk) ? [...fieldsWithPk].sort() : void 0;
+    const cacheKeyAppends = preferFullRecord ? void 0 : Array.isArray(appends) ? [...appends].sort() : void 0;
     const keyObj = {
       ds: dataSourceKey || "main",
       c: collection,
       tk: filterByTk,
-      f: …
-      a: …
+      f: cacheKeyFields,
+      a: cacheKeyAppends,
+      full: preferFullRecord ? true : void 0,
+      assoc: associationName,
+      sid: typeof sourceId === "undefined" ? void 0 : sourceId
     };
     const key = JSON.stringify(keyObj);
     if (cache) {
       if (cache.has(key)) {
         return cache.get(key);
       }
-      const needFields = Array.isArray(…
-      const needAppends = Array.isArray(appends) ? new Set(appends) : void 0;
+      const needFields = !preferFullRecord && Array.isArray(fieldsWithPk) ? [...new Set(fieldsWithPk)] : void 0;
+      const needAppends = !preferFullRecord && Array.isArray(appends) ? new Set(appends) : void 0;
       for (const [cacheKey, cacheVal] of cache.entries()) {
         const parsed = JSON.parse(cacheKey);
-        if (!parsed || parsed.ds !== keyObj.ds || parsed.c !== keyObj.c || parsed.tk !== keyObj.tk)
+        if (!parsed || parsed.ds !== keyObj.ds || parsed.c !== keyObj.c || parsed.tk !== keyObj.tk || parsed.assoc !== keyObj.assoc || parsed.sid !== keyObj.sid)
+          continue;
         const cachedFields = new Set(parsed.f || []);
         const cachedAppends = new Set(parsed.a || []);
         const fieldCoveredByAppends = (fieldPath) => {
@@ -173,34 +184,40 @@ async function fetchRecordWithRequestCache(koaCtx, dataSourceKey, collection, fi
           }
           return false;
         };
-        const fieldsOk = …
+        const fieldsOk = needFields ? needFields.every((f) => cachedFields.has(f) || fieldCoveredByAppends(f)) : parsed.f === void 0;
         const appendsOk = !needAppends || [...needAppends].every((a) => cachedAppends.has(a));
-        …
+        const fullOk = preferFullRecord ? parsed.full === true : true;
+        if (fieldsOk && appendsOk && fullOk) {
           return cacheVal;
         }
       }
     }
-    const rec = await repo.findOne(…
-    …
-    …
-    …
-    …
+    const rec = await repo.findOne(
+      preferFullRecord ? {
+        filterByTk
+      } : {
+        filterByTk,
+        fields: fieldsWithPk,
+        appends
+      }
+    );
     const json = rec ? rec.toJSON() : void 0;
     if (cache) cache.set(key, json);
     return json;
   } catch (e) {
-    const log = (…
+    const log = (_e = (_d = koaCtx.app) == null ? void 0 : _d.logger) == null ? void 0 : _e.child({
       module: "plugin-flow-engine",
       submodule: "variables.resolve",
       method: "fetchRecordWithRequestCache"
     });
-    …
+    const errMsg = e instanceof Error ? e.message : String(e);
+    log == null ? void 0 : log.warn("[variables.resolve] fetchRecordWithRequestCache error", {
       ds: dataSourceKey,
       collection,
       tk: filterByTk,
       fields,
       appends,
-      error: …
+      error: errMsg
     });
     return void 0;
   }
@@ -218,17 +235,28 @@ function attachGenericRecordVariables(flowCtx, koaCtx, usage, contextParams) {
     const topParams = import_lodash.default.get(contextParams, varName);
     if (isRecordParams(topParams)) {
       const { generatedAppends, generatedFields } = inferSelectsFromUsage(usedPaths, topParams);
+      const hasDirectRefTop = (usedPaths || []).some((p) => p === "");
       flowCtx.defineProperty(varName, {
         get: async () => {
           const dataSourceKey = (topParams == null ? void 0 : topParams.dataSourceKey) || "main";
-          …
+          const fixed = (0, import_selects.adjustSelectsForCollection)(
             koaCtx,
             dataSourceKey,
             topParams.collection,
-            topParams.filterByTk,
             generatedFields,
             generatedAppends
           );
+          return await fetchRecordWithRequestCache(
+            koaCtx,
+            dataSourceKey,
+            topParams.collection,
+            topParams.filterByTk,
+            fixed.fields,
+            fixed.appends,
+            hasDirectRefTop,
+            topParams.associationName,
+            topParams.sourceId
+          );
         },
         cache: true
       });
@@ -261,44 +289,107 @@ function attachGenericRecordVariables(flowCtx, koaCtx, usage, contextParams) {
       segmentMap.set(seg, arr);
     }
     const segEntries = Array.from(segmentMap.entries());
-    const …
+    const oneLevelRecordChildren = segEntries.filter(([seg]) => {
       const idx = parseIndexSegment(seg);
       const nestedObj = import_lodash.default.get(contextParams, [varName, seg]) ?? (idx ? import_lodash.default.get(contextParams, [varName, idx]) : void 0);
       const dotted = (contextParams || {})[`${varName}.${seg}`] ?? (idx ? (contextParams || {})[`${varName}.${idx}`] : void 0);
       return isRecordParams(nestedObj) || isRecordParams(dotted);
     });
-    …
+    const deepRecordMap = /* @__PURE__ */ new Map();
+    const cp = contextParams;
+    if (cp && typeof cp === "object") {
+      const cpRec = cp;
+      for (const key of Object.keys(cpRec)) {
+        if (!key || key !== varName && !key.startsWith(`${varName}.`)) continue;
+        if (key === varName) continue;
+        const val = cpRec[key];
+        if (!isRecordParams(val)) continue;
+        const relative = key.slice(varName.length + 1);
+        if (!relative) continue;
+        deepRecordMap.set(relative, val);
+      }
+    }
+    if (!oneLevelRecordChildren.length && deepRecordMap.size === 0) continue;
     flowCtx.defineProperty(varName, {
       get: () => {
-        const …
-        …
-        …
-        const …
-        …
-        if (!effRemainders.length) {
-        const all = usedPaths.map(
-        (p) => p.startsWith(`${seg}.`) ? p.slice(seg.length + 1) : p.startsWith(`${seg}[`) ? p.slice(seg.length) : ""
-        ).filter((x) => !!x);
-        if (all.length) effRemainders = all;
-        }
-        const { generatedAppends, generatedFields } = inferSelectsFromUsage(effRemainders, recordParams);
-        const definitionKey = idx ?? seg;
-        subContext.defineProperty(definitionKey, {
+        const root = new import_contexts.ServerBaseContext();
+        const definedFirstLevel = /* @__PURE__ */ new Set();
+        const defineRecordGetter = (container, key, recordParams, subPaths = [], preferFull) => {
+          const { generatedAppends, generatedFields } = inferSelectsFromUsage(subPaths, recordParams);
+          container.defineProperty(key, {
            get: async () => {
              const dataSourceKey = (recordParams == null ? void 0 : recordParams.dataSourceKey) || "main";
-              …
+              const fixed = (0, import_selects.adjustSelectsForCollection)(
                koaCtx,
                dataSourceKey,
                recordParams.collection,
-                recordParams.filterByTk,
                generatedFields,
                generatedAppends
              );
+              return await fetchRecordWithRequestCache(
+                koaCtx,
+                dataSourceKey,
+                recordParams.collection,
+                recordParams.filterByTk,
+                fixed.fields,
+                fixed.appends,
+                preferFull || ((subPaths == null ? void 0 : subPaths.length) ?? 0) === 0,
+                recordParams.associationName,
+                recordParams.sourceId
+              );
            },
            cache: true
          });
+        };
+        const subContainers = /* @__PURE__ */ new Map();
+        const ensureSubContainer = (parent, key) => {
+          let map = subContainers.get(parent);
+          if (!map) {
+            map = /* @__PURE__ */ new Map();
+            subContainers.set(parent, map);
+          }
+          let child = map.get(key);
+          if (!child) {
+            const inst = new import_contexts.ServerBaseContext();
+            parent.defineProperty(key, { get: () => inst.createProxy(), cache: true });
+            map.set(key, inst);
+            child = inst;
+          }
+          return child;
+        };
+        for (const [seg, remainders] of oneLevelRecordChildren) {
+          const idx = parseIndexSegment(seg);
+          const recordParams = import_lodash.default.get(contextParams, [varName, seg]) ?? (idx ? import_lodash.default.get(contextParams, [varName, idx]) : void 0) ?? (contextParams || {})[`${varName}.${seg}`] ?? (idx ? (contextParams || {})[`${varName}.${idx}`] : void 0);
+          let effRemainders = (remainders || []).filter((r) => !!r);
+          if (!effRemainders.length) {
+            const all = usedPaths.map(
+              (p) => p.startsWith(`${seg}.`) ? p.slice(seg.length + 1) : p.startsWith(`${seg}[`) ? p.slice(seg.length) : ""
+            ).filter((x) => !!x);
+            if (all.length) effRemainders = all;
+          }
+          const hasDirectRefOne = (usedPaths || []).some((p) => p === seg || !!idx && p === `[${idx}]`);
+          defineRecordGetter(root, idx ?? seg, recordParams, effRemainders, hasDirectRefOne);
+          definedFirstLevel.add(idx ?? seg);
        }
-        …
+        for (const [relative, recordParams] of deepRecordMap.entries()) {
+          const segs = String(relative).split(".").filter(Boolean);
+          if (segs.length === 0) continue;
+          const first = segs[0];
+          let container;
+          if (definedFirstLevel.has(first)) {
+            continue;
+          } else {
+            container = root;
+            for (let i = 0; i < segs.length - 1; i++) {
+              container = ensureSubContainer(container, segs[i]);
+            }
+          }
+          const leaf = segs[segs.length - 1];
+          const subPaths = (usedPaths || []).map((p) => p === relative ? "" : p.startsWith(relative + ".") ? p.slice(relative.length + 1) : "").filter((x) => x !== "");
+          const hasDirectRef = (usedPaths || []).some((p) => p === relative);
+          defineRecordGetter(container, leaf, recordParams, subPaths, hasDirectRef);
+        }
+        return root.createProxy();
      },
      cache: true
    });
@@ -318,7 +409,17 @@ function registerBuiltInVariables(reg) {
       const authObj = koaCtx.auth;
       const uid = (_a = authObj == null ? void 0 : authObj.user) == null ? void 0 : _a.id;
       if (typeof uid === "undefined" || uid === null) return void 0;
-      return await fetchRecordWithRequestCache(…
+      return await fetchRecordWithRequestCache(
+        koaCtx,
+        "main",
+        "users",
+        uid,
+        generatedFields,
+        generatedAppends,
+        void 0,
+        void 0,
+        void 0
+      );
     },
     cache: true
   });
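
The changes to fetchRecordWithRequestCache tighten the per-request cache: entries are keyed by data source, collection, filterByTk, association name, and sourceId, and an existing entry is reused only when its selected fields/appends cover the new request, or when it is a full-record fetch and a full record is needed. A standalone sketch of that reuse rule, simplified from the diff above (the fieldCoveredByAppends shortcut is omitted):

    // Simplified model of the cache-hit test; `cached` and `needed` mirror keyObj.
    function canServeFromCache(cached, needed) {
      if (!cached || cached.ds !== needed.ds || cached.c !== needed.c || cached.tk !== needed.tk) return false;
      if (cached.assoc !== needed.assoc || cached.sid !== needed.sid) return false;
      const cachedFields = new Set(cached.f || []);
      const cachedAppends = new Set(cached.a || []);
      const fieldsOk = needed.f ? needed.f.every((f) => cachedFields.has(f)) : cached.f === undefined;
      const appendsOk = !needed.a || needed.a.every((a) => cachedAppends.has(a));
      const fullOk = needed.full ? cached.full === true : true;
      return fieldsOk && appendsOk && fullOk;
    }

    // Example: an entry cached with f: ["id", "nickname"], a: ["roles"] can serve a
    // request needing only f: ["nickname"], but not one that also needs a "profile" append.
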
package/dist/server/variables/selects.d.ts
ADDED

@@ -0,0 +1,19 @@
+/**
+ * This file is part of the NocoBase (R) project.
+ * Copyright (c) 2020-2024 NocoBase Co., Ltd.
+ * Authors: NocoBase Team.
+ *
+ * This project is dual-licensed under AGPL-3.0 and NocoBase Commercial License.
+ * For more information, please refer to: https://www.nocobase.com/agreement.
+ */
+import { ResourcerContext } from '@nocobase/resourcer';
+/**
+ * For the given collection, fix up the selects:
+ * - If a field is a single segment and names an association (e.g. 'roles'), move it from fields to appends.
+ * - If a field has multiple segments and the first segment is an association name (e.g. 'roles.name'), make sure appends contains that association and replace the first segment with the model's real association name.
+ * - Non-association fields: keep them only when the model has that attribute (or its snake/camel variant); otherwise drop them to avoid database errors.
+ */
+export declare function adjustSelectsForCollection(koaCtx: ResourcerContext, dataSourceKey: string, collection: string, fields?: string[], appends?: string[]): {
+    fields?: string[];
+    appends?: string[];
+};
package/dist/server/variables/selects.js
ADDED

@@ -0,0 +1,80 @@
+/**
+ * This file is part of the NocoBase (R) project.
+ * Copyright (c) 2020-2024 NocoBase Co., Ltd.
+ * Authors: NocoBase Team.
+ *
+ * This project is dual-licensed under AGPL-3.0 and NocoBase Commercial License.
+ * For more information, please refer to: https://www.nocobase.com/agreement.
+ */
+
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var selects_exports = {};
+__export(selects_exports, {
+  adjustSelectsForCollection: () => adjustSelectsForCollection
+});
+module.exports = __toCommonJS(selects_exports);
+function adjustSelectsForCollection(koaCtx, dataSourceKey, collection, fields, appends) {
+  var _a, _b, _c, _d;
+  const ds = koaCtx.app.dataSourceManager.get(dataSourceKey || "main");
+  const cm = ds.collectionManager;
+  const coll = (_b = (_a = cm == null ? void 0 : cm.db) == null ? void 0 : _a.getCollection) == null ? void 0 : _b.call(_a, collection);
+  const assocKeys = Object.keys(((_c = coll == null ? void 0 : coll.model) == null ? void 0 : _c.associations) || {});
+  const rawAttrs = ((_d = coll == null ? void 0 : coll.model) == null ? void 0 : _d.rawAttributes) || {};
+  const toCamel = (s) => s.replace(/_([a-zA-Z0-9])/g, (_m, c) => String(c).toUpperCase());
+  const toSnake = (s) => s.replace(/([A-Z])/g, "_$1").toLowerCase().replace(/^_/, "");
+  const assocMap = /* @__PURE__ */ new Map();
+  for (const k of assocKeys) {
+    assocMap.set(k, k);
+    assocMap.set(toSnake(k), k);
+    assocMap.set(toCamel(k), k);
+  }
+  const outFields = [];
+  const outAppends = new Set(appends || []);
+  for (const f of fields || []) {
+    const segs = String(f).split(".").filter(Boolean);
+    if (!segs.length) continue;
+    const first = segs[0];
+    const assocCanonical = assocMap.get(first) || assocMap.get(toCamel(first)) || assocMap.get(toSnake(first));
+    if (assocCanonical) {
+      outAppends.add(assocCanonical);
+      if (segs.length === 1) {
+        continue;
+      }
+      outFields.push([assocCanonical, ...segs.slice(1)].join("."));
+      continue;
+    }
+    if (rawAttrs[first]) {
+      outFields.push(f);
+    } else if (rawAttrs[toSnake(first)]) {
+      outFields.push([toSnake(first), ...segs.slice(1)].join("."));
+    } else if (rawAttrs[toCamel(first)]) {
+      outFields.push([toCamel(first), ...segs.slice(1)].join("."));
+    } else {
+      continue;
+    }
+  }
+  return {
+    fields: outFields.length ? outFields : void 0,
+    appends: outAppends.size ? Array.from(outAppends) : void 0
+  };
+}
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  adjustSelectsForCollection
+});
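
adjustSelectsForCollection normalizes an inferred select list against the collection's Sequelize model: single-segment association names move from fields to appends, dotted paths that start with an association keep the canonical association name, snake/camel variants are mapped to the real attribute, and unknown attributes are dropped. A standalone sketch of that split with a mocked schema (the association and attribute names are illustrative only; the real function reads them from coll.model):

    // Simplified model of the field/append split; snake/camel canonicalization omitted.
    const associations = new Set(["roles"]);          // assumed example association
    const attributes = new Set(["id", "nickname"]);   // assumed example attributes

    function splitSelects(fields) {
      const outFields = [];
      const outAppends = new Set();
      for (const f of fields) {
        const [first, ...rest] = String(f).split(".");
        if (associations.has(first)) {
          outAppends.add(first);                                       // association -> appends
          if (rest.length) outFields.push([first, ...rest].join(".")); // keep the nested path
        } else if (attributes.has(first)) {
          outFields.push(f);                                           // known attribute kept
        }                                                              // unknown attribute dropped
      }
      return { fields: outFields, appends: Array.from(outAppends) };
    }

    // splitSelects(["roles", "roles.name", "nickname", "bogus"])
    //   -> { fields: ["roles.name", "nickname"], appends: ["roles"] }
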
package/dist/server/variables/utils.js
CHANGED

@@ -30,8 +30,9 @@ __export(utils_exports, {
 });
 module.exports = __toCommonJS(utils_exports);
 var import_registry = require("./registry");
+var import_selects = require("./selects");
 async function prefetchRecordsForResolve(koaCtx, items) {
-  var _a, _b, _c, _d, _e, _f;
+  var _a, _b, _c, _d, _e, _f, _g;
   try {
     const log = (_b = (_a = koaCtx.app) == null ? void 0 : _a.logger) == null ? void 0 : _b.child({ module: "plugin-flow-engine", submodule: "variables.prefetch" });
     const groupMap = /* @__PURE__ */ new Map();
@@ -69,7 +70,10 @@ async function prefetchRecordsForResolve(koaCtx, items) {
       const filterByTk = recordParams == null ? void 0 : recordParams.filterByTk;
       if (!collection || typeof filterByTk === "undefined") continue;
       const group = ensureGroup(dataSourceKey, collection, filterByTk);
-      …
+      let { generatedAppends, generatedFields } = (0, import_registry.inferSelectsFromUsage)(remainders);
+      const fixed = (0, import_selects.adjustSelectsForCollection)(koaCtx, dataSourceKey, collection, generatedFields, generatedAppends);
+      generatedFields = fixed.fields;
+      generatedAppends = fixed.appends;
       if (generatedFields == null ? void 0 : generatedFields.length) generatedFields.forEach((f) => group.fields.add(f));
       if (generatedAppends == null ? void 0 : generatedAppends.length) generatedAppends.forEach((a) => group.appends.add(a));
     }
@@ -86,6 +90,12 @@ async function prefetchRecordsForResolve(koaCtx, items) {
       const cm = ds.collectionManager;
       if (!(cm == null ? void 0 : cm.db)) continue;
       const repo = cm.db.getRepository(collection);
+      const modelInfo = (_d = repo.collection) == null ? void 0 : _d.model;
+      const pkAttr = modelInfo == null ? void 0 : modelInfo.primaryKeyAttribute;
+      const pkIsValid = pkAttr && (modelInfo == null ? void 0 : modelInfo.rawAttributes) && Object.prototype.hasOwnProperty.call(modelInfo.rawAttributes, pkAttr);
+      if (fields.size && pkIsValid) {
+        fields.add(pkAttr);
+      }
       const fld = fields.size ? Array.from(fields).sort() : void 0;
       const app = appends.size ? Array.from(appends).sort() : void 0;
       const rec = await repo.findOne({ filterByTk, fields: fld, appends: app });
@@ -104,7 +114,7 @@ async function prefetchRecordsForResolve(koaCtx, items) {
       }
     }
   } catch (e) {
-    (…
+    (_g = (_f = (_e = koaCtx.app) == null ? void 0 : _e.logger) == null ? void 0 : _f.child({ module: "plugin-flow-engine", submodule: "variables.prefetch" })) == null ? void 0 : _g.debug("[variables.resolve] prefetch fatal error", { error: (e == null ? void 0 : e.message) || String(e) });
   }
 }
 // Annotate the CommonJS export names for ESM import in node:
package/package.json
CHANGED

@@ -4,7 +4,7 @@
   "displayName.zh-CN": "前端流引擎",
   "description": "",
   "description.zh-CN": "",
-  "version": "2.0.0-alpha.8",
+  "version": "2.1.0-alpha.1",
   "main": "./dist/server/index.js",
   "license": "AGPL-3.0",
   "devDependencies": {
@@ -24,5 +24,5 @@
     "@nocobase/test": "2.x",
     "@nocobase/utils": "2.x"
   },
-  "gitHead": "…
+  "gitHead": "d27baf21569643d6fa83f882233f4e90eb5b89f1"
 }