@nocobase/plugin-flow-engine 2.0.0-alpha.6 → 2.0.0-alpha.60

This diff shows the changes between publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in their respective public registries.
@@ -51,6 +51,7 @@ export declare class FlowModelRepository extends Repository {
51
51
  getParentJsonSchema(uid: string, options?: GetJsonSchemaOptions): Promise<any>;
52
52
  getParentProperty(uid: string, options?: GetPropertiesOptions): Promise<any>;
53
53
  getJsonSchema(uid: string, options?: GetJsonSchemaOptions): Promise<any>;
54
+ static optionsToJson(options: any): any;
54
55
  nodesToSchema(nodes: any, rootUid: any): {
55
56
  uid: any;
56
57
  "x-async": boolean;
@@ -70,7 +71,9 @@ export declare class FlowModelRepository extends Repository {
70
71
  }): Promise<void>;
71
72
  remove(uid: string, options?: Transactionable & removeParentOptions): Promise<void>;
72
73
  insertAdjacent(position: 'beforeBegin' | 'afterBegin' | 'beforeEnd' | 'afterEnd', target: string, schema: any, options?: InsertAdjacentOptions): Promise<any>;
73
- duplicate(uid: string, options?: Transactionable): Promise<any>;
74
+ duplicate(modelUid: string, options?: Transactionable): Promise<any>;
75
+ private dedupeNodesForDuplicate;
76
+ private replaceStepParamsModelUids;
74
77
  insert(schema: any, options?: Transactionable): Promise<any>;
75
78
  insertNewSchema(schema: any, options?: Transactionable & {
76
79
  returnNode?: boolean;
@@ -204,6 +204,9 @@ const _FlowModelRepository = class _FlowModelRepository extends import_database.
204
204
  }
205
205
  return this.doGetJsonSchema(uid2, options);
206
206
  }
207
+ static optionsToJson(options) {
208
+ return import_lodash.default.isPlainObject(options) ? options : JSON.parse(options);
209
+ }
207
210
  nodesToSchema(nodes, rootUid) {
208
211
  const nodeAttributeSanitize = (node) => {
209
212
  const schema = {
@@ -426,13 +429,122 @@ const _FlowModelRepository = class _FlowModelRepository extends import_database.
426
429
  await this.clearXUidPathCache(result["uid"], transaction2);
427
430
  return result;
428
431
  }
429
- async duplicate(uid2, options) {
430
- const s = await this.getJsonSchema(uid2, { ...options, includeAsyncNode: true });
431
- if (!(s == null ? void 0 : s["uid"])) {
432
+ async duplicate(modelUid, options) {
433
+ let nodes = await this.findNodesById(modelUid, { ...options, includeAsyncNode: true });
434
+ if (!(nodes == null ? void 0 : nodes.length)) {
432
435
  return null;
433
436
  }
434
- this.regenerateUid(s);
435
- return this.insert(s, options);
437
+ nodes = this.dedupeNodesForDuplicate(nodes, modelUid);
438
+ const uidMap = {};
439
+ for (const n of nodes) {
440
+ uidMap[n["uid"]] = (0, import_utils.uid)();
441
+ }
442
+ const sorted = [...nodes].sort((a, b) => {
443
+ if (a.depth !== b.depth) return a.depth - b.depth;
444
+ const ap = a.parent || "";
445
+ const bp = b.parent || "";
446
+ if (ap !== bp) return ap < bp ? -1 : 1;
447
+ const at = a.type || "";
448
+ const bt = b.type || "";
449
+ if (at !== bt) return at < bt ? -1 : 1;
450
+ const as = a.sort ?? 0;
451
+ const bs = b.sort ?? 0;
452
+ return as - bs;
453
+ });
454
+ for (const n of sorted) {
455
+ const oldUid = n["uid"];
456
+ const newUid = uidMap[oldUid];
457
+ const oldParentUid = n["parent"];
458
+ const newParentUid = uidMap[oldParentUid] ?? null;
459
+ const optionsObj = this.replaceStepParamsModelUids(
460
+ import_lodash.default.isPlainObject(n.options) ? n.options : JSON.parse(n.options),
461
+ uidMap
462
+ );
463
+ if (newParentUid) {
464
+ optionsObj.parent = newParentUid;
465
+ optionsObj.parentId = newParentUid;
466
+ }
467
+ const schemaNode = {
468
+ uid: newUid,
469
+ ["x-async"]: !!n.async,
470
+ ...optionsObj
471
+ };
472
+ if (newParentUid) {
473
+ schemaNode.childOptions = {
474
+ parentUid: newParentUid,
475
+ type: n.type,
476
+ position: "last"
477
+ };
478
+ }
479
+ await this.insertSingleNode(schemaNode, { transaction: options == null ? void 0 : options.transaction });
480
+ }
481
+ return this.findModelById(uidMap[modelUid], { ...options });
482
+ }
483
+ dedupeNodesForDuplicate(nodes, rootUid) {
484
+ if (!Array.isArray(nodes) || nodes.length <= 1) {
485
+ return nodes;
486
+ }
487
+ const rowsByUid = import_lodash.default.groupBy(nodes, "uid");
488
+ const uniqueUids = Object.keys(rowsByUid);
489
+ if (uniqueUids.length === nodes.length) {
490
+ return nodes;
491
+ }
492
+ const uidsInSubtree = new Set(uniqueUids);
493
+ const rootDepthByUid = /* @__PURE__ */ new Map();
494
+ for (const uid2 of uniqueUids) {
495
+ const rows = rowsByUid[uid2] || [];
496
+ const depths = rows.map((row) => Number((row == null ? void 0 : row.depth) ?? 0));
497
+ rootDepthByUid.set(uid2, depths.length ? Math.min(...depths) : 0);
498
+ }
499
+ const pickRowForUid = (uid2, rows) => {
500
+ if (!(rows == null ? void 0 : rows.length)) return null;
501
+ if (rows.length === 1) return rows[0];
502
+ if (uid2 === rootUid) return rows[0];
503
+ let bestRow = rows[0];
504
+ let bestParentRootDepth = -1;
505
+ for (const row of rows) {
506
+ const parentUid = row == null ? void 0 : row.parent;
507
+ if (!parentUid || !uidsInSubtree.has(parentUid)) {
508
+ continue;
509
+ }
510
+ const parentRootDepth = rootDepthByUid.get(parentUid) ?? -1;
511
+ if (parentRootDepth > bestParentRootDepth) {
512
+ bestParentRootDepth = parentRootDepth;
513
+ bestRow = row;
514
+ }
515
+ }
516
+ return bestRow;
517
+ };
518
+ const uidsInQueryOrder = [];
519
+ const seenUidsInQueryOrder = /* @__PURE__ */ new Set();
520
+ for (const row of nodes) {
521
+ const uid2 = row == null ? void 0 : row.uid;
522
+ if (!uid2 || seenUidsInQueryOrder.has(uid2)) continue;
523
+ seenUidsInQueryOrder.add(uid2);
524
+ uidsInQueryOrder.push(uid2);
525
+ }
526
+ return uidsInQueryOrder.map((uid2) => pickRowForUid(uid2, rowsByUid[uid2])).filter(Boolean);
527
+ }
528
+ replaceStepParamsModelUids(options, uidMap) {
529
+ const opts = options && typeof options === "object" ? options : {};
530
+ const replaceUidString = (v) => typeof v === "string" && uidMap[v] ? uidMap[v] : v;
531
+ const replaceInPlace = (val) => {
532
+ if (Array.isArray(val)) {
533
+ for (let i = 0; i < val.length; i++) {
534
+ val[i] = replaceInPlace(val[i]);
535
+ }
536
+ return val;
537
+ }
538
+ if (val && typeof val === "object") {
539
+ for (const k of Object.keys(val)) {
540
+ val[k] = replaceInPlace(val[k]);
541
+ }
542
+ return val;
543
+ }
544
+ return replaceUidString(val);
545
+ };
546
+ if (opts.stepParams) opts.stepParams = replaceInPlace(opts.stepParams);
547
+ return opts;
436
548
  }
437
549
  async insert(schema, options) {
438
550
  const nodes = _FlowModelRepository.schemaToSingleNodes(schema);
@@ -1025,11 +1137,11 @@ WHERE TreeTable.depth = 1 AND TreeTable.ancestor = :ancestor and TreeTable.sort
1025
1137
  const children = nodes.filter((n) => n.parent === rootUid);
1026
1138
  const subModels = {};
1027
1139
  for (const child of children) {
1028
- const { subKey, subType } = child.options;
1140
+ const { subKey, subType } = this.optionsToJson(child.options);
1029
1141
  if (!subKey) continue;
1030
1142
  const model = _FlowModelRepository.nodesToModel(nodes, child["uid"]) || {
1031
1143
  uid: child["uid"],
1032
- ...child.options,
1144
+ ...this.optionsToJson(child.options),
1033
1145
  sortIndex: child.sort
1034
1146
  };
1035
1147
  model.sortIndex = child.sort;
@@ -1055,7 +1167,7 @@ WHERE TreeTable.depth = 1 AND TreeTable.ancestor = :ancestor and TreeTable.sort
1055
1167
  }
1056
1168
  return {
1057
1169
  uid: rootNode["uid"],
1058
- ...rootNode.options,
1170
+ ...this.optionsToJson(rootNode.options),
1059
1171
  ...Object.keys(filteredSubModels).length > 0 ? { subModels: filteredSubModels } : {}
1060
1172
  };
1061
1173
  }
@@ -127,15 +127,22 @@ class PluginUISchemaStorageServer extends import_server.Plugin {
127
127
  name: "flowModels",
128
128
  actions: {
129
129
  findOne: async (ctx, next) => {
130
- const { uid: uid2, parentId, subKey } = ctx.action.params;
130
+ const { uid: uid2, parentId, subKey, includeAsyncNode = false } = ctx.action.params;
131
131
  const repository = ctx.db.getRepository("flowModels");
132
132
  if (uid2) {
133
- ctx.body = await repository.findModelById(uid2);
133
+ ctx.body = await repository.findModelById(uid2, { includeAsyncNode });
134
134
  } else if (parentId) {
135
- ctx.body = await repository.findModelByParentId(parentId, { subKey });
135
+ ctx.body = await repository.findModelByParentId(parentId, { subKey, includeAsyncNode });
136
136
  }
137
137
  await next();
138
138
  },
139
+ duplicate: async (ctx, next) => {
140
+ const { uid: uid2 } = ctx.action.params;
141
+ const repository = ctx.db.getRepository("flowModels");
142
+ const duplicated = await repository.duplicate(uid2);
143
+ ctx.body = duplicated;
144
+ await next();
145
+ },
139
146
  move: async (ctx, next) => {
140
147
  const { sourceId, targetId, position } = ctx.action.params;
141
148
  const repository = ctx.db.getRepository("flowModels");
@@ -39,4 +39,14 @@ declare class VariableRegistry {
39
39
  attachUsedVariables(ctx: HttpRequestContext, koaCtx: ResourcerContext, template: JSONValue, contextParams: any): Promise<void>;
40
40
  }
41
41
  export declare const variables: VariableRegistry;
42
+ /** 仅测试使用:重置变量注册表为内置默认集 */
43
+ /**
44
+ * 从使用路径推断查询所需的 fields 与 appends。
45
+ * @param paths 使用到的子路径数组
46
+ * @param params 显式参数(仅用于兼容签名)
47
+ */
48
+ export declare function inferSelectsFromUsage(paths?: string[], _params?: unknown): {
49
+ generatedAppends?: string[];
50
+ generatedFields?: string[];
51
+ };
42
52
  export {};
@@ -36,12 +36,14 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
36
36
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
37
37
  var registry_exports = {};
38
38
  __export(registry_exports, {
39
+ inferSelectsFromUsage: () => inferSelectsFromUsage,
39
40
  variables: () => variables
40
41
  });
41
42
  module.exports = __toCommonJS(registry_exports);
42
43
  var import_lodash = __toESM(require("lodash"));
43
44
  var import_contexts = require("../template/contexts");
44
45
  var import_utils = require("@nocobase/utils");
46
+ var import_selects = require("./selects");
45
47
  class VariableRegistry {
46
48
  vars = /* @__PURE__ */ new Map();
47
49
  register(def) {
@@ -90,40 +92,51 @@ if (!g[GLOBAL_KEY]) {
90
92
  g[GLOBAL_KEY] = new VariableRegistry();
91
93
  }
92
94
  const variables = g[GLOBAL_KEY];
93
- function inferSelectsFromUsage(paths = [], params) {
95
+ function inferSelectsFromUsage(paths = [], _params) {
94
96
  if (!Array.isArray(paths) || paths.length === 0) {
95
97
  return { generatedAppends: void 0, generatedFields: void 0 };
96
98
  }
97
99
  const appendSet = /* @__PURE__ */ new Set();
98
100
  const fieldSet = /* @__PURE__ */ new Set();
101
+ const normalizePath = (raw) => {
102
+ if (!raw) return "";
103
+ let s = String(raw);
104
+ s = s.replace(/\[(?:\d+)\]/g, "");
105
+ s = s.replace(/\[(?:"((?:[^"\\]|\\.)*)"|'((?:[^'\\]|\\.)*)')\]/g, (_m, g1, g2) => `.${g1 || g2}`);
106
+ s = s.replace(/\.\.+/g, ".");
107
+ s = s.replace(/^\./, "").replace(/\.$/, "");
108
+ return s;
109
+ };
99
110
  for (let path of paths) {
100
111
  if (!path) continue;
101
112
  while (/^\[(\d+)\](\.|$)/.test(path)) {
102
113
  path = path.replace(/^\[(\d+)\]\.?/, "");
103
114
  }
104
- if (!path) continue;
105
- let first = "";
106
- let rest = "";
107
- const mStr = path.match(/^\[(?:"((?:[^"\\]|\\.)*)"|'((?:[^'\\]|\\.)*)')\](.*)$/);
108
- if (mStr) {
109
- first = (mStr[1] ?? mStr[2]) || "";
110
- rest = mStr[3] || "";
111
- } else {
112
- const m = path.match(/^([^.[]+)([\s\S]*)$/);
113
- first = (m == null ? void 0 : m[1]) ?? "";
114
- rest = (m == null ? void 0 : m[2]) ?? "";
115
+ const norm = normalizePath(path);
116
+ if (!norm) continue;
117
+ const segments = norm.split(".").filter(Boolean);
118
+ if (segments.length === 0) continue;
119
+ if (segments.length === 1) {
120
+ fieldSet.add(segments[0]);
121
+ continue;
122
+ }
123
+ for (let i = 0; i < segments.length - 1; i++) {
124
+ appendSet.add(segments.slice(0, i + 1).join("."));
115
125
  }
116
- if (!first) continue;
117
- const hasDeep = rest.includes(".") || rest.includes("[");
118
- if (hasDeep) appendSet.add(first);
119
- else fieldSet.add(first);
126
+ fieldSet.add(segments.join("."));
120
127
  }
121
128
  const generatedAppends = appendSet.size ? Array.from(appendSet) : void 0;
122
129
  const generatedFields = fieldSet.size ? Array.from(fieldSet) : void 0;
123
130
  return { generatedAppends, generatedFields };
124
131
  }
125
- async function fetchRecordWithRequestCache(koaCtx, dataSourceKey, collection, filterByTk, fields, appends) {
132
+ async function fetchRecordWithRequestCache(koaCtx, dataSourceKey, collection, filterByTk, fields, appends, preferFullRecord, associationName, sourceId) {
133
+ var _a, _b, _c, _d, _e;
126
134
  try {
135
+ const log = (_b = (_a = koaCtx.app) == null ? void 0 : _a.logger) == null ? void 0 : _b.child({
136
+ module: "plugin-flow-engine",
137
+ submodule: "variables.resolve",
138
+ method: "fetchRecordWithRequestCache"
139
+ });
127
140
  const kctx = koaCtx;
128
141
  if (!kctx.state) kctx.state = {};
129
142
  if (!kctx.state["__varResolveBatchCache"]) {
@@ -133,42 +146,79 @@ async function fetchRecordWithRequestCache(koaCtx, dataSourceKey, collection, fi
133
146
  const ds = koaCtx.app.dataSourceManager.get(dataSourceKey || "main");
134
147
  const cm = ds.collectionManager;
135
148
  if (!(cm == null ? void 0 : cm.db)) return void 0;
136
- const repo = cm.db.getRepository(collection);
149
+ const repo = associationName && typeof sourceId !== "undefined" ? cm.db.getRepository(associationName, sourceId) : cm.db.getRepository(collection);
150
+ const modelInfo = (_c = repo.collection) == null ? void 0 : _c.model;
151
+ const pkAttr = modelInfo == null ? void 0 : modelInfo.primaryKeyAttribute;
152
+ const pkIsValid = pkAttr && (modelInfo == null ? void 0 : modelInfo.rawAttributes) && Object.prototype.hasOwnProperty.call(modelInfo.rawAttributes, pkAttr);
153
+ const fieldsWithPk = Array.isArray(fields) && fields.length > 0 && pkIsValid ? Array.from(/* @__PURE__ */ new Set([...fields, pkAttr])) : fields;
154
+ const cacheKeyFields = preferFullRecord && pkIsValid ? void 0 : Array.isArray(fieldsWithPk) ? [...fieldsWithPk].sort() : void 0;
155
+ const cacheKeyAppends = preferFullRecord ? void 0 : Array.isArray(appends) ? [...appends].sort() : void 0;
137
156
  const keyObj = {
138
157
  ds: dataSourceKey || "main",
139
158
  c: collection,
140
159
  tk: filterByTk,
141
- f: Array.isArray(fields) ? [...fields].sort() : void 0,
142
- a: Array.isArray(appends) ? [...appends].sort() : void 0
160
+ f: cacheKeyFields,
161
+ a: cacheKeyAppends,
162
+ full: preferFullRecord ? true : void 0,
163
+ assoc: associationName,
164
+ sid: typeof sourceId === "undefined" ? void 0 : sourceId
143
165
  };
144
166
  const key = JSON.stringify(keyObj);
145
167
  if (cache) {
146
- if (cache.has(key)) return cache.get(key);
147
- const needFields = Array.isArray(fields) ? new Set(fields) : void 0;
148
- const needAppends = Array.isArray(appends) ? new Set(appends) : void 0;
149
- let fallbackAny = void 0;
168
+ if (cache.has(key)) {
169
+ return cache.get(key);
170
+ }
171
+ const needFields = !preferFullRecord && Array.isArray(fieldsWithPk) ? [...new Set(fieldsWithPk)] : void 0;
172
+ const needAppends = !preferFullRecord && Array.isArray(appends) ? new Set(appends) : void 0;
150
173
  for (const [cacheKey, cacheVal] of cache.entries()) {
151
174
  const parsed = JSON.parse(cacheKey);
152
- if (!parsed || parsed.ds !== keyObj.ds || parsed.c !== keyObj.c || parsed.tk !== keyObj.tk) continue;
153
- const cachedFields = Array.isArray(parsed.f) ? new Set(parsed.f) : void 0;
154
- const cachedAppends = Array.isArray(parsed.a) ? new Set(parsed.a) : void 0;
155
- const fieldsOk = !needFields || cachedFields && [...needFields].every((x) => cachedFields.has(x));
156
- const appendsOk = !needAppends || cachedAppends && [...needAppends].every((x) => cachedAppends.has(x));
157
- if (fieldsOk && appendsOk) return cacheVal;
158
- if (typeof fallbackAny === "undefined") fallbackAny = cacheVal;
175
+ if (!parsed || parsed.ds !== keyObj.ds || parsed.c !== keyObj.c || parsed.tk !== keyObj.tk || parsed.assoc !== keyObj.assoc || parsed.sid !== keyObj.sid)
176
+ continue;
177
+ const cachedFields = new Set(parsed.f || []);
178
+ const cachedAppends = new Set(parsed.a || []);
179
+ const fieldCoveredByAppends = (fieldPath) => {
180
+ const p = String(fieldPath || "");
181
+ for (const a of cachedAppends) {
182
+ if (!a) continue;
183
+ if (p === a || p.startsWith(a + ".")) return true;
184
+ }
185
+ return false;
186
+ };
187
+ const fieldsOk = needFields ? needFields.every((f) => cachedFields.has(f) || fieldCoveredByAppends(f)) : parsed.f === void 0;
188
+ const appendsOk = !needAppends || [...needAppends].every((a) => cachedAppends.has(a));
189
+ const fullOk = preferFullRecord ? parsed.full === true : true;
190
+ if (fieldsOk && appendsOk && fullOk) {
191
+ return cacheVal;
192
+ }
159
193
  }
160
- if (typeof fallbackAny !== "undefined") return fallbackAny;
161
194
  }
162
- const tk = filterByTk;
163
- const rec = await repo.findOne({
164
- filterByTk: tk,
165
- fields,
166
- appends
167
- });
195
+ const rec = await repo.findOne(
196
+ preferFullRecord ? {
197
+ filterByTk
198
+ } : {
199
+ filterByTk,
200
+ fields: fieldsWithPk,
201
+ appends
202
+ }
203
+ );
168
204
  const json = rec ? rec.toJSON() : void 0;
169
205
  if (cache) cache.set(key, json);
170
206
  return json;
171
- } catch (_2) {
207
+ } catch (e) {
208
+ const log = (_e = (_d = koaCtx.app) == null ? void 0 : _d.logger) == null ? void 0 : _e.child({
209
+ module: "plugin-flow-engine",
210
+ submodule: "variables.resolve",
211
+ method: "fetchRecordWithRequestCache"
212
+ });
213
+ const errMsg = e instanceof Error ? e.message : String(e);
214
+ log == null ? void 0 : log.warn("[variables.resolve] fetchRecordWithRequestCache error", {
215
+ ds: dataSourceKey,
216
+ collection,
217
+ tk: filterByTk,
218
+ fields,
219
+ appends,
220
+ error: errMsg
221
+ });
172
222
  return void 0;
173
223
  }
174
224
  }
@@ -185,17 +235,28 @@ function attachGenericRecordVariables(flowCtx, koaCtx, usage, contextParams) {
185
235
  const topParams = import_lodash.default.get(contextParams, varName);
186
236
  if (isRecordParams(topParams)) {
187
237
  const { generatedAppends, generatedFields } = inferSelectsFromUsage(usedPaths, topParams);
238
+ const hasDirectRefTop = (usedPaths || []).some((p) => p === "");
188
239
  flowCtx.defineProperty(varName, {
189
240
  get: async () => {
190
241
  const dataSourceKey = (topParams == null ? void 0 : topParams.dataSourceKey) || "main";
191
- return await fetchRecordWithRequestCache(
242
+ const fixed = (0, import_selects.adjustSelectsForCollection)(
192
243
  koaCtx,
193
244
  dataSourceKey,
194
245
  topParams.collection,
195
- topParams.filterByTk,
196
246
  generatedFields,
197
247
  generatedAppends
198
248
  );
249
+ return await fetchRecordWithRequestCache(
250
+ koaCtx,
251
+ dataSourceKey,
252
+ topParams.collection,
253
+ topParams.filterByTk,
254
+ fixed.fields,
255
+ fixed.appends,
256
+ hasDirectRefTop,
257
+ topParams.associationName,
258
+ topParams.sourceId
259
+ );
199
260
  },
200
261
  cache: true
201
262
  });
@@ -228,44 +289,107 @@ function attachGenericRecordVariables(flowCtx, koaCtx, usage, contextParams) {
228
289
  segmentMap.set(seg, arr);
229
290
  }
230
291
  const segEntries = Array.from(segmentMap.entries());
231
- const recordChildren = segEntries.filter(([seg]) => {
292
+ const oneLevelRecordChildren = segEntries.filter(([seg]) => {
232
293
  const idx = parseIndexSegment(seg);
233
294
  const nestedObj = import_lodash.default.get(contextParams, [varName, seg]) ?? (idx ? import_lodash.default.get(contextParams, [varName, idx]) : void 0);
234
295
  const dotted = (contextParams || {})[`${varName}.${seg}`] ?? (idx ? (contextParams || {})[`${varName}.${idx}`] : void 0);
235
296
  return isRecordParams(nestedObj) || isRecordParams(dotted);
236
297
  });
237
- if (!recordChildren.length) continue;
298
+ const deepRecordMap = /* @__PURE__ */ new Map();
299
+ const cp = contextParams;
300
+ if (cp && typeof cp === "object") {
301
+ const cpRec = cp;
302
+ for (const key of Object.keys(cpRec)) {
303
+ if (!key || key !== varName && !key.startsWith(`${varName}.`)) continue;
304
+ if (key === varName) continue;
305
+ const val = cpRec[key];
306
+ if (!isRecordParams(val)) continue;
307
+ const relative = key.slice(varName.length + 1);
308
+ if (!relative) continue;
309
+ deepRecordMap.set(relative, val);
310
+ }
311
+ }
312
+ if (!oneLevelRecordChildren.length && deepRecordMap.size === 0) continue;
238
313
  flowCtx.defineProperty(varName, {
239
314
  get: () => {
240
- const subContext = new import_contexts.ServerBaseContext();
241
- for (const [seg, remainders] of recordChildren) {
242
- const idx = parseIndexSegment(seg);
243
- const recordParams = import_lodash.default.get(contextParams, [varName, seg]) ?? (idx ? import_lodash.default.get(contextParams, [varName, idx]) : void 0) ?? (contextParams || {})[`${varName}.${seg}`] ?? (idx ? (contextParams || {})[`${varName}.${idx}`] : void 0);
244
- let effRemainders = remainders.filter((r) => !!r);
245
- if (!effRemainders.length) {
246
- const all = usedPaths.map(
247
- (p) => p.startsWith(`${seg}.`) ? p.slice(seg.length + 1) : p.startsWith(`${seg}[`) ? p.slice(seg.length) : ""
248
- ).filter((x) => !!x);
249
- if (all.length) effRemainders = all;
250
- }
251
- const { generatedAppends, generatedFields } = inferSelectsFromUsage(effRemainders, recordParams);
252
- const definitionKey = idx ?? seg;
253
- subContext.defineProperty(definitionKey, {
315
+ const root = new import_contexts.ServerBaseContext();
316
+ const definedFirstLevel = /* @__PURE__ */ new Set();
317
+ const defineRecordGetter = (container, key, recordParams, subPaths = [], preferFull) => {
318
+ const { generatedAppends, generatedFields } = inferSelectsFromUsage(subPaths, recordParams);
319
+ container.defineProperty(key, {
254
320
  get: async () => {
255
321
  const dataSourceKey = (recordParams == null ? void 0 : recordParams.dataSourceKey) || "main";
256
- return await fetchRecordWithRequestCache(
322
+ const fixed = (0, import_selects.adjustSelectsForCollection)(
257
323
  koaCtx,
258
324
  dataSourceKey,
259
325
  recordParams.collection,
260
- recordParams.filterByTk,
261
326
  generatedFields,
262
327
  generatedAppends
263
328
  );
329
+ return await fetchRecordWithRequestCache(
330
+ koaCtx,
331
+ dataSourceKey,
332
+ recordParams.collection,
333
+ recordParams.filterByTk,
334
+ fixed.fields,
335
+ fixed.appends,
336
+ preferFull || ((subPaths == null ? void 0 : subPaths.length) ?? 0) === 0,
337
+ recordParams.associationName,
338
+ recordParams.sourceId
339
+ );
264
340
  },
265
341
  cache: true
266
342
  });
343
+ };
344
+ const subContainers = /* @__PURE__ */ new Map();
345
+ const ensureSubContainer = (parent, key) => {
346
+ let map = subContainers.get(parent);
347
+ if (!map) {
348
+ map = /* @__PURE__ */ new Map();
349
+ subContainers.set(parent, map);
350
+ }
351
+ let child = map.get(key);
352
+ if (!child) {
353
+ const inst = new import_contexts.ServerBaseContext();
354
+ parent.defineProperty(key, { get: () => inst.createProxy(), cache: true });
355
+ map.set(key, inst);
356
+ child = inst;
357
+ }
358
+ return child;
359
+ };
360
+ for (const [seg, remainders] of oneLevelRecordChildren) {
361
+ const idx = parseIndexSegment(seg);
362
+ const recordParams = import_lodash.default.get(contextParams, [varName, seg]) ?? (idx ? import_lodash.default.get(contextParams, [varName, idx]) : void 0) ?? (contextParams || {})[`${varName}.${seg}`] ?? (idx ? (contextParams || {})[`${varName}.${idx}`] : void 0);
363
+ let effRemainders = (remainders || []).filter((r) => !!r);
364
+ if (!effRemainders.length) {
365
+ const all = usedPaths.map(
366
+ (p) => p.startsWith(`${seg}.`) ? p.slice(seg.length + 1) : p.startsWith(`${seg}[`) ? p.slice(seg.length) : ""
367
+ ).filter((x) => !!x);
368
+ if (all.length) effRemainders = all;
369
+ }
370
+ const hasDirectRefOne = (usedPaths || []).some((p) => p === seg || !!idx && p === `[${idx}]`);
371
+ defineRecordGetter(root, idx ?? seg, recordParams, effRemainders, hasDirectRefOne);
372
+ definedFirstLevel.add(idx ?? seg);
373
+ }
374
+ for (const [relative, recordParams] of deepRecordMap.entries()) {
375
+ const segs = String(relative).split(".").filter(Boolean);
376
+ if (segs.length === 0) continue;
377
+ const first = segs[0];
378
+ let container;
379
+ if (definedFirstLevel.has(first)) {
380
+ continue;
381
+ } else {
382
+ container = root;
383
+ for (let i = 0; i < segs.length - 1; i++) {
384
+ container = ensureSubContainer(container, segs[i]);
385
+ }
386
+ }
387
+ const leaf = segs[segs.length - 1];
388
+ const subPaths = (usedPaths || []).map((p) => p === relative ? "" : p.startsWith(relative + ".") ? p.slice(relative.length + 1) : "").filter((x) => x !== "");
389
+ const hasDirectRef = (usedPaths || []).some((p) => p === relative);
390
+ defineRecordGetter(container, leaf, recordParams, subPaths, hasDirectRef);
267
391
  }
268
- return subContext.createProxy();
392
+ return root.createProxy();
269
393
  },
270
394
  cache: true
271
395
  });
@@ -285,7 +409,17 @@ function registerBuiltInVariables(reg) {
285
409
  const authObj = koaCtx.auth;
286
410
  const uid = (_a = authObj == null ? void 0 : authObj.user) == null ? void 0 : _a.id;
287
411
  if (typeof uid === "undefined" || uid === null) return void 0;
288
- return await fetchRecordWithRequestCache(koaCtx, "main", "users", uid, generatedFields, generatedAppends);
412
+ return await fetchRecordWithRequestCache(
413
+ koaCtx,
414
+ "main",
415
+ "users",
416
+ uid,
417
+ generatedFields,
418
+ generatedAppends,
419
+ void 0,
420
+ void 0,
421
+ void 0
422
+ );
289
423
  },
290
424
  cache: true
291
425
  });
@@ -295,5 +429,6 @@ function registerBuiltInVariables(reg) {
295
429
  registerBuiltInVariables(variables);
296
430
  // Annotate the CommonJS export names for ESM import in node:
297
431
  0 && (module.exports = {
432
+ inferSelectsFromUsage,
298
433
  variables
299
434
  });
@@ -0,0 +1,19 @@
1
+ /**
2
+ * This file is part of the NocoBase (R) project.
3
+ * Copyright (c) 2020-2024 NocoBase Co., Ltd.
4
+ * Authors: NocoBase Team.
5
+ *
6
+ * This project is dual-licensed under AGPL-3.0 and NocoBase Commercial License.
7
+ * For more information, please refer to: https://www.nocobase.com/agreement.
8
+ */
9
+ import { ResourcerContext } from '@nocobase/resourcer';
10
+ /**
11
+ * 针对给定集合,修正 selects:
12
+ * - 若 fields 中包含单段且为关联名(如 'roles'),则将其从 fields 移到 appends。
13
+ * - 若 fields 中包含多段且首段为关联名(如 'roles.name'),确保 appends 包含该关联名,并将首段替换为模型真实关联名。
14
+ * - 非关联字段:仅当模型存在该属性(或其 snake/camel 变体)时才保留,否则丢弃以避免数据库错误。
15
+ */
16
+ export declare function adjustSelectsForCollection(koaCtx: ResourcerContext, dataSourceKey: string, collection: string, fields?: string[], appends?: string[]): {
17
+ fields?: string[];
18
+ appends?: string[];
19
+ };