@nocobase/plugin-flow-engine 2.0.0-alpha.2 → 2.0.0-alpha.21

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
@@ -8,14 +8,14 @@
  */

  module.exports = {
- "@nocobase/client": "2.0.0-alpha.2",
+ "@nocobase/client": "2.0.0-alpha.21",
  "lodash": "4.17.21",
- "@nocobase/database": "2.0.0-alpha.2",
- "@nocobase/data-source-manager": "2.0.0-alpha.2",
- "@nocobase/resourcer": "2.0.0-alpha.2",
- "@nocobase/server": "2.0.0-alpha.2",
- "@nocobase/cache": "2.0.0-alpha.2",
- "@nocobase/utils": "2.0.0-alpha.2",
- "@nocobase/plugin-localization": "2.0.0-alpha.2",
- "@nocobase/actions": "2.0.0-alpha.2"
+ "@nocobase/database": "2.0.0-alpha.21",
+ "@nocobase/data-source-manager": "2.0.0-alpha.21",
+ "@nocobase/resourcer": "2.0.0-alpha.21",
+ "@nocobase/server": "2.0.0-alpha.21",
+ "@nocobase/cache": "2.0.0-alpha.21",
+ "@nocobase/utils": "2.0.0-alpha.21",
+ "@nocobase/plugin-localization": "2.0.0-alpha.21",
+ "@nocobase/actions": "2.0.0-alpha.21"
  };
@@ -57,5 +57,6 @@
  "Name": "Name",
  "Model with ID {{uid}} not found": "Model with ID {{uid}} not found",
  "Common actions": "Common actions",
- "This variable is not available": "This variable is not available"
+ "This variable is not available": "This variable is not available",
+ "Copy popup UID": "Copy popup UID"
  }
@@ -67,6 +67,7 @@ export declare const locales: {
  "Model with ID {{uid}} not found": string;
  "Common actions": string;
  "This variable is not available": string;
+ "Copy popup UID": string;
  };
  'zh-CN': {
  "Invalid model provided": string;
@@ -128,6 +129,7 @@ export declare const locales: {
  "Model with ID {{uid}} not found": string;
  "Common actions": string;
  "This variable is not available": string;
+ "Copy popup UID": string;
  };
  };
  /**
@@ -57,5 +57,6 @@
  "Name": "名称",
  "Model with ID {{uid}} not found": "未找到 ID 为 {{uid}} 的模型",
  "Common actions": "通用操作",
- "This variable is not available": "此变量不可用"
+ "This variable is not available": "此变量不可用",
+ "Copy popup UID": "复制弹窗 UID"
  }
@@ -1 +1 @@
- {"name":"ses","version":"1.14.0","description":"Hardened JavaScript for Fearless Cooperation","keywords":["lockdown","harden","Compartment","assert","security","confinement","isolation","object capabilities","ocaps","secure execution","third-party code","prototype pollution","supply-chain attack","plugin"],"author":"Agoric","license":"Apache-2.0","homepage":"https://github.com/Agoric/SES-shim/tree/master/packages/ses#readme","repository":{"type":"git","url":"git+https://github.com/endojs/endo.git","directory":"packages/ses"},"bugs":{"url":"https://github.com/endojs/endo/issues"},"type":"module","main":"./dist/ses.cjs","module":"./index.js","unpkg":"./dist/ses.umd.js","types":"./types.d.ts","exports":{".":{"import":{"types":"./types.d.ts","xs":"./src-xs/index.js","default":"./index.js"},"require":{"types":"./dist/types.d.cts","default":"./dist/ses.cjs"}},"./lockdown":{"import":{"types":"./types.d.ts","default":"./index.js"},"require":{"types":"./dist/types.d.cts","default":"./dist/ses.cjs"}},"./hermes":{"require":{"types":"./dist/types.d.cts","default":"./dist/ses-hermes.cjs"}},"./tools.js":"./tools.js","./assert-shim.js":"./assert-shim.js","./lockdown-shim.js":{"xs":"./src-xs/lockdown-shim.js","default":"./lockdown-shim.js"},"./compartment-shim.js":{"xs":"./src-xs/compartment-shim.js","default":"./compartment-shim.js"},"./console-shim.js":"./console-shim.js","./package.json":"./package.json"},"scripts":{"build:vanilla":"node scripts/bundle.js","build:hermes":"node scripts/bundle.js hermes","build":"yarn build:vanilla && yarn build:hermes","clean":"rm -rf dist","cover":"c8 ava","demo":"python3 -m http.server","lint":"yarn lint:types && yarn lint:eslint","lint-fix":"eslint --fix .","lint:eslint":"eslint .","lint:types":"tsc","prepare":"npm run clean && npm run build","qt":"ava","test":"tsd && ava","test:hermes":"./scripts/hermes-test.sh","test:xs":"xst dist/ses.umd.js test/_lockdown-safe.js && node scripts/generate-test-xs.js && xst tmp/test-xs.js && rm -rf tmp","postpack":"git clean -fX \"*.d.ts*\" \"*.d.cts*\" \"*.d.mts*\" \"*.tsbuildinfo\""},"dependencies":{"@endo/cache-map":"^1.1.0","@endo/env-options":"^1.1.11","@endo/immutable-arraybuffer":"^1.1.2"},"devDependencies":{"@babel/generator":"^7.26.3","@babel/parser":"~7.26.2","@babel/traverse":"~7.25.9","@babel/types":"~7.26.0","@endo/compartment-mapper":"^1.6.3","@endo/module-source":"^1.3.3","@endo/test262-runner":"^0.1.48","@types/babel__traverse":"^7.20.5","ava":"^6.1.3","babel-eslint":"^10.1.0","c8":"^7.14.0","core-js":"^3.31.0","eslint":"^8.57.1","eslint-config-airbnb-base":"^15.0.0","eslint-config-prettier":"^9.1.0","eslint-plugin-eslint-comments":"^3.2.0","eslint-plugin-import":"^2.31.0","hermes-engine-cli":"^0.12.0","prettier":"^3.5.3","terser":"^5.16.6","tsd":"^0.31.2","typescript":"~5.8.3"},"files":["./*.d.ts","./*.js","./*.map","LICENSE*","SECURITY*","dist","lib","src","tools"],"publishConfig":{"access":"public"},"eslintConfig":{"extends":["plugin:@endo/ses"]},"ava":{"files":["test/**/*.test.*"],"timeout":"2m"},"typeCoverage":{"atLeast":81.17},"gitHead":"9815aea9541f241389d2135c6097a7442bdffa17","_lastModified":"2025-09-30T13:20:00.167Z"}
+ {"name":"ses","version":"1.14.0","description":"Hardened JavaScript for Fearless Cooperation","keywords":["lockdown","harden","Compartment","assert","security","confinement","isolation","object capabilities","ocaps","secure execution","third-party code","prototype pollution","supply-chain attack","plugin"],"author":"Agoric","license":"Apache-2.0","homepage":"https://github.com/Agoric/SES-shim/tree/master/packages/ses#readme","repository":{"type":"git","url":"git+https://github.com/endojs/endo.git","directory":"packages/ses"},"bugs":{"url":"https://github.com/endojs/endo/issues"},"type":"module","main":"./dist/ses.cjs","module":"./index.js","unpkg":"./dist/ses.umd.js","types":"./types.d.ts","exports":{".":{"import":{"types":"./types.d.ts","xs":"./src-xs/index.js","default":"./index.js"},"require":{"types":"./dist/types.d.cts","default":"./dist/ses.cjs"}},"./lockdown":{"import":{"types":"./types.d.ts","default":"./index.js"},"require":{"types":"./dist/types.d.cts","default":"./dist/ses.cjs"}},"./hermes":{"require":{"types":"./dist/types.d.cts","default":"./dist/ses-hermes.cjs"}},"./tools.js":"./tools.js","./assert-shim.js":"./assert-shim.js","./lockdown-shim.js":{"xs":"./src-xs/lockdown-shim.js","default":"./lockdown-shim.js"},"./compartment-shim.js":{"xs":"./src-xs/compartment-shim.js","default":"./compartment-shim.js"},"./console-shim.js":"./console-shim.js","./package.json":"./package.json"},"scripts":{"build:vanilla":"node scripts/bundle.js","build:hermes":"node scripts/bundle.js hermes","build":"yarn build:vanilla && yarn build:hermes","clean":"rm -rf dist","cover":"c8 ava","demo":"python3 -m http.server","lint":"yarn lint:types && yarn lint:eslint","lint-fix":"eslint --fix .","lint:eslint":"eslint .","lint:types":"tsc","prepare":"npm run clean && npm run build","qt":"ava","test":"tsd && ava","test:hermes":"./scripts/hermes-test.sh","test:xs":"xst dist/ses.umd.js test/_lockdown-safe.js && node scripts/generate-test-xs.js && xst tmp/test-xs.js && rm -rf tmp","postpack":"git clean -fX \"*.d.ts*\" \"*.d.cts*\" \"*.d.mts*\" \"*.tsbuildinfo\""},"dependencies":{"@endo/cache-map":"^1.1.0","@endo/env-options":"^1.1.11","@endo/immutable-arraybuffer":"^1.1.2"},"devDependencies":{"@babel/generator":"^7.26.3","@babel/parser":"~7.26.2","@babel/traverse":"~7.25.9","@babel/types":"~7.26.0","@endo/compartment-mapper":"^1.6.3","@endo/module-source":"^1.3.3","@endo/test262-runner":"^0.1.48","@types/babel__traverse":"^7.20.5","ava":"^6.1.3","babel-eslint":"^10.1.0","c8":"^7.14.0","core-js":"^3.31.0","eslint":"^8.57.1","eslint-config-airbnb-base":"^15.0.0","eslint-config-prettier":"^9.1.0","eslint-plugin-eslint-comments":"^3.2.0","eslint-plugin-import":"^2.31.0","hermes-engine-cli":"^0.12.0","prettier":"^3.5.3","terser":"^5.16.6","tsd":"^0.31.2","typescript":"~5.8.3"},"files":["./*.d.ts","./*.js","./*.map","LICENSE*","SECURITY*","dist","lib","src","tools"],"publishConfig":{"access":"public"},"eslintConfig":{"extends":["plugin:@endo/ses"]},"ava":{"files":["test/**/*.test.*"],"timeout":"2m"},"typeCoverage":{"atLeast":81.17},"gitHead":"9815aea9541f241389d2135c6097a7442bdffa17","_lastModified":"2025-10-24T05:12:47.107Z"}
@@ -12,7 +12,6 @@ export declare class PluginFlowEngineServer extends PluginUISchemaStorageServer
  afterAdd(): Promise<void>;
  beforeLoad(): Promise<void>;
  getDatabaseByDataSourceKey(dataSourceKey?: string): import("@nocobase/database").default;
- private prefetchRecordsForResolve;
  transformSQL(template: string): {
  sql: string;
  bind: {};
@@ -44,6 +44,7 @@ var import_server2 = __toESM(require("./server"));
  var import_contexts = require("./template/contexts");
  var import_resolver = require("./template/resolver");
  var import_registry = require("./variables/registry");
+ var import_utils = require("./variables/utils");
  class PluginFlowEngineServer extends import_server2.default {
  globalContext;
  async afterAdd() {
@@ -59,86 +60,6 @@ class PluginFlowEngineServer extends import_server2.default {
  }
  return cm.db;
  }
- // 预取:构建“同记录”的字段/关联并集,一次查询写入 ctx.state.__varResolveBatchCache,供后续解析复用
- async prefetchRecordsForResolve(koaCtx, items) {
- var _a;
- try {
- const groupMap = /* @__PURE__ */ new Map();
- const ensureGroup = (dataSourceKey, collection, filterByTk) => {
- const groupKey = JSON.stringify({ ds: dataSourceKey, collection, tk: filterByTk });
- let group = groupMap.get(groupKey);
- if (!group) {
- group = { dataSourceKey, collection, filterByTk, fields: /* @__PURE__ */ new Set(), appends: /* @__PURE__ */ new Set() };
- groupMap.set(groupKey, group);
- }
- return group;
- };
- const normalizeNestedSeg = (segment) => /^\d+$/.test(segment) ? `[${segment}]` : segment;
- const toFirstSeg = (path) => {
- const m = path.match(/^([^.[]+|\[[^\]]+\])([\s\S]*)$/);
- const segment = m ? m[1] : "";
- const rest = m ? m[2] : "";
- return { segment, hasDeep: rest.includes(".") || rest.includes("[") || rest.length > 0 };
- };
- for (const it of items) {
- const template = (it == null ? void 0 : it.template) ?? {};
- const contextParams = (it == null ? void 0 : it.contextParams) || {};
- const usage = import_registry.variables.extractUsage(template);
- for (const [cpKey, recordParams] of Object.entries(contextParams)) {
- const parts = String(cpKey).split(".");
- const varName = parts[0];
- const nestedSeg = parts.slice(1).join(".");
- const paths = (usage == null ? void 0 : usage[varName]) || [];
- if (!paths.length) continue;
- const segNorm = nestedSeg ? normalizeNestedSeg(nestedSeg) : "";
- const remainders = [];
- for (const p of paths) {
- if (!segNorm) remainders.push(p);
- else if (p === segNorm) remainders.push("");
- else if (p.startsWith(`${segNorm}.`) || p.startsWith(`${segNorm}[`))
- remainders.push(p.slice(segNorm.length + 1));
- }
- if (!remainders.length) continue;
- const dataSourceKey = (recordParams == null ? void 0 : recordParams.dataSourceKey) || "main";
- const collection = recordParams == null ? void 0 : recordParams.collection;
- const filterByTk = recordParams == null ? void 0 : recordParams.filterByTk;
- if (!collection || typeof filterByTk === "undefined") continue;
- const group = ensureGroup(dataSourceKey, collection, filterByTk);
- for (const r of remainders) {
- const { segment, hasDeep } = toFirstSeg(r);
- if (!segment) continue;
- const key = segment.replace(/^\[(.+)\]$/, "$1");
- if (hasDeep) group.appends.add(key);
- else group.fields.add(key);
- }
- }
- }
- if (!groupMap.size) return;
- const stateObj = koaCtx.state;
- if (stateObj && !stateObj["__varResolveBatchCache"]) {
- stateObj["__varResolveBatchCache"] = /* @__PURE__ */ new Map();
- }
- const cache = (_a = koaCtx.state) == null ? void 0 : _a["__varResolveBatchCache"];
- for (const { dataSourceKey, collection, filterByTk, fields, appends } of groupMap.values()) {
- try {
- const dataSource = this.app.dataSourceManager.get(dataSourceKey);
- const cm = dataSource.collectionManager;
- if (!(cm == null ? void 0 : cm.db)) continue;
- const repo = cm.db.getRepository(collection);
- const fld = fields.size ? Array.from(fields) : void 0;
- const app = appends.size ? Array.from(appends) : void 0;
- const rec = await repo.findOne({ filterByTk, fields: fld, appends: app });
- const json = rec ? rec.toJSON() : void 0;
- if (cache) {
- const key = JSON.stringify({ ds: dataSourceKey, c: collection, tk: filterByTk, f: fld, a: app });
- cache.set(key, json);
- }
- } catch {
- }
- }
- } catch {
- }
- }
  transformSQL(template) {
  let index = 1;
  const bind = {};
@@ -167,7 +88,7 @@ class PluginFlowEngineServer extends import_server2.default {
  const values = typeof (raw == null ? void 0 : raw.values) !== "undefined" ? raw.values : raw;
  if (Array.isArray(values == null ? void 0 : values.batch)) {
  const batchItems = values.batch;
- await this.prefetchRecordsForResolve(
+ await (0, import_utils.prefetchRecordsForResolve)(
  ctx,
  batchItems.map((it) => ({
  template: it.template,
@@ -196,7 +117,7 @@ class PluginFlowEngineServer extends import_server2.default {
  }
  const template = values.template;
  const contextParams = (values == null ? void 0 : values.contextParams) || {};
- await this.prefetchRecordsForResolve(ctx, [{ template, contextParams }]);
+ await (0, import_utils.prefetchRecordsForResolve)(ctx, [{ template, contextParams }]);
  const requestCtx = new import_contexts.HttpRequestContext(ctx);
  requestCtx.delegate(this.globalContext);
  await import_registry.variables.attachUsedVariables(requestCtx, ctx, template, contextParams);
@@ -70,7 +70,8 @@ export declare class FlowModelRepository extends Repository {
  }): Promise<void>;
  remove(uid: string, options?: Transactionable & removeParentOptions): Promise<void>;
  insertAdjacent(position: 'beforeBegin' | 'afterBegin' | 'beforeEnd' | 'afterEnd', target: string, schema: any, options?: InsertAdjacentOptions): Promise<any>;
- duplicate(uid: string, options?: Transactionable): Promise<any>;
+ duplicate(modelUid: string, options?: Transactionable): Promise<any>;
+ private replaceStepParamsModelUids;
  insert(schema: any, options?: Transactionable): Promise<any>;
  insertNewSchema(schema: any, options?: Transactionable & {
  returnNode?: boolean;
@@ -426,13 +426,76 @@ const _FlowModelRepository = class _FlowModelRepository extends import_database.
  await this.clearXUidPathCache(result["uid"], transaction2);
  return result;
  }
- async duplicate(uid2, options) {
- const s = await this.getJsonSchema(uid2, { ...options, includeAsyncNode: true });
- if (!(s == null ? void 0 : s["uid"])) {
+ async duplicate(modelUid, options) {
+ const nodes = await this.findNodesById(modelUid, { ...options, includeAsyncNode: true });
+ if (!(nodes == null ? void 0 : nodes.length)) {
  return null;
  }
- this.regenerateUid(s);
- return this.insert(s, options);
+ const uidMap = {};
+ for (const n of nodes) {
+ uidMap[n["uid"]] = (0, import_utils.uid)();
+ }
+ const sorted = [...nodes].sort((a, b) => {
+ if (a.depth !== b.depth) return a.depth - b.depth;
+ const ap = a.parent || "";
+ const bp = b.parent || "";
+ if (ap !== bp) return ap < bp ? -1 : 1;
+ const at = a.type || "";
+ const bt = b.type || "";
+ if (at !== bt) return at < bt ? -1 : 1;
+ const as = a.sort ?? 0;
+ const bs = b.sort ?? 0;
+ return as - bs;
+ });
+ for (const n of sorted) {
+ const oldUid = n["uid"];
+ const newUid = uidMap[oldUid];
+ const oldParentUid = n["parent"];
+ const newParentUid = uidMap[oldParentUid] ?? null;
+ const optionsObj = this.replaceStepParamsModelUids(
+ import_lodash.default.isPlainObject(n.options) ? n.options : JSON.parse(n.options),
+ uidMap
+ );
+ if (newParentUid) {
+ optionsObj.parent = newParentUid;
+ optionsObj.parentId = newParentUid;
+ }
+ const schemaNode = {
+ uid: newUid,
+ ["x-async"]: !!n.async,
+ ...optionsObj
+ };
+ if (newParentUid) {
+ schemaNode.childOptions = {
+ parentUid: newParentUid,
+ type: n.type,
+ position: "last"
+ };
+ }
+ await this.insertSingleNode(schemaNode, { transaction: options == null ? void 0 : options.transaction });
+ }
+ return this.findModelById(uidMap[modelUid], { ...options });
+ }
+ replaceStepParamsModelUids(options, uidMap) {
+ const opts = options && typeof options === "object" ? options : {};
+ const replaceUidString = (v) => typeof v === "string" && uidMap[v] ? uidMap[v] : v;
+ const replaceInPlace = (val) => {
+ if (Array.isArray(val)) {
+ for (let i = 0; i < val.length; i++) {
+ val[i] = replaceInPlace(val[i]);
+ }
+ return val;
+ }
+ if (val && typeof val === "object") {
+ for (const k of Object.keys(val)) {
+ val[k] = replaceInPlace(val[k]);
+ }
+ return val;
+ }
+ return replaceUidString(val);
+ };
+ if (opts.stepParams) opts.stepParams = replaceInPlace(opts.stepParams);
+ return opts;
  }
  async insert(schema, options) {
  const nodes = _FlowModelRepository.schemaToSingleNodes(schema);
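Note: the new duplicate() clones the whole model subtree node by node: it assigns a fresh uid to every node up front, then re-inserts the nodes in depth order while rewriting any stepParams values that still point at the old uids. A minimal standalone sketch of that uid-remapping step (the uidMap and stepParams below are made-up sample data, not from the package):

    // Sketch of the idea behind replaceStepParamsModelUids: walk a params tree and
    // swap any string that exactly matches an old uid for its freshly generated one.
    const uidMap = { m1: 'a1b2c3', m2: 'd4e5f6' };

    function remapUids(value, map) {
      if (Array.isArray(value)) return value.map((v) => remapUids(v, map));
      if (value && typeof value === 'object') {
        for (const k of Object.keys(value)) value[k] = remapUids(value[k], map);
        return value;
      }
      // plain strings that exactly match an old uid are swapped for the new one
      return typeof value === 'string' && map[value] ? map[value] : value;
    }

    const stepParams = { openPopup: { targetModelUid: 'm2' }, note: 'unchanged' };
    console.log(remapUids(stepParams, uidMap));
    // -> { openPopup: { targetModelUid: 'd4e5f6' }, note: 'unchanged' }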
@@ -136,6 +136,13 @@ class PluginUISchemaStorageServer extends import_server.Plugin {
  }
  await next();
  },
+ duplicate: async (ctx, next) => {
+ const { uid: uid2 } = ctx.action.params;
+ const repository = ctx.db.getRepository("flowModels");
+ const duplicated = await repository.duplicate(uid2);
+ ctx.body = duplicated;
+ await next();
+ },
  move: async (ctx, next) => {
  const { sourceId, targetId, position } = ctx.action.params;
  const repository = ctx.db.getRepository("flowModels");
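Note: the new duplicate action reads uid from ctx.action.params and returns the duplicated subtree as the response body. A hedged usage sketch, assuming the action is exposed over NocoBase's usual <resource>:<action> HTTP convention for a flowModels resource (the URL, base path, auth header and uid value are all assumptions):

    // Assumed endpoint shape; adjust the base URL, resource name and auth to your setup.
    const token = '<api token>'; // placeholder
    const res = await fetch('/api/flowModels:duplicate?uid=abc123', {
      method: 'POST',
      headers: { Authorization: `Bearer ${token}` },
    });
    const body = await res.json(); // contains the duplicated model tree (ctx.body above)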
@@ -39,4 +39,14 @@ declare class VariableRegistry {
  attachUsedVariables(ctx: HttpRequestContext, koaCtx: ResourcerContext, template: JSONValue, contextParams: any): Promise<void>;
  }
  export declare const variables: VariableRegistry;
+ /** 仅测试使用:重置变量注册表为内置默认集 */
+ /**
+ * 从使用路径推断查询所需的 fields 与 appends。
+ * @param paths 使用到的子路径数组
+ * @param params 显式参数(仅用于兼容签名)
+ */
+ export declare function inferSelectsFromUsage(paths?: string[], _params?: unknown): {
+ generatedAppends?: string[];
+ generatedFields?: string[];
+ };
  export {};
@@ -36,6 +36,7 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
  var registry_exports = {};
  __export(registry_exports, {
+ inferSelectsFromUsage: () => inferSelectsFromUsage,
  variables: () => variables
  });
  module.exports = __toCommonJS(registry_exports);
@@ -90,40 +91,51 @@ if (!g[GLOBAL_KEY]) {
  g[GLOBAL_KEY] = new VariableRegistry();
  }
  const variables = g[GLOBAL_KEY];
- function inferSelectsFromUsage(paths = [], params) {
+ function inferSelectsFromUsage(paths = [], _params) {
  if (!Array.isArray(paths) || paths.length === 0) {
  return { generatedAppends: void 0, generatedFields: void 0 };
  }
  const appendSet = /* @__PURE__ */ new Set();
  const fieldSet = /* @__PURE__ */ new Set();
+ const normalizePath = (raw) => {
+ if (!raw) return "";
+ let s = String(raw);
+ s = s.replace(/\[(?:\d+)\]/g, "");
+ s = s.replace(/\[(?:"((?:[^"\\]|\\.)*)"|'((?:[^'\\]|\\.)*)')\]/g, (_m, g1, g2) => `.${g1 || g2}`);
+ s = s.replace(/\.\.+/g, ".");
+ s = s.replace(/^\./, "").replace(/\.$/, "");
+ return s;
+ };
  for (let path of paths) {
  if (!path) continue;
  while (/^\[(\d+)\](\.|$)/.test(path)) {
  path = path.replace(/^\[(\d+)\]\.?/, "");
  }
- if (!path) continue;
- let first = "";
- let rest = "";
- const mStr = path.match(/^\[(?:"((?:[^"\\]|\\.)*)"|'((?:[^'\\]|\\.)*)')\](.*)$/);
- if (mStr) {
- first = (mStr[1] ?? mStr[2]) || "";
- rest = mStr[3] || "";
- } else {
- const m = path.match(/^([^.[]+)([\s\S]*)$/);
- first = (m == null ? void 0 : m[1]) ?? "";
- rest = (m == null ? void 0 : m[2]) ?? "";
+ const norm = normalizePath(path);
+ if (!norm) continue;
+ const segments = norm.split(".").filter(Boolean);
+ if (segments.length === 0) continue;
+ if (segments.length === 1) {
+ fieldSet.add(segments[0]);
+ continue;
  }
- if (!first) continue;
- const hasDeep = rest.includes(".") || rest.includes("[");
- if (hasDeep) appendSet.add(first);
- else fieldSet.add(first);
+ for (let i = 0; i < segments.length - 1; i++) {
+ appendSet.add(segments.slice(0, i + 1).join("."));
+ }
+ fieldSet.add(segments.join("."));
  }
  const generatedAppends = appendSet.size ? Array.from(appendSet) : void 0;
  const generatedFields = fieldSet.size ? Array.from(fieldSet) : void 0;
  return { generatedAppends, generatedFields };
  }
  async function fetchRecordWithRequestCache(koaCtx, dataSourceKey, collection, filterByTk, fields, appends) {
+ var _a, _b, _c, _d;
  try {
+ const log = (_b = (_a = koaCtx.app) == null ? void 0 : _a.logger) == null ? void 0 : _b.child({
+ module: "plugin-flow-engine",
+ submodule: "variables.resolve",
+ method: "fetchRecordWithRequestCache"
+ });
  const kctx = koaCtx;
  if (!kctx.state) kctx.state = {};
  if (!kctx.state["__varResolveBatchCache"]) {
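Note: the rewritten inferSelectsFromUsage keeps the full dotted path as a field and adds every intermediate association prefix to appends, after stripping numeric indexes. An illustrative call based on the code above (the require path is an assumption about the built package layout):

    // Assumed dist path for the CommonJS build shown in this diff.
    const { inferSelectsFromUsage } = require('@nocobase/plugin-flow-engine/dist/server/variables/registry');

    const { generatedFields, generatedAppends } = inferSelectsFromUsage([
      'title',
      'author.profile.name',
      'tags[0].name',
    ]);
    // generatedFields  -> ['title', 'author.profile.name', 'tags.name']
    // generatedAppends -> ['author', 'author.profile', 'tags']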
@@ -143,32 +155,54 @@ async function fetchRecordWithRequestCache(koaCtx, dataSourceKey, collection, fi
  };
  const key = JSON.stringify(keyObj);
  if (cache) {
- if (cache.has(key)) return cache.get(key);
- const needFields = Array.isArray(fields) ? new Set(fields) : void 0;
+ if (cache.has(key)) {
+ return cache.get(key);
+ }
+ const needFields = Array.isArray(fields) ? [...new Set(fields)] : void 0;
  const needAppends = Array.isArray(appends) ? new Set(appends) : void 0;
- let fallbackAny = void 0;
  for (const [cacheKey, cacheVal] of cache.entries()) {
  const parsed = JSON.parse(cacheKey);
  if (!parsed || parsed.ds !== keyObj.ds || parsed.c !== keyObj.c || parsed.tk !== keyObj.tk) continue;
- const cachedFields = Array.isArray(parsed.f) ? new Set(parsed.f) : void 0;
- const cachedAppends = Array.isArray(parsed.a) ? new Set(parsed.a) : void 0;
- const fieldsOk = !needFields || cachedFields && [...needFields].every((x) => cachedFields.has(x));
- const appendsOk = !needAppends || cachedAppends && [...needAppends].every((x) => cachedAppends.has(x));
- if (fieldsOk && appendsOk) return cacheVal;
- if (typeof fallbackAny === "undefined") fallbackAny = cacheVal;
+ const cachedFields = new Set(parsed.f || []);
+ const cachedAppends = new Set(parsed.a || []);
+ const fieldCoveredByAppends = (fieldPath) => {
+ const p = String(fieldPath || "");
+ for (const a of cachedAppends) {
+ if (!a) continue;
+ if (p === a || p.startsWith(a + ".")) return true;
+ }
+ return false;
+ };
+ const fieldsOk = !needFields || needFields.every((f) => cachedFields.has(f) || fieldCoveredByAppends(f));
+ const appendsOk = !needAppends || [...needAppends].every((a) => cachedAppends.has(a));
+ if (fieldsOk && appendsOk) {
+ return cacheVal;
+ }
  }
- if (typeof fallbackAny !== "undefined") return fallbackAny;
  }
- const tk = filterByTk;
  const rec = await repo.findOne({
- filterByTk: tk,
+ filterByTk,
  fields,
  appends
  });
  const json = rec ? rec.toJSON() : void 0;
  if (cache) cache.set(key, json);
  return json;
- } catch (_2) {
+ } catch (e) {
+ const log = (_d = (_c = koaCtx.app) == null ? void 0 : _c.logger) == null ? void 0 : _d.child({
+ module: "plugin-flow-engine",
+ submodule: "variables.resolve",
+ method: "fetchRecordWithRequestCache"
+ });
+ const errMsg = e instanceof Error ? e.message : String(e);
+ log == null ? void 0 : log.warn("[variables.resolve] fetchRecordWithRequestCache error", {
+ ds: dataSourceKey,
+ collection,
+ tk: filterByTk,
+ fields,
+ appends,
+ error: errMsg
+ });
  return void 0;
  }
  }
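Note: the cache lookup above no longer falls back to an arbitrary earlier entry for the same record; it reuses a cached result only when the cached fields/appends actually cover the request, where a requested field counts as covered if it sits under a cached append. A condensed sketch of that coverage test (the sample sets are illustrative):

    // Condensed version of the coverage check, with made-up sample data.
    const cachedFields = new Set(['id', 'title']);
    const cachedAppends = new Set(['author']);

    const coveredByAppends = (fieldPath) =>
      [...cachedAppends].some((a) => a && (fieldPath === a || fieldPath.startsWith(a + '.')));

    const needFields = ['title', 'author.name'];
    const fieldsOk = needFields.every((f) => cachedFields.has(f) || coveredByAppends(f));
    console.log(fieldsOk); // true: 'author.name' is covered by the cached 'author' append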
@@ -228,29 +262,34 @@ function attachGenericRecordVariables(flowCtx, koaCtx, usage, contextParams) {
  segmentMap.set(seg, arr);
  }
  const segEntries = Array.from(segmentMap.entries());
- const recordChildren = segEntries.filter(([seg]) => {
+ const oneLevelRecordChildren = segEntries.filter(([seg]) => {
  const idx = parseIndexSegment(seg);
  const nestedObj = import_lodash.default.get(contextParams, [varName, seg]) ?? (idx ? import_lodash.default.get(contextParams, [varName, idx]) : void 0);
  const dotted = (contextParams || {})[`${varName}.${seg}`] ?? (idx ? (contextParams || {})[`${varName}.${idx}`] : void 0);
  return isRecordParams(nestedObj) || isRecordParams(dotted);
  });
- if (!recordChildren.length) continue;
+ const deepRecordMap = /* @__PURE__ */ new Map();
+ const cp = contextParams;
+ if (cp && typeof cp === "object") {
+ const cpRec = cp;
+ for (const key of Object.keys(cpRec)) {
+ if (!key || key !== varName && !key.startsWith(`${varName}.`)) continue;
+ if (key === varName) continue;
+ const val = cpRec[key];
+ if (!isRecordParams(val)) continue;
+ const relative = key.slice(varName.length + 1);
+ if (!relative) continue;
+ deepRecordMap.set(relative, val);
+ }
+ }
+ if (!oneLevelRecordChildren.length && deepRecordMap.size === 0) continue;
  flowCtx.defineProperty(varName, {
  get: () => {
- const subContext = new import_contexts.ServerBaseContext();
- for (const [seg, remainders] of recordChildren) {
- const idx = parseIndexSegment(seg);
- const recordParams = import_lodash.default.get(contextParams, [varName, seg]) ?? (idx ? import_lodash.default.get(contextParams, [varName, idx]) : void 0) ?? (contextParams || {})[`${varName}.${seg}`] ?? (idx ? (contextParams || {})[`${varName}.${idx}`] : void 0);
- let effRemainders = remainders.filter((r) => !!r);
- if (!effRemainders.length) {
- const all = usedPaths.map(
- (p) => p.startsWith(`${seg}.`) ? p.slice(seg.length + 1) : p.startsWith(`${seg}[`) ? p.slice(seg.length) : ""
- ).filter((x) => !!x);
- if (all.length) effRemainders = all;
- }
- const { generatedAppends, generatedFields } = inferSelectsFromUsage(effRemainders, recordParams);
- const definitionKey = idx ?? seg;
- subContext.defineProperty(definitionKey, {
+ const root = new import_contexts.ServerBaseContext();
+ const definedFirstLevel = /* @__PURE__ */ new Set();
+ const defineRecordGetter = (container, key, recordParams, subPaths = []) => {
+ const { generatedAppends, generatedFields } = inferSelectsFromUsage(subPaths, recordParams);
+ container.defineProperty(key, {
  get: async () => {
  const dataSourceKey = (recordParams == null ? void 0 : recordParams.dataSourceKey) || "main";
  return await fetchRecordWithRequestCache(
@@ -264,8 +303,54 @@ function attachGenericRecordVariables(flowCtx, koaCtx, usage, contextParams) {
  },
  cache: true
  });
+ };
+ const subContainers = /* @__PURE__ */ new Map();
+ const ensureSubContainer = (parent, key) => {
+ let map = subContainers.get(parent);
+ if (!map) {
+ map = /* @__PURE__ */ new Map();
+ subContainers.set(parent, map);
+ }
+ let child = map.get(key);
+ if (!child) {
+ const inst = new import_contexts.ServerBaseContext();
+ parent.defineProperty(key, { get: () => inst.createProxy(), cache: true });
+ map.set(key, inst);
+ child = inst;
+ }
+ return child;
+ };
+ for (const [seg, remainders] of oneLevelRecordChildren) {
+ const idx = parseIndexSegment(seg);
+ const recordParams = import_lodash.default.get(contextParams, [varName, seg]) ?? (idx ? import_lodash.default.get(contextParams, [varName, idx]) : void 0) ?? (contextParams || {})[`${varName}.${seg}`] ?? (idx ? (contextParams || {})[`${varName}.${idx}`] : void 0);
+ let effRemainders = (remainders || []).filter((r) => !!r);
+ if (!effRemainders.length) {
+ const all = usedPaths.map(
+ (p) => p.startsWith(`${seg}.`) ? p.slice(seg.length + 1) : p.startsWith(`${seg}[`) ? p.slice(seg.length) : ""
+ ).filter((x) => !!x);
+ if (all.length) effRemainders = all;
+ }
+ defineRecordGetter(root, idx ?? seg, recordParams, effRemainders);
+ definedFirstLevel.add(idx ?? seg);
+ }
+ for (const [relative, recordParams] of deepRecordMap.entries()) {
+ const segs = String(relative).split(".").filter(Boolean);
+ if (segs.length === 0) continue;
+ const first = segs[0];
+ let container;
+ if (definedFirstLevel.has(first)) {
+ continue;
+ } else {
+ container = root;
+ for (let i = 0; i < segs.length - 1; i++) {
+ container = ensureSubContainer(container, segs[i]);
+ }
+ }
+ const leaf = segs[segs.length - 1];
+ const subPaths = (usedPaths || []).map((p) => p === relative ? "" : p.startsWith(relative + ".") ? p.slice(relative.length + 1) : "").filter((x) => x !== "");
+ defineRecordGetter(container, leaf, recordParams, subPaths);
  }
- return subContext.createProxy();
+ return root.createProxy();
  },
  cache: true
  });
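Note: with the deepRecordMap branch, contextParams may now carry dotted keys more than one level below the variable name; each one is exposed as a lazily fetched getter nested under intermediate ServerBaseContext containers. A sketch of the contextParams shape this handles (the variable name, collections and keys are made up):

    // Hypothetical contextParams for a variable named 'popup':
    const contextParams = {
      // one-level child, matched by oneLevelRecordChildren
      'popup.record': { dataSourceKey: 'main', collection: 'orders', filterByTk: 12 },
      // deeper child, handled via deepRecordMap and exposed as popup.parent.record
      'popup.parent.record': { dataSourceKey: 'main', collection: 'customers', filterByTk: 7 },
    };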
@@ -295,5 +380,6 @@ function registerBuiltInVariables(reg) {
  registerBuiltInVariables(variables);
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
+ inferSelectsFromUsage,
  variables
  });
@@ -0,0 +1,17 @@
+ /**
+ * This file is part of the NocoBase (R) project.
+ * Copyright (c) 2020-2024 NocoBase Co., Ltd.
+ * Authors: NocoBase Team.
+ *
+ * This project is dual-licensed under AGPL-3.0 and NocoBase Commercial License.
+ * For more information, please refer to: https://www.nocobase.com/agreement.
+ */
+ import type { ResourcerContext } from '@nocobase/resourcer';
+ import type { JSONValue } from '../template/resolver';
+ /**
+ * 预取:构建“同记录”的字段/关联并集,一次查询写入 ctx.state.__varResolveBatchCache,供后续解析复用
+ */
+ export declare function prefetchRecordsForResolve(koaCtx: ResourcerContext, items: Array<{
+ template: JSONValue;
+ contextParams?: Record<string, unknown>;
+ }>): Promise<void>;
@@ -0,0 +1,113 @@
+ /**
+ * This file is part of the NocoBase (R) project.
+ * Copyright (c) 2020-2024 NocoBase Co., Ltd.
+ * Authors: NocoBase Team.
+ *
+ * This project is dual-licensed under AGPL-3.0 and NocoBase Commercial License.
+ * For more information, please refer to: https://www.nocobase.com/agreement.
+ */
+
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+ };
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+ var utils_exports = {};
+ __export(utils_exports, {
+ prefetchRecordsForResolve: () => prefetchRecordsForResolve
+ });
+ module.exports = __toCommonJS(utils_exports);
+ var import_registry = require("./registry");
+ async function prefetchRecordsForResolve(koaCtx, items) {
+ var _a, _b, _c, _d, _e, _f;
+ try {
+ const log = (_b = (_a = koaCtx.app) == null ? void 0 : _a.logger) == null ? void 0 : _b.child({ module: "plugin-flow-engine", submodule: "variables.prefetch" });
+ const groupMap = /* @__PURE__ */ new Map();
+ const ensureGroup = (dataSourceKey, collection, filterByTk) => {
+ const groupKey = JSON.stringify({ ds: dataSourceKey, collection, tk: filterByTk });
+ let group = groupMap.get(groupKey);
+ if (!group) {
+ group = { dataSourceKey, collection, filterByTk, fields: /* @__PURE__ */ new Set(), appends: /* @__PURE__ */ new Set() };
+ groupMap.set(groupKey, group);
+ }
+ return group;
+ };
+ const normalizeNestedSeg = (segment) => /^\d+$/.test(segment) ? `[${segment}]` : segment;
+ for (const it of items) {
+ const template = (it == null ? void 0 : it.template) ?? {};
+ const contextParams = (it == null ? void 0 : it.contextParams) || {};
+ const usage = import_registry.variables.extractUsage(template);
+ for (const [cpKey, recordParams] of Object.entries(contextParams)) {
+ const parts = String(cpKey).split(".");
+ const varName = parts[0];
+ const nestedSeg = parts.slice(1).join(".");
+ const paths = (usage == null ? void 0 : usage[varName]) || [];
+ if (!paths.length) continue;
+ const segNorm = nestedSeg ? normalizeNestedSeg(nestedSeg) : "";
+ const remainders = [];
+ for (const p of paths) {
+ if (!segNorm) remainders.push(p);
+ else if (p === segNorm) remainders.push("");
+ else if (p.startsWith(`${segNorm}.`) || p.startsWith(`${segNorm}[`))
+ remainders.push(p.slice(segNorm.length + 1));
+ }
+ if (!remainders.length) continue;
+ const dataSourceKey = (recordParams == null ? void 0 : recordParams.dataSourceKey) || "main";
+ const collection = recordParams == null ? void 0 : recordParams.collection;
+ const filterByTk = recordParams == null ? void 0 : recordParams.filterByTk;
+ if (!collection || typeof filterByTk === "undefined") continue;
+ const group = ensureGroup(dataSourceKey, collection, filterByTk);
+ const { generatedAppends, generatedFields } = (0, import_registry.inferSelectsFromUsage)(remainders);
+ if (generatedFields == null ? void 0 : generatedFields.length) generatedFields.forEach((f) => group.fields.add(f));
+ if (generatedAppends == null ? void 0 : generatedAppends.length) generatedAppends.forEach((a) => group.appends.add(a));
+ }
+ }
+ if (!groupMap.size) return;
+ const stateObj = koaCtx.state;
+ if (stateObj && !stateObj["__varResolveBatchCache"]) {
+ stateObj["__varResolveBatchCache"] = /* @__PURE__ */ new Map();
+ }
+ const cache = (_c = koaCtx.state) == null ? void 0 : _c["__varResolveBatchCache"];
+ for (const { dataSourceKey, collection, filterByTk, fields, appends } of groupMap.values()) {
+ try {
+ const ds = koaCtx.app.dataSourceManager.get(dataSourceKey);
+ const cm = ds.collectionManager;
+ if (!(cm == null ? void 0 : cm.db)) continue;
+ const repo = cm.db.getRepository(collection);
+ const fld = fields.size ? Array.from(fields).sort() : void 0;
+ const app = appends.size ? Array.from(appends).sort() : void 0;
+ const rec = await repo.findOne({ filterByTk, fields: fld, appends: app });
+ const json = rec ? rec.toJSON() : void 0;
+ if (cache) {
+ const key = JSON.stringify({ ds: dataSourceKey, c: collection, tk: filterByTk, f: fld, a: app });
+ cache.set(key, json);
+ }
+ } catch (e) {
+ log == null ? void 0 : log.debug("[variables.resolve] prefetch query error", {
+ ds: dataSourceKey,
+ collection,
+ tk: filterByTk,
+ error: (e == null ? void 0 : e.message) || String(e)
+ });
+ }
+ }
+ } catch (e) {
+ (_f = (_e = (_d = koaCtx.app) == null ? void 0 : _d.logger) == null ? void 0 : _e.child({ module: "plugin-flow-engine", submodule: "variables.prefetch" })) == null ? void 0 : _f.debug("[variables.resolve] prefetch fatal error", { error: (e == null ? void 0 : e.message) || String(e) });
+ }
+ }
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+ prefetchRecordsForResolve
+ });
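Note: prefetchRecordsForResolve extracts the variable paths used by each template, groups them by (dataSource, collection, filterByTk), and runs one findOne per record, caching the JSON result in koaCtx.state.__varResolveBatchCache under a key that fetchRecordWithRequestCache can match later. An illustrative call, to be read inside a request handler where koaCtx is the resourcer context (the require path, collection, uid and the template placeholder syntax are assumptions):

    // Two placeholders on the same order record should collapse into a single
    // repository.findOne({ filterByTk: 12, fields: [...], appends: [...] }).
    const { prefetchRecordsForResolve } = require('@nocobase/plugin-flow-engine/dist/server/variables/utils');

    await prefetchRecordsForResolve(koaCtx, [
      {
        template: { subject: '{{popup.record.title}}', buyer: '{{popup.record.customer.name}}' },
        contextParams: {
          'popup.record': { dataSourceKey: 'main', collection: 'orders', filterByTk: 12 },
        },
      },
    ]);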
package/package.json CHANGED
@@ -4,7 +4,7 @@
  "displayName.zh-CN": "前端流引擎",
  "description": "",
  "description.zh-CN": "",
- "version": "2.0.0-alpha.2",
+ "version": "2.0.0-alpha.21",
  "main": "./dist/server/index.js",
  "license": "AGPL-3.0",
  "devDependencies": {
@@ -24,5 +24,5 @@
  "@nocobase/test": "2.x",
  "@nocobase/utils": "2.x"
  },
- "gitHead": "1322f486b248bef53ed8c8f42f0a39dfd02125fd"
+ "gitHead": "0398c85e979d09e834952e71c5c1a1ccf1a3a8e1"
  }