@nocobase/plugin-workflow 0.19.0-alpha.2 → 0.19.0-alpha.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/client/components/{ExecutionStatusSelect.d.ts → ExecutionStatus.d.ts} +1 -0
- package/dist/client/index.d.ts +1 -0
- package/dist/client/index.js +30 -28
- package/dist/client/nodes/end.d.ts +22 -0
- package/dist/client/nodes/index.d.ts +3 -0
- package/dist/client/schemas/executions.d.ts +14 -1
- package/dist/client/triggers/index.d.ts +1 -0
- package/dist/client/triggers/schedule/index.d.ts +1 -0
- package/dist/externalVersion.js +10 -10
- package/dist/locale/ko_KR.json +168 -0
- package/dist/locale/zh-CN.json +9 -1
- package/dist/node_modules/cron-parser/package.json +1 -1
- package/dist/node_modules/lru-cache/package.json +1 -1
- package/dist/server/Plugin.d.ts +4 -1
- package/dist/server/Plugin.js +65 -35
- package/dist/server/Processor.d.ts +3 -1
- package/dist/server/Processor.js +27 -15
- package/dist/server/actions/executions.d.ts +1 -0
- package/dist/server/actions/executions.js +38 -0
- package/dist/server/collections/jobs.js +4 -0
- package/dist/server/collections/workflows.js +5 -0
- package/dist/server/instructions/ConditionInstruction.d.ts +1 -0
- package/dist/server/instructions/ConditionInstruction.js +1 -0
- package/dist/server/instructions/CreateInstruction.js +4 -4
- package/dist/server/instructions/DestroyInstruction.js +2 -2
- package/dist/server/instructions/EndInstruction.d.ts +8 -0
- package/dist/server/instructions/EndInstruction.js +42 -0
- package/dist/server/instructions/QueryInstruction.js +2 -2
- package/dist/server/instructions/UpdateInstruction.js +2 -2
- package/dist/server/migrations/20240115220721-add-node-key-to-job.d.ts +6 -0
- package/dist/server/migrations/20240115220721-add-node-key-to-job.js +55 -0
- package/dist/server/triggers/CollectionTrigger.js +18 -7
- package/dist/server/triggers/ScheduleTrigger.d.ts +1 -0
- package/dist/server/triggers/ScheduleTrigger.js +1 -0
- package/dist/server/triggers/index.d.ts +1 -0
- package/dist/server/triggers/index.js +1 -0
- package/dist/server/types/Workflow.d.ts +5 -2
- package/package.json +3 -3
package/dist/server/Processor.js
CHANGED
|
@@ -29,6 +29,7 @@ class Processor {
|
|
|
29
29
|
this.execution = execution;
|
|
30
30
|
this.options = options;
|
|
31
31
|
this.logger = options.plugin.getLogger(execution.workflowId);
|
|
32
|
+
this.transaction = options.transaction;
|
|
32
33
|
}
|
|
33
34
|
static StatusMap = {
|
|
34
35
|
[import_constants.JOB_STATUS.PENDING]: import_constants.EXECUTION_STATUS.STARTED,
|
|
@@ -41,10 +42,12 @@ class Processor {
|
|
|
41
42
|
[import_constants.JOB_STATUS.RETRY_NEEDED]: import_constants.EXECUTION_STATUS.RETRY_NEEDED
|
|
42
43
|
};
|
|
43
44
|
logger;
|
|
45
|
+
transaction;
|
|
44
46
|
nodes = [];
|
|
45
47
|
nodesMap = /* @__PURE__ */ new Map();
|
|
46
48
|
jobsMap = /* @__PURE__ */ new Map();
|
|
47
49
|
jobsMapByNodeKey = {};
|
|
50
|
+
lastSavedJob = null;
|
|
48
51
|
// make dual linked nodes list then cache
|
|
49
52
|
makeNodes(nodes = []) {
|
|
50
53
|
this.nodes = nodes;
|
|
@@ -68,14 +71,15 @@ class Processor {
|
|
|
68
71
|
});
|
|
69
72
|
}
|
|
70
73
|
async prepare() {
|
|
71
|
-
const { execution } = this;
|
|
74
|
+
const { execution, transaction } = this;
|
|
72
75
|
if (!execution.workflow) {
|
|
73
|
-
execution.workflow = await execution.getWorkflow();
|
|
76
|
+
execution.workflow = await execution.getWorkflow({ transaction });
|
|
74
77
|
}
|
|
75
|
-
const nodes = await execution.workflow.getNodes();
|
|
78
|
+
const nodes = await execution.workflow.getNodes({ transaction });
|
|
76
79
|
this.makeNodes(nodes);
|
|
77
80
|
const jobs = await execution.getJobs({
|
|
78
|
-
order: [["id", "ASC"]]
|
|
81
|
+
order: [["id", "ASC"]],
|
|
82
|
+
transaction
|
|
79
83
|
});
|
|
80
84
|
this.makeJobs(jobs);
|
|
81
85
|
}
|
|
@@ -116,7 +120,10 @@ class Processor {
|
|
|
116
120
|
{ error: err }
|
|
117
121
|
);
|
|
118
122
|
job = {
|
|
119
|
-
result: err instanceof Error ? {
|
|
123
|
+
result: err instanceof Error ? {
|
|
124
|
+
message: err.message,
|
|
125
|
+
stack: process.env.NODE_ENV === "production" ? 'Error stack will not be shown under "production" environment, please check logs.' : err.stack
|
|
126
|
+
} : err,
|
|
120
127
|
status: import_constants.JOB_STATUS.ERROR
|
|
121
128
|
};
|
|
122
129
|
if (prevJob && prevJob.nodeId === node.id) {
|
|
@@ -127,6 +134,7 @@ class Processor {
|
|
|
127
134
|
if (!(job instanceof import_database.Model)) {
|
|
128
135
|
job.upstreamId = prevJob instanceof import_database.Model ? prevJob.get("id") : null;
|
|
129
136
|
job.nodeId = node.id;
|
|
137
|
+
job.nodeKey = node.key;
|
|
130
138
|
}
|
|
131
139
|
const savedJob = await this.saveJob(job);
|
|
132
140
|
this.logger.info(
|
|
@@ -171,7 +179,7 @@ class Processor {
|
|
|
171
179
|
async exit(s) {
|
|
172
180
|
if (typeof s === "number") {
|
|
173
181
|
const status = this.constructor.StatusMap[s] ?? Math.sign(s);
|
|
174
|
-
await this.execution.update({ status });
|
|
182
|
+
await this.execution.update({ status }, { transaction: this.transaction });
|
|
175
183
|
}
|
|
176
184
|
this.logger.info(`execution (${this.execution.id}) exiting with status ${this.execution.status}`);
|
|
177
185
|
return null;
|
|
@@ -179,22 +187,26 @@ class Processor {
|
|
|
179
187
|
// TODO(optimize)
|
|
180
188
|
async saveJob(payload) {
|
|
181
189
|
const { database } = this.execution.constructor;
|
|
190
|
+
const { transaction } = this;
|
|
182
191
|
const { model } = database.getCollection("jobs");
|
|
183
192
|
let job;
|
|
184
193
|
if (payload instanceof model) {
|
|
185
|
-
job = await payload.save();
|
|
194
|
+
job = await payload.save({ transaction });
|
|
186
195
|
} else if (payload.id) {
|
|
187
|
-
job = await model.findByPk(payload.id);
|
|
188
|
-
await job.update(payload);
|
|
196
|
+
job = await model.findByPk(payload.id, { transaction });
|
|
197
|
+
await job.update(payload, { transaction });
|
|
189
198
|
} else {
|
|
190
|
-
job = await model.create(
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
199
|
+
job = await model.create(
|
|
200
|
+
{
|
|
201
|
+
...payload,
|
|
202
|
+
executionId: this.execution.id
|
|
203
|
+
},
|
|
204
|
+
{ transaction }
|
|
205
|
+
);
|
|
194
206
|
}
|
|
195
207
|
this.jobsMap.set(job.id, job);
|
|
196
|
-
|
|
197
|
-
this.jobsMapByNodeKey[
|
|
208
|
+
this.lastSavedJob = job;
|
|
209
|
+
this.jobsMapByNodeKey[job.nodeKey] = job.result;
|
|
198
210
|
return job;
|
|
199
211
|
}
|
|
200
212
|
getBranches(node) {
|
|
@@ -27,6 +27,7 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
|
|
|
27
27
|
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
28
28
|
var executions_exports = {};
|
|
29
29
|
__export(executions_exports, {
|
|
30
|
+
cancel: () => cancel,
|
|
30
31
|
destroy: () => destroy
|
|
31
32
|
});
|
|
32
33
|
module.exports = __toCommonJS(executions_exports);
|
|
@@ -43,7 +44,44 @@ async function destroy(context, next) {
|
|
|
43
44
|
});
|
|
44
45
|
await import_actions.default.destroy(context, next);
|
|
45
46
|
}
|
|
47
|
+
async function cancel(context, next) {
|
|
48
|
+
const { filterByTk } = context.action.params;
|
|
49
|
+
const ExecutionRepo = context.db.getRepository("executions");
|
|
50
|
+
const JobRepo = context.db.getRepository("jobs");
|
|
51
|
+
const execution = await ExecutionRepo.findOne({
|
|
52
|
+
filterByTk,
|
|
53
|
+
appends: ["jobs"]
|
|
54
|
+
});
|
|
55
|
+
if (!execution) {
|
|
56
|
+
return context.throw(404);
|
|
57
|
+
}
|
|
58
|
+
if (execution.status) {
|
|
59
|
+
return context.throw(400);
|
|
60
|
+
}
|
|
61
|
+
await context.db.sequelize.transaction(async (transaction) => {
|
|
62
|
+
await execution.update(
|
|
63
|
+
{
|
|
64
|
+
status: import_constants.EXECUTION_STATUS.CANCELED
|
|
65
|
+
},
|
|
66
|
+
{ transaction }
|
|
67
|
+
);
|
|
68
|
+
const pendingJobs = execution.jobs.filter((job) => job.status === import_constants.JOB_STATUS.PENDING);
|
|
69
|
+
await JobRepo.update({
|
|
70
|
+
values: {
|
|
71
|
+
status: import_constants.JOB_STATUS.CANCELED
|
|
72
|
+
},
|
|
73
|
+
filter: {
|
|
74
|
+
id: pendingJobs.map((job) => job.id)
|
|
75
|
+
},
|
|
76
|
+
individualHooks: false,
|
|
77
|
+
transaction
|
|
78
|
+
});
|
|
79
|
+
});
|
|
80
|
+
context.body = execution;
|
|
81
|
+
await next();
|
|
82
|
+
}
|
|
46
83
|
// Annotate the CommonJS export names for ESM import in node:
|
|
47
84
|
0 && (module.exports = {
|
|
85
|
+
cancel,
|
|
48
86
|
destroy
|
|
49
87
|
});
|
|
@@ -128,6 +128,7 @@ class ConditionInstruction extends import__.Instruction {
|
|
|
128
128
|
result,
|
|
129
129
|
// TODO(optimize): try unify the building of job
|
|
130
130
|
nodeId: node.id,
|
|
131
|
+
nodeKey: node.key,
|
|
131
132
|
upstreamId: prevJob && prevJob.id || null
|
|
132
133
|
};
|
|
133
134
|
const branchNode = processor.nodes.find(
|
|
@@ -33,8 +33,8 @@ class CreateInstruction extends import__.Instruction {
|
|
|
33
33
|
...options,
|
|
34
34
|
context: {
|
|
35
35
|
executionId: processor.execution.id
|
|
36
|
-
}
|
|
37
|
-
|
|
36
|
+
},
|
|
37
|
+
transaction: processor.transaction
|
|
38
38
|
});
|
|
39
39
|
let result = created;
|
|
40
40
|
if (created && appends.length) {
|
|
@@ -45,8 +45,8 @@ class CreateInstruction extends import__.Instruction {
|
|
|
45
45
|
}, /* @__PURE__ */ new Set());
|
|
46
46
|
result = await repository.findOne({
|
|
47
47
|
filterByTk: created[model.primaryKeyAttribute],
|
|
48
|
-
appends: Array.from(includeFields)
|
|
49
|
-
|
|
48
|
+
appends: Array.from(includeFields),
|
|
49
|
+
transaction: processor.transaction
|
|
50
50
|
});
|
|
51
51
|
}
|
|
52
52
|
return {
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
var __create = Object.create;
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
|
+
var __getProtoOf = Object.getPrototypeOf;
|
|
6
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
7
|
+
var __export = (target, all) => {
|
|
8
|
+
for (var name in all)
|
|
9
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
10
|
+
};
|
|
11
|
+
var __copyProps = (to, from, except, desc) => {
|
|
12
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
13
|
+
for (let key of __getOwnPropNames(from))
|
|
14
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
15
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
16
|
+
}
|
|
17
|
+
return to;
|
|
18
|
+
};
|
|
19
|
+
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
|
20
|
+
// If the importer is in node compatibility mode or this is not an ESM
|
|
21
|
+
// file that has been converted to a CommonJS file using a Babel-
|
|
22
|
+
// compatible transform (i.e. "__esModule" has not been set), then set
|
|
23
|
+
// "default" to the CommonJS "module.exports" for node compatibility.
|
|
24
|
+
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
|
25
|
+
mod
|
|
26
|
+
));
|
|
27
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
28
|
+
var EndInstruction_exports = {};
|
|
29
|
+
__export(EndInstruction_exports, {
|
|
30
|
+
default: () => EndInstruction_default
|
|
31
|
+
});
|
|
32
|
+
module.exports = __toCommonJS(EndInstruction_exports);
|
|
33
|
+
var import__ = __toESM(require("."));
|
|
34
|
+
var import_constants = require("../constants");
|
|
35
|
+
class EndInstruction_default extends import__.default {
|
|
36
|
+
async run(node, prevJob, processor) {
|
|
37
|
+
const { endStatus } = node.config;
|
|
38
|
+
return {
|
|
39
|
+
status: endStatus ?? import_constants.JOB_STATUS.RESOLVED
|
|
40
|
+
};
|
|
41
|
+
}
|
|
42
|
+
}
|
|
@@ -49,8 +49,8 @@ class QueryInstruction extends import__.Instruction {
|
|
|
49
49
|
var _a;
|
|
50
50
|
return `${((_a = item.direction) == null ? void 0 : _a.toLowerCase()) === "desc" ? "-" : ""}${item.field}`;
|
|
51
51
|
}),
|
|
52
|
-
appends
|
|
53
|
-
|
|
52
|
+
appends,
|
|
53
|
+
transaction: processor.transaction
|
|
54
54
|
});
|
|
55
55
|
if (failOnEmpty && (multiple ? !result.length : !result)) {
|
|
56
56
|
return {
|
|
@@ -32,8 +32,8 @@ class UpdateInstruction extends import__.Instruction {
|
|
|
32
32
|
...options,
|
|
33
33
|
context: {
|
|
34
34
|
executionId: processor.execution.id
|
|
35
|
-
}
|
|
36
|
-
|
|
35
|
+
},
|
|
36
|
+
transaction: processor.transaction
|
|
37
37
|
});
|
|
38
38
|
return {
|
|
39
39
|
result: result.length ?? result,
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
var __defProp = Object.defineProperty;
|
|
2
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
3
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
4
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
5
|
+
var __export = (target, all) => {
|
|
6
|
+
for (var name in all)
|
|
7
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
8
|
+
};
|
|
9
|
+
var __copyProps = (to, from, except, desc) => {
|
|
10
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
11
|
+
for (let key of __getOwnPropNames(from))
|
|
12
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
13
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
14
|
+
}
|
|
15
|
+
return to;
|
|
16
|
+
};
|
|
17
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
18
|
+
var add_node_key_to_job_exports = {};
|
|
19
|
+
__export(add_node_key_to_job_exports, {
|
|
20
|
+
default: () => add_node_key_to_job_default
|
|
21
|
+
});
|
|
22
|
+
module.exports = __toCommonJS(add_node_key_to_job_exports);
|
|
23
|
+
var import_server = require("@nocobase/server");
|
|
24
|
+
class add_node_key_to_job_default extends import_server.Migration {
|
|
25
|
+
appVersion = "<0.19.0-alpha.4";
|
|
26
|
+
on = "afterSync";
|
|
27
|
+
async up() {
|
|
28
|
+
const { db } = this.context;
|
|
29
|
+
const PluginModel = db.getModel("applicationPlugins");
|
|
30
|
+
const JobRepo = db.getRepository("jobs");
|
|
31
|
+
await db.sequelize.transaction(async (transaction) => {
|
|
32
|
+
const jobs = await JobRepo.find({
|
|
33
|
+
appends: ["node.key"]
|
|
34
|
+
});
|
|
35
|
+
await jobs.reduce(
|
|
36
|
+
(promise, job) => promise.then(() => {
|
|
37
|
+
var _a;
|
|
38
|
+
if (job.nodeKey) {
|
|
39
|
+
return;
|
|
40
|
+
}
|
|
41
|
+
return job.update(
|
|
42
|
+
{
|
|
43
|
+
nodeKey: (_a = job.node) == null ? void 0 : _a.key
|
|
44
|
+
},
|
|
45
|
+
{
|
|
46
|
+
silent: true,
|
|
47
|
+
transaction
|
|
48
|
+
}
|
|
49
|
+
);
|
|
50
|
+
}),
|
|
51
|
+
Promise.resolve()
|
|
52
|
+
);
|
|
53
|
+
});
|
|
54
|
+
}
|
|
55
|
+
}
|
|
@@ -86,13 +86,24 @@ async function handler(workflow, data, options) {
|
|
|
86
86
|
});
|
|
87
87
|
}
|
|
88
88
|
const json = (0, import_utils.toJSON)(result);
|
|
89
|
-
|
|
90
|
-
workflow
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
89
|
+
if (workflow.sync) {
|
|
90
|
+
await this.workflow.trigger(
|
|
91
|
+
workflow,
|
|
92
|
+
{ data: json },
|
|
93
|
+
{
|
|
94
|
+
context,
|
|
95
|
+
transaction
|
|
96
|
+
}
|
|
97
|
+
);
|
|
98
|
+
} else {
|
|
99
|
+
this.workflow.trigger(
|
|
100
|
+
workflow,
|
|
101
|
+
{ data: json },
|
|
102
|
+
{
|
|
103
|
+
context
|
|
104
|
+
}
|
|
105
|
+
);
|
|
106
|
+
}
|
|
96
107
|
}
|
|
97
108
|
class CollectionTrigger extends import__.default {
|
|
98
109
|
events = /* @__PURE__ */ new Map();
|
|
@@ -18,6 +18,7 @@ export declare const SCHEDULE_MODE: {
|
|
|
18
18
|
};
|
|
19
19
|
declare function matchNext(this: ScheduleTrigger, workflow: any, now: Date, range?: number): boolean;
|
|
20
20
|
export default class ScheduleTrigger extends Trigger {
|
|
21
|
+
sync: boolean;
|
|
21
22
|
static CacheRules: (typeof matchNext | ((workflow: any, now: any) => any))[];
|
|
22
23
|
static TriggerRules: ((workflow: any, now: any) => any)[];
|
|
23
24
|
events: Map<any, any>;
|
|
@@ -333,6 +333,7 @@ function matchNext(workflow, now, range = this.cacheCycle) {
|
|
|
333
333
|
return false;
|
|
334
334
|
}
|
|
335
335
|
class ScheduleTrigger extends import__.default {
|
|
336
|
+
sync = false;
|
|
336
337
|
static CacheRules = [
|
|
337
338
|
({ config, allExecuted }) => (config.limit ? allExecuted < config.limit : true) && config.startsOn,
|
|
338
339
|
matchNext,
|
|
@@ -7,5 +7,6 @@ export declare abstract class Trigger {
|
|
|
7
7
|
abstract on(workflow: WorkflowModel): void;
|
|
8
8
|
abstract off(workflow: WorkflowModel): void;
|
|
9
9
|
duplicateConfig?(workflow: WorkflowModel, options: Transactionable): object | Promise<object>;
|
|
10
|
+
sync?: boolean;
|
|
10
11
|
}
|
|
11
12
|
export default Trigger;
|
|
@@ -11,13 +11,16 @@ export default class WorkflowModel extends Model {
|
|
|
11
11
|
description?: string;
|
|
12
12
|
type: string;
|
|
13
13
|
config: any;
|
|
14
|
+
options: any;
|
|
14
15
|
executed: number;
|
|
16
|
+
allExecuted: number;
|
|
17
|
+
sync: boolean;
|
|
15
18
|
createdAt: Date;
|
|
16
19
|
updatedAt: Date;
|
|
17
|
-
nodes
|
|
20
|
+
nodes?: FlowNodeModel[];
|
|
18
21
|
getNodes: HasManyGetAssociationsMixin<FlowNodeModel>;
|
|
19
22
|
createNode: HasManyCreateAssociationMixin<FlowNodeModel>;
|
|
20
|
-
executions
|
|
23
|
+
executions?: ExecutionModel[];
|
|
21
24
|
countExecutions: HasManyCountAssociationsMixin;
|
|
22
25
|
getExecutions: HasManyGetAssociationsMixin<ExecutionModel>;
|
|
23
26
|
createExecution: HasManyCreateAssociationMixin<ExecutionModel>;
|
package/package.json
CHANGED
|
@@ -4,11 +4,11 @@
|
|
|
4
4
|
"displayName.zh-CN": "工作流",
|
|
5
5
|
"description": "A powerful workflow plugin designed to support business process management and automation.",
|
|
6
6
|
"description.zh-CN": "工作流插件,为业务流程管理和自动化提供支持。",
|
|
7
|
-
"version": "0.19.0-alpha.2",
|
|
7
|
+
"version": "0.19.0-alpha.4",
|
|
8
8
|
"license": "AGPL-3.0",
|
|
9
9
|
"main": "./dist/server/index.js",
|
|
10
10
|
"dependencies": {
|
|
11
|
-
"@nocobase/plugin-workflow-test": "0.19.0-alpha.2"
|
|
11
|
+
"@nocobase/plugin-workflow-test": "0.19.0-alpha.4"
|
|
12
12
|
},
|
|
13
13
|
"devDependencies": {
|
|
14
14
|
"@ant-design/icons": "5.x",
|
|
@@ -43,5 +43,5 @@
|
|
|
43
43
|
"@nocobase/test": "0.x",
|
|
44
44
|
"@nocobase/utils": "0.x"
|
|
45
45
|
},
|
|
46
|
-
"gitHead": "
|
|
46
|
+
"gitHead": "9583023f7bea828da5192384a5c002782c341b65"
|
|
47
47
|
}
|