@aztec/prover-client 4.0.0-nightly.20250907 → 4.0.0-nightly.20260107
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/block-factory/index.d.ts +1 -1
- package/dest/block-factory/light.d.ts +5 -3
- package/dest/block-factory/light.d.ts.map +1 -1
- package/dest/block-factory/light.js +32 -11
- package/dest/config.d.ts +2 -2
- package/dest/config.d.ts.map +1 -1
- package/dest/config.js +2 -2
- package/dest/index.d.ts +1 -1
- package/dest/light/index.d.ts +2 -0
- package/dest/light/index.d.ts.map +1 -0
- package/dest/light/index.js +1 -0
- package/dest/light/lightweight_checkpoint_builder.d.ts +36 -0
- package/dest/light/lightweight_checkpoint_builder.d.ts.map +1 -0
- package/dest/light/lightweight_checkpoint_builder.js +147 -0
- package/dest/mocks/fixtures.d.ts +5 -5
- package/dest/mocks/fixtures.d.ts.map +1 -1
- package/dest/mocks/fixtures.js +33 -15
- package/dest/mocks/test_context.d.ts +38 -33
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +133 -82
- package/dest/orchestrator/block-building-helpers.d.ts +35 -35
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
- package/dest/orchestrator/block-building-helpers.js +151 -187
- package/dest/orchestrator/block-proving-state.d.ts +68 -55
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/block-proving-state.js +273 -185
- package/dest/orchestrator/checkpoint-proving-state.d.ts +63 -0
- package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -0
- package/dest/orchestrator/checkpoint-proving-state.js +210 -0
- package/dest/orchestrator/epoch-proving-state.d.ts +38 -31
- package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/epoch-proving-state.js +128 -84
- package/dest/orchestrator/index.d.ts +1 -1
- package/dest/orchestrator/orchestrator.d.ts +35 -34
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +777 -292
- package/dest/orchestrator/orchestrator_metrics.d.ts +1 -3
- package/dest/orchestrator/orchestrator_metrics.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator_metrics.js +0 -9
- package/dest/orchestrator/tx-proving-state.d.ts +12 -10
- package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/tx-proving-state.js +23 -29
- package/dest/prover-client/factory.d.ts +3 -3
- package/dest/prover-client/factory.d.ts.map +1 -1
- package/dest/prover-client/index.d.ts +1 -1
- package/dest/prover-client/prover-client.d.ts +3 -3
- package/dest/prover-client/prover-client.d.ts.map +1 -1
- package/dest/prover-client/server-epoch-prover.d.ts +13 -11
- package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
- package/dest/prover-client/server-epoch-prover.js +9 -9
- package/dest/proving_broker/broker_prover_facade.d.ts +23 -18
- package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
- package/dest/proving_broker/broker_prover_facade.js +42 -33
- package/dest/proving_broker/config.d.ts +18 -14
- package/dest/proving_broker/config.d.ts.map +1 -1
- package/dest/proving_broker/config.js +12 -6
- package/dest/proving_broker/factory.d.ts +1 -1
- package/dest/proving_broker/factory.js +1 -1
- package/dest/proving_broker/fixtures.d.ts +3 -2
- package/dest/proving_broker/fixtures.d.ts.map +1 -1
- package/dest/proving_broker/fixtures.js +3 -2
- package/dest/proving_broker/index.d.ts +1 -1
- package/dest/proving_broker/proof_store/factory.d.ts +2 -2
- package/dest/proving_broker/proof_store/gcs_proof_store.d.ts +1 -1
- package/dest/proving_broker/proof_store/gcs_proof_store.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/index.d.ts +2 -1
- package/dest/proving_broker/proof_store/index.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/index.js +1 -0
- package/dest/proving_broker/proof_store/inline_proof_store.d.ts +1 -1
- package/dest/proving_broker/proof_store/inline_proof_store.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/proof_store.d.ts +1 -1
- package/dest/proving_broker/proving_agent.d.ts +1 -1
- package/dest/proving_broker/proving_agent.d.ts.map +1 -1
- package/dest/proving_broker/proving_agent.js +383 -8
- package/dest/proving_broker/proving_agent_instrumentation.d.ts +1 -1
- package/dest/proving_broker/proving_agent_instrumentation.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.d.ts +2 -2
- package/dest/proving_broker/proving_broker.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.js +418 -29
- package/dest/proving_broker/proving_broker_database/memory.d.ts +3 -2
- package/dest/proving_broker/proving_broker_database/memory.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.d.ts +3 -2
- package/dest/proving_broker/proving_broker_database/persisted.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.js +8 -7
- package/dest/proving_broker/proving_broker_database.d.ts +3 -2
- package/dest/proving_broker/proving_broker_database.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.d.ts +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.d.ts +3 -2
- package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.js +40 -21
- package/dest/proving_broker/rpc.d.ts +4 -4
- package/dest/test/mock_proof_store.d.ts +3 -3
- package/dest/test/mock_proof_store.d.ts.map +1 -1
- package/dest/test/mock_prover.d.ts +23 -19
- package/dest/test/mock_prover.d.ts.map +1 -1
- package/dest/test/mock_prover.js +36 -21
- package/package.json +21 -19
- package/src/block-factory/light.ts +40 -17
- package/src/config.ts +2 -2
- package/src/light/index.ts +1 -0
- package/src/light/lightweight_checkpoint_builder.ts +198 -0
- package/src/mocks/fixtures.ts +41 -36
- package/src/mocks/test_context.ts +196 -114
- package/src/orchestrator/block-building-helpers.ts +233 -313
- package/src/orchestrator/block-proving-state.ts +315 -247
- package/src/orchestrator/checkpoint-proving-state.ts +303 -0
- package/src/orchestrator/epoch-proving-state.ts +176 -129
- package/src/orchestrator/orchestrator.ts +558 -348
- package/src/orchestrator/orchestrator_metrics.ts +1 -20
- package/src/orchestrator/tx-proving-state.ts +47 -55
- package/src/prover-client/factory.ts +6 -2
- package/src/prover-client/prover-client.ts +3 -2
- package/src/prover-client/server-epoch-prover.ts +30 -21
- package/src/proving_broker/broker_prover_facade.ts +175 -112
- package/src/proving_broker/config.ts +14 -7
- package/src/proving_broker/factory.ts +1 -1
- package/src/proving_broker/fixtures.ts +8 -3
- package/src/proving_broker/proof_store/index.ts +1 -0
- package/src/proving_broker/proving_broker.ts +41 -19
- package/src/proving_broker/proving_broker_database/memory.ts +2 -1
- package/src/proving_broker/proving_broker_database/persisted.ts +10 -9
- package/src/proving_broker/proving_broker_database.ts +2 -1
- package/src/proving_broker/proving_job_controller.ts +42 -22
- package/src/test/mock_prover.ts +143 -66
- package/dest/bin/get-proof-inputs.d.ts +0 -2
- package/dest/bin/get-proof-inputs.d.ts.map +0 -1
- package/dest/bin/get-proof-inputs.js +0 -51
- package/src/bin/get-proof-inputs.ts +0 -59
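
Most of the churn below is in the proving orchestrator (it appears to be dest/orchestrator/orchestrator.js), which moves from a per-block epoch flow to a checkpoint-based one and replaces the tube circuits with "chonk verifier" circuits. The sketch that follows is not part of the diff: it is a hypothetical TypeScript driver assembled only from the method names, signatures, and error messages visible in the hunks below (startNewEpoch, startNewCheckpoint, startNewBlock, addTxs, setBlockCompleted). All argument values and data shapes are placeholders, and the epoch-finalization entry point is not visible in the captured hunks.

// Hypothetical usage sketch (not taken from the package): call order inferred from the
// signatures and error messages in the orchestrator hunks below. Types are deliberately loose.
type EpochInput = {
  number: number;
  finalBlobBatchingChallenges: unknown;
  checkpoints: Array<{
    constants: { slotNumber: number } & Record<string, unknown>;
    l1ToL2Messages: unknown[];
    headerOfLastBlockInPreviousCheckpoint: unknown;
    blocks: Array<{ number: number; timestamp: unknown; txs: unknown[] }>;
  }>;
};

async function proveEpochSketch(orchestrator: any, epoch: EpochInput): Promise<void> {
  // "Call startNewEpoch before starting a checkpoint."
  orchestrator.startNewEpoch(epoch.number, epoch.checkpoints.length, epoch.finalBlobBatchingChallenges);

  for (const [checkpointIndex, checkpoint] of epoch.checkpoints.entries()) {
    // Forks world state at the last block of the previous checkpoint and inserts its L1-to-L2 messages.
    await orchestrator.startNewCheckpoint(
      checkpointIndex,
      checkpoint.constants,
      checkpoint.l1ToL2Messages,
      checkpoint.blocks.length,
      checkpoint.headerOfLastBlockInPreviousCheckpoint,
    );

    for (const block of checkpoint.blocks) {
      // "Call startNewBlock first." A block with zero txs is allowed; its end sponge blob is set here.
      await orchestrator.startNewBlock(block.number, block.timestamp, block.txs.length);
      if (block.txs.length > 0) {
        // addTxs must receive exactly totalNumTxs transactions for the block.
        await orchestrator.addTxs(block.txs);
      }
      // Assembles the block header and triggers the block root rollup checks.
      await orchestrator.setBlockCompleted(block.number);
    }
  }
  // Epoch finalization (batched blob + epoch proof result) happens in a method whose
  // signature is not visible in the captured hunks, so it is omitted here.
}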
@@ -1,33 +1,404 @@
-function
-
-
-
-
+function applyDecs2203RFactory() {
+function createAddInitializerMethod(initializers, decoratorFinishedRef) {
+return function addInitializer(initializer) {
+assertNotFinished(decoratorFinishedRef, "addInitializer");
+assertCallable(initializer, "An initializer");
+initializers.push(initializer);
+};
+}
+function memberDec(dec, name, desc, initializers, kind, isStatic, isPrivate, metadata, value) {
+var kindStr;
+switch(kind){
+case 1:
+kindStr = "accessor";
+break;
+case 2:
+kindStr = "method";
+break;
+case 3:
+kindStr = "getter";
+break;
+case 4:
+kindStr = "setter";
+break;
+default:
+kindStr = "field";
+}
+var ctx = {
+kind: kindStr,
+name: isPrivate ? "#" + name : name,
+static: isStatic,
+private: isPrivate,
+metadata: metadata
+};
+var decoratorFinishedRef = {
+v: false
+};
+ctx.addInitializer = createAddInitializerMethod(initializers, decoratorFinishedRef);
+var get, set;
+if (kind === 0) {
+if (isPrivate) {
+get = desc.get;
+set = desc.set;
+} else {
+get = function() {
+return this[name];
+};
+set = function(v) {
+this[name] = v;
+};
+}
+} else if (kind === 2) {
+get = function() {
+return desc.value;
+};
+} else {
+if (kind === 1 || kind === 3) {
+get = function() {
+return desc.get.call(this);
+};
+}
+if (kind === 1 || kind === 4) {
+set = function(v) {
+desc.set.call(this, v);
+};
+}
+}
+ctx.access = get && set ? {
+get: get,
+set: set
+} : get ? {
+get: get
+} : {
+set: set
+};
+try {
+return dec(value, ctx);
+} finally{
+decoratorFinishedRef.v = true;
+}
+}
+function assertNotFinished(decoratorFinishedRef, fnName) {
+if (decoratorFinishedRef.v) {
+throw new Error("attempted to call " + fnName + " after decoration was finished");
+}
+}
+function assertCallable(fn, hint) {
+if (typeof fn !== "function") {
+throw new TypeError(hint + " must be a function");
+}
+}
+function assertValidReturnValue(kind, value) {
+var type = typeof value;
+if (kind === 1) {
+if (type !== "object" || value === null) {
+throw new TypeError("accessor decorators must return an object with get, set, or init properties or void 0");
+}
+if (value.get !== undefined) {
+assertCallable(value.get, "accessor.get");
+}
+if (value.set !== undefined) {
+assertCallable(value.set, "accessor.set");
+}
+if (value.init !== undefined) {
+assertCallable(value.init, "accessor.init");
+}
+} else if (type !== "function") {
+var hint;
+if (kind === 0) {
+hint = "field";
+} else if (kind === 10) {
+hint = "class";
+} else {
+hint = "method";
+}
+throw new TypeError(hint + " decorators must return a function or void 0");
+}
+}
+function applyMemberDec(ret, base, decInfo, name, kind, isStatic, isPrivate, initializers, metadata) {
+var decs = decInfo[0];
+var desc, init, value;
+if (isPrivate) {
+if (kind === 0 || kind === 1) {
+desc = {
+get: decInfo[3],
+set: decInfo[4]
+};
+} else if (kind === 3) {
+desc = {
+get: decInfo[3]
+};
+} else if (kind === 4) {
+desc = {
+set: decInfo[3]
+};
+} else {
+desc = {
+value: decInfo[3]
+};
+}
+} else if (kind !== 0) {
+desc = Object.getOwnPropertyDescriptor(base, name);
+}
+if (kind === 1) {
+value = {
+get: desc.get,
+set: desc.set
+};
+} else if (kind === 2) {
+value = desc.value;
+} else if (kind === 3) {
+value = desc.get;
+} else if (kind === 4) {
+value = desc.set;
+}
+var newValue, get, set;
+if (typeof decs === "function") {
+newValue = memberDec(decs, name, desc, initializers, kind, isStatic, isPrivate, metadata, value);
+if (newValue !== void 0) {
+assertValidReturnValue(kind, newValue);
+if (kind === 0) {
+init = newValue;
+} else if (kind === 1) {
+init = newValue.init;
+get = newValue.get || value.get;
+set = newValue.set || value.set;
+value = {
+get: get,
+set: set
+};
+} else {
+value = newValue;
+}
+}
+} else {
+for(var i = decs.length - 1; i >= 0; i--){
+var dec = decs[i];
+newValue = memberDec(dec, name, desc, initializers, kind, isStatic, isPrivate, metadata, value);
+if (newValue !== void 0) {
+assertValidReturnValue(kind, newValue);
+var newInit;
+if (kind === 0) {
+newInit = newValue;
+} else if (kind === 1) {
+newInit = newValue.init;
+get = newValue.get || value.get;
+set = newValue.set || value.set;
+value = {
+get: get,
+set: set
+};
+} else {
+value = newValue;
+}
+if (newInit !== void 0) {
+if (init === void 0) {
+init = newInit;
+} else if (typeof init === "function") {
+init = [
+init,
+newInit
+];
+} else {
+init.push(newInit);
+}
+}
+}
+}
+}
+if (kind === 0 || kind === 1) {
+if (init === void 0) {
+init = function(instance, init) {
+return init;
+};
+} else if (typeof init !== "function") {
+var ownInitializers = init;
+init = function(instance, init) {
+var value = init;
+for(var i = 0; i < ownInitializers.length; i++){
+value = ownInitializers[i].call(instance, value);
+}
+return value;
+};
+} else {
+var originalInitializer = init;
+init = function(instance, init) {
+return originalInitializer.call(instance, init);
+};
+}
+ret.push(init);
+}
+if (kind !== 0) {
+if (kind === 1) {
+desc.get = value.get;
+desc.set = value.set;
+} else if (kind === 2) {
+desc.value = value;
+} else if (kind === 3) {
+desc.get = value;
+} else if (kind === 4) {
+desc.set = value;
+}
+if (isPrivate) {
+if (kind === 1) {
+ret.push(function(instance, args) {
+return value.get.call(instance, args);
+});
+ret.push(function(instance, args) {
+return value.set.call(instance, args);
+});
+} else if (kind === 2) {
+ret.push(value);
+} else {
+ret.push(function(instance, args) {
+return value.call(instance, args);
+});
+}
+} else {
+Object.defineProperty(base, name, desc);
+}
+}
+}
+function applyMemberDecs(Class, decInfos, metadata) {
+var ret = [];
+var protoInitializers;
+var staticInitializers;
+var existingProtoNonFields = new Map();
+var existingStaticNonFields = new Map();
+for(var i = 0; i < decInfos.length; i++){
+var decInfo = decInfos[i];
+if (!Array.isArray(decInfo)) continue;
+var kind = decInfo[1];
+var name = decInfo[2];
+var isPrivate = decInfo.length > 3;
+var isStatic = kind >= 5;
+var base;
+var initializers;
+if (isStatic) {
+base = Class;
+kind = kind - 5;
+staticInitializers = staticInitializers || [];
+initializers = staticInitializers;
+} else {
+base = Class.prototype;
+protoInitializers = protoInitializers || [];
+initializers = protoInitializers;
+}
+if (kind !== 0 && !isPrivate) {
+var existingNonFields = isStatic ? existingStaticNonFields : existingProtoNonFields;
+var existingKind = existingNonFields.get(name) || 0;
+if (existingKind === true || existingKind === 3 && kind !== 4 || existingKind === 4 && kind !== 3) {
+throw new Error("Attempted to decorate a public method/accessor that has the same name as a previously decorated public method/accessor. This is not currently supported by the decorators plugin. Property name was: " + name);
+} else if (!existingKind && kind > 2) {
+existingNonFields.set(name, kind);
+} else {
+existingNonFields.set(name, true);
+}
+}
+applyMemberDec(ret, base, decInfo, name, kind, isStatic, isPrivate, initializers, metadata);
+}
+pushInitializers(ret, protoInitializers);
+pushInitializers(ret, staticInitializers);
+return ret;
+}
+function pushInitializers(ret, initializers) {
+if (initializers) {
+ret.push(function(instance) {
+for(var i = 0; i < initializers.length; i++){
+initializers[i].call(instance);
+}
+return instance;
+});
+}
+}
+function applyClassDecs(targetClass, classDecs, metadata) {
+if (classDecs.length > 0) {
+var initializers = [];
+var newClass = targetClass;
+var name = targetClass.name;
+for(var i = classDecs.length - 1; i >= 0; i--){
+var decoratorFinishedRef = {
+v: false
+};
+try {
+var nextNewClass = classDecs[i](newClass, {
+kind: "class",
+name: name,
+addInitializer: createAddInitializerMethod(initializers, decoratorFinishedRef),
+metadata
+});
+} finally{
+decoratorFinishedRef.v = true;
+}
+if (nextNewClass !== undefined) {
+assertValidReturnValue(10, nextNewClass);
+newClass = nextNewClass;
+}
+}
+return [
+defineMetadata(newClass, metadata),
+function() {
+for(var i = 0; i < initializers.length; i++){
+initializers[i].call(newClass);
+}
+}
+];
+}
+}
+function defineMetadata(Class, metadata) {
+return Object.defineProperty(Class, Symbol.metadata || Symbol.for("Symbol.metadata"), {
+configurable: true,
+enumerable: true,
+value: metadata
+});
+}
+return function applyDecs2203R(targetClass, memberDecs, classDecs, parentClass) {
+if (parentClass !== void 0) {
+var parentMetadata = parentClass[Symbol.metadata || Symbol.for("Symbol.metadata")];
+}
+var metadata = Object.create(parentMetadata === void 0 ? null : parentMetadata);
+var e = applyMemberDecs(targetClass, memberDecs, metadata);
+if (!classDecs.length) defineMetadata(targetClass, metadata);
+return {
+e: e,
+get c () {
+return applyClassDecs(targetClass, classDecs, metadata);
+}
+};
+};
 }
-
-
-
+function _apply_decs_2203_r(targetClass, memberDecs, classDecs, parentClass) {
+return (_apply_decs_2203_r = applyDecs2203RFactory())(targetClass, memberDecs, classDecs, parentClass);
+}
+var _dec, _dec1, _dec2, _dec3, _dec4, _initProto;
+import { L1_TO_L2_MSG_SUBTREE_HEIGHT, L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH, NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, NUM_BASE_PARITY_PER_ROOT_PARITY } from '@aztec/constants';
+import { BlockNumber } from '@aztec/foundation/branded-types';
+import { padArrayEnd } from '@aztec/foundation/collection';
+import { Fr } from '@aztec/foundation/curves/bn254';
 import { AbortError } from '@aztec/foundation/error';
-import { Fr } from '@aztec/foundation/fields';
 import { createLogger } from '@aztec/foundation/log';
 import { promiseWithResolvers } from '@aztec/foundation/promise';
 import { assertLength } from '@aztec/foundation/serialize';
 import { pushTestData } from '@aztec/foundation/testing';
 import { elapsed } from '@aztec/foundation/timer';
-import {
-import { readAvmMinimalPublicTxInputsFromFile } from '@aztec/simulator/public/fixtures';
-import { L2Block } from '@aztec/stdlib/block';
-import { BaseParityInputs } from '@aztec/stdlib/parity';
-import { EmptyBlockRootRollupInputs, PrivateBaseRollupInputs, SingleTxBlockRootRollupInputs, TubeInputs } from '@aztec/stdlib/rollup';
+import { BlockRootEmptyTxFirstRollupPrivateInputs, BlockRootFirstRollupPrivateInputs, BlockRootSingleTxFirstRollupPrivateInputs, BlockRootSingleTxRollupPrivateInputs, CheckpointRootSingleBlockRollupPrivateInputs, PrivateTxBaseRollupPrivateInputs } from '@aztec/stdlib/rollup';
 import { MerkleTreeId } from '@aztec/stdlib/trees';
-import { toNumBlobFields } from '@aztec/stdlib/tx';
 import { Attributes, getTelemetryClient, trackSpan, wrapCallbackInSpan } from '@aztec/telemetry-client';
 import { inspect } from 'util';
-import {
+import { buildHeaderFromCircuitOutputs, getLastSiblingPath, getPublicChonkVerifierPrivateInputsFromTx, getRootTreeSiblingPath, getSubtreeSiblingPath, getTreeSnapshot, insertSideEffectsAndBuildBaseRollupHints, validatePartialState, validateTx } from './block-building-helpers.js';
 import { EpochProvingState } from './epoch-proving-state.js';
 import { ProvingOrchestratorMetrics } from './orchestrator_metrics.js';
 import { TxProvingState } from './tx-proving-state.js';
 const logger = createLogger('prover-client:orchestrator');
+_dec = trackSpan('ProvingOrchestrator.startNewBlock', (blockNumber)=>({
+[Attributes.BLOCK_NUMBER]: blockNumber
+})), _dec1 = trackSpan('ProvingOrchestrator.addTxs', (txs)=>({
+[Attributes.BLOCK_TXS_COUNT]: txs.length
+})), _dec2 = trackSpan('ProvingOrchestrator.startChonkVerifierCircuits'), _dec3 = trackSpan('ProvingOrchestrator.setBlockCompleted', (blockNumber)=>({
+[Attributes.BLOCK_NUMBER]: blockNumber
+})), _dec4 = trackSpan('ProvingOrchestrator.prepareBaseRollupInputs', (tx)=>({
+[Attributes.TX_HASH]: tx.hash.toString()
+}));
 /**
 * Implements an event driven proving scheduler to build the recursive proof tree. The idea being:
 * 1. Transactions are provided to the scheduler post simulation.
@@ -43,16 +414,46 @@ const logger = createLogger('prover-client:orchestrator');
 dbProvider;
 prover;
 proverId;
+static{
+({ e: [_initProto] } = _apply_decs_2203_r(this, [
+[
+_dec,
+2,
+"startNewBlock"
+],
+[
+_dec1,
+2,
+"addTxs"
+],
+[
+_dec2,
+2,
+"startChonkVerifierCircuits"
+],
+[
+_dec3,
+2,
+"setBlockCompleted"
+],
+[
+_dec4,
+2,
+"prepareBaseRollupInputs"
+]
+], []));
+}
 provingState;
 pendingProvingJobs;
 provingPromise;
 metrics;
+// eslint-disable-next-line aztec-custom/no-non-primitive-in-collections
 dbs;
 constructor(dbProvider, prover, proverId, telemetryClient = getTelemetryClient()){
 this.dbProvider = dbProvider;
 this.prover = prover;
 this.proverId = proverId;
-this.provingState = undefined;
+this.provingState = (_initProto(this), undefined);
 this.pendingProvingJobs = [];
 this.provingPromise = undefined;
 this.dbs = new Map();
@@ -68,82 +469,137 @@ const logger = createLogger('prover-client:orchestrator');
 this.cancel();
 return Promise.resolve();
 }
-startNewEpoch(epochNumber,
+startNewEpoch(epochNumber, totalNumCheckpoints, finalBlobBatchingChallenges) {
+if (this.provingState?.verifyState()) {
+throw new Error(`Cannot start epoch ${epochNumber} when epoch ${this.provingState.epochNumber} is still being processed.`);
+}
 const { promise: _promise, resolve, reject } = promiseWithResolvers();
 const promise = _promise.catch((reason)=>({
 status: 'failure',
 reason
 }));
-
-
-}
-logger.info(`Starting epoch ${epochNumber} with ${totalNumBlocks} blocks`);
-this.provingState = new EpochProvingState(epochNumber, firstBlockNumber, totalNumBlocks, finalBlobBatchingChallenges, resolve, reject);
+logger.info(`Starting epoch ${epochNumber} with ${totalNumCheckpoints} checkpoints.`);
+this.provingState = new EpochProvingState(epochNumber, totalNumCheckpoints, finalBlobBatchingChallenges, (provingState)=>this.checkAndEnqueueCheckpointRootRollup(provingState), resolve, reject);
 this.provingPromise = promise;
 }
+async startNewCheckpoint(checkpointIndex, constants, l1ToL2Messages, totalNumBlocks, headerOfLastBlockInPreviousCheckpoint) {
+if (!this.provingState) {
+throw new Error('Empty epoch proving state. Call startNewEpoch before starting a checkpoint.');
+}
+if (!this.provingState.isAcceptingCheckpoints()) {
+throw new Error(`Epoch not accepting further checkpoints.`);
+}
+// Fork world state at the end of the immediately previous block.
+const lastBlockNumber = headerOfLastBlockInPreviousCheckpoint.globalVariables.blockNumber;
+const db = await this.dbProvider.fork(lastBlockNumber);
+const firstBlockNumber = BlockNumber(lastBlockNumber + 1);
+this.dbs.set(firstBlockNumber, db);
+// Get archive sibling path before any block in this checkpoint lands.
+const lastArchiveSiblingPath = await getLastSiblingPath(MerkleTreeId.ARCHIVE, db);
+// Insert all the l1 to l2 messages into the db. And get the states before and after the insertion.
+const { lastL1ToL2MessageTreeSnapshot, lastL1ToL2MessageSubtreeRootSiblingPath, newL1ToL2MessageTreeSnapshot, newL1ToL2MessageSubtreeRootSiblingPath } = await this.updateL1ToL2MessageTree(l1ToL2Messages, db);
+this.provingState.startNewCheckpoint(checkpointIndex, constants, totalNumBlocks, headerOfLastBlockInPreviousCheckpoint, lastArchiveSiblingPath, l1ToL2Messages, lastL1ToL2MessageTreeSnapshot, lastL1ToL2MessageSubtreeRootSiblingPath, newL1ToL2MessageTreeSnapshot, newL1ToL2MessageSubtreeRootSiblingPath);
+}
 /**
 * Starts off a new block
-* @param
-* @param
-*
-
+* @param blockNumber - The block number
+* @param timestamp - The timestamp of the block. This is only required for constructing the private inputs for the
+* block that doesn't have any txs.
+* @param totalNumTxs - The total number of txs in the block
+*/ async startNewBlock(blockNumber, timestamp, totalNumTxs) {
 if (!this.provingState) {
-throw new Error(
-}
-
-
-
-
-
-
-
-
-
-//
-
-
-
-
-
-
-
+throw new Error('Empty epoch proving state. Call startNewEpoch before starting a block.');
+}
+const checkpointProvingState = this.provingState.getCheckpointProvingStateByBlockNumber(blockNumber);
+if (!checkpointProvingState) {
+throw new Error(`Checkpoint not started. Call startNewCheckpoint first.`);
+}
+if (!checkpointProvingState.isAcceptingBlocks()) {
+throw new Error(`Checkpoint not accepting further blocks.`);
+}
+const constants = checkpointProvingState.constants;
+logger.info(`Starting block ${blockNumber} for slot ${constants.slotNumber}.`);
+// Fork the db only when it's not already set. The db for the first block is set in `startNewCheckpoint`.
+if (!this.dbs.has(blockNumber)) {
+// Fork world state at the end of the immediately previous block
+const db = await this.dbProvider.fork(BlockNumber(blockNumber - 1));
+this.dbs.set(blockNumber, db);
+}
+const db = this.dbs.get(blockNumber);
+// Get archive snapshot and sibling path before any txs in this block lands.
+const lastArchiveTreeSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
+const lastArchiveSiblingPath = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE, db);
+const blockProvingState = checkpointProvingState.startNewBlock(blockNumber, timestamp, totalNumTxs, lastArchiveTreeSnapshot, lastArchiveSiblingPath);
+// Enqueue base parity circuits for the first block in the checkpoint.
+if (blockProvingState.index === 0) {
+for(let i = 0; i < NUM_BASE_PARITY_PER_ROOT_PARITY; i++){
+this.enqueueBaseParityCircuit(checkpointProvingState, blockProvingState, i);
+}
+}
+// Because `addTxs` won't be called for a block without txs, and that's where the sponge blob state is computed.
+// We need to set its end sponge blob here, which will become the start sponge blob for the next block.
+if (totalNumTxs === 0) {
+const endState = await db.getStateReference();
+blockProvingState.setEndState(endState);
+const endSpongeBlob = blockProvingState.getStartSpongeBlob().clone();
+const blockEndBlobFields = blockProvingState.getBlockEndBlobFields();
+await endSpongeBlob.absorb(blockEndBlobFields);
+blockProvingState.setEndSpongeBlob(endSpongeBlob);
+// And also try to accumulate the blobs as far as we can:
+await this.provingState.setBlobAccumulators();
 }
 }
 /**
 * The interface to add simulated transactions to the scheduler. This can only be called once per block.
 * @param txs - The transactions to be proven
 */ async addTxs(txs) {
+if (!this.provingState) {
+throw new Error(`Empty epoch proving state. Call startNewEpoch before adding txs.`);
+}
 if (!txs.length) {
 // To avoid an ugly throw below. If we require an empty block, we can just call setBlockCompleted
 // on a block with no txs. We cannot do that here because we cannot find the blockNumber without any txs.
 logger.warn(`Provided no txs to orchestrator addTxs.`);
 return;
 }
-const blockNumber = txs[0].globalVariables.blockNumber;
-const provingState = this.provingState
+const blockNumber = BlockNumber(txs[0].globalVariables.blockNumber);
+const provingState = this.provingState.getBlockProvingStateByBlockNumber(blockNumber);
 if (!provingState) {
-throw new Error(`
+throw new Error(`Proving state for block ${blockNumber} not found. Call startNewBlock first.`);
 }
-if (provingState.totalNumTxs) {
+if (provingState.totalNumTxs !== txs.length) {
+throw new Error(`Block ${blockNumber} should be filled with ${provingState.totalNumTxs} txs. Received ${txs.length} txs.`);
+}
+if (!provingState.isAcceptingTxs()) {
 throw new Error(`Block ${blockNumber} has been initialized with transactions.`);
 }
-
-
-
+logger.info(`Adding ${txs.length} transactions to block ${blockNumber}`);
+const db = this.dbs.get(blockNumber);
+const lastArchive = provingState.lastArchiveTreeSnapshot;
+const newL1ToL2MessageTreeSnapshot = provingState.newL1ToL2MessageTreeSnapshot;
+const spongeBlobState = provingState.getStartSpongeBlob().clone();
 for (const tx of txs){
 try {
 if (!provingState.verifyState()) {
 throw new Error(`Invalid proving state when adding a tx`);
 }
 validateTx(tx);
-logger.
-const
-const
+logger.debug(`Received transaction: ${tx.hash}`);
+const startSpongeBlob = spongeBlobState.clone();
+const [hints, treeSnapshots] = await this.prepareBaseRollupInputs(tx, lastArchive, newL1ToL2MessageTreeSnapshot, startSpongeBlob, db);
+if (!provingState.verifyState()) {
+throw new Error(`Unable to add transaction, preparing base inputs failed`);
+}
+await spongeBlobState.absorb(tx.txEffect.toBlobFields());
+const txProvingState = new TxProvingState(tx, hints, treeSnapshots, this.proverId.toField());
 const txIndex = provingState.addNewTx(txProvingState);
-this.getOrEnqueueTube(provingState, txIndex);
 if (txProvingState.requireAvmProof) {
+this.getOrEnqueueChonkVerifier(provingState, txIndex);
 logger.debug(`Enqueueing public VM for tx ${txIndex}`);
 this.enqueueVM(provingState, txIndex);
+} else {
+logger.debug(`Enqueueing base rollup for private-only tx ${txIndex}`);
+this.enqueueBaseRollup(provingState, txIndex);
 }
 } catch (err) {
 throw new Error(`Error adding transaction ${tx.hash.toString()} to block ${blockNumber}: ${err.message}`, {
@@ -151,21 +607,31 @@ const logger = createLogger('prover-client:orchestrator');
 });
 }
 }
+const endState = await db.getStateReference();
+provingState.setEndState(endState);
+const blockEndBlobFields = provingState.getBlockEndBlobFields();
+await spongeBlobState.absorb(blockEndBlobFields);
+provingState.setEndSpongeBlob(spongeBlobState);
+// Txs have been added to the block. Now try to accumulate the blobs as far as we can:
+await this.provingState.setBlobAccumulators();
 }
 /**
-* Kickstarts
-* Note that if the
-*/
+* Kickstarts chonk verifier circuits for the specified txs. These will be used during epoch proving.
+* Note that if the chonk verifier circuits are not started this way, they will be started nontheless after processing.
+*/ startChonkVerifierCircuits(txs) {
 if (!this.provingState?.verifyState()) {
-throw new Error(`
+throw new Error(`Empty epoch proving state. call startNewEpoch before starting chonk verifier circuits.`);
 }
-
+const publicTxs = txs.filter((tx)=>tx.data.forPublic);
+for (const tx of publicTxs){
 const txHash = tx.getTxHash().toString();
-const
+const privateInputs = getPublicChonkVerifierPrivateInputsFromTx(tx, this.proverId.toField());
 const tubeProof = promiseWithResolvers();
-logger.debug(`Starting
-this.
-
+logger.debug(`Starting chonk verifier circuit for tx ${txHash}`);
+this.doEnqueueChonkVerifier(txHash, privateInputs, (proof)=>{
+tubeProof.resolve(proof);
+});
+this.provingState.cachedChonkVerifierProofs.set(txHash, tubeProof.promise);
 }
 return Promise.resolve();
 }
@@ -177,57 +643,73 @@ const logger = createLogger('prover-client:orchestrator');
 if (!provingState) {
 throw new Error(`Block proving state for ${blockNumber} not found`);
 }
-if
-
-
-
+// Abort with specific error for the block if there's one.
+const error = provingState.getError();
+if (error) {
+throw new Error(`Block proving failed: ${error}`);
 }
+// Abort if the proving state is not valid due to errors occurred elsewhere.
 if (!provingState.verifyState()) {
-throw new Error(`
+throw new Error(`Invalid proving state when completing block ${blockNumber}.`);
 }
-
+if (provingState.isAcceptingTxs()) {
+throw new Error(`Block ${blockNumber} is still accepting txs. Call setBlockCompleted after all txs have been added.`);
+}
+// Given we've applied every change from this block, now assemble the block header:
 logger.verbose(`Block ${blockNumber} completed. Assembling header.`);
-await
-logger.debug(`Accumulating blobs for ${blockNumber}`);
-await this.provingState?.setBlobAccumulators(blockNumber);
-// If the proofs were faster than the block building, then we need to try the block root rollup again here
-await this.checkAndEnqueueBlockRootRollup(provingState);
-return provingState.block;
-}
-/** Returns the block as built for a given index. */ getBlock(index) {
-const block = this.provingState?.blocks[index]?.block;
-if (!block) {
-throw new Error(`Block at index ${index} not available`);
-}
-return block;
-}
-async buildBlock(provingState, expectedHeader) {
-// Collect all new nullifiers, commitments, and contracts from all txs in this block to build body
-const txs = provingState.allTxs.map((a)=>a.processedTx);
-// Get db for this block
-const db = this.dbs.get(provingState.blockNumber);
-// Given we've applied every change from this block, now assemble the block header
-// and update the archive tree, so we're ready to start processing the next block
-const { header, body } = await buildHeaderAndBodyFromTxs(txs, provingState.globalVariables, provingState.newL1ToL2Messages, db);
+const header = await provingState.buildBlockHeader();
 if (expectedHeader && !header.equals(expectedHeader)) {
 logger.error(`Block header mismatch: header=${header} expectedHeader=${expectedHeader}`);
 throw new Error('Block header mismatch');
 }
+// Get db for this block
+const db = this.dbs.get(provingState.blockNumber);
+// Update the archive tree, so we're ready to start processing the next block:
 logger.verbose(`Updating archive tree with block ${provingState.blockNumber} header ${(await header.hash()).toString()}`);
 await db.updateArchive(header);
-
-
-const l2Block = new L2Block(newArchive, header, body);
-await this.verifyBuiltBlockAgainstSyncedState(l2Block, newArchive);
-logger.verbose(`Orchestrator finalized block ${l2Block.number}`);
-provingState.setBlock(l2Block);
+await this.verifyBuiltBlockAgainstSyncedState(provingState);
+return header;
 }
 // Flagged as protected to disable in certain unit tests
-async verifyBuiltBlockAgainstSyncedState(
-const
+async verifyBuiltBlockAgainstSyncedState(provingState) {
+const builtBlockHeader = provingState.getBuiltBlockHeader();
+if (!builtBlockHeader) {
+logger.debug('Block header not built yet, skipping header check.');
+return;
+}
+const output = provingState.getBlockRootRollupOutput();
+if (!output) {
+logger.debug('Block root rollup proof not built yet, skipping header check.');
+return;
+}
+const header = await buildHeaderFromCircuitOutputs(output);
+if (!(await header.hash()).equals(await builtBlockHeader.hash())) {
+logger.error(`Block header mismatch.\nCircuit: ${inspect(header)}\nComputed: ${inspect(builtBlockHeader)}`);
+provingState.reject(`Block header hash mismatch.`);
+return;
+}
+// Get db for this block
+const blockNumber = provingState.blockNumber;
+const db = this.dbs.get(blockNumber);
+const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
+const syncedArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.dbProvider.getSnapshot(blockNumber));
 if (!syncedArchive.equals(newArchive)) {
-
+logger.error(`Archive tree mismatch for block ${blockNumber}: world state synced to ${inspect(syncedArchive)} but built ${inspect(newArchive)}`);
+provingState.reject(`Archive tree mismatch.`);
+return;
 }
+const circuitArchive = output.newArchive;
+if (!newArchive.equals(circuitArchive)) {
+logger.error(`New archive mismatch.\nCircuit: ${output.newArchive}\nComputed: ${newArchive}`);
+provingState.reject(`New archive mismatch.`);
+return;
+}
+// TODO(palla/prover): This closes the fork only on the happy path. If this epoch orchestrator
+// is aborted and never reaches this point, it will leak the fork. We need to add a global cleanup,
+// but have to make sure it only runs once all operations are completed, otherwise some function here
+// will attempt to access the fork after it was closed.
+logger.debug(`Cleaning up world state fork for ${blockNumber}`);
+void this.dbs.get(blockNumber)?.close().then(()=>this.dbs.delete(blockNumber)).catch((err)=>logger.error(`Error closing db for block ${blockNumber}`, err));
 }
 /**
 * Cancel any further proving
@@ -247,14 +729,7 @@ const logger = createLogger('prover-client:orchestrator');
 if (result.status === 'failure') {
 throw new Error(`Epoch proving failed: ${result.reason}`);
 }
-
-// TODO(MW): EpochProvingState uses this.blocks.filter(b => !!b).length as total blocks, use this below:
-const finalBlock = this.provingState.blocks[this.provingState.totalNumBlocks - 1];
-if (!finalBlock || !finalBlock.endBlobAccumulator) {
-throw new Error(`Epoch's final block not ready for finalize`);
-}
-const finalBatchedBlob = await finalBlock.endBlobAccumulator.finalize();
-this.provingState.setFinalBatchedBlob(finalBatchedBlob);
+await this.provingState.finalizeBatchedBlob();
 const epochProofResult = this.provingState.getEpochProofResult();
 pushTestData('epochProofResult', {
 proof: epochProofResult.proof.toString(),
@@ -263,24 +738,12 @@ const logger = createLogger('prover-client:orchestrator');
 return epochProofResult;
 }
 /**
-* Starts the proving process for the given transaction and adds it to our state
-* @param tx - The transaction whose proving we wish to commence
-* @param provingState - The proving state being worked on
-*/ async prepareTransaction(tx, provingState) {
-const txInputs = await this.prepareBaseRollupInputs(provingState, tx);
-if (!txInputs) {
-// This should not be possible
-throw new Error(`Unable to add transaction, preparing base inputs failed`);
-}
-return txInputs;
-}
-/**
 * Enqueue a job to be scheduled
 * @param provingState - The proving state object being operated on
 * @param jobType - The type of job to be queued
 * @param job - The actual job, returns a promise notifying of the job's completion
 */ deferredProving(provingState, request, callback) {
-if (!provingState
+if (!provingState.verifyState()) {
 logger.debug(`Not enqueuing job, state no longer valid`);
 return;
 }
@@ -295,7 +758,7 @@ const logger = createLogger('prover-client:orchestrator');
 return;
 }
 const result = await request(controller.signal);
-if (!provingState
+if (!provingState.verifyState()) {
 logger.debug(`State no longer valid, discarding result`);
 return;
 }
@@ -323,31 +786,26 @@ const logger = createLogger('prover-client:orchestrator');
 // let the callstack unwind before adding the job to the queue
 setImmediate(()=>void safeJob());
 }
-async
+async updateL1ToL2MessageTree(l1ToL2Messages, db) {
 const l1ToL2MessagesPadded = padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 'Too many L1 to L2 messages');
-const
-const
-const l1ToL2MessageSubtreeSiblingPath = assertLength(await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, db), L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH);
+const lastL1ToL2MessageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
+const lastL1ToL2MessageSubtreeRootSiblingPath = assertLength(await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, db), L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH);
 // Update the local trees to include the new l1 to l2 messages
 await db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, l1ToL2MessagesPadded);
-const
+const newL1ToL2MessageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
+const newL1ToL2MessageSubtreeRootSiblingPath = assertLength(await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, db), L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH);
 return {
-
-
-
-
+lastL1ToL2MessageTreeSnapshot,
+lastL1ToL2MessageSubtreeRootSiblingPath,
+newL1ToL2MessageTreeSnapshot,
+newL1ToL2MessageSubtreeRootSiblingPath
 };
 }
 // Updates the merkle trees for a transaction. The first enqueued job for a transaction
-async prepareBaseRollupInputs(
-if (!provingState.verifyState() || !provingState.spongeBlobState) {
-logger.debug('Not preparing base rollup inputs, state invalid');
-return;
-}
-const db = this.dbs.get(provingState.blockNumber);
+async prepareBaseRollupInputs(tx, lastArchive, newL1ToL2MessageTreeSnapshot, startSpongeBlob, db) {
 // We build the base rollup inputs using a mock proof and verification key.
-// These will be overwritten later once we have proven the
-const [ms, hints] = await elapsed(insertSideEffectsAndBuildBaseRollupHints(tx,
+// These will be overwritten later once we have proven the chonk verifier circuit and any public kernels
+const [ms, hints] = await elapsed(insertSideEffectsAndBuildBaseRollupHints(tx, lastArchive, newL1ToL2MessageTreeSnapshot, startSpongeBlob, this.proverId.toField(), db));
 this.metrics.recordBaseRollupInputs(ms);
 const promises = [
 MerkleTreeId.NOTE_HASH_TREE,
@@ -363,10 +821,6 @@ const logger = createLogger('prover-client:orchestrator');
 obj.key,
 obj.value
 ]));
-if (!provingState.verifyState()) {
-logger.debug(`Discarding proving job, state no longer valid`);
-return;
-}
 return [
 hints,
 treeSnapshots
@@ -379,66 +833,71 @@ const logger = createLogger('prover-client:orchestrator');
 logger.debug('Not running base rollup, state invalid');
 return;
 }
+if (!provingState.tryStartProvingBase(txIndex)) {
+logger.debug(`Base rollup for tx ${txIndex} already started.`);
+return;
+}
 const txProvingState = provingState.getTxProvingState(txIndex);
 const { processedTx } = txProvingState;
 const { rollupType, inputs } = txProvingState.getBaseRollupTypeAndInputs();
 logger.debug(`Enqueuing deferred proving base rollup for ${processedTx.hash.toString()}`);
-this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, `ProvingOrchestrator.prover.${inputs instanceof
+this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, `ProvingOrchestrator.prover.${inputs instanceof PrivateTxBaseRollupPrivateInputs ? 'getPrivateTxBaseRollupProof' : 'getPublicTxBaseRollupProof'}`, {
 [Attributes.TX_HASH]: processedTx.hash.toString(),
 [Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType
 }, (signal)=>{
-if (inputs instanceof
-return this.prover.
+if (inputs instanceof PrivateTxBaseRollupPrivateInputs) {
+return this.prover.getPrivateTxBaseRollupProof(inputs, signal, provingState.epochNumber);
 } else {
-return this.prover.
+return this.prover.getPublicTxBaseRollupProof(inputs, signal, provingState.epochNumber);
 }
-}),
+}), (result)=>{
 logger.debug(`Completed proof for ${rollupType} for tx ${processedTx.hash.toString()}`);
-validatePartialState(result.inputs.
+validatePartialState(result.inputs.endTreeSnapshots, txProvingState.treeSnapshots);
 const leafLocation = provingState.setBaseRollupProof(txIndex, result);
 if (provingState.totalNumTxs === 1) {
-
+this.checkAndEnqueueBlockRootRollup(provingState);
 } else {
-
+this.checkAndEnqueueNextMergeRollup(provingState, leafLocation);
 }
 });
 }
-// Enqueues the
-// Once completed, will enqueue the
-
+// Enqueues the public chonk verifier circuit for a given transaction index, or reuses the one already enqueued.
+// Once completed, will enqueue the the public tx base rollup.
+getOrEnqueueChonkVerifier(provingState, txIndex) {
 if (!provingState.verifyState()) {
-logger.debug('Not running
+logger.debug('Not running chonk verifier circuit, state invalid');
 return;
 }
 const txProvingState = provingState.getTxProvingState(txIndex);
 const txHash = txProvingState.processedTx.hash.toString();
+NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH;
 const handleResult = (result)=>{
-logger.debug(`Got
+logger.debug(`Got chonk verifier proof for tx index: ${txIndex}`, {
 txHash
 });
-txProvingState.
-this.provingState?.
-this.
+txProvingState.setPublicChonkVerifierProof(result);
+this.provingState?.cachedChonkVerifierProofs.delete(txHash);
+this.checkAndEnqueueBaseRollup(provingState, txIndex);
 };
-if (this.provingState?.
-logger.debug(`
+if (this.provingState?.cachedChonkVerifierProofs.has(txHash)) {
+logger.debug(`Chonk verifier proof already enqueued for tx index: ${txIndex}`, {
 txHash
 });
-void this.provingState.
+void this.provingState.cachedChonkVerifierProofs.get(txHash).then(handleResult);
 return;
 }
-logger.debug(`Enqueuing
-this.
+logger.debug(`Enqueuing chonk verifier circuit for tx index: ${txIndex}`);
+this.doEnqueueChonkVerifier(txHash, txProvingState.getPublicChonkVerifierPrivateInputs(), handleResult);
 }
-
-if (!provingState
-logger.debug('Not running
+doEnqueueChonkVerifier(txHash, inputs, handler, provingState = this.provingState) {
+if (!provingState.verifyState()) {
+logger.debug('Not running chonk verifier circuit, state invalid');
 return;
 }
-this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.
+this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getPublicChonkVerifierProof', {
 [Attributes.TX_HASH]: txHash,
-[Attributes.PROTOCOL_CIRCUIT_NAME]: '
-}, (signal)=>this.prover.
+[Attributes.PROTOCOL_CIRCUIT_NAME]: 'chonk-verifier-public'
+}, (signal)=>this.prover.getPublicChonkVerifierProof(inputs, signal, provingState.epochNumber)), handler);
 }
 // Executes the merge rollup circuit and stored the output as intermediate state for the parent merge/block root circuit
 // Enqueues the next level of merge if all inputs are available
@@ -447,74 +906,73 @@ const logger = createLogger('prover-client:orchestrator');
|
|
|
447
906
|
logger.debug('Not running merge rollup. State no longer valid.');
|
|
448
907
|
return;
|
|
449
908
|
}
|
|
909
|
+
if (!provingState.tryStartProvingMerge(location)) {
|
|
910
|
+
logger.debug('Merge rollup already started.');
|
|
911
|
+
return;
|
|
912
|
+
}
|
|
450
913
|
const inputs = provingState.getMergeRollupInputs(location);
|
|
451
|
-
this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.
|
|
452
|
-
[Attributes.PROTOCOL_CIRCUIT_NAME]: 'merge
|
|
453
|
-
}, (signal)=>this.prover.
|
|
914
|
+
this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getTxMergeRollupProof', {
|
|
915
|
+
[Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-tx-merge'
|
|
916
|
+
}, (signal)=>this.prover.getTxMergeRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
|
|
454
917
|
provingState.setMergeRollupProof(location, result);
|
|
455
|
-
|
|
918
|
+
this.checkAndEnqueueNextMergeRollup(provingState, location);
|
|
456
919
|
});
|
|
457
920
|
}
|
|
 458  921     // Executes the block root rollup circuit
 459       -
      922  +  enqueueBlockRootRollup(provingState) {
 460  923     if (!provingState.verifyState()) {
 461  924     logger.debug('Not running block root rollup, state no longer valid');
 462  925     return;
 463  926     }
 464       -  provingState.
 465       -
 466       -
      927  +  if (!provingState.tryStartProvingBlockRoot()) {
      928  +  logger.debug('Block root rollup already started.');
      929  +  return;
      930  +  }
      931  +  const { rollupType, inputs } = provingState.getBlockRootRollupTypeAndInputs();
      932  +  logger.debug(`Enqueuing ${rollupType} for block ${provingState.blockNumber}.`);
 467  933     this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getBlockRootRollupProof', {
 468  934     [Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType
 469  935     }, (signal)=>{
 470       -  if (inputs instanceof
 471       -  return this.prover.
 472       -  } else if (inputs instanceof
 473       -  return this.prover.
      936  +  if (inputs instanceof BlockRootFirstRollupPrivateInputs) {
      937  +  return this.prover.getBlockRootFirstRollupProof(inputs, signal, provingState.epochNumber);
      938  +  } else if (inputs instanceof BlockRootSingleTxFirstRollupPrivateInputs) {
      939  +  return this.prover.getBlockRootSingleTxFirstRollupProof(inputs, signal, provingState.epochNumber);
      940  +  } else if (inputs instanceof BlockRootEmptyTxFirstRollupPrivateInputs) {
      941  +  return this.prover.getBlockRootEmptyTxFirstRollupProof(inputs, signal, provingState.epochNumber);
      942  +  } else if (inputs instanceof BlockRootSingleTxRollupPrivateInputs) {
      943  +  return this.prover.getBlockRootSingleTxRollupProof(inputs, signal, provingState.epochNumber);
 474  944     } else {
 475  945     return this.prover.getBlockRootRollupProof(inputs, signal, provingState.epochNumber);
 476  946     }
 477  947     }), async (result)=>{
 478       -
 479       -
 480       -
 481       -
 482       -
 483       -
 484       -
 485       -  const circuitArchiveRoot = result.inputs.newArchive.root;
 486       -  if (!dbArchiveRoot.equals(circuitArchiveRoot)) {
 487       -  logger.error(`New archive root mismatch.\nCircuit: ${result.inputs.newArchive.root}\nComputed: ${dbArchiveRoot}`);
 488       -  provingState.reject(`New archive root mismatch.`);
 489       -  }
 490       -  const endBlobAccumulatorPublicInputs = BlobAccumulatorPublicInputs.fromBatchedBlobAccumulator(provingState.endBlobAccumulator);
 491       -  const circuitEndBlobAccumulatorState = result.inputs.blobPublicInputs.endBlobAccumulator;
 492       -  if (!circuitEndBlobAccumulatorState.equals(endBlobAccumulatorPublicInputs)) {
 493       -  logger.error(`Blob accumulator state mismatch.\nCircuit: ${inspect(circuitEndBlobAccumulatorState)}\nComputed: ${inspect(endBlobAccumulatorPublicInputs)}`);
 494       -  provingState.reject(`Blob accumulator state mismatch.`);
 495       -  }
 496       -  logger.debug(`Completed ${rollupType} proof for block ${provingState.block.number}`);
 497       -  // validatePartialState(result.inputs.end, tx.treeSnapshots); // TODO(palla/prover)
 498       -  const epochProvingState = this.provingState;
 499       -  const leafLocation = epochProvingState.setBlockRootRollupProof(provingState.index, result);
 500       -  if (epochProvingState.totalNumBlocks === 1) {
 501       -  this.enqueueEpochPadding(epochProvingState);
      948  +  // If the proofs were slower than the block header building, then we need to try validating the block header hashes here.
      949  +  await this.verifyBuiltBlockAgainstSyncedState(provingState);
      950  +  logger.debug(`Completed ${rollupType} proof for block ${provingState.blockNumber}`);
      951  +  const leafLocation = provingState.setBlockRootRollupProof(result);
      952  +  const checkpointProvingState = provingState.parentCheckpoint;
      953  +  if (checkpointProvingState.totalNumBlocks === 1) {
      954  +  this.checkAndEnqueueCheckpointRootRollup(checkpointProvingState);
 502  955     } else {
 503       -  this.checkAndEnqueueNextBlockMergeRollup(
      956  +  this.checkAndEnqueueNextBlockMergeRollup(checkpointProvingState, leafLocation);
 504  957     }
 505  958     });
 506  959     }
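The block root rollup now comes in several input variants, and the prover entry point is chosen from the runtime class of the prepared inputs (the instanceof chain above), while the same rollupType string labels the tracing span. A compact sketch of that dispatch shape, using stand-in class names rather than the real *RollupPrivateInputs types:

    type Proof = { bytes: Uint8Array };

    // Stand-in input classes; the concrete class drives which prover call runs.
    abstract class BlockRootInputs {}
    class FirstBlockInputs extends BlockRootInputs {}
    class SingleTxBlockInputs extends BlockRootInputs {}

    interface BlockRootProvers {
      first(inputs: FirstBlockInputs, signal: AbortSignal): Promise<Proof>;
      singleTx(inputs: SingleTxBlockInputs, signal: AbortSignal): Promise<Proof>;
      generic(inputs: BlockRootInputs, signal: AbortSignal): Promise<Proof>;
    }

    function proveBlockRoot(inputs: BlockRootInputs, signal: AbortSignal, provers: BlockRootProvers): Promise<Proof> {
      if (inputs instanceof FirstBlockInputs) {
        return provers.first(inputs, signal);
      } else if (inputs instanceof SingleTxBlockInputs) {
        return provers.singleTx(inputs, signal);
      }
      return provers.generic(inputs, signal); // fallback: the plain block root circuit
    }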
 507  960     // Executes the base parity circuit and stores the intermediate state for the root parity circuit
 508  961     // Enqueues the root parity circuit if all inputs are available
 509       -  enqueueBaseParityCircuit(
      962  +  enqueueBaseParityCircuit(checkpointProvingState, provingState, baseParityIndex) {
 510  963     if (!provingState.verifyState()) {
 511  964     logger.debug('Not running base parity. State no longer valid.');
 512  965     return;
 513  966     }
      967  +  if (!provingState.tryStartProvingBaseParity(baseParityIndex)) {
      968  +  logger.warn(`Base parity ${baseParityIndex} already started.`);
      969  +  return;
      970  +  }
      971  +  const inputs = checkpointProvingState.getBaseParityInputs(baseParityIndex);
 514  972     this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getBaseParityProof', {
 515       -  [Attributes.PROTOCOL_CIRCUIT_NAME]: 'base
      973  +  [Attributes.PROTOCOL_CIRCUIT_NAME]: 'parity-base'
 516  974     }, (signal)=>this.prover.getBaseParityProof(inputs, signal, provingState.epochNumber)), (provingOutput)=>{
 517       -  provingState.setBaseParityProof(
      975  +  provingState.setBaseParityProof(baseParityIndex, provingOutput);
 518  976     this.checkAndEnqueueRootParityCircuit(provingState);
 519  977     });
 520  978     }
@@ -531,12 +989,16 @@ const logger = createLogger('prover-client:orchestrator');
 531  989     logger.debug('Not running root parity. State no longer valid.');
 532  990     return;
 533  991     }
 534       -
      992  +  if (!provingState.tryStartProvingRootParity()) {
      993  +  logger.debug('Root parity already started.');
      994  +  return;
      995  +  }
      996  +  const inputs = provingState.getParityRootInputs();
 535  997     this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getRootParityProof', {
 536       -  [Attributes.PROTOCOL_CIRCUIT_NAME]: 'root
 537       -  }, (signal)=>this.prover.getRootParityProof(inputs, signal, provingState.epochNumber)),
      998  +  [Attributes.PROTOCOL_CIRCUIT_NAME]: 'parity-root'
      999  +  }, (signal)=>this.prover.getRootParityProof(inputs, signal, provingState.epochNumber)), (result)=>{
 538 1000     provingState.setRootParityProof(result);
 539       -
     1001  +  this.checkAndEnqueueBlockRootRollup(provingState);
 540 1002     });
 541 1003     }
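Base parity proofs fan out independently per index, and root parity is only enqueued once every base proof has been stored (the checkAndEnqueueRootParityCircuit call above); completing root parity in turn makes the block root rollup eligible. A small sketch of that readiness check, assuming a fixed number of base parity slots; the names are illustrative, not the actual proving-state fields.

    type ParityProof = { bytes: Uint8Array };

    class ParitySlots {
      private readonly baseProofs: (ParityProof | undefined)[];

      constructor(numBaseCircuits: number) {
        this.baseProofs = new Array<ParityProof | undefined>(numBaseCircuits).fill(undefined);
      }

      setBaseParityProof(index: number, proof: ParityProof): void {
        this.baseProofs[index] = proof;
      }

      // Root parity may start only when every base slot has a proof.
      readyForRootParity(): boolean {
        return this.baseProofs.every(p => p !== undefined);
      }
    }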
 542 1004     // Executes the block merge rollup circuit and stored the output as intermediate state for the parent merge/block root circuit
@@ -546,26 +1008,90 @@ const logger = createLogger('prover-client:orchestrator');
 546 1008     logger.debug('Not running block merge rollup. State no longer valid.');
 547 1009     return;
 548 1010     }
     1011  +  if (!provingState.tryStartProvingBlockMerge(location)) {
     1012  +  logger.debug('Block merge rollup already started.');
     1013  +  return;
     1014  +  }
 549 1015     const inputs = provingState.getBlockMergeRollupInputs(location);
 550 1016     this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getBlockMergeRollupProof', {
 551       -  [Attributes.PROTOCOL_CIRCUIT_NAME]: 'block-merge
     1017  +  [Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-block-merge'
 552 1018     }, (signal)=>this.prover.getBlockMergeRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
 553 1019     provingState.setBlockMergeRollupProof(location, result);
 554 1020     this.checkAndEnqueueNextBlockMergeRollup(provingState, location);
 555 1021     });
 556 1022     }
|
+
enqueueCheckpointRootRollup(provingState) {
|
|
1024
|
+
if (!provingState.verifyState()) {
|
|
1025
|
+
logger.debug('Not running checkpoint root rollup. State no longer valid.');
|
|
1026
|
+
return;
|
|
1027
|
+
}
|
|
1028
|
+
if (!provingState.tryStartProvingCheckpointRoot()) {
|
|
1029
|
+
logger.debug('Checkpoint root rollup already started.');
|
|
1030
|
+
return;
|
|
1031
|
+
}
|
|
1032
|
+
const rollupType = provingState.getCheckpointRootRollupType();
|
|
1033
|
+
logger.debug(`Enqueuing ${rollupType} for checkpoint ${provingState.index}.`);
|
|
1034
|
+
const inputs = provingState.getCheckpointRootRollupInputs();
|
|
1035
|
+
this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getCheckpointRootRollupProof', {
|
|
1036
|
+
[Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType
|
|
1037
|
+
}, (signal)=>{
|
|
1038
|
+
if (inputs instanceof CheckpointRootSingleBlockRollupPrivateInputs) {
|
|
1039
|
+
return this.prover.getCheckpointRootSingleBlockRollupProof(inputs, signal, provingState.epochNumber);
|
|
1040
|
+
} else {
|
|
1041
|
+
return this.prover.getCheckpointRootRollupProof(inputs, signal, provingState.epochNumber);
|
|
1042
|
+
}
|
|
1043
|
+
}), (result)=>{
|
|
1044
|
+
const computedEndBlobAccumulatorState = provingState.getEndBlobAccumulator().toBlobAccumulator();
|
|
1045
|
+
const circuitEndBlobAccumulatorState = result.inputs.endBlobAccumulator;
|
|
1046
|
+
if (!circuitEndBlobAccumulatorState.equals(computedEndBlobAccumulatorState)) {
|
|
1047
|
+
logger.error(`Blob accumulator state mismatch.\nCircuit: ${inspect(circuitEndBlobAccumulatorState)}\nComputed: ${inspect(computedEndBlobAccumulatorState)}`);
|
|
1048
|
+
provingState.reject(`Blob accumulator state mismatch.`);
|
|
1049
|
+
return;
|
|
1050
|
+
}
|
|
1051
|
+
logger.debug(`Completed ${rollupType} proof for checkpoint ${provingState.index}.`);
|
|
1052
|
+
const leafLocation = provingState.setCheckpointRootRollupProof(result);
|
|
1053
|
+
const epochProvingState = provingState.parentEpoch;
|
|
1054
|
+
if (epochProvingState.totalNumCheckpoints === 1) {
|
|
1055
|
+
this.enqueueEpochPadding(epochProvingState);
|
|
1056
|
+
} else {
|
|
1057
|
+
this.checkAndEnqueueNextCheckpointMergeRollup(epochProvingState, leafLocation);
|
|
1058
|
+
}
|
|
1059
|
+
});
|
|
1060
|
+
}
|
|
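In the checkpoint root callback above, the end blob accumulator claimed by the circuit's public inputs is compared against the value the orchestrator accumulated locally, and the proving state is rejected on mismatch rather than letting an inconsistent proof flow into the merge layer. A hedged sketch of that cross-check; the interface and function below are illustrative, not the real blob accumulator types.

    interface AccumulatorLike {
      equals(other: AccumulatorLike): boolean;
    }

    // Returns true if the circuit's claimed accumulator matches the locally
    // computed one; otherwise rejects the proving state and stops this branch.
    function validateEndBlobAccumulator(
      circuitValue: AccumulatorLike,
      computedValue: AccumulatorLike,
      reject: (reason: string) => void,
    ): boolean {
      if (!circuitValue.equals(computedValue)) {
        reject('Blob accumulator state mismatch.');
        return false;
      }
      return true;
    }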
     1061  +  enqueueCheckpointMergeRollup(provingState, location) {
     1062  +  if (!provingState.verifyState()) {
     1063  +  logger.debug('Not running checkpoint merge rollup. State no longer valid.');
     1064  +  return;
     1065  +  }
     1066  +  if (!provingState.tryStartProvingCheckpointMerge(location)) {
     1067  +  logger.debug('Checkpoint merge rollup already started.');
     1068  +  return;
     1069  +  }
     1070  +  const inputs = provingState.getCheckpointMergeRollupInputs(location);
     1071  +  this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getCheckpointMergeRollupProof', {
     1072  +  [Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-checkpoint-merge'
     1073  +  }, (signal)=>this.prover.getCheckpointMergeRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
     1074  +  logger.debug('Completed proof for checkpoint merge rollup.');
     1075  +  provingState.setCheckpointMergeRollupProof(location, result);
     1076  +  this.checkAndEnqueueNextCheckpointMergeRollup(provingState, location);
     1077  +  });
     1078  +  }
 557 1079     enqueueEpochPadding(provingState) {
 558 1080     if (!provingState.verifyState()) {
 559 1081     logger.debug('Not running epoch padding. State no longer valid.');
 560 1082     return;
 561 1083     }
     1084  +  if (!provingState.tryStartProvingPaddingCheckpoint()) {
     1085  +  logger.debug('Padding checkpoint already started.');
     1086  +  return;
     1087  +  }
 562 1088     logger.debug('Padding epoch proof with a padding block root proof.');
 563       -  const inputs = provingState.
 564       -  this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.
 565       -  [Attributes.PROTOCOL_CIRCUIT_NAME]: '
 566       -  }, (signal)=>this.prover.
 567       -  logger.debug('Completed proof for padding
 568       -  provingState.
     1089  +  const inputs = provingState.getPaddingCheckpointInputs();
     1090  +  this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getCheckpointPaddingRollupProof', {
     1091  +  [Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-checkpoint-padding'
     1092  +  }, (signal)=>this.prover.getCheckpointPaddingRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
     1093  +  logger.debug('Completed proof for padding checkpoint.');
     1094  +  provingState.setCheckpointPaddingProof(result);
 569 1095     this.checkAndEnqueueRootRollup(provingState);
 570 1096     });
 571 1097     }
@@ -578,7 +1104,7 @@ const logger = createLogger('prover-client:orchestrator');
 578 1104     logger.debug(`Preparing root rollup`);
 579 1105     const inputs = provingState.getRootRollupInputs();
 580 1106     this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getRootRollupProof', {
 581       -  [Attributes.PROTOCOL_CIRCUIT_NAME]: 'root
     1107  +  [Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-root'
 582 1108     }, (signal)=>this.prover.getRootRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
 583 1109     logger.verbose(`Orchestrator completed root rollup for epoch ${provingState.epochNumber}`);
 584 1110     provingState.setRootRollupProof(result);
@@ -587,36 +1113,23 @@ const logger = createLogger('prover-client:orchestrator');
 587 1113     });
 588 1114     });
 589 1115     }
 590       -
     1116  +  checkAndEnqueueNextMergeRollup(provingState, currentLocation) {
 591 1117     if (!provingState.isReadyForMergeRollup(currentLocation)) {
 592 1118     return;
 593 1119     }
 594 1120     const parentLocation = provingState.getParentLocation(currentLocation);
 595 1121     if (parentLocation.level === 0) {
 596       -
     1122  +  this.checkAndEnqueueBlockRootRollup(provingState);
 597 1123     } else {
 598 1124     this.enqueueMergeRollup(provingState, parentLocation);
 599 1125     }
 600 1126     }
 601       -
 602       -  const blockNumber = provingState.blockNumber;
 603       -  // Accumulate as far as we can, in case blocks came in out of order and we are behind:
 604       -  await this.provingState?.setBlobAccumulators(blockNumber);
     1127  +  checkAndEnqueueBlockRootRollup(provingState) {
 605 1128     if (!provingState.isReadyForBlockRootRollup()) {
 606 1129     logger.debug('Not ready for block root rollup');
 607 1130     return;
 608 1131     }
 609       -
 610       -  logger.debug('Block root rollup already started');
 611       -  return;
 612       -  }
 613       -  // TODO(palla/prover): This closes the fork only on the happy path. If this epoch orchestrator
 614       -  // is aborted and never reaches this point, it will leak the fork. We need to add a global cleanup,
 615       -  // but have to make sure it only runs once all operations are completed, otherwise some function here
 616       -  // will attempt to access the fork after it was closed.
 617       -  logger.debug(`Cleaning up world state fork for ${blockNumber}`);
 618       -  void this.dbs.get(blockNumber)?.close().then(()=>this.dbs.delete(blockNumber)).catch((err)=>logger.error(`Error closing db for block ${blockNumber}`, err));
 619       -  await this.enqueueBlockRootRollup(provingState);
     1132  +  this.enqueueBlockRootRollup(provingState);
 620 1133     }
 621 1134     checkAndEnqueueNextBlockMergeRollup(provingState, currentLocation) {
 622 1135     if (!provingState.isReadyForBlockMerge(currentLocation)) {
@@ -624,11 +1137,28 @@ const logger = createLogger('prover-client:orchestrator');
 624 1137     }
 625 1138     const parentLocation = provingState.getParentLocation(currentLocation);
 626 1139     if (parentLocation.level === 0) {
 627       -  this.
     1140  +  this.checkAndEnqueueCheckpointRootRollup(provingState);
 628 1141     } else {
 629 1142     this.enqueueBlockMergeRollup(provingState, parentLocation);
 630 1143     }
 631 1144     }
     1145  +  checkAndEnqueueCheckpointRootRollup(provingState) {
     1146  +  if (!provingState.isReadyForCheckpointRoot()) {
     1147  +  return;
     1148  +  }
     1149  +  this.enqueueCheckpointRootRollup(provingState);
     1150  +  }
     1151  +  checkAndEnqueueNextCheckpointMergeRollup(provingState, currentLocation) {
     1152  +  if (!provingState.isReadyForCheckpointMerge(currentLocation)) {
     1153  +  return;
     1154  +  }
     1155  +  const parentLocation = provingState.getParentLocation(currentLocation);
     1156  +  if (parentLocation.level === 0) {
     1157  +  this.checkAndEnqueueRootRollup(provingState);
     1158  +  } else {
     1159  +  this.enqueueCheckpointMergeRollup(provingState, parentLocation);
     1160  +  }
     1161  +  }
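The checkAndEnqueue* helpers above climb a binary merge tree: a node's merge circuit is enqueued only once both of its children have proofs, and reaching level 0 hands off to the next layer of the hierarchy (tx merges feed the block root, block merges feed the checkpoint root, checkpoint merges feed the epoch root rollup). A compact sketch of that parent-location recursion, assuming a perfect binary tree addressed by (level, index); the class and functions are illustrative, not the real proving-state API.

    type TreeLocation = { level: number; index: number };
    type Proof = { bytes: Uint8Array };

    class MergeTree {
      private readonly proofs = new Map<string, Proof>();

      private key(loc: TreeLocation): string {
        return `${loc.level}:${loc.index}`;
      }

      setProof(loc: TreeLocation, proof: Proof): void {
        this.proofs.set(this.key(loc), proof);
      }

      getParentLocation(loc: TreeLocation): TreeLocation {
        return { level: loc.level - 1, index: Math.floor(loc.index / 2) };
      }

      // A merge at the parent may start only when this node and its sibling are proven.
      isReadyForMerge(loc: TreeLocation): boolean {
        const sibling = { level: loc.level, index: loc.index ^ 1 };
        return this.proofs.has(this.key(loc)) && this.proofs.has(this.key(sibling));
      }
    }

    // Mirrors checkAndEnqueueNext*MergeRollup: recurse upward or hand off at the root.
    function onProofStored(
      tree: MergeTree,
      loc: TreeLocation,
      enqueueMerge: (parent: TreeLocation) => void,
      enqueueNextLayer: () => void,
    ): void {
      if (!tree.isReadyForMerge(loc)) {
        return; // wait for the sibling proof to land
      }
      const parent = tree.getParentLocation(loc);
      if (parent.level === 0) {
        enqueueNextLayer();
      } else {
        enqueueMerge(parent);
      }
    }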
 632 1162     checkAndEnqueueRootRollup(provingState) {
 633 1163     if (!provingState.isReadyForRootRollup()) {
 634 1164     logger.debug('Not ready for root rollup');
@@ -647,70 +1177,25 @@ const logger = createLogger('prover-client:orchestrator');
 647 1177     return;
 648 1178     }
 649 1179     const txProvingState = provingState.getTxProvingState(txIndex);
 650       -  // This function tries to do AVM proving. If there is a failure, it fakes the proof unless AVM_PROVING_STRICT is defined.
 651       -  // Nothing downstream depends on the AVM proof yet. So having this mode lets us incrementally build the AVM circuit.
 652 1180     const doAvmProving = wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getAvmProof', {
 653 1181     [Attributes.TX_HASH]: txProvingState.processedTx.hash.toString()
 654 1182     }, async (signal)=>{
 655 1183     const inputs = txProvingState.getAvmInputs();
 656       -
 657       -  // TODO(#14234)[Unconditional PIs validation]: Remove the whole try-catch logic and
 658       -  // just keep the next line but removing the second argument (false).
 659       -  return await this.prover.getAvmProof(inputs, false, signal, provingState.epochNumber);
 660       -  } catch (err) {
 661       -  if (process.env.AVM_PROVING_STRICT) {
 662       -  logger.error(`Error thrown when proving AVM circuit with AVM_PROVING_STRICT on`, err);
 663       -  throw err;
 664       -  } else {
 665       -  logger.warn(`Error thrown when proving AVM circuit but AVM_PROVING_STRICT is off. Use snapshotted
 666       -  AVM inputs and carrying on. ${inspect(err)}.`);
 667       -  try {
 668       -  this.metrics.incAvmFallback();
 669       -  const snapshotAvmPrivateInputs = readAvmMinimalPublicTxInputsFromFile();
 670       -  return await this.prover.getAvmProof(snapshotAvmPrivateInputs, true, signal, provingState.epochNumber);
 671       -  } catch (err) {
 672       -  logger.error(`Error thrown when proving snapshotted AVM inputs.`, err);
 673       -  throw err;
 674       -  }
 675       -  }
 676       -  }
     1184  +  return await this.prover.getAvmProof(inputs, signal, provingState.epochNumber);
 677 1185     });
 678 1186     this.deferredProving(provingState, doAvmProving, (proofAndVk)=>{
 679 1187     logger.debug(`Proven VM for tx index: ${txIndex}`);
 680 1188     txProvingState.setAvmProof(proofAndVk);
 681       -  this.
     1189  +  this.checkAndEnqueueBaseRollup(provingState, txIndex);
 682 1190     });
 683 1191     }
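Throughout the file, each circuit is enqueued through deferredProving with a span-wrapped callback that receives an AbortSignal (as in the AVM proving above), and the result is handed to a continuation that stores the proof and checks what becomes enqueueable next. A rough standalone sketch of that shape, assuming an AbortController-based wrapper; it is illustrative only, not the package's actual deferredProving implementation.

    type ProvingJob<T> = (signal: AbortSignal) => Promise<T>;

    // Fire-and-forget proving: start the job with an abort handle, feed the result
    // to a continuation, and report failures instead of throwing into the caller.
    function deferProving<T>(
      job: ProvingJob<T>,
      onResult: (result: T) => void | Promise<void>,
      onError: (err: unknown) => void,
    ): AbortController {
      const controller = new AbortController();
      void job(controller.signal)
        .then(result => onResult(result))
        .catch(err => onError(err));
      return controller;
    }

    // Usage: const handle = deferProving(s => prover.prove(inputs, s), r => state.setProof(r), e => console.error(e));
    // handle.abort() cancels the in-flight job if the proving state becomes invalid.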
 684       -
     1192  +  checkAndEnqueueBaseRollup(provingState, txIndex) {
 685 1193     const txProvingState = provingState.getTxProvingState(txIndex);
 686 1194     if (!txProvingState.ready()) {
 687 1195     return;
 688 1196     }
 689       -  // We must have completed all proving (
     1197  +  // We must have completed all proving (chonk verifier proof and (if required) vm proof are generated), we now move to the base rollup.
 690 1198     logger.debug(`Public functions completed for tx ${txIndex} enqueueing base rollup`);
 691 1199     this.enqueueBaseRollup(provingState, txIndex);
 692 1200     }
 693 1201     }
 694       -  _ts_decorate([
 695       -  trackSpan('ProvingOrchestrator.startNewBlock', (globalVariables)=>({
 696       -  [Attributes.BLOCK_NUMBER]: globalVariables.blockNumber
 697       -  }))
 698       -  ], ProvingOrchestrator.prototype, "startNewBlock", null);
 699       -  _ts_decorate([
 700       -  trackSpan('ProvingOrchestrator.addTxs', (txs)=>({
 701       -  [Attributes.BLOCK_TXS_COUNT]: txs.length
 702       -  }))
 703       -  ], ProvingOrchestrator.prototype, "addTxs", null);
 704       -  _ts_decorate([
 705       -  trackSpan('ProvingOrchestrator.startTubeCircuits')
 706       -  ], ProvingOrchestrator.prototype, "startTubeCircuits", null);
 707       -  _ts_decorate([
 708       -  trackSpan('ProvingOrchestrator.setBlockCompleted', (blockNumber)=>({
 709       -  [Attributes.BLOCK_NUMBER]: blockNumber
 710       -  }))
 711       -  ], ProvingOrchestrator.prototype, "setBlockCompleted", null);
 712       -  _ts_decorate([
 713       -  trackSpan('ProvingOrchestrator.prepareBaseRollupInputs', (_, tx)=>({
 714       -  [Attributes.TX_HASH]: tx.hash.toString()
 715       -  }))
 716       -  ], ProvingOrchestrator.prototype, "prepareBaseRollupInputs", null);