@aztec/prover-client 0.0.0-test.1 → 0.0.1-commit.1142ef1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/block-factory/index.d.ts +2 -0
- package/dest/block-factory/index.d.ts.map +1 -0
- package/dest/block-factory/light.d.ts +38 -0
- package/dest/block-factory/light.d.ts.map +1 -0
- package/dest/block-factory/light.js +106 -0
- package/dest/config.d.ts +8 -8
- package/dest/config.d.ts.map +1 -1
- package/dest/config.js +12 -2
- package/dest/index.d.ts +1 -1
- package/dest/light/index.d.ts +2 -0
- package/dest/light/index.d.ts.map +1 -0
- package/dest/light/index.js +1 -0
- package/dest/light/lightweight_checkpoint_builder.d.ts +43 -0
- package/dest/light/lightweight_checkpoint_builder.d.ts.map +1 -0
- package/dest/light/lightweight_checkpoint_builder.js +183 -0
- package/dest/mocks/fixtures.d.ts +8 -8
- package/dest/mocks/fixtures.d.ts.map +1 -1
- package/dest/mocks/fixtures.js +34 -16
- package/dest/mocks/test_context.d.ts +42 -32
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +144 -87
- package/dest/orchestrator/block-building-helpers.d.ts +37 -30
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
- package/dest/orchestrator/block-building-helpers.js +170 -189
- package/dest/orchestrator/block-proving-state.d.ts +70 -48
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/block-proving-state.js +282 -177
- package/dest/orchestrator/checkpoint-proving-state.d.ts +63 -0
- package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -0
- package/dest/orchestrator/checkpoint-proving-state.js +210 -0
- package/dest/orchestrator/epoch-proving-state.d.ts +41 -27
- package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/epoch-proving-state.js +143 -73
- package/dest/orchestrator/index.d.ts +1 -1
- package/dest/orchestrator/orchestrator.d.ts +37 -34
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +788 -277
- package/dest/orchestrator/orchestrator_metrics.d.ts +1 -1
- package/dest/orchestrator/orchestrator_metrics.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator_metrics.js +2 -6
- package/dest/orchestrator/tx-proving-state.d.ts +15 -12
- package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/tx-proving-state.js +27 -44
- package/dest/prover-client/factory.d.ts +3 -3
- package/dest/prover-client/factory.d.ts.map +1 -1
- package/dest/prover-client/index.d.ts +1 -1
- package/dest/prover-client/prover-client.d.ts +5 -5
- package/dest/prover-client/prover-client.d.ts.map +1 -1
- package/dest/prover-client/prover-client.js +6 -5
- package/dest/prover-client/server-epoch-prover.d.ts +16 -12
- package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
- package/dest/prover-client/server-epoch-prover.js +11 -11
- package/dest/proving_broker/broker_prover_facade.d.ts +25 -17
- package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
- package/dest/proving_broker/broker_prover_facade.js +59 -40
- package/dest/proving_broker/config.d.ts +19 -10
- package/dest/proving_broker/config.d.ts.map +1 -1
- package/dest/proving_broker/config.js +23 -6
- package/dest/proving_broker/factory.d.ts +2 -2
- package/dest/proving_broker/factory.d.ts.map +1 -1
- package/dest/proving_broker/factory.js +5 -1
- package/dest/proving_broker/fixtures.d.ts +3 -2
- package/dest/proving_broker/fixtures.d.ts.map +1 -1
- package/dest/proving_broker/fixtures.js +3 -2
- package/dest/proving_broker/index.d.ts +1 -1
- package/dest/proving_broker/proof_store/factory.d.ts +2 -2
- package/dest/proving_broker/proof_store/factory.js +1 -1
- package/dest/proving_broker/proof_store/gcs_proof_store.d.ts +1 -1
- package/dest/proving_broker/proof_store/gcs_proof_store.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/gcs_proof_store.js +1 -0
- package/dest/proving_broker/proof_store/index.d.ts +2 -1
- package/dest/proving_broker/proof_store/index.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/index.js +1 -0
- package/dest/proving_broker/proof_store/inline_proof_store.d.ts +1 -1
- package/dest/proving_broker/proof_store/inline_proof_store.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/proof_store.d.ts +1 -1
- package/dest/proving_broker/proving_agent.d.ts +6 -11
- package/dest/proving_broker/proving_agent.d.ts.map +1 -1
- package/dest/proving_broker/proving_agent.js +84 -63
- package/dest/proving_broker/proving_broker.d.ts +13 -4
- package/dest/proving_broker/proving_broker.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.js +40 -33
- package/dest/proving_broker/proving_broker_database/memory.d.ts +3 -2
- package/dest/proving_broker/proving_broker_database/memory.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/memory.js +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.d.ts +5 -3
- package/dest/proving_broker/proving_broker_database/persisted.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.js +401 -11
- package/dest/proving_broker/proving_broker_database.d.ts +3 -2
- package/dest/proving_broker/proving_broker_database.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.d.ts +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.js +11 -35
- package/dest/proving_broker/proving_job_controller.d.ts +9 -9
- package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.js +87 -60
- package/dest/proving_broker/rpc.d.ts +4 -6
- package/dest/proving_broker/rpc.d.ts.map +1 -1
- package/dest/proving_broker/rpc.js +1 -4
- package/dest/test/mock_proof_store.d.ts +9 -0
- package/dest/test/mock_proof_store.d.ts.map +1 -0
- package/dest/test/mock_proof_store.js +10 -0
- package/dest/test/mock_prover.d.ts +23 -17
- package/dest/test/mock_prover.d.ts.map +1 -1
- package/dest/test/mock_prover.js +38 -20
- package/package.json +34 -31
- package/src/block-factory/index.ts +1 -0
- package/src/block-factory/light.ts +136 -0
- package/src/config.ts +25 -9
- package/src/light/index.ts +1 -0
- package/src/light/lightweight_checkpoint_builder.ts +249 -0
- package/src/mocks/fixtures.ts +44 -39
- package/src/mocks/test_context.ts +218 -116
- package/src/orchestrator/block-building-helpers.ts +258 -334
- package/src/orchestrator/block-proving-state.ts +325 -231
- package/src/orchestrator/checkpoint-proving-state.ts +303 -0
- package/src/orchestrator/epoch-proving-state.ts +191 -113
- package/src/orchestrator/orchestrator.ts +587 -318
- package/src/orchestrator/orchestrator_metrics.ts +2 -6
- package/src/orchestrator/tx-proving-state.ts +48 -66
- package/src/prover-client/factory.ts +6 -2
- package/src/prover-client/prover-client.ts +20 -25
- package/src/prover-client/server-epoch-prover.ts +40 -22
- package/src/proving_broker/broker_prover_facade.ts +206 -128
- package/src/proving_broker/config.ts +25 -7
- package/src/proving_broker/factory.ts +2 -1
- package/src/proving_broker/fixtures.ts +8 -3
- package/src/proving_broker/proof_store/factory.ts +1 -1
- package/src/proving_broker/proof_store/gcs_proof_store.ts +5 -1
- package/src/proving_broker/proof_store/index.ts +1 -0
- package/src/proving_broker/proof_store/inline_proof_store.ts +1 -1
- package/src/proving_broker/proving_agent.ts +90 -64
- package/src/proving_broker/proving_broker.ts +57 -41
- package/src/proving_broker/proving_broker_database/memory.ts +3 -2
- package/src/proving_broker/proving_broker_database/persisted.ts +29 -13
- package/src/proving_broker/proving_broker_database.ts +2 -1
- package/src/proving_broker/proving_broker_instrumentation.ts +10 -35
- package/src/proving_broker/proving_job_controller.ts +92 -81
- package/src/proving_broker/rpc.ts +1 -6
- package/src/test/mock_proof_store.ts +14 -0
- package/src/test/mock_prover.ts +156 -64
- package/dest/bin/get-proof-inputs.d.ts +0 -2
- package/dest/bin/get-proof-inputs.d.ts.map +0 -1
- package/dest/bin/get-proof-inputs.js +0 -51
- package/dest/block_builder/index.d.ts +0 -6
- package/dest/block_builder/index.d.ts.map +0 -1
- package/dest/block_builder/light.d.ts +0 -33
- package/dest/block_builder/light.d.ts.map +0 -1
- package/dest/block_builder/light.js +0 -82
- package/dest/proving_broker/proving_agent_instrumentation.d.ts +0 -8
- package/dest/proving_broker/proving_agent_instrumentation.d.ts.map +0 -1
- package/dest/proving_broker/proving_agent_instrumentation.js +0 -16
- package/src/bin/get-proof-inputs.ts +0 -59
- package/src/block_builder/index.ts +0 -6
- package/src/block_builder/light.ts +0 -101
- package/src/proving_broker/proving_agent_instrumentation.ts +0 -21
- /package/dest/{block_builder → block-factory}/index.js +0 -0
@@ -1,33 +1,404 @@
-function
-
-
-
-
+function applyDecs2203RFactory() {
+    function createAddInitializerMethod(initializers, decoratorFinishedRef) {
+        return function addInitializer(initializer) {
+            assertNotFinished(decoratorFinishedRef, "addInitializer");
+            assertCallable(initializer, "An initializer");
+            initializers.push(initializer);
+        };
+    }
+    function memberDec(dec, name, desc, initializers, kind, isStatic, isPrivate, metadata, value) {
+        var kindStr;
+        switch(kind){
+            case 1:
+                kindStr = "accessor";
+                break;
+            case 2:
+                kindStr = "method";
+                break;
+            case 3:
+                kindStr = "getter";
+                break;
+            case 4:
+                kindStr = "setter";
+                break;
+            default:
+                kindStr = "field";
+        }
+        var ctx = {
+            kind: kindStr,
+            name: isPrivate ? "#" + name : name,
+            static: isStatic,
+            private: isPrivate,
+            metadata: metadata
+        };
+        var decoratorFinishedRef = {
+            v: false
+        };
+        ctx.addInitializer = createAddInitializerMethod(initializers, decoratorFinishedRef);
+        var get, set;
+        if (kind === 0) {
+            if (isPrivate) {
+                get = desc.get;
+                set = desc.set;
+            } else {
+                get = function() {
+                    return this[name];
+                };
+                set = function(v) {
+                    this[name] = v;
+                };
+            }
+        } else if (kind === 2) {
+            get = function() {
+                return desc.value;
+            };
+        } else {
+            if (kind === 1 || kind === 3) {
+                get = function() {
+                    return desc.get.call(this);
+                };
+            }
+            if (kind === 1 || kind === 4) {
+                set = function(v) {
+                    desc.set.call(this, v);
+                };
+            }
+        }
+        ctx.access = get && set ? {
+            get: get,
+            set: set
+        } : get ? {
+            get: get
+        } : {
+            set: set
+        };
+        try {
+            return dec(value, ctx);
+        } finally{
+            decoratorFinishedRef.v = true;
+        }
+    }
+    function assertNotFinished(decoratorFinishedRef, fnName) {
+        if (decoratorFinishedRef.v) {
+            throw new Error("attempted to call " + fnName + " after decoration was finished");
+        }
+    }
+    function assertCallable(fn, hint) {
+        if (typeof fn !== "function") {
+            throw new TypeError(hint + " must be a function");
+        }
+    }
+    function assertValidReturnValue(kind, value) {
+        var type = typeof value;
+        if (kind === 1) {
+            if (type !== "object" || value === null) {
+                throw new TypeError("accessor decorators must return an object with get, set, or init properties or void 0");
+            }
+            if (value.get !== undefined) {
+                assertCallable(value.get, "accessor.get");
+            }
+            if (value.set !== undefined) {
+                assertCallable(value.set, "accessor.set");
+            }
+            if (value.init !== undefined) {
+                assertCallable(value.init, "accessor.init");
+            }
+        } else if (type !== "function") {
+            var hint;
+            if (kind === 0) {
+                hint = "field";
+            } else if (kind === 10) {
+                hint = "class";
+            } else {
+                hint = "method";
+            }
+            throw new TypeError(hint + " decorators must return a function or void 0");
+        }
+    }
+    function applyMemberDec(ret, base, decInfo, name, kind, isStatic, isPrivate, initializers, metadata) {
+        var decs = decInfo[0];
+        var desc, init, value;
+        if (isPrivate) {
+            if (kind === 0 || kind === 1) {
+                desc = {
+                    get: decInfo[3],
+                    set: decInfo[4]
+                };
+            } else if (kind === 3) {
+                desc = {
+                    get: decInfo[3]
+                };
+            } else if (kind === 4) {
+                desc = {
+                    set: decInfo[3]
+                };
+            } else {
+                desc = {
+                    value: decInfo[3]
+                };
+            }
+        } else if (kind !== 0) {
+            desc = Object.getOwnPropertyDescriptor(base, name);
+        }
+        if (kind === 1) {
+            value = {
+                get: desc.get,
+                set: desc.set
+            };
+        } else if (kind === 2) {
+            value = desc.value;
+        } else if (kind === 3) {
+            value = desc.get;
+        } else if (kind === 4) {
+            value = desc.set;
+        }
+        var newValue, get, set;
+        if (typeof decs === "function") {
+            newValue = memberDec(decs, name, desc, initializers, kind, isStatic, isPrivate, metadata, value);
+            if (newValue !== void 0) {
+                assertValidReturnValue(kind, newValue);
+                if (kind === 0) {
+                    init = newValue;
+                } else if (kind === 1) {
+                    init = newValue.init;
+                    get = newValue.get || value.get;
+                    set = newValue.set || value.set;
+                    value = {
+                        get: get,
+                        set: set
+                    };
+                } else {
+                    value = newValue;
+                }
+            }
+        } else {
+            for(var i = decs.length - 1; i >= 0; i--){
+                var dec = decs[i];
+                newValue = memberDec(dec, name, desc, initializers, kind, isStatic, isPrivate, metadata, value);
+                if (newValue !== void 0) {
+                    assertValidReturnValue(kind, newValue);
+                    var newInit;
+                    if (kind === 0) {
+                        newInit = newValue;
+                    } else if (kind === 1) {
+                        newInit = newValue.init;
+                        get = newValue.get || value.get;
+                        set = newValue.set || value.set;
+                        value = {
+                            get: get,
+                            set: set
+                        };
+                    } else {
+                        value = newValue;
+                    }
+                    if (newInit !== void 0) {
+                        if (init === void 0) {
+                            init = newInit;
+                        } else if (typeof init === "function") {
+                            init = [
+                                init,
+                                newInit
+                            ];
+                        } else {
+                            init.push(newInit);
+                        }
+                    }
+                }
+            }
+        }
+        if (kind === 0 || kind === 1) {
+            if (init === void 0) {
+                init = function(instance, init) {
+                    return init;
+                };
+            } else if (typeof init !== "function") {
+                var ownInitializers = init;
+                init = function(instance, init) {
+                    var value = init;
+                    for(var i = 0; i < ownInitializers.length; i++){
+                        value = ownInitializers[i].call(instance, value);
+                    }
+                    return value;
+                };
+            } else {
+                var originalInitializer = init;
+                init = function(instance, init) {
+                    return originalInitializer.call(instance, init);
+                };
+            }
+            ret.push(init);
+        }
+        if (kind !== 0) {
+            if (kind === 1) {
+                desc.get = value.get;
+                desc.set = value.set;
+            } else if (kind === 2) {
+                desc.value = value;
+            } else if (kind === 3) {
+                desc.get = value;
+            } else if (kind === 4) {
+                desc.set = value;
+            }
+            if (isPrivate) {
+                if (kind === 1) {
+                    ret.push(function(instance, args) {
+                        return value.get.call(instance, args);
+                    });
+                    ret.push(function(instance, args) {
+                        return value.set.call(instance, args);
+                    });
+                } else if (kind === 2) {
+                    ret.push(value);
+                } else {
+                    ret.push(function(instance, args) {
+                        return value.call(instance, args);
+                    });
+                }
+            } else {
+                Object.defineProperty(base, name, desc);
+            }
+        }
+    }
+    function applyMemberDecs(Class, decInfos, metadata) {
+        var ret = [];
+        var protoInitializers;
+        var staticInitializers;
+        var existingProtoNonFields = new Map();
+        var existingStaticNonFields = new Map();
+        for(var i = 0; i < decInfos.length; i++){
+            var decInfo = decInfos[i];
+            if (!Array.isArray(decInfo)) continue;
+            var kind = decInfo[1];
+            var name = decInfo[2];
+            var isPrivate = decInfo.length > 3;
+            var isStatic = kind >= 5;
+            var base;
+            var initializers;
+            if (isStatic) {
+                base = Class;
+                kind = kind - 5;
+                staticInitializers = staticInitializers || [];
+                initializers = staticInitializers;
+            } else {
+                base = Class.prototype;
+                protoInitializers = protoInitializers || [];
+                initializers = protoInitializers;
+            }
+            if (kind !== 0 && !isPrivate) {
+                var existingNonFields = isStatic ? existingStaticNonFields : existingProtoNonFields;
+                var existingKind = existingNonFields.get(name) || 0;
+                if (existingKind === true || existingKind === 3 && kind !== 4 || existingKind === 4 && kind !== 3) {
+                    throw new Error("Attempted to decorate a public method/accessor that has the same name as a previously decorated public method/accessor. This is not currently supported by the decorators plugin. Property name was: " + name);
+                } else if (!existingKind && kind > 2) {
+                    existingNonFields.set(name, kind);
+                } else {
+                    existingNonFields.set(name, true);
+                }
+            }
+            applyMemberDec(ret, base, decInfo, name, kind, isStatic, isPrivate, initializers, metadata);
+        }
+        pushInitializers(ret, protoInitializers);
+        pushInitializers(ret, staticInitializers);
+        return ret;
+    }
+    function pushInitializers(ret, initializers) {
+        if (initializers) {
+            ret.push(function(instance) {
+                for(var i = 0; i < initializers.length; i++){
+                    initializers[i].call(instance);
+                }
+                return instance;
+            });
+        }
+    }
+    function applyClassDecs(targetClass, classDecs, metadata) {
+        if (classDecs.length > 0) {
+            var initializers = [];
+            var newClass = targetClass;
+            var name = targetClass.name;
+            for(var i = classDecs.length - 1; i >= 0; i--){
+                var decoratorFinishedRef = {
+                    v: false
+                };
+                try {
+                    var nextNewClass = classDecs[i](newClass, {
+                        kind: "class",
+                        name: name,
+                        addInitializer: createAddInitializerMethod(initializers, decoratorFinishedRef),
+                        metadata
+                    });
+                } finally{
+                    decoratorFinishedRef.v = true;
+                }
+                if (nextNewClass !== undefined) {
+                    assertValidReturnValue(10, nextNewClass);
+                    newClass = nextNewClass;
+                }
+            }
+            return [
+                defineMetadata(newClass, metadata),
+                function() {
+                    for(var i = 0; i < initializers.length; i++){
+                        initializers[i].call(newClass);
+                    }
+                }
+            ];
+        }
+    }
+    function defineMetadata(Class, metadata) {
+        return Object.defineProperty(Class, Symbol.metadata || Symbol.for("Symbol.metadata"), {
+            configurable: true,
+            enumerable: true,
+            value: metadata
+        });
+    }
+    return function applyDecs2203R(targetClass, memberDecs, classDecs, parentClass) {
+        if (parentClass !== void 0) {
+            var parentMetadata = parentClass[Symbol.metadata || Symbol.for("Symbol.metadata")];
+        }
+        var metadata = Object.create(parentMetadata === void 0 ? null : parentMetadata);
+        var e = applyMemberDecs(targetClass, memberDecs, metadata);
+        if (!classDecs.length) defineMetadata(targetClass, metadata);
+        return {
+            e: e,
+            get c () {
+                return applyClassDecs(targetClass, classDecs, metadata);
+            }
+        };
+    };
 }
-
-
+function _apply_decs_2203_r(targetClass, memberDecs, classDecs, parentClass) {
+    return (_apply_decs_2203_r = applyDecs2203RFactory())(targetClass, memberDecs, classDecs, parentClass);
+}
+var _dec, _dec1, _dec2, _dec3, _dec4, _initProto;
+import { L1_TO_L2_MSG_SUBTREE_HEIGHT, L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH, NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, NUM_BASE_PARITY_PER_ROOT_PARITY } from '@aztec/constants';
+import { BlockNumber } from '@aztec/foundation/branded-types';
+import { padArrayEnd } from '@aztec/foundation/collection';
+import { Fr } from '@aztec/foundation/curves/bn254';
 import { AbortError } from '@aztec/foundation/error';
-import { Fr } from '@aztec/foundation/fields';
 import { createLogger } from '@aztec/foundation/log';
 import { promiseWithResolvers } from '@aztec/foundation/promise';
 import { assertLength } from '@aztec/foundation/serialize';
 import { pushTestData } from '@aztec/foundation/testing';
 import { elapsed } from '@aztec/foundation/timer';
-import {
-import { L2Block } from '@aztec/stdlib/block';
-import { BaseParityInputs } from '@aztec/stdlib/parity';
-import { makeEmptyRecursiveProof } from '@aztec/stdlib/proofs';
-import { EmptyBlockRootRollupInputs, PrivateBaseRollupInputs, SingleTxBlockRootRollupInputs, TubeInputs } from '@aztec/stdlib/rollup';
+import { BlockRootEmptyTxFirstRollupPrivateInputs, BlockRootFirstRollupPrivateInputs, BlockRootSingleTxFirstRollupPrivateInputs, BlockRootSingleTxRollupPrivateInputs, CheckpointRootSingleBlockRollupPrivateInputs, PrivateTxBaseRollupPrivateInputs } from '@aztec/stdlib/rollup';
 import { MerkleTreeId } from '@aztec/stdlib/trees';
-import { toNumBlobFields } from '@aztec/stdlib/tx';
-import { VerificationKeyData } from '@aztec/stdlib/vks';
 import { Attributes, getTelemetryClient, trackSpan, wrapCallbackInSpan } from '@aztec/telemetry-client';
 import { inspect } from 'util';
-import {
+import { buildHeaderFromCircuitOutputs, getLastSiblingPath, getPublicChonkVerifierPrivateInputsFromTx, getRootTreeSiblingPath, getSubtreeSiblingPath, getTreeSnapshot, insertSideEffectsAndBuildBaseRollupHints, validatePartialState, validateTx } from './block-building-helpers.js';
 import { EpochProvingState } from './epoch-proving-state.js';
 import { ProvingOrchestratorMetrics } from './orchestrator_metrics.js';
 import { TxProvingState } from './tx-proving-state.js';
 const logger = createLogger('prover-client:orchestrator');
+_dec = trackSpan('ProvingOrchestrator.startNewBlock', (blockNumber)=>({
+        [Attributes.BLOCK_NUMBER]: blockNumber
+    })), _dec1 = trackSpan('ProvingOrchestrator.addTxs', (txs)=>({
+        [Attributes.BLOCK_TXS_COUNT]: txs.length
+    })), _dec2 = trackSpan('ProvingOrchestrator.startChonkVerifierCircuits'), _dec3 = trackSpan('ProvingOrchestrator.setBlockCompleted', (blockNumber)=>({
+        [Attributes.BLOCK_NUMBER]: blockNumber
+    })), _dec4 = trackSpan('ProvingOrchestrator.prepareBaseRollupInputs', (tx)=>({
+        [Attributes.TX_HASH]: tx.hash.toString()
+    }));
 /**
  * Implements an event driven proving scheduler to build the recursive proof tree. The idea being:
  * 1. Transactions are provided to the scheduler post simulation.
@@ -43,16 +414,46 @@ const logger = createLogger('prover-client:orchestrator');
     dbProvider;
     prover;
     proverId;
+    static{
+        ({ e: [_initProto] } = _apply_decs_2203_r(this, [
+            [
+                _dec,
+                2,
+                "startNewBlock"
+            ],
+            [
+                _dec1,
+                2,
+                "addTxs"
+            ],
+            [
+                _dec2,
+                2,
+                "startChonkVerifierCircuits"
+            ],
+            [
+                _dec3,
+                2,
+                "setBlockCompleted"
+            ],
+            [
+                _dec4,
+                2,
+                "prepareBaseRollupInputs"
+            ]
+        ], []));
+    }
     provingState;
     pendingProvingJobs;
     provingPromise;
     metrics;
+    // eslint-disable-next-line aztec-custom/no-non-primitive-in-collections
     dbs;
-    constructor(dbProvider, prover, proverId
+    constructor(dbProvider, prover, proverId, telemetryClient = getTelemetryClient()){
         this.dbProvider = dbProvider;
         this.prover = prover;
         this.proverId = proverId;
-        this.provingState = undefined;
+        this.provingState = (_initProto(this), undefined);
         this.pendingProvingJobs = [];
         this.provingPromise = undefined;
         this.dbs = new Map();
@@ -68,81 +469,137 @@ const logger = createLogger('prover-client:orchestrator');
         this.cancel();
         return Promise.resolve();
     }
-    startNewEpoch(epochNumber,
+    startNewEpoch(epochNumber, totalNumCheckpoints, finalBlobBatchingChallenges) {
+        if (this.provingState?.verifyState()) {
+            throw new Error(`Cannot start epoch ${epochNumber} when epoch ${this.provingState.epochNumber} is still being processed.`);
+        }
         const { promise: _promise, resolve, reject } = promiseWithResolvers();
         const promise = _promise.catch((reason)=>({
                 status: 'failure',
                 reason
             }));
-
-
-        }
-        logger.info(`Starting epoch ${epochNumber} with ${totalNumBlocks} blocks`);
-        this.provingState = new EpochProvingState(epochNumber, firstBlockNumber, totalNumBlocks, resolve, reject);
+        logger.info(`Starting epoch ${epochNumber} with ${totalNumCheckpoints} checkpoints.`);
+        this.provingState = new EpochProvingState(epochNumber, totalNumCheckpoints, finalBlobBatchingChallenges, (provingState)=>this.checkAndEnqueueCheckpointRootRollup(provingState), resolve, reject);
         this.provingPromise = promise;
     }
+    async startNewCheckpoint(checkpointIndex, constants, l1ToL2Messages, totalNumBlocks, headerOfLastBlockInPreviousCheckpoint) {
+        if (!this.provingState) {
+            throw new Error('Empty epoch proving state. Call startNewEpoch before starting a checkpoint.');
+        }
+        if (!this.provingState.isAcceptingCheckpoints()) {
+            throw new Error(`Epoch not accepting further checkpoints.`);
+        }
+        // Fork world state at the end of the immediately previous block.
+        const lastBlockNumber = headerOfLastBlockInPreviousCheckpoint.globalVariables.blockNumber;
+        const db = await this.dbProvider.fork(lastBlockNumber);
+        const firstBlockNumber = BlockNumber(lastBlockNumber + 1);
+        this.dbs.set(firstBlockNumber, db);
+        // Get archive sibling path before any block in this checkpoint lands.
+        const lastArchiveSiblingPath = await getLastSiblingPath(MerkleTreeId.ARCHIVE, db);
+        // Insert all the l1 to l2 messages into the db. And get the states before and after the insertion.
+        const { lastL1ToL2MessageTreeSnapshot, lastL1ToL2MessageSubtreeRootSiblingPath, newL1ToL2MessageTreeSnapshot, newL1ToL2MessageSubtreeRootSiblingPath } = await this.updateL1ToL2MessageTree(l1ToL2Messages, db);
+        this.provingState.startNewCheckpoint(checkpointIndex, constants, totalNumBlocks, headerOfLastBlockInPreviousCheckpoint, lastArchiveSiblingPath, l1ToL2Messages, lastL1ToL2MessageTreeSnapshot, lastL1ToL2MessageSubtreeRootSiblingPath, newL1ToL2MessageTreeSnapshot, newL1ToL2MessageSubtreeRootSiblingPath);
+    }
     /**
      * Starts off a new block
-     * @param
-     * @param
-     *
-
+     * @param blockNumber - The block number
+     * @param timestamp - The timestamp of the block. This is only required for constructing the private inputs for the
+     * block that doesn't have any txs.
+     * @param totalNumTxs - The total number of txs in the block
+     */ async startNewBlock(blockNumber, timestamp, totalNumTxs) {
         if (!this.provingState) {
-            throw new Error(
-        }
-
-
-
-
-
-
-
-
-        //
-
-
-
-
-
-
+            throw new Error('Empty epoch proving state. Call startNewEpoch before starting a block.');
+        }
+        const checkpointProvingState = this.provingState.getCheckpointProvingStateByBlockNumber(blockNumber);
+        if (!checkpointProvingState) {
+            throw new Error(`Checkpoint not started. Call startNewCheckpoint first.`);
+        }
+        if (!checkpointProvingState.isAcceptingBlocks()) {
+            throw new Error(`Checkpoint not accepting further blocks.`);
+        }
+        const constants = checkpointProvingState.constants;
+        logger.info(`Starting block ${blockNumber} for slot ${constants.slotNumber}.`);
+        // Fork the db only when it's not already set. The db for the first block is set in `startNewCheckpoint`.
+        if (!this.dbs.has(blockNumber)) {
+            // Fork world state at the end of the immediately previous block
+            const db = await this.dbProvider.fork(BlockNumber(blockNumber - 1));
+            this.dbs.set(blockNumber, db);
+        }
+        const db = this.dbs.get(blockNumber);
+        // Get archive snapshot and sibling path before any txs in this block lands.
+        const lastArchiveTreeSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
+        const lastArchiveSiblingPath = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE, db);
+        const blockProvingState = checkpointProvingState.startNewBlock(blockNumber, timestamp, totalNumTxs, lastArchiveTreeSnapshot, lastArchiveSiblingPath);
+        // Enqueue base parity circuits for the first block in the checkpoint.
+        if (blockProvingState.index === 0) {
+            for(let i = 0; i < NUM_BASE_PARITY_PER_ROOT_PARITY; i++){
+                this.enqueueBaseParityCircuit(checkpointProvingState, blockProvingState, i);
+            }
+        }
+        // Because `addTxs` won't be called for a block without txs, and that's where the sponge blob state is computed.
+        // We need to set its end sponge blob here, which will become the start sponge blob for the next block.
+        if (totalNumTxs === 0) {
+            const endState = await db.getStateReference();
+            blockProvingState.setEndState(endState);
+            const endSpongeBlob = blockProvingState.getStartSpongeBlob().clone();
+            const blockEndBlobFields = blockProvingState.getBlockEndBlobFields();
+            await endSpongeBlob.absorb(blockEndBlobFields);
+            blockProvingState.setEndSpongeBlob(endSpongeBlob);
+            // And also try to accumulate the blobs as far as we can:
+            await this.provingState.setBlobAccumulators();
         }
     }
     /**
      * The interface to add simulated transactions to the scheduler. This can only be called once per block.
      * @param txs - The transactions to be proven
      */ async addTxs(txs) {
+        if (!this.provingState) {
+            throw new Error(`Empty epoch proving state. Call startNewEpoch before adding txs.`);
+        }
         if (!txs.length) {
             // To avoid an ugly throw below. If we require an empty block, we can just call setBlockCompleted
             // on a block with no txs. We cannot do that here because we cannot find the blockNumber without any txs.
             logger.warn(`Provided no txs to orchestrator addTxs.`);
             return;
         }
-        const blockNumber = txs[0].
-        const provingState = this.provingState
+        const blockNumber = BlockNumber(txs[0].globalVariables.blockNumber);
+        const provingState = this.provingState.getBlockProvingStateByBlockNumber(blockNumber);
         if (!provingState) {
-            throw new Error(`
+            throw new Error(`Proving state for block ${blockNumber} not found. Call startNewBlock first.`);
+        }
+        if (provingState.totalNumTxs !== txs.length) {
+            throw new Error(`Block ${blockNumber} should be filled with ${provingState.totalNumTxs} txs. Received ${txs.length} txs.`);
         }
-        if (provingState.
+        if (!provingState.isAcceptingTxs()) {
             throw new Error(`Block ${blockNumber} has been initialized with transactions.`);
         }
-
-
-
+        logger.info(`Adding ${txs.length} transactions to block ${blockNumber}`);
+        const db = this.dbs.get(blockNumber);
+        const lastArchive = provingState.lastArchiveTreeSnapshot;
+        const newL1ToL2MessageTreeSnapshot = provingState.newL1ToL2MessageTreeSnapshot;
+        const spongeBlobState = provingState.getStartSpongeBlob().clone();
         for (const tx of txs){
             try {
                 if (!provingState.verifyState()) {
                     throw new Error(`Invalid proving state when adding a tx`);
                 }
                 validateTx(tx);
-                logger.
-                const
-                const
+                logger.debug(`Received transaction: ${tx.hash}`);
+                const startSpongeBlob = spongeBlobState.clone();
+                const [hints, treeSnapshots] = await this.prepareBaseRollupInputs(tx, lastArchive, newL1ToL2MessageTreeSnapshot, startSpongeBlob, db);
+                if (!provingState.verifyState()) {
+                    throw new Error(`Unable to add transaction, preparing base inputs failed`);
+                }
+                await spongeBlobState.absorb(tx.txEffect.toBlobFields());
+                const txProvingState = new TxProvingState(tx, hints, treeSnapshots, this.proverId.toField());
                 const txIndex = provingState.addNewTx(txProvingState);
-                this.getOrEnqueueTube(provingState, txIndex);
                 if (txProvingState.requireAvmProof) {
+                    this.getOrEnqueueChonkVerifier(provingState, txIndex);
                     logger.debug(`Enqueueing public VM for tx ${txIndex}`);
                     this.enqueueVM(provingState, txIndex);
+                } else {
+                    logger.debug(`Enqueueing base rollup for private-only tx ${txIndex}`);
+                    this.enqueueBaseRollup(provingState, txIndex);
                 }
             } catch (err) {
                 throw new Error(`Error adding transaction ${tx.hash.toString()} to block ${blockNumber}: ${err.message}`, {
@@ -150,22 +607,33 @@ const logger = createLogger('prover-client:orchestrator');
                 });
             }
         }
+        const endState = await db.getStateReference();
+        provingState.setEndState(endState);
+        const blockEndBlobFields = provingState.getBlockEndBlobFields();
+        await spongeBlobState.absorb(blockEndBlobFields);
+        provingState.setEndSpongeBlob(spongeBlobState);
+        // Txs have been added to the block. Now try to accumulate the blobs as far as we can:
+        await this.provingState.setBlobAccumulators();
     }
     /**
-     * Kickstarts
-     * Note that if the
-     */
+     * Kickstarts chonk verifier circuits for the specified txs. These will be used during epoch proving.
+     * Note that if the chonk verifier circuits are not started this way, they will be started nontheless after processing.
+     */ startChonkVerifierCircuits(txs) {
         if (!this.provingState?.verifyState()) {
-            throw new Error(`
+            throw new Error(`Empty epoch proving state. call startNewEpoch before starting chonk verifier circuits.`);
         }
-
-
-        const
+        const publicTxs = txs.filter((tx)=>tx.data.forPublic);
+        for (const tx of publicTxs){
+            const txHash = tx.getTxHash().toString();
+            const privateInputs = getPublicChonkVerifierPrivateInputsFromTx(tx, this.proverId.toField());
             const tubeProof = promiseWithResolvers();
-            logger.debug(`Starting
-            this.
-
+            logger.debug(`Starting chonk verifier circuit for tx ${txHash}`);
+            this.doEnqueueChonkVerifier(txHash, privateInputs, (proof)=>{
+                tubeProof.resolve(proof);
+            });
+            this.provingState.cachedChonkVerifierProofs.set(txHash, tubeProof.promise);
         }
+        return Promise.resolve();
     }
     /**
      * Marks the block as completed.
@@ -175,55 +643,73 @@ const logger = createLogger('prover-client:orchestrator');
         if (!provingState) {
             throw new Error(`Block proving state for ${blockNumber} not found`);
         }
-        if
-
-
-
+        // Abort with specific error for the block if there's one.
+        const error = provingState.getError();
+        if (error) {
+            throw new Error(`Block proving failed: ${error}`);
         }
+        // Abort if the proving state is not valid due to errors occurred elsewhere.
         if (!provingState.verifyState()) {
-            throw new Error(`
+            throw new Error(`Invalid proving state when completing block ${blockNumber}.`);
         }
-
-
-        await this.buildBlock(provingState, expectedHeader);
-        // If the proofs were faster than the block building, then we need to try the block root rollup again here
-        await this.checkAndEnqueueBlockRootRollup(provingState);
-        return provingState.block;
-    }
-    /** Returns the block as built for a given index. */ getBlock(index) {
-        const block = this.provingState?.blocks[index]?.block;
-        if (!block) {
-            throw new Error(`Block at index ${index} not available`);
+        if (provingState.isAcceptingTxs()) {
+            throw new Error(`Block ${blockNumber} is still accepting txs. Call setBlockCompleted after all txs have been added.`);
         }
-
-
-
-        // Collect all new nullifiers, commitments, and contracts from all txs in this block to build body
-        const txs = provingState.allTxs.map((a)=>a.processedTx);
-        // Get db for this block
-        const db = this.dbs.get(provingState.blockNumber);
-        // Given we've applied every change from this block, now assemble the block header
-        // and update the archive tree, so we're ready to start processing the next block
-        const { header, body } = await buildHeaderAndBodyFromTxs(txs, provingState.globalVariables, provingState.newL1ToL2Messages, db);
+        // Given we've applied every change from this block, now assemble the block header:
+        logger.verbose(`Block ${blockNumber} completed. Assembling header.`);
+        const header = await provingState.buildBlockHeader();
         if (expectedHeader && !header.equals(expectedHeader)) {
             logger.error(`Block header mismatch: header=${header} expectedHeader=${expectedHeader}`);
             throw new Error('Block header mismatch');
         }
+        // Get db for this block
+        const db = this.dbs.get(provingState.blockNumber);
+        // Update the archive tree, so we're ready to start processing the next block:
         logger.verbose(`Updating archive tree with block ${provingState.blockNumber} header ${(await header.hash()).toString()}`);
         await db.updateArchive(header);
-
-
-        const l2Block = new L2Block(newArchive, header, body);
-        await this.verifyBuiltBlockAgainstSyncedState(l2Block, newArchive);
-        logger.verbose(`Orchestrator finalised block ${l2Block.number}`);
-        provingState.block = l2Block;
+        await this.verifyBuiltBlockAgainstSyncedState(provingState);
+        return header;
     }
     // Flagged as protected to disable in certain unit tests
-    async verifyBuiltBlockAgainstSyncedState(
-        const
+    async verifyBuiltBlockAgainstSyncedState(provingState) {
+        const builtBlockHeader = provingState.getBuiltBlockHeader();
+        if (!builtBlockHeader) {
+            logger.debug('Block header not built yet, skipping header check.');
+            return;
+        }
+        const output = provingState.getBlockRootRollupOutput();
+        if (!output) {
+            logger.debug('Block root rollup proof not built yet, skipping header check.');
+            return;
+        }
+        const header = await buildHeaderFromCircuitOutputs(output);
+        if (!(await header.hash()).equals(await builtBlockHeader.hash())) {
+            logger.error(`Block header mismatch.\nCircuit: ${inspect(header)}\nComputed: ${inspect(builtBlockHeader)}`);
+            provingState.reject(`Block header hash mismatch.`);
+            return;
+        }
+        // Get db for this block
+        const blockNumber = provingState.blockNumber;
+        const db = this.dbs.get(blockNumber);
+        const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
+        const syncedArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.dbProvider.getSnapshot(blockNumber));
         if (!syncedArchive.equals(newArchive)) {
-
+            logger.error(`Archive tree mismatch for block ${blockNumber}: world state synced to ${inspect(syncedArchive)} but built ${inspect(newArchive)}`);
+            provingState.reject(`Archive tree mismatch.`);
+            return;
         }
+        const circuitArchive = output.newArchive;
+        if (!newArchive.equals(circuitArchive)) {
+            logger.error(`New archive mismatch.\nCircuit: ${output.newArchive}\nComputed: ${newArchive}`);
+            provingState.reject(`New archive mismatch.`);
+            return;
+        }
+        // TODO(palla/prover): This closes the fork only on the happy path. If this epoch orchestrator
+        // is aborted and never reaches this point, it will leak the fork. We need to add a global cleanup,
+        // but have to make sure it only runs once all operations are completed, otherwise some function here
+        // will attempt to access the fork after it was closed.
+        logger.debug(`Cleaning up world state fork for ${blockNumber}`);
+        void this.dbs.get(blockNumber)?.close().then(()=>this.dbs.delete(blockNumber)).catch((err)=>logger.error(`Error closing db for block ${blockNumber}`, err));
     }
     /**
      * Cancel any further proving
@@ -235,14 +721,15 @@ const logger = createLogger('prover-client:orchestrator');
     }
     /**
      * Returns the proof for the current epoch.
-     */ async
+     */ async finalizeEpoch() {
         if (!this.provingState || !this.provingPromise) {
-            throw new Error(`Invalid proving state, an epoch must be proven before it can be
+            throw new Error(`Invalid proving state, an epoch must be proven before it can be finalized`);
         }
         const result = await this.provingPromise;
         if (result.status === 'failure') {
             throw new Error(`Epoch proving failed: ${result.reason}`);
         }
+        await this.provingState.finalizeBatchedBlob();
         const epochProofResult = this.provingState.getEpochProofResult();
         pushTestData('epochProofResult', {
             proof: epochProofResult.proof.toString(),
@@ -251,24 +738,12 @@ const logger = createLogger('prover-client:orchestrator');
         return epochProofResult;
     }
     /**
-     * Starts the proving process for the given transaction and adds it to our state
-     * @param tx - The transaction whose proving we wish to commence
-     * @param provingState - The proving state being worked on
-     */ async prepareTransaction(tx, provingState) {
-        const txInputs = await this.prepareBaseRollupInputs(provingState, tx);
-        if (!txInputs) {
-            // This should not be possible
-            throw new Error(`Unable to add transaction, preparing base inputs failed`);
-        }
-        return txInputs;
-    }
-    /**
      * Enqueue a job to be scheduled
      * @param provingState - The proving state object being operated on
      * @param jobType - The type of job to be queued
     * @param job - The actual job, returns a promise notifying of the job's completion
      */ deferredProving(provingState, request, callback) {
-        if (!provingState
+        if (!provingState.verifyState()) {
             logger.debug(`Not enqueuing job, state no longer valid`);
             return;
         }
@@ -283,7 +758,7 @@ const logger = createLogger('prover-client:orchestrator');
                 return;
             }
             const result = await request(controller.signal);
-            if (!provingState
+            if (!provingState.verifyState()) {
                 logger.debug(`State no longer valid, discarding result`);
                 return;
             }
@@ -311,29 +786,26 @@ const logger = createLogger('prover-client:orchestrator');
         // let the callstack unwind before adding the job to the queue
         setImmediate(()=>void safeJob());
     }
-    async
+    async updateL1ToL2MessageTree(l1ToL2Messages, db) {
         const l1ToL2MessagesPadded = padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 'Too many L1 to L2 messages');
-        const
-        const
+        const lastL1ToL2MessageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
+        const lastL1ToL2MessageSubtreeRootSiblingPath = assertLength(await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, db), L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH);
         // Update the local trees to include the new l1 to l2 messages
         await db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, l1ToL2MessagesPadded);
-        const
+        const newL1ToL2MessageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
+        const newL1ToL2MessageSubtreeRootSiblingPath = assertLength(await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, db), L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH);
         return {
-
-
-
+            lastL1ToL2MessageTreeSnapshot,
+            lastL1ToL2MessageSubtreeRootSiblingPath,
+            newL1ToL2MessageTreeSnapshot,
+            newL1ToL2MessageSubtreeRootSiblingPath
         };
     }
     // Updates the merkle trees for a transaction. The first enqueued job for a transaction
-    async prepareBaseRollupInputs(
-        if (!provingState.verifyState() || !provingState.spongeBlobState) {
-            logger.debug('Not preparing base rollup inputs, state invalid');
-            return;
-        }
-        const db = this.dbs.get(provingState.blockNumber);
+    async prepareBaseRollupInputs(tx, lastArchive, newL1ToL2MessageTreeSnapshot, startSpongeBlob, db) {
         // We build the base rollup inputs using a mock proof and verification key.
-        // These will be overwritten later once we have proven the
-        const [ms, hints] = await elapsed(
+        // These will be overwritten later once we have proven the chonk verifier circuit and any public kernels
+        const [ms, hints] = await elapsed(insertSideEffectsAndBuildBaseRollupHints(tx, lastArchive, newL1ToL2MessageTreeSnapshot, startSpongeBlob, this.proverId.toField(), db));
         this.metrics.recordBaseRollupInputs(ms);
         const promises = [
             MerkleTreeId.NOTE_HASH_TREE,
@@ -349,10 +821,6 @@ const logger = createLogger('prover-client:orchestrator');
                 obj.key,
                 obj.value
             ]));
-        if (!provingState.verifyState()) {
-            logger.debug(`Discarding proving job, state no longer valid`);
-            return;
-        }
         return [
             hints,
             treeSnapshots
@@ -365,68 +833,71 @@ const logger = createLogger('prover-client:orchestrator');
            logger.debug('Not running base rollup, state invalid');
            return;
        }
+        if (!provingState.tryStartProvingBase(txIndex)) {
+            logger.debug(`Base rollup for tx ${txIndex} already started.`);
+            return;
+        }
        const txProvingState = provingState.getTxProvingState(txIndex);
        const { processedTx } = txProvingState;
        const { rollupType, inputs } = txProvingState.getBaseRollupTypeAndInputs();
        logger.debug(`Enqueuing deferred proving base rollup for ${processedTx.hash.toString()}`);
-        this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, `ProvingOrchestrator.prover.${inputs instanceof
+        this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, `ProvingOrchestrator.prover.${inputs instanceof PrivateTxBaseRollupPrivateInputs ? 'getPrivateTxBaseRollupProof' : 'getPublicTxBaseRollupProof'}`, {
            [Attributes.TX_HASH]: processedTx.hash.toString(),
-            [Attributes.PROTOCOL_CIRCUIT_TYPE]: 'server',
            [Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType
        }, (signal)=>{
-            if (inputs instanceof
-                return this.prover.
+            if (inputs instanceof PrivateTxBaseRollupPrivateInputs) {
+                return this.prover.getPrivateTxBaseRollupProof(inputs, signal, provingState.epochNumber);
            } else {
-                return this.prover.
+                return this.prover.getPublicTxBaseRollupProof(inputs, signal, provingState.epochNumber);
            }
-        }),
+        }), (result)=>{
            logger.debug(`Completed proof for ${rollupType} for tx ${processedTx.hash.toString()}`);
-            validatePartialState(result.inputs.
+            validatePartialState(result.inputs.endTreeSnapshots, txProvingState.treeSnapshots);
            const leafLocation = provingState.setBaseRollupProof(txIndex, result);
            if (provingState.totalNumTxs === 1) {
-
+                this.checkAndEnqueueBlockRootRollup(provingState);
            } else {
-
+                this.checkAndEnqueueNextMergeRollup(provingState, leafLocation);
            }
        });
    }
-    // Enqueues the
-    // Once completed, will enqueue the
-
+    // Enqueues the public chonk verifier circuit for a given transaction index, or reuses the one already enqueued.
+    // Once completed, will enqueue the the public tx base rollup.
+    getOrEnqueueChonkVerifier(provingState, txIndex) {
        if (!provingState.verifyState()) {
-            logger.debug('Not running
+            logger.debug('Not running chonk verifier circuit, state invalid');
            return;
        }
        const txProvingState = provingState.getTxProvingState(txIndex);
        const txHash = txProvingState.processedTx.hash.toString();
+        NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH;
        const handleResult = (result)=>{
-            logger.debug(`Got
+            logger.debug(`Got chonk verifier proof for tx index: ${txIndex}`, {
                txHash
            });
-            txProvingState.
-            this.provingState?.
-            this.
+            txProvingState.setPublicChonkVerifierProof(result);
+            this.provingState?.cachedChonkVerifierProofs.delete(txHash);
+            this.checkAndEnqueueBaseRollup(provingState, txIndex);
        };
-        if (this.provingState?.
-            logger.debug(`
+        if (this.provingState?.cachedChonkVerifierProofs.has(txHash)) {
+            logger.debug(`Chonk verifier proof already enqueued for tx index: ${txIndex}`, {
                txHash
            });
-            void this.provingState.
+            void this.provingState.cachedChonkVerifierProofs.get(txHash).then(handleResult);
            return;
        }
-        logger.debug(`Enqueuing
-        this.
+        logger.debug(`Enqueuing chonk verifier circuit for tx index: ${txIndex}`);
+        this.doEnqueueChonkVerifier(txHash, txProvingState.getPublicChonkVerifierPrivateInputs(), handleResult);
    }
-
-    if (!provingState
-        logger.debug('Not running
+    doEnqueueChonkVerifier(txHash, inputs, handler, provingState = this.provingState) {
+        if (!provingState.verifyState()) {
+            logger.debug('Not running chonk verifier circuit, state invalid');
            return;
        }
-        this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.
+        this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getPublicChonkVerifierProof', {
            [Attributes.TX_HASH]: txHash,
-            [Attributes.
-
-        }, (signal)=>this.prover.getTubeProof(inputs, signal, this.provingState.epochNumber)), handler);
+            [Attributes.PROTOCOL_CIRCUIT_NAME]: 'chonk-verifier-public'
+        }, (signal)=>this.prover.getPublicChonkVerifierProof(inputs, signal, provingState.epochNumber)), handler);
    }
    // Executes the merge rollup circuit and stored the output as intermediate state for the parent merge/block root circuit
    // Enqueues the next level of merge if all inputs are available
@@ -435,65 +906,73 @@ const logger = createLogger('prover-client:orchestrator');
|
|
|
435
906
|
logger.debug('Not running merge rollup. State no longer valid.');
|
|
436
907
|
return;
|
|
437
908
|
}
|
|
909
|
+
if (!provingState.tryStartProvingMerge(location)) {
|
|
910
|
+
logger.debug('Merge rollup already started.');
|
|
911
|
+
return;
|
|
912
|
+
}
|
|
438
913
|
const inputs = provingState.getMergeRollupInputs(location);
|
|
439
|
-
this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.
|
|
440
|
-
[Attributes.
|
|
441
|
-
|
|
442
|
-
}, (signal)=>this.prover.getMergeRollupProof(inputs, signal, provingState.epochNumber)), async (result)=>{
|
|
914
|
+
this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getTxMergeRollupProof', {
|
|
915
|
+
[Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-tx-merge'
|
|
916
|
+
}, (signal)=>this.prover.getTxMergeRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
|
|
443
917
|
provingState.setMergeRollupProof(location, result);
|
|
444
|
-
|
|
918
|
+
this.checkAndEnqueueNextMergeRollup(provingState, location);
|
|
445
919
|
});
|
|
446
920
|
}
|
|
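The tryStartProvingMerge check added here, like the other tryStartProving* guards introduced below, makes enqueueing idempotent: whichever child proof finishes last can safely try to enqueue the parent circuit, and only the first attempt goes through. A small first-caller-wins sketch of the pattern, with hypothetical names:

```ts
// Sketch of a first-caller-wins guard; not the package's proving-state implementation.
class StartOnce {
  private readonly started = new Set<string>();

  tryStart(key: string): boolean {
    if (this.started.has(key)) {
      return false;
    }
    this.started.add(key);
    return true;
  }
}

const guard = new StartOnce();

function enqueueMergeAt(level: number, index: number): void {
  if (!guard.tryStart(`${level}:${index}`)) {
    console.log(`merge ${level}:${index} already started`);
    return;
  }
  console.log(`enqueueing merge ${level}:${index}`);
}

enqueueMergeAt(1, 0); // enqueues
enqueueMergeAt(1, 0); // no-op, the guard already fired
```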
     // Executes the block root rollup circuit
-
+    enqueueBlockRootRollup(provingState) {
         if (!provingState.verifyState()) {
             logger.debug('Not running block root rollup, state no longer valid');
             return;
         }
-        provingState.
-
-
+        if (!provingState.tryStartProvingBlockRoot()) {
+            logger.debug('Block root rollup already started.');
+            return;
+        }
+        const { rollupType, inputs } = provingState.getBlockRootRollupTypeAndInputs();
+        logger.debug(`Enqueuing ${rollupType} for block ${provingState.blockNumber}.`);
         this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getBlockRootRollupProof', {
-            [Attributes.PROTOCOL_CIRCUIT_TYPE]: 'server',
             [Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType
         }, (signal)=>{
-            if (inputs instanceof
-                return this.prover.
-            } else if (inputs instanceof
-                return this.prover.
+            if (inputs instanceof BlockRootFirstRollupPrivateInputs) {
+                return this.prover.getBlockRootFirstRollupProof(inputs, signal, provingState.epochNumber);
+            } else if (inputs instanceof BlockRootSingleTxFirstRollupPrivateInputs) {
+                return this.prover.getBlockRootSingleTxFirstRollupProof(inputs, signal, provingState.epochNumber);
+            } else if (inputs instanceof BlockRootEmptyTxFirstRollupPrivateInputs) {
+                return this.prover.getBlockRootEmptyTxFirstRollupProof(inputs, signal, provingState.epochNumber);
+            } else if (inputs instanceof BlockRootSingleTxRollupPrivateInputs) {
+                return this.prover.getBlockRootSingleTxRollupProof(inputs, signal, provingState.epochNumber);
             } else {
                 return this.prover.getBlockRootRollupProof(inputs, signal, provingState.epochNumber);
             }
         }), async (result)=>{
-
-
-
-
-
-
-
-            // validatePartialState(result.inputs.end, tx.treeSnapshots); // TODO(palla/prover)
-            const epochProvingState = this.provingState;
-            const leafLocation = epochProvingState.setBlockRootRollupProof(provingState.index, result);
-            if (epochProvingState.totalNumBlocks === 1) {
-                await this.enqueueEpochPadding(epochProvingState);
+            // If the proofs were slower than the block header building, then we need to try validating the block header hashes here.
+            await this.verifyBuiltBlockAgainstSyncedState(provingState);
+            logger.debug(`Completed ${rollupType} proof for block ${provingState.blockNumber}`);
+            const leafLocation = provingState.setBlockRootRollupProof(result);
+            const checkpointProvingState = provingState.parentCheckpoint;
+            if (checkpointProvingState.totalNumBlocks === 1) {
+                this.checkAndEnqueueCheckpointRootRollup(checkpointProvingState);
             } else {
-                this.checkAndEnqueueNextBlockMergeRollup(
+                this.checkAndEnqueueNextBlockMergeRollup(checkpointProvingState, leafLocation);
             }
         });
     }
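enqueueBlockRootRollup now asks the proving state for both a rollupType label and an inputs object, and selects the prover entry point from the concrete class of those inputs. The sketch below shows that instanceof dispatch with stand-in classes; the real private-input classes (BlockRootFirstRollupPrivateInputs and friends) are the ones named in the hunk above:

```ts
// Stand-in input classes; the real ones carry circuit-specific fields.
class FirstBlockInputs { readonly kind = 'first' as const; }
class SingleTxBlockInputs { readonly kind = 'single-tx' as const; }
class DefaultBlockInputs { readonly kind = 'default' as const; }
type BlockRootInputs = FirstBlockInputs | SingleTxBlockInputs | DefaultBlockInputs;

interface BlockRootProver {
  getFirstProof(inputs: FirstBlockInputs): Promise<string>;
  getSingleTxProof(inputs: SingleTxBlockInputs): Promise<string>;
  getDefaultProof(inputs: DefaultBlockInputs): Promise<string>;
}

// One enqueue path, several circuit variants: dispatch on the input class.
function proveBlockRoot(prover: BlockRootProver, inputs: BlockRootInputs): Promise<string> {
  if (inputs instanceof FirstBlockInputs) {
    return prover.getFirstProof(inputs);
  } else if (inputs instanceof SingleTxBlockInputs) {
    return prover.getSingleTxProof(inputs);
  }
  return prover.getDefaultProof(inputs);
}
```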
     // Executes the base parity circuit and stores the intermediate state for the root parity circuit
     // Enqueues the root parity circuit if all inputs are available
-    enqueueBaseParityCircuit(
+    enqueueBaseParityCircuit(checkpointProvingState, provingState, baseParityIndex) {
         if (!provingState.verifyState()) {
             logger.debug('Not running base parity. State no longer valid.');
             return;
         }
+        if (!provingState.tryStartProvingBaseParity(baseParityIndex)) {
+            logger.warn(`Base parity ${baseParityIndex} already started.`);
+            return;
+        }
+        const inputs = checkpointProvingState.getBaseParityInputs(baseParityIndex);
         this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getBaseParityProof', {
-            [Attributes.
-            [Attributes.PROTOCOL_CIRCUIT_NAME]: 'base-parity'
+            [Attributes.PROTOCOL_CIRCUIT_NAME]: 'parity-base'
         }, (signal)=>this.prover.getBaseParityProof(inputs, signal, provingState.epochNumber)), (provingOutput)=>{
-            provingState.setBaseParityProof(
+            provingState.setBaseParityProof(baseParityIndex, provingOutput);
             this.checkAndEnqueueRootParityCircuit(provingState);
         });
     }
@@ -510,13 +989,16 @@ const logger = createLogger('prover-client:orchestrator');
             logger.debug('Not running root parity. State no longer valid.');
             return;
         }
-
+        if (!provingState.tryStartProvingRootParity()) {
+            logger.debug('Root parity already started.');
+            return;
+        }
+        const inputs = provingState.getParityRootInputs();
         this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getRootParityProof', {
-            [Attributes.
-
-        }, (signal)=>this.prover.getRootParityProof(inputs, signal, provingState.epochNumber)), async (result)=>{
+            [Attributes.PROTOCOL_CIRCUIT_NAME]: 'parity-root'
+        }, (signal)=>this.prover.getRootParityProof(inputs, signal, provingState.epochNumber)), (result)=>{
             provingState.setRootParityProof(result);
-
+            this.checkAndEnqueueBlockRootRollup(provingState);
         });
     }
     // Executes the block merge rollup circuit and stores the output as intermediate state for the parent merge/block root circuit
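Every circuit in this file is enqueued through the same deferredProving(provingState, wrapCallbackInSpan(...), handler) shape: the proving job runs under a tracing span carrying the circuit name, receives an abort signal, and a handler consumes the result. A rough sketch of what such a wrapper can look like, assuming nothing beyond the standard AbortSignal API; the package's own helpers are more involved:

```ts
// Illustrative only: a deferred proving helper that threads an abort signal through the
// job and skips the handler once proving has been cancelled.
function deferProving<T>(
  signal: AbortSignal,
  job: (signal: AbortSignal) => Promise<T>,
  onResult: (result: T) => void,
): void {
  void job(signal)
    .then(result => {
      if (!signal.aborted) {
        onResult(result);
      }
    })
    .catch(err => {
      if (!signal.aborted) {
        console.error('proving job failed', err);
      }
    });
}

// Usage: cancel outstanding jobs by aborting the controller.
const controller = new AbortController();
deferProving(controller.signal, async s => {
  if (s.aborted) throw new Error('aborted');
  return 'proof';
}, proof => console.log('got', proof));
```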
@@ -526,28 +1008,90 @@ const logger = createLogger('prover-client:orchestrator');
             logger.debug('Not running block merge rollup. State no longer valid.');
             return;
         }
+        if (!provingState.tryStartProvingBlockMerge(location)) {
+            logger.debug('Block merge rollup already started.');
+            return;
+        }
         const inputs = provingState.getBlockMergeRollupInputs(location);
         this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getBlockMergeRollupProof', {
-            [Attributes.
-            [Attributes.PROTOCOL_CIRCUIT_NAME]: 'block-merge-rollup'
+            [Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-block-merge'
         }, (signal)=>this.prover.getBlockMergeRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
             provingState.setBlockMergeRollupProof(location, result);
             this.checkAndEnqueueNextBlockMergeRollup(provingState, location);
         });
     }
-
+    enqueueCheckpointRootRollup(provingState) {
+        if (!provingState.verifyState()) {
+            logger.debug('Not running checkpoint root rollup. State no longer valid.');
+            return;
+        }
+        if (!provingState.tryStartProvingCheckpointRoot()) {
+            logger.debug('Checkpoint root rollup already started.');
+            return;
+        }
+        const rollupType = provingState.getCheckpointRootRollupType();
+        logger.debug(`Enqueuing ${rollupType} for checkpoint ${provingState.index}.`);
+        const inputs = provingState.getCheckpointRootRollupInputs();
+        this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getCheckpointRootRollupProof', {
+            [Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType
+        }, (signal)=>{
+            if (inputs instanceof CheckpointRootSingleBlockRollupPrivateInputs) {
+                return this.prover.getCheckpointRootSingleBlockRollupProof(inputs, signal, provingState.epochNumber);
+            } else {
+                return this.prover.getCheckpointRootRollupProof(inputs, signal, provingState.epochNumber);
+            }
+        }), (result)=>{
+            const computedEndBlobAccumulatorState = provingState.getEndBlobAccumulator().toBlobAccumulator();
+            const circuitEndBlobAccumulatorState = result.inputs.endBlobAccumulator;
+            if (!circuitEndBlobAccumulatorState.equals(computedEndBlobAccumulatorState)) {
+                logger.error(`Blob accumulator state mismatch.\nCircuit: ${inspect(circuitEndBlobAccumulatorState)}\nComputed: ${inspect(computedEndBlobAccumulatorState)}`);
+                provingState.reject(`Blob accumulator state mismatch.`);
+                return;
+            }
+            logger.debug(`Completed ${rollupType} proof for checkpoint ${provingState.index}.`);
+            const leafLocation = provingState.setCheckpointRootRollupProof(result);
+            const epochProvingState = provingState.parentEpoch;
+            if (epochProvingState.totalNumCheckpoints === 1) {
+                this.enqueueEpochPadding(epochProvingState);
+            } else {
+                this.checkAndEnqueueNextCheckpointMergeRollup(epochProvingState, leafLocation);
+            }
+        });
+    }
+    enqueueCheckpointMergeRollup(provingState, location) {
+        if (!provingState.verifyState()) {
+            logger.debug('Not running checkpoint merge rollup. State no longer valid.');
+            return;
+        }
+        if (!provingState.tryStartProvingCheckpointMerge(location)) {
+            logger.debug('Checkpoint merge rollup already started.');
+            return;
+        }
+        const inputs = provingState.getCheckpointMergeRollupInputs(location);
+        this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getCheckpointMergeRollupProof', {
+            [Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-checkpoint-merge'
+        }, (signal)=>this.prover.getCheckpointMergeRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
+            logger.debug('Completed proof for checkpoint merge rollup.');
+            provingState.setCheckpointMergeRollupProof(location, result);
+            this.checkAndEnqueueNextCheckpointMergeRollup(provingState, location);
+        });
+    }
|
+
enqueueEpochPadding(provingState) {
|
|
539
1080
|
if (!provingState.verifyState()) {
|
|
540
1081
|
logger.debug('Not running epoch padding. State no longer valid.');
|
|
541
1082
|
return;
|
|
542
1083
|
}
|
|
543
|
-
|
|
544
|
-
|
|
545
|
-
|
|
546
|
-
|
|
547
|
-
|
|
548
|
-
|
|
549
|
-
|
|
550
|
-
|
|
1084
|
+
if (!provingState.tryStartProvingPaddingCheckpoint()) {
|
|
1085
|
+
logger.debug('Padding checkpoint already started.');
|
|
1086
|
+
return;
|
|
1087
|
+
}
|
|
1088
|
+
logger.debug('Padding epoch proof with a padding block root proof.');
|
|
1089
|
+
const inputs = provingState.getPaddingCheckpointInputs();
|
|
1090
|
+
this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getCheckpointPaddingRollupProof', {
|
|
1091
|
+
[Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-checkpoint-padding'
|
|
1092
|
+
}, (signal)=>this.prover.getCheckpointPaddingRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
|
|
1093
|
+
logger.debug('Completed proof for padding checkpoint.');
|
|
1094
|
+
provingState.setCheckpointPaddingProof(result);
|
|
551
1095
|
this.checkAndEnqueueRootRollup(provingState);
|
|
552
1096
|
});
|
|
553
1097
|
}
|
|
@@ -558,10 +1102,9 @@ const logger = createLogger('prover-client:orchestrator');
|
|
|
558
1102
|
return;
|
|
559
1103
|
}
|
|
560
1104
|
logger.debug(`Preparing root rollup`);
|
|
561
|
-
const inputs = provingState.getRootRollupInputs(
|
|
1105
|
+
const inputs = provingState.getRootRollupInputs();
|
|
562
1106
|
this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getRootRollupProof', {
|
|
563
|
-
[Attributes.
|
|
564
|
-
[Attributes.PROTOCOL_CIRCUIT_NAME]: 'root-rollup'
|
|
1107
|
+
[Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-root'
|
|
565
1108
|
}, (signal)=>this.prover.getRootRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
|
|
566
1109
|
logger.verbose(`Orchestrator completed root rollup for epoch ${provingState.epochNumber}`);
|
|
567
1110
|
provingState.setRootRollupProof(result);
|
|
@@ -570,34 +1113,23 @@ const logger = createLogger('prover-client:orchestrator');
|
|
|
570
1113
|
});
|
|
571
1114
|
});
|
|
572
1115
|
}
|
|
573
|
-
|
|
1116
|
+
checkAndEnqueueNextMergeRollup(provingState, currentLocation) {
|
|
574
1117
|
if (!provingState.isReadyForMergeRollup(currentLocation)) {
|
|
575
1118
|
return;
|
|
576
1119
|
}
|
|
577
1120
|
const parentLocation = provingState.getParentLocation(currentLocation);
|
|
578
1121
|
if (parentLocation.level === 0) {
|
|
579
|
-
|
|
1122
|
+
this.checkAndEnqueueBlockRootRollup(provingState);
|
|
580
1123
|
} else {
|
|
581
1124
|
this.enqueueMergeRollup(provingState, parentLocation);
|
|
582
1125
|
}
|
|
583
1126
|
}
|
|
584
|
-
|
|
1127
|
+
checkAndEnqueueBlockRootRollup(provingState) {
|
|
585
1128
|
if (!provingState.isReadyForBlockRootRollup()) {
|
|
586
|
-
logger.debug('Not ready for root rollup');
|
|
587
|
-
return;
|
|
588
|
-
}
|
|
589
|
-
if (provingState.blockRootRollupStarted) {
|
|
590
|
-
logger.debug('Block root rollup already started');
|
|
1129
|
+
logger.debug('Not ready for block root rollup');
|
|
591
1130
|
return;
|
|
592
1131
|
}
|
|
593
|
-
|
|
594
|
-
// TODO(palla/prover): This closes the fork only on the happy path. If this epoch orchestrator
|
|
595
|
-
// is aborted and never reaches this point, it will leak the fork. We need to add a global cleanup,
|
|
596
|
-
// but have to make sure it only runs once all operations are completed, otherwise some function here
|
|
597
|
-
// will attempt to access the fork after it was closed.
|
|
598
|
-
logger.debug(`Cleaning up world state fork for ${blockNumber}`);
|
|
599
|
-
void this.dbs.get(blockNumber)?.close().then(()=>this.dbs.delete(blockNumber)).catch((err)=>logger.error(`Error closing db for block ${blockNumber}`, err));
|
|
600
|
-
await this.enqueueBlockRootRollup(provingState);
|
|
1132
|
+
this.enqueueBlockRootRollup(provingState);
|
|
601
1133
|
}
|
|
602
1134
|
checkAndEnqueueNextBlockMergeRollup(provingState, currentLocation) {
|
|
603
1135
|
if (!provingState.isReadyForBlockMerge(currentLocation)) {
|
|
@@ -605,11 +1137,28 @@ const logger = createLogger('prover-client:orchestrator');
|
|
|
605
1137
|
}
|
|
606
1138
|
const parentLocation = provingState.getParentLocation(currentLocation);
|
|
607
1139
|
if (parentLocation.level === 0) {
|
|
608
|
-
this.
|
|
1140
|
+
this.checkAndEnqueueCheckpointRootRollup(provingState);
|
|
609
1141
|
} else {
|
|
610
1142
|
this.enqueueBlockMergeRollup(provingState, parentLocation);
|
|
611
1143
|
}
|
|
612
1144
|
}
|
|
1145
|
+
checkAndEnqueueCheckpointRootRollup(provingState) {
|
|
1146
|
+
if (!provingState.isReadyForCheckpointRoot()) {
|
|
1147
|
+
return;
|
|
1148
|
+
}
|
|
1149
|
+
this.enqueueCheckpointRootRollup(provingState);
|
|
1150
|
+
}
|
|
1151
|
+
checkAndEnqueueNextCheckpointMergeRollup(provingState, currentLocation) {
|
|
1152
|
+
if (!provingState.isReadyForCheckpointMerge(currentLocation)) {
|
|
1153
|
+
return;
|
|
1154
|
+
}
|
|
1155
|
+
const parentLocation = provingState.getParentLocation(currentLocation);
|
|
1156
|
+
if (parentLocation.level === 0) {
|
|
1157
|
+
this.checkAndEnqueueRootRollup(provingState);
|
|
1158
|
+
} else {
|
|
1159
|
+
this.enqueueCheckpointMergeRollup(provingState, parentLocation);
|
|
1160
|
+
}
|
|
1161
|
+
}
|
|
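The checkAndEnqueueNext* helpers all perform the same binary-tree walk: when a node's proof lands, check whether the parent's inputs are complete, enqueue the parent merge if so, and hand off to the next stage (block root, checkpoint root, or epoch root) once the walk reaches level 0. A compact sketch of that walk, independent of the package's proving-state classes:

```ts
interface TreeLocation { level: number; index: number; }

const keyOf = (loc: TreeLocation) => `${loc.level}:${loc.index}`;
const parentOf = (loc: TreeLocation): TreeLocation => ({ level: loc.level - 1, index: loc.index >> 1 });
const siblingOf = (loc: TreeLocation): TreeLocation => ({ level: loc.level, index: loc.index ^ 1 });

// Called whenever a proof at `loc` completes; enqueues the parent only when both children are done.
function onProofCompleted(
  proven: Set<string>,
  loc: TreeLocation,
  enqueueMerge: (parent: TreeLocation) => void,
  enqueueRoot: () => void,
): void {
  proven.add(keyOf(loc));
  if (!proven.has(keyOf(siblingOf(loc)))) {
    return; // wait for the sibling proof to arrive
  }
  const parent = parentOf(loc);
  if (parent.level === 0) {
    enqueueRoot(); // hand off to the next rollup stage
  } else {
    enqueueMerge(parent);
  }
}

const proven = new Set<string>();
onProofCompleted(proven, { level: 2, index: 0 }, p => console.log('merge', p), () => console.log('root'));
onProofCompleted(proven, { level: 2, index: 1 }, p => console.log('merge', p), () => console.log('root')); // enqueues merge at level 1
```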
     checkAndEnqueueRootRollup(provingState) {
         if (!provingState.isReadyForRootRollup()) {
             logger.debug('Not ready for root rollup');
@@ -628,63 +1177,25 @@ const logger = createLogger('prover-client:orchestrator');
             return;
         }
         const txProvingState = provingState.getTxProvingState(txIndex);
-        // This function tries to do AVM proving. If there is a failure, it fakes the proof unless AVM_PROVING_STRICT is defined.
-        // Nothing downstream depends on the AVM proof yet. So having this mode lets us incrementally build the AVM circuit.
         const doAvmProving = wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getAvmProof', {
             [Attributes.TX_HASH]: txProvingState.processedTx.hash.toString()
         }, async (signal)=>{
             const inputs = txProvingState.getAvmInputs();
-
-                return await this.prover.getAvmProof(inputs, signal, provingState.epochNumber);
-            } catch (err) {
-                if (process.env.AVM_PROVING_STRICT) {
-                    logger.error(`Error thrown when proving AVM circuit with AVM_PROVING_STRICT on`, err);
-                    throw err;
-                } else {
-                    logger.warn(`Error thrown when proving AVM circuit but AVM_PROVING_STRICT is off. Faking AVM proof and carrying on. ${inspect(err)}.`);
-                    return {
-                        proof: makeEmptyRecursiveProof(AVM_PROOF_LENGTH_IN_FIELDS),
-                        verificationKey: VerificationKeyData.makeFake(AVM_VERIFICATION_KEY_LENGTH_IN_FIELDS)
-                    };
-                }
-            }
+            return await this.prover.getAvmProof(inputs, signal, provingState.epochNumber);
         });
-        this.deferredProving(provingState, doAvmProving, (
+        this.deferredProving(provingState, doAvmProving, (proof)=>{
             logger.debug(`Proven VM for tx index: ${txIndex}`);
-            txProvingState.setAvmProof(
-            this.
+            txProvingState.setAvmProof(proof);
+            this.checkAndEnqueueBaseRollup(provingState, txIndex);
         });
     }
-
+    checkAndEnqueueBaseRollup(provingState, txIndex) {
         const txProvingState = provingState.getTxProvingState(txIndex);
         if (!txProvingState.ready()) {
             return;
         }
-        // We must have completed all proving (
+        // We must have completed all proving (chonk verifier proof and (if required) vm proof are generated), we now move to the base rollup.
         logger.debug(`Public functions completed for tx ${txIndex} enqueueing base rollup`);
         this.enqueueBaseRollup(provingState, txIndex);
     }
 }
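checkAndEnqueueBaseRollup is the convergence point for a transaction's proofs: the chonk verifier proof and, when the transaction ran public functions, the AVM proof arrive on independent callbacks, and the base rollup is enqueued only once the per-tx state reports that every required piece is present. A minimal sketch of that gate, with hypothetical fields:

```ts
// Hypothetical per-tx state: the base rollup may only start once all required proofs exist.
interface TxProofs {
  chonkVerifierProof?: string;
  avmProof?: string;
  requiresAvm: boolean;
}

function ready(tx: TxProofs): boolean {
  return tx.chonkVerifierProof !== undefined && (!tx.requiresAvm || tx.avmProof !== undefined);
}

function maybeEnqueueBaseRollup(tx: TxProofs, enqueue: () => void): void {
  if (!ready(tx)) {
    return; // another callback will try again when its proof lands
  }
  enqueue();
}

// Example: the AVM proof arrives last and triggers the base rollup.
const tx: TxProofs = { requiresAvm: true, chonkVerifierProof: 'p1' };
maybeEnqueueBaseRollup(tx, () => console.log('base rollup enqueued')); // not yet
tx.avmProof = 'p2';
maybeEnqueueBaseRollup(tx, () => console.log('base rollup enqueued')); // fires now
```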
667
1201
|
}
|
|
668
|
-
_ts_decorate([
|
|
669
|
-
trackSpan('ProvingOrchestrator.startNewBlock', (globalVariables)=>({
|
|
670
|
-
[Attributes.BLOCK_NUMBER]: globalVariables.blockNumber.toNumber()
|
|
671
|
-
}))
|
|
672
|
-
], ProvingOrchestrator.prototype, "startNewBlock", null);
|
|
673
|
-
_ts_decorate([
|
|
674
|
-
trackSpan('ProvingOrchestrator.addTxs', (txs)=>({
|
|
675
|
-
[Attributes.BLOCK_TXS_COUNT]: txs.length
|
|
676
|
-
}))
|
|
677
|
-
], ProvingOrchestrator.prototype, "addTxs", null);
|
|
678
|
-
_ts_decorate([
|
|
679
|
-
trackSpan('ProvingOrchestrator.startTubeCircuits')
|
|
680
|
-
], ProvingOrchestrator.prototype, "startTubeCircuits", null);
|
|
681
|
-
_ts_decorate([
|
|
682
|
-
trackSpan('ProvingOrchestrator.setBlockCompleted', (blockNumber)=>({
|
|
683
|
-
[Attributes.BLOCK_NUMBER]: blockNumber
|
|
684
|
-
}))
|
|
685
|
-
], ProvingOrchestrator.prototype, "setBlockCompleted", null);
|
|
686
|
-
_ts_decorate([
|
|
687
|
-
trackSpan('ProvingOrchestrator.prepareBaseRollupInputs', (_, tx)=>({
|
|
688
|
-
[Attributes.TX_HASH]: tx.hash.toString()
|
|
689
|
-
}))
|
|
690
|
-
], ProvingOrchestrator.prototype, "prepareBaseRollupInputs", null);
|