@aztec/archiver 4.0.0-nightly.20250907 → 4.0.0-nightly.20260107
This diff compares the published contents of the two package versions as released to one of the supported public registries. It is provided for informational purposes only.
- package/README.md +27 -6
- package/dest/archiver/archiver.d.ts +127 -84
- package/dest/archiver/archiver.d.ts.map +1 -1
- package/dest/archiver/archiver.js +1128 -380
- package/dest/archiver/archiver_store.d.ts +122 -45
- package/dest/archiver/archiver_store.d.ts.map +1 -1
- package/dest/archiver/archiver_store_test_suite.d.ts +1 -1
- package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
- package/dest/archiver/archiver_store_test_suite.js +2013 -343
- package/dest/archiver/config.d.ts +7 -20
- package/dest/archiver/config.d.ts.map +1 -1
- package/dest/archiver/config.js +21 -5
- package/dest/archiver/errors.d.ts +25 -1
- package/dest/archiver/errors.d.ts.map +1 -1
- package/dest/archiver/errors.js +37 -0
- package/dest/archiver/index.d.ts +2 -2
- package/dest/archiver/index.d.ts.map +1 -1
- package/dest/archiver/instrumentation.d.ts +5 -3
- package/dest/archiver/instrumentation.d.ts.map +1 -1
- package/dest/archiver/instrumentation.js +14 -0
- package/dest/archiver/kv_archiver_store/block_store.d.ts +83 -15
- package/dest/archiver/kv_archiver_store/block_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/block_store.js +396 -73
- package/dest/archiver/kv_archiver_store/contract_class_store.d.ts +2 -2
- package/dest/archiver/kv_archiver_store/contract_class_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/contract_class_store.js +1 -1
- package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts +2 -2
- package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +51 -55
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/kv_archiver_store.js +82 -46
- package/dest/archiver/kv_archiver_store/log_store.d.ts +12 -16
- package/dest/archiver/kv_archiver_store/log_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/log_store.js +149 -84
- package/dest/archiver/kv_archiver_store/message_store.d.ts +6 -5
- package/dest/archiver/kv_archiver_store/message_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/message_store.js +15 -14
- package/dest/archiver/l1/bin/retrieve-calldata.d.ts +3 -0
- package/dest/archiver/l1/bin/retrieve-calldata.d.ts.map +1 -0
- package/dest/archiver/l1/bin/retrieve-calldata.js +149 -0
- package/dest/archiver/l1/calldata_retriever.d.ts +112 -0
- package/dest/archiver/l1/calldata_retriever.d.ts.map +1 -0
- package/dest/archiver/l1/calldata_retriever.js +471 -0
- package/dest/archiver/l1/data_retrieval.d.ts +90 -0
- package/dest/archiver/l1/data_retrieval.d.ts.map +1 -0
- package/dest/archiver/l1/data_retrieval.js +331 -0
- package/dest/archiver/l1/debug_tx.d.ts +19 -0
- package/dest/archiver/l1/debug_tx.d.ts.map +1 -0
- package/dest/archiver/l1/debug_tx.js +73 -0
- package/dest/archiver/l1/spire_proposer.d.ts +70 -0
- package/dest/archiver/l1/spire_proposer.d.ts.map +1 -0
- package/dest/archiver/l1/spire_proposer.js +157 -0
- package/dest/archiver/l1/trace_tx.d.ts +97 -0
- package/dest/archiver/l1/trace_tx.d.ts.map +1 -0
- package/dest/archiver/l1/trace_tx.js +91 -0
- package/dest/archiver/l1/types.d.ts +12 -0
- package/dest/archiver/l1/types.d.ts.map +1 -0
- package/dest/archiver/l1/types.js +3 -0
- package/dest/archiver/l1/validate_trace.d.ts +29 -0
- package/dest/archiver/l1/validate_trace.d.ts.map +1 -0
- package/dest/archiver/l1/validate_trace.js +150 -0
- package/dest/archiver/structs/data_retrieval.d.ts +1 -1
- package/dest/archiver/structs/inbox_message.d.ts +4 -4
- package/dest/archiver/structs/inbox_message.d.ts.map +1 -1
- package/dest/archiver/structs/inbox_message.js +6 -5
- package/dest/archiver/structs/published.d.ts +2 -2
- package/dest/archiver/structs/published.d.ts.map +1 -1
- package/dest/archiver/validation.d.ts +10 -4
- package/dest/archiver/validation.d.ts.map +1 -1
- package/dest/archiver/validation.js +66 -44
- package/dest/factory.d.ts +4 -6
- package/dest/factory.d.ts.map +1 -1
- package/dest/factory.js +5 -4
- package/dest/index.d.ts +2 -2
- package/dest/index.d.ts.map +1 -1
- package/dest/index.js +1 -1
- package/dest/rpc/index.d.ts +2 -2
- package/dest/test/index.d.ts +1 -1
- package/dest/test/mock_archiver.d.ts +16 -8
- package/dest/test/mock_archiver.d.ts.map +1 -1
- package/dest/test/mock_archiver.js +19 -14
- package/dest/test/mock_l1_to_l2_message_source.d.ts +7 -6
- package/dest/test/mock_l1_to_l2_message_source.d.ts.map +1 -1
- package/dest/test/mock_l1_to_l2_message_source.js +10 -9
- package/dest/test/mock_l2_block_source.d.ts +31 -20
- package/dest/test/mock_l2_block_source.d.ts.map +1 -1
- package/dest/test/mock_l2_block_source.js +85 -18
- package/dest/test/mock_structs.d.ts +3 -2
- package/dest/test/mock_structs.d.ts.map +1 -1
- package/dest/test/mock_structs.js +9 -8
- package/package.json +18 -17
- package/src/archiver/archiver.ts +971 -475
- package/src/archiver/archiver_store.ts +141 -44
- package/src/archiver/archiver_store_test_suite.ts +2114 -331
- package/src/archiver/config.ts +30 -35
- package/src/archiver/errors.ts +64 -0
- package/src/archiver/index.ts +1 -1
- package/src/archiver/instrumentation.ts +19 -2
- package/src/archiver/kv_archiver_store/block_store.ts +541 -83
- package/src/archiver/kv_archiver_store/contract_class_store.ts +1 -1
- package/src/archiver/kv_archiver_store/contract_instance_store.ts +1 -1
- package/src/archiver/kv_archiver_store/kv_archiver_store.ts +107 -67
- package/src/archiver/kv_archiver_store/log_store.ts +209 -99
- package/src/archiver/kv_archiver_store/message_store.ts +21 -18
- package/src/archiver/l1/README.md +98 -0
- package/src/archiver/l1/bin/retrieve-calldata.ts +182 -0
- package/src/archiver/l1/calldata_retriever.ts +641 -0
- package/src/archiver/l1/data_retrieval.ts +512 -0
- package/src/archiver/l1/debug_tx.ts +99 -0
- package/src/archiver/l1/spire_proposer.ts +160 -0
- package/src/archiver/l1/trace_tx.ts +128 -0
- package/src/archiver/l1/types.ts +13 -0
- package/src/archiver/l1/validate_trace.ts +211 -0
- package/src/archiver/structs/inbox_message.ts +8 -8
- package/src/archiver/structs/published.ts +1 -1
- package/src/archiver/validation.ts +86 -32
- package/src/factory.ts +6 -7
- package/src/index.ts +1 -1
- package/src/test/fixtures/debug_traceTransaction-multicall3.json +88 -0
- package/src/test/fixtures/debug_traceTransaction-multiplePropose.json +153 -0
- package/src/test/fixtures/debug_traceTransaction-proxied.json +122 -0
- package/src/test/fixtures/trace_transaction-multicall3.json +65 -0
- package/src/test/fixtures/trace_transaction-multiplePropose.json +319 -0
- package/src/test/fixtures/trace_transaction-proxied.json +128 -0
- package/src/test/fixtures/trace_transaction-randomRevert.json +216 -0
- package/src/test/mock_archiver.ts +22 -16
- package/src/test/mock_l1_to_l2_message_source.ts +10 -9
- package/src/test/mock_l2_block_source.ts +114 -27
- package/src/test/mock_structs.ts +10 -9
- package/dest/archiver/data_retrieval.d.ts +0 -78
- package/dest/archiver/data_retrieval.d.ts.map +0 -1
- package/dest/archiver/data_retrieval.js +0 -354
- package/src/archiver/data_retrieval.ts +0 -535
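The archiver diff below introduces a block queue in which each queued block is paired with the resolvers of the promise returned to the caller, and the queue is drained at the start of every sync iteration (see the new addBlock and processQueuedBlocks methods). The following is a minimal standalone TypeScript sketch of that pattern; the QueuedItem and BlockQueue names are illustrative only and are not part of this package's API.

    // Sketch of the queued-block pattern used by the new addBlock/processQueuedBlocks methods:
    // each enqueued item carries the resolvers of the promise handed back to the caller,
    // and the queue is drained in FIFO order, settling each promise individually.
    // QueuedItem and BlockQueue are hypothetical names, not part of @aztec/archiver's API.
    type QueuedItem<T> = { item: T; resolve: () => void; reject: (err: Error) => void };

    class BlockQueue<T> {
      private queue: QueuedItem<T>[] = [];

      // Returns a promise that settles once the item has been processed by drain().
      enqueue(item: T): Promise<void> {
        return new Promise<void>((resolve, reject) => {
          this.queue.push({ item, resolve, reject });
        });
      }

      // Takes everything currently queued and processes it, resolving or rejecting per item.
      async drain(process: (item: T) => Promise<void>): Promise<void> {
        const pending = this.queue.splice(0, this.queue.length);
        for (const { item, resolve, reject } of pending) {
          try {
            await process(item);
            resolve();
          } catch (err) {
            reject(err as Error);
          }
        }
      }
    }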
@@ -1,75 +1,472 @@
-function
-
-
-
-
+function applyDecs2203RFactory() {
+function createAddInitializerMethod(initializers, decoratorFinishedRef) {
+return function addInitializer(initializer) {
+assertNotFinished(decoratorFinishedRef, "addInitializer");
+assertCallable(initializer, "An initializer");
+initializers.push(initializer);
+};
+}
+function memberDec(dec, name, desc, initializers, kind, isStatic, isPrivate, metadata, value) {
+var kindStr;
+switch(kind){
+case 1:
+kindStr = "accessor";
+break;
+case 2:
+kindStr = "method";
+break;
+case 3:
+kindStr = "getter";
+break;
+case 4:
+kindStr = "setter";
+break;
+default:
+kindStr = "field";
+}
+var ctx = {
+kind: kindStr,
+name: isPrivate ? "#" + name : name,
+static: isStatic,
+private: isPrivate,
+metadata: metadata
+};
+var decoratorFinishedRef = {
+v: false
+};
+ctx.addInitializer = createAddInitializerMethod(initializers, decoratorFinishedRef);
+var get, set;
+if (kind === 0) {
+if (isPrivate) {
+get = desc.get;
+set = desc.set;
+} else {
+get = function() {
+return this[name];
+};
+set = function(v) {
+this[name] = v;
+};
+}
+} else if (kind === 2) {
+get = function() {
+return desc.value;
+};
+} else {
+if (kind === 1 || kind === 3) {
+get = function() {
+return desc.get.call(this);
+};
+}
+if (kind === 1 || kind === 4) {
+set = function(v) {
+desc.set.call(this, v);
+};
+}
+}
+ctx.access = get && set ? {
+get: get,
+set: set
+} : get ? {
+get: get
+} : {
+set: set
+};
+try {
+return dec(value, ctx);
+} finally{
+decoratorFinishedRef.v = true;
+}
+}
+function assertNotFinished(decoratorFinishedRef, fnName) {
+if (decoratorFinishedRef.v) {
+throw new Error("attempted to call " + fnName + " after decoration was finished");
+}
+}
+function assertCallable(fn, hint) {
+if (typeof fn !== "function") {
+throw new TypeError(hint + " must be a function");
+}
+}
+function assertValidReturnValue(kind, value) {
+var type = typeof value;
+if (kind === 1) {
+if (type !== "object" || value === null) {
+throw new TypeError("accessor decorators must return an object with get, set, or init properties or void 0");
+}
+if (value.get !== undefined) {
+assertCallable(value.get, "accessor.get");
+}
+if (value.set !== undefined) {
+assertCallable(value.set, "accessor.set");
+}
+if (value.init !== undefined) {
+assertCallable(value.init, "accessor.init");
+}
+} else if (type !== "function") {
+var hint;
+if (kind === 0) {
+hint = "field";
+} else if (kind === 10) {
+hint = "class";
+} else {
+hint = "method";
+}
+throw new TypeError(hint + " decorators must return a function or void 0");
+}
+}
+function applyMemberDec(ret, base, decInfo, name, kind, isStatic, isPrivate, initializers, metadata) {
+var decs = decInfo[0];
+var desc, init, value;
+if (isPrivate) {
+if (kind === 0 || kind === 1) {
+desc = {
+get: decInfo[3],
+set: decInfo[4]
+};
+} else if (kind === 3) {
+desc = {
+get: decInfo[3]
+};
+} else if (kind === 4) {
+desc = {
+set: decInfo[3]
+};
+} else {
+desc = {
+value: decInfo[3]
+};
+}
+} else if (kind !== 0) {
+desc = Object.getOwnPropertyDescriptor(base, name);
+}
+if (kind === 1) {
+value = {
+get: desc.get,
+set: desc.set
+};
+} else if (kind === 2) {
+value = desc.value;
+} else if (kind === 3) {
+value = desc.get;
+} else if (kind === 4) {
+value = desc.set;
+}
+var newValue, get, set;
+if (typeof decs === "function") {
+newValue = memberDec(decs, name, desc, initializers, kind, isStatic, isPrivate, metadata, value);
+if (newValue !== void 0) {
+assertValidReturnValue(kind, newValue);
+if (kind === 0) {
+init = newValue;
+} else if (kind === 1) {
+init = newValue.init;
+get = newValue.get || value.get;
+set = newValue.set || value.set;
+value = {
+get: get,
+set: set
+};
+} else {
+value = newValue;
+}
+}
+} else {
+for(var i = decs.length - 1; i >= 0; i--){
+var dec = decs[i];
+newValue = memberDec(dec, name, desc, initializers, kind, isStatic, isPrivate, metadata, value);
+if (newValue !== void 0) {
+assertValidReturnValue(kind, newValue);
+var newInit;
+if (kind === 0) {
+newInit = newValue;
+} else if (kind === 1) {
+newInit = newValue.init;
+get = newValue.get || value.get;
+set = newValue.set || value.set;
+value = {
+get: get,
+set: set
+};
+} else {
+value = newValue;
+}
+if (newInit !== void 0) {
+if (init === void 0) {
+init = newInit;
+} else if (typeof init === "function") {
+init = [
+init,
+newInit
+];
+} else {
+init.push(newInit);
+}
+}
+}
+}
+}
+if (kind === 0 || kind === 1) {
+if (init === void 0) {
+init = function(instance, init) {
+return init;
+};
+} else if (typeof init !== "function") {
+var ownInitializers = init;
+init = function(instance, init) {
+var value = init;
+for(var i = 0; i < ownInitializers.length; i++){
+value = ownInitializers[i].call(instance, value);
+}
+return value;
+};
+} else {
+var originalInitializer = init;
+init = function(instance, init) {
+return originalInitializer.call(instance, init);
+};
+}
+ret.push(init);
+}
+if (kind !== 0) {
+if (kind === 1) {
+desc.get = value.get;
+desc.set = value.set;
+} else if (kind === 2) {
+desc.value = value;
+} else if (kind === 3) {
+desc.get = value;
+} else if (kind === 4) {
+desc.set = value;
+}
+if (isPrivate) {
+if (kind === 1) {
+ret.push(function(instance, args) {
+return value.get.call(instance, args);
+});
+ret.push(function(instance, args) {
+return value.set.call(instance, args);
+});
+} else if (kind === 2) {
+ret.push(value);
+} else {
+ret.push(function(instance, args) {
+return value.call(instance, args);
+});
+}
+} else {
+Object.defineProperty(base, name, desc);
+}
+}
+}
+function applyMemberDecs(Class, decInfos, metadata) {
+var ret = [];
+var protoInitializers;
+var staticInitializers;
+var existingProtoNonFields = new Map();
+var existingStaticNonFields = new Map();
+for(var i = 0; i < decInfos.length; i++){
+var decInfo = decInfos[i];
+if (!Array.isArray(decInfo)) continue;
+var kind = decInfo[1];
+var name = decInfo[2];
+var isPrivate = decInfo.length > 3;
+var isStatic = kind >= 5;
+var base;
+var initializers;
+if (isStatic) {
+base = Class;
+kind = kind - 5;
+staticInitializers = staticInitializers || [];
+initializers = staticInitializers;
+} else {
+base = Class.prototype;
+protoInitializers = protoInitializers || [];
+initializers = protoInitializers;
+}
+if (kind !== 0 && !isPrivate) {
+var existingNonFields = isStatic ? existingStaticNonFields : existingProtoNonFields;
+var existingKind = existingNonFields.get(name) || 0;
+if (existingKind === true || existingKind === 3 && kind !== 4 || existingKind === 4 && kind !== 3) {
+throw new Error("Attempted to decorate a public method/accessor that has the same name as a previously decorated public method/accessor. This is not currently supported by the decorators plugin. Property name was: " + name);
+} else if (!existingKind && kind > 2) {
+existingNonFields.set(name, kind);
+} else {
+existingNonFields.set(name, true);
+}
+}
+applyMemberDec(ret, base, decInfo, name, kind, isStatic, isPrivate, initializers, metadata);
+}
+pushInitializers(ret, protoInitializers);
+pushInitializers(ret, staticInitializers);
+return ret;
+}
+function pushInitializers(ret, initializers) {
+if (initializers) {
+ret.push(function(instance) {
+for(var i = 0; i < initializers.length; i++){
+initializers[i].call(instance);
+}
+return instance;
+});
+}
+}
+function applyClassDecs(targetClass, classDecs, metadata) {
+if (classDecs.length > 0) {
+var initializers = [];
+var newClass = targetClass;
+var name = targetClass.name;
+for(var i = classDecs.length - 1; i >= 0; i--){
+var decoratorFinishedRef = {
+v: false
+};
+try {
+var nextNewClass = classDecs[i](newClass, {
+kind: "class",
+name: name,
+addInitializer: createAddInitializerMethod(initializers, decoratorFinishedRef),
+metadata
+});
+} finally{
+decoratorFinishedRef.v = true;
+}
+if (nextNewClass !== undefined) {
+assertValidReturnValue(10, nextNewClass);
+newClass = nextNewClass;
+}
+}
+return [
+defineMetadata(newClass, metadata),
+function() {
+for(var i = 0; i < initializers.length; i++){
+initializers[i].call(newClass);
+}
+}
+];
+}
+}
+function defineMetadata(Class, metadata) {
+return Object.defineProperty(Class, Symbol.metadata || Symbol.for("Symbol.metadata"), {
+configurable: true,
+enumerable: true,
+value: metadata
+});
+}
+return function applyDecs2203R(targetClass, memberDecs, classDecs, parentClass) {
+if (parentClass !== void 0) {
+var parentMetadata = parentClass[Symbol.metadata || Symbol.for("Symbol.metadata")];
+}
+var metadata = Object.create(parentMetadata === void 0 ? null : parentMetadata);
+var e = applyMemberDecs(targetClass, memberDecs, metadata);
+if (!classDecs.length) defineMetadata(targetClass, metadata);
+return {
+e: e,
+get c () {
+return applyClassDecs(targetClass, classDecs, metadata);
+}
+};
+};
 }
+function _apply_decs_2203_r(targetClass, memberDecs, classDecs, parentClass) {
+return (_apply_decs_2203_r = applyDecs2203RFactory())(targetClass, memberDecs, classDecs, parentClass);
+}
+var _dec, _initProto;
+import { GENESIS_BLOCK_HEADER_HASH } from '@aztec/constants';
 import { EpochCache } from '@aztec/epoch-cache';
-import {
+import { createEthereumChain } from '@aztec/ethereum/chain';
+import { BlockTagTooOldError, InboxContract, RollupContract } from '@aztec/ethereum/contracts';
 import { maxBigint } from '@aztec/foundation/bigint';
+import { BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-types';
 import { Buffer16, Buffer32 } from '@aztec/foundation/buffer';
-import { pick } from '@aztec/foundation/collection';
-import { Fr } from '@aztec/foundation/
+import { merge, pick } from '@aztec/foundation/collection';
+import { Fr } from '@aztec/foundation/curves/bn254';
 import { createLogger } from '@aztec/foundation/log';
+import { promiseWithResolvers } from '@aztec/foundation/promise';
 import { RunningPromise, makeLoggingErrorHandler } from '@aztec/foundation/running-promise';
-import { sleep } from '@aztec/foundation/sleep';
 import { count } from '@aztec/foundation/string';
-import { Timer, elapsed } from '@aztec/foundation/timer';
+import { DateProvider, Timer, elapsed } from '@aztec/foundation/timer';
+import { isDefined } from '@aztec/foundation/types';
 import { ContractClassPublishedEvent, PrivateFunctionBroadcastedEvent, UtilityFunctionBroadcastedEvent } from '@aztec/protocol-contracts/class-registry';
 import { ContractInstancePublishedEvent, ContractInstanceUpdatedEvent } from '@aztec/protocol-contracts/instance-registry';
-import { L2BlockSourceEvents } from '@aztec/stdlib/block';
+import { CommitteeAttestation, L2Block, L2BlockSourceEvents, PublishedL2Block } from '@aztec/stdlib/block';
+import { Checkpoint, PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
 import { computePublicBytecodeCommitment, isValidPrivateFunctionMembershipProof, isValidUtilityFunctionMembershipProof } from '@aztec/stdlib/contract';
 import { getEpochAtSlot, getEpochNumberAtTimestamp, getSlotAtTimestamp, getSlotRangeForEpoch, getTimestampRangeForEpoch } from '@aztec/stdlib/epoch-helpers';
-import {
+import { computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging';
+import { getTelemetryClient, trackSpan } from '@aztec/telemetry-client';
 import { EventEmitter } from 'events';
 import groupBy from 'lodash.groupby';
 import { createPublicClient, fallback, http } from 'viem';
-import {
-import { InitialBlockNumberNotSequentialError, NoBlobBodiesFoundError } from './errors.js';
+import { InitialCheckpointNumberNotSequentialError, NoBlobBodiesFoundError } from './errors.js';
 import { ArchiverInstrumentation } from './instrumentation.js';
-import {
+import { retrieveCheckpointsFromRollup, retrieveL1ToL2Message, retrieveL1ToL2Messages, retrievedToPublishedCheckpoint } from './l1/data_retrieval.js';
+import { validateAndLogTraceAvailability } from './l1/validate_trace.js';
+import { validateCheckpointAttestations } from './validation.js';
+function mapArchiverConfig(config) {
+return {
+pollingIntervalMs: config.archiverPollingIntervalMS,
+batchSize: config.archiverBatchSize,
+skipValidateBlockAttestations: config.skipValidateBlockAttestations,
+maxAllowedEthClientDriftSeconds: config.maxAllowedEthClientDriftSeconds,
+ethereumAllowNoDebugHosts: config.ethereumAllowNoDebugHosts
+};
+}
+_dec = trackSpan('Archiver.sync');
 /**
-* Pulls
+* Pulls checkpoints in a non-blocking manner and provides interface for their retrieval.
 * Responsible for handling robust L1 polling so that other components do not need to
 * concern themselves with it.
 */ export class Archiver extends EventEmitter {
 publicClient;
+debugClient;
 l1Addresses;
 dataStore;
 config;
-
+blobClient;
 epochCache;
+dateProvider;
 instrumentation;
 l1constants;
 log;
-
-
-
+static{
+({ e: [_initProto] } = _apply_decs_2203_r(this, [
+[
+_dec,
+2,
+"sync"
+]
+], []));
+}
+/** A loop in which we will be continually fetching new checkpoints. */ runningPromise;
 rollup;
 inbox;
 store;
 l1BlockNumber;
 l1Timestamp;
-pendingChainValidationStatus;
 initialSyncComplete;
+initialSyncPromise;
+/** Queue of blocks to be added to the store, processed by the sync loop. */ blockQueue;
 tracer;
 /**
 * Creates a new instance of the Archiver.
 * @param publicClient - A client for interacting with the Ethereum node.
+* @param debugClient - A client for interacting with the Ethereum node for debug/trace methods.
 * @param rollupAddress - Ethereum address of the rollup contract.
 * @param inboxAddress - Ethereum address of the inbox contract.
 * @param registryAddress - Ethereum address of the registry contract.
 * @param pollingIntervalMs - The interval for polling for L1 logs (in milliseconds).
 * @param store - An archiver data store for storage & retrieval of blocks, encrypted logs & contract data.
 * @param log - A logger.
-*/ constructor(publicClient, l1Addresses, dataStore, config,
-super(), this.publicClient = publicClient, this.l1Addresses = l1Addresses, this.dataStore = dataStore, this.config = config, this.
-valid: true
-}, this.initialSyncComplete = false;
+*/ constructor(publicClient, debugClient, l1Addresses, dataStore, config, blobClient, epochCache, dateProvider, instrumentation, l1constants, log = createLogger('archiver')){
+super(), this.publicClient = publicClient, this.debugClient = debugClient, this.l1Addresses = l1Addresses, this.dataStore = dataStore, this.config = config, this.blobClient = blobClient, this.epochCache = epochCache, this.dateProvider = dateProvider, this.instrumentation = instrumentation, this.l1constants = l1constants, this.log = log, this.initialSyncComplete = (_initProto(this), false), this.blockQueue = [];
 this.tracer = instrumentation.tracer;
 this.store = new ArchiverStoreHelper(dataStore);
 this.rollup = new RollupContract(publicClient, l1Addresses.rollupAddress);
 this.inbox = new InboxContract(publicClient, l1Addresses.inboxAddress);
+this.initialSyncPromise = promiseWithResolvers();
+// Running promise starts with a small interval inbetween runs, so all iterations needed for the initial sync
+// are done as fast as possible. This then gets updated once the initial sync completes.
+this.runningPromise = new RunningPromise(()=>this.sync(), this.log, this.config.pollingIntervalMs / 10, makeLoggingErrorHandler(this.log, NoBlobBodiesFoundError, BlockTagTooOldError));
 }
 /**
 * Creates a new instance of the Archiver and blocks until it syncs from chain.
@@ -81,14 +478,27 @@ import { validateBlockAttestations } from './validation.js';
 const chain = createEthereumChain(config.l1RpcUrls, config.l1ChainId);
 const publicClient = createPublicClient({
 chain: chain.chainInfo,
-transport: fallback(config.l1RpcUrls.map((url)=>http(url
+transport: fallback(config.l1RpcUrls.map((url)=>http(url, {
+batch: false
+}))),
+pollingInterval: config.viemPollingIntervalMS
+});
+// Create debug client using debug RPC URLs if available, otherwise fall back to regular RPC URLs
+const debugRpcUrls = config.l1DebugRpcUrls.length > 0 ? config.l1DebugRpcUrls : config.l1RpcUrls;
+const debugClient = createPublicClient({
+chain: chain.chainInfo,
+transport: fallback(debugRpcUrls.map((url)=>http(url, {
+batch: false
+}))),
 pollingInterval: config.viemPollingIntervalMS
 });
 const rollup = new RollupContract(publicClient, config.l1Contracts.rollupAddress);
-const [l1StartBlock, l1GenesisTime, proofSubmissionEpochs] = await Promise.all([
+const [l1StartBlock, l1GenesisTime, proofSubmissionEpochs, genesisArchiveRoot, slashingProposerAddress] = await Promise.all([
 rollup.getL1StartBlock(),
 rollup.getL1GenesisTime(),
-rollup.getProofSubmissionEpochs()
+rollup.getProofSubmissionEpochs(),
+rollup.getGenesisArchiveTreeRoot(),
+rollup.getSlashingProposerAddress()
 ]);
 const l1StartBlockHash = await publicClient.getBlock({
 blockNumber: l1StartBlock,
@@ -102,61 +512,117 @@ import { validateBlockAttestations } from './validation.js';
 epochDuration,
 slotDuration,
 ethereumSlotDuration,
-proofSubmissionEpochs: Number(proofSubmissionEpochs)
-
-const opts = {
-pollingIntervalMs: config.archiverPollingIntervalMS ?? 10_000,
-batchSize: config.archiverBatchSize ?? 100
+proofSubmissionEpochs: Number(proofSubmissionEpochs),
+genesisArchiveRoot: Fr.fromString(genesisArchiveRoot.toString())
 };
+const opts = merge({
+pollingIntervalMs: 10_000,
+batchSize: 100,
+maxAllowedEthClientDriftSeconds: 300,
+ethereumAllowNoDebugHosts: false
+}, mapArchiverConfig(config));
 const epochCache = deps.epochCache ?? await EpochCache.create(config.l1Contracts.rollupAddress, config, deps);
 const telemetry = deps.telemetry ?? getTelemetryClient();
-const archiver = new Archiver(publicClient,
+const archiver = new Archiver(publicClient, debugClient, {
+...config.l1Contracts,
+slashingProposerAddress
+}, archiverStore, opts, deps.blobClient, epochCache, deps.dateProvider ?? new DateProvider(), await ArchiverInstrumentation.new(telemetry, ()=>archiverStore.estimateSize()), l1Constants);
 await archiver.start(blockUntilSynced);
 return archiver;
 }
+/** Updates archiver config */ updateConfig(newConfig) {
+this.config = merge(this.config, mapArchiverConfig(newConfig));
+}
 /**
 * Starts sync process.
 * @param blockUntilSynced - If true, blocks until the archiver has fully synced.
 */ async start(blockUntilSynced) {
-if (this.runningPromise) {
+if (this.runningPromise.isRunning()) {
 throw new Error('Archiver is already running');
 }
-await this.
+await this.blobClient.testSources();
+await this.testEthereumNodeSynced();
+await validateAndLogTraceAvailability(this.debugClient, this.config.ethereumAllowNoDebugHosts ?? false);
+// Log initial state for the archiver
+const { l1StartBlock } = this.l1constants;
+const { blocksSynchedTo = l1StartBlock, messagesSynchedTo = l1StartBlock } = await this.store.getSynchPoint();
+const currentL2Checkpoint = await this.getSynchedCheckpointNumber();
+this.log.info(`Starting archiver sync to rollup contract ${this.l1Addresses.rollupAddress.toString()} from L1 block ${blocksSynchedTo} and L2 checkpoint ${currentL2Checkpoint}`, {
+blocksSynchedTo,
+messagesSynchedTo,
+currentL2Checkpoint
+});
+// Start sync loop, and return the wait for initial sync if we are asked to block until synced
+this.runningPromise.start();
 if (blockUntilSynced) {
-
-this.log.info(`Retrying initial archiver sync in ${this.config.pollingIntervalMs}ms`);
-await sleep(this.config.pollingIntervalMs);
-}
+return this.waitForInitialSync();
 }
-this.runningPromise = new RunningPromise(()=>this.sync(false), this.log, this.config.pollingIntervalMs, makeLoggingErrorHandler(this.log, // Ignored errors will not log to the console
-// We ignore NoBlobBodiesFound as the message may not have been passed to the blob sink yet
-NoBlobBodiesFoundError));
-this.runningPromise.start();
 }
 syncImmediate() {
-if (!this.runningPromise) {
-throw new Error('Archiver is not running');
-}
 return this.runningPromise.trigger();
 }
-
-
-
-
-
-
-
-
-
-
-
+/**
+* Queues a block to be added to the archiver store and triggers processing.
+* The block will be processed by the sync loop.
+* Implements the L2BlockSink interface.
+* @param block - The L2 block to add.
+* @returns A promise that resolves when the block has been added to the store, or rejects on error.
+*/ addBlock(block) {
+return new Promise((resolve, reject)=>{
+this.blockQueue.push({
+block,
+resolve,
+reject
+});
+this.log.debug(`Queued block ${block.number} for processing`);
+// Trigger an immediate sync, but don't wait for it - the promise resolves when the block is processed
+this.syncImmediate().catch((err)=>{
+this.log.error(`Sync immediate call failed: ${err}`);
+});
+});
+}
+/**
+* Processes all queued blocks, adding them to the store.
+* Called at the beginning of each sync iteration.
+* Blocks are processed in the order they were queued.
+*/ async processQueuedBlocks() {
+if (this.blockQueue.length === 0) {
+return;
+}
+// Take all blocks from the queue
+const queuedItems = this.blockQueue.splice(0, this.blockQueue.length);
+this.log.debug(`Processing ${queuedItems.length} queued block(s)`);
+// Process each block individually to properly resolve/reject each promise
+for (const { block, resolve, reject } of queuedItems){
+try {
+await this.store.addBlocks([
+block
+]);
+this.log.debug(`Added block ${block.number} to store`);
+resolve();
+} catch (err) {
+this.log.error(`Failed to add block ${block.number} to store: ${err.message}`);
+reject(err);
 }
-return false;
 }
 }
-
-
-
+waitForInitialSync() {
+return this.initialSyncPromise.promise;
+}
+/** Checks that the ethereum node we are connected to has a latest timestamp no more than the allowed drift. Throw if not. */ async testEthereumNodeSynced() {
+const maxAllowedDelay = this.config.maxAllowedEthClientDriftSeconds;
+if (maxAllowedDelay === 0) {
+return;
+}
+const { number, timestamp: l1Timestamp } = await this.publicClient.getBlock({
+includeTransactions: false
+});
+const currentTime = BigInt(this.dateProvider.nowInSeconds());
+if (currentTime - l1Timestamp > BigInt(maxAllowedDelay)) {
+throw new Error(`Ethereum node is out of sync (last block synced ${number} at ${l1Timestamp} vs current time ${currentTime})`);
+}
+}
+async syncFromL1() {
 /**
 * We keep track of three "pointers" to L1 blocks:
 * 1. the last L1 block that published an L2 block
@@ -166,8 +632,6 @@ import { validateBlockAttestations } from './validation.js';
 * We do this to deal with L1 data providers that are eventually consistent (e.g. Infura).
 * We guard against seeing block X with no data at one point, and later, the provider processes the block and it has data.
 * The archiver will stay back, until there's data on L1 that will move the pointers forward.
-*
-* This code does not handle reorgs.
 */ const { l1StartBlock, l1StartBlockHash } = this.l1constants;
 const { blocksSynchedTo = l1StartBlock, messagesSynchedTo = {
 l1BlockNumber: l1StartBlock,
@@ -178,12 +642,12 @@ import { validateBlockAttestations } from './validation.js';
 });
 const currentL1BlockNumber = currentL1Block.number;
 const currentL1BlockHash = Buffer32.fromString(currentL1Block.hash);
-
-
-
-
-
-}
+this.log.trace(`Starting new archiver sync iteration`, {
+blocksSynchedTo,
+messagesSynchedTo,
+currentL1BlockNumber,
+currentL1BlockHash
+});
 // ********** Ensuring Consistency of data pulled from L1 **********
 /**
 * There are a number of calls in this sync operation to L1 for retrieving
@@ -206,28 +670,40 @@ import { validateBlockAttestations } from './validation.js';
 const currentL1Timestamp = !this.l1Timestamp || !this.l1BlockNumber || this.l1BlockNumber !== currentL1BlockNumber ? (await this.publicClient.getBlock({
 blockNumber: currentL1BlockNumber
 })).timestamp : this.l1Timestamp;
-//
+// Warn if the latest L1 block timestamp is too old
+const maxAllowedDelay = this.config.maxAllowedEthClientDriftSeconds;
+const now = this.dateProvider.nowInSeconds();
+if (maxAllowedDelay > 0 && Number(currentL1Timestamp) <= now - maxAllowedDelay) {
+this.log.warn(`Latest L1 block ${currentL1BlockNumber} timestamp ${currentL1Timestamp} is too old. Make sure your Ethereum node is synced.`, {
+currentL1BlockNumber,
+currentL1Timestamp,
+now,
+maxAllowedDelay
+});
+}
+// ********** Events that are processed per checkpoint **********
 if (currentL1BlockNumber > blocksSynchedTo) {
-// First we retrieve new L2 blocks
-
+// First we retrieve new checkpoints and L2 blocks and store them in the DB. This will also update the
+// pending chain validation status, proven checkpoint number, and synched L1 block number.
+const rollupStatus = await this.handleCheckpoints(blocksSynchedTo, currentL1BlockNumber);
 // Then we prune the current epoch if it'd reorg on next submission.
-// Note that we don't do this before retrieving
-//
+// Note that we don't do this before retrieving checkpoints because we may need to retrieve
+// checkpoints from more than 2 epochs ago, so we want to make sure we have the latest view of
 // the chain locally before we start unwinding stuff. This can be optimized by figuring out
-// up to which point we're pruning, and then requesting
-const { rollupCanPrune } = await this.handleEpochPrune(rollupStatus.
-//
-//
-// we
-if (rollupStatus.validationResult && rollupStatus.
-this.
+// up to which point we're pruning, and then requesting checkpoints up to that point only.
+const { rollupCanPrune } = await this.handleEpochPrune(rollupStatus.provenCheckpointNumber, currentL1BlockNumber, currentL1Timestamp);
+// If the last checkpoint we processed had an invalid attestation, we manually advance the L1 syncpoint
+// past it, since otherwise we'll keep downloading it and reprocessing it on every iteration until
+// we get a valid checkpoint to advance the syncpoint.
+if (!rollupStatus.validationResult?.valid && rollupStatus.lastL1BlockWithCheckpoint !== undefined) {
+await this.store.setCheckpointSynchedL1BlockNumber(rollupStatus.lastL1BlockWithCheckpoint);
 }
-// And lastly we check if we are missing any
+// And lastly we check if we are missing any checkpoints behind us due to a possible L1 reorg.
 // We only do this if rollup cant prune on the next submission. Otherwise we will end up
-// re-syncing the
+// re-syncing the checkpoints we have just unwound above. We also dont do this if the last checkpoint is invalid,
 // since the archiver will rightfully refuse to sync up to it.
-if (!rollupCanPrune &&
-await this.
+if (!rollupCanPrune && rollupStatus.validationResult?.valid) {
+await this.checkForNewCheckpointsBeforeL1SyncPoint(rollupStatus, blocksSynchedTo, currentL1BlockNumber);
 }
 this.instrumentation.updateL1BlockHeight(currentL1BlockNumber);
 }
@@ -236,15 +712,27 @@ import { validateBlockAttestations } from './validation.js';
 // but the corresponding blocks have not been processed (see #12631).
 this.l1Timestamp = currentL1Timestamp;
 this.l1BlockNumber = currentL1BlockNumber;
-
-if
-
+// We resolve the initial sync only once we've caught up with the latest L1 block number (with 1 block grace)
+// so if the initial sync took too long, we still go for another iteration.
+if (!this.initialSyncComplete && currentL1BlockNumber + 1n >= await this.publicClient.getBlockNumber()) {
+this.log.info(`Initial archiver sync to L1 block ${currentL1BlockNumber} complete`, {
 l1BlockNumber: currentL1BlockNumber,
 syncPoint: await this.store.getSynchPoint(),
 ...await this.getL2Tips()
 });
+this.runningPromise.setPollingIntervalMS(this.config.pollingIntervalMs);
+this.initialSyncComplete = true;
+this.initialSyncPromise.resolve();
 }
 }
+/**
+* Fetches logs from L1 contracts and processes them.
+*/ async sync() {
+// Process any queued blocks first, before doing L1 sync
+await this.processQueuedBlocks();
+// Now perform L1 sync
+await this.syncFromL1();
+}
 /** Queries the rollup contract on whether a prune can be executed on the immediate next L1 block. */ async canPrune(currentL1BlockNumber, currentL1Timestamp) {
 const time = (currentL1Timestamp ?? 0n) + BigInt(this.l1constants.ethereumSlotDuration);
 const result = await this.rollup.canPruneAtTime(time, {
@@ -259,30 +747,37 @@ import { validateBlockAttestations } from './validation.js';
 }
 return result;
 }
-/** Checks if there'd be a reorg for the next
+/** Checks if there'd be a reorg for the next checkpoint submission and start pruning now. */ async handleEpochPrune(provenCheckpointNumber, currentL1BlockNumber, currentL1Timestamp) {
 const rollupCanPrune = await this.canPrune(currentL1BlockNumber, currentL1Timestamp);
-const
-const canPrune =
+const localPendingCheckpointNumber = await this.getSynchedCheckpointNumber();
+const canPrune = localPendingCheckpointNumber > provenCheckpointNumber && rollupCanPrune;
 if (canPrune) {
 const timer = new Timer();
-const pruneFrom =
-const header = await this.
+const pruneFrom = CheckpointNumber(provenCheckpointNumber + 1);
+const header = await this.getCheckpointHeader(pruneFrom);
 if (header === undefined) {
-throw new Error(`Missing
+throw new Error(`Missing checkpoint header ${pruneFrom}`);
 }
-const pruneFromSlotNumber = header.
+const pruneFromSlotNumber = header.slotNumber;
 const pruneFromEpochNumber = getEpochAtSlot(pruneFromSlotNumber, this.l1constants);
-const
-const
+const checkpointsToUnwind = localPendingCheckpointNumber - provenCheckpointNumber;
+const checkpointPromises = Array.from({
+length: checkpointsToUnwind
+}).fill(0).map((_, i)=>this.store.getCheckpointData(CheckpointNumber(i + pruneFrom)));
+const checkpoints = await Promise.all(checkpointPromises);
+const blockPromises = await Promise.all(checkpoints.filter(isDefined).map((cp)=>this.store.getBlocksForCheckpoint(CheckpointNumber(cp.checkpointNumber))));
+const newBlocks = blockPromises.filter(isDefined).flat();
+// TODO(pw/mbps): Don't convert to legacy blocks here
+const blocks = (await Promise.all(newBlocks.map((x)=>this.getBlock(x.number)))).filter(isDefined);
 // Emit an event for listening services to react to the chain prune
 this.emit(L2BlockSourceEvents.L2PruneDetected, {
 type: L2BlockSourceEvents.L2PruneDetected,
 epochNumber: pruneFromEpochNumber,
 blocks
 });
-this.log.debug(`L2 prune from ${
-await this.
-this.log.warn(`Unwound ${count(
+this.log.debug(`L2 prune from ${provenCheckpointNumber + 1} to ${localPendingCheckpointNumber} will occur on next checkpoint submission.`);
+await this.unwindCheckpoints(localPendingCheckpointNumber, checkpointsToUnwind);
+this.log.warn(`Unwound ${count(checkpointsToUnwind, 'checkpoint')} from checkpoint ${localPendingCheckpointNumber} ` + `to ${provenCheckpointNumber} due to predicted reorg at L1 block ${currentL1BlockNumber}. ` + `Updated latest checkpoint is ${await this.getSynchedCheckpointNumber()}.`);
 this.instrumentation.processPrune(timer.ms());
 // TODO(palla/reorg): Do we need to set the block synched L1 block number here?
 // Seems like the next iteration should handle this.
@@ -325,7 +820,7 @@ import { validateBlockAttestations } from './validation.js';
 });
 // Compare message count and rolling hash. If they match, no need to retrieve anything.
 if (remoteMessagesState.totalMessagesInserted === localMessagesInserted && remoteMessagesState.messagesRollingHash.equals(localLastMessage?.rollingHash ?? Buffer16.ZERO)) {
-this.log.
+this.log.trace(`No L1 to L2 messages to query between L1 blocks ${messagesSyncPoint.l1BlockNumber} and ${currentL1BlockNumber}.`);
 return;
 }
 // Check if our syncpoint is still valid. If not, there was an L1 reorg and we need to re-retrieve messages.
@@ -374,7 +869,7 @@ import { validateBlockAttestations } from './validation.js';
 }while (searchEndBlock < currentL1BlockNumber)
 // Log stats for messages retrieved (if any).
 if (messageCount > 0) {
-this.log.info(`Retrieved ${messageCount} new L1 to L2 messages up to message with index ${lastMessage?.index} for
+this.log.info(`Retrieved ${messageCount} new L1 to L2 messages up to message with index ${lastMessage?.index} for checkpoint ${lastMessage?.checkpointNumber}`, {
 lastMessage,
 messageCount
 });
@@ -451,156 +946,171 @@ import { validateBlockAttestations } from './validation.js';
 }
 return Buffer32.fromString(block.hash);
 }
-async
-const
-const
+async handleCheckpoints(blocksSynchedTo, currentL1BlockNumber) {
+const localPendingCheckpointNumber = await this.getSynchedCheckpointNumber();
+const initialValidationResult = await this.store.getPendingChainValidationStatus();
+const { provenCheckpointNumber, provenArchive, pendingCheckpointNumber, pendingArchive, archiveOfMyCheckpoint: archiveForLocalPendingCheckpointNumber } = await this.rollup.status(localPendingCheckpointNumber, {
 blockNumber: currentL1BlockNumber
 });
 const rollupStatus = {
-
-provenArchive,
-
-pendingArchive,
-validationResult:
+provenCheckpointNumber,
+provenArchive: provenArchive.toString(),
+pendingCheckpointNumber,
+pendingArchive: pendingArchive.toString(),
+validationResult: initialValidationResult
 };
 this.log.trace(`Retrieved rollup status at current L1 block ${currentL1BlockNumber}.`, {
-
+localPendingCheckpointNumber,
 blocksSynchedTo,
 currentL1BlockNumber,
-
+archiveForLocalPendingCheckpointNumber,
 ...rollupStatus
 });
-const
-// Annoying edge case: if proven
-// we need to set it to zero. This is an edge case because we dont have a
-// so
-if (
-const
-if (
-await this.
-this.log.info(`Rolled back proven chain to
-
+const updateProvenCheckpoint = async ()=>{
+// Annoying edge case: if proven checkpoint is moved back to 0 due to a reorg at the beginning of the chain,
+// we need to set it to zero. This is an edge case because we dont have a checkpoint zero (initial checkpoint is one),
+// so localCheckpointForDestinationProvenCheckpointNumber would not be found below.
+if (provenCheckpointNumber === 0) {
+const localProvenCheckpointNumber = await this.getProvenCheckpointNumber();
+if (localProvenCheckpointNumber !== provenCheckpointNumber) {
+await this.setProvenCheckpointNumber(provenCheckpointNumber);
+this.log.info(`Rolled back proven chain to checkpoint ${provenCheckpointNumber}`, {
+provenCheckpointNumber
 });
 }
 }
-const
-// Sanity check. I've hit what seems to be a state where the proven
-// synched
-const synched = await this.
-if (
-this.log.error(`Hit local
+const localCheckpointForDestinationProvenCheckpointNumber = await this.store.getCheckpointData(provenCheckpointNumber);
+// Sanity check. I've hit what seems to be a state where the proven checkpoint is set to a value greater than the latest
+// synched checkpoint when requesting L2Tips from the archiver. This is the only place where the proven checkpoint is set.
+const synched = await this.getSynchedCheckpointNumber();
+if (localCheckpointForDestinationProvenCheckpointNumber && synched < localCheckpointForDestinationProvenCheckpointNumber.checkpointNumber) {
+this.log.error(`Hit local checkpoint greater than last synched checkpoint: ${localCheckpointForDestinationProvenCheckpointNumber.checkpointNumber} > ${synched}`);
 }
-this.log.trace(`Local
-if (
-const
-if (
-await this.
-this.log.info(`Updated proven chain to
-
+this.log.trace(`Local checkpoint for remote proven checkpoint ${provenCheckpointNumber} is ${localCheckpointForDestinationProvenCheckpointNumber?.archive.root.toString() ?? 'undefined'}`);
+if (localCheckpointForDestinationProvenCheckpointNumber && provenArchive.equals(localCheckpointForDestinationProvenCheckpointNumber.archive.root)) {
+const localProvenCheckpointNumber = await this.getProvenCheckpointNumber();
+if (localProvenCheckpointNumber !== provenCheckpointNumber) {
+await this.setProvenCheckpointNumber(provenCheckpointNumber);
+this.log.info(`Updated proven chain to checkpoint ${provenCheckpointNumber}`, {
+provenCheckpointNumber
 });
-const provenSlotNumber =
+const provenSlotNumber = localCheckpointForDestinationProvenCheckpointNumber.header.slotNumber;
 const provenEpochNumber = getEpochAtSlot(provenSlotNumber, this.l1constants);
+const lastBlockNumberInCheckpoint = localCheckpointForDestinationProvenCheckpointNumber.startBlock + localCheckpointForDestinationProvenCheckpointNumber.numBlocks - 1;
 this.emit(L2BlockSourceEvents.L2BlockProven, {
 type: L2BlockSourceEvents.L2BlockProven,
-blockNumber:
+blockNumber: BlockNumber(lastBlockNumberInCheckpoint),
 slotNumber: provenSlotNumber,
 epochNumber: provenEpochNumber
 });
+this.instrumentation.updateLastProvenBlock(lastBlockNumberInCheckpoint);
 } else {
-this.log.trace(`Proven
+this.log.trace(`Proven checkpoint ${provenCheckpointNumber} already stored.`);
 }
 }
-this.instrumentation.updateLastProvenBlock(Number(provenBlockNumber));
 };
-// This is an edge case that we only hit if there are no proposed
-// If we have 0
-const
-if (
-await this.store.
-this.log.debug(`No
+// This is an edge case that we only hit if there are no proposed checkpoints.
+// If we have 0 checkpoints locally and there are no checkpoints onchain there is nothing to do.
+const noCheckpoints = localPendingCheckpointNumber === 0 && pendingCheckpointNumber === 0;
+if (noCheckpoints) {
+await this.store.setCheckpointSynchedL1BlockNumber(currentL1BlockNumber);
+this.log.debug(`No checkpoints to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}, no checkpoints on chain`);
 return rollupStatus;
 }
-await
+await updateProvenCheckpoint();
 // Related to the L2 reorgs of the pending chain. We are only interested in actually addressing a reorg if there
-// are any state that could be impacted by it. If we have no
-if (
-const
-if (
-throw new Error(`Missing
+// are any state that could be impacted by it. If we have no checkpoints, there is no impact.
+if (localPendingCheckpointNumber > 0) {
+const localPendingCheckpoint = await this.store.getCheckpointData(localPendingCheckpointNumber);
+if (localPendingCheckpoint === undefined) {
+throw new Error(`Missing checkpoint ${localPendingCheckpointNumber}`);
 }
-const localPendingArchiveRoot =
-const
-if (
+const localPendingArchiveRoot = localPendingCheckpoint.archive.root.toString();
+const noCheckpointSinceLast = localPendingCheckpoint && pendingArchive.toString() === localPendingArchiveRoot;
+if (noCheckpointSinceLast) {
 // We believe the following line causes a problem when we encounter L1 re-orgs.
 // Basically, by setting the synched L1 block number here, we are saying that we have
-// processed all
+// processed all checkpoints up to the current L1 block number and we will not attempt to retrieve logs from
 // this block again (or any blocks before).
-// However, in the re-org scenario, our L1 node is temporarily lying to us and we end up potentially missing
+// However, in the re-org scenario, our L1 node is temporarily lying to us and we end up potentially missing checkpoints.
 // We must only set this block number based on actually retrieved logs.
 // TODO(#8621): Tackle this properly when we handle L1 Re-orgs.
 // await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber);
-this.log.debug(`No
+this.log.debug(`No checkpoints to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
 return rollupStatus;
 }
-const
-if (!
-// If our local pending
+const localPendingCheckpointInChain = archiveForLocalPendingCheckpointNumber.equals(localPendingCheckpoint.archive.root);
+if (!localPendingCheckpointInChain) {
+// If our local pending checkpoint tip is not in the chain on L1 a "prune" must have happened
 // or the L1 have reorged.
 // In any case, we have to figure out how far into the past the action will take us.
-// For simplicity here, we will simply rewind until we end in a
-this.log.debug(`L2 prune has been detected due to local pending
-
+// For simplicity here, we will simply rewind until we end in a checkpoint that is also on the chain on L1.
+this.log.debug(`L2 prune has been detected due to local pending checkpoint ${localPendingCheckpointNumber} not in chain`, {
+localPendingCheckpointNumber,
 localPendingArchiveRoot,
-
+archiveForLocalPendingCheckpointNumber
 });
-let tipAfterUnwind =
+let tipAfterUnwind = localPendingCheckpointNumber;
 while(true){
-const
-if (
+const candidateCheckpoint = await this.store.getCheckpointData(tipAfterUnwind);
+if (candidateCheckpoint === undefined) {
 break;
 }
-const archiveAtContract = await this.rollup.archiveAt(
-
+const archiveAtContract = await this.rollup.archiveAt(candidateCheckpoint.checkpointNumber);
+this.log.trace(`Checking local checkpoint ${candidateCheckpoint.checkpointNumber} with archive ${candidateCheckpoint.archive.root}`, {
+archiveAtContract,
+archiveLocal: candidateCheckpoint.archive.root.toString()
+});
+if (archiveAtContract.equals(candidateCheckpoint.archive.root)) {
 break;
 }
 tipAfterUnwind--;
 }
-const
-await this.
-this.log.warn(`Unwound ${count(
+const checkpointsToUnwind = localPendingCheckpointNumber - tipAfterUnwind;
+await this.unwindCheckpoints(localPendingCheckpointNumber, checkpointsToUnwind);
+this.log.warn(`Unwound ${count(checkpointsToUnwind, 'checkpoint')} from checkpoint ${localPendingCheckpointNumber} ` + `due to mismatched checkpoint hashes at L1 block ${currentL1BlockNumber}. ` + `Updated L2 latest checkpoint is ${await this.getSynchedCheckpointNumber()}.`);
 }
 }
-// Retrieve
+// Retrieve checkpoints in batches. Each batch is estimated to accommodate up to 'blockBatchSize' L1 blocks,
|
|
574
1075
|
// computed using the L2 block time vs the L1 block time.
|
|
575
1076
|
let searchStartBlock = blocksSynchedTo;
|
|
576
1077
|
let searchEndBlock = blocksSynchedTo;
|
|
577
|
-
let
|
|
1078
|
+
let lastRetrievedCheckpoint;
|
|
1079
|
+
let lastL1BlockWithCheckpoint = undefined;
|
|
578
1080
|
do {
|
|
579
1081
|
[searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber);
|
|
580
|
-
this.log.trace(`Retrieving
|
|
581
|
-
// TODO(md): Retrieve from blob
|
|
582
|
-
const
|
|
583
|
-
if (
|
|
1082
|
+
this.log.trace(`Retrieving checkpoints from L1 block ${searchStartBlock} to ${searchEndBlock}`);
|
|
1083
|
+
// TODO(md): Retrieve from blob client then from consensus client, then from peers
|
|
1084
|
+
const retrievedCheckpoints = await retrieveCheckpointsFromRollup(this.rollup.getContract(), this.publicClient, this.debugClient, this.blobClient, searchStartBlock, searchEndBlock, this.l1Addresses, this.instrumentation, this.log, !this.initialSyncComplete);
|
|
1085
|
+
if (retrievedCheckpoints.length === 0) {
|
|
584
1086
|
// We are not calling `setBlockSynchedL1BlockNumber` because it may cause sync issues if based off infura.
|
|
585
1087
|
// See further details in earlier comments.
|
|
586
|
-
this.log.trace(`Retrieved no new
|
|
1088
|
+
this.log.trace(`Retrieved no new checkpoints from L1 block ${searchStartBlock} to ${searchEndBlock}`);
|
|
587
1089
|
continue;
|
|
588
1090
|
}
|
|
589
|
-
|
|
590
|
-
|
|
591
|
-
|
|
592
|
-
|
|
593
|
-
|
|
594
|
-
|
|
595
|
-
|
|
596
|
-
|
|
597
|
-
|
|
1091
|
+
this.log.debug(`Retrieved ${retrievedCheckpoints.length} new checkpoints between L1 blocks ${searchStartBlock} and ${searchEndBlock}`, {
|
|
1092
|
+
lastProcessedCheckpoint: retrievedCheckpoints[retrievedCheckpoints.length - 1].l1,
|
|
1093
|
+
searchStartBlock,
|
|
1094
|
+
searchEndBlock
|
|
1095
|
+
});
|
|
1096
|
+
const publishedCheckpoints = await Promise.all(retrievedCheckpoints.map((b)=>retrievedToPublishedCheckpoint(b)));
|
|
1097
|
+
const validCheckpoints = [];
|
|
1098
|
+
for (const published of publishedCheckpoints){
|
|
1099
|
+
const validationResult = this.config.skipValidateBlockAttestations ? {
|
|
1100
|
+
valid: true
|
|
1101
|
+
} : await validateCheckpointAttestations(published, this.epochCache, this.l1constants, this.log);
|
|
1102
|
+
// Only update the validation result if it has changed, so we can keep track of the first invalid checkpoint
|
|
1103
|
+
// in case there is a sequence of more than one invalid checkpoint, as we need to invalidate the first one.
|
|
1104
|
+
// There is an exception though: if a checkpoint is invalidated and replaced with another invalid checkpoint,
|
|
1105
|
+
// we need to update the validation result, since we need to be able to invalidate the new one.
|
|
1106
|
+
// See test 'chain progresses if an invalid checkpoint is invalidated with an invalid one' for more info.
|
|
1107
|
+
if (rollupStatus.validationResult?.valid !== validationResult.valid || !rollupStatus.validationResult.valid && !validationResult.valid && rollupStatus.validationResult.block.blockNumber === validationResult.block.blockNumber) {
|
|
598
1108
|
rollupStatus.validationResult = validationResult;
|
|
599
1109
|
}
|
|
600
1110
|
if (!validationResult.valid) {
|
|
601
|
-
this.log.warn(`Skipping
|
|
602
|
-
|
|
603
|
-
l1BlockNumber:
|
|
1111
|
+
this.log.warn(`Skipping checkpoint ${published.checkpoint.number} due to invalid attestations`, {
|
|
1112
|
+
checkpointHash: published.checkpoint.hash(),
|
|
1113
|
+
l1BlockNumber: published.l1.blockNumber,
|
|
604
1114
|
...pick(validationResult, 'reason')
|
|
605
1115
|
});
|
|
606
1116
|
// Emit event for invalid block detection
|
|
@@ -610,84 +1120,107 @@ import { validateBlockAttestations } from './validation.js';
 });
 continue;
 }
-
-
-
-
-
-
+// Check the inHash of the checkpoint against the l1->l2 messages.
+// The messages should've been synced up to the currentL1BlockNumber and must be available for the published
+// checkpoints we just retrieved.
+const l1ToL2Messages = await this.getL1ToL2Messages(published.checkpoint.number);
+const computedInHash = computeInHashFromL1ToL2Messages(l1ToL2Messages);
+const publishedInHash = published.checkpoint.header.contentCommitment.inHash;
+if (!computedInHash.equals(publishedInHash)) {
+this.log.fatal(`Mismatch inHash for checkpoint ${published.checkpoint.number}`, {
+checkpointHash: published.checkpoint.hash(),
+l1BlockNumber: published.l1.blockNumber,
+computedInHash,
+publishedInHash
+});
+// Throwing an error since this is most likely caused by a bug.
+throw new Error(`Mismatch inHash for checkpoint ${published.checkpoint.number}. Expected ${computedInHash} but got ${publishedInHash}`);
+}
+validCheckpoints.push(published);
+this.log.debug(`Ingesting new checkpoint ${published.checkpoint.number} with ${published.checkpoint.blocks.length} blocks`, {
+checkpointHash: published.checkpoint.hash(),
+l1BlockNumber: published.l1.blockNumber,
+...published.checkpoint.header.toInspect(),
+blocks: published.checkpoint.blocks.map((b)=>b.getStats())
 });
 }
 try {
-const
-
+const updatedValidationResult = rollupStatus.validationResult === initialValidationResult ? undefined : rollupStatus.validationResult;
+const [processDuration] = await elapsed(()=>this.addCheckpoints(validCheckpoints, updatedValidationResult));
+this.instrumentation.processNewBlocks(processDuration / validCheckpoints.length, validCheckpoints.flatMap((c)=>c.checkpoint.blocks));
 } catch (err) {
-if (err instanceof
-const {
-const
-const updatedL1SyncPoint =
+if (err instanceof InitialCheckpointNumberNotSequentialError) {
+const { previousCheckpointNumber, newCheckpointNumber } = err;
+const previousCheckpoint = previousCheckpointNumber ? await this.store.getCheckpointData(CheckpointNumber(previousCheckpointNumber)) : undefined;
+const updatedL1SyncPoint = previousCheckpoint?.l1.blockNumber ?? this.l1constants.l1StartBlock;
 await this.store.setBlockSynchedL1BlockNumber(updatedL1SyncPoint);
-this.log.warn(`Attempting to insert
-
-
-newBlockNumber,
+this.log.warn(`Attempting to insert checkpoint ${newCheckpointNumber} with previous block ${previousCheckpointNumber}. Rolling back L1 sync point to ${updatedL1SyncPoint} to try and fetch the missing blocks.`, {
+previousCheckpointNumber,
+newCheckpointNumber,
 updatedL1SyncPoint
 });
 }
 throw err;
 }
-for (const
-this.log.info(`Downloaded
-
-
-
-
-
-
+for (const checkpoint of validCheckpoints){
+this.log.info(`Downloaded checkpoint ${checkpoint.checkpoint.number}`, {
+checkpointHash: checkpoint.checkpoint.hash(),
+checkpointNumber: checkpoint.checkpoint.number,
+blockCount: checkpoint.checkpoint.blocks.length,
+txCount: checkpoint.checkpoint.blocks.reduce((acc, b)=>acc + b.body.txEffects.length, 0),
+header: checkpoint.checkpoint.header.toInspect(),
+archiveRoot: checkpoint.checkpoint.archive.root.toString(),
+archiveNextLeafIndex: checkpoint.checkpoint.archive.nextAvailableLeafIndex
 });
 }
-
+lastRetrievedCheckpoint = validCheckpoints.at(-1) ?? lastRetrievedCheckpoint;
+lastL1BlockWithCheckpoint = retrievedCheckpoints.at(-1)?.l1.blockNumber ?? lastL1BlockWithCheckpoint;
 }while (searchEndBlock < currentL1BlockNumber)
 // Important that we update AFTER inserting the blocks.
-await
+await updateProvenCheckpoint();
 return {
 ...rollupStatus,
-
+lastRetrievedCheckpoint,
+lastL1BlockWithCheckpoint
 };
 }
-async
-const {
-// Compare the last
+async checkForNewCheckpointsBeforeL1SyncPoint(status, blocksSynchedTo, currentL1BlockNumber) {
+const { lastRetrievedCheckpoint, pendingCheckpointNumber } = status;
+// Compare the last checkpoint we have (either retrieved in this round or loaded from store) with what the
 // rollup contract told us was the latest one (pinned at the currentL1BlockNumber).
-const
-if (
+const latestLocalCheckpointNumber = lastRetrievedCheckpoint?.checkpoint.number ?? await this.getSynchedCheckpointNumber();
+if (latestLocalCheckpointNumber < pendingCheckpointNumber) {
 // Here we have consumed all logs until the `currentL1Block` we pinned at the beginning of the archiver loop,
-// but still
-// We suspect an L1 reorg that added
-// last
-// don't have one, we go back 2 L1 epochs, which is the deepest possible reorg (assuming Casper is working).
-
-
-
-
-
-
+// but still haven't reached the pending checkpoint according to the call to the rollup contract.
+// We suspect an L1 reorg that added checkpoints *behind* us. If that is the case, it must have happened between
+// the last checkpoint we saw and the current one, so we reset the last synched L1 block number. In the edge case
+// we don't have one, we go back 2 L1 epochs, which is the deepest possible reorg (assuming Casper is working).
+let latestLocalCheckpointArchive = undefined;
+let targetL1BlockNumber = maxBigint(currentL1BlockNumber - 64n, 0n);
+if (lastRetrievedCheckpoint) {
+latestLocalCheckpointArchive = lastRetrievedCheckpoint.checkpoint.archive.root.toString();
+targetL1BlockNumber = lastRetrievedCheckpoint.l1.blockNumber;
+} else if (latestLocalCheckpointNumber > 0) {
+const checkpoint = await this.store.getRangeOfCheckpoints(latestLocalCheckpointNumber, 1).then(([c])=>c);
+latestLocalCheckpointArchive = checkpoint.archive.root.toString();
+targetL1BlockNumber = checkpoint.l1.blockNumber;
+}
+this.log.warn(`Failed to reach checkpoint ${pendingCheckpointNumber} at ${currentL1BlockNumber} (latest is ${latestLocalCheckpointNumber}). ` + `Rolling back last synched L1 block number to ${targetL1BlockNumber}.`, {
+latestLocalCheckpointNumber,
+latestLocalCheckpointArchive,
 blocksSynchedTo,
 currentL1BlockNumber,
 ...status
 });
 await this.store.setBlockSynchedL1BlockNumber(targetL1BlockNumber);
 } else {
-this.log.trace(`No new
-
-
+this.log.trace(`No new checkpoints behind L1 sync point to retrieve.`, {
+latestLocalCheckpointNumber,
+pendingCheckpointNumber
 });
 }
 }
 /** Resumes the archiver after a stop. */ resume() {
-if (!this.runningPromise) {
-throw new Error(`Archiver was never started`);
-}
 if (this.runningPromise.isRunning()) {
 this.log.warn(`Archiver already running`);
 }
@@ -699,7 +1232,7 @@ import { validateBlockAttestations } from './validation.js';
 * @returns A promise signalling completion of the stop process.
 */ async stop() {
 this.log.debug('Stopping...');
-await this.runningPromise
+await this.runningPromise.stop();
 this.log.info('Stopped.');
 return Promise.resolve();
 }
@@ -709,6 +1242,11 @@ import { validateBlockAttestations } from './validation.js';
 getL1Constants() {
 return Promise.resolve(this.l1constants);
 }
+getGenesisValues() {
+return Promise.resolve({
+genesisArchiveRoot: this.l1constants.genesisArchiveRoot
+});
+}
 getRollupAddress() {
 return Promise.resolve(this.l1Addresses.rollupAddress);
 }
@@ -716,60 +1254,65 @@ import { validateBlockAttestations } from './validation.js';
 return Promise.resolve(this.l1Addresses.registryAddress);
 }
 getL1BlockNumber() {
-
-if (!l1BlockNumber) {
-throw new Error('L1 block number not yet available. Complete an initial sync first.');
-}
-return l1BlockNumber;
+return this.l1BlockNumber;
 }
 getL1Timestamp() {
-
-if (!l1Timestamp) {
-throw new Error('L1 timestamp not yet available. Complete an initial sync first.');
-}
-return Promise.resolve(l1Timestamp);
+return Promise.resolve(this.l1Timestamp);
 }
-
-return getSlotAtTimestamp(
+getL2SlotNumber() {
+return Promise.resolve(this.l1Timestamp === undefined ? undefined : getSlotAtTimestamp(this.l1Timestamp, this.l1constants));
 }
-
-return getEpochNumberAtTimestamp(
+getL2EpochNumber() {
+return Promise.resolve(this.l1Timestamp === undefined ? undefined : getEpochNumberAtTimestamp(this.l1Timestamp, this.l1constants));
 }
 async getBlocksForEpoch(epochNumber) {
 const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1constants);
 const blocks = [];
-// Walk the list of
-// We'll typically ask for
-let
-const slot = (b)=>b.header.
-while(
-if (slot(
-blocks
+// Walk the list of checkpoints backwards and filter by slots matching the requested epoch.
+// We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here.
+let checkpoint = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber());
+const slot = (b)=>b.header.slotNumber;
+while(checkpoint && slot(checkpoint) >= start){
+if (slot(checkpoint) <= end) {
+// push the blocks on backwards
+const endBlock = checkpoint.startBlock + checkpoint.numBlocks - 1;
+for(let i = endBlock; i >= checkpoint.startBlock; i--){
+const block = await this.getBlock(BlockNumber(i));
+if (block) {
+blocks.push(block);
+}
+}
 }
-
+checkpoint = await this.store.getCheckpointData(CheckpointNumber(checkpoint.checkpointNumber - 1));
 }
 return blocks.reverse();
 }
 async getBlockHeadersForEpoch(epochNumber) {
 const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1constants);
 const blocks = [];
-// Walk the list of
-// We'll typically ask for
-let
-
-
-
-
-
+// Walk the list of checkpoints backwards and filter by slots matching the requested epoch.
+// We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here.
+let checkpoint = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber());
+const slot = (b)=>b.header.slotNumber;
+while(checkpoint && slot(checkpoint) >= start){
+if (slot(checkpoint) <= end) {
+// push the blocks on backwards
+const endBlock = checkpoint.startBlock + checkpoint.numBlocks - 1;
+for(let i = endBlock; i >= checkpoint.startBlock; i--){
+const block = await this.getBlockHeader(BlockNumber(i));
+if (block) {
+blocks.push(block);
+}
+}
 }
-
+checkpoint = await this.store.getCheckpointData(CheckpointNumber(checkpoint.checkpointNumber - 1));
 }
 return blocks.reverse();
 }
 async isEpochComplete(epochNumber) {
 // The epoch is complete if the current L2 block is the last one in the epoch (or later)
 const header = await this.getBlockHeader('latest');
-const slot = header
+const slot = header ? header.globalVariables.slotNumber : undefined;
 const [_startSlot, endSlot] = getSlotRangeForEpoch(epochNumber, this.l1constants);
 if (slot && slot >= endSlot) {
 return true;
@@ -793,24 +1336,55 @@ import { validateBlockAttestations } from './validation.js';
 /** Returns whether the archiver has completed an initial sync run successfully. */ isInitialSyncComplete() {
 return this.initialSyncComplete;
 }
-
-
-
-
-
-
-
-
+async getCheckpointHeader(number) {
+if (number === 'latest') {
+number = await this.getSynchedCheckpointNumber();
+}
+if (number === 0) {
+return undefined;
+}
+const checkpoint = await this.store.getCheckpointData(number);
+if (!checkpoint) {
+return undefined;
+}
+return checkpoint.header;
+}
+getCheckpointNumber() {
+return this.getSynchedCheckpointNumber();
+}
+getSynchedCheckpointNumber() {
+return this.store.getSynchedCheckpointNumber();
+}
+getProvenCheckpointNumber() {
+return this.store.getProvenCheckpointNumber();
+}
+setProvenCheckpointNumber(checkpointNumber) {
+return this.store.setProvenCheckpointNumber(checkpointNumber);
+}
+unwindCheckpoints(from, checkpointsToUnwind) {
+return this.store.unwindCheckpoints(from, checkpointsToUnwind);
+}
+async getLastBlockNumberInCheckpoint(checkpointNumber) {
+const checkpointData = await this.store.getCheckpointData(checkpointNumber);
+if (!checkpointData) {
+return undefined;
+}
+return BlockNumber(checkpointData.startBlock + checkpointData.numBlocks - 1);
 }
-
-
-
+addCheckpoints(checkpoints, pendingChainValidationStatus) {
+return this.store.addCheckpoints(checkpoints, pendingChainValidationStatus);
+}
+getBlockHeaderByHash(blockHash) {
+return this.store.getBlockHeaderByHash(blockHash);
+}
+getBlockHeaderByArchive(archive) {
+return this.store.getBlockHeaderByArchive(archive);
 }
 /**
 * Gets an l2 block.
 * @param number - The block number to return.
 * @returns The requested L2 block.
-*/ async
+*/ async getL2BlockNew(number) {
 // If the number provided is -ve, then return the latest block.
 if (number < 0) {
 number = await this.store.getSynchedL2BlockNumber();
@@ -818,8 +1392,8 @@ import { validateBlockAttestations } from './validation.js';
 if (number === 0) {
 return undefined;
 }
-const publishedBlock = await this.store.
-return publishedBlock
+const publishedBlock = await this.store.store.getBlock(number);
+return publishedBlock;
 }
 async getBlockHeader(number) {
 if (number === 'latest') {
@@ -831,27 +1405,29 @@ import { validateBlockAttestations } from './validation.js';
 const headers = await this.store.getBlockHeaders(number, 1);
 return headers.length === 0 ? undefined : headers[0];
 }
+getCheckpointedBlock(number) {
+return this.store.getCheckpointedBlock(number);
+}
+getCheckpointedBlockByHash(blockHash) {
+return this.store.getCheckpointedBlockByHash(blockHash);
+}
+getProvenBlockNumber() {
+return this.store.getProvenBlockNumber();
+}
+getCheckpointedBlockByArchive(archive) {
+return this.store.getCheckpointedBlockByArchive(archive);
+}
 getTxEffect(txHash) {
 return this.store.getTxEffect(txHash);
 }
 getSettledTxReceipt(txHash) {
 return this.store.getSettledTxReceipt(txHash);
 }
-
-
-* @param from - The block number from which to begin retrieving logs.
-* @param limit - The maximum number of blocks to retrieve logs from.
-* @returns An array of private logs from the specified range of blocks.
-*/ getPrivateLogs(from, limit) {
-return this.store.getPrivateLogs(from, limit);
+getPrivateLogsByTags(tags) {
+return this.store.getPrivateLogsByTags(tags);
 }
-
-
-* @param tags - The tags to filter the logs by.
-* @returns For each received tag, an array of matching logs is returned. An empty array implies no logs match
-* that tag.
-*/ getLogsByTags(tags) {
-return this.store.getLogsByTags(tags);
+getPublicLogsByTagsFromContract(contractAddress, tags) {
+return this.store.getPublicLogsByTagsFromContract(contractAddress, tags);
 }
 /**
 * Gets public logs based on the provided filter.
@@ -869,15 +1445,10 @@ import { validateBlockAttestations } from './validation.js';
 }
 /**
 * Gets the number of the latest L2 block processed by the block source implementation.
+* This includes both checkpointed and uncheckpointed blocks.
 * @returns The number of the latest L2 block processed by the block source implementation.
 */ getBlockNumber() {
-return this.store.
-}
-getProvenBlockNumber() {
-return this.store.getProvenL2BlockNumber();
-}
-/** Forcefully updates the last proven block number. Use for testing. */ setProvenBlockNumber(blockNumber) {
-return this.store.setProvenL2BlockNumber(blockNumber);
+return this.store.getLatestBlockNumber();
 }
 getContractClass(id) {
 return this.store.getContractClass(id);
@@ -897,11 +1468,11 @@ import { validateBlockAttestations } from './validation.js';
 return this.store.getContractInstance(address, timestamp);
 }
 /**
-* Gets L1 to L2 message (to be) included in a given
-* @param
+* Gets L1 to L2 message (to be) included in a given checkpoint.
+* @param checkpointNumber - Checkpoint number to get messages for.
 * @returns The L1 to L2 messages/leaves of the messages subtree (throws if not found).
-*/ getL1ToL2Messages(
-return this.store.getL1ToL2Messages(
+*/ getL1ToL2Messages(checkpointNumber) {
+return this.store.getL1ToL2Messages(checkpointNumber);
 }
 /**
 * Gets the L1 to L2 message index in the L1 to L2 message tree.
@@ -919,11 +1490,13 @@ import { validateBlockAttestations } from './validation.js';
 getDebugFunctionName(address, selector) {
 return this.store.getDebugFunctionName(address, selector);
 }
-getPendingChainValidationStatus() {
-return
+async getPendingChainValidationStatus() {
+return await this.store.getPendingChainValidationStatus() ?? {
+valid: true
+};
 }
 isPendingChainInvalid() {
-return
+return this.getPendingChainValidationStatus().then((status)=>!status.valid);
 }
 async getL2Tips() {
 const [latestBlockNumber, provenBlockNumber] = await Promise.all([
@@ -933,7 +1506,7 @@ import { validateBlockAttestations } from './validation.js';
 // TODO(#13569): Compute proper finalized block number based on L1 finalized block.
 // We just force it 2 epochs worth of proven data for now.
 // NOTE: update end-to-end/src/e2e_epochs/epochs_empty_blocks.test.ts as that uses finalized blocks in computations
-const finalizedBlockNumber = Math.max(provenBlockNumber - this.l1constants.epochDuration * 2, 0);
+const finalizedBlockNumber = BlockNumber(Math.max(provenBlockNumber - this.l1constants.epochDuration * 2, 0));
 const [latestBlockHeader, provenBlockHeader, finalizedBlockHeader] = await Promise.all([
 latestBlockNumber > 0 ? this.getBlockHeader(latestBlockNumber) : undefined,
 provenBlockNumber > 0 ? this.getBlockHeader(provenBlockNumber) : undefined,
@@ -948,43 +1521,44 @@ import { validateBlockAttestations } from './validation.js';
 if (finalizedBlockNumber > 0 && !finalizedBlockHeader) {
 throw new Error(`Failed to retrieve finalized block header for block ${finalizedBlockNumber} (latest block is ${latestBlockNumber})`);
 }
-const latestBlockHeaderHash = await latestBlockHeader?.hash();
-const provenBlockHeaderHash = await provenBlockHeader?.hash();
-const finalizedBlockHeaderHash = await finalizedBlockHeader?.hash();
+const latestBlockHeaderHash = await latestBlockHeader?.hash() ?? GENESIS_BLOCK_HEADER_HASH;
+const provenBlockHeaderHash = await provenBlockHeader?.hash() ?? GENESIS_BLOCK_HEADER_HASH;
+const finalizedBlockHeaderHash = await finalizedBlockHeader?.hash() ?? GENESIS_BLOCK_HEADER_HASH;
 return {
 latest: {
 number: latestBlockNumber,
-hash: latestBlockHeaderHash
+hash: latestBlockHeaderHash.toString()
 },
 proven: {
 number: provenBlockNumber,
-hash: provenBlockHeaderHash
+hash: provenBlockHeaderHash.toString()
 },
 finalized: {
 number: finalizedBlockNumber,
-hash: finalizedBlockHeaderHash
+hash: finalizedBlockHeaderHash.toString()
 }
 };
 }
 async rollbackTo(targetL2BlockNumber) {
+// TODO(pw/mbps): This still assumes 1 block per checkpoint
 const currentBlocks = await this.getL2Tips();
 const currentL2Block = currentBlocks.latest.number;
 const currentProvenBlock = currentBlocks.proven.number;
-// const currentFinalizedBlock = currentBlocks.finalized.number;
 if (targetL2BlockNumber >= currentL2Block) {
 throw new Error(`Target L2 block ${targetL2BlockNumber} must be less than current L2 block ${currentL2Block}`);
 }
 const blocksToUnwind = currentL2Block - targetL2BlockNumber;
-const targetL2Block = await this.store.
+const targetL2Block = await this.store.getCheckpointedBlock(targetL2BlockNumber);
 if (!targetL2Block) {
 throw new Error(`Target L2 block ${targetL2BlockNumber} not found`);
 }
 const targetL1BlockNumber = targetL2Block.l1.blockNumber;
+const targetCheckpointNumber = CheckpointNumber.fromBlockNumber(targetL2BlockNumber);
 const targetL1BlockHash = await this.getL1BlockHash(targetL1BlockNumber);
-this.log.info(`Unwinding ${blocksToUnwind}
-await this.store.
-this.log.info(`Unwinding L1 to L2 messages to ${
-await this.store.
+this.log.info(`Unwinding ${blocksToUnwind} checkpoints from L2 block ${currentL2Block}`);
+await this.store.unwindCheckpoints(CheckpointNumber(currentL2Block), blocksToUnwind);
+this.log.info(`Unwinding L1 to L2 messages to checkpoint ${targetCheckpointNumber}`);
+await this.store.rollbackL1ToL2MessagesToCheckpoint(targetCheckpointNumber);
 this.log.info(`Setting L1 syncpoints to ${targetL1BlockNumber}`);
 await this.store.setBlockSynchedL1BlockNumber(targetL1BlockNumber);
 await this.store.setMessageSynchedL1Block({
@@ -993,7 +1567,7 @@ import { validateBlockAttestations } from './validation.js';
 });
 if (targetL2BlockNumber < currentProvenBlock) {
 this.log.info(`Clearing proven L2 block number`);
-await this.store.
+await this.store.setProvenCheckpointNumber(CheckpointNumber.ZERO);
 }
 // TODO(palla/reorg): Set the finalized block when we add support for it.
 // if (targetL2BlockNumber < currentFinalizedBlock) {
@@ -1001,12 +1575,104 @@ import { validateBlockAttestations } from './validation.js';
 // await this.store.setFinalizedL2BlockNumber(0);
 // }
 }
+async getPublishedCheckpoints(checkpointNumber, limit) {
+const checkpoints = await this.store.getRangeOfCheckpoints(checkpointNumber, limit);
+const blocks = (await Promise.all(checkpoints.map((ch)=>this.store.getBlocksForCheckpoint(ch.checkpointNumber)))).filter(isDefined);
+const fullCheckpoints = [];
+for(let i = 0; i < checkpoints.length; i++){
+const blocksForCheckpoint = blocks[i];
+const checkpoint = checkpoints[i];
+const fullCheckpoint = new Checkpoint(checkpoint.archive, checkpoint.header, blocksForCheckpoint, checkpoint.checkpointNumber);
+const publishedCheckpoint = new PublishedCheckpoint(fullCheckpoint, checkpoint.l1, checkpoint.attestations.map((x)=>CommitteeAttestation.fromBuffer(x)));
+fullCheckpoints.push(publishedCheckpoint);
+}
+return fullCheckpoints;
+}
+async getCheckpointsForEpoch(epochNumber) {
+const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1constants);
+const checkpoints = [];
+// Walk the list of checkpoints backwards and filter by slots matching the requested epoch.
+// We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here.
+let checkpointData = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber());
+const slot = (b)=>b.header.slotNumber;
+while(checkpointData && slot(checkpointData) >= start){
+if (slot(checkpointData) <= end) {
+// push the checkpoints on backwards
+const [checkpoint] = await this.getPublishedCheckpoints(checkpointData.checkpointNumber, 1);
+checkpoints.push(checkpoint.checkpoint);
+}
+checkpointData = await this.store.getCheckpointData(CheckpointNumber(checkpointData.checkpointNumber - 1));
+}
+return checkpoints.reverse();
+}
+/* Legacy APIs */ async getPublishedBlockByHash(blockHash) {
+const checkpointedBlock = await this.store.getCheckpointedBlockByHash(blockHash);
+return this.buildOldBlockFromCheckpointedBlock(checkpointedBlock);
+}
+async getPublishedBlockByArchive(archive) {
+const checkpointedBlock = await this.store.getCheckpointedBlockByArchive(archive);
+return this.buildOldBlockFromCheckpointedBlock(checkpointedBlock);
+}
+/**
+* Gets up to `limit` amount of L2 blocks starting from `from`.
+* @param from - Number of the first block to return (inclusive).
+* @param limit - The number of blocks to return.
+* @param proven - If true, only return blocks that have been proven.
+* @returns The requested L2 blocks.
+*/ async getBlocks(from, limit, proven) {
+const publishedBlocks = await this.getPublishedBlocks(from, limit, proven);
+return publishedBlocks.map((x)=>x.block);
+}
+async getPublishedBlocks(from, limit, proven) {
+const checkpoints = await this.store.getRangeOfCheckpoints(CheckpointNumber(from), limit);
+const provenCheckpointNumber = await this.getProvenCheckpointNumber();
+const blocks = (await Promise.all(checkpoints.map((ch)=>this.store.getBlocksForCheckpoint(ch.checkpointNumber)))).filter(isDefined);
+const olbBlocks = [];
+for(let i = 0; i < checkpoints.length; i++){
+const blockForCheckpoint = blocks[i][0];
+const checkpoint = checkpoints[i];
+if (checkpoint.checkpointNumber > provenCheckpointNumber && proven === true) {
+continue;
+}
+const oldCheckpoint = new Checkpoint(blockForCheckpoint.archive, checkpoint.header, [
+blockForCheckpoint
+], checkpoint.checkpointNumber);
+const oldBlock = L2Block.fromCheckpoint(oldCheckpoint);
+const publishedBlock = new PublishedL2Block(oldBlock, checkpoint.l1, checkpoint.attestations.map((x)=>CommitteeAttestation.fromBuffer(x)));
+olbBlocks.push(publishedBlock);
+}
+return olbBlocks;
+}
+async buildOldBlockFromCheckpointedBlock(checkpointedBlock) {
+if (!checkpointedBlock) {
+return undefined;
+}
+const checkpoint = await this.store.getCheckpointData(checkpointedBlock.checkpointNumber);
+if (!checkpoint) {
+return checkpoint;
+}
+const fullCheckpoint = new Checkpoint(checkpointedBlock?.block.archive, checkpoint?.header, [
+checkpointedBlock.block
+], checkpoint.checkpointNumber);
+const oldBlock = L2Block.fromCheckpoint(fullCheckpoint);
+const published = new PublishedL2Block(oldBlock, checkpoint.l1, checkpoint.attestations.map((x)=>CommitteeAttestation.fromBuffer(x)));
+return published;
+}
+async getBlock(number) {
+// If the number provided is -ve, then return the latest block.
+if (number < 0) {
+number = await this.store.getSynchedL2BlockNumber();
+}
+if (number === 0) {
+return undefined;
+}
+const publishedBlocks = await this.getPublishedBlocks(number, 1);
+if (publishedBlocks.length === 0) {
+return undefined;
+}
+return publishedBlocks[0].block;
+}
 }
-_ts_decorate([
-trackSpan('Archiver.sync', (initialRun)=>({
-[Attributes.INITIAL_SYNC]: initialRun
-}))
-], Archiver.prototype, "sync", null);
 var Operation = /*#__PURE__*/ function(Operation) {
 Operation[Operation["Store"] = 0] = "Store";
 Operation[Operation["Delete"] = 1] = "Delete";
@@ -1124,67 +1790,139 @@ var Operation = /*#__PURE__*/ function(Operation) {
 }
 return true;
 }
-
+async addBlockDataToDB(block) {
+const contractClassLogs = block.body.txEffects.flatMap((txEffect)=>txEffect.contractClassLogs);
+// ContractInstancePublished event logs are broadcast in privateLogs.
+const privateLogs = block.body.txEffects.flatMap((txEffect)=>txEffect.privateLogs);
+const publicLogs = block.body.txEffects.flatMap((txEffect)=>txEffect.publicLogs);
+return (await Promise.all([
+this.#updatePublishedContractClasses(contractClassLogs, block.number, 0),
+this.#updateDeployedContractInstances(privateLogs, block.number, 0),
+this.#updateUpdatedContractInstances(publicLogs, block.header.globalVariables.timestamp, 0),
+this.#storeBroadcastedIndividualFunctions(contractClassLogs, block.number)
+])).every(Boolean);
+}
+addBlocks(blocks, pendingChainValidationStatus) {
 // Add the blocks to the store. Store will throw if the blocks are not in order, there are gaps,
 // or if the previous block is not in the store.
 return this.store.transactionAsync(async ()=>{
 await this.store.addBlocks(blocks);
 const opResults = await Promise.all([
-
+// Update the pending chain validation status if provided
+pendingChainValidationStatus && this.store.setPendingChainValidationStatus(pendingChainValidationStatus),
+// Add any logs emitted during the retrieved blocks
+this.store.addLogs(blocks),
 // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
-...blocks.map(
-
-// ContractInstancePublished event logs are broadcast in privateLogs.
-const privateLogs = block.block.body.txEffects.flatMap((txEffect)=>txEffect.privateLogs);
-const publicLogs = block.block.body.txEffects.flatMap((txEffect)=>txEffect.publicLogs);
-return (await Promise.all([
-this.#updatePublishedContractClasses(contractClassLogs, block.block.number, 0),
-this.#updateDeployedContractInstances(privateLogs, block.block.number, 0),
-this.#updateUpdatedContractInstances(publicLogs, block.block.header.globalVariables.timestamp, 0),
-this.#storeBroadcastedIndividualFunctions(contractClassLogs, block.block.number)
-])).every(Boolean);
+...blocks.map((block)=>{
+return this.addBlockDataToDB(block);
 })
 ]);
 return opResults.every(Boolean);
 });
 }
-
-
+addCheckpoints(checkpoints, pendingChainValidationStatus) {
+// Add the blocks to the store. Store will throw if the blocks are not in order, there are gaps,
+// or if the previous block is not in the store.
+return this.store.transactionAsync(async ()=>{
+await this.store.addCheckpoints(checkpoints);
+const allBlocks = checkpoints.flatMap((ch)=>ch.checkpoint.blocks);
+const opResults = await Promise.all([
+// Update the pending chain validation status if provided
+pendingChainValidationStatus && this.store.setPendingChainValidationStatus(pendingChainValidationStatus),
+// Add any logs emitted during the retrieved blocks
+this.store.addLogs(allBlocks),
+// Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
+...allBlocks.map((block)=>{
+return this.addBlockDataToDB(block);
+})
+]);
+return opResults.every(Boolean);
+});
+}
+async unwindCheckpoints(from, checkpointsToUnwind) {
+if (checkpointsToUnwind <= 0) {
+throw new Error(`Cannot unwind ${checkpointsToUnwind} blocks`);
+}
+const last = await this.getSynchedCheckpointNumber();
 if (from != last) {
-throw new Error(`Cannot unwind
+throw new Error(`Cannot unwind checkpoints from checkpoint ${from} when the last checkpoint is ${last}`);
 }
-
-
+const blocks = [];
+const lastCheckpointNumber = from + checkpointsToUnwind - 1;
+for(let checkpointNumber = from; checkpointNumber <= lastCheckpointNumber; checkpointNumber++){
+const blocksForCheckpoint = await this.store.getBlocksForCheckpoint(checkpointNumber);
+if (!blocksForCheckpoint) {
+continue;
+}
+blocks.push(...blocksForCheckpoint);
 }
-// from - blocksToUnwind = the new head, so + 1 for what we need to remove
-const blocks = await this.getPublishedBlocks(from - blocksToUnwind + 1, blocksToUnwind);
 const opResults = await Promise.all([
+// Prune rolls back to the last proven block, which is by definition valid
+this.store.setPendingChainValidationStatus({
+valid: true
+}),
 // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
 ...blocks.map(async (block)=>{
-const contractClassLogs = block.
+const contractClassLogs = block.body.txEffects.flatMap((txEffect)=>txEffect.contractClassLogs);
 // ContractInstancePublished event logs are broadcast in privateLogs.
-const privateLogs = block.
-const publicLogs = block.
+const privateLogs = block.body.txEffects.flatMap((txEffect)=>txEffect.privateLogs);
+const publicLogs = block.body.txEffects.flatMap((txEffect)=>txEffect.publicLogs);
 return (await Promise.all([
-this.#updatePublishedContractClasses(contractClassLogs, block.
-this.#updateDeployedContractInstances(privateLogs, block.
-this.#updateUpdatedContractInstances(publicLogs, block.
+this.#updatePublishedContractClasses(contractClassLogs, block.number, 1),
+this.#updateDeployedContractInstances(privateLogs, block.number, 1),
+this.#updateUpdatedContractInstances(publicLogs, block.header.globalVariables.timestamp, 1)
 ])).every(Boolean);
 }),
-this.store.deleteLogs(blocks
-this.store.
+this.store.deleteLogs(blocks),
+this.store.unwindCheckpoints(from, checkpointsToUnwind)
 ]);
 return opResults.every(Boolean);
 }
-
-return this.store.
+getCheckpointData(checkpointNumber) {
+return this.store.getCheckpointData(checkpointNumber);
+}
+getRangeOfCheckpoints(from, limit) {
+return this.store.getRangeOfCheckpoints(from, limit);
+}
+getCheckpointedL2BlockNumber() {
+return this.store.getCheckpointedL2BlockNumber();
 }
-
-return this.store.
+getSynchedCheckpointNumber() {
+return this.store.getSynchedCheckpointNumber();
+}
+setCheckpointSynchedL1BlockNumber(l1BlockNumber) {
+return this.store.setCheckpointSynchedL1BlockNumber(l1BlockNumber);
+}
+getCheckpointedBlock(number) {
+return this.store.getCheckpointedBlock(number);
+}
+getCheckpointedBlockByHash(blockHash) {
+return this.store.getCheckpointedBlockByHash(blockHash);
+}
+getCheckpointedBlockByArchive(archive) {
+return this.store.getCheckpointedBlockByArchive(archive);
 }
 getBlockHeaders(from, limit) {
 return this.store.getBlockHeaders(from, limit);
 }
+getBlockHeaderByHash(blockHash) {
+return this.store.getBlockHeaderByHash(blockHash);
+}
+getBlockHeaderByArchive(archive) {
+return this.store.getBlockHeaderByArchive(archive);
+}
+getBlockByHash(blockHash) {
+return this.store.getBlockByHash(blockHash);
+}
+getBlockByArchive(archive) {
+return this.store.getBlockByArchive(archive);
+}
+getLatestBlockNumber() {
+return this.store.getLatestBlockNumber();
+}
+getBlocksForCheckpoint(checkpointNumber) {
+return this.store.getBlocksForCheckpoint(checkpointNumber);
+}
 getTxEffect(txHash) {
 return this.store.getTxEffect(txHash);
 }
@@ -1194,17 +1932,17 @@ var Operation = /*#__PURE__*/ function(Operation) {
 addL1ToL2Messages(messages) {
 return this.store.addL1ToL2Messages(messages);
 }
-getL1ToL2Messages(
-return this.store.getL1ToL2Messages(
+getL1ToL2Messages(checkpointNumber) {
+return this.store.getL1ToL2Messages(checkpointNumber);
 }
 getL1ToL2MessageIndex(l1ToL2Message) {
 return this.store.getL1ToL2MessageIndex(l1ToL2Message);
 }
-
-return this.store.
+getPrivateLogsByTags(tags) {
+return this.store.getPrivateLogsByTags(tags);
 }
-
-return this.store.
+getPublicLogsByTagsFromContract(contractAddress, tags) {
+return this.store.getPublicLogsByTagsFromContract(contractAddress, tags);
 }
 getPublicLogs(filter) {
 return this.store.getPublicLogs(filter);
@@ -1213,16 +1951,19 @@ var Operation = /*#__PURE__*/ function(Operation) {
 return this.store.getContractClassLogs(filter);
 }
 getSynchedL2BlockNumber() {
-return this.store.
+return this.store.getCheckpointedL2BlockNumber();
 }
-
-return this.store.
+getProvenCheckpointNumber() {
+return this.store.getProvenCheckpointNumber();
+}
+getProvenBlockNumber() {
+return this.store.getProvenBlockNumber();
 }
-
-return this.store.
+setProvenCheckpointNumber(checkpointNumber) {
+return this.store.setProvenCheckpointNumber(checkpointNumber);
 }
 setBlockSynchedL1BlockNumber(l1BlockNumber) {
-return this.store.
+return this.store.setCheckpointSynchedL1BlockNumber(l1BlockNumber);
 }
 setMessageSynchedL1Block(l1Block) {
 return this.store.setMessageSynchedL1Block(l1Block);
@@ -1254,8 +1995,8 @@ var Operation = /*#__PURE__*/ function(Operation) {
 estimateSize() {
 return this.store.estimateSize();
 }
-
-return this.store.
+rollbackL1ToL2MessagesToCheckpoint(targetCheckpointNumber) {
+return this.store.rollbackL1ToL2MessagesToCheckpoint(targetCheckpointNumber);
 }
 iterateL1ToL2Messages(range = {}) {
 return this.store.iterateL1ToL2Messages(range);
@@ -1266,4 +2007,11 @@ var Operation = /*#__PURE__*/ function(Operation) {
 getLastL1ToL2Message() {
 return this.store.getLastL1ToL2Message();
 }
+getPendingChainValidationStatus() {
+return this.store.getPendingChainValidationStatus();
+}
+setPendingChainValidationStatus(status) {
+this.#log.debug(`Setting pending chain validation status to valid ${status?.valid}`, status);
+return this.store.setPendingChainValidationStatus(status);
+}
 }