@aztec/archiver 4.0.0-nightly.20260113 → 4.0.0-nightly.20260115

This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (174)
  1. package/README.md +139 -22
  2. package/dest/archiver.d.ts +134 -0
  3. package/dest/archiver.d.ts.map +1 -0
  4. package/dest/archiver.js +767 -0
  5. package/dest/{archiver/config.d.ts → config.d.ts} +9 -1
  6. package/dest/config.d.ts.map +1 -0
  7. package/dest/{archiver/config.js → config.js} +9 -0
  8. package/dest/{archiver/errors.d.ts → errors.d.ts} +1 -1
  9. package/dest/errors.d.ts.map +1 -0
  10. package/dest/factory.d.ts +5 -6
  11. package/dest/factory.d.ts.map +1 -1
  12. package/dest/factory.js +82 -5
  13. package/dest/index.d.ts +10 -4
  14. package/dest/index.d.ts.map +1 -1
  15. package/dest/index.js +8 -3
  16. package/dest/interfaces.d.ts +9 -0
  17. package/dest/interfaces.d.ts.map +1 -0
  18. package/dest/interfaces.js +3 -0
  19. package/dest/{archiver/l1 → l1}/bin/retrieve-calldata.d.ts +1 -1
  20. package/dest/l1/bin/retrieve-calldata.d.ts.map +1 -0
  21. package/dest/{archiver/l1 → l1}/calldata_retriever.d.ts +2 -2
  22. package/dest/l1/calldata_retriever.d.ts.map +1 -0
  23. package/dest/l1/data_retrieval.d.ts +88 -0
  24. package/dest/l1/data_retrieval.d.ts.map +1 -0
  25. package/dest/{archiver/l1 → l1}/data_retrieval.js +32 -51
  26. package/dest/{archiver/l1 → l1}/debug_tx.d.ts +1 -1
  27. package/dest/l1/debug_tx.d.ts.map +1 -0
  28. package/dest/{archiver/l1 → l1}/spire_proposer.d.ts +1 -1
  29. package/dest/l1/spire_proposer.d.ts.map +1 -0
  30. package/dest/{archiver/l1 → l1}/trace_tx.d.ts +1 -1
  31. package/dest/l1/trace_tx.d.ts.map +1 -0
  32. package/dest/l1/types.d.ts +12 -0
  33. package/dest/l1/types.d.ts.map +1 -0
  34. package/dest/{archiver/l1 → l1}/validate_trace.d.ts +1 -1
  35. package/dest/l1/validate_trace.d.ts.map +1 -0
  36. package/dest/modules/data_source_base.d.ts +83 -0
  37. package/dest/modules/data_source_base.d.ts.map +1 -0
  38. package/dest/modules/data_source_base.js +301 -0
  39. package/dest/modules/data_store_updater.d.ts +46 -0
  40. package/dest/modules/data_store_updater.d.ts.map +1 -0
  41. package/dest/modules/data_store_updater.js +216 -0
  42. package/dest/modules/instrumentation.d.ts +37 -0
  43. package/dest/modules/instrumentation.d.ts.map +1 -0
  44. package/dest/modules/l1_synchronizer.d.ts +67 -0
  45. package/dest/modules/l1_synchronizer.d.ts.map +1 -0
  46. package/dest/modules/l1_synchronizer.js +1064 -0
  47. package/dest/{archiver → modules}/validation.d.ts +1 -1
  48. package/dest/modules/validation.d.ts.map +1 -0
  49. package/dest/{archiver/kv_archiver_store → store}/block_store.d.ts +2 -2
  50. package/dest/store/block_store.d.ts.map +1 -0
  51. package/dest/{archiver/kv_archiver_store → store}/block_store.js +1 -1
  52. package/dest/store/contract_class_store.d.ts +18 -0
  53. package/dest/store/contract_class_store.d.ts.map +1 -0
  54. package/dest/{archiver/kv_archiver_store → store}/contract_class_store.js +1 -1
  55. package/dest/store/contract_instance_store.d.ts +24 -0
  56. package/dest/store/contract_instance_store.d.ts.map +1 -0
  57. package/dest/{archiver/kv_archiver_store → store}/contract_instance_store.js +1 -1
  58. package/dest/{archiver/archiver_store.d.ts → store/kv_archiver_store.d.ts} +143 -139
  59. package/dest/store/kv_archiver_store.d.ts.map +1 -0
  60. package/dest/{archiver/kv_archiver_store → store}/kv_archiver_store.js +157 -49
  61. package/dest/{archiver/kv_archiver_store → store}/log_store.d.ts +1 -1
  62. package/dest/store/log_store.d.ts.map +1 -0
  63. package/dest/{archiver/kv_archiver_store → store}/message_store.d.ts +1 -1
  64. package/dest/store/message_store.d.ts.map +1 -0
  65. package/dest/{archiver/structs → structs}/data_retrieval.d.ts +1 -1
  66. package/dest/structs/data_retrieval.d.ts.map +1 -0
  67. package/dest/structs/inbox_message.d.ts +15 -0
  68. package/dest/structs/inbox_message.d.ts.map +1 -0
  69. package/dest/{archiver/structs → structs}/published.d.ts +1 -1
  70. package/dest/structs/published.d.ts.map +1 -0
  71. package/dest/test/fake_l1_state.d.ts +173 -0
  72. package/dest/test/fake_l1_state.d.ts.map +1 -0
  73. package/dest/test/fake_l1_state.js +364 -0
  74. package/dest/test/index.d.ts +2 -1
  75. package/dest/test/index.d.ts.map +1 -1
  76. package/dest/test/index.js +1 -0
  77. package/dest/test/mock_structs.d.ts +76 -2
  78. package/dest/test/mock_structs.d.ts.map +1 -1
  79. package/dest/test/mock_structs.js +133 -2
  80. package/package.json +15 -17
  81. package/src/archiver.ts +522 -0
  82. package/src/{archiver/config.ts → config.ts} +11 -0
  83. package/src/factory.ts +118 -6
  84. package/src/index.ts +10 -3
  85. package/src/interfaces.ts +9 -0
  86. package/src/{archiver/l1 → l1}/calldata_retriever.ts +1 -1
  87. package/src/{archiver/l1 → l1}/data_retrieval.ts +52 -69
  88. package/src/modules/data_source_base.ts +439 -0
  89. package/src/modules/data_store_updater.ts +318 -0
  90. package/src/modules/l1_synchronizer.ts +870 -0
  91. package/src/{archiver/kv_archiver_store → store}/block_store.ts +1 -1
  92. package/src/{archiver/kv_archiver_store → store}/contract_class_store.ts +1 -1
  93. package/src/{archiver/kv_archiver_store → store}/contract_instance_store.ts +1 -1
  94. package/src/{archiver/kv_archiver_store → store}/kv_archiver_store.ts +170 -8
  95. package/src/test/fake_l1_state.ts +561 -0
  96. package/src/test/index.ts +1 -0
  97. package/src/test/mock_structs.ts +247 -2
  98. package/dest/archiver/archiver.d.ts +0 -307
  99. package/dest/archiver/archiver.d.ts.map +0 -1
  100. package/dest/archiver/archiver.js +0 -2102
  101. package/dest/archiver/archiver_store.d.ts.map +0 -1
  102. package/dest/archiver/archiver_store.js +0 -4
  103. package/dest/archiver/archiver_store_test_suite.d.ts +0 -8
  104. package/dest/archiver/archiver_store_test_suite.d.ts.map +0 -1
  105. package/dest/archiver/archiver_store_test_suite.js +0 -2770
  106. package/dest/archiver/config.d.ts.map +0 -1
  107. package/dest/archiver/errors.d.ts.map +0 -1
  108. package/dest/archiver/index.d.ts +0 -7
  109. package/dest/archiver/index.d.ts.map +0 -1
  110. package/dest/archiver/index.js +0 -4
  111. package/dest/archiver/instrumentation.d.ts +0 -37
  112. package/dest/archiver/instrumentation.d.ts.map +0 -1
  113. package/dest/archiver/kv_archiver_store/block_store.d.ts.map +0 -1
  114. package/dest/archiver/kv_archiver_store/contract_class_store.d.ts +0 -18
  115. package/dest/archiver/kv_archiver_store/contract_class_store.d.ts.map +0 -1
  116. package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts +0 -24
  117. package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts.map +0 -1
  118. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +0 -159
  119. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +0 -1
  120. package/dest/archiver/kv_archiver_store/log_store.d.ts.map +0 -1
  121. package/dest/archiver/kv_archiver_store/message_store.d.ts.map +0 -1
  122. package/dest/archiver/l1/bin/retrieve-calldata.d.ts.map +0 -1
  123. package/dest/archiver/l1/calldata_retriever.d.ts.map +0 -1
  124. package/dest/archiver/l1/data_retrieval.d.ts +0 -90
  125. package/dest/archiver/l1/data_retrieval.d.ts.map +0 -1
  126. package/dest/archiver/l1/debug_tx.d.ts.map +0 -1
  127. package/dest/archiver/l1/spire_proposer.d.ts.map +0 -1
  128. package/dest/archiver/l1/trace_tx.d.ts.map +0 -1
  129. package/dest/archiver/l1/types.d.ts +0 -12
  130. package/dest/archiver/l1/types.d.ts.map +0 -1
  131. package/dest/archiver/l1/validate_trace.d.ts.map +0 -1
  132. package/dest/archiver/structs/data_retrieval.d.ts.map +0 -1
  133. package/dest/archiver/structs/inbox_message.d.ts +0 -15
  134. package/dest/archiver/structs/inbox_message.d.ts.map +0 -1
  135. package/dest/archiver/structs/published.d.ts.map +0 -1
  136. package/dest/archiver/validation.d.ts.map +0 -1
  137. package/dest/rpc/index.d.ts +0 -9
  138. package/dest/rpc/index.d.ts.map +0 -1
  139. package/dest/rpc/index.js +0 -15
  140. package/src/archiver/archiver.ts +0 -2265
  141. package/src/archiver/archiver_store.ts +0 -380
  142. package/src/archiver/archiver_store_test_suite.ts +0 -2842
  143. package/src/archiver/index.ts +0 -6
  144. package/src/rpc/index.ts +0 -16
  145. /package/dest/{archiver/errors.js → errors.js} +0 -0
  146. /package/dest/{archiver/l1 → l1}/bin/retrieve-calldata.js +0 -0
  147. /package/dest/{archiver/l1 → l1}/calldata_retriever.js +0 -0
  148. /package/dest/{archiver/l1 → l1}/debug_tx.js +0 -0
  149. /package/dest/{archiver/l1 → l1}/spire_proposer.js +0 -0
  150. /package/dest/{archiver/l1 → l1}/trace_tx.js +0 -0
  151. /package/dest/{archiver/l1 → l1}/types.js +0 -0
  152. /package/dest/{archiver/l1 → l1}/validate_trace.js +0 -0
  153. /package/dest/{archiver → modules}/instrumentation.js +0 -0
  154. /package/dest/{archiver → modules}/validation.js +0 -0
  155. /package/dest/{archiver/kv_archiver_store → store}/log_store.js +0 -0
  156. /package/dest/{archiver/kv_archiver_store → store}/message_store.js +0 -0
  157. /package/dest/{archiver/structs → structs}/data_retrieval.js +0 -0
  158. /package/dest/{archiver/structs → structs}/inbox_message.js +0 -0
  159. /package/dest/{archiver/structs → structs}/published.js +0 -0
  160. /package/src/{archiver/errors.ts → errors.ts} +0 -0
  161. /package/src/{archiver/l1 → l1}/README.md +0 -0
  162. /package/src/{archiver/l1 → l1}/bin/retrieve-calldata.ts +0 -0
  163. /package/src/{archiver/l1 → l1}/debug_tx.ts +0 -0
  164. /package/src/{archiver/l1 → l1}/spire_proposer.ts +0 -0
  165. /package/src/{archiver/l1 → l1}/trace_tx.ts +0 -0
  166. /package/src/{archiver/l1 → l1}/types.ts +0 -0
  167. /package/src/{archiver/l1 → l1}/validate_trace.ts +0 -0
  168. /package/src/{archiver → modules}/instrumentation.ts +0 -0
  169. /package/src/{archiver → modules}/validation.ts +0 -0
  170. /package/src/{archiver/kv_archiver_store → store}/log_store.ts +0 -0
  171. /package/src/{archiver/kv_archiver_store → store}/message_store.ts +0 -0
  172. /package/src/{archiver/structs → structs}/data_retrieval.ts +0 -0
  173. /package/src/{archiver/structs → structs}/inbox_message.ts +0 -0
  174. /package/src/{archiver/structs → structs}/published.ts +0 -0
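The rename entries above show the compiled output moving out of the nested dest/archiver/ tree into top-level dest/ directories (modules/, store/, structs/, l1/), with a much smaller dest/archiver.js replacing the old dest/archiver/archiver.js and the rpc/ entry point removed. As a rough, hedged sketch only (the package's exports map and public API surface are not part of this diff, so the paths and the Archiver export below are assumptions drawn from the file list), a consumer that deep-imported the compiled output would update its import paths roughly as follows:

// Hypothetical path update inferred from the renamed files listed above; the real
// public entry points may differ, and importing from the package root is preferable.
// Old layout (4.0.0-nightly.20260113):
//   import { Archiver } from '@aztec/archiver/dest/archiver/archiver.js';
// New layout (4.0.0-nightly.20260115): archiver.js now sits at the top of dest/,
// with supporting code split into dest/modules, dest/store, dest/structs and dest/l1.
import { Archiver } from '@aztec/archiver/dest/archiver.js';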
package/dest/archiver/archiver.js
@@ -1,2102 +0,0 @@
1
- function applyDecs2203RFactory() {
2
- function createAddInitializerMethod(initializers, decoratorFinishedRef) {
3
- return function addInitializer(initializer) {
4
- assertNotFinished(decoratorFinishedRef, "addInitializer");
5
- assertCallable(initializer, "An initializer");
6
- initializers.push(initializer);
7
- };
8
- }
9
- function memberDec(dec, name, desc, initializers, kind, isStatic, isPrivate, metadata, value) {
10
- var kindStr;
11
- switch(kind){
12
- case 1:
13
- kindStr = "accessor";
14
- break;
15
- case 2:
16
- kindStr = "method";
17
- break;
18
- case 3:
19
- kindStr = "getter";
20
- break;
21
- case 4:
22
- kindStr = "setter";
23
- break;
24
- default:
25
- kindStr = "field";
26
- }
27
- var ctx = {
28
- kind: kindStr,
29
- name: isPrivate ? "#" + name : name,
30
- static: isStatic,
31
- private: isPrivate,
32
- metadata: metadata
33
- };
34
- var decoratorFinishedRef = {
35
- v: false
36
- };
37
- ctx.addInitializer = createAddInitializerMethod(initializers, decoratorFinishedRef);
38
- var get, set;
39
- if (kind === 0) {
40
- if (isPrivate) {
41
- get = desc.get;
42
- set = desc.set;
43
- } else {
44
- get = function() {
45
- return this[name];
46
- };
47
- set = function(v) {
48
- this[name] = v;
49
- };
50
- }
51
- } else if (kind === 2) {
52
- get = function() {
53
- return desc.value;
54
- };
55
- } else {
56
- if (kind === 1 || kind === 3) {
57
- get = function() {
58
- return desc.get.call(this);
59
- };
60
- }
61
- if (kind === 1 || kind === 4) {
62
- set = function(v) {
63
- desc.set.call(this, v);
64
- };
65
- }
66
- }
67
- ctx.access = get && set ? {
68
- get: get,
69
- set: set
70
- } : get ? {
71
- get: get
72
- } : {
73
- set: set
74
- };
75
- try {
76
- return dec(value, ctx);
77
- } finally{
78
- decoratorFinishedRef.v = true;
79
- }
80
- }
81
- function assertNotFinished(decoratorFinishedRef, fnName) {
82
- if (decoratorFinishedRef.v) {
83
- throw new Error("attempted to call " + fnName + " after decoration was finished");
84
- }
85
- }
86
- function assertCallable(fn, hint) {
87
- if (typeof fn !== "function") {
88
- throw new TypeError(hint + " must be a function");
89
- }
90
- }
91
- function assertValidReturnValue(kind, value) {
92
- var type = typeof value;
93
- if (kind === 1) {
94
- if (type !== "object" || value === null) {
95
- throw new TypeError("accessor decorators must return an object with get, set, or init properties or void 0");
96
- }
97
- if (value.get !== undefined) {
98
- assertCallable(value.get, "accessor.get");
99
- }
100
- if (value.set !== undefined) {
101
- assertCallable(value.set, "accessor.set");
102
- }
103
- if (value.init !== undefined) {
104
- assertCallable(value.init, "accessor.init");
105
- }
106
- } else if (type !== "function") {
107
- var hint;
108
- if (kind === 0) {
109
- hint = "field";
110
- } else if (kind === 10) {
111
- hint = "class";
112
- } else {
113
- hint = "method";
114
- }
115
- throw new TypeError(hint + " decorators must return a function or void 0");
116
- }
117
- }
118
- function applyMemberDec(ret, base, decInfo, name, kind, isStatic, isPrivate, initializers, metadata) {
119
- var decs = decInfo[0];
120
- var desc, init, value;
121
- if (isPrivate) {
122
- if (kind === 0 || kind === 1) {
123
- desc = {
124
- get: decInfo[3],
125
- set: decInfo[4]
126
- };
127
- } else if (kind === 3) {
128
- desc = {
129
- get: decInfo[3]
130
- };
131
- } else if (kind === 4) {
132
- desc = {
133
- set: decInfo[3]
134
- };
135
- } else {
136
- desc = {
137
- value: decInfo[3]
138
- };
139
- }
140
- } else if (kind !== 0) {
141
- desc = Object.getOwnPropertyDescriptor(base, name);
142
- }
143
- if (kind === 1) {
144
- value = {
145
- get: desc.get,
146
- set: desc.set
147
- };
148
- } else if (kind === 2) {
149
- value = desc.value;
150
- } else if (kind === 3) {
151
- value = desc.get;
152
- } else if (kind === 4) {
153
- value = desc.set;
154
- }
155
- var newValue, get, set;
156
- if (typeof decs === "function") {
157
- newValue = memberDec(decs, name, desc, initializers, kind, isStatic, isPrivate, metadata, value);
158
- if (newValue !== void 0) {
159
- assertValidReturnValue(kind, newValue);
160
- if (kind === 0) {
161
- init = newValue;
162
- } else if (kind === 1) {
163
- init = newValue.init;
164
- get = newValue.get || value.get;
165
- set = newValue.set || value.set;
166
- value = {
167
- get: get,
168
- set: set
169
- };
170
- } else {
171
- value = newValue;
172
- }
173
- }
174
- } else {
175
- for(var i = decs.length - 1; i >= 0; i--){
176
- var dec = decs[i];
177
- newValue = memberDec(dec, name, desc, initializers, kind, isStatic, isPrivate, metadata, value);
178
- if (newValue !== void 0) {
179
- assertValidReturnValue(kind, newValue);
180
- var newInit;
181
- if (kind === 0) {
182
- newInit = newValue;
183
- } else if (kind === 1) {
184
- newInit = newValue.init;
185
- get = newValue.get || value.get;
186
- set = newValue.set || value.set;
187
- value = {
188
- get: get,
189
- set: set
190
- };
191
- } else {
192
- value = newValue;
193
- }
194
- if (newInit !== void 0) {
195
- if (init === void 0) {
196
- init = newInit;
197
- } else if (typeof init === "function") {
198
- init = [
199
- init,
200
- newInit
201
- ];
202
- } else {
203
- init.push(newInit);
204
- }
205
- }
206
- }
207
- }
208
- }
209
- if (kind === 0 || kind === 1) {
210
- if (init === void 0) {
211
- init = function(instance, init) {
212
- return init;
213
- };
214
- } else if (typeof init !== "function") {
215
- var ownInitializers = init;
216
- init = function(instance, init) {
217
- var value = init;
218
- for(var i = 0; i < ownInitializers.length; i++){
219
- value = ownInitializers[i].call(instance, value);
220
- }
221
- return value;
222
- };
223
- } else {
224
- var originalInitializer = init;
225
- init = function(instance, init) {
226
- return originalInitializer.call(instance, init);
227
- };
228
- }
229
- ret.push(init);
230
- }
231
- if (kind !== 0) {
232
- if (kind === 1) {
233
- desc.get = value.get;
234
- desc.set = value.set;
235
- } else if (kind === 2) {
236
- desc.value = value;
237
- } else if (kind === 3) {
238
- desc.get = value;
239
- } else if (kind === 4) {
240
- desc.set = value;
241
- }
242
- if (isPrivate) {
243
- if (kind === 1) {
244
- ret.push(function(instance, args) {
245
- return value.get.call(instance, args);
246
- });
247
- ret.push(function(instance, args) {
248
- return value.set.call(instance, args);
249
- });
250
- } else if (kind === 2) {
251
- ret.push(value);
252
- } else {
253
- ret.push(function(instance, args) {
254
- return value.call(instance, args);
255
- });
256
- }
257
- } else {
258
- Object.defineProperty(base, name, desc);
259
- }
260
- }
261
- }
262
- function applyMemberDecs(Class, decInfos, metadata) {
263
- var ret = [];
264
- var protoInitializers;
265
- var staticInitializers;
266
- var existingProtoNonFields = new Map();
267
- var existingStaticNonFields = new Map();
268
- for(var i = 0; i < decInfos.length; i++){
269
- var decInfo = decInfos[i];
270
- if (!Array.isArray(decInfo)) continue;
271
- var kind = decInfo[1];
272
- var name = decInfo[2];
273
- var isPrivate = decInfo.length > 3;
274
- var isStatic = kind >= 5;
275
- var base;
276
- var initializers;
277
- if (isStatic) {
278
- base = Class;
279
- kind = kind - 5;
280
- staticInitializers = staticInitializers || [];
281
- initializers = staticInitializers;
282
- } else {
283
- base = Class.prototype;
284
- protoInitializers = protoInitializers || [];
285
- initializers = protoInitializers;
286
- }
287
- if (kind !== 0 && !isPrivate) {
288
- var existingNonFields = isStatic ? existingStaticNonFields : existingProtoNonFields;
289
- var existingKind = existingNonFields.get(name) || 0;
290
- if (existingKind === true || existingKind === 3 && kind !== 4 || existingKind === 4 && kind !== 3) {
291
- throw new Error("Attempted to decorate a public method/accessor that has the same name as a previously decorated public method/accessor. This is not currently supported by the decorators plugin. Property name was: " + name);
292
- } else if (!existingKind && kind > 2) {
293
- existingNonFields.set(name, kind);
294
- } else {
295
- existingNonFields.set(name, true);
296
- }
297
- }
298
- applyMemberDec(ret, base, decInfo, name, kind, isStatic, isPrivate, initializers, metadata);
299
- }
300
- pushInitializers(ret, protoInitializers);
301
- pushInitializers(ret, staticInitializers);
302
- return ret;
303
- }
304
- function pushInitializers(ret, initializers) {
305
- if (initializers) {
306
- ret.push(function(instance) {
307
- for(var i = 0; i < initializers.length; i++){
308
- initializers[i].call(instance);
309
- }
310
- return instance;
311
- });
312
- }
313
- }
314
- function applyClassDecs(targetClass, classDecs, metadata) {
315
- if (classDecs.length > 0) {
316
- var initializers = [];
317
- var newClass = targetClass;
318
- var name = targetClass.name;
319
- for(var i = classDecs.length - 1; i >= 0; i--){
320
- var decoratorFinishedRef = {
321
- v: false
322
- };
323
- try {
324
- var nextNewClass = classDecs[i](newClass, {
325
- kind: "class",
326
- name: name,
327
- addInitializer: createAddInitializerMethod(initializers, decoratorFinishedRef),
328
- metadata
329
- });
330
- } finally{
331
- decoratorFinishedRef.v = true;
332
- }
333
- if (nextNewClass !== undefined) {
334
- assertValidReturnValue(10, nextNewClass);
335
- newClass = nextNewClass;
336
- }
337
- }
338
- return [
339
- defineMetadata(newClass, metadata),
340
- function() {
341
- for(var i = 0; i < initializers.length; i++){
342
- initializers[i].call(newClass);
343
- }
344
- }
345
- ];
346
- }
347
- }
348
- function defineMetadata(Class, metadata) {
349
- return Object.defineProperty(Class, Symbol.metadata || Symbol.for("Symbol.metadata"), {
350
- configurable: true,
351
- enumerable: true,
352
- value: metadata
353
- });
354
- }
355
- return function applyDecs2203R(targetClass, memberDecs, classDecs, parentClass) {
356
- if (parentClass !== void 0) {
357
- var parentMetadata = parentClass[Symbol.metadata || Symbol.for("Symbol.metadata")];
358
- }
359
- var metadata = Object.create(parentMetadata === void 0 ? null : parentMetadata);
360
- var e = applyMemberDecs(targetClass, memberDecs, metadata);
361
- if (!classDecs.length) defineMetadata(targetClass, metadata);
362
- return {
363
- e: e,
364
- get c () {
365
- return applyClassDecs(targetClass, classDecs, metadata);
366
- }
367
- };
368
- };
369
- }
370
- function _apply_decs_2203_r(targetClass, memberDecs, classDecs, parentClass) {
371
- return (_apply_decs_2203_r = applyDecs2203RFactory())(targetClass, memberDecs, classDecs, parentClass);
372
- }
373
- var _dec, _dec1, _dec2, _dec3, _dec4, _initProto;
374
- import { GENESIS_BLOCK_HEADER_HASH, INITIAL_L2_BLOCK_NUM } from '@aztec/constants';
375
- import { EpochCache } from '@aztec/epoch-cache';
376
- import { createEthereumChain } from '@aztec/ethereum/chain';
377
- import { BlockTagTooOldError, InboxContract, RollupContract } from '@aztec/ethereum/contracts';
378
- import { maxBigint } from '@aztec/foundation/bigint';
379
- import { BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-types';
380
- import { Buffer16, Buffer32 } from '@aztec/foundation/buffer';
381
- import { merge, pick } from '@aztec/foundation/collection';
382
- import { Fr } from '@aztec/foundation/curves/bn254';
383
- import { createLogger } from '@aztec/foundation/log';
384
- import { promiseWithResolvers } from '@aztec/foundation/promise';
385
- import { RunningPromise, makeLoggingErrorHandler } from '@aztec/foundation/running-promise';
386
- import { count } from '@aztec/foundation/string';
387
- import { DateProvider, Timer, elapsed } from '@aztec/foundation/timer';
388
- import { isDefined } from '@aztec/foundation/types';
389
- import { ContractClassPublishedEvent, PrivateFunctionBroadcastedEvent, UtilityFunctionBroadcastedEvent } from '@aztec/protocol-contracts/class-registry';
390
- import { ContractInstancePublishedEvent, ContractInstanceUpdatedEvent } from '@aztec/protocol-contracts/instance-registry';
391
- import { CommitteeAttestation, GENESIS_CHECKPOINT_HEADER_HASH, L2Block, L2BlockSourceEvents, PublishedL2Block } from '@aztec/stdlib/block';
392
- import { Checkpoint, PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
393
- import { computePublicBytecodeCommitment, isValidPrivateFunctionMembershipProof, isValidUtilityFunctionMembershipProof } from '@aztec/stdlib/contract';
394
- import { getEpochAtSlot, getEpochNumberAtTimestamp, getSlotAtTimestamp, getSlotRangeForEpoch, getTimestampRangeForEpoch } from '@aztec/stdlib/epoch-helpers';
395
- import { computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging';
396
- import { execInSpan, getTelemetryClient, trackSpan } from '@aztec/telemetry-client';
397
- import { EventEmitter } from 'events';
398
- import groupBy from 'lodash.groupby';
399
- import { createPublicClient, fallback, http } from 'viem';
400
- import { InitialCheckpointNumberNotSequentialError, NoBlobBodiesFoundError } from './errors.js';
401
- import { ArchiverInstrumentation } from './instrumentation.js';
402
- import { retrieveCheckpointsFromRollup, retrieveL1ToL2Message, retrieveL1ToL2Messages, retrievedToPublishedCheckpoint } from './l1/data_retrieval.js';
403
- import { validateAndLogTraceAvailability } from './l1/validate_trace.js';
404
- import { validateCheckpointAttestations } from './validation.js';
405
- function mapArchiverConfig(config) {
406
- return {
407
- pollingIntervalMs: config.archiverPollingIntervalMS,
408
- batchSize: config.archiverBatchSize,
409
- skipValidateCheckpointAttestations: config.skipValidateCheckpointAttestations,
410
- maxAllowedEthClientDriftSeconds: config.maxAllowedEthClientDriftSeconds,
411
- ethereumAllowNoDebugHosts: config.ethereumAllowNoDebugHosts
412
- };
413
- }
414
- _dec = trackSpan('Archiver.syncFromL1'), _dec1 = trackSpan('Archiver.sync'), _dec2 = trackSpan('Archiver.handleEpochPrune'), _dec3 = trackSpan('Archiver.handleL1ToL2Messages'), _dec4 = trackSpan('Archiver.handleCheckpoints');
415
- /**
416
- * Pulls checkpoints in a non-blocking manner and provides interface for their retrieval.
417
- * Responsible for handling robust L1 polling so that other components do not need to
418
- * concern themselves with it.
419
- */ export class Archiver extends EventEmitter {
420
- publicClient;
421
- debugClient;
422
- l1Addresses;
423
- dataStore;
424
- config;
425
- blobClient;
426
- epochCache;
427
- dateProvider;
428
- instrumentation;
429
- l1constants;
430
- log;
431
- static{
432
- ({ e: [_initProto] } = _apply_decs_2203_r(this, [
433
- [
434
- _dec,
435
- 2,
436
- "syncFromL1"
437
- ],
438
- [
439
- _dec1,
440
- 2,
441
- "sync"
442
- ],
443
- [
444
- _dec2,
445
- 2,
446
- "handleEpochPrune"
447
- ],
448
- [
449
- _dec3,
450
- 2,
451
- "handleL1ToL2Messages"
452
- ],
453
- [
454
- _dec4,
455
- 2,
456
- "handleCheckpoints"
457
- ]
458
- ], []));
459
- }
460
- /** A loop in which we will be continually fetching new checkpoints. */ runningPromise;
461
- rollup;
462
- inbox;
463
- store;
464
- l1BlockNumber;
465
- l1Timestamp;
466
- initialSyncComplete;
467
- initialSyncPromise;
468
- /** Queue of blocks to be added to the store, processed by the sync loop. */ blockQueue;
469
- tracer;
470
- /**
471
- * Creates a new instance of the Archiver.
472
- * @param publicClient - A client for interacting with the Ethereum node.
473
- * @param debugClient - A client for interacting with the Ethereum node for debug/trace methods.
474
- * @param rollupAddress - Ethereum address of the rollup contract.
475
- * @param inboxAddress - Ethereum address of the inbox contract.
476
- * @param registryAddress - Ethereum address of the registry contract.
477
- * @param pollingIntervalMs - The interval for polling for L1 logs (in milliseconds).
478
- * @param store - An archiver data store for storage & retrieval of blocks, encrypted logs & contract data.
479
- * @param log - A logger.
480
- */ constructor(publicClient, debugClient, l1Addresses, dataStore, config, blobClient, epochCache, dateProvider, instrumentation, l1constants, log = createLogger('archiver')){
481
- super(), this.publicClient = publicClient, this.debugClient = debugClient, this.l1Addresses = l1Addresses, this.dataStore = dataStore, this.config = config, this.blobClient = blobClient, this.epochCache = epochCache, this.dateProvider = dateProvider, this.instrumentation = instrumentation, this.l1constants = l1constants, this.log = log, this.initialSyncComplete = (_initProto(this), false), this.blockQueue = [];
482
- this.tracer = instrumentation.tracer;
483
- this.store = new ArchiverStoreHelper(dataStore);
484
- this.rollup = new RollupContract(publicClient, l1Addresses.rollupAddress);
485
- this.inbox = new InboxContract(publicClient, l1Addresses.inboxAddress);
486
- this.initialSyncPromise = promiseWithResolvers();
487
- // Running promise starts with a small interval inbetween runs, so all iterations needed for the initial sync
488
- // are done as fast as possible. This then gets updated once the initial sync completes.
489
- this.runningPromise = new RunningPromise(()=>this.sync(), this.log, this.config.pollingIntervalMs / 10, makeLoggingErrorHandler(this.log, NoBlobBodiesFoundError, BlockTagTooOldError));
490
- }
491
- /**
492
- * Creates a new instance of the Archiver and blocks until it syncs from chain.
493
- * @param config - The archiver's desired configuration.
494
- * @param archiverStore - The backing store for the archiver.
495
- * @param blockUntilSynced - If true, blocks until the archiver has fully synced.
496
- * @returns - An instance of the archiver.
497
- */ static async createAndSync(config, archiverStore, deps, blockUntilSynced = true) {
498
- const chain = createEthereumChain(config.l1RpcUrls, config.l1ChainId);
499
- const publicClient = createPublicClient({
500
- chain: chain.chainInfo,
501
- transport: fallback(config.l1RpcUrls.map((url)=>http(url, {
502
- batch: false
503
- }))),
504
- pollingInterval: config.viemPollingIntervalMS
505
- });
506
- // Create debug client using debug RPC URLs if available, otherwise fall back to regular RPC URLs
507
- const debugRpcUrls = config.l1DebugRpcUrls.length > 0 ? config.l1DebugRpcUrls : config.l1RpcUrls;
508
- const debugClient = createPublicClient({
509
- chain: chain.chainInfo,
510
- transport: fallback(debugRpcUrls.map((url)=>http(url, {
511
- batch: false
512
- }))),
513
- pollingInterval: config.viemPollingIntervalMS
514
- });
515
- const rollup = new RollupContract(publicClient, config.l1Contracts.rollupAddress);
516
- const [l1StartBlock, l1GenesisTime, proofSubmissionEpochs, genesisArchiveRoot, slashingProposerAddress] = await Promise.all([
517
- rollup.getL1StartBlock(),
518
- rollup.getL1GenesisTime(),
519
- rollup.getProofSubmissionEpochs(),
520
- rollup.getGenesisArchiveTreeRoot(),
521
- rollup.getSlashingProposerAddress()
522
- ]);
523
- const l1StartBlockHash = await publicClient.getBlock({
524
- blockNumber: l1StartBlock,
525
- includeTransactions: false
526
- }).then((block)=>Buffer32.fromString(block.hash));
527
- const { aztecEpochDuration: epochDuration, aztecSlotDuration: slotDuration, ethereumSlotDuration } = config;
528
- const l1Constants = {
529
- l1StartBlockHash,
530
- l1StartBlock,
531
- l1GenesisTime,
532
- epochDuration,
533
- slotDuration,
534
- ethereumSlotDuration,
535
- proofSubmissionEpochs: Number(proofSubmissionEpochs),
536
- genesisArchiveRoot: Fr.fromString(genesisArchiveRoot.toString())
537
- };
538
- const opts = merge({
539
- pollingIntervalMs: 10_000,
540
- batchSize: 100,
541
- maxAllowedEthClientDriftSeconds: 300,
542
- ethereumAllowNoDebugHosts: false
543
- }, mapArchiverConfig(config));
544
- const epochCache = deps.epochCache ?? await EpochCache.create(config.l1Contracts.rollupAddress, config, deps);
545
- const telemetry = deps.telemetry ?? getTelemetryClient();
546
- const archiver = new Archiver(publicClient, debugClient, {
547
- ...config.l1Contracts,
548
- slashingProposerAddress
549
- }, archiverStore, opts, deps.blobClient, epochCache, deps.dateProvider ?? new DateProvider(), await ArchiverInstrumentation.new(telemetry, ()=>archiverStore.estimateSize()), l1Constants);
550
- await archiver.start(blockUntilSynced);
551
- return archiver;
552
- }
553
- /** Updates archiver config */ updateConfig(newConfig) {
554
- this.config = merge(this.config, mapArchiverConfig(newConfig));
555
- }
556
- /**
557
- * Starts sync process.
558
- * @param blockUntilSynced - If true, blocks until the archiver has fully synced.
559
- */ async start(blockUntilSynced) {
560
- if (this.runningPromise.isRunning()) {
561
- throw new Error('Archiver is already running');
562
- }
563
- await this.blobClient.testSources();
564
- await this.testEthereumNodeSynced();
565
- await validateAndLogTraceAvailability(this.debugClient, this.config.ethereumAllowNoDebugHosts ?? false);
566
- // Log initial state for the archiver
567
- const { l1StartBlock } = this.l1constants;
568
- const { blocksSynchedTo = l1StartBlock, messagesSynchedTo = l1StartBlock } = await this.store.getSynchPoint();
569
- const currentL2Checkpoint = await this.getSynchedCheckpointNumber();
570
- this.log.info(`Starting archiver sync to rollup contract ${this.l1Addresses.rollupAddress.toString()} from L1 block ${blocksSynchedTo} and L2 checkpoint ${currentL2Checkpoint}`, {
571
- blocksSynchedTo,
572
- messagesSynchedTo,
573
- currentL2Checkpoint
574
- });
575
- // Start sync loop, and return the wait for initial sync if we are asked to block until synced
576
- this.runningPromise.start();
577
- if (blockUntilSynced) {
578
- return this.waitForInitialSync();
579
- }
580
- }
581
- syncImmediate() {
582
- return this.runningPromise.trigger();
583
- }
584
- /**
585
- * Queues a block to be added to the archiver store and triggers processing.
586
- * The block will be processed by the sync loop.
587
- * Implements the L2BlockSink interface.
588
- * @param block - The L2 block to add.
589
- * @returns A promise that resolves when the block has been added to the store, or rejects on error.
590
- */ addBlock(block) {
591
- return new Promise((resolve, reject)=>{
592
- this.blockQueue.push({
593
- block,
594
- resolve,
595
- reject
596
- });
597
- this.log.debug(`Queued block ${block.number} for processing`);
598
- // Trigger an immediate sync, but don't wait for it - the promise resolves when the block is processed
599
- this.syncImmediate().catch((err)=>{
600
- this.log.error(`Sync immediate call failed: ${err}`);
601
- });
602
- });
603
- }
604
- /**
605
- * Processes all queued blocks, adding them to the store.
606
- * Called at the beginning of each sync iteration.
607
- * Blocks are processed in the order they were queued.
608
- */ async processQueuedBlocks() {
609
- if (this.blockQueue.length === 0) {
610
- return;
611
- }
612
- // Take all blocks from the queue
613
- const queuedItems = this.blockQueue.splice(0, this.blockQueue.length);
614
- this.log.debug(`Processing ${queuedItems.length} queued block(s)`);
615
- // Process each block individually to properly resolve/reject each promise
616
- for (const { block, resolve, reject } of queuedItems){
617
- try {
618
- await this.store.addBlocks([
619
- block
620
- ]);
621
- this.log.debug(`Added block ${block.number} to store`);
622
- resolve();
623
- } catch (err) {
624
- this.log.error(`Failed to add block ${block.number} to store: ${err.message}`);
625
- reject(err);
626
- }
627
- }
628
- }
629
- waitForInitialSync() {
630
- return this.initialSyncPromise.promise;
631
- }
632
- /** Checks that the ethereum node we are connected to has a latest timestamp no more than the allowed drift. Throw if not. */ async testEthereumNodeSynced() {
633
- const maxAllowedDelay = this.config.maxAllowedEthClientDriftSeconds;
634
- if (maxAllowedDelay === 0) {
635
- return;
636
- }
637
- const { number, timestamp: l1Timestamp } = await this.publicClient.getBlock({
638
- includeTransactions: false
639
- });
640
- const currentTime = BigInt(this.dateProvider.nowInSeconds());
641
- if (currentTime - l1Timestamp > BigInt(maxAllowedDelay)) {
642
- throw new Error(`Ethereum node is out of sync (last block synced ${number} at ${l1Timestamp} vs current time ${currentTime})`);
643
- }
644
- }
645
- async syncFromL1() {
646
- /**
647
- * We keep track of three "pointers" to L1 blocks:
648
- * 1. the last L1 block that published an L2 block
649
- * 2. the last L1 block that added L1 to L2 messages
650
- * 3. the last L1 block that cancelled L1 to L2 messages
651
- *
652
- * We do this to deal with L1 data providers that are eventually consistent (e.g. Infura).
653
- * We guard against seeing block X with no data at one point, and later, the provider processes the block and it has data.
654
- * The archiver will stay back, until there's data on L1 that will move the pointers forward.
655
- */ const { l1StartBlock, l1StartBlockHash } = this.l1constants;
656
- const { blocksSynchedTo = l1StartBlock, messagesSynchedTo = {
657
- l1BlockNumber: l1StartBlock,
658
- l1BlockHash: l1StartBlockHash
659
- } } = await this.store.getSynchPoint();
660
- const currentL1Block = await this.publicClient.getBlock({
661
- includeTransactions: false
662
- });
663
- const currentL1BlockNumber = currentL1Block.number;
664
- const currentL1BlockHash = Buffer32.fromString(currentL1Block.hash);
665
- this.log.trace(`Starting new archiver sync iteration`, {
666
- blocksSynchedTo,
667
- messagesSynchedTo,
668
- currentL1BlockNumber,
669
- currentL1BlockHash
670
- });
671
- // ********** Ensuring Consistency of data pulled from L1 **********
672
- /**
673
- * There are a number of calls in this sync operation to L1 for retrieving
674
- * events and transaction data. There are a couple of things we need to bear in mind
675
- * to ensure that data is read exactly once.
676
- *
677
- * The first is the problem of eventually consistent ETH service providers like Infura.
678
- * Each L1 read operation will query data from the last L1 block that it saw emit its kind of data.
679
- * (so pending L1 to L2 messages will read from the last L1 block that emitted a message and so on)
680
- * This will mean the archiver will lag behind L1 and will only advance when there's L2-relevant activity on the chain.
681
- *
682
- * The second is that in between the various calls to L1, the block number can move meaning some
683
- * of the following calls will return data for blocks that were not present during earlier calls.
684
- * To combat this for the time being we simply ensure that all data retrieval methods only retrieve
685
- * data up to the currentBlockNumber captured at the top of this function. We might want to improve on this
686
- * in future but for the time being it should give us the guarantees that we need
687
- */ // ********** Events that are processed per L1 block **********
688
- await this.handleL1ToL2Messages(messagesSynchedTo, currentL1BlockNumber, currentL1BlockHash);
689
- // Get L1 timestamp for the current block
690
- const currentL1Timestamp = !this.l1Timestamp || !this.l1BlockNumber || this.l1BlockNumber !== currentL1BlockNumber ? (await this.publicClient.getBlock({
691
- blockNumber: currentL1BlockNumber
692
- })).timestamp : this.l1Timestamp;
693
- // Warn if the latest L1 block timestamp is too old
694
- const maxAllowedDelay = this.config.maxAllowedEthClientDriftSeconds;
695
- const now = this.dateProvider.nowInSeconds();
696
- if (maxAllowedDelay > 0 && Number(currentL1Timestamp) <= now - maxAllowedDelay) {
697
- this.log.warn(`Latest L1 block ${currentL1BlockNumber} timestamp ${currentL1Timestamp} is too old. Make sure your Ethereum node is synced.`, {
698
- currentL1BlockNumber,
699
- currentL1Timestamp,
700
- now,
701
- maxAllowedDelay
702
- });
703
- }
704
- // ********** Events that are processed per checkpoint **********
705
- if (currentL1BlockNumber > blocksSynchedTo) {
706
- // First we retrieve new checkpoints and L2 blocks and store them in the DB. This will also update the
707
- // pending chain validation status, proven checkpoint number, and synched L1 block number.
708
- const rollupStatus = await this.handleCheckpoints(blocksSynchedTo, currentL1BlockNumber);
709
- // Then we prune the current epoch if it'd reorg on next submission.
710
- // Note that we don't do this before retrieving checkpoints because we may need to retrieve
711
- // checkpoints from more than 2 epochs ago, so we want to make sure we have the latest view of
712
- // the chain locally before we start unwinding stuff. This can be optimized by figuring out
713
- // up to which point we're pruning, and then requesting checkpoints up to that point only.
714
- const { rollupCanPrune } = await this.handleEpochPrune(rollupStatus.provenCheckpointNumber, currentL1BlockNumber, currentL1Timestamp);
715
- // If the last checkpoint we processed had an invalid attestation, we manually advance the L1 syncpoint
716
- // past it, since otherwise we'll keep downloading it and reprocessing it on every iteration until
717
- // we get a valid checkpoint to advance the syncpoint.
718
- if (!rollupStatus.validationResult?.valid && rollupStatus.lastL1BlockWithCheckpoint !== undefined) {
719
- await this.store.setCheckpointSynchedL1BlockNumber(rollupStatus.lastL1BlockWithCheckpoint);
720
- }
721
- // And lastly we check if we are missing any checkpoints behind us due to a possible L1 reorg.
722
- // We only do this if rollup cant prune on the next submission. Otherwise we will end up
723
- // re-syncing the checkpoints we have just unwound above. We also dont do this if the last checkpoint is invalid,
724
- // since the archiver will rightfully refuse to sync up to it.
725
- if (!rollupCanPrune && rollupStatus.validationResult?.valid) {
726
- await this.checkForNewCheckpointsBeforeL1SyncPoint(rollupStatus, blocksSynchedTo, currentL1BlockNumber);
727
- }
728
- this.instrumentation.updateL1BlockHeight(currentL1BlockNumber);
729
- }
730
- // After syncing has completed, update the current l1 block number and timestamp,
731
- // otherwise we risk announcing to the world that we've synced to a given point,
732
- // but the corresponding blocks have not been processed (see #12631).
733
- this.l1Timestamp = currentL1Timestamp;
734
- this.l1BlockNumber = currentL1BlockNumber;
735
- // We resolve the initial sync only once we've caught up with the latest L1 block number (with 1 block grace)
736
- // so if the initial sync took too long, we still go for another iteration.
737
- if (!this.initialSyncComplete && currentL1BlockNumber + 1n >= await this.publicClient.getBlockNumber()) {
738
- this.log.info(`Initial archiver sync to L1 block ${currentL1BlockNumber} complete`, {
739
- l1BlockNumber: currentL1BlockNumber,
740
- syncPoint: await this.store.getSynchPoint(),
741
- ...await this.getL2Tips()
742
- });
743
- this.runningPromise.setPollingIntervalMS(this.config.pollingIntervalMs);
744
- this.initialSyncComplete = true;
745
- this.initialSyncPromise.resolve();
746
- }
747
- }
748
- /**
749
- * Fetches logs from L1 contracts and processes them.
750
- */ async sync() {
751
- // Process any queued blocks first, before doing L1 sync
752
- await this.processQueuedBlocks();
753
- // Now perform L1 sync
754
- await this.syncFromL1();
755
- }
756
- /** Queries the rollup contract on whether a prune can be executed on the immediate next L1 block. */ async canPrune(currentL1BlockNumber, currentL1Timestamp) {
757
- const time = (currentL1Timestamp ?? 0n) + BigInt(this.l1constants.ethereumSlotDuration);
758
- const result = await this.rollup.canPruneAtTime(time, {
759
- blockNumber: currentL1BlockNumber
760
- });
761
- if (result) {
762
- this.log.debug(`Rollup contract allows pruning at L1 block ${currentL1BlockNumber} time ${time}`, {
763
- currentL1Timestamp,
764
- pruneTime: time,
765
- currentL1BlockNumber
766
- });
767
- }
768
- return result;
769
- }
770
- /** Checks if there'd be a reorg for the next checkpoint submission and start pruning now. */ async handleEpochPrune(provenCheckpointNumber, currentL1BlockNumber, currentL1Timestamp) {
771
- const rollupCanPrune = await this.canPrune(currentL1BlockNumber, currentL1Timestamp);
772
- const localPendingCheckpointNumber = await this.getSynchedCheckpointNumber();
773
- const canPrune = localPendingCheckpointNumber > provenCheckpointNumber && rollupCanPrune;
774
- if (canPrune) {
775
- const timer = new Timer();
776
- const pruneFrom = CheckpointNumber(provenCheckpointNumber + 1);
777
- const header = await this.getCheckpointHeader(pruneFrom);
778
- if (header === undefined) {
779
- throw new Error(`Missing checkpoint header ${pruneFrom}`);
780
- }
781
- const pruneFromSlotNumber = header.slotNumber;
782
- const pruneFromEpochNumber = getEpochAtSlot(pruneFromSlotNumber, this.l1constants);
783
- const checkpointsToUnwind = localPendingCheckpointNumber - provenCheckpointNumber;
784
- const checkpointPromises = Array.from({
785
- length: checkpointsToUnwind
786
- }).fill(0).map((_, i)=>this.store.getCheckpointData(CheckpointNumber(i + pruneFrom)));
787
- const checkpoints = await Promise.all(checkpointPromises);
788
- const blockPromises = await Promise.all(checkpoints.filter(isDefined).map((cp)=>this.store.getBlocksForCheckpoint(CheckpointNumber(cp.checkpointNumber))));
789
- const newBlocks = blockPromises.filter(isDefined).flat();
790
- // Emit an event for listening services to react to the chain prune
791
- this.emit(L2BlockSourceEvents.L2PruneDetected, {
792
- type: L2BlockSourceEvents.L2PruneDetected,
793
- epochNumber: pruneFromEpochNumber,
794
- blocks: newBlocks
795
- });
796
- this.log.debug(`L2 prune from ${provenCheckpointNumber + 1} to ${localPendingCheckpointNumber} will occur on next checkpoint submission.`);
797
- await this.unwindCheckpoints(localPendingCheckpointNumber, checkpointsToUnwind);
798
- this.log.warn(`Unwound ${count(checkpointsToUnwind, 'checkpoint')} from checkpoint ${localPendingCheckpointNumber} ` + `to ${provenCheckpointNumber} due to predicted reorg at L1 block ${currentL1BlockNumber}. ` + `Updated latest checkpoint is ${await this.getSynchedCheckpointNumber()}.`);
799
- this.instrumentation.processPrune(timer.ms());
800
- // TODO(palla/reorg): Do we need to set the block synched L1 block number here?
801
- // Seems like the next iteration should handle this.
802
- // await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber);
803
- }
804
- return {
805
- rollupCanPrune
806
- };
807
- }
808
- nextRange(end, limit) {
809
- const batchSize = this.config.batchSize * this.l1constants.slotDuration / this.l1constants.ethereumSlotDuration;
810
- const nextStart = end + 1n;
811
- const nextEnd = nextStart + BigInt(batchSize);
812
- if (nextEnd > limit) {
813
- return [
814
- nextStart,
815
- limit
816
- ];
817
- }
818
- return [
819
- nextStart,
820
- nextEnd
821
- ];
822
- }
823
- async handleL1ToL2Messages(messagesSyncPoint, currentL1BlockNumber, _currentL1BlockHash) {
824
- this.log.trace(`Handling L1 to L2 messages from ${messagesSyncPoint.l1BlockNumber} to ${currentL1BlockNumber}.`);
825
- if (currentL1BlockNumber <= messagesSyncPoint.l1BlockNumber) {
826
- return;
827
- }
828
- // Load remote and local inbox states.
829
- const localMessagesInserted = await this.store.getTotalL1ToL2MessageCount();
830
- const localLastMessage = await this.store.getLastL1ToL2Message();
831
- const remoteMessagesState = await this.inbox.getState({
832
- blockNumber: currentL1BlockNumber
833
- });
834
- this.log.trace(`Retrieved remote inbox state at L1 block ${currentL1BlockNumber}.`, {
835
- localMessagesInserted,
836
- localLastMessage,
837
- remoteMessagesState
838
- });
839
- // Compare message count and rolling hash. If they match, no need to retrieve anything.
840
- if (remoteMessagesState.totalMessagesInserted === localMessagesInserted && remoteMessagesState.messagesRollingHash.equals(localLastMessage?.rollingHash ?? Buffer16.ZERO)) {
841
- this.log.trace(`No L1 to L2 messages to query between L1 blocks ${messagesSyncPoint.l1BlockNumber} and ${currentL1BlockNumber}.`);
842
- return;
843
- }
844
- // Check if our syncpoint is still valid. If not, there was an L1 reorg and we need to re-retrieve messages.
845
- // Note that we need to fetch it from logs and not from inbox state at the syncpoint l1 block number, since it
846
- // could be older than 128 blocks and non-archive nodes cannot resolve it.
847
- if (localLastMessage) {
848
- const remoteLastMessage = await this.retrieveL1ToL2Message(localLastMessage.leaf);
849
- this.log.trace(`Retrieved remote message for local last`, {
850
- remoteLastMessage,
851
- localLastMessage
852
- });
853
- if (!remoteLastMessage || !remoteLastMessage.rollingHash.equals(localLastMessage.rollingHash)) {
854
- this.log.warn(`Rolling back L1 to L2 messages due to hash mismatch or msg not found.`, {
855
- remoteLastMessage,
856
- messagesSyncPoint,
857
- localLastMessage
858
- });
859
- messagesSyncPoint = await this.rollbackL1ToL2Messages(localLastMessage, messagesSyncPoint);
860
- this.log.debug(`Rolled back L1 to L2 messages to L1 block ${messagesSyncPoint.l1BlockNumber}.`, {
861
- messagesSyncPoint
862
- });
863
- }
864
- }
865
- // Retrieve and save messages in batches. Each batch is estimated to acommodate up to L2 'blockBatchSize' blocks,
866
- let searchStartBlock = 0n;
867
- let searchEndBlock = messagesSyncPoint.l1BlockNumber;
868
- let lastMessage;
869
- let messageCount = 0;
870
- do {
871
- [searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber);
872
- this.log.trace(`Retrieving L1 to L2 messages between L1 blocks ${searchStartBlock} and ${searchEndBlock}.`);
873
- const messages = await retrieveL1ToL2Messages(this.inbox.getContract(), searchStartBlock, searchEndBlock);
874
- this.log.verbose(`Retrieved ${messages.length} new L1 to L2 messages between L1 blocks ${searchStartBlock} and ${searchEndBlock}.`);
875
- const timer = new Timer();
876
- await this.store.addL1ToL2Messages(messages);
877
- const perMsg = timer.ms() / messages.length;
878
- this.instrumentation.processNewMessages(messages.length, perMsg);
879
- for (const msg of messages){
880
- this.log.debug(`Downloaded L1 to L2 message`, {
881
- ...msg,
882
- leaf: msg.leaf.toString()
883
- });
884
- lastMessage = msg;
885
- messageCount++;
886
- }
887
- }while (searchEndBlock < currentL1BlockNumber)
888
- // Log stats for messages retrieved (if any).
889
- if (messageCount > 0) {
890
- this.log.info(`Retrieved ${messageCount} new L1 to L2 messages up to message with index ${lastMessage?.index} for checkpoint ${lastMessage?.checkpointNumber}`, {
891
- lastMessage,
892
- messageCount
893
- });
894
- }
895
- // Warn if the resulting rolling hash does not match the remote state we had retrieved.
896
- if (lastMessage && !lastMessage.rollingHash.equals(remoteMessagesState.messagesRollingHash)) {
897
- this.log.warn(`Last message retrieved rolling hash does not match remote state.`, {
898
- lastMessage,
899
- remoteMessagesState
900
- });
901
- }
902
- }
903
- async retrieveL1ToL2Message(leaf) {
904
- const currentL1BlockNumber = await this.publicClient.getBlockNumber();
905
- let searchStartBlock = 0n;
906
- let searchEndBlock = this.l1constants.l1StartBlock - 1n;
907
- do {
908
- [searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber);
909
- const message = await retrieveL1ToL2Message(this.inbox.getContract(), leaf, searchStartBlock, searchEndBlock);
910
- if (message) {
911
- return message;
912
- }
913
- }while (searchEndBlock < currentL1BlockNumber)
914
- return undefined;
915
- }
916
- async rollbackL1ToL2Messages(localLastMessage, messagesSyncPoint) {
917
- // Slowly go back through our messages until we find the last common message.
918
- // We could query the logs in batch as an optimization, but the depth of the reorg should not be deep, and this
919
- // is a very rare case, so it's fine to query one log at a time.
920
- let commonMsg;
921
- this.log.verbose(`Searching most recent common L1 to L2 message at or before index ${localLastMessage.index}`);
922
- for await (const msg of this.store.iterateL1ToL2Messages({
923
- reverse: true,
924
- end: localLastMessage.index
925
- })){
926
- const remoteMsg = await this.retrieveL1ToL2Message(msg.leaf);
927
- const logCtx = {
928
- remoteMsg,
929
- localMsg: msg
930
- };
931
- if (remoteMsg && remoteMsg.rollingHash.equals(msg.rollingHash)) {
932
- this.log.verbose(`Found most recent common L1 to L2 message at index ${msg.index} on L1 block ${msg.l1BlockNumber}`, logCtx);
933
- commonMsg = remoteMsg;
934
- break;
935
- } else if (remoteMsg) {
936
- this.log.debug(`Local L1 to L2 message with index ${msg.index} has different rolling hash`, logCtx);
937
- } else {
938
- this.log.debug(`Local L1 to L2 message with index ${msg.index} not found on L1`, logCtx);
939
- }
940
- }
941
- // Delete everything after the common message we found.
942
- const lastGoodIndex = commonMsg?.index;
943
- this.log.warn(`Deleting all local L1 to L2 messages after index ${lastGoodIndex ?? 'undefined'}`);
944
- await this.store.removeL1ToL2Messages(lastGoodIndex !== undefined ? lastGoodIndex + 1n : 0n);
945
- // Update the syncpoint so the loop below reprocesses the changed messages. We go to the block before
946
- // the last common one, so we force reprocessing it, in case new messages were added on that same L1 block
947
- // after the last common message.
948
- const syncPointL1BlockNumber = commonMsg ? commonMsg.l1BlockNumber - 1n : this.l1constants.l1StartBlock;
949
- const syncPointL1BlockHash = await this.getL1BlockHash(syncPointL1BlockNumber);
950
- messagesSyncPoint = {
951
- l1BlockNumber: syncPointL1BlockNumber,
952
- l1BlockHash: syncPointL1BlockHash
953
- };
954
- await this.store.setMessageSynchedL1Block(messagesSyncPoint);
955
- return messagesSyncPoint;
956
- }
957
- async getL1BlockHash(l1BlockNumber) {
958
- const block = await this.publicClient.getBlock({
959
- blockNumber: l1BlockNumber,
960
- includeTransactions: false
961
- });
962
- if (!block) {
963
- throw new Error(`Missing L1 block ${l1BlockNumber}`);
964
- }
965
- return Buffer32.fromString(block.hash);
966
- }
967
- async handleCheckpoints(blocksSynchedTo, currentL1BlockNumber) {
968
- const localPendingCheckpointNumber = await this.getSynchedCheckpointNumber();
969
- const initialValidationResult = await this.store.getPendingChainValidationStatus();
970
- const { provenCheckpointNumber, provenArchive, pendingCheckpointNumber, pendingArchive, archiveOfMyCheckpoint: archiveForLocalPendingCheckpointNumber } = await execInSpan(this.tracer, 'Archiver.getRollupStatus', ()=>this.rollup.status(localPendingCheckpointNumber, {
971
- blockNumber: currentL1BlockNumber
972
- }));
973
- const rollupStatus = {
974
- provenCheckpointNumber,
975
- provenArchive: provenArchive.toString(),
976
- pendingCheckpointNumber,
977
- pendingArchive: pendingArchive.toString(),
978
- validationResult: initialValidationResult
979
- };
980
- this.log.trace(`Retrieved rollup status at current L1 block ${currentL1BlockNumber}.`, {
981
- localPendingCheckpointNumber,
982
- blocksSynchedTo,
983
- currentL1BlockNumber,
984
- archiveForLocalPendingCheckpointNumber,
985
- ...rollupStatus
986
- });
987
- const updateProvenCheckpoint = async ()=>{
988
- // Annoying edge case: if proven checkpoint is moved back to 0 due to a reorg at the beginning of the chain,
989
- // we need to set it to zero. This is an edge case because we dont have a checkpoint zero (initial checkpoint is one),
990
- // so localCheckpointForDestinationProvenCheckpointNumber would not be found below.
991
- if (provenCheckpointNumber === 0) {
992
- const localProvenCheckpointNumber = await this.getProvenCheckpointNumber();
993
- if (localProvenCheckpointNumber !== provenCheckpointNumber) {
994
- await this.setProvenCheckpointNumber(provenCheckpointNumber);
995
- this.log.info(`Rolled back proven chain to checkpoint ${provenCheckpointNumber}`, {
996
- provenCheckpointNumber
997
- });
998
- }
999
- }
1000
- const localCheckpointForDestinationProvenCheckpointNumber = await this.store.getCheckpointData(provenCheckpointNumber);
1001
- // Sanity check. I've hit what seems to be a state where the proven checkpoint is set to a value greater than the latest
1002
- // synched checkpoint when requesting L2Tips from the archiver. This is the only place where the proven checkpoint is set.
1003
- const synched = await this.getSynchedCheckpointNumber();
1004
- if (localCheckpointForDestinationProvenCheckpointNumber && synched < localCheckpointForDestinationProvenCheckpointNumber.checkpointNumber) {
1005
- this.log.error(`Hit local checkpoint greater than last synched checkpoint: ${localCheckpointForDestinationProvenCheckpointNumber.checkpointNumber} > ${synched}`);
1006
- }
1007
- this.log.trace(`Local checkpoint for remote proven checkpoint ${provenCheckpointNumber} is ${localCheckpointForDestinationProvenCheckpointNumber?.archive.root.toString() ?? 'undefined'}`);
1008
- if (localCheckpointForDestinationProvenCheckpointNumber && provenArchive.equals(localCheckpointForDestinationProvenCheckpointNumber.archive.root)) {
1009
- const localProvenCheckpointNumber = await this.getProvenCheckpointNumber();
1010
- if (localProvenCheckpointNumber !== provenCheckpointNumber) {
1011
- await this.setProvenCheckpointNumber(provenCheckpointNumber);
1012
- this.log.info(`Updated proven chain to checkpoint ${provenCheckpointNumber}`, {
1013
- provenCheckpointNumber
1014
- });
1015
- const provenSlotNumber = localCheckpointForDestinationProvenCheckpointNumber.header.slotNumber;
1016
- const provenEpochNumber = getEpochAtSlot(provenSlotNumber, this.l1constants);
1017
- const lastBlockNumberInCheckpoint = localCheckpointForDestinationProvenCheckpointNumber.startBlock + localCheckpointForDestinationProvenCheckpointNumber.numBlocks - 1;
1018
- this.emit(L2BlockSourceEvents.L2BlockProven, {
1019
- type: L2BlockSourceEvents.L2BlockProven,
1020
- blockNumber: BlockNumber(lastBlockNumberInCheckpoint),
1021
- slotNumber: provenSlotNumber,
1022
- epochNumber: provenEpochNumber
1023
- });
1024
- this.instrumentation.updateLastProvenBlock(lastBlockNumberInCheckpoint);
1025
- } else {
1026
- this.log.trace(`Proven checkpoint ${provenCheckpointNumber} already stored.`);
1027
- }
1028
- }
1029
- };
1030
- // This is an edge case that we only hit if there are no proposed checkpoints.
1031
- // If we have 0 checkpoints locally and there are no checkpoints onchain, there is nothing to do.
1032
- const noCheckpoints = localPendingCheckpointNumber === 0 && pendingCheckpointNumber === 0;
1033
- if (noCheckpoints) {
1034
- await this.store.setCheckpointSynchedL1BlockNumber(currentL1BlockNumber);
1035
- this.log.debug(`No checkpoints to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}, no checkpoints on chain`);
1036
- return rollupStatus;
1037
- }
1038
- await updateProvenCheckpoint();
1039
- // Related to the L2 reorgs of the pending chain. We are only interested in actually addressing a reorg if there
1040
- // is any state that could be impacted by it. If we have no checkpoints, there is no impact.
1041
- if (localPendingCheckpointNumber > 0) {
1042
- const localPendingCheckpoint = await this.store.getCheckpointData(localPendingCheckpointNumber);
1043
- if (localPendingCheckpoint === undefined) {
1044
- throw new Error(`Missing checkpoint ${localPendingCheckpointNumber}`);
1045
- }
1046
- const localPendingArchiveRoot = localPendingCheckpoint.archive.root.toString();
1047
- const noCheckpointSinceLast = localPendingCheckpoint && pendingArchive.toString() === localPendingArchiveRoot;
1048
- if (noCheckpointSinceLast) {
1049
- // We believe the following line causes a problem when we encounter L1 re-orgs.
1050
- // Basically, by setting the synched L1 block number here, we are saying that we have
1051
- // processed all checkpoints up to the current L1 block number and we will not attempt to retrieve logs from
1052
- // this block again (or any blocks before).
1053
- // However, in the re-org scenario, our L1 node is temporarily lying to us and we end up potentially missing checkpoints.
1054
- // We must only set this block number based on actually retrieved logs.
1055
- // TODO(#8621): Tackle this properly when we handle L1 Re-orgs.
1056
- // await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber);
1057
- this.log.debug(`No checkpoints to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
1058
- return rollupStatus;
1059
- }
1060
- const localPendingCheckpointInChain = archiveForLocalPendingCheckpointNumber.equals(localPendingCheckpoint.archive.root);
1061
- if (!localPendingCheckpointInChain) {
1062
- // If our local pending checkpoint tip is not in the chain on L1, a "prune" must have happened
1063
- // or L1 has reorged.
1064
- // In any case, we have to figure out how far into the past the action will take us.
1065
- // For simplicity, we rewind until we reach a checkpoint that is also in the chain on L1.
1066
- this.log.debug(`L2 prune has been detected due to local pending checkpoint ${localPendingCheckpointNumber} not in chain`, {
1067
- localPendingCheckpointNumber,
1068
- localPendingArchiveRoot,
1069
- archiveForLocalPendingCheckpointNumber
1070
- });
1071
- let tipAfterUnwind = localPendingCheckpointNumber;
1072
- while(true){
1073
- const candidateCheckpoint = await this.store.getCheckpointData(tipAfterUnwind);
1074
- if (candidateCheckpoint === undefined) {
1075
- break;
1076
- }
1077
- const archiveAtContract = await this.rollup.archiveAt(candidateCheckpoint.checkpointNumber);
1078
- this.log.trace(`Checking local checkpoint ${candidateCheckpoint.checkpointNumber} with archive ${candidateCheckpoint.archive.root}`, {
1079
- archiveAtContract,
1080
- archiveLocal: candidateCheckpoint.archive.root.toString()
1081
- });
1082
- if (archiveAtContract.equals(candidateCheckpoint.archive.root)) {
1083
- break;
1084
- }
1085
- tipAfterUnwind--;
1086
- }
1087
- const checkpointsToUnwind = localPendingCheckpointNumber - tipAfterUnwind;
1088
- await this.unwindCheckpoints(localPendingCheckpointNumber, checkpointsToUnwind);
1089
- this.log.warn(`Unwound ${count(checkpointsToUnwind, 'checkpoint')} from checkpoint ${localPendingCheckpointNumber} ` + `due to mismatched checkpoint hashes at L1 block ${currentL1BlockNumber}. ` + `Updated L2 latest checkpoint is ${await this.getSynchedCheckpointNumber()}.`);
1090
- }
1091
- }
1092
- // Retrieve checkpoints in batches. Each batch is estimated to accommodate up to 'blockBatchSize' L1 blocks,
1093
- // computed using the L2 block time vs the L1 block time.
1094
- let searchStartBlock = blocksSynchedTo;
1095
- let searchEndBlock = blocksSynchedTo;
1096
- let lastRetrievedCheckpoint;
1097
- let lastL1BlockWithCheckpoint = undefined;
1098
- do {
1099
- [searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber);
1100
- this.log.trace(`Retrieving checkpoints from L1 block ${searchStartBlock} to ${searchEndBlock}`);
1101
- // TODO(md): Retrieve from blob client then from consensus client, then from peers
1102
- const retrievedCheckpoints = await execInSpan(this.tracer, 'Archiver.retrieveCheckpointsFromRollup', ()=>retrieveCheckpointsFromRollup(this.rollup.getContract(), this.publicClient, this.debugClient, this.blobClient, searchStartBlock, searchEndBlock, this.l1Addresses, this.instrumentation, this.log, !this.initialSyncComplete));
1103
- if (retrievedCheckpoints.length === 0) {
1104
- // We are not calling `setBlockSynchedL1BlockNumber` because it may cause sync issues if based on Infura.
1105
- // See further details in earlier comments.
1106
- this.log.trace(`Retrieved no new checkpoints from L1 block ${searchStartBlock} to ${searchEndBlock}`);
1107
- continue;
1108
- }
1109
- this.log.debug(`Retrieved ${retrievedCheckpoints.length} new checkpoints between L1 blocks ${searchStartBlock} and ${searchEndBlock}`, {
1110
- lastProcessedCheckpoint: retrievedCheckpoints[retrievedCheckpoints.length - 1].l1,
1111
- searchStartBlock,
1112
- searchEndBlock
1113
- });
1114
- const publishedCheckpoints = await Promise.all(retrievedCheckpoints.map((b)=>retrievedToPublishedCheckpoint(b)));
1115
- const validCheckpoints = [];
1116
- for (const published of publishedCheckpoints){
1117
- const validationResult = this.config.skipValidateCheckpointAttestations ? {
1118
- valid: true
1119
- } : await validateCheckpointAttestations(published, this.epochCache, this.l1constants, this.log);
1120
- // Only update the validation result if it has changed, so we can keep track of the first invalid checkpoint
1121
- // in case there is a sequence of more than one invalid checkpoint, as we need to invalidate the first one.
1122
- // There is an exception though: if a checkpoint is invalidated and replaced with another invalid checkpoint,
1123
- // we need to update the validation result, since we need to be able to invalidate the new one.
1124
- // See test 'chain progresses if an invalid checkpoint is invalidated with an invalid one' for more info.
1125
- if (rollupStatus.validationResult?.valid !== validationResult.valid || !rollupStatus.validationResult.valid && !validationResult.valid && rollupStatus.validationResult.checkpoint.checkpointNumber === validationResult.checkpoint.checkpointNumber) {
1126
- rollupStatus.validationResult = validationResult;
1127
- }
1128
- if (!validationResult.valid) {
1129
- this.log.warn(`Skipping checkpoint ${published.checkpoint.number} due to invalid attestations`, {
1130
- checkpointHash: published.checkpoint.hash(),
1131
- l1BlockNumber: published.l1.blockNumber,
1132
- ...pick(validationResult, 'reason')
1133
- });
1134
- // Emit event for invalid checkpoint detection
1135
- this.emit(L2BlockSourceEvents.InvalidAttestationsCheckpointDetected, {
1136
- type: L2BlockSourceEvents.InvalidAttestationsCheckpointDetected,
1137
- validationResult
1138
- });
1139
- continue;
1140
- }
1141
- // Check the inHash of the checkpoint against the l1->l2 messages.
1142
- // The messages should've been synced up to the currentL1BlockNumber and must be available for the published
1143
- // checkpoints we just retrieved.
1144
- const l1ToL2Messages = await this.getL1ToL2Messages(published.checkpoint.number);
1145
- const computedInHash = computeInHashFromL1ToL2Messages(l1ToL2Messages);
1146
- const publishedInHash = published.checkpoint.header.inHash;
1147
- if (!computedInHash.equals(publishedInHash)) {
1148
- this.log.fatal(`Mismatch inHash for checkpoint ${published.checkpoint.number}`, {
1149
- checkpointHash: published.checkpoint.hash(),
1150
- l1BlockNumber: published.l1.blockNumber,
1151
- computedInHash,
1152
- publishedInHash
1153
- });
1154
- // Throwing an error since this is most likely caused by a bug.
1155
- throw new Error(`Mismatch inHash for checkpoint ${published.checkpoint.number}. Expected ${computedInHash} but got ${publishedInHash}`);
1156
- }
1157
- validCheckpoints.push(published);
1158
- this.log.debug(`Ingesting new checkpoint ${published.checkpoint.number} with ${published.checkpoint.blocks.length} blocks`, {
1159
- checkpointHash: published.checkpoint.hash(),
1160
- l1BlockNumber: published.l1.blockNumber,
1161
- ...published.checkpoint.header.toInspect(),
1162
- blocks: published.checkpoint.blocks.map((b)=>b.getStats())
1163
- });
1164
- }
1165
- try {
1166
- const updatedValidationResult = rollupStatus.validationResult === initialValidationResult ? undefined : rollupStatus.validationResult;
1167
- const [processDuration] = await elapsed(()=>execInSpan(this.tracer, 'Archiver.addCheckpoints', ()=>this.addCheckpoints(validCheckpoints, updatedValidationResult)));
1168
- this.instrumentation.processNewBlocks(processDuration / validCheckpoints.length, validCheckpoints.flatMap((c)=>c.checkpoint.blocks));
1169
- } catch (err) {
1170
- if (err instanceof InitialCheckpointNumberNotSequentialError) {
1171
- const { previousCheckpointNumber, newCheckpointNumber } = err;
1172
- const previousCheckpoint = previousCheckpointNumber ? await this.store.getCheckpointData(CheckpointNumber(previousCheckpointNumber)) : undefined;
1173
- const updatedL1SyncPoint = previousCheckpoint?.l1.blockNumber ?? this.l1constants.l1StartBlock;
1174
- await this.store.setBlockSynchedL1BlockNumber(updatedL1SyncPoint);
1175
- this.log.warn(`Attempting to insert checkpoint ${newCheckpointNumber} with previous block ${previousCheckpointNumber}. Rolling back L1 sync point to ${updatedL1SyncPoint} to try and fetch the missing blocks.`, {
1176
- previousCheckpointNumber,
1177
- newCheckpointNumber,
1178
- updatedL1SyncPoint
1179
- });
1180
- }
1181
- throw err;
1182
- }
1183
- for (const checkpoint of validCheckpoints){
1184
- this.log.info(`Downloaded checkpoint ${checkpoint.checkpoint.number}`, {
1185
- checkpointHash: checkpoint.checkpoint.hash(),
1186
- checkpointNumber: checkpoint.checkpoint.number,
1187
- blockCount: checkpoint.checkpoint.blocks.length,
1188
- txCount: checkpoint.checkpoint.blocks.reduce((acc, b)=>acc + b.body.txEffects.length, 0),
1189
- header: checkpoint.checkpoint.header.toInspect(),
1190
- archiveRoot: checkpoint.checkpoint.archive.root.toString(),
1191
- archiveNextLeafIndex: checkpoint.checkpoint.archive.nextAvailableLeafIndex
1192
- });
1193
- }
1194
- lastRetrievedCheckpoint = validCheckpoints.at(-1) ?? lastRetrievedCheckpoint;
1195
- lastL1BlockWithCheckpoint = retrievedCheckpoints.at(-1)?.l1.blockNumber ?? lastL1BlockWithCheckpoint;
1196
- }while (searchEndBlock < currentL1BlockNumber)
1197
- // Important that we update AFTER inserting the blocks.
1198
- await updateProvenCheckpoint();
1199
- return {
1200
- ...rollupStatus,
1201
- lastRetrievedCheckpoint,
1202
- lastL1BlockWithCheckpoint
1203
- };
1204
- }
1205
- async checkForNewCheckpointsBeforeL1SyncPoint(status, blocksSynchedTo, currentL1BlockNumber) {
1206
- const { lastRetrievedCheckpoint, pendingCheckpointNumber } = status;
1207
- // Compare the last checkpoint we have (either retrieved in this round or loaded from store) with what the
1208
- // rollup contract told us was the latest one (pinned at the currentL1BlockNumber).
1209
- const latestLocalCheckpointNumber = lastRetrievedCheckpoint?.checkpoint.number ?? await this.getSynchedCheckpointNumber();
1210
- if (latestLocalCheckpointNumber < pendingCheckpointNumber) {
1211
- // Here we have consumed all logs until the `currentL1Block` we pinned at the beginning of the archiver loop,
1212
- // but still haven't reached the pending checkpoint according to the call to the rollup contract.
1213
- // We suspect an L1 reorg that added checkpoints *behind* us. If that is the case, it must have happened between
1214
- // the last checkpoint we saw and the current one, so we reset the last synched L1 block number. In the edge case
1215
- // where we don't have one, we go back 2 L1 epochs, which is the deepest possible reorg (assuming Casper is working).
1216
- let latestLocalCheckpointArchive = undefined;
1217
- let targetL1BlockNumber = maxBigint(currentL1BlockNumber - 64n, 0n);
1218
- if (lastRetrievedCheckpoint) {
1219
- latestLocalCheckpointArchive = lastRetrievedCheckpoint.checkpoint.archive.root.toString();
1220
- targetL1BlockNumber = lastRetrievedCheckpoint.l1.blockNumber;
1221
- } else if (latestLocalCheckpointNumber > 0) {
1222
- const checkpoint = await this.store.getRangeOfCheckpoints(latestLocalCheckpointNumber, 1).then(([c])=>c);
1223
- latestLocalCheckpointArchive = checkpoint.archive.root.toString();
1224
- targetL1BlockNumber = checkpoint.l1.blockNumber;
1225
- }
1226
- this.log.warn(`Failed to reach checkpoint ${pendingCheckpointNumber} at ${currentL1BlockNumber} (latest is ${latestLocalCheckpointNumber}). ` + `Rolling back last synched L1 block number to ${targetL1BlockNumber}.`, {
1227
- latestLocalCheckpointNumber,
1228
- latestLocalCheckpointArchive,
1229
- blocksSynchedTo,
1230
- currentL1BlockNumber,
1231
- ...status
1232
- });
1233
- await this.store.setBlockSynchedL1BlockNumber(targetL1BlockNumber);
1234
- } else {
1235
- this.log.trace(`No new checkpoints behind L1 sync point to retrieve.`, {
1236
- latestLocalCheckpointNumber,
1237
- pendingCheckpointNumber
1238
- });
1239
- }
1240
- }
1241
- /** Resumes the archiver after a stop. */ resume() {
1242
- if (this.runningPromise.isRunning()) {
1243
- this.log.warn(`Archiver already running`);
1244
- }
1245
- this.log.info(`Restarting archiver`);
1246
- this.runningPromise.start();
1247
- }
1248
- /**
1249
- * Stops the archiver.
1250
- * @returns A promise signalling completion of the stop process.
1251
- */ async stop() {
1252
- this.log.debug('Stopping...');
1253
- await this.runningPromise.stop();
1254
- this.log.info('Stopped.');
1255
- return Promise.resolve();
1256
- }
1257
- backupTo(destPath) {
1258
- return this.dataStore.backupTo(destPath);
1259
- }
1260
- getL1Constants() {
1261
- return Promise.resolve(this.l1constants);
1262
- }
1263
- getGenesisValues() {
1264
- return Promise.resolve({
1265
- genesisArchiveRoot: this.l1constants.genesisArchiveRoot
1266
- });
1267
- }
1268
- getRollupAddress() {
1269
- return Promise.resolve(this.l1Addresses.rollupAddress);
1270
- }
1271
- getRegistryAddress() {
1272
- return Promise.resolve(this.l1Addresses.registryAddress);
1273
- }
1274
- getL1BlockNumber() {
1275
- return this.l1BlockNumber;
1276
- }
1277
- getL1Timestamp() {
1278
- return Promise.resolve(this.l1Timestamp);
1279
- }
1280
- getL2SlotNumber() {
1281
- return Promise.resolve(this.l1Timestamp === undefined ? undefined : getSlotAtTimestamp(this.l1Timestamp, this.l1constants));
1282
- }
1283
- getL2EpochNumber() {
1284
- return Promise.resolve(this.l1Timestamp === undefined ? undefined : getEpochNumberAtTimestamp(this.l1Timestamp, this.l1constants));
1285
- }
1286
- async getBlocksForEpoch(epochNumber) {
1287
- const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1constants);
1288
- const blocks = [];
1289
- // Walk the list of checkpoints backwards and filter by slots matching the requested epoch.
1290
- // We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here.
1291
- let checkpoint = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber());
1292
- const slot = (b)=>b.header.slotNumber;
1293
- while(checkpoint && slot(checkpoint) >= start){
1294
- if (slot(checkpoint) <= end) {
1295
- // push the blocks on backwards
1296
- const endBlock = checkpoint.startBlock + checkpoint.numBlocks - 1;
1297
- for(let i = endBlock; i >= checkpoint.startBlock; i--){
1298
- const block = await this.getBlock(BlockNumber(i));
1299
- if (block) {
1300
- blocks.push(block);
1301
- }
1302
- }
1303
- }
1304
- checkpoint = await this.store.getCheckpointData(CheckpointNumber(checkpoint.checkpointNumber - 1));
1305
- }
1306
- return blocks.reverse();
1307
- }
1308
- async getBlockHeadersForEpoch(epochNumber) {
1309
- const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1constants);
1310
- const blocks = [];
1311
- // Walk the list of checkpoints backwards and filter by slots matching the requested epoch.
1312
- // We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here.
1313
- let checkpoint = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber());
1314
- const slot = (b)=>b.header.slotNumber;
1315
- while(checkpoint && slot(checkpoint) >= start){
1316
- if (slot(checkpoint) <= end) {
1317
- // push the blocks on backwards
1318
- const endBlock = checkpoint.startBlock + checkpoint.numBlocks - 1;
1319
- for(let i = endBlock; i >= checkpoint.startBlock; i--){
1320
- const block = await this.getBlockHeader(BlockNumber(i));
1321
- if (block) {
1322
- blocks.push(block);
1323
- }
1324
- }
1325
- }
1326
- checkpoint = await this.store.getCheckpointData(CheckpointNumber(checkpoint.checkpointNumber - 1));
1327
- }
1328
- return blocks.reverse();
1329
- }
1330
- async isEpochComplete(epochNumber) {
1331
- // The epoch is complete if the current L2 block is the last one in the epoch (or later)
1332
- const header = await this.getBlockHeader('latest');
1333
- const slot = header ? header.globalVariables.slotNumber : undefined;
1334
- const [_startSlot, endSlot] = getSlotRangeForEpoch(epochNumber, this.l1constants);
1335
- if (slot && slot >= endSlot) {
1336
- return true;
1337
- }
1338
- // If we haven't run an initial sync, just return false.
1339
- const l1Timestamp = this.l1Timestamp;
1340
- if (l1Timestamp === undefined) {
1341
- return false;
1342
- }
1343
- // If not, the epoch may also be complete if the L2 slot has passed without a block
1344
- // We compute this based on the end timestamp for the given epoch and the timestamp of the last L1 block
1345
- const [_startTimestamp, endTimestamp] = getTimestampRangeForEpoch(epochNumber, this.l1constants);
1346
- // For this computation, we throw in a small leeway just for good measure,
1347
- // since we know the next L1 block won't be mined within this range. Remember that
1348
- // l1Timestamp is the timestamp of the last L1 block we've seen, so this relies on
1349
- // the fact that L1 won't mine two blocks within this time of each other.
1350
- // TODO(palla/reorg): Is the above a safe assumption?
1351
- const leeway = 1n;
1352
- return l1Timestamp + leeway >= endTimestamp;
1353
- }
1354
- /** Returns whether the archiver has completed an initial sync run successfully. */ isInitialSyncComplete() {
1355
- return this.initialSyncComplete;
1356
- }
1357
- async getCheckpointHeader(number) {
1358
- if (number === 'latest') {
1359
- number = await this.getSynchedCheckpointNumber();
1360
- }
1361
- if (number === 0) {
1362
- return undefined;
1363
- }
1364
- const checkpoint = await this.store.getCheckpointData(number);
1365
- if (!checkpoint) {
1366
- return undefined;
1367
- }
1368
- return checkpoint.header;
1369
- }
1370
- getCheckpointNumber() {
1371
- return this.getSynchedCheckpointNumber();
1372
- }
1373
- getSynchedCheckpointNumber() {
1374
- return this.store.getSynchedCheckpointNumber();
1375
- }
1376
- getProvenCheckpointNumber() {
1377
- return this.store.getProvenCheckpointNumber();
1378
- }
1379
- setProvenCheckpointNumber(checkpointNumber) {
1380
- return this.store.setProvenCheckpointNumber(checkpointNumber);
1381
- }
1382
- unwindCheckpoints(from, checkpointsToUnwind) {
1383
- return this.store.unwindCheckpoints(from, checkpointsToUnwind);
1384
- }
1385
- async getLastBlockNumberInCheckpoint(checkpointNumber) {
1386
- const checkpointData = await this.store.getCheckpointData(checkpointNumber);
1387
- if (!checkpointData) {
1388
- return undefined;
1389
- }
1390
- return BlockNumber(checkpointData.startBlock + checkpointData.numBlocks - 1);
1391
- }
1392
- addCheckpoints(checkpoints, pendingChainValidationStatus) {
1393
- return this.store.addCheckpoints(checkpoints, pendingChainValidationStatus);
1394
- }
1395
- getBlockHeaderByHash(blockHash) {
1396
- return this.store.getBlockHeaderByHash(blockHash);
1397
- }
1398
- getBlockHeaderByArchive(archive) {
1399
- return this.store.getBlockHeaderByArchive(archive);
1400
- }
1401
- /**
1402
- * Gets an l2 block.
1403
- * @param number - The block number to return.
1404
- * @returns The requested L2 block.
1405
- */ async getL2BlockNew(number) {
1406
- // If the number provided is negative, return the latest block.
1407
- if (number < 0) {
1408
- number = await this.store.getSynchedL2BlockNumber();
1409
- }
1410
- if (number === 0) {
1411
- return undefined;
1412
- }
1413
- const publishedBlock = await this.store.store.getBlock(number);
1414
- return publishedBlock;
1415
- }
1416
- async getL2BlocksNew(from, limit, proven) {
1417
- const blocks = await this.store.store.getBlocks(from, limit);
1418
- if (proven === true) {
1419
- const provenBlockNumber = await this.store.getProvenBlockNumber();
1420
- return blocks.filter((b)=>b.number <= provenBlockNumber);
1421
- }
1422
- return blocks;
1423
- }
1424
- async getBlockHeader(number) {
1425
- if (number === 'latest') {
1426
- number = await this.store.getSynchedL2BlockNumber();
1427
- }
1428
- if (number === 0) {
1429
- return undefined;
1430
- }
1431
- const headers = await this.store.getBlockHeaders(number, 1);
1432
- return headers.length === 0 ? undefined : headers[0];
1433
- }
1434
- getCheckpointedBlock(number) {
1435
- return this.store.getCheckpointedBlock(number);
1436
- }
1437
- async getCheckpointedBlocks(from, limit, proven) {
1438
- const blocks = await this.store.store.getCheckpointedBlocks(from, limit);
1439
- if (proven === true) {
1440
- const provenBlockNumber = await this.store.getProvenBlockNumber();
1441
- return blocks.filter((b)=>b.block.number <= provenBlockNumber);
1442
- }
1443
- return blocks;
1444
- }
1445
- getCheckpointedBlockByHash(blockHash) {
1446
- return this.store.getCheckpointedBlockByHash(blockHash);
1447
- }
1448
- getProvenBlockNumber() {
1449
- return this.store.getProvenBlockNumber();
1450
- }
1451
- getCheckpointedBlockNumber() {
1452
- return this.store.getCheckpointedL2BlockNumber();
1453
- }
1454
- getCheckpointedBlockByArchive(archive) {
1455
- return this.store.getCheckpointedBlockByArchive(archive);
1456
- }
1457
- getTxEffect(txHash) {
1458
- return this.store.getTxEffect(txHash);
1459
- }
1460
- getSettledTxReceipt(txHash) {
1461
- return this.store.getSettledTxReceipt(txHash);
1462
- }
1463
- getPrivateLogsByTags(tags) {
1464
- return this.store.getPrivateLogsByTags(tags);
1465
- }
1466
- getPublicLogsByTagsFromContract(contractAddress, tags) {
1467
- return this.store.getPublicLogsByTagsFromContract(contractAddress, tags);
1468
- }
1469
- /**
1470
- * Gets public logs based on the provided filter.
1471
- * @param filter - The filter to apply to the logs.
1472
- * @returns The requested logs.
1473
- */ getPublicLogs(filter) {
1474
- return this.store.getPublicLogs(filter);
1475
- }
1476
- /**
1477
- * Gets contract class logs based on the provided filter.
1478
- * @param filter - The filter to apply to the logs.
1479
- * @returns The requested logs.
1480
- */ getContractClassLogs(filter) {
1481
- return this.store.getContractClassLogs(filter);
1482
- }
1483
- /**
1484
- * Gets the number of the latest L2 block processed by the block source implementation.
1485
- * This includes both checkpointed and uncheckpointed blocks.
1486
- * @returns The number of the latest L2 block processed by the block source implementation.
1487
- */ getBlockNumber() {
1488
- return this.store.getLatestBlockNumber();
1489
- }
1490
- getContractClass(id) {
1491
- return this.store.getContractClass(id);
1492
- }
1493
- getBytecodeCommitment(id) {
1494
- return this.store.getBytecodeCommitment(id);
1495
- }
1496
- async getContract(address, maybeTimestamp) {
1497
- let timestamp;
1498
- if (maybeTimestamp === undefined) {
1499
- const latestBlockHeader = await this.getBlockHeader('latest');
1500
- // If we get an undefined block header, it means the archiver has not yet synced any blocks, so we default to 0.
1501
- timestamp = latestBlockHeader ? latestBlockHeader.globalVariables.timestamp : 0n;
1502
- } else {
1503
- timestamp = maybeTimestamp;
1504
- }
1505
- return this.store.getContractInstance(address, timestamp);
1506
- }
1507
- /**
1508
- * Gets L1 to L2 message (to be) included in a given checkpoint.
1509
- * @param checkpointNumber - Checkpoint number to get messages for.
1510
- * @returns The L1 to L2 messages/leaves of the messages subtree (throws if not found).
1511
- */ getL1ToL2Messages(checkpointNumber) {
1512
- return this.store.getL1ToL2Messages(checkpointNumber);
1513
- }
1514
- /**
1515
- * Gets the L1 to L2 message index in the L1 to L2 message tree.
1516
- * @param l1ToL2Message - The L1 to L2 message.
1517
- * @returns The index of the L1 to L2 message in the L1 to L2 message tree (undefined if not found).
1518
- */ getL1ToL2MessageIndex(l1ToL2Message) {
1519
- return this.store.getL1ToL2MessageIndex(l1ToL2Message);
1520
- }
1521
- getContractClassIds() {
1522
- return this.store.getContractClassIds();
1523
- }
1524
- registerContractFunctionSignatures(signatures) {
1525
- return this.store.registerContractFunctionSignatures(signatures);
1526
- }
1527
- getDebugFunctionName(address, selector) {
1528
- return this.store.getDebugFunctionName(address, selector);
1529
- }
1530
- async getPendingChainValidationStatus() {
1531
- return await this.store.getPendingChainValidationStatus() ?? {
1532
- valid: true
1533
- };
1534
- }
1535
- isPendingChainInvalid() {
1536
- return this.getPendingChainValidationStatus().then((status)=>!status.valid);
1537
- }
1538
- async getL2Tips() {
1539
- const [latestBlockNumber, provenBlockNumber, checkpointedBlockNumber] = await Promise.all([
1540
- this.getBlockNumber(),
1541
- this.getProvenBlockNumber(),
1542
- this.getCheckpointedBlockNumber()
1543
- ]);
1544
- // TODO(#13569): Compute proper finalized block number based on L1 finalized block.
1545
- // For now we just keep it 2 epochs' worth of blocks behind the proven tip.
1546
- // NOTE: update end-to-end/src/e2e_epochs/epochs_empty_blocks.test.ts as that uses finalized blocks in computations
1547
- const finalizedBlockNumber = BlockNumber(Math.max(provenBlockNumber - this.l1constants.epochDuration * 2, 0));
1548
- const beforeInitialblockNumber = BlockNumber(INITIAL_L2_BLOCK_NUM - 1);
1549
- // Get the latest block header and checkpointed blocks for proven, finalized and checkpointed blocks
1550
- const [latestBlockHeader, provenCheckpointedBlock, finalizedCheckpointedBlock, checkpointedBlock] = await Promise.all([
1551
- latestBlockNumber > beforeInitialblockNumber ? this.getBlockHeader(latestBlockNumber) : undefined,
1552
- provenBlockNumber > beforeInitialblockNumber ? this.getCheckpointedBlock(provenBlockNumber) : undefined,
1553
- finalizedBlockNumber > beforeInitialblockNumber ? this.getCheckpointedBlock(finalizedBlockNumber) : undefined,
1554
- checkpointedBlockNumber > beforeInitialblockNumber ? this.getCheckpointedBlock(checkpointedBlockNumber) : undefined
1555
- ]);
1556
- if (latestBlockNumber > beforeInitialblockNumber && !latestBlockHeader) {
1557
- throw new Error(`Failed to retrieve latest block header for block ${latestBlockNumber}`);
1558
- }
1559
- // Checkpointed blocks must exist for proven, finalized and checkpointed tips if they are beyond the initial block number.
1560
- if (checkpointedBlockNumber > beforeInitialblockNumber && !checkpointedBlock?.block.header) {
1561
- throw new Error(`Failed to retrieve checkpointed block header for block ${checkpointedBlockNumber} (latest block is ${latestBlockNumber})`);
1562
- }
1563
- if (provenBlockNumber > beforeInitialblockNumber && !provenCheckpointedBlock?.block.header) {
1564
- throw new Error(`Failed to retrieve proven checkpointed block for block ${provenBlockNumber} (latest block is ${latestBlockNumber})`);
1565
- }
1566
- if (finalizedBlockNumber > beforeInitialblockNumber && !finalizedCheckpointedBlock?.block.header) {
1567
- throw new Error(`Failed to retrieve finalized block header for block ${finalizedBlockNumber} (latest block is ${latestBlockNumber})`);
1568
- }
1569
- const latestBlockHeaderHash = await latestBlockHeader?.hash() ?? GENESIS_BLOCK_HEADER_HASH;
1570
- const provenBlockHeaderHash = await provenCheckpointedBlock?.block.header?.hash() ?? GENESIS_BLOCK_HEADER_HASH;
1571
- const finalizedBlockHeaderHash = await finalizedCheckpointedBlock?.block.header?.hash() ?? GENESIS_BLOCK_HEADER_HASH;
1572
- const checkpointedBlockHeaderHash = await checkpointedBlock?.block.header?.hash() ?? GENESIS_BLOCK_HEADER_HASH;
1573
- // Now attempt to retrieve checkpoints for proven, finalized and checkpointed blocks
1574
- const [[provenBlockCheckpoint], [finalizedBlockCheckpoint], [checkpointedBlockCheckpoint]] = await Promise.all([
1575
- provenCheckpointedBlock !== undefined ? await this.getPublishedCheckpoints(provenCheckpointedBlock?.checkpointNumber, 1) : [
1576
- undefined
1577
- ],
1578
- finalizedCheckpointedBlock !== undefined ? await this.getPublishedCheckpoints(finalizedCheckpointedBlock?.checkpointNumber, 1) : [
1579
- undefined
1580
- ],
1581
- checkpointedBlock !== undefined ? await this.getPublishedCheckpoints(checkpointedBlock?.checkpointNumber, 1) : [
1582
- undefined
1583
- ]
1584
- ]);
1585
- const initialCheckpointId = {
1586
- number: CheckpointNumber.ZERO,
1587
- hash: GENESIS_CHECKPOINT_HEADER_HASH.toString()
1588
- };
1589
- const makeCheckpointId = (checkpoint)=>{
1590
- if (checkpoint === undefined) {
1591
- return initialCheckpointId;
1592
- }
1593
- return {
1594
- number: checkpoint.checkpoint.number,
1595
- hash: checkpoint.checkpoint.hash().toString()
1596
- };
1597
- };
1598
- const l2Tips = {
1599
- proposed: {
1600
- number: latestBlockNumber,
1601
- hash: latestBlockHeaderHash.toString()
1602
- },
1603
- proven: {
1604
- block: {
1605
- number: provenBlockNumber,
1606
- hash: provenBlockHeaderHash.toString()
1607
- },
1608
- checkpoint: makeCheckpointId(provenBlockCheckpoint)
1609
- },
1610
- finalized: {
1611
- block: {
1612
- number: finalizedBlockNumber,
1613
- hash: finalizedBlockHeaderHash.toString()
1614
- },
1615
- checkpoint: makeCheckpointId(finalizedBlockCheckpoint)
1616
- },
1617
- checkpointed: {
1618
- block: {
1619
- number: checkpointedBlockNumber,
1620
- hash: checkpointedBlockHeaderHash.toString()
1621
- },
1622
- checkpoint: makeCheckpointId(checkpointedBlockCheckpoint)
1623
- }
1624
- };
1625
- return l2Tips;
1626
- }
1627
- async rollbackTo(targetL2BlockNumber) {
1628
- // TODO(pw/mbps): This still assumes 1 block per checkpoint
1629
- const currentBlocks = await this.getL2Tips();
1630
- const currentL2Block = currentBlocks.proposed.number;
1631
- const currentProvenBlock = currentBlocks.proven.block.number;
1632
- if (targetL2BlockNumber >= currentL2Block) {
1633
- throw new Error(`Target L2 block ${targetL2BlockNumber} must be less than current L2 block ${currentL2Block}`);
1634
- }
1635
- const blocksToUnwind = currentL2Block - targetL2BlockNumber;
1636
- const targetL2Block = await this.store.getCheckpointedBlock(targetL2BlockNumber);
1637
- if (!targetL2Block) {
1638
- throw new Error(`Target L2 block ${targetL2BlockNumber} not found`);
1639
- }
1640
- const targetL1BlockNumber = targetL2Block.l1.blockNumber;
1641
- const targetCheckpointNumber = CheckpointNumber.fromBlockNumber(targetL2BlockNumber);
1642
- const targetL1BlockHash = await this.getL1BlockHash(targetL1BlockNumber);
1643
- this.log.info(`Unwinding ${blocksToUnwind} checkpoints from L2 block ${currentL2Block}`);
1644
- await this.store.unwindCheckpoints(CheckpointNumber(currentL2Block), blocksToUnwind);
1645
- this.log.info(`Unwinding L1 to L2 messages to checkpoint ${targetCheckpointNumber}`);
1646
- await this.store.rollbackL1ToL2MessagesToCheckpoint(targetCheckpointNumber);
1647
- this.log.info(`Setting L1 syncpoints to ${targetL1BlockNumber}`);
1648
- await this.store.setBlockSynchedL1BlockNumber(targetL1BlockNumber);
1649
- await this.store.setMessageSynchedL1Block({
1650
- l1BlockNumber: targetL1BlockNumber,
1651
- l1BlockHash: targetL1BlockHash
1652
- });
1653
- if (targetL2BlockNumber < currentProvenBlock) {
1654
- this.log.info(`Clearing proven L2 block number`);
1655
- await this.store.setProvenCheckpointNumber(CheckpointNumber.ZERO);
1656
- }
1657
- // TODO(palla/reorg): Set the finalized block when we add support for it.
1658
- // if (targetL2BlockNumber < currentFinalizedBlock) {
1659
- // this.log.info(`Clearing finalized L2 block number`);
1660
- // await this.store.setFinalizedL2BlockNumber(0);
1661
- // }
1662
- }
1663
- async getPublishedCheckpoints(checkpointNumber, limit) {
1664
- const checkpoints = await this.store.getRangeOfCheckpoints(checkpointNumber, limit);
1665
- const blocks = (await Promise.all(checkpoints.map((ch)=>this.store.getBlocksForCheckpoint(ch.checkpointNumber)))).filter(isDefined);
1666
- const fullCheckpoints = [];
1667
- for(let i = 0; i < checkpoints.length; i++){
1668
- const blocksForCheckpoint = blocks[i];
1669
- const checkpoint = checkpoints[i];
1670
- const fullCheckpoint = new Checkpoint(checkpoint.archive, checkpoint.header, blocksForCheckpoint, checkpoint.checkpointNumber);
1671
- const publishedCheckpoint = new PublishedCheckpoint(fullCheckpoint, checkpoint.l1, checkpoint.attestations.map((x)=>CommitteeAttestation.fromBuffer(x)));
1672
- fullCheckpoints.push(publishedCheckpoint);
1673
- }
1674
- return fullCheckpoints;
1675
- }
1676
- async getCheckpointsForEpoch(epochNumber) {
1677
- const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1constants);
1678
- const checkpoints = [];
1679
- // Walk the list of checkpoints backwards and filter by slots matching the requested epoch.
1680
- // We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here.
1681
- let checkpointData = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber());
1682
- const slot = (b)=>b.header.slotNumber;
1683
- while(checkpointData && slot(checkpointData) >= start){
1684
- if (slot(checkpointData) <= end) {
1685
- // push the checkpoints on backwards
1686
- const [checkpoint] = await this.getPublishedCheckpoints(checkpointData.checkpointNumber, 1);
1687
- checkpoints.push(checkpoint.checkpoint);
1688
- }
1689
- checkpointData = await this.store.getCheckpointData(CheckpointNumber(checkpointData.checkpointNumber - 1));
1690
- }
1691
- return checkpoints.reverse();
1692
- }
1693
- /* Legacy APIs */ async getPublishedBlockByHash(blockHash) {
1694
- const checkpointedBlock = await this.store.getCheckpointedBlockByHash(blockHash);
1695
- return this.buildOldBlockFromCheckpointedBlock(checkpointedBlock);
1696
- }
1697
- async getPublishedBlockByArchive(archive) {
1698
- const checkpointedBlock = await this.store.getCheckpointedBlockByArchive(archive);
1699
- return this.buildOldBlockFromCheckpointedBlock(checkpointedBlock);
1700
- }
1701
- /**
1702
- * Gets up to `limit` amount of L2 blocks starting from `from`.
1703
- * @param from - Number of the first block to return (inclusive).
1704
- * @param limit - The number of blocks to return.
1705
- * @param proven - If true, only return blocks that have been proven.
1706
- * @returns The requested L2 blocks.
1707
- */ async getBlocks(from, limit, proven) {
1708
- const publishedBlocks = await this.getPublishedBlocks(from, limit, proven);
1709
- return publishedBlocks.map((x)=>x.block);
1710
- }
1711
- async getPublishedBlocks(from, limit, proven) {
1712
- const checkpoints = await this.store.getRangeOfCheckpoints(CheckpointNumber(from), limit);
1713
- const provenCheckpointNumber = await this.getProvenCheckpointNumber();
1714
- const blocks = (await Promise.all(checkpoints.map((ch)=>this.store.getBlocksForCheckpoint(ch.checkpointNumber)))).filter(isDefined);
1715
- const oldBlocks = [];
1716
- for(let i = 0; i < checkpoints.length; i++){
1717
- const blockForCheckpoint = blocks[i][0];
1718
- const checkpoint = checkpoints[i];
1719
- if (checkpoint.checkpointNumber > provenCheckpointNumber && proven === true) {
1720
- continue;
1721
- }
1722
- const oldCheckpoint = new Checkpoint(blockForCheckpoint.archive, checkpoint.header, [
1723
- blockForCheckpoint
1724
- ], checkpoint.checkpointNumber);
1725
- const oldBlock = L2Block.fromCheckpoint(oldCheckpoint);
1726
- const publishedBlock = new PublishedL2Block(oldBlock, checkpoint.l1, checkpoint.attestations.map((x)=>CommitteeAttestation.fromBuffer(x)));
1727
- oldBlocks.push(publishedBlock);
1728
- }
1729
- return oldBlocks;
1730
- }
1731
- async buildOldBlockFromCheckpointedBlock(checkpointedBlock) {
1732
- if (!checkpointedBlock) {
1733
- return undefined;
1734
- }
1735
- const checkpoint = await this.store.getCheckpointData(checkpointedBlock.checkpointNumber);
1736
- if (!checkpoint) {
1737
- return checkpoint;
1738
- }
1739
- const fullCheckpoint = new Checkpoint(checkpointedBlock?.block.archive, checkpoint?.header, [
1740
- checkpointedBlock.block
1741
- ], checkpoint.checkpointNumber);
1742
- const oldBlock = L2Block.fromCheckpoint(fullCheckpoint);
1743
- const published = new PublishedL2Block(oldBlock, checkpoint.l1, checkpoint.attestations.map((x)=>CommitteeAttestation.fromBuffer(x)));
1744
- return published;
1745
- }
1746
- async getBlock(number) {
1747
- // If the number provided is negative, return the latest block.
1748
- if (number < 0) {
1749
- number = await this.store.getSynchedL2BlockNumber();
1750
- }
1751
- if (number === 0) {
1752
- return undefined;
1753
- }
1754
- const publishedBlocks = await this.getPublishedBlocks(number, 1);
1755
- if (publishedBlocks.length === 0) {
1756
- return undefined;
1757
- }
1758
- return publishedBlocks[0].block;
1759
- }
1760
- }
1761
- var Operation = /*#__PURE__*/ function(Operation) {
1762
- Operation[Operation["Store"] = 0] = "Store";
1763
- Operation[Operation["Delete"] = 1] = "Delete";
1764
- return Operation;
1765
- }(Operation || {});
1766
- /**
1767
- * A helper class that we use to deal with some of the logic needed when adding blocks.
1768
- *
1769
- * I would have preferred not to have this type, but it is useful for handling the logic that any
1770
- * store would otherwise need to include, while exposing fewer functions and logic directly to the archiver.
1771
- */ export class ArchiverStoreHelper {
1772
- store;
1773
- #log;
1774
- constructor(store){
1775
- this.store = store;
1776
- this.#log = createLogger('archiver:block-helper');
1777
- }
1778
- /**
1779
- * Extracts and stores contract classes out of ContractClassPublished events emitted by the class registry contract.
1780
- * @param allLogs - All logs emitted in a bunch of blocks.
1781
- */ async #updatePublishedContractClasses(allLogs, blockNum, operation) {
1782
- const contractClassPublishedEvents = allLogs.filter((log)=>ContractClassPublishedEvent.isContractClassPublishedEvent(log)).map((log)=>ContractClassPublishedEvent.fromLog(log));
1783
- const contractClasses = await Promise.all(contractClassPublishedEvents.map((e)=>e.toContractClassPublic()));
1784
- if (contractClasses.length > 0) {
1785
- contractClasses.forEach((c)=>this.#log.verbose(`${Operation[operation]} contract class ${c.id.toString()}`));
1786
- if (operation == 0) {
1787
- // TODO: Will probably want to create some worker threads to compute these bytecode commitments as they are expensive
1788
- const commitments = await Promise.all(contractClasses.map((c)=>computePublicBytecodeCommitment(c.packedBytecode)));
1789
- return await this.store.addContractClasses(contractClasses, commitments, blockNum);
1790
- } else if (operation == 1) {
1791
- return await this.store.deleteContractClasses(contractClasses, blockNum);
1792
- }
1793
- }
1794
- return true;
1795
- }
1796
- /**
1797
- * Extracts and stores contract instances out of ContractInstancePublished events emitted by the canonical deployer contract.
1798
- * @param allLogs - All logs emitted in a bunch of blocks.
1799
- */ async #updateDeployedContractInstances(allLogs, blockNum, operation) {
1800
- const contractInstances = allLogs.filter((log)=>ContractInstancePublishedEvent.isContractInstancePublishedEvent(log)).map((log)=>ContractInstancePublishedEvent.fromLog(log)).map((e)=>e.toContractInstance());
1801
- if (contractInstances.length > 0) {
1802
- contractInstances.forEach((c)=>this.#log.verbose(`${Operation[operation]} contract instance at ${c.address.toString()}`));
1803
- if (operation == 0) {
1804
- return await this.store.addContractInstances(contractInstances, blockNum);
1805
- } else if (operation == 1) {
1806
- return await this.store.deleteContractInstances(contractInstances, blockNum);
1807
- }
1808
- }
1809
- return true;
1810
- }
1811
- /**
1812
- * Extracts and stores contract instance updates out of ContractInstanceUpdated events.
1813
- * @param allLogs - All logs emitted in a bunch of blocks.
1814
- * @param timestamp - Timestamp at which the updates were scheduled.
1815
- * @param operation - The operation to perform on the contract instance updates (Store or Delete).
1816
- */ async #updateUpdatedContractInstances(allLogs, timestamp, operation) {
1817
- const contractUpdates = allLogs.filter((log)=>ContractInstanceUpdatedEvent.isContractInstanceUpdatedEvent(log)).map((log)=>ContractInstanceUpdatedEvent.fromLog(log)).map((e)=>e.toContractInstanceUpdate());
1818
- if (contractUpdates.length > 0) {
1819
- contractUpdates.forEach((c)=>this.#log.verbose(`${Operation[operation]} contract instance update at ${c.address.toString()}`));
1820
- if (operation == 0) {
1821
- return await this.store.addContractInstanceUpdates(contractUpdates, timestamp);
1822
- } else if (operation == 1) {
1823
- return await this.store.deleteContractInstanceUpdates(contractUpdates, timestamp);
1824
- }
1825
- }
1826
- return true;
1827
- }
1828
- /**
1829
- * Stores the functions that were broadcasted individually
1830
- *
1831
- * @dev Beware that there is no delete variant of this, since the functions are added to contract classes
1832
- * and will be deleted as part of the class if needed.
1833
- *
1834
- * @param allLogs - The logs from the block
1835
- * @param _blockNum - The block number
1836
- * @returns
1837
- */ async #storeBroadcastedIndividualFunctions(allLogs, _blockNum) {
1838
- // Pick out private and utility function broadcast events
1839
- const privateFnEvents = allLogs.filter((log)=>PrivateFunctionBroadcastedEvent.isPrivateFunctionBroadcastedEvent(log)).map((log)=>PrivateFunctionBroadcastedEvent.fromLog(log));
1840
- const utilityFnEvents = allLogs.filter((log)=>UtilityFunctionBroadcastedEvent.isUtilityFunctionBroadcastedEvent(log)).map((log)=>UtilityFunctionBroadcastedEvent.fromLog(log));
1841
- // Group all events by contract class id
1842
- for (const [classIdString, classEvents] of Object.entries(groupBy([
1843
- ...privateFnEvents,
1844
- ...utilityFnEvents
1845
- ], (e)=>e.contractClassId.toString()))){
1846
- const contractClassId = Fr.fromHexString(classIdString);
1847
- const contractClass = await this.getContractClass(contractClassId);
1848
- if (!contractClass) {
1849
- this.#log.warn(`Skipping broadcasted functions as contract class ${contractClassId.toString()} was not found`);
1850
- continue;
1851
- }
1852
- // Split private and utility functions, and filter out invalid ones
1853
- const allFns = classEvents.map((e)=>e.toFunctionWithMembershipProof());
1854
- const privateFns = allFns.filter((fn)=>'utilityFunctionsTreeRoot' in fn);
1855
- const utilityFns = allFns.filter((fn)=>'privateFunctionsArtifactTreeRoot' in fn);
1856
- const privateFunctionsWithValidity = await Promise.all(privateFns.map(async (fn)=>({
1857
- fn,
1858
- valid: await isValidPrivateFunctionMembershipProof(fn, contractClass)
1859
- })));
1860
- const validPrivateFns = privateFunctionsWithValidity.filter(({ valid })=>valid).map(({ fn })=>fn);
1861
- const utilityFunctionsWithValidity = await Promise.all(utilityFns.map(async (fn)=>({
1862
- fn,
1863
- valid: await isValidUtilityFunctionMembershipProof(fn, contractClass)
1864
- })));
1865
- const validUtilityFns = utilityFunctionsWithValidity.filter(({ valid })=>valid).map(({ fn })=>fn);
1866
- const validFnCount = validPrivateFns.length + validUtilityFns.length;
1867
- if (validFnCount !== allFns.length) {
1868
- this.#log.warn(`Skipping ${allFns.length - validFnCount} invalid functions`);
1869
- }
1870
- // Store the functions in the contract class in a single operation
1871
- if (validFnCount > 0) {
1872
- this.#log.verbose(`Storing ${validFnCount} functions for contract class ${contractClassId.toString()}`);
1873
- }
1874
- return await this.store.addFunctions(contractClassId, validPrivateFns, validUtilityFns);
1875
- }
1876
- return true;
1877
- }
1878
- async addBlockDataToDB(block) {
1879
- const contractClassLogs = block.body.txEffects.flatMap((txEffect)=>txEffect.contractClassLogs);
1880
- // ContractInstancePublished event logs are broadcast in privateLogs.
1881
- const privateLogs = block.body.txEffects.flatMap((txEffect)=>txEffect.privateLogs);
1882
- const publicLogs = block.body.txEffects.flatMap((txEffect)=>txEffect.publicLogs);
1883
- return (await Promise.all([
1884
- this.#updatePublishedContractClasses(contractClassLogs, block.number, 0),
1885
- this.#updateDeployedContractInstances(privateLogs, block.number, 0),
1886
- this.#updateUpdatedContractInstances(publicLogs, block.header.globalVariables.timestamp, 0),
1887
- this.#storeBroadcastedIndividualFunctions(contractClassLogs, block.number)
1888
- ])).every(Boolean);
1889
- }
1890
- addBlocks(blocks, pendingChainValidationStatus) {
1891
- // Add the blocks to the store. Store will throw if the blocks are not in order, there are gaps,
1892
- // or if the previous block is not in the store.
1893
- return this.store.transactionAsync(async ()=>{
1894
- await this.store.addBlocks(blocks);
1895
- const opResults = await Promise.all([
1896
- // Update the pending chain validation status if provided
1897
- pendingChainValidationStatus && this.store.setPendingChainValidationStatus(pendingChainValidationStatus),
1898
- // Add any logs emitted during the retrieved blocks
1899
- this.store.addLogs(blocks),
1900
- // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
1901
- ...blocks.map((block)=>{
1902
- return this.addBlockDataToDB(block);
1903
- })
1904
- ]);
1905
- return opResults.every(Boolean);
1906
- });
1907
- }
1908
- addCheckpoints(checkpoints, pendingChainValidationStatus) {
1909
- // Add the checkpoints to the store. Store will throw if the checkpoints are not in order, there are gaps,
1910
- // or if the previous checkpoint is not in the store.
1911
- return this.store.transactionAsync(async ()=>{
1912
- await this.store.addCheckpoints(checkpoints);
1913
- const allBlocks = checkpoints.flatMap((ch)=>ch.checkpoint.blocks);
1914
- const opResults = await Promise.all([
1915
- // Update the pending chain validation status if provided
1916
- pendingChainValidationStatus && this.store.setPendingChainValidationStatus(pendingChainValidationStatus),
1917
- // Add any logs emitted during the retrieved blocks
1918
- this.store.addLogs(allBlocks),
1919
- // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
1920
- ...allBlocks.map((block)=>{
1921
- return this.addBlockDataToDB(block);
1922
- })
1923
- ]);
1924
- return opResults.every(Boolean);
1925
- });
1926
- }
1927
- async unwindCheckpoints(from, checkpointsToUnwind) {
1928
- if (checkpointsToUnwind <= 0) {
1929
- throw new Error(`Cannot unwind ${checkpointsToUnwind} checkpoints`);
1930
- }
1931
- const last = await this.getSynchedCheckpointNumber();
1932
- if (from != last) {
1933
- throw new Error(`Cannot unwind checkpoints from checkpoint ${from} when the last checkpoint is ${last}`);
1934
- }
1935
- const blocks = [];
1936
- const lastCheckpointNumber = from + checkpointsToUnwind - 1;
1937
- for(let checkpointNumber = from; checkpointNumber <= lastCheckpointNumber; checkpointNumber++){
1938
- const blocksForCheckpoint = await this.store.getBlocksForCheckpoint(checkpointNumber);
1939
- if (!blocksForCheckpoint) {
1940
- continue;
1941
- }
1942
- blocks.push(...blocksForCheckpoint);
1943
- }
1944
- const opResults = await Promise.all([
1945
- // Prune rolls back to the last proven block, which is by definition valid
1946
- this.store.setPendingChainValidationStatus({
1947
- valid: true
1948
- }),
1949
- // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
1950
- ...blocks.map(async (block)=>{
1951
- const contractClassLogs = block.body.txEffects.flatMap((txEffect)=>txEffect.contractClassLogs);
1952
- // ContractInstancePublished event logs are broadcast in privateLogs.
1953
- const privateLogs = block.body.txEffects.flatMap((txEffect)=>txEffect.privateLogs);
1954
- const publicLogs = block.body.txEffects.flatMap((txEffect)=>txEffect.publicLogs);
1955
- return (await Promise.all([
1956
- this.#updatePublishedContractClasses(contractClassLogs, block.number, 1),
1957
- this.#updateDeployedContractInstances(privateLogs, block.number, 1),
1958
- this.#updateUpdatedContractInstances(publicLogs, block.header.globalVariables.timestamp, 1)
1959
- ])).every(Boolean);
1960
- }),
1961
- this.store.deleteLogs(blocks),
1962
- this.store.unwindCheckpoints(from, checkpointsToUnwind)
1963
- ]);
1964
- return opResults.every(Boolean);
1965
- }
1966
- getCheckpointData(checkpointNumber) {
1967
- return this.store.getCheckpointData(checkpointNumber);
1968
- }
1969
- getRangeOfCheckpoints(from, limit) {
1970
- return this.store.getRangeOfCheckpoints(from, limit);
1971
- }
1972
- getCheckpointedL2BlockNumber() {
1973
- return this.store.getCheckpointedL2BlockNumber();
1974
- }
1975
- getSynchedCheckpointNumber() {
1976
- return this.store.getSynchedCheckpointNumber();
1977
- }
1978
- setCheckpointSynchedL1BlockNumber(l1BlockNumber) {
1979
- return this.store.setCheckpointSynchedL1BlockNumber(l1BlockNumber);
1980
- }
1981
- getCheckpointedBlock(number) {
1982
- return this.store.getCheckpointedBlock(number);
1983
- }
1984
- getCheckpointedBlockByHash(blockHash) {
1985
- return this.store.getCheckpointedBlockByHash(blockHash);
1986
- }
1987
- getCheckpointedBlockByArchive(archive) {
1988
- return this.store.getCheckpointedBlockByArchive(archive);
1989
- }
1990
- getBlockHeaders(from, limit) {
1991
- return this.store.getBlockHeaders(from, limit);
1992
- }
1993
- getBlockHeaderByHash(blockHash) {
1994
- return this.store.getBlockHeaderByHash(blockHash);
1995
- }
1996
- getBlockHeaderByArchive(archive) {
1997
- return this.store.getBlockHeaderByArchive(archive);
1998
- }
1999
- getBlockByHash(blockHash) {
2000
- return this.store.getBlockByHash(blockHash);
2001
- }
2002
- getBlockByArchive(archive) {
2003
- return this.store.getBlockByArchive(archive);
2004
- }
2005
- getLatestBlockNumber() {
2006
- return this.store.getLatestBlockNumber();
2007
- }
2008
- getBlocksForCheckpoint(checkpointNumber) {
2009
- return this.store.getBlocksForCheckpoint(checkpointNumber);
2010
- }
2011
- getTxEffect(txHash) {
2012
- return this.store.getTxEffect(txHash);
2013
- }
2014
- getSettledTxReceipt(txHash) {
2015
- return this.store.getSettledTxReceipt(txHash);
2016
- }
2017
- addL1ToL2Messages(messages) {
2018
- return this.store.addL1ToL2Messages(messages);
2019
- }
2020
- getL1ToL2Messages(checkpointNumber) {
2021
- return this.store.getL1ToL2Messages(checkpointNumber);
2022
- }
2023
- getL1ToL2MessageIndex(l1ToL2Message) {
2024
- return this.store.getL1ToL2MessageIndex(l1ToL2Message);
2025
- }
2026
- getPrivateLogsByTags(tags) {
2027
- return this.store.getPrivateLogsByTags(tags);
2028
- }
2029
- getPublicLogsByTagsFromContract(contractAddress, tags) {
2030
- return this.store.getPublicLogsByTagsFromContract(contractAddress, tags);
2031
- }
2032
- getPublicLogs(filter) {
2033
- return this.store.getPublicLogs(filter);
2034
- }
2035
- getContractClassLogs(filter) {
2036
- return this.store.getContractClassLogs(filter);
2037
- }
2038
- getSynchedL2BlockNumber() {
2039
- return this.store.getLatestBlockNumber();
2040
- }
2041
- getProvenCheckpointNumber() {
2042
- return this.store.getProvenCheckpointNumber();
2043
- }
2044
- getProvenBlockNumber() {
2045
- return this.store.getProvenBlockNumber();
2046
- }
2047
- setProvenCheckpointNumber(checkpointNumber) {
2048
- return this.store.setProvenCheckpointNumber(checkpointNumber);
2049
- }
2050
- setBlockSynchedL1BlockNumber(l1BlockNumber) {
2051
- return this.store.setCheckpointSynchedL1BlockNumber(l1BlockNumber);
2052
- }
2053
- setMessageSynchedL1Block(l1Block) {
2054
- return this.store.setMessageSynchedL1Block(l1Block);
2055
- }
2056
- getSynchPoint() {
2057
- return this.store.getSynchPoint();
2058
- }
2059
- getContractClass(id) {
2060
- return this.store.getContractClass(id);
2061
- }
2062
- getBytecodeCommitment(contractClassId) {
2063
- return this.store.getBytecodeCommitment(contractClassId);
2064
- }
2065
- getContractInstance(address, timestamp) {
2066
- return this.store.getContractInstance(address, timestamp);
2067
- }
2068
- getContractClassIds() {
2069
- return this.store.getContractClassIds();
2070
- }
2071
- registerContractFunctionSignatures(signatures) {
2072
- return this.store.registerContractFunctionSignatures(signatures);
2073
- }
2074
- getDebugFunctionName(address, selector) {
2075
- return this.store.getDebugFunctionName(address, selector);
2076
- }
2077
- getTotalL1ToL2MessageCount() {
2078
- return this.store.getTotalL1ToL2MessageCount();
2079
- }
2080
- estimateSize() {
2081
- return this.store.estimateSize();
2082
- }
2083
- rollbackL1ToL2MessagesToCheckpoint(targetCheckpointNumber) {
2084
- return this.store.rollbackL1ToL2MessagesToCheckpoint(targetCheckpointNumber);
2085
- }
2086
- iterateL1ToL2Messages(range = {}) {
2087
- return this.store.iterateL1ToL2Messages(range);
2088
- }
2089
- removeL1ToL2Messages(startIndex) {
2090
- return this.store.removeL1ToL2Messages(startIndex);
2091
- }
2092
- getLastL1ToL2Message() {
2093
- return this.store.getLastL1ToL2Message();
2094
- }
2095
- getPendingChainValidationStatus() {
2096
- return this.store.getPendingChainValidationStatus();
2097
- }
2098
- setPendingChainValidationStatus(status) {
2099
- this.#log.debug(`Setting pending chain validation status to valid ${status?.valid}`, status);
2100
- return this.store.setPendingChainValidationStatus(status);
2101
- }
2102
- }