@aztec/archiver 0.0.1-commit.fcb71a6 → 0.0.1-commit.fffb133c

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (192)
  1. package/README.md +156 -22
  2. package/dest/archiver.d.ts +135 -0
  3. package/dest/archiver.d.ts.map +1 -0
  4. package/dest/archiver.js +768 -0
  5. package/dest/{archiver/config.d.ts → config.d.ts} +9 -1
  6. package/dest/config.d.ts.map +1 -0
  7. package/dest/{archiver/config.js → config.js} +11 -2
  8. package/dest/errors.d.ts +41 -0
  9. package/dest/errors.d.ts.map +1 -0
  10. package/dest/{archiver/errors.js → errors.js} +8 -0
  11. package/dest/factory.d.ts +7 -7
  12. package/dest/factory.d.ts.map +1 -1
  13. package/dest/factory.js +87 -8
  14. package/dest/index.d.ts +10 -4
  15. package/dest/index.d.ts.map +1 -1
  16. package/dest/index.js +8 -3
  17. package/dest/interfaces.d.ts +9 -0
  18. package/dest/interfaces.d.ts.map +1 -0
  19. package/dest/interfaces.js +3 -0
  20. package/dest/{archiver/l1 → l1}/bin/retrieve-calldata.d.ts +1 -1
  21. package/dest/l1/bin/retrieve-calldata.d.ts.map +1 -0
  22. package/dest/{archiver/l1 → l1}/bin/retrieve-calldata.js +2 -2
  23. package/dest/{archiver/l1 → l1}/calldata_retriever.d.ts +3 -3
  24. package/dest/l1/calldata_retriever.d.ts.map +1 -0
  25. package/dest/{archiver/l1 → l1}/calldata_retriever.js +2 -2
  26. package/dest/l1/data_retrieval.d.ts +88 -0
  27. package/dest/l1/data_retrieval.d.ts.map +1 -0
  28. package/dest/{archiver/l1 → l1}/data_retrieval.js +35 -54
  29. package/dest/{archiver/l1 → l1}/debug_tx.d.ts +1 -1
  30. package/dest/l1/debug_tx.d.ts.map +1 -0
  31. package/dest/{archiver/l1 → l1}/spire_proposer.d.ts +1 -1
  32. package/dest/l1/spire_proposer.d.ts.map +1 -0
  33. package/dest/{archiver/l1 → l1}/trace_tx.d.ts +1 -1
  34. package/dest/l1/trace_tx.d.ts.map +1 -0
  35. package/dest/l1/types.d.ts +12 -0
  36. package/dest/l1/types.d.ts.map +1 -0
  37. package/dest/{archiver/l1 → l1}/validate_trace.d.ts +1 -1
  38. package/dest/l1/validate_trace.d.ts.map +1 -0
  39. package/dest/{archiver/l1 → l1}/validate_trace.js +1 -1
  40. package/dest/modules/data_source_base.d.ts +84 -0
  41. package/dest/modules/data_source_base.d.ts.map +1 -0
  42. package/dest/modules/data_source_base.js +260 -0
  43. package/dest/modules/data_store_updater.d.ts +73 -0
  44. package/dest/modules/data_store_updater.d.ts.map +1 -0
  45. package/dest/modules/data_store_updater.js +302 -0
  46. package/dest/modules/instrumentation.d.ts +37 -0
  47. package/dest/modules/instrumentation.d.ts.map +1 -0
  48. package/dest/{archiver → modules}/instrumentation.js +15 -63
  49. package/dest/modules/l1_synchronizer.d.ts +75 -0
  50. package/dest/modules/l1_synchronizer.d.ts.map +1 -0
  51. package/dest/modules/l1_synchronizer.js +1113 -0
  52. package/dest/modules/validation.d.ts +17 -0
  53. package/dest/modules/validation.d.ts.map +1 -0
  54. package/dest/{archiver → modules}/validation.js +7 -1
  55. package/dest/store/block_store.d.ts +192 -0
  56. package/dest/store/block_store.d.ts.map +1 -0
  57. package/dest/{archiver/kv_archiver_store → store}/block_store.js +162 -48
  58. package/dest/store/contract_class_store.d.ts +18 -0
  59. package/dest/store/contract_class_store.d.ts.map +1 -0
  60. package/dest/{archiver/kv_archiver_store → store}/contract_class_store.js +1 -1
  61. package/dest/store/contract_instance_store.d.ts +24 -0
  62. package/dest/store/contract_instance_store.d.ts.map +1 -0
  63. package/dest/{archiver/kv_archiver_store → store}/contract_instance_store.js +1 -1
  64. package/dest/store/kv_archiver_store.d.ts +340 -0
  65. package/dest/store/kv_archiver_store.d.ts.map +1 -0
  66. package/dest/store/kv_archiver_store.js +447 -0
  67. package/dest/store/log_store.d.ts +54 -0
  68. package/dest/store/log_store.d.ts.map +1 -0
  69. package/dest/{archiver/kv_archiver_store → store}/log_store.js +89 -54
  70. package/dest/{archiver/kv_archiver_store → store}/message_store.d.ts +1 -1
  71. package/dest/store/message_store.d.ts.map +1 -0
  72. package/dest/{archiver/structs → structs}/data_retrieval.d.ts +1 -1
  73. package/dest/structs/data_retrieval.d.ts.map +1 -0
  74. package/dest/structs/inbox_message.d.ts +15 -0
  75. package/dest/structs/inbox_message.d.ts.map +1 -0
  76. package/dest/{archiver/structs → structs}/published.d.ts +1 -1
  77. package/dest/structs/published.d.ts.map +1 -0
  78. package/dest/test/fake_l1_state.d.ts +190 -0
  79. package/dest/test/fake_l1_state.d.ts.map +1 -0
  80. package/dest/test/fake_l1_state.js +383 -0
  81. package/dest/test/index.d.ts +2 -1
  82. package/dest/test/index.d.ts.map +1 -1
  83. package/dest/test/index.js +1 -0
  84. package/dest/test/mock_archiver.d.ts +2 -2
  85. package/dest/test/mock_archiver.d.ts.map +1 -1
  86. package/dest/test/mock_archiver.js +1 -2
  87. package/dest/test/mock_l1_to_l2_message_source.d.ts +2 -2
  88. package/dest/test/mock_l1_to_l2_message_source.d.ts.map +1 -1
  89. package/dest/test/mock_l1_to_l2_message_source.js +12 -3
  90. package/dest/test/mock_l2_block_source.d.ts +22 -15
  91. package/dest/test/mock_l2_block_source.d.ts.map +1 -1
  92. package/dest/test/mock_l2_block_source.js +163 -57
  93. package/dest/test/mock_structs.d.ts +76 -2
  94. package/dest/test/mock_structs.d.ts.map +1 -1
  95. package/dest/test/mock_structs.js +133 -2
  96. package/package.json +15 -17
  97. package/src/archiver.ts +523 -0
  98. package/src/{archiver/config.ts → config.ts} +13 -2
  99. package/src/{archiver/errors.ts → errors.ts} +12 -0
  100. package/src/factory.ts +122 -8
  101. package/src/index.ts +10 -3
  102. package/src/interfaces.ts +9 -0
  103. package/src/{archiver/l1 → l1}/bin/retrieve-calldata.ts +7 -2
  104. package/src/{archiver/l1 → l1}/calldata_retriever.ts +3 -3
  105. package/src/{archiver/l1 → l1}/data_retrieval.ts +56 -73
  106. package/src/{archiver/l1 → l1}/validate_trace.ts +1 -1
  107. package/src/modules/data_source_base.ts +367 -0
  108. package/src/modules/data_store_updater.ts +423 -0
  109. package/src/{archiver → modules}/instrumentation.ts +16 -65
  110. package/src/modules/l1_synchronizer.ts +931 -0
  111. package/src/{archiver → modules}/validation.ts +11 -6
  112. package/src/{archiver/kv_archiver_store → store}/block_store.ts +210 -66
  113. package/src/{archiver/kv_archiver_store → store}/contract_class_store.ts +1 -1
  114. package/src/{archiver/kv_archiver_store → store}/contract_instance_store.ts +1 -1
  115. package/src/{archiver/kv_archiver_store → store}/kv_archiver_store.ts +236 -35
  116. package/src/{archiver/kv_archiver_store → store}/log_store.ts +145 -86
  117. package/src/test/fake_l1_state.ts +599 -0
  118. package/src/test/index.ts +1 -0
  119. package/src/test/mock_archiver.ts +2 -2
  120. package/src/test/mock_l1_to_l2_message_source.ts +10 -4
  121. package/src/test/mock_l2_block_source.ts +173 -67
  122. package/src/test/mock_structs.ts +247 -2
  123. package/dest/archiver/archiver.d.ts +0 -304
  124. package/dest/archiver/archiver.d.ts.map +0 -1
  125. package/dest/archiver/archiver.js +0 -1645
  126. package/dest/archiver/archiver_store.d.ts +0 -308
  127. package/dest/archiver/archiver_store.d.ts.map +0 -1
  128. package/dest/archiver/archiver_store.js +0 -4
  129. package/dest/archiver/archiver_store_test_suite.d.ts +0 -8
  130. package/dest/archiver/archiver_store_test_suite.d.ts.map +0 -1
  131. package/dest/archiver/archiver_store_test_suite.js +0 -2770
  132. package/dest/archiver/config.d.ts.map +0 -1
  133. package/dest/archiver/errors.d.ts +0 -36
  134. package/dest/archiver/errors.d.ts.map +0 -1
  135. package/dest/archiver/index.d.ts +0 -7
  136. package/dest/archiver/index.d.ts.map +0 -1
  137. package/dest/archiver/index.js +0 -4
  138. package/dest/archiver/instrumentation.d.ts +0 -37
  139. package/dest/archiver/instrumentation.d.ts.map +0 -1
  140. package/dest/archiver/kv_archiver_store/block_store.d.ts +0 -157
  141. package/dest/archiver/kv_archiver_store/block_store.d.ts.map +0 -1
  142. package/dest/archiver/kv_archiver_store/contract_class_store.d.ts +0 -18
  143. package/dest/archiver/kv_archiver_store/contract_class_store.d.ts.map +0 -1
  144. package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts +0 -24
  145. package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts.map +0 -1
  146. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +0 -158
  147. package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +0 -1
  148. package/dest/archiver/kv_archiver_store/kv_archiver_store.js +0 -313
  149. package/dest/archiver/kv_archiver_store/log_store.d.ts +0 -45
  150. package/dest/archiver/kv_archiver_store/log_store.d.ts.map +0 -1
  151. package/dest/archiver/kv_archiver_store/message_store.d.ts.map +0 -1
  152. package/dest/archiver/l1/bin/retrieve-calldata.d.ts.map +0 -1
  153. package/dest/archiver/l1/calldata_retriever.d.ts.map +0 -1
  154. package/dest/archiver/l1/data_retrieval.d.ts +0 -90
  155. package/dest/archiver/l1/data_retrieval.d.ts.map +0 -1
  156. package/dest/archiver/l1/debug_tx.d.ts.map +0 -1
  157. package/dest/archiver/l1/spire_proposer.d.ts.map +0 -1
  158. package/dest/archiver/l1/trace_tx.d.ts.map +0 -1
  159. package/dest/archiver/l1/types.d.ts +0 -12
  160. package/dest/archiver/l1/types.d.ts.map +0 -1
  161. package/dest/archiver/l1/validate_trace.d.ts.map +0 -1
  162. package/dest/archiver/structs/data_retrieval.d.ts.map +0 -1
  163. package/dest/archiver/structs/inbox_message.d.ts +0 -15
  164. package/dest/archiver/structs/inbox_message.d.ts.map +0 -1
  165. package/dest/archiver/structs/published.d.ts.map +0 -1
  166. package/dest/archiver/validation.d.ts +0 -17
  167. package/dest/archiver/validation.d.ts.map +0 -1
  168. package/dest/rpc/index.d.ts +0 -9
  169. package/dest/rpc/index.d.ts.map +0 -1
  170. package/dest/rpc/index.js +0 -15
  171. package/src/archiver/archiver.ts +0 -2157
  172. package/src/archiver/archiver_store.ts +0 -372
  173. package/src/archiver/archiver_store_test_suite.ts +0 -2843
  174. package/src/archiver/index.ts +0 -6
  175. package/src/rpc/index.ts +0 -16
  176. /package/dest/{archiver/l1 → l1}/debug_tx.js +0 -0
  177. /package/dest/{archiver/l1 → l1}/spire_proposer.js +0 -0
  178. /package/dest/{archiver/l1 → l1}/trace_tx.js +0 -0
  179. /package/dest/{archiver/l1 → l1}/types.js +0 -0
  180. /package/dest/{archiver/kv_archiver_store → store}/message_store.js +0 -0
  181. /package/dest/{archiver/structs → structs}/data_retrieval.js +0 -0
  182. /package/dest/{archiver/structs → structs}/inbox_message.js +0 -0
  183. /package/dest/{archiver/structs → structs}/published.js +0 -0
  184. /package/src/{archiver/l1 → l1}/README.md +0 -0
  185. /package/src/{archiver/l1 → l1}/debug_tx.ts +0 -0
  186. /package/src/{archiver/l1 → l1}/spire_proposer.ts +0 -0
  187. /package/src/{archiver/l1 → l1}/trace_tx.ts +0 -0
  188. /package/src/{archiver/l1 → l1}/types.ts +0 -0
  189. /package/src/{archiver/kv_archiver_store → store}/message_store.ts +0 -0
  190. /package/src/{archiver/structs → structs}/data_retrieval.ts +0 -0
  191. /package/src/{archiver/structs → structs}/inbox_message.ts +0 -0
  192. /package/src/{archiver/structs → structs}/published.ts +0 -0
@@ -0,0 +1,1113 @@
1
// Compiler-generated helper (Babel/SWC "applyDecs2203R", 2022-03 decorators proposal).
// Applies member and class decorators at class-definition time.
//
// Member-decorator descriptors use a numeric `kind` encoding (see the switch in
// memberDec and the `kind >= 5` / `kind - 5` handling in applyMemberDecs):
//   0 = field, 1 = accessor, 2 = method, 3 = getter, 4 = setter;
//   +5 marks a static member; 10 is used only for class decorators
//   (see the assertValidReturnValue(10, ...) call in applyClassDecs).
function applyDecs2203RFactory() {
    // Builds the `addInitializer` function exposed on a decorator context.
    // Pushes onto the shared `initializers` list; refuses to run once the
    // decorator has finished (tracked via the boxed flag decoratorFinishedRef.v).
    function createAddInitializerMethod(initializers, decoratorFinishedRef) {
        return function addInitializer(initializer) {
            assertNotFinished(decoratorFinishedRef, "addInitializer");
            assertCallable(initializer, "An initializer");
            initializers.push(initializer);
        };
    }
    // Invokes a single member decorator `dec` with the spec-shaped context object
    // (kind string, name, static/private flags, metadata, access, addInitializer)
    // and returns whatever the decorator returns. The finished-flag is set in a
    // `finally` so addInitializer is rejected even if the decorator throws.
    function memberDec(dec, name, desc, initializers, kind, isStatic, isPrivate, metadata, value) {
        var kindStr;
        switch(kind){
            case 1:
                kindStr = "accessor";
                break;
            case 2:
                kindStr = "method";
                break;
            case 3:
                kindStr = "getter";
                break;
            case 4:
                kindStr = "setter";
                break;
            default:
                kindStr = "field";
        }
        var ctx = {
            kind: kindStr,
            name: isPrivate ? "#" + name : name,
            static: isStatic,
            private: isPrivate,
            metadata: metadata
        };
        // Boxed flag so the closure created above sees later mutation.
        var decoratorFinishedRef = {
            v: false
        };
        ctx.addInitializer = createAddInitializerMethod(initializers, decoratorFinishedRef);
        // Build ctx.access accessors appropriate for the member kind.
        var get, set;
        if (kind === 0) {
            // Field: private fields use the supplied accessors; public fields
            // read/write the property by name.
            if (isPrivate) {
                get = desc.get;
                set = desc.set;
            } else {
                get = function() {
                    return this[name];
                };
                set = function(v) {
                    this[name] = v;
                };
            }
        } else if (kind === 2) {
            // Method: access returns the method function itself.
            get = function() {
                return desc.value;
            };
        } else {
            // Accessor/getter/setter: delegate to the descriptor's get/set.
            if (kind === 1 || kind === 3) {
                get = function() {
                    return desc.get.call(this);
                };
            }
            if (kind === 1 || kind === 4) {
                set = function(v) {
                    desc.set.call(this, v);
                };
            }
        }
        ctx.access = get && set ? {
            get: get,
            set: set
        } : get ? {
            get: get
        } : {
            set: set
        };
        try {
            return dec(value, ctx);
        } finally{
            decoratorFinishedRef.v = true;
        }
    }
    // Throws if a context callback (addInitializer) is used after decoration ended.
    function assertNotFinished(decoratorFinishedRef, fnName) {
        if (decoratorFinishedRef.v) {
            throw new Error("attempted to call " + fnName + " after decoration was finished");
        }
    }
    // Throws a TypeError unless `fn` is callable; `hint` names the offender.
    function assertCallable(fn, hint) {
        if (typeof fn !== "function") {
            throw new TypeError(hint + " must be a function");
        }
    }
    // Validates a decorator's return value for the given numeric kind:
    // accessor decorators (1) must return an object of optional get/set/init
    // functions; all other kinds must return a function (or undefined).
    function assertValidReturnValue(kind, value) {
        var type = typeof value;
        if (kind === 1) {
            if (type !== "object" || value === null) {
                throw new TypeError("accessor decorators must return an object with get, set, or init properties or void 0");
            }
            if (value.get !== undefined) {
                assertCallable(value.get, "accessor.get");
            }
            if (value.set !== undefined) {
                assertCallable(value.set, "accessor.set");
            }
            if (value.init !== undefined) {
                assertCallable(value.init, "accessor.init");
            }
        } else if (type !== "function") {
            var hint;
            if (kind === 0) {
                hint = "field";
            } else if (kind === 10) {
                hint = "class";
            } else {
                hint = "method";
            }
            throw new TypeError(hint + " decorators must return a function or void 0");
        }
    }
    // Applies the decorator(s) in decInfo[0] to one member, pushing resulting
    // initializers / private-member trampolines onto `ret` and (for public
    // members) redefining the property descriptor on `base`.
    function applyMemberDec(ret, base, decInfo, name, kind, isStatic, isPrivate, initializers, metadata) {
        var decs = decInfo[0];
        var desc, init, value;
        if (isPrivate) {
            // Private members: the compiler supplies accessors/values inline
            // in decInfo[3..4] since they are not reachable via the prototype.
            if (kind === 0 || kind === 1) {
                desc = {
                    get: decInfo[3],
                    set: decInfo[4]
                };
            } else if (kind === 3) {
                desc = {
                    get: decInfo[3]
                };
            } else if (kind === 4) {
                desc = {
                    set: decInfo[3]
                };
            } else {
                desc = {
                    value: decInfo[3]
                };
            }
        } else if (kind !== 0) {
            desc = Object.getOwnPropertyDescriptor(base, name);
        }
        // `value` is what gets passed to the decorator as its first argument.
        if (kind === 1) {
            value = {
                get: desc.get,
                set: desc.set
            };
        } else if (kind === 2) {
            value = desc.value;
        } else if (kind === 3) {
            value = desc.get;
        } else if (kind === 4) {
            value = desc.set;
        }
        var newValue, get, set;
        if (typeof decs === "function") {
            // Single decorator.
            newValue = memberDec(decs, name, desc, initializers, kind, isStatic, isPrivate, metadata, value);
            if (newValue !== void 0) {
                assertValidReturnValue(kind, newValue);
                if (kind === 0) {
                    init = newValue;
                } else if (kind === 1) {
                    init = newValue.init;
                    get = newValue.get || value.get;
                    set = newValue.set || value.set;
                    value = {
                        get: get,
                        set: set
                    };
                } else {
                    value = newValue;
                }
            }
        } else {
            // Multiple decorators: applied innermost-first (reverse order),
            // each seeing the previous one's result as its `value`.
            for(var i = decs.length - 1; i >= 0; i--){
                var dec = decs[i];
                newValue = memberDec(dec, name, desc, initializers, kind, isStatic, isPrivate, metadata, value);
                if (newValue !== void 0) {
                    assertValidReturnValue(kind, newValue);
                    var newInit;
                    if (kind === 0) {
                        newInit = newValue;
                    } else if (kind === 1) {
                        newInit = newValue.init;
                        get = newValue.get || value.get;
                        set = newValue.set || value.set;
                        value = {
                            get: get,
                            set: set
                        };
                    } else {
                        value = newValue;
                    }
                    // Collect init functions: scalar for one, array for many.
                    if (newInit !== void 0) {
                        if (init === void 0) {
                            init = newInit;
                        } else if (typeof init === "function") {
                            init = [
                                init,
                                newInit
                            ];
                        } else {
                            init.push(newInit);
                        }
                    }
                }
            }
        }
        if (kind === 0 || kind === 1) {
            // Fields/accessors: normalize `init` into a single
            // (instance, initialValue) -> value function and expose it via ret.
            if (init === void 0) {
                // No init decorators: identity.
                init = function(instance, init) {
                    return init;
                };
            } else if (typeof init !== "function") {
                // Array of initializers: chain them in order.
                var ownInitializers = init;
                init = function(instance, init) {
                    var value = init;
                    for(var i = 0; i < ownInitializers.length; i++){
                        value = ownInitializers[i].call(instance, value);
                    }
                    return value;
                };
            } else {
                var originalInitializer = init;
                init = function(instance, init) {
                    return originalInitializer.call(instance, init);
                };
            }
            ret.push(init);
        }
        if (kind !== 0) {
            // Write the (possibly replaced) implementation back into the descriptor.
            if (kind === 1) {
                desc.get = value.get;
                desc.set = value.set;
            } else if (kind === 2) {
                desc.value = value;
            } else if (kind === 3) {
                desc.get = value;
            } else if (kind === 4) {
                desc.set = value;
            }
            if (isPrivate) {
                // Private members cannot be redefined on the prototype; instead
                // push call/get/set trampolines for the compiled class to use.
                if (kind === 1) {
                    ret.push(function(instance, args) {
                        return value.get.call(instance, args);
                    });
                    ret.push(function(instance, args) {
                        return value.set.call(instance, args);
                    });
                } else if (kind === 2) {
                    ret.push(value);
                } else {
                    ret.push(function(instance, args) {
                        return value.call(instance, args);
                    });
                }
            } else {
                Object.defineProperty(base, name, desc);
            }
        }
    }
    // Applies all member decorators for a class. Returns the flat list of
    // initializers/trampolines that the compiled class destructures (the `e`
    // array in the applyDecs2203R result).
    function applyMemberDecs(Class, decInfos, metadata) {
        var ret = [];
        var protoInitializers;
        var staticInitializers;
        // Track already-decorated public non-field names so that illegal
        // duplicate decoration can be rejected (getter+setter pairs excepted).
        var existingProtoNonFields = new Map();
        var existingStaticNonFields = new Map();
        for(var i = 0; i < decInfos.length; i++){
            var decInfo = decInfos[i];
            if (!Array.isArray(decInfo)) continue;
            var kind = decInfo[1];
            var name = decInfo[2];
            var isPrivate = decInfo.length > 3;
            // kinds >= 5 denote static members; normalize back to 0..4.
            var isStatic = kind >= 5;
            var base;
            var initializers;
            if (isStatic) {
                base = Class;
                kind = kind - 5;
                staticInitializers = staticInitializers || [];
                initializers = staticInitializers;
            } else {
                base = Class.prototype;
                protoInitializers = protoInitializers || [];
                initializers = protoInitializers;
            }
            if (kind !== 0 && !isPrivate) {
                var existingNonFields = isStatic ? existingStaticNonFields : existingProtoNonFields;
                var existingKind = existingNonFields.get(name) || 0;
                if (existingKind === true || existingKind === 3 && kind !== 4 || existingKind === 4 && kind !== 3) {
                    throw new Error("Attempted to decorate a public method/accessor that has the same name as a previously decorated public method/accessor. This is not currently supported by the decorators plugin. Property name was: " + name);
                } else if (!existingKind && kind > 2) {
                    existingNonFields.set(name, kind);
                } else {
                    existingNonFields.set(name, true);
                }
            }
            applyMemberDec(ret, base, decInfo, name, kind, isStatic, isPrivate, initializers, metadata);
        }
        pushInitializers(ret, protoInitializers);
        pushInitializers(ret, staticInitializers);
        return ret;
    }
    // Appends a single function that runs all collected addInitializer
    // callbacks against an instance (or the class, for statics).
    function pushInitializers(ret, initializers) {
        if (initializers) {
            ret.push(function(instance) {
                for(var i = 0; i < initializers.length; i++){
                    initializers[i].call(instance);
                }
                return instance;
            });
        }
    }
    // Applies class decorators innermost-first. Returns [finalClass, runInits]
    // where runInits invokes the class-level addInitializer callbacks.
    // Returns undefined when there are no class decorators.
    function applyClassDecs(targetClass, classDecs, metadata) {
        if (classDecs.length > 0) {
            var initializers = [];
            var newClass = targetClass;
            var name = targetClass.name;
            for(var i = classDecs.length - 1; i >= 0; i--){
                var decoratorFinishedRef = {
                    v: false
                };
                try {
                    var nextNewClass = classDecs[i](newClass, {
                        kind: "class",
                        name: name,
                        addInitializer: createAddInitializerMethod(initializers, decoratorFinishedRef),
                        metadata
                    });
                } finally{
                    decoratorFinishedRef.v = true;
                }
                if (nextNewClass !== undefined) {
                    // 10 = "class" kind for return-value validation.
                    assertValidReturnValue(10, nextNewClass);
                    newClass = nextNewClass;
                }
            }
            return [
                defineMetadata(newClass, metadata),
                function() {
                    for(var i = 0; i < initializers.length; i++){
                        initializers[i].call(newClass);
                    }
                }
            ];
        }
    }
    // Installs the decorator metadata object on the class under Symbol.metadata
    // (falling back to the registered symbol on older runtimes).
    function defineMetadata(Class, metadata) {
        return Object.defineProperty(Class, Symbol.metadata || Symbol.for("Symbol.metadata"), {
            configurable: true,
            enumerable: true,
            value: metadata
        });
    }
    // Entry point returned by the factory. `e` holds member initializers and
    // trampolines; `c` is a lazy getter so class decorators only run when the
    // compiled output destructures it.
    return function applyDecs2203R(targetClass, memberDecs, classDecs, parentClass) {
        if (parentClass !== void 0) {
            // Inherit metadata from the parent class, if any.
            var parentMetadata = parentClass[Symbol.metadata || Symbol.for("Symbol.metadata")];
        }
        var metadata = Object.create(parentMetadata === void 0 ? null : parentMetadata);
        var e = applyMemberDecs(targetClass, memberDecs, metadata);
        // With no class decorators, metadata is attached here instead of in applyClassDecs.
        if (!classDecs.length) defineMetadata(targetClass, metadata);
        return {
            e: e,
            get c () {
                return applyClassDecs(targetClass, classDecs, metadata);
            }
        };
    };
}
370
// Lazily builds the decorator-application helper on first use and memoizes it
// by overwriting this very binding, so later calls skip the factory entirely.
function _apply_decs_2203_r(targetClass, memberDecs, classDecs, parentClass) {
    return (_apply_decs_2203_r = applyDecs2203RFactory())(targetClass, memberDecs, classDecs, parentClass);
}
373
// Scratch bindings filled in below: the four trackSpan decorators and the
// per-instance initializer produced by the compiled decorator machinery.
var _dec, _dec1, _dec2, _dec3, _initProto;
import { maxBigint } from '@aztec/foundation/bigint';
import { BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-types';
import { Buffer32 } from '@aztec/foundation/buffer';
import { pick } from '@aztec/foundation/collection';
import { createLogger } from '@aztec/foundation/log';
import { count } from '@aztec/foundation/string';
import { Timer, elapsed } from '@aztec/foundation/timer';
import { isDefined } from '@aztec/foundation/types';
import { L2BlockSourceEvents } from '@aztec/stdlib/block';
import { getEpochAtSlot, getSlotAtTimestamp } from '@aztec/stdlib/epoch-helpers';
import { computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging';
import { execInSpan, trackSpan } from '@aztec/telemetry-client';
import { InitialCheckpointNumberNotSequentialError } from '../errors.js';
import { retrieveCheckpointsFromRollup, retrieveL1ToL2Message, retrieveL1ToL2Messages, retrievedToPublishedCheckpoint } from '../l1/data_retrieval.js';
import { ArchiverDataStoreUpdater } from './data_store_updater.js';
import { validateCheckpointAttestations } from './validation.js';
// Tracing decorators applied to the four instrumented methods of
// ArchiverL1Synchronizer via the static decorator block in the class body.
_dec = trackSpan('Archiver.syncFromL1'), _dec1 = trackSpan('Archiver.handleEpochPrune'), _dec2 = trackSpan('Archiver.handleL1ToL2Messages'), _dec3 = trackSpan('Archiver.handleCheckpoints');
391
+ /**
392
+ * Handles L1 synchronization for the archiver.
393
+ * Responsible for fetching checkpoints, L1→L2 messages, and handling L1 reorgs.
394
+ */ export class ArchiverL1Synchronizer {
395
+ publicClient;
396
+ debugClient;
397
+ rollup;
398
+ inbox;
399
+ l1Addresses;
400
+ store;
401
+ config;
402
+ blobClient;
403
+ epochCache;
404
+ dateProvider;
405
+ instrumentation;
406
+ l1Constants;
407
+ events;
408
+ log;
409
+ static{
410
+ ({ e: [_initProto] } = _apply_decs_2203_r(this, [
411
+ [
412
+ _dec,
413
+ 2,
414
+ "syncFromL1"
415
+ ],
416
+ [
417
+ _dec1,
418
+ 2,
419
+ "handleEpochPrune"
420
+ ],
421
+ [
422
+ _dec2,
423
+ 2,
424
+ "handleL1ToL2Messages"
425
+ ],
426
+ [
427
+ _dec3,
428
+ 2,
429
+ "handleCheckpoints"
430
+ ]
431
+ ], []));
432
+ }
433
+ l1BlockNumber;
434
+ l1BlockHash;
435
+ l1Timestamp;
436
+ updater;
437
+ tracer;
438
    /**
     * Stores all collaborators on the instance, runs the compiled decorator
     * initializers, and creates the data-store updater.
     *
     * @param publicClient - L1 client used for reading blocks (see syncFromL1/testEthereumNodeSynced).
     * @param store - archiver data store; also wrapped by ArchiverDataStoreUpdater below.
     * @param log - logger, defaults to the 'archiver:l1-sync' namespace.
     * NOTE(review): remaining parameters (debugClient, rollup, inbox, l1Addresses,
     * config, blobClient, epochCache, dateProvider, instrumentation, l1Constants,
     * events, tracer) are project-declared collaborators assigned verbatim —
     * their contracts are not visible from this file.
     */
    constructor(publicClient, debugClient, rollup, inbox, l1Addresses, store, config, blobClient, epochCache, dateProvider, instrumentation, l1Constants, events, tracer, log = createLogger('archiver:l1-sync')){
        this.publicClient = publicClient;
        this.debugClient = debugClient;
        this.rollup = rollup;
        this.inbox = inbox;
        this.l1Addresses = l1Addresses;
        this.store = store;
        this.config = config;
        this.blobClient = blobClient;
        this.epochCache = epochCache;
        this.dateProvider = dateProvider;
        this.instrumentation = instrumentation;
        this.l1Constants = l1Constants;
        this.events = events;
        this.log = log;
        // Applies the trackSpan wrappers produced by the decorator bootstrap.
        _initProto(this);
        this.updater = new ArchiverDataStoreUpdater(this.store);
        this.tracer = tracer;
    }
457
    /** Replaces the archiver configuration used by subsequent sync iterations. */
    setConfig(newConfig) {
        this.config = newConfig;
    }
460
    /** Returns the last L1 block number that was synced, or undefined before the first sync. */
    getL1BlockNumber() {
        return this.l1BlockNumber;
    }
463
    /** Returns the last L1 timestamp that was synced, or undefined before the first sync. */
    getL1Timestamp() {
        return this.l1Timestamp;
    }
466
+ /** Checks that the ethereum node we are connected to has a latest timestamp no more than the allowed drift. Throw if not. */ async testEthereumNodeSynced() {
467
+ const maxAllowedDelay = this.config.maxAllowedEthClientDriftSeconds;
468
+ if (maxAllowedDelay === 0) {
469
+ return;
470
+ }
471
+ const { number, timestamp: l1Timestamp } = await this.publicClient.getBlock({
472
+ includeTransactions: false
473
+ });
474
+ const currentTime = BigInt(this.dateProvider.nowInSeconds());
475
+ if (currentTime - l1Timestamp > BigInt(maxAllowedDelay)) {
476
+ throw new Error(`Ethereum node is out of sync (last block synced ${number} at ${l1Timestamp} vs current time ${currentTime})`);
477
+ }
478
+ }
479
    /**
     * Runs one archiver sync iteration against L1: retrieves L1→L2 messages,
     * checkpoints and L2 blocks, prunes stale local state, and advances the
     * recorded L1 sync point. Returns early if the L1 head hash is unchanged
     * since the last iteration.
     *
     * @param initialSyncComplete - forwarded to handleCheckpoints; presumably
     *   flags whether the initial catch-up sync has finished — confirm at caller.
     */
    async syncFromL1(initialSyncComplete) {
        // Snapshot the L1 head once; all retrieval below is bounded by this
        // block number to keep the reads consistent (see the long note further down).
        const currentL1Block = await this.publicClient.getBlock({
            includeTransactions: false
        });
        const currentL1BlockNumber = currentL1Block.number;
        const currentL1BlockHash = Buffer32.fromString(currentL1Block.hash);
        const currentL1Timestamp = currentL1Block.timestamp;
        // Same head hash as last iteration: nothing new on L1, bail out.
        if (this.l1BlockHash && currentL1BlockHash.equals(this.l1BlockHash)) {
            this.log.trace(`No new L1 blocks since last sync at L1 block ${this.l1BlockNumber}`);
            return;
        }
        // Warn if the latest L1 block timestamp is too old
        const maxAllowedDelay = this.config.maxAllowedEthClientDriftSeconds;
        const now = this.dateProvider.nowInSeconds();
        if (maxAllowedDelay > 0 && Number(currentL1Timestamp) <= now - maxAllowedDelay) {
            this.log.warn(`Latest L1 block ${currentL1BlockNumber} timestamp ${currentL1Timestamp} is too old. Make sure your Ethereum node is synced.`, {
                currentL1BlockNumber,
                currentL1Timestamp,
                now,
                maxAllowedDelay
            });
        }
        // Load sync point for blocks and messages defaulting to start block
        const { blocksSynchedTo = this.l1Constants.l1StartBlock, messagesSynchedTo = {
            l1BlockNumber: this.l1Constants.l1StartBlock,
            l1BlockHash: this.l1Constants.l1StartBlockHash
        } } = await this.store.getSynchPoint();
        this.log.debug(`Starting new archiver sync iteration`, {
            blocksSynchedTo,
            messagesSynchedTo,
            currentL1BlockNumber,
            currentL1BlockHash
        });
        // ********** Ensuring Consistency of data pulled from L1 **********
        /**
         * There are a number of calls in this sync operation to L1 for retrieving
         * events and transaction data. There are a couple of things we need to bear in mind
         * to ensure that data is read exactly once.
         *
         * The first is the problem of eventually consistent ETH service providers like Infura.
         * Each L1 read operation will query data from the last L1 block that it saw emit its kind of data.
         * (so pending L1 to L2 messages will read from the last L1 block that emitted a message and so on)
         * This will mean the archiver will lag behind L1 and will only advance when there's L2-relevant activity on the chain.
         *
         * The second is that in between the various calls to L1, the block number can move meaning some
         * of the following calls will return data for blocks that were not present during earlier calls.
         * To combat this for the time being we simply ensure that all data retrieval methods only retrieve
         * data up to the currentBlockNumber captured at the top of this function. We might want to improve on this
         * in future but for the time being it should give us the guarantees that we need
         */ // ********** Events that are processed per L1 block **********
        await this.handleL1ToL2Messages(messagesSynchedTo, currentL1BlockNumber);
        // ********** Events that are processed per checkpoint **********
        if (currentL1BlockNumber > blocksSynchedTo) {
            // First we retrieve new checkpoints and L2 blocks and store them in the DB. This will also update the
            // pending chain validation status, proven checkpoint number, and synched L1 block number.
            const rollupStatus = await this.handleCheckpoints(blocksSynchedTo, currentL1BlockNumber, initialSyncComplete);
            // Then we try pruning uncheckpointed blocks if a new slot was mined without checkpoints
            await this.pruneUncheckpointedBlocks(currentL1Timestamp);
            // Then we prune the current epoch if it'd reorg on next submission.
            // Note that we don't do this before retrieving checkpoints because we may need to retrieve
            // checkpoints from more than 2 epochs ago, so we want to make sure we have the latest view of
            // the chain locally before we start unwinding stuff. This can be optimized by figuring out
            // up to which point we're pruning, and then requesting checkpoints up to that point only.
            const { rollupCanPrune } = await this.handleEpochPrune(rollupStatus.provenCheckpointNumber, currentL1BlockNumber, currentL1Timestamp);
            // If the last checkpoint we processed had an invalid attestation, we manually advance the L1 syncpoint
            // past it, since otherwise we'll keep downloading it and reprocessing it on every iteration until
            // we get a valid checkpoint to advance the syncpoint.
            if (!rollupStatus.validationResult?.valid && rollupStatus.lastL1BlockWithCheckpoint !== undefined) {
                await this.store.setCheckpointSynchedL1BlockNumber(rollupStatus.lastL1BlockWithCheckpoint);
            }
            // And lastly we check if we are missing any checkpoints behind us due to a possible L1 reorg.
            // We only do this if rollup cant prune on the next submission. Otherwise we will end up
            // re-syncing the checkpoints we have just unwound above. We also dont do this if the last checkpoint is invalid,
            // since the archiver will rightfully refuse to sync up to it.
            if (!rollupCanPrune && rollupStatus.validationResult?.valid) {
                await this.checkForNewCheckpointsBeforeL1SyncPoint(rollupStatus, blocksSynchedTo, currentL1BlockNumber);
            }
            this.instrumentation.updateL1BlockHeight(currentL1BlockNumber);
        }
        // After syncing has completed, update the current l1 block number and timestamp,
        // otherwise we risk announcing to the world that we've synced to a given point,
        // but the corresponding blocks have not been processed (see #12631).
        this.l1Timestamp = currentL1Timestamp;
        this.l1BlockNumber = currentL1BlockNumber;
        this.l1BlockHash = currentL1BlockHash;
        const l1BlockNumberAtEnd = await this.publicClient.getBlockNumber();
        this.log.debug(`Archiver sync iteration complete`, {
            l1BlockNumberAtStart: currentL1BlockNumber,
            l1TimestampAtStart: currentL1Timestamp,
            l1BlockNumberAtEnd
        });
    }
571
+ /** Prune all proposed local blocks that should have been checkpointed by now. */ async pruneUncheckpointedBlocks(currentL1Timestamp) {
572
+ const [lastCheckpointedBlockNumber, lastProposedBlockNumber] = await Promise.all([
573
+ this.store.getCheckpointedL2BlockNumber(),
574
+ this.store.getLatestBlockNumber()
575
+ ]);
576
+ // If there are no uncheckpointed blocks, we got nothing to do
577
+ if (lastProposedBlockNumber === lastCheckpointedBlockNumber) {
578
+ this.log.trace(`No uncheckpointed blocks to prune.`);
579
+ return;
580
+ }
581
+ // What's the slot of the first uncheckpointed block?
582
+ const firstUncheckpointedBlockNumber = BlockNumber(lastCheckpointedBlockNumber + 1);
583
+ const [firstUncheckpointedBlockHeader] = await this.store.getBlockHeaders(firstUncheckpointedBlockNumber, 1);
584
+ const firstUncheckpointedBlockSlot = firstUncheckpointedBlockHeader?.getSlot();
585
+ // What's the slot at the next L1 block? All blocks for slots strictly before this one should've been checkpointed by now.
586
+ const nextL1BlockTimestamp = currentL1Timestamp + BigInt(this.l1Constants.ethereumSlotDuration);
587
+ const slotAtNextL1Block = getSlotAtTimestamp(nextL1BlockTimestamp, this.l1Constants);
588
+ // Prune provisional blocks from slots that have ended without being checkpointed
589
+ if (firstUncheckpointedBlockSlot !== undefined && firstUncheckpointedBlockSlot < slotAtNextL1Block) {
590
+ this.log.warn(`Pruning blocks after block ${lastCheckpointedBlockNumber} due to slot ${firstUncheckpointedBlockSlot} not being checkpointed`, {
591
+ firstUncheckpointedBlockHeader: firstUncheckpointedBlockHeader.toInspect(),
592
+ slotAtNextL1Block
593
+ });
594
+ const prunedBlocks = await this.updater.removeUncheckpointedBlocksAfter(lastCheckpointedBlockNumber);
595
+ if (prunedBlocks.length > 0) {
596
+ this.events.emit(L2BlockSourceEvents.L2PruneUncheckpointed, {
597
+ type: L2BlockSourceEvents.L2PruneUncheckpointed,
598
+ slotNumber: firstUncheckpointedBlockSlot,
599
+ blocks: prunedBlocks
600
+ });
601
+ }
602
+ }
603
+ }
604
+ /** Queries the rollup contract on whether a prune can be executed on the immediate next L1 block. */ async canPrune(currentL1BlockNumber, currentL1Timestamp) {
605
+ const time = (currentL1Timestamp ?? 0n) + BigInt(this.l1Constants.ethereumSlotDuration);
606
+ const result = await this.rollup.canPruneAtTime(time, {
607
+ blockNumber: currentL1BlockNumber
608
+ });
609
+ if (result) {
610
+ this.log.debug(`Rollup contract allows pruning at L1 block ${currentL1BlockNumber} time ${time}`, {
611
+ currentL1Timestamp,
612
+ pruneTime: time,
613
+ currentL1BlockNumber
614
+ });
615
+ }
616
+ return result;
617
+ }
618
+ /** Checks if there'd be a reorg for the next checkpoint submission and start pruning now. */ async handleEpochPrune(provenCheckpointNumber, currentL1BlockNumber, currentL1Timestamp) {
619
+ const rollupCanPrune = await this.canPrune(currentL1BlockNumber, currentL1Timestamp);
620
+ const localPendingCheckpointNumber = await this.store.getSynchedCheckpointNumber();
621
+ const canPrune = localPendingCheckpointNumber > provenCheckpointNumber && rollupCanPrune;
622
+ if (canPrune) {
623
+ const timer = new Timer();
624
+ const pruneFrom = CheckpointNumber(provenCheckpointNumber + 1);
625
+ const header = await this.getCheckpointHeader(pruneFrom);
626
+ if (header === undefined) {
627
+ throw new Error(`Missing checkpoint header ${pruneFrom}`);
628
+ }
629
+ const pruneFromSlotNumber = header.slotNumber;
630
+ const pruneFromEpochNumber = getEpochAtSlot(pruneFromSlotNumber, this.l1Constants);
631
+ const checkpointsToUnwind = localPendingCheckpointNumber - provenCheckpointNumber;
632
+ const checkpointPromises = Array.from({
633
+ length: checkpointsToUnwind
634
+ }).fill(0).map((_, i)=>this.store.getCheckpointData(CheckpointNumber(i + pruneFrom)));
635
+ const checkpoints = await Promise.all(checkpointPromises);
636
+ const blockPromises = await Promise.all(checkpoints.filter(isDefined).map((cp)=>this.store.getBlocksForCheckpoint(CheckpointNumber(cp.checkpointNumber))));
637
+ const newBlocks = blockPromises.filter(isDefined).flat();
638
+ // Emit an event for listening services to react to the chain prune
639
+ this.events.emit(L2BlockSourceEvents.L2PruneUnproven, {
640
+ type: L2BlockSourceEvents.L2PruneUnproven,
641
+ epochNumber: pruneFromEpochNumber,
642
+ blocks: newBlocks
643
+ });
644
+ this.log.debug(`L2 prune from ${provenCheckpointNumber + 1} to ${localPendingCheckpointNumber} will occur on next checkpoint submission.`);
645
+ await this.updater.removeCheckpointsAfter(provenCheckpointNumber);
646
+ this.log.warn(`Removed ${count(checkpointsToUnwind, 'checkpoint')} after checkpoint ${provenCheckpointNumber} ` + `due to predicted reorg at L1 block ${currentL1BlockNumber}. ` + `Updated latest checkpoint is ${await this.store.getSynchedCheckpointNumber()}.`);
647
+ this.instrumentation.processPrune(timer.ms());
648
+ // TODO(palla/reorg): Do we need to set the block synched L1 block number here?
649
+ // Seems like the next iteration should handle this.
650
+ // await this.store.setCheckpointSynchedL1BlockNumber(currentL1BlockNumber);
651
+ }
652
+ return {
653
+ rollupCanPrune
654
+ };
655
+ }
656
+ nextRange(end, limit) {
657
+ const batchSize = this.config.batchSize * this.l1Constants.slotDuration / this.l1Constants.ethereumSlotDuration;
658
+ const nextStart = end + 1n;
659
+ const nextEnd = nextStart + BigInt(batchSize);
660
+ if (nextEnd > limit) {
661
+ return [
662
+ nextStart,
663
+ limit
664
+ ];
665
+ }
666
+ return [
667
+ nextStart,
668
+ nextEnd
669
+ ];
670
+ }
671
/**
 * Syncs L1 to L2 messages from the Inbox contract into the local store, up to the pinned
 * `currentL1BlockNumber`. Detects L1 reorgs by comparing the local last message's rolling
 * hash against L1 logs, rolling back local messages when they diverge, then retrieves any
 * missing messages in batches.
 * @param messagesSyncPoint - Last synched point ({ l1BlockNumber, l1BlockHash }) for messages.
 * @param currentL1BlockNumber - L1 block number pinned at the start of this sync iteration.
 */
async handleL1ToL2Messages(messagesSyncPoint, currentL1BlockNumber) {
    this.log.trace(`Handling L1 to L2 messages from ${messagesSyncPoint.l1BlockNumber} to ${currentL1BlockNumber}.`);
    // Already synched up to (or past) the pinned block: nothing to do.
    if (currentL1BlockNumber <= messagesSyncPoint.l1BlockNumber) {
        return;
    }
    // Load remote and local inbox states.
    const localMessagesInserted = await this.store.getTotalL1ToL2MessageCount();
    const localLastMessage = await this.store.getLastL1ToL2Message();
    const remoteMessagesState = await this.inbox.getState({
        blockNumber: currentL1BlockNumber
    });
    this.log.trace(`Retrieved remote inbox state at L1 block ${currentL1BlockNumber}.`, {
        localMessagesInserted,
        localLastMessage,
        remoteMessagesState
    });
    // Compare message count and rolling hash. If they match, no need to retrieve anything.
    // (Buffer32.ZERO is the rolling hash of an empty local message set.)
    if (remoteMessagesState.totalMessagesInserted === localMessagesInserted && remoteMessagesState.messagesRollingHash.equals(localLastMessage?.rollingHash ?? Buffer32.ZERO)) {
        this.log.trace(`No L1 to L2 messages to query between L1 blocks ${messagesSyncPoint.l1BlockNumber} and ${currentL1BlockNumber}.`);
        return;
    }
    // Check if our syncpoint is still valid. If not, there was an L1 reorg and we need to re-retrieve messages.
    // Note that we need to fetch it from logs and not from inbox state at the syncpoint l1 block number, since it
    // could be older than 128 blocks and non-archive nodes cannot resolve it.
    if (localLastMessage) {
        const remoteLastMessage = await this.retrieveL1ToL2Message(localLastMessage.leaf);
        this.log.trace(`Retrieved remote message for local last`, {
            remoteLastMessage,
            localLastMessage
        });
        // Divergence (or absence) of our last message on L1 means our tail is stale: roll it back.
        if (!remoteLastMessage || !remoteLastMessage.rollingHash.equals(localLastMessage.rollingHash)) {
            this.log.warn(`Rolling back L1 to L2 messages due to hash mismatch or msg not found.`, {
                remoteLastMessage,
                messagesSyncPoint,
                localLastMessage
            });
            messagesSyncPoint = await this.rollbackL1ToL2Messages(localLastMessage, messagesSyncPoint);
            this.log.debug(`Rolled back L1 to L2 messages to L1 block ${messagesSyncPoint.l1BlockNumber}.`, {
                messagesSyncPoint
            });
        }
    }
    // Retrieve and save messages in batches. Each batch is estimated to acommodate up to L2 'blockBatchSize' blocks,
    let searchStartBlock = 0n;
    let searchEndBlock = messagesSyncPoint.l1BlockNumber;
    let lastMessage;
    let messageCount = 0;
    do {
        [searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber);
        this.log.trace(`Retrieving L1 to L2 messages in L1 blocks ${searchStartBlock}-${searchEndBlock}`);
        const messages = await retrieveL1ToL2Messages(this.inbox, searchStartBlock, searchEndBlock);
        const timer = new Timer();
        await this.store.addL1ToL2Messages(messages);
        // NOTE(review): with an empty batch this divides by zero (Infinity/NaN metric) — confirm
        // the instrumentation tolerates it.
        const perMsg = timer.ms() / messages.length;
        this.instrumentation.processNewMessages(messages.length, perMsg);
        for (const msg of messages){
            this.log.debug(`Downloaded L1 to L2 message`, {
                ...msg,
                leaf: msg.leaf.toString()
            });
            lastMessage = msg;
            messageCount++;
        }
    }while (searchEndBlock < currentL1BlockNumber)
    // Log stats for messages retrieved (if any).
    if (messageCount > 0) {
        this.log.info(`Retrieved ${messageCount} new L1 to L2 messages up to message with index ${lastMessage?.index} for checkpoint ${lastMessage?.checkpointNumber}`, {
            lastMessage,
            messageCount
        });
    }
    // Warn if the resulting rolling hash does not match the remote state we had retrieved.
    if (lastMessage && !lastMessage.rollingHash.equals(remoteMessagesState.messagesRollingHash)) {
        this.log.warn(`Last message retrieved rolling hash does not match remote state.`, {
            lastMessage,
            remoteMessagesState
        });
    }
}
750
+ async retrieveL1ToL2Message(leaf) {
751
+ const currentL1BlockNumber = await this.publicClient.getBlockNumber();
752
+ let searchStartBlock = 0n;
753
+ let searchEndBlock = this.l1Constants.l1StartBlock - 1n;
754
+ do {
755
+ [searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber);
756
+ const message = await retrieveL1ToL2Message(this.inbox, leaf, searchStartBlock, searchEndBlock);
757
+ if (message) {
758
+ return message;
759
+ }
760
+ }while (searchEndBlock < currentL1BlockNumber)
761
+ return undefined;
762
+ }
763
+ async rollbackL1ToL2Messages(localLastMessage, messagesSyncPoint) {
764
+ // Slowly go back through our messages until we find the last common message.
765
+ // We could query the logs in batch as an optimization, but the depth of the reorg should not be deep, and this
766
+ // is a very rare case, so it's fine to query one log at a time.
767
+ let commonMsg;
768
+ this.log.verbose(`Searching most recent common L1 to L2 message at or before index ${localLastMessage.index}`);
769
+ for await (const msg of this.store.iterateL1ToL2Messages({
770
+ reverse: true,
771
+ end: localLastMessage.index
772
+ })){
773
+ const remoteMsg = await this.retrieveL1ToL2Message(msg.leaf);
774
+ const logCtx = {
775
+ remoteMsg,
776
+ localMsg: msg
777
+ };
778
+ if (remoteMsg && remoteMsg.rollingHash.equals(msg.rollingHash)) {
779
+ this.log.verbose(`Found most recent common L1 to L2 message at index ${msg.index} on L1 block ${msg.l1BlockNumber}`, logCtx);
780
+ commonMsg = remoteMsg;
781
+ break;
782
+ } else if (remoteMsg) {
783
+ this.log.debug(`Local L1 to L2 message with index ${msg.index} has different rolling hash`, logCtx);
784
+ } else {
785
+ this.log.debug(`Local L1 to L2 message with index ${msg.index} not found on L1`, logCtx);
786
+ }
787
+ }
788
+ // Delete everything after the common message we found.
789
+ const lastGoodIndex = commonMsg?.index;
790
+ this.log.warn(`Deleting all local L1 to L2 messages after index ${lastGoodIndex ?? 'undefined'}`);
791
+ await this.store.removeL1ToL2Messages(lastGoodIndex !== undefined ? lastGoodIndex + 1n : 0n);
792
+ // Update the syncpoint so the loop below reprocesses the changed messages. We go to the block before
793
+ // the last common one, so we force reprocessing it, in case new messages were added on that same L1 block
794
+ // after the last common message.
795
+ const syncPointL1BlockNumber = commonMsg ? commonMsg.l1BlockNumber - 1n : this.l1Constants.l1StartBlock;
796
+ const syncPointL1BlockHash = await this.getL1BlockHash(syncPointL1BlockNumber);
797
+ messagesSyncPoint = {
798
+ l1BlockNumber: syncPointL1BlockNumber,
799
+ l1BlockHash: syncPointL1BlockHash
800
+ };
801
+ await this.store.setMessageSynchedL1Block(messagesSyncPoint);
802
+ return messagesSyncPoint;
803
+ }
804
+ async getL1BlockHash(l1BlockNumber) {
805
+ const block = await this.publicClient.getBlock({
806
+ blockNumber: l1BlockNumber,
807
+ includeTransactions: false
808
+ });
809
+ if (!block) {
810
+ throw new Error(`Missing L1 block ${l1BlockNumber}`);
811
+ }
812
+ return Buffer32.fromString(block.hash);
813
+ }
814
/**
 * Core checkpoint-sync step: queries the rollup contract's status pinned at
 * `currentL1BlockNumber`, reconciles the local chain with it (advancing the proven tip,
 * unwinding local checkpoints not present on L1), then retrieves, validates, and stores
 * new checkpoints in batches of L1 blocks.
 * @param blocksSynchedTo - L1 block number checkpoints were last synched to.
 * @param currentL1BlockNumber - L1 block number pinned for this sync iteration.
 * @param initialSyncComplete - Whether the initial sync has finished (passed through to retrieval).
 * @returns The rollup status augmented with the last retrieved checkpoint and the last L1
 *          block in which a checkpoint was seen.
 * @throws On a missing local pending checkpoint, an inHash mismatch (likely a bug), or any
 *         error from storing checkpoints (after rolling back the L1 syncpoint when the
 *         failure was a non-sequential initial checkpoint number).
 */
async handleCheckpoints(blocksSynchedTo, currentL1BlockNumber, initialSyncComplete) {
    const localPendingCheckpointNumber = await this.store.getSynchedCheckpointNumber();
    const initialValidationResult = await this.store.getPendingChainValidationStatus();
    // Query the rollup contract's view of the chain, pinned at the current L1 block.
    const { provenCheckpointNumber, provenArchive, pendingCheckpointNumber, pendingArchive, archiveOfMyCheckpoint: archiveForLocalPendingCheckpointNumber } = await execInSpan(this.tracer, 'Archiver.getRollupStatus', ()=>this.rollup.status(localPendingCheckpointNumber, {
        blockNumber: currentL1BlockNumber
    }));
    // Mutable status object returned to the caller; validationResult may be updated below
    // as retrieved checkpoints are validated.
    const rollupStatus = {
        provenCheckpointNumber,
        provenArchive: provenArchive.toString(),
        pendingCheckpointNumber,
        pendingArchive: pendingArchive.toString(),
        validationResult: initialValidationResult
    };
    this.log.trace(`Retrieved rollup status at current L1 block ${currentL1BlockNumber}.`, {
        localPendingCheckpointNumber,
        blocksSynchedTo,
        currentL1BlockNumber,
        archiveForLocalPendingCheckpointNumber,
        ...rollupStatus
    });
    // Advances the local proven tip to match the rollup's, if we hold the matching
    // checkpoint locally. Called before and after retrieval (captures the status above).
    const updateProvenCheckpoint = async ()=>{
        // Annoying edge case: if proven checkpoint is moved back to 0 due to a reorg at the beginning of the chain,
        // we need to set it to zero. This is an edge case because we dont have a checkpoint zero (initial checkpoint is one),
        // so localCheckpointForDestinationProvenCheckpointNumber would not be found below.
        if (provenCheckpointNumber === 0) {
            const localProvenCheckpointNumber = await this.store.getProvenCheckpointNumber();
            if (localProvenCheckpointNumber !== provenCheckpointNumber) {
                await this.store.setProvenCheckpointNumber(provenCheckpointNumber);
                this.log.info(`Rolled back proven chain to checkpoint ${provenCheckpointNumber}`, {
                    provenCheckpointNumber
                });
            }
        }
        const localCheckpointForDestinationProvenCheckpointNumber = await this.store.getCheckpointData(provenCheckpointNumber);
        // Sanity check. I've hit what seems to be a state where the proven checkpoint is set to a value greater than the latest
        // synched checkpoint when requesting L2Tips from the archiver. This is the only place where the proven checkpoint is set.
        const synched = await this.store.getSynchedCheckpointNumber();
        if (localCheckpointForDestinationProvenCheckpointNumber && synched < localCheckpointForDestinationProvenCheckpointNumber.checkpointNumber) {
            this.log.error(`Hit local checkpoint greater than last synched checkpoint: ${localCheckpointForDestinationProvenCheckpointNumber.checkpointNumber} > ${synched}`);
        }
        this.log.trace(`Local checkpoint for remote proven checkpoint ${provenCheckpointNumber} is ${localCheckpointForDestinationProvenCheckpointNumber?.archive.root.toString() ?? 'undefined'}`);
        // Only advance if our local checkpoint's archive root matches the proven one on L1.
        if (localCheckpointForDestinationProvenCheckpointNumber && provenArchive.equals(localCheckpointForDestinationProvenCheckpointNumber.archive.root)) {
            const localProvenCheckpointNumber = await this.store.getProvenCheckpointNumber();
            if (localProvenCheckpointNumber !== provenCheckpointNumber) {
                await this.store.setProvenCheckpointNumber(provenCheckpointNumber);
                this.log.info(`Updated proven chain to checkpoint ${provenCheckpointNumber}`, {
                    provenCheckpointNumber
                });
                const provenSlotNumber = localCheckpointForDestinationProvenCheckpointNumber.header.slotNumber;
                const provenEpochNumber = getEpochAtSlot(provenSlotNumber, this.l1Constants);
                const lastBlockNumberInCheckpoint = localCheckpointForDestinationProvenCheckpointNumber.startBlock + localCheckpointForDestinationProvenCheckpointNumber.numBlocks - 1;
                this.events.emit(L2BlockSourceEvents.L2BlockProven, {
                    type: L2BlockSourceEvents.L2BlockProven,
                    blockNumber: BlockNumber(lastBlockNumberInCheckpoint),
                    slotNumber: provenSlotNumber,
                    epochNumber: provenEpochNumber
                });
                this.instrumentation.updateLastProvenBlock(lastBlockNumberInCheckpoint);
            } else {
                this.log.trace(`Proven checkpoint ${provenCheckpointNumber} already stored.`);
            }
        }
    };
    // This is an edge case that we only hit if there are no proposed checkpoints.
    // If we have 0 checkpoints locally and there are no checkpoints onchain there is nothing to do.
    const noCheckpoints = localPendingCheckpointNumber === 0 && pendingCheckpointNumber === 0;
    if (noCheckpoints) {
        await this.store.setCheckpointSynchedL1BlockNumber(currentL1BlockNumber);
        this.log.debug(`No checkpoints to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}, no checkpoints on chain`);
        return rollupStatus;
    }
    await updateProvenCheckpoint();
    // Related to the L2 reorgs of the pending chain. We are only interested in actually addressing a reorg if there
    // are any state that could be impacted by it. If we have no checkpoints, there is no impact.
    if (localPendingCheckpointNumber > 0) {
        const localPendingCheckpoint = await this.store.getCheckpointData(localPendingCheckpointNumber);
        if (localPendingCheckpoint === undefined) {
            throw new Error(`Missing checkpoint ${localPendingCheckpointNumber}`);
        }
        const localPendingArchiveRoot = localPendingCheckpoint.archive.root.toString();
        const noCheckpointSinceLast = localPendingCheckpoint && pendingArchive.toString() === localPendingArchiveRoot;
        if (noCheckpointSinceLast) {
            // We believe the following line causes a problem when we encounter L1 re-orgs.
            // Basically, by setting the synched L1 block number here, we are saying that we have
            // processed all checkpoints up to the current L1 block number and we will not attempt to retrieve logs from
            // this block again (or any blocks before).
            // However, in the re-org scenario, our L1 node is temporarily lying to us and we end up potentially missing checkpoints.
            // We must only set this block number based on actually retrieved logs.
            // TODO(#8621): Tackle this properly when we handle L1 Re-orgs.
            // await this.store.setCheckpointSynchedL1BlockNumber(currentL1BlockNumber);
            this.log.debug(`No checkpoints to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
            return rollupStatus;
        }
        const localPendingCheckpointInChain = archiveForLocalPendingCheckpointNumber.equals(localPendingCheckpoint.archive.root);
        if (!localPendingCheckpointInChain) {
            // If our local pending checkpoint tip is not in the chain on L1 a "prune" must have happened
            // or the L1 have reorged.
            // In any case, we have to figure out how far into the past the action will take us.
            // For simplicity here, we will simply rewind until we end in a checkpoint that is also on the chain on L1.
            this.log.debug(`L2 prune has been detected due to local pending checkpoint ${localPendingCheckpointNumber} not in chain`, {
                localPendingCheckpointNumber,
                localPendingArchiveRoot,
                archiveForLocalPendingCheckpointNumber
            });
            let tipAfterUnwind = localPendingCheckpointNumber;
            while(true){
                const candidateCheckpoint = await this.store.getCheckpointData(tipAfterUnwind);
                if (candidateCheckpoint === undefined) {
                    break;
                }
                const archiveAtContract = await this.rollup.archiveAt(candidateCheckpoint.checkpointNumber);
                this.log.trace(`Checking local checkpoint ${candidateCheckpoint.checkpointNumber} with archive ${candidateCheckpoint.archive.root}`, {
                    archiveAtContract,
                    archiveLocal: candidateCheckpoint.archive.root.toString()
                });
                if (archiveAtContract.equals(candidateCheckpoint.archive.root)) {
                    break;
                }
                tipAfterUnwind--;
            }
            const checkpointsToRemove = localPendingCheckpointNumber - tipAfterUnwind;
            await this.updater.removeCheckpointsAfter(CheckpointNumber(tipAfterUnwind));
            this.log.warn(`Removed ${count(checkpointsToRemove, 'checkpoint')} after checkpoint ${tipAfterUnwind} ` + `due to mismatched checkpoint hashes at L1 block ${currentL1BlockNumber}. ` + `Updated L2 latest checkpoint is ${await this.store.getSynchedCheckpointNumber()}.`);
        }
    }
    // Retrieve checkpoints in batches. Each batch is estimated to accommodate up to 'blockBatchSize' L1 blocks,
    // computed using the L2 block time vs the L1 block time.
    let searchStartBlock = blocksSynchedTo;
    let searchEndBlock = blocksSynchedTo;
    let lastRetrievedCheckpoint;
    let lastL1BlockWithCheckpoint = undefined;
    do {
        [searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber);
        this.log.trace(`Retrieving checkpoints from L1 block ${searchStartBlock} to ${searchEndBlock}`);
        // TODO(md): Retrieve from blob client then from consensus client, then from peers
        const retrievedCheckpoints = await execInSpan(this.tracer, 'Archiver.retrieveCheckpointsFromRollup', ()=>retrieveCheckpointsFromRollup(this.rollup, this.publicClient, this.debugClient, this.blobClient, searchStartBlock, searchEndBlock, this.l1Addresses, this.instrumentation, this.log, !initialSyncComplete));
        if (retrievedCheckpoints.length === 0) {
            // We are not calling `setBlockSynchedL1BlockNumber` because it may cause sync issues if based off infura.
            // See further details in earlier comments.
            this.log.trace(`Retrieved no new checkpoints from L1 block ${searchStartBlock} to ${searchEndBlock}`);
            continue;
        }
        this.log.debug(`Retrieved ${retrievedCheckpoints.length} new checkpoints between L1 blocks ${searchStartBlock} and ${searchEndBlock}`, {
            lastProcessedCheckpoint: retrievedCheckpoints[retrievedCheckpoints.length - 1].l1,
            searchStartBlock,
            searchEndBlock
        });
        const publishedCheckpoints = await Promise.all(retrievedCheckpoints.map((b)=>retrievedToPublishedCheckpoint(b)));
        const validCheckpoints = [];
        for (const published of publishedCheckpoints){
            // Validate attestations unless the config says to skip validation entirely.
            const validationResult = this.config.skipValidateCheckpointAttestations ? {
                valid: true
            } : await validateCheckpointAttestations(published, this.epochCache, this.l1Constants, this.log);
            // Only update the validation result if it has changed, so we can keep track of the first invalid checkpoint
            // in case there is a sequence of more than one invalid checkpoint, as we need to invalidate the first one.
            // There is an exception though: if a checkpoint is invalidated and replaced with another invalid checkpoint,
            // we need to update the validation result, since we need to be able to invalidate the new one.
            // See test 'chain progresses if an invalid checkpoint is invalidated with an invalid one' for more info.
            if (rollupStatus.validationResult?.valid !== validationResult.valid || !rollupStatus.validationResult.valid && !validationResult.valid && rollupStatus.validationResult.checkpoint.checkpointNumber === validationResult.checkpoint.checkpointNumber) {
                rollupStatus.validationResult = validationResult;
            }
            if (!validationResult.valid) {
                this.log.warn(`Skipping checkpoint ${published.checkpoint.number} due to invalid attestations`, {
                    checkpointHash: published.checkpoint.hash(),
                    l1BlockNumber: published.l1.blockNumber,
                    ...pick(validationResult, 'reason')
                });
                // Emit event for invalid checkpoint detection
                this.events.emit(L2BlockSourceEvents.InvalidAttestationsCheckpointDetected, {
                    type: L2BlockSourceEvents.InvalidAttestationsCheckpointDetected,
                    validationResult
                });
                continue;
            }
            // Check the inHash of the checkpoint against the l1->l2 messages.
            // The messages should've been synced up to the currentL1BlockNumber and must be available for the published
            // checkpoints we just retrieved.
            const l1ToL2Messages = await this.store.getL1ToL2Messages(published.checkpoint.number);
            const computedInHash = computeInHashFromL1ToL2Messages(l1ToL2Messages);
            const publishedInHash = published.checkpoint.header.inHash;
            if (!computedInHash.equals(publishedInHash)) {
                this.log.fatal(`Mismatch inHash for checkpoint ${published.checkpoint.number}`, {
                    checkpointHash: published.checkpoint.hash(),
                    l1BlockNumber: published.l1.blockNumber,
                    computedInHash,
                    publishedInHash
                });
                // Throwing an error since this is most likely caused by a bug.
                throw new Error(`Mismatch inHash for checkpoint ${published.checkpoint.number}. Expected ${computedInHash} but got ${publishedInHash}`);
            }
            validCheckpoints.push(published);
            this.log.debug(`Ingesting new checkpoint ${published.checkpoint.number} with ${published.checkpoint.blocks.length} blocks`, {
                checkpointHash: published.checkpoint.hash(),
                l1BlockNumber: published.l1.blockNumber,
                ...published.checkpoint.header.toInspect(),
                blocks: published.checkpoint.blocks.map((b)=>b.getStats())
            });
        }
        try {
            // Only persist the validation result if it changed during this batch.
            const updatedValidationResult = rollupStatus.validationResult === initialValidationResult ? undefined : rollupStatus.validationResult;
            const [processDuration, result] = await elapsed(()=>execInSpan(this.tracer, 'Archiver.addCheckpoints', ()=>this.updater.addCheckpoints(validCheckpoints, updatedValidationResult)));
            this.instrumentation.processNewBlocks(processDuration / validCheckpoints.length, validCheckpoints.flatMap((c)=>c.checkpoint.blocks));
            // If blocks were pruned due to conflict with L1 checkpoints, emit event
            if (result.prunedBlocks && result.prunedBlocks.length > 0) {
                const prunedCheckpointNumber = result.prunedBlocks[0].checkpointNumber;
                const prunedSlotNumber = result.prunedBlocks[0].header.globalVariables.slotNumber;
                this.log.warn(`Pruned ${result.prunedBlocks.length} mismatching blocks for checkpoint ${prunedCheckpointNumber}`, {
                    prunedBlocks: result.prunedBlocks.map((b)=>b.toBlockInfo()),
                    prunedSlotNumber,
                    prunedCheckpointNumber
                });
                // Emit event for listening services to react to the prune.
                // Note: slotNumber comes from the first pruned block. If pruned blocks theoretically spanned multiple slots,
                // only one slot number would be reported (though in practice all blocks in a checkpoint span a single slot).
                this.events.emit(L2BlockSourceEvents.L2PruneUncheckpointed, {
                    type: L2BlockSourceEvents.L2PruneUncheckpointed,
                    slotNumber: prunedSlotNumber,
                    blocks: result.prunedBlocks
                });
            }
        } catch (err) {
            // A non-sequential checkpoint means we are missing earlier ones: rewind the L1
            // syncpoint so they get fetched, then rethrow for the caller to retry.
            if (err instanceof InitialCheckpointNumberNotSequentialError) {
                const { previousCheckpointNumber, newCheckpointNumber } = err;
                const previousCheckpoint = previousCheckpointNumber ? await this.store.getCheckpointData(CheckpointNumber(previousCheckpointNumber)) : undefined;
                const updatedL1SyncPoint = previousCheckpoint?.l1.blockNumber ?? this.l1Constants.l1StartBlock;
                await this.store.setCheckpointSynchedL1BlockNumber(updatedL1SyncPoint);
                this.log.warn(`Attempting to insert checkpoint ${newCheckpointNumber} with previous block ${previousCheckpointNumber}. Rolling back L1 sync point to ${updatedL1SyncPoint} to try and fetch the missing blocks.`, {
                    previousCheckpointNumber,
                    newCheckpointNumber,
                    updatedL1SyncPoint
                });
            }
            throw err;
        }
        for (const checkpoint of validCheckpoints){
            this.log.info(`Downloaded checkpoint ${checkpoint.checkpoint.number}`, {
                checkpointHash: checkpoint.checkpoint.hash(),
                checkpointNumber: checkpoint.checkpoint.number,
                blockCount: checkpoint.checkpoint.blocks.length,
                txCount: checkpoint.checkpoint.blocks.reduce((acc, b)=>acc + b.body.txEffects.length, 0),
                header: checkpoint.checkpoint.header.toInspect(),
                archiveRoot: checkpoint.checkpoint.archive.root.toString(),
                archiveNextLeafIndex: checkpoint.checkpoint.archive.nextAvailableLeafIndex
            });
        }
        lastRetrievedCheckpoint = validCheckpoints.at(-1) ?? lastRetrievedCheckpoint;
        lastL1BlockWithCheckpoint = retrievedCheckpoints.at(-1)?.l1.blockNumber ?? lastL1BlockWithCheckpoint;
    }while (searchEndBlock < currentL1BlockNumber)
    // Important that we update AFTER inserting the blocks.
    await updateProvenCheckpoint();
    return {
        ...rollupStatus,
        lastRetrievedCheckpoint,
        lastL1BlockWithCheckpoint
    };
}
1070
+ async checkForNewCheckpointsBeforeL1SyncPoint(status, blocksSynchedTo, currentL1BlockNumber) {
1071
+ const { lastRetrievedCheckpoint, pendingCheckpointNumber } = status;
1072
+ // Compare the last checkpoint we have (either retrieved in this round or loaded from store) with what the
1073
+ // rollup contract told us was the latest one (pinned at the currentL1BlockNumber).
1074
+ const latestLocalCheckpointNumber = lastRetrievedCheckpoint?.checkpoint.number ?? await this.store.getSynchedCheckpointNumber();
1075
+ if (latestLocalCheckpointNumber < pendingCheckpointNumber) {
1076
+ // Here we have consumed all logs until the `currentL1Block` we pinned at the beginning of the archiver loop,
1077
+ // but still haven't reached the pending checkpoint according to the call to the rollup contract.
1078
+ // We suspect an L1 reorg that added checkpoints *behind* us. If that is the case, it must have happened between
1079
+ // the last checkpoint we saw and the current one, so we reset the last synched L1 block number. In the edge case
1080
+ // we don't have one, we go back 2 L1 epochs, which is the deepest possible reorg (assuming Casper is working).
1081
+ let latestLocalCheckpointArchive = undefined;
1082
+ let targetL1BlockNumber = maxBigint(currentL1BlockNumber - 64n, 0n);
1083
+ if (lastRetrievedCheckpoint) {
1084
+ latestLocalCheckpointArchive = lastRetrievedCheckpoint.checkpoint.archive.root.toString();
1085
+ targetL1BlockNumber = lastRetrievedCheckpoint.l1.blockNumber;
1086
+ } else if (latestLocalCheckpointNumber > 0) {
1087
+ const checkpoint = await this.store.getRangeOfCheckpoints(latestLocalCheckpointNumber, 1).then(([c])=>c);
1088
+ latestLocalCheckpointArchive = checkpoint.archive.root.toString();
1089
+ targetL1BlockNumber = checkpoint.l1.blockNumber;
1090
+ }
1091
+ this.log.warn(`Failed to reach checkpoint ${pendingCheckpointNumber} at ${currentL1BlockNumber} (latest is ${latestLocalCheckpointNumber}). ` + `Rolling back last synched L1 block number to ${targetL1BlockNumber}.`, {
1092
+ latestLocalCheckpointNumber,
1093
+ latestLocalCheckpointArchive,
1094
+ blocksSynchedTo,
1095
+ currentL1BlockNumber,
1096
+ ...status
1097
+ });
1098
+ await this.store.setCheckpointSynchedL1BlockNumber(targetL1BlockNumber);
1099
+ } else {
1100
+ this.log.trace(`No new checkpoints behind L1 sync point to retrieve.`, {
1101
+ latestLocalCheckpointNumber,
1102
+ pendingCheckpointNumber
1103
+ });
1104
+ }
1105
+ }
1106
+ async getCheckpointHeader(number) {
1107
+ const checkpoint = await this.store.getCheckpointData(number);
1108
+ if (!checkpoint) {
1109
+ return undefined;
1110
+ }
1111
+ return checkpoint.header;
1112
+ }
1113
+ }