@aztec/prover-client 0.72.1 → 0.74.0
This diff reflects the content of publicly available package versions as published to a supported registry. It is provided for informational purposes only and shows the changes between the two versions as they appear in that registry.
- package/dest/bin/get-proof-inputs.js +3 -3
- package/dest/mocks/test_context.d.ts +2 -2
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +6 -6
- package/dest/orchestrator/block-building-helpers.d.ts +3 -3
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
- package/dest/orchestrator/block-building-helpers.js +14 -13
- package/dest/orchestrator/block-proving-state.d.ts +6 -6
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/block-proving-state.js +27 -25
- package/dest/orchestrator/epoch-proving-state.d.ts +3 -3
- package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/epoch-proving-state.js +11 -8
- package/dest/orchestrator/orchestrator.d.ts +1 -1
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +59 -59
- package/dest/orchestrator/tx-proving-state.d.ts +2 -2
- package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/tx-proving-state.js +14 -14
- package/dest/prover-client/server-epoch-prover.d.ts +1 -1
- package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
- package/dest/prover-client/server-epoch-prover.js +2 -2
- package/dest/proving_broker/broker_prover_facade.js +3 -3
- package/dest/proving_broker/proving_broker.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.js +3 -4
- package/dest/proving_broker/proving_broker_database/memory.d.ts +1 -1
- package/dest/proving_broker/proving_broker_database/memory.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/memory.js +3 -3
- package/dest/proving_broker/proving_broker_database/persisted.d.ts +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.js +12 -12
- package/dest/proving_broker/proving_broker_database.d.ts +1 -1
- package/dest/proving_broker/proving_broker_database.d.ts.map +1 -1
- package/package.json +11 -11
- package/src/bin/get-proof-inputs.ts +2 -2
- package/src/mocks/test_context.ts +9 -7
- package/src/orchestrator/block-building-helpers.ts +13 -14
- package/src/orchestrator/block-proving-state.ts +26 -24
- package/src/orchestrator/epoch-proving-state.ts +10 -7
- package/src/orchestrator/orchestrator.ts +65 -60
- package/src/orchestrator/tx-proving-state.ts +14 -14
- package/src/prover-client/server-epoch-prover.ts +2 -2
- package/src/proving_broker/broker_prover_facade.ts +3 -3
- package/src/proving_broker/proof_store/inline_proof_store.ts +1 -1
- package/src/proving_broker/proving_broker.ts +2 -4
- package/src/proving_broker/proving_broker_database/memory.ts +2 -2
- package/src/proving_broker/proving_broker_database/persisted.ts +15 -15
- package/src/proving_broker/proving_broker_database.ts +1 -1
package/src/orchestrator/block-building-helpers.ts

```diff
@@ -136,7 +136,7 @@ export const buildBaseRollupHints = runInSpan(
 
 // Append new data to startSpongeBlob
 const inputSpongeBlob = startSpongeBlob.clone();
-startSpongeBlob.absorb(tx.txEffect.toBlobFields());
+await startSpongeBlob.absorb(tx.txEffect.toBlobFields());
 
 if (tx.avmProvingRequest) {
 // Build public base rollup hints
@@ -172,7 +172,7 @@ export const buildBaseRollupHints = runInSpan(
 ),
 });
 
-const blockHash = tx.constants.historicalHeader.hash();
+const blockHash = await tx.constants.historicalHeader.hash();
 const archiveRootMembershipWitness = await getMembershipWitnessFor(
 blockHash,
 MerkleTreeId.ARCHIVE,
@@ -198,7 +198,7 @@ export const buildBaseRollupHints = runInSpan(
 
 // Create data hint for reading fee payer initial balance in Fee Juice
 // If no fee payer is set, read hint should be empty
-const leafSlot = computeFeePayerBalanceLeafSlot(tx.data.feePayer);
+const leafSlot = await computeFeePayerBalanceLeafSlot(tx.data.feePayer);
 const feePayerFeeJuiceBalanceReadHint = tx.data.feePayer.isZero()
 ? PublicDataHint.empty()
 : await getPublicDataHint(db, leafSlot.toBigInt());
@@ -232,7 +232,7 @@ export const buildBaseRollupHints = runInSpan(
 feeWriteSiblingPath,
 });
 
-const blockHash = tx.constants.historicalHeader.hash();
+const blockHash = await tx.constants.historicalHeader.hash();
 const archiveRootMembershipWitness = await getMembershipWitnessFor(
 blockHash,
 MerkleTreeId.ARCHIVE,
@@ -275,9 +275,9 @@ export async function getPublicDataHint(db: MerkleTreeWriteOperations, leafSlot:
 export const buildBlobHints = runInSpan(
 'BlockBuilderHelpers',
 'buildBlobHints',
-(_span: Span, txEffects: TxEffect[]) => {
+async (_span: Span, txEffects: TxEffect[]) => {
 const blobFields = txEffects.flatMap(tx => tx.toBlobFields());
-const blobs = Blob.getBlobs(blobFields);
+const blobs = await Blob.getBlobs(blobFields);
 const blobCommitments = blobs.map(b => b.commitmentToFields());
 const blobsHash = new Fr(getBlobsHashFromBlobs(blobs));
 return { blobFields, blobCommitments, blobs, blobsHash };
@@ -287,7 +287,7 @@ export const buildBlobHints = runInSpan(
 export const buildHeaderFromCircuitOutputs = runInSpan(
 'BlockBuilderHelpers',
 'buildHeaderFromCircuitOutputs',
-(
+async (
 _span,
 previousRollupData: BaseOrMergeRollupPublicInputs[],
 parityPublicInputs: ParityPublicInputs,
@@ -326,7 +326,7 @@ export const buildHeaderFromCircuitOutputs = runInSpan(
 accumulatedFees,
 accumulatedManaUsed,
 );
-if (!header.hash().equals(rootRollupOutputs.endBlockHash)) {
+if (!(await header.hash()).equals(rootRollupOutputs.endBlockHash)) {
 logger?.error(
 `Block header mismatch when building header from circuit outputs.` +
 `\n\nHeader: ${inspect(header)}` +
@@ -375,12 +375,11 @@ export const buildHeaderAndBodyFromTxs = runInSpan(
 );
 
 l1ToL2Messages = padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
-const hasher = (left: Buffer, right: Buffer) => sha256Trunc(Buffer.concat([left, right]));
+const hasher = (left: Buffer, right: Buffer) => Promise.resolve(sha256Trunc(Buffer.concat([left, right])));
 const parityHeight = Math.ceil(Math.log2(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP));
-const
-
-);
-const blobsHash = getBlobsHashFromBlobs(Blob.getBlobs(body.toBlobFields()));
+const parityCalculator = await MerkleTreeCalculator.create(parityHeight, Fr.ZERO.toBuffer(), hasher);
+const parityShaRoot = await parityCalculator.computeTreeRoot(l1ToL2Messages.map(msg => msg.toBuffer()));
+const blobsHash = getBlobsHashFromBlobs(await Blob.getBlobs(body.toBlobFields()));
 
 const contentCommitment = new ContentCommitment(new Fr(numTxs), blobsHash, parityShaRoot, outHash);
 
@@ -442,7 +441,7 @@ export const getConstantRollupData = runInSpan(
 'getConstantRollupData',
 async (_span, globalVariables: GlobalVariables, db: MerkleTreeReadOperations): Promise<ConstantRollupData> => {
 return ConstantRollupData.from({
-vkTreeRoot: getVKTreeRoot(),
+vkTreeRoot: await getVKTreeRoot(),
 protocolContractTreeRoot,
 lastArchive: await getTreeSnapshot(MerkleTreeId.ARCHIVE, db),
 globalVariables,
```
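In the `buildHeaderAndBodyFromTxs` hunk above, the sibling hasher is rewritten to return a `Promise`, apparently to match the async `MerkleTreeCalculator` API it is passed into. Below is a minimal sketch of that adapter pattern; `AsyncHasher` and `truncatedSha256` are placeholder names standing in for the package's own types and its `sha256Trunc` helper, not real exports.

```ts
import { createHash } from 'crypto';

// Placeholder type: a hasher whose result is Promise-based.
type AsyncHasher = (left: Buffer, right: Buffer) => Promise<Buffer>;

// Stand-in for sha256Trunc: SHA-256 with the output shortened to 31 bytes.
const truncatedSha256 = (data: Buffer): Buffer =>
  createHash('sha256').update(data).digest().subarray(0, 31);

// The hash work stays synchronous; Promise.resolve only adapts the signature.
const hasher: AsyncHasher = (left, right) =>
  Promise.resolve(truncatedSha256(Buffer.concat([left, right])));
```

Wrapping in `Promise.resolve` keeps the computation synchronous while satisfying a Promise-based hasher interface.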
package/src/orchestrator/block-proving-state.ts

```diff
@@ -164,16 +164,16 @@ export class BlockProvingState {
 return this.baseOrMergeProvingOutputs.getParentLocation(location);
 }
 
-public getMergeRollupInputs(mergeLocation: TreeNodeLocation) {
+public async getMergeRollupInputs(mergeLocation: TreeNodeLocation) {
 const [left, right] = this.baseOrMergeProvingOutputs.getChildren(mergeLocation);
 if (!left || !right) {
 throw new Error('At lease one child is not ready.');
 }
 
-return new MergeRollupInputs([this.#getPreviousRollupData(left), this.#getPreviousRollupData(right)]);
+return new MergeRollupInputs([await this.#getPreviousRollupData(left), await this.#getPreviousRollupData(right)]);
 }
 
-public getBlockRootRollupTypeAndInputs(proverId: Fr) {
+public async getBlockRootRollupTypeAndInputs(proverId: Fr) {
 if (!this.rootParityProvingOutput) {
 throw new Error('Root parity is not ready.');
 }
@@ -184,13 +184,13 @@ export class BlockProvingState {
 throw new Error('At lease one child is not ready for the block root.');
 }
 
-const data = this.#getBlockRootRollupData(proverId);
+const data = await this.#getBlockRootRollupData(proverId);
 
 if (this.totalNumTxs === 0) {
 const constants = ConstantRollupData.from({
 lastArchive: this.lastArchiveSnapshot,
 globalVariables: this.globalVariables,
-vkTreeRoot: getVKTreeRoot(),
+vkTreeRoot: await getVKTreeRoot(),
 protocolContractTreeRoot,
 });
 
@@ -204,8 +204,8 @@ export class BlockProvingState {
 };
 }
 
-const previousRollupData = nonEmptyProofs.map(p => this.#getPreviousRollupData(p!));
-const blobData = this.#getBlockRootRollupBlobData();
+const previousRollupData = await Promise.all(nonEmptyProofs.map(p => this.#getPreviousRollupData(p!)));
+const blobData = await this.#getBlockRootRollupBlobData();
 
 if (previousRollupData.length === 1) {
 return {
@@ -224,17 +224,17 @@ export class BlockProvingState {
 }
 }
 
-public getPaddingBlockRootInputs(proverId: Fr) {
+public async getPaddingBlockRootInputs(proverId: Fr) {
 if (!this.rootParityProvingOutput) {
 throw new Error('Root parity is not ready.');
 }
 
 // Use the new block header and archive of the current block as the previous header and archiver of the next padding block.
-const newBlockHeader = this.buildHeaderFromProvingOutputs();
+const newBlockHeader = await this.buildHeaderFromProvingOutputs();
 const newArchive = this.blockRootProvingOutput!.inputs.newArchive;
 
 const data = BlockRootRollupData.from({
-l1ToL2Roots: this.#getRootParityData(this.rootParityProvingOutput!),
+l1ToL2Roots: await this.#getRootParityData(this.rootParityProvingOutput!),
 l1ToL2MessageSubtreeSiblingPath: this.l1ToL2MessageSubtreeSiblingPath,
 newArchiveSiblingPath: this.newArchiveSiblingPath,
 previousBlockHeader: newBlockHeader,
@@ -244,7 +244,7 @@ export class BlockProvingState {
 const constants = ConstantRollupData.from({
 lastArchive: newArchive,
 globalVariables: this.globalVariables,
-vkTreeRoot: getVKTreeRoot(),
+vkTreeRoot: await getVKTreeRoot(),
 protocolContractTreeRoot,
 });
 
@@ -255,12 +255,12 @@ export class BlockProvingState {
 });
 }
 
-public getRootParityInputs() {
+public async getRootParityInputs() {
 if (!this.baseParityProvingOutputs.every(p => !!p)) {
 throw new Error('At lease one base parity is not ready.');
 }
 
-const children = this.baseParityProvingOutputs.map(p => this.#getRootParityData(p!));
+const children = await Promise.all(this.baseParityProvingOutputs.map(p => this.#getRootParityData(p!)));
 return new RootParityInputs(
 children as Tuple<RootParityInput<typeof RECURSIVE_PROOF_LENGTH>, typeof NUM_BASE_PARITY_PER_ROOT_PARITY>,
 );
@@ -271,9 +271,11 @@ export class BlockProvingState {
 return this.txs[txIndex];
 }
 
-public buildHeaderFromProvingOutputs(logger?: Logger) {
+public async buildHeaderFromProvingOutputs(logger?: Logger) {
 const previousRollupData =
-this.totalNumTxs === 0
+this.totalNumTxs === 0
+? []
+: await Promise.all(this.#getChildProofsForBlockRoot().map(p => this.#getPreviousRollupData(p!)));
 
 let endPartialState = this.previousBlockHeader.state.partial;
 if (this.totalNumTxs !== 0) {
@@ -324,9 +326,9 @@ export class BlockProvingState {
 this.parentEpoch.reject(reason);
 }
 
-#getBlockRootRollupData(proverId: Fr) {
+async #getBlockRootRollupData(proverId: Fr) {
 return BlockRootRollupData.from({
-l1ToL2Roots: this.#getRootParityData(this.rootParityProvingOutput!),
+l1ToL2Roots: await this.#getRootParityData(this.rootParityProvingOutput!),
 l1ToL2MessageSubtreeSiblingPath: this.l1ToL2MessageSubtreeSiblingPath,
 newArchiveSiblingPath: this.newArchiveSiblingPath,
 previousBlockHeader: this.previousBlockHeader,
@@ -334,9 +336,9 @@ export class BlockProvingState {
 });
 }
 
-#getBlockRootRollupBlobData() {
+async #getBlockRootRollupBlobData() {
 const txEffects = this.txs.map(txProvingState => txProvingState.processedTx.txEffect);
-const { blobFields, blobCommitments, blobsHash } = buildBlobHints(txEffects);
+const { blobFields, blobCommitments, blobsHash } = await buildBlobHints(txEffects);
 return BlockRootRollupBlobData.from({
 blobFields: padArrayEnd(blobFields, Fr.ZERO, FIELDS_PER_BLOB * BLOBS_PER_BLOCK),
 blobCommitments: padArrayEnd(blobCommitments, [Fr.ZERO, Fr.ZERO], BLOBS_PER_BLOCK),
@@ -356,25 +358,25 @@ export class BlockProvingState {
 : this.baseOrMergeProvingOutputs.getChildren(rootLocation);
 }
 
-#getPreviousRollupData({
+async #getPreviousRollupData({
 inputs,
 proof,
 verificationKey,
 }: PublicInputsAndRecursiveProof<BaseOrMergeRollupPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>) {
-const leafIndex = getVKIndex(verificationKey.keyAsFields);
+const leafIndex = await getVKIndex(verificationKey.keyAsFields);
 return new PreviousRollupData(
 inputs,
 proof,
 verificationKey.keyAsFields,
-new MembershipWitness(VK_TREE_HEIGHT, BigInt(leafIndex), getVKSiblingPath(leafIndex)),
+new MembershipWitness(VK_TREE_HEIGHT, BigInt(leafIndex), await getVKSiblingPath(leafIndex)),
 );
 }
 
-#getRootParityData({ inputs, proof, verificationKey }: PublicInputsAndRecursiveProof<ParityPublicInputs>) {
+async #getRootParityData({ inputs, proof, verificationKey }: PublicInputsAndRecursiveProof<ParityPublicInputs>) {
 return new RootParityInput(
 proof,
 verificationKey.keyAsFields,
-getVKSiblingPath(getVKIndex(verificationKey)),
+await getVKSiblingPath(await getVKIndex(verificationKey)),
 inputs,
 );
 }
```
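Most of the `BlockProvingState` hunks follow a single pattern: once a private helper such as `#getPreviousRollupData` or `#getRootParityData` returns a `Promise`, every method built on top of it becomes `async`, fixed pairs are awaited in turn, and mapped collections are gathered with `Promise.all`. A simplified sketch of that propagation, using invented types rather than the real rollup structures:

```ts
// Invented minimal types; this illustrates the async propagation, not the real
// BlockProvingState API.
interface ProvingOutput { id: number }
interface RollupData { fromId: number }

class ProvingStateSketch {
  constructor(private readonly outputs: ProvingOutput[]) {}

  // Fixed pair: await each child in turn.
  public async getMergeInputs(): Promise<[RollupData, RollupData]> {
    const [left, right] = this.outputs;
    return [await this.#toRollupData(left), await this.#toRollupData(right)];
  }

  // Mapped collection: resolve the promises concurrently with Promise.all.
  public async getAllInputs(): Promise<RollupData[]> {
    return Promise.all(this.outputs.map(o => this.#toRollupData(o)));
  }

  // Stand-in for #getPreviousRollupData, whose VK lookups are now async.
  async #toRollupData(output: ProvingOutput): Promise<RollupData> {
    return { fromId: output.id };
  }
}
```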
package/src/orchestrator/epoch-proving-state.ts

```diff
@@ -150,23 +150,26 @@ export class EpochProvingState {
 return this.blockRootOrMergeProvingOutputs.getParentLocation(location);
 }
 
-public getBlockMergeRollupInputs(mergeLocation: TreeNodeLocation) {
+public async getBlockMergeRollupInputs(mergeLocation: TreeNodeLocation) {
 const [left, right] = this.blockRootOrMergeProvingOutputs.getChildren(mergeLocation);
 if (!left || !right) {
 throw new Error('At lease one child is not ready.');
 }
 
-return new BlockMergeRollupInputs([
+return new BlockMergeRollupInputs([
+await this.#getPreviousRollupData(left),
+await this.#getPreviousRollupData(right),
+]);
 }
 
-public getRootRollupInputs(proverId: Fr) {
+public async getRootRollupInputs(proverId: Fr) {
 const [left, right] = this.#getChildProofsForRoot();
 if (!left || !right) {
 throw new Error('At lease one child is not ready.');
 }
 
 return RootRollupInputs.from({
-previousRollupData: [this.#getPreviousRollupData(left), this.#getPreviousRollupData(right)],
+previousRollupData: [await this.#getPreviousRollupData(left), await this.#getPreviousRollupData(right)],
 proverId,
 });
 }
@@ -238,7 +241,7 @@ export class EpochProvingState {
 : this.blockRootOrMergeProvingOutputs.getChildren(rootLocation);
 }
 
-#getPreviousRollupData({
+async #getPreviousRollupData({
 inputs,
 proof,
 verificationKey,
@@ -246,12 +249,12 @@ export class EpochProvingState {
 BlockRootOrBlockMergePublicInputs,
 typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH
 >) {
-const leafIndex = getVKIndex(verificationKey.keyAsFields);
+const leafIndex = await getVKIndex(verificationKey.keyAsFields);
 return new PreviousRollupBlockData(
 inputs,
 proof,
 verificationKey.keyAsFields,
-new MembershipWitness(VK_TREE_HEIGHT, BigInt(leafIndex), getVKSiblingPath(leafIndex)),
+new MembershipWitness(VK_TREE_HEIGHT, BigInt(leafIndex), await getVKSiblingPath(leafIndex)),
 );
 }
 }
```
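The `EpochProvingState` getters change in the same way, so consumers of these classes have to await results they previously read synchronously; the real call sites live in files such as orchestrator.ts, which this release also touches. A hypothetical, simplified caller illustrating that adjustment; `EpochStateLike` and `RootRollupInputsLike` are invented shapes, not package exports:

```ts
// Illustrative only; these shapes are not exported by @aztec/prover-client.
interface RootRollupInputsLike { proverId: string }

interface EpochStateLike {
  getRootRollupInputs(proverId: string): Promise<RootRollupInputsLike>;
}

async function enqueueRootRollup(state: EpochStateLike, proverId: string): Promise<RootRollupInputsLike> {
  // Previously a plain synchronous call; it now returns a Promise.
  const inputs = await state.getRootRollupInputs(proverId);
  // ...hand `inputs` off to the proving job here.
  return inputs;
}
```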