@aztec/prover-client 0.0.1-commit.6d3c34e → 0.0.1-commit.9372f48

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. package/dest/light/lightweight_checkpoint_builder.d.ts +11 -7
  2. package/dest/light/lightweight_checkpoint_builder.d.ts.map +1 -1
  3. package/dest/light/lightweight_checkpoint_builder.js +28 -11
  4. package/dest/mocks/fixtures.d.ts +1 -1
  5. package/dest/mocks/fixtures.d.ts.map +1 -1
  6. package/dest/mocks/fixtures.js +2 -1
  7. package/dest/mocks/test_context.d.ts +3 -2
  8. package/dest/mocks/test_context.d.ts.map +1 -1
  9. package/dest/mocks/test_context.js +6 -1
  10. package/dest/orchestrator/block-building-helpers.d.ts +1 -1
  11. package/dest/orchestrator/block-building-helpers.js +1 -1
  12. package/dest/orchestrator/checkpoint-proving-state.d.ts +15 -2
  13. package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -1
  14. package/dest/orchestrator/checkpoint-proving-state.js +34 -1
  15. package/dest/orchestrator/epoch-proving-state.d.ts +5 -4
  16. package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
  17. package/dest/orchestrator/epoch-proving-state.js +35 -1
  18. package/dest/orchestrator/orchestrator.d.ts +18 -3
  19. package/dest/orchestrator/orchestrator.d.ts.map +1 -1
  20. package/dest/orchestrator/orchestrator.js +120 -78
  21. package/dest/prover-client/prover-client.d.ts +1 -1
  22. package/dest/prover-client/prover-client.d.ts.map +1 -1
  23. package/dest/prover-client/prover-client.js +7 -4
  24. package/dest/proving_broker/broker_prover_facade.d.ts +4 -3
  25. package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
  26. package/dest/proving_broker/broker_prover_facade.js +3 -3
  27. package/dest/proving_broker/config.d.ts +5 -1
  28. package/dest/proving_broker/config.d.ts.map +1 -1
  29. package/dest/proving_broker/config.js +7 -1
  30. package/dest/proving_broker/proving_agent.d.ts +4 -3
  31. package/dest/proving_broker/proving_agent.d.ts.map +1 -1
  32. package/dest/proving_broker/proving_agent.js +4 -4
  33. package/dest/proving_broker/proving_broker_instrumentation.d.ts +1 -1
  34. package/dest/proving_broker/proving_broker_instrumentation.d.ts.map +1 -1
  35. package/dest/proving_broker/proving_broker_instrumentation.js +11 -7
  36. package/dest/proving_broker/proving_job_controller.d.ts +4 -3
  37. package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
  38. package/dest/proving_broker/proving_job_controller.js +6 -3
  39. package/dest/test/mock_proof_store.d.ts +3 -3
  40. package/dest/test/mock_proof_store.d.ts.map +1 -1
  41. package/package.json +16 -17
  42. package/src/light/lightweight_checkpoint_builder.ts +52 -12
  43. package/src/mocks/fixtures.ts +2 -1
  44. package/src/mocks/test_context.ts +5 -0
  45. package/src/orchestrator/block-building-helpers.ts +1 -1
  46. package/src/orchestrator/checkpoint-proving-state.ts +47 -1
  47. package/src/orchestrator/epoch-proving-state.ts +56 -8
  48. package/src/orchestrator/orchestrator.ts +124 -83
  49. package/src/prover-client/prover-client.ts +23 -6
  50. package/src/proving_broker/broker_prover_facade.ts +6 -3
  51. package/src/proving_broker/config.ts +9 -0
  52. package/src/proving_broker/proving_agent.ts +5 -2
  53. package/src/proving_broker/proving_broker_instrumentation.ts +10 -6
  54. package/src/proving_broker/proving_job_controller.ts +9 -3
  55. package/dest/block-factory/index.d.ts +0 -2
  56. package/dest/block-factory/index.d.ts.map +0 -1
  57. package/dest/block-factory/index.js +0 -1
  58. package/dest/block-factory/light.d.ts +0 -38
  59. package/dest/block-factory/light.d.ts.map +0 -1
  60. package/dest/block-factory/light.js +0 -106
  61. package/src/block-factory/index.ts +0 -1
  62. package/src/block-factory/light.ts +0 -136

package/src/prover-client/prover-client.ts
@@ -1,7 +1,7 @@
  import { type ACVMConfig, type BBConfig, BBNativeRollupProver, TestCircuitProver } from '@aztec/bb-prover';
  import { times } from '@aztec/foundation/collection';
  import type { EthAddress } from '@aztec/foundation/eth-address';
- import { createLogger } from '@aztec/foundation/log';
+ import { type Logger, createLogger } from '@aztec/foundation/log';
  import { NativeACVMSimulator } from '@aztec/simulator/server';
  import {
    type ActualProverConfig,
@@ -38,15 +38,29 @@ export class ProverClient implements EpochProverManager {
      private orchestratorClient: ProvingJobProducer,
      private agentClient?: ProvingJobConsumer,
      private telemetry: TelemetryClient = getTelemetryClient(),
-     private log = createLogger('prover-client:tx-prover'),
+     private log: Logger = createLogger('prover-client:tx-prover'),
    ) {
      this.proofStore = new InlineProofStore();
      this.failedProofStore = this.config.failedProofStore ? createProofStore(this.config.failedProofStore) : undefined;
    }

    public createEpochProver(): EpochProver {
-     const facade = new BrokerCircuitProverFacade(this.orchestratorClient, this.proofStore, this.failedProofStore);
-     const orchestrator = new ProvingOrchestrator(this.worldState, facade, this.config.proverId, this.telemetry);
+     const bindings = this.log.getBindings();
+     const facade = new BrokerCircuitProverFacade(
+       this.orchestratorClient,
+       this.proofStore,
+       this.failedProofStore,
+       undefined,
+       bindings,
+     );
+     const orchestrator = new ProvingOrchestrator(
+       this.worldState,
+       facade,
+       this.config.proverId,
+       this.config.cancelJobsOnStop,
+       this.telemetry,
+       bindings,
+     );
      return new ServerEpochProver(facade, orchestrator);
    }

@@ -128,9 +142,11 @@ export class ProverClient implements EpochProverManager {

      const proofStore = new InlineProofStore();
      const prover = await buildServerCircuitProver(this.config, this.telemetry);
+     const bindings = this.log.getBindings();
      this.agents = times(
        this.config.proverAgentCount,
-       () => new ProvingAgent(this.agentClient!, proofStore, prover, [], this.config.proverAgentPollIntervalMs),
+       () =>
+         new ProvingAgent(this.agentClient!, proofStore, prover, [], this.config.proverAgentPollIntervalMs, bindings),
      );

      await Promise.all(this.agents.map(agent => agent.start()));
@@ -149,8 +165,9 @@ export function buildServerCircuitProver(
    return BBNativeRollupProver.new(config, telemetry);
  }

+ const logger = createLogger('prover-client:acvm-native');
  const simulator = config.acvmBinaryPath
-   ? new NativeACVMSimulator(config.acvmWorkingDirectory, config.acvmBinaryPath)
+   ? new NativeACVMSimulator(config.acvmWorkingDirectory, config.acvmBinaryPath, undefined, logger)
    : undefined;

  return Promise.resolve(new TestCircuitProver(simulator, config, telemetry));
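
Note: the recurring change in this version is that child components no longer receive a ready-made logger; they receive the parent logger's bindings (via log.getBindings()) and create their own named logger from them, so structured context attached to prover-client:tx-prover follows the facade, the orchestrator and the agents. A minimal self-contained sketch of that hand-off pattern follows; the real Logger/LoggerBindings types live in @aztec/foundation/log and may differ, and the binding names shown (e.g. proverId) are illustrative only.

type LoggerBindings = Record<string, string | number>;

interface Logger {
  info(msg: string, data?: object): void;
  getBindings(): LoggerBindings;
}

// Hypothetical stand-in for createLogger from @aztec/foundation/log:
// every line emitted by the returned logger carries `bindings`.
function createLogger(module: string, bindings: LoggerBindings = {}): Logger {
  return {
    info: (msg, data) => console.log(JSON.stringify({ module, ...bindings, msg, ...data })),
    getBindings: () => bindings,
  };
}

class ProvingWorker {
  private log: Logger;
  constructor(bindings?: LoggerBindings) {
    // Child component: same shape as BrokerCircuitProverFacade / ProvingAgent above.
    this.log = createLogger('prover-client:worker', bindings);
  }
  work() {
    this.log.info('working'); // carries the parent's bindings as well as its own module name
  }
}

const parent = createLogger('prover-client:tx-prover', { proverId: '0x1234' });
new ProvingWorker(parent.getBindings()).work();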

package/src/proving_broker/broker_prover_facade.ts
@@ -6,7 +6,7 @@ import type {
  } from '@aztec/constants';
  import { EpochNumber } from '@aztec/foundation/branded-types';
  import { sha256 } from '@aztec/foundation/crypto/sha256';
- import { createLogger } from '@aztec/foundation/log';
+ import { type Logger, type LoggerBindings, createLogger } from '@aztec/foundation/log';
  import { type PromiseWithResolvers, RunningPromise, promiseWithResolvers } from '@aztec/foundation/promise';
  import { truncate } from '@aztec/foundation/string';
  import type { AvmCircuitInputs } from '@aztec/stdlib/avm';
@@ -68,14 +68,17 @@ export class BrokerCircuitProverFacade implements ServerCircuitProver {
    private runningPromise?: RunningPromise;
    private timeOfLastSnapshotSync = Date.now();
    private jobsToRetrieve: Set<ProvingJobId> = new Set();
+   private log: Logger;

    constructor(
      private broker: ProvingJobProducer,
      private proofStore: ProofStore = new InlineProofStore(),
      private failedProofStore?: ProofStore,
      private pollIntervalMs = 1000,
-     private log = createLogger('prover-client:broker-circuit-prover-facade'),
-   ) {}
+     bindings?: LoggerBindings,
+   ) {
+     this.log = createLogger('prover-client:broker-circuit-prover-facade', bindings);
+   }

    /**
     * This is a critical section. This function can not be async since it writes

package/src/proving_broker/config.ts
@@ -100,6 +100,8 @@ export const ProverAgentConfig = z.object({
    proverTestDelayFactor: z.number(),
    /** The delay (ms) to inject during fake proof verification */
    proverTestVerificationDelayMs: z.number().optional(),
+   /** Whether to abort pending proving jobs when the orchestrator is cancelled */
+   cancelJobsOnStop: z.boolean(),
  });

  export type ProverAgentConfig = z.infer<typeof ProverAgentConfig>;
@@ -153,4 +155,11 @@ export const proverAgentConfigMappings: ConfigMappingsType<ProverAgentConfig> =
      description: 'The delay (ms) to inject during fake proof verification',
      ...numberConfigHelper(10),
    },
+   cancelJobsOnStop: {
+     env: 'PROVER_CANCEL_JOBS_ON_STOP',
+     description:
+       'Whether to abort pending proving jobs when the orchestrator is cancelled. ' +
+       'When false (default), jobs remain in the broker queue and can be reused on restart/reorg.',
+     ...booleanConfigHelper(false),
+   },
  };
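
Note: the new cancelJobsOnStop flag is wired to the PROVER_CANCEL_JOBS_ON_STOP environment variable and defaults to false, so pending jobs stay in the broker queue across restarts and reorgs unless explicitly enabled. A sketch of the assumed env-to-boolean resolution is below; the actual booleanConfigHelper in the Aztec config machinery may behave differently.

// Assumed parsing semantics for an env-backed boolean flag such as
// PROVER_CANCEL_JOBS_ON_STOP; shown for illustration only.
function readBooleanEnv(name: string, defaultValue: boolean): boolean {
  const raw = process.env[name]?.trim().toLowerCase();
  if (raw === undefined || raw === '') {
    return defaultValue; // unset -> keep jobs queued across restarts/reorgs
  }
  return ['1', 'true', 'yes'].includes(raw);
}

const cancelJobsOnStop = readBooleanEnv('PROVER_CANCEL_JOBS_ON_STOP', false);
console.log({ cancelJobsOnStop });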

package/src/proving_broker/proving_agent.ts
@@ -1,5 +1,5 @@
  import { AbortError } from '@aztec/foundation/error';
- import { createLogger } from '@aztec/foundation/log';
+ import { type Logger, type LoggerBindings, createLogger } from '@aztec/foundation/log';
  import { RunningPromise } from '@aztec/foundation/running-promise';
  import { truncate } from '@aztec/foundation/string';
  import { ProvingError } from '@aztec/stdlib/errors';
@@ -23,6 +23,7 @@ import { ProvingJobController, ProvingJobControllerStatus } from './proving_job_
  export class ProvingAgent {
    private currentJobController?: ProvingJobController;
    private runningPromise: RunningPromise;
+   private log: Logger;

    constructor(
      /** The source of proving jobs */
@@ -35,8 +36,9 @@ export class ProvingAgent {
      private proofAllowList: Array<ProvingRequestType> = [],
      /** How long to wait between jobs */
      private pollIntervalMs = 1000,
-     private log = createLogger('prover-client:proving-agent'),
+     bindings?: LoggerBindings,
    ) {
+     this.log = createLogger('prover-client:proving-agent', bindings);
      this.runningPromise = new RunningPromise(this.work.bind(this), this.log, this.pollIntervalMs);
    }

@@ -159,6 +161,7 @@ export class ProvingAgent {
          // no need to await this here. The controller will stay alive (in DONE state) until the result is send to the broker
          void this.runningPromise.trigger();
        },
+       this.log.getBindings(),
      );

      if (abortedProofJobId) {

package/src/proving_broker/proving_broker_instrumentation.ts
@@ -8,6 +8,7 @@ import {
    type ObservableResult,
    type TelemetryClient,
    type UpDownCounter,
+   createUpDownCounterWithDefault,
  } from '@aztec/telemetry-client';

  export type MonitorCallback = (proofType: ProvingRequestType) => number;
@@ -31,17 +32,20 @@ export class ProvingBrokerInstrumentation {

      this.activeJobs = meter.createObservableGauge(Metrics.PROVING_QUEUE_ACTIVE_JOBS);

-     this.resolvedJobs = meter.createUpDownCounter(Metrics.PROVING_QUEUE_RESOLVED_JOBS);
+     const provingJobTypes = Object.values(ProvingRequestType).filter(v => typeof v === 'string');
+     const provingJobAttrs = { [Attributes.PROVING_JOB_TYPE]: provingJobTypes };

-     this.rejectedJobs = meter.createUpDownCounter(Metrics.PROVING_QUEUE_REJECTED_JOBS);
+     this.resolvedJobs = createUpDownCounterWithDefault(meter, Metrics.PROVING_QUEUE_RESOLVED_JOBS, provingJobAttrs);

-     this.retriedJobs = meter.createUpDownCounter(Metrics.PROVING_QUEUE_RETRIED_JOBS);
+     this.rejectedJobs = createUpDownCounterWithDefault(meter, Metrics.PROVING_QUEUE_REJECTED_JOBS, provingJobAttrs);

-     this.timedOutJobs = meter.createUpDownCounter(Metrics.PROVING_QUEUE_TIMED_OUT_JOBS);
+     this.retriedJobs = createUpDownCounterWithDefault(meter, Metrics.PROVING_QUEUE_RETRIED_JOBS, provingJobAttrs);

-     this.cachedJobs = meter.createUpDownCounter(Metrics.PROVING_QUEUE_CACHED_JOBS);
+     this.timedOutJobs = createUpDownCounterWithDefault(meter, Metrics.PROVING_QUEUE_TIMED_OUT_JOBS, provingJobAttrs);

-     this.totalJobs = meter.createUpDownCounter(Metrics.PROVING_QUEUE_TOTAL_JOBS);
+     this.cachedJobs = createUpDownCounterWithDefault(meter, Metrics.PROVING_QUEUE_CACHED_JOBS, provingJobAttrs);
+
+     this.totalJobs = createUpDownCounterWithDefault(meter, Metrics.PROVING_QUEUE_TOTAL_JOBS, provingJobAttrs);

      this.jobWait = meter.createHistogram(Metrics.PROVING_QUEUE_JOB_WAIT);

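
Note: the queue counters are now created with a default attribute set covering every ProvingRequestType, so each labelled series exists before its first real increment. A sketch of that pattern against the standard OpenTelemetry JS API follows, assuming createUpDownCounterWithDefault in @aztec/telemetry-client does something along these lines (the real implementation may differ).

import { type Meter, type UpDownCounter } from '@opentelemetry/api';

// Create an up/down counter and record a zero delta for every expected label,
// so dashboards see a time series for each job type from the start.
function upDownCounterWithDefaults(
  meter: Meter,
  name: string,
  defaults: Record<string, (string | number)[]>,
): UpDownCounter {
  const counter = meter.createUpDownCounter(name);
  for (const [key, values] of Object.entries(defaults)) {
    for (const value of values) {
      counter.add(0, { [key]: value }); // seed the series at 0
    }
  }
  return counter;
}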

package/src/proving_broker/proving_job_controller.ts
@@ -1,7 +1,7 @@
  import { EpochNumber } from '@aztec/foundation/branded-types';
  import { randomBytes } from '@aztec/foundation/crypto/random';
  import { AbortError } from '@aztec/foundation/error';
- import { createLogger } from '@aztec/foundation/log';
+ import { type Logger, type LoggerBindings, createLogger } from '@aztec/foundation/log';
  import type {
    ProvingJobId,
    ProvingJobInputs,
@@ -21,6 +21,7 @@ export class ProvingJobController {
    private promise?: Promise<void>;
    private abortController = new AbortController();
    private result?: ProvingJobResultsMap[ProvingRequestType] | Error;
+   private log: Logger;

    constructor(
      private jobId: ProvingJobId,
@@ -29,8 +30,13 @@ export class ProvingJobController {
      private startedAt: number,
      private circuitProver: ServerCircuitProver,
      private onComplete: () => void,
-     private log = createLogger('prover-client:proving-agent:job-controller-' + randomBytes(4).toString('hex')),
-   ) {}
+     bindings?: LoggerBindings,
+   ) {
+     this.log = createLogger('prover-client:proving-agent:job-controller', {
+       instanceId: randomBytes(4).toString('hex'),
+       ...bindings,
+     });
+   }

    public start(): void {
      if (this.status !== ProvingJobControllerStatus.IDLE) {
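
Note: the random suffix that used to be baked into the job controller's logger name is now a structured instanceId binding merged with the bindings inherited from the agent. A small sketch of that merge; binding names other than instanceId are illustrative.

import { randomBytes } from 'crypto';

type LoggerBindings = Record<string, string | number>;

// Mirror of the merge order used above: inherited bindings are spread last,
// so an inherited key of the same name would win over the fresh instanceId.
function controllerBindings(inherited: LoggerBindings = {}): LoggerBindings {
  return { instanceId: randomBytes(4).toString('hex'), ...inherited };
}

console.log(controllerBindings({ proverId: '0x1234' }));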

package/dest/block-factory/index.d.ts
@@ -1,2 +0,0 @@
- export * from './light.js';
- //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguZC50cyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy9ibG9jay1mYWN0b3J5L2luZGV4LnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLGNBQWMsWUFBWSxDQUFDIn0=

package/dest/block-factory/index.d.ts.map
@@ -1 +0,0 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/block-factory/index.ts"],"names":[],"mappings":"AAAA,cAAc,YAAY,CAAC"}

package/dest/block-factory/index.js
@@ -1 +0,0 @@
- export * from './light.js';

package/dest/block-factory/light.d.ts
@@ -1,38 +0,0 @@
- import { Fr } from '@aztec/foundation/curves/bn254';
- import { L2Block } from '@aztec/stdlib/block';
- import type { IBlockFactory, MerkleTreeWriteOperations } from '@aztec/stdlib/interfaces/server';
- import type { GlobalVariables, ProcessedTx } from '@aztec/stdlib/tx';
- import { type TelemetryClient } from '@aztec/telemetry-client';
- /**
-  * Builds a block and its header from a set of processed tx without running any circuits.
-  *
-  * NOTE: the onus is ON THE CALLER to update the db that is passed in with the notes hashes, nullifiers, etc
-  * PRIOR to calling `buildBlock`.
-  *
-  * Why? Because if you are, e.g. building a block in practice from TxObjects, you are using the
-  * PublicProcessor which will do this for you as it processes transactions.
-  *
-  * If you haven't already inserted the side effects, e.g. because you are in a testing context, you can use the helper
-  * function `buildBlockWithCleanDB`, which calls `insertSideEffects` for you.
-  *
-  * @deprecated Use LightweightCheckpointBuilder instead. This only works for one block per checkpoint.
-  */
- export declare class LightweightBlockFactory implements IBlockFactory {
-     private db;
-     private telemetry;
-     private globalVariables?;
-     private l1ToL2Messages?;
-     private txs;
-     private readonly logger;
-     constructor(db: MerkleTreeWriteOperations, telemetry?: TelemetryClient);
-     startNewBlock(globalVariables: GlobalVariables, l1ToL2Messages: Fr[]): Promise<void>;
-     addTxs(txs: ProcessedTx[]): Promise<void>;
-     setBlockCompleted(): Promise<L2Block>;
-     private buildBlock;
- }
- /**
-  * Inserts the processed transactions into the DB, then creates a block.
-  * @param db - A db fork to use for block building which WILL BE MODIFIED.
-  */
- export declare function buildBlockWithCleanDB(txs: ProcessedTx[], globalVariables: GlobalVariables, l1ToL2Messages: Fr[], db: MerkleTreeWriteOperations, telemetry?: TelemetryClient): Promise<L2Block>;
- //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoibGlnaHQuZC50cyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy9ibG9jay1mYWN0b3J5L2xpZ2h0LnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUdBLE9BQU8sRUFBRSxFQUFFLEVBQUUsTUFBTSxnQ0FBZ0MsQ0FBQztBQUVwRCxPQUFPLEVBQUUsT0FBTyxFQUFpQixNQUFNLHFCQUFxQixDQUFDO0FBQzdELE9BQU8sS0FBSyxFQUFFLGFBQWEsRUFBRSx5QkFBeUIsRUFBRSxNQUFNLGlDQUFpQyxDQUFDO0FBR2hHLE9BQU8sS0FBSyxFQUFFLGVBQWUsRUFBRSxXQUFXLEVBQUUsTUFBTSxrQkFBa0IsQ0FBQztBQUNyRSxPQUFPLEVBQUUsS0FBSyxlQUFlLEVBQXNCLE1BQU0seUJBQXlCLENBQUM7QUFRbkY7Ozs7Ozs7Ozs7Ozs7R0FhRztBQUNILHFCQUFhLHVCQUF3QixZQUFXLGFBQWE7SUFRekQsT0FBTyxDQUFDLEVBQUU7SUFDVixPQUFPLENBQUMsU0FBUztJQVJuQixPQUFPLENBQUMsZUFBZSxDQUFDLENBQWtCO0lBQzFDLE9BQU8sQ0FBQyxjQUFjLENBQUMsQ0FBTztJQUM5QixPQUFPLENBQUMsR0FBRyxDQUE0QjtJQUV2QyxPQUFPLENBQUMsUUFBUSxDQUFDLE1BQU0sQ0FBNkM7SUFFcEUsWUFDVSxFQUFFLEVBQUUseUJBQXlCLEVBQzdCLFNBQVMsR0FBRSxlQUFzQyxFQUN2RDtJQUVFLGFBQWEsQ0FBQyxlQUFlLEVBQUUsZUFBZSxFQUFFLGNBQWMsRUFBRSxFQUFFLEVBQUUsR0FBRyxPQUFPLENBQUMsSUFBSSxDQUFDLENBT3pGO0lBRUQsTUFBTSxDQUFDLEdBQUcsRUFBRSxXQUFXLEVBQUUsR0FBRyxPQUFPLENBQUMsSUFBSSxDQUFDLENBU3hDO0lBRUQsaUJBQWlCLElBQUksT0FBTyxDQUFDLE9BQU8sQ0FBQyxDQUVwQztZQUVhLFVBQVU7Q0E2Q3pCO0FBRUQ7OztHQUdHO0FBQ0gsd0JBQXNCLHFCQUFxQixDQUN6QyxHQUFHLEVBQUUsV0FBVyxFQUFFLEVBQ2xCLGVBQWUsRUFBRSxlQUFlLEVBQ2hDLGNBQWMsRUFBRSxFQUFFLEVBQUUsRUFDcEIsRUFBRSxFQUFFLHlCQUF5QixFQUM3QixTQUFTLEdBQUUsZUFBc0Msb0JBV2xEIn0=

package/dest/block-factory/light.d.ts.map
@@ -1 +0,0 @@
- {"version":3,"file":"light.d.ts","sourceRoot":"","sources":["../../src/block-factory/light.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,EAAE,EAAE,MAAM,gCAAgC,CAAC;AAEpD,OAAO,EAAE,OAAO,EAAiB,MAAM,qBAAqB,CAAC;AAC7D,OAAO,KAAK,EAAE,aAAa,EAAE,yBAAyB,EAAE,MAAM,iCAAiC,CAAC;AAGhG,OAAO,KAAK,EAAE,eAAe,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AACrE,OAAO,EAAE,KAAK,eAAe,EAAsB,MAAM,yBAAyB,CAAC;AAQnF;;;;;;;;;;;;;GAaG;AACH,qBAAa,uBAAwB,YAAW,aAAa;IAQzD,OAAO,CAAC,EAAE;IACV,OAAO,CAAC,SAAS;IARnB,OAAO,CAAC,eAAe,CAAC,CAAkB;IAC1C,OAAO,CAAC,cAAc,CAAC,CAAO;IAC9B,OAAO,CAAC,GAAG,CAA4B;IAEvC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAA6C;IAEpE,YACU,EAAE,EAAE,yBAAyB,EAC7B,SAAS,GAAE,eAAsC,EACvD;IAEE,aAAa,CAAC,eAAe,EAAE,eAAe,EAAE,cAAc,EAAE,EAAE,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAOzF;IAED,MAAM,CAAC,GAAG,EAAE,WAAW,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CASxC;IAED,iBAAiB,IAAI,OAAO,CAAC,OAAO,CAAC,CAEpC;YAEa,UAAU;CA6CzB;AAED;;;GAGG;AACH,wBAAsB,qBAAqB,CACzC,GAAG,EAAE,WAAW,EAAE,EAClB,eAAe,EAAE,eAAe,EAChC,cAAc,EAAE,EAAE,EAAE,EACpB,EAAE,EAAE,yBAAyB,EAC7B,SAAS,GAAE,eAAsC,oBAWlD"}

package/dest/block-factory/light.js
@@ -1,106 +0,0 @@
- import { SpongeBlob, computeBlobsHashFromBlobs, encodeCheckpointEndMarker, getBlobsPerL1Block } from '@aztec/blob-lib';
- import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/constants';
- import { padArrayEnd } from '@aztec/foundation/collection';
- import { Fr } from '@aztec/foundation/curves/bn254';
- import { createLogger } from '@aztec/foundation/log';
- import { L2Block, L2BlockHeader } from '@aztec/stdlib/block';
- import { computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging';
- import { MerkleTreeId } from '@aztec/stdlib/trees';
- import { getTelemetryClient } from '@aztec/telemetry-client';
- import { buildHeaderAndBodyFromTxs, getTreeSnapshot, insertSideEffects } from '../orchestrator/block-building-helpers.js';
- /**
-  * Builds a block and its header from a set of processed tx without running any circuits.
-  *
-  * NOTE: the onus is ON THE CALLER to update the db that is passed in with the notes hashes, nullifiers, etc
-  * PRIOR to calling `buildBlock`.
-  *
-  * Why? Because if you are, e.g. building a block in practice from TxObjects, you are using the
-  * PublicProcessor which will do this for you as it processes transactions.
-  *
-  * If you haven't already inserted the side effects, e.g. because you are in a testing context, you can use the helper
-  * function `buildBlockWithCleanDB`, which calls `insertSideEffects` for you.
-  *
-  * @deprecated Use LightweightCheckpointBuilder instead. This only works for one block per checkpoint.
-  */ export class LightweightBlockFactory {
-     db;
-     telemetry;
-     globalVariables;
-     l1ToL2Messages;
-     txs;
-     logger;
-     constructor(db, telemetry = getTelemetryClient()){
-         this.db = db;
-         this.telemetry = telemetry;
-         this.logger = createLogger('lightweight-block-factory');
-     }
-     async startNewBlock(globalVariables, l1ToL2Messages) {
-         this.logger.debug('Starting new block', {
-             globalVariables: globalVariables.toInspect(),
-             l1ToL2Messages
-         });
-         this.globalVariables = globalVariables;
-         this.l1ToL2Messages = padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
-         this.txs = undefined;
-         // Update L1 to L2 tree
-         await this.db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, this.l1ToL2Messages);
-     }
-     addTxs(txs) {
-         // Most times, `addTxs` is only called once per block.
-         // So avoid copies.
-         if (this.txs === undefined) {
-             this.txs = txs;
-         } else {
-             this.txs.push(...txs);
-         }
-         return Promise.resolve();
-     }
-     setBlockCompleted() {
-         return this.buildBlock();
-     }
-     async buildBlock() {
-         const lastArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.db);
-         const state = await this.db.getStateReference();
-         const txs = this.txs ?? [];
-         const startSpongeBlob = SpongeBlob.init();
-         const { header, body, blockBlobFields } = await buildHeaderAndBodyFromTxs(txs, lastArchive, state, this.globalVariables, startSpongeBlob, true);
-         header.state.validate();
-         await this.db.updateArchive(header);
-         const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.db);
-         const inHash = computeInHashFromL1ToL2Messages(this.l1ToL2Messages);
-         const numBlobFields = blockBlobFields.length + 1;
-         const blobFields = blockBlobFields.concat([
-             encodeCheckpointEndMarker({
-                 numBlobFields
-             })
-         ]);
-         const blobsHash = computeBlobsHashFromBlobs(getBlobsPerL1Block(blobFields));
-         const blockHeaderHash = await header.hash();
-         const l2BlockHeader = L2BlockHeader.from({
-             ...header,
-             blockHeadersHash: blockHeaderHash,
-             blobsHash,
-             inHash
-         });
-         const block = new L2Block(newArchive, l2BlockHeader, body);
-         this.logger.debug(`Built block ${block.number}`, {
-             globalVariables: this.globalVariables?.toInspect(),
-             archiveRoot: newArchive.root.toString(),
-             stateReference: header.state.toInspect(),
-             blockHash: (await block.hash()).toString(),
-             txs: block.body.txEffects.map((tx)=>tx.txHash.toString())
-         });
-         return block;
-     }
- }
- /**
-  * Inserts the processed transactions into the DB, then creates a block.
-  * @param db - A db fork to use for block building which WILL BE MODIFIED.
-  */ export async function buildBlockWithCleanDB(txs, globalVariables, l1ToL2Messages, db, telemetry = getTelemetryClient()) {
-     const builder = new LightweightBlockFactory(db, telemetry);
-     await builder.startNewBlock(globalVariables, l1ToL2Messages);
-     for (const tx of txs){
-         await insertSideEffects(tx, db);
-     }
-     await builder.addTxs(txs);
-     return await builder.setBlockCompleted();
- }

package/src/block-factory/index.ts
@@ -1 +0,0 @@
- export * from './light.js';

package/src/block-factory/light.ts
@@ -1,136 +0,0 @@
- import { SpongeBlob, computeBlobsHashFromBlobs, encodeCheckpointEndMarker, getBlobsPerL1Block } from '@aztec/blob-lib';
- import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/constants';
- import { padArrayEnd } from '@aztec/foundation/collection';
- import { Fr } from '@aztec/foundation/curves/bn254';
- import { createLogger } from '@aztec/foundation/log';
- import { L2Block, L2BlockHeader } from '@aztec/stdlib/block';
- import type { IBlockFactory, MerkleTreeWriteOperations } from '@aztec/stdlib/interfaces/server';
- import { computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging';
- import { MerkleTreeId } from '@aztec/stdlib/trees';
- import type { GlobalVariables, ProcessedTx } from '@aztec/stdlib/tx';
- import { type TelemetryClient, getTelemetryClient } from '@aztec/telemetry-client';
-
- import {
-   buildHeaderAndBodyFromTxs,
-   getTreeSnapshot,
-   insertSideEffects,
- } from '../orchestrator/block-building-helpers.js';
-
- /**
-  * Builds a block and its header from a set of processed tx without running any circuits.
-  *
-  * NOTE: the onus is ON THE CALLER to update the db that is passed in with the notes hashes, nullifiers, etc
-  * PRIOR to calling `buildBlock`.
-  *
-  * Why? Because if you are, e.g. building a block in practice from TxObjects, you are using the
-  * PublicProcessor which will do this for you as it processes transactions.
-  *
-  * If you haven't already inserted the side effects, e.g. because you are in a testing context, you can use the helper
-  * function `buildBlockWithCleanDB`, which calls `insertSideEffects` for you.
-  *
-  * @deprecated Use LightweightCheckpointBuilder instead. This only works for one block per checkpoint.
-  */
- export class LightweightBlockFactory implements IBlockFactory {
-   private globalVariables?: GlobalVariables;
-   private l1ToL2Messages?: Fr[];
-   private txs: ProcessedTx[] | undefined;
-
-   private readonly logger = createLogger('lightweight-block-factory');
-
-   constructor(
-     private db: MerkleTreeWriteOperations,
-     private telemetry: TelemetryClient = getTelemetryClient(),
-   ) {}
-
-   async startNewBlock(globalVariables: GlobalVariables, l1ToL2Messages: Fr[]): Promise<void> {
-     this.logger.debug('Starting new block', { globalVariables: globalVariables.toInspect(), l1ToL2Messages });
-     this.globalVariables = globalVariables;
-     this.l1ToL2Messages = padArrayEnd<Fr, number>(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
-     this.txs = undefined;
-     // Update L1 to L2 tree
-     await this.db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, this.l1ToL2Messages!);
-   }
-
-   addTxs(txs: ProcessedTx[]): Promise<void> {
-     // Most times, `addTxs` is only called once per block.
-     // So avoid copies.
-     if (this.txs === undefined) {
-       this.txs = txs;
-     } else {
-       this.txs.push(...txs);
-     }
-     return Promise.resolve();
-   }
-
-   setBlockCompleted(): Promise<L2Block> {
-     return this.buildBlock();
-   }
-
-   private async buildBlock(): Promise<L2Block> {
-     const lastArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.db);
-     const state = await this.db.getStateReference();
-
-     const txs = this.txs ?? [];
-     const startSpongeBlob = SpongeBlob.init();
-
-     const { header, body, blockBlobFields } = await buildHeaderAndBodyFromTxs(
-       txs,
-       lastArchive,
-       state,
-       this.globalVariables!,
-       startSpongeBlob,
-       true,
-     );
-
-     header.state.validate();
-
-     await this.db.updateArchive(header);
-     const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.db);
-
-     const inHash = computeInHashFromL1ToL2Messages(this.l1ToL2Messages!);
-     const numBlobFields = blockBlobFields.length + 1;
-     const blobFields = blockBlobFields.concat([encodeCheckpointEndMarker({ numBlobFields })]);
-     const blobsHash = computeBlobsHashFromBlobs(getBlobsPerL1Block(blobFields));
-     const blockHeaderHash = await header.hash();
-     const l2BlockHeader = L2BlockHeader.from({
-       ...header,
-       blockHeadersHash: blockHeaderHash,
-       blobsHash,
-       inHash,
-     });
-
-     const block = new L2Block(newArchive, l2BlockHeader, body);
-
-     this.logger.debug(`Built block ${block.number}`, {
-       globalVariables: this.globalVariables?.toInspect(),
-       archiveRoot: newArchive.root.toString(),
-       stateReference: header.state.toInspect(),
-       blockHash: (await block.hash()).toString(),
-       txs: block.body.txEffects.map(tx => tx.txHash.toString()),
-     });
-
-     return block;
-   }
- }
-
- /**
-  * Inserts the processed transactions into the DB, then creates a block.
-  * @param db - A db fork to use for block building which WILL BE MODIFIED.
-  */
- export async function buildBlockWithCleanDB(
-   txs: ProcessedTx[],
-   globalVariables: GlobalVariables,
-   l1ToL2Messages: Fr[],
-   db: MerkleTreeWriteOperations,
-   telemetry: TelemetryClient = getTelemetryClient(),
- ) {
-   const builder = new LightweightBlockFactory(db, telemetry);
-   await builder.startNewBlock(globalVariables, l1ToL2Messages);
-
-   for (const tx of txs) {
-     await insertSideEffects(tx, db);
-   }
-   await builder.addTxs(txs);
-
-   return await builder.setBlockCompleted();
- }