@aztec/prover-client 0.0.1-commit.c7c42ec → 0.0.1-commit.f295ac2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. package/dest/light/lightweight_checkpoint_builder.d.ts +12 -4
  2. package/dest/light/lightweight_checkpoint_builder.d.ts.map +1 -1
  3. package/dest/light/lightweight_checkpoint_builder.js +55 -8
  4. package/dest/mocks/test_context.d.ts +4 -2
  5. package/dest/mocks/test_context.d.ts.map +1 -1
  6. package/dest/mocks/test_context.js +14 -3
  7. package/dest/orchestrator/block-building-helpers.d.ts +1 -1
  8. package/dest/orchestrator/block-building-helpers.js +1 -1
  9. package/dest/orchestrator/checkpoint-proving-state.d.ts +15 -2
  10. package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -1
  11. package/dest/orchestrator/checkpoint-proving-state.js +34 -1
  12. package/dest/orchestrator/epoch-proving-state.d.ts +5 -4
  13. package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
  14. package/dest/orchestrator/epoch-proving-state.js +35 -1
  15. package/dest/orchestrator/orchestrator.d.ts +11 -1
  16. package/dest/orchestrator/orchestrator.d.ts.map +1 -1
  17. package/dest/orchestrator/orchestrator.js +446 -38
  18. package/dest/orchestrator/orchestrator_metrics.d.ts +1 -1
  19. package/dest/orchestrator/orchestrator_metrics.d.ts.map +1 -1
  20. package/dest/orchestrator/orchestrator_metrics.js +2 -6
  21. package/dest/orchestrator/tx-proving-state.d.ts +5 -4
  22. package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
  23. package/dest/orchestrator/tx-proving-state.js +6 -6
  24. package/dest/prover-client/prover-client.d.ts +1 -1
  25. package/dest/prover-client/prover-client.d.ts.map +1 -1
  26. package/dest/prover-client/prover-client.js +1 -1
  27. package/dest/proving_broker/broker_prover_facade.d.ts +4 -3
  28. package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
  29. package/dest/proving_broker/proving_agent.d.ts +3 -8
  30. package/dest/proving_broker/proving_agent.d.ts.map +1 -1
  31. package/dest/proving_broker/proving_agent.js +1 -16
  32. package/dest/proving_broker/proving_broker.d.ts +1 -1
  33. package/dest/proving_broker/proving_broker.d.ts.map +1 -1
  34. package/dest/proving_broker/proving_broker.js +1 -10
  35. package/dest/proving_broker/proving_broker_database/persisted.d.ts +3 -2
  36. package/dest/proving_broker/proving_broker_database/persisted.d.ts.map +1 -1
  37. package/dest/proving_broker/proving_broker_database/persisted.js +389 -1
  38. package/dest/proving_broker/proving_broker_instrumentation.d.ts +1 -1
  39. package/dest/proving_broker/proving_broker_instrumentation.d.ts.map +1 -1
  40. package/dest/proving_broker/proving_broker_instrumentation.js +11 -35
  41. package/dest/test/mock_proof_store.d.ts +3 -3
  42. package/dest/test/mock_proof_store.d.ts.map +1 -1
  43. package/dest/test/mock_prover.d.ts +2 -2
  44. package/dest/test/mock_prover.d.ts.map +1 -1
  45. package/dest/test/mock_prover.js +3 -3
  46. package/package.json +16 -17
  47. package/src/light/lightweight_checkpoint_builder.ts +84 -8
  48. package/src/mocks/test_context.ts +11 -1
  49. package/src/orchestrator/block-building-helpers.ts +1 -1
  50. package/src/orchestrator/checkpoint-proving-state.ts +47 -1
  51. package/src/orchestrator/epoch-proving-state.ts +56 -8
  52. package/src/orchestrator/orchestrator.ts +40 -13
  53. package/src/orchestrator/orchestrator_metrics.ts +2 -6
  54. package/src/orchestrator/tx-proving-state.ts +8 -11
  55. package/src/prover-client/prover-client.ts +1 -9
  56. package/src/proving_broker/broker_prover_facade.ts +2 -3
  57. package/src/proving_broker/proving_agent.ts +1 -17
  58. package/src/proving_broker/proving_broker.ts +1 -8
  59. package/src/proving_broker/proving_broker_database/persisted.ts +15 -1
  60. package/src/proving_broker/proving_broker_instrumentation.ts +10 -35
  61. package/src/test/mock_prover.ts +1 -8
  62. package/dest/block-factory/index.d.ts +0 -2
  63. package/dest/block-factory/index.d.ts.map +0 -1
  64. package/dest/block-factory/index.js +0 -1
  65. package/dest/block-factory/light.d.ts +0 -38
  66. package/dest/block-factory/light.d.ts.map +0 -1
  67. package/dest/block-factory/light.js +0 -108
  68. package/dest/proving_broker/proving_agent_instrumentation.d.ts +0 -8
  69. package/dest/proving_broker/proving_agent_instrumentation.d.ts.map +0 -1
  70. package/dest/proving_broker/proving_agent_instrumentation.js +0 -16
  71. package/src/block-factory/index.ts +0 -1
  72. package/src/block-factory/light.ts +0 -137
  73. package/src/proving_broker/proving_agent_instrumentation.ts +0 -21
@@ -13,27 +13,16 @@ import type {
   ServerCircuitProver,
 } from '@aztec/stdlib/interfaces/server';
 import { ProvingRequestType } from '@aztec/stdlib/proofs';
-import {
-  type TelemetryClient,
-  type Traceable,
-  type Tracer,
-  getTelemetryClient,
-  trackSpan,
-} from '@aztec/telemetry-client';

 import type { ProofStore } from './proof_store/index.js';
-import { ProvingAgentInstrumentation } from './proving_agent_instrumentation.js';
 import { ProvingJobController, ProvingJobControllerStatus } from './proving_job_controller.js';

 /**
  * A helper class that encapsulates a circuit prover and connects it to a job source.
  */
-export class ProvingAgent implements Traceable {
+export class ProvingAgent {
   private currentJobController?: ProvingJobController;
   private runningPromise: RunningPromise;
-  private instrumentation: ProvingAgentInstrumentation;
-
-  public readonly tracer: Tracer;

   constructor(
     /** The source of proving jobs */
@@ -46,12 +35,8 @@ export class ProvingAgent implements Traceable {
     private proofAllowList: Array<ProvingRequestType> = [],
     /** How long to wait between jobs */
     private pollIntervalMs = 1000,
-    /** A telemetry client through which to emit metrics */
-    client: TelemetryClient = getTelemetryClient(),
     private log = createLogger('prover-client:proving-agent'),
   ) {
-    this.tracer = client.getTracer('ProvingAgent');
-    this.instrumentation = new ProvingAgentInstrumentation(client);
     this.runningPromise = new RunningPromise(this.work.bind(this), this.log, this.pollIntervalMs);
   }

@@ -85,7 +70,6 @@ export class ProvingAgent implements Traceable {
     return this.runningPromise.isRunning() ? { status: 'running' } : { status: 'stopped' };
   }

-  @trackSpan('ProvingAgent.safeWork')
   private async work() {
     // every tick we need to take one of the following actions:
     // 1. send a hearbeat to the broker that we're working on some job
@@ -16,13 +16,7 @@ import {
   tryStop,
 } from '@aztec/stdlib/interfaces/server';
 import { ProvingRequestType } from '@aztec/stdlib/proofs';
-import {
-  type TelemetryClient,
-  type Traceable,
-  type Tracer,
-  getTelemetryClient,
-  trackSpan,
-} from '@aztec/telemetry-client';
+import { type TelemetryClient, type Traceable, type Tracer, getTelemetryClient } from '@aztec/telemetry-client';

 import assert from 'assert';

@@ -565,7 +559,6 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer, Tr
     return this.#getProvingJob(filter);
   }

-  @trackSpan('ProvingBroker.cleanupPass')
   private async cleanupPass() {
     this.cleanupStaleJobs();
     this.reEnqueueExpiredJobs();
@@ -11,7 +11,14 @@ import {
   ProvingJobSettledResult,
   getEpochFromProvingJobId,
 } from '@aztec/stdlib/interfaces/server';
-import { Attributes, LmdbMetrics, type TelemetryClient, getTelemetryClient } from '@aztec/telemetry-client';
+import {
+  Attributes,
+  LmdbMetrics,
+  type TelemetryClient,
+  type Tracer,
+  getTelemetryClient,
+  trackSpan,
+} from '@aztec/telemetry-client';

 import { mkdir, readdir } from 'fs/promises';
 import { join } from 'path';
@@ -78,6 +85,8 @@ export class KVBrokerDatabase implements ProvingBrokerDatabase {

   private batchQueue: BatchQueue<ProvingJob | [ProvingJobId, ProvingJobSettledResult], number>;

+  public readonly tracer: Tracer;
+
   private constructor(
     private epochs: Map<number, SingleEpochDatabase>,
     private config: ProverBrokerConfig,
@@ -92,6 +101,8 @@ export class KVBrokerDatabase implements ProvingBrokerDatabase {
       () => this.estimateSize(),
     );

+    this.tracer = client.getTracer('KVBrokerDatabase');
+
     this.batchQueue = new BatchQueue(
       (items, key) => this.commitWrites(items, key),
       config.proverBrokerBatchSize,
@@ -165,6 +176,9 @@ export class KVBrokerDatabase implements ProvingBrokerDatabase {
     }
   }

+  @trackSpan('KVBrokerDatabase.deleteAllProvingJobsOlderThanEpoch', epochNumber => ({
+    [Attributes.EPOCH_NUMBER]: epochNumber,
+  }))
   async deleteAllProvingJobsOlderThanEpoch(epochNumber: EpochNumber): Promise<void> {
     const oldEpochs = Array.from(this.epochs.keys()).filter(e => e < Number(epochNumber));
     for (const old of oldEpochs) {
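Note on the tracing additions above: trackSpan is applied as a method decorator whose first argument names the span and whose second maps the method's arguments onto span attributes (here Attributes.EPOCH_NUMBER), while the class gains a public tracer obtained from the TelemetryClient, which the decorator presumably resolves from the instance. Below is a minimal sketch of that pattern using only the @aztec/telemetry-client surface visible in this diff; ExampleStore and pruneBefore are hypothetical names used purely for illustration.

import {
  Attributes,
  type TelemetryClient,
  type Tracer,
  getTelemetryClient,
  trackSpan,
} from '@aztec/telemetry-client';

class ExampleStore {
  // trackSpan-decorated methods need a tracer on the instance (assumed, and
  // consistent with the tracer property this diff adds alongside the decorator).
  public readonly tracer: Tracer;

  constructor(client: TelemetryClient = getTelemetryClient()) {
    this.tracer = client.getTracer('ExampleStore');
  }

  // First argument: span name. Second: maps the call's arguments to span attributes.
  @trackSpan('ExampleStore.pruneBefore', epochNumber => ({
    [Attributes.EPOCH_NUMBER]: epochNumber,
  }))
  async pruneBefore(epochNumber: number): Promise<void> {
    // ... delete data recorded before `epochNumber` ...
  }
}

The same pairing of a tracer property with trackSpan is what the earlier hunks remove from ProvingAgent and ProvingBroker.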
@@ -8,7 +8,6 @@ import {
   type ObservableResult,
   type TelemetryClient,
   type UpDownCounter,
-  ValueType,
 } from '@aztec/telemetry-client';

 export type MonitorCallback = (proofType: ProvingRequestType) => number;
@@ -28,49 +27,25 @@ export class ProvingBrokerInstrumentation {
   constructor(client: TelemetryClient, name = 'ProvingBroker') {
     const meter = client.getMeter(name);

-    this.queueSize = meter.createObservableGauge(Metrics.PROVING_QUEUE_SIZE, {
-      valueType: ValueType.INT,
-    });
+    this.queueSize = meter.createObservableGauge(Metrics.PROVING_QUEUE_SIZE);

-    this.activeJobs = meter.createObservableGauge(Metrics.PROVING_QUEUE_ACTIVE_JOBS, {
-      valueType: ValueType.INT,
-    });
+    this.activeJobs = meter.createObservableGauge(Metrics.PROVING_QUEUE_ACTIVE_JOBS);

-    this.resolvedJobs = meter.createUpDownCounter(Metrics.PROVING_QUEUE_RESOLVED_JOBS, {
-      valueType: ValueType.INT,
-    });
+    this.resolvedJobs = meter.createUpDownCounter(Metrics.PROVING_QUEUE_RESOLVED_JOBS);

-    this.rejectedJobs = meter.createUpDownCounter(Metrics.PROVING_QUEUE_REJECTED_JOBS, {
-      valueType: ValueType.INT,
-    });
+    this.rejectedJobs = meter.createUpDownCounter(Metrics.PROVING_QUEUE_REJECTED_JOBS);

-    this.retriedJobs = meter.createUpDownCounter(Metrics.PROVING_QUEUE_RETRIED_JOBS, {
-      valueType: ValueType.INT,
-    });
+    this.retriedJobs = meter.createUpDownCounter(Metrics.PROVING_QUEUE_RETRIED_JOBS);

-    this.timedOutJobs = meter.createUpDownCounter(Metrics.PROVING_QUEUE_TIMED_OUT_JOBS, {
-      valueType: ValueType.INT,
-    });
+    this.timedOutJobs = meter.createUpDownCounter(Metrics.PROVING_QUEUE_TIMED_OUT_JOBS);

-    this.cachedJobs = meter.createUpDownCounter(Metrics.PROVING_QUEUE_CACHED_JOBS, {
-      valueType: ValueType.INT,
-    });
+    this.cachedJobs = meter.createUpDownCounter(Metrics.PROVING_QUEUE_CACHED_JOBS);

-    this.totalJobs = meter.createUpDownCounter(Metrics.PROVING_QUEUE_TOTAL_JOBS, {
-      valueType: ValueType.INT,
-    });
+    this.totalJobs = meter.createUpDownCounter(Metrics.PROVING_QUEUE_TOTAL_JOBS);

-    this.jobWait = meter.createHistogram(Metrics.PROVING_QUEUE_JOB_WAIT, {
-      description: 'Records how long a job sits in the queue',
-      unit: 'ms',
-      valueType: ValueType.INT,
-    });
+    this.jobWait = meter.createHistogram(Metrics.PROVING_QUEUE_JOB_WAIT);

-    this.jobDuration = meter.createHistogram(Metrics.PROVING_QUEUE_JOB_DURATION, {
-      description: 'Records how long a job takes to complete',
-      unit: 'ms',
-      valueType: ValueType.INT,
-    });
+    this.jobDuration = meter.createHistogram(Metrics.PROVING_QUEUE_JOB_DURATION);
   }

   monitorQueueDepth(fn: MonitorCallback) {
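The instrumentation hunk above drops the per-metric options objects (valueType, unit, description), leaving only the metric name from the Metrics registry, while the gauges continue to be fed through callbacks registered via monitorQueueDepth. Since that method's body is not part of this diff, the following is only a rough sketch of the usual observable-gauge wiring, written against the upstream @opentelemetry/api rather than @aztec/telemetry-client (which imports a similarly named ObservableResult type above); the metric name and attribute key are placeholders.

import { metrics, type ObservableResult } from '@opentelemetry/api';

type MonitorCallback = (proofType: string) => number;

const meter = metrics.getMeter('ProvingBroker');
// No options object: description, unit and value type are left to defaults here.
const queueSize = meter.createObservableGauge('aztec.proving_queue.size');

function monitorQueueDepth(fn: MonitorCallback) {
  // The SDK invokes this callback on each collection cycle and records
  // whatever the monitor reports at that moment.
  queueSize.addCallback((result: ObservableResult) => {
    result.observe(fn('BASE_PARITY'), { 'proof.type': 'BASE_PARITY' });
  });
}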
@@ -1,6 +1,5 @@
 import {
   AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED,
-  AVM_V2_VERIFICATION_KEY_LENGTH_IN_FIELDS_PADDED,
   NESTED_RECURSIVE_PROOF_LENGTH,
   NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH,
   RECURSIVE_PROOF_LENGTH,
@@ -14,7 +13,6 @@ import {
   type ProvingJobStatus,
   type PublicInputsAndRecursiveProof,
   type ServerCircuitProver,
-  makeProofAndVerificationKey,
   makePublicInputsAndRecursiveProof,
 } from '@aztec/stdlib/interfaces/server';
 import type { ParityBasePrivateInputs, ParityRootPrivateInputs } from '@aztec/stdlib/parity';
@@ -106,12 +104,7 @@ export class MockProver implements ServerCircuitProver {
   constructor() {}

   getAvmProof(_inputs: AvmCircuitInputs, _signal?: AbortSignal, _epochNumber?: number) {
-    return Promise.resolve(
-      makeProofAndVerificationKey(
-        makeEmptyRecursiveProof(AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED),
-        VerificationKeyData.makeFake(AVM_V2_VERIFICATION_KEY_LENGTH_IN_FIELDS_PADDED),
-      ),
-    );
+    return Promise.resolve(makeEmptyRecursiveProof(AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED));
   }

   getBaseParityProof(_inputs: ParityBasePrivateInputs, _signal?: AbortSignal, _epochNumber?: number) {
@@ -1,2 +0,0 @@
-export * from './light.js';
- //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguZC50cyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy9ibG9jay1mYWN0b3J5L2luZGV4LnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLGNBQWMsWUFBWSxDQUFDIn0=
@@ -1 +0,0 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/block-factory/index.ts"],"names":[],"mappings":"AAAA,cAAc,YAAY,CAAC"}
@@ -1 +0,0 @@
-export * from './light.js';
@@ -1,38 +0,0 @@
-import { Fr } from '@aztec/foundation/curves/bn254';
-import { L2Block } from '@aztec/stdlib/block';
-import type { IBlockFactory, MerkleTreeWriteOperations } from '@aztec/stdlib/interfaces/server';
-import { type GlobalVariables, type ProcessedTx } from '@aztec/stdlib/tx';
-import { type TelemetryClient } from '@aztec/telemetry-client';
-/**
- * Builds a block and its header from a set of processed tx without running any circuits.
- *
- * NOTE: the onus is ON THE CALLER to update the db that is passed in with the notes hashes, nullifiers, etc
- * PRIOR to calling `buildBlock`.
- *
- * Why? Because if you are, e.g. building a block in practice from TxObjects, you are using the
- * PublicProcessor which will do this for you as it processes transactions.
- *
- * If you haven't already inserted the side effects, e.g. because you are in a testing context, you can use the helper
- * function `buildBlockWithCleanDB`, which calls `insertSideEffects` for you.
- *
- * @deprecated Use LightweightCheckpointBuilder instead. This only works for one block per checkpoint.
- */
-export declare class LightweightBlockFactory implements IBlockFactory {
-    private db;
-    private telemetry;
-    private globalVariables?;
-    private l1ToL2Messages?;
-    private txs;
-    private readonly logger;
-    constructor(db: MerkleTreeWriteOperations, telemetry?: TelemetryClient);
-    startNewBlock(globalVariables: GlobalVariables, l1ToL2Messages: Fr[]): Promise<void>;
-    addTxs(txs: ProcessedTx[]): Promise<void>;
-    setBlockCompleted(): Promise<L2Block>;
-    private buildBlock;
-}
-/**
- * Inserts the processed transactions into the DB, then creates a block.
- * @param db - A db fork to use for block building which WILL BE MODIFIED.
- */
-export declare function buildBlockWithCleanDB(txs: ProcessedTx[], globalVariables: GlobalVariables, l1ToL2Messages: Fr[], db: MerkleTreeWriteOperations, telemetry?: TelemetryClient): Promise<L2Block>;
- //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoibGlnaHQuZC50cyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy9ibG9jay1mYWN0b3J5L2xpZ2h0LnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUdBLE9BQU8sRUFBRSxFQUFFLEVBQUUsTUFBTSxnQ0FBZ0MsQ0FBQztBQUVwRCxPQUFPLEVBQUUsT0FBTyxFQUFpQixNQUFNLHFCQUFxQixDQUFDO0FBQzdELE9BQU8sS0FBSyxFQUFFLGFBQWEsRUFBRSx5QkFBeUIsRUFBRSxNQUFNLGlDQUFpQyxDQUFDO0FBR2hHLE9BQU8sRUFBcUIsS0FBSyxlQUFlLEVBQUUsS0FBSyxXQUFXLEVBQUUsTUFBTSxrQkFBa0IsQ0FBQztBQUM3RixPQUFPLEVBQUUsS0FBSyxlQUFlLEVBQXNCLE1BQU0seUJBQXlCLENBQUM7QUFRbkY7Ozs7Ozs7Ozs7Ozs7R0FhRztBQUNILHFCQUFhLHVCQUF3QixZQUFXLGFBQWE7SUFRekQsT0FBTyxDQUFDLEVBQUU7SUFDVixPQUFPLENBQUMsU0FBUztJQVJuQixPQUFPLENBQUMsZUFBZSxDQUFDLENBQWtCO0lBQzFDLE9BQU8sQ0FBQyxjQUFjLENBQUMsQ0FBTztJQUM5QixPQUFPLENBQUMsR0FBRyxDQUE0QjtJQUV2QyxPQUFPLENBQUMsUUFBUSxDQUFDLE1BQU0sQ0FBNkM7SUFFcEUsWUFDVSxFQUFFLEVBQUUseUJBQXlCLEVBQzdCLFNBQVMsR0FBRSxlQUFzQyxFQUN2RDtJQUVFLGFBQWEsQ0FBQyxlQUFlLEVBQUUsZUFBZSxFQUFFLGNBQWMsRUFBRSxFQUFFLEVBQUUsR0FBRyxPQUFPLENBQUMsSUFBSSxDQUFDLENBT3pGO0lBRUQsTUFBTSxDQUFDLEdBQUcsRUFBRSxXQUFXLEVBQUUsR0FBRyxPQUFPLENBQUMsSUFBSSxDQUFDLENBU3hDO0lBRUQsaUJBQWlCLElBQUksT0FBTyxDQUFDLE9BQU8sQ0FBQyxDQUVwQztZQUVhLFVBQVU7Q0E4Q3pCO0FBRUQ7OztHQUdHO0FBQ0gsd0JBQXNCLHFCQUFxQixDQUN6QyxHQUFHLEVBQUUsV0FBVyxFQUFFLEVBQ2xCLGVBQWUsRUFBRSxlQUFlLEVBQ2hDLGNBQWMsRUFBRSxFQUFFLEVBQUUsRUFDcEIsRUFBRSxFQUFFLHlCQUF5QixFQUM3QixTQUFTLEdBQUUsZUFBc0Msb0JBV2xEIn0=
@@ -1 +0,0 @@
- {"version":3,"file":"light.d.ts","sourceRoot":"","sources":["../../src/block-factory/light.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,EAAE,EAAE,MAAM,gCAAgC,CAAC;AAEpD,OAAO,EAAE,OAAO,EAAiB,MAAM,qBAAqB,CAAC;AAC7D,OAAO,KAAK,EAAE,aAAa,EAAE,yBAAyB,EAAE,MAAM,iCAAiC,CAAC;AAGhG,OAAO,EAAqB,KAAK,eAAe,EAAE,KAAK,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAC7F,OAAO,EAAE,KAAK,eAAe,EAAsB,MAAM,yBAAyB,CAAC;AAQnF;;;;;;;;;;;;;GAaG;AACH,qBAAa,uBAAwB,YAAW,aAAa;IAQzD,OAAO,CAAC,EAAE;IACV,OAAO,CAAC,SAAS;IARnB,OAAO,CAAC,eAAe,CAAC,CAAkB;IAC1C,OAAO,CAAC,cAAc,CAAC,CAAO;IAC9B,OAAO,CAAC,GAAG,CAA4B;IAEvC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAA6C;IAEpE,YACU,EAAE,EAAE,yBAAyB,EAC7B,SAAS,GAAE,eAAsC,EACvD;IAEE,aAAa,CAAC,eAAe,EAAE,eAAe,EAAE,cAAc,EAAE,EAAE,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAOzF;IAED,MAAM,CAAC,GAAG,EAAE,WAAW,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CASxC;IAED,iBAAiB,IAAI,OAAO,CAAC,OAAO,CAAC,CAEpC;YAEa,UAAU;CA8CzB;AAED;;;GAGG;AACH,wBAAsB,qBAAqB,CACzC,GAAG,EAAE,WAAW,EAAE,EAClB,eAAe,EAAE,eAAe,EAChC,cAAc,EAAE,EAAE,EAAE,EACpB,EAAE,EAAE,yBAAyB,EAC7B,SAAS,GAAE,eAAsC,oBAWlD"}
@@ -1,108 +0,0 @@
-import { SpongeBlob, computeBlobsHashFromBlobs, encodeCheckpointEndMarker, getBlobsPerL1Block } from '@aztec/blob-lib';
-import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/constants';
-import { padArrayEnd } from '@aztec/foundation/collection';
-import { Fr } from '@aztec/foundation/curves/bn254';
-import { createLogger } from '@aztec/foundation/log';
-import { L2Block, L2BlockHeader } from '@aztec/stdlib/block';
-import { computeBlockOutHash, computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging';
-import { MerkleTreeId } from '@aztec/stdlib/trees';
-import { ContentCommitment } from '@aztec/stdlib/tx';
-import { getTelemetryClient } from '@aztec/telemetry-client';
-import { buildHeaderAndBodyFromTxs, getTreeSnapshot, insertSideEffects } from '../orchestrator/block-building-helpers.js';
-/**
- * Builds a block and its header from a set of processed tx without running any circuits.
- *
- * NOTE: the onus is ON THE CALLER to update the db that is passed in with the notes hashes, nullifiers, etc
- * PRIOR to calling `buildBlock`.
- *
- * Why? Because if you are, e.g. building a block in practice from TxObjects, you are using the
- * PublicProcessor which will do this for you as it processes transactions.
- *
- * If you haven't already inserted the side effects, e.g. because you are in a testing context, you can use the helper
- * function `buildBlockWithCleanDB`, which calls `insertSideEffects` for you.
- *
- * @deprecated Use LightweightCheckpointBuilder instead. This only works for one block per checkpoint.
- */ export class LightweightBlockFactory {
-    db;
-    telemetry;
-    globalVariables;
-    l1ToL2Messages;
-    txs;
-    logger;
-    constructor(db, telemetry = getTelemetryClient()){
-        this.db = db;
-        this.telemetry = telemetry;
-        this.logger = createLogger('lightweight-block-factory');
-    }
-    async startNewBlock(globalVariables, l1ToL2Messages) {
-        this.logger.debug('Starting new block', {
-            globalVariables: globalVariables.toInspect(),
-            l1ToL2Messages
-        });
-        this.globalVariables = globalVariables;
-        this.l1ToL2Messages = padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
-        this.txs = undefined;
-        // Update L1 to L2 tree
-        await this.db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, this.l1ToL2Messages);
-    }
-    addTxs(txs) {
-        // Most times, `addTxs` is only called once per block.
-        // So avoid copies.
-        if (this.txs === undefined) {
-            this.txs = txs;
-        } else {
-            this.txs.push(...txs);
-        }
-        return Promise.resolve();
-    }
-    setBlockCompleted() {
-        return this.buildBlock();
-    }
-    async buildBlock() {
-        const lastArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.db);
-        const state = await this.db.getStateReference();
-        const txs = this.txs ?? [];
-        const startSpongeBlob = SpongeBlob.init();
-        const { header, body, blockBlobFields } = await buildHeaderAndBodyFromTxs(txs, lastArchive, state, this.globalVariables, startSpongeBlob, true);
-        header.state.validate();
-        await this.db.updateArchive(header);
-        const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.db);
-        const outHash = computeBlockOutHash(txs.map((tx)=>tx.txEffect.l2ToL1Msgs));
-        const inHash = computeInHashFromL1ToL2Messages(this.l1ToL2Messages);
-        const numBlobFields = blockBlobFields.length + 1;
-        const blobFields = blockBlobFields.concat([
-            encodeCheckpointEndMarker({
-                numBlobFields
-            })
-        ]);
-        const blobsHash = computeBlobsHashFromBlobs(getBlobsPerL1Block(blobFields));
-        const blockHeaderHash = await header.hash();
-        const contentCommitment = new ContentCommitment(blobsHash, inHash, outHash);
-        const l2BlockHeader = L2BlockHeader.from({
-            ...header,
-            blockHeadersHash: blockHeaderHash,
-            contentCommitment
-        });
-        const block = new L2Block(newArchive, l2BlockHeader, body);
-        this.logger.debug(`Built block ${block.number}`, {
-            globalVariables: this.globalVariables?.toInspect(),
-            archiveRoot: newArchive.root.toString(),
-            stateReference: header.state.toInspect(),
-            blockHash: (await block.hash()).toString(),
-            txs: block.body.txEffects.map((tx)=>tx.txHash.toString())
-        });
-        return block;
-    }
-}
-/**
- * Inserts the processed transactions into the DB, then creates a block.
- * @param db - A db fork to use for block building which WILL BE MODIFIED.
- */ export async function buildBlockWithCleanDB(txs, globalVariables, l1ToL2Messages, db, telemetry = getTelemetryClient()) {
-    const builder = new LightweightBlockFactory(db, telemetry);
-    await builder.startNewBlock(globalVariables, l1ToL2Messages);
-    for (const tx of txs){
-        await insertSideEffects(tx, db);
-    }
-    await builder.addTxs(txs);
-    return await builder.setBlockCompleted();
-}
@@ -1,8 +0,0 @@
-import type { Timer } from '@aztec/foundation/timer';
-import { type TelemetryClient } from '@aztec/telemetry-client';
-export declare class ProvingAgentInstrumentation {
-    private idleTime;
-    constructor(client: TelemetryClient, name?: string);
-    recordIdleTime(msOrTimer: Timer | number): void;
-}
- //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoicHJvdmluZ19hZ2VudF9pbnN0cnVtZW50YXRpb24uZC50cyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy9wcm92aW5nX2Jyb2tlci9wcm92aW5nX2FnZW50X2luc3RydW1lbnRhdGlvbi50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxPQUFPLEtBQUssRUFBRSxLQUFLLEVBQUUsTUFBTSx5QkFBeUIsQ0FBQztBQUNyRCxPQUFPLEVBQTJCLEtBQUssZUFBZSxFQUFhLE1BQU0seUJBQXlCLENBQUM7QUFFbkcscUJBQWEsMkJBQTJCO0lBQ3RDLE9BQU8sQ0FBQyxRQUFRLENBQVk7SUFFNUIsWUFBWSxNQUFNLEVBQUUsZUFBZSxFQUFFLElBQUksU0FBaUIsRUFRekQ7SUFFRCxjQUFjLENBQUMsU0FBUyxFQUFFLEtBQUssR0FBRyxNQUFNLFFBR3ZDO0NBQ0YifQ==
@@ -1 +0,0 @@
- {"version":3,"file":"proving_agent_instrumentation.d.ts","sourceRoot":"","sources":["../../src/proving_broker/proving_agent_instrumentation.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,yBAAyB,CAAC;AACrD,OAAO,EAA2B,KAAK,eAAe,EAAa,MAAM,yBAAyB,CAAC;AAEnG,qBAAa,2BAA2B;IACtC,OAAO,CAAC,QAAQ,CAAY;IAE5B,YAAY,MAAM,EAAE,eAAe,EAAE,IAAI,SAAiB,EAQzD;IAED,cAAc,CAAC,SAAS,EAAE,KAAK,GAAG,MAAM,QAGvC;CACF"}
@@ -1,16 +0,0 @@
-import { Metrics, ValueType } from '@aztec/telemetry-client';
-export class ProvingAgentInstrumentation {
-    idleTime;
-    constructor(client, name = 'ProvingAgent'){
-        const meter = client.getMeter(name);
-        this.idleTime = meter.createHistogram(Metrics.PROVING_AGENT_IDLE, {
-            description: 'Records how long an agent was idle',
-            unit: 's',
-            valueType: ValueType.DOUBLE
-        });
-    }
-    recordIdleTime(msOrTimer) {
-        const duration = typeof msOrTimer === 'number' ? msOrTimer : msOrTimer.ms();
-        this.idleTime.record(duration / 1000);
-    }
-}
@@ -1 +0,0 @@
-export * from './light.js';
@@ -1,137 +0,0 @@
-import { SpongeBlob, computeBlobsHashFromBlobs, encodeCheckpointEndMarker, getBlobsPerL1Block } from '@aztec/blob-lib';
-import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/constants';
-import { padArrayEnd } from '@aztec/foundation/collection';
-import { Fr } from '@aztec/foundation/curves/bn254';
-import { createLogger } from '@aztec/foundation/log';
-import { L2Block, L2BlockHeader } from '@aztec/stdlib/block';
-import type { IBlockFactory, MerkleTreeWriteOperations } from '@aztec/stdlib/interfaces/server';
-import { computeBlockOutHash, computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging';
-import { MerkleTreeId } from '@aztec/stdlib/trees';
-import { ContentCommitment, type GlobalVariables, type ProcessedTx } from '@aztec/stdlib/tx';
-import { type TelemetryClient, getTelemetryClient } from '@aztec/telemetry-client';
-
-import {
-  buildHeaderAndBodyFromTxs,
-  getTreeSnapshot,
-  insertSideEffects,
-} from '../orchestrator/block-building-helpers.js';
-
-/**
- * Builds a block and its header from a set of processed tx without running any circuits.
- *
- * NOTE: the onus is ON THE CALLER to update the db that is passed in with the notes hashes, nullifiers, etc
- * PRIOR to calling `buildBlock`.
- *
- * Why? Because if you are, e.g. building a block in practice from TxObjects, you are using the
- * PublicProcessor which will do this for you as it processes transactions.
- *
- * If you haven't already inserted the side effects, e.g. because you are in a testing context, you can use the helper
- * function `buildBlockWithCleanDB`, which calls `insertSideEffects` for you.
- *
- * @deprecated Use LightweightCheckpointBuilder instead. This only works for one block per checkpoint.
- */
-export class LightweightBlockFactory implements IBlockFactory {
-  private globalVariables?: GlobalVariables;
-  private l1ToL2Messages?: Fr[];
-  private txs: ProcessedTx[] | undefined;
-
-  private readonly logger = createLogger('lightweight-block-factory');
-
-  constructor(
-    private db: MerkleTreeWriteOperations,
-    private telemetry: TelemetryClient = getTelemetryClient(),
-  ) {}
-
-  async startNewBlock(globalVariables: GlobalVariables, l1ToL2Messages: Fr[]): Promise<void> {
-    this.logger.debug('Starting new block', { globalVariables: globalVariables.toInspect(), l1ToL2Messages });
-    this.globalVariables = globalVariables;
-    this.l1ToL2Messages = padArrayEnd<Fr, number>(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
-    this.txs = undefined;
-    // Update L1 to L2 tree
-    await this.db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, this.l1ToL2Messages!);
-  }
-
-  addTxs(txs: ProcessedTx[]): Promise<void> {
-    // Most times, `addTxs` is only called once per block.
-    // So avoid copies.
-    if (this.txs === undefined) {
-      this.txs = txs;
-    } else {
-      this.txs.push(...txs);
-    }
-    return Promise.resolve();
-  }
-
-  setBlockCompleted(): Promise<L2Block> {
-    return this.buildBlock();
-  }
-
-  private async buildBlock(): Promise<L2Block> {
-    const lastArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.db);
-    const state = await this.db.getStateReference();
-
-    const txs = this.txs ?? [];
-    const startSpongeBlob = SpongeBlob.init();
-
-    const { header, body, blockBlobFields } = await buildHeaderAndBodyFromTxs(
-      txs,
-      lastArchive,
-      state,
-      this.globalVariables!,
-      startSpongeBlob,
-      true,
-    );
-
-    header.state.validate();
-
-    await this.db.updateArchive(header);
-    const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.db);
-
-    const outHash = computeBlockOutHash(txs.map(tx => tx.txEffect.l2ToL1Msgs));
-    const inHash = computeInHashFromL1ToL2Messages(this.l1ToL2Messages!);
-    const numBlobFields = blockBlobFields.length + 1;
-    const blobFields = blockBlobFields.concat([encodeCheckpointEndMarker({ numBlobFields })]);
-    const blobsHash = computeBlobsHashFromBlobs(getBlobsPerL1Block(blobFields));
-    const blockHeaderHash = await header.hash();
-    const contentCommitment = new ContentCommitment(blobsHash, inHash, outHash);
-    const l2BlockHeader = L2BlockHeader.from({
-      ...header,
-      blockHeadersHash: blockHeaderHash,
-      contentCommitment,
-    });
-
-    const block = new L2Block(newArchive, l2BlockHeader, body);
-
-    this.logger.debug(`Built block ${block.number}`, {
-      globalVariables: this.globalVariables?.toInspect(),
-      archiveRoot: newArchive.root.toString(),
-      stateReference: header.state.toInspect(),
-      blockHash: (await block.hash()).toString(),
-      txs: block.body.txEffects.map(tx => tx.txHash.toString()),
-    });
-
-    return block;
-  }
-}
-
-/**
- * Inserts the processed transactions into the DB, then creates a block.
- * @param db - A db fork to use for block building which WILL BE MODIFIED.
- */
-export async function buildBlockWithCleanDB(
-  txs: ProcessedTx[],
-  globalVariables: GlobalVariables,
-  l1ToL2Messages: Fr[],
-  db: MerkleTreeWriteOperations,
-  telemetry: TelemetryClient = getTelemetryClient(),
-) {
-  const builder = new LightweightBlockFactory(db, telemetry);
-  await builder.startNewBlock(globalVariables, l1ToL2Messages);
-
-  for (const tx of txs) {
-    await insertSideEffects(tx, db);
-  }
-  await builder.addTxs(txs);
-
-  return await builder.setBlockCompleted();
-}
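The removed source above spells out the caller contract: either insert each transaction's side effects into the db fork before buildBlock runs, or call buildBlockWithCleanDB, which invokes insertSideEffects for you, and the deprecation notice points callers at LightweightCheckpointBuilder instead. For reference, a usage sketch of the now-removed helper, based only on its deleted signature; the subpath import and the buildTestBlock wrapper are assumptions for illustration, not part of the package.

import type { Fr } from '@aztec/foundation/curves/bn254';
import type { MerkleTreeWriteOperations } from '@aztec/stdlib/interfaces/server';
import type { GlobalVariables, ProcessedTx } from '@aztec/stdlib/tx';
// Import path assumed from the removed file's location in older releases.
import { buildBlockWithCleanDB } from '@aztec/prover-client/block-factory';

// Builds a block in a test context, letting the helper insert each tx's side
// effects into the fork (which WILL BE MODIFIED) before assembling the block.
async function buildTestBlock(
  fork: MerkleTreeWriteOperations,
  txs: ProcessedTx[],
  globalVariables: GlobalVariables,
  l1ToL2Messages: Fr[],
) {
  const block = await buildBlockWithCleanDB(txs, globalVariables, l1ToL2Messages, fork);
  return block; // resolves to an L2Block
}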
@@ -1,21 +0,0 @@
-import type { Timer } from '@aztec/foundation/timer';
-import { type Histogram, Metrics, type TelemetryClient, ValueType } from '@aztec/telemetry-client';
-
-export class ProvingAgentInstrumentation {
-  private idleTime: Histogram;
-
-  constructor(client: TelemetryClient, name = 'ProvingAgent') {
-    const meter = client.getMeter(name);
-
-    this.idleTime = meter.createHistogram(Metrics.PROVING_AGENT_IDLE, {
-      description: 'Records how long an agent was idle',
-      unit: 's',
-      valueType: ValueType.DOUBLE,
-    });
-  }
-
-  recordIdleTime(msOrTimer: Timer | number) {
-    const duration = typeof msOrTimer === 'number' ? msOrTimer : msOrTimer.ms();
-    this.idleTime.record(duration / 1000);
-  }
-}