@aztec/stdlib 2.0.3 → 2.1.0-rc.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/block/in_block.d.ts +5 -5
- package/dest/block/in_block.d.ts.map +1 -1
- package/dest/block/proposal/attestations_and_signers.d.ts +48 -0
- package/dest/block/proposal/attestations_and_signers.d.ts.map +1 -0
- package/dest/block/proposal/attestations_and_signers.js +99 -0
- package/dest/block/proposal/committee_attestation.d.ts +1 -0
- package/dest/block/proposal/committee_attestation.d.ts.map +1 -1
- package/dest/block/proposal/committee_attestation.js +3 -0
- package/dest/block/proposal/index.d.ts +1 -0
- package/dest/block/proposal/index.d.ts.map +1 -1
- package/dest/block/proposal/index.js +1 -0
- package/dest/contract/interfaces/contract_class.d.ts +3 -3
- package/dest/epoch-helpers/index.d.ts +2 -0
- package/dest/epoch-helpers/index.d.ts.map +1 -1
- package/dest/epoch-helpers/index.js +3 -0
- package/dest/file-store/factory.d.ts.map +1 -1
- package/dest/file-store/factory.js +18 -0
- package/dest/file-store/interface.d.ts +8 -2
- package/dest/file-store/interface.d.ts.map +1 -1
- package/dest/file-store/s3.d.ts +26 -0
- package/dest/file-store/s3.d.ts.map +1 -0
- package/dest/file-store/s3.js +252 -0
- package/dest/interfaces/aztec-node-admin.d.ts +13 -7
- package/dest/interfaces/aztec-node-admin.d.ts.map +1 -1
- package/dest/interfaces/block-builder.d.ts +1 -0
- package/dest/interfaces/block-builder.d.ts.map +1 -1
- package/dest/interfaces/proving-job.d.ts +6 -6
- package/dest/interfaces/slasher.d.ts +4 -0
- package/dest/interfaces/slasher.d.ts.map +1 -1
- package/dest/interfaces/slasher.js +1 -0
- package/dest/interfaces/validator.d.ts +9 -2
- package/dest/interfaces/validator.d.ts.map +1 -1
- package/dest/interfaces/validator.js +2 -1
- package/dest/p2p/consensus_payload.d.ts +27 -0
- package/dest/p2p/consensus_payload.d.ts.map +1 -1
- package/dest/p2p/consensus_payload.js +10 -0
- package/dest/p2p/signature_utils.d.ts +2 -1
- package/dest/p2p/signature_utils.d.ts.map +1 -1
- package/dest/p2p/signature_utils.js +1 -0
- package/dest/slashing/types.d.ts +1 -0
- package/dest/slashing/types.d.ts.map +1 -1
- package/dest/slashing/types.js +22 -0
- package/dest/snapshots/download.d.ts.map +1 -1
- package/dest/snapshots/download.js +58 -2
- package/dest/snapshots/upload.d.ts.map +1 -1
- package/dest/snapshots/upload.js +1 -0
- package/dest/tests/mocks.d.ts +2 -0
- package/dest/tests/mocks.d.ts.map +1 -1
- package/dest/tests/mocks.js +4 -0
- package/dest/tx/indexed_tx_effect.d.ts +3 -3
- package/dest/tx/proposed_block_header.d.ts +1 -0
- package/dest/tx/proposed_block_header.d.ts.map +1 -1
- package/dest/tx/proposed_block_header.js +3 -0
- package/dest/tx/state_reference.js +1 -1
- package/dest/tx/tx.d.ts +0 -7
- package/dest/tx/tx.d.ts.map +1 -1
- package/dest/tx/tx.js +0 -8
- package/package.json +10 -9
- package/src/block/proposal/attestations_and_signers.ts +121 -0
- package/src/block/proposal/committee_attestation.ts +4 -0
- package/src/block/proposal/index.ts +1 -0
- package/src/epoch-helpers/index.ts +8 -0
- package/src/file-store/factory.ts +15 -0
- package/src/file-store/interface.ts +8 -2
- package/src/file-store/s3.ts +254 -0
- package/src/interfaces/block-builder.ts +1 -0
- package/src/interfaces/slasher.ts +2 -0
- package/src/interfaces/validator.ts +12 -2
- package/src/p2p/consensus_payload.ts +16 -0
- package/src/p2p/signature_utils.ts +1 -0
- package/src/slashing/types.ts +23 -0
- package/src/snapshots/download.ts +66 -2
- package/src/snapshots/upload.ts +1 -0
- package/src/tests/mocks.ts +12 -0
- package/src/tx/proposed_block_header.ts +13 -0
- package/src/tx/state_reference.ts +1 -1
- package/src/tx/tx.ts +0 -10
package/dest/block/in_block.d.ts
CHANGED
@@ -12,21 +12,21 @@ export declare function inBlockSchemaFor<T extends ZodTypeAny>(schema: T): z.Zod
     data: T;
     l2BlockNumber: z.ZodPipeline<z.ZodUnion<[z.ZodBigInt, z.ZodNumber, z.ZodString]>, z.ZodNumber>;
     l2BlockHash: z.ZodEffects<z.ZodEffects<z.ZodEffects<z.ZodEffects<z.ZodString, string, string>, string, string>, Buffer<ArrayBuffer>, string>, L2BlockHash, string>;
-}, "strip", ZodTypeAny,
+}, "strip", ZodTypeAny, z.objectUtil.addQuestionMarks<z.baseObjectOutputType<{
     data: T;
     l2BlockNumber: z.ZodPipeline<z.ZodUnion<[z.ZodBigInt, z.ZodNumber, z.ZodString]>, z.ZodNumber>;
     l2BlockHash: z.ZodEffects<z.ZodEffects<z.ZodEffects<z.ZodEffects<z.ZodString, string, string>, string, string>, Buffer<ArrayBuffer>, string>, L2BlockHash, string>;
-}>, any>]: z.objectUtil.addQuestionMarks<z.baseObjectOutputType<{
+}>, any> extends infer T_1 ? { [k in keyof T_1]: z.objectUtil.addQuestionMarks<z.baseObjectOutputType<{
     data: T;
     l2BlockNumber: z.ZodPipeline<z.ZodUnion<[z.ZodBigInt, z.ZodNumber, z.ZodString]>, z.ZodNumber>;
     l2BlockHash: z.ZodEffects<z.ZodEffects<z.ZodEffects<z.ZodEffects<z.ZodString, string, string>, string, string>, Buffer<ArrayBuffer>, string>, L2BlockHash, string>;
-}>, any>[k]; }
+}>, any>[k]; } : never, z.baseObjectInputType<{
     data: T;
     l2BlockNumber: z.ZodPipeline<z.ZodUnion<[z.ZodBigInt, z.ZodNumber, z.ZodString]>, z.ZodNumber>;
     l2BlockHash: z.ZodEffects<z.ZodEffects<z.ZodEffects<z.ZodEffects<z.ZodString, string, string>, string, string>, Buffer<ArrayBuffer>, string>, L2BlockHash, string>;
-}>]: z.baseObjectInputType<{
+}> extends infer T_2 ? { [k_1 in keyof T_2]: z.baseObjectInputType<{
     data: T;
     l2BlockNumber: z.ZodPipeline<z.ZodUnion<[z.ZodBigInt, z.ZodNumber, z.ZodString]>, z.ZodNumber>;
     l2BlockHash: z.ZodEffects<z.ZodEffects<z.ZodEffects<z.ZodEffects<z.ZodString, string, string>, string, string>, Buffer<ArrayBuffer>, string>, L2BlockHash, string>;
-}>[k_1]; }>;
+}>[k_1]; } : never>;
 //# sourceMappingURL=in_block.d.ts.map
package/dest/block/in_block.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"in_block.d.ts","sourceRoot":"","sources":["../../src/block/in_block.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,UAAU,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAGzC,OAAO,EAAE,WAAW,EAAE,MAAM,iBAAiB,CAAC;AAC9C,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,eAAe,CAAC;AAG7C,MAAM,MAAM,OAAO,CAAC,CAAC,IAAI;IACvB,aAAa,EAAE,MAAM,CAAC;IACtB,WAAW,EAAE,WAAW,CAAC;IACzB,IAAI,EAAE,CAAC,CAAC;CACT,CAAC;AAEF,wBAAgB,aAAa,CAAC,CAAC,EAAE,IAAI,EAAE,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAMpD;AAED,wBAAsB,WAAW,CAAC,CAAC,EAAE,IAAI,EAAE,CAAC,EAAE,KAAK,EAAE,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAMjF;AAED,wBAAgB,gBAAgB,CAAC,CAAC,SAAS,UAAU,EAAE,MAAM,EAAE,CAAC;;;;;;;;;;;;;;;;;;;;
+
{"version":3,"file":"in_block.d.ts","sourceRoot":"","sources":["../../src/block/in_block.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,UAAU,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAGzC,OAAO,EAAE,WAAW,EAAE,MAAM,iBAAiB,CAAC;AAC9C,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,eAAe,CAAC;AAG7C,MAAM,MAAM,OAAO,CAAC,CAAC,IAAI;IACvB,aAAa,EAAE,MAAM,CAAC;IACtB,WAAW,EAAE,WAAW,CAAC;IACzB,IAAI,EAAE,CAAC,CAAC;CACT,CAAC;AAEF,wBAAgB,aAAa,CAAC,CAAC,EAAE,IAAI,EAAE,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAMpD;AAED,wBAAsB,WAAW,CAAC,CAAC,EAAE,IAAI,EAAE,CAAC,EAAE,KAAK,EAAE,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAMjF;AAED,wBAAgB,gBAAgB,CAAC,CAAC,SAAS,UAAU,EAAE,MAAM,EAAE,CAAC;;;;;;;;;;;;;;;;;;;;oBAM/D"}
package/dest/block/proposal/attestations_and_signers.d.ts
@@ -0,0 +1,48 @@
+import type { ViemCommitteeAttestations } from '@aztec/ethereum';
+import { z } from 'zod';
+import type { Signable, SignatureDomainSeparator } from '../../p2p/signature_utils.js';
+import { CommitteeAttestation } from './committee_attestation.js';
+export declare class CommitteeAttestationsAndSigners implements Signable {
+    attestations: CommitteeAttestation[];
+    constructor(attestations: CommitteeAttestation[]);
+    static get schema(): z.ZodEffects<z.ZodObject<{
+        attestations: z.ZodArray<z.ZodEffects<z.ZodObject<{
+            address: z.ZodType<import("./committee_attestation.js").EthAddress, any, string>;
+            signature: z.ZodEffects<z.ZodEffects<z.ZodEffects<z.ZodString, `0x${string}`, string>, `0x${string}`, string>, import("./committee_attestation.js").Signature, string>;
+        }, "strip", z.ZodTypeAny, {
+            address: import("./committee_attestation.js").EthAddress;
+            signature: import("./committee_attestation.js").Signature;
+        }, {
+            address: string;
+            signature: string;
+        }>, CommitteeAttestation, {
+            address: string;
+            signature: string;
+        }>, "many">;
+    }, "strip", z.ZodTypeAny, {
+        attestations: CommitteeAttestation[];
+    }, {
+        attestations: {
+            address: string;
+            signature: string;
+        }[];
+    }>, CommitteeAttestationsAndSigners, {
+        attestations: {
+            address: string;
+            signature: string;
+        }[];
+    }>;
+    getPayloadToSign(domainSeparator: SignatureDomainSeparator): Buffer;
+    static empty(): CommitteeAttestationsAndSigners;
+    toString(): string;
+    getSigners(): import("./committee_attestation.js").EthAddress[];
+    getSignedAttestations(): CommitteeAttestation[];
+    /**
+     * Packs an array of committee attestations into the format expected by the Solidity contract
+     *
+     * @param attestations - Array of committee attestations with addresses and signatures
+     * @returns Packed attestations with bitmap and tightly packed signature/address data
+     */
+    getPackedAttestations(): ViemCommitteeAttestations;
+}
+//# sourceMappingURL=attestations_and_signers.d.ts.map
package/dest/block/proposal/attestations_and_signers.d.ts.map
@@ -0,0 +1 @@
+
{"version":3,"file":"attestations_and_signers.d.ts","sourceRoot":"","sources":["../../../src/block/proposal/attestations_and_signers.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,yBAAyB,EAAE,MAAM,iBAAiB,CAAC;AAIjE,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAExB,OAAO,KAAK,EAAE,QAAQ,EAAE,wBAAwB,EAAE,MAAM,8BAA8B,CAAC;AACvF,OAAO,EAAE,oBAAoB,EAAE,MAAM,4BAA4B,CAAC;AAElE,qBAAa,+BAAgC,YAAW,QAAQ;IAC3C,YAAY,EAAE,oBAAoB,EAAE;gBAApC,YAAY,EAAE,oBAAoB,EAAE;IAEvD,MAAM,KAAK,MAAM;;;;;;;;;;;;;;;;;;;;;;;;;;OAMhB;IAED,gBAAgB,CAAC,eAAe,EAAE,wBAAwB,GAAG,MAAM;IAanE,MAAM,CAAC,KAAK,IAAI,+BAA+B;IAI/C,QAAQ;IAIR,UAAU;IAIV,qBAAqB;IAIrB;;;;;OAKG;IACH,qBAAqB,IAAI,yBAAyB;CAiEnD"}
package/dest/block/proposal/attestations_and_signers.js
@@ -0,0 +1,99 @@
+import { hexToBuffer } from '@aztec/foundation/string';
+import { encodeAbiParameters, parseAbiParameters } from 'viem';
+import { z } from 'zod';
+import { CommitteeAttestation } from './committee_attestation.js';
+export class CommitteeAttestationsAndSigners {
+    attestations;
+    constructor(attestations){
+        this.attestations = attestations;
+    }
+    static get schema() {
+        return z.object({
+            attestations: CommitteeAttestation.schema.array()
+        }).transform((obj)=>new CommitteeAttestationsAndSigners(obj.attestations));
+    }
+    getPayloadToSign(domainSeparator) {
+        const abi = parseAbiParameters('uint8,(bytes,bytes),address[]');
+        const packed = this.getPackedAttestations();
+        const encodedData = encodeAbiParameters(abi, [
+            domainSeparator,
+            [
+                packed.signatureIndices,
+                packed.signaturesOrAddresses
+            ],
+            this.getSigners().map((s)=>s.toString())
+        ]);
+        return hexToBuffer(encodedData);
+    }
+    static empty() {
+        return new CommitteeAttestationsAndSigners([]);
+    }
+    toString() {
+        return `CommitteeAttestationsAndSigners(${this.attestations.map((a)=>a.toString()).join(',')})`;
+    }
+    getSigners() {
+        return this.attestations.filter((a)=>!a.signature.isEmpty()).map((a)=>a.address);
+    }
+    getSignedAttestations() {
+        return this.attestations.filter((a)=>!a.signature.isEmpty());
+    }
+    /**
+     * Packs an array of committee attestations into the format expected by the Solidity contract
+     *
+     * @param attestations - Array of committee attestations with addresses and signatures
+     * @returns Packed attestations with bitmap and tightly packed signature/address data
+     */ getPackedAttestations() {
+        const length = this.attestations.length;
+        const attestations = this.attestations.map((a)=>a.toViem());
+        // Calculate bitmap size (1 bit per attestation, rounded up to nearest byte)
+        const bitmapSize = Math.ceil(length / 8);
+        const signatureIndices = new Uint8Array(bitmapSize);
+        // Calculate total data size needed
+        let totalDataSize = 0;
+        for(let i = 0; i < length; i++){
+            const signature = attestations[i].signature;
+            // Check if signature is empty (v = 0)
+            const isEmpty = signature.v === 0;
+            if (!isEmpty) {
+                totalDataSize += 65; // v (1) + r (32) + s (32)
+            } else {
+                totalDataSize += 20; // address only
+            }
+        }
+        const signaturesOrAddresses = new Uint8Array(totalDataSize);
+        let dataIndex = 0;
+        // Pack the data
+        for(let i = 0; i < length; i++){
+            const attestation = attestations[i];
+            const signature = attestation.signature;
+            // Check if signature is empty
+            const isEmpty = signature.v === 0;
+            if (!isEmpty) {
+                // Set bit in bitmap (bit 7-0 in each byte, left to right)
+                const byteIndex = Math.floor(i / 8);
+                const bitIndex = 7 - i % 8;
+                signatureIndices[byteIndex] |= 1 << bitIndex;
+                // Pack signature: v + r + s
+                signaturesOrAddresses[dataIndex] = signature.v;
+                dataIndex++;
+                // Pack r (32 bytes)
+                const rBytes = Buffer.from(signature.r.slice(2), 'hex');
+                signaturesOrAddresses.set(rBytes, dataIndex);
+                dataIndex += 32;
+                // Pack s (32 bytes)
+                const sBytes = Buffer.from(signature.s.slice(2), 'hex');
+                signaturesOrAddresses.set(sBytes, dataIndex);
+                dataIndex += 32;
+            } else {
+                // Pack address only (20 bytes)
+                const addrBytes = Buffer.from(attestation.addr.slice(2), 'hex');
+                signaturesOrAddresses.set(addrBytes, dataIndex);
+                dataIndex += 20;
+            }
+        }
+        return {
+            signatureIndices: `0x${Buffer.from(signatureIndices).toString('hex')}`,
+            signaturesOrAddresses: `0x${Buffer.from(signaturesOrAddresses).toString('hex')}`
+        };
+    }
+}
package/dest/block/proposal/committee_attestation.d.ts
@@ -31,6 +31,7 @@ export declare class CommitteeAttestation
     static fromPacked(packed: ViemCommitteeAttestations, committeeSize: number): CommitteeAttestation[];
     toBuffer(): Buffer;
     equals(other: CommitteeAttestation): boolean;
+    toString(): string;
     toViem(): ViemCommitteeAttestation;
 }
 //# sourceMappingURL=committee_attestation.d.ts.map
package/dest/block/proposal/committee_attestation.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"committee_attestation.d.ts","sourceRoot":"","sources":["../../../src/block/proposal/committee_attestation.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,wBAAwB,EAAE,yBAAyB,EAAE,MAAM,iBAAiB,CAAC;AAC3F,OAAO,EAAE,UAAU,EAAE,MAAM,+BAA+B,CAAC;AAC3D,OAAO,EAAE,SAAS,EAAE,MAAM,iCAAiC,CAAC;AAC5D,OAAO,EAAE,YAAY,EAAqB,MAAM,6BAA6B,CAAC;AAE9E,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAExB,OAAO,EAAE,UAAU,EAAE,SAAS,EAAE,CAAC;AAEjC,qBAAa,oBAAoB;aAEb,OAAO,EAAE,UAAU;aACnB,SAAS,EAAE,SAAS;gBADpB,OAAO,EAAE,UAAU,EACnB,SAAS,EAAE,SAAS;IAGtC,MAAM,KAAK,MAAM;;;;;;;;;;;;OAOhB;IAGD,MAAM,CAAC,WAAW,CAAC,OAAO,EAAE,UAAU,GAAG,oBAAoB;IAK7D,MAAM,CAAC,uBAAuB,CAAC,OAAO,EAAE,UAAU,EAAE,SAAS,EAAE,SAAS,GAAG,oBAAoB;IAI/F,MAAM,CAAC,aAAa,CAAC,SAAS,EAAE,SAAS,GAAG,oBAAoB;IAIhE,MAAM,CAAC,QAAQ,CAAC,IAAI,EAAE,wBAAwB,GAAG,oBAAoB;IAIrE,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,YAAY,GAAG,oBAAoB;IAOtE,MAAM,CAAC,MAAM,IAAI,oBAAoB;IAKrC,MAAM,CAAC,KAAK,IAAI,oBAAoB;IAIpC,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,yBAAyB,EAAE,aAAa,EAAE,MAAM,GAAG,oBAAoB,EAAE;IA+CnG,QAAQ,IAAI,MAAM;IAIlB,MAAM,CAAC,KAAK,EAAE,oBAAoB,GAAG,OAAO;IAI5C,MAAM,IAAI,wBAAwB;CAMnC"}
+
{"version":3,"file":"committee_attestation.d.ts","sourceRoot":"","sources":["../../../src/block/proposal/committee_attestation.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,wBAAwB,EAAE,yBAAyB,EAAE,MAAM,iBAAiB,CAAC;AAC3F,OAAO,EAAE,UAAU,EAAE,MAAM,+BAA+B,CAAC;AAC3D,OAAO,EAAE,SAAS,EAAE,MAAM,iCAAiC,CAAC;AAC5D,OAAO,EAAE,YAAY,EAAqB,MAAM,6BAA6B,CAAC;AAE9E,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAExB,OAAO,EAAE,UAAU,EAAE,SAAS,EAAE,CAAC;AAEjC,qBAAa,oBAAoB;aAEb,OAAO,EAAE,UAAU;aACnB,SAAS,EAAE,SAAS;gBADpB,OAAO,EAAE,UAAU,EACnB,SAAS,EAAE,SAAS;IAGtC,MAAM,KAAK,MAAM;;;;;;;;;;;;OAOhB;IAGD,MAAM,CAAC,WAAW,CAAC,OAAO,EAAE,UAAU,GAAG,oBAAoB;IAK7D,MAAM,CAAC,uBAAuB,CAAC,OAAO,EAAE,UAAU,EAAE,SAAS,EAAE,SAAS,GAAG,oBAAoB;IAI/F,MAAM,CAAC,aAAa,CAAC,SAAS,EAAE,SAAS,GAAG,oBAAoB;IAIhE,MAAM,CAAC,QAAQ,CAAC,IAAI,EAAE,wBAAwB,GAAG,oBAAoB;IAIrE,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,YAAY,GAAG,oBAAoB;IAOtE,MAAM,CAAC,MAAM,IAAI,oBAAoB;IAKrC,MAAM,CAAC,KAAK,IAAI,oBAAoB;IAIpC,MAAM,CAAC,UAAU,CAAC,MAAM,EAAE,yBAAyB,EAAE,aAAa,EAAE,MAAM,GAAG,oBAAoB,EAAE;IA+CnG,QAAQ,IAAI,MAAM;IAIlB,MAAM,CAAC,KAAK,EAAE,oBAAoB,GAAG,OAAO;IAI5C,QAAQ,IAAI,MAAM;IAIlB,MAAM,IAAI,wBAAwB;CAMnC"}
package/dest/block/proposal/committee_attestation.js
@@ -92,6 +92,9 @@ export class CommitteeAttestation
     equals(other) {
         return this.address.equals(other.address) && this.signature.equals(other.signature);
     }
+    toString() {
+        return `CommitteeAttestation(${this.address.toString()}, ${this.signature.toString()})`;
+    }
     toViem() {
         return {
             addr: this.address.toString(),
package/dest/block/proposal/index.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/block/proposal/index.ts"],"names":[],"mappings":"AAAA,cAAc,4BAA4B,CAAC"}
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/block/proposal/index.ts"],"names":[],"mappings":"AAAA,cAAc,4BAA4B,CAAC;AAC3C,cAAc,+BAA+B,CAAC"}
package/dest/contract/interfaces/contract_class.d.ts
@@ -120,7 +120,7 @@ interface ContractClassCommitments {
 }
 /** A contract class with its precomputed id. */
 export type ContractClassWithId = ContractClass & Pick<ContractClassCommitments, 'id'>;
-export declare const ContractClassWithIdSchema: z.ZodObject<
+export declare const ContractClassWithIdSchema: z.ZodObject<{
     version: z.ZodLiteral<1>;
     artifactHash: z.ZodType<Fr, any, string>;
     privateFunctions: z.ZodArray<z.ZodObject<{
@@ -146,9 +146,9 @@ export declare const ContractClassWithIdSchema: z.ZodObject<z.objectUtil.extendS
         type: "Buffer";
         data: number[];
     }>]>;
-}
+} & {
     id: z.ZodType<Fr, any, string>;
-}
+}, "strip", z.ZodTypeAny, {
     version: 1;
     id: Fr;
     artifactHash: Fr;
package/dest/epoch-helpers/index.d.ts
@@ -59,4 +59,6 @@ export declare function getProofSubmissionDeadlineEpoch(epochNumber: bigint, con
 * Computed as the start of the given epoch plus the proof submission window.
 */
 export declare function getProofSubmissionDeadlineTimestamp(epochNumber: bigint, constants: Pick<L1RollupConstants, 'l1GenesisTime' | 'slotDuration' | 'epochDuration' | 'proofSubmissionEpochs'>): bigint;
+/** Returns the timestamp to start building a block for a given L2 slot. Computed as the start timestamp of the slot minus one L1 slot duration. */
+export declare function getSlotStartBuildTimestamp(slotNumber: number | bigint, constants: Pick<L1RollupConstants, 'l1GenesisTime' | 'slotDuration' | 'ethereumSlotDuration'>): number;
 //# sourceMappingURL=index.d.ts.map
package/dest/epoch-helpers/index.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/epoch-helpers/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAIxB,MAAM,MAAM,iBAAiB,GAAG;IAC9B,YAAY,EAAE,MAAM,CAAC;IACrB,aAAa,EAAE,MAAM,CAAC;IACtB,YAAY,EAAE,MAAM,CAAC;IACrB,aAAa,EAAE,MAAM,CAAC;IACtB,oBAAoB,EAAE,MAAM,CAAC;IAC7B,qBAAqB,EAAE,MAAM,CAAC;CAC/B,CAAC;AAEF,eAAO,MAAM,sBAAsB,EAAE,iBAOpC,CAAC;AAEF,eAAO,MAAM,uBAAuB;;;;;;;;;;;;;;;;;;;;;EAOE,CAAC;AAEvC,iDAAiD;AACjD,wBAAgB,mBAAmB,CACjC,IAAI,EAAE,MAAM,EACZ,SAAS,EAAE,IAAI,CAAC,iBAAiB,EAAE,eAAe,GAAG,cAAc,CAAC,UAGrE;AAED,qDAAqD;AACrD,wBAAgB,kBAAkB,CAAC,EAAE,EAAE,MAAM,EAAE,SAAS,EAAE,IAAI,CAAC,iBAAiB,EAAE,eAAe,GAAG,cAAc,CAAC,UAElH;AAED,sDAAsD;AACtD,wBAAgB,yBAAyB,CACvC,EAAE,EAAE,MAAM,EACV,SAAS,EAAE,IAAI,CAAC,iBAAiB,EAAE,eAAe,GAAG,cAAc,GAAG,eAAe,CAAC,UAGvF;AAED,iDAAiD;AACjD,wBAAgB,cAAc,CAAC,IAAI,EAAE,MAAM,EAAE,SAAS,EAAE,IAAI,CAAC,iBAAiB,EAAE,eAAe,CAAC,UAE/F;AAED,0EAA0E;AAC1E,wBAAgB,oBAAoB,CAClC,WAAW,EAAE,MAAM,EACnB,SAAS,EAAE,IAAI,CAAC,iBAAiB,EAAE,eAAe,CAAC,GAClD,CAAC,MAAM,EAAE,MAAM,CAAC,CAGlB;AAED;;;GAGG;AACH,wBAAgB,yBAAyB,CACvC,WAAW,EAAE,MAAM,EACnB,SAAS,EAAE,IAAI,CAAC,iBAAiB,EAAE,eAAe,GAAG,cAAc,GAAG,eAAe,GAAG,sBAAsB,CAAC,GAC9G,CAAC,MAAM,EAAE,MAAM,CAAC,CASlB;AAED;;GAEG;AACH,wBAAgB,yBAAyB,CACvC,WAAW,EAAE,MAAM,EACnB,SAAS,EAAE,IAAI,CAAC,iBAAiB,EAAE,eAAe,GAAG,cAAc,GAAG,eAAe,CAAC,UAIvF;AAED;;;GAGG;AACH,wBAAgB,+BAA+B,CAC7C,WAAW,EAAE,MAAM,EACnB,SAAS,EAAE,IAAI,CAAC,iBAAiB,EAAE,uBAAuB,CAAC,UAG5D;AAED;;;GAGG;AACH,wBAAgB,mCAAmC,CACjD,WAAW,EAAE,MAAM,EACnB,SAAS,EAAE,IAAI,CAAC,iBAAiB,EAAE,eAAe,GAAG,cAAc,GAAG,eAAe,GAAG,uBAAuB,CAAC,UAOjH"}
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/epoch-helpers/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAIxB,MAAM,MAAM,iBAAiB,GAAG;IAC9B,YAAY,EAAE,MAAM,CAAC;IACrB,aAAa,EAAE,MAAM,CAAC;IACtB,YAAY,EAAE,MAAM,CAAC;IACrB,aAAa,EAAE,MAAM,CAAC;IACtB,oBAAoB,EAAE,MAAM,CAAC;IAC7B,qBAAqB,EAAE,MAAM,CAAC;CAC/B,CAAC;AAEF,eAAO,MAAM,sBAAsB,EAAE,iBAOpC,CAAC;AAEF,eAAO,MAAM,uBAAuB;;;;;;;;;;;;;;;;;;;;;EAOE,CAAC;AAEvC,iDAAiD;AACjD,wBAAgB,mBAAmB,CACjC,IAAI,EAAE,MAAM,EACZ,SAAS,EAAE,IAAI,CAAC,iBAAiB,EAAE,eAAe,GAAG,cAAc,CAAC,UAGrE;AAED,qDAAqD;AACrD,wBAAgB,kBAAkB,CAAC,EAAE,EAAE,MAAM,EAAE,SAAS,EAAE,IAAI,CAAC,iBAAiB,EAAE,eAAe,GAAG,cAAc,CAAC,UAElH;AAED,sDAAsD;AACtD,wBAAgB,yBAAyB,CACvC,EAAE,EAAE,MAAM,EACV,SAAS,EAAE,IAAI,CAAC,iBAAiB,EAAE,eAAe,GAAG,cAAc,GAAG,eAAe,CAAC,UAGvF;AAED,iDAAiD;AACjD,wBAAgB,cAAc,CAAC,IAAI,EAAE,MAAM,EAAE,SAAS,EAAE,IAAI,CAAC,iBAAiB,EAAE,eAAe,CAAC,UAE/F;AAED,0EAA0E;AAC1E,wBAAgB,oBAAoB,CAClC,WAAW,EAAE,MAAM,EACnB,SAAS,EAAE,IAAI,CAAC,iBAAiB,EAAE,eAAe,CAAC,GAClD,CAAC,MAAM,EAAE,MAAM,CAAC,CAGlB;AAED;;;GAGG;AACH,wBAAgB,yBAAyB,CACvC,WAAW,EAAE,MAAM,EACnB,SAAS,EAAE,IAAI,CAAC,iBAAiB,EAAE,eAAe,GAAG,cAAc,GAAG,eAAe,GAAG,sBAAsB,CAAC,GAC9G,CAAC,MAAM,EAAE,MAAM,CAAC,CASlB;AAED;;GAEG;AACH,wBAAgB,yBAAyB,CACvC,WAAW,EAAE,MAAM,EACnB,SAAS,EAAE,IAAI,CAAC,iBAAiB,EAAE,eAAe,GAAG,cAAc,GAAG,eAAe,CAAC,UAIvF;AAED;;;GAGG;AACH,wBAAgB,+BAA+B,CAC7C,WAAW,EAAE,MAAM,EACnB,SAAS,EAAE,IAAI,CAAC,iBAAiB,EAAE,uBAAuB,CAAC,UAG5D;AAED;;;GAGG;AACH,wBAAgB,mCAAmC,CACjD,WAAW,EAAE,MAAM,EACnB,SAAS,EAAE,IAAI,CAAC,iBAAiB,EAAE,eAAe,GAAG,cAAc,GAAG,eAAe,GAAG,uBAAuB,CAAC,UAOjH;AAED,mJAAmJ;AACnJ,wBAAgB,0BAA0B,CACxC,UAAU,EAAE,MAAM,GAAG,MAAM,EAC3B,SAAS,EAAE,IAAI,CAAC,iBAAiB,EAAE,eAAe,GAAG,cAAc,GAAG,sBAAsB,CAAC,GAC5F,MAAM,CAER"}
package/dest/epoch-helpers/index.js
@@ -68,3 +68,6 @@ export const L1RollupConstantsSchema = z.object({
     const [deadlineSlot] = getSlotRangeForEpoch(deadlineEpoch, constants);
     return getTimestampForSlot(deadlineSlot, constants);
 }
+/** Returns the timestamp to start building a block for a given L2 slot. Computed as the start timestamp of the slot minus one L1 slot duration. */ export function getSlotStartBuildTimestamp(slotNumber, constants) {
+    return Number(constants.l1GenesisTime) + Number(slotNumber) * constants.slotDuration - constants.ethereumSlotDuration;
+}
package/dest/file-store/factory.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"factory.d.ts","sourceRoot":"","sources":["../../src/file-store/factory.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,MAAM,EAAgB,MAAM,uBAAuB,CAAC;AAIlE,OAAO,KAAK,EAAE,SAAS,EAAE,iBAAiB,EAAE,MAAM,gBAAgB,CAAC;
+
{"version":3,"file":"factory.d.ts","sourceRoot":"","sources":["../../src/file-store/factory.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,MAAM,EAAgB,MAAM,uBAAuB,CAAC;AAIlE,OAAO,KAAK,EAAE,SAAS,EAAE,iBAAiB,EAAE,MAAM,gBAAgB,CAAC;AAWnE,wBAAsB,eAAe,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC,SAAS,CAAC,CAAC;AAC3F,wBAAsB,eAAe,CAAC,MAAM,EAAE,SAAS,EAAE,MAAM,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC,SAAS,CAAC,CAAC;AA6C9F,wBAAsB,uBAAuB,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAC3G,wBAAsB,uBAAuB,CAAC,MAAM,EAAE,SAAS,EAAE,MAAM,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC,SAAS,CAAC,CAAC"}
package/dest/file-store/factory.js
@@ -2,8 +2,10 @@ import { createLogger } from '@aztec/foundation/log';
 import { GoogleCloudFileStore } from './gcs.js';
 import { HttpFileStore } from './http.js';
 import { LocalFileStore } from './local.js';
+import { S3FileStore } from './s3.js';
 const supportedExamples = [
     `gs://bucket-name/path/to/store`,
+    `s3://bucket-name/path/to/store`,
     `file:///absolute/local/path/to/store`,
     `https://host/path`
 ];
@@ -30,6 +32,22 @@ export async function createFileStore(config, logger = createLogger('stdlib:file
         } catch {
             throw new Error(`Invalid google cloud store definition: '${config}'.`);
         }
+    } else if (config.startsWith('s3://')) {
+        try {
+            const url = new URL(config);
+            const bucket = url.host;
+            const path = url.pathname.replace(/^\/+/, '');
+            const endpoint = url.searchParams.get('endpoint');
+            const publicBaseUrl = url.searchParams.get('publicBaseUrl') ?? undefined;
+            logger.info(`Creating S3 file store at ${bucket} ${path}`);
+            const store = new S3FileStore(bucket, path, {
+                endpoint: endpoint ?? undefined,
+                publicBaseUrl
+            });
+            return store;
+        } catch {
+            throw new Error(`Invalid S3 store definition: '${config}'.`);
+        }
     } else {
         throw new Error(`Unknown file store config: '${config}'. Supported values are ${supportedExamples.join(', ')}.`);
     }
package/dest/file-store/interface.d.ts
@@ -14,9 +14,15 @@ export type FileStoreSaveOptions = {
 };
 /** Simple file store. */
 export interface FileStore extends ReadOnlyFileStore {
-    /**
+    /**
+     * Saves contents to the given path. Returns an URI that can be used later to `read` the file.
+     * Default: `compress` is false unless explicitly set.
+     */
     save(path: string, data: Buffer, opts?: FileStoreSaveOptions): Promise<string>;
-    /**
+    /**
+     * Uploads contents from a local file. Returns an URI that can be used later to `read` the file.
+     * Default: `compress` is true unless explicitly set to false.
+     */
     upload(destPath: string, srcPath: string, opts?: FileStoreSaveOptions): Promise<string>;
 }
 //# sourceMappingURL=interface.d.ts.map
package/dest/file-store/interface.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"interface.d.ts","sourceRoot":"","sources":["../../src/file-store/interface.ts"],"names":[],"mappings":"AAAA,mCAAmC;AACnC,MAAM,WAAW,iBAAiB;IAChC,iGAAiG;IACjG,IAAI,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC;IACzC,wGAAwG;IACxG,QAAQ,CAAC,YAAY,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IAChE,8DAA8D;IAC9D,MAAM,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC;CAC7C;AAED,MAAM,MAAM,oBAAoB,GAAG;IAAE,MAAM,CAAC,EAAE,OAAO,CAAC;IAAC,QAAQ,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAAC,QAAQ,CAAC,EAAE,OAAO,CAAA;CAAE,CAAC;AAE/G,yBAAyB;AACzB,MAAM,WAAW,SAAU,SAAQ,iBAAiB;IAClD
+
{"version":3,"file":"interface.d.ts","sourceRoot":"","sources":["../../src/file-store/interface.ts"],"names":[],"mappings":"AAAA,mCAAmC;AACnC,MAAM,WAAW,iBAAiB;IAChC,iGAAiG;IACjG,IAAI,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC;IACzC,wGAAwG;IACxG,QAAQ,CAAC,YAAY,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IAChE,8DAA8D;IAC9D,MAAM,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC;CAC7C;AAED,MAAM,MAAM,oBAAoB,GAAG;IAAE,MAAM,CAAC,EAAE,OAAO,CAAC;IAAC,QAAQ,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAAC,QAAQ,CAAC,EAAE,OAAO,CAAA;CAAE,CAAC;AAE/G,yBAAyB;AACzB,MAAM,WAAW,SAAU,SAAQ,iBAAiB;IAClD;;;OAGG;IACH,IAAI,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,oBAAoB,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC;IAC/E;;;OAGG;IACH,MAAM,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,oBAAoB,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC;CACzF"}
package/dest/file-store/s3.d.ts
@@ -0,0 +1,26 @@
+import { type Logger } from '@aztec/foundation/log';
+import type { FileStore, FileStoreSaveOptions } from './interface.js';
+export declare class S3FileStore implements FileStore {
+    private readonly bucketName;
+    private readonly basePath;
+    private readonly log;
+    private readonly s3;
+    private readonly region;
+    private readonly endpoint?;
+    private readonly publicBaseUrl?;
+    constructor(bucketName: string, basePath: string, opts: {
+        endpoint?: string;
+        publicBaseUrl?: string;
+    }, log?: Logger);
+    save(path: string, data: Buffer, opts?: FileStoreSaveOptions): Promise<string>;
+    upload(destPath: string, srcPath: string, opts?: FileStoreSaveOptions): Promise<string>;
+    read(pathOrUrlStr: string): Promise<Buffer>;
+    download(pathOrUrlStr: string, destPath: string): Promise<void>;
+    exists(pathOrUrlStr: string): Promise<boolean>;
+    private extractUserMetadata;
+    private detectContentType;
+    private buildReturnedUrl;
+    private getBucketAndKey;
+    private getFullPath;
+}
+//# sourceMappingURL=s3.d.ts.map
package/dest/file-store/s3.d.ts.map
@@ -0,0 +1 @@
+
{"version":3,"file":"s3.d.ts","sourceRoot":"","sources":["../../src/file-store/s3.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,MAAM,EAAgB,MAAM,uBAAuB,CAAC;AAiBlE,OAAO,KAAK,EAAE,SAAS,EAAE,oBAAoB,EAAE,MAAM,gBAAgB,CAAC;AAMtE,qBAAa,WAAY,YAAW,SAAS;IAOzC,OAAO,CAAC,QAAQ,CAAC,UAAU;IAC3B,OAAO,CAAC,QAAQ,CAAC,QAAQ;IAEzB,OAAO,CAAC,QAAQ,CAAC,GAAG;IATtB,OAAO,CAAC,QAAQ,CAAC,EAAE,CAAW;IAC9B,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAS;IAChC,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAS;IACnC,OAAO,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAS;gBAGrB,UAAU,EAAE,MAAM,EAClB,QAAQ,EAAE,MAAM,EACjC,IAAI,EAAE;QAAE,QAAQ,CAAC,EAAE,MAAM,CAAC;QAAC,aAAa,CAAC,EAAE,MAAM,CAAA;KAAE,EAClC,GAAG,GAAE,MAA6C;IAiBxD,IAAI,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,GAAE,oBAAyB,GAAG,OAAO,CAAC,MAAM,CAAC;IAoBlF,MAAM,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,IAAI,GAAE,oBAAyB,GAAG,OAAO,CAAC,MAAM,CAAC;IA6D3F,IAAI,CAAC,YAAY,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;IAW3C,QAAQ,CAAC,YAAY,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAQ/D,MAAM,CAAC,YAAY,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IAe3D,OAAO,CAAC,mBAAmB;IAQ3B,OAAO,CAAC,iBAAiB;IAsBzB,OAAO,CAAC,gBAAgB;IA0BxB,OAAO,CAAC,eAAe;IA2BvB,OAAO,CAAC,WAAW;CAKpB"}
package/dest/file-store/s3.js
@@ -0,0 +1,252 @@
+import { createLogger } from '@aztec/foundation/log';
+import { GetObjectCommand, HeadObjectCommand, PutObjectCommand, S3Client } from '@aws-sdk/client-s3';
+import { createReadStream, createWriteStream } from 'fs';
+import { mkdir, mkdtemp, stat, unlink } from 'fs/promises';
+import { tmpdir } from 'os';
+import { basename, dirname, join } from 'path';
+import { finished } from 'stream/promises';
+import { createGzip } from 'zlib';
+function normalizeBasePath(path) {
+    return path?.replace(/^\/+|\/+$/g, '') ?? '';
+}
+export class S3FileStore {
+    bucketName;
+    basePath;
+    log;
+    s3;
+    region;
+    endpoint;
+    publicBaseUrl;
+    constructor(bucketName, basePath, opts, log = createLogger('stdlib:s3-file-store')){
+        this.bucketName = bucketName;
+        this.basePath = basePath;
+        this.log = log;
+        this.endpoint = opts.endpoint;
+        this.region = this.endpoint ? 'auto' : process.env.AWS_REGION ?? process.env.AWS_DEFAULT_REGION ?? 'us-east-1';
+        this.publicBaseUrl = opts.publicBaseUrl;
+        const clientOptions = {};
+        if (this.endpoint) {
+            clientOptions.region = 'auto';
+            clientOptions.endpoint = this.endpoint;
+            clientOptions.forcePathStyle = true;
+        } else {
+            clientOptions.region = this.region;
+        }
+        this.s3 = new S3Client(clientOptions);
+    }
+    async save(path, data, opts = {}) {
+        const key = this.getFullPath(path);
+        const shouldCompress = !!opts.compress;
+        const body = shouldCompress ? (await import('zlib')).gzipSync(data) : data;
+        const contentLength = body.length;
+        const contentType = this.detectContentType(key, shouldCompress);
+        const put = new PutObjectCommand({
+            Bucket: this.bucketName,
+            Key: key,
+            Body: body,
+            ContentType: contentType,
+            CacheControl: opts.metadata?.['Cache-control'],
+            Metadata: this.extractUserMetadata(opts.metadata),
+            ContentLength: contentLength
+        });
+        await this.s3.send(put);
+        return this.buildReturnedUrl(key, !!opts.public);
+    }
+    async upload(destPath, srcPath, opts = {}) {
+        const key = this.getFullPath(destPath);
+        const shouldCompress = opts.compress !== false; // default true like GCS impl
+        await mkdir(dirname(srcPath), {
+            recursive: true
+        }).catch(()=>undefined);
+        let contentLength;
+        let bodyPath = srcPath;
+        // We don't set Content-Encoding and we avoid SigV4 streaming (aws-chunked).
+        // With AWS SigV4 streaming uploads (Content-Encoding: aws-chunked[,gzip]), servers require
+        // x-amz-decoded-content-length (the size of the decoded payload) and an exact Content-Length
+        // that includes chunk metadata. For on-the-fly compression, providing
+        // those values without buffering or a pre-pass is impractical. Instead, we pre-gzip to a temp file
+        // to know ContentLength up-front and upload the gzipped bytes as-is, omitting Content-Encoding.
+        // Reference: AWS SigV4 streaming (chunked upload) requirements —
+        // https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-streaming.html
+        if (shouldCompress) {
+            // Pre-gzip to a temp file so we know the exact length for R2/S3 headers
+            const tmpDir = await mkdtemp(join(tmpdir(), 's3-upload-'));
+            const gzPath = join(tmpDir, `${basename(srcPath)}.gz`);
+            const source = createReadStream(srcPath);
+            const gz = createGzip();
+            const out = createWriteStream(gzPath);
+            try {
+                await finished(source.pipe(gz).pipe(out));
+                const st = await stat(gzPath);
+                contentLength = st.size;
+                bodyPath = gzPath;
+            } catch (err) {
+                // Ensure temp file is removed on failure
+                await unlink(gzPath).catch(()=>undefined);
+                throw err;
+            }
+        } else {
+            const st = await stat(srcPath);
+            contentLength = st.size;
+            bodyPath = srcPath;
+        }
+        const bodyStream = createReadStream(bodyPath);
+        const contentType = this.detectContentType(key, shouldCompress);
+        try {
+            const put = new PutObjectCommand({
+                Bucket: this.bucketName,
+                Key: key,
+                Body: bodyStream,
+                ContentType: contentType,
+                CacheControl: opts.metadata?.['Cache-control'],
+                Metadata: this.extractUserMetadata(opts.metadata),
+                // Explicitly set ContentLength so R2 can compute x-amz-decoded-content-length correctly
+                ContentLength: contentLength
+            });
+            await this.s3.send(put);
+        } finally{
+            if (shouldCompress && bodyPath !== srcPath) {
+                await unlink(bodyPath).catch(()=>undefined);
+            }
+        }
+        return this.buildReturnedUrl(key, !!opts.public);
+    }
+    async read(pathOrUrlStr) {
+        const { bucket, key } = this.getBucketAndKey(pathOrUrlStr);
+        const out = await this.s3.send(new GetObjectCommand({
+            Bucket: bucket,
+            Key: key
+        }));
+        const stream = out.Body;
+        const chunks = [];
+        for await (const chunk of stream){
+            chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
+        }
+        return Buffer.concat(chunks);
+    }
+    async download(pathOrUrlStr, destPath) {
+        const { bucket, key } = this.getBucketAndKey(pathOrUrlStr);
+        const out = await this.s3.send(new GetObjectCommand({
+            Bucket: bucket,
+            Key: key
+        }));
+        await mkdir(dirname(destPath), {
+            recursive: true
+        });
+        const write = createWriteStream(destPath);
+        await finished(out.Body.pipe(write));
+    }
+    async exists(pathOrUrlStr) {
+        try {
+            const { bucket, key } = this.getBucketAndKey(pathOrUrlStr);
+            await this.s3.send(new HeadObjectCommand({
+                Bucket: bucket,
+                Key: key
+            }));
+            return true;
+        } catch (err) {
+            const code = err?.$metadata?.httpStatusCode ?? err?.name ?? err?.Code;
+            if (code === 404 || code === 'NotFound' || code === 'NoSuchKey') {
+                return false;
+            }
+            this.log.warn(`Error checking existence for ${pathOrUrlStr}: ${err?.message ?? String(err)}`);
+            return false;
+        }
+    }
+    extractUserMetadata(meta) {
+        if (!meta) {
+            return undefined;
+        }
+        const { ['Cache-control']: _ignored, ...rest } = meta;
+        return Object.keys(rest).length ? rest : undefined;
+    }
+    detectContentType(key, isCompressed) {
+        // Basic content type inference
+        const lower = key.toLowerCase();
+        if (lower.endsWith('.json') || lower.endsWith('.json.gz')) {
+            return 'application/json';
+        }
+        if (lower.endsWith('.txt') || lower.endsWith('.log') || lower.endsWith('.csv') || lower.endsWith('.md')) {
+            return 'text/plain; charset=utf-8';
+        }
+        if (lower.endsWith('.db') || lower.endsWith('.sqlite') || lower.endsWith('.bin')) {
+            return 'application/octet-stream';
+        }
+        if (lower.endsWith('.wasm') || lower.endsWith('.wasm.gz')) {
+            return 'application/wasm';
+        }
+        // If compressed, prefer octet-stream unless known
+        if (isCompressed) {
+            return 'application/octet-stream';
+        }
+        return undefined;
+    }
+    buildReturnedUrl(key, makePublic) {
+        if (!makePublic) {
+            return `s3://${this.bucketName}/${key}`;
+        }
+        if (this.publicBaseUrl) {
+            const base = this.publicBaseUrl.replace(/\/$/, '');
+            // key already includes basePath via getFullPath, so do not prefix basePath again
+            const full = key.replace(/^\/+/, '');
+            return `${base}/${full}`;
+        }
+        // Try to synthesize a URL from endpoint if available (works for public R2 buckets)
+        if (this.endpoint) {
+            try {
+                const url = new URL(this.endpoint);
+                return `https://${this.bucketName}.${url.host}/${key}`;
+            } catch {
+                // fallthrough
+            }
+        }
+        // Fallback to AWS style URL if region looks valid
+        return `https://${this.bucketName}.s3.${this.region}.amazonaws.com/${key}`;
+    }
+    getBucketAndKey(pathOrUrlStr) {
+        if (URL.canParse(pathOrUrlStr)) {
+            const url = new URL(pathOrUrlStr);
+            if (url.protocol === 's3:') {
+                return {
+                    bucket: url.host,
+                    key: url.pathname.replace(/^\/+/, '')
+                };
+            }
+            // For https URLs, try to infer virtual-hosted or path-style
+            if (url.protocol === 'https:' || url.protocol === 'http:') {
+                // If the URL matches the configured publicBaseUrl host, map back to our bucket and key
+                if (this.publicBaseUrl && url.host === new URL(this.publicBaseUrl).host) {
+                    return {
+                        bucket: this.bucketName,
+                        key: url.pathname.replace(/^\/+/, '')
+                    };
+                }
+                const hostParts = url.host.split('.');
+                if (hostParts.length > 3 && (hostParts[1] === 's3' || hostParts[hostParts.length - 2] === 'r2')) {
+                    // virtual hosted
+                    return {
+                        bucket: hostParts[0],
+                        key: url.pathname.replace(/^\/+/, '')
+                    };
+                } else if (this.endpoint && url.host === new URL(this.endpoint).host) {
+                    // path-style at custom endpoint
+                    const [bucket, ...rest] = url.pathname.replace(/^\/+/, '').split('/');
+                    return {
+                        bucket,
+                        key: rest.join('/')
+                    };
+                }
+            }
+        }
+        // Treat as path
+        return {
+            bucket: this.bucketName,
+            key: this.getFullPath(pathOrUrlStr)
+        };
+    }
+    getFullPath(path) {
+        const base = normalizeBasePath(this.basePath);
+        const rel = path.replace(/^\/+/, '');
+        return base ? join(base, rel) : rel;
+    }
+}