@ethersphere/bee-js 10.2.0 → 10.4.0

package/README.md CHANGED
@@ -200,7 +200,7 @@ The `toString` method uses `toHex`.
  | `pssSend` | `POST /pss/send/:topic/:target` [🔗](https://docs.ethswarm.org/api/#tag/Postal-Service-for-Swarm/paths/~1pss~1send~1%7Btopic%7D~1%7Btargets%7D/post) | ❌✅✅ |
  | `pssSubscribe` _Websocket_ | `GET /pss/subscribe/:topic` [🔗](https://docs.ethswarm.org/api/#tag/Postal-Service-for-Swarm/paths/~1pss~1subscribe~1%7Btopic%7D/get) | ❌❌✅ |
  | `pssReceive` | `GET /pss/subscribe/:topic` [🔗](https://docs.ethswarm.org/api/#tag/Postal-Service-for-Swarm/paths/~1pss~1subscribe~1%7Btopic%7D/get) | ❌❌✅ |
- | `getAllPostageBatch` | `GET /stamps` [🔗](https://docs.ethswarm.org/api/#tag/Postage-Stamps/paths/~1stamps/get) | ❌✅✅ |
+ | `getPostageBatches` | `GET /stamps` [🔗](https://docs.ethswarm.org/api/#tag/Postage-Stamps/paths/~1stamps/get) | ❌✅✅ |
  | `getGlobalPostageBatches` | `GET /batches` [🔗](https://docs.ethswarm.org/api/#tag/Postage-Stamps/paths/~1batches/get) | ❌✅✅ |
  | `getPostageBatch` | `GET /stamps/:batchId` [🔗](https://docs.ethswarm.org/api/#tag/Postage-Stamps/paths/~1stamps~1%7Bbatch_id%7D/get) | ❌✅✅ |
  | `getPostageBatchBuckets` | `GET /stamps/:batchId/buckets` [🔗](https://docs.ethswarm.org/api/#tag/Postage-Stamps/paths/~1stamps~1%7Bbatch_id%7D~1buckets/get) | ❌✅✅ |
@@ -266,7 +266,7 @@ async function getOrCreatePostageBatch() {
  const bee = new Bee('http://localhost:1633')
  let batchId

- const batches = await bee.getAllPostageBatch()
+ const batches = await bee.getPostageBatches()
  const usable = batches.find(x => x.usable)

  if (usable) {
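Only the public method name changes here; the call and return shape are the same, so migration is a rename at each call site. Note that the internal module function further down in this diff keeps its old plural name `getAllPostageBatches`. A minimal before/after:

```ts
// 10.2.0
// const batches = await bee.getAllPostageBatch()

// 10.4.0
const batches = await bee.getPostageBatches()
```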
package/dist/cjs/bee.js CHANGED
@@ -184,7 +184,7 @@ class Bee {
  * Chunks uploaded with this method should be retrieved with the {@link downloadChunk} method.
  *
  * @param stamp Postage Batch ID or an Envelope created with the {@link createEnvelope} method.
- * @param data Raw chunk to be uploaded
+ * @param data Raw chunk to be uploaded (Content Addressed Chunk or Single Owner Chunk)
  * @param options Additional options like tag, encryption, pinning, content-type and request options
  * @param requestOptions Options for making requests, such as timeouts, custom HTTP agents, headers, etc.
  *
@@ -193,6 +193,7 @@ class Bee {
  * @see [Bee API reference - `POST /chunks`](https://docs.ethswarm.org/api/#tag/Chunk/paths/~1chunks/post)
  */
  async uploadChunk(stamp, data, options, requestOptions) {
+ const isSOC = 'identifier' in data && 'signature' in data && 'owner' in data;
  data = data instanceof Uint8Array ? data : data.data;
  if (options) {
  options = (0, type_1.prepareUploadOptions)(options);
@@ -200,8 +201,11 @@
  if (data.length < typed_bytes_1.Span.LENGTH) {
  throw new error_1.BeeArgumentError(`Chunk has to have size of at least ${typed_bytes_1.Span.LENGTH}.`, data);
  }
- if (data.length > types_1.CHUNK_SIZE + typed_bytes_1.Span.LENGTH) {
- throw new error_1.BeeArgumentError(`Chunk has to have size of at most ${types_1.CHUNK_SIZE + typed_bytes_1.Span.LENGTH}.`, data);
+ if (!isSOC && data.length > types_1.CHUNK_SIZE + typed_bytes_1.Span.LENGTH) {
+ throw new error_1.BeeArgumentError(`Content Addressed Chunk must not exceed ${types_1.CHUNK_SIZE + typed_bytes_1.Span.LENGTH} bytes.`, data);
+ }
+ if (isSOC && data.length > types_1.CHUNK_SIZE + typed_bytes_1.Span.LENGTH + typed_bytes_1.Signature.LENGTH + typed_bytes_1.Identifier.LENGTH) {
+ throw new error_1.BeeArgumentError(`Single Owner Chunk must not exceed ${types_1.CHUNK_SIZE + typed_bytes_1.Span.LENGTH + typed_bytes_1.Signature.LENGTH + typed_bytes_1.Identifier.LENGTH} bytes.`, data);
  }
  return chunk.upload(this.getRequestOptionsForCall(requestOptions), data, stamp, options);
  }
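`uploadChunk` now duck-types Single Owner Chunks and applies a separate size ceiling to them. A quick sketch of the arithmetic, assuming the usual Swarm field lengths (span 8, signature 65, identifier 32 bytes) behind the `LENGTH` constants referenced above:

```ts
// Assumed constants, mirroring the typed-bytes LENGTH fields used in the diff.
const CHUNK_SIZE = 4096      // maximum chunk payload
const SPAN_LENGTH = 8        // little-endian uint64 length prefix
const SIGNATURE_LENGTH = 65  // secp256k1 signature
const IDENTIFIER_LENGTH = 32 // SOC identifier

// CAC wire format: span || payload
const maxCac = CHUNK_SIZE + SPAN_LENGTH // 4104 bytes

// SOC wire format: identifier || signature || span || payload
const maxSoc = maxCac + SIGNATURE_LENGTH + IDENTIFIER_LENGTH // 4201 bytes
```

In 10.2.0 a marshalled SOC tripped the single 4104-byte check; the extra branch lets full-size SOCs through.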
@@ -837,7 +841,7 @@ class Bee {
  signer = new typed_bytes_1.PrivateKey(signer);
  identifier = new typed_bytes_1.Identifier(identifier);
  const cac = (0, cac_1.makeContentAddressedChunk)(data);
- const soc = (0, soc_1.makeSingleOwnerChunk)(cac, identifier, signer);
+ const soc = cac.toSingleOwnerChunk(identifier, signer);
  return gsoc.send(this.getRequestOptionsForCall(requestOptions), soc, postageBatchId, options);
  }
  /**
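The GSOC send path shows the new chunk API in miniature: instead of passing a CAC object into `makeSingleOwnerChunk`, the CAC itself now exposes `toSingleOwnerChunk`. A minimal sketch of the same chaining from user code, assuming `Identifier` and `PrivateKey` are exported from the package root and using placeholder values:

```ts
import { Bee, Identifier, PrivateKey } from '@ethersphere/bee-js'

const bee = new Bee('http://localhost:1633')
const signer = new PrivateKey('1'.repeat(64))         // placeholder key, illustration only
const identifier = new Identifier(new Uint8Array(32)) // zero identifier, illustration only

const cac = bee.makeContentAddressedChunk('hello swarm')
const soc = cac.toSingleOwnerChunk(identifier, signer)
```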
@@ -968,6 +972,70 @@
  owner = new typed_bytes_1.EthAddress(owner);
  return (0, feed_2.fetchLatestFeedUpdate)(this.getRequestOptionsForCall(requestOptions), owner, topic);
  }
+ /**
+ * Creates a Content Addressed Chunk.
+ *
+ * To be uploaded with the {@link uploadChunk} method.
+ *
+ * Payload size must be between 1 and 4096 bytes.
+ *
+ * @param rawPayload Data to be stored in the chunk. If the data is a string, it will be converted to UTF-8 bytes.
+ * @param span Optional span for the chunk. If not provided, it will be set to the length of the payload.
+ *
+ * @example
+ *
+ */
+ makeContentAddressedChunk(rawPayload, span) {
+ return (0, cac_1.makeContentAddressedChunk)(rawPayload, span);
+ }
+ /**
+ * Attempts to unmarshal arbitrary data into a Content Addressed Chunk.
+ * Throws an error if the data is not a valid CAC.
+ *
+ * @param data The chunk data (`span` and `payload`)
+ */
+ unmarshalContentAddressedChunk(data) {
+ return (0, cac_1.unmarshalContentAddressedChunk)(data);
+ }
+ /**
+ * Creates a Single Owner Chunk.
+ *
+ * To be uploaded with the {@link uploadChunk} method.
+ *
+ * Identical to chaining `makeContentAddressedChunk` and `toSingleOwnerChunk`.
+ *
+ * Payload size must be between 1 and 4096 bytes.
+ *
+ * @param address Address of the Content Addressed Chunk
+ * @param span Span of the Content Addressed Chunk
+ * @param payload Payload of the Content Addressed Chunk
+ * @param identifier The identifier of the chunk
+ * @param signer The signer interface for signing the chunk
+ */
+ makeSingleOwnerChunk(address, span, payload, identifier, signer) {
+ return (0, soc_1.makeSingleOwnerChunk)(address, span, payload, identifier, signer);
+ }
+ /**
+ * Calculates the address of a Single Owner Chunk based on its identifier and owner address.
+ *
+ * @param identifier
+ * @param address
+ */
+ calculateSingleOwnerChunkAddress(identifier, address) {
+ return (0, soc_1.makeSOCAddress)(identifier, address);
+ }
+ /**
+ * Attempts to unmarshal arbitrary data into a Single Owner Chunk.
+ * Throws an error if the data is not a valid SOC.
+ *
+ * @param data The chunk data
+ * @param address The address of the single owner chunk
+ *
+ * @returns a single owner chunk or throws error
+ */
+ unmarshalSingleOwnerChunk(data, address) {
+ return (0, soc_1.unmarshalSingleOwnerChunk)(data, address);
+ }
  /**
  * Returns an object for reading single owner chunks
  *
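These new instance methods expose the chunk primitives without importing internal modules. A minimal round-trip sketch under the new API (the batch ID is a placeholder and a running node is assumed; the exact return type of `downloadChunk` is not shown in this diff):

```ts
import { Bee } from '@ethersphere/bee-js'

async function roundTrip() {
  const bee = new Bee('http://localhost:1633')
  const postageBatchId = '0'.repeat(64) // placeholder batch ID

  // Build a CAC and upload its marshalled bytes.
  const cac = bee.makeContentAddressedChunk('hello swarm')
  await bee.uploadChunk(postageBatchId, cac.data)

  // Download and re-validate: unmarshalling recomputes the BMT address.
  const raw = await bee.downloadChunk(cac.address)
  return bee.unmarshalContentAddressedChunk(raw)
}
```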
@@ -1550,7 +1618,9 @@
  const blockTime = this.network === 'gnosis' ? 5 : 15;
  const additionalAmount = (0, stamps_1.getAmountForDuration)(duration, chainState.currentPrice, blockTime);
  const currentAmount = (0, stamps_1.getAmountForDuration)(batch.duration, chainState.currentPrice, blockTime);
- const targetAmount = duration.isZero() ? currentAmount * multiplier : currentAmount + additionalAmount * multiplier;
+ const targetAmount = duration.isZero()
+ ? currentAmount * multiplier
+ : (currentAmount + additionalAmount) * multiplier;
  const amountDelta = targetAmount - currentAmount;
  const transactionId = await this.topUpBatch(batch.batchID, amountDelta, requestOptions);
  if (depthDelta > 0) {
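The reformatted ternary also fixes operator precedence: in 10.2.0 the multiplier applied only to `additionalAmount`, so extending a batch under-bought relative to the intended buffer. With illustrative numbers (not from the source):

```ts
const currentAmount = 1_000n
const additionalAmount = 500n
const multiplier = 2n

// 10.2.0: multiplier binds to additionalAmount only
const before = currentAmount + additionalAmount * multiplier  // 2000n

// 10.4.0: multiplier applies to the whole target
const after = (currentAmount + additionalAmount) * multiplier // 3000n
```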
@@ -1,46 +1,41 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.asContentAddressedChunk = exports.makeContentAddressedChunk = exports.MAX_PAYLOAD_SIZE = exports.MIN_PAYLOAD_SIZE = void 0;
+ exports.makeContentAddressedChunk = exports.unmarshalContentAddressedChunk = exports.MAX_PAYLOAD_SIZE = exports.MIN_PAYLOAD_SIZE = void 0;
  const cafe_utility_1 = require("cafe-utility");
  const bytes_1 = require("../utils/bytes");
  const typed_bytes_1 = require("../utils/typed-bytes");
  const bmt_1 = require("./bmt");
+ const soc_1 = require("./soc");
  exports.MIN_PAYLOAD_SIZE = 1;
  exports.MAX_PAYLOAD_SIZE = 4096;
- const ENCODER = new TextEncoder();
- /**
- * Creates a content addressed chunk and verifies the payload size.
- *
- * @param payloadBytes the data to be stored in the chunk
- */
- function makeContentAddressedChunk(payloadBytes) {
- if (!(payloadBytes instanceof Uint8Array)) {
- payloadBytes = ENCODER.encode(payloadBytes);
+ function unmarshalContentAddressedChunk(data) {
+ data = new bytes_1.Bytes(data);
+ return makeContentAddressedChunk(data.toUint8Array().slice(typed_bytes_1.Span.LENGTH), typed_bytes_1.Span.fromSlice(data.toUint8Array(), 0));
+ }
+ exports.unmarshalContentAddressedChunk = unmarshalContentAddressedChunk;
+ function makeContentAddressedChunk(rawPayload, span) {
+ if (cafe_utility_1.Types.isString(rawPayload)) {
+ rawPayload = bytes_1.Bytes.fromUtf8(rawPayload);
  }
- if (payloadBytes.length < exports.MIN_PAYLOAD_SIZE || payloadBytes.length > exports.MAX_PAYLOAD_SIZE) {
- throw new RangeError(`payload size ${payloadBytes.length} exceeds limits [${exports.MIN_PAYLOAD_SIZE}, ${exports.MAX_PAYLOAD_SIZE}]`);
+ if (rawPayload.length < exports.MIN_PAYLOAD_SIZE || rawPayload.length > exports.MAX_PAYLOAD_SIZE) {
+ throw new RangeError(`payload size ${rawPayload.length} exceeds limits [${exports.MIN_PAYLOAD_SIZE}, ${exports.MAX_PAYLOAD_SIZE}]`);
  }
- const span = typed_bytes_1.Span.fromBigInt(BigInt(payloadBytes.length));
- const data = cafe_utility_1.Binary.concatBytes(span.toUint8Array(), payloadBytes);
+ const typedSpan = span
+ ? typeof span === 'bigint'
+ ? typed_bytes_1.Span.fromBigInt(span)
+ : span
+ : typed_bytes_1.Span.fromBigInt(BigInt(rawPayload.length));
+ const payload = new bytes_1.Bytes(rawPayload);
+ const data = cafe_utility_1.Binary.concatBytes(typedSpan.toUint8Array(), payload.toUint8Array());
+ const address = (0, bmt_1.calculateChunkAddress)(data);
  return {
  data,
- span,
- payload: bytes_1.Bytes.fromSlice(data, typed_bytes_1.Span.LENGTH),
- address: (0, bmt_1.calculateChunkAddress)(data),
+ span: typedSpan,
+ payload,
+ address,
+ toSingleOwnerChunk: (identifier, signer) => {
+ return (0, soc_1.makeSingleOwnerChunk)(address, typedSpan, payload, identifier, signer);
+ },
  };
  }
  exports.makeContentAddressedChunk = makeContentAddressedChunk;
- function asContentAddressedChunk(chunkBytes) {
- if (chunkBytes.length < exports.MIN_PAYLOAD_SIZE + typed_bytes_1.Span.LENGTH || chunkBytes.length > exports.MAX_PAYLOAD_SIZE + typed_bytes_1.Span.LENGTH) {
- throw new RangeError(`chunk size ${chunkBytes.length} exceeds limits [${exports.MIN_PAYLOAD_SIZE + typed_bytes_1.Span.LENGTH}, ${typed_bytes_1.Span.LENGTH}]`);
- }
- const span = typed_bytes_1.Span.fromSlice(chunkBytes, 0);
- const data = cafe_utility_1.Binary.concatBytes(span.toUint8Array(), chunkBytes.slice(typed_bytes_1.Span.LENGTH));
- return {
- data,
- span,
- payload: bytes_1.Bytes.fromSlice(data, typed_bytes_1.Span.LENGTH),
- address: (0, bmt_1.calculateChunkAddress)(data),
- };
- }
- exports.asContentAddressedChunk = asContentAddressedChunk;
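`asContentAddressedChunk` is gone; its replacement `unmarshalContentAddressedChunk` splits marshalled bytes at the 8-byte span boundary and rebuilds the chunk through `makeContentAddressedChunk`, which re-derives the BMT address. A round-trip sketch using the `Bee` wrappers shown earlier (no node needed for these two calls):

```ts
import { Bee } from '@ethersphere/bee-js'

const bee = new Bee('http://localhost:1633')

const original = bee.makeContentAddressedChunk('hello swarm')
// original.data is the 8-byte span followed by the payload
const restored = bee.unmarshalContentAddressedChunk(original.data)

console.log(original.address.toHex() === restored.address.toHex()) // true
```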
@@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
  return result;
  };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.downloadSingleOwnerChunk = exports.uploadSingleOwnerChunkWithWrappedChunk = exports.uploadSingleOwnerChunkData = exports.uploadSingleOwnerChunk = exports.makeSingleOwnerChunk = exports.makeSOCAddress = exports.makeSingleOwnerChunkFromData = void 0;
+ exports.downloadSingleOwnerChunk = exports.uploadSingleOwnerChunkWithWrappedChunk = exports.uploadSingleOwnerChunkData = exports.uploadSingleOwnerChunk = exports.makeSingleOwnerChunk = exports.makeSOCAddress = exports.unmarshalSingleOwnerChunk = void 0;
  const cafe_utility_1 = require("cafe-utility");
  const chunkAPI = __importStar(require("../modules/chunk"));
  const socAPI = __importStar(require("../modules/soc"));
@@ -33,8 +33,8 @@ const typed_bytes_1 = require("../utils/typed-bytes");
  const bmt_1 = require("./bmt");
  const cac_1 = require("./cac");
  const SOC_SIGNATURE_OFFSET = typed_bytes_1.Identifier.LENGTH;
- const SOC_SPAN_OFFSET = SOC_SIGNATURE_OFFSET + typed_bytes_1.Signature.LENGTH;
- const SOC_PAYLOAD_OFFSET = SOC_SPAN_OFFSET + typed_bytes_1.Span.LENGTH;
+ const SOC_SPAN_OFFSET = typed_bytes_1.Identifier.LENGTH + typed_bytes_1.Signature.LENGTH;
+ const SOC_PAYLOAD_OFFSET = typed_bytes_1.Identifier.LENGTH + typed_bytes_1.Signature.LENGTH + typed_bytes_1.Span.LENGTH;
  function recoverChunkOwner(data) {
  const cacData = data.slice(SOC_SPAN_OFFSET);
  const chunkAddress = (0, bmt_1.calculateChunkAddress)(cacData);
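The offsets are now spelled out rather than derived from one another, but the wire layout is unchanged. Assuming the usual lengths (identifier 32, signature 65, span 8 bytes), the regions are:

```ts
// SOC wire layout implied by the offsets above (byte ranges, end exclusive):
const SOC_SIGNATURE_OFFSET = 32   // identifier:  0..32
const SOC_SPAN_OFFSET = 32 + 65   // signature:  32..97
const SOC_PAYLOAD_OFFSET = 97 + 8 // span:       97..105, payload: 105..4201 max
```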
@@ -45,21 +45,22 @@ function recoverChunkOwner(data) {
  return ownerAddress;
  }
  /**
- * Verifies if the data is a valid single owner chunk
+ * Unmarshals arbitrary data into a Single Owner Chunk.
+ * Throws an error if the data is not a valid SOC.
  *
  * @param data The chunk data
  * @param address The address of the single owner chunk
  *
  * @returns a single owner chunk or throws error
  */
- function makeSingleOwnerChunkFromData(data, address) {
+ function unmarshalSingleOwnerChunk(data, address) {
  data = data instanceof bytes_1.Bytes ? data.toUint8Array() : data;
  address = new typed_bytes_1.Reference(address);
  const ownerAddress = recoverChunkOwner(data);
  const identifier = bytes_1.Bytes.fromSlice(data, 0, typed_bytes_1.Identifier.LENGTH);
  const socAddress = new typed_bytes_1.Reference(cafe_utility_1.Binary.keccak256(cafe_utility_1.Binary.concatBytes(identifier.toUint8Array(), ownerAddress.toUint8Array())));
  if (!cafe_utility_1.Binary.equals(address.toUint8Array(), socAddress.toUint8Array())) {
- throw new error_1.BeeError('SOC Data does not match given address!');
+ throw new error_1.BeeError('SOC data does not match given address!');
  }
  const signature = typed_bytes_1.Signature.fromSlice(data, SOC_SIGNATURE_OFFSET);
  const span = typed_bytes_1.Span.fromSlice(data, SOC_SPAN_OFFSET);
@@ -74,7 +75,7 @@ function makeSingleOwnerChunkFromData(data, address) {
  owner: ownerAddress,
  };
  }
- exports.makeSingleOwnerChunkFromData = makeSingleOwnerChunkFromData;
+ exports.unmarshalSingleOwnerChunk = unmarshalSingleOwnerChunk;
  function makeSOCAddress(identifier, address) {
  return new typed_bytes_1.Reference(cafe_utility_1.Binary.keccak256(cafe_utility_1.Binary.concatBytes(identifier.toUint8Array(), address.toUint8Array())));
  }
@@ -86,21 +87,19 @@ exports.makeSOCAddress = makeSOCAddress;
  * @param identifier The identifier of the chunk
  * @param signer The signer interface for signing the chunk
  */
- function makeSingleOwnerChunk(chunk, identifier, signer) {
+ function makeSingleOwnerChunk(address, span, payload, identifier, signer) {
  identifier = new typed_bytes_1.Identifier(identifier);
  signer = new typed_bytes_1.PrivateKey(signer);
- const address = makeSOCAddress(identifier, signer.publicKey().address());
- const signature = signer.sign(cafe_utility_1.Binary.concatBytes(identifier.toUint8Array(), chunk.address.toUint8Array()));
- const data = cafe_utility_1.Binary.concatBytes(identifier.toUint8Array(), signature.toUint8Array(), chunk.data);
- const span = typed_bytes_1.Span.fromSlice(chunk.data, 0);
- const payload = bytes_1.Bytes.fromSlice(chunk.data, typed_bytes_1.Span.LENGTH);
+ const socAddress = makeSOCAddress(identifier, signer.publicKey().address());
+ const signature = signer.sign(cafe_utility_1.Binary.concatBytes(identifier.toUint8Array(), address.toUint8Array()));
+ const data = cafe_utility_1.Binary.concatBytes(identifier.toUint8Array(), signature.toUint8Array(), span.toUint8Array(), payload.toUint8Array());
  return {
  data,
  identifier,
  signature,
  span,
  payload,
- address,
+ address: socAddress,
  owner: signer.publicKey().address(),
  };
  }
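`makeSingleOwnerChunk` no longer receives a whole chunk object; callers pass the CAC's `address`, `span`, and `payload` separately, which is exactly what `toSingleOwnerChunk` forwards. A hedged migration sketch, continuing the earlier example (`bee`, `cac`, `identifier`, `signer` as above):

```ts
// 10.2.0 (removed): makeSingleOwnerChunk(cac, identifier, signer)

// 10.4.0: pass the CAC's parts explicitly
const socA = bee.makeSingleOwnerChunk(cac.address, cac.span, cac.payload, identifier, signer)
// or let the chunk forward them itself
const socB = cac.toSingleOwnerChunk(identifier, signer)
```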
@@ -134,14 +133,14 @@ async function uploadSingleOwnerChunkData(requestOptions, signer, stamp, identif
  signer = new typed_bytes_1.PrivateKey(signer);
  identifier = new typed_bytes_1.Identifier(identifier);
  const cac = (0, cac_1.makeContentAddressedChunk)(data);
- const soc = makeSingleOwnerChunk(cac, identifier, signer);
+ const soc = cac.toSingleOwnerChunk(identifier, signer);
  return uploadSingleOwnerChunk(requestOptions, soc, stamp, options);
  }
  exports.uploadSingleOwnerChunkData = uploadSingleOwnerChunkData;
- async function uploadSingleOwnerChunkWithWrappedChunk(requestOptions, signer, stamp, identifier, rootChunk, options) {
+ async function uploadSingleOwnerChunkWithWrappedChunk(requestOptions, signer, stamp, identifier, wrappedChunk, options) {
  signer = new typed_bytes_1.PrivateKey(signer);
  identifier = new typed_bytes_1.Identifier(identifier);
- const soc = makeSingleOwnerChunk((0, cac_1.asContentAddressedChunk)(rootChunk), identifier, signer);
+ const soc = wrappedChunk.toSingleOwnerChunk(identifier, signer);
  return uploadSingleOwnerChunk(requestOptions, soc, stamp, options);
  }
  exports.uploadSingleOwnerChunkWithWrappedChunk = uploadSingleOwnerChunkWithWrappedChunk;
@@ -157,6 +156,6 @@ async function downloadSingleOwnerChunk(requestOptions, ownerAddress, identifier
  ownerAddress = new typed_bytes_1.EthAddress(ownerAddress);
  const address = makeSOCAddress(identifier, ownerAddress);
  const cac = await chunkAPI.download(requestOptions, address.toHex());
- return makeSingleOwnerChunkFromData(cac, address);
+ return unmarshalSingleOwnerChunk(cac, address);
  }
  exports.downloadSingleOwnerChunk = downloadSingleOwnerChunk;
@@ -69,7 +69,7 @@ async function updateFeedWithPayload(requestOptions, signer, topic, data, postag
  const identifier = (0, identifier_1.makeFeedIdentifier)(topic, nextIndex);
  if (data.length > 4096) {
  const uploadResult = await bytes.upload(requestOptions, data, postageBatchId, options);
- const rootChunk = await chunkAPI.download(requestOptions, uploadResult.reference);
+ const rootChunk = (0, cac_1.unmarshalContentAddressedChunk)(await chunkAPI.download(requestOptions, uploadResult.reference));
  return (0, soc_1.uploadSingleOwnerChunkWithWrappedChunk)(requestOptions, signer, postageBatchId, identifier, rootChunk, options);
  }
  return (0, soc_1.uploadSingleOwnerChunkData)(requestOptions, signer, postageBatchId, identifier, cafe_utility_1.Types.isString(data) ? bytes_1.Bytes.fromUtf8(data).toUint8Array() : data, options);
@@ -84,7 +84,7 @@ async function downloadFeedUpdate(requestOptions, owner, topic, index, hasTimest
  index = typeof index === 'number' ? typed_bytes_1.FeedIndex.fromBigInt(BigInt(index)) : index;
  const address = getFeedUpdateChunkReference(owner, topic, index);
  const data = await chunkAPI.download(requestOptions, address.toHex());
- const soc = (0, soc_1.makeSingleOwnerChunkFromData)(data, address);
+ const soc = (0, soc_1.unmarshalSingleOwnerChunk)(data, address);
  let timestamp = cafe_utility_1.Optional.empty();
  if (hasTimestamp) {
  const timestampBytes = bytes_1.Bytes.fromSlice(soc.payload.toUint8Array(), TIMESTAMP_PAYLOAD_OFFSET, TIMESTAMP_PAYLOAD_SIZE);
@@ -100,7 +100,8 @@ async function downloadFeedUpdateAsCAC(requestOptions, owner, topic, index) {
  index = typeof index === 'number' ? typed_bytes_1.FeedIndex.fromBigInt(BigInt(index)) : index;
  const address = getFeedUpdateChunkReference(owner, topic, index);
  const data = await chunkAPI.download(requestOptions, address);
- return (0, cac_1.asContentAddressedChunk)(data.slice(typed_bytes_1.Identifier.LENGTH + typed_bytes_1.Signature.LENGTH));
+ const soc = (0, soc_1.unmarshalSingleOwnerChunk)(data, address);
+ return (0, cac_1.makeContentAddressedChunk)(soc.payload, soc.span);
  }
  exports.downloadFeedUpdateAsCAC = downloadFeedUpdateAsCAC;
  function makeFeedReader(requestOptions, topic, owner) {
@@ -2,13 +2,10 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.diluteBatch = exports.topUpBatch = exports.createPostageBatch = exports.getPostageBatchBuckets = exports.getPostageBatch = exports.getAllPostageBatches = exports.getGlobalPostageBatches = void 0;
  const cafe_utility_1 = require("cafe-utility");
- const duration_1 = require("../../utils/duration");
  const http_1 = require("../../utils/http");
- const size_1 = require("../../utils/size");
  const stamps_1 = require("../../utils/stamps");
  const type_1 = require("../../utils/type");
  const typed_bytes_1 = require("../../utils/typed-bytes");
- const workaround_1 = require("../../utils/workaround");
  const STAMPS_ENDPOINT = 'stamps';
  const BATCHES_ENDPOINT = 'batches';
  async function getGlobalPostageBatches(requestOptions) {
@@ -39,40 +36,7 @@ async function getAllPostageBatches(requestOptions) {
  });
  const body = cafe_utility_1.Types.asObject(response.data, { name: 'response.data' });
  const stamps = cafe_utility_1.Types.asArray(body.stamps, { name: 'stamps' }).map(x => cafe_utility_1.Types.asObject(x, { name: 'stamp' }));
- return stamps.map(x => {
- const utilization = cafe_utility_1.Types.asNumber(x.utilization, { name: 'utilization' });
- const depth = cafe_utility_1.Types.asNumber(x.depth, { name: 'depth' });
- const bucketDepth = cafe_utility_1.Types.asNumber(x.bucketDepth, { name: 'bucketDepth' });
- const usage = (0, stamps_1.getStampUsage)(utilization, depth, bucketDepth);
- const batchTTL = (0, workaround_1.normalizeBatchTTL)(cafe_utility_1.Types.asNumber(x.batchTTL, { name: 'batchTTL' }));
- const duration = duration_1.Duration.fromSeconds(batchTTL);
- const effectiveBytes = (0, stamps_1.getStampEffectiveBytes)(depth);
- return {
- batchID: new typed_bytes_1.BatchId(cafe_utility_1.Types.asString(x.batchID, { name: 'batchID' })),
- utilization,
- usable: cafe_utility_1.Types.asBoolean(x.usable, { name: 'usable' }),
- label: cafe_utility_1.Types.asEmptiableString(x.label, { name: 'label' }),
- depth,
- amount: (0, type_1.asNumberString)(x.amount, { name: 'amount' }),
- bucketDepth,
- blockNumber: cafe_utility_1.Types.asNumber(x.blockNumber, { name: 'blockNumber' }),
- immutableFlag: cafe_utility_1.Types.asBoolean(x.immutableFlag, { name: 'immutableFlag' }),
- usage,
- usageText: `${Math.round(usage * 100)}%`,
- size: size_1.Size.fromBytes(effectiveBytes),
- remainingSize: size_1.Size.fromBytes(Math.ceil(effectiveBytes * (1 - usage))),
- theoreticalSize: size_1.Size.fromBytes((0, stamps_1.getStampTheoreticalBytes)(depth)),
- duration,
- calculateSize(encryption, redundancyLevel) {
- const effectiveBytes = (0, stamps_1.getStampEffectiveBytes)(this.depth, encryption, redundancyLevel);
- return size_1.Size.fromBytes(effectiveBytes);
- },
- calculateRemainingSize(encryption, redundancyLevel) {
- const effectiveBytes = (0, stamps_1.getStampEffectiveBytes)(this.depth, encryption, redundancyLevel);
- return size_1.Size.fromBytes(Math.ceil(effectiveBytes * (1 - this.usage)));
- },
- };
- });
+ return stamps.map(x => (0, stamps_1.mapPostageBatch)(validateRawPostageBatch(x)));
  }
  exports.getAllPostageBatches = getAllPostageBatches;
  async function getPostageBatch(requestOptions, postageBatchId, encryption, erasureCodeLevel) {
@@ -82,38 +46,7 @@ async function getPostageBatch(requestOptions, postageBatchId, encryption, erasu
  responseType: 'json',
  });
  const body = cafe_utility_1.Types.asObject(response.data, { name: 'response.data' });
- const utilization = cafe_utility_1.Types.asNumber(body.utilization, { name: 'utilization' });
- const depth = cafe_utility_1.Types.asNumber(body.depth, { name: 'depth' });
- const bucketDepth = cafe_utility_1.Types.asNumber(body.bucketDepth, { name: 'bucketDepth' });
- const usage = (0, stamps_1.getStampUsage)(utilization, depth, bucketDepth);
- const batchTTL = (0, workaround_1.normalizeBatchTTL)(cafe_utility_1.Types.asNumber(body.batchTTL, { name: 'batchTTL' }));
- const duration = duration_1.Duration.fromSeconds(batchTTL);
- const effectiveBytes = (0, stamps_1.getStampEffectiveBytes)(depth, encryption, erasureCodeLevel);
- return {
- batchID: new typed_bytes_1.BatchId(cafe_utility_1.Types.asString(body.batchID, { name: 'batchID' })),
- utilization,
- usable: cafe_utility_1.Types.asBoolean(body.usable, { name: 'usable' }),
- label: cafe_utility_1.Types.asEmptiableString(body.label, { name: 'label' }),
- depth,
- amount: (0, type_1.asNumberString)(body.amount, { name: 'amount' }),
- bucketDepth,
- blockNumber: cafe_utility_1.Types.asNumber(body.blockNumber, { name: 'blockNumber' }),
- immutableFlag: cafe_utility_1.Types.asBoolean(body.immutableFlag, { name: 'immutableFlag' }),
- usage,
- usageText: `${Math.round(usage * 100)}%`,
- size: size_1.Size.fromBytes(effectiveBytes),
- remainingSize: size_1.Size.fromBytes(Math.ceil(effectiveBytes * (1 - usage))),
- theoreticalSize: size_1.Size.fromBytes((0, stamps_1.getStampTheoreticalBytes)(depth)),
- duration,
- calculateSize(encryption, redundancyLevel) {
- const effectiveBytes = (0, stamps_1.getStampEffectiveBytes)(depth, encryption, redundancyLevel);
- return size_1.Size.fromBytes(effectiveBytes);
- },
- calculateRemainingSize(encryption, redundancyLevel) {
- const effectiveBytes = (0, stamps_1.getStampEffectiveBytes)(depth, encryption, redundancyLevel);
- return size_1.Size.fromBytes(Math.ceil(effectiveBytes * (1 - usage)));
- },
- };
+ return (0, stamps_1.mapPostageBatch)(validateRawPostageBatch(body), encryption, erasureCodeLevel);
  }
  exports.getPostageBatch = getPostageBatch;
  async function getPostageBatchBuckets(requestOptions, postageBatchId) {
@@ -175,3 +108,17 @@ async function diluteBatch(requestOptions, id, depth) {
  return new typed_bytes_1.BatchId(cafe_utility_1.Types.asString(body.batchID, { name: 'batchID' }));
  }
  exports.diluteBatch = diluteBatch;
+ function validateRawPostageBatch(raw) {
+ return {
+ amount: (0, type_1.asNumberString)(raw.amount, { name: 'amount' }),
+ batchID: cafe_utility_1.Types.asString(raw.batchID, { name: 'batchID' }),
+ batchTTL: cafe_utility_1.Types.asNumber(raw.batchTTL, { name: 'batchTTL' }),
+ bucketDepth: cafe_utility_1.Types.asNumber(raw.bucketDepth, { name: 'bucketDepth' }),
+ blockNumber: cafe_utility_1.Types.asNumber(raw.blockNumber, { name: 'blockNumber' }),
+ depth: cafe_utility_1.Types.asNumber(raw.depth, { name: 'depth' }),
+ immutableFlag: cafe_utility_1.Types.asBoolean(raw.immutableFlag, { name: 'immutableFlag' }),
+ label: cafe_utility_1.Types.asEmptiableString(raw.label, { name: 'label' }),
+ usable: cafe_utility_1.Types.asBoolean(raw.usable, { name: 'usable' }),
+ utilization: cafe_utility_1.Types.asNumber(raw.utilization, { name: 'utilization' }),
+ };
+ }
@@ -1,6 +1,6 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.getStampUsage = exports.getStampTheoreticalBytes = exports.getStampEffectiveBytesBreakpoints = exports.getStampEffectiveBytes = exports.getStampDuration = exports.getStampCost = exports.getDepthForSize = exports.getAmountForDuration = exports.getRedundancyStats = exports.getRedundancyStat = exports.approximateOverheadForRedundancyLevel = exports.makeMaxTarget = exports.getFolderSize = exports.makeCollectionFromFileList = exports.getCollectionSize = void 0;
+ exports.unmapPostageBatch = exports.mapPostageBatch = exports.getStampUsage = exports.getStampTheoreticalBytes = exports.getStampEffectiveBytesBreakpoints = exports.getStampEffectiveBytes = exports.getStampDuration = exports.getStampCost = exports.getDepthForSize = exports.getAmountForDuration = exports.getRedundancyStats = exports.getRedundancyStat = exports.approximateOverheadForRedundancyLevel = exports.makeMaxTarget = exports.getFolderSize = exports.makeCollectionFromFileList = exports.getCollectionSize = void 0;
  var collection_1 = require("./collection");
  Object.defineProperty(exports, "getCollectionSize", { enumerable: true, get: function () { return collection_1.getCollectionSize; } });
  Object.defineProperty(exports, "makeCollectionFromFileList", { enumerable: true, get: function () { return collection_1.makeCollectionFromFileList; } });
@@ -21,3 +21,5 @@ Object.defineProperty(exports, "getStampEffectiveBytes", { enumerable: true, get
  Object.defineProperty(exports, "getStampEffectiveBytesBreakpoints", { enumerable: true, get: function () { return stamps_1.getStampEffectiveBytesBreakpoints; } });
  Object.defineProperty(exports, "getStampTheoreticalBytes", { enumerable: true, get: function () { return stamps_1.getStampTheoreticalBytes; } });
  Object.defineProperty(exports, "getStampUsage", { enumerable: true, get: function () { return stamps_1.getStampUsage; } });
+ Object.defineProperty(exports, "mapPostageBatch", { enumerable: true, get: function () { return stamps_1.mapPostageBatch; } });
+ Object.defineProperty(exports, "unmapPostageBatch", { enumerable: true, get: function () { return stamps_1.unmapPostageBatch; } });
@@ -1,12 +1,15 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.marshalStamp = exports.convertEnvelopeToMarshaledStamp = exports.getDepthForSize = exports.getAmountForDuration = exports.getStampDuration = exports.getStampCost = exports.getStampEffectiveBytesBreakpoints = exports.getStampEffectiveBytes = exports.getStampTheoreticalBytes = exports.getStampUsage = void 0;
+ exports.unmapPostageBatch = exports.mapPostageBatch = exports.marshalStamp = exports.convertEnvelopeToMarshaledStamp = exports.getDepthForSize = exports.getAmountForDuration = exports.getStampDuration = exports.getStampCost = exports.getStampEffectiveBytesBreakpoints = exports.getStampEffectiveBytes = exports.getStampTheoreticalBytes = exports.getStampUsage = void 0;
  const cafe_utility_1 = require("cafe-utility");
  const types_1 = require("../types");
  const bytes_1 = require("./bytes");
  const duration_1 = require("./duration");
+ const size_1 = require("./size");
  const tokens_1 = require("./tokens");
  const type_1 = require("./type");
+ const typed_bytes_1 = require("./typed-bytes");
+ const workaround_1 = require("./workaround");
  const MAX_UTILIZATION = 0.9;
  /**
  * Utility function that calculates usage of postage batch based on its utilization, depth and bucket depth.
@@ -172,3 +175,50 @@ function marshalStamp(signature, batchId, timestamp, index) {
  return new bytes_1.Bytes(cafe_utility_1.Binary.concatBytes(batchId, index, timestamp, signature));
  }
  exports.marshalStamp = marshalStamp;
+ function mapPostageBatch(raw, encryption, erasureCodeLevel) {
+ const usage = getStampUsage(raw.utilization, raw.depth, raw.bucketDepth);
+ const batchTTL = (0, workaround_1.normalizeBatchTTL)(raw.batchTTL);
+ const duration = duration_1.Duration.fromSeconds(batchTTL);
+ const effectiveBytes = getStampEffectiveBytes(raw.depth, encryption, erasureCodeLevel);
+ return {
+ batchID: new typed_bytes_1.BatchId(raw.batchID),
+ utilization: raw.utilization,
+ usable: raw.usable,
+ label: raw.label,
+ depth: raw.depth,
+ amount: (0, type_1.asNumberString)(raw.amount),
+ bucketDepth: raw.bucketDepth,
+ blockNumber: raw.blockNumber,
+ immutableFlag: raw.immutableFlag,
+ usage,
+ usageText: `${Math.round(usage * 100)}%`,
+ size: size_1.Size.fromBytes(effectiveBytes),
+ remainingSize: size_1.Size.fromBytes(Math.ceil(effectiveBytes * (1 - usage))),
+ theoreticalSize: size_1.Size.fromBytes(getStampTheoreticalBytes(raw.depth)),
+ duration,
+ calculateSize(encryption, redundancyLevel) {
+ const effectiveBytes = getStampEffectiveBytes(raw.depth, encryption, redundancyLevel);
+ return size_1.Size.fromBytes(effectiveBytes);
+ },
+ calculateRemainingSize(encryption, redundancyLevel) {
+ const effectiveBytes = getStampEffectiveBytes(raw.depth, encryption, redundancyLevel);
+ return size_1.Size.fromBytes(Math.ceil(effectiveBytes * (1 - this.usage)));
+ },
+ };
+ }
+ exports.mapPostageBatch = mapPostageBatch;
+ function unmapPostageBatch(batch) {
+ return {
+ batchID: batch.batchID.toHex(),
+ utilization: batch.utilization,
+ usable: batch.usable,
+ label: batch.label,
+ depth: batch.depth,
+ amount: batch.amount,
+ bucketDepth: batch.bucketDepth,
+ blockNumber: batch.blockNumber,
+ immutableFlag: batch.immutableFlag,
+ batchTTL: batch.duration.toSeconds(),
+ };
+ }
+ exports.unmapPostageBatch = unmapPostageBatch;
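`mapPostageBatch` and `unmapPostageBatch` are near-inverses: one lifts the raw REST shape (as produced by `validateRawPostageBatch`) into the rich batch object with `Duration`, `Size`, and usage helpers; the other flattens it back, deriving `batchTTL` from `duration`. A sketch with made-up values, assuming these are re-exported via `Utils` as the expose changes above suggest, and that `getStampUsage` divides utilization by 2^(depth - bucketDepth):

```ts
import { Utils } from '@ethersphere/bee-js'

// Raw shape as returned by GET /stamps (all values are illustrative).
const raw = {
  batchID: '0'.repeat(64), // placeholder 32-byte hex ID
  utilization: 8,
  usable: true,
  label: '',
  depth: 20,
  amount: '100000000',
  bucketDepth: 16,
  blockNumber: 123456,
  immutableFlag: false,
  batchTTL: 86400, // one day, in seconds
}

const batch = Utils.mapPostageBatch(raw)
console.log(batch.usageText)                         // '50%' (8 / 2^(20 - 16))
console.log(Utils.unmapPostageBatch(batch).batchTTL) // 86400, from batch.duration
```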
@@ -9,8 +9,11 @@ function normalizeBatchTTL(batchTTL) {
  if (batchTTL < 1) {
  return 1;
  }
- if (batchTTL > 315569260) {
- return 315569260;
+ // Cap `batchTTL` (represents seconds) to 100 years.
+ // We can assume `storagePrice` is invalid (e.g. 1).
+ // This is needed to prevent Date objects breaking.
+ if (batchTTL > 3155695200) {
+ return 3155695200;
  }
  return batchTTL;
  }
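The new ceiling is one hundred Julian years: 100 × 31 556 952 s = 3 155 695 200 s, where the old cap was roughly ten years. The rationale in the comment checks out: even the capped TTL keeps derived `Date` values far below the 8.64e15 ms JavaScript maximum when the chain reports a bogus `storagePrice`. A quick check:

```ts
const JULIAN_YEAR_SECONDS = 31_556_952 // 365.2425 days
const CAP_SECONDS = 3_155_695_200      // normalizeBatchTTL ceiling

console.log(CAP_SECONDS / JULIAN_YEAR_SECONDS)         // 100
console.log(Date.now() + CAP_SECONDS * 1000 < 8.64e15) // true (Date stays valid)
```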