@ethersphere/bee-js 8.1.0 → 8.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/bee.js +19 -6
- package/dist/cjs/chunk/soc.js +4 -6
- package/dist/cjs/modules/bytes.js +18 -1
- package/dist/cjs/modules/chunk.js +4 -4
- package/dist/cjs/modules/envelope.js +23 -0
- package/dist/cjs/modules/feed.js +2 -2
- package/dist/cjs/modules/soc.js +2 -2
- package/dist/cjs/utils/expose.js +3 -1
- package/dist/cjs/utils/headers.js +11 -6
- package/dist/cjs/utils/stamps.js +22 -1
- package/dist/cjs/utils/type.js +1 -5
- package/dist/index.browser.min.js +1 -1
- package/dist/index.browser.min.js.map +1 -1
- package/dist/mjs/bee.js +19 -6
- package/dist/mjs/chunk/soc.js +4 -6
- package/dist/mjs/modules/bytes.js +16 -0
- package/dist/mjs/modules/chunk.js +4 -4
- package/dist/mjs/modules/envelope.js +33 -0
- package/dist/mjs/modules/feed.js +2 -2
- package/dist/mjs/modules/soc.js +2 -2
- package/dist/mjs/utils/expose.js +3 -1
- package/dist/mjs/utils/headers.js +10 -6
- package/dist/mjs/utils/stamps.js +19 -0
- package/dist/mjs/utils/type.js +0 -3
- package/dist/types/bee.d.ts +10 -3
- package/dist/types/chunk/soc.d.ts +2 -2
- package/dist/types/modules/bytes.d.ts +8 -1
- package/dist/types/modules/chunk.d.ts +3 -3
- package/dist/types/modules/envelope.d.ts +2 -0
- package/dist/types/modules/feed.d.ts +1 -1
- package/dist/types/modules/soc.d.ts +1 -1
- package/dist/types/types/index.d.ts +10 -1
- package/dist/types/utils/expose.d.ts +6 -3
- package/dist/types/utils/headers.d.ts +1 -1
- package/dist/types/utils/stamps.d.ts +3 -1
- package/dist/types/utils/type.d.ts +0 -1
- package/package.json +2 -2
package/dist/cjs/bee.js
CHANGED

@@ -46,6 +46,7 @@ const states = __importStar(require("./modules/debug/states"));
 const debugStatus = __importStar(require("./modules/debug/status"));
 const debugTag = __importStar(require("./modules/debug/tag"));
 const transactions = __importStar(require("./modules/debug/transactions"));
+const envelope_1 = require("./modules/envelope");
 const feed_2 = require("./modules/feed");
 const grantee = __importStar(require("./modules/grantee"));
 const pinning = __importStar(require("./modules/pinning"));
@@ -110,6 +111,16 @@ class Bee {
         }
         return bytes.upload(this.getRequestOptionsForCall(requestOptions), data, postageBatchId, options);
     }
+    /**
+     * Requests content length for a `/bytes` reference
+     *
+     * @see [Bee API reference - `HEAD /bytes/`](https://docs.ethswarm.org/api/#tag/Bytes/paths/~1bytes~1%7Breference%7D/head)
+     */
+    async probeData(reference, options) {
+        (0, type_2.assertRequestOptions)(options);
+        (0, type_2.assertReferenceOrEns)(reference);
+        return bytes.head(this.getRequestOptionsForCall(options), reference);
+    }
     /**
      * Download data as a byte array
      *
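
Outside the diff itself: the new Bee.probeData method maps to a HEAD /bytes/{reference} request and resolves with the advertised content length, so callers can check the size of stored data without downloading it. A minimal TypeScript sketch (node URL and reference are placeholder values):

  import { Bee } from '@ethersphere/bee-js'

  const bee = new Bee('http://localhost:1633')
  // 64-character hex reference of previously uploaded data (placeholder)
  const reference = '36b7efd913ca4cf880b8eeac5093fa27b0825906c600685b6abdd6566e6cfe8f'

  // Issues HEAD /bytes/{reference}; no payload is transferred
  const { contentLength } = await bee.probeData(reference)
  console.log(`stored payload is ${contentLength} bytes`)
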
@@ -151,8 +162,7 @@ class Bee {
      * @see [Bee docs - Upload and download](https://docs.ethswarm.org/docs/develop/access-the-swarm/upload-and-download)
      * @see [Bee API reference - `POST /chunks`](https://docs.ethswarm.org/api/#tag/Chunk/paths/~1chunks/post)
      */
-    async uploadChunk(
-        (0, type_2.assertBatchId)(postageBatchId);
+    async uploadChunk(stamp, data, options, requestOptions) {
         (0, type_2.assertRequestOptions)(requestOptions);
         if (!(data instanceof Uint8Array)) {
             throw new TypeError('Data has to be Uint8Array instance!');
@@ -166,7 +176,7 @@ class Bee {
         if (options) {
             (0, type_2.assertUploadOptions)(options);
         }
-        return chunk.upload(this.getRequestOptionsForCall(requestOptions), data,
+        return chunk.upload(this.getRequestOptionsForCall(requestOptions), data, stamp, options);
     }
     /**
      * Download chunk as a byte array
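
The first parameter of uploadChunk is now a generic stamp rather than a strictly validated batch ID; as the extractUploadHeaders change later in this diff shows, it may be either a postage batch ID string or a marshaled stamp as a Uint8Array. A sketch with a plain batch ID (placeholder value; the chunk bytes must already contain the 8-byte little-endian span followed by the payload):

  import { Bee } from '@ethersphere/bee-js'

  const bee = new Bee('http://localhost:1633')
  const batchId = 'f1e4ff753ea1cb923269ed0cda909d13a10d624719edf261e196584e9e764e50' // placeholder

  // Build span (8 bytes, little-endian payload length) followed by the payload
  const payload = new TextEncoder().encode('hello chunk')
  const span = new Uint8Array(8)
  new DataView(span.buffer).setUint32(0, payload.length, true)
  const chunkData = new Uint8Array([...span, ...payload])

  const result = await bee.uploadChunk(batchId, chunkData)
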
@@ -727,13 +737,12 @@ class Bee {
      * @see [Bee docs - Feeds](https://docs.ethswarm.org/docs/develop/tools-and-features/feeds)
      * @see [Bee API reference - `POST /feeds`](https://docs.ethswarm.org/api/#tag/Feed/paths/~1feeds~1{owner}~1{topic}/post)
      */
-    async createFeedManifest(
+    async createFeedManifest(stamp, type, topic, owner, options) {
         (0, type_2.assertRequestOptions)(options);
         (0, type_1.assertFeedType)(type);
-        (0, type_2.assertBatchId)(postageBatchId);
         const canonicalTopic = (0, topic_1.makeTopic)(topic);
         const canonicalOwner = (0, eth_1.makeHexEthAddress)(owner);
-        const reference = await (0, feed_2.createFeedManifest)(this.getRequestOptionsForCall(options), canonicalOwner, canonicalTopic,
+        const reference = await (0, feed_2.createFeedManifest)(this.getRequestOptionsForCall(options), canonicalOwner, canonicalTopic, stamp);
         return (0, type_2.addCidConversionFunction)({ reference }, swarm_cid_1.ReferenceType.FEED);
     }
     /**
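
The argument order of createFeedManifest is unchanged, but the removed assertBatchId call means the first argument is now treated as a generic stamp (batch ID string or marshaled stamp). A sketch with placeholder values:

  import { Bee } from '@ethersphere/bee-js'

  const bee = new Bee('http://localhost:1633')
  const batchId = 'f1e4ff753ea1cb923269ed0cda909d13a10d624719edf261e196584e9e764e50' // placeholder
  const owner = '0x8d3766440f0d7b949a5e32995d09619a7f86e632' // placeholder Ethereum address
  const topic = '0'.repeat(64) // placeholder 32-byte topic in hex

  const manifest = await bee.createFeedManifest(batchId, 'sequence', topic, owner)
  console.log(manifest.reference)
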
@@ -883,6 +892,10 @@ class Bee {
             upload: soc_1.uploadSingleOwnerChunkData.bind(null, this.getRequestOptionsForCall(options), canonicalSigner),
         };
     }
+    async createEnvelope(postageBatchId, reference, options) {
+        (0, type_2.assertRequestOptions)(options);
+        return (0, envelope_1.postEnvelope)(this.getRequestOptionsForCall(options), postageBatchId, reference);
+    }
     /**
      * Ping the Bee node to see if there is a live Bee node on the given URL.
      *
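
The new Bee.createEnvelope asks the node to issue a pre-signed postage stamp ("envelope") for a given reference via the /envelope endpoint added in modules/envelope.js further down in this diff. A sketch with placeholder values:

  import { Bee } from '@ethersphere/bee-js'

  const bee = new Bee('http://localhost:1633')
  const batchId = 'f1e4ff753ea1cb923269ed0cda909d13a10d624719edf261e196584e9e764e50' // placeholder
  const reference = '36b7efd913ca4cf880b8eeac5093fa27b0825906c600685b6abdd6566e6cfe8f' // placeholder

  const envelope = await bee.createEnvelope(batchId, reference)
  // envelope.issuer (20 bytes), envelope.index (8), envelope.timestamp (8), envelope.signature (65)
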
package/dist/cjs/chunk/soc.js
CHANGED

@@ -31,7 +31,6 @@ const bytes_1 = require("../utils/bytes");
 const error_1 = require("../utils/error");
 const hash_1 = require("../utils/hash");
 const hex_1 = require("../utils/hex");
-const type_1 = require("../utils/type");
 const bmt_1 = require("./bmt");
 const cac_1 = require("./cac");
 const signer_1 = require("./signer");
@@ -119,12 +118,12 @@ exports.makeSingleOwnerChunk = makeSingleOwnerChunk;
  * @param postageBatchId Postage BatchId that will be assigned to uploaded data
  * @param options Upload options
  */
-async function uploadSingleOwnerChunk(requestOptions, chunk,
+async function uploadSingleOwnerChunk(requestOptions, chunk, stamp, options) {
     const owner = (0, hex_1.bytesToHex)(chunk.owner());
     const identifier = (0, hex_1.bytesToHex)(chunk.identifier());
     const signature = (0, hex_1.bytesToHex)(chunk.signature());
     const data = cafe_utility_1.Binary.concatBytes(chunk.span(), chunk.payload());
-    return socAPI.upload(requestOptions, owner, identifier, signature, data,
+    return socAPI.upload(requestOptions, owner, identifier, signature, data, stamp, options);
 }
 exports.uploadSingleOwnerChunk = uploadSingleOwnerChunk;
 /**
@@ -137,11 +136,10 @@ exports.uploadSingleOwnerChunk = uploadSingleOwnerChunk;
  * @param data The chunk data
  * @param options
  */
-async function uploadSingleOwnerChunkData(requestOptions, signer,
-    (0, type_1.assertAddress)(postageBatchId);
+async function uploadSingleOwnerChunkData(requestOptions, signer, stamp, identifier, data, options) {
     const cac = (0, cac_1.makeContentAddressedChunk)(data);
     const soc = await makeSingleOwnerChunk(cac, identifier, signer);
-    return uploadSingleOwnerChunk(requestOptions, soc,
+    return uploadSingleOwnerChunk(requestOptions, soc, stamp, options);
 }
 exports.uploadSingleOwnerChunkData = uploadSingleOwnerChunkData;
 /**
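
This also changes the SOC writer that Bee.makeSOCWriter builds on top of uploadSingleOwnerChunkData (see the bind call in bee.js above): the bound upload now takes the stamp as its first argument. A sketch with placeholder values:

  import { Bee } from '@ethersphere/bee-js'

  const bee = new Bee('http://localhost:1633')
  const batchId = 'f1e4ff753ea1cb923269ed0cda909d13a10d624719edf261e196584e9e764e50' // placeholder
  const privateKey = '634fb5a872396d9693e5c9f9d7233cfa93f395c093371017ff44aa9ae6564cdd' // placeholder signer key

  const writer = bee.makeSOCWriter(privateKey)
  const identifier = new Uint8Array(32) // 32-byte identifier, all zeroes here
  const payload = new TextEncoder().encode('single owner chunk payload')

  // stamp (a batch ID here, or a marshaled stamp) is now the first argument
  const result = await writer.upload(batchId, identifier, payload)
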

package/dist/cjs/modules/bytes.js
CHANGED

@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.downloadReadable = exports.download = exports.upload = void 0;
+exports.downloadReadable = exports.download = exports.head = exports.upload = void 0;
 const bytes_1 = require("../utils/bytes");
 const headers_1 = require("../utils/headers");
 const http_1 = require("../utils/http");
@@ -32,6 +32,23 @@ async function upload(requestOptions, data, postageBatchId, options) {
     };
 }
 exports.upload = upload;
+/**
+ * Requests content length for a reference
+ *
+ * @param requestOptions Options for making requests
+ * @param hash Bee content reference
+ */
+async function head(requestOptions, hash) {
+    const response = await (0, http_1.http)(requestOptions, {
+        url: `${endpoint}/${hash}`,
+        method: 'head',
+        responseType: 'json',
+    });
+    return {
+        contentLength: parseInt(response.headers['content-length']),
+    };
+}
+exports.head = head;
 /**
  * Download data as a byte array
  *

package/dist/cjs/modules/chunk.js
CHANGED

@@ -14,18 +14,18 @@ const endpoint = 'chunks';
  * Upload expects the chuck data to be set accordingly.
  *
  * @param requestOptions Options for making requests
- * @param data
- * @param
+ * @param data Chunk data to be uploaded
+ * @param stamp BatchId or marshaled stamp to be used for the upload
  * @param options Additional options like tag, encryption, pinning
  */
-async function upload(requestOptions, data,
+async function upload(requestOptions, data, stamp, options) {
     const response = await (0, http_1.http)(requestOptions, {
         method: 'post',
         url: `${endpoint}`,
         data,
         headers: {
             'content-type': 'application/octet-stream',
-            ...(0, headers_1.extractUploadHeaders)(
+            ...(0, headers_1.extractUploadHeaders)(stamp, options),
         },
         responseType: 'json',
     });

package/dist/cjs/modules/envelope.js
ADDED

@@ -0,0 +1,23 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.postEnvelope = void 0;
+const cafe_utility_1 = require("cafe-utility");
+const http_1 = require("../utils/http");
+const ENVELOPE_ENDPOINT = 'envelope';
+async function postEnvelope(requestOptions, postageBatchId, reference) {
+    const { data } = await (0, http_1.http)(requestOptions, {
+        method: 'post',
+        responseType: 'json',
+        url: `${ENVELOPE_ENDPOINT}/${reference}`,
+        headers: {
+            'swarm-postage-batch-id': postageBatchId,
+        },
+    });
+    return {
+        issuer: cafe_utility_1.Binary.hexToUint8Array(cafe_utility_1.Types.asHexString(data.issuer, { name: 'issuer', byteLength: 20 })),
+        index: cafe_utility_1.Binary.hexToUint8Array(cafe_utility_1.Types.asHexString(data.index, { name: 'index', byteLength: 8 })),
+        timestamp: cafe_utility_1.Binary.hexToUint8Array(cafe_utility_1.Types.asHexString(data.timestamp, { name: 'timestamp', byteLength: 8 })),
+        signature: cafe_utility_1.Binary.hexToUint8Array(cafe_utility_1.Types.asHexString(data.signature, { name: 'signature', byteLength: 65 })),
+    };
+}
+exports.postEnvelope = postEnvelope;
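
Going by the byte lengths asserted above, the value resolved by postEnvelope (and therefore by Bee.createEnvelope) has roughly the following shape; the interface name is illustrative, not necessarily the one exported by the package:

  interface PostageEnvelope {
    issuer: Uint8Array    // 20 bytes, address of the batch issuer
    index: Uint8Array     // 8 bytes
    timestamp: Uint8Array // 8 bytes
    signature: Uint8Array // 65 bytes
  }
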
package/dist/cjs/modules/feed.js
CHANGED

@@ -14,12 +14,12 @@ const feedEndpoint = 'feeds';
  * @param postageBatchId Postage BatchId to be used to create the Feed Manifest
  * @param options Additional options, like type (default: 'sequence')
  */
-async function createFeedManifest(requestOptions, owner, topic,
+async function createFeedManifest(requestOptions, owner, topic, stamp) {
     const response = await (0, http_1.http)(requestOptions, {
         method: 'post',
         responseType: 'json',
         url: `${feedEndpoint}/${owner}/${topic}`,
-        headers: (0, headers_1.extractUploadHeaders)(
+        headers: (0, headers_1.extractUploadHeaders)(stamp),
     });
     return response.data.reference;
 }
package/dist/cjs/modules/soc.js
CHANGED

@@ -16,14 +16,14 @@ const socEndpoint = 'soc';
  * @param postageBatchId Postage BatchId that will be assigned to uploaded data
  * @param options Additional options like tag, encryption, pinning
  */
-async function upload(requestOptions, owner, identifier, signature, data,
+async function upload(requestOptions, owner, identifier, signature, data, stamp, options) {
     const response = await (0, http_1.http)(requestOptions, {
         method: 'post',
         url: `${socEndpoint}/${owner}/${identifier}`,
         data,
         headers: {
             'content-type': 'application/octet-stream',
-            ...(0, headers_1.extractUploadHeaders)(
+            ...(0, headers_1.extractUploadHeaders)(stamp, options),
         },
         responseType: 'json',
         params: { sig: signature },
package/dist/cjs/utils/expose.js
CHANGED

@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.getRedundancyStats = exports.getRedundancyStat = exports.approximateOverheadForRedundancyLevel = exports.getStampUsage = exports.getStampTtlSeconds = exports.getStampMaximumCapacityBytes = exports.getStampEffectiveBytes = exports.getStampCostInPlur = exports.getStampCostInBzz = exports.getDepthForCapacity = exports.getAmountForTtl = exports.makeMaxTarget = exports.keccak256Hash = exports.toLittleEndian = exports.makeHexEthAddress = exports.makeEthereumWalletSigner = exports.makeEthAddress = exports.isHexEthAddress = exports.fromLittleEndian = exports.ethToSwarmAddress = exports.capitalizeAddressERC55 = exports.makeHexString = exports.isHexString = exports.intToHex = exports.hexToBytes = exports.bytesToHex = exports.assertPrefixedHexString = exports.assertHexString = exports.isFlexBytes = exports.isBytes = exports.flexBytesAtOffset = exports.bytesEqual = exports.bytesAtOffset = exports.assertFlexBytes = exports.assertBytes = exports.getFolderSize = exports.getCollectionSize = void 0;
+exports.NULL_TOPIC = exports.NULL_STAMP = exports.getRedundancyStats = exports.getRedundancyStat = exports.approximateOverheadForRedundancyLevel = exports.getStampUsage = exports.getStampTtlSeconds = exports.getStampMaximumCapacityBytes = exports.getStampEffectiveBytes = exports.getStampCostInPlur = exports.getStampCostInBzz = exports.getDepthForCapacity = exports.getAmountForTtl = exports.makeMaxTarget = exports.keccak256Hash = exports.toLittleEndian = exports.makeHexEthAddress = exports.makeEthereumWalletSigner = exports.makeEthAddress = exports.isHexEthAddress = exports.fromLittleEndian = exports.ethToSwarmAddress = exports.capitalizeAddressERC55 = exports.makeHexString = exports.isHexString = exports.intToHex = exports.hexToBytes = exports.bytesToHex = exports.assertPrefixedHexString = exports.assertHexString = exports.isFlexBytes = exports.isBytes = exports.flexBytesAtOffset = exports.bytesEqual = exports.bytesAtOffset = exports.assertFlexBytes = exports.assertBytes = exports.getFolderSize = exports.getCollectionSize = void 0;
 var collection_1 = require("./collection");
 Object.defineProperty(exports, "getCollectionSize", { enumerable: true, get: function () { return collection_1.getCollectionSize; } });
 var collection_node_1 = require("./collection.node");
@@ -47,3 +47,5 @@ var redundancy_1 = require("./redundancy");
 Object.defineProperty(exports, "approximateOverheadForRedundancyLevel", { enumerable: true, get: function () { return redundancy_1.approximateOverheadForRedundancyLevel; } });
 Object.defineProperty(exports, "getRedundancyStat", { enumerable: true, get: function () { return redundancy_1.getRedundancyStat; } });
 Object.defineProperty(exports, "getRedundancyStats", { enumerable: true, get: function () { return redundancy_1.getRedundancyStats; } });
+exports.NULL_STAMP = '0000000000000000000000000000000000000000000000000000000000000000';
+exports.NULL_TOPIC = '0000000000000000000000000000000000000000000000000000000000000000';
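
NULL_STAMP and NULL_TOPIC are both the 32-byte all-zero value in hex. Assuming utils/expose continues to back the package's Utils namespace (a re-export not visible in this diff), they can serve as explicit placeholder values, e.g.:

  import { Utils } from '@ethersphere/bee-js'

  console.log(Utils.NULL_TOPIC.length) // 64 hex characters = 32 zero bytes
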

package/dist/cjs/utils/headers.js
CHANGED

@@ -1,6 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.extractDownloadHeaders = exports.extractRedundantUploadHeaders = exports.extractUploadHeaders = exports.readFileHeaders = void 0;
+const cafe_utility_1 = require("cafe-utility");
 const error_1 = require("./error");
 /**
  * Read the filename from the content-disposition header
@@ -39,13 +40,17 @@ function readFileHeaders(headers) {
     };
 }
 exports.readFileHeaders = readFileHeaders;
-function extractUploadHeaders(
-    if (!
-        throw new error_1.BeeError('
+function extractUploadHeaders(stamp, options) {
+    if (!stamp) {
+        throw new error_1.BeeError('Stamp has to be specified!');
+    }
+    const headers = {};
+    if (stamp instanceof Uint8Array) {
+        headers['swarm-postage-stamp'] = cafe_utility_1.Binary.uint8ArrayToHex(stamp);
+    }
+    else {
+        headers['swarm-postage-batch-id'] = stamp;
     }
-    const headers = {
-        'swarm-postage-batch-id': postageBatchId,
-    };
     if (options?.act) {
         headers['swarm-act'] = String(options.act);
     }
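
The dispatch above is what lets every upload path accept either kind of stamp: a Uint8Array is hex-encoded and sent as swarm-postage-stamp, anything else is sent as swarm-postage-batch-id. A standalone re-implementation for illustration (not the library export):

  const toHex = (bytes: Uint8Array): string =>
    Array.from(bytes, b => b.toString(16).padStart(2, '0')).join('')

  function stampHeader(stamp: string | Uint8Array): Record<string, string> {
    if (stamp instanceof Uint8Array) {
      return { 'swarm-postage-stamp': toHex(stamp) } // pre-signed, marshaled stamp
    }
    return { 'swarm-postage-batch-id': stamp } // plain postage batch ID
  }
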
package/dist/cjs/utils/stamps.js
CHANGED

@@ -1,6 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.getDepthForCapacity = exports.getAmountForTtl = exports.getStampTtlSeconds = exports.getStampCostInBzz = exports.getStampCostInPlur = exports.getStampEffectiveBytes = exports.getStampMaximumCapacityBytes = exports.getStampUsage = void 0;
+exports.marshalStamp = exports.convertEnvelopeToMarshaledStamp = exports.getDepthForCapacity = exports.getAmountForTtl = exports.getStampTtlSeconds = exports.getStampCostInBzz = exports.getStampCostInPlur = exports.getStampEffectiveBytes = exports.getStampMaximumCapacityBytes = exports.getStampUsage = void 0;
+const cafe_utility_1 = require("cafe-utility");
 /**
  * Utility function that calculates usage of postage batch based on its utilization, depth and bucket depth.
  *
@@ -112,3 +113,23 @@ function getDepthForCapacity(gigabytes) {
     return gigabytes <= 1 ? 18 : Math.ceil(Math.log2(Math.ceil(gigabytes)) + 18);
 }
 exports.getDepthForCapacity = getDepthForCapacity;
+function convertEnvelopeToMarshaledStamp(batchID, envelope) {
+    return marshalStamp(envelope.signature, cafe_utility_1.Binary.hexToUint8Array(batchID), envelope.timestamp, envelope.index);
+}
+exports.convertEnvelopeToMarshaledStamp = convertEnvelopeToMarshaledStamp;
+function marshalStamp(signature, batchID, timestamp, index) {
+    if (signature.length !== 65) {
+        throw Error('invalid signature length');
+    }
+    if (batchID.length !== 32) {
+        throw Error('invalid batch ID length');
+    }
+    if (timestamp.length !== 8) {
+        throw Error('invalid timestamp length');
+    }
+    if (index.length !== 8) {
+        throw Error('invalid index length');
+    }
+    return cafe_utility_1.Binary.concatBytes(batchID, index, timestamp, signature);
+}
+exports.marshalStamp = marshalStamp;
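
Combined with Bee.createEnvelope and the header handling above, these helpers enable uploads with a pre-signed stamp. A sketch, assuming convertEnvelopeToMarshaledStamp is reachable through the package's Utils namespace (this diff only shows the CommonJS module, not the public re-export):

  import { Bee, Utils } from '@ethersphere/bee-js'

  const bee = new Bee('http://localhost:1633')
  const batchId = 'f1e4ff753ea1cb923269ed0cda909d13a10d624719edf261e196584e9e764e50' // placeholder
  const reference = '36b7efd913ca4cf880b8eeac5093fa27b0825906c600685b6abdd6566e6cfe8f' // placeholder

  const envelope = await bee.createEnvelope(batchId, reference)
  const stamp = Utils.convertEnvelopeToMarshaledStamp(batchId, envelope)

  // 32 (batch ID) + 8 (index) + 8 (timestamp) + 65 (signature) = 113 bytes
  console.log(stamp.length)
  // The Uint8Array can now be passed wherever a stamp is accepted, e.g. as the first
  // argument of bee.uploadChunk(...) for the data the envelope was issued for; it is
  // then sent as swarm-postage-stamp instead of swarm-postage-batch-id.
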
package/dist/cjs/utils/type.js
CHANGED

@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.assertTransactionHash = exports.makeTagUid = exports.assertAllTagsOptions = exports.assertFileData = exports.assertData = exports.assertCashoutOptions = exports.assertTransactionOptions = exports.assertPostageBatchOptions = exports.assertPublicKey = exports.assertPssMessageHandler = exports.assertAddressPrefix = exports.assertTag = exports.isTag = exports.assertCollectionUploadOptions = exports.assertFileUploadOptions = exports.assertUploadOptions = exports.assertRequestOptions = exports.assertBatchId = exports.assertAddress = exports.addCidConversionFunction = exports.makeReferenceOrEns = exports.assertReferenceOrEns = exports.assertReference = exports.assertPositiveInteger = exports.assertNonNegativeInteger = exports.assertInteger = exports.assertBoolean = exports.assertStrictlyObject = exports.isError = exports.isStrictlyObject = exports.isObject = exports.isInteger = exports.
+exports.assertTransactionHash = exports.makeTagUid = exports.assertAllTagsOptions = exports.assertFileData = exports.assertData = exports.assertCashoutOptions = exports.assertTransactionOptions = exports.assertPostageBatchOptions = exports.assertPublicKey = exports.assertPssMessageHandler = exports.assertAddressPrefix = exports.assertTag = exports.isTag = exports.assertCollectionUploadOptions = exports.assertFileUploadOptions = exports.assertUploadOptions = exports.assertRequestOptions = exports.assertBatchId = exports.assertAddress = exports.addCidConversionFunction = exports.makeReferenceOrEns = exports.assertReferenceOrEns = exports.assertReference = exports.assertPositiveInteger = exports.assertNonNegativeInteger = exports.assertInteger = exports.assertBoolean = exports.assertStrictlyObject = exports.isError = exports.isStrictlyObject = exports.isObject = exports.isInteger = exports.isReadable = void 0;
 const swarm_cid_1 = require("@ethersphere/swarm-cid");
 const stream_1 = require("stream");
 const types_1 = require("../types");
@@ -11,10 +11,6 @@ function isReadable(obj) {
     return typeof stream_1.Readable !== 'undefined' && obj instanceof stream_1.Readable;
 }
 exports.isReadable = isReadable;
-function isUint8Array(obj) {
-    return obj instanceof Uint8Array;
-}
-exports.isUint8Array = isUint8Array;
 function isInteger(value) {
     return ((typeof value === 'string' && /^-?(0|[1-9][0-9]*)$/g.test(value)) ||
         (typeof value === 'number' &&
|