@autonomys/auto-dag-data 0.8.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +18 -0
- package/README.md +167 -0
- package/dist/cid/index.d.ts +9 -0
- package/dist/cid/index.d.ts.map +1 -0
- package/dist/cid/index.js +20 -0
- package/dist/compression/index.d.ts +7 -0
- package/dist/compression/index.d.ts.map +1 -0
- package/dist/compression/index.js +106 -0
- package/dist/compression/types.d.ts +9 -0
- package/dist/compression/types.d.ts.map +1 -0
- package/dist/compression/types.js +1 -0
- package/dist/encryption/index.d.ts +8 -0
- package/dist/encryption/index.d.ts.map +1 -0
- package/dist/encryption/index.js +121 -0
- package/dist/encryption/types.d.ts +5 -0
- package/dist/encryption/types.d.ts.map +1 -0
- package/dist/encryption/types.js +1 -0
- package/dist/index.d.ts +6 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +5 -0
- package/dist/ipld/blockstore/base.d.ts +9 -0
- package/dist/ipld/blockstore/base.d.ts.map +1 -0
- package/dist/ipld/blockstore/base.js +1 -0
- package/dist/ipld/blockstore/index.d.ts +3 -0
- package/dist/ipld/blockstore/index.d.ts.map +1 -0
- package/dist/ipld/blockstore/index.js +2 -0
- package/dist/ipld/blockstore/memory.d.ts +13 -0
- package/dist/ipld/blockstore/memory.d.ts.map +1 -0
- package/dist/ipld/blockstore/memory.js +57 -0
- package/dist/ipld/builders.d.ts +12 -0
- package/dist/ipld/builders.d.ts.map +1 -0
- package/dist/ipld/builders.js +13 -0
- package/dist/ipld/chunker.d.ts +30 -0
- package/dist/ipld/chunker.d.ts.map +1 -0
- package/dist/ipld/chunker.js +219 -0
- package/dist/ipld/index.d.ts +5 -0
- package/dist/ipld/index.d.ts.map +1 -0
- package/dist/ipld/index.js +4 -0
- package/dist/ipld/nodes.d.ts +15 -0
- package/dist/ipld/nodes.d.ts.map +1 -0
- package/dist/ipld/nodes.js +92 -0
- package/dist/ipld/utils.d.ts +8 -0
- package/dist/ipld/utils.d.ts.map +1 -0
- package/dist/ipld/utils.js +50 -0
- package/dist/metadata/index.d.ts +3 -0
- package/dist/metadata/index.d.ts.map +1 -0
- package/dist/metadata/index.js +2 -0
- package/dist/metadata/offchain/base.d.ts +4 -0
- package/dist/metadata/offchain/base.d.ts.map +1 -0
- package/dist/metadata/offchain/base.js +1 -0
- package/dist/metadata/offchain/file.d.ts +18 -0
- package/dist/metadata/offchain/file.d.ts.map +1 -0
- package/dist/metadata/offchain/file.js +16 -0
- package/dist/metadata/offchain/folder.d.ts +22 -0
- package/dist/metadata/offchain/folder.d.ts.map +1 -0
- package/dist/metadata/offchain/folder.js +27 -0
- package/dist/metadata/offchain/index.d.ts +4 -0
- package/dist/metadata/offchain/index.d.ts.map +1 -0
- package/dist/metadata/offchain/index.js +3 -0
- package/dist/metadata/onchain/index.d.ts +3 -0
- package/dist/metadata/onchain/index.d.ts.map +1 -0
- package/dist/metadata/onchain/index.js +2 -0
- package/dist/metadata/onchain/protobuf/OnchainMetadata.d.ts +69 -0
- package/dist/metadata/onchain/protobuf/OnchainMetadata.d.ts.map +1 -0
- package/dist/metadata/onchain/protobuf/OnchainMetadata.js +322 -0
- package/dist/metadata/onchain/utils.d.ts +4 -0
- package/dist/metadata/onchain/utils.d.ts.map +1 -0
- package/dist/metadata/onchain/utils.js +12 -0
- package/dist/src/cid/index.d.ts +9 -0
- package/dist/src/cid/index.d.ts.map +1 -0
- package/dist/src/cid/index.js +20 -0
- package/dist/src/index.d.ts +4 -0
- package/dist/src/index.d.ts.map +1 -0
- package/dist/src/index.js +3 -0
- package/dist/src/ipld/builders.d.ts +11 -0
- package/dist/src/ipld/builders.d.ts.map +1 -0
- package/dist/src/ipld/builders.js +13 -0
- package/dist/src/ipld/chunker.d.ts +22 -0
- package/dist/src/ipld/chunker.d.ts.map +1 -0
- package/dist/src/ipld/chunker.js +144 -0
- package/dist/src/ipld/index.d.ts +4 -0
- package/dist/src/ipld/index.d.ts.map +1 -0
- package/dist/src/ipld/index.js +3 -0
- package/dist/src/ipld/nodes.d.ts +15 -0
- package/dist/src/ipld/nodes.d.ts.map +1 -0
- package/dist/src/ipld/nodes.js +89 -0
- package/dist/src/ipld/utils.d.ts +5 -0
- package/dist/src/ipld/utils.d.ts.map +1 -0
- package/dist/src/ipld/utils.js +51 -0
- package/dist/src/metadata/index.d.ts +3 -0
- package/dist/src/metadata/index.d.ts.map +1 -0
- package/dist/src/metadata/index.js +2 -0
- package/dist/src/metadata/offchain/base.d.ts +4 -0
- package/dist/src/metadata/offchain/base.d.ts.map +1 -0
- package/dist/src/metadata/offchain/base.js +1 -0
- package/dist/src/metadata/offchain/file.d.ts +16 -0
- package/dist/src/metadata/offchain/file.d.ts.map +1 -0
- package/dist/src/metadata/offchain/file.js +19 -0
- package/dist/src/metadata/offchain/folder.d.ts +17 -0
- package/dist/src/metadata/offchain/folder.d.ts.map +1 -0
- package/dist/src/metadata/offchain/folder.js +10 -0
- package/dist/src/metadata/offchain/index.d.ts +4 -0
- package/dist/src/metadata/offchain/index.d.ts.map +1 -0
- package/dist/src/metadata/offchain/index.js +3 -0
- package/dist/src/metadata/onchain/index.d.ts +3 -0
- package/dist/src/metadata/onchain/index.d.ts.map +1 -0
- package/dist/src/metadata/onchain/index.js +2 -0
- package/dist/src/metadata/onchain/protobuf/OnchainMetadata.d.ts +28 -0
- package/dist/src/metadata/onchain/protobuf/OnchainMetadata.d.ts.map +1 -0
- package/dist/src/metadata/onchain/protobuf/OnchainMetadata.js +112 -0
- package/dist/src/metadata/onchain/utils.d.ts +4 -0
- package/dist/src/metadata/onchain/utils.d.ts.map +1 -0
- package/dist/src/metadata/onchain/utils.js +12 -0
- package/dist/utils/async.d.ts +3 -0
- package/dist/utils/async.d.ts.map +1 -0
- package/dist/utils/async.js +48 -0
- package/dist/utils/index.d.ts +2 -0
- package/dist/utils/index.d.ts.map +1 -0
- package/dist/utils/index.js +1 -0
- package/dist/utils/types.d.ts +2 -0
- package/dist/utils/types.d.ts.map +1 -0
- package/dist/utils/types.js +1 -0
- package/jest.config.ts +17 -0
- package/package.json +46 -0
- package/src/cid/index.ts +26 -0
- package/src/compression/index.ts +84 -0
- package/src/compression/types.ts +11 -0
- package/src/encryption/index.ts +99 -0
- package/src/encryption/types.ts +4 -0
- package/src/index.ts +5 -0
- package/src/ipld/builders.ts +40 -0
- package/src/ipld/chunker.ts +245 -0
- package/src/ipld/index.ts +4 -0
- package/src/ipld/nodes.ts +208 -0
- package/src/ipld/utils.ts +21 -0
- package/src/metadata/index.ts +2 -0
- package/src/metadata/offchain/base.ts +4 -0
- package/src/metadata/offchain/file.ts +41 -0
- package/src/metadata/offchain/folder.ts +54 -0
- package/src/metadata/offchain/index.ts +3 -0
- package/src/metadata/onchain/index.ts +2 -0
- package/src/metadata/onchain/protobuf/OnchainMetadata.proto +46 -0
- package/src/metadata/onchain/protobuf/OnchainMetadata.ts +397 -0
- package/src/metadata/onchain/utils.ts +15 -0
- package/src/utils/async.ts +20 -0
- package/src/utils/index.ts +1 -0
- package/src/utils/types.ts +1 -0
- package/tests/chunker.spec.ts +294 -0
- package/tests/cid.spec.ts +20 -0
- package/tests/compression.spec.ts +58 -0
- package/tests/encryption.spec.ts +67 -0
- package/tests/nodes.spec.ts +74 -0
- package/tsconfig.json +14 -0
package/LICENSE
ADDED
@@ -0,0 +1,18 @@
+ MIT License
+
+ Copyright (c) 2024 Autonomys Network (autonomys.xyz)
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ 1. **Attribution**: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+ 2. **No Warranty**: THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+
+ 3. **Limitation of Liability**: IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+ ---
package/README.md
ADDED
@@ -0,0 +1,167 @@
+ # Auto DAG Data
+
+
+
+ [](https://github.com/autonomys/auto-sdk/tags)
+ [](https://github.com/autonomys/auto-sdk/actions/workflows/build.yaml)
+ [](https://badge.fury.io/js/@autonomys/auto-dag-data)
+
+ ## Overview
+
+ The **Autonomys Auto DAG Data SDK** (`@autonomys/auto-dag-data`) provides utilities for creating and managing IPLD DAGs (InterPlanetary Linked Data Directed Acyclic Graphs) for files and folders. It facilitates chunking large files, handling metadata, and creating folder structures suitable for distributed storage systems like IPFS.
+
+ ## Features
+
+ - **File Chunking and DAG Creation**: Efficiently split large files into smaller chunks and create IPLD DAGs.
+ - **Folder Structure Creation**: Generate IPLD DAGs for directory structures.
+ - **Metadata Handling**: Add and manage metadata for files and folders.
+ - **CID Management**: Utilities for working with Content Identifiers (CIDs).
+ - **TypeScript Support**: Fully typed for enhanced developer experience.
+
+ ## Installation
+
+ You can install Auto-DAG-Data using npm or yarn:
+
+ ```bash
+ npm install @autonomys/auto-dag-data
+ ```
+
+ or
+
+ ```bash
+ yarn add @autonomys/auto-dag-data
+ ```
+
+ ## Usage
+
+ ### Creating an IPLD DAG from a File
+
+ To create an IPLD DAG from a file, you can use the `createFileIPLDDag` function:
+
+ ```typescript
+ import { createFileIPLDDag } from '@autonomys/auto-dag-data'
+ import fs from 'fs'
+
+ const fileBuffer = fs.readFileSync('path/to/your/file.txt')
+
+ const dag = createFileIPLDDag(fileBuffer, 'file.txt')
+ ```
+
+ ### Creating an IPLD DAG from a Folder
+
+ To create an IPLD DAG from a folder, you can use the `createFolderIPLDDag` function:
+
+ ```typescript
+ import { createFolderIPLDDag } from '@autonomys/auto-dag-data'
+ import { CID } from 'multiformats'
+
+ // Example child CIDs and folder information
+ const childCIDs: CID[] = [
+   /* array of CIDs */
+ ]
+ const folderName = 'my-folder'
+ const folderSize = 1024 // size in bytes
+
+ const folderDag = createFolderIPLDDag(childCIDs, folderName, folderSize)
+ ```
+
+ ### Working with CIDs
+
+ You can use functions from the `cid` module to work with CIDs:
+
+ ```typescript
+ import { cidOfNode, cidToString, stringToCid } from '@autonomys/auto-dag-data'
+
+ // Create a CID from a node
+ const cid = cidOfNode(dag.head)
+
+ // Convert the CID to a string
+ const cidString = cidToString(cid)
+
+ // Parse a string back into a CID
+ const parsedCID = stringToCid(cidString)
+ ```
+
+ ### Encoding and Decoding Nodes
+
+ You can encode and decode IPLD nodes:
+
+ ```typescript
+ import { encodeNode, decodeNode } from '@autonomys/auto-dag-data'
+
+ // Encode a node
+ const encodedNode = encodeNode(dag.head)
+
+ // Decode a node
+ const decodedNode = decodeNode(encodedNode)
+ ```
+
+ ### Handling Metadata
+
+ To add metadata to a node, you can create a metadata node:
+
+ ```typescript
+ import { createMetadataNode } from '@autonomys/auto-dag-data'
+
+ const metadata = {
+   name: 'My File',
+   description: 'This is a sample file',
+   // ... other metadata fields
+ }
+
+ const metadataNode = createMetadataNode(metadata)
+ ```
+
+ ### Example: Creating a File DAG and Converting to CID
+
+ ```typescript
+ import { createFileIPLDDag, cidOfNode, cidToString } from '@autonomys/auto-dag-data'
+ import fs from 'fs'
+
+ const fileBuffer = fs.readFileSync('path/to/your/file.txt')
+
+ const dag = createFileIPLDDag(fileBuffer, 'file.txt')
+
+ const cid = cidOfNode(dag.headCID)
+ const cidString = cidToString(cid)
+
+ console.log(`CID of the file DAG: ${cidString}`)
+ ```
+
+ ### Example: Converting Metadata To DAG
+
+ ```typescript
+ import {
+   createMetadataIPLDDag,
+   cidOfNode,
+   cidToString,
+   type OffchainMetadata,
+ } from '@autonomys/auto-dag-data'
+ import fs from 'fs'
+
+ const metadata: OffchainMetadata = fs.readFileSync('path/to/your/metadata.json')
+
+ const dag = createMetadataIPLDDag(metadata)
+
+ const cid = cidOfNode(dag.headCID)
+ const cidString = cidToString(cid)
+
+ console.log(`CID of the metadata DAG: ${cidString}`)
+ ```
+
+ ## License
+
+ This project is licensed under the MIT License. See the [LICENSE](LICENSE) file for details.
+
+ ## Additional Resources
+
+ - **Autonomys Academy**: Learn more at [Autonomys Academy](https://academy.autonomys.xyz).
+ - **Auto-Utils Package**: Utility functions used alongside `auto-dag-data` can be found in [`@autonomys/auto-utils`](../Auto-Utils/README.md).
+
+ ## Contact
+
+ If you have any questions or need support, feel free to reach out:
+
+ - **GitHub Issues**: [GitHub Issues Page](https://github.com/autonomys/auto-sdk/issues)
+
+ We appreciate your feedback and contributions!
package/dist/cid/index.d.ts
ADDED
@@ -0,0 +1,9 @@
+ import { CID } from 'multiformats/cid';
+ import { PBNode } from '../ipld/utils.js';
+ export declare const BLAKE3_CODE = 31;
+ export declare const cidOfNode: (node: PBNode) => CID<unknown, 85, 31, 1>;
+ export declare const cidToString: (cid: CID) => string;
+ export declare const stringToCid: (str: string) => CID<unknown, number, number, import("multiformats/cid").Version>;
+ export declare const cidFromBlakeHash: (hash: Buffer) => CID<unknown, 85, 31, 1>;
+ export declare const blake3HashFromCid: (cid: CID) => Uint8Array;
+ //# sourceMappingURL=index.d.ts.map
package/dist/cid/index.d.ts.map
ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/cid/index.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,GAAG,EAAE,MAAM,kBAAkB,CAAA;AAGtC,OAAO,EAAc,MAAM,EAAE,MAAM,kBAAkB,CAAA;AAErD,eAAO,MAAM,WAAW,KAAO,CAAA;AAE/B,eAAO,MAAM,SAAS,SAAU,MAAM,4BAErC,CAAA;AAED,eAAO,MAAM,WAAW,QAAS,GAAG,WAEnC,CAAA;AAED,eAAO,MAAM,WAAW,QAAS,MAAM,qEAEtC,CAAA;AAED,eAAO,MAAM,gBAAgB,SAAU,MAAM,4BAE5C,CAAA;AAED,eAAO,MAAM,iBAAiB,QAAS,GAAG,eAAyB,CAAA"}
package/dist/cid/index.js
ADDED
@@ -0,0 +1,20 @@
+ import { hash } from 'blake3';
+ import * as base32 from 'multiformats/bases/base32';
+ import { CID } from 'multiformats/cid';
+ import * as raw from 'multiformats/codecs/raw';
+ import { create } from 'multiformats/hashes/digest';
+ import { encodeNode } from '../ipld/utils.js';
+ export const BLAKE3_CODE = 0x1f;
+ export const cidOfNode = (node) => {
+     return cidFromBlakeHash(hash(encodeNode(node)));
+ };
+ export const cidToString = (cid) => {
+     return cid.toString(base32.base32);
+ };
+ export const stringToCid = (str) => {
+     return CID.parse(str, base32.base32);
+ };
+ export const cidFromBlakeHash = (hash) => {
+     return CID.create(1, raw.code, create(BLAKE3_CODE, hash));
+ };
+ export const blake3HashFromCid = (cid) => cid.multihash.digest;
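For orientation, a minimal usage sketch of the CID helpers declared above (illustrative only, not part of the package contents). It assumes these helpers are re-exported from the package root, as the README examples suggest, and that the `blake3` dependency used by the compiled code is available:

```typescript
// Sketch only: derive a CID from a BLAKE3 digest, round-trip it through its
// base32 string form, and recover the digest. Variable names are ours.
import { cidFromBlakeHash, blake3HashFromCid, cidToString, stringToCid } from '@autonomys/auto-dag-data'
import { hash } from 'blake3'

const digest = Buffer.from(hash(Buffer.from('hello world'))) // 32-byte BLAKE3 digest
const cid = cidFromBlakeHash(digest) // CIDv1, raw codec, multihash code 0x1f

const restored = stringToCid(cidToString(cid)) // string round trip
console.log(Buffer.from(blake3HashFromCid(restored)).equals(digest)) // expected: true
```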
package/dist/compression/index.d.ts
ADDED
@@ -0,0 +1,7 @@
+ import type { AwaitIterable } from 'interface-store';
+ import type { PickPartial } from '../utils/types.js';
+ import { CompressionOptions } from './types.js';
+ export declare const COMPRESSION_CHUNK_SIZE: number;
+ export declare function compressFile(file: AwaitIterable<Buffer>, { level, chunkSize, algorithm, }: PickPartial<CompressionOptions, 'algorithm'>): AsyncIterable<Buffer>;
+ export declare function decompressFile(compressedFile: AwaitIterable<Buffer>, { chunkSize, algorithm, level, }: PickPartial<CompressionOptions, 'algorithm'>): AsyncIterable<Buffer>;
+ //# sourceMappingURL=index.d.ts.map
package/dist/compression/index.d.ts.map
ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/compression/index.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAGpD,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAA;AACpD,OAAO,EAAE,kBAAkB,EAAE,MAAM,YAAY,CAAA;AAE/C,eAAO,MAAM,sBAAsB,QAAc,CAAA;AAEjD,wBAAuB,YAAY,CACjC,IAAI,EAAE,aAAa,CAAC,MAAM,CAAC,EAC3B,EACE,KAAS,EACT,SAAkC,EAClC,SAAS,GACV,EAAE,WAAW,CAAC,kBAAkB,EAAE,WAAW,CAAC,GAC9C,aAAa,CAAC,MAAM,CAAC,CA6BvB;AAED,wBAAuB,cAAc,CACnC,cAAc,EAAE,aAAa,CAAC,MAAM,CAAC,EACrC,EACE,SAAkC,EAClC,SAAqC,EACrC,KAAS,GACV,EAAE,WAAW,CAAC,kBAAkB,EAAE,WAAW,CAAC,GAC9C,aAAa,CAAC,MAAM,CAAC,CA6BvB"}
package/dist/compression/index.js
ADDED
@@ -0,0 +1,106 @@
+ var __asyncValues = (this && this.__asyncValues) || function (o) {
+     if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+     var m = o[Symbol.asyncIterator], i;
+     return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
+     function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
+     function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
+ };
+ var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); }
+ var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) {
+     if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+     var g = generator.apply(thisArg, _arguments || []), i, q = [];
+     return i = Object.create((typeof AsyncIterator === "function" ? AsyncIterator : Object).prototype), verb("next"), verb("throw"), verb("return", awaitReturn), i[Symbol.asyncIterator] = function () { return this; }, i;
+     function awaitReturn(f) { return function (v) { return Promise.resolve(v).then(f, reject); }; }
+     function verb(n, f) { if (g[n]) { i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; if (f) i[n] = f(i[n]); } }
+     function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
+     function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
+     function fulfill(value) { resume("next", value); }
+     function reject(value) { resume("throw", value); }
+     function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
+ };
+ import { Unzlib, Zlib } from 'fflate';
+ import { CompressionAlgorithm } from '../metadata/index.js';
+ import { asyncByChunk } from '../utils/async.js';
+ export const COMPRESSION_CHUNK_SIZE = 1024 * 1024;
+ export function compressFile(file_1, _a) {
+     return __asyncGenerator(this, arguments, function* compressFile_1(file, { level = 9, chunkSize = COMPRESSION_CHUNK_SIZE, algorithm, }) {
+         var _b, e_1, _c, _d;
+         if (algorithm !== CompressionAlgorithm.ZLIB) {
+             throw new Error('Unsupported compression algorithm');
+         }
+         if (level < 0 || level > 9) {
+             throw new Error('Invalid compression level');
+         }
+         if (chunkSize <= 0) {
+             throw new Error('Invalid chunk size');
+         }
+         const zlib = new Zlib({ level });
+         const compressedChunks = [];
+         zlib.ondata = (chunk) => {
+             compressedChunks.push(Buffer.from(chunk));
+         };
+         try {
+             for (var _e = true, _f = __asyncValues(asyncByChunk(file, chunkSize)), _g; _g = yield __await(_f.next()), _b = _g.done, !_b; _e = true) {
+                 _d = _g.value;
+                 _e = false;
+                 const chunk = _d;
+                 zlib.push(chunk, false);
+                 while (compressedChunks.length > 0) {
+                     yield yield __await(compressedChunks.shift());
+                 }
+             }
+         }
+         catch (e_1_1) { e_1 = { error: e_1_1 }; }
+         finally {
+             try {
+                 if (!_e && !_b && (_c = _f.return)) yield __await(_c.call(_f));
+             }
+             finally { if (e_1) throw e_1.error; }
+         }
+         zlib.push(new Uint8Array(), true);
+         while (compressedChunks.length > 0) {
+             yield yield __await(compressedChunks.shift());
+         }
+     });
+ }
+ export function decompressFile(compressedFile_1, _a) {
+     return __asyncGenerator(this, arguments, function* decompressFile_1(compressedFile, { chunkSize = COMPRESSION_CHUNK_SIZE, algorithm = CompressionAlgorithm.ZLIB, level = 9, }) {
+         var _b, e_2, _c, _d;
+         if (algorithm !== CompressionAlgorithm.ZLIB) {
+             throw new Error('Unsupported compression algorithm');
+         }
+         if (chunkSize <= 0) {
+             throw new Error('Invalid chunk size');
+         }
+         if (level < 0 || level > 9) {
+             throw new Error('Invalid compression level');
+         }
+         const unzlib = new Unzlib();
+         const decompressedChunks = [];
+         unzlib.ondata = (chunk) => {
+             decompressedChunks.push(Buffer.from(chunk));
+         };
+         try {
+             for (var _e = true, _f = __asyncValues(asyncByChunk(compressedFile, chunkSize)), _g; _g = yield __await(_f.next()), _b = _g.done, !_b; _e = true) {
+                 _d = _g.value;
+                 _e = false;
+                 const chunk = _d;
+                 unzlib.push(chunk, false);
+                 while (decompressedChunks.length > 0) {
+                     yield yield __await(decompressedChunks.shift());
+                 }
+             }
+         }
+         catch (e_2_1) { e_2 = { error: e_2_1 }; }
+         finally {
+             try {
+                 if (!_e && !_b && (_c = _f.return)) yield __await(_c.call(_f));
+             }
+             finally { if (e_2) throw e_2.error; }
+         }
+         unzlib.push(new Uint8Array(), true);
+         while (decompressedChunks.length > 0) {
+             yield yield __await(decompressedChunks.shift());
+         }
+     });
+ }
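As a reading aid for the compiled stream code above, here is a hypothetical round trip through `compressFile`/`decompressFile` as declared in `dist/compression/index.d.ts`. Both functions consume and produce async iterables of `Buffer`, so data can be streamed chunk by chunk; the sketch assumes the functions and the `CompressionAlgorithm` enum are re-exported from the package root, and the helper names are ours:

```typescript
// Sketch only: compress a buffer and decompress it again through the streaming API.
import { compressFile, decompressFile, CompressionAlgorithm } from '@autonomys/auto-dag-data'

// Hypothetical helper: expose a single Buffer as an async iterable source.
async function* asIterable(buf: Buffer): AsyncIterable<Buffer> {
  yield buf
}

async function roundTrip(data: Buffer): Promise<Buffer> {
  const compressed = compressFile(asIterable(data), { algorithm: CompressionAlgorithm.ZLIB })
  const decompressed = decompressFile(compressed, { algorithm: CompressionAlgorithm.ZLIB })

  const chunks: Buffer[] = []
  for await (const chunk of decompressed) chunks.push(chunk)
  return Buffer.concat(chunks)
}

roundTrip(Buffer.from('hello world')).then((out) => console.log(out.toString())) // expected: 'hello world'
```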
package/dist/compression/types.d.ts
ADDED
@@ -0,0 +1,9 @@
+ import { CompressionAlgorithm } from '../metadata/index.js';
+ export type CompressionLevel = 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9;
+ export type ZLibOptions = {
+     algorithm: CompressionAlgorithm.ZLIB;
+     level: CompressionLevel;
+     chunkSize: number;
+ };
+ export type CompressionOptions = ZLibOptions;
+ //# sourceMappingURL=types.d.ts.map
package/dist/compression/types.d.ts.map
ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/compression/types.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,oBAAoB,EAAE,MAAM,sBAAsB,CAAA;AAE3D,MAAM,MAAM,gBAAgB,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,CAAA;AAEpE,MAAM,MAAM,WAAW,GAAG;IACxB,SAAS,EAAE,oBAAoB,CAAC,IAAI,CAAA;IACpC,KAAK,EAAE,gBAAgB,CAAA;IACvB,SAAS,EAAE,MAAM,CAAA;CAClB,CAAA;AAED,MAAM,MAAM,kBAAkB,GAAG,WAAW,CAAA"}
package/dist/compression/types.js
ADDED
@@ -0,0 +1 @@
+ export {};
package/dist/encryption/index.d.ts
ADDED
@@ -0,0 +1,8 @@
+ import { EncryptionOptions } from '../metadata/index.js';
+ import type { PickPartial } from '../utils/types.js';
+ import { PasswordGenerationOptions } from './types.js';
+ export declare const ENCRYPTING_CHUNK_SIZE: number;
+ export declare const getKeyFromPassword: ({ password, salt }: PasswordGenerationOptions) => Promise<CryptoKey>;
+ export declare const encryptFile: (file: AsyncIterable<Buffer>, password: string, { chunkSize, algorithm }: PickPartial<EncryptionOptions, "algorithm">) => AsyncIterable<Buffer>;
+ export declare const decryptFile: (file: AsyncIterable<Buffer>, password: string, { chunkSize, algorithm }: PickPartial<EncryptionOptions, "algorithm">) => AsyncIterable<Buffer>;
+ //# sourceMappingURL=index.d.ts.map
package/dist/encryption/index.d.ts.map
ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/encryption/index.ts"],"names":[],"mappings":"AAEA,OAAO,EAAuB,iBAAiB,EAAE,MAAM,sBAAsB,CAAA;AAE7E,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAA;AACpD,OAAO,EAAE,yBAAyB,EAAE,MAAM,YAAY,CAAA;AAItD,eAAO,MAAM,qBAAqB,QAAc,CAAA;AAMhD,eAAO,MAAM,kBAAkB,uBAA8B,yBAAyB,uBAyBrF,CAAA;AAED,eAAO,MAAM,WAAW,SAChB,aAAa,CAAC,MAAM,CAAC,YACjB,MAAM,4BACkC,WAAW,CAAC,iBAAiB,EAAE,WAAW,CAAC,KAC5F,aAAa,CAAC,MAAM,CAetB,CAAA;AAED,eAAO,MAAM,WAAW,SAChB,aAAa,CAAC,MAAM,CAAC,YACjB,MAAM,4BACiC,WAAW,CAAC,iBAAiB,EAAE,WAAW,CAAC,KAC3F,aAAa,CAAC,MAAM,CA+BtB,CAAA"}
package/dist/encryption/index.js
ADDED
@@ -0,0 +1,121 @@
+ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+     function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+     return new (P || (P = Promise))(function (resolve, reject) {
+         function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+         function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+         function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+         step((generator = generator.apply(thisArg, _arguments || [])).next());
+     });
+ };
+ var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); }
+ var __asyncValues = (this && this.__asyncValues) || function (o) {
+     if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+     var m = o[Symbol.asyncIterator], i;
+     return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
+     function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
+     function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
+ };
+ var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) {
+     if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+     var g = generator.apply(thisArg, _arguments || []), i, q = [];
+     return i = Object.create((typeof AsyncIterator === "function" ? AsyncIterator : Object).prototype), verb("next"), verb("throw"), verb("return", awaitReturn), i[Symbol.asyncIterator] = function () { return this; }, i;
+     function awaitReturn(f) { return function (v) { return Promise.resolve(v).then(f, reject); }; }
+     function verb(n, f) { if (g[n]) { i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; if (f) i[n] = f(i[n]); } }
+     function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
+     function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
+     function fulfill(value) { resume("next", value); }
+     function reject(value) { resume("throw", value); }
+     function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
+ };
+ import { Crypto } from '@peculiar/webcrypto';
+ import { randomBytes } from 'crypto';
+ import { EncryptionAlgorithm } from '../metadata/index.js';
+ import { asyncByChunk } from '../utils/async.js';
+ const crypto = new Crypto();
+ export const ENCRYPTING_CHUNK_SIZE = 1024 * 1024;
+ const IV_SIZE = 16;
+ const TAG_SIZE = 16;
+ const ENCRYPTED_CHUNK_SIZE = ENCRYPTING_CHUNK_SIZE + IV_SIZE + TAG_SIZE;
+ const SALT_SIZE = 32;
+ export const getKeyFromPassword = (_a) => __awaiter(void 0, [_a], void 0, function* ({ password, salt }) {
+     const encoder = new TextEncoder();
+     const saltHash = typeof salt === 'string' ? yield crypto.subtle.digest('SHA-256', encoder.encode(salt)) : salt;
+     const keyMaterial = yield crypto.subtle.importKey('raw', encoder.encode(password), 'PBKDF2', false, ['deriveBits', 'deriveKey']);
+     return crypto.subtle.deriveKey({
+         name: 'PBKDF2',
+         salt: saltHash,
+         iterations: 100000,
+         hash: 'SHA-256',
+     }, keyMaterial, { name: 'AES-GCM', length: 256 }, false, ['encrypt', 'decrypt']);
+ });
+ export const encryptFile = function (file_1, password_1, _a) {
+     return __asyncGenerator(this, arguments, function* (file, password, { chunkSize = ENCRYPTING_CHUNK_SIZE, algorithm }) {
+         var _b, e_1, _c, _d;
+         if (algorithm !== EncryptionAlgorithm.AES_256_GCM) {
+             throw new Error('Unsupported encryption algorithm');
+         }
+         const salt = randomBytes(SALT_SIZE);
+         const key = yield __await(getKeyFromPassword({ password, salt }));
+         yield yield __await(salt);
+         try {
+             for (var _e = true, _f = __asyncValues(asyncByChunk(file, chunkSize)), _g; _g = yield __await(_f.next()), _b = _g.done, !_b; _e = true) {
+                 _d = _g.value;
+                 _e = false;
+                 const chunk = _d;
+                 const iv = crypto.getRandomValues(new Uint8Array(IV_SIZE));
+                 const encrypted = yield __await(crypto.subtle.encrypt({ name: 'AES-GCM', iv }, key, chunk));
+                 yield yield __await(Buffer.concat([Buffer.from(iv), Buffer.from(encrypted)]));
+             }
+         }
+         catch (e_1_1) { e_1 = { error: e_1_1 }; }
+         finally {
+             try {
+                 if (!_e && !_b && (_c = _f.return)) yield __await(_c.call(_f));
+             }
+             finally { if (e_1) throw e_1.error; }
+         }
+     });
+ };
+ export const decryptFile = function (file_1, password_1, _a) {
+     return __asyncGenerator(this, arguments, function* (file, password, { chunkSize = ENCRYPTED_CHUNK_SIZE, algorithm }) {
+         var _b, e_2, _c, _d;
+         if (algorithm !== EncryptionAlgorithm.AES_256_GCM) {
+             throw new Error('Unsupported encryption algorithm');
+         }
+         let key = undefined;
+         let chunks = Buffer.alloc(0);
+         try {
+             for (var _e = true, file_2 = __asyncValues(file), file_2_1; file_2_1 = yield __await(file_2.next()), _b = file_2_1.done, !_b; _e = true) {
+                 _d = file_2_1.value;
+                 _e = false;
+                 const chunk = _d;
+                 chunks = Buffer.concat([chunks, chunk]);
+                 if (chunks.length >= SALT_SIZE && !key) {
+                     const salt = chunks.subarray(0, 32);
+                     key = yield __await(getKeyFromPassword({ password, salt }));
+                     chunks = chunks.subarray(SALT_SIZE);
+                 }
+                 while (key && chunks.length >= chunkSize) {
+                     const iv = chunks.subarray(0, IV_SIZE);
+                     const encryptedChunk = chunk.subarray(IV_SIZE, chunkSize);
+                     const decrypted = yield __await(crypto.subtle.decrypt({ name: 'AES-GCM', iv }, key, encryptedChunk));
+                     chunks = chunks.subarray(chunkSize);
+                     yield yield __await(Buffer.from(decrypted));
+                 }
+             }
+         }
+         catch (e_2_1) { e_2 = { error: e_2_1 }; }
+         finally {
+             try {
+                 if (!_e && !_b && (_c = file_2.return)) yield __await(_c.call(file_2));
+             }
+             finally { if (e_2) throw e_2.error; }
+         }
+         if (key && chunks.length > 0) {
+             const iv = chunks.subarray(0, IV_SIZE);
+             const encryptedChunk = chunks.subarray(IV_SIZE, chunkSize);
+             const decrypted = yield __await(crypto.subtle.decrypt({ name: 'AES-GCM', iv }, key, encryptedChunk));
+             yield yield __await(Buffer.from(decrypted));
+         }
+     });
+ };
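Similarly, a hypothetical password-based round trip through `encryptFile`/`decryptFile` as declared in `dist/encryption/index.d.ts`: the compiled code above emits the random salt as the first chunk and prefixes each ciphertext chunk with its IV, so decryption only needs the same password. The sketch assumes these exports (and the `EncryptionAlgorithm` enum) are available from the package root; helper names are ours:

```typescript
// Sketch only: encrypt and decrypt a small buffer with AES-256-GCM keyed from a password.
import { encryptFile, decryptFile, EncryptionAlgorithm } from '@autonomys/auto-dag-data'

// Hypothetical helper: expose a single Buffer as an async iterable source.
async function* asIterable(buf: Buffer): AsyncIterable<Buffer> {
  yield buf
}

async function demo(): Promise<void> {
  const secret = Buffer.from('top secret contents')

  const encrypted = encryptFile(asIterable(secret), 'my-password', {
    algorithm: EncryptionAlgorithm.AES_256_GCM,
  })
  const decrypted = decryptFile(encrypted, 'my-password', {
    algorithm: EncryptionAlgorithm.AES_256_GCM,
  })

  const parts: Buffer[] = []
  for await (const part of decrypted) parts.push(part)
  console.log(Buffer.concat(parts).toString()) // expected: 'top secret contents'
}

demo()
```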
package/dist/encryption/types.d.ts.map
ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/encryption/types.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,yBAAyB,GAAG;IACtC,QAAQ,EAAE,MAAM,CAAA;IAChB,IAAI,EAAE,MAAM,GAAG,UAAU,CAAA;CAC1B,CAAA"}
package/dist/encryption/types.js
ADDED
@@ -0,0 +1 @@
+ export {};
package/dist/index.d.ts
ADDED
package/dist/index.d.ts.map
ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,gBAAgB,CAAA;AAC9B,cAAc,wBAAwB,CAAA;AACtC,cAAc,uBAAuB,CAAA;AACrC,cAAc,iBAAiB,CAAA;AAC/B,cAAc,qBAAqB,CAAA"}
package/dist/index.js
ADDED
package/dist/ipld/blockstore/base.d.ts
ADDED
@@ -0,0 +1,9 @@
+ import { BaseBlockstore } from 'blockstore-core';
+ import type { Pair } from 'interface-blockstore';
+ import { AbortOptions, AwaitIterable } from 'interface-store';
+ import { MetadataType } from '../../metadata/index.js';
+ export interface IPLDBlockstore extends BaseBlockstore {
+     getFilteredMany(nodeType: MetadataType, options?: AbortOptions): AwaitIterable<Pair['cid']>;
+     getSize(cid: Pair['cid']): Promise<number>;
+ }
+ //# sourceMappingURL=base.d.ts.map
package/dist/ipld/blockstore/base.d.ts.map
ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"base.d.ts","sourceRoot":"","sources":["../../../src/ipld/blockstore/base.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,iBAAiB,CAAA;AAChD,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,sBAAsB,CAAA;AAChD,OAAO,EAAE,YAAY,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAC7D,OAAO,EAAE,YAAY,EAAE,MAAM,yBAAyB,CAAA;AAEtD,MAAM,WAAW,cAAe,SAAQ,cAAc;IACpD,eAAe,CAAC,QAAQ,EAAE,YAAY,EAAE,OAAO,CAAC,EAAE,YAAY,GAAG,aAAa,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAA;IAC3F,OAAO,CAAC,GAAG,EAAE,IAAI,CAAC,KAAK,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC,CAAA;CAC3C"}
package/dist/ipld/blockstore/base.js
ADDED
@@ -0,0 +1 @@
+ export {};
package/dist/ipld/blockstore/index.d.ts.map
ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/ipld/blockstore/index.ts"],"names":[],"mappings":"AAAA,cAAc,WAAW,CAAA;AACzB,cAAc,aAAa,CAAA"}
package/dist/ipld/blockstore/memory.d.ts
ADDED
@@ -0,0 +1,13 @@
+ import { MemoryBlockstore } from 'blockstore-core';
+ import { Pair } from 'interface-blockstore';
+ import { AbortOptions, AwaitIterable } from 'interface-store';
+ import { CID, Version } from 'multiformats';
+ import { MetadataType } from '../../metadata/index.js';
+ import { IPLDBlockstore } from './base.js';
+ export declare class MemoryIPLDBlockstore extends MemoryBlockstore implements IPLDBlockstore {
+     private readonly nodeByType;
+     getFilteredMany(nodeType: MetadataType, options?: AbortOptions): AwaitIterable<Pair['cid']>;
+     put(key: CID<unknown, number, number, Version>, val: Uint8Array): Promise<CID<unknown, number, number, Version>>;
+     getSize(cid: CID): Promise<number>;
+ }
+ //# sourceMappingURL=memory.d.ts.map
package/dist/ipld/blockstore/memory.d.ts.map
ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"memory.d.ts","sourceRoot":"","sources":["../../../src/ipld/blockstore/memory.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,gBAAgB,EAAE,MAAM,iBAAiB,CAAA;AAClD,OAAO,EAAE,IAAI,EAAE,MAAM,sBAAsB,CAAA;AAC3C,OAAO,EAAE,YAAY,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAC7D,OAAO,EAAE,GAAG,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AAC3C,OAAO,EAAoC,YAAY,EAAE,MAAM,yBAAyB,CAAA;AACxF,OAAO,EAAE,cAAc,EAAE,MAAM,WAAW,CAAA;AAE1C,qBAAa,oBAAqB,SAAQ,gBAAiB,YAAW,cAAc;IAClF,OAAO,CAAC,QAAQ,CAAC,UAAU,CAAiC;IACrD,eAAe,CACpB,QAAQ,EAAE,YAAY,EACtB,OAAO,CAAC,EAAE,YAAY,GACrB,aAAa,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IAMvB,GAAG,CACP,GAAG,EAAE,GAAG,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,EAC1C,GAAG,EAAE,UAAU,GACd,OAAO,CAAC,GAAG,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,CAAC;IAM3C,OAAO,CAAC,GAAG,EAAE,GAAG,GAAG,OAAO,CAAC,MAAM,CAAC;CAKzC"}
package/dist/ipld/blockstore/memory.js
ADDED
@@ -0,0 +1,57 @@
+ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+     function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+     return new (P || (P = Promise))(function (resolve, reject) {
+         function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+         function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+         function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+         step((generator = generator.apply(thisArg, _arguments || [])).next());
+     });
+ };
+ var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); }
+ var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) {
+     if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+     var g = generator.apply(thisArg, _arguments || []), i, q = [];
+     return i = Object.create((typeof AsyncIterator === "function" ? AsyncIterator : Object).prototype), verb("next"), verb("throw"), verb("return", awaitReturn), i[Symbol.asyncIterator] = function () { return this; }, i;
+     function awaitReturn(f) { return function (v) { return Promise.resolve(v).then(f, reject); }; }
+     function verb(n, f) { if (g[n]) { i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; if (f) i[n] = f(i[n]); } }
+     function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
+     function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
+     function fulfill(value) { resume("next", value); }
+     function reject(value) { resume("throw", value); }
+     function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
+ };
+ import { MemoryBlockstore } from 'blockstore-core';
+ import { decodeIPLDNodeData } from '../../metadata/index.js';
+ export class MemoryIPLDBlockstore extends MemoryBlockstore {
+     constructor() {
+         super(...arguments);
+         this.nodeByType = new Map();
+     }
+     getFilteredMany(nodeType, options) {
+         return __asyncGenerator(this, arguments, function* getFilteredMany_1() {
+             var _a;
+             for (const cid of (_a = this.nodeByType.get(nodeType)) !== null && _a !== void 0 ? _a : []) {
+                 yield yield __await(cid);
+             }
+         });
+     }
+     put(key, val) {
+         const _super = Object.create(null, {
+             put: { get: () => super.put }
+         });
+         return __awaiter(this, void 0, void 0, function* () {
+             var _a;
+             const data = decodeIPLDNodeData(val);
+             this.nodeByType.set(data.type, [...((_a = this.nodeByType.get(data.type)) !== null && _a !== void 0 ? _a : []), key]);
+             return _super.put.call(this, key, val);
+         });
+     }
+     getSize(cid) {
+         return __awaiter(this, void 0, void 0, function* () {
+             var _a;
+             const bytes = yield this.get(cid);
+             const data = decodeIPLDNodeData(bytes);
+             return (_a = data.size) !== null && _a !== void 0 ? _a : 0;
+         });
+     }
+ }
package/dist/ipld/builders.d.ts
ADDED
@@ -0,0 +1,12 @@
+ import { CID } from 'multiformats/cid';
+ import { FileUploadOptions } from '../metadata/index.js';
+ import { PBNode } from './index.js';
+ export interface Builders {
+     inlink: (links: CID[], size: number, linkDepth: number, chunkSize: number) => PBNode;
+     chunk: (data: Buffer) => PBNode;
+     root: (links: CID[], size: number, linkDepth: number, name?: string, maxNodeSize?: number, fileUploadOptions?: FileUploadOptions) => PBNode;
+     single: (data: Buffer, filename?: string, fileUploadOptions?: FileUploadOptions) => PBNode;
+ }
+ export declare const metadataBuilders: Builders;
+ export declare const fileBuilders: Builders;
+ //# sourceMappingURL=builders.d.ts.map
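Finally, a small hypothetical sketch of the `Builders` interface declared above: `fileBuilders.single` wraps a buffer into one `PBNode`, whose CID can then be derived with the helpers from `dist/cid`. Treat the root-level export of `fileBuilders` as an assumption; it is only visible here through its type declaration.

```typescript
// Sketch only: build a single-node file entry and print its CID string.
// Assumes fileBuilders and the CID helpers are exposed from the package root.
import { fileBuilders, cidOfNode, cidToString } from '@autonomys/auto-dag-data'

const node = fileBuilders.single(Buffer.from('hello world'), 'hello.txt')
console.log(cidToString(cidOfNode(node)))
```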