@shhhum/xftp-web 0.2.0 → 0.4.0
- package/README.md +64 -15
- package/dist/agent.d.ts +28 -9
- package/dist/agent.d.ts.map +1 -1
- package/dist/agent.js +238 -98
- package/dist/agent.js.map +1 -1
- package/dist/client.d.ts +16 -16
- package/dist/client.d.ts.map +1 -1
- package/dist/client.js +27 -21
- package/dist/client.js.map +1 -1
- package/dist/crypto/digest.d.ts +4 -0
- package/dist/crypto/digest.d.ts.map +1 -1
- package/dist/crypto/digest.js +10 -0
- package/dist/crypto/digest.js.map +1 -1
- package/dist/crypto/file.d.ts +11 -0
- package/dist/crypto/file.d.ts.map +1 -1
- package/dist/crypto/file.js +48 -0
- package/dist/crypto/file.js.map +1 -1
- package/dist/protocol/commands.d.ts +9 -1
- package/dist/protocol/commands.d.ts.map +1 -1
- package/dist/protocol/commands.js +15 -1
- package/dist/protocol/commands.js.map +1 -1
- package/package.json +1 -1
- package/src/agent.ts +309 -108
- package/src/client.ts +40 -38
- package/src/crypto/digest.ts +13 -0
- package/src/crypto/file.ts +83 -0
- package/src/protocol/commands.ts +22 -2
package/README.md
CHANGED
@@ -12,39 +12,88 @@ npm install xftp-web
 
 ```typescript
 import {
-
+  XFTPAgent,
   parseXFTPServer,
-
-  decodeDescriptionURI, encodeDescriptionURI,
+  sendFile, receiveFile, deleteFile,
   XFTPRetriableError, XFTPPermanentError, isRetriable,
 } from "xftp-web"
 
 // Create agent (manages connections)
-const agent =
+const agent = new XFTPAgent()
+
+const servers = [
+  parseXFTPServer("xftp://server1..."),
+  parseXFTPServer("xftp://server2..."),
+  parseXFTPServer("xftp://server3..."),
+]
+
+// Upload (from Uint8Array)
+const {rcvDescriptions, sndDescription, uri} = await sendFile(
+  agent, servers, fileBytes, "photo.jpg",
+  {onProgress: (uploaded, total) => console.log(`${uploaded}/${total}`)}
+)
+
+// Upload (streaming — constant memory, no full-file buffer)
+const file = inputEl.files[0]
+const result = await sendFile(
+  agent, servers, file.stream(), file.size, file.name,
+  {onProgress: (uploaded, total) => console.log(`${uploaded}/${total}`)}
+)
+
+// Download
+const {header, content} = await receiveFile(agent, uri, {
+  onProgress: (downloaded, total) => console.log(`${downloaded}/${total}`)
+})
+
+// Delete (requires sender description from upload)
+await deleteFile(agent, sndDescription)
+
+// Cleanup
+agent.close()
+```
+
+### Advanced usage
+
+For streaming encryption (avoids buffering the full encrypted file) or worker-based uploads:
 
-
-
-
-
-
+```typescript
+import {
+  encryptFileForUpload, uploadFile, downloadFile,
+  decodeDescriptionURI,
+} from "xftp-web"
+
+// Streaming encryption — encrypted slices emitted via callback
+const metadata = await encryptFileForUpload(fileBytes, "photo.jpg", {
+  onSlice: (data) => { /* write to OPFS, IndexedDB, etc. */ },
+  onProgress: (done, total) => {},
 })
+// metadata has {digest, key, nonce, chunkSizes} but no encData
 
-//
+// Upload with custom chunk reader (e.g. reading from OPFS)
+const result = await uploadFile(agent, servers, metadata, {
+  readChunk: (offset, size) => readFromStorage(offset, size),
+})
+
+// Download with FileDescription object
 const fd = decodeDescriptionURI(uri)
 const {header, content} = await downloadFile(agent, fd)
+```
 
-
-await deleteFile(agent, sndDescription)
+### Upload options
 
-
-
+```typescript
+await sendFile(agent, servers, fileBytes, "photo.jpg", {
+  onProgress: (uploaded, total) => {}, // progress callback
+  auth: basicAuthBytes, // BasicAuth for auth-required servers
+  numRecipients: 3, // multiple independent download credentials (default: 1)
+})
 ```
 
 ### Error handling
 
 ```typescript
 try {
-  await
+  await sendFile(agent, servers, fileBytes, "photo.jpg")
 } catch (e) {
   if (e instanceof XFTPRetriableError) {
     // Network/timeout/session errors — safe to retry
package/dist/agent.d.ts
CHANGED
@@ -1,6 +1,7 @@
+export { prepareEncryption, type EncryptionParams } from "./crypto/file.js";
 import { type FileDescription } from "./protocol/description.js";
-import {
-export {
+import { XFTPAgent } from "./client.js";
+export { XFTPAgent, type TransportConfig, XFTPRetriableError, XFTPPermanentError, isRetriable, categorizeError, humanReadableMessage } from "./client.js";
 import type { XFTPServer } from "./protocol/address.js";
 import type { FileHeader } from "./crypto/file.js";
 export interface EncryptedFileMetadata {
@@ -13,7 +14,7 @@ export interface EncryptedFileInfo extends EncryptedFileMetadata {
     encData: Uint8Array;
 }
 export interface UploadResult {
-
+    rcvDescriptions: FileDescription[];
     sndDescription: FileDescription;
     uri: string;
 }
@@ -23,13 +24,29 @@ export interface DownloadResult {
 }
 export declare function encodeDescriptionURI(fd: FileDescription): string;
 export declare function decodeDescriptionURI(fragment: string): FileDescription;
-export
+export interface EncryptForUploadOptions {
+    onProgress?: (done: number, total: number) => void;
+    onSlice?: (data: Uint8Array) => void | Promise<void>;
+}
+export declare function encryptFileForUpload(source: Uint8Array, fileName: string, options: EncryptForUploadOptions & {
+    onSlice: NonNullable<EncryptForUploadOptions['onSlice']>;
+}): Promise<EncryptedFileMetadata>;
+export declare function encryptFileForUpload(source: Uint8Array, fileName: string, options?: EncryptForUploadOptions): Promise<EncryptedFileInfo>;
 export interface UploadOptions {
     onProgress?: (uploaded: number, total: number) => void;
     redirectThreshold?: number;
     readChunk?: (offset: number, size: number) => Promise<Uint8Array>;
+    auth?: Uint8Array;
+    numRecipients?: number;
+}
+export declare function uploadFile(agent: XFTPAgent, servers: XFTPServer[], encrypted: EncryptedFileMetadata, options?: UploadOptions): Promise<UploadResult>;
+export interface SendFileOptions {
+    onProgress?: (uploaded: number, total: number) => void;
+    auth?: Uint8Array;
+    numRecipients?: number;
 }
-export declare function
+export declare function sendFile(agent: XFTPAgent, servers: XFTPServer[], source: Uint8Array, fileName: string, options?: SendFileOptions): Promise<UploadResult>;
+export declare function sendFile(agent: XFTPAgent, servers: XFTPServer[], source: AsyncIterable<Uint8Array>, sourceSize: number, fileName: string, options?: SendFileOptions): Promise<UploadResult>;
 export interface RawDownloadedChunk {
     chunkNo: number;
     dhSecret: Uint8Array;
@@ -39,9 +56,11 @@ export interface RawDownloadedChunk {
 }
 export interface DownloadRawOptions {
     onProgress?: (downloaded: number, total: number) => void;
-    concurrency?: number;
 }
-export declare function downloadFileRaw(agent:
-export declare function downloadFile(agent:
-export declare function
+export declare function downloadFileRaw(agent: XFTPAgent, fd: FileDescription, onRawChunk: (chunk: RawDownloadedChunk) => Promise<void>, options?: DownloadRawOptions): Promise<FileDescription>;
+export declare function downloadFile(agent: XFTPAgent, fd: FileDescription, onProgress?: (downloaded: number, total: number) => void): Promise<DownloadResult>;
+export declare function receiveFile(agent: XFTPAgent, uri: string, options?: {
+    onProgress?: (downloaded: number, total: number) => void;
+}): Promise<DownloadResult>;
+export declare function deleteFile(agent: XFTPAgent, sndDescription: FileDescription): Promise<void>;
 //# sourceMappingURL=agent.d.ts.map
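The `encryptFileForUpload` overloads above put the buffering rule into the type system: passing `onSlice` yields `EncryptedFileMetadata` (no `encData`), while omitting it yields `EncryptedFileInfo` with the full ciphertext. A sketch of how the overloads resolve, assuming these declarations are re-exported from the package root (the README imports the same names from "xftp-web"):

```typescript
import {encryptFileForUpload} from "xftp-web"

declare const bytes: Uint8Array
declare function store(slice: Uint8Array): Promise<void>

// First overload: onSlice is present, so ciphertext is emitted incrementally
// and the result is EncryptedFileMetadata (digest, key, nonce, chunkSizes).
const meta = await encryptFileForUpload(bytes, "a.bin", {onSlice: store})

// Second overload: no onSlice, so the full ciphertext is returned as encData.
const info = await encryptFileForUpload(bytes, "a.bin")
info.encData.length // ok: EncryptedFileInfo
// meta.encData     // type error: encData does not exist on EncryptedFileMetadata
```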
package/dist/agent.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"agent.d.ts","sourceRoot":"","sources":["../src/agent.ts"],"names":[],"mappings":"
+{"version":3,"file":"agent.d.ts","sourceRoot":"","sources":["../src/agent.ts"],"names":[],"mappings":"AASA,OAAO,EAAC,iBAAiB,EAAE,KAAK,gBAAgB,EAAC,MAAM,kBAAkB,CAAA;AAIzE,OAAO,EAGL,KAAK,eAAe,EACrB,MAAM,2BAA2B,CAAA;AAElC,OAAO,EAE0B,SAAS,EACzC,MAAM,aAAa,CAAA;AACpB,OAAO,EAAC,SAAS,EAAE,KAAK,eAAe,EACrC,kBAAkB,EAAE,kBAAkB,EAAE,WAAW,EAAE,eAAe,EAAE,oBAAoB,EAAC,MAAM,aAAa,CAAA;AAEhH,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,uBAAuB,CAAA;AAErD,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,kBAAkB,CAAA;AAchD,MAAM,WAAW,qBAAqB;IACpC,MAAM,EAAE,UAAU,CAAA;IAClB,GAAG,EAAE,UAAU,CAAA;IACf,KAAK,EAAE,UAAU,CAAA;IACjB,UAAU,EAAE,MAAM,EAAE,CAAA;CACrB;AAED,MAAM,WAAW,iBAAkB,SAAQ,qBAAqB;IAC9D,OAAO,EAAE,UAAU,CAAA;CACpB;AAED,MAAM,WAAW,YAAY;IAC3B,eAAe,EAAE,eAAe,EAAE,CAAA;IAClC,cAAc,EAAE,eAAe,CAAA;IAC/B,GAAG,EAAE,MAAM,CAAA;CACZ;AAED,MAAM,WAAW,cAAc;IAC7B,MAAM,EAAE,UAAU,CAAA;IAClB,OAAO,EAAE,UAAU,CAAA;CACpB;AAID,wBAAgB,oBAAoB,CAAC,EAAE,EAAE,eAAe,GAAG,MAAM,CAIhE;AAED,wBAAgB,oBAAoB,CAAC,QAAQ,EAAE,MAAM,GAAG,eAAe,CAOtE;AAID,MAAM,WAAW,uBAAuB;IACtC,UAAU,CAAC,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,KAAK,IAAI,CAAA;IAClD,OAAO,CAAC,EAAE,CAAC,IAAI,EAAE,UAAU,KAAK,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;CACrD;AAED,wBAAsB,oBAAoB,CACxC,MAAM,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,EACpC,OAAO,EAAE,uBAAuB,GAAG;IAAC,OAAO,EAAE,WAAW,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAA;CAAC,GAC5F,OAAO,CAAC,qBAAqB,CAAC,CAAA;AACjC,wBAAsB,oBAAoB,CACxC,MAAM,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,EACpC,OAAO,CAAC,EAAE,uBAAuB,GAChC,OAAO,CAAC,iBAAiB,CAAC,CAAA;AA2B7B,MAAM,WAAW,aAAa;IAC5B,UAAU,CAAC,EAAE,CAAC,QAAQ,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,KAAK,IAAI,CAAA;IACtD,iBAAiB,CAAC,EAAE,MAAM,CAAA;IAC1B,SAAS,CAAC,EAAE,CAAC,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,UAAU,CAAC,CAAA;IACjE,IAAI,CAAC,EAAE,UAAU,CAAA;IACjB,aAAa,CAAC,EAAE,MAAM,CAAA;CACvB;AAuCD,wBAAsB,UAAU,CAC9B,KAAK,EAAE,SAAS,EAChB,OAAO,EAAE,UAAU,EAAE,EACrB,SAAS,EAAE,qBAAqB,EAChC,OAAO,CAAC,EAAE,aAAa,GACtB,OAAO,CAAC,YAAY,CAAC,CAkDvB;AAED,MAAM,WAAW,eAAe;IAC9B,UAAU,CAAC,EAAE,CAAC,QAAQ,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,KAAK,IAAI,CAAA;IACtD,IAAI,CAAC,EAAE,UAAU,CAAA;IACjB,aAAa,CAAC,EAAE,MAAM,CAAA;CACvB;AAED,wBAAsB,QAAQ,CAC5B,KAAK,EAAE,SAAS,EAAE,OAAO,EAAE,UAAU,EAAE,EACvC,MAAM,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,EACpC,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,YAAY,CAAC,CAAA;AACxB,wBAAsB,QAAQ,CAC5B,KAAK,EAAE,SAAS,EAAE,OAAO,EAAE,UAAU,EAAE,EACvC,MAAM,EAAE,aAAa,CAAC,UAAU,CAAC,EAAE,UAAU,EAAE,MAAM,EACrD,QAAQ,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,eAAe,GAC1C,OAAO,CAAC,YAAY,CAAC,CAAA;AAkLxB,MAAM,WAAW,kBAAkB;IACjC,OAAO,EAAE,MAAM,CAAA;IACf,QAAQ,EAAE,UAAU,CAAA;IACpB,KAAK,EAAE,UAAU,CAAA;IACjB,IAAI,EAAE,UAAU,CAAA;IAChB,MAAM,EAAE,UAAU,CAAA;CACnB;AAED,MAAM,WAAW,kBAAkB;IACjC,UAAU,CAAC,EAAE,CAAC,UAAU,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,KAAK,IAAI,CAAA;CACzD;AAED,wBAAsB,eAAe,CACnC,KAAK,EAAE,SAAS,EAChB,EAAE,EAAE,eAAe,EACnB,UAAU,EAAE,CAAC,KAAK,EAAE,kBAAkB,KAAK,OAAO,CAAC,IAAI,CAAC,EACxD,OAAO,CAAC,EAAE,kBAAkB,GAC3B,OAAO,CAAC,eAAe,CAAC,CAyC1B;AAED,wBAAsB,YAAY,CAChC,KAAK,EAAE,SAAS,EAChB,EAAE,EAAE,eAAe,EACnB,UAAU,CAAC,EAAE,CAAC,UAAU,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,KAAK,IAAI,GACvD,OAAO,CAAC,cAAc,CAAC,CAYzB;AAED,wBAAsB,WAAW,CAC/B,KAAK,EAAE,SAAS,EAChB,GAAG,EAAE,MAAM,EACX,OAAO,CAAC,EAAE;IAAC,UAAU,CAAC,EAAE,CAAC,UAAU,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,KAAK,IAAI,CAAA;CAAC,GACnE,OAAO,CAAC,cAAc,CAAC,CAGzB;AAyCD,wBAAsB,UAAU,CAAC,KAAK,EAAE,SAAS,EAAE,cAAc,EAAE,eAAe,GAAG,OAAO,CAAC,IAAI,CAAC,CAiBjG"}
package/dist/agent.js
CHANGED
@@ -3,13 +3,16 @@
 // Combines all building blocks: encryption, chunking, XFTP client commands,
 // file descriptions, and DEFLATE-compressed URI encoding.
 import pako from "pako";
-import {
+import { encryptFileAsync, prepareEncryption } from "./crypto/file.js";
+import { sbInit, sbEncryptChunk, sbAuth } from "./crypto/secretbox.js";
+import { concatBytes, encodeInt64 } from "./protocol/encoding.js";
+export { prepareEncryption } from "./crypto/file.js";
 import { generateEd25519KeyPair, encodePubKeyEd25519, encodePrivKeyEd25519, decodePrivKeyEd25519, ed25519KeyPairFromSeed } from "./crypto/keys.js";
-import { sha512Streaming } from "./crypto/digest.js";
-import {
+import { sha512Streaming, sha512Init, sha512Update, sha512Final } from "./crypto/digest.js";
+import { prepareChunkSpecs, getChunkDigest } from "./protocol/chunks.js";
 import { encodeFileDescription, decodeFileDescription, validateFileDescription, base64urlEncode, base64urlDecode } from "./protocol/description.js";
-import { createXFTPChunk, uploadXFTPChunk, downloadXFTPChunk, downloadXFTPChunkRaw, deleteXFTPChunk } from "./client.js";
-export {
+import { createXFTPChunk, addXFTPRecipients, uploadXFTPChunk, downloadXFTPChunk, downloadXFTPChunkRaw, deleteXFTPChunk, ackXFTPChunk } from "./client.js";
+export { XFTPAgent, XFTPRetriableError, XFTPPermanentError, isRetriable, categorizeError, humanReadableMessage } from "./client.js";
 import { processDownloadedFile, decryptReceivedChunk } from "./download.js";
 import { formatXFTPServer, parseXFTPServer } from "./protocol/address.js";
 // -- URI encoding/decoding (RFC section 4.1: DEFLATE + base64url)
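The comment above names the URI scheme: the file description is DEFLATE-compressed, then base64url-encoded into the URI fragment. A self-contained sketch of that round-trip using `pako`, which the module already imports; the helper names here are illustrative, not the package's internal `encodeDescriptionURI`/`decodeDescriptionURI`:

```typescript
import pako from "pako"

// Encode: YAML description -> DEFLATE -> base64url fragment.
function encodeFragment(yaml: string): string {
  const deflated = pako.deflate(new TextEncoder().encode(yaml))
  let bin = ""
  for (const b of deflated) bin += String.fromCharCode(b)
  return btoa(bin).replace(/\+/g, "-").replace(/\//g, "_").replace(/=+$/, "")
}

// Decode: base64url fragment -> INFLATE -> YAML description.
function decodeFragment(fragment: string): string {
  const b64 = fragment.replace(/-/g, "+").replace(/_/g, "/")
  const bytes = Uint8Array.from(atob(b64), c => c.charCodeAt(0))
  return new TextDecoder().decode(pako.inflate(bytes))
}
```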
@@ -27,25 +30,61 @@ export function decodeDescriptionURI(fragment) {
         throw new Error("decodeDescriptionURI: " + err);
     return fd;
 }
-
-
-    const key =
-
-
-
-
-
-
-
-
-
-
-
-
+export async function encryptFileForUpload(source, fileName, options) {
+    const { onProgress, onSlice } = options ?? {};
+    const { fileHdr, key, nonce, fileSize, encSize, chunkSizes } = prepareEncryption(source.length, fileName);
+    if (onSlice) {
+        const hashState = sha512Init();
+        await encryptFileAsync(source, fileHdr, key, nonce, fileSize, encSize, onProgress, (data) => {
+            sha512Update(hashState, data);
+            return onSlice(data);
+        });
+        const digest = sha512Final(hashState);
+        return { digest, key, nonce, chunkSizes };
+    }
+    else {
+        const encData = await encryptFileAsync(source, fileHdr, key, nonce, fileSize, encSize, onProgress);
+        const digest = sha512Streaming([encData]);
+        console.log(`[AGENT-DBG] encrypt: encData.len=${encData.length} digest=${_dbgHex(digest, 64)} chunkSizes=[${chunkSizes.join(',')}]`);
+        return { encData, digest, key, nonce, chunkSizes };
+    }
 }
 const DEFAULT_REDIRECT_THRESHOLD = 400;
-
-
+const MAX_RECIPIENTS_PER_REQUEST = 200; // each key is ~46 bytes; 200 keys fit within 16KB XFTP block
+async function uploadSingleChunk(agent, server, chunkNo, chunkData, chunkSize, numRecipients, auth) {
+    const sndKp = generateEd25519KeyPair();
+    const rcvKps = Array.from({ length: numRecipients }, () => generateEd25519KeyPair());
+    const chunkDigest = getChunkDigest(chunkData);
+    const fileInfo = {
+        sndKey: encodePubKeyEd25519(sndKp.publicKey),
+        size: chunkSize,
+        digest: chunkDigest
+    };
+    const firstBatch = Math.min(numRecipients, MAX_RECIPIENTS_PER_REQUEST);
+    const firstBatchKeys = rcvKps.slice(0, firstBatch).map(kp => encodePubKeyEd25519(kp.publicKey));
+    const { senderId, recipientIds: firstIds } = await createXFTPChunk(agent, server, sndKp.privateKey, fileInfo, firstBatchKeys, auth);
+    const allRecipientIds = [...firstIds];
+    let added = firstBatch;
+    while (added < numRecipients) {
+        const batchSize = Math.min(numRecipients - added, MAX_RECIPIENTS_PER_REQUEST);
+        const batchKeys = rcvKps.slice(added, added + batchSize).map(kp => encodePubKeyEd25519(kp.publicKey));
+        const moreIds = await addXFTPRecipients(agent, server, sndKp.privateKey, senderId, batchKeys);
+        allRecipientIds.push(...moreIds);
+        added += batchSize;
+    }
+    await uploadXFTPChunk(agent, server, sndKp.privateKey, senderId, chunkData);
+    return {
+        chunkNo, senderId, senderKey: sndKp.privateKey,
+        recipients: allRecipientIds.map((rid, ri) => ({
+            recipientId: rid, recipientKey: rcvKps[ri].privateKey
+        })),
+        chunkSize, digest: chunkDigest, server
+    };
+}
+export async function uploadFile(agent, servers, encrypted, options) {
+    if (servers.length === 0)
+        throw new Error("uploadFile: servers list is empty");
+    const { onProgress, redirectThreshold, readChunk: readChunkOpt, auth, numRecipients = 1 } = options ?? {};
     const readChunk = readChunkOpt
         ? readChunkOpt
         : ('encData' in encrypted
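The hunk above breaks off at `uploadFile`'s fallback: when the metadata has no `encData` (streaming encryption), the caller must supply `readChunk`. A sketch of that contract backed by a plain in-memory buffer; `encryptedStore` is an illustrative stand-in for whatever storage the `onSlice` callback wrote to:

```typescript
// readChunk contract from UploadOptions: return exactly `size` bytes of the
// encrypted file starting at byte `offset`. A real caller might read from
// OPFS or IndexedDB instead of this illustrative in-memory buffer.
declare const encryptedStore: Uint8Array

const readChunk = async (offset: number, size: number): Promise<Uint8Array> =>
  encryptedStore.subarray(offset, offset + size)
```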
@@ -53,44 +92,123 @@ export async function uploadFile(agent, server, encrypted, options) {
             : () => { throw new Error("uploadFile: readChunk required when encData is absent"); });
     const total = encrypted.chunkSizes.reduce((a, b) => a + b, 0);
     const specs = prepareChunkSpecs(encrypted.chunkSizes);
-
-
-
-
-
-
-
-
-    const
-
-
-
-            size: spec.chunkSize,
-            digest: chunkDigest
-        };
-        const rcvKeysForChunk = [encodePubKeyEd25519(rcvKp.publicKey)];
-        const { senderId, recipientIds } = await createXFTPChunk(agent, server, sndKp.privateKey, fileInfo, rcvKeysForChunk);
-        await uploadXFTPChunk(agent, server, sndKp.privateKey, senderId, chunkData);
-        sentChunks.push({
-            chunkNo, senderId, senderKey: sndKp.privateKey,
-            recipientId: recipientIds[0], recipientKey: rcvKp.privateKey,
-            chunkSize: spec.chunkSize, digest: chunkDigest, server
-        });
-        uploaded += spec.chunkSize;
-        onProgress?.(uploaded, total);
+    // Pre-assign servers and group by server (matching Haskell groupAllOn)
+    const chunkJobs = specs.map((spec, i) => ({
+        index: i,
+        spec,
+        server: servers[Math.floor(Math.random() * servers.length)]
+    }));
+    const byServer = new Map();
+    for (const job of chunkJobs) {
+        const key = formatXFTPServer(job.server);
+        if (!byServer.has(key))
+            byServer.set(key, []);
+        byServer.get(key).push(job);
     }
-
-    const
-    let
-
+    // Upload groups in parallel, sequential within each group
+    const sentChunks = new Array(specs.length);
+    let uploaded = 0;
+    await Promise.all([...byServer.values()].map(async (jobs) => {
+        for (const { index, spec, server } of jobs) {
+            const chunkNo = index + 1;
+            const chunkData = await readChunk(spec.chunkOffset, spec.chunkSize);
+            sentChunks[index] = await uploadSingleChunk(agent, server, chunkNo, chunkData, spec.chunkSize, numRecipients, auth ?? null);
+            uploaded += spec.chunkSize;
+            onProgress?.(uploaded, total);
+        }
+    }));
+    const rcvDescriptions = Array.from({ length: numRecipients }, (_, ri) => buildDescription("recipient", ri, encrypted, sentChunks));
+    const sndDescription = buildDescription("sender", 0, encrypted, sentChunks);
+    let uri = encodeDescriptionURI(rcvDescriptions[0]);
+    let finalRcvDescriptions = rcvDescriptions;
     const threshold = redirectThreshold ?? DEFAULT_REDIRECT_THRESHOLD;
     if (uri.length > threshold && sentChunks.length > 1) {
-
-
+        const redirected = await uploadRedirectDescription(agent, servers, rcvDescriptions[0], auth);
+        finalRcvDescriptions = [redirected, ...rcvDescriptions.slice(1)];
+        uri = encodeDescriptionURI(redirected);
+    }
+    return { rcvDescriptions: finalRcvDescriptions, sndDescription, uri };
+}
+export async function sendFile(agent, servers, source, fileNameOrSize, fileNameOrOptions, maybeOptions) {
+    let sourceSize, fileName, options;
+    if (source instanceof Uint8Array) {
+        sourceSize = source.length;
+        fileName = fileNameOrSize;
+        options = fileNameOrOptions;
+    }
+    else {
+        sourceSize = fileNameOrSize;
+        fileName = fileNameOrOptions;
+        options = maybeOptions;
+    }
+    if (servers.length === 0)
+        throw new Error("sendFile: servers list is empty");
+    const { onProgress, auth, numRecipients = 1 } = options ?? {};
+    const params = prepareEncryption(sourceSize, fileName);
+    const specs = prepareChunkSpecs(params.chunkSizes);
+    const total = params.chunkSizes.reduce((a, b) => a + b, 0);
+    const encState = sbInit(params.key, params.nonce);
+    const hashState = sha512Init();
+    const sentChunks = new Array(specs.length);
+    let specIdx = 0, chunkOff = 0, uploaded = 0;
+    let chunkBuf = new Uint8Array(specs[0].chunkSize);
+    async function flushChunk() {
+        const server = servers[Math.floor(Math.random() * servers.length)];
+        sentChunks[specIdx] = await uploadSingleChunk(agent, server, specIdx + 1, chunkBuf, specs[specIdx].chunkSize, numRecipients, auth ?? null);
+        uploaded += specs[specIdx].chunkSize;
+        onProgress?.(uploaded, total);
+        specIdx++;
+        if (specIdx < specs.length) {
+            chunkBuf = new Uint8Array(specs[specIdx].chunkSize);
+            chunkOff = 0;
+        }
+    }
+    async function feedEncrypted(data) {
+        sha512Update(hashState, data);
+        let off = 0;
+        while (off < data.length) {
+            const space = specs[specIdx].chunkSize - chunkOff;
+            const n = Math.min(space, data.length - off);
+            chunkBuf.set(data.subarray(off, off + n), chunkOff);
+            chunkOff += n;
+            off += n;
+            if (chunkOff === specs[specIdx].chunkSize)
+                await flushChunk();
+        }
+    }
+    await feedEncrypted(sbEncryptChunk(encState, concatBytes(encodeInt64(params.fileSize), params.fileHdr)));
+    const SLICE = 65536;
+    if (source instanceof Uint8Array) {
+        for (let off = 0; off < source.length; off += SLICE) {
+            await feedEncrypted(sbEncryptChunk(encState, source.subarray(off, Math.min(off + SLICE, source.length))));
+        }
+    }
+    else {
+        for await (const chunk of source) {
+            for (let off = 0; off < chunk.length; off += SLICE) {
+                await feedEncrypted(sbEncryptChunk(encState, chunk.subarray(off, Math.min(off + SLICE, chunk.length))));
+            }
+        }
+    }
+    const padLen = Number(params.encSize - 16n - params.fileSize - 8n);
+    const padding = new Uint8Array(padLen);
+    padding.fill(0x23);
+    await feedEncrypted(sbEncryptChunk(encState, padding));
+    await feedEncrypted(sbAuth(encState));
+    const digest = sha512Final(hashState);
+    const encrypted = { digest, key: params.key, nonce: params.nonce, chunkSizes: params.chunkSizes };
+    const rcvDescriptions = Array.from({ length: numRecipients }, (_, ri) => buildDescription("recipient", ri, encrypted, sentChunks));
+    const sndDescription = buildDescription("sender", 0, encrypted, sentChunks);
+    let uri = encodeDescriptionURI(rcvDescriptions[0]);
+    let finalRcvDescriptions = rcvDescriptions;
+    if (uri.length > DEFAULT_REDIRECT_THRESHOLD && sentChunks.length > 1) {
+        const redirected = await uploadRedirectDescription(agent, servers, rcvDescriptions[0], auth);
+        finalRcvDescriptions = [redirected, ...rcvDescriptions.slice(1)];
+        uri = encodeDescriptionURI(redirected);
     }
-    return {
+    return { rcvDescriptions: finalRcvDescriptions, sndDescription, uri };
 }
-function buildDescription(party, enc, chunks) {
+function buildDescription(party, recipientIndex, enc, chunks) {
     const defChunkSize = enc.chunkSizes[0];
     return {
         party,
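The streaming `sendFile` above fixes the ciphertext layout implicitly through its padding arithmetic. A summary of the segments fed through `feedEncrypted`, reconstructed from the code rather than separately documented; note the running SHA-512 covers every segment, including the final authenticator:

```typescript
// Segments fed to feedEncrypted, in order (all but the last pass through
// sbEncryptChunk; sbAuth emits the 16-byte authenticator):
//   1. encodeInt64(fileSize) ++ fileHdr   // 8-byte length prefix + file header
//   2. source bytes in 64 KiB slices      // SLICE = 65536
//   3. 0x23 padding                       // padLen = encSize - 16 - fileSize - 8
//   4. 16-byte auth tag                   // brings the stream to exactly encSize
const padLen = (encSize: bigint, fileSize: bigint): bigint =>
  encSize - 16n - fileSize - 8n
```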
@@ -105,40 +223,38 @@ function buildDescription(party, enc, chunks) {
             digest: c.digest,
             replicas: [{
                 server: formatXFTPServer(c.server),
-                replicaId: party === "recipient" ? c.recipientId : c.senderId,
-                replicaKey: encodePrivKeyEd25519(party === "recipient" ? c.recipientKey : c.senderKey)
+                replicaId: party === "recipient" ? c.recipients[recipientIndex].recipientId : c.senderId,
+                replicaKey: encodePrivKeyEd25519(party === "recipient" ? c.recipients[recipientIndex].recipientKey : c.senderKey)
             }]
         })),
         redirect: null
     };
 }
-async function uploadRedirectDescription(agent,
+async function uploadRedirectDescription(agent, servers, innerFd, auth) {
     const yaml = encodeFileDescription(innerFd);
     const yamlBytes = new TextEncoder().encode(yaml);
-    const enc = encryptFileForUpload(yamlBytes, "");
+    const enc = await encryptFileForUpload(yamlBytes, "");
     const specs = prepareChunkSpecs(enc.chunkSizes);
-    const
-
-
-
-
-
-
-    const
-
-
-
-            digest: chunkDigest
-        };
-        const rcvKeysForChunk = [encodePubKeyEd25519(rcvKp.publicKey)];
-        const { senderId, recipientIds } = await createXFTPChunk(agent, server, sndKp.privateKey, fileInfo, rcvKeysForChunk);
-        await uploadXFTPChunk(agent, server, sndKp.privateKey, senderId, chunkData);
-        sentChunks.push({
-            chunkNo, senderId, senderKey: sndKp.privateKey,
-            recipientId: recipientIds[0], recipientKey: rcvKp.privateKey,
-            chunkSize: spec.chunkSize, digest: chunkDigest, server
-        });
+    const chunkJobs = specs.map((spec, i) => ({
+        index: i,
+        spec,
+        server: servers[Math.floor(Math.random() * servers.length)]
+    }));
+    const byServer = new Map();
+    for (const job of chunkJobs) {
+        const key = formatXFTPServer(job.server);
+        if (!byServer.has(key))
+            byServer.set(key, []);
+        byServer.get(key).push(job);
     }
+    const sentChunks = new Array(specs.length);
+    await Promise.all([...byServer.values()].map(async (jobs) => {
+        for (const { index, spec, server } of jobs) {
+            const chunkNo = index + 1;
+            const chunkData = enc.encData.subarray(spec.chunkOffset, spec.chunkOffset + spec.chunkSize);
+            sentChunks[index] = await uploadSingleChunk(agent, server, chunkNo, chunkData, spec.chunkSize, 1, auth ?? null);
+        }
+    }));
     return {
         party: "recipient",
         size: enc.chunkSizes.reduce((a, b) => a + b, 0),
@@ -152,8 +268,8 @@ async function uploadRedirectDescription(agent, server, innerFd) {
             digest: c.digest,
             replicas: [{
                 server: formatXFTPServer(c.server),
-                replicaId: c.recipientId,
-                replicaKey: encodePrivKeyEd25519(c.recipientKey)
+                replicaId: c.recipients[0].recipientId,
+                replicaKey: encodePrivKeyEd25519(c.recipients[0].recipientKey)
             }]
         })),
         redirect: { size: innerFd.size, digest: innerFd.digest }
@@ -163,7 +279,7 @@ export async function downloadFileRaw(agent, fd, onRawChunk, options) {
     const err = validateFileDescription(fd);
     if (err)
         throw new Error("downloadFileRaw: " + err);
-    const { onProgress
+    const { onProgress } = options ?? {};
     // Resolve redirect on main thread (redirect data is small)
     if (fd.redirect !== null) {
         console.log(`[AGENT-DBG] resolving redirect: outer size=${fd.size} chunks=${fd.chunks.length}`);
@@ -197,6 +313,7 @@ export async function downloadFileRaw(agent, fd, onRawChunk, options) {
                 body: raw.body,
                 digest: chunk.digest
             });
+            await ackXFTPChunk(agent, server, kp.privateKey, replica.replicaId);
             downloaded += chunk.chunkSize;
             onProgress?.(downloaded, resolvedFd.size);
         }
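The one-line change above acknowledges each chunk (`ackXFTPChunk`) right after it is handed to `onRawChunk`. A sketch of driving `downloadFileRaw` from the declarations in `agent.d.ts`; `saveRaw` is an illustrative sink, and the chunk fields used are the ones visible in this diff:

```typescript
import {XFTPAgent, downloadFileRaw, decodeDescriptionURI} from "xftp-web"

declare const agent: XFTPAgent
declare const uri: string
declare function saveRaw(chunkNo: number, body: Uint8Array): Promise<void>

// Chunks arrive still encrypted; the FileDescription returned at the end is
// the redirect-resolved one a worker would use to decrypt and reassemble.
const resolvedFd = await downloadFileRaw(
  agent,
  decodeDescriptionURI(uri),
  async (chunk) => saveRaw(chunk.chunkNo, chunk.body),
  {onProgress: (downloaded, total) => console.log(`${downloaded}/${total}`)},
)
```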
@@ -216,18 +333,32 @@ export async function downloadFile(agent, fd, onProgress) {
         throw new Error("downloadFile: file digest mismatch");
     return processDownloadedFile(resolvedFd, chunks);
 }
+export async function receiveFile(agent, uri, options) {
+    const fd = decodeDescriptionURI(uri);
+    return downloadFile(agent, fd, options?.onProgress);
+}
 async function resolveRedirect(agent, fd) {
     const plaintextChunks = new Array(fd.chunks.length);
+    const byServer = new Map();
     for (const chunk of fd.chunks) {
-        const
-        if (!
-
-
-        const seed = decodePrivKeyEd25519(replica.replicaKey);
-        const kp = ed25519KeyPairFromSeed(seed);
-        const data = await downloadXFTPChunk(agent, server, kp.privateKey, replica.replicaId, chunk.digest);
-        plaintextChunks[chunk.chunkNo - 1] = data;
+        const srv = chunk.replicas[0]?.server ?? "";
+        if (!byServer.has(srv))
+            byServer.set(srv, []);
+        byServer.get(srv).push(chunk);
     }
+    await Promise.all([...byServer.entries()].map(async ([srv, chunks]) => {
+        const server = parseXFTPServer(srv);
+        for (const chunk of chunks) {
+            const replica = chunk.replicas[0];
+            if (!replica)
+                throw new Error("resolveRedirect: chunk has no replicas");
+            const seed = decodePrivKeyEd25519(replica.replicaKey);
+            const kp = ed25519KeyPairFromSeed(seed);
+            const data = await downloadXFTPChunk(agent, server, kp.privateKey, replica.replicaId, chunk.digest);
+            plaintextChunks[chunk.chunkNo - 1] = data;
+            await ackXFTPChunk(agent, server, kp.privateKey, replica.replicaId);
+        }
+    }));
     const totalSize = plaintextChunks.reduce((s, c) => s + c.length, 0);
     if (totalSize !== fd.size)
         throw new Error("resolveRedirect: redirect file size mismatch");
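As the hunk above shows, `receiveFile` is a thin convenience wrapper; the two calls below are equivalent by construction:

```typescript
import {XFTPAgent, receiveFile, downloadFile, decodeDescriptionURI} from "xftp-web"

declare const agent: XFTPAgent
declare const uri: string
const onProgress = (downloaded: number, total: number) => console.log(downloaded, total)

const a = await receiveFile(agent, uri, {onProgress})
// ...is the same as decoding the URI fragment yourself:
const b = await downloadFile(agent, decodeDescriptionURI(uri), onProgress)
```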
@@ -248,15 +379,24 @@ async function resolveRedirect(agent, fd) {
 }
 // -- Delete
 export async function deleteFile(agent, sndDescription) {
+    const byServer = new Map();
     for (const chunk of sndDescription.chunks) {
-        const
-        if (!
-
-
-        const seed = decodePrivKeyEd25519(replica.replicaKey);
-        const kp = ed25519KeyPairFromSeed(seed);
-        await deleteXFTPChunk(agent, server, kp.privateKey, replica.replicaId);
+        const srv = chunk.replicas[0]?.server ?? "";
+        if (!byServer.has(srv))
+            byServer.set(srv, []);
+        byServer.get(srv).push(chunk);
     }
+    await Promise.all([...byServer.entries()].map(async ([srv, chunks]) => {
+        const server = parseXFTPServer(srv);
+        for (const chunk of chunks) {
+            const replica = chunk.replicas[0];
+            if (!replica)
+                throw new Error("deleteFile: chunk has no replicas");
+            const seed = decodePrivKeyEd25519(replica.replicaKey);
+            const kp = ed25519KeyPairFromSeed(seed);
+            await deleteXFTPChunk(agent, server, kp.privateKey, replica.replicaId);
+        }
+    }));
 }
 // -- Internal
 function _dbgHex(b, n = 8) {