split-hash 0.3.0 → 0.3.2

package/README.md CHANGED
@@ -40,6 +40,7 @@ for await (const hash of iter) {
  ```js
  import { SplitHashValidator } from 'split-hash/nodejs'
  import * as crypto from 'crypto'
+ import { pipeline } from 'stream'

  const KiB = 1024

@@ -58,11 +59,13 @@ const createHash = () => {
  const hashList = [/* ... */]
  const validator = new SplitHashValidator(hashList, 512 * KiB, createHash)

- const stream = fs.createReadStream('filename.bin')
- stream
-   .pipe(validator)
-   .on('data', /* same as stream */)
-   .on('error', err => console.error('not matched'))
+ const stream = pipeline(
+   fs.createReadStream('filename.bin')
+ , validator
+ , err => {
+     // ...
+   }
+ )

  ```

  ## API
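
The move from `.pipe()` to `stream.pipeline` in this README example is more than cosmetic: `pipeline` forwards an error from any stage to a single callback and destroys every stream on failure, while `.pipe()` neither propagates errors nor cleans up. A minimal sketch of the new example fleshed out — the file name, block size, and SHA-256 hex digests are assumptions for illustration:

```ts
import * as fs from 'fs'
import * as crypto from 'crypto'
import { pipeline } from 'stream'
import { SplitHashValidator } from 'split-hash/nodejs'

// Hypothetical list of expected per-block digests (hex strings).
const hashList: string[] = [/* ... */]

// A factory satisfying ProgressiveHashFactory<string>, backed by node:crypto.
const createHash = () => {
  const hash = crypto.createHash('sha256')
  return {
    update: (buffer: Buffer) => { hash.update(buffer) }
  , digest: () => hash.digest('hex')
  }
}

const validator = new SplitHashValidator(hashList, 512 * 1024, createHash)

const stream = pipeline(
  fs.createReadStream('file.bin') // hypothetical input file
, validator
, err => {
    // Errors from either stage (including a digest mismatch) land here,
    // and both streams are destroyed automatically.
    if (err) console.error('not matched or stream failed', err)
  }
)
stream.resume() // the validator re-emits the data, so it must still be consumed
```

Digesting to hex strings means the validator's default `Object.is` comparator works as-is; Buffer digests need a custom comparator (see the note on the validator source further down).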
@@ -82,7 +85,7 @@ function splitHash<T>(
    stream: NodeJS.ReadableStream
  , blockSizeBytes: number
  , createHash: ProgressiveHashFactory<T>
- ): AsyncIterable<T>
+ ): AsyncIterableIterator<T>
  ```

  It throws `StreamEncodingError` when the `stream` encoding is set.
@@ -127,5 +130,5 @@ async function* splitHash<T>(
    stream: ReadableStream
  , blockSize: number
  , createHash: ProgressiveHashFactory<T>
- ): AsyncIterable<T>
+ ): AsyncIterableIterator<T>
  ```
@@ -1,7 +1,7 @@
  /// <reference types="node" resolution-mode="require"/>
  import { CustomError } from '@blackglory/errors';
  import { ProgressiveHashFactory } from './types.js';
- export declare function splitHash<T>(stream: NodeJS.ReadableStream, blockSizeBytes: number, createHash: ProgressiveHashFactory<T>): AsyncIterable<T>;
+ export declare function splitHash<T>(stream: NodeJS.ReadableStream, blockSizeBytes: number, createHash: ProgressiveHashFactory<T>): AsyncIterableIterator<T>;
  export declare class StreamEncodingError extends CustomError {
      constructor();
  }
@@ -1,2 +1,2 @@
  import { ProgressiveHashFactory } from './types.js';
- export declare function splitHash<T>(stream: ReadableStream, blockSizeBytes: number, createHash: ProgressiveHashFactory<T>): AsyncIterable<T>;
+ export declare function splitHash<T>(stream: ReadableStream, blockSizeBytes: number, createHash: ProgressiveHashFactory<T>): AsyncIterableIterator<T>;
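
The return-type change from `AsyncIterable<T>` to `AsyncIterableIterator<T>` (in the README and in both shipped `.d.ts` files above) matches what an `async function*` actually returns. The practical difference is that the result can now be driven by hand through `next()` and still be handed to `for await`. A brief sketch, reusing the hypothetical SHA-256 factory from above:

```ts
import * as fs from 'fs'
import * as crypto from 'crypto'
import { splitHash } from 'split-hash/nodejs'

// Hypothetical factory producing hex SHA-256 digests.
const createHash = () => {
  const hash = crypto.createHash('sha256')
  return {
    update: (buffer: Buffer) => { hash.update(buffer) }
  , digest: () => hash.digest('hex')
  }
}

const iter = splitHash(fs.createReadStream('file.bin'), 512 * 1024, createHash)

// An AsyncIterableIterator exposes next() directly, so the first block's
// digest can be pulled manually...
const first = await iter.next()
console.log(first.value)

// ...and the same object still works with for await, because it is its
// own iterator.
for await (const digest of iter) {
  console.log(digest)
}
```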
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "split-hash",
-   "version": "0.3.0",
+   "version": "0.3.2",
    "description": "Split the stream based on bytes and get digests from each part.",
    "keywords": [
      "split",
@@ -10,7 +10,8 @@
      "chunk"
    ],
    "files": [
-     "lib"
+     "lib",
+     "src"
    ],
    "type": "module",
    "exports": {
@@ -51,10 +52,10 @@
    "@typescript-eslint/parser": "^5.56.0",
    "cross-env": "^7.0.3",
    "eslint": "^8.36.0",
-   "extra-abort": "^0.3.3",
+   "extra-abort": "^0.3.4",
    "extra-promise": "^6.0.5",
    "husky": "^4.3.0",
-   "iterable-operator": "^4.0.3",
+   "iterable-operator": "^4.0.5",
    "npm-run-all": "^4.1.5",
    "return-style": "^3.0.0",
    "rimraf": "^3.0.2",
@@ -0,0 +1,3 @@
+ export * from './types.js'
+ export * from './split-hash.js'
+ export * from './split-hash-validator.js'
@@ -0,0 +1,78 @@
+ import { Transform, TransformCallback } from 'stream'
+ import { CustomError } from '@blackglory/errors'
+ import { ProgressiveHashFactory, IProgressiveHash } from './types.js'
+
+ export class SplitHashValidator<T> extends Transform {
+   private hash: IProgressiveHash<T> = this.createHash()
+   private accu = 0
+   private digestIndex = 0
+
+   constructor(
+     private digests: T[]
+   , private blockSize: number
+   , private createHash: ProgressiveHashFactory<T>
+   , private equals: (a: T, b: T) => boolean = Object.is
+   ) {
+     super()
+   }
+
+   _transform(
+     chunk: Buffer
+   , encoding: BufferEncoding
+   , callback: TransformCallback
+   ): void {
+     // chunk is always a Buffer and encoding is always 'buffer', so there is no need to check them
+
+     if (this.accu + chunk.length < this.blockSize) {
+       this.hash.update(chunk)
+       this.accu += chunk.length
+     } else {
+       let offset = 0
+       while (true) {
+         const needed = this.blockSize - this.accu
+         const slice = chunk.slice(offset, offset + needed)
+         if (slice.length === needed) {
+           this.hash.update(slice)
+           const digest = this.hash.digest()
+           if (!this.equals(this.digests[this.digestIndex], digest)) {
+             return callback(new NotMatchedError())
+           }
+           this.digestIndex++
+           // prepare for the next round
+           this.hash = this.createHash()
+           this.accu = 0
+           offset += slice.length
+         } else {
+           // the slice is shorter than needed, so the remaining data cannot fill a block: feed it to the hash and exit the loop
+           this.hash.update(slice)
+           this.accu += slice.length
+           break
+         }
+       }
+     }
+
+     callback(null, chunk)
+   }
+
+   _flush(callback: TransformCallback): void {
+     if (this.accu > 0) {
+       const digest = this.hash.digest()
+       if (!this.equals(this.digests[this.digestIndex], digest)) {
+         return callback(new NotMatchedError())
+       }
+       this.digestIndex++
+     }
+
+     if (this.digestIndex !== this.digests.length) {
+       return callback(new NotMatchedError())
+     }
+
+     callback()
+   }
+ }
+
+ export class NotMatchedError extends CustomError {
+   constructor() {
+     super('hashes do not match')
+   }
+ }
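
One detail of this new validator worth calling out: the comparator defaults to `Object.is`, which is identity-based. That is fine for primitive digests such as hex strings, but two distinct `Buffer` objects are never `Object.is`-equal even when their bytes match, so a factory that digests to `Buffer` must supply a byte-wise `equals`. A sketch under that assumption (the digest list and block size are illustrative):

```ts
import * as crypto from 'crypto'
import { SplitHashValidator } from 'split-hash/nodejs'

// Hypothetical expected per-block digests kept as raw Buffers.
const bufferDigests: Buffer[] = [/* ... */]

const validator = new SplitHashValidator<Buffer>(
  bufferDigests
, 512 * 1024
, () => {
    const hash = crypto.createHash('sha256')
    return {
      update: (buffer: Buffer) => { hash.update(buffer) }
    , digest: () => hash.digest() // raw Buffer digest
    }
  }
, (a, b) => a.equals(b) // Buffer#equals compares byte-wise
)
```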
@@ -0,0 +1,46 @@
+ import { CustomError } from '@blackglory/errors'
+ import { ProgressiveHashFactory } from './types.js'
+
+ export async function* splitHash<T>(
+   stream: NodeJS.ReadableStream
+ , blockSizeBytes: number
+ , createHash: ProgressiveHashFactory<T>
+ ): AsyncIterableIterator<T> {
+   let hash = createHash()
+   let accu = 0
+   for await (const chunk of stream) {
+     if (!Buffer.isBuffer(chunk)) throw new StreamEncodingError()
+     if (accu + chunk.length < blockSizeBytes) {
+       hash.update(chunk)
+       accu += chunk.length
+     } else {
+       let offset = 0
+       while (true) {
+         const needed = blockSizeBytes - accu
+         const slice = chunk.slice(offset, offset + needed)
+         if (slice.length === needed) {
+           hash.update(slice)
+           const digest = hash.digest()
+           yield digest
+           // prepare for the next round
+           hash = createHash()
+           accu = 0
+           offset += slice.length
+         } else {
+           // the slice is shorter than needed, so the remaining data cannot fill a block: feed it to the hash and exit the loop
+           hash.update(slice)
+           accu += slice.length
+           break
+         }
+       }
+     }
+   }
+   // digest the remaining data if any
+   if (accu > 0) yield hash.digest()
+ }
+
+ export class StreamEncodingError extends CustomError {
+   constructor() {
+     super('stream encoding must not be set')
+   }
+ }
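
The `Buffer.isBuffer` guard at the top of the loop is what backs the documented `StreamEncodingError`: if an encoding is set on the stream, chunks arrive as strings and the generator throws on the first one. A short sketch of the failure mode, with a hypothetical file name and the same illustrative SHA-256 factory:

```ts
import * as fs from 'fs'
import * as crypto from 'crypto'
import { splitHash, StreamEncodingError } from 'split-hash/nodejs'

const createHash = () => {
  const hash = crypto.createHash('sha256')
  return {
    update: (buffer: Buffer) => { hash.update(buffer) }
  , digest: () => hash.digest('hex')
  }
}

const stream = fs.createReadStream('file.bin')
stream.setEncoding('utf8') // chunks will now be strings, not Buffers

try {
  for await (const digest of splitHash(stream, 512 * 1024, createHash)) {
    console.log(digest)
  }
} catch (err) {
  // Buffer.isBuffer(chunk) is false for string chunks, so the first
  // iteration throws StreamEncodingError.
  if (err instanceof StreamEncodingError) {
    console.error('stream encoding must not be set')
  }
}
```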
@@ -0,0 +1,6 @@
+ export type ProgressiveHashFactory<T> = () => IProgressiveHash<T>
+
+ export interface IProgressiveHash<T> {
+   update(buffer: Buffer): void
+   digest(): T
+ }
@@ -0,0 +1,2 @@
+ export * from './types.js'
+ export * from './split-hash.js'
@@ -0,0 +1,40 @@
+ import { toAsyncIterableIterator } from 'extra-stream'
+ import { ProgressiveHashFactory } from './types.js'
+
+ export async function* splitHash<T>(
+   stream: ReadableStream
+ , blockSizeBytes: number
+ , createHash: ProgressiveHashFactory<T>
+ ): AsyncIterableIterator<T> {
+   let hash = createHash()
+   let accu = 0
+   for await (const chunk of toAsyncIterableIterator(stream)) {
+     if (accu + chunk.length < blockSizeBytes) {
+       hash.update(chunk)
+       accu += chunk.length
+     } else {
+       let offset = 0
+       while (true) {
+         const needed = blockSizeBytes - accu
+         const slice = chunk.slice(offset, offset + needed)
+         if (slice.length === needed) {
+           hash.update(slice)
+           const digest = await hash.digest()
+           yield digest
+           // prepare for the next round
+           hash = createHash()
+           accu = 0
+           offset += slice.length
+         } else {
+           // the slice is shorter than needed, so the remaining
+           // data cannot fill a block: feed it to the hash and exit the loop
+           hash.update(slice)
+           accu += slice.length
+           break
+         }
+       }
+     }
+   }
+   // digest the remaining data if any
+   if (accu > 0) yield await hash.digest()
+ }
@@ -0,0 +1,6 @@
+ export type ProgressiveHashFactory<T> = () => IProgressiveHash<T>
+
+ export interface IProgressiveHash<T> {
+   update(buffer: Uint8Array): void
+   digest(): Promise<T>
+ }
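
Note how the WHATWG variant's `IProgressiveHash` differs from the Node one: `update` takes a `Uint8Array` and `digest` returns a `Promise`, which accommodates Web Crypto. Since `crypto.subtle.digest` is one-shot rather than incremental, a factory built on it has to collect the block's chunks and hash them all at once in `digest()`. A sketch under that design, producing hex digests — the `split-hash/whatwg` subpath is an assumption, as the diff only shows the `split-hash/nodejs` entry point:

```ts
// assumption: the WHATWG build is exposed under the 'split-hash/whatwg' subpath
import { splitHash, IProgressiveHash } from 'split-hash/whatwg'

function createSHA256(): IProgressiveHash<string> {
  const chunks: Uint8Array[] = []
  return {
    update(buffer: Uint8Array): void {
      chunks.push(buffer) // subtle.digest is one-shot, so just accumulate
    }
  , async digest(): Promise<string> {
      // Concatenate the block's chunks and hash them in a single call.
      const data = new Uint8Array(chunks.reduce((sum, c) => sum + c.length, 0))
      let offset = 0
      for (const chunk of chunks) {
        data.set(chunk, offset)
        offset += chunk.length
      }
      const hash = await crypto.subtle.digest('SHA-256', data)
      return Array.from(new Uint8Array(hash))
        .map(byte => byte.toString(16).padStart(2, '0'))
        .join('')
    }
  }
}

// e.g. hashing 512 KiB blocks of an HTTP response body:
// for await (const digest of splitHash(response.body!, 512 * 1024, createSHA256)) {
//   console.log(digest)
// }
```

Buffering this way holds at most one block (`blockSizeBytes` bytes) in memory per digest, which is usually acceptable; an incremental hash (e.g. a WebAssembly implementation) would avoid even that.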