split-hash 0.3.1 → 0.3.2

package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "split-hash",
-  "version": "0.3.1",
+  "version": "0.3.2",
   "description": "Split the stream based on bytes and get digests from each part.",
   "keywords": [
     "split",
@@ -10,7 +10,8 @@
     "chunk"
   ],
   "files": [
-    "lib"
+    "lib",
+    "src"
   ],
   "type": "module",
   "exports": {
@@ -0,0 +1,3 @@
+export * from './types.js'
+export * from './split-hash.js'
+export * from './split-hash-validator.js'
@@ -0,0 +1,78 @@
+import { Transform, TransformCallback } from 'stream'
+import { CustomError } from '@blackglory/errors'
+import { ProgressiveHashFactory, IProgressiveHash } from './types.js'
+
+export class SplitHashValidator<T> extends Transform {
+  private hash: IProgressiveHash<T> = this.createHash()
+  private accu = 0
+  private digestIndex = 0
+
+  constructor(
+    private digests: T[]
+  , private blockSize: number
+  , private createHash: ProgressiveHashFactory<T>
+  , private equals: (a: T, b: T) => boolean = Object.is
+  ) {
+    super()
+  }
+
+  _transform(
+    chunk: Buffer
+  , encoding: BufferEncoding
+  , callback: TransformCallback
+  ): void {
+    // chunk is always a Buffer and encoding is always 'buffer', so there is no need to check.
+
+    if (this.accu + chunk.length < this.blockSize) {
+      this.hash.update(chunk)
+      this.accu += chunk.length
+    } else {
+      let offset = 0
+      while (true) {
+        const needed = this.blockSize - this.accu
+        const slice = chunk.slice(offset, offset + needed)
+        if (slice.length === needed) {
+          this.hash.update(slice)
+          const digest = this.hash.digest()
+          if (!this.equals(this.digests[this.digestIndex], digest)) {
+            return callback(new NotMatchedError())
+          }
+          this.digestIndex++
+          // prepare for the next round
+          this.hash = this.createHash()
+          this.accu = 0
+          offset += slice.length
+        } else {
+          // The remaining data is shorter than a full block: feed it to the hash and exit the loop.
+          this.hash.update(slice)
+          this.accu += slice.length
+          break
+        }
+      }
+    }
+
+    callback(null, chunk)
+  }
+
+  _flush(callback: TransformCallback): void {
+    if (this.accu > 0) {
+      const digest = this.hash.digest()
+      if (!this.equals(this.digests[this.digestIndex], digest)) {
+        return callback(new NotMatchedError())
+      }
+      this.digestIndex++
+    }
+
+    if (this.digestIndex !== this.digests.length) {
+      return callback(new NotMatchedError())
+    }
+
+    callback()
+  }
+}
+
+export class NotMatchedError extends CustomError {
+  constructor() {
+    super('hashes do not match')
+  }
+}
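
SplitHashValidator is a pass-through Transform: data flows through unchanged while each fixed-size block is checked against the expected digest list, and the stream errors with NotMatchedError on the first mismatch or if the digest count is wrong. A minimal usage sketch, assuming Node's built-in crypto module for SHA-256, a 1 MiB block size, and that the 'split-hash' specifier resolves to the Node entry above; the file names and expected digests are placeholders:

import { createHash } from 'crypto'
import { pipeline } from 'stream/promises'
import { createReadStream, createWriteStream } from 'fs'
import { SplitHashValidator, NotMatchedError } from 'split-hash'

// Adapter from Node's crypto to the IProgressiveHash<string> shape defined in types.ts.
const sha256 = () => {
  const hash = createHash('sha256')
  return {
    update(buffer: Buffer): void {
      hash.update(buffer)
    },
    digest(): string {
      return hash.digest('hex')
    }
  }
}

// Hex digest of each 1 MiB block, computed earlier (placeholder values).
const expectedDigests: string[] = ['9f86d08…', '60303ae…']

async function main(): Promise<void> {
  try {
    await pipeline(
      createReadStream('archive.bin'),
      new SplitHashValidator(expectedDigests, 1024 * 1024, sha256),
      createWriteStream('archive.verified.bin')
    )
  } catch (err) {
    if (err instanceof NotMatchedError) {
      console.error('a block digest did not match')
    } else {
      throw err
    }
  }
}

main()
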
@@ -0,0 +1,46 @@
+import { CustomError } from '@blackglory/errors'
+import { ProgressiveHashFactory } from './types.js'
+
+export async function* splitHash<T>(
+  stream: NodeJS.ReadableStream
+, blockSizeBytes: number
+, createHash: ProgressiveHashFactory<T>
+): AsyncIterableIterator<T> {
+  let hash = createHash()
+  let accu = 0
+  for await (const chunk of stream) {
+    if (!Buffer.isBuffer(chunk)) throw new StreamEncodingError()
+    if (accu + chunk.length < blockSizeBytes) {
+      hash.update(chunk)
+      accu += chunk.length
+    } else {
+      let offset = 0
+      while (true) {
+        const needed = blockSizeBytes - accu
+        const slice = chunk.slice(offset, offset + needed)
+        if (slice.length === needed) {
+          hash.update(slice)
+          const digest = hash.digest()
+          yield digest
+          // prepare for the next round
+          hash = createHash()
+          accu = 0
+          offset += slice.length
+        } else {
+          // The remaining data is shorter than a full block: feed it to the hash and exit the loop.
+          hash.update(slice)
+          accu += slice.length
+          break
+        }
+      }
+    }
+  }
+  // digest the remaining data if there is any
+  if (accu > 0) yield hash.digest()
+}
+
+export class StreamEncodingError extends CustomError {
+  constructor() {
+    super('stream encoding must not be set')
+  }
+}
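
The generator form yields each block's digest instead of validating it, and throws StreamEncodingError if the stream yields strings rather than Buffers. A sketch that collects per-block SHA-256 digests for a local file, again assuming Node's crypto module; 'archive.bin' and the 'split-hash' import specifier are placeholders:

import { createHash } from 'crypto'
import { createReadStream } from 'fs'
import { splitHash } from 'split-hash'

// Adapter producing raw Buffer digests this time.
const sha256 = () => {
  const hash = createHash('sha256')
  return {
    update(buffer: Buffer): void {
      hash.update(buffer)
    },
    digest(): Buffer {
      return hash.digest()
    }
  }
}

async function main(): Promise<void> {
  const digests: Buffer[] = []
  // 512 KiB blocks; 'archive.bin' is a placeholder path
  for await (const digest of splitHash(createReadStream('archive.bin'), 512 * 1024, sha256)) {
    digests.push(digest)
  }
  console.log(`${digests.length} block digest(s)`)
}

main()
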
@@ -0,0 +1,6 @@
+export type ProgressiveHashFactory<T> = () => IProgressiveHash<T>
+
+export interface IProgressiveHash<T> {
+  update(buffer: Buffer): void
+  digest(): T
+}
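
The digest type T is unconstrained, so the "hash" can be any per-block summary. A toy factory that merely reports each block's byte length, purely to illustrate the interface (the import specifier is an assumption):

import { ProgressiveHashFactory } from 'split-hash'

// IProgressiveHash<number> that reports each block's length instead of a digest.
const byteCounter: ProgressiveHashFactory<number> = () => {
  let total = 0
  return {
    update(buffer: Buffer): void {
      total += buffer.length
    },
    digest(): number {
      return total
    }
  }
}
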
@@ -0,0 +1,2 @@
+export * from './types.js'
+export * from './split-hash.js'
@@ -0,0 +1,40 @@
+import { toAsyncIterableIterator } from 'extra-stream'
+import { ProgressiveHashFactory } from './types.js'
+
+export async function* splitHash<T>(
+  stream: ReadableStream
+, blockSizeBytes: number
+, createHash: ProgressiveHashFactory<T>
+): AsyncIterableIterator<T> {
+  let hash = createHash()
+  let accu = 0
+  for await (const chunk of toAsyncIterableIterator(stream)) {
+    if (accu + chunk.length < blockSizeBytes) {
+      hash.update(chunk)
+      accu += chunk.length
+    } else {
+      let offset = 0
+      while (true) {
+        const needed = blockSizeBytes - accu
+        const slice = chunk.slice(offset, offset + needed)
+        if (slice.length === needed) {
+          hash.update(slice)
+          const digest = await hash.digest()
+          yield digest
+          // prepare for the next round
+          hash = createHash()
+          accu = 0
+          offset += slice.length
+        } else {
+          // The remaining data is shorter than a full block:
+          // feed it to the hash and exit the loop.
+          hash.update(slice)
+          accu += slice.length
+          break
+        }
+      }
+    }
+  }
+  // digest the remaining data if there is any
+  if (accu > 0) yield await hash.digest()
+}
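
This variant takes a WHATWG ReadableStream and awaits digest(), which fits the Web Crypto API. crypto.subtle.digest is not incremental, so an adapter can buffer one block's chunks (at most blockSizeBytes) and hash them in a single call. A sketch under the assumption that the browser build is reachable through the same 'split-hash' specifier via the package's exports map; the URL is a placeholder:

import { splitHash, ProgressiveHashFactory } from 'split-hash'

// Buffers one block's chunks and hashes them with Web Crypto on digest().
const sha256: ProgressiveHashFactory<string> = () => {
  const parts: Uint8Array[] = []
  return {
    update(buffer: Uint8Array): void {
      parts.push(buffer)
    },
    async digest(): Promise<string> {
      // concatenate the buffered chunks into one block
      const block = new Uint8Array(parts.reduce((size, part) => size + part.length, 0))
      let offset = 0
      for (const part of parts) {
        block.set(part, offset)
        offset += part.length
      }
      const hash = await crypto.subtle.digest('SHA-256', block)
      return Array.from(new Uint8Array(hash), byte => byte.toString(16).padStart(2, '0')).join('')
    }
  }
}

async function main(): Promise<void> {
  const res = await fetch('/archive.bin') // placeholder URL
  for await (const digest of splitHash(res.body!, 1024 * 1024, sha256)) {
    console.log(digest)
  }
}

main()
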
@@ -0,0 +1,6 @@
+export type ProgressiveHashFactory<T> = () => IProgressiveHash<T>
+
+export interface IProgressiveHash<T> {
+  update(buffer: Uint8Array): void
+  digest(): Promise<T>
+}