rac-delta 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (144)
  1. package/LICENSE +21 -0
  2. package/README.md +1 -0
  3. package/dist/core/adapters/index.d.ts +2 -0
  4. package/dist/core/adapters/index.d.ts.map +1 -0
  5. package/dist/core/adapters/index.js +17 -0
  6. package/dist/core/adapters/storage-adapter.d.ts +125 -0
  7. package/dist/core/adapters/storage-adapter.d.ts.map +1 -0
  8. package/dist/core/adapters/storage-adapter.js +14 -0
  9. package/dist/core/config/index.d.ts +2 -0
  10. package/dist/core/config/index.d.ts.map +1 -0
  11. package/dist/core/config/index.js +17 -0
  12. package/dist/core/config/rac-delta-config.d.ts +132 -0
  13. package/dist/core/config/rac-delta-config.d.ts.map +1 -0
  14. package/dist/core/config/rac-delta-config.js +2 -0
  15. package/dist/core/exceptions.d.ts +25 -0
  16. package/dist/core/exceptions.d.ts.map +1 -0
  17. package/dist/core/exceptions.js +51 -0
  18. package/dist/core/models/chunk.d.ts +12 -0
  19. package/dist/core/models/chunk.d.ts.map +1 -0
  20. package/dist/core/models/chunk.js +2 -0
  21. package/dist/core/models/delta-plan.d.ts +12 -0
  22. package/dist/core/models/delta-plan.d.ts.map +1 -0
  23. package/dist/core/models/delta-plan.js +2 -0
  24. package/dist/core/models/file-entry.d.ts +9 -0
  25. package/dist/core/models/file-entry.d.ts.map +1 -0
  26. package/dist/core/models/file-entry.js +2 -0
  27. package/dist/core/models/index.d.ts +5 -0
  28. package/dist/core/models/index.d.ts.map +1 -0
  29. package/dist/core/models/index.js +20 -0
  30. package/dist/core/models/rd-index.d.ts +8 -0
  31. package/dist/core/models/rd-index.d.ts.map +1 -0
  32. package/dist/core/models/rd-index.js +2 -0
  33. package/dist/core/pipelines/download-pipeline.d.ts +142 -0
  34. package/dist/core/pipelines/download-pipeline.d.ts.map +1 -0
  35. package/dist/core/pipelines/download-pipeline.js +64 -0
  36. package/dist/core/pipelines/index.d.ts +3 -0
  37. package/dist/core/pipelines/index.d.ts.map +1 -0
  38. package/dist/core/pipelines/index.js +18 -0
  39. package/dist/core/pipelines/upload-pipeline.d.ts +60 -0
  40. package/dist/core/pipelines/upload-pipeline.d.ts.map +1 -0
  41. package/dist/core/pipelines/upload-pipeline.js +34 -0
  42. package/dist/core/services/delta-service.d.ts +76 -0
  43. package/dist/core/services/delta-service.d.ts.map +1 -0
  44. package/dist/core/services/delta-service.js +2 -0
  45. package/dist/core/services/hasher-service.d.ts +47 -0
  46. package/dist/core/services/hasher-service.d.ts.map +1 -0
  47. package/dist/core/services/hasher-service.js +2 -0
  48. package/dist/core/services/index.d.ts +5 -0
  49. package/dist/core/services/index.d.ts.map +1 -0
  50. package/dist/core/services/index.js +20 -0
  51. package/dist/core/services/reconstruction-service.d.ts +99 -0
  52. package/dist/core/services/reconstruction-service.d.ts.map +1 -0
  53. package/dist/core/services/reconstruction-service.js +4 -0
  54. package/dist/core/services/validation-service.d.ts +18 -0
  55. package/dist/core/services/validation-service.d.ts.map +1 -0
  56. package/dist/core/services/validation-service.js +2 -0
  57. package/dist/core/types/index.d.ts +2 -0
  58. package/dist/core/types/index.d.ts.map +1 -0
  59. package/dist/core/types/index.js +17 -0
  60. package/dist/core/types/types.d.ts +3 -0
  61. package/dist/core/types/types.d.ts.map +1 -0
  62. package/dist/core/types/types.js +2 -0
  63. package/dist/core/utils/index.d.ts +3 -0
  64. package/dist/core/utils/index.d.ts.map +1 -0
  65. package/dist/core/utils/index.js +18 -0
  66. package/dist/core/utils/invariant.d.ts +2 -0
  67. package/dist/core/utils/invariant.d.ts.map +1 -0
  68. package/dist/core/utils/invariant.js +11 -0
  69. package/dist/core/utils/stream-to-buffer.d.ts +3 -0
  70. package/dist/core/utils/stream-to-buffer.d.ts.map +1 -0
  71. package/dist/core/utils/stream-to-buffer.js +10 -0
  72. package/dist/index.d.ts +9 -0
  73. package/dist/index.d.ts.map +1 -0
  74. package/dist/index.js +29 -0
  75. package/dist/infrastructure/adapters/azure-blob-storage-adapter.d.ts +24 -0
  76. package/dist/infrastructure/adapters/azure-blob-storage-adapter.d.ts.map +1 -0
  77. package/dist/infrastructure/adapters/azure-blob-storage-adapter.js +149 -0
  78. package/dist/infrastructure/adapters/gcs-storage-adapter.d.ts +20 -0
  79. package/dist/infrastructure/adapters/gcs-storage-adapter.d.ts.map +1 -0
  80. package/dist/infrastructure/adapters/gcs-storage-adapter.js +101 -0
  81. package/dist/infrastructure/adapters/http-storage-adapter.d.ts +23 -0
  82. package/dist/infrastructure/adapters/http-storage-adapter.d.ts.map +1 -0
  83. package/dist/infrastructure/adapters/http-storage-adapter.js +154 -0
  84. package/dist/infrastructure/adapters/local-storage-adapter.d.ts +23 -0
  85. package/dist/infrastructure/adapters/local-storage-adapter.d.ts.map +1 -0
  86. package/dist/infrastructure/adapters/local-storage-adapter.js +124 -0
  87. package/dist/infrastructure/adapters/s3-storage-adapter.d.ts +24 -0
  88. package/dist/infrastructure/adapters/s3-storage-adapter.d.ts.map +1 -0
  89. package/dist/infrastructure/adapters/s3-storage-adapter.js +139 -0
  90. package/dist/infrastructure/adapters/ssh-storage-adapter.d.ts +28 -0
  91. package/dist/infrastructure/adapters/ssh-storage-adapter.d.ts.map +1 -0
  92. package/dist/infrastructure/adapters/ssh-storage-adapter.js +237 -0
  93. package/dist/infrastructure/adapters/url-storage-adapter.d.ts +14 -0
  94. package/dist/infrastructure/adapters/url-storage-adapter.d.ts.map +1 -0
  95. package/dist/infrastructure/adapters/url-storage-adapter.js +92 -0
  96. package/dist/infrastructure/chunk-sources/disk-chunk-source.d.ts +12 -0
  97. package/dist/infrastructure/chunk-sources/disk-chunk-source.d.ts.map +1 -0
  98. package/dist/infrastructure/chunk-sources/disk-chunk-source.js +61 -0
  99. package/dist/infrastructure/chunk-sources/index.d.ts +4 -0
  100. package/dist/infrastructure/chunk-sources/index.d.ts.map +1 -0
  101. package/dist/infrastructure/chunk-sources/index.js +19 -0
  102. package/dist/infrastructure/chunk-sources/memory-chunk-source.d.ts +9 -0
  103. package/dist/infrastructure/chunk-sources/memory-chunk-source.d.ts.map +1 -0
  104. package/dist/infrastructure/chunk-sources/memory-chunk-source.js +29 -0
  105. package/dist/infrastructure/chunk-sources/storage-chunk-source.d.ts +21 -0
  106. package/dist/infrastructure/chunk-sources/storage-chunk-source.d.ts.map +1 -0
  107. package/dist/infrastructure/chunk-sources/storage-chunk-source.js +150 -0
  108. package/dist/infrastructure/client.d.ts +45 -0
  109. package/dist/infrastructure/client.d.ts.map +1 -0
  110. package/dist/infrastructure/client.js +52 -0
  111. package/dist/infrastructure/factories/pipeline-factory.d.ts +15 -0
  112. package/dist/infrastructure/factories/pipeline-factory.d.ts.map +1 -0
  113. package/dist/infrastructure/factories/pipeline-factory.js +26 -0
  114. package/dist/infrastructure/factories/service-factory.d.ts +11 -0
  115. package/dist/infrastructure/factories/service-factory.d.ts.map +1 -0
  116. package/dist/infrastructure/factories/service-factory.js +17 -0
  117. package/dist/infrastructure/factories/storage-adpater-factory.d.ts +41 -0
  118. package/dist/infrastructure/factories/storage-adpater-factory.d.ts.map +1 -0
  119. package/dist/infrastructure/factories/storage-adpater-factory.js +33 -0
  120. package/dist/infrastructure/pipelines/default-hash-download-pipeline.d.ts +27 -0
  121. package/dist/infrastructure/pipelines/default-hash-download-pipeline.d.ts.map +1 -0
  122. package/dist/infrastructure/pipelines/default-hash-download-pipeline.js +211 -0
  123. package/dist/infrastructure/pipelines/default-hash-upload-pipeline.d.ts +19 -0
  124. package/dist/infrastructure/pipelines/default-hash-upload-pipeline.d.ts.map +1 -0
  125. package/dist/infrastructure/pipelines/default-hash-upload-pipeline.js +170 -0
  126. package/dist/infrastructure/pipelines/default-url-download-pipeline.d.ts +30 -0
  127. package/dist/infrastructure/pipelines/default-url-download-pipeline.d.ts.map +1 -0
  128. package/dist/infrastructure/pipelines/default-url-download-pipeline.js +198 -0
  129. package/dist/infrastructure/pipelines/default-url-upload-pipeline.d.ts +20 -0
  130. package/dist/infrastructure/pipelines/default-url-upload-pipeline.d.ts.map +1 -0
  131. package/dist/infrastructure/pipelines/default-url-upload-pipeline.js +126 -0
  132. package/dist/infrastructure/services/hash-wasm-hasher-service.d.ts +13 -0
  133. package/dist/infrastructure/services/hash-wasm-hasher-service.d.ts.map +1 -0
  134. package/dist/infrastructure/services/hash-wasm-hasher-service.js +113 -0
  135. package/dist/infrastructure/services/memory-delta-service.d.ts +17 -0
  136. package/dist/infrastructure/services/memory-delta-service.d.ts.map +1 -0
  137. package/dist/infrastructure/services/memory-delta-service.js +198 -0
  138. package/dist/infrastructure/services/memory-reconstruction-service.d.ts +25 -0
  139. package/dist/infrastructure/services/memory-reconstruction-service.d.ts.map +1 -0
  140. package/dist/infrastructure/services/memory-reconstruction-service.js +329 -0
  141. package/dist/infrastructure/services/memory-validation-service.d.ts +9 -0
  142. package/dist/infrastructure/services/memory-validation-service.d.ts.map +1 -0
  143. package/dist/infrastructure/services/memory-validation-service.js +33 -0
  144. package/package.json +43 -0
@@ -0,0 +1,142 @@
1
+ import { ChunkSource, DeltaService, ReconstructionService, ValidationService } from '../services';
2
+ import { HashStorageAdapter, UrlStorageAdapter } from '../adapters';
3
+ import { ChunkUrlInfo, DeltaPlan, RDIndex } from '../models';
4
+ import { RacDeltaConfig } from '../config';
5
+ import { Nullish } from '../types';
6
+ export declare enum UpdateStrategy {
7
+ /**
8
+ * Downloads every chunk before reconstruction and saves chunks in memory.
9
+ * Perfect for fast connections and offline reconstruction.
10
+ *
11
+ * NOTE: For large updates this is not recommended, as it could use a lot of memory.
12
+ */
13
+ DownloadAllFirstToMemory = "download-all-first-to-memory",
14
+ /**
15
+ * Downloads chunks on demand during reconstruction.
16
+ * Useful for resource-limited environments or progressive streaming.
17
+ */
18
+ StreamFromNetwork = "stream-from-network",
19
+ /**
20
+ * Downloads every chunk before reconstruction and saves chunks on disk at the given path.
21
+ * Perfect for fast connections, fast disks, and offline reconstruction.
22
+ */
23
+ DownloadAllFirstToDisk = "download-all-first-to-disk"
24
+ }
25
+ export interface DownloadOptions {
26
+ /**
27
+ * If true, downloads everything.
28
+ * If false, only new and modified chunks will be downloaded.
29
+ */
30
+ force?: Nullish<boolean>;
31
+ /**
32
+ * Path where chunks will be saved if `DownloadAllFirstToDisk` strategy is set.
33
+ */
34
+ chunksSavePath?: Nullish<string>;
35
+ /**
36
+ * If true, will first search for an existing rd-index in the local dir.
37
+ * This option is not recommended, as generating a new rd-index is always the best way to detect changes or corruption.
38
+ */
39
+ useExistingIndex?: Nullish<boolean>;
40
+ /**
41
+ * How many files will be reconstructed concurrently. (Default is 5)
42
+ */
43
+ fileReconstructionConcurrency?: Nullish<number>;
44
+ /**
45
+ * Minimum file size (in bytes) required to perform an **in-place reconstruction** instead of using a temporary file.
46
+ * Default: `400 * 1024 * 1024` (400 MB).
47
+ *
48
+ * **In-place reconstruction:**
49
+ * The existing file is opened and updated directly by overwriting only the modified or missing chunks.
50
+ *
51
+ * **.tmp reconstruction:**
52
+ * The file is fully rebuilt in a temporary `.tmp` location using all chunks (new and existing), then replaced over the original file.
53
+ *
54
+ * **When to use:**
55
+ * In-place reconstruction is recommended for **large files**, as it avoids rewriting the entire file and significantly reduces disk space usage.
56
+ * However, it may be **unsafe for certain formats** (e.g., ZIP archives or databases) that are sensitive to partial writes or corruption.
57
+ * To disable in-place reconstruction entirely, set this value to `0`.
58
+ */
59
+ inPlaceReconstructionThreshold?: Nullish<number>;
60
+ /**
61
+ * Optional callback to report progress.
62
+ */
63
+ onProgress?: (type: 'download' | 'deleting' | 'reconstructing', progress: number, diskUsage?: Nullish<number>, speed?: Nullish<number>) => void;
64
+ /**
65
+ * Optional callback for state changes.
66
+ */
67
+ onStateChange?: (state: 'downloading' | 'reconstructing' | 'cleaning' | 'scanning') => void;
68
+ }
69
+ export declare abstract class DownloadPipeline {
70
+ protected updateProgress(value: number, state: 'download' | 'reconstructing' | 'deleting', diskUsage?: Nullish<number>, speed?: Nullish<number>, options?: Nullish<DownloadOptions>): void;
71
+ protected changeState(state: 'downloading' | 'reconstructing' | 'cleaning' | 'scanning', options?: Nullish<DownloadOptions>): void;
72
+ /**
73
+ * This method creates an rd-index.json for a given directory, scanning files and generating hashes.
74
+ *
75
+ * @param dir directory to generate the index for.
76
+ */
77
+ abstract loadLocalIndex(dir: string): Promise<RDIndex>;
78
+ /**
79
+ * This method looks for an existing rd-index.json in the given folder.
80
+ *
81
+ * @param localDir directory to search for the index.
82
+ */
83
+ abstract findLocalIndex(localDir: string): Promise<RDIndex | null>;
84
+ /**
85
+ * This method saves the new local index in the given folder.
86
+ *
87
+ * @param localDir directory to save index.
88
+ * @param index the rd-index.json object.
89
+ */
90
+ abstract saveLocalIndex(localDir: string, index: RDIndex): Promise<void>;
91
+ /**
92
+ * This method checks reconstructed files, verifying their hashes and looking for obsolete chunks.
93
+ * If obsolete chunks are still present, it deletes them and reconstructs the file again if needed.
94
+ *
95
+ * @param plan the `DeltaPlan` generated by delta.compare of the two rd-index.json for the download
96
+ * @param localDir directory to check files
97
+ * @param remoteIndex the remote index for reference
98
+ * @param chunkSource `ChunkSource` used to re-download corrupt chunks in case of invalid files.
99
+ */
100
+ abstract verifyAndDeleteObsoleteChunks(plan: DeltaPlan, localDir: string, remoteIndex: RDIndex, chunkSource: ChunkSource, options?: Nullish<DownloadOptions>): Promise<{
101
+ deletedFiles: string[];
102
+ verifiedFiles: string[];
103
+ rebuiltFiles: string[];
104
+ }>;
105
+ }
106
+ export declare abstract class HashDownloadPipeline extends DownloadPipeline {
107
+ protected readonly storage: HashStorageAdapter;
108
+ protected readonly delta: DeltaService;
109
+ protected readonly reconstruction: ReconstructionService;
110
+ protected readonly validation: ValidationService;
111
+ protected readonly config: RacDeltaConfig;
112
+ constructor(storage: HashStorageAdapter, delta: DeltaService, reconstruction: ReconstructionService, validation: ValidationService, config: RacDeltaConfig);
113
+ abstract execute(localDir: string, strategy: UpdateStrategy, remoteIndex?: Nullish<RDIndex>, options?: Nullish<DownloadOptions>): Promise<void>;
114
+ /**
115
+ * This method first downloads all chunks needed for the update and saves them temporarily on disk or in memory.
116
+ *
117
+ * Returns a `ChunkSource`. Chunk sources are needed to reconstruct files; this method will ONLY return
118
+ * memory or disk chunk sources for offline reconstruction. If you use a storage like S3, you can skip this
119
+ * and use the StorageChunkSource directly with `reconstruction.reconstructAll()` if you prefer.
120
+ *
121
+ * (Using StorageChunkSource downloads chunks and reconstructs files at the same time, concurrently.)
122
+ *
123
+ * @param plan the `DeltaPlan` generated by delta.compare of the two rd-index.json for the download
124
+ * @param target disk or memory, where chunks will be saved temporarily
125
+ * @param options
126
+ */
127
+ abstract downloadAllMissingChunks(plan: DeltaPlan, target: 'memory' | 'disk', options?: Nullish<DownloadOptions>): Promise<ChunkSource>;
128
+ }
129
+ export declare abstract class UrlDownloadPipeline extends DownloadPipeline {
130
+ protected readonly storage: UrlStorageAdapter;
131
+ protected readonly reconstruction: ReconstructionService;
132
+ protected readonly validation: ValidationService;
133
+ protected readonly delta: DeltaService;
134
+ protected readonly config: RacDeltaConfig;
135
+ constructor(storage: UrlStorageAdapter, reconstruction: ReconstructionService, validation: ValidationService, delta: DeltaService, config: RacDeltaConfig);
136
+ abstract execute(localDir: string, urls: {
137
+ downloadUrls: Record<string, ChunkUrlInfo>;
138
+ indexUrl: string;
139
+ }, strategy: UpdateStrategy, plan?: Nullish<DeltaPlan>, options?: Nullish<DownloadOptions>): Promise<void>;
140
+ abstract downloadAllMissingChunks(downloadUrls: Record<string, ChunkUrlInfo>, target: 'memory' | 'disk', options?: Nullish<DownloadOptions>): Promise<ChunkSource>;
141
+ }
142
+ //# sourceMappingURL=download-pipeline.d.ts.map
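
The declarations above are the consumer-facing knobs for a download. Below is a minimal sketch of wiring them together; it assumes `UpdateStrategy`, `DownloadOptions`, and `HashDownloadPipeline` are re-exported from the package root (`rac-delta`) and that a concrete pipeline instance is already available from the package's factories — neither detail is shown in this hunk.

```ts
// Sketch only: the 'rac-delta' root exports and the `pipeline` instance are assumptions.
import { UpdateStrategy, type DownloadOptions, type HashDownloadPipeline } from 'rac-delta';

declare const pipeline: HashDownloadPipeline; // concrete implementation built by the package's factories

const options: DownloadOptions = {
  force: false,                       // only fetch new and modified chunks
  chunksSavePath: './chunks-tmp',     // used by the DownloadAllFirstToDisk strategy
  fileReconstructionConcurrency: 5,   // default per the docs above
  inPlaceReconstructionThreshold: 0,  // 0 disables in-place reconstruction
  onProgress: (type, progress, diskUsage, speed) => console.log(type, progress, diskUsage, speed),
  onStateChange: (state) => console.log('state:', state),
};

async function update() {
  // remoteIndex is optional; passing undefined lets the pipeline fetch/derive it.
  await pipeline.execute('./app', UpdateStrategy.DownloadAllFirstToDisk, undefined, options);
}
```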
@@ -0,0 +1 @@
1
+ {"version":3,"file":"download-pipeline.d.ts","sourceRoot":"","sources":["../../../src/core/pipelines/download-pipeline.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,YAAY,EAAE,qBAAqB,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAC;AAClG,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAC;AACpE,OAAO,EAAE,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE,MAAM,WAAW,CAAC;AAC7D,OAAO,EAAE,cAAc,EAAE,MAAM,WAAW,CAAC;AAC3C,OAAO,EAAE,OAAO,EAAE,MAAM,UAAU,CAAC;AAEnC,oBAAY,cAAc;IACxB;;;;;OAKG;IACH,wBAAwB,iCAAiC;IAEzD;;;OAGG;IACH,iBAAiB,wBAAwB;IAEzC;;;OAGG;IACH,sBAAsB,+BAA+B;CACtD;AAED,MAAM,WAAW,eAAe;IAC9B;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC;IAEzB;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC,CAAC;IAEjC;;;OAGG;IACH,gBAAgB,CAAC,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC;IAEpC;;OAEG;IACH,6BAA6B,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC,CAAC;IAEhD;;;;;;;;;;;;;;OAcG;IACH,8BAA8B,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC,CAAC;IAEjD;;OAEG;IACH,UAAU,CAAC,EAAE,CACX,IAAI,EAAE,UAAU,GAAG,UAAU,GAAG,gBAAgB,EAChD,QAAQ,EAAE,MAAM,EAChB,SAAS,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC,EAC3B,KAAK,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC,KACpB,IAAI,CAAC;IAEV;;OAEG;IACH,aAAa,CAAC,EAAE,CAAC,KAAK,EAAE,aAAa,GAAG,gBAAgB,GAAG,UAAU,GAAG,UAAU,KAAK,IAAI,CAAC;CAC7F;AAED,8BAAsB,gBAAgB;IACpC,SAAS,CAAC,cAAc,CACtB,KAAK,EAAE,MAAM,EACb,KAAK,EAAE,UAAU,GAAG,gBAAgB,GAAG,UAAU,EACjD,SAAS,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC,EAC3B,KAAK,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC,EACvB,OAAO,CAAC,EAAE,OAAO,CAAC,eAAe,CAAC;IAKpC,SAAS,CAAC,WAAW,CACnB,KAAK,EAAE,aAAa,GAAG,gBAAgB,GAAG,UAAU,GAAG,UAAU,EACjE,OAAO,CAAC,EAAE,OAAO,CAAC,eAAe,CAAC;IAKpC;;;;OAIG;IACH,QAAQ,CAAC,cAAc,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IAEtD;;;;OAIG;IACH,QAAQ,CAAC,cAAc,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,GAAG,IAAI,CAAC;IAElE;;;;;OAKG;IACH,QAAQ,CAAC,cAAc,CAAC,QAAQ,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAExE;;;;;;;;OAQG;IACH,QAAQ,CAAC,6BAA6B,CACpC,IAAI,EAAE,SAAS,EACf,QAAQ,EAAE,MAAM,EAChB,WAAW,EAAE,OAAO,EACpB,WAAW,EAAE,WAAW,EACxB,OAAO,CAAC,EAAE,OAAO,CAAC,eAAe,CAAC,GACjC,OAAO,CAAC;QAAE,YAAY,EAAE,MAAM,EAAE,CAAC;QAAC,aAAa,EAAE,MAAM,EAAE,CAAC;QAAC,YAAY,EAAE,MAAM,EAAE,CAAA;KAAE,CAAC;CACxF;AAED,8BAAsB,oBAAqB,SAAQ,gBAAgB;IAE/D,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,kBAAkB;IAC9C,SAAS,CAAC,QAAQ,CAAC,KAAK,EAAE,YAAY;IACtC,SAAS,CAAC,QAAQ,CAAC,cAAc,EAAE,qBAAqB;IACxD,SAAS,CAAC,QAAQ,CAAC,UAAU,EAAE,iBAAiB;IAChD,SAAS,CAAC,QAAQ,CAAC,MAAM,EAAE,cAAc;gBAJtB,OAAO,EAAE,kBAAkB,EAC3B,KAAK,EAAE,YAAY,EACnB,cAAc,EAAE,qBAAqB,EACrC,UAAU,EAAE,iBAAiB,EAC7B,MAAM,EAAE,cAAc;IAK3C,QAAQ,CAAC,OAAO,CACd,QAAQ,EAAE,MAAM,EAChB,QAAQ,EAAE,cAAc,EACxB,WAAW,CAAC,EAAE,OAAO,CAAC,OAAO,CAAC,EAC9B,OAAO,CAAC,EAAE,OAAO,CAAC,eAAe,CAAC,GACjC,OAAO,CAAC,IAAI,CAAC;IAEhB;;;;;;;;;;;;OAYG;IACH,QAAQ,CAAC,wBAAwB,CAC/B,IAAI,EAAE,SAAS,EACf,MAAM,EAAE,QAAQ,GAAG,MAAM,EACzB,OAAO,CAAC,EAAE,OAAO,CAAC,eAAe,CAAC,GACjC,OAAO,CAAC,WAAW,CAAC;CACxB;AAED,8BAAsB,mBAAoB,SAAQ,gBAAgB;IAE9D,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,iBAAiB;IAC7C,SAAS,CAAC,QAAQ,CAAC,cAAc,EAAE,qBAAqB;IACxD,SAAS,CAAC,QAAQ,CAAC,UAAU,EAAE,iBAAiB;IAChD,SAAS,CAAC,QAAQ,CAAC,KAAK,EAAE,YAAY;IACtC,SAAS,CAAC,QAAQ,CAAC,MAAM,EAAE,cAAc;gBAJtB,OAAO,EAAE,iBAAiB,EAC1B,cAAc,EAAE,qBAAqB,EACrC,UAAU,EAAE,iBAAiB,EAC7B,KAAK,EAAE,YAAY,EACnB,MAAM,EAAE,cAAc;IAK3C,QAAQ,CAAC,OAAO,CACd,QAAQ,EAAE,MAAM,EAChB,IAAI,EAAE;QACJ,YAAY,EAAE,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;QAC3C,QAAQ,EAAE,MAAM,CAAC;KAClB,EACD,QAAQ,EAAE,cAAc,EACxB,IAAI,CAAC,EAAE,OAAO,CAAC,SAAS,CAAC,EACzB,OAAO,CAAC,EAAE,OAAO,CAAC,eAAe,CAAC,GACjC,OAAO,CAAC,IAAI,CAAC;IAChB,QAAQ,CAAC,wBAAwB,CAC/B,YAAY,EAAE,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,EAC1C,MAAM,EAAE,QAAQ,GAAG,MAAM,EACzB,OAAO,CAAC,EAAE,OAAO,CAAC,eAAe,CAAC,GACjC,OAAO,CAAC,WAAW,CAAC;CACxB"}
@@ -0,0 +1,64 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.UrlDownloadPipeline = exports.HashDownloadPipeline = exports.DownloadPipeline = exports.UpdateStrategy = void 0;
4
+ var UpdateStrategy;
5
+ (function (UpdateStrategy) {
6
+ /**
7
+ * Downloads every chunk before reconstruction and saves chunks in memory.
8
+ * Perfect for fast connections and offline reconstruction.
9
+ *
10
+ * NOTE: For large updates this is not recommended, as it could use a lot of memory.
11
+ */
12
+ UpdateStrategy["DownloadAllFirstToMemory"] = "download-all-first-to-memory";
13
+ /**
14
+ * Downloads chunks on demand during reconstruction.
15
+ * Useful for resource-limited environments or progressive streaming.
16
+ */
17
+ UpdateStrategy["StreamFromNetwork"] = "stream-from-network";
18
+ /**
19
+ * Downloads every chunk before reconstruction and saves chunks on disk at the given path.
20
+ * Perfect for fast connections, fast disks, and offline reconstruction.
21
+ */
22
+ UpdateStrategy["DownloadAllFirstToDisk"] = "download-all-first-to-disk";
23
+ })(UpdateStrategy || (exports.UpdateStrategy = UpdateStrategy = {}));
24
+ class DownloadPipeline {
25
+ updateProgress(value, state, diskUsage, speed, options) {
26
+ options?.onProgress?.(state, value, diskUsage, speed);
27
+ }
28
+ changeState(state, options) {
29
+ options?.onStateChange?.(state);
30
+ }
31
+ }
32
+ exports.DownloadPipeline = DownloadPipeline;
33
+ class HashDownloadPipeline extends DownloadPipeline {
34
+ storage;
35
+ delta;
36
+ reconstruction;
37
+ validation;
38
+ config;
39
+ constructor(storage, delta, reconstruction, validation, config) {
40
+ super();
41
+ this.storage = storage;
42
+ this.delta = delta;
43
+ this.reconstruction = reconstruction;
44
+ this.validation = validation;
45
+ this.config = config;
46
+ }
47
+ }
48
+ exports.HashDownloadPipeline = HashDownloadPipeline;
49
+ class UrlDownloadPipeline extends DownloadPipeline {
50
+ storage;
51
+ reconstruction;
52
+ validation;
53
+ delta;
54
+ config;
55
+ constructor(storage, reconstruction, validation, delta, config) {
56
+ super();
57
+ this.storage = storage;
58
+ this.reconstruction = reconstruction;
59
+ this.validation = validation;
60
+ this.delta = delta;
61
+ this.config = config;
62
+ }
63
+ }
64
+ exports.UrlDownloadPipeline = UrlDownloadPipeline;
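
As the compiled output above shows, `UpdateStrategy` is a plain string enum, so strategy values serialize cleanly to config or JSON. A small illustration, assuming the root re-export:

```ts
import { UpdateStrategy } from 'rac-delta'; // assumed root re-export

// The member value is the wire/config representation.
console.log(UpdateStrategy.StreamFromNetwork); // "stream-from-network"

// Validate a value loaded from configuration before casting it back to the enum type.
function toStrategy(raw: string): UpdateStrategy | undefined {
  return (Object.values(UpdateStrategy) as string[]).includes(raw)
    ? (raw as UpdateStrategy)
    : undefined;
}
```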
@@ -0,0 +1,3 @@
1
+ export * from './download-pipeline';
2
+ export * from './upload-pipeline';
3
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/core/pipelines/index.ts"],"names":[],"mappings":"AAAA,cAAc,qBAAqB,CAAC;AACpC,cAAc,mBAAmB,CAAC"}
@@ -0,0 +1,18 @@
1
+ "use strict";
2
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
+ if (k2 === undefined) k2 = k;
4
+ var desc = Object.getOwnPropertyDescriptor(m, k);
5
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
+ desc = { enumerable: true, get: function() { return m[k]; } };
7
+ }
8
+ Object.defineProperty(o, k2, desc);
9
+ }) : (function(o, m, k, k2) {
10
+ if (k2 === undefined) k2 = k;
11
+ o[k2] = m[k];
12
+ }));
13
+ var __exportStar = (this && this.__exportStar) || function(m, exports) {
14
+ for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
15
+ };
16
+ Object.defineProperty(exports, "__esModule", { value: true });
17
+ __exportStar(require("./download-pipeline"), exports);
18
+ __exportStar(require("./upload-pipeline"), exports);
@@ -0,0 +1,60 @@
1
+ import { HashStorageAdapter, UrlStorageAdapter } from '../adapters';
2
+ import { ChunkUrlInfo, DeltaPlan, RDIndex } from '../models';
3
+ import { DeltaService } from '../services';
4
+ import { RacDeltaConfig } from '../config';
5
+ import { Nullish } from '../types';
6
+ export type UploadState = 'uploading' | 'comparing' | 'cleaning' | 'finalizing' | 'scanning';
7
+ export interface UploadOptions {
8
+ /**
9
+ * If true, forces complete upload even if remote index exists.
10
+ * If false, only new and modified chunks will be uploaded.
11
+ */
12
+ force?: Nullish<boolean>;
13
+ /**
14
+ * If true and no remote index is found, the upload is aborted.
15
+ * If false (default), everything is uploaded when no remote index is found.
16
+ */
17
+ requireRemoteIndex?: Nullish<boolean>;
18
+ /**
19
+ * Files or directories that must be ignored when creating the rd-index.json.
20
+ * Example: '*.ts', '/folder/*', 'ignorefile.txt'...
21
+ */
22
+ ignorePatterns?: Nullish<string[]>;
23
+ /**
24
+ * Optional callback to report progress.
25
+ */
26
+ onProgress?: (type: 'upload' | 'deleting', progress: number, speed?: Nullish<number>) => void;
27
+ /**
28
+ * Optional callback for state changes.
29
+ */
30
+ onStateChange?: (state: UploadState) => void;
31
+ }
32
+ export declare abstract class UploadPipeline {
33
+ protected updateProgress(value: number, state: 'upload' | 'deleting', speed?: Nullish<number>, options?: Nullish<UploadOptions>): void;
34
+ protected changeState(state: UploadState, options?: UploadOptions): void;
35
+ }
36
+ export declare abstract class HashUploadPipeline extends UploadPipeline {
37
+ protected readonly storage: HashStorageAdapter;
38
+ protected readonly delta: DeltaService;
39
+ protected readonly config: RacDeltaConfig;
40
+ constructor(storage: HashStorageAdapter, delta: DeltaService, config: RacDeltaConfig);
41
+ abstract execute(directory: string, remoteIndex?: Nullish<RDIndex>, options?: Nullish<UploadOptions>): Promise<RDIndex>;
42
+ abstract scanDirectory(dir: string, ignorePatterns?: Nullish<string[]>): Promise<RDIndex>;
43
+ abstract uploadMissingChunks(plan: DeltaPlan, baseDir: string, force: boolean, options?: Nullish<UploadOptions>): Promise<void>;
44
+ abstract uploadIndex(index: RDIndex): Promise<void>;
45
+ abstract deleteObsoleteChunks(plan: DeltaPlan, options?: Nullish<UploadOptions>): Promise<void>;
46
+ }
47
+ export declare abstract class UrlUploadPipeline extends UploadPipeline {
48
+ protected readonly storage: UrlStorageAdapter;
49
+ protected readonly config: RacDeltaConfig;
50
+ constructor(storage: UrlStorageAdapter, config: RacDeltaConfig);
51
+ abstract execute(localIndex: RDIndex, urls: {
52
+ uploadUrls: Record<string, ChunkUrlInfo>;
53
+ deleteUrls?: Nullish<string[]>;
54
+ indexUrl: string;
55
+ }, options?: Nullish<UploadOptions>): Promise<RDIndex>;
56
+ abstract uploadMissingChunks(uploadUrls: Record<string, ChunkUrlInfo>, options?: Nullish<UploadOptions>): Promise<void>;
57
+ abstract uploadIndex(index: RDIndex, uploadUrl: string): Promise<void>;
58
+ abstract deleteObsoleteChunks(deleteUrls: string[], options?: Nullish<UploadOptions>): Promise<void>;
59
+ }
60
+ //# sourceMappingURL=upload-pipeline.d.ts.map
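
As with downloads, the options above are plain data plus callbacks. A minimal sketch, assuming the types and a concrete `HashUploadPipeline` instance come from the package root and its factories (not shown in this hunk); the ignore patterns reuse the examples from the docs above.

```ts
import type { UploadOptions, HashUploadPipeline } from 'rac-delta'; // assumed root re-exports

declare const uploader: HashUploadPipeline; // concrete implementation from the package's factories

const options: UploadOptions = {
  force: false,              // only upload new and modified chunks
  requireRemoteIndex: false, // upload everything when no remote index exists
  ignorePatterns: ['*.ts', '/folder/*', 'ignorefile.txt'],
  onProgress: (type, progress, speed) => console.log(type, progress, speed),
  onStateChange: (state) => console.log('state:', state),
};

async function publish() {
  // Returns the freshly generated rd-index for the uploaded directory.
  const newIndex = await uploader.execute('./build', undefined, options);
  return newIndex;
}
```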
@@ -0,0 +1 @@
1
+ {"version":3,"file":"upload-pipeline.d.ts","sourceRoot":"","sources":["../../../src/core/pipelines/upload-pipeline.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAC;AACpE,OAAO,EAAE,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE,MAAM,WAAW,CAAC;AAC7D,OAAO,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAC3C,OAAO,EAAE,cAAc,EAAE,MAAM,WAAW,CAAC;AAC3C,OAAO,EAAE,OAAO,EAAE,MAAM,UAAU,CAAC;AAEnC,MAAM,MAAM,WAAW,GAAG,WAAW,GAAG,WAAW,GAAG,UAAU,GAAG,YAAY,GAAG,UAAU,CAAC;AAE7F,MAAM,WAAW,aAAa;IAC5B;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC;IAEzB;;;OAGG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC;IAEtC;;;OAGG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC;IAEnC;;OAEG;IACH,UAAU,CAAC,EAAE,CAAC,IAAI,EAAE,QAAQ,GAAG,UAAU,EAAE,QAAQ,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC,KAAK,IAAI,CAAC;IAE9F;;OAEG;IACH,aAAa,CAAC,EAAE,CAAC,KAAK,EAAE,WAAW,KAAK,IAAI,CAAC;CAC9C;AAED,8BAAsB,cAAc;IAClC,SAAS,CAAC,cAAc,CACtB,KAAK,EAAE,MAAM,EACb,KAAK,EAAE,QAAQ,GAAG,UAAU,EAC5B,KAAK,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC,EACvB,OAAO,CAAC,EAAE,OAAO,CAAC,aAAa,CAAC;IAKlC,SAAS,CAAC,WAAW,CAAC,KAAK,EAAE,WAAW,EAAE,OAAO,CAAC,EAAE,aAAa;CAGlE;AAED,8BAAsB,kBAAmB,SAAQ,cAAc;IAE3D,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,kBAAkB;IAC9C,SAAS,CAAC,QAAQ,CAAC,KAAK,EAAE,YAAY;IACtC,SAAS,CAAC,QAAQ,CAAC,MAAM,EAAE,cAAc;gBAFtB,OAAO,EAAE,kBAAkB,EAC3B,KAAK,EAAE,YAAY,EACnB,MAAM,EAAE,cAAc;IAK3C,QAAQ,CAAC,OAAO,CACd,SAAS,EAAE,MAAM,EACjB,WAAW,CAAC,EAAE,OAAO,CAAC,OAAO,CAAC,EAC9B,OAAO,CAAC,EAAE,OAAO,CAAC,aAAa,CAAC,GAC/B,OAAO,CAAC,OAAO,CAAC;IAEnB,QAAQ,CAAC,aAAa,CAAC,GAAG,EAAE,MAAM,EAAE,cAAc,CAAC,EAAE,OAAO,CAAC,MAAM,EAAE,CAAC,GAAG,OAAO,CAAC,OAAO,CAAC;IACzF,QAAQ,CAAC,mBAAmB,CAC1B,IAAI,EAAE,SAAS,EACf,OAAO,EAAE,MAAM,EACf,KAAK,EAAE,OAAO,EACd,OAAO,CAAC,EAAE,OAAO,CAAC,aAAa,CAAC,GAC/B,OAAO,CAAC,IAAI,CAAC;IAChB,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IACnD,QAAQ,CAAC,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,aAAa,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;CAChG;AAED,8BAAsB,iBAAkB,SAAQ,cAAc;IAE1D,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,iBAAiB;IAC7C,SAAS,CAAC,QAAQ,CAAC,MAAM,EAAE,cAAc;gBADtB,OAAO,EAAE,iBAAiB,EAC1B,MAAM,EAAE,cAAc;IAK3C,QAAQ,CAAC,OAAO,CACd,UAAU,EAAE,OAAO,EACnB,IAAI,EAAE;QACJ,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;QACzC,UAAU,CAAC,EAAE,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC;QAC/B,QAAQ,EAAE,MAAM,CAAC;KAClB,EACD,OAAO,CAAC,EAAE,OAAO,CAAC,aAAa,CAAC,GAC/B,OAAO,CAAC,OAAO,CAAC;IAEnB,QAAQ,CAAC,mBAAmB,CAC1B,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,EACxC,OAAO,CAAC,EAAE,OAAO,CAAC,aAAa,CAAC,GAC/B,OAAO,CAAC,IAAI,CAAC;IAChB,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE,OAAO,EAAE,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IACtE,QAAQ,CAAC,oBAAoB,CAC3B,UAAU,EAAE,MAAM,EAAE,EACpB,OAAO,CAAC,EAAE,OAAO,CAAC,aAAa,CAAC,GAC/B,OAAO,CAAC,IAAI,CAAC;CACjB"}
@@ -0,0 +1,34 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.UrlUploadPipeline = exports.HashUploadPipeline = exports.UploadPipeline = void 0;
4
+ class UploadPipeline {
5
+ updateProgress(value, state, speed, options) {
6
+ options?.onProgress?.(state, value, speed);
7
+ }
8
+ changeState(state, options) {
9
+ options?.onStateChange?.(state);
10
+ }
11
+ }
12
+ exports.UploadPipeline = UploadPipeline;
13
+ class HashUploadPipeline extends UploadPipeline {
14
+ storage;
15
+ delta;
16
+ config;
17
+ constructor(storage, delta, config) {
18
+ super();
19
+ this.storage = storage;
20
+ this.delta = delta;
21
+ this.config = config;
22
+ }
23
+ }
24
+ exports.HashUploadPipeline = HashUploadPipeline;
25
+ class UrlUploadPipeline extends UploadPipeline {
26
+ storage;
27
+ config;
28
+ constructor(storage, config) {
29
+ super();
30
+ this.storage = storage;
31
+ this.config = config;
32
+ }
33
+ }
34
+ exports.UrlUploadPipeline = UrlUploadPipeline;
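
For pre-signed-URL backends, the URL variant above takes the upload targets directly. A call-shape sketch only: `ChunkUrlInfo`'s fields are not visible in this diff, so the record values (and the assumption that it is keyed by chunk hash) are left to the backend, and the index URL below is hypothetical.

```ts
import type { UrlUploadPipeline, RDIndex, ChunkUrlInfo } from 'rac-delta'; // assumed root re-exports

declare const urlUploader: UrlUploadPipeline;
declare const localIndex: RDIndex;
declare const uploadUrls: Record<string, ChunkUrlInfo>; // presumably keyed by chunk hash

async function publishViaUrls() {
  await urlUploader.execute(localIndex, {
    uploadUrls,
    deleteUrls: [],                                // optional pre-signed deletes for obsolete chunks
    indexUrl: 'https://example.com/rd-index.json', // hypothetical pre-signed target for the index
  });
}
```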
@@ -0,0 +1,76 @@
1
+ import { DeltaPlan, FileEntry, RDIndex } from '../models';
2
+ import { Nullish } from '../types';
3
+ export interface AsyncChunkStream extends AsyncIterable<Uint8Array> {
4
+ nextChunk(): Promise<Uint8Array | null>;
5
+ reset?(): Promise<void>;
6
+ close?(): Promise<void>;
7
+ }
8
+ export interface DeltaService {
9
+ /**
10
+ * Creates an RDIndex for a given directory.
11
+ *
12
+ * This process scans all files recursively, splits them into chunks,
13
+ * hashes each chunk (using HasherService.hashStream), and builds
14
+ * the rd-index.json structure.
15
+ *
16
+ * @param rootPath the path of the directory
17
+ * @param chunkSize The size (in bytes) chunks will have; the recommended value is 1024.
18
+ *
19
+ */
20
+ createIndexFromDirectory(rootPath: string, chunkSize: number, concurrency?: Nullish<number>, ignorePatterns?: Nullish<string[]>): Promise<RDIndex>;
21
+ /**
22
+ * Creates a FileEntry from a readable data stream.
23
+ *
24
+ * This method is used when the data source is remote or does not
25
+ * exist as a physical file in the local filesystem.
26
+ *
27
+ * It reads the stream in chunks (chunk size must be defined by source), hashes each one (using HasherService.hashStream),
28
+ * and produces a `FileEntry` compatible with RDIndex.
29
+ *
30
+ * @param stream An async stream providing file chunks.
31
+ * @param path Relative path of the source file
32
+ */
33
+ createFileEntryFromStream(stream: AsyncChunkStream, path: string): Promise<FileEntry>;
34
+ /**
35
+ * Compares two rd-index.json files and generates a DeltaPlan. (Neutral method; for more specific behavior use compareForUpload or compareForDownload.)
36
+ *
37
+ * @param source The rd-index from source to compare (Example: local)
38
+ * @param target The rd-index from target to compare (Example: remote server)
39
+ *
40
+ * Local -> Remote = upload comparison.
41
+ *
42
+ * Remote -> Local = download comparison.
43
+ *
44
+ * DeltaPlan Explanation:
45
+ * - missing chunks: chunks that exist in `source` but are missing in `target`
46
+ * (i.e. need to be transferred from source -> target).
47
+ * - reused chunks: chunks present in target that can be reused.
48
+ *
49
+ * - obsolete chunks: chunks that no longer exist in source and need to be removed from target.
50
+ * (deduplication must be taken into account: for uploads, a chunk that is still used by other files must not be marked as obsolete)
51
+ */
52
+ compare(source: RDIndex, target: RDIndex | null): DeltaPlan;
53
+ /**
54
+ * Merges two `DeltaPlan` objects.
55
+ *
56
+ * @param base base `DeltaPlan`
57
+ * @param updates `DeltaPlan` to merge with base
58
+ */
59
+ mergePlans(base: DeltaPlan, updates: DeltaPlan): DeltaPlan;
60
+ /**
61
+ * This wrapper compares rd-indexes for an upload update. It handles deduplication correctly when marking obsolete chunks.
62
+ * (Example: a chunk is used in 2 files but one file no longer uses it => it IS NOT AN OBSOLETE CHUNK)
63
+ *
64
+ * @param localIndex the local rd-index
65
+ * @param remoteIndex the remote rd-index (null to upload everything)
66
+ */
67
+ compareForUpload(localIndex: RDIndex, remoteIndex: RDIndex | null): Promise<DeltaPlan>;
68
+ /**
69
+ * This wrapper compares rd-indexes for a download update.
70
+ *
71
+ * @param localIndex the local rd-index (null to download everything)
72
+ * @param remoteIndex the remote rd-index
73
+ */
74
+ compareForDownload(localIndex: RDIndex | null, remoteIndex: RDIndex): Promise<DeltaPlan>;
75
+ }
76
+ //# sourceMappingURL=delta-service.d.ts.map
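
`createFileEntryFromStream` above takes any `AsyncChunkStream`. A minimal in-memory implementation of that interface follows (sketch; the `rac-delta` root re-exports and the concrete `DeltaService` instance are assumptions — the package ships a memory-delta-service under dist/infrastructure). The chunk size is chosen by the source, as the docs above require.

```ts
import type { AsyncChunkStream, DeltaService } from 'rac-delta'; // assumed root re-exports

// AsyncChunkStream over an in-memory buffer, yielding fixed-size slices.
class BufferChunkStream implements AsyncChunkStream {
  private offset = 0;

  constructor(private readonly data: Uint8Array, private readonly chunkSize = 1024 * 1024) {}

  async nextChunk(): Promise<Uint8Array | null> {
    if (this.offset >= this.data.length) return null;
    const chunk = this.data.subarray(this.offset, this.offset + this.chunkSize);
    this.offset += this.chunkSize;
    return chunk;
  }

  async reset(): Promise<void> {
    this.offset = 0;
  }

  async *[Symbol.asyncIterator]() {
    let chunk: Uint8Array | null;
    while ((chunk = await this.nextChunk()) !== null) yield chunk;
  }
}

declare const delta: DeltaService; // concrete implementation, e.g. the memory-delta-service in dist/infrastructure

async function indexRemoteBlob(data: Uint8Array) {
  return delta.createFileEntryFromStream(new BufferChunkStream(data), 'assets/blob.bin');
}
```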
@@ -0,0 +1 @@
1
+ {"version":3,"file":"delta-service.d.ts","sourceRoot":"","sources":["../../../src/core/services/delta-service.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,SAAS,EAAE,OAAO,EAAE,MAAM,WAAW,CAAC;AAC1D,OAAO,EAAE,OAAO,EAAE,MAAM,UAAU,CAAC;AAEnC,MAAM,WAAW,gBAAiB,SAAQ,aAAa,CAAC,UAAU,CAAC;IACjE,SAAS,IAAI,OAAO,CAAC,UAAU,GAAG,IAAI,CAAC,CAAC;IACxC,KAAK,CAAC,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;IACxB,KAAK,CAAC,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;CACzB;AAED,MAAM,WAAW,YAAY;IAC3B;;;;;;;;;;OAUG;IACH,wBAAwB,CACtB,QAAQ,EAAE,MAAM,EAChB,SAAS,EAAE,MAAM,EACjB,WAAW,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC,EAC7B,cAAc,CAAC,EAAE,OAAO,CAAC,MAAM,EAAE,CAAC,GACjC,OAAO,CAAC,OAAO,CAAC,CAAC;IAEpB;;;;;;;;;;;OAWG;IACH,yBAAyB,CAAC,MAAM,EAAE,gBAAgB,EAAE,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,SAAS,CAAC,CAAC;IAEtF;;;;;;;;;;;;;;;;;OAiBG;IACH,OAAO,CAAC,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,GAAG,IAAI,GAAG,SAAS,CAAC;IAC5D;;;;;OAKG;IACH,UAAU,CAAC,IAAI,EAAE,SAAS,EAAE,OAAO,EAAE,SAAS,GAAG,SAAS,CAAC;IAE3D;;;;;;OAMG;IACH,gBAAgB,CAAC,UAAU,EAAE,OAAO,EAAE,WAAW,EAAE,OAAO,GAAG,IAAI,GAAG,OAAO,CAAC,SAAS,CAAC,CAAC;IACvF;;;;;OAKG;IACH,kBAAkB,CAAC,UAAU,EAAE,OAAO,GAAG,IAAI,EAAE,WAAW,EAAE,OAAO,GAAG,OAAO,CAAC,SAAS,CAAC,CAAC;CAC1F"}
@@ -0,0 +1,2 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
@@ -0,0 +1,47 @@
1
+ import { AsyncChunkStream } from './delta-service';
2
+ import { Chunk, FileEntry } from '../models';
3
+ export interface StreamingHasher {
4
+ update(data: Uint8Array | Buffer): void;
5
+ digest(encoding?: 'hex'): string;
6
+ }
7
+ export interface HasherService {
8
+ /**
9
+ * Returns a `FileEntry` for the given file, calculating its hash and chunk hashes.
10
+ *
11
+ * @param filePath The relative path of the file (ex 'dir/file.txt').
12
+ * @param rootDir The root dir where the index is. (ex 'dir').
13
+ * @param chunkSize The size (in bytes) chunks will have, recommended is 1MB (1024 * 1024).
14
+ *
15
+ * **IMPORTANT NOTE:** the selected chunkSize must be the same across all rac-delta operations
16
+ */
17
+ hashFile(filePath: string, rootDir: string, chunkSize: number): Promise<FileEntry>;
18
+ /**
19
+ * Processes a stream of chunks and returns an array of hashed Chunks
20
+ *
21
+ * @param stream
22
+ * @param onChunk callback that receives each processed chunk's bytes
23
+ */
24
+ hashStream(stream: AsyncChunkStream, onChunk?: (chunk: Uint8Array) => void): Promise<Chunk[]>;
25
+ /**
26
+ * Returns a hash of a buffer
27
+ *
28
+ * @param data
29
+ */
30
+ hashBuffer(data: Uint8Array): Promise<string>;
31
+ /**
32
+ * Verifies that a chunk has the expected hash
33
+ *
34
+ * @param data chunk data
35
+ * @param expectedHash
36
+ */
37
+ verifyChunk(data: Uint8Array, expectedHash: string): Promise<boolean>;
38
+ /**
39
+ * Verifies that a file has the expected hash
40
+ *
41
+ * @param path file path
42
+ * @param expectedHash
43
+ */
44
+ verifyFile(path: string, expectedHash: string): Promise<boolean>;
45
+ createStreamingHasher(): Promise<StreamingHasher>;
46
+ }
47
+ //# sourceMappingURL=hasher-service.d.ts.map
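
A small sketch of using the `HasherService` contract declared above to reject a corrupt chunk. The root re-export is an assumption, and `hasher` stands in for a concrete implementation (the package ships a hash-wasm based one in dist/infrastructure/services).

```ts
import type { HasherService } from 'rac-delta'; // assumed root re-export

declare const hasher: HasherService; // e.g. the hash-wasm based service in dist/infrastructure/services

// Reject a downloaded chunk that does not match the hash recorded in the rd-index.
async function assertChunk(data: Uint8Array, expectedHash: string): Promise<void> {
  if (!(await hasher.verifyChunk(data, expectedHash))) {
    const actual = await hasher.hashBuffer(data);
    throw new Error(`chunk hash mismatch: expected ${expectedHash}, got ${actual}`);
  }
}
```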
@@ -0,0 +1 @@
1
+ {"version":3,"file":"hasher-service.d.ts","sourceRoot":"","sources":["../../../src/core/services/hasher-service.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,gBAAgB,EAAE,MAAM,iBAAiB,CAAC;AACnD,OAAO,EAAE,KAAK,EAAE,SAAS,EAAE,MAAM,WAAW,CAAC;AAE7C,MAAM,WAAW,eAAe;IAC9B,MAAM,CAAC,IAAI,EAAE,UAAU,GAAG,MAAM,GAAG,IAAI,CAAC;IAExC,MAAM,CAAC,QAAQ,CAAC,EAAE,KAAK,GAAG,MAAM,CAAC;CAClC;AAED,MAAM,WAAW,aAAa;IAC5B;;;;;;;;OAQG;IACH,QAAQ,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,SAAS,CAAC,CAAC;IAEnF;;;;;OAKG;IACH,UAAU,CAAC,MAAM,EAAE,gBAAgB,EAAE,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,UAAU,KAAK,IAAI,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;IAE9F;;;;OAIG;IACH,UAAU,CAAC,IAAI,EAAE,UAAU,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC;IAE9C;;;;;OAKG;IACH,WAAW,CAAC,IAAI,EAAE,UAAU,EAAE,YAAY,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC;IAEtE;;;;;OAKG;IACH,UAAU,CAAC,IAAI,EAAE,MAAM,EAAE,YAAY,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC;IAEjE,qBAAqB,IAAI,OAAO,CAAC,eAAe,CAAC,CAAC;CACnD"}
@@ -0,0 +1,2 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
@@ -0,0 +1,5 @@
1
+ export * from './delta-service';
2
+ export * from './hasher-service';
3
+ export * from './reconstruction-service';
4
+ export * from './validation-service';
5
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/core/services/index.ts"],"names":[],"mappings":"AAAA,cAAc,iBAAiB,CAAC;AAChC,cAAc,kBAAkB,CAAC;AACjC,cAAc,0BAA0B,CAAC;AACzC,cAAc,sBAAsB,CAAC"}
@@ -0,0 +1,20 @@
1
+ "use strict";
2
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
+ if (k2 === undefined) k2 = k;
4
+ var desc = Object.getOwnPropertyDescriptor(m, k);
5
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
+ desc = { enumerable: true, get: function() { return m[k]; } };
7
+ }
8
+ Object.defineProperty(o, k2, desc);
9
+ }) : (function(o, m, k, k2) {
10
+ if (k2 === undefined) k2 = k;
11
+ o[k2] = m[k];
12
+ }));
13
+ var __exportStar = (this && this.__exportStar) || function(m, exports) {
14
+ for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
15
+ };
16
+ Object.defineProperty(exports, "__esModule", { value: true });
17
+ __exportStar(require("./delta-service"), exports);
18
+ __exportStar(require("./hasher-service"), exports);
19
+ __exportStar(require("./reconstruction-service"), exports);
20
+ __exportStar(require("./validation-service"), exports);
@@ -0,0 +1,99 @@
1
+ import { Readable } from 'stream';
2
+ import { FileEntry, DeltaPlan } from '../models';
3
+ import { Nullish } from '../types';
4
+ export declare const DEFAULT_IN_PLACE_RECONSTRUCTION_THRESHOLD: number;
5
+ export interface ReconstructionOptions {
6
+ /**
7
+ * Forces a rebuild even if the file hash matches.
8
+ */
9
+ forceRebuild?: Nullish<boolean>;
10
+ /**
11
+ * Verifies the reconstructed file hash after finishing.
12
+ * If hash does not match, an error is thrown.
13
+ */
14
+ verifyAfterRebuild?: Nullish<boolean>;
15
+ /**
16
+ * Minimum file size (in bytes) required to perform an **in-place reconstruction** instead of using a temporary file.
17
+ * Default: `400 * 1024 * 1024` (400 MB).
18
+ *
19
+ * **In-place reconstruction:**
20
+ * The existing file is opened and updated directly by overwriting only the modified or missing chunks.
21
+ *
22
+ * **.tmp reconstruction:**
23
+ * The file is fully rebuilt in a temporary `.tmp` location using all chunks (new and existing), then replaced over the original file.
24
+ *
25
+ * **When to use:**
26
+ * In-place reconstruction is recommended for **large files**, as it avoids rewriting the entire file and significantly reduces disk space usage.
27
+ * However, it may be **unsafe for certain formats** (e.g., ZIP archives or databases) that are sensitive to partial writes or corruption.
28
+ * To disable in-place reconstruction entirely, set this value to `0`.
29
+ */
30
+ inPlaceReconstructionThreshold?: Nullish<number>;
31
+ /**
32
+ * How many files will be reconstructed concurrently (default is 5)
33
+ */
34
+ fileConcurrency?: Nullish<number>;
35
+ /**
36
+ * Callback that reports disk usage and, optionally, network speed (network values only for storage chunk sources using streaming download-reconstruction)
37
+ *
38
+ * @param reconstructProgress current reconstruction progress
39
+ * @param diskSpeed speed of disk write in bytes per second
40
+ * @param networkProgress current network progress if any
41
+ * @param networkSpeed download speed in bytes per second
42
+ */
43
+ onProgress?: (reconstructProgress: number, diskSpeed: number, networkProgress?: Nullish<number>, networkSpeed?: Nullish<number>) => void;
44
+ }
45
+ export interface ChunkSource {
46
+ /**
47
+ * Gets a chunk from the source.
48
+ */
49
+ getChunk(hash: string): Promise<Buffer>;
50
+ /**
51
+ * Retrieves multiple chunks concurrently.
52
+ */
53
+ getChunks?(hashes: string[], options?: Nullish<{
54
+ concurrency?: number;
55
+ }>): Promise<Map<string, Buffer>>;
56
+ /**
57
+ * Streams file chunks from storage concurrently.
58
+ * Can preserve original order or emit as workers complete.
59
+ *
60
+ * @param options.concurrency Number of parallel fetches (default 8)
61
+ * @param options.preserveOrder Whether to yield in input order (default true)
62
+ */
63
+ streamChunks?(hashes: string[], options?: Nullish<{
64
+ concurrency?: number;
65
+ preserveOrder?: boolean;
66
+ }>): AsyncGenerator<{
67
+ hash: string;
68
+ data: Readable;
69
+ }>;
70
+ }
71
+ export interface ReconstructionService {
72
+ /**
73
+ * Reconstructs a file in disk.
74
+ * Able to reconstruct a new file or an existing file.
75
+ *
76
+ * @param entry The `FileEntry` containing the list of chunks and path of the file
77
+ * @param outputPath The path where the file will be reconstructed.
78
+ * @param chunkSource the source implementations of the chunks
79
+ * @param options optional parameters for the reconstruction
80
+ */
81
+ reconstructFile(entry: FileEntry, outputPath: string, chunkSource: ChunkSource, options?: Nullish<ReconstructionOptions>): Promise<void>;
82
+ /**
83
+ * Reconstructs all files from a DeltaPlan in disk.
84
+ *
85
+ * @param plan The DeltaPlan containing the list of files and chunks.
86
+ * @param outputDir The dir where the files will be reconstructed.
87
+ * @param chunkSource the source implementations of the chunks
88
+ * @param options optional parameters for the reconstruction
89
+ */
90
+ reconstructAll(plan: DeltaPlan, outputDir: string, chunkSource: ChunkSource, options?: Nullish<ReconstructionOptions>): Promise<void>;
91
+ /**
92
+ * Reconstructs a file to stream.
93
+ *
94
+ * @param entry The FileEntry containing the list of chunks of the file
95
+ * @param chunkSource the source implementations of the chunks
96
+ */
97
+ reconstructToStream(entry: FileEntry, chunkSource: ChunkSource): Promise<Readable>;
98
+ }
99
+ //# sourceMappingURL=reconstruction-service.d.ts.map
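
The `ChunkSource` interface above is small enough to implement directly. Below is a minimal in-memory source plus a `reconstructAll` call (sketch; the root re-exports and the concrete `ReconstructionService` instance are assumptions — real memory/disk/storage sources live under dist/infrastructure/chunk-sources).

```ts
import type { ChunkSource, ReconstructionService, DeltaPlan } from 'rac-delta'; // assumed root re-exports

// Minimal in-memory ChunkSource backed by a Map of hash -> chunk data.
class MapChunkSource implements ChunkSource {
  constructor(private readonly chunks: Map<string, Buffer>) {}

  async getChunk(hash: string): Promise<Buffer> {
    const data = this.chunks.get(hash);
    if (!data) throw new Error(`chunk not found: ${hash}`);
    return data;
  }

  async getChunks(hashes: string[]): Promise<Map<string, Buffer>> {
    const result = new Map<string, Buffer>();
    for (const hash of hashes) result.set(hash, await this.getChunk(hash));
    return result;
  }
}

declare const reconstruction: ReconstructionService; // concrete implementation from the package

async function rebuild(plan: DeltaPlan, chunks: Map<string, Buffer>) {
  await reconstruction.reconstructAll(plan, './output', new MapChunkSource(chunks), {
    verifyAfterRebuild: true,
    inPlaceReconstructionThreshold: 0, // always rebuild via a temporary file
  });
}
```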
@@ -0,0 +1 @@
1
+ {"version":3,"file":"reconstruction-service.d.ts","sourceRoot":"","sources":["../../../src/core/services/reconstruction-service.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,QAAQ,CAAC;AAElC,OAAO,EAAE,SAAS,EAAE,SAAS,EAAE,MAAM,WAAW,CAAC;AACjD,OAAO,EAAE,OAAO,EAAE,MAAM,UAAU,CAAC;AAEnC,eAAO,MAAM,yCAAyC,QAAoB,CAAC;AAE3E,MAAM,WAAW,qBAAqB;IACpC;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC;IAEhC;;;OAGG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC;IAEtC;;;;;;;;;;;;;;OAcG;IACH,8BAA8B,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC,CAAC;IAEjD;;OAEG;IACH,eAAe,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC,CAAC;IAElC;;;;;;;OAOG;IACH,UAAU,CAAC,EAAE,CACX,mBAAmB,EAAE,MAAM,EAC3B,SAAS,EAAE,MAAM,EACjB,eAAe,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC,EACjC,YAAY,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC,KAC3B,IAAI,CAAC;CACX;AAED,MAAM,WAAW,WAAW;IAC1B;;OAEG;IACH,QAAQ,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC;IAExC;;OAEG;IACH,SAAS,CAAC,CACR,MAAM,EAAE,MAAM,EAAE,EAChB,OAAO,CAAC,EAAE,OAAO,CAAC;QAAE,WAAW,CAAC,EAAE,MAAM,CAAA;KAAE,CAAC,GAC1C,OAAO,CAAC,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC,CAAC;IAEhC;;;;;;OAMG;IACH,YAAY,CAAC,CACX,MAAM,EAAE,MAAM,EAAE,EAChB,OAAO,CAAC,EAAE,OAAO,CAAC;QAAE,WAAW,CAAC,EAAE,MAAM,CAAC;QAAC,aAAa,CAAC,EAAE,OAAO,CAAA;KAAE,CAAC,GACnE,cAAc,CAAC;QAAE,IAAI,EAAE,MAAM,CAAC;QAAC,IAAI,EAAE,QAAQ,CAAA;KAAE,CAAC,CAAC;CACrD;AAED,MAAM,WAAW,qBAAqB;IACpC;;;;;;;;OAQG;IACH,eAAe,CACb,KAAK,EAAE,SAAS,EAChB,UAAU,EAAE,MAAM,EAClB,WAAW,EAAE,WAAW,EACxB,OAAO,CAAC,EAAE,OAAO,CAAC,qBAAqB,CAAC,GACvC,OAAO,CAAC,IAAI,CAAC,CAAC;IAEjB;;;;;;;OAOG;IACH,cAAc,CACZ,IAAI,EAAE,SAAS,EACf,SAAS,EAAE,MAAM,EACjB,WAAW,EAAE,WAAW,EACxB,OAAO,CAAC,EAAE,OAAO,CAAC,qBAAqB,CAAC,GACvC,OAAO,CAAC,IAAI,CAAC,CAAC;IAEjB;;;;;OAKG;IACH,mBAAmB,CAAC,KAAK,EAAE,SAAS,EAAE,WAAW,EAAE,WAAW,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;CACpF"}
@@ -0,0 +1,4 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.DEFAULT_IN_PLACE_RECONSTRUCTION_THRESHOLD = void 0;
4
+ exports.DEFAULT_IN_PLACE_RECONSTRUCTION_THRESHOLD = 400 * 1024 * 1024; // 400MB
@@ -0,0 +1,18 @@
1
+ import { FileEntry, RDIndex } from '../models';
2
+ export interface ValidationService {
3
+ /**
4
+ * Validates a given file against its `FileEntry`
5
+ *
6
+ * @param entry
7
+ * @param path path of the file to validate
8
+ */
9
+ validateFile(entry: FileEntry, path: string): Promise<boolean>;
10
+ /**
11
+ * Validates all files of an `RDIndex`
12
+ *
13
+ * @param index rd-index
14
+ * @param basePath directory of the files
15
+ */
16
+ validateIndex(index: RDIndex, basePath: string): Promise<boolean>;
17
+ }
18
+ //# sourceMappingURL=validation-service.d.ts.map
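
Finally, a usage sketch for the `ValidationService` contract above, e.g. as a post-update integrity check. The root re-exports are assumptions, and `validation` stands in for a concrete implementation (the memory-validation-service in dist/infrastructure).

```ts
import type { ValidationService, RDIndex } from 'rac-delta'; // assumed root re-exports

declare const validation: ValidationService; // e.g. the memory-validation-service in dist/infrastructure
declare const remoteIndex: RDIndex;

// Check every file in the local directory against the remote rd-index.
async function verifyInstall(dir: string): Promise<boolean> {
  const ok = await validation.validateIndex(remoteIndex, dir);
  if (!ok) {
    // a corrupted install could be repaired by re-running the download pipeline with force: true
  }
  return ok;
}
```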