@stryke/hash 0.12.52 → 0.13.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +20 -0
- package/dist/index.cjs +20 -19
- package/dist/index.d.cts +2 -7
- package/dist/index.d.mts +2 -7
- package/dist/index.mjs +2 -7
- package/dist/neutral-CDggl_ee.cjs +739 -0
- package/dist/neutral-D7D-RtyE.mjs +601 -0
- package/dist/neutral-D7D-RtyE.mjs.map +1 -0
- package/dist/neutral-DqpQs0yN.d.cts +162 -0
- package/dist/neutral-DqpQs0yN.d.cts.map +1 -0
- package/dist/neutral-DvfYl4mT.d.mts +162 -0
- package/dist/neutral-DvfYl4mT.d.mts.map +1 -0
- package/dist/neutral.cjs +20 -14
- package/dist/neutral.d.cts +2 -5
- package/dist/neutral.d.mts +2 -5
- package/dist/neutral.mjs +2 -5
- package/dist/node.cjs +659 -0
- package/dist/node.d.cts +172 -0
- package/dist/node.d.cts.map +1 -0
- package/dist/node.d.mts +172 -0
- package/dist/node.d.mts.map +1 -0
- package/dist/node.mjs +613 -0
- package/dist/node.mjs.map +1 -0
- package/package.json +41 -24
- package/dist/_virtual/rolldown_runtime.cjs +0 -29
- package/dist/convert/src/array-buffer-to-string.cjs +0 -19
- package/dist/convert/src/array-buffer-to-string.mjs +0 -19
- package/dist/convert/src/array-buffer-to-string.mjs.map +0 -1
- package/dist/convert/src/neutral.cjs +0 -5
- package/dist/convert/src/neutral.mjs +0 -7
- package/dist/convert/src/parse-type-definition.cjs +0 -1
- package/dist/convert/src/parse-type-definition.mjs +0 -3
- package/dist/convert/src/string-to-uint8-array.cjs +0 -14
- package/dist/convert/src/string-to-uint8-array.mjs +0 -14
- package/dist/convert/src/string-to-uint8-array.mjs.map +0 -1
- package/dist/convert/src/string-to-utf8-array.cjs +0 -5
- package/dist/convert/src/string-to-utf8-array.mjs +0 -6
- package/dist/convert/src/string-to-utf8-array.mjs.map +0 -1
- package/dist/convert/src/utf8-array-to-string.cjs +0 -5
- package/dist/convert/src/utf8-array-to-string.mjs +0 -6
- package/dist/convert/src/utf8-array-to-string.mjs.map +0 -1
- package/dist/digest.cjs +0 -56
- package/dist/digest.d.cts +0 -32
- package/dist/digest.d.cts.map +0 -1
- package/dist/digest.d.mts +0 -32
- package/dist/digest.d.mts.map +0 -1
- package/dist/digest.mjs +0 -54
- package/dist/digest.mjs.map +0 -1
- package/dist/etag.cjs +0 -53
- package/dist/etag.d.cts +0 -22
- package/dist/etag.d.cts.map +0 -1
- package/dist/etag.d.mts +0 -22
- package/dist/etag.d.mts.map +0 -1
- package/dist/etag.mjs +0 -52
- package/dist/etag.mjs.map +0 -1
- package/dist/fs/src/list-files.cjs +0 -36
- package/dist/fs/src/list-files.d.cts +0 -7
- package/dist/fs/src/list-files.d.cts.map +0 -1
- package/dist/fs/src/list-files.d.mts +0 -7
- package/dist/fs/src/list-files.d.mts.map +0 -1
- package/dist/fs/src/list-files.mjs +0 -35
- package/dist/fs/src/list-files.mjs.map +0 -1
- package/dist/fs/src/read-file.cjs +0 -16
- package/dist/fs/src/read-file.mjs +0 -16
- package/dist/fs/src/read-file.mjs.map +0 -1
- package/dist/hash-files.cjs +0 -41
- package/dist/hash-files.d.cts +0 -24
- package/dist/hash-files.d.cts.map +0 -1
- package/dist/hash-files.d.mts +0 -24
- package/dist/hash-files.d.mts.map +0 -1
- package/dist/hash-files.mjs +0 -41
- package/dist/hash-files.mjs.map +0 -1
- package/dist/md5.cjs +0 -17
- package/dist/md5.d.cts +0 -12
- package/dist/md5.d.cts.map +0 -1
- package/dist/md5.d.mts +0 -12
- package/dist/md5.d.mts.map +0 -1
- package/dist/md5.mjs +0 -17
- package/dist/md5.mjs.map +0 -1
- package/dist/murmurhash.cjs +0 -22
- package/dist/murmurhash.d.cts +0 -23
- package/dist/murmurhash.d.cts.map +0 -1
- package/dist/murmurhash.d.mts +0 -23
- package/dist/murmurhash.d.mts.map +0 -1
- package/dist/murmurhash.mjs +0 -22
- package/dist/murmurhash.mjs.map +0 -1
- package/dist/path/src/is-type.cjs +0 -28
- package/dist/path/src/is-type.mjs +0 -29
- package/dist/path/src/is-type.mjs.map +0 -1
- package/dist/path/src/join-paths.cjs +0 -122
- package/dist/path/src/join-paths.mjs +0 -123
- package/dist/path/src/join-paths.mjs.map +0 -1
- package/dist/path/src/regex.cjs +0 -12
- package/dist/path/src/regex.mjs +0 -9
- package/dist/path/src/regex.mjs.map +0 -1
- package/dist/path/src/slash.cjs +0 -15
- package/dist/path/src/slash.mjs +0 -15
- package/dist/path/src/slash.mjs.map +0 -1
- package/dist/type-checks/src/index.cjs +0 -4
- package/dist/type-checks/src/index.mjs +0 -6
- package/dist/type-checks/src/is-buffer.cjs +0 -12
- package/dist/type-checks/src/is-buffer.mjs +0 -12
- package/dist/type-checks/src/is-buffer.mjs.map +0 -1
- package/dist/type-checks/src/is-collection.cjs +0 -1
- package/dist/type-checks/src/is-collection.mjs +0 -3
- package/dist/type-checks/src/is-string.cjs +0 -12
- package/dist/type-checks/src/is-string.mjs +0 -12
- package/dist/type-checks/src/is-string.mjs.map +0 -1
- package/dist/type-checks/src/type-detect.cjs +0 -15
- package/dist/type-checks/src/type-detect.mjs +0 -17
- package/dist/type-checks/src/type-detect.mjs.map +0 -1
- package/dist/xx-hash.cjs +0 -25
- package/dist/xx-hash.d.cts +0 -7
- package/dist/xx-hash.d.cts.map +0 -1
- package/dist/xx-hash.d.mts +0 -7
- package/dist/xx-hash.d.mts.map +0 -1
- package/dist/xx-hash.mjs +0 -23
- package/dist/xx-hash.mjs.map +0 -1
|
@@ -0,0 +1,739 @@
|
|
|
1
|
+
//#region rolldown:runtime
// Standard Rolldown bundler interop runtime: helpers for exposing CommonJS
// modules through ES-module-style namespace objects.
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Copy every own property of `from` onto `to` as a live getter, skipping keys
// `to` already owns and the single key named by `except`. Enumerability of
// each getter mirrors the source property's descriptor.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (var keys = __getOwnPropNames(from), i = 0, n = keys.length, key; i < n; i++) {
      key = keys[i];
      if (!__hasOwnProp.call(to, key) && key !== except) {
        __defProp(to, key, {
          // bind(null, key) freezes the key for this getter (avoids the
          // classic closure-over-loop-variable pitfall with `var`).
          get: ((k) => from[k]).bind(null, key),
          enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
        });
      }
    }
  }
  return to;
};
// Wrap a CommonJS export object as an ESM-style namespace: unless the module
// is already flagged `__esModule` (or isNodeMode is set), `target.default`
// points at the module itself, and named exports are exposed as live getters.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", {
  value: mod,
  enumerable: true
}) : target, mod));

//#endregion
|
|
28
|
+
let defu = require("defu");
|
|
29
|
+
defu = __toESM(defu);
|
|
30
|
+
let glob = require("glob");
|
|
31
|
+
let node_fs_promises = require("node:fs/promises");
|
|
32
|
+
let ohash = require("ohash");
|
|
33
|
+
let js_xxhash = require("js-xxhash");
|
|
34
|
+
|
|
35
|
+
//#region ../type-checks/src/is-null.ts
|
|
36
|
+
/**
 * Check if the provided value is exactly `null`.
 *
 * @param value - The value to type check
 * @returns An indicator specifying if the value provided is `null`
 */
const isNull = (value) => {
  // A strict equality comparison can never throw, so the former try/catch
  // wrapper (returning false on exception) was dead code and has been removed.
  return value === null;
};
|
|
43
|
+
|
|
44
|
+
//#endregion
|
|
45
|
+
//#region ../type-checks/src/is-undefined.ts
|
|
46
|
+
/**
 * Check if the provided value is `undefined`.
 *
 * @param value - The value to type check
 * @returns An indicator specifying if the value provided is `undefined`
 */
const isUndefined = (value) => {
  return typeof value === "undefined";
};
|
|
49
|
+
|
|
50
|
+
//#endregion
|
|
51
|
+
//#region ../type-checks/src/is-empty.ts
|
|
52
|
+
/**
 * Check if the provided value's type is `null` or `undefined`
 *
 * @param value - The value to type check
 * @returns An indicator specifying if the value provided is of type `null` or `undefined`
 */
const isEmpty = (value) => {
  // Inlined equivalent of `isUndefined(value) || isNull(value)`; neither
  // comparison can throw, so no exception guard is needed.
  return value === void 0 || value === null;
};
|
|
65
|
+
|
|
66
|
+
//#endregion
|
|
67
|
+
//#region ../type-checks/src/is-buffer.ts
|
|
68
|
+
// True when a Node-style `Buffer` global is available in this runtime.
const isBufferExists = typeof Buffer !== "undefined";
/**
 * Check if the provided value's type is `Buffer`
 */
const isBuffer = isBufferExists
  ? Buffer.isBuffer.bind(Buffer)
  : function isBuffer$1(value) {
      // Without a Buffer implementation nothing can be a Buffer.
      return false;
    };
|
|
75
|
+
|
|
76
|
+
//#endregion
|
|
77
|
+
//#region ../type-checks/src/type-detect.ts
|
|
78
|
+
// Resolve a reference to the global object (ported from the `type-detect`
// package's global-detection trick).
// NOTE(review): in any modern runtime the first branch wins and this simply
// returns `globalThis`. The fallback below looks like vestigial transpiler
// output: it installs a getter named `typeDetectGlobalObject` on
// `Object.prototype` but never reads it back nor deletes it, and then still
// returns the (possibly undefined) `globalThis` binding — confirm against the
// original type-detect source before relying on non-globalThis environments.
const globalObject = ((Obj) => {
  if (typeof globalThis === "object") return globalThis;
  Object.defineProperty(Obj, "typeDetectGlobalObject", {
    get() {
      return this;
    },
    configurable: true
  });
  return globalThis;
})(Object.prototype);
|
|
88
|
+
|
|
89
|
+
//#endregion
|
|
90
|
+
//#region ../type-checks/src/is-string.ts
|
|
91
|
+
/**
 * Check if the provided value's type is `string`.
 *
 * @param value - The value to type check
 * @returns An indicator specifying if the value provided is of type `string`
 */
const isString = (value) => {
  // `typeof` never throws, so the former try/catch wrapper was dead code.
  return typeof value === "string";
};
|
|
98
|
+
|
|
99
|
+
//#endregion
|
|
100
|
+
//#region ../type-checks/src/is-set.ts
|
|
101
|
+
/**
 * The inverse of the `isEmpty` function
 *
 * @param value - The value to type check
 * @returns An indicator specifying if the value provided is **NOT** of type `null` or `undefined`
 */
const isSet = (value) => {
  // Inlined negation of the isEmpty check; strict comparisons cannot throw.
  return value !== null && value !== void 0;
};
|
|
114
|
+
|
|
115
|
+
//#endregion
|
|
116
|
+
//#region ../type-checks/src/is-set-string.ts
|
|
117
|
+
/**
 * Determine if the type is string and is not empty (length greater than zero)
 *
 * @param value - The value to type check
 * @returns An indicator specifying if the value provided is of type `string` and length greater than zero
 */
const isSetString = (value) => {
  // Any value passing the `typeof` check is necessarily non-null and defined,
  // so the original `isSet(value) && isString(value)` pair collapses to one test.
  return typeof value === "string" && value.length > 0;
};
|
|
130
|
+
|
|
131
|
+
//#endregion
|
|
132
|
+
//#region src/digest.ts
|
|
133
|
+
/**
 * Creates a new hash object for the specified algorithm.
 *
 * @param algorithm - The algorithm to use for the hash.
 * @returns A new hash object.
 */
function createHasher(algorithm) {
  const hasher = new Hasher(algorithm);
  return hasher;
}
|
|
142
|
+
/**
|
|
143
|
+
* Creates a new hash object for the specified algorithm.
|
|
144
|
+
*
|
|
145
|
+
* @remarks
|
|
146
|
+
* This function uses the Web Crypto API to create a hash of the input data.
|
|
147
|
+
*
|
|
148
|
+
* @see https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/digest
|
|
149
|
+
*
|
|
150
|
+
* @param data - The data to hash.
|
|
151
|
+
* @param algorithm - The algorithm to use for the hash.
|
|
152
|
+
* @returns A hash string representation of the `data` parameter.
|
|
153
|
+
*/
|
|
154
|
+
async function digest(data, algorithm = "SHA-512") {
|
|
155
|
+
const encoder = new TextEncoder();
|
|
156
|
+
const arrayBuffer = await globalThis.crypto.subtle.digest(algorithm, isSetString(data) ? encoder.encode(data) : data);
|
|
157
|
+
return btoa(String.fromCharCode(...new Uint8Array(arrayBuffer))).replace(/\+/g, "-").replace(/\//g, "_").replace(/=/g, "");
|
|
158
|
+
}
|
|
159
|
+
/**
 * Alias for {@link digest}.
 */
const hash = digest;
/**
 * Hash a string or Uint8Array using SHA-256 and return the result as a base64url-encoded string.
 *
 * @param input - The data to hash.
 * @returns A hash string representation of the `input` parameter.
 */
const sha256 = async (input) => digest(input, "SHA-256");
/**
 * Hash a string or Uint8Array using SHA-384 and return the result as a base64url-encoded string.
 *
 * @param input - The data to hash.
 * @returns A hash string representation of the `input` parameter.
 */
const sha384 = async (input) => digest(input, "SHA-384");
/**
 * Hash a string or Uint8Array using SHA-512 and return the result as a base64url-encoded string.
 *
 * @param input - The data to hash.
 * @returns A hash string representation of the `input` parameter.
 */
const sha512 = async (input) => digest(input, "SHA-512");
|
|
184
|
+
var Hasher = class {
|
|
185
|
+
#chunks = [];
|
|
186
|
+
#algorithm;
|
|
187
|
+
constructor(algorithm) {
|
|
188
|
+
this.#algorithm = algorithm;
|
|
189
|
+
}
|
|
190
|
+
update(data) {
|
|
191
|
+
this.#chunks.push(data);
|
|
192
|
+
}
|
|
193
|
+
async digest() {
|
|
194
|
+
const data = new Uint8Array(this.#chunks.reduce((acc, chunk) => acc + chunk.length, 0));
|
|
195
|
+
let offset = 0;
|
|
196
|
+
for (const chunk of this.#chunks) {
|
|
197
|
+
data.set(chunk, offset);
|
|
198
|
+
offset += chunk.length;
|
|
199
|
+
}
|
|
200
|
+
const arrayBuffer = await globalThis.crypto.subtle.digest(this.#algorithm, data);
|
|
201
|
+
return new Uint8Array(arrayBuffer);
|
|
202
|
+
}
|
|
203
|
+
};
|
|
204
|
+
|
|
205
|
+
//#endregion
|
|
206
|
+
//#region src/etag.ts
|
|
207
|
+
/**
 * FNV-1a Hash implementation
 *
 * Ported from https://github.com/tjwebb/fnv-plus/blob/master/index.js
 *
 * @remarks
 * Simplified, optimized and modified for 52 bit, which provides a larger hash space
 * and still making use of Javascript's 53-bit integer space.
 *
 * @param str - The string to hash (iterated by UTF-16 code unit)
 * @returns A 52-bit integer hash (always an exact JS number)
 */
const fnv1a52 = (str) => {
  const len = str.length;
  let i = 0;
  // State is four 16-bit limbs v0..v3 (low to high); t0..t3 hold the
  // per-limb multiplication temporaries during carry propagation.
  let t0 = 0;
  let v0 = 8997;
  let t1 = 0;
  let v1 = 33826;
  let t2 = 0;
  let v2 = 40164;
  let t3 = 0;
  let v3 = 52210;
  while (i < len) {
    // XOR the next code unit into the low limb, then multiply the whole
    // state limb-by-limb and propagate the carries between limbs.
    v0 ^= str.charCodeAt(i++);
    t0 = v0 * 435;
    t1 = v1 * 435;
    t2 = v2 * 435;
    t3 = v3 * 435;
    t2 += v0 << 8;
    t3 += v1 << 8;
    t1 += t0 >>> 16;
    v0 = t0 & 65535;
    t2 += t1 >>> 16;
    v1 = t1 & 65535;
    v3 = t3 + (t2 >>> 16) & 65535;
    v2 = t2 & 65535;
  }
  // Recombine the limbs, masking the top limb to 4 bits so the result fits
  // in 52 bits — safely inside JavaScript's 53-bit exact-integer range.
  return (v3 & 15) * 281474976710656 + v2 * 4294967296 + v1 * 65536 + (v0 ^ v3 >> 4);
};
|
|
244
|
+
/**
 * Generates an ETag for the given payload.
 *
 * @param payload - The payload to generate an ETag for.
 * @param weak - Whether to generate a weak ETag.
 * @returns The generated ETag.
 */
const generateETag = (payload, weak = false) => {
  // Weak ETags carry the RFC 9110 "W/" prefix before the opening quote.
  const opening = weak ? "W/\"" : "\"";
  const body = fnv1a52(payload).toString(36) + payload.length.toString(36);
  return `${opening}${body}"`;
};
|
|
254
|
+
|
|
255
|
+
//#endregion
|
|
256
|
+
//#region ../json/src/canonical.ts
|
|
257
|
+
/**
 * Converts a JavaScript value to a canonical JSON string representation. This function is used for signing JSON objects in a consistent way, ensuring that the same input will always produce the same output string. The canonicalization process includes:
 * - Sorting object keys in lexicographical order.
 * - Removing whitespace and line breaks.
 * - Representing primitive values (null, boolean, number, string) in their standard JSON format.
 * - Recursively applying these rules to nested objects and arrays.
 *
 * @param obj - The JavaScript value to convert to a canonical JSON string.
 * @returns A canonical JSON string representation of the input value.
 */
function canonicalJson(obj) {
  if (obj === null || obj === void 0) {
    return "null";
  }
  const kind = typeof obj;
  if (kind === "boolean" || kind === "number" || kind === "string") {
    return JSON.stringify(obj);
  }
  if (Array.isArray(obj)) {
    const items = obj.map((entry) => canonicalJson(entry));
    return `[${items.join(",")}]`;
  }
  if (kind === "object") {
    // Keys are sorted so logically-equal objects serialize identically.
    const pairs = Object.keys(obj)
      .sort()
      .map((key) => `${JSON.stringify(key)}:${canonicalJson(obj[key])}`);
    return `{${pairs.join(",")}}`;
  }
  // Functions, symbols, etc. have no canonical JSON form.
  return "null";
}
|
|
280
|
+
|
|
281
|
+
//#endregion
|
|
282
|
+
//#region src/hash-content.ts
|
|
283
|
+
/**
 * Hash the content of a PDU (Protocol Data Unit) by removing the `signatures` and `unsigned` fields, then hashing the remaining content using SHA-256 and encoding it as a base64url string.
 *
 * @param content - The PDU content to hash, represented as a record of string keys and unknown values.
 * @returns A promise that resolves to a base64url-encoded string representing the hash of the PDU content.
 */
async function hashContent(content) {
  // Strip the volatile fields via rest-destructuring so the caller's object
  // is untouched and the hash stays stable across re-signing.
  const { signatures, unsigned, ...toHash } = content;
  return sha256(canonicalJson(toHash));
}
/**
 * Verify the hash of a PDU (Protocol Data Unit) content by recomputing it with {@link hashContent} and comparing against an expected hash value.
 *
 * @param content - The PDU content to verify, represented as a record of string keys and unknown values.
 * @param expectedHash - The expected hash value to compare against, represented as a string.
 * @returns A promise that resolves to true when the computed hash matches the expected hash, false otherwise.
 */
async function verifyContent(content, expectedHash) {
  const actual = await hashContent(content);
  return actual === expectedHash;
}
|
|
305
|
+
|
|
306
|
+
//#endregion
|
|
307
|
+
//#region ../path/src/regex.ts
|
|
308
|
+
// Path-classification regexes (case-insensitive where drive letters apply).
// A Windows drive letter followed by a slash at the start of a path, e.g. "C:/x".
const DRIVE_LETTER_START_REGEX = /^[A-Z]:\//i;
// A string that is exactly a bare drive letter, e.g. "C:".
const DRIVE_LETTER_REGEX = /^[A-Z]:$/i;
// A UNC-style prefix: two leading slashes or backslashes.
const UNC_REGEX = /^[/\\]{2}/;
// Absolute paths: a single leading separator (not doubled), a doubled
// separator not followed by ".", a "~/" home prefix, or "C:/" / "C:\".
const ABSOLUTE_PATH_REGEX = /^[/\\](?![/\\])|^[/\\]{2}(?!\.)|^~[/\\]|^[A-Z]:[/\\]/i;
|
|
312
|
+
|
|
313
|
+
//#endregion
|
|
314
|
+
//#region ../path/src/is-type.ts
|
|
315
|
+
/**
 * Check if the path is an absolute path.
 *
 * @param path - The path to check
 * @returns An indicator specifying if the path is an absolute path
 */
function isAbsolutePath(path) {
  // Normalize backslashes first so Windows paths match the same regex.
  const normalized = slash(path);
  return ABSOLUTE_PATH_REGEX.test(normalized);
}
/**
 * Check if the path is an absolute path.
 *
 * @remarks
 * This is an alias for {@link isAbsolutePath}.
 *
 * @param path - The path to check
 * @returns An indicator specifying if the path is an absolute path
 */
function isAbsolute(path) {
  return isAbsolutePath(path);
}
|
|
336
|
+
|
|
337
|
+
//#endregion
|
|
338
|
+
//#region ../path/src/slash.ts
|
|
339
|
+
/**
 * Replace backslash to slash
 *
 * @param path - The string to replace
 * @returns The string with replaced backslashes
 */
function slash(path) {
  // Extended-length Windows paths ("\\?\...") must keep their backslashes.
  return path.startsWith("\\\\?\\") ? path : path.replace(/\\/g, "/");
}
|
|
349
|
+
|
|
350
|
+
//#endregion
|
|
351
|
+
//#region ../path/src/join-paths.ts
|
|
352
|
+
/**
 * Normalize a Windows-style path: convert backslashes to forward slashes and
 * upper-case any leading drive letter.
 *
 * @param input - The path to normalize (empty input is returned as-is)
 * @returns The normalized path string
 */
function normalizeWindowsPath(input = "") {
  if (!input) {
    return input;
  }
  const forwardSlashed = input.replace(/\\/g, "/");
  return forwardSlashed.replace(DRIVE_LETTER_START_REGEX, (match) => match.toUpperCase());
}
|
|
356
|
+
// Normalize a path string: windows-normalize, resolve "." / ".." segments,
// and restore UNC ("//server") or drive-letter forms the normalization strips.
function correctPaths(path) {
  if (!path || path.length === 0) return ".";
  path = normalizeWindowsPath(path);
  // Remember structural facts destroyed by normalizeString below.
  const isUNCPath = path.match(UNC_REGEX);
  const isPathAbsolute = isAbsolute(path);
  const trailingSeparator = path[path.length - 1] === "/";
  // Allow ".." to climb above the root only for relative inputs.
  path = normalizeString(path, !isPathAbsolute);
  if (path.length === 0) {
    if (isPathAbsolute) return "/";
    return trailingSeparator ? "./" : ".";
  }
  if (trailingSeparator) path += "/";
  // A bare drive letter ("C:") needs a trailing slash to stay a root.
  if (DRIVE_LETTER_REGEX.test(path)) path += "/";
  if (isUNCPath) {
    // Re-attach the UNC prefix normalizeString collapsed away.
    if (!isPathAbsolute) return `//./${path}`;
    return `//${path}`;
  }
  // Restore a leading "/" if the input was absolute but the result is not.
  return isPathAbsolute && !isAbsolute(path) ? `/${path}` : path;
}
|
|
375
|
+
/**
 * Joins all given path segments together using the platform-specific separator as a delimiter.
 *
 * @remarks
 * Multiple segments can be provided as separate arguments. The resulting path is normalized to remove any redundant or unnecessary segments.
 *
 * @example
 * ```ts
 * import { joinPaths } from 'stryke/path';
 *
 * const fullPath = joinPaths('folder1', 'folder2', '..', 'folder3', 'file.txt');
 * console.log(fullPath); // Output: 'folder1/folder3/file.txt'
 *
 * const absolutePath = joinPaths('/root', 'folder', '.', 'subfolder', 'file.txt');
 * console.log(absolutePath); // Output: '/root/folder/subfolder/file.txt'
 *
 * const windowsPath = joinPaths('C:\\', 'Users', 'Public', '..', 'Documents', 'file.txt');
 * console.log(windowsPath); // Output: 'C:/Users/Documents/file.txt'
 *
 * const uncPath = joinPaths('\\\\Server\\Share', 'Folder', 'File.txt');
 * console.log(uncPath); // Output: '//Server/Share/Folder/File.txt'
 * ```
 *
 * @param segments - The path segments to join.
 * @returns The joined and normalized path string.
 */
function joinPaths(...segments) {
  let result = "";
  for (const segment of segments) {
    if (!segment) continue;
    // Hoist the repeatedly-recomputed `slash(segment).replaceAll(...)` value;
    // `bare` is the segment with every separator stripped, used to classify
    // it as "." (no-op) or ".." (pop one component).
    const normalized = slash(segment);
    const bare = normalized.replaceAll("/", "");
    if (bare === ".") continue;
    if (!result) {
      // First contributing segment; a leading ".." has nothing to pop.
      if (bare !== "..") result = segment;
    } else if (bare === "..") {
      // Drop the last path component from the accumulated result.
      result = slash(result).replace(/\/+$/, "").replace(/\/*[^/]+$/, "");
    } else {
      // Append, collapsing duplicate separators at the join point.
      result = `${slash(result).replace(/\/+$/, "")}/${normalized.replace(/^\/+/, "")}`;
    }
  }
  return correctPaths(result);
}
|
|
410
|
+
/**
 * Resolves a string path, resolving '.' and '..' segments and optionally allowing paths above the root.
 *
 * @param path - The path to normalize.
 * @param allowAboveRoot - Whether to allow the resulting path to be above the root directory.
 * @returns the normalized path string.
 */
function normalizeString(path, allowAboveRoot) {
  // Single-pass state machine ported from Node's path normalization:
  // `res` accumulates the output, `lastSlash` marks the previous separator,
  // and `dots` counts consecutive '.' characters in the current segment
  // (-1 once the segment contains any other character).
  let res = "";
  let lastSegmentLength = 0;
  let lastSlash = -1;
  let dots = 0;
  let char = null;
  // Iterate one past the end so the final segment is flushed like any other.
  for (let index = 0; index <= path.length; ++index) {
    if (index < path.length) char = path[index];
    else if (char === "/") break;
    else char = "/";
    if (char === "/") {
      // Empty segment or a lone "." — skip it entirely.
      if (lastSlash === index - 1 || dots === 1) {} else if (dots === 2) {
        // A ".." segment: pop the previous component unless the tail of
        // `res` is itself a ".." (which must be preserved).
        if (res.length < 2 || lastSegmentLength !== 2 || res[res.length - 1] !== "." || res[res.length - 2] !== ".") {
          if (res.length > 2) {
            const lastSlashIndex = res.lastIndexOf("/");
            if (lastSlashIndex === -1) {
              res = "";
              lastSegmentLength = 0;
            } else {
              res = res.slice(0, lastSlashIndex);
              lastSegmentLength = res.length - 1 - res.lastIndexOf("/");
            }
            lastSlash = index;
            dots = 0;
            continue;
          } else if (res.length > 0) {
            res = "";
            lastSegmentLength = 0;
            lastSlash = index;
            dots = 0;
            continue;
          }
        }
        // Nothing left to pop: keep the ".." only when climbing above the
        // root is permitted.
        if (allowAboveRoot) {
          res += res.length > 0 ? "/.." : "..";
          lastSegmentLength = 2;
        }
      } else {
        // Ordinary segment: append it to the output.
        if (res.length > 0) res += `/${path.slice(lastSlash + 1, index)}`;
        else res = path.slice(lastSlash + 1, index);
        lastSegmentLength = index - lastSlash - 1;
      }
      lastSlash = index;
      dots = 0;
    } else if (char === "." && dots !== -1) ++dots;
    else dots = -1;
  }
  return res;
}
|
|
466
|
+
|
|
467
|
+
//#endregion
|
|
468
|
+
//#region ../fs/src/list-files.ts
|
|
469
|
+
// Glob defaults applied last: include dotfiles unless overridden.
const DEFAULT_OPTIONS = { dot: true };
/**
 * A files and directories listing helper function
 *
 * @param filesGlob - A glob pattern to match files (a string, or an object with `glob`/`input`/`dot`/`ignore` fields)
 * @param options - Additional glob options merged over the pattern-derived ones
 * @returns A list of file paths
 */
async function list(filesGlob, options) {
  let pattern;
  let patternOptions;
  if (isString(filesGlob)) {
    // A bare directory (no wildcard) is expanded to match everything inside.
    pattern = filesGlob.includes("*") ? filesGlob : joinPaths(filesGlob, "**/*");
    patternOptions = {};
  } else {
    pattern = filesGlob.input ? joinPaths(filesGlob.input, filesGlob.glob) : filesGlob.glob;
    patternOptions = {
      dot: filesGlob.dot,
      ignore: filesGlob.ignore
    };
  }
  return (0, glob.glob)(pattern, (0, defu.default)(patternOptions, options ?? {}, DEFAULT_OPTIONS));
}
|
|
482
|
+
/**
 * A file listing helper function
 *
 * @param filesGlob - A glob pattern to match files
 * @param options - Additional glob options; when `withFileTypes` is set, Dirent-style entries are returned instead of path strings
 * @returns A list of file paths
 */
async function listFiles(filesGlob, options) {
  // Always request file-type entries internally so directories can be filtered out.
  const entries = await list(filesGlob, (0, defu.default)({ withFileTypes: true }, options ?? {}));
  const onlyFiles = entries.filter((entry) => entry.isFile());
  if (options?.withFileTypes) {
    return onlyFiles;
  }
  return onlyFiles.map((file) => file.fullpath());
}
|
|
493
|
+
|
|
494
|
+
//#endregion
|
|
495
|
+
//#region ../fs/src/read-file.ts
|
|
496
|
+
/**
 * Read the content of the given file path as UTF-8 text.
 *
 * @param filePath - The file path to read from
 * @returns A promise resolving to the file's text content
 * @throws Error when no file path is provided
 */
const readFile = async (filePath) => {
  if (!filePath) {
    throw new Error("No file path provided to read data");
  }
  const contents = await (0, node_fs_promises.readFile)(filePath, { encoding: "utf8" });
  return contents;
};
|
|
505
|
+
|
|
506
|
+
//#endregion
|
|
507
|
+
//#region src/murmurhash.ts
|
|
508
|
+
/**
 * Use a [MurmurHash3](https://en.wikipedia.org/wiki/MurmurHash) based algorithm to hash any JS value into a string.
 *
 * @see https://github.com/ohash/ohash
 * @see https://en.wikipedia.org/wiki/MurmurHash
 *
 * @param content - The value to hash
 * @param options - Hashing options (`maxLength` caps the output, default 32)
 * @returns A hashed string value
 */
function murmurhash(content, options) {
  const hashed = (0, ohash.hash)(content);
  const maxLength = options?.maxLength ?? 32;
  // slice() is a no-op copy when the hash is already short enough.
  return hashed.slice(0, maxLength);
}
|
|
523
|
+
|
|
524
|
+
//#endregion
|
|
525
|
+
//#region src/hash-files.ts
|
|
526
|
+
/**
 * Hash a list of file paths into a string based on the file content
 *
 * @param files - The list of file paths to hash
 * @param options - Hashing options
 * @returns A hashed string value
 */
async function hashFiles(files, options) {
  const result = {};
  // Read all files concurrently; keyed by path so the hash also reflects
  // which file each piece of content came from.
  await Promise.all(files.map(async (file) => {
    result[file] = await readFile(file);
  }));
  return murmurhash(result, options);
}
/**
 * Hash a folder path into a string based on the file content
 *
 * @param directoryPath - The folder path to hash
 * @param options - Hashing options. By default, the `node_modules`, `.git`, `.nx`, `.cache`, `.storm`, and `tmp` folders are ignored.
 * @returns A hashed string value
 */
async function hashDirectory(directoryPath, options = {}) {
  // Build a local copy instead of mutating the caller's options object
  // (the previous implementation wrote the default `ignore` list back onto
  // the argument as a side effect).
  const resolved = {
    ...options,
    ignore: options.ignore ?? [
      "**/node_modules/**",
      "**/.git/**",
      "**/.nx/**",
      "**/.cache/**",
      "**/.storm/**",
      "**/tmp/**"
    ]
  };
  return hashFiles(await listFiles(directoryPath, resolved), resolved);
}
|
|
558
|
+
|
|
559
|
+
//#endregion
|
|
560
|
+
//#region src/pbkdf2.ts
|
|
561
|
+
/**
|
|
562
|
+
* Hash a password using PBKDF2 (Web Crypto compatible alternative to Argon2) with SHA-256, 100,000 iterations, and a random salt. The resulting hash is formatted as: `$pbkdf2-sha256$iterations$salt$hash`.
|
|
563
|
+
*
|
|
564
|
+
* @remarks
|
|
565
|
+
* This function uses the Web Crypto API to perform password hashing. It generates a random salt for each password, and uses PBKDF2 with SHA-256 and 100,000 iterations to derive a secure hash. The output is a string that includes the algorithm, iteration count, salt, and hash, which can be stored in a database for later verification using the {@link verifyPassword} function.
|
|
566
|
+
*
|
|
567
|
+
* @param password - The password to hash.
|
|
568
|
+
* @returns A promise that resolves to the hashed password string.
|
|
569
|
+
*/
|
|
570
|
+
async function hashPassword(password) {
|
|
571
|
+
const encoder = new TextEncoder();
|
|
572
|
+
const salt = crypto.getRandomValues(new Uint8Array(16));
|
|
573
|
+
const keyMaterial = await crypto.subtle.importKey("raw", encoder.encode(password), "PBKDF2", false, ["deriveBits"]);
|
|
574
|
+
const hash$1 = await crypto.subtle.deriveBits({
|
|
575
|
+
name: "PBKDF2",
|
|
576
|
+
salt,
|
|
577
|
+
iterations: 1e5,
|
|
578
|
+
hash: "SHA-256"
|
|
579
|
+
}, keyMaterial, 256);
|
|
580
|
+
return `$pbkdf2-sha256$100000$${btoa(String.fromCharCode(...salt))}$${btoa(String.fromCharCode(...new Uint8Array(hash$1)))}`;
|
|
581
|
+
}
|
|
582
|
+
/**
|
|
583
|
+
* Verify a password against a stored hash in the format produced by {@link hashPassword}.
|
|
584
|
+
*
|
|
585
|
+
* @param password - The password to verify.
|
|
586
|
+
* @param storedHash - The stored hash to verify against.
|
|
587
|
+
* @returns A promise that resolves to true if the password is correct, false otherwise.
|
|
588
|
+
*/
|
|
589
|
+
async function verifyPassword(password, storedHash) {
|
|
590
|
+
const parts = storedHash.split("$");
|
|
591
|
+
if (parts.length !== 5 || parts[1] !== "pbkdf2-sha256" || !parts[2] || !parts[3] || !parts[4]) return false;
|
|
592
|
+
const iterations = Number.parseInt(parts[2], 10);
|
|
593
|
+
const salt = Uint8Array.from(atob(parts[3]), (c) => c.charCodeAt(0));
|
|
594
|
+
const expectedHash = parts[4];
|
|
595
|
+
const encoder = new TextEncoder();
|
|
596
|
+
const keyMaterial = await crypto.subtle.importKey("raw", encoder.encode(password), "PBKDF2", false, ["deriveBits"]);
|
|
597
|
+
const hash$1 = await crypto.subtle.deriveBits({
|
|
598
|
+
name: "PBKDF2",
|
|
599
|
+
salt,
|
|
600
|
+
iterations,
|
|
601
|
+
hash: "SHA-256"
|
|
602
|
+
}, keyMaterial, 256);
|
|
603
|
+
return btoa(String.fromCharCode(...new Uint8Array(hash$1))) === expectedHash;
|
|
604
|
+
}
|
|
605
|
+
|
|
606
|
+
//#endregion
|
|
607
|
+
//#region src/xx-hash.ts
|
|
608
|
+
/**
 * xxHash32 only computes 32-bit values. Run it n times with different seeds to
 * get a larger hash with better collision resistance.
 *
 * @param content - The string to hash
 * @param words - The number of 32-bit words to compute and concatenate
 * @returns A BigInt of `words * 32` bits (each seeded run fills one 32-bit slot)
 */
function _xxHash32(content, words) {
	// One xxHash32 run per word, seeded 0..words-1, each kept as a BigInt.
	const segments = Array.from(
		{ length: words },
		(_, seed) => BigInt((0, js_xxhash.xxHash32)(content, seed))
	);
	// Concatenate the 32-bit segments, earliest seed in the highest bits.
	return segments.reduce((combined, segment) => (combined << 32n) + segment, 0n);
}
|
|
621
|
+
/** 32-bit xxHash of a string, seeded with 0; returns a plain number. */
const xxHash32 = (content) => (0, js_xxhash.xxHash32)(content, 0);
/** 64-bit hash (as a BigInt) built from two seeded xxHash32 runs. */
const xxHash64 = (content) => _xxHash32(content, 2);
/** 128-bit hash (as a BigInt) built from four seeded xxHash32 runs. */
const xxHash128 = (content) => _xxHash32(content, 4);
|
|
624
|
+
|
|
625
|
+
//#endregion
|
|
626
|
+
// Lazy CommonJS re-exports: each property resolves its binding at access
// time via a getter, so consumers always observe the live binding regardless
// of evaluation order inside the bundle.
for (const [name, get] of [
	["Hasher", () => Hasher],
	["createHasher", () => createHasher],
	["digest", () => digest],
	["fnv1a52", () => fnv1a52],
	["generateETag", () => generateETag],
	["hash", () => hash],
	["hashContent", () => hashContent],
	["hashDirectory", () => hashDirectory],
	["hashFiles", () => hashFiles],
	["hashPassword", () => hashPassword],
	["murmurhash", () => murmurhash],
	["sha256", () => sha256],
	["sha384", () => sha384],
	["sha512", () => sha512],
	["verifyContent", () => verifyContent],
	["verifyPassword", () => verifyPassword],
	["xxHash128", () => xxHash128],
	["xxHash32", () => xxHash32],
	["xxHash64", () => xxHash64]
]) {
	Object.defineProperty(exports, name, {
		enumerable: true,
		get
	});
}
|