@stryke/hash 0.12.37 → 0.12.39

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. package/CHANGELOG.md +13 -0
  2. package/dist/_virtual/rolldown_runtime.cjs +29 -1
  3. package/dist/convert/src/array-buffer-to-string.cjs +19 -1
  4. package/dist/convert/src/array-buffer-to-string.mjs +18 -1
  5. package/dist/convert/src/array-buffer-to-string.mjs.map +1 -1
  6. package/dist/convert/src/neutral.cjs +5 -1
  7. package/dist/convert/src/neutral.mjs +7 -1
  8. package/dist/convert/src/parse-type-definition.cjs +1 -1
  9. package/dist/convert/src/parse-type-definition.mjs +3 -1
  10. package/dist/convert/src/string-to-uint8-array.cjs +14 -1
  11. package/dist/convert/src/string-to-uint8-array.mjs +13 -1
  12. package/dist/convert/src/string-to-uint8-array.mjs.map +1 -1
  13. package/dist/convert/src/string-to-utf8-array.cjs +5 -1
  14. package/dist/convert/src/string-to-utf8-array.mjs +5 -1
  15. package/dist/convert/src/string-to-utf8-array.mjs.map +1 -1
  16. package/dist/convert/src/utf8-array-to-string.cjs +5 -1
  17. package/dist/convert/src/utf8-array-to-string.mjs +5 -1
  18. package/dist/convert/src/utf8-array-to-string.mjs.map +1 -1
  19. package/dist/digest.cjs +56 -1
  20. package/dist/digest.mjs +53 -1
  21. package/dist/digest.mjs.map +1 -1
  22. package/dist/etag.cjs +53 -1
  23. package/dist/etag.mjs +51 -1
  24. package/dist/etag.mjs.map +1 -1
  25. package/dist/fs/src/list-files.cjs +36 -1
  26. package/dist/fs/src/list-files.mjs +34 -1
  27. package/dist/fs/src/list-files.mjs.map +1 -1
  28. package/dist/fs/src/read-file.cjs +16 -1
  29. package/dist/fs/src/read-file.mjs +15 -1
  30. package/dist/fs/src/read-file.mjs.map +1 -1
  31. package/dist/hash-files.cjs +41 -1
  32. package/dist/hash-files.mjs +40 -1
  33. package/dist/hash-files.mjs.map +1 -1
  34. package/dist/index.cjs +20 -1
  35. package/dist/index.mjs +8 -1
  36. package/dist/md5.cjs +17 -1
  37. package/dist/md5.mjs +16 -1
  38. package/dist/md5.mjs.map +1 -1
  39. package/dist/murmurhash.cjs +22 -1
  40. package/dist/murmurhash.mjs +21 -1
  41. package/dist/murmurhash.mjs.map +1 -1
  42. package/dist/neutral.cjs +15 -1
  43. package/dist/neutral.mjs +6 -1
  44. package/dist/path/src/is-type.cjs +28 -1
  45. package/dist/path/src/is-type.mjs +28 -1
  46. package/dist/path/src/is-type.mjs.map +1 -1
  47. package/dist/path/src/join-paths.cjs +106 -1
  48. package/dist/path/src/join-paths.mjs +106 -1
  49. package/dist/path/src/join-paths.mjs.map +1 -1
  50. package/dist/path/src/regex.cjs +12 -1
  51. package/dist/path/src/regex.mjs +8 -1
  52. package/dist/path/src/regex.mjs.map +1 -1
  53. package/dist/path/src/slash.cjs +15 -1
  54. package/dist/path/src/slash.mjs +14 -1
  55. package/dist/path/src/slash.mjs.map +1 -1
  56. package/dist/type-checks/src/index.cjs +4 -1
  57. package/dist/type-checks/src/index.mjs +6 -1
  58. package/dist/type-checks/src/is-buffer.cjs +12 -1
  59. package/dist/type-checks/src/is-buffer.mjs +11 -1
  60. package/dist/type-checks/src/is-buffer.mjs.map +1 -1
  61. package/dist/type-checks/src/is-collection.cjs +1 -1
  62. package/dist/type-checks/src/is-collection.mjs +3 -1
  63. package/dist/type-checks/src/is-string.cjs +12 -1
  64. package/dist/type-checks/src/is-string.mjs +11 -1
  65. package/dist/type-checks/src/is-string.mjs.map +1 -1
  66. package/dist/type-checks/src/type-detect.cjs +15 -1
  67. package/dist/type-checks/src/type-detect.mjs +16 -1
  68. package/dist/type-checks/src/type-detect.mjs.map +1 -1
  69. package/dist/xx-hash.cjs +25 -1
  70. package/dist/xx-hash.mjs +22 -1
  71. package/dist/xx-hash.mjs.map +1 -1
  72. package/package.json +2 -2
@@ -1 +1,16 @@
1
- const e=require(`../../_virtual/rolldown_runtime.cjs`);let t=require(`node:fs/promises`);const n=async e=>{if(!e)throw Error(`No file path provided to read data`);return(0,t.readFile)(e,{encoding:`utf8`})};exports.readFile=n;
1
+ const require_rolldown_runtime = require('../../_virtual/rolldown_runtime.cjs');
2
+ let node_fs_promises = require("node:fs/promises");
3
+
4
+ //#region ../fs/src/read-file.ts
5
+ /**
6
+ * Read the given content to the given file path
7
+ *
8
+ * @param filePath - The file path to read to
9
+ */
10
+ const readFile = async (filePath) => {
11
+ if (!filePath) throw new Error("No file path provided to read data");
12
+ return (0, node_fs_promises.readFile)(filePath, { encoding: "utf8" });
13
+ };
14
+
15
+ //#endregion
16
+ exports.readFile = readFile;
@@ -1,2 +1,16 @@
1
- import{readFile as e}from"node:fs/promises";const t=async t=>{if(!t)throw Error(`No file path provided to read data`);return e(t,{encoding:`utf8`})};export{t as readFile};
1
+ import { readFile } from "node:fs/promises";
2
+
3
+ //#region ../fs/src/read-file.ts
4
+ /**
5
+ * Read the given content to the given file path
6
+ *
7
+ * @param filePath - The file path to read to
8
+ */
9
+ const readFile$1 = async (filePath) => {
10
+ if (!filePath) throw new Error("No file path provided to read data");
11
+ return readFile(filePath, { encoding: "utf8" });
12
+ };
13
+
14
+ //#endregion
15
+ export { readFile$1 as readFile };
2
16
  //# sourceMappingURL=read-file.mjs.map
@@ -1 +1 @@
1
- {"version":3,"file":"read-file.mjs","names":["readFile","readFileFs"],"sources":["../../../../fs/src/read-file.ts"],"sourcesContent":["/* -------------------------------------------------------------------\n\n ⚡ Storm Software - Stryke\n\n This code was released as part of the Stryke project. Stryke\n is maintained by Storm Software under the Apache-2.0 license, and is\n free for commercial and private use. For more information, please visit\n our licensing page at https://stormsoftware.com/licenses/projects/stryke.\n\n Website: https://stormsoftware.com\n Repository: https://github.com/storm-software/stryke\n Documentation: https://docs.stormsoftware.com/projects/stryke\n Contact: https://stormsoftware.com/contact\n\n SPDX-License-Identifier: Apache-2.0\n\n ------------------------------------------------------------------- */\n\nimport { existsSync, readFileSync as readFileSyncFs } from \"node:fs\";\nimport { readFile as readFileFs } from \"node:fs/promises\";\n\n/**\n * Read the given content to the given file path\n *\n * @param filePath - The file path to write to\n */\nexport const readFileSync = (filePath: string): string => {\n if (!filePath) {\n throw new Error(\"No file path provided to read data\");\n }\n\n return readFileSyncFs(filePath, { encoding: \"utf8\" });\n};\n\n/**\n * Read the given content to the given file path\n *\n * @param filePath - The file path to read to\n */\nexport const readFile = async (filePath: string): Promise<string> => {\n if (!filePath) {\n throw new Error(\"No file path provided to read data\");\n }\n\n return readFileFs(filePath, { encoding: \"utf8\" });\n};\n\n/**\n * Reads a file if it exists, otherwise returns an empty string.\n *\n * @param path - The path to the file to read.\n * @returns The content of the file if it exists, otherwise an empty string.\n */\nexport function readFileIfExistingSync(path: string) {\n return existsSync(path) ? 
readFileSync(path) : \"\";\n}\n\n/**\n * Reads a file if it exists, otherwise returns an empty string.\n *\n * @param path - The path to the file to read.\n * @returns The content of the file if it exists, otherwise an empty string.\n */\nexport async function readFileIfExisting(path: string) {\n return existsSync(path) ? readFile(path) : \"\";\n}\n"],"mappings":"4CAuCA,MAAaA,EAAW,KAAO,IAAsC,CACnE,GAAI,CAAC,EACH,MAAU,MAAM,qCAAqC,CAGvD,OAAOC,EAAW,EAAU,CAAE,SAAU,OAAQ,CAAC"}
1
+ {"version":3,"file":"read-file.mjs","names":["readFile","readFileFs"],"sources":["../../../../fs/src/read-file.ts"],"sourcesContent":["/* -------------------------------------------------------------------\n\n ⚡ Storm Software - Stryke\n\n This code was released as part of the Stryke project. Stryke\n is maintained by Storm Software under the Apache-2.0 license, and is\n free for commercial and private use. For more information, please visit\n our licensing page at https://stormsoftware.com/licenses/projects/stryke.\n\n Website: https://stormsoftware.com\n Repository: https://github.com/storm-software/stryke\n Documentation: https://docs.stormsoftware.com/projects/stryke\n Contact: https://stormsoftware.com/contact\n\n SPDX-License-Identifier: Apache-2.0\n\n ------------------------------------------------------------------- */\n\nimport { existsSync, readFileSync as readFileSyncFs } from \"node:fs\";\nimport { readFile as readFileFs } from \"node:fs/promises\";\n\n/**\n * Read the given content to the given file path\n *\n * @param filePath - The file path to write to\n */\nexport const readFileSync = (filePath: string): string => {\n if (!filePath) {\n throw new Error(\"No file path provided to read data\");\n }\n\n return readFileSyncFs(filePath, { encoding: \"utf8\" });\n};\n\n/**\n * Read the given content to the given file path\n *\n * @param filePath - The file path to read to\n */\nexport const readFile = async (filePath: string): Promise<string> => {\n if (!filePath) {\n throw new Error(\"No file path provided to read data\");\n }\n\n return readFileFs(filePath, { encoding: \"utf8\" });\n};\n\n/**\n * Reads a file if it exists, otherwise returns an empty string.\n *\n * @param path - The path to the file to read.\n * @returns The content of the file if it exists, otherwise an empty string.\n */\nexport function readFileIfExistingSync(path: string) {\n return existsSync(path) ? 
readFileSync(path) : \"\";\n}\n\n/**\n * Reads a file if it exists, otherwise returns an empty string.\n *\n * @param path - The path to the file to read.\n * @returns The content of the file if it exists, otherwise an empty string.\n */\nexport async function readFileIfExisting(path: string) {\n return existsSync(path) ? readFile(path) : \"\";\n}\n"],"mappings":";;;;;;;;AAuCA,MAAaA,aAAW,OAAO,aAAsC;AACnE,KAAI,CAAC,SACH,OAAM,IAAI,MAAM,qCAAqC;AAGvD,QAAOC,SAAW,UAAU,EAAE,UAAU,QAAQ,CAAC"}
@@ -1 +1,41 @@
1
- const e=require(`./fs/src/list-files.cjs`),t=require(`./fs/src/read-file.cjs`),n=require(`./murmurhash.cjs`);async function r(e,r){let i={};return await Promise.all(e.map(async e=>{i[e]=await t.readFile(e)})),n.murmurhash(i,r)}async function i(t,n={}){return n.ignore=n.ignore??[`**/node_modules/**`,`**/.git/**`,`**/.nx/**`,`**/.cache/**`,`**/.storm/**`,`**/tmp/**`],r(await e.listFiles(t,n),n)}exports.hashDirectory=i,exports.hashFiles=r;
1
+ const require_list_files = require('./fs/src/list-files.cjs');
2
+ const require_read_file = require('./fs/src/read-file.cjs');
3
+ const require_murmurhash = require('./murmurhash.cjs');
4
+
5
+ //#region src/hash-files.ts
6
+ /**
7
+ * Hash a list of file paths into a string based on the file content
8
+ *
9
+ * @param files - The list of file paths to hash
10
+ * @param options - Hashing options
11
+ * @returns A hashed string value
12
+ */
13
+ async function hashFiles(files, options) {
14
+ const result = {};
15
+ await Promise.all(files.map(async (file) => {
16
+ result[file] = await require_read_file.readFile(file);
17
+ }));
18
+ return require_murmurhash.murmurhash(result, options);
19
+ }
20
+ /**
21
+ * Hash a folder path into a string based on the file content
22
+ *
23
+ * @param directoryPath - The folder path to hash
24
+ * @param options - Hashing options. By default, the `node_modules`, `.git`, `.nx`, `.cache`, and `tmp` folders are ignored.
25
+ * @returns A hashed string value
26
+ */
27
+ async function hashDirectory(directoryPath, options = {}) {
28
+ options.ignore = options.ignore ?? [
29
+ "**/node_modules/**",
30
+ "**/.git/**",
31
+ "**/.nx/**",
32
+ "**/.cache/**",
33
+ "**/.storm/**",
34
+ "**/tmp/**"
35
+ ];
36
+ return hashFiles(await require_list_files.listFiles(directoryPath, options), options);
37
+ }
38
+
39
+ //#endregion
40
+ exports.hashDirectory = hashDirectory;
41
+ exports.hashFiles = hashFiles;
@@ -1,2 +1,41 @@
1
- import{listFiles as e}from"./fs/src/list-files.mjs";import{readFile as t}from"./fs/src/read-file.mjs";import{murmurhash as n}from"./murmurhash.mjs";async function r(e,r){let i={};return await Promise.all(e.map(async e=>{i[e]=await t(e)})),n(i,r)}async function i(t,n={}){return n.ignore=n.ignore??[`**/node_modules/**`,`**/.git/**`,`**/.nx/**`,`**/.cache/**`,`**/.storm/**`,`**/tmp/**`],r(await e(t,n),n)}export{i as hashDirectory,r as hashFiles};
1
+ import { listFiles } from "./fs/src/list-files.mjs";
2
+ import { readFile } from "./fs/src/read-file.mjs";
3
+ import { murmurhash } from "./murmurhash.mjs";
4
+
5
+ //#region src/hash-files.ts
6
+ /**
7
+ * Hash a list of file paths into a string based on the file content
8
+ *
9
+ * @param files - The list of file paths to hash
10
+ * @param options - Hashing options
11
+ * @returns A hashed string value
12
+ */
13
+ async function hashFiles(files, options) {
14
+ const result = {};
15
+ await Promise.all(files.map(async (file) => {
16
+ result[file] = await readFile(file);
17
+ }));
18
+ return murmurhash(result, options);
19
+ }
20
+ /**
21
+ * Hash a folder path into a string based on the file content
22
+ *
23
+ * @param directoryPath - The folder path to hash
24
+ * @param options - Hashing options. By default, the `node_modules`, `.git`, `.nx`, `.cache`, and `tmp` folders are ignored.
25
+ * @returns A hashed string value
26
+ */
27
+ async function hashDirectory(directoryPath, options = {}) {
28
+ options.ignore = options.ignore ?? [
29
+ "**/node_modules/**",
30
+ "**/.git/**",
31
+ "**/.nx/**",
32
+ "**/.cache/**",
33
+ "**/.storm/**",
34
+ "**/tmp/**"
35
+ ];
36
+ return hashFiles(await listFiles(directoryPath, options), options);
37
+ }
38
+
39
+ //#endregion
40
+ export { hashDirectory, hashFiles };
2
41
  //# sourceMappingURL=hash-files.mjs.map
@@ -1 +1 @@
1
- {"version":3,"file":"hash-files.mjs","names":[],"sources":["../src/hash-files.ts"],"sourcesContent":["/* -------------------------------------------------------------------\n\n ⚡ Storm Software - Stryke\n\n This code was released as part of the Stryke project. Stryke\n is maintained by Storm Software under the Apache-2.0 license, and is\n free for commercial and private use. For more information, please visit\n our licensing page at https://stormsoftware.com/licenses/projects/stryke.\n\n Website: https://stormsoftware.com\n Repository: https://github.com/storm-software/stryke\n Documentation: https://docs.stormsoftware.com/projects/stryke\n Contact: https://stormsoftware.com/contact\n\n SPDX-License-Identifier: Apache-2.0\n\n ------------------------------------------------------------------- */\n\nimport type { ListOptions } from \"@stryke/fs/list-files\";\nimport { listFiles } from \"@stryke/fs/list-files\";\nimport { readFile } from \"@stryke/fs/read-file\";\nimport type { HashOptions } from \"./murmurhash\";\nimport { murmurhash } from \"./murmurhash\";\n\n/**\n * Hash a list of file paths into a string based on the file content\n *\n * @param files - The list of file paths to hash\n * @param options - Hashing options\n * @returns A hashed string value\n */\nexport async function hashFiles(\n files: string[],\n options?: HashOptions\n): Promise<string> {\n const result = {} as Record<string, string>;\n await Promise.all(\n files.map(async file => {\n result[file] = await readFile(file);\n })\n );\n\n return murmurhash(result, options);\n}\n\n/**\n * Hash a folder path into a string based on the file content\n *\n * @param directoryPath - The folder path to hash\n * @param options - Hashing options. 
By default, the `node_modules`, `.git`, `.nx`, `.cache`, and `tmp` folders is ignored.\n * @returns A hashed string value\n */\nexport async function hashDirectory(\n directoryPath: string,\n options: HashOptions & ListOptions = {}\n): Promise<string> {\n options.ignore = options.ignore ?? [\n \"**/node_modules/**\",\n \"**/.git/**\",\n \"**/.nx/**\",\n \"**/.cache/**\",\n \"**/.storm/**\",\n \"**/tmp/**\"\n ];\n\n return hashFiles(await listFiles(directoryPath, options), options);\n}\n"],"mappings":"oJA+BA,eAAsB,EACpB,EACA,EACiB,CACjB,IAAM,EAAS,EAAE,CAOjB,OANA,MAAM,QAAQ,IACZ,EAAM,IAAI,KAAM,IAAQ,CACtB,EAAO,GAAQ,MAAM,EAAS,EAAK,EACnC,CACH,CAEM,EAAW,EAAQ,EAAQ,CAUpC,eAAsB,EACpB,EACA,EAAqC,EAAE,CACtB,CAUjB,MATA,GAAQ,OAAS,EAAQ,QAAU,CACjC,qBACA,aACA,YACA,eACA,eACA,YACD,CAEM,EAAU,MAAM,EAAU,EAAe,EAAQ,CAAE,EAAQ"}
1
+ {"version":3,"file":"hash-files.mjs","names":[],"sources":["../src/hash-files.ts"],"sourcesContent":["/* -------------------------------------------------------------------\n\n ⚡ Storm Software - Stryke\n\n This code was released as part of the Stryke project. Stryke\n is maintained by Storm Software under the Apache-2.0 license, and is\n free for commercial and private use. For more information, please visit\n our licensing page at https://stormsoftware.com/licenses/projects/stryke.\n\n Website: https://stormsoftware.com\n Repository: https://github.com/storm-software/stryke\n Documentation: https://docs.stormsoftware.com/projects/stryke\n Contact: https://stormsoftware.com/contact\n\n SPDX-License-Identifier: Apache-2.0\n\n ------------------------------------------------------------------- */\n\nimport type { ListOptions } from \"@stryke/fs/list-files\";\nimport { listFiles } from \"@stryke/fs/list-files\";\nimport { readFile } from \"@stryke/fs/read-file\";\nimport type { HashOptions } from \"./murmurhash\";\nimport { murmurhash } from \"./murmurhash\";\n\n/**\n * Hash a list of file paths into a string based on the file content\n *\n * @param files - The list of file paths to hash\n * @param options - Hashing options\n * @returns A hashed string value\n */\nexport async function hashFiles(\n files: string[],\n options?: HashOptions\n): Promise<string> {\n const result = {} as Record<string, string>;\n await Promise.all(\n files.map(async file => {\n result[file] = await readFile(file);\n })\n );\n\n return murmurhash(result, options);\n}\n\n/**\n * Hash a folder path into a string based on the file content\n *\n * @param directoryPath - The folder path to hash\n * @param options - Hashing options. 
By default, the `node_modules`, `.git`, `.nx`, `.cache`, and `tmp` folders is ignored.\n * @returns A hashed string value\n */\nexport async function hashDirectory(\n directoryPath: string,\n options: HashOptions & ListOptions = {}\n): Promise<string> {\n options.ignore = options.ignore ?? [\n \"**/node_modules/**\",\n \"**/.git/**\",\n \"**/.nx/**\",\n \"**/.cache/**\",\n \"**/.storm/**\",\n \"**/tmp/**\"\n ];\n\n return hashFiles(await listFiles(directoryPath, options), options);\n}\n"],"mappings":";;;;;;;;;;;;AA+BA,eAAsB,UACpB,OACA,SACiB;CACjB,MAAM,SAAS,EAAE;AACjB,OAAM,QAAQ,IACZ,MAAM,IAAI,OAAM,SAAQ;AACtB,SAAO,QAAQ,MAAM,SAAS,KAAK;GACnC,CACH;AAED,QAAO,WAAW,QAAQ,QAAQ;;;;;;;;;AAUpC,eAAsB,cACpB,eACA,UAAqC,EAAE,EACtB;AACjB,SAAQ,SAAS,QAAQ,UAAU;EACjC;EACA;EACA;EACA;EACA;EACA;EACD;AAED,QAAO,UAAU,MAAM,UAAU,eAAe,QAAQ,EAAE,QAAQ"}
package/dist/index.cjs CHANGED
@@ -1 +1,20 @@
1
- const e=require(`./digest.cjs`),t=require(`./etag.cjs`),n=require(`./murmurhash.cjs`),r=require(`./hash-files.cjs`),i=require(`./md5.cjs`),a=require(`./xx-hash.cjs`);exports.Hasher=e.Hasher,exports.createHasher=e.createHasher,exports.digest=e.digest,exports.fnv1a52=t.fnv1a52,exports.generateETag=t.generateETag,exports.hash=e.hash,exports.hashDirectory=r.hashDirectory,exports.hashFiles=r.hashFiles,exports.md5=i.md5,exports.murmurhash=n.murmurhash,exports.xxHash128=a.xxHash128,exports.xxHash32=a.xxHash32,exports.xxHash64=a.xxHash64;
1
+ const require_digest = require('./digest.cjs');
2
+ const require_etag = require('./etag.cjs');
3
+ const require_murmurhash = require('./murmurhash.cjs');
4
+ const require_hash_files = require('./hash-files.cjs');
5
+ const require_md5 = require('./md5.cjs');
6
+ const require_xx_hash = require('./xx-hash.cjs');
7
+
8
+ exports.Hasher = require_digest.Hasher;
9
+ exports.createHasher = require_digest.createHasher;
10
+ exports.digest = require_digest.digest;
11
+ exports.fnv1a52 = require_etag.fnv1a52;
12
+ exports.generateETag = require_etag.generateETag;
13
+ exports.hash = require_digest.hash;
14
+ exports.hashDirectory = require_hash_files.hashDirectory;
15
+ exports.hashFiles = require_hash_files.hashFiles;
16
+ exports.md5 = require_md5.md5;
17
+ exports.murmurhash = require_murmurhash.murmurhash;
18
+ exports.xxHash128 = require_xx_hash.xxHash128;
19
+ exports.xxHash32 = require_xx_hash.xxHash32;
20
+ exports.xxHash64 = require_xx_hash.xxHash64;
package/dist/index.mjs CHANGED
@@ -1 +1,8 @@
1
- import{Hasher as e,createHasher as t,digest as n,hash as r}from"./digest.mjs";import{fnv1a52 as i,generateETag as a}from"./etag.mjs";import{murmurhash as o}from"./murmurhash.mjs";import{hashDirectory as s,hashFiles as c}from"./hash-files.mjs";import{md5 as l}from"./md5.mjs";import{xxHash128 as u,xxHash32 as d,xxHash64 as f}from"./xx-hash.mjs";export{e as Hasher,t as createHasher,n as digest,i as fnv1a52,a as generateETag,r as hash,s as hashDirectory,c as hashFiles,l as md5,o as murmurhash,u as xxHash128,d as xxHash32,f as xxHash64};
1
+ import { Hasher, createHasher, digest, hash } from "./digest.mjs";
2
+ import { fnv1a52, generateETag } from "./etag.mjs";
3
+ import { murmurhash } from "./murmurhash.mjs";
4
+ import { hashDirectory, hashFiles } from "./hash-files.mjs";
5
+ import { md5 } from "./md5.mjs";
6
+ import { xxHash128, xxHash32, xxHash64 } from "./xx-hash.mjs";
7
+
8
+ export { Hasher, createHasher, digest, fnv1a52, generateETag, hash, hashDirectory, hashFiles, md5, murmurhash, xxHash128, xxHash32, xxHash64 };
package/dist/md5.cjs CHANGED
@@ -1 +1,17 @@
1
- const e=require(`./_virtual/rolldown_runtime.cjs`);let t=require(`node:crypto`);function n(e,n=32){return(0,t.createHash)(`md5`).update(e).digest(`hex`).slice(0,n)}exports.md5=n;
1
+ const require_rolldown_runtime = require('./_virtual/rolldown_runtime.cjs');
2
+ let node_crypto = require("node:crypto");
3
+
4
+ //#region src/md5.ts
5
+ /**
6
+ * Generate an MD5 hash of the provided content.
7
+ *
8
+ * @param content - The content to hash.
9
+ * @param length - The length of the hash to return.
10
+ * @returns The generated MD5 hash.
11
+ */
12
+ function md5(content, length = 32) {
13
+ return (0, node_crypto.createHash)("md5").update(content).digest("hex").slice(0, length);
14
+ }
15
+
16
+ //#endregion
17
+ exports.md5 = md5;
package/dist/md5.mjs CHANGED
@@ -1,2 +1,17 @@
1
- import{createHash as e}from"node:crypto";function t(t,n=32){return e(`md5`).update(t).digest(`hex`).slice(0,n)}export{t as md5};
1
+ import { createHash } from "node:crypto";
2
+
3
+ //#region src/md5.ts
4
+ /**
5
+ * Generate an MD5 hash of the provided content.
6
+ *
7
+ * @param content - The content to hash.
8
+ * @param length - The length of the hash to return.
9
+ * @returns The generated MD5 hash.
10
+ */
11
+ function md5(content, length = 32) {
12
+ return createHash("md5").update(content).digest("hex").slice(0, length);
13
+ }
14
+
15
+ //#endregion
16
+ export { md5 };
2
17
  //# sourceMappingURL=md5.mjs.map
package/dist/md5.mjs.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"md5.mjs","names":[],"sources":["../src/md5.ts"],"sourcesContent":["/* -------------------------------------------------------------------\n\n ⚡ Storm Software - Stryke\n\n This code was released as part of the Stryke project. Stryke\n is maintained by Storm Software under the Apache-2.0 license, and is\n free for commercial and private use. For more information, please visit\n our licensing page at https://stormsoftware.com/licenses/projects/stryke.\n\n Website: https://stormsoftware.com\n Repository: https://github.com/storm-software/stryke\n Documentation: https://docs.stormsoftware.com/projects/stryke\n Contact: https://stormsoftware.com/contact\n\n SPDX-License-Identifier: Apache-2.0\n\n ------------------------------------------------------------------- */\n\nimport { createHash } from \"node:crypto\";\n\n/**\n * Generate an MD5 hash of the provided content.\n *\n * @param content - The content to hash.\n * @param length - The length of the hash to return.\n * @returns The generated MD5 hash.\n */\nexport function md5(content: string, length = 32) {\n return createHash(\"md5\").update(content).digest(\"hex\").slice(0, length);\n}\n"],"mappings":"yCA2BA,SAAgB,EAAI,EAAiB,EAAS,GAAI,CAChD,OAAO,EAAW,MAAM,CAAC,OAAO,EAAQ,CAAC,OAAO,MAAM,CAAC,MAAM,EAAG,EAAO"}
1
+ {"version":3,"file":"md5.mjs","names":[],"sources":["../src/md5.ts"],"sourcesContent":["/* -------------------------------------------------------------------\n\n ⚡ Storm Software - Stryke\n\n This code was released as part of the Stryke project. Stryke\n is maintained by Storm Software under the Apache-2.0 license, and is\n free for commercial and private use. For more information, please visit\n our licensing page at https://stormsoftware.com/licenses/projects/stryke.\n\n Website: https://stormsoftware.com\n Repository: https://github.com/storm-software/stryke\n Documentation: https://docs.stormsoftware.com/projects/stryke\n Contact: https://stormsoftware.com/contact\n\n SPDX-License-Identifier: Apache-2.0\n\n ------------------------------------------------------------------- */\n\nimport { createHash } from \"node:crypto\";\n\n/**\n * Generate an MD5 hash of the provided content.\n *\n * @param content - The content to hash.\n * @param length - The length of the hash to return.\n * @returns The generated MD5 hash.\n */\nexport function md5(content: string, length = 32) {\n return createHash(\"md5\").update(content).digest(\"hex\").slice(0, length);\n}\n"],"mappings":";;;;;;;;;;AA2BA,SAAgB,IAAI,SAAiB,SAAS,IAAI;AAChD,QAAO,WAAW,MAAM,CAAC,OAAO,QAAQ,CAAC,OAAO,MAAM,CAAC,MAAM,GAAG,OAAO"}
@@ -1 +1,22 @@
1
- const e=require(`./_virtual/rolldown_runtime.cjs`);let t=require(`ohash`);function n(e,n){let r=(0,t.hash)(e),i=n?.maxLength??32;return r.length>i?r.slice(0,i):r}exports.murmurhash=n;
1
+ const require_rolldown_runtime = require('./_virtual/rolldown_runtime.cjs');
2
+ let ohash = require("ohash");
3
+
4
+ //#region src/murmurhash.ts
5
+ /**
6
+ * Use a [MurmurHash3](https://en.wikipedia.org/wiki/MurmurHash) based algorithm to hash any JS value into a string.
7
+ *
8
+ * @see https://github.com/ohash/ohash
9
+ * @see https://en.wikipedia.org/wiki/MurmurHash
10
+ *
11
+ * @param content - The value to hash
12
+ * @param options - Hashing options
13
+ * @returns A hashed string value
14
+ */
15
+ function murmurhash(content, options) {
16
+ const result = (0, ohash.hash)(content);
17
+ const maxLength = options?.maxLength ?? 32;
18
+ return result.length > maxLength ? result.slice(0, maxLength) : result;
19
+ }
20
+
21
+ //#endregion
22
+ exports.murmurhash = murmurhash;
@@ -1,2 +1,22 @@
1
- import{hash as e}from"ohash";function t(t,n){let r=e(t),i=n?.maxLength??32;return r.length>i?r.slice(0,i):r}export{t as murmurhash};
1
+ import { hash } from "ohash";
2
+
3
+ //#region src/murmurhash.ts
4
+ /**
5
+ * Use a [MurmurHash3](https://en.wikipedia.org/wiki/MurmurHash) based algorithm to hash any JS value into a string.
6
+ *
7
+ * @see https://github.com/ohash/ohash
8
+ * @see https://en.wikipedia.org/wiki/MurmurHash
9
+ *
10
+ * @param content - The value to hash
11
+ * @param options - Hashing options
12
+ * @returns A hashed string value
13
+ */
14
+ function murmurhash(content, options) {
15
+ const result = hash(content);
16
+ const maxLength = options?.maxLength ?? 32;
17
+ return result.length > maxLength ? result.slice(0, maxLength) : result;
18
+ }
19
+
20
+ //#endregion
21
+ export { murmurhash };
2
22
  //# sourceMappingURL=murmurhash.mjs.map
@@ -1 +1 @@
1
- {"version":3,"file":"murmurhash.mjs","names":["ohash"],"sources":["../src/murmurhash.ts"],"sourcesContent":["/* -------------------------------------------------------------------\n\n ⚡ Storm Software - Stryke\n\n This code was released as part of the Stryke project. Stryke\n is maintained by Storm Software under the Apache-2.0 license, and is\n free for commercial and private use. For more information, please visit\n our licensing page at https://stormsoftware.com/licenses/projects/stryke.\n\n Website: https://stormsoftware.com\n Repository: https://github.com/storm-software/stryke\n Documentation: https://docs.stormsoftware.com/projects/stryke\n Contact: https://stormsoftware.com/contact\n\n SPDX-License-Identifier: Apache-2.0\n\n ------------------------------------------------------------------- */\n\nimport { hash as ohash } from \"ohash\";\n\nexport interface HashOptions {\n /**\n * The maximum length of the hash\n *\n * @defaultValue 32\n */\n maxLength?: number;\n}\n\n/**\n * Use a [MurmurHash3](https://en.wikipedia.org/wiki/MurmurHash) based algorithm to hash any JS value into a string.\n *\n * @see https://github.com/ohash/ohash\n * @see https://en.wikipedia.org/wiki/MurmurHash\n *\n * @param content - The value to hash\n * @param options - Hashing options\n * @returns A hashed string value\n */\nexport function murmurhash(content: any, options?: HashOptions): string {\n const result = ohash(content);\n const maxLength = options?.maxLength ?? 32;\n\n return result.length > maxLength ? result.slice(0, maxLength) : result;\n}\n"],"mappings":"6BAuCA,SAAgB,EAAW,EAAc,EAA+B,CACtE,IAAM,EAASA,EAAM,EAAQ,CACvB,EAAY,GAAS,WAAa,GAExC,OAAO,EAAO,OAAS,EAAY,EAAO,MAAM,EAAG,EAAU,CAAG"}
1
+ {"version":3,"file":"murmurhash.mjs","names":["ohash"],"sources":["../src/murmurhash.ts"],"sourcesContent":["/* -------------------------------------------------------------------\n\n ⚡ Storm Software - Stryke\n\n This code was released as part of the Stryke project. Stryke\n is maintained by Storm Software under the Apache-2.0 license, and is\n free for commercial and private use. For more information, please visit\n our licensing page at https://stormsoftware.com/licenses/projects/stryke.\n\n Website: https://stormsoftware.com\n Repository: https://github.com/storm-software/stryke\n Documentation: https://docs.stormsoftware.com/projects/stryke\n Contact: https://stormsoftware.com/contact\n\n SPDX-License-Identifier: Apache-2.0\n\n ------------------------------------------------------------------- */\n\nimport { hash as ohash } from \"ohash\";\n\nexport interface HashOptions {\n /**\n * The maximum length of the hash\n *\n * @defaultValue 32\n */\n maxLength?: number;\n}\n\n/**\n * Use a [MurmurHash3](https://en.wikipedia.org/wiki/MurmurHash) based algorithm to hash any JS value into a string.\n *\n * @see https://github.com/ohash/ohash\n * @see https://en.wikipedia.org/wiki/MurmurHash\n *\n * @param content - The value to hash\n * @param options - Hashing options\n * @returns A hashed string value\n */\nexport function murmurhash(content: any, options?: HashOptions): string {\n const result = ohash(content);\n const maxLength = options?.maxLength ?? 32;\n\n return result.length > maxLength ? result.slice(0, maxLength) : result;\n}\n"],"mappings":";;;;;;;;;;;;;AAuCA,SAAgB,WAAW,SAAc,SAA+B;CACtE,MAAM,SAASA,KAAM,QAAQ;CAC7B,MAAM,YAAY,SAAS,aAAa;AAExC,QAAO,OAAO,SAAS,YAAY,OAAO,MAAM,GAAG,UAAU,GAAG"}
package/dist/neutral.cjs CHANGED
@@ -1 +1,15 @@
1
- const e=require(`./digest.cjs`),t=require(`./murmurhash.cjs`),n=require(`./hash-files.cjs`),r=require(`./xx-hash.cjs`);exports.Hasher=e.Hasher,exports.createHasher=e.createHasher,exports.digest=e.digest,exports.hash=e.hash,exports.hashDirectory=n.hashDirectory,exports.hashFiles=n.hashFiles,exports.murmurhash=t.murmurhash,exports.xxHash128=r.xxHash128,exports.xxHash32=r.xxHash32,exports.xxHash64=r.xxHash64;
1
+ const require_digest = require('./digest.cjs');
2
+ const require_murmurhash = require('./murmurhash.cjs');
3
+ const require_hash_files = require('./hash-files.cjs');
4
+ const require_xx_hash = require('./xx-hash.cjs');
5
+
6
+ exports.Hasher = require_digest.Hasher;
7
+ exports.createHasher = require_digest.createHasher;
8
+ exports.digest = require_digest.digest;
9
+ exports.hash = require_digest.hash;
10
+ exports.hashDirectory = require_hash_files.hashDirectory;
11
+ exports.hashFiles = require_hash_files.hashFiles;
12
+ exports.murmurhash = require_murmurhash.murmurhash;
13
+ exports.xxHash128 = require_xx_hash.xxHash128;
14
+ exports.xxHash32 = require_xx_hash.xxHash32;
15
+ exports.xxHash64 = require_xx_hash.xxHash64;
package/dist/neutral.mjs CHANGED
@@ -1 +1,6 @@
1
- import{Hasher as e,createHasher as t,digest as n,hash as r}from"./digest.mjs";import{murmurhash as i}from"./murmurhash.mjs";import{hashDirectory as a,hashFiles as o}from"./hash-files.mjs";import{xxHash128 as s,xxHash32 as c,xxHash64 as l}from"./xx-hash.mjs";export{e as Hasher,t as createHasher,n as digest,r as hash,a as hashDirectory,o as hashFiles,i as murmurhash,s as xxHash128,c as xxHash32,l as xxHash64};
1
+ import { Hasher, createHasher, digest, hash } from "./digest.mjs";
2
+ import { murmurhash } from "./murmurhash.mjs";
3
+ import { hashDirectory, hashFiles } from "./hash-files.mjs";
4
+ import { xxHash128, xxHash32, xxHash64 } from "./xx-hash.mjs";
5
+
6
+ export { Hasher, createHasher, digest, hash, hashDirectory, hashFiles, murmurhash, xxHash128, xxHash32, xxHash64 };
@@ -1 +1,28 @@
1
- const e=require(`./regex.cjs`),t=require(`./slash.cjs`);function n(n){return e.ABSOLUTE_PATH_REGEX.test(t.slash(n))}function r(e){return n(e)}exports.isAbsolute=r;
1
+ const require_regex = require('./regex.cjs');
2
+ const require_slash = require('./slash.cjs');
3
+
4
+ //#region ../path/src/is-type.ts
5
+ /**
6
+ * Check if the path is an absolute path.
7
+ *
8
+ * @param path - The path to check
9
+ * @returns An indicator specifying if the path is an absolute path
10
+ */
11
+ function isAbsolutePath(path) {
12
+ return require_regex.ABSOLUTE_PATH_REGEX.test(require_slash.slash(path));
13
+ }
14
+ /**
15
+ * Check if the path is an absolute path.
16
+ *
17
+ * @remarks
18
+ * This is an alias for {@link isAbsolutePath}.
19
+ *
20
+ * @param path - The path to check
21
+ * @returns An indicator specifying if the path is an absolute path
22
+ */
23
+ function isAbsolute(path) {
24
+ return isAbsolutePath(path);
25
+ }
26
+
27
+ //#endregion
28
+ exports.isAbsolute = isAbsolute;
@@ -1,2 +1,29 @@
1
- import{ABSOLUTE_PATH_REGEX as e}from"./regex.mjs";import{slash as t}from"./slash.mjs";function n(n){return e.test(t(n))}function r(e){return n(e)}export{r as isAbsolute};
1
+ import { ABSOLUTE_PATH_REGEX } from "./regex.mjs";
2
+ import { slash } from "./slash.mjs";
3
+
4
+ //#region ../path/src/is-type.ts
5
+ /**
6
+ * Check if the path is an absolute path.
7
+ *
8
+ * @param path - The path to check
9
+ * @returns An indicator specifying if the path is an absolute path
10
+ */
11
+ function isAbsolutePath(path) {
12
+ return ABSOLUTE_PATH_REGEX.test(slash(path));
13
+ }
14
+ /**
15
+ * Check if the path is an absolute path.
16
+ *
17
+ * @remarks
18
+ * This is an alias for {@link isAbsolutePath}.
19
+ *
20
+ * @param path - The path to check
21
+ * @returns An indicator specifying if the path is an absolute path
22
+ */
23
+ function isAbsolute(path) {
24
+ return isAbsolutePath(path);
25
+ }
26
+
27
+ //#endregion
28
+ export { isAbsolute };
2
29
  //# sourceMappingURL=is-type.mjs.map
@@ -1 +1 @@
1
- {"version":3,"file":"is-type.mjs","names":[],"sources":["../../../../path/src/is-type.ts"],"sourcesContent":["/* -------------------------------------------------------------------\n\n ⚡ Storm Software - Stryke\n\n This code was released as part of the Stryke project. Stryke\n is maintained by Storm Software under the Apache-2.0 license, and is\n free for commercial and private use. For more information, please visit\n our licensing page at https://stormsoftware.com/licenses/projects/stryke.\n\n Website: https://stormsoftware.com\n Repository: https://github.com/storm-software/stryke\n Documentation: https://docs.stormsoftware.com/projects/stryke\n Contact: https://stormsoftware.com/contact\n\n SPDX-License-Identifier: Apache-2.0\n\n ------------------------------------------------------------------- */\n\nimport { ABSOLUTE_PATH_REGEX, NPM_SCOPED_PACKAGE_REGEX } from \"./regex\";\nimport { slash } from \"./slash\";\n\n/**\n * Check if the path is an absolute path.\n *\n * @param path - The path to check\n * @returns An indicator specifying if the path is an absolute path\n */\nexport function isAbsolutePath(path: string): boolean {\n return ABSOLUTE_PATH_REGEX.test(slash(path));\n}\n\n/**\n * Check if the path is an absolute path.\n *\n * @remarks\n * This is an alias for {@link isAbsolutePath}.\n *\n * @param path - The path to check\n * @returns An indicator specifying if the path is an absolute path\n */\nexport function isAbsolute(path: string): boolean {\n return isAbsolutePath(path);\n}\n\n/**\n * Check if the path is a relative path.\n *\n * @param path - The path to check\n * @returns An indicator specifying if the path is a relative path\n */\nexport function isRelativePath(path: string): boolean {\n return !isAbsolutePath(path);\n}\n\n/**\n * Check if the path is a relative path.\n *\n * @remarks\n * This is an alias for {@link isRelativePath}.\n *\n * @param path - The path to check\n * @returns An indicator specifying if the path is a relative path\n 
*/\nexport function isRelative(path: string): boolean {\n return isRelativePath(path);\n}\n\n/**\n * Check if the path is a npm package path.\n *\n * @remarks\n * This only checks if the path matches the npm namespace scoped package naming convention such as `@scope/package-name`. This is an alias for {@link isNpmScopedPackage}.\n *\n * @example\n * ```ts\n * isNpmScopedPackage(\"@stryke/path\"); // returns true\n * isNpmScopedPackage(\"lodash\"); // returns false\n * isNpmNamespacePackage(\"./src/index.ts\"); // returns false\n * ```\n *\n * @param path - The path to check\n * @returns An indicator specifying if the path is a npm package path\n */\nexport function isNpmScopedPackagePath(path: string): boolean {\n return NPM_SCOPED_PACKAGE_REGEX.test(slash(path));\n}\n\n/**\n * Check if the path is a npm package path.\n *\n * @remarks\n * This only checks if the path matches the npm namespace scoped package naming convention such as `@scope/package-name`. This is an alias for {@link isNpmScopedPackagePath}.\n *\n * @example\n * ```ts\n * isNpmScopedPackagePath(\"@stryke/path\"); // returns true\n * isNpmScopedPackagePath(\"lodash\"); // returns false\n * isNpmScopedPackagePath(\"./src/index.ts\"); // returns false\n * ```\n *\n * @param path - The path to check\n * @returns An indicator specifying if the path is a npm package path\n */\nexport function isNpmScopedPackage(path: string): boolean {\n return isNpmScopedPackagePath(path);\n}\n"],"mappings":"sFA2BA,SAAgB,EAAe,EAAuB,CACpD,OAAO,EAAoB,KAAK,EAAM,EAAK,CAAC,CAY9C,SAAgB,EAAW,EAAuB,CAChD,OAAO,EAAe,EAAK"}
1
+ {"version":3,"file":"is-type.mjs","names":[],"sources":["../../../../path/src/is-type.ts"],"sourcesContent":["/* -------------------------------------------------------------------\n\n ⚡ Storm Software - Stryke\n\n This code was released as part of the Stryke project. Stryke\n is maintained by Storm Software under the Apache-2.0 license, and is\n free for commercial and private use. For more information, please visit\n our licensing page at https://stormsoftware.com/licenses/projects/stryke.\n\n Website: https://stormsoftware.com\n Repository: https://github.com/storm-software/stryke\n Documentation: https://docs.stormsoftware.com/projects/stryke\n Contact: https://stormsoftware.com/contact\n\n SPDX-License-Identifier: Apache-2.0\n\n ------------------------------------------------------------------- */\n\nimport { ABSOLUTE_PATH_REGEX, NPM_SCOPED_PACKAGE_REGEX } from \"./regex\";\nimport { slash } from \"./slash\";\n\n/**\n * Check if the path is an absolute path.\n *\n * @param path - The path to check\n * @returns An indicator specifying if the path is an absolute path\n */\nexport function isAbsolutePath(path: string): boolean {\n return ABSOLUTE_PATH_REGEX.test(slash(path));\n}\n\n/**\n * Check if the path is an absolute path.\n *\n * @remarks\n * This is an alias for {@link isAbsolutePath}.\n *\n * @param path - The path to check\n * @returns An indicator specifying if the path is an absolute path\n */\nexport function isAbsolute(path: string): boolean {\n return isAbsolutePath(path);\n}\n\n/**\n * Check if the path is a relative path.\n *\n * @param path - The path to check\n * @returns An indicator specifying if the path is a relative path\n */\nexport function isRelativePath(path: string): boolean {\n return !isAbsolutePath(path);\n}\n\n/**\n * Check if the path is a relative path.\n *\n * @remarks\n * This is an alias for {@link isRelativePath}.\n *\n * @param path - The path to check\n * @returns An indicator specifying if the path is a relative path\n 
*/\nexport function isRelative(path: string): boolean {\n return isRelativePath(path);\n}\n\n/**\n * Check if the path is a npm package path.\n *\n * @remarks\n * This only checks if the path matches the npm namespace scoped package naming convention such as `@scope/package-name`. This is an alias for {@link isNpmScopedPackage}.\n *\n * @example\n * ```ts\n * isNpmScopedPackage(\"@stryke/path\"); // returns true\n * isNpmScopedPackage(\"lodash\"); // returns false\n * isNpmNamespacePackage(\"./src/index.ts\"); // returns false\n * ```\n *\n * @param path - The path to check\n * @returns An indicator specifying if the path is a npm package path\n */\nexport function isNpmScopedPackagePath(path: string): boolean {\n return NPM_SCOPED_PACKAGE_REGEX.test(slash(path));\n}\n\n/**\n * Check if the path is a npm package path.\n *\n * @remarks\n * This only checks if the path matches the npm namespace scoped package naming convention such as `@scope/package-name`. This is an alias for {@link isNpmScopedPackagePath}.\n *\n * @example\n * ```ts\n * isNpmScopedPackagePath(\"@stryke/path\"); // returns true\n * isNpmScopedPackagePath(\"lodash\"); // returns false\n * isNpmScopedPackagePath(\"./src/index.ts\"); // returns false\n * ```\n *\n * @param path - The path to check\n * @returns An indicator specifying if the path is a npm package path\n */\nexport function isNpmScopedPackage(path: string): boolean {\n return isNpmScopedPackagePath(path);\n}\n"],"mappings":";;;;;;;;;;AA2BA,SAAgB,eAAe,MAAuB;AACpD,QAAO,oBAAoB,KAAK,MAAM,KAAK,CAAC;;;;;;;;;;;AAY9C,SAAgB,WAAW,MAAuB;AAChD,QAAO,eAAe,KAAK"}
@@ -1 +1,106 @@
1
- const e=require(`./regex.cjs`),t=require(`./is-type.cjs`);function n(t=``){return t&&t.replace(/\\/g,`/`).replace(e.DRIVE_LETTER_START_REGEX,e=>e.toUpperCase())}function r(r){if(!r||r.length===0)return`.`;r=n(r);let i=r.match(e.UNC_REGEX),o=t.isAbsolute(r),s=r[r.length-1]===`/`;return r=a(r,!o),r.length===0?o?`/`:s?`./`:`.`:(s&&(r+=`/`),e.DRIVE_LETTER_REGEX.test(r)&&(r+=`/`),i?o?`//${r}`:`//./${r}`:o&&!t.isAbsolute(r)?`/${r}`:r)}function i(...e){let t=``;for(let n of e)if(n)if(t.length>0){let e=t[t.length-1]===`/`,r=n[0]===`/`;e&&r?t+=n.slice(1):t+=e||r?n:`/${n}`}else t+=n;return r(t)}function a(e,t){let n=``,r=0,i=-1,a=0,o=null;for(let s=0;s<=e.length;++s){if(s<e.length)o=e[s];else if(o===`/`)break;else o=`/`;if(o===`/`){if(!(i===s-1||a===1))if(a===2){if(n.length<2||r!==2||n[n.length-1]!==`.`||n[n.length-2]!==`.`){if(n.length>2){let e=n.lastIndexOf(`/`);e===-1?(n=``,r=0):(n=n.slice(0,e),r=n.length-1-n.lastIndexOf(`/`)),i=s,a=0;continue}else if(n.length>0){n=``,r=0,i=s,a=0;continue}}t&&(n+=n.length>0?`/..`:`..`,r=2)}else n.length>0?n+=`/${e.slice(i+1,s)}`:n=e.slice(i+1,s),r=s-i-1;i=s,a=0}else o===`.`&&a!==-1?++a:a=-1}return n}exports.joinPaths=i;
1
+ const require_regex = require('./regex.cjs');
2
+ const require_is_type = require('./is-type.cjs');
3
+
4
+ //#region ../path/src/join-paths.ts
5
+ function normalizeWindowsPath(input = "") {
6
+ if (!input) return input;
7
+ return input.replace(/\\/g, "/").replace(require_regex.DRIVE_LETTER_START_REGEX, (r) => r.toUpperCase());
8
+ }
9
+ function correctPaths(path) {
10
+ if (!path || path.length === 0) return ".";
11
+ path = normalizeWindowsPath(path);
12
+ const isUNCPath = path.match(require_regex.UNC_REGEX);
13
+ const isPathAbsolute = require_is_type.isAbsolute(path);
14
+ const trailingSeparator = path[path.length - 1] === "/";
15
+ path = normalizeString(path, !isPathAbsolute);
16
+ if (path.length === 0) {
17
+ if (isPathAbsolute) return "/";
18
+ return trailingSeparator ? "./" : ".";
19
+ }
20
+ if (trailingSeparator) path += "/";
21
+ if (require_regex.DRIVE_LETTER_REGEX.test(path)) path += "/";
22
+ if (isUNCPath) {
23
+ if (!isPathAbsolute) return `//./${path}`;
24
+ return `//${path}`;
25
+ }
26
+ return isPathAbsolute && !require_is_type.isAbsolute(path) ? `/${path}` : path;
27
+ }
28
+ /**
29
+ * Joins all given path segments together using the platform-specific separator as a delimiter.
30
+ * The resulting path is normalized to remove any redundant or unnecessary segments.
31
+ *
32
+ * @param segments - The path segments to join.
33
+ * @returns The joined and normalized path string.
34
+ */
35
+ function joinPaths(...segments) {
36
+ let path = "";
37
+ for (const seg of segments) {
38
+ if (!seg) continue;
39
+ if (path.length > 0) {
40
+ const pathTrailing = path[path.length - 1] === "/";
41
+ const segLeading = seg[0] === "/";
42
+ if (pathTrailing && segLeading) path += seg.slice(1);
43
+ else path += pathTrailing || segLeading ? seg : `/${seg}`;
44
+ } else path += seg;
45
+ }
46
+ return correctPaths(path);
47
+ }
48
+ /**
49
+ * Resolves a string path, resolving '.' and '.' segments and allowing paths above the root.
50
+ *
51
+ * @param path - The path to normalize.
52
+ * @param allowAboveRoot - Whether to allow the resulting path to be above the root directory.
53
+ * @returns the normalized path string.
54
+ */
55
+ function normalizeString(path, allowAboveRoot) {
56
+ let res = "";
57
+ let lastSegmentLength = 0;
58
+ let lastSlash = -1;
59
+ let dots = 0;
60
+ let char = null;
61
+ for (let index = 0; index <= path.length; ++index) {
62
+ if (index < path.length) char = path[index];
63
+ else if (char === "/") break;
64
+ else char = "/";
65
+ if (char === "/") {
66
+ if (lastSlash === index - 1 || dots === 1) {} else if (dots === 2) {
67
+ if (res.length < 2 || lastSegmentLength !== 2 || res[res.length - 1] !== "." || res[res.length - 2] !== ".") {
68
+ if (res.length > 2) {
69
+ const lastSlashIndex = res.lastIndexOf("/");
70
+ if (lastSlashIndex === -1) {
71
+ res = "";
72
+ lastSegmentLength = 0;
73
+ } else {
74
+ res = res.slice(0, lastSlashIndex);
75
+ lastSegmentLength = res.length - 1 - res.lastIndexOf("/");
76
+ }
77
+ lastSlash = index;
78
+ dots = 0;
79
+ continue;
80
+ } else if (res.length > 0) {
81
+ res = "";
82
+ lastSegmentLength = 0;
83
+ lastSlash = index;
84
+ dots = 0;
85
+ continue;
86
+ }
87
+ }
88
+ if (allowAboveRoot) {
89
+ res += res.length > 0 ? "/.." : "..";
90
+ lastSegmentLength = 2;
91
+ }
92
+ } else {
93
+ if (res.length > 0) res += `/${path.slice(lastSlash + 1, index)}`;
94
+ else res = path.slice(lastSlash + 1, index);
95
+ lastSegmentLength = index - lastSlash - 1;
96
+ }
97
+ lastSlash = index;
98
+ dots = 0;
99
+ } else if (char === "." && dots !== -1) ++dots;
100
+ else dots = -1;
101
+ }
102
+ return res;
103
+ }
104
+
105
+ //#endregion
106
+ exports.joinPaths = joinPaths;
@@ -1,2 +1,107 @@
1
- import{DRIVE_LETTER_REGEX as e,DRIVE_LETTER_START_REGEX as t,UNC_REGEX as n}from"./regex.mjs";import{isAbsolute as r}from"./is-type.mjs";function i(e=``){return e&&e.replace(/\\/g,`/`).replace(t,e=>e.toUpperCase())}function a(t){if(!t||t.length===0)return`.`;t=i(t);let a=t.match(n),o=r(t),c=t[t.length-1]===`/`;return t=s(t,!o),t.length===0?o?`/`:c?`./`:`.`:(c&&(t+=`/`),e.test(t)&&(t+=`/`),a?o?`//${t}`:`//./${t}`:o&&!r(t)?`/${t}`:t)}function o(...e){let t=``;for(let n of e)if(n)if(t.length>0){let e=t[t.length-1]===`/`,r=n[0]===`/`;e&&r?t+=n.slice(1):t+=e||r?n:`/${n}`}else t+=n;return a(t)}function s(e,t){let n=``,r=0,i=-1,a=0,o=null;for(let s=0;s<=e.length;++s){if(s<e.length)o=e[s];else if(o===`/`)break;else o=`/`;if(o===`/`){if(!(i===s-1||a===1))if(a===2){if(n.length<2||r!==2||n[n.length-1]!==`.`||n[n.length-2]!==`.`){if(n.length>2){let e=n.lastIndexOf(`/`);e===-1?(n=``,r=0):(n=n.slice(0,e),r=n.length-1-n.lastIndexOf(`/`)),i=s,a=0;continue}else if(n.length>0){n=``,r=0,i=s,a=0;continue}}t&&(n+=n.length>0?`/..`:`..`,r=2)}else n.length>0?n+=`/${e.slice(i+1,s)}`:n=e.slice(i+1,s),r=s-i-1;i=s,a=0}else o===`.`&&a!==-1?++a:a=-1}return n}export{o as joinPaths};
1
+ import { DRIVE_LETTER_REGEX, DRIVE_LETTER_START_REGEX, UNC_REGEX } from "./regex.mjs";
2
+ import { isAbsolute } from "./is-type.mjs";
3
+
4
+ //#region ../path/src/join-paths.ts
5
+ function normalizeWindowsPath(input = "") {
6
+ if (!input) return input;
7
+ return input.replace(/\\/g, "/").replace(DRIVE_LETTER_START_REGEX, (r) => r.toUpperCase());
8
+ }
9
+ function correctPaths(path) {
10
+ if (!path || path.length === 0) return ".";
11
+ path = normalizeWindowsPath(path);
12
+ const isUNCPath = path.match(UNC_REGEX);
13
+ const isPathAbsolute = isAbsolute(path);
14
+ const trailingSeparator = path[path.length - 1] === "/";
15
+ path = normalizeString(path, !isPathAbsolute);
16
+ if (path.length === 0) {
17
+ if (isPathAbsolute) return "/";
18
+ return trailingSeparator ? "./" : ".";
19
+ }
20
+ if (trailingSeparator) path += "/";
21
+ if (DRIVE_LETTER_REGEX.test(path)) path += "/";
22
+ if (isUNCPath) {
23
+ if (!isPathAbsolute) return `//./${path}`;
24
+ return `//${path}`;
25
+ }
26
+ return isPathAbsolute && !isAbsolute(path) ? `/${path}` : path;
27
+ }
28
+ /**
29
+ * Joins all given path segments together using the platform-specific separator as a delimiter.
30
+ * The resulting path is normalized to remove any redundant or unnecessary segments.
31
+ *
32
+ * @param segments - The path segments to join.
33
+ * @returns The joined and normalized path string.
34
+ */
35
+ function joinPaths(...segments) {
36
+ let path = "";
37
+ for (const seg of segments) {
38
+ if (!seg) continue;
39
+ if (path.length > 0) {
40
+ const pathTrailing = path[path.length - 1] === "/";
41
+ const segLeading = seg[0] === "/";
42
+ if (pathTrailing && segLeading) path += seg.slice(1);
43
+ else path += pathTrailing || segLeading ? seg : `/${seg}`;
44
+ } else path += seg;
45
+ }
46
+ return correctPaths(path);
47
+ }
48
+ /**
49
+ * Resolves a string path, resolving '.' and '.' segments and allowing paths above the root.
50
+ *
51
+ * @param path - The path to normalize.
52
+ * @param allowAboveRoot - Whether to allow the resulting path to be above the root directory.
53
+ * @returns the normalized path string.
54
+ */
55
+ function normalizeString(path, allowAboveRoot) {
56
+ let res = "";
57
+ let lastSegmentLength = 0;
58
+ let lastSlash = -1;
59
+ let dots = 0;
60
+ let char = null;
61
+ for (let index = 0; index <= path.length; ++index) {
62
+ if (index < path.length) char = path[index];
63
+ else if (char === "/") break;
64
+ else char = "/";
65
+ if (char === "/") {
66
+ if (lastSlash === index - 1 || dots === 1) {} else if (dots === 2) {
67
+ if (res.length < 2 || lastSegmentLength !== 2 || res[res.length - 1] !== "." || res[res.length - 2] !== ".") {
68
+ if (res.length > 2) {
69
+ const lastSlashIndex = res.lastIndexOf("/");
70
+ if (lastSlashIndex === -1) {
71
+ res = "";
72
+ lastSegmentLength = 0;
73
+ } else {
74
+ res = res.slice(0, lastSlashIndex);
75
+ lastSegmentLength = res.length - 1 - res.lastIndexOf("/");
76
+ }
77
+ lastSlash = index;
78
+ dots = 0;
79
+ continue;
80
+ } else if (res.length > 0) {
81
+ res = "";
82
+ lastSegmentLength = 0;
83
+ lastSlash = index;
84
+ dots = 0;
85
+ continue;
86
+ }
87
+ }
88
+ if (allowAboveRoot) {
89
+ res += res.length > 0 ? "/.." : "..";
90
+ lastSegmentLength = 2;
91
+ }
92
+ } else {
93
+ if (res.length > 0) res += `/${path.slice(lastSlash + 1, index)}`;
94
+ else res = path.slice(lastSlash + 1, index);
95
+ lastSegmentLength = index - lastSlash - 1;
96
+ }
97
+ lastSlash = index;
98
+ dots = 0;
99
+ } else if (char === "." && dots !== -1) ++dots;
100
+ else dots = -1;
101
+ }
102
+ return res;
103
+ }
104
+
105
+ //#endregion
106
+ export { joinPaths };
2
107
  //# sourceMappingURL=join-paths.mjs.map