@lage-run/hasher 0.2.2 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. package/CHANGELOG.json +37 -1
  2. package/CHANGELOG.md +11 -2
  3. package/lib/FileHasher.d.ts +13 -0
  4. package/lib/FileHasher.js +181 -0
  5. package/lib/PackageTree.d.ts +20 -0
  6. package/lib/PackageTree.js +178 -0
  7. package/lib/TargetHasher.d.ts +47 -0
  8. package/lib/TargetHasher.js +218 -0
  9. package/lib/__tests__/TargetHasher.test.js +128 -0
  10. package/lib/__tests__/getPackageDeps.test.js +70 -67
  11. package/lib/__tests__/resolveDependenciesHelper.js +19 -13
  12. package/lib/__tests__/resolveExternalDependencies.test.js +16 -16
  13. package/lib/__tests__/resolveInternalDependencies.test.js +12 -12
  14. package/lib/getPackageDeps.js +25 -15
  15. package/lib/hashStrings.d.ts +1 -0
  16. package/lib/hashStrings.js +28 -0
  17. package/lib/index.d.ts +1 -14
  18. package/lib/index.js +6 -70
  19. package/lib/nameAtVersion.d.ts +1 -0
  20. package/lib/nameAtVersion.js +13 -0
  21. package/lib/resolveExternalDependencies.js +18 -12
  22. package/lib/resolveInternalDependencies.js +8 -4
  23. package/package.json +9 -5
  24. package/lib/__tests__/createPackageHashes.test.js +0 -44
  25. package/lib/__tests__/getRepoDeps.test.d.ts +0 -1
  26. package/lib/__tests__/getRepoDeps.test.js +0 -253
  27. package/lib/__tests__/getRepoState.test.d.ts +0 -1
  28. package/lib/__tests__/getRepoState.test.js +0 -104
  29. package/lib/__tests__/hashOfFiles.test.d.ts +0 -1
  30. package/lib/__tests__/hashOfFiles.test.js +0 -103
  31. package/lib/__tests__/helpers.test.d.ts +0 -1
  32. package/lib/__tests__/helpers.test.js +0 -28
  33. package/lib/__tests__/index.test.d.ts +0 -1
  34. package/lib/__tests__/index.test.js +0 -99
  35. package/lib/createPackageHashes.d.ts +0 -4
  36. package/lib/createPackageHashes.js +0 -48
  37. package/lib/getRepoState.d.ts +0 -76
  38. package/lib/getRepoState.js +0 -256
  39. package/lib/hashOfFiles.d.ts +0 -14
  40. package/lib/hashOfFiles.js +0 -71
  41. package/lib/hashOfPackage.d.ts +0 -9
  42. package/lib/hashOfPackage.js +0 -65
  43. package/lib/helpers.d.ts +0 -3
  44. package/lib/helpers.js +0 -47
  45. package/lib/repoInfo.d.ts +0 -26
  46. package/lib/repoInfo.js +0 -65
  47. package/lib/__tests__/{createPackageHashes.test.d.ts → TargetHasher.test.d.ts} +0 -0
package/CHANGELOG.json CHANGED
@@ -2,7 +2,43 @@
  "name": "@lage-run/hasher",
  "entries": [
  {
- "date": "Wed, 29 Mar 2023 20:02:22 GMT",
+ "date": "Mon, 08 May 2023 22:26:52 GMT",
+ "tag": "@lage-run/hasher_v1.0.0",
+ "version": "1.0.0",
+ "comments": {
+ "major": [
+ {
+ "author": "kchau@microsoft.com",
+ "package": "@lage-run/hasher",
+ "commit": "5a132808f166179bc316a279c9e11a13d3a39103",
+ "comment": "Added FileHasher, PackageTree, and its own TargetHasher"
+ },
+ {
+ "author": "beachball",
+ "package": "@lage-run/hasher",
+ "comment": "Bump @lage-run/target-graph to v0.8.6",
+ "commit": "5a132808f166179bc316a279c9e11a13d3a39103"
+ }
+ ]
+ }
+ },
+ {
+ "date": "Thu, 30 Mar 2023 17:47:47 GMT",
+ "tag": "@lage-run/hasher_v0.2.2",
+ "version": "0.2.2",
+ "comments": {
+ "none": [
+ {
+ "author": "kchau@microsoft.com",
+ "package": "@lage-run/hasher",
+ "commit": "2f641860d4a6a97708a94823fa56c507ae00e2c9",
+ "comment": "updated hasher tests not to depend on order"
+ }
+ ]
+ }
+ },
+ {
+ "date": "Wed, 29 Mar 2023 20:02:40 GMT",
  "tag": "@lage-run/hasher_v0.2.2",
  "version": "0.2.2",
  "comments": {
package/CHANGELOG.md CHANGED
@@ -1,12 +1,21 @@
  # Change Log - @lage-run/hasher

- This log was last generated on Wed, 29 Mar 2023 20:02:22 GMT and should not be manually modified.
+ This log was last generated on Mon, 08 May 2023 22:26:52 GMT and should not be manually modified.

  <!-- Start content -->

+ ## 1.0.0
+
+ Mon, 08 May 2023 22:26:52 GMT
+
+ ### Major changes
+
+ - Added FileHasher, PackageTree, and its own TargetHasher (kchau@microsoft.com)
+ - Bump @lage-run/target-graph to v0.8.6
+
  ## 0.2.2

- Wed, 29 Mar 2023 20:02:22 GMT
+ Wed, 29 Mar 2023 20:02:40 GMT

  ### Patches

package/lib/FileHasher.d.ts ADDED
@@ -0,0 +1,13 @@
+ interface FileHashStoreOptions {
+ root: string;
+ }
+ export declare class FileHasher {
+ #private;
+ private options;
+ constructor(options: FileHashStoreOptions);
+ getHashesFromGit(): Promise<void>;
+ readManifest(): Promise<void>;
+ writeManifest(): void;
+ hash(files: string[]): Record<string, string>;
+ }
+ export {};
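
The declaration above describes a file-hashing cache keyed by relative path. Below is a hypothetical usage sketch written against just this declared API; the import path, repo root, and file list are illustrative, not taken from the package.

```ts
// Hypothetical usage of FileHasher based on the .d.ts above.
import { FileHasher } from "@lage-run/hasher/lib/FileHasher.js"; // illustrative path

async function main() {
  const hasher = new FileHasher({ root: "/repos/my-monorepo" }); // assumed repo root

  // Loads node_modules/.cache/lage/file_hashes.manifest, or seeds it from git hashes on first run
  await hasher.readManifest();

  // Returns relative path -> content hash; unchanged files are served from the cache
  const hashes = hasher.hash(["packages/app/src/index.ts", "packages/app/package.json"]);
  console.log(hashes);

  // Persists the updated cache for the next run
  hasher.writeManifest();
}

main();
```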
package/lib/FileHasher.js ADDED
@@ -0,0 +1,181 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", {
+ value: true
+ });
+ Object.defineProperty(exports, "FileHasher", {
+ enumerable: true,
+ get: function() {
+ return FileHasher;
+ }
+ });
+ const _gracefulfs = /*#__PURE__*/ _interop_require_default(require("graceful-fs"));
+ const _path = /*#__PURE__*/ _interop_require_default(require("path"));
+ const _globhasher = require("glob-hasher");
+ const _nodereadline = require("node:readline");
+ const _getPackageDeps = require("./getPackageDeps.js");
+ function _check_private_redeclaration(obj, privateCollection) {
+ if (privateCollection.has(obj)) {
+ throw new TypeError("Cannot initialize the same private elements twice on an object");
+ }
+ }
+ function _class_apply_descriptor_get(receiver, descriptor) {
+ if (descriptor.get) {
+ return descriptor.get.call(receiver);
+ }
+ return descriptor.value;
+ }
+ function _class_apply_descriptor_set(receiver, descriptor, value) {
+ if (descriptor.set) {
+ descriptor.set.call(receiver, value);
+ } else {
+ if (!descriptor.writable) {
+ throw new TypeError("attempted to set read only private field");
+ }
+ descriptor.value = value;
+ }
+ }
+ function _class_extract_field_descriptor(receiver, privateMap, action) {
+ if (!privateMap.has(receiver)) {
+ throw new TypeError("attempted to " + action + " private field on non-instance");
+ }
+ return privateMap.get(receiver);
+ }
+ function _class_private_field_get(receiver, privateMap) {
+ var descriptor = _class_extract_field_descriptor(receiver, privateMap, "get");
+ return _class_apply_descriptor_get(receiver, descriptor);
+ }
+ function _class_private_field_init(obj, privateMap, value) {
+ _check_private_redeclaration(obj, privateMap);
+ privateMap.set(obj, value);
+ }
+ function _class_private_field_set(receiver, privateMap, value) {
+ var descriptor = _class_extract_field_descriptor(receiver, privateMap, "set");
+ _class_apply_descriptor_set(receiver, descriptor, value);
+ return value;
+ }
+ function _define_property(obj, key, value) {
+ if (key in obj) {
+ Object.defineProperty(obj, key, {
+ value: value,
+ enumerable: true,
+ configurable: true,
+ writable: true
+ });
+ } else {
+ obj[key] = value;
+ }
+ return obj;
+ }
+ function _interop_require_default(obj) {
+ return obj && obj.__esModule ? obj : {
+ default: obj
+ };
+ }
+ var _store = /*#__PURE__*/ new WeakMap(), _manifestFile = /*#__PURE__*/ new WeakMap();
+ class FileHasher {
+ async getHashesFromGit() {
+ const { root } = this.options;
+ const fileHashes = await (0, _getPackageDeps.getPackageDeps)(root);
+ const files = [
+ ...fileHashes.keys()
+ ];
+ const fileStats = (0, _globhasher.stat)(files, {
+ cwd: root
+ }) ?? {};
+ for (const [relativePath, fileStat] of Object.entries(fileStats)){
+ const hash = fileHashes.get(relativePath);
+ if (hash) {
+ const { size , mtime } = fileStat;
+ _class_private_field_get(this, _store)[relativePath] = {
+ hash,
+ size,
+ mtime
+ };
+ }
+ }
+ this.writeManifest();
+ }
+ async readManifest() {
+ return new Promise((resolve)=>{
+ if (!_gracefulfs.default.existsSync(_class_private_field_get(this, _manifestFile))) {
+ this.getHashesFromGit().then(()=>resolve());
+ return;
+ }
+ const inputStream = _gracefulfs.default.createReadStream(_class_private_field_get(this, _manifestFile), "utf-8");
+ const rl = (0, _nodereadline.createInterface)({
+ input: inputStream,
+ crlfDelay: Infinity
+ });
+ rl.on("line", (line)=>{
+ const [relativePath, mtimeStr, sizeStr, hash] = line.split("\0");
+ _class_private_field_get(this, _store)[relativePath] = {
+ mtime: BigInt(mtimeStr),
+ size: parseInt(sizeStr),
+ hash
+ };
+ });
+ inputStream.on("end", ()=>{
+ rl.close();
+ resolve();
+ });
+ });
+ }
+ writeManifest() {
+ _gracefulfs.default.mkdirSync(_path.default.dirname(_class_private_field_get(this, _manifestFile)), {
+ recursive: true
+ });
+ const outputStream = _gracefulfs.default.createWriteStream(_class_private_field_get(this, _manifestFile), "utf-8");
+ for (const [relativePath, info] of Object.entries(_class_private_field_get(this, _store))){
+ outputStream.write(`${relativePath}\0${info.mtime.toString()}\0${info.size.toString()}\0${info.hash}\n`);
+ }
+ outputStream.end();
+ }
+ hash(files) {
+ const hashes = {};
+ const updatedFiles = [];
+ const stats = (0, _globhasher.stat)(files, {
+ cwd: this.options.root
+ }) ?? {};
+ for (const file of files){
+ const stat = stats[file];
+ const info = _class_private_field_get(this, _store)[file];
+ if (info && stat.mtime === info.mtime && stat.size == info.size) {
+ hashes[file] = info.hash;
+ } else {
+ updatedFiles.push(file);
+ }
+ }
+ const updatedHashes = (0, _globhasher.hash)(updatedFiles, {
+ cwd: this.options.root,
+ concurrency: 4
+ }) ?? {};
+ for (const [file, hash] of Object.entries(updatedHashes)){
+ const stat = _gracefulfs.default.statSync(_path.default.join(this.options.root, file), {
+ bigint: true
+ });
+ _class_private_field_get(this, _store)[file] = {
+ mtime: stat.mtimeMs,
+ size: Number(stat.size),
+ hash
+ };
+ hashes[file] = hash;
+ }
+ return hashes;
+ }
+ constructor(options){
+ _define_property(this, "options", void 0);
+ _class_private_field_init(this, _store, {
+ writable: true,
+ value: void 0
+ });
+ _class_private_field_init(this, _manifestFile, {
+ writable: true,
+ value: void 0
+ });
+ this.options = options;
+ _class_private_field_set(this, _store, {});
+ const { root } = options;
+ const cacheDirectory = _path.default.join(root, "node_modules", ".cache", "lage");
+ _class_private_field_set(this, _manifestFile, _path.default.join(cacheDirectory, "file_hashes.manifest"));
+ }
+ }
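
The interesting part of the implementation is `hash()`: a file whose mtime and size match the manifest entry reuses its stored hash, and only the remaining files are re-hashed via `glob-hasher`. A minimal standalone sketch of that cache check, with invented names (not the package's code):

```ts
interface FileInfo { mtime: bigint; size: number; hash: string; }

// Split the requested files into cache hits and files that need re-hashing,
// using mtime + size as a cheap change detector (same policy as FileHasher.hash).
function splitByCache(
  files: string[],
  store: Record<string, FileInfo>,
  stats: Record<string, { mtime: bigint; size: number }>
): { cached: Record<string, string>; stale: string[] } {
  const cached: Record<string, string> = {};
  const stale: string[] = [];
  for (const file of files) {
    const info = store[file];
    const stat = stats[file];
    if (info && stat && stat.mtime === info.mtime && stat.size === info.size) {
      cached[file] = info.hash; // unchanged: reuse the manifest hash
    } else {
      stale.push(file); // changed or unknown: re-hash the content
    }
  }
  return { cached, stale };
}
```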
package/lib/PackageTree.d.ts ADDED
@@ -0,0 +1,20 @@
+ import { type PackageInfos } from "workspace-tools";
+ export interface PackageTreeOptions {
+ root: string;
+ packageInfos: PackageInfos;
+ includeUntracked: boolean;
+ }
+ /**
+ * Package Tree keeps a data structure to quickly find all files in a package.
+ *
+ * TODO: add a watcher to make sure the tree is up to date during a "watched" run.
+ */
+ export declare class PackageTree {
+ #private;
+ private options;
+ constructor(options: PackageTreeOptions);
+ reset(): void;
+ initialize(): Promise<void>;
+ addToPackageTree(filePaths: string[]): Promise<void>;
+ getPackageFiles(packageName: string, patterns: string[]): string[];
+ }
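
Per its doc comment, `PackageTree` indexes workspace files by package directory so that per-package input globs can be resolved quickly. A hypothetical usage sketch against the declared API; the repo root, package name, and patterns are invented, and the import path is illustrative:

```ts
import { getPackageInfos } from "workspace-tools";
import { PackageTree } from "@lage-run/hasher/lib/PackageTree.js"; // illustrative path

async function listSourceFiles() {
  const root = "/repos/my-monorepo"; // assumed repo root
  const tree = new PackageTree({
    root,
    packageInfos: getPackageInfos(root),
    includeUntracked: true, // also index files git knows about but hasn't committed
  });

  // Runs `git ls-files` under the hood and builds the package -> files index
  await tree.initialize();

  // micromatch-style patterns, resolved relative to the package directory
  const files = tree.getPackageFiles("my-app", ["src/**/*", "!**/*.test.*"]);
  console.log(files);
}
```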
package/lib/PackageTree.js ADDED
@@ -0,0 +1,178 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", {
+ value: true
+ });
+ Object.defineProperty(exports, "PackageTree", {
+ enumerable: true,
+ get: function() {
+ return PackageTree;
+ }
+ });
+ const _execa = /*#__PURE__*/ _interop_require_default(require("execa"));
+ const _path = /*#__PURE__*/ _interop_require_default(require("path"));
+ const _micromatch = /*#__PURE__*/ _interop_require_default(require("micromatch"));
+ function _check_private_redeclaration(obj, privateCollection) {
+ if (privateCollection.has(obj)) {
+ throw new TypeError("Cannot initialize the same private elements twice on an object");
+ }
+ }
+ function _class_apply_descriptor_get(receiver, descriptor) {
+ if (descriptor.get) {
+ return descriptor.get.call(receiver);
+ }
+ return descriptor.value;
+ }
+ function _class_apply_descriptor_set(receiver, descriptor, value) {
+ if (descriptor.set) {
+ descriptor.set.call(receiver, value);
+ } else {
+ if (!descriptor.writable) {
+ throw new TypeError("attempted to set read only private field");
+ }
+ descriptor.value = value;
+ }
+ }
+ function _class_extract_field_descriptor(receiver, privateMap, action) {
+ if (!privateMap.has(receiver)) {
+ throw new TypeError("attempted to " + action + " private field on non-instance");
+ }
+ return privateMap.get(receiver);
+ }
+ function _class_private_field_get(receiver, privateMap) {
+ var descriptor = _class_extract_field_descriptor(receiver, privateMap, "get");
+ return _class_apply_descriptor_get(receiver, descriptor);
+ }
+ function _class_private_field_init(obj, privateMap, value) {
+ _check_private_redeclaration(obj, privateMap);
+ privateMap.set(obj, value);
+ }
+ function _class_private_field_set(receiver, privateMap, value) {
+ var descriptor = _class_extract_field_descriptor(receiver, privateMap, "set");
+ _class_apply_descriptor_set(receiver, descriptor, value);
+ return value;
+ }
+ function _define_property(obj, key, value) {
+ if (key in obj) {
+ Object.defineProperty(obj, key, {
+ value: value,
+ enumerable: true,
+ configurable: true,
+ writable: true
+ });
+ } else {
+ obj[key] = value;
+ }
+ return obj;
+ }
+ function _interop_require_default(obj) {
+ return obj && obj.__esModule ? obj : {
+ default: obj
+ };
+ }
+ var _tree = /*#__PURE__*/ new WeakMap(), _packageFiles = /*#__PURE__*/ new WeakMap(), _memoizedPackageFiles = /*#__PURE__*/ new WeakMap();
+ class PackageTree {
+ reset() {
+ _class_private_field_set(this, _tree, {});
+ _class_private_field_set(this, _packageFiles, {});
+ _class_private_field_set(this, _memoizedPackageFiles, {});
+ }
+ async initialize() {
+ const { root , includeUntracked , packageInfos } = this.options;
+ this.reset();
+ // Generate path tree of all packages in workspace (scale: ~2000 * ~3)
+ for (const info of Object.values(packageInfos)){
+ const packagePath = _path.default.dirname(info.packageJsonPath);
+ const pathParts = _path.default.relative(root, packagePath).split(/[\\/]/);
+ let currentNode = _class_private_field_get(this, _tree);
+ for (const part of pathParts){
+ currentNode[part] = currentNode[part] || {};
+ currentNode = currentNode[part];
+ }
+ }
+ // Get all files in the workspace (scale: ~2000) according to git
+ const lsFilesResults = await (0, _execa.default)("git", [
+ "ls-files",
+ "-z"
+ ], {
+ cwd: root
+ });
+ if (lsFilesResults.exitCode === 0) {
+ const files = lsFilesResults.stdout.split("\0").filter(Boolean);
+ this.addToPackageTree(files);
+ }
+ if (includeUntracked) {
+ // Also get all untracked files in the workspace according to git
+ const lsOtherResults = await (0, _execa.default)("git", [
+ "ls-files",
+ "-o",
+ "--exclude-standard"
+ ], {
+ cwd: root
+ });
+ if (lsOtherResults.exitCode === 0) {
+ const files = lsOtherResults.stdout.split("\0").filter(Boolean);
+ this.addToPackageTree(files);
+ }
+ }
+ }
+ async addToPackageTree(filePaths) {
+ // key: path/to/package (packageRoot), value: array of a tuple of [file, hash]
+ const packageFiles = _class_private_field_get(this, _packageFiles);
+ for (const entry of filePaths){
+ const pathParts = entry.split(/[\\/]/);
+ let node = _class_private_field_get(this, _tree);
+ const packagePathParts = [];
+ for (const part of pathParts){
+ if (node[part]) {
+ node = node[part];
+ packagePathParts.push(part);
+ } else {
+ break;
+ }
+ }
+ const packageRoot = packagePathParts.join("/");
+ packageFiles[packageRoot] = packageFiles[packageRoot] || [];
+ packageFiles[packageRoot].push(entry);
+ }
+ }
+ getPackageFiles(packageName, patterns) {
+ const { root , packageInfos } = this.options;
+ const packagePath = _path.default.relative(root, _path.default.dirname(packageInfos[packageName].packageJsonPath)).replace(/\\/g, "/");
+ const packageFiles = _class_private_field_get(this, _packageFiles)[packagePath];
+ if (!packageFiles) {
+ return [];
+ }
+ const key = `${packageName}\0${patterns.join("\0")}`;
+ if (!_class_private_field_get(this, _memoizedPackageFiles)[key]) {
+ const packagePatterns = patterns.map((pattern)=>{
+ if (pattern.startsWith("!")) {
+ return `!${_path.default.join(packagePath, pattern.slice(1)).replace(/\\/g, "/")}`;
+ }
+ return _path.default.join(packagePath, pattern).replace(/\\/g, "/");
+ });
+ _class_private_field_get(this, _memoizedPackageFiles)[key] = (0, _micromatch.default)(packageFiles, packagePatterns, {
+ dot: true
+ });
+ }
+ return _class_private_field_get(this, _memoizedPackageFiles)[key];
+ }
+ constructor(options){
+ _define_property(this, "options", void 0);
+ _class_private_field_init(this, _tree, {
+ writable: true,
+ value: void 0
+ });
+ _class_private_field_init(this, _packageFiles, {
+ writable: true,
+ value: void 0
+ });
+ _class_private_field_init(this, _memoizedPackageFiles, {
+ writable: true,
+ value: void 0
+ });
+ this.options = options;
+ _class_private_field_set(this, _tree, {});
+ _class_private_field_set(this, _packageFiles, {});
+ _class_private_field_set(this, _memoizedPackageFiles, {});
+ }
+ }
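
`addToPackageTree` assigns each file to its owning package by walking a trie built from package directory segments and keeping the deepest matching prefix, instead of scanning every package per file. A self-contained sketch of that idea (hypothetical helper names, not the package's code):

```ts
type TrieNode = { [segment: string]: TrieNode };

// Build a trie from package directory paths (relative to the repo root).
function buildTrie(packageDirs: string[]): TrieNode {
  const root: TrieNode = {};
  for (const dir of packageDirs) {
    let node = root;
    for (const part of dir.split(/[\\/]/)) {
      node = node[part] ??= {};
    }
  }
  return root;
}

// Walk a file path down the trie; the matched prefix is the owning package root.
function packageRootForFile(trie: TrieNode, filePath: string): string {
  const matched: string[] = [];
  let node = trie;
  for (const part of filePath.split(/[\\/]/)) {
    if (!node[part]) break;
    node = node[part];
    matched.push(part);
  }
  return matched.join("/"); // "" means the file belongs to the repo root
}

// Example: files under packages/app map to "packages/app"
const trie = buildTrie(["packages/app", "packages/lib"]);
console.log(packageRootForFile(trie, "packages/app/src/index.ts")); // "packages/app"
```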
package/lib/TargetHasher.d.ts ADDED
@@ -0,0 +1,47 @@
+ import type { Target } from "@lage-run/target-graph";
+ import { type ParsedLock, type WorkspaceInfo, type PackageInfos } from "workspace-tools";
+ import type { DependencyMap } from "workspace-tools/lib/graph/createDependencyMap.js";
+ import { FileHasher } from "./FileHasher.js";
+ import { PackageTree } from "./PackageTree.js";
+ export interface TargetHasherOptions {
+ root: string;
+ environmentGlob: string[];
+ cacheKey?: string;
+ cliArgs?: string[];
+ }
+ export interface TargetManifest {
+ id: string;
+ hash: string;
+ globalInputsHash: string;
+ dependency: Record<string, string>;
+ fileHasher: FileHasher;
+ files: Record<string, {
+ mtimeMs: number;
+ size: number;
+ hash: string;
+ }>;
+ }
+ /**
+ * TargetHasher is a class that can be used to generate a hash of a target.
+ *
+ * Currently, it encapsulates the use of `backfill-hasher` to generate a hash.
+ */
+ export declare class TargetHasher {
+ private options;
+ fileHasher: FileHasher;
+ packageTree: PackageTree | undefined;
+ initializedPromise: Promise<unknown> | undefined;
+ packageInfos: PackageInfos;
+ workspaceInfo: WorkspaceInfo | undefined;
+ globalInputsHash: Record<string, string> | undefined;
+ lockInfo: ParsedLock | undefined;
+ targetHashes: Record<string, string>;
+ dependencyMap: DependencyMap;
+ getPackageInfos(workspacePackages: WorkspaceInfo): PackageInfos;
+ expandInputPatterns(patterns: string[], target: Target): Record<string, string[]>;
+ constructor(options: TargetHasherOptions);
+ ensureInitialized(): void;
+ initialize(): Promise<void>;
+ hash(target: Target): Promise<string>;
+ cleanup(): Promise<void>;
+ }
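
Putting the pieces together, a consumer would construct one `TargetHasher` per workspace, initialize it once, and then hash individual targets from the target graph. A hypothetical sketch based only on the declaration above; the import path, option values, and the behavioral comments are assumptions, not documented behavior:

```ts
import { TargetHasher } from "@lage-run/hasher"; // illustrative import path
import type { Target } from "@lage-run/target-graph";

async function hashTarget(target: Target): Promise<string> {
  const hasher = new TargetHasher({
    root: "/repos/my-monorepo",          // assumed workspace root
    environmentGlob: ["lage.config.js"], // assumed: repo-wide files folded into every hash
    cacheKey: "v1",                      // optional extra salt for the hash
  });

  // Presumably loads package infos, the lockfile, and global inputs up front
  await hasher.initialize();

  // Produces a hash for this target's inputs
  const hash = await hasher.hash(target);

  // Presumably persists the FileHasher manifest (see FileHasher.writeManifest)
  await hasher.cleanup();
  return hash;
}
```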