@lage-run/hasher 0.1.1 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.json +16 -1
- package/CHANGELOG.md +10 -2
- package/lib/__tests__/createPackageHashes.test.js +13 -12
- package/lib/__tests__/getPackageDeps.test.js +149 -131
- package/lib/__tests__/getRepoDeps.test.js +106 -91
- package/lib/__tests__/getRepoState.test.js +70 -33
- package/lib/__tests__/hashOfFiles.test.js +71 -68
- package/lib/__tests__/helpers.test.js +12 -11
- package/lib/__tests__/index.test.js +50 -39
- package/lib/__tests__/resolveDependenciesHelper.js +44 -25
- package/lib/__tests__/resolveExternalDependencies.test.js +114 -68
- package/lib/__tests__/resolveInternalDependencies.test.js +98 -67
- package/lib/createPackageHashes.js +27 -20
- package/lib/getPackageDeps.js +118 -125
- package/lib/getRepoState.js +86 -101
- package/lib/hashOfFiles.js +59 -53
- package/lib/hashOfPackage.js +41 -28
- package/lib/helpers.js +35 -19
- package/lib/index.js +50 -41
- package/lib/repoInfo.js +26 -29
- package/lib/resolveExternalDependencies.js +32 -22
- package/lib/resolveInternalDependencies.js +16 -8
- package/package.json +2 -2
- package/lib/__fixtures__/config/backfill.config.js +0 -5
- package/lib/__fixtures__/config/backfill.config.js.map +0 -1
- package/lib/__fixtures__/config/packages/package-1/backfill.config.js +0 -2
- package/lib/__fixtures__/config/packages/package-1/backfill.config.js.map +0 -1
- package/lib/__tests__/createPackageHashes.test.js.map +0 -1
- package/lib/__tests__/getPackageDeps.test.js.map +0 -1
- package/lib/__tests__/getRepoDeps.test.js.map +0 -1
- package/lib/__tests__/getRepoState.test.js.map +0 -1
- package/lib/__tests__/hashOfFiles.test.js.map +0 -1
- package/lib/__tests__/helpers.test.js.map +0 -1
- package/lib/__tests__/index.test.js.map +0 -1
- package/lib/__tests__/resolveDependenciesHelper.js.map +0 -1
- package/lib/__tests__/resolveExternalDependencies.test.js.map +0 -1
- package/lib/__tests__/resolveInternalDependencies.test.js.map +0 -1
- package/lib/createPackageHashes.js.map +0 -1
- package/lib/getPackageDeps.js.map +0 -1
- package/lib/getRepoState.js.map +0 -1
- package/lib/hashOfFiles.js.map +0 -1
- package/lib/hashOfPackage.js.map +0 -1
- package/lib/helpers.js.map +0 -1
- package/lib/index.js.map +0 -1
- package/lib/repoInfo.js.map +0 -1
- package/lib/resolveExternalDependencies.js.map +0 -1
- package/lib/resolveInternalDependencies.js.map +0 -1
package/lib/__tests__/getRepoDeps.test.js
@@ -1,46 +1,67 @@
 "use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
+Object.defineProperty(exports, "__esModule", {
+    value: true
 });
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const path = __importStar(require("path"));
-const fs = __importStar(require("fs"));
-const child_process_1 = require("child_process");
-const getRepoState_1 = require("../getRepoState");
-const SOURCE_PATH = path.join(__dirname).replace(path.join("lib", "__tests__"), path.join("src", "__tests__"));
+const _path = /*#__PURE__*/ _interopRequireWildcard(require("path"));
+const _fs = /*#__PURE__*/ _interopRequireWildcard(require("fs"));
+const _childProcess = require("child_process");
+const _getRepoState = require("../getRepoState");
+function _getRequireWildcardCache(nodeInterop) {
+    if (typeof WeakMap !== "function") return null;
+    var cacheBabelInterop = new WeakMap();
+    var cacheNodeInterop = new WeakMap();
+    return (_getRequireWildcardCache = function(nodeInterop) {
+        return nodeInterop ? cacheNodeInterop : cacheBabelInterop;
+    })(nodeInterop);
+}
+function _interopRequireWildcard(obj, nodeInterop) {
+    if (!nodeInterop && obj && obj.__esModule) {
+        return obj;
+    }
+    if (obj === null || typeof obj !== "object" && typeof obj !== "function") {
+        return {
+            default: obj
+        };
+    }
+    var cache = _getRequireWildcardCache(nodeInterop);
+    if (cache && cache.has(obj)) {
+        return cache.get(obj);
+    }
+    var newObj = {};
+    var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor;
+    for(var key in obj){
+        if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) {
+            var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null;
+            if (desc && (desc.get || desc.set)) {
+                Object.defineProperty(newObj, key, desc);
+            } else {
+                newObj[key] = obj[key];
+            }
+        }
+    }
+    newObj.default = obj;
+    if (cache) {
+        cache.set(obj, newObj);
+    }
+    return newObj;
+}
+const SOURCE_PATH = _path.join(__dirname).replace(_path.join("lib", "__tests__"), _path.join("src", "__tests__"));
 const TEST_PREFIX = `packages/hasher/src/__tests__/`;
-const TEST_PROJECT_PATH = path.join(SOURCE_PATH, "testProject");
-const FILTERS = [`testProject/`, `nestedTestProject/`];
+const TEST_PROJECT_PATH = _path.join(SOURCE_PATH, "testProject");
+const FILTERS = [
+    `testProject/`,
+    `nestedTestProject/`
+];
 const FileSystem = {
-    writeFile: fs.writeFileSync,
-    deleteFile: fs.rmSync,
+    writeFile: _fs.writeFileSync,
+    deleteFile: _fs.rmSync
 };
 function getRelevantEntries(results) {
     const relevantResults = new Map();
-    for (const [key, hash] of results) {
+    for (const [key, hash] of results){
         if (key.startsWith(TEST_PREFIX)) {
             const partialKey = key.slice(TEST_PREFIX.length);
-            for (const filter of FILTERS) {
+            for (const filter of FILTERS){
                 if (partialKey.startsWith(filter)) {
                     relevantResults.set(partialKey, hash);
                 }
@@ -49,45 +70,45 @@ function getRelevantEntries(results) {
     }
     return relevantResults;
 }
-describe(getRepoState_1.getRepoRoot.name, () => {
-    it(`returns the correct directory`, () => {
-        const root = (0, getRepoState_1.getRepoRoot)(__dirname);
-        const expectedRoot = path.resolve(__dirname, "../../../..").replace(/\\/g, "/");
+describe(_getRepoState.getRepoRoot.name, ()=>{
+    it(`returns the correct directory`, ()=>{
+        const root = (0, _getRepoState.getRepoRoot)(__dirname);
+        const expectedRoot = _path.resolve(__dirname, "../../../..").replace(/\\/g, "/");
         expect(root).toEqual(expectedRoot);
     });
 });
-describe(getRepoState_1.parseGitLsTree.name, () => {
-    it("can handle a blob", () => {
+describe(_getRepoState.parseGitLsTree.name, ()=>{
+    it("can handle a blob", ()=>{
         const filename = "src/typings/tsd.d.ts";
         const hash = "3451bccdc831cb43d7a70ed8e628dcf9c7f888c8";
         const output = `100644 blob ${hash}\t${filename}\x00`;
-        const changes = (0, getRepoState_1.parseGitLsTree)(output);
+        const changes = (0, _getRepoState.parseGitLsTree)(output);
         expect(changes.size).toEqual(1); // Expect there to be exactly 1 change
         expect(changes.get(filename)).toEqual(hash); // Expect the hash to be ${hash}
     });
-    it("can handle a submodule", () => {
+    it("can handle a submodule", ()=>{
         const filename = "rushstack";
         const hash = "c5880bf5b0c6c1f2e2c43c95beeb8f0a808e8bac";
         const output = `160000 commit ${hash}\t${filename}\x00`;
-        const changes = (0, getRepoState_1.parseGitLsTree)(output);
+        const changes = (0, _getRepoState.parseGitLsTree)(output);
         expect(changes.size).toEqual(1); // Expect there to be exactly 1 change
         expect(changes.get(filename)).toEqual(hash); // Expect the hash to be ${hash}
     });
-    it("can handle multiple lines", () => {
+    it("can handle multiple lines", ()=>{
         const filename1 = "src/typings/tsd.d.ts";
         const hash1 = "3451bccdc831cb43d7a70ed8e628dcf9c7f888c8";
         const filename2 = "src/foo bar/tsd.d.ts";
         const hash2 = "0123456789abcdef1234567890abcdef01234567";
         const output = `100644 blob ${hash1}\t${filename1}\x00100666 blob ${hash2}\t${filename2}\0`;
-        const changes = (0, getRepoState_1.parseGitLsTree)(output);
+        const changes = (0, _getRepoState.parseGitLsTree)(output);
         expect(changes.size).toEqual(2); // Expect there to be exactly 2 changes
         expect(changes.get(filename1)).toEqual(hash1); // Expect the hash to be ${hash1}
         expect(changes.get(filename2)).toEqual(hash2); // Expect the hash to be ${hash2}
     });
 });
-describe(getRepoState_1.getRepoState.name, () => {
-    it("can parse committed files", () => {
-        const results = (0, getRepoState_1.getRepoState)(__dirname);
+describe(_getRepoState.getRepoState.name, ()=>{
+    it("can parse committed files", ()=>{
+        const results = (0, _getRepoState.getRepoState)(__dirname);
         const filteredResults = getRelevantEntries(results);
         const expectedFiles = new Map(Object.entries({
             "nestedTestProject/src/file 1.txt": "c7b2f707ac99ca522f965210a7b6b0b109863f34",
@@ -95,17 +116,17 @@ describe(getRepoState_1.getRepoState.name, () => {
             "testProject/file1.txt": "c7b2f707ac99ca522f965210a7b6b0b109863f34",
             "testProject/file 2.txt": "a385f754ec4fede884a4864d090064d9aeef8ccb",
             "testProject/file蝴蝶.txt": "ae814af81e16cb2ae8c57503c77e2cab6b5462ba",
-            [`testProject/package.json`]: "18a1e415e56220fa5122428a4ef8eb8874756576"
+            [`testProject/package.json`]: "18a1e415e56220fa5122428a4ef8eb8874756576"
         }));
-        for (const [filePath, hash] of expectedFiles) {
+        for (const [filePath, hash] of expectedFiles){
             expect(filteredResults.get(filePath)).toEqual(hash);
         }
         expect(filteredResults.size).toEqual(expectedFiles.size);
     });
-    it("can handle adding one file", () => {
-        const tempFilePath = path.join(TEST_PROJECT_PATH, "a.txt");
+    it("can handle adding one file", ()=>{
+        const tempFilePath = _path.join(TEST_PROJECT_PATH, "a.txt");
         FileSystem.writeFile(tempFilePath, "a");
-        const results = (0, getRepoState_1.getRepoState)(__dirname);
+        const results = (0, _getRepoState.getRepoState)(__dirname);
         const filteredResults = getRelevantEntries(results);
         try {
             const expectedFiles = new Map(Object.entries({
@@ -115,23 +136,22 @@ describe(getRepoState_1.getRepoState.name, () => {
             "testProject/file1.txt": "c7b2f707ac99ca522f965210a7b6b0b109863f34",
             "testProject/file 2.txt": "a385f754ec4fede884a4864d090064d9aeef8ccb",
             "testProject/file蝴蝶.txt": "ae814af81e16cb2ae8c57503c77e2cab6b5462ba",
-            [`testProject/package.json`]: "18a1e415e56220fa5122428a4ef8eb8874756576"
+            [`testProject/package.json`]: "18a1e415e56220fa5122428a4ef8eb8874756576"
         }));
-        for (const [filePath, hash] of expectedFiles) {
+        for (const [filePath, hash] of expectedFiles){
             expect(filteredResults.get(filePath)).toEqual(hash);
         }
         expect(filteredResults.size).toEqual(expectedFiles.size);
-        }
-        finally {
+        } finally{
             FileSystem.deleteFile(tempFilePath);
         }
     });
-    it("can handle adding two files", () => {
-        const tempFilePath1 = path.join(TEST_PROJECT_PATH, "a.txt");
-        const tempFilePath2 = path.join(TEST_PROJECT_PATH, "b.txt");
+    it("can handle adding two files", ()=>{
+        const tempFilePath1 = _path.join(TEST_PROJECT_PATH, "a.txt");
+        const tempFilePath2 = _path.join(TEST_PROJECT_PATH, "b.txt");
         FileSystem.writeFile(tempFilePath1, "a");
         FileSystem.writeFile(tempFilePath2, "a");
-        const results = (0, getRepoState_1.getRepoState)(__dirname);
+        const results = (0, _getRepoState.getRepoState)(__dirname);
         const filteredResults = getRelevantEntries(results);
         try {
             const expectedFiles = new Map(Object.entries({
@@ -142,22 +162,21 @@ describe(getRepoState_1.getRepoState.name, () => {
             "testProject/file1.txt": "c7b2f707ac99ca522f965210a7b6b0b109863f34",
             "testProject/file 2.txt": "a385f754ec4fede884a4864d090064d9aeef8ccb",
             "testProject/file蝴蝶.txt": "ae814af81e16cb2ae8c57503c77e2cab6b5462ba",
-            [`testProject/package.json`]: "18a1e415e56220fa5122428a4ef8eb8874756576"
+            [`testProject/package.json`]: "18a1e415e56220fa5122428a4ef8eb8874756576"
         }));
-        for (const [filePath, hash] of expectedFiles) {
+        for (const [filePath, hash] of expectedFiles){
             expect(filteredResults.get(filePath)).toEqual(hash);
         }
         expect(filteredResults.size).toEqual(expectedFiles.size);
-        }
-        finally {
+        } finally{
             FileSystem.deleteFile(tempFilePath1);
             FileSystem.deleteFile(tempFilePath2);
         }
     });
-    it("can handle removing one file", () => {
-        const testFilePath = path.join(TEST_PROJECT_PATH, "file1.txt");
+    it("can handle removing one file", ()=>{
+        const testFilePath = _path.join(TEST_PROJECT_PATH, "file1.txt");
         FileSystem.deleteFile(testFilePath);
-        const results = (0, getRepoState_1.getRepoState)(__dirname);
+        const results = (0, _getRepoState.getRepoState)(__dirname);
         const filteredResults = getRelevantEntries(results);
         try {
             const expectedFiles = new Map(Object.entries({
@@ -165,24 +184,23 @@ describe(getRepoState_1.getRepoState.name, () => {
             [`nestedTestProject/package.json`]: "18a1e415e56220fa5122428a4ef8eb8874756576",
             "testProject/file 2.txt": "a385f754ec4fede884a4864d090064d9aeef8ccb",
             "testProject/file蝴蝶.txt": "ae814af81e16cb2ae8c57503c77e2cab6b5462ba",
-            [`testProject/package.json`]: "18a1e415e56220fa5122428a4ef8eb8874756576"
+            [`testProject/package.json`]: "18a1e415e56220fa5122428a4ef8eb8874756576"
         }));
-        for (const [filePath, hash] of expectedFiles) {
+        for (const [filePath, hash] of expectedFiles){
             expect(filteredResults.get(filePath)).toEqual(hash);
         }
         expect(filteredResults.size).toEqual(expectedFiles.size);
-        }
-        finally {
-            (0, child_process_1.execSync)(`git checkout --force HEAD -- ${TEST_PREFIX}testProject/file1.txt`, {
+        } finally{
+            (0, _childProcess.execSync)(`git checkout --force HEAD -- ${TEST_PREFIX}testProject/file1.txt`, {
                 stdio: "ignore",
-                cwd: (0, getRepoState_1.getRepoRoot)(__dirname)
+                cwd: (0, _getRepoState.getRepoRoot)(__dirname)
             });
         }
     });
-    it("can handle changing one file", () => {
-        const testFilePath = path.join(TEST_PROJECT_PATH, "file1.txt");
+    it("can handle changing one file", ()=>{
+        const testFilePath = _path.join(TEST_PROJECT_PATH, "file1.txt");
         FileSystem.writeFile(testFilePath, "abc");
-        const results = (0, getRepoState_1.getRepoState)(__dirname);
+        const results = (0, _getRepoState.getRepoState)(__dirname);
         const filteredResults = getRelevantEntries(results);
         try {
             const expectedFiles = new Map(Object.entries({
@@ -191,25 +209,24 @@ describe(getRepoState_1.getRepoState.name, () => {
             "testProject/file1.txt": "f2ba8f84ab5c1bce84a7b441cb1959cfc7093b7f",
             "testProject/file 2.txt": "a385f754ec4fede884a4864d090064d9aeef8ccb",
             "testProject/file蝴蝶.txt": "ae814af81e16cb2ae8c57503c77e2cab6b5462ba",
-            [`testProject/package.json`]: "18a1e415e56220fa5122428a4ef8eb8874756576"
+            [`testProject/package.json`]: "18a1e415e56220fa5122428a4ef8eb8874756576"
         }));
-        for (const [filePath, hash] of expectedFiles) {
+        for (const [filePath, hash] of expectedFiles){
             expect(filteredResults.get(filePath)).toEqual(hash);
         }
         expect(filteredResults.size).toEqual(expectedFiles.size);
-        }
-        finally {
+        } finally{
             FileSystem.writeFile(testFilePath, "file1.");
         }
     });
-    it("can handle uncommitted filenames with spaces and non-ASCII characters", () => {
-        const tempFilePath1 = path.join(TEST_PROJECT_PATH, "a file.txt");
-        const tempFilePath2 = path.join(TEST_PROJECT_PATH, "a file name.txt");
-        const tempFilePath3 = path.join(TEST_PROJECT_PATH, "newFile批把.txt");
+    it("can handle uncommitted filenames with spaces and non-ASCII characters", ()=>{
+        const tempFilePath1 = _path.join(TEST_PROJECT_PATH, "a file.txt");
+        const tempFilePath2 = _path.join(TEST_PROJECT_PATH, "a file name.txt");
+        const tempFilePath3 = _path.join(TEST_PROJECT_PATH, "newFile批把.txt");
         FileSystem.writeFile(tempFilePath1, "a");
         FileSystem.writeFile(tempFilePath2, "a");
         FileSystem.writeFile(tempFilePath3, "a");
-        const results = (0, getRepoState_1.getRepoState)(__dirname);
+        const results = (0, _getRepoState.getRepoState)(__dirname);
         const filteredResults = getRelevantEntries(results);
         try {
             const expectedFiles = new Map(Object.entries({
@@ -221,18 +238,16 @@ describe(getRepoState_1.getRepoState.name, () => {
             "testProject/file 2.txt": "a385f754ec4fede884a4864d090064d9aeef8ccb",
             "testProject/file蝴蝶.txt": "ae814af81e16cb2ae8c57503c77e2cab6b5462ba",
             "testProject/newFile批把.txt": "2e65efe2a145dda7ee51d1741299f848e5bf752e",
-            [`testProject/package.json`]: "18a1e415e56220fa5122428a4ef8eb8874756576"
+            [`testProject/package.json`]: "18a1e415e56220fa5122428a4ef8eb8874756576"
         }));
-        for (const [filePath, hash] of expectedFiles) {
+        for (const [filePath, hash] of expectedFiles){
             expect(filteredResults.get(filePath)).toEqual(hash);
         }
         expect(filteredResults.size).toEqual(expectedFiles.size);
-        }
-        finally {
+        } finally{
             FileSystem.deleteFile(tempFilePath1);
             FileSystem.deleteFile(tempFilePath2);
             FileSystem.deleteFile(tempFilePath3);
         }
     });
 });
-//# sourceMappingURL=getRepoDeps.test.js.map
package/lib/__tests__/getRepoState.test.js
@@ -1,67 +1,104 @@
 "use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-const getRepoState_1 = require("../getRepoState");
-describe(getRepoState_1.parseGitVersion.name, () => {
-    it("Can parse valid git version responses", () => {
-        expect((0, getRepoState_1.parseGitVersion)("git version 2.30.2.windows.1")).toEqual({
+Object.defineProperty(exports, "__esModule", {
+    value: true
+});
+const _getRepoState = require("../getRepoState");
+describe(_getRepoState.parseGitVersion.name, ()=>{
+    it("Can parse valid git version responses", ()=>{
+        expect((0, _getRepoState.parseGitVersion)("git version 2.30.2.windows.1")).toEqual({
             major: 2,
             minor: 30,
-            patch: 2
+            patch: 2
         });
-        expect((0, getRepoState_1.parseGitVersion)("git version 2.30.2.windows.1.g8b8f8e")).toEqual({
+        expect((0, _getRepoState.parseGitVersion)("git version 2.30.2.windows.1.g8b8f8e")).toEqual({
             major: 2,
             minor: 30,
-            patch: 2
+            patch: 2
         });
-        expect((0, getRepoState_1.parseGitVersion)("git version 2.30.2")).toEqual({
+        expect((0, _getRepoState.parseGitVersion)("git version 2.30.2")).toEqual({
             major: 2,
             minor: 30,
-            patch: 2
+            patch: 2
         });
     });
-    it("Rejects invalid git version responses", () => {
-        expect(() => (0, getRepoState_1.parseGitVersion)("2.22.0.windows.1")).toThrowErrorMatchingInlineSnapshot(`"While validating the Git installation, the "git version" command produced unexpected output: "2.22.0.windows.1""`);
-        expect(() => (0, getRepoState_1.parseGitVersion)("git version 2.30.A")).toThrowErrorMatchingInlineSnapshot(`"While validating the Git installation, the "git version" command produced unexpected output: "git version 2.30.A""`);
-        expect(() => (0, getRepoState_1.parseGitVersion)("git version 2.30")).toThrowErrorMatchingInlineSnapshot(`"While validating the Git installation, the "git version" command produced unexpected output: "git version 2.30""`);
-        expect(() => (0, getRepoState_1.parseGitVersion)("git version .2.30")).toThrowErrorMatchingInlineSnapshot(`"While validating the Git installation, the "git version" command produced unexpected output: "git version .2.30""`);
+    it("Rejects invalid git version responses", ()=>{
+        expect(()=>(0, _getRepoState.parseGitVersion)("2.22.0.windows.1")).toThrowErrorMatchingInlineSnapshot(`"While validating the Git installation, the "git version" command produced unexpected output: "2.22.0.windows.1""`);
+        expect(()=>(0, _getRepoState.parseGitVersion)("git version 2.30.A")).toThrowErrorMatchingInlineSnapshot(`"While validating the Git installation, the "git version" command produced unexpected output: "git version 2.30.A""`);
+        expect(()=>(0, _getRepoState.parseGitVersion)("git version 2.30")).toThrowErrorMatchingInlineSnapshot(`"While validating the Git installation, the "git version" command produced unexpected output: "git version 2.30""`);
+        expect(()=>(0, _getRepoState.parseGitVersion)("git version .2.30")).toThrowErrorMatchingInlineSnapshot(`"While validating the Git installation, the "git version" command produced unexpected output: "git version .2.30""`);
     });
 });
-describe(getRepoState_1.parseGitStatus.name, () => {
-    it("Finds index entries", () => {
-        const files = [`A.ts`, `B.ts`, `C.ts`];
-        const input = [`A  ${files[0]}`, `D  ${files[1]}`, `M  ${files[2]}`, ""].join("\0");
-        const result = (0, getRepoState_1.parseGitStatus)(input);
+describe(_getRepoState.parseGitStatus.name, ()=>{
+    it("Finds index entries", ()=>{
+        const files = [
+            `A.ts`,
+            `B.ts`,
+            `C.ts`
+        ];
+        const input = [
+            `A  ${files[0]}`,
+            `D  ${files[1]}`,
+            `M  ${files[2]}`,
+            ""
+        ].join("\0");
+        const result = (0, _getRepoState.parseGitStatus)(input);
         expect(result.size).toEqual(3);
         expect(result.get(files[0])).toEqual(true);
         expect(result.get(files[1])).toEqual(false);
         expect(result.get(files[2])).toEqual(true);
     });
-    it("Finds working tree entries", () => {
-        const files = [`A.ts`, `B.ts`, `C.ts`];
-        const input = [` A ${files[0]}`, ` D ${files[1]}`, ` M ${files[2]}`, ""].join("\0");
-        const result = (0, getRepoState_1.parseGitStatus)(input);
+    it("Finds working tree entries", ()=>{
+        const files = [
+            `A.ts`,
+            `B.ts`,
+            `C.ts`
+        ];
+        const input = [
+            ` A ${files[0]}`,
+            ` D ${files[1]}`,
+            ` M ${files[2]}`,
+            ""
+        ].join("\0");
+        const result = (0, _getRepoState.parseGitStatus)(input);
         expect(result.size).toEqual(3);
         expect(result.get(files[0])).toEqual(true);
         expect(result.get(files[1])).toEqual(false);
         expect(result.get(files[2])).toEqual(true);
     });
-    it("Can handle untracked files", () => {
-        const files = [`A.ts`, `B.ts`, `C.ts`];
-        const input = [`?? ${files[0]}`, `?? ${files[1]}`, `?? ${files[2]}`, ""].join("\0");
-        const result = (0, getRepoState_1.parseGitStatus)(input);
+    it("Can handle untracked files", ()=>{
+        const files = [
+            `A.ts`,
+            `B.ts`,
+            `C.ts`
+        ];
+        const input = [
+            `?? ${files[0]}`,
+            `?? ${files[1]}`,
+            `?? ${files[2]}`,
+            ""
+        ].join("\0");
+        const result = (0, _getRepoState.parseGitStatus)(input);
         expect(result.size).toEqual(3);
         expect(result.get(files[0])).toEqual(true);
         expect(result.get(files[1])).toEqual(true);
         expect(result.get(files[2])).toEqual(true);
     });
-    it("Can handle files modified in both index and working tree", () => {
-        const files = [`A.ts`, `B.ts`, `C.ts`];
-        const input = [`D  ${files[0]}`, `AD ${files[1]}`, `DA ${files[2]}`, ""].join("\0");
-        const result = (0, getRepoState_1.parseGitStatus)(input);
+    it("Can handle files modified in both index and working tree", ()=>{
+        const files = [
+            `A.ts`,
+            `B.ts`,
+            `C.ts`
+        ];
+        const input = [
+            `D  ${files[0]}`,
+            `AD ${files[1]}`,
+            `DA ${files[2]}`,
+            ""
+        ].join("\0");
+        const result = (0, _getRepoState.parseGitStatus)(input);
        expect(result.size).toEqual(3);
         expect(result.get(files[0])).toEqual(false);
         expect(result.get(files[1])).toEqual(false);
         expect(result.get(files[2])).toEqual(true);
     });
 });
-//# sourceMappingURL=getRepoState.test.js.map