bun-workspaces 1.8.1 → 1.8.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/affected/fileAffectedWorkspaces.mjs +321 -0
- package/src/affected/gitAffectedFiles.mjs +153 -0
- package/src/affected/gitAffectedWorkspaces.mjs +38 -0
- package/src/affected/index.mjs +5 -0
- package/src/cli/commands/runScript/output/renderGroupedOutput.mjs +22 -7
- package/src/cli/commands/runScript/output/tuiTerminal.mjs +45 -0
- package/src/workspaces/index.mjs +1 -0
- package/src/workspaces/workspacePattern.mjs +13 -3
package/package.json
CHANGED
|
@@ -0,0 +1,321 @@
|
|
|
1
|
+
import path from "path";
|
|
2
|
+
import bun from "bun";
|
|
3
|
+
import { logger } from "../internal/logger/index.mjs";
|
|
4
|
+
import { matchWorkspacesByPatterns } from "../workspaces/index.mjs";
|
|
5
|
+
|
|
6
|
+
// Prefix that marks an input file pattern as an exclusion (e.g. "!dist/**").
const FILE_PATTERN_NEGATION_PREFIX = "!";
// Detects glob metacharacters; patterns without them are matched as plain
// path prefixes instead of being compiled into a Glob.
const GLOB_CHARACTER_REGEX = /[*?[{]/;
// Converts Windows-style backslash separators to POSIX "/" separators.
const toPosixPath = (filePath) => filePath.replaceAll("\\", "/");
// Removes all trailing "/" characters from a path.
const stripTrailingSlashes = (filePath) => filePath.replace(/\/+$/, "");
// Removes all leading "/" characters from a path.
const stripLeadingSlashes = (filePath) => filePath.replace(/^\/+/, "");
|
|
11
|
+
/**
 * Maps a changed file path onto a project-root-relative POSIX path.
 *
 * Relative inputs are assumed to already be project-relative and are only
 * converted to POSIX separators. Absolute inputs have the root directory
 * prefix stripped when present; the root itself normalizes to "".
 */
const normalizeChangedFilePath = ({ rootDirectory, filePath }) => {
  const posixFilePath = toPosixPath(filePath);
  if (!path.isAbsolute(filePath)) return posixFilePath;
  const posixRoot = stripTrailingSlashes(toPosixPath(rootDirectory));
  if (posixFilePath === posixRoot) return "";
  const rootPrefix = `${posixRoot}/`;
  const isUnderRoot = posixRoot && posixFilePath.startsWith(rootPrefix);
  return isUnderRoot ? posixFilePath.slice(rootPrefix.length) : posixFilePath;
};
|
|
25
|
+
// A leading "/" anchors a pattern at the project root instead of the workspace.
const PROJECT_RELATIVE_PREFIX = "/";
const PARENT_SEGMENT = "..";
/**
 * Resolves a user-supplied input pattern to a project-root-relative POSIX
 * path. Project-anchored patterns ("/...") drop their leading slashes; all
 * other patterns are joined below the workspace path. Returns "" when the
 * pattern resolves to the project root itself.
 */
const resolveInputPattern = ({ workspacePath, inputPattern }) => {
  const posixPattern = toPosixPath(inputPattern);
  let rawJoined;
  if (posixPattern.startsWith(PROJECT_RELATIVE_PREFIX)) {
    rawJoined = stripLeadingSlashes(posixPattern);
  } else {
    const normalizedWorkspacePath = stripTrailingSlashes(
      toPosixPath(workspacePath),
    );
    const strippedPattern = stripTrailingSlashes(posixPattern);
    const hasWorkspaceSegment =
      normalizedWorkspacePath && normalizedWorkspacePath !== ".";
    const hasPatternSegment = strippedPattern && strippedPattern !== ".";
    if (!hasWorkspaceSegment) {
      rawJoined = strippedPattern;
    } else if (!hasPatternSegment) {
      rawJoined = normalizedWorkspacePath;
    } else {
      rawJoined = `${normalizedWorkspacePath}/${strippedPattern}`;
    }
  }
  if (!rawJoined) return "";
  // Collapse "." and ".." segments so escape checks see a canonical form.
  const normalized = path.posix.normalize(rawJoined);
  return normalized === "." ? "" : stripTrailingSlashes(normalized);
};
|
|
50
|
+
/**
 * True when a resolved pattern escapes the project root: it is exactly
 * ".." or begins with "../" after normalization.
 */
const isPatternOutsideProject = (resolvedPattern) => {
  if (resolvedPattern === PARENT_SEGMENT) return true;
  return resolvedPattern.startsWith(`${PARENT_SEGMENT}/`);
};
|
|
53
|
+
/**
 * Tests a project-relative file path against a resolved pattern.
 * The empty pattern matches everything; patterns containing glob
 * metacharacters delegate to Bun's Glob; plain paths match either
 * exactly or as a directory prefix.
 */
const matchesResolvedPattern = ({ filePath, resolvedPattern }) => {
  if (!resolvedPattern) return true;
  const isGlob = GLOB_CHARACTER_REGEX.test(resolvedPattern);
  if (isGlob) return new bun.Glob(resolvedPattern).match(filePath);
  if (filePath === resolvedPattern) return true;
  return filePath.startsWith(`${resolvedPattern}/`);
};
|
|
64
|
+
/**
 * Partitions raw file patterns into includes and excludes. A pattern
 * prefixed with "!" is an exclude; the prefix is removed from the value.
 */
const splitFilePatterns = (patterns) => {
  const result = { includes: [], excludes: [] };
  for (const pattern of patterns) {
    const isNegated = pattern.startsWith(FILE_PATTERN_NEGATION_PREFIX);
    if (isNegated) {
      result.excludes.push(pattern.slice(FILE_PATTERN_NEGATION_PREFIX.length));
    } else {
      result.includes.push(pattern);
    }
  }
  return result;
};
|
|
79
|
+
/**
 * Resolves raw input patterns against a workspace, dropping (with a
 * warning) any pattern that escapes the project root.
 *
 * @returns {Array<{inputPattern: string, resolvedPattern: string}>}
 */
const resolveFilePatterns = ({ workspace, patterns, isExclude }) =>
  patterns.flatMap((inputPattern) => {
    const resolvedPattern = resolveInputPattern({
      workspacePath: workspace.path,
      inputPattern,
    });
    if (!isPatternOutsideProject(resolvedPattern)) {
      return [{ inputPattern, resolvedPattern }];
    }
    // Re-attach the "!" prefix so the warning echoes the user's input.
    const displayPattern = isExclude
      ? `${FILE_PATTERN_NEGATION_PREFIX}${inputPattern}`
      : inputPattern;
    logger.warn(
      `Input pattern ${JSON.stringify(displayPattern)} for workspace "${workspace.name}" resolves outside the project root and will be ignored.`,
    );
    return [];
  });
|
|
102
|
+
/**
 * Returns the changed files matched by a workspace's input file patterns.
 *
 * A file matches when at least one include pattern matches it and no
 * exclude pattern does. Each file appears at most once, tagged with the
 * first include pattern that matched it; input order is preserved.
 */
const matchChangedFilesForWorkspace = ({
  workspace,
  inputFilePatterns,
  changedFilePaths,
}) => {
  const { includes, excludes } = splitFilePatterns(inputFilePatterns);
  const resolvedIncludes = resolveFilePatterns({
    workspace,
    patterns: includes,
    isExclude: false,
  });
  const resolvedExcludes = resolveFilePatterns({
    workspace,
    patterns: excludes,
    isExclude: true,
  });
  // Keyed by file path: dedupes repeated inputs while keeping first-seen order.
  const matchedByPath = new Map();
  for (const filePath of changedFilePaths) {
    if (matchedByPath.has(filePath)) continue;
    const matchesFile = ({ resolvedPattern }) =>
      matchesResolvedPattern({ filePath, resolvedPattern });
    const matchingInclude = resolvedIncludes.find(matchesFile);
    if (!matchingInclude) continue;
    if (resolvedExcludes.some(matchesFile)) continue;
    matchedByPath.set(filePath, {
      filePath,
      // Populated later by git-aware callers; undefined for plain matching.
      fileMetadata: undefined,
      inputPattern: matchingInclude.inputPattern,
    });
  }
  return [...matchedByPath.values()];
};
|
|
145
|
+
/**
 * Builds a map from each workspace name to the names of the workspaces
 * matched by its inputWorkspacePatterns (the workspace itself excluded).
 * Workspaces with no patterns map to an empty list.
 */
const resolveInputWorkspaceDependencies = ({ workspaceInputs }) => {
  const allWorkspaces = workspaceInputs.map(({ workspace }) => workspace);
  const entries = workspaceInputs.map(
    ({ workspace, inputWorkspacePatterns }) => {
      if (inputWorkspacePatterns.length === 0) {
        return [workspace.name, []];
      }
      const matchedNames = matchWorkspacesByPatterns(
        inputWorkspacePatterns,
        allWorkspaces,
      )
        .map(({ name }) => name)
        .filter((matchedName) => matchedName !== workspace.name);
      return [workspace.name, matchedNames];
    },
  );
  return new Map(entries);
};
|
|
163
|
+
/**
 * Computes the set of workspace names affected by the changed files.
 *
 * Seeds the set with every workspace that matched at least one changed
 * file, then propagates "affected" status outward to dependents via a
 * breadth-first traversal over two edge sources: input-pattern dependents
 * and (unless ignorePackageDependencies) package dependents.
 *
 * @param {Array<{workspace: object}>} workspaceInputs
 * @param {Map<string, object>} workspaceByName
 * @param {Map<string, Array>} changedFilesByName matched files per workspace
 * @param {Map<string, string[]>} inputDependenciesByName name -> input deps
 * @param {boolean} ignorePackageDependencies skip package.json dependents
 * @returns {Set<string>} names of affected workspaces
 */
const computeAffectedWorkspaceSet = ({
  workspaceInputs,
  workspaceByName,
  changedFilesByName,
  inputDependenciesByName,
  ignorePackageDependencies,
}) => {
  // Invert the input-dependency map: dependency name -> dependent names.
  const inputDependentsByName = new Map();
  for (const [workspaceName, dependencyNames] of inputDependenciesByName) {
    for (const dependencyName of dependencyNames) {
      const existing = inputDependentsByName.get(dependencyName);
      if (existing) {
        existing.push(workspaceName);
      } else {
        inputDependentsByName.set(dependencyName, [workspaceName]);
      }
    }
  }
  const affected = new Set();
  const queue = [];
  for (const { workspace } of workspaceInputs) {
    if ((changedFilesByName.get(workspace.name)?.length ?? 0) > 0) {
      affected.add(workspace.name);
      queue.push(workspace.name);
    }
  }
  // Index-based BFS: Array#shift() is O(n) per call, which made the
  // original loop O(n^2) on large graphs; a moving head index is O(n).
  for (let head = 0; head < queue.length; head += 1) {
    const currentName = queue[head];
    const currentWorkspace = workspaceByName.get(currentName);
    const dependents = [
      ...(inputDependentsByName.get(currentName) ?? []),
      ...(!ignorePackageDependencies && currentWorkspace
        ? currentWorkspace.dependents
        : []),
    ];
    for (const dependentName of dependents) {
      // Ignore dependents that are not part of this workspace set.
      if (!workspaceByName.has(dependentName)) continue;
      if (affected.has(dependentName)) continue;
      affected.add(dependentName);
      queue.push(dependentName);
    }
  }
  return affected;
};
|
|
207
|
+
/**
 * Depth-first walk of startingWorkspace's dependency graph that records,
 * for every reachable dependency present in affectedSet, the chain of
 * edges that led to it.
 *
 * Edges come from two sources per node: input-workspace patterns
 * ("input") and, unless ignorePackageDependencies is set, package
 * dependencies ("package"). Input edges are enqueued first. Each
 * workspace is visited at most once, so only the first-discovered chain
 * to any dependency is reported.
 *
 * @returns {Array<{dependencyName: string, chain: Array<{workspaceName: string, edgeSource?: string}>}>}
 */
const collectAffectedDependencies = ({
  startingWorkspace,
  workspaceByName,
  inputDependenciesByName,
  affectedSet,
  ignorePackageDependencies,
}) => {
  const results = [];
  // Seed with the start so cycles back to it are not re-entered.
  const visited = new Set([startingWorkspace.name]);
  const visit = (currentName, chain) => {
    const currentWorkspace = workspaceByName.get(currentName);
    if (!currentWorkspace) return;
    // Collect outgoing edges; input-pattern edges take precedence in order.
    const edges = [];
    for (const dependencyName of inputDependenciesByName.get(currentName) ??
      []) {
      edges.push({
        dependencyName,
        edgeSource: "input",
      });
    }
    if (!ignorePackageDependencies) {
      for (const dependencyName of currentWorkspace.dependencies) {
        edges.push({
          dependencyName,
          edgeSource: "package",
        });
      }
    }
    for (const { dependencyName, edgeSource } of edges) {
      if (visited.has(dependencyName)) continue;
      // Skip dependencies outside this workspace set.
      if (!workspaceByName.has(dependencyName)) continue;
      visited.add(dependencyName);
      const dependencyChain = [
        ...chain,
        {
          workspaceName: dependencyName,
          edgeSource,
        },
      ];
      // Record only dependencies that are themselves affected; still
      // recurse regardless, since unaffected nodes may lead to affected ones.
      if (affectedSet.has(dependencyName)) {
        results.push({
          dependencyName,
          chain: dependencyChain,
        });
      }
      visit(dependencyName, dependencyChain);
    }
  };
  // The root chain entry has no edgeSource: nothing led to the start node.
  visit(startingWorkspace.name, [
    {
      workspaceName: startingWorkspace.name,
    },
  ]);
  return results;
};
|
|
262
|
+
/**
 * Determines which workspaces are affected by a set of changed files.
 *
 * Normalizes the changed paths to project-relative POSIX form, matches
 * them against each workspace's input file patterns, computes the
 * transitive affected set, and reports per-workspace reasons (matched
 * files and affected dependency chains).
 *
 * @returns {{affectedWorkspaces: Array<{workspace, isAffected, affectedReasons}>}}
 */
const getFileAffectedWorkspaces = async ({
  rootDirectory,
  workspaceInputs,
  changedFilePaths,
  ignorePackageDependencies = false,
}) => {
  const normalizedChangedFilePaths = changedFilePaths.map((filePath) =>
    normalizeChangedFilePath({ rootDirectory, filePath }),
  );
  const workspaceByName = new Map(
    workspaceInputs.map(({ workspace }) => [workspace.name, workspace]),
  );
  const changedFilesByName = new Map(
    workspaceInputs.map(({ workspace, inputFilePatterns }) => [
      workspace.name,
      matchChangedFilesForWorkspace({
        workspace,
        inputFilePatterns,
        changedFilePaths: normalizedChangedFilePaths,
      }),
    ]),
  );
  const inputDependenciesByName = resolveInputWorkspaceDependencies({
    workspaceInputs,
  });
  const affectedSet = computeAffectedWorkspaceSet({
    workspaceInputs,
    workspaceByName,
    changedFilesByName,
    inputDependenciesByName,
    ignorePackageDependencies,
  });
  const affectedWorkspaces = workspaceInputs.map(({ workspace }) => ({
    workspace,
    isAffected: affectedSet.has(workspace.name),
    affectedReasons: {
      changedFiles: changedFilesByName.get(workspace.name) ?? [],
      dependencies: collectAffectedDependencies({
        startingWorkspace: workspace,
        workspaceByName,
        inputDependenciesByName,
        affectedSet,
        ignorePackageDependencies,
      }),
    },
  }));
  return {
    affectedWorkspaces,
  };
};

export { getFileAffectedWorkspaces };
|
|
@@ -0,0 +1,153 @@
|
|
|
1
|
+
import fs from "fs";
|
|
2
|
+
import path from "path";
|
|
3
|
+
import { defineErrors } from "../internal/core/index.mjs";
|
|
4
|
+
import { createSubprocess } from "../runScript/subprocesses.mjs";
|
|
5
|
+
|
|
6
|
+
// Error constructors surfaced to callers: failing to locate a repository
// vs. a git command exiting nonzero.
const GIT_AFFECTED_ERRORS = defineErrors("NoGitRepository", "GitCommandFailed");
// Canonical ordering for the per-file reason labels in results.
const GIT_AFFECTED_FILE_REASONS = ["diff", "staged", "unstaged", "untracked"];
|
|
8
|
+
/**
 * Runs `git <args>` in the given working directory with piped output.
 * Resolves with the collected stdout, stderr, and exit code; a nonzero
 * exit does not reject.
 */
const runGit = async (args, cwd) => {
  const proc = createSubprocess(["git", ...args], {
    cwd,
    stdout: "pipe",
    stderr: "pipe",
  });
  // Drain both streams while awaiting exit to avoid pipe back-pressure.
  const stdoutPromise = new Response(proc.stdout).text();
  const stderrPromise = new Response(proc.stderr).text();
  const [stdout, stderr, exitCode] = await Promise.all([
    stdoutPromise,
    stderrPromise,
    proc.exited,
  ]);
  return { stdout, stderr, exitCode };
};
|
|
25
|
+
/**
 * Runs a git command and returns its stdout, throwing GitCommandFailed
 * (with the command line and trimmed stderr) on a nonzero exit code.
 */
const runGitOrThrow = async (args, cwd) => {
  const result = await runGit(args, cwd);
  if (result.exitCode === 0) return result.stdout;
  throw new GIT_AFFECTED_ERRORS.GitCommandFailed(
    `git ${args.join(" ")} failed (exit ${result.exitCode}): ${result.stderr.trim()}`,
  );
};
|
|
34
|
+
/**
 * Splits NUL-delimited git output into individual paths.
 *
 * Git's `-z` flag emits paths separated by NUL bytes with no quoting or
 * escaping, which is the only safe way to parse output containing paths
 * with spaces, newlines, or non-ASCII characters under `core.quotePath`.
 */
const parseNullSeparated = (output) => {
  const segments = output.split("\0");
  return segments.filter((segment) => segment.length > 0);
};
|
|
39
|
+
/**
 * Resolves the git toplevel directory containing rootDirectory.
 * Throws NoGitRepository when git cannot run, exits nonzero, or reports
 * an empty toplevel.
 */
const resolveGitRoot = async (rootDirectory) => {
  let result;
  try {
    result = await runGit(["rev-parse", "--show-toplevel"], rootDirectory);
  } catch (error) {
    const detail = error instanceof Error ? ` (${error.message})` : "";
    throw new GIT_AFFECTED_ERRORS.NoGitRepository(
      `Not a git repository: ${rootDirectory}${detail}`,
    );
  }
  const toplevel = result.stdout.trim();
  if (result.exitCode !== 0 || !toplevel) {
    throw new GIT_AFFECTED_ERRORS.NoGitRepository(
      `Not a git repository: ${rootDirectory}`,
    );
  }
  return toplevel;
};
|
|
55
|
+
/**
 * Converts a git-root-relative path into a project-root-relative POSIX
 * path, or null when the file lies outside the project root (e.g. a
 * sibling package in a monorepo).
 */
const toProjectFilePath = ({
  gitRoot,
  absoluteProjectRoot,
  gitRelativePath,
}) => {
  const absolutePath = path.resolve(gitRoot, gitRelativePath);
  const relativePath = path.relative(absoluteProjectRoot, absolutePath);
  const isOutsideProject =
    !relativePath ||
    relativePath.startsWith("..") ||
    path.isAbsolute(relativePath);
  if (isOutsideProject) return null;
  return relativePath.split(path.sep).join("/");
};
|
|
67
|
+
/**
 * Collects the files git reports as changed between baseRef and headRef,
 * plus (unless the corresponding ignore flag is set) staged, unstaged,
 * and untracked files, mapped to project-root-relative POSIX paths with
 * the ordered list of reasons each file was reported.
 *
 * @throws NoGitRepository when rootDirectory is not inside a git repo
 * @throws GitCommandFailed when any underlying git command exits nonzero
 * @returns {Promise<{files: Array<{projectFilePath: string, reasons: string[]}>}>}
 */
const getGitAffectedFiles = async (options) => {
  const {
    rootDirectory,
    baseRef,
    headRef,
    ignoreUntracked,
    ignoreStaged,
    ignoreUnstaged,
    ignoreUncommitted,
  } = options;
  // realpath both roots so the later path.relative comparison is not
  // confused by symlinked directories — TODO confirm (e.g. /tmp vs
  // /private/tmp on macOS is the presumed motivation).
  const gitRoot = fs.realpathSync.native(
    path.resolve(await resolveGitRoot(rootDirectory)),
  );
  const absoluteProjectRoot = fs.realpathSync.native(
    path.resolve(rootDirectory),
  );
  // ignoreUncommitted overrides all three working-tree categories.
  const includeStaged = !ignoreUncommitted && !ignoreStaged;
  const includeUnstaged = !ignoreUncommitted && !ignoreUnstaged;
  const includeUntracked = !ignoreUncommitted && !ignoreUntracked;
  // Each collector resolves to a {reason, paths} bucket; the ref diff
  // always runs, the rest are conditional.
  const collectors = [
    runGitOrThrow(
      ["diff", "--name-only", "-z", baseRef, headRef],
      gitRoot,
    ).then((out) => ({
      reason: "diff",
      paths: parseNullSeparated(out),
    })),
  ];
  if (includeStaged) {
    collectors.push(
      runGitOrThrow(["diff", "--cached", "--name-only", "-z"], gitRoot).then(
        (out) => ({
          reason: "staged",
          paths: parseNullSeparated(out),
        }),
      ),
    );
  }
  if (includeUnstaged) {
    collectors.push(
      runGitOrThrow(["diff", "--name-only", "-z"], gitRoot).then((out) => ({
        reason: "unstaged",
        paths: parseNullSeparated(out),
      })),
    );
  }
  if (includeUntracked) {
    collectors.push(
      runGitOrThrow(
        ["ls-files", "--others", "--exclude-standard", "-z"],
        gitRoot,
      ).then((out) => ({
        reason: "untracked",
        paths: parseNullSeparated(out),
      })),
    );
  }
  // Run all git commands in parallel; any failure rejects the whole call.
  const buckets = await Promise.all(collectors);
  // Merge buckets: one entry per project path with the set of its reasons.
  const reasonsByPath = new Map();
  for (const { reason, paths } of buckets) {
    for (const gitRelativePath of paths) {
      const projectFilePath = toProjectFilePath({
        gitRoot,
        absoluteProjectRoot,
        gitRelativePath,
      });
      // null means the file is outside the project root — skip it.
      if (!projectFilePath) continue;
      let set = reasonsByPath.get(projectFilePath);
      if (!set) {
        set = new Set();
        reasonsByPath.set(projectFilePath, set);
      }
      set.add(reason);
    }
  }
  const files = Array.from(reasonsByPath.entries())
    .map(([projectFilePath, reasonSet]) => ({
      projectFilePath,
      // Emit reasons in canonical order regardless of collection order.
      reasons: GIT_AFFECTED_FILE_REASONS.filter((r) => reasonSet.has(r)),
    }))
    .sort((a, b) => a.projectFilePath.localeCompare(b.projectFilePath));
  return {
    files,
  };
};

export { GIT_AFFECTED_ERRORS, GIT_AFFECTED_FILE_REASONS, getGitAffectedFiles };
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
import { getFileAffectedWorkspaces } from "./fileAffectedWorkspaces.mjs";
|
|
2
|
+
import { getGitAffectedFiles } from "./gitAffectedFiles.mjs";
|
|
3
|
+
|
|
4
|
+
/**
 * Computes git-affected workspaces: gathers changed files from git,
 * runs the file-based affected computation over them, then annotates
 * every matched changed file with its git metadata (reasons, etc.).
 */
const getGitAffectedWorkspaces = async ({
  rootDirectory,
  workspacesOptions,
  gitOptions,
}) => {
  const { files: gitFiles } = await getGitAffectedFiles({
    rootDirectory,
    ...gitOptions,
  });
  const gitFileByPath = new Map(
    gitFiles.map((file) => [file.projectFilePath, file]),
  );
  const { affectedWorkspaces } = await getFileAffectedWorkspaces({
    rootDirectory,
    ...workspacesOptions,
    changedFilePaths: gitFiles.map((file) => file.projectFilePath),
  });
  const annotateChangedFile = (changedFile) => ({
    ...changedFile,
    fileMetadata: {
      git: gitFileByPath.get(changedFile.filePath),
    },
  });
  const annotatedWorkspaces = affectedWorkspaces.map((result) => ({
    ...result,
    affectedReasons: {
      ...result.affectedReasons,
      changedFiles:
        result.affectedReasons.changedFiles.map(annotateChangedFile),
    },
  }));
  return {
    affectedWorkspaces: annotatedWorkspaces,
  };
};

export { getGitAffectedWorkspaces };
|
|
@@ -9,6 +9,10 @@ import {
|
|
|
9
9
|
} from "../../../../internal/core/language/string/utf/visibleLength.mjs";
|
|
10
10
|
import { logger } from "../../../../internal/logger/index.mjs";
|
|
11
11
|
import { generatePlainOutputLines } from "./renderPlainOutput.mjs";
|
|
12
|
+
import {
|
|
13
|
+
initializeTuiTerminalState,
|
|
14
|
+
resetTuiTerminalState,
|
|
15
|
+
} from "./tuiTerminal.mjs";
|
|
12
16
|
|
|
13
17
|
class ScriptEventTarget extends TypedEventTarget {}
|
|
14
18
|
const createScriptEventTarget = () => new ScriptEventTarget();
|
|
@@ -86,8 +90,10 @@ const renderGroupedOutput = async (
|
|
|
86
90
|
}
|
|
87
91
|
isInitialized = true;
|
|
88
92
|
logger.debug("Initializing TUI state");
|
|
89
|
-
|
|
90
|
-
|
|
93
|
+
initializeTuiTerminalState({
|
|
94
|
+
stdout: outputWriters.stdout,
|
|
95
|
+
stdin: process.stdin,
|
|
96
|
+
});
|
|
91
97
|
};
|
|
92
98
|
let isReset = false;
|
|
93
99
|
const resetTuiTerminal = () => {
|
|
@@ -96,9 +102,10 @@ const renderGroupedOutput = async (
|
|
|
96
102
|
}
|
|
97
103
|
isReset = true;
|
|
98
104
|
logger.debug("Resetting TUI state");
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
105
|
+
resetTuiTerminalState({
|
|
106
|
+
stdout: outputWriters.stdout,
|
|
107
|
+
stdin: process.stdin,
|
|
108
|
+
});
|
|
102
109
|
};
|
|
103
110
|
let previousHeight = 0;
|
|
104
111
|
let didFinalRender = false;
|
|
@@ -285,8 +292,16 @@ const renderGroupedOutput = async (
|
|
|
285
292
|
process.stdin.on("data", (data) => {
|
|
286
293
|
// Send to the entire process group (pid=0) so child processes also receive
|
|
287
294
|
// the signal — raw mode prevents the terminal from doing this automatically.
|
|
288
|
-
|
|
289
|
-
|
|
295
|
+
const signal =
|
|
296
|
+
data[0] === 0x03 ? "SIGINT" : data[0] === 0x1c ? "SIGQUIT" : null;
|
|
297
|
+
if (!signal) return;
|
|
298
|
+
// Restore the tty before fanning the signal: once SIGINT lands across
|
|
299
|
+
// the process group, child cleanup races with our own tcsetattr and
|
|
300
|
+
// setRawMode reliably returns EIO, leaving the user's terminal stuck
|
|
301
|
+
// in raw mode. Doing it here, synchronously, while we still own the
|
|
302
|
+
// tty cleanly, is the only place this can run before the race.
|
|
303
|
+
resetTuiTerminal();
|
|
304
|
+
process.kill(0, signal);
|
|
290
305
|
});
|
|
291
306
|
runOnExit((reason) => {
|
|
292
307
|
try {
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
import { logger } from "../../../../internal/logger/index.mjs";
|
|
2
|
+
|
|
3
|
+
const SHOW_CURSOR_SEQUENCE = "\x1b[?25h";
|
|
4
|
+
const HIDE_CURSOR_SEQUENCE = "\x1b[?25l";
|
|
5
|
+
/**
|
|
6
|
+
* Resets the controlling tty after the grouped-output TUI has owned it.
|
|
7
|
+
*
|
|
8
|
+
* Each step runs independently: when SIGINT is delivered to the whole
|
|
9
|
+
* process group via the raw-mode ^C handler, `setRawMode(false)` can
|
|
10
|
+
* race with child-process tty teardown and return EIO. A throw must
|
|
11
|
+
* not strand the cursor in a hidden state nor leave the tty in raw
|
|
12
|
+
* mode for any subsequent step.
|
|
13
|
+
*
|
|
14
|
+
* `setRawMode` runs before `unref` so the underlying tty handle is in
|
|
15
|
+
* a known, ref'd state for tcsetattr.
|
|
16
|
+
*/ const resetTuiTerminalState = ({ stdout, stdin }) => {
|
|
17
|
+
try {
|
|
18
|
+
stdout(SHOW_CURSOR_SEQUENCE);
|
|
19
|
+
} catch (error) {
|
|
20
|
+
logger.debug("Failed to show cursor during TUI reset", {
|
|
21
|
+
error,
|
|
22
|
+
});
|
|
23
|
+
}
|
|
24
|
+
try {
|
|
25
|
+
stdin.setRawMode?.(false);
|
|
26
|
+
} catch (error) {
|
|
27
|
+
logger.debug("Failed to reset raw mode during TUI reset", {
|
|
28
|
+
error,
|
|
29
|
+
});
|
|
30
|
+
}
|
|
31
|
+
try {
|
|
32
|
+
stdin.pause();
|
|
33
|
+
stdin.unref?.();
|
|
34
|
+
} catch (error) {
|
|
35
|
+
logger.debug("Failed to detach stdin during TUI reset", {
|
|
36
|
+
error,
|
|
37
|
+
});
|
|
38
|
+
}
|
|
39
|
+
};
|
|
40
|
+
// Puts the tty into TUI mode: hides the cursor and switches stdin to raw
// mode so control keystrokes reach the TUI's data handler directly.
// `setRawMode` is optional-chained because stdin may not be a tty
// (e.g. piped input), in which case raw mode is simply skipped.
const initializeTuiTerminalState = ({ stdout, stdin }) => {
  stdout(HIDE_CURSOR_SEQUENCE);
  stdin.setRawMode?.(true);
};

export { initializeTuiTerminalState, resetTuiTerminalState };
|
package/src/workspaces/index.mjs
CHANGED
|
@@ -6,11 +6,19 @@ const WORKSPACE_PATTERN_TARGETS = ["path", "alias", "name", "tag"];
|
|
|
6
6
|
const TARGETS = WORKSPACE_PATTERN_TARGETS;
|
|
7
7
|
const WORKSPACE_PATTERN_ERRORS = defineErrors("InvalidWorkspacePattern");
|
|
8
8
|
const WORKSPACE_PATTERN_NEGATION_PREFIX = "not:";
|
|
9
|
+
const WORKSPACE_PATTERN_NEGATION_SHORT_PREFIX = "!";
|
|
10
|
+
const WORKSPACE_PATTERN_NEGATION_PREFIXES = [
|
|
11
|
+
WORKSPACE_PATTERN_NEGATION_PREFIX,
|
|
12
|
+
WORKSPACE_PATTERN_NEGATION_SHORT_PREFIX,
|
|
13
|
+
];
|
|
9
14
|
const WORKSPACE_PATTERN_SEPARATOR = ":";
|
|
10
15
|
const parseWorkspacePattern = (pattern) => {
|
|
11
|
-
const
|
|
12
|
-
|
|
13
|
-
|
|
16
|
+
const negationPrefix = WORKSPACE_PATTERN_NEGATION_PREFIXES.find((prefix) =>
|
|
17
|
+
pattern.startsWith(prefix),
|
|
18
|
+
);
|
|
19
|
+
const isNegated = !!negationPrefix;
|
|
20
|
+
const patternValue = negationPrefix
|
|
21
|
+
? pattern.slice(negationPrefix.length)
|
|
14
22
|
: pattern;
|
|
15
23
|
const target = TARGETS.find((target) =>
|
|
16
24
|
patternValue.startsWith(target + WORKSPACE_PATTERN_SEPARATOR),
|
|
@@ -108,6 +116,8 @@ const matchWorkspacesByPatterns = (patterns, workspaces) => {
|
|
|
108
116
|
export {
|
|
109
117
|
WORKSPACE_PATTERN_ERRORS,
|
|
110
118
|
WORKSPACE_PATTERN_NEGATION_PREFIX,
|
|
119
|
+
WORKSPACE_PATTERN_NEGATION_PREFIXES,
|
|
120
|
+
WORKSPACE_PATTERN_NEGATION_SHORT_PREFIX,
|
|
111
121
|
WORKSPACE_PATTERN_SEPARATOR,
|
|
112
122
|
matchWorkspacesByPatterns,
|
|
113
123
|
parseWorkspacePattern,
|