sfdx-git-delta 6.41.2 → 6.43.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +35 -2
- package/lib/adapter/GitAdapter.d.ts +51 -2
- package/lib/adapter/GitAdapter.js +367 -41
- package/lib/adapter/GitAdapter.js.map +1 -1
- package/lib/adapter/gitBatchCatFile.d.ts +26 -4
- package/lib/adapter/gitBatchCatFile.js +128 -44
- package/lib/adapter/gitBatchCatFile.js.map +1 -1
- package/lib/adapter/gitBlobReader.d.ts +58 -0
- package/lib/adapter/gitBlobReader.js +27 -0
- package/lib/adapter/gitBlobReader.js.map +1 -0
- package/lib/adapter/ioExecutor.d.ts +18 -6
- package/lib/adapter/ioExecutor.js +97 -12
- package/lib/adapter/ioExecutor.js.map +1 -1
- package/lib/commands/sgd/source/delta.d.ts +2 -0
- package/lib/commands/sgd/source/delta.js +31 -1
- package/lib/commands/sgd/source/delta.js.map +1 -1
- package/lib/constant/cliConstants.d.ts +2 -0
- package/lib/constant/cliConstants.js +4 -0
- package/lib/constant/cliConstants.js.map +1 -1
- package/lib/constant/gitConstants.d.ts +1 -0
- package/lib/constant/gitConstants.js +4 -0
- package/lib/constant/gitConstants.js.map +1 -1
- package/lib/main.js +35 -7
- package/lib/main.js.map +1 -1
- package/lib/metadata/MetadataRepositoryImpl.d.ts +1 -0
- package/lib/metadata/MetadataRepositoryImpl.js +14 -2
- package/lib/metadata/MetadataRepositoryImpl.js.map +1 -1
- package/lib/post-processor/changesManifestProcessor.d.ts +15 -0
- package/lib/post-processor/changesManifestProcessor.js +50 -0
- package/lib/post-processor/changesManifestProcessor.js.map +1 -0
- package/lib/post-processor/flowTranslationProcessor.d.ts +31 -14
- package/lib/post-processor/flowTranslationProcessor.js +146 -51
- package/lib/post-processor/flowTranslationProcessor.js.map +1 -1
- package/lib/post-processor/packageGenerator.d.ts +1 -2
- package/lib/post-processor/packageGenerator.js +23 -34
- package/lib/post-processor/packageGenerator.js.map +1 -1
- package/lib/post-processor/postProcessorManager.js +9 -6
- package/lib/post-processor/postProcessorManager.js.map +1 -1
- package/lib/service/botHandler.d.ts +2 -1
- package/lib/service/botHandler.js +4 -3
- package/lib/service/botHandler.js.map +1 -1
- package/lib/service/containedDecomposedHandler.d.ts +3 -2
- package/lib/service/containedDecomposedHandler.js +6 -6
- package/lib/service/containedDecomposedHandler.js.map +1 -1
- package/lib/service/customFieldHandler.d.ts +2 -1
- package/lib/service/customFieldHandler.js +2 -2
- package/lib/service/customFieldHandler.js.map +1 -1
- package/lib/service/customLabelHandler.d.ts +2 -1
- package/lib/service/customLabelHandler.js +3 -3
- package/lib/service/customLabelHandler.js.map +1 -1
- package/lib/service/customObjectHandler.d.ts +2 -1
- package/lib/service/customObjectHandler.js +2 -2
- package/lib/service/customObjectHandler.js.map +1 -1
- package/lib/service/decomposedHandler.d.ts +2 -1
- package/lib/service/decomposedHandler.js +2 -2
- package/lib/service/decomposedHandler.js.map +1 -1
- package/lib/service/diffLineInterpreter.d.ts +1 -1
- package/lib/service/diffLineInterpreter.js +16 -6
- package/lib/service/diffLineInterpreter.js.map +1 -1
- package/lib/service/flowHandler.d.ts +2 -1
- package/lib/service/flowHandler.js +2 -2
- package/lib/service/flowHandler.js.map +1 -1
- package/lib/service/inFileHandler.d.ts +8 -7
- package/lib/service/inFileHandler.js +37 -30
- package/lib/service/inFileHandler.js.map +1 -1
- package/lib/service/inFolderHandler.d.ts +2 -1
- package/lib/service/inFolderHandler.js +2 -2
- package/lib/service/inFolderHandler.js.map +1 -1
- package/lib/service/inResourceHandler.d.ts +3 -2
- package/lib/service/inResourceHandler.js +5 -5
- package/lib/service/inResourceHandler.js.map +1 -1
- package/lib/service/objectTranslationHandler.d.ts +3 -3
- package/lib/service/objectTranslationHandler.js +15 -8
- package/lib/service/objectTranslationHandler.js.map +1 -1
- package/lib/service/reportingFolderHandler.d.ts +8 -4
- package/lib/service/reportingFolderHandler.js +5 -6
- package/lib/service/reportingFolderHandler.js.map +1 -1
- package/lib/service/sharedFolderHandler.d.ts +8 -4
- package/lib/service/sharedFolderHandler.js +5 -6
- package/lib/service/sharedFolderHandler.js.map +1 -1
- package/lib/service/standardHandler.d.ts +14 -7
- package/lib/service/standardHandler.js +47 -27
- package/lib/service/standardHandler.js.map +1 -1
- package/lib/service/typeHandlerFactory.d.ts +2 -0
- package/lib/service/typeHandlerFactory.js +15 -0
- package/lib/service/typeHandlerFactory.js.map +1 -1
- package/lib/types/config.d.ts +1 -0
- package/lib/types/handlerResult.d.ts +17 -8
- package/lib/types/handlerResult.js +9 -11
- package/lib/types/handlerResult.js.map +1 -1
- package/lib/types/work.d.ts +2 -5
- package/lib/utils/LoggingDecorator.js +22 -11
- package/lib/utils/LoggingDecorator.js.map +1 -1
- package/lib/utils/changeSet.d.ts +53 -0
- package/lib/utils/changeSet.js +257 -0
- package/lib/utils/changeSet.js.map +1 -0
- package/lib/utils/configValidator.d.ts +1 -0
- package/lib/utils/configValidator.js +39 -1
- package/lib/utils/configValidator.js.map +1 -1
- package/lib/utils/gitLfsHelper.js +9 -1
- package/lib/utils/gitLfsHelper.js.map +1 -1
- package/lib/utils/metadataDiff/index.d.ts +28 -0
- package/lib/utils/metadataDiff/index.js +57 -0
- package/lib/utils/metadataDiff/index.js.map +1 -0
- package/lib/utils/metadataDiff/streamingDiff.d.ts +68 -0
- package/lib/utils/metadataDiff/streamingDiff.js +300 -0
- package/lib/utils/metadataDiff/streamingDiff.js.map +1 -0
- package/lib/utils/metadataDiff/xmlEventReader.d.ts +28 -0
- package/lib/utils/metadataDiff/xmlEventReader.js +221 -0
- package/lib/utils/metadataDiff/xmlEventReader.js.map +1 -0
- package/lib/utils/metadataDiff/xmlWriter.d.ts +17 -0
- package/lib/utils/metadataDiff/xmlWriter.js +213 -0
- package/lib/utils/metadataDiff/xmlWriter.js.map +1 -0
- package/lib/utils/packageHelper.d.ts +8 -1
- package/lib/utils/packageHelper.js +25 -28
- package/lib/utils/packageHelper.js.map +1 -1
- package/lib/utils/renameResolver.d.ts +19 -0
- package/lib/utils/renameResolver.js +52 -0
- package/lib/utils/renameResolver.js.map +1 -0
- package/lib/utils/repoGitDiff.d.ts +18 -5
- package/lib/utils/repoGitDiff.js +66 -33
- package/lib/utils/repoGitDiff.js.map +1 -1
- package/lib/utils/treeIndexScope.d.ts +1 -1
- package/lib/utils/treeIndexScope.js.map +1 -1
- package/lib/utils/txmlAdapter.d.ts +30 -0
- package/lib/utils/txmlAdapter.js +121 -0
- package/lib/utils/txmlAdapter.js.map +1 -0
- package/lib/utils/xmlHelper.d.ts +0 -5
- package/lib/utils/xmlHelper.js +0 -57
- package/lib/utils/xmlHelper.js.map +1 -1
- package/messages/delta.md +12 -0
- package/npm-shrinkwrap.json +628 -486
- package/oclif.manifest.json +9 -1
- package/package.json +53 -21
- package/lib/utils/manifestAggregator.d.ts +0 -3
- package/lib/utils/manifestAggregator.js +0 -22
- package/lib/utils/manifestAggregator.js.map +0 -1
- package/lib/utils/metadataDiff.d.ts +0 -23
- package/lib/utils/metadataDiff.js +0 -242
- package/lib/utils/metadataDiff.js.map +0 -1
package/README.md
CHANGED
|
@@ -149,7 +149,7 @@ Generate incremental package manifest and source content
|
|
|
149
149
|
```
|
|
150
150
|
USAGE
|
|
151
151
|
$ sf sgd source delta -f <value> [--json] [--flags-dir <value>] [-t <value>] [-d] [-o <value>] [-r <value>] [-s
|
|
152
|
-
<value>...] [-i <value>] [-D <value>] [-n <value>] [-N <value>] [-M <value>] [-W] [-a <value>]
|
|
152
|
+
<value>...] [-i <value>] [-D <value>] [-n <value>] [-N <value>] [-M <value>] [-c <value>] [-W] [-a <value>]
|
|
153
153
|
|
|
154
154
|
FLAGS
|
|
155
155
|
-D, --ignore-destructive-file=<value> file listing paths to explicitly ignore for any destructive actions
|
|
@@ -158,6 +158,8 @@ FLAGS
|
|
|
158
158
|
-W, --ignore-whitespace ignore git diff whitespace (space, tab, eol) changes
|
|
159
159
|
-a, --api-version=<value> salesforce metadata API version, default to sfdx-project.json
|
|
160
160
|
"sourceApiVersion" attribute or latest version
|
|
161
|
+
-c, --changes-manifest=<value> path to a JSON file grouping changed components by kind (add, modify,
|
|
162
|
+
delete, rename); setting this flag also enables git rename detection
|
|
161
163
|
-d, --generate-delta generate delta files in [--output-dir] folder
|
|
162
164
|
-f, --from=<value> (required) commit sha from where the diff is done
|
|
163
165
|
-i, --ignore-file=<value> file listing paths to explicitly ignore for any diff actions
|
|
@@ -200,7 +202,7 @@ FLAG DESCRIPTIONS
|
|
|
200
202
|
of a diff, in which case changes may still be picked up.
|
|
201
203
|
```
|
|
202
204
|
|
|
203
|
-
_See code: [src/commands/sgd/source/delta.ts](https://github.com/scolladon/sfdx-git-delta/blob/v6.
|
|
205
|
+
_See code: [src/commands/sgd/source/delta.ts](https://github.com/scolladon/sfdx-git-delta/blob/v6.43.0/src/commands/sgd/source/delta.ts)_
|
|
204
206
|
<!-- commandsstop -->
|
|
205
207
|
|
|
206
208
|
### Windows users
|
|
@@ -518,6 +520,37 @@ To cover this need, parse the content of the package.xml file produced by SGD us
|
|
|
518
520
|
|
|
519
521
|
`xq . < package/package.xml | jq '.Package.types | [.] | flatten | map(select(.name=="ApexClass")) | .[] | .members | [.] | flatten | map(select(. | index("*") | not)) | unique | join(",")'`
|
|
520
522
|
|
|
523
|
+
### Review-centric: list components by change kind
|
|
524
|
+
|
|
525
|
+
Package managers (1GP, unlocked, managed) and release reviewers often need to distinguish **newly added** components from **modified** ones — adding a new component to a managed package is a 1-way door that binds all subscribers. SGD groups `git diff` into Salesforce components, but its `package.xml` bundles additions and modifications together (as required for deployment).
|
|
526
|
+
|
|
527
|
+
Use `--changes-manifest [-c]` to emit an additional JSON file that groups components by change kind. The file is written alongside `package.xml` / `destructiveChanges.xml` (no changes to the deployment manifests themselves).
|
|
528
|
+
|
|
529
|
+
Two forms:
|
|
530
|
+
|
|
531
|
+
```sh
|
|
532
|
+
# Bare flag: writes to <output-dir>/changes.manifest.json
|
|
533
|
+
sf sgd source delta --from "origin/development" --to HEAD --output-dir incremental --changes-manifest
|
|
534
|
+
|
|
535
|
+
# Explicit path: resolved against cwd (or used as-is when absolute), same as --ignore-file
|
|
536
|
+
sf sgd source delta --from "origin/development" --to HEAD --changes-manifest reports/changes.json
|
|
537
|
+
```
|
|
538
|
+
|
|
539
|
+
Produces (bare form example) `incremental/changes.manifest.json`. Setting `--changes-manifest` also turns on git's `-M` rename detection for this run, so components renamed at the file level show up in their own bucket instead of being split into a fake delete+add pair. Default sgd runs (without the flag) keep the pre-feature behaviour — renames still appear as a delete on the old path + an add on the new path.
|
|
540
|
+
|
|
541
|
+
```json
|
|
542
|
+
{
|
|
543
|
+
"add": { "ApexClass": ["BrandNewClass"], "CustomObject": ["NewObject__c"] },
|
|
544
|
+
"modify": { "ApexClass": ["ExistingClass"], "CustomLabels": ["MyApp.Label1"] },
|
|
545
|
+
"delete": { "ApexTrigger": ["OldTrigger"] },
|
|
546
|
+
"rename": { "ApexClass": [{ "from": "OldName", "to": "NewName" }] }
|
|
547
|
+
}
|
|
548
|
+
```
|
|
549
|
+
|
|
550
|
+
`package.xml` still lists `NewName` and `destructiveChanges.xml` still lists `OldName` for renames — the deployment contract is unchanged.
|
|
551
|
+
|
|
552
|
+
Works for file-backed metadata, in-file sub-components (CustomLabels members, Workflow rules, etc.), decomposed metadata, in-resource bundles and in-folder metadata.
|
|
553
|
+
|
|
521
554
|
### Condition deployment on package.xml and destructiveChange content
|
|
522
555
|
|
|
523
556
|
SGD does not always generate content in the package.xml (or destructiveChanges.xml). Sometimes the commit range contains changes only within files to ignore (using .sgdignore and `--i` parameter).
|
|
@@ -1,9 +1,11 @@
|
|
|
1
|
+
import { type Readable } from 'node:stream';
|
|
1
2
|
import { SimpleGit } from 'simple-git';
|
|
2
3
|
import type { Config } from '../types/config.js';
|
|
3
4
|
import type { FileGitRef } from '../types/git.js';
|
|
4
5
|
import { GitBatchCatFile } from './gitBatchCatFile.js';
|
|
6
|
+
import type { GitBlobReader, SpawnFn } from './gitBlobReader.js';
|
|
5
7
|
import { TreeIndex } from './treeIndex.js';
|
|
6
|
-
export default class GitAdapter {
|
|
8
|
+
export default class GitAdapter implements GitBlobReader {
|
|
7
9
|
protected readonly config: Config;
|
|
8
10
|
private static instances;
|
|
9
11
|
private static keyFor;
|
|
@@ -11,21 +13,68 @@ export default class GitAdapter {
|
|
|
11
13
|
protected readonly simpleGit: SimpleGit;
|
|
12
14
|
protected readonly treeIndex: Map<string, TreeIndex>;
|
|
13
15
|
protected batchCatFile: GitBatchCatFile | null;
|
|
16
|
+
private readonly streamingChildren;
|
|
17
|
+
private static readonly STDERR_BUFFER_CAP;
|
|
18
|
+
private static readonly LFS_POINTER_CAP;
|
|
19
|
+
private spawnFn;
|
|
14
20
|
private constructor();
|
|
21
|
+
/**
|
|
22
|
+
* Testability seam: lets unit tests swap in a fake spawn for streaming
|
|
23
|
+
* subprocesses. Production always uses `child_process.spawn`.
|
|
24
|
+
*/
|
|
25
|
+
setSpawnFn(spawnFn: SpawnFn): void;
|
|
15
26
|
protected getBatchCatFile(): GitBatchCatFile;
|
|
16
27
|
closeBatchProcess(): void;
|
|
17
28
|
static closeAll(): void;
|
|
18
29
|
configureRepository(): Promise<void>;
|
|
19
30
|
parseRev(ref: string): Promise<string>;
|
|
20
31
|
preBuildTreeIndex(revision: string, scopePaths: string[]): Promise<void>;
|
|
32
|
+
/**
|
|
33
|
+
* Spawns `git <args>`, streams stdout through a readline interface, and
|
|
34
|
+
* yields one line at a time. Replaces the old "run command, split on EOL"
|
|
35
|
+
* pattern that accumulated a multi-MB string for megarepo diffs / tree
|
|
36
|
+
* listings before any downstream consumer ran.
|
|
37
|
+
*
|
|
38
|
+
* The spawned child is pushed into streamingChildren so closeAll()
|
|
39
|
+
* kills it if teardown happens mid-stream. stderr is drained to the
|
|
40
|
+
* debug log; a non-zero exit code rejects the iterator on next read.
|
|
41
|
+
*/
|
|
42
|
+
protected _spawnLines(args: string[]): AsyncGenerator<string>;
|
|
43
|
+
private _trackChild;
|
|
21
44
|
protected pathExistsImpl(path: string, revision: string): boolean;
|
|
22
45
|
pathExists(path: string, revision?: string): Promise<boolean>;
|
|
23
46
|
getFirstCommitRef(): Promise<string>;
|
|
24
47
|
getBufferContent(forRef: FileGitRef): Promise<Buffer>;
|
|
48
|
+
getBufferContentOrEscalate(forRef: FileGitRef): Promise<Buffer>;
|
|
49
|
+
/**
|
|
50
|
+
* Streams a directory from `revision` as `git archive --format=tar`
|
|
51
|
+
* entries. Yields `{ path, stream }` per file entry (directories are
|
|
52
|
+
* skipped). The tar subprocess is registered in streamingChildren so
|
|
53
|
+
* closeAll kills it on teardown.
|
|
54
|
+
*
|
|
55
|
+
* Callers MUST consume each entry's stream — even when skipping the
|
|
56
|
+
* entry — otherwise tar-stream back-pressures and halts parsing. For
|
|
57
|
+
* skip cases use `stream.resume()` to drain-and-discard.
|
|
58
|
+
*/
|
|
59
|
+
streamArchive(path: string, revision: string): AsyncGenerator<{
|
|
60
|
+
path: string;
|
|
61
|
+
stream: Readable;
|
|
62
|
+
}>;
|
|
63
|
+
/**
|
|
64
|
+
* Spawns a dedicated `git cat-file blob <oid>` subprocess and returns a
|
|
65
|
+
* Readable that peeks the first chunks for LFS pointer magic. On match,
|
|
66
|
+
* the spawned subprocess is killed and the Readable is fed from the
|
|
67
|
+
* resolved LFS object file. Otherwise bytes are forwarded as-is.
|
|
68
|
+
*/
|
|
69
|
+
streamContent(forRef: FileGitRef): Readable;
|
|
70
|
+
private _wireStreamContent;
|
|
71
|
+
private _handoffToLfs;
|
|
25
72
|
getStringContent(forRef: FileGitRef): Promise<string>;
|
|
26
73
|
protected getFilesPathCached(path: string, revision: string): string[];
|
|
27
74
|
getFilesPath(paths: string | string[], revision?: string): Promise<string[]>;
|
|
28
75
|
listDirAtRevision(dir: string, revision: string): Promise<string[]>;
|
|
29
76
|
gitGrep(pattern: string, path: string | string[], revision?: string): Promise<string[]>;
|
|
30
|
-
|
|
77
|
+
streamDiffLines(): AsyncGenerator<string>;
|
|
78
|
+
protected _getNumstatLines(changeType: string, detectRenames: boolean): Promise<string[]>;
|
|
79
|
+
private _getRenameLines;
|
|
31
80
|
}
|
|
@@ -1,9 +1,14 @@
|
|
|
1
1
|
import { __decorate } from "tslib";
|
|
2
|
+
import { spawn } from 'node:child_process';
|
|
3
|
+
import { createReadStream } from 'node:fs';
|
|
2
4
|
import { readFile } from 'node:fs/promises';
|
|
3
5
|
import { join } from 'node:path/posix';
|
|
6
|
+
import { createInterface } from 'node:readline';
|
|
7
|
+
import { PassThrough } from 'node:stream';
|
|
4
8
|
import { simpleGit } from 'simple-git';
|
|
9
|
+
import { TAB } from '../constant/cliConstants.js';
|
|
5
10
|
import { UTF8_ENCODING } from '../constant/fsConstants.js';
|
|
6
|
-
import { ADDITION, DELETION, HEAD, IGNORE_WHITESPACE_PARAMS, MODIFICATION, NUM_STAT_CHANGE_INFORMATION, } from '../constant/gitConstants.js';
|
|
11
|
+
import { ADDITION, DELETION, HEAD, IGNORE_WHITESPACE_PARAMS, MODIFICATION, NUM_STAT_CHANGE_INFORMATION, RENAMED, } from '../constant/gitConstants.js';
|
|
7
12
|
import { pushAll } from '../utils/arrayUtils.js';
|
|
8
13
|
import { getErrorMessage } from '../utils/errorUtils.js';
|
|
9
14
|
import { treatPathSep } from '../utils/fsUtils.js';
|
|
@@ -12,6 +17,7 @@ import { log } from '../utils/LoggingDecorator.js';
|
|
|
12
17
|
import { Logger, lazy } from '../utils/LoggingService.js';
|
|
13
18
|
import { GitBatchCatFile } from './gitBatchCatFile.js';
|
|
14
19
|
import { TreeIndex } from './treeIndex.js';
|
|
20
|
+
const LFS_MAGIC = Buffer.from('version https://git-lfs.github.com/spec/v1\n');
|
|
15
21
|
const EOL = /\r?\n/;
|
|
16
22
|
const ROOT_PATHS = new Set(['', '.', './']);
|
|
17
23
|
export default class GitAdapter {
|
|
@@ -33,20 +39,49 @@ export default class GitAdapter {
|
|
|
33
39
|
simpleGit;
|
|
34
40
|
treeIndex;
|
|
35
41
|
batchCatFile = null;
|
|
42
|
+
// Live-only list of streaming subprocesses: children are appended when
|
|
43
|
+
// spawned and spliced on `close` so long-running invocations don't
|
|
44
|
+
// accumulate dead process references. Iterated at closeAll() teardown and
|
|
45
|
+
// kill()ed if still alive.
|
|
46
|
+
streamingChildren = [];
|
|
47
|
+
// Cap on stderr buffered per-subprocess: long-running git processes that
|
|
48
|
+
// emit progress to stderr would otherwise grow this without bound. The
|
|
49
|
+
// final error message truncates at this size.
|
|
50
|
+
static STDERR_BUFFER_CAP = 8 * 1024;
|
|
51
|
+
// Cap on LFS pointer buffering: real pointers are < 200 bytes; a crafted
|
|
52
|
+
// blob with the LFS magic prefix followed by gigabytes of content should
|
|
53
|
+
// not OOM the process before validation fails.
|
|
54
|
+
static LFS_POINTER_CAP = 1024;
|
|
55
|
+
spawnFn = spawn;
|
|
36
56
|
constructor(config) {
|
|
37
57
|
this.config = config;
|
|
38
58
|
this.simpleGit = simpleGit({ baseDir: config.repo, trimmed: true });
|
|
39
59
|
this.treeIndex = new Map();
|
|
40
60
|
}
|
|
61
|
+
/**
|
|
62
|
+
* Testability seam: lets unit tests swap in a fake spawn for streaming
|
|
63
|
+
* subprocesses. Production always uses `child_process.spawn`.
|
|
64
|
+
*/
|
|
65
|
+
setSpawnFn(spawnFn) {
|
|
66
|
+
this.spawnFn = spawnFn;
|
|
67
|
+
}
|
|
41
68
|
getBatchCatFile() {
|
|
42
69
|
if (!this.batchCatFile) {
|
|
43
|
-
this.batchCatFile = new GitBatchCatFile(this.config.repo
|
|
70
|
+
this.batchCatFile = new GitBatchCatFile(this.config.repo, {
|
|
71
|
+
spawnFn: this.spawnFn,
|
|
72
|
+
});
|
|
44
73
|
}
|
|
45
74
|
return this.batchCatFile;
|
|
46
75
|
}
|
|
47
76
|
closeBatchProcess() {
|
|
48
77
|
this.batchCatFile?.close();
|
|
49
78
|
this.batchCatFile = null;
|
|
79
|
+
for (const child of this.streamingChildren) {
|
|
80
|
+
if (child.exitCode === null && !child.killed) {
|
|
81
|
+
child.kill();
|
|
82
|
+
}
|
|
83
|
+
}
|
|
84
|
+
this.streamingChildren.length = 0;
|
|
50
85
|
}
|
|
51
86
|
static closeAll() {
|
|
52
87
|
for (const instance of GitAdapter.instances.values()) {
|
|
@@ -70,9 +105,8 @@ export default class GitAdapter {
|
|
|
70
105
|
if (scopePaths.length > 0) {
|
|
71
106
|
args.push('--', ...scopePaths);
|
|
72
107
|
}
|
|
73
|
-
const output = await this.simpleGit.raw(args);
|
|
74
108
|
const index = new TreeIndex();
|
|
75
|
-
for (const line of
|
|
109
|
+
for await (const line of this._spawnLines(args)) {
|
|
76
110
|
if (line)
|
|
77
111
|
index.add(treatPathSep(line));
|
|
78
112
|
}
|
|
@@ -82,6 +116,66 @@ export default class GitAdapter {
|
|
|
82
116
|
Logger.debug(lazy `preBuildTreeIndex: scoped ls-tree for '${revision}' failed: ${() => getErrorMessage(error)}`);
|
|
83
117
|
}
|
|
84
118
|
}
|
|
119
|
+
/**
|
|
120
|
+
* Spawns `git <args>`, streams stdout through a readline interface, and
|
|
121
|
+
* yields one line at a time. Replaces the old "run command, split on EOL"
|
|
122
|
+
* pattern that accumulated a multi-MB string for megarepo diffs / tree
|
|
123
|
+
* listings before any downstream consumer ran.
|
|
124
|
+
*
|
|
125
|
+
* The spawned child is pushed into streamingChildren so closeAll()
|
|
126
|
+
* kills it if teardown happens mid-stream. stderr is drained to the
|
|
127
|
+
* debug log; a non-zero exit code rejects the iterator on next read.
|
|
128
|
+
*/
|
|
129
|
+
async *_spawnLines(args) {
|
|
130
|
+
const child = this.spawnFn('git', args, {
|
|
131
|
+
cwd: this.config.repo,
|
|
132
|
+
stdio: ['ignore', 'pipe', 'pipe'],
|
|
133
|
+
});
|
|
134
|
+
this._trackChild(child);
|
|
135
|
+
const stderrChunks = [];
|
|
136
|
+
let stderrLen = 0;
|
|
137
|
+
child.stderr.on('data', (chunk) => {
|
|
138
|
+
if (stderrLen >= GitAdapter.STDERR_BUFFER_CAP)
|
|
139
|
+
return;
|
|
140
|
+
stderrChunks.push(chunk);
|
|
141
|
+
stderrLen += chunk.length;
|
|
142
|
+
});
|
|
143
|
+
const rl = createInterface({
|
|
144
|
+
input: child.stdout,
|
|
145
|
+
crlfDelay: Number.POSITIVE_INFINITY,
|
|
146
|
+
});
|
|
147
|
+
const exitPromise = new Promise((resolve, reject) => {
|
|
148
|
+
child.once('error', reject);
|
|
149
|
+
child.once('close', resolve);
|
|
150
|
+
});
|
|
151
|
+
try {
|
|
152
|
+
for await (const line of rl) {
|
|
153
|
+
yield line;
|
|
154
|
+
}
|
|
155
|
+
const code = await exitPromise;
|
|
156
|
+
if (code !== 0 && code !== null) {
|
|
157
|
+
const stderr = Buffer.concat(stderrChunks)
|
|
158
|
+
.subarray(0, GitAdapter.STDERR_BUFFER_CAP)
|
|
159
|
+
.toString('utf8')
|
|
160
|
+
.trim();
|
|
161
|
+
throw new Error(`git ${args[0]} exited ${code}${stderr ? `: ${stderr}` : ''}`);
|
|
162
|
+
}
|
|
163
|
+
}
|
|
164
|
+
finally {
|
|
165
|
+
rl.close();
|
|
166
|
+
if (!child.killed && child.exitCode === null)
|
|
167
|
+
child.kill();
|
|
168
|
+
}
|
|
169
|
+
}
|
|
170
|
+
_trackChild(child) {
|
|
171
|
+
this.streamingChildren.push(child);
|
|
172
|
+
child.once('close', () => {
|
|
173
|
+
const idx = this.streamingChildren.indexOf(child);
|
|
174
|
+
/* v8 ignore next -- defensive: the close listener is once-only and bound to the tracked child; idx is always >= 0 here */
|
|
175
|
+
if (idx !== -1)
|
|
176
|
+
this.streamingChildren.splice(idx, 1);
|
|
177
|
+
});
|
|
178
|
+
}
|
|
85
179
|
pathExistsImpl(path, revision) {
|
|
86
180
|
const index = this.treeIndex.get(revision);
|
|
87
181
|
if (!index)
|
|
@@ -99,11 +193,171 @@ export default class GitAdapter {
|
|
|
99
193
|
async getBufferContent(forRef) {
|
|
100
194
|
let content = await this.getBatchCatFile().getContent(forRef.oid, forRef.path);
|
|
101
195
|
if (isLFS(content)) {
|
|
102
|
-
const
|
|
103
|
-
content = await readFile(join(this.config.repo,
|
|
196
|
+
const lfsPath = getLFSObjectContentPath(content);
|
|
197
|
+
content = await readFile(join(this.config.repo, lfsPath));
|
|
198
|
+
}
|
|
199
|
+
return content;
|
|
200
|
+
}
|
|
201
|
+
async getBufferContentOrEscalate(forRef) {
|
|
202
|
+
let content = await this.getBatchCatFile().getContentOrEscalate(forRef.oid, forRef.path);
|
|
203
|
+
if (isLFS(content)) {
|
|
204
|
+
const lfsPath = getLFSObjectContentPath(content);
|
|
205
|
+
content = await readFile(join(this.config.repo, lfsPath));
|
|
104
206
|
}
|
|
105
207
|
return content;
|
|
106
208
|
}
|
|
209
|
+
/**
|
|
210
|
+
* Streams a directory from `revision` as `git archive --format=tar`
|
|
211
|
+
* entries. Yields `{ path, stream }` per file entry (directories are
|
|
212
|
+
* skipped). The tar subprocess is registered in streamingChildren so
|
|
213
|
+
* closeAll kills it on teardown.
|
|
214
|
+
*
|
|
215
|
+
* Callers MUST consume each entry's stream — even when skipping the
|
|
216
|
+
* entry — otherwise tar-stream back-pressures and halts parsing. For
|
|
217
|
+
* skip cases use `stream.resume()` to drain-and-discard.
|
|
218
|
+
*/
|
|
219
|
+
async *streamArchive(path, revision) {
|
|
220
|
+
if (path.startsWith('-') || revision.startsWith('-')) {
|
|
221
|
+
throw new Error(`Refusing to spawn git archive for ${path}`);
|
|
222
|
+
}
|
|
223
|
+
const { extract } = await import('tar-stream');
|
|
224
|
+
const extractor = extract();
|
|
225
|
+
const child = this.spawnFn('git', ['archive', '--format=tar', revision, '--', path], { cwd: this.config.repo, stdio: ['ignore', 'pipe', 'pipe'] });
|
|
226
|
+
this._trackChild(child);
|
|
227
|
+
child.on('error', err => extractor.destroy(err));
|
|
228
|
+
child.stderr.on('data', (chunk) => {
|
|
229
|
+
Logger.debug(lazy `streamArchive stderr for ${path}@${revision}: ${() => chunk.toString()}`);
|
|
230
|
+
});
|
|
231
|
+
child.stdout.pipe(extractor);
|
|
232
|
+
try {
|
|
233
|
+
for await (const entry of extractor) {
|
|
234
|
+
if (entry.header.type !== 'file') {
|
|
235
|
+
entry.resume();
|
|
236
|
+
continue;
|
|
237
|
+
}
|
|
238
|
+
yield { path: entry.header.name, stream: entry };
|
|
239
|
+
}
|
|
240
|
+
}
|
|
241
|
+
finally {
|
|
242
|
+
if (!child.killed && child.exitCode === null)
|
|
243
|
+
child.kill();
|
|
244
|
+
}
|
|
245
|
+
}
|
|
246
|
+
/**
|
|
247
|
+
* Spawns a dedicated `git cat-file blob <oid>` subprocess and returns a
|
|
248
|
+
* Readable that peeks the first chunks for LFS pointer magic. On match,
|
|
249
|
+
* the spawned subprocess is killed and the Readable is fed from the
|
|
250
|
+
* resolved LFS object file. Otherwise bytes are forwarded as-is.
|
|
251
|
+
*/
|
|
252
|
+
streamContent(forRef) {
|
|
253
|
+
const out = new PassThrough();
|
|
254
|
+
// Defense in depth: `git cat-file blob <ref>` treats a ref starting
|
|
255
|
+
// with `-` as an option. Refs come from git diff output so this
|
|
256
|
+
// shouldn't happen in normal operation, but a malicious diff or a
|
|
257
|
+
// path with a leading dash would give git an option it respects.
|
|
258
|
+
// Fail fast with a clear error rather than trust the subprocess.
|
|
259
|
+
if (forRef.path.startsWith('-') || forRef.oid.startsWith('-')) {
|
|
260
|
+
process.nextTick(() => out.destroy(new Error(`Refusing to spawn git cat-file for ${forRef.path}`)));
|
|
261
|
+
return out;
|
|
262
|
+
}
|
|
263
|
+
const child = this.spawnFn('git', ['cat-file', 'blob', `${forRef.oid}:${forRef.path}`], { cwd: this.config.repo, stdio: ['ignore', 'pipe', 'pipe'] });
|
|
264
|
+
this._trackChild(child);
|
|
265
|
+
this._wireStreamContent(child, out, forRef);
|
|
266
|
+
return out;
|
|
267
|
+
}
|
|
268
|
+
_wireStreamContent(child, out, forRef) {
|
|
269
|
+
let peeked = [];
|
|
270
|
+
let peekedLen = 0;
|
|
271
|
+
let decided = false;
|
|
272
|
+
const forwardPeeked = () => {
|
|
273
|
+
const head = Buffer.concat(peeked, peekedLen);
|
|
274
|
+
peeked = [];
|
|
275
|
+
peekedLen = 0;
|
|
276
|
+
return head;
|
|
277
|
+
};
|
|
278
|
+
const onChunk = (chunk) => {
|
|
279
|
+
if (decided) {
|
|
280
|
+
/* v8 ignore next -- defensive: PassThrough rarely returns false here; backpressure handled at write time, drain listener resumes */
|
|
281
|
+
if (!out.write(chunk))
|
|
282
|
+
child.stdout.pause();
|
|
283
|
+
return;
|
|
284
|
+
}
|
|
285
|
+
peeked.push(chunk);
|
|
286
|
+
peekedLen += chunk.length;
|
|
287
|
+
if (peekedLen < LFS_MAGIC.length)
|
|
288
|
+
return;
|
|
289
|
+
decided = true;
|
|
290
|
+
const head = forwardPeeked();
|
|
291
|
+
if (head.subarray(0, LFS_MAGIC.length).equals(LFS_MAGIC)) {
|
|
292
|
+
this._handoffToLfs(child, out, head);
|
|
293
|
+
return;
|
|
294
|
+
}
|
|
295
|
+
/* v8 ignore next -- defensive: PassThrough rarely returns false here; backpressure handled at write time, drain listener resumes */
|
|
296
|
+
if (!out.write(head))
|
|
297
|
+
child.stdout.pause();
|
|
298
|
+
};
|
|
299
|
+
child.stdout.on('data', onChunk);
|
|
300
|
+
out.on('drain', () => child.stdout.resume());
|
|
301
|
+
child.stdout.on('end', () => {
|
|
302
|
+
if (!decided && peekedLen > 0) {
|
|
303
|
+
out.write(forwardPeeked());
|
|
304
|
+
}
|
|
305
|
+
/* v8 ignore next -- defensive: forwardPeeked above zeroes peekedLen, so the second arm always evaluates true after a flush */
|
|
306
|
+
if (decided || peekedLen === 0)
|
|
307
|
+
out.end();
|
|
308
|
+
});
|
|
309
|
+
child.stderr.on('data', (chunk) => {
|
|
310
|
+
Logger.debug(lazy `streamContent stderr for ${forRef.path}: ${() => chunk.toString()}`);
|
|
311
|
+
});
|
|
312
|
+
child.on('error', err => out.destroy(err));
|
|
313
|
+
child.on('close', code => {
|
|
314
|
+
// Intentional kills during LFS handoff close with a null/non-zero
|
|
315
|
+
// code; destroying `out` here would truncate the piped LFS stream
|
|
316
|
+
// the handoff just started.
|
|
317
|
+
if (code !== 0 && code !== null && !child.killed && !out.destroyed) {
|
|
318
|
+
out.destroy(new Error(`git cat-file blob exited ${code}`));
|
|
319
|
+
}
|
|
320
|
+
});
|
|
321
|
+
}
|
|
322
|
+
_handoffToLfs(child, out, head) {
|
|
323
|
+
const pointerParts = [head];
|
|
324
|
+
let pointerLen = head.length;
|
|
325
|
+
let aborted = false;
|
|
326
|
+
// Replace both data and end listeners from _wireStreamContent: the
|
|
327
|
+
// original end listener calls out.end() when decided===true, which would
|
|
328
|
+
// close `out` before the LFS file stream begins piping into it and the
|
|
329
|
+
// consumer would receive an empty/truncated payload.
|
|
330
|
+
child.stdout.removeAllListeners('data');
|
|
331
|
+
child.stdout.removeAllListeners('end');
|
|
332
|
+
child.stdout.on('data', (c) => {
|
|
333
|
+
if (aborted)
|
|
334
|
+
return;
|
|
335
|
+
pointerParts.push(c);
|
|
336
|
+
pointerLen += c.length;
|
|
337
|
+
if (pointerLen > GitAdapter.LFS_POINTER_CAP) {
|
|
338
|
+
aborted = true;
|
|
339
|
+
out.destroy(new Error('LFS pointer exceeds expected size'));
|
|
340
|
+
}
|
|
341
|
+
});
|
|
342
|
+
child.stdout.on('end', () => {
|
|
343
|
+
if (aborted)
|
|
344
|
+
return;
|
|
345
|
+
try {
|
|
346
|
+
const pointer = Buffer.concat(pointerParts, pointerLen);
|
|
347
|
+
const lfsPath = getLFSObjectContentPath(pointer);
|
|
348
|
+
createReadStream(join(this.config.repo, lfsPath))
|
|
349
|
+
.on('error', err => out.destroy(err))
|
|
350
|
+
.pipe(out);
|
|
351
|
+
}
|
|
352
|
+
catch (err) {
|
|
353
|
+
/* v8 ignore next -- defensive: getLFSObjectContentPath / createReadStream throw Error instances in practice; the String() fallback exists for non-Error throws */
|
|
354
|
+
out.destroy(err instanceof Error ? err : new Error(String(err)));
|
|
355
|
+
}
|
|
356
|
+
});
|
|
357
|
+
/* v8 ignore next -- defensive: _handoffToLfs is reached after the first peek; the child is alive at this point */
|
|
358
|
+
if (!child.killed)
|
|
359
|
+
child.kill();
|
|
360
|
+
}
|
|
107
361
|
async getStringContent(forRef) {
|
|
108
362
|
const content = await this.getBufferContent(forRef);
|
|
109
363
|
return content.toString(UTF8_ENCODING);
|
|
@@ -156,49 +410,121 @@ export default class GitAdapter {
|
|
|
156
410
|
}
|
|
157
411
|
}
|
|
158
412
|
// Fast path (no whitespace ignore): one `git diff --name-status` call.
|
|
413
|
+
// Rename detection (`-M` + `R` filter) is gated behind
|
|
414
|
+
// `config.changesManifest` so the default sgd pipeline emits the same
|
|
415
|
+
// A/M/D line shape as before this feature. When enabled, renames surface
|
|
416
|
+
// as `R<score>\tfrom\tto` lines that RepoGitDiff splits into synthetic
|
|
417
|
+
// A/D while recording the rename pair for ChangeSet.
|
|
418
|
+
//
|
|
419
|
+
// Whitespace path: three (or four, when rename detection is on) parallel
|
|
420
|
+
// `git diff --numstat` calls, one per --diff-filter. `--name-status` does
|
|
421
|
+
// NOT honor `--ignore-all-space` (git decides A/M/D from blob SHAs for
|
|
422
|
+
// that mode, so a whitespace-only change still appears as `M`). Only
|
|
423
|
+
// `--numstat` computes a real content diff under the whitespace flags,
|
|
424
|
+
// so files with 0/0 line changes drop out naturally. When rename
|
|
425
|
+
// detection is enabled the R call uses `-z` so it can sidestep numstat's
|
|
426
|
+
// brace/arrow rename-path encoding.
|
|
159
427
|
//
|
|
160
|
-
//
|
|
161
|
-
//
|
|
162
|
-
//
|
|
163
|
-
//
|
|
164
|
-
//
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
async getDiffLines() {
|
|
428
|
+
// With rename detection on there are four per-filter git invocations
// (A/M/D/R). They are awaited one at a time in the loop below, so at most
// one extra git child process is alive at any moment; the four
// `--diff-filter`s are the canonical per-kind split.
|
433
|
+
async *streamDiffLines() {
|
|
434
|
+
const detectRenames = Boolean(this.config.changesManifest);
|
|
168
435
|
if (!this.config.ignoreWhitespace) {
|
|
169
|
-
const
|
|
436
|
+
const args = [
|
|
170
437
|
'diff',
|
|
171
438
|
'--name-status',
|
|
172
|
-
'--no-renames',
|
|
173
|
-
|
|
439
|
+
...(detectRenames ? ['-M'] : ['--no-renames']),
|
|
440
|
+
`--diff-filter=${detectRenames ? 'AMDR' : 'AMD'}`,
|
|
174
441
|
this.config.from,
|
|
175
442
|
this.config.to,
|
|
176
443
|
'--',
|
|
177
444
|
...this.config.source,
|
|
178
|
-
]
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
445
|
+
];
|
|
446
|
+
for await (const line of this._spawnLines(args)) {
|
|
447
|
+
if (line)
|
|
448
|
+
yield treatPathSep(line);
|
|
449
|
+
}
|
|
450
|
+
return;
|
|
183
451
|
}
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
452
|
+
// When rename detection is on, the A/M/D filters also run with -M so
|
|
453
|
+
// renamed files drop out (reclassified to R by git), and the dedicated
|
|
454
|
+
// R call is the single source of rename lines — no dedup needed.
|
|
455
|
+
// Without rename detection, A/M/D keep their pre-feature line shape via
|
|
456
|
+
// --no-renames.
|
|
457
|
+
const filters = detectRenames
|
|
458
|
+
? [ADDITION, MODIFICATION, DELETION, RENAMED]
|
|
459
|
+
: [ADDITION, MODIFICATION, DELETION];
|
|
460
|
+
// Numstat path runs one git invocation per change-type; we still emit
|
|
461
|
+
// each batch as it lands so downstream filters can begin work before
|
|
462
|
+
// every filter has finished.
|
|
463
|
+
for (const changeType of filters) {
|
|
464
|
+
for (const line of await this._getNumstatLines(changeType, detectRenames)) {
|
|
465
|
+
yield line;
|
|
466
|
+
}
|
|
467
|
+
}
|
|
468
|
+
}
|
|
469
|
+
// Per-filter numstat call. The R branch uses `-z` because numstat
|
|
470
|
+
// otherwise encodes rename paths in three format variants within the
|
|
471
|
+
// path column (`{a => b}`, `a/{b => c}/d`, or bare `old => new`). `-z`
|
|
472
|
+
// emits `N<TAB>M<TAB>\0<src>\0<dst>\0` for each rename, so we stride-3
|
|
473
|
+
// over the NUL-split tokens and synthesise `R<TAB><src><TAB><dst>`
|
|
474
|
+
// lines that RepoGitDiff._expandRenames already understands.
|
|
475
|
+
//
|
|
476
|
+
// A/M/D: streamed via _spawnLines + readline, per-line transform strips
|
|
477
|
+
// the leading `N\tM\t` counts and rewrites them to the status prefix.
|
|
478
|
+
// R (NUL-delimited): buffered via simpleGit.raw because readline is
|
|
479
|
+
// newline-oriented and the whole-string split remains cheap for the
|
|
480
|
+
// rare rename set.
|
|
481
|
+
async _getNumstatLines(changeType, detectRenames) {
|
|
482
|
+
if (changeType === RENAMED) {
|
|
483
|
+
return this._getRenameLines();
|
|
484
|
+
}
|
|
485
|
+
const args = [
|
|
486
|
+
'diff',
|
|
487
|
+
'--numstat',
|
|
488
|
+
...(detectRenames ? ['-M'] : ['--no-renames']),
|
|
489
|
+
...IGNORE_WHITESPACE_PARAMS,
|
|
490
|
+
`--diff-filter=${changeType}`,
|
|
491
|
+
this.config.from,
|
|
492
|
+
this.config.to,
|
|
493
|
+
'--',
|
|
494
|
+
...this.config.source,
|
|
495
|
+
];
|
|
496
|
+
const lines = [];
|
|
497
|
+
for await (const line of this._spawnLines(args)) {
|
|
498
|
+
/* v8 ignore next -- defensive: _spawnLines splits on EOL; trailing/empty lines from git numstat are filtered here */
|
|
499
|
+
if (!line)
|
|
500
|
+
continue;
|
|
501
|
+
lines.push(treatPathSep(line.replace(NUM_STAT_CHANGE_INFORMATION, `${changeType}\t`)));
|
|
502
|
+
}
|
|
503
|
+
return lines;
|
|
504
|
+
}
|
|
505
|
+
async _getRenameLines() {
|
|
506
|
+
const output = await this.simpleGit.raw([
|
|
507
|
+
'diff',
|
|
508
|
+
'--numstat',
|
|
509
|
+
'-M',
|
|
510
|
+
'-z',
|
|
511
|
+
...IGNORE_WHITESPACE_PARAMS,
|
|
512
|
+
`--diff-filter=${RENAMED}`,
|
|
513
|
+
this.config.from,
|
|
514
|
+
this.config.to,
|
|
515
|
+
'--',
|
|
516
|
+
...this.config.source,
|
|
517
|
+
]);
|
|
518
|
+
const tokens = output.split('\0');
|
|
519
|
+
const lines = [];
|
|
520
|
+
for (let i = 0; i + 2 < tokens.length; i += 3) {
|
|
521
|
+
const src = tokens[i + 1];
|
|
522
|
+
const dst = tokens[i + 2];
|
|
523
|
+
if (!src || !dst)
|
|
524
|
+
continue;
|
|
525
|
+
lines.push(treatPathSep(`${RENAMED}${TAB}${src}${TAB}${dst}`));
|
|
526
|
+
}
|
|
527
|
+
return lines;
|
|
202
528
|
}
|
|
203
529
|
}
|
|
204
530
|
__decorate([
|
|
@@ -230,5 +556,5 @@ __decorate([
|
|
|
230
556
|
], GitAdapter.prototype, "gitGrep", null);
|
|
231
557
|
// Compiler-emitted decorator plumbing (tsc __decorate helper): applies the
// `log` decorator to GitAdapter.prototype.streamDiffLines.
__decorate([
    log
], GitAdapter.prototype, "streamDiffLines", null);
|
|
234
560
|
//# sourceMappingURL=GitAdapter.js.map
|