isomorphic-git 1.21.0 → 1.23.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +12 -1
- package/browser-tests.json +3 -5
- package/index.cjs +736 -492
- package/index.d.ts +64 -0
- package/index.js +736 -493
- package/index.umd.min.d.ts +64 -0
- package/index.umd.min.js +2 -2
- package/index.umd.min.js.map +1 -1
- package/package.json +3 -3
- package/size_report.html +1 -1
package/index.js
CHANGED
|
@@ -2,8 +2,8 @@ import AsyncLock from 'async-lock';
|
|
|
2
2
|
import Hash from 'sha.js/sha1.js';
|
|
3
3
|
import crc32 from 'crc-32';
|
|
4
4
|
import pako from 'pako';
|
|
5
|
-
import ignore from 'ignore';
|
|
6
5
|
import pify from 'pify';
|
|
6
|
+
import ignore from 'ignore';
|
|
7
7
|
import cleanGitRef from 'clean-git-ref';
|
|
8
8
|
import diff3Merge from 'diff3';
|
|
9
9
|
|
|
@@ -336,6 +336,21 @@ class BaseError extends Error {
|
|
|
336
336
|
}
|
|
337
337
|
}
|
|
338
338
|
|
|
339
|
+
class UnmergedPathsError extends BaseError {
|
|
340
|
+
/**
|
|
341
|
+
* @param {Array<string>} filepaths
|
|
342
|
+
*/
|
|
343
|
+
constructor(filepaths) {
|
|
344
|
+
super(
|
|
345
|
+
`Modifying the index is not possible because you have unmerged files: ${filepaths.toString}. Fix them up in the work tree, and then use 'git add/rm as appropriate to mark resolution and make a commit.`
|
|
346
|
+
);
|
|
347
|
+
this.code = this.name = UnmergedPathsError.code;
|
|
348
|
+
this.data = { filepaths };
|
|
349
|
+
}
|
|
350
|
+
}
|
|
351
|
+
/** @type {'UnmergedPathsError'} */
|
|
352
|
+
UnmergedPathsError.code = 'UnmergedPathsError';
|
|
353
|
+
|
|
339
354
|
class InternalError extends BaseError {
|
|
340
355
|
/**
|
|
341
356
|
* @param {string} message
|
|
@@ -621,11 +636,28 @@ class GitIndex {
|
|
|
621
636
|
_entries: Map<string, CacheEntry>
|
|
622
637
|
_dirty: boolean // Used to determine if index needs to be saved to filesystem
|
|
623
638
|
*/
|
|
624
|
-
constructor(entries) {
|
|
639
|
+
constructor(entries, unmergedPaths) {
|
|
625
640
|
this._dirty = false;
|
|
641
|
+
this._unmergedPaths = unmergedPaths || new Set();
|
|
626
642
|
this._entries = entries || new Map();
|
|
627
643
|
}
|
|
628
644
|
|
|
645
|
+
_addEntry(entry) {
|
|
646
|
+
if (entry.flags.stage === 0) {
|
|
647
|
+
entry.stages = [entry];
|
|
648
|
+
this._entries.set(entry.path, entry);
|
|
649
|
+
this._unmergedPaths.delete(entry.path);
|
|
650
|
+
} else {
|
|
651
|
+
let existingEntry = this._entries.get(entry.path);
|
|
652
|
+
if (!existingEntry) {
|
|
653
|
+
this._entries.set(entry.path, entry);
|
|
654
|
+
existingEntry = entry;
|
|
655
|
+
}
|
|
656
|
+
existingEntry.stages[entry.flags.stage] = entry;
|
|
657
|
+
this._unmergedPaths.add(entry.path);
|
|
658
|
+
}
|
|
659
|
+
}
|
|
660
|
+
|
|
629
661
|
static async from(buffer) {
|
|
630
662
|
if (Buffer.isBuffer(buffer)) {
|
|
631
663
|
return GitIndex.fromBuffer(buffer)
|
|
@@ -645,8 +677,8 @@ class GitIndex {
|
|
|
645
677
|
`Invalid checksum in GitIndex buffer: expected ${shaClaimed} but saw ${shaComputed}`
|
|
646
678
|
)
|
|
647
679
|
}
|
|
680
|
+
const index = new GitIndex();
|
|
648
681
|
const reader = new BufferCursor(buffer);
|
|
649
|
-
const _entries = new Map();
|
|
650
682
|
const magic = reader.toString('utf8', 4);
|
|
651
683
|
if (magic !== 'DIRC') {
|
|
652
684
|
throw new InternalError(`Inavlid dircache magic file number: ${magic}`)
|
|
@@ -701,10 +733,17 @@ class GitIndex {
|
|
|
701
733
|
}
|
|
702
734
|
}
|
|
703
735
|
// end of awkward part
|
|
704
|
-
|
|
736
|
+
entry.stages = [];
|
|
737
|
+
|
|
738
|
+
index._addEntry(entry);
|
|
739
|
+
|
|
705
740
|
i++;
|
|
706
741
|
}
|
|
707
|
-
return
|
|
742
|
+
return index
|
|
743
|
+
}
|
|
744
|
+
|
|
745
|
+
get unmergedPaths() {
|
|
746
|
+
return [...this._unmergedPaths]
|
|
708
747
|
}
|
|
709
748
|
|
|
710
749
|
get entries() {
|
|
@@ -715,13 +754,33 @@ class GitIndex {
|
|
|
715
754
|
return this._entries
|
|
716
755
|
}
|
|
717
756
|
|
|
757
|
+
get entriesFlat() {
|
|
758
|
+
return [...this.entries].flatMap(entry => {
|
|
759
|
+
return entry.stages.length > 1 ? entry.stages.filter(x => x) : entry
|
|
760
|
+
})
|
|
761
|
+
}
|
|
762
|
+
|
|
718
763
|
*[Symbol.iterator]() {
|
|
719
764
|
for (const entry of this.entries) {
|
|
720
765
|
yield entry;
|
|
721
766
|
}
|
|
722
767
|
}
|
|
723
768
|
|
|
724
|
-
insert({ filepath, stats, oid }) {
|
|
769
|
+
insert({ filepath, stats, oid, stage = 0 }) {
|
|
770
|
+
if (!stats) {
|
|
771
|
+
stats = {
|
|
772
|
+
ctimeSeconds: 0,
|
|
773
|
+
ctimeNanoseconds: 0,
|
|
774
|
+
mtimeSeconds: 0,
|
|
775
|
+
mtimeNanoseconds: 0,
|
|
776
|
+
dev: 0,
|
|
777
|
+
ino: 0,
|
|
778
|
+
mode: 0,
|
|
779
|
+
uid: 0,
|
|
780
|
+
gid: 0,
|
|
781
|
+
size: 0,
|
|
782
|
+
};
|
|
783
|
+
}
|
|
725
784
|
stats = normalizeStats(stats);
|
|
726
785
|
const bfilepath = Buffer.from(filepath);
|
|
727
786
|
const entry = {
|
|
@@ -743,11 +802,14 @@ class GitIndex {
|
|
|
743
802
|
flags: {
|
|
744
803
|
assumeValid: false,
|
|
745
804
|
extended: false,
|
|
746
|
-
stage
|
|
805
|
+
stage,
|
|
747
806
|
nameLength: bfilepath.length < 0xfff ? bfilepath.length : 0xfff,
|
|
748
807
|
},
|
|
808
|
+
stages: [],
|
|
749
809
|
};
|
|
750
|
-
|
|
810
|
+
|
|
811
|
+
this._addEntry(entry);
|
|
812
|
+
|
|
751
813
|
this._dirty = true;
|
|
752
814
|
}
|
|
753
815
|
|
|
@@ -761,6 +823,10 @@ class GitIndex {
|
|
|
761
823
|
}
|
|
762
824
|
}
|
|
763
825
|
}
|
|
826
|
+
|
|
827
|
+
if (this._unmergedPaths.has(filepath)) {
|
|
828
|
+
this._unmergedPaths.delete(filepath);
|
|
829
|
+
}
|
|
764
830
|
this._dirty = true;
|
|
765
831
|
}
|
|
766
832
|
|
|
@@ -779,36 +845,50 @@ class GitIndex {
|
|
|
779
845
|
.join('\n')
|
|
780
846
|
}
|
|
781
847
|
|
|
848
|
+
static async _entryToBuffer(entry) {
|
|
849
|
+
const bpath = Buffer.from(entry.path);
|
|
850
|
+
// the fixed length + the filename + at least one null char => align by 8
|
|
851
|
+
const length = Math.ceil((62 + bpath.length + 1) / 8) * 8;
|
|
852
|
+
const written = Buffer.alloc(length);
|
|
853
|
+
const writer = new BufferCursor(written);
|
|
854
|
+
const stat = normalizeStats(entry);
|
|
855
|
+
writer.writeUInt32BE(stat.ctimeSeconds);
|
|
856
|
+
writer.writeUInt32BE(stat.ctimeNanoseconds);
|
|
857
|
+
writer.writeUInt32BE(stat.mtimeSeconds);
|
|
858
|
+
writer.writeUInt32BE(stat.mtimeNanoseconds);
|
|
859
|
+
writer.writeUInt32BE(stat.dev);
|
|
860
|
+
writer.writeUInt32BE(stat.ino);
|
|
861
|
+
writer.writeUInt32BE(stat.mode);
|
|
862
|
+
writer.writeUInt32BE(stat.uid);
|
|
863
|
+
writer.writeUInt32BE(stat.gid);
|
|
864
|
+
writer.writeUInt32BE(stat.size);
|
|
865
|
+
writer.write(entry.oid, 20, 'hex');
|
|
866
|
+
writer.writeUInt16BE(renderCacheEntryFlags(entry));
|
|
867
|
+
writer.write(entry.path, bpath.length, 'utf8');
|
|
868
|
+
return written
|
|
869
|
+
}
|
|
870
|
+
|
|
782
871
|
async toObject() {
|
|
783
872
|
const header = Buffer.alloc(12);
|
|
784
873
|
const writer = new BufferCursor(header);
|
|
785
874
|
writer.write('DIRC', 4, 'utf8');
|
|
786
875
|
writer.writeUInt32BE(2);
|
|
787
|
-
writer.writeUInt32BE(this.
|
|
788
|
-
|
|
789
|
-
|
|
790
|
-
|
|
791
|
-
|
|
792
|
-
|
|
793
|
-
const
|
|
794
|
-
|
|
795
|
-
|
|
796
|
-
|
|
797
|
-
|
|
798
|
-
|
|
799
|
-
|
|
800
|
-
|
|
801
|
-
|
|
802
|
-
|
|
803
|
-
writer.writeUInt32BE(stat.uid);
|
|
804
|
-
writer.writeUInt32BE(stat.gid);
|
|
805
|
-
writer.writeUInt32BE(stat.size);
|
|
806
|
-
writer.write(entry.oid, 20, 'hex');
|
|
807
|
-
writer.writeUInt16BE(renderCacheEntryFlags(entry));
|
|
808
|
-
writer.write(entry.path, bpath.length, 'utf8');
|
|
809
|
-
return written
|
|
810
|
-
})
|
|
811
|
-
);
|
|
876
|
+
writer.writeUInt32BE(this.entriesFlat.length);
|
|
877
|
+
|
|
878
|
+
let entryBuffers = [];
|
|
879
|
+
for (const entry of this.entries) {
|
|
880
|
+
entryBuffers.push(GitIndex._entryToBuffer(entry));
|
|
881
|
+
if (entry.stages.length > 1) {
|
|
882
|
+
for (const stage of entry.stages) {
|
|
883
|
+
if (stage && stage !== entry) {
|
|
884
|
+
entryBuffers.push(GitIndex._entryToBuffer(stage));
|
|
885
|
+
}
|
|
886
|
+
}
|
|
887
|
+
}
|
|
888
|
+
}
|
|
889
|
+
entryBuffers = await Promise.all(entryBuffers);
|
|
890
|
+
|
|
891
|
+
const body = Buffer.concat(entryBuffers);
|
|
812
892
|
const main = Buffer.concat([header, body]);
|
|
813
893
|
const sum = await shasum(main);
|
|
814
894
|
return Buffer.concat([main, Buffer.from(sum, 'hex')])
|
|
@@ -874,14 +954,16 @@ class GitIndexManager {
|
|
|
874
954
|
* @param {import('../models/FileSystem.js').FileSystem} opts.fs
|
|
875
955
|
* @param {string} opts.gitdir
|
|
876
956
|
* @param {object} opts.cache
|
|
957
|
+
* @param {bool} opts.allowUnmerged
|
|
877
958
|
* @param {function(GitIndex): any} closure
|
|
878
959
|
*/
|
|
879
|
-
static async acquire({ fs, gitdir, cache }, closure) {
|
|
960
|
+
static async acquire({ fs, gitdir, cache, allowUnmerged = true }, closure) {
|
|
880
961
|
if (!cache[IndexCache]) cache[IndexCache] = createCache();
|
|
881
962
|
|
|
882
963
|
const filepath = `${gitdir}/index`;
|
|
883
964
|
if (lock === null) lock = new AsyncLock({ maxPending: Infinity });
|
|
884
965
|
let result;
|
|
966
|
+
let unmergedPaths = [];
|
|
885
967
|
await lock.acquire(filepath, async () => {
|
|
886
968
|
// Acquire a file lock while we're reading the index
|
|
887
969
|
// to make sure other processes aren't writing to it
|
|
@@ -891,6 +973,11 @@ class GitIndexManager {
|
|
|
891
973
|
await updateCachedIndexFile(fs, filepath, cache[IndexCache]);
|
|
892
974
|
}
|
|
893
975
|
const index = cache[IndexCache].map.get(filepath);
|
|
976
|
+
unmergedPaths = index.unmergedPaths;
|
|
977
|
+
|
|
978
|
+
if (unmergedPaths.length && !allowUnmerged)
|
|
979
|
+
throw new UnmergedPathsError(unmergedPaths)
|
|
980
|
+
|
|
894
981
|
result = await closure(index);
|
|
895
982
|
if (index._dirty) {
|
|
896
983
|
// Acquire a file lock while we're writing the index file
|
|
@@ -3377,6 +3464,21 @@ class UserCanceledError extends BaseError {
|
|
|
3377
3464
|
/** @type {'UserCanceledError'} */
|
|
3378
3465
|
UserCanceledError.code = 'UserCanceledError';
|
|
3379
3466
|
|
|
3467
|
+
class IndexResetError extends BaseError {
|
|
3468
|
+
/**
|
|
3469
|
+
* @param {Array<string>} filepaths
|
|
3470
|
+
*/
|
|
3471
|
+
constructor(filepath) {
|
|
3472
|
+
super(
|
|
3473
|
+
`Could not merge index: Entry for '${filepath}' is not up to date. Either reset the index entry to HEAD, or stage your unstaged chages.`
|
|
3474
|
+
);
|
|
3475
|
+
this.code = this.name = IndexResetError.code;
|
|
3476
|
+
this.data = { filepath };
|
|
3477
|
+
}
|
|
3478
|
+
}
|
|
3479
|
+
/** @type {'IndexResetError'} */
|
|
3480
|
+
IndexResetError.code = 'IndexResetError';
|
|
3481
|
+
|
|
3380
3482
|
|
|
3381
3483
|
|
|
3382
3484
|
var Errors = /*#__PURE__*/Object.freeze({
|
|
@@ -3409,7 +3511,9 @@ var Errors = /*#__PURE__*/Object.freeze({
|
|
|
3409
3511
|
UnknownTransportError: UnknownTransportError,
|
|
3410
3512
|
UnsafeFilepathError: UnsafeFilepathError,
|
|
3411
3513
|
UrlParseError: UrlParseError,
|
|
3412
|
-
UserCanceledError: UserCanceledError
|
|
3514
|
+
UserCanceledError: UserCanceledError,
|
|
3515
|
+
UnmergedPathsError: UnmergedPathsError,
|
|
3516
|
+
IndexResetError: IndexResetError
|
|
3413
3517
|
});
|
|
3414
3518
|
|
|
3415
3519
|
function formatAuthor({ name, email, timestamp, timezoneOffset }) {
|
|
@@ -4093,171 +4197,279 @@ function WORKDIR() {
|
|
|
4093
4197
|
|
|
4094
4198
|
// @ts-check
|
|
4095
4199
|
|
|
4096
|
-
//
|
|
4097
|
-
|
|
4098
|
-
|
|
4099
|
-
|
|
4100
|
-
// ALWAYS ignore ".git" folders.
|
|
4101
|
-
if (basename(filepath) === '.git') return true
|
|
4102
|
-
// '.' is not a valid gitignore entry, so '.' is never ignored
|
|
4103
|
-
if (filepath === '.') return false
|
|
4104
|
-
// Check and load exclusion rules from project exclude file (.git/info/exclude)
|
|
4105
|
-
let excludes = '';
|
|
4106
|
-
const excludesFile = join(gitdir, 'info', 'exclude');
|
|
4107
|
-
if (await fs.exists(excludesFile)) {
|
|
4108
|
-
excludes = await fs.read(excludesFile, 'utf8');
|
|
4109
|
-
}
|
|
4110
|
-
// Find all the .gitignore files that could affect this file
|
|
4111
|
-
const pairs = [
|
|
4112
|
-
{
|
|
4113
|
-
gitignore: join(dir, '.gitignore'),
|
|
4114
|
-
filepath,
|
|
4115
|
-
},
|
|
4116
|
-
];
|
|
4117
|
-
const pieces = filepath.split('/').filter(Boolean);
|
|
4118
|
-
for (let i = 1; i < pieces.length; i++) {
|
|
4119
|
-
const folder = pieces.slice(0, i).join('/');
|
|
4120
|
-
const file = pieces.slice(i).join('/');
|
|
4121
|
-
pairs.push({
|
|
4122
|
-
gitignore: join(dir, folder, '.gitignore'),
|
|
4123
|
-
filepath: file,
|
|
4124
|
-
});
|
|
4125
|
-
}
|
|
4126
|
-
let ignoredStatus = false;
|
|
4127
|
-
for (const p of pairs) {
|
|
4128
|
-
let file;
|
|
4129
|
-
try {
|
|
4130
|
-
file = await fs.read(p.gitignore, 'utf8');
|
|
4131
|
-
} catch (err) {
|
|
4132
|
-
if (err.code === 'NOENT') continue
|
|
4133
|
-
}
|
|
4134
|
-
const ign = ignore().add(excludes);
|
|
4135
|
-
ign.add(file);
|
|
4136
|
-
// If the parent directory is excluded, we are done.
|
|
4137
|
-
// "It is not possible to re-include a file if a parent directory of that file is excluded. Git doesn’t list excluded directories for performance reasons, so any patterns on contained files have no effect, no matter where they are defined."
|
|
4138
|
-
// source: https://git-scm.com/docs/gitignore
|
|
4139
|
-
const parentdir = dirname(p.filepath);
|
|
4140
|
-
if (parentdir !== '.' && ign.ignores(parentdir)) return true
|
|
4141
|
-
// If the file is currently ignored, test for UNignoring.
|
|
4142
|
-
if (ignoredStatus) {
|
|
4143
|
-
ignoredStatus = !ign.test(p.filepath).unignored;
|
|
4144
|
-
} else {
|
|
4145
|
-
ignoredStatus = ign.test(p.filepath).ignored;
|
|
4146
|
-
}
|
|
4147
|
-
}
|
|
4148
|
-
return ignoredStatus
|
|
4149
|
-
}
|
|
4200
|
+
// https://dev.to/namirsab/comment/2050
|
|
4201
|
+
function arrayRange(start, end) {
|
|
4202
|
+
const length = end - start;
|
|
4203
|
+
return Array.from({ length }, (_, i) => start + i)
|
|
4150
4204
|
}
|
|
4151
4205
|
|
|
4152
|
-
|
|
4153
|
-
|
|
4154
|
-
|
|
4155
|
-
|
|
4156
|
-
|
|
4157
|
-
* @param {import('../models/FileSystem.js').FileSystem} fs
|
|
4158
|
-
* @param {string} filepath - The file or directory to remove.
|
|
4159
|
-
*/
|
|
4160
|
-
async function rmRecursive(fs, filepath) {
|
|
4161
|
-
const entries = await fs.readdir(filepath);
|
|
4162
|
-
if (entries == null) {
|
|
4163
|
-
await fs.rm(filepath);
|
|
4164
|
-
} else if (entries.length) {
|
|
4165
|
-
await Promise.all(
|
|
4166
|
-
entries.map(entry => {
|
|
4167
|
-
const subpath = join(filepath, entry);
|
|
4168
|
-
return fs.lstat(subpath).then(stat => {
|
|
4169
|
-
if (!stat) return
|
|
4170
|
-
return stat.isDirectory() ? rmRecursive(fs, subpath) : fs.rm(subpath)
|
|
4171
|
-
})
|
|
4172
|
-
})
|
|
4173
|
-
).then(() => fs.rmdir(filepath));
|
|
4174
|
-
} else {
|
|
4175
|
-
await fs.rmdir(filepath);
|
|
4176
|
-
}
|
|
4177
|
-
}
|
|
4206
|
+
// TODO: Should I just polyfill Array.flat?
|
|
4207
|
+
const flat =
|
|
4208
|
+
typeof Array.prototype.flat === 'undefined'
|
|
4209
|
+
? entries => entries.reduce((acc, x) => acc.concat(x), [])
|
|
4210
|
+
: entries => entries.flat();
|
|
4178
4211
|
|
|
4179
|
-
|
|
4180
|
-
|
|
4181
|
-
|
|
4182
|
-
|
|
4183
|
-
|
|
4184
|
-
|
|
4212
|
+
// This is convenient for computing unions/joins of sorted lists.
|
|
4213
|
+
class RunningMinimum {
|
|
4214
|
+
constructor() {
|
|
4215
|
+
// Using a getter for 'value' would just bloat the code.
|
|
4216
|
+
// You know better than to set it directly right?
|
|
4217
|
+
this.value = null;
|
|
4218
|
+
}
|
|
4185
4219
|
|
|
4186
|
-
|
|
4187
|
-
if (
|
|
4188
|
-
|
|
4189
|
-
this.
|
|
4190
|
-
|
|
4191
|
-
|
|
4192
|
-
this._rm = fs.promises.rm.bind(fs.promises);
|
|
4193
|
-
} else if (fs.promises.rmdir.length > 1) {
|
|
4194
|
-
this._rm = fs.promises.rmdir.bind(fs.promises);
|
|
4195
|
-
} else {
|
|
4196
|
-
this._rm = rmRecursive.bind(null, this);
|
|
4197
|
-
}
|
|
4198
|
-
this._rmdir = fs.promises.rmdir.bind(fs.promises);
|
|
4199
|
-
this._unlink = fs.promises.unlink.bind(fs.promises);
|
|
4200
|
-
this._stat = fs.promises.stat.bind(fs.promises);
|
|
4201
|
-
this._lstat = fs.promises.lstat.bind(fs.promises);
|
|
4202
|
-
this._readdir = fs.promises.readdir.bind(fs.promises);
|
|
4203
|
-
this._readlink = fs.promises.readlink.bind(fs.promises);
|
|
4204
|
-
this._symlink = fs.promises.symlink.bind(fs.promises);
|
|
4205
|
-
} else {
|
|
4206
|
-
this._readFile = pify(fs.readFile.bind(fs));
|
|
4207
|
-
this._writeFile = pify(fs.writeFile.bind(fs));
|
|
4208
|
-
this._mkdir = pify(fs.mkdir.bind(fs));
|
|
4209
|
-
if (fs.rm) {
|
|
4210
|
-
this._rm = pify(fs.rm.bind(fs));
|
|
4211
|
-
} else if (fs.rmdir.length > 2) {
|
|
4212
|
-
this._rm = pify(fs.rmdir.bind(fs));
|
|
4213
|
-
} else {
|
|
4214
|
-
this._rm = rmRecursive.bind(null, this);
|
|
4215
|
-
}
|
|
4216
|
-
this._rmdir = pify(fs.rmdir.bind(fs));
|
|
4217
|
-
this._unlink = pify(fs.unlink.bind(fs));
|
|
4218
|
-
this._stat = pify(fs.stat.bind(fs));
|
|
4219
|
-
this._lstat = pify(fs.lstat.bind(fs));
|
|
4220
|
-
this._readdir = pify(fs.readdir.bind(fs));
|
|
4221
|
-
this._readlink = pify(fs.readlink.bind(fs));
|
|
4222
|
-
this._symlink = pify(fs.symlink.bind(fs));
|
|
4220
|
+
consider(value) {
|
|
4221
|
+
if (value === null || value === undefined) return
|
|
4222
|
+
if (this.value === null) {
|
|
4223
|
+
this.value = value;
|
|
4224
|
+
} else if (value < this.value) {
|
|
4225
|
+
this.value = value;
|
|
4223
4226
|
}
|
|
4224
|
-
this._original_unwrapped_fs = fs;
|
|
4225
4227
|
}
|
|
4226
4228
|
|
|
4227
|
-
|
|
4228
|
-
|
|
4229
|
-
* Rethrows errors that aren't related to file existance.
|
|
4230
|
-
*/
|
|
4231
|
-
async exists(filepath, options = {}) {
|
|
4232
|
-
try {
|
|
4233
|
-
await this._stat(filepath);
|
|
4234
|
-
return true
|
|
4235
|
-
} catch (err) {
|
|
4236
|
-
if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
|
|
4237
|
-
return false
|
|
4238
|
-
} else {
|
|
4239
|
-
console.log('Unhandled error in "FileSystem.exists()" function', err);
|
|
4240
|
-
throw err
|
|
4241
|
-
}
|
|
4242
|
-
}
|
|
4229
|
+
reset() {
|
|
4230
|
+
this.value = null;
|
|
4243
4231
|
}
|
|
4232
|
+
}
|
|
4244
4233
|
|
|
4245
|
-
|
|
4246
|
-
|
|
4234
|
+
// Take an array of length N of
|
|
4235
|
+
// iterators of length Q_n
|
|
4236
|
+
// of strings
|
|
4237
|
+
// and return an iterator of length max(Q_n) for all n
|
|
4238
|
+
// of arrays of length N
|
|
4239
|
+
// of string|null who all have the same string value
|
|
4240
|
+
function* unionOfIterators(sets) {
|
|
4241
|
+
/* NOTE: We can assume all arrays are sorted.
|
|
4242
|
+
* Indexes are sorted because they are defined that way:
|
|
4247
4243
|
*
|
|
4248
|
-
*
|
|
4249
|
-
*
|
|
4244
|
+
* > Index entries are sorted in ascending order on the name field,
|
|
4245
|
+
* > interpreted as a string of unsigned bytes (i.e. memcmp() order, no
|
|
4246
|
+
* > localization, no special casing of directory separator '/'). Entries
|
|
4247
|
+
* > with the same name are sorted by their stage field.
|
|
4250
4248
|
*
|
|
4251
|
-
*
|
|
4249
|
+
* Trees should be sorted because they are created directly from indexes.
|
|
4250
|
+
* They definitely should be sorted, or else they wouldn't have a unique SHA1.
|
|
4251
|
+
* So that would be very naughty on the part of the tree-creator.
|
|
4252
|
+
*
|
|
4253
|
+
* Lastly, the working dir entries are sorted because I choose to sort them
|
|
4254
|
+
* in my FileSystem.readdir() implementation.
|
|
4252
4255
|
*/
|
|
4253
|
-
|
|
4254
|
-
|
|
4255
|
-
|
|
4256
|
-
|
|
4257
|
-
|
|
4258
|
-
|
|
4259
|
-
|
|
4260
|
-
|
|
4256
|
+
|
|
4257
|
+
// Init
|
|
4258
|
+
const min = new RunningMinimum();
|
|
4259
|
+
let minimum;
|
|
4260
|
+
const heads = [];
|
|
4261
|
+
const numsets = sets.length;
|
|
4262
|
+
for (let i = 0; i < numsets; i++) {
|
|
4263
|
+
// Abuse the fact that iterators continue to return 'undefined' for value
|
|
4264
|
+
// once they are done
|
|
4265
|
+
heads[i] = sets[i].next().value;
|
|
4266
|
+
if (heads[i] !== undefined) {
|
|
4267
|
+
min.consider(heads[i]);
|
|
4268
|
+
}
|
|
4269
|
+
}
|
|
4270
|
+
if (min.value === null) return
|
|
4271
|
+
// Iterate
|
|
4272
|
+
while (true) {
|
|
4273
|
+
const result = [];
|
|
4274
|
+
minimum = min.value;
|
|
4275
|
+
min.reset();
|
|
4276
|
+
for (let i = 0; i < numsets; i++) {
|
|
4277
|
+
if (heads[i] !== undefined && heads[i] === minimum) {
|
|
4278
|
+
result[i] = heads[i];
|
|
4279
|
+
heads[i] = sets[i].next().value;
|
|
4280
|
+
} else {
|
|
4281
|
+
// A little hacky, but eh
|
|
4282
|
+
result[i] = null;
|
|
4283
|
+
}
|
|
4284
|
+
if (heads[i] !== undefined) {
|
|
4285
|
+
min.consider(heads[i]);
|
|
4286
|
+
}
|
|
4287
|
+
}
|
|
4288
|
+
yield result;
|
|
4289
|
+
if (min.value === null) return
|
|
4290
|
+
}
|
|
4291
|
+
}
|
|
4292
|
+
|
|
4293
|
+
// @ts-check
|
|
4294
|
+
|
|
4295
|
+
/**
|
|
4296
|
+
* @param {object} args
|
|
4297
|
+
* @param {import('../models/FileSystem.js').FileSystem} args.fs
|
|
4298
|
+
* @param {object} args.cache
|
|
4299
|
+
* @param {string} [args.dir]
|
|
4300
|
+
* @param {string} [args.gitdir=join(dir,'.git')]
|
|
4301
|
+
* @param {Walker[]} args.trees
|
|
4302
|
+
* @param {WalkerMap} [args.map]
|
|
4303
|
+
* @param {WalkerReduce} [args.reduce]
|
|
4304
|
+
* @param {WalkerIterate} [args.iterate]
|
|
4305
|
+
*
|
|
4306
|
+
* @returns {Promise<any>} The finished tree-walking result
|
|
4307
|
+
*
|
|
4308
|
+
* @see {WalkerMap}
|
|
4309
|
+
*
|
|
4310
|
+
*/
|
|
4311
|
+
async function _walk({
|
|
4312
|
+
fs,
|
|
4313
|
+
cache,
|
|
4314
|
+
dir,
|
|
4315
|
+
gitdir,
|
|
4316
|
+
trees,
|
|
4317
|
+
// @ts-ignore
|
|
4318
|
+
map = async (_, entry) => entry,
|
|
4319
|
+
// The default reducer is a flatmap that filters out undefineds.
|
|
4320
|
+
reduce = async (parent, children) => {
|
|
4321
|
+
const flatten = flat(children);
|
|
4322
|
+
if (parent !== undefined) flatten.unshift(parent);
|
|
4323
|
+
return flatten
|
|
4324
|
+
},
|
|
4325
|
+
// The default iterate function walks all children concurrently
|
|
4326
|
+
iterate = (walk, children) => Promise.all([...children].map(walk)),
|
|
4327
|
+
}) {
|
|
4328
|
+
const walkers = trees.map(proxy =>
|
|
4329
|
+
proxy[GitWalkSymbol]({ fs, dir, gitdir, cache })
|
|
4330
|
+
);
|
|
4331
|
+
|
|
4332
|
+
const root = new Array(walkers.length).fill('.');
|
|
4333
|
+
const range = arrayRange(0, walkers.length);
|
|
4334
|
+
const unionWalkerFromReaddir = async entries => {
|
|
4335
|
+
range.map(i => {
|
|
4336
|
+
entries[i] = entries[i] && new walkers[i].ConstructEntry(entries[i]);
|
|
4337
|
+
});
|
|
4338
|
+
const subdirs = await Promise.all(
|
|
4339
|
+
range.map(i => (entries[i] ? walkers[i].readdir(entries[i]) : []))
|
|
4340
|
+
);
|
|
4341
|
+
// Now process child directories
|
|
4342
|
+
const iterators = subdirs
|
|
4343
|
+
.map(array => (array === null ? [] : array))
|
|
4344
|
+
.map(array => array[Symbol.iterator]());
|
|
4345
|
+
return {
|
|
4346
|
+
entries,
|
|
4347
|
+
children: unionOfIterators(iterators),
|
|
4348
|
+
}
|
|
4349
|
+
};
|
|
4350
|
+
|
|
4351
|
+
const walk = async root => {
|
|
4352
|
+
const { entries, children } = await unionWalkerFromReaddir(root);
|
|
4353
|
+
const fullpath = entries.find(entry => entry && entry._fullpath)._fullpath;
|
|
4354
|
+
const parent = await map(fullpath, entries);
|
|
4355
|
+
if (parent !== null) {
|
|
4356
|
+
let walkedChildren = await iterate(walk, children);
|
|
4357
|
+
walkedChildren = walkedChildren.filter(x => x !== undefined);
|
|
4358
|
+
return reduce(parent, walkedChildren)
|
|
4359
|
+
}
|
|
4360
|
+
};
|
|
4361
|
+
return walk(root)
|
|
4362
|
+
}
|
|
4363
|
+
|
|
4364
|
+
/**
|
|
4365
|
+
* Removes the directory at the specified filepath recursively. Used internally to replicate the behavior of
|
|
4366
|
+
* fs.promises.rm({ recursive: true, force: true }) from Node.js 14 and above when not available. If the provided
|
|
4367
|
+
* filepath resolves to a file, it will be removed.
|
|
4368
|
+
*
|
|
4369
|
+
* @param {import('../models/FileSystem.js').FileSystem} fs
|
|
4370
|
+
* @param {string} filepath - The file or directory to remove.
|
|
4371
|
+
*/
|
|
4372
|
+
async function rmRecursive(fs, filepath) {
|
|
4373
|
+
const entries = await fs.readdir(filepath);
|
|
4374
|
+
if (entries == null) {
|
|
4375
|
+
await fs.rm(filepath);
|
|
4376
|
+
} else if (entries.length) {
|
|
4377
|
+
await Promise.all(
|
|
4378
|
+
entries.map(entry => {
|
|
4379
|
+
const subpath = join(filepath, entry);
|
|
4380
|
+
return fs.lstat(subpath).then(stat => {
|
|
4381
|
+
if (!stat) return
|
|
4382
|
+
return stat.isDirectory() ? rmRecursive(fs, subpath) : fs.rm(subpath)
|
|
4383
|
+
})
|
|
4384
|
+
})
|
|
4385
|
+
).then(() => fs.rmdir(filepath));
|
|
4386
|
+
} else {
|
|
4387
|
+
await fs.rmdir(filepath);
|
|
4388
|
+
}
|
|
4389
|
+
}
|
|
4390
|
+
|
|
4391
|
+
/**
|
|
4392
|
+
* This is just a collection of helper functions really. At least that's how it started.
|
|
4393
|
+
*/
|
|
4394
|
+
class FileSystem {
|
|
4395
|
+
constructor(fs) {
|
|
4396
|
+
if (typeof fs._original_unwrapped_fs !== 'undefined') return fs
|
|
4397
|
+
|
|
4398
|
+
const promises = Object.getOwnPropertyDescriptor(fs, 'promises');
|
|
4399
|
+
if (promises && promises.enumerable) {
|
|
4400
|
+
this._readFile = fs.promises.readFile.bind(fs.promises);
|
|
4401
|
+
this._writeFile = fs.promises.writeFile.bind(fs.promises);
|
|
4402
|
+
this._mkdir = fs.promises.mkdir.bind(fs.promises);
|
|
4403
|
+
if (fs.promises.rm) {
|
|
4404
|
+
this._rm = fs.promises.rm.bind(fs.promises);
|
|
4405
|
+
} else if (fs.promises.rmdir.length > 1) {
|
|
4406
|
+
this._rm = fs.promises.rmdir.bind(fs.promises);
|
|
4407
|
+
} else {
|
|
4408
|
+
this._rm = rmRecursive.bind(null, this);
|
|
4409
|
+
}
|
|
4410
|
+
this._rmdir = fs.promises.rmdir.bind(fs.promises);
|
|
4411
|
+
this._unlink = fs.promises.unlink.bind(fs.promises);
|
|
4412
|
+
this._stat = fs.promises.stat.bind(fs.promises);
|
|
4413
|
+
this._lstat = fs.promises.lstat.bind(fs.promises);
|
|
4414
|
+
this._readdir = fs.promises.readdir.bind(fs.promises);
|
|
4415
|
+
this._readlink = fs.promises.readlink.bind(fs.promises);
|
|
4416
|
+
this._symlink = fs.promises.symlink.bind(fs.promises);
|
|
4417
|
+
} else {
|
|
4418
|
+
this._readFile = pify(fs.readFile.bind(fs));
|
|
4419
|
+
this._writeFile = pify(fs.writeFile.bind(fs));
|
|
4420
|
+
this._mkdir = pify(fs.mkdir.bind(fs));
|
|
4421
|
+
if (fs.rm) {
|
|
4422
|
+
this._rm = pify(fs.rm.bind(fs));
|
|
4423
|
+
} else if (fs.rmdir.length > 2) {
|
|
4424
|
+
this._rm = pify(fs.rmdir.bind(fs));
|
|
4425
|
+
} else {
|
|
4426
|
+
this._rm = rmRecursive.bind(null, this);
|
|
4427
|
+
}
|
|
4428
|
+
this._rmdir = pify(fs.rmdir.bind(fs));
|
|
4429
|
+
this._unlink = pify(fs.unlink.bind(fs));
|
|
4430
|
+
this._stat = pify(fs.stat.bind(fs));
|
|
4431
|
+
this._lstat = pify(fs.lstat.bind(fs));
|
|
4432
|
+
this._readdir = pify(fs.readdir.bind(fs));
|
|
4433
|
+
this._readlink = pify(fs.readlink.bind(fs));
|
|
4434
|
+
this._symlink = pify(fs.symlink.bind(fs));
|
|
4435
|
+
}
|
|
4436
|
+
this._original_unwrapped_fs = fs;
|
|
4437
|
+
}
|
|
4438
|
+
|
|
4439
|
+
/**
|
|
4440
|
+
* Return true if a file exists, false if it doesn't exist.
|
|
4441
|
+
* Rethrows errors that aren't related to file existance.
|
|
4442
|
+
*/
|
|
4443
|
+
async exists(filepath, options = {}) {
|
|
4444
|
+
try {
|
|
4445
|
+
await this._stat(filepath);
|
|
4446
|
+
return true
|
|
4447
|
+
} catch (err) {
|
|
4448
|
+
if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
|
|
4449
|
+
return false
|
|
4450
|
+
} else {
|
|
4451
|
+
console.log('Unhandled error in "FileSystem.exists()" function', err);
|
|
4452
|
+
throw err
|
|
4453
|
+
}
|
|
4454
|
+
}
|
|
4455
|
+
}
|
|
4456
|
+
|
|
4457
|
+
/**
|
|
4458
|
+
* Return the contents of a file if it exists, otherwise returns null.
|
|
4459
|
+
*
|
|
4460
|
+
* @param {string} filepath
|
|
4461
|
+
* @param {object} [options]
|
|
4462
|
+
*
|
|
4463
|
+
* @returns {Promise<Buffer|string|null>}
|
|
4464
|
+
*/
|
|
4465
|
+
async read(filepath, options = {}) {
|
|
4466
|
+
try {
|
|
4467
|
+
let buffer = await this._readFile(filepath, options);
|
|
4468
|
+
// Convert plain ArrayBuffers to Buffers
|
|
4469
|
+
if (typeof buffer !== 'string') {
|
|
4470
|
+
buffer = Buffer.from(buffer);
|
|
4471
|
+
}
|
|
4472
|
+
return buffer
|
|
4261
4473
|
} catch (err) {
|
|
4262
4474
|
return null
|
|
4263
4475
|
}
|
|
@@ -4410,6 +4622,197 @@ class FileSystem {
|
|
|
4410
4622
|
}
|
|
4411
4623
|
}
|
|
4412
4624
|
|
|
4625
|
+
function assertParameter(name, value) {
|
|
4626
|
+
if (value === undefined) {
|
|
4627
|
+
throw new MissingParameterError(name)
|
|
4628
|
+
}
|
|
4629
|
+
}
|
|
4630
|
+
|
|
4631
|
+
// @ts-check
|
|
4632
|
+
/**
|
|
4633
|
+
*
|
|
4634
|
+
* @param {WalkerEntry} entry
|
|
4635
|
+
* @param {WalkerEntry} base
|
|
4636
|
+
*
|
|
4637
|
+
*/
|
|
4638
|
+
async function modified(entry, base) {
|
|
4639
|
+
if (!entry && !base) return false
|
|
4640
|
+
if (entry && !base) return true
|
|
4641
|
+
if (!entry && base) return true
|
|
4642
|
+
if ((await entry.type()) === 'tree' && (await base.type()) === 'tree') {
|
|
4643
|
+
return false
|
|
4644
|
+
}
|
|
4645
|
+
if (
|
|
4646
|
+
(await entry.type()) === (await base.type()) &&
|
|
4647
|
+
(await entry.mode()) === (await base.mode()) &&
|
|
4648
|
+
(await entry.oid()) === (await base.oid())
|
|
4649
|
+
) {
|
|
4650
|
+
return false
|
|
4651
|
+
}
|
|
4652
|
+
return true
|
|
4653
|
+
}
|
|
4654
|
+
|
|
4655
|
+
// @ts-check
|
|
4656
|
+
|
|
4657
|
+
/**
|
|
4658
|
+
* Abort a merge in progress.
|
|
4659
|
+
*
|
|
4660
|
+
* Based on the behavior of git reset --merge, i.e. "Resets the index and updates the files in the working tree that are different between <commit> and HEAD, but keeps those which are different between the index and working tree (i.e. which have changes which have not been added). If a file that is different between <commit> and the index has unstaged changes, reset is aborted."
|
|
4661
|
+
*
|
|
4662
|
+
* Essentially, abortMerge will reset any files affected by merge conflicts to their last known good version at HEAD.
|
|
4663
|
+
* Any unstaged changes are saved and any staged changes are reset as well.
|
|
4664
|
+
*
|
|
4665
|
+
* NOTE: The behavior of this command differs slightly from canonical git in that an error will be thrown if a file exists in the index and nowhere else.
|
|
4666
|
+
* Canonical git will reset the file and continue aborting the merge in this case.
|
|
4667
|
+
*
|
|
4668
|
+
* **WARNING:** Running git merge with non-trivial uncommitted changes is discouraged: while possible, it may leave you in a state that is hard to back out of in the case of a conflict.
|
|
4669
|
+
* If there were uncommitted changes when the merge started (and especially if those changes were further modified after the merge was started), `git.abortMerge` will in some cases be unable to reconstruct the original (pre-merge) changes.
|
|
4670
|
+
*
|
|
4671
|
+
* @param {object} args
|
|
4672
|
+
* @param {FsClient} args.fs - a file system implementation
|
|
4673
|
+
* @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path
|
|
4674
|
+
* @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path
|
|
4675
|
+
* @param {string} [args.commit='HEAD'] - commit to reset the index and worktree to, defaults to HEAD
|
|
4676
|
+
* @param {object} [args.cache] - a [cache](cache.md) object
|
|
4677
|
+
*
|
|
4678
|
+
* @returns {Promise<void>} Resolves successfully once the git index has been updated
|
|
4679
|
+
*
|
|
4680
|
+
*/
|
|
4681
|
+
/**
 * Abort a merge in progress: restore every staged or conflicted file to its
 * state at `commit` (HEAD by default), while leaving files with purely
 * unstaged modifications alone.
 *
 * @param {object} args
 * @param {FsClient} args.fs - a file system implementation
 * @param {string} args.dir - The working tree directory path
 * @param {string} [args.gitdir=join(dir, '.git')] - The git directory path
 * @param {string} [args.commit='HEAD'] - commit to reset the index and worktree to, defaults to HEAD
 * @param {object} [args.cache] - a cache object
 *
 * @returns {Promise<void>} Resolves successfully once the git index has been updated
 * @throws {IndexResetError} if a file that differs between `commit` and the
 *   index also has unstaged changes (reset would lose work).
 */
async function abortMerge({
  fs: _fs,
  dir,
  gitdir = join(dir, '.git'),
  commit = 'HEAD',
  cache = {},
}) {
  try {
    assertParameter('fs', _fs);
    assertParameter('dir', dir);
    assertParameter('gitdir', gitdir);

    const fs = new FileSystem(_fs);
    const trees = [TREE({ ref: commit }), WORKDIR(), STAGE()];
    let unmergedPaths = [];

    // Snapshot the conflicted paths before walking; the STAGE walker below
    // acquires its own index lock, so we cannot hold one during the walk.
    await GitIndexManager.acquire({ fs, gitdir, cache }, async function(index) {
      unmergedPaths = index.unmergedPaths;
    });

    const results = await _walk({
      fs,
      cache,
      dir,
      gitdir,
      trees,
      map: async function(path, [head, workdir, index]) {
        const staged = !(await modified(workdir, index));
        const unmerged = unmergedPaths.includes(path);
        const unmodified = !(await modified(index, head));

        if (staged || unmerged) {
          // Restore the HEAD version. When the path does not exist at HEAD,
          // emit an explicit deletion marker that still carries the path.
          // BUGFIX: the previous code returned `undefined` here, which
          // _walk's default reducer silently filters out — so the deletion
          // was never applied; and the consumer's `if (!entry)` branch would
          // have crashed dereferencing `entry.path` on a falsy value.
          return head
            ? {
                path,
                mode: await head.mode(),
                oid: await head.oid(),
                type: await head.type(),
                content: await head.content(),
              }
            : { path, deleted: true }
        }

        if (unmodified) return false
        else throw new IndexResetError(path)
      },
    });

    await GitIndexManager.acquire({ fs, gitdir, cache }, async function(index) {
      // Reset paths in index and worktree; this can't be done in _walk because
      // the STAGE walker acquires its own index lock.

      for (const entry of results) {
        // `false` means "unmodified relative to HEAD" — nothing to do.
        if (entry === false) continue

        // entry is not false, so from here we can assume index = workdir
        if (entry.deleted) {
          // Path does not exist at `commit`: remove it from the worktree and
          // the index.
          await fs.rmdir(`${dir}/${entry.path}`, { recursive: true });
          index.delete({ filepath: entry.path });
          continue
        }

        if (entry.type === 'blob') {
          const content = new TextDecoder().decode(entry.content);
          await fs.write(`${dir}/${entry.path}`, content, { mode: entry.mode });
          index.insert({
            filepath: entry.path,
            oid: entry.oid,
            stage: 0,
          });
        }
      }
    });
  } catch (err) {
    err.caller = 'git.abortMerge';
    throw err
  }
}
|
|
4759
|
+
|
|
4760
|
+
// I'm putting this in a Manager because I reckon it could benefit
// from a LOT of cacheing.
class GitIgnoreManager {
  /**
   * Decide whether `filepath` is ignored, honoring the repo-local exclude
   * file (`.git/info/exclude`) and every `.gitignore` on the path from the
   * repository root down to the file itself.
   */
  static async isIgnored({ fs, dir, gitdir = join(dir, '.git'), filepath }) {
    // ALWAYS ignore ".git" folders.
    if (basename(filepath) === '.git') return true
    // '.' is not a valid gitignore entry, so '.' is never ignored
    if (filepath === '.') return false
    // Load repo-local exclusion rules (.git/info/exclude), if present.
    let excludes = '';
    const excludesFile = join(gitdir, 'info', 'exclude');
    if (await fs.exists(excludesFile)) {
      excludes = await fs.read(excludesFile, 'utf8');
    }
    // Collect every .gitignore that could affect this file, each paired with
    // the file's path relative to that .gitignore's directory.
    const pairs = [{ gitignore: join(dir, '.gitignore'), filepath }];
    const segments = filepath.split('/').filter(Boolean);
    for (let depth = 1; depth < segments.length; depth++) {
      const folder = segments.slice(0, depth).join('/');
      const remainder = segments.slice(depth).join('/');
      pairs.push({
        gitignore: join(dir, folder, '.gitignore'),
        filepath: remainder,
      });
    }
    let ignoredStatus = false;
    for (const p of pairs) {
      let file;
      try {
        file = await fs.read(p.gitignore, 'utf8');
      } catch (err) {
        // NOTE(review): Node fs errors use code 'ENOENT', not 'NOENT', and
        // other error codes fall through with `file` undefined — confirm this
        // matches the FileSystem wrapper's error contract.
        if (err.code === 'NOENT') continue
      }
      const ign = ignore().add(excludes);
      ign.add(file);
      // If the parent directory is excluded, we are done.
      // "It is not possible to re-include a file if a parent directory of that file is excluded. Git doesn’t list excluded directories for performance reasons, so any patterns on contained files have no effect, no matter where they are defined."
      // source: https://git-scm.com/docs/gitignore
      const parentdir = dirname(p.filepath);
      if (parentdir !== '.' && ign.ignores(parentdir)) return true
      // If the file is currently ignored, test for UNignoring.
      if (ignoredStatus) {
        ignoredStatus = !ign.test(p.filepath).unignored;
      } else {
        ignoredStatus = ign.test(p.filepath).ignored;
      }
    }
    return ignoredStatus
  }
}
|
|
4815
|
+
|
|
4413
4816
|
async function writeObjectLoose({ fs, gitdir, object, format, oid }) {
|
|
4414
4817
|
if (format !== 'deflated') {
|
|
4415
4818
|
throw new InternalError(
|
|
@@ -4477,12 +4880,6 @@ async function _writeObject({
|
|
|
4477
4880
|
return oid
|
|
4478
4881
|
}
|
|
4479
4882
|
|
|
4480
|
-
function assertParameter(name, value) {
|
|
4481
|
-
if (value === undefined) {
|
|
4482
|
-
throw new MissingParameterError(name)
|
|
4483
|
-
}
|
|
4484
|
-
}
|
|
4485
|
-
|
|
4486
4883
|
function posixifyPathBuffer(buffer) {
|
|
4487
4884
|
let idx;
|
|
4488
4885
|
while (~(idx = buffer.indexOf(92))) buffer[idx] = 47;
|
|
@@ -4630,76 +5027,79 @@ async function _commit({
|
|
|
4630
5027
|
committer,
|
|
4631
5028
|
signingKey,
|
|
4632
5029
|
dryRun = false,
|
|
4633
|
-
noUpdateBranch = false,
|
|
4634
|
-
ref,
|
|
4635
|
-
parent,
|
|
4636
|
-
tree,
|
|
4637
|
-
}) {
|
|
4638
|
-
if (!ref) {
|
|
4639
|
-
ref = await GitRefManager.resolve({
|
|
4640
|
-
fs,
|
|
4641
|
-
gitdir,
|
|
4642
|
-
ref: 'HEAD',
|
|
4643
|
-
depth: 2,
|
|
4644
|
-
});
|
|
4645
|
-
}
|
|
4646
|
-
|
|
4647
|
-
return GitIndexManager.acquire({ fs, gitdir, cache }, async function(index) {
|
|
4648
|
-
const inodes = flatFileListToDirectoryStructure(index.entries);
|
|
4649
|
-
const inode = inodes.get('.');
|
|
4650
|
-
if (!tree) {
|
|
4651
|
-
tree = await constructTree({ fs, gitdir, inode, dryRun });
|
|
4652
|
-
}
|
|
4653
|
-
if (!parent) {
|
|
4654
|
-
try {
|
|
4655
|
-
parent = [
|
|
4656
|
-
await GitRefManager.resolve({
|
|
4657
|
-
fs,
|
|
4658
|
-
gitdir,
|
|
4659
|
-
ref,
|
|
4660
|
-
}),
|
|
4661
|
-
];
|
|
4662
|
-
} catch (err) {
|
|
4663
|
-
// Probably an initial commit
|
|
4664
|
-
parent = [];
|
|
4665
|
-
}
|
|
4666
|
-
} else {
|
|
4667
|
-
// ensure that the parents are oids, not refs
|
|
4668
|
-
parent = await Promise.all(
|
|
4669
|
-
parent.map(p => {
|
|
4670
|
-
return GitRefManager.resolve({ fs, gitdir, ref: p })
|
|
4671
|
-
})
|
|
4672
|
-
);
|
|
4673
|
-
}
|
|
4674
|
-
|
|
4675
|
-
let comm = GitCommit.from({
|
|
4676
|
-
tree,
|
|
4677
|
-
parent,
|
|
4678
|
-
author,
|
|
4679
|
-
committer,
|
|
4680
|
-
message,
|
|
4681
|
-
});
|
|
4682
|
-
if (signingKey) {
|
|
4683
|
-
comm = await GitCommit.sign(comm, onSign, signingKey);
|
|
4684
|
-
}
|
|
4685
|
-
const oid = await _writeObject({
|
|
5030
|
+
noUpdateBranch = false,
|
|
5031
|
+
ref,
|
|
5032
|
+
parent,
|
|
5033
|
+
tree,
|
|
5034
|
+
}) {
|
|
5035
|
+
if (!ref) {
|
|
5036
|
+
ref = await GitRefManager.resolve({
|
|
4686
5037
|
fs,
|
|
4687
5038
|
gitdir,
|
|
4688
|
-
|
|
4689
|
-
|
|
4690
|
-
dryRun,
|
|
5039
|
+
ref: 'HEAD',
|
|
5040
|
+
depth: 2,
|
|
4691
5041
|
});
|
|
4692
|
-
|
|
4693
|
-
|
|
4694
|
-
|
|
5042
|
+
}
|
|
5043
|
+
|
|
5044
|
+
return GitIndexManager.acquire(
|
|
5045
|
+
{ fs, gitdir, cache, allowUnmerged: false },
|
|
5046
|
+
async function(index) {
|
|
5047
|
+
const inodes = flatFileListToDirectoryStructure(index.entries);
|
|
5048
|
+
const inode = inodes.get('.');
|
|
5049
|
+
if (!tree) {
|
|
5050
|
+
tree = await constructTree({ fs, gitdir, inode, dryRun });
|
|
5051
|
+
}
|
|
5052
|
+
if (!parent) {
|
|
5053
|
+
try {
|
|
5054
|
+
parent = [
|
|
5055
|
+
await GitRefManager.resolve({
|
|
5056
|
+
fs,
|
|
5057
|
+
gitdir,
|
|
5058
|
+
ref,
|
|
5059
|
+
}),
|
|
5060
|
+
];
|
|
5061
|
+
} catch (err) {
|
|
5062
|
+
// Probably an initial commit
|
|
5063
|
+
parent = [];
|
|
5064
|
+
}
|
|
5065
|
+
} else {
|
|
5066
|
+
// ensure that the parents are oids, not refs
|
|
5067
|
+
parent = await Promise.all(
|
|
5068
|
+
parent.map(p => {
|
|
5069
|
+
return GitRefManager.resolve({ fs, gitdir, ref: p })
|
|
5070
|
+
})
|
|
5071
|
+
);
|
|
5072
|
+
}
|
|
5073
|
+
|
|
5074
|
+
let comm = GitCommit.from({
|
|
5075
|
+
tree,
|
|
5076
|
+
parent,
|
|
5077
|
+
author,
|
|
5078
|
+
committer,
|
|
5079
|
+
message,
|
|
5080
|
+
});
|
|
5081
|
+
if (signingKey) {
|
|
5082
|
+
comm = await GitCommit.sign(comm, onSign, signingKey);
|
|
5083
|
+
}
|
|
5084
|
+
const oid = await _writeObject({
|
|
4695
5085
|
fs,
|
|
4696
5086
|
gitdir,
|
|
4697
|
-
|
|
4698
|
-
|
|
5087
|
+
type: 'commit',
|
|
5088
|
+
object: comm.toObject(),
|
|
5089
|
+
dryRun,
|
|
4699
5090
|
});
|
|
5091
|
+
if (!noUpdateBranch && !dryRun) {
|
|
5092
|
+
// Update branch pointer
|
|
5093
|
+
await GitRefManager.writeRef({
|
|
5094
|
+
fs,
|
|
5095
|
+
gitdir,
|
|
5096
|
+
ref,
|
|
5097
|
+
value: oid,
|
|
5098
|
+
});
|
|
5099
|
+
}
|
|
5100
|
+
return oid
|
|
4700
5101
|
}
|
|
4701
|
-
|
|
4702
|
-
})
|
|
5102
|
+
)
|
|
4703
5103
|
}
|
|
4704
5104
|
|
|
4705
5105
|
async function constructTree({ fs, gitdir, inode, dryRun }) {
|
|
@@ -4782,7 +5182,7 @@ async function _resolveFilepath({
|
|
|
4782
5182
|
oid: entry.oid,
|
|
4783
5183
|
});
|
|
4784
5184
|
if (type !== 'tree') {
|
|
4785
|
-
throw new ObjectTypeError(oid, type, '
|
|
5185
|
+
throw new ObjectTypeError(oid, type, 'tree', filepath)
|
|
4786
5186
|
}
|
|
4787
5187
|
tree = GitTree.from(object);
|
|
4788
5188
|
return _resolveFilepath({
|
|
@@ -5489,170 +5889,6 @@ async function branch({
|
|
|
5489
5889
|
}
|
|
5490
5890
|
}
|
|
5491
5891
|
|
|
5492
|
-
// https://dev.to/namirsab/comment/2050
/**
 * Build the half-open integer range [start, end).
 * Returns an empty array when end <= start. (Callers pass integers.)
 */
function arrayRange(start, end) {
  const result = [];
  for (let value = start; value < end; value++) {
    result.push(value);
  }
  return result
}
|
|
5497
|
-
|
|
5498
|
-
// TODO: Should I just polyfill Array.flat?
// One-level flatten. Prefers the native Array.prototype.flat (ES2019) and
// falls back to a reduce/concat shim on engines that lack it.
const flat =
  typeof Array.prototype.flat !== 'undefined'
    ? entries => entries.flat()
    : entries => entries.reduce((acc, x) => acc.concat(x), []);
|
|
5503
|
-
|
|
5504
|
-
// This is convenient for computing unions/joins of sorted lists.
/**
 * Tracks the smallest value offered so far, ignoring null/undefined
 * candidates. `value` is null until something has been considered.
 */
class RunningMinimum {
  constructor() {
    // Using a getter for 'value' would just bloat the code.
    // You know better than to set it directly right?
    this.value = null;
  }

  consider(candidate) {
    // Null-ish candidates carry no information.
    if (candidate === null || candidate === undefined) return
    // Adopt the candidate when nothing has been seen yet, or when it is
    // strictly smaller than the current minimum.
    if (this.value === null || candidate < this.value) {
      this.value = candidate;
    }
  }

  reset() {
    this.value = null;
  }
}
|
|
5525
|
-
|
|
5526
|
-
// Take an array of length N of
// iterators of length Q_n
// of strings
// and return an iterator of length max(Q_n) for all n
// of arrays of length N
// of string|null who all have the same string value
//
// i.e. a sorted-merge "union join": each yielded array aligns the iterators
// on a common string; positions whose iterator does not contain that string
// get null.
function* unionOfIterators(sets) {
  /* NOTE: We can assume all arrays are sorted.
   * Indexes are sorted because they are defined that way:
   *
   * > Index entries are sorted in ascending order on the name field,
   * > interpreted as a string of unsigned bytes (i.e. memcmp() order, no
   * > localization, no special casing of directory separator '/'). Entries
   * > with the same name are sorted by their stage field.
   *
   * Trees should be sorted because they are created directly from indexes.
   * They definitely should be sorted, or else they wouldn't have a unique SHA1.
   * So that would be very naughty on the part of the tree-creator.
   *
   * Lastly, the working dir entries are sorted because I choose to sort them
   * in my FileSystem.readdir() implementation.
   */

  // Init
  // Prime one "head" value per iterator and seed the running minimum.
  const min = new RunningMinimum();
  let minimum;
  const heads = [];
  const numsets = sets.length;
  for (let i = 0; i < numsets; i++) {
    // Abuse the fact that iterators continue to return 'undefined' for value
    // once they are done
    heads[i] = sets[i].next().value;
    if (heads[i] !== undefined) {
      min.consider(heads[i]);
    }
  }
  // All iterators were empty: nothing to yield.
  if (min.value === null) return
  // Iterate
  while (true) {
    const result = [];
    minimum = min.value;
    min.reset();
    for (let i = 0; i < numsets; i++) {
      if (heads[i] !== undefined && heads[i] === minimum) {
        // This iterator participates in the current minimum: emit its value
        // and advance it to its next head.
        result[i] = heads[i];
        heads[i] = sets[i].next().value;
      } else {
        // A little hacky, but eh
        result[i] = null;
      }
      if (heads[i] !== undefined) {
        min.consider(heads[i]);
      }
    }
    yield result;
    // No heads remain: every iterator is exhausted.
    if (min.value === null) return
  }
}
|
|
5584
|
-
|
|
5585
|
-
// @ts-check
|
|
5586
|
-
|
|
5587
|
-
/**
 * Walk several trees (e.g. TREE/WORKDIR/STAGE walkers) in lockstep, visiting
 * the union of their entry paths and reducing the mapped results bottom-up.
 *
 * @param {object} args
 * @param {import('../models/FileSystem.js').FileSystem} args.fs
 * @param {object} args.cache
 * @param {string} [args.dir]
 * @param {string} [args.gitdir=join(dir,'.git')]
 * @param {Walker[]} args.trees - walker proxies to traverse in parallel
 * @param {WalkerMap} [args.map] - called once per path with the aligned entries (null where a tree lacks the path)
 * @param {WalkerReduce} [args.reduce] - folds mapped children into their mapped parent
 * @param {WalkerIterate} [args.iterate] - schedules the recursion over child paths
 *
 * @returns {Promise<any>} The finished tree-walking result
 *
 * @see {WalkerMap}
 *
 */
async function _walk({
  fs,
  cache,
  dir,
  gitdir,
  trees,
  // @ts-ignore
  map = async (_, entry) => entry,
  // The default reducer is a flatmap that filters out undefineds.
  reduce = async (parent, children) => {
    const flatten = flat(children);
    if (parent !== undefined) flatten.unshift(parent);
    return flatten
  },
  // The default iterate function walks all children concurrently
  iterate = (walk, children) => Promise.all([...children].map(walk)),
}) {
  // Instantiate one concrete walker per tree proxy.
  const walkers = trees.map(proxy =>
    proxy[GitWalkSymbol]({ fs, dir, gitdir, cache })
  );

  // Start every walker at its root ('.').
  const root = new Array(walkers.length).fill('.');
  const range = arrayRange(0, walkers.length);
  const unionWalkerFromReaddir = async entries => {
    // Wrap each raw entry in its walker's entry class (mutates `entries`).
    range.map(i => {
      entries[i] = entries[i] && new walkers[i].ConstructEntry(entries[i]);
    });
    const subdirs = await Promise.all(
      range.map(i => (entries[i] ? walkers[i].readdir(entries[i]) : []))
    );
    // Now process child directories
    const iterators = subdirs
      .map(array => (array === null ? [] : array))
      .map(array => array[Symbol.iterator]());
    return {
      entries,
      // Lockstep union of the (sorted) child listings; see unionOfIterators.
      children: unionOfIterators(iterators),
    }
  };

  const walk = async root => {
    const { entries, children } = await unionWalkerFromReaddir(root);
    // At least one walker has this path; use it as the canonical full path.
    const fullpath = entries.find(entry => entry && entry._fullpath)._fullpath;
    const parent = await map(fullpath, entries);
    // Returning `null` from `map` prunes the whole subtree; any other value
    // (including `undefined`) still recurses into children.
    if (parent !== null) {
      let walkedChildren = await iterate(walk, children);
      // `undefined` child results are dropped before reduction.
      walkedChildren = walkedChildren.filter(x => x !== undefined);
      return reduce(parent, walkedChildren)
    }
  };
  return walk(root)
}
|
|
5655
|
-
|
|
5656
5892
|
const worthWalking = (filepath, root) => {
|
|
5657
5893
|
if (filepath === '.' || root == null || root.length === 0 || root === '.') {
|
|
5658
5894
|
return true
|
|
@@ -7001,8 +7237,8 @@ function filterCapabilities(server, client) {
|
|
|
7001
7237
|
|
|
7002
7238
|
const pkg = {
|
|
7003
7239
|
name: 'isomorphic-git',
|
|
7004
|
-
version: '1.
|
|
7005
|
-
agent: 'git/isomorphic-git@1.
|
|
7240
|
+
version: '1.23.0',
|
|
7241
|
+
agent: 'git/isomorphic-git@1.23.0',
|
|
7006
7242
|
};
|
|
7007
7243
|
|
|
7008
7244
|
class FIFO {
|
|
@@ -8544,6 +8780,7 @@ async function mergeTree({
|
|
|
8544
8780
|
cache,
|
|
8545
8781
|
dir,
|
|
8546
8782
|
gitdir = join(dir, '.git'),
|
|
8783
|
+
index,
|
|
8547
8784
|
ourOid,
|
|
8548
8785
|
baseOid,
|
|
8549
8786
|
theirOid,
|
|
@@ -8560,8 +8797,6 @@ async function mergeTree({
|
|
|
8560
8797
|
|
|
8561
8798
|
const unmergedFiles = [];
|
|
8562
8799
|
|
|
8563
|
-
let cleanMerge = true;
|
|
8564
|
-
|
|
8565
8800
|
const results = await _walk({
|
|
8566
8801
|
fs,
|
|
8567
8802
|
cache,
|
|
@@ -8623,13 +8858,28 @@ async function mergeTree({
|
|
|
8623
8858
|
baseName,
|
|
8624
8859
|
theirName,
|
|
8625
8860
|
mergeDriver,
|
|
8626
|
-
}).then(r => {
|
|
8627
|
-
|
|
8628
|
-
|
|
8861
|
+
}).then(async r => {
|
|
8862
|
+
if (!r.cleanMerge) {
|
|
8863
|
+
unmergedFiles.push(filepath);
|
|
8864
|
+
if (!abortOnConflict) {
|
|
8865
|
+
const baseOid = await base.oid();
|
|
8866
|
+
const ourOid = await ours.oid();
|
|
8867
|
+
const theirOid = await theirs.oid();
|
|
8868
|
+
|
|
8869
|
+
index.delete({ filepath });
|
|
8870
|
+
|
|
8871
|
+
index.insert({ filepath, oid: baseOid, stage: 1 });
|
|
8872
|
+
index.insert({ filepath, oid: ourOid, stage: 2 });
|
|
8873
|
+
index.insert({ filepath, oid: theirOid, stage: 3 });
|
|
8874
|
+
}
|
|
8875
|
+
} else if (!abortOnConflict) {
|
|
8876
|
+
index.insert({ filepath, oid: r.mergeResult.oid, stage: 0 });
|
|
8877
|
+
}
|
|
8629
8878
|
return r.mergeResult
|
|
8630
8879
|
})
|
|
8631
8880
|
}
|
|
8632
8881
|
// all other types of conflicts fail
|
|
8882
|
+
// TODO: Merge conflicts involving deletions/additions
|
|
8633
8883
|
throw new MergeNotSupportedError()
|
|
8634
8884
|
}
|
|
8635
8885
|
}
|
|
@@ -8638,32 +8888,35 @@ async function mergeTree({
|
|
|
8638
8888
|
* @param {TreeEntry} [parent]
|
|
8639
8889
|
* @param {Array<TreeEntry>} children
|
|
8640
8890
|
*/
|
|
8641
|
-
reduce:
|
|
8642
|
-
|
|
8643
|
-
|
|
8644
|
-
|
|
8645
|
-
|
|
8646
|
-
|
|
8647
|
-
|
|
8648
|
-
|
|
8649
|
-
|
|
8650
|
-
|
|
8651
|
-
|
|
8652
|
-
|
|
8653
|
-
|
|
8654
|
-
|
|
8655
|
-
|
|
8656
|
-
|
|
8657
|
-
|
|
8658
|
-
|
|
8659
|
-
|
|
8660
|
-
|
|
8661
|
-
|
|
8662
|
-
|
|
8663
|
-
|
|
8891
|
+
reduce:
|
|
8892
|
+
unmergedFiles.length !== 0 && (!dir || abortOnConflict)
|
|
8893
|
+
? undefined
|
|
8894
|
+
: async (parent, children) => {
|
|
8895
|
+
const entries = children.filter(Boolean); // remove undefineds
|
|
8896
|
+
|
|
8897
|
+
// if the parent was deleted, the children have to go
|
|
8898
|
+
if (!parent) return
|
|
8899
|
+
|
|
8900
|
+
// automatically delete directories if they have been emptied
|
|
8901
|
+
if (parent && parent.type === 'tree' && entries.length === 0) return
|
|
8902
|
+
|
|
8903
|
+
if (entries.length > 0) {
|
|
8904
|
+
const tree = new GitTree(entries);
|
|
8905
|
+
const object = tree.toObject();
|
|
8906
|
+
const oid = await _writeObject({
|
|
8907
|
+
fs,
|
|
8908
|
+
gitdir,
|
|
8909
|
+
type: 'tree',
|
|
8910
|
+
object,
|
|
8911
|
+
dryRun,
|
|
8912
|
+
});
|
|
8913
|
+
parent.oid = oid;
|
|
8914
|
+
}
|
|
8915
|
+
return parent
|
|
8916
|
+
},
|
|
8664
8917
|
});
|
|
8665
8918
|
|
|
8666
|
-
if (
|
|
8919
|
+
if (unmergedFiles.length !== 0) {
|
|
8667
8920
|
if (dir && !abortOnConflict) {
|
|
8668
8921
|
await _walk({
|
|
8669
8922
|
fs,
|
|
@@ -8682,35 +8935,12 @@ async function mergeTree({
|
|
|
8682
8935
|
},
|
|
8683
8936
|
});
|
|
8684
8937
|
}
|
|
8685
|
-
|
|
8938
|
+
return new MergeConflictError(unmergedFiles)
|
|
8686
8939
|
}
|
|
8687
8940
|
|
|
8688
8941
|
return results.oid
|
|
8689
8942
|
}
|
|
8690
8943
|
|
|
8691
|
-
/**
 * Report whether two walker entries differ.
 *
 * Two trees are never considered modified (their children are compared
 * individually); otherwise entries match only when type, mode and oid all
 * agree. Presence on exactly one side always counts as modified.
 *
 * @param {WalkerEntry} entry
 * @param {WalkerEntry} base
 *
 */
async function modified(entry, base) {
  // Absent on both sides: nothing changed.
  if (!entry && !base) return false
  // Present on exactly one side: added or deleted.
  if (!entry || !base) return true
  const entryType = await entry.type();
  const baseType = await base.type();
  // Tree-vs-tree comparison is deferred to the per-child walk.
  if (entryType === 'tree' && baseType === 'tree') {
    return false
  }
  if (entryType === baseType) {
    if ((await entry.mode()) === (await base.mode())) {
      if ((await entry.oid()) === (await base.oid())) {
        return false
      }
    }
  }
  return true
}
|
|
8713
|
-
|
|
8714
8944
|
/**
|
|
8715
8945
|
*
|
|
8716
8946
|
* @param {Object} args
|
|
@@ -8881,6 +9111,7 @@ async function _merge({
|
|
|
8881
9111
|
oids: [ourOid, theirOid],
|
|
8882
9112
|
});
|
|
8883
9113
|
if (baseOids.length !== 1) {
|
|
9114
|
+
// TODO: Recursive Merge strategy
|
|
8884
9115
|
throw new MergeNotSupportedError()
|
|
8885
9116
|
}
|
|
8886
9117
|
const baseOid = baseOids[0];
|
|
@@ -8905,21 +9136,32 @@ async function _merge({
|
|
|
8905
9136
|
throw new FastForwardError()
|
|
8906
9137
|
}
|
|
8907
9138
|
// try a fancier merge
|
|
8908
|
-
const tree = await
|
|
8909
|
-
fs,
|
|
8910
|
-
|
|
8911
|
-
|
|
8912
|
-
|
|
8913
|
-
|
|
8914
|
-
|
|
8915
|
-
|
|
8916
|
-
|
|
8917
|
-
|
|
8918
|
-
|
|
8919
|
-
|
|
8920
|
-
|
|
8921
|
-
|
|
8922
|
-
|
|
9139
|
+
const tree = await GitIndexManager.acquire(
|
|
9140
|
+
{ fs, gitdir, cache, allowUnmerged: false },
|
|
9141
|
+
async index => {
|
|
9142
|
+
return mergeTree({
|
|
9143
|
+
fs,
|
|
9144
|
+
cache,
|
|
9145
|
+
dir,
|
|
9146
|
+
gitdir,
|
|
9147
|
+
index,
|
|
9148
|
+
ourOid,
|
|
9149
|
+
theirOid,
|
|
9150
|
+
baseOid,
|
|
9151
|
+
ourName: abbreviateRef(ours),
|
|
9152
|
+
baseName: 'base',
|
|
9153
|
+
theirName: abbreviateRef(theirs),
|
|
9154
|
+
dryRun,
|
|
9155
|
+
abortOnConflict,
|
|
9156
|
+
mergeDriver,
|
|
9157
|
+
})
|
|
9158
|
+
}
|
|
9159
|
+
);
|
|
9160
|
+
|
|
9161
|
+
// Defer throwing error until the index lock is relinquished and index is
|
|
9162
|
+
// written to filsesystem
|
|
9163
|
+
if (tree instanceof MergeConflictError) throw tree
|
|
9164
|
+
|
|
8923
9165
|
if (!message) {
|
|
8924
9166
|
message = `Merge branch '${abbreviateRef(theirs)}' into ${abbreviateRef(
|
|
8925
9167
|
ours
|
|
@@ -14579,6 +14821,7 @@ var index = {
|
|
|
14579
14821
|
TREE,
|
|
14580
14822
|
WORKDIR,
|
|
14581
14823
|
add,
|
|
14824
|
+
abortMerge,
|
|
14582
14825
|
addNote,
|
|
14583
14826
|
addRemote,
|
|
14584
14827
|
annotatedTag,
|
|
@@ -14644,4 +14887,4 @@ var index = {
|
|
|
14644
14887
|
};
|
|
14645
14888
|
|
|
14646
14889
|
export default index;
|
|
14647
|
-
export { Errors, STAGE, TREE, WORKDIR, add, addNote, addRemote, annotatedTag, branch, checkout, clone, commit, currentBranch, deleteBranch, deleteRef, deleteRemote, deleteTag, expandOid, expandRef, fastForward, fetch, findMergeBase, findRoot, getConfig, getConfigAll, getRemoteInfo, getRemoteInfo2, hashBlob, indexPack, init, isDescendent, isIgnored, listBranches, listFiles, listNotes, listRemotes, listServerRefs, listTags, log, merge, packObjects, pull, push, readBlob, readCommit, readNote, readObject, readTag, readTree, remove, removeNote, renameBranch, resetIndex, resolveRef, setConfig, status, statusMatrix, tag, updateIndex, version, walk, writeBlob, writeCommit, writeObject, writeRef, writeTag, writeTree };
|
|
14890
|
+
export { Errors, STAGE, TREE, WORKDIR, abortMerge, add, addNote, addRemote, annotatedTag, branch, checkout, clone, commit, currentBranch, deleteBranch, deleteRef, deleteRemote, deleteTag, expandOid, expandRef, fastForward, fetch, findMergeBase, findRoot, getConfig, getConfigAll, getRemoteInfo, getRemoteInfo2, hashBlob, indexPack, init, isDescendent, isIgnored, listBranches, listFiles, listNotes, listRemotes, listServerRefs, listTags, log, merge, packObjects, pull, push, readBlob, readCommit, readNote, readObject, readTag, readTree, remove, removeNote, renameBranch, resetIndex, resolveRef, setConfig, status, statusMatrix, tag, updateIndex, version, walk, writeBlob, writeCommit, writeObject, writeRef, writeTag, writeTree };
|