@prnv/tuck 1.8.0 → 1.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +12 -1
- package/dist/index.js +2093 -928
- package/dist/index.js.map +1 -1
- package/package.json +4 -2
package/dist/index.js
CHANGED
|
@@ -722,10 +722,14 @@ __export(paths_exports, {
|
|
|
722
722
|
getCategoryDir: () => getCategoryDir,
|
|
723
723
|
getConfigPath: () => getConfigPath,
|
|
724
724
|
getDestinationPath: () => getDestinationPath,
|
|
725
|
+
getDestinationPathFromSource: () => getDestinationPathFromSource,
|
|
725
726
|
getFilesDir: () => getFilesDir,
|
|
727
|
+
getHomeRelativeSourcePath: () => getHomeRelativeSourcePath,
|
|
726
728
|
getManifestPath: () => getManifestPath,
|
|
727
729
|
getRelativeDestination: () => getRelativeDestination,
|
|
730
|
+
getRelativeDestinationFromSource: () => getRelativeDestinationFromSource,
|
|
728
731
|
getRelativePath: () => getRelativePath,
|
|
732
|
+
getSafeRepoPathFromDestination: () => getSafeRepoPathFromDestination,
|
|
729
733
|
getTuckDir: () => getTuckDir,
|
|
730
734
|
isDirectory: () => isDirectory,
|
|
731
735
|
isFile: () => isFile,
|
|
@@ -735,13 +739,16 @@ __export(paths_exports, {
|
|
|
735
739
|
isWritable: () => isWritable,
|
|
736
740
|
pathExists: () => pathExists,
|
|
737
741
|
sanitizeFilename: () => sanitizeFilename,
|
|
742
|
+
validatePathWithinRoot: () => validatePathWithinRoot,
|
|
743
|
+
validateSafeDestinationPath: () => validateSafeDestinationPath,
|
|
744
|
+
validateSafeManifestDestination: () => validateSafeManifestDestination,
|
|
738
745
|
validateSafeSourcePath: () => validateSafeSourcePath
|
|
739
746
|
});
|
|
740
747
|
import { homedir as homedir3 } from "os";
|
|
741
|
-
import { join as join2, basename, dirname as dirname2, relative, isAbsolute, resolve, sep } from "path";
|
|
742
|
-
import { stat, access } from "fs/promises";
|
|
748
|
+
import { join as join2, basename, dirname as dirname2, relative, isAbsolute, resolve, sep, posix } from "path";
|
|
749
|
+
import { stat, lstat, access } from "fs/promises";
|
|
743
750
|
import { constants } from "fs";
|
|
744
|
-
var expandPath, collapsePath, getTuckDir, getManifestPath, getConfigPath, getFilesDir, getCategoryDir, getDestinationPath, getRelativeDestination, sanitizeFilename, detectCategory, pathExists, isDirectory, isFile, isSymlink, isReadable, isWritable, getRelativePath, isPathWithinHome, validateSafeSourcePath, generateFileId;
|
|
751
|
+
var expandPath, collapsePath, getTuckDir, getManifestPath, getConfigPath, getFilesDir, getCategoryDir, getDestinationPath, getRelativeDestination, getHomeRelativeSourcePath, getRelativeDestinationFromSource, getDestinationPathFromSource, sanitizeFilename, detectCategory, pathExists, isDirectory, isFile, isSymlink, isReadable, isWritable, getRelativePath, isPathWithinHome, validateSafeSourcePath, validatePathWithinRoot, validateSafeManifestDestination, getSafeRepoPathFromDestination, validateSafeDestinationPath, generateFileId;
|
|
745
752
|
var init_paths = __esm({
|
|
746
753
|
"src/lib/paths.ts"() {
|
|
747
754
|
"use strict";
|
|
@@ -770,7 +777,13 @@ var init_paths = __esm({
|
|
|
770
777
|
return path;
|
|
771
778
|
};
|
|
772
779
|
getTuckDir = (customDir) => {
|
|
773
|
-
|
|
780
|
+
const tuckDir = expandPath(customDir || DEFAULT_TUCK_DIR);
|
|
781
|
+
if (customDir && !isPathWithinHome(customDir)) {
|
|
782
|
+
throw new Error(
|
|
783
|
+
`Unsafe path detected: ${customDir} - custom tuck directory must be within home directory`
|
|
784
|
+
);
|
|
785
|
+
}
|
|
786
|
+
return tuckDir;
|
|
774
787
|
};
|
|
775
788
|
getManifestPath = (tuckDir) => {
|
|
776
789
|
return join2(tuckDir, MANIFEST_FILE);
|
|
@@ -788,11 +801,40 @@ var init_paths = __esm({
|
|
|
788
801
|
return join2(getCategoryDir(tuckDir, category), filename);
|
|
789
802
|
};
|
|
790
803
|
getRelativeDestination = (category, filename) => {
|
|
791
|
-
return
|
|
804
|
+
return posix.join(FILES_DIR, toPosixPath(category), toPosixPath(filename));
|
|
805
|
+
};
|
|
806
|
+
getHomeRelativeSourcePath = (sourcePath) => {
|
|
807
|
+
const expandedSource = resolve(expandPath(sourcePath));
|
|
808
|
+
const resolvedHome = resolve(homedir3());
|
|
809
|
+
if (!(expandedSource === resolvedHome || expandedSource.startsWith(resolvedHome + sep))) {
|
|
810
|
+
throw new Error(
|
|
811
|
+
`Unsafe path detected: ${sourcePath} - source path must be within home directory`
|
|
812
|
+
);
|
|
813
|
+
}
|
|
814
|
+
const relativePath = toPosixPath(relative(resolvedHome, expandedSource));
|
|
815
|
+
const segments = relativePath.split("/").filter(Boolean).filter((segment) => segment !== ".");
|
|
816
|
+
if (segments.some((segment) => segment === "..")) {
|
|
817
|
+
throw new Error(`Unsafe path detected: ${sourcePath} - path traversal is not allowed`);
|
|
818
|
+
}
|
|
819
|
+
return segments.length > 0 ? segments.join("/") : "home";
|
|
820
|
+
};
|
|
821
|
+
getRelativeDestinationFromSource = (category, sourcePath, customFilename) => {
|
|
822
|
+
const relativeSource = getHomeRelativeSourcePath(sourcePath).split("/");
|
|
823
|
+
if (customFilename) {
|
|
824
|
+
const sanitizedCustomName = sanitizeFilename(customFilename);
|
|
825
|
+
relativeSource[relativeSource.length - 1] = toPosixPath(sanitizedCustomName);
|
|
826
|
+
}
|
|
827
|
+
return posix.join(FILES_DIR, toPosixPath(category), ...relativeSource);
|
|
828
|
+
};
|
|
829
|
+
getDestinationPathFromSource = (tuckDir, category, sourcePath, customFilename) => {
|
|
830
|
+
return join2(tuckDir, getRelativeDestinationFromSource(category, sourcePath, customFilename));
|
|
792
831
|
};
|
|
793
832
|
sanitizeFilename = (filepath) => {
|
|
794
833
|
const base = basename(filepath);
|
|
795
834
|
const result = base.startsWith(".") ? base.slice(1) : base;
|
|
835
|
+
if (result === "." || result === "..") {
|
|
836
|
+
return "file";
|
|
837
|
+
}
|
|
796
838
|
return result || "file";
|
|
797
839
|
};
|
|
798
840
|
detectCategory = (filepath) => {
|
|
@@ -837,7 +879,7 @@ var init_paths = __esm({
|
|
|
837
879
|
};
|
|
838
880
|
isSymlink = async (path) => {
|
|
839
881
|
try {
|
|
840
|
-
const stats = await
|
|
882
|
+
const stats = await lstat(path);
|
|
841
883
|
return stats.isSymbolicLink();
|
|
842
884
|
} catch {
|
|
843
885
|
return false;
|
|
@@ -894,6 +936,63 @@ var init_paths = __esm({
|
|
|
894
936
|
throw new Error(`Unsafe path detected: ${source} - paths must be within home directory`);
|
|
895
937
|
}
|
|
896
938
|
};
|
|
939
|
+
validatePathWithinRoot = (pathToValidate, root, label = "path") => {
|
|
940
|
+
const resolvedPath = resolve(expandPath(pathToValidate));
|
|
941
|
+
const resolvedRoot = resolve(expandPath(root));
|
|
942
|
+
const isWithinRoot = resolvedPath === resolvedRoot || resolvedPath.startsWith(resolvedRoot + sep);
|
|
943
|
+
if (!isWithinRoot) {
|
|
944
|
+
throw new Error(
|
|
945
|
+
`Unsafe ${label} path detected: ${pathToValidate} - path must be within ${root}`
|
|
946
|
+
);
|
|
947
|
+
}
|
|
948
|
+
};
|
|
949
|
+
validateSafeManifestDestination = (destination) => {
|
|
950
|
+
const trimmedDestination = destination.trim();
|
|
951
|
+
if (!trimmedDestination) {
|
|
952
|
+
throw new Error("Unsafe manifest destination detected: destination cannot be empty");
|
|
953
|
+
}
|
|
954
|
+
if (isAbsolute(trimmedDestination) || /^[A-Za-z]:[\\/]/.test(trimmedDestination) || trimmedDestination.startsWith("\\\\")) {
|
|
955
|
+
throw new Error(
|
|
956
|
+
`Unsafe manifest destination detected: ${destination} - destination must be a relative path`
|
|
957
|
+
);
|
|
958
|
+
}
|
|
959
|
+
const normalized = trimmedDestination.replace(/\\/g, "/");
|
|
960
|
+
if (normalized.includes("../") || normalized.split("/").includes("..")) {
|
|
961
|
+
throw new Error(
|
|
962
|
+
`Unsafe manifest destination detected: ${destination} - path traversal is not allowed`
|
|
963
|
+
);
|
|
964
|
+
}
|
|
965
|
+
if (!(normalized === FILES_DIR || normalized.startsWith(`${FILES_DIR}/`))) {
|
|
966
|
+
throw new Error(
|
|
967
|
+
`Unsafe manifest destination detected: ${destination} - destination must be inside ${FILES_DIR}/`
|
|
968
|
+
);
|
|
969
|
+
}
|
|
970
|
+
};
|
|
971
|
+
getSafeRepoPathFromDestination = (tuckDir, destination) => {
|
|
972
|
+
validateSafeManifestDestination(destination);
|
|
973
|
+
const repoPath = join2(tuckDir, destination);
|
|
974
|
+
validatePathWithinRoot(repoPath, tuckDir, "manifest destination");
|
|
975
|
+
return repoPath;
|
|
976
|
+
};
|
|
977
|
+
validateSafeDestinationPath = (destination, allowedRoots) => {
|
|
978
|
+
const resolvedDestination = resolve(expandPath(destination));
|
|
979
|
+
const roots = (allowedRoots && allowedRoots.length > 0 ? allowedRoots : [homedir3()]).map(
|
|
980
|
+
(r) => resolve(expandPath(r))
|
|
981
|
+
);
|
|
982
|
+
const isWithinAllowedRoot = roots.some((root) => {
|
|
983
|
+
try {
|
|
984
|
+
validatePathWithinRoot(resolvedDestination, root, "destination");
|
|
985
|
+
return true;
|
|
986
|
+
} catch {
|
|
987
|
+
return false;
|
|
988
|
+
}
|
|
989
|
+
});
|
|
990
|
+
if (!isWithinAllowedRoot) {
|
|
991
|
+
throw new Error(
|
|
992
|
+
`Unsafe destination path detected: ${destination} - destination must be within allowed roots`
|
|
993
|
+
);
|
|
994
|
+
}
|
|
995
|
+
};
|
|
897
996
|
generateFileId = (source) => {
|
|
898
997
|
const collapsed = collapsePath(source);
|
|
899
998
|
const normalized = toPosixPath(collapsed);
|
|
@@ -2547,6 +2646,7 @@ __export(github_exports, {
|
|
|
2547
2646
|
MIN_GITHUB_TOKEN_LENGTH: () => MIN_GITHUB_TOKEN_LENGTH,
|
|
2548
2647
|
checkSSHKeys: () => checkSSHKeys,
|
|
2549
2648
|
configureGitCredentialHelper: () => configureGitCredentialHelper,
|
|
2649
|
+
configureGitCredentialHelperWithOptions: () => configureGitCredentialHelperWithOptions,
|
|
2550
2650
|
createRepo: () => createRepo,
|
|
2551
2651
|
detectTokenType: () => detectTokenType,
|
|
2552
2652
|
diagnoseAuthIssue: () => diagnoseAuthIssue,
|
|
@@ -2573,7 +2673,7 @@ __export(github_exports, {
|
|
|
2573
2673
|
});
|
|
2574
2674
|
import { execFile as execFile4 } from "child_process";
|
|
2575
2675
|
import { promisify as promisify4 } from "util";
|
|
2576
|
-
var execFileAsync4, API_REQUEST_TIMEOUT_MS, GIT_CREDENTIAL_CACHE_FALLBACK_TIMEOUT_SECONDS, TOKEN_EXPIRATION_WARNING_DAYS, MIN_GITHUB_TOKEN_LENGTH, GITHUB_TOKEN_PREFIXES, validateRepoName2, isGhInstalled, isGhAuthenticated, getAuthenticatedUser, repoExists, diagnoseRepoCreationFailure, createRepo, getPreferredRemoteProtocol, getRepoInfo, ghCloneRepo, findDotfilesRepo, getPreferredRepoUrl, checkSSHKeys, getStrictHostKeyCheckingOption, testSSHConnection, getSSHKeyInstructions, getFineGrainedTokenInstructions, getClassicTokenInstructions, getGitHubCLIInstallInstructions, getAuthMethods, configureGitCredentialHelper, getCredentialsPath, storeGitHubCredentials, getStoredCredentialMetadata, removeStoredCredentials, testStoredCredentials, diagnoseAuthIssue, updateStoredCredentials, detectTokenType;
|
|
2676
|
+
var execFileAsync4, API_REQUEST_TIMEOUT_MS, GIT_CREDENTIAL_CACHE_FALLBACK_TIMEOUT_SECONDS, TOKEN_EXPIRATION_WARNING_DAYS, MIN_GITHUB_TOKEN_LENGTH, GITHUB_TOKEN_PREFIXES, validateRepoName2, isGhInstalled, isGhAuthenticated, getAuthenticatedUser, repoExists, diagnoseRepoCreationFailure, createRepo, getPreferredRemoteProtocol, getRepoInfo, ghCloneRepo, findDotfilesRepo, getPreferredRepoUrl, checkSSHKeys, getStrictHostKeyCheckingOption, testSSHConnection, getSSHKeyInstructions, getFineGrainedTokenInstructions, getClassicTokenInstructions, getGitHubCLIInstallInstructions, getAuthMethods, configureGitCredentialHelper, configureGitCredentialHelperWithOptions, getCredentialsPath, storeGitHubCredentials, getStoredCredentialMetadata, removeStoredCredentials, testStoredCredentials, diagnoseAuthIssue, updateStoredCredentials, detectTokenType;
|
|
2577
2677
|
var init_github = __esm({
|
|
2578
2678
|
"src/lib/github.ts"() {
|
|
2579
2679
|
"use strict";
|
|
@@ -2868,14 +2968,14 @@ var init_github = __esm({
|
|
|
2868
2968
|
return protocol === "ssh" ? repo.sshUrl : repo.httpsUrl;
|
|
2869
2969
|
};
|
|
2870
2970
|
checkSSHKeys = async () => {
|
|
2871
|
-
const { homedir:
|
|
2872
|
-
const { join:
|
|
2971
|
+
const { homedir: homedir8 } = await import("os");
|
|
2972
|
+
const { join: join19 } = await import("path");
|
|
2873
2973
|
const { readFile: readFile15 } = await import("fs/promises");
|
|
2874
2974
|
const { pathExists: pathExists6 } = await Promise.resolve().then(() => (init_paths(), paths_exports));
|
|
2875
|
-
const sshDir =
|
|
2975
|
+
const sshDir = join19(homedir8(), ".ssh");
|
|
2876
2976
|
const keyTypes = ["id_ed25519", "id_rsa", "id_ecdsa"];
|
|
2877
2977
|
for (const keyType of keyTypes) {
|
|
2878
|
-
const privateKeyPath =
|
|
2978
|
+
const privateKeyPath = join19(sshDir, keyType);
|
|
2879
2979
|
const publicKeyPath = `${privateKeyPath}.pub`;
|
|
2880
2980
|
if (await pathExists6(publicKeyPath)) {
|
|
2881
2981
|
try {
|
|
@@ -2897,17 +2997,17 @@ var init_github = __esm({
|
|
|
2897
2997
|
}
|
|
2898
2998
|
return {
|
|
2899
2999
|
exists: false,
|
|
2900
|
-
path:
|
|
2901
|
-
publicKeyPath:
|
|
3000
|
+
path: join19(sshDir, "id_ed25519"),
|
|
3001
|
+
publicKeyPath: join19(sshDir, "id_ed25519.pub")
|
|
2902
3002
|
};
|
|
2903
3003
|
};
|
|
2904
3004
|
getStrictHostKeyCheckingOption = async () => {
|
|
2905
3005
|
try {
|
|
2906
|
-
const { homedir:
|
|
2907
|
-
const { join:
|
|
3006
|
+
const { homedir: homedir8 } = await import("os");
|
|
3007
|
+
const { join: join19 } = await import("path");
|
|
2908
3008
|
const { readFile: readFile15 } = await import("fs/promises");
|
|
2909
|
-
const sshDir =
|
|
2910
|
-
const knownHostsPath =
|
|
3009
|
+
const sshDir = join19(homedir8(), ".ssh");
|
|
3010
|
+
const knownHostsPath = join19(sshDir, "known_hosts");
|
|
2911
3011
|
const knownHostsContent = await readFile15(knownHostsPath, "utf-8");
|
|
2912
3012
|
const hasGitHubEntry = knownHostsContent.split("\n").some((line) => {
|
|
2913
3013
|
const trimmed = line.trim();
|
|
@@ -3017,11 +3117,13 @@ To create a Fine-grained Personal Access Token (recommended):
|
|
|
3017
3117
|
- Username: your-github-username
|
|
3018
3118
|
- Password: github_pat_xxxxxxxxxxxx (your token)
|
|
3019
3119
|
|
|
3020
|
-
|
|
3021
|
-
git config --global credential.helper
|
|
3120
|
+
Configure a secure credential helper instead:
|
|
3121
|
+
- macOS: git config --global credential.helper osxkeychain
|
|
3122
|
+
- Linux: git config --global credential.helper libsecret
|
|
3123
|
+
- Windows: git config --global credential.helper manager-core
|
|
3124
|
+
- Or with GitHub CLI: gh auth setup-git
|
|
3022
3125
|
|
|
3023
|
-
Then on first push, enter your token as the password
|
|
3024
|
-
and it will be saved for future use.
|
|
3126
|
+
Then on first push, enter your token as the password.
|
|
3025
3127
|
`.trim();
|
|
3026
3128
|
};
|
|
3027
3129
|
getClassicTokenInstructions = () => {
|
|
@@ -3060,8 +3162,11 @@ but classic tokens work if you need broader access.
|
|
|
3060
3162
|
- Username: your-github-username
|
|
3061
3163
|
- Password: ghp_xxxxxxxxxxxx (your token)
|
|
3062
3164
|
|
|
3063
|
-
|
|
3064
|
-
git config --global credential.helper
|
|
3165
|
+
Configure a secure credential helper instead:
|
|
3166
|
+
- macOS: git config --global credential.helper osxkeychain
|
|
3167
|
+
- Linux: git config --global credential.helper libsecret
|
|
3168
|
+
- Windows: git config --global credential.helper manager-core
|
|
3169
|
+
- Or with GitHub CLI: gh auth setup-git
|
|
3065
3170
|
|
|
3066
3171
|
Then on first push, enter your token as the password.
|
|
3067
3172
|
`.trim();
|
|
@@ -3144,7 +3249,11 @@ Learn more: https://cli.github.com/
|
|
|
3144
3249
|
];
|
|
3145
3250
|
};
|
|
3146
3251
|
configureGitCredentialHelper = async () => {
|
|
3252
|
+
await configureGitCredentialHelperWithOptions();
|
|
3253
|
+
};
|
|
3254
|
+
configureGitCredentialHelperWithOptions = async (options = {}) => {
|
|
3147
3255
|
const { platform: platform2 } = process;
|
|
3256
|
+
const allowGlobalConfigChange = options.allowGlobalConfigChange ?? false;
|
|
3148
3257
|
try {
|
|
3149
3258
|
const { stdout } = await execFileAsync4("git", ["config", "--global", "credential.helper"]);
|
|
3150
3259
|
if (stdout.trim()) {
|
|
@@ -3152,6 +3261,9 @@ Learn more: https://cli.github.com/
|
|
|
3152
3261
|
}
|
|
3153
3262
|
} catch {
|
|
3154
3263
|
}
|
|
3264
|
+
if (!allowGlobalConfigChange) {
|
|
3265
|
+
return;
|
|
3266
|
+
}
|
|
3155
3267
|
try {
|
|
3156
3268
|
if (platform2 === "darwin") {
|
|
3157
3269
|
await execFileAsync4("git", ["config", "--global", "credential.helper", "osxkeychain"]);
|
|
@@ -3165,21 +3277,33 @@ Learn more: https://cli.github.com/
|
|
|
3165
3277
|
await execFileAsync4("git", ["config", "--global", "credential.helper", `cache --timeout=${GIT_CREDENTIAL_CACHE_FALLBACK_TIMEOUT_SECONDS}`]);
|
|
3166
3278
|
}
|
|
3167
3279
|
} else if (platform2 === "win32") {
|
|
3168
|
-
|
|
3280
|
+
try {
|
|
3281
|
+
await execFileAsync4("git", ["config", "--global", "credential.helper", "manager-core"]);
|
|
3282
|
+
} catch {
|
|
3283
|
+
await execFileAsync4("git", ["config", "--global", "credential.helper", "manager"]);
|
|
3284
|
+
}
|
|
3169
3285
|
}
|
|
3170
|
-
} catch {
|
|
3171
|
-
|
|
3286
|
+
} catch (error) {
|
|
3287
|
+
const message = error instanceof Error ? error.message : String(error);
|
|
3288
|
+
throw new GitHubCliError(
|
|
3289
|
+
`Failed to configure git credential helper securely: ${message}`,
|
|
3290
|
+
[
|
|
3291
|
+
"Configure a helper manually (recommended):",
|
|
3292
|
+
" macOS: git config --global credential.helper osxkeychain",
|
|
3293
|
+
" Linux: git config --global credential.helper libsecret",
|
|
3294
|
+
" Windows: git config --global credential.helper manager-core"
|
|
3295
|
+
]
|
|
3296
|
+
);
|
|
3172
3297
|
}
|
|
3173
3298
|
};
|
|
3174
3299
|
getCredentialsPath = async () => {
|
|
3175
|
-
const { homedir:
|
|
3176
|
-
const { join:
|
|
3177
|
-
return
|
|
3300
|
+
const { homedir: homedir8 } = await import("os");
|
|
3301
|
+
const { join: join19 } = await import("path");
|
|
3302
|
+
return join19(homedir8(), ".tuck", ".credentials.json");
|
|
3178
3303
|
};
|
|
3179
3304
|
storeGitHubCredentials = async (username, token, type) => {
|
|
3180
3305
|
const { writeFile: writeFile11, mkdir: mkdir2 } = await import("fs/promises");
|
|
3181
3306
|
const { dirname: dirname9 } = await import("path");
|
|
3182
|
-
await configureGitCredentialHelper();
|
|
3183
3307
|
if (/[\r\n]/.test(username) || /[\r\n]/.test(token)) {
|
|
3184
3308
|
throw new GitHubCliError("Username or token contains invalid newline characters.", [
|
|
3185
3309
|
"Newline characters are not allowed in GitHub usernames or tokens because git's credential helper protocol is line-based.",
|
|
@@ -3195,7 +3319,7 @@ password=${token}
|
|
|
3195
3319
|
`;
|
|
3196
3320
|
try {
|
|
3197
3321
|
const { spawn: spawn3 } = await import("child_process");
|
|
3198
|
-
await new Promise((
|
|
3322
|
+
await new Promise((resolve3, reject) => {
|
|
3199
3323
|
const proc = spawn3("git", ["credential", "approve"], {
|
|
3200
3324
|
stdio: ["pipe", "pipe", "pipe"]
|
|
3201
3325
|
});
|
|
@@ -3203,7 +3327,7 @@ password=${token}
|
|
|
3203
3327
|
proc.stdin.end();
|
|
3204
3328
|
proc.on("close", (code) => {
|
|
3205
3329
|
if (code === 0) {
|
|
3206
|
-
|
|
3330
|
+
resolve3();
|
|
3207
3331
|
} else {
|
|
3208
3332
|
reject(new Error("git credential approve failed"));
|
|
3209
3333
|
}
|
|
@@ -3281,14 +3405,14 @@ password=${token}
|
|
|
3281
3405
|
const { pathExists: pathExists6 } = await Promise.resolve().then(() => (init_paths(), paths_exports));
|
|
3282
3406
|
try {
|
|
3283
3407
|
const { spawn: spawn3 } = await import("child_process");
|
|
3284
|
-
await new Promise((
|
|
3408
|
+
await new Promise((resolve3) => {
|
|
3285
3409
|
const proc = spawn3("git", ["credential", "reject"], {
|
|
3286
3410
|
stdio: ["pipe", "pipe", "pipe"]
|
|
3287
3411
|
});
|
|
3288
3412
|
proc.stdin.write("protocol=https\nhost=github.com\n\n");
|
|
3289
3413
|
proc.stdin.end();
|
|
3290
|
-
proc.on("close", () =>
|
|
3291
|
-
proc.on("error", () =>
|
|
3414
|
+
proc.on("close", () => resolve3());
|
|
3415
|
+
proc.on("error", () => resolve3());
|
|
3292
3416
|
});
|
|
3293
3417
|
} catch {
|
|
3294
3418
|
}
|
|
@@ -3309,7 +3433,7 @@ password=${token}
|
|
|
3309
3433
|
let password2 = null;
|
|
3310
3434
|
try {
|
|
3311
3435
|
const { spawn: spawn3 } = await import("child_process");
|
|
3312
|
-
const credentialOutput = await new Promise((
|
|
3436
|
+
const credentialOutput = await new Promise((resolve3, reject) => {
|
|
3313
3437
|
const proc = spawn3("git", ["credential", "fill"], {
|
|
3314
3438
|
stdio: ["pipe", "pipe", "pipe"]
|
|
3315
3439
|
});
|
|
@@ -3321,7 +3445,7 @@ password=${token}
|
|
|
3321
3445
|
});
|
|
3322
3446
|
proc.on("close", (code) => {
|
|
3323
3447
|
if (code === 0) {
|
|
3324
|
-
|
|
3448
|
+
resolve3(output);
|
|
3325
3449
|
} else {
|
|
3326
3450
|
reject(new Error("git credential fill failed"));
|
|
3327
3451
|
}
|
|
@@ -4262,7 +4386,7 @@ var init_detect = __esm({
|
|
|
4262
4386
|
|
|
4263
4387
|
// src/lib/files.ts
|
|
4264
4388
|
import { createHash } from "crypto";
|
|
4265
|
-
import { readFile as readFile3, stat as stat3, lstat, readdir as readdir2, copyFile, symlink, unlink, rm } from "fs/promises";
|
|
4389
|
+
import { readFile as readFile3, stat as stat3, lstat as lstat2, readdir as readdir2, copyFile, symlink, unlink, rm } from "fs/promises";
|
|
4266
4390
|
import { copy, ensureDir } from "fs-extra";
|
|
4267
4391
|
import { join as join5, dirname as dirname3, basename as basename3 } from "path";
|
|
4268
4392
|
import { constants as constants2 } from "fs";
|
|
@@ -4297,11 +4421,12 @@ var init_files = __esm({
|
|
|
4297
4421
|
}
|
|
4298
4422
|
try {
|
|
4299
4423
|
const stats = await stat3(expandedPath);
|
|
4424
|
+
const linkStats = await lstat2(expandedPath);
|
|
4300
4425
|
const permissions = IS_WINDOWS ? stats.isDirectory() ? "755" : "644" : (stats.mode & 511).toString(8).padStart(3, "0");
|
|
4301
4426
|
return {
|
|
4302
4427
|
path: expandedPath,
|
|
4303
4428
|
isDirectory: stats.isDirectory(),
|
|
4304
|
-
isSymlink:
|
|
4429
|
+
isSymlink: linkStats.isSymbolicLink(),
|
|
4305
4430
|
size: stats.size,
|
|
4306
4431
|
permissions,
|
|
4307
4432
|
modified: stats.mtime
|
|
@@ -4353,7 +4478,7 @@ var init_files = __esm({
|
|
|
4353
4478
|
continue;
|
|
4354
4479
|
}
|
|
4355
4480
|
try {
|
|
4356
|
-
const lstats = await
|
|
4481
|
+
const lstats = await lstat2(entryPath);
|
|
4357
4482
|
if (lstats.isSymbolicLink()) {
|
|
4358
4483
|
continue;
|
|
4359
4484
|
}
|
|
@@ -4382,6 +4507,7 @@ var init_files = __esm({
|
|
|
4382
4507
|
if (!await pathExists(expandedSource)) {
|
|
4383
4508
|
throw new FileNotFoundError(source);
|
|
4384
4509
|
}
|
|
4510
|
+
validateSafeDestinationPath(expandedDest);
|
|
4385
4511
|
await ensureDir(dirname3(expandedDest));
|
|
4386
4512
|
const sourceIsDir = await isDirectory(expandedSource);
|
|
4387
4513
|
try {
|
|
@@ -4419,10 +4545,11 @@ var init_files = __esm({
|
|
|
4419
4545
|
if (!await pathExists(expandedTarget)) {
|
|
4420
4546
|
throw new FileNotFoundError(target);
|
|
4421
4547
|
}
|
|
4548
|
+
validateSafeDestinationPath(expandedLink);
|
|
4422
4549
|
await ensureDir(dirname3(expandedLink));
|
|
4423
4550
|
if (options?.overwrite && await pathExists(expandedLink)) {
|
|
4424
4551
|
try {
|
|
4425
|
-
const linkStats = await
|
|
4552
|
+
const linkStats = await lstat2(expandedLink);
|
|
4426
4553
|
if (linkStats.isDirectory()) {
|
|
4427
4554
|
await rm(expandedLink, { recursive: true });
|
|
4428
4555
|
} else {
|
|
@@ -4539,6 +4666,7 @@ var init_fileTracking = __esm({
|
|
|
4539
4666
|
init_files();
|
|
4540
4667
|
init_config();
|
|
4541
4668
|
init_constants();
|
|
4669
|
+
init_platform();
|
|
4542
4670
|
SENSITIVE_FILE_PATTERNS = [
|
|
4543
4671
|
/^\.netrc$/,
|
|
4544
4672
|
/^\.aws\/credentials$/,
|
|
@@ -4582,7 +4710,12 @@ var init_fileTracking = __esm({
|
|
|
4582
4710
|
const total = files.length;
|
|
4583
4711
|
const errors = [];
|
|
4584
4712
|
const sensitiveFiles = [];
|
|
4713
|
+
const trackedDestinations = /* @__PURE__ */ new Map();
|
|
4585
4714
|
let succeeded = 0;
|
|
4715
|
+
const manifest = await loadManifest(tuckDir);
|
|
4716
|
+
for (const existingFile of Object.values(manifest.files)) {
|
|
4717
|
+
trackedDestinations.set(toPosixPath(existingFile.destination), existingFile.source);
|
|
4718
|
+
}
|
|
4586
4719
|
console.log();
|
|
4587
4720
|
console.log(chalk3.bold.cyan(`${actionVerb} ${total} ${total === 1 ? "file" : "files"}...`));
|
|
4588
4721
|
console.log(chalk3.dim("\u2500".repeat(50)));
|
|
@@ -4592,20 +4725,35 @@ var init_fileTracking = __esm({
|
|
|
4592
4725
|
const expandedPath = expandPath(file.path);
|
|
4593
4726
|
const indexStr = chalk3.dim(`[${i + 1}/${total}]`);
|
|
4594
4727
|
const category = file.category || detectCategory(expandedPath);
|
|
4595
|
-
const filename = sanitizeFilename(expandedPath);
|
|
4596
4728
|
const categoryInfo = CATEGORIES[category];
|
|
4597
4729
|
const icon = categoryInfo?.icon || "\u25CB";
|
|
4730
|
+
const sourcePath = collapsePath(file.path);
|
|
4731
|
+
const relativeDestination = getRelativeDestinationFromSource(category, expandedPath, file.name);
|
|
4732
|
+
const normalizedDestination = toPosixPath(relativeDestination);
|
|
4733
|
+
const existingSource = trackedDestinations.get(normalizedDestination);
|
|
4598
4734
|
const spinner4 = ora({
|
|
4599
|
-
text: `${indexStr} ${actionVerb} ${chalk3.cyan(
|
|
4735
|
+
text: `${indexStr} ${actionVerb} ${chalk3.cyan(sourcePath)}`,
|
|
4600
4736
|
color: "cyan",
|
|
4601
4737
|
spinner: "dots",
|
|
4602
4738
|
indent: 2
|
|
4603
4739
|
}).start();
|
|
4604
4740
|
try {
|
|
4605
|
-
|
|
4741
|
+
if (existingSource && existingSource !== sourcePath) {
|
|
4742
|
+
throw new Error(
|
|
4743
|
+
`Destination collision detected: ${relativeDestination} is already used by ${existingSource}`
|
|
4744
|
+
);
|
|
4745
|
+
}
|
|
4746
|
+
const destination = getDestinationPathFromSource(tuckDir, category, expandedPath, file.name);
|
|
4606
4747
|
await ensureDir2(dirname4(destination));
|
|
4607
4748
|
if (strategy === "symlink") {
|
|
4608
|
-
await
|
|
4749
|
+
await copyFileOrDir(expandedPath, destination, { overwrite: true });
|
|
4750
|
+
try {
|
|
4751
|
+
await createSymlink(destination, expandedPath, { overwrite: true });
|
|
4752
|
+
} catch (error) {
|
|
4753
|
+
await deleteFileOrDir(expandedPath).catch(() => void 0);
|
|
4754
|
+
await copyFileOrDir(destination, expandedPath, { overwrite: true }).catch(() => void 0);
|
|
4755
|
+
throw error;
|
|
4756
|
+
}
|
|
4609
4757
|
} else {
|
|
4610
4758
|
await copyFileOrDir(expandedPath, destination, { overwrite: true });
|
|
4611
4759
|
}
|
|
@@ -4614,8 +4762,8 @@ var init_fileTracking = __esm({
|
|
|
4614
4762
|
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
4615
4763
|
const id = generateFileId(file.path);
|
|
4616
4764
|
await addFileToManifest(tuckDir, id, {
|
|
4617
|
-
source:
|
|
4618
|
-
destination:
|
|
4765
|
+
source: sourcePath,
|
|
4766
|
+
destination: relativeDestination,
|
|
4619
4767
|
category,
|
|
4620
4768
|
strategy,
|
|
4621
4769
|
// TODO: Encryption and templating are planned for a future version
|
|
@@ -4628,16 +4776,17 @@ var init_fileTracking = __esm({
|
|
|
4628
4776
|
});
|
|
4629
4777
|
spinner4.stop();
|
|
4630
4778
|
const categoryStr = showCategory ? chalk3.dim(` ${icon} ${category}`) : "";
|
|
4631
|
-
console.log(` ${chalk3.green("\u2713")} ${indexStr} ${
|
|
4632
|
-
if (isSensitiveFile(
|
|
4779
|
+
console.log(` ${chalk3.green("\u2713")} ${indexStr} ${sourcePath}${categoryStr}`);
|
|
4780
|
+
if (isSensitiveFile(sourcePath)) {
|
|
4633
4781
|
sensitiveFiles.push(file.path);
|
|
4634
4782
|
}
|
|
4783
|
+
trackedDestinations.set(normalizedDestination, sourcePath);
|
|
4635
4784
|
succeeded++;
|
|
4636
4785
|
if (onProgress) {
|
|
4637
4786
|
onProgress(i + 1, total);
|
|
4638
4787
|
}
|
|
4639
4788
|
if (i < files.length - 1 && delayBetween > 0) {
|
|
4640
|
-
await new Promise((
|
|
4789
|
+
await new Promise((resolve3) => setTimeout(resolve3, delayBetween));
|
|
4641
4790
|
}
|
|
4642
4791
|
} catch (error) {
|
|
4643
4792
|
spinner4.stop();
|
|
@@ -4675,184 +4824,468 @@ var init_fileTracking = __esm({
|
|
|
4675
4824
|
}
|
|
4676
4825
|
});
|
|
4677
4826
|
|
|
4678
|
-
// src/lib/
|
|
4679
|
-
import {
|
|
4680
|
-
import {
|
|
4681
|
-
var
|
|
4682
|
-
var
|
|
4683
|
-
"src/lib/
|
|
4827
|
+
// src/lib/binary.ts
|
|
4828
|
+
import { open, stat as stat4 } from "fs/promises";
|
|
4829
|
+
import { basename as basename4, dirname as dirname5 } from "path";
|
|
4830
|
+
var MAGIC_NUMBERS, SCRIPT_EXTENSIONS, WINDOWS_EXECUTABLE_EXTENSIONS, bufferStartsWith, isBinaryExecutable, isScriptFile, shouldExcludeFromBin;
|
|
4831
|
+
var init_binary = __esm({
|
|
4832
|
+
"src/lib/binary.ts"() {
|
|
4684
4833
|
"use strict";
|
|
4685
|
-
init_constants();
|
|
4686
4834
|
init_paths();
|
|
4687
4835
|
init_platform();
|
|
4688
|
-
|
|
4689
|
-
|
|
4690
|
-
|
|
4691
|
-
|
|
4692
|
-
|
|
4693
|
-
|
|
4694
|
-
|
|
4695
|
-
|
|
4696
|
-
|
|
4697
|
-
|
|
4698
|
-
|
|
4699
|
-
|
|
4700
|
-
|
|
4701
|
-
|
|
4702
|
-
|
|
4703
|
-
|
|
4836
|
+
MAGIC_NUMBERS = {
|
|
4837
|
+
// ELF (Linux)
|
|
4838
|
+
ELF: Buffer.from([127, 69, 76, 70]),
|
|
4839
|
+
// Mach-O (macOS) - 32-bit
|
|
4840
|
+
MACHO_32: Buffer.from([254, 237, 250, 206]),
|
|
4841
|
+
// Mach-O (macOS) - 64-bit
|
|
4842
|
+
MACHO_64: Buffer.from([207, 250, 237, 254]),
|
|
4843
|
+
// Mach-O (macOS) - Universal binary
|
|
4844
|
+
MACHO_UNIVERSAL: Buffer.from([202, 254, 186, 190]),
|
|
4845
|
+
// PE (Windows)
|
|
4846
|
+
PE: Buffer.from([77, 90])
|
|
4847
|
+
// "MZ"
|
|
4848
|
+
};
|
|
4849
|
+
SCRIPT_EXTENSIONS = [
|
|
4850
|
+
// Unix shells
|
|
4851
|
+
".sh",
|
|
4852
|
+
".bash",
|
|
4853
|
+
".zsh",
|
|
4854
|
+
".fish",
|
|
4855
|
+
// Cross-platform scripting languages
|
|
4856
|
+
".py",
|
|
4857
|
+
".rb",
|
|
4858
|
+
".pl",
|
|
4859
|
+
".js",
|
|
4860
|
+
".ts",
|
|
4861
|
+
".lua",
|
|
4862
|
+
".php",
|
|
4863
|
+
".tcl",
|
|
4864
|
+
".awk",
|
|
4865
|
+
".sed",
|
|
4866
|
+
// Windows scripts
|
|
4867
|
+
".ps1",
|
|
4868
|
+
// PowerShell
|
|
4869
|
+
".psm1",
|
|
4870
|
+
// PowerShell module
|
|
4871
|
+
".psd1",
|
|
4872
|
+
// PowerShell data
|
|
4873
|
+
".bat",
|
|
4874
|
+
// Batch file
|
|
4875
|
+
".cmd",
|
|
4876
|
+
// Command script
|
|
4877
|
+
".vbs",
|
|
4878
|
+
// VBScript
|
|
4879
|
+
".wsf"
|
|
4880
|
+
// Windows Script File
|
|
4881
|
+
];
|
|
4882
|
+
WINDOWS_EXECUTABLE_EXTENSIONS = [".exe", ".com", ".dll"];
|
|
4883
|
+
bufferStartsWith = (buffer, magic) => {
|
|
4884
|
+
if (buffer.length < magic.length) {
|
|
4885
|
+
return false;
|
|
4704
4886
|
}
|
|
4705
|
-
|
|
4706
|
-
|
|
4707
|
-
|
|
4708
|
-
|
|
4709
|
-
|
|
4710
|
-
|
|
4711
|
-
await copy2(expandedSource, backupPath, { overwrite: true });
|
|
4712
|
-
return {
|
|
4713
|
-
originalPath: expandedSource,
|
|
4714
|
-
backupPath,
|
|
4715
|
-
date
|
|
4716
|
-
};
|
|
4887
|
+
for (let i = 0; i < magic.length; i++) {
|
|
4888
|
+
if (buffer[i] !== magic[i]) {
|
|
4889
|
+
return false;
|
|
4890
|
+
}
|
|
4891
|
+
}
|
|
4892
|
+
return true;
|
|
4717
4893
|
};
|
|
4718
|
-
|
|
4719
|
-
|
|
4720
|
-
|
|
4721
|
-
// src/lib/hooks.ts
|
|
4722
|
-
import { exec, execSync } from "child_process";
|
|
4723
|
-
import { promisify as promisify5 } from "util";
|
|
4724
|
-
import chalk4 from "chalk";
|
|
4725
|
-
var execAsync, getWindowsShell, runHook, runPreSyncHook, runPostSyncHook, runPreRestoreHook, runPostRestoreHook;
|
|
4726
|
-
var init_hooks = __esm({
|
|
4727
|
-
"src/lib/hooks.ts"() {
|
|
4728
|
-
"use strict";
|
|
4729
|
-
init_config();
|
|
4730
|
-
init_logger();
|
|
4731
|
-
init_prompts();
|
|
4732
|
-
init_platform();
|
|
4733
|
-
execAsync = promisify5(exec);
|
|
4734
|
-
getWindowsShell = () => {
|
|
4894
|
+
isBinaryExecutable = async (path) => {
|
|
4895
|
+
const expandedPath = expandPath(path);
|
|
4735
4896
|
try {
|
|
4736
|
-
|
|
4737
|
-
|
|
4897
|
+
const stats = await stat4(expandedPath);
|
|
4898
|
+
if (stats.isDirectory()) {
|
|
4899
|
+
return false;
|
|
4900
|
+
}
|
|
4901
|
+
if (IS_WINDOWS) {
|
|
4902
|
+
const lowerPath = expandedPath.toLowerCase();
|
|
4903
|
+
if (WINDOWS_EXECUTABLE_EXTENSIONS.some((ext) => lowerPath.endsWith(ext))) {
|
|
4904
|
+
return true;
|
|
4905
|
+
}
|
|
4906
|
+
}
|
|
4907
|
+
const hasExecutePermission = !IS_WINDOWS && (stats.mode & 73) !== 0;
|
|
4908
|
+
let fileHandle;
|
|
4909
|
+
try {
|
|
4910
|
+
fileHandle = await open(expandedPath, "r");
|
|
4911
|
+
const buffer = Buffer.alloc(512);
|
|
4912
|
+
await fileHandle.read(buffer, 0, 512, 0);
|
|
4913
|
+
if (bufferStartsWith(buffer, MAGIC_NUMBERS.ELF) || bufferStartsWith(buffer, MAGIC_NUMBERS.MACHO_32) || bufferStartsWith(buffer, MAGIC_NUMBERS.MACHO_64) || bufferStartsWith(buffer, MAGIC_NUMBERS.MACHO_UNIVERSAL) || bufferStartsWith(buffer, MAGIC_NUMBERS.PE)) {
|
|
4914
|
+
return true;
|
|
4915
|
+
}
|
|
4916
|
+
if (hasExecutePermission) {
|
|
4917
|
+
const startsWithShebang = buffer[0] === 35 && buffer[1] === 33;
|
|
4918
|
+
return !startsWithShebang;
|
|
4919
|
+
}
|
|
4920
|
+
return false;
|
|
4921
|
+
} finally {
|
|
4922
|
+
if (fileHandle) {
|
|
4923
|
+
await fileHandle.close();
|
|
4924
|
+
}
|
|
4925
|
+
}
|
|
4738
4926
|
} catch {
|
|
4927
|
+
return false;
|
|
4739
4928
|
}
|
|
4929
|
+
};
|
|
4930
|
+
isScriptFile = async (path) => {
|
|
4931
|
+
const expandedPath = expandPath(path);
|
|
4740
4932
|
try {
|
|
4741
|
-
|
|
4742
|
-
|
|
4933
|
+
const hasScriptExtension = SCRIPT_EXTENSIONS.some((ext) => expandedPath.endsWith(ext));
|
|
4934
|
+
if (hasScriptExtension) {
|
|
4935
|
+
return true;
|
|
4936
|
+
}
|
|
4937
|
+
const stats = await stat4(expandedPath);
|
|
4938
|
+
if (stats.isDirectory()) {
|
|
4939
|
+
return false;
|
|
4940
|
+
}
|
|
4941
|
+
let fileHandle;
|
|
4942
|
+
try {
|
|
4943
|
+
fileHandle = await open(expandedPath, "r");
|
|
4944
|
+
const buffer = Buffer.alloc(2);
|
|
4945
|
+
await fileHandle.read(buffer, 0, 2, 0);
|
|
4946
|
+
return buffer[0] === 35 && buffer[1] === 33;
|
|
4947
|
+
} finally {
|
|
4948
|
+
if (fileHandle) {
|
|
4949
|
+
await fileHandle.close();
|
|
4950
|
+
}
|
|
4951
|
+
}
|
|
4743
4952
|
} catch {
|
|
4953
|
+
return false;
|
|
4744
4954
|
}
|
|
4745
|
-
return "cmd.exe";
|
|
4746
4955
|
};
|
|
4747
|
-
|
|
4748
|
-
|
|
4749
|
-
|
|
4750
|
-
|
|
4751
|
-
const
|
|
4752
|
-
|
|
4753
|
-
|
|
4754
|
-
return { success: true };
|
|
4755
|
-
}
|
|
4756
|
-
if (!options?.trustHooks) {
|
|
4757
|
-
console.log();
|
|
4758
|
-
console.log(chalk4.yellow.bold("WARNING: Hook Execution"));
|
|
4759
|
-
console.log(chalk4.dim("\u2500".repeat(50)));
|
|
4760
|
-
console.log(chalk4.white(`Hook type: ${chalk4.cyan(hookType)}`));
|
|
4761
|
-
console.log(chalk4.white("Command:"));
|
|
4762
|
-
console.log(chalk4.red(` ${command}`));
|
|
4763
|
-
console.log(chalk4.dim("\u2500".repeat(50)));
|
|
4764
|
-
console.log(
|
|
4765
|
-
chalk4.yellow(
|
|
4766
|
-
"SECURITY: Hooks can execute arbitrary commands on your system."
|
|
4767
|
-
)
|
|
4768
|
-
);
|
|
4769
|
-
console.log(
|
|
4770
|
-
chalk4.yellow(
|
|
4771
|
-
"Only proceed if you trust the source of this configuration."
|
|
4772
|
-
)
|
|
4773
|
-
);
|
|
4774
|
-
console.log();
|
|
4775
|
-
const confirmed = await prompts.confirm(
|
|
4776
|
-
"Execute this hook?",
|
|
4777
|
-
false
|
|
4778
|
-
// Default to NO for safety
|
|
4779
|
-
);
|
|
4780
|
-
if (!confirmed) {
|
|
4781
|
-
logger.warning(`Hook ${hookType} skipped by user`);
|
|
4782
|
-
return { success: true, skipped: true };
|
|
4783
|
-
}
|
|
4784
|
-
}
|
|
4785
|
-
if (!options?.silent) {
|
|
4786
|
-
logger.dim(`Running ${hookType} hook...`);
|
|
4956
|
+
shouldExcludeFromBin = async (path) => {
|
|
4957
|
+
const expandedPath = expandPath(path);
|
|
4958
|
+
const parentDir = dirname5(expandedPath);
|
|
4959
|
+
const parentBasename = basename4(parentDir);
|
|
4960
|
+
const isInBinDir = parentBasename === "bin";
|
|
4961
|
+
if (!isInBinDir) {
|
|
4962
|
+
return false;
|
|
4787
4963
|
}
|
|
4788
4964
|
try {
|
|
4789
|
-
const
|
|
4790
|
-
|
|
4791
|
-
|
|
4792
|
-
timeout: 3e4,
|
|
4793
|
-
// 30 second timeout
|
|
4794
|
-
env: {
|
|
4795
|
-
...process.env,
|
|
4796
|
-
TUCK_DIR: tuckDir,
|
|
4797
|
-
TUCK_HOOK: hookType
|
|
4798
|
-
},
|
|
4799
|
-
...shellOptions
|
|
4800
|
-
});
|
|
4801
|
-
if (stdout && !options?.silent) {
|
|
4802
|
-
logger.dim(stdout.trim());
|
|
4803
|
-
}
|
|
4804
|
-
if (stderr && !options?.silent) {
|
|
4805
|
-
logger.warning(stderr.trim());
|
|
4806
|
-
}
|
|
4807
|
-
return { success: true, output: stdout };
|
|
4808
|
-
} catch (error) {
|
|
4809
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
4810
|
-
if (!options?.silent) {
|
|
4811
|
-
logger.error(`Hook ${hookType} failed: ${errorMessage}`);
|
|
4965
|
+
const stats = await stat4(expandedPath);
|
|
4966
|
+
if (stats.isDirectory()) {
|
|
4967
|
+
return false;
|
|
4812
4968
|
}
|
|
4813
|
-
|
|
4969
|
+
} catch {
|
|
4970
|
+
return false;
|
|
4814
4971
|
}
|
|
4815
|
-
|
|
4816
|
-
|
|
4817
|
-
|
|
4818
|
-
|
|
4819
|
-
runPostSyncHook = async (tuckDir, options) => {
|
|
4820
|
-
return runHook("postSync", tuckDir, options);
|
|
4821
|
-
};
|
|
4822
|
-
runPreRestoreHook = async (tuckDir, options) => {
|
|
4823
|
-
return runHook("preRestore", tuckDir, options);
|
|
4824
|
-
};
|
|
4825
|
-
runPostRestoreHook = async (tuckDir, options) => {
|
|
4826
|
-
return runHook("postRestore", tuckDir, options);
|
|
4972
|
+
if (await isScriptFile(expandedPath)) {
|
|
4973
|
+
return false;
|
|
4974
|
+
}
|
|
4975
|
+
return await isBinaryExecutable(expandedPath);
|
|
4827
4976
|
};
|
|
4828
4977
|
}
|
|
4829
4978
|
});
|
|
4830
4979
|
|
|
4831
|
-
// src/lib/
|
|
4832
|
-
|
|
4833
|
-
|
|
4834
|
-
|
|
4980
|
+
// src/lib/tuckignore.ts
|
|
4981
|
+
import { join as join6 } from "path";
|
|
4982
|
+
import { readFile as readFile4, writeFile as writeFile3, appendFile } from "fs/promises";
|
|
4983
|
+
var TUCKIGNORE_FILENAME, TUCKIGNORE_HEADER, getTuckignorePath, loadTuckignore, addToTuckignore, isIgnored;
|
|
4984
|
+
var init_tuckignore = __esm({
|
|
4985
|
+
"src/lib/tuckignore.ts"() {
|
|
4835
4986
|
"use strict";
|
|
4836
|
-
|
|
4837
|
-
|
|
4838
|
-
|
|
4839
|
-
|
|
4840
|
-
|
|
4841
|
-
|
|
4842
|
-
|
|
4843
|
-
|
|
4844
|
-
|
|
4845
|
-
|
|
4846
|
-
|
|
4847
|
-
|
|
4848
|
-
|
|
4849
|
-
|
|
4850
|
-
|
|
4851
|
-
|
|
4852
|
-
|
|
4853
|
-
|
|
4854
|
-
}
|
|
4855
|
-
{
|
|
4987
|
+
init_paths();
|
|
4988
|
+
TUCKIGNORE_FILENAME = ".tuckignore";
|
|
4989
|
+
TUCKIGNORE_HEADER = `# .tuckignore - Files to exclude from tracking
|
|
4990
|
+
# One exact file path per line (no globs)
|
|
4991
|
+
# Lines starting with # are comments
|
|
4992
|
+
#
|
|
4993
|
+
# Example:
|
|
4994
|
+
# ~/bin/large-binary
|
|
4995
|
+
# ~/.docker/config.json
|
|
4996
|
+
`;
|
|
4997
|
+
getTuckignorePath = (tuckDir) => {
|
|
4998
|
+
return join6(tuckDir, TUCKIGNORE_FILENAME);
|
|
4999
|
+
};
|
|
5000
|
+
loadTuckignore = async (tuckDir) => {
|
|
5001
|
+
const ignorePath = getTuckignorePath(tuckDir);
|
|
5002
|
+
const ignoredPaths = /* @__PURE__ */ new Set();
|
|
5003
|
+
if (!await pathExists(ignorePath)) {
|
|
5004
|
+
return ignoredPaths;
|
|
5005
|
+
}
|
|
5006
|
+
try {
|
|
5007
|
+
const content = await readFile4(ignorePath, "utf-8");
|
|
5008
|
+
const lines = content.split("\n");
|
|
5009
|
+
for (const line of lines) {
|
|
5010
|
+
const trimmed = line.trim();
|
|
5011
|
+
if (!trimmed || trimmed.startsWith("#")) {
|
|
5012
|
+
continue;
|
|
5013
|
+
}
|
|
5014
|
+
const expanded = expandPath(trimmed);
|
|
5015
|
+
const collapsed = collapsePath(expanded);
|
|
5016
|
+
ignoredPaths.add(collapsed);
|
|
5017
|
+
}
|
|
5018
|
+
} catch {
|
|
5019
|
+
}
|
|
5020
|
+
return ignoredPaths;
|
|
5021
|
+
};
|
|
5022
|
+
addToTuckignore = async (tuckDir, path) => {
|
|
5023
|
+
const ignorePath = getTuckignorePath(tuckDir);
|
|
5024
|
+
const expanded = expandPath(path);
|
|
5025
|
+
const collapsed = collapsePath(expanded);
|
|
5026
|
+
const existingPaths = await loadTuckignore(tuckDir);
|
|
5027
|
+
if (existingPaths.has(collapsed)) {
|
|
5028
|
+
return;
|
|
5029
|
+
}
|
|
5030
|
+
if (!await pathExists(ignorePath)) {
|
|
5031
|
+
await writeFile3(ignorePath, TUCKIGNORE_HEADER + "\n", "utf-8");
|
|
5032
|
+
}
|
|
5033
|
+
await appendFile(ignorePath, collapsed + "\n", "utf-8");
|
|
5034
|
+
};
|
|
5035
|
+
isIgnored = async (tuckDir, path) => {
|
|
5036
|
+
const ignoredPaths = await loadTuckignore(tuckDir);
|
|
5037
|
+
const expanded = expandPath(path);
|
|
5038
|
+
const collapsed = collapsePath(expanded);
|
|
5039
|
+
return ignoredPaths.has(collapsed);
|
|
5040
|
+
};
|
|
5041
|
+
}
|
|
5042
|
+
});
|
|
5043
|
+
|
|
5044
|
+
// src/lib/audit.ts
|
|
5045
|
+
import { appendFile as appendFile2, mkdir } from "fs/promises";
|
|
5046
|
+
import { join as join7 } from "path";
|
|
5047
|
+
import { homedir as homedir4 } from "os";
|
|
5048
|
+
import { existsSync } from "fs";
|
|
5049
|
+
function getAuditLogPath() {
|
|
5050
|
+
return join7(homedir4(), ".tuck", AUDIT_FILENAME);
|
|
5051
|
+
}
|
|
5052
|
+
async function logAuditEntry(action, command, details) {
|
|
5053
|
+
try {
|
|
5054
|
+
const tuckDir = join7(homedir4(), ".tuck");
|
|
5055
|
+
if (!existsSync(tuckDir)) {
|
|
5056
|
+
await mkdir(tuckDir, { recursive: true });
|
|
5057
|
+
}
|
|
5058
|
+
const entry = {
|
|
5059
|
+
timestamp: (/* @__PURE__ */ new Date()).toISOString(),
|
|
5060
|
+
action,
|
|
5061
|
+
command,
|
|
5062
|
+
details,
|
|
5063
|
+
user: process.env.USER || process.env.USERNAME,
|
|
5064
|
+
cwd: process.cwd()
|
|
5065
|
+
};
|
|
5066
|
+
const logLine = JSON.stringify(entry) + "\n";
|
|
5067
|
+
const logPath = getAuditLogPath();
|
|
5068
|
+
await appendFile2(logPath, logLine, "utf-8");
|
|
5069
|
+
} catch {
|
|
5070
|
+
if (process.env.DEBUG) {
|
|
5071
|
+
console.error("[DEBUG] Failed to write audit log");
|
|
5072
|
+
}
|
|
5073
|
+
}
|
|
5074
|
+
}
|
|
5075
|
+
async function logForceSecretBypass(command, filesCount) {
|
|
5076
|
+
await logAuditEntry(
|
|
5077
|
+
"FORCE_SECRET_BYPASS",
|
|
5078
|
+
command,
|
|
5079
|
+
`Bypassed secret scanning for ${filesCount} file(s)`
|
|
5080
|
+
);
|
|
5081
|
+
}
|
|
5082
|
+
async function logForcePush(branch) {
|
|
5083
|
+
await logAuditEntry("FORCE_PUSH", "tuck push --force", `Force pushed to branch: ${branch}`);
|
|
5084
|
+
}
|
|
5085
|
+
var AUDIT_FILENAME;
|
|
5086
|
+
var init_audit = __esm({
|
|
5087
|
+
"src/lib/audit.ts"() {
|
|
5088
|
+
"use strict";
|
|
5089
|
+
AUDIT_FILENAME = "audit.log";
|
|
5090
|
+
}
|
|
5091
|
+
});
|
|
5092
|
+
|
|
5093
|
+
// src/lib/secrets/regexSafety.ts
|
|
5094
|
+
var MAX_CUSTOM_PATTERN_LENGTH, MAX_GROUP_DEPTH, ALLOWED_CUSTOM_REGEX_FLAGS, parseBraceQuantifier, getSafetyIssue, normalizeCustomRegexFlags, assertSafeCustomRegex;
|
|
5095
|
+
var init_regexSafety = __esm({
|
|
5096
|
+
"src/lib/secrets/regexSafety.ts"() {
|
|
5097
|
+
"use strict";
|
|
5098
|
+
MAX_CUSTOM_PATTERN_LENGTH = 500;
|
|
5099
|
+
MAX_GROUP_DEPTH = 16;
|
|
5100
|
+
ALLOWED_CUSTOM_REGEX_FLAGS = /* @__PURE__ */ new Set(["d", "g", "i", "m", "s", "u", "y"]);
|
|
5101
|
+
parseBraceQuantifier = (source, start) => {
|
|
5102
|
+
if (source[start] !== "{") {
|
|
5103
|
+
return null;
|
|
5104
|
+
}
|
|
5105
|
+
let index = start + 1;
|
|
5106
|
+
let min = "";
|
|
5107
|
+
let max = "";
|
|
5108
|
+
let hasComma = false;
|
|
5109
|
+
while (index < source.length && /[0-9]/.test(source[index])) {
|
|
5110
|
+
min += source[index];
|
|
5111
|
+
index++;
|
|
5112
|
+
}
|
|
5113
|
+
if (source[index] === ",") {
|
|
5114
|
+
hasComma = true;
|
|
5115
|
+
index++;
|
|
5116
|
+
while (index < source.length && /[0-9]/.test(source[index])) {
|
|
5117
|
+
max += source[index];
|
|
5118
|
+
index++;
|
|
5119
|
+
}
|
|
5120
|
+
}
|
|
5121
|
+
if (source[index] !== "}") {
|
|
5122
|
+
return null;
|
|
5123
|
+
}
|
|
5124
|
+
if (min.length === 0) {
|
|
5125
|
+
return null;
|
|
5126
|
+
}
|
|
5127
|
+
const unbounded = hasComma && max.length === 0;
|
|
5128
|
+
const variable = hasComma && (max.length === 0 || max !== min);
|
|
5129
|
+
return { endIndex: index, variable, unbounded };
|
|
5130
|
+
};
|
|
5131
|
+
getSafetyIssue = (source) => {
|
|
5132
|
+
const stack = [{ hasVariableQuantifier: false, hasAlternation: false }];
|
|
5133
|
+
let lastToken = { type: "none" };
|
|
5134
|
+
let inCharClass = false;
|
|
5135
|
+
for (let i = 0; i < source.length; ) {
|
|
5136
|
+
const char = source[i];
|
|
5137
|
+
if (char === "\\") {
|
|
5138
|
+
const next = source[i + 1];
|
|
5139
|
+
if (next && /[1-9]/.test(next)) {
|
|
5140
|
+
return "backreferences are not allowed";
|
|
5141
|
+
}
|
|
5142
|
+
if (next === "k" && source[i + 2] === "<") {
|
|
5143
|
+
return "named backreferences are not allowed";
|
|
5144
|
+
}
|
|
5145
|
+
lastToken = { type: "literal" };
|
|
5146
|
+
i += 2;
|
|
5147
|
+
continue;
|
|
5148
|
+
}
|
|
5149
|
+
if (inCharClass) {
|
|
5150
|
+
if (char === "]") {
|
|
5151
|
+
inCharClass = false;
|
|
5152
|
+
}
|
|
5153
|
+
i++;
|
|
5154
|
+
continue;
|
|
5155
|
+
}
|
|
5156
|
+
if (char === "[") {
|
|
5157
|
+
inCharClass = true;
|
|
5158
|
+
lastToken = { type: "literal" };
|
|
5159
|
+
i++;
|
|
5160
|
+
continue;
|
|
5161
|
+
}
|
|
5162
|
+
if (char === "(") {
|
|
5163
|
+
if (source.startsWith("(?<=", i) || source.startsWith("(?<!", i)) {
|
|
5164
|
+
return "lookbehind assertions are not allowed";
|
|
5165
|
+
}
|
|
5166
|
+
stack.push({ hasVariableQuantifier: false, hasAlternation: false });
|
|
5167
|
+
if (stack.length > MAX_GROUP_DEPTH) {
|
|
5168
|
+
return `pattern nesting is too deep (max ${MAX_GROUP_DEPTH} groups)`;
|
|
5169
|
+
}
|
|
5170
|
+
lastToken = { type: "none" };
|
|
5171
|
+
i++;
|
|
5172
|
+
continue;
|
|
5173
|
+
}
|
|
5174
|
+
if (char === ")") {
|
|
5175
|
+
if (stack.length > 1) {
|
|
5176
|
+
const closed = stack.pop();
|
|
5177
|
+
lastToken = { type: "group", group: closed };
|
|
5178
|
+
} else {
|
|
5179
|
+
lastToken = { type: "literal" };
|
|
5180
|
+
}
|
|
5181
|
+
i++;
|
|
5182
|
+
continue;
|
|
5183
|
+
}
|
|
5184
|
+
if (char === "|") {
|
|
5185
|
+
stack[stack.length - 1].hasAlternation = true;
|
|
5186
|
+
lastToken = { type: "none" };
|
|
5187
|
+
i++;
|
|
5188
|
+
continue;
|
|
5189
|
+
}
|
|
5190
|
+
const braceQuantifier = parseBraceQuantifier(source, i);
|
|
5191
|
+
if (braceQuantifier && (lastToken.type === "literal" || lastToken.type === "group")) {
|
|
5192
|
+
if (braceQuantifier.variable) {
|
|
5193
|
+
stack[stack.length - 1].hasVariableQuantifier = true;
|
|
5194
|
+
}
|
|
5195
|
+
if (lastToken.type === "group" && braceQuantifier.unbounded) {
|
|
5196
|
+
if (lastToken.group?.hasVariableQuantifier) {
|
|
5197
|
+
return "nested quantified groups are not allowed";
|
|
5198
|
+
}
|
|
5199
|
+
if (lastToken.group?.hasAlternation) {
|
|
5200
|
+
return "unbounded quantifiers on alternation groups are not allowed";
|
|
5201
|
+
}
|
|
5202
|
+
}
|
|
5203
|
+
lastToken = { type: "quantifier" };
|
|
5204
|
+
i = braceQuantifier.endIndex + 1;
|
|
5205
|
+
if (source[i] === "?") {
|
|
5206
|
+
i++;
|
|
5207
|
+
}
|
|
5208
|
+
continue;
|
|
5209
|
+
}
|
|
5210
|
+
if ((char === "*" || char === "+" || char === "?") && (lastToken.type === "literal" || lastToken.type === "group")) {
|
|
5211
|
+
stack[stack.length - 1].hasVariableQuantifier = true;
|
|
5212
|
+
const unbounded = char === "*" || char === "+";
|
|
5213
|
+
if (lastToken.type === "group" && unbounded) {
|
|
5214
|
+
if (lastToken.group?.hasVariableQuantifier) {
|
|
5215
|
+
return "nested quantified groups are not allowed";
|
|
5216
|
+
}
|
|
5217
|
+
if (lastToken.group?.hasAlternation) {
|
|
5218
|
+
return "unbounded quantifiers on alternation groups are not allowed";
|
|
5219
|
+
}
|
|
5220
|
+
}
|
|
5221
|
+
lastToken = { type: "quantifier" };
|
|
5222
|
+
i++;
|
|
5223
|
+
if (source[i] === "?") {
|
|
5224
|
+
i++;
|
|
5225
|
+
}
|
|
5226
|
+
continue;
|
|
5227
|
+
}
|
|
5228
|
+
lastToken = { type: "literal" };
|
|
5229
|
+
i++;
|
|
5230
|
+
}
|
|
5231
|
+
return null;
|
|
5232
|
+
};
|
|
5233
|
+
normalizeCustomRegexFlags = (flags = "g") => {
|
|
5234
|
+
const merged = `${flags}g`;
|
|
5235
|
+
let normalized = "";
|
|
5236
|
+
const seen = /* @__PURE__ */ new Set();
|
|
5237
|
+
for (const flag of merged) {
|
|
5238
|
+
if (!ALLOWED_CUSTOM_REGEX_FLAGS.has(flag)) {
|
|
5239
|
+
throw new Error(`Unsupported regex flag "${flag}" in custom pattern`);
|
|
5240
|
+
}
|
|
5241
|
+
if (!seen.has(flag)) {
|
|
5242
|
+
seen.add(flag);
|
|
5243
|
+
normalized += flag;
|
|
5244
|
+
}
|
|
5245
|
+
}
|
|
5246
|
+
return normalized;
|
|
5247
|
+
};
|
|
5248
|
+
assertSafeCustomRegex = (source) => {
|
|
5249
|
+
if (!source.trim()) {
|
|
5250
|
+
throw new Error("Custom pattern cannot be empty");
|
|
5251
|
+
}
|
|
5252
|
+
if (source.length > MAX_CUSTOM_PATTERN_LENGTH) {
|
|
5253
|
+
throw new Error(`Custom pattern is too long (${source.length} > ${MAX_CUSTOM_PATTERN_LENGTH})`);
|
|
5254
|
+
}
|
|
5255
|
+
const issue = getSafetyIssue(source);
|
|
5256
|
+
if (issue) {
|
|
5257
|
+
throw new Error(`Unsafe custom regex pattern rejected: ${issue}`);
|
|
5258
|
+
}
|
|
5259
|
+
};
|
|
5260
|
+
}
|
|
5261
|
+
});
|
|
5262
|
+
|
|
5263
|
+
// src/lib/secrets/patterns.ts
|
|
5264
|
+
var CLOUD_PROVIDER_PATTERNS, API_TOKEN_PATTERNS, PRIVATE_KEY_PATTERNS, GENERIC_PATTERNS, ALL_SECRET_PATTERNS, getPatternsAboveSeverity, createCustomPattern, BINARY_EXTENSIONS, shouldSkipFile;
|
|
5265
|
+
var init_patterns = __esm({
|
|
5266
|
+
"src/lib/secrets/patterns.ts"() {
|
|
5267
|
+
"use strict";
|
|
5268
|
+
init_regexSafety();
|
|
5269
|
+
CLOUD_PROVIDER_PATTERNS = [
|
|
5270
|
+
{
|
|
5271
|
+
id: "aws-access-key",
|
|
5272
|
+
name: "AWS Access Key ID",
|
|
5273
|
+
pattern: /\b(AKIA[0-9A-Z]{16})\b/g,
|
|
5274
|
+
severity: "critical",
|
|
5275
|
+
description: "AWS Access Key ID",
|
|
5276
|
+
placeholder: "AWS_ACCESS_KEY_ID"
|
|
5277
|
+
},
|
|
5278
|
+
{
|
|
5279
|
+
id: "aws-secret-key",
|
|
5280
|
+
name: "AWS Secret Access Key",
|
|
5281
|
+
// AWS secret keys are 40 characters, base64-ish
|
|
5282
|
+
// Look for context clues (assignment to aws_secret, etc.)
|
|
5283
|
+
pattern: /(?:aws_secret_access_key|aws_secret|secret_access_key)\s*[=:]\s*['"]?([A-Za-z0-9/+=]{40})['"]?/gi,
|
|
5284
|
+
severity: "critical",
|
|
5285
|
+
description: "AWS Secret Access Key",
|
|
5286
|
+
placeholder: "AWS_SECRET_ACCESS_KEY"
|
|
5287
|
+
},
|
|
5288
|
+
{
|
|
4856
5289
|
id: "aws-session-token",
|
|
4857
5290
|
name: "AWS Session Token",
|
|
4858
5291
|
pattern: /(?:aws_session_token)\s*[=:]\s*['"]?([A-Za-z0-9/+=]{100,1000})['"]?/gi,
|
|
@@ -5445,10 +5878,19 @@ var init_patterns = __esm({
|
|
|
5445
5878
|
return ALL_SECRET_PATTERNS.filter((p4) => severityOrder[p4.severity] <= minLevel);
|
|
5446
5879
|
};
|
|
5447
5880
|
createCustomPattern = (id, name, pattern, options) => {
|
|
5881
|
+
const normalizedFlags = normalizeCustomRegexFlags(options?.flags);
|
|
5882
|
+
assertSafeCustomRegex(pattern);
|
|
5883
|
+
let compiledPattern;
|
|
5884
|
+
try {
|
|
5885
|
+
compiledPattern = new RegExp(pattern, normalizedFlags);
|
|
5886
|
+
} catch (error) {
|
|
5887
|
+
const message = error instanceof Error ? error.message : String(error);
|
|
5888
|
+
throw new Error(`Invalid custom regex pattern: ${message}`);
|
|
5889
|
+
}
|
|
5448
5890
|
return {
|
|
5449
5891
|
id: `custom-${id}`,
|
|
5450
5892
|
name,
|
|
5451
|
-
pattern:
|
|
5893
|
+
pattern: compiledPattern,
|
|
5452
5894
|
severity: options?.severity || "high",
|
|
5453
5895
|
description: options?.description || `Custom pattern: ${name}`,
|
|
5454
5896
|
placeholder: options?.placeholder || id.toUpperCase().replace(/-/g, "_")
|
|
@@ -5535,7 +5977,7 @@ __export(scanner_exports, {
|
|
|
5535
5977
|
scanFile: () => scanFile,
|
|
5536
5978
|
scanFiles: () => scanFiles
|
|
5537
5979
|
});
|
|
5538
|
-
import { readFile as
|
|
5980
|
+
import { readFile as readFile5, stat as stat5 } from "fs/promises";
|
|
5539
5981
|
var MAX_FILE_SIZE, MAX_FILES_PER_SCAN, WARN_FILES_THRESHOLD, SCAN_TIMEOUT_MS, PATTERN_TIMEOUT_MS, redactSecret, getPosition, getContext, clonePattern, scanContent, scanFile, scanFiles, generateUniquePlaceholder, getSecretsWithPlaceholders;
|
|
5540
5982
|
var init_scanner = __esm({
|
|
5541
5983
|
"src/lib/secrets/scanner.ts"() {
|
|
@@ -5612,7 +6054,8 @@ var init_scanner = __esm({
|
|
|
5612
6054
|
}
|
|
5613
6055
|
};
|
|
5614
6056
|
clonePattern = (pattern) => {
|
|
5615
|
-
|
|
6057
|
+
const flags = pattern.flags.includes("g") ? pattern.flags : `${pattern.flags}g`;
|
|
6058
|
+
return new RegExp(pattern.source, flags);
|
|
5616
6059
|
};
|
|
5617
6060
|
scanContent = (content, options = {}) => {
|
|
5618
6061
|
const matches = [];
|
|
@@ -5627,6 +6070,9 @@ var init_scanner = __esm({
|
|
|
5627
6070
|
patterns = ALL_SECRET_PATTERNS;
|
|
5628
6071
|
}
|
|
5629
6072
|
if (options.customPatterns) {
|
|
6073
|
+
for (const customPattern of options.customPatterns) {
|
|
6074
|
+
assertSafeCustomRegex(customPattern.pattern.source);
|
|
6075
|
+
}
|
|
5630
6076
|
patterns = [...patterns, ...options.customPatterns];
|
|
5631
6077
|
}
|
|
5632
6078
|
if (options.excludePatternIds && options.excludePatternIds.length > 0) {
|
|
@@ -5711,7 +6157,7 @@ var init_scanner = __esm({
|
|
|
5711
6157
|
};
|
|
5712
6158
|
}
|
|
5713
6159
|
try {
|
|
5714
|
-
const stats = await
|
|
6160
|
+
const stats = await stat5(expandedPath);
|
|
5715
6161
|
if (stats.size > maxSize) {
|
|
5716
6162
|
return {
|
|
5717
6163
|
path: expandedPath,
|
|
@@ -5755,7 +6201,7 @@ var init_scanner = __esm({
|
|
|
5755
6201
|
};
|
|
5756
6202
|
}
|
|
5757
6203
|
try {
|
|
5758
|
-
const content = await
|
|
6204
|
+
const content = await readFile5(expandedPath, "utf-8");
|
|
5759
6205
|
const matches = scanContent(content, options);
|
|
5760
6206
|
return {
|
|
5761
6207
|
path: expandedPath,
|
|
@@ -5860,9 +6306,9 @@ var init_scanner = __esm({
|
|
|
5860
6306
|
});
|
|
5861
6307
|
|
|
5862
6308
|
// src/lib/secrets/store.ts
|
|
5863
|
-
import { readFile as
|
|
5864
|
-
import { join as
|
|
5865
|
-
import { ensureDir as
|
|
6309
|
+
import { readFile as readFile6, writeFile as writeFile4, chmod, stat as stat6 } from "fs/promises";
|
|
6310
|
+
import { join as join8 } from "path";
|
|
6311
|
+
import { ensureDir as ensureDir3 } from "fs-extra";
|
|
5866
6312
|
var SECRETS_FILE_MODE, TUCK_DIR_MODE, SECRETS_FILENAME, getSecretsPath, loadSecretsStore, windowsPermissionWarningShown, saveSecretsStore, setSecret, getSecret, unsetSecret, listSecrets, getAllSecrets, getSecretCount, ensureSecretsGitignored, MAX_SECRET_NAME_LENGTH, MIN_SECRET_NAME_LENGTH, isValidSecretName, normalizeSecretName;
|
|
5867
6313
|
var init_store = __esm({
|
|
5868
6314
|
"src/lib/secrets/store.ts"() {
|
|
@@ -5873,7 +6319,7 @@ var init_store = __esm({
|
|
|
5873
6319
|
TUCK_DIR_MODE = 448;
|
|
5874
6320
|
SECRETS_FILENAME = "secrets.local.json";
|
|
5875
6321
|
getSecretsPath = (tuckDir) => {
|
|
5876
|
-
return
|
|
6322
|
+
return join8(tuckDir, SECRETS_FILENAME);
|
|
5877
6323
|
};
|
|
5878
6324
|
loadSecretsStore = async (tuckDir) => {
|
|
5879
6325
|
const secretsPath = getSecretsPath(tuckDir);
|
|
@@ -5884,7 +6330,7 @@ var init_store = __esm({
|
|
|
5884
6330
|
};
|
|
5885
6331
|
}
|
|
5886
6332
|
try {
|
|
5887
|
-
const stats = await
|
|
6333
|
+
const stats = await stat6(secretsPath);
|
|
5888
6334
|
const mode = stats.mode & 511;
|
|
5889
6335
|
if ((mode & 63) !== 0) {
|
|
5890
6336
|
await chmod(secretsPath, SECRETS_FILE_MODE);
|
|
@@ -5892,7 +6338,7 @@ var init_store = __esm({
|
|
|
5892
6338
|
} catch {
|
|
5893
6339
|
}
|
|
5894
6340
|
try {
|
|
5895
|
-
const content = await
|
|
6341
|
+
const content = await readFile6(secretsPath, "utf-8");
|
|
5896
6342
|
const parsed = JSON.parse(content);
|
|
5897
6343
|
return secretsStoreSchema.parse(parsed);
|
|
5898
6344
|
} catch (error) {
|
|
@@ -5917,13 +6363,13 @@ var init_store = __esm({
|
|
|
5917
6363
|
windowsPermissionWarningShown = false;
|
|
5918
6364
|
saveSecretsStore = async (tuckDir, store) => {
|
|
5919
6365
|
const secretsPath = getSecretsPath(tuckDir);
|
|
5920
|
-
await
|
|
6366
|
+
await ensureDir3(tuckDir);
|
|
5921
6367
|
try {
|
|
5922
6368
|
await chmod(tuckDir, TUCK_DIR_MODE);
|
|
5923
6369
|
} catch {
|
|
5924
6370
|
}
|
|
5925
6371
|
const content = JSON.stringify(store, null, 2) + "\n";
|
|
5926
|
-
await
|
|
6372
|
+
await writeFile4(secretsPath, content, "utf-8");
|
|
5927
6373
|
try {
|
|
5928
6374
|
await chmod(secretsPath, SECRETS_FILE_MODE);
|
|
5929
6375
|
} catch {
|
|
@@ -5985,10 +6431,10 @@ var init_store = __esm({
|
|
|
5985
6431
|
return Object.keys(store.secrets).length;
|
|
5986
6432
|
};
|
|
5987
6433
|
ensureSecretsGitignored = async (tuckDir) => {
|
|
5988
|
-
const gitignorePath =
|
|
6434
|
+
const gitignorePath = join8(tuckDir, ".gitignore");
|
|
5989
6435
|
let gitignoreContent = "";
|
|
5990
6436
|
if (await pathExists(gitignorePath)) {
|
|
5991
|
-
gitignoreContent = await
|
|
6437
|
+
gitignoreContent = await readFile6(gitignorePath, "utf-8");
|
|
5992
6438
|
}
|
|
5993
6439
|
if (gitignoreContent.includes(SECRETS_FILENAME)) {
|
|
5994
6440
|
return;
|
|
@@ -6000,7 +6446,7 @@ ${SECRETS_FILENAME}
|
|
|
6000
6446
|
` : `# Local secrets (NEVER commit)
|
|
6001
6447
|
${SECRETS_FILENAME}
|
|
6002
6448
|
`;
|
|
6003
|
-
await
|
|
6449
|
+
await writeFile4(gitignorePath, newContent, "utf-8");
|
|
6004
6450
|
};
|
|
6005
6451
|
MAX_SECRET_NAME_LENGTH = 100;
|
|
6006
6452
|
MIN_SECRET_NAME_LENGTH = 1;
|
|
@@ -6105,13 +6551,13 @@ during 'tuck add' or 'tuck sync' operations.`;
|
|
|
6105
6551
|
|
|
6106
6552
|
// src/lib/secretBackends/onepassword.ts
|
|
6107
6553
|
import { execFile as execFile5 } from "child_process";
|
|
6108
|
-
import { promisify as
|
|
6554
|
+
import { promisify as promisify5 } from "util";
|
|
6109
6555
|
var execFileAsync5, OnePasswordBackend;
|
|
6110
6556
|
var init_onepassword = __esm({
|
|
6111
6557
|
"src/lib/secretBackends/onepassword.ts"() {
|
|
6112
6558
|
"use strict";
|
|
6113
6559
|
init_errors();
|
|
6114
|
-
execFileAsync5 =
|
|
6560
|
+
execFileAsync5 = promisify5(execFile5);
|
|
6115
6561
|
OnePasswordBackend = class {
|
|
6116
6562
|
name = "1password";
|
|
6117
6563
|
displayName = "1Password";
|
|
@@ -6275,13 +6721,13 @@ Path format: op://vault-name/item-name/field-name
|
|
|
6275
6721
|
|
|
6276
6722
|
// src/lib/secretBackends/bitwarden.ts
|
|
6277
6723
|
import { execFile as execFile6 } from "child_process";
|
|
6278
|
-
import { promisify as
|
|
6724
|
+
import { promisify as promisify6 } from "util";
|
|
6279
6725
|
var execFileAsync6, BitwardenBackend;
|
|
6280
6726
|
var init_bitwarden = __esm({
|
|
6281
6727
|
"src/lib/secretBackends/bitwarden.ts"() {
|
|
6282
6728
|
"use strict";
|
|
6283
6729
|
init_errors();
|
|
6284
|
-
execFileAsync6 =
|
|
6730
|
+
execFileAsync6 = promisify6(execFile6);
|
|
6285
6731
|
BitwardenBackend = class {
|
|
6286
6732
|
name = "bitwarden";
|
|
6287
6733
|
displayName = "Bitwarden";
|
|
@@ -6456,7 +6902,7 @@ Examples:
|
|
|
6456
6902
|
|
|
6457
6903
|
// src/lib/secretBackends/pass.ts
|
|
6458
6904
|
import { execFile as execFile7 } from "child_process";
|
|
6459
|
-
import { promisify as
|
|
6905
|
+
import { promisify as promisify7 } from "util";
|
|
6460
6906
|
import { access as access2 } from "fs/promises";
|
|
6461
6907
|
var execFileAsync7, fileExists, PassBackend;
|
|
6462
6908
|
var init_pass = __esm({
|
|
@@ -6464,7 +6910,7 @@ var init_pass = __esm({
|
|
|
6464
6910
|
"use strict";
|
|
6465
6911
|
init_errors();
|
|
6466
6912
|
init_paths();
|
|
6467
|
-
execFileAsync7 =
|
|
6913
|
+
execFileAsync7 = promisify7(execFile7);
|
|
6468
6914
|
fileExists = async (path) => {
|
|
6469
6915
|
try {
|
|
6470
6916
|
await access2(path);
|
|
@@ -6809,8 +7255,8 @@ var init_secretMappings_schema = __esm({
|
|
|
6809
7255
|
});
|
|
6810
7256
|
|
|
6811
7257
|
// src/lib/secretBackends/mappings.ts
|
|
6812
|
-
import { readFile as
|
|
6813
|
-
import { join as
|
|
7258
|
+
import { readFile as readFile7, writeFile as writeFile5 } from "fs/promises";
|
|
7259
|
+
import { join as join9 } from "path";
|
|
6814
7260
|
var DEFAULT_MAPPINGS_FILENAME, getMappingsPath, loadMappings, saveMappings, getMapping, setMapping, listMappings, getBackendPath;
|
|
6815
7261
|
var init_mappings = __esm({
|
|
6816
7262
|
"src/lib/secretBackends/mappings.ts"() {
|
|
@@ -6819,7 +7265,7 @@ var init_mappings = __esm({
|
|
|
6819
7265
|
init_secretMappings_schema();
|
|
6820
7266
|
DEFAULT_MAPPINGS_FILENAME = "secrets.mappings.json";
|
|
6821
7267
|
getMappingsPath = (tuckDir, customPath) => {
|
|
6822
|
-
return
|
|
7268
|
+
return join9(tuckDir, customPath || DEFAULT_MAPPINGS_FILENAME);
|
|
6823
7269
|
};
|
|
6824
7270
|
loadMappings = async (tuckDir, customPath) => {
|
|
6825
7271
|
const mappingsPath = getMappingsPath(tuckDir, customPath);
|
|
@@ -6827,7 +7273,7 @@ var init_mappings = __esm({
|
|
|
6827
7273
|
return { ...defaultMappingsFile };
|
|
6828
7274
|
}
|
|
6829
7275
|
try {
|
|
6830
|
-
const content = await
|
|
7276
|
+
const content = await readFile7(mappingsPath, "utf-8");
|
|
6831
7277
|
const parsed = JSON.parse(content);
|
|
6832
7278
|
return secretMappingsFileSchema.parse(parsed);
|
|
6833
7279
|
} catch (error) {
|
|
@@ -6839,7 +7285,7 @@ var init_mappings = __esm({
|
|
|
6839
7285
|
saveMappings = async (tuckDir, mappings, customPath) => {
|
|
6840
7286
|
const mappingsPath = getMappingsPath(tuckDir, customPath);
|
|
6841
7287
|
const content = JSON.stringify(mappings, null, 2) + "\n";
|
|
6842
|
-
await
|
|
7288
|
+
await writeFile5(mappingsPath, content, "utf-8");
|
|
6843
7289
|
};
|
|
6844
7290
|
getMapping = async (tuckDir, name, customPath) => {
|
|
6845
7291
|
const mappings = await loadMappings(tuckDir, customPath);
|
|
@@ -7147,9 +7593,9 @@ var init_resolver = __esm({
|
|
|
7147
7593
|
});
|
|
7148
7594
|
|
|
7149
7595
|
// src/lib/secrets/redactor.ts
|
|
7150
|
-
import { readFile as
|
|
7596
|
+
import { readFile as readFile8, writeFile as writeFile6, rename, unlink as unlink2, stat as stat7 } from "fs/promises";
|
|
7151
7597
|
import { randomBytes } from "crypto";
|
|
7152
|
-
import { dirname as
|
|
7598
|
+
import { dirname as dirname6, basename as basename5, join as join10 } from "path";
|
|
7153
7599
|
var atomicWriteFile, formatPlaceholder, PLACEHOLDER_REGEX, redactContent, redactFile, restoreContent, findPlaceholders, restoreFiles;
|
|
7154
7600
|
var init_redactor = __esm({
|
|
7155
7601
|
"src/lib/secrets/redactor.ts"() {
|
|
@@ -7160,20 +7606,20 @@ var init_redactor = __esm({
|
|
|
7160
7606
|
init_config();
|
|
7161
7607
|
atomicWriteFile = async (filepath, content) => {
|
|
7162
7608
|
const tempSuffix = randomBytes(8).toString("hex");
|
|
7163
|
-
const tempPath =
|
|
7609
|
+
const tempPath = join10(dirname6(filepath), `.${basename5(filepath)}.tmp.${tempSuffix}`);
|
|
7164
7610
|
try {
|
|
7165
7611
|
let mode;
|
|
7166
7612
|
let fileExists2 = false;
|
|
7167
7613
|
try {
|
|
7168
|
-
const stats = await
|
|
7614
|
+
const stats = await stat7(filepath);
|
|
7169
7615
|
mode = stats.mode;
|
|
7170
7616
|
fileExists2 = true;
|
|
7171
7617
|
} catch {
|
|
7172
7618
|
}
|
|
7173
|
-
if (!fileExists2 &&
|
|
7619
|
+
if (!fileExists2 && basename5(filepath).startsWith(".")) {
|
|
7174
7620
|
mode = 384;
|
|
7175
7621
|
}
|
|
7176
|
-
await
|
|
7622
|
+
await writeFile6(tempPath, content, { encoding: "utf-8", mode });
|
|
7177
7623
|
await rename(tempPath, filepath);
|
|
7178
7624
|
} catch (error) {
|
|
7179
7625
|
try {
|
|
@@ -7211,7 +7657,7 @@ var init_redactor = __esm({
|
|
|
7211
7657
|
};
|
|
7212
7658
|
redactFile = async (filepath, matches, placeholderMap) => {
|
|
7213
7659
|
const expandedPath = expandPath(filepath);
|
|
7214
|
-
const content = await
|
|
7660
|
+
const content = await readFile8(expandedPath, "utf-8");
|
|
7215
7661
|
const result = redactContent(content, matches, placeholderMap);
|
|
7216
7662
|
await atomicWriteFile(expandedPath, result.redactedContent);
|
|
7217
7663
|
return result;
|
|
@@ -7260,7 +7706,7 @@ var init_redactor = __esm({
|
|
|
7260
7706
|
if (!await pathExists(expandedPath)) {
|
|
7261
7707
|
continue;
|
|
7262
7708
|
}
|
|
7263
|
-
const content = await
|
|
7709
|
+
const content = await readFile8(expandedPath, "utf-8");
|
|
7264
7710
|
const result = restoreContent(content, secrets);
|
|
7265
7711
|
if (result.restored > 0) {
|
|
7266
7712
|
await atomicWriteFile(expandedPath, result.restoredContent);
|
|
@@ -7282,7 +7728,7 @@ var init_redactor = __esm({
|
|
|
7282
7728
|
|
|
7283
7729
|
// src/lib/secrets/external.ts
|
|
7284
7730
|
import { execFile as execFile8 } from "child_process";
|
|
7285
|
-
import { promisify as
|
|
7731
|
+
import { promisify as promisify8 } from "util";
|
|
7286
7732
|
import { z as z5 } from "zod";
|
|
7287
7733
|
var execFileAsync8, gitleaksResultSchema, gitleaksOutputSchema, isGitleaksInstalled, isTrufflehogInstalled, mapGitleaksSeverity, generatePlaceholderFromRule, GITLEAKS_FAILED, scanWithGitleaks, scanWithScanner;
|
|
7288
7734
|
var init_external = __esm({
|
|
@@ -7290,7 +7736,7 @@ var init_external = __esm({
|
|
|
7290
7736
|
"use strict";
|
|
7291
7737
|
init_scanner();
|
|
7292
7738
|
init_paths();
|
|
7293
|
-
execFileAsync8 =
|
|
7739
|
+
execFileAsync8 = promisify8(execFile8);
|
|
7294
7740
|
gitleaksResultSchema = z5.object({
|
|
7295
7741
|
Description: z5.string(),
|
|
7296
7742
|
StartLine: z5.number(),
|
|
@@ -7600,52 +8046,495 @@ var init_secrets = __esm({
|
|
|
7600
8046
|
}
|
|
7601
8047
|
});
|
|
7602
8048
|
|
|
7603
|
-
// src/
|
|
7604
|
-
|
|
7605
|
-
|
|
7606
|
-
|
|
7607
|
-
|
|
7608
|
-
});
|
|
7609
|
-
import { Command } from "commander";
|
|
7610
|
-
import { join as join10 } from "path";
|
|
7611
|
-
import { chmod as chmod2, stat as stat7 } from "fs/promises";
|
|
7612
|
-
var fixSSHPermissions, fixGPGPermissions, prepareFilesToRestore, restoreFilesInternal, runInteractiveRestore, displaySecretSummary, runRestore, runRestoreCommand, restoreCommand;
|
|
7613
|
-
var init_restore = __esm({
|
|
7614
|
-
"src/commands/restore.ts"() {
|
|
8049
|
+
// src/lib/trackPipeline.ts
|
|
8050
|
+
import { basename as basename6 } from "path";
|
|
8051
|
+
var PRIVATE_KEY_PATTERNS2, SENSITIVE_FILE_PATTERNS2, isPrivateKey, isSensitiveFile2, displaySecretWarning, handleFileSizePolicy, applySecretPolicy, preparePathsForTracking;
|
|
8052
|
+
var init_trackPipeline = __esm({
|
|
8053
|
+
"src/lib/trackPipeline.ts"() {
|
|
7615
8054
|
"use strict";
|
|
7616
|
-
init_theme();
|
|
7617
8055
|
init_ui();
|
|
7618
8056
|
init_paths();
|
|
7619
8057
|
init_manifest();
|
|
7620
|
-
init_config();
|
|
7621
8058
|
init_files();
|
|
7622
|
-
|
|
7623
|
-
|
|
8059
|
+
init_binary();
|
|
8060
|
+
init_tuckignore();
|
|
7624
8061
|
init_errors();
|
|
7625
|
-
|
|
8062
|
+
init_audit();
|
|
7626
8063
|
init_secrets();
|
|
7627
|
-
|
|
7628
|
-
|
|
7629
|
-
|
|
7630
|
-
|
|
8064
|
+
PRIVATE_KEY_PATTERNS2 = [
|
|
8065
|
+
/^id_rsa$/,
|
|
8066
|
+
/^id_dsa$/,
|
|
8067
|
+
/^id_ecdsa$/,
|
|
8068
|
+
/^id_ed25519$/,
|
|
8069
|
+
/^id_.*$/,
|
|
8070
|
+
/\.pem$/,
|
|
8071
|
+
/\.key$/,
|
|
8072
|
+
/^.*_key$/
|
|
8073
|
+
];
|
|
8074
|
+
SENSITIVE_FILE_PATTERNS2 = [
|
|
8075
|
+
/^\.netrc$/,
|
|
8076
|
+
/^\.aws\/credentials$/,
|
|
8077
|
+
/^\.docker\/config\.json$/,
|
|
8078
|
+
/^\.npmrc$/,
|
|
8079
|
+
/^\.pypirc$/,
|
|
8080
|
+
/^\.kube\/config$/,
|
|
8081
|
+
/^\.ssh\/config$/,
|
|
8082
|
+
/^\.gnupg\//,
|
|
8083
|
+
/credentials/i,
|
|
8084
|
+
/secrets?/i,
|
|
8085
|
+
/tokens?\.json$/i,
|
|
8086
|
+
/\.env$/,
|
|
8087
|
+
/\.env\./
|
|
8088
|
+
];
|
|
8089
|
+
isPrivateKey = (collapsedPath) => {
|
|
8090
|
+
const name = basename6(collapsedPath);
|
|
8091
|
+
if (collapsedPath.includes(".ssh/") && !name.endsWith(".pub")) {
|
|
8092
|
+
return PRIVATE_KEY_PATTERNS2.some((pattern) => pattern.test(name));
|
|
7631
8093
|
}
|
|
7632
|
-
|
|
7633
|
-
|
|
7634
|
-
|
|
7635
|
-
|
|
7636
|
-
|
|
7637
|
-
|
|
8094
|
+
return name.endsWith(".pem") || name.endsWith(".key");
|
|
8095
|
+
};
|
|
8096
|
+
isSensitiveFile2 = (collapsedPath) => {
|
|
8097
|
+
const pathToTest = collapsedPath.startsWith("~/") ? collapsedPath.slice(2) : collapsedPath;
|
|
8098
|
+
return SENSITIVE_FILE_PATTERNS2.some((pattern) => pattern.test(pathToTest));
|
|
8099
|
+
};
|
|
8100
|
+
displaySecretWarning = (summary) => {
|
|
8101
|
+
console.log();
|
|
8102
|
+
console.log(colors.error(colors.bold(` Security Warning: Found ${summary.totalSecrets} potential secret(s)`)));
|
|
8103
|
+
console.log();
|
|
8104
|
+
for (const result of summary.results) {
|
|
8105
|
+
console.log(` ${colors.brand(result.collapsedPath)}`);
|
|
8106
|
+
for (const match of result.matches) {
|
|
8107
|
+
const severityColor = match.severity === "critical" ? colors.error : match.severity === "high" ? colors.warning : match.severity === "medium" ? colors.info : colors.muted;
|
|
8108
|
+
console.log(
|
|
8109
|
+
` ${colors.muted(`Line ${match.line}:`)} ${match.redactedValue} ${severityColor(`[${match.severity}]`)}`
|
|
8110
|
+
);
|
|
7638
8111
|
}
|
|
7639
|
-
|
|
8112
|
+
console.log();
|
|
7640
8113
|
}
|
|
7641
8114
|
};
|
|
7642
|
-
|
|
7643
|
-
const
|
|
7644
|
-
|
|
7645
|
-
|
|
8115
|
+
handleFileSizePolicy = async (collapsedPath, sizeBytes, tuckDir, secretHandling) => {
|
|
8116
|
+
const sizeLabel = formatFileSize(sizeBytes);
|
|
8117
|
+
const isWarn = sizeBytes >= 50 * 1024 * 1024;
|
|
8118
|
+
const isBlock = sizeBytes >= 100 * 1024 * 1024;
|
|
8119
|
+
if (!isWarn && !isBlock) {
|
|
8120
|
+
return true;
|
|
7646
8121
|
}
|
|
7647
|
-
|
|
7648
|
-
|
|
8122
|
+
if (secretHandling === "strict") {
|
|
8123
|
+
if (isBlock) {
|
|
8124
|
+
throw new OperationCancelledError("file size exceeds GitHub limit");
|
|
8125
|
+
}
|
|
8126
|
+
logger.warning(`File ${collapsedPath} is ${sizeLabel}. GitHub recommends files under 50MB.`);
|
|
8127
|
+
return true;
|
|
8128
|
+
}
|
|
8129
|
+
if (isBlock) {
|
|
8130
|
+
logger.warning(`File ${collapsedPath} is ${sizeLabel} (exceeds GitHub's 100MB limit)`);
|
|
8131
|
+
const action2 = await prompts.select("How would you like to proceed?", [
|
|
8132
|
+
{ value: "ignore", label: "Add to .tuckignore and skip" },
|
|
8133
|
+
{ value: "cancel", label: "Cancel operation" }
|
|
8134
|
+
]);
|
|
8135
|
+
if (action2 === "ignore") {
|
|
8136
|
+
await addToTuckignore(tuckDir, collapsedPath);
|
|
8137
|
+
logger.success(`Added ${collapsedPath} to .tuckignore`);
|
|
8138
|
+
return false;
|
|
8139
|
+
}
|
|
8140
|
+
throw new OperationCancelledError("file size exceeds GitHub limit");
|
|
8141
|
+
}
|
|
8142
|
+
logger.warning(`File ${collapsedPath} is ${sizeLabel}. GitHub recommends files under 50MB.`);
|
|
8143
|
+
const action = await prompts.select("How would you like to proceed?", [
|
|
8144
|
+
{ value: "continue", label: "Track it anyway" },
|
|
8145
|
+
{ value: "ignore", label: "Add to .tuckignore and skip" },
|
|
8146
|
+
{ value: "cancel", label: "Cancel operation" }
|
|
8147
|
+
]);
|
|
8148
|
+
if (action === "ignore") {
|
|
8149
|
+
await addToTuckignore(tuckDir, collapsedPath);
|
|
8150
|
+
logger.success(`Added ${collapsedPath} to .tuckignore`);
|
|
8151
|
+
return false;
|
|
8152
|
+
}
|
|
8153
|
+
if (action === "cancel") {
|
|
8154
|
+
throw new OperationCancelledError("file size warning");
|
|
8155
|
+
}
|
|
8156
|
+
return true;
|
|
8157
|
+
};
|
|
8158
|
+
applySecretPolicy = async (files, tuckDir, options) => {
|
|
8159
|
+
if (files.length === 0) {
|
|
8160
|
+
return files;
|
|
8161
|
+
}
|
|
8162
|
+
if (!await isSecretScanningEnabled(tuckDir)) {
|
|
8163
|
+
return files;
|
|
8164
|
+
}
|
|
8165
|
+
const secretHandling = options.secretHandling ?? "interactive";
|
|
8166
|
+
if (options.force) {
|
|
8167
|
+
if (secretHandling === "interactive") {
|
|
8168
|
+
const confirmed2 = await prompts.confirmDangerous(
|
|
8169
|
+
"Using --force bypasses secret scanning.\nAny secrets in these files may be committed to git and potentially exposed.",
|
|
8170
|
+
"force"
|
|
8171
|
+
);
|
|
8172
|
+
if (!confirmed2) {
|
|
8173
|
+
logger.info("Operation cancelled");
|
|
8174
|
+
return [];
|
|
8175
|
+
}
|
|
8176
|
+
}
|
|
8177
|
+
logger.warning("Secret scanning bypassed with --force");
|
|
8178
|
+
await logForceSecretBypass(options.forceBypassCommand ?? "tuck add --force", files.length);
|
|
8179
|
+
return files;
|
|
8180
|
+
}
|
|
8181
|
+
const filePaths = files.map((f) => expandPath(f.source));
|
|
8182
|
+
const summary = await scanForSecrets(filePaths, tuckDir);
|
|
8183
|
+
if (summary.filesWithSecrets === 0) {
|
|
8184
|
+
return files;
|
|
8185
|
+
}
|
|
8186
|
+
if (secretHandling === "strict") {
|
|
8187
|
+
const shouldBlock = await shouldBlockOnSecrets(tuckDir);
|
|
8188
|
+
if (shouldBlock) {
|
|
8189
|
+
const filesWithSecrets = summary.results.filter((result) => result.hasSecrets).map((result) => collapsePath(result.path));
|
|
8190
|
+
throw new SecretsDetectedError(summary.totalSecrets, filesWithSecrets);
|
|
8191
|
+
}
|
|
8192
|
+
logger.warning("Secrets detected but blockOnSecrets is disabled - proceeding with tracking");
|
|
8193
|
+
logger.warning("Make sure your repository is private!");
|
|
8194
|
+
return files;
|
|
8195
|
+
}
|
|
8196
|
+
displaySecretWarning(summary);
|
|
8197
|
+
const action = await prompts.select("How would you like to proceed?", [
|
|
8198
|
+
{ value: "abort", label: "Abort operation", hint: "Do not track these files" },
|
|
8199
|
+
{
|
|
8200
|
+
value: "redact",
|
|
8201
|
+
label: "Replace with placeholders",
|
|
8202
|
+
hint: "Store originals in secrets.local.json (never committed)"
|
|
8203
|
+
},
|
|
8204
|
+
{ value: "ignore", label: "Add files to .tuckignore", hint: "Skip these files permanently" },
|
|
8205
|
+
{ value: "proceed", label: "Proceed anyway", hint: "Track files with secrets (dangerous!)" }
|
|
8206
|
+
]);
|
|
8207
|
+
if (action === "abort") {
|
|
8208
|
+
logger.info("Operation aborted");
|
|
8209
|
+
return [];
|
|
8210
|
+
}
|
|
8211
|
+
if (action === "redact") {
|
|
8212
|
+
const redactionMaps = await processSecretsForRedaction(summary.results, tuckDir);
|
|
8213
|
+
let totalRedacted = 0;
|
|
8214
|
+
for (const result of summary.results) {
|
|
8215
|
+
const placeholderMap = redactionMaps.get(result.path);
|
|
8216
|
+
if (placeholderMap && placeholderMap.size > 0) {
|
|
8217
|
+
const redactionResult = await redactFile(result.path, result.matches, placeholderMap);
|
|
8218
|
+
totalRedacted += redactionResult.replacements.length;
|
|
8219
|
+
}
|
|
8220
|
+
}
|
|
8221
|
+
console.log();
|
|
8222
|
+
logger.success(`Replaced ${totalRedacted} secret(s) with placeholders`);
|
|
8223
|
+
logger.dim(`Secrets stored in: ${collapsePath(getSecretsPath(tuckDir))} (never committed)`);
|
|
8224
|
+
logger.dim("Run 'tuck secrets list' to see stored secrets");
|
|
8225
|
+
console.log();
|
|
8226
|
+
return files;
|
|
8227
|
+
}
|
|
8228
|
+
if (action === "ignore") {
|
|
8229
|
+
const filesWithSecrets = new Set(summary.results.map((result) => result.collapsedPath));
|
|
8230
|
+
for (const file of files) {
|
|
8231
|
+
const normalizedSource = collapsePath(file.source);
|
|
8232
|
+
if (filesWithSecrets.has(normalizedSource)) {
|
|
8233
|
+
await addToTuckignore(tuckDir, file.source);
|
|
8234
|
+
logger.success(`Added ${normalizedSource} to .tuckignore`);
|
|
8235
|
+
}
|
|
8236
|
+
}
|
|
8237
|
+
const remaining = files.filter((file) => !filesWithSecrets.has(collapsePath(file.source)));
|
|
8238
|
+
if (remaining.length === 0) {
|
|
8239
|
+
logger.info("No files remaining to track");
|
|
8240
|
+
}
|
|
8241
|
+
return remaining;
|
|
8242
|
+
}
|
|
8243
|
+
const confirmed = await prompts.confirm(
|
|
8244
|
+
colors.error("Are you SURE you want to track files containing secrets?"),
|
|
8245
|
+
false
|
|
8246
|
+
);
|
|
8247
|
+
if (!confirmed) {
|
|
8248
|
+
logger.info("Operation aborted");
|
|
8249
|
+
return [];
|
|
8250
|
+
}
|
|
8251
|
+
logger.warning("Proceeding with secrets - be careful not to push to a public repository!");
|
|
8252
|
+
return files;
|
|
8253
|
+
};
|
|
8254
|
+
preparePathsForTracking = async (candidates, tuckDir, options = {}) => {
|
|
8255
|
+
const secretHandling = options.secretHandling ?? "interactive";
|
|
8256
|
+
const prepared = [];
|
|
8257
|
+
for (const candidate of candidates) {
|
|
8258
|
+
const expandedPath = expandPath(candidate.path);
|
|
8259
|
+
const collapsedPath = collapsePath(expandedPath);
|
|
8260
|
+
validateSafeSourcePath(collapsedPath);
|
|
8261
|
+
if (isPrivateKey(collapsedPath)) {
|
|
8262
|
+
throw new PrivateKeyError(candidate.path);
|
|
8263
|
+
}
|
|
8264
|
+
if (!await pathExists(expandedPath)) {
|
|
8265
|
+
throw new FileNotFoundError(candidate.path);
|
|
8266
|
+
}
|
|
8267
|
+
if (!options.allowAlreadyTracked && await isFileTracked(tuckDir, collapsedPath)) {
|
|
8268
|
+
throw new FileAlreadyTrackedError(candidate.path);
|
|
8269
|
+
}
|
|
8270
|
+
if (await isIgnored(tuckDir, collapsedPath)) {
|
|
8271
|
+
logger.info(`Skipping ${collapsedPath} (in .tuckignore)`);
|
|
8272
|
+
continue;
|
|
8273
|
+
}
|
|
8274
|
+
if (await shouldExcludeFromBin(expandedPath)) {
|
|
8275
|
+
const sizeCheck2 = await checkFileSizeThreshold(expandedPath);
|
|
8276
|
+
logger.info(
|
|
8277
|
+
`Skipping binary executable: ${collapsedPath}${sizeCheck2.size > 0 ? ` (${formatFileSize(sizeCheck2.size)})` : ""} - Add to .tuckignore to customize`
|
|
8278
|
+
);
|
|
8279
|
+
continue;
|
|
8280
|
+
}
|
|
8281
|
+
const sizeCheck = await checkFileSizeThreshold(expandedPath);
|
|
8282
|
+
const shouldTrack = await handleFileSizePolicy(
|
|
8283
|
+
collapsedPath,
|
|
8284
|
+
sizeCheck.size,
|
|
8285
|
+
tuckDir,
|
|
8286
|
+
secretHandling
|
|
8287
|
+
);
|
|
8288
|
+
if (!shouldTrack) {
|
|
8289
|
+
continue;
|
|
8290
|
+
}
|
|
8291
|
+
const isDir = await isDirectory(expandedPath);
|
|
8292
|
+
const fileCount = isDir ? await getDirectoryFileCount(expandedPath) : 1;
|
|
8293
|
+
const category = candidate.category || options.category || detectCategory(expandedPath);
|
|
8294
|
+
const customName = candidate.name ?? options.name;
|
|
8295
|
+
const nameOverride = customName ? sanitizeFilename(customName) : void 0;
|
|
8296
|
+
const filename = nameOverride || sanitizeFilename(expandedPath);
|
|
8297
|
+
prepared.push({
|
|
8298
|
+
source: collapsedPath,
|
|
8299
|
+
destination: getDestinationPathFromSource(tuckDir, category, expandedPath, nameOverride),
|
|
8300
|
+
category,
|
|
8301
|
+
filename,
|
|
8302
|
+
nameOverride,
|
|
8303
|
+
isDir,
|
|
8304
|
+
fileCount,
|
|
8305
|
+
sensitive: isSensitiveFile2(collapsedPath)
|
|
8306
|
+
});
|
|
8307
|
+
}
|
|
8308
|
+
return applySecretPolicy(prepared, tuckDir, options);
|
|
8309
|
+
};
|
|
8310
|
+
}
|
|
8311
|
+
});
|
|
8312
|
+
|
|
8313
|
+
// src/lib/backup.ts
|
|
8314
|
+
import { join as join11 } from "path";
|
|
8315
|
+
import { copy as copy2, ensureDir as ensureDir4, pathExists as pathExists2 } from "fs-extra";
|
|
8316
|
+
var getBackupDir, formatDateForBackup, getTimestampedBackupDir, createBackup;
|
|
8317
|
+
var init_backup = __esm({
|
|
8318
|
+
"src/lib/backup.ts"() {
|
|
8319
|
+
"use strict";
|
|
8320
|
+
init_constants();
|
|
8321
|
+
init_paths();
|
|
8322
|
+
init_platform();
|
|
8323
|
+
init_config();
|
|
8324
|
+
init_errors();
|
|
8325
|
+
getBackupDir = async (customBackupDir, tuckDir) => {
|
|
8326
|
+
if (customBackupDir) {
|
|
8327
|
+
const resolved = expandPath(customBackupDir);
|
|
8328
|
+
if (!isPathWithinHome(resolved)) {
|
|
8329
|
+
throw new BackupError(
|
|
8330
|
+
`Unsafe backup directory: ${customBackupDir} - backup directory must be within home directory`
|
|
8331
|
+
);
|
|
8332
|
+
}
|
|
8333
|
+
return resolved;
|
|
8334
|
+
}
|
|
8335
|
+
try {
|
|
8336
|
+
const config = await loadConfig(tuckDir);
|
|
8337
|
+
const backupDir = config.files.backupDir || BACKUP_DIR;
|
|
8338
|
+
const resolved = expandPath(backupDir);
|
|
8339
|
+
if (!isPathWithinHome(resolved)) {
|
|
8340
|
+
throw new BackupError(
|
|
8341
|
+
`Unsafe backup directory: ${backupDir} - backup directory must be within home directory`
|
|
8342
|
+
);
|
|
8343
|
+
}
|
|
8344
|
+
return resolved;
|
|
8345
|
+
} catch (error) {
|
|
8346
|
+
if (error instanceof BackupError) {
|
|
8347
|
+
throw error;
|
|
8348
|
+
}
|
|
8349
|
+
return expandPath(BACKUP_DIR);
|
|
8350
|
+
}
|
|
8351
|
+
};
|
|
8352
|
+
formatDateForBackup = (date) => {
|
|
8353
|
+
return date.toISOString().slice(0, 10);
|
|
8354
|
+
};
|
|
8355
|
+
getTimestampedBackupDir = (date, backupRoot) => {
|
|
8356
|
+
const timestamp = formatDateForBackup(date);
|
|
8357
|
+
return join11(backupRoot, timestamp);
|
|
8358
|
+
};
|
|
8359
|
+
createBackup = async (sourcePath, customBackupDir, tuckDir) => {
|
|
8360
|
+
const expandedSource = expandPath(sourcePath);
|
|
8361
|
+
const date = /* @__PURE__ */ new Date();
|
|
8362
|
+
if (!await pathExists(expandedSource)) {
|
|
8363
|
+
throw new Error(`Source path does not exist: ${sourcePath}`);
|
|
8364
|
+
}
|
|
8365
|
+
const backupRoot = await getBackupDir(customBackupDir, tuckDir);
|
|
8366
|
+
const datedBackupDir = getTimestampedBackupDir(date, backupRoot);
|
|
8367
|
+
await ensureDir4(datedBackupDir);
|
|
8368
|
+
const collapsed = toPosixPath(collapsePath(expandedSource));
|
|
8369
|
+
const backupName = collapsed.replace(/^~\//, "").replace(/\//g, "_").replace(/^\./, "dot-");
|
|
8370
|
+
const timestamp = date.toISOString().replace(/[:.]/g, "-").slice(11, 19);
|
|
8371
|
+
const backupPath = join11(datedBackupDir, `${backupName}_${timestamp}`);
|
|
8372
|
+
await copy2(expandedSource, backupPath, { overwrite: true });
|
|
8373
|
+
return {
|
|
8374
|
+
originalPath: expandedSource,
|
|
8375
|
+
backupPath,
|
|
8376
|
+
date
|
|
8377
|
+
};
|
|
8378
|
+
};
|
|
8379
|
+
}
|
|
8380
|
+
});
|
|
8381
|
+
|
|
8382
|
+
// src/lib/hooks.ts
|
|
8383
|
+
import { exec, execSync } from "child_process";
|
|
8384
|
+
import { promisify as promisify9 } from "util";
|
|
8385
|
+
import chalk4 from "chalk";
|
|
8386
|
+
var execAsync, getWindowsShell, runHook, runPreSyncHook, runPostSyncHook, runPreRestoreHook, runPostRestoreHook;
|
|
8387
|
+
var init_hooks = __esm({
|
|
8388
|
+
"src/lib/hooks.ts"() {
|
|
8389
|
+
"use strict";
|
|
8390
|
+
init_config();
|
|
8391
|
+
init_logger();
|
|
8392
|
+
init_prompts();
|
|
8393
|
+
init_platform();
|
|
8394
|
+
execAsync = promisify9(exec);
|
|
8395
|
+
getWindowsShell = () => {
|
|
8396
|
+
try {
|
|
8397
|
+
execSync("pwsh -Version", { stdio: "ignore" });
|
|
8398
|
+
return "pwsh";
|
|
8399
|
+
} catch {
|
|
8400
|
+
}
|
|
8401
|
+
try {
|
|
8402
|
+
execSync("powershell.exe -Version", { stdio: "ignore" });
|
|
8403
|
+
return "powershell.exe";
|
|
8404
|
+
} catch {
|
|
8405
|
+
}
|
|
8406
|
+
return "cmd.exe";
|
|
8407
|
+
};
|
|
8408
|
+
runHook = async (hookType, tuckDir, options) => {
|
|
8409
|
+
if (options?.skipHooks) {
|
|
8410
|
+
return { success: true, skipped: true };
|
|
8411
|
+
}
|
|
8412
|
+
const config = await loadConfig(tuckDir);
|
|
8413
|
+
const command = config.hooks[hookType];
|
|
8414
|
+
if (!command) {
|
|
8415
|
+
return { success: true };
|
|
8416
|
+
}
|
|
8417
|
+
if (!options?.trustHooks) {
|
|
8418
|
+
console.log();
|
|
8419
|
+
console.log(chalk4.yellow.bold("WARNING: Hook Execution"));
|
|
8420
|
+
console.log(chalk4.dim("\u2500".repeat(50)));
|
|
8421
|
+
console.log(chalk4.white(`Hook type: ${chalk4.cyan(hookType)}`));
|
|
8422
|
+
console.log(chalk4.white("Command:"));
|
|
8423
|
+
console.log(chalk4.red(` ${command}`));
|
|
8424
|
+
console.log(chalk4.dim("\u2500".repeat(50)));
|
|
8425
|
+
console.log(
|
|
8426
|
+
chalk4.yellow(
|
|
8427
|
+
"SECURITY: Hooks can execute arbitrary commands on your system."
|
|
8428
|
+
)
|
|
8429
|
+
);
|
|
8430
|
+
console.log(
|
|
8431
|
+
chalk4.yellow(
|
|
8432
|
+
"Only proceed if you trust the source of this configuration."
|
|
8433
|
+
)
|
|
8434
|
+
);
|
|
8435
|
+
console.log();
|
|
8436
|
+
const confirmed = await prompts.confirm(
|
|
8437
|
+
"Execute this hook?",
|
|
8438
|
+
false
|
|
8439
|
+
// Default to NO for safety
|
|
8440
|
+
);
|
|
8441
|
+
if (!confirmed) {
|
|
8442
|
+
logger.warning(`Hook ${hookType} skipped by user`);
|
|
8443
|
+
return { success: true, skipped: true };
|
|
8444
|
+
}
|
|
8445
|
+
}
|
|
8446
|
+
if (!options?.silent) {
|
|
8447
|
+
logger.dim(`Running ${hookType} hook...`);
|
|
8448
|
+
}
|
|
8449
|
+
try {
|
|
8450
|
+
const shellOptions = IS_WINDOWS ? { shell: getWindowsShell() } : {};
|
|
8451
|
+
const { stdout, stderr } = await execAsync(command, {
|
|
8452
|
+
cwd: tuckDir,
|
|
8453
|
+
timeout: 3e4,
|
|
8454
|
+
// 30 second timeout
|
|
8455
|
+
env: {
|
|
8456
|
+
...process.env,
|
|
8457
|
+
TUCK_DIR: tuckDir,
|
|
8458
|
+
TUCK_HOOK: hookType
|
|
8459
|
+
},
|
|
8460
|
+
...shellOptions
|
|
8461
|
+
});
|
|
8462
|
+
if (stdout && !options?.silent) {
|
|
8463
|
+
logger.dim(stdout.trim());
|
|
8464
|
+
}
|
|
8465
|
+
if (stderr && !options?.silent) {
|
|
8466
|
+
logger.warning(stderr.trim());
|
|
8467
|
+
}
|
|
8468
|
+
return { success: true, output: stdout };
|
|
8469
|
+
} catch (error) {
|
|
8470
|
+
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
8471
|
+
if (!options?.silent) {
|
|
8472
|
+
logger.error(`Hook ${hookType} failed: ${errorMessage}`);
|
|
8473
|
+
}
|
|
8474
|
+
return { success: false, error: errorMessage };
|
|
8475
|
+
}
|
|
8476
|
+
};
|
|
8477
|
+
runPreSyncHook = async (tuckDir, options) => {
|
|
8478
|
+
return runHook("preSync", tuckDir, options);
|
|
8479
|
+
};
|
|
8480
|
+
runPostSyncHook = async (tuckDir, options) => {
|
|
8481
|
+
return runHook("postSync", tuckDir, options);
|
|
8482
|
+
};
|
|
8483
|
+
runPreRestoreHook = async (tuckDir, options) => {
|
|
8484
|
+
return runHook("preRestore", tuckDir, options);
|
|
8485
|
+
};
|
|
8486
|
+
runPostRestoreHook = async (tuckDir, options) => {
|
|
8487
|
+
return runHook("postRestore", tuckDir, options);
|
|
8488
|
+
};
|
|
8489
|
+
}
|
|
8490
|
+
});
|
|
8491
|
+
|
|
8492
|
+
// src/commands/restore.ts
|
|
8493
|
+
var restore_exports = {};
|
|
8494
|
+
__export(restore_exports, {
|
|
8495
|
+
restoreCommand: () => restoreCommand,
|
|
8496
|
+
runRestore: () => runRestore
|
|
8497
|
+
});
|
|
8498
|
+
import { Command } from "commander";
|
|
8499
|
+
import { join as join12 } from "path";
|
|
8500
|
+
import { chmod as chmod2, stat as stat8 } from "fs/promises";
|
|
8501
|
+
var fixSSHPermissions, fixGPGPermissions, prepareFilesToRestore, restoreFilesInternal, runInteractiveRestore, displaySecretSummary, runRestore, runRestoreCommand, restoreCommand;
|
|
8502
|
+
var init_restore = __esm({
|
|
8503
|
+
"src/commands/restore.ts"() {
|
|
8504
|
+
"use strict";
|
|
8505
|
+
init_theme();
|
|
8506
|
+
init_ui();
|
|
8507
|
+
init_paths();
|
|
8508
|
+
init_manifest();
|
|
8509
|
+
init_config();
|
|
8510
|
+
init_files();
|
|
8511
|
+
init_backup();
|
|
8512
|
+
init_hooks();
|
|
8513
|
+
init_errors();
|
|
8514
|
+
init_constants();
|
|
8515
|
+
init_secrets();
|
|
8516
|
+
fixSSHPermissions = async (path) => {
|
|
8517
|
+
const expandedPath = expandPath(path);
|
|
8518
|
+
if (!path.includes(".ssh/") && !path.endsWith(".ssh")) {
|
|
8519
|
+
return;
|
|
8520
|
+
}
|
|
8521
|
+
try {
|
|
8522
|
+
const stats = await stat8(expandedPath);
|
|
8523
|
+
if (stats.isDirectory()) {
|
|
8524
|
+
await chmod2(expandedPath, 448);
|
|
8525
|
+
} else {
|
|
8526
|
+
await chmod2(expandedPath, 384);
|
|
8527
|
+
}
|
|
8528
|
+
} catch {
|
|
8529
|
+
}
|
|
8530
|
+
};
|
|
8531
|
+
fixGPGPermissions = async (path) => {
|
|
8532
|
+
const expandedPath = expandPath(path);
|
|
8533
|
+
if (!path.includes(".gnupg/") && !path.endsWith(".gnupg")) {
|
|
8534
|
+
return;
|
|
8535
|
+
}
|
|
8536
|
+
try {
|
|
8537
|
+
const stats = await stat8(expandedPath);
|
|
7649
8538
|
if (stats.isDirectory()) {
|
|
7650
8539
|
await chmod2(expandedPath, 448);
|
|
7651
8540
|
} else {
|
|
@@ -7665,21 +8554,29 @@ var init_restore = __esm({
|
|
|
7665
8554
|
if (!tracked) {
|
|
7666
8555
|
throw new FileNotFoundError(`Not tracked: ${path}`);
|
|
7667
8556
|
}
|
|
8557
|
+
validateSafeSourcePath(tracked.file.source);
|
|
8558
|
+
validateSafeManifestDestination(tracked.file.destination);
|
|
8559
|
+
const repositoryPath = join12(tuckDir, tracked.file.destination);
|
|
8560
|
+
validatePathWithinRoot(repositoryPath, tuckDir, "restore source");
|
|
7668
8561
|
filesToRestore.push({
|
|
7669
8562
|
id: tracked.id,
|
|
7670
8563
|
source: tracked.file.source,
|
|
7671
|
-
destination:
|
|
8564
|
+
destination: repositoryPath,
|
|
7672
8565
|
category: tracked.file.category,
|
|
7673
8566
|
existsAtTarget: await pathExists(expandedPath)
|
|
7674
8567
|
});
|
|
7675
8568
|
}
|
|
7676
8569
|
} else {
|
|
7677
8570
|
for (const [id, file] of Object.entries(allFiles)) {
|
|
8571
|
+
validateSafeSourcePath(file.source);
|
|
8572
|
+
validateSafeManifestDestination(file.destination);
|
|
8573
|
+
const repositoryPath = join12(tuckDir, file.destination);
|
|
8574
|
+
validatePathWithinRoot(repositoryPath, tuckDir, "restore source");
|
|
7678
8575
|
const targetPath = expandPath(file.source);
|
|
7679
8576
|
filesToRestore.push({
|
|
7680
8577
|
id,
|
|
7681
8578
|
source: file.source,
|
|
7682
|
-
destination:
|
|
8579
|
+
destination: repositoryPath,
|
|
7683
8580
|
category: file.category,
|
|
7684
8581
|
existsAtTarget: await pathExists(targetPath)
|
|
7685
8582
|
});
|
|
@@ -7699,6 +8596,8 @@ var init_restore = __esm({
|
|
|
7699
8596
|
let restoredCount = 0;
|
|
7700
8597
|
const restoredPaths = [];
|
|
7701
8598
|
for (const file of files) {
|
|
8599
|
+
validateSafeSourcePath(file.source);
|
|
8600
|
+
validatePathWithinRoot(file.destination, tuckDir, "restore source");
|
|
7702
8601
|
const targetPath = expandPath(file.source);
|
|
7703
8602
|
if (!await pathExists(file.destination)) {
|
|
7704
8603
|
logger.warning(`Source not found in repository: ${file.source}`);
|
|
@@ -7714,7 +8613,7 @@ var init_restore = __esm({
|
|
|
7714
8613
|
}
|
|
7715
8614
|
if (shouldBackup && file.existsAtTarget) {
|
|
7716
8615
|
await withSpinner(`Backing up ${file.source}...`, async () => {
|
|
7717
|
-
await createBackup(targetPath);
|
|
8616
|
+
await createBackup(targetPath, config.files.backupDir, tuckDir);
|
|
7718
8617
|
});
|
|
7719
8618
|
}
|
|
7720
8619
|
await withSpinner(`Restoring ${file.source}...`, async () => {
|
|
@@ -7866,135 +8765,22 @@ var init_restore = __esm({
|
|
|
7866
8765
|
return;
|
|
7867
8766
|
}
|
|
7868
8767
|
if (options.dryRun) {
|
|
7869
|
-
logger.heading("Dry run - would restore:");
|
|
7870
|
-
} else {
|
|
7871
|
-
logger.heading("Restoring:");
|
|
7872
|
-
}
|
|
7873
|
-
const result = await restoreFilesInternal(tuckDir, files, options);
|
|
7874
|
-
logger.blank();
|
|
7875
|
-
if (options.dryRun) {
|
|
7876
|
-
logger.info(`Would restore ${files.length} file${files.length > 1 ? "s" : ""}`);
|
|
7877
|
-
} else {
|
|
7878
|
-
displaySecretSummary(result);
|
|
7879
|
-
logger.success(`Restored ${result.restoredCount} file${result.restoredCount !== 1 ? "s" : ""}`);
|
|
7880
|
-
}
|
|
7881
|
-
};
|
|
7882
|
-
restoreCommand = new Command("restore").description("Restore dotfiles to the system").argument("[paths...]", "Paths to restore (or use --all)").option("-a, --all", "Restore all tracked files").option("--symlink", "Create symlinks
|
|
7883
|
-
await runRestoreCommand(paths, options);
|
|
7884
|
-
});
|
|
7885
|
-
}
|
|
7886
|
-
});
|
|
7887
|
-
|
|
7888
|
-
// src/lib/tuckignore.ts
|
|
7889
|
-
import { join as join12 } from "path";
|
|
7890
|
-
import { readFile as readFile9, writeFile as writeFile7, appendFile } from "fs/promises";
|
|
7891
|
-
var TUCKIGNORE_FILENAME, TUCKIGNORE_HEADER, getTuckignorePath, loadTuckignore, addToTuckignore, isIgnored;
|
|
7892
|
-
var init_tuckignore = __esm({
|
|
7893
|
-
"src/lib/tuckignore.ts"() {
|
|
7894
|
-
"use strict";
|
|
7895
|
-
init_paths();
|
|
7896
|
-
TUCKIGNORE_FILENAME = ".tuckignore";
|
|
7897
|
-
TUCKIGNORE_HEADER = `# .tuckignore - Files to exclude from tracking
|
|
7898
|
-
# One exact file path per line (no globs)
|
|
7899
|
-
# Lines starting with # are comments
|
|
7900
|
-
#
|
|
7901
|
-
# Example:
|
|
7902
|
-
# ~/bin/large-binary
|
|
7903
|
-
# ~/.docker/config.json
|
|
7904
|
-
`;
|
|
7905
|
-
getTuckignorePath = (tuckDir) => {
|
|
7906
|
-
return join12(tuckDir, TUCKIGNORE_FILENAME);
|
|
7907
|
-
};
|
|
7908
|
-
loadTuckignore = async (tuckDir) => {
|
|
7909
|
-
const ignorePath = getTuckignorePath(tuckDir);
|
|
7910
|
-
const ignoredPaths = /* @__PURE__ */ new Set();
|
|
7911
|
-
if (!await pathExists(ignorePath)) {
|
|
7912
|
-
return ignoredPaths;
|
|
7913
|
-
}
|
|
7914
|
-
try {
|
|
7915
|
-
const content = await readFile9(ignorePath, "utf-8");
|
|
7916
|
-
const lines = content.split("\n");
|
|
7917
|
-
for (const line of lines) {
|
|
7918
|
-
const trimmed = line.trim();
|
|
7919
|
-
if (!trimmed || trimmed.startsWith("#")) {
|
|
7920
|
-
continue;
|
|
7921
|
-
}
|
|
7922
|
-
const expanded = expandPath(trimmed);
|
|
7923
|
-
const collapsed = collapsePath(expanded);
|
|
7924
|
-
ignoredPaths.add(collapsed);
|
|
7925
|
-
}
|
|
7926
|
-
} catch {
|
|
7927
|
-
}
|
|
7928
|
-
return ignoredPaths;
|
|
7929
|
-
};
|
|
7930
|
-
addToTuckignore = async (tuckDir, path) => {
|
|
7931
|
-
const ignorePath = getTuckignorePath(tuckDir);
|
|
7932
|
-
const expanded = expandPath(path);
|
|
7933
|
-
const collapsed = collapsePath(expanded);
|
|
7934
|
-
const existingPaths = await loadTuckignore(tuckDir);
|
|
7935
|
-
if (existingPaths.has(collapsed)) {
|
|
7936
|
-
return;
|
|
7937
|
-
}
|
|
7938
|
-
if (!await pathExists(ignorePath)) {
|
|
7939
|
-
await writeFile7(ignorePath, TUCKIGNORE_HEADER + "\n", "utf-8");
|
|
7940
|
-
}
|
|
7941
|
-
await appendFile(ignorePath, collapsed + "\n", "utf-8");
|
|
7942
|
-
};
|
|
7943
|
-
isIgnored = async (tuckDir, path) => {
|
|
7944
|
-
const ignoredPaths = await loadTuckignore(tuckDir);
|
|
7945
|
-
const expanded = expandPath(path);
|
|
7946
|
-
const collapsed = collapsePath(expanded);
|
|
7947
|
-
return ignoredPaths.has(collapsed);
|
|
7948
|
-
};
|
|
7949
|
-
}
|
|
7950
|
-
});
|
|
7951
|
-
|
|
7952
|
-
// src/lib/audit.ts
|
|
7953
|
-
import { appendFile as appendFile2, mkdir } from "fs/promises";
|
|
7954
|
-
import { join as join13 } from "path";
|
|
7955
|
-
import { homedir as homedir4 } from "os";
|
|
7956
|
-
import { existsSync } from "fs";
|
|
7957
|
-
function getAuditLogPath() {
|
|
7958
|
-
return join13(homedir4(), ".tuck", AUDIT_FILENAME);
|
|
7959
|
-
}
|
|
7960
|
-
async function logAuditEntry(action, command, details) {
|
|
7961
|
-
try {
|
|
7962
|
-
const tuckDir = join13(homedir4(), ".tuck");
|
|
7963
|
-
if (!existsSync(tuckDir)) {
|
|
7964
|
-
await mkdir(tuckDir, { recursive: true });
|
|
7965
|
-
}
|
|
7966
|
-
const entry = {
|
|
7967
|
-
timestamp: (/* @__PURE__ */ new Date()).toISOString(),
|
|
7968
|
-
action,
|
|
7969
|
-
command,
|
|
7970
|
-
details,
|
|
7971
|
-
user: process.env.USER || process.env.USERNAME,
|
|
7972
|
-
cwd: process.cwd()
|
|
7973
|
-
};
|
|
7974
|
-
const logLine = JSON.stringify(entry) + "\n";
|
|
7975
|
-
const logPath = getAuditLogPath();
|
|
7976
|
-
await appendFile2(logPath, logLine, "utf-8");
|
|
7977
|
-
} catch {
|
|
7978
|
-
if (process.env.DEBUG) {
|
|
7979
|
-
console.error("[DEBUG] Failed to write audit log");
|
|
7980
|
-
}
|
|
7981
|
-
}
|
|
7982
|
-
}
|
|
7983
|
-
async function logForceSecretBypass(command, filesCount) {
|
|
7984
|
-
await logAuditEntry(
|
|
7985
|
-
"FORCE_SECRET_BYPASS",
|
|
7986
|
-
command,
|
|
7987
|
-
`Bypassed secret scanning for ${filesCount} file(s)`
|
|
7988
|
-
);
|
|
7989
|
-
}
|
|
7990
|
-
async function logForcePush(branch) {
|
|
7991
|
-
await logAuditEntry("FORCE_PUSH", "tuck push --force", `Force pushed to branch: ${branch}`);
|
|
7992
|
-
}
|
|
7993
|
-
var AUDIT_FILENAME;
|
|
7994
|
-
var init_audit = __esm({
|
|
7995
|
-
"src/lib/audit.ts"() {
|
|
7996
|
-
"use strict";
|
|
7997
|
-
AUDIT_FILENAME = "audit.log";
|
|
8768
|
+
logger.heading("Dry run - would restore:");
|
|
8769
|
+
} else {
|
|
8770
|
+
logger.heading("Restoring:");
|
|
8771
|
+
}
|
|
8772
|
+
const result = await restoreFilesInternal(tuckDir, files, options);
|
|
8773
|
+
logger.blank();
|
|
8774
|
+
if (options.dryRun) {
|
|
8775
|
+
logger.info(`Would restore ${files.length} file${files.length > 1 ? "s" : ""}`);
|
|
8776
|
+
} else {
|
|
8777
|
+
displaySecretSummary(result);
|
|
8778
|
+
logger.success(`Restored ${result.restoredCount} file${result.restoredCount !== 1 ? "s" : ""}`);
|
|
8779
|
+
}
|
|
8780
|
+
};
|
|
8781
|
+
restoreCommand = new Command("restore").description("Restore dotfiles to the system").argument("[paths...]", "Paths to restore (or use --all)").option("-a, --all", "Restore all tracked files").option("--symlink", "Create symlinks from source paths to tuck repo files").option("--backup", "Backup existing files before restore").option("--no-backup", "Skip backup of existing files").option("--dry-run", "Show what would be done").option("--no-hooks", "Skip execution of pre/post restore hooks").option("--trust-hooks", "Trust and run hooks without confirmation (use with caution)").option("--no-secrets", "Skip restoring secrets (keep placeholders as-is)").action(async (paths, options) => {
|
|
8782
|
+
await runRestoreCommand(paths, options);
|
|
8783
|
+
});
|
|
7998
8784
|
}
|
|
7999
8785
|
});
|
|
8000
8786
|
|
|
@@ -8587,11 +9373,13 @@ var init_secrets2 = __esm({
|
|
|
8587
9373
|
var sync_exports = {};
|
|
8588
9374
|
__export(sync_exports, {
|
|
8589
9375
|
runSync: () => runSync,
|
|
9376
|
+
runSyncCommand: () => runSyncCommand,
|
|
8590
9377
|
syncCommand: () => syncCommand
|
|
8591
9378
|
});
|
|
8592
9379
|
import { Command as Command4 } from "commander";
|
|
8593
|
-
import { join as join14, basename as
|
|
8594
|
-
|
|
9380
|
+
import { join as join14, basename as basename7 } from "path";
|
|
9381
|
+
import { realpath } from "fs/promises";
|
|
9382
|
+
var pathsResolveToSameLocation, detectChanges, pullIfBehind, detectNewDotfiles, generateCommitMessage, syncFiles, scanAndHandleSecrets, runInteractiveSync, pushWithSpinner, runSync, runSyncCommand, syncCommand;
|
|
8595
9383
|
var init_sync = __esm({
|
|
8596
9384
|
"src/commands/sync.ts"() {
|
|
8597
9385
|
"use strict";
|
|
@@ -8605,14 +9393,28 @@ var init_sync = __esm({
|
|
|
8605
9393
|
init_errors();
|
|
8606
9394
|
init_detect();
|
|
8607
9395
|
init_fileTracking();
|
|
9396
|
+
init_trackPipeline();
|
|
8608
9397
|
init_secrets();
|
|
8609
9398
|
init_secrets2();
|
|
8610
9399
|
init_audit();
|
|
9400
|
+
pathsResolveToSameLocation = async (sourcePath, destinationPath) => {
|
|
9401
|
+
try {
|
|
9402
|
+
const [resolvedSource, resolvedDestination] = await Promise.all([
|
|
9403
|
+
realpath(sourcePath),
|
|
9404
|
+
realpath(destinationPath)
|
|
9405
|
+
]);
|
|
9406
|
+
return resolvedSource === resolvedDestination;
|
|
9407
|
+
} catch {
|
|
9408
|
+
return false;
|
|
9409
|
+
}
|
|
9410
|
+
};
|
|
8611
9411
|
detectChanges = async (tuckDir) => {
|
|
8612
9412
|
const files = await getAllTrackedFiles(tuckDir);
|
|
8613
9413
|
const ignoredPaths = await loadTuckignore(tuckDir);
|
|
8614
9414
|
const changes = [];
|
|
8615
9415
|
for (const [, file] of Object.entries(files)) {
|
|
9416
|
+
validateSafeSourcePath(file.source);
|
|
9417
|
+
validateSafeManifestDestination(file.destination);
|
|
8616
9418
|
if (ignoredPaths.has(file.source)) {
|
|
8617
9419
|
continue;
|
|
8618
9420
|
}
|
|
@@ -8731,11 +9533,19 @@ var init_sync = __esm({
|
|
|
8731
9533
|
};
|
|
8732
9534
|
await runPreSyncHook(tuckDir, hookOptions);
|
|
8733
9535
|
for (const change of changes) {
|
|
9536
|
+
validateSafeSourcePath(change.source);
|
|
9537
|
+
if (!change.destination) {
|
|
9538
|
+
throw new Error(`Unsafe manifest entry detected: missing destination for ${change.source}`);
|
|
9539
|
+
}
|
|
9540
|
+
validateSafeManifestDestination(change.destination);
|
|
8734
9541
|
const sourcePath = expandPath(change.source);
|
|
8735
9542
|
const destPath = join14(tuckDir, change.destination);
|
|
9543
|
+
validatePathWithinRoot(destPath, tuckDir, "sync destination");
|
|
8736
9544
|
if (change.status === "modified") {
|
|
8737
9545
|
await withSpinner(`Syncing ${change.path}...`, async () => {
|
|
8738
|
-
await
|
|
9546
|
+
if (!await pathsResolveToSameLocation(sourcePath, destPath)) {
|
|
9547
|
+
await copyFileOrDir(sourcePath, destPath, { overwrite: true });
|
|
9548
|
+
}
|
|
8739
9549
|
const newChecksum = await getFileChecksum(destPath);
|
|
8740
9550
|
const files = await getAllTrackedFiles(tuckDir);
|
|
8741
9551
|
const fileId = Object.entries(files).find(([, f]) => f.source === change.source)?.[0];
|
|
@@ -8746,7 +9556,7 @@ var init_sync = __esm({
|
|
|
8746
9556
|
});
|
|
8747
9557
|
}
|
|
8748
9558
|
});
|
|
8749
|
-
result.modified.push(
|
|
9559
|
+
result.modified.push(basename7(change.path) || change.path);
|
|
8750
9560
|
} else if (change.status === "deleted") {
|
|
8751
9561
|
await withSpinner(`Removing ${change.path}...`, async () => {
|
|
8752
9562
|
await deleteFileOrDir(destPath);
|
|
@@ -8756,7 +9566,7 @@ var init_sync = __esm({
|
|
|
8756
9566
|
await removeFileFromManifest(tuckDir, fileId);
|
|
8757
9567
|
}
|
|
8758
9568
|
});
|
|
8759
|
-
result.deleted.push(
|
|
9569
|
+
result.deleted.push(basename7(change.path) || change.path);
|
|
8760
9570
|
}
|
|
8761
9571
|
}
|
|
8762
9572
|
if (!options.noCommit && (result.modified.length > 0 || result.deleted.length > 0)) {
|
|
@@ -8923,6 +9733,7 @@ var init_sync = __esm({
|
|
|
8923
9733
|
}
|
|
8924
9734
|
}
|
|
8925
9735
|
}
|
|
9736
|
+
let filesToTrackCandidates = [];
|
|
8926
9737
|
let filesToTrack = [];
|
|
8927
9738
|
if (newFiles.length > 0) {
|
|
8928
9739
|
console.log();
|
|
@@ -8956,7 +9767,13 @@ var init_sync = __esm({
|
|
|
8956
9767
|
const selected = await prompts.multiselect("Select files to track:", selectOptions, {
|
|
8957
9768
|
initialValues
|
|
8958
9769
|
});
|
|
8959
|
-
|
|
9770
|
+
filesToTrackCandidates = selected.map((path) => {
|
|
9771
|
+
const matched = newFiles.find((file) => file.path === path);
|
|
9772
|
+
return {
|
|
9773
|
+
path,
|
|
9774
|
+
category: matched?.category
|
|
9775
|
+
};
|
|
9776
|
+
});
|
|
8960
9777
|
}
|
|
8961
9778
|
}
|
|
8962
9779
|
const largeFiles = [];
|
|
@@ -8999,7 +9816,7 @@ var init_sync = __esm({
|
|
|
8999
9816
|
}
|
|
9000
9817
|
}
|
|
9001
9818
|
prompts.log.success("Added large files to .tuckignore");
|
|
9002
|
-
if (changes.length === 0 &&
|
|
9819
|
+
if (changes.length === 0 && filesToTrackCandidates.length === 0) {
|
|
9003
9820
|
prompts.log.info("No changes remaining to sync");
|
|
9004
9821
|
return;
|
|
9005
9822
|
}
|
|
@@ -9023,7 +9840,7 @@ var init_sync = __esm({
|
|
|
9023
9840
|
}
|
|
9024
9841
|
}
|
|
9025
9842
|
prompts.log.success("Added large files to .tuckignore");
|
|
9026
|
-
if (changes.length === 0 &&
|
|
9843
|
+
if (changes.length === 0 && filesToTrackCandidates.length === 0) {
|
|
9027
9844
|
prompts.log.info("No changes remaining to sync");
|
|
9028
9845
|
return;
|
|
9029
9846
|
}
|
|
@@ -9033,6 +9850,19 @@ var init_sync = __esm({
|
|
|
9033
9850
|
}
|
|
9034
9851
|
}
|
|
9035
9852
|
}
|
|
9853
|
+
if (filesToTrackCandidates.length > 0) {
|
|
9854
|
+
const prepared = await preparePathsForTracking(filesToTrackCandidates, tuckDir, {
|
|
9855
|
+
secretHandling: "interactive"
|
|
9856
|
+
});
|
|
9857
|
+
filesToTrack = prepared.map((file) => ({
|
|
9858
|
+
path: file.source,
|
|
9859
|
+
category: file.category
|
|
9860
|
+
}));
|
|
9861
|
+
}
|
|
9862
|
+
if (changes.length === 0 && filesToTrack.length === 0 && filesToTrackCandidates.length > 0) {
|
|
9863
|
+
prompts.log.info("No changes remaining to sync");
|
|
9864
|
+
return;
|
|
9865
|
+
}
|
|
9036
9866
|
if (filesToTrack.length > 0) {
|
|
9037
9867
|
console.log();
|
|
9038
9868
|
await trackFilesWithProgress(filesToTrack, tuckDir, {
|
|
@@ -9173,7 +10003,7 @@ var init_sync = __esm({
|
|
|
9173
10003
|
});
|
|
9174
10004
|
|
|
9175
10005
|
// src/index.ts
|
|
9176
|
-
import { Command as
|
|
10006
|
+
import { Command as Command18 } from "commander";
|
|
9177
10007
|
import chalk6 from "chalk";
|
|
9178
10008
|
|
|
9179
10009
|
// src/commands/init.ts
|
|
@@ -9183,8 +10013,8 @@ init_config();
|
|
|
9183
10013
|
init_manifest();
|
|
9184
10014
|
init_git();
|
|
9185
10015
|
import { Command as Command2 } from "commander";
|
|
9186
|
-
import { join as
|
|
9187
|
-
import { writeFile as
|
|
10016
|
+
import { join as join13 } from "path";
|
|
10017
|
+
import { writeFile as writeFile7 } from "fs/promises";
|
|
9188
10018
|
import { ensureDir as ensureDir5 } from "fs-extra";
|
|
9189
10019
|
|
|
9190
10020
|
// src/lib/providerSetup.ts
|
|
@@ -9774,11 +10604,11 @@ For SSH URLs (git@...):
|
|
|
9774
10604
|
|
|
9775
10605
|
For HTTPS URLs:
|
|
9776
10606
|
1. Create a personal access token on your hosting service
|
|
9777
|
-
2. Configure git credential helper:
|
|
9778
|
-
git config --global credential.helper
|
|
9779
|
-
|
|
9780
|
-
|
|
9781
|
-
|
|
10607
|
+
2. Configure a secure git credential helper:
|
|
10608
|
+
- macOS: git config --global credential.helper osxkeychain
|
|
10609
|
+
- Linux: git config --global credential.helper libsecret
|
|
10610
|
+
- Windows: git config --global credential.helper manager-core
|
|
10611
|
+
3. On first push, enter your token as password`;
|
|
9782
10612
|
}
|
|
9783
10613
|
// -------------------------------------------------------------------------
|
|
9784
10614
|
// Private Helpers
|
|
@@ -10221,10 +11051,11 @@ init_errors();
|
|
|
10221
11051
|
init_constants();
|
|
10222
11052
|
init_config_schema();
|
|
10223
11053
|
init_fileTracking();
|
|
11054
|
+
init_trackPipeline();
|
|
10224
11055
|
init_validation();
|
|
10225
11056
|
import { copy as copy3 } from "fs-extra";
|
|
10226
11057
|
import { tmpdir } from "os";
|
|
10227
|
-
import { readFile as
|
|
11058
|
+
import { readFile as readFile9, rm as rm2 } from "fs/promises";
|
|
10228
11059
|
var GITIGNORE_TEMPLATE = `# OS generated files
|
|
10229
11060
|
.DS_Store
|
|
10230
11061
|
.DS_Store?
|
|
@@ -10244,8 +11075,19 @@ Thumbs.db
|
|
|
10244
11075
|
# .env.local
|
|
10245
11076
|
`;
|
|
10246
11077
|
var trackFilesWithProgressInit = async (selectedPaths, tuckDir) => {
|
|
10247
|
-
const
|
|
10248
|
-
path
|
|
11078
|
+
const prepared = await preparePathsForTracking(
|
|
11079
|
+
selectedPaths.map((path) => ({ path })),
|
|
11080
|
+
tuckDir,
|
|
11081
|
+
{
|
|
11082
|
+
secretHandling: "interactive"
|
|
11083
|
+
}
|
|
11084
|
+
);
|
|
11085
|
+
if (prepared.length === 0) {
|
|
11086
|
+
return 0;
|
|
11087
|
+
}
|
|
11088
|
+
const filesToTrack = prepared.map((file) => ({
|
|
11089
|
+
path: file.source,
|
|
11090
|
+
category: file.category
|
|
10249
11091
|
}));
|
|
10250
11092
|
const result = await trackFilesWithProgress(filesToTrack, tuckDir, {
|
|
10251
11093
|
showCategory: true,
|
|
@@ -10336,13 +11178,13 @@ var createDirectoryStructure = async (tuckDir) => {
|
|
|
10336
11178
|
}
|
|
10337
11179
|
};
|
|
10338
11180
|
var createDefaultFiles = async (tuckDir, machine) => {
|
|
10339
|
-
const gitignorePath =
|
|
11181
|
+
const gitignorePath = join13(tuckDir, ".gitignore");
|
|
10340
11182
|
if (!await pathExists(gitignorePath)) {
|
|
10341
|
-
await
|
|
11183
|
+
await writeFile7(gitignorePath, GITIGNORE_TEMPLATE, "utf-8");
|
|
10342
11184
|
}
|
|
10343
|
-
const readmePath =
|
|
11185
|
+
const readmePath = join13(tuckDir, "README.md");
|
|
10344
11186
|
if (!await pathExists(readmePath)) {
|
|
10345
|
-
await
|
|
11187
|
+
await writeFile7(readmePath, README_TEMPLATE(machine), "utf-8");
|
|
10346
11188
|
}
|
|
10347
11189
|
};
|
|
10348
11190
|
var initFromScratch = async (tuckDir, options) => {
|
|
@@ -10573,10 +11415,25 @@ var setupTokenAuth = async (tuckDir, preferredType) => {
|
|
|
10573
11415
|
);
|
|
10574
11416
|
prompts.log.info("You may be prompted for credentials when pushing");
|
|
10575
11417
|
}
|
|
10576
|
-
await
|
|
10577
|
-
|
|
10578
|
-
|
|
10579
|
-
|
|
11418
|
+
const configureHelper = await prompts.confirm(
|
|
11419
|
+
"Configure a global Git credential helper now? This updates `git config --global credential.helper`.",
|
|
11420
|
+
true
|
|
11421
|
+
);
|
|
11422
|
+
if (configureHelper) {
|
|
11423
|
+
await configureGitCredentialHelperWithOptions({ allowGlobalConfigChange: true }).catch(
|
|
11424
|
+
(error) => {
|
|
11425
|
+
const errorMsg = error instanceof Error ? error.message : String(error);
|
|
11426
|
+
logger?.debug?.(`Failed to configure git credential helper (non-fatal): ${errorMsg}`);
|
|
11427
|
+
prompts.log.warning(
|
|
11428
|
+
"Could not configure a credential helper automatically. Set one manually for secure token storage (osxkeychain/libsecret/manager-core)."
|
|
11429
|
+
);
|
|
11430
|
+
}
|
|
11431
|
+
);
|
|
11432
|
+
} else {
|
|
11433
|
+
prompts.log.info(
|
|
11434
|
+
"Skipping credential helper setup. Git may prompt for credentials unless a helper is configured."
|
|
11435
|
+
);
|
|
11436
|
+
}
|
|
10580
11437
|
return await promptForManualRepoUrl(tuckDir, username, "https");
|
|
10581
11438
|
};
|
|
10582
11439
|
var promptForManualRepoUrl = async (tuckDir, username, preferredProtocol = "https") => {
|
|
@@ -10737,10 +11594,10 @@ tuck apply ${user.login}`,
|
|
|
10737
11594
|
return { remoteUrl, pushed: false };
|
|
10738
11595
|
};
|
|
10739
11596
|
var analyzeRepository = async (repoDir) => {
|
|
10740
|
-
const manifestPath =
|
|
11597
|
+
const manifestPath = join13(repoDir, ".tuckmanifest.json");
|
|
10741
11598
|
if (await pathExists(manifestPath)) {
|
|
10742
11599
|
try {
|
|
10743
|
-
const content = await
|
|
11600
|
+
const content = await readFile9(manifestPath, "utf-8");
|
|
10744
11601
|
const manifest = JSON.parse(content);
|
|
10745
11602
|
if (manifest.files && Object.keys(manifest.files).length > 0) {
|
|
10746
11603
|
return { type: "valid-tuck", manifest };
|
|
@@ -10750,7 +11607,7 @@ var analyzeRepository = async (repoDir) => {
|
|
|
10750
11607
|
return { type: "messed-up", reason: "Manifest file is corrupted or invalid" };
|
|
10751
11608
|
}
|
|
10752
11609
|
}
|
|
10753
|
-
const filesDir =
|
|
11610
|
+
const filesDir = join13(repoDir, "files");
|
|
10754
11611
|
const hasFilesDir = await pathExists(filesDir);
|
|
10755
11612
|
const commonPatterns = [
|
|
10756
11613
|
".zshrc",
|
|
@@ -10771,7 +11628,7 @@ var analyzeRepository = async (repoDir) => {
|
|
|
10771
11628
|
try {
|
|
10772
11629
|
const categories = await readdir5(filesDir);
|
|
10773
11630
|
for (const category of categories) {
|
|
10774
|
-
const categoryPath =
|
|
11631
|
+
const categoryPath = join13(filesDir, category);
|
|
10775
11632
|
const categoryStats = await import("fs/promises").then(
|
|
10776
11633
|
(fs) => fs.stat(categoryPath).catch((e) => {
|
|
10777
11634
|
logger.debug?.(errorToMessage(e, `Failed to stat category path ${categoryPath}`));
|
|
@@ -10914,7 +11771,7 @@ var importExistingRepo = async (tuckDir, repoName, analysis, repoDir) => {
|
|
|
10914
11771
|
for (const entry of entries) {
|
|
10915
11772
|
if (entry === ".git" || entry === ".tuckmanifest.json" || entry === ".tuckrc.json")
|
|
10916
11773
|
continue;
|
|
10917
|
-
const fullPath =
|
|
11774
|
+
const fullPath = join13(dir, entry);
|
|
10918
11775
|
const stats = await stat11(fullPath).catch((e) => {
|
|
10919
11776
|
logger.debug?.(errorToMessage(e, `Failed to stat ${fullPath}`));
|
|
10920
11777
|
return null;
|
|
@@ -11040,7 +11897,7 @@ var runInteractiveInit = async () => {
|
|
|
11040
11897
|
spinner4.stop(`Found repository: ${existingRepoName}`);
|
|
11041
11898
|
const importRepo = await prompts.confirm(`Import dotfiles from ${existingRepoName}?`, true);
|
|
11042
11899
|
if (importRepo) {
|
|
11043
|
-
const tempDir =
|
|
11900
|
+
const tempDir = join13(tmpdir(), `tuck-import-${Date.now()}`);
|
|
11044
11901
|
const cloneSpinner = prompts.spinner();
|
|
11045
11902
|
cloneSpinner.start("Cloning repository...");
|
|
11046
11903
|
let phase = "cloning";
|
|
@@ -11434,424 +12291,25 @@ init_manifest();
|
|
|
11434
12291
|
init_fileTracking();
|
|
11435
12292
|
init_errors();
|
|
11436
12293
|
init_constants();
|
|
11437
|
-
|
|
12294
|
+
init_trackPipeline();
|
|
11438
12295
|
import { Command as Command5 } from "commander";
|
|
11439
|
-
import { basename as basename7 } from "path";
|
|
11440
|
-
|
|
11441
|
-
// src/lib/binary.ts
|
|
11442
|
-
init_paths();
|
|
11443
|
-
init_platform();
|
|
11444
|
-
import { open, stat as stat8 } from "fs/promises";
|
|
11445
|
-
import { basename as basename5, dirname as dirname6 } from "path";
|
|
11446
|
-
var MAGIC_NUMBERS = {
|
|
11447
|
-
// ELF (Linux)
|
|
11448
|
-
ELF: Buffer.from([127, 69, 76, 70]),
|
|
11449
|
-
// Mach-O (macOS) - 32-bit
|
|
11450
|
-
MACHO_32: Buffer.from([254, 237, 250, 206]),
|
|
11451
|
-
// Mach-O (macOS) - 64-bit
|
|
11452
|
-
MACHO_64: Buffer.from([207, 250, 237, 254]),
|
|
11453
|
-
// Mach-O (macOS) - Universal binary
|
|
11454
|
-
MACHO_UNIVERSAL: Buffer.from([202, 254, 186, 190]),
|
|
11455
|
-
// PE (Windows)
|
|
11456
|
-
PE: Buffer.from([77, 90])
|
|
11457
|
-
// "MZ"
|
|
11458
|
-
};
|
|
11459
|
-
var SCRIPT_EXTENSIONS = [
|
|
11460
|
-
// Unix shells
|
|
11461
|
-
".sh",
|
|
11462
|
-
".bash",
|
|
11463
|
-
".zsh",
|
|
11464
|
-
".fish",
|
|
11465
|
-
// Cross-platform scripting languages
|
|
11466
|
-
".py",
|
|
11467
|
-
".rb",
|
|
11468
|
-
".pl",
|
|
11469
|
-
".js",
|
|
11470
|
-
".ts",
|
|
11471
|
-
".lua",
|
|
11472
|
-
".php",
|
|
11473
|
-
".tcl",
|
|
11474
|
-
".awk",
|
|
11475
|
-
".sed",
|
|
11476
|
-
// Windows scripts
|
|
11477
|
-
".ps1",
|
|
11478
|
-
// PowerShell
|
|
11479
|
-
".psm1",
|
|
11480
|
-
// PowerShell module
|
|
11481
|
-
".psd1",
|
|
11482
|
-
// PowerShell data
|
|
11483
|
-
".bat",
|
|
11484
|
-
// Batch file
|
|
11485
|
-
".cmd",
|
|
11486
|
-
// Command script
|
|
11487
|
-
".vbs",
|
|
11488
|
-
// VBScript
|
|
11489
|
-
".wsf"
|
|
11490
|
-
// Windows Script File
|
|
11491
|
-
];
|
|
11492
|
-
var WINDOWS_EXECUTABLE_EXTENSIONS = [".exe", ".com", ".dll"];
|
|
11493
|
-
var bufferStartsWith = (buffer, magic) => {
|
|
11494
|
-
if (buffer.length < magic.length) {
|
|
11495
|
-
return false;
|
|
11496
|
-
}
|
|
11497
|
-
for (let i = 0; i < magic.length; i++) {
|
|
11498
|
-
if (buffer[i] !== magic[i]) {
|
|
11499
|
-
return false;
|
|
11500
|
-
}
|
|
11501
|
-
}
|
|
11502
|
-
return true;
|
|
11503
|
-
};
|
|
11504
|
-
var isBinaryExecutable = async (path) => {
|
|
11505
|
-
const expandedPath = expandPath(path);
|
|
11506
|
-
try {
|
|
11507
|
-
const stats = await stat8(expandedPath);
|
|
11508
|
-
if (stats.isDirectory()) {
|
|
11509
|
-
return false;
|
|
11510
|
-
}
|
|
11511
|
-
if (IS_WINDOWS) {
|
|
11512
|
-
const lowerPath = expandedPath.toLowerCase();
|
|
11513
|
-
if (WINDOWS_EXECUTABLE_EXTENSIONS.some((ext) => lowerPath.endsWith(ext))) {
|
|
11514
|
-
return true;
|
|
11515
|
-
}
|
|
11516
|
-
}
|
|
11517
|
-
const hasExecutePermission = !IS_WINDOWS && (stats.mode & 73) !== 0;
|
|
11518
|
-
let fileHandle;
|
|
11519
|
-
try {
|
|
11520
|
-
fileHandle = await open(expandedPath, "r");
|
|
11521
|
-
const buffer = Buffer.alloc(512);
|
|
11522
|
-
await fileHandle.read(buffer, 0, 512, 0);
|
|
11523
|
-
if (bufferStartsWith(buffer, MAGIC_NUMBERS.ELF) || bufferStartsWith(buffer, MAGIC_NUMBERS.MACHO_32) || bufferStartsWith(buffer, MAGIC_NUMBERS.MACHO_64) || bufferStartsWith(buffer, MAGIC_NUMBERS.MACHO_UNIVERSAL) || bufferStartsWith(buffer, MAGIC_NUMBERS.PE)) {
|
|
11524
|
-
return true;
|
|
11525
|
-
}
|
|
11526
|
-
if (hasExecutePermission) {
|
|
11527
|
-
const startsWithShebang = buffer[0] === 35 && buffer[1] === 33;
|
|
11528
|
-
return !startsWithShebang;
|
|
11529
|
-
}
|
|
11530
|
-
return false;
|
|
11531
|
-
} finally {
|
|
11532
|
-
if (fileHandle) {
|
|
11533
|
-
await fileHandle.close();
|
|
11534
|
-
}
|
|
11535
|
-
}
|
|
11536
|
-
} catch {
|
|
11537
|
-
return false;
|
|
11538
|
-
}
|
|
11539
|
-
};
|
|
11540
|
-
var isScriptFile = async (path) => {
|
|
11541
|
-
const expandedPath = expandPath(path);
|
|
11542
|
-
try {
|
|
11543
|
-
const hasScriptExtension = SCRIPT_EXTENSIONS.some((ext) => expandedPath.endsWith(ext));
|
|
11544
|
-
if (hasScriptExtension) {
|
|
11545
|
-
return true;
|
|
11546
|
-
}
|
|
11547
|
-
const stats = await stat8(expandedPath);
|
|
11548
|
-
if (stats.isDirectory()) {
|
|
11549
|
-
return false;
|
|
11550
|
-
}
|
|
11551
|
-
let fileHandle;
|
|
11552
|
-
try {
|
|
11553
|
-
fileHandle = await open(expandedPath, "r");
|
|
11554
|
-
const buffer = Buffer.alloc(2);
|
|
11555
|
-
await fileHandle.read(buffer, 0, 2, 0);
|
|
11556
|
-
return buffer[0] === 35 && buffer[1] === 33;
|
|
11557
|
-
} finally {
|
|
11558
|
-
if (fileHandle) {
|
|
11559
|
-
await fileHandle.close();
|
|
11560
|
-
}
|
|
11561
|
-
}
|
|
11562
|
-
} catch {
|
|
11563
|
-
return false;
|
|
11564
|
-
}
|
|
11565
|
-
};
|
|
11566
|
-
var shouldExcludeFromBin = async (path) => {
|
|
11567
|
-
const expandedPath = expandPath(path);
|
|
11568
|
-
const parentDir = dirname6(expandedPath);
|
|
11569
|
-
const parentBasename = basename5(parentDir);
|
|
11570
|
-
const isInBinDir = parentBasename === "bin";
|
|
11571
|
-
if (!isInBinDir) {
|
|
11572
|
-
return false;
|
|
11573
|
-
}
|
|
11574
|
-
try {
|
|
11575
|
-
const stats = await stat8(expandedPath);
|
|
11576
|
-
if (stats.isDirectory()) {
|
|
11577
|
-
return false;
|
|
11578
|
-
}
|
|
11579
|
-
} catch {
|
|
11580
|
-
return false;
|
|
11581
|
-
}
|
|
11582
|
-
if (await isScriptFile(expandedPath)) {
|
|
11583
|
-
return false;
|
|
11584
|
-
}
|
|
11585
|
-
return await isBinaryExecutable(expandedPath);
|
|
11586
|
-
};
|
|
11587
|
-
|
|
11588
|
-
// src/commands/add.ts
|
|
11589
|
-
init_tuckignore();
|
|
11590
|
-
init_config();
|
|
11591
|
-
init_audit();
|
|
11592
|
-
init_secrets();
|
|
11593
|
-
var PRIVATE_KEY_PATTERNS2 = [
|
|
11594
|
-
/^id_rsa$/,
|
|
11595
|
-
/^id_dsa$/,
|
|
11596
|
-
/^id_ecdsa$/,
|
|
11597
|
-
/^id_ed25519$/,
|
|
11598
|
-
/^id_.*$/,
|
|
11599
|
-
// Any id_ file without .pub
|
|
11600
|
-
/\.pem$/,
|
|
11601
|
-
/\.key$/,
|
|
11602
|
-
/^.*_key$/
|
|
11603
|
-
// aws_key, github_key, etc.
|
|
11604
|
-
];
|
|
11605
|
-
var SENSITIVE_FILE_PATTERNS2 = [
|
|
11606
|
-
/^\.netrc$/,
|
|
11607
|
-
/^\.aws\/credentials$/,
|
|
11608
|
-
/^\.docker\/config\.json$/,
|
|
11609
|
-
/^\.npmrc$/,
|
|
11610
|
-
// May contain tokens
|
|
11611
|
-
/^\.pypirc$/,
|
|
11612
|
-
/^\.kube\/config$/,
|
|
11613
|
-
/^\.ssh\/config$/,
|
|
11614
|
-
/^\.gnupg\//,
|
|
11615
|
-
/credentials/i,
|
|
11616
|
-
/secrets?/i,
|
|
11617
|
-
/tokens?\.json$/i,
|
|
11618
|
-
/\.env$/,
|
|
11619
|
-
/\.env\./
|
|
11620
|
-
];
|
|
11621
|
-
var isPrivateKey = (path) => {
|
|
11622
|
-
const name = basename7(path);
|
|
11623
|
-
if (path.includes(".ssh/") && !name.endsWith(".pub")) {
|
|
11624
|
-
for (const pattern of PRIVATE_KEY_PATTERNS2) {
|
|
11625
|
-
if (pattern.test(name)) {
|
|
11626
|
-
return true;
|
|
11627
|
-
}
|
|
11628
|
-
}
|
|
11629
|
-
}
|
|
11630
|
-
if (name.endsWith(".pem") || name.endsWith(".key")) {
|
|
11631
|
-
return true;
|
|
11632
|
-
}
|
|
11633
|
-
return false;
|
|
11634
|
-
};
|
|
11635
|
-
var isSensitiveFile2 = (path) => {
|
|
11636
|
-
const pathToTest = path.startsWith("~/") ? path.slice(2) : path;
|
|
11637
|
-
for (const pattern of SENSITIVE_FILE_PATTERNS2) {
|
|
11638
|
-
if (pattern.test(pathToTest)) {
|
|
11639
|
-
return true;
|
|
11640
|
-
}
|
|
11641
|
-
}
|
|
11642
|
-
return false;
|
|
11643
|
-
};
|
|
11644
|
-
var validateAndPrepareFiles = async (paths, tuckDir, options) => {
|
|
11645
|
-
const filesToAdd = [];
|
|
11646
|
-
for (const path of paths) {
|
|
11647
|
-
const expandedPath = expandPath(path);
|
|
11648
|
-
const collapsedPath = collapsePath(expandedPath);
|
|
11649
|
-
if (isPrivateKey(collapsedPath)) {
|
|
11650
|
-
throw new PrivateKeyError(path);
|
|
11651
|
-
}
|
|
11652
|
-
if (!await pathExists(expandedPath)) {
|
|
11653
|
-
throw new FileNotFoundError(path);
|
|
11654
|
-
}
|
|
11655
|
-
if (await isFileTracked(tuckDir, collapsedPath)) {
|
|
11656
|
-
throw new FileAlreadyTrackedError(path);
|
|
11657
|
-
}
|
|
11658
|
-
if (await isIgnored(tuckDir, collapsedPath)) {
|
|
11659
|
-
logger.info(`Skipping ${path} (in .tuckignore)`);
|
|
11660
|
-
continue;
|
|
11661
|
-
}
|
|
11662
|
-
if (await shouldExcludeFromBin(expandedPath)) {
|
|
11663
|
-
const sizeCheck2 = await checkFileSizeThreshold(expandedPath);
|
|
11664
|
-
logger.info(
|
|
11665
|
-
`Skipping binary executable: ${path}${sizeCheck2.size > 0 ? ` (${formatFileSize(sizeCheck2.size)})` : ""} - Add to .tuckignore to customize`
|
|
11666
|
-
);
|
|
11667
|
-
continue;
|
|
11668
|
-
}
|
|
11669
|
-
const sizeCheck = await checkFileSizeThreshold(expandedPath);
|
|
11670
|
-
if (sizeCheck.block) {
|
|
11671
|
-
logger.warning(
|
|
11672
|
-
`File ${path} is ${formatFileSize(sizeCheck.size)} (exceeds GitHub's 100MB limit)`
|
|
11673
|
-
);
|
|
11674
|
-
const action = await prompts.select("How would you like to proceed?", [
|
|
11675
|
-
{ value: "ignore", label: "Add to .tuckignore and skip" },
|
|
11676
|
-
{ value: "cancel", label: "Cancel operation" }
|
|
11677
|
-
]);
|
|
11678
|
-
if (action === "ignore") {
|
|
11679
|
-
await addToTuckignore(tuckDir, collapsedPath);
|
|
11680
|
-
logger.success(`Added ${path} to .tuckignore`);
|
|
11681
|
-
continue;
|
|
11682
|
-
} else {
|
|
11683
|
-
throw new OperationCancelledError("file size exceeds GitHub limit");
|
|
11684
|
-
}
|
|
11685
|
-
}
|
|
11686
|
-
if (sizeCheck.warn) {
|
|
11687
|
-
logger.warning(
|
|
11688
|
-
`File ${path} is ${formatFileSize(sizeCheck.size)}. GitHub recommends files under 50MB.`
|
|
11689
|
-
);
|
|
11690
|
-
const action = await prompts.select("How would you like to proceed?", [
|
|
11691
|
-
{ value: "continue", label: "Track it anyway" },
|
|
11692
|
-
{ value: "ignore", label: "Add to .tuckignore and skip" },
|
|
11693
|
-
{ value: "cancel", label: "Cancel operation" }
|
|
11694
|
-
]);
|
|
11695
|
-
if (action === "ignore") {
|
|
11696
|
-
await addToTuckignore(tuckDir, collapsedPath);
|
|
11697
|
-
logger.success(`Added ${path} to .tuckignore`);
|
|
11698
|
-
continue;
|
|
11699
|
-
} else if (action === "cancel") {
|
|
11700
|
-
throw new OperationCancelledError("file size warning");
|
|
11701
|
-
}
|
|
11702
|
-
}
|
|
11703
|
-
const isDir = await isDirectory(expandedPath);
|
|
11704
|
-
const fileCount = isDir ? await getDirectoryFileCount(expandedPath) : 1;
|
|
11705
|
-
const category = options.category || detectCategory(expandedPath);
|
|
11706
|
-
const filename = options.name || sanitizeFilename(expandedPath);
|
|
11707
|
-
const destination = getDestinationPath(tuckDir, category, filename);
|
|
11708
|
-
const sensitive = isSensitiveFile2(collapsedPath);
|
|
11709
|
-
filesToAdd.push({
|
|
11710
|
-
source: collapsedPath,
|
|
11711
|
-
destination,
|
|
11712
|
-
category,
|
|
11713
|
-
filename,
|
|
11714
|
-
isDir,
|
|
11715
|
-
fileCount,
|
|
11716
|
-
sensitive
|
|
11717
|
-
});
|
|
11718
|
-
}
|
|
11719
|
-
return filesToAdd;
|
|
11720
|
-
};
|
|
11721
12296
|
var addFiles = async (filesToAdd, tuckDir, options) => {
|
|
11722
|
-
const filesToTrack = filesToAdd.map((f) =>
|
|
11723
|
-
|
|
11724
|
-
|
|
11725
|
-
|
|
12297
|
+
const filesToTrack = filesToAdd.map((f) => {
|
|
12298
|
+
const trackedFile = {
|
|
12299
|
+
path: f.source,
|
|
12300
|
+
category: f.category
|
|
12301
|
+
};
|
|
12302
|
+
if (f.nameOverride) {
|
|
12303
|
+
trackedFile.name = f.nameOverride;
|
|
12304
|
+
}
|
|
12305
|
+
return trackedFile;
|
|
12306
|
+
});
|
|
11726
12307
|
await trackFilesWithProgress(filesToTrack, tuckDir, {
|
|
11727
12308
|
showCategory: true,
|
|
11728
12309
|
strategy: options.symlink ? "symlink" : void 0,
|
|
11729
12310
|
actionVerb: "Tracking"
|
|
11730
12311
|
});
|
|
11731
12312
|
};
|
|
11732
|
-
var displaySecretWarning = (summary) => {
|
|
11733
|
-
console.log();
|
|
11734
|
-
console.log(
|
|
11735
|
-
colors.error(colors.bold(` Security Warning: Found ${summary.totalSecrets} potential secret(s)`))
|
|
11736
|
-
);
|
|
11737
|
-
console.log();
|
|
11738
|
-
for (const result of summary.results) {
|
|
11739
|
-
console.log(` ${colors.brand(result.collapsedPath)}`);
|
|
11740
|
-
for (const match of result.matches) {
|
|
11741
|
-
const severityColor = match.severity === "critical" ? colors.error : match.severity === "high" ? colors.warning : match.severity === "medium" ? colors.info : colors.muted;
|
|
11742
|
-
console.log(
|
|
11743
|
-
` ${colors.muted(`Line ${match.line}:`)} ${match.redactedValue} ${severityColor(`[${match.severity}]`)}`
|
|
11744
|
-
);
|
|
11745
|
-
}
|
|
11746
|
-
console.log();
|
|
11747
|
-
}
|
|
11748
|
-
};
|
|
11749
|
-
var handleSecretsDetected = async (summary, filesToAdd, tuckDir) => {
|
|
11750
|
-
displaySecretWarning(summary);
|
|
11751
|
-
const action = await prompts.select("How would you like to proceed?", [
|
|
11752
|
-
{
|
|
11753
|
-
value: "abort",
|
|
11754
|
-
label: "Abort operation",
|
|
11755
|
-
hint: "Do not track these files"
|
|
11756
|
-
},
|
|
11757
|
-
{
|
|
11758
|
-
value: "redact",
|
|
11759
|
-
label: "Replace with placeholders",
|
|
11760
|
-
hint: "Store originals in secrets.local.json (never committed)"
|
|
11761
|
-
},
|
|
11762
|
-
{
|
|
11763
|
-
value: "ignore",
|
|
11764
|
-
label: "Add files to .tuckignore",
|
|
11765
|
-
hint: "Skip these files permanently"
|
|
11766
|
-
},
|
|
11767
|
-
{
|
|
11768
|
-
value: "proceed",
|
|
11769
|
-
label: "Proceed anyway",
|
|
11770
|
-
hint: "Track files with secrets (dangerous!)"
|
|
11771
|
-
}
|
|
11772
|
-
]);
|
|
11773
|
-
switch (action) {
|
|
11774
|
-
case "abort":
|
|
11775
|
-
logger.info("Operation aborted");
|
|
11776
|
-
return { continue: false, filesToAdd: [] };
|
|
11777
|
-
case "redact": {
|
|
11778
|
-
const redactionMaps = await processSecretsForRedaction(summary.results, tuckDir);
|
|
11779
|
-
let totalRedacted = 0;
|
|
11780
|
-
for (const result of summary.results) {
|
|
11781
|
-
const placeholderMap = redactionMaps.get(result.path);
|
|
11782
|
-
if (placeholderMap && placeholderMap.size > 0) {
|
|
11783
|
-
const redactionResult = await redactFile(result.path, result.matches, placeholderMap);
|
|
11784
|
-
totalRedacted += redactionResult.replacements.length;
|
|
11785
|
-
}
|
|
11786
|
-
}
|
|
11787
|
-
console.log();
|
|
11788
|
-
logger.success(`Replaced ${totalRedacted} secret(s) with placeholders`);
|
|
11789
|
-
logger.dim(`Secrets stored in: ${collapsePath(getSecretsPath(tuckDir))} (never committed)`);
|
|
11790
|
-
logger.dim("Run 'tuck secrets list' to see stored secrets");
|
|
11791
|
-
console.log();
|
|
11792
|
-
return { continue: true, filesToAdd };
|
|
11793
|
-
}
|
|
11794
|
-
case "ignore": {
|
|
11795
|
-
const filesWithSecrets = new Set(summary.results.map((r) => r.collapsedPath));
|
|
11796
|
-
for (const file of filesToAdd) {
|
|
11797
|
-
const normalizedFileSource = collapsePath(file.source);
|
|
11798
|
-
if (filesWithSecrets.has(normalizedFileSource)) {
|
|
11799
|
-
await addToTuckignore(tuckDir, file.source);
|
|
11800
|
-
logger.success(`Added ${normalizedFileSource} to .tuckignore`);
|
|
11801
|
-
}
|
|
11802
|
-
}
|
|
11803
|
-
const remainingFiles = filesToAdd.filter((f) => {
|
|
11804
|
-
const normalizedSource = collapsePath(f.source);
|
|
11805
|
-
return !filesWithSecrets.has(normalizedSource);
|
|
11806
|
-
});
|
|
11807
|
-
if (remainingFiles.length === 0) {
|
|
11808
|
-
logger.info("No files remaining to track");
|
|
11809
|
-
return { continue: false, filesToAdd: [] };
|
|
11810
|
-
}
|
|
11811
|
-
return { continue: true, filesToAdd: remainingFiles };
|
|
11812
|
-
}
|
|
11813
|
-
case "proceed": {
|
|
11814
|
-
const confirmed = await prompts.confirm(
|
|
11815
|
-
colors.error("Are you SURE you want to track files containing secrets?"),
|
|
11816
|
-
false
|
|
11817
|
-
);
|
|
11818
|
-
if (!confirmed) {
|
|
11819
|
-
logger.info("Operation aborted");
|
|
11820
|
-
return { continue: false, filesToAdd: [] };
|
|
11821
|
-
}
|
|
11822
|
-
logger.warning("Proceeding with secrets - be careful not to push to a public repository!");
|
|
11823
|
-
return { continue: true, filesToAdd };
|
|
11824
|
-
}
|
|
11825
|
-
default:
|
|
11826
|
-
return { continue: false, filesToAdd: [] };
|
|
11827
|
-
}
|
|
11828
|
-
};
|
|
11829
|
-
var scanAndHandleSecrets2 = async (filesToAdd, tuckDir, options) => {
|
|
11830
|
-
const config = await loadConfig(tuckDir);
|
|
11831
|
-
const security = config.security || {};
|
|
11832
|
-
if (security.scanSecrets === false) {
|
|
11833
|
-
return { continue: true, filesToAdd };
|
|
11834
|
-
}
|
|
11835
|
-
if (options.force) {
|
|
11836
|
-
const confirmed = await prompts.confirmDangerous(
|
|
11837
|
-
"Using --force bypasses secret scanning.\nAny secrets in these files may be committed to git and potentially exposed.",
|
|
11838
|
-
"force"
|
|
11839
|
-
);
|
|
11840
|
-
if (!confirmed) {
|
|
11841
|
-
logger.info("Operation cancelled");
|
|
11842
|
-
return { continue: false, filesToAdd: [] };
|
|
11843
|
-
}
|
|
11844
|
-
logger.warning("Secret scanning bypassed with --force");
|
|
11845
|
-
await logForceSecretBypass("tuck add --force", filesToAdd.length);
|
|
11846
|
-
return { continue: true, filesToAdd };
|
|
11847
|
-
}
|
|
11848
|
-
const filePaths = filesToAdd.map((f) => expandPath(f.source));
|
|
11849
|
-
const summary = await scanForSecrets(filePaths, tuckDir);
|
|
11850
|
-
if (summary.filesWithSecrets === 0) {
|
|
11851
|
-
return { continue: true, filesToAdd };
|
|
11852
|
-
}
|
|
11853
|
-
return handleSecretsDetected(summary, filesToAdd, tuckDir);
|
|
11854
|
-
};
|
|
11855
12313
|
var runInteractiveAdd = async (tuckDir) => {
|
|
11856
12314
|
prompts.intro("tuck add");
|
|
11857
12315
|
const pathsInput = await prompts.text("Enter file paths to track (space-separated):", {
|
|
@@ -11862,9 +12320,13 @@ var runInteractiveAdd = async (tuckDir) => {
|
|
|
11862
12320
|
}
|
|
11863
12321
|
});
|
|
11864
12322
|
const paths = pathsInput.split(/\s+/).filter(Boolean);
|
|
12323
|
+
const candidates = paths.map((path) => ({ path }));
|
|
11865
12324
|
let filesToAdd;
|
|
11866
12325
|
try {
|
|
11867
|
-
filesToAdd = await
|
|
12326
|
+
filesToAdd = await preparePathsForTracking(candidates, tuckDir, {
|
|
12327
|
+
secretHandling: "interactive",
|
|
12328
|
+
forceBypassCommand: "tuck add --force"
|
|
12329
|
+
});
|
|
11868
12330
|
} catch (error) {
|
|
11869
12331
|
if (error instanceof Error) {
|
|
11870
12332
|
prompts.log.error(error.message);
|
|
@@ -11872,6 +12334,10 @@ var runInteractiveAdd = async (tuckDir) => {
|
|
|
11872
12334
|
prompts.cancel();
|
|
11873
12335
|
return;
|
|
11874
12336
|
}
|
|
12337
|
+
if (filesToAdd.length === 0) {
|
|
12338
|
+
logger.info("No files to add");
|
|
12339
|
+
return;
|
|
12340
|
+
}
|
|
11875
12341
|
for (const file of filesToAdd) {
|
|
11876
12342
|
prompts.log.step(`${file.source}`);
|
|
11877
12343
|
const categoryOptions = Object.entries(CATEGORIES).map(([name, config]) => ({
|
|
@@ -11886,7 +12352,6 @@ var runInteractiveAdd = async (tuckDir) => {
|
|
|
11886
12352
|
});
|
|
11887
12353
|
const selectedCategory = await prompts.select("Category:", categoryOptions);
|
|
11888
12354
|
file.category = selectedCategory;
|
|
11889
|
-
file.destination = getDestinationPath(tuckDir, file.category, file.filename);
|
|
11890
12355
|
}
|
|
11891
12356
|
const confirm2 = await prompts.confirm(
|
|
11892
12357
|
`Add ${filesToAdd.length} ${filesToAdd.length === 1 ? "file" : "files"}?`,
|
|
@@ -11911,19 +12376,22 @@ var runAdd = async (paths, options) => {
|
|
|
11911
12376
|
await runInteractiveAdd(tuckDir);
|
|
11912
12377
|
return;
|
|
11913
12378
|
}
|
|
11914
|
-
|
|
12379
|
+
const candidates = paths.map((path) => ({
|
|
12380
|
+
path,
|
|
12381
|
+
category: options.category,
|
|
12382
|
+
name: options.name
|
|
12383
|
+
}));
|
|
12384
|
+
const filesToAdd = await preparePathsForTracking(candidates, tuckDir, {
|
|
12385
|
+
category: options.category,
|
|
12386
|
+
name: options.name,
|
|
12387
|
+
force: options.force,
|
|
12388
|
+
secretHandling: "interactive",
|
|
12389
|
+
forceBypassCommand: "tuck add --force"
|
|
12390
|
+
});
|
|
11915
12391
|
if (filesToAdd.length === 0) {
|
|
11916
12392
|
logger.info("No files to add");
|
|
11917
12393
|
return;
|
|
11918
12394
|
}
|
|
11919
|
-
const secretScanResult = await scanAndHandleSecrets2(filesToAdd, tuckDir, options);
|
|
11920
|
-
if (!secretScanResult.continue) {
|
|
11921
|
-
return;
|
|
11922
|
-
}
|
|
11923
|
-
filesToAdd = secretScanResult.filesToAdd;
|
|
11924
|
-
if (filesToAdd.length === 0) {
|
|
11925
|
-
return;
|
|
11926
|
-
}
|
|
11927
12395
|
await addFiles(filesToAdd, tuckDir, options);
|
|
11928
12396
|
console.log();
|
|
11929
12397
|
const shouldSync = await prompts.confirm("Would you like to sync these changes now?", true);
|
|
@@ -11936,7 +12404,7 @@ var runAdd = async (paths, options) => {
|
|
|
11936
12404
|
logger.info("Run 'tuck sync' when you're ready to commit changes");
|
|
11937
12405
|
}
|
|
11938
12406
|
};
|
|
11939
|
-
var addCommand = new Command5("add").description("Track new dotfiles").argument("[paths...]", "Paths to dotfiles to track").option("-c, --category <name>", "Category to organize under").option("-n, --name <name>", "Custom name for the file in manifest").option("--symlink", "
|
|
12407
|
+
var addCommand = new Command5("add").description("Track new dotfiles").argument("[paths...]", "Paths to dotfiles to track").option("-c, --category <name>", "Category to organize under").option("-n, --name <name>", "Custom name for the file in manifest").option("--symlink", "Copy into tuck repo, then replace source path with a symlink").option("-f, --force", "Skip secret scanning (not recommended)").action(async (paths, options) => {
|
|
11940
12408
|
await runAdd(paths, options);
|
|
11941
12409
|
});
|
|
11942
12410
|
|
|
@@ -11947,8 +12415,7 @@ init_manifest();
|
|
|
11947
12415
|
init_files();
|
|
11948
12416
|
init_errors();
|
|
11949
12417
|
import { Command as Command6 } from "commander";
|
|
11950
|
-
|
|
11951
|
-
var validateAndPrepareFiles2 = async (paths, tuckDir) => {
|
|
12418
|
+
var validateAndPrepareFiles = async (paths, tuckDir) => {
|
|
11952
12419
|
const filesToRemove = [];
|
|
11953
12420
|
for (const path of paths) {
|
|
11954
12421
|
const expandedPath = expandPath(path);
|
|
@@ -11957,10 +12424,11 @@ var validateAndPrepareFiles2 = async (paths, tuckDir) => {
|
|
|
11957
12424
|
if (!tracked) {
|
|
11958
12425
|
throw new FileNotTrackedError(path);
|
|
11959
12426
|
}
|
|
12427
|
+
validateSafeSourcePath(tracked.file.source);
|
|
11960
12428
|
filesToRemove.push({
|
|
11961
12429
|
id: tracked.id,
|
|
11962
12430
|
source: tracked.file.source,
|
|
11963
|
-
destination:
|
|
12431
|
+
destination: getSafeRepoPathFromDestination(tuckDir, tracked.file.destination)
|
|
11964
12432
|
});
|
|
11965
12433
|
}
|
|
11966
12434
|
return filesToRemove;
|
|
@@ -12014,10 +12482,11 @@ var runInteractiveRemove = async (tuckDir) => {
|
|
|
12014
12482
|
}
|
|
12015
12483
|
const filesToRemove = selectedFiles.map((id) => {
|
|
12016
12484
|
const file = trackedFiles[id];
|
|
12485
|
+
validateSafeSourcePath(file.source);
|
|
12017
12486
|
return {
|
|
12018
12487
|
id,
|
|
12019
12488
|
source: file.source,
|
|
12020
|
-
destination:
|
|
12489
|
+
destination: getSafeRepoPathFromDestination(tuckDir, file.destination)
|
|
12021
12490
|
};
|
|
12022
12491
|
});
|
|
12023
12492
|
await removeFiles(filesToRemove, tuckDir, { delete: shouldDelete });
|
|
@@ -12035,7 +12504,7 @@ var runRemove = async (paths, options) => {
|
|
|
12035
12504
|
await runInteractiveRemove(tuckDir);
|
|
12036
12505
|
return;
|
|
12037
12506
|
}
|
|
12038
|
-
const filesToRemove = await
|
|
12507
|
+
const filesToRemove = await validateAndPrepareFiles(paths, tuckDir);
|
|
12039
12508
|
await removeFiles(filesToRemove, tuckDir, options);
|
|
12040
12509
|
logger.blank();
|
|
12041
12510
|
logger.success(`Removed ${filesToRemove.length} ${filesToRemove.length === 1 ? "item" : "items"} from tracking`);
|
|
@@ -12363,6 +12832,8 @@ var detectFileChanges = async (tuckDir) => {
|
|
|
12363
12832
|
const ignoredPaths = await loadTuckignore(tuckDir);
|
|
12364
12833
|
const changes = [];
|
|
12365
12834
|
for (const [, file] of Object.entries(files)) {
|
|
12835
|
+
validateSafeSourcePath(file.source);
|
|
12836
|
+
getSafeRepoPathFromDestination(tuckDir, file.destination);
|
|
12366
12837
|
if (ignoredPaths.has(file.source)) {
|
|
12367
12838
|
continue;
|
|
12368
12839
|
}
|
|
@@ -12700,9 +13171,9 @@ init_manifest();
|
|
|
12700
13171
|
init_git();
|
|
12701
13172
|
init_files();
|
|
12702
13173
|
init_errors();
|
|
12703
|
-
|
|
12704
|
-
import { join as join16 } from "path";
|
|
13174
|
+
init_binary();
|
|
12705
13175
|
init_tuckignore();
|
|
13176
|
+
import { Command as Command11 } from "commander";
|
|
12706
13177
|
import { readFile as readFile10 } from "fs/promises";
|
|
12707
13178
|
var isBinary = async (path) => {
|
|
12708
13179
|
if (!await pathExists(path)) {
|
|
@@ -12715,8 +13186,9 @@ var getFileDiff = async (tuckDir, source) => {
|
|
|
12715
13186
|
if (!tracked) {
|
|
12716
13187
|
throw new FileNotFoundError(`Not tracked: ${source}`);
|
|
12717
13188
|
}
|
|
13189
|
+
validateSafeSourcePath(tracked.file.source);
|
|
12718
13190
|
const systemPath = expandPath(source);
|
|
12719
|
-
const repoPath =
|
|
13191
|
+
const repoPath = getSafeRepoPathFromDestination(tuckDir, tracked.file.destination);
|
|
12720
13192
|
const diff = {
|
|
12721
13193
|
source,
|
|
12722
13194
|
destination: tracked.file.destination,
|
|
@@ -13051,27 +13523,20 @@ var CONFIG_KEYS = [
|
|
|
13051
13523
|
description: "Command to run after restore",
|
|
13052
13524
|
section: "hooks"
|
|
13053
13525
|
},
|
|
13054
|
-
// Template settings
|
|
13055
|
-
{
|
|
13056
|
-
path: "templates.enabled",
|
|
13057
|
-
type: "boolean",
|
|
13058
|
-
description: "Enable template processing",
|
|
13059
|
-
section: "templates"
|
|
13060
|
-
},
|
|
13061
13526
|
// Encryption settings
|
|
13062
13527
|
{
|
|
13063
|
-
path: "encryption.
|
|
13528
|
+
path: "encryption.backupsEnabled",
|
|
13064
13529
|
type: "boolean",
|
|
13065
|
-
description: "Enable
|
|
13066
|
-
section: "encryption"
|
|
13067
|
-
},
|
|
13068
|
-
{
|
|
13069
|
-
path: "encryption.gpgKey",
|
|
13070
|
-
type: "string",
|
|
13071
|
-
description: "GPG key for encryption",
|
|
13530
|
+
description: "Enable backup encryption",
|
|
13072
13531
|
section: "encryption"
|
|
13073
13532
|
}
|
|
13074
13533
|
];
|
|
13534
|
+
var UNSUPPORTED_CONFIG_KEY_PREFIXES = [
|
|
13535
|
+
"templates",
|
|
13536
|
+
"encryption.enabled",
|
|
13537
|
+
"encryption.gpgKey",
|
|
13538
|
+
"encryption.files"
|
|
13539
|
+
];
|
|
13075
13540
|
var getKeyInfo = (path) => {
|
|
13076
13541
|
return CONFIG_KEYS.find((k) => k.path === path);
|
|
13077
13542
|
};
|
|
@@ -13133,6 +13598,14 @@ var runConfigGet = async (key) => {
|
|
|
13133
13598
|
}
|
|
13134
13599
|
};
|
|
13135
13600
|
var runConfigSet = async (key, value) => {
|
|
13601
|
+
const unsupportedPrefix = UNSUPPORTED_CONFIG_KEY_PREFIXES.find(
|
|
13602
|
+
(prefix) => key === prefix || key.startsWith(`${prefix}.`)
|
|
13603
|
+
);
|
|
13604
|
+
if (unsupportedPrefix) {
|
|
13605
|
+
throw new ConfigError(
|
|
13606
|
+
`Unsupported config key: ${key}. This setting is reserved but not wired yet.`
|
|
13607
|
+
);
|
|
13608
|
+
}
|
|
13136
13609
|
const tuckDir = getTuckDir();
|
|
13137
13610
|
const config = await loadConfig(tuckDir);
|
|
13138
13611
|
const parsedValue = parseValue(value);
|
|
@@ -13155,14 +13628,14 @@ var runConfigEdit = async () => {
|
|
|
13155
13628
|
const configPath = getConfigPath(tuckDir);
|
|
13156
13629
|
const editor = process.env.EDITOR || process.env.VISUAL || "vim";
|
|
13157
13630
|
logger.info(`Opening ${collapsePath(configPath)} in ${editor}...`);
|
|
13158
|
-
return new Promise((
|
|
13631
|
+
return new Promise((resolve3, reject) => {
|
|
13159
13632
|
const child = spawn(editor, [configPath], {
|
|
13160
13633
|
stdio: "inherit"
|
|
13161
13634
|
});
|
|
13162
13635
|
child.on("exit", (code) => {
|
|
13163
13636
|
if (code === 0) {
|
|
13164
13637
|
logger.success("Configuration updated");
|
|
13165
|
-
|
|
13638
|
+
resolve3();
|
|
13166
13639
|
} else {
|
|
13167
13640
|
reject(new ConfigError(`Editor exited with code ${code}`));
|
|
13168
13641
|
}
|
|
@@ -13198,7 +13671,6 @@ var showConfigView = async (config) => {
|
|
|
13198
13671
|
{ key: "files", title: "File Management", icon: ">" },
|
|
13199
13672
|
{ key: "ui", title: "User Interface", icon: "#" },
|
|
13200
13673
|
{ key: "hooks", title: "Hooks", icon: "!" },
|
|
13201
|
-
{ key: "templates", title: "Templates", icon: "%" },
|
|
13202
13674
|
{ key: "encryption", title: "Encryption", icon: "@" }
|
|
13203
13675
|
];
|
|
13204
13676
|
for (const section of sections) {
|
|
@@ -13206,7 +13678,15 @@ var showConfigView = async (config) => {
|
|
|
13206
13678
|
if (!sectionConfig || typeof sectionConfig !== "object") continue;
|
|
13207
13679
|
console.log(colors.bold.cyan(`${section.icon} ${section.title}`));
|
|
13208
13680
|
console.log(colors.dim("-".repeat(40)));
|
|
13209
|
-
|
|
13681
|
+
const sectionEntries = Object.entries(sectionConfig).filter(
|
|
13682
|
+
([key]) => {
|
|
13683
|
+
if (section.key === "encryption") {
|
|
13684
|
+
return getKeyInfo(`${section.key}.${key}`) !== void 0;
|
|
13685
|
+
}
|
|
13686
|
+
return true;
|
|
13687
|
+
}
|
|
13688
|
+
);
|
|
13689
|
+
for (const [key, value] of sectionEntries) {
|
|
13210
13690
|
const keyInfo = getKeyInfo(`${section.key}.${key}`);
|
|
13211
13691
|
const displayValue = formatConfigValue(value);
|
|
13212
13692
|
const description = keyInfo?.description || "";
|
|
@@ -13217,6 +13697,10 @@ var showConfigView = async (config) => {
|
|
|
13217
13697
|
}
|
|
13218
13698
|
console.log();
|
|
13219
13699
|
}
|
|
13700
|
+
if (config.templates?.enabled || Object.keys(config.templates?.variables || {}).length > 0) {
|
|
13701
|
+
console.log(colors.yellow("! Templates config is currently reserved and not applied during restore/sync."));
|
|
13702
|
+
console.log();
|
|
13703
|
+
}
|
|
13220
13704
|
};
|
|
13221
13705
|
var runConfigWizard = async (config, tuckDir) => {
|
|
13222
13706
|
prompts.log.info("Let's configure tuck for your workflow");
|
|
@@ -13522,7 +14006,7 @@ init_paths();
|
|
|
13522
14006
|
init_git();
|
|
13523
14007
|
init_github();
|
|
13524
14008
|
import { Command as Command13 } from "commander";
|
|
13525
|
-
import { join as
|
|
14009
|
+
import { join as join16, dirname as dirname8 } from "path";
|
|
13526
14010
|
import { readFile as readFile13, writeFile as writeFile9, rm as rm4, chmod as chmod3, stat as stat10 } from "fs/promises";
|
|
13527
14011
|
import { ensureDir as ensureDir7, pathExists as fsPathExists } from "fs-extra";
|
|
13528
14012
|
import { tmpdir as tmpdir2 } from "os";
|
|
@@ -13530,11 +14014,11 @@ import { tmpdir as tmpdir2 } from "os";
|
|
|
13530
14014
|
// src/lib/timemachine.ts
|
|
13531
14015
|
init_paths();
|
|
13532
14016
|
init_errors();
|
|
13533
|
-
import { join as
|
|
14017
|
+
import { join as join15, dirname as dirname7, relative as relative2, resolve as resolve2, sep as sep3 } from "path";
|
|
13534
14018
|
import { readdir as readdir3, readFile as readFile11, writeFile as writeFile8, rm as rm3, stat as stat9 } from "fs/promises";
|
|
13535
14019
|
import { copy as copy4, ensureDir as ensureDir6, pathExists as pathExists3 } from "fs-extra";
|
|
13536
14020
|
import { homedir as homedir5 } from "os";
|
|
13537
|
-
var TIMEMACHINE_DIR =
|
|
14021
|
+
var TIMEMACHINE_DIR = join15(homedir5(), ".tuck", "backups");
|
|
13538
14022
|
var generateSnapshotId = () => {
|
|
13539
14023
|
const now = /* @__PURE__ */ new Date();
|
|
13540
14024
|
const year = now.getFullYear();
|
|
@@ -13546,11 +14030,25 @@ var generateSnapshotId = () => {
|
|
|
13546
14030
|
return `${year}-${month}-${day}-${hours}${minutes}${seconds}`;
|
|
13547
14031
|
};
|
|
13548
14032
|
var getSnapshotPath = (snapshotId) => {
|
|
13549
|
-
return
|
|
14033
|
+
return join15(TIMEMACHINE_DIR, snapshotId);
|
|
13550
14034
|
};
|
|
13551
14035
|
var toBackupPath = (originalPath) => {
|
|
13552
|
-
const
|
|
13553
|
-
|
|
14036
|
+
const expandedOriginal = expandPath(originalPath);
|
|
14037
|
+
const homePath = resolve2(homedir5());
|
|
14038
|
+
const resolvedOriginal = resolve2(expandedOriginal);
|
|
14039
|
+
const isWithinHome = resolvedOriginal === homePath || resolvedOriginal.startsWith(homePath + sep3);
|
|
14040
|
+
if (!isWithinHome) {
|
|
14041
|
+
throw new BackupError(`Cannot snapshot path outside home directory: ${originalPath}`);
|
|
14042
|
+
}
|
|
14043
|
+
const relativePath = relative2(homePath, resolvedOriginal);
|
|
14044
|
+
const normalizedRelative = relativePath.replace(/\\/g, "/");
|
|
14045
|
+
if (!normalizedRelative || normalizedRelative === ".") {
|
|
14046
|
+
throw new BackupError(`Cannot snapshot home directory root directly: ${originalPath}`);
|
|
14047
|
+
}
|
|
14048
|
+
if (normalizedRelative.startsWith("/") || /^[A-Za-z]:[\\/]/.test(normalizedRelative) || normalizedRelative.split("/").includes("..")) {
|
|
14049
|
+
throw new BackupError(`Unsafe backup path generated from: ${originalPath}`);
|
|
14050
|
+
}
|
|
14051
|
+
return normalizedRelative;
|
|
13554
14052
|
};
|
|
13555
14053
|
var createSnapshot = async (filePaths, reason, profile) => {
|
|
13556
14054
|
const snapshotId = generateSnapshotId();
|
|
@@ -13561,7 +14059,7 @@ var createSnapshot = async (filePaths, reason, profile) => {
|
|
|
13561
14059
|
for (const filePath of filePaths) {
|
|
13562
14060
|
const expandedPath = expandPath(filePath);
|
|
13563
14061
|
const backupRelativePath = toBackupPath(expandedPath);
|
|
13564
|
-
const backupPath =
|
|
14062
|
+
const backupPath = join15(snapshotPath, "files", backupRelativePath);
|
|
13565
14063
|
const existed = await pathExists(expandedPath);
|
|
13566
14064
|
if (existed) {
|
|
13567
14065
|
await ensureDir6(dirname7(backupPath));
|
|
@@ -13582,7 +14080,7 @@ var createSnapshot = async (filePaths, reason, profile) => {
|
|
|
13582
14080
|
profile
|
|
13583
14081
|
};
|
|
13584
14082
|
await writeFile8(
|
|
13585
|
-
|
|
14083
|
+
join15(snapshotPath, "metadata.json"),
|
|
13586
14084
|
JSON.stringify(metadata, null, 2),
|
|
13587
14085
|
"utf-8"
|
|
13588
14086
|
);
|
|
@@ -13608,8 +14106,8 @@ var listSnapshots = async () => {
|
|
|
13608
14106
|
const snapshots = [];
|
|
13609
14107
|
for (const entry of entries) {
|
|
13610
14108
|
if (!entry.isDirectory()) continue;
|
|
13611
|
-
const snapshotPath =
|
|
13612
|
-
const metadataPath =
|
|
14109
|
+
const snapshotPath = join15(TIMEMACHINE_DIR, entry.name);
|
|
14110
|
+
const metadataPath = join15(snapshotPath, "metadata.json");
|
|
13613
14111
|
if (!await pathExists3(metadataPath)) continue;
|
|
13614
14112
|
try {
|
|
13615
14113
|
const content = await readFile11(metadataPath, "utf-8");
|
|
@@ -13636,7 +14134,7 @@ var getSnapshot = async (snapshotId) => {
|
|
|
13636
14134
|
if (!await pathExists3(snapshotPath)) {
|
|
13637
14135
|
return null;
|
|
13638
14136
|
}
|
|
13639
|
-
const metadataPath =
|
|
14137
|
+
const metadataPath = join15(snapshotPath, "metadata.json");
|
|
13640
14138
|
if (!await pathExists3(metadataPath)) {
|
|
13641
14139
|
return null;
|
|
13642
14140
|
}
|
|
@@ -13723,7 +14221,7 @@ var getSnapshotsSize = async () => {
|
|
|
13723
14221
|
let size = 0;
|
|
13724
14222
|
const entries = await readdir3(dirPath, { withFileTypes: true });
|
|
13725
14223
|
for (const entry of entries) {
|
|
13726
|
-
const entryPath =
|
|
14224
|
+
const entryPath = join15(dirPath, entry.name);
|
|
13727
14225
|
if (entry.isDirectory()) {
|
|
13728
14226
|
size += await calculateDirSize(entryPath);
|
|
13729
14227
|
} else {
|
|
@@ -13991,6 +14489,7 @@ ${incomingContent.slice(0, 500)}${incomingContent.length > 500 ? "..." : ""}`;
|
|
|
13991
14489
|
|
|
13992
14490
|
// src/commands/apply.ts
|
|
13993
14491
|
init_constants();
|
|
14492
|
+
init_manifest_schema();
|
|
13994
14493
|
init_secrets();
|
|
13995
14494
|
init_secretBackends();
|
|
13996
14495
|
init_config();
|
|
@@ -14047,7 +14546,7 @@ var resolveSource = async (source) => {
|
|
|
14047
14546
|
throw new RepositoryNotFoundError(source);
|
|
14048
14547
|
};
|
|
14049
14548
|
var cloneSource = async (repoId, isUrl) => {
|
|
14050
|
-
const tempDir =
|
|
14549
|
+
const tempDir = join16(tmpdir2(), `tuck-apply-${Date.now()}`);
|
|
14051
14550
|
await ensureDir7(tempDir);
|
|
14052
14551
|
if (isUrl) {
|
|
14053
14552
|
await cloneRepo(repoId, tempDir);
|
|
@@ -14062,13 +14561,14 @@ var cloneSource = async (repoId, isUrl) => {
|
|
|
14062
14561
|
return tempDir;
|
|
14063
14562
|
};
|
|
14064
14563
|
var readClonedManifest = async (repoDir) => {
|
|
14065
|
-
const manifestPath =
|
|
14564
|
+
const manifestPath = join16(repoDir, ".tuckmanifest.json");
|
|
14066
14565
|
if (!await fsPathExists(manifestPath)) {
|
|
14067
14566
|
return null;
|
|
14068
14567
|
}
|
|
14069
14568
|
try {
|
|
14070
14569
|
const content = await readFile13(manifestPath, "utf-8");
|
|
14071
|
-
|
|
14570
|
+
const parsed = JSON.parse(content);
|
|
14571
|
+
return tuckManifestSchema.parse(parsed);
|
|
14072
14572
|
} catch {
|
|
14073
14573
|
return null;
|
|
14074
14574
|
}
|
|
@@ -14076,14 +14576,21 @@ var readClonedManifest = async (repoDir) => {
|
|
|
14076
14576
|
var prepareFilesToApply = async (repoDir, manifest) => {
|
|
14077
14577
|
const files = [];
|
|
14078
14578
|
for (const [_id, file] of Object.entries(manifest.files)) {
|
|
14079
|
-
|
|
14579
|
+
try {
|
|
14580
|
+
validateSafeSourcePath(file.source);
|
|
14581
|
+
validateSafeManifestDestination(file.destination);
|
|
14582
|
+
} catch {
|
|
14583
|
+
logger.warning(`Skipping unsafe manifest entry: ${file.source}`);
|
|
14584
|
+
continue;
|
|
14585
|
+
}
|
|
14586
|
+
const repoFilePath = join16(repoDir, file.destination);
|
|
14587
|
+
try {
|
|
14588
|
+
validatePathWithinRoot(repoFilePath, repoDir, "repository file");
|
|
14589
|
+
} catch {
|
|
14590
|
+
logger.warning(`Skipping unsafe repository path from manifest: ${file.destination}`);
|
|
14591
|
+
continue;
|
|
14592
|
+
}
|
|
14080
14593
|
if (await fsPathExists(repoFilePath)) {
|
|
14081
|
-
try {
|
|
14082
|
-
validateSafeSourcePath(file.source);
|
|
14083
|
-
} catch (error) {
|
|
14084
|
-
logger.warning(`Skipping unsafe path from manifest: ${file.source}`);
|
|
14085
|
-
continue;
|
|
14086
|
-
}
|
|
14087
14594
|
files.push({
|
|
14088
14595
|
source: file.source,
|
|
14089
14596
|
destination: expandPath(file.source),
|
|
@@ -14716,8 +15223,10 @@ init_manifest();
|
|
|
14716
15223
|
init_detect();
|
|
14717
15224
|
init_errors();
|
|
14718
15225
|
init_fileTracking();
|
|
14719
|
-
|
|
15226
|
+
init_trackPipeline();
|
|
15227
|
+
init_binary();
|
|
14720
15228
|
init_tuckignore();
|
|
15229
|
+
import { Command as Command15 } from "commander";
|
|
14721
15230
|
var groupSelectableByCategory = (files) => {
|
|
14722
15231
|
const grouped = {};
|
|
14723
15232
|
for (const file of files) {
|
|
@@ -14846,9 +15355,22 @@ var showSummary = (selected) => {
|
|
|
14846
15355
|
}
|
|
14847
15356
|
};
|
|
14848
15357
|
var addFilesWithProgress = async (selected, tuckDir) => {
|
|
14849
|
-
const
|
|
14850
|
-
|
|
14851
|
-
|
|
15358
|
+
const prepared = await preparePathsForTracking(
|
|
15359
|
+
selected.map((file) => ({
|
|
15360
|
+
path: file.path,
|
|
15361
|
+
category: file.category
|
|
15362
|
+
})),
|
|
15363
|
+
tuckDir,
|
|
15364
|
+
{
|
|
15365
|
+
secretHandling: "interactive"
|
|
15366
|
+
}
|
|
15367
|
+
);
|
|
15368
|
+
if (prepared.length === 0) {
|
|
15369
|
+
return 0;
|
|
15370
|
+
}
|
|
15371
|
+
const filesToTrack = prepared.map((file) => ({
|
|
15372
|
+
path: file.source,
|
|
15373
|
+
category: file.category
|
|
14852
15374
|
}));
|
|
14853
15375
|
const result = await trackFilesWithProgress(filesToTrack, tuckDir, {
|
|
14854
15376
|
showCategory: true,
|
|
@@ -15151,7 +15673,7 @@ var LinuxKeystore = class {
|
|
|
15151
15673
|
account
|
|
15152
15674
|
];
|
|
15153
15675
|
try {
|
|
15154
|
-
await new Promise((
|
|
15676
|
+
await new Promise((resolve3, reject) => {
|
|
15155
15677
|
const child = spawn2("secret-tool", args, {
|
|
15156
15678
|
stdio: ["pipe", "pipe", "pipe"]
|
|
15157
15679
|
});
|
|
@@ -15164,7 +15686,7 @@ var LinuxKeystore = class {
|
|
|
15164
15686
|
});
|
|
15165
15687
|
child.on("close", (code) => {
|
|
15166
15688
|
if (code === 0) {
|
|
15167
|
-
|
|
15689
|
+
resolve3();
|
|
15168
15690
|
} else {
|
|
15169
15691
|
reject(new Error(`secret-tool exited with code ${code}: ${stderr}`));
|
|
15170
15692
|
}
|
|
@@ -15283,7 +15805,7 @@ var WindowsKeystore = class {
|
|
|
15283
15805
|
|
|
15284
15806
|
// src/lib/crypto/keystore/fallback.ts
|
|
15285
15807
|
import { readFile as readFile14, writeFile as writeFile10, chmod as chmod4 } from "fs/promises";
|
|
15286
|
-
import { join as
|
|
15808
|
+
import { join as join17 } from "path";
|
|
15287
15809
|
import { homedir as homedir6, hostname, userInfo } from "os";
|
|
15288
15810
|
import { createHash as createHash3, createCipheriv as createCipheriv2, createDecipheriv as createDecipheriv2, randomBytes as randomBytes3 } from "crypto";
|
|
15289
15811
|
import { ensureDir as ensureDir8, pathExists as pathExists4 } from "fs-extra";
|
|
@@ -15292,7 +15814,7 @@ var ALGORITHM = "aes-256-gcm";
|
|
|
15292
15814
|
var FallbackKeystore = class {
|
|
15293
15815
|
keystorePath;
|
|
15294
15816
|
constructor(customPath) {
|
|
15295
|
-
this.keystorePath = customPath ||
|
|
15817
|
+
this.keystorePath = customPath || join17(homedir6(), ".tuck", KEYSTORE_FILE);
|
|
15296
15818
|
}
|
|
15297
15819
|
getName() {
|
|
15298
15820
|
return "Local encrypted file";
|
|
@@ -15352,7 +15874,7 @@ var FallbackKeystore = class {
|
|
|
15352
15874
|
async saveData(data) {
|
|
15353
15875
|
const json = JSON.stringify(data, null, 2);
|
|
15354
15876
|
const encrypted = this.encrypt(Buffer.from(json, "utf-8"));
|
|
15355
|
-
await ensureDir8(
|
|
15877
|
+
await ensureDir8(join17(homedir6(), ".tuck"));
|
|
15356
15878
|
await writeFile10(this.keystorePath, encrypted);
|
|
15357
15879
|
await chmod4(this.keystorePath, 384);
|
|
15358
15880
|
}
|
|
@@ -15616,6 +16138,648 @@ var runEnable = async () => {
|
|
|
15616
16138
|
var encryptionCommand = new Command16("encryption").description("Manage backup encryption (power user)").addCommand(new Command16("status").description("Show encryption status").action(runStatus2)).addCommand(new Command16("setup").description("Set up backup encryption").action(runSetup)).addCommand(new Command16("enable").description("Enable backup encryption").action(runEnable)).addCommand(new Command16("disable").description("Disable backup encryption").action(runDisable)).addCommand(new Command16("rotate").description("Change encryption password").action(runRotate));
|
|
15617
16139
|
encryptionCommand.action(runStatus2);
|
|
15618
16140
|
|
|
16141
|
+
// src/commands/doctor.ts
|
|
16142
|
+
init_ui();
|
|
16143
|
+
import { Command as Command17 } from "commander";
|
|
16144
|
+
|
|
16145
|
+
// src/lib/doctor.ts
|
|
16146
|
+
init_config();
|
|
16147
|
+
init_git();
|
|
16148
|
+
init_manifest();
|
|
16149
|
+
init_paths();
|
|
16150
|
+
import { homedir as homedir7 } from "os";
|
|
16151
|
+
import { join as join18 } from "path";
|
|
16152
|
+
var DOCTOR_CATEGORIES = ["env", "repo", "manifest", "security", "hooks"];
|
|
16153
|
+
var checkNodeVersion = {
|
|
16154
|
+
id: "env.node-version",
|
|
16155
|
+
category: "env",
|
|
16156
|
+
run: async () => {
|
|
16157
|
+
const major = Number.parseInt(process.versions.node.split(".")[0] || "0", 10);
|
|
16158
|
+
if (major >= 18) {
|
|
16159
|
+
return {
|
|
16160
|
+
id: "env.node-version",
|
|
16161
|
+
category: "env",
|
|
16162
|
+
status: "pass",
|
|
16163
|
+
message: `Node.js ${process.versions.node} is supported`
|
|
16164
|
+
};
|
|
16165
|
+
}
|
|
16166
|
+
return {
|
|
16167
|
+
id: "env.node-version",
|
|
16168
|
+
category: "env",
|
|
16169
|
+
status: "fail",
|
|
16170
|
+
message: `Node.js ${process.versions.node} is unsupported`,
|
|
16171
|
+
fix: "Upgrade Node.js to version 18 or newer"
|
|
16172
|
+
};
|
|
16173
|
+
}
|
|
16174
|
+
};
|
|
16175
|
+
var checkHomeDirectory = {
|
|
16176
|
+
id: "env.home-directory",
|
|
16177
|
+
category: "env",
|
|
16178
|
+
run: async () => {
|
|
16179
|
+
const home = homedir7();
|
|
16180
|
+
if (!home || home.trim().length === 0) {
|
|
16181
|
+
return {
|
|
16182
|
+
id: "env.home-directory",
|
|
16183
|
+
category: "env",
|
|
16184
|
+
status: "fail",
|
|
16185
|
+
message: "Home directory could not be resolved",
|
|
16186
|
+
fix: "Ensure the current OS user account has a valid home directory"
|
|
16187
|
+
};
|
|
16188
|
+
}
|
|
16189
|
+
return {
|
|
16190
|
+
id: "env.home-directory",
|
|
16191
|
+
category: "env",
|
|
16192
|
+
status: "pass",
|
|
16193
|
+
message: `Home directory resolved: ${collapsePath(home)}`
|
|
16194
|
+
};
|
|
16195
|
+
}
|
|
16196
|
+
};
|
|
16197
|
+
var checkTuckDirectory = {
|
|
16198
|
+
id: "repo.tuck-directory",
|
|
16199
|
+
category: "repo",
|
|
16200
|
+
run: async (context) => {
|
|
16201
|
+
if (context.hasTuckDir && context.isTuckDirDirectory) {
|
|
16202
|
+
return {
|
|
16203
|
+
id: "repo.tuck-directory",
|
|
16204
|
+
category: "repo",
|
|
16205
|
+
status: "pass",
|
|
16206
|
+
message: `Tuck directory exists: ${collapsePath(context.tuckDir)}`
|
|
16207
|
+
};
|
|
16208
|
+
}
|
|
16209
|
+
if (context.hasTuckDir && !context.isTuckDirDirectory) {
|
|
16210
|
+
return {
|
|
16211
|
+
id: "repo.tuck-directory",
|
|
16212
|
+
category: "repo",
|
|
16213
|
+
status: "fail",
|
|
16214
|
+
message: `Tuck path is not a directory: ${collapsePath(context.tuckDir)}`,
|
|
16215
|
+
fix: "Remove or rename the conflicting file, then run `tuck init`"
|
|
16216
|
+
};
|
|
16217
|
+
}
|
|
16218
|
+
return {
|
|
16219
|
+
id: "repo.tuck-directory",
|
|
16220
|
+
category: "repo",
|
|
16221
|
+
status: "fail",
|
|
16222
|
+
message: `Tuck directory missing: ${collapsePath(context.tuckDir)}`,
|
|
16223
|
+
fix: "Run `tuck init` to initialize this machine"
|
|
16224
|
+
};
|
|
16225
|
+
}
|
|
16226
|
+
};
|
|
16227
|
+
var checkGitDirectory = {
|
|
16228
|
+
id: "repo.git-directory",
|
|
16229
|
+
category: "repo",
|
|
16230
|
+
run: async (context) => {
|
|
16231
|
+
if (!context.hasTuckDir) {
|
|
16232
|
+
return {
|
|
16233
|
+
id: "repo.git-directory",
|
|
16234
|
+
category: "repo",
|
|
16235
|
+
status: "warn",
|
|
16236
|
+
message: "Skipped git checks because tuck is not initialized"
|
|
16237
|
+
};
|
|
16238
|
+
}
|
|
16239
|
+
if (context.hasGitDir) {
|
|
16240
|
+
return {
|
|
16241
|
+
id: "repo.git-directory",
|
|
16242
|
+
category: "repo",
|
|
16243
|
+
status: "pass",
|
|
16244
|
+
message: "Git metadata is present in tuck directory"
|
|
16245
|
+
};
|
|
16246
|
+
}
|
|
16247
|
+
return {
|
|
16248
|
+
id: "repo.git-directory",
|
|
16249
|
+
category: "repo",
|
|
16250
|
+
status: "fail",
|
|
16251
|
+
message: "Missing .git directory under tuck repository",
|
|
16252
|
+
fix: "Reinitialize with `tuck init` or restore the git metadata"
|
|
16253
|
+
};
|
|
16254
|
+
}
|
|
16255
|
+
};
|
|
16256
|
+
var checkGitStatusReadable = {
  id: "repo.git-status",
  category: "repo",
  // Verifies that `git status` can be read from the tuck repository.
  run: async (ctx) => {
    const base = { id: "repo.git-status", category: "repo" };
    if (!ctx.hasTuckDir || !ctx.hasGitDir) {
      return { ...base, status: "warn", message: "Skipped git status check because repository is unavailable" };
    }
    try {
      await getStatus(ctx.tuckDir);
    } catch (error) {
      return {
        ...base,
        status: "fail",
        message: "Failed to read git status",
        details: error instanceof Error ? error.message : String(error),
        fix: "Run `git status` inside the tuck directory and resolve repository errors"
      };
    }
    return { ...base, status: "pass", message: "Git status can be read successfully" };
  }
};
|
|
16288
|
+
var checkManifestLoadable = {
  id: "repo.manifest-loadable",
  category: "repo",
  // Checks that the manifest file exists and parsed into context.manifest.
  run: async (ctx) => {
    const base = { id: "repo.manifest-loadable", category: "repo" };
    if (!ctx.hasTuckDir) {
      return { ...base, status: "warn", message: "Skipped manifest load check because tuck is not initialized" };
    }
    if (!ctx.hasManifestFile) {
      return {
        ...base,
        status: "fail",
        message: `Manifest missing: ${collapsePath(ctx.manifestPath)}`,
        fix: "Recreate with `tuck init` or restore `.tuckmanifest.json` from backup"
      };
    }
    if (ctx.manifest) {
      return { ...base, status: "pass", message: "Manifest is present and valid" };
    }
    // File exists but loading produced no manifest object: parse failure.
    return {
      ...base,
      status: "fail",
      message: "Manifest exists but failed to parse",
      details: ctx.manifestLoadError,
      fix: "Repair `.tuckmanifest.json` using a valid schema or restore from git"
    };
  }
};
|
|
16327
|
+
var checkConfigLoadable = {
  id: "repo.config-loadable",
  category: "repo",
  // Checks that the config file exists and parsed into context.config.
  // A missing config is only a warning because defaults apply.
  run: async (ctx) => {
    const base = { id: "repo.config-loadable", category: "repo" };
    if (!ctx.hasTuckDir) {
      return { ...base, status: "warn", message: "Skipped config load check because tuck is not initialized" };
    }
    if (!ctx.hasConfigFile) {
      return {
        ...base,
        status: "warn",
        message: `Config file missing: ${collapsePath(ctx.configPath)} (defaults will be used)`,
        fix: "Run `tuck config reset` to generate a config file with defaults"
      };
    }
    if (ctx.config) {
      return { ...base, status: "pass", message: "Configuration is present and valid" };
    }
    return {
      ...base,
      status: "fail",
      message: "Configuration exists but failed to parse",
      details: ctx.configLoadError,
      fix: "Repair `.tuckrc.json` or run `tuck config reset`"
    };
  }
};
|
|
16366
|
+
var checkManifestPathSafety = {
  id: "manifest.path-safety",
  category: "manifest",
  // Validates every manifest entry: source must be safe, destination must be
  // safe, and the resolved destination must stay inside the tuck directory.
  run: async (ctx) => {
    const base = { id: "manifest.path-safety", category: "manifest" };
    if (!ctx.manifest) {
      return { ...base, status: "warn", message: "Skipped manifest path checks because manifest is unavailable" };
    }
    const describe = (error) => error instanceof Error ? error.message : String(error);
    const violations = [];
    for (const [id, file] of Object.entries(ctx.manifest.files)) {
      try {
        validateSafeSourcePath(file.source);
      } catch (error) {
        violations.push(`${id}: unsafe source ${file.source} (${describe(error)})`);
        continue;
      }
      try {
        validateSafeManifestDestination(file.destination);
      } catch (error) {
        violations.push(`${id}: unsafe destination ${file.destination} (${describe(error)})`);
        continue;
      }
      try {
        validatePathWithinRoot(join18(ctx.tuckDir, file.destination), ctx.tuckDir, "manifest destination");
      } catch (error) {
        violations.push(`${id}: destination escapes tuck dir (${describe(error)})`);
      }
    }
    if (violations.length === 0) {
      return { ...base, status: "pass", message: "All manifest paths are safe" };
    }
    return {
      ...base,
      status: "fail",
      message: `Detected ${violations.length} unsafe manifest path entr${violations.length === 1 ? "y" : "ies"}`,
      details: violations.slice(0, 3).join("; "),
      fix: "Replace unsafe paths with home-scoped sources and `files/...` destinations"
    };
  }
};
|
|
16420
|
+
var checkManifestDuplicateSources = {
  id: "manifest.duplicate-sources",
  category: "manifest",
  // Flags source paths that appear more than once after tilde expansion.
  run: async (ctx) => {
    const base = { id: "manifest.duplicate-sources", category: "manifest" };
    if (!ctx.manifest) {
      return { ...base, status: "warn", message: "Skipped duplicate source checks because manifest is unavailable" };
    }
    const seen = /* @__PURE__ */ new Set();
    const duplicates = [];
    for (const file of Object.values(ctx.manifest.files)) {
      const key = expandPath(file.source);
      if (seen.has(key)) {
        duplicates.push(file.source);
      } else {
        seen.add(key);
      }
    }
    if (duplicates.length > 0) {
      return {
        ...base,
        status: "fail",
        message: `Detected duplicate source paths (${duplicates.length})`,
        details: duplicates.slice(0, 5).join(", "),
        fix: "Keep each source path tracked exactly once in `.tuckmanifest.json`"
      };
    }
    return { ...base, status: "pass", message: "No duplicate source paths detected" };
  }
};
|
|
16459
|
+
var checkManifestDuplicateDestinations = {
  id: "manifest.duplicate-destinations",
  category: "manifest",
  // Flags repository destinations that collide after slash normalization
  // (backslashes are treated as forward slashes so Windows paths match).
  run: async (ctx) => {
    const base = { id: "manifest.duplicate-destinations", category: "manifest" };
    if (!ctx.manifest) {
      return { ...base, status: "warn", message: "Skipped duplicate destination checks because manifest is unavailable" };
    }
    const seen = /* @__PURE__ */ new Set();
    const duplicates = [];
    for (const file of Object.values(ctx.manifest.files)) {
      const key = file.destination.replace(/\\/g, "/");
      if (seen.has(key)) {
        duplicates.push(file.destination);
      } else {
        seen.add(key);
      }
    }
    if (duplicates.length > 0) {
      return {
        ...base,
        status: "fail",
        message: `Detected duplicate destinations (${duplicates.length})`,
        details: duplicates.slice(0, 5).join(", "),
        fix: "Assign each tracked file a unique destination under `files/`"
      };
    }
    return { ...base, status: "pass", message: "No duplicate repository destinations detected" };
  }
};
|
|
16498
|
+
var checkSecretScanning = {
  id: "security.secret-scanning",
  category: "security",
  // Reports whether the security.scanSecrets config flag is enabled.
  run: async (ctx) => {
    const base = { id: "security.secret-scanning", category: "security" };
    if (!ctx.config) {
      return { ...base, status: "warn", message: "Skipped secret scanning checks because config is unavailable" };
    }
    if (ctx.config.security.scanSecrets) {
      return { ...base, status: "pass", message: "Secret scanning is enabled" };
    }
    return {
      ...base,
      status: "warn",
      message: "Secret scanning is disabled",
      fix: "Enable with `tuck config set security.scanSecrets true`"
    };
  }
};
|
|
16527
|
+
var checkBackupOnRestore = {
  id: "security.backup-on-restore",
  category: "security",
  // Reports whether the files.backupOnRestore config flag is enabled.
  run: async (ctx) => {
    const base = { id: "security.backup-on-restore", category: "security" };
    if (!ctx.config) {
      return { ...base, status: "warn", message: "Skipped backup checks because config is unavailable" };
    }
    if (ctx.config.files.backupOnRestore) {
      return { ...base, status: "pass", message: "Backup before restore is enabled" };
    }
    return {
      ...base,
      status: "warn",
      message: "Backup before restore is disabled",
      fix: "Enable with `tuck config set files.backupOnRestore true`"
    };
  }
};
|
|
16556
|
+
var checkHooksSafety = {
  id: "hooks.commands",
  category: "hooks",
  // Inspects configured lifecycle hook commands and warns when they use
  // complex shell syntax (chaining, command substitution) that is harder
  // to audit. Never fails: hooks are user-authored and merely reviewed.
  run: async (context) => {
    const base = { id: "hooks.commands", category: "hooks" };
    if (!context.config) {
      return { ...base, status: "warn", message: "Skipped hook checks because config is unavailable" };
    }
    // Only non-empty string commands count as configured hooks.
    const configuredHooks = Object.entries(context.config.hooks).filter(
      ([, command]) => typeof command === "string" && command.trim().length > 0
    );
    if (configuredHooks.length === 0) {
      return { ...base, status: "pass", message: "No lifecycle hooks configured" };
    }
    // Shell metacharacters that chain or substitute commands. Note: the
    // previous `/;{1}/u` was a redundant quantifier, identical to `/;/u`.
    const suspiciousPatterns = [/&&/u, /\|\|/u, /;/u, /\$\(/u, /`/u];
    const suspicious = configuredHooks.filter(
      ([, command]) => suspiciousPatterns.some((pattern) => pattern.test(command))
    );
    if (suspicious.length > 0) {
      return {
        ...base,
        status: "warn",
        message: `Detected ${suspicious.length} hook command${suspicious.length === 1 ? "" : "s"} with complex shell syntax`,
        details: suspicious.map(([name]) => name).join(", "),
        fix: "Review hook commands and keep them minimal and auditable"
      };
    }
    return {
      ...base,
      status: "pass",
      message: `Validated ${configuredHooks.length} hook command${configuredHooks.length === 1 ? "" : "s"}`
    };
  }
};
|
|
16602
|
+
// Ordered registry of every doctor diagnostic. runDoctorChecks runs these
// sequentially against a shared context, optionally filtered by category.
var doctorChecks = [
  checkNodeVersion,
  checkHomeDirectory,
  checkTuckDirectory,
  checkGitDirectory,
  checkGitStatusReadable,
  checkManifestLoadable,
  checkConfigLoadable,
  checkManifestPathSafety,
  checkManifestDuplicateSources,
  checkManifestDuplicateDestinations,
  checkSecretScanning,
  checkBackupOnRestore,
  checkHooksSafety
];
|
|
16617
|
+
var buildDoctorSummary = (checks) => {
  // Tally check results into pass/warn/fail counters; any status other
  // than "pass" or "warn" counts as a failure.
  const summary = { passed: 0, warnings: 0, failed: 0 };
  for (const check of checks) {
    if (check.status === "pass") {
      summary.passed += 1;
    } else if (check.status === "warn") {
      summary.warnings += 1;
    } else {
      summary.failed += 1;
    }
  }
  return summary;
};
|
|
16636
|
+
var buildDoctorContext = async () => {
  // Assemble the shared context consumed by every doctor check: resolved
  // paths, existence flags, and (when loadable) the parsed manifest/config.
  const tuckDir = getTuckDir();
  const manifestPath = getManifestPath(tuckDir);
  const configPath = getConfigPath(tuckDir);
  const hasTuckDir = await pathExists(tuckDir);
  // These four probes are independent of one another, so run them
  // concurrently instead of awaiting each in sequence.
  const [isTuckDirDirectory, hasGitDir, hasManifestFile, hasConfigFile] = await Promise.all([
    hasTuckDir ? isDirectory(tuckDir) : Promise.resolve(false),
    pathExists(join18(tuckDir, ".git")),
    pathExists(manifestPath),
    pathExists(configPath)
  ]);
  const context = {
    tuckDir,
    manifestPath,
    configPath,
    hasTuckDir,
    isTuckDirDirectory,
    hasGitDir,
    hasManifestFile,
    hasConfigFile
  };
  // Load failures are captured as strings so checks can report them
  // instead of the whole doctor run crashing.
  if (hasManifestFile) {
    try {
      context.manifest = await loadManifest(tuckDir);
    } catch (error) {
      context.manifestLoadError = error instanceof Error ? error.message : String(error);
    }
  }
  if (hasConfigFile) {
    try {
      context.config = await loadConfig(tuckDir);
    } catch (error) {
      context.configLoadError = error instanceof Error ? error.message : String(error);
    }
  }
  return context;
};
|
|
16671
|
+
var normalizeCategory = (category) => {
  // Pass through only values that are known doctor categories;
  // anything falsy or unrecognized becomes undefined.
  return category && DOCTOR_CATEGORIES.includes(category) ? category : void 0;
};
|
|
16680
|
+
var runDoctorChecks = async (options = {}) => {
  // Run the (optionally category-filtered) doctor checks sequentially
  // against a shared context and return a timestamped report.
  const ctx = await buildDoctorContext();
  const category = normalizeCategory(options.category);
  const toRun = category ? doctorChecks.filter((check) => check.category === category) : doctorChecks;
  const results = [];
  for (const check of toRun) {
    try {
      const outcome = await check.run(ctx);
      results.push(outcome);
    } catch (error) {
      // A crashing check is reported as a failure rather than aborting the run.
      results.push({
        id: check.id,
        category: check.category,
        status: "fail",
        message: "Doctor check crashed unexpectedly",
        details: error instanceof Error ? error.message : String(error),
        fix: "Run with DEBUG=1 and inspect the stack trace"
      });
    }
  }
  return {
    generatedAt: (/* @__PURE__ */ new Date()).toISOString(),
    tuckDir: ctx.tuckDir,
    summary: buildDoctorSummary(results),
    checks: results
  };
};
|
|
16706
|
+
var getDoctorExitCode = (report, strict = false) => {
  // Failures always block (1); warnings block (2) only in strict mode.
  const { failed, warnings } = report.summary;
  if (failed > 0) {
    return 1;
  }
  return strict && warnings > 0 ? 2 : 0;
};
|
|
16715
|
+
|
|
16716
|
+
// src/commands/doctor.ts
|
|
16717
|
+
// Type-guard style helper: true when value is a recognized doctor category.
var isDoctorCategory = (value) => DOCTOR_CATEGORIES.includes(value);
|
|
16720
|
+
var formatCheckId = (id) => {
  // Render a "category.check" id as "category: check". Only the first dot
  // separates the parts; ids without a dot pass through unchanged.
  const dot = id.indexOf(".");
  return dot === -1 ? id : `${id.slice(0, dot)}: ${id.slice(dot + 1)}`;
};
|
|
16723
|
+
var printHumanReport = (report) => {
  // Human-readable rendering: one colored line per check, then a summary.
  prompts.intro("tuck doctor");
  for (const check of report.checks) {
    const line = `[${check.category}] ${formatCheckId(check.id)} - ${check.message}`;
    switch (check.status) {
      case "pass":
        logger.success(line);
        break;
      case "warn":
        logger.warning(line);
        break;
      default:
        logger.error(line);
    }
    if (check.details) {
      logger.dim(` Details: ${check.details}`);
    }
    if (check.fix) {
      logger.dim(` Fix: ${check.fix}`);
    }
  }
  logger.blank();
  logger.info(
    `Summary: ${report.summary.passed} passed, ${report.summary.warnings} warnings, ${report.summary.failed} failed`
  );
};
|
|
16746
|
+
var runDoctor = async (options = {}) => {
  // Entry point for the doctor command: run checks, print (JSON or human),
  // set the process exit code, and return the report for callers/tests.
  const report = await runDoctorChecks({
    category: options.category
  });
  if (options.json) {
    console.log(JSON.stringify(report, null, 2));
  } else {
    printHumanReport(report);
  }
  const exitCode = getDoctorExitCode(report, options.strict);
  process.exitCode = exitCode;
  if (!options.json) {
    const farewell = exitCode === 0 ? "Doctor checks completed successfully" : exitCode === 2 ? "Doctor completed with warnings (strict mode enabled)" : "Doctor found blocking issues";
    prompts.outro(farewell);
  }
  return report;
};
|
|
16768
|
+
// CLI wiring for `tuck doctor`; the category option validates its value
// eagerly and rejects unknown categories with a descriptive error.
var doctorCommand = new Command17("doctor").description("Run repository health and safety diagnostics").option("--json", "Output as JSON").option("--strict", "Exit non-zero on warnings").option(
  "-c, --category <category>",
  `Run only one category (${DOCTOR_CATEGORIES.join("|")})`,
  (raw) => {
    if (isDoctorCategory(raw)) {
      return raw;
    }
    throw new Error(
      `Invalid category "${raw}". Expected one of: ${DOCTOR_CATEGORIES.join(", ")}`
    );
  }
).action(async (options) => {
  await runDoctor(options);
});
|
|
16782
|
+
|
|
15619
16783
|
// src/index.ts
|
|
15620
16784
|
init_errors();
|
|
15621
16785
|
init_constants();
|
|
@@ -15655,21 +16819,21 @@ var getUpdateCommand = (packageManager) => {
|
|
|
15655
16819
|
return "npm update -g @prnv/tuck";
|
|
15656
16820
|
};
|
|
15657
16821
|
var waitForEnterOrCancel = () => {
|
|
15658
|
-
return new Promise((
|
|
16822
|
+
return new Promise((resolve3) => {
|
|
15659
16823
|
const rl = createInterface({
|
|
15660
16824
|
input: process.stdin,
|
|
15661
16825
|
output: process.stdout
|
|
15662
16826
|
});
|
|
15663
16827
|
rl.on("close", () => {
|
|
15664
|
-
|
|
16828
|
+
resolve3(false);
|
|
15665
16829
|
});
|
|
15666
16830
|
rl.on("line", () => {
|
|
15667
16831
|
rl.close();
|
|
15668
|
-
|
|
16832
|
+
resolve3(true);
|
|
15669
16833
|
});
|
|
15670
16834
|
process.on("SIGINT", () => {
|
|
15671
16835
|
rl.close();
|
|
15672
|
-
|
|
16836
|
+
resolve3(false);
|
|
15673
16837
|
});
|
|
15674
16838
|
});
|
|
15675
16839
|
};
|
|
@@ -15765,7 +16929,7 @@ init_banner();
|
|
|
15765
16929
|
init_paths();
|
|
15766
16930
|
init_manifest();
|
|
15767
16931
|
init_git();
|
|
15768
|
-
var program = new
|
|
16932
|
+
var program = new Command18();
|
|
15769
16933
|
program.name("tuck").description(DESCRIPTION).version(VERSION, "-v, --version", "Display version number").configureOutput({
|
|
15770
16934
|
outputError: (str, write) => write(chalk6.red(str))
|
|
15771
16935
|
}).addHelpText("before", customHelp(VERSION)).helpOption("-h, --help", "Display this help message").showHelpAfterError(false);
|
|
@@ -15785,6 +16949,7 @@ program.addCommand(undoCommand);
|
|
|
15785
16949
|
program.addCommand(scanCommand);
|
|
15786
16950
|
program.addCommand(secretsCommand);
|
|
15787
16951
|
program.addCommand(encryptionCommand);
|
|
16952
|
+
program.addCommand(doctorCommand);
|
|
15788
16953
|
var runDefaultAction = async () => {
|
|
15789
16954
|
const tuckDir = getTuckDir();
|
|
15790
16955
|
if (!await pathExists(tuckDir)) {
|