skuba 11.0.0-main-20250511022834 → 11.0.1-fix-nested-repo-usage-20250522060154
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -1
- package/jest/transform.js +5 -1
- package/lib/api/github/push.js +8 -1
- package/lib/api/github/push.js.map +2 -2
- package/lib/cli/configure/processing/configFile.d.ts +2 -2
- package/lib/cli/configure/processing/configFile.js +18 -21
- package/lib/cli/configure/processing/configFile.js.map +2 -2
- package/lib/cli/init/types.d.ts +3 -3
- package/lib/cli/lint/internalLints/refreshConfigFiles.d.ts +6 -2
- package/lib/cli/lint/internalLints/refreshConfigFiles.js +26 -22
- package/lib/cli/lint/internalLints/refreshConfigFiles.js.map +3 -3
- package/lib/cli/lint/internalLints/upgrade/patches/10.1.0/index.js +5 -0
- package/lib/cli/lint/internalLints/upgrade/patches/10.1.0/index.js.map +2 -2
- package/lib/cli/lint/internalLints/upgrade/patches/10.1.0/migrateNpmrcToPnpmWorkspace.d.ts +2 -0
- package/lib/cli/lint/internalLints/upgrade/patches/10.1.0/migrateNpmrcToPnpmWorkspace.js +167 -0
- package/lib/cli/lint/internalLints/upgrade/patches/10.1.0/migrateNpmrcToPnpmWorkspace.js.map +7 -0
- package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/index.js +0 -9
- package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/index.js.map +2 -2
- package/lib/utils/copy.js +1 -1
- package/lib/utils/copy.js.map +2 -2
- package/lib/utils/dir.d.ts +10 -0
- package/lib/utils/dir.js +74 -2
- package/lib/utils/dir.js.map +3 -3
- package/lib/utils/npmrc.d.ts +0 -1
- package/lib/utils/npmrc.js +0 -3
- package/lib/utils/npmrc.js.map +2 -2
- package/package.json +4 -4
- package/template/base/_.gitignore +2 -0
- package/template/base/_pnpm-workspace.yaml +10 -0
- package/template/express-rest-api/.buildkite/pipeline.yml +2 -2
- package/template/express-rest-api/Dockerfile.dev-deps +2 -2
- package/template/greeter/.buildkite/pipeline.yml +2 -2
- package/template/greeter/Dockerfile +2 -2
- package/template/greeter/package.json +1 -1
- package/template/koa-rest-api/.buildkite/pipeline.yml +2 -2
- package/template/koa-rest-api/Dockerfile.dev-deps +2 -2
- package/template/koa-rest-api/package.json +1 -1
- package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +3 -3
- package/template/lambda-sqs-worker-cdk/Dockerfile +2 -2
- package/template/lambda-sqs-worker-cdk/package.json +1 -1
- package/template/oss-npm-package/_package.json +1 -1
- package/template/private-npm-package/_package.json +1 -1
- package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.d.ts +0 -2
- package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.js +0 -95
- package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.js.map +0 -7
- package/template/base/_.npmrc +0 -9
package/README.md
CHANGED
@@ -2,7 +2,8 @@
 
 ---
 
-[](https://www.npmjs.com/package/skuba)
+[](https://www.npmjs.com/package/skuba)
+[](https://www.npmjs.com/package/skuba)
 
 ---
 
package/jest/transform.js
CHANGED
@@ -38,7 +38,11 @@ const tsconfig = BROKEN_MODULE_RESOLUTIONS.has(
   ? { tsconfig: { moduleResolution: 'Node' } }
   : undefined;
 
-
+/**
+ * Rewrite `ts-jest` transformations using our resolved `TS_JEST_PATH`.
+ *
+ * @type {import ('@jest/types').Config.InitialOptions['transform']}
+ */
 module.exports.transform = Object.fromEntries(
   Object.entries(defaults.transform).map(([key, value]) => {
     if (typeof value === 'string') {
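The added JSDoc documents the rewrite that follows it. A rough TypeScript sketch of that rewrite — TS_JEST_PATH and the `defaults` object are stand-ins here, since the diff shows only a fragment of jest/transform.js:

    // Hypothetical resolved path; the real value comes from earlier in the file.
    const TS_JEST_PATH = '/resolved/node_modules/ts-jest';

    const defaults = {
      transform: { '^.+\\.tsx?$': 'ts-jest' } as Record<string, string | [string, object]>,
    };

    // Swap string-valued `ts-jest` entries for the resolved path, leaving
    // tuple-valued entries (transformer + options) untouched in this sketch.
    const transform = Object.fromEntries(
      Object.entries(defaults.transform).map(([key, value]) =>
        typeof value === 'string' ? [key, TS_JEST_PATH] : [key, value],
      ),
    );

    console.log(transform); // { '^.+\\.tsx?$': '/resolved/node_modules/ts-jest' }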
package/lib/api/github/push.js
CHANGED
@@ -93,10 +93,17 @@ const readFileChanges = async (dir, changedFiles) => {
     const pathDir = import_path.default.relative(gitRoot, dir);
     return import_path.default.join(pathDir, filePath);
   };
+  const toRootPath = (filePath) => {
+    if (!gitRoot) {
+      return filePath;
+    }
+    const pathDir = import_path.default.resolve(gitRoot, dir);
+    return import_path.default.join(pathDir, filePath);
+  };
   const additions = await Promise.all(
     added.map(async (filePath) => ({
       path: toGitHubPath(filePath),
-      contents: await import_fs_extra.default.promises.readFile(filePath, {
+      contents: await import_fs_extra.default.promises.readFile(toRootPath(filePath), {
         encoding: "base64"
       })
     }))
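This hunk is the heart of the nested-repo fix: file contents were previously read relative to the working directory, which fails when the Git root sits above `dir`. A minimal TypeScript sketch of the path arithmetic, using hypothetical locations:

    import path from 'path';

    // A skuba project nested inside a larger Git repository (made-up paths).
    const gitRoot = '/repo';
    const dir = '/repo/packages/app';

    // GitHub's FileChanges input wants repository-relative paths...
    const toGitHubPath = (filePath: string) =>
      path.join(path.relative(gitRoot, dir), filePath);

    // ...while fs.readFile needs an on-disk location, so 11.0.1 resolves the
    // file against the Git root before reading it.
    const toRootPath = (filePath: string) =>
      path.join(path.resolve(gitRoot, dir), filePath);

    console.log(toGitHubPath('src/app.ts')); // packages/app/src/app.ts
    console.log(toRootPath('src/app.ts'));   // /repo/packages/app/src/app.ts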
package/lib/api/github/push.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../src/api/github/push.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport type { CreateCommitOnBranchInput } from '@octokit/graphql-schema';\nimport fs from 'fs-extra';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\nimport { graphql } from './octokit';\n\ninterface CreateCommitResult {\n createCommitOnBranch: {\n commit: {\n oid: string;\n };\n };\n}\n\ninterface UploadAllFileChangesParams {\n dir: string;\n /**\n * The branch name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n\n /**\n * File changes to exclude from the upload.\n *\n * Defaults to `[]` (no exclusions).\n */\n ignore?: Git.ChangedFile[];\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * Updates the local Git repository to match the new remote branch state\n */\n updateLocal?: boolean;\n}\n\n/**\n * Retrieves all file changes from the local Git repository using\n * `getChangedFiles`, then uploads the changes to a specified GitHub branch\n * using `uploadFileChanges`.\n *\n * Returns the commit ID, or `undefined` if there are no changes to commit.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository unless `updateLocal` is\n * specified.\n */\nexport const uploadAllFileChanges = async ({\n branch,\n dir,\n messageHeadline,\n\n ignore,\n messageBody,\n updateLocal = false,\n}: UploadAllFileChangesParams): Promise<string | undefined> => {\n const changedFiles = await Git.getChangedFiles({ dir, ignore });\n\n if (!changedFiles.length) {\n return;\n }\n\n const fileChanges = await readFileChanges(dir, changedFiles);\n\n const commitId = await uploadFileChanges({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n });\n\n if (updateLocal) {\n await Promise.all(\n [...fileChanges.additions, ...fileChanges.deletions].map((file) =>\n fs.rm(file.path),\n ),\n );\n\n await Git.fastForwardBranch({\n ref: branch,\n auth: { type: 'gitHubApp' },\n dir,\n });\n }\n\n return commitId;\n};\n\ninterface FileAddition {\n contents: unknown;\n path: string;\n}\n\ninterface FileDeletion {\n path: string;\n}\n\nexport interface FileChanges {\n additions: FileAddition[];\n deletions: FileDeletion[];\n}\n\n/**\n * Takes a list of `ChangedFiles`, reads them from the file system, and maps\n * them to GitHub GraphQL `FileChanges`.\n *\n * https://docs.github.com/en/graphql/reference/input-objects#filechanges\n */\nexport const readFileChanges = async (\n dir: string,\n changedFiles: Git.ChangedFile[],\n): Promise<FileChanges> => {\n const { added, deleted } = changedFiles.reduce<{\n added: string[];\n deleted: string[];\n }>(\n (files, changedFile) => {\n const filePath = changedFile.path;\n if (changedFile.state === 'deleted') {\n files.deleted.push(filePath);\n } else {\n files.added.push(filePath);\n }\n\n return files;\n },\n { added: [], deleted: [] },\n );\n\n const gitRoot = await Git.findRoot({ dir });\n\n const toGitHubPath = (filePath: string) => {\n if (!gitRoot) {\n return filePath;\n }\n\n const pathDir = path.relative(gitRoot, dir);\n\n return path.join(pathDir, filePath);\n };\n\n const additions: FileAddition[] = await Promise.all(\n added.map(async (filePath) => ({\n path: toGitHubPath(filePath),\n contents: await fs.promises.readFile(filePath, {\n encoding: 'base64',\n }),\n })),\n );\n\n const deletions: FileDeletion[] = deleted.map((filePath) => ({\n path: toGitHubPath(filePath),\n }));\n\n return {\n additions,\n deletions,\n 
};\n};\n\ninterface UploadFileChangesParams {\n dir: string;\n /**\n * The branch name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * File additions and deletions\n */\n fileChanges: FileChanges;\n}\n\n/**\n * Uploads file changes from the local workspace to a specified GitHub branch.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository.\n */\nexport const uploadFileChanges = async ({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n}: UploadFileChangesParams): Promise<string> => {\n const authToken = apiTokenFromEnvironment();\n if (!authToken) {\n throw new Error(\n 'Could not read a GitHub API token from the environment. Please set GITHUB_API_TOKEN or GITHUB_TOKEN.',\n );\n }\n\n const [{ owner, repo }, headCommitId] = await Promise.all([\n Git.getOwnerAndRepo({ dir }),\n Git.getHeadCommitId({ dir }),\n ]);\n\n const input: CreateCommitOnBranchInput = {\n branch: {\n repositoryNameWithOwner: `${owner}/${repo}`,\n branchName: branch,\n },\n message: {\n headline: messageHeadline,\n body: messageBody,\n },\n expectedHeadOid: headCommitId,\n clientMutationId: 'skuba',\n fileChanges,\n };\n\n const result = await graphql<CreateCommitResult>(\n `\n mutation Mutation($input: CreateCommitOnBranchInput!) {\n createCommitOnBranch(input: $input) {\n commit {\n oid\n }\n }\n }\n `,\n {\n input,\n headers: {\n authorization: `Bearer ${authToken}`,\n },\n },\n );\n\n return result.createCommitOnBranch.commit.oid;\n};\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAGjB,sBAAe;AAEf,UAAqB;AAErB,yBAAwC;AACxC,qBAAwB;AAiDjB,MAAM,uBAAuB,OAAO;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA,cAAc;AAChB,MAA+D;AAC7D,QAAM,eAAe,MAAM,IAAI,gBAAgB,EAAE,KAAK,OAAO,CAAC;AAE9D,MAAI,CAAC,aAAa,QAAQ;AACxB;AAAA,EACF;AAEA,QAAM,cAAc,MAAM,gBAAgB,KAAK,YAAY;AAE3D,QAAM,WAAW,MAAM,kBAAkB;AAAA,IACvC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,aAAa;AACf,UAAM,QAAQ;AAAA,MACZ,CAAC,GAAG,YAAY,WAAW,GAAG,YAAY,SAAS,EAAE;AAAA,QAAI,CAAC,SACxD,gBAAAA,QAAG,GAAG,KAAK,IAAI;AAAA,MACjB;AAAA,IACF;AAEA,UAAM,IAAI,kBAAkB;AAAA,MAC1B,KAAK;AAAA,MACL,MAAM,EAAE,MAAM,YAAY;AAAA,MAC1B;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAsBO,MAAM,kBAAkB,OAC7B,KACA,iBACyB;AACzB,QAAM,EAAE,OAAO,QAAQ,IAAI,aAAa;AAAA,IAItC,CAAC,OAAO,gBAAgB;AACtB,YAAM,WAAW,YAAY;AAC7B,UAAI,YAAY,UAAU,WAAW;AACnC,cAAM,QAAQ,KAAK,QAAQ;AAAA,MAC7B,OAAO;AACL,cAAM,MAAM,KAAK,QAAQ;AAAA,MAC3B;AAEA,aAAO;AAAA,IACT;AAAA,IACA,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,EAAE;AAAA,EAC3B;AAEA,QAAM,UAAU,MAAM,IAAI,SAAS,EAAE,IAAI,CAAC;AAE1C,QAAM,eAAe,CAAC,aAAqB;AACzC,QAAI,CAAC,SAAS;AACZ,aAAO;AAAA,IACT;AAEA,UAAM,UAAU,YAAAC,QAAK,SAAS,SAAS,GAAG;AAE1C,WAAO,YAAAA,QAAK,KAAK,SAAS,QAAQ;AAAA,EACpC;AAEA,QAAM,YAA4B,MAAM,QAAQ;AAAA,IAC9C,MAAM,IAAI,OAAO,cAAc;AAAA,MAC7B,MAAM,aAAa,QAAQ;AAAA,MAC3B,UAAU,MAAM,gBAAAD,QAAG,SAAS,SAAS,
+
"sourcesContent": ["import path from 'path';\n\nimport type { CreateCommitOnBranchInput } from '@octokit/graphql-schema';\nimport fs from 'fs-extra';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\nimport { graphql } from './octokit';\n\ninterface CreateCommitResult {\n createCommitOnBranch: {\n commit: {\n oid: string;\n };\n };\n}\n\ninterface UploadAllFileChangesParams {\n dir: string;\n /**\n * The branch name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n\n /**\n * File changes to exclude from the upload.\n *\n * Defaults to `[]` (no exclusions).\n */\n ignore?: Git.ChangedFile[];\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * Updates the local Git repository to match the new remote branch state\n */\n updateLocal?: boolean;\n}\n\n/**\n * Retrieves all file changes from the local Git repository using\n * `getChangedFiles`, then uploads the changes to a specified GitHub branch\n * using `uploadFileChanges`.\n *\n * Returns the commit ID, or `undefined` if there are no changes to commit.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository unless `updateLocal` is\n * specified.\n */\nexport const uploadAllFileChanges = async ({\n branch,\n dir,\n messageHeadline,\n\n ignore,\n messageBody,\n updateLocal = false,\n}: UploadAllFileChangesParams): Promise<string | undefined> => {\n const changedFiles = await Git.getChangedFiles({ dir, ignore });\n\n if (!changedFiles.length) {\n return;\n }\n\n const fileChanges = await readFileChanges(dir, changedFiles);\n\n const commitId = await uploadFileChanges({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n });\n\n if (updateLocal) {\n await Promise.all(\n [...fileChanges.additions, ...fileChanges.deletions].map((file) =>\n fs.rm(file.path),\n ),\n );\n\n await Git.fastForwardBranch({\n ref: branch,\n auth: { type: 'gitHubApp' },\n dir,\n });\n }\n\n return commitId;\n};\n\ninterface FileAddition {\n contents: unknown;\n path: string;\n}\n\ninterface FileDeletion {\n path: string;\n}\n\nexport interface FileChanges {\n additions: FileAddition[];\n deletions: FileDeletion[];\n}\n\n/**\n * Takes a list of `ChangedFiles`, reads them from the file system, and maps\n * them to GitHub GraphQL `FileChanges`.\n *\n * https://docs.github.com/en/graphql/reference/input-objects#filechanges\n */\nexport const readFileChanges = async (\n dir: string,\n changedFiles: Git.ChangedFile[],\n): Promise<FileChanges> => {\n const { added, deleted } = changedFiles.reduce<{\n added: string[];\n deleted: string[];\n }>(\n (files, changedFile) => {\n const filePath = changedFile.path;\n if (changedFile.state === 'deleted') {\n files.deleted.push(filePath);\n } else {\n files.added.push(filePath);\n }\n\n return files;\n },\n { added: [], deleted: [] },\n );\n\n const gitRoot = await Git.findRoot({ dir });\n\n const toGitHubPath = (filePath: string) => {\n if (!gitRoot) {\n return filePath;\n }\n\n const pathDir = path.relative(gitRoot, dir);\n\n return path.join(pathDir, filePath);\n };\n\n const toRootPath = (filePath: string) => {\n if (!gitRoot) {\n return filePath;\n }\n\n const pathDir = path.resolve(gitRoot, dir);\n\n return path.join(pathDir, filePath);\n };\n\n const additions: FileAddition[] = await Promise.all(\n added.map(async (filePath) => ({\n path: toGitHubPath(filePath),\n contents: await fs.promises.readFile(toRootPath(filePath), {\n 
encoding: 'base64',\n }),\n })),\n );\n\n const deletions: FileDeletion[] = deleted.map((filePath) => ({\n path: toGitHubPath(filePath),\n }));\n\n return {\n additions,\n deletions,\n };\n};\n\ninterface UploadFileChangesParams {\n dir: string;\n /**\n * The branch name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * File additions and deletions\n */\n fileChanges: FileChanges;\n}\n\n/**\n * Uploads file changes from the local workspace to a specified GitHub branch.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository.\n */\nexport const uploadFileChanges = async ({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n}: UploadFileChangesParams): Promise<string> => {\n const authToken = apiTokenFromEnvironment();\n if (!authToken) {\n throw new Error(\n 'Could not read a GitHub API token from the environment. Please set GITHUB_API_TOKEN or GITHUB_TOKEN.',\n );\n }\n\n const [{ owner, repo }, headCommitId] = await Promise.all([\n Git.getOwnerAndRepo({ dir }),\n Git.getHeadCommitId({ dir }),\n ]);\n\n const input: CreateCommitOnBranchInput = {\n branch: {\n repositoryNameWithOwner: `${owner}/${repo}`,\n branchName: branch,\n },\n message: {\n headline: messageHeadline,\n body: messageBody,\n },\n expectedHeadOid: headCommitId,\n clientMutationId: 'skuba',\n fileChanges,\n };\n\n const result = await graphql<CreateCommitResult>(\n `\n mutation Mutation($input: CreateCommitOnBranchInput!) {\n createCommitOnBranch(input: $input) {\n commit {\n oid\n }\n }\n }\n `,\n {\n input,\n headers: {\n authorization: `Bearer ${authToken}`,\n },\n },\n );\n\n return result.createCommitOnBranch.commit.oid;\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAGjB,sBAAe;AAEf,UAAqB;AAErB,yBAAwC;AACxC,qBAAwB;AAiDjB,MAAM,uBAAuB,OAAO;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA,cAAc;AAChB,MAA+D;AAC7D,QAAM,eAAe,MAAM,IAAI,gBAAgB,EAAE,KAAK,OAAO,CAAC;AAE9D,MAAI,CAAC,aAAa,QAAQ;AACxB;AAAA,EACF;AAEA,QAAM,cAAc,MAAM,gBAAgB,KAAK,YAAY;AAE3D,QAAM,WAAW,MAAM,kBAAkB;AAAA,IACvC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,aAAa;AACf,UAAM,QAAQ;AAAA,MACZ,CAAC,GAAG,YAAY,WAAW,GAAG,YAAY,SAAS,EAAE;AAAA,QAAI,CAAC,SACxD,gBAAAA,QAAG,GAAG,KAAK,IAAI;AAAA,MACjB;AAAA,IACF;AAEA,UAAM,IAAI,kBAAkB;AAAA,MAC1B,KAAK;AAAA,MACL,MAAM,EAAE,MAAM,YAAY;AAAA,MAC1B;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAsBO,MAAM,kBAAkB,OAC7B,KACA,iBACyB;AACzB,QAAM,EAAE,OAAO,QAAQ,IAAI,aAAa;AAAA,IAItC,CAAC,OAAO,gBAAgB;AACtB,YAAM,WAAW,YAAY;AAC7B,UAAI,YAAY,UAAU,WAAW;AACnC,cAAM,QAAQ,KAAK,QAAQ;AAAA,MAC7B,OAAO;AACL,cAAM,MAAM,KAAK,QAAQ;AAAA,MAC3B;AAEA,aAAO;AAAA,IACT;AAAA,IACA,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,EAAE;AAAA,EAC3B;AAEA,QAAM,UAAU,MAAM,IAAI,SAAS,EAAE,IAAI,CAAC;AAE1C,QAAM,eAAe,CAAC,aAAqB;AACzC,QAAI,CAAC,SAAS;AACZ,aAAO;AAAA,IACT;AAEA,UAAM,UAAU,YAAAC,QAAK,SAAS,SAAS,GAAG;AAE1C,WAAO,YAAAA,QAAK,KAAK,SAAS,QAAQ;AAAA,EACpC;AAEA,QAAM,aAAa,CAAC,aAAqB;AACvC,QAAI,CAAC,SAAS;AACZ,aAAO;AAAA,IACT;AAEA,UAAM,UAAU,YAAAA,QAAK,QAAQ,SAAS,GAAG;AAEzC,WAAO,YAAAA,QAAK,KAAK,SAAS,QAAQ;AAAA,EACpC;AAEA,QAAM,YAA4B,MAAM,QAAQ;AAAA,IAC9C,MAAM,IAAI,OAAO,cAAc;AAAA,MAC7B,MAAM,aAAa,QAAQ;AAAA,MAC3B,UAAU,MAAM,gBAAAD,QAAG,SAAS,SAAS,WAAW,QAAQ,GAAG;AAAA,QACzD,UAAU;AAAA,MACZ,CAAC;AAAA,IACH,EAAE;AAAA,EACJ;AAEA,QAAM,YAA4B,QAAQ,IAAI,CAAC,cAAc;AAAA,IAC3D,MAAM,aAAa,QAAQ;AAAA,EAC7B,EAAE;AAEF,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;AA6BO,MAAM,oBAAoB,OAAO;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAgD;AAC9C,QAAM,gBAAY,4CAAwB;AAC1C,MAAI,CAAC,WAAW;AACd,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,CAAC,EAAE,OAAO,KAAK,GAAG,YAAY,IAAI,MAAM,QAAQ,IAAI;AAAA,IACxD,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,IAC3B,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,EAC7B,CAAC;AAED,QAAM,QAAmC;AAAA,IACvC,QAAQ;AAAA,MACN,yBAAyB,GAAG,KAAK,IAAI,IAAI;AAAA,MACzC,YAAY;AAAA,IACd;AAAA,IACA,SAAS;AAAA,MACP,UAAU;AAAA,MACV,MAAM;AAAA,IACR;AAAA,IACA,iBAAiB;AAAA,IACjB,kBAAkB;AAAA,IAClB;AAAA,EACF;AAEA,QAAM,SAAS,UAAM;AAAA,IACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASA;AAAA,MACE;AAAA,MACA,SAAS;AAAA,QACP,eAAe,UAAU,SAAS;AAAA,MACpC;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,qBAAqB,OAAO;AAC5C;",
   "names": ["fs", "path"]
 }
package/lib/cli/configure/processing/configFile.d.ts
CHANGED
@@ -5,5 +5,5 @@
  * than `lib/`) but they generally represent the same _intent_.
  */
 export declare const generateIgnoreFileSimpleVariants: (patterns: string[]) => Set<string>;
-export declare const
-export declare const mergeWithConfigFile: (rawTemplateFile: string, fileType?: "ignore" | "
+export declare const replaceManagedSection: (input: string, template: string) => string;
+export declare const mergeWithConfigFile: (rawTemplateFile: string, fileType?: "ignore" | "pnpm-workspace") => (rawInputFile?: string) => string;
package/lib/cli/configure/processing/configFile.js
CHANGED
@@ -19,8 +19,8 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
 var configFile_exports = {};
 __export(configFile_exports, {
   generateIgnoreFileSimpleVariants: () => generateIgnoreFileSimpleVariants,
-
-
+  mergeWithConfigFile: () => mergeWithConfigFile,
+  replaceManagedSection: () => replaceManagedSection
 });
 module.exports = __toCommonJS(configFile_exports);
 const OUTDATED_PATTERNS = ["node_modules_bak/", "tmp-*/"];
@@ -46,24 +46,18 @@ const generateIgnoreFileSimpleVariants = (patterns) => {
   set.delete("");
   return set;
 };
-const
-  const set = /* @__PURE__ */ new Set();
-  for (const pattern of patterns) {
-    set.add(pattern);
-    const match = /^(?<key>[^"=]+)="?(?<value>[^"=]+)"?$/.exec(pattern);
-    if (!match?.groups) {
-      continue;
-    }
-    const { key, value } = match.groups;
-    set.add(`${key}=${value}`);
-    set.add(`${key}="${value}"`);
-  }
-  set.delete("");
-  return set;
-};
+const replaceManagedSection = (input, template) => input.replace(/# managed by skuba[\s\S]*# end managed by skuba/, template);
 const mergeWithConfigFile = (rawTemplateFile, fileType = "ignore") => {
   const templateFile = rawTemplateFile.trim();
-
+  let generator;
+  switch (fileType) {
+    case "ignore":
+      generator = generateIgnoreFileSimpleVariants;
+      break;
+    case "pnpm-workspace":
+      generator = () => /* @__PURE__ */ new Set();
+      break;
+  }
   const templatePatterns = generator([
     ...OUTDATED_PATTERNS,
     ...templateFile.split("\n").map((line) => line.trim())
@@ -73,7 +67,10 @@ const mergeWithConfigFile = (rawTemplateFile, fileType = "ignore") => {
     return `${templateFile}
 `;
   }
-  const replacedFile =
+  const replacedFile = replaceManagedSection(
+    rawInputFile.replace(/\r?\n/g, "\n"),
+    templateFile
+  );
   if (replacedFile.includes(templateFile)) {
     return replacedFile;
   }
@@ -86,7 +83,7 @@ const mergeWithConfigFile = (rawTemplateFile, fileType = "ignore") => {
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
   generateIgnoreFileSimpleVariants,
-
-
+  mergeWithConfigFile,
+  replaceManagedSection
 });
 //# sourceMappingURL=configFile.js.map
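The new `replaceManagedSection` helper swaps out only the block between skuba's managed-section markers, leaving surrounding user content alone. A self-contained TypeScript example with made-up file contents:

    const replaceManagedSection = (input: string, template: string) =>
      input.replace(/# managed by skuba[\s\S]*# end managed by skuba/, template);

    const existing = [
      '# managed by skuba',
      'old managed content',
      '# end managed by skuba',
      '',
      '# user-added lines survive untouched',
      'custom-setting',
    ].join('\n');

    const template = [
      '# managed by skuba',
      'new managed content',
      '# end managed by skuba',
    ].join('\n');

    // Prints the template followed by the preserved user lines.
    console.log(replaceManagedSection(existing, template));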
package/lib/cli/configure/processing/configFile.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../../src/cli/configure/processing/configFile.ts"],
-
"sourcesContent": ["/**\n * Patterns that are superseded by skuba's bundled ignore file patterns and are\n * non-trivial to derive using e.g. `generateSimpleVariants`.\n */\nconst OUTDATED_PATTERNS = ['node_modules_bak/', 'tmp-*/'];\n\nconst ASTERISKS = /\\*/g;\nconst LEADING_SLASH = /^\\//;\nconst TRAILING_SLASH = /\\/$/;\n\n/**\n * Generate simple variants of an ignore pattern for exact matching purposes.\n *\n * Note that these patterns are not actually equivalent (e.g. `lib` matches more\n * than `lib/`) but they generally represent the same _intent_.\n */\nexport const generateIgnoreFileSimpleVariants = (patterns: string[]) => {\n const set = new Set<string>();\n\n for (const pattern of patterns) {\n const deAsterisked = pattern.replace(ASTERISKS, '');\n const stripped = deAsterisked\n .replace(LEADING_SLASH, '')\n .replace(TRAILING_SLASH, '');\n\n set.add(pattern);\n set.add(deAsterisked);\n set.add(deAsterisked.replace(LEADING_SLASH, ''));\n set.add(deAsterisked.replace(TRAILING_SLASH, ''));\n set.add(stripped);\n\n if (stripped !== '') {\n set.add(`/${stripped}`);\n set.add(`${stripped}/`);\n set.add(`/${stripped}/`);\n }\n }\n\n set.delete('');\n\n return set;\n};\n\nexport const
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAIA,MAAM,oBAAoB,CAAC,qBAAqB,QAAQ;AAExD,MAAM,YAAY;AAClB,MAAM,gBAAgB;AACtB,MAAM,iBAAiB;AAQhB,MAAM,mCAAmC,CAAC,aAAuB;AACtE,QAAM,MAAM,oBAAI,IAAY;AAE5B,aAAW,WAAW,UAAU;AAC9B,UAAM,eAAe,QAAQ,QAAQ,WAAW,EAAE;AAClD,UAAM,WAAW,aACd,QAAQ,eAAe,EAAE,EACzB,QAAQ,gBAAgB,EAAE;AAE7B,QAAI,IAAI,OAAO;AACf,QAAI,IAAI,YAAY;AACpB,QAAI,IAAI,aAAa,QAAQ,eAAe,EAAE,CAAC;AAC/C,QAAI,IAAI,aAAa,QAAQ,gBAAgB,EAAE,CAAC;AAChD,QAAI,IAAI,QAAQ;AAEhB,QAAI,aAAa,IAAI;AACnB,UAAI,IAAI,IAAI,QAAQ,EAAE;AACtB,UAAI,IAAI,GAAG,QAAQ,GAAG;AACtB,UAAI,IAAI,IAAI,QAAQ,GAAG;AAAA,IACzB;AAAA,EACF;AAEA,MAAI,OAAO,EAAE;AAEb,SAAO;AACT;AAEO,MAAM,
+
"sourcesContent": ["/**\n * Patterns that are superseded by skuba's bundled ignore file patterns and are\n * non-trivial to derive using e.g. `generateSimpleVariants`.\n */\nconst OUTDATED_PATTERNS = ['node_modules_bak/', 'tmp-*/'];\n\nconst ASTERISKS = /\\*/g;\nconst LEADING_SLASH = /^\\//;\nconst TRAILING_SLASH = /\\/$/;\n\n/**\n * Generate simple variants of an ignore pattern for exact matching purposes.\n *\n * Note that these patterns are not actually equivalent (e.g. `lib` matches more\n * than `lib/`) but they generally represent the same _intent_.\n */\nexport const generateIgnoreFileSimpleVariants = (patterns: string[]) => {\n const set = new Set<string>();\n\n for (const pattern of patterns) {\n const deAsterisked = pattern.replace(ASTERISKS, '');\n const stripped = deAsterisked\n .replace(LEADING_SLASH, '')\n .replace(TRAILING_SLASH, '');\n\n set.add(pattern);\n set.add(deAsterisked);\n set.add(deAsterisked.replace(LEADING_SLASH, ''));\n set.add(deAsterisked.replace(TRAILING_SLASH, ''));\n set.add(stripped);\n\n if (stripped !== '') {\n set.add(`/${stripped}`);\n set.add(`${stripped}/`);\n set.add(`/${stripped}/`);\n }\n }\n\n set.delete('');\n\n return set;\n};\n\nexport const replaceManagedSection = (input: string, template: string) =>\n input.replace(/# managed by skuba[\\s\\S]*# end managed by skuba/, template);\n\nexport const mergeWithConfigFile = (\n rawTemplateFile: string,\n fileType: 'ignore' | 'pnpm-workspace' = 'ignore',\n) => {\n const templateFile = rawTemplateFile.trim();\n\n let generator: (s: string[]) => Set<string>;\n\n switch (fileType) {\n case 'ignore':\n generator = generateIgnoreFileSimpleVariants;\n break;\n case 'pnpm-workspace':\n generator = () => new Set<string>();\n break;\n }\n\n const templatePatterns = generator([\n ...OUTDATED_PATTERNS,\n ...templateFile.split('\\n').map((line) => line.trim()),\n ]);\n\n return (rawInputFile?: string) => {\n if (rawInputFile === undefined) {\n return `${templateFile}\\n`;\n }\n\n const replacedFile = replaceManagedSection(\n rawInputFile.replace(/\\r?\\n/g, '\\n'),\n templateFile,\n );\n\n if (replacedFile.includes(templateFile)) {\n return replacedFile;\n }\n\n // Crunch the existing lines of a non-skuba config.\n const migratedFile = replacedFile\n .split('\\n')\n .filter((line) => !templatePatterns.has(line))\n .join('\\n')\n .replace(/\\n{3,}/g, '\\n\\n')\n .trim();\n\n const outputFile = [templateFile, migratedFile].join('\\n\\n').trim();\n\n return `${outputFile}\\n`;\n };\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAIA,MAAM,oBAAoB,CAAC,qBAAqB,QAAQ;AAExD,MAAM,YAAY;AAClB,MAAM,gBAAgB;AACtB,MAAM,iBAAiB;AAQhB,MAAM,mCAAmC,CAAC,aAAuB;AACtE,QAAM,MAAM,oBAAI,IAAY;AAE5B,aAAW,WAAW,UAAU;AAC9B,UAAM,eAAe,QAAQ,QAAQ,WAAW,EAAE;AAClD,UAAM,WAAW,aACd,QAAQ,eAAe,EAAE,EACzB,QAAQ,gBAAgB,EAAE;AAE7B,QAAI,IAAI,OAAO;AACf,QAAI,IAAI,YAAY;AACpB,QAAI,IAAI,aAAa,QAAQ,eAAe,EAAE,CAAC;AAC/C,QAAI,IAAI,aAAa,QAAQ,gBAAgB,EAAE,CAAC;AAChD,QAAI,IAAI,QAAQ;AAEhB,QAAI,aAAa,IAAI;AACnB,UAAI,IAAI,IAAI,QAAQ,EAAE;AACtB,UAAI,IAAI,GAAG,QAAQ,GAAG;AACtB,UAAI,IAAI,IAAI,QAAQ,GAAG;AAAA,IACzB;AAAA,EACF;AAEA,MAAI,OAAO,EAAE;AAEb,SAAO;AACT;AAEO,MAAM,wBAAwB,CAAC,OAAe,aACnD,MAAM,QAAQ,mDAAmD,QAAQ;AAEpE,MAAM,sBAAsB,CACjC,iBACA,WAAwC,aACrC;AACH,QAAM,eAAe,gBAAgB,KAAK;AAE1C,MAAI;AAEJ,UAAQ,UAAU;AAAA,IAChB,KAAK;AACH,kBAAY;AACZ;AAAA,IACF,KAAK;AACH,kBAAY,MAAM,oBAAI,IAAY;AAClC;AAAA,EACJ;AAEA,QAAM,mBAAmB,UAAU;AAAA,IACjC,GAAG;AAAA,IACH,GAAG,aAAa,MAAM,IAAI,EAAE,IAAI,CAAC,SAAS,KAAK,KAAK,CAAC;AAAA,EACvD,CAAC;AAED,SAAO,CAAC,iBAA0B;AAChC,QAAI,iBAAiB,QAAW;AAC9B,aAAO,GAAG,YAAY;AAAA;AAAA,IACxB;AAEA,UAAM,eAAe;AAAA,MACnB,aAAa,QAAQ,UAAU,IAAI;AAAA,MACnC;AAAA,IACF;AAEA,QAAI,aAAa,SAAS,YAAY,GAAG;AACvC,aAAO;AAAA,IACT;AAGA,UAAM,eAAe,aAClB,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,CAAC,iBAAiB,IAAI,IAAI,CAAC,EAC5C,KAAK,IAAI,EACT,QAAQ,WAAW,MAAM,EACzB,KAAK;AAER,UAAM,aAAa,CAAC,cAAc,YAAY,EAAE,KAAK,MAAM,EAAE,KAAK;AAElE,WAAO,GAAG,UAAU;AAAA;AAAA,EACtB;AACF;",
   "names": []
 }
package/lib/cli/init/types.d.ts
CHANGED
@@ -54,7 +54,7 @@ export declare const initConfigInputSchema: z.ZodObject<{
     };
 }>;
 export type InitConfig = z.infer<typeof initConfigSchema>;
-declare const initConfigSchema: z.ZodObject<
+declare const initConfigSchema: z.ZodObject<Omit<{
     destinationDir: z.ZodString;
     templateComplete: z.ZodBoolean;
     templateData: z.ZodObject<{
@@ -74,7 +74,7 @@ declare const initConfigSchema: z.ZodObject<z.objectUtil.extendShape<Omit<{
         defaultBranch: z.ZodString;
     }, z.ZodString, "strip">>;
     templateName: z.ZodString;
-}, "templateData"
+}, "templateData"> & {
     templateData: z.ZodObject<{
         ownerName: z.ZodString;
         repoName: z.ZodString;
@@ -100,7 +100,7 @@ declare const initConfigSchema: z.ZodObject<z.objectUtil.extendShape<Omit<{
     entryPoint: z.ZodOptional<z.ZodString>;
     packageManager: z.ZodDefault<z.ZodEnum<["pnpm", "yarn"]>>;
     type: z.ZodOptional<z.ZodUnion<[z.ZodLiteral<"application">, z.ZodLiteral<"package">]>>;
-}
+}, "strip", z.ZodTypeAny, {
     packageManager: "yarn" | "pnpm";
     templateName: string;
     destinationDir: string;
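These declaration-only changes look like churn from regenerating the emitted types: the old `z.objectUtil.extendShape<Omit<...>>` form and the new `Omit<...> & { ... }` form both describe a schema that omits `templateData` and re-extends it. An inferred TypeScript sketch of that pattern — field names abbreviated, not skuba's actual source:

    import { z } from 'zod';

    const initConfigInput = z.object({
      destinationDir: z.string(),
      templateComplete: z.boolean(),
      templateData: z.object({ ownerName: z.string() }),
      templateName: z.string(),
    });

    // .omit().extend() is what yields the Omit<...> & { templateData: ... }
    // shape seen in the regenerated declaration.
    const initConfigSchema = initConfigInput.omit({ templateData: true }).extend({
      templateData: z.object({
        ownerName: z.string(),
        repoName: z.string(),
        defaultBranch: z.string(),
      }),
    });

    export type InitConfig = z.infer<typeof initConfigSchema>;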
package/lib/cli/lint/internalLints/refreshConfigFiles.d.ts
CHANGED
@@ -1,11 +1,15 @@
 import type { Logger } from '../../../utils/logging';
 import { type PackageManagerConfig } from '../../../utils/packageManager';
 import type { InternalLintResult } from '../internal';
+type ConditionOptions = {
+    packageManager: PackageManagerConfig;
+    isInWorkspaceRoot: boolean;
+};
 type RefreshableConfigFile = {
     name: string;
-    type: 'ignore' | '
+    type: 'ignore' | 'pnpm-workspace';
     additionalMapping?: (s: string, packageManager: PackageManagerConfig) => string;
-    if?: (
+    if?: (options: ConditionOptions) => boolean;
 };
 export declare const REFRESHABLE_CONFIG_FILES: RefreshableConfigFile[];
 export declare const refreshConfigFiles: (mode: "format" | "lint", logger: Logger) => Promise<{
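The new `ConditionOptions` lets a refreshable config file opt out based on workspace position as well as package manager — this is what stops skuba from scaffolding a pnpm-workspace.yaml inside nested workspace packages. A toy TypeScript check of the gate used for the pnpm-workspace.yaml entry in the implementation below, with stubbed inputs:

    type ConditionOptions = {
      packageManager: { command: 'pnpm' | 'yarn' };
      isInWorkspaceRoot: boolean;
    };

    const condition = ({ packageManager, isInWorkspaceRoot }: ConditionOptions) =>
      isInWorkspaceRoot && packageManager.command === 'pnpm';

    // A nested package in a pnpm monorepo is skipped...
    console.log(condition({ packageManager: { command: 'pnpm' }, isInWorkspaceRoot: false })); // false
    // ...while the workspace root is refreshed.
    console.log(condition({ packageManager: { command: 'pnpm' }, isInWorkspaceRoot: true })); // true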
package/lib/cli/lint/internalLints/refreshConfigFiles.js
CHANGED
@@ -37,43 +37,44 @@ var import_path = __toESM(require("path"));
 var import_util = require("util");
 var import_fs_extra = require("fs-extra");
 var import__ = require("../../..");
-var
+var import_dir = require("../../../utils/dir");
 var import_packageManager = require("../../../utils/packageManager");
 var import_template = require("../../../utils/template");
 var import_package = require("../../configure/analysis/package");
 var import_project = require("../../configure/analysis/project");
 var import_configFile = require("../../configure/processing/configFile");
-const
-
-
-
-
-
-
-
+const OLD_IGNORE_WARNING = `# Ignore .npmrc. This is no longer managed by skuba as pnpm projects use a managed .npmrc.
+# IMPORTANT: if migrating to pnpm, remove this line and add an .npmrc IN THE SAME COMMIT.
+# You can use \`skuba format\` to generate the file or otherwise commit an empty file.
+# Doing so will conflict with a local .npmrc and make it more difficult to unintentionally commit auth secrets.
+.npmrc
+`;
+const removeOldWarning = (contents) => contents.includes(OLD_IGNORE_WARNING) ? `${contents.replace(OLD_IGNORE_WARNING, "").trim()}
+` : contents;
 const REFRESHABLE_CONFIG_FILES = [
   {
     name: ".gitignore",
     type: "ignore",
-    additionalMapping:
+    additionalMapping: removeOldWarning
   },
   { name: ".prettierignore", type: "ignore" },
   {
-    name: ".
-    type: "
-
-    if: (packageManager) => packageManager.command === "pnpm"
+    name: "pnpm-workspace.yaml",
+    type: "pnpm-workspace",
+    if: ({ packageManager, isInWorkspaceRoot }) => isInWorkspaceRoot && packageManager.command === "pnpm"
   },
   {
     name: ".dockerignore",
     type: "ignore",
-    additionalMapping:
+    additionalMapping: removeOldWarning
   }
 ];
 const refreshConfigFiles = async (mode, logger) => {
-  const [manifest, gitRoot] = await Promise.all([
+  const [manifest, gitRoot, workspaceRoot, currentWorkspaceProjectRoot] = await Promise.all([
     (0, import_package.getDestinationManifest)(),
-    import__.Git.findRoot({ dir: process.cwd() })
+    import__.Git.findRoot({ dir: process.cwd() }),
+    (0, import_dir.findWorkspaceRoot)(),
+    (0, import_dir.findCurrentWorkspaceProjectRoot)()
   ]);
   const destinationRoot = import_path.default.dirname(manifest.path);
   const readDestinationFile = (0, import_project.createDestinationFileReader)(destinationRoot);
@@ -82,8 +83,8 @@ const refreshConfigFiles = async (mode, logger) => {
     type: fileType,
     additionalMapping = (s) => s,
     if: condition = () => true
-  },
-  if (!condition(
+  }, conditionOptions) => {
+  if (!condition(conditionOptions)) {
     return { needsChange: false };
   }
   const [inputFile, templateFile, isGitIgnored] = await Promise.all([
@@ -99,7 +100,7 @@ const refreshConfigFiles = async (mode, logger) => {
   }
   const data = additionalMapping(
     inputFile ? (0, import_configFile.mergeWithConfigFile)(templateFile, fileType)(inputFile) : templateFile,
-
+    packageManager
   );
   const filepath = import_path.default.join(destinationRoot, filename);
   if (mode === "format") {
@@ -119,7 +120,7 @@ const refreshConfigFiles = async (mode, logger) => {
       msg: `The ${logger.bold(
         filename
       )} file is out of date. Run \`${logger.bold(
-
+        packageManager.print.exec,
         "skuba",
         "format"
       )}\` to update it.`,
@@ -131,7 +132,10 @@ const refreshConfigFiles = async (mode, logger) => {
   const packageManager = await (0, import_packageManager.detectPackageManager)(destinationRoot);
   const results = await Promise.all(
     REFRESHABLE_CONFIG_FILES.map(
-      (conf) => refreshConfigFile(conf,
+      (conf) => refreshConfigFile(conf, {
+        packageManager,
+        isInWorkspaceRoot: workspaceRoot === currentWorkspaceProjectRoot
+      })
     )
   );
   results.forEach((result) => {
package/lib/cli/lint/internalLints/refreshConfigFiles.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../../src/cli/lint/internalLints/refreshConfigFiles.ts"],
-  "sourcesContent": ["import path from 'path';\nimport { inspect, stripVTControlCharacters as stripAnsi } from 'util';\n\nimport { writeFile } from 'fs-extra';\n\nimport { Git } from '../../..';\nimport
-  "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAA+D;AAE/D,sBAA0B;AAE1B,eAAoB;
-  "names": ["path", "
+
"sourcesContent": ["import path from 'path';\nimport { inspect, stripVTControlCharacters as stripAnsi } from 'util';\n\nimport { writeFile } from 'fs-extra';\n\nimport { Git } from '../../..';\nimport {\n findCurrentWorkspaceProjectRoot,\n findWorkspaceRoot,\n} from '../../../utils/dir';\nimport type { Logger } from '../../../utils/logging';\nimport {\n type PackageManagerConfig,\n detectPackageManager,\n} from '../../../utils/packageManager';\nimport { readBaseTemplateFile } from '../../../utils/template';\nimport { getDestinationManifest } from '../../configure/analysis/package';\nimport { createDestinationFileReader } from '../../configure/analysis/project';\nimport { mergeWithConfigFile } from '../../configure/processing/configFile';\nimport type { InternalLintResult } from '../internal';\n\ntype ConditionOptions = {\n packageManager: PackageManagerConfig;\n isInWorkspaceRoot: boolean;\n};\n\ntype RefreshableConfigFile = {\n name: string;\n type: 'ignore' | 'pnpm-workspace';\n additionalMapping?: (\n s: string,\n packageManager: PackageManagerConfig,\n ) => string;\n if?: (options: ConditionOptions) => boolean;\n};\n\nconst OLD_IGNORE_WARNING = `# Ignore .npmrc. This is no longer managed by skuba as pnpm projects use a managed .npmrc.\n# IMPORTANT: if migrating to pnpm, remove this line and add an .npmrc IN THE SAME COMMIT.\n# You can use \\`skuba format\\` to generate the file or otherwise commit an empty file.\n# Doing so will conflict with a local .npmrc and make it more difficult to unintentionally commit auth secrets.\n.npmrc\n`;\n\nconst removeOldWarning = (contents: string) =>\n contents.includes(OLD_IGNORE_WARNING)\n ? `${contents.replace(OLD_IGNORE_WARNING, '').trim()}\\n`\n : contents;\n\nexport const REFRESHABLE_CONFIG_FILES: RefreshableConfigFile[] = [\n {\n name: '.gitignore',\n type: 'ignore',\n additionalMapping: removeOldWarning,\n },\n { name: '.prettierignore', type: 'ignore' },\n {\n name: 'pnpm-workspace.yaml',\n type: 'pnpm-workspace',\n if: ({ packageManager, isInWorkspaceRoot }) =>\n isInWorkspaceRoot && packageManager.command === 'pnpm',\n },\n {\n name: '.dockerignore',\n type: 'ignore',\n additionalMapping: removeOldWarning,\n },\n];\n\nexport const refreshConfigFiles = async (\n mode: 'format' | 'lint',\n logger: Logger,\n) => {\n const [manifest, gitRoot, workspaceRoot, currentWorkspaceProjectRoot] =\n await Promise.all([\n getDestinationManifest(),\n Git.findRoot({ dir: process.cwd() }),\n findWorkspaceRoot(),\n findCurrentWorkspaceProjectRoot(),\n ]);\n\n const destinationRoot = path.dirname(manifest.path);\n\n const readDestinationFile = createDestinationFileReader(destinationRoot);\n\n const refreshConfigFile = async (\n {\n name: filename,\n type: fileType,\n additionalMapping = (s) => s,\n if: condition = () => true,\n }: RefreshableConfigFile,\n conditionOptions: ConditionOptions,\n ) => {\n if (!condition(conditionOptions)) {\n return { needsChange: false };\n }\n\n const [inputFile, templateFile, isGitIgnored] = await Promise.all([\n readDestinationFile(filename),\n readBaseTemplateFile(`_${filename}`),\n gitRoot\n ? Git.isFileGitIgnored({\n gitRoot,\n absolutePath: path.join(destinationRoot, filename),\n })\n : false,\n ]);\n\n // If the file is gitignored and doesn't exist, don't make it\n if (inputFile === undefined && isGitIgnored) {\n return { needsChange: false };\n }\n\n const data = additionalMapping(\n inputFile\n ? 
mergeWithConfigFile(templateFile, fileType)(inputFile)\n : templateFile,\n packageManager,\n );\n\n const filepath = path.join(destinationRoot, filename);\n\n if (mode === 'format') {\n if (data === inputFile) {\n return { needsChange: false };\n }\n\n await writeFile(filepath, data);\n return {\n needsChange: false,\n msg: `Refreshed ${logger.bold(filename)}.`,\n filename,\n };\n }\n\n if (data !== inputFile) {\n return {\n needsChange: true,\n msg: `The ${logger.bold(\n filename,\n )} file is out of date. Run \\`${logger.bold(\n packageManager.print.exec,\n 'skuba',\n 'format',\n )}\\` to update it.`,\n filename,\n };\n }\n\n return { needsChange: false };\n };\n\n const packageManager = await detectPackageManager(destinationRoot);\n\n const results = await Promise.all(\n REFRESHABLE_CONFIG_FILES.map((conf) =>\n refreshConfigFile(conf, {\n packageManager,\n isInWorkspaceRoot: workspaceRoot === currentWorkspaceProjectRoot,\n }),\n ),\n );\n\n // Log after for reproducible test output ordering\n results.forEach((result) => {\n if (result.msg) {\n logger.warn(result.msg);\n }\n });\n\n const anyNeedChanging = results.some(({ needsChange }) => needsChange);\n\n return {\n ok: !anyNeedChanging,\n fixable: anyNeedChanging,\n annotations: results.flatMap(({ needsChange, filename, msg }) =>\n needsChange && msg\n ? [\n {\n path: filename,\n message: stripAnsi(msg),\n },\n ]\n : [],\n ),\n };\n};\n\nexport const tryRefreshConfigFiles = async (\n mode: 'format' | 'lint',\n logger: Logger,\n): Promise<InternalLintResult> => {\n try {\n return await refreshConfigFiles(mode, logger);\n } catch (err) {\n logger.warn('Failed to refresh config files.');\n logger.subtle(inspect(err));\n\n return {\n ok: false,\n fixable: false,\n annotations: [],\n };\n }\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAA+D;AAE/D,sBAA0B;AAE1B,eAAoB;AACpB,iBAGO;AAEP,4BAGO;AACP,sBAAqC;AACrC,qBAAuC;AACvC,qBAA4C;AAC5C,wBAAoC;AAkBpC,MAAM,qBAAqB;AAAA;AAAA;AAAA;AAAA;AAAA;AAO3B,MAAM,mBAAmB,CAAC,aACxB,SAAS,SAAS,kBAAkB,IAChC,GAAG,SAAS,QAAQ,oBAAoB,EAAE,EAAE,KAAK,CAAC;AAAA,IAClD;AAEC,MAAM,2BAAoD;AAAA,EAC/D;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,mBAAmB;AAAA,EACrB;AAAA,EACA,EAAE,MAAM,mBAAmB,MAAM,SAAS;AAAA,EAC1C;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,IAAI,CAAC,EAAE,gBAAgB,kBAAkB,MACvC,qBAAqB,eAAe,YAAY;AAAA,EACpD;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,mBAAmB;AAAA,EACrB;AACF;AAEO,MAAM,qBAAqB,OAChC,MACA,WACG;AACH,QAAM,CAAC,UAAU,SAAS,eAAe,2BAA2B,IAClE,MAAM,QAAQ,IAAI;AAAA,QAChB,uCAAuB;AAAA,IACvB,aAAI,SAAS,EAAE,KAAK,QAAQ,IAAI,EAAE,CAAC;AAAA,QACnC,8BAAkB;AAAA,QAClB,4CAAgC;AAAA,EAClC,CAAC;AAEH,QAAM,kBAAkB,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAElD,QAAM,0BAAsB,4CAA4B,eAAe;AAEvE,QAAM,oBAAoB,OACxB;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,oBAAoB,CAAC,MAAM;AAAA,IAC3B,IAAI,YAAY,MAAM;AAAA,EACxB,GACA,qBACG;AACH,QAAI,CAAC,UAAU,gBAAgB,GAAG;AAChC,aAAO,EAAE,aAAa,MAAM;AAAA,IAC9B;AAEA,UAAM,CAAC,WAAW,cAAc,YAAY,IAAI,MAAM,QAAQ,IAAI;AAAA,MAChE,oBAAoB,QAAQ;AAAA,UAC5B,sCAAqB,IAAI,QAAQ,EAAE;AAAA,MACnC,UACI,aAAI,iBAAiB;AAAA,QACnB;AAAA,QACA,cAAc,YAAAA,QAAK,KAAK,iBAAiB,QAAQ;AAAA,MACnD,CAAC,IACD;AAAA,IACN,CAAC;AAGD,QAAI,cAAc,UAAa,cAAc;AAC3C,aAAO,EAAE,aAAa,MAAM;AAAA,IAC9B;AAEA,UAAM,OAAO;AAAA,MACX,gBACI,uCAAoB,cAAc,QAAQ,EAAE,SAAS,IACrD;AAAA,MACJ;AAAA,IACF;AAEA,UAAM,WAAW,YAAAA,QAAK,KAAK,iBAAiB,QAAQ;AAEpD,QAAI,SAAS,UAAU;AACrB,UAAI,SAAS,WAAW;AACtB,eAAO,EAAE,aAAa,MAAM;AAAA,MAC9B;AAEA,gBAAM,2BAAU,UAAU,IAAI;AAC9B,aAAO;AAAA,QACL,aAAa;AAAA,QACb,KAAK,aAAa,OAAO,KAAK,QAAQ,CAAC;AAAA,QACvC;AAAA,MACF;AAAA,IACF;AAEA,QAAI,SAAS,WAAW;AACtB,aAAO;AAAA,QACL,aAAa;AAAA,QACb,KAAK,OAAO,OAAO;AAAA,UACjB;AAAA,QACF,CAAC,+BAA+B,OAAO;AAAA,UACrC,eAAe,MAAM;AAAA,UACrB;AAAA,UACA;AAAA,QACF,CAAC;AAAA,QACD;AAAA,MACF;AAAA,IACF;AAEA,WAAO,EAAE,aAAa,MAAM;AAAA,EAC9B;AAEA,QAAM,iBAAiB,UAAM,4CAAqB,eAAe;AAEjE,QAAM,UAAU,MAAM,QAAQ;AAAA,IAC5B,yBAAyB;AAAA,MAAI,CAAC,SAC5B,kBAAkB,MAAM;AAAA,QACtB;AAAA,QACA,mBAAmB,kBAAkB;AAAA,MACvC,CAAC;AAAA,IACH;AAAA,EACF;AAGA,UAAQ,QAAQ,CAAC,WAAW;AAC1B,QAAI,OAAO,KAAK;AACd,aAAO,KAAK,OAAO,GAAG;AAAA,IACxB;AAAA,EACF,CAAC;AAED,QAAM,kBAAkB,QAAQ,KAAK,CAAC,EAAE,YAAY,MAAM,WAAW;AAErE,SAAO;AAAA,IACL,IAAI,CAAC;AAAA,IACL,SAAS;AAAA,IACT,aAAa,QAAQ;AAAA,MAAQ,CAAC,EAAE,aAAa,UAAU,IAAI,MACzD,eAAe,MACX;AAAA,QACE;AAAA,UACE,MAAM;AAAA,UACN,aAAS,YAAAC,0BAAU,GAAG;AAAA,QACxB;AAAA,MACF,IACA,CAAC;AAAA,IACP;AAAA,EACF;AACF;AAEO,MAAM,wBAAwB,OACnC,MACA,WACgC;AAChC,MAAI;AACF,WAAO,MAAM,mBAAmB,MAAM,MAAM;AAAA,EAC9C,SAAS,KAAK;AACZ,WAAO,KAAK,iCAAiC;AAC7C,WAAO,WAAO,qBAAQ,GAAG,CAAC;AAE1B,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,SAAS;AAAA,MACT,aAAa,CAAC;AAAA,IAChB;AAAA,EACF;AACF;",
+  "names": ["path", "stripAnsi"]
 }
package/lib/cli/lint/internalLints/upgrade/patches/10.1.0/index.js
CHANGED
@@ -21,11 +21,16 @@ __export(__exports, {
   patches: () => patches
 });
 module.exports = __toCommonJS(__exports);
+var import_migrateNpmrcToPnpmWorkspace = require("./migrateNpmrcToPnpmWorkspace");
 var import_stopBundlingInCDKTests = require("./stopBundlingInCDKTests");
 const patches = [
   {
     apply: import_stopBundlingInCDKTests.tryStopBundlingInCDKTests,
     description: "Stop bundling inside CDK unit tests"
+  },
+  {
+    apply: import_migrateNpmrcToPnpmWorkspace.tryMigrateNpmrcToPnpmWorkspace,
+    description: "Move .npmrc config to pnpm-workspace.yaml"
   }
 ];
 // Annotate the CommonJS export names for ESM import in node:
package/lib/cli/lint/internalLints/upgrade/patches/10.1.0/index.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/10.1.0/index.ts"],
-  "sourcesContent": ["import type { Patches } from '../..';\n\nimport { tryStopBundlingInCDKTests } from './stopBundlingInCDKTests';\n\nexport const patches: Patches = [\n  {\n    apply: tryStopBundlingInCDKTests,\n    description: 'Stop bundling inside CDK unit tests',\n  },\n];\n"],
-  "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,oCAA0C;AAEnC,MAAM,UAAmB;AAAA,EAC9B;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AACF;",
+  "sourcesContent": ["import type { Patches } from '../..';\n\nimport { tryMigrateNpmrcToPnpmWorkspace } from './migrateNpmrcToPnpmWorkspace';\nimport { tryStopBundlingInCDKTests } from './stopBundlingInCDKTests';\n\nexport const patches: Patches = [\n  {\n    apply: tryStopBundlingInCDKTests,\n    description: 'Stop bundling inside CDK unit tests',\n  },\n  {\n    apply: tryMigrateNpmrcToPnpmWorkspace,\n    description: 'Move .npmrc config to pnpm-workspace.yaml',\n  },\n];\n"],
+  "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,yCAA+C;AAC/C,oCAA0C;AAEnC,MAAM,UAAmB;AAAA,EAC9B;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AACF;",
   "names": []
 }
package/lib/cli/lint/internalLints/upgrade/patches/10.1.0/migrateNpmrcToPnpmWorkspace.js
ADDED
@@ -0,0 +1,167 @@
+"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var migrateNpmrcToPnpmWorkspace_exports = {};
+__export(migrateNpmrcToPnpmWorkspace_exports, {
+  tryMigrateNpmrcToPnpmWorkspace: () => tryMigrateNpmrcToPnpmWorkspace
+});
+module.exports = __toCommonJS(migrateNpmrcToPnpmWorkspace_exports);
+var import_util = require("util");
+var import_fast_glob = require("fast-glob");
+var import_fs_extra = require("fs-extra");
+var import_dir = require("../../../../../../utils/dir");
+var import_logging = require("../../../../../../utils/logging");
+var import_npmrc = require("../../../../../../utils/npmrc");
+var import_configFile = require("../../../../../configure/processing/configFile");
+const NPMRC = ".npmrc";
+const checkFileExists = async (filePath) => {
+  try {
+    await import_fs_extra.promises.access(filePath);
+    return true;
+  } catch {
+    return false;
+  }
+};
+const migrateCustomNpmrcSettings = async () => {
+  const contents = await import_fs_extra.promises.readFile(NPMRC, "utf-8");
+  const remainderLines = (0, import_configFile.replaceManagedSection)(contents, "").split("\n").map((line) => line.trim()).filter((line) => line.length > 0).filter((line) => !line.startsWith("#")).filter((line) => !(0, import_npmrc.hasNpmrcSecret)(line));
+  if (remainderLines.length === 0) {
+    return;
+  }
+  const pnpmWorkspaceFile = "pnpm-workspace.yaml";
+  const pnpmWorkspaceExists = await checkFileExists(pnpmWorkspaceFile);
+  if (!pnpmWorkspaceExists) {
+    await import_fs_extra.promises.writeFile(pnpmWorkspaceFile, "");
+  }
+  const pnpmWorkspaceContents = await import_fs_extra.promises.readFile(pnpmWorkspaceFile, "utf-8");
+  const commentedLines = remainderLines.map((line) => `# ${line}`).join("\n");
+  const newContents = `# TODO: Translate these settings to the required format for pnpm-workspace.yaml.
+# skuba moved these from .npmrc, but doesn't know what they mean.
+# See: https://pnpm.io/settings
+#
+${commentedLines}
+
+${pnpmWorkspaceContents}`;
+  await import_fs_extra.promises.writeFile(pnpmWorkspaceFile, newContents);
+};
+const fixDockerfiles = async () => {
+  const fileNames = await (0, import_fast_glob.glob)(["**/Dockerfile*"]);
+  await Promise.all(
+    fileNames.map(async (fileName) => {
+      const contents = await import_fs_extra.promises.readFile(fileName, "utf8");
+      const patched = contents.replaceAll(
+        "--mount=type=bind,source=.npmrc,target=.npmrc",
+        "--mount=type=bind,source=pnpm-workspace.yaml,target=pnpm-workspace.yaml"
+      );
+      if (patched !== contents) {
+        await import_fs_extra.promises.writeFile(fileName, patched);
+      }
+    })
+  );
+};
+const fixBuildkitePipelines = async () => {
+  const fileNames = await (0, import_fast_glob.glob)(["**/.buildkite/**.{yml,yaml}"]);
+  await Promise.all(
+    fileNames.map(async (fileName) => {
+      const contents = await import_fs_extra.promises.readFile(fileName, "utf8");
+      const patched = contents.replace(
+        /(cache-on:[\s\S]*?)([ \t]+-[ \t]+\.npmrc)([\s\S]*?)(?=\n[ \t]*\S|$)/g,
+        (_, before, npmrcLine, after) => before + npmrcLine.replace(".npmrc", "pnpm-workspace.yaml") + after
+      );
+      if (patched !== contents) {
+        await import_fs_extra.promises.writeFile(fileName, patched);
+      }
+    })
+  );
+};
+const forceUpgradeToPnpm10 = async () => {
+  const fileNames = await (0, import_fast_glob.glob)(["**/package.json"]);
+  await Promise.all(
+    fileNames.map(async (fileName) => {
+      const contents = await import_fs_extra.promises.readFile(fileName, "utf8");
+      const packageManagerMatch = /"packageManager"\s*:\s*"pnpm@([^"]+)"/.exec(
+        contents
+      );
+      if (!packageManagerMatch) return;
+      const currentVersion = packageManagerMatch[1] ?? "";
+      const majorVersion = parseInt(currentVersion.split(".")?.[0] ?? "0", 10);
+      if (!isNaN(majorVersion) && majorVersion < 10) {
+        const patched = contents.replace(
+          /"packageManager"(\s*):(\s*)"pnpm@[^"]+"/,
+          '"packageManager"$1:$2"pnpm@10.8.1"'
+        );
+        await import_fs_extra.promises.writeFile(fileName, patched);
+      }
+    })
+  );
+};
+const migrateNpmrcToPnpmWorkspace = async ({
+  mode,
+  packageManager
+}) => {
+  if (packageManager.command !== "pnpm") {
+    return {
+      result: "skip",
+      reason: "not using pnpm"
+    };
+  }
+  const [workspaceRoot, currentWorkspaceProjectRoot] = await Promise.all([
+    (0, import_dir.findWorkspaceRoot)(),
+    (0, import_dir.findCurrentWorkspaceProjectRoot)()
+  ]);
+  if (workspaceRoot !== currentWorkspaceProjectRoot) {
+    return {
+      result: "skip",
+      reason: "not running in the workspace root"
+    };
+  }
+  const npmrcExists = await checkFileExists(NPMRC);
+  if (!npmrcExists) {
+    return {
+      result: "skip",
+      reason: "no .npmrc found"
+    };
+  }
+  if (mode === "lint") {
+    return {
+      result: "apply"
+    };
+  }
+  await Promise.all([
+    migrateCustomNpmrcSettings(),
+    fixDockerfiles(),
+    fixBuildkitePipelines(),
+    forceUpgradeToPnpm10()
+  ]);
+  await import_fs_extra.promises.rm(NPMRC);
+  return { result: "apply" };
+};
+const tryMigrateNpmrcToPnpmWorkspace = async (config) => {
+  try {
+    return await migrateNpmrcToPnpmWorkspace(config);
+  } catch (err) {
+    import_logging.log.warn("Failed to migrate .npmrc to pnpm-workspace.yaml");
+    import_logging.log.subtle((0, import_util.inspect)(err));
+    return { result: "skip", reason: "due to an error" };
+  }
+};
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  tryMigrateNpmrcToPnpmWorkspace
+});
+//# sourceMappingURL=migrateNpmrcToPnpmWorkspace.js.map