skuba 0.0.0-master-20230907050919 → 0.0.0-master-20231002013336
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/config/prettier.d.ts +1 -0
- package/config/prettier.js +1 -0
- package/lib/api/git/commitAllChanges.js +16 -2
- package/lib/api/git/commitAllChanges.js.map +3 -3
- package/lib/api/github/push.d.ts +1 -1
- package/lib/api/github/push.js +13 -4
- package/lib/api/github/push.js.map +3 -3
- package/lib/cli/configure/processing/package.d.ts +1 -1
- package/lib/cli/configure/processing/package.js +8 -10
- package/lib/cli/configure/processing/package.js.map +2 -2
- package/lib/cli/lint/autofix.js +1 -1
- package/lib/cli/lint/autofix.js.map +2 -2
- package/package.json +13 -13
- package/template/express-rest-api/.buildkite/pipeline.yml +4 -2
- package/template/greeter/.buildkite/pipeline.yml +1 -1
- package/template/koa-rest-api/.buildkite/pipeline.yml +4 -2
- package/template/koa-rest-api/package.json +3 -3
- package/template/lambda-sqs-worker/.buildkite/pipeline.yml +5 -3
- package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +5 -3
package/config/prettier.d.ts
CHANGED
package/config/prettier.js
CHANGED
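Both Prettier config files gain a single line. The diff body is collapsed here, but taken together with the `prettier-plugin-packagejson` dependency added in `package/package.json` below, the addition is presumably the plugin registration — a sketch only, not the verbatim change:

```js
// config/prettier.js (sketch): the likely shape of the one-line addition,
// registering the plugin that supersedes the removed sort-package-json
// dependency. The actual added line is not expanded in this diff.
module.exports = {
  // ...existing skuba Prettier options...
  plugins: ['prettier-plugin-packagejson'],
};
```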
package/lib/api/git/commitAllChanges.js
CHANGED
@@ -31,9 +31,11 @@ __export(commitAllChanges_exports, {
   commitAllChanges: () => commitAllChanges
 });
 module.exports = __toCommonJS(commitAllChanges_exports);
+var import_path = __toESM(require("path"));
 var import_fs_extra = __toESM(require("fs-extra"));
 var import_isomorphic_git = __toESM(require("isomorphic-git"));
 var import_commit = require("./commit");
+var import_findRoot = require("./findRoot");
 var import_getChangedFiles = require("./getChangedFiles");
 const commitAllChanges = async ({
   dir,
@@ -46,13 +48,25 @@ const commitAllChanges = async ({
   if (!changedFiles.length) {
     return;
   }
+  const gitRoot = await (0, import_findRoot.findRoot)({ dir });
+  if (!gitRoot) {
+    throw new Error(`Could not find Git root from directory: ${dir}`);
+  }
   await Promise.all(
     changedFiles.map(
-      (file) => file.state === "deleted" ? import_isomorphic_git.default.remove({
+      (file) => file.state === "deleted" ? import_isomorphic_git.default.remove({
+        fs: import_fs_extra.default,
+        dir: gitRoot,
+        filepath: import_path.default.relative(gitRoot, import_path.default.join(dir, file.path))
+      }) : import_isomorphic_git.default.add({
+        fs: import_fs_extra.default,
+        dir: gitRoot,
+        filepath: import_path.default.relative(gitRoot, import_path.default.join(dir, file.path))
+      })
     )
   );
   return (0, import_commit.commit)({
-    dir,
+    dir: gitRoot,
     message,
     author,
     committer
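The intent of this change is easiest to read in the updated TypeScript source, which is embedded in the new source map below: `commitAllChanges` now locates the repository root with `findRoot` and stages each changed path relative to that root, so committing works when `dir` points at a nested directory rather than the repository root itself. An annotated excerpt:

```ts
// Excerpt of the new src/api/git/commitAllChanges.ts (from the source map),
// with comments added here for explanation.
const gitRoot = await findRoot({ dir });

if (!gitRoot) {
  throw new Error(`Could not find Git root from directory: ${dir}`);
}

await Promise.all(
  changedFiles.map((file) =>
    file.state === 'deleted'
      ? git.remove({
          fs,
          dir: gitRoot,
          // Re-base the path against the repository root, not `dir`.
          filepath: path.relative(gitRoot, path.join(dir, file.path)),
        })
      : git.add({
          fs,
          dir: gitRoot,
          filepath: path.relative(gitRoot, path.join(dir, file.path)),
        }),
  ),
);
```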
package/lib/api/git/commitAllChanges.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../../src/api/git/commitAllChanges.ts"],
-
"sourcesContent": ["import fs from 'fs-extra';\nimport git from 'isomorphic-git';\n\nimport { type Identity, commit } from './commit';\nimport { type ChangedFile, getChangedFiles } from './getChangedFiles';\n\ninterface CommitAllParameters {\n dir: string;\n message: string;\n author?: Identity;\n committer?: Identity;\n\n /**\n * File changes to exclude from the commit.\n *\n * Defaults to `[]` (no exclusions).\n */\n ignore?: ChangedFile[];\n}\n\n/**\n * Stages all changes and writes a commit to the local Git repository.\n */\nexport const commitAllChanges = async ({\n dir,\n message,\n\n author,\n committer,\n ignore,\n}: CommitAllParameters): Promise<string | undefined> => {\n const changedFiles = await getChangedFiles({ dir, ignore });\n\n if (!changedFiles.length) {\n return;\n }\n\n await Promise.all(\n changedFiles.map((file) =>\n file.state === 'deleted'\n ? git.remove({
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AACf,4BAAgB;AAEhB,oBAAsC;AACtC,6BAAkD;AAmB3C,MAAM,mBAAmB,OAAO;AAAA,EACrC;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AACF,MAAwD;AACtD,QAAM,eAAe,UAAM,wCAAgB,EAAE,KAAK,OAAO,CAAC;AAE1D,MAAI,CAAC,aAAa,QAAQ;AACxB;AAAA,EACF;AAEA,QAAM,QAAQ;AAAA,IACZ,aAAa;AAAA,MAAI,CAAC,SAChB,KAAK,UAAU,YACX,sBAAAA,QAAI,OAAO,
-"names": ["git", "fs"]
+
"sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport git from 'isomorphic-git';\n\nimport { type Identity, commit } from './commit';\nimport { findRoot } from './findRoot';\nimport { type ChangedFile, getChangedFiles } from './getChangedFiles';\n\ninterface CommitAllParameters {\n dir: string;\n message: string;\n author?: Identity;\n committer?: Identity;\n\n /**\n * File changes to exclude from the commit.\n *\n * Defaults to `[]` (no exclusions).\n */\n ignore?: ChangedFile[];\n}\n\n/**\n * Stages all changes and writes a commit to the local Git repository.\n */\nexport const commitAllChanges = async ({\n dir,\n message,\n\n author,\n committer,\n ignore,\n}: CommitAllParameters): Promise<string | undefined> => {\n const changedFiles = await getChangedFiles({ dir, ignore });\n\n if (!changedFiles.length) {\n return;\n }\n\n const gitRoot = await findRoot({ dir });\n\n if (!gitRoot) {\n throw new Error(`Could not find Git root from directory: ${dir}`);\n }\n\n await Promise.all(\n changedFiles.map((file) =>\n file.state === 'deleted'\n ? git.remove({\n fs,\n dir: gitRoot,\n filepath: path.relative(gitRoot, path.join(dir, file.path)),\n })\n : git.add({\n fs,\n dir: gitRoot,\n filepath: path.relative(gitRoot, path.join(dir, file.path)),\n }),\n ),\n );\n\n return commit({\n dir: gitRoot,\n message,\n author,\n committer,\n });\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AACf,4BAAgB;AAEhB,oBAAsC;AACtC,sBAAyB;AACzB,6BAAkD;AAmB3C,MAAM,mBAAmB,OAAO;AAAA,EACrC;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AACF,MAAwD;AACtD,QAAM,eAAe,UAAM,wCAAgB,EAAE,KAAK,OAAO,CAAC;AAE1D,MAAI,CAAC,aAAa,QAAQ;AACxB;AAAA,EACF;AAEA,QAAM,UAAU,UAAM,0BAAS,EAAE,IAAI,CAAC;AAEtC,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,MAAM,2CAA2C,GAAG,EAAE;AAAA,EAClE;AAEA,QAAM,QAAQ;AAAA,IACZ,aAAa;AAAA,MAAI,CAAC,SAChB,KAAK,UAAU,YACX,sBAAAA,QAAI,OAAO;AAAA,QACT,oBAAAC;AAAA,QACA,KAAK;AAAA,QACL,UAAU,YAAAC,QAAK,SAAS,SAAS,YAAAA,QAAK,KAAK,KAAK,KAAK,IAAI,CAAC;AAAA,MAC5D,CAAC,IACD,sBAAAF,QAAI,IAAI;AAAA,QACN,oBAAAC;AAAA,QACA,KAAK;AAAA,QACL,UAAU,YAAAC,QAAK,SAAS,SAAS,YAAAA,QAAK,KAAK,KAAK,KAAK,IAAI,CAAC;AAAA,MAC5D,CAAC;AAAA,IACP;AAAA,EACF;AAEA,aAAO,sBAAO;AAAA,IACZ,KAAK;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;",
+"names": ["git", "fs", "path"]
 }
package/lib/api/github/push.d.ts
CHANGED
@@ -48,7 +48,7 @@ export interface FileChanges {
  *
  * https://docs.github.com/en/graphql/reference/input-objects#filechanges
  */
-export declare const readFileChanges: (changedFiles: Git.ChangedFile[]) => Promise<FileChanges>;
+export declare const readFileChanges: (dir: string, changedFiles: Git.ChangedFile[]) => Promise<FileChanges>;
 interface UploadFileChangesParams {
     dir: string;
     /**
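This is a breaking signature change for direct callers: the working directory now rides along so that paths can be re-based against the Git root. The compiled change below shows the in-repo call site updating accordingly:

```ts
// Callers pass the directory through as the new first argument.
const fileChanges = await readFileChanges(dir, changedFiles);
```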
package/lib/api/github/push.js
CHANGED
@@ -33,6 +33,7 @@ __export(push_exports, {
   uploadFileChanges: () => uploadFileChanges
 });
 module.exports = __toCommonJS(push_exports);
+var import_path = __toESM(require("path"));
 var import_graphql = require("@octokit/graphql");
 var import_fs_extra = __toESM(require("fs-extra"));
 var Git = __toESM(require("../git"));
@@ -49,7 +50,7 @@ const uploadAllFileChanges = async ({
   if (!changedFiles.length) {
     return;
   }
-  const fileChanges = await readFileChanges(changedFiles);
+  const fileChanges = await readFileChanges(dir, changedFiles);
   const commitId = await uploadFileChanges({
     dir,
     branch,
@@ -71,7 +72,7 @@ const uploadAllFileChanges = async ({
   }
   return commitId;
 };
-const readFileChanges = async (changedFiles) => {
+const readFileChanges = async (dir, changedFiles) => {
   const { added, deleted } = changedFiles.reduce(
     (files, changedFile) => {
       const filePath = changedFile.path;
@@ -84,16 +85,24 @@ const readFileChanges = async (changedFiles) => {
     },
     { added: [], deleted: [] }
   );
+  const gitRoot = await Git.findRoot({ dir });
+  const toGitHubPath = (filePath) => {
+    if (!gitRoot) {
+      return filePath;
+    }
+    const pathDir = import_path.default.relative(gitRoot, dir);
+    return import_path.default.join(pathDir, filePath);
+  };
   const additions = await Promise.all(
     added.map(async (filePath) => ({
-      path: filePath,
+      path: toGitHubPath(filePath),
       contents: await import_fs_extra.default.promises.readFile(filePath, {
         encoding: "base64"
       })
     }))
   );
   const deletions = deleted.map((filePath) => ({
-    path: filePath
+    path: toGitHubPath(filePath)
   }));
   return {
     additions,
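As in `commitAllChanges`, the new source (embedded in the source map below) maps each changed file onto a GitHub-facing path by offsetting it from the Git root, and falls back to the unmodified path when no root is found. An annotated excerpt:

```ts
// Excerpt of the new src/api/github/push.ts (from the source map), with
// comments added here for explanation.
const gitRoot = await Git.findRoot({ dir });

const toGitHubPath = (filePath: string) => {
  if (!gitRoot) {
    // No Git root located; use the path as-is.
    return filePath;
  }

  // Prefix the path with `dir`'s offset from the repository root.
  const pathDir = path.relative(gitRoot, dir);

  return path.join(pathDir, filePath);
};
```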
package/lib/api/github/push.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../../src/api/github/push.ts"],
-
"sourcesContent": ["import { graphql } from '@octokit/graphql';\nimport type {\n CreateCommitOnBranchInput,\n FileAddition,\n FileDeletion,\n} from '@octokit/graphql-schema';\nimport fs from 'fs-extra';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\n\ninterface CreateCommitResult {\n createCommitOnBranch: {\n commit: {\n oid: string;\n };\n };\n}\n\ninterface UploadAllFileChangesParams {\n dir: string;\n /**\n * The branch name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n\n /**\n * File changes to exclude from the upload.\n *\n * Defaults to `[]` (no exclusions).\n */\n ignore?: Git.ChangedFile[];\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * Updates the local Git repository to match the new remote branch state\n */\n updateLocal?: boolean;\n}\n\n/**\n * Retrieves all file changes from the local Git repository using\n * `getChangedFiles`, then uploads the changes to a specified GitHub branch\n * using `uploadFileChanges`.\n *\n * Returns the commit ID, or `undefined` if there are no changes to commit.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository unless `updateLocal` is\n * specified.\n */\nexport const uploadAllFileChanges = async ({\n branch,\n dir,\n messageHeadline,\n\n ignore,\n messageBody,\n updateLocal = false,\n}: UploadAllFileChangesParams): Promise<string | undefined> => {\n const changedFiles = await Git.getChangedFiles({ dir, ignore });\n\n if (!changedFiles.length) {\n return;\n }\n\n const fileChanges = await readFileChanges(changedFiles);\n\n const commitId = await uploadFileChanges({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n });\n\n if (updateLocal) {\n await Promise.all(\n [...fileChanges.additions, ...fileChanges.deletions].map((file) =>\n fs.rm(file.path),\n ),\n );\n\n await Git.fastForwardBranch({\n ref: branch,\n auth: { type: 'gitHubApp' },\n dir,\n });\n }\n\n return commitId;\n};\n\nexport interface FileChanges {\n additions: FileAddition[];\n deletions: FileDeletion[];\n}\n\n/**\n * Takes a list of `ChangedFiles`, reads them from the file system, and maps\n * them to GitHub GraphQL `FileChanges`.\n *\n * https://docs.github.com/en/graphql/reference/input-objects#filechanges\n */\nexport const readFileChanges = async (\n changedFiles: Git.ChangedFile[],\n): Promise<FileChanges> => {\n const { added, deleted } = changedFiles.reduce<{\n added: string[];\n deleted: string[];\n }>(\n (files, changedFile) => {\n const filePath = changedFile.path;\n if (changedFile.state === 'deleted') {\n files.deleted.push(filePath);\n } else {\n files.added.push(filePath);\n }\n\n return files;\n },\n { added: [], deleted: [] },\n );\n\n const additions: FileAddition[] = await Promise.all(\n added.map(async (filePath) => ({\n path: filePath,\n contents: await fs.promises.readFile(filePath, {\n encoding: 'base64',\n }),\n })),\n );\n\n const deletions: FileDeletion[] = deleted.map((filePath) => ({\n path: filePath,\n }));\n\n return {\n additions,\n deletions,\n };\n};\n\ninterface UploadFileChangesParams {\n dir: string;\n /**\n * The branch name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * File additions and deletions\n */\n fileChanges: FileChanges;\n}\n\n/**\n * Uploads file changes from the local workspace to a 
specified GitHub branch.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository.\n */\nexport const uploadFileChanges = async ({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n}: UploadFileChangesParams): Promise<string> => {\n const authToken = apiTokenFromEnvironment();\n if (!authToken) {\n throw new Error(\n 'Could not read a GitHub API token from the environment. Please set GITHUB_API_TOKEN or GITHUB_TOKEN.',\n );\n }\n\n const [{ owner, repo }, headCommitId] = await Promise.all([\n Git.getOwnerAndRepo({ dir }),\n Git.getHeadCommitId({ dir }),\n ]);\n\n const input: CreateCommitOnBranchInput = {\n branch: {\n repositoryNameWithOwner: `${owner}/${repo}`,\n branchName: branch,\n },\n message: {\n headline: messageHeadline,\n body: messageBody,\n },\n expectedHeadOid: headCommitId,\n clientMutationId: 'skuba',\n fileChanges,\n };\n\n const result = await graphql<CreateCommitResult>(\n `\n mutation Mutation($input: CreateCommitOnBranchInput!) {\n createCommitOnBranch(input: $input) {\n commit {\n oid\n }\n }\n }\n `,\n {\n input,\n headers: {\n authorization: `Bearer ${authToken}`,\n },\n },\n );\n\n return result.createCommitOnBranch.commit.oid;\n};\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAwB;AAMxB,sBAAe;AAEf,UAAqB;AAErB,yBAAwC;AAiDjC,MAAM,uBAAuB,OAAO;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA,cAAc;AAChB,MAA+D;AAC7D,QAAM,eAAe,MAAM,IAAI,gBAAgB,EAAE,KAAK,OAAO,CAAC;AAE9D,MAAI,CAAC,aAAa,QAAQ;AACxB;AAAA,EACF;AAEA,QAAM,cAAc,MAAM,gBAAgB,YAAY;
-"names": ["fs"]
+
"sourcesContent": ["import path from 'path';\n\nimport { graphql } from '@octokit/graphql';\nimport type {\n CreateCommitOnBranchInput,\n FileAddition,\n FileDeletion,\n} from '@octokit/graphql-schema';\nimport fs from 'fs-extra';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\n\ninterface CreateCommitResult {\n createCommitOnBranch: {\n commit: {\n oid: string;\n };\n };\n}\n\ninterface UploadAllFileChangesParams {\n dir: string;\n /**\n * The branch name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n\n /**\n * File changes to exclude from the upload.\n *\n * Defaults to `[]` (no exclusions).\n */\n ignore?: Git.ChangedFile[];\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * Updates the local Git repository to match the new remote branch state\n */\n updateLocal?: boolean;\n}\n\n/**\n * Retrieves all file changes from the local Git repository using\n * `getChangedFiles`, then uploads the changes to a specified GitHub branch\n * using `uploadFileChanges`.\n *\n * Returns the commit ID, or `undefined` if there are no changes to commit.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository unless `updateLocal` is\n * specified.\n */\nexport const uploadAllFileChanges = async ({\n branch,\n dir,\n messageHeadline,\n\n ignore,\n messageBody,\n updateLocal = false,\n}: UploadAllFileChangesParams): Promise<string | undefined> => {\n const changedFiles = await Git.getChangedFiles({ dir, ignore });\n\n if (!changedFiles.length) {\n return;\n }\n\n const fileChanges = await readFileChanges(dir, changedFiles);\n\n const commitId = await uploadFileChanges({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n });\n\n if (updateLocal) {\n await Promise.all(\n [...fileChanges.additions, ...fileChanges.deletions].map((file) =>\n fs.rm(file.path),\n ),\n );\n\n await Git.fastForwardBranch({\n ref: branch,\n auth: { type: 'gitHubApp' },\n dir,\n });\n }\n\n return commitId;\n};\n\nexport interface FileChanges {\n additions: FileAddition[];\n deletions: FileDeletion[];\n}\n\n/**\n * Takes a list of `ChangedFiles`, reads them from the file system, and maps\n * them to GitHub GraphQL `FileChanges`.\n *\n * https://docs.github.com/en/graphql/reference/input-objects#filechanges\n */\nexport const readFileChanges = async (\n dir: string,\n changedFiles: Git.ChangedFile[],\n): Promise<FileChanges> => {\n const { added, deleted } = changedFiles.reduce<{\n added: string[];\n deleted: string[];\n }>(\n (files, changedFile) => {\n const filePath = changedFile.path;\n if (changedFile.state === 'deleted') {\n files.deleted.push(filePath);\n } else {\n files.added.push(filePath);\n }\n\n return files;\n },\n { added: [], deleted: [] },\n );\n\n const gitRoot = await Git.findRoot({ dir });\n\n const toGitHubPath = (filePath: string) => {\n if (!gitRoot) {\n return filePath;\n }\n\n const pathDir = path.relative(gitRoot, dir);\n\n return path.join(pathDir, filePath);\n };\n\n const additions: FileAddition[] = await Promise.all(\n added.map(async (filePath) => ({\n path: toGitHubPath(filePath),\n contents: await fs.promises.readFile(filePath, {\n encoding: 'base64',\n }),\n })),\n );\n\n const deletions: FileDeletion[] = deleted.map((filePath) => ({\n path: toGitHubPath(filePath),\n }));\n\n return {\n additions,\n deletions,\n };\n};\n\ninterface UploadFileChangesParams {\n dir: string;\n /**\n * The branch 
name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * File additions and deletions\n */\n fileChanges: FileChanges;\n}\n\n/**\n * Uploads file changes from the local workspace to a specified GitHub branch.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository.\n */\nexport const uploadFileChanges = async ({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n}: UploadFileChangesParams): Promise<string> => {\n const authToken = apiTokenFromEnvironment();\n if (!authToken) {\n throw new Error(\n 'Could not read a GitHub API token from the environment. Please set GITHUB_API_TOKEN or GITHUB_TOKEN.',\n );\n }\n\n const [{ owner, repo }, headCommitId] = await Promise.all([\n Git.getOwnerAndRepo({ dir }),\n Git.getHeadCommitId({ dir }),\n ]);\n\n const input: CreateCommitOnBranchInput = {\n branch: {\n repositoryNameWithOwner: `${owner}/${repo}`,\n branchName: branch,\n },\n message: {\n headline: messageHeadline,\n body: messageBody,\n },\n expectedHeadOid: headCommitId,\n clientMutationId: 'skuba',\n fileChanges,\n };\n\n const result = await graphql<CreateCommitResult>(\n `\n mutation Mutation($input: CreateCommitOnBranchInput!) {\n createCommitOnBranch(input: $input) {\n commit {\n oid\n }\n }\n }\n `,\n {\n input,\n headers: {\n authorization: `Bearer ${authToken}`,\n },\n },\n );\n\n return result.createCommitOnBranch.commit.oid;\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,qBAAwB;AAMxB,sBAAe;AAEf,UAAqB;AAErB,yBAAwC;AAiDjC,MAAM,uBAAuB,OAAO;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA,cAAc;AAChB,MAA+D;AAC7D,QAAM,eAAe,MAAM,IAAI,gBAAgB,EAAE,KAAK,OAAO,CAAC;AAE9D,MAAI,CAAC,aAAa,QAAQ;AACxB;AAAA,EACF;AAEA,QAAM,cAAc,MAAM,gBAAgB,KAAK,YAAY;AAE3D,QAAM,WAAW,MAAM,kBAAkB;AAAA,IACvC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,aAAa;AACf,UAAM,QAAQ;AAAA,MACZ,CAAC,GAAG,YAAY,WAAW,GAAG,YAAY,SAAS,EAAE;AAAA,QAAI,CAAC,SACxD,gBAAAA,QAAG,GAAG,KAAK,IAAI;AAAA,MACjB;AAAA,IACF;AAEA,UAAM,IAAI,kBAAkB;AAAA,MAC1B,KAAK;AAAA,MACL,MAAM,EAAE,MAAM,YAAY;AAAA,MAC1B;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAaO,MAAM,kBAAkB,OAC7B,KACA,iBACyB;AACzB,QAAM,EAAE,OAAO,QAAQ,IAAI,aAAa;AAAA,IAItC,CAAC,OAAO,gBAAgB;AACtB,YAAM,WAAW,YAAY;AAC7B,UAAI,YAAY,UAAU,WAAW;AACnC,cAAM,QAAQ,KAAK,QAAQ;AAAA,MAC7B,OAAO;AACL,cAAM,MAAM,KAAK,QAAQ;AAAA,MAC3B;AAEA,aAAO;AAAA,IACT;AAAA,IACA,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,EAAE;AAAA,EAC3B;AAEA,QAAM,UAAU,MAAM,IAAI,SAAS,EAAE,IAAI,CAAC;AAE1C,QAAM,eAAe,CAAC,aAAqB;AACzC,QAAI,CAAC,SAAS;AACZ,aAAO;AAAA,IACT;AAEA,UAAM,UAAU,YAAAC,QAAK,SAAS,SAAS,GAAG;AAE1C,WAAO,YAAAA,QAAK,KAAK,SAAS,QAAQ;AAAA,EACpC;AAEA,QAAM,YAA4B,MAAM,QAAQ;AAAA,IAC9C,MAAM,IAAI,OAAO,cAAc;AAAA,MAC7B,MAAM,aAAa,QAAQ;AAAA,MAC3B,UAAU,MAAM,gBAAAD,QAAG,SAAS,SAAS,UAAU;AAAA,QAC7C,UAAU;AAAA,MACZ,CAAC;AAAA,IACH,EAAE;AAAA,EACJ;AAEA,QAAM,YAA4B,QAAQ,IAAI,CAAC,cAAc;AAAA,IAC3D,MAAM,aAAa,QAAQ;AAAA,EAC7B,EAAE;AAEF,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;AA6BO,MAAM,oBAAoB,OAAO;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAgD;AAC9C,QAAM,gBAAY,4CAAwB;AAC1C,MAAI,CAAC,WAAW;AACd,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,CAAC,EAAE,OAAO,KAAK,GAAG,YAAY,IAAI,MAAM,QAAQ,IAAI;AAAA,IACxD,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,IAC3B,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,EAC7B,CAAC;AAED,QAAM,QAAmC;AAAA,IACvC,QAAQ;AAAA,MACN,yBAAyB,GAAG,KAAK,IAAI,IAAI;AAAA,MACzC,YAAY;AAAA,IACd;AAAA,IACA,SAAS;AAAA,MACP,UAAU;AAAA,MACV,MAAM;AAAA,IACR;AAAA,IACA,iBAAiB;AAAA,IACjB,kBAAkB;AAAA,IAClB;AAAA,EACF;AAEA,QAAM,SAAS,UAAM;AAAA,IACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASA;AAAA,MACE;AAAA,MACA,SAAS;AAAA,QACP,eAAe,UAAU,SAAS;AAAA,MACpC;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,qBAAqB,OAAO;AAC5C;",
+"names": ["fs", "path"]
 }
package/lib/cli/configure/processing/package.d.ts
CHANGED
@@ -51,7 +51,7 @@ export declare const createDependencyFilter: (names: readonly string[], type: 'd
     private?: boolean | undefined;
     publishConfig?: import("type-fest").PackageJson.PublishConfig | undefined;
     funding?: string | {
-        type?: import("type-fest").LiteralUnion<"
+        type?: import("type-fest").LiteralUnion<"individual" | "github" | "opencollective" | "patreon" | "foundation" | "corporation", string> | undefined;
         url: string;
     } | undefined;
     module?: string | undefined;
package/lib/cli/configure/processing/package.js
CHANGED
@@ -38,20 +38,18 @@ var import_normalize_package_data = __toESM(require("normalize-package-data"));
 var import_json = require("./json");
 var import_prettier = require("./prettier");
 const formatPackage = async (rawData) => {
-  const sortPackageJson = await import("sort-package-json");
   (0, import_normalize_package_data.default)(rawData);
-
-
-
-  delete data.name;
+  delete rawData._id;
+  if (rawData.name === "") {
+    delete rawData.name;
   }
-  if (
-  delete
+  if (rawData.readme === "ERROR: No README data found!") {
+    delete rawData.readme;
   }
-  if (
-  delete
+  if (rawData.version === "") {
+    delete rawData.version;
   }
-  return (0, import_prettier.formatPrettier)(JSON.stringify(
+  return (0, import_prettier.formatPrettier)(JSON.stringify(rawData), {
     filepath: "package.json"
   });
 };
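`formatPackage` drops its dynamic import of `sort-package-json`; key sorting is now delegated to Prettier via the `prettier-plugin-packagejson` dependency added in `package/package.json` below, and the function instead prunes the fields that `normalize-package-data` injects. The new source, excerpted from the source map below:

```ts
// Excerpt of the new src/cli/configure/processing/package.ts
// (from the source map).
export const formatPackage = async (rawData: PackageJson) => {
  normalizeData(rawData);

  // normalize-package-data fields that aren't useful for applications
  delete rawData._id;

  if (rawData.name === '') {
    delete rawData.name;
  }

  if (rawData.readme === 'ERROR: No README data found!') {
    delete rawData.readme;
  }

  if (rawData.version === '') {
    delete rawData.version;
  }

  return formatPrettier(JSON.stringify(rawData), {
    filepath: 'package.json',
  });
};
```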
package/lib/cli/configure/processing/package.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../../../src/cli/configure/processing/package.ts"],
-
"sourcesContent": ["import normalizeData from 'normalize-package-data';\n\nimport type { PackageJson } from '../types';\n\nimport { parseObject } from './json';\nimport { formatPrettier } from './prettier';\n\nexport const formatPackage = async (rawData: PackageJson) => {\n
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAA0B;AAI1B,kBAA4B;AAC5B,sBAA+B;AAExB,MAAM,gBAAgB,OAAO,YAAyB;AAC3D,
+
"sourcesContent": ["import normalizeData from 'normalize-package-data';\n\nimport type { PackageJson } from '../types';\n\nimport { parseObject } from './json';\nimport { formatPrettier } from './prettier';\n\nexport const formatPackage = async (rawData: PackageJson) => {\n normalizeData(rawData);\n\n // normalize-package-data fields that aren't useful for applications\n\n delete rawData._id;\n\n if (rawData.name === '') {\n delete rawData.name;\n }\n\n if (rawData.readme === 'ERROR: No README data found!') {\n delete rawData.readme;\n }\n\n if (rawData.version === '') {\n delete rawData.version;\n }\n\n return formatPrettier(JSON.stringify(rawData), {\n filepath: 'package.json',\n });\n};\n\nexport const parsePackage = (\n input: string | undefined,\n): PackageJson | undefined => {\n const data = parseObject(input);\n\n if (data === undefined) {\n return;\n }\n\n normalizeData(data);\n\n return data;\n};\n\nexport const createDependencyFilter = (\n names: readonly string[],\n type: 'dependencies' | 'devDependencies',\n) => {\n const set = new Set(names);\n\n return (data: PackageJson) => ({\n ...data,\n [type]: Object.fromEntries(\n Object.entries(data[type] ?? {}).filter(([name]) => !set.has(name)),\n ),\n });\n};\n\nexport const withPackage =\n (fn: (data: PackageJson) => PackageJson) => (input: string | undefined) => {\n const inputObject = parsePackage(input);\n\n const outputObject = fn(inputObject ?? {});\n\n return formatPackage(outputObject);\n };\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAA0B;AAI1B,kBAA4B;AAC5B,sBAA+B;AAExB,MAAM,gBAAgB,OAAO,YAAyB;AAC3D,oCAAAA,SAAc,OAAO;AAIrB,SAAO,QAAQ;AAEf,MAAI,QAAQ,SAAS,IAAI;AACvB,WAAO,QAAQ;AAAA,EACjB;AAEA,MAAI,QAAQ,WAAW,gCAAgC;AACrD,WAAO,QAAQ;AAAA,EACjB;AAEA,MAAI,QAAQ,YAAY,IAAI;AAC1B,WAAO,QAAQ;AAAA,EACjB;AAEA,aAAO,gCAAe,KAAK,UAAU,OAAO,GAAG;AAAA,IAC7C,UAAU;AAAA,EACZ,CAAC;AACH;AAEO,MAAM,eAAe,CAC1B,UAC4B;AAC5B,QAAM,WAAO,yBAAY,KAAK;AAE9B,MAAI,SAAS,QAAW;AACtB;AAAA,EACF;AAEA,oCAAAA,SAAc,IAAI;AAElB,SAAO;AACT;AAEO,MAAM,yBAAyB,CACpC,OACA,SACG;AACH,QAAM,MAAM,IAAI,IAAI,KAAK;AAEzB,SAAO,CAAC,UAAuB;AAAA,IAC7B,GAAG;AAAA,IACH,CAAC,IAAI,GAAG,OAAO;AAAA,MACb,OAAO,QAAQ,KAAK,IAAI,KAAK,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI,MAAM,CAAC,IAAI,IAAI,IAAI,CAAC;AAAA,IACpE;AAAA,EACF;AACF;AAEO,MAAM,cACX,CAAC,OAA2C,CAAC,UAA8B;AACzE,QAAM,cAAc,aAAa,KAAK;AAEtC,QAAM,eAAe,GAAG,eAAe,CAAC,CAAC;AAEzC,SAAO,cAAc,YAAY;AACnC;",
 "names": ["normalizeData"]
 }
package/lib/cli/lint/autofix.js
CHANGED
@@ -110,7 +110,7 @@ const shouldPush = async ({
     headCommitMessage = await Git.getHeadCommitMessage({ dir });
   } catch {
   }
-  if (headCommitMessage
+  if (headCommitMessage?.startsWith(AUTOFIX_COMMIT_MESSAGE)) {
     return false;
   }
   return true;
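The guard on the head commit becomes a prefix match (the removed comparison is truncated in this diff). The new source, visible in the source map below, carries its own explanation:

```ts
// Excerpt of the new src/cli/lint/autofix.ts (from the source map).
if (headCommitMessage?.startsWith(AUTOFIX_COMMIT_MESSAGE)) {
  // Short circuit when the head commit appears to be one of our autofixes.
  // Repeating the same operation is unlikely to correct outstanding issues.
  return false;
}
```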
package/lib/cli/lint/autofix.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../../src/cli/lint/autofix.ts"],
-
"sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport fs from 'fs-extra';\nimport simpleGit from 'simple-git';\n\nimport * as Buildkite from '../../api/buildkite';\nimport * as Git from '../../api/git';\nimport * as GitHub from '../../api/github';\nimport { isCiEnv } from '../../utils/env';\nimport { createLogger, log } from '../../utils/logging';\nimport { throwOnTimeout } from '../../utils/wait';\nimport { runESLint } from '../adapter/eslint';\nimport { runPrettier } from '../adapter/prettier';\nimport { JEST_SETUP_FILES } from '../configure/addEmptyExports';\nimport { RENOVATE_CONFIG_FILENAMES } from '../configure/modules/renovate';\nimport { SERVER_LISTENER_FILENAME } from '../configure/patchServerListener';\nimport { REFRESHABLE_IGNORE_FILES } from '../configure/refreshIgnoreFiles';\n\nimport type { Input } from './types';\n\nconst RENOVATE_DEFAULT_PREFIX = 'renovate';\n\nconst AUTOFIX_COMMIT_MESSAGE = 'Run `skuba format`';\n\nconst AUTOFIX_DELETE_FILES = [\n // Try to delete this SEEK-Jobs/gutenberg automation file that may have been\n // accidentally committed in a prior autofix.\n 'Dockerfile-incunabulum',\n];\n\nconst AUTOFIX_CODEGEN_FILES = new Set<string>([\n ...AUTOFIX_DELETE_FILES,\n ...JEST_SETUP_FILES,\n ...REFRESHABLE_IGNORE_FILES,\n ...RENOVATE_CONFIG_FILENAMES,\n SERVER_LISTENER_FILENAME,\n]);\n\nexport const AUTOFIX_IGNORE_FILES: Git.ChangedFile[] = [\n {\n path: '.npmrc',\n state: 'added',\n },\n {\n // This file may already exist in version control, but we shouldn't commit\n // further changes as the CI environment may have appended an npm token.\n path: '.npmrc',\n state: 'modified',\n },\n {\n path: 'Dockerfile-incunabulum',\n state: 'added',\n },\n];\n\nconst shouldPush = async ({\n currentBranch,\n dir,\n}: {\n currentBranch?: string;\n dir: string;\n}) => {\n if (!isCiEnv()) {\n // We're not running in a CI environment so we don't need to push autofixes.\n // Ideally we'd drive this off of repository write permissions, but that is\n // non-trivial to infer without attempting an actual write.\n return false;\n }\n\n const isDefaultBuildkiteBranch =\n currentBranch &&\n [process.env.BUILDKITE_PIPELINE_DEFAULT_BRANCH, 'master', 'main'].includes(\n currentBranch,\n );\n\n const isProtectedGitHubBranch = process.env.GITHUB_REF_PROTECTED === 'true';\n\n if (isDefaultBuildkiteBranch || isProtectedGitHubBranch) {\n // The current branch is a protected branch.\n // We respect GitHub Flow; avoid pushing directly to the default branch.\n return false;\n }\n\n if (currentBranch?.startsWith(RENOVATE_DEFAULT_PREFIX)) {\n try {\n await GitHub.getPullRequestNumber();\n } catch (error) {\n const warning =\n 'An autofix is available, but it was not pushed because an open pull request for this Renovate branch could not be found. If a pull request has since been created, retry the lint step to push the fix.';\n log.warn(warning);\n try {\n await Buildkite.annotate(Buildkite.md.terminal(warning));\n } catch {}\n\n return false;\n }\n }\n\n let headCommitMessage;\n try {\n headCommitMessage = await Git.getHeadCommitMessage({ dir });\n } catch {}\n\n if (headCommitMessage
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,sBAAe;AACf,wBAAsB;AAEtB,gBAA2B;AAC3B,UAAqB;AACrB,aAAwB;AACxB,iBAAwB;AACxB,qBAAkC;AAClC,kBAA+B;AAC/B,oBAA0B;AAC1B,sBAA4B;AAC5B,6BAAiC;AACjC,sBAA0C;AAC1C,iCAAyC;AACzC,gCAAyC;AAIzC,MAAM,0BAA0B;AAEhC,MAAM,yBAAyB;AAE/B,MAAM,uBAAuB;AAAA;AAAA;AAAA,EAG3B;AACF;AAEA,MAAM,wBAAwB,oBAAI,IAAY;AAAA,EAC5C,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH;AACF,CAAC;AAEM,MAAM,uBAA0C;AAAA,EACrD;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AAAA,EACA;AAAA;AAAA;AAAA,IAGE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AACF;AAEA,MAAM,aAAa,OAAO;AAAA,EACxB;AAAA,EACA;AACF,MAGM;AACJ,MAAI,KAAC,oBAAQ,GAAG;AAId,WAAO;AAAA,EACT;AAEA,QAAM,2BACJ,iBACA,CAAC,QAAQ,IAAI,mCAAmC,UAAU,MAAM,EAAE;AAAA,IAChE;AAAA,EACF;AAEF,QAAM,0BAA0B,QAAQ,IAAI,yBAAyB;AAErE,MAAI,4BAA4B,yBAAyB;AAGvD,WAAO;AAAA,EACT;AAEA,MAAI,eAAe,WAAW,uBAAuB,GAAG;AACtD,QAAI;AACF,YAAM,OAAO,qBAAqB;AAAA,IACpC,SAAS,OAAO;AACd,YAAM,UACJ;AACF,yBAAI,KAAK,OAAO;AAChB,UAAI;AACF,cAAM,UAAU,SAAS,UAAU,GAAG,SAAS,OAAO,CAAC;AAAA,MACzD,QAAQ;AAAA,MAAC;AAET,aAAO;AAAA,IACT;AAAA,EACF;AAEA,MAAI;AACJ,MAAI;AACF,wBAAoB,MAAM,IAAI,qBAAqB,EAAE,IAAI,CAAC;AAAA,EAC5D,QAAQ;AAAA,EAAC;AAET,MAAI,sBAAsB,
+
"sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport fs from 'fs-extra';\nimport simpleGit from 'simple-git';\n\nimport * as Buildkite from '../../api/buildkite';\nimport * as Git from '../../api/git';\nimport * as GitHub from '../../api/github';\nimport { isCiEnv } from '../../utils/env';\nimport { createLogger, log } from '../../utils/logging';\nimport { throwOnTimeout } from '../../utils/wait';\nimport { runESLint } from '../adapter/eslint';\nimport { runPrettier } from '../adapter/prettier';\nimport { JEST_SETUP_FILES } from '../configure/addEmptyExports';\nimport { RENOVATE_CONFIG_FILENAMES } from '../configure/modules/renovate';\nimport { SERVER_LISTENER_FILENAME } from '../configure/patchServerListener';\nimport { REFRESHABLE_IGNORE_FILES } from '../configure/refreshIgnoreFiles';\n\nimport type { Input } from './types';\n\nconst RENOVATE_DEFAULT_PREFIX = 'renovate';\n\nconst AUTOFIX_COMMIT_MESSAGE = 'Run `skuba format`';\n\nconst AUTOFIX_DELETE_FILES = [\n // Try to delete this SEEK-Jobs/gutenberg automation file that may have been\n // accidentally committed in a prior autofix.\n 'Dockerfile-incunabulum',\n];\n\nconst AUTOFIX_CODEGEN_FILES = new Set<string>([\n ...AUTOFIX_DELETE_FILES,\n ...JEST_SETUP_FILES,\n ...REFRESHABLE_IGNORE_FILES,\n ...RENOVATE_CONFIG_FILENAMES,\n SERVER_LISTENER_FILENAME,\n]);\n\nexport const AUTOFIX_IGNORE_FILES: Git.ChangedFile[] = [\n {\n path: '.npmrc',\n state: 'added',\n },\n {\n // This file may already exist in version control, but we shouldn't commit\n // further changes as the CI environment may have appended an npm token.\n path: '.npmrc',\n state: 'modified',\n },\n {\n path: 'Dockerfile-incunabulum',\n state: 'added',\n },\n];\n\nconst shouldPush = async ({\n currentBranch,\n dir,\n}: {\n currentBranch?: string;\n dir: string;\n}) => {\n if (!isCiEnv()) {\n // We're not running in a CI environment so we don't need to push autofixes.\n // Ideally we'd drive this off of repository write permissions, but that is\n // non-trivial to infer without attempting an actual write.\n return false;\n }\n\n const isDefaultBuildkiteBranch =\n currentBranch &&\n [process.env.BUILDKITE_PIPELINE_DEFAULT_BRANCH, 'master', 'main'].includes(\n currentBranch,\n );\n\n const isProtectedGitHubBranch = process.env.GITHUB_REF_PROTECTED === 'true';\n\n if (isDefaultBuildkiteBranch || isProtectedGitHubBranch) {\n // The current branch is a protected branch.\n // We respect GitHub Flow; avoid pushing directly to the default branch.\n return false;\n }\n\n if (currentBranch?.startsWith(RENOVATE_DEFAULT_PREFIX)) {\n try {\n await GitHub.getPullRequestNumber();\n } catch (error) {\n const warning =\n 'An autofix is available, but it was not pushed because an open pull request for this Renovate branch could not be found. 
If a pull request has since been created, retry the lint step to push the fix.';\n log.warn(warning);\n try {\n await Buildkite.annotate(Buildkite.md.terminal(warning));\n } catch {}\n\n return false;\n }\n }\n\n let headCommitMessage;\n try {\n headCommitMessage = await Git.getHeadCommitMessage({ dir });\n } catch {}\n\n if (headCommitMessage?.startsWith(AUTOFIX_COMMIT_MESSAGE)) {\n // Short circuit when the head commit appears to be one of our autofixes.\n // Repeating the same operation is unlikely to correct outstanding issues.\n return false;\n }\n\n // Allow the push attempt to go ahead if our guards have been cleared.\n return true;\n};\n\ninterface AutofixParameters {\n debug: Input['debug'];\n\n eslint: boolean;\n prettier: boolean;\n}\n\n/**\n * @returns Whether skuba codegenned a file change which should be included in\n * an autofix commit.\n */\nconst tryCodegen = async (dir: string): Promise<boolean> => {\n try {\n // Try to forcibly remove `AUTOFIX_DELETE_FILES` from source control.\n // These may include outdated configuration files or internal files that\n // were accidentally committed by an autofix.\n await Promise.all(\n AUTOFIX_DELETE_FILES.map((filename) =>\n fs.promises.rm(path.join(dir, filename), { force: true }),\n ),\n );\n\n // Search codegenned file changes in the local Git working directory.\n // These may include the `AUTOFIX_DELETE_FILES` deleted above or fixups to\n // ignore files and module exports that were run at the start of the\n // `skuba lint` command.\n const changedFiles = await Git.getChangedFiles({\n dir,\n\n ignore: AUTOFIX_IGNORE_FILES,\n });\n\n // Determine if a meaningful codegen change\n return changedFiles.some((changedFile) =>\n AUTOFIX_CODEGEN_FILES.has(changedFile.path),\n );\n } catch (err) {\n log.warn(log.bold('Failed to evaluate codegen changes.'));\n log.subtle(inspect(err));\n\n return false;\n }\n};\n\nexport const autofix = async (params: AutofixParameters): Promise<void> => {\n const dir = process.cwd();\n\n const codegen = await tryCodegen(dir);\n\n if (!params.eslint && !params.prettier && !codegen) {\n return;\n }\n\n let currentBranch;\n try {\n currentBranch = await Git.currentBranch({ dir });\n } catch {}\n\n if (!(await shouldPush({ currentBranch, dir }))) {\n return;\n }\n\n try {\n log.newline();\n if (!params.eslint && !params.prettier) {\n log.warn('Trying to push codegen updates...');\n } else {\n log.warn(\n `Trying to autofix with ${\n params.eslint ? 
'ESLint and ' : ''\n }Prettier...`,\n );\n\n const logger = createLogger(params.debug);\n\n if (params.eslint) {\n await runESLint('format', logger);\n }\n // Unconditionally re-run Prettier; reaching here means we have pre-existing\n // format violations or may have created new ones through ESLint fixes.\n await runPrettier('format', logger);\n }\n\n if (process.env.GITHUB_ACTIONS) {\n // GitHub runners have Git installed locally\n const ref = await Git.commitAllChanges({\n dir,\n message: AUTOFIX_COMMIT_MESSAGE,\n\n ignore: AUTOFIX_IGNORE_FILES,\n });\n\n if (!ref) {\n return log.warn('No autofixes detected.');\n }\n\n await throwOnTimeout(simpleGit().push(), { s: 30 });\n log.warn(`Pushed fix commit ${ref}.`);\n return;\n }\n\n // Other CI Environments, use GitHub API\n if (!currentBranch) {\n log.warn('Could not determine the current branch.');\n log.warn(\n 'Please propagate BUILDKITE_BRANCH, GITHUB_HEAD_REF, GITHUB_REF_NAME, or the .git directory to your container.',\n );\n return;\n }\n\n const ref = await throwOnTimeout(\n GitHub.uploadAllFileChanges({\n branch: currentBranch,\n dir,\n messageHeadline: AUTOFIX_COMMIT_MESSAGE,\n\n ignore: AUTOFIX_IGNORE_FILES,\n }),\n { s: 30 },\n );\n\n if (!ref) {\n return log.warn('No autofixes detected.');\n }\n\n log.warn(`Pushed fix commit ${ref}.`);\n } catch (err) {\n log.warn(log.bold('Failed to push fix commit.'));\n log.warn(\n log.bold(\n 'Does your CI environment have write access to your Git repository?',\n ),\n );\n log.subtle(inspect(err));\n }\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,sBAAe;AACf,wBAAsB;AAEtB,gBAA2B;AAC3B,UAAqB;AACrB,aAAwB;AACxB,iBAAwB;AACxB,qBAAkC;AAClC,kBAA+B;AAC/B,oBAA0B;AAC1B,sBAA4B;AAC5B,6BAAiC;AACjC,sBAA0C;AAC1C,iCAAyC;AACzC,gCAAyC;AAIzC,MAAM,0BAA0B;AAEhC,MAAM,yBAAyB;AAE/B,MAAM,uBAAuB;AAAA;AAAA;AAAA,EAG3B;AACF;AAEA,MAAM,wBAAwB,oBAAI,IAAY;AAAA,EAC5C,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH;AACF,CAAC;AAEM,MAAM,uBAA0C;AAAA,EACrD;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AAAA,EACA;AAAA;AAAA;AAAA,IAGE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AACF;AAEA,MAAM,aAAa,OAAO;AAAA,EACxB;AAAA,EACA;AACF,MAGM;AACJ,MAAI,KAAC,oBAAQ,GAAG;AAId,WAAO;AAAA,EACT;AAEA,QAAM,2BACJ,iBACA,CAAC,QAAQ,IAAI,mCAAmC,UAAU,MAAM,EAAE;AAAA,IAChE;AAAA,EACF;AAEF,QAAM,0BAA0B,QAAQ,IAAI,yBAAyB;AAErE,MAAI,4BAA4B,yBAAyB;AAGvD,WAAO;AAAA,EACT;AAEA,MAAI,eAAe,WAAW,uBAAuB,GAAG;AACtD,QAAI;AACF,YAAM,OAAO,qBAAqB;AAAA,IACpC,SAAS,OAAO;AACd,YAAM,UACJ;AACF,yBAAI,KAAK,OAAO;AAChB,UAAI;AACF,cAAM,UAAU,SAAS,UAAU,GAAG,SAAS,OAAO,CAAC;AAAA,MACzD,QAAQ;AAAA,MAAC;AAET,aAAO;AAAA,IACT;AAAA,EACF;AAEA,MAAI;AACJ,MAAI;AACF,wBAAoB,MAAM,IAAI,qBAAqB,EAAE,IAAI,CAAC;AAAA,EAC5D,QAAQ;AAAA,EAAC;AAET,MAAI,mBAAmB,WAAW,sBAAsB,GAAG;AAGzD,WAAO;AAAA,EACT;AAGA,SAAO;AACT;AAaA,MAAM,aAAa,OAAO,QAAkC;AAC1D,MAAI;AAIF,UAAM,QAAQ;AAAA,MACZ,qBAAqB;AAAA,QAAI,CAAC,aACxB,gBAAAA,QAAG,SAAS,GAAG,YAAAC,QAAK,KAAK,KAAK,QAAQ,GAAG,EAAE,OAAO,KAAK,CAAC;AAAA,MAC1D;AAAA,IACF;AAMA,UAAM,eAAe,MAAM,IAAI,gBAAgB;AAAA,MAC7C;AAAA,MAEA,QAAQ;AAAA,IACV,CAAC;AAGD,WAAO,aAAa;AAAA,MAAK,CAAC,gBACxB,sBAAsB,IAAI,YAAY,IAAI;AAAA,IAC5C;AAAA,EACF,SAAS,KAAK;AACZ,uBAAI,KAAK,mBAAI,KAAK,qCAAqC,CAAC;AACxD,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAEvB,WAAO;AAAA,EACT;AACF;AAEO,MAAM,UAAU,OAAO,WAA6C;AACzE,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,UAAU,MAAM,WAAW,GAAG;AAEpC,MAAI,CAAC,OAAO,UAAU,CAAC,OAAO,YAAY,CAAC,SAAS;AAClD;AAAA,EACF;AAEA,MAAI;AACJ,MAAI;AACF,oBAAgB,MAAM,IAAI,cAAc,EAAE,IAAI,CAAC;AAAA,EACjD,QAAQ;AAAA,EAAC;AAET,MAAI,CAAE,MAAM,WAAW,EAAE,eAAe,IAAI,CAAC,GAAI;AAC/C;AAAA,EACF;AAEA,MAAI;AACF,uBAAI,QAAQ;AACZ,QAAI,CAAC,OAAO,UAAU,CAAC,OAAO,UAAU;AACtC,yBAAI,KAAK,mCAAmC;AAAA,IAC9C,OAAO;AACL,yBAAI;AAAA,QACF,0BACE,OAAO,SAAS,gBAAgB,EAClC;AAAA,MACF;AAEA,YAAM,aAAS,6BAAa,OAAO,KAAK;AAExC,UAAI,OAAO,QAAQ;AACjB,kBAAM,yBAAU,UAAU,MAAM;AAAA,MAClC;AAGA,gBAAM,6BAAY,UAAU,MAAM;AAAA,IACpC;AAEA,QAAI,QAAQ,IAAI,gBAAgB;AAE9B,YAAMC,OAAM,MAAM,IAAI,iBAAiB;AAAA,QACrC;AAAA,QACA,SAAS;AAAA,QAET,QAAQ;AAAA,MACV,CAAC;AAED,UAAI,CAACA,MAAK;AACR,eAAO,mBAAI,KAAK,wBAAwB;AAAA,MAC1C;AAEA,gBAAM,gCAAe,kBAAAC,SAAU,EAAE,KAAK,GAAG,EAAE,GAAG,GAAG,CAAC;AAClD,yBAAI,KAAK,qBAAqBD,IAAG,GAAG;AACpC;AAAA,IACF;AAGA,QAAI,CAAC,eAAe;AAClB,yBAAI,KAAK,yCAAyC;AAClD,yBAAI;AAAA,QACF;AAAA,MACF;AACA;AAAA,IACF;AAEA,UAAM,MAAM,UAAM;AAAA,MAChB,OAAO,qBAAqB;AAAA,QAC1B,QAAQ;AAAA,QACR;AAAA,QACA,iBAAiB;AAAA,QAEjB,QAAQ;AAAA,MACV,CAAC;AAAA,MACD,EAAE,GAAG,GAAG;AAAA,IACV;AAEA,QAAI,CAAC,KAAK;AACR,aAAO,mBAAI,KAAK,wBAAwB;AAAA,IAC1C;AAEA,uBAAI,KAAK,qBAAqB,GAAG,GAAG;AAAA,EACtC,SAAS,KAAK;AACZ,uBAAI,KAAK,mBAAI,KAAK,4BAA4B,CAAC;AAC/C,uBAAI;AAAA,MACF,mBAAI;AAAA,QACF;AAAA,MACF;AAAA,IACF;AACA,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAAA,EACzB;AACF;",
 "names": ["fs", "path", "ref", "simpleGit"]
 }
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "skuba",
-  "version": "0.0.0-master-20230907050919",
+  "version": "0.0.0-master-20231002013336",
   "private": false,
   "description": "SEEK development toolkit for backend applications and packages",
   "homepage": "https://github.com/seek-oss/skuba#readme",
@@ -73,7 +73,7 @@
     "@octokit/graphql": "^7.0.0",
     "@octokit/graphql-schema": "^14.5.0",
     "@octokit/rest": "^20.0.0",
-    "@octokit/types": "^
+    "@octokit/types": "^12.0.0",
     "@types/jest": "^29.0.0",
     "@types/node": ">=18.12",
     "chalk": "^4.1.0",
@@ -103,12 +103,12 @@
     "npm-which": "^3.0.1",
     "picomatch": "^2.2.2",
     "prettier": "~3.0.3",
+    "prettier-plugin-packagejson": "^2.4.6",
     "read-pkg-up": "^7.0.1",
     "runtypes": "^6.0.0",
     "semantic-release": "^21.0.0",
     "serialize-error": "^8.0.1",
     "simple-git": "^3.5.0",
-    "sort-package-json": "^2.5.1",
     "strip-ansi": "^6.0.1",
     "ts-dedent": "^2.2.0",
     "ts-jest": "^29.1.0",
@@ -122,25 +122,25 @@
   "devDependencies": {
     "@changesets/cli": "2.26.2",
     "@changesets/get-github-info": "0.5.2",
-    "@jest/reporters": "29.
-    "@types/ejs": "3.1.
-    "@types/express": "4.17.
-    "@types/fs-extra": "11.0.
-    "@types/koa": "2.13.
-    "@types/libnpmsearch": "2.0.
+    "@jest/reporters": "29.7.0",
+    "@types/ejs": "3.1.3",
+    "@types/express": "4.17.18",
+    "@types/fs-extra": "11.0.2",
+    "@types/koa": "2.13.9",
+    "@types/libnpmsearch": "2.0.4",
     "@types/lodash.mergewith": "4.6.7",
     "@types/module-alias": "2.0.2",
     "@types/npm-which": "3.0.1",
-    "@types/picomatch": "2.3.
+    "@types/picomatch": "2.3.1",
     "@types/supertest": "2.0.12",
     "@types/validate-npm-package-name": "4.0.0",
     "enhanced-resolve": "5.15.0",
     "express": "4.18.2",
-    "fastify": "4.
+    "fastify": "4.23.2",
     "jsonfile": "6.1.0",
     "koa": "2.14.2",
-    "memfs": "4.
-    "remark-cli": "
+    "memfs": "4.4.0",
+    "remark-cli": "12.0.0",
     "remark-preset-lint-recommended": "6.1.3",
     "semver": "7.5.4",
     "supertest": "6.3.3",
package/template/express-rest-api/.buildkite/pipeline.yml
CHANGED
@@ -9,7 +9,7 @@ configs:
         NPM_READ_TOKEN: arn:aws:secretsmanager:ap-southeast-2:987872074697:secret:npm/npm-read-token

   - &docker-ecr-cache
-    seek-oss/docker-ecr-cache#v2.
+    seek-oss/docker-ecr-cache#v2.1.0: &docker-ecr-cache-defaults
       cache-on:
         - package.json
         - yarn.lock
@@ -36,7 +36,9 @@ steps:
     plugins:
       - *aws-sm
       - *private-npm
-      -
+      - seek-oss/docker-ecr-cache#v2.1.0:
+          <<: *docker-ecr-cache-defaults
+          skip-pull-from-cache: true

   - label: 🧪 Test & Lint
     commands:
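The same two-part edit recurs in the koa-rest-api, lambda-sqs-worker, and lambda-sqs-worker-cdk pipelines below: the cache plugin's options are hoisted into a `docker-ecr-cache-defaults` anchor, and the build step re-uses them via a YAML merge key while opting out of cache pulls. In isolation the pattern looks roughly like this (a sketch with unrelated keys elided):

```yaml
configs:
  plugins:
    - &docker-ecr-cache
      seek-oss/docker-ecr-cache#v2.1.0: &docker-ecr-cache-defaults
        cache-on:
          - package.json
          - yarn.lock

steps:
  - plugins:
      - seek-oss/docker-ecr-cache#v2.1.0:
          # Merge the anchored defaults, then opt out of pulling from cache.
          <<: *docker-ecr-cache-defaults
          skip-pull-from-cache: true
```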
package/template/koa-rest-api/.buildkite/pipeline.yml
CHANGED
@@ -9,7 +9,7 @@ configs:
         NPM_READ_TOKEN: arn:aws:secretsmanager:ap-southeast-2:987872074697:secret:npm/npm-read-token

   - &docker-ecr-cache
-    seek-oss/docker-ecr-cache#v2.
+    seek-oss/docker-ecr-cache#v2.1.0: &docker-ecr-cache-defaults
       cache-on:
         - package.json
         - yarn.lock
@@ -36,7 +36,9 @@ steps:
     plugins:
       - *aws-sm
      - *private-npm
-      -
+      - seek-oss/docker-ecr-cache#v2.1.0:
+          <<: *docker-ecr-cache-defaults
+          skip-pull-from-cache: true

   - label: 🧪 Test & Lint
     commands:
package/template/koa-rest-api/package.json
CHANGED
@@ -14,10 +14,10 @@
   "dependencies": {
     "@koa/router": "^12.0.0",
     "@opentelemetry/api": "^1.1.0",
-    "@opentelemetry/exporter-trace-otlp-grpc": "^0.
+    "@opentelemetry/exporter-trace-otlp-grpc": "^0.43.0",
     "@opentelemetry/instrumentation-aws-sdk": "^0.36.0",
-    "@opentelemetry/instrumentation-http": "^0.
-    "@opentelemetry/sdk-node": "^0.
+    "@opentelemetry/instrumentation-http": "^0.43.0",
+    "@opentelemetry/sdk-node": "^0.43.0",
     "@seek/logger": "^6.0.0",
     "aws-sdk": "^2.1039.0",
     "hot-shots": "^10.0.0",
package/template/lambda-sqs-worker/.buildkite/pipeline.yml
CHANGED
@@ -9,7 +9,7 @@ configs:
         NPM_READ_TOKEN: arn:aws:secretsmanager:ap-southeast-2:987872074697:secret:npm/npm-read-token

   - &docker-ecr-cache
-    seek-oss/docker-ecr-cache#v2.
+    seek-oss/docker-ecr-cache#v2.1.0: &docker-ecr-cache-defaults
       cache-on:
         - package.json
         - yarn.lock
@@ -26,7 +26,7 @@ configs:
         - yarn deploy
       concurrency: 1
       plugins:
-        - artifacts#v1.9.
+        - artifacts#v1.9.2:
             build: ${BUILDKITE_BUILD_ID}
             download: lib/*
         - *aws-sm
@@ -72,7 +72,9 @@ steps:
     plugins:
      - *aws-sm
       - *private-npm
-      -
+      - seek-oss/docker-ecr-cache#v2.1.0:
+          <<: *docker-ecr-cache-defaults
+          skip-pull-from-cache: true

   - wait
   - block: 🙋🏻‍♀️ Deploy Dev
package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml
CHANGED
@@ -9,7 +9,7 @@ configs:
         NPM_READ_TOKEN: arn:aws:secretsmanager:ap-southeast-2:987872074697:secret:npm/npm-read-token

   - &docker-ecr-cache
-    seek-oss/docker-ecr-cache#v2.
+    seek-oss/docker-ecr-cache#v2.1.0: &docker-ecr-cache-defaults
       cache-on:
         - package.json
         - yarn.lock
@@ -26,7 +26,7 @@ configs:
         - yarn deploy
       concurrency: 1
       plugins:
-        - artifacts#v1.9.
+        - artifacts#v1.9.2:
             build: ${BUILDKITE_BUILD_ID}
             download: lib/*
         - *aws-sm
@@ -69,7 +69,9 @@ steps:
     plugins:
       - *aws-sm
       - *private-npm
-      -
+      - seek-oss/docker-ecr-cache#v2.1.0:
+          <<: *docker-ecr-cache-defaults
+          skip-pull-from-cache: true

   - wait
   - block: 🙋🏻‍♀️ Deploy Dev