skuba 5.0.1 → 5.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/api/buildkite/annotate.d.ts +1 -1
- package/lib/api/git/commitAllChanges.d.ts +8 -1
- package/lib/api/git/commitAllChanges.js +3 -2
- package/lib/api/git/commitAllChanges.js.map +2 -2
- package/lib/api/git/getChangedFiles.d.ts +8 -2
- package/lib/api/git/getChangedFiles.js +7 -2
- package/lib/api/git/getChangedFiles.js.map +2 -2
- package/lib/api/git/index.d.ts +1 -0
- package/lib/api/git/index.js.map +2 -2
- package/lib/api/github/checkRun.d.ts +2 -2
- package/lib/api/github/push.d.ts +9 -3
- package/lib/api/github/push.js +4 -3
- package/lib/api/github/push.js.map +2 -2
- package/lib/api/jest/index.d.ts +2 -2
- package/lib/cli/adapter/prettier.js +15 -1
- package/lib/cli/adapter/prettier.js.map +2 -2
- package/lib/cli/configure/processing/prettier.d.ts +1 -1
- package/lib/cli/configure/processing/typescript.d.ts +2 -2
- package/lib/cli/configure/types.d.ts +6 -6
- package/lib/cli/init/getConfig.js +18 -5
- package/lib/cli/init/getConfig.js.map +2 -2
- package/lib/cli/init/prompts.d.ts +14 -1
- package/lib/cli/init/prompts.js +7 -1
- package/lib/cli/init/prompts.js.map +2 -2
- package/lib/cli/init/types.d.ts +3 -2
- package/lib/cli/init/types.js +2 -1
- package/lib/cli/init/types.js.map +2 -2
- package/lib/cli/init/validation.d.ts +5 -0
- package/lib/cli/init/validation.js +10 -2
- package/lib/cli/init/validation.js.map +2 -2
- package/lib/cli/lint/autofix.d.ts +2 -0
- package/lib/cli/lint/autofix.js +17 -3
- package/lib/cli/lint/autofix.js.map +2 -2
- package/lib/skuba.js.map +1 -1
- package/lib/utils/command.d.ts +1 -1
- package/lib/utils/copy.d.ts +1 -1
- package/lib/utils/error.d.ts +1 -1
- package/lib/utils/exec.d.ts +2 -2
- package/lib/utils/logging.d.ts +1 -1
- package/lib/utils/manifest.d.ts +1 -1
- package/lib/utils/template.d.ts +2 -2
- package/lib/utils/version.d.ts +1 -1
- package/lib/utils/wait.d.ts +1 -1
- package/lib/wrapper/main.js.map +1 -1
- package/package.json +14 -14
- package/template/express-rest-api/.buildkite/pipeline.yml +1 -1
- package/template/express-rest-api/.gantry/common.yml +1 -1
- package/template/express-rest-api/.gantry/dev.yml +1 -0
- package/template/express-rest-api/.gantry/prod.yml +1 -0
- package/template/express-rest-api/Dockerfile +1 -1
- package/template/express-rest-api/Dockerfile.dev-deps +1 -1
- package/template/express-rest-api/README.md +7 -10
- package/template/express-rest-api/gantry.apply.yml +2 -2
- package/template/express-rest-api/gantry.build.yml +1 -1
- package/template/greeter/.buildkite/pipeline.yml +1 -1
- package/template/greeter/Dockerfile +1 -1
- package/template/greeter/README.md +6 -9
- package/template/koa-rest-api/.buildkite/pipeline.yml +1 -1
- package/template/koa-rest-api/.gantry/common.yml +1 -1
- package/template/koa-rest-api/.gantry/dev.yml +1 -0
- package/template/koa-rest-api/.gantry/prod.yml +1 -0
- package/template/koa-rest-api/Dockerfile +1 -1
- package/template/koa-rest-api/Dockerfile.dev-deps +1 -1
- package/template/koa-rest-api/README.md +7 -10
- package/template/koa-rest-api/gantry.apply.yml +2 -2
- package/template/koa-rest-api/gantry.build.yml +1 -1
- package/template/koa-rest-api/package.json +2 -2
- package/template/lambda-sqs-worker/.buildkite/pipeline.yml +4 -4
- package/template/lambda-sqs-worker/.nvmrc +1 -1
- package/template/lambda-sqs-worker/Dockerfile +1 -1
- package/template/lambda-sqs-worker/README.md +7 -10
- package/template/lambda-sqs-worker/package.json +10 -5
- package/template/lambda-sqs-worker/serverless.yml +2 -4
- package/template/lambda-sqs-worker/src/app.test.ts +5 -6
- package/template/lambda-sqs-worker/src/framework/handler.test.ts +2 -2
- package/template/lambda-sqs-worker/src/hooks.ts +22 -30
- package/template/lambda-sqs-worker/src/services/aws.ts +2 -2
- package/template/lambda-sqs-worker/src/services/pipelineEventSender.test.ts +9 -7
- package/template/lambda-sqs-worker/src/services/pipelineEventSender.ts +6 -4
- package/template/lambda-sqs-worker/src/testing/services.ts +11 -7
- package/template/lambda-sqs-worker/tsconfig.json +2 -2
- package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +4 -4
- package/template/lambda-sqs-worker-cdk/.nvmrc +1 -1
- package/template/lambda-sqs-worker-cdk/Dockerfile +1 -1
- package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap +2 -4
- package/template/lambda-sqs-worker-cdk/infra/appStack.ts +4 -4
- package/template/lambda-sqs-worker-cdk/package.json +4 -4
- package/template/lambda-sqs-worker-cdk/tsconfig.json +2 -2
- package/template/oss-npm-package/_package.json +1 -1
- package/template/private-npm-package/_package.json +1 -1

package/lib/api/git/commitAllChanges.d.ts
CHANGED

@@ -1,12 +1,19 @@
 import type { Identity } from './commit';
+import type { ChangedFile } from './getChangedFiles';
 interface CommitAllParameters {
 dir: string;
 message: string;
 author?: Identity;
 committer?: Identity;
+/**
+* File changes to exclude from the commit.
+*
+* Defaults to `[]` (no exclusions).
+*/
+ignore?: ChangedFile[];
 }
 /**
 * Stages all changes and writes a commit to the local Git repository.
 */
-export declare const commitAllChanges: ({ dir, message, author, committer, }: CommitAllParameters) => Promise<string | undefined>;
+export declare const commitAllChanges: ({ dir, message, author, committer, ignore, }: CommitAllParameters) => Promise<string | undefined>;
 export {};
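
This hunk adds an optional `ignore` list to `commitAllChanges`. A minimal usage sketch, assuming the `Git` namespace that skuba exposes from its root entry point and a hypothetical lockfile path:

```typescript
import { Git } from 'skuba';

const main = async () => {
  // Commit everything in the working tree except a generated lockfile.
  // An `ignore` entry only excludes a change when both `path` and `state` match.
  const commitId = await Git.commitAllChanges({
    dir: process.cwd(),
    message: 'Update project files',
    ignore: [{ path: 'package-lock.json', state: 'modified' }],
  });

  // `undefined` means nothing was left to commit after the exclusions.
  console.log(commitId ?? 'no changes');
};

main().catch(console.error);
```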

package/lib/api/git/commitAllChanges.js
CHANGED

@@ -35,9 +35,10 @@ const commitAllChanges = async ({
 dir,
 message,
 author,
-committer
+committer,
+ignore
 }) => {
-const changedFiles = await (0, import_getChangedFiles.getChangedFiles)({ dir });
+const changedFiles = await (0, import_getChangedFiles.getChangedFiles)({ dir, ignore });
 if (!changedFiles.length) {
 return;
 }

package/lib/api/git/commitAllChanges.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../../src/api/git/commitAllChanges.ts"],
-
"sourcesContent": ["import fs from 'fs-extra';\nimport git from 'isomorphic-git';\n\nimport { commit } from './commit';\nimport type { Identity } from './commit';\nimport { getChangedFiles } from './getChangedFiles';\n\ninterface CommitAllParameters {\n dir: string;\n message: string;\n author?: Identity;\n committer?: Identity;\n}\n\n/**\n * Stages all changes and writes a commit to the local Git repository.\n */\nexport const commitAllChanges = async ({\n dir,\n message,\n author,\n committer,\n}: CommitAllParameters): Promise<string | undefined> => {\n const changedFiles = await getChangedFiles({ dir });\n\n if (!changedFiles.length) {\n return;\n }\n\n await Promise.all(\n changedFiles.map((file) =>\n file.state === 'deleted'\n ? git.remove({ fs, dir, filepath: file.path })\n : git.add({ fs, dir, filepath: file.path }),\n ),\n );\n\n return commit({\n dir,\n message,\n author,\n committer,\n });\n};\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AACf,4BAAgB;AAEhB,oBAAuB;
+
"sourcesContent": ["import fs from 'fs-extra';\nimport git from 'isomorphic-git';\n\nimport { commit } from './commit';\nimport type { Identity } from './commit';\nimport type { ChangedFile } from './getChangedFiles';\nimport { getChangedFiles } from './getChangedFiles';\n\ninterface CommitAllParameters {\n dir: string;\n message: string;\n author?: Identity;\n committer?: Identity;\n\n /**\n * File changes to exclude from the commit.\n *\n * Defaults to `[]` (no exclusions).\n */\n ignore?: ChangedFile[];\n}\n\n/**\n * Stages all changes and writes a commit to the local Git repository.\n */\nexport const commitAllChanges = async ({\n dir,\n message,\n\n author,\n committer,\n ignore,\n}: CommitAllParameters): Promise<string | undefined> => {\n const changedFiles = await getChangedFiles({ dir, ignore });\n\n if (!changedFiles.length) {\n return;\n }\n\n await Promise.all(\n changedFiles.map((file) =>\n file.state === 'deleted'\n ? git.remove({ fs, dir, filepath: file.path })\n : git.add({ fs, dir, filepath: file.path }),\n ),\n );\n\n return commit({\n dir,\n message,\n author,\n committer,\n });\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AACf,4BAAgB;AAEhB,oBAAuB;AAGvB,6BAAgC;AAmBzB,MAAM,mBAAmB,OAAO;AAAA,EACrC;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AACF,MAAwD;AACtD,QAAM,eAAe,UAAM,wCAAgB,EAAE,KAAK,OAAO,CAAC;AAE1D,MAAI,CAAC,aAAa,QAAQ;AACxB;AAAA,EACF;AAEA,QAAM,QAAQ;AAAA,IACZ,aAAa;AAAA,MAAI,CAAC,SAChB,KAAK,UAAU,YACX,sBAAAA,QAAI,OAAO,EAAE,oBAAAC,SAAI,KAAK,UAAU,KAAK,KAAK,CAAC,IAC3C,sBAAAD,QAAI,IAAI,EAAE,oBAAAC,SAAI,KAAK,UAAU,KAAK,KAAK,CAAC;AAAA,IAC9C;AAAA,EACF;AAEA,aAAO,sBAAO;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;",
 "names": ["git", "fs"]
 }

package/lib/api/git/getChangedFiles.d.ts
CHANGED

@@ -1,14 +1,20 @@
-
+type ChangedFileState = 'added' | 'modified' | 'deleted';
 export interface ChangedFile {
 path: string;
 state: ChangedFileState;
 }
 interface ChangedFilesParameters {
 dir: string;
+/**
+* File changes to exclude from the result.
+*
+* Defaults to `[]` (no exclusions).
+*/
+ignore?: ChangedFile[];
 }
 /**
 * Returns all the files which have been added, modified or deleted in the
 * working directory of the local Git repository since the last commit.
 */
-export declare const getChangedFiles: ({ dir, }: ChangedFilesParameters) => Promise<ChangedFile[]>;
+export declare const getChangedFiles: ({ dir, ignore, }: ChangedFilesParameters) => Promise<ChangedFile[]>;
 export {};
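
`getChangedFiles` gains the same optional `ignore` parameter, and the compiled hunk that follows shows that an entry is excluded only when both `path` and `state` match exactly. A short sketch, again assuming the `Git` namespace export from `skuba`:

```typescript
import { Git } from 'skuba';

const main = async () => {
  // List working-tree changes, excluding a deleted scratch file.
  // A merely *modified* `tmp/scratch.txt` would still be reported,
  // because matching is exact on both `path` and `state`.
  const files = await Git.getChangedFiles({
    dir: process.cwd(),
    ignore: [{ path: 'tmp/scratch.txt', state: 'deleted' }],
  });

  for (const file of files) {
    console.log(`${file.state}: ${file.path}`);
  }
};

main().catch(console.error);
```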

package/lib/api/git/getChangedFiles.js
CHANGED

@@ -40,12 +40,17 @@ const mapState = (row) => {
 return "deleted";
 };
 const getChangedFiles = async ({
-dir
+dir,
+ignore = []
 }) => {
 const allFiles = await import_isomorphic_git.default.statusMatrix({ fs: import_fs_extra.default, dir });
 return allFiles.filter(
 (row) => row[import_statusMatrix.HEAD] !== import_statusMatrix.UNMODIFIED || row[import_statusMatrix.WORKDIR] !== import_statusMatrix.UNMODIFIED || row[import_statusMatrix.STAGE] !== import_statusMatrix.UNMODIFIED
-).map((row) => ({ path: row[import_statusMatrix.FILEPATH], state: mapState(row) }))
+).map((row) => ({ path: row[import_statusMatrix.FILEPATH], state: mapState(row) })).filter(
+(changedFile) => !ignore.some(
+(i) => i.path === changedFile.path && i.state === changedFile.state
+)
+);
 };
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {

package/lib/api/git/getChangedFiles.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../../src/api/git/getChangedFiles.ts"],
-
"sourcesContent": ["import fs from 'fs-extra';\nimport git from 'isomorphic-git';\n\nimport {\n ABSENT,\n FILEPATH,\n HEAD,\n MODIFIED,\n STAGE,\n UNMODIFIED,\n WORKDIR,\n} from './statusMatrix';\n\ntype ChangedFileState = 'added' | 'modified' | 'deleted';\nexport interface ChangedFile {\n path: string;\n state: ChangedFileState;\n}\ninterface ChangedFilesParameters {\n dir: string;\n}\n\nconst mapState = (\n row: [string, 0 | 1, 0 | 1 | 2, 0 | 1 | 2 | 3],\n): ChangedFileState => {\n if (row[HEAD] === ABSENT) {\n return 'added';\n }\n\n if (row[WORKDIR] === MODIFIED) {\n return 'modified';\n }\n\n return 'deleted';\n};\n\n/**\n * Returns all the files which have been added, modified or deleted in the\n * working directory of the local Git repository since the last commit.\n */\nexport const getChangedFiles = async ({\n dir,\n}: ChangedFilesParameters): Promise<ChangedFile[]> => {\n const allFiles = await git.statusMatrix({ fs, dir });\n return allFiles\n .filter(\n (row) =>\n row[HEAD] !== UNMODIFIED ||\n row[WORKDIR] !== UNMODIFIED ||\n row[STAGE] !== UNMODIFIED,\n )\n .map((row) => ({ path: row[FILEPATH], state: mapState(row) }));\n};\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AACf,4BAAgB;AAEhB,0BAQO;
+
"sourcesContent": ["import fs from 'fs-extra';\nimport git from 'isomorphic-git';\n\nimport {\n ABSENT,\n FILEPATH,\n HEAD,\n MODIFIED,\n STAGE,\n UNMODIFIED,\n WORKDIR,\n} from './statusMatrix';\n\ntype ChangedFileState = 'added' | 'modified' | 'deleted';\nexport interface ChangedFile {\n path: string;\n state: ChangedFileState;\n}\ninterface ChangedFilesParameters {\n dir: string;\n\n /**\n * File changes to exclude from the result.\n *\n * Defaults to `[]` (no exclusions).\n */\n ignore?: ChangedFile[];\n}\n\nconst mapState = (\n row: [string, 0 | 1, 0 | 1 | 2, 0 | 1 | 2 | 3],\n): ChangedFileState => {\n if (row[HEAD] === ABSENT) {\n return 'added';\n }\n\n if (row[WORKDIR] === MODIFIED) {\n return 'modified';\n }\n\n return 'deleted';\n};\n\n/**\n * Returns all the files which have been added, modified or deleted in the\n * working directory of the local Git repository since the last commit.\n */\nexport const getChangedFiles = async ({\n dir,\n\n ignore = [],\n}: ChangedFilesParameters): Promise<ChangedFile[]> => {\n const allFiles = await git.statusMatrix({ fs, dir });\n return allFiles\n .filter(\n (row) =>\n row[HEAD] !== UNMODIFIED ||\n row[WORKDIR] !== UNMODIFIED ||\n row[STAGE] !== UNMODIFIED,\n )\n .map((row) => ({ path: row[FILEPATH], state: mapState(row) }))\n .filter(\n (changedFile) =>\n !ignore.some(\n (i) => i.path === changedFile.path && i.state === changedFile.state,\n ),\n );\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AACf,4BAAgB;AAEhB,0BAQO;AAkBP,MAAM,WAAW,CACf,QACqB;AACrB,MAAI,IAAI,8BAAU,4BAAQ;AACxB,WAAO;AAAA,EACT;AAEA,MAAI,IAAI,iCAAa,8BAAU;AAC7B,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAMO,MAAM,kBAAkB,OAAO;AAAA,EACpC;AAAA,EAEA,SAAS,CAAC;AACZ,MAAsD;AACpD,QAAM,WAAW,MAAM,sBAAAA,QAAI,aAAa,EAAE,oBAAAC,SAAI,IAAI,CAAC;AACnD,SAAO,SACJ;AAAA,IACC,CAAC,QACC,IAAI,8BAAU,kCACd,IAAI,iCAAa,kCACjB,IAAI,+BAAW;AAAA,EACnB,EACC,IAAI,CAAC,SAAS,EAAE,MAAM,IAAI,+BAAW,OAAO,SAAS,GAAG,EAAE,EAAE,EAC5D;AAAA,IACC,CAAC,gBACC,CAAC,OAAO;AAAA,MACN,CAAC,MAAM,EAAE,SAAS,YAAY,QAAQ,EAAE,UAAU,YAAY;AAAA,IAChE;AAAA,EACJ;AACJ;",
 "names": ["git", "fs"]
 }
package/lib/api/git/index.d.ts
CHANGED

@@ -1,6 +1,7 @@
 export { commit } from './commit';
 export { commitAllChanges } from './commitAllChanges';
 export { currentBranch } from './currentBranch';
+export type { ChangedFile } from './getChangedFiles';
 export { getChangedFiles } from './getChangedFiles';
 export { getHeadCommitId, getHeadCommitMessage } from './log';
 export { getOwnerAndRepo } from './remote';
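
With `ChangedFile` now re-exported from the Git barrel, downstream code can type its own exclusion lists. A small sketch, assuming skuba re-exports this module as the `Git` namespace from its root entry point:

```typescript
import type { Git } from 'skuba';

// A reusable exclusion list, typed against the newly exported ChangedFile.
export const ignoredChanges: Git.ChangedFile[] = [
  { path: 'README.md', state: 'modified' },
  { path: 'coverage/lcov.info', state: 'added' },
];
```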
package/lib/api/git/index.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../../src/api/git/index.ts"],
-
"sourcesContent": ["export { commit } from './commit';\nexport { commitAllChanges } from './commitAllChanges';\nexport { currentBranch } from './currentBranch';\nexport { getChangedFiles } from './getChangedFiles';\nexport { getHeadCommitId, getHeadCommitMessage } from './log';\nexport { getOwnerAndRepo } from './remote';\nexport { push } from './push';\nexport { fastForwardBranch } from './pull';\nexport { reset } from './reset';\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAuB;AACvB,8BAAiC;AACjC,2BAA8B;
+
"sourcesContent": ["export { commit } from './commit';\nexport { commitAllChanges } from './commitAllChanges';\nexport { currentBranch } from './currentBranch';\nexport type { ChangedFile } from './getChangedFiles';\nexport { getChangedFiles } from './getChangedFiles';\nexport { getHeadCommitId, getHeadCommitMessage } from './log';\nexport { getOwnerAndRepo } from './remote';\nexport { push } from './push';\nexport { fastForwardBranch } from './pull';\nexport { reset } from './reset';\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAuB;AACvB,8BAAiC;AACjC,2BAA8B;AAE9B,6BAAgC;AAChC,iBAAsD;AACtD,oBAAgC;AAChC,kBAAqB;AACrB,kBAAkC;AAClC,mBAAsB;",
 "names": []
 }

package/lib/api/github/checkRun.d.ts
CHANGED

@@ -1,6 +1,6 @@
 import type { Endpoints } from '@octokit/types';
-
-export
+type Output = NonNullable<Endpoints['POST /repos/{owner}/{repo}/check-runs']['parameters']['output']>;
+export type Annotation = NonNullable<Output['annotations']>[number];
 /**
 * {@link https://docs.github.com/en/rest/reference/checks#create-a-check-run}
 */
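
The `Annotation` type is now derived from Octokit's check-run endpoint parameters rather than spelled out inline. For illustration only, a locally declared alias with the fields the underlying REST API requires; this is an assumption for the sketch, not skuba's export:

```typescript
// Hypothetical local alias; the published type is derived from
// Endpoints['POST /repos/{owner}/{repo}/check-runs'] as shown above.
type CheckRunAnnotation = {
  path: string;
  start_line: number;
  end_line: number;
  annotation_level: 'notice' | 'warning' | 'failure';
  message: string;
};

const annotation: CheckRunAnnotation = {
  path: 'src/app.ts',
  start_line: 42,
  end_line: 42,
  annotation_level: 'failure',
  message: 'Unexpected any. Specify a different type.',
};

console.log(annotation);
```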
package/lib/api/github/push.d.ts
CHANGED

@@ -1,5 +1,5 @@
 import type { FileAddition, FileDeletion } from '@octokit/graphql-schema';
-import
+import * as Git from '../git';
 interface UploadAllFileChangesParams {
 dir: string;
 /**

@@ -10,6 +10,12 @@ interface UploadAllFileChangesParams {
 * The headline of the commit message
 */
 messageHeadline: string;
+/**
+* File changes to exclude from the upload.
+*
+* Defaults to `[]` (no exclusions).
+*/
+ignore?: Git.ChangedFile[];
 /**
 * The body of the commit message
 */

@@ -31,7 +37,7 @@ interface UploadAllFileChangesParams {
 * This will not update the local Git repository unless `updateLocal` is
 * specified.
 */
-export declare const uploadAllFileChanges: ({
+export declare const uploadAllFileChanges: ({ branch, dir, messageHeadline, ignore, messageBody, updateLocal, }: UploadAllFileChangesParams) => Promise<string | undefined>;
 export interface FileChanges {
 additions: FileAddition[];
 deletions: FileDeletion[];

@@ -42,7 +48,7 @@ export interface FileChanges {
 *
 * https://docs.github.com/en/graphql/reference/input-objects#filechanges
 */
-export declare const readFileChanges: (changedFiles: ChangedFile[]) => Promise<FileChanges>;
+export declare const readFileChanges: (changedFiles: Git.ChangedFile[]) => Promise<FileChanges>;
 interface UploadFileChangesParams {
 dir: string;
 /**
package/lib/api/github/push.js
CHANGED

@@ -34,15 +34,16 @@ var import_fs_extra = __toESM(require("fs-extra"));
 var Git = __toESM(require("../git"));
 var import_environment = require("./environment");
 const uploadAllFileChanges = async ({
-dir,
 branch,
+dir,
 messageHeadline,
+ignore,
 messageBody,
 updateLocal = false
 }) => {
-const changedFiles = await Git.getChangedFiles({ dir });
+const changedFiles = await Git.getChangedFiles({ dir, ignore });
 if (!changedFiles.length) {
-return
+return;
 }
 const fileChanges = await readFileChanges(changedFiles);
 const commitId = await uploadFileChanges({

package/lib/api/github/push.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../../src/api/github/push.ts"],
-
"sourcesContent": ["import { graphql } from '@octokit/graphql';\nimport type {\n CreateCommitOnBranchInput,\n FileAddition,\n FileDeletion,\n} from '@octokit/graphql-schema';\nimport fs from 'fs-extra';\n\nimport * as Git from '../git';\
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAwB;AAMxB,sBAAe;AAEf,UAAqB;
+
"sourcesContent": ["import { graphql } from '@octokit/graphql';\nimport type {\n CreateCommitOnBranchInput,\n FileAddition,\n FileDeletion,\n} from '@octokit/graphql-schema';\nimport fs from 'fs-extra';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\n\ninterface CreateCommitResult {\n createCommitOnBranch: {\n commit: {\n oid: string;\n };\n };\n}\n\ninterface UploadAllFileChangesParams {\n dir: string;\n /**\n * The branch name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n\n /**\n * File changes to exclude from the upload.\n *\n * Defaults to `[]` (no exclusions).\n */\n ignore?: Git.ChangedFile[];\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * Updates the local Git repository to match the new remote branch state\n */\n updateLocal?: boolean;\n}\n\n/**\n * Retrieves all file changes from the local Git repository using\n * `getChangedFiles`, then uploads the changes to a specified GitHub branch\n * using `uploadFileChanges`.\n *\n * Returns the commit ID, or `undefined` if there are no changes to commit.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository unless `updateLocal` is\n * specified.\n */\nexport const uploadAllFileChanges = async ({\n branch,\n dir,\n messageHeadline,\n\n ignore,\n messageBody,\n updateLocal = false,\n}: UploadAllFileChangesParams): Promise<string | undefined> => {\n const changedFiles = await Git.getChangedFiles({ dir, ignore });\n\n if (!changedFiles.length) {\n return;\n }\n\n const fileChanges = await readFileChanges(changedFiles);\n\n const commitId = await uploadFileChanges({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n });\n\n if (updateLocal) {\n await Promise.all(\n [...fileChanges.additions, ...fileChanges.deletions].map((file) =>\n fs.rm(file.path),\n ),\n );\n\n await Git.fastForwardBranch({\n ref: branch,\n auth: { type: 'gitHubApp' },\n dir,\n });\n }\n\n return commitId;\n};\n\nexport interface FileChanges {\n additions: FileAddition[];\n deletions: FileDeletion[];\n}\n\n/**\n * Takes a list of `ChangedFiles`, reads them from the file system, and maps\n * them to GitHub GraphQL `FileChanges`.\n *\n * https://docs.github.com/en/graphql/reference/input-objects#filechanges\n */\nexport const readFileChanges = async (\n changedFiles: Git.ChangedFile[],\n): Promise<FileChanges> => {\n const { added, deleted } = changedFiles.reduce<{\n added: string[];\n deleted: string[];\n }>(\n (files, changedFile) => {\n const filePath = changedFile.path;\n if (changedFile.state === 'deleted') {\n files.deleted.push(filePath);\n } else {\n files.added.push(filePath);\n }\n\n return files;\n },\n { added: [], deleted: [] },\n );\n\n const additions: FileAddition[] = await Promise.all(\n added.map(async (filePath) => ({\n path: filePath,\n contents: await fs.promises.readFile(filePath, {\n encoding: 'base64',\n }),\n })),\n );\n\n const deletions: FileDeletion[] = deleted.map((filePath) => ({\n path: filePath,\n }));\n\n return {\n additions,\n deletions,\n };\n};\n\ninterface UploadFileChangesParams {\n dir: string;\n /**\n * The branch name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * File additions and deletions\n */\n fileChanges: FileChanges;\n}\n\n/**\n * Uploads file changes from the local workspace to a 
specified GitHub branch.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository.\n */\nexport const uploadFileChanges = async ({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n}: UploadFileChangesParams): Promise<string> => {\n const authToken = apiTokenFromEnvironment();\n if (!authToken) {\n throw new Error(\n 'Could not read a GitHub API token from the environment. Please set GITHUB_API_TOKEN or GITHUB_TOKEN.',\n );\n }\n\n const [{ owner, repo }, headCommitId] = await Promise.all([\n Git.getOwnerAndRepo({ dir }),\n Git.getHeadCommitId({ dir }),\n ]);\n\n const input: CreateCommitOnBranchInput = {\n branch: {\n repositoryNameWithOwner: `${owner}/${repo}`,\n branchName: branch,\n },\n message: {\n headline: messageHeadline,\n body: messageBody,\n },\n expectedHeadOid: headCommitId,\n clientMutationId: 'skuba',\n fileChanges,\n };\n\n const result = await graphql<CreateCommitResult>(\n `\n mutation Mutation($input: CreateCommitOnBranchInput!) {\n createCommitOnBranch(input: $input) {\n commit {\n oid\n }\n }\n }\n `,\n {\n input,\n headers: {\n authorization: `Bearer ${authToken}`,\n },\n },\n );\n\n return result.createCommitOnBranch.commit.oid;\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAwB;AAMxB,sBAAe;AAEf,UAAqB;AAErB,yBAAwC;AAiDjC,MAAM,uBAAuB,OAAO;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA,cAAc;AAChB,MAA+D;AAC7D,QAAM,eAAe,MAAM,IAAI,gBAAgB,EAAE,KAAK,OAAO,CAAC;AAE9D,MAAI,CAAC,aAAa,QAAQ;AACxB;AAAA,EACF;AAEA,QAAM,cAAc,MAAM,gBAAgB,YAAY;AAEtD,QAAM,WAAW,MAAM,kBAAkB;AAAA,IACvC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,aAAa;AACf,UAAM,QAAQ;AAAA,MACZ,CAAC,GAAG,YAAY,WAAW,GAAG,YAAY,SAAS,EAAE;AAAA,QAAI,CAAC,SACxD,gBAAAA,QAAG,GAAG,KAAK,IAAI;AAAA,MACjB;AAAA,IACF;AAEA,UAAM,IAAI,kBAAkB;AAAA,MAC1B,KAAK;AAAA,MACL,MAAM,EAAE,MAAM,YAAY;AAAA,MAC1B;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAaO,MAAM,kBAAkB,OAC7B,iBACyB;AACzB,QAAM,EAAE,OAAO,QAAQ,IAAI,aAAa;AAAA,IAItC,CAAC,OAAO,gBAAgB;AACtB,YAAM,WAAW,YAAY;AAC7B,UAAI,YAAY,UAAU,WAAW;AACnC,cAAM,QAAQ,KAAK,QAAQ;AAAA,MAC7B,OAAO;AACL,cAAM,MAAM,KAAK,QAAQ;AAAA,MAC3B;AAEA,aAAO;AAAA,IACT;AAAA,IACA,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,EAAE;AAAA,EAC3B;AAEA,QAAM,YAA4B,MAAM,QAAQ;AAAA,IAC9C,MAAM,IAAI,OAAO,cAAc;AAAA,MAC7B,MAAM;AAAA,MACN,UAAU,MAAM,gBAAAA,QAAG,SAAS,SAAS,UAAU;AAAA,QAC7C,UAAU;AAAA,MACZ,CAAC;AAAA,IACH,EAAE;AAAA,EACJ;AAEA,QAAM,YAA4B,QAAQ,IAAI,CAAC,cAAc;AAAA,IAC3D,MAAM;AAAA,EACR,EAAE;AAEF,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;AA6BO,MAAM,oBAAoB,OAAO;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAgD;AAC9C,QAAM,gBAAY,4CAAwB;AAC1C,MAAI,CAAC,WAAW;AACd,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,CAAC,EAAE,OAAO,KAAK,GAAG,YAAY,IAAI,MAAM,QAAQ,IAAI;AAAA,IACxD,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,IAC3B,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,EAC7B,CAAC;AAED,QAAM,QAAmC;AAAA,IACvC,QAAQ;AAAA,MACN,yBAAyB,GAAG,SAAS;AAAA,MACrC,YAAY;AAAA,IACd;AAAA,IACA,SAAS;AAAA,MACP,UAAU;AAAA,MACV,MAAM;AAAA,IACR;AAAA,IACA,iBAAiB;AAAA,IACjB,kBAAkB;AAAA,IAClB;AAAA,EACF;AAEA,QAAM,SAAS,UAAM;AAAA,IACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASA;AAAA,MACE;AAAA,MACA,SAAS;AAAA,QACP,eAAe,UAAU;AAAA,MAC3B;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,qBAAqB,OAAO;AAC5C;",
 "names": ["fs"]
 }
package/lib/api/jest/index.d.ts
CHANGED

@@ -5,13 +5,13 @@ import type { Config } from '@jest/types';
 * While we technically accept anything compatible with `Config.InitialOptions`,
 * these are tacitly endorsed for our use cases and receive IntelliSense.
 */
-
+type DefaultOptions = 'collectCoverage' | 'collectCoverageFrom' | 'coveragePathIgnorePatterns' | 'coverageThreshold' | 'displayName' | 'globals' | 'globalSetup' | 'globalTeardown' | 'projects' | 'setupFiles' | 'setupFilesAfterEnv' | 'snapshotSerializers' | 'testEnvironment' | 'testPathIgnorePatterns' | 'testTimeout' | 'watchPathIgnorePatterns';
 /**
 * Merge additional Jest options into the **skuba** preset.
 *
 * This concatenates array options like `testPathIgnorePatterns`.
 */
-
export declare const mergePreset: <AdditionalOptions extends "filter" | "json" | "silent" | "cache" | "runtime" | "runner" | "projects" | "id" | "automock" | "cacheDirectory" | "clearMocks" | "coveragePathIgnorePatterns" | "dependencyExtractor" | "detectLeaks" | "detectOpenHandles" | "displayName" | "errorOnDeprecated" | "extensionsToTreatAsEsm" | "fakeTimers" | "forceCoverageMatch" | "globalSetup" | "globalTeardown" | "globals" | "haste" | "injectGlobals" | "moduleDirectories" | "moduleFileExtensions" | "moduleNameMapper" | "modulePathIgnorePatterns" | "modulePaths" | "prettierPath" | "resetMocks" | "resetModules" | "resolver" | "restoreMocks" | "rootDir" | "roots" | "sandboxInjectedGlobals" | "setupFiles" | "setupFilesAfterEnv" | "skipFilter" | "skipNodeResolution" | "slowTestThreshold" | "snapshotResolver" | "snapshotSerializers" | "snapshotFormat" | "testEnvironment" | "testEnvironmentOptions" | "testMatch" | "testLocationInResults" | "testPathIgnorePatterns" | "testRegex" | "testRunner" | "transform" | "transformIgnorePatterns" | "watchPathIgnorePatterns" | "unmockedModulePathPatterns" | "workerIdleMemoryLimit" | "bail" | "ci" | "changedFilesWithAncestor" | "changedSince" | "collectCoverage" | "collectCoverageFrom" | "coverageDirectory" | "coverageProvider" | "coverageReporters" | "coverageThreshold" | "expand" | "findRelatedTests" | "forceExit" | "reporters" | "logHeapUsage" | "lastCommit" | "listTests" | "maxConcurrency" | "maxWorkers" | "noStackTrace" | "notify" | "notifyMode" | "onlyChanged" | "onlyFailures" | "outputFile" | "passWithNoTests" | "
+
export declare const mergePreset: <AdditionalOptions extends "filter" | "json" | "silent" | "cache" | "runtime" | "runner" | "projects" | "id" | "automock" | "cacheDirectory" | "clearMocks" | "coveragePathIgnorePatterns" | "dependencyExtractor" | "detectLeaks" | "detectOpenHandles" | "displayName" | "errorOnDeprecated" | "extensionsToTreatAsEsm" | "fakeTimers" | "forceCoverageMatch" | "globalSetup" | "globalTeardown" | "globals" | "haste" | "injectGlobals" | "moduleDirectories" | "moduleFileExtensions" | "moduleNameMapper" | "modulePathIgnorePatterns" | "modulePaths" | "preset" | "prettierPath" | "resetMocks" | "resetModules" | "resolver" | "restoreMocks" | "rootDir" | "roots" | "sandboxInjectedGlobals" | "setupFiles" | "setupFilesAfterEnv" | "skipFilter" | "skipNodeResolution" | "slowTestThreshold" | "snapshotResolver" | "snapshotSerializers" | "snapshotFormat" | "testEnvironment" | "testEnvironmentOptions" | "testMatch" | "testLocationInResults" | "testPathIgnorePatterns" | "testRegex" | "testRunner" | "transform" | "transformIgnorePatterns" | "watchPathIgnorePatterns" | "unmockedModulePathPatterns" | "workerIdleMemoryLimit" | "bail" | "ci" | "changedFilesWithAncestor" | "changedSince" | "collectCoverage" | "collectCoverageFrom" | "coverageDirectory" | "coverageProvider" | "coverageReporters" | "coverageThreshold" | "expand" | "findRelatedTests" | "forceExit" | "reporters" | "logHeapUsage" | "lastCommit" | "listTests" | "maxConcurrency" | "maxWorkers" | "noStackTrace" | "notify" | "notifyMode" | "onlyChanged" | "onlyFailures" | "outputFile" | "passWithNoTests" | "replname" | "runTestsByPath" | "showSeed" | "testFailureExitCode" | "testNamePattern" | "testResultsProcessor" | "testSequencer" | "testTimeout" | "updateSnapshot" | "useStderr" | "verbose" | "watch" | "watchAll" | "watchman" | "watchPlugins">(options: Pick<Partial<{
 automock: boolean;
 bail: number | boolean;
 cache: boolean;
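
The regenerated `mergePreset` signature now includes `preset` among the accepted additional keys. A typical `jest.config.ts` built on the skuba preset, as a hedged sketch:

```typescript
import { Jest } from 'skuba';

export default Jest.mergePreset({
  // Array options such as this one are concatenated onto the preset.
  testPathIgnorePatterns: ['/test\\.ts'],
  coverageThreshold: {
    global: { branches: 80, functions: 80, lines: 80, statements: 80 },
  },
});
```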

package/lib/cli/adapter/prettier.js
CHANGED

@@ -44,11 +44,25 @@ const inferParser = (filepath) => {
 );
 return firstLanguage?.parsers[0];
 };
+const isPackageJsonOk = ({
+data,
+filepath
+}) => {
+if (import_path.default.basename(filepath) !== "package.json") {
+return true;
+}
+try {
+const packageJson = (0, import_package.parsePackage)(data);
+return !packageJson || (0, import_package.formatPackage)(packageJson) === data;
+} catch {
+}
+return true;
+};
 const formatOrLintFile = ({ data, filepath, options }, mode, result) => {
 if (mode === "lint") {
 let ok;
 try {
-ok = (0, import_prettier.check)(data, options);
+ok = (0, import_prettier.check)(data, options) && isPackageJsonOk({ data, filepath });
 } catch (err) {
 result.errored.push({ err, filepath });
 return;
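
The new `isPackageJsonOk` guard means `skuba lint` now also flags a `package.json` whose contents differ from skuba's canonical formatting (parse, re-format, compare), not just Prettier violations. A standalone sketch of an equivalent check, substituting the `sort-package-json` package for skuba's internal `parsePackage`/`formatPackage` helpers (an assumption, not skuba's actual implementation):

```typescript
import sortPackageJson from 'sort-package-json';

// Returns true when the raw manifest text is already in the preferred key
// order. Parse or sort errors are treated as "ok" so this supplementary
// check can never fail linting on its own.
export const isPackageJsonSorted = (data: string): boolean => {
  try {
    return sortPackageJson(data) === data;
  } catch {
    return true;
  }
};
```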

package/lib/cli/adapter/prettier.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../../src/cli/adapter/prettier.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport type { Options, SupportLanguage } from 'prettier';\nimport { check, format, getSupportInfo, resolveConfig } from 'prettier';\n\nimport { crawlDirectory } from '../../utils/dir';\nimport type { Logger } from '../../utils/logging';\nimport { pluralise } from '../../utils/logging';\nimport { getConsumerManifest } from '../../utils/manifest';\nimport { formatPackage, parsePackage } from '../configure/processing/package';\n\nlet languages: SupportLanguage[] | undefined;\n\n/**\n * Infers a parser for the specified filepath.\n *\n * This is a cut-down version of Prettier's built-in function of the same name;\n * ours operates purely on the `filepath` string and does not perform file I/O.\n * Prettier's internal `getInterpreter` function can open a file to read the\n * shebang, and its file descriptor usage can throw warnings on worker threads:\n *\n * ```console\n * Warning: File descriptor 123 closed but not opened in unmanaged mode\n * at Object.closeSync (node:fs:530:11)\n * at Object.closeSync (node_modules/graceful-fs/graceful-fs.js:74:20)\n * ...\n * ```\n *\n * References:\n *\n * - https://github.com/prettier/prettier/blob/2.4.1/src/main/options.js#L167\n * - seek-oss/skuba#659\n */\nexport const inferParser = (filepath: string): string | undefined => {\n const filename = path.basename(filepath).toLowerCase();\n\n languages ??= getSupportInfo().languages.filter((language) => language.since);\n\n const firstLanguage = languages.find(\n (language) =>\n language.extensions?.some((extension) => filename.endsWith(extension)) ||\n language.filenames?.some((name) => name.toLowerCase() === filename),\n );\n\n return firstLanguage?.parsers[0];\n};\n\ninterface File {\n data: string;\n options: Options;\n filepath: string;\n}\n\ninterface Result {\n count: number;\n errored: Array<{ err?: unknown; filepath: string }>;\n touched: string[];\n unparsed: string[];\n}\n\nconst formatOrLintFile = (\n { data, filepath, options }: File,\n mode: 'format' | 'lint',\n result: Result,\n): string | undefined => {\n if (mode === 'lint') {\n let ok: boolean;\n try {\n ok = check(data, options);\n } catch (err) {\n result.errored.push({ err, filepath });\n return;\n }\n\n if (!ok) {\n result.errored.push({ filepath });\n }\n\n return;\n }\n\n let formatted: string;\n try {\n formatted = format(data, options);\n } catch (err) {\n result.errored.push({ err, filepath });\n return;\n }\n\n // Perform additional formatting (i.e. sorting) on a `package.json` manifest.\n try {\n if (path.basename(filepath) === 'package.json') {\n const packageJson = parsePackage(formatted);\n if (packageJson) {\n formatted = formatPackage(packageJson);\n }\n }\n } catch {\n //
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AAEf,sBAA6D;AAE7D,iBAA+B;AAE/B,qBAA0B;AAC1B,sBAAoC;AACpC,qBAA4C;AAE5C,IAAI;AAsBG,MAAM,cAAc,CAAC,aAAyC;AACnE,QAAM,WAAW,YAAAA,QAAK,SAAS,QAAQ,EAAE,YAAY;AAErD,gCAAc,gCAAe,EAAE,UAAU,OAAO,CAAC,aAAa,SAAS,KAAK;AAE5E,QAAM,gBAAgB,UAAU;AAAA,IAC9B,CAAC,aACC,SAAS,YAAY,KAAK,CAAC,cAAc,SAAS,SAAS,SAAS,CAAC,KACrE,SAAS,WAAW,KAAK,CAAC,SAAS,KAAK,YAAY,MAAM,QAAQ;AAAA,EACtE;AAEA,SAAO,eAAe,QAAQ;AAChC;AAeA,MAAM,mBAAmB,CACvB,EAAE,MAAM,UAAU,QAAQ,GAC1B,MACA,WACuB;AACvB,MAAI,SAAS,QAAQ;AACnB,QAAI;AACJ,QAAI;AACF,eAAK,uBAAM,MAAM,OAAO;AAAA,
+
"sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport type { Options, SupportLanguage } from 'prettier';\nimport { check, format, getSupportInfo, resolveConfig } from 'prettier';\n\nimport { crawlDirectory } from '../../utils/dir';\nimport type { Logger } from '../../utils/logging';\nimport { pluralise } from '../../utils/logging';\nimport { getConsumerManifest } from '../../utils/manifest';\nimport { formatPackage, parsePackage } from '../configure/processing/package';\n\nlet languages: SupportLanguage[] | undefined;\n\n/**\n * Infers a parser for the specified filepath.\n *\n * This is a cut-down version of Prettier's built-in function of the same name;\n * ours operates purely on the `filepath` string and does not perform file I/O.\n * Prettier's internal `getInterpreter` function can open a file to read the\n * shebang, and its file descriptor usage can throw warnings on worker threads:\n *\n * ```console\n * Warning: File descriptor 123 closed but not opened in unmanaged mode\n * at Object.closeSync (node:fs:530:11)\n * at Object.closeSync (node_modules/graceful-fs/graceful-fs.js:74:20)\n * ...\n * ```\n *\n * References:\n *\n * - https://github.com/prettier/prettier/blob/2.4.1/src/main/options.js#L167\n * - seek-oss/skuba#659\n */\nexport const inferParser = (filepath: string): string | undefined => {\n const filename = path.basename(filepath).toLowerCase();\n\n languages ??= getSupportInfo().languages.filter((language) => language.since);\n\n const firstLanguage = languages.find(\n (language) =>\n language.extensions?.some((extension) => filename.endsWith(extension)) ||\n language.filenames?.some((name) => name.toLowerCase() === filename),\n );\n\n return firstLanguage?.parsers[0];\n};\n\nconst isPackageJsonOk = ({\n data,\n filepath,\n}: {\n data: string;\n filepath: string;\n}): boolean => {\n if (path.basename(filepath) !== 'package.json') {\n return true;\n }\n\n try {\n const packageJson = parsePackage(data);\n\n return !packageJson || formatPackage(packageJson) === data;\n } catch {\n // Be more lenient about our custom formatting and don't throw if it errors.\n }\n\n return true;\n};\n\ninterface File {\n data: string;\n options: Options;\n filepath: string;\n}\n\ninterface Result {\n count: number;\n errored: Array<{ err?: unknown; filepath: string }>;\n touched: string[];\n unparsed: string[];\n}\n\nconst formatOrLintFile = (\n { data, filepath, options }: File,\n mode: 'format' | 'lint',\n result: Result,\n): string | undefined => {\n if (mode === 'lint') {\n let ok: boolean;\n try {\n ok = check(data, options) && isPackageJsonOk({ data, filepath });\n } catch (err) {\n result.errored.push({ err, filepath });\n return;\n }\n\n if (!ok) {\n result.errored.push({ filepath });\n }\n\n return;\n }\n\n let formatted: string;\n try {\n formatted = format(data, options);\n } catch (err) {\n result.errored.push({ err, filepath });\n return;\n }\n\n // Perform additional formatting (i.e. 
sorting) on a `package.json` manifest.\n try {\n if (path.basename(filepath) === 'package.json') {\n const packageJson = parsePackage(formatted);\n if (packageJson) {\n formatted = formatPackage(packageJson);\n }\n }\n } catch {\n // Be more lenient about our custom formatting and don't throw if it errors.\n }\n\n if (formatted === data) {\n return;\n }\n\n result.touched.push(filepath);\n return formatted;\n};\n\nexport interface PrettierOutput {\n ok: boolean;\n result: Result;\n}\n\n/**\n * Formats/lints files with Prettier.\n *\n * Prettier doesn't provide a higher-level Node.js API that replicates the\n * behaviour of its CLI, so we have to plumb together its lower-level functions.\n * On the other hand, this affords more flexibility in how we track and report\n * on progress and results.\n */\nexport const runPrettier = async (\n mode: 'format' | 'lint',\n logger: Logger,\n): Promise<PrettierOutput> => {\n logger.debug('Initialising Prettier...');\n\n const start = process.hrtime.bigint();\n\n let directory = process.cwd();\n\n const manifest = await getConsumerManifest();\n if (manifest) {\n directory = path.dirname(manifest.path);\n }\n\n logger.debug(\n manifest ? 'Detected project root:' : 'Detected working directory:',\n directory,\n );\n\n logger.debug('Discovering files...');\n\n // Match Prettier's opinion of not respecting `.gitignore`.\n // This avoids exhibiting different behaviour than a Prettier IDE integration,\n // and the headache of conflicting `.gitignore` and `.prettierignore` rules.\n const filepaths = await crawlDirectory(directory, '.prettierignore');\n\n logger.debug(`Discovered ${pluralise(filepaths.length, 'file')}.`);\n\n const result: Result = {\n count: filepaths.length,\n errored: [],\n touched: [],\n unparsed: [],\n };\n\n logger.debug(mode === 'format' ? 'Formatting' : 'Linting', 'files...');\n\n for (const filepath of filepaths) {\n // Infer parser upfront so we can skip unsupported files.\n const parser = inferParser(filepath);\n\n logger.debug(filepath);\n logger.debug(' parser:', parser ?? '-');\n\n if (!parser) {\n result.unparsed.push(filepath);\n continue;\n }\n\n const [config, data] = await Promise.all([\n resolveConfig(filepath),\n fs.promises.readFile(filepath, 'utf-8'),\n ]);\n\n const file: File = {\n data,\n filepath,\n options: { ...config, filepath },\n };\n\n const formatted = formatOrLintFile(file, mode, result);\n\n if (typeof formatted === 'string') {\n await fs.promises.writeFile(filepath, formatted);\n }\n }\n\n const end = process.hrtime.bigint();\n\n logger.plain(\n `Processed ${pluralise(\n result.count - result.unparsed.length,\n 'file',\n )} in ${logger.timing(start, end)}.`,\n );\n\n if (result.touched.length) {\n logger.plain(`Formatted ${pluralise(result.touched.length, 'file')}:`);\n for (const filepath of result.touched) {\n logger.warn(filepath);\n }\n }\n\n if (result.errored.length) {\n logger.plain(`Flagged ${pluralise(result.errored.length, 'file')}:`);\n for (const { err, filepath } of result.errored) {\n logger.warn(filepath, ...(err ? [String(err)] : []));\n }\n }\n\n return { ok: result.errored.length === 0, result };\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AAEf,sBAA6D;AAE7D,iBAA+B;AAE/B,qBAA0B;AAC1B,sBAAoC;AACpC,qBAA4C;AAE5C,IAAI;AAsBG,MAAM,cAAc,CAAC,aAAyC;AACnE,QAAM,WAAW,YAAAA,QAAK,SAAS,QAAQ,EAAE,YAAY;AAErD,gCAAc,gCAAe,EAAE,UAAU,OAAO,CAAC,aAAa,SAAS,KAAK;AAE5E,QAAM,gBAAgB,UAAU;AAAA,IAC9B,CAAC,aACC,SAAS,YAAY,KAAK,CAAC,cAAc,SAAS,SAAS,SAAS,CAAC,KACrE,SAAS,WAAW,KAAK,CAAC,SAAS,KAAK,YAAY,MAAM,QAAQ;AAAA,EACtE;AAEA,SAAO,eAAe,QAAQ;AAChC;AAEA,MAAM,kBAAkB,CAAC;AAAA,EACvB;AAAA,EACA;AACF,MAGe;AACb,MAAI,YAAAA,QAAK,SAAS,QAAQ,MAAM,gBAAgB;AAC9C,WAAO;AAAA,EACT;AAEA,MAAI;AACF,UAAM,kBAAc,6BAAa,IAAI;AAErC,WAAO,CAAC,mBAAe,8BAAc,WAAW,MAAM;AAAA,EACxD,QAAE;AAAA,EAEF;AAEA,SAAO;AACT;AAeA,MAAM,mBAAmB,CACvB,EAAE,MAAM,UAAU,QAAQ,GAC1B,MACA,WACuB;AACvB,MAAI,SAAS,QAAQ;AACnB,QAAI;AACJ,QAAI;AACF,eAAK,uBAAM,MAAM,OAAO,KAAK,gBAAgB,EAAE,MAAM,SAAS,CAAC;AAAA,IACjE,SAAS,KAAP;AACA,aAAO,QAAQ,KAAK,EAAE,KAAK,SAAS,CAAC;AACrC;AAAA,IACF;AAEA,QAAI,CAAC,IAAI;AACP,aAAO,QAAQ,KAAK,EAAE,SAAS,CAAC;AAAA,IAClC;AAEA;AAAA,EACF;AAEA,MAAI;AACJ,MAAI;AACF,oBAAY,wBAAO,MAAM,OAAO;AAAA,EAClC,SAAS,KAAP;AACA,WAAO,QAAQ,KAAK,EAAE,KAAK,SAAS,CAAC;AACrC;AAAA,EACF;AAGA,MAAI;AACF,QAAI,YAAAA,QAAK,SAAS,QAAQ,MAAM,gBAAgB;AAC9C,YAAM,kBAAc,6BAAa,SAAS;AAC1C,UAAI,aAAa;AACf,wBAAY,8BAAc,WAAW;AAAA,MACvC;AAAA,IACF;AAAA,EACF,QAAE;AAAA,EAEF;AAEA,MAAI,cAAc,MAAM;AACtB;AAAA,EACF;AAEA,SAAO,QAAQ,KAAK,QAAQ;AAC5B,SAAO;AACT;AAeO,MAAM,cAAc,OACzB,MACA,WAC4B;AAC5B,SAAO,MAAM,0BAA0B;AAEvC,QAAM,QAAQ,QAAQ,OAAO,OAAO;AAEpC,MAAI,YAAY,QAAQ,IAAI;AAE5B,QAAM,WAAW,UAAM,qCAAoB;AAC3C,MAAI,UAAU;AACZ,gBAAY,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAAA,EACxC;AAEA,SAAO;AAAA,IACL,WAAW,2BAA2B;AAAA,IACtC;AAAA,EACF;AAEA,SAAO,MAAM,sBAAsB;AAKnC,QAAM,YAAY,UAAM,2BAAe,WAAW,iBAAiB;AAEnE,SAAO,MAAM,kBAAc,0BAAU,UAAU,QAAQ,MAAM,IAAI;AAEjE,QAAM,SAAiB;AAAA,IACrB,OAAO,UAAU;AAAA,IACjB,SAAS,CAAC;AAAA,IACV,SAAS,CAAC;AAAA,IACV,UAAU,CAAC;AAAA,EACb;AAEA,SAAO,MAAM,SAAS,WAAW,eAAe,WAAW,UAAU;AAErE,aAAW,YAAY,WAAW;AAEhC,UAAM,SAAS,YAAY,QAAQ;AAEnC,WAAO,MAAM,QAAQ;AACrB,WAAO,MAAM,aAAa,UAAU,GAAG;AAEvC,QAAI,CAAC,QAAQ;AACX,aAAO,SAAS,KAAK,QAAQ;AAC7B;AAAA,IACF;AAEA,UAAM,CAAC,QAAQ,IAAI,IAAI,MAAM,QAAQ,IAAI;AAAA,UACvC,+BAAc,QAAQ;AAAA,MACtB,gBAAAC,QAAG,SAAS,SAAS,UAAU,OAAO;AAAA,IACxC,CAAC;AAED,UAAM,OAAa;AAAA,MACjB;AAAA,MACA;AAAA,MACA,SAAS,EAAE,GAAG,QAAQ,SAAS;AAAA,IACjC;AAEA,UAAM,YAAY,iBAAiB,MAAM,MAAM,MAAM;AAErD,QAAI,OAAO,cAAc,UAAU;AACjC,YAAM,gBAAAA,QAAG,SAAS,UAAU,UAAU,SAAS;AAAA,IACjD;AAAA,EACF;AAEA,QAAM,MAAM,QAAQ,OAAO,OAAO;AAElC,SAAO;AAAA,IACL,iBAAa;AAAA,MACX,OAAO,QAAQ,OAAO,SAAS;AAAA,MAC/B;AAAA,IACF,QAAQ,OAAO,OAAO,OAAO,GAAG;AAAA,EAClC;AAEA,MAAI,OAAO,QAAQ,QAAQ;AACzB,WAAO,MAAM,iBAAa,0BAAU,OAAO,QAAQ,QAAQ,MAAM,IAAI;AACrE,eAAW,YAAY,OAAO,SAAS;AACrC,aAAO,KAAK,QAAQ;AAAA,IACtB;AAAA,EACF;AAEA,MAAI,OAAO,QAAQ,QAAQ;AACzB,WAAO,MAAM,eAAW,0BAAU,OAAO,QAAQ,QAAQ,MAAM,IAAI;AACnE,eAAW,EAAE,KAAK,SAAS,KAAK,OAAO,SAAS;AAC9C,aAAO,KAAK,UAAU,GAAI,MAAM,CAAC,OAAO,GAAG,CAAC,IAAI,CAAC,CAAE;AAAA,IACrD;AAAA,EACF;AAEA,SAAO,EAAE,IAAI,OAAO,QAAQ,WAAW,GAAG,OAAO;AACnD;",
 "names": ["path", "fs"]
 }

package/lib/cli/configure/processing/typescript.d.ts
CHANGED

@@ -1,6 +1,6 @@
 import ts from 'typescript';
-
-
+type Props = ts.NodeArray<ts.ObjectLiteralElementLike>;
+type Transformer<T> = (context: ts.TransformationContext | null, props: T) => T;
 /**
 * Create a transformer to filter out unspecified props from an object literal.
 */

package/lib/cli/configure/types.d.ts
CHANGED

@@ -1,23 +1,23 @@
 import type { PackageJson as TypeFestPackageJson } from 'type-fest';
 import type { ProjectType } from '../../utils/manifest';
 export type { TsConfigJson } from 'type-fest';
-export
+export type PackageJson = TypeFestPackageJson & Record<string, unknown>;
 export interface DependencySet {
 dependencies: Record<string, string>;
 devDependencies: Record<string, string>;
 type: ProjectType;
 }
-export
+export type DependencyDiff = Record<string, {
 operation: string;
 version: string;
 }>;
-
-export
+type FileProcessor = (file: string | undefined, files: Files, initialFiles: Readonly<Files>) => string | undefined;
+export type FileDiff = Record<string, {
 data: string | undefined;
 operation: string;
 }>;
-export
-export
+export type Files = Record<string, string | undefined>;
+export type Module = Record<string, FileProcessor>;
 export interface Options {
 destinationRoot: string;
 entryPoint: string;

package/lib/cli/init/getConfig.js
CHANGED

@@ -128,21 +128,34 @@ const getTemplateConfig = (dir) => {
 throw err;
 }
 };
-const baseToTemplateData = async ({
+const baseToTemplateData = async ({
+ownerName,
+platformName,
+repoName
+}) => {
 const [orgName, teamName] = ownerName.split("/");
 const port = String(await (0, import_port.getRandomPort)());
 return {
 orgName,
 ownerName,
-port,
 repoName,
-teamName: teamName ?? orgName
+teamName: teamName ?? orgName,
+port,
+platformName,
+lambdaCdkArchitecture: platformName === "amd64" ? "X86_64" : "ARM_64",
+lambdaServerlessArchitecture: platformName === "amd64" ? "x86_64" : platformName
 };
 };
 const configureFromPrompt = async () => {
-const { ownerName, repoName } = await runForm(
+const { ownerName, platformName, repoName } = await runForm(
+import_prompts.BASE_PROMPT_PROPS
+);
 import_logging.log.plain(import_chalk.default.cyan(repoName), "by", import_chalk.default.cyan(ownerName));
-const templateData = await baseToTemplateData({
+const templateData = await baseToTemplateData({
+ownerName,
+platformName,
+repoName
+});
 const destinationDir = repoName;
 await createDirectory(destinationDir);
 import_logging.log.newline();
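
`skuba init` now threads a `platformName` answer into the template data and derives one architecture string for the CDK template and another for the Serverless template. A small sketch of the same mapping, assuming the prompt offers `amd64` and `arm64`:

```typescript
type PlatformName = 'amd64' | 'arm64';

// CDK and Serverless spell the Lambda architectures differently, so the
// template data carries a field for each flavour.
const toLambdaArchitectures = (platformName: PlatformName) => ({
  lambdaCdkArchitecture: platformName === 'amd64' ? 'X86_64' : 'ARM_64',
  lambdaServerlessArchitecture:
    platformName === 'amd64' ? 'x86_64' : platformName,
});

console.log(toLambdaArchitectures('arm64'));
// => { lambdaCdkArchitecture: 'ARM_64', lambdaServerlessArchitecture: 'arm64' }
```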

package/lib/cli/init/getConfig.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../../src/cli/init/getConfig.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport fs from 'fs-extra';\n\nimport { copyFiles } from '../../utils/copy';\nimport { isErrorWithCode } from '../../utils/error';\nimport { log } from '../../utils/logging';\nimport { getRandomPort } from '../../utils/port';\nimport {\n TEMPLATE_CONFIG_FILENAME,\n TEMPLATE_DIR,\n TemplateConfig,\n} from '../../utils/template';\n\nimport { downloadGitHubTemplate } from './git';\nimport type { BaseFields } from './prompts';\nimport {\n BASE_PROMPT_PROPS,\n GIT_PATH_PROMPT,\n SHOULD_CONTINUE_PROMPT,\n TEMPLATE_PROMPT,\n} from './prompts';\nimport type { InitConfig } from './types';\nimport { InitConfigInput } from './types';\n\nimport { Form } from 'enquirer';\nimport type { FormChoice } from 'enquirer';\n\nexport const runForm = <T = Record<string, string>>(props: {\n choices: Readonly<FormChoice[]>;\n message: string;\n name: string;\n}) => {\n const { message, name } = props;\n\n const choices = props.choices.map((choice) => ({\n ...choice,\n validate: (value: string) => {\n if (value === '' || value === choice.initial) {\n return 'Form is not complete';\n }\n\n return choice.validate?.(value) ?? true;\n },\n }));\n\n const form = new Form<T>({\n choices,\n message,\n name,\n validate: async (values) => {\n const results = await Promise.all(\n choices.map((choice) => choice.validate(values[choice.name])),\n );\n\n return (\n results.find((result) => typeof result === 'string') ??\n results.every((result) => result === true)\n );\n },\n });\n\n return form.run();\n};\n\nconst confirmShouldContinue = async (choices: Readonly<FormChoice[]>) => {\n const fieldsList = choices.map((choice) => choice.message);\n\n log.newline();\n log.plain('This template uses the following information:');\n log.newline();\n fieldsList.forEach((message) => log.subtle(`- ${message}`));\n\n log.newline();\n const result = await SHOULD_CONTINUE_PROMPT.run();\n\n return result === 'yes';\n};\n\nconst createDirectory = async (dir: string) => {\n try {\n await fs.promises.mkdir(dir);\n } catch (err) {\n if (isErrorWithCode(err, 'EEXIST')) {\n log.err(`The directory '${dir}' already exists.`);\n process.exit(1);\n }\n\n throw err;\n }\n};\n\nconst cloneTemplate = async (templateName: string, destinationDir: string) => {\n if (templateName.startsWith('github:')) {\n const gitHubPath = templateName.slice('github:'.length);\n return downloadGitHubTemplate(gitHubPath, destinationDir);\n }\n\n const templateDir = path.join(TEMPLATE_DIR, templateName);\n\n await copyFiles({\n // assume built-in templates have no extraneous files\n include: () => true,\n sourceRoot: templateDir,\n destinationRoot: destinationDir,\n processors: [],\n // built-in templates have files like _package.json\n stripUnderscorePrefix: true,\n });\n};\n\nconst getTemplateName = async () => {\n const templateSelection = await TEMPLATE_PROMPT.run();\n\n if (templateSelection === 'github \u2192') {\n const gitHubPath = await GIT_PATH_PROMPT.run();\n return `github:${gitHubPath}`;\n }\n\n return templateSelection;\n};\n\nconst generatePlaceholders = (choices: FormChoice[]) =>\n Object.fromEntries(\n choices.map(({ name }) => [name, `<%- ${name} %>`] as const),\n );\n\nexport const getTemplateConfig = (dir: string): TemplateConfig => {\n const templateConfigPath = path.join(dir, TEMPLATE_CONFIG_FILENAME);\n\n try {\n /* eslint-disable-next-line @typescript-eslint/no-var-requires */\n const templateConfig = require(templateConfigPath) as unknown;\n\n return 
TemplateConfig.check(templateConfig);\n } catch (err) {\n if (isErrorWithCode(err, 'MODULE_NOT_FOUND')) {\n return {\n entryPoint: undefined,\n fields: [],\n type: undefined,\n };\n }\n\n throw err;\n }\n};\n\nconst baseToTemplateData = async ({
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAAe;AAEf,kBAA0B;AAC1B,mBAAgC;AAChC,qBAAoB;AACpB,kBAA8B;AAC9B,sBAIO;AAEP,iBAAuC;AAEvC,qBAKO;AAEP,mBAAgC;AAEhC,sBAAqB;AAGd,MAAM,UAAU,CAA6B,UAI9C;AACJ,QAAM,EAAE,SAAS,KAAK,IAAI;AAE1B,QAAM,UAAU,MAAM,QAAQ,IAAI,CAAC,YAAY;AAAA,IAC7C,GAAG;AAAA,IACH,UAAU,CAAC,UAAkB;AAC3B,UAAI,UAAU,MAAM,UAAU,OAAO,SAAS;AAC5C,eAAO;AAAA,MACT;AAEA,aAAO,OAAO,WAAW,KAAK,KAAK;AAAA,IACrC;AAAA,EACF,EAAE;AAEF,QAAM,OAAO,IAAI,qBAAQ;AAAA,IACvB;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,OAAO,WAAW;AAC1B,YAAM,UAAU,MAAM,QAAQ;AAAA,QAC5B,QAAQ,IAAI,CAAC,WAAW,OAAO,SAAS,OAAO,OAAO,KAAK,CAAC;AAAA,MAC9D;AAEA,aACE,QAAQ,KAAK,CAAC,WAAW,OAAO,WAAW,QAAQ,KACnD,QAAQ,MAAM,CAAC,WAAW,WAAW,IAAI;AAAA,IAE7C;AAAA,EACF,CAAC;AAED,SAAO,KAAK,IAAI;AAClB;AAEA,MAAM,wBAAwB,OAAO,YAAoC;AACvE,QAAM,aAAa,QAAQ,IAAI,CAAC,WAAW,OAAO,OAAO;AAEzD,qBAAI,QAAQ;AACZ,qBAAI,MAAM,+CAA+C;AACzD,qBAAI,QAAQ;AACZ,aAAW,QAAQ,CAAC,YAAY,mBAAI,OAAO,KAAK,SAAS,CAAC;AAE1D,qBAAI,QAAQ;AACZ,QAAM,SAAS,MAAM,sCAAuB,IAAI;AAEhD,SAAO,WAAW;AACpB;AAEA,MAAM,kBAAkB,OAAO,QAAgB;AAC7C,MAAI;AACF,UAAM,gBAAAA,QAAG,SAAS,MAAM,GAAG;AAAA,EAC7B,SAAS,KAAP;AACA,YAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC,yBAAI,IAAI,kBAAkB,sBAAsB;AAChD,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,UAAM;AAAA,EACR;AACF;AAEA,MAAM,gBAAgB,OAAO,cAAsB,mBAA2B;AAC5E,MAAI,aAAa,WAAW,SAAS,GAAG;AACtC,UAAM,aAAa,aAAa,MAAM,UAAU,MAAM;AACtD,eAAO,mCAAuB,YAAY,cAAc;AAAA,EAC1D;AAEA,QAAM,cAAc,YAAAC,QAAK,KAAK,8BAAc,YAAY;AAExD,YAAM,uBAAU;AAAA,IAEd,SAAS,MAAM;AAAA,IACf,YAAY;AAAA,IACZ,iBAAiB;AAAA,IACjB,YAAY,CAAC;AAAA,IAEb,uBAAuB;AAAA,EACzB,CAAC;AACH;AAEA,MAAM,kBAAkB,YAAY;AAClC,QAAM,oBAAoB,MAAM,+BAAgB,IAAI;AAEpD,MAAI,sBAAsB,iBAAY;AACpC,UAAM,aAAa,MAAM,+BAAgB,IAAI;AAC7C,WAAO,UAAU;AAAA,EACnB;AAEA,SAAO;AACT;AAEA,MAAM,uBAAuB,CAAC,YAC5B,OAAO;AAAA,EACL,QAAQ,IAAI,CAAC,EAAE,KAAK,MAAM,CAAC,MAAM,OAAO,SAAS,CAAU;AAC7D;AAEK,MAAM,oBAAoB,CAAC,QAAgC;AAChE,QAAM,qBAAqB,YAAAA,QAAK,KAAK,KAAK,wCAAwB;AAElE,MAAI;AAEF,UAAM,iBAAiB,QAAQ
+
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport fs from 'fs-extra';\n\nimport { copyFiles } from '../../utils/copy';\nimport { isErrorWithCode } from '../../utils/error';\nimport { log } from '../../utils/logging';\nimport { getRandomPort } from '../../utils/port';\nimport {\n TEMPLATE_CONFIG_FILENAME,\n TEMPLATE_DIR,\n TemplateConfig,\n} from '../../utils/template';\n\nimport { downloadGitHubTemplate } from './git';\nimport type { BaseFields } from './prompts';\nimport {\n BASE_PROMPT_PROPS,\n GIT_PATH_PROMPT,\n SHOULD_CONTINUE_PROMPT,\n TEMPLATE_PROMPT,\n} from './prompts';\nimport type { InitConfig } from './types';\nimport { InitConfigInput } from './types';\n\nimport { Form } from 'enquirer';\nimport type { FormChoice } from 'enquirer';\n\nexport const runForm = <T = Record<string, string>>(props: {\n choices: Readonly<FormChoice[]>;\n message: string;\n name: string;\n}) => {\n const { message, name } = props;\n\n const choices = props.choices.map((choice) => ({\n ...choice,\n validate: (value: string) => {\n if (value === '' || value === choice.initial) {\n return 'Form is not complete';\n }\n\n return choice.validate?.(value) ?? true;\n },\n }));\n\n const form = new Form<T>({\n choices,\n message,\n name,\n validate: async (values) => {\n const results = await Promise.all(\n choices.map((choice) => choice.validate(values[choice.name])),\n );\n\n return (\n results.find((result) => typeof result === 'string') ??\n results.every((result) => result === true)\n );\n },\n });\n\n return form.run();\n};\n\nconst confirmShouldContinue = async (choices: Readonly<FormChoice[]>) => {\n const fieldsList = choices.map((choice) => choice.message);\n\n log.newline();\n log.plain('This template uses the following information:');\n log.newline();\n fieldsList.forEach((message) => log.subtle(`- ${message}`));\n\n log.newline();\n const result = await SHOULD_CONTINUE_PROMPT.run();\n\n return result === 'yes';\n};\n\nconst createDirectory = async (dir: string) => {\n try {\n await fs.promises.mkdir(dir);\n } catch (err) {\n if (isErrorWithCode(err, 'EEXIST')) {\n log.err(`The directory '${dir}' already exists.`);\n process.exit(1);\n }\n\n throw err;\n }\n};\n\nconst cloneTemplate = async (templateName: string, destinationDir: string) => {\n if (templateName.startsWith('github:')) {\n const gitHubPath = templateName.slice('github:'.length);\n return downloadGitHubTemplate(gitHubPath, destinationDir);\n }\n\n const templateDir = path.join(TEMPLATE_DIR, templateName);\n\n await copyFiles({\n // assume built-in templates have no extraneous files\n include: () => true,\n sourceRoot: templateDir,\n destinationRoot: destinationDir,\n processors: [],\n // built-in templates have files like _package.json\n stripUnderscorePrefix: true,\n });\n};\n\nconst getTemplateName = async () => {\n const templateSelection = await TEMPLATE_PROMPT.run();\n\n if (templateSelection === 'github \u2192') {\n const gitHubPath = await GIT_PATH_PROMPT.run();\n return `github:${gitHubPath}`;\n }\n\n return templateSelection;\n};\n\nconst generatePlaceholders = (choices: FormChoice[]) =>\n Object.fromEntries(\n choices.map(({ name }) => [name, `<%- ${name} %>`] as const),\n );\n\nexport const getTemplateConfig = (dir: string): TemplateConfig => {\n const templateConfigPath = path.join(dir, TEMPLATE_CONFIG_FILENAME);\n\n try {\n /* eslint-disable-next-line @typescript-eslint/no-var-requires */\n const templateConfig = require(templateConfigPath) as unknown;\n\n return 
TemplateConfig.check(templateConfig);\n } catch (err) {\n if (isErrorWithCode(err, 'MODULE_NOT_FOUND')) {\n return {\n entryPoint: undefined,\n fields: [],\n type: undefined,\n };\n }\n\n throw err;\n }\n};\n\nconst baseToTemplateData = async ({\n ownerName,\n platformName,\n repoName,\n}: BaseFields) => {\n const [orgName, teamName] = ownerName.split('/');\n\n const port = String(await getRandomPort());\n\n return {\n orgName,\n ownerName,\n repoName,\n // Use standalone username in `teamName` contexts\n teamName: teamName ?? orgName,\n\n port,\n\n platformName,\n lambdaCdkArchitecture: platformName === 'amd64' ? 'X86_64' : 'ARM_64',\n lambdaServerlessArchitecture:\n platformName === 'amd64' ? 'x86_64' : platformName,\n };\n};\n\nexport const configureFromPrompt = async (): Promise<InitConfig> => {\n const { ownerName, platformName, repoName } = await runForm<BaseFields>(\n BASE_PROMPT_PROPS,\n );\n log.plain(chalk.cyan(repoName), 'by', chalk.cyan(ownerName));\n\n const templateData = await baseToTemplateData({\n ownerName,\n platformName,\n repoName,\n });\n\n const destinationDir = repoName;\n\n await createDirectory(destinationDir);\n\n log.newline();\n const templateName = await getTemplateName();\n\n await cloneTemplate(templateName, destinationDir);\n\n const { entryPoint, fields, noSkip, type } = getTemplateConfig(\n path.join(process.cwd(), destinationDir),\n );\n\n if (fields.length === 0) {\n return {\n destinationDir,\n entryPoint,\n templateComplete: true,\n templateData,\n templateName,\n type,\n };\n }\n\n const shouldContinue = noSkip ? true : await confirmShouldContinue(fields);\n\n if (shouldContinue) {\n log.newline();\n const customAnswers = await runForm({\n choices: fields,\n message: chalk.bold(`Complete ${chalk.cyan(templateName)}:`),\n name: 'customAnswers',\n });\n\n return {\n destinationDir,\n entryPoint,\n templateComplete: true,\n templateData: { ...templateData, ...customAnswers },\n templateName,\n type,\n };\n }\n\n log.newline();\n log.warn(`Resume this later with ${chalk.bold('yarn skuba configure')}.`);\n\n const customAnswers = generatePlaceholders(fields);\n\n return {\n destinationDir,\n entryPoint,\n templateComplete: false,\n templateData: { ...templateData, ...customAnswers },\n templateName,\n type,\n };\n};\n\nconst configureFromPipe = async (): Promise<InitConfig> => {\n let text = '';\n\n await new Promise((resolve) =>\n process.stdin.on('data', (chunk) => (text += chunk)).once('end', resolve),\n );\n\n text = text.trim();\n\n if (text === '') {\n log.err('No data from stdin.');\n process.exit(1);\n }\n\n let value: unknown;\n\n try {\n value = JSON.parse(text) as unknown;\n } catch {\n log.err('Invalid JSON from stdin.');\n process.exit(1);\n }\n\n const result = InitConfigInput.validate(value);\n\n if (!result.success) {\n log.err('Invalid data from stdin:');\n log.err(result.message);\n process.exit(1);\n }\n\n const { destinationDir, templateComplete, templateName } = result.value;\n\n const templateData = {\n ...(await baseToTemplateData(result.value.templateData)),\n ...result.value.templateData,\n };\n\n await createDirectory(destinationDir);\n\n await cloneTemplate(templateName, destinationDir);\n\n const { entryPoint, fields, noSkip, type } = getTemplateConfig(\n path.join(process.cwd(), destinationDir),\n );\n\n if (!templateComplete) {\n if (noSkip) {\n log.err('Templating for', log.bold(templateName), 'cannot be skipped.');\n process.exit(1);\n }\n\n return {\n ...result.value,\n entryPoint,\n templateData: {\n ...templateData,\n 
...generatePlaceholders(fields),\n },\n type,\n };\n }\n\n const required = fields.map(({ name }) => name);\n\n const provided = new Set(Object.keys(templateData));\n\n const missing = required.filter((name) => !provided.has(name));\n\n if (missing.length > 0) {\n log.err('This template uses the following information:');\n log.newline();\n missing.forEach((name) => log.err(`- ${name}`));\n process.exit(1);\n }\n\n return {\n ...result.value,\n entryPoint,\n templateData,\n type,\n };\n};\n\nexport const getConfig = () =>\n process.stdin.isTTY ? configureFromPrompt() : configureFromPipe();\n"],
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAAe;AAEf,kBAA0B;AAC1B,mBAAgC;AAChC,qBAAoB;AACpB,kBAA8B;AAC9B,sBAIO;AAEP,iBAAuC;AAEvC,qBAKO;AAEP,mBAAgC;AAEhC,sBAAqB;AAGd,MAAM,UAAU,CAA6B,UAI9C;AACJ,QAAM,EAAE,SAAS,KAAK,IAAI;AAE1B,QAAM,UAAU,MAAM,QAAQ,IAAI,CAAC,YAAY;AAAA,IAC7C,GAAG;AAAA,IACH,UAAU,CAAC,UAAkB;AAC3B,UAAI,UAAU,MAAM,UAAU,OAAO,SAAS;AAC5C,eAAO;AAAA,MACT;AAEA,aAAO,OAAO,WAAW,KAAK,KAAK;AAAA,IACrC;AAAA,EACF,EAAE;AAEF,QAAM,OAAO,IAAI,qBAAQ;AAAA,IACvB;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,OAAO,WAAW;AAC1B,YAAM,UAAU,MAAM,QAAQ;AAAA,QAC5B,QAAQ,IAAI,CAAC,WAAW,OAAO,SAAS,OAAO,OAAO,KAAK,CAAC;AAAA,MAC9D;AAEA,aACE,QAAQ,KAAK,CAAC,WAAW,OAAO,WAAW,QAAQ,KACnD,QAAQ,MAAM,CAAC,WAAW,WAAW,IAAI;AAAA,IAE7C;AAAA,EACF,CAAC;AAED,SAAO,KAAK,IAAI;AAClB;AAEA,MAAM,wBAAwB,OAAO,YAAoC;AACvE,QAAM,aAAa,QAAQ,IAAI,CAAC,WAAW,OAAO,OAAO;AAEzD,qBAAI,QAAQ;AACZ,qBAAI,MAAM,+CAA+C;AACzD,qBAAI,QAAQ;AACZ,aAAW,QAAQ,CAAC,YAAY,mBAAI,OAAO,KAAK,SAAS,CAAC;AAE1D,qBAAI,QAAQ;AACZ,QAAM,SAAS,MAAM,sCAAuB,IAAI;AAEhD,SAAO,WAAW;AACpB;AAEA,MAAM,kBAAkB,OAAO,QAAgB;AAC7C,MAAI;AACF,UAAM,gBAAAA,QAAG,SAAS,MAAM,GAAG;AAAA,EAC7B,SAAS,KAAP;AACA,YAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC,yBAAI,IAAI,kBAAkB,sBAAsB;AAChD,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,UAAM;AAAA,EACR;AACF;AAEA,MAAM,gBAAgB,OAAO,cAAsB,mBAA2B;AAC5E,MAAI,aAAa,WAAW,SAAS,GAAG;AACtC,UAAM,aAAa,aAAa,MAAM,UAAU,MAAM;AACtD,eAAO,mCAAuB,YAAY,cAAc;AAAA,EAC1D;AAEA,QAAM,cAAc,YAAAC,QAAK,KAAK,8BAAc,YAAY;AAExD,YAAM,uBAAU;AAAA,IAEd,SAAS,MAAM;AAAA,IACf,YAAY;AAAA,IACZ,iBAAiB;AAAA,IACjB,YAAY,CAAC;AAAA,IAEb,uBAAuB;AAAA,EACzB,CAAC;AACH;AAEA,MAAM,kBAAkB,YAAY;AAClC,QAAM,oBAAoB,MAAM,+BAAgB,IAAI;AAEpD,MAAI,sBAAsB,iBAAY;AACpC,UAAM,aAAa,MAAM,+BAAgB,IAAI;AAC7C,WAAO,UAAU;AAAA,EACnB;AAEA,SAAO;AACT;AAEA,MAAM,uBAAuB,CAAC,YAC5B,OAAO;AAAA,EACL,QAAQ,IAAI,CAAC,EAAE,KAAK,MAAM,CAAC,MAAM,OAAO,SAAS,CAAU;AAC7D;AAEK,MAAM,oBAAoB,CAAC,QAAgC;AAChE,QAAM,qBAAqB,YAAAA,QAAK,KAAK,KAAK,wCAAwB;AAElE,MAAI;AAEF,UAAM,iBAAiB,QAAQ;AAE/B,WAAO,+BAAe,MAAM,cAAc;AAAA,EAC5C,SAAS,KAAP;AACA,YAAI,8BAAgB,KAAK,kBAAkB,GAAG;AAC5C,aAAO;AAAA,QACL,YAAY;AAAA,QACZ,QAAQ,CAAC;AAAA,QACT,MAAM;AAAA,MACR;AAAA,IACF;AAEA,UAAM;AAAA,EACR;AACF;AAEA,MAAM,qBAAqB,OAAO;AAAA,EAChC;AAAA,EACA;AAAA,EACA;AACF,MAAkB;AAChB,QAAM,CAAC,SAAS,QAAQ,IAAI,UAAU,MAAM,GAAG;AAE/C,QAAM,OAAO,OAAO,UAAM,2BAAc,CAAC;AAEzC,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IAEA,UAAU,YAAY;AAAA,IAEtB;AAAA,IAEA;AAAA,IACA,uBAAuB,iBAAiB,UAAU,WAAW;AAAA,IAC7D,8BACE,iBAAiB,UAAU,WAAW;AAAA,EAC1C;AACF;AAEO,MAAM,sBAAsB,YAAiC;AAClE,QAAM,EAAE,WAAW,cAAc,SAAS,IAAI,MAAM;AAAA,IAClD;AAAA,EACF;AACA,qBAAI,MAAM,aAAAC,QAAM,KAAK,QAAQ,GAAG,MAAM,aAAAA,QAAM,KAAK,SAAS,CAAC;AAE3D,QAAM,eAAe,MAAM,mBAAmB;AAAA,IAC5C;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,iBAAiB;AAEvB,QAAM,gBAAgB,cAAc;AAEpC,qBAAI,QAAQ;AACZ,QAAM,eAAe,MAAM,gBAAgB;AAE3C,QAAM,cAAc,cAAc,cAAc;AAEhD,QAAM,EAAE,YAAY,QAAQ,QAAQ,KAAK,IAAI;AAAA,IAC3C,YAAAD,QAAK,KAAK,QAAQ,IAAI,GAAG,cAAc;AAAA,EACzC;AAEA,MAAI,OAAO,WAAW,GAAG;AACvB,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA,kBAAkB;AAAA,MAClB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,iBAAiB,SAAS,OAAO,MAAM,sBAAsB,MAAM;AAEzE,MAAI,gBAAgB;AAClB,uBAAI,QAAQ;AACZ,UAAME,iBAAgB,MAAM,QAAQ;AAAA,MAClC,SAAS;AAAA,MACT,SAAS,aAAAD,QAAM,KAAK,YAAY,aAAAA,QAAM,KAAK,YAAY,IAAI;AAAA,MAC3D,MAAM;AAAA,IACR,CAAC;AAED,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA,kBAAkB;AAAA,MAClB,cAAc,EAAE,GAAG,cAAc,GAAGC,eAAc;AAAA,MAClD;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,qBAAI,QAAQ;AACZ,qBAAI,KAAK,0BAA0B,aAAAD,QAAM,KAAK,sBAAsB,IAAI;AAExE,QAAM,gBAAgB,qBAAqB,MAAM;AAEjD,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,kBAAkB;AAAA,IAClB,cAAc,EAAE,GAAG,cAAc,GAAG,cAAc;AAAA,IAClD;AAAA,I
ACA;AAAA,EACF;AACF;AAEA,MAAM,oBAAoB,YAAiC;AACzD,MAAI,OAAO;AAEX,QAAM,IAAI;AAAA,IAAQ,CAAC,YACjB,QAAQ,MAAM,GAAG,QAAQ,CAAC,UAAW,QAAQ,KAAM,EAAE,KAAK,OAAO,OAAO;AAAA,EAC1E;AAEA,SAAO,KAAK,KAAK;AAEjB,MAAI,SAAS,IAAI;AACf,uBAAI,IAAI,qBAAqB;AAC7B,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,MAAI;AAEJ,MAAI;AACF,YAAQ,KAAK,MAAM,IAAI;AAAA,EACzB,QAAE;AACA,uBAAI,IAAI,0BAA0B;AAClC,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,SAAS,6BAAgB,SAAS,KAAK;AAE7C,MAAI,CAAC,OAAO,SAAS;AACnB,uBAAI,IAAI,0BAA0B;AAClC,uBAAI,IAAI,OAAO,OAAO;AACtB,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,EAAE,gBAAgB,kBAAkB,aAAa,IAAI,OAAO;AAElE,QAAM,eAAe;AAAA,IACnB,GAAI,MAAM,mBAAmB,OAAO,MAAM,YAAY;AAAA,IACtD,GAAG,OAAO,MAAM;AAAA,EAClB;AAEA,QAAM,gBAAgB,cAAc;AAEpC,QAAM,cAAc,cAAc,cAAc;AAEhD,QAAM,EAAE,YAAY,QAAQ,QAAQ,KAAK,IAAI;AAAA,IAC3C,YAAAD,QAAK,KAAK,QAAQ,IAAI,GAAG,cAAc;AAAA,EACzC;AAEA,MAAI,CAAC,kBAAkB;AACrB,QAAI,QAAQ;AACV,yBAAI,IAAI,kBAAkB,mBAAI,KAAK,YAAY,GAAG,oBAAoB;AACtE,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,WAAO;AAAA,MACL,GAAG,OAAO;AAAA,MACV;AAAA,MACA,cAAc;AAAA,QACZ,GAAG;AAAA,QACH,GAAG,qBAAqB,MAAM;AAAA,MAChC;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,WAAW,OAAO,IAAI,CAAC,EAAE,KAAK,MAAM,IAAI;AAE9C,QAAM,WAAW,IAAI,IAAI,OAAO,KAAK,YAAY,CAAC;AAElD,QAAM,UAAU,SAAS,OAAO,CAAC,SAAS,CAAC,SAAS,IAAI,IAAI,CAAC;AAE7D,MAAI,QAAQ,SAAS,GAAG;AACtB,uBAAI,IAAI,+CAA+C;AACvD,uBAAI,QAAQ;AACZ,YAAQ,QAAQ,CAAC,SAAS,mBAAI,IAAI,KAAK,MAAM,CAAC;AAC9C,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,SAAO;AAAA,IACL,GAAG,OAAO;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAEO,MAAM,YAAY,MACvB,QAAQ,MAAM,QAAQ,oBAAoB,IAAI,kBAAkB;",
"names": ["fs", "path", "chalk", "customAnswers"]
}
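
The getConfig.js.map hunk above embeds the new src/cli/init/getConfig.ts, whose baseToTemplateData helper now derives Lambda architecture identifiers from the selected platformName. A minimal TypeScript sketch of that mapping follows; the 'amd64' | 'arm64' union for Platform is an assumption based on the values referenced in the embedded source, not a reproduction of the actual ./validation module.

// Sketch only: mirrors the platformName → Lambda architecture mapping in the
// embedded getConfig.ts source above. The 'amd64' | 'arm64' union is assumed;
// the real Platform type is exported from ./validation.
type Platform = 'amd64' | 'arm64';

const lambdaArchitectures = (platformName: Platform) => ({
  // CDK-style enum name: X86_64 or ARM_64.
  lambdaCdkArchitecture: platformName === 'amd64' ? 'X86_64' : 'ARM_64',
  // Serverless Framework identifier: x86_64 or arm64.
  lambdaServerlessArchitecture:
    platformName === 'amd64' ? 'x86_64' : platformName,
});

// lambdaArchitectures('arm64')
// → { lambdaCdkArchitecture: 'ARM_64', lambdaServerlessArchitecture: 'arm64' }
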
package/lib/cli/init/prompts.d.ts
CHANGED
@@ -1,5 +1,8 @@
+import type { Platform } from './validation';
 import { Input, Select } from 'enquirer';
-export
+export type BaseFields = Record<typeof BASE_CHOICES[number]['name'], string> & {
+    platformName: Platform;
+};
 declare const BASE_CHOICES: readonly [{
     readonly name: "ownerName";
     readonly message: "Owner";
@@ -10,6 +13,11 @@ declare const BASE_CHOICES: readonly [{
     readonly message: "Repo";
     readonly initial: "my-repo";
     readonly validate: (value: unknown) => Promise<string | true>;
+}, {
+    readonly name: "platformName";
+    readonly message: "Platform";
+    readonly initial: string;
+    readonly validate: (value: unknown) => string | true;
 }];
 export declare const BASE_PROMPT_PROPS: {
     choices: readonly [{
@@ -22,6 +30,11 @@ export declare const BASE_PROMPT_PROPS: {
         readonly message: "Repo";
         readonly initial: "my-repo";
         readonly validate: (value: unknown) => Promise<string | true>;
+    }, {
+        readonly name: "platformName";
+        readonly message: "Platform";
+        readonly initial: string;
+        readonly validate: (value: unknown) => string | true;
     }];
     message: string;
     name: string;
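
With the widened declarations above, a runForm<BaseFields>(BASE_PROMPT_PROPS) result now carries a typed platformName alongside ownerName and repoName. A self-contained sketch of the resulting shape; the Platform union is again assumed to be 'amd64' | 'arm64', and the local type is a simplified stand-in for the declared intersection.

// Local stand-in for the shape declared above:
// Record<typeof BASE_CHOICES[number]['name'], string> & { platformName: Platform }
type Platform = 'amd64' | 'arm64'; // assumed values
type BaseFields = {
  ownerName: string;
  repoName: string;
  platformName: Platform;
};

// Downstream code can narrow on platformName without casting from string.
const describeTarget = ({ ownerName, repoName, platformName }: BaseFields) =>
  `${ownerName}/${repoName} builds for ${
    platformName === 'arm64' ? 'ARM_64' : 'X86_64'
  } Lambdas`;

describeTarget({
  ownerName: 'SEEK-Jobs/my-team',
  repoName: 'my-repo',
  platformName: 'arm64',
});
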
package/lib/cli/init/prompts.js
CHANGED
@@ -58,11 +58,17 @@ const BASE_CHOICES = [
       const exists = await (0, import_fs_extra.pathExists)(value);
       return !exists || `'${value}' is an existing directory`;
     }
+  },
+  {
+    name: "platformName",
+    message: "Platform",
+    initial: import_validation.PLATFORM_OPTIONS,
+    validate: (value) => (0, import_validation.isPlatform)(value) || `must be ${import_validation.PLATFORM_OPTIONS}`
   }
 ];
 const BASE_PROMPT_PROPS = {
   choices: BASE_CHOICES,
-  message: "For starters, some
+  message: "For starters, some project details:",
   name: "baseAnswers"
 };
 const SHOULD_CONTINUE_PROMPT = new import_enquirer.Select({
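
The compiled prompt above adds a third BASE_CHOICES entry whose validate defers to isPlatform and reports `must be ${PLATFORM_OPTIONS}` on failure. validation.ts itself is not shown in this diff, so the following is only an illustrative guess at the shape of those exports, not their actual implementation.

// Hypothetical shapes for the ./validation exports used by the new prompt
// choice; the names match the diff, but the bodies are assumptions.
const PLATFORMS = ['amd64', 'arm64'] as const;

export type Platform = (typeof PLATFORMS)[number];

// The prompt uses PLATFORM_OPTIONS both as the initial value and in the
// validation message, so it is presumably a human-readable string.
export const PLATFORM_OPTIONS = PLATFORMS.join(' or ');

export const isPlatform = (value: unknown): value is Platform =>
  typeof value === 'string' && (PLATFORMS as readonly string[]).includes(value);

// e.g. isPlatform('arm64') → true; isPlatform('x86') → false
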
package/lib/cli/init/prompts.js.map
CHANGED
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../src/cli/init/prompts.ts"],
"sourcesContent": ["import { pathExists } from 'fs-extra';\n\nimport { TEMPLATE_NAMES_WITH_BYO } from '../../utils/template';\n\nimport { isGitHubOrg
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAA2B;AAE3B,sBAAwC;
"sourcesContent": ["import { pathExists } from 'fs-extra';\n\nimport { TEMPLATE_NAMES_WITH_BYO } from '../../utils/template';\n\nimport type { Platform } from './validation';\nimport {\n PLATFORM_OPTIONS,\n isGitHubOrg,\n isGitHubRepo,\n isGitHubTeam,\n isPlatform,\n} from './validation';\n\nimport { Input, Select } from 'enquirer';\n\nexport type BaseFields = Record<typeof BASE_CHOICES[number]['name'], string> & {\n platformName: Platform;\n};\n\nconst BASE_CHOICES = [\n {\n name: 'ownerName',\n message: 'Owner',\n initial: 'SEEK-Jobs/my-team',\n validate: (value: unknown) => {\n if (typeof value !== 'string') {\n return 'required';\n }\n\n const [org, team] = value.split('/');\n\n if (!isGitHubOrg(org)) {\n return 'fails GitHub validation';\n }\n\n return (\n team === undefined || isGitHubTeam(team) || 'fails GitHub validation'\n );\n },\n },\n {\n name: 'repoName',\n message: 'Repo',\n initial: 'my-repo',\n validate: async (value: unknown) => {\n if (typeof value !== 'string') {\n return 'required';\n }\n\n if (!isGitHubRepo(value)) {\n return 'fails GitHub validation';\n }\n\n const exists = await pathExists(value);\n\n return !exists || `'${value}' is an existing directory`;\n },\n },\n {\n name: 'platformName',\n message: 'Platform',\n initial: PLATFORM_OPTIONS,\n validate: (value: unknown) =>\n isPlatform(value) || `must be ${PLATFORM_OPTIONS}`,\n },\n] as const;\n\nexport const BASE_PROMPT_PROPS = {\n choices: BASE_CHOICES,\n message: 'For starters, some project details:',\n name: 'baseAnswers',\n};\n\nexport const SHOULD_CONTINUE_PROMPT = new Select({\n choices: ['yes', 'no'] as const,\n message: 'Fill this in now?',\n name: 'shouldContinue',\n});\n\nexport const GIT_PATH_PROMPT = new Input({\n message: 'Git path',\n name: 'gitPath',\n initial: 'seek-oss/skuba',\n validate: (value) => /[^/]+\\/[^/]+/.test(value) || 'Path is not valid',\n});\n\nexport const TEMPLATE_PROMPT = new Select({\n choices: TEMPLATE_NAMES_WITH_BYO,\n message: 'Select a template:',\n name: 'templateName',\n});\n"],
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAA2B;AAE3B,sBAAwC;AAGxC,wBAMO;AAEP,sBAA8B;AAM9B,MAAM,eAAe;AAAA,EACnB;AAAA,IACE,MAAM;AAAA,IACN,SAAS;AAAA,IACT,SAAS;AAAA,IACT,UAAU,CAAC,UAAmB;AAC5B,UAAI,OAAO,UAAU,UAAU;AAC7B,eAAO;AAAA,MACT;AAEA,YAAM,CAAC,KAAK,IAAI,IAAI,MAAM,MAAM,GAAG;AAEnC,UAAI,KAAC,+BAAY,GAAG,GAAG;AACrB,eAAO;AAAA,MACT;AAEA,aACE,SAAS,cAAa,gCAAa,IAAI,KAAK;AAAA,IAEhD;AAAA,EACF;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,SAAS;AAAA,IACT,SAAS;AAAA,IACT,UAAU,OAAO,UAAmB;AAClC,UAAI,OAAO,UAAU,UAAU;AAC7B,eAAO;AAAA,MACT;AAEA,UAAI,KAAC,gCAAa,KAAK,GAAG;AACxB,eAAO;AAAA,MACT;AAEA,YAAM,SAAS,UAAM,4BAAW,KAAK;AAErC,aAAO,CAAC,UAAU,IAAI;AAAA,IACxB;AAAA,EACF;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,SAAS;AAAA,IACT,SAAS;AAAA,IACT,UAAU,CAAC,cACT,8BAAW,KAAK,KAAK,WAAW;AAAA,EACpC;AACF;AAEO,MAAM,oBAAoB;AAAA,EAC/B,SAAS;AAAA,EACT,SAAS;AAAA,EACT,MAAM;AACR;AAEO,MAAM,yBAAyB,IAAI,uBAAO;AAAA,EAC/C,SAAS,CAAC,OAAO,IAAI;AAAA,EACrB,SAAS;AAAA,EACT,MAAM;AACR,CAAC;AAEM,MAAM,kBAAkB,IAAI,sBAAM;AAAA,EACvC,SAAS;AAAA,EACT,MAAM;AAAA,EACN,SAAS;AAAA,EACT,UAAU,CAAC,UAAU,eAAe,KAAK,KAAK,KAAK;AACrD,CAAC;AAEM,MAAM,kBAAkB,IAAI,uBAAO;AAAA,EACxC,SAAS;AAAA,EACT,SAAS;AAAA,EACT,MAAM;AACR,CAAC;",
"names": []
}
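
For the non-interactive path, the configureFromPipe branch embedded in the getConfig.js.map diff reads an InitConfig JSON payload from stdin and now accepts platformName inside templateData. A hedged example payload follows; the full InitConfigInput schema lives in lib/cli/init/types and is not reproduced in this diff, so any field beyond those referenced in the embedded source is an assumption, and templates with their own skuba.template.js fields will require additional templateData entries.

// Illustrative payload for `skuba init < config.json` on a non-TTY stdin.
// Field names are taken from the embedded getConfig.ts source; values are
// placeholders only.
const initConfig = {
  destinationDir: 'my-repo',
  templateComplete: true,
  templateName: 'greeter',
  templateData: {
    ownerName: 'SEEK-Jobs/my-team',
    repoName: 'my-repo',
    platformName: 'arm64', // new in skuba 5.1
  },
};

process.stdout.write(JSON.stringify(initConfig));
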