skuba 0.0.0-master-20230318051429 → 0.0.0-preserve-assets-20230504052952

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. package/README.md +1 -1
  2. package/lib/api/jest/index.d.ts +4 -1
  3. package/lib/cli/adapter/eslint.js +0 -13
  4. package/lib/cli/adapter/eslint.js.map +2 -2
  5. package/lib/cli/adapter/prettier.js +1 -1
  6. package/lib/cli/adapter/prettier.js.map +1 -1
  7. package/lib/cli/build/esbuild.js +3 -42
  8. package/lib/cli/build/esbuild.js.map +3 -3
  9. package/lib/cli/build/index.js +9 -2
  10. package/lib/cli/build/index.js.map +2 -2
  11. package/lib/cli/build/tsc.d.ts +3 -0
  12. package/lib/cli/build/tsc.js +60 -0
  13. package/lib/cli/build/tsc.js.map +3 -3
  14. package/lib/cli/buildPackage.js +17 -0
  15. package/lib/cli/buildPackage.js.map +3 -3
  16. package/lib/cli/configure/modules/jest.js +1 -1
  17. package/lib/cli/configure/modules/jest.js.map +1 -1
  18. package/lib/cli/configure/modules/package.js +3 -3
  19. package/lib/cli/configure/modules/package.js.map +1 -1
  20. package/lib/cli/configure/modules/tsconfig.js +1 -1
  21. package/lib/cli/configure/modules/tsconfig.js.map +2 -2
  22. package/lib/cli/configure/patchRenovateConfig.js +3 -1
  23. package/lib/cli/configure/patchRenovateConfig.js.map +2 -2
  24. package/lib/cli/configure/patchServerListener.d.ts +3 -0
  25. package/lib/cli/configure/patchServerListener.js +87 -0
  26. package/lib/cli/configure/patchServerListener.js.map +7 -0
  27. package/lib/cli/format.js +2 -0
  28. package/lib/cli/format.js.map +2 -2
  29. package/lib/cli/lint/autofix.js +9 -1
  30. package/lib/cli/lint/autofix.js.map +2 -2
  31. package/lib/cli/lint/external.js +1 -4
  32. package/lib/cli/lint/external.js.map +1 -1
  33. package/lib/cli/lint/index.js +2 -0
  34. package/lib/cli/lint/index.js.map +2 -2
  35. package/lib/cli/start.js +1 -1
  36. package/lib/cli/start.js.map +1 -1
  37. package/lib/cli/test/index.js +1 -2
  38. package/lib/cli/test/index.js.map +1 -1
  39. package/lib/cli/test/reporters/github/annotations.js +1 -1
  40. package/lib/cli/test/reporters/github/annotations.js.map +1 -1
  41. package/lib/cli/test/reporters/github/index.js +0 -2
  42. package/lib/utils/copy.d.ts +2 -0
  43. package/lib/utils/copy.js +39 -1
  44. package/lib/utils/copy.js.map +2 -2
  45. package/lib/utils/dir.d.ts +2 -1
  46. package/lib/utils/dir.js +2 -2
  47. package/lib/utils/dir.js.map +2 -2
  48. package/lib/utils/exec.js +1 -4
  49. package/lib/utils/exec.js.map +1 -1
  50. package/lib/utils/manifest.d.ts +1 -0
  51. package/lib/utils/manifest.js +8 -2
  52. package/lib/utils/manifest.js.map +2 -2
  53. package/lib/wrapper/requestListener.js +3 -0
  54. package/lib/wrapper/requestListener.js.map +2 -2
  55. package/package.json +15 -15
  56. package/template/base/.github/CODEOWNERS +0 -4
  57. package/template/express-rest-api/.buildkite/pipeline.yml +1 -1
  58. package/template/express-rest-api/gantry.apply.yml +0 -2
  59. package/template/express-rest-api/package.json +1 -1
  60. package/template/express-rest-api/src/api/healthCheck.ts +1 -1
  61. package/template/express-rest-api/src/api/smokeTest.ts +1 -1
  62. package/template/greeter/.buildkite/pipeline.yml +1 -1
  63. package/template/koa-rest-api/.buildkite/pipeline.yml +1 -1
  64. package/template/koa-rest-api/gantry.apply.yml +0 -2
  65. package/template/koa-rest-api/package.json +5 -5
  66. package/template/koa-rest-api/src/api/healthCheck.ts +1 -1
  67. package/template/koa-rest-api/src/api/jobs/getJobs.ts +1 -1
  68. package/template/koa-rest-api/src/api/jobs/postJob.ts +1 -1
  69. package/template/koa-rest-api/src/api/smokeTest.ts +1 -1
  70. package/template/koa-rest-api/src/framework/server.test.ts +1 -1
  71. package/template/koa-rest-api/src/framework/validation.ts +2 -2
  72. package/template/koa-rest-api/src/storage/jobs.ts +1 -1
  73. package/template/koa-rest-api/src/testing/server.ts +2 -2
  74. package/template/koa-rest-api/src/testing/types.ts +1 -1
  75. package/template/koa-rest-api/src/tracing.ts +3 -8
  76. package/template/koa-rest-api/tsconfig.json +18 -0
  77. package/template/lambda-sqs-worker/.buildkite/pipeline.yml +2 -2
  78. package/template/lambda-sqs-worker/package.json +2 -2
  79. package/template/lambda-sqs-worker/src/app.ts +1 -1
  80. package/template/lambda-sqs-worker/src/framework/validation.ts +1 -1
  81. package/template/lambda-sqs-worker/src/mapping/jobScorer.ts +5 -2
  82. package/template/lambda-sqs-worker/src/services/jobScorer.ts +6 -6
  83. package/template/lambda-sqs-worker/src/testing/handler.ts +1 -1
  84. package/template/lambda-sqs-worker/src/testing/types.ts +1 -1
  85. package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +2 -2
  86. package/template/lambda-sqs-worker-cdk/infra/appStack.ts +1 -1
  87. package/template/lambda-sqs-worker-cdk/package.json +1 -1
  88. package/template/lambda-sqs-worker-cdk/src/app.ts +1 -1
  89. package/template/oss-npm-package/_package.json +19 -16
  90. package/template/oss-npm-package/tsconfig.json +2 -2
  91. package/template/private-npm-package/_package.json +19 -16
  92. package/template/private-npm-package/tsconfig.json +2 -2
package/lib/cli/configure/patchRenovateConfig.js CHANGED
@@ -44,6 +44,7 @@ const RENOVATE_PRESETS = [
  "local>seekasia/renovate-config",
  "local>seek-jobs/renovate-config"
  ];
+ const EXISTING_REPO_PRESET_REGEX = /(github|local)>(seek-jobs|seekasia)\//;
  const RenovateConfig = t.Record({
  extends: t.Array(t.String)
  });
@@ -105,7 +106,8 @@ const patchRenovateConfig = async (dir) => {
  !config?.input || // The file appears to mention the baseline preset for the configured Git
  // owner. This is a very naive check that we don't want to overcomplicate
  // because it is invoked before each skuba format and lint.
- config.input.includes(presetToAdd)
+ config.input.includes(presetToAdd) || // Ignore any renovate configuration which already extends a SEEK-Jobs or seekasia config
+ EXISTING_REPO_PRESET_REGEX.exec(config.input)
  ) {
  return;
  }
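The new EXISTING_REPO_PRESET_REGEX guard means skuba now skips patching whenever a repository already extends any SEEK-Jobs or seekasia shared Renovate config, whether referenced through the local> or github> source prefix, rather than only when it mentions the exact baseline preset. A minimal standalone sketch of the check (the sample config strings below are hypothetical, not from skuba):

const EXISTING_REPO_PRESET_REGEX = /(github|local)>(seek-jobs|seekasia)\//;

// Hypothetical raw config contents, to illustrate the short circuit.
const alreadyExtended = '{ "extends": ["github>seek-jobs/renovate-config:custom"] }';
const unconfigured = '{ "extends": ["config:base"] }';

EXISTING_REPO_PRESET_REGEX.exec(alreadyExtended); // match => bail out of patching
EXISTING_REPO_PRESET_REGEX.exec(unconfigured); // null => prepend the baseline preset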
package/lib/cli/configure/patchRenovateConfig.js.map CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/cli/configure/patchRenovateConfig.ts"],
- "sourcesContent": ["/* eslint-disable new-cap */\n\nimport { inspect } from 'util';\n\nimport fs from 'fs-extra';\nimport * as fleece from 'golden-fleece';\nimport * as t from 'runtypes';\n\nimport * as Git from '../../api/git';\nimport { log } from '../../utils/logging';\n\nimport { createDestinationFileReader } from './analysis/project';\nimport { RENOVATE_CONFIG_FILENAMES } from './modules/renovate';\nimport { formatPrettier } from './processing/prettier';\n\nconst RENOVATE_PRESETS = [\n 'local>seekasia/renovate-config',\n 'local>seek-jobs/renovate-config',\n] as const;\n\ntype RenovateFiletype = 'json' | 'json5';\n\ntype RenovatePreset = (typeof RENOVATE_PRESETS)[number];\n\nconst RenovateConfig = t.Record({\n extends: t.Array(t.String),\n});\n\nconst ownerToRenovatePreset = (owner: string): RenovatePreset | undefined => {\n const lowercaseOwner = owner.toLowerCase();\n\n switch (lowercaseOwner) {\n case 'seekasia':\n return 'local>seekasia/renovate-config';\n\n case 'seek-jobs':\n return 'local>seek-jobs/renovate-config';\n\n default:\n return;\n }\n};\n\ntype PatchFile = (props: {\n filepath: string;\n input: string;\n presetToAdd: RenovatePreset;\n}) => Promise<void>;\n\nconst patchJson: PatchFile = async ({ filepath, input, presetToAdd }) => {\n const config: unknown = JSON.parse(input);\n\n if (!RenovateConfig.guard(config)) {\n return;\n }\n\n config.extends.unshift(presetToAdd);\n\n await fs.promises.writeFile(\n filepath,\n formatPrettier(JSON.stringify(config), { parser: 'json' }),\n );\n\n return;\n};\n\nconst patchJson5: PatchFile = async ({ filepath, input, presetToAdd }) => {\n const config: unknown = fleece.evaluate(input);\n\n if (!RenovateConfig.guard(config)) {\n return;\n }\n\n config.extends.unshift(presetToAdd);\n\n await fs.promises.writeFile(\n filepath,\n formatPrettier(fleece.patch(input, config), { parser: 'json5' }),\n );\n\n return;\n};\n\nconst patchByFiletype: Record<RenovateFiletype, PatchFile> = {\n json: patchJson,\n json5: patchJson5,\n};\n\nconst patchRenovateConfig = async (dir: string) => {\n const readFile = createDestinationFileReader(dir);\n\n const { owner } = await Git.getOwnerAndRepo({ dir });\n\n const presetToAdd = ownerToRenovatePreset(owner);\n\n if (!presetToAdd) {\n // No baseline preset needs to be added for the configured Git owner.\n return;\n }\n\n const maybeConfigs = await Promise.all(\n RENOVATE_CONFIG_FILENAMES.map(async (filepath) => ({\n input: await readFile(filepath),\n filepath,\n })),\n );\n\n const config = maybeConfigs.find((maybeConfig) => Boolean(maybeConfig.input));\n\n if (\n // No file was found.\n !config?.input ||\n // The file appears to mention the baseline preset for the configured Git\n // owner. This is a very naive check that we don't want to overcomplicate\n // because it is invoked before each skuba format and lint.\n config.input.includes(presetToAdd)\n ) {\n return;\n }\n\n const filetype: RenovateFiletype = config.filepath\n .toLowerCase()\n .endsWith('.json5')\n ? 'json5'\n : 'json';\n\n const patchFile = patchByFiletype[filetype];\n\n await patchFile({\n filepath: config.filepath,\n input: config.input,\n presetToAdd,\n });\n};\n\nexport const tryPatchRenovateConfig = async (dir = process.cwd()) => {\n try {\n await patchRenovateConfig(dir);\n } catch (err) {\n log.warn('Failed to patch Renovate config.');\n log.subtle(inspect(err));\n }\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,kBAAwB;AAExB,sBAAe;AACf,aAAwB;AACxB,QAAmB;AAEnB,UAAqB;AACrB,qBAAoB;AAEpB,qBAA4C;AAC5C,sBAA0C;AAC1C,sBAA+B;AAE/B,MAAM,mBAAmB;AAAA,EACvB;AAAA,EACA;AACF;AAMA,MAAM,iBAAiB,EAAE,OAAO;AAAA,EAC9B,SAAS,EAAE,MAAM,EAAE,MAAM;AAC3B,CAAC;AAED,MAAM,wBAAwB,CAAC,UAA8C;AAC3E,QAAM,iBAAiB,MAAM,YAAY;AAEzC,UAAQ,gBAAgB;AAAA,IACtB,KAAK;AACH,aAAO;AAAA,IAET,KAAK;AACH,aAAO;AAAA,IAET;AACE;AAAA,EACJ;AACF;AAQA,MAAM,YAAuB,OAAO,EAAE,UAAU,OAAO,YAAY,MAAM;AACvE,QAAM,SAAkB,KAAK,MAAM,KAAK;AAExC,MAAI,CAAC,eAAe,MAAM,MAAM,GAAG;AACjC;AAAA,EACF;AAEA,SAAO,QAAQ,QAAQ,WAAW;AAElC,QAAM,gBAAAA,QAAG,SAAS;AAAA,IAChB;AAAA,QACA,gCAAe,KAAK,UAAU,MAAM,GAAG,EAAE,QAAQ,OAAO,CAAC;AAAA,EAC3D;AAEA;AACF;AAEA,MAAM,aAAwB,OAAO,EAAE,UAAU,OAAO,YAAY,MAAM;AACxE,QAAM,SAAkB,OAAO,SAAS,KAAK;AAE7C,MAAI,CAAC,eAAe,MAAM,MAAM,GAAG;AACjC;AAAA,EACF;AAEA,SAAO,QAAQ,QAAQ,WAAW;AAElC,QAAM,gBAAAA,QAAG,SAAS;AAAA,IAChB;AAAA,QACA,gCAAe,OAAO,MAAM,OAAO,MAAM,GAAG,EAAE,QAAQ,QAAQ,CAAC;AAAA,EACjE;AAEA;AACF;AAEA,MAAM,kBAAuD;AAAA,EAC3D,MAAM;AAAA,EACN,OAAO;AACT;AAEA,MAAM,sBAAsB,OAAO,QAAgB;AACjD,QAAM,eAAW,4CAA4B,GAAG;AAEhD,QAAM,EAAE,MAAM,IAAI,MAAM,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAEnD,QAAM,cAAc,sBAAsB,KAAK;AAE/C,MAAI,CAAC,aAAa;AAEhB;AAAA,EACF;AAEA,QAAM,eAAe,MAAM,QAAQ;AAAA,IACjC,0CAA0B,IAAI,OAAO,cAAc;AAAA,MACjD,OAAO,MAAM,SAAS,QAAQ;AAAA,MAC9B;AAAA,IACF,EAAE;AAAA,EACJ;AAEA,QAAM,SAAS,aAAa,KAAK,CAAC,gBAAgB,QAAQ,YAAY,KAAK,CAAC;AAE5E;AAAA;AAAA,IAEE,CAAC,QAAQ;AAAA;AAAA;AAAA,IAIT,OAAO,MAAM,SAAS,WAAW;AAAA,IACjC;AACA;AAAA,EACF;AAEA,QAAM,WAA6B,OAAO,SACvC,YAAY,EACZ,SAAS,QAAQ,IAChB,UACA;AAEJ,QAAM,YAAY,gBAAgB,QAAQ;AAE1C,QAAM,UAAU;AAAA,IACd,UAAU,OAAO;AAAA,IACjB,OAAO,OAAO;AAAA,IACd;AAAA,EACF,CAAC;AACH;AAEO,MAAM,yBAAyB,OAAO,MAAM,QAAQ,IAAI,MAAM;AACnE,MAAI;AACF,UAAM,oBAAoB,GAAG;AAAA,EAC/B,SAAS,KAAP;AACA,uBAAI,KAAK,kCAAkC;AAC3C,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAAA,EACzB;AACF;",
+ "sourcesContent": ["/* eslint-disable new-cap */\n\nimport { inspect } from 'util';\n\nimport fs from 'fs-extra';\nimport * as fleece from 'golden-fleece';\nimport * as t from 'runtypes';\n\nimport * as Git from '../../api/git';\nimport { log } from '../../utils/logging';\n\nimport { createDestinationFileReader } from './analysis/project';\nimport { RENOVATE_CONFIG_FILENAMES } from './modules/renovate';\nimport { formatPrettier } from './processing/prettier';\n\nconst RENOVATE_PRESETS = [\n 'local>seekasia/renovate-config',\n 'local>seek-jobs/renovate-config',\n] as const;\n\nconst EXISTING_REPO_PRESET_REGEX = /(github|local)>(seek-jobs|seekasia)\\//;\n\ntype RenovateFiletype = 'json' | 'json5';\n\ntype RenovatePreset = (typeof RENOVATE_PRESETS)[number];\n\nconst RenovateConfig = t.Record({\n extends: t.Array(t.String),\n});\n\nconst ownerToRenovatePreset = (owner: string): RenovatePreset | undefined => {\n const lowercaseOwner = owner.toLowerCase();\n\n switch (lowercaseOwner) {\n case 'seekasia':\n return 'local>seekasia/renovate-config';\n\n case 'seek-jobs':\n return 'local>seek-jobs/renovate-config';\n\n default:\n return;\n }\n};\n\ntype PatchFile = (props: {\n filepath: string;\n input: string;\n presetToAdd: RenovatePreset;\n}) => Promise<void>;\n\nconst patchJson: PatchFile = async ({ filepath, input, presetToAdd }) => {\n const config: unknown = JSON.parse(input);\n\n if (!RenovateConfig.guard(config)) {\n return;\n }\n\n config.extends.unshift(presetToAdd);\n\n await fs.promises.writeFile(\n filepath,\n formatPrettier(JSON.stringify(config), { parser: 'json' }),\n );\n\n return;\n};\n\nconst patchJson5: PatchFile = async ({ filepath, input, presetToAdd }) => {\n const config: unknown = fleece.evaluate(input);\n\n if (!RenovateConfig.guard(config)) {\n return;\n }\n\n config.extends.unshift(presetToAdd);\n\n await fs.promises.writeFile(\n filepath,\n formatPrettier(fleece.patch(input, config), { parser: 'json5' }),\n );\n\n return;\n};\n\nconst patchByFiletype: Record<RenovateFiletype, PatchFile> = {\n json: patchJson,\n json5: patchJson5,\n};\n\nconst patchRenovateConfig = async (dir: string) => {\n const readFile = createDestinationFileReader(dir);\n\n const { owner } = await Git.getOwnerAndRepo({ dir });\n\n const presetToAdd = ownerToRenovatePreset(owner);\n\n if (!presetToAdd) {\n // No baseline preset needs to be added for the configured Git owner.\n return;\n }\n\n const maybeConfigs = await Promise.all(\n RENOVATE_CONFIG_FILENAMES.map(async (filepath) => ({\n input: await readFile(filepath),\n filepath,\n })),\n );\n\n const config = maybeConfigs.find((maybeConfig) => Boolean(maybeConfig.input));\n\n if (\n // No file was found.\n !config?.input ||\n // The file appears to mention the baseline preset for the configured Git\n // owner. This is a very naive check that we don't want to overcomplicate\n // because it is invoked before each skuba format and lint.\n config.input.includes(presetToAdd) ||\n // Ignore any renovate configuration which already extends a SEEK-Jobs or seekasia config\n EXISTING_REPO_PRESET_REGEX.exec(config.input)\n ) {\n return;\n }\n\n const filetype: RenovateFiletype = config.filepath\n .toLowerCase()\n .endsWith('.json5')\n ? 
'json5'\n : 'json';\n\n const patchFile = patchByFiletype[filetype];\n\n await patchFile({\n filepath: config.filepath,\n input: config.input,\n presetToAdd,\n });\n};\n\nexport const tryPatchRenovateConfig = async (dir = process.cwd()) => {\n try {\n await patchRenovateConfig(dir);\n } catch (err) {\n log.warn('Failed to patch Renovate config.');\n log.subtle(inspect(err));\n }\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,kBAAwB;AAExB,sBAAe;AACf,aAAwB;AACxB,QAAmB;AAEnB,UAAqB;AACrB,qBAAoB;AAEpB,qBAA4C;AAC5C,sBAA0C;AAC1C,sBAA+B;AAE/B,MAAM,mBAAmB;AAAA,EACvB;AAAA,EACA;AACF;AAEA,MAAM,6BAA6B;AAMnC,MAAM,iBAAiB,EAAE,OAAO;AAAA,EAC9B,SAAS,EAAE,MAAM,EAAE,MAAM;AAC3B,CAAC;AAED,MAAM,wBAAwB,CAAC,UAA8C;AAC3E,QAAM,iBAAiB,MAAM,YAAY;AAEzC,UAAQ,gBAAgB;AAAA,IACtB,KAAK;AACH,aAAO;AAAA,IAET,KAAK;AACH,aAAO;AAAA,IAET;AACE;AAAA,EACJ;AACF;AAQA,MAAM,YAAuB,OAAO,EAAE,UAAU,OAAO,YAAY,MAAM;AACvE,QAAM,SAAkB,KAAK,MAAM,KAAK;AAExC,MAAI,CAAC,eAAe,MAAM,MAAM,GAAG;AACjC;AAAA,EACF;AAEA,SAAO,QAAQ,QAAQ,WAAW;AAElC,QAAM,gBAAAA,QAAG,SAAS;AAAA,IAChB;AAAA,QACA,gCAAe,KAAK,UAAU,MAAM,GAAG,EAAE,QAAQ,OAAO,CAAC;AAAA,EAC3D;AAEA;AACF;AAEA,MAAM,aAAwB,OAAO,EAAE,UAAU,OAAO,YAAY,MAAM;AACxE,QAAM,SAAkB,OAAO,SAAS,KAAK;AAE7C,MAAI,CAAC,eAAe,MAAM,MAAM,GAAG;AACjC;AAAA,EACF;AAEA,SAAO,QAAQ,QAAQ,WAAW;AAElC,QAAM,gBAAAA,QAAG,SAAS;AAAA,IAChB;AAAA,QACA,gCAAe,OAAO,MAAM,OAAO,MAAM,GAAG,EAAE,QAAQ,QAAQ,CAAC;AAAA,EACjE;AAEA;AACF;AAEA,MAAM,kBAAuD;AAAA,EAC3D,MAAM;AAAA,EACN,OAAO;AACT;AAEA,MAAM,sBAAsB,OAAO,QAAgB;AACjD,QAAM,eAAW,4CAA4B,GAAG;AAEhD,QAAM,EAAE,MAAM,IAAI,MAAM,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAEnD,QAAM,cAAc,sBAAsB,KAAK;AAE/C,MAAI,CAAC,aAAa;AAEhB;AAAA,EACF;AAEA,QAAM,eAAe,MAAM,QAAQ;AAAA,IACjC,0CAA0B,IAAI,OAAO,cAAc;AAAA,MACjD,OAAO,MAAM,SAAS,QAAQ;AAAA,MAC9B;AAAA,IACF,EAAE;AAAA,EACJ;AAEA,QAAM,SAAS,aAAa,KAAK,CAAC,gBAAgB,QAAQ,YAAY,KAAK,CAAC;AAE5E;AAAA;AAAA,IAEE,CAAC,QAAQ;AAAA;AAAA;AAAA,IAIT,OAAO,MAAM,SAAS,WAAW;AAAA,IAEjC,2BAA2B,KAAK,OAAO,KAAK;AAAA,IAC5C;AACA;AAAA,EACF;AAEA,QAAM,WAA6B,OAAO,SACvC,YAAY,EACZ,SAAS,QAAQ,IAChB,UACA;AAEJ,QAAM,YAAY,gBAAgB,QAAQ;AAE1C,QAAM,UAAU;AAAA,IACd,UAAU,OAAO;AAAA,IACjB,OAAO,OAAO;AAAA,IACd;AAAA,EACF,CAAC;AACH;AAEO,MAAM,yBAAyB,OAAO,MAAM,QAAQ,IAAI,MAAM;AACnE,MAAI;AACF,UAAM,oBAAoB,GAAG;AAAA,EAC/B,SAAS,KAAP;AACA,uBAAI,KAAK,kCAAkC;AAC3C,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAAA,EACzB;AACF;",
  "names": ["fs"]
  }
package/lib/cli/configure/patchServerListener.d.ts ADDED
@@ -0,0 +1,3 @@
+ export declare const KEEP_ALIVE_CODE = "\n// Gantry ALB default idle timeout is 30 seconds\n// https://nodejs.org/docs/latest-v18.x/api/http.html#serverkeepalivetimeout\n// Node default is 5 seconds\n// https://docs.aws.amazon.com/elasticloadbalancing/latest/application/application-load-balancers.html#connection-idle-timeout\n// AWS recommends setting an application timeout larger than the load balancer\nlistener.keepAliveTimeout = 31000;\n";
+ export declare const SERVER_LISTENER_FILENAME = "src/listen.ts";
+ export declare const tryPatchServerListener: (dir?: string) => Promise<void>;
package/lib/cli/configure/patchServerListener.js ADDED
@@ -0,0 +1,87 @@
+ "use strict";
+ var __create = Object.create;
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __getProtoOf = Object.getPrototypeOf;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+ };
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+ // If the importer is in node compatibility mode or this is not an ESM
+ // file that has been converted to a CommonJS file using a Babel-
+ // compatible transform (i.e. "__esModule" has not been set), then set
+ // "default" to the CommonJS "module.exports" for node compatibility.
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+ mod
+ ));
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+ var patchServerListener_exports = {};
+ __export(patchServerListener_exports, {
+ KEEP_ALIVE_CODE: () => KEEP_ALIVE_CODE,
+ SERVER_LISTENER_FILENAME: () => SERVER_LISTENER_FILENAME,
+ tryPatchServerListener: () => tryPatchServerListener
+ });
+ module.exports = __toCommonJS(patchServerListener_exports);
+ var import_util = require("util");
+ var import_fs_extra = __toESM(require("fs-extra"));
+ var import_logging = require("../../utils/logging");
+ var import_project = require("./analysis/project");
+ var import_prettier = require("./processing/prettier");
+ const KEEP_ALIVE_CODE = `
+ // Gantry ALB default idle timeout is 30 seconds
+ // https://nodejs.org/docs/latest-v18.x/api/http.html#serverkeepalivetimeout
+ // Node default is 5 seconds
+ // https://docs.aws.amazon.com/elasticloadbalancing/latest/application/application-load-balancers.html#connection-idle-timeout
+ // AWS recommends setting an application timeout larger than the load balancer
+ listener.keepAliveTimeout = 31000;
+ `;
+ const SERVER_LISTENER_FILENAME = "src/listen.ts";
+ const patchServerListener = async (dir) => {
+ const readFile = (0, import_project.createDestinationFileReader)(dir);
+ let listener = await readFile(SERVER_LISTENER_FILENAME);
+ if (!listener || listener.includes("keepAliveTimeout")) {
+ return;
+ }
+ if (listener.includes("\napp.listen(")) {
+ listener = listener.replace(
+ "\napp.listen(",
+ "\nconst listener = app.listen("
+ );
+ }
+ if (!listener.includes("\nconst listener = app.listen(")) {
+ return;
+ }
+ listener = `${listener}${KEEP_ALIVE_CODE}`;
+ await import_fs_extra.default.promises.writeFile(
+ SERVER_LISTENER_FILENAME,
+ (0, import_prettier.formatPrettier)(listener, {
+ parser: "typescript"
+ })
+ );
+ };
+ const tryPatchServerListener = async (dir = process.cwd()) => {
+ try {
+ await patchServerListener(dir);
+ } catch (err) {
+ import_logging.log.warn("Failed to patch server listener.");
+ import_logging.log.subtle((0, import_util.inspect)(err));
+ }
+ };
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+ KEEP_ALIVE_CODE,
+ SERVER_LISTENER_FILENAME,
+ tryPatchServerListener
+ });
+ //# sourceMappingURL=patchServerListener.js.map
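Taken together, patchServerListener rewrites a project's src/listen.ts so that a bare app.listen(...) call captures its server handle and raises keepAliveTimeout above the 30-second Gantry ALB idle timeout; it bails out when the file is missing or already mentions keepAliveTimeout, so the codemod is idempotent across repeated runs of skuba format and skuba lint. A hedged before/after sketch (the listen.ts contents are hypothetical; only the transformation mirrors the code above, with the appended comments abridged):

// src/listen.ts, before:
const app = createApp();

app.listen(config.port);

// src/listen.ts, after the codemod runs:
const app = createApp();

const listener = app.listen(config.port);

// Gantry ALB default idle timeout is 30 seconds
// Node default is 5 seconds
// AWS recommends setting an application timeout larger than the load balancer
listener.keepAliveTimeout = 31000;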
package/lib/cli/configure/patchServerListener.js.map ADDED
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../../src/cli/configure/patchServerListener.ts"],
+ "sourcesContent": ["import { inspect } from 'util';\n\nimport fs from 'fs-extra';\n\nimport { log } from '../../utils/logging';\n\nimport { createDestinationFileReader } from './analysis/project';\nimport { formatPrettier } from './processing/prettier';\n\nexport const KEEP_ALIVE_CODE = `\n// Gantry ALB default idle timeout is 30 seconds\n// https://nodejs.org/docs/latest-v18.x/api/http.html#serverkeepalivetimeout\n// Node default is 5 seconds\n// https://docs.aws.amazon.com/elasticloadbalancing/latest/application/application-load-balancers.html#connection-idle-timeout\n// AWS recommends setting an application timeout larger than the load balancer\nlistener.keepAliveTimeout = 31000;\n`;\n\nexport const SERVER_LISTENER_FILENAME = 'src/listen.ts';\n\nconst patchServerListener = async (dir: string) => {\n const readFile = createDestinationFileReader(dir);\n\n let listener = await readFile(SERVER_LISTENER_FILENAME);\n\n if (!listener || listener.includes('keepAliveTimeout')) {\n return;\n }\n\n if (listener.includes('\\napp.listen(')) {\n listener = listener.replace(\n '\\napp.listen(',\n '\\nconst listener = app.listen(',\n );\n }\n\n if (!listener.includes('\\nconst listener = app.listen(')) {\n return;\n }\n\n listener = `${listener}${KEEP_ALIVE_CODE}`;\n\n await fs.promises.writeFile(\n SERVER_LISTENER_FILENAME,\n formatPrettier(listener, {\n parser: 'typescript',\n }),\n );\n};\n\nexport const tryPatchServerListener = async (dir = process.cwd()) => {\n try {\n await patchServerListener(dir);\n } catch (err) {\n log.warn('Failed to patch server listener.');\n log.subtle(inspect(err));\n }\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,sBAAe;AAEf,qBAAoB;AAEpB,qBAA4C;AAC5C,sBAA+B;AAExB,MAAM,kBAAkB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AASxB,MAAM,2BAA2B;AAExC,MAAM,sBAAsB,OAAO,QAAgB;AACjD,QAAM,eAAW,4CAA4B,GAAG;AAEhD,MAAI,WAAW,MAAM,SAAS,wBAAwB;AAEtD,MAAI,CAAC,YAAY,SAAS,SAAS,kBAAkB,GAAG;AACtD;AAAA,EACF;AAEA,MAAI,SAAS,SAAS,eAAe,GAAG;AACtC,eAAW,SAAS;AAAA,MAClB;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,MAAI,CAAC,SAAS,SAAS,gCAAgC,GAAG;AACxD;AAAA,EACF;AAEA,aAAW,GAAG,WAAW;AAEzB,QAAM,gBAAAA,QAAG,SAAS;AAAA,IAChB;AAAA,QACA,gCAAe,UAAU;AAAA,MACvB,QAAQ;AAAA,IACV,CAAC;AAAA,EACH;AACF;AAEO,MAAM,yBAAyB,OAAO,MAAM,QAAQ,IAAI,MAAM;AACnE,MAAI;AACF,UAAM,oBAAoB,GAAG;AAAA,EAC/B,SAAS,KAAP;AACA,uBAAI,KAAK,kCAAkC;AAC3C,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAAA,EACzB;AACF;",
+ "names": ["fs"]
+ }
package/lib/cli/format.js CHANGED
@@ -38,11 +38,13 @@ var import_eslint = require("./adapter/eslint");
  var import_prettier = require("./adapter/prettier");
  var import_addEmptyExports = require("./configure/addEmptyExports");
  var import_patchRenovateConfig = require("./configure/patchRenovateConfig");
+ var import_patchServerListener = require("./configure/patchServerListener");
  var import_refreshIgnoreFiles = require("./configure/refreshIgnoreFiles");
  const format = async (args = process.argv.slice(2)) => {
  await Promise.all([
  (0, import_addEmptyExports.tryAddEmptyExports)(),
  (0, import_patchRenovateConfig.tryPatchRenovateConfig)(),
+ (0, import_patchServerListener.tryPatchServerListener)(),
  (0, import_refreshIgnoreFiles.tryRefreshIgnoreFiles)()
  ]);
  const debug = (0, import_args.hasDebugFlag)(args);
package/lib/cli/format.js.map CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../src/cli/format.ts"],
- "sourcesContent": ["import chalk from 'chalk';\n\nimport { hasDebugFlag } from '../utils/args';\nimport { createLogger, log } from '../utils/logging';\n\nimport { runESLint } from './adapter/eslint';\nimport { runPrettier } from './adapter/prettier';\nimport { tryAddEmptyExports } from './configure/addEmptyExports';\nimport { tryPatchRenovateConfig } from './configure/patchRenovateConfig';\nimport { tryRefreshIgnoreFiles } from './configure/refreshIgnoreFiles';\n\nexport const format = async (args = process.argv.slice(2)): Promise<void> => {\n await Promise.all([\n tryAddEmptyExports(),\n tryPatchRenovateConfig(),\n tryRefreshIgnoreFiles(),\n ]);\n\n const debug = hasDebugFlag(args);\n const logger = createLogger(debug);\n\n log.plain(chalk.magenta('ESLint'));\n\n const eslint = await runESLint('format', logger);\n\n log.newline();\n log.plain(chalk.cyan('Prettier'));\n\n const prettier = await runPrettier('format', logger);\n\n if (eslint.ok && prettier.ok) {\n return;\n }\n\n const tools = [\n ...(eslint.ok ? [] : ['ESLint']),\n ...(prettier.ok ? [] : ['Prettier']),\n ];\n\n log.newline();\n log.err(tools.join(', '), 'found issues that require triage.');\n\n process.exitCode = 1;\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAkB;AAElB,kBAA6B;AAC7B,qBAAkC;AAElC,oBAA0B;AAC1B,sBAA4B;AAC5B,6BAAmC;AACnC,iCAAuC;AACvC,gCAAsC;AAE/B,MAAM,SAAS,OAAO,OAAO,QAAQ,KAAK,MAAM,CAAC,MAAqB;AAC3E,QAAM,QAAQ,IAAI;AAAA,QAChB,2CAAmB;AAAA,QACnB,mDAAuB;AAAA,QACvB,iDAAsB;AAAA,EACxB,CAAC;AAED,QAAM,YAAQ,0BAAa,IAAI;AAC/B,QAAM,aAAS,6BAAa,KAAK;AAEjC,qBAAI,MAAM,aAAAA,QAAM,QAAQ,QAAQ,CAAC;AAEjC,QAAM,SAAS,UAAM,yBAAU,UAAU,MAAM;AAE/C,qBAAI,QAAQ;AACZ,qBAAI,MAAM,aAAAA,QAAM,KAAK,UAAU,CAAC;AAEhC,QAAM,WAAW,UAAM,6BAAY,UAAU,MAAM;AAEnD,MAAI,OAAO,MAAM,SAAS,IAAI;AAC5B;AAAA,EACF;AAEA,QAAM,QAAQ;AAAA,IACZ,GAAI,OAAO,KAAK,CAAC,IAAI,CAAC,QAAQ;AAAA,IAC9B,GAAI,SAAS,KAAK,CAAC,IAAI,CAAC,UAAU;AAAA,EACpC;AAEA,qBAAI,QAAQ;AACZ,qBAAI,IAAI,MAAM,KAAK,IAAI,GAAG,mCAAmC;AAE7D,UAAQ,WAAW;AACrB;",
+ "sourcesContent": ["import chalk from 'chalk';\n\nimport { hasDebugFlag } from '../utils/args';\nimport { createLogger, log } from '../utils/logging';\n\nimport { runESLint } from './adapter/eslint';\nimport { runPrettier } from './adapter/prettier';\nimport { tryAddEmptyExports } from './configure/addEmptyExports';\nimport { tryPatchRenovateConfig } from './configure/patchRenovateConfig';\nimport { tryPatchServerListener } from './configure/patchServerListener';\nimport { tryRefreshIgnoreFiles } from './configure/refreshIgnoreFiles';\n\nexport const format = async (args = process.argv.slice(2)): Promise<void> => {\n await Promise.all([\n tryAddEmptyExports(),\n tryPatchRenovateConfig(),\n tryPatchServerListener(),\n tryRefreshIgnoreFiles(),\n ]);\n\n const debug = hasDebugFlag(args);\n const logger = createLogger(debug);\n\n log.plain(chalk.magenta('ESLint'));\n\n const eslint = await runESLint('format', logger);\n\n log.newline();\n log.plain(chalk.cyan('Prettier'));\n\n const prettier = await runPrettier('format', logger);\n\n if (eslint.ok && prettier.ok) {\n return;\n }\n\n const tools = [\n ...(eslint.ok ? [] : ['ESLint']),\n ...(prettier.ok ? [] : ['Prettier']),\n ];\n\n log.newline();\n log.err(tools.join(', '), 'found issues that require triage.');\n\n process.exitCode = 1;\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAkB;AAElB,kBAA6B;AAC7B,qBAAkC;AAElC,oBAA0B;AAC1B,sBAA4B;AAC5B,6BAAmC;AACnC,iCAAuC;AACvC,iCAAuC;AACvC,gCAAsC;AAE/B,MAAM,SAAS,OAAO,OAAO,QAAQ,KAAK,MAAM,CAAC,MAAqB;AAC3E,QAAM,QAAQ,IAAI;AAAA,QAChB,2CAAmB;AAAA,QACnB,mDAAuB;AAAA,QACvB,mDAAuB;AAAA,QACvB,iDAAsB;AAAA,EACxB,CAAC;AAED,QAAM,YAAQ,0BAAa,IAAI;AAC/B,QAAM,aAAS,6BAAa,KAAK;AAEjC,qBAAI,MAAM,aAAAA,QAAM,QAAQ,QAAQ,CAAC;AAEjC,QAAM,SAAS,UAAM,yBAAU,UAAU,MAAM;AAE/C,qBAAI,QAAQ;AACZ,qBAAI,MAAM,aAAAA,QAAM,KAAK,UAAU,CAAC;AAEhC,QAAM,WAAW,UAAM,6BAAY,UAAU,MAAM;AAEnD,MAAI,OAAO,MAAM,SAAS,IAAI;AAC5B;AAAA,EACF;AAEA,QAAM,QAAQ;AAAA,IACZ,GAAI,OAAO,KAAK,CAAC,IAAI,CAAC,QAAQ;AAAA,IAC9B,GAAI,SAAS,KAAK,CAAC,IAAI,CAAC,UAAU;AAAA,EACpC;AAEA,qBAAI,QAAQ;AACZ,qBAAI,IAAI,MAAM,KAAK,IAAI,GAAG,mCAAmC;AAE7D,UAAQ,WAAW;AACrB;",
  "names": ["chalk"]
  }
package/lib/cli/lint/autofix.js CHANGED
@@ -45,6 +45,7 @@ var import_eslint = require("../adapter/eslint");
  var import_prettier = require("../adapter/prettier");
  var import_addEmptyExports = require("../configure/addEmptyExports");
  var import_renovate = require("../configure/modules/renovate");
+ var import_patchServerListener = require("../configure/patchServerListener");
  var import_refreshIgnoreFiles = require("../configure/refreshIgnoreFiles");
  const AUTOFIX_COMMIT_MESSAGE = "Run `skuba format`";
  const AUTOFIX_DELETE_FILES = [
@@ -56,13 +57,20 @@ const AUTOFIX_CODEGEN_FILES = /* @__PURE__ */ new Set([
  ...AUTOFIX_DELETE_FILES,
  ...import_addEmptyExports.JEST_SETUP_FILES,
  ...import_refreshIgnoreFiles.REFRESHABLE_IGNORE_FILES,
- ...import_renovate.RENOVATE_CONFIG_FILENAMES
+ ...import_renovate.RENOVATE_CONFIG_FILENAMES,
+ import_patchServerListener.SERVER_LISTENER_FILENAME
  ]);
  const AUTOFIX_IGNORE_FILES = [
  {
  path: ".npmrc",
  state: "added"
  },
+ {
+ // This file may already exist in version control, but we shouldn't commit
+ // further changes as the CI environment may have appended an npm token.
+ path: ".npmrc",
+ state: "modified"
+ },
  {
  path: "Dockerfile-incunabulum",
  state: "added"
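The autofixer previously only ignored a newly added .npmrc; it now also ignores a modified one, since a CI environment may have appended an npm token that must never be committed. A sketch of how such an ignore list of { path, state } pairs could be applied when scanning changed files, assuming Git.ChangedFile carries exactly those two fields (this is illustrative, not skuba's actual filtering code):

type ChangedFile = { path: string; state: string };

const isIgnored = (file: ChangedFile, ignore: ChangedFile[]) =>
  ignore.some((entry) => entry.path === file.path && entry.state === file.state);

// With the new entry, a modified .npmrc is skipped by the autofix commit...
isIgnored({ path: '.npmrc', state: 'modified' }, AUTOFIX_IGNORE_FILES); // true
// ...while other states of the same file would still be picked up.
isIgnored({ path: '.npmrc', state: 'deleted' }, AUTOFIX_IGNORE_FILES); // false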
package/lib/cli/lint/autofix.js.map CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/cli/lint/autofix.ts"],
- "sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport fs from 'fs-extra';\nimport simpleGit from 'simple-git';\n\nimport * as Git from '../../api/git';\nimport * as GitHub from '../../api/github';\nimport { isCiEnv } from '../../utils/env';\nimport { createLogger, log } from '../../utils/logging';\nimport { throwOnTimeout } from '../../utils/wait';\nimport { runESLint } from '../adapter/eslint';\nimport { runPrettier } from '../adapter/prettier';\nimport { JEST_SETUP_FILES } from '../configure/addEmptyExports';\nimport { RENOVATE_CONFIG_FILENAMES } from '../configure/modules/renovate';\nimport { REFRESHABLE_IGNORE_FILES } from '../configure/refreshIgnoreFiles';\n\nimport type { Input } from './types';\n\nconst AUTOFIX_COMMIT_MESSAGE = 'Run `skuba format`';\n\nconst AUTOFIX_DELETE_FILES = [\n // Try to delete this SEEK-Jobs/gutenberg automation file that may have been\n // accidentally committed in a prior autofix.\n 'Dockerfile-incunabulum',\n];\n\nconst AUTOFIX_CODEGEN_FILES = new Set<string>([\n ...AUTOFIX_DELETE_FILES,\n ...JEST_SETUP_FILES,\n ...REFRESHABLE_IGNORE_FILES,\n ...RENOVATE_CONFIG_FILENAMES,\n]);\n\nexport const AUTOFIX_IGNORE_FILES: Git.ChangedFile[] = [\n {\n path: '.npmrc',\n state: 'added',\n },\n {\n path: 'Dockerfile-incunabulum',\n state: 'added',\n },\n];\n\nconst shouldPush = async ({\n currentBranch,\n dir,\n}: {\n currentBranch?: string;\n dir: string;\n}) => {\n if (!isCiEnv()) {\n // We're not running in a CI environment so we don't need to push autofixes.\n // Ideally we'd drive this off of repository write permissions, but that is\n // non-trivial to infer without attempting an actual write.\n return false;\n }\n\n const isDefaultBuildkiteBranch =\n currentBranch &&\n [process.env.BUILDKITE_PIPELINE_DEFAULT_BRANCH, 'master', 'main'].includes(\n currentBranch,\n );\n\n const isProtectedGitHubBranch = process.env.GITHUB_REF_PROTECTED === 'true';\n\n if (isDefaultBuildkiteBranch || isProtectedGitHubBranch) {\n // The current branch is a protected branch.\n // We respect GitHub Flow; avoid pushing directly to the default branch.\n return false;\n }\n\n let headCommitMessage;\n try {\n headCommitMessage = await Git.getHeadCommitMessage({ dir });\n } catch {}\n\n if (headCommitMessage === AUTOFIX_COMMIT_MESSAGE) {\n // Short circuit when the head commit appears to be one of our autofixes.\n // Repeating the same operation is unlikely to correct outstanding issues.\n return false;\n }\n\n // Allow the push attempt to go ahead if our guards have been cleared.\n return true;\n};\n\ninterface AutofixParameters {\n debug: Input['debug'];\n\n eslint: boolean;\n prettier: boolean;\n}\n\n/**\n * @returns Whether skuba codegenned a file change which should be included in\n * an autofix commit.\n */\nconst tryCodegen = async (dir: string): Promise<boolean> => {\n try {\n // Try to forcibly remove `AUTOFIX_DELETE_FILES` from source control.\n // These may include outdated configuration files or internal files that\n // were accidentally committed by an autofix.\n await Promise.all(\n AUTOFIX_DELETE_FILES.map((filename) =>\n fs.promises.rm(path.join(dir, filename), { force: true }),\n ),\n );\n\n // Search codegenned file changes in the local Git working directory.\n // These may include the `AUTOFIX_DELETE_FILES` deleted above or fixups to\n // ignore files and module exports that were run at the start of the\n // `skuba lint` command.\n const changedFiles = await Git.getChangedFiles({\n dir,\n\n ignore: AUTOFIX_IGNORE_FILES,\n });\n\n // 
Determine if a meaningful codegen change\n return changedFiles.some((changedFile) =>\n AUTOFIX_CODEGEN_FILES.has(changedFile.path),\n );\n } catch (err) {\n log.warn(log.bold('Failed to evaluate codegen changes.'));\n log.subtle(inspect(err));\n\n return false;\n }\n};\n\nexport const autofix = async (params: AutofixParameters): Promise<void> => {\n const dir = process.cwd();\n\n const codegen = await tryCodegen(dir);\n\n if (!params.eslint && !params.prettier && !codegen) {\n return;\n }\n\n let currentBranch;\n try {\n currentBranch = await Git.currentBranch({ dir });\n } catch {}\n\n if (!(await shouldPush({ currentBranch, dir }))) {\n return;\n }\n\n try {\n log.newline();\n if (!params.eslint && !params.prettier) {\n log.warn('Trying to push codegen updates...');\n } else {\n log.warn(\n `Trying to autofix with ${\n params.eslint ? 'ESLint and ' : ''\n }Prettier...`,\n );\n\n const logger = createLogger(params.debug);\n\n if (params.eslint) {\n await runESLint('format', logger);\n }\n // Unconditionally re-run Prettier; reaching here means we have pre-existing\n // format violations or may have created new ones through ESLint fixes.\n await runPrettier('format', logger);\n }\n\n if (process.env.GITHUB_ACTIONS) {\n // GitHub runners have Git installed locally\n const ref = await Git.commitAllChanges({\n dir,\n message: AUTOFIX_COMMIT_MESSAGE,\n\n ignore: AUTOFIX_IGNORE_FILES,\n });\n\n if (!ref) {\n return log.warn('No autofixes detected.');\n }\n\n await throwOnTimeout(simpleGit().push(), { s: 30 });\n log.warn(`Pushed fix commit ${ref}.`);\n return;\n }\n\n // Other CI Environments, use GitHub API\n if (!currentBranch) {\n log.warn('Could not determine the current branch.');\n log.warn(\n 'Please propagate BUILDKITE_BRANCH, GITHUB_HEAD_REF, GITHUB_REF_NAME, or the .git directory to your container.',\n );\n return;\n }\n\n const ref = await throwOnTimeout(\n GitHub.uploadAllFileChanges({\n branch: currentBranch,\n dir,\n messageHeadline: AUTOFIX_COMMIT_MESSAGE,\n\n ignore: AUTOFIX_IGNORE_FILES,\n }),\n { s: 30 },\n );\n\n if (!ref) {\n return log.warn('No autofixes detected.');\n }\n\n log.warn(`Pushed fix commit ${ref}.`);\n } catch (err) {\n log.warn(log.bold('Failed to push fix commit.'));\n log.warn(\n log.bold(\n 'Does your CI environment have write access to your Git repository?',\n ),\n );\n log.subtle(inspect(err));\n }\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,sBAAe;AACf,wBAAsB;AAEtB,UAAqB;AACrB,aAAwB;AACxB,iBAAwB;AACxB,qBAAkC;AAClC,kBAA+B;AAC/B,oBAA0B;AAC1B,sBAA4B;AAC5B,6BAAiC;AACjC,sBAA0C;AAC1C,gCAAyC;AAIzC,MAAM,yBAAyB;AAE/B,MAAM,uBAAuB;AAAA;AAAA;AAAA,EAG3B;AACF;AAEA,MAAM,wBAAwB,oBAAI,IAAY;AAAA,EAC5C,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AACL,CAAC;AAEM,MAAM,uBAA0C;AAAA,EACrD;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AACF;AAEA,MAAM,aAAa,OAAO;AAAA,EACxB;AAAA,EACA;AACF,MAGM;AACJ,MAAI,KAAC,oBAAQ,GAAG;AAId,WAAO;AAAA,EACT;AAEA,QAAM,2BACJ,iBACA,CAAC,QAAQ,IAAI,mCAAmC,UAAU,MAAM,EAAE;AAAA,IAChE;AAAA,EACF;AAEF,QAAM,0BAA0B,QAAQ,IAAI,yBAAyB;AAErE,MAAI,4BAA4B,yBAAyB;AAGvD,WAAO;AAAA,EACT;AAEA,MAAI;AACJ,MAAI;AACF,wBAAoB,MAAM,IAAI,qBAAqB,EAAE,IAAI,CAAC;AAAA,EAC5D,QAAE;AAAA,EAAO;AAET,MAAI,sBAAsB,wBAAwB;AAGhD,WAAO;AAAA,EACT;AAGA,SAAO;AACT;AAaA,MAAM,aAAa,OAAO,QAAkC;AAC1D,MAAI;AAIF,UAAM,QAAQ;AAAA,MACZ,qBAAqB;AAAA,QAAI,CAAC,aACxB,gBAAAA,QAAG,SAAS,GAAG,YAAAC,QAAK,KAAK,KAAK,QAAQ,GAAG,EAAE,OAAO,KAAK,CAAC;AAAA,MAC1D;AAAA,IACF;AAMA,UAAM,eAAe,MAAM,IAAI,gBAAgB;AAAA,MAC7C;AAAA,MAEA,QAAQ;AAAA,IACV,CAAC;AAGD,WAAO,aAAa;AAAA,MAAK,CAAC,gBACxB,sBAAsB,IAAI,YAAY,IAAI;AAAA,IAC5C;AAAA,EACF,SAAS,KAAP;AACA,uBAAI,KAAK,mBAAI,KAAK,qCAAqC,CAAC;AACxD,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAEvB,WAAO;AAAA,EACT;AACF;AAEO,MAAM,UAAU,OAAO,WAA6C;AACzE,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,UAAU,MAAM,WAAW,GAAG;AAEpC,MAAI,CAAC,OAAO,UAAU,CAAC,OAAO,YAAY,CAAC,SAAS;AAClD;AAAA,EACF;AAEA,MAAI;AACJ,MAAI;AACF,oBAAgB,MAAM,IAAI,cAAc,EAAE,IAAI,CAAC;AAAA,EACjD,QAAE;AAAA,EAAO;AAET,MAAI,CAAE,MAAM,WAAW,EAAE,eAAe,IAAI,CAAC,GAAI;AAC/C;AAAA,EACF;AAEA,MAAI;AACF,uBAAI,QAAQ;AACZ,QAAI,CAAC,OAAO,UAAU,CAAC,OAAO,UAAU;AACtC,yBAAI,KAAK,mCAAmC;AAAA,IAC9C,OAAO;AACL,yBAAI;AAAA,QACF,0BACE,OAAO,SAAS,gBAAgB;AAAA,MAEpC;AAEA,YAAM,aAAS,6BAAa,OAAO,KAAK;AAExC,UAAI,OAAO,QAAQ;AACjB,kBAAM,yBAAU,UAAU,MAAM;AAAA,MAClC;AAGA,gBAAM,6BAAY,UAAU,MAAM;AAAA,IACpC;AAEA,QAAI,QAAQ,IAAI,gBAAgB;AAE9B,YAAMC,OAAM,MAAM,IAAI,iBAAiB;AAAA,QACrC;AAAA,QACA,SAAS;AAAA,QAET,QAAQ;AAAA,MACV,CAAC;AAED,UAAI,CAACA,MAAK;AACR,eAAO,mBAAI,KAAK,wBAAwB;AAAA,MAC1C;AAEA,gBAAM,gCAAe,kBAAAC,SAAU,EAAE,KAAK,GAAG,EAAE,GAAG,GAAG,CAAC;AAClD,yBAAI,KAAK,qBAAqBD,OAAM;AACpC;AAAA,IACF;AAGA,QAAI,CAAC,eAAe;AAClB,yBAAI,KAAK,yCAAyC;AAClD,yBAAI;AAAA,QACF;AAAA,MACF;AACA;AAAA,IACF;AAEA,UAAM,MAAM,UAAM;AAAA,MAChB,OAAO,qBAAqB;AAAA,QAC1B,QAAQ;AAAA,QACR;AAAA,QACA,iBAAiB;AAAA,QAEjB,QAAQ;AAAA,MACV,CAAC;AAAA,MACD,EAAE,GAAG,GAAG;AAAA,IACV;AAEA,QAAI,CAAC,KAAK;AACR,aAAO,mBAAI,KAAK,wBAAwB;AAAA,IAC1C;AAEA,uBAAI,KAAK,qBAAqB,MAAM;AAAA,EACtC,SAAS,KAAP;AACA,uBAAI,KAAK,mBAAI,KAAK,4BAA4B,CAAC;AAC/C,uBAAI;AAAA,MACF,mBAAI;AAAA,QACF;AAAA,MACF;AAAA,IACF;AACA,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAAA,EACzB;AACF;",
+ "sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport fs from 'fs-extra';\nimport simpleGit from 'simple-git';\n\nimport * as Git from '../../api/git';\nimport * as GitHub from '../../api/github';\nimport { isCiEnv } from '../../utils/env';\nimport { createLogger, log } from '../../utils/logging';\nimport { throwOnTimeout } from '../../utils/wait';\nimport { runESLint } from '../adapter/eslint';\nimport { runPrettier } from '../adapter/prettier';\nimport { JEST_SETUP_FILES } from '../configure/addEmptyExports';\nimport { RENOVATE_CONFIG_FILENAMES } from '../configure/modules/renovate';\nimport { SERVER_LISTENER_FILENAME } from '../configure/patchServerListener';\nimport { REFRESHABLE_IGNORE_FILES } from '../configure/refreshIgnoreFiles';\n\nimport type { Input } from './types';\n\nconst AUTOFIX_COMMIT_MESSAGE = 'Run `skuba format`';\n\nconst AUTOFIX_DELETE_FILES = [\n // Try to delete this SEEK-Jobs/gutenberg automation file that may have been\n // accidentally committed in a prior autofix.\n 'Dockerfile-incunabulum',\n];\n\nconst AUTOFIX_CODEGEN_FILES = new Set<string>([\n ...AUTOFIX_DELETE_FILES,\n ...JEST_SETUP_FILES,\n ...REFRESHABLE_IGNORE_FILES,\n ...RENOVATE_CONFIG_FILENAMES,\n SERVER_LISTENER_FILENAME,\n]);\n\nexport const AUTOFIX_IGNORE_FILES: Git.ChangedFile[] = [\n {\n path: '.npmrc',\n state: 'added',\n },\n {\n // This file may already exist in version control, but we shouldn't commit\n // further changes as the CI environment may have appended an npm token.\n path: '.npmrc',\n state: 'modified',\n },\n {\n path: 'Dockerfile-incunabulum',\n state: 'added',\n },\n];\n\nconst shouldPush = async ({\n currentBranch,\n dir,\n}: {\n currentBranch?: string;\n dir: string;\n}) => {\n if (!isCiEnv()) {\n // We're not running in a CI environment so we don't need to push autofixes.\n // Ideally we'd drive this off of repository write permissions, but that is\n // non-trivial to infer without attempting an actual write.\n return false;\n }\n\n const isDefaultBuildkiteBranch =\n currentBranch &&\n [process.env.BUILDKITE_PIPELINE_DEFAULT_BRANCH, 'master', 'main'].includes(\n currentBranch,\n );\n\n const isProtectedGitHubBranch = process.env.GITHUB_REF_PROTECTED === 'true';\n\n if (isDefaultBuildkiteBranch || isProtectedGitHubBranch) {\n // The current branch is a protected branch.\n // We respect GitHub Flow; avoid pushing directly to the default branch.\n return false;\n }\n\n let headCommitMessage;\n try {\n headCommitMessage = await Git.getHeadCommitMessage({ dir });\n } catch {}\n\n if (headCommitMessage === AUTOFIX_COMMIT_MESSAGE) {\n // Short circuit when the head commit appears to be one of our autofixes.\n // Repeating the same operation is unlikely to correct outstanding issues.\n return false;\n }\n\n // Allow the push attempt to go ahead if our guards have been cleared.\n return true;\n};\n\ninterface AutofixParameters {\n debug: Input['debug'];\n\n eslint: boolean;\n prettier: boolean;\n}\n\n/**\n * @returns Whether skuba codegenned a file change which should be included in\n * an autofix commit.\n */\nconst tryCodegen = async (dir: string): Promise<boolean> => {\n try {\n // Try to forcibly remove `AUTOFIX_DELETE_FILES` from source control.\n // These may include outdated configuration files or internal files that\n // were accidentally committed by an autofix.\n await Promise.all(\n AUTOFIX_DELETE_FILES.map((filename) =>\n fs.promises.rm(path.join(dir, filename), { force: true }),\n ),\n );\n\n // Search codegenned file changes in the local 
Git working directory.\n // These may include the `AUTOFIX_DELETE_FILES` deleted above or fixups to\n // ignore files and module exports that were run at the start of the\n // `skuba lint` command.\n const changedFiles = await Git.getChangedFiles({\n dir,\n\n ignore: AUTOFIX_IGNORE_FILES,\n });\n\n // Determine if a meaningful codegen change\n return changedFiles.some((changedFile) =>\n AUTOFIX_CODEGEN_FILES.has(changedFile.path),\n );\n } catch (err) {\n log.warn(log.bold('Failed to evaluate codegen changes.'));\n log.subtle(inspect(err));\n\n return false;\n }\n};\n\nexport const autofix = async (params: AutofixParameters): Promise<void> => {\n const dir = process.cwd();\n\n const codegen = await tryCodegen(dir);\n\n if (!params.eslint && !params.prettier && !codegen) {\n return;\n }\n\n let currentBranch;\n try {\n currentBranch = await Git.currentBranch({ dir });\n } catch {}\n\n if (!(await shouldPush({ currentBranch, dir }))) {\n return;\n }\n\n try {\n log.newline();\n if (!params.eslint && !params.prettier) {\n log.warn('Trying to push codegen updates...');\n } else {\n log.warn(\n `Trying to autofix with ${\n params.eslint ? 'ESLint and ' : ''\n }Prettier...`,\n );\n\n const logger = createLogger(params.debug);\n\n if (params.eslint) {\n await runESLint('format', logger);\n }\n // Unconditionally re-run Prettier; reaching here means we have pre-existing\n // format violations or may have created new ones through ESLint fixes.\n await runPrettier('format', logger);\n }\n\n if (process.env.GITHUB_ACTIONS) {\n // GitHub runners have Git installed locally\n const ref = await Git.commitAllChanges({\n dir,\n message: AUTOFIX_COMMIT_MESSAGE,\n\n ignore: AUTOFIX_IGNORE_FILES,\n });\n\n if (!ref) {\n return log.warn('No autofixes detected.');\n }\n\n await throwOnTimeout(simpleGit().push(), { s: 30 });\n log.warn(`Pushed fix commit ${ref}.`);\n return;\n }\n\n // Other CI Environments, use GitHub API\n if (!currentBranch) {\n log.warn('Could not determine the current branch.');\n log.warn(\n 'Please propagate BUILDKITE_BRANCH, GITHUB_HEAD_REF, GITHUB_REF_NAME, or the .git directory to your container.',\n );\n return;\n }\n\n const ref = await throwOnTimeout(\n GitHub.uploadAllFileChanges({\n branch: currentBranch,\n dir,\n messageHeadline: AUTOFIX_COMMIT_MESSAGE,\n\n ignore: AUTOFIX_IGNORE_FILES,\n }),\n { s: 30 },\n );\n\n if (!ref) {\n return log.warn('No autofixes detected.');\n }\n\n log.warn(`Pushed fix commit ${ref}.`);\n } catch (err) {\n log.warn(log.bold('Failed to push fix commit.'));\n log.warn(\n log.bold(\n 'Does your CI environment have write access to your Git repository?',\n ),\n );\n log.subtle(inspect(err));\n }\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,sBAAe;AACf,wBAAsB;AAEtB,UAAqB;AACrB,aAAwB;AACxB,iBAAwB;AACxB,qBAAkC;AAClC,kBAA+B;AAC/B,oBAA0B;AAC1B,sBAA4B;AAC5B,6BAAiC;AACjC,sBAA0C;AAC1C,iCAAyC;AACzC,gCAAyC;AAIzC,MAAM,yBAAyB;AAE/B,MAAM,uBAAuB;AAAA;AAAA;AAAA,EAG3B;AACF;AAEA,MAAM,wBAAwB,oBAAI,IAAY;AAAA,EAC5C,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH;AACF,CAAC;AAEM,MAAM,uBAA0C;AAAA,EACrD;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AAAA,EACA;AAAA;AAAA;AAAA,IAGE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AACF;AAEA,MAAM,aAAa,OAAO;AAAA,EACxB;AAAA,EACA;AACF,MAGM;AACJ,MAAI,KAAC,oBAAQ,GAAG;AAId,WAAO;AAAA,EACT;AAEA,QAAM,2BACJ,iBACA,CAAC,QAAQ,IAAI,mCAAmC,UAAU,MAAM,EAAE;AAAA,IAChE;AAAA,EACF;AAEF,QAAM,0BAA0B,QAAQ,IAAI,yBAAyB;AAErE,MAAI,4BAA4B,yBAAyB;AAGvD,WAAO;AAAA,EACT;AAEA,MAAI;AACJ,MAAI;AACF,wBAAoB,MAAM,IAAI,qBAAqB,EAAE,IAAI,CAAC;AAAA,EAC5D,QAAE;AAAA,EAAO;AAET,MAAI,sBAAsB,wBAAwB;AAGhD,WAAO;AAAA,EACT;AAGA,SAAO;AACT;AAaA,MAAM,aAAa,OAAO,QAAkC;AAC1D,MAAI;AAIF,UAAM,QAAQ;AAAA,MACZ,qBAAqB;AAAA,QAAI,CAAC,aACxB,gBAAAA,QAAG,SAAS,GAAG,YAAAC,QAAK,KAAK,KAAK,QAAQ,GAAG,EAAE,OAAO,KAAK,CAAC;AAAA,MAC1D;AAAA,IACF;AAMA,UAAM,eAAe,MAAM,IAAI,gBAAgB;AAAA,MAC7C;AAAA,MAEA,QAAQ;AAAA,IACV,CAAC;AAGD,WAAO,aAAa;AAAA,MAAK,CAAC,gBACxB,sBAAsB,IAAI,YAAY,IAAI;AAAA,IAC5C;AAAA,EACF,SAAS,KAAP;AACA,uBAAI,KAAK,mBAAI,KAAK,qCAAqC,CAAC;AACxD,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAEvB,WAAO;AAAA,EACT;AACF;AAEO,MAAM,UAAU,OAAO,WAA6C;AACzE,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,UAAU,MAAM,WAAW,GAAG;AAEpC,MAAI,CAAC,OAAO,UAAU,CAAC,OAAO,YAAY,CAAC,SAAS;AAClD;AAAA,EACF;AAEA,MAAI;AACJ,MAAI;AACF,oBAAgB,MAAM,IAAI,cAAc,EAAE,IAAI,CAAC;AAAA,EACjD,QAAE;AAAA,EAAO;AAET,MAAI,CAAE,MAAM,WAAW,EAAE,eAAe,IAAI,CAAC,GAAI;AAC/C;AAAA,EACF;AAEA,MAAI;AACF,uBAAI,QAAQ;AACZ,QAAI,CAAC,OAAO,UAAU,CAAC,OAAO,UAAU;AACtC,yBAAI,KAAK,mCAAmC;AAAA,IAC9C,OAAO;AACL,yBAAI;AAAA,QACF,0BACE,OAAO,SAAS,gBAAgB;AAAA,MAEpC;AAEA,YAAM,aAAS,6BAAa,OAAO,KAAK;AAExC,UAAI,OAAO,QAAQ;AACjB,kBAAM,yBAAU,UAAU,MAAM;AAAA,MAClC;AAGA,gBAAM,6BAAY,UAAU,MAAM;AAAA,IACpC;AAEA,QAAI,QAAQ,IAAI,gBAAgB;AAE9B,YAAMC,OAAM,MAAM,IAAI,iBAAiB;AAAA,QACrC;AAAA,QACA,SAAS;AAAA,QAET,QAAQ;AAAA,MACV,CAAC;AAED,UAAI,CAACA,MAAK;AACR,eAAO,mBAAI,KAAK,wBAAwB;AAAA,MAC1C;AAEA,gBAAM,gCAAe,kBAAAC,SAAU,EAAE,KAAK,GAAG,EAAE,GAAG,GAAG,CAAC;AAClD,yBAAI,KAAK,qBAAqBD,OAAM;AACpC;AAAA,IACF;AAGA,QAAI,CAAC,eAAe;AAClB,yBAAI,KAAK,yCAAyC;AAClD,yBAAI;AAAA,QACF;AAAA,MACF;AACA;AAAA,IACF;AAEA,UAAM,MAAM,UAAM;AAAA,MAChB,OAAO,qBAAqB;AAAA,QAC1B,QAAQ;AAAA,QACR;AAAA,QACA,iBAAiB;AAAA,QAEjB,QAAQ;AAAA,MACV,CAAC;AAAA,MACD,EAAE,GAAG,GAAG;AAAA,IACV;AAEA,QAAI,CAAC,KAAK;AACR,aAAO,mBAAI,KAAK,wBAAwB;AAAA,IAC1C;AAEA,uBAAI,KAAK,qBAAqB,MAAM;AAAA,EACtC,SAAS,KAAP;AACA,uBAAI,KAAK,mBAAI,KAAK,4BAA4B,CAAC;AAC/C,uBAAI;AAAA,MACF,mBAAI;AAAA,QACF;AAAA,MACF;AAAA,IACF;AACA,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAAA,EACzB;AACF;",
  "names": ["fs", "path", "ref", "simpleGit"]
  }
package/lib/cli/lint/external.js CHANGED
@@ -43,10 +43,7 @@ var import_prettier = require("./prettier");
  var import_tsc = require("./tsc");
  const tscPrefixRegex = /^(.*?tsc\s+│.*?\s)/gm;
  class StreamInterceptor extends import_stream.default.Transform {
- constructor() {
- super(...arguments);
- this.chunks = [];
- }
+ chunks = [];
  output() {
  return Buffer.concat(this.chunks).toString().replace(tscPrefixRegex, "");
  }
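This StreamInterceptor change is presentation-only: the chunks buffer moves from a downlevelled constructor assignment to a native class field, which suggests the build now targets a runtime with ES2022 class-field support. Both forms behave identically; a sketch of the two emitted shapes (assuming the rest of the class is unchanged):

// Previous downlevelled output:
class StreamInterceptor extends import_stream.default.Transform {
  constructor() {
    super(...arguments);
    this.chunks = [];
  }
}

// New output with native class fields:
class StreamInterceptor extends import_stream.default.Transform {
  chunks = [];
}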
package/lib/cli/lint/external.js.map CHANGED
@@ -2,6 +2,6 @@
  "version": 3,
  "sources": ["../../../src/cli/lint/external.ts"],
  "sourcesContent": ["import stream from 'stream';\nimport { inspect } from 'util';\n\nimport { log } from '../../utils/logging';\nimport { throwOnTimeout } from '../../utils/wait';\n\nimport { createAnnotations } from './annotate';\nimport { autofix } from './autofix';\nimport { runESLintInCurrentThread, runESLintInWorkerThread } from './eslint';\nimport {\n runPrettierInCurrentThread,\n runPrettierInWorkerThread,\n} from './prettier';\nimport { runTscInNewProcess } from './tsc';\nimport type { Input } from './types';\n\nconst tscPrefixRegex = /^(.*?tsc\\s+\u2502.*?\\s)/gm;\n\nexport class StreamInterceptor extends stream.Transform {\n private chunks: Uint8Array[] = [];\n\n public output() {\n return Buffer.concat(this.chunks).toString().replace(tscPrefixRegex, '');\n }\n\n _transform(\n chunk: Uint8Array,\n _encoding: BufferEncoding,\n callback: stream.TransformCallback,\n ) {\n this.chunks.push(chunk);\n\n callback(null, chunk);\n }\n}\n\nconst lintConcurrently = async ({ tscOutputStream, ...input }: Input) => {\n const [eslint, prettier, tscOk] = await Promise.all([\n runESLintInWorkerThread(input),\n runPrettierInWorkerThread(input),\n runTscInNewProcess({ ...input, tscOutputStream }),\n ]);\n\n return { eslint, prettier, tscOk };\n};\n\n/**\n * Run linting tools `--serial`ly for resource-constrained environments.\n *\n * Note that we still run ESLint and Prettier in worker threads as a\n * counterintuitive optimisation. Memory can be more readily freed on worker\n * thread exit, which isn't as easy with a monolithic main thread.\n */\nconst lintSerially = async ({ tscOutputStream, ...input }: Input) => {\n const eslint = await runESLintInWorkerThread(input);\n const prettier = await runPrettierInWorkerThread(input);\n const tscOk = await runTscInNewProcess({ ...input, tscOutputStream });\n\n return { eslint, prettier, tscOk };\n};\n\nconst lintSeriallyWithoutWorkerThreads = async (input: Input) => {\n const eslint = await runESLintInCurrentThread(input);\n const prettier = await runPrettierInCurrentThread(input);\n const tscOk = await runTscInNewProcess(input);\n\n return { eslint, prettier, tscOk };\n};\n\nconst selectLintFunction = (input: Input) => {\n if (!input.workerThreads) {\n return lintSeriallyWithoutWorkerThreads;\n }\n\n // `--debug` implies `--serial`.\n const isSerial = input.debug || input.serial;\n\n return isSerial ? lintSerially : lintConcurrently;\n};\n\nexport const externalLint = async (input: Input) => {\n const lint = selectLintFunction(input);\n\n const tscOutputStream = new StreamInterceptor();\n tscOutputStream.pipe(input.tscOutputStream ?? process.stdout);\n\n const { eslint, prettier, tscOk } = await lint({ ...input, tscOutputStream });\n\n try {\n await throwOnTimeout(\n createAnnotations(eslint, prettier, tscOk, tscOutputStream),\n { s: 30 },\n );\n } catch (err) {\n log.warn('Failed to annotate lint results.');\n log.subtle(inspect(err));\n }\n\n if (!eslint.ok || !prettier.ok || !tscOk) {\n const tools = [\n ...(eslint.ok ? [] : ['ESLint']),\n ...(prettier.ok ? [] : ['Prettier']),\n ...(tscOk ? [] : ['tsc']),\n ];\n\n log.newline();\n log.err(`${tools.join(', ')} found issues that require triage.`);\n\n process.exitCode = 1;\n }\n\n await autofix({\n debug: input.debug,\n eslint: eslint.fixable,\n prettier: !prettier.ok,\n });\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAmB;AACnB,kBAAwB;AAExB,qBAAoB;AACpB,kBAA+B;AAE/B,sBAAkC;AAClC,qBAAwB;AACxB,oBAAkE;AAClE,sBAGO;AACP,iBAAmC;AAGnC,MAAM,iBAAiB;AAEhB,MAAM,0BAA0B,cAAAA,QAAO,UAAU;AAAA,EAAjD;AAAA;AACL,SAAQ,SAAuB,CAAC;AAAA;AAAA,EAEzB,SAAS;AACd,WAAO,OAAO,OAAO,KAAK,MAAM,EAAE,SAAS,EAAE,QAAQ,gBAAgB,EAAE;AAAA,EACzE;AAAA,EAEA,WACE,OACA,WACA,UACA;AACA,SAAK,OAAO,KAAK,KAAK;AAEtB,aAAS,MAAM,KAAK;AAAA,EACtB;AACF;AAEA,MAAM,mBAAmB,OAAO,EAAE,iBAAiB,GAAG,MAAM,MAAa;AACvE,QAAM,CAAC,QAAQ,UAAU,KAAK,IAAI,MAAM,QAAQ,IAAI;AAAA,QAClD,uCAAwB,KAAK;AAAA,QAC7B,2CAA0B,KAAK;AAAA,QAC/B,+BAAmB,EAAE,GAAG,OAAO,gBAAgB,CAAC;AAAA,EAClD,CAAC;AAED,SAAO,EAAE,QAAQ,UAAU,MAAM;AACnC;AASA,MAAM,eAAe,OAAO,EAAE,iBAAiB,GAAG,MAAM,MAAa;AACnE,QAAM,SAAS,UAAM,uCAAwB,KAAK;AAClD,QAAM,WAAW,UAAM,2CAA0B,KAAK;AACtD,QAAM,QAAQ,UAAM,+BAAmB,EAAE,GAAG,OAAO,gBAAgB,CAAC;AAEpE,SAAO,EAAE,QAAQ,UAAU,MAAM;AACnC;AAEA,MAAM,mCAAmC,OAAO,UAAiB;AAC/D,QAAM,SAAS,UAAM,wCAAyB,KAAK;AACnD,QAAM,WAAW,UAAM,4CAA2B,KAAK;AACvD,QAAM,QAAQ,UAAM,+BAAmB,KAAK;AAE5C,SAAO,EAAE,QAAQ,UAAU,MAAM;AACnC;AAEA,MAAM,qBAAqB,CAAC,UAAiB;AAC3C,MAAI,CAAC,MAAM,eAAe;AACxB,WAAO;AAAA,EACT;AAGA,QAAM,WAAW,MAAM,SAAS,MAAM;AAEtC,SAAO,WAAW,eAAe;AACnC;AAEO,MAAM,eAAe,OAAO,UAAiB;AAClD,QAAM,OAAO,mBAAmB,KAAK;AAErC,QAAM,kBAAkB,IAAI,kBAAkB;AAC9C,kBAAgB,KAAK,MAAM,mBAAmB,QAAQ,MAAM;AAE5D,QAAM,EAAE,QAAQ,UAAU,MAAM,IAAI,MAAM,KAAK,EAAE,GAAG,OAAO,gBAAgB,CAAC;AAE5E,MAAI;AACF,cAAM;AAAA,UACJ,mCAAkB,QAAQ,UAAU,OAAO,eAAe;AAAA,MAC1D,EAAE,GAAG,GAAG;AAAA,IACV;AAAA,EACF,SAAS,KAAP;AACA,uBAAI,KAAK,kCAAkC;AAC3C,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAAA,EACzB;AAEA,MAAI,CAAC,OAAO,MAAM,CAAC,SAAS,MAAM,CAAC,OAAO;AACxC,UAAM,QAAQ;AAAA,MACZ,GAAI,OAAO,KAAK,CAAC,IAAI,CAAC,QAAQ;AAAA,MAC9B,GAAI,SAAS,KAAK,CAAC,IAAI,CAAC,UAAU;AAAA,MAClC,GAAI,QAAQ,CAAC,IAAI,CAAC,KAAK;AAAA,IACzB;AAEA,uBAAI,QAAQ;AACZ,uBAAI,IAAI,GAAG,MAAM,KAAK,IAAI,qCAAqC;AAE/D,YAAQ,WAAW;AAAA,EACrB;AAEA,YAAM,wBAAQ;AAAA,IACZ,OAAO,MAAM;AAAA,IACb,QAAQ,OAAO;AAAA,IACf,UAAU,CAAC,SAAS;AAAA,EACtB,CAAC;AACH;",
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAmB;AACnB,kBAAwB;AAExB,qBAAoB;AACpB,kBAA+B;AAE/B,sBAAkC;AAClC,qBAAwB;AACxB,oBAAkE;AAClE,sBAGO;AACP,iBAAmC;AAGnC,MAAM,iBAAiB;AAEhB,MAAM,0BAA0B,cAAAA,QAAO,UAAU;AAAA,EAC9C,SAAuB,CAAC;AAAA,EAEzB,SAAS;AACd,WAAO,OAAO,OAAO,KAAK,MAAM,EAAE,SAAS,EAAE,QAAQ,gBAAgB,EAAE;AAAA,EACzE;AAAA,EAEA,WACE,OACA,WACA,UACA;AACA,SAAK,OAAO,KAAK,KAAK;AAEtB,aAAS,MAAM,KAAK;AAAA,EACtB;AACF;AAEA,MAAM,mBAAmB,OAAO,EAAE,iBAAiB,GAAG,MAAM,MAAa;AACvE,QAAM,CAAC,QAAQ,UAAU,KAAK,IAAI,MAAM,QAAQ,IAAI;AAAA,QAClD,uCAAwB,KAAK;AAAA,QAC7B,2CAA0B,KAAK;AAAA,QAC/B,+BAAmB,EAAE,GAAG,OAAO,gBAAgB,CAAC;AAAA,EAClD,CAAC;AAED,SAAO,EAAE,QAAQ,UAAU,MAAM;AACnC;AASA,MAAM,eAAe,OAAO,EAAE,iBAAiB,GAAG,MAAM,MAAa;AACnE,QAAM,SAAS,UAAM,uCAAwB,KAAK;AAClD,QAAM,WAAW,UAAM,2CAA0B,KAAK;AACtD,QAAM,QAAQ,UAAM,+BAAmB,EAAE,GAAG,OAAO,gBAAgB,CAAC;AAEpE,SAAO,EAAE,QAAQ,UAAU,MAAM;AACnC;AAEA,MAAM,mCAAmC,OAAO,UAAiB;AAC/D,QAAM,SAAS,UAAM,wCAAyB,KAAK;AACnD,QAAM,WAAW,UAAM,4CAA2B,KAAK;AACvD,QAAM,QAAQ,UAAM,+BAAmB,KAAK;AAE5C,SAAO,EAAE,QAAQ,UAAU,MAAM;AACnC;AAEA,MAAM,qBAAqB,CAAC,UAAiB;AAC3C,MAAI,CAAC,MAAM,eAAe;AACxB,WAAO;AAAA,EACT;AAGA,QAAM,WAAW,MAAM,SAAS,MAAM;AAEtC,SAAO,WAAW,eAAe;AACnC;AAEO,MAAM,eAAe,OAAO,UAAiB;AAClD,QAAM,OAAO,mBAAmB,KAAK;AAErC,QAAM,kBAAkB,IAAI,kBAAkB;AAC9C,kBAAgB,KAAK,MAAM,mBAAmB,QAAQ,MAAM;AAE5D,QAAM,EAAE,QAAQ,UAAU,MAAM,IAAI,MAAM,KAAK,EAAE,GAAG,OAAO,gBAAgB,CAAC;AAE5E,MAAI;AACF,cAAM;AAAA,UACJ,mCAAkB,QAAQ,UAAU,OAAO,eAAe;AAAA,MAC1D,EAAE,GAAG,GAAG;AAAA,IACV;AAAA,EACF,SAAS,KAAP;AACA,uBAAI,KAAK,kCAAkC;AAC3C,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAAA,EACzB;AAEA,MAAI,CAAC,OAAO,MAAM,CAAC,SAAS,MAAM,CAAC,OAAO;AACxC,UAAM,QAAQ;AAAA,MACZ,GAAI,OAAO,KAAK,CAAC,IAAI,CAAC,QAAQ;AAAA,MAC9B,GAAI,SAAS,KAAK,CAAC,IAAI,CAAC,UAAU;AAAA,MAClC,GAAI,QAAQ,CAAC,IAAI,CAAC,KAAK;AAAA,IACzB;AAEA,uBAAI,QAAQ;AACZ,uBAAI,IAAI,GAAG,MAAM,KAAK,IAAI,qCAAqC;AAE/D,YAAQ,WAAW;AAAA,EACrB;AAEA,YAAM,wBAAQ;AAAA,IACZ,OAAO,MAAM;AAAA,IACb,QAAQ,OAAO;AAAA,IACf,UAAU,CAAC,SAAS;AAAA,EACtB,CAAC;AACH;",
  "names": ["stream"]
  }
package/lib/cli/lint/index.js CHANGED
@@ -24,6 +24,7 @@ module.exports = __toCommonJS(lint_exports);
  var import_args = require("../../utils/args");
  var import_addEmptyExports = require("../configure/addEmptyExports");
  var import_patchRenovateConfig = require("../configure/patchRenovateConfig");
+ var import_patchServerListener = require("../configure/patchServerListener");
  var import_refreshIgnoreFiles = require("../configure/refreshIgnoreFiles");
  var import_external = require("./external");
  var import_internal = require("./internal");
@@ -31,6 +32,7 @@ const lint = async (args = process.argv.slice(2), tscOutputStream = void 0, workerThreads = true) => {
  await Promise.all([
    (0, import_addEmptyExports.tryAddEmptyExports)(),
    (0, import_patchRenovateConfig.tryPatchRenovateConfig)(),
+   (0, import_patchServerListener.tryPatchServerListener)(),
    (0, import_refreshIgnoreFiles.tryRefreshIgnoreFiles)()
  ]);
  const opts = {
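The lint command now schedules a fourth codemod, tryPatchServerListener, concurrently with the existing patchers in Promise.all. A minimal sketch of the try-wrapper convention these patchers appear to follow (the compiled patchServerListener.js is added in this diff, but its body is not shown here; the stand-in below is hypothetical):

    // Hypothetical stand-in for the shipped patch step.
    const patchServerListener = async (): Promise<void> => {
      // e.g. rewrite the app's listener callback in the consumer's source
    };

    // Best-effort wrapper: a failed codemod should never fail `skuba lint`,
    // so errors are swallowed and the remaining patchers continue.
    export const tryPatchServerListener = async (): Promise<void> => {
      try {
        await patchServerListener();
      } catch {
        // intentionally ignored
      }
    };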
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/cli/lint/index.ts"],
- "sourcesContent": ["import type { Writable } from 'stream';\n\nimport { hasDebugFlag, hasSerialFlag } from '../../utils/args';\nimport { tryAddEmptyExports } from '../configure/addEmptyExports';\nimport { tryPatchRenovateConfig } from '../configure/patchRenovateConfig';\nimport { tryRefreshIgnoreFiles } from '../configure/refreshIgnoreFiles';\n\nimport { externalLint } from './external';\nimport { internalLint } from './internal';\nimport type { Input } from './types';\n\nexport const lint = async (\n args = process.argv.slice(2),\n tscOutputStream: Writable | undefined = undefined,\n workerThreads = true,\n) => {\n await Promise.all([\n tryAddEmptyExports(),\n tryPatchRenovateConfig(),\n tryRefreshIgnoreFiles(),\n ]);\n\n const opts: Input = {\n debug: hasDebugFlag(args),\n serial: hasSerialFlag(args),\n tscOutputStream,\n workerThreads,\n };\n\n await externalLint(opts);\n\n await internalLint();\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,kBAA4C;AAC5C,6BAAmC;AACnC,iCAAuC;AACvC,gCAAsC;AAEtC,sBAA6B;AAC7B,sBAA6B;AAGtB,MAAM,OAAO,OAClB,OAAO,QAAQ,KAAK,MAAM,CAAC,GAC3B,kBAAwC,QACxC,gBAAgB,SACb;AACH,QAAM,QAAQ,IAAI;AAAA,QAChB,2CAAmB;AAAA,QACnB,mDAAuB;AAAA,QACvB,iDAAsB;AAAA,EACxB,CAAC;AAED,QAAM,OAAc;AAAA,IAClB,WAAO,0BAAa,IAAI;AAAA,IACxB,YAAQ,2BAAc,IAAI;AAAA,IAC1B;AAAA,IACA;AAAA,EACF;AAEA,YAAM,8BAAa,IAAI;AAEvB,YAAM,8BAAa;AACrB;",
+ "sourcesContent": ["import type { Writable } from 'stream';\n\nimport { hasDebugFlag, hasSerialFlag } from '../../utils/args';\nimport { tryAddEmptyExports } from '../configure/addEmptyExports';\nimport { tryPatchRenovateConfig } from '../configure/patchRenovateConfig';\nimport { tryPatchServerListener } from '../configure/patchServerListener';\nimport { tryRefreshIgnoreFiles } from '../configure/refreshIgnoreFiles';\n\nimport { externalLint } from './external';\nimport { internalLint } from './internal';\nimport type { Input } from './types';\n\nexport const lint = async (\n args = process.argv.slice(2),\n tscOutputStream: Writable | undefined = undefined,\n workerThreads = true,\n) => {\n await Promise.all([\n tryAddEmptyExports(),\n tryPatchRenovateConfig(),\n tryPatchServerListener(),\n tryRefreshIgnoreFiles(),\n ]);\n\n const opts: Input = {\n debug: hasDebugFlag(args),\n serial: hasSerialFlag(args),\n tscOutputStream,\n workerThreads,\n };\n\n await externalLint(opts);\n\n await internalLint();\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,kBAA4C;AAC5C,6BAAmC;AACnC,iCAAuC;AACvC,iCAAuC;AACvC,gCAAsC;AAEtC,sBAA6B;AAC7B,sBAA6B;AAGtB,MAAM,OAAO,OAClB,OAAO,QAAQ,KAAK,MAAM,CAAC,GAC3B,kBAAwC,QACxC,gBAAgB,SACb;AACH,QAAM,QAAQ,IAAI;AAAA,QAChB,2CAAmB;AAAA,QACnB,mDAAuB;AAAA,QACvB,mDAAuB;AAAA,QACvB,iDAAsB;AAAA,EACxB,CAAC;AAED,QAAM,OAAc;AAAA,IAClB,WAAO,0BAAa,IAAI;AAAA,IACxB,YAAQ,2BAAc,IAAI;AAAA,IAC1B;AAAA,IACA;AAAA,EACF;AAEA,YAAM,8BAAa,IAAI;AAEvB,YAAM,8BAAa;AACrB;",
  "names": []
  }
package/lib/cli/start.js CHANGED
@@ -42,7 +42,7 @@ const start = async () => {
    (0, import_args.parseRunArgs)(process.argv.slice(2)),
    (0, import_get_port.default)()
  ]);
- args.entryPoint ?? (args.entryPoint = await (0, import_manifest.getEntryPointFromManifest)());
+ args.entryPoint ??= await (0, import_manifest.getEntryPointFromManifest)();
  const execProcess = (0, import_exec.createExec)({
    env: {
      __SKUBA_ENTRY_POINT: args.entryPoint,
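This change, like the test and annotations changes below, replaces esbuild's down-leveled nullish assignment with the native `??=` operator, presumably because the compilation target now supports logical assignment. A short sketch of the semantics:

    // `a ??= b` assigns only when `a` is null or undefined.
    let entryPoint: string | undefined;

    entryPoint ??= 'src/app.ts'; // assigns: entryPoint was undefined
    entryPoint ??= 'src/other.ts'; // no-op: entryPoint is already set

    // The old output spelled the same thing out manually:
    // entryPoint ?? (entryPoint = 'src/app.ts');

    console.log(entryPoint); // 'src/app.ts'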
@@ -2,6 +2,6 @@
  "version": 3,
  "sources": ["../../src/cli/start.ts"],
  "sourcesContent": ["import path from 'path';\n\nimport getPort from 'get-port';\n\nimport { parseRunArgs } from '../utils/args';\nimport { createExec } from '../utils/exec';\nimport { getEntryPointFromManifest } from '../utils/manifest';\nimport { isIpPort } from '../utils/validation';\n\nexport const start = async () => {\n const [args, availablePort] = await Promise.all([\n parseRunArgs(process.argv.slice(2)),\n getPort(),\n ]);\n\n args.entryPoint ??= await getEntryPointFromManifest();\n\n const execProcess = createExec({\n env: {\n __SKUBA_ENTRY_POINT: args.entryPoint,\n __SKUBA_PORT: String(isIpPort(args.port) ? args.port : availablePort),\n },\n });\n\n return execProcess(\n 'ts-node-dev',\n ...args.node,\n '--require',\n 'dotenv/config',\n '--require',\n 'tsconfig-paths/register',\n '--respawn',\n '--transpile-only',\n path.join(__dirname, '..', 'wrapper'),\n ...args.script,\n );\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAoB;AAEpB,kBAA6B;AAC7B,kBAA2B;AAC3B,sBAA0C;AAC1C,wBAAyB;AAElB,MAAM,QAAQ,YAAY;AAC/B,QAAM,CAAC,MAAM,aAAa,IAAI,MAAM,QAAQ,IAAI;AAAA,QAC9C,0BAAa,QAAQ,KAAK,MAAM,CAAC,CAAC;AAAA,QAClC,gBAAAA,SAAQ;AAAA,EACV,CAAC;AAED,OAAK,eAAL,KAAK,aAAe,UAAM,2CAA0B;AAEpD,QAAM,kBAAc,wBAAW;AAAA,IAC7B,KAAK;AAAA,MACH,qBAAqB,KAAK;AAAA,MAC1B,cAAc,WAAO,4BAAS,KAAK,IAAI,IAAI,KAAK,OAAO,aAAa;AAAA,IACtE;AAAA,EACF,CAAC;AAED,SAAO;AAAA,IACL;AAAA,IACA,GAAG,KAAK;AAAA,IACR;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,YAAAC,QAAK,KAAK,WAAW,MAAM,SAAS;AAAA,IACpC,GAAG,KAAK;AAAA,EACV;AACF;",
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAoB;AAEpB,kBAA6B;AAC7B,kBAA2B;AAC3B,sBAA0C;AAC1C,wBAAyB;AAElB,MAAM,QAAQ,YAAY;AAC/B,QAAM,CAAC,MAAM,aAAa,IAAI,MAAM,QAAQ,IAAI;AAAA,QAC9C,0BAAa,QAAQ,KAAK,MAAM,CAAC,CAAC;AAAA,QAClC,gBAAAA,SAAQ;AAAA,EACV,CAAC;AAED,OAAK,eAAe,UAAM,2CAA0B;AAEpD,QAAM,kBAAc,wBAAW;AAAA,IAC7B,KAAK;AAAA,MACH,qBAAqB,KAAK;AAAA,MAC1B,cAAc,WAAO,4BAAS,KAAK,IAAI,IAAI,KAAK,OAAO,aAAa;AAAA,IACtE;AAAA,EACF,CAAC;AAED,SAAO;AAAA,IACL;AAAA,IACA,GAAG,KAAK;AAAA,IACR;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,YAAAC,QAAK,KAAK,WAAW,MAAM,SAAS;AAAA,IACpC,GAAG,KAAK;AAAA,EACV;AACF;",
  "names": ["getPort", "path"]
  }
@@ -24,9 +24,8 @@ module.exports = __toCommonJS(test_exports);
  var import_jest = require("jest");
  var import_addEmptyExports = require("../configure/addEmptyExports");
  const test = async () => {
-   var _a;
    await (0, import_addEmptyExports.tryAddEmptyExports)();
-   (_a = process.env).NODE_ENV ?? (_a.NODE_ENV = "test");
+   process.env.NODE_ENV ??= "test";
    const argv = process.argv.slice(2);
    return (0, import_jest.run)(argv);
  };
@@ -2,6 +2,6 @@
  "version": 3,
  "sources": ["../../../src/cli/test/index.ts"],
  "sourcesContent": ["import { run } from 'jest';\n\nimport { tryAddEmptyExports } from '../configure/addEmptyExports';\n\nexport const test = async () => {\n await tryAddEmptyExports();\n\n // This is usually set in `jest-cli`'s binary wrapper\n process.env.NODE_ENV ??= 'test';\n\n const argv = process.argv.slice(2);\n\n return run(argv);\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAoB;AAEpB,6BAAmC;AAE5B,MAAM,OAAO,YAAY;AAJhC;AAKE,YAAM,2CAAmB;AAGzB,gBAAQ,KAAI,aAAZ,GAAY,WAAa;AAEzB,QAAM,OAAO,QAAQ,KAAK,MAAM,CAAC;AAEjC,aAAO,iBAAI,IAAI;AACjB;",
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAoB;AAEpB,6BAAmC;AAE5B,MAAM,OAAO,YAAY;AAC9B,YAAM,2CAAmB;AAGzB,UAAQ,IAAI,aAAa;AAEzB,QAAM,OAAO,QAAQ,KAAK,MAAM,CAAC;AAEjC,aAAO,iBAAI,IAAI;AACjB;",
  "names": []
  }
@@ -79,7 +79,7 @@ const generateAnnotationEntries = (testResults) => {
  const resultsByDisplayName = testResults.reduce(
    (acc, result) => {
      const displayName = result.displayName?.name ?? DEFAULT_DISPLAY_NAME;
-     (acc[displayName] ?? (acc[displayName] = [])).push(result);
+     (acc[displayName] ??= []).push(result);
      return acc;
    },
    {}
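The same operator also tidies the grouping idiom here: `acc[key] ??= []` creates the bucket on first sight and evaluates to it, so the push can chain directly. A standalone sketch of the pattern (types and sample data are illustrative):

    interface Result {
      displayName?: string;
    }

    const groupByDisplayName = (results: Result[]) =>
      results.reduce<Partial<Record<string, Result[]>>>((acc, result) => {
        const key = result.displayName ?? 'default';

        // Create the bucket if absent, then push into it.
        (acc[key] ??= []).push(result);

        return acc;
      }, {});

    console.log(groupByDisplayName([{ displayName: 'unit' }, {}]));
    // { unit: [ { displayName: 'unit' } ], default: [ {} ] }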
@@ -2,6 +2,6 @@
  "version": 3,
  "sources": ["../../../../../src/cli/test/reporters/github/annotations.ts"],
  "sourcesContent": ["import path from 'path';\n\nimport type { TestResult } from '@jest/test-result';\nimport stripAnsi from 'strip-ansi';\nimport dedent from 'ts-dedent';\n\nimport type * as GitHub from '../../../../api/github';\n\n/**\n * Matches the first stack trace location in a Jest failure message.\n *\n * For example, given the following input message:\n *\n * ```console\n * Error: expect(received).toBe(expected) // Object.is equality\n *\n * Expected: \"a\"\n * Received: \"b\"\n * at Object.<anonymous> (/workdir/skuba/src/test.test.ts:2:15)\n * at Promise.then.completed (/workdir/skuba/node_modules/jest-circus/build/utils.js:390:28)\n * ...\n * ```\n *\n * or:\n *\n * ```console\n * Error: expect(received).toBe(expected) // Object.is equality\n *\n * Expected: \"a\"\n * Received: \"b\"\n * at /workdir/skuba/src/test.test.ts:2:15\n * at Promise.then.completed (/workdir/skuba/node_modules/jest-circus/build/utils.js:390:28)\n * ...\n * ```\n *\n * This pattern will produce the following matches:\n *\n * 1. /workdir/skuba/src/test.test.ts\n * 2. 2\n * 2. 15\n */\nconst JEST_LOCATION_REGEX = /\\n +at (.+\\()?(.+?):(\\d+):(\\d+)/;\n\nexport const createAnnotations = (\n testResults: TestResult[],\n): GitHub.Annotation[] => {\n const cwd = process.cwd();\n\n return testResults.flatMap((testResult) => {\n if (testResult.testExecError) {\n return {\n annotation_level: 'failure',\n path: path.relative(cwd, testResult.testFilePath),\n start_line: 1,\n end_line: 1,\n message: stripAnsi(\n testResult.failureMessage\n ? dedent(testResult.failureMessage)\n : testResult.testExecError.message,\n ),\n title: 'Jest',\n };\n }\n\n if (testResult.numFailingTests > 0) {\n return testResult.testResults.flatMap((assertionResult) =>\n assertionResult.failureMessages.flatMap((failureMessage) => {\n const match = JEST_LOCATION_REGEX.exec(failureMessage);\n if (match?.length === 5) {\n return {\n annotation_level: 'failure',\n path: path.relative(cwd, match[2]),\n start_line: Number(match[3]),\n end_line: Number(match[3]),\n start_column: Number(match[4]),\n end_column: Number(match[4]),\n message: stripAnsi(failureMessage),\n title: 'Jest',\n };\n }\n\n return [];\n }),\n );\n }\n\n return [];\n });\n};\n\nconst DEFAULT_DISPLAY_NAME = Symbol('DEFAULT_DISPLAY_NAME');\n\ninterface AnnotationEntry {\n annotations: GitHub.Annotation[];\n displayName: string | undefined;\n}\n\nexport const generateAnnotationEntries = (\n testResults: TestResult[],\n): AnnotationEntry[] => {\n type ResultsByDisplayName = Record<string | symbol, TestResult[]>;\n\n // Group test results by display name.\n const resultsByDisplayName = testResults.reduce<ResultsByDisplayName>(\n (acc, result) => {\n const displayName = result.displayName?.name ?? DEFAULT_DISPLAY_NAME;\n\n (acc[displayName] ??= []).push(result);\n\n return acc;\n },\n {},\n );\n\n const defaultResults = resultsByDisplayName[DEFAULT_DISPLAY_NAME];\n\n const entries = [\n ...(defaultResults?.length ? ([[undefined, defaultResults]] as const) : []),\n ...Object.entries(resultsByDisplayName),\n ];\n\n // Create annotations for each display name.\n return entries.map<AnnotationEntry>(([displayName, results]) => ({\n annotations: createAnnotations(results),\n displayName,\n }));\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAGjB,wBAAsB;AACtB,uBAAmB;AAqCnB,MAAM,sBAAsB;AAErB,MAAM,oBAAoB,CAC/B,gBACwB;AACxB,QAAM,MAAM,QAAQ,IAAI;AAExB,SAAO,YAAY,QAAQ,CAAC,eAAe;AACzC,QAAI,WAAW,eAAe;AAC5B,aAAO;AAAA,QACL,kBAAkB;AAAA,QAClB,MAAM,YAAAA,QAAK,SAAS,KAAK,WAAW,YAAY;AAAA,QAChD,YAAY;AAAA,QACZ,UAAU;AAAA,QACV,aAAS,kBAAAC;AAAA,UACP,WAAW,qBACP,iBAAAC,SAAO,WAAW,cAAc,IAChC,WAAW,cAAc;AAAA,QAC/B;AAAA,QACA,OAAO;AAAA,MACT;AAAA,IACF;AAEA,QAAI,WAAW,kBAAkB,GAAG;AAClC,aAAO,WAAW,YAAY;AAAA,QAAQ,CAAC,oBACrC,gBAAgB,gBAAgB,QAAQ,CAAC,mBAAmB;AAC1D,gBAAM,QAAQ,oBAAoB,KAAK,cAAc;AACrD,cAAI,OAAO,WAAW,GAAG;AACvB,mBAAO;AAAA,cACL,kBAAkB;AAAA,cAClB,MAAM,YAAAF,QAAK,SAAS,KAAK,MAAM,CAAC,CAAC;AAAA,cACjC,YAAY,OAAO,MAAM,CAAC,CAAC;AAAA,cAC3B,UAAU,OAAO,MAAM,CAAC,CAAC;AAAA,cACzB,cAAc,OAAO,MAAM,CAAC,CAAC;AAAA,cAC7B,YAAY,OAAO,MAAM,CAAC,CAAC;AAAA,cAC3B,aAAS,kBAAAC,SAAU,cAAc;AAAA,cACjC,OAAO;AAAA,YACT;AAAA,UACF;AAEA,iBAAO,CAAC;AAAA,QACV,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO,CAAC;AAAA,EACV,CAAC;AACH;AAEA,MAAM,uBAAuB,OAAO,sBAAsB;AAOnD,MAAM,4BAA4B,CACvC,gBACsB;AAItB,QAAM,uBAAuB,YAAY;AAAA,IACvC,CAAC,KAAK,WAAW;AACf,YAAM,cAAc,OAAO,aAAa,QAAQ;AAEhD,OAAC,wCAAqB,CAAC,IAAG,KAAK,MAAM;AAErC,aAAO;AAAA,IACT;AAAA,IACA,CAAC;AAAA,EACH;AAEA,QAAM,iBAAiB,qBAAqB,oBAAoB;AAEhE,QAAM,UAAU;AAAA,IACd,GAAI,gBAAgB,SAAU,CAAC,CAAC,QAAW,cAAc,CAAC,IAAc,CAAC;AAAA,IACzE,GAAG,OAAO,QAAQ,oBAAoB;AAAA,EACxC;AAGA,SAAO,QAAQ,IAAqB,CAAC,CAAC,aAAa,OAAO,OAAO;AAAA,IAC/D,aAAa,kBAAkB,OAAO;AAAA,IACtC;AAAA,EACF,EAAE;AACJ;",
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAGjB,wBAAsB;AACtB,uBAAmB;AAqCnB,MAAM,sBAAsB;AAErB,MAAM,oBAAoB,CAC/B,gBACwB;AACxB,QAAM,MAAM,QAAQ,IAAI;AAExB,SAAO,YAAY,QAAQ,CAAC,eAAe;AACzC,QAAI,WAAW,eAAe;AAC5B,aAAO;AAAA,QACL,kBAAkB;AAAA,QAClB,MAAM,YAAAA,QAAK,SAAS,KAAK,WAAW,YAAY;AAAA,QAChD,YAAY;AAAA,QACZ,UAAU;AAAA,QACV,aAAS,kBAAAC;AAAA,UACP,WAAW,qBACP,iBAAAC,SAAO,WAAW,cAAc,IAChC,WAAW,cAAc;AAAA,QAC/B;AAAA,QACA,OAAO;AAAA,MACT;AAAA,IACF;AAEA,QAAI,WAAW,kBAAkB,GAAG;AAClC,aAAO,WAAW,YAAY;AAAA,QAAQ,CAAC,oBACrC,gBAAgB,gBAAgB,QAAQ,CAAC,mBAAmB;AAC1D,gBAAM,QAAQ,oBAAoB,KAAK,cAAc;AACrD,cAAI,OAAO,WAAW,GAAG;AACvB,mBAAO;AAAA,cACL,kBAAkB;AAAA,cAClB,MAAM,YAAAF,QAAK,SAAS,KAAK,MAAM,CAAC,CAAC;AAAA,cACjC,YAAY,OAAO,MAAM,CAAC,CAAC;AAAA,cAC3B,UAAU,OAAO,MAAM,CAAC,CAAC;AAAA,cACzB,cAAc,OAAO,MAAM,CAAC,CAAC;AAAA,cAC7B,YAAY,OAAO,MAAM,CAAC,CAAC;AAAA,cAC3B,aAAS,kBAAAC,SAAU,cAAc;AAAA,cACjC,OAAO;AAAA,YACT;AAAA,UACF;AAEA,iBAAO,CAAC;AAAA,QACV,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO,CAAC;AAAA,EACV,CAAC;AACH;AAEA,MAAM,uBAAuB,OAAO,sBAAsB;AAOnD,MAAM,4BAA4B,CACvC,gBACsB;AAItB,QAAM,uBAAuB,YAAY;AAAA,IACvC,CAAC,KAAK,WAAW;AACf,YAAM,cAAc,OAAO,aAAa,QAAQ;AAEhD,OAAC,IAAI,WAAW,MAAM,CAAC,GAAG,KAAK,MAAM;AAErC,aAAO;AAAA,IACT;AAAA,IACA,CAAC;AAAA,EACH;AAEA,QAAM,iBAAiB,qBAAqB,oBAAoB;AAEhE,QAAM,UAAU;AAAA,IACd,GAAI,gBAAgB,SAAU,CAAC,CAAC,QAAW,cAAc,CAAC,IAAc,CAAC;AAAA,IACzE,GAAG,OAAO,QAAQ,oBAAoB;AAAA,EACxC;AAGA,SAAO,QAAQ,IAAqB,CAAC,CAAC,aAAa,OAAO,OAAO;AAAA,IAC/D,aAAa,kBAAkB,OAAO;AAAA,IACtC;AAAA,EACF,EAAE;AACJ;",
  "names": ["path", "stripAnsi", "dedent"]
  }
@@ -72,6 +72,4 @@ class GitHubReporter {
      }
    }
  }
- // Annotate the CommonJS export names for ESM import in node:
- 0 && (module.exports = {});
  //# sourceMappingURL=index.js.map
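The deleted `0 && (module.exports = {})` line is an esbuild interop hint: dead code at runtime, but statically visible to Node's CommonJS named-export detection so that ESM consumers can use named imports. This module exposes no named exports, so the empty hint serves no purpose. A sketch of the compiled output shape when named exports do exist (the helper name is illustrative):

    const helper = () => 'named export';

    module.exports = { helper };

    // Annotate the CommonJS export names for ESM import in node:
    0 && (module.exports = { helper });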
@@ -1,3 +1,4 @@
+ import { type Logger } from './logging';
  export type TextProcessor = (contents: string) => string;
  interface CopyFilesOptions {
    sourceRoot: string;
@@ -13,4 +14,5 @@ export declare const createStringReplacer: (replacements: Array<{
    output: string;
  }>) => TextProcessor;
  export declare const copyFiles: (opts: CopyFilesOptions, currentSourceDir?: string, currentDestinationDir?: string) => Promise<void>;
+ export declare const copyAssets: (destinationDir: string, logger?: Logger) => Promise<void>;
  export {};
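The new copyAssets utility is driven by a `preserveAssets` property read from the consumer's manifest via getPropFromConsumerManifest (see the implementation below). A sketch of the shape it expects (field placement within package.json and the glob values are assumptions):

    // Hypothetical consumer package.json excerpt:
    // {
    //   "name": "my-service",
    //   "preserveAssets": ["**/*.graphql", "**/*.sql"]
    // }
    type ConsumerManifestSketch = {
      name: string;
      // Glob patterns, matched against files under src/; copyAssets bails
      // out when this is absent.
      preserveAssets?: string[];
    };

    const manifest: ConsumerManifestSketch = {
      name: 'my-service',
      preserveAssets: ['**/*.graphql', '**/*.sql'],
    };

    console.log(manifest.preserveAssets?.length); // 2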
package/lib/utils/copy.js CHANGED
@@ -28,6 +28,7 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
  var copy_exports = {};
  __export(copy_exports, {
+   copyAssets: () => copyAssets,
    copyFiles: () => copyFiles,
    createEjsRenderer: () => createEjsRenderer,
    createStringReplacer: () => createStringReplacer
@@ -36,9 +37,14 @@ module.exports = __toCommonJS(copy_exports);
  var import_path = __toESM(require("path"));
  var import_ejs = __toESM(require("ejs"));
  var import_fs_extra = __toESM(require("fs-extra"));
+ var import_dir = require("./dir");
  var import_error = require("./error");
  var import_logging = require("./logging");
- const copyFile = async (sourcePath, destinationPath, { overwrite = true, processors }) => {
+ var import_manifest = require("./manifest");
+ const copyFile = async (sourcePath, destinationPath, {
+   overwrite = true,
+   processors
+ }) => {
    const oldContents = await import_fs_extra.default.promises.readFile(sourcePath, "utf8");
    const newContents = processors.reduce(
      (contents, process) => process(contents),
@@ -91,8 +97,40 @@ const copyFiles = async (opts, currentSourceDir = opts.sourceRoot, currentDestinationDir = opts.destinationRoot) => {
    })
  );
};
+ const copyAssets = async (destinationDir, logger = import_logging.log) => {
+   const manifest = await (0, import_manifest.getConsumerManifest)();
+   const preserveAssets = await (0, import_manifest.getPropFromConsumerManifest)(
+     "preserveAssets"
+   );
+   if (!manifest || !preserveAssets) {
+     return;
+   }
+   const srcDir = import_path.default.join(import_path.default.dirname(manifest.path), "src");
+   const allFiles = await (0, import_dir.crawlDirectory)(srcDir);
+   const filesByPattern = (0, import_dir.buildPatternToFilepathMap)(preserveAssets, allFiles, {
+     cwd: srcDir,
+     dot: true
+   });
+   for (const filenames of Object.values(filesByPattern)) {
+     await Promise.all(
+       filenames.map(async (filename) => {
+         logger?.subtle(`Copying ${filename}`);
+         await import_fs_extra.default.promises.mkdir(
+           import_path.default.dirname(import_path.default.join(destinationDir, filename)),
+           { recursive: true }
+         );
+         await copyFile(
+           import_path.default.join(srcDir, filename),
+           import_path.default.join(destinationDir, filename),
+           { processors: [] }
+         );
+       })
+     );
+   }
+ };
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
+   copyAssets,
    copyFiles,
    createEjsRenderer,
    createStringReplacer
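A minimal sketch of invoking the new utility, assuming skuba's internal module layout; copyAssets returns quietly when no consumer manifest is found or it defines no preserveAssets patterns:

    // Internal path, not public API; shown for illustration only.
    import { copyAssets } from 'skuba/lib/utils/copy';

    const main = async () => {
      // Mirrors each src/ file matching a preserveAssets glob into lib/,
      // creating intermediate directories as needed.
      await copyAssets('lib');
    };

    void main();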
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../src/utils/copy.ts"],
- "sourcesContent": ["import path from 'path';\n\nimport ejs from 'ejs';\nimport fs from 'fs-extra';\n\nimport { isErrorWithCode } from './error';\nimport { log } from './logging';\n\nexport type TextProcessor = (contents: string) => string;\n\nconst copyFile = async (\n sourcePath: string,\n destinationPath: string,\n { overwrite = true, processors }: CopyFilesOptions,\n) => {\n const oldContents = await fs.promises.readFile(sourcePath, 'utf8');\n\n const newContents = processors.reduce(\n (contents, process) => process(contents),\n oldContents,\n );\n\n if (oldContents === newContents && sourcePath === destinationPath) {\n return;\n }\n\n try {\n await fs.promises.writeFile(destinationPath, newContents, {\n flag: overwrite ? 'w' : 'wx',\n });\n } catch (err) {\n if (isErrorWithCode(err, 'EEXIST')) {\n return;\n }\n\n throw err;\n }\n};\n\ninterface CopyFilesOptions {\n sourceRoot: string;\n destinationRoot: string;\n\n include: (pathname: string) => boolean;\n overwrite?: boolean;\n processors: Array<TextProcessor>;\n stripUnderscorePrefix?: boolean;\n}\n\nexport const createEjsRenderer =\n (templateData: Record<string, unknown>): TextProcessor =>\n (contents) =>\n ejs.render(contents, templateData);\n\nexport const createStringReplacer =\n (\n replacements: Array<{\n input: RegExp;\n output: string;\n }>,\n ): TextProcessor =>\n (contents) =>\n replacements.reduce(\n (newContents, { input, output }) => newContents.replace(input, output),\n contents,\n );\n\nexport const copyFiles = async (\n opts: CopyFilesOptions,\n currentSourceDir: string = opts.sourceRoot,\n currentDestinationDir: string = opts.destinationRoot,\n) => {\n const filenames = await fs.promises.readdir(currentSourceDir);\n\n const toDestinationPath = (filename: string) =>\n path.join(\n currentDestinationDir,\n opts.stripUnderscorePrefix\n ? filename\n .replace(/^_\\./, '.')\n .replace(/^_package\\.json/, 'package.json')\n : filename,\n );\n\n const filteredFilenames = filenames.filter((filename) =>\n opts.include(\n path.relative(opts.destinationRoot, toDestinationPath(filename)),\n ),\n );\n\n await Promise.all(\n filteredFilenames.map(async (filename) => {\n const sourcePath = path.join(currentSourceDir, filename);\n const destinationPath = toDestinationPath(filename);\n\n try {\n await copyFile(sourcePath, destinationPath, opts);\n } catch (err) {\n if (isErrorWithCode(err, 'EISDIR')) {\n await fs.promises.mkdir(destinationPath, { recursive: true });\n return copyFiles(opts, sourcePath, destinationPath);\n }\n\n log.err('Failed to render', log.bold(sourcePath));\n\n throw err;\n }\n }),\n );\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,iBAAgB;AAChB,sBAAe;AAEf,mBAAgC;AAChC,qBAAoB;AAIpB,MAAM,WAAW,OACf,YACA,iBACA,EAAE,YAAY,MAAM,WAAW,MAC5B;AACH,QAAM,cAAc,MAAM,gBAAAA,QAAG,SAAS,SAAS,YAAY,MAAM;AAEjE,QAAM,cAAc,WAAW;AAAA,IAC7B,CAAC,UAAU,YAAY,QAAQ,QAAQ;AAAA,IACvC;AAAA,EACF;AAEA,MAAI,gBAAgB,eAAe,eAAe,iBAAiB;AACjE;AAAA,EACF;AAEA,MAAI;AACF,UAAM,gBAAAA,QAAG,SAAS,UAAU,iBAAiB,aAAa;AAAA,MACxD,MAAM,YAAY,MAAM;AAAA,IAC1B,CAAC;AAAA,EACH,SAAS,KAAP;AACA,YAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC;AAAA,IACF;AAEA,UAAM;AAAA,EACR;AACF;AAYO,MAAM,oBACX,CAAC,iBACD,CAAC,aACC,WAAAC,QAAI,OAAO,UAAU,YAAY;AAE9B,MAAM,uBACX,CACE,iBAKF,CAAC,aACC,aAAa;AAAA,EACX,CAAC,aAAa,EAAE,OAAO,OAAO,MAAM,YAAY,QAAQ,OAAO,MAAM;AAAA,EACrE;AACF;AAEG,MAAM,YAAY,OACvB,MACA,mBAA2B,KAAK,YAChC,wBAAgC,KAAK,oBAClC;AACH,QAAM,YAAY,MAAM,gBAAAD,QAAG,SAAS,QAAQ,gBAAgB;AAE5D,QAAM,oBAAoB,CAAC,aACzB,YAAAE,QAAK;AAAA,IACH;AAAA,IACA,KAAK,wBACD,SACG,QAAQ,QAAQ,GAAG,EACnB,QAAQ,mBAAmB,cAAc,IAC5C;AAAA,EACN;AAEF,QAAM,oBAAoB,UAAU;AAAA,IAAO,CAAC,aAC1C,KAAK;AAAA,MACH,YAAAA,QAAK,SAAS,KAAK,iBAAiB,kBAAkB,QAAQ,CAAC;AAAA,IACjE;AAAA,EACF;AAEA,QAAM,QAAQ;AAAA,IACZ,kBAAkB,IAAI,OAAO,aAAa;AACxC,YAAM,aAAa,YAAAA,QAAK,KAAK,kBAAkB,QAAQ;AACvD,YAAM,kBAAkB,kBAAkB,QAAQ;AAElD,UAAI;AACF,cAAM,SAAS,YAAY,iBAAiB,IAAI;AAAA,MAClD,SAAS,KAAP;AACA,gBAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC,gBAAM,gBAAAF,QAAG,SAAS,MAAM,iBAAiB,EAAE,WAAW,KAAK,CAAC;AAC5D,iBAAO,UAAU,MAAM,YAAY,eAAe;AAAA,QACpD;AAEA,2BAAI,IAAI,oBAAoB,mBAAI,KAAK,UAAU,CAAC;AAEhD,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAAA,EACH;AACF;",
+ "sourcesContent": ["import path from 'path';\n\nimport ejs from 'ejs';\nimport fs from 'fs-extra';\n\nimport { buildPatternToFilepathMap, crawlDirectory } from './dir';\nimport { isErrorWithCode } from './error';\nimport { type Logger, log } from './logging';\nimport { getConsumerManifest, getPropFromConsumerManifest } from './manifest';\n\nexport type TextProcessor = (contents: string) => string;\n\nconst copyFile = async (\n sourcePath: string,\n destinationPath: string,\n {\n overwrite = true,\n processors,\n }: Pick<CopyFilesOptions, 'overwrite' | 'processors'>,\n) => {\n const oldContents = await fs.promises.readFile(sourcePath, 'utf8');\n\n const newContents = processors.reduce(\n (contents, process) => process(contents),\n oldContents,\n );\n\n if (oldContents === newContents && sourcePath === destinationPath) {\n return;\n }\n\n try {\n await fs.promises.writeFile(destinationPath, newContents, {\n flag: overwrite ? 'w' : 'wx',\n });\n } catch (err) {\n if (isErrorWithCode(err, 'EEXIST')) {\n return;\n }\n\n throw err;\n }\n};\n\ninterface CopyFilesOptions {\n sourceRoot: string;\n destinationRoot: string;\n\n include: (pathname: string) => boolean;\n overwrite?: boolean;\n processors: Array<TextProcessor>;\n stripUnderscorePrefix?: boolean;\n}\n\nexport const createEjsRenderer =\n (templateData: Record<string, unknown>): TextProcessor =>\n (contents) =>\n ejs.render(contents, templateData);\n\nexport const createStringReplacer =\n (\n replacements: Array<{\n input: RegExp;\n output: string;\n }>,\n ): TextProcessor =>\n (contents) =>\n replacements.reduce(\n (newContents, { input, output }) => newContents.replace(input, output),\n contents,\n );\n\nexport const copyFiles = async (\n opts: CopyFilesOptions,\n currentSourceDir: string = opts.sourceRoot,\n currentDestinationDir: string = opts.destinationRoot,\n) => {\n const filenames = await fs.promises.readdir(currentSourceDir);\n\n const toDestinationPath = (filename: string) =>\n path.join(\n currentDestinationDir,\n opts.stripUnderscorePrefix\n ? filename\n .replace(/^_\\./, '.')\n .replace(/^_package\\.json/, 'package.json')\n : filename,\n );\n\n const filteredFilenames = filenames.filter((filename) =>\n opts.include(\n path.relative(opts.destinationRoot, toDestinationPath(filename)),\n ),\n );\n\n await Promise.all(\n filteredFilenames.map(async (filename) => {\n const sourcePath = path.join(currentSourceDir, filename);\n const destinationPath = toDestinationPath(filename);\n\n try {\n await copyFile(sourcePath, destinationPath, opts);\n } catch (err) {\n if (isErrorWithCode(err, 'EISDIR')) {\n await fs.promises.mkdir(destinationPath, { recursive: true });\n return copyFiles(opts, sourcePath, destinationPath);\n }\n\n log.err('Failed to render', log.bold(sourcePath));\n\n throw err;\n }\n }),\n );\n};\n\nexport const copyAssets = async (\n destinationDir: string,\n logger: Logger = log,\n) => {\n const manifest = await getConsumerManifest();\n const preserveAssets = await getPropFromConsumerManifest<string[]>(\n 'preserveAssets',\n );\n\n if (!manifest || !preserveAssets) {\n return;\n }\n\n const srcDir = path.join(path.dirname(manifest.path), 'src');\n const allFiles = await crawlDirectory(srcDir);\n const filesByPattern = buildPatternToFilepathMap(preserveAssets, allFiles, {\n cwd: srcDir,\n dot: true,\n });\n\n for (const filenames of Object.values(filesByPattern)) {\n await Promise.all(\n filenames.map(async (filename) => {\n logger?.subtle(`Copying ${filename}`);\n\n await fs.promises.mkdir(\n path.dirname(path.join(destinationDir, filename)),\n { recursive: true },\n );\n await copyFile(\n path.join(srcDir, filename),\n path.join(destinationDir, filename),\n { processors: [] },\n );\n }),\n );\n }\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,iBAAgB;AAChB,sBAAe;AAEf,iBAA0D;AAC1D,mBAAgC;AAChC,qBAAiC;AACjC,sBAAiE;AAIjE,MAAM,WAAW,OACf,YACA,iBACA;AAAA,EACE,YAAY;AAAA,EACZ;AACF,MACG;AACH,QAAM,cAAc,MAAM,gBAAAA,QAAG,SAAS,SAAS,YAAY,MAAM;AAEjE,QAAM,cAAc,WAAW;AAAA,IAC7B,CAAC,UAAU,YAAY,QAAQ,QAAQ;AAAA,IACvC;AAAA,EACF;AAEA,MAAI,gBAAgB,eAAe,eAAe,iBAAiB;AACjE;AAAA,EACF;AAEA,MAAI;AACF,UAAM,gBAAAA,QAAG,SAAS,UAAU,iBAAiB,aAAa;AAAA,MACxD,MAAM,YAAY,MAAM;AAAA,IAC1B,CAAC;AAAA,EACH,SAAS,KAAP;AACA,YAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC;AAAA,IACF;AAEA,UAAM;AAAA,EACR;AACF;AAYO,MAAM,oBACX,CAAC,iBACD,CAAC,aACC,WAAAC,QAAI,OAAO,UAAU,YAAY;AAE9B,MAAM,uBACX,CACE,iBAKF,CAAC,aACC,aAAa;AAAA,EACX,CAAC,aAAa,EAAE,OAAO,OAAO,MAAM,YAAY,QAAQ,OAAO,MAAM;AAAA,EACrE;AACF;AAEG,MAAM,YAAY,OACvB,MACA,mBAA2B,KAAK,YAChC,wBAAgC,KAAK,oBAClC;AACH,QAAM,YAAY,MAAM,gBAAAD,QAAG,SAAS,QAAQ,gBAAgB;AAE5D,QAAM,oBAAoB,CAAC,aACzB,YAAAE,QAAK;AAAA,IACH;AAAA,IACA,KAAK,wBACD,SACG,QAAQ,QAAQ,GAAG,EACnB,QAAQ,mBAAmB,cAAc,IAC5C;AAAA,EACN;AAEF,QAAM,oBAAoB,UAAU;AAAA,IAAO,CAAC,aAC1C,KAAK;AAAA,MACH,YAAAA,QAAK,SAAS,KAAK,iBAAiB,kBAAkB,QAAQ,CAAC;AAAA,IACjE;AAAA,EACF;AAEA,QAAM,QAAQ;AAAA,IACZ,kBAAkB,IAAI,OAAO,aAAa;AACxC,YAAM,aAAa,YAAAA,QAAK,KAAK,kBAAkB,QAAQ;AACvD,YAAM,kBAAkB,kBAAkB,QAAQ;AAElD,UAAI;AACF,cAAM,SAAS,YAAY,iBAAiB,IAAI;AAAA,MAClD,SAAS,KAAP;AACA,gBAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC,gBAAM,gBAAAF,QAAG,SAAS,MAAM,iBAAiB,EAAE,WAAW,KAAK,CAAC;AAC5D,iBAAO,UAAU,MAAM,YAAY,eAAe;AAAA,QACpD;AAEA,2BAAI,IAAI,oBAAoB,mBAAI,KAAK,UAAU,CAAC;AAEhD,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAEO,MAAM,aAAa,OACxB,gBACA,SAAiB,uBACd;AACH,QAAM,WAAW,UAAM,qCAAoB;AAC3C,QAAM,iBAAiB,UAAM;AAAA,IAC3B;AAAA,EACF;AAEA,MAAI,CAAC,YAAY,CAAC,gBAAgB;AAChC;AAAA,EACF;AAEA,QAAM,SAAS,YAAAE,QAAK,KAAK,YAAAA,QAAK,QAAQ,SAAS,IAAI,GAAG,KAAK;AAC3D,QAAM,WAAW,UAAM,2BAAe,MAAM;AAC5C,QAAM,qBAAiB,sCAA0B,gBAAgB,UAAU;AAAA,IACzE,KAAK;AAAA,IACL,KAAK;AAAA,EACP,CAAC;AAED,aAAW,aAAa,OAAO,OAAO,cAAc,GAAG;AACrD,UAAM,QAAQ;AAAA,MACZ,UAAU,IAAI,OAAO,aAAa;AAChC,gBAAQ,OAAO,WAAW,UAAU;AAEpC,cAAM,gBAAAF,QAAG,SAAS;AAAA,UAChB,YAAAE,QAAK,QAAQ,YAAAA,QAAK,KAAK,gBAAgB,QAAQ,CAAC;AAAA,UAChD,EAAE,WAAW,KAAK;AAAA,QACpB;AACA,cAAM;AAAA,UACJ,YAAAA,QAAK,KAAK,QAAQ,QAAQ;AAAA,UAC1B,YAAAA,QAAK,KAAK,gBAAgB,QAAQ;AAAA,UAClC,EAAE,YAAY,CAAC,EAAE;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AACF;",
  "names": ["fs", "ejs", "path"]
  }
@@ -1,7 +1,8 @@
+ import picomatch from 'picomatch';
  /**
   * Build a map that associates each glob pattern with its matching filepaths.
   */
- export declare const buildPatternToFilepathMap: (patterns: string[], allFilepaths: string[]) => {
+ export declare const buildPatternToFilepathMap: (patterns: string[], allFilepaths: string[], options?: picomatch.PicomatchOptions) => {
    [k: string]: string[];
  };
  /**
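buildPatternToFilepathMap now forwards an optional picomatch options object to the matcher; copyAssets passes `{ cwd: srcDir, dot: true }` so that dotfiles under src/ are eligible. A sketch of the behavioural difference the `dot` flag makes:

    import picomatch from 'picomatch';

    const withDot = picomatch('**/*.json', { dot: true });
    const withoutDot = picomatch('**/*.json');

    console.log(withDot('.config/settings.json')); // true: dotfiles included
    console.log(withoutDot('.config/settings.json')); // false: `**` skips dot segments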
package/lib/utils/dir.js CHANGED
@@ -39,9 +39,9 @@ var import_fs_extra = __toESM(require("fs-extra"));
  var import_ignore = __toESM(require("ignore"));
  var import_picomatch = __toESM(require("picomatch"));
  var import_error = require("./error");
- const buildPatternToFilepathMap = (patterns, allFilepaths) => Object.fromEntries(
+ const buildPatternToFilepathMap = (patterns, allFilepaths, options) => Object.fromEntries(
    patterns.map((pattern) => {
-     const isMatch = (0, import_picomatch.default)(pattern);
+     const isMatch = (0, import_picomatch.default)(pattern, options);
      const filepaths = allFilepaths.filter((filepath) => isMatch(filepath));
      return [pattern, filepaths];
    })
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../src/utils/dir.ts"],
- "sourcesContent": ["import path from 'path';\n\nimport { fdir as FDir } from 'fdir';\nimport fs from 'fs-extra';\nimport ignore from 'ignore';\nimport picomatch from 'picomatch';\n\nimport { isErrorWithCode } from './error';\n\n/**\n * Build a map that associates each glob pattern with its matching filepaths.\n */\nexport const buildPatternToFilepathMap = (\n patterns: string[],\n allFilepaths: string[],\n) =>\n Object.fromEntries(\n patterns.map((pattern) => {\n const isMatch = picomatch(pattern);\n\n const filepaths = allFilepaths.filter((filepath) => isMatch(filepath));\n\n return [pattern, filepaths] as const;\n }),\n );\n\n/**\n * List relative filepaths contained within a directory root.\n *\n * This excludes:\n *\n * - Patterns in the ignore files specified in `ignoreFilenames`\n * - `.git` subdirectories\n * - `node_modules` subdirectories\n */\nexport const crawlDirectory = async (\n root: string,\n ignoreFilename = '.gitignore',\n) => {\n const ignoreFileFilter = await createInclusionFilter([\n path.join(root, ignoreFilename),\n ]);\n\n const output = await new FDir()\n .crawlWithOptions(root, {\n exclude: (dirname) => ['.git', 'node_modules'].includes(dirname),\n filters: [\n (pathname) => {\n const relativePathname = path.relative(root, pathname);\n\n return ignoreFileFilter(relativePathname);\n },\n ],\n includeBasePath: true,\n })\n .withPromise();\n\n // Patch over non-specific `fdir` typings.\n const absoluteFilenames = output as string[];\n\n const relativeFilepaths = absoluteFilenames.map((filepath) =>\n path.relative(root, filepath),\n );\n\n return relativeFilepaths;\n};\n\n/**\n * Create a filter function that excludes filepaths based on ignore files like\n * `.gitignore` and `.prettierignore`.\n */\nexport const createInclusionFilter = async (ignoreFilepaths: string[]) => {\n const ignoreFiles = await Promise.all(\n ignoreFilepaths.map(async (ignoreFilepath) => {\n try {\n return await fs.promises.readFile(ignoreFilepath, 'utf8');\n } catch (err) {\n if (isErrorWithCode(err, 'ENOENT')) {\n return;\n }\n\n throw err;\n }\n }),\n );\n\n const managers = ignoreFiles\n .filter((value): value is string => typeof value === 'string')\n .map((value) => ignore().add(value));\n\n return ignore().add('.git').add(managers).createFilter();\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,kBAA6B;AAC7B,sBAAe;AACf,oBAAmB;AACnB,uBAAsB;AAEtB,mBAAgC;AAKzB,MAAM,4BAA4B,CACvC,UACA,iBAEA,OAAO;AAAA,EACL,SAAS,IAAI,CAAC,YAAY;AACxB,UAAM,cAAU,iBAAAA,SAAU,OAAO;AAEjC,UAAM,YAAY,aAAa,OAAO,CAAC,aAAa,QAAQ,QAAQ,CAAC;AAErE,WAAO,CAAC,SAAS,SAAS;AAAA,EAC5B,CAAC;AACH;AAWK,MAAM,iBAAiB,OAC5B,MACA,iBAAiB,iBACd;AACH,QAAM,mBAAmB,MAAM,sBAAsB;AAAA,IACnD,YAAAC,QAAK,KAAK,MAAM,cAAc;AAAA,EAChC,CAAC;AAED,QAAM,SAAS,MAAM,IAAI,YAAAC,KAAK,EAC3B,iBAAiB,MAAM;AAAA,IACtB,SAAS,CAAC,YAAY,CAAC,QAAQ,cAAc,EAAE,SAAS,OAAO;AAAA,IAC/D,SAAS;AAAA,MACP,CAAC,aAAa;AACZ,cAAM,mBAAmB,YAAAD,QAAK,SAAS,MAAM,QAAQ;AAErD,eAAO,iBAAiB,gBAAgB;AAAA,MAC1C;AAAA,IACF;AAAA,IACA,iBAAiB;AAAA,EACnB,CAAC,EACA,YAAY;AAGf,QAAM,oBAAoB;AAE1B,QAAM,oBAAoB,kBAAkB;AAAA,IAAI,CAAC,aAC/C,YAAAA,QAAK,SAAS,MAAM,QAAQ;AAAA,EAC9B;AAEA,SAAO;AACT;AAMO,MAAM,wBAAwB,OAAO,oBAA8B;AACxE,QAAM,cAAc,MAAM,QAAQ;AAAA,IAChC,gBAAgB,IAAI,OAAO,mBAAmB;AAC5C,UAAI;AACF,eAAO,MAAM,gBAAAE,QAAG,SAAS,SAAS,gBAAgB,MAAM;AAAA,MAC1D,SAAS,KAAP;AACA,gBAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC;AAAA,QACF;AAEA,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,WAAW,YACd,OAAO,CAAC,UAA2B,OAAO,UAAU,QAAQ,EAC5D,IAAI,CAAC,cAAU,cAAAC,SAAO,EAAE,IAAI,KAAK,CAAC;AAErC,aAAO,cAAAA,SAAO,EAAE,IAAI,MAAM,EAAE,IAAI,QAAQ,EAAE,aAAa;AACzD;",
+ "sourcesContent": ["import path from 'path';\n\nimport { fdir as FDir } from 'fdir';\nimport fs from 'fs-extra';\nimport ignore from 'ignore';\nimport picomatch from 'picomatch';\n\nimport { isErrorWithCode } from './error';\n\n/**\n * Build a map that associates each glob pattern with its matching filepaths.\n */\nexport const buildPatternToFilepathMap = (\n patterns: string[],\n allFilepaths: string[],\n options?: picomatch.PicomatchOptions,\n) =>\n Object.fromEntries(\n patterns.map((pattern) => {\n const isMatch = picomatch(pattern, options);\n\n const filepaths = allFilepaths.filter((filepath) => isMatch(filepath));\n\n return [pattern, filepaths] as const;\n }),\n );\n\n/**\n * List relative filepaths contained within a directory root.\n *\n * This excludes:\n *\n * - Patterns in the ignore files specified in `ignoreFilenames`\n * - `.git` subdirectories\n * - `node_modules` subdirectories\n */\nexport const crawlDirectory = async (\n root: string,\n ignoreFilename = '.gitignore',\n) => {\n const ignoreFileFilter = await createInclusionFilter([\n path.join(root, ignoreFilename),\n ]);\n\n const output = await new FDir()\n .crawlWithOptions(root, {\n exclude: (dirname) => ['.git', 'node_modules'].includes(dirname),\n filters: [\n (pathname) => {\n const relativePathname = path.relative(root, pathname);\n\n return ignoreFileFilter(relativePathname);\n },\n ],\n includeBasePath: true,\n })\n .withPromise();\n\n // Patch over non-specific `fdir` typings.\n const absoluteFilenames = output as string[];\n\n const relativeFilepaths = absoluteFilenames.map((filepath) =>\n path.relative(root, filepath),\n );\n\n return relativeFilepaths;\n};\n\n/**\n * Create a filter function that excludes filepaths based on ignore files like\n * `.gitignore` and `.prettierignore`.\n */\nexport const createInclusionFilter = async (ignoreFilepaths: string[]) => {\n const ignoreFiles = await Promise.all(\n ignoreFilepaths.map(async (ignoreFilepath) => {\n try {\n return await fs.promises.readFile(ignoreFilepath, 'utf8');\n } catch (err) {\n if (isErrorWithCode(err, 'ENOENT')) {\n return;\n }\n\n throw err;\n }\n }),\n );\n\n const managers = ignoreFiles\n .filter((value): value is string => typeof value === 'string')\n .map((value) => ignore().add(value));\n\n return ignore().add('.git').add(managers).createFilter();\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,kBAA6B;AAC7B,sBAAe;AACf,oBAAmB;AACnB,uBAAsB;AAEtB,mBAAgC;AAKzB,MAAM,4BAA4B,CACvC,UACA,cACA,YAEA,OAAO;AAAA,EACL,SAAS,IAAI,CAAC,YAAY;AACxB,UAAM,cAAU,iBAAAA,SAAU,SAAS,OAAO;AAE1C,UAAM,YAAY,aAAa,OAAO,CAAC,aAAa,QAAQ,QAAQ,CAAC;AAErE,WAAO,CAAC,SAAS,SAAS;AAAA,EAC5B,CAAC;AACH;AAWK,MAAM,iBAAiB,OAC5B,MACA,iBAAiB,iBACd;AACH,QAAM,mBAAmB,MAAM,sBAAsB;AAAA,IACnD,YAAAC,QAAK,KAAK,MAAM,cAAc;AAAA,EAChC,CAAC;AAED,QAAM,SAAS,MAAM,IAAI,YAAAC,KAAK,EAC3B,iBAAiB,MAAM;AAAA,IACtB,SAAS,CAAC,YAAY,CAAC,QAAQ,cAAc,EAAE,SAAS,OAAO;AAAA,IAC/D,SAAS;AAAA,MACP,CAAC,aAAa;AACZ,cAAM,mBAAmB,YAAAD,QAAK,SAAS,MAAM,QAAQ;AAErD,eAAO,iBAAiB,gBAAgB;AAAA,MAC1C;AAAA,IACF;AAAA,IACA,iBAAiB;AAAA,EACnB,CAAC,EACA,YAAY;AAGf,QAAM,oBAAoB;AAE1B,QAAM,oBAAoB,kBAAkB;AAAA,IAAI,CAAC,aAC/C,YAAAA,QAAK,SAAS,MAAM,QAAQ;AAAA,EAC9B;AAEA,SAAO;AACT;AAMO,MAAM,wBAAwB,OAAO,oBAA8B;AACxE,QAAM,cAAc,MAAM,QAAQ;AAAA,IAChC,gBAAgB,IAAI,OAAO,mBAAmB;AAC5C,UAAI;AACF,eAAO,MAAM,gBAAAE,QAAG,SAAS,SAAS,gBAAgB,MAAM;AAAA,MAC1D,SAAS,KAAP;AACA,gBAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC;AAAA,QACF;AAEA,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,WAAW,YACd,OAAO,CAAC,UAA2B,OAAO,UAAU,QAAQ,EAC5D,IAAI,CAAC,cAAU,cAAAC,SAAO,EAAE,IAAI,KAAK,CAAC;AAErC,aAAO,cAAAA,SAAO,EAAE,IAAI,MAAM,EAAE,IAAI,QAAQ,EAAE,aAAa;AACzD;",
  "names": ["picomatch", "path", "FDir", "fs", "ignore"]
  }