skuba 6.0.1 → 6.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. package/lib/api/jest/index.d.ts +4 -1
  2. package/lib/cli/adapter/eslint.js +0 -24
  3. package/lib/cli/adapter/eslint.js.map +2 -2
  4. package/lib/cli/configure/patchRenovateConfig.js +3 -1
  5. package/lib/cli/configure/patchRenovateConfig.js.map +2 -2
  6. package/lib/cli/test/reporters/github/index.js +0 -2
  7. package/lib/wrapper/requestListener.js +3 -0
  8. package/lib/wrapper/requestListener.js.map +2 -2
  9. package/package.json +10 -10
  10. package/template/express-rest-api/.buildkite/pipeline.yml +1 -1
  11. package/template/express-rest-api/package.json +1 -1
  12. package/template/express-rest-api/src/api/healthCheck.ts +1 -1
  13. package/template/express-rest-api/src/api/smokeTest.ts +1 -1
  14. package/template/greeter/.buildkite/pipeline.yml +1 -1
  15. package/template/koa-rest-api/.buildkite/pipeline.yml +1 -1
  16. package/template/koa-rest-api/package.json +4 -4
  17. package/template/koa-rest-api/src/api/healthCheck.ts +1 -1
  18. package/template/koa-rest-api/src/api/jobs/getJobs.ts +1 -1
  19. package/template/koa-rest-api/src/api/jobs/postJob.ts +1 -1
  20. package/template/koa-rest-api/src/api/smokeTest.ts +1 -1
  21. package/template/koa-rest-api/src/framework/server.test.ts +1 -1
  22. package/template/koa-rest-api/src/framework/validation.ts +2 -2
  23. package/template/koa-rest-api/src/storage/jobs.ts +1 -1
  24. package/template/koa-rest-api/src/testing/server.ts +2 -2
  25. package/template/koa-rest-api/src/testing/types.ts +1 -1
  26. package/template/lambda-sqs-worker/.buildkite/pipeline.yml +2 -2
  27. package/template/lambda-sqs-worker/package.json +2 -2
  28. package/template/lambda-sqs-worker/src/app.ts +1 -1
  29. package/template/lambda-sqs-worker/src/framework/validation.ts +1 -1
  30. package/template/lambda-sqs-worker/src/mapping/jobScorer.ts +5 -2
  31. package/template/lambda-sqs-worker/src/services/jobScorer.ts +6 -6
  32. package/template/lambda-sqs-worker/src/testing/handler.ts +1 -1
  33. package/template/lambda-sqs-worker/src/testing/types.ts +1 -1
  34. package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +2 -2
  35. package/template/lambda-sqs-worker-cdk/infra/appStack.ts +1 -1
  36. package/template/lambda-sqs-worker-cdk/package.json +1 -1
  37. package/template/lambda-sqs-worker-cdk/src/app.ts +1 -1
package/lib/api/jest/index.d.ts CHANGED
@@ -11,7 +11,7 @@ type DefaultOptions = 'collectCoverage' | 'collectCoverageFrom' | 'coveragePathI
  *
  * This concatenates array options like `testPathIgnorePatterns`.
  */
- export declare const mergePreset: <AdditionalOptions extends "filter" | "json" | "silent" | "cache" | "runtime" | "watch" | "runner" | "projects" | "id" | "automock" | "cacheDirectory" | "clearMocks" | "coveragePathIgnorePatterns" | "dependencyExtractor" | "detectLeaks" | "detectOpenHandles" | "displayName" | "errorOnDeprecated" | "extensionsToTreatAsEsm" | "fakeTimers" | "forceCoverageMatch" | "globalSetup" | "globalTeardown" | "globals" | "haste" | "injectGlobals" | "moduleDirectories" | "moduleFileExtensions" | "moduleNameMapper" | "modulePathIgnorePatterns" | "modulePaths" | "preset" | "prettierPath" | "resetMocks" | "resetModules" | "resolver" | "restoreMocks" | "rootDir" | "roots" | "sandboxInjectedGlobals" | "setupFiles" | "setupFilesAfterEnv" | "skipFilter" | "skipNodeResolution" | "slowTestThreshold" | "snapshotResolver" | "snapshotSerializers" | "snapshotFormat" | "testEnvironment" | "testEnvironmentOptions" | "testMatch" | "testLocationInResults" | "testPathIgnorePatterns" | "testRegex" | "testRunner" | "transform" | "transformIgnorePatterns" | "watchPathIgnorePatterns" | "unmockedModulePathPatterns" | "workerIdleMemoryLimit" | "bail" | "ci" | "changedFilesWithAncestor" | "changedSince" | "collectCoverage" | "collectCoverageFrom" | "coverageDirectory" | "coverageProvider" | "coverageReporters" | "coverageThreshold" | "expand" | "findRelatedTests" | "forceExit" | "reporters" | "logHeapUsage" | "lastCommit" | "listTests" | "maxConcurrency" | "maxWorkers" | "noStackTrace" | "notify" | "notifyMode" | "onlyChanged" | "onlyFailures" | "outputFile" | "passWithNoTests" | "replname" | "runTestsByPath" | "showSeed" | "testFailureExitCode" | "testNamePattern" | "testResultsProcessor" | "testSequencer" | "testTimeout" | "updateSnapshot" | "useStderr" | "verbose" | "watchAll" | "watchman" | "watchPlugins">(options: Pick<Partial<{
+ export declare const mergePreset: <AdditionalOptions extends "filter" | "json" | "silent" | "cache" | "runtime" | "watch" | "runner" | "projects" | "id" | "automock" | "cacheDirectory" | "clearMocks" | "coveragePathIgnorePatterns" | "dependencyExtractor" | "detectLeaks" | "detectOpenHandles" | "displayName" | "errorOnDeprecated" | "extensionsToTreatAsEsm" | "fakeTimers" | "forceCoverageMatch" | "globalSetup" | "globalTeardown" | "globals" | "haste" | "injectGlobals" | "moduleDirectories" | "moduleFileExtensions" | "moduleNameMapper" | "modulePathIgnorePatterns" | "modulePaths" | "openHandlesTimeout" | "preset" | "prettierPath" | "resetMocks" | "resetModules" | "resolver" | "restoreMocks" | "rootDir" | "roots" | "sandboxInjectedGlobals" | "setupFiles" | "setupFilesAfterEnv" | "skipFilter" | "skipNodeResolution" | "slowTestThreshold" | "snapshotResolver" | "snapshotSerializers" | "snapshotFormat" | "testEnvironment" | "testEnvironmentOptions" | "testMatch" | "testLocationInResults" | "testPathIgnorePatterns" | "testRegex" | "testRunner" | "transform" | "transformIgnorePatterns" | "watchPathIgnorePatterns" | "unmockedModulePathPatterns" | "workerIdleMemoryLimit" | "bail" | "ci" | "changedFilesWithAncestor" | "changedSince" | "collectCoverage" | "collectCoverageFrom" | "coverageDirectory" | "coverageProvider" | "coverageReporters" | "coverageThreshold" | "expand" | "findRelatedTests" | "forceExit" | "reporters" | "logHeapUsage" | "lastCommit" | "listTests" | "maxConcurrency" | "maxWorkers" | "noStackTrace" | "notify" | "notifyMode" | "onlyChanged" | "onlyFailures" | "outputFile" | "passWithNoTests" | "randomize" | "replname" | "runTestsByPath" | "showSeed" | "testFailureExitCode" | "testNamePattern" | "testResultsProcessor" | "testSequencer" | "testTimeout" | "updateSnapshot" | "useStderr" | "verbose" | "watchAll" | "watchman" | "watchPlugins" | "workerThreads">(options: Pick<Partial<{
  automock: boolean;
  bail: number | boolean;
  cache: boolean;
@@ -68,11 +68,13 @@ export declare const mergePreset: <AdditionalOptions extends "filter" | "json" |
  notifyMode: string;
  onlyChanged: boolean;
  onlyFailures: boolean;
+ openHandlesTimeout: number;
  outputFile: string;
  passWithNoTests: boolean;
  preset: string | null | undefined;
  prettierPath: string | null | undefined;
  projects: (string | Config.InitialProjectOptions)[];
+ randomize: boolean;
  replname: string | null | undefined;
  resetMocks: boolean;
  resetModules: boolean;
@@ -140,5 +142,6 @@ export declare const mergePreset: <AdditionalOptions extends "filter" | "json" |
  watchman: boolean;
  watchPlugins: (string | [string, Record<string, unknown>])[];
  workerIdleMemoryLimit: string | number;
+ workerThreads: boolean;
  }>, DefaultOptions | AdditionalOptions>) => Config.InitialOptions;
  export {};
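The widened `AdditionalOptions` union reflects the Jest 29.4/29.5 options (`openHandlesTimeout`, `randomize`, `workerThreads`) that `Jest.mergePreset` now accepts. A minimal `jest.config.ts` sketch follows; the option values are illustrative assumptions, not skuba defaults.

```typescript
// jest.config.ts — illustrative only
import { Jest } from 'skuba';

export default Jest.mergePreset({
  // Array options such as `testPathIgnorePatterns` are concatenated with the preset's own.
  testPathIgnorePatterns: ['<rootDir>/integration/'],

  // Options newly covered by the 6.1.0 typings:
  openHandlesTimeout: 1_000, // warn about handles still open ~1s after the run completes
  workerThreads: true, // opt into Jest's experimental worker_threads runner
});
```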
package/lib/cli/adapter/eslint.js CHANGED
@@ -45,40 +45,16 @@ const runESLint = async (mode, logger) => {
  logger.debug("Initialising ESLint...");
  const engine = new import_eslint.ESLint({
  cache: true,
- extensions: [
- "cjs",
- "cts",
- "js",
- "jsx",
- "mjs",
- "mts",
- "ts",
- "tsx",
- "yaml",
- "yml"
- ],
  fix: mode === "format",
  reportUnusedDisableDirectives: "error"
  });
  const cwd = process.cwd();
  logger.debug("Processing files...");
  const start = process.hrtime.bigint();
- const ogConsoleError = console.error;
- console.error = (...args) => {
- if (args[0] !== // `eslint-plugin-react` prints this annoying error on non-React repos.
- // We still want to support React linting for repos that have React code,
- // so we have to manually suppress it.
- //
- // https://github.com/yannickcr/eslint-plugin-react/blob/7484acaca8351a8568fa99344bc811c5cd8396bd/lib/util/version.js#L61-L65
- 'Warning: React version was set to "detect" in eslint-plugin-react settings, but the "react" package is not installed. Assuming latest React version for linting.') {
- ogConsoleError(...args);
- }
- };
  const [formatter, results] = await Promise.all([
  engine.loadFormatter(),
  engine.lintFiles(".")
  ]);
- console.error = ogConsoleError;
  const end = process.hrtime.bigint();
  logger.plain(
  `Processed ${(0, import_logging.pluralise)(results.length, "file")} in ${logger.timing(
package/lib/cli/adapter/eslint.js.map CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/cli/adapter/eslint.ts"],
- "sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport type { Linter } from 'eslint';\nimport { ESLint } from 'eslint';\n\nimport type { Logger } from '../../utils/logging';\nimport { pluralise } from '../../utils/logging';\n\nconst symbolForResult = (result: ESLint.LintResult) => {\n if (result.errorCount) {\n return chalk.red('\u25CB');\n }\n\n return result.warningCount ? chalk.yellow('\u25CD') : chalk.green('\u25CB');\n};\n\nexport interface ESLintResult {\n messages: Linter.LintMessage[];\n filePath: string;\n}\n\nexport interface ESLintOutput {\n errors: ESLintResult[];\n fixable: boolean;\n ok: boolean;\n output: string;\n warnings: ESLintResult[];\n}\n\nexport const runESLint = async (\n mode: 'format' | 'lint',\n logger: Logger,\n): Promise<ESLintOutput> => {\n logger.debug('Initialising ESLint...');\n\n const engine = new ESLint({\n cache: true,\n extensions: [\n 'cjs',\n 'cts',\n 'js',\n 'jsx',\n 'mjs',\n 'mts',\n 'ts',\n 'tsx',\n 'yaml',\n 'yml',\n ],\n fix: mode === 'format',\n reportUnusedDisableDirectives: 'error',\n });\n\n const cwd = process.cwd();\n\n logger.debug('Processing files...');\n\n const start = process.hrtime.bigint();\n\n /* eslint-disable no-console */\n const ogConsoleError = console.error;\n console.error = (...args: unknown[]) => {\n if (\n args[0] !==\n // `eslint-plugin-react` prints this annoying error on non-React repos.\n // We still want to support React linting for repos that have React code,\n // so we have to manually suppress it.\n //\n // https://github.com/yannickcr/eslint-plugin-react/blob/7484acaca8351a8568fa99344bc811c5cd8396bd/lib/util/version.js#L61-L65\n 'Warning: React version was set to \"detect\" in eslint-plugin-react settings, but the \"react\" package is not installed. Assuming latest React version for linting.'\n ) {\n ogConsoleError(...args);\n }\n };\n\n const [formatter, results] = await Promise.all([\n engine.loadFormatter(),\n engine.lintFiles('.'),\n ]);\n\n console.error = ogConsoleError;\n /* eslint-enable no-console */\n\n const end = process.hrtime.bigint();\n\n logger.plain(\n `Processed ${pluralise(results.length, 'file')} in ${logger.timing(\n start,\n end,\n )}.`,\n );\n\n const errors: ESLintResult[] = [];\n const warnings: ESLintResult[] = [];\n let fixable = false;\n\n for (const result of results) {\n const relativePath = path.relative(cwd, result.filePath);\n if (result.fixableErrorCount + result.fixableWarningCount) {\n fixable = true;\n }\n\n if (result.errorCount) {\n errors.push({\n filePath: relativePath,\n messages: result.messages,\n });\n }\n\n if (result.warningCount) {\n warnings.push({\n filePath: relativePath,\n messages: result.messages,\n });\n }\n\n logger.debug(symbolForResult(result), relativePath);\n }\n\n const ok = errors.length === 0;\n\n await ESLint.outputFixes(results);\n\n const output = await formatter.format(results);\n\n if (output) {\n logger.plain(output);\n }\n\n return { errors, fixable, ok, output, warnings };\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAElB,oBAAuB;AAGvB,qBAA0B;AAE1B,MAAM,kBAAkB,CAAC,WAA8B;AACrD,MAAI,OAAO,YAAY;AACrB,WAAO,aAAAA,QAAM,IAAI,QAAG;AAAA,EACtB;AAEA,SAAO,OAAO,eAAe,aAAAA,QAAM,OAAO,QAAG,IAAI,aAAAA,QAAM,MAAM,QAAG;AAClE;AAeO,MAAM,YAAY,OACvB,MACA,WAC0B;AAC1B,SAAO,MAAM,wBAAwB;AAErC,QAAM,SAAS,IAAI,qBAAO;AAAA,IACxB,OAAO;AAAA,IACP,YAAY;AAAA,MACV;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,IACA,KAAK,SAAS;AAAA,IACd,+BAA+B;AAAA,EACjC,CAAC;AAED,QAAM,MAAM,QAAQ,IAAI;AAExB,SAAO,MAAM,qBAAqB;AAElC,QAAM,QAAQ,QAAQ,OAAO,OAAO;AAGpC,QAAM,iBAAiB,QAAQ;AAC/B,UAAQ,QAAQ,IAAI,SAAoB;AACtC,QACE,KAAK,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,IAMN,oKACA;AACA,qBAAe,GAAG,IAAI;AAAA,IACxB;AAAA,EACF;AAEA,QAAM,CAAC,WAAW,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,IAC7C,OAAO,cAAc;AAAA,IACrB,OAAO,UAAU,GAAG;AAAA,EACtB,CAAC;AAED,UAAQ,QAAQ;AAGhB,QAAM,MAAM,QAAQ,OAAO,OAAO;AAElC,SAAO;AAAA,IACL,iBAAa,0BAAU,QAAQ,QAAQ,MAAM,QAAQ,OAAO;AAAA,MAC1D;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,SAAyB,CAAC;AAChC,QAAM,WAA2B,CAAC;AAClC,MAAI,UAAU;AAEd,aAAW,UAAU,SAAS;AAC5B,UAAM,eAAe,YAAAC,QAAK,SAAS,KAAK,OAAO,QAAQ;AACvD,QAAI,OAAO,oBAAoB,OAAO,qBAAqB;AACzD,gBAAU;AAAA,IACZ;AAEA,QAAI,OAAO,YAAY;AACrB,aAAO,KAAK;AAAA,QACV,UAAU;AAAA,QACV,UAAU,OAAO;AAAA,MACnB,CAAC;AAAA,IACH;AAEA,QAAI,OAAO,cAAc;AACvB,eAAS,KAAK;AAAA,QACZ,UAAU;AAAA,QACV,UAAU,OAAO;AAAA,MACnB,CAAC;AAAA,IACH;AAEA,WAAO,MAAM,gBAAgB,MAAM,GAAG,YAAY;AAAA,EACpD;AAEA,QAAM,KAAK,OAAO,WAAW;AAE7B,QAAM,qBAAO,YAAY,OAAO;AAEhC,QAAM,SAAS,MAAM,UAAU,OAAO,OAAO;AAE7C,MAAI,QAAQ;AACV,WAAO,MAAM,MAAM;AAAA,EACrB;AAEA,SAAO,EAAE,QAAQ,SAAS,IAAI,QAAQ,SAAS;AACjD;",
+ "sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport type { Linter } from 'eslint';\nimport { ESLint } from 'eslint';\n\nimport type { Logger } from '../../utils/logging';\nimport { pluralise } from '../../utils/logging';\n\nconst symbolForResult = (result: ESLint.LintResult) => {\n if (result.errorCount) {\n return chalk.red('\u25CB');\n }\n\n return result.warningCount ? chalk.yellow('\u25CD') : chalk.green('\u25CB');\n};\n\nexport interface ESLintResult {\n messages: Linter.LintMessage[];\n filePath: string;\n}\n\nexport interface ESLintOutput {\n errors: ESLintResult[];\n fixable: boolean;\n ok: boolean;\n output: string;\n warnings: ESLintResult[];\n}\n\nexport const runESLint = async (\n mode: 'format' | 'lint',\n logger: Logger,\n): Promise<ESLintOutput> => {\n logger.debug('Initialising ESLint...');\n\n const engine = new ESLint({\n cache: true,\n fix: mode === 'format',\n reportUnusedDisableDirectives: 'error',\n });\n\n const cwd = process.cwd();\n\n logger.debug('Processing files...');\n\n const start = process.hrtime.bigint();\n\n const [formatter, results] = await Promise.all([\n engine.loadFormatter(),\n engine.lintFiles('.'),\n ]);\n\n const end = process.hrtime.bigint();\n\n logger.plain(\n `Processed ${pluralise(results.length, 'file')} in ${logger.timing(\n start,\n end,\n )}.`,\n );\n\n const errors: ESLintResult[] = [];\n const warnings: ESLintResult[] = [];\n let fixable = false;\n\n for (const result of results) {\n const relativePath = path.relative(cwd, result.filePath);\n if (result.fixableErrorCount + result.fixableWarningCount) {\n fixable = true;\n }\n\n if (result.errorCount) {\n errors.push({\n filePath: relativePath,\n messages: result.messages,\n });\n }\n\n if (result.warningCount) {\n warnings.push({\n filePath: relativePath,\n messages: result.messages,\n });\n }\n\n logger.debug(symbolForResult(result), relativePath);\n }\n\n const ok = errors.length === 0;\n\n await ESLint.outputFixes(results);\n\n const output = await formatter.format(results);\n\n if (output) {\n logger.plain(output);\n }\n\n return { errors, fixable, ok, output, warnings };\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAElB,oBAAuB;AAGvB,qBAA0B;AAE1B,MAAM,kBAAkB,CAAC,WAA8B;AACrD,MAAI,OAAO,YAAY;AACrB,WAAO,aAAAA,QAAM,IAAI,QAAG;AAAA,EACtB;AAEA,SAAO,OAAO,eAAe,aAAAA,QAAM,OAAO,QAAG,IAAI,aAAAA,QAAM,MAAM,QAAG;AAClE;AAeO,MAAM,YAAY,OACvB,MACA,WAC0B;AAC1B,SAAO,MAAM,wBAAwB;AAErC,QAAM,SAAS,IAAI,qBAAO;AAAA,IACxB,OAAO;AAAA,IACP,KAAK,SAAS;AAAA,IACd,+BAA+B;AAAA,EACjC,CAAC;AAED,QAAM,MAAM,QAAQ,IAAI;AAExB,SAAO,MAAM,qBAAqB;AAElC,QAAM,QAAQ,QAAQ,OAAO,OAAO;AAEpC,QAAM,CAAC,WAAW,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,IAC7C,OAAO,cAAc;AAAA,IACrB,OAAO,UAAU,GAAG;AAAA,EACtB,CAAC;AAED,QAAM,MAAM,QAAQ,OAAO,OAAO;AAElC,SAAO;AAAA,IACL,iBAAa,0BAAU,QAAQ,QAAQ,MAAM,QAAQ,OAAO;AAAA,MAC1D;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,SAAyB,CAAC;AAChC,QAAM,WAA2B,CAAC;AAClC,MAAI,UAAU;AAEd,aAAW,UAAU,SAAS;AAC5B,UAAM,eAAe,YAAAC,QAAK,SAAS,KAAK,OAAO,QAAQ;AACvD,QAAI,OAAO,oBAAoB,OAAO,qBAAqB;AACzD,gBAAU;AAAA,IACZ;AAEA,QAAI,OAAO,YAAY;AACrB,aAAO,KAAK;AAAA,QACV,UAAU;AAAA,QACV,UAAU,OAAO;AAAA,MACnB,CAAC;AAAA,IACH;AAEA,QAAI,OAAO,cAAc;AACvB,eAAS,KAAK;AAAA,QACZ,UAAU;AAAA,QACV,UAAU,OAAO;AAAA,MACnB,CAAC;AAAA,IACH;AAEA,WAAO,MAAM,gBAAgB,MAAM,GAAG,YAAY;AAAA,EACpD;AAEA,QAAM,KAAK,OAAO,WAAW;AAE7B,QAAM,qBAAO,YAAY,OAAO;AAEhC,QAAM,SAAS,MAAM,UAAU,OAAO,OAAO;AAE7C,MAAI,QAAQ;AACV,WAAO,MAAM,MAAM;AAAA,EACrB;AAEA,SAAO,EAAE,QAAQ,SAAS,IAAI,QAAQ,SAAS;AACjD;",
  "names": ["chalk", "path"]
  }
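Two behaviours were dropped from the ESLint adapter: the explicit `extensions` list passed to the ESLint constructor, and the temporary `console.error` patch that hid an `eslint-plugin-react` warning. A hedged sketch of the ESLint 8 Node API usage this now relies on; the `overrideConfig` shape is an illustrative assumption standing in for whatever the bundled eslint-config-skuba 2.x declares, not skuba's actual configuration.

```typescript
import { ESLint } from 'eslint';

// With no `extensions` option, a directory lint such as `lintFiles('.')` only picks up
// non-.js file types that the resolved config matches via `overrides[].files` globs.
const engine = new ESLint({
  cache: true,
  fix: false,
  reportUnusedDisableDirectives: 'error',
  overrideConfig: {
    overrides: [
      { files: ['**/*.{ts,tsx,cts,mts}'], parser: '@typescript-eslint/parser' },
    ],
  },
});

const results = await engine.lintFiles('.');
```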
package/lib/cli/configure/patchRenovateConfig.js CHANGED
@@ -44,6 +44,7 @@ const RENOVATE_PRESETS = [
  "local>seekasia/renovate-config",
  "local>seek-jobs/renovate-config"
  ];
+ const EXISTING_REPO_PRESET_REGEX = /(github|local)>(seek-jobs|seekasia)\//;
  const RenovateConfig = t.Record({
  extends: t.Array(t.String)
  });
@@ -105,7 +106,8 @@ const patchRenovateConfig = async (dir) => {
  !config?.input || // The file appears to mention the baseline preset for the configured Git
  // owner. This is a very naive check that we don't want to overcomplicate
  // because it is invoked before each skuba format and lint.
- config.input.includes(presetToAdd)
+ config.input.includes(presetToAdd) || // Ignore any renovate configuration which already extends a SEEK-Jobs or seekasia config
+ EXISTING_REPO_PRESET_REGEX.exec(config.input)
  ) {
  return;
  }
package/lib/cli/configure/patchRenovateConfig.js.map CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/cli/configure/patchRenovateConfig.ts"],
- "sourcesContent": ["/* eslint-disable new-cap */\n\nimport { inspect } from 'util';\n\nimport fs from 'fs-extra';\nimport * as fleece from 'golden-fleece';\nimport * as t from 'runtypes';\n\nimport * as Git from '../../api/git';\nimport { log } from '../../utils/logging';\n\nimport { createDestinationFileReader } from './analysis/project';\nimport { RENOVATE_CONFIG_FILENAMES } from './modules/renovate';\nimport { formatPrettier } from './processing/prettier';\n\nconst RENOVATE_PRESETS = [\n 'local>seekasia/renovate-config',\n 'local>seek-jobs/renovate-config',\n] as const;\n\ntype RenovateFiletype = 'json' | 'json5';\n\ntype RenovatePreset = (typeof RENOVATE_PRESETS)[number];\n\nconst RenovateConfig = t.Record({\n extends: t.Array(t.String),\n});\n\nconst ownerToRenovatePreset = (owner: string): RenovatePreset | undefined => {\n const lowercaseOwner = owner.toLowerCase();\n\n switch (lowercaseOwner) {\n case 'seekasia':\n return 'local>seekasia/renovate-config';\n\n case 'seek-jobs':\n return 'local>seek-jobs/renovate-config';\n\n default:\n return;\n }\n};\n\ntype PatchFile = (props: {\n filepath: string;\n input: string;\n presetToAdd: RenovatePreset;\n}) => Promise<void>;\n\nconst patchJson: PatchFile = async ({ filepath, input, presetToAdd }) => {\n const config: unknown = JSON.parse(input);\n\n if (!RenovateConfig.guard(config)) {\n return;\n }\n\n config.extends.unshift(presetToAdd);\n\n await fs.promises.writeFile(\n filepath,\n formatPrettier(JSON.stringify(config), { parser: 'json' }),\n );\n\n return;\n};\n\nconst patchJson5: PatchFile = async ({ filepath, input, presetToAdd }) => {\n const config: unknown = fleece.evaluate(input);\n\n if (!RenovateConfig.guard(config)) {\n return;\n }\n\n config.extends.unshift(presetToAdd);\n\n await fs.promises.writeFile(\n filepath,\n formatPrettier(fleece.patch(input, config), { parser: 'json5' }),\n );\n\n return;\n};\n\nconst patchByFiletype: Record<RenovateFiletype, PatchFile> = {\n json: patchJson,\n json5: patchJson5,\n};\n\nconst patchRenovateConfig = async (dir: string) => {\n const readFile = createDestinationFileReader(dir);\n\n const { owner } = await Git.getOwnerAndRepo({ dir });\n\n const presetToAdd = ownerToRenovatePreset(owner);\n\n if (!presetToAdd) {\n // No baseline preset needs to be added for the configured Git owner.\n return;\n }\n\n const maybeConfigs = await Promise.all(\n RENOVATE_CONFIG_FILENAMES.map(async (filepath) => ({\n input: await readFile(filepath),\n filepath,\n })),\n );\n\n const config = maybeConfigs.find((maybeConfig) => Boolean(maybeConfig.input));\n\n if (\n // No file was found.\n !config?.input ||\n // The file appears to mention the baseline preset for the configured Git\n // owner. This is a very naive check that we don't want to overcomplicate\n // because it is invoked before each skuba format and lint.\n config.input.includes(presetToAdd)\n ) {\n return;\n }\n\n const filetype: RenovateFiletype = config.filepath\n .toLowerCase()\n .endsWith('.json5')\n ? 'json5'\n : 'json';\n\n const patchFile = patchByFiletype[filetype];\n\n await patchFile({\n filepath: config.filepath,\n input: config.input,\n presetToAdd,\n });\n};\n\nexport const tryPatchRenovateConfig = async (dir = process.cwd()) => {\n try {\n await patchRenovateConfig(dir);\n } catch (err) {\n log.warn('Failed to patch Renovate config.');\n log.subtle(inspect(err));\n }\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,kBAAwB;AAExB,sBAAe;AACf,aAAwB;AACxB,QAAmB;AAEnB,UAAqB;AACrB,qBAAoB;AAEpB,qBAA4C;AAC5C,sBAA0C;AAC1C,sBAA+B;AAE/B,MAAM,mBAAmB;AAAA,EACvB;AAAA,EACA;AACF;AAMA,MAAM,iBAAiB,EAAE,OAAO;AAAA,EAC9B,SAAS,EAAE,MAAM,EAAE,MAAM;AAC3B,CAAC;AAED,MAAM,wBAAwB,CAAC,UAA8C;AAC3E,QAAM,iBAAiB,MAAM,YAAY;AAEzC,UAAQ,gBAAgB;AAAA,IACtB,KAAK;AACH,aAAO;AAAA,IAET,KAAK;AACH,aAAO;AAAA,IAET;AACE;AAAA,EACJ;AACF;AAQA,MAAM,YAAuB,OAAO,EAAE,UAAU,OAAO,YAAY,MAAM;AACvE,QAAM,SAAkB,KAAK,MAAM,KAAK;AAExC,MAAI,CAAC,eAAe,MAAM,MAAM,GAAG;AACjC;AAAA,EACF;AAEA,SAAO,QAAQ,QAAQ,WAAW;AAElC,QAAM,gBAAAA,QAAG,SAAS;AAAA,IAChB;AAAA,QACA,gCAAe,KAAK,UAAU,MAAM,GAAG,EAAE,QAAQ,OAAO,CAAC;AAAA,EAC3D;AAEA;AACF;AAEA,MAAM,aAAwB,OAAO,EAAE,UAAU,OAAO,YAAY,MAAM;AACxE,QAAM,SAAkB,OAAO,SAAS,KAAK;AAE7C,MAAI,CAAC,eAAe,MAAM,MAAM,GAAG;AACjC;AAAA,EACF;AAEA,SAAO,QAAQ,QAAQ,WAAW;AAElC,QAAM,gBAAAA,QAAG,SAAS;AAAA,IAChB;AAAA,QACA,gCAAe,OAAO,MAAM,OAAO,MAAM,GAAG,EAAE,QAAQ,QAAQ,CAAC;AAAA,EACjE;AAEA;AACF;AAEA,MAAM,kBAAuD;AAAA,EAC3D,MAAM;AAAA,EACN,OAAO;AACT;AAEA,MAAM,sBAAsB,OAAO,QAAgB;AACjD,QAAM,eAAW,4CAA4B,GAAG;AAEhD,QAAM,EAAE,MAAM,IAAI,MAAM,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAEnD,QAAM,cAAc,sBAAsB,KAAK;AAE/C,MAAI,CAAC,aAAa;AAEhB;AAAA,EACF;AAEA,QAAM,eAAe,MAAM,QAAQ;AAAA,IACjC,0CAA0B,IAAI,OAAO,cAAc;AAAA,MACjD,OAAO,MAAM,SAAS,QAAQ;AAAA,MAC9B;AAAA,IACF,EAAE;AAAA,EACJ;AAEA,QAAM,SAAS,aAAa,KAAK,CAAC,gBAAgB,QAAQ,YAAY,KAAK,CAAC;AAE5E;AAAA;AAAA,IAEE,CAAC,QAAQ;AAAA;AAAA;AAAA,IAIT,OAAO,MAAM,SAAS,WAAW;AAAA,IACjC;AACA;AAAA,EACF;AAEA,QAAM,WAA6B,OAAO,SACvC,YAAY,EACZ,SAAS,QAAQ,IAChB,UACA;AAEJ,QAAM,YAAY,gBAAgB,QAAQ;AAE1C,QAAM,UAAU;AAAA,IACd,UAAU,OAAO;AAAA,IACjB,OAAO,OAAO;AAAA,IACd;AAAA,EACF,CAAC;AACH;AAEO,MAAM,yBAAyB,OAAO,MAAM,QAAQ,IAAI,MAAM;AACnE,MAAI;AACF,UAAM,oBAAoB,GAAG;AAAA,EAC/B,SAAS,KAAP;AACA,uBAAI,KAAK,kCAAkC;AAC3C,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAAA,EACzB;AACF;",
+ "sourcesContent": ["/* eslint-disable new-cap */\n\nimport { inspect } from 'util';\n\nimport fs from 'fs-extra';\nimport * as fleece from 'golden-fleece';\nimport * as t from 'runtypes';\n\nimport * as Git from '../../api/git';\nimport { log } from '../../utils/logging';\n\nimport { createDestinationFileReader } from './analysis/project';\nimport { RENOVATE_CONFIG_FILENAMES } from './modules/renovate';\nimport { formatPrettier } from './processing/prettier';\n\nconst RENOVATE_PRESETS = [\n 'local>seekasia/renovate-config',\n 'local>seek-jobs/renovate-config',\n] as const;\n\nconst EXISTING_REPO_PRESET_REGEX = /(github|local)>(seek-jobs|seekasia)\\//;\n\ntype RenovateFiletype = 'json' | 'json5';\n\ntype RenovatePreset = (typeof RENOVATE_PRESETS)[number];\n\nconst RenovateConfig = t.Record({\n extends: t.Array(t.String),\n});\n\nconst ownerToRenovatePreset = (owner: string): RenovatePreset | undefined => {\n const lowercaseOwner = owner.toLowerCase();\n\n switch (lowercaseOwner) {\n case 'seekasia':\n return 'local>seekasia/renovate-config';\n\n case 'seek-jobs':\n return 'local>seek-jobs/renovate-config';\n\n default:\n return;\n }\n};\n\ntype PatchFile = (props: {\n filepath: string;\n input: string;\n presetToAdd: RenovatePreset;\n}) => Promise<void>;\n\nconst patchJson: PatchFile = async ({ filepath, input, presetToAdd }) => {\n const config: unknown = JSON.parse(input);\n\n if (!RenovateConfig.guard(config)) {\n return;\n }\n\n config.extends.unshift(presetToAdd);\n\n await fs.promises.writeFile(\n filepath,\n formatPrettier(JSON.stringify(config), { parser: 'json' }),\n );\n\n return;\n};\n\nconst patchJson5: PatchFile = async ({ filepath, input, presetToAdd }) => {\n const config: unknown = fleece.evaluate(input);\n\n if (!RenovateConfig.guard(config)) {\n return;\n }\n\n config.extends.unshift(presetToAdd);\n\n await fs.promises.writeFile(\n filepath,\n formatPrettier(fleece.patch(input, config), { parser: 'json5' }),\n );\n\n return;\n};\n\nconst patchByFiletype: Record<RenovateFiletype, PatchFile> = {\n json: patchJson,\n json5: patchJson5,\n};\n\nconst patchRenovateConfig = async (dir: string) => {\n const readFile = createDestinationFileReader(dir);\n\n const { owner } = await Git.getOwnerAndRepo({ dir });\n\n const presetToAdd = ownerToRenovatePreset(owner);\n\n if (!presetToAdd) {\n // No baseline preset needs to be added for the configured Git owner.\n return;\n }\n\n const maybeConfigs = await Promise.all(\n RENOVATE_CONFIG_FILENAMES.map(async (filepath) => ({\n input: await readFile(filepath),\n filepath,\n })),\n );\n\n const config = maybeConfigs.find((maybeConfig) => Boolean(maybeConfig.input));\n\n if (\n // No file was found.\n !config?.input ||\n // The file appears to mention the baseline preset for the configured Git\n // owner. This is a very naive check that we don't want to overcomplicate\n // because it is invoked before each skuba format and lint.\n config.input.includes(presetToAdd) ||\n // Ignore any renovate configuration which already extends a SEEK-Jobs or seekasia config\n EXISTING_REPO_PRESET_REGEX.exec(config.input)\n ) {\n return;\n }\n\n const filetype: RenovateFiletype = config.filepath\n .toLowerCase()\n .endsWith('.json5')\n ? 
'json5'\n : 'json';\n\n const patchFile = patchByFiletype[filetype];\n\n await patchFile({\n filepath: config.filepath,\n input: config.input,\n presetToAdd,\n });\n};\n\nexport const tryPatchRenovateConfig = async (dir = process.cwd()) => {\n try {\n await patchRenovateConfig(dir);\n } catch (err) {\n log.warn('Failed to patch Renovate config.');\n log.subtle(inspect(err));\n }\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,kBAAwB;AAExB,sBAAe;AACf,aAAwB;AACxB,QAAmB;AAEnB,UAAqB;AACrB,qBAAoB;AAEpB,qBAA4C;AAC5C,sBAA0C;AAC1C,sBAA+B;AAE/B,MAAM,mBAAmB;AAAA,EACvB;AAAA,EACA;AACF;AAEA,MAAM,6BAA6B;AAMnC,MAAM,iBAAiB,EAAE,OAAO;AAAA,EAC9B,SAAS,EAAE,MAAM,EAAE,MAAM;AAC3B,CAAC;AAED,MAAM,wBAAwB,CAAC,UAA8C;AAC3E,QAAM,iBAAiB,MAAM,YAAY;AAEzC,UAAQ,gBAAgB;AAAA,IACtB,KAAK;AACH,aAAO;AAAA,IAET,KAAK;AACH,aAAO;AAAA,IAET;AACE;AAAA,EACJ;AACF;AAQA,MAAM,YAAuB,OAAO,EAAE,UAAU,OAAO,YAAY,MAAM;AACvE,QAAM,SAAkB,KAAK,MAAM,KAAK;AAExC,MAAI,CAAC,eAAe,MAAM,MAAM,GAAG;AACjC;AAAA,EACF;AAEA,SAAO,QAAQ,QAAQ,WAAW;AAElC,QAAM,gBAAAA,QAAG,SAAS;AAAA,IAChB;AAAA,QACA,gCAAe,KAAK,UAAU,MAAM,GAAG,EAAE,QAAQ,OAAO,CAAC;AAAA,EAC3D;AAEA;AACF;AAEA,MAAM,aAAwB,OAAO,EAAE,UAAU,OAAO,YAAY,MAAM;AACxE,QAAM,SAAkB,OAAO,SAAS,KAAK;AAE7C,MAAI,CAAC,eAAe,MAAM,MAAM,GAAG;AACjC;AAAA,EACF;AAEA,SAAO,QAAQ,QAAQ,WAAW;AAElC,QAAM,gBAAAA,QAAG,SAAS;AAAA,IAChB;AAAA,QACA,gCAAe,OAAO,MAAM,OAAO,MAAM,GAAG,EAAE,QAAQ,QAAQ,CAAC;AAAA,EACjE;AAEA;AACF;AAEA,MAAM,kBAAuD;AAAA,EAC3D,MAAM;AAAA,EACN,OAAO;AACT;AAEA,MAAM,sBAAsB,OAAO,QAAgB;AACjD,QAAM,eAAW,4CAA4B,GAAG;AAEhD,QAAM,EAAE,MAAM,IAAI,MAAM,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAEnD,QAAM,cAAc,sBAAsB,KAAK;AAE/C,MAAI,CAAC,aAAa;AAEhB;AAAA,EACF;AAEA,QAAM,eAAe,MAAM,QAAQ;AAAA,IACjC,0CAA0B,IAAI,OAAO,cAAc;AAAA,MACjD,OAAO,MAAM,SAAS,QAAQ;AAAA,MAC9B;AAAA,IACF,EAAE;AAAA,EACJ;AAEA,QAAM,SAAS,aAAa,KAAK,CAAC,gBAAgB,QAAQ,YAAY,KAAK,CAAC;AAE5E;AAAA;AAAA,IAEE,CAAC,QAAQ;AAAA;AAAA;AAAA,IAIT,OAAO,MAAM,SAAS,WAAW;AAAA,IAEjC,2BAA2B,KAAK,OAAO,KAAK;AAAA,IAC5C;AACA;AAAA,EACF;AAEA,QAAM,WAA6B,OAAO,SACvC,YAAY,EACZ,SAAS,QAAQ,IAChB,UACA;AAEJ,QAAM,YAAY,gBAAgB,QAAQ;AAE1C,QAAM,UAAU;AAAA,IACd,UAAU,OAAO;AAAA,IACjB,OAAO,OAAO;AAAA,IACd;AAAA,EACF,CAAC;AACH;AAEO,MAAM,yBAAyB,OAAO,MAAM,QAAQ,IAAI,MAAM;AACnE,MAAI;AACF,UAAM,oBAAoB,GAAG;AAAA,EAC/B,SAAS,KAAP;AACA,uBAAI,KAAK,kCAAkC;AAC3C,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAAA,EACzB;AACF;",
  "names": ["fs"]
  }
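The new guard means a Renovate config that already extends any SEEK-Jobs or seekasia preset, however it is referenced, is left untouched rather than having the baseline preset prepended. Illustrative inputs only:

```typescript
const EXISTING_REPO_PRESET_REGEX = /(github|local)>(seek-jobs|seekasia)\//;

EXISTING_REPO_PRESET_REGEX.test('{ "extends": ["github>seek-jobs/renovate-config"] }'); // true — skipped
EXISTING_REPO_PRESET_REGEX.test('{ "extends": ["local>seekasia/renovate-config"] }'); // true — skipped
EXISTING_REPO_PRESET_REGEX.test('{ "extends": ["config:base"] }'); // false — still patched
```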
package/lib/cli/test/reporters/github/index.js CHANGED
@@ -72,6 +72,4 @@ class GitHubReporter {
  }
  }
  }
- // Annotate the CommonJS export names for ESM import in node:
- 0 && (module.exports = {});
  //# sourceMappingURL=index.js.map
package/lib/wrapper/requestListener.js CHANGED
@@ -50,6 +50,9 @@ const runRequestListener = async ({
  return;
  }
  const port = (0, import_validation.isIpPort)(config.port) ? config.port : availablePort;
+ if (typeof config !== "function" && config instanceof import_http.default.Server) {
+ return (0, import_http2.startServer)(config, port);
+ }
  if (typeof config !== "function" && config.server && config.server instanceof import_http.default.Server) {
  return (0, import_http2.startServer)(config.server, port);
  }
package/lib/wrapper/requestListener.js.map CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../src/wrapper/requestListener.ts"],
- "sourcesContent": ["import http from 'http';\n\nimport { isFunction, isIpPort, isObject } from '../utils/validation';\n\nimport { serveRequestListener, startServer } from './http';\n\n// Express compatibility\ninterface FunctionConfig extends http.RequestListener {\n port?: number;\n}\n\ninterface ObjectConfig {\n // Koa compatibility\n callback?: () => http.RequestListener;\n\n requestListener?: http.RequestListener;\n\n // Fastify compatibility\n server?: http.Server;\n\n default?: Promise<unknown>;\n port?: unknown;\n}\n\nconst isConfig = (\n data: unknown,\n): data is Promise<FunctionConfig> | Promise<ObjectConfig> =>\n isFunction(data) || isObject(data);\n\ninterface Args {\n availablePort?: number;\n entryPoint: unknown;\n}\n\n/**\n * Create an HTTP server that calls into an exported `http.RequestListener`.\n *\n * This supports Express and Koa applications out of the box.\n */\nexport const runRequestListener = async ({\n availablePort,\n entryPoint,\n}: Args): Promise<void> => {\n if (!isConfig(entryPoint)) {\n // Assume an executable script with weird exports\n return;\n }\n\n let config: FunctionConfig | ObjectConfig = await entryPoint;\n\n if (typeof config === 'object' && isConfig(config.default)) {\n // Prefer `export default` over `export =`\n config = await config.default;\n }\n\n if (Object.keys(config).length === 0) {\n // Assume an executable script with no exports\n return;\n }\n\n const port = isIpPort(config.port) ? config.port : availablePort;\n\n // Fastify workaround\n if (\n typeof config !== 'function' &&\n config.server &&\n config.server instanceof http.Server\n ) {\n return startServer(config.server, port);\n }\n\n const requestListener =\n typeof config === 'function'\n ? config\n : config.requestListener ?? config.callback?.();\n\n if (typeof requestListener !== 'function') {\n // Assume an executable script with non-request listener exports\n return;\n }\n\n return serveRequestListener(requestListener, port);\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,wBAA+C;AAE/C,IAAAA,eAAkD;AAoBlD,MAAM,WAAW,CACf,aAEA,8BAAW,IAAI,SAAK,4BAAS,IAAI;AAY5B,MAAM,qBAAqB,OAAO;AAAA,EACvC;AAAA,EACA;AACF,MAA2B;AACzB,MAAI,CAAC,SAAS,UAAU,GAAG;AAEzB;AAAA,EACF;AAEA,MAAI,SAAwC,MAAM;AAElD,MAAI,OAAO,WAAW,YAAY,SAAS,OAAO,OAAO,GAAG;AAE1D,aAAS,MAAM,OAAO;AAAA,EACxB;AAEA,MAAI,OAAO,KAAK,MAAM,EAAE,WAAW,GAAG;AAEpC;AAAA,EACF;AAEA,QAAM,WAAO,4BAAS,OAAO,IAAI,IAAI,OAAO,OAAO;AAGnD,MACE,OAAO,WAAW,cAClB,OAAO,UACP,OAAO,kBAAkB,YAAAC,QAAK,QAC9B;AACA,eAAO,0BAAY,OAAO,QAAQ,IAAI;AAAA,EACxC;AAEA,QAAM,kBACJ,OAAO,WAAW,aACd,SACA,OAAO,mBAAmB,OAAO,WAAW;AAElD,MAAI,OAAO,oBAAoB,YAAY;AAEzC;AAAA,EACF;AAEA,aAAO,mCAAqB,iBAAiB,IAAI;AACnD;",
+ "sourcesContent": ["import http from 'http';\n\nimport { isFunction, isIpPort, isObject } from '../utils/validation';\n\nimport { serveRequestListener, startServer } from './http';\n\n// Express compatibility\ninterface FunctionConfig extends http.RequestListener {\n port?: number;\n}\n\ninterface ObjectConfig {\n // Koa compatibility\n callback?: () => http.RequestListener;\n\n requestListener?: http.RequestListener;\n\n // Fastify compatibility\n server?: http.Server;\n\n default?: Promise<unknown>;\n port?: unknown;\n}\n\nconst isConfig = (\n data: unknown,\n): data is Promise<FunctionConfig> | Promise<ObjectConfig> =>\n isFunction(data) || isObject(data);\n\ninterface Args {\n availablePort?: number;\n entryPoint: unknown;\n}\n\n/**\n * Create an HTTP server that calls into an exported `http.RequestListener`.\n *\n * This supports Express and Koa applications out of the box.\n */\nexport const runRequestListener = async ({\n availablePort,\n entryPoint,\n}: Args): Promise<void> => {\n if (!isConfig(entryPoint)) {\n // Assume an executable script with weird exports\n return;\n }\n\n let config: FunctionConfig | ObjectConfig = await entryPoint;\n\n if (typeof config === 'object' && isConfig(config.default)) {\n // Prefer `export default` over `export =`\n config = await config.default;\n }\n\n if (Object.keys(config).length === 0) {\n // Assume an executable script with no exports\n return;\n }\n\n const port = isIpPort(config.port) ? config.port : availablePort;\n\n // http.Server support\n if (typeof config !== 'function' && config instanceof http.Server) {\n return startServer(config, port);\n }\n\n // Fastify workaround\n if (\n typeof config !== 'function' &&\n config.server &&\n config.server instanceof http.Server\n ) {\n return startServer(config.server, port);\n }\n\n const requestListener =\n typeof config === 'function'\n ? config\n : config.requestListener ?? config.callback?.();\n\n if (typeof requestListener !== 'function') {\n // Assume an executable script with non-request listener exports\n return;\n }\n\n return serveRequestListener(requestListener, port);\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,wBAA+C;AAE/C,IAAAA,eAAkD;AAoBlD,MAAM,WAAW,CACf,aAEA,8BAAW,IAAI,SAAK,4BAAS,IAAI;AAY5B,MAAM,qBAAqB,OAAO;AAAA,EACvC;AAAA,EACA;AACF,MAA2B;AACzB,MAAI,CAAC,SAAS,UAAU,GAAG;AAEzB;AAAA,EACF;AAEA,MAAI,SAAwC,MAAM;AAElD,MAAI,OAAO,WAAW,YAAY,SAAS,OAAO,OAAO,GAAG;AAE1D,aAAS,MAAM,OAAO;AAAA,EACxB;AAEA,MAAI,OAAO,KAAK,MAAM,EAAE,WAAW,GAAG;AAEpC;AAAA,EACF;AAEA,QAAM,WAAO,4BAAS,OAAO,IAAI,IAAI,OAAO,OAAO;AAGnD,MAAI,OAAO,WAAW,cAAc,kBAAkB,YAAAC,QAAK,QAAQ;AACjE,eAAO,0BAAY,QAAQ,IAAI;AAAA,EACjC;AAGA,MACE,OAAO,WAAW,cAClB,OAAO,UACP,OAAO,kBAAkB,YAAAA,QAAK,QAC9B;AACA,eAAO,0BAAY,OAAO,QAAQ,IAAI;AAAA,EACxC;AAEA,QAAM,kBACJ,OAAO,WAAW,aACd,SACA,OAAO,mBAAmB,OAAO,WAAW;AAElD,MAAI,OAAO,oBAAoB,YAAY;AAEzC;AAAA,EACF;AAEA,aAAO,mCAAqB,iBAAiB,IAAI;AACnD;",
  "names": ["import_http", "http"]
  }
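The new branch lets an entry point export an `http.Server` instance directly and have the dev-server wrapper serve it, alongside the existing support for plain request listeners, Koa `callback()` objects, and Fastify-style `{ server }` objects. A hypothetical entry point:

```typescript
// src/listen.ts — hypothetical example; the wrapper now recognises a default-exported http.Server.
import http from 'http';

const server = http.createServer((req, res) => {
  res.writeHead(200, { 'Content-Type': 'text/plain' });
  res.end('OK');
});

export default server;
```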
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "skuba",
- "version": "6.0.1",
+ "version": "6.1.0",
  "private": false,
  "description": "SEEK development toolkit for backend applications and packages",
  "homepage": "https://github.com/seek-oss/skuba#readme",
@@ -71,19 +71,19 @@
  "@esbuild-plugins/tsconfig-paths": "^0.1.0",
  "@jest/types": "^29.0.0",
  "@octokit/graphql": "^5.0.0",
- "@octokit/graphql-schema": "^13.0.0",
+ "@octokit/graphql-schema": "^14.0.0",
  "@octokit/rest": "^19.0.0",
  "@octokit/types": "^9.0.0",
  "@types/jest": "^29.0.0",
  "@types/node": ">=16.11",
  "chalk": "^4.1.0",
- "concurrently": "^7.0.0",
+ "concurrently": "^8.0.0",
  "dotenv": "^16.0.0",
  "ejs": "^3.1.6",
  "enquirer": "^2.3.6",
  "esbuild": "~0.17.0",
  "eslint": "^8.11.0",
- "eslint-config-skuba": "1.4.1",
+ "eslint-config-skuba": "2.0.2",
  "execa": "^5.0.0",
  "fdir": "^6.0.0",
  "fs-extra": "^11.0.0",
@@ -120,13 +120,13 @@
  "validate-npm-package-name": "^5.0.0"
  },
  "devDependencies": {
- "@changesets/cli": "2.26.0",
+ "@changesets/cli": "2.26.1",
  "@changesets/get-github-info": "0.5.2",
- "@jest/reporters": "29.4.3",
+ "@jest/reporters": "29.5.0",
  "@types/ejs": "3.1.2",
  "@types/express": "4.17.17",
  "@types/fs-extra": "11.0.1",
- "@types/koa": "2.13.5",
+ "@types/koa": "2.13.6",
  "@types/libnpmsearch": "2.0.3",
  "@types/lodash.mergewith": "4.6.7",
  "@types/module-alias": "2.0.1",
@@ -136,13 +136,13 @@
  "@types/validate-npm-package-name": "4.0.0",
  "enhanced-resolve": "5.12.0",
  "express": "4.18.2",
- "fastify": "4.13.0",
+ "fastify": "4.15.0",
  "jsonfile": "6.1.0",
  "koa": "2.14.1",
- "memfs": "3.4.13",
+ "memfs": "3.5.0",
  "remark-cli": "11.0.0",
  "remark-preset-lint-recommended": "6.1.2",
- "semver": "7.3.8",
+ "semver": "7.4.0",
  "supertest": "6.3.3",
  "type-fest": "2.19.0"
  },
package/template/express-rest-api/.buildkite/pipeline.yml CHANGED
@@ -51,7 +51,7 @@ steps:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v4.11.0:
+ - docker-compose#v4.12.0:
  run: app
  timeout_in_minutes: 10
package/template/express-rest-api/package.json CHANGED
@@ -12,7 +12,7 @@
  "test:watch": "skuba test --watch"
  },
  "dependencies": {
- "@seek/logger": "^5.0.1",
+ "@seek/logger": "^6.0.0",
  "express": "^4.17.1",
  "skuba-dive": "^2.0.0"
  },
package/template/express-rest-api/src/api/healthCheck.ts CHANGED
@@ -1,4 +1,4 @@
- import { Handler } from 'express';
+ import type { Handler } from 'express';

  /**
  * Signifies that the API is available to serve requests.
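The remaining template changes are largely mechanical `import { … }` → `import type { … }` rewrites (plus dependency and Buildkite plugin bumps), presumably driven by a type-only import lint rule in the bundled eslint-config-skuba 2.x. Type-only imports are erased at compile time, so the emitted JavaScript no longer requires a module it only used for typings; a hypothetical handler for illustration:

```typescript
// Hypothetical example: `import type` leaves no runtime require('express') in the output.
import type { Handler } from 'express';

export const healthCheckHandler: Handler = (_req, res) => {
  res.status(200).send('Healthy.');
};
```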
package/template/express-rest-api/src/api/smokeTest.ts CHANGED
@@ -1,4 +1,4 @@
- import { Handler } from 'express';
+ import type { Handler } from 'express';

  /**
  * Tests connectivity to ensure appropriate access and network configuration.
package/template/greeter/.buildkite/pipeline.yml CHANGED
@@ -32,6 +32,6 @@ steps:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v4.11.0:
+ - docker-compose#v4.12.0:
  run: app
  timeout_in_minutes: 10
package/template/koa-rest-api/.buildkite/pipeline.yml CHANGED
@@ -51,7 +51,7 @@ steps:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v4.11.0:
+ - docker-compose#v4.12.0:
  run: app
  timeout_in_minutes: 10
package/template/koa-rest-api/package.json CHANGED
@@ -14,11 +14,11 @@
  "dependencies": {
  "@koa/router": "^12.0.0",
  "@opentelemetry/api": "^1.1.0",
- "@opentelemetry/exporter-trace-otlp-grpc": "^0.36.0",
+ "@opentelemetry/exporter-trace-otlp-grpc": "^0.38.0",
  "@opentelemetry/instrumentation-aws-sdk": "^0.34.0",
- "@opentelemetry/instrumentation-http": "^0.36.0",
- "@opentelemetry/sdk-node": "^0.36.0",
- "@seek/logger": "^5.0.1",
+ "@opentelemetry/instrumentation-http": "^0.38.0",
+ "@opentelemetry/sdk-node": "^0.38.0",
+ "@seek/logger": "^6.0.0",
  "aws-sdk": "^2.1039.0",
  "hot-shots": "^10.0.0",
  "koa": "^2.13.4",
package/template/koa-rest-api/src/api/healthCheck.ts CHANGED
@@ -1,4 +1,4 @@
- import { Middleware } from 'src/types/koa';
+ import type { Middleware } from 'src/types/koa';

  /**
  * Signifies that the API is available to serve requests.
package/template/koa-rest-api/src/api/jobs/getJobs.ts CHANGED
@@ -1,7 +1,7 @@
  import { logger } from 'src/framework/logging';
  import { metricsClient } from 'src/framework/metrics';
  import * as storage from 'src/storage/jobs';
- import { Middleware } from 'src/types/koa';
+ import type { Middleware } from 'src/types/koa';

  export const getJobsHandler: Middleware = async (ctx) => {
  const jobs = await storage.readJobs();
package/template/koa-rest-api/src/api/jobs/postJob.ts CHANGED
@@ -3,7 +3,7 @@ import { metricsClient } from 'src/framework/metrics';
  import { validateRequestBody } from 'src/framework/validation';
  import * as storage from 'src/storage/jobs';
  import { JobInputSchema } from 'src/types/jobs';
- import { Middleware } from 'src/types/koa';
+ import type { Middleware } from 'src/types/koa';

  export const postJobHandler: Middleware = async (ctx) => {
  const jobInput = validateRequestBody(ctx, JobInputSchema);
package/template/koa-rest-api/src/api/smokeTest.ts CHANGED
@@ -1,5 +1,5 @@
  import { smokeTestJobStorage } from 'src/storage/jobs';
- import { Middleware } from 'src/types/koa';
+ import type { Middleware } from 'src/types/koa';

  /**
  * Tests connectivity to ensure appropriate access and network configuration.
package/template/koa-rest-api/src/framework/server.test.ts CHANGED
@@ -4,7 +4,7 @@ import { logger } from 'src/testing/logging';
  import { metricsClient } from 'src/testing/metrics';
  import { agentFromRouter } from 'src/testing/server';
  import { chance } from 'src/testing/types';
- import { Middleware } from 'src/types/koa';
+ import type { Middleware } from 'src/types/koa';

  const middleware = jest.fn<void, Parameters<Middleware>>();
package/template/koa-rest-api/src/framework/validation.ts CHANGED
@@ -1,7 +1,7 @@
  import { ErrorMiddleware } from 'seek-koala';
- import { z } from 'zod';
+ import type { z } from 'zod';

- import { Context } from 'src/types/koa';
+ import type { Context } from 'src/types/koa';

  /**
  * Converts a `ZodError` into an `invalidFields` object
package/template/koa-rest-api/src/storage/jobs.ts CHANGED
@@ -1,6 +1,6 @@
  import { randomUUID } from 'crypto';

- import { Job, JobInput } from 'src/types/jobs';
+ import type { Job, JobInput } from 'src/types/jobs';

  const jobStore: Record<string, Job> = {};
package/template/koa-rest-api/src/testing/server.ts CHANGED
@@ -1,5 +1,5 @@
- import Router from '@koa/router';
- import Koa from 'koa';
+ import type Router from '@koa/router';
+ import type Koa from 'koa';
  import request from 'supertest';

  import { createApp } from 'src/framework/server';
package/template/koa-rest-api/src/testing/types.ts CHANGED
@@ -1,7 +1,7 @@
  import { Chance } from 'chance';
  import { z } from 'zod';

- import { JobInput } from 'src/types/jobs';
+ import type { JobInput } from 'src/types/jobs';

  export type IdDescription = z.infer<typeof IdDescriptionSchema>;
package/template/lambda-sqs-worker/.buildkite/pipeline.yml CHANGED
@@ -32,7 +32,7 @@ configs:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v4.11.0:
+ - docker-compose#v4.12.0:
  dependencies: false
  run: app
  retry:
@@ -60,7 +60,7 @@ steps:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v4.11.0:
+ - docker-compose#v4.12.0:
  run: app
  timeout_in_minutes: 10
package/template/lambda-sqs-worker/package.json CHANGED
@@ -18,8 +18,8 @@
  "@aws-sdk/client-lambda": "^3.259.0",
  "@aws-sdk/client-sns": "^3.259.0",
  "@aws-sdk/util-utf8-node": "^3.259.0",
- "@seek/logger": "^5.0.1",
- "datadog-lambda-js": "^6.83.0",
+ "@seek/logger": "^6.0.0",
+ "datadog-lambda-js": "^7.0.0",
  "dd-trace": "^3.8.0",
  "skuba-dive": "^2.0.0",
  "zod": "^3.19.1"
package/template/lambda-sqs-worker/src/app.ts CHANGED
@@ -1,6 +1,6 @@
  import 'skuba-dive/register';

- import { SQSEvent } from 'aws-lambda';
+ import type { SQSEvent } from 'aws-lambda';

  import { createHandler } from 'src/framework/handler';
  import { logger } from 'src/framework/logging';
package/template/lambda-sqs-worker/src/framework/validation.ts CHANGED
@@ -1,4 +1,4 @@
- import { z } from 'zod';
+ import type { z } from 'zod';

  export const validateJson = <
  Output,
package/template/lambda-sqs-worker/src/mapping/jobScorer.ts CHANGED
@@ -1,5 +1,8 @@
- import { JobScorerInput, JobScorerOutput } from 'src/types/jobScorer';
- import { JobPublishedEvent, JobScoredEvent } from 'src/types/pipelineEvents';
+ import type { JobScorerInput, JobScorerOutput } from 'src/types/jobScorer';
+ import type {
+ JobPublishedEvent,
+ JobScoredEvent,
+ } from 'src/types/pipelineEvents';

  export const jobPublishedEventToScorerInput = (
  record: JobPublishedEvent,
package/template/lambda-sqs-worker/src/services/jobScorer.ts CHANGED
@@ -2,12 +2,12 @@ import {
  jobPublishedEventToScorerInput,
  jobScorerOutputToScoredEvent,
  } from 'src/mapping/jobScorer';
- import {
- JobScorerInput,
- JobScorerOutput,
- JobScorerOutputSchema,
- } from 'src/types/jobScorer';
- import { JobPublishedEvent, JobScoredEvent } from 'src/types/pipelineEvents';
+ import type { JobScorerInput, JobScorerOutput } from 'src/types/jobScorer';
+ import { JobScorerOutputSchema } from 'src/types/jobScorer';
+ import type {
+ JobPublishedEvent,
+ JobScoredEvent,
+ } from 'src/types/pipelineEvents';

  /* istanbul ignore next: simulation of an external service */
  export const scoringService = {
package/template/lambda-sqs-worker/src/testing/handler.ts CHANGED
@@ -1,4 +1,4 @@
- import { Context, SQSEvent } from 'aws-lambda';
+ import type { Context, SQSEvent } from 'aws-lambda';

  import { chance } from './types';
package/template/lambda-sqs-worker/src/testing/types.ts CHANGED
@@ -1,7 +1,7 @@
  import { Chance } from 'chance';
  import { z } from 'zod';

- import { JobPublishedEvent } from 'src/types/pipelineEvents';
+ import type { JobPublishedEvent } from 'src/types/pipelineEvents';

  export type IdDescription = z.infer<typeof IdDescriptionSchema>;
package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml CHANGED
@@ -32,7 +32,7 @@ configs:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v4.11.0:
+ - docker-compose#v4.12.0:
  dependencies: false
  run: app
  retry:
@@ -57,7 +57,7 @@ steps:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v4.11.0:
+ - docker-compose#v4.12.0:
  run: app
  timeout_in_minutes: 10
package/template/lambda-sqs-worker-cdk/infra/appStack.ts CHANGED
@@ -1,6 +1,6 @@
+ import type { StackProps } from 'aws-cdk-lib';
  import {
  Stack,
- StackProps,
  aws_iam,
  aws_kms,
  aws_lambda,
package/template/lambda-sqs-worker-cdk/package.json CHANGED
@@ -12,7 +12,7 @@
  "test:watch": "skuba test --watch"
  },
  "dependencies": {
- "@seek/logger": "^5.0.1",
+ "@seek/logger": "^6.0.0",
  "zod": "^3.19.1"
  },
  "devDependencies": {
package/template/lambda-sqs-worker-cdk/src/app.ts CHANGED
@@ -1,5 +1,5 @@
  import createLogger from '@seek/logger';
- import { SQSEvent, SQSHandler } from 'aws-lambda';
+ import type { SQSEvent, SQSHandler } from 'aws-lambda';

  const logger = createLogger({
  name: '<%- serviceName %>',