skuba 9.1.0 → 10.0.0-node-22-20250110051224

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. package/lib/cli/adapter/prettier.js +4 -1
  2. package/lib/cli/adapter/prettier.js.map +2 -2
  3. package/lib/cli/configure/analysis/git.js +14 -1
  4. package/lib/cli/configure/analysis/git.js.map +2 -2
  5. package/lib/cli/lint/annotate/buildkite/prettier.js +4 -1
  6. package/lib/cli/lint/annotate/buildkite/prettier.js.map +2 -2
  7. package/lib/cli/lint/annotate/github/prettier.js +1 -1
  8. package/lib/cli/lint/annotate/github/prettier.js.map +2 -2
  9. package/lib/cli/lint/internalLints/upgrade/patches/10.0.0/index.d.ts +2 -0
  10. package/lib/cli/lint/internalLints/upgrade/patches/10.0.0/index.js +35 -0
  11. package/lib/cli/lint/internalLints/upgrade/patches/10.0.0/index.js.map +7 -0
  12. package/lib/cli/lint/internalLints/upgrade/patches/10.0.0/upgradeNode.d.ts +2 -0
  13. package/lib/cli/lint/internalLints/upgrade/patches/10.0.0/upgradeNode.js +51 -0
  14. package/lib/cli/lint/internalLints/upgrade/patches/10.0.0/upgradeNode.js.map +7 -0
  15. package/lib/cli/migrate/index.js +2 -1
  16. package/lib/cli/migrate/index.js.map +2 -2
  17. package/lib/cli/migrate/nodeVersion/getNode22TypesVersion.d.ts +1 -0
  18. package/lib/cli/migrate/nodeVersion/getNode22TypesVersion.js +32 -0
  19. package/lib/cli/migrate/nodeVersion/getNode22TypesVersion.js.map +7 -0
  20. package/lib/cli/migrate/nodeVersion/index.d.ts +10 -1
  21. package/lib/cli/migrate/nodeVersion/index.js +154 -23
  22. package/lib/cli/migrate/nodeVersion/index.js.map +2 -2
  23. package/lib/cli/migrate/nodeVersion/packageJsonChecks.d.ts +2 -0
  24. package/lib/cli/migrate/nodeVersion/packageJsonChecks.js +89 -0
  25. package/lib/cli/migrate/nodeVersion/packageJsonChecks.js.map +7 -0
  26. package/lib/wrapper/http.js +12 -13
  27. package/lib/wrapper/http.js.map +3 -3
  28. package/package.json +11 -11
  29. package/template/express-rest-api/.buildkite/pipeline.yml +4 -4
  30. package/template/express-rest-api/Dockerfile.dev-deps +1 -1
  31. package/template/express-rest-api/gantry.build.yml +0 -3
  32. package/template/express-rest-api/package.json +3 -3
  33. package/template/greeter/.buildkite/pipeline.yml +3 -3
  34. package/template/greeter/Dockerfile +1 -1
  35. package/template/greeter/package.json +2 -2
  36. package/template/koa-rest-api/.buildkite/pipeline.yml +4 -4
  37. package/template/koa-rest-api/Dockerfile.dev-deps +1 -1
  38. package/template/koa-rest-api/gantry.build.yml +0 -3
  39. package/template/koa-rest-api/package.json +6 -6
  40. package/template/lambda-sqs-worker/.buildkite/pipeline.yml +5 -5
  41. package/template/lambda-sqs-worker/Dockerfile +1 -1
  42. package/template/lambda-sqs-worker/package.json +2 -2
  43. package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +5 -5
  44. package/template/lambda-sqs-worker-cdk/Dockerfile +1 -1
  45. package/template/lambda-sqs-worker-cdk/README.md +4 -3
  46. package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap +14 -2
  47. package/template/lambda-sqs-worker-cdk/infra/appStack.test.ts +5 -3
  48. package/template/lambda-sqs-worker-cdk/infra/appStack.ts +2 -0
  49. package/template/lambda-sqs-worker-cdk/package.json +6 -5
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../../src/cli/migrate/nodeVersion/index.ts"],
- "sourcesContent": ["import { inspect } from 'util';\n\nimport { glob } from 'fast-glob';\nimport fs from 'fs-extra';\n\nimport { log } from '../../../utils/logging';\nimport { createDestinationFileReader } from '../../configure/analysis/project';\n\ntype SubPatch = (\n | { files: string; file?: never }\n | { file: string; files?: never }\n) & {\n test?: RegExp;\n replace: string;\n};\n\nconst subPatches: SubPatch[] = [\n { file: '.nvmrc', replace: '<%- version %>\\n' },\n {\n files: 'Dockerfile*',\n test: /^FROM(.*) node:[0-9.]+(\\.[^- \\n]+)?(-[^ \\n]+)?( .+|)$/gm,\n replace: 'FROM$1 node:<%- version %>$3$4',\n },\n {\n files: 'Dockerfile*',\n test: /^FROM(.*) gcr.io\\/distroless\\/nodejs\\d+-debian(.+)$/gm,\n replace: 'FROM$1 gcr.io/distroless/nodejs<%- version %>-debian$2',\n },\n {\n files: 'serverless*.y*ml',\n test: /nodejs\\d+.x/gm,\n replace: 'nodejs<%- version %>.x',\n },\n {\n files: 'infra/**/*.ts',\n test: /NODEJS_\\d+_X/g,\n replace: 'NODEJS_<%- version %>_X',\n },\n {\n files: '.buildkite/*',\n test: /image: node:[0-9.]+(\\.[^- \\n]+)?(-[^ \\n]+)?$/gm,\n replace: 'image: node:<%- version %>$2',\n },\n];\n\nconst runSubPatch = async (version: number, dir: string, patch: SubPatch) => {\n const readFile = createDestinationFileReader(dir);\n const paths = patch.file\n ? [patch.file]\n : await glob(patch.files ?? [], { cwd: dir });\n\n await Promise.all(\n paths.map(async (path) => {\n const contents = await readFile(path);\n if (!contents) {\n return;\n }\n\n if (patch.test && !patch.test.test(contents)) {\n return;\n }\n\n const templated = patch.replace.replaceAll(\n '<%- version %>',\n version.toString(),\n );\n\n await fs.promises.writeFile(\n path,\n patch.test ? contents.replaceAll(patch.test, templated) : templated,\n );\n }),\n );\n};\n\nconst upgrade = async (version: number, dir: string) => {\n await Promise.all(\n subPatches.map((subPatch) => runSubPatch(version, dir, subPatch)),\n );\n};\n\nexport const nodeVersionMigration = async (\n version: number,\n dir = process.cwd(),\n) => {\n log.ok(`Upgrading to Node.js ${version}`);\n try {\n await upgrade(version, dir);\n log.ok('Upgraded to Node.js', version);\n } catch (err) {\n log.err('Failed to upgrade');\n log.subtle(inspect(err));\n process.exitCode = 1;\n }\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAqB;AACrB,sBAAe;AAEf,qBAAoB;AACpB,qBAA4C;AAU5C,MAAM,aAAyB;AAAA,EAC7B,EAAE,MAAM,UAAU,SAAS,mBAAmB;AAAA,EAC9C;AAAA,IACE,OAAO;AAAA,IACP,MAAM;AAAA,IACN,SAAS;AAAA,EACX;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,MAAM;AAAA,IACN,SAAS;AAAA,EACX;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,MAAM;AAAA,IACN,SAAS;AAAA,EACX;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,MAAM;AAAA,IACN,SAAS;AAAA,EACX;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,MAAM;AAAA,IACN,SAAS;AAAA,EACX;AACF;AAEA,MAAM,cAAc,OAAO,SAAiB,KAAa,UAAoB;AAC3E,QAAM,eAAW,4CAA4B,GAAG;AAChD,QAAM,QAAQ,MAAM,OAChB,CAAC,MAAM,IAAI,IACX,UAAM,uBAAK,MAAM,SAAS,CAAC,GAAG,EAAE,KAAK,IAAI,CAAC;AAE9C,QAAM,QAAQ;AAAA,IACZ,MAAM,IAAI,OAAO,SAAS;AACxB,YAAM,WAAW,MAAM,SAAS,IAAI;AACpC,UAAI,CAAC,UAAU;AACb;AAAA,MACF;AAEA,UAAI,MAAM,QAAQ,CAAC,MAAM,KAAK,KAAK,QAAQ,GAAG;AAC5C;AAAA,MACF;AAEA,YAAM,YAAY,MAAM,QAAQ;AAAA,QAC9B;AAAA,QACA,QAAQ,SAAS;AAAA,MACnB;AAEA,YAAM,gBAAAA,QAAG,SAAS;AAAA,QAChB;AAAA,QACA,MAAM,OAAO,SAAS,WAAW,MAAM,MAAM,SAAS,IAAI;AAAA,MAC5D;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAEA,MAAM,UAAU,OAAO,SAAiB,QAAgB;AACtD,QAAM,QAAQ;AAAA,IACZ,WAAW,IAAI,CAAC,aAAa,YAAY,SAAS,KAAK,QAAQ,CAAC;AAAA,EAClE;AACF;AAEO,MAAM,uBAAuB,OAClC,SACA,MAAM,QAAQ,IAAI,MACf;AACH,qBAAI,GAAG,wBAAwB,OAAO,EAAE;AACxC,MAAI;AACF,UAAM,QAAQ,SAAS,GAAG;AAC1B,uBAAI,GAAG,uBAAuB,OAAO;AAAA,EACvC,SAAS,KAAK;AACZ,uBAAI,IAAI,mBAAmB;AAC3B,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,YAAQ,WAAW;AAAA,EACrB;AACF;",
+ "sourcesContent": ["import { inspect } from 'util';\n\nimport { glob } from 'fast-glob';\nimport fs from 'fs-extra';\n\nimport { log } from '../../../utils/logging';\nimport { createDestinationFileReader } from '../../configure/analysis/project';\n\nimport { getNode22TypesVersion } from './getNode22TypesVersion';\nimport { checkServerlessVersion, checkSkubaType } from './packageJsonChecks';\n\nconst DEFAULT_NODE_TYPES = '22.9.0';\n\ntype SubPatch =\n | (({ files: string; file?: never } | { file: string; files?: never }) & {\n test?: RegExp;\n replace: string;\n id: string;\n })\n | Array<\n ({ files: string; file?: never } | { file: string; files?: never }) & {\n test?: RegExp;\n replace: string;\n id: string;\n }\n >;\n\ntype VersionResult = {\n version: string;\n err: string | undefined;\n};\n\nexport const getNode22TypeVersion = (\n major: number,\n defaultVersion: string,\n): VersionResult => {\n try {\n const version = getNode22TypesVersion(major);\n if (!version || !/^22.\\d+\\.\\d+$/.test(version)) {\n throw new Error('No version found');\n }\n return {\n version,\n err: undefined,\n };\n } catch {\n return {\n version: defaultVersion,\n err: 'Failed to fetch latest version, using fallback version',\n };\n }\n};\n\nconst SHA_REGEX = /(?<=node.*)(@sha256:[a-f0-9]{64})/gm;\n\nconst subPatches: SubPatch[] = [\n { id: 'nvmrc', file: '.nvmrc', replace: '<%- version %>\\n' },\n {\n id: 'Dockerfile-1',\n files: 'Dockerfile*',\n test: /^FROM(.*) (public.ecr.aws\\/docker\\/library\\/)?node:[0-9.]+(@sha256:[a-f0-9]{64})?(\\.[^- \\n]+)?(-[^ \\n]+)?( .+|)$/gm,\n replace: 'FROM$1 $2node:<%- version %>$3$5$6',\n },\n {\n id: 'Dockerfile-2',\n files: 'Dockerfile*',\n test: /^FROM(.*) gcr.io\\/distroless\\/nodejs\\d+-debian(.+)$/gm,\n replace: 'FROM$1 gcr.io/distroless/nodejs<%- version %>-debian$2',\n },\n {\n id: 'serverless',\n files: 'serverless*.y*ml',\n test: /nodejs\\d+.x/gm,\n replace: 'nodejs<%- version %>.x',\n },\n [\n {\n id: 'cdk-1',\n files: 'infra/**/*.ts',\n test: /NODEJS_\\d+_X/g,\n replace: 'NODEJS_<%- version %>_X',\n },\n {\n id: 'cdk-2',\n files: 'infra/**/*.ts',\n test: /(target:\\s*'node)(\\d+)(.+)$/gm,\n replace: '$1<%- version %>$3',\n },\n ],\n {\n id: 'buildkite',\n files: '**/.buildkite/*',\n test: /(image: )(public.ecr.aws\\/docker\\/library\\/)?(node:)[0-9.]+(\\.[^- \\n]+)?(-[^ \\n]+)?$/gm,\n replace: '$1$2$3<%- version %>$5',\n },\n {\n id: 'node-version',\n files: '.node-version*',\n test: /(v)?\\d+\\.\\d+\\.\\d+(.+)?/gm,\n replace: '$1<%- version %>$2',\n },\n {\n id: 'package-json-1',\n files: '**/package.json',\n test: /(\"@types\\/node\": \")(\\^)?[0-9.]+\"/gm,\n replace: '$1$2<%- version %>\"',\n },\n {\n id: 'package-json-2',\n files: '**/package.json',\n test: /(\"engines\":\\s*{[^}]*\"node\":\\s*\">=)(\\d+)(\"[^}]*})(?![^}]*\"skuba\":\\s*{[^}]*\"type\":\\s*\"package\")/gm,\n replace: '$1<%- version %>$3',\n },\n {\n id: 'tsconfig',\n files: '**/tsconfig.json',\n test: /(\"target\":\\s*\")(ES?:[0-9]+|Next|[A-Za-z]+[0-9]*)\"/gim,\n replace: '$1<%- version %>\"',\n },\n {\n id: 'docker-compose',\n files: '**/docker-compose*.y*ml',\n test: /(image: )(public.ecr.aws\\/docker\\/library\\/)?(node:)[0-9.]+(\\.[^- \\n]+)?(-[^ \\n]+)?$/gm,\n replace: '$1$2$3<%- version %>$5',\n },\n];\n\nconst removeNodeShas = (content: string): string =>\n content.replace(SHA_REGEX, '');\n\ntype Versions = {\n nodeVersion: number;\n nodeTypesVersion: string;\n ECMAScriptVersion: string;\n};\n\nconst runSubPatch = async (\n { nodeVersion, nodeTypesVersion, ECMAScriptVersion }: 
Versions,\n dir: string,\n patch: SubPatch,\n) => {\n if (Array.isArray(patch)) {\n for (const subPatch of patch) {\n await runSubPatch(\n { nodeVersion, nodeTypesVersion, ECMAScriptVersion },\n dir,\n subPatch,\n );\n }\n return;\n }\n const readFile = createDestinationFileReader(dir);\n const paths = patch.file\n ? [patch.file]\n : await glob(patch.files ?? [], { cwd: dir });\n\n await Promise.all(\n paths.map(async (path) => {\n const contents = await readFile(path);\n if (!contents) {\n return;\n }\n\n if (patch.test && !patch.test.test(contents)) {\n return;\n }\n\n const unPinnedContents = removeNodeShas(contents);\n\n if (patch.id === 'package-json-1') {\n return await writePatchedContents({\n path,\n contents: unPinnedContents,\n templated: patch.replace.replaceAll(\n '<%- version %>',\n nodeTypesVersion,\n ),\n test: patch.test,\n });\n }\n if (patch.id === 'tsconfig') {\n await checkSkubaType();\n return await writePatchedContents({\n path,\n contents: unPinnedContents,\n templated: patch.replace.replaceAll(\n '<%- version %>',\n ECMAScriptVersion,\n ),\n test: patch.test,\n });\n }\n\n if (patch.id === 'package-json-2') {\n await checkSkubaType();\n }\n\n await writePatchedContents({\n path,\n contents: unPinnedContents,\n templated: patch.replace.replaceAll(\n '<%- version %>',\n nodeVersion.toString(),\n ),\n test: patch.test,\n });\n }),\n );\n};\n\nconst writePatchedContents = async ({\n path,\n contents,\n templated,\n test,\n}: {\n path: string;\n contents: string;\n templated: string;\n test?: RegExp;\n}) =>\n await fs.promises.writeFile(\n path,\n test ? contents.replaceAll(test, templated) : templated,\n );\n\nconst upgrade = async (\n { nodeVersion, nodeTypesVersion, ECMAScriptVersion }: Versions,\n dir: string,\n) => {\n await Promise.all(\n subPatches.map((subPatch) =>\n runSubPatch(\n { nodeVersion, nodeTypesVersion, ECMAScriptVersion },\n dir,\n subPatch,\n ),\n ),\n );\n};\n\nexport const nodeVersionMigration = async (\n {\n nodeVersion,\n ECMAScriptVersion,\n }: { nodeVersion: number; ECMAScriptVersion: string },\n dir = process.cwd(),\n) => {\n log.ok(`Upgrading to Node.js ${nodeVersion}`);\n try {\n await checkServerlessVersion();\n const { version: nodeTypesVersion, err } = getNode22TypeVersion(\n nodeVersion,\n DEFAULT_NODE_TYPES,\n );\n if (err) {\n log.warn(err);\n }\n await upgrade({ nodeVersion, nodeTypesVersion, ECMAScriptVersion }, dir);\n log.ok('Upgraded to Node.js', nodeVersion);\n } catch (err) {\n log.err('Failed to upgrade');\n log.subtle(inspect(err));\n process.exitCode = 1;\n }\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAqB;AACrB,sBAAe;AAEf,qBAAoB;AACpB,qBAA4C;AAE5C,mCAAsC;AACtC,+BAAuD;AAEvD,MAAM,qBAAqB;AAqBpB,MAAM,uBAAuB,CAClC,OACA,mBACkB;AAClB,MAAI;AACF,UAAM,cAAU,oDAAsB,KAAK;AAC3C,QAAI,CAAC,WAAW,CAAC,gBAAgB,KAAK,OAAO,GAAG;AAC9C,YAAM,IAAI,MAAM,kBAAkB;AAAA,IACpC;AACA,WAAO;AAAA,MACL;AAAA,MACA,KAAK;AAAA,IACP;AAAA,EACF,QAAQ;AACN,WAAO;AAAA,MACL,SAAS;AAAA,MACT,KAAK;AAAA,IACP;AAAA,EACF;AACF;AAEA,MAAM,YAAY;AAElB,MAAM,aAAyB;AAAA,EAC7B,EAAE,IAAI,SAAS,MAAM,UAAU,SAAS,mBAAmB;AAAA,EAC3D;AAAA,IACE,IAAI;AAAA,IACJ,OAAO;AAAA,IACP,MAAM;AAAA,IACN,SAAS;AAAA,EACX;AAAA,EACA;AAAA,IACE,IAAI;AAAA,IACJ,OAAO;AAAA,IACP,MAAM;AAAA,IACN,SAAS;AAAA,EACX;AAAA,EACA;AAAA,IACE,IAAI;AAAA,IACJ,OAAO;AAAA,IACP,MAAM;AAAA,IACN,SAAS;AAAA,EACX;AAAA,EACA;AAAA,IACE;AAAA,MACE,IAAI;AAAA,MACJ,OAAO;AAAA,MACP,MAAM;AAAA,MACN,SAAS;AAAA,IACX;AAAA,IACA;AAAA,MACE,IAAI;AAAA,MACJ,OAAO;AAAA,MACP,MAAM;AAAA,MACN,SAAS;AAAA,IACX;AAAA,EACF;AAAA,EACA;AAAA,IACE,IAAI;AAAA,IACJ,OAAO;AAAA,IACP,MAAM;AAAA,IACN,SAAS;AAAA,EACX;AAAA,EACA;AAAA,IACE,IAAI;AAAA,IACJ,OAAO;AAAA,IACP,MAAM;AAAA,IACN,SAAS;AAAA,EACX;AAAA,EACA;AAAA,IACE,IAAI;AAAA,IACJ,OAAO;AAAA,IACP,MAAM;AAAA,IACN,SAAS;AAAA,EACX;AAAA,EACA;AAAA,IACE,IAAI;AAAA,IACJ,OAAO;AAAA,IACP,MAAM;AAAA,IACN,SAAS;AAAA,EACX;AAAA,EACA;AAAA,IACE,IAAI;AAAA,IACJ,OAAO;AAAA,IACP,MAAM;AAAA,IACN,SAAS;AAAA,EACX;AAAA,EACA;AAAA,IACE,IAAI;AAAA,IACJ,OAAO;AAAA,IACP,MAAM;AAAA,IACN,SAAS;AAAA,EACX;AACF;AAEA,MAAM,iBAAiB,CAAC,YACtB,QAAQ,QAAQ,WAAW,EAAE;AAQ/B,MAAM,cAAc,OAClB,EAAE,aAAa,kBAAkB,kBAAkB,GACnD,KACA,UACG;AACH,MAAI,MAAM,QAAQ,KAAK,GAAG;AACxB,eAAW,YAAY,OAAO;AAC5B,YAAM;AAAA,QACJ,EAAE,aAAa,kBAAkB,kBAAkB;AAAA,QACnD;AAAA,QACA;AAAA,MACF;AAAA,IACF;AACA;AAAA,EACF;AACA,QAAM,eAAW,4CAA4B,GAAG;AAChD,QAAM,QAAQ,MAAM,OAChB,CAAC,MAAM,IAAI,IACX,UAAM,uBAAK,MAAM,SAAS,CAAC,GAAG,EAAE,KAAK,IAAI,CAAC;AAE9C,QAAM,QAAQ;AAAA,IACZ,MAAM,IAAI,OAAO,SAAS;AACxB,YAAM,WAAW,MAAM,SAAS,IAAI;AACpC,UAAI,CAAC,UAAU;AACb;AAAA,MACF;AAEA,UAAI,MAAM,QAAQ,CAAC,MAAM,KAAK,KAAK,QAAQ,GAAG;AAC5C;AAAA,MACF;AAEA,YAAM,mBAAmB,eAAe,QAAQ;AAEhD,UAAI,MAAM,OAAO,kBAAkB;AACjC,eAAO,MAAM,qBAAqB;AAAA,UAChC;AAAA,UACA,UAAU;AAAA,UACV,WAAW,MAAM,QAAQ;AAAA,YACvB;AAAA,YACA;AAAA,UACF;AAAA,UACA,MAAM,MAAM;AAAA,QACd,CAAC;AAAA,MACH;AACA,UAAI,MAAM,OAAO,YAAY;AAC3B,kBAAM,yCAAe;AACrB,eAAO,MAAM,qBAAqB;AAAA,UAChC;AAAA,UACA,UAAU;AAAA,UACV,WAAW,MAAM,QAAQ;AAAA,YACvB;AAAA,YACA;AAAA,UACF;AAAA,UACA,MAAM,MAAM;AAAA,QACd,CAAC;AAAA,MACH;AAEA,UAAI,MAAM,OAAO,kBAAkB;AACjC,kBAAM,yCAAe;AAAA,MACvB;AAEA,YAAM,qBAAqB;AAAA,QACzB;AAAA,QACA,UAAU;AAAA,QACV,WAAW,MAAM,QAAQ;AAAA,UACvB;AAAA,UACA,YAAY,SAAS;AAAA,QACvB;AAAA,QACA,MAAM,MAAM;AAAA,MACd,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF;AAEA,MAAM,uBAAuB,OAAO;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAME,MAAM,gBAAAA,QAAG,SAAS;AAAA,EAChB;AAAA,EACA,OAAO,SAAS,WAAW,MAAM,SAAS,IAAI;AAChD;AAEF,MAAM,UAAU,OACd,EAAE,aAAa,kBAAkB,kBAAkB,GACnD,QACG;AACH,QAAM,QAAQ;AAAA,IACZ,WAAW;AAAA,MAAI,CAAC,aACd;AAAA,QACE,EAAE,aAAa,kBAAkB,kBAAkB;AAAA,QACnD;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAEO,MAAM,uBAAuB,OAClC;AAAA,EACE;AAAA,EACA;AACF,GACA,MAAM,QAAQ,IAAI,MACf;AACH,qBAAI,GAAG,wBAAwB,WAAW,EAAE;AAC5C,MAAI;AACF,cAAM,iDAAuB;AAC7B,UAAM,EAAE,SAAS,kBAAkB,IAAI,IAAI;AAAA,MACzC;AAAA,MACA;AAAA,IACF;AACA,QAAI,KAAK;AACP,yBAAI,KAAK,GAAG;AAAA,IACd;AACA,UAAM,QAAQ,EAAE,aAAa,kBAAkB,kBAAkB,GAAG,GAAG;AACvE,uBAAI,GAAG,uBAAuB,WAAW;AAAA,EAC3C,SAAS,KAAK;AACZ,uBAAI,IAAI,mBAAmB;AAC3B,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,YAAQ,WAAW;AAAA,EACrB;AACF;",
  "names": ["fs"]
  }
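The embedded source above shows how the migration has been reworked for this release: nodeVersionMigration now accepts an options object rather than a bare major version, resolves a matching @types/node release via getNode22TypeVersion (falling back to 22.9.0), and runs checkServerlessVersion before applying the sub-patches. A minimal invocation sketch follows; the deep import path and the 'ES2024' target value are illustrative assumptions, not taken from this diff.

import { nodeVersionMigration } from 'skuba/lib/cli/migrate/nodeVersion';

const main = async () => {
  // Patches .nvmrc, Dockerfiles, Buildkite pipelines, serverless configs, CDK
  // infra, @types/node ranges, engines.node constraints and tsconfig targets.
  await nodeVersionMigration(
    { nodeVersion: 22, ECMAScriptVersion: 'ES2024' }, // target value assumed
    process.cwd(),
  );
};

void main();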
@@ -0,0 +1,2 @@
+ export declare const checkServerlessVersion: () => Promise<void>;
+ export declare const checkSkubaType: () => Promise<void>;
@@ -0,0 +1,89 @@
+ "use strict";
+ var __create = Object.create;
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __getProtoOf = Object.getPrototypeOf;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+ };
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+ // If the importer is in node compatibility mode or this is not an ESM
+ // file that has been converted to a CommonJS file using a Babel-
+ // compatible transform (i.e. "__esModule" has not been set), then set
+ // "default" to the CommonJS "module.exports" for node compatibility.
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+ mod
+ ));
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+ var packageJsonChecks_exports = {};
+ __export(packageJsonChecks_exports, {
+ checkServerlessVersion: () => checkServerlessVersion,
+ checkSkubaType: () => checkSkubaType
+ });
+ module.exports = __toCommonJS(packageJsonChecks_exports);
+ var import_find_up = __toESM(require("find-up"));
+ var import_fs_extra = __toESM(require("fs-extra"));
+ const getParentPackageJson = async () => {
+ const packageJsonPath = await (0, import_find_up.default)("package.json", { cwd: process.cwd() });
+ if (!packageJsonPath) {
+ throw new Error("package.json not found");
+ }
+ return import_fs_extra.default.readFile(packageJsonPath);
+ };
+ const isTypeError = (error) => error instanceof TypeError && error.message.includes("Cannot read properties of undefined");
+ const isSyntaxError = (error) => error instanceof SyntaxError && error.message.includes("Unexpected token");
+ const checkServerlessVersion = async () => {
+ const packageJson = await getParentPackageJson();
+ try {
+ const serverlessVersion = JSON.parse(packageJson.toString()).devDependencies.serverless;
+ if (!serverlessVersion) {
+ return;
+ }
+ if (!serverlessVersion.startsWith("4")) {
+ throw new Error(
+ "Serverless version not supported, please upgrade to 4.x"
+ );
+ }
+ } catch (error) {
+ if (isTypeError(error) || isSyntaxError(error)) {
+ return;
+ }
+ throw error;
+ }
+ };
+ const checkSkubaType = async () => {
+ const packageJson = await getParentPackageJson();
+ try {
+ const type = JSON.parse(packageJson.toString()).skuba.type;
+ if (!type) {
+ return;
+ }
+ if (type === "package") {
+ throw new Error(
+ "Skuba type package is not supported, packages should be updated manually to ensure major runtime depreciations are intended"
+ );
+ }
+ } catch (error) {
+ if (isTypeError(error) || isSyntaxError(error)) {
+ return;
+ }
+ throw error;
+ }
+ };
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+ checkServerlessVersion,
+ checkSkubaType
+ });
+ //# sourceMappingURL=packageJsonChecks.js.map
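As the migration source earlier in this diff shows, these two guards are awaited before any files are patched: checkServerlessVersion rejects Serverless releases below 4.x, and checkSkubaType rejects projects declared as skuba type "package". A small sketch of calling them directly; the deep import path is an assumption for illustration.

import {
  checkServerlessVersion,
  checkSkubaType,
} from 'skuba/lib/cli/migrate/nodeVersion/packageJsonChecks';

const main = async () => {
  try {
    // Both guards read the nearest parent package.json via find-up and throw on
    // unsupported configurations; malformed JSON or missing fields are ignored.
    await checkServerlessVersion();
    await checkSkubaType();
  } catch (err) {
    console.error('Node.js 22 migration preconditions failed:', err);
    process.exitCode = 1;
  }
};

void main();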
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../../../src/cli/migrate/nodeVersion/packageJsonChecks.ts"],
+ "sourcesContent": ["import findUp from 'find-up';\nimport fs from 'fs-extra';\n\nconst getParentPackageJson = async () => {\n const packageJsonPath = await findUp('package.json', { cwd: process.cwd() });\n if (!packageJsonPath) {\n throw new Error('package.json not found');\n }\n return fs.readFile(packageJsonPath);\n};\n\nconst isTypeError = (error: unknown): error is TypeError =>\n error instanceof TypeError &&\n error.message.includes('Cannot read properties of undefined');\n\nconst isSyntaxError = (error: unknown): error is SyntaxError =>\n error instanceof SyntaxError && error.message.includes('Unexpected token');\n\nexport const checkServerlessVersion = async () => {\n const packageJson = await getParentPackageJson();\n\n try {\n const serverlessVersion = (\n JSON.parse(packageJson.toString()) as {\n devDependencies: Record<string, string>;\n }\n ).devDependencies.serverless;\n if (!serverlessVersion) {\n return;\n }\n\n if (!serverlessVersion.startsWith('4')) {\n throw new Error(\n 'Serverless version not supported, please upgrade to 4.x',\n );\n }\n } catch (error) {\n if (isTypeError(error) || isSyntaxError(error)) {\n return;\n }\n throw error;\n }\n};\n\nexport const checkSkubaType = async () => {\n const packageJson = await getParentPackageJson();\n\n try {\n const type = (\n JSON.parse(packageJson.toString()) as {\n skuba: Record<string, string>;\n }\n ).skuba.type;\n if (!type) {\n return;\n }\n\n if (type === 'package') {\n throw new Error(\n 'Skuba type package is not supported, packages should be updated manually to ensure major runtime depreciations are intended',\n );\n }\n } catch (error) {\n if (isTypeError(error) || isSyntaxError(error)) {\n return;\n }\n throw error;\n }\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAmB;AACnB,sBAAe;AAEf,MAAM,uBAAuB,YAAY;AACvC,QAAM,kBAAkB,UAAM,eAAAA,SAAO,gBAAgB,EAAE,KAAK,QAAQ,IAAI,EAAE,CAAC;AAC3E,MAAI,CAAC,iBAAiB;AACpB,UAAM,IAAI,MAAM,wBAAwB;AAAA,EAC1C;AACA,SAAO,gBAAAC,QAAG,SAAS,eAAe;AACpC;AAEA,MAAM,cAAc,CAAC,UACnB,iBAAiB,aACjB,MAAM,QAAQ,SAAS,qCAAqC;AAE9D,MAAM,gBAAgB,CAAC,UACrB,iBAAiB,eAAe,MAAM,QAAQ,SAAS,kBAAkB;AAEpE,MAAM,yBAAyB,YAAY;AAChD,QAAM,cAAc,MAAM,qBAAqB;AAE/C,MAAI;AACF,UAAM,oBACJ,KAAK,MAAM,YAAY,SAAS,CAAC,EAGjC,gBAAgB;AAClB,QAAI,CAAC,mBAAmB;AACtB;AAAA,IACF;AAEA,QAAI,CAAC,kBAAkB,WAAW,GAAG,GAAG;AACtC,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,QAAI,YAAY,KAAK,KAAK,cAAc,KAAK,GAAG;AAC9C;AAAA,IACF;AACA,UAAM;AAAA,EACR;AACF;AAEO,MAAM,iBAAiB,YAAY;AACxC,QAAM,cAAc,MAAM,qBAAqB;AAE/C,MAAI;AACF,UAAM,OACJ,KAAK,MAAM,YAAY,SAAS,CAAC,EAGjC,MAAM;AACR,QAAI,CAAC,MAAM;AACT;AAAA,IACF;AAEA,QAAI,SAAS,WAAW;AACtB,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,QAAI,YAAY,KAAK,KAAK,cAAc,KAAK,GAAG;AAC9C;AAAA,IACF;AACA,UAAM;AAAA,EACR;AACF;",
+ "names": ["findUp", "fs"]
+ }
@@ -34,19 +34,16 @@ __export(http_exports, {
  });
  module.exports = __toCommonJS(http_exports);
  var import_http = __toESM(require("http"));
- var import_serialize_error = require("serialize-error");
+ var import_util = __toESM(require("util"));
  var import_logging = require("../utils/logging");
  const createRequestListenerFromFunction = (fn) => async (req, res) => {
- const writeJsonResponse = (statusCode, jsonResponse) => {
- res.writeHead(statusCode, { "Content-Type": "application/json" });
- return new Promise(
- (resolve, reject) => jsonResponse === void 0 ? res.end(resolve) : res.write(
- JSON.stringify(jsonResponse, null, 2),
- "utf8",
- (err) => err ? reject(err) : res.end(resolve)
- )
- );
- };
+ const writeResponse = (response) => new Promise(
+ (resolve, reject) => response === void 0 ? res.end(resolve) : res.write(
+ response,
+ "utf8",
+ (err) => err ? reject(err) : res.end(resolve)
+ )
+ );
  try {
  const requestBody = await new Promise((resolve, reject) => {
  const data = [];
@@ -55,9 +52,11 @@ const createRequestListenerFromFunction = (fn) => async (req, res) => {
  const jsonRequest = requestBody ? JSON.parse(requestBody) : [];
  const args = Array.isArray(jsonRequest) ? jsonRequest : [jsonRequest];
  const response = await fn(...args);
- await writeJsonResponse(200, response);
+ res.writeHead(200, { "Content-Type": "application/json" });
+ await writeResponse(JSON.stringify(response, null, 2));
  } catch (err) {
- await writeJsonResponse(500, (0, import_serialize_error.serializeError)(err));
+ res.writeHead(500);
+ await writeResponse(import_util.default.inspect(err));
  }
  };
  const serveRequestListener = (requestListener, port) => {
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../src/wrapper/http.ts"],
- "sourcesContent": ["import http from 'http';\nimport type { AddressInfo } from 'net';\n\nimport { serializeError } from 'serialize-error';\n\nimport { log } from '../utils/logging';\n\n/**\n * Create an HTTP request listener based on the supplied function.\n *\n * - The request body is JSON parsed and passed into the function as parameters.\n * - The function's return value is JSON stringified into the response body.\n */\nexport const createRequestListenerFromFunction =\n (fn: (...args: unknown[]) => Promise<unknown>): http.RequestListener =>\n async (req, res) => {\n const writeJsonResponse = (statusCode: number, jsonResponse: unknown) => {\n res.writeHead(statusCode, { 'Content-Type': 'application/json' });\n\n return new Promise<void>((resolve, reject) =>\n jsonResponse === undefined\n ? res.end(resolve)\n : res.write(JSON.stringify(jsonResponse, null, 2), 'utf8', (err) =>\n err ? reject(err) : res.end(resolve),\n ),\n );\n };\n\n try {\n const requestBody = await new Promise<string>((resolve, reject) => {\n const data: Buffer[] = [];\n\n req\n .on('data', (chunk: Buffer) => data.push(chunk))\n .on('end', () => resolve(Buffer.concat(data).toString()))\n .on('error', (err) => reject(err));\n });\n\n // Treat an empty body as no arguments\n const jsonRequest: unknown = requestBody ? JSON.parse(requestBody) : [];\n\n // Pass a non-array request body as the first parameter\n const args: unknown[] = Array.isArray(jsonRequest)\n ? jsonRequest\n : [jsonRequest];\n\n const response: unknown = await fn(...args);\n\n await writeJsonResponse(200, response);\n } catch (err) {\n await writeJsonResponse(500, serializeError(err));\n }\n };\n\n/**\n * Create a HTTP server based on the supplied `http.RequestListener`.\n *\n * This function resolves when the server is closed.\n */\nexport const serveRequestListener = (\n requestListener: http.RequestListener,\n port?: number,\n) => {\n const server = http.createServer(requestListener);\n return startServer(server, port);\n};\n\n/**\n * Returns a HTTP server wrapped in a promise\n *\n * This function resolves when the server is closed.\n */\nexport const startServer = (server: http.Server, port?: number) =>\n new Promise<void>((resolve, reject) =>\n server\n .listen(port)\n .on('close', resolve)\n .on('error', reject)\n .on('listening', () => {\n const address = server.address() as AddressInfo;\n\n log.ok('listening on port', log.bold(address.port));\n }),\n );\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAGjB,6BAA+B;AAE/B,qBAAoB;AAQb,MAAM,oCACX,CAAC,OACD,OAAO,KAAK,QAAQ;AAClB,QAAM,oBAAoB,CAAC,YAAoB,iBAA0B;AACvE,QAAI,UAAU,YAAY,EAAE,gBAAgB,mBAAmB,CAAC;AAEhE,WAAO,IAAI;AAAA,MAAc,CAAC,SAAS,WACjC,iBAAiB,SACb,IAAI,IAAI,OAAO,IACf,IAAI;AAAA,QAAM,KAAK,UAAU,cAAc,MAAM,CAAC;AAAA,QAAG;AAAA,QAAQ,CAAC,QACxD,MAAM,OAAO,GAAG,IAAI,IAAI,IAAI,OAAO;AAAA,MACrC;AAAA,IACN;AAAA,EACF;AAEA,MAAI;AACF,UAAM,cAAc,MAAM,IAAI,QAAgB,CAAC,SAAS,WAAW;AACjE,YAAM,OAAiB,CAAC;AAExB,UACG,GAAG,QAAQ,CAAC,UAAkB,KAAK,KAAK,KAAK,CAAC,EAC9C,GAAG,OAAO,MAAM,QAAQ,OAAO,OAAO,IAAI,EAAE,SAAS,CAAC,CAAC,EACvD,GAAG,SAAS,CAAC,QAAQ,OAAO,GAAG,CAAC;AAAA,IACrC,CAAC;AAGD,UAAM,cAAuB,cAAc,KAAK,MAAM,WAAW,IAAI,CAAC;AAGtE,UAAM,OAAkB,MAAM,QAAQ,WAAW,IAC7C,cACA,CAAC,WAAW;AAEhB,UAAM,WAAoB,MAAM,GAAG,GAAG,IAAI;AAE1C,UAAM,kBAAkB,KAAK,QAAQ;AAAA,EACvC,SAAS,KAAK;AACZ,UAAM,kBAAkB,SAAK,uCAAe,GAAG,CAAC;AAAA,EAClD;AACF;AAOK,MAAM,uBAAuB,CAClC,iBACA,SACG;AACH,QAAM,SAAS,YAAAA,QAAK,aAAa,eAAe;AAChD,SAAO,YAAY,QAAQ,IAAI;AACjC;AAOO,MAAM,cAAc,CAAC,QAAqB,SAC/C,IAAI;AAAA,EAAc,CAAC,SAAS,WAC1B,OACG,OAAO,IAAI,EACX,GAAG,SAAS,OAAO,EACnB,GAAG,SAAS,MAAM,EAClB,GAAG,aAAa,MAAM;AACrB,UAAM,UAAU,OAAO,QAAQ;AAE/B,uBAAI,GAAG,qBAAqB,mBAAI,KAAK,QAAQ,IAAI,CAAC;AAAA,EACpD,CAAC;AACL;",
- "names": ["http"]
+ "sourcesContent": ["import http from 'http';\nimport type { AddressInfo } from 'net';\nimport util from 'util';\n\nimport { log } from '../utils/logging';\n\n/**\n * Create an HTTP request listener based on the supplied function.\n *\n * - The request body is JSON parsed and passed into the function as parameters.\n * - The function's return value is JSON stringified into the response body.\n */\nexport const createRequestListenerFromFunction =\n (fn: (...args: unknown[]) => Promise<unknown>): http.RequestListener =>\n async (req, res) => {\n const writeResponse = (response: unknown) =>\n new Promise<void>((resolve, reject) =>\n response === undefined\n ? res.end(resolve)\n : res.write(response, 'utf8', (err) =>\n err ? reject(err) : res.end(resolve),\n ),\n );\n\n try {\n const requestBody = await new Promise<string>((resolve, reject) => {\n const data: Buffer[] = [];\n\n req\n .on('data', (chunk: Buffer) => data.push(chunk))\n .on('end', () => resolve(Buffer.concat(data).toString()))\n .on('error', (err) => reject(err));\n });\n\n // Treat an empty body as no arguments\n const jsonRequest: unknown = requestBody ? JSON.parse(requestBody) : [];\n\n // Pass a non-array request body as the first parameter\n const args: unknown[] = Array.isArray(jsonRequest)\n ? jsonRequest\n : [jsonRequest];\n\n const response: unknown = await fn(...args);\n\n res.writeHead(200, { 'Content-Type': 'application/json' });\n\n await writeResponse(JSON.stringify(response, null, 2));\n } catch (err) {\n res.writeHead(500);\n\n await writeResponse(util.inspect(err));\n }\n };\n\n/**\n * Create a HTTP server based on the supplied `http.RequestListener`.\n *\n * This function resolves when the server is closed.\n */\nexport const serveRequestListener = (\n requestListener: http.RequestListener,\n port?: number,\n) => {\n const server = http.createServer(requestListener);\n return startServer(server, port);\n};\n\n/**\n * Returns a HTTP server wrapped in a promise\n *\n * This function resolves when the server is closed.\n */\nexport const startServer = (server: http.Server, port?: number) =>\n new Promise<void>((resolve, reject) =>\n server\n .listen(port)\n .on('close', resolve)\n .on('error', reject)\n .on('listening', () => {\n const address = server.address() as AddressInfo;\n\n log.ok('listening on port', log.bold(address.port));\n }),\n );\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,kBAAiB;AAEjB,qBAAoB;AAQb,MAAM,oCACX,CAAC,OACD,OAAO,KAAK,QAAQ;AAClB,QAAM,gBAAgB,CAAC,aACrB,IAAI;AAAA,IAAc,CAAC,SAAS,WAC1B,aAAa,SACT,IAAI,IAAI,OAAO,IACf,IAAI;AAAA,MAAM;AAAA,MAAU;AAAA,MAAQ,CAAC,QAC3B,MAAM,OAAO,GAAG,IAAI,IAAI,IAAI,OAAO;AAAA,IACrC;AAAA,EACN;AAEF,MAAI;AACF,UAAM,cAAc,MAAM,IAAI,QAAgB,CAAC,SAAS,WAAW;AACjE,YAAM,OAAiB,CAAC;AAExB,UACG,GAAG,QAAQ,CAAC,UAAkB,KAAK,KAAK,KAAK,CAAC,EAC9C,GAAG,OAAO,MAAM,QAAQ,OAAO,OAAO,IAAI,EAAE,SAAS,CAAC,CAAC,EACvD,GAAG,SAAS,CAAC,QAAQ,OAAO,GAAG,CAAC;AAAA,IACrC,CAAC;AAGD,UAAM,cAAuB,cAAc,KAAK,MAAM,WAAW,IAAI,CAAC;AAGtE,UAAM,OAAkB,MAAM,QAAQ,WAAW,IAC7C,cACA,CAAC,WAAW;AAEhB,UAAM,WAAoB,MAAM,GAAG,GAAG,IAAI;AAE1C,QAAI,UAAU,KAAK,EAAE,gBAAgB,mBAAmB,CAAC;AAEzD,UAAM,cAAc,KAAK,UAAU,UAAU,MAAM,CAAC,CAAC;AAAA,EACvD,SAAS,KAAK;AACZ,QAAI,UAAU,GAAG;AAEjB,UAAM,cAAc,YAAAA,QAAK,QAAQ,GAAG,CAAC;AAAA,EACvC;AACF;AAOK,MAAM,uBAAuB,CAClC,iBACA,SACG;AACH,QAAM,SAAS,YAAAC,QAAK,aAAa,eAAe;AAChD,SAAO,YAAY,QAAQ,IAAI;AACjC;AAOO,MAAM,cAAc,CAAC,QAAqB,SAC/C,IAAI;AAAA,EAAc,CAAC,SAAS,WAC1B,OACG,OAAO,IAAI,EACX,GAAG,SAAS,OAAO,EACnB,GAAG,SAAS,MAAM,EAClB,GAAG,aAAa,MAAM;AACrB,UAAM,UAAU,OAAO,QAAQ;AAE/B,uBAAI,GAAG,qBAAqB,mBAAI,KAAK,QAAQ,IAAI,CAAC;AAAA,EACpD,CAAC;AACL;",
+ "names": ["util", "http"]
  }
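The embedded wrapper source above documents the behaviour that changed here: the listener still JSON-parses the request body into arguments and JSON-stringifies the return value, but error responses are now rendered with util.inspect instead of serialize-error, which lets the serialize-error dependency be dropped from package.json below. A minimal usage sketch, assuming the deep import path resolves:

import {
  createRequestListenerFromFunction,
  serveRequestListener,
} from 'skuba/lib/wrapper/http';

const main = async () => {
  // Hypothetical function: the wrapper passes the parsed JSON request body as
  // arguments and writes the JSON-stringified return value to the response.
  const greet = async (...args: unknown[]) => ({ greeting: args[0] ?? 'world' });

  // Resolves when the server is closed; a thrown error now produces a 500
  // response containing util.inspect(err).
  await serveRequestListener(createRequestListenerFromFunction(greet), 8080);
};

void main();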
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "skuba",
- "version": "9.1.0",
+ "version": "10.0.0-node-22-20250110051224",
  "private": false,
  "description": "SEEK development toolkit for backend applications and packages",
  "homepage": "https://github.com/seek-oss/skuba#readme"
@@ -73,7 +73,7 @@
  "function-arguments": "^1.0.9",
  "get-port": "^5.1.1",
  "golden-fleece": "^1.0.9",
- "ignore": "^5.1.8",
+ "ignore": "^7.0.0",
  "is-installed-globally": "^0.4.0",
  "isomorphic-git": "^1.11.1",
  "jest": "^29.0.1",
@@ -85,11 +85,10 @@
  "npm-run-path": "^4.0.1",
  "npm-which": "^3.0.1",
  "picomatch": "^4.0.0",
- "prettier": "~3.3.0",
+ "prettier": "~3.4.0",
  "prettier-plugin-packagejson": "^2.4.10",
  "read-pkg-up": "^7.0.1",
  "semantic-release": "^22.0.12",
- "serialize-error": "^8.0.1",
  "simple-git": "^3.5.0",
  "ts-dedent": "^2.2.0",
  "ts-jest": "^29.1.0",
@@ -97,13 +96,13 @@
  "tsconfig-paths": "^4.0.0",
  "tsconfig-seek": "2.0.0",
  "tsx": "^4.16.2",
- "typescript": "~5.6.0",
+ "typescript": "~5.7.0",
  "validate-npm-package-name": "^6.0.0",
  "zod": "^3.22.4",
- "eslint-config-skuba": "5.0.0"
+ "eslint-config-skuba": "5.1.0-node-22-20250110051224"
  },
  "devDependencies": {
- "@changesets/cli": "2.27.9",
+ "@changesets/cli": "2.27.11",
  "@changesets/get-github-info": "0.6.0",
  "@jest/reporters": "29.7.0",
  "@jest/test-result": "29.7.0",
@@ -117,15 +116,16 @@
  "@types/module-alias": "2.0.4",
  "@types/npm-which": "3.0.3",
  "@types/picomatch": "3.0.1",
+ "@types/semver": "7.5.8",
  "@types/supertest": "6.0.2",
  "@types/validate-npm-package-name": "4.0.2",
- "enhanced-resolve": "5.17.1",
- "express": "4.21.1",
- "fastify": "5.0.0",
+ "enhanced-resolve": "5.18.0",
+ "express": "4.21.2",
+ "fastify": "5.2.0",
  "jest-diff": "29.7.0",
  "jsonfile": "6.1.0",
  "koa": "2.15.3",
- "memfs": "4.13.0",
+ "memfs": "4.15.1",
  "remark-cli": "12.0.1",
  "remark-preset-lint-recommended": "7.0.0",
  "semver": "7.6.3",
@@ -9,7 +9,7 @@ configs:
  NPM_READ_TOKEN: arn:aws:secretsmanager:ap-southeast-2:987872074697:secret:npm/npm-read-token

  - &docker-ecr-cache
- seek-oss/docker-ecr-cache#v2.2.0: &docker-ecr-cache-defaults
+ seek-oss/docker-ecr-cache#v2.2.1: &docker-ecr-cache-defaults
  cache-on:
  - .npmrc
  - package.json#.packageManager
@@ -18,7 +18,7 @@ configs:
  secrets: id=npm,src=/tmp/.npmrc

  - &private-npm
- seek-oss/private-npm#v1.2.0:
+ seek-oss/private-npm#v1.3.0:
  env: NPM_READ_TOKEN
  output-path: /tmp/

@@ -38,7 +38,7 @@ steps:
  plugins:
  - *aws-sm
  - *private-npm
- - seek-oss/docker-ecr-cache#v2.2.0:
+ - seek-oss/docker-ecr-cache#v2.2.1:
  <<: *docker-ecr-cache-defaults
  skip-pull-from-cache: true

@@ -57,7 +57,7 @@ steps:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v5.4.1:
+ - docker-compose#v5.5.0:
  run: app
  environment:
  - GITHUB_API_TOKEN
@@ -1,4 +1,4 @@
- # syntax=docker/dockerfile:1.10
+ # syntax=docker/dockerfile:1.12

  FROM public.ecr.aws/docker/library/node:20-alpine AS dev-deps

@@ -8,7 +8,4 @@ buildArgs:
  # https://github.com/seek-oss/docker-ecr-cache-buildkite-plugin#building-on-the-resulting-image
  BASE_IMAGE: '{{.Env.BUILDKITE_PLUGIN_DOCKER_ECR_CACHE_EXPORT_IMAGE}}:{{.Env.BUILDKITE_PLUGIN_DOCKER_ECR_CACHE_EXPORT_TAG}}'

- # SEEK-Jobs/gantry#1661
- failOnScanFindings: false
-
  cpuArchitecture: <%- platformName %>
@@ -20,15 +20,15 @@
  "skuba-dive": "^2.0.0"
  },
  "devDependencies": {
- "@types/express": "^4.17.13",
+ "@types/express": "^5.0.0",
  "@types/node": "^20.16.5",
  "@types/supertest": "^6.0.0",
  "mime": "^4.0.1",
- "pino-pretty": "^11.0.0",
+ "pino-pretty": "^13.0.0",
  "skuba": "*",
  "supertest": "^7.0.0"
  },
- "packageManager": "pnpm@9.12.2",
+ "packageManager": "pnpm@9.15.2",
  "engines": {
  "node": ">=20"
  }
@@ -11,7 +11,7 @@ configs:
  NPM_READ_TOKEN: arn:aws:secretsmanager:ap-southeast-2:987872074697:secret:npm/npm-read-token

  - &docker-ecr-cache
- seek-oss/docker-ecr-cache#v2.2.0:
+ seek-oss/docker-ecr-cache#v2.2.1:
  cache-on:
  - .npmrc
  - package.json#.packageManager
@@ -19,7 +19,7 @@ configs:
  secrets: id=npm,src=/tmp/.npmrc

  - &private-npm
- seek-oss/private-npm#v1.2.0:
+ seek-oss/private-npm#v1.3.0:
  env: NPM_READ_TOKEN
  output-path: /tmp/

@@ -38,7 +38,7 @@ steps:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v5.4.1:
+ - docker-compose#v5.5.0:
  run: app
  environment:
  - GITHUB_API_TOKEN
@@ -1,4 +1,4 @@
- # syntax=docker/dockerfile:1.10
+ # syntax=docker/dockerfile:1.12

  FROM public.ecr.aws/docker/library/node:20-alpine AS dev-deps

@@ -17,9 +17,9 @@
  },
  "devDependencies": {
  "@types/node": "^20.9.0",
- "skuba": "*"
+ "skuba": "10.0.0-node-22-20250110051224"
  },
- "packageManager": "pnpm@9.12.2",
+ "packageManager": "pnpm@9.15.2",
  "engines": {
  "node": ">=20"
  }
@@ -9,7 +9,7 @@ configs:
  NPM_READ_TOKEN: arn:aws:secretsmanager:ap-southeast-2:987872074697:secret:npm/npm-read-token

  - &docker-ecr-cache
- seek-oss/docker-ecr-cache#v2.2.0: &docker-ecr-cache-defaults
+ seek-oss/docker-ecr-cache#v2.2.1: &docker-ecr-cache-defaults
  cache-on:
  - .npmrc
  - package.json#.packageManager
@@ -18,7 +18,7 @@ configs:
  secrets: id=npm,src=/tmp/.npmrc

  - &private-npm
- seek-oss/private-npm#v1.2.0:
+ seek-oss/private-npm#v1.3.0:
  env: NPM_READ_TOKEN
  output-path: /tmp/

@@ -38,7 +38,7 @@ steps:
  plugins:
  - *aws-sm
  - *private-npm
- - seek-oss/docker-ecr-cache#v2.2.0:
+ - seek-oss/docker-ecr-cache#v2.2.1:
  <<: *docker-ecr-cache-defaults
  skip-pull-from-cache: true

@@ -57,7 +57,7 @@ steps:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v5.4.1:
+ - docker-compose#v5.5.0:
  run: app
  environment:
  - GITHUB_API_TOKEN
@@ -1,4 +1,4 @@
- # syntax=docker/dockerfile:1.10
+ # syntax=docker/dockerfile:1.12

  FROM public.ecr.aws/docker/library/node:20-alpine AS dev-deps

@@ -8,7 +8,4 @@ buildArgs:
  # https://github.com/seek-oss/docker-ecr-cache-buildkite-plugin#building-on-the-resulting-image
  BASE_IMAGE: '{{.Env.BUILDKITE_PLUGIN_DOCKER_ECR_CACHE_EXPORT_IMAGE}}:{{.Env.BUILDKITE_PLUGIN_DOCKER_ECR_CACHE_EXPORT_TAG}}'

- # SEEK-Jobs/gantry#1661
- failOnScanFindings: false
-
  cpuArchitecture: <%- platformName %>
@@ -17,11 +17,11 @@
  "@koa/router": "^13.0.0",
  "@opentelemetry/api": "^1.9.0",
  "@opentelemetry/core": "^1.25.0",
- "@opentelemetry/exporter-trace-otlp-grpc": "^0.53.0",
- "@opentelemetry/instrumentation-aws-sdk": "^0.44.0",
- "@opentelemetry/instrumentation-http": "^0.53.0",
+ "@opentelemetry/exporter-trace-otlp-grpc": "^0.57.0",
+ "@opentelemetry/instrumentation-aws-sdk": "^0.49.0",
+ "@opentelemetry/instrumentation-http": "^0.57.0",
  "@opentelemetry/propagator-b3": "^1.25.0",
- "@opentelemetry/sdk-node": "^0.53.0",
+ "@opentelemetry/sdk-node": "^0.57.0",
  "@seek/logger": "^9.0.0",
  "hot-shots": "^10.0.0",
  "koa": "^2.13.4",
@@ -40,11 +40,11 @@
  "@types/supertest": "^6.0.0",
  "chance": "^1.1.8",
  "mime": "^4.0.1",
- "pino-pretty": "^11.0.0",
+ "pino-pretty": "^13.0.0",
  "skuba": "*",
  "supertest": "^7.0.0"
  },
- "packageManager": "pnpm@9.12.2",
+ "packageManager": "pnpm@9.15.2",
  "engines": {
  "node": ">=20"
  }
@@ -9,7 +9,7 @@ configs:
  NPM_READ_TOKEN: arn:aws:secretsmanager:ap-southeast-2:987872074697:secret:npm/npm-read-token

  - &docker-ecr-cache
- seek-oss/docker-ecr-cache#v2.2.0: &docker-ecr-cache-defaults
+ seek-oss/docker-ecr-cache#v2.2.1: &docker-ecr-cache-defaults
  cache-on:
  - .npmrc
  - package.json#.packageManager
@@ -17,7 +17,7 @@ configs:
  secrets: id=npm,src=/tmp/.npmrc

  - &private-npm
- seek-oss/private-npm#v1.2.0:
+ seek-oss/private-npm#v1.3.0:
  env: NPM_READ_TOKEN
  output-path: /tmp/

@@ -36,7 +36,7 @@ configs:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v5.4.1:
+ - docker-compose#v5.5.0:
  dependencies: false
  run: app
  propagate-environment: true
@@ -67,7 +67,7 @@ steps:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v5.4.1:
+ - docker-compose#v5.5.0:
  run: app
  environment:
  - GITHUB_API_TOKEN
@@ -82,7 +82,7 @@ steps:
  plugins:
  - *aws-sm
  - *private-npm
- - seek-oss/docker-ecr-cache#v2.2.0:
+ - seek-oss/docker-ecr-cache#v2.2.1:
  <<: *docker-ecr-cache-defaults
  skip-pull-from-cache: true

@@ -1,4 +1,4 @@
- # syntax=docker/dockerfile:1.10
+ # syntax=docker/dockerfile:1.12

  FROM public.ecr.aws/docker/library/node:20-alpine AS dev-deps

@@ -31,14 +31,14 @@
  "aws-sdk-client-mock": "^4.0.0",
  "aws-sdk-client-mock-jest": "^4.0.0",
  "chance": "^1.1.8",
- "pino-pretty": "^11.0.0",
+ "pino-pretty": "^13.0.0",
  "serverless": "^3.39.0",
  "serverless-plugin-canary-deployments": "^0.8.0",
  "serverless-plugin-datadog": "^5.12.0",
  "serverless-prune-plugin": "^2.0.0",
  "skuba": "*"
  },
- "packageManager": "pnpm@9.12.2",
+ "packageManager": "pnpm@9.15.2",
  "engines": {
  "node": ">=20"
  }
@@ -9,7 +9,7 @@ configs:
  NPM_READ_TOKEN: arn:aws:secretsmanager:ap-southeast-2:987872074697:secret:npm/npm-read-token

  - &docker-ecr-cache
- seek-oss/docker-ecr-cache#v2.2.0: &docker-ecr-cache-defaults
+ seek-oss/docker-ecr-cache#v2.2.1: &docker-ecr-cache-defaults
  cache-on:
  - .npmrc
  - package.json#.packageManager
@@ -17,7 +17,7 @@ configs:
  secrets: id=npm,src=/tmp/.npmrc

  - &private-npm
- seek-oss/private-npm#v1.2.0:
+ seek-oss/private-npm#v1.3.0:
  env: NPM_READ_TOKEN
  output-path: /tmp/

@@ -33,7 +33,7 @@ configs:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v5.4.1:
+ - docker-compose#v5.5.0:
  dependencies: false
  run: app
  environment:
@@ -63,7 +63,7 @@ steps:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v5.4.1:
+ - docker-compose#v5.5.0:
  run: app
  environment:
  - GITHUB_API_TOKEN
@@ -78,7 +78,7 @@ steps:
  plugins:
  - *aws-sm
  - *private-npm
- - seek-oss/docker-ecr-cache#v2.2.0:
+ - seek-oss/docker-ecr-cache#v2.2.1:
  <<: *docker-ecr-cache-defaults
  skip-pull-from-cache: true

@@ -1,4 +1,4 @@
- # syntax=docker/dockerfile:1.10
+ # syntax=docker/dockerfile:1.12

  FROM public.ecr.aws/docker/library/node:20-alpine AS dev-deps

@@ -14,9 +14,10 @@ Next steps:
  3. [ ] Add the repository to BuildAgency;
  see our internal [Buildkite Docs] for more information.
  4. [ ] Add Datadog extension, deployment bucket configuration and data classification tags to [infra/config.ts](infra/config.ts).
- 5. [ ] Push local commits to the upstream GitHub branch.
- 6. [ ] Configure [GitHub repository settings].
- 7. [ ] Delete this checklist 😌.
+ 5. [ ] For the smoke test, make sure Lambda has permissions to publish SNS message and configure `sourceSnsTopicArn` in [infra/config.ts](infra/config.ts).
+ 6. [ ] Push local commits to the upstream GitHub branch.
+ 7. [ ] Configure [GitHub repository settings].
+ 8. [ ] Delete this checklist 😌.

  [Buildkite Docs]: https://backstage.myseek.xyz/docs/default/component/buildkite-docs
  [GitHub repository settings]: https://github.com/<%-orgName%>/<%-repoName%>/settings