@geekmidas/constructs 0.0.18 → 0.0.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (165)
  1. package/dist/{AWSLambdaFunction-H65WfXLt.mjs → AWSLambdaFunction-DBUENdP0.mjs} +2 -2
  2. package/dist/{AWSLambdaFunction-H65WfXLt.mjs.map → AWSLambdaFunction-DBUENdP0.mjs.map} +1 -1
  3. package/dist/{AWSLambdaFunction-C-fuCLA3.cjs → AWSLambdaFunction-vobYqQ0w.cjs} +2 -2
  4. package/dist/{AWSLambdaFunction-C-fuCLA3.cjs.map → AWSLambdaFunction-vobYqQ0w.cjs.map} +1 -1
  5. package/dist/{AWSLambdaSubscriberAdaptor-CyFh7MN8.mjs → AWSLambdaSubscriberAdaptor-BLHDyqzQ.mjs} +1 -1
  6. package/dist/{AWSLambdaSubscriberAdaptor-CyFh7MN8.mjs.map → AWSLambdaSubscriberAdaptor-BLHDyqzQ.mjs.map} +1 -1
  7. package/dist/{AWSLambdaSubscriberAdaptor-Dum5bkw3.cjs → AWSLambdaSubscriberAdaptor-DVC4VAQR.cjs} +1 -1
  8. package/dist/{AWSLambdaSubscriberAdaptor-Dum5bkw3.cjs.map → AWSLambdaSubscriberAdaptor-DVC4VAQR.cjs.map} +1 -1
  9. package/dist/{AmazonApiGatewayEndpointAdaptor-CI9L7Ucn.cjs → AmazonApiGatewayEndpointAdaptor-BLUW--OF.cjs} +4 -4
  10. package/dist/{AmazonApiGatewayEndpointAdaptor-CI9L7Ucn.cjs.map → AmazonApiGatewayEndpointAdaptor-BLUW--OF.cjs.map} +1 -1
  11. package/dist/{AmazonApiGatewayEndpointAdaptor-C6Jk5HSy.mjs → AmazonApiGatewayEndpointAdaptor-DBK53gB5.mjs} +4 -4
  12. package/dist/{AmazonApiGatewayEndpointAdaptor-C6Jk5HSy.mjs.map → AmazonApiGatewayEndpointAdaptor-DBK53gB5.mjs.map} +1 -1
  13. package/dist/{AmazonApiGatewayV1EndpointAdaptor-DYL1bCBS.cjs → AmazonApiGatewayV1EndpointAdaptor-B-i9_OtQ.cjs} +3 -3
  14. package/dist/{AmazonApiGatewayV1EndpointAdaptor-DYL1bCBS.cjs.map → AmazonApiGatewayV1EndpointAdaptor-B-i9_OtQ.cjs.map} +1 -1
  15. package/dist/{AmazonApiGatewayV1EndpointAdaptor-BMy8DdNJ.mjs → AmazonApiGatewayV1EndpointAdaptor-DfU3n5im.mjs} +3 -3
  16. package/dist/{AmazonApiGatewayV1EndpointAdaptor-BMy8DdNJ.mjs.map → AmazonApiGatewayV1EndpointAdaptor-DfU3n5im.mjs.map} +1 -1
  17. package/dist/{AmazonApiGatewayV2EndpointAdaptor-BU5wQMOe.mjs → AmazonApiGatewayV2EndpointAdaptor-D-AFyzaQ.mjs} +3 -3
  18. package/dist/{AmazonApiGatewayV2EndpointAdaptor-BU5wQMOe.mjs.map → AmazonApiGatewayV2EndpointAdaptor-D-AFyzaQ.mjs.map} +1 -1
  19. package/dist/{AmazonApiGatewayV2EndpointAdaptor-CPLCMeaN.cjs → AmazonApiGatewayV2EndpointAdaptor-D4k_Bg7Q.cjs} +3 -3
  20. package/dist/{AmazonApiGatewayV2EndpointAdaptor-CPLCMeaN.cjs.map → AmazonApiGatewayV2EndpointAdaptor-D4k_Bg7Q.cjs.map} +1 -1
  21. package/dist/{Cron-Bi3QOge_.cjs → Cron-CmtKQOmE.cjs} +1 -1
  22. package/dist/{Cron-Bi3QOge_.cjs.map → Cron-CmtKQOmE.cjs.map} +1 -1
  23. package/dist/{Cron-Dy_HW2Vv.mjs → Cron-mWi3PQxt.mjs} +1 -1
  24. package/dist/{Cron-Dy_HW2Vv.mjs.map → Cron-mWi3PQxt.mjs.map} +1 -1
  25. package/dist/{CronBuilder-Bl3A2Zp4.mjs → CronBuilder-4DxT6wUa.mjs} +2 -2
  26. package/dist/{CronBuilder-Bl3A2Zp4.mjs.map → CronBuilder-4DxT6wUa.mjs.map} +1 -1
  27. package/dist/{CronBuilder-Dv_w7Yri.cjs → CronBuilder-CeffP9Rs.cjs} +2 -2
  28. package/dist/{CronBuilder-Dv_w7Yri.cjs.map → CronBuilder-CeffP9Rs.cjs.map} +1 -1
  29. package/dist/{Endpoint-DDpF7NO1.cjs → Endpoint-BTvS2vwp.cjs} +1 -1
  30. package/dist/{Endpoint-DDpF7NO1.cjs.map → Endpoint-BTvS2vwp.cjs.map} +1 -1
  31. package/dist/{Endpoint-S6Yh2_PN.mjs → Endpoint-D2LVHBEO.mjs} +1 -1
  32. package/dist/{Endpoint-S6Yh2_PN.mjs.map → Endpoint-D2LVHBEO.mjs.map} +1 -1
  33. package/dist/{EndpointBuilder-CFnjYXmL.cjs → EndpointBuilder-C4qahFeS.cjs} +2 -2
  34. package/dist/{EndpointBuilder-CFnjYXmL.cjs.map → EndpointBuilder-C4qahFeS.cjs.map} +1 -1
  35. package/dist/{EndpointBuilder-DlDft4mJ.mjs → EndpointBuilder-O6B1zJ6v.mjs} +2 -2
  36. package/dist/{EndpointBuilder-DlDft4mJ.mjs.map → EndpointBuilder-O6B1zJ6v.mjs.map} +1 -1
  37. package/dist/{EndpointFactory-Ctln6czP.mjs → EndpointFactory-BUYrnjau.mjs} +2 -2
  38. package/dist/EndpointFactory-BUYrnjau.mjs.map +1 -0
  39. package/dist/{EndpointFactory-mTfi8x1X.cjs → EndpointFactory-C_neYSiA.cjs} +2 -2
  40. package/dist/EndpointFactory-C_neYSiA.cjs.map +1 -0
  41. package/dist/{FunctionExecutionWrapper-DkNycmOh.cjs → FunctionExecutionWrapper-B8agyYHk.cjs} +1 -1
  42. package/dist/{FunctionExecutionWrapper-DkNycmOh.cjs.map → FunctionExecutionWrapper-B8agyYHk.cjs.map} +1 -1
  43. package/dist/{FunctionExecutionWrapper-Bubnr0zA.mjs → FunctionExecutionWrapper-BPIdmPe8.mjs} +1 -1
  44. package/dist/{FunctionExecutionWrapper-Bubnr0zA.mjs.map → FunctionExecutionWrapper-BPIdmPe8.mjs.map} +1 -1
  45. package/dist/{HonoEndpointAdaptor-DsqGuEIb.d.mts → HonoEndpointAdaptor-Br1vuQ3A.d.mts} +3 -3
  46. package/dist/{HonoEndpointAdaptor-DajXbh80.d.cts → HonoEndpointAdaptor-C9wC10-w.d.cts} +3 -3
  47. package/dist/{HonoEndpointAdaptor-DuyE06nH.mjs → HonoEndpointAdaptor-DEFNrIv7.mjs} +5 -5
  48. package/dist/{HonoEndpointAdaptor-DuyE06nH.mjs.map → HonoEndpointAdaptor-DEFNrIv7.mjs.map} +1 -1
  49. package/dist/{HonoEndpointAdaptor-CfLRHHFw.cjs → HonoEndpointAdaptor-DbLeXkR6.cjs} +5 -5
  50. package/dist/{HonoEndpointAdaptor-CfLRHHFw.cjs.map → HonoEndpointAdaptor-DbLeXkR6.cjs.map} +1 -1
  51. package/dist/{TestEndpointAdaptor-DbwrL-RJ.mjs → TestEndpointAdaptor-BGrZsg5c.mjs} +38 -18
  52. package/dist/TestEndpointAdaptor-BGrZsg5c.mjs.map +1 -0
  53. package/dist/{TestEndpointAdaptor-DhRjJHyk.d.mts → TestEndpointAdaptor-Bl2ic-yr.d.mts} +9 -9
  54. package/dist/{TestEndpointAdaptor-B9tUIlCC.d.cts → TestEndpointAdaptor-ByXqQufk.d.cts} +9 -9
  55. package/dist/{TestEndpointAdaptor-B9hyZ-mF.cjs → TestEndpointAdaptor-JCvZ3VVi.cjs} +38 -18
  56. package/dist/TestEndpointAdaptor-JCvZ3VVi.cjs.map +1 -0
  57. package/dist/adaptors/aws.cjs +9 -9
  58. package/dist/adaptors/aws.d.cts +1 -1
  59. package/dist/adaptors/aws.d.mts +1 -1
  60. package/dist/adaptors/aws.mjs +9 -9
  61. package/dist/adaptors/hono.cjs +5 -5
  62. package/dist/adaptors/hono.d.cts +2 -2
  63. package/dist/adaptors/hono.d.mts +2 -2
  64. package/dist/adaptors/hono.mjs +5 -5
  65. package/dist/adaptors/testing.cjs +3 -3
  66. package/dist/adaptors/testing.d.cts +2 -2
  67. package/dist/adaptors/testing.d.mts +2 -2
  68. package/dist/adaptors/testing.mjs +3 -3
  69. package/dist/crons/Cron.cjs +1 -1
  70. package/dist/crons/Cron.d.cts +1 -1
  71. package/dist/crons/Cron.d.mts +1 -1
  72. package/dist/crons/Cron.mjs +1 -1
  73. package/dist/crons/CronBuilder.cjs +2 -2
  74. package/dist/crons/CronBuilder.d.cts +1 -1
  75. package/dist/crons/CronBuilder.d.mts +1 -1
  76. package/dist/crons/CronBuilder.mjs +2 -2
  77. package/dist/crons/index.cjs +2 -2
  78. package/dist/crons/index.d.cts +5 -5
  79. package/dist/crons/index.d.mts +5 -5
  80. package/dist/crons/index.mjs +2 -2
  81. package/dist/endpoints/AmazonApiGatewayEndpointAdaptor.cjs +3 -3
  82. package/dist/endpoints/AmazonApiGatewayEndpointAdaptor.d.cts +1 -1
  83. package/dist/endpoints/AmazonApiGatewayEndpointAdaptor.d.mts +1 -1
  84. package/dist/endpoints/AmazonApiGatewayEndpointAdaptor.mjs +3 -3
  85. package/dist/endpoints/AmazonApiGatewayV1EndpointAdaptor.cjs +5 -5
  86. package/dist/endpoints/AmazonApiGatewayV1EndpointAdaptor.d.cts +1 -1
  87. package/dist/endpoints/AmazonApiGatewayV1EndpointAdaptor.d.mts +1 -1
  88. package/dist/endpoints/AmazonApiGatewayV1EndpointAdaptor.mjs +5 -5
  89. package/dist/endpoints/AmazonApiGatewayV2EndpointAdaptor.cjs +5 -5
  90. package/dist/endpoints/AmazonApiGatewayV2EndpointAdaptor.d.cts +1 -1
  91. package/dist/endpoints/AmazonApiGatewayV2EndpointAdaptor.d.mts +1 -1
  92. package/dist/endpoints/AmazonApiGatewayV2EndpointAdaptor.mjs +5 -5
  93. package/dist/endpoints/Endpoint.cjs +1 -1
  94. package/dist/endpoints/Endpoint.d.cts +1 -1
  95. package/dist/endpoints/Endpoint.d.mts +1 -1
  96. package/dist/endpoints/Endpoint.mjs +1 -1
  97. package/dist/endpoints/EndpointBuilder.cjs +2 -2
  98. package/dist/endpoints/EndpointBuilder.d.cts +1 -1
  99. package/dist/endpoints/EndpointBuilder.d.mts +1 -1
  100. package/dist/endpoints/EndpointBuilder.mjs +2 -2
  101. package/dist/endpoints/EndpointFactory.cjs +3 -3
  102. package/dist/endpoints/EndpointFactory.d.cts +1 -1
  103. package/dist/endpoints/EndpointFactory.d.mts +1 -1
  104. package/dist/endpoints/EndpointFactory.mjs +3 -3
  105. package/dist/endpoints/HonoEndpointAdaptor.cjs +5 -5
  106. package/dist/endpoints/HonoEndpointAdaptor.d.cts +2 -2
  107. package/dist/endpoints/HonoEndpointAdaptor.d.mts +2 -2
  108. package/dist/endpoints/HonoEndpointAdaptor.mjs +5 -5
  109. package/dist/endpoints/TestEndpointAdaptor.cjs +3 -3
  110. package/dist/endpoints/TestEndpointAdaptor.d.cts +2 -2
  111. package/dist/endpoints/TestEndpointAdaptor.d.mts +2 -2
  112. package/dist/endpoints/TestEndpointAdaptor.mjs +3 -3
  113. package/dist/endpoints/audit.d.cts +1 -1
  114. package/dist/endpoints/audit.d.mts +1 -1
  115. package/dist/endpoints/helpers.cjs +2 -2
  116. package/dist/endpoints/helpers.d.cts +1 -1
  117. package/dist/endpoints/helpers.d.mts +1 -1
  118. package/dist/endpoints/helpers.mjs +2 -2
  119. package/dist/endpoints/index.cjs +3 -3
  120. package/dist/endpoints/index.d.cts +3 -3
  121. package/dist/endpoints/index.d.mts +3 -3
  122. package/dist/endpoints/index.mjs +3 -3
  123. package/dist/endpoints/parseHonoQuery.cjs +1 -1
  124. package/dist/endpoints/parseHonoQuery.mjs +1 -1
  125. package/dist/endpoints/parseQueryParams.cjs +1 -1
  126. package/dist/endpoints/parseQueryParams.mjs +1 -1
  127. package/dist/endpoints/processAudits.cjs +1 -1
  128. package/dist/endpoints/processAudits.d.cts +1 -1
  129. package/dist/endpoints/processAudits.d.mts +1 -1
  130. package/dist/endpoints/processAudits.mjs +1 -1
  131. package/dist/functions/AWSLambdaFunction.cjs +2 -2
  132. package/dist/functions/AWSLambdaFunction.mjs +2 -2
  133. package/dist/functions/FunctionExecutionWrapper.cjs +1 -1
  134. package/dist/functions/FunctionExecutionWrapper.mjs +1 -1
  135. package/dist/functions/index.d.cts +1 -1
  136. package/dist/functions/index.d.mts +1 -1
  137. package/dist/{helpers-Khuhi_Qx.cjs → helpers-CUYRcimZ.cjs} +2 -2
  138. package/dist/{helpers-Khuhi_Qx.cjs.map → helpers-CUYRcimZ.cjs.map} +1 -1
  139. package/dist/{helpers-2CLKTnRm.mjs → helpers-D-OW3LI_.mjs} +2 -2
  140. package/dist/{helpers-2CLKTnRm.mjs.map → helpers-D-OW3LI_.mjs.map} +1 -1
  141. package/dist/index-Doa8YPmH.d.cts +10 -0
  142. package/dist/index-TxufD5Xp.d.mts +10 -0
  143. package/dist/{parseHonoQuery-CwFKw2ua.mjs → parseHonoQuery-BlwMModJ.mjs} +1 -1
  144. package/dist/{parseHonoQuery-CwFKw2ua.mjs.map → parseHonoQuery-BlwMModJ.mjs.map} +1 -1
  145. package/dist/{parseHonoQuery-CT8Cvin-.cjs → parseHonoQuery-D-fMmSbA.cjs} +1 -1
  146. package/dist/{parseHonoQuery-CT8Cvin-.cjs.map → parseHonoQuery-D-fMmSbA.cjs.map} +1 -1
  147. package/dist/{parseQueryParams-CwvXXwkW.cjs → parseQueryParams-CbY1zcCU.cjs} +1 -1
  148. package/dist/{parseQueryParams-CwvXXwkW.cjs.map → parseQueryParams-CbY1zcCU.cjs.map} +1 -1
  149. package/dist/{parseQueryParams-CHINupbZ.mjs → parseQueryParams-DlbV3_SB.mjs} +1 -1
  150. package/dist/{parseQueryParams-CHINupbZ.mjs.map → parseQueryParams-DlbV3_SB.mjs.map} +1 -1
  151. package/dist/{processAudits-DfcB-X-4.mjs → processAudits-CW7z5Kj9.mjs} +1 -1
  152. package/dist/{processAudits-DfcB-X-4.mjs.map → processAudits-CW7z5Kj9.mjs.map} +1 -1
  153. package/dist/{processAudits-BFokHhCO.cjs → processAudits-MHp5_fc7.cjs} +1 -1
  154. package/dist/{processAudits-BFokHhCO.cjs.map → processAudits-MHp5_fc7.cjs.map} +1 -1
  155. package/dist/subscribers/AWSLambdaSubscriberAdaptor.cjs +1 -1
  156. package/dist/subscribers/AWSLambdaSubscriberAdaptor.mjs +1 -1
  157. package/package.json +4 -4
  158. package/src/endpoints/TestEndpointAdaptor.ts +51 -52
  159. package/src/endpoints/__tests__/TestEndpointAdaptor.audits.spec.ts +614 -0
  160. package/dist/EndpointFactory-Ctln6czP.mjs.map +0 -1
  161. package/dist/EndpointFactory-mTfi8x1X.cjs.map +0 -1
  162. package/dist/TestEndpointAdaptor-B9hyZ-mF.cjs.map +0 -1
  163. package/dist/TestEndpointAdaptor-DbwrL-RJ.mjs.map +0 -1
  164. package/dist/index-Sxtb_Pzw.d.mts +0 -10
  165. package/dist/index-m7xBtcAW.d.cts +0 -10
@@ -1,5 +1,5 @@
  const require_chunk = require('./chunk-CUT6urMc.cjs');
- const require_Endpoint = require('./Endpoint-DDpF7NO1.cjs');
+ const require_Endpoint = require('./Endpoint-BTvS2vwp.cjs');
  const node_path = require_chunk.__toESM(require("node:path"));
  const fast_glob = require_chunk.__toESM(require("fast-glob"));
 
@@ -92,4 +92,4 @@ Object.defineProperty(exports, 'getProjectRoot', {
  return getProjectRoot;
  }
  });
- //# sourceMappingURL=helpers-Khuhi_Qx.cjs.map
+ //# sourceMappingURL=helpers-CUYRcimZ.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"helpers-Khuhi_Qx.cjs","names":["cwd: string","routes: string[]","endpoints: Endpoint<string, HttpMethod, any, any, TServices>[]"],"sources":["../src/endpoints/helpers.ts"],"sourcesContent":["import path from 'node:path';\nimport fg from 'fast-glob';\nimport { Endpoint } from './Endpoint';\n\nimport type { Service } from '@geekmidas/services';\nimport type { HttpMethod } from '../types';\n\n// Re-export utility functions\n\n/**\n * Recursively finds the project root directory by looking for lock files.\n * Traverses up the directory tree until it finds a package manager lock file.\n *\n * @param cwd - The current working directory to start searching from\n * @returns Promise resolving to the absolute path of the project root\n *\n * @example\n * ```typescript\n * const projectRoot = await getProjectRoot(process.cwd());\n * console.log(`Project root: ${projectRoot}`);\n * // Output: Project root: /Users/user/my-project\n * ```\n */\nexport async function getProjectRoot(cwd: string): Promise<string> {\n if (cwd === '/') {\n return cwd;\n }\n\n const stream = fg.stream(\n ['yarn.lock', 'pnpm-lock.yaml', 'package-lock.json', 'deno.lock'],\n { dot: true, cwd },\n );\n\n let isRoot = false;\n\n for await (const _ of stream) {\n isRoot = true;\n break;\n }\n\n if (isRoot) {\n return cwd;\n }\n\n return getProjectRoot(path.resolve(cwd, '..'));\n}\n\n/**\n * Discovers and imports all Endpoint instances from the specified route patterns.\n * Uses fast-glob to find files matching the patterns and extracts exported Endpoints.\n *\n * @template TServices - Array of service types used by the endpoints\n * @param routes - Array of glob patterns to match route files (e.g., ['src/routes/*.ts'])\n * @param cwd - The current working directory to resolve paths from\n * @returns Promise resolving to an array of Endpoint instances found in the matched files\n *\n * @example\n * ```typescript\n * // Find all endpoints in the routes directory\n * const endpoints = await getEndpointsFromRoutes(\n * ['src/routes/**\\/*.ts'],\n * process.cwd()\n * );\n *\n * // Register endpoints with your server\n * for (const endpoint of endpoints) {\n * server.register(endpoint);\n * }\n * ```\n *\n * @remarks\n * - Only exports that are valid Endpoint instances are included\n * - Files are imported dynamically, so they must be valid ES modules\n * - The function filters out non-Endpoint exports automatically\n */\nexport async function getEndpointsFromRoutes<TServices extends Service[]>(\n routes: string[],\n cwd: string,\n): Promise<Endpoint<string, HttpMethod, any, any, TServices>[]> {\n const stream = fg.stream(routes, { cwd });\n\n const endpoints: Endpoint<string, HttpMethod, any, any, TServices>[] = [];\n\n for await (const f of stream) {\n // Resolve the absolute path for the route file\n const routePath = path.resolve(cwd, f.toString());\n // Dynamically import the route module\n const route = await import(routePath);\n\n // Filter exported values to find only Endpoint instances\n const handlers = Object.values(route).filter((value) => {\n return Endpoint.isEndpoint(value);\n }) as unknown as Endpoint<string, HttpMethod, any, any, TServices>[];\n\n endpoints.push(...handlers);\n }\n\n return 
endpoints;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAuBA,eAAsB,eAAeA,KAA8B;AACjE,KAAI,QAAQ,IACV,QAAO;CAGT,MAAM,SAAS,kBAAG,OAChB;EAAC;EAAa;EAAkB;EAAqB;CAAY,GACjE;EAAE,KAAK;EAAM;CAAK,EACnB;CAED,IAAI,SAAS;AAEb,YAAW,MAAM,KAAK,QAAQ;AAC5B,WAAS;AACT;CACD;AAED,KAAI,OACF,QAAO;AAGT,QAAO,eAAe,kBAAK,QAAQ,KAAK,KAAK,CAAC;AAC/C;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA8BD,eAAsB,uBACpBC,QACAD,KAC8D;CAC9D,MAAM,SAAS,kBAAG,OAAO,QAAQ,EAAE,IAAK,EAAC;CAEzC,MAAME,YAAiE,CAAE;AAEzE,YAAW,MAAM,KAAK,QAAQ;EAE5B,MAAM,YAAY,kBAAK,QAAQ,KAAK,EAAE,UAAU,CAAC;EAEjD,MAAM,QAAQ,MAAM,OAAO;EAG3B,MAAM,WAAW,OAAO,OAAO,MAAM,CAAC,OAAO,CAAC,UAAU;AACtD,UAAO,0BAAS,WAAW,MAAM;EAClC,EAAC;AAEF,YAAU,KAAK,GAAG,SAAS;CAC5B;AAED,QAAO;AACR"}
+ {"version":3,"file":"helpers-CUYRcimZ.cjs","names":["cwd: string","routes: string[]","endpoints: Endpoint<string, HttpMethod, any, any, TServices>[]"],"sources":["../src/endpoints/helpers.ts"],"sourcesContent":["import path from 'node:path';\nimport fg from 'fast-glob';\nimport { Endpoint } from './Endpoint';\n\nimport type { Service } from '@geekmidas/services';\nimport type { HttpMethod } from '../types';\n\n// Re-export utility functions\n\n/**\n * Recursively finds the project root directory by looking for lock files.\n * Traverses up the directory tree until it finds a package manager lock file.\n *\n * @param cwd - The current working directory to start searching from\n * @returns Promise resolving to the absolute path of the project root\n *\n * @example\n * ```typescript\n * const projectRoot = await getProjectRoot(process.cwd());\n * console.log(`Project root: ${projectRoot}`);\n * // Output: Project root: /Users/user/my-project\n * ```\n */\nexport async function getProjectRoot(cwd: string): Promise<string> {\n if (cwd === '/') {\n return cwd;\n }\n\n const stream = fg.stream(\n ['yarn.lock', 'pnpm-lock.yaml', 'package-lock.json', 'deno.lock'],\n { dot: true, cwd },\n );\n\n let isRoot = false;\n\n for await (const _ of stream) {\n isRoot = true;\n break;\n }\n\n if (isRoot) {\n return cwd;\n }\n\n return getProjectRoot(path.resolve(cwd, '..'));\n}\n\n/**\n * Discovers and imports all Endpoint instances from the specified route patterns.\n * Uses fast-glob to find files matching the patterns and extracts exported Endpoints.\n *\n * @template TServices - Array of service types used by the endpoints\n * @param routes - Array of glob patterns to match route files (e.g., ['src/routes/*.ts'])\n * @param cwd - The current working directory to resolve paths from\n * @returns Promise resolving to an array of Endpoint instances found in the matched files\n *\n * @example\n * ```typescript\n * // Find all endpoints in the routes directory\n * const endpoints = await getEndpointsFromRoutes(\n * ['src/routes/**\\/*.ts'],\n * process.cwd()\n * );\n *\n * // Register endpoints with your server\n * for (const endpoint of endpoints) {\n * server.register(endpoint);\n * }\n * ```\n *\n * @remarks\n * - Only exports that are valid Endpoint instances are included\n * - Files are imported dynamically, so they must be valid ES modules\n * - The function filters out non-Endpoint exports automatically\n */\nexport async function getEndpointsFromRoutes<TServices extends Service[]>(\n routes: string[],\n cwd: string,\n): Promise<Endpoint<string, HttpMethod, any, any, TServices>[]> {\n const stream = fg.stream(routes, { cwd });\n\n const endpoints: Endpoint<string, HttpMethod, any, any, TServices>[] = [];\n\n for await (const f of stream) {\n // Resolve the absolute path for the route file\n const routePath = path.resolve(cwd, f.toString());\n // Dynamically import the route module\n const route = await import(routePath);\n\n // Filter exported values to find only Endpoint instances\n const handlers = Object.values(route).filter((value) => {\n return Endpoint.isEndpoint(value);\n }) as unknown as Endpoint<string, HttpMethod, any, any, TServices>[];\n\n endpoints.push(...handlers);\n }\n\n return 
endpoints;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAuBA,eAAsB,eAAeA,KAA8B;AACjE,KAAI,QAAQ,IACV,QAAO;CAGT,MAAM,SAAS,kBAAG,OAChB;EAAC;EAAa;EAAkB;EAAqB;CAAY,GACjE;EAAE,KAAK;EAAM;CAAK,EACnB;CAED,IAAI,SAAS;AAEb,YAAW,MAAM,KAAK,QAAQ;AAC5B,WAAS;AACT;CACD;AAED,KAAI,OACF,QAAO;AAGT,QAAO,eAAe,kBAAK,QAAQ,KAAK,KAAK,CAAC;AAC/C;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA8BD,eAAsB,uBACpBC,QACAD,KAC8D;CAC9D,MAAM,SAAS,kBAAG,OAAO,QAAQ,EAAE,IAAK,EAAC;CAEzC,MAAME,YAAiE,CAAE;AAEzE,YAAW,MAAM,KAAK,QAAQ;EAE5B,MAAM,YAAY,kBAAK,QAAQ,KAAK,EAAE,UAAU,CAAC;EAEjD,MAAM,QAAQ,MAAM,OAAO;EAG3B,MAAM,WAAW,OAAO,OAAO,MAAM,CAAC,OAAO,CAAC,UAAU;AACtD,UAAO,0BAAS,WAAW,MAAM;EAClC,EAAC;AAEF,YAAU,KAAK,GAAG,SAAS;CAC5B;AAED,QAAO;AACR"}
@@ -1,4 +1,4 @@
- import { Endpoint } from "./Endpoint-S6Yh2_PN.mjs";
+ import { Endpoint } from "./Endpoint-D2LVHBEO.mjs";
  import path from "node:path";
  import fg from "fast-glob";
 
@@ -80,4 +80,4 @@ async function getEndpointsFromRoutes(routes, cwd) {
 
  //#endregion
  export { getEndpointsFromRoutes, getProjectRoot };
- //# sourceMappingURL=helpers-2CLKTnRm.mjs.map
+ //# sourceMappingURL=helpers-D-OW3LI_.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"helpers-2CLKTnRm.mjs","names":["cwd: string","routes: string[]","endpoints: Endpoint<string, HttpMethod, any, any, TServices>[]"],"sources":["../src/endpoints/helpers.ts"],"sourcesContent":["import path from 'node:path';\nimport fg from 'fast-glob';\nimport { Endpoint } from './Endpoint';\n\nimport type { Service } from '@geekmidas/services';\nimport type { HttpMethod } from '../types';\n\n// Re-export utility functions\n\n/**\n * Recursively finds the project root directory by looking for lock files.\n * Traverses up the directory tree until it finds a package manager lock file.\n *\n * @param cwd - The current working directory to start searching from\n * @returns Promise resolving to the absolute path of the project root\n *\n * @example\n * ```typescript\n * const projectRoot = await getProjectRoot(process.cwd());\n * console.log(`Project root: ${projectRoot}`);\n * // Output: Project root: /Users/user/my-project\n * ```\n */\nexport async function getProjectRoot(cwd: string): Promise<string> {\n if (cwd === '/') {\n return cwd;\n }\n\n const stream = fg.stream(\n ['yarn.lock', 'pnpm-lock.yaml', 'package-lock.json', 'deno.lock'],\n { dot: true, cwd },\n );\n\n let isRoot = false;\n\n for await (const _ of stream) {\n isRoot = true;\n break;\n }\n\n if (isRoot) {\n return cwd;\n }\n\n return getProjectRoot(path.resolve(cwd, '..'));\n}\n\n/**\n * Discovers and imports all Endpoint instances from the specified route patterns.\n * Uses fast-glob to find files matching the patterns and extracts exported Endpoints.\n *\n * @template TServices - Array of service types used by the endpoints\n * @param routes - Array of glob patterns to match route files (e.g., ['src/routes/*.ts'])\n * @param cwd - The current working directory to resolve paths from\n * @returns Promise resolving to an array of Endpoint instances found in the matched files\n *\n * @example\n * ```typescript\n * // Find all endpoints in the routes directory\n * const endpoints = await getEndpointsFromRoutes(\n * ['src/routes/**\\/*.ts'],\n * process.cwd()\n * );\n *\n * // Register endpoints with your server\n * for (const endpoint of endpoints) {\n * server.register(endpoint);\n * }\n * ```\n *\n * @remarks\n * - Only exports that are valid Endpoint instances are included\n * - Files are imported dynamically, so they must be valid ES modules\n * - The function filters out non-Endpoint exports automatically\n */\nexport async function getEndpointsFromRoutes<TServices extends Service[]>(\n routes: string[],\n cwd: string,\n): Promise<Endpoint<string, HttpMethod, any, any, TServices>[]> {\n const stream = fg.stream(routes, { cwd });\n\n const endpoints: Endpoint<string, HttpMethod, any, any, TServices>[] = [];\n\n for await (const f of stream) {\n // Resolve the absolute path for the route file\n const routePath = path.resolve(cwd, f.toString());\n // Dynamically import the route module\n const route = await import(routePath);\n\n // Filter exported values to find only Endpoint instances\n const handlers = Object.values(route).filter((value) => {\n return Endpoint.isEndpoint(value);\n }) as unknown as Endpoint<string, HttpMethod, any, any, TServices>[];\n\n endpoints.push(...handlers);\n }\n\n return 
endpoints;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;AAuBA,eAAsB,eAAeA,KAA8B;AACjE,KAAI,QAAQ,IACV,QAAO;CAGT,MAAM,SAAS,GAAG,OAChB;EAAC;EAAa;EAAkB;EAAqB;CAAY,GACjE;EAAE,KAAK;EAAM;CAAK,EACnB;CAED,IAAI,SAAS;AAEb,YAAW,MAAM,KAAK,QAAQ;AAC5B,WAAS;AACT;CACD;AAED,KAAI,OACF,QAAO;AAGT,QAAO,eAAe,KAAK,QAAQ,KAAK,KAAK,CAAC;AAC/C;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA8BD,eAAsB,uBACpBC,QACAD,KAC8D;CAC9D,MAAM,SAAS,GAAG,OAAO,QAAQ,EAAE,IAAK,EAAC;CAEzC,MAAME,YAAiE,CAAE;AAEzE,YAAW,MAAM,KAAK,QAAQ;EAE5B,MAAM,YAAY,KAAK,QAAQ,KAAK,EAAE,UAAU,CAAC;EAEjD,MAAM,QAAQ,MAAM,OAAO;EAG3B,MAAM,WAAW,OAAO,OAAO,MAAM,CAAC,OAAO,CAAC,UAAU;AACtD,UAAO,SAAS,WAAW,MAAM;EAClC,EAAC;AAEF,YAAU,KAAK,GAAG,SAAS;CAC5B;AAED,QAAO;AACR"}
+ {"version":3,"file":"helpers-D-OW3LI_.mjs","names":["cwd: string","routes: string[]","endpoints: Endpoint<string, HttpMethod, any, any, TServices>[]"],"sources":["../src/endpoints/helpers.ts"],"sourcesContent":["import path from 'node:path';\nimport fg from 'fast-glob';\nimport { Endpoint } from './Endpoint';\n\nimport type { Service } from '@geekmidas/services';\nimport type { HttpMethod } from '../types';\n\n// Re-export utility functions\n\n/**\n * Recursively finds the project root directory by looking for lock files.\n * Traverses up the directory tree until it finds a package manager lock file.\n *\n * @param cwd - The current working directory to start searching from\n * @returns Promise resolving to the absolute path of the project root\n *\n * @example\n * ```typescript\n * const projectRoot = await getProjectRoot(process.cwd());\n * console.log(`Project root: ${projectRoot}`);\n * // Output: Project root: /Users/user/my-project\n * ```\n */\nexport async function getProjectRoot(cwd: string): Promise<string> {\n if (cwd === '/') {\n return cwd;\n }\n\n const stream = fg.stream(\n ['yarn.lock', 'pnpm-lock.yaml', 'package-lock.json', 'deno.lock'],\n { dot: true, cwd },\n );\n\n let isRoot = false;\n\n for await (const _ of stream) {\n isRoot = true;\n break;\n }\n\n if (isRoot) {\n return cwd;\n }\n\n return getProjectRoot(path.resolve(cwd, '..'));\n}\n\n/**\n * Discovers and imports all Endpoint instances from the specified route patterns.\n * Uses fast-glob to find files matching the patterns and extracts exported Endpoints.\n *\n * @template TServices - Array of service types used by the endpoints\n * @param routes - Array of glob patterns to match route files (e.g., ['src/routes/*.ts'])\n * @param cwd - The current working directory to resolve paths from\n * @returns Promise resolving to an array of Endpoint instances found in the matched files\n *\n * @example\n * ```typescript\n * // Find all endpoints in the routes directory\n * const endpoints = await getEndpointsFromRoutes(\n * ['src/routes/**\\/*.ts'],\n * process.cwd()\n * );\n *\n * // Register endpoints with your server\n * for (const endpoint of endpoints) {\n * server.register(endpoint);\n * }\n * ```\n *\n * @remarks\n * - Only exports that are valid Endpoint instances are included\n * - Files are imported dynamically, so they must be valid ES modules\n * - The function filters out non-Endpoint exports automatically\n */\nexport async function getEndpointsFromRoutes<TServices extends Service[]>(\n routes: string[],\n cwd: string,\n): Promise<Endpoint<string, HttpMethod, any, any, TServices>[]> {\n const stream = fg.stream(routes, { cwd });\n\n const endpoints: Endpoint<string, HttpMethod, any, any, TServices>[] = [];\n\n for await (const f of stream) {\n // Resolve the absolute path for the route file\n const routePath = path.resolve(cwd, f.toString());\n // Dynamically import the route module\n const route = await import(routePath);\n\n // Filter exported values to find only Endpoint instances\n const handlers = Object.values(route).filter((value) => {\n return Endpoint.isEndpoint(value);\n }) as unknown as Endpoint<string, HttpMethod, any, any, TServices>[];\n\n endpoints.push(...handlers);\n }\n\n return 
endpoints;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;AAuBA,eAAsB,eAAeA,KAA8B;AACjE,KAAI,QAAQ,IACV,QAAO;CAGT,MAAM,SAAS,GAAG,OAChB;EAAC;EAAa;EAAkB;EAAqB;CAAY,GACjE;EAAE,KAAK;EAAM;CAAK,EACnB;CAED,IAAI,SAAS;AAEb,YAAW,MAAM,KAAK,QAAQ;AAC5B,WAAS;AACT;CACD;AAED,KAAI,OACF,QAAO;AAGT,QAAO,eAAe,KAAK,QAAQ,KAAK,KAAK,CAAC;AAC/C;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA8BD,eAAsB,uBACpBC,QACAD,KAC8D;CAC9D,MAAM,SAAS,GAAG,OAAO,QAAQ,EAAE,IAAK,EAAC;CAEzC,MAAME,YAAiE,CAAE;AAEzE,YAAW,MAAM,KAAK,QAAQ;EAE5B,MAAM,YAAY,KAAK,QAAQ,KAAK,EAAE,UAAU,CAAC;EAEjD,MAAM,QAAQ,MAAM,OAAO;EAG3B,MAAM,WAAW,OAAO,OAAO,MAAM,CAAC,OAAO,CAAC,UAAU;AACtD,UAAO,SAAS,WAAW,MAAM;EAClC,EAAC;AAEF,YAAU,KAAK,GAAG,SAAS;CAC5B;AAED,QAAO;AACR"}
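Aside from the renamed chunk references, the helpers.ts source embedded in the sourcemaps above is unchanged; its JSDoc already explains how getProjectRoot and getEndpointsFromRoutes are meant to be used. A minimal sketch restating those examples, assuming the helpers are importable from the package's endpoints entry point (the exact subpath export is not confirmed by this diff):

```typescript
// Sketch based on the JSDoc examples in src/endpoints/helpers.ts shown above.
// The import specifier is an assumption; only dist/endpoints/helpers.* is visible here.
import { getEndpointsFromRoutes, getProjectRoot } from '@geekmidas/constructs/endpoints';

async function loadEndpoints() {
  // Walk up from cwd until a package-manager lock file is found.
  const projectRoot = await getProjectRoot(process.cwd());

  // Glob route modules and keep only the exported Endpoint instances.
  const endpoints = await getEndpointsFromRoutes(['src/routes/**/*.ts'], projectRoot);
  return endpoints;
}
```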
@@ -0,0 +1,10 @@
+ import { FunctionBuilder } from "./FunctionBuilder-CjVEFTYC.cjs";
+ import * as _geekmidas_audit8 from "@geekmidas/audit";
+ import * as _geekmidas_logger7 from "@geekmidas/logger";
+ import * as _geekmidas_schema6 from "@geekmidas/schema";
+
+ //#region src/functions/index.d.ts
+ declare const f: FunctionBuilder<_geekmidas_schema6.ComposableStandardSchema, undefined, [], _geekmidas_logger7.Logger, undefined, string, undefined, string, undefined, string, _geekmidas_audit8.AuditableAction<string, unknown>>;
+ //#endregion
+ export { f };
+ //# sourceMappingURL=index-Doa8YPmH.d.cts.map
@@ -0,0 +1,10 @@
+ import { FunctionBuilder } from "./FunctionBuilder-D1ofSeMd.mjs";
+ import * as _geekmidas_audit8 from "@geekmidas/audit";
+ import * as _geekmidas_logger7 from "@geekmidas/logger";
+ import * as _geekmidas_schema6 from "@geekmidas/schema";
+
+ //#region src/functions/index.d.ts
+ declare const f: FunctionBuilder<_geekmidas_schema6.ComposableStandardSchema, undefined, [], _geekmidas_logger7.Logger, undefined, string, undefined, string, undefined, string, _geekmidas_audit8.AuditableAction<string, unknown>>;
+ //#endregion
+ export { f };
+ //# sourceMappingURL=index-TxufD5Xp.d.mts.map
@@ -28,4 +28,4 @@ function parseHonoQuery(c) {
 
  //#endregion
  export { parseHonoQuery };
- //# sourceMappingURL=parseHonoQuery-CwFKw2ua.mjs.map
+ //# sourceMappingURL=parseHonoQuery-BlwMModJ.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"parseHonoQuery-CwFKw2ua.mjs","names":["c: Context","result: Record<string, any>"],"sources":["../src/endpoints/parseHonoQuery.ts"],"sourcesContent":["import type { Context } from 'hono';\n\n/**\n * Parse Hono query parameters to handle arrays and nested objects\n * Hono provides c.req.queries() for arrays, but we need to handle dot notation for objects\n */\nexport function parseHonoQuery(c: Context): Record<string, any> {\n const allParams = c.req.query();\n const result: Record<string, any> = {};\n\n // First, handle all query parameters\n for (const [key, value] of Object.entries(allParams)) {\n if (key.includes('.')) {\n // Handle dot notation for objects\n const parts = key.split('.');\n let current = result;\n\n // Navigate/create the nested structure\n for (let i = 0; i < parts.length - 1; i++) {\n const part = parts[i];\n if (\n !current[part] ||\n typeof current[part] !== 'object' ||\n Array.isArray(current[part])\n ) {\n current[part] = {};\n }\n current = current[part];\n }\n\n // Set the final value, checking for arrays in nested keys\n const lastPart = parts[parts.length - 1];\n const multipleValues = c.req.queries(key);\n if (multipleValues && multipleValues.length > 1) {\n current[lastPart] = multipleValues;\n } else {\n current[lastPart] = value;\n }\n } else {\n // For regular keys, check if there are multiple values\n const multipleValues = c.req.queries(key);\n if (multipleValues && multipleValues.length > 1) {\n result[key] = multipleValues;\n } else {\n result[key] = value;\n }\n }\n }\n\n return result;\n}\n"],"mappings":";;;;;AAMA,SAAgB,eAAeA,GAAiC;CAC9D,MAAM,YAAY,EAAE,IAAI,OAAO;CAC/B,MAAMC,SAA8B,CAAE;AAGtC,MAAK,MAAM,CAAC,KAAK,MAAM,IAAI,OAAO,QAAQ,UAAU,CAClD,KAAI,IAAI,SAAS,IAAI,EAAE;EAErB,MAAM,QAAQ,IAAI,MAAM,IAAI;EAC5B,IAAI,UAAU;AAGd,OAAK,IAAI,IAAI,GAAG,IAAI,MAAM,SAAS,GAAG,KAAK;GACzC,MAAM,OAAO,MAAM;AACnB,QACG,QAAQ,gBACF,QAAQ,UAAU,YACzB,MAAM,QAAQ,QAAQ,MAAM,CAE5B,SAAQ,QAAQ,CAAE;AAEpB,aAAU,QAAQ;EACnB;EAGD,MAAM,WAAW,MAAM,MAAM,SAAS;EACtC,MAAM,iBAAiB,EAAE,IAAI,QAAQ,IAAI;AACzC,MAAI,kBAAkB,eAAe,SAAS,EAC5C,SAAQ,YAAY;MAEpB,SAAQ,YAAY;CAEvB,OAAM;EAEL,MAAM,iBAAiB,EAAE,IAAI,QAAQ,IAAI;AACzC,MAAI,kBAAkB,eAAe,SAAS,EAC5C,QAAO,OAAO;MAEd,QAAO,OAAO;CAEjB;AAGH,QAAO;AACR"}
+ {"version":3,"file":"parseHonoQuery-BlwMModJ.mjs","names":["c: Context","result: Record<string, any>"],"sources":["../src/endpoints/parseHonoQuery.ts"],"sourcesContent":["import type { Context } from 'hono';\n\n/**\n * Parse Hono query parameters to handle arrays and nested objects\n * Hono provides c.req.queries() for arrays, but we need to handle dot notation for objects\n */\nexport function parseHonoQuery(c: Context): Record<string, any> {\n const allParams = c.req.query();\n const result: Record<string, any> = {};\n\n // First, handle all query parameters\n for (const [key, value] of Object.entries(allParams)) {\n if (key.includes('.')) {\n // Handle dot notation for objects\n const parts = key.split('.');\n let current = result;\n\n // Navigate/create the nested structure\n for (let i = 0; i < parts.length - 1; i++) {\n const part = parts[i];\n if (\n !current[part] ||\n typeof current[part] !== 'object' ||\n Array.isArray(current[part])\n ) {\n current[part] = {};\n }\n current = current[part];\n }\n\n // Set the final value, checking for arrays in nested keys\n const lastPart = parts[parts.length - 1];\n const multipleValues = c.req.queries(key);\n if (multipleValues && multipleValues.length > 1) {\n current[lastPart] = multipleValues;\n } else {\n current[lastPart] = value;\n }\n } else {\n // For regular keys, check if there are multiple values\n const multipleValues = c.req.queries(key);\n if (multipleValues && multipleValues.length > 1) {\n result[key] = multipleValues;\n } else {\n result[key] = value;\n }\n }\n }\n\n return result;\n}\n"],"mappings":";;;;;AAMA,SAAgB,eAAeA,GAAiC;CAC9D,MAAM,YAAY,EAAE,IAAI,OAAO;CAC/B,MAAMC,SAA8B,CAAE;AAGtC,MAAK,MAAM,CAAC,KAAK,MAAM,IAAI,OAAO,QAAQ,UAAU,CAClD,KAAI,IAAI,SAAS,IAAI,EAAE;EAErB,MAAM,QAAQ,IAAI,MAAM,IAAI;EAC5B,IAAI,UAAU;AAGd,OAAK,IAAI,IAAI,GAAG,IAAI,MAAM,SAAS,GAAG,KAAK;GACzC,MAAM,OAAO,MAAM;AACnB,QACG,QAAQ,gBACF,QAAQ,UAAU,YACzB,MAAM,QAAQ,QAAQ,MAAM,CAE5B,SAAQ,QAAQ,CAAE;AAEpB,aAAU,QAAQ;EACnB;EAGD,MAAM,WAAW,MAAM,MAAM,SAAS;EACtC,MAAM,iBAAiB,EAAE,IAAI,QAAQ,IAAI;AACzC,MAAI,kBAAkB,eAAe,SAAS,EAC5C,SAAQ,YAAY;MAEpB,SAAQ,YAAY;CAEvB,OAAM;EAEL,MAAM,iBAAiB,EAAE,IAAI,QAAQ,IAAI;AACzC,MAAI,kBAAkB,eAAe,SAAS,EAC5C,QAAO,OAAO;MAEd,QAAO,OAAO;CAEjB;AAGH,QAAO;AACR"}
@@ -34,4 +34,4 @@ Object.defineProperty(exports, 'parseHonoQuery', {
  return parseHonoQuery;
  }
  });
- //# sourceMappingURL=parseHonoQuery-CT8Cvin-.cjs.map
+ //# sourceMappingURL=parseHonoQuery-D-fMmSbA.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"parseHonoQuery-CT8Cvin-.cjs","names":["c: Context","result: Record<string, any>"],"sources":["../src/endpoints/parseHonoQuery.ts"],"sourcesContent":["import type { Context } from 'hono';\n\n/**\n * Parse Hono query parameters to handle arrays and nested objects\n * Hono provides c.req.queries() for arrays, but we need to handle dot notation for objects\n */\nexport function parseHonoQuery(c: Context): Record<string, any> {\n const allParams = c.req.query();\n const result: Record<string, any> = {};\n\n // First, handle all query parameters\n for (const [key, value] of Object.entries(allParams)) {\n if (key.includes('.')) {\n // Handle dot notation for objects\n const parts = key.split('.');\n let current = result;\n\n // Navigate/create the nested structure\n for (let i = 0; i < parts.length - 1; i++) {\n const part = parts[i];\n if (\n !current[part] ||\n typeof current[part] !== 'object' ||\n Array.isArray(current[part])\n ) {\n current[part] = {};\n }\n current = current[part];\n }\n\n // Set the final value, checking for arrays in nested keys\n const lastPart = parts[parts.length - 1];\n const multipleValues = c.req.queries(key);\n if (multipleValues && multipleValues.length > 1) {\n current[lastPart] = multipleValues;\n } else {\n current[lastPart] = value;\n }\n } else {\n // For regular keys, check if there are multiple values\n const multipleValues = c.req.queries(key);\n if (multipleValues && multipleValues.length > 1) {\n result[key] = multipleValues;\n } else {\n result[key] = value;\n }\n }\n }\n\n return result;\n}\n"],"mappings":";;;;;;AAMA,SAAgB,eAAeA,GAAiC;CAC9D,MAAM,YAAY,EAAE,IAAI,OAAO;CAC/B,MAAMC,SAA8B,CAAE;AAGtC,MAAK,MAAM,CAAC,KAAK,MAAM,IAAI,OAAO,QAAQ,UAAU,CAClD,KAAI,IAAI,SAAS,IAAI,EAAE;EAErB,MAAM,QAAQ,IAAI,MAAM,IAAI;EAC5B,IAAI,UAAU;AAGd,OAAK,IAAI,IAAI,GAAG,IAAI,MAAM,SAAS,GAAG,KAAK;GACzC,MAAM,OAAO,MAAM;AACnB,QACG,QAAQ,gBACF,QAAQ,UAAU,YACzB,MAAM,QAAQ,QAAQ,MAAM,CAE5B,SAAQ,QAAQ,CAAE;AAEpB,aAAU,QAAQ;EACnB;EAGD,MAAM,WAAW,MAAM,MAAM,SAAS;EACtC,MAAM,iBAAiB,EAAE,IAAI,QAAQ,IAAI;AACzC,MAAI,kBAAkB,eAAe,SAAS,EAC5C,SAAQ,YAAY;MAEpB,SAAQ,YAAY;CAEvB,OAAM;EAEL,MAAM,iBAAiB,EAAE,IAAI,QAAQ,IAAI;AACzC,MAAI,kBAAkB,eAAe,SAAS,EAC5C,QAAO,OAAO;MAEd,QAAO,OAAO;CAEjB;AAGH,QAAO;AACR"}
+ {"version":3,"file":"parseHonoQuery-D-fMmSbA.cjs","names":["c: Context","result: Record<string, any>"],"sources":["../src/endpoints/parseHonoQuery.ts"],"sourcesContent":["import type { Context } from 'hono';\n\n/**\n * Parse Hono query parameters to handle arrays and nested objects\n * Hono provides c.req.queries() for arrays, but we need to handle dot notation for objects\n */\nexport function parseHonoQuery(c: Context): Record<string, any> {\n const allParams = c.req.query();\n const result: Record<string, any> = {};\n\n // First, handle all query parameters\n for (const [key, value] of Object.entries(allParams)) {\n if (key.includes('.')) {\n // Handle dot notation for objects\n const parts = key.split('.');\n let current = result;\n\n // Navigate/create the nested structure\n for (let i = 0; i < parts.length - 1; i++) {\n const part = parts[i];\n if (\n !current[part] ||\n typeof current[part] !== 'object' ||\n Array.isArray(current[part])\n ) {\n current[part] = {};\n }\n current = current[part];\n }\n\n // Set the final value, checking for arrays in nested keys\n const lastPart = parts[parts.length - 1];\n const multipleValues = c.req.queries(key);\n if (multipleValues && multipleValues.length > 1) {\n current[lastPart] = multipleValues;\n } else {\n current[lastPart] = value;\n }\n } else {\n // For regular keys, check if there are multiple values\n const multipleValues = c.req.queries(key);\n if (multipleValues && multipleValues.length > 1) {\n result[key] = multipleValues;\n } else {\n result[key] = value;\n }\n }\n }\n\n return result;\n}\n"],"mappings":";;;;;;AAMA,SAAgB,eAAeA,GAAiC;CAC9D,MAAM,YAAY,EAAE,IAAI,OAAO;CAC/B,MAAMC,SAA8B,CAAE;AAGtC,MAAK,MAAM,CAAC,KAAK,MAAM,IAAI,OAAO,QAAQ,UAAU,CAClD,KAAI,IAAI,SAAS,IAAI,EAAE;EAErB,MAAM,QAAQ,IAAI,MAAM,IAAI;EAC5B,IAAI,UAAU;AAGd,OAAK,IAAI,IAAI,GAAG,IAAI,MAAM,SAAS,GAAG,KAAK;GACzC,MAAM,OAAO,MAAM;AACnB,QACG,QAAQ,gBACF,QAAQ,UAAU,YACzB,MAAM,QAAQ,QAAQ,MAAM,CAE5B,SAAQ,QAAQ,CAAE;AAEpB,aAAU,QAAQ;EACnB;EAGD,MAAM,WAAW,MAAM,MAAM,SAAS;EACtC,MAAM,iBAAiB,EAAE,IAAI,QAAQ,IAAI;AACzC,MAAI,kBAAkB,eAAe,SAAS,EAC5C,SAAQ,YAAY;MAEpB,SAAQ,YAAY;CAEvB,OAAM;EAEL,MAAM,iBAAiB,EAAE,IAAI,QAAQ,IAAI;AACzC,MAAI,kBAAkB,eAAe,SAAS,EAC5C,QAAO,OAAO;MAEd,QAAO,OAAO;CAEjB;AAGH,QAAO;AACR"}
@@ -35,4 +35,4 @@ Object.defineProperty(exports, 'parseQueryParams', {
  return parseQueryParams;
  }
  });
- //# sourceMappingURL=parseQueryParams-CwvXXwkW.cjs.map
+ //# sourceMappingURL=parseQueryParams-CbY1zcCU.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"parseQueryParams-CwvXXwkW.cjs","names":["queryParams: Record<string, string | string[] | undefined> | null","result: Record<string, any>"],"sources":["../src/endpoints/parseQueryParams.ts"],"sourcesContent":["/**\n * Parse query parameters from a flat object into a nested structure\n * Handles arrays (multiple values with same key) and objects (dot notation)\n *\n * @example\n * parseQueryParams({ 'tags': ['a', 'b'], 'filter.name': 'john' })\n * // Returns: { tags: ['a', 'b'], filter: { name: 'john' } }\n */\nexport function parseQueryParams(\n queryParams: Record<string, string | string[] | undefined> | null,\n): Record<string, any> {\n if (!queryParams) {\n return {};\n }\n\n const result: Record<string, any> = {};\n\n for (const [key, value] of Object.entries(queryParams)) {\n if (value === undefined) {\n continue;\n }\n\n // Check if the key contains dot notation\n if (key.includes('.')) {\n const parts = key.split('.');\n let current = result;\n\n // Navigate/create the nested structure\n for (let i = 0; i < parts.length - 1; i++) {\n const part = parts[i];\n if (\n !current[part] ||\n typeof current[part] !== 'object' ||\n Array.isArray(current[part])\n ) {\n current[part] = {};\n }\n current = current[part];\n }\n\n // Set the final value\n const lastPart = parts[parts.length - 1];\n current[lastPart] = value;\n } else {\n // Simple key, just assign the value\n result[key] = value;\n }\n }\n\n return result;\n}\n"],"mappings":";;;;;;;;;;AAQA,SAAgB,iBACdA,aACqB;AACrB,MAAK,YACH,QAAO,CAAE;CAGX,MAAMC,SAA8B,CAAE;AAEtC,MAAK,MAAM,CAAC,KAAK,MAAM,IAAI,OAAO,QAAQ,YAAY,EAAE;AACtD,MAAI,iBACF;AAIF,MAAI,IAAI,SAAS,IAAI,EAAE;GACrB,MAAM,QAAQ,IAAI,MAAM,IAAI;GAC5B,IAAI,UAAU;AAGd,QAAK,IAAI,IAAI,GAAG,IAAI,MAAM,SAAS,GAAG,KAAK;IACzC,MAAM,OAAO,MAAM;AACnB,SACG,QAAQ,gBACF,QAAQ,UAAU,YACzB,MAAM,QAAQ,QAAQ,MAAM,CAE5B,SAAQ,QAAQ,CAAE;AAEpB,cAAU,QAAQ;GACnB;GAGD,MAAM,WAAW,MAAM,MAAM,SAAS;AACtC,WAAQ,YAAY;EACrB,MAEC,QAAO,OAAO;CAEjB;AAED,QAAO;AACR"}
+ {"version":3,"file":"parseQueryParams-CbY1zcCU.cjs","names":["queryParams: Record<string, string | string[] | undefined> | null","result: Record<string, any>"],"sources":["../src/endpoints/parseQueryParams.ts"],"sourcesContent":["/**\n * Parse query parameters from a flat object into a nested structure\n * Handles arrays (multiple values with same key) and objects (dot notation)\n *\n * @example\n * parseQueryParams({ 'tags': ['a', 'b'], 'filter.name': 'john' })\n * // Returns: { tags: ['a', 'b'], filter: { name: 'john' } }\n */\nexport function parseQueryParams(\n queryParams: Record<string, string | string[] | undefined> | null,\n): Record<string, any> {\n if (!queryParams) {\n return {};\n }\n\n const result: Record<string, any> = {};\n\n for (const [key, value] of Object.entries(queryParams)) {\n if (value === undefined) {\n continue;\n }\n\n // Check if the key contains dot notation\n if (key.includes('.')) {\n const parts = key.split('.');\n let current = result;\n\n // Navigate/create the nested structure\n for (let i = 0; i < parts.length - 1; i++) {\n const part = parts[i];\n if (\n !current[part] ||\n typeof current[part] !== 'object' ||\n Array.isArray(current[part])\n ) {\n current[part] = {};\n }\n current = current[part];\n }\n\n // Set the final value\n const lastPart = parts[parts.length - 1];\n current[lastPart] = value;\n } else {\n // Simple key, just assign the value\n result[key] = value;\n }\n }\n\n return result;\n}\n"],"mappings":";;;;;;;;;;AAQA,SAAgB,iBACdA,aACqB;AACrB,MAAK,YACH,QAAO,CAAE;CAGX,MAAMC,SAA8B,CAAE;AAEtC,MAAK,MAAM,CAAC,KAAK,MAAM,IAAI,OAAO,QAAQ,YAAY,EAAE;AACtD,MAAI,iBACF;AAIF,MAAI,IAAI,SAAS,IAAI,EAAE;GACrB,MAAM,QAAQ,IAAI,MAAM,IAAI;GAC5B,IAAI,UAAU;AAGd,QAAK,IAAI,IAAI,GAAG,IAAI,MAAM,SAAS,GAAG,KAAK;IACzC,MAAM,OAAO,MAAM;AACnB,SACG,QAAQ,gBACF,QAAQ,UAAU,YACzB,MAAM,QAAQ,QAAQ,MAAM,CAE5B,SAAQ,QAAQ,CAAE;AAEpB,cAAU,QAAQ;GACnB;GAGD,MAAM,WAAW,MAAM,MAAM,SAAS;AACtC,WAAQ,YAAY;EACrB,MAEC,QAAO,OAAO;CAEjB;AAED,QAAO;AACR"}
@@ -29,4 +29,4 @@ function parseQueryParams(queryParams) {
 
  //#endregion
  export { parseQueryParams };
- //# sourceMappingURL=parseQueryParams-CHINupbZ.mjs.map
+ //# sourceMappingURL=parseQueryParams-DlbV3_SB.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"parseQueryParams-CHINupbZ.mjs","names":["queryParams: Record<string, string | string[] | undefined> | null","result: Record<string, any>"],"sources":["../src/endpoints/parseQueryParams.ts"],"sourcesContent":["/**\n * Parse query parameters from a flat object into a nested structure\n * Handles arrays (multiple values with same key) and objects (dot notation)\n *\n * @example\n * parseQueryParams({ 'tags': ['a', 'b'], 'filter.name': 'john' })\n * // Returns: { tags: ['a', 'b'], filter: { name: 'john' } }\n */\nexport function parseQueryParams(\n queryParams: Record<string, string | string[] | undefined> | null,\n): Record<string, any> {\n if (!queryParams) {\n return {};\n }\n\n const result: Record<string, any> = {};\n\n for (const [key, value] of Object.entries(queryParams)) {\n if (value === undefined) {\n continue;\n }\n\n // Check if the key contains dot notation\n if (key.includes('.')) {\n const parts = key.split('.');\n let current = result;\n\n // Navigate/create the nested structure\n for (let i = 0; i < parts.length - 1; i++) {\n const part = parts[i];\n if (\n !current[part] ||\n typeof current[part] !== 'object' ||\n Array.isArray(current[part])\n ) {\n current[part] = {};\n }\n current = current[part];\n }\n\n // Set the final value\n const lastPart = parts[parts.length - 1];\n current[lastPart] = value;\n } else {\n // Simple key, just assign the value\n result[key] = value;\n }\n }\n\n return result;\n}\n"],"mappings":";;;;;;;;;AAQA,SAAgB,iBACdA,aACqB;AACrB,MAAK,YACH,QAAO,CAAE;CAGX,MAAMC,SAA8B,CAAE;AAEtC,MAAK,MAAM,CAAC,KAAK,MAAM,IAAI,OAAO,QAAQ,YAAY,EAAE;AACtD,MAAI,iBACF;AAIF,MAAI,IAAI,SAAS,IAAI,EAAE;GACrB,MAAM,QAAQ,IAAI,MAAM,IAAI;GAC5B,IAAI,UAAU;AAGd,QAAK,IAAI,IAAI,GAAG,IAAI,MAAM,SAAS,GAAG,KAAK;IACzC,MAAM,OAAO,MAAM;AACnB,SACG,QAAQ,gBACF,QAAQ,UAAU,YACzB,MAAM,QAAQ,QAAQ,MAAM,CAE5B,SAAQ,QAAQ,CAAE;AAEpB,cAAU,QAAQ;GACnB;GAGD,MAAM,WAAW,MAAM,MAAM,SAAS;AACtC,WAAQ,YAAY;EACrB,MAEC,QAAO,OAAO;CAEjB;AAED,QAAO;AACR"}
+ {"version":3,"file":"parseQueryParams-DlbV3_SB.mjs","names":["queryParams: Record<string, string | string[] | undefined> | null","result: Record<string, any>"],"sources":["../src/endpoints/parseQueryParams.ts"],"sourcesContent":["/**\n * Parse query parameters from a flat object into a nested structure\n * Handles arrays (multiple values with same key) and objects (dot notation)\n *\n * @example\n * parseQueryParams({ 'tags': ['a', 'b'], 'filter.name': 'john' })\n * // Returns: { tags: ['a', 'b'], filter: { name: 'john' } }\n */\nexport function parseQueryParams(\n queryParams: Record<string, string | string[] | undefined> | null,\n): Record<string, any> {\n if (!queryParams) {\n return {};\n }\n\n const result: Record<string, any> = {};\n\n for (const [key, value] of Object.entries(queryParams)) {\n if (value === undefined) {\n continue;\n }\n\n // Check if the key contains dot notation\n if (key.includes('.')) {\n const parts = key.split('.');\n let current = result;\n\n // Navigate/create the nested structure\n for (let i = 0; i < parts.length - 1; i++) {\n const part = parts[i];\n if (\n !current[part] ||\n typeof current[part] !== 'object' ||\n Array.isArray(current[part])\n ) {\n current[part] = {};\n }\n current = current[part];\n }\n\n // Set the final value\n const lastPart = parts[parts.length - 1];\n current[lastPart] = value;\n } else {\n // Simple key, just assign the value\n result[key] = value;\n }\n }\n\n return result;\n}\n"],"mappings":";;;;;;;;;AAQA,SAAgB,iBACdA,aACqB;AACrB,MAAK,YACH,QAAO,CAAE;CAGX,MAAMC,SAA8B,CAAE;AAEtC,MAAK,MAAM,CAAC,KAAK,MAAM,IAAI,OAAO,QAAQ,YAAY,EAAE;AACtD,MAAI,iBACF;AAIF,MAAI,IAAI,SAAS,IAAI,EAAE;GACrB,MAAM,QAAQ,IAAI,MAAM,IAAI;GAC5B,IAAI,UAAU;AAGd,QAAK,IAAI,IAAI,GAAG,IAAI,MAAM,SAAS,GAAG,KAAK;IACzC,MAAM,OAAO,MAAM;AACnB,SACG,QAAQ,gBACF,QAAQ,UAAU,YACzB,MAAM,QAAQ,QAAQ,MAAM,CAE5B,SAAQ,QAAQ,CAAE;AAEpB,cAAU,QAAQ;GACnB;GAGD,MAAM,WAAW,MAAM,MAAM,SAAS;AACtC,WAAQ,YAAY;EACrB,MAEC,QAAO,OAAO;CAEjB;AAED,QAAO;AACR"}
@@ -153,4 +153,4 @@ async function executeWithAuditTransaction(auditContext, handler, onComplete) {
 
  //#endregion
  export { createAuditContext, executeWithAuditTransaction, processEndpointAudits };
- //# sourceMappingURL=processAudits-DfcB-X-4.mjs.map
+ //# sourceMappingURL=processAudits-CW7z5Kj9.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"processAudits-DfcB-X-4.mjs","names":["endpoint: Endpoint<\n any,\n any,\n any,\n OutSchema,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction\n >","response: InferStandardSchema<OutSchema>","serviceDiscovery: ServiceDiscovery<any, any>","logger: TLogger","ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n }","existingAuditor?: Auditor<TAuditAction>","auditor: Auditor<TAuditAction>","actor: AuditActor","endpoint: Endpoint<\n any,\n any,\n any,\n any,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction,\n TDatabase,\n TDatabaseServiceName\n >","auditContext: AuditExecutionContext<TAuditAction> | undefined","handler: (auditor?: Auditor<TAuditAction>) => Promise<T>","onComplete?: (response: T, auditor: Auditor<TAuditAction>) => Promise<void>","response"],"sources":["../src/endpoints/processAudits.ts"],"sourcesContent":["import type {\n AuditActor,\n AuditStorage,\n AuditableAction,\n Auditor,\n} from '@geekmidas/audit';\nimport { DefaultAuditor } from '@geekmidas/audit';\nimport { withAuditableTransaction } from '@geekmidas/audit/kysely';\nimport type { Logger } from '@geekmidas/logger';\nimport type { InferStandardSchema } from '@geekmidas/schema';\nimport type { Service, ServiceDiscovery } from '@geekmidas/services';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\nimport type { CookieFn, Endpoint, HeaderFn } from './Endpoint';\nimport type { ActorExtractor, MappedAudit } from './audit';\n\n/**\n * Process declarative audit definitions after successful endpoint execution.\n * Similar to publishConstructEvents for events.\n *\n * @param endpoint - The endpoint with audit configuration\n * @param response - The handler response to generate audit payloads from\n * @param serviceDiscovery - Service discovery for registering audit storage\n * @param logger - Logger for debug/error messages\n * @param ctx - Request context (session, headers, cookies, services)\n * @param existingAuditor - Optional existing auditor instance (e.g., from handler context).\n * If provided, uses this auditor (with its stored transaction).\n * If not provided, creates a new auditor.\n */\nexport async function processEndpointAudits<\n TServices extends Service[] = [],\n TSession = unknown,\n TLogger extends Logger = Logger,\n OutSchema extends StandardSchemaV1 | undefined = undefined,\n TAuditStorage extends AuditStorage | undefined = undefined,\n TAuditStorageServiceName extends string = string,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n>(\n endpoint: Endpoint<\n any,\n any,\n any,\n OutSchema,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction\n >,\n response: InferStandardSchema<OutSchema>,\n serviceDiscovery: ServiceDiscovery<any, any>,\n logger: TLogger,\n ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n },\n existingAuditor?: Auditor<TAuditAction>,\n): Promise<void> {\n try {\n const audits = endpoint.audits as MappedAudit<TAuditAction, OutSchema>[];\n\n // If we have an existing auditor (from handler context), we need to flush\n // any manual audits it collected, even if there are no declarative audits\n const hasExistingRecords =\n existingAuditor && existingAuditor.getRecords().length > 0;\n\n // Skip if no declarative audits and 
no existing records to flush\n if (!audits?.length && !hasExistingRecords) {\n logger.debug('No audits to process');\n return;\n }\n\n // If no auditor storage service and we have things to process, warn\n if (!endpoint.auditorStorageService) {\n if (hasExistingRecords || audits?.length) {\n logger.warn('No auditor storage service available');\n }\n return;\n }\n\n // Get or create auditor\n let auditor: Auditor<TAuditAction>;\n\n if (existingAuditor) {\n // Use existing auditor (preserves stored transaction and manual audits)\n auditor = existingAuditor;\n logger.debug('Using existing auditor from handler context');\n } else {\n // Create new auditor (backward compatibility)\n const services = await serviceDiscovery.register([\n endpoint.auditorStorageService,\n ]);\n const storage = services[\n endpoint.auditorStorageService.serviceName\n ] as AuditStorage;\n\n // Extract actor if configured\n let actor: AuditActor = { id: 'system', type: 'system' };\n if (endpoint.actorExtractor) {\n try {\n actor = await (\n endpoint.actorExtractor as ActorExtractor<\n TServices,\n TSession,\n TLogger\n >\n )({\n services: ctx.services as any,\n session: ctx.session,\n header: ctx.header,\n cookie: ctx.cookie,\n logger,\n });\n } catch (error) {\n logger.error(error as Error, 'Failed to extract actor for audits');\n // Continue with system actor\n }\n }\n\n auditor = new DefaultAuditor<TAuditAction>({\n actor,\n storage,\n metadata: {\n endpoint: endpoint.route,\n method: endpoint.method,\n },\n });\n }\n\n // Process each declarative audit\n if (audits?.length) {\n for (const audit of audits) {\n logger.debug({ audit: audit.type }, 'Processing declarative audit');\n\n // Check when condition\n if (audit.when && !audit.when(response as any)) {\n logger.debug(\n { audit: audit.type },\n 'Audit skipped due to when condition',\n );\n continue;\n }\n\n // Extract payload\n const payload = audit.payload(response as any);\n\n // Extract entityId if configured\n const entityId = audit.entityId?.(response as any);\n\n // Record the audit\n auditor.audit(audit.type as any, payload as any, {\n table: audit.table,\n entityId,\n });\n }\n }\n\n // Flush audits to storage\n // Note: If existingAuditor has a stored transaction (via setTransaction),\n // flush() will use it automatically\n const recordCount = auditor.getRecords().length;\n if (recordCount > 0) {\n // Check if auditor has a stored transaction (for logging purposes)\n const trx =\n 'getTransaction' in auditor\n ? 
(auditor as { getTransaction(): unknown }).getTransaction()\n : undefined;\n logger.debug(\n { auditCount: recordCount, hasTransaction: !!trx },\n 'Flushing audits',\n );\n await auditor.flush();\n }\n } catch (error) {\n logger.error(error as Error, 'Failed to process audits');\n // Don't rethrow - audit failures shouldn't fail the request\n }\n}\n\n/**\n * Context for audit-aware handler execution.\n */\nexport interface AuditExecutionContext<\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n> {\n /** The auditor instance for recording audits */\n auditor: Auditor<TAuditAction>;\n /** The audit storage instance */\n storage: AuditStorage;\n}\n\n/**\n * Create audit context for handler execution.\n * Returns the auditor and storage for use in the handler.\n *\n * @param endpoint - The endpoint with audit configuration\n * @param serviceDiscovery - Service discovery for getting audit storage\n * @param logger - Logger for debug/error messages\n * @param ctx - Request context for actor extraction\n * @returns Audit context with auditor and storage, or undefined if not configured\n */\nexport async function createAuditContext<\n TServices extends Service[] = [],\n TSession = unknown,\n TLogger extends Logger = Logger,\n TAuditStorage extends AuditStorage | undefined = undefined,\n TAuditStorageServiceName extends string = string,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n TDatabase = undefined,\n TDatabaseServiceName extends string = string,\n>(\n endpoint: Endpoint<\n any,\n any,\n any,\n any,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction,\n TDatabase,\n TDatabaseServiceName\n >,\n serviceDiscovery: ServiceDiscovery<any, any>,\n logger: TLogger,\n ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n },\n): Promise<AuditExecutionContext<TAuditAction> | undefined> {\n if (!endpoint.auditorStorageService) {\n return undefined;\n }\n\n const services = await serviceDiscovery.register([\n endpoint.auditorStorageService,\n ]);\n const storage = services[\n endpoint.auditorStorageService.serviceName\n ] as AuditStorage;\n\n // Extract actor if configured\n let actor: AuditActor = { id: 'system', type: 'system' };\n if (endpoint.actorExtractor) {\n try {\n actor = await (\n endpoint.actorExtractor as ActorExtractor<TServices, TSession, TLogger>\n )({\n services: ctx.services as any,\n session: ctx.session,\n header: ctx.header,\n cookie: ctx.cookie,\n logger,\n });\n } catch (error) {\n logger.error(error as Error, 'Failed to extract actor for audits');\n }\n }\n\n const auditor = new DefaultAuditor<TAuditAction>({\n actor,\n storage,\n metadata: {\n endpoint: endpoint.route,\n method: endpoint.method,\n },\n });\n\n return { auditor, storage };\n}\n\n/**\n * Execute a handler with automatic audit transaction support.\n * If the audit storage has a database (via getDatabase()), wraps execution\n * in a transaction so audits are atomic with handler's database operations.\n *\n * @param auditContext - The audit context from createAuditContext\n * @param handler - The handler function to execute (receives auditor)\n * @param onComplete - Called after handler with response, to process declarative audits\n * @returns The handler result\n */\nexport async function executeWithAuditTransaction<\n T,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n 
string,\n unknown\n >,\n>(\n auditContext: AuditExecutionContext<TAuditAction> | undefined,\n handler: (auditor?: Auditor<TAuditAction>) => Promise<T>,\n onComplete?: (response: T, auditor: Auditor<TAuditAction>) => Promise<void>,\n): Promise<T> {\n // No audit context - just run handler\n if (!auditContext) {\n return handler(undefined);\n }\n\n const { auditor, storage } = auditContext;\n\n // Check if storage has a database for transactional execution\n const db = storage.getDatabase?.();\n\n if (db) {\n // Wrap in transaction - audits are atomic with handler operations\n return withAuditableTransaction(db as any, auditor as any, async () => {\n const response = await handler(auditor);\n\n // Process declarative audits within the transaction\n if (onComplete) {\n await onComplete(response, auditor);\n }\n\n // Audits are flushed by withAuditableTransaction before commit\n return response;\n });\n }\n\n // No database - run handler and flush audits after\n const response = await handler(auditor);\n\n if (onComplete) {\n await onComplete(response, auditor);\n }\n\n // Flush audits (no transaction)\n await auditor.flush();\n\n return response;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;AA4BA,eAAsB,sBAYpBA,UAcAC,UACAC,kBACAC,QACAC,KAMAC,iBACe;AACf,KAAI;EACF,MAAM,SAAS,SAAS;EAIxB,MAAM,qBACJ,mBAAmB,gBAAgB,YAAY,CAAC,SAAS;AAG3D,OAAK,QAAQ,WAAW,oBAAoB;AAC1C,UAAO,MAAM,uBAAuB;AACpC;EACD;AAGD,OAAK,SAAS,uBAAuB;AACnC,OAAI,sBAAsB,QAAQ,OAChC,QAAO,KAAK,uCAAuC;AAErD;EACD;EAGD,IAAIC;AAEJ,MAAI,iBAAiB;AAEnB,aAAU;AACV,UAAO,MAAM,8CAA8C;EAC5D,OAAM;GAEL,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAC/C,SAAS,qBACV,EAAC;GACF,MAAM,UAAU,SACd,SAAS,sBAAsB;GAIjC,IAAIC,QAAoB;IAAE,IAAI;IAAU,MAAM;GAAU;AACxD,OAAI,SAAS,eACX,KAAI;AACF,YAAQ,MAAM,AACZ,SAAS,eAKT;KACA,UAAU,IAAI;KACd,SAAS,IAAI;KACb,QAAQ,IAAI;KACZ,QAAQ,IAAI;KACZ;IACD,EAAC;GACH,SAAQ,OAAO;AACd,WAAO,MAAM,OAAgB,qCAAqC;GAEnE;AAGH,aAAU,IAAI,eAA6B;IACzC;IACA;IACA,UAAU;KACR,UAAU,SAAS;KACnB,QAAQ,SAAS;IAClB;GACF;EACF;AAGD,MAAI,QAAQ,OACV,MAAK,MAAM,SAAS,QAAQ;AAC1B,UAAO,MAAM,EAAE,OAAO,MAAM,KAAM,GAAE,+BAA+B;AAGnE,OAAI,MAAM,SAAS,MAAM,KAAK,SAAgB,EAAE;AAC9C,WAAO,MACL,EAAE,OAAO,MAAM,KAAM,GACrB,sCACD;AACD;GACD;GAGD,MAAM,UAAU,MAAM,QAAQ,SAAgB;GAG9C,MAAM,WAAW,MAAM,WAAW,SAAgB;AAGlD,WAAQ,MAAM,MAAM,MAAa,SAAgB;IAC/C,OAAO,MAAM;IACb;GACD,EAAC;EACH;EAMH,MAAM,cAAc,QAAQ,YAAY,CAAC;AACzC,MAAI,cAAc,GAAG;GAEnB,MAAM,MACJ,oBAAoB,UAChB,AAAC,QAA0C,gBAAgB;AAEjE,UAAO,MACL;IAAE,YAAY;IAAa,kBAAkB;GAAK,GAClD,kBACD;AACD,SAAM,QAAQ,OAAO;EACtB;CACF,SAAQ,OAAO;AACd,SAAO,MAAM,OAAgB,2BAA2B;CAEzD;AACF;;;;;;;;;;;AA2BD,eAAsB,mBAapBC,UAgBAN,kBACAC,QACAC,KAM0D;AAC1D,MAAK,SAAS,sBACZ;CAGF,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAC/C,SAAS,qBACV,EAAC;CACF,MAAM,UAAU,SACd,SAAS,sBAAsB;CAIjC,IAAIG,QAAoB;EAAE,IAAI;EAAU,MAAM;CAAU;AACxD,KAAI,SAAS,eACX,KAAI;AACF,UAAQ,MAAM,AACZ,SAAS,eACT;GACA,UAAU,IAAI;GACd,SAAS,IAAI;GACb,QAAQ,IAAI;GACZ,QAAQ,IAAI;GACZ;EACD,EAAC;CACH,SAAQ,OAAO;AACd,SAAO,MAAM,OAAgB,qCAAqC;CACnE;CAGH,MAAM,UAAU,IAAI,eAA6B;EAC/C;EACA;EACA,UAAU;GACR,UAAU,SAAS;GACnB,QAAQ,SAAS;EAClB;CACF;AAED,QAAO;EAAE;EAAS;CAAS;AAC5B;;;;;;;;;;;AAYD,eAAsB,4BAOpBE,cACAC,SACAC,YACY;AAEZ,MAAK,aACH,QAAO,eAAkB;CAG3B,MAAM,EAAE,SAAS,SAAS,GAAG;CAG7B,MAAM,KAAK,QAAQ,eAAe;AAElC,KAAI,GAEF,QAAO,yBAAyB,IAAW,SAAgB,YAAY;EACrE,MAAMC,aAAW,MAAM,QAAQ,QAAQ;AAGvC,MAAI,WACF,OAAM,WAAWA,YAAU,QAAQ;AAIrC,SAAOA;CACR,EAAC;CAIJ,MAAM,WAAW,MAAM,QAAQ,QAAQ;AAEvC,KAAI,WACF,OAAM,WAAW,UAAU,QAAQ;AAIrC,OAAM,QAAQ,OAAO;AAErB,QAAO;AACR"}
1
+ {"version":3,"file":"processAudits-CW7z5Kj9.mjs","names":["endpoint: Endpoint<\n any,\n any,\n any,\n OutSchema,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction\n >","response: InferStandardSchema<OutSchema>","serviceDiscovery: ServiceDiscovery<any, any>","logger: TLogger","ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n }","existingAuditor?: Auditor<TAuditAction>","auditor: Auditor<TAuditAction>","actor: AuditActor","endpoint: Endpoint<\n any,\n any,\n any,\n any,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction,\n TDatabase,\n TDatabaseServiceName\n >","auditContext: AuditExecutionContext<TAuditAction> | undefined","handler: (auditor?: Auditor<TAuditAction>) => Promise<T>","onComplete?: (response: T, auditor: Auditor<TAuditAction>) => Promise<void>","response"],"sources":["../src/endpoints/processAudits.ts"],"sourcesContent":["import type {\n AuditActor,\n AuditStorage,\n AuditableAction,\n Auditor,\n} from '@geekmidas/audit';\nimport { DefaultAuditor } from '@geekmidas/audit';\nimport { withAuditableTransaction } from '@geekmidas/audit/kysely';\nimport type { Logger } from '@geekmidas/logger';\nimport type { InferStandardSchema } from '@geekmidas/schema';\nimport type { Service, ServiceDiscovery } from '@geekmidas/services';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\nimport type { CookieFn, Endpoint, HeaderFn } from './Endpoint';\nimport type { ActorExtractor, MappedAudit } from './audit';\n\n/**\n * Process declarative audit definitions after successful endpoint execution.\n * Similar to publishConstructEvents for events.\n *\n * @param endpoint - The endpoint with audit configuration\n * @param response - The handler response to generate audit payloads from\n * @param serviceDiscovery - Service discovery for registering audit storage\n * @param logger - Logger for debug/error messages\n * @param ctx - Request context (session, headers, cookies, services)\n * @param existingAuditor - Optional existing auditor instance (e.g., from handler context).\n * If provided, uses this auditor (with its stored transaction).\n * If not provided, creates a new auditor.\n */\nexport async function processEndpointAudits<\n TServices extends Service[] = [],\n TSession = unknown,\n TLogger extends Logger = Logger,\n OutSchema extends StandardSchemaV1 | undefined = undefined,\n TAuditStorage extends AuditStorage | undefined = undefined,\n TAuditStorageServiceName extends string = string,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n>(\n endpoint: Endpoint<\n any,\n any,\n any,\n OutSchema,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction\n >,\n response: InferStandardSchema<OutSchema>,\n serviceDiscovery: ServiceDiscovery<any, any>,\n logger: TLogger,\n ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n },\n existingAuditor?: Auditor<TAuditAction>,\n): Promise<void> {\n try {\n const audits = endpoint.audits as MappedAudit<TAuditAction, OutSchema>[];\n\n // If we have an existing auditor (from handler context), we need to flush\n // any manual audits it collected, even if there are no declarative audits\n const hasExistingRecords =\n existingAuditor && existingAuditor.getRecords().length > 0;\n\n // Skip if no declarative audits and 
no existing records to flush\n if (!audits?.length && !hasExistingRecords) {\n logger.debug('No audits to process');\n return;\n }\n\n // If no auditor storage service and we have things to process, warn\n if (!endpoint.auditorStorageService) {\n if (hasExistingRecords || audits?.length) {\n logger.warn('No auditor storage service available');\n }\n return;\n }\n\n // Get or create auditor\n let auditor: Auditor<TAuditAction>;\n\n if (existingAuditor) {\n // Use existing auditor (preserves stored transaction and manual audits)\n auditor = existingAuditor;\n logger.debug('Using existing auditor from handler context');\n } else {\n // Create new auditor (backward compatibility)\n const services = await serviceDiscovery.register([\n endpoint.auditorStorageService,\n ]);\n const storage = services[\n endpoint.auditorStorageService.serviceName\n ] as AuditStorage;\n\n // Extract actor if configured\n let actor: AuditActor = { id: 'system', type: 'system' };\n if (endpoint.actorExtractor) {\n try {\n actor = await (\n endpoint.actorExtractor as ActorExtractor<\n TServices,\n TSession,\n TLogger\n >\n )({\n services: ctx.services as any,\n session: ctx.session,\n header: ctx.header,\n cookie: ctx.cookie,\n logger,\n });\n } catch (error) {\n logger.error(error as Error, 'Failed to extract actor for audits');\n // Continue with system actor\n }\n }\n\n auditor = new DefaultAuditor<TAuditAction>({\n actor,\n storage,\n metadata: {\n endpoint: endpoint.route,\n method: endpoint.method,\n },\n });\n }\n\n // Process each declarative audit\n if (audits?.length) {\n for (const audit of audits) {\n logger.debug({ audit: audit.type }, 'Processing declarative audit');\n\n // Check when condition\n if (audit.when && !audit.when(response as any)) {\n logger.debug(\n { audit: audit.type },\n 'Audit skipped due to when condition',\n );\n continue;\n }\n\n // Extract payload\n const payload = audit.payload(response as any);\n\n // Extract entityId if configured\n const entityId = audit.entityId?.(response as any);\n\n // Record the audit\n auditor.audit(audit.type as any, payload as any, {\n table: audit.table,\n entityId,\n });\n }\n }\n\n // Flush audits to storage\n // Note: If existingAuditor has a stored transaction (via setTransaction),\n // flush() will use it automatically\n const recordCount = auditor.getRecords().length;\n if (recordCount > 0) {\n // Check if auditor has a stored transaction (for logging purposes)\n const trx =\n 'getTransaction' in auditor\n ? 
(auditor as { getTransaction(): unknown }).getTransaction()\n : undefined;\n logger.debug(\n { auditCount: recordCount, hasTransaction: !!trx },\n 'Flushing audits',\n );\n await auditor.flush();\n }\n } catch (error) {\n logger.error(error as Error, 'Failed to process audits');\n // Don't rethrow - audit failures shouldn't fail the request\n }\n}\n\n/**\n * Context for audit-aware handler execution.\n */\nexport interface AuditExecutionContext<\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n> {\n /** The auditor instance for recording audits */\n auditor: Auditor<TAuditAction>;\n /** The audit storage instance */\n storage: AuditStorage;\n}\n\n/**\n * Create audit context for handler execution.\n * Returns the auditor and storage for use in the handler.\n *\n * @param endpoint - The endpoint with audit configuration\n * @param serviceDiscovery - Service discovery for getting audit storage\n * @param logger - Logger for debug/error messages\n * @param ctx - Request context for actor extraction\n * @returns Audit context with auditor and storage, or undefined if not configured\n */\nexport async function createAuditContext<\n TServices extends Service[] = [],\n TSession = unknown,\n TLogger extends Logger = Logger,\n TAuditStorage extends AuditStorage | undefined = undefined,\n TAuditStorageServiceName extends string = string,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n TDatabase = undefined,\n TDatabaseServiceName extends string = string,\n>(\n endpoint: Endpoint<\n any,\n any,\n any,\n any,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction,\n TDatabase,\n TDatabaseServiceName\n >,\n serviceDiscovery: ServiceDiscovery<any, any>,\n logger: TLogger,\n ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n },\n): Promise<AuditExecutionContext<TAuditAction> | undefined> {\n if (!endpoint.auditorStorageService) {\n return undefined;\n }\n\n const services = await serviceDiscovery.register([\n endpoint.auditorStorageService,\n ]);\n const storage = services[\n endpoint.auditorStorageService.serviceName\n ] as AuditStorage;\n\n // Extract actor if configured\n let actor: AuditActor = { id: 'system', type: 'system' };\n if (endpoint.actorExtractor) {\n try {\n actor = await (\n endpoint.actorExtractor as ActorExtractor<TServices, TSession, TLogger>\n )({\n services: ctx.services as any,\n session: ctx.session,\n header: ctx.header,\n cookie: ctx.cookie,\n logger,\n });\n } catch (error) {\n logger.error(error as Error, 'Failed to extract actor for audits');\n }\n }\n\n const auditor = new DefaultAuditor<TAuditAction>({\n actor,\n storage,\n metadata: {\n endpoint: endpoint.route,\n method: endpoint.method,\n },\n });\n\n return { auditor, storage };\n}\n\n/**\n * Execute a handler with automatic audit transaction support.\n * If the audit storage has a database (via getDatabase()), wraps execution\n * in a transaction so audits are atomic with handler's database operations.\n *\n * @param auditContext - The audit context from createAuditContext\n * @param handler - The handler function to execute (receives auditor)\n * @param onComplete - Called after handler with response, to process declarative audits\n * @returns The handler result\n */\nexport async function executeWithAuditTransaction<\n T,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n 
string,\n unknown\n >,\n>(\n auditContext: AuditExecutionContext<TAuditAction> | undefined,\n handler: (auditor?: Auditor<TAuditAction>) => Promise<T>,\n onComplete?: (response: T, auditor: Auditor<TAuditAction>) => Promise<void>,\n): Promise<T> {\n // No audit context - just run handler\n if (!auditContext) {\n return handler(undefined);\n }\n\n const { auditor, storage } = auditContext;\n\n // Check if storage has a database for transactional execution\n const db = storage.getDatabase?.();\n\n if (db) {\n // Wrap in transaction - audits are atomic with handler operations\n return withAuditableTransaction(db as any, auditor as any, async () => {\n const response = await handler(auditor);\n\n // Process declarative audits within the transaction\n if (onComplete) {\n await onComplete(response, auditor);\n }\n\n // Audits are flushed by withAuditableTransaction before commit\n return response;\n });\n }\n\n // No database - run handler and flush audits after\n const response = await handler(auditor);\n\n if (onComplete) {\n await onComplete(response, auditor);\n }\n\n // Flush audits (no transaction)\n await auditor.flush();\n\n return response;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;AA4BA,eAAsB,sBAYpBA,UAcAC,UACAC,kBACAC,QACAC,KAMAC,iBACe;AACf,KAAI;EACF,MAAM,SAAS,SAAS;EAIxB,MAAM,qBACJ,mBAAmB,gBAAgB,YAAY,CAAC,SAAS;AAG3D,OAAK,QAAQ,WAAW,oBAAoB;AAC1C,UAAO,MAAM,uBAAuB;AACpC;EACD;AAGD,OAAK,SAAS,uBAAuB;AACnC,OAAI,sBAAsB,QAAQ,OAChC,QAAO,KAAK,uCAAuC;AAErD;EACD;EAGD,IAAIC;AAEJ,MAAI,iBAAiB;AAEnB,aAAU;AACV,UAAO,MAAM,8CAA8C;EAC5D,OAAM;GAEL,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAC/C,SAAS,qBACV,EAAC;GACF,MAAM,UAAU,SACd,SAAS,sBAAsB;GAIjC,IAAIC,QAAoB;IAAE,IAAI;IAAU,MAAM;GAAU;AACxD,OAAI,SAAS,eACX,KAAI;AACF,YAAQ,MAAM,AACZ,SAAS,eAKT;KACA,UAAU,IAAI;KACd,SAAS,IAAI;KACb,QAAQ,IAAI;KACZ,QAAQ,IAAI;KACZ;IACD,EAAC;GACH,SAAQ,OAAO;AACd,WAAO,MAAM,OAAgB,qCAAqC;GAEnE;AAGH,aAAU,IAAI,eAA6B;IACzC;IACA;IACA,UAAU;KACR,UAAU,SAAS;KACnB,QAAQ,SAAS;IAClB;GACF;EACF;AAGD,MAAI,QAAQ,OACV,MAAK,MAAM,SAAS,QAAQ;AAC1B,UAAO,MAAM,EAAE,OAAO,MAAM,KAAM,GAAE,+BAA+B;AAGnE,OAAI,MAAM,SAAS,MAAM,KAAK,SAAgB,EAAE;AAC9C,WAAO,MACL,EAAE,OAAO,MAAM,KAAM,GACrB,sCACD;AACD;GACD;GAGD,MAAM,UAAU,MAAM,QAAQ,SAAgB;GAG9C,MAAM,WAAW,MAAM,WAAW,SAAgB;AAGlD,WAAQ,MAAM,MAAM,MAAa,SAAgB;IAC/C,OAAO,MAAM;IACb;GACD,EAAC;EACH;EAMH,MAAM,cAAc,QAAQ,YAAY,CAAC;AACzC,MAAI,cAAc,GAAG;GAEnB,MAAM,MACJ,oBAAoB,UAChB,AAAC,QAA0C,gBAAgB;AAEjE,UAAO,MACL;IAAE,YAAY;IAAa,kBAAkB;GAAK,GAClD,kBACD;AACD,SAAM,QAAQ,OAAO;EACtB;CACF,SAAQ,OAAO;AACd,SAAO,MAAM,OAAgB,2BAA2B;CAEzD;AACF;;;;;;;;;;;AA2BD,eAAsB,mBAapBC,UAgBAN,kBACAC,QACAC,KAM0D;AAC1D,MAAK,SAAS,sBACZ;CAGF,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAC/C,SAAS,qBACV,EAAC;CACF,MAAM,UAAU,SACd,SAAS,sBAAsB;CAIjC,IAAIG,QAAoB;EAAE,IAAI;EAAU,MAAM;CAAU;AACxD,KAAI,SAAS,eACX,KAAI;AACF,UAAQ,MAAM,AACZ,SAAS,eACT;GACA,UAAU,IAAI;GACd,SAAS,IAAI;GACb,QAAQ,IAAI;GACZ,QAAQ,IAAI;GACZ;EACD,EAAC;CACH,SAAQ,OAAO;AACd,SAAO,MAAM,OAAgB,qCAAqC;CACnE;CAGH,MAAM,UAAU,IAAI,eAA6B;EAC/C;EACA;EACA,UAAU;GACR,UAAU,SAAS;GACnB,QAAQ,SAAS;EAClB;CACF;AAED,QAAO;EAAE;EAAS;CAAS;AAC5B;;;;;;;;;;;AAYD,eAAsB,4BAOpBE,cACAC,SACAC,YACY;AAEZ,MAAK,aACH,QAAO,eAAkB;CAG3B,MAAM,EAAE,SAAS,SAAS,GAAG;CAG7B,MAAM,KAAK,QAAQ,eAAe;AAElC,KAAI,GAEF,QAAO,yBAAyB,IAAW,SAAgB,YAAY;EACrE,MAAMC,aAAW,MAAM,QAAQ,QAAQ;AAGvC,MAAI,WACF,OAAM,WAAWA,YAAU,QAAQ;AAIrC,SAAOA;CACR,EAAC;CAIJ,MAAM,WAAW,MAAM,QAAQ,QAAQ;AAEvC,KAAI,WACF,OAAM,WAAW,UAAU,QAAQ;AAIrC,OAAM,QAAQ,OAAO;AAErB,QAAO;AACR"}
@@ -171,4 +171,4 @@ Object.defineProperty(exports, 'processEndpointAudits', {
171
171
  return processEndpointAudits;
172
172
  }
173
173
  });
174
- //# sourceMappingURL=processAudits-BFokHhCO.cjs.map
174
+ //# sourceMappingURL=processAudits-MHp5_fc7.cjs.map
@@ -1 +1 @@
1
- {"version":3,"file":"processAudits-BFokHhCO.cjs","names":["endpoint: Endpoint<\n any,\n any,\n any,\n OutSchema,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction\n >","response: InferStandardSchema<OutSchema>","serviceDiscovery: ServiceDiscovery<any, any>","logger: TLogger","ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n }","existingAuditor?: Auditor<TAuditAction>","auditor: Auditor<TAuditAction>","actor: AuditActor","DefaultAuditor","endpoint: Endpoint<\n any,\n any,\n any,\n any,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction,\n TDatabase,\n TDatabaseServiceName\n >","auditContext: AuditExecutionContext<TAuditAction> | undefined","handler: (auditor?: Auditor<TAuditAction>) => Promise<T>","onComplete?: (response: T, auditor: Auditor<TAuditAction>) => Promise<void>","response"],"sources":["../src/endpoints/processAudits.ts"],"sourcesContent":["import type {\n AuditActor,\n AuditStorage,\n AuditableAction,\n Auditor,\n} from '@geekmidas/audit';\nimport { DefaultAuditor } from '@geekmidas/audit';\nimport { withAuditableTransaction } from '@geekmidas/audit/kysely';\nimport type { Logger } from '@geekmidas/logger';\nimport type { InferStandardSchema } from '@geekmidas/schema';\nimport type { Service, ServiceDiscovery } from '@geekmidas/services';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\nimport type { CookieFn, Endpoint, HeaderFn } from './Endpoint';\nimport type { ActorExtractor, MappedAudit } from './audit';\n\n/**\n * Process declarative audit definitions after successful endpoint execution.\n * Similar to publishConstructEvents for events.\n *\n * @param endpoint - The endpoint with audit configuration\n * @param response - The handler response to generate audit payloads from\n * @param serviceDiscovery - Service discovery for registering audit storage\n * @param logger - Logger for debug/error messages\n * @param ctx - Request context (session, headers, cookies, services)\n * @param existingAuditor - Optional existing auditor instance (e.g., from handler context).\n * If provided, uses this auditor (with its stored transaction).\n * If not provided, creates a new auditor.\n */\nexport async function processEndpointAudits<\n TServices extends Service[] = [],\n TSession = unknown,\n TLogger extends Logger = Logger,\n OutSchema extends StandardSchemaV1 | undefined = undefined,\n TAuditStorage extends AuditStorage | undefined = undefined,\n TAuditStorageServiceName extends string = string,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n>(\n endpoint: Endpoint<\n any,\n any,\n any,\n OutSchema,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction\n >,\n response: InferStandardSchema<OutSchema>,\n serviceDiscovery: ServiceDiscovery<any, any>,\n logger: TLogger,\n ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n },\n existingAuditor?: Auditor<TAuditAction>,\n): Promise<void> {\n try {\n const audits = endpoint.audits as MappedAudit<TAuditAction, OutSchema>[];\n\n // If we have an existing auditor (from handler context), we need to flush\n // any manual audits it collected, even if there are no declarative audits\n const hasExistingRecords =\n existingAuditor && existingAuditor.getRecords().length > 0;\n\n // Skip if no 
declarative audits and no existing records to flush\n if (!audits?.length && !hasExistingRecords) {\n logger.debug('No audits to process');\n return;\n }\n\n // If no auditor storage service and we have things to process, warn\n if (!endpoint.auditorStorageService) {\n if (hasExistingRecords || audits?.length) {\n logger.warn('No auditor storage service available');\n }\n return;\n }\n\n // Get or create auditor\n let auditor: Auditor<TAuditAction>;\n\n if (existingAuditor) {\n // Use existing auditor (preserves stored transaction and manual audits)\n auditor = existingAuditor;\n logger.debug('Using existing auditor from handler context');\n } else {\n // Create new auditor (backward compatibility)\n const services = await serviceDiscovery.register([\n endpoint.auditorStorageService,\n ]);\n const storage = services[\n endpoint.auditorStorageService.serviceName\n ] as AuditStorage;\n\n // Extract actor if configured\n let actor: AuditActor = { id: 'system', type: 'system' };\n if (endpoint.actorExtractor) {\n try {\n actor = await (\n endpoint.actorExtractor as ActorExtractor<\n TServices,\n TSession,\n TLogger\n >\n )({\n services: ctx.services as any,\n session: ctx.session,\n header: ctx.header,\n cookie: ctx.cookie,\n logger,\n });\n } catch (error) {\n logger.error(error as Error, 'Failed to extract actor for audits');\n // Continue with system actor\n }\n }\n\n auditor = new DefaultAuditor<TAuditAction>({\n actor,\n storage,\n metadata: {\n endpoint: endpoint.route,\n method: endpoint.method,\n },\n });\n }\n\n // Process each declarative audit\n if (audits?.length) {\n for (const audit of audits) {\n logger.debug({ audit: audit.type }, 'Processing declarative audit');\n\n // Check when condition\n if (audit.when && !audit.when(response as any)) {\n logger.debug(\n { audit: audit.type },\n 'Audit skipped due to when condition',\n );\n continue;\n }\n\n // Extract payload\n const payload = audit.payload(response as any);\n\n // Extract entityId if configured\n const entityId = audit.entityId?.(response as any);\n\n // Record the audit\n auditor.audit(audit.type as any, payload as any, {\n table: audit.table,\n entityId,\n });\n }\n }\n\n // Flush audits to storage\n // Note: If existingAuditor has a stored transaction (via setTransaction),\n // flush() will use it automatically\n const recordCount = auditor.getRecords().length;\n if (recordCount > 0) {\n // Check if auditor has a stored transaction (for logging purposes)\n const trx =\n 'getTransaction' in auditor\n ? 
(auditor as { getTransaction(): unknown }).getTransaction()\n : undefined;\n logger.debug(\n { auditCount: recordCount, hasTransaction: !!trx },\n 'Flushing audits',\n );\n await auditor.flush();\n }\n } catch (error) {\n logger.error(error as Error, 'Failed to process audits');\n // Don't rethrow - audit failures shouldn't fail the request\n }\n}\n\n/**\n * Context for audit-aware handler execution.\n */\nexport interface AuditExecutionContext<\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n> {\n /** The auditor instance for recording audits */\n auditor: Auditor<TAuditAction>;\n /** The audit storage instance */\n storage: AuditStorage;\n}\n\n/**\n * Create audit context for handler execution.\n * Returns the auditor and storage for use in the handler.\n *\n * @param endpoint - The endpoint with audit configuration\n * @param serviceDiscovery - Service discovery for getting audit storage\n * @param logger - Logger for debug/error messages\n * @param ctx - Request context for actor extraction\n * @returns Audit context with auditor and storage, or undefined if not configured\n */\nexport async function createAuditContext<\n TServices extends Service[] = [],\n TSession = unknown,\n TLogger extends Logger = Logger,\n TAuditStorage extends AuditStorage | undefined = undefined,\n TAuditStorageServiceName extends string = string,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n TDatabase = undefined,\n TDatabaseServiceName extends string = string,\n>(\n endpoint: Endpoint<\n any,\n any,\n any,\n any,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction,\n TDatabase,\n TDatabaseServiceName\n >,\n serviceDiscovery: ServiceDiscovery<any, any>,\n logger: TLogger,\n ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n },\n): Promise<AuditExecutionContext<TAuditAction> | undefined> {\n if (!endpoint.auditorStorageService) {\n return undefined;\n }\n\n const services = await serviceDiscovery.register([\n endpoint.auditorStorageService,\n ]);\n const storage = services[\n endpoint.auditorStorageService.serviceName\n ] as AuditStorage;\n\n // Extract actor if configured\n let actor: AuditActor = { id: 'system', type: 'system' };\n if (endpoint.actorExtractor) {\n try {\n actor = await (\n endpoint.actorExtractor as ActorExtractor<TServices, TSession, TLogger>\n )({\n services: ctx.services as any,\n session: ctx.session,\n header: ctx.header,\n cookie: ctx.cookie,\n logger,\n });\n } catch (error) {\n logger.error(error as Error, 'Failed to extract actor for audits');\n }\n }\n\n const auditor = new DefaultAuditor<TAuditAction>({\n actor,\n storage,\n metadata: {\n endpoint: endpoint.route,\n method: endpoint.method,\n },\n });\n\n return { auditor, storage };\n}\n\n/**\n * Execute a handler with automatic audit transaction support.\n * If the audit storage has a database (via getDatabase()), wraps execution\n * in a transaction so audits are atomic with handler's database operations.\n *\n * @param auditContext - The audit context from createAuditContext\n * @param handler - The handler function to execute (receives auditor)\n * @param onComplete - Called after handler with response, to process declarative audits\n * @returns The handler result\n */\nexport async function executeWithAuditTransaction<\n T,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n 
string,\n unknown\n >,\n>(\n auditContext: AuditExecutionContext<TAuditAction> | undefined,\n handler: (auditor?: Auditor<TAuditAction>) => Promise<T>,\n onComplete?: (response: T, auditor: Auditor<TAuditAction>) => Promise<void>,\n): Promise<T> {\n // No audit context - just run handler\n if (!auditContext) {\n return handler(undefined);\n }\n\n const { auditor, storage } = auditContext;\n\n // Check if storage has a database for transactional execution\n const db = storage.getDatabase?.();\n\n if (db) {\n // Wrap in transaction - audits are atomic with handler operations\n return withAuditableTransaction(db as any, auditor as any, async () => {\n const response = await handler(auditor);\n\n // Process declarative audits within the transaction\n if (onComplete) {\n await onComplete(response, auditor);\n }\n\n // Audits are flushed by withAuditableTransaction before commit\n return response;\n });\n }\n\n // No database - run handler and flush audits after\n const response = await handler(auditor);\n\n if (onComplete) {\n await onComplete(response, auditor);\n }\n\n // Flush audits (no transaction)\n await auditor.flush();\n\n return response;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AA4BA,eAAsB,sBAYpBA,UAcAC,UACAC,kBACAC,QACAC,KAMAC,iBACe;AACf,KAAI;EACF,MAAM,SAAS,SAAS;EAIxB,MAAM,qBACJ,mBAAmB,gBAAgB,YAAY,CAAC,SAAS;AAG3D,OAAK,QAAQ,WAAW,oBAAoB;AAC1C,UAAO,MAAM,uBAAuB;AACpC;EACD;AAGD,OAAK,SAAS,uBAAuB;AACnC,OAAI,sBAAsB,QAAQ,OAChC,QAAO,KAAK,uCAAuC;AAErD;EACD;EAGD,IAAIC;AAEJ,MAAI,iBAAiB;AAEnB,aAAU;AACV,UAAO,MAAM,8CAA8C;EAC5D,OAAM;GAEL,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAC/C,SAAS,qBACV,EAAC;GACF,MAAM,UAAU,SACd,SAAS,sBAAsB;GAIjC,IAAIC,QAAoB;IAAE,IAAI;IAAU,MAAM;GAAU;AACxD,OAAI,SAAS,eACX,KAAI;AACF,YAAQ,MAAM,AACZ,SAAS,eAKT;KACA,UAAU,IAAI;KACd,SAAS,IAAI;KACb,QAAQ,IAAI;KACZ,QAAQ,IAAI;KACZ;IACD,EAAC;GACH,SAAQ,OAAO;AACd,WAAO,MAAM,OAAgB,qCAAqC;GAEnE;AAGH,aAAU,IAAIC,iCAA6B;IACzC;IACA;IACA,UAAU;KACR,UAAU,SAAS;KACnB,QAAQ,SAAS;IAClB;GACF;EACF;AAGD,MAAI,QAAQ,OACV,MAAK,MAAM,SAAS,QAAQ;AAC1B,UAAO,MAAM,EAAE,OAAO,MAAM,KAAM,GAAE,+BAA+B;AAGnE,OAAI,MAAM,SAAS,MAAM,KAAK,SAAgB,EAAE;AAC9C,WAAO,MACL,EAAE,OAAO,MAAM,KAAM,GACrB,sCACD;AACD;GACD;GAGD,MAAM,UAAU,MAAM,QAAQ,SAAgB;GAG9C,MAAM,WAAW,MAAM,WAAW,SAAgB;AAGlD,WAAQ,MAAM,MAAM,MAAa,SAAgB;IAC/C,OAAO,MAAM;IACb;GACD,EAAC;EACH;EAMH,MAAM,cAAc,QAAQ,YAAY,CAAC;AACzC,MAAI,cAAc,GAAG;GAEnB,MAAM,MACJ,oBAAoB,UAChB,AAAC,QAA0C,gBAAgB;AAEjE,UAAO,MACL;IAAE,YAAY;IAAa,kBAAkB;GAAK,GAClD,kBACD;AACD,SAAM,QAAQ,OAAO;EACtB;CACF,SAAQ,OAAO;AACd,SAAO,MAAM,OAAgB,2BAA2B;CAEzD;AACF;;;;;;;;;;;AA2BD,eAAsB,mBAapBC,UAgBAP,kBACAC,QACAC,KAM0D;AAC1D,MAAK,SAAS,sBACZ;CAGF,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAC/C,SAAS,qBACV,EAAC;CACF,MAAM,UAAU,SACd,SAAS,sBAAsB;CAIjC,IAAIG,QAAoB;EAAE,IAAI;EAAU,MAAM;CAAU;AACxD,KAAI,SAAS,eACX,KAAI;AACF,UAAQ,MAAM,AACZ,SAAS,eACT;GACA,UAAU,IAAI;GACd,SAAS,IAAI;GACb,QAAQ,IAAI;GACZ,QAAQ,IAAI;GACZ;EACD,EAAC;CACH,SAAQ,OAAO;AACd,SAAO,MAAM,OAAgB,qCAAqC;CACnE;CAGH,MAAM,UAAU,IAAIC,iCAA6B;EAC/C;EACA;EACA,UAAU;GACR,UAAU,SAAS;GACnB,QAAQ,SAAS;EAClB;CACF;AAED,QAAO;EAAE;EAAS;CAAS;AAC5B;;;;;;;;;;;AAYD,eAAsB,4BAOpBE,cACAC,SACAC,YACY;AAEZ,MAAK,aACH,QAAO,eAAkB;CAG3B,MAAM,EAAE,SAAS,SAAS,GAAG;CAG7B,MAAM,KAAK,QAAQ,eAAe;AAElC,KAAI,GAEF,QAAO,uDAAyB,IAAW,SAAgB,YAAY;EACrE,MAAMC,aAAW,MAAM,QAAQ,QAAQ;AAGvC,MAAI,WACF,OAAM,WAAWA,YAAU,QAAQ;AAIrC,SAAOA;CACR,EAAC;CAIJ,MAAM,WAAW,MAAM,QAAQ,QAAQ;AAEvC,KAAI,WACF,OAAM,WAAW,UAAU,QAAQ;AAIrC,OAAM,QAAQ,OAAO;AAErB,QAAO;AACR"}
1
+ {"version":3,"file":"processAudits-MHp5_fc7.cjs","names":["endpoint: Endpoint<\n any,\n any,\n any,\n OutSchema,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction\n >","response: InferStandardSchema<OutSchema>","serviceDiscovery: ServiceDiscovery<any, any>","logger: TLogger","ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n }","existingAuditor?: Auditor<TAuditAction>","auditor: Auditor<TAuditAction>","actor: AuditActor","DefaultAuditor","endpoint: Endpoint<\n any,\n any,\n any,\n any,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction,\n TDatabase,\n TDatabaseServiceName\n >","auditContext: AuditExecutionContext<TAuditAction> | undefined","handler: (auditor?: Auditor<TAuditAction>) => Promise<T>","onComplete?: (response: T, auditor: Auditor<TAuditAction>) => Promise<void>","response"],"sources":["../src/endpoints/processAudits.ts"],"sourcesContent":["import type {\n AuditActor,\n AuditStorage,\n AuditableAction,\n Auditor,\n} from '@geekmidas/audit';\nimport { DefaultAuditor } from '@geekmidas/audit';\nimport { withAuditableTransaction } from '@geekmidas/audit/kysely';\nimport type { Logger } from '@geekmidas/logger';\nimport type { InferStandardSchema } from '@geekmidas/schema';\nimport type { Service, ServiceDiscovery } from '@geekmidas/services';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\nimport type { CookieFn, Endpoint, HeaderFn } from './Endpoint';\nimport type { ActorExtractor, MappedAudit } from './audit';\n\n/**\n * Process declarative audit definitions after successful endpoint execution.\n * Similar to publishConstructEvents for events.\n *\n * @param endpoint - The endpoint with audit configuration\n * @param response - The handler response to generate audit payloads from\n * @param serviceDiscovery - Service discovery for registering audit storage\n * @param logger - Logger for debug/error messages\n * @param ctx - Request context (session, headers, cookies, services)\n * @param existingAuditor - Optional existing auditor instance (e.g., from handler context).\n * If provided, uses this auditor (with its stored transaction).\n * If not provided, creates a new auditor.\n */\nexport async function processEndpointAudits<\n TServices extends Service[] = [],\n TSession = unknown,\n TLogger extends Logger = Logger,\n OutSchema extends StandardSchemaV1 | undefined = undefined,\n TAuditStorage extends AuditStorage | undefined = undefined,\n TAuditStorageServiceName extends string = string,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n>(\n endpoint: Endpoint<\n any,\n any,\n any,\n OutSchema,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction\n >,\n response: InferStandardSchema<OutSchema>,\n serviceDiscovery: ServiceDiscovery<any, any>,\n logger: TLogger,\n ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n },\n existingAuditor?: Auditor<TAuditAction>,\n): Promise<void> {\n try {\n const audits = endpoint.audits as MappedAudit<TAuditAction, OutSchema>[];\n\n // If we have an existing auditor (from handler context), we need to flush\n // any manual audits it collected, even if there are no declarative audits\n const hasExistingRecords =\n existingAuditor && existingAuditor.getRecords().length > 0;\n\n // Skip if no 
declarative audits and no existing records to flush\n if (!audits?.length && !hasExistingRecords) {\n logger.debug('No audits to process');\n return;\n }\n\n // If no auditor storage service and we have things to process, warn\n if (!endpoint.auditorStorageService) {\n if (hasExistingRecords || audits?.length) {\n logger.warn('No auditor storage service available');\n }\n return;\n }\n\n // Get or create auditor\n let auditor: Auditor<TAuditAction>;\n\n if (existingAuditor) {\n // Use existing auditor (preserves stored transaction and manual audits)\n auditor = existingAuditor;\n logger.debug('Using existing auditor from handler context');\n } else {\n // Create new auditor (backward compatibility)\n const services = await serviceDiscovery.register([\n endpoint.auditorStorageService,\n ]);\n const storage = services[\n endpoint.auditorStorageService.serviceName\n ] as AuditStorage;\n\n // Extract actor if configured\n let actor: AuditActor = { id: 'system', type: 'system' };\n if (endpoint.actorExtractor) {\n try {\n actor = await (\n endpoint.actorExtractor as ActorExtractor<\n TServices,\n TSession,\n TLogger\n >\n )({\n services: ctx.services as any,\n session: ctx.session,\n header: ctx.header,\n cookie: ctx.cookie,\n logger,\n });\n } catch (error) {\n logger.error(error as Error, 'Failed to extract actor for audits');\n // Continue with system actor\n }\n }\n\n auditor = new DefaultAuditor<TAuditAction>({\n actor,\n storage,\n metadata: {\n endpoint: endpoint.route,\n method: endpoint.method,\n },\n });\n }\n\n // Process each declarative audit\n if (audits?.length) {\n for (const audit of audits) {\n logger.debug({ audit: audit.type }, 'Processing declarative audit');\n\n // Check when condition\n if (audit.when && !audit.when(response as any)) {\n logger.debug(\n { audit: audit.type },\n 'Audit skipped due to when condition',\n );\n continue;\n }\n\n // Extract payload\n const payload = audit.payload(response as any);\n\n // Extract entityId if configured\n const entityId = audit.entityId?.(response as any);\n\n // Record the audit\n auditor.audit(audit.type as any, payload as any, {\n table: audit.table,\n entityId,\n });\n }\n }\n\n // Flush audits to storage\n // Note: If existingAuditor has a stored transaction (via setTransaction),\n // flush() will use it automatically\n const recordCount = auditor.getRecords().length;\n if (recordCount > 0) {\n // Check if auditor has a stored transaction (for logging purposes)\n const trx =\n 'getTransaction' in auditor\n ? 
(auditor as { getTransaction(): unknown }).getTransaction()\n : undefined;\n logger.debug(\n { auditCount: recordCount, hasTransaction: !!trx },\n 'Flushing audits',\n );\n await auditor.flush();\n }\n } catch (error) {\n logger.error(error as Error, 'Failed to process audits');\n // Don't rethrow - audit failures shouldn't fail the request\n }\n}\n\n/**\n * Context for audit-aware handler execution.\n */\nexport interface AuditExecutionContext<\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n> {\n /** The auditor instance for recording audits */\n auditor: Auditor<TAuditAction>;\n /** The audit storage instance */\n storage: AuditStorage;\n}\n\n/**\n * Create audit context for handler execution.\n * Returns the auditor and storage for use in the handler.\n *\n * @param endpoint - The endpoint with audit configuration\n * @param serviceDiscovery - Service discovery for getting audit storage\n * @param logger - Logger for debug/error messages\n * @param ctx - Request context for actor extraction\n * @returns Audit context with auditor and storage, or undefined if not configured\n */\nexport async function createAuditContext<\n TServices extends Service[] = [],\n TSession = unknown,\n TLogger extends Logger = Logger,\n TAuditStorage extends AuditStorage | undefined = undefined,\n TAuditStorageServiceName extends string = string,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n TDatabase = undefined,\n TDatabaseServiceName extends string = string,\n>(\n endpoint: Endpoint<\n any,\n any,\n any,\n any,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction,\n TDatabase,\n TDatabaseServiceName\n >,\n serviceDiscovery: ServiceDiscovery<any, any>,\n logger: TLogger,\n ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n },\n): Promise<AuditExecutionContext<TAuditAction> | undefined> {\n if (!endpoint.auditorStorageService) {\n return undefined;\n }\n\n const services = await serviceDiscovery.register([\n endpoint.auditorStorageService,\n ]);\n const storage = services[\n endpoint.auditorStorageService.serviceName\n ] as AuditStorage;\n\n // Extract actor if configured\n let actor: AuditActor = { id: 'system', type: 'system' };\n if (endpoint.actorExtractor) {\n try {\n actor = await (\n endpoint.actorExtractor as ActorExtractor<TServices, TSession, TLogger>\n )({\n services: ctx.services as any,\n session: ctx.session,\n header: ctx.header,\n cookie: ctx.cookie,\n logger,\n });\n } catch (error) {\n logger.error(error as Error, 'Failed to extract actor for audits');\n }\n }\n\n const auditor = new DefaultAuditor<TAuditAction>({\n actor,\n storage,\n metadata: {\n endpoint: endpoint.route,\n method: endpoint.method,\n },\n });\n\n return { auditor, storage };\n}\n\n/**\n * Execute a handler with automatic audit transaction support.\n * If the audit storage has a database (via getDatabase()), wraps execution\n * in a transaction so audits are atomic with handler's database operations.\n *\n * @param auditContext - The audit context from createAuditContext\n * @param handler - The handler function to execute (receives auditor)\n * @param onComplete - Called after handler with response, to process declarative audits\n * @returns The handler result\n */\nexport async function executeWithAuditTransaction<\n T,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n 
string,\n unknown\n >,\n>(\n auditContext: AuditExecutionContext<TAuditAction> | undefined,\n handler: (auditor?: Auditor<TAuditAction>) => Promise<T>,\n onComplete?: (response: T, auditor: Auditor<TAuditAction>) => Promise<void>,\n): Promise<T> {\n // No audit context - just run handler\n if (!auditContext) {\n return handler(undefined);\n }\n\n const { auditor, storage } = auditContext;\n\n // Check if storage has a database for transactional execution\n const db = storage.getDatabase?.();\n\n if (db) {\n // Wrap in transaction - audits are atomic with handler operations\n return withAuditableTransaction(db as any, auditor as any, async () => {\n const response = await handler(auditor);\n\n // Process declarative audits within the transaction\n if (onComplete) {\n await onComplete(response, auditor);\n }\n\n // Audits are flushed by withAuditableTransaction before commit\n return response;\n });\n }\n\n // No database - run handler and flush audits after\n const response = await handler(auditor);\n\n if (onComplete) {\n await onComplete(response, auditor);\n }\n\n // Flush audits (no transaction)\n await auditor.flush();\n\n return response;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AA4BA,eAAsB,sBAYpBA,UAcAC,UACAC,kBACAC,QACAC,KAMAC,iBACe;AACf,KAAI;EACF,MAAM,SAAS,SAAS;EAIxB,MAAM,qBACJ,mBAAmB,gBAAgB,YAAY,CAAC,SAAS;AAG3D,OAAK,QAAQ,WAAW,oBAAoB;AAC1C,UAAO,MAAM,uBAAuB;AACpC;EACD;AAGD,OAAK,SAAS,uBAAuB;AACnC,OAAI,sBAAsB,QAAQ,OAChC,QAAO,KAAK,uCAAuC;AAErD;EACD;EAGD,IAAIC;AAEJ,MAAI,iBAAiB;AAEnB,aAAU;AACV,UAAO,MAAM,8CAA8C;EAC5D,OAAM;GAEL,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAC/C,SAAS,qBACV,EAAC;GACF,MAAM,UAAU,SACd,SAAS,sBAAsB;GAIjC,IAAIC,QAAoB;IAAE,IAAI;IAAU,MAAM;GAAU;AACxD,OAAI,SAAS,eACX,KAAI;AACF,YAAQ,MAAM,AACZ,SAAS,eAKT;KACA,UAAU,IAAI;KACd,SAAS,IAAI;KACb,QAAQ,IAAI;KACZ,QAAQ,IAAI;KACZ;IACD,EAAC;GACH,SAAQ,OAAO;AACd,WAAO,MAAM,OAAgB,qCAAqC;GAEnE;AAGH,aAAU,IAAIC,iCAA6B;IACzC;IACA;IACA,UAAU;KACR,UAAU,SAAS;KACnB,QAAQ,SAAS;IAClB;GACF;EACF;AAGD,MAAI,QAAQ,OACV,MAAK,MAAM,SAAS,QAAQ;AAC1B,UAAO,MAAM,EAAE,OAAO,MAAM,KAAM,GAAE,+BAA+B;AAGnE,OAAI,MAAM,SAAS,MAAM,KAAK,SAAgB,EAAE;AAC9C,WAAO,MACL,EAAE,OAAO,MAAM,KAAM,GACrB,sCACD;AACD;GACD;GAGD,MAAM,UAAU,MAAM,QAAQ,SAAgB;GAG9C,MAAM,WAAW,MAAM,WAAW,SAAgB;AAGlD,WAAQ,MAAM,MAAM,MAAa,SAAgB;IAC/C,OAAO,MAAM;IACb;GACD,EAAC;EACH;EAMH,MAAM,cAAc,QAAQ,YAAY,CAAC;AACzC,MAAI,cAAc,GAAG;GAEnB,MAAM,MACJ,oBAAoB,UAChB,AAAC,QAA0C,gBAAgB;AAEjE,UAAO,MACL;IAAE,YAAY;IAAa,kBAAkB;GAAK,GAClD,kBACD;AACD,SAAM,QAAQ,OAAO;EACtB;CACF,SAAQ,OAAO;AACd,SAAO,MAAM,OAAgB,2BAA2B;CAEzD;AACF;;;;;;;;;;;AA2BD,eAAsB,mBAapBC,UAgBAP,kBACAC,QACAC,KAM0D;AAC1D,MAAK,SAAS,sBACZ;CAGF,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAC/C,SAAS,qBACV,EAAC;CACF,MAAM,UAAU,SACd,SAAS,sBAAsB;CAIjC,IAAIG,QAAoB;EAAE,IAAI;EAAU,MAAM;CAAU;AACxD,KAAI,SAAS,eACX,KAAI;AACF,UAAQ,MAAM,AACZ,SAAS,eACT;GACA,UAAU,IAAI;GACd,SAAS,IAAI;GACb,QAAQ,IAAI;GACZ,QAAQ,IAAI;GACZ;EACD,EAAC;CACH,SAAQ,OAAO;AACd,SAAO,MAAM,OAAgB,qCAAqC;CACnE;CAGH,MAAM,UAAU,IAAIC,iCAA6B;EAC/C;EACA;EACA,UAAU;GACR,UAAU,SAAS;GACnB,QAAQ,SAAS;EAClB;CACF;AAED,QAAO;EAAE;EAAS;CAAS;AAC5B;;;;;;;;;;;AAYD,eAAsB,4BAOpBE,cACAC,SACAC,YACY;AAEZ,MAAK,aACH,QAAO,eAAkB;CAG3B,MAAM,EAAE,SAAS,SAAS,GAAG;CAG7B,MAAM,KAAK,QAAQ,eAAe;AAElC,KAAI,GAEF,QAAO,uDAAyB,IAAW,SAAgB,YAAY;EACrE,MAAMC,aAAW,MAAM,QAAQ,QAAQ;AAGvC,MAAI,WACF,OAAM,WAAWA,YAAU,QAAQ;AAIrC,SAAOA;CACR,EAAC;CAIJ,MAAM,WAAW,MAAM,QAAQ,QAAQ;AAEvC,KAAI,WACF,OAAM,WAAW,UAAU,QAAQ;AAIrC,OAAM,QAAQ,OAAO;AAErB,QAAO;AACR"}
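For orientation, the processAudits.ts source embedded in the maps above exports three helpers: processEndpointAudits, createAuditContext, and executeWithAuditTransaction. Below is a minimal wiring sketch based only on the signatures visible in that embedded source; the import path './processAudits', the `any`-typed endpoint, the runWithAudits function name, and the handler call shape are assumptions for illustration, not code shipped in this release.

// Minimal wiring sketch (assumptions noted above; not the package's actual adaptor code).
import type { Logger } from '@geekmidas/logger';
import type { ServiceDiscovery } from '@geekmidas/services';
import {
  createAuditContext,
  executeWithAuditTransaction,
  processEndpointAudits,
} from './processAudits';

export async function runWithAudits(
  endpoint: any, // an Endpoint configured via .auditor() / declarative audits
  discovery: ServiceDiscovery<any, any>,
  logger: Logger,
  ctx: {
    session: unknown;
    header: any;
    cookie: any;
    services: Record<string, unknown>;
  },
) {
  // Resolves the audit storage service and builds a DefaultAuditor,
  // or returns undefined when the endpoint has no auditorStorageService.
  const auditContext = await createAuditContext(endpoint, discovery, logger, ctx);

  // If storage.getDatabase() yields a db, handler work and audit flush share one
  // transaction via withAuditableTransaction; otherwise audits are flushed afterwards.
  return executeWithAuditTransaction(
    auditContext,
    async (auditor) => endpoint.handler({ auditor, logger }), // handler shape is illustrative
    (response, auditor) =>
      // Declarative audits are derived from the response before flush/commit.
      processEndpointAudits(endpoint, response, discovery, logger, ctx, auditor),
  );
}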
@@ -1,3 +1,3 @@
1
- const require_AWSLambdaSubscriberAdaptor = require('../AWSLambdaSubscriberAdaptor-Dum5bkw3.cjs');
1
+ const require_AWSLambdaSubscriberAdaptor = require('../AWSLambdaSubscriberAdaptor-DVC4VAQR.cjs');
2
2
 
3
3
  exports.AWSLambdaSubscriber = require_AWSLambdaSubscriberAdaptor.AWSLambdaSubscriber;
@@ -1,3 +1,3 @@
1
- import { AWSLambdaSubscriber } from "../AWSLambdaSubscriberAdaptor-CyFh7MN8.mjs";
1
+ import { AWSLambdaSubscriber } from "../AWSLambdaSubscriberAdaptor-BLHDyqzQ.mjs";
2
2
 
3
3
  export { AWSLambdaSubscriber };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@geekmidas/constructs",
3
- "version": "0.0.18",
3
+ "version": "0.0.19",
4
4
  "private": false,
5
5
  "type": "module",
6
6
  "exports": {
@@ -67,12 +67,12 @@
67
67
  "lodash.uniqby": "~4.7.0",
68
68
  "openapi-types": "~12.1.3",
69
69
  "@geekmidas/audit": "0.0.6",
70
- "@geekmidas/cache": "0.0.7",
71
- "@geekmidas/logger": "0.0.1",
72
70
  "@geekmidas/errors": "0.0.1",
73
- "@geekmidas/rate-limit": "0.1.0",
71
+ "@geekmidas/cache": "0.0.7",
74
72
  "@geekmidas/events": "0.0.2",
73
+ "@geekmidas/rate-limit": "0.1.0",
75
74
  "@geekmidas/schema": "0.0.2",
75
+ "@geekmidas/logger": "0.0.1",
76
76
  "@geekmidas/services": "0.0.1"
77
77
  },
78
78
  "devDependencies": {
@@ -1,4 +1,5 @@
1
- import type { AuditStorage, AuditableAction } from '@geekmidas/audit';
1
+ import type { AuditActor, AuditStorage, AuditableAction } from '@geekmidas/audit';
2
+ import { DefaultAuditor } from '@geekmidas/audit';
2
3
  import { EnvironmentParser } from '@geekmidas/envkit';
3
4
  import type { EventPublisher } from '@geekmidas/events';
4
5
  import type { Logger } from '@geekmidas/logger';
@@ -22,7 +23,7 @@ import {
22
23
  } from './Endpoint';
23
24
  import type { MappedAudit } from './audit';
24
25
  import {
25
- createAuditContext,
26
+ type AuditExecutionContext,
26
27
  executeWithAuditTransaction,
27
28
  } from './processAudits';
28
29
 
@@ -185,26 +186,39 @@ export class TestEndpointAdaptor<
185
186
  cookie,
186
187
  });
187
188
 
188
- // Create audit context if audit storage is configured
189
- // The auditorStorage is required when endpoint uses .auditor()
190
- const auditorStorageService = (ctx as any).auditorStorage as
191
- | Service<TAuditStorageServiceName, TAuditStorage>
192
- | undefined;
193
- const endpointWithAuditor = auditorStorageService
194
- ? { ...this.endpoint, auditorStorageService }
195
- : this.endpoint;
189
+ // Create audit context if audit storage is provided
190
+ // The auditorStorage instance is required when endpoint uses .auditor()
191
+ const auditorStorage = (ctx as any).auditorStorage as TAuditStorage;
192
+ let auditContext: AuditExecutionContext<TAuditAction> | undefined;
196
193
 
197
- const auditContext = await createAuditContext(
198
- endpointWithAuditor as typeof this.endpoint,
199
- this.serviceDiscovery,
200
- logger,
201
- {
202
- session,
203
- header,
204
- cookie,
205
- services: ctx.services as Record<string, unknown>,
206
- },
207
- );
194
+ if (auditorStorage) {
195
+ // Extract actor if configured
196
+ let actor: AuditActor = { id: 'system', type: 'system' };
197
+ if (this.endpoint.actorExtractor) {
198
+ try {
199
+ actor = await this.endpoint.actorExtractor({
200
+ services: ctx.services as any,
201
+ session,
202
+ header,
203
+ cookie,
204
+ logger,
205
+ });
206
+ } catch (error) {
207
+ logger.error(error as Error, 'Failed to extract actor for audits');
208
+ }
209
+ }
210
+
211
+ const auditor = new DefaultAuditor<TAuditAction>({
212
+ actor,
213
+ storage: auditorStorage as AuditStorage,
214
+ metadata: {
215
+ endpoint: this.endpoint.route,
216
+ method: this.endpoint.method,
217
+ },
218
+ });
219
+
220
+ auditContext = { auditor, storage: auditorStorage as AuditStorage };
221
+ }
208
222
 
209
223
  // Warn if declarative audits are configured but no audit storage
210
224
  const audits = this.endpoint.audits as MappedAudit<
@@ -215,29 +229,18 @@ export class TestEndpointAdaptor<
215
229
  logger.warn('No auditor storage service available');
216
230
  }
217
231
 
218
- // Resolve database service if configured
219
- // The database is required when endpoint uses .database()
220
- const databaseService = (ctx as any).database as
221
- | Service<TDatabaseServiceName, TDatabase>
222
- | undefined;
223
- const rawDb = databaseService
224
- ? await this.serviceDiscovery
225
- .register([databaseService])
226
- .then((s) => s[databaseService.serviceName as keyof typeof s])
227
- : undefined;
232
+ // Use database instance directly from context
233
+ // The database instance is required when endpoint uses .database()
234
+ const rawDb = (ctx as any).database as TDatabase;
228
235
 
229
236
  // Execute handler with automatic audit transaction support
230
237
  const result = await executeWithAuditTransaction(
231
238
  auditContext,
232
239
  async (auditor) => {
233
- // Use audit transaction as db only if the storage uses the same database service
234
- const sameDatabase =
235
- auditContext?.storage?.databaseServiceName &&
236
- auditContext.storage.databaseServiceName ===
237
- databaseService?.serviceName;
238
- const db = sameDatabase
239
- ? (auditor?.getTransaction?.() ?? rawDb)
240
- : rawDb;
240
+ // Use audit transaction as db if available (when storage has same database)
241
+ // For testing, the tester controls whether to use transactional auditing
242
+ const trx = auditor?.getTransaction?.();
243
+ const db = trx ?? rawDb;
241
244
 
242
245
  const responseBuilder = new ResponseBuilder();
243
246
  const response = await this.endpoint.handler(
@@ -337,25 +340,21 @@ export class TestEndpointAdaptor<
337
340
  */
338
341
  type AuditStorageRequirement<
339
342
  TAuditStorage extends AuditStorage | undefined = undefined,
340
- TAuditStorageServiceName extends string = string,
341
343
  > = TAuditStorage extends undefined
342
344
  ? {}
343
345
  : {
344
- /** Audit storage service - required when endpoint uses .auditor() */
345
- auditorStorage: Service<TAuditStorageServiceName, TAuditStorage>;
346
+ /** Audit storage instance - required when endpoint uses .auditor() */
347
+ auditorStorage: TAuditStorage;
346
348
  };
347
349
 
348
350
  /**
349
351
  * Conditional database requirement - required when TDatabase is configured
350
352
  */
351
- type DatabaseRequirement<
352
- TDatabase = undefined,
353
- TDatabaseServiceName extends string = string,
354
- > = TDatabase extends undefined
353
+ type DatabaseRequirement<TDatabase = undefined> = TDatabase extends undefined
355
354
  ? {}
356
355
  : {
357
- /** Database service - required when endpoint uses .database() */
358
- database: Service<TDatabaseServiceName, TDatabase>;
356
+ /** Database instance - required when endpoint uses .database() */
357
+ database: TDatabase;
359
358
  };
360
359
 
361
360
  export type TestRequestAdaptor<
@@ -364,13 +363,13 @@ export type TestRequestAdaptor<
364
363
  TEventPublisher extends EventPublisher<any> | undefined = undefined,
365
364
  TEventPublisherServiceName extends string = string,
366
365
  TAuditStorage extends AuditStorage | undefined = undefined,
367
- TAuditStorageServiceName extends string = string,
366
+ _TAuditStorageServiceName extends string = string,
368
367
  TDatabase = undefined,
369
- TDatabaseServiceName extends string = string,
368
+ _TDatabaseServiceName extends string = string,
370
369
  > = {
371
370
  services: ServiceRecord<TServices>;
372
371
  headers: Record<string, string>;
373
372
  publisher?: Service<TEventPublisherServiceName, TEventPublisher>;
374
373
  } & InferComposableStandardSchema<TInput> &
375
- AuditStorageRequirement<TAuditStorage, TAuditStorageServiceName> &
376
- DatabaseRequirement<TDatabase, TDatabaseServiceName>;
374
+ AuditStorageRequirement<TAuditStorage> &
375
+ DatabaseRequirement<TDatabase>;
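Net effect of the type changes above: a test request now carries the audit storage and database instances directly instead of Service wrappers. A rough sketch of the new payload shape, assuming hypothetical in-memory fixtures (all identifiers below are illustrative, not part of this diff):

// Illustrative only - fixture names are not part of this diff.
import type { AuditStorage } from '@geekmidas/audit';

declare const inMemoryAuditStorage: AuditStorage; // any AuditStorage implementation
declare const testDb: unknown;                    // whatever database object the endpoint expects

const testRequest = {
  services: {},
  headers: { 'content-type': 'application/json' },
  // 0.0.18: auditorStorage: Service<TAuditStorageServiceName, TAuditStorage>
  auditorStorage: inMemoryAuditStorage, // 0.0.19: the storage instance itself
  // 0.0.18: database: Service<TDatabaseServiceName, TDatabase>
  database: testDb, // 0.0.19: the database instance itself
};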