@geekmidas/constructs 0.3.2 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (557)
  1. package/dist/{AWSLambdaFunction-C54a1doJ.mjs → AWSLambdaFunction-BMTaCrG9.mjs} +6 -6
  2. package/dist/AWSLambdaFunction-BMTaCrG9.mjs.map +1 -0
  3. package/dist/{AWSLambdaFunction-BdebRMUh.d.mts → AWSLambdaFunction-BfLF5vNa.d.cts} +6 -6
  4. package/dist/AWSLambdaFunction-BfLF5vNa.d.cts.map +1 -0
  5. package/dist/{AWSLambdaFunction-EPGY4s7i.cjs → AWSLambdaFunction-C1gRqLO7.cjs} +6 -6
  6. package/dist/AWSLambdaFunction-C1gRqLO7.cjs.map +1 -0
  7. package/dist/{AWSLambdaFunction-D0tEOTXQ.d.cts → AWSLambdaFunction-K94K8JU2.d.mts} +6 -6
  8. package/dist/AWSLambdaFunction-K94K8JU2.d.mts.map +1 -0
  9. package/dist/{AWSLambdaSubscriberAdaptor-Dx-Ks1Jp.d.cts → AWSLambdaSubscriberAdaptor-B2Gk3fgx.d.cts} +2 -2
  10. package/dist/AWSLambdaSubscriberAdaptor-B2Gk3fgx.d.cts.map +1 -0
  11. package/dist/{AWSLambdaSubscriberAdaptor-BLfO612H.mjs → AWSLambdaSubscriberAdaptor-BD3FwGUb.mjs} +3 -3
  12. package/dist/AWSLambdaSubscriberAdaptor-BD3FwGUb.mjs.map +1 -0
  13. package/dist/{AWSLambdaSubscriberAdaptor-BNcYYZ-P.cjs → AWSLambdaSubscriberAdaptor-DQDnt1Xk.cjs} +3 -3
  14. package/dist/AWSLambdaSubscriberAdaptor-DQDnt1Xk.cjs.map +1 -0
  15. package/dist/{AWSLambdaSubscriberAdaptor-DrFAvHOp.d.mts → AWSLambdaSubscriberAdaptor-QuMFwltJ.d.mts} +2 -2
  16. package/dist/AWSLambdaSubscriberAdaptor-QuMFwltJ.d.mts.map +1 -0
  17. package/dist/{AmazonApiGatewayEndpointAdaptor-BT9JXihC.mjs → AmazonApiGatewayEndpointAdaptor-B4gLJ2dx.mjs} +56 -14
  18. package/dist/AmazonApiGatewayEndpointAdaptor-B4gLJ2dx.mjs.map +1 -0
  19. package/dist/{AmazonApiGatewayEndpointAdaptor-DLqnYQ4E.d.cts → AmazonApiGatewayEndpointAdaptor-C987ZCsM.d.cts} +51 -5
  20. package/dist/AmazonApiGatewayEndpointAdaptor-C987ZCsM.d.cts.map +1 -0
  21. package/dist/{AmazonApiGatewayEndpointAdaptor-DNFvvdmW.cjs → AmazonApiGatewayEndpointAdaptor-CwRKMRr_.cjs} +55 -13
  22. package/dist/AmazonApiGatewayEndpointAdaptor-CwRKMRr_.cjs.map +1 -0
  23. package/dist/{AmazonApiGatewayEndpointAdaptor-qlKXuZxy.d.mts → AmazonApiGatewayEndpointAdaptor-DpPfO6Vj.d.mts} +52 -6
  24. package/dist/AmazonApiGatewayEndpointAdaptor-DpPfO6Vj.d.mts.map +1 -0
  25. package/dist/{AmazonApiGatewayV1EndpointAdaptor-Ccl8B8kG.cjs → AmazonApiGatewayV1EndpointAdaptor-BeZMu5DZ.cjs} +4 -4
  26. package/dist/AmazonApiGatewayV1EndpointAdaptor-BeZMu5DZ.cjs.map +1 -0
  27. package/dist/{AmazonApiGatewayV1EndpointAdaptor-MRUxs3Xi.d.mts → AmazonApiGatewayV1EndpointAdaptor-DI_5kdqw.d.mts} +5 -5
  28. package/dist/AmazonApiGatewayV1EndpointAdaptor-DI_5kdqw.d.mts.map +1 -0
  29. package/dist/{AmazonApiGatewayV1EndpointAdaptor-DyUaJIhB.d.cts → AmazonApiGatewayV1EndpointAdaptor-DkGMOpoB.d.cts} +4 -4
  30. package/dist/AmazonApiGatewayV1EndpointAdaptor-DkGMOpoB.d.cts.map +1 -0
  31. package/dist/{AmazonApiGatewayV1EndpointAdaptor-CShQI8Gk.mjs → AmazonApiGatewayV1EndpointAdaptor-VccB6FKp.mjs} +4 -4
  32. package/dist/AmazonApiGatewayV1EndpointAdaptor-VccB6FKp.mjs.map +1 -0
  33. package/dist/{AmazonApiGatewayV2EndpointAdaptor-M1-w0U5R.d.cts → AmazonApiGatewayV2EndpointAdaptor-BtBnMJS_.d.cts} +4 -6
  34. package/dist/AmazonApiGatewayV2EndpointAdaptor-BtBnMJS_.d.cts.map +1 -0
  35. package/dist/{AmazonApiGatewayV2EndpointAdaptor-D8-0Aab4.cjs → AmazonApiGatewayV2EndpointAdaptor-BwsL9Gia.cjs} +4 -4
  36. package/dist/AmazonApiGatewayV2EndpointAdaptor-BwsL9Gia.cjs.map +1 -0
  37. package/dist/{AmazonApiGatewayV2EndpointAdaptor-lCRzGE4q.mjs → AmazonApiGatewayV2EndpointAdaptor-CljxmUfz.mjs} +4 -4
  38. package/dist/AmazonApiGatewayV2EndpointAdaptor-CljxmUfz.mjs.map +1 -0
  39. package/dist/{AmazonApiGatewayV2EndpointAdaptor-fuLM6M9k.d.mts → AmazonApiGatewayV2EndpointAdaptor-DnCoqlvw.d.mts} +5 -7
  40. package/dist/AmazonApiGatewayV2EndpointAdaptor-DnCoqlvw.d.mts.map +1 -0
  41. package/dist/{Authorizer-C0ge_tc8.cjs → Authorizer-BXxBee2P.cjs} +1 -1
  42. package/dist/Authorizer-BXxBee2P.cjs.map +1 -0
  43. package/dist/{Authorizer-r9U3y_ms.mjs → Authorizer-BgjU8-z6.mjs} +1 -1
  44. package/dist/Authorizer-BgjU8-z6.mjs.map +1 -0
  45. package/dist/{Authorizer-gWxYsGEp.d.mts → Authorizer-DCcYOx3h.d.mts} +1 -1
  46. package/dist/Authorizer-DCcYOx3h.d.mts.map +1 -0
  47. package/dist/{Authorizer-B-btowNd.d.cts → Authorizer-DWtwC8we.d.cts} +1 -1
  48. package/dist/Authorizer-DWtwC8we.d.cts.map +1 -0
  49. package/dist/{BaseFunctionBuilder-BAtutR6q.d.cts → BaseFunctionBuilder-BqZCqIeU.d.mts} +3 -3
  50. package/dist/BaseFunctionBuilder-BqZCqIeU.d.mts.map +1 -0
  51. package/dist/{BaseFunctionBuilder-DRY419e7.d.mts → BaseFunctionBuilder-CcK691ni.d.cts} +3 -3
  52. package/dist/BaseFunctionBuilder-CcK691ni.d.cts.map +1 -0
  53. package/dist/{BaseFunctionBuilder-Czi1Jwza.mjs → BaseFunctionBuilder-Dsqe6pnn.mjs} +2 -2
  54. package/dist/BaseFunctionBuilder-Dsqe6pnn.mjs.map +1 -0
  55. package/dist/{BaseFunctionBuilder-MYG3C9ug.cjs → BaseFunctionBuilder-Z0XwrKB5.cjs} +2 -2
  56. package/dist/BaseFunctionBuilder-Z0XwrKB5.cjs.map +1 -0
  57. package/dist/Construct-BNDLJJfD.mjs +150 -0
  58. package/dist/Construct-BNDLJJfD.mjs.map +1 -0
  59. package/dist/{Construct-Ba5cMxib.cjs → Construct-CPrCF8NK.cjs} +93 -9
  60. package/dist/Construct-CPrCF8NK.cjs.map +1 -0
  61. package/dist/{Construct-C4rPE67v.d.cts → Construct-Dl0l2d8d.d.cts} +30 -4
  62. package/dist/Construct-Dl0l2d8d.d.cts.map +1 -0
  63. package/dist/{Construct-XrijZFFh.d.mts → Construct-E8QPyHh4.d.mts} +30 -4
  64. package/dist/Construct-E8QPyHh4.d.mts.map +1 -0
  65. package/dist/Construct.cjs +4 -2
  66. package/dist/Construct.d.cts +2 -2
  67. package/dist/Construct.d.mts +2 -2
  68. package/dist/Construct.mjs +2 -2
  69. package/dist/{Cron-CGF4YAfM.cjs → Cron-BmPNTLla.cjs} +3 -3
  70. package/dist/Cron-BmPNTLla.cjs.map +1 -0
  71. package/dist/{Cron-BxhGs5up.mjs → Cron-BpJONaFv.mjs} +3 -3
  72. package/dist/Cron-BpJONaFv.mjs.map +1 -0
  73. package/dist/{Cron-bDLcTvV5.d.cts → Cron-CnmLLh3E.d.mts} +3 -3
  74. package/dist/Cron-CnmLLh3E.d.mts.map +1 -0
  75. package/dist/{Cron-cdjlSKNp.d.mts → Cron-DufAeHry.d.cts} +3 -3
  76. package/dist/Cron-DufAeHry.d.cts.map +1 -0
  77. package/dist/{CronBuilder-d2jh-IB2.mjs → CronBuilder-02HSIHJr.mjs} +4 -4
  78. package/dist/CronBuilder-02HSIHJr.mjs.map +1 -0
  79. package/dist/{CronBuilder-CcxKRtVP.cjs → CronBuilder-DC2zUS8a.cjs} +4 -4
  80. package/dist/CronBuilder-DC2zUS8a.cjs.map +1 -0
  81. package/dist/{CronBuilder-BC4m5-p1.d.mts → CronBuilder-anUSbMxb.d.mts} +4 -4
  82. package/dist/CronBuilder-anUSbMxb.d.mts.map +1 -0
  83. package/dist/{CronBuilder-DKVXyE0Q.d.cts → CronBuilder-fyqf_X8n.d.cts} +4 -4
  84. package/dist/CronBuilder-fyqf_X8n.d.cts.map +1 -0
  85. package/dist/{Endpoint-BVGZXFyV.cjs → Endpoint-BCWVbi8l.cjs} +6 -6
  86. package/dist/Endpoint-BCWVbi8l.cjs.map +1 -0
  87. package/dist/{Endpoint-BPv9_-m_.d.cts → Endpoint-BlcHiGNx.d.cts} +3 -3
  88. package/dist/Endpoint-BlcHiGNx.d.cts.map +1 -0
  89. package/dist/{Endpoint-BdwG75G_.d.mts → Endpoint-DEHuy46S.d.mts} +4 -4
  90. package/dist/Endpoint-DEHuy46S.d.mts.map +1 -0
  91. package/dist/{Endpoint-CuOEswxJ.mjs → Endpoint-ygxv2Ia6.mjs} +6 -6
  92. package/dist/Endpoint-ygxv2Ia6.mjs.map +1 -0
  93. package/dist/{EndpointBuilder-DIy_m1bu.d.cts → EndpointBuilder-DPLAUHqg.d.cts} +4 -4
  94. package/dist/EndpointBuilder-DPLAUHqg.d.cts.map +1 -0
  95. package/dist/{EndpointBuilder-B3az942t.d.mts → EndpointBuilder-DaeCeneF.d.mts} +5 -5
  96. package/dist/EndpointBuilder-DaeCeneF.d.mts.map +1 -0
  97. package/dist/{EndpointBuilder-Cgj1P_ra.cjs → EndpointBuilder-J5D67Y6a.cjs} +6 -6
  98. package/dist/EndpointBuilder-J5D67Y6a.cjs.map +1 -0
  99. package/dist/{EndpointBuilder-DnCB1h1j.mjs → EndpointBuilder-tn4zqoyw.mjs} +6 -6
  100. package/dist/EndpointBuilder-tn4zqoyw.mjs.map +1 -0
  101. package/dist/{EndpointFactory-CbdxPCIH.mjs → EndpointFactory-CIUZTTAF.mjs} +7 -7
  102. package/dist/EndpointFactory-CIUZTTAF.mjs.map +1 -0
  103. package/dist/{EndpointFactory-CyPbm3AD.d.cts → EndpointFactory-CiKcb5PX.d.cts} +4 -4
  104. package/dist/EndpointFactory-CiKcb5PX.d.cts.map +1 -0
  105. package/dist/{EndpointFactory-C-0nE6Jg.d.mts → EndpointFactory-DufM0t2z.d.mts} +4 -4
  106. package/dist/EndpointFactory-DufM0t2z.d.mts.map +1 -0
  107. package/dist/{EndpointFactory-CYj6BYok.cjs → EndpointFactory-d5uwWFkK.cjs} +7 -7
  108. package/dist/EndpointFactory-d5uwWFkK.cjs.map +1 -0
  109. package/dist/{Function-DDZb1525.cjs → Function-2qWSZqYB.cjs} +3 -3
  110. package/dist/Function-2qWSZqYB.cjs.map +1 -0
  111. package/dist/{Function-Cf7f_kCz.d.cts → Function-CbEohg13.d.cts} +3 -3
  112. package/dist/Function-CbEohg13.d.cts.map +1 -0
  113. package/dist/{Function-BVHqIDp9.mjs → Function-CdF1HmFu.mjs} +3 -3
  114. package/dist/Function-CdF1HmFu.mjs.map +1 -0
  115. package/dist/{Function-DN2G6OT5.d.mts → Function-EjIOLxlh.d.mts} +3 -3
  116. package/dist/Function-EjIOLxlh.d.mts.map +1 -0
  117. package/dist/{FunctionBuilder-DswJ-9sD.cjs → FunctionBuilder-B-pbgm09.cjs} +4 -4
  118. package/dist/FunctionBuilder-B-pbgm09.cjs.map +1 -0
  119. package/dist/{FunctionBuilder-CJBzzXL3.d.cts → FunctionBuilder-CX3ooNdq.d.cts} +4 -4
  120. package/dist/FunctionBuilder-CX3ooNdq.d.cts.map +1 -0
  121. package/dist/{FunctionBuilder-Cxx8D2na.d.mts → FunctionBuilder-D11ytDyy.d.mts} +4 -4
  122. package/dist/FunctionBuilder-D11ytDyy.d.mts.map +1 -0
  123. package/dist/{FunctionBuilder-CrDYgfiI.mjs → FunctionBuilder-DaWVthAJ.mjs} +4 -4
  124. package/dist/FunctionBuilder-DaWVthAJ.mjs.map +1 -0
  125. package/dist/{FunctionExecutionWrapper-DF260Aaj.d.mts → FunctionExecutionWrapper-BJcRjH9Z.d.cts} +5 -5
  126. package/dist/FunctionExecutionWrapper-BJcRjH9Z.d.cts.map +1 -0
  127. package/dist/{FunctionExecutionWrapper-BYI2bGTL.cjs → FunctionExecutionWrapper-C6ChBNHs.cjs} +3 -3
  128. package/dist/FunctionExecutionWrapper-C6ChBNHs.cjs.map +1 -0
  129. package/dist/{FunctionExecutionWrapper-Qy8bmgFR.d.cts → FunctionExecutionWrapper-D3RNjGIR.d.mts} +5 -5
  130. package/dist/FunctionExecutionWrapper-D3RNjGIR.d.mts.map +1 -0
  131. package/dist/{FunctionExecutionWrapper-CLDh7Z2_.mjs → FunctionExecutionWrapper-Dj3pmXaN.mjs} +3 -3
  132. package/dist/FunctionExecutionWrapper-Dj3pmXaN.mjs.map +1 -0
  133. package/dist/{HonoEndpointAdaptor-BaPlUhz0.d.mts → HonoEndpointAdaptor-CKP6nrG2.d.mts} +11 -6
  134. package/dist/HonoEndpointAdaptor-CKP6nrG2.d.mts.map +1 -0
  135. package/dist/HonoEndpointAdaptor-Cc8Rnp9G.mjs +251 -0
  136. package/dist/HonoEndpointAdaptor-Cc8Rnp9G.mjs.map +1 -0
  137. package/dist/{HonoEndpointAdaptor-YcRHYALH.d.cts → HonoEndpointAdaptor-hrrvyfw9.d.cts} +10 -5
  138. package/dist/HonoEndpointAdaptor-hrrvyfw9.d.cts.map +1 -0
  139. package/dist/HonoEndpointAdaptor-xGnDZa3a.cjs +257 -0
  140. package/dist/HonoEndpointAdaptor-xGnDZa3a.cjs.map +1 -0
  141. package/dist/{Subscriber-COYMSevD.d.cts → Subscriber-BBcIOkhW.d.cts} +2 -2
  142. package/dist/Subscriber-BBcIOkhW.d.cts.map +1 -0
  143. package/dist/{Subscriber-ikctpU3I.d.mts → Subscriber-BpTAXvbM.d.mts} +2 -2
  144. package/dist/Subscriber-BpTAXvbM.d.mts.map +1 -0
  145. package/dist/{Subscriber-BiHjVXtM.cjs → Subscriber-CL4iYm01.cjs} +2 -2
  146. package/dist/Subscriber-CL4iYm01.cjs.map +1 -0
  147. package/dist/{Subscriber-BmPf9GFb.mjs → Subscriber-CZ8Smwd2.mjs} +2 -2
  148. package/dist/Subscriber-CZ8Smwd2.mjs.map +1 -0
  149. package/dist/{SubscriberBuilder-DJPEeYDJ.mjs → SubscriberBuilder-CxQg3TTm.mjs} +2 -2
  150. package/dist/SubscriberBuilder-CxQg3TTm.mjs.map +1 -0
  151. package/dist/{SubscriberBuilder-Cp1C-xtT.cjs → SubscriberBuilder-D0OS3hd7.cjs} +2 -2
  152. package/dist/SubscriberBuilder-D0OS3hd7.cjs.map +1 -0
  153. package/dist/{SubscriberBuilder-D_9zzllj.d.mts → SubscriberBuilder-D7IgufwB.d.cts} +3 -3
  154. package/dist/SubscriberBuilder-D7IgufwB.d.cts.map +1 -0
  155. package/dist/{SubscriberBuilder-ivHAGIVi.d.cts → SubscriberBuilder-DOFBbWLt.d.mts} +3 -3
  156. package/dist/SubscriberBuilder-DOFBbWLt.d.mts.map +1 -0
  157. package/dist/{TestEndpointAdaptor-DB7bREhS.d.mts → TestEndpointAdaptor-BnJusjf7.d.mts} +4 -4
  158. package/dist/TestEndpointAdaptor-BnJusjf7.d.mts.map +1 -0
  159. package/dist/TestEndpointAdaptor-CPL8ru6q.mjs +170 -0
  160. package/dist/TestEndpointAdaptor-CPL8ru6q.mjs.map +1 -0
  161. package/dist/TestEndpointAdaptor-Ca4K26_d.cjs +176 -0
  162. package/dist/TestEndpointAdaptor-Ca4K26_d.cjs.map +1 -0
  163. package/dist/{TestEndpointAdaptor-C10xBI--.d.cts → TestEndpointAdaptor-EPPTjntv.d.cts} +3 -3
  164. package/dist/TestEndpointAdaptor-EPPTjntv.d.cts.map +1 -0
  165. package/dist/adaptors/aws.cjs +12 -12
  166. package/dist/adaptors/aws.d.cts +15 -14
  167. package/dist/adaptors/aws.d.mts +16 -15
  168. package/dist/adaptors/aws.mjs +12 -12
  169. package/dist/adaptors/hono.cjs +10 -9
  170. package/dist/adaptors/hono.d.cts +8 -8
  171. package/dist/adaptors/hono.d.mts +9 -9
  172. package/dist/adaptors/hono.mjs +10 -9
  173. package/dist/adaptors/testing.cjs +8 -7
  174. package/dist/adaptors/testing.d.cts +8 -8
  175. package/dist/adaptors/testing.d.mts +9 -9
  176. package/dist/adaptors/testing.mjs +8 -7
  177. package/dist/crons/Cron.cjs +6 -6
  178. package/dist/crons/Cron.d.cts +6 -6
  179. package/dist/crons/Cron.d.mts +6 -6
  180. package/dist/crons/Cron.mjs +6 -6
  181. package/dist/crons/CronBuilder.cjs +7 -7
  182. package/dist/crons/CronBuilder.d.cts +7 -7
  183. package/dist/crons/CronBuilder.d.mts +7 -7
  184. package/dist/crons/CronBuilder.mjs +7 -7
  185. package/dist/crons/index.cjs +7 -7
  186. package/dist/crons/index.cjs.map +1 -1
  187. package/dist/crons/index.d.cts +11 -11
  188. package/dist/crons/index.d.mts +7 -7
  189. package/dist/crons/index.mjs +7 -7
  190. package/dist/crons/index.mjs.map +1 -1
  191. package/dist/endpoints/AmazonApiGatewayEndpointAdaptor.cjs +7 -7
  192. package/dist/endpoints/AmazonApiGatewayEndpointAdaptor.d.cts +10 -9
  193. package/dist/endpoints/AmazonApiGatewayEndpointAdaptor.d.mts +11 -10
  194. package/dist/endpoints/AmazonApiGatewayEndpointAdaptor.mjs +7 -7
  195. package/dist/endpoints/AmazonApiGatewayV1EndpointAdaptor.cjs +8 -8
  196. package/dist/endpoints/AmazonApiGatewayV1EndpointAdaptor.d.cts +10 -9
  197. package/dist/endpoints/AmazonApiGatewayV1EndpointAdaptor.d.mts +11 -10
  198. package/dist/endpoints/AmazonApiGatewayV1EndpointAdaptor.mjs +8 -8
  199. package/dist/endpoints/AmazonApiGatewayV2EndpointAdaptor.cjs +8 -8
  200. package/dist/endpoints/AmazonApiGatewayV2EndpointAdaptor.d.cts +11 -10
  201. package/dist/endpoints/AmazonApiGatewayV2EndpointAdaptor.d.mts +12 -11
  202. package/dist/endpoints/AmazonApiGatewayV2EndpointAdaptor.mjs +8 -8
  203. package/dist/endpoints/Authorizer.cjs +1 -1
  204. package/dist/endpoints/Authorizer.d.cts +1 -1
  205. package/dist/endpoints/Authorizer.d.mts +1 -1
  206. package/dist/endpoints/Authorizer.mjs +1 -1
  207. package/dist/endpoints/Endpoint.cjs +6 -6
  208. package/dist/endpoints/Endpoint.d.cts +7 -7
  209. package/dist/endpoints/Endpoint.d.mts +8 -8
  210. package/dist/endpoints/Endpoint.mjs +6 -6
  211. package/dist/endpoints/EndpointBuilder.cjs +9 -9
  212. package/dist/endpoints/EndpointBuilder.d.cts +8 -8
  213. package/dist/endpoints/EndpointBuilder.d.mts +9 -9
  214. package/dist/endpoints/EndpointBuilder.mjs +9 -9
  215. package/dist/endpoints/EndpointFactory.cjs +10 -10
  216. package/dist/endpoints/EndpointFactory.d.cts +9 -9
  217. package/dist/endpoints/EndpointFactory.d.mts +10 -10
  218. package/dist/endpoints/EndpointFactory.mjs +10 -10
  219. package/dist/endpoints/HonoEndpointAdaptor.cjs +10 -9
  220. package/dist/endpoints/HonoEndpointAdaptor.d.cts +8 -8
  221. package/dist/endpoints/HonoEndpointAdaptor.d.mts +9 -9
  222. package/dist/endpoints/HonoEndpointAdaptor.mjs +10 -9
  223. package/dist/endpoints/TestEndpointAdaptor.cjs +8 -7
  224. package/dist/endpoints/TestEndpointAdaptor.d.cts +8 -8
  225. package/dist/endpoints/TestEndpointAdaptor.d.mts +9 -9
  226. package/dist/endpoints/TestEndpointAdaptor.mjs +8 -7
  227. package/dist/endpoints/audit.d.cts +7 -7
  228. package/dist/endpoints/audit.d.mts +8 -8
  229. package/dist/endpoints/helpers.cjs +7 -7
  230. package/dist/endpoints/helpers.d.cts +7 -7
  231. package/dist/endpoints/helpers.d.cts.map +1 -1
  232. package/dist/endpoints/helpers.d.mts +8 -8
  233. package/dist/endpoints/helpers.d.mts.map +1 -1
  234. package/dist/endpoints/helpers.mjs +7 -7
  235. package/dist/endpoints/index.cjs +20 -10
  236. package/dist/endpoints/index.cjs.map +1 -1
  237. package/dist/endpoints/index.d.cts +14 -12
  238. package/dist/endpoints/index.d.cts.map +1 -1
  239. package/dist/endpoints/index.d.mts +15 -13
  240. package/dist/endpoints/index.d.mts.map +1 -1
  241. package/dist/endpoints/index.mjs +13 -11
  242. package/dist/endpoints/index.mjs.map +1 -1
  243. package/dist/endpoints/lazyAccessors.cjs +10 -0
  244. package/dist/endpoints/lazyAccessors.d.cts +10 -0
  245. package/dist/endpoints/lazyAccessors.d.mts +10 -0
  246. package/dist/endpoints/lazyAccessors.mjs +3 -0
  247. package/dist/endpoints/parseHonoQuery.cjs +1 -1
  248. package/dist/endpoints/parseHonoQuery.mjs +1 -1
  249. package/dist/endpoints/parseQueryParams.d.cts.map +1 -1
  250. package/dist/endpoints/parseQueryParams.d.mts.map +1 -1
  251. package/dist/endpoints/processAudits.d.cts +7 -7
  252. package/dist/endpoints/processAudits.d.cts.map +1 -1
  253. package/dist/endpoints/processAudits.d.mts +8 -8
  254. package/dist/endpoints/processAudits.d.mts.map +1 -1
  255. package/dist/endpoints/rls.cjs +1 -1
  256. package/dist/endpoints/rls.d.cts +7 -7
  257. package/dist/endpoints/rls.d.mts +8 -8
  258. package/dist/endpoints/rls.mjs +1 -1
  259. package/dist/functions/AWSLambdaFunction.cjs +6 -6
  260. package/dist/functions/AWSLambdaFunction.d.cts +4 -4
  261. package/dist/functions/AWSLambdaFunction.d.mts +4 -4
  262. package/dist/functions/AWSLambdaFunction.mjs +6 -6
  263. package/dist/functions/BaseFunctionBuilder.cjs +2 -2
  264. package/dist/functions/BaseFunctionBuilder.d.cts +2 -2
  265. package/dist/functions/BaseFunctionBuilder.d.mts +2 -2
  266. package/dist/functions/BaseFunctionBuilder.mjs +2 -2
  267. package/dist/functions/Function.cjs +2 -2
  268. package/dist/functions/Function.d.cts +2 -2
  269. package/dist/functions/Function.d.mts +2 -2
  270. package/dist/functions/Function.mjs +2 -2
  271. package/dist/functions/FunctionBuilder.cjs +4 -4
  272. package/dist/functions/FunctionBuilder.d.cts +4 -4
  273. package/dist/functions/FunctionBuilder.d.mts +4 -4
  274. package/dist/functions/FunctionBuilder.mjs +4 -4
  275. package/dist/functions/FunctionExecutionWrapper.cjs +5 -5
  276. package/dist/functions/FunctionExecutionWrapper.d.cts +3 -3
  277. package/dist/functions/FunctionExecutionWrapper.d.mts +3 -3
  278. package/dist/functions/FunctionExecutionWrapper.mjs +5 -5
  279. package/dist/functions/TestFunctionAdaptor.cjs +7 -7
  280. package/dist/functions/TestFunctionAdaptor.cjs.map +1 -1
  281. package/dist/functions/TestFunctionAdaptor.d.cts +3 -3
  282. package/dist/functions/TestFunctionAdaptor.d.cts.map +1 -1
  283. package/dist/functions/TestFunctionAdaptor.d.mts +3 -3
  284. package/dist/functions/TestFunctionAdaptor.d.mts.map +1 -1
  285. package/dist/functions/TestFunctionAdaptor.mjs +7 -7
  286. package/dist/functions/TestFunctionAdaptor.mjs.map +1 -1
  287. package/dist/functions/index.cjs +5 -5
  288. package/dist/functions/index.d.cts +5 -5
  289. package/dist/functions/index.d.mts +5 -5
  290. package/dist/functions/index.mjs +5 -5
  291. package/dist/{functions-fTid0RMK.cjs → functions-CUEv5NC3.cjs} +2 -2
  292. package/dist/functions-CUEv5NC3.cjs.map +1 -0
  293. package/dist/functions-DOYBrb7n.mjs +8 -0
  294. package/dist/functions-DOYBrb7n.mjs.map +1 -0
  295. package/dist/{helpers-ByRTDO_m.mjs → helpers-Dl1eszfi.mjs} +2 -2
  296. package/dist/helpers-Dl1eszfi.mjs.map +1 -0
  297. package/dist/{helpers-BcP1tXAi.cjs → helpers-QMdfdnvU.cjs} +2 -2
  298. package/dist/helpers-QMdfdnvU.cjs.map +1 -0
  299. package/dist/{index-BWzGIj06.d.mts → index-BrGtMlpJ.d.cts} +2 -2
  300. package/dist/index-BrGtMlpJ.d.cts.map +1 -0
  301. package/dist/index-Dn4py3Db.d.mts +12 -0
  302. package/dist/index-Dn4py3Db.d.mts.map +1 -0
  303. package/dist/index.cjs +4 -2
  304. package/dist/index.d.cts +3 -2
  305. package/dist/index.d.mts +4 -3
  306. package/dist/index.mjs +2 -2
  307. package/dist/lazyAccessors-B-Jgkg2o.mjs +175 -0
  308. package/dist/lazyAccessors-B-Jgkg2o.mjs.map +1 -0
  309. package/dist/lazyAccessors-B8Hhras9.cjs +223 -0
  310. package/dist/lazyAccessors-B8Hhras9.cjs.map +1 -0
  311. package/dist/lazyAccessors-BFAj2-S4.d.mts +49 -0
  312. package/dist/lazyAccessors-BFAj2-S4.d.mts.map +1 -0
  313. package/dist/lazyAccessors-knaNZuTN.d.cts +49 -0
  314. package/dist/lazyAccessors-knaNZuTN.d.cts.map +1 -0
  315. package/dist/{parseHonoQuery-CZC5_97v.cjs → parseHonoQuery-D4MhxTRc.cjs} +1 -1
  316. package/dist/parseHonoQuery-D4MhxTRc.cjs.map +1 -0
  317. package/dist/{parseHonoQuery-DDgIkTO4.mjs → parseHonoQuery-DpK3sGPc.mjs} +1 -1
  318. package/dist/parseHonoQuery-DpK3sGPc.mjs.map +1 -0
  319. package/dist/parseQueryParams-BSNkjmZ9.cjs.map +1 -1
  320. package/dist/parseQueryParams-UMTRnRrW.mjs.map +1 -1
  321. package/dist/processAudits-CzHkPokQ.cjs.map +1 -1
  322. package/dist/processAudits-Dj8UGqcW.mjs.map +1 -1
  323. package/dist/publisher-BXG9YiRi.d.mts +16 -0
  324. package/dist/publisher-BXG9YiRi.d.mts.map +1 -0
  325. package/dist/publisher-Bw4770Hi.mjs.map +1 -1
  326. package/dist/publisher-D9ngDXg3.d.cts +16 -0
  327. package/dist/publisher-D9ngDXg3.d.cts.map +1 -0
  328. package/dist/publisher-lFQleddL.cjs.map +1 -1
  329. package/dist/publisher.d.cts +3 -16
  330. package/dist/publisher.d.mts +3 -16
  331. package/dist/{rls-CmJ7bRsz.cjs → rls-BrywnrQb.cjs} +1 -1
  332. package/dist/{rls-CmJ7bRsz.cjs.map → rls-BrywnrQb.cjs.map} +1 -1
  333. package/dist/{rls-Bf3FRwto.mjs → rls-C0cWOnk4.mjs} +1 -1
  334. package/dist/{rls-Bf3FRwto.mjs.map → rls-C0cWOnk4.mjs.map} +1 -1
  335. package/dist/subscribers/AWSLambdaSubscriberAdaptor.cjs +1 -1
  336. package/dist/subscribers/AWSLambdaSubscriberAdaptor.d.cts +3 -3
  337. package/dist/subscribers/AWSLambdaSubscriberAdaptor.d.mts +3 -3
  338. package/dist/subscribers/AWSLambdaSubscriberAdaptor.mjs +1 -1
  339. package/dist/subscribers/Subscriber.cjs +2 -2
  340. package/dist/subscribers/Subscriber.d.cts +2 -2
  341. package/dist/subscribers/Subscriber.d.mts +2 -2
  342. package/dist/subscribers/Subscriber.mjs +2 -2
  343. package/dist/subscribers/SubscriberBuilder.cjs +3 -3
  344. package/dist/subscribers/SubscriberBuilder.d.cts +3 -3
  345. package/dist/subscribers/SubscriberBuilder.d.mts +3 -3
  346. package/dist/subscribers/SubscriberBuilder.mjs +3 -3
  347. package/dist/subscribers/index.cjs +3 -3
  348. package/dist/subscribers/index.cjs.map +1 -1
  349. package/dist/subscribers/index.d.cts +5 -5
  350. package/dist/subscribers/index.d.cts.map +1 -1
  351. package/dist/subscribers/index.d.mts +5 -5
  352. package/dist/subscribers/index.d.mts.map +1 -1
  353. package/dist/subscribers/index.mjs +3 -3
  354. package/dist/subscribers/index.mjs.map +1 -1
  355. package/dist/telemetry-BTaiRqPo.d.cts +95 -0
  356. package/dist/telemetry-BTaiRqPo.d.cts.map +1 -0
  357. package/dist/telemetry-yAHf5yDs.d.mts +95 -0
  358. package/dist/telemetry-yAHf5yDs.d.mts.map +1 -0
  359. package/dist/telemetry.cjs +0 -0
  360. package/dist/telemetry.d.cts +2 -0
  361. package/dist/telemetry.d.mts +2 -0
  362. package/dist/telemetry.mjs +0 -0
  363. package/dist/types-B5H3piDg.d.cts.map +1 -1
  364. package/dist/{types-DdIlpxAd.d.mts → types-Dw-iLd3Y.d.mts} +1 -1
  365. package/dist/types-Dw-iLd3Y.d.mts.map +1 -0
  366. package/dist/types.d.mts +1 -1
  367. package/package.json +19 -18
  368. package/src/Construct.ts +189 -94
  369. package/src/__benchmarks__/build-time-optimization.bench.ts +274 -0
  370. package/src/__benchmarks__/endpoint.bench.ts +375 -375
  371. package/src/__benchmarks__/fixtures.ts +241 -0
  372. package/src/__benchmarks__/hono-adaptor.bench.ts +533 -0
  373. package/src/__benchmarks__/hono-server.bench.ts +206 -206
  374. package/src/__benchmarks__/response-builder.bench.ts +428 -0
  375. package/src/__benchmarks__/strategies/strategy-a-lazy-services.ts +319 -0
  376. package/src/__benchmarks__/strategies/strategy-c-middleware.ts +530 -0
  377. package/src/__benchmarks__/strategies/strategy-d-opt-in-events.ts +567 -0
  378. package/src/__tests__/Construct.environment.spec.ts +572 -351
  379. package/src/__tests__/publisher.setting.spec.ts +496 -499
  380. package/src/__tests__/publisher.spec.ts +440 -442
  381. package/src/crons/Cron.ts +117 -117
  382. package/src/crons/CronBuilder.ts +237 -238
  383. package/src/crons/__tests__/Cron.spec.ts +448 -448
  384. package/src/crons/__tests__/CronBuilder.state-isolation.spec.ts +214 -214
  385. package/src/crons/index.ts +4 -4
  386. package/src/endpoints/AmazonApiGatewayEndpointAdaptor.ts +571 -463
  387. package/src/endpoints/AmazonApiGatewayV1EndpointAdaptor.ts +86 -86
  388. package/src/endpoints/AmazonApiGatewayV2EndpointAdaptor.ts +85 -81
  389. package/src/endpoints/Authorizer.ts +100 -100
  390. package/src/endpoints/Endpoint.ts +921 -937
  391. package/src/endpoints/EndpointBuilder.ts +703 -703
  392. package/src/endpoints/EndpointFactory.ts +1054 -1056
  393. package/src/endpoints/HonoEndpointAdaptor.ts +621 -548
  394. package/src/endpoints/TestEndpointAdaptor.ts +372 -353
  395. package/src/endpoints/__tests__/AmazonApiGatewayV1EndpointAdaptor.audits.spec.ts +499 -0
  396. package/src/endpoints/__tests__/AmazonApiGatewayV1EndpointAdaptor.events.spec.ts +541 -542
  397. package/src/endpoints/__tests__/AmazonApiGatewayV1EndpointAdaptor.spec.ts +1172 -1174
  398. package/src/endpoints/__tests__/AmazonApiGatewayV2EndpointAdaptor.audits.spec.ts +599 -599
  399. package/src/endpoints/__tests__/AmazonApiGatewayV2EndpointAdaptor.events.spec.ts +710 -710
  400. package/src/endpoints/__tests__/AmazonApiGatewayV2EndpointAdaptor.factory-publisher.spec.ts +280 -280
  401. package/src/endpoints/__tests__/AmazonApiGatewayV2EndpointAdaptor.kysely-audit.integration.spec.ts +924 -730
  402. package/src/endpoints/__tests__/AmazonApiGatewayV2EndpointAdaptor.rls.spec.ts +307 -0
  403. package/src/endpoints/__tests__/AmazonApiGatewayV2EndpointAdaptor.spec.ts +408 -408
  404. package/src/endpoints/__tests__/Authorizer.spec.ts +286 -0
  405. package/src/endpoints/__tests__/Endpoint.cookies.spec.ts +162 -162
  406. package/src/endpoints/__tests__/Endpoint.headers.spec.ts +182 -182
  407. package/src/endpoints/__tests__/Endpoint.manifest.spec.ts +158 -159
  408. package/src/endpoints/__tests__/Endpoint.spec.ts +819 -822
  409. package/src/endpoints/__tests__/EndpointBuilder.audit.spec.ts +427 -427
  410. package/src/endpoints/__tests__/EndpointBuilder.spec.ts +478 -478
  411. package/src/endpoints/__tests__/EndpointFactory.authorizers.spec.ts +426 -426
  412. package/src/endpoints/__tests__/EndpointFactory.database-auditor.spec.ts +501 -501
  413. package/src/endpoints/__tests__/EndpointFactory.reference-audit.spec.ts +177 -177
  414. package/src/endpoints/__tests__/EndpointFactory.spec.ts +471 -471
  415. package/src/endpoints/__tests__/EndpointFactory.state-isolation.spec.ts +143 -143
  416. package/src/endpoints/__tests__/HonoEndpointAdaptor.audit-transactions.spec.ts +663 -663
  417. package/src/endpoints/__tests__/HonoEndpointAdaptor.audits.spec.ts +608 -608
  418. package/src/endpoints/__tests__/HonoEndpointAdaptor.events.spec.ts +561 -561
  419. package/src/endpoints/__tests__/HonoEndpointAdaptor.kysely-audit.integration.spec.ts +1036 -1036
  420. package/src/endpoints/__tests__/HonoEndpointAdaptor.openapi.spec.ts +278 -278
  421. package/src/endpoints/__tests__/HonoEndpointAdaptor.spec.ts +1093 -1093
  422. package/src/endpoints/__tests__/ResponseBuilder.spec.ts +230 -230
  423. package/src/endpoints/__tests__/TestEndpointAdaptor.audits.spec.ts +569 -569
  424. package/src/endpoints/__tests__/TestEndpointAdaptor.spec.ts +841 -841
  425. package/src/endpoints/__tests__/endpoint-types.test.ts +68 -68
  426. package/src/endpoints/__tests__/helpers.spec.ts +37 -0
  427. package/src/endpoints/__tests__/lazyAccessors.spec.ts +330 -0
  428. package/src/endpoints/__tests__/processAudits.spec.ts +631 -0
  429. package/src/endpoints/audit.ts +51 -51
  430. package/src/endpoints/helpers.ts +34 -35
  431. package/src/endpoints/index.ts +33 -23
  432. package/src/endpoints/lazyAccessors.ts +241 -0
  433. package/src/endpoints/parseHonoQuery.ts +41 -41
  434. package/src/endpoints/parseQueryParams.ts +36 -36
  435. package/src/endpoints/processAudits.ts +269 -269
  436. package/src/endpoints/rls.ts +16 -16
  437. package/src/functions/AWSLambdaFunction.ts +294 -295
  438. package/src/functions/BaseFunctionBuilder.ts +185 -185
  439. package/src/functions/Function.ts +190 -190
  440. package/src/functions/FunctionBuilder.ts +353 -353
  441. package/src/functions/FunctionExecutionWrapper.ts +199 -200
  442. package/src/functions/TestFunctionAdaptor.ts +191 -191
  443. package/src/functions/__tests__/AWSLambdaFunctionAdaptor.spec.ts +424 -424
  444. package/src/functions/__tests__/Function.audits.spec.ts +364 -364
  445. package/src/functions/__tests__/Function.spec.ts +453 -454
  446. package/src/functions/__tests__/FunctionBuilder.state-isolation.spec.ts +179 -179
  447. package/src/functions/__tests__/TestFunctionAdaptor.spec.ts +369 -369
  448. package/src/functions/index.ts +6 -4
  449. package/src/index.ts +18 -9
  450. package/src/publisher.ts +71 -72
  451. package/src/subscribers/AWSLambdaSubscriberAdaptor.ts +244 -248
  452. package/src/subscribers/Subscriber.ts +82 -84
  453. package/src/subscribers/SubscriberBuilder.ts +129 -129
  454. package/src/subscribers/__tests__/AWSLambdaSubscriberAdaptor.spec.ts +589 -589
  455. package/src/subscribers/__tests__/Subscriber.spec.ts +402 -404
  456. package/src/subscribers/index.ts +1 -0
  457. package/src/telemetry.ts +103 -0
  458. package/src/types.ts +7 -7
  459. package/test.ts +58 -59
  460. package/dist/AWSLambdaFunction-BdebRMUh.d.mts.map +0 -1
  461. package/dist/AWSLambdaFunction-C54a1doJ.mjs.map +0 -1
  462. package/dist/AWSLambdaFunction-D0tEOTXQ.d.cts.map +0 -1
  463. package/dist/AWSLambdaFunction-EPGY4s7i.cjs.map +0 -1
  464. package/dist/AWSLambdaSubscriberAdaptor-BLfO612H.mjs.map +0 -1
  465. package/dist/AWSLambdaSubscriberAdaptor-BNcYYZ-P.cjs.map +0 -1
  466. package/dist/AWSLambdaSubscriberAdaptor-DrFAvHOp.d.mts.map +0 -1
  467. package/dist/AWSLambdaSubscriberAdaptor-Dx-Ks1Jp.d.cts.map +0 -1
  468. package/dist/AmazonApiGatewayEndpointAdaptor-BT9JXihC.mjs.map +0 -1
  469. package/dist/AmazonApiGatewayEndpointAdaptor-DLqnYQ4E.d.cts.map +0 -1
  470. package/dist/AmazonApiGatewayEndpointAdaptor-DNFvvdmW.cjs.map +0 -1
  471. package/dist/AmazonApiGatewayEndpointAdaptor-qlKXuZxy.d.mts.map +0 -1
  472. package/dist/AmazonApiGatewayV1EndpointAdaptor-CShQI8Gk.mjs.map +0 -1
  473. package/dist/AmazonApiGatewayV1EndpointAdaptor-Ccl8B8kG.cjs.map +0 -1
  474. package/dist/AmazonApiGatewayV1EndpointAdaptor-DyUaJIhB.d.cts.map +0 -1
  475. package/dist/AmazonApiGatewayV1EndpointAdaptor-MRUxs3Xi.d.mts.map +0 -1
  476. package/dist/AmazonApiGatewayV2EndpointAdaptor-D8-0Aab4.cjs.map +0 -1
  477. package/dist/AmazonApiGatewayV2EndpointAdaptor-M1-w0U5R.d.cts.map +0 -1
  478. package/dist/AmazonApiGatewayV2EndpointAdaptor-fuLM6M9k.d.mts.map +0 -1
  479. package/dist/AmazonApiGatewayV2EndpointAdaptor-lCRzGE4q.mjs.map +0 -1
  480. package/dist/Authorizer-B-btowNd.d.cts.map +0 -1
  481. package/dist/Authorizer-C0ge_tc8.cjs.map +0 -1
  482. package/dist/Authorizer-gWxYsGEp.d.mts.map +0 -1
  483. package/dist/Authorizer-r9U3y_ms.mjs.map +0 -1
  484. package/dist/BaseFunctionBuilder-BAtutR6q.d.cts.map +0 -1
  485. package/dist/BaseFunctionBuilder-Czi1Jwza.mjs.map +0 -1
  486. package/dist/BaseFunctionBuilder-DRY419e7.d.mts.map +0 -1
  487. package/dist/BaseFunctionBuilder-MYG3C9ug.cjs.map +0 -1
  488. package/dist/Construct-Ba5cMxib.cjs.map +0 -1
  489. package/dist/Construct-C4rPE67v.d.cts.map +0 -1
  490. package/dist/Construct-DdyGHuag.mjs +0 -78
  491. package/dist/Construct-DdyGHuag.mjs.map +0 -1
  492. package/dist/Construct-XrijZFFh.d.mts.map +0 -1
  493. package/dist/Cron-BxhGs5up.mjs.map +0 -1
  494. package/dist/Cron-CGF4YAfM.cjs.map +0 -1
  495. package/dist/Cron-bDLcTvV5.d.cts.map +0 -1
  496. package/dist/Cron-cdjlSKNp.d.mts.map +0 -1
  497. package/dist/CronBuilder-BC4m5-p1.d.mts.map +0 -1
  498. package/dist/CronBuilder-CcxKRtVP.cjs.map +0 -1
  499. package/dist/CronBuilder-DKVXyE0Q.d.cts.map +0 -1
  500. package/dist/CronBuilder-d2jh-IB2.mjs.map +0 -1
  501. package/dist/Endpoint-BPv9_-m_.d.cts.map +0 -1
  502. package/dist/Endpoint-BVGZXFyV.cjs.map +0 -1
  503. package/dist/Endpoint-BdwG75G_.d.mts.map +0 -1
  504. package/dist/Endpoint-CuOEswxJ.mjs.map +0 -1
  505. package/dist/EndpointBuilder-B3az942t.d.mts.map +0 -1
  506. package/dist/EndpointBuilder-Cgj1P_ra.cjs.map +0 -1
  507. package/dist/EndpointBuilder-DIy_m1bu.d.cts.map +0 -1
  508. package/dist/EndpointBuilder-DnCB1h1j.mjs.map +0 -1
  509. package/dist/EndpointFactory-C-0nE6Jg.d.mts.map +0 -1
  510. package/dist/EndpointFactory-CYj6BYok.cjs.map +0 -1
  511. package/dist/EndpointFactory-CbdxPCIH.mjs.map +0 -1
  512. package/dist/EndpointFactory-CyPbm3AD.d.cts.map +0 -1
  513. package/dist/Function-BVHqIDp9.mjs.map +0 -1
  514. package/dist/Function-Cf7f_kCz.d.cts.map +0 -1
  515. package/dist/Function-DDZb1525.cjs.map +0 -1
  516. package/dist/Function-DN2G6OT5.d.mts.map +0 -1
  517. package/dist/FunctionBuilder-CJBzzXL3.d.cts.map +0 -1
  518. package/dist/FunctionBuilder-CrDYgfiI.mjs.map +0 -1
  519. package/dist/FunctionBuilder-Cxx8D2na.d.mts.map +0 -1
  520. package/dist/FunctionBuilder-DswJ-9sD.cjs.map +0 -1
  521. package/dist/FunctionExecutionWrapper-BYI2bGTL.cjs.map +0 -1
  522. package/dist/FunctionExecutionWrapper-CLDh7Z2_.mjs.map +0 -1
  523. package/dist/FunctionExecutionWrapper-DF260Aaj.d.mts.map +0 -1
  524. package/dist/FunctionExecutionWrapper-Qy8bmgFR.d.cts.map +0 -1
  525. package/dist/HonoEndpointAdaptor-BaPlUhz0.d.mts.map +0 -1
  526. package/dist/HonoEndpointAdaptor-CQe2FqMR.cjs +0 -234
  527. package/dist/HonoEndpointAdaptor-CQe2FqMR.cjs.map +0 -1
  528. package/dist/HonoEndpointAdaptor-Ce-2HBxn.mjs +0 -228
  529. package/dist/HonoEndpointAdaptor-Ce-2HBxn.mjs.map +0 -1
  530. package/dist/HonoEndpointAdaptor-YcRHYALH.d.cts.map +0 -1
  531. package/dist/Subscriber-BiHjVXtM.cjs.map +0 -1
  532. package/dist/Subscriber-BmPf9GFb.mjs.map +0 -1
  533. package/dist/Subscriber-COYMSevD.d.cts.map +0 -1
  534. package/dist/Subscriber-ikctpU3I.d.mts.map +0 -1
  535. package/dist/SubscriberBuilder-Cp1C-xtT.cjs.map +0 -1
  536. package/dist/SubscriberBuilder-DJPEeYDJ.mjs.map +0 -1
  537. package/dist/SubscriberBuilder-D_9zzllj.d.mts.map +0 -1
  538. package/dist/SubscriberBuilder-ivHAGIVi.d.cts.map +0 -1
  539. package/dist/TestEndpointAdaptor-C10xBI--.d.cts.map +0 -1
  540. package/dist/TestEndpointAdaptor-DB7bREhS.d.mts.map +0 -1
  541. package/dist/TestEndpointAdaptor-DJgik2Wj.mjs +0 -160
  542. package/dist/TestEndpointAdaptor-DJgik2Wj.mjs.map +0 -1
  543. package/dist/TestEndpointAdaptor-ZbtQiuHE.cjs +0 -166
  544. package/dist/TestEndpointAdaptor-ZbtQiuHE.cjs.map +0 -1
  545. package/dist/functions-C6EK1xL6.mjs +0 -8
  546. package/dist/functions-C6EK1xL6.mjs.map +0 -1
  547. package/dist/functions-fTid0RMK.cjs.map +0 -1
  548. package/dist/helpers-BcP1tXAi.cjs.map +0 -1
  549. package/dist/helpers-ByRTDO_m.mjs.map +0 -1
  550. package/dist/index-BWzGIj06.d.mts.map +0 -1
  551. package/dist/index-DmFozqLd.d.cts +0 -12
  552. package/dist/index-DmFozqLd.d.cts.map +0 -1
  553. package/dist/parseHonoQuery-CZC5_97v.cjs.map +0 -1
  554. package/dist/parseHonoQuery-DDgIkTO4.mjs.map +0 -1
  555. package/dist/publisher.d.cts.map +0 -1
  556. package/dist/publisher.d.mts.map +0 -1
  557. package/dist/types-DdIlpxAd.d.mts.map +0 -1
package/dist/processAudits-CzHkPokQ.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"processAudits-CzHkPokQ.cjs","names":["endpoint: Endpoint<\n any,\n any,\n any,\n OutSchema,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction\n >","response: InferStandardSchema<OutSchema>","serviceDiscovery: ServiceDiscovery<any, any>","logger: TLogger","ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n }","existingAuditor?: Auditor<TAuditAction>","auditor: Auditor<TAuditAction>","actor: AuditActor","DefaultAuditor","endpoint: Endpoint<\n any,\n any,\n any,\n any,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction,\n TDatabase,\n TDatabaseServiceName\n >","auditContext: AuditExecutionContext<TAuditAction> | undefined","handler: (auditor?: Auditor<TAuditAction>) => Promise<T>","onComplete?: (response: T, auditor: Auditor<TAuditAction>) => Promise<void>","options?: ExecuteWithAuditTransactionOptions","response"],"sources":["../src/endpoints/processAudits.ts"],"sourcesContent":["import type {\n AuditActor,\n AuditStorage,\n AuditableAction,\n Auditor,\n} from '@geekmidas/audit';\nimport { DefaultAuditor } from '@geekmidas/audit';\nimport type { Logger } from '@geekmidas/logger';\nimport type { InferStandardSchema } from '@geekmidas/schema';\nimport type { Service, ServiceDiscovery } from '@geekmidas/services';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\nimport type { CookieFn, Endpoint, HeaderFn } from './Endpoint';\nimport type { ActorExtractor, MappedAudit } from './audit';\n\n/**\n * Process declarative audit definitions after successful endpoint execution.\n * Similar to publishConstructEvents for events.\n *\n * @param endpoint - The endpoint with audit configuration\n * @param response - The handler response to generate audit payloads from\n * @param serviceDiscovery - Service discovery for registering audit storage\n * @param logger - Logger for debug/error messages\n * @param ctx - Request context (session, headers, cookies, services)\n * @param existingAuditor - Optional existing auditor instance (e.g., from handler context).\n * If provided, uses this auditor (with its stored transaction).\n * If not provided, creates a new auditor.\n */\nexport async function processEndpointAudits<\n TServices extends Service[] = [],\n TSession = unknown,\n TLogger extends Logger = Logger,\n OutSchema extends StandardSchemaV1 | undefined = undefined,\n TAuditStorage extends AuditStorage | undefined = undefined,\n TAuditStorageServiceName extends string = string,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n>(\n endpoint: Endpoint<\n any,\n any,\n any,\n OutSchema,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction\n >,\n response: InferStandardSchema<OutSchema>,\n serviceDiscovery: ServiceDiscovery<any, any>,\n logger: TLogger,\n ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n },\n existingAuditor?: Auditor<TAuditAction>,\n): Promise<void> {\n try {\n const audits = endpoint.audits as MappedAudit<TAuditAction, OutSchema>[];\n\n // If we have an existing auditor (from handler context), we need to flush\n // any manual audits it collected, even if there are no declarative audits\n const hasExistingRecords =\n existingAuditor && existingAuditor.getRecords().length > 0;\n\n // Skip if no declarative audits and no 
existing records to flush\n if (!audits?.length && !hasExistingRecords) {\n logger.debug('No audits to process');\n return;\n }\n\n // If no auditor storage service and we have things to process, warn\n if (!endpoint.auditorStorageService) {\n if (hasExistingRecords || audits?.length) {\n logger.warn('No auditor storage service available');\n }\n return;\n }\n\n // Get or create auditor\n let auditor: Auditor<TAuditAction>;\n\n if (existingAuditor) {\n // Use existing auditor (preserves stored transaction and manual audits)\n auditor = existingAuditor;\n logger.debug('Using existing auditor from handler context');\n } else {\n // Create new auditor (backward compatibility)\n const services = await serviceDiscovery.register([\n endpoint.auditorStorageService,\n ]);\n const storage = services[\n endpoint.auditorStorageService.serviceName\n ] as AuditStorage;\n\n // Extract actor if configured\n let actor: AuditActor = { id: 'system', type: 'system' };\n if (endpoint.actorExtractor) {\n try {\n actor = await (\n endpoint.actorExtractor as ActorExtractor<\n TServices,\n TSession,\n TLogger\n >\n )({\n services: ctx.services as any,\n session: ctx.session,\n header: ctx.header,\n cookie: ctx.cookie,\n logger,\n });\n } catch (error) {\n logger.error(error as Error, 'Failed to extract actor for audits');\n // Continue with system actor\n }\n }\n\n auditor = new DefaultAuditor<TAuditAction>({\n actor,\n storage,\n metadata: {\n endpoint: endpoint.route,\n method: endpoint.method,\n },\n });\n }\n\n // Process each declarative audit\n if (audits?.length) {\n for (const audit of audits) {\n logger.debug({ audit: audit.type }, 'Processing declarative audit');\n\n // Check when condition\n if (audit.when && !audit.when(response as any)) {\n logger.debug(\n { audit: audit.type },\n 'Audit skipped due to when condition',\n );\n continue;\n }\n\n // Extract payload\n const payload = audit.payload(response as any);\n\n // Extract entityId if configured\n const entityId = audit.entityId?.(response as any);\n\n // Record the audit\n auditor.audit(audit.type as any, payload as any, {\n table: audit.table,\n entityId,\n });\n }\n }\n\n // Flush audits to storage\n // Note: If existingAuditor has a stored transaction (via setTransaction),\n // flush() will use it automatically\n const recordCount = auditor.getRecords().length;\n if (recordCount > 0) {\n // Check if auditor has a stored transaction (for logging purposes)\n const trx =\n 'getTransaction' in auditor\n ? 
(auditor as { getTransaction(): unknown }).getTransaction()\n : undefined;\n logger.debug(\n { auditCount: recordCount, hasTransaction: !!trx },\n 'Flushing audits',\n );\n await auditor.flush();\n }\n } catch (error) {\n logger.error(error as Error, 'Failed to process audits');\n // Don't rethrow - audit failures shouldn't fail the request\n }\n}\n\n/**\n * Context for audit-aware handler execution.\n */\nexport interface AuditExecutionContext<\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n> {\n /** The auditor instance for recording audits */\n auditor: Auditor<TAuditAction>;\n /** The audit storage instance */\n storage: AuditStorage;\n}\n\n/**\n * Create audit context for handler execution.\n * Returns the auditor and storage for use in the handler.\n *\n * @param endpoint - The endpoint with audit configuration\n * @param serviceDiscovery - Service discovery for getting audit storage\n * @param logger - Logger for debug/error messages\n * @param ctx - Request context for actor extraction\n * @returns Audit context with auditor and storage, or undefined if not configured\n */\nexport async function createAuditContext<\n TServices extends Service[] = [],\n TSession = unknown,\n TLogger extends Logger = Logger,\n TAuditStorage extends AuditStorage | undefined = undefined,\n TAuditStorageServiceName extends string = string,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n TDatabase = undefined,\n TDatabaseServiceName extends string = string,\n>(\n endpoint: Endpoint<\n any,\n any,\n any,\n any,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction,\n TDatabase,\n TDatabaseServiceName\n >,\n serviceDiscovery: ServiceDiscovery<any, any>,\n logger: TLogger,\n ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n },\n): Promise<AuditExecutionContext<TAuditAction> | undefined> {\n if (!endpoint.auditorStorageService) {\n return undefined;\n }\n\n const services = await serviceDiscovery.register([\n endpoint.auditorStorageService,\n ]);\n const storage = services[\n endpoint.auditorStorageService.serviceName\n ] as AuditStorage;\n\n // Extract actor if configured\n let actor: AuditActor = { id: 'system', type: 'system' };\n if (endpoint.actorExtractor) {\n try {\n actor = await (\n endpoint.actorExtractor as ActorExtractor<TServices, TSession, TLogger>\n )({\n services: ctx.services as any,\n session: ctx.session,\n header: ctx.header,\n cookie: ctx.cookie,\n logger,\n });\n } catch (error) {\n logger.error(error as Error, 'Failed to extract actor for audits');\n }\n }\n\n const auditor = new DefaultAuditor<TAuditAction>({\n actor,\n storage,\n metadata: {\n endpoint: endpoint.route,\n method: endpoint.method,\n },\n });\n\n return { auditor, storage };\n}\n\n/**\n * Options for executeWithAuditTransaction.\n */\nexport interface ExecuteWithAuditTransactionOptions {\n /**\n * Database connection to use for the transaction.\n * If this is already a transaction, it will be reused instead of creating a nested one.\n * If not provided, the storage's internal database is used.\n */\n db?: unknown;\n}\n\n/**\n * Execute a handler with automatic audit transaction support.\n * If the audit storage provides a withTransaction method, wraps execution\n * in a transaction so audits are atomic with handler's database operations.\n *\n * This is database-agnostic - each storage implementation 
provides its own\n * transaction handling based on the underlying database (Kysely, Drizzle, etc.).\n *\n * If the db parameter is provided and is already a transaction, the storage\n * will reuse it instead of creating a nested transaction (similar to\n * packages/db/src/kysely.ts#withTransaction).\n *\n * @param auditContext - The audit context from createAuditContext\n * @param handler - The handler function to execute (receives auditor)\n * @param onComplete - Called after handler with response, to process declarative audits\n * @param options - Optional configuration including database connection\n * @returns The handler result\n */\nexport async function executeWithAuditTransaction<\n T,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n>(\n auditContext: AuditExecutionContext<TAuditAction> | undefined,\n handler: (auditor?: Auditor<TAuditAction>) => Promise<T>,\n onComplete?: (response: T, auditor: Auditor<TAuditAction>) => Promise<void>,\n options?: ExecuteWithAuditTransactionOptions,\n): Promise<T> {\n // No audit context - just run handler\n if (!auditContext) {\n return handler(undefined);\n }\n\n const { auditor, storage } = auditContext;\n\n // Check if storage provides a transaction wrapper\n if (storage.withTransaction) {\n // Wrap in transaction - audits are atomic with handler operations\n // The storage's withTransaction handles setTransaction and flush\n // Pass db so existing transactions are reused\n return storage.withTransaction(\n auditor,\n async () => {\n const response = await handler(auditor);\n\n // Process declarative audits within the transaction\n if (onComplete) {\n await onComplete(response, auditor);\n }\n\n return response;\n },\n options?.db,\n );\n }\n\n // No transaction support - run handler and flush audits after\n const response = await handler(auditor);\n\n if (onComplete) {\n await onComplete(response, auditor);\n }\n\n // Flush audits (no transaction)\n await auditor.flush();\n\n return 
response;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;AA2BA,eAAsB,sBAYpBA,UAcAC,UACAC,kBACAC,QACAC,KAMAC,iBACe;AACf,KAAI;EACF,MAAM,SAAS,SAAS;EAIxB,MAAM,qBACJ,mBAAmB,gBAAgB,YAAY,CAAC,SAAS;AAG3D,OAAK,QAAQ,WAAW,oBAAoB;AAC1C,UAAO,MAAM,uBAAuB;AACpC;EACD;AAGD,OAAK,SAAS,uBAAuB;AACnC,OAAI,sBAAsB,QAAQ,OAChC,QAAO,KAAK,uCAAuC;AAErD;EACD;EAGD,IAAIC;AAEJ,MAAI,iBAAiB;AAEnB,aAAU;AACV,UAAO,MAAM,8CAA8C;EAC5D,OAAM;GAEL,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAC/C,SAAS,qBACV,EAAC;GACF,MAAM,UAAU,SACd,SAAS,sBAAsB;GAIjC,IAAIC,QAAoB;IAAE,IAAI;IAAU,MAAM;GAAU;AACxD,OAAI,SAAS,eACX,KAAI;AACF,YAAQ,MAAM,AACZ,SAAS,eAKT;KACA,UAAU,IAAI;KACd,SAAS,IAAI;KACb,QAAQ,IAAI;KACZ,QAAQ,IAAI;KACZ;IACD,EAAC;GACH,SAAQ,OAAO;AACd,WAAO,MAAM,OAAgB,qCAAqC;GAEnE;AAGH,aAAU,IAAIC,iCAA6B;IACzC;IACA;IACA,UAAU;KACR,UAAU,SAAS;KACnB,QAAQ,SAAS;IAClB;GACF;EACF;AAGD,MAAI,QAAQ,OACV,MAAK,MAAM,SAAS,QAAQ;AAC1B,UAAO,MAAM,EAAE,OAAO,MAAM,KAAM,GAAE,+BAA+B;AAGnE,OAAI,MAAM,SAAS,MAAM,KAAK,SAAgB,EAAE;AAC9C,WAAO,MACL,EAAE,OAAO,MAAM,KAAM,GACrB,sCACD;AACD;GACD;GAGD,MAAM,UAAU,MAAM,QAAQ,SAAgB;GAG9C,MAAM,WAAW,MAAM,WAAW,SAAgB;AAGlD,WAAQ,MAAM,MAAM,MAAa,SAAgB;IAC/C,OAAO,MAAM;IACb;GACD,EAAC;EACH;EAMH,MAAM,cAAc,QAAQ,YAAY,CAAC;AACzC,MAAI,cAAc,GAAG;GAEnB,MAAM,MACJ,oBAAoB,UAChB,AAAC,QAA0C,gBAAgB;AAEjE,UAAO,MACL;IAAE,YAAY;IAAa,kBAAkB;GAAK,GAClD,kBACD;AACD,SAAM,QAAQ,OAAO;EACtB;CACF,SAAQ,OAAO;AACd,SAAO,MAAM,OAAgB,2BAA2B;CAEzD;AACF;;;;;;;;;;;AA2BD,eAAsB,mBAapBC,UAgBAP,kBACAC,QACAC,KAM0D;AAC1D,MAAK,SAAS,sBACZ;CAGF,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAC/C,SAAS,qBACV,EAAC;CACF,MAAM,UAAU,SACd,SAAS,sBAAsB;CAIjC,IAAIG,QAAoB;EAAE,IAAI;EAAU,MAAM;CAAU;AACxD,KAAI,SAAS,eACX,KAAI;AACF,UAAQ,MAAM,AACZ,SAAS,eACT;GACA,UAAU,IAAI;GACd,SAAS,IAAI;GACb,QAAQ,IAAI;GACZ,QAAQ,IAAI;GACZ;EACD,EAAC;CACH,SAAQ,OAAO;AACd,SAAO,MAAM,OAAgB,qCAAqC;CACnE;CAGH,MAAM,UAAU,IAAIC,iCAA6B;EAC/C;EACA;EACA,UAAU;GACR,UAAU,SAAS;GACnB,QAAQ,SAAS;EAClB;CACF;AAED,QAAO;EAAE;EAAS;CAAS;AAC5B;;;;;;;;;;;;;;;;;;;AAgCD,eAAsB,4BAOpBE,cACAC,SACAC,YACAC,SACY;AAEZ,MAAK,aACH,QAAO,eAAkB;CAG3B,MAAM,EAAE,SAAS,SAAS,GAAG;AAG7B,KAAI,QAAQ,gBAIV,QAAO,QAAQ,gBACb,SACA,YAAY;EACV,MAAMC,aAAW,MAAM,QAAQ,QAAQ;AAGvC,MAAI,WACF,OAAM,WAAWA,YAAU,QAAQ;AAGrC,SAAOA;CACR,GACD,SAAS,GACV;CAIH,MAAM,WAAW,MAAM,QAAQ,QAAQ;AAEvC,KAAI,WACF,OAAM,WAAW,UAAU,QAAQ;AAIrC,OAAM,QAAQ,OAAO;AAErB,QAAO;AACR"}
+ {"version":3,"file":"processAudits-CzHkPokQ.cjs","names":["endpoint: Endpoint<\n\t\tany,\n\t\tany,\n\t\tany,\n\t\tOutSchema,\n\t\tTServices,\n\t\tTLogger,\n\t\tTSession,\n\t\tany,\n\t\tany,\n\t\tTAuditStorage,\n\t\tTAuditStorageServiceName,\n\t\tTAuditAction\n\t>","response: InferStandardSchema<OutSchema>","serviceDiscovery: ServiceDiscovery<any, any>","logger: TLogger","ctx: {\n\t\tsession: TSession;\n\t\theader: HeaderFn;\n\t\tcookie: CookieFn;\n\t\tservices: Record<string, unknown>;\n\t}","existingAuditor?: Auditor<TAuditAction>","auditor: Auditor<TAuditAction>","actor: AuditActor","DefaultAuditor","endpoint: Endpoint<\n\t\tany,\n\t\tany,\n\t\tany,\n\t\tany,\n\t\tTServices,\n\t\tTLogger,\n\t\tTSession,\n\t\tany,\n\t\tany,\n\t\tTAuditStorage,\n\t\tTAuditStorageServiceName,\n\t\tTAuditAction,\n\t\tTDatabase,\n\t\tTDatabaseServiceName\n\t>","auditContext: AuditExecutionContext<TAuditAction> | undefined","handler: (auditor?: Auditor<TAuditAction>) => Promise<T>","onComplete?: (response: T, auditor: Auditor<TAuditAction>) => Promise<void>","options?: ExecuteWithAuditTransactionOptions","response"],"sources":["../src/endpoints/processAudits.ts"],"sourcesContent":["import type {\n\tAuditActor,\n\tAuditableAction,\n\tAuditor,\n\tAuditStorage,\n} from '@geekmidas/audit';\nimport { DefaultAuditor } from '@geekmidas/audit';\nimport type { Logger } from '@geekmidas/logger';\nimport type { InferStandardSchema } from '@geekmidas/schema';\nimport type { Service, ServiceDiscovery } from '@geekmidas/services';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\nimport type { ActorExtractor, MappedAudit } from './audit';\nimport type { CookieFn, Endpoint, HeaderFn } from './Endpoint';\n\n/**\n * Process declarative audit definitions after successful endpoint execution.\n * Similar to publishConstructEvents for events.\n *\n * @param endpoint - The endpoint with audit configuration\n * @param response - The handler response to generate audit payloads from\n * @param serviceDiscovery - Service discovery for registering audit storage\n * @param logger - Logger for debug/error messages\n * @param ctx - Request context (session, headers, cookies, services)\n * @param existingAuditor - Optional existing auditor instance (e.g., from handler context).\n * If provided, uses this auditor (with its stored transaction).\n * If not provided, creates a new auditor.\n */\nexport async function processEndpointAudits<\n\tTServices extends Service[] = [],\n\tTSession = unknown,\n\tTLogger extends Logger = Logger,\n\tOutSchema extends StandardSchemaV1 | undefined = undefined,\n\tTAuditStorage extends AuditStorage | undefined = undefined,\n\tTAuditStorageServiceName extends string = string,\n\tTAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n\t\tstring,\n\t\tunknown\n\t>,\n>(\n\tendpoint: Endpoint<\n\t\tany,\n\t\tany,\n\t\tany,\n\t\tOutSchema,\n\t\tTServices,\n\t\tTLogger,\n\t\tTSession,\n\t\tany,\n\t\tany,\n\t\tTAuditStorage,\n\t\tTAuditStorageServiceName,\n\t\tTAuditAction\n\t>,\n\tresponse: InferStandardSchema<OutSchema>,\n\tserviceDiscovery: ServiceDiscovery<any, any>,\n\tlogger: TLogger,\n\tctx: {\n\t\tsession: TSession;\n\t\theader: HeaderFn;\n\t\tcookie: CookieFn;\n\t\tservices: Record<string, unknown>;\n\t},\n\texistingAuditor?: Auditor<TAuditAction>,\n): Promise<void> {\n\ttry {\n\t\tconst audits = endpoint.audits as MappedAudit<TAuditAction, OutSchema>[];\n\n\t\t// If we have an existing auditor (from handler context), we need to flush\n\t\t// any manual audits it collected, 
even if there are no declarative audits\n\t\tconst hasExistingRecords =\n\t\t\texistingAuditor && existingAuditor.getRecords().length > 0;\n\n\t\t// Skip if no declarative audits and no existing records to flush\n\t\tif (!audits?.length && !hasExistingRecords) {\n\t\t\tlogger.debug('No audits to process');\n\t\t\treturn;\n\t\t}\n\n\t\t// If no auditor storage service and we have things to process, warn\n\t\tif (!endpoint.auditorStorageService) {\n\t\t\tif (hasExistingRecords || audits?.length) {\n\t\t\t\tlogger.warn('No auditor storage service available');\n\t\t\t}\n\t\t\treturn;\n\t\t}\n\n\t\t// Get or create auditor\n\t\tlet auditor: Auditor<TAuditAction>;\n\n\t\tif (existingAuditor) {\n\t\t\t// Use existing auditor (preserves stored transaction and manual audits)\n\t\t\tauditor = existingAuditor;\n\t\t\tlogger.debug('Using existing auditor from handler context');\n\t\t} else {\n\t\t\t// Create new auditor (backward compatibility)\n\t\t\tconst services = await serviceDiscovery.register([\n\t\t\t\tendpoint.auditorStorageService,\n\t\t\t]);\n\t\t\tconst storage = services[\n\t\t\t\tendpoint.auditorStorageService.serviceName\n\t\t\t] as AuditStorage;\n\n\t\t\t// Extract actor if configured\n\t\t\tlet actor: AuditActor = { id: 'system', type: 'system' };\n\t\t\tif (endpoint.actorExtractor) {\n\t\t\t\ttry {\n\t\t\t\t\tactor = await (\n\t\t\t\t\t\tendpoint.actorExtractor as ActorExtractor<\n\t\t\t\t\t\t\tTServices,\n\t\t\t\t\t\t\tTSession,\n\t\t\t\t\t\t\tTLogger\n\t\t\t\t\t\t>\n\t\t\t\t\t)({\n\t\t\t\t\t\tservices: ctx.services as any,\n\t\t\t\t\t\tsession: ctx.session,\n\t\t\t\t\t\theader: ctx.header,\n\t\t\t\t\t\tcookie: ctx.cookie,\n\t\t\t\t\t\tlogger,\n\t\t\t\t\t});\n\t\t\t\t} catch (error) {\n\t\t\t\t\tlogger.error(error as Error, 'Failed to extract actor for audits');\n\t\t\t\t\t// Continue with system actor\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tauditor = new DefaultAuditor<TAuditAction>({\n\t\t\t\tactor,\n\t\t\t\tstorage,\n\t\t\t\tmetadata: {\n\t\t\t\t\tendpoint: endpoint.route,\n\t\t\t\t\tmethod: endpoint.method,\n\t\t\t\t},\n\t\t\t});\n\t\t}\n\n\t\t// Process each declarative audit\n\t\tif (audits?.length) {\n\t\t\tfor (const audit of audits) {\n\t\t\t\tlogger.debug({ audit: audit.type }, 'Processing declarative audit');\n\n\t\t\t\t// Check when condition\n\t\t\t\tif (audit.when && !audit.when(response as any)) {\n\t\t\t\t\tlogger.debug(\n\t\t\t\t\t\t{ audit: audit.type },\n\t\t\t\t\t\t'Audit skipped due to when condition',\n\t\t\t\t\t);\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\n\t\t\t\t// Extract payload\n\t\t\t\tconst payload = audit.payload(response as any);\n\n\t\t\t\t// Extract entityId if configured\n\t\t\t\tconst entityId = audit.entityId?.(response as any);\n\n\t\t\t\t// Record the audit\n\t\t\t\tauditor.audit(audit.type as any, payload as any, {\n\t\t\t\t\ttable: audit.table,\n\t\t\t\t\tentityId,\n\t\t\t\t});\n\t\t\t}\n\t\t}\n\n\t\t// Flush audits to storage\n\t\t// Note: If existingAuditor has a stored transaction (via setTransaction),\n\t\t// flush() will use it automatically\n\t\tconst recordCount = auditor.getRecords().length;\n\t\tif (recordCount > 0) {\n\t\t\t// Check if auditor has a stored transaction (for logging purposes)\n\t\t\tconst trx =\n\t\t\t\t'getTransaction' in auditor\n\t\t\t\t\t? 
(auditor as { getTransaction(): unknown }).getTransaction()\n\t\t\t\t\t: undefined;\n\t\t\tlogger.debug(\n\t\t\t\t{ auditCount: recordCount, hasTransaction: !!trx },\n\t\t\t\t'Flushing audits',\n\t\t\t);\n\t\t\tawait auditor.flush();\n\t\t}\n\t} catch (error) {\n\t\tlogger.error(error as Error, 'Failed to process audits');\n\t\t// Don't rethrow - audit failures shouldn't fail the request\n\t}\n}\n\n/**\n * Context for audit-aware handler execution.\n */\nexport interface AuditExecutionContext<\n\tTAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n\t\tstring,\n\t\tunknown\n\t>,\n> {\n\t/** The auditor instance for recording audits */\n\tauditor: Auditor<TAuditAction>;\n\t/** The audit storage instance */\n\tstorage: AuditStorage;\n}\n\n/**\n * Create audit context for handler execution.\n * Returns the auditor and storage for use in the handler.\n *\n * @param endpoint - The endpoint with audit configuration\n * @param serviceDiscovery - Service discovery for getting audit storage\n * @param logger - Logger for debug/error messages\n * @param ctx - Request context for actor extraction\n * @returns Audit context with auditor and storage, or undefined if not configured\n */\nexport async function createAuditContext<\n\tTServices extends Service[] = [],\n\tTSession = unknown,\n\tTLogger extends Logger = Logger,\n\tTAuditStorage extends AuditStorage | undefined = undefined,\n\tTAuditStorageServiceName extends string = string,\n\tTAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n\t\tstring,\n\t\tunknown\n\t>,\n\tTDatabase = undefined,\n\tTDatabaseServiceName extends string = string,\n>(\n\tendpoint: Endpoint<\n\t\tany,\n\t\tany,\n\t\tany,\n\t\tany,\n\t\tTServices,\n\t\tTLogger,\n\t\tTSession,\n\t\tany,\n\t\tany,\n\t\tTAuditStorage,\n\t\tTAuditStorageServiceName,\n\t\tTAuditAction,\n\t\tTDatabase,\n\t\tTDatabaseServiceName\n\t>,\n\tserviceDiscovery: ServiceDiscovery<any, any>,\n\tlogger: TLogger,\n\tctx: {\n\t\tsession: TSession;\n\t\theader: HeaderFn;\n\t\tcookie: CookieFn;\n\t\tservices: Record<string, unknown>;\n\t},\n): Promise<AuditExecutionContext<TAuditAction> | undefined> {\n\tif (!endpoint.auditorStorageService) {\n\t\treturn undefined;\n\t}\n\n\tconst services = await serviceDiscovery.register([\n\t\tendpoint.auditorStorageService,\n\t]);\n\tconst storage = services[\n\t\tendpoint.auditorStorageService.serviceName\n\t] as AuditStorage;\n\n\t// Extract actor if configured\n\tlet actor: AuditActor = { id: 'system', type: 'system' };\n\tif (endpoint.actorExtractor) {\n\t\ttry {\n\t\t\tactor = await (\n\t\t\t\tendpoint.actorExtractor as ActorExtractor<TServices, TSession, TLogger>\n\t\t\t)({\n\t\t\t\tservices: ctx.services as any,\n\t\t\t\tsession: ctx.session,\n\t\t\t\theader: ctx.header,\n\t\t\t\tcookie: ctx.cookie,\n\t\t\t\tlogger,\n\t\t\t});\n\t\t} catch (error) {\n\t\t\tlogger.error(error as Error, 'Failed to extract actor for audits');\n\t\t}\n\t}\n\n\tconst auditor = new DefaultAuditor<TAuditAction>({\n\t\tactor,\n\t\tstorage,\n\t\tmetadata: {\n\t\t\tendpoint: endpoint.route,\n\t\t\tmethod: endpoint.method,\n\t\t},\n\t});\n\n\treturn { auditor, storage };\n}\n\n/**\n * Options for executeWithAuditTransaction.\n */\nexport interface ExecuteWithAuditTransactionOptions {\n\t/**\n\t * Database connection to use for the transaction.\n\t * If this is already a transaction, it will be reused instead of creating a nested one.\n\t * If not provided, the storage's internal database is used.\n\t */\n\tdb?: unknown;\n}\n\n/**\n * Execute a handler 
with automatic audit transaction support.\n * If the audit storage provides a withTransaction method, wraps execution\n * in a transaction so audits are atomic with handler's database operations.\n *\n * This is database-agnostic - each storage implementation provides its own\n * transaction handling based on the underlying database (Kysely, Drizzle, etc.).\n *\n * If the db parameter is provided and is already a transaction, the storage\n * will reuse it instead of creating a nested transaction (similar to\n * packages/db/src/kysely.ts#withTransaction).\n *\n * @param auditContext - The audit context from createAuditContext\n * @param handler - The handler function to execute (receives auditor)\n * @param onComplete - Called after handler with response, to process declarative audits\n * @param options - Optional configuration including database connection\n * @returns The handler result\n */\nexport async function executeWithAuditTransaction<\n\tT,\n\tTAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n\t\tstring,\n\t\tunknown\n\t>,\n>(\n\tauditContext: AuditExecutionContext<TAuditAction> | undefined,\n\thandler: (auditor?: Auditor<TAuditAction>) => Promise<T>,\n\tonComplete?: (response: T, auditor: Auditor<TAuditAction>) => Promise<void>,\n\toptions?: ExecuteWithAuditTransactionOptions,\n): Promise<T> {\n\t// No audit context - just run handler\n\tif (!auditContext) {\n\t\treturn handler(undefined);\n\t}\n\n\tconst { auditor, storage } = auditContext;\n\n\t// Check if storage provides a transaction wrapper\n\tif (storage.withTransaction) {\n\t\t// Wrap in transaction - audits are atomic with handler operations\n\t\t// The storage's withTransaction handles setTransaction and flush\n\t\t// Pass db so existing transactions are reused\n\t\treturn storage.withTransaction(\n\t\t\tauditor,\n\t\t\tasync () => {\n\t\t\t\tconst response = await handler(auditor);\n\n\t\t\t\t// Process declarative audits within the transaction\n\t\t\t\tif (onComplete) {\n\t\t\t\t\tawait onComplete(response, auditor);\n\t\t\t\t}\n\n\t\t\t\treturn response;\n\t\t\t},\n\t\t\toptions?.db,\n\t\t);\n\t}\n\n\t// No transaction support - run handler and flush audits after\n\tconst response = await handler(auditor);\n\n\tif (onComplete) {\n\t\tawait onComplete(response, auditor);\n\t}\n\n\t// Flush audits (no transaction)\n\tawait auditor.flush();\n\n\treturn 
response;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;AA2BA,eAAsB,sBAYrBA,UAcAC,UACAC,kBACAC,QACAC,KAMAC,iBACgB;AAChB,KAAI;EACH,MAAM,SAAS,SAAS;EAIxB,MAAM,qBACL,mBAAmB,gBAAgB,YAAY,CAAC,SAAS;AAG1D,OAAK,QAAQ,WAAW,oBAAoB;AAC3C,UAAO,MAAM,uBAAuB;AACpC;EACA;AAGD,OAAK,SAAS,uBAAuB;AACpC,OAAI,sBAAsB,QAAQ,OACjC,QAAO,KAAK,uCAAuC;AAEpD;EACA;EAGD,IAAIC;AAEJ,MAAI,iBAAiB;AAEpB,aAAU;AACV,UAAO,MAAM,8CAA8C;EAC3D,OAAM;GAEN,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAChD,SAAS,qBACT,EAAC;GACF,MAAM,UAAU,SACf,SAAS,sBAAsB;GAIhC,IAAIC,QAAoB;IAAE,IAAI;IAAU,MAAM;GAAU;AACxD,OAAI,SAAS,eACZ,KAAI;AACH,YAAQ,MAAM,AACb,SAAS,eAKR;KACD,UAAU,IAAI;KACd,SAAS,IAAI;KACb,QAAQ,IAAI;KACZ,QAAQ,IAAI;KACZ;IACA,EAAC;GACF,SAAQ,OAAO;AACf,WAAO,MAAM,OAAgB,qCAAqC;GAElE;AAGF,aAAU,IAAIC,iCAA6B;IAC1C;IACA;IACA,UAAU;KACT,UAAU,SAAS;KACnB,QAAQ,SAAS;IACjB;GACD;EACD;AAGD,MAAI,QAAQ,OACX,MAAK,MAAM,SAAS,QAAQ;AAC3B,UAAO,MAAM,EAAE,OAAO,MAAM,KAAM,GAAE,+BAA+B;AAGnE,OAAI,MAAM,SAAS,MAAM,KAAK,SAAgB,EAAE;AAC/C,WAAO,MACN,EAAE,OAAO,MAAM,KAAM,GACrB,sCACA;AACD;GACA;GAGD,MAAM,UAAU,MAAM,QAAQ,SAAgB;GAG9C,MAAM,WAAW,MAAM,WAAW,SAAgB;AAGlD,WAAQ,MAAM,MAAM,MAAa,SAAgB;IAChD,OAAO,MAAM;IACb;GACA,EAAC;EACF;EAMF,MAAM,cAAc,QAAQ,YAAY,CAAC;AACzC,MAAI,cAAc,GAAG;GAEpB,MAAM,MACL,oBAAoB,UACjB,AAAC,QAA0C,gBAAgB;AAE/D,UAAO,MACN;IAAE,YAAY;IAAa,kBAAkB;GAAK,GAClD,kBACA;AACD,SAAM,QAAQ,OAAO;EACrB;CACD,SAAQ,OAAO;AACf,SAAO,MAAM,OAAgB,2BAA2B;CAExD;AACD;;;;;;;;;;;AA2BD,eAAsB,mBAarBC,UAgBAP,kBACAC,QACAC,KAM2D;AAC3D,MAAK,SAAS,sBACb;CAGD,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAChD,SAAS,qBACT,EAAC;CACF,MAAM,UAAU,SACf,SAAS,sBAAsB;CAIhC,IAAIG,QAAoB;EAAE,IAAI;EAAU,MAAM;CAAU;AACxD,KAAI,SAAS,eACZ,KAAI;AACH,UAAQ,MAAM,AACb,SAAS,eACR;GACD,UAAU,IAAI;GACd,SAAS,IAAI;GACb,QAAQ,IAAI;GACZ,QAAQ,IAAI;GACZ;EACA,EAAC;CACF,SAAQ,OAAO;AACf,SAAO,MAAM,OAAgB,qCAAqC;CAClE;CAGF,MAAM,UAAU,IAAIC,iCAA6B;EAChD;EACA;EACA,UAAU;GACT,UAAU,SAAS;GACnB,QAAQ,SAAS;EACjB;CACD;AAED,QAAO;EAAE;EAAS;CAAS;AAC3B;;;;;;;;;;;;;;;;;;;AAgCD,eAAsB,4BAOrBE,cACAC,SACAC,YACAC,SACa;AAEb,MAAK,aACJ,QAAO,eAAkB;CAG1B,MAAM,EAAE,SAAS,SAAS,GAAG;AAG7B,KAAI,QAAQ,gBAIX,QAAO,QAAQ,gBACd,SACA,YAAY;EACX,MAAMC,aAAW,MAAM,QAAQ,QAAQ;AAGvC,MAAI,WACH,OAAM,WAAWA,YAAU,QAAQ;AAGpC,SAAOA;CACP,GACD,SAAS,GACT;CAIF,MAAM,WAAW,MAAM,QAAQ,QAAQ;AAEvC,KAAI,WACH,OAAM,WAAW,UAAU,QAAQ;AAIpC,OAAM,QAAQ,OAAO;AAErB,QAAO;AACP"}
@@ -1 +1 @@
- {"version":3,"file":"processAudits-Dj8UGqcW.mjs","names":["endpoint: Endpoint<\n any,\n any,\n any,\n OutSchema,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction\n >","response: InferStandardSchema<OutSchema>","serviceDiscovery: ServiceDiscovery<any, any>","logger: TLogger","ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n }","existingAuditor?: Auditor<TAuditAction>","auditor: Auditor<TAuditAction>","actor: AuditActor","endpoint: Endpoint<\n any,\n any,\n any,\n any,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction,\n TDatabase,\n TDatabaseServiceName\n >","auditContext: AuditExecutionContext<TAuditAction> | undefined","handler: (auditor?: Auditor<TAuditAction>) => Promise<T>","onComplete?: (response: T, auditor: Auditor<TAuditAction>) => Promise<void>","options?: ExecuteWithAuditTransactionOptions","response"],"sources":["../src/endpoints/processAudits.ts"],"sourcesContent":["import type {\n AuditActor,\n AuditStorage,\n AuditableAction,\n Auditor,\n} from '@geekmidas/audit';\nimport { DefaultAuditor } from '@geekmidas/audit';\nimport type { Logger } from '@geekmidas/logger';\nimport type { InferStandardSchema } from '@geekmidas/schema';\nimport type { Service, ServiceDiscovery } from '@geekmidas/services';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\nimport type { CookieFn, Endpoint, HeaderFn } from './Endpoint';\nimport type { ActorExtractor, MappedAudit } from './audit';\n\n/**\n * Process declarative audit definitions after successful endpoint execution.\n * Similar to publishConstructEvents for events.\n *\n * @param endpoint - The endpoint with audit configuration\n * @param response - The handler response to generate audit payloads from\n * @param serviceDiscovery - Service discovery for registering audit storage\n * @param logger - Logger for debug/error messages\n * @param ctx - Request context (session, headers, cookies, services)\n * @param existingAuditor - Optional existing auditor instance (e.g., from handler context).\n * If provided, uses this auditor (with its stored transaction).\n * If not provided, creates a new auditor.\n */\nexport async function processEndpointAudits<\n TServices extends Service[] = [],\n TSession = unknown,\n TLogger extends Logger = Logger,\n OutSchema extends StandardSchemaV1 | undefined = undefined,\n TAuditStorage extends AuditStorage | undefined = undefined,\n TAuditStorageServiceName extends string = string,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n>(\n endpoint: Endpoint<\n any,\n any,\n any,\n OutSchema,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction\n >,\n response: InferStandardSchema<OutSchema>,\n serviceDiscovery: ServiceDiscovery<any, any>,\n logger: TLogger,\n ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n },\n existingAuditor?: Auditor<TAuditAction>,\n): Promise<void> {\n try {\n const audits = endpoint.audits as MappedAudit<TAuditAction, OutSchema>[];\n\n // If we have an existing auditor (from handler context), we need to flush\n // any manual audits it collected, even if there are no declarative audits\n const hasExistingRecords =\n existingAuditor && existingAuditor.getRecords().length > 0;\n\n // Skip if no declarative audits and no existing records 
to flush\n if (!audits?.length && !hasExistingRecords) {\n logger.debug('No audits to process');\n return;\n }\n\n // If no auditor storage service and we have things to process, warn\n if (!endpoint.auditorStorageService) {\n if (hasExistingRecords || audits?.length) {\n logger.warn('No auditor storage service available');\n }\n return;\n }\n\n // Get or create auditor\n let auditor: Auditor<TAuditAction>;\n\n if (existingAuditor) {\n // Use existing auditor (preserves stored transaction and manual audits)\n auditor = existingAuditor;\n logger.debug('Using existing auditor from handler context');\n } else {\n // Create new auditor (backward compatibility)\n const services = await serviceDiscovery.register([\n endpoint.auditorStorageService,\n ]);\n const storage = services[\n endpoint.auditorStorageService.serviceName\n ] as AuditStorage;\n\n // Extract actor if configured\n let actor: AuditActor = { id: 'system', type: 'system' };\n if (endpoint.actorExtractor) {\n try {\n actor = await (\n endpoint.actorExtractor as ActorExtractor<\n TServices,\n TSession,\n TLogger\n >\n )({\n services: ctx.services as any,\n session: ctx.session,\n header: ctx.header,\n cookie: ctx.cookie,\n logger,\n });\n } catch (error) {\n logger.error(error as Error, 'Failed to extract actor for audits');\n // Continue with system actor\n }\n }\n\n auditor = new DefaultAuditor<TAuditAction>({\n actor,\n storage,\n metadata: {\n endpoint: endpoint.route,\n method: endpoint.method,\n },\n });\n }\n\n // Process each declarative audit\n if (audits?.length) {\n for (const audit of audits) {\n logger.debug({ audit: audit.type }, 'Processing declarative audit');\n\n // Check when condition\n if (audit.when && !audit.when(response as any)) {\n logger.debug(\n { audit: audit.type },\n 'Audit skipped due to when condition',\n );\n continue;\n }\n\n // Extract payload\n const payload = audit.payload(response as any);\n\n // Extract entityId if configured\n const entityId = audit.entityId?.(response as any);\n\n // Record the audit\n auditor.audit(audit.type as any, payload as any, {\n table: audit.table,\n entityId,\n });\n }\n }\n\n // Flush audits to storage\n // Note: If existingAuditor has a stored transaction (via setTransaction),\n // flush() will use it automatically\n const recordCount = auditor.getRecords().length;\n if (recordCount > 0) {\n // Check if auditor has a stored transaction (for logging purposes)\n const trx =\n 'getTransaction' in auditor\n ? 
(auditor as { getTransaction(): unknown }).getTransaction()\n : undefined;\n logger.debug(\n { auditCount: recordCount, hasTransaction: !!trx },\n 'Flushing audits',\n );\n await auditor.flush();\n }\n } catch (error) {\n logger.error(error as Error, 'Failed to process audits');\n // Don't rethrow - audit failures shouldn't fail the request\n }\n}\n\n/**\n * Context for audit-aware handler execution.\n */\nexport interface AuditExecutionContext<\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n> {\n /** The auditor instance for recording audits */\n auditor: Auditor<TAuditAction>;\n /** The audit storage instance */\n storage: AuditStorage;\n}\n\n/**\n * Create audit context for handler execution.\n * Returns the auditor and storage for use in the handler.\n *\n * @param endpoint - The endpoint with audit configuration\n * @param serviceDiscovery - Service discovery for getting audit storage\n * @param logger - Logger for debug/error messages\n * @param ctx - Request context for actor extraction\n * @returns Audit context with auditor and storage, or undefined if not configured\n */\nexport async function createAuditContext<\n TServices extends Service[] = [],\n TSession = unknown,\n TLogger extends Logger = Logger,\n TAuditStorage extends AuditStorage | undefined = undefined,\n TAuditStorageServiceName extends string = string,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n TDatabase = undefined,\n TDatabaseServiceName extends string = string,\n>(\n endpoint: Endpoint<\n any,\n any,\n any,\n any,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction,\n TDatabase,\n TDatabaseServiceName\n >,\n serviceDiscovery: ServiceDiscovery<any, any>,\n logger: TLogger,\n ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n },\n): Promise<AuditExecutionContext<TAuditAction> | undefined> {\n if (!endpoint.auditorStorageService) {\n return undefined;\n }\n\n const services = await serviceDiscovery.register([\n endpoint.auditorStorageService,\n ]);\n const storage = services[\n endpoint.auditorStorageService.serviceName\n ] as AuditStorage;\n\n // Extract actor if configured\n let actor: AuditActor = { id: 'system', type: 'system' };\n if (endpoint.actorExtractor) {\n try {\n actor = await (\n endpoint.actorExtractor as ActorExtractor<TServices, TSession, TLogger>\n )({\n services: ctx.services as any,\n session: ctx.session,\n header: ctx.header,\n cookie: ctx.cookie,\n logger,\n });\n } catch (error) {\n logger.error(error as Error, 'Failed to extract actor for audits');\n }\n }\n\n const auditor = new DefaultAuditor<TAuditAction>({\n actor,\n storage,\n metadata: {\n endpoint: endpoint.route,\n method: endpoint.method,\n },\n });\n\n return { auditor, storage };\n}\n\n/**\n * Options for executeWithAuditTransaction.\n */\nexport interface ExecuteWithAuditTransactionOptions {\n /**\n * Database connection to use for the transaction.\n * If this is already a transaction, it will be reused instead of creating a nested one.\n * If not provided, the storage's internal database is used.\n */\n db?: unknown;\n}\n\n/**\n * Execute a handler with automatic audit transaction support.\n * If the audit storage provides a withTransaction method, wraps execution\n * in a transaction so audits are atomic with handler's database operations.\n *\n * This is database-agnostic - each storage implementation 
provides its own\n * transaction handling based on the underlying database (Kysely, Drizzle, etc.).\n *\n * If the db parameter is provided and is already a transaction, the storage\n * will reuse it instead of creating a nested transaction (similar to\n * packages/db/src/kysely.ts#withTransaction).\n *\n * @param auditContext - The audit context from createAuditContext\n * @param handler - The handler function to execute (receives auditor)\n * @param onComplete - Called after handler with response, to process declarative audits\n * @param options - Optional configuration including database connection\n * @returns The handler result\n */\nexport async function executeWithAuditTransaction<\n T,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n>(\n auditContext: AuditExecutionContext<TAuditAction> | undefined,\n handler: (auditor?: Auditor<TAuditAction>) => Promise<T>,\n onComplete?: (response: T, auditor: Auditor<TAuditAction>) => Promise<void>,\n options?: ExecuteWithAuditTransactionOptions,\n): Promise<T> {\n // No audit context - just run handler\n if (!auditContext) {\n return handler(undefined);\n }\n\n const { auditor, storage } = auditContext;\n\n // Check if storage provides a transaction wrapper\n if (storage.withTransaction) {\n // Wrap in transaction - audits are atomic with handler operations\n // The storage's withTransaction handles setTransaction and flush\n // Pass db so existing transactions are reused\n return storage.withTransaction(\n auditor,\n async () => {\n const response = await handler(auditor);\n\n // Process declarative audits within the transaction\n if (onComplete) {\n await onComplete(response, auditor);\n }\n\n return response;\n },\n options?.db,\n );\n }\n\n // No transaction support - run handler and flush audits after\n const response = await handler(auditor);\n\n if (onComplete) {\n await onComplete(response, auditor);\n }\n\n // Flush audits (no transaction)\n await auditor.flush();\n\n return 
response;\n}\n"],"mappings":";;;;;;;;;;;;;;;;AA2BA,eAAsB,sBAYpBA,UAcAC,UACAC,kBACAC,QACAC,KAMAC,iBACe;AACf,KAAI;EACF,MAAM,SAAS,SAAS;EAIxB,MAAM,qBACJ,mBAAmB,gBAAgB,YAAY,CAAC,SAAS;AAG3D,OAAK,QAAQ,WAAW,oBAAoB;AAC1C,UAAO,MAAM,uBAAuB;AACpC;EACD;AAGD,OAAK,SAAS,uBAAuB;AACnC,OAAI,sBAAsB,QAAQ,OAChC,QAAO,KAAK,uCAAuC;AAErD;EACD;EAGD,IAAIC;AAEJ,MAAI,iBAAiB;AAEnB,aAAU;AACV,UAAO,MAAM,8CAA8C;EAC5D,OAAM;GAEL,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAC/C,SAAS,qBACV,EAAC;GACF,MAAM,UAAU,SACd,SAAS,sBAAsB;GAIjC,IAAIC,QAAoB;IAAE,IAAI;IAAU,MAAM;GAAU;AACxD,OAAI,SAAS,eACX,KAAI;AACF,YAAQ,MAAM,AACZ,SAAS,eAKT;KACA,UAAU,IAAI;KACd,SAAS,IAAI;KACb,QAAQ,IAAI;KACZ,QAAQ,IAAI;KACZ;IACD,EAAC;GACH,SAAQ,OAAO;AACd,WAAO,MAAM,OAAgB,qCAAqC;GAEnE;AAGH,aAAU,IAAI,eAA6B;IACzC;IACA;IACA,UAAU;KACR,UAAU,SAAS;KACnB,QAAQ,SAAS;IAClB;GACF;EACF;AAGD,MAAI,QAAQ,OACV,MAAK,MAAM,SAAS,QAAQ;AAC1B,UAAO,MAAM,EAAE,OAAO,MAAM,KAAM,GAAE,+BAA+B;AAGnE,OAAI,MAAM,SAAS,MAAM,KAAK,SAAgB,EAAE;AAC9C,WAAO,MACL,EAAE,OAAO,MAAM,KAAM,GACrB,sCACD;AACD;GACD;GAGD,MAAM,UAAU,MAAM,QAAQ,SAAgB;GAG9C,MAAM,WAAW,MAAM,WAAW,SAAgB;AAGlD,WAAQ,MAAM,MAAM,MAAa,SAAgB;IAC/C,OAAO,MAAM;IACb;GACD,EAAC;EACH;EAMH,MAAM,cAAc,QAAQ,YAAY,CAAC;AACzC,MAAI,cAAc,GAAG;GAEnB,MAAM,MACJ,oBAAoB,UAChB,AAAC,QAA0C,gBAAgB;AAEjE,UAAO,MACL;IAAE,YAAY;IAAa,kBAAkB;GAAK,GAClD,kBACD;AACD,SAAM,QAAQ,OAAO;EACtB;CACF,SAAQ,OAAO;AACd,SAAO,MAAM,OAAgB,2BAA2B;CAEzD;AACF;;;;;;;;;;;AA2BD,eAAsB,mBAapBC,UAgBAN,kBACAC,QACAC,KAM0D;AAC1D,MAAK,SAAS,sBACZ;CAGF,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAC/C,SAAS,qBACV,EAAC;CACF,MAAM,UAAU,SACd,SAAS,sBAAsB;CAIjC,IAAIG,QAAoB;EAAE,IAAI;EAAU,MAAM;CAAU;AACxD,KAAI,SAAS,eACX,KAAI;AACF,UAAQ,MAAM,AACZ,SAAS,eACT;GACA,UAAU,IAAI;GACd,SAAS,IAAI;GACb,QAAQ,IAAI;GACZ,QAAQ,IAAI;GACZ;EACD,EAAC;CACH,SAAQ,OAAO;AACd,SAAO,MAAM,OAAgB,qCAAqC;CACnE;CAGH,MAAM,UAAU,IAAI,eAA6B;EAC/C;EACA;EACA,UAAU;GACR,UAAU,SAAS;GACnB,QAAQ,SAAS;EAClB;CACF;AAED,QAAO;EAAE;EAAS;CAAS;AAC5B;;;;;;;;;;;;;;;;;;;AAgCD,eAAsB,4BAOpBE,cACAC,SACAC,YACAC,SACY;AAEZ,MAAK,aACH,QAAO,eAAkB;CAG3B,MAAM,EAAE,SAAS,SAAS,GAAG;AAG7B,KAAI,QAAQ,gBAIV,QAAO,QAAQ,gBACb,SACA,YAAY;EACV,MAAMC,aAAW,MAAM,QAAQ,QAAQ;AAGvC,MAAI,WACF,OAAM,WAAWA,YAAU,QAAQ;AAGrC,SAAOA;CACR,GACD,SAAS,GACV;CAIH,MAAM,WAAW,MAAM,QAAQ,QAAQ;AAEvC,KAAI,WACF,OAAM,WAAW,UAAU,QAAQ;AAIrC,OAAM,QAAQ,OAAO;AAErB,QAAO;AACR"}
+ {"version":3,"file":"processAudits-Dj8UGqcW.mjs","names":["endpoint: Endpoint<\n\t\tany,\n\t\tany,\n\t\tany,\n\t\tOutSchema,\n\t\tTServices,\n\t\tTLogger,\n\t\tTSession,\n\t\tany,\n\t\tany,\n\t\tTAuditStorage,\n\t\tTAuditStorageServiceName,\n\t\tTAuditAction\n\t>","response: InferStandardSchema<OutSchema>","serviceDiscovery: ServiceDiscovery<any, any>","logger: TLogger","ctx: {\n\t\tsession: TSession;\n\t\theader: HeaderFn;\n\t\tcookie: CookieFn;\n\t\tservices: Record<string, unknown>;\n\t}","existingAuditor?: Auditor<TAuditAction>","auditor: Auditor<TAuditAction>","actor: AuditActor","endpoint: Endpoint<\n\t\tany,\n\t\tany,\n\t\tany,\n\t\tany,\n\t\tTServices,\n\t\tTLogger,\n\t\tTSession,\n\t\tany,\n\t\tany,\n\t\tTAuditStorage,\n\t\tTAuditStorageServiceName,\n\t\tTAuditAction,\n\t\tTDatabase,\n\t\tTDatabaseServiceName\n\t>","auditContext: AuditExecutionContext<TAuditAction> | undefined","handler: (auditor?: Auditor<TAuditAction>) => Promise<T>","onComplete?: (response: T, auditor: Auditor<TAuditAction>) => Promise<void>","options?: ExecuteWithAuditTransactionOptions","response"],"sources":["../src/endpoints/processAudits.ts"],"sourcesContent":["import type {\n\tAuditActor,\n\tAuditableAction,\n\tAuditor,\n\tAuditStorage,\n} from '@geekmidas/audit';\nimport { DefaultAuditor } from '@geekmidas/audit';\nimport type { Logger } from '@geekmidas/logger';\nimport type { InferStandardSchema } from '@geekmidas/schema';\nimport type { Service, ServiceDiscovery } from '@geekmidas/services';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\nimport type { ActorExtractor, MappedAudit } from './audit';\nimport type { CookieFn, Endpoint, HeaderFn } from './Endpoint';\n\n/**\n * Process declarative audit definitions after successful endpoint execution.\n * Similar to publishConstructEvents for events.\n *\n * @param endpoint - The endpoint with audit configuration\n * @param response - The handler response to generate audit payloads from\n * @param serviceDiscovery - Service discovery for registering audit storage\n * @param logger - Logger for debug/error messages\n * @param ctx - Request context (session, headers, cookies, services)\n * @param existingAuditor - Optional existing auditor instance (e.g., from handler context).\n * If provided, uses this auditor (with its stored transaction).\n * If not provided, creates a new auditor.\n */\nexport async function processEndpointAudits<\n\tTServices extends Service[] = [],\n\tTSession = unknown,\n\tTLogger extends Logger = Logger,\n\tOutSchema extends StandardSchemaV1 | undefined = undefined,\n\tTAuditStorage extends AuditStorage | undefined = undefined,\n\tTAuditStorageServiceName extends string = string,\n\tTAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n\t\tstring,\n\t\tunknown\n\t>,\n>(\n\tendpoint: Endpoint<\n\t\tany,\n\t\tany,\n\t\tany,\n\t\tOutSchema,\n\t\tTServices,\n\t\tTLogger,\n\t\tTSession,\n\t\tany,\n\t\tany,\n\t\tTAuditStorage,\n\t\tTAuditStorageServiceName,\n\t\tTAuditAction\n\t>,\n\tresponse: InferStandardSchema<OutSchema>,\n\tserviceDiscovery: ServiceDiscovery<any, any>,\n\tlogger: TLogger,\n\tctx: {\n\t\tsession: TSession;\n\t\theader: HeaderFn;\n\t\tcookie: CookieFn;\n\t\tservices: Record<string, unknown>;\n\t},\n\texistingAuditor?: Auditor<TAuditAction>,\n): Promise<void> {\n\ttry {\n\t\tconst audits = endpoint.audits as MappedAudit<TAuditAction, OutSchema>[];\n\n\t\t// If we have an existing auditor (from handler context), we need to flush\n\t\t// any manual audits it collected, even if there 
are no declarative audits\n\t\tconst hasExistingRecords =\n\t\t\texistingAuditor && existingAuditor.getRecords().length > 0;\n\n\t\t// Skip if no declarative audits and no existing records to flush\n\t\tif (!audits?.length && !hasExistingRecords) {\n\t\t\tlogger.debug('No audits to process');\n\t\t\treturn;\n\t\t}\n\n\t\t// If no auditor storage service and we have things to process, warn\n\t\tif (!endpoint.auditorStorageService) {\n\t\t\tif (hasExistingRecords || audits?.length) {\n\t\t\t\tlogger.warn('No auditor storage service available');\n\t\t\t}\n\t\t\treturn;\n\t\t}\n\n\t\t// Get or create auditor\n\t\tlet auditor: Auditor<TAuditAction>;\n\n\t\tif (existingAuditor) {\n\t\t\t// Use existing auditor (preserves stored transaction and manual audits)\n\t\t\tauditor = existingAuditor;\n\t\t\tlogger.debug('Using existing auditor from handler context');\n\t\t} else {\n\t\t\t// Create new auditor (backward compatibility)\n\t\t\tconst services = await serviceDiscovery.register([\n\t\t\t\tendpoint.auditorStorageService,\n\t\t\t]);\n\t\t\tconst storage = services[\n\t\t\t\tendpoint.auditorStorageService.serviceName\n\t\t\t] as AuditStorage;\n\n\t\t\t// Extract actor if configured\n\t\t\tlet actor: AuditActor = { id: 'system', type: 'system' };\n\t\t\tif (endpoint.actorExtractor) {\n\t\t\t\ttry {\n\t\t\t\t\tactor = await (\n\t\t\t\t\t\tendpoint.actorExtractor as ActorExtractor<\n\t\t\t\t\t\t\tTServices,\n\t\t\t\t\t\t\tTSession,\n\t\t\t\t\t\t\tTLogger\n\t\t\t\t\t\t>\n\t\t\t\t\t)({\n\t\t\t\t\t\tservices: ctx.services as any,\n\t\t\t\t\t\tsession: ctx.session,\n\t\t\t\t\t\theader: ctx.header,\n\t\t\t\t\t\tcookie: ctx.cookie,\n\t\t\t\t\t\tlogger,\n\t\t\t\t\t});\n\t\t\t\t} catch (error) {\n\t\t\t\t\tlogger.error(error as Error, 'Failed to extract actor for audits');\n\t\t\t\t\t// Continue with system actor\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tauditor = new DefaultAuditor<TAuditAction>({\n\t\t\t\tactor,\n\t\t\t\tstorage,\n\t\t\t\tmetadata: {\n\t\t\t\t\tendpoint: endpoint.route,\n\t\t\t\t\tmethod: endpoint.method,\n\t\t\t\t},\n\t\t\t});\n\t\t}\n\n\t\t// Process each declarative audit\n\t\tif (audits?.length) {\n\t\t\tfor (const audit of audits) {\n\t\t\t\tlogger.debug({ audit: audit.type }, 'Processing declarative audit');\n\n\t\t\t\t// Check when condition\n\t\t\t\tif (audit.when && !audit.when(response as any)) {\n\t\t\t\t\tlogger.debug(\n\t\t\t\t\t\t{ audit: audit.type },\n\t\t\t\t\t\t'Audit skipped due to when condition',\n\t\t\t\t\t);\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\n\t\t\t\t// Extract payload\n\t\t\t\tconst payload = audit.payload(response as any);\n\n\t\t\t\t// Extract entityId if configured\n\t\t\t\tconst entityId = audit.entityId?.(response as any);\n\n\t\t\t\t// Record the audit\n\t\t\t\tauditor.audit(audit.type as any, payload as any, {\n\t\t\t\t\ttable: audit.table,\n\t\t\t\t\tentityId,\n\t\t\t\t});\n\t\t\t}\n\t\t}\n\n\t\t// Flush audits to storage\n\t\t// Note: If existingAuditor has a stored transaction (via setTransaction),\n\t\t// flush() will use it automatically\n\t\tconst recordCount = auditor.getRecords().length;\n\t\tif (recordCount > 0) {\n\t\t\t// Check if auditor has a stored transaction (for logging purposes)\n\t\t\tconst trx =\n\t\t\t\t'getTransaction' in auditor\n\t\t\t\t\t? 
(auditor as { getTransaction(): unknown }).getTransaction()\n\t\t\t\t\t: undefined;\n\t\t\tlogger.debug(\n\t\t\t\t{ auditCount: recordCount, hasTransaction: !!trx },\n\t\t\t\t'Flushing audits',\n\t\t\t);\n\t\t\tawait auditor.flush();\n\t\t}\n\t} catch (error) {\n\t\tlogger.error(error as Error, 'Failed to process audits');\n\t\t// Don't rethrow - audit failures shouldn't fail the request\n\t}\n}\n\n/**\n * Context for audit-aware handler execution.\n */\nexport interface AuditExecutionContext<\n\tTAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n\t\tstring,\n\t\tunknown\n\t>,\n> {\n\t/** The auditor instance for recording audits */\n\tauditor: Auditor<TAuditAction>;\n\t/** The audit storage instance */\n\tstorage: AuditStorage;\n}\n\n/**\n * Create audit context for handler execution.\n * Returns the auditor and storage for use in the handler.\n *\n * @param endpoint - The endpoint with audit configuration\n * @param serviceDiscovery - Service discovery for getting audit storage\n * @param logger - Logger for debug/error messages\n * @param ctx - Request context for actor extraction\n * @returns Audit context with auditor and storage, or undefined if not configured\n */\nexport async function createAuditContext<\n\tTServices extends Service[] = [],\n\tTSession = unknown,\n\tTLogger extends Logger = Logger,\n\tTAuditStorage extends AuditStorage | undefined = undefined,\n\tTAuditStorageServiceName extends string = string,\n\tTAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n\t\tstring,\n\t\tunknown\n\t>,\n\tTDatabase = undefined,\n\tTDatabaseServiceName extends string = string,\n>(\n\tendpoint: Endpoint<\n\t\tany,\n\t\tany,\n\t\tany,\n\t\tany,\n\t\tTServices,\n\t\tTLogger,\n\t\tTSession,\n\t\tany,\n\t\tany,\n\t\tTAuditStorage,\n\t\tTAuditStorageServiceName,\n\t\tTAuditAction,\n\t\tTDatabase,\n\t\tTDatabaseServiceName\n\t>,\n\tserviceDiscovery: ServiceDiscovery<any, any>,\n\tlogger: TLogger,\n\tctx: {\n\t\tsession: TSession;\n\t\theader: HeaderFn;\n\t\tcookie: CookieFn;\n\t\tservices: Record<string, unknown>;\n\t},\n): Promise<AuditExecutionContext<TAuditAction> | undefined> {\n\tif (!endpoint.auditorStorageService) {\n\t\treturn undefined;\n\t}\n\n\tconst services = await serviceDiscovery.register([\n\t\tendpoint.auditorStorageService,\n\t]);\n\tconst storage = services[\n\t\tendpoint.auditorStorageService.serviceName\n\t] as AuditStorage;\n\n\t// Extract actor if configured\n\tlet actor: AuditActor = { id: 'system', type: 'system' };\n\tif (endpoint.actorExtractor) {\n\t\ttry {\n\t\t\tactor = await (\n\t\t\t\tendpoint.actorExtractor as ActorExtractor<TServices, TSession, TLogger>\n\t\t\t)({\n\t\t\t\tservices: ctx.services as any,\n\t\t\t\tsession: ctx.session,\n\t\t\t\theader: ctx.header,\n\t\t\t\tcookie: ctx.cookie,\n\t\t\t\tlogger,\n\t\t\t});\n\t\t} catch (error) {\n\t\t\tlogger.error(error as Error, 'Failed to extract actor for audits');\n\t\t}\n\t}\n\n\tconst auditor = new DefaultAuditor<TAuditAction>({\n\t\tactor,\n\t\tstorage,\n\t\tmetadata: {\n\t\t\tendpoint: endpoint.route,\n\t\t\tmethod: endpoint.method,\n\t\t},\n\t});\n\n\treturn { auditor, storage };\n}\n\n/**\n * Options for executeWithAuditTransaction.\n */\nexport interface ExecuteWithAuditTransactionOptions {\n\t/**\n\t * Database connection to use for the transaction.\n\t * If this is already a transaction, it will be reused instead of creating a nested one.\n\t * If not provided, the storage's internal database is used.\n\t */\n\tdb?: unknown;\n}\n\n/**\n * Execute a handler 
with automatic audit transaction support.\n * If the audit storage provides a withTransaction method, wraps execution\n * in a transaction so audits are atomic with handler's database operations.\n *\n * This is database-agnostic - each storage implementation provides its own\n * transaction handling based on the underlying database (Kysely, Drizzle, etc.).\n *\n * If the db parameter is provided and is already a transaction, the storage\n * will reuse it instead of creating a nested transaction (similar to\n * packages/db/src/kysely.ts#withTransaction).\n *\n * @param auditContext - The audit context from createAuditContext\n * @param handler - The handler function to execute (receives auditor)\n * @param onComplete - Called after handler with response, to process declarative audits\n * @param options - Optional configuration including database connection\n * @returns The handler result\n */\nexport async function executeWithAuditTransaction<\n\tT,\n\tTAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n\t\tstring,\n\t\tunknown\n\t>,\n>(\n\tauditContext: AuditExecutionContext<TAuditAction> | undefined,\n\thandler: (auditor?: Auditor<TAuditAction>) => Promise<T>,\n\tonComplete?: (response: T, auditor: Auditor<TAuditAction>) => Promise<void>,\n\toptions?: ExecuteWithAuditTransactionOptions,\n): Promise<T> {\n\t// No audit context - just run handler\n\tif (!auditContext) {\n\t\treturn handler(undefined);\n\t}\n\n\tconst { auditor, storage } = auditContext;\n\n\t// Check if storage provides a transaction wrapper\n\tif (storage.withTransaction) {\n\t\t// Wrap in transaction - audits are atomic with handler operations\n\t\t// The storage's withTransaction handles setTransaction and flush\n\t\t// Pass db so existing transactions are reused\n\t\treturn storage.withTransaction(\n\t\t\tauditor,\n\t\t\tasync () => {\n\t\t\t\tconst response = await handler(auditor);\n\n\t\t\t\t// Process declarative audits within the transaction\n\t\t\t\tif (onComplete) {\n\t\t\t\t\tawait onComplete(response, auditor);\n\t\t\t\t}\n\n\t\t\t\treturn response;\n\t\t\t},\n\t\t\toptions?.db,\n\t\t);\n\t}\n\n\t// No transaction support - run handler and flush audits after\n\tconst response = await handler(auditor);\n\n\tif (onComplete) {\n\t\tawait onComplete(response, auditor);\n\t}\n\n\t// Flush audits (no transaction)\n\tawait auditor.flush();\n\n\treturn 
response;\n}\n"],"mappings":";;;;;;;;;;;;;;;;AA2BA,eAAsB,sBAYrBA,UAcAC,UACAC,kBACAC,QACAC,KAMAC,iBACgB;AAChB,KAAI;EACH,MAAM,SAAS,SAAS;EAIxB,MAAM,qBACL,mBAAmB,gBAAgB,YAAY,CAAC,SAAS;AAG1D,OAAK,QAAQ,WAAW,oBAAoB;AAC3C,UAAO,MAAM,uBAAuB;AACpC;EACA;AAGD,OAAK,SAAS,uBAAuB;AACpC,OAAI,sBAAsB,QAAQ,OACjC,QAAO,KAAK,uCAAuC;AAEpD;EACA;EAGD,IAAIC;AAEJ,MAAI,iBAAiB;AAEpB,aAAU;AACV,UAAO,MAAM,8CAA8C;EAC3D,OAAM;GAEN,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAChD,SAAS,qBACT,EAAC;GACF,MAAM,UAAU,SACf,SAAS,sBAAsB;GAIhC,IAAIC,QAAoB;IAAE,IAAI;IAAU,MAAM;GAAU;AACxD,OAAI,SAAS,eACZ,KAAI;AACH,YAAQ,MAAM,AACb,SAAS,eAKR;KACD,UAAU,IAAI;KACd,SAAS,IAAI;KACb,QAAQ,IAAI;KACZ,QAAQ,IAAI;KACZ;IACA,EAAC;GACF,SAAQ,OAAO;AACf,WAAO,MAAM,OAAgB,qCAAqC;GAElE;AAGF,aAAU,IAAI,eAA6B;IAC1C;IACA;IACA,UAAU;KACT,UAAU,SAAS;KACnB,QAAQ,SAAS;IACjB;GACD;EACD;AAGD,MAAI,QAAQ,OACX,MAAK,MAAM,SAAS,QAAQ;AAC3B,UAAO,MAAM,EAAE,OAAO,MAAM,KAAM,GAAE,+BAA+B;AAGnE,OAAI,MAAM,SAAS,MAAM,KAAK,SAAgB,EAAE;AAC/C,WAAO,MACN,EAAE,OAAO,MAAM,KAAM,GACrB,sCACA;AACD;GACA;GAGD,MAAM,UAAU,MAAM,QAAQ,SAAgB;GAG9C,MAAM,WAAW,MAAM,WAAW,SAAgB;AAGlD,WAAQ,MAAM,MAAM,MAAa,SAAgB;IAChD,OAAO,MAAM;IACb;GACA,EAAC;EACF;EAMF,MAAM,cAAc,QAAQ,YAAY,CAAC;AACzC,MAAI,cAAc,GAAG;GAEpB,MAAM,MACL,oBAAoB,UACjB,AAAC,QAA0C,gBAAgB;AAE/D,UAAO,MACN;IAAE,YAAY;IAAa,kBAAkB;GAAK,GAClD,kBACA;AACD,SAAM,QAAQ,OAAO;EACrB;CACD,SAAQ,OAAO;AACf,SAAO,MAAM,OAAgB,2BAA2B;CAExD;AACD;;;;;;;;;;;AA2BD,eAAsB,mBAarBC,UAgBAN,kBACAC,QACAC,KAM2D;AAC3D,MAAK,SAAS,sBACb;CAGD,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAChD,SAAS,qBACT,EAAC;CACF,MAAM,UAAU,SACf,SAAS,sBAAsB;CAIhC,IAAIG,QAAoB;EAAE,IAAI;EAAU,MAAM;CAAU;AACxD,KAAI,SAAS,eACZ,KAAI;AACH,UAAQ,MAAM,AACb,SAAS,eACR;GACD,UAAU,IAAI;GACd,SAAS,IAAI;GACb,QAAQ,IAAI;GACZ,QAAQ,IAAI;GACZ;EACA,EAAC;CACF,SAAQ,OAAO;AACf,SAAO,MAAM,OAAgB,qCAAqC;CAClE;CAGF,MAAM,UAAU,IAAI,eAA6B;EAChD;EACA;EACA,UAAU;GACT,UAAU,SAAS;GACnB,QAAQ,SAAS;EACjB;CACD;AAED,QAAO;EAAE;EAAS;CAAS;AAC3B;;;;;;;;;;;;;;;;;;;AAgCD,eAAsB,4BAOrBE,cACAC,SACAC,YACAC,SACa;AAEb,MAAK,aACJ,QAAO,eAAkB;CAG1B,MAAM,EAAE,SAAS,SAAS,GAAG;AAG7B,KAAI,QAAQ,gBAIX,QAAO,QAAQ,gBACd,SACA,YAAY;EACX,MAAMC,aAAW,MAAM,QAAQ,QAAQ;AAGvC,MAAI,WACH,OAAM,WAAWA,YAAU,QAAQ;AAGpC,SAAOA;CACP,GACD,SAAS,GACT;CAIF,MAAM,WAAW,MAAM,QAAQ,QAAQ;AAEvC,KAAI,WACH,OAAM,WAAW,UAAU,QAAQ;AAIpC,OAAM,QAAQ,OAAO;AAErB,QAAO;AACP"}
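The processAudits source embedded in the maps above implements the endpoint audit flow: createAuditContext resolves the configured audit storage and actor, executeWithAuditTransaction runs the handler inside the storage's transaction when one is available (falling back to running the handler and flushing afterwards), and processEndpointAudits records the declarative audits once the handler succeeds. The sketch below mirrors that transaction-or-fallback step with trimmed stand-in types; the Auditor and AuditStorage interfaces and the runWithAudits helper are illustrative only, not the package's exported API.

// Minimal sketch of the audit execution flow shown in the embedded
// processAudits source. The interfaces are stand-ins, not @geekmidas/audit types.
interface AuditRecord {
	type: string;
	payload: unknown;
}

interface Auditor {
	audit(type: string, payload: unknown): void;
	getRecords(): AuditRecord[];
	flush(): Promise<void>;
}

interface AuditStorage {
	// Storage implementations may optionally provide transactional flushing.
	withTransaction?<T>(
		auditor: Auditor,
		run: () => Promise<T>,
		db?: unknown,
	): Promise<T>;
}

async function runWithAudits<T>(
	auditor: Auditor,
	storage: AuditStorage,
	handler: (auditor: Auditor) => Promise<T>,
	db?: unknown,
): Promise<T> {
	if (storage.withTransaction) {
		// Audits commit or roll back together with the handler's own writes.
		return storage.withTransaction(auditor, () => handler(auditor), db);
	}
	// No transaction support: run the handler, then flush whatever was recorded.
	const response = await handler(auditor);
	if (auditor.getRecords().length > 0) {
		await auditor.flush();
	}
	return response;
}

Keeping the fallback behind a feature check of withTransaction is what the embedded JSDoc describes as database-agnostic: Kysely- or Drizzle-backed storages can supply their own transaction wrapper, while simpler storages still get a best-effort flush.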
@@ -0,0 +1,16 @@
+ import { Construct } from "./Construct-E8QPyHh4.mjs";
+ import { Service, ServiceDiscovery } from "@geekmidas/services";
+ import { AuditStorage } from "@geekmidas/audit";
+ import { EventPublisher, MappedEvent } from "@geekmidas/events";
+ import { Logger } from "@geekmidas/logger";
+ import { StandardSchemaV1 } from "@standard-schema/spec";
+ import { InferStandardSchema } from "@geekmidas/schema";
+
+ //#region src/publisher.d.ts
+ declare function publishEvents<T extends EventPublisher<any> | undefined, OutSchema extends StandardSchemaV1 | undefined = undefined, TServiceName extends string = string, TPublisherService extends Service<TServiceName, T> | undefined = undefined>(logger: Logger, serviceDiscovery: ServiceDiscovery<any, any>, ev: MappedEvent<T, OutSchema>[] | undefined, response: InferStandardSchema<OutSchema>, publisherService: TPublisherService): Promise<void>;
+ declare function publishConstructEvents<T extends EventPublisher<any> | undefined, OutSchema extends StandardSchemaV1 | undefined = undefined, TServiceName extends string = string, TServices extends Service[] = [], TAuditStorageServiceName extends string = string, TAuditStorage extends AuditStorage | undefined = undefined>(construct: Construct<Logger, TServiceName, T, OutSchema, TServices, TAuditStorageServiceName, TAuditStorage>, response: InferStandardSchema<OutSchema>, serviceDiscovery: ServiceDiscovery<any, any>, logger?: Logger): Promise<void>;
+ //# sourceMappingURL=publisher.d.ts.map
+
+ //#endregion
+ export { publishConstructEvents, publishEvents };
+ //# sourceMappingURL=publisher-BXG9YiRi.d.mts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"publisher-BXG9YiRi.d.mts","names":[],"sources":["../src/publisher.ts"],"sourcesContent":[],"mappings":";;;;;;;;;iBAQsB,wBACX,mDACQ,0GAEQ,QAAQ,cAAc,oCAExC,0BACU,gCACd,YAAY,GAAG,oCACT,oBAAoB,8BACZ,oBAAiB;iBA8Cd,iCACX,mDACQ,kGAEA,wFAEI,iDAEX,UACV,QACA,cACA,GACA,WACA,WACA,0BACA,0BAES,oBAAoB,8BACZ,qCACV,SAAyB;AA3ElC"}
@@ -1 +1 @@
- {"version":3,"file":"publisher-Bw4770Hi.mjs","names":["logger: Logger","serviceDiscovery: ServiceDiscovery<any, any>","ev: MappedEvent<T, OutSchema>[]","response: InferStandardSchema<OutSchema>","publisherService: TPublisherService","events: MappedEvent<T, OutSchema>[]","construct: Construct<\n Logger,\n TServiceName,\n T,\n OutSchema,\n TServices,\n TAuditStorageServiceName,\n TAuditStorage\n >"],"sources":["../src/publisher.ts"],"sourcesContent":["import type { AuditStorage } from '@geekmidas/audit';\nimport type { Logger } from '@geekmidas/logger';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\n\nimport type { EventPublisher, MappedEvent } from '@geekmidas/events';\nimport type { InferStandardSchema } from '@geekmidas/schema';\nimport type { Service, ServiceDiscovery } from '@geekmidas/services';\nimport type { Construct } from './Construct';\n\nexport async function publishEvents<\n T extends EventPublisher<any> | undefined,\n OutSchema extends StandardSchemaV1 | undefined = undefined,\n TServiceName extends string = string,\n TPublisherService extends Service<TServiceName, T> | undefined = undefined,\n>(\n logger: Logger,\n serviceDiscovery: ServiceDiscovery<any, any>,\n ev: MappedEvent<T, OutSchema>[] = [],\n response: InferStandardSchema<OutSchema>,\n publisherService: TPublisherService,\n) {\n try {\n if (!ev?.length) {\n logger.debug('No events to publish');\n return;\n }\n if (!publisherService) {\n logger.warn('No publisher service available');\n return;\n }\n\n const services = await serviceDiscovery.register([publisherService]);\n\n const publisher = services[\n publisherService.serviceName\n ] as EventPublisher<any>;\n\n const events: MappedEvent<T, OutSchema>[] = [];\n\n for (const { when, payload, type, ...e } of ev) {\n logger.debug({ event: type }, 'Processing event');\n const resolvedPayload = await payload(response);\n const event = {\n ...e,\n type,\n payload: resolvedPayload,\n };\n\n if (!when || when(response as any)) {\n events.push(event);\n }\n }\n\n if (events.length) {\n logger.debug({ eventCount: ev.length }, 'Publishing events');\n\n await publisher.publish(events).catch((err) => {\n logger.error(err, 'Failed to publish events');\n });\n }\n } catch (error) {\n logger.error(error as any, 'Something went wrong publishing events');\n }\n}\n\nexport async function publishConstructEvents<\n T extends EventPublisher<any> | undefined,\n OutSchema extends StandardSchemaV1 | undefined = undefined,\n TServiceName extends string = string,\n TServices extends Service[] = [],\n TAuditStorageServiceName extends string = string,\n TAuditStorage extends AuditStorage | undefined = undefined,\n>(\n construct: Construct<\n Logger,\n TServiceName,\n T,\n OutSchema,\n TServices,\n TAuditStorageServiceName,\n TAuditStorage\n >,\n response: InferStandardSchema<OutSchema>,\n serviceDiscovery: ServiceDiscovery<any, any>,\n logger: Logger = construct.logger,\n) {\n return publishEvents(\n logger,\n serviceDiscovery,\n construct.events,\n response,\n construct.publisherService,\n 
);\n}\n"],"mappings":";AASA,eAAsB,cAMpBA,QACAC,kBACAC,KAAkC,CAAE,GACpCC,UACAC,kBACA;AACA,KAAI;AACF,OAAK,IAAI,QAAQ;AACf,UAAO,MAAM,uBAAuB;AACpC;EACD;AACD,OAAK,kBAAkB;AACrB,UAAO,KAAK,iCAAiC;AAC7C;EACD;EAED,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAAC,gBAAiB,EAAC;EAEpE,MAAM,YAAY,SAChB,iBAAiB;EAGnB,MAAMC,SAAsC,CAAE;AAE9C,OAAK,MAAM,EAAE,MAAM,SAAS,KAAM,GAAG,GAAG,IAAI,IAAI;AAC9C,UAAO,MAAM,EAAE,OAAO,KAAM,GAAE,mBAAmB;GACjD,MAAM,kBAAkB,MAAM,QAAQ,SAAS;GAC/C,MAAM,QAAQ;IACZ,GAAG;IACH;IACA,SAAS;GACV;AAED,QAAK,QAAQ,KAAK,SAAgB,CAChC,QAAO,KAAK,MAAM;EAErB;AAED,MAAI,OAAO,QAAQ;AACjB,UAAO,MAAM,EAAE,YAAY,GAAG,OAAQ,GAAE,oBAAoB;AAE5D,SAAM,UAAU,QAAQ,OAAO,CAAC,MAAM,CAAC,QAAQ;AAC7C,WAAO,MAAM,KAAK,2BAA2B;GAC9C,EAAC;EACH;CACF,SAAQ,OAAO;AACd,SAAO,MAAM,OAAc,yCAAyC;CACrE;AACF;AAED,eAAsB,uBAQpBC,WASAH,UACAF,kBACAD,SAAiB,UAAU,QAC3B;AACA,QAAO,cACL,QACA,kBACA,UAAU,QACV,UACA,UAAU,iBACX;AACF"}
+ {"version":3,"file":"publisher-Bw4770Hi.mjs","names":["logger: Logger","serviceDiscovery: ServiceDiscovery<any, any>","ev: MappedEvent<T, OutSchema>[]","response: InferStandardSchema<OutSchema>","publisherService: TPublisherService","events: MappedEvent<T, OutSchema>[]","construct: Construct<\n\t\tLogger,\n\t\tTServiceName,\n\t\tT,\n\t\tOutSchema,\n\t\tTServices,\n\t\tTAuditStorageServiceName,\n\t\tTAuditStorage\n\t>"],"sources":["../src/publisher.ts"],"sourcesContent":["import type { AuditStorage } from '@geekmidas/audit';\nimport type { EventPublisher, MappedEvent } from '@geekmidas/events';\nimport type { Logger } from '@geekmidas/logger';\nimport type { InferStandardSchema } from '@geekmidas/schema';\nimport type { Service, ServiceDiscovery } from '@geekmidas/services';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\nimport type { Construct } from './Construct';\n\nexport async function publishEvents<\n\tT extends EventPublisher<any> | undefined,\n\tOutSchema extends StandardSchemaV1 | undefined = undefined,\n\tTServiceName extends string = string,\n\tTPublisherService extends Service<TServiceName, T> | undefined = undefined,\n>(\n\tlogger: Logger,\n\tserviceDiscovery: ServiceDiscovery<any, any>,\n\tev: MappedEvent<T, OutSchema>[] = [],\n\tresponse: InferStandardSchema<OutSchema>,\n\tpublisherService: TPublisherService,\n) {\n\ttry {\n\t\tif (!ev?.length) {\n\t\t\tlogger.debug('No events to publish');\n\t\t\treturn;\n\t\t}\n\t\tif (!publisherService) {\n\t\t\tlogger.warn('No publisher service available');\n\t\t\treturn;\n\t\t}\n\n\t\tconst services = await serviceDiscovery.register([publisherService]);\n\n\t\tconst publisher = services[\n\t\t\tpublisherService.serviceName\n\t\t] as EventPublisher<any>;\n\n\t\tconst events: MappedEvent<T, OutSchema>[] = [];\n\n\t\tfor (const { when, payload, type, ...e } of ev) {\n\t\t\tlogger.debug({ event: type }, 'Processing event');\n\t\t\tconst resolvedPayload = await payload(response);\n\t\t\tconst event = {\n\t\t\t\t...e,\n\t\t\t\ttype,\n\t\t\t\tpayload: resolvedPayload,\n\t\t\t};\n\n\t\t\tif (!when || when(response as any)) {\n\t\t\t\tevents.push(event);\n\t\t\t}\n\t\t}\n\n\t\tif (events.length) {\n\t\t\tlogger.debug({ eventCount: ev.length }, 'Publishing events');\n\n\t\t\tawait publisher.publish(events).catch((err) => {\n\t\t\t\tlogger.error(err, 'Failed to publish events');\n\t\t\t});\n\t\t}\n\t} catch (error) {\n\t\tlogger.error(error as any, 'Something went wrong publishing events');\n\t}\n}\n\nexport async function publishConstructEvents<\n\tT extends EventPublisher<any> | undefined,\n\tOutSchema extends StandardSchemaV1 | undefined = undefined,\n\tTServiceName extends string = string,\n\tTServices extends Service[] = [],\n\tTAuditStorageServiceName extends string = string,\n\tTAuditStorage extends AuditStorage | undefined = undefined,\n>(\n\tconstruct: Construct<\n\t\tLogger,\n\t\tTServiceName,\n\t\tT,\n\t\tOutSchema,\n\t\tTServices,\n\t\tTAuditStorageServiceName,\n\t\tTAuditStorage\n\t>,\n\tresponse: InferStandardSchema<OutSchema>,\n\tserviceDiscovery: ServiceDiscovery<any, any>,\n\tlogger: Logger = construct.logger,\n) {\n\treturn 
publishEvents(\n\t\tlogger,\n\t\tserviceDiscovery,\n\t\tconstruct.events,\n\t\tresponse,\n\t\tconstruct.publisherService,\n\t);\n}\n"],"mappings":";AAQA,eAAsB,cAMrBA,QACAC,kBACAC,KAAkC,CAAE,GACpCC,UACAC,kBACC;AACD,KAAI;AACH,OAAK,IAAI,QAAQ;AAChB,UAAO,MAAM,uBAAuB;AACpC;EACA;AACD,OAAK,kBAAkB;AACtB,UAAO,KAAK,iCAAiC;AAC7C;EACA;EAED,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAAC,gBAAiB,EAAC;EAEpE,MAAM,YAAY,SACjB,iBAAiB;EAGlB,MAAMC,SAAsC,CAAE;AAE9C,OAAK,MAAM,EAAE,MAAM,SAAS,KAAM,GAAG,GAAG,IAAI,IAAI;AAC/C,UAAO,MAAM,EAAE,OAAO,KAAM,GAAE,mBAAmB;GACjD,MAAM,kBAAkB,MAAM,QAAQ,SAAS;GAC/C,MAAM,QAAQ;IACb,GAAG;IACH;IACA,SAAS;GACT;AAED,QAAK,QAAQ,KAAK,SAAgB,CACjC,QAAO,KAAK,MAAM;EAEnB;AAED,MAAI,OAAO,QAAQ;AAClB,UAAO,MAAM,EAAE,YAAY,GAAG,OAAQ,GAAE,oBAAoB;AAE5D,SAAM,UAAU,QAAQ,OAAO,CAAC,MAAM,CAAC,QAAQ;AAC9C,WAAO,MAAM,KAAK,2BAA2B;GAC7C,EAAC;EACF;CACD,SAAQ,OAAO;AACf,SAAO,MAAM,OAAc,yCAAyC;CACpE;AACD;AAED,eAAsB,uBAQrBC,WASAH,UACAF,kBACAD,SAAiB,UAAU,QAC1B;AACD,QAAO,cACN,QACA,kBACA,UAAU,QACV,UACA,UAAU,iBACV;AACD"}
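The publisher source embedded just above shows how publishEvents consumes declarative event mappings: each entry's payload function is resolved against the handler response, and an optional when guard decides whether the event is published at all. A hedged sketch of one such mapping follows; the OrderResponse type and the 'order.created' event name are examples only, not identifiers from the package.

// Illustrative event mapping of the shape publishEvents iterates over.
// Field names (type, payload, when) come from the embedded source; the
// response type and event name here are invented for the example.
type OrderResponse = { id: string; total: number };

const orderCreatedEvent = {
	type: 'order.created',
	// Resolved against the handler response before publishing; may be async.
	payload: async (response: OrderResponse) => ({ orderId: response.id }),
	// Optional guard: the event is skipped when this returns false.
	when: (response: OrderResponse) => response.total > 0,
};

Entries whose when guard fails are filtered out before the batched publisher.publish call, and publish failures are logged rather than rethrown, so event publication never fails the request.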
@@ -0,0 +1,16 @@
+ import { Construct } from "./Construct-Dl0l2d8d.cjs";
+ import { AuditStorage } from "@geekmidas/audit";
+ import { EventPublisher, MappedEvent } from "@geekmidas/events";
+ import { Logger } from "@geekmidas/logger";
+ import { Service, ServiceDiscovery } from "@geekmidas/services";
+ import { StandardSchemaV1 } from "@standard-schema/spec";
+ import { InferStandardSchema } from "@geekmidas/schema";
+
+ //#region src/publisher.d.ts
+ declare function publishEvents<T extends EventPublisher<any> | undefined, OutSchema extends StandardSchemaV1 | undefined = undefined, TServiceName extends string = string, TPublisherService extends Service<TServiceName, T> | undefined = undefined>(logger: Logger, serviceDiscovery: ServiceDiscovery<any, any>, ev: MappedEvent<T, OutSchema>[] | undefined, response: InferStandardSchema<OutSchema>, publisherService: TPublisherService): Promise<void>;
+ declare function publishConstructEvents<T extends EventPublisher<any> | undefined, OutSchema extends StandardSchemaV1 | undefined = undefined, TServiceName extends string = string, TServices extends Service[] = [], TAuditStorageServiceName extends string = string, TAuditStorage extends AuditStorage | undefined = undefined>(construct: Construct<Logger, TServiceName, T, OutSchema, TServices, TAuditStorageServiceName, TAuditStorage>, response: InferStandardSchema<OutSchema>, serviceDiscovery: ServiceDiscovery<any, any>, logger?: Logger): Promise<void>;
+ //# sourceMappingURL=publisher.d.ts.map
+
+ //#endregion
+ export { publishConstructEvents, publishEvents };
+ //# sourceMappingURL=publisher-D9ngDXg3.d.cts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"publisher-D9ngDXg3.d.cts","names":[],"sources":["../src/publisher.ts"],"sourcesContent":[],"mappings":";;;;;;;;;iBAQsB,wBACX,mDACQ,0GAEQ,QAAQ,cAAc,oCAExC,0BACU,gCACd,YAAY,GAAG,oCACT,oBAAoB,8BACZ,oBAAiB;iBA8Cd,iCACX,mDACQ,kGAEA,wFAEI,iDAEX,UACV,QACA,cACA,GACA,WACA,WACA,0BACA,0BAES,oBAAoB,8BACZ,qCACV,SAAyB;AA3ElC"}
@@ -1 +1 @@
- {"version":3,"file":"publisher-lFQleddL.cjs","names":["logger: Logger","serviceDiscovery: ServiceDiscovery<any, any>","ev: MappedEvent<T, OutSchema>[]","response: InferStandardSchema<OutSchema>","publisherService: TPublisherService","events: MappedEvent<T, OutSchema>[]","construct: Construct<\n Logger,\n TServiceName,\n T,\n OutSchema,\n TServices,\n TAuditStorageServiceName,\n TAuditStorage\n >"],"sources":["../src/publisher.ts"],"sourcesContent":["import type { AuditStorage } from '@geekmidas/audit';\nimport type { Logger } from '@geekmidas/logger';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\n\nimport type { EventPublisher, MappedEvent } from '@geekmidas/events';\nimport type { InferStandardSchema } from '@geekmidas/schema';\nimport type { Service, ServiceDiscovery } from '@geekmidas/services';\nimport type { Construct } from './Construct';\n\nexport async function publishEvents<\n T extends EventPublisher<any> | undefined,\n OutSchema extends StandardSchemaV1 | undefined = undefined,\n TServiceName extends string = string,\n TPublisherService extends Service<TServiceName, T> | undefined = undefined,\n>(\n logger: Logger,\n serviceDiscovery: ServiceDiscovery<any, any>,\n ev: MappedEvent<T, OutSchema>[] = [],\n response: InferStandardSchema<OutSchema>,\n publisherService: TPublisherService,\n) {\n try {\n if (!ev?.length) {\n logger.debug('No events to publish');\n return;\n }\n if (!publisherService) {\n logger.warn('No publisher service available');\n return;\n }\n\n const services = await serviceDiscovery.register([publisherService]);\n\n const publisher = services[\n publisherService.serviceName\n ] as EventPublisher<any>;\n\n const events: MappedEvent<T, OutSchema>[] = [];\n\n for (const { when, payload, type, ...e } of ev) {\n logger.debug({ event: type }, 'Processing event');\n const resolvedPayload = await payload(response);\n const event = {\n ...e,\n type,\n payload: resolvedPayload,\n };\n\n if (!when || when(response as any)) {\n events.push(event);\n }\n }\n\n if (events.length) {\n logger.debug({ eventCount: ev.length }, 'Publishing events');\n\n await publisher.publish(events).catch((err) => {\n logger.error(err, 'Failed to publish events');\n });\n }\n } catch (error) {\n logger.error(error as any, 'Something went wrong publishing events');\n }\n}\n\nexport async function publishConstructEvents<\n T extends EventPublisher<any> | undefined,\n OutSchema extends StandardSchemaV1 | undefined = undefined,\n TServiceName extends string = string,\n TServices extends Service[] = [],\n TAuditStorageServiceName extends string = string,\n TAuditStorage extends AuditStorage | undefined = undefined,\n>(\n construct: Construct<\n Logger,\n TServiceName,\n T,\n OutSchema,\n TServices,\n TAuditStorageServiceName,\n TAuditStorage\n >,\n response: InferStandardSchema<OutSchema>,\n serviceDiscovery: ServiceDiscovery<any, any>,\n logger: Logger = construct.logger,\n) {\n return publishEvents(\n logger,\n serviceDiscovery,\n construct.events,\n response,\n construct.publisherService,\n 
);\n}\n"],"mappings":";;AASA,eAAsB,cAMpBA,QACAC,kBACAC,KAAkC,CAAE,GACpCC,UACAC,kBACA;AACA,KAAI;AACF,OAAK,IAAI,QAAQ;AACf,UAAO,MAAM,uBAAuB;AACpC;EACD;AACD,OAAK,kBAAkB;AACrB,UAAO,KAAK,iCAAiC;AAC7C;EACD;EAED,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAAC,gBAAiB,EAAC;EAEpE,MAAM,YAAY,SAChB,iBAAiB;EAGnB,MAAMC,SAAsC,CAAE;AAE9C,OAAK,MAAM,EAAE,MAAM,SAAS,KAAM,GAAG,GAAG,IAAI,IAAI;AAC9C,UAAO,MAAM,EAAE,OAAO,KAAM,GAAE,mBAAmB;GACjD,MAAM,kBAAkB,MAAM,QAAQ,SAAS;GAC/C,MAAM,QAAQ;IACZ,GAAG;IACH;IACA,SAAS;GACV;AAED,QAAK,QAAQ,KAAK,SAAgB,CAChC,QAAO,KAAK,MAAM;EAErB;AAED,MAAI,OAAO,QAAQ;AACjB,UAAO,MAAM,EAAE,YAAY,GAAG,OAAQ,GAAE,oBAAoB;AAE5D,SAAM,UAAU,QAAQ,OAAO,CAAC,MAAM,CAAC,QAAQ;AAC7C,WAAO,MAAM,KAAK,2BAA2B;GAC9C,EAAC;EACH;CACF,SAAQ,OAAO;AACd,SAAO,MAAM,OAAc,yCAAyC;CACrE;AACF;AAED,eAAsB,uBAQpBC,WASAH,UACAF,kBACAD,SAAiB,UAAU,QAC3B;AACA,QAAO,cACL,QACA,kBACA,UAAU,QACV,UACA,UAAU,iBACX;AACF"}
+ {"version":3,"file":"publisher-lFQleddL.cjs","names":["logger: Logger","serviceDiscovery: ServiceDiscovery<any, any>","ev: MappedEvent<T, OutSchema>[]","response: InferStandardSchema<OutSchema>","publisherService: TPublisherService","events: MappedEvent<T, OutSchema>[]","construct: Construct<\n\t\tLogger,\n\t\tTServiceName,\n\t\tT,\n\t\tOutSchema,\n\t\tTServices,\n\t\tTAuditStorageServiceName,\n\t\tTAuditStorage\n\t>"],"sources":["../src/publisher.ts"],"sourcesContent":["import type { AuditStorage } from '@geekmidas/audit';\nimport type { EventPublisher, MappedEvent } from '@geekmidas/events';\nimport type { Logger } from '@geekmidas/logger';\nimport type { InferStandardSchema } from '@geekmidas/schema';\nimport type { Service, ServiceDiscovery } from '@geekmidas/services';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\nimport type { Construct } from './Construct';\n\nexport async function publishEvents<\n\tT extends EventPublisher<any> | undefined,\n\tOutSchema extends StandardSchemaV1 | undefined = undefined,\n\tTServiceName extends string = string,\n\tTPublisherService extends Service<TServiceName, T> | undefined = undefined,\n>(\n\tlogger: Logger,\n\tserviceDiscovery: ServiceDiscovery<any, any>,\n\tev: MappedEvent<T, OutSchema>[] = [],\n\tresponse: InferStandardSchema<OutSchema>,\n\tpublisherService: TPublisherService,\n) {\n\ttry {\n\t\tif (!ev?.length) {\n\t\t\tlogger.debug('No events to publish');\n\t\t\treturn;\n\t\t}\n\t\tif (!publisherService) {\n\t\t\tlogger.warn('No publisher service available');\n\t\t\treturn;\n\t\t}\n\n\t\tconst services = await serviceDiscovery.register([publisherService]);\n\n\t\tconst publisher = services[\n\t\t\tpublisherService.serviceName\n\t\t] as EventPublisher<any>;\n\n\t\tconst events: MappedEvent<T, OutSchema>[] = [];\n\n\t\tfor (const { when, payload, type, ...e } of ev) {\n\t\t\tlogger.debug({ event: type }, 'Processing event');\n\t\t\tconst resolvedPayload = await payload(response);\n\t\t\tconst event = {\n\t\t\t\t...e,\n\t\t\t\ttype,\n\t\t\t\tpayload: resolvedPayload,\n\t\t\t};\n\n\t\t\tif (!when || when(response as any)) {\n\t\t\t\tevents.push(event);\n\t\t\t}\n\t\t}\n\n\t\tif (events.length) {\n\t\t\tlogger.debug({ eventCount: ev.length }, 'Publishing events');\n\n\t\t\tawait publisher.publish(events).catch((err) => {\n\t\t\t\tlogger.error(err, 'Failed to publish events');\n\t\t\t});\n\t\t}\n\t} catch (error) {\n\t\tlogger.error(error as any, 'Something went wrong publishing events');\n\t}\n}\n\nexport async function publishConstructEvents<\n\tT extends EventPublisher<any> | undefined,\n\tOutSchema extends StandardSchemaV1 | undefined = undefined,\n\tTServiceName extends string = string,\n\tTServices extends Service[] = [],\n\tTAuditStorageServiceName extends string = string,\n\tTAuditStorage extends AuditStorage | undefined = undefined,\n>(\n\tconstruct: Construct<\n\t\tLogger,\n\t\tTServiceName,\n\t\tT,\n\t\tOutSchema,\n\t\tTServices,\n\t\tTAuditStorageServiceName,\n\t\tTAuditStorage\n\t>,\n\tresponse: InferStandardSchema<OutSchema>,\n\tserviceDiscovery: ServiceDiscovery<any, any>,\n\tlogger: Logger = construct.logger,\n) {\n\treturn 
publishEvents(\n\t\tlogger,\n\t\tserviceDiscovery,\n\t\tconstruct.events,\n\t\tresponse,\n\t\tconstruct.publisherService,\n\t);\n}\n"],"mappings":";;AAQA,eAAsB,cAMrBA,QACAC,kBACAC,KAAkC,CAAE,GACpCC,UACAC,kBACC;AACD,KAAI;AACH,OAAK,IAAI,QAAQ;AAChB,UAAO,MAAM,uBAAuB;AACpC;EACA;AACD,OAAK,kBAAkB;AACtB,UAAO,KAAK,iCAAiC;AAC7C;EACA;EAED,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAAC,gBAAiB,EAAC;EAEpE,MAAM,YAAY,SACjB,iBAAiB;EAGlB,MAAMC,SAAsC,CAAE;AAE9C,OAAK,MAAM,EAAE,MAAM,SAAS,KAAM,GAAG,GAAG,IAAI,IAAI;AAC/C,UAAO,MAAM,EAAE,OAAO,KAAM,GAAE,mBAAmB;GACjD,MAAM,kBAAkB,MAAM,QAAQ,SAAS;GAC/C,MAAM,QAAQ;IACb,GAAG;IACH;IACA,SAAS;GACT;AAED,QAAK,QAAQ,KAAK,SAAgB,CACjC,QAAO,KAAK,MAAM;EAEnB;AAED,MAAI,OAAO,QAAQ;AAClB,UAAO,MAAM,EAAE,YAAY,GAAG,OAAQ,GAAE,oBAAoB;AAE5D,SAAM,UAAU,QAAQ,OAAO,CAAC,MAAM,CAAC,QAAQ;AAC9C,WAAO,MAAM,KAAK,2BAA2B;GAC7C,EAAC;EACF;CACD,SAAQ,OAAO;AACf,SAAO,MAAM,OAAc,yCAAyC;CACpE;AACD;AAED,eAAsB,uBAQrBC,WASAH,UACAF,kBACAD,SAAiB,UAAU,QAC1B;AACD,QAAO,cACN,QACA,kBACA,UAAU,QACV,UACA,UAAU,iBACV;AACD"}
@@ -1,16 +1,3 @@
1
- import { Construct } from "./Construct-C4rPE67v.cjs";
2
- import { AuditStorage } from "@geekmidas/audit";
3
- import { EventPublisher, MappedEvent } from "@geekmidas/events";
4
- import { Logger } from "@geekmidas/logger";
5
- import { Service, ServiceDiscovery } from "@geekmidas/services";
6
- import { StandardSchemaV1 } from "@standard-schema/spec";
7
- import { InferStandardSchema } from "@geekmidas/schema";
8
-
9
- //#region src/publisher.d.ts
10
- declare function publishEvents<T extends EventPublisher<any> | undefined, OutSchema extends StandardSchemaV1 | undefined = undefined, TServiceName extends string = string, TPublisherService extends Service<TServiceName, T> | undefined = undefined>(logger: Logger, serviceDiscovery: ServiceDiscovery<any, any>, ev: MappedEvent<T, OutSchema>[] | undefined, response: InferStandardSchema<OutSchema>, publisherService: TPublisherService): Promise<void>;
11
- declare function publishConstructEvents<T extends EventPublisher<any> | undefined, OutSchema extends StandardSchemaV1 | undefined = undefined, TServiceName extends string = string, TServices extends Service[] = [], TAuditStorageServiceName extends string = string, TAuditStorage extends AuditStorage | undefined = undefined>(construct: Construct<Logger, TServiceName, T, OutSchema, TServices, TAuditStorageServiceName, TAuditStorage>, response: InferStandardSchema<OutSchema>, serviceDiscovery: ServiceDiscovery<any, any>, logger?: Logger): Promise<void>;
12
- //# sourceMappingURL=publisher.d.ts.map
13
-
14
- //#endregion
15
- export { publishConstructEvents, publishEvents };
16
- //# sourceMappingURL=publisher.d.cts.map
1
+ import "./Construct-Dl0l2d8d.cjs";
2
+ import { publishConstructEvents, publishEvents } from "./publisher-D9ngDXg3.cjs";
3
+ export { publishConstructEvents, publishEvents };
@@ -1,16 +1,3 @@
1
- import { Construct } from "./Construct-XrijZFFh.mjs";
2
- import { Service, ServiceDiscovery } from "@geekmidas/services";
3
- import { AuditStorage } from "@geekmidas/audit";
4
- import { EventPublisher, MappedEvent } from "@geekmidas/events";
5
- import { Logger } from "@geekmidas/logger";
6
- import { StandardSchemaV1 } from "@standard-schema/spec";
7
- import { InferStandardSchema } from "@geekmidas/schema";
8
-
9
- //#region src/publisher.d.ts
10
- declare function publishEvents<T extends EventPublisher<any> | undefined, OutSchema extends StandardSchemaV1 | undefined = undefined, TServiceName extends string = string, TPublisherService extends Service<TServiceName, T> | undefined = undefined>(logger: Logger, serviceDiscovery: ServiceDiscovery<any, any>, ev: MappedEvent<T, OutSchema>[] | undefined, response: InferStandardSchema<OutSchema>, publisherService: TPublisherService): Promise<void>;
11
- declare function publishConstructEvents<T extends EventPublisher<any> | undefined, OutSchema extends StandardSchemaV1 | undefined = undefined, TServiceName extends string = string, TServices extends Service[] = [], TAuditStorageServiceName extends string = string, TAuditStorage extends AuditStorage | undefined = undefined>(construct: Construct<Logger, TServiceName, T, OutSchema, TServices, TAuditStorageServiceName, TAuditStorage>, response: InferStandardSchema<OutSchema>, serviceDiscovery: ServiceDiscovery<any, any>, logger?: Logger): Promise<void>;
12
- //# sourceMappingURL=publisher.d.ts.map
13
-
14
- //#endregion
15
- export { publishConstructEvents, publishEvents };
16
- //# sourceMappingURL=publisher.d.mts.map
1
+ import "./Construct-E8QPyHh4.mjs";
2
+ import { publishConstructEvents, publishEvents } from "./publisher-BXG9YiRi.mjs";
3
+ export { publishConstructEvents, publishEvents };
@@ -12,4 +12,4 @@ Object.defineProperty(exports, 'RLS_BYPASS', {
12
12
  return RLS_BYPASS;
13
13
  }
14
14
  });
15
- //# sourceMappingURL=rls-CmJ7bRsz.cjs.map
15
+ //# sourceMappingURL=rls-BrywnrQb.cjs.map
@@ -1 +1 @@
1
- {"version":3,"file":"rls-CmJ7bRsz.cjs","names":[],"sources":["../src/endpoints/rls.ts"],"sourcesContent":["import type { Logger } from '@geekmidas/logger';\nimport type { Service, ServiceRecord } from '@geekmidas/services';\nimport type { CookieFn, HeaderFn } from './Endpoint';\n\n/**\n * RLS context - key-value pairs to set as PostgreSQL session variables.\n * Keys become `prefix.key` (e.g., `app.user_id`).\n */\nexport interface RlsContext {\n [key: string]: string | number | boolean | null | undefined;\n}\n\n/**\n * Function type for extracting RLS context from request context.\n *\n * @template TServices - Available service dependencies\n * @template TSession - Session data type\n * @template TLogger - Logger type\n *\n * @example\n * ```ts\n * const extractor: RlsContextExtractor<[], UserSession> = ({ session }) => ({\n * user_id: session.userId,\n * tenant_id: session.tenantId,\n * roles: session.roles.join(','),\n * });\n * ```\n */\nexport type RlsContextExtractor<\n TServices extends Service[] = [],\n TSession = unknown,\n TLogger extends Logger = Logger,\n> = (ctx: {\n services: ServiceRecord<TServices>;\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n logger: TLogger;\n}) => RlsContext | Promise<RlsContext>;\n\n/**\n * Configuration for RLS on an endpoint or factory.\n *\n * @template TServices - Available service dependencies\n * @template TSession - Session data type\n * @template TLogger - Logger type\n */\nexport interface RlsConfig<\n TServices extends Service[] = [],\n TSession = unknown,\n TLogger extends Logger = Logger,\n> {\n /** Function to extract RLS context from request */\n extractor: RlsContextExtractor<TServices, TSession, TLogger>;\n /** Prefix for PostgreSQL session variables (default: 'app') */\n prefix?: string;\n}\n\n/**\n * Symbol used to bypass RLS for an endpoint.\n */\nexport const RLS_BYPASS = Symbol.for('geekmidas.rls.bypass');\n\n/**\n * Type for RLS bypass marker.\n */\nexport type RlsBypass = typeof RLS_BYPASS;\n"],"mappings":";;;;;AA6DA,MAAa,aAAa,OAAO,IAAI,uBAAuB"}
1
+ {"version":3,"file":"rls-BrywnrQb.cjs","names":[],"sources":["../src/endpoints/rls.ts"],"sourcesContent":["import type { Logger } from '@geekmidas/logger';\nimport type { Service, ServiceRecord } from '@geekmidas/services';\nimport type { CookieFn, HeaderFn } from './Endpoint';\n\n/**\n * RLS context - key-value pairs to set as PostgreSQL session variables.\n * Keys become `prefix.key` (e.g., `app.user_id`).\n */\nexport interface RlsContext {\n\t[key: string]: string | number | boolean | null | undefined;\n}\n\n/**\n * Function type for extracting RLS context from request context.\n *\n * @template TServices - Available service dependencies\n * @template TSession - Session data type\n * @template TLogger - Logger type\n *\n * @example\n * ```ts\n * const extractor: RlsContextExtractor<[], UserSession> = ({ session }) => ({\n * user_id: session.userId,\n * tenant_id: session.tenantId,\n * roles: session.roles.join(','),\n * });\n * ```\n */\nexport type RlsContextExtractor<\n\tTServices extends Service[] = [],\n\tTSession = unknown,\n\tTLogger extends Logger = Logger,\n> = (ctx: {\n\tservices: ServiceRecord<TServices>;\n\tsession: TSession;\n\theader: HeaderFn;\n\tcookie: CookieFn;\n\tlogger: TLogger;\n}) => RlsContext | Promise<RlsContext>;\n\n/**\n * Configuration for RLS on an endpoint or factory.\n *\n * @template TServices - Available service dependencies\n * @template TSession - Session data type\n * @template TLogger - Logger type\n */\nexport interface RlsConfig<\n\tTServices extends Service[] = [],\n\tTSession = unknown,\n\tTLogger extends Logger = Logger,\n> {\n\t/** Function to extract RLS context from request */\n\textractor: RlsContextExtractor<TServices, TSession, TLogger>;\n\t/** Prefix for PostgreSQL session variables (default: 'app') */\n\tprefix?: string;\n}\n\n/**\n * Symbol used to bypass RLS for an endpoint.\n */\nexport const RLS_BYPASS = Symbol.for('geekmidas.rls.bypass');\n\n/**\n * Type for RLS bypass marker.\n */\nexport type RlsBypass = typeof RLS_BYPASS;\n"],"mappings":";;;;;AA6DA,MAAa,aAAa,OAAO,IAAI,uBAAuB"}
@@ -6,4 +6,4 @@ const RLS_BYPASS = Symbol.for("geekmidas.rls.bypass");
6
6
 
7
7
  //#endregion
8
8
  export { RLS_BYPASS };
9
- //# sourceMappingURL=rls-Bf3FRwto.mjs.map
9
+ //# sourceMappingURL=rls-C0cWOnk4.mjs.map
@@ -1 +1 @@
1
- {"version":3,"file":"rls-Bf3FRwto.mjs","names":[],"sources":["../src/endpoints/rls.ts"],"sourcesContent":["import type { Logger } from '@geekmidas/logger';\nimport type { Service, ServiceRecord } from '@geekmidas/services';\nimport type { CookieFn, HeaderFn } from './Endpoint';\n\n/**\n * RLS context - key-value pairs to set as PostgreSQL session variables.\n * Keys become `prefix.key` (e.g., `app.user_id`).\n */\nexport interface RlsContext {\n [key: string]: string | number | boolean | null | undefined;\n}\n\n/**\n * Function type for extracting RLS context from request context.\n *\n * @template TServices - Available service dependencies\n * @template TSession - Session data type\n * @template TLogger - Logger type\n *\n * @example\n * ```ts\n * const extractor: RlsContextExtractor<[], UserSession> = ({ session }) => ({\n * user_id: session.userId,\n * tenant_id: session.tenantId,\n * roles: session.roles.join(','),\n * });\n * ```\n */\nexport type RlsContextExtractor<\n TServices extends Service[] = [],\n TSession = unknown,\n TLogger extends Logger = Logger,\n> = (ctx: {\n services: ServiceRecord<TServices>;\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n logger: TLogger;\n}) => RlsContext | Promise<RlsContext>;\n\n/**\n * Configuration for RLS on an endpoint or factory.\n *\n * @template TServices - Available service dependencies\n * @template TSession - Session data type\n * @template TLogger - Logger type\n */\nexport interface RlsConfig<\n TServices extends Service[] = [],\n TSession = unknown,\n TLogger extends Logger = Logger,\n> {\n /** Function to extract RLS context from request */\n extractor: RlsContextExtractor<TServices, TSession, TLogger>;\n /** Prefix for PostgreSQL session variables (default: 'app') */\n prefix?: string;\n}\n\n/**\n * Symbol used to bypass RLS for an endpoint.\n */\nexport const RLS_BYPASS = Symbol.for('geekmidas.rls.bypass');\n\n/**\n * Type for RLS bypass marker.\n */\nexport type RlsBypass = typeof RLS_BYPASS;\n"],"mappings":";;;;AA6DA,MAAa,aAAa,OAAO,IAAI,uBAAuB"}
1
+ {"version":3,"file":"rls-C0cWOnk4.mjs","names":[],"sources":["../src/endpoints/rls.ts"],"sourcesContent":["import type { Logger } from '@geekmidas/logger';\nimport type { Service, ServiceRecord } from '@geekmidas/services';\nimport type { CookieFn, HeaderFn } from './Endpoint';\n\n/**\n * RLS context - key-value pairs to set as PostgreSQL session variables.\n * Keys become `prefix.key` (e.g., `app.user_id`).\n */\nexport interface RlsContext {\n\t[key: string]: string | number | boolean | null | undefined;\n}\n\n/**\n * Function type for extracting RLS context from request context.\n *\n * @template TServices - Available service dependencies\n * @template TSession - Session data type\n * @template TLogger - Logger type\n *\n * @example\n * ```ts\n * const extractor: RlsContextExtractor<[], UserSession> = ({ session }) => ({\n * user_id: session.userId,\n * tenant_id: session.tenantId,\n * roles: session.roles.join(','),\n * });\n * ```\n */\nexport type RlsContextExtractor<\n\tTServices extends Service[] = [],\n\tTSession = unknown,\n\tTLogger extends Logger = Logger,\n> = (ctx: {\n\tservices: ServiceRecord<TServices>;\n\tsession: TSession;\n\theader: HeaderFn;\n\tcookie: CookieFn;\n\tlogger: TLogger;\n}) => RlsContext | Promise<RlsContext>;\n\n/**\n * Configuration for RLS on an endpoint or factory.\n *\n * @template TServices - Available service dependencies\n * @template TSession - Session data type\n * @template TLogger - Logger type\n */\nexport interface RlsConfig<\n\tTServices extends Service[] = [],\n\tTSession = unknown,\n\tTLogger extends Logger = Logger,\n> {\n\t/** Function to extract RLS context from request */\n\textractor: RlsContextExtractor<TServices, TSession, TLogger>;\n\t/** Prefix for PostgreSQL session variables (default: 'app') */\n\tprefix?: string;\n}\n\n/**\n * Symbol used to bypass RLS for an endpoint.\n */\nexport const RLS_BYPASS = Symbol.for('geekmidas.rls.bypass');\n\n/**\n * Type for RLS bypass marker.\n */\nexport type RlsBypass = typeof RLS_BYPASS;\n"],"mappings":";;;;AA6DA,MAAa,aAAa,OAAO,IAAI,uBAAuB"}
@@ -1,3 +1,3 @@
1
- const require_AWSLambdaSubscriberAdaptor = require('../AWSLambdaSubscriberAdaptor-BNcYYZ-P.cjs');
1
+ const require_AWSLambdaSubscriberAdaptor = require('../AWSLambdaSubscriberAdaptor-DQDnt1Xk.cjs');
2
2
 
3
3
  exports.AWSLambdaSubscriber = require_AWSLambdaSubscriberAdaptor.AWSLambdaSubscriber;
@@ -1,4 +1,4 @@
1
- import "../Construct-C4rPE67v.cjs";
2
- import "../Subscriber-COYMSevD.cjs";
3
- import { AWSLambdaHandler, AWSLambdaSubscriber } from "../AWSLambdaSubscriberAdaptor-Dx-Ks1Jp.cjs";
1
+ import "../Construct-Dl0l2d8d.cjs";
2
+ import "../Subscriber-BBcIOkhW.cjs";
3
+ import { AWSLambdaHandler, AWSLambdaSubscriber } from "../AWSLambdaSubscriberAdaptor-B2Gk3fgx.cjs";
4
4
  export { AWSLambdaHandler, AWSLambdaSubscriber };
@@ -1,4 +1,4 @@
1
- import "../Construct-XrijZFFh.mjs";
2
- import "../Subscriber-ikctpU3I.mjs";
3
- import { AWSLambdaHandler, AWSLambdaSubscriber } from "../AWSLambdaSubscriberAdaptor-DrFAvHOp.mjs";
1
+ import "../Construct-E8QPyHh4.mjs";
2
+ import "../Subscriber-BpTAXvbM.mjs";
3
+ import { AWSLambdaHandler, AWSLambdaSubscriber } from "../AWSLambdaSubscriberAdaptor-QuMFwltJ.mjs";
4
4
  export { AWSLambdaHandler, AWSLambdaSubscriber };
@@ -1,3 +1,3 @@
1
- import { AWSLambdaSubscriber } from "../AWSLambdaSubscriberAdaptor-BLfO612H.mjs";
1
+ import { AWSLambdaSubscriber } from "../AWSLambdaSubscriberAdaptor-BD3FwGUb.mjs";
2
2
 
3
3
  export { AWSLambdaSubscriber };
@@ -1,4 +1,4 @@
1
- require('../Construct-Ba5cMxib.cjs');
2
- const require_Subscriber = require('../Subscriber-BiHjVXtM.cjs');
1
+ require('../Construct-CPrCF8NK.cjs');
2
+ const require_Subscriber = require('../Subscriber-CL4iYm01.cjs');
3
3
 
4
4
  exports.Subscriber = require_Subscriber.Subscriber;
@@ -1,3 +1,3 @@
1
- import "../Construct-C4rPE67v.cjs";
2
- import { Subscriber, SubscriberContext, SubscriberHandler } from "../Subscriber-COYMSevD.cjs";
1
+ import "../Construct-Dl0l2d8d.cjs";
2
+ import { Subscriber, SubscriberContext, SubscriberHandler } from "../Subscriber-BBcIOkhW.cjs";
3
3
  export { Subscriber, SubscriberContext, SubscriberHandler };
@@ -1,3 +1,3 @@
1
- import "../Construct-XrijZFFh.mjs";
2
- import { Subscriber, SubscriberContext, SubscriberHandler } from "../Subscriber-ikctpU3I.mjs";
1
+ import "../Construct-E8QPyHh4.mjs";
2
+ import { Subscriber, SubscriberContext, SubscriberHandler } from "../Subscriber-BpTAXvbM.mjs";
3
3
  export { Subscriber, SubscriberContext, SubscriberHandler };
@@ -1,4 +1,4 @@
1
- import "../Construct-DdyGHuag.mjs";
2
- import { Subscriber } from "../Subscriber-BmPf9GFb.mjs";
1
+ import "../Construct-BNDLJJfD.mjs";
2
+ import { Subscriber } from "../Subscriber-CZ8Smwd2.mjs";
3
3
 
4
4
  export { Subscriber };
@@ -1,5 +1,5 @@
1
- require('../Construct-Ba5cMxib.cjs');
2
- require('../Subscriber-BiHjVXtM.cjs');
3
- const require_SubscriberBuilder = require('../SubscriberBuilder-Cp1C-xtT.cjs');
1
+ require('../Construct-CPrCF8NK.cjs');
2
+ require('../Subscriber-CL4iYm01.cjs');
3
+ const require_SubscriberBuilder = require('../SubscriberBuilder-D0OS3hd7.cjs');
4
4
 
5
5
  exports.SubscriberBuilder = require_SubscriberBuilder.SubscriberBuilder;
@@ -1,4 +1,4 @@
1
- import "../Construct-C4rPE67v.cjs";
2
- import "../Subscriber-COYMSevD.cjs";
3
- import { SubscriberBuilder } from "../SubscriberBuilder-ivHAGIVi.cjs";
1
+ import "../Construct-Dl0l2d8d.cjs";
2
+ import "../Subscriber-BBcIOkhW.cjs";
3
+ import { SubscriberBuilder } from "../SubscriberBuilder-D7IgufwB.cjs";
4
4
  export { SubscriberBuilder };
@@ -1,4 +1,4 @@
1
- import "../Construct-XrijZFFh.mjs";
2
- import "../Subscriber-ikctpU3I.mjs";
3
- import { SubscriberBuilder } from "../SubscriberBuilder-D_9zzllj.mjs";
1
+ import "../Construct-E8QPyHh4.mjs";
2
+ import "../Subscriber-BpTAXvbM.mjs";
3
+ import { SubscriberBuilder } from "../SubscriberBuilder-DOFBbWLt.mjs";
4
4
  export { SubscriberBuilder };
@@ -1,5 +1,5 @@
1
- import "../Construct-DdyGHuag.mjs";
2
- import "../Subscriber-BmPf9GFb.mjs";
3
- import { SubscriberBuilder } from "../SubscriberBuilder-DJPEeYDJ.mjs";
1
+ import "../Construct-BNDLJJfD.mjs";
2
+ import "../Subscriber-CZ8Smwd2.mjs";
3
+ import { SubscriberBuilder } from "../SubscriberBuilder-CxQg3TTm.mjs";
4
4
 
5
5
  export { SubscriberBuilder };
@@ -1,6 +1,6 @@
1
- require('../Construct-Ba5cMxib.cjs');
2
- const require_Subscriber = require('../Subscriber-BiHjVXtM.cjs');
3
- const require_SubscriberBuilder = require('../SubscriberBuilder-Cp1C-xtT.cjs');
1
+ require('../Construct-CPrCF8NK.cjs');
2
+ const require_Subscriber = require('../Subscriber-CL4iYm01.cjs');
3
+ const require_SubscriberBuilder = require('../SubscriberBuilder-D0OS3hd7.cjs');
4
4
 
5
5
  //#region src/subscribers/index.ts
6
6
  const s = new require_SubscriberBuilder.SubscriberBuilder();
@@ -1 +1 @@
1
- {"version":3,"file":"index.cjs","names":["SubscriberBuilder"],"sources":["../../src/subscribers/index.ts"],"sourcesContent":["import { SubscriberBuilder } from './SubscriberBuilder';\nexport { Subscriber } from './Subscriber';\nexport { SubscriberBuilder };\n\nexport const s = new SubscriberBuilder();\n"],"mappings":";;;;;AAIA,MAAa,IAAI,IAAIA"}
1
+ {"version":3,"file":"index.cjs","names":["SubscriberBuilder"],"sources":["../../src/subscribers/index.ts"],"sourcesContent":["import { SubscriberBuilder } from './SubscriberBuilder';\n\nexport { Subscriber } from './Subscriber';\nexport { SubscriberBuilder };\n\nexport const s = new SubscriberBuilder();\n"],"mappings":";;;;;AAKA,MAAa,IAAI,IAAIA"}
@@ -1,10 +1,10 @@
1
- import "../Construct-C4rPE67v.cjs";
2
- import { Subscriber } from "../Subscriber-COYMSevD.cjs";
3
- import { SubscriberBuilder } from "../SubscriberBuilder-ivHAGIVi.cjs";
4
- import * as _geekmidas_logger8 from "@geekmidas/logger";
1
+ import "../Construct-Dl0l2d8d.cjs";
2
+ import { Subscriber } from "../Subscriber-BBcIOkhW.cjs";
3
+ import { SubscriberBuilder } from "../SubscriberBuilder-D7IgufwB.cjs";
4
+ import * as _geekmidas_logger10 from "@geekmidas/logger";
5
5
 
6
6
  //#region src/subscribers/index.d.ts
7
- declare const s: SubscriberBuilder<[], _geekmidas_logger8.Logger, undefined, undefined, string, []>;
7
+ declare const s: SubscriberBuilder<[], _geekmidas_logger10.Logger, undefined, undefined, string, []>;
8
8
  //# sourceMappingURL=index.d.ts.map
9
9
 
10
10
  //#endregion
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.cts","names":[],"sources":["../../src/subscribers/index.ts"],"sourcesContent":[],"mappings":";;;;;;cAIa,GAAC,sBAA0B,kBAAA,CAA1B,MAAA"}
1
+ {"version":3,"file":"index.d.cts","names":[],"sources":["../../src/subscribers/index.ts"],"sourcesContent":[],"mappings":";;;;;;cAKa,GAAC,sBAA0B,mBAAA,CAA1B,MAAA"}
@@ -1,10 +1,10 @@
1
- import "../Construct-XrijZFFh.mjs";
2
- import { Subscriber } from "../Subscriber-ikctpU3I.mjs";
3
- import { SubscriberBuilder } from "../SubscriberBuilder-D_9zzllj.mjs";
4
- import * as _geekmidas_logger10 from "@geekmidas/logger";
1
+ import "../Construct-E8QPyHh4.mjs";
2
+ import { Subscriber } from "../Subscriber-BpTAXvbM.mjs";
3
+ import { SubscriberBuilder } from "../SubscriberBuilder-DOFBbWLt.mjs";
4
+ import * as _geekmidas_logger8 from "@geekmidas/logger";
5
5
 
6
6
  //#region src/subscribers/index.d.ts
7
- declare const s: SubscriberBuilder<[], _geekmidas_logger10.Logger, undefined, undefined, string, []>;
7
+ declare const s: SubscriberBuilder<[], _geekmidas_logger8.Logger, undefined, undefined, string, []>;
8
8
  //# sourceMappingURL=index.d.ts.map
9
9
 
10
10
  //#endregion
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.mts","names":[],"sources":["../../src/subscribers/index.ts"],"sourcesContent":[],"mappings":";;;;;;cAIa,GAAC,sBAA0B,mBAAA,CAA1B,MAAA"}
1
+ {"version":3,"file":"index.d.mts","names":[],"sources":["../../src/subscribers/index.ts"],"sourcesContent":[],"mappings":";;;;;;cAKa,GAAC,sBAA0B,kBAAA,CAA1B,MAAA"}
@@ -1,6 +1,6 @@
1
- import "../Construct-DdyGHuag.mjs";
2
- import { Subscriber } from "../Subscriber-BmPf9GFb.mjs";
3
- import { SubscriberBuilder } from "../SubscriberBuilder-DJPEeYDJ.mjs";
1
+ import "../Construct-BNDLJJfD.mjs";
2
+ import { Subscriber } from "../Subscriber-CZ8Smwd2.mjs";
3
+ import { SubscriberBuilder } from "../SubscriberBuilder-CxQg3TTm.mjs";
4
4
 
5
5
  //#region src/subscribers/index.ts
6
6
  const s = new SubscriberBuilder();
@@ -1 +1 @@
1
- {"version":3,"file":"index.mjs","names":[],"sources":["../../src/subscribers/index.ts"],"sourcesContent":["import { SubscriberBuilder } from './SubscriberBuilder';\nexport { Subscriber } from './Subscriber';\nexport { SubscriberBuilder };\n\nexport const s = new SubscriberBuilder();\n"],"mappings":";;;;;AAIA,MAAa,IAAI,IAAI"}
1
+ {"version":3,"file":"index.mjs","names":[],"sources":["../../src/subscribers/index.ts"],"sourcesContent":["import { SubscriberBuilder } from './SubscriberBuilder';\n\nexport { Subscriber } from './Subscriber';\nexport { SubscriberBuilder };\n\nexport const s = new SubscriberBuilder();\n"],"mappings":";;;;;AAKA,MAAa,IAAI,IAAI"}