@geekmidas/constructs 0.0.10 → 0.0.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{AWSLambdaFunction-DhUb-Vs6.cjs → AWSLambdaFunction-DMxScuaw.cjs} +3 -3
- package/dist/{AWSLambdaFunction-DhUb-Vs6.cjs.map → AWSLambdaFunction-DMxScuaw.cjs.map} +1 -1
- package/dist/{AWSLambdaFunction-D5V3YVqv.d.cts → AWSLambdaFunction-DSB2oaFG.d.mts} +5 -5
- package/dist/{AWSLambdaFunction-D_V-ZQmS.mjs → AWSLambdaFunction-cL8A169J.mjs} +3 -3
- package/dist/{AWSLambdaFunction-D_V-ZQmS.mjs.map → AWSLambdaFunction-cL8A169J.mjs.map} +1 -1
- package/dist/{AWSLambdaFunction-DvZcnL8a.d.mts → AWSLambdaFunction-t6q2o8EL.d.cts} +5 -5
- package/dist/{AWSLambdaSubscriberAdaptor-J_pSz6pu.d.cts → AWSLambdaSubscriberAdaptor-Cknp_nn1.d.cts} +2 -2
- package/dist/{AWSLambdaSubscriberAdaptor-G8y3YkWj.mjs → AWSLambdaSubscriberAdaptor-CyFh7MN8.mjs} +1 -1
- package/dist/{AWSLambdaSubscriberAdaptor-G8y3YkWj.mjs.map → AWSLambdaSubscriberAdaptor-CyFh7MN8.mjs.map} +1 -1
- package/dist/{AWSLambdaSubscriberAdaptor-D6kjKjSf.d.mts → AWSLambdaSubscriberAdaptor-DpHzp-AM.d.mts} +2 -2
- package/dist/{AWSLambdaSubscriberAdaptor-CmPZ10JF.cjs → AWSLambdaSubscriberAdaptor-Dum5bkw3.cjs} +1 -1
- package/dist/{AWSLambdaSubscriberAdaptor-CmPZ10JF.cjs.map → AWSLambdaSubscriberAdaptor-Dum5bkw3.cjs.map} +1 -1
- package/dist/{AmazonApiGatewayEndpointAdaptor-DHcUetbw.mjs → AmazonApiGatewayEndpointAdaptor-CIEhW1TQ.mjs} +47 -16
- package/dist/AmazonApiGatewayEndpointAdaptor-CIEhW1TQ.mjs.map +1 -0
- package/dist/{AmazonApiGatewayEndpointAdaptor-BnNd8tCz.d.cts → AmazonApiGatewayEndpointAdaptor-CwItKPz2.d.cts} +6 -5
- package/dist/{AmazonApiGatewayEndpointAdaptor-DHBF_5jn.cjs → AmazonApiGatewayEndpointAdaptor-H8YvtfQm.cjs} +47 -16
- package/dist/AmazonApiGatewayEndpointAdaptor-H8YvtfQm.cjs.map +1 -0
- package/dist/{AmazonApiGatewayEndpointAdaptor-B7MKo8h3.d.mts → AmazonApiGatewayEndpointAdaptor-eDQgPNLH.d.mts} +7 -6
- package/dist/{AmazonApiGatewayV1EndpointAdaptor-CmUxFoPx.cjs → AmazonApiGatewayV1EndpointAdaptor-0n71d3gq.cjs} +3 -3
- package/dist/{AmazonApiGatewayV1EndpointAdaptor-CmUxFoPx.cjs.map → AmazonApiGatewayV1EndpointAdaptor-0n71d3gq.cjs.map} +1 -1
- package/dist/{AmazonApiGatewayV1EndpointAdaptor-CsV6zyvn.mjs → AmazonApiGatewayV1EndpointAdaptor-B36zXLJ7.mjs} +3 -3
- package/dist/{AmazonApiGatewayV1EndpointAdaptor-CsV6zyvn.mjs.map → AmazonApiGatewayV1EndpointAdaptor-B36zXLJ7.mjs.map} +1 -1
- package/dist/{AmazonApiGatewayV1EndpointAdaptor-gO9OxdZl.d.mts → AmazonApiGatewayV1EndpointAdaptor-CnGVpA38.d.mts} +4 -4
- package/dist/{AmazonApiGatewayV1EndpointAdaptor-BC-a2tt3.d.cts → AmazonApiGatewayV1EndpointAdaptor-DaCvUL6y.d.cts} +3 -3
- package/dist/{AmazonApiGatewayV2EndpointAdaptor-D1uz4wNg.d.mts → AmazonApiGatewayV2EndpointAdaptor-BELz2ijs.d.mts} +4 -4
- package/dist/{AmazonApiGatewayV2EndpointAdaptor-_bFcbEa-.mjs → AmazonApiGatewayV2EndpointAdaptor-CE3wZEb8.mjs} +3 -3
- package/dist/{AmazonApiGatewayV2EndpointAdaptor-_bFcbEa-.mjs.map → AmazonApiGatewayV2EndpointAdaptor-CE3wZEb8.mjs.map} +1 -1
- package/dist/{AmazonApiGatewayV2EndpointAdaptor-zNFUPxSY.d.cts → AmazonApiGatewayV2EndpointAdaptor-DA1PH0nc.d.cts} +3 -3
- package/dist/{AmazonApiGatewayV2EndpointAdaptor-cZbaqiUi.cjs → AmazonApiGatewayV2EndpointAdaptor-DtU3Cb8F.cjs} +3 -3
- package/dist/{AmazonApiGatewayV2EndpointAdaptor-cZbaqiUi.cjs.map → AmazonApiGatewayV2EndpointAdaptor-DtU3Cb8F.cjs.map} +1 -1
- package/dist/{Authorizer-Bbk9ziuG.d.mts → Authorizer-BRCVPz_O.d.mts} +1 -1
- package/dist/{Authorizer-iXsSB600.d.cts → Authorizer-DG54w1m2.d.cts} +1 -1
- package/dist/{BaseFunctionBuilder-BmsbV0BU.cjs → BaseFunctionBuilder-B8rT07QR.cjs} +7 -2
- package/dist/BaseFunctionBuilder-B8rT07QR.cjs.map +1 -0
- package/dist/{BaseFunctionBuilder-DCUtCdVL.mjs → BaseFunctionBuilder-CT7p10K1.mjs} +7 -2
- package/dist/BaseFunctionBuilder-CT7p10K1.mjs.map +1 -0
- package/dist/{BaseFunctionBuilder-C4ZJPkIZ.d.cts → BaseFunctionBuilder-Cf0op65o.d.cts} +7 -4
- package/dist/{BaseFunctionBuilder-CxBX0arJ.d.mts → BaseFunctionBuilder-DilipY1y.d.mts} +7 -4
- package/dist/{Construct-VEI7M3fs.cjs → Construct-BYSPikVm.cjs} +28 -10
- package/dist/Construct-BYSPikVm.cjs.map +1 -0
- package/dist/{Construct-C3hsQBy4.mjs → Construct-LWeB1rSQ.mjs} +28 -10
- package/dist/Construct-LWeB1rSQ.mjs.map +1 -0
- package/dist/{Construct-ci5u8Xnu.d.cts → Construct-ZPqE0vhn.d.mts} +9 -4
- package/dist/{Construct-zhmcmIdY.d.mts → Construct-dI_rgdSp.d.cts} +9 -4
- package/dist/Construct.cjs +1 -1
- package/dist/Construct.d.cts +1 -1
- package/dist/Construct.d.mts +1 -1
- package/dist/Construct.mjs +1 -1
- package/dist/{Cron-ByNYsQDH.d.mts → Cron-6lOgKqSA.d.cts} +3 -3
- package/dist/{Cron-DkPL-Fms.d.cts → Cron-BH_07atD.d.mts} +3 -3
- package/dist/{Cron-Dfgr8F9d.cjs → Cron-Bnd-2pgE.cjs} +3 -3
- package/dist/{Cron-Dfgr8F9d.cjs.map → Cron-Bnd-2pgE.cjs.map} +1 -1
- package/dist/{Cron-wP6Ob48_.mjs → Cron-DNRjf2cp.mjs} +3 -3
- package/dist/{Cron-wP6Ob48_.mjs.map → Cron-DNRjf2cp.mjs.map} +1 -1
- package/dist/{CronBuilder-BknVTWLE.d.mts → CronBuilder-5oK2AL2n.d.cts} +5 -5
- package/dist/{CronBuilder-BqTTozUi.d.cts → CronBuilder-D2b4zY4l.d.mts} +5 -5
- package/dist/{CronBuilder-C27c5oqh.mjs → CronBuilder-DdR2TuQa.mjs} +5 -5
- package/dist/{CronBuilder-C27c5oqh.mjs.map → CronBuilder-DdR2TuQa.mjs.map} +1 -1
- package/dist/{CronBuilder-W1ZqCJ7m.cjs → CronBuilder-dtw4ZyH6.cjs} +5 -5
- package/dist/{CronBuilder-W1ZqCJ7m.cjs.map → CronBuilder-dtw4ZyH6.cjs.map} +1 -1
- package/dist/{Endpoint-C98BwZjA.mjs → Endpoint-B69TqESg.mjs} +15 -8
- package/dist/Endpoint-B69TqESg.mjs.map +1 -0
- package/dist/{Endpoint-BjpQmTek.cjs → Endpoint-B9PryZES.cjs} +15 -8
- package/dist/Endpoint-B9PryZES.cjs.map +1 -0
- package/dist/{Endpoint-zHPjZ35J.d.cts → Endpoint-Cs-MsYlY.d.cts} +80 -12
- package/dist/{Endpoint-C16whGI-.d.mts → Endpoint-DuZlyjd4.d.mts} +80 -12
- package/dist/EndpointBuilder-BrB-K1jO.d.mts +86 -0
- package/dist/EndpointBuilder-C-PHInEW.d.cts +86 -0
- package/dist/{EndpointBuilder-CCUx4vep.mjs → EndpointBuilder-DnVL-EU_.mjs} +47 -5
- package/dist/EndpointBuilder-DnVL-EU_.mjs.map +1 -0
- package/dist/{EndpointBuilder-D2Zu8i9b.cjs → EndpointBuilder-DofwCnWJ.cjs} +47 -5
- package/dist/EndpointBuilder-DofwCnWJ.cjs.map +1 -0
- package/dist/{EndpointFactory-KkyIOE62.d.cts → EndpointFactory-6zNpVSYp.d.mts} +12 -11
- package/dist/{EndpointFactory-C4YhgXOc.cjs → EndpointFactory-Ba9mx9MU.cjs} +3 -3
- package/dist/{EndpointFactory-C4YhgXOc.cjs.map → EndpointFactory-Ba9mx9MU.cjs.map} +1 -1
- package/dist/{EndpointFactory-VnSAdwdv.d.mts → EndpointFactory-e5WYVR6t.d.cts} +12 -11
- package/dist/{EndpointFactory-RAb2zcw0.mjs → EndpointFactory-pPaIGFHV.mjs} +3 -3
- package/dist/{EndpointFactory-RAb2zcw0.mjs.map → EndpointFactory-pPaIGFHV.mjs.map} +1 -1
- package/dist/{Function-BbLYIKLL.d.cts → Function-6EWabl_X.d.cts} +5 -4
- package/dist/{Function-1Fh6Tdkg.cjs → Function-CO-s2pB8.cjs} +6 -6
- package/dist/Function-CO-s2pB8.cjs.map +1 -0
- package/dist/{Function-D-QEfH7k.mjs → Function-COnc-tWM.mjs} +6 -6
- package/dist/Function-COnc-tWM.mjs.map +1 -0
- package/dist/{Function-DFRZZCC-.d.mts → Function-G3JPHMaY.d.mts} +5 -4
- package/dist/{FunctionBuilder-DdGjpiFT.d.cts → FunctionBuilder-B3fpp3hA.d.cts} +13 -11
- package/dist/{FunctionBuilder-DuktGyZc.d.mts → FunctionBuilder-ByaB_LQ4.d.mts} +13 -11
- package/dist/{FunctionBuilder-Bxyr1Pf9.mjs → FunctionBuilder-CMhLQ4dt.mjs} +11 -6
- package/dist/FunctionBuilder-CMhLQ4dt.mjs.map +1 -0
- package/dist/{FunctionBuilder-DcEFYgbn.cjs → FunctionBuilder-_hMwZUof.cjs} +11 -6
- package/dist/FunctionBuilder-_hMwZUof.cjs.map +1 -0
- package/dist/{FunctionExecutionWrapper-CRYi047B.d.cts → FunctionExecutionWrapper-Ci-ookJG.d.cts} +2 -2
- package/dist/{FunctionExecutionWrapper-DaR-dSLw.d.mts → FunctionExecutionWrapper-DHFMLrOl.d.mts} +2 -2
- package/dist/{FunctionExecutionWrapper-BL6PE6Dv.mjs → FunctionExecutionWrapper-i9v5L3Av.mjs} +2 -2
- package/dist/{FunctionExecutionWrapper-BL6PE6Dv.mjs.map → FunctionExecutionWrapper-i9v5L3Av.mjs.map} +1 -1
- package/dist/{FunctionExecutionWrapper-Ee-CE8Fz.cjs → FunctionExecutionWrapper-sxJNTpuc.cjs} +2 -2
- package/dist/{FunctionExecutionWrapper-Ee-CE8Fz.cjs.map → FunctionExecutionWrapper-sxJNTpuc.cjs.map} +1 -1
- package/dist/{HonoEndpointAdaptor-CY1mXTe6.d.mts → HonoEndpointAdaptor-BElil8O5.d.mts} +9 -8
- package/dist/{HonoEndpointAdaptor-DcvZdYzx.cjs → HonoEndpointAdaptor-Cw2if5cG.cjs} +51 -20
- package/dist/HonoEndpointAdaptor-Cw2if5cG.cjs.map +1 -0
- package/dist/{HonoEndpointAdaptor-CyVPl4w2.mjs → HonoEndpointAdaptor-DAfnTFVS.mjs} +51 -20
- package/dist/HonoEndpointAdaptor-DAfnTFVS.mjs.map +1 -0
- package/dist/{HonoEndpointAdaptor-CYvrXBe-.d.cts → HonoEndpointAdaptor-DSHl8ZCY.d.cts} +8 -7
- package/dist/{Subscriber-Cy28j8MS.d.cts → Subscriber-BhzqUzs-.d.cts} +2 -2
- package/dist/{Subscriber-CTczVFsF.mjs → Subscriber-CGb8LjZa.mjs} +2 -2
- package/dist/{Subscriber-CTczVFsF.mjs.map → Subscriber-CGb8LjZa.mjs.map} +1 -1
- package/dist/{Subscriber-BL30GpWp.cjs → Subscriber-D-FPWts6.cjs} +2 -2
- package/dist/{Subscriber-BL30GpWp.cjs.map → Subscriber-D-FPWts6.cjs.map} +1 -1
- package/dist/{Subscriber-g3IWM1_d.d.mts → Subscriber-s6yfjeOc.d.mts} +2 -2
- package/dist/{SubscriberBuilder-BzK8kc2a.d.cts → SubscriberBuilder-BCVkp-ga.d.cts} +2 -2
- package/dist/{SubscriberBuilder-CekL3BoP.mjs → SubscriberBuilder-BcAspHv9.mjs} +2 -2
- package/dist/{SubscriberBuilder-CekL3BoP.mjs.map → SubscriberBuilder-BcAspHv9.mjs.map} +1 -1
- package/dist/{SubscriberBuilder-D1hojYLa.cjs → SubscriberBuilder-BfE2cL1q.cjs} +2 -2
- package/dist/{SubscriberBuilder-D1hojYLa.cjs.map → SubscriberBuilder-BfE2cL1q.cjs.map} +1 -1
- package/dist/{SubscriberBuilder-YjQ7qIpQ.d.mts → SubscriberBuilder-aCua5_wA.d.mts} +2 -2
- package/dist/{TestEndpointAdaptor-C-ahwGW6.cjs → TestEndpointAdaptor-Bn1WRFph.cjs} +47 -18
- package/dist/TestEndpointAdaptor-Bn1WRFph.cjs.map +1 -0
- package/dist/{TestEndpointAdaptor-DGWuai69.d.mts → TestEndpointAdaptor-DnlAA_rm.d.mts} +7 -6
- package/dist/{TestEndpointAdaptor-B4z9G5Ap.mjs → TestEndpointAdaptor-DubQOJk_.mjs} +47 -18
- package/dist/TestEndpointAdaptor-DubQOJk_.mjs.map +1 -0
- package/dist/{TestEndpointAdaptor-uXliWYjS.d.cts → TestEndpointAdaptor-o-xtSyQ3.d.cts} +6 -5
- package/dist/adaptors/aws.cjs +14 -13
- package/dist/adaptors/aws.d.cts +14 -14
- package/dist/adaptors/aws.d.mts +15 -15
- package/dist/adaptors/aws.mjs +14 -13
- package/dist/adaptors/hono.cjs +10 -9
- package/dist/adaptors/hono.d.cts +8 -8
- package/dist/adaptors/hono.d.mts +9 -9
- package/dist/adaptors/hono.mjs +10 -9
- package/dist/adaptors/testing.cjs +8 -7
- package/dist/adaptors/testing.d.cts +8 -8
- package/dist/adaptors/testing.d.mts +9 -9
- package/dist/adaptors/testing.mjs +8 -7
- package/dist/crons/Cron.cjs +6 -6
- package/dist/crons/Cron.d.cts +6 -6
- package/dist/crons/Cron.d.mts +6 -6
- package/dist/crons/Cron.mjs +6 -6
- package/dist/crons/CronBuilder.cjs +7 -7
- package/dist/crons/CronBuilder.d.cts +7 -7
- package/dist/crons/CronBuilder.d.mts +7 -7
- package/dist/crons/CronBuilder.mjs +7 -7
- package/dist/crons/index.cjs +7 -7
- package/dist/crons/index.d.cts +11 -11
- package/dist/crons/index.d.mts +11 -11
- package/dist/crons/index.mjs +7 -7
- package/dist/endpoints/AmazonApiGatewayEndpointAdaptor.cjs +8 -7
- package/dist/endpoints/AmazonApiGatewayEndpointAdaptor.d.cts +8 -8
- package/dist/endpoints/AmazonApiGatewayEndpointAdaptor.d.mts +9 -9
- package/dist/endpoints/AmazonApiGatewayEndpointAdaptor.mjs +8 -7
- package/dist/endpoints/AmazonApiGatewayV1EndpointAdaptor.cjs +10 -9
- package/dist/endpoints/AmazonApiGatewayV1EndpointAdaptor.d.cts +9 -9
- package/dist/endpoints/AmazonApiGatewayV1EndpointAdaptor.d.mts +10 -10
- package/dist/endpoints/AmazonApiGatewayV1EndpointAdaptor.mjs +10 -9
- package/dist/endpoints/AmazonApiGatewayV2EndpointAdaptor.cjs +10 -9
- package/dist/endpoints/AmazonApiGatewayV2EndpointAdaptor.d.cts +9 -9
- package/dist/endpoints/AmazonApiGatewayV2EndpointAdaptor.d.mts +10 -10
- package/dist/endpoints/AmazonApiGatewayV2EndpointAdaptor.mjs +10 -9
- package/dist/endpoints/Authorizer.d.cts +1 -1
- package/dist/endpoints/Authorizer.d.mts +1 -1
- package/dist/endpoints/Endpoint.cjs +6 -6
- package/dist/endpoints/Endpoint.d.cts +7 -7
- package/dist/endpoints/Endpoint.d.mts +8 -8
- package/dist/endpoints/Endpoint.mjs +6 -6
- package/dist/endpoints/EndpointBuilder.cjs +7 -7
- package/dist/endpoints/EndpointBuilder.d.cts +8 -8
- package/dist/endpoints/EndpointBuilder.d.mts +9 -9
- package/dist/endpoints/EndpointBuilder.mjs +7 -7
- package/dist/endpoints/EndpointFactory.cjs +8 -8
- package/dist/endpoints/EndpointFactory.d.cts +9 -9
- package/dist/endpoints/EndpointFactory.d.mts +10 -10
- package/dist/endpoints/EndpointFactory.mjs +8 -8
- package/dist/endpoints/HonoEndpointAdaptor.cjs +10 -9
- package/dist/endpoints/HonoEndpointAdaptor.d.cts +8 -8
- package/dist/endpoints/HonoEndpointAdaptor.d.mts +9 -9
- package/dist/endpoints/HonoEndpointAdaptor.mjs +10 -9
- package/dist/endpoints/TestEndpointAdaptor.cjs +8 -7
- package/dist/endpoints/TestEndpointAdaptor.d.cts +8 -8
- package/dist/endpoints/TestEndpointAdaptor.d.mts +9 -9
- package/dist/endpoints/TestEndpointAdaptor.mjs +8 -7
- package/dist/endpoints/audit.cjs +0 -0
- package/dist/endpoints/audit.d.cts +9 -0
- package/dist/endpoints/audit.d.mts +9 -0
- package/dist/endpoints/audit.mjs +0 -0
- package/dist/endpoints/helpers.cjs +7 -7
- package/dist/endpoints/helpers.d.cts +7 -7
- package/dist/endpoints/helpers.d.mts +8 -8
- package/dist/endpoints/helpers.mjs +7 -7
- package/dist/endpoints/index.cjs +8 -8
- package/dist/endpoints/index.cjs.map +1 -1
- package/dist/endpoints/index.d.cts +12 -12
- package/dist/endpoints/index.d.mts +13 -13
- package/dist/endpoints/index.mjs +8 -8
- package/dist/endpoints/index.mjs.map +1 -1
- package/dist/endpoints/parseHonoQuery.cjs +1 -1
- package/dist/endpoints/parseHonoQuery.mjs +1 -1
- package/dist/endpoints/parseQueryParams.cjs +1 -1
- package/dist/endpoints/parseQueryParams.mjs +1 -1
- package/dist/endpoints/processAudits.cjs +5 -0
- package/dist/endpoints/processAudits.d.cts +74 -0
- package/dist/endpoints/processAudits.d.mts +74 -0
- package/dist/endpoints/processAudits.mjs +3 -0
- package/dist/functions/AWSLambdaFunction.cjs +6 -6
- package/dist/functions/AWSLambdaFunction.d.cts +4 -4
- package/dist/functions/AWSLambdaFunction.d.mts +4 -4
- package/dist/functions/AWSLambdaFunction.mjs +6 -6
- package/dist/functions/BaseFunctionBuilder.cjs +2 -2
- package/dist/functions/BaseFunctionBuilder.d.cts +2 -2
- package/dist/functions/BaseFunctionBuilder.d.mts +2 -2
- package/dist/functions/BaseFunctionBuilder.mjs +2 -2
- package/dist/functions/Function.cjs +2 -2
- package/dist/functions/Function.d.cts +2 -2
- package/dist/functions/Function.d.mts +2 -2
- package/dist/functions/Function.mjs +2 -2
- package/dist/functions/FunctionBuilder.cjs +4 -4
- package/dist/functions/FunctionBuilder.d.cts +4 -4
- package/dist/functions/FunctionBuilder.d.mts +4 -4
- package/dist/functions/FunctionBuilder.mjs +4 -4
- package/dist/functions/FunctionExecutionWrapper.cjs +5 -5
- package/dist/functions/FunctionExecutionWrapper.d.cts +3 -3
- package/dist/functions/FunctionExecutionWrapper.d.mts +3 -3
- package/dist/functions/FunctionExecutionWrapper.mjs +5 -5
- package/dist/functions/TestFunctionAdaptor.cjs +4 -4
- package/dist/functions/TestFunctionAdaptor.d.cts +2 -2
- package/dist/functions/TestFunctionAdaptor.d.mts +2 -2
- package/dist/functions/TestFunctionAdaptor.mjs +4 -4
- package/dist/functions/index.cjs +5 -5
- package/dist/functions/index.d.cts +5 -5
- package/dist/functions/index.d.mts +5 -5
- package/dist/functions/index.mjs +5 -5
- package/dist/functions-BYqZAob8.mjs +8 -0
- package/dist/{functions-DD-00sWF.mjs.map → functions-BYqZAob8.mjs.map} +1 -1
- package/dist/{functions-BtgBiuC_.cjs → functions-D03lqK-r.cjs} +2 -2
- package/dist/{functions-BtgBiuC_.cjs.map → functions-D03lqK-r.cjs.map} +1 -1
- package/dist/{helpers-QM_FSjPY.cjs → helpers-BApRyhly.cjs} +2 -2
- package/dist/{helpers-QM_FSjPY.cjs.map → helpers-BApRyhly.cjs.map} +1 -1
- package/dist/{helpers-DtPeOhUV.mjs → helpers-BPDogwac.mjs} +2 -2
- package/dist/{helpers-DtPeOhUV.mjs.map → helpers-BPDogwac.mjs.map} +1 -1
- package/dist/index-CUg_hSq-.d.cts +9 -0
- package/dist/index-D-a7e2gv.d.mts +9 -0
- package/dist/index.cjs +1 -1
- package/dist/index.d.cts +1 -1
- package/dist/index.d.mts +2 -2
- package/dist/index.mjs +1 -1
- package/dist/{parseHonoQuery-DopC24vB.cjs → parseHonoQuery-CT8Cvin-.cjs} +1 -1
- package/dist/{parseHonoQuery-DopC24vB.cjs.map → parseHonoQuery-CT8Cvin-.cjs.map} +1 -1
- package/dist/{parseHonoQuery-znDKBhdE.mjs → parseHonoQuery-CwFKw2ua.mjs} +1 -1
- package/dist/{parseHonoQuery-znDKBhdE.mjs.map → parseHonoQuery-CwFKw2ua.mjs.map} +1 -1
- package/dist/{parseQueryParams-BJaRh3OB.mjs → parseQueryParams-CHINupbZ.mjs} +1 -1
- package/dist/{parseQueryParams-BJaRh3OB.mjs.map → parseQueryParams-CHINupbZ.mjs.map} +1 -1
- package/dist/{parseQueryParams-BzPop4I1.cjs → parseQueryParams-CwvXXwkW.cjs} +1 -1
- package/dist/{parseQueryParams-BzPop4I1.cjs.map → parseQueryParams-CwvXXwkW.cjs.map} +1 -1
- package/dist/processAudits-BFokHhCO.cjs +174 -0
- package/dist/processAudits-BFokHhCO.cjs.map +1 -0
- package/dist/processAudits-DfcB-X-4.mjs +156 -0
- package/dist/processAudits-DfcB-X-4.mjs.map +1 -0
- package/dist/publisher.d.cts +1 -1
- package/dist/publisher.d.mts +1 -1
- package/dist/subscribers/AWSLambdaSubscriberAdaptor.cjs +1 -1
- package/dist/subscribers/AWSLambdaSubscriberAdaptor.d.cts +3 -3
- package/dist/subscribers/AWSLambdaSubscriberAdaptor.d.mts +3 -3
- package/dist/subscribers/AWSLambdaSubscriberAdaptor.mjs +1 -1
- package/dist/subscribers/Subscriber.cjs +2 -2
- package/dist/subscribers/Subscriber.d.cts +2 -2
- package/dist/subscribers/Subscriber.d.mts +2 -2
- package/dist/subscribers/Subscriber.mjs +2 -2
- package/dist/subscribers/SubscriberBuilder.cjs +3 -3
- package/dist/subscribers/SubscriberBuilder.d.cts +3 -3
- package/dist/subscribers/SubscriberBuilder.d.mts +3 -3
- package/dist/subscribers/SubscriberBuilder.mjs +3 -3
- package/dist/subscribers/index.cjs +3 -3
- package/dist/subscribers/index.d.cts +5 -5
- package/dist/subscribers/index.d.mts +5 -5
- package/dist/subscribers/index.mjs +3 -3
- package/dist/{types-CVq20-fE.d.mts → types-DKf0juBf.d.mts} +1 -1
- package/dist/types.d.mts +1 -1
- package/package.json +14 -10
- package/src/Construct.ts +31 -5
- package/src/endpoints/AmazonApiGatewayEndpointAdaptor.ts +88 -19
- package/src/endpoints/Endpoint.ts +66 -13
- package/src/endpoints/EndpointBuilder.ts +183 -19
- package/src/endpoints/HonoEndpointAdaptor.ts +114 -33
- package/src/endpoints/TestEndpointAdaptor.ts +96 -21
- package/src/endpoints/__tests__/AmazonApiGatewayV1EndpointAdaptor.events.spec.ts +8 -0
- package/src/endpoints/__tests__/AmazonApiGatewayV1EndpointAdaptor.spec.ts +29 -0
- package/src/endpoints/__tests__/AmazonApiGatewayV2EndpointAdaptor.audits.spec.ts +626 -0
- package/src/endpoints/__tests__/AmazonApiGatewayV2EndpointAdaptor.events.spec.ts +9 -0
- package/src/endpoints/__tests__/AmazonApiGatewayV2EndpointAdaptor.kysely-audit.integration.spec.ts +718 -0
- package/src/endpoints/__tests__/Endpoint.spec.ts +19 -0
- package/src/endpoints/__tests__/EndpointBuilder.audit.spec.ts +424 -0
- package/src/endpoints/__tests__/HonoEndpointAdaptor.audit-transactions.spec.ts +685 -0
- package/src/endpoints/__tests__/HonoEndpointAdaptor.audits.spec.ts +632 -0
- package/src/endpoints/__tests__/HonoEndpointAdaptor.events.spec.ts +8 -0
- package/src/endpoints/__tests__/HonoEndpointAdaptor.kysely-audit.integration.spec.ts +678 -0
- package/src/endpoints/__tests__/HonoEndpointAdaptor.openapi.spec.ts +4 -0
- package/src/endpoints/__tests__/HonoEndpointAdaptor.spec.ts +25 -0
- package/src/endpoints/audit.ts +87 -0
- package/src/endpoints/index.ts +1 -0
- package/src/endpoints/processAudits.ts +339 -0
- package/src/functions/BaseFunctionBuilder.ts +50 -7
- package/src/functions/Function.ts +13 -9
- package/src/functions/FunctionBuilder.ts +78 -14
- package/src/functions/__tests__/FunctionBuilder.state-isolation.spec.ts +43 -2
- package/dist/AmazonApiGatewayEndpointAdaptor-DHBF_5jn.cjs.map +0 -1
- package/dist/AmazonApiGatewayEndpointAdaptor-DHcUetbw.mjs.map +0 -1
- package/dist/BaseFunctionBuilder-BmsbV0BU.cjs.map +0 -1
- package/dist/BaseFunctionBuilder-DCUtCdVL.mjs.map +0 -1
- package/dist/Construct-C3hsQBy4.mjs.map +0 -1
- package/dist/Construct-VEI7M3fs.cjs.map +0 -1
- package/dist/Endpoint-BjpQmTek.cjs.map +0 -1
- package/dist/Endpoint-C98BwZjA.mjs.map +0 -1
- package/dist/EndpointBuilder-B5l7zQU1.d.cts +0 -55
- package/dist/EndpointBuilder-BzqR0xvt.d.mts +0 -55
- package/dist/EndpointBuilder-CCUx4vep.mjs.map +0 -1
- package/dist/EndpointBuilder-D2Zu8i9b.cjs.map +0 -1
- package/dist/Function-1Fh6Tdkg.cjs.map +0 -1
- package/dist/Function-D-QEfH7k.mjs.map +0 -1
- package/dist/FunctionBuilder-Bxyr1Pf9.mjs.map +0 -1
- package/dist/FunctionBuilder-DcEFYgbn.cjs.map +0 -1
- package/dist/HonoEndpointAdaptor-CyVPl4w2.mjs.map +0 -1
- package/dist/HonoEndpointAdaptor-DcvZdYzx.cjs.map +0 -1
- package/dist/TestEndpointAdaptor-B4z9G5Ap.mjs.map +0 -1
- package/dist/TestEndpointAdaptor-C-ahwGW6.cjs.map +0 -1
- package/dist/functions-DD-00sWF.mjs +0 -8
- package/dist/index-CcmV3PKF.d.cts +0 -9
- package/dist/index-DQt3pQtF.d.mts +0 -9
@@ -1 +1 @@
-{"version":3,"file":"parseHonoQuery-
+
{"version":3,"file":"parseHonoQuery-CwFKw2ua.mjs","names":["c: Context","result: Record<string, any>"],"sources":["../src/endpoints/parseHonoQuery.ts"],"sourcesContent":["import type { Context } from 'hono';\n\n/**\n * Parse Hono query parameters to handle arrays and nested objects\n * Hono provides c.req.queries() for arrays, but we need to handle dot notation for objects\n */\nexport function parseHonoQuery(c: Context): Record<string, any> {\n const allParams = c.req.query();\n const result: Record<string, any> = {};\n\n // First, handle all query parameters\n for (const [key, value] of Object.entries(allParams)) {\n if (key.includes('.')) {\n // Handle dot notation for objects\n const parts = key.split('.');\n let current = result;\n\n // Navigate/create the nested structure\n for (let i = 0; i < parts.length - 1; i++) {\n const part = parts[i];\n if (\n !current[part] ||\n typeof current[part] !== 'object' ||\n Array.isArray(current[part])\n ) {\n current[part] = {};\n }\n current = current[part];\n }\n\n // Set the final value, checking for arrays in nested keys\n const lastPart = parts[parts.length - 1];\n const multipleValues = c.req.queries(key);\n if (multipleValues && multipleValues.length > 1) {\n current[lastPart] = multipleValues;\n } else {\n current[lastPart] = value;\n }\n } else {\n // For regular keys, check if there are multiple values\n const multipleValues = c.req.queries(key);\n if (multipleValues && multipleValues.length > 1) {\n result[key] = multipleValues;\n } else {\n result[key] = value;\n }\n }\n }\n\n return result;\n}\n"],"mappings":";;;;;AAMA,SAAgB,eAAeA,GAAiC;CAC9D,MAAM,YAAY,EAAE,IAAI,OAAO;CAC/B,MAAMC,SAA8B,CAAE;AAGtC,MAAK,MAAM,CAAC,KAAK,MAAM,IAAI,OAAO,QAAQ,UAAU,CAClD,KAAI,IAAI,SAAS,IAAI,EAAE;EAErB,MAAM,QAAQ,IAAI,MAAM,IAAI;EAC5B,IAAI,UAAU;AAGd,OAAK,IAAI,IAAI,GAAG,IAAI,MAAM,SAAS,GAAG,KAAK;GACzC,MAAM,OAAO,MAAM;AACnB,QACG,QAAQ,gBACF,QAAQ,UAAU,YACzB,MAAM,QAAQ,QAAQ,MAAM,CAE5B,SAAQ,QAAQ,CAAE;AAEpB,aAAU,QAAQ;EACnB;EAGD,MAAM,WAAW,MAAM,MAAM,SAAS;EACtC,MAAM,iBAAiB,EAAE,IAAI,QAAQ,IAAI;AACzC,MAAI,kBAAkB,eAAe,SAAS,EAC5C,SAAQ,YAAY;MAEpB,SAAQ,YAAY;CAEvB,OAAM;EAEL,MAAM,iBAAiB,EAAE,IAAI,QAAQ,IAAI;AACzC,MAAI,kBAAkB,eAAe,SAAS,EAC5C,QAAO,OAAO;MAEd,QAAO,OAAO;CAEjB;AAGH,QAAO;AACR"}
@@ -1 +1 @@
-{"version":3,"file":"parseQueryParams-
+
{"version":3,"file":"parseQueryParams-CHINupbZ.mjs","names":["queryParams: Record<string, string | string[] | undefined> | null","result: Record<string, any>"],"sources":["../src/endpoints/parseQueryParams.ts"],"sourcesContent":["/**\n * Parse query parameters from a flat object into a nested structure\n * Handles arrays (multiple values with same key) and objects (dot notation)\n *\n * @example\n * parseQueryParams({ 'tags': ['a', 'b'], 'filter.name': 'john' })\n * // Returns: { tags: ['a', 'b'], filter: { name: 'john' } }\n */\nexport function parseQueryParams(\n queryParams: Record<string, string | string[] | undefined> | null,\n): Record<string, any> {\n if (!queryParams) {\n return {};\n }\n\n const result: Record<string, any> = {};\n\n for (const [key, value] of Object.entries(queryParams)) {\n if (value === undefined) {\n continue;\n }\n\n // Check if the key contains dot notation\n if (key.includes('.')) {\n const parts = key.split('.');\n let current = result;\n\n // Navigate/create the nested structure\n for (let i = 0; i < parts.length - 1; i++) {\n const part = parts[i];\n if (\n !current[part] ||\n typeof current[part] !== 'object' ||\n Array.isArray(current[part])\n ) {\n current[part] = {};\n }\n current = current[part];\n }\n\n // Set the final value\n const lastPart = parts[parts.length - 1];\n current[lastPart] = value;\n } else {\n // Simple key, just assign the value\n result[key] = value;\n }\n }\n\n return result;\n}\n"],"mappings":";;;;;;;;;AAQA,SAAgB,iBACdA,aACqB;AACrB,MAAK,YACH,QAAO,CAAE;CAGX,MAAMC,SAA8B,CAAE;AAEtC,MAAK,MAAM,CAAC,KAAK,MAAM,IAAI,OAAO,QAAQ,YAAY,EAAE;AACtD,MAAI,iBACF;AAIF,MAAI,IAAI,SAAS,IAAI,EAAE;GACrB,MAAM,QAAQ,IAAI,MAAM,IAAI;GAC5B,IAAI,UAAU;AAGd,QAAK,IAAI,IAAI,GAAG,IAAI,MAAM,SAAS,GAAG,KAAK;IACzC,MAAM,OAAO,MAAM;AACnB,SACG,QAAQ,gBACF,QAAQ,UAAU,YACzB,MAAM,QAAQ,QAAQ,MAAM,CAE5B,SAAQ,QAAQ,CAAE;AAEpB,cAAU,QAAQ;GACnB;GAGD,MAAM,WAAW,MAAM,MAAM,SAAS;AACtC,WAAQ,YAAY;EACrB,MAEC,QAAO,OAAO;CAEjB;AAED,QAAO;AACR"}
@@ -1 +1 @@
-{"version":3,"file":"parseQueryParams-
+
{"version":3,"file":"parseQueryParams-CwvXXwkW.cjs","names":["queryParams: Record<string, string | string[] | undefined> | null","result: Record<string, any>"],"sources":["../src/endpoints/parseQueryParams.ts"],"sourcesContent":["/**\n * Parse query parameters from a flat object into a nested structure\n * Handles arrays (multiple values with same key) and objects (dot notation)\n *\n * @example\n * parseQueryParams({ 'tags': ['a', 'b'], 'filter.name': 'john' })\n * // Returns: { tags: ['a', 'b'], filter: { name: 'john' } }\n */\nexport function parseQueryParams(\n queryParams: Record<string, string | string[] | undefined> | null,\n): Record<string, any> {\n if (!queryParams) {\n return {};\n }\n\n const result: Record<string, any> = {};\n\n for (const [key, value] of Object.entries(queryParams)) {\n if (value === undefined) {\n continue;\n }\n\n // Check if the key contains dot notation\n if (key.includes('.')) {\n const parts = key.split('.');\n let current = result;\n\n // Navigate/create the nested structure\n for (let i = 0; i < parts.length - 1; i++) {\n const part = parts[i];\n if (\n !current[part] ||\n typeof current[part] !== 'object' ||\n Array.isArray(current[part])\n ) {\n current[part] = {};\n }\n current = current[part];\n }\n\n // Set the final value\n const lastPart = parts[parts.length - 1];\n current[lastPart] = value;\n } else {\n // Simple key, just assign the value\n result[key] = value;\n }\n }\n\n return result;\n}\n"],"mappings":";;;;;;;;;;AAQA,SAAgB,iBACdA,aACqB;AACrB,MAAK,YACH,QAAO,CAAE;CAGX,MAAMC,SAA8B,CAAE;AAEtC,MAAK,MAAM,CAAC,KAAK,MAAM,IAAI,OAAO,QAAQ,YAAY,EAAE;AACtD,MAAI,iBACF;AAIF,MAAI,IAAI,SAAS,IAAI,EAAE;GACrB,MAAM,QAAQ,IAAI,MAAM,IAAI;GAC5B,IAAI,UAAU;AAGd,QAAK,IAAI,IAAI,GAAG,IAAI,MAAM,SAAS,GAAG,KAAK;IACzC,MAAM,OAAO,MAAM;AACnB,SACG,QAAQ,gBACF,QAAQ,UAAU,YACzB,MAAM,QAAQ,QAAQ,MAAM,CAE5B,SAAQ,QAAQ,CAAE;AAEpB,cAAU,QAAQ;GACnB;GAGD,MAAM,WAAW,MAAM,MAAM,SAAS;AACtC,WAAQ,YAAY;EACrB,MAEC,QAAO,OAAO;CAEjB;AAED,QAAO;AACR"}
@@ -0,0 +1,174 @@
+const require_chunk = require('./chunk-CUT6urMc.cjs');
+const __geekmidas_audit = require_chunk.__toESM(require("@geekmidas/audit"));
+const __geekmidas_audit_kysely = require_chunk.__toESM(require("@geekmidas/audit/kysely"));
+
+//#region src/endpoints/processAudits.ts
+/**
+ * Process declarative audit definitions after successful endpoint execution.
+ * Similar to publishConstructEvents for events.
+ *
+ * @param endpoint - The endpoint with audit configuration
+ * @param response - The handler response to generate audit payloads from
+ * @param serviceDiscovery - Service discovery for registering audit storage
+ * @param logger - Logger for debug/error messages
+ * @param ctx - Request context (session, headers, cookies, services)
+ * @param existingAuditor - Optional existing auditor instance (e.g., from handler context).
+ * If provided, uses this auditor (with its stored transaction).
+ * If not provided, creates a new auditor.
+ */
+async function processEndpointAudits(endpoint, response, serviceDiscovery, logger, ctx, existingAuditor) {
+	try {
+		const audits = endpoint.audits;
+		const hasExistingRecords = existingAuditor && existingAuditor.getRecords().length > 0;
+		if (!audits?.length && !hasExistingRecords) {
+			logger.debug("No audits to process");
+			return;
+		}
+		if (!endpoint.auditorStorageService) {
+			if (hasExistingRecords || audits?.length) logger.warn("No auditor storage service available");
+			return;
+		}
+		let auditor;
+		if (existingAuditor) {
+			auditor = existingAuditor;
+			logger.debug("Using existing auditor from handler context");
+		} else {
+			const services = await serviceDiscovery.register([endpoint.auditorStorageService]);
+			const storage = services[endpoint.auditorStorageService.serviceName];
+			let actor = {
+				id: "system",
+				type: "system"
+			};
+			if (endpoint.actorExtractor) try {
+				actor = await endpoint.actorExtractor({
+					services: ctx.services,
+					session: ctx.session,
+					header: ctx.header,
+					cookie: ctx.cookie,
+					logger
+				});
+			} catch (error) {
+				logger.error(error, "Failed to extract actor for audits");
+			}
+			auditor = new __geekmidas_audit.DefaultAuditor({
+				actor,
+				storage,
+				metadata: {
+					endpoint: endpoint.route,
+					method: endpoint.method
+				}
+			});
+		}
+		if (audits?.length) for (const audit of audits) {
+			logger.debug({ audit: audit.type }, "Processing declarative audit");
+			if (audit.when && !audit.when(response)) {
+				logger.debug({ audit: audit.type }, "Audit skipped due to when condition");
+				continue;
+			}
+			const payload = audit.payload(response);
+			const entityId = audit.entityId?.(response);
+			auditor.audit(audit.type, payload, {
+				table: audit.table,
+				entityId
+			});
+		}
+		const recordCount = auditor.getRecords().length;
+		if (recordCount > 0) {
+			const trx = "getTransaction" in auditor ? auditor.getTransaction() : void 0;
+			logger.debug({
+				auditCount: recordCount,
+				hasTransaction: !!trx
+			}, "Flushing audits");
+			await auditor.flush();
+		}
+	} catch (error) {
+		logger.error(error, "Failed to process audits");
+	}
+}
+/**
+ * Create audit context for handler execution.
+ * Returns the auditor and storage for use in the handler.
+ *
+ * @param endpoint - The endpoint with audit configuration
+ * @param serviceDiscovery - Service discovery for getting audit storage
+ * @param logger - Logger for debug/error messages
+ * @param ctx - Request context for actor extraction
+ * @returns Audit context with auditor and storage, or undefined if not configured
+ */
+async function createAuditContext(endpoint, serviceDiscovery, logger, ctx) {
+	if (!endpoint.auditorStorageService) return void 0;
+	const services = await serviceDiscovery.register([endpoint.auditorStorageService]);
+	const storage = services[endpoint.auditorStorageService.serviceName];
+	let actor = {
+		id: "system",
+		type: "system"
+	};
+	if (endpoint.actorExtractor) try {
+		actor = await endpoint.actorExtractor({
+			services: ctx.services,
+			session: ctx.session,
+			header: ctx.header,
+			cookie: ctx.cookie,
+			logger
+		});
+	} catch (error) {
+		logger.error(error, "Failed to extract actor for audits");
+	}
+	const auditor = new __geekmidas_audit.DefaultAuditor({
+		actor,
+		storage,
+		metadata: {
+			endpoint: endpoint.route,
+			method: endpoint.method
+		}
+	});
+	return {
+		auditor,
+		storage
+	};
+}
+/**
+ * Execute a handler with automatic audit transaction support.
+ * If the audit storage has a database (via getDatabase()), wraps execution
+ * in a transaction so audits are atomic with handler's database operations.
+ *
+ * @param auditContext - The audit context from createAuditContext
+ * @param handler - The handler function to execute (receives auditor)
+ * @param onComplete - Called after handler with response, to process declarative audits
+ * @returns The handler result
+ */
+async function executeWithAuditTransaction(auditContext, handler, onComplete) {
+	if (!auditContext) return handler(void 0);
+	const { auditor, storage } = auditContext;
+	const db = storage.getDatabase?.();
+	if (db) return (0, __geekmidas_audit_kysely.withAuditableTransaction)(db, auditor, async () => {
+		const response$1 = await handler(auditor);
+		if (onComplete) await onComplete(response$1, auditor);
+		return response$1;
+	});
+	const response = await handler(auditor);
+	if (onComplete) await onComplete(response, auditor);
+	await auditor.flush();
+	return response;
+}
+
+//#endregion
+Object.defineProperty(exports, 'createAuditContext', {
+	enumerable: true,
+	get: function () {
+		return createAuditContext;
+	}
+});
+Object.defineProperty(exports, 'executeWithAuditTransaction', {
+	enumerable: true,
+	get: function () {
+		return executeWithAuditTransaction;
+	}
+});
+Object.defineProperty(exports, 'processEndpointAudits', {
+	enumerable: true,
+	get: function () {
+		return processEndpointAudits;
+	}
+});
+//# sourceMappingURL=processAudits-BFokHhCO.cjs.map
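The loop in `processEndpointAudits` above reads a fixed shape from each entry of `endpoint.audits`. A minimal sketch of that shape follows; the `DeclarativeAudit` interface, `UserResponse`, and all sample data are illustrative stand-ins, not types exported by the package.

```ts
// Illustrative only: field names mirror what processEndpointAudits reads
// (audit.type, audit.table, audit.payload(response), audit.when?.(response),
// audit.entityId?.(response)); nothing here comes from the package's type surface.
interface UserResponse {
  id: string;
  email: string;
  deactivated: boolean;
}

interface DeclarativeAudit<R> {
  type: string;                        // action name passed to auditor.audit()
  table: string;                       // table recorded alongside the audit
  payload: (response: R) => unknown;   // derive the audit payload from the handler response
  when?: (response: R) => boolean;     // optional guard; a falsy result skips the entry
  entityId?: (response: R) => string;  // optional entity id extractor
}

const audits: DeclarativeAudit<UserResponse>[] = [{
  type: 'user.deactivated',
  table: 'users',
  when: (r) => r.deactivated,
  entityId: (r) => r.id,
  payload: (r) => ({ id: r.id, email: r.email }),
}];

// Mirrors the processing loop: every entry whose `when` passes becomes one
// auditor.audit(type, payload, { table, entityId }) call, flushed afterwards.
const response: UserResponse = { id: 'u_1', email: 'user@example.com', deactivated: true };
for (const audit of audits) {
  if (audit.when && !audit.when(response)) continue;
  console.log(audit.type, audit.payload(response), {
    table: audit.table,
    entityId: audit.entityId?.(response),
  });
}
```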
@@ -0,0 +1 @@
+
{"version":3,"file":"processAudits-BFokHhCO.cjs","names":["endpoint: Endpoint<\n any,\n any,\n any,\n OutSchema,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction\n >","response: InferStandardSchema<OutSchema>","serviceDiscovery: ServiceDiscovery<any, any>","logger: TLogger","ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n }","existingAuditor?: Auditor<TAuditAction>","auditor: Auditor<TAuditAction>","actor: AuditActor","DefaultAuditor","endpoint: Endpoint<\n any,\n any,\n any,\n any,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction\n >","auditContext: AuditExecutionContext<TAuditAction> | undefined","handler: (auditor?: Auditor<TAuditAction>) => Promise<T>","onComplete?: (response: T, auditor: Auditor<TAuditAction>) => Promise<void>","response"],"sources":["../src/endpoints/processAudits.ts"],"sourcesContent":["import type {\n AuditableAction,\n AuditActor,\n Auditor,\n AuditStorage,\n} from '@geekmidas/audit';\nimport { DefaultAuditor } from '@geekmidas/audit';\nimport { withAuditableTransaction } from '@geekmidas/audit/kysely';\nimport type { Logger } from '@geekmidas/logger';\nimport type { InferStandardSchema } from '@geekmidas/schema';\nimport type { Service, ServiceDiscovery } from '@geekmidas/services';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\nimport type { Endpoint, CookieFn, HeaderFn } from './Endpoint';\nimport type { ActorExtractor, MappedAudit } from './audit';\n\n/**\n * Process declarative audit definitions after successful endpoint execution.\n * Similar to publishConstructEvents for events.\n *\n * @param endpoint - The endpoint with audit configuration\n * @param response - The handler response to generate audit payloads from\n * @param serviceDiscovery - Service discovery for registering audit storage\n * @param logger - Logger for debug/error messages\n * @param ctx - Request context (session, headers, cookies, services)\n * @param existingAuditor - Optional existing auditor instance (e.g., from handler context).\n * If provided, uses this auditor (with its stored transaction).\n * If not provided, creates a new auditor.\n */\nexport async function processEndpointAudits<\n TServices extends Service[] = [],\n TSession = unknown,\n TLogger extends Logger = Logger,\n OutSchema extends StandardSchemaV1 | undefined = undefined,\n TAuditStorage extends AuditStorage | undefined = undefined,\n TAuditStorageServiceName extends string = string,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n>(\n endpoint: Endpoint<\n any,\n any,\n any,\n OutSchema,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction\n >,\n response: InferStandardSchema<OutSchema>,\n serviceDiscovery: ServiceDiscovery<any, any>,\n logger: TLogger,\n ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n },\n existingAuditor?: Auditor<TAuditAction>,\n): Promise<void> {\n try {\n const audits = endpoint.audits as MappedAudit<TAuditAction, OutSchema>[];\n\n // If we have an existing auditor (from handler context), we need to flush\n // any manual audits it collected, even if there are no declarative audits\n const hasExistingRecords = existingAuditor && existingAuditor.getRecords().length > 0;\n\n // Skip if no declarative audits and no existing records to 
flush\n if (!audits?.length && !hasExistingRecords) {\n logger.debug('No audits to process');\n return;\n }\n\n // If no auditor storage service and we have things to process, warn\n if (!endpoint.auditorStorageService) {\n if (hasExistingRecords || audits?.length) {\n logger.warn('No auditor storage service available');\n }\n return;\n }\n\n // Get or create auditor\n let auditor: Auditor<TAuditAction>;\n\n if (existingAuditor) {\n // Use existing auditor (preserves stored transaction and manual audits)\n auditor = existingAuditor;\n logger.debug('Using existing auditor from handler context');\n } else {\n // Create new auditor (backward compatibility)\n const services = await serviceDiscovery.register([\n endpoint.auditorStorageService,\n ]);\n const storage = services[\n endpoint.auditorStorageService.serviceName\n ] as AuditStorage;\n\n // Extract actor if configured\n let actor: AuditActor = { id: 'system', type: 'system' };\n if (endpoint.actorExtractor) {\n try {\n actor = await (\n endpoint.actorExtractor as ActorExtractor<TServices, TSession, TLogger>\n )({\n services: ctx.services as any,\n session: ctx.session,\n header: ctx.header,\n cookie: ctx.cookie,\n logger,\n });\n } catch (error) {\n logger.error(error as Error, 'Failed to extract actor for audits');\n // Continue with system actor\n }\n }\n\n auditor = new DefaultAuditor<TAuditAction>({\n actor,\n storage,\n metadata: {\n endpoint: endpoint.route,\n method: endpoint.method,\n },\n });\n }\n\n // Process each declarative audit\n if (audits?.length) {\n for (const audit of audits) {\n logger.debug({ audit: audit.type }, 'Processing declarative audit');\n\n // Check when condition\n if (audit.when && !audit.when(response as any)) {\n logger.debug({ audit: audit.type }, 'Audit skipped due to when condition');\n continue;\n }\n\n // Extract payload\n const payload = audit.payload(response as any);\n\n // Extract entityId if configured\n const entityId = audit.entityId?.(response as any);\n\n // Record the audit\n auditor.audit(audit.type as any, payload as any, {\n table: audit.table,\n entityId,\n });\n }\n }\n\n // Flush audits to storage\n // Note: If existingAuditor has a stored transaction (via setTransaction),\n // flush() will use it automatically\n const recordCount = auditor.getRecords().length;\n if (recordCount > 0) {\n // Check if auditor has a stored transaction (for logging purposes)\n const trx = 'getTransaction' in auditor\n ? 
(auditor as { getTransaction(): unknown }).getTransaction()\n : undefined;\n logger.debug(\n { auditCount: recordCount, hasTransaction: !!trx },\n 'Flushing audits',\n );\n await auditor.flush();\n }\n } catch (error) {\n logger.error(error as Error, 'Failed to process audits');\n // Don't rethrow - audit failures shouldn't fail the request\n }\n}\n\n/**\n * Context for audit-aware handler execution.\n */\nexport interface AuditExecutionContext<\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n> {\n /** The auditor instance for recording audits */\n auditor: Auditor<TAuditAction>;\n /** The audit storage instance */\n storage: AuditStorage;\n}\n\n/**\n * Create audit context for handler execution.\n * Returns the auditor and storage for use in the handler.\n *\n * @param endpoint - The endpoint with audit configuration\n * @param serviceDiscovery - Service discovery for getting audit storage\n * @param logger - Logger for debug/error messages\n * @param ctx - Request context for actor extraction\n * @returns Audit context with auditor and storage, or undefined if not configured\n */\nexport async function createAuditContext<\n TServices extends Service[] = [],\n TSession = unknown,\n TLogger extends Logger = Logger,\n TAuditStorage extends AuditStorage | undefined = undefined,\n TAuditStorageServiceName extends string = string,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n>(\n endpoint: Endpoint<\n any,\n any,\n any,\n any,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction\n >,\n serviceDiscovery: ServiceDiscovery<any, any>,\n logger: TLogger,\n ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n },\n): Promise<AuditExecutionContext<TAuditAction> | undefined> {\n if (!endpoint.auditorStorageService) {\n return undefined;\n }\n\n const services = await serviceDiscovery.register([\n endpoint.auditorStorageService,\n ]);\n const storage = services[\n endpoint.auditorStorageService.serviceName\n ] as AuditStorage;\n\n // Extract actor if configured\n let actor: AuditActor = { id: 'system', type: 'system' };\n if (endpoint.actorExtractor) {\n try {\n actor = await (\n endpoint.actorExtractor as ActorExtractor<TServices, TSession, TLogger>\n )({\n services: ctx.services as any,\n session: ctx.session,\n header: ctx.header,\n cookie: ctx.cookie,\n logger,\n });\n } catch (error) {\n logger.error(error as Error, 'Failed to extract actor for audits');\n }\n }\n\n const auditor = new DefaultAuditor<TAuditAction>({\n actor,\n storage,\n metadata: {\n endpoint: endpoint.route,\n method: endpoint.method,\n },\n });\n\n return { auditor, storage };\n}\n\n/**\n * Execute a handler with automatic audit transaction support.\n * If the audit storage has a database (via getDatabase()), wraps execution\n * in a transaction so audits are atomic with handler's database operations.\n *\n * @param auditContext - The audit context from createAuditContext\n * @param handler - The handler function to execute (receives auditor)\n * @param onComplete - Called after handler with response, to process declarative audits\n * @returns The handler result\n */\nexport async function executeWithAuditTransaction<\n T,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n>(\n auditContext: AuditExecutionContext<TAuditAction> | undefined,\n handler: (auditor?: 
Auditor<TAuditAction>) => Promise<T>,\n onComplete?: (response: T, auditor: Auditor<TAuditAction>) => Promise<void>,\n): Promise<T> {\n // No audit context - just run handler\n if (!auditContext) {\n return handler(undefined);\n }\n\n const { auditor, storage } = auditContext;\n\n // Check if storage has a database for transactional execution\n const db = storage.getDatabase?.();\n\n if (db) {\n // Wrap in transaction - audits are atomic with handler operations\n return withAuditableTransaction(\n db as any,\n auditor as any,\n async () => {\n const response = await handler(auditor);\n\n // Process declarative audits within the transaction\n if (onComplete) {\n await onComplete(response, auditor);\n }\n\n // Audits are flushed by withAuditableTransaction before commit\n return response;\n },\n );\n }\n\n // No database - run handler and flush audits after\n const response = await handler(auditor);\n\n if (onComplete) {\n await onComplete(response, auditor);\n }\n\n // Flush audits (no transaction)\n await auditor.flush();\n\n return response;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AA4BA,eAAsB,sBAYpBA,UAcAC,UACAC,kBACAC,QACAC,KAMAC,iBACe;AACf,KAAI;EACF,MAAM,SAAS,SAAS;EAIxB,MAAM,qBAAqB,mBAAmB,gBAAgB,YAAY,CAAC,SAAS;AAGpF,OAAK,QAAQ,WAAW,oBAAoB;AAC1C,UAAO,MAAM,uBAAuB;AACpC;EACD;AAGD,OAAK,SAAS,uBAAuB;AACnC,OAAI,sBAAsB,QAAQ,OAChC,QAAO,KAAK,uCAAuC;AAErD;EACD;EAGD,IAAIC;AAEJ,MAAI,iBAAiB;AAEnB,aAAU;AACV,UAAO,MAAM,8CAA8C;EAC5D,OAAM;GAEL,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAC/C,SAAS,qBACV,EAAC;GACF,MAAM,UAAU,SACd,SAAS,sBAAsB;GAIjC,IAAIC,QAAoB;IAAE,IAAI;IAAU,MAAM;GAAU;AACxD,OAAI,SAAS,eACX,KAAI;AACF,YAAQ,MAAM,AACZ,SAAS,eACT;KACA,UAAU,IAAI;KACd,SAAS,IAAI;KACb,QAAQ,IAAI;KACZ,QAAQ,IAAI;KACZ;IACD,EAAC;GACH,SAAQ,OAAO;AACd,WAAO,MAAM,OAAgB,qCAAqC;GAEnE;AAGH,aAAU,IAAIC,iCAA6B;IACzC;IACA;IACA,UAAU;KACR,UAAU,SAAS;KACnB,QAAQ,SAAS;IAClB;GACF;EACF;AAGD,MAAI,QAAQ,OACV,MAAK,MAAM,SAAS,QAAQ;AAC1B,UAAO,MAAM,EAAE,OAAO,MAAM,KAAM,GAAE,+BAA+B;AAGnE,OAAI,MAAM,SAAS,MAAM,KAAK,SAAgB,EAAE;AAC9C,WAAO,MAAM,EAAE,OAAO,MAAM,KAAM,GAAE,sCAAsC;AAC1E;GACD;GAGD,MAAM,UAAU,MAAM,QAAQ,SAAgB;GAG9C,MAAM,WAAW,MAAM,WAAW,SAAgB;AAGlD,WAAQ,MAAM,MAAM,MAAa,SAAgB;IAC/C,OAAO,MAAM;IACb;GACD,EAAC;EACH;EAMH,MAAM,cAAc,QAAQ,YAAY,CAAC;AACzC,MAAI,cAAc,GAAG;GAEnB,MAAM,MAAM,oBAAoB,UAC5B,AAAC,QAA0C,gBAAgB;AAE/D,UAAO,MACL;IAAE,YAAY;IAAa,kBAAkB;GAAK,GAClD,kBACD;AACD,SAAM,QAAQ,OAAO;EACtB;CACF,SAAQ,OAAO;AACd,SAAO,MAAM,OAAgB,2BAA2B;CAEzD;AACF;;;;;;;;;;;AA2BD,eAAsB,mBAWpBC,UAcAP,kBACAC,QACAC,KAM0D;AAC1D,MAAK,SAAS,sBACZ;CAGF,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAC/C,SAAS,qBACV,EAAC;CACF,MAAM,UAAU,SACd,SAAS,sBAAsB;CAIjC,IAAIG,QAAoB;EAAE,IAAI;EAAU,MAAM;CAAU;AACxD,KAAI,SAAS,eACX,KAAI;AACF,UAAQ,MAAM,AACZ,SAAS,eACT;GACA,UAAU,IAAI;GACd,SAAS,IAAI;GACb,QAAQ,IAAI;GACZ,QAAQ,IAAI;GACZ;EACD,EAAC;CACH,SAAQ,OAAO;AACd,SAAO,MAAM,OAAgB,qCAAqC;CACnE;CAGH,MAAM,UAAU,IAAIC,iCAA6B;EAC/C;EACA;EACA,UAAU;GACR,UAAU,SAAS;GACnB,QAAQ,SAAS;EAClB;CACF;AAED,QAAO;EAAE;EAAS;CAAS;AAC5B;;;;;;;;;;;AAYD,eAAsB,4BAOpBE,cACAC,SACAC,YACY;AAEZ,MAAK,aACH,QAAO,eAAkB;CAG3B,MAAM,EAAE,SAAS,SAAS,GAAG;CAG7B,MAAM,KAAK,QAAQ,eAAe;AAElC,KAAI,GAEF,QAAO,uDACL,IACA,SACA,YAAY;EACV,MAAMC,aAAW,MAAM,QAAQ,QAAQ;AAGvC,MAAI,WACF,OAAM,WAAWA,YAAU,QAAQ;AAIrC,SAAOA;CACR,EACF;CAIH,MAAM,WAAW,MAAM,QAAQ,QAAQ;AAEvC,KAAI,WACF,OAAM,WAAW,UAAU,QAAQ;AAIrC,OAAM,QAAQ,OAAO;AAErB,QAAO;AACR"}
@@ -0,0 +1,156 @@
+import { DefaultAuditor } from "@geekmidas/audit";
+import { withAuditableTransaction } from "@geekmidas/audit/kysely";
+
+//#region src/endpoints/processAudits.ts
+/**
+ * Process declarative audit definitions after successful endpoint execution.
+ * Similar to publishConstructEvents for events.
+ *
+ * @param endpoint - The endpoint with audit configuration
+ * @param response - The handler response to generate audit payloads from
+ * @param serviceDiscovery - Service discovery for registering audit storage
+ * @param logger - Logger for debug/error messages
+ * @param ctx - Request context (session, headers, cookies, services)
+ * @param existingAuditor - Optional existing auditor instance (e.g., from handler context).
+ * If provided, uses this auditor (with its stored transaction).
+ * If not provided, creates a new auditor.
+ */
+async function processEndpointAudits(endpoint, response, serviceDiscovery, logger, ctx, existingAuditor) {
+	try {
+		const audits = endpoint.audits;
+		const hasExistingRecords = existingAuditor && existingAuditor.getRecords().length > 0;
+		if (!audits?.length && !hasExistingRecords) {
+			logger.debug("No audits to process");
+			return;
+		}
+		if (!endpoint.auditorStorageService) {
+			if (hasExistingRecords || audits?.length) logger.warn("No auditor storage service available");
+			return;
+		}
+		let auditor;
+		if (existingAuditor) {
+			auditor = existingAuditor;
+			logger.debug("Using existing auditor from handler context");
+		} else {
+			const services = await serviceDiscovery.register([endpoint.auditorStorageService]);
+			const storage = services[endpoint.auditorStorageService.serviceName];
+			let actor = {
+				id: "system",
+				type: "system"
+			};
+			if (endpoint.actorExtractor) try {
+				actor = await endpoint.actorExtractor({
+					services: ctx.services,
+					session: ctx.session,
+					header: ctx.header,
+					cookie: ctx.cookie,
+					logger
+				});
+			} catch (error) {
+				logger.error(error, "Failed to extract actor for audits");
+			}
+			auditor = new DefaultAuditor({
+				actor,
+				storage,
+				metadata: {
+					endpoint: endpoint.route,
+					method: endpoint.method
+				}
+			});
+		}
+		if (audits?.length) for (const audit of audits) {
+			logger.debug({ audit: audit.type }, "Processing declarative audit");
+			if (audit.when && !audit.when(response)) {
+				logger.debug({ audit: audit.type }, "Audit skipped due to when condition");
+				continue;
+			}
+			const payload = audit.payload(response);
+			const entityId = audit.entityId?.(response);
+			auditor.audit(audit.type, payload, {
+				table: audit.table,
+				entityId
+			});
+		}
+		const recordCount = auditor.getRecords().length;
+		if (recordCount > 0) {
+			const trx = "getTransaction" in auditor ? auditor.getTransaction() : void 0;
+			logger.debug({
+				auditCount: recordCount,
+				hasTransaction: !!trx
+			}, "Flushing audits");
+			await auditor.flush();
+		}
+	} catch (error) {
+		logger.error(error, "Failed to process audits");
+	}
+}
+/**
+ * Create audit context for handler execution.
+ * Returns the auditor and storage for use in the handler.
+ *
+ * @param endpoint - The endpoint with audit configuration
+ * @param serviceDiscovery - Service discovery for getting audit storage
+ * @param logger - Logger for debug/error messages
+ * @param ctx - Request context for actor extraction
+ * @returns Audit context with auditor and storage, or undefined if not configured
+ */
+async function createAuditContext(endpoint, serviceDiscovery, logger, ctx) {
+	if (!endpoint.auditorStorageService) return void 0;
+	const services = await serviceDiscovery.register([endpoint.auditorStorageService]);
+	const storage = services[endpoint.auditorStorageService.serviceName];
+	let actor = {
+		id: "system",
+		type: "system"
+	};
+	if (endpoint.actorExtractor) try {
+		actor = await endpoint.actorExtractor({
+			services: ctx.services,
+			session: ctx.session,
+			header: ctx.header,
+			cookie: ctx.cookie,
+			logger
+		});
+	} catch (error) {
+		logger.error(error, "Failed to extract actor for audits");
+	}
+	const auditor = new DefaultAuditor({
+		actor,
+		storage,
+		metadata: {
+			endpoint: endpoint.route,
+			method: endpoint.method
+		}
+	});
+	return {
+		auditor,
+		storage
+	};
+}
+/**
+ * Execute a handler with automatic audit transaction support.
+ * If the audit storage has a database (via getDatabase()), wraps execution
+ * in a transaction so audits are atomic with handler's database operations.
+ *
+ * @param auditContext - The audit context from createAuditContext
+ * @param handler - The handler function to execute (receives auditor)
+ * @param onComplete - Called after handler with response, to process declarative audits
+ * @returns The handler result
+ */
+async function executeWithAuditTransaction(auditContext, handler, onComplete) {
+	if (!auditContext) return handler(void 0);
+	const { auditor, storage } = auditContext;
+	const db = storage.getDatabase?.();
+	if (db) return withAuditableTransaction(db, auditor, async () => {
+		const response$1 = await handler(auditor);
+		if (onComplete) await onComplete(response$1, auditor);
+		return response$1;
+	});
+	const response = await handler(auditor);
+	if (onComplete) await onComplete(response, auditor);
+	await auditor.flush();
+	return response;
+}
+
+//#endregion
+export { createAuditContext, executeWithAuditTransaction, processEndpointAudits };
+//# sourceMappingURL=processAudits-DfcB-X-4.mjs.map
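A sketch of the non-transactional path through `executeWithAuditTransaction` above: the handler receives the auditor, `onComplete` runs after it (where declarative audits would be derived from the response), and `auditor.flush()` runs last because the stand-in storage exposes no `getDatabase()`. The relative import path and the auditor/storage stubs are assumptions for illustration, not package APIs.

```ts
// Sketch only: the import assumes this file sits next to
// src/endpoints/processAudits.ts; the stubs implement just the members
// executeWithAuditTransaction touches and are cast past the real types.
import { executeWithAuditTransaction } from './processAudits';

const records: Array<{ type: string; payload: unknown }> = [];
const stubAuditor = {
  audit: (type: string, payload: unknown) => { records.push({ type, payload }); },
  getRecords: () => records,
  flush: async () => { console.log('flushed', records.length, 'audit record(s)'); },
};
const stubStorage = {}; // no getDatabase() -> the non-transactional branch runs

async function main() {
  const result = await executeWithAuditTransaction(
    { auditor: stubAuditor, storage: stubStorage } as any,
    async (auditor) => {
      // Handler: record a manual audit through the injected auditor.
      (auditor as unknown as typeof stubAuditor | undefined)?.audit('user.created', { id: 'u_1' });
      return { id: 'u_1' };
    },
    async (response) => {
      // onComplete: where declarative audits would be processed from `response`.
      console.log('handler response', response);
    },
  );
  console.log('result', result, '- flush ran after onComplete');
}

main();
```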
@@ -0,0 +1 @@
+
{"version":3,"file":"processAudits-DfcB-X-4.mjs","names":["endpoint: Endpoint<\n any,\n any,\n any,\n OutSchema,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction\n >","response: InferStandardSchema<OutSchema>","serviceDiscovery: ServiceDiscovery<any, any>","logger: TLogger","ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n }","existingAuditor?: Auditor<TAuditAction>","auditor: Auditor<TAuditAction>","actor: AuditActor","endpoint: Endpoint<\n any,\n any,\n any,\n any,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction\n >","auditContext: AuditExecutionContext<TAuditAction> | undefined","handler: (auditor?: Auditor<TAuditAction>) => Promise<T>","onComplete?: (response: T, auditor: Auditor<TAuditAction>) => Promise<void>","response"],"sources":["../src/endpoints/processAudits.ts"],"sourcesContent":["import type {\n AuditableAction,\n AuditActor,\n Auditor,\n AuditStorage,\n} from '@geekmidas/audit';\nimport { DefaultAuditor } from '@geekmidas/audit';\nimport { withAuditableTransaction } from '@geekmidas/audit/kysely';\nimport type { Logger } from '@geekmidas/logger';\nimport type { InferStandardSchema } from '@geekmidas/schema';\nimport type { Service, ServiceDiscovery } from '@geekmidas/services';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\nimport type { Endpoint, CookieFn, HeaderFn } from './Endpoint';\nimport type { ActorExtractor, MappedAudit } from './audit';\n\n/**\n * Process declarative audit definitions after successful endpoint execution.\n * Similar to publishConstructEvents for events.\n *\n * @param endpoint - The endpoint with audit configuration\n * @param response - The handler response to generate audit payloads from\n * @param serviceDiscovery - Service discovery for registering audit storage\n * @param logger - Logger for debug/error messages\n * @param ctx - Request context (session, headers, cookies, services)\n * @param existingAuditor - Optional existing auditor instance (e.g., from handler context).\n * If provided, uses this auditor (with its stored transaction).\n * If not provided, creates a new auditor.\n */\nexport async function processEndpointAudits<\n TServices extends Service[] = [],\n TSession = unknown,\n TLogger extends Logger = Logger,\n OutSchema extends StandardSchemaV1 | undefined = undefined,\n TAuditStorage extends AuditStorage | undefined = undefined,\n TAuditStorageServiceName extends string = string,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n>(\n endpoint: Endpoint<\n any,\n any,\n any,\n OutSchema,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction\n >,\n response: InferStandardSchema<OutSchema>,\n serviceDiscovery: ServiceDiscovery<any, any>,\n logger: TLogger,\n ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n },\n existingAuditor?: Auditor<TAuditAction>,\n): Promise<void> {\n try {\n const audits = endpoint.audits as MappedAudit<TAuditAction, OutSchema>[];\n\n // If we have an existing auditor (from handler context), we need to flush\n // any manual audits it collected, even if there are no declarative audits\n const hasExistingRecords = existingAuditor && existingAuditor.getRecords().length > 0;\n\n // Skip if no declarative audits and no existing records to flush\n if 
(!audits?.length && !hasExistingRecords) {\n logger.debug('No audits to process');\n return;\n }\n\n // If no auditor storage service and we have things to process, warn\n if (!endpoint.auditorStorageService) {\n if (hasExistingRecords || audits?.length) {\n logger.warn('No auditor storage service available');\n }\n return;\n }\n\n // Get or create auditor\n let auditor: Auditor<TAuditAction>;\n\n if (existingAuditor) {\n // Use existing auditor (preserves stored transaction and manual audits)\n auditor = existingAuditor;\n logger.debug('Using existing auditor from handler context');\n } else {\n // Create new auditor (backward compatibility)\n const services = await serviceDiscovery.register([\n endpoint.auditorStorageService,\n ]);\n const storage = services[\n endpoint.auditorStorageService.serviceName\n ] as AuditStorage;\n\n // Extract actor if configured\n let actor: AuditActor = { id: 'system', type: 'system' };\n if (endpoint.actorExtractor) {\n try {\n actor = await (\n endpoint.actorExtractor as ActorExtractor<TServices, TSession, TLogger>\n )({\n services: ctx.services as any,\n session: ctx.session,\n header: ctx.header,\n cookie: ctx.cookie,\n logger,\n });\n } catch (error) {\n logger.error(error as Error, 'Failed to extract actor for audits');\n // Continue with system actor\n }\n }\n\n auditor = new DefaultAuditor<TAuditAction>({\n actor,\n storage,\n metadata: {\n endpoint: endpoint.route,\n method: endpoint.method,\n },\n });\n }\n\n // Process each declarative audit\n if (audits?.length) {\n for (const audit of audits) {\n logger.debug({ audit: audit.type }, 'Processing declarative audit');\n\n // Check when condition\n if (audit.when && !audit.when(response as any)) {\n logger.debug({ audit: audit.type }, 'Audit skipped due to when condition');\n continue;\n }\n\n // Extract payload\n const payload = audit.payload(response as any);\n\n // Extract entityId if configured\n const entityId = audit.entityId?.(response as any);\n\n // Record the audit\n auditor.audit(audit.type as any, payload as any, {\n table: audit.table,\n entityId,\n });\n }\n }\n\n // Flush audits to storage\n // Note: If existingAuditor has a stored transaction (via setTransaction),\n // flush() will use it automatically\n const recordCount = auditor.getRecords().length;\n if (recordCount > 0) {\n // Check if auditor has a stored transaction (for logging purposes)\n const trx = 'getTransaction' in auditor\n ? 
(auditor as { getTransaction(): unknown }).getTransaction()\n : undefined;\n logger.debug(\n { auditCount: recordCount, hasTransaction: !!trx },\n 'Flushing audits',\n );\n await auditor.flush();\n }\n } catch (error) {\n logger.error(error as Error, 'Failed to process audits');\n // Don't rethrow - audit failures shouldn't fail the request\n }\n}\n\n/**\n * Context for audit-aware handler execution.\n */\nexport interface AuditExecutionContext<\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n> {\n /** The auditor instance for recording audits */\n auditor: Auditor<TAuditAction>;\n /** The audit storage instance */\n storage: AuditStorage;\n}\n\n/**\n * Create audit context for handler execution.\n * Returns the auditor and storage for use in the handler.\n *\n * @param endpoint - The endpoint with audit configuration\n * @param serviceDiscovery - Service discovery for getting audit storage\n * @param logger - Logger for debug/error messages\n * @param ctx - Request context for actor extraction\n * @returns Audit context with auditor and storage, or undefined if not configured\n */\nexport async function createAuditContext<\n TServices extends Service[] = [],\n TSession = unknown,\n TLogger extends Logger = Logger,\n TAuditStorage extends AuditStorage | undefined = undefined,\n TAuditStorageServiceName extends string = string,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n>(\n endpoint: Endpoint<\n any,\n any,\n any,\n any,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction\n >,\n serviceDiscovery: ServiceDiscovery<any, any>,\n logger: TLogger,\n ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n },\n): Promise<AuditExecutionContext<TAuditAction> | undefined> {\n if (!endpoint.auditorStorageService) {\n return undefined;\n }\n\n const services = await serviceDiscovery.register([\n endpoint.auditorStorageService,\n ]);\n const storage = services[\n endpoint.auditorStorageService.serviceName\n ] as AuditStorage;\n\n // Extract actor if configured\n let actor: AuditActor = { id: 'system', type: 'system' };\n if (endpoint.actorExtractor) {\n try {\n actor = await (\n endpoint.actorExtractor as ActorExtractor<TServices, TSession, TLogger>\n )({\n services: ctx.services as any,\n session: ctx.session,\n header: ctx.header,\n cookie: ctx.cookie,\n logger,\n });\n } catch (error) {\n logger.error(error as Error, 'Failed to extract actor for audits');\n }\n }\n\n const auditor = new DefaultAuditor<TAuditAction>({\n actor,\n storage,\n metadata: {\n endpoint: endpoint.route,\n method: endpoint.method,\n },\n });\n\n return { auditor, storage };\n}\n\n/**\n * Execute a handler with automatic audit transaction support.\n * If the audit storage has a database (via getDatabase()), wraps execution\n * in a transaction so audits are atomic with handler's database operations.\n *\n * @param auditContext - The audit context from createAuditContext\n * @param handler - The handler function to execute (receives auditor)\n * @param onComplete - Called after handler with response, to process declarative audits\n * @returns The handler result\n */\nexport async function executeWithAuditTransaction<\n T,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n>(\n auditContext: AuditExecutionContext<TAuditAction> | undefined,\n handler: (auditor?: 
Auditor<TAuditAction>) => Promise<T>,\n onComplete?: (response: T, auditor: Auditor<TAuditAction>) => Promise<void>,\n): Promise<T> {\n // No audit context - just run handler\n if (!auditContext) {\n return handler(undefined);\n }\n\n const { auditor, storage } = auditContext;\n\n // Check if storage has a database for transactional execution\n const db = storage.getDatabase?.();\n\n if (db) {\n // Wrap in transaction - audits are atomic with handler operations\n return withAuditableTransaction(\n db as any,\n auditor as any,\n async () => {\n const response = await handler(auditor);\n\n // Process declarative audits within the transaction\n if (onComplete) {\n await onComplete(response, auditor);\n }\n\n // Audits are flushed by withAuditableTransaction before commit\n return response;\n },\n );\n }\n\n // No database - run handler and flush audits after\n const response = await handler(auditor);\n\n if (onComplete) {\n await onComplete(response, auditor);\n }\n\n // Flush audits (no transaction)\n await auditor.flush();\n\n return response;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;AA4BA,eAAsB,sBAYpBA,UAcAC,UACAC,kBACAC,QACAC,KAMAC,iBACe;AACf,KAAI;EACF,MAAM,SAAS,SAAS;EAIxB,MAAM,qBAAqB,mBAAmB,gBAAgB,YAAY,CAAC,SAAS;AAGpF,OAAK,QAAQ,WAAW,oBAAoB;AAC1C,UAAO,MAAM,uBAAuB;AACpC;EACD;AAGD,OAAK,SAAS,uBAAuB;AACnC,OAAI,sBAAsB,QAAQ,OAChC,QAAO,KAAK,uCAAuC;AAErD;EACD;EAGD,IAAIC;AAEJ,MAAI,iBAAiB;AAEnB,aAAU;AACV,UAAO,MAAM,8CAA8C;EAC5D,OAAM;GAEL,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAC/C,SAAS,qBACV,EAAC;GACF,MAAM,UAAU,SACd,SAAS,sBAAsB;GAIjC,IAAIC,QAAoB;IAAE,IAAI;IAAU,MAAM;GAAU;AACxD,OAAI,SAAS,eACX,KAAI;AACF,YAAQ,MAAM,AACZ,SAAS,eACT;KACA,UAAU,IAAI;KACd,SAAS,IAAI;KACb,QAAQ,IAAI;KACZ,QAAQ,IAAI;KACZ;IACD,EAAC;GACH,SAAQ,OAAO;AACd,WAAO,MAAM,OAAgB,qCAAqC;GAEnE;AAGH,aAAU,IAAI,eAA6B;IACzC;IACA;IACA,UAAU;KACR,UAAU,SAAS;KACnB,QAAQ,SAAS;IAClB;GACF;EACF;AAGD,MAAI,QAAQ,OACV,MAAK,MAAM,SAAS,QAAQ;AAC1B,UAAO,MAAM,EAAE,OAAO,MAAM,KAAM,GAAE,+BAA+B;AAGnE,OAAI,MAAM,SAAS,MAAM,KAAK,SAAgB,EAAE;AAC9C,WAAO,MAAM,EAAE,OAAO,MAAM,KAAM,GAAE,sCAAsC;AAC1E;GACD;GAGD,MAAM,UAAU,MAAM,QAAQ,SAAgB;GAG9C,MAAM,WAAW,MAAM,WAAW,SAAgB;AAGlD,WAAQ,MAAM,MAAM,MAAa,SAAgB;IAC/C,OAAO,MAAM;IACb;GACD,EAAC;EACH;EAMH,MAAM,cAAc,QAAQ,YAAY,CAAC;AACzC,MAAI,cAAc,GAAG;GAEnB,MAAM,MAAM,oBAAoB,UAC5B,AAAC,QAA0C,gBAAgB;AAE/D,UAAO,MACL;IAAE,YAAY;IAAa,kBAAkB;GAAK,GAClD,kBACD;AACD,SAAM,QAAQ,OAAO;EACtB;CACF,SAAQ,OAAO;AACd,SAAO,MAAM,OAAgB,2BAA2B;CAEzD;AACF;;;;;;;;;;;AA2BD,eAAsB,mBAWpBC,UAcAN,kBACAC,QACAC,KAM0D;AAC1D,MAAK,SAAS,sBACZ;CAGF,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAC/C,SAAS,qBACV,EAAC;CACF,MAAM,UAAU,SACd,SAAS,sBAAsB;CAIjC,IAAIG,QAAoB;EAAE,IAAI;EAAU,MAAM;CAAU;AACxD,KAAI,SAAS,eACX,KAAI;AACF,UAAQ,MAAM,AACZ,SAAS,eACT;GACA,UAAU,IAAI;GACd,SAAS,IAAI;GACb,QAAQ,IAAI;GACZ,QAAQ,IAAI;GACZ;EACD,EAAC;CACH,SAAQ,OAAO;AACd,SAAO,MAAM,OAAgB,qCAAqC;CACnE;CAGH,MAAM,UAAU,IAAI,eAA6B;EAC/C;EACA;EACA,UAAU;GACR,UAAU,SAAS;GACnB,QAAQ,SAAS;EAClB;CACF;AAED,QAAO;EAAE;EAAS;CAAS;AAC5B;;;;;;;;;;;AAYD,eAAsB,4BAOpBE,cACAC,SACAC,YACY;AAEZ,MAAK,aACH,QAAO,eAAkB;CAG3B,MAAM,EAAE,SAAS,SAAS,GAAG;CAG7B,MAAM,KAAK,QAAQ,eAAe;AAElC,KAAI,GAEF,QAAO,yBACL,IACA,SACA,YAAY;EACV,MAAMC,aAAW,MAAM,QAAQ,QAAQ;AAGvC,MAAI,WACF,OAAM,WAAWA,YAAU,QAAQ;AAIrC,SAAOA;CACR,EACF;CAIH,MAAM,WAAW,MAAM,QAAQ,QAAQ;AAEvC,KAAI,WACF,OAAM,WAAW,UAAU,QAAQ;AAIrC,OAAM,QAAQ,OAAO;AAErB,QAAO;AACR"}
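For orientation only: the source map above embeds the new `src/endpoints/processAudits.ts`, which introduces `createAuditContext`, `executeWithAuditTransaction`, and `processEndpointAudits`. The sketch below is not shipped in the package; it is a minimal composition of those three helpers based on the signatures and doc comments visible in the embedded source. The wrapper name `runWithAudits`, the relative import path, the `any`-typed `endpoint`/`serviceDiscovery` parameters, and the caller-supplied `runHandler` are all assumptions.

```ts
// Sketch only (assumed wiring, not the package's adaptor code):
// resolve the audit context, run the handler inside a storage transaction when
// one is available, then record declarative audits and flush.
import type { Auditor } from '@geekmidas/audit';
import type { Logger } from '@geekmidas/logger';
import {
  createAuditContext,
  executeWithAuditTransaction,
  processEndpointAudits,
} from './processAudits'; // path assumed; source lives at src/endpoints/processAudits.ts

// Reuse the ctx parameter type from createAuditContext instead of guessing HeaderFn/CookieFn.
type AuditCtx = Parameters<typeof createAuditContext>[3];

export async function runWithAudits<T>(
  endpoint: any, // an Endpoint carrying audits / auditorStorageService / actorExtractor
  serviceDiscovery: any, // a ServiceDiscovery instance from @geekmidas/services
  logger: Logger,
  ctx: AuditCtx,
  runHandler: (auditor?: Auditor<any>) => Promise<T>, // the endpoint handler, supplied by the caller
): Promise<T> {
  // Resolve auditor + storage once; undefined when the endpoint has no auditorStorageService.
  const auditContext = await createAuditContext(endpoint, serviceDiscovery, logger, ctx);

  // When the storage exposes getDatabase(), the handler and the audit flush share one
  // transaction; otherwise the handler runs first and audits are flushed afterwards.
  return executeWithAuditTransaction(auditContext, runHandler, (response, auditor) =>
    processEndpointAudits(endpoint, response as any, serviceDiscovery, logger, ctx, auditor),
  );
}
```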
package/dist/publisher.d.cts
CHANGED
@@ -1,4 +1,4 @@
- import { Construct } from "./Construct-
+ import { Construct } from "./Construct-dI_rgdSp.cjs";
import { EventPublisher, MappedEvent } from "@geekmidas/events";
import { Logger } from "@geekmidas/logger";
import { Service, ServiceDiscovery } from "@geekmidas/services";
package/dist/publisher.d.mts
CHANGED
@@ -1,4 +1,4 @@
- import { Construct } from "./Construct-
+ import { Construct } from "./Construct-ZPqE0vhn.mjs";
import { Service, ServiceDiscovery } from "@geekmidas/services";
import { EventPublisher, MappedEvent } from "@geekmidas/events";
import { Logger } from "@geekmidas/logger";
@@ -1,3 +1,3 @@
- const require_AWSLambdaSubscriberAdaptor = require('../AWSLambdaSubscriberAdaptor-
+ const require_AWSLambdaSubscriberAdaptor = require('../AWSLambdaSubscriberAdaptor-Dum5bkw3.cjs');

exports.AWSLambdaSubscriber = require_AWSLambdaSubscriberAdaptor.AWSLambdaSubscriber;
@@ -1,4 +1,4 @@
- import "../Construct-
- import "../Subscriber-
- import { AWSLambdaHandler, AWSLambdaSubscriber } from "../AWSLambdaSubscriberAdaptor-
+ import "../Construct-dI_rgdSp.cjs";
+ import "../Subscriber-BhzqUzs-.cjs";
+ import { AWSLambdaHandler, AWSLambdaSubscriber } from "../AWSLambdaSubscriberAdaptor-Cknp_nn1.cjs";
export { AWSLambdaHandler, AWSLambdaSubscriber };
@@ -1,4 +1,4 @@
- import "../Construct-
- import "../Subscriber-
- import { AWSLambdaHandler, AWSLambdaSubscriber } from "../AWSLambdaSubscriberAdaptor-
+ import "../Construct-ZPqE0vhn.mjs";
+ import "../Subscriber-s6yfjeOc.mjs";
+ import { AWSLambdaHandler, AWSLambdaSubscriber } from "../AWSLambdaSubscriberAdaptor-DpHzp-AM.mjs";
export { AWSLambdaHandler, AWSLambdaSubscriber };
@@ -1,3 +1,3 @@
- import "../Construct-
- import { Subscriber, SubscriberContext, SubscriberHandler } from "../Subscriber-
+ import "../Construct-dI_rgdSp.cjs";
+ import { Subscriber, SubscriberContext, SubscriberHandler } from "../Subscriber-BhzqUzs-.cjs";
export { Subscriber, SubscriberContext, SubscriberHandler };
@@ -1,3 +1,3 @@
- import "../Construct-
- import { Subscriber, SubscriberContext, SubscriberHandler } from "../Subscriber-
+ import "../Construct-ZPqE0vhn.mjs";
+ import { Subscriber, SubscriberContext, SubscriberHandler } from "../Subscriber-s6yfjeOc.mjs";
export { Subscriber, SubscriberContext, SubscriberHandler };
@@ -1,5 +1,5 @@
- require('../Construct-
- require('../Subscriber-
- const require_SubscriberBuilder = require('../SubscriberBuilder-
+ require('../Construct-BYSPikVm.cjs');
+ require('../Subscriber-D-FPWts6.cjs');
+ const require_SubscriberBuilder = require('../SubscriberBuilder-BfE2cL1q.cjs');

exports.SubscriberBuilder = require_SubscriberBuilder.SubscriberBuilder;
@@ -1,4 +1,4 @@
- import "../Construct-
- import "../Subscriber-
- import { SubscriberBuilder } from "../SubscriberBuilder-
+ import "../Construct-dI_rgdSp.cjs";
+ import "../Subscriber-BhzqUzs-.cjs";
+ import { SubscriberBuilder } from "../SubscriberBuilder-BCVkp-ga.cjs";
export { SubscriberBuilder };
@@ -1,4 +1,4 @@
- import "../Construct-
- import "../Subscriber-
- import { SubscriberBuilder } from "../SubscriberBuilder-
+ import "../Construct-ZPqE0vhn.mjs";
+ import "../Subscriber-s6yfjeOc.mjs";
+ import { SubscriberBuilder } from "../SubscriberBuilder-aCua5_wA.mjs";
export { SubscriberBuilder };
@@ -1,5 +1,5 @@
- import "../Construct-
- import "../Subscriber-
- import { SubscriberBuilder } from "../SubscriberBuilder-
+ import "../Construct-LWeB1rSQ.mjs";
+ import "../Subscriber-CGb8LjZa.mjs";
+ import { SubscriberBuilder } from "../SubscriberBuilder-BcAspHv9.mjs";

export { SubscriberBuilder };
@@ -1,6 +1,6 @@
- require('../Construct-
- const require_Subscriber = require('../Subscriber-
- const require_SubscriberBuilder = require('../SubscriberBuilder-
+ require('../Construct-BYSPikVm.cjs');
+ const require_Subscriber = require('../Subscriber-D-FPWts6.cjs');
+ const require_SubscriberBuilder = require('../SubscriberBuilder-BfE2cL1q.cjs');

//#region src/subscribers/index.ts
const s = new require_SubscriberBuilder.SubscriberBuilder();
@@ -1,10 +1,10 @@
- import "../Construct-
- import { Subscriber } from "../Subscriber-
- import { SubscriberBuilder } from "../SubscriberBuilder-
- import * as
+ import "../Construct-dI_rgdSp.cjs";
+ import { Subscriber } from "../Subscriber-BhzqUzs-.cjs";
+ import { SubscriberBuilder } from "../SubscriberBuilder-BCVkp-ga.cjs";
+ import * as _geekmidas_logger2 from "@geekmidas/logger";

//#region src/subscribers/index.d.ts
- declare const s: SubscriberBuilder<[],
+ declare const s: SubscriberBuilder<[], _geekmidas_logger2.Logger, undefined, undefined, string, []>;
//#endregion
export { Subscriber, SubscriberBuilder, s };
//# sourceMappingURL=index.d.cts.map
@@ -1,10 +1,10 @@
- import "../Construct-
- import { Subscriber } from "../Subscriber-
- import { SubscriberBuilder } from "../SubscriberBuilder-
- import * as
+ import "../Construct-ZPqE0vhn.mjs";
+ import { Subscriber } from "../Subscriber-s6yfjeOc.mjs";
+ import { SubscriberBuilder } from "../SubscriberBuilder-aCua5_wA.mjs";
+ import * as _geekmidas_logger14 from "@geekmidas/logger";

//#region src/subscribers/index.d.ts
- declare const s: SubscriberBuilder<[],
+ declare const s: SubscriberBuilder<[], _geekmidas_logger14.Logger, undefined, undefined, string, []>;
//#endregion
export { Subscriber, SubscriberBuilder, s };
//# sourceMappingURL=index.d.mts.map
@@ -1,6 +1,6 @@
- import "../Construct-
- import { Subscriber } from "../Subscriber-
- import { SubscriberBuilder } from "../SubscriberBuilder-
+ import "../Construct-LWeB1rSQ.mjs";
+ import { Subscriber } from "../Subscriber-CGb8LjZa.mjs";
+ import { SubscriberBuilder } from "../SubscriberBuilder-BcAspHv9.mjs";

//#region src/subscribers/index.ts
const s = new SubscriberBuilder();
@@ -4,4 +4,4 @@ type HttpMethod = 'GET' | 'POST' | 'PUT' | 'PATCH' | 'DELETE' | 'OPTIONS';
type LowerHttpMethod<T extends HttpMethod> = Lowercase<T>;
//#endregion
export { HttpMethod, LowerHttpMethod, RemoveUndefined };
- //# sourceMappingURL=types-
+ //# sourceMappingURL=types-DKf0juBf.d.mts.map