@geekmidas/constructs 0.0.12 → 0.0.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +448 -0
- package/dist/AWSLambdaFunction-B-Oxr8qt.d.cts +30 -0
- package/dist/{AWSLambdaFunction-DMxScuaw.cjs → AWSLambdaFunction-C-fuCLA3.cjs} +28 -5
- package/dist/AWSLambdaFunction-C-fuCLA3.cjs.map +1 -0
- package/dist/AWSLambdaFunction-CAm9r5ZX.d.mts +30 -0
- package/dist/{AWSLambdaFunction-cL8A169J.mjs → AWSLambdaFunction-H65WfXLt.mjs} +28 -5
- package/dist/AWSLambdaFunction-H65WfXLt.mjs.map +1 -0
- package/dist/{AmazonApiGatewayEndpointAdaptor-eDQgPNLH.d.mts → AmazonApiGatewayEndpointAdaptor-4hPy5vty.d.mts} +4 -4
- package/dist/{AmazonApiGatewayEndpointAdaptor-CIEhW1TQ.mjs → AmazonApiGatewayEndpointAdaptor-C6Jk5HSy.mjs} +6 -2
- package/dist/AmazonApiGatewayEndpointAdaptor-C6Jk5HSy.mjs.map +1 -0
- package/dist/{AmazonApiGatewayEndpointAdaptor-H8YvtfQm.cjs → AmazonApiGatewayEndpointAdaptor-CI9L7Ucn.cjs} +6 -2
- package/dist/AmazonApiGatewayEndpointAdaptor-CI9L7Ucn.cjs.map +1 -0
- package/dist/{AmazonApiGatewayEndpointAdaptor-CwItKPz2.d.cts → AmazonApiGatewayEndpointAdaptor-ro0RMLzr.d.cts} +4 -4
- package/dist/{AmazonApiGatewayV1EndpointAdaptor-B36zXLJ7.mjs → AmazonApiGatewayV1EndpointAdaptor-BMy8DdNJ.mjs} +2 -2
- package/dist/{AmazonApiGatewayV1EndpointAdaptor-B36zXLJ7.mjs.map → AmazonApiGatewayV1EndpointAdaptor-BMy8DdNJ.mjs.map} +1 -1
- package/dist/{AmazonApiGatewayV1EndpointAdaptor-DaCvUL6y.d.cts → AmazonApiGatewayV1EndpointAdaptor-BWJWKqQT.d.cts} +3 -3
- package/dist/{AmazonApiGatewayV1EndpointAdaptor-0n71d3gq.cjs → AmazonApiGatewayV1EndpointAdaptor-DYL1bCBS.cjs} +2 -2
- package/dist/{AmazonApiGatewayV1EndpointAdaptor-0n71d3gq.cjs.map → AmazonApiGatewayV1EndpointAdaptor-DYL1bCBS.cjs.map} +1 -1
- package/dist/{AmazonApiGatewayV1EndpointAdaptor-CnGVpA38.d.mts → AmazonApiGatewayV1EndpointAdaptor-hyR-WwyP.d.mts} +3 -3
- package/dist/{AmazonApiGatewayV2EndpointAdaptor-CE3wZEb8.mjs → AmazonApiGatewayV2EndpointAdaptor-BU5wQMOe.mjs} +2 -2
- package/dist/{AmazonApiGatewayV2EndpointAdaptor-CE3wZEb8.mjs.map → AmazonApiGatewayV2EndpointAdaptor-BU5wQMOe.mjs.map} +1 -1
- package/dist/{AmazonApiGatewayV2EndpointAdaptor-DtU3Cb8F.cjs → AmazonApiGatewayV2EndpointAdaptor-CPLCMeaN.cjs} +2 -2
- package/dist/{AmazonApiGatewayV2EndpointAdaptor-DtU3Cb8F.cjs.map → AmazonApiGatewayV2EndpointAdaptor-CPLCMeaN.cjs.map} +1 -1
- package/dist/{AmazonApiGatewayV2EndpointAdaptor-DA1PH0nc.d.cts → AmazonApiGatewayV2EndpointAdaptor-D1Irdggp.d.cts} +3 -3
- package/dist/{AmazonApiGatewayV2EndpointAdaptor-BELz2ijs.d.mts → AmazonApiGatewayV2EndpointAdaptor-DX3SuI5S.d.mts} +3 -3
- package/dist/{Authorizer-BRCVPz_O.d.mts → Authorizer-BTmly8ps.d.cts} +1 -1
- package/dist/{Authorizer-DG54w1m2.d.cts → Authorizer-pmPvIVgv.d.mts} +1 -1
- package/dist/{BaseFunctionBuilder-CT7p10K1.mjs → BaseFunctionBuilder-B5gkW0Kt.mjs} +10 -1
- package/dist/BaseFunctionBuilder-B5gkW0Kt.mjs.map +1 -0
- package/dist/{BaseFunctionBuilder-B8rT07QR.cjs → BaseFunctionBuilder-C5Se7pdL.cjs} +10 -1
- package/dist/BaseFunctionBuilder-C5Se7pdL.cjs.map +1 -0
- package/dist/{BaseFunctionBuilder-DilipY1y.d.mts → BaseFunctionBuilder-CbDnPZpD.d.mts} +10 -4
- package/dist/{BaseFunctionBuilder-Cf0op65o.d.cts → BaseFunctionBuilder-DUZMbEr3.d.cts} +10 -4
- package/dist/{Cron-Bnd-2pgE.cjs → Cron-Bi3QOge_.cjs} +4 -4
- package/dist/Cron-Bi3QOge_.cjs.map +1 -0
- package/dist/{Cron-6lOgKqSA.d.cts → Cron-COdfP0Jd.d.cts} +4 -4
- package/dist/{Cron-BH_07atD.d.mts → Cron-D8cn_ahj.d.mts} +4 -4
- package/dist/{Cron-DNRjf2cp.mjs → Cron-Dy_HW2Vv.mjs} +4 -4
- package/dist/Cron-Dy_HW2Vv.mjs.map +1 -0
- package/dist/{CronBuilder-DdR2TuQa.mjs → CronBuilder-Bl3A2Zp4.mjs} +13 -4
- package/dist/CronBuilder-Bl3A2Zp4.mjs.map +1 -0
- package/dist/{CronBuilder-5oK2AL2n.d.cts → CronBuilder-DntF6H3A.d.cts} +17 -12
- package/dist/{CronBuilder-D2b4zY4l.d.mts → CronBuilder-DoMnSs_0.d.mts} +17 -12
- package/dist/{CronBuilder-dtw4ZyH6.cjs → CronBuilder-Dv_w7Yri.cjs} +13 -4
- package/dist/CronBuilder-Dv_w7Yri.cjs.map +1 -0
- package/dist/{Endpoint-DuZlyjd4.d.mts → Endpoint-Bbs_sFvg.d.mts} +49 -20
- package/dist/{Endpoint-Cs-MsYlY.d.cts → Endpoint-Bu8Phz6y.d.cts} +49 -20
- package/dist/{Endpoint-B9PryZES.cjs → Endpoint-DDpF7NO1.cjs} +11 -6
- package/dist/Endpoint-DDpF7NO1.cjs.map +1 -0
- package/dist/{Endpoint-B69TqESg.mjs → Endpoint-S6Yh2_PN.mjs} +11 -6
- package/dist/Endpoint-S6Yh2_PN.mjs.map +1 -0
- package/dist/{EndpointBuilder-C-PHInEW.d.cts → EndpointBuilder-CPxmF_w7.d.cts} +30 -13
- package/dist/{EndpointBuilder-BrB-K1jO.d.mts → EndpointBuilder-Csfyfjd7.d.mts} +30 -13
- package/dist/{EndpointBuilder-DofwCnWJ.cjs → EndpointBuilder-DpGmObMb.cjs} +25 -4
- package/dist/EndpointBuilder-DpGmObMb.cjs.map +1 -0
- package/dist/{EndpointBuilder-DnVL-EU_.mjs → EndpointBuilder-aE2E6WTx.mjs} +25 -4
- package/dist/EndpointBuilder-aE2E6WTx.mjs.map +1 -0
- package/dist/{EndpointFactory-6zNpVSYp.d.mts → EndpointFactory-BU_R-9LH.d.mts} +10 -10
- package/dist/{EndpointFactory-Ba9mx9MU.cjs → EndpointFactory-BfH6mjJ3.cjs} +2 -2
- package/dist/EndpointFactory-BfH6mjJ3.cjs.map +1 -0
- package/dist/{EndpointFactory-e5WYVR6t.d.cts → EndpointFactory-D0Ql2Ofm.d.cts} +11 -11
- package/dist/{EndpointFactory-pPaIGFHV.mjs → EndpointFactory-D4leYk1N.mjs} +2 -2
- package/dist/EndpointFactory-D4leYk1N.mjs.map +1 -0
- package/dist/{Function-CO-s2pB8.cjs → Function-DagDbeXo.cjs} +3 -2
- package/dist/Function-DagDbeXo.cjs.map +1 -0
- package/dist/{Function-COnc-tWM.mjs → Function-DfKsM5Kx.mjs} +3 -2
- package/dist/Function-DfKsM5Kx.mjs.map +1 -0
- package/dist/{Function-G3JPHMaY.d.mts → Function-V9M9UVHp.d.mts} +24 -7
- package/dist/{Function-6EWabl_X.d.cts → Function-VI1TB3Mh.d.cts} +24 -7
- package/dist/{FunctionBuilder-CMhLQ4dt.mjs → FunctionBuilder-CVT7bG2o.mjs} +20 -4
- package/dist/FunctionBuilder-CVT7bG2o.mjs.map +1 -0
- package/dist/{FunctionBuilder-B3fpp3hA.d.cts → FunctionBuilder-CjVEFTYC.d.cts} +22 -12
- package/dist/{FunctionBuilder-ByaB_LQ4.d.mts → FunctionBuilder-D1ofSeMd.d.mts} +22 -12
- package/dist/{FunctionBuilder-_hMwZUof.cjs → FunctionBuilder-DXvG_XD-.cjs} +20 -4
- package/dist/FunctionBuilder-DXvG_XD-.cjs.map +1 -0
- package/dist/FunctionExecutionWrapper-Bubnr0zA.mjs +101 -0
- package/dist/FunctionExecutionWrapper-Bubnr0zA.mjs.map +1 -0
- package/dist/FunctionExecutionWrapper-CwtwYozd.d.cts +48 -0
- package/dist/FunctionExecutionWrapper-DkNycmOh.cjs +107 -0
- package/dist/FunctionExecutionWrapper-DkNycmOh.cjs.map +1 -0
- package/dist/FunctionExecutionWrapper-rhbIYT0Q.d.mts +48 -0
- package/dist/{HonoEndpointAdaptor-Cw2if5cG.cjs → HonoEndpointAdaptor-CfLRHHFw.cjs} +8 -4
- package/dist/HonoEndpointAdaptor-CfLRHHFw.cjs.map +1 -0
- package/dist/{HonoEndpointAdaptor-BElil8O5.d.mts → HonoEndpointAdaptor-DANYfDu9.d.mts} +7 -7
- package/dist/{HonoEndpointAdaptor-DAfnTFVS.mjs → HonoEndpointAdaptor-DuyE06nH.mjs} +8 -4
- package/dist/HonoEndpointAdaptor-DuyE06nH.mjs.map +1 -0
- package/dist/{HonoEndpointAdaptor-DSHl8ZCY.d.cts → HonoEndpointAdaptor-_uLz8Bak.d.cts} +7 -7
- package/dist/{Subscriber-D-FPWts6.cjs → Subscriber-Bdh8rMSL.cjs} +1 -1
- package/dist/{Subscriber-D-FPWts6.cjs.map → Subscriber-Bdh8rMSL.cjs.map} +1 -1
- package/dist/{Subscriber-CGb8LjZa.mjs → Subscriber-CJOWwaw1.mjs} +1 -1
- package/dist/{Subscriber-CGb8LjZa.mjs.map → Subscriber-CJOWwaw1.mjs.map} +1 -1
- package/dist/{SubscriberBuilder-BcAspHv9.mjs → SubscriberBuilder-BWQmiYd8.mjs} +2 -2
- package/dist/{SubscriberBuilder-BcAspHv9.mjs.map → SubscriberBuilder-BWQmiYd8.mjs.map} +1 -1
- package/dist/{SubscriberBuilder-BfE2cL1q.cjs → SubscriberBuilder-DieD_60p.cjs} +2 -2
- package/dist/{SubscriberBuilder-BfE2cL1q.cjs.map → SubscriberBuilder-DieD_60p.cjs.map} +1 -1
- package/dist/{TestEndpointAdaptor-DubQOJk_.mjs → TestEndpointAdaptor-BEyZa0Yg.mjs} +7 -3
- package/dist/TestEndpointAdaptor-BEyZa0Yg.mjs.map +1 -0
- package/dist/{TestEndpointAdaptor-Bn1WRFph.cjs → TestEndpointAdaptor-C8425RJ0.cjs} +7 -3
- package/dist/TestEndpointAdaptor-C8425RJ0.cjs.map +1 -0
- package/dist/{TestEndpointAdaptor-o-xtSyQ3.d.cts → TestEndpointAdaptor-H5To8PH7.d.cts} +2 -2
- package/dist/{TestEndpointAdaptor-DnlAA_rm.d.mts → TestEndpointAdaptor-jxn68ayg.d.mts} +2 -2
- package/dist/adaptors/aws.cjs +10 -10
- package/dist/adaptors/aws.d.cts +11 -11
- package/dist/adaptors/aws.d.mts +11 -11
- package/dist/adaptors/aws.mjs +10 -10
- package/dist/adaptors/hono.cjs +7 -7
- package/dist/adaptors/hono.d.cts +7 -7
- package/dist/adaptors/hono.d.mts +7 -7
- package/dist/adaptors/hono.mjs +7 -7
- package/dist/adaptors/testing.cjs +6 -6
- package/dist/adaptors/testing.d.cts +7 -7
- package/dist/adaptors/testing.d.mts +7 -7
- package/dist/adaptors/testing.mjs +6 -6
- package/dist/crons/Cron.cjs +5 -5
- package/dist/crons/Cron.d.cts +5 -5
- package/dist/crons/Cron.d.mts +5 -5
- package/dist/crons/Cron.mjs +5 -5
- package/dist/crons/CronBuilder.cjs +6 -6
- package/dist/crons/CronBuilder.d.cts +6 -6
- package/dist/crons/CronBuilder.d.mts +6 -6
- package/dist/crons/CronBuilder.mjs +6 -6
- package/dist/crons/index.cjs +6 -6
- package/dist/crons/index.d.cts +10 -10
- package/dist/crons/index.d.mts +10 -10
- package/dist/crons/index.mjs +6 -6
- package/dist/endpoints/AmazonApiGatewayEndpointAdaptor.cjs +6 -6
- package/dist/endpoints/AmazonApiGatewayEndpointAdaptor.d.cts +7 -7
- package/dist/endpoints/AmazonApiGatewayEndpointAdaptor.d.mts +7 -7
- package/dist/endpoints/AmazonApiGatewayEndpointAdaptor.mjs +6 -6
- package/dist/endpoints/AmazonApiGatewayV1EndpointAdaptor.cjs +7 -7
- package/dist/endpoints/AmazonApiGatewayV1EndpointAdaptor.d.cts +8 -8
- package/dist/endpoints/AmazonApiGatewayV1EndpointAdaptor.d.mts +8 -8
- package/dist/endpoints/AmazonApiGatewayV1EndpointAdaptor.mjs +7 -7
- package/dist/endpoints/AmazonApiGatewayV2EndpointAdaptor.cjs +7 -7
- package/dist/endpoints/AmazonApiGatewayV2EndpointAdaptor.d.cts +8 -8
- package/dist/endpoints/AmazonApiGatewayV2EndpointAdaptor.d.mts +8 -8
- package/dist/endpoints/AmazonApiGatewayV2EndpointAdaptor.mjs +7 -7
- package/dist/endpoints/Authorizer.d.cts +1 -1
- package/dist/endpoints/Authorizer.d.mts +1 -1
- package/dist/endpoints/Endpoint.cjs +5 -5
- package/dist/endpoints/Endpoint.d.cts +6 -6
- package/dist/endpoints/Endpoint.d.mts +6 -6
- package/dist/endpoints/Endpoint.mjs +5 -5
- package/dist/endpoints/EndpointBuilder.cjs +6 -6
- package/dist/endpoints/EndpointBuilder.d.cts +7 -7
- package/dist/endpoints/EndpointBuilder.d.mts +7 -7
- package/dist/endpoints/EndpointBuilder.mjs +6 -6
- package/dist/endpoints/EndpointFactory.cjs +7 -7
- package/dist/endpoints/EndpointFactory.d.cts +8 -8
- package/dist/endpoints/EndpointFactory.d.mts +8 -8
- package/dist/endpoints/EndpointFactory.mjs +7 -7
- package/dist/endpoints/HonoEndpointAdaptor.cjs +7 -7
- package/dist/endpoints/HonoEndpointAdaptor.d.cts +7 -7
- package/dist/endpoints/HonoEndpointAdaptor.d.mts +7 -7
- package/dist/endpoints/HonoEndpointAdaptor.mjs +7 -7
- package/dist/endpoints/TestEndpointAdaptor.cjs +6 -6
- package/dist/endpoints/TestEndpointAdaptor.d.cts +7 -7
- package/dist/endpoints/TestEndpointAdaptor.d.mts +7 -7
- package/dist/endpoints/TestEndpointAdaptor.mjs +6 -6
- package/dist/endpoints/audit.d.cts +6 -6
- package/dist/endpoints/audit.d.mts +6 -6
- package/dist/endpoints/helpers.cjs +6 -6
- package/dist/endpoints/helpers.d.cts +6 -6
- package/dist/endpoints/helpers.d.mts +6 -6
- package/dist/endpoints/helpers.mjs +6 -6
- package/dist/endpoints/index.cjs +7 -7
- package/dist/endpoints/index.d.cts +10 -10
- package/dist/endpoints/index.d.mts +10 -10
- package/dist/endpoints/index.mjs +7 -7
- package/dist/endpoints/processAudits.d.cts +7 -7
- package/dist/endpoints/processAudits.d.mts +7 -7
- package/dist/functions/AWSLambdaFunction.cjs +5 -5
- package/dist/functions/AWSLambdaFunction.d.cts +3 -3
- package/dist/functions/AWSLambdaFunction.d.mts +3 -3
- package/dist/functions/AWSLambdaFunction.mjs +5 -5
- package/dist/functions/BaseFunctionBuilder.cjs +1 -1
- package/dist/functions/BaseFunctionBuilder.d.cts +1 -1
- package/dist/functions/BaseFunctionBuilder.d.mts +1 -1
- package/dist/functions/BaseFunctionBuilder.mjs +1 -1
- package/dist/functions/Function.cjs +1 -1
- package/dist/functions/Function.d.cts +1 -1
- package/dist/functions/Function.d.mts +1 -1
- package/dist/functions/Function.mjs +1 -1
- package/dist/functions/FunctionBuilder.cjs +3 -3
- package/dist/functions/FunctionBuilder.d.cts +3 -3
- package/dist/functions/FunctionBuilder.d.mts +3 -3
- package/dist/functions/FunctionBuilder.mjs +3 -3
- package/dist/functions/FunctionExecutionWrapper.cjs +4 -4
- package/dist/functions/FunctionExecutionWrapper.d.cts +2 -2
- package/dist/functions/FunctionExecutionWrapper.d.mts +2 -2
- package/dist/functions/FunctionExecutionWrapper.mjs +4 -4
- package/dist/functions/TestFunctionAdaptor.cjs +37 -4
- package/dist/functions/TestFunctionAdaptor.cjs.map +1 -1
- package/dist/functions/TestFunctionAdaptor.d.cts +9 -6
- package/dist/functions/TestFunctionAdaptor.d.mts +9 -6
- package/dist/functions/TestFunctionAdaptor.mjs +37 -4
- package/dist/functions/TestFunctionAdaptor.mjs.map +1 -1
- package/dist/functions/index.cjs +4 -4
- package/dist/functions/index.d.cts +4 -4
- package/dist/functions/index.d.mts +4 -4
- package/dist/functions/index.mjs +4 -4
- package/dist/{functions-D03lqK-r.cjs → functions-FCb-wWFC.cjs} +2 -2
- package/dist/{functions-D03lqK-r.cjs.map → functions-FCb-wWFC.cjs.map} +1 -1
- package/dist/functions-JhRsNoAZ.mjs +8 -0
- package/dist/{functions-BYqZAob8.mjs.map → functions-JhRsNoAZ.mjs.map} +1 -1
- package/dist/{helpers-BPDogwac.mjs → helpers-2CLKTnRm.mjs} +2 -2
- package/dist/{helpers-BPDogwac.mjs.map → helpers-2CLKTnRm.mjs.map} +1 -1
- package/dist/{helpers-BApRyhly.cjs → helpers-Khuhi_Qx.cjs} +2 -2
- package/dist/{helpers-BApRyhly.cjs.map → helpers-Khuhi_Qx.cjs.map} +1 -1
- package/dist/{index-CUg_hSq-.d.cts → index-DRf5AP3P.d.mts} +4 -3
- package/dist/index-twsdbZWU.d.cts +10 -0
- package/dist/processAudits-BFokHhCO.cjs.map +1 -1
- package/dist/processAudits-DfcB-X-4.mjs.map +1 -1
- package/dist/publisher-Bw4770Hi.mjs.map +1 -1
- package/dist/publisher-lFQleddL.cjs.map +1 -1
- package/dist/publisher.d.cts +2 -1
- package/dist/publisher.d.mts +2 -1
- package/dist/subscribers/Subscriber.cjs +1 -1
- package/dist/subscribers/Subscriber.mjs +1 -1
- package/dist/subscribers/SubscriberBuilder.cjs +2 -2
- package/dist/subscribers/SubscriberBuilder.mjs +2 -2
- package/dist/subscribers/index.cjs +2 -2
- package/dist/subscribers/index.d.cts +2 -2
- package/dist/subscribers/index.d.mts +2 -2
- package/dist/subscribers/index.mjs +2 -2
- package/package.json +7 -7
- package/src/crons/Cron.ts +12 -3
- package/src/crons/CronBuilder.ts +85 -13
- package/src/crons/__tests__/CronBuilder.state-isolation.spec.ts +2 -2
- package/src/endpoints/AmazonApiGatewayEndpointAdaptor.ts +29 -6
- package/src/endpoints/Endpoint.ts +156 -40
- package/src/endpoints/EndpointBuilder.ts +123 -17
- package/src/endpoints/EndpointFactory.ts +5 -1
- package/src/endpoints/HonoEndpointAdaptor.ts +35 -5
- package/src/endpoints/TestEndpointAdaptor.ts +22 -2
- package/src/endpoints/__tests__/AmazonApiGatewayV1EndpointAdaptor.spec.ts +1 -1
- package/src/endpoints/__tests__/AmazonApiGatewayV2EndpointAdaptor.audits.spec.ts +2 -2
- package/src/endpoints/__tests__/AmazonApiGatewayV2EndpointAdaptor.events.spec.ts +9 -9
- package/src/endpoints/__tests__/AmazonApiGatewayV2EndpointAdaptor.kysely-audit.integration.spec.ts +79 -40
- package/src/endpoints/__tests__/Endpoint.cookies.spec.ts +3 -1
- package/src/endpoints/__tests__/Endpoint.manifest.spec.ts +1 -1
- package/src/endpoints/__tests__/EndpointBuilder.audit.spec.ts +35 -11
- package/src/endpoints/__tests__/EndpointFactory.authorizers.spec.ts +51 -14
- package/src/endpoints/__tests__/EndpointFactory.reference-audit.spec.ts +8 -8
- package/src/endpoints/__tests__/EndpointFactory.state-isolation.spec.ts +11 -11
- package/src/endpoints/__tests__/HonoEndpointAdaptor.audit-transactions.spec.ts +44 -26
- package/src/endpoints/__tests__/HonoEndpointAdaptor.audits.spec.ts +10 -10
- package/src/endpoints/__tests__/HonoEndpointAdaptor.events.spec.ts +8 -8
- package/src/endpoints/__tests__/HonoEndpointAdaptor.kysely-audit.integration.spec.ts +446 -61
- package/src/endpoints/__tests__/HonoEndpointAdaptor.openapi.spec.ts +4 -4
- package/src/endpoints/audit.ts +1 -1
- package/src/endpoints/processAudits.ts +32 -23
- package/src/functions/AWSLambdaFunction.ts +125 -12
- package/src/functions/BaseFunctionBuilder.ts +51 -3
- package/src/functions/Function.ts +73 -9
- package/src/functions/FunctionBuilder.ts +153 -17
- package/src/functions/FunctionExecutionWrapper.ts +133 -2
- package/src/functions/TestFunctionAdaptor.ts +94 -8
- package/src/functions/__tests__/AWSLambdaFunctionAdaptor.spec.ts +82 -0
- package/src/functions/__tests__/Function.audits.spec.ts +393 -0
- package/src/functions/__tests__/Function.spec.ts +76 -0
- package/src/functions/__tests__/FunctionBuilder.state-isolation.spec.ts +11 -5
- package/src/publisher.ts +12 -1
- package/dist/AWSLambdaFunction-DMxScuaw.cjs.map +0 -1
- package/dist/AWSLambdaFunction-DSB2oaFG.d.mts +0 -27
- package/dist/AWSLambdaFunction-cL8A169J.mjs.map +0 -1
- package/dist/AWSLambdaFunction-t6q2o8EL.d.cts +0 -27
- package/dist/AmazonApiGatewayEndpointAdaptor-CIEhW1TQ.mjs.map +0 -1
- package/dist/AmazonApiGatewayEndpointAdaptor-H8YvtfQm.cjs.map +0 -1
- package/dist/BaseFunctionBuilder-B8rT07QR.cjs.map +0 -1
- package/dist/BaseFunctionBuilder-CT7p10K1.mjs.map +0 -1
- package/dist/Cron-Bnd-2pgE.cjs.map +0 -1
- package/dist/Cron-DNRjf2cp.mjs.map +0 -1
- package/dist/CronBuilder-DdR2TuQa.mjs.map +0 -1
- package/dist/CronBuilder-dtw4ZyH6.cjs.map +0 -1
- package/dist/Endpoint-B69TqESg.mjs.map +0 -1
- package/dist/Endpoint-B9PryZES.cjs.map +0 -1
- package/dist/EndpointBuilder-DnVL-EU_.mjs.map +0 -1
- package/dist/EndpointBuilder-DofwCnWJ.cjs.map +0 -1
- package/dist/EndpointFactory-Ba9mx9MU.cjs.map +0 -1
- package/dist/EndpointFactory-pPaIGFHV.mjs.map +0 -1
- package/dist/Function-CO-s2pB8.cjs.map +0 -1
- package/dist/Function-COnc-tWM.mjs.map +0 -1
- package/dist/FunctionBuilder-CMhLQ4dt.mjs.map +0 -1
- package/dist/FunctionBuilder-_hMwZUof.cjs.map +0 -1
- package/dist/FunctionExecutionWrapper-Ci-ookJG.d.cts +0 -24
- package/dist/FunctionExecutionWrapper-DHFMLrOl.d.mts +0 -24
- package/dist/FunctionExecutionWrapper-i9v5L3Av.mjs +0 -36
- package/dist/FunctionExecutionWrapper-i9v5L3Av.mjs.map +0 -1
- package/dist/FunctionExecutionWrapper-sxJNTpuc.cjs +0 -42
- package/dist/FunctionExecutionWrapper-sxJNTpuc.cjs.map +0 -1
- package/dist/HonoEndpointAdaptor-Cw2if5cG.cjs.map +0 -1
- package/dist/HonoEndpointAdaptor-DAfnTFVS.mjs.map +0 -1
- package/dist/TestEndpointAdaptor-Bn1WRFph.cjs.map +0 -1
- package/dist/TestEndpointAdaptor-DubQOJk_.mjs.map +0 -1
- package/dist/functions-BYqZAob8.mjs +0 -8
- package/dist/index-D-a7e2gv.d.mts +0 -9
package/dist/processAudits-DfcB-X-4.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"processAudits-DfcB-X-4.mjs","names":["endpoint: Endpoint<\n any,\n any,\n any,\n OutSchema,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction\n >","response: InferStandardSchema<OutSchema>","serviceDiscovery: ServiceDiscovery<any, any>","logger: TLogger","ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n }","existingAuditor?: Auditor<TAuditAction>","auditor: Auditor<TAuditAction>","actor: AuditActor","endpoint: Endpoint<\n any,\n any,\n any,\n any,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction\n >","auditContext: AuditExecutionContext<TAuditAction> | undefined","handler: (auditor?: Auditor<TAuditAction>) => Promise<T>","onComplete?: (response: T, auditor: Auditor<TAuditAction>) => Promise<void>","response"],"sources":["../src/endpoints/processAudits.ts"],"sourcesContent":["import type {\n AuditableAction,\n AuditActor,\n Auditor,\n AuditStorage,\n} from '@geekmidas/audit';\nimport { DefaultAuditor } from '@geekmidas/audit';\nimport { withAuditableTransaction } from '@geekmidas/audit/kysely';\nimport type { Logger } from '@geekmidas/logger';\nimport type { InferStandardSchema } from '@geekmidas/schema';\nimport type { Service, ServiceDiscovery } from '@geekmidas/services';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\nimport type { Endpoint, CookieFn, HeaderFn } from './Endpoint';\nimport type { ActorExtractor, MappedAudit } from './audit';\n\n/**\n * Process declarative audit definitions after successful endpoint execution.\n * Similar to publishConstructEvents for events.\n *\n * @param endpoint - The endpoint with audit configuration\n * @param response - The handler response to generate audit payloads from\n * @param serviceDiscovery - Service discovery for registering audit storage\n * @param logger - Logger for debug/error messages\n * @param ctx - Request context (session, headers, cookies, services)\n * @param existingAuditor - Optional existing auditor instance (e.g., from handler context).\n * If provided, uses this auditor (with its stored transaction).\n * If not provided, creates a new auditor.\n */\nexport async function processEndpointAudits<\n TServices extends Service[] = [],\n TSession = unknown,\n TLogger extends Logger = Logger,\n OutSchema extends StandardSchemaV1 | undefined = undefined,\n TAuditStorage extends AuditStorage | undefined = undefined,\n TAuditStorageServiceName extends string = string,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n>(\n endpoint: Endpoint<\n any,\n any,\n any,\n OutSchema,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction\n >,\n response: InferStandardSchema<OutSchema>,\n serviceDiscovery: ServiceDiscovery<any, any>,\n logger: TLogger,\n ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n },\n existingAuditor?: Auditor<TAuditAction>,\n): Promise<void> {\n try {\n const audits = endpoint.audits as MappedAudit<TAuditAction, OutSchema>[];\n\n // If we have an existing auditor (from handler context), we need to flush\n // any manual audits it collected, even if there are no declarative audits\n const hasExistingRecords = existingAuditor && existingAuditor.getRecords().length > 0;\n\n // Skip if no declarative audits and no existing records to flush\n if 
(!audits?.length && !hasExistingRecords) {\n logger.debug('No audits to process');\n return;\n }\n\n // If no auditor storage service and we have things to process, warn\n if (!endpoint.auditorStorageService) {\n if (hasExistingRecords || audits?.length) {\n logger.warn('No auditor storage service available');\n }\n return;\n }\n\n // Get or create auditor\n let auditor: Auditor<TAuditAction>;\n\n if (existingAuditor) {\n // Use existing auditor (preserves stored transaction and manual audits)\n auditor = existingAuditor;\n logger.debug('Using existing auditor from handler context');\n } else {\n // Create new auditor (backward compatibility)\n const services = await serviceDiscovery.register([\n endpoint.auditorStorageService,\n ]);\n const storage = services[\n endpoint.auditorStorageService.serviceName\n ] as AuditStorage;\n\n // Extract actor if configured\n let actor: AuditActor = { id: 'system', type: 'system' };\n if (endpoint.actorExtractor) {\n try {\n actor = await (\n endpoint.actorExtractor as ActorExtractor<TServices, TSession, TLogger>\n )({\n services: ctx.services as any,\n session: ctx.session,\n header: ctx.header,\n cookie: ctx.cookie,\n logger,\n });\n } catch (error) {\n logger.error(error as Error, 'Failed to extract actor for audits');\n // Continue with system actor\n }\n }\n\n auditor = new DefaultAuditor<TAuditAction>({\n actor,\n storage,\n metadata: {\n endpoint: endpoint.route,\n method: endpoint.method,\n },\n });\n }\n\n // Process each declarative audit\n if (audits?.length) {\n for (const audit of audits) {\n logger.debug({ audit: audit.type }, 'Processing declarative audit');\n\n // Check when condition\n if (audit.when && !audit.when(response as any)) {\n logger.debug({ audit: audit.type }, 'Audit skipped due to when condition');\n continue;\n }\n\n // Extract payload\n const payload = audit.payload(response as any);\n\n // Extract entityId if configured\n const entityId = audit.entityId?.(response as any);\n\n // Record the audit\n auditor.audit(audit.type as any, payload as any, {\n table: audit.table,\n entityId,\n });\n }\n }\n\n // Flush audits to storage\n // Note: If existingAuditor has a stored transaction (via setTransaction),\n // flush() will use it automatically\n const recordCount = auditor.getRecords().length;\n if (recordCount > 0) {\n // Check if auditor has a stored transaction (for logging purposes)\n const trx = 'getTransaction' in auditor\n ? 
(auditor as { getTransaction(): unknown }).getTransaction()\n : undefined;\n logger.debug(\n { auditCount: recordCount, hasTransaction: !!trx },\n 'Flushing audits',\n );\n await auditor.flush();\n }\n } catch (error) {\n logger.error(error as Error, 'Failed to process audits');\n // Don't rethrow - audit failures shouldn't fail the request\n }\n}\n\n/**\n * Context for audit-aware handler execution.\n */\nexport interface AuditExecutionContext<\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n> {\n /** The auditor instance for recording audits */\n auditor: Auditor<TAuditAction>;\n /** The audit storage instance */\n storage: AuditStorage;\n}\n\n/**\n * Create audit context for handler execution.\n * Returns the auditor and storage for use in the handler.\n *\n * @param endpoint - The endpoint with audit configuration\n * @param serviceDiscovery - Service discovery for getting audit storage\n * @param logger - Logger for debug/error messages\n * @param ctx - Request context for actor extraction\n * @returns Audit context with auditor and storage, or undefined if not configured\n */\nexport async function createAuditContext<\n TServices extends Service[] = [],\n TSession = unknown,\n TLogger extends Logger = Logger,\n TAuditStorage extends AuditStorage | undefined = undefined,\n TAuditStorageServiceName extends string = string,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n>(\n endpoint: Endpoint<\n any,\n any,\n any,\n any,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction\n >,\n serviceDiscovery: ServiceDiscovery<any, any>,\n logger: TLogger,\n ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n },\n): Promise<AuditExecutionContext<TAuditAction> | undefined> {\n if (!endpoint.auditorStorageService) {\n return undefined;\n }\n\n const services = await serviceDiscovery.register([\n endpoint.auditorStorageService,\n ]);\n const storage = services[\n endpoint.auditorStorageService.serviceName\n ] as AuditStorage;\n\n // Extract actor if configured\n let actor: AuditActor = { id: 'system', type: 'system' };\n if (endpoint.actorExtractor) {\n try {\n actor = await (\n endpoint.actorExtractor as ActorExtractor<TServices, TSession, TLogger>\n )({\n services: ctx.services as any,\n session: ctx.session,\n header: ctx.header,\n cookie: ctx.cookie,\n logger,\n });\n } catch (error) {\n logger.error(error as Error, 'Failed to extract actor for audits');\n }\n }\n\n const auditor = new DefaultAuditor<TAuditAction>({\n actor,\n storage,\n metadata: {\n endpoint: endpoint.route,\n method: endpoint.method,\n },\n });\n\n return { auditor, storage };\n}\n\n/**\n * Execute a handler with automatic audit transaction support.\n * If the audit storage has a database (via getDatabase()), wraps execution\n * in a transaction so audits are atomic with handler's database operations.\n *\n * @param auditContext - The audit context from createAuditContext\n * @param handler - The handler function to execute (receives auditor)\n * @param onComplete - Called after handler with response, to process declarative audits\n * @returns The handler result\n */\nexport async function executeWithAuditTransaction<\n T,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n>(\n auditContext: AuditExecutionContext<TAuditAction> | undefined,\n handler: (auditor?: 
Auditor<TAuditAction>) => Promise<T>,\n onComplete?: (response: T, auditor: Auditor<TAuditAction>) => Promise<void>,\n): Promise<T> {\n // No audit context - just run handler\n if (!auditContext) {\n return handler(undefined);\n }\n\n const { auditor, storage } = auditContext;\n\n // Check if storage has a database for transactional execution\n const db = storage.getDatabase?.();\n\n if (db) {\n // Wrap in transaction - audits are atomic with handler operations\n return withAuditableTransaction(\n db as any,\n auditor as any,\n async () => {\n const response = await handler(auditor);\n\n // Process declarative audits within the transaction\n if (onComplete) {\n await onComplete(response, auditor);\n }\n\n // Audits are flushed by withAuditableTransaction before commit\n return response;\n },\n );\n }\n\n // No database - run handler and flush audits after\n const response = await handler(auditor);\n\n if (onComplete) {\n await onComplete(response, auditor);\n }\n\n // Flush audits (no transaction)\n await auditor.flush();\n\n return response;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;AA4BA,eAAsB,sBAYpBA,UAcAC,UACAC,kBACAC,QACAC,KAMAC,iBACe;AACf,KAAI;EACF,MAAM,SAAS,SAAS;EAIxB,MAAM,qBAAqB,mBAAmB,gBAAgB,YAAY,CAAC,SAAS;AAGpF,OAAK,QAAQ,WAAW,oBAAoB;AAC1C,UAAO,MAAM,uBAAuB;AACpC;EACD;AAGD,OAAK,SAAS,uBAAuB;AACnC,OAAI,sBAAsB,QAAQ,OAChC,QAAO,KAAK,uCAAuC;AAErD;EACD;EAGD,IAAIC;AAEJ,MAAI,iBAAiB;AAEnB,aAAU;AACV,UAAO,MAAM,8CAA8C;EAC5D,OAAM;GAEL,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAC/C,SAAS,qBACV,EAAC;GACF,MAAM,UAAU,SACd,SAAS,sBAAsB;GAIjC,IAAIC,QAAoB;IAAE,IAAI;IAAU,MAAM;GAAU;AACxD,OAAI,SAAS,eACX,KAAI;AACF,YAAQ,MAAM,AACZ,SAAS,eACT;KACA,UAAU,IAAI;KACd,SAAS,IAAI;KACb,QAAQ,IAAI;KACZ,QAAQ,IAAI;KACZ;IACD,EAAC;GACH,SAAQ,OAAO;AACd,WAAO,MAAM,OAAgB,qCAAqC;GAEnE;AAGH,aAAU,IAAI,eAA6B;IACzC;IACA;IACA,UAAU;KACR,UAAU,SAAS;KACnB,QAAQ,SAAS;IAClB;GACF;EACF;AAGD,MAAI,QAAQ,OACV,MAAK,MAAM,SAAS,QAAQ;AAC1B,UAAO,MAAM,EAAE,OAAO,MAAM,KAAM,GAAE,+BAA+B;AAGnE,OAAI,MAAM,SAAS,MAAM,KAAK,SAAgB,EAAE;AAC9C,WAAO,MAAM,EAAE,OAAO,MAAM,KAAM,GAAE,sCAAsC;AAC1E;GACD;GAGD,MAAM,UAAU,MAAM,QAAQ,SAAgB;GAG9C,MAAM,WAAW,MAAM,WAAW,SAAgB;AAGlD,WAAQ,MAAM,MAAM,MAAa,SAAgB;IAC/C,OAAO,MAAM;IACb;GACD,EAAC;EACH;EAMH,MAAM,cAAc,QAAQ,YAAY,CAAC;AACzC,MAAI,cAAc,GAAG;GAEnB,MAAM,MAAM,oBAAoB,UAC5B,AAAC,QAA0C,gBAAgB;AAE/D,UAAO,MACL;IAAE,YAAY;IAAa,kBAAkB;GAAK,GAClD,kBACD;AACD,SAAM,QAAQ,OAAO;EACtB;CACF,SAAQ,OAAO;AACd,SAAO,MAAM,OAAgB,2BAA2B;CAEzD;AACF;;;;;;;;;;;AA2BD,eAAsB,mBAWpBC,UAcAN,kBACAC,QACAC,KAM0D;AAC1D,MAAK,SAAS,sBACZ;CAGF,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAC/C,SAAS,qBACV,EAAC;CACF,MAAM,UAAU,SACd,SAAS,sBAAsB;CAIjC,IAAIG,QAAoB;EAAE,IAAI;EAAU,MAAM;CAAU;AACxD,KAAI,SAAS,eACX,KAAI;AACF,UAAQ,MAAM,AACZ,SAAS,eACT;GACA,UAAU,IAAI;GACd,SAAS,IAAI;GACb,QAAQ,IAAI;GACZ,QAAQ,IAAI;GACZ;EACD,EAAC;CACH,SAAQ,OAAO;AACd,SAAO,MAAM,OAAgB,qCAAqC;CACnE;CAGH,MAAM,UAAU,IAAI,eAA6B;EAC/C;EACA;EACA,UAAU;GACR,UAAU,SAAS;GACnB,QAAQ,SAAS;EAClB;CACF;AAED,QAAO;EAAE;EAAS;CAAS;AAC5B;;;;;;;;;;;AAYD,eAAsB,4BAOpBE,cACAC,SACAC,YACY;AAEZ,MAAK,aACH,QAAO,eAAkB;CAG3B,MAAM,EAAE,SAAS,SAAS,GAAG;CAG7B,MAAM,KAAK,QAAQ,eAAe;AAElC,KAAI,GAEF,QAAO,yBACL,IACA,SACA,YAAY;EACV,MAAMC,aAAW,MAAM,QAAQ,QAAQ;AAGvC,MAAI,WACF,OAAM,WAAWA,YAAU,QAAQ;AAIrC,SAAOA;CACR,EACF;CAIH,MAAM,WAAW,MAAM,QAAQ,QAAQ;AAEvC,KAAI,WACF,OAAM,WAAW,UAAU,QAAQ;AAIrC,OAAM,QAAQ,OAAO;AAErB,QAAO;AACR"}
+
{"version":3,"file":"processAudits-DfcB-X-4.mjs","names":["endpoint: Endpoint<\n any,\n any,\n any,\n OutSchema,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction\n >","response: InferStandardSchema<OutSchema>","serviceDiscovery: ServiceDiscovery<any, any>","logger: TLogger","ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n }","existingAuditor?: Auditor<TAuditAction>","auditor: Auditor<TAuditAction>","actor: AuditActor","endpoint: Endpoint<\n any,\n any,\n any,\n any,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction,\n TDatabase,\n TDatabaseServiceName\n >","auditContext: AuditExecutionContext<TAuditAction> | undefined","handler: (auditor?: Auditor<TAuditAction>) => Promise<T>","onComplete?: (response: T, auditor: Auditor<TAuditAction>) => Promise<void>","response"],"sources":["../src/endpoints/processAudits.ts"],"sourcesContent":["import type {\n AuditActor,\n AuditStorage,\n AuditableAction,\n Auditor,\n} from '@geekmidas/audit';\nimport { DefaultAuditor } from '@geekmidas/audit';\nimport { withAuditableTransaction } from '@geekmidas/audit/kysely';\nimport type { Logger } from '@geekmidas/logger';\nimport type { InferStandardSchema } from '@geekmidas/schema';\nimport type { Service, ServiceDiscovery } from '@geekmidas/services';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\nimport type { CookieFn, Endpoint, HeaderFn } from './Endpoint';\nimport type { ActorExtractor, MappedAudit } from './audit';\n\n/**\n * Process declarative audit definitions after successful endpoint execution.\n * Similar to publishConstructEvents for events.\n *\n * @param endpoint - The endpoint with audit configuration\n * @param response - The handler response to generate audit payloads from\n * @param serviceDiscovery - Service discovery for registering audit storage\n * @param logger - Logger for debug/error messages\n * @param ctx - Request context (session, headers, cookies, services)\n * @param existingAuditor - Optional existing auditor instance (e.g., from handler context).\n * If provided, uses this auditor (with its stored transaction).\n * If not provided, creates a new auditor.\n */\nexport async function processEndpointAudits<\n TServices extends Service[] = [],\n TSession = unknown,\n TLogger extends Logger = Logger,\n OutSchema extends StandardSchemaV1 | undefined = undefined,\n TAuditStorage extends AuditStorage | undefined = undefined,\n TAuditStorageServiceName extends string = string,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n>(\n endpoint: Endpoint<\n any,\n any,\n any,\n OutSchema,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction\n >,\n response: InferStandardSchema<OutSchema>,\n serviceDiscovery: ServiceDiscovery<any, any>,\n logger: TLogger,\n ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n },\n existingAuditor?: Auditor<TAuditAction>,\n): Promise<void> {\n try {\n const audits = endpoint.audits as MappedAudit<TAuditAction, OutSchema>[];\n\n // If we have an existing auditor (from handler context), we need to flush\n // any manual audits it collected, even if there are no declarative audits\n const hasExistingRecords =\n existingAuditor && existingAuditor.getRecords().length > 0;\n\n // Skip if no declarative audits and 
no existing records to flush\n if (!audits?.length && !hasExistingRecords) {\n logger.debug('No audits to process');\n return;\n }\n\n // If no auditor storage service and we have things to process, warn\n if (!endpoint.auditorStorageService) {\n if (hasExistingRecords || audits?.length) {\n logger.warn('No auditor storage service available');\n }\n return;\n }\n\n // Get or create auditor\n let auditor: Auditor<TAuditAction>;\n\n if (existingAuditor) {\n // Use existing auditor (preserves stored transaction and manual audits)\n auditor = existingAuditor;\n logger.debug('Using existing auditor from handler context');\n } else {\n // Create new auditor (backward compatibility)\n const services = await serviceDiscovery.register([\n endpoint.auditorStorageService,\n ]);\n const storage = services[\n endpoint.auditorStorageService.serviceName\n ] as AuditStorage;\n\n // Extract actor if configured\n let actor: AuditActor = { id: 'system', type: 'system' };\n if (endpoint.actorExtractor) {\n try {\n actor = await (\n endpoint.actorExtractor as ActorExtractor<\n TServices,\n TSession,\n TLogger\n >\n )({\n services: ctx.services as any,\n session: ctx.session,\n header: ctx.header,\n cookie: ctx.cookie,\n logger,\n });\n } catch (error) {\n logger.error(error as Error, 'Failed to extract actor for audits');\n // Continue with system actor\n }\n }\n\n auditor = new DefaultAuditor<TAuditAction>({\n actor,\n storage,\n metadata: {\n endpoint: endpoint.route,\n method: endpoint.method,\n },\n });\n }\n\n // Process each declarative audit\n if (audits?.length) {\n for (const audit of audits) {\n logger.debug({ audit: audit.type }, 'Processing declarative audit');\n\n // Check when condition\n if (audit.when && !audit.when(response as any)) {\n logger.debug(\n { audit: audit.type },\n 'Audit skipped due to when condition',\n );\n continue;\n }\n\n // Extract payload\n const payload = audit.payload(response as any);\n\n // Extract entityId if configured\n const entityId = audit.entityId?.(response as any);\n\n // Record the audit\n auditor.audit(audit.type as any, payload as any, {\n table: audit.table,\n entityId,\n });\n }\n }\n\n // Flush audits to storage\n // Note: If existingAuditor has a stored transaction (via setTransaction),\n // flush() will use it automatically\n const recordCount = auditor.getRecords().length;\n if (recordCount > 0) {\n // Check if auditor has a stored transaction (for logging purposes)\n const trx =\n 'getTransaction' in auditor\n ? 
(auditor as { getTransaction(): unknown }).getTransaction()\n : undefined;\n logger.debug(\n { auditCount: recordCount, hasTransaction: !!trx },\n 'Flushing audits',\n );\n await auditor.flush();\n }\n } catch (error) {\n logger.error(error as Error, 'Failed to process audits');\n // Don't rethrow - audit failures shouldn't fail the request\n }\n}\n\n/**\n * Context for audit-aware handler execution.\n */\nexport interface AuditExecutionContext<\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n> {\n /** The auditor instance for recording audits */\n auditor: Auditor<TAuditAction>;\n /** The audit storage instance */\n storage: AuditStorage;\n}\n\n/**\n * Create audit context for handler execution.\n * Returns the auditor and storage for use in the handler.\n *\n * @param endpoint - The endpoint with audit configuration\n * @param serviceDiscovery - Service discovery for getting audit storage\n * @param logger - Logger for debug/error messages\n * @param ctx - Request context for actor extraction\n * @returns Audit context with auditor and storage, or undefined if not configured\n */\nexport async function createAuditContext<\n TServices extends Service[] = [],\n TSession = unknown,\n TLogger extends Logger = Logger,\n TAuditStorage extends AuditStorage | undefined = undefined,\n TAuditStorageServiceName extends string = string,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n TDatabase = undefined,\n TDatabaseServiceName extends string = string,\n>(\n endpoint: Endpoint<\n any,\n any,\n any,\n any,\n TServices,\n TLogger,\n TSession,\n any,\n any,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction,\n TDatabase,\n TDatabaseServiceName\n >,\n serviceDiscovery: ServiceDiscovery<any, any>,\n logger: TLogger,\n ctx: {\n session: TSession;\n header: HeaderFn;\n cookie: CookieFn;\n services: Record<string, unknown>;\n },\n): Promise<AuditExecutionContext<TAuditAction> | undefined> {\n if (!endpoint.auditorStorageService) {\n return undefined;\n }\n\n const services = await serviceDiscovery.register([\n endpoint.auditorStorageService,\n ]);\n const storage = services[\n endpoint.auditorStorageService.serviceName\n ] as AuditStorage;\n\n // Extract actor if configured\n let actor: AuditActor = { id: 'system', type: 'system' };\n if (endpoint.actorExtractor) {\n try {\n actor = await (\n endpoint.actorExtractor as ActorExtractor<TServices, TSession, TLogger>\n )({\n services: ctx.services as any,\n session: ctx.session,\n header: ctx.header,\n cookie: ctx.cookie,\n logger,\n });\n } catch (error) {\n logger.error(error as Error, 'Failed to extract actor for audits');\n }\n }\n\n const auditor = new DefaultAuditor<TAuditAction>({\n actor,\n storage,\n metadata: {\n endpoint: endpoint.route,\n method: endpoint.method,\n },\n });\n\n return { auditor, storage };\n}\n\n/**\n * Execute a handler with automatic audit transaction support.\n * If the audit storage has a database (via getDatabase()), wraps execution\n * in a transaction so audits are atomic with handler's database operations.\n *\n * @param auditContext - The audit context from createAuditContext\n * @param handler - The handler function to execute (receives auditor)\n * @param onComplete - Called after handler with response, to process declarative audits\n * @returns The handler result\n */\nexport async function executeWithAuditTransaction<\n T,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n 
string,\n unknown\n >,\n>(\n auditContext: AuditExecutionContext<TAuditAction> | undefined,\n handler: (auditor?: Auditor<TAuditAction>) => Promise<T>,\n onComplete?: (response: T, auditor: Auditor<TAuditAction>) => Promise<void>,\n): Promise<T> {\n // No audit context - just run handler\n if (!auditContext) {\n return handler(undefined);\n }\n\n const { auditor, storage } = auditContext;\n\n // Check if storage has a database for transactional execution\n const db = storage.getDatabase?.();\n\n if (db) {\n // Wrap in transaction - audits are atomic with handler operations\n return withAuditableTransaction(db as any, auditor as any, async () => {\n const response = await handler(auditor);\n\n // Process declarative audits within the transaction\n if (onComplete) {\n await onComplete(response, auditor);\n }\n\n // Audits are flushed by withAuditableTransaction before commit\n return response;\n });\n }\n\n // No database - run handler and flush audits after\n const response = await handler(auditor);\n\n if (onComplete) {\n await onComplete(response, auditor);\n }\n\n // Flush audits (no transaction)\n await auditor.flush();\n\n return response;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;AA4BA,eAAsB,sBAYpBA,UAcAC,UACAC,kBACAC,QACAC,KAMAC,iBACe;AACf,KAAI;EACF,MAAM,SAAS,SAAS;EAIxB,MAAM,qBACJ,mBAAmB,gBAAgB,YAAY,CAAC,SAAS;AAG3D,OAAK,QAAQ,WAAW,oBAAoB;AAC1C,UAAO,MAAM,uBAAuB;AACpC;EACD;AAGD,OAAK,SAAS,uBAAuB;AACnC,OAAI,sBAAsB,QAAQ,OAChC,QAAO,KAAK,uCAAuC;AAErD;EACD;EAGD,IAAIC;AAEJ,MAAI,iBAAiB;AAEnB,aAAU;AACV,UAAO,MAAM,8CAA8C;EAC5D,OAAM;GAEL,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAC/C,SAAS,qBACV,EAAC;GACF,MAAM,UAAU,SACd,SAAS,sBAAsB;GAIjC,IAAIC,QAAoB;IAAE,IAAI;IAAU,MAAM;GAAU;AACxD,OAAI,SAAS,eACX,KAAI;AACF,YAAQ,MAAM,AACZ,SAAS,eAKT;KACA,UAAU,IAAI;KACd,SAAS,IAAI;KACb,QAAQ,IAAI;KACZ,QAAQ,IAAI;KACZ;IACD,EAAC;GACH,SAAQ,OAAO;AACd,WAAO,MAAM,OAAgB,qCAAqC;GAEnE;AAGH,aAAU,IAAI,eAA6B;IACzC;IACA;IACA,UAAU;KACR,UAAU,SAAS;KACnB,QAAQ,SAAS;IAClB;GACF;EACF;AAGD,MAAI,QAAQ,OACV,MAAK,MAAM,SAAS,QAAQ;AAC1B,UAAO,MAAM,EAAE,OAAO,MAAM,KAAM,GAAE,+BAA+B;AAGnE,OAAI,MAAM,SAAS,MAAM,KAAK,SAAgB,EAAE;AAC9C,WAAO,MACL,EAAE,OAAO,MAAM,KAAM,GACrB,sCACD;AACD;GACD;GAGD,MAAM,UAAU,MAAM,QAAQ,SAAgB;GAG9C,MAAM,WAAW,MAAM,WAAW,SAAgB;AAGlD,WAAQ,MAAM,MAAM,MAAa,SAAgB;IAC/C,OAAO,MAAM;IACb;GACD,EAAC;EACH;EAMH,MAAM,cAAc,QAAQ,YAAY,CAAC;AACzC,MAAI,cAAc,GAAG;GAEnB,MAAM,MACJ,oBAAoB,UAChB,AAAC,QAA0C,gBAAgB;AAEjE,UAAO,MACL;IAAE,YAAY;IAAa,kBAAkB;GAAK,GAClD,kBACD;AACD,SAAM,QAAQ,OAAO;EACtB;CACF,SAAQ,OAAO;AACd,SAAO,MAAM,OAAgB,2BAA2B;CAEzD;AACF;;;;;;;;;;;AA2BD,eAAsB,mBAapBC,UAgBAN,kBACAC,QACAC,KAM0D;AAC1D,MAAK,SAAS,sBACZ;CAGF,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAC/C,SAAS,qBACV,EAAC;CACF,MAAM,UAAU,SACd,SAAS,sBAAsB;CAIjC,IAAIG,QAAoB;EAAE,IAAI;EAAU,MAAM;CAAU;AACxD,KAAI,SAAS,eACX,KAAI;AACF,UAAQ,MAAM,AACZ,SAAS,eACT;GACA,UAAU,IAAI;GACd,SAAS,IAAI;GACb,QAAQ,IAAI;GACZ,QAAQ,IAAI;GACZ;EACD,EAAC;CACH,SAAQ,OAAO;AACd,SAAO,MAAM,OAAgB,qCAAqC;CACnE;CAGH,MAAM,UAAU,IAAI,eAA6B;EAC/C;EACA;EACA,UAAU;GACR,UAAU,SAAS;GACnB,QAAQ,SAAS;EAClB;CACF;AAED,QAAO;EAAE;EAAS;CAAS;AAC5B;;;;;;;;;;;AAYD,eAAsB,4BAOpBE,cACAC,SACAC,YACY;AAEZ,MAAK,aACH,QAAO,eAAkB;CAG3B,MAAM,EAAE,SAAS,SAAS,GAAG;CAG7B,MAAM,KAAK,QAAQ,eAAe;AAElC,KAAI,GAEF,QAAO,yBAAyB,IAAW,SAAgB,YAAY;EACrE,MAAMC,aAAW,MAAM,QAAQ,QAAQ;AAGvC,MAAI,WACF,OAAM,WAAWA,YAAU,QAAQ;AAIrC,SAAOA;CACR,EAAC;CAIJ,MAAM,WAAW,MAAM,QAAQ,QAAQ;AAEvC,KAAI,WACF,OAAM,WAAW,UAAU,QAAQ;AAIrC,OAAM,QAAQ,OAAO;AAErB,QAAO;AACR"}
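Note on the change embedded above: the processAudits.ts source carried in this map now exposes createAuditContext and executeWithAuditTransaction alongside processEndpointAudits, which accepts an optional existingAuditor. Below is a minimal TypeScript sketch of how the three appear intended to compose, based only on the signatures shown in the embedded source; the relative import path, the endpoint.handler call, and the loose ctx typing are illustrative assumptions, not the package's exact API.

import type { Logger } from '@geekmidas/logger';
import type { ServiceDiscovery } from '@geekmidas/services';
import {
  createAuditContext,
  executeWithAuditTransaction,
  processEndpointAudits,
} from './processAudits'; // assumed relative import for the module above

// Sketch only: `endpoint` and `ctx` are loosely typed stand-ins for the
// package's Endpoint generics and request context.
async function runEndpointWithAudits(
  endpoint: any,
  serviceDiscovery: ServiceDiscovery<any, any>,
  logger: Logger,
  ctx: {
    session: unknown;
    header: any;
    cookie: any;
    services: Record<string, unknown>;
  },
) {
  // Resolves the configured audit storage service and builds an auditor;
  // returns undefined when the endpoint has no auditorStorageService.
  const auditContext = await createAuditContext(
    endpoint,
    serviceDiscovery,
    logger,
    ctx,
  );

  // When the storage exposes getDatabase(), handler work and audit flushes
  // share one transaction; otherwise audits are flushed after the handler.
  return executeWithAuditTransaction(
    auditContext,
    (auditor) => endpoint.handler({ ...ctx, logger, auditor }), // hypothetical handler call
    (response, auditor) =>
      // Declarative audits plus any manual records on the existing auditor.
      processEndpointAudits(endpoint, response, serviceDiscovery, logger, ctx, auditor),
  );
}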
package/dist/publisher-Bw4770Hi.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"publisher-Bw4770Hi.mjs","names":["logger: Logger","serviceDiscovery: ServiceDiscovery<any, any>","ev: MappedEvent<T, OutSchema>[]","response: InferStandardSchema<OutSchema>","publisherService: TPublisherService","events: MappedEvent<T, OutSchema>[]","construct: Construct
+
{"version":3,"file":"publisher-Bw4770Hi.mjs","names":["logger: Logger","serviceDiscovery: ServiceDiscovery<any, any>","ev: MappedEvent<T, OutSchema>[]","response: InferStandardSchema<OutSchema>","publisherService: TPublisherService","events: MappedEvent<T, OutSchema>[]","construct: Construct<\n Logger,\n TServiceName,\n T,\n OutSchema,\n TServices,\n TAuditStorageServiceName,\n TAuditStorage\n >"],"sources":["../src/publisher.ts"],"sourcesContent":["import type { AuditStorage } from '@geekmidas/audit';\nimport type { Logger } from '@geekmidas/logger';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\n\nimport type { EventPublisher, MappedEvent } from '@geekmidas/events';\nimport type { InferStandardSchema } from '@geekmidas/schema';\nimport type { Service, ServiceDiscovery } from '@geekmidas/services';\nimport type { Construct } from './Construct';\n\nexport async function publishEvents<\n T extends EventPublisher<any> | undefined,\n OutSchema extends StandardSchemaV1 | undefined = undefined,\n TServiceName extends string = string,\n TPublisherService extends Service<TServiceName, T> | undefined = undefined,\n>(\n logger: Logger,\n serviceDiscovery: ServiceDiscovery<any, any>,\n ev: MappedEvent<T, OutSchema>[] = [],\n response: InferStandardSchema<OutSchema>,\n publisherService: TPublisherService,\n) {\n try {\n if (!ev?.length) {\n logger.debug('No events to publish');\n return;\n }\n if (!publisherService) {\n logger.warn('No publisher service available');\n return;\n }\n\n const services = await serviceDiscovery.register([publisherService]);\n\n const publisher = services[\n publisherService.serviceName\n ] as EventPublisher<any>;\n\n const events: MappedEvent<T, OutSchema>[] = [];\n\n for (const { when, payload, type, ...e } of ev) {\n logger.debug({ event: type }, 'Processing event');\n const resolvedPayload = await payload(response);\n const event = {\n ...e,\n type,\n payload: resolvedPayload,\n };\n\n if (!when || when(response as any)) {\n events.push(event);\n }\n }\n\n if (events.length) {\n logger.debug({ eventCount: ev.length }, 'Publishing events');\n\n await publisher.publish(events).catch((err) => {\n logger.error(err, 'Failed to publish events');\n });\n }\n } catch (error) {\n logger.error(error as any, 'Something went wrong publishing events');\n }\n}\n\nexport async function publishConstructEvents<\n T extends EventPublisher<any> | undefined,\n OutSchema extends StandardSchemaV1 | undefined = undefined,\n TServiceName extends string = string,\n TServices extends Service[] = [],\n TAuditStorageServiceName extends string = string,\n TAuditStorage extends AuditStorage | undefined = undefined,\n>(\n construct: Construct<\n Logger,\n TServiceName,\n T,\n OutSchema,\n TServices,\n TAuditStorageServiceName,\n TAuditStorage\n >,\n response: InferStandardSchema<OutSchema>,\n serviceDiscovery: ServiceDiscovery<any, any>,\n logger: Logger = construct.logger,\n) {\n return publishEvents(\n logger,\n serviceDiscovery,\n construct.events,\n response,\n construct.publisherService,\n 
);\n}\n"],"mappings":";AASA,eAAsB,cAMpBA,QACAC,kBACAC,KAAkC,CAAE,GACpCC,UACAC,kBACA;AACA,KAAI;AACF,OAAK,IAAI,QAAQ;AACf,UAAO,MAAM,uBAAuB;AACpC;EACD;AACD,OAAK,kBAAkB;AACrB,UAAO,KAAK,iCAAiC;AAC7C;EACD;EAED,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAAC,gBAAiB,EAAC;EAEpE,MAAM,YAAY,SAChB,iBAAiB;EAGnB,MAAMC,SAAsC,CAAE;AAE9C,OAAK,MAAM,EAAE,MAAM,SAAS,KAAM,GAAG,GAAG,IAAI,IAAI;AAC9C,UAAO,MAAM,EAAE,OAAO,KAAM,GAAE,mBAAmB;GACjD,MAAM,kBAAkB,MAAM,QAAQ,SAAS;GAC/C,MAAM,QAAQ;IACZ,GAAG;IACH;IACA,SAAS;GACV;AAED,QAAK,QAAQ,KAAK,SAAgB,CAChC,QAAO,KAAK,MAAM;EAErB;AAED,MAAI,OAAO,QAAQ;AACjB,UAAO,MAAM,EAAE,YAAY,GAAG,OAAQ,GAAE,oBAAoB;AAE5D,SAAM,UAAU,QAAQ,OAAO,CAAC,MAAM,CAAC,QAAQ;AAC7C,WAAO,MAAM,KAAK,2BAA2B;GAC9C,EAAC;EACH;CACF,SAAQ,OAAO;AACd,SAAO,MAAM,OAAc,yCAAyC;CACrE;AACF;AAED,eAAsB,uBAQpBC,WASAH,UACAF,kBACAD,SAAiB,UAAU,QAC3B;AACA,QAAO,cACL,QACA,kBACA,UAAU,QACV,UACA,UAAU,iBACX;AACF"}
package/dist/publisher-lFQleddL.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"publisher-lFQleddL.cjs","names":["logger: Logger","serviceDiscovery: ServiceDiscovery<any, any>","ev: MappedEvent<T, OutSchema>[]","response: InferStandardSchema<OutSchema>","publisherService: TPublisherService","events: MappedEvent<T, OutSchema>[]","construct: Construct
+
{"version":3,"file":"publisher-lFQleddL.cjs","names":["logger: Logger","serviceDiscovery: ServiceDiscovery<any, any>","ev: MappedEvent<T, OutSchema>[]","response: InferStandardSchema<OutSchema>","publisherService: TPublisherService","events: MappedEvent<T, OutSchema>[]","construct: Construct<\n Logger,\n TServiceName,\n T,\n OutSchema,\n TServices,\n TAuditStorageServiceName,\n TAuditStorage\n >"],"sources":["../src/publisher.ts"],"sourcesContent":["import type { AuditStorage } from '@geekmidas/audit';\nimport type { Logger } from '@geekmidas/logger';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\n\nimport type { EventPublisher, MappedEvent } from '@geekmidas/events';\nimport type { InferStandardSchema } from '@geekmidas/schema';\nimport type { Service, ServiceDiscovery } from '@geekmidas/services';\nimport type { Construct } from './Construct';\n\nexport async function publishEvents<\n T extends EventPublisher<any> | undefined,\n OutSchema extends StandardSchemaV1 | undefined = undefined,\n TServiceName extends string = string,\n TPublisherService extends Service<TServiceName, T> | undefined = undefined,\n>(\n logger: Logger,\n serviceDiscovery: ServiceDiscovery<any, any>,\n ev: MappedEvent<T, OutSchema>[] = [],\n response: InferStandardSchema<OutSchema>,\n publisherService: TPublisherService,\n) {\n try {\n if (!ev?.length) {\n logger.debug('No events to publish');\n return;\n }\n if (!publisherService) {\n logger.warn('No publisher service available');\n return;\n }\n\n const services = await serviceDiscovery.register([publisherService]);\n\n const publisher = services[\n publisherService.serviceName\n ] as EventPublisher<any>;\n\n const events: MappedEvent<T, OutSchema>[] = [];\n\n for (const { when, payload, type, ...e } of ev) {\n logger.debug({ event: type }, 'Processing event');\n const resolvedPayload = await payload(response);\n const event = {\n ...e,\n type,\n payload: resolvedPayload,\n };\n\n if (!when || when(response as any)) {\n events.push(event);\n }\n }\n\n if (events.length) {\n logger.debug({ eventCount: ev.length }, 'Publishing events');\n\n await publisher.publish(events).catch((err) => {\n logger.error(err, 'Failed to publish events');\n });\n }\n } catch (error) {\n logger.error(error as any, 'Something went wrong publishing events');\n }\n}\n\nexport async function publishConstructEvents<\n T extends EventPublisher<any> | undefined,\n OutSchema extends StandardSchemaV1 | undefined = undefined,\n TServiceName extends string = string,\n TServices extends Service[] = [],\n TAuditStorageServiceName extends string = string,\n TAuditStorage extends AuditStorage | undefined = undefined,\n>(\n construct: Construct<\n Logger,\n TServiceName,\n T,\n OutSchema,\n TServices,\n TAuditStorageServiceName,\n TAuditStorage\n >,\n response: InferStandardSchema<OutSchema>,\n serviceDiscovery: ServiceDiscovery<any, any>,\n logger: Logger = construct.logger,\n) {\n return publishEvents(\n logger,\n serviceDiscovery,\n construct.events,\n response,\n construct.publisherService,\n 
);\n}\n"],"mappings":";;AASA,eAAsB,cAMpBA,QACAC,kBACAC,KAAkC,CAAE,GACpCC,UACAC,kBACA;AACA,KAAI;AACF,OAAK,IAAI,QAAQ;AACf,UAAO,MAAM,uBAAuB;AACpC;EACD;AACD,OAAK,kBAAkB;AACrB,UAAO,KAAK,iCAAiC;AAC7C;EACD;EAED,MAAM,WAAW,MAAM,iBAAiB,SAAS,CAAC,gBAAiB,EAAC;EAEpE,MAAM,YAAY,SAChB,iBAAiB;EAGnB,MAAMC,SAAsC,CAAE;AAE9C,OAAK,MAAM,EAAE,MAAM,SAAS,KAAM,GAAG,GAAG,IAAI,IAAI;AAC9C,UAAO,MAAM,EAAE,OAAO,KAAM,GAAE,mBAAmB;GACjD,MAAM,kBAAkB,MAAM,QAAQ,SAAS;GAC/C,MAAM,QAAQ;IACZ,GAAG;IACH;IACA,SAAS;GACV;AAED,QAAK,QAAQ,KAAK,SAAgB,CAChC,QAAO,KAAK,MAAM;EAErB;AAED,MAAI,OAAO,QAAQ;AACjB,UAAO,MAAM,EAAE,YAAY,GAAG,OAAQ,GAAE,oBAAoB;AAE5D,SAAM,UAAU,QAAQ,OAAO,CAAC,MAAM,CAAC,QAAQ;AAC7C,WAAO,MAAM,KAAK,2BAA2B;GAC9C,EAAC;EACH;CACF,SAAQ,OAAO;AACd,SAAO,MAAM,OAAc,yCAAyC;CACrE;AACF;AAED,eAAsB,uBAQpBC,WASAH,UACAF,kBACAD,SAAiB,UAAU,QAC3B;AACA,QAAO,cACL,QACA,kBACA,UAAU,QACV,UACA,UAAU,iBACX;AACF"}
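The publisher.ts source embedded in the two maps above shows that publishEvents' runtime behavior is unchanged; only publishConstructEvents' typing widened so a Construct can also carry TAuditStorageServiceName and TAuditStorage. Below is a hedged sketch of a direct publishEvents call, assuming a relative import of the module above and a made-up 'user.created' event entry.

import type { MappedEvent } from '@geekmidas/events';
import type { Logger } from '@geekmidas/logger';
import type { ServiceDiscovery } from '@geekmidas/services';
import { publishEvents } from './publisher'; // assumed relative import

// Sketch: the event entry and publisher service are placeholders. publishEvents
// resolves the publisher via serviceDiscovery.register([publisherService]) and
// drops entries whose `when` predicate returns false.
async function publishAfterHandler(
  logger: Logger,
  serviceDiscovery: ServiceDiscovery<any, any>,
  response: any,
  publisherService: any,
) {
  const events: MappedEvent<any, any>[] = [
    {
      type: 'user.created', // hypothetical event type
      payload: async () => ({ id: response.id }),
      when: () => Boolean(response?.id),
    } as any,
  ];

  await publishEvents(logger, serviceDiscovery, events, response, publisherService);
}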
package/dist/publisher.d.cts
CHANGED
@@ -1,4 +1,5 @@
 import { Construct } from "./Construct-dI_rgdSp.cjs";
+import { AuditStorage } from "@geekmidas/audit";
 import { EventPublisher, MappedEvent } from "@geekmidas/events";
 import { Logger } from "@geekmidas/logger";
 import { Service, ServiceDiscovery } from "@geekmidas/services";
@@ -7,7 +8,7 @@ import { InferStandardSchema } from "@geekmidas/schema";
 
 //#region src/publisher.d.ts
 declare function publishEvents<T extends EventPublisher<any> | undefined, OutSchema extends StandardSchemaV1 | undefined = undefined, TServiceName extends string = string, TPublisherService extends Service<TServiceName, T> | undefined = undefined>(logger: Logger, serviceDiscovery: ServiceDiscovery<any, any>, ev: MappedEvent<T, OutSchema>[] | undefined, response: InferStandardSchema<OutSchema>, publisherService: TPublisherService): Promise<void>;
-declare function publishConstructEvents<T extends EventPublisher<any> | undefined, OutSchema extends StandardSchemaV1 | undefined = undefined, TServiceName extends string = string, TServices extends Service[] = []>(construct: Construct<Logger, TServiceName, T, OutSchema, TServices>, response: InferStandardSchema<OutSchema>, serviceDiscovery: ServiceDiscovery<any, any>, logger?: Logger): Promise<void>;
+declare function publishConstructEvents<T extends EventPublisher<any> | undefined, OutSchema extends StandardSchemaV1 | undefined = undefined, TServiceName extends string = string, TServices extends Service[] = [], TAuditStorageServiceName extends string = string, TAuditStorage extends AuditStorage | undefined = undefined>(construct: Construct<Logger, TServiceName, T, OutSchema, TServices, TAuditStorageServiceName, TAuditStorage>, response: InferStandardSchema<OutSchema>, serviceDiscovery: ServiceDiscovery<any, any>, logger?: Logger): Promise<void>;
 //#endregion
 export { publishConstructEvents, publishEvents };
 //# sourceMappingURL=publisher.d.cts.map
package/dist/publisher.d.mts
CHANGED
@@ -1,5 +1,6 @@
 import { Construct } from "./Construct-ZPqE0vhn.mjs";
 import { Service, ServiceDiscovery } from "@geekmidas/services";
+import { AuditStorage } from "@geekmidas/audit";
 import { EventPublisher, MappedEvent } from "@geekmidas/events";
 import { Logger } from "@geekmidas/logger";
 import { StandardSchemaV1 } from "@standard-schema/spec";
@@ -7,7 +8,7 @@ import { InferStandardSchema } from "@geekmidas/schema";

 //#region src/publisher.d.ts
 declare function publishEvents<T extends EventPublisher<any> | undefined, OutSchema extends StandardSchemaV1 | undefined = undefined, TServiceName extends string = string, TPublisherService extends Service<TServiceName, T> | undefined = undefined>(logger: Logger, serviceDiscovery: ServiceDiscovery<any, any>, ev: MappedEvent<T, OutSchema>[] | undefined, response: InferStandardSchema<OutSchema>, publisherService: TPublisherService): Promise<void>;
-declare function publishConstructEvents<T extends EventPublisher<any> | undefined, OutSchema extends StandardSchemaV1 | undefined = undefined, TServiceName extends string = string, TServices extends Service[] = []>(construct: Construct<Logger, TServiceName, T, OutSchema, TServices>, response: InferStandardSchema<OutSchema>, serviceDiscovery: ServiceDiscovery<any, any>, logger?: Logger): Promise<void>;
+declare function publishConstructEvents<T extends EventPublisher<any> | undefined, OutSchema extends StandardSchemaV1 | undefined = undefined, TServiceName extends string = string, TServices extends Service[] = [], TAuditStorageServiceName extends string = string, TAuditStorage extends AuditStorage | undefined = undefined>(construct: Construct<Logger, TServiceName, T, OutSchema, TServices, TAuditStorageServiceName, TAuditStorage>, response: InferStandardSchema<OutSchema>, serviceDiscovery: ServiceDiscovery<any, any>, logger?: Logger): Promise<void>;
 //#endregion
 export { publishConstructEvents, publishEvents };
 //# sourceMappingURL=publisher.d.mts.map
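Both publisher declaration files gain the same two trailing type parameters on publishConstructEvents (TAuditStorageServiceName and TAuditStorage), each with a default, so call sites written against the previous release keep type-checking unchanged. A minimal, self-contained sketch of that defaulting behaviour follows; the local types are illustrative stand-ins, not the package's real declarations.

// Illustrative stand-ins only; the real signature is the publishConstructEvents
// declaration shown in the diffs above.
type AuditStorageLike = { append(entry: unknown): Promise<void> };

async function publishConstructEventsLike<
  TServices extends unknown[] = [],
  TAuditStorageServiceName extends string = string,
  TAuditStorage extends AuditStorageLike | undefined = undefined,
>(construct: { services: TServices; auditStorage?: TAuditStorage }): Promise<void> {
  // No-op body: only the type-level behaviour is of interest here.
  void construct;
}

// A call that never mentions audit storage still compiles, because the two new
// parameters fall back to their defaults (`string` and `undefined`).
void publishConstructEventsLike({ services: [] });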
@@ -1,5 +1,5 @@
 require('../Construct-BYSPikVm.cjs');
-require('../Subscriber-
-const require_SubscriberBuilder = require('../SubscriberBuilder-
+require('../Subscriber-Bdh8rMSL.cjs');
+const require_SubscriberBuilder = require('../SubscriberBuilder-DieD_60p.cjs');

 exports.SubscriberBuilder = require_SubscriberBuilder.SubscriberBuilder;
@@ -1,5 +1,5 @@
 import "../Construct-LWeB1rSQ.mjs";
-import "../Subscriber-
-import { SubscriberBuilder } from "../SubscriberBuilder-
+import "../Subscriber-CJOWwaw1.mjs";
+import { SubscriberBuilder } from "../SubscriberBuilder-BWQmiYd8.mjs";

 export { SubscriberBuilder };
@@ -1,6 +1,6 @@
 require('../Construct-BYSPikVm.cjs');
-const require_Subscriber = require('../Subscriber-
-const require_SubscriberBuilder = require('../SubscriberBuilder-
+const require_Subscriber = require('../Subscriber-Bdh8rMSL.cjs');
+const require_SubscriberBuilder = require('../SubscriberBuilder-DieD_60p.cjs');

 //#region src/subscribers/index.ts
 const s = new require_SubscriberBuilder.SubscriberBuilder();
@@ -1,10 +1,10 @@
 import "../Construct-dI_rgdSp.cjs";
 import { Subscriber } from "../Subscriber-BhzqUzs-.cjs";
 import { SubscriberBuilder } from "../SubscriberBuilder-BCVkp-ga.cjs";
-import * as
+import * as _geekmidas_logger4 from "@geekmidas/logger";

 //#region src/subscribers/index.d.ts
-declare const s: SubscriberBuilder<[],
+declare const s: SubscriberBuilder<[], _geekmidas_logger4.Logger, undefined, undefined, string, []>;
 //#endregion
 export { Subscriber, SubscriberBuilder, s };
 //# sourceMappingURL=index.d.cts.map
@@ -1,10 +1,10 @@
 import "../Construct-ZPqE0vhn.mjs";
 import { Subscriber } from "../Subscriber-s6yfjeOc.mjs";
 import { SubscriberBuilder } from "../SubscriberBuilder-aCua5_wA.mjs";
-import * as
+import * as _geekmidas_logger6 from "@geekmidas/logger";

 //#region src/subscribers/index.d.ts
-declare const s: SubscriberBuilder<[],
+declare const s: SubscriberBuilder<[], _geekmidas_logger6.Logger, undefined, undefined, string, []>;
 //#endregion
 export { Subscriber, SubscriberBuilder, s };
 //# sourceMappingURL=index.d.mts.map
@@ -1,6 +1,6 @@
 import "../Construct-LWeB1rSQ.mjs";
-import { Subscriber } from "../Subscriber-
-import { SubscriberBuilder } from "../SubscriberBuilder-
+import { Subscriber } from "../Subscriber-CJOWwaw1.mjs";
+import { SubscriberBuilder } from "../SubscriberBuilder-BWQmiYd8.mjs";

 //#region src/subscribers/index.ts
 const s = new SubscriberBuilder();
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@geekmidas/constructs",
-  "version": "0.0.
+  "version": "0.0.14",
   "private": false,
   "type": "module",
   "exports": {
@@ -66,14 +66,14 @@
     "lodash.set": "~4.3.2",
     "lodash.uniqby": "~4.7.0",
     "openapi-types": "~12.1.3",
+    "@geekmidas/audit": "0.0.4",
+    "@geekmidas/errors": "0.0.1",
     "@geekmidas/cache": "0.0.7",
-    "@geekmidas/audit": "0.0.1",
-    "@geekmidas/logger": "0.0.1",
-    "@geekmidas/rate-limit": "0.1.0",
-    "@geekmidas/schema": "0.0.2",
-    "@geekmidas/services": "0.0.1",
     "@geekmidas/events": "0.0.2",
-    "@geekmidas/
+    "@geekmidas/services": "0.0.1",
+    "@geekmidas/schema": "0.0.2",
+    "@geekmidas/rate-limit": "0.1.0",
+    "@geekmidas/logger": "0.0.1"
   },
   "devDependencies": {
     "@types/lodash.compact": "~3.0.9",
package/src/crons/Cron.ts
CHANGED
@@ -15,14 +15,20 @@ export class Cron<
   OutSchema extends StandardSchemaV1 | undefined = undefined,
   TEventPublisher extends EventPublisher<any> | undefined = undefined,
   TEventPublisherServiceName extends string = string,
+  TDatabase = undefined,
+  TDatabaseServiceName extends string = string,
 > extends Function<
   TInput,
   TServices,
   TLogger,
   OutSchema,
-  FunctionHandler<TInput, TServices, TLogger, OutSchema>,
+  FunctionHandler<TInput, TServices, TLogger, OutSchema, TDatabase>,
   TEventPublisher,
-  TEventPublisherServiceName
+  TEventPublisherServiceName,
+  undefined,
+  string,
+  TDatabase,
+  TDatabaseServiceName
 > {
   static isCron(obj: any): obj is Cron<any, any, any, any> {
     return Boolean(
@@ -33,7 +39,7 @@ export class Cron<
   }

   constructor(
-    fn: FunctionHandler<TInput, TServices, TLogger, OutSchema>,
+    fn: FunctionHandler<TInput, TServices, TLogger, OutSchema, TDatabase>,
     timeout?: number,
     protected _schedule?: ScheduleExpression,
     input?: TInput,
@@ -43,6 +49,7 @@ export class Cron<
     publisherService?: Service<TEventPublisherServiceName, TEventPublisher>,
     events: any[] = [],
     memorySize?: number,
+    databaseService?: Service<TDatabaseServiceName, TDatabase>,
   ) {
     super(
       fn,
@@ -55,6 +62,8 @@ export class Cron<
       publisherService,
       events,
       memorySize,
+      undefined, // auditorStorageService
+      databaseService,
     );
   }

package/src/crons/CronBuilder.ts
CHANGED
@@ -19,13 +19,19 @@ export class CronBuilder<
   OutSchema extends StandardSchemaV1 | undefined = undefined,
   TEventPublisher extends EventPublisher<any> | undefined = undefined,
   TEventPublisherServiceName extends string = string,
+  TDatabase = undefined,
+  TDatabaseServiceName extends string = string,
 > extends FunctionBuilder<
   TInput,
   OutSchema,
   TServices,
   TLogger,
   TEventPublisher,
-  TEventPublisherServiceName
+  TEventPublisherServiceName,
+  undefined,
+  string,
+  TDatabase,
+  TDatabaseServiceName
 > {
   private _schedule?: ScheduleExpression;

@@ -46,7 +52,9 @@ export class CronBuilder<
     TLogger,
     OutSchema,
     TEventPublisher,
-    TEventPublisherServiceName
+    TEventPublisherServiceName,
+    TDatabase,
+    TDatabaseServiceName
   > {
     this._schedule = _expression;
     return this;
@@ -60,7 +68,9 @@ export class CronBuilder<
     TLogger,
     OutSchema,
     TEventPublisher,
-    TEventPublisherServiceName
+    TEventPublisherServiceName,
+    TDatabase,
+    TDatabaseServiceName
   > {
     this.inputSchema = schema as unknown as TInput;

@@ -70,7 +80,9 @@ export class CronBuilder<
       TLogger,
       OutSchema,
       TEventPublisher,
-      TEventPublisherServiceName
+      TEventPublisherServiceName,
+      TDatabase,
+      TDatabaseServiceName
     >;
   }

@@ -82,7 +94,9 @@ export class CronBuilder<
     TLogger,
     T,
     TEventPublisher,
-    TEventPublisherServiceName
+    TEventPublisherServiceName,
+    TDatabase,
+    TDatabaseServiceName
   > {
     this.outputSchema = schema as unknown as OutSchema;

@@ -92,7 +106,9 @@ export class CronBuilder<
       TLogger,
       T,
       TEventPublisher,
-      TEventPublisherServiceName
+      TEventPublisherServiceName,
+      TDatabase,
+      TDatabaseServiceName
     >;
   }

@@ -104,7 +120,9 @@ export class CronBuilder<
     TLogger,
     OutSchema,
     TEventPublisher,
-    TEventPublisherServiceName
+    TEventPublisherServiceName,
+    TDatabase,
+    TDatabaseServiceName
   > {
     this._services = uniqBy(
       [...this._services, ...services],
@@ -117,7 +135,9 @@ export class CronBuilder<
       TLogger,
       OutSchema,
       TEventPublisher,
-      TEventPublisherServiceName
+      TEventPublisherServiceName,
+      TDatabase,
+      TDatabaseServiceName
     >;
   }

@@ -129,7 +149,9 @@ export class CronBuilder<
     T,
     OutSchema,
     TEventPublisher,
-    TEventPublisherServiceName
+    TEventPublisherServiceName,
+    TDatabase,
+    TDatabaseServiceName
   > {
     this._logger = logger as unknown as TLogger;

@@ -139,13 +161,24 @@ export class CronBuilder<
       T,
       OutSchema,
       TEventPublisher,
-      TEventPublisherServiceName
+      TEventPublisherServiceName,
+      TDatabase,
+      TDatabaseServiceName
     >;
   }

   publisher<T extends EventPublisher<any>, TName extends string>(
     publisher: Service<TName, T>,
-  ): CronBuilder<
+  ): CronBuilder<
+    TInput,
+    TServices,
+    TLogger,
+    OutSchema,
+    T,
+    TName,
+    TDatabase,
+    TDatabaseServiceName
+  > {
     this._publisher = publisher as unknown as Service<
       TEventPublisherServiceName,
       TEventPublisher
@@ -157,19 +190,56 @@ export class CronBuilder<
       TLogger,
       OutSchema,
       T,
+      TName,
+      TDatabase,
+      TDatabaseServiceName
+    >;
+  }
+
+  /**
+   * Set the database service for this cron job.
+   * The database will be available in the handler context as `db`.
+   */
+  database<T, TName extends string>(
+    service: Service<TName, T>,
+  ): CronBuilder<
+    TInput,
+    TServices,
+    TLogger,
+    OutSchema,
+    TEventPublisher,
+    TEventPublisherServiceName,
+    T,
+    TName
+  > {
+    this._databaseService = service as unknown as Service<
+      TDatabaseServiceName,
+      TDatabase
+    >;
+
+    return this as unknown as CronBuilder<
+      TInput,
+      TServices,
+      TLogger,
+      OutSchema,
+      TEventPublisher,
+      TEventPublisherServiceName,
+      T,
       TName
     >;
   }

   handle(
-    fn: FunctionHandler<TInput, TServices, TLogger, OutSchema>,
+    fn: FunctionHandler<TInput, TServices, TLogger, OutSchema, TDatabase>,
   ): Cron<
     TInput,
     TServices,
     TLogger,
     OutSchema,
     TEventPublisher,
-    TEventPublisherServiceName
+    TEventPublisherServiceName,
+    TDatabase,
+    TDatabaseServiceName
   > {
     const cron = new Cron(
       fn,
@@ -182,6 +252,7 @@ export class CronBuilder<
       this._publisher,
       this._events,
       this._memorySize,
+      this._databaseService,
     );

     // Reset builder state after creating the cron to prevent pollution
@@ -189,6 +260,7 @@ export class CronBuilder<
     this._logger = DEFAULT_LOGGER;
     this._events = [];
     this._publisher = undefined;
+    this._databaseService = undefined;
     this._schedule = undefined;
     this.inputSchema = undefined;
     this.outputSchema = undefined;
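For consumers, the visible addition in CronBuilder is the database() step: it records a Service and, per the JSDoc above, the resolved instance reaches the handler as db. Below is a rough usage sketch under stated assumptions: the relative import mirrors the test file further down, the zero-argument construction mirrors the new SubscriberBuilder() shown earlier, the DbClient type and the cast are illustrative only, and the handler context is assumed to expose logger alongside db.

import type { Service } from '@geekmidas/services';
import { CronBuilder } from '../CronBuilder'; // assumed path, mirroring the test file

// Hypothetical database client type, for illustration only.
interface DbClient {
  query(sql: string): Promise<unknown[]>;
}

// Sketch of a database service; cast because the full Service contract
// (registration hooks, etc.) is not reproduced here.
const DatabaseService = {
  serviceName: 'db',
} as unknown as Service<'db', DbClient>;

// The builder threads TDatabase/TDatabaseServiceName through, so `db` below
// is expected to be typed as DbClient in the handler context.
const cleanup = new CronBuilder()
  .database(DatabaseService)
  .handle(async ({ db, logger }) => {
    logger.info('running scheduled cleanup');
    await db.query('DELETE FROM sessions WHERE expires_at < now()');
  });

export { cleanup };

In the builder itself, handle() forwards this._databaseService into the Cron constructor and then resets it along with the other builder state, so each handle() call produces an independent cron.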
@@ -1,7 +1,7 @@
-import { describe, expect, it } from 'vitest';
 import { ConsoleLogger } from '@geekmidas/logger/console';
-import { CronBuilder } from '../CronBuilder';
 import type { Service } from '@geekmidas/services';
+import { describe, expect, it } from 'vitest';
+import { CronBuilder } from '../CronBuilder';

 const ServiceA = {
   serviceName: 'a' as const,
@@ -1,4 +1,4 @@
-import type {
+import type { AuditStorage, AuditableAction } from '@geekmidas/audit';
 import type { Logger } from '@geekmidas/logger';
 import type { StandardSchemaV1 } from '@standard-schema/spec';
 import type { HttpMethod } from '../types';
@@ -30,12 +30,12 @@ import type {
   InferStandardSchema,
 } from '@geekmidas/schema';
 import { publishConstructEvents } from '../publisher';
+import type { CookieFn, HeaderFn } from './Endpoint';
+import type { MappedAudit } from './audit';
 import {
-  type AuditExecutionContext,
   createAuditContext,
   executeWithAuditTransaction,
 } from './processAudits';
-import type { MappedAudit } from './audit';

 // Helper function to publish events

@@ -259,15 +259,37 @@ export abstract class AmazonApiGatewayEndpoint<
     );

     // Warn if declarative audits are configured but no audit storage
-    const audits = this.endpoint.audits as MappedAudit<
+    const audits = this.endpoint.audits as MappedAudit<
+      TAuditAction,
+      TOutSchema
+    >[];
     if (!auditContext && audits?.length) {
       logger.warn('No auditor storage service available');
     }

+    // Resolve database service if configured
+    const rawDb = this.endpoint.databaseService
+      ? await serviceDiscovery
+          .register([this.endpoint.databaseService])
+          .then(
+            (s) =>
+              s[this.endpoint.databaseService!.serviceName as keyof typeof s],
+          )
+      : undefined;
+
     // Execute handler with automatic audit transaction support
     const result = await executeWithAuditTransaction(
       auditContext,
       async (auditor) => {
+        // Use audit transaction as db only if the storage uses the same database service
+        const sameDatabase =
+          auditContext?.storage?.databaseServiceName &&
+          auditContext.storage.databaseServiceName ===
+            this.endpoint.databaseService?.serviceName;
+        const db = sameDatabase
+          ? (auditor?.getTransaction?.() ?? rawDb)
+          : rawDb;
+
         const responseBuilder = new ResponseBuilder();
         const response = await this.endpoint.handler(
           {
@@ -277,6 +299,7 @@ export abstract class AmazonApiGatewayEndpoint<
             services: event.services,
             session: event.session,
             auditor,
+            db,
             ...input,
           } as any,
           responseBuilder,
@@ -374,8 +397,8 @@ export type Event<
 > = {
   services: ServiceRecord<TServices>;
   logger: TLogger;
-  header
-  cookie
+  header: HeaderFn;
+  cookie: CookieFn;
   session: TSession;
 } & TEvent &
   InferComposableStandardSchema<TInput>;
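The adaptor now resolves the endpoint's database service up front and then decides, inside the audit transaction callback, which handle to expose as db: when the audit storage is backed by the same database service as the endpoint, the audit transaction doubles as db; otherwise the independently resolved service is used. A self-contained restatement of that decision, using simplified local types rather than the package's own, might look like this:

// Simplified local stand-ins; the real types come from @geekmidas/audit and the
// endpoint classes in the diff above.
interface AuditorLike<Db> {
  getTransaction?(): Db | undefined;
}

interface AuditContextLike {
  storage?: { databaseServiceName?: string };
}

// Mirrors the `sameDatabase` logic added to the adaptor: reuse the audit
// transaction as `db` only when storage and endpoint point at the same service.
function resolveDb<Db>(
  auditContext: AuditContextLike | undefined,
  endpointDatabaseServiceName: string | undefined,
  auditor: AuditorLike<Db> | undefined,
  rawDb: Db | undefined,
): Db | undefined {
  const storageDbName = auditContext?.storage?.databaseServiceName;
  const sameDatabase =
    Boolean(storageDbName) && storageDbName === endpointDatabaseServiceName;

  return sameDatabase ? (auditor?.getTransaction?.() ?? rawDb) : rawDb;
}

// Example: storage and endpoint both use a "db" service, so the transaction wins.
const db = resolveDb(
  { storage: { databaseServiceName: 'db' } },
  'db',
  { getTransaction: () => ({ inTransaction: true }) },
  { inTransaction: false },
);
void db;

The diff also passes the chosen db into the handler context alongside auditor, so endpoint handlers see the same handle the audit layer is writing through when the services match.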