@geekmidas/constructs 0.2.4 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{AWSLambdaFunction-H65WfXLt.mjs → AWSLambdaFunction-C54a1doJ.mjs} +3 -3
- package/dist/{AWSLambdaFunction-H65WfXLt.mjs.map → AWSLambdaFunction-C54a1doJ.mjs.map} +1 -1
- package/dist/{AWSLambdaFunction-C-fuCLA3.cjs → AWSLambdaFunction-EPGY4s7i.cjs} +3 -3
- package/dist/{AWSLambdaFunction-C-fuCLA3.cjs.map → AWSLambdaFunction-EPGY4s7i.cjs.map} +1 -1
- package/dist/{AmazonApiGatewayEndpointAdaptor-pEWzF2uY.mjs → AmazonApiGatewayEndpointAdaptor-BT9JXihC.mjs} +2 -2
- package/dist/{AmazonApiGatewayEndpointAdaptor-pEWzF2uY.mjs.map → AmazonApiGatewayEndpointAdaptor-BT9JXihC.mjs.map} +1 -1
- package/dist/{AmazonApiGatewayEndpointAdaptor-Bk6ssx3K.cjs → AmazonApiGatewayEndpointAdaptor-DNFvvdmW.cjs} +2 -2
- package/dist/{AmazonApiGatewayEndpointAdaptor-Bk6ssx3K.cjs.map → AmazonApiGatewayEndpointAdaptor-DNFvvdmW.cjs.map} +1 -1
- package/dist/{AmazonApiGatewayV1EndpointAdaptor-MJpRbIaQ.mjs → AmazonApiGatewayV1EndpointAdaptor-BbkfC1dk.mjs} +2 -2
- package/dist/{AmazonApiGatewayV1EndpointAdaptor-MJpRbIaQ.mjs.map → AmazonApiGatewayV1EndpointAdaptor-BbkfC1dk.mjs.map} +1 -1
- package/dist/{AmazonApiGatewayV1EndpointAdaptor-uBp_4zLf.cjs → AmazonApiGatewayV1EndpointAdaptor-BiNzaAxD.cjs} +2 -2
- package/dist/{AmazonApiGatewayV1EndpointAdaptor-uBp_4zLf.cjs.map → AmazonApiGatewayV1EndpointAdaptor-BiNzaAxD.cjs.map} +1 -1
- package/dist/{AmazonApiGatewayV2EndpointAdaptor-I1W23Nvn.cjs → AmazonApiGatewayV2EndpointAdaptor-DZclCykB.cjs} +2 -2
- package/dist/{AmazonApiGatewayV2EndpointAdaptor-I1W23Nvn.cjs.map → AmazonApiGatewayV2EndpointAdaptor-DZclCykB.cjs.map} +1 -1
- package/dist/{AmazonApiGatewayV2EndpointAdaptor-ChO8BlDz.mjs → AmazonApiGatewayV2EndpointAdaptor-jRrQrIdm.mjs} +2 -2
- package/dist/{AmazonApiGatewayV2EndpointAdaptor-ChO8BlDz.mjs.map → AmazonApiGatewayV2EndpointAdaptor-jRrQrIdm.mjs.map} +1 -1
- package/dist/{BaseFunctionBuilder-B5gkW0Kt.mjs → BaseFunctionBuilder-Czi1Jwza.mjs} +2 -2
- package/dist/{BaseFunctionBuilder-B5gkW0Kt.mjs.map → BaseFunctionBuilder-Czi1Jwza.mjs.map} +1 -1
- package/dist/{BaseFunctionBuilder-C5Se7pdL.cjs → BaseFunctionBuilder-MYG3C9ug.cjs} +2 -2
- package/dist/{BaseFunctionBuilder-C5Se7pdL.cjs.map → BaseFunctionBuilder-MYG3C9ug.cjs.map} +1 -1
- package/dist/{Construct-BYSPikVm.cjs → Construct-Ba5cMxib.cjs} +2 -2
- package/dist/{Construct-BYSPikVm.cjs.map → Construct-Ba5cMxib.cjs.map} +1 -1
- package/dist/{Construct-LWeB1rSQ.mjs → Construct-DdyGHuag.mjs} +2 -2
- package/dist/{Construct-LWeB1rSQ.mjs.map → Construct-DdyGHuag.mjs.map} +1 -1
- package/dist/Construct.cjs +1 -1
- package/dist/Construct.mjs +1 -1
- package/dist/{Cron-Dy_HW2Vv.mjs → Cron-BxhGs5up.mjs} +3 -3
- package/dist/{Cron-Dy_HW2Vv.mjs.map → Cron-BxhGs5up.mjs.map} +1 -1
- package/dist/{Cron-Bi3QOge_.cjs → Cron-CGF4YAfM.cjs} +3 -3
- package/dist/{Cron-Bi3QOge_.cjs.map → Cron-CGF4YAfM.cjs.map} +1 -1
- package/dist/{CronBuilder-Dv_w7Yri.cjs → CronBuilder-CcxKRtVP.cjs} +4 -4
- package/dist/{CronBuilder-Dv_w7Yri.cjs.map → CronBuilder-CcxKRtVP.cjs.map} +1 -1
- package/dist/{CronBuilder-Bl3A2Zp4.mjs → CronBuilder-d2jh-IB2.mjs} +4 -4
- package/dist/{CronBuilder-Bl3A2Zp4.mjs.map → CronBuilder-d2jh-IB2.mjs.map} +1 -1
- package/dist/{Endpoint-BJo9Hhwm.cjs → Endpoint-BVGZXFyV.cjs} +3 -3
- package/dist/{Endpoint-BJo9Hhwm.cjs.map → Endpoint-BVGZXFyV.cjs.map} +1 -1
- package/dist/{Endpoint-B70_KKhu.mjs → Endpoint-CuOEswxJ.mjs} +3 -3
- package/dist/{Endpoint-B70_KKhu.mjs.map → Endpoint-CuOEswxJ.mjs.map} +1 -1
- package/dist/{EndpointBuilder-DeswNQdG.cjs → EndpointBuilder-Cgj1P_ra.cjs} +4 -4
- package/dist/{EndpointBuilder-DeswNQdG.cjs.map → EndpointBuilder-Cgj1P_ra.cjs.map} +1 -1
- package/dist/{EndpointBuilder-FyyoFTJ5.mjs → EndpointBuilder-DnCB1h1j.mjs} +4 -4
- package/dist/{EndpointBuilder-FyyoFTJ5.mjs.map → EndpointBuilder-DnCB1h1j.mjs.map} +1 -1
- package/dist/{EndpointFactory-eZc-XpNm.cjs → EndpointFactory-CYj6BYok.cjs} +2 -2
- package/dist/{EndpointFactory-eZc-XpNm.cjs.map → EndpointFactory-CYj6BYok.cjs.map} +1 -1
- package/dist/{EndpointFactory-CAneQs06.mjs → EndpointFactory-CbdxPCIH.mjs} +2 -2
- package/dist/{EndpointFactory-CAneQs06.mjs.map → EndpointFactory-CbdxPCIH.mjs.map} +1 -1
- package/dist/{Function-DfKsM5Kx.mjs → Function-BVHqIDp9.mjs} +2 -2
- package/dist/{Function-DfKsM5Kx.mjs.map → Function-BVHqIDp9.mjs.map} +1 -1
- package/dist/{Function-DagDbeXo.cjs → Function-DDZb1525.cjs} +2 -2
- package/dist/{Function-DagDbeXo.cjs.map → Function-DDZb1525.cjs.map} +1 -1
- package/dist/{FunctionBuilder-CVT7bG2o.mjs → FunctionBuilder-CrDYgfiI.mjs} +4 -4
- package/dist/{FunctionBuilder-CVT7bG2o.mjs.map → FunctionBuilder-CrDYgfiI.mjs.map} +1 -1
- package/dist/{FunctionBuilder-DXvG_XD-.cjs → FunctionBuilder-DswJ-9sD.cjs} +4 -4
- package/dist/{FunctionBuilder-DXvG_XD-.cjs.map → FunctionBuilder-DswJ-9sD.cjs.map} +1 -1
- package/dist/{FunctionExecutionWrapper-DkNycmOh.cjs → FunctionExecutionWrapper-BYI2bGTL.cjs} +2 -2
- package/dist/{FunctionExecutionWrapper-DkNycmOh.cjs.map → FunctionExecutionWrapper-BYI2bGTL.cjs.map} +1 -1
- package/dist/{FunctionExecutionWrapper-Bubnr0zA.mjs → FunctionExecutionWrapper-CLDh7Z2_.mjs} +2 -2
- package/dist/{FunctionExecutionWrapper-Bubnr0zA.mjs.map → FunctionExecutionWrapper-CLDh7Z2_.mjs.map} +1 -1
- package/dist/{HonoEndpointAdaptor-BusUWu1w.d.cts → HonoEndpointAdaptor-Bx9Y1bCZ.d.cts} +3 -3
- package/dist/{HonoEndpointAdaptor-CcvXzoYV.mjs → HonoEndpointAdaptor-NLlQk5iU.mjs} +4 -5
- package/dist/{HonoEndpointAdaptor-CcvXzoYV.mjs.map → HonoEndpointAdaptor-NLlQk5iU.mjs.map} +1 -1
- package/dist/{HonoEndpointAdaptor-DodwLM0-.cjs → HonoEndpointAdaptor-e6l9eVDU.cjs} +4 -5
- package/dist/{HonoEndpointAdaptor-DodwLM0-.cjs.map → HonoEndpointAdaptor-e6l9eVDU.cjs.map} +1 -1
- package/dist/{HonoEndpointAdaptor-g8xxh3tS.d.mts → HonoEndpointAdaptor-kb1ByjUL.d.mts} +3 -3
- package/dist/{Subscriber-DOt3svUC.cjs → Subscriber-BiHjVXtM.cjs} +2 -2
- package/dist/{Subscriber-DOt3svUC.cjs.map → Subscriber-BiHjVXtM.cjs.map} +1 -1
- package/dist/{Subscriber-kCHbH2fZ.mjs → Subscriber-BmPf9GFb.mjs} +2 -2
- package/dist/{Subscriber-kCHbH2fZ.mjs.map → Subscriber-BmPf9GFb.mjs.map} +1 -1
- package/dist/{SubscriberBuilder-Cj2u9k5Q.cjs → SubscriberBuilder-Cp1C-xtT.cjs} +2 -2
- package/dist/{SubscriberBuilder-Cj2u9k5Q.cjs.map → SubscriberBuilder-Cp1C-xtT.cjs.map} +1 -1
- package/dist/{SubscriberBuilder-DmxMU89X.mjs → SubscriberBuilder-DJPEeYDJ.mjs} +2 -2
- package/dist/{SubscriberBuilder-DmxMU89X.mjs.map → SubscriberBuilder-DJPEeYDJ.mjs.map} +1 -1
- package/dist/{TestEndpointAdaptor-wA-fmq4v.cjs → TestEndpointAdaptor-opEisC30.cjs} +2 -2
- package/dist/{TestEndpointAdaptor-wA-fmq4v.cjs.map → TestEndpointAdaptor-opEisC30.cjs.map} +1 -1
- package/dist/{TestEndpointAdaptor-1pPixE6y.mjs → TestEndpointAdaptor-oq5mfglM.mjs} +2 -2
- package/dist/{TestEndpointAdaptor-1pPixE6y.mjs.map → TestEndpointAdaptor-oq5mfglM.mjs.map} +1 -1
- package/dist/adaptors/aws.cjs +11 -11
- package/dist/adaptors/aws.mjs +11 -11
- package/dist/adaptors/hono.cjs +8 -8
- package/dist/adaptors/hono.d.cts +1 -1
- package/dist/adaptors/hono.d.mts +1 -1
- package/dist/adaptors/hono.mjs +8 -8
- package/dist/adaptors/testing.cjs +7 -7
- package/dist/adaptors/testing.mjs +7 -7
- package/dist/crons/Cron.cjs +6 -6
- package/dist/crons/Cron.mjs +6 -6
- package/dist/crons/CronBuilder.cjs +7 -7
- package/dist/crons/CronBuilder.mjs +7 -7
- package/dist/crons/index.cjs +7 -7
- package/dist/crons/index.d.mts +4 -4
- package/dist/crons/index.mjs +7 -7
- package/dist/endpoints/AmazonApiGatewayEndpointAdaptor.cjs +7 -7
- package/dist/endpoints/AmazonApiGatewayEndpointAdaptor.mjs +7 -7
- package/dist/endpoints/AmazonApiGatewayV1EndpointAdaptor.cjs +8 -8
- package/dist/endpoints/AmazonApiGatewayV1EndpointAdaptor.mjs +8 -8
- package/dist/endpoints/AmazonApiGatewayV2EndpointAdaptor.cjs +8 -8
- package/dist/endpoints/AmazonApiGatewayV2EndpointAdaptor.mjs +8 -8
- package/dist/endpoints/Endpoint.cjs +6 -6
- package/dist/endpoints/Endpoint.mjs +6 -6
- package/dist/endpoints/EndpointBuilder.cjs +7 -7
- package/dist/endpoints/EndpointBuilder.mjs +7 -7
- package/dist/endpoints/EndpointFactory.cjs +8 -8
- package/dist/endpoints/EndpointFactory.mjs +8 -8
- package/dist/endpoints/HonoEndpointAdaptor.cjs +8 -8
- package/dist/endpoints/HonoEndpointAdaptor.d.cts +1 -1
- package/dist/endpoints/HonoEndpointAdaptor.d.mts +1 -1
- package/dist/endpoints/HonoEndpointAdaptor.mjs +8 -8
- package/dist/endpoints/TestEndpointAdaptor.cjs +7 -7
- package/dist/endpoints/TestEndpointAdaptor.mjs +7 -7
- package/dist/endpoints/helpers.cjs +7 -7
- package/dist/endpoints/helpers.mjs +7 -7
- package/dist/endpoints/index.cjs +8 -8
- package/dist/endpoints/index.d.cts +2 -2
- package/dist/endpoints/index.d.mts +2 -2
- package/dist/endpoints/index.mjs +8 -8
- package/dist/functions/AWSLambdaFunction.cjs +6 -6
- package/dist/functions/AWSLambdaFunction.mjs +6 -6
- package/dist/functions/BaseFunctionBuilder.cjs +2 -2
- package/dist/functions/BaseFunctionBuilder.mjs +2 -2
- package/dist/functions/Function.cjs +2 -2
- package/dist/functions/Function.mjs +2 -2
- package/dist/functions/FunctionBuilder.cjs +4 -4
- package/dist/functions/FunctionBuilder.mjs +4 -4
- package/dist/functions/FunctionExecutionWrapper.cjs +5 -5
- package/dist/functions/FunctionExecutionWrapper.mjs +5 -5
- package/dist/functions/TestFunctionAdaptor.cjs +4 -4
- package/dist/functions/TestFunctionAdaptor.mjs +4 -4
- package/dist/functions/index.cjs +5 -5
- package/dist/functions/index.mjs +5 -5
- package/dist/functions-C6EK1xL6.mjs +8 -0
- package/dist/{functions-JhRsNoAZ.mjs.map → functions-C6EK1xL6.mjs.map} +1 -1
- package/dist/{functions-FCb-wWFC.cjs → functions-fTid0RMK.cjs} +2 -2
- package/dist/{functions-FCb-wWFC.cjs.map → functions-fTid0RMK.cjs.map} +1 -1
- package/dist/{helpers-DxxSpLfw.cjs → helpers-BcP1tXAi.cjs} +2 -2
- package/dist/{helpers-DxxSpLfw.cjs.map → helpers-BcP1tXAi.cjs.map} +1 -1
- package/dist/{helpers-C3B2lVrM.mjs → helpers-ByRTDO_m.mjs} +2 -2
- package/dist/{helpers-C3B2lVrM.mjs.map → helpers-ByRTDO_m.mjs.map} +1 -1
- package/dist/index.cjs +1 -1
- package/dist/index.mjs +1 -1
- package/dist/subscribers/Subscriber.cjs +2 -2
- package/dist/subscribers/Subscriber.mjs +2 -2
- package/dist/subscribers/SubscriberBuilder.cjs +3 -3
- package/dist/subscribers/SubscriberBuilder.mjs +3 -3
- package/dist/subscribers/index.cjs +3 -3
- package/dist/subscribers/index.d.cts +2 -2
- package/dist/subscribers/index.mjs +3 -3
- package/package.json +64 -13
- package/src/Construct.ts +3 -3
- package/src/__benchmarks__/endpoint.bench.ts +494 -0
- package/src/__benchmarks__/hono-server.bench.ts +249 -0
- package/src/endpoints/HonoEndpointAdaptor.ts +1 -2
- package/dist/functions-JhRsNoAZ.mjs +0 -8
package/dist/{HonoEndpointAdaptor-BusUWu1w.d.cts → HonoEndpointAdaptor-Bx9Y1bCZ.d.cts} +3 -3

@@ -6,7 +6,7 @@ import { Logger } from "@geekmidas/logger";
 import { Service, ServiceDiscovery, ServiceRecord } from "@geekmidas/services";
 import { StandardSchemaV1 } from "@standard-schema/spec";
 import { EnvironmentParser } from "@geekmidas/envkit";
-import * as
+import * as hono_types3 from "hono/types";
 import { Context, Hono } from "hono";
 
 //#region src/endpoints/HonoEndpointAdaptor.d.ts
@@ -33,11 +33,11 @@ declare class HonoEndpoint<TRoute extends string, TMethod extends HttpMethod, TI
 static validate<T extends StandardSchemaV1>(c: Context<any, string, {}>, data: unknown, schema?: T): Promise<any>;
 addRoute(serviceDiscovery: ServiceDiscovery<ServiceRecord<TServices>, TLogger>, app: Hono): void;
 static applyEventMiddleware(app: Hono, serviceDiscovery: ServiceDiscovery<any, any>): void;
-static fromRoutes<TLogger extends Logger, TServices extends Service[]>(routes: string[], envParser: EnvironmentParser<{}>, app: Hono<
+static fromRoutes<TLogger extends Logger, TServices extends Service[]>(routes: string[], envParser: EnvironmentParser<{}>, app: Hono<hono_types3.BlankEnv, hono_types3.BlankSchema, "/">, logger: TLogger, cwd?: string, options?: HonoEndpointOptions): Promise<Hono>;
 static addRoutes<TServices extends Service[] = [], TLogger extends Logger = Logger>(endpoints: Endpoint<string, HttpMethod, any, any, TServices, TLogger>[], serviceDiscovery: ServiceDiscovery<ServiceRecord<TServices>, TLogger>, app: Hono, options?: HonoEndpointOptions): void;
 static addRoute<TRoute extends string, TMethod extends HttpMethod, TInput extends EndpointSchemas = {}, TOutSchema extends StandardSchemaV1 | undefined = undefined, TServices extends Service[] = [], TLogger extends Logger = Logger, TSession = unknown, TEventPublisher extends EventPublisher<any> | undefined = undefined, TEventPublisherServiceName extends string = string, TAuditStorage extends AuditStorage | undefined = undefined, TAuditStorageServiceName extends string = string, TAuditAction extends AuditableAction<string, unknown> = AuditableAction<string, unknown>, TDatabase = undefined, TDatabaseServiceName extends string = string>(endpoint: Endpoint<TRoute, TMethod, TInput, TOutSchema, TServices, TLogger, TSession, TEventPublisher, TEventPublisherServiceName, TAuditStorage, TAuditStorageServiceName, TAuditAction, TDatabase, TDatabaseServiceName>, serviceDiscovery: ServiceDiscovery<ServiceRecord<TServices>, TLogger>, app: Hono): void;
 static addDocsRoute<TServices extends Service[] = [], TLogger extends Logger = Logger>(endpoints: Endpoint<string, HttpMethod, any, any, TServices, TLogger>[], app: Hono, docsPath: string, openApiOptions?: HonoEndpointOptions['openApiOptions']): void;
 }
 //#endregion
 export { HonoEndpoint, HonoEndpointOptions };
-//# sourceMappingURL=HonoEndpointAdaptor-BusUWu1w.d.cts.map
+//# sourceMappingURL=HonoEndpointAdaptor-Bx9Y1bCZ.d.cts.map
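The retyped `fromRoutes` declaration above now pins the `app` parameter to `Hono<BlankEnv, BlankSchema, "/">` and spells out the trailing `logger`, optional `cwd`, and optional `options` parameters. A minimal TypeScript sketch of how a host application might call it under that signature; the subpath import and the route-glob list are assumptions for illustration, not taken from this diff:

```ts
import { Hono } from 'hono';
import type { EnvironmentParser } from '@geekmidas/envkit';
import type { Logger } from '@geekmidas/logger';
// Assumed subpath export; the dist layout (dist/endpoints/index.*) suggests it,
// but check the package.json "exports" map before relying on it.
import { HonoEndpoint } from '@geekmidas/constructs/endpoints';

// The env parser and logger are supplied by the host application;
// constructing them is out of scope for this sketch.
export async function buildServer(
  envParser: EnvironmentParser<{}>,
  logger: Logger,
): Promise<Hono> {
  const app = new Hono(); // matches the declared Hono<BlankEnv, BlankSchema, "/"> parameter type
  return HonoEndpoint.fromRoutes<Logger, []>(
    ['src/endpoints/**/*.ts'], // hypothetical route-module patterns
    envParser,
    app,
    logger,
    process.cwd(),             // optional; the embedded source defaults cwd to process.cwd()
    { docsPath: '/docs' },     // optional HonoEndpointOptions; '/docs' is also the default
  );
}
```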
package/dist/{HonoEndpointAdaptor-CcvXzoYV.mjs → HonoEndpointAdaptor-NLlQk5iU.mjs} +4 -5

@@ -1,7 +1,7 @@
 import { publishConstructEvents } from "./publisher-Bw4770Hi.mjs";
-import { Endpoint, ResponseBuilder } from "./Endpoint-B70_KKhu.mjs";
+import { Endpoint, ResponseBuilder } from "./Endpoint-CuOEswxJ.mjs";
 import { createAuditContext, executeWithAuditTransaction } from "./processAudits-Dj8UGqcW.mjs";
-import { getEndpointsFromRoutes } from "./helpers-C3B2lVrM.mjs";
+import { getEndpointsFromRoutes } from "./helpers-ByRTDO_m.mjs";
 import { parseHonoQuery } from "./parseHonoQuery-CwFKw2ua.mjs";
 import { withRlsContext } from "@geekmidas/db/rls";
 import { wrapError } from "@geekmidas/errors";
@@ -216,8 +216,7 @@ var HonoEndpoint = class HonoEndpoint {
 try {
 const openApiSchema = await Endpoint.buildOpenApiSchema(endpoints, openApiOptions);
 return c.json(openApiSchema);
-} catch (error) {
-console.error("Error generating OpenAPI schema:", error);
+} catch {
 return c.json({ error: "Failed to generate OpenAPI documentation" }, 500);
 }
 });
@@ -226,4 +225,4 @@ var HonoEndpoint = class HonoEndpoint {
 
 //#endregion
 export { HonoEndpoint };
-//# sourceMappingURL=HonoEndpointAdaptor-CcvXzoYV.mjs.map
+//# sourceMappingURL=HonoEndpointAdaptor-NLlQk5iU.mjs.map
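Beyond the renamed chunk imports, the substantive change in this bundle is the docs route's error handling. A condensed excerpt of the 0.3.0 shape of the handler, taken from the `sourcesContent` embedded in the accompanying source maps (the identifiers `app`, `docsPath`, `endpoints`, `Endpoint`, and `openApiOptions` come from the enclosing `addDocsRoute` method):

```ts
app.get(docsPath, async (c) => {
  try {
    const openApiSchema = await Endpoint.buildOpenApiSchema(endpoints, openApiOptions);
    return c.json(openApiSchema);
  } catch {
    // 0.2.4 bound the caught error and logged it via
    // console.error('Error generating OpenAPI schema:', error) before responding;
    // 0.3.0 uses the optional catch binding (ES2019+) and returns the 500 payload directly.
    return c.json({ error: 'Failed to generate OpenAPI documentation' }, 500);
  }
});
```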
package/dist/{HonoEndpointAdaptor-CcvXzoYV.mjs.map → HonoEndpointAdaptor-NLlQk5iU.mjs.map} +1 -1

@@ -1 +1 @@
The single-line source map is regenerated: its "file" field now names HonoEndpointAdaptor-NLlQk5iU.mjs, and the src/endpoints/HonoEndpointAdaptor.ts source embedded in sourcesContent (together with the corresponding mappings) reflects the docs-route catch block dropping its error binding and the console.error call.
@@ -1,8 +1,8 @@
|
|
|
1
1
|
const require_chunk = require('./chunk-CUT6urMc.cjs');
|
|
2
2
|
const require_publisher = require('./publisher-lFQleddL.cjs');
|
|
3
|
-
const require_Endpoint = require('./Endpoint-
|
|
3
|
+
const require_Endpoint = require('./Endpoint-BVGZXFyV.cjs');
|
|
4
4
|
const require_processAudits = require('./processAudits-CzHkPokQ.cjs');
|
|
5
|
-
const require_helpers = require('./helpers-
|
|
5
|
+
const require_helpers = require('./helpers-BcP1tXAi.cjs');
|
|
6
6
|
const require_parseHonoQuery = require('./parseHonoQuery-CT8Cvin-.cjs');
|
|
7
7
|
const __geekmidas_db_rls = require_chunk.__toESM(require("@geekmidas/db/rls"));
|
|
8
8
|
const __geekmidas_errors = require_chunk.__toESM(require("@geekmidas/errors"));
|
|
@@ -217,8 +217,7 @@ var HonoEndpoint = class HonoEndpoint {
 try {
 const openApiSchema = await require_Endpoint.Endpoint.buildOpenApiSchema(endpoints, openApiOptions);
 return c.json(openApiSchema);
-} catch
-console.error("Error generating OpenAPI schema:", error);
+} catch {
 return c.json({ error: "Failed to generate OpenAPI documentation" }, 500);
 }
 });
@@ -232,4 +231,4 @@ Object.defineProperty(exports, 'HonoEndpoint', {
 return HonoEndpoint;
 }
 });
-//# sourceMappingURL=HonoEndpointAdaptor-
+//# sourceMappingURL=HonoEndpointAdaptor-e6l9eVDU.cjs.map
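
Note on the HonoEndpointAdaptor chunk above: the only behavioural change in this file is in the OpenAPI docs route, where the `console.error("Error generating OpenAPI schema:", error)` call is removed and the catch clause becomes an optional catch binding. A minimal sketch of the resulting route shape, with a dummy schema builder standing in for `Endpoint.buildOpenApiSchema`:

```ts
import { Hono } from 'hono';

// Dummy stand-in for Endpoint.buildOpenApiSchema(endpoints, openApiOptions).
const buildOpenApiSchema = async () => ({ openapi: '3.0.0', paths: {} });

const app = new Hono();

// 0.3.0 shape: failures answer with a 500 JSON body and nothing is logged;
// the caught error is intentionally left unbound.
app.get('/docs', async (c) => {
  try {
    return c.json(await buildOpenApiSchema());
  } catch {
    return c.json({ error: 'Failed to generate OpenAPI documentation' }, 500);
  }
});
```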
@@ -1 +1 @@
-
{"version":3,"file":"HonoEndpointAdaptor-DodwLM0-.cjs","names":["endpoint: Endpoint<\n TRoute,\n TMethod,\n TInput,\n TOutSchema,\n TServices,\n TLogger,\n TSession,\n TEventPublisher,\n TEventPublisherServiceName,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction,\n TDatabase,\n TDatabaseServiceName\n >","c: Context<any, string, {}>","data: unknown","schema?: T","serviceDiscovery: ServiceDiscovery<ServiceRecord<TServices>, TLogger>","app: Hono","serviceDiscovery: ServiceDiscovery<any, any>","routes: string[]","envParser: EnvironmentParser<{}>","Hono","logger: TLogger","options?: HonoEndpointOptions","endpoints: Endpoint<string, HttpMethod, any, any, TServices, TLogger>[]","db: TDatabase | undefined","ResponseBuilder","metadata","output","result","validationError: any","e: any","docsPath: string","openApiOptions?: HonoEndpointOptions['openApiOptions']"],"sources":["../src/endpoints/HonoEndpointAdaptor.ts"],"sourcesContent":["import type { AuditStorage, AuditableAction } from '@geekmidas/audit';\nimport type { EnvironmentParser } from '@geekmidas/envkit';\nimport type { EventPublisher } from '@geekmidas/events';\nimport type { Logger } from '@geekmidas/logger';\nimport { checkRateLimit, getRateLimitHeaders } from '@geekmidas/rate-limit';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\nimport { type Context, Hono } from 'hono';\nimport { setCookie } from 'hono/cookie';\nimport { logger as honoLogger } from 'hono/logger';\nimport { timing } from 'hono/timing';\nimport { validator } from 'hono/validator';\nimport type { HttpMethod, LowerHttpMethod } from '../types';\nimport {\n Endpoint,\n type EndpointContext,\n type EndpointSchemas,\n ResponseBuilder,\n} from './Endpoint';\nimport { getEndpointsFromRoutes } from './helpers';\nimport { parseHonoQuery } from './parseHonoQuery';\n\nimport { withRlsContext } from '@geekmidas/db/rls';\nimport { wrapError } from '@geekmidas/errors';\nimport {\n type Service,\n ServiceDiscovery,\n type ServiceRecord,\n} from '@geekmidas/services';\nimport type { ContentfulStatusCode } from 'hono/utils/http-status';\nimport { publishConstructEvents } from '../publisher';\nimport type { MappedAudit } from './audit';\nimport {\n createAuditContext,\n executeWithAuditTransaction,\n} from './processAudits';\n\nexport interface HonoEndpointOptions {\n /**\n * Path where OpenAPI documentation will be served.\n * Set to false to disable docs route.\n * @default '/docs'\n */\n docsPath?: string | false;\n /**\n * OpenAPI schema options\n */\n openApiOptions?: {\n title?: string;\n version?: string;\n description?: string;\n };\n}\n\nexport class HonoEndpoint<\n TRoute extends string,\n TMethod extends HttpMethod,\n TInput extends EndpointSchemas = {},\n TOutSchema extends StandardSchemaV1 | undefined = undefined,\n TServices extends Service[] = [],\n TLogger extends Logger = Logger,\n TSession = unknown,\n TEventPublisher extends EventPublisher<any> | undefined = undefined,\n TEventPublisherServiceName extends string = string,\n TAuditStorage extends AuditStorage | undefined = undefined,\n TAuditStorageServiceName extends string = string,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n TDatabase = undefined,\n TDatabaseServiceName extends string = string,\n> {\n constructor(\n private readonly endpoint: Endpoint<\n TRoute,\n TMethod,\n TInput,\n TOutSchema,\n TServices,\n TLogger,\n TSession,\n TEventPublisher,\n TEventPublisherServiceName,\n TAuditStorage,\n TAuditStorageServiceName,\n 
TAuditAction,\n TDatabase,\n TDatabaseServiceName\n >,\n ) {}\n\n static isDev = process.env.NODE_ENV === 'development';\n\n static async validate<T extends StandardSchemaV1>(\n c: Context<any, string, {}>,\n data: unknown,\n schema?: T,\n ) {\n if (!schema) {\n return undefined;\n }\n\n const parsed = await Endpoint.validate(schema, data);\n\n if (parsed.issues) {\n return c.json(parsed.issues, 422);\n }\n\n return parsed.value;\n }\n addRoute(\n serviceDiscovery: ServiceDiscovery<ServiceRecord<TServices>, TLogger>,\n app: Hono,\n ): void {\n HonoEndpoint.addRoute(this.endpoint, serviceDiscovery, app);\n }\n\n static applyEventMiddleware(\n app: Hono,\n serviceDiscovery: ServiceDiscovery<any, any>,\n ) {\n app.use(async (c, next) => {\n await next();\n // @ts-ignore\n const endpoint = c.get('__endpoint') as Endpoint<\n any,\n any,\n any,\n any,\n any,\n any,\n any,\n any\n >;\n // @ts-ignore\n const response = c.get('__response');\n // @ts-ignore\n const logger = c.get('__logger') as Logger;\n\n if (Endpoint.isSuccessStatus(c.res.status) && endpoint) {\n // Process events (audits are handled in the handler with transaction support)\n await publishConstructEvents<any, any>(\n endpoint,\n response,\n serviceDiscovery,\n logger,\n );\n }\n });\n }\n\n static async fromRoutes<TLogger extends Logger, TServices extends Service[]>(\n routes: string[],\n envParser: EnvironmentParser<{}>,\n app = new Hono(),\n logger: TLogger,\n cwd = process.cwd(),\n options?: HonoEndpointOptions,\n ): Promise<Hono> {\n const endpoints = await getEndpointsFromRoutes<TServices>(routes, cwd);\n const serviceDiscovery = ServiceDiscovery.getInstance<\n ServiceRecord<TServices>,\n TLogger\n >(logger, envParser);\n\n HonoEndpoint.addRoutes(endpoints, serviceDiscovery, app, options);\n\n return app;\n }\n\n static addRoutes<\n TServices extends Service[] = [],\n TLogger extends Logger = Logger,\n >(\n endpoints: Endpoint<string, HttpMethod, any, any, TServices, TLogger>[],\n serviceDiscovery: ServiceDiscovery<ServiceRecord<TServices>, TLogger>,\n app: Hono,\n options?: HonoEndpointOptions,\n ): void {\n // Add timing middleware (always enabled)\n app.use('*', timing());\n\n // Add logger middleware in development mode\n\n if (HonoEndpoint.isDev) {\n app.use('*', honoLogger());\n }\n\n // Add docs route if not disabled\n const docsPath =\n options?.docsPath !== false ? 
options?.docsPath || '/docs' : null;\n if (docsPath) {\n HonoEndpoint.addDocsRoute(\n endpoints,\n app,\n docsPath,\n options?.openApiOptions,\n );\n }\n\n // Sort endpoints to ensure static routes come before dynamic ones\n const sortedEndpoints = endpoints.sort((a, b) => {\n const aSegments = a.route.split('/');\n const bSegments = b.route.split('/');\n\n // Compare each segment\n for (let i = 0; i < Math.max(aSegments.length, bSegments.length); i++) {\n const aSegment = aSegments[i] || '';\n const bSegment = bSegments[i] || '';\n\n // If one is dynamic and the other is not, static comes first\n const aIsDynamic = aSegment.startsWith(':');\n const bIsDynamic = bSegment.startsWith(':');\n\n if (!aIsDynamic && bIsDynamic) return -1;\n if (aIsDynamic && !bIsDynamic) return 1;\n\n // If both are the same type, compare alphabetically\n if (aSegment !== bSegment) {\n return aSegment.localeCompare(bSegment);\n }\n }\n\n return 0;\n });\n HonoEndpoint.applyEventMiddleware(app, serviceDiscovery);\n for (const endpoint of sortedEndpoints) {\n HonoEndpoint.addRoute(endpoint, serviceDiscovery, app);\n }\n }\n\n static addRoute<\n TRoute extends string,\n TMethod extends HttpMethod,\n TInput extends EndpointSchemas = {},\n TOutSchema extends StandardSchemaV1 | undefined = undefined,\n TServices extends Service[] = [],\n TLogger extends Logger = Logger,\n TSession = unknown,\n TEventPublisher extends EventPublisher<any> | undefined = undefined,\n TEventPublisherServiceName extends string = string,\n TAuditStorage extends AuditStorage | undefined = undefined,\n TAuditStorageServiceName extends string = string,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n TDatabase = undefined,\n TDatabaseServiceName extends string = string,\n >(\n endpoint: Endpoint<\n TRoute,\n TMethod,\n TInput,\n TOutSchema,\n TServices,\n TLogger,\n TSession,\n TEventPublisher,\n TEventPublisherServiceName,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction,\n TDatabase,\n TDatabaseServiceName\n >,\n serviceDiscovery: ServiceDiscovery<ServiceRecord<TServices>, TLogger>,\n app: Hono,\n ): void {\n const { route } = endpoint;\n const method = endpoint.method.toLowerCase() as LowerHttpMethod<TMethod>;\n\n app[method](\n route,\n validator('json', (value, c) =>\n HonoEndpoint.validate(c, value, endpoint.input?.body),\n ),\n validator('query', (_, c) => {\n const parsedQuery = parseHonoQuery(c);\n return HonoEndpoint.validate(c, parsedQuery, endpoint.input?.query);\n }),\n validator('param', (params, c) =>\n HonoEndpoint.validate(c, params, endpoint.input?.params),\n ),\n async (c) => {\n const logger = endpoint.logger.child({\n endpoint: endpoint.fullPath,\n route: endpoint.route,\n host: c.header('host'),\n method: endpoint.method,\n path: c.req.path,\n }) as TLogger;\n\n try {\n const headerValues = c.req.header();\n\n const header = Endpoint.createHeaders(headerValues);\n const cookie = Endpoint.createCookies(headerValues.cookie);\n\n const services = await serviceDiscovery.register(endpoint.services);\n\n // Resolve database service early so it's available for session extraction\n const rawDb = endpoint.databaseService\n ? 
await serviceDiscovery\n .register([endpoint.databaseService])\n .then(\n (s) =>\n s[endpoint.databaseService!.serviceName as keyof typeof s],\n )\n : undefined;\n\n const session = await endpoint.getSession({\n services,\n logger,\n header,\n cookie,\n ...(rawDb !== undefined && { db: rawDb }),\n } as any);\n\n const isAuthorized = await endpoint.authorize({\n header,\n cookie,\n services,\n logger,\n session,\n });\n\n if (!isAuthorized) {\n logger.warn('Unauthorized access attempt');\n return c.json({ error: 'Unauthorized' }, 401);\n }\n\n // Check rate limit if configured\n if (endpoint.rateLimit) {\n const rateLimitInfo = await checkRateLimit(endpoint.rateLimit, {\n header,\n services,\n logger,\n session,\n path: c.req.path,\n method: endpoint.method,\n });\n\n // Set rate limit headers\n const rateLimitHeaders = getRateLimitHeaders(\n rateLimitInfo,\n endpoint.rateLimit,\n );\n for (const [key, value] of Object.entries(rateLimitHeaders)) {\n if (value) {\n c.header(key, value);\n }\n }\n }\n\n // Create audit context if audit storage is configured\n const auditContext = await createAuditContext(\n endpoint,\n serviceDiscovery,\n logger,\n {\n session,\n header,\n cookie,\n services: services as Record<string, unknown>,\n },\n );\n\n // Warn if declarative audits are configured but no audit storage\n const audits = endpoint.audits as MappedAudit<\n TAuditAction,\n TOutSchema\n >[];\n if (!auditContext && audits?.length) {\n logger.warn('No auditor storage service available');\n }\n\n // Extract RLS context if configured and not bypassed\n const rlsActive =\n endpoint.rlsConfig && !endpoint.rlsBypass && rawDb !== undefined;\n const rlsContext = rlsActive\n ? await endpoint.rlsConfig!.extractor({\n services,\n session,\n header,\n cookie,\n logger,\n })\n : undefined;\n\n // Execute handler with automatic audit transaction support\n const result = await executeWithAuditTransaction(\n auditContext,\n async (auditor) => {\n // Use audit transaction as db only if the storage uses the same database service\n const sameDatabase =\n auditContext?.storage?.databaseServiceName &&\n auditContext.storage.databaseServiceName ===\n endpoint.databaseService?.serviceName;\n const baseDb = sameDatabase\n ? (auditor?.getTransaction?.() ?? rawDb)\n : rawDb;\n\n // Helper to execute handler with given db\n const executeHandler = async (db: TDatabase | undefined) => {\n const responseBuilder = new ResponseBuilder();\n const response = await endpoint.handler(\n {\n services,\n logger,\n body: c.req.valid('json'),\n query: c.req.valid('query'),\n params: c.req.valid('param'),\n session,\n header,\n cookie,\n auditor,\n db,\n } as unknown as EndpointContext<\n TInput,\n TServices,\n TLogger,\n TSession,\n TAuditAction,\n TDatabase,\n TAuditStorage\n >,\n responseBuilder,\n );\n\n // Check if response has metadata\n let data = response;\n let metadata = responseBuilder.getMetadata();\n\n if (Endpoint.hasMetadata(response)) {\n data = response.data;\n metadata = response.metadata;\n }\n\n const output = endpoint.outputSchema\n ? 
await endpoint.parseOutput(data)\n : undefined;\n\n return { output, metadata, responseBuilder };\n };\n\n // If RLS is active, wrap handler with RLS context\n if (rlsActive && rlsContext && baseDb) {\n return withRlsContext(\n baseDb as any,\n rlsContext,\n async (trx) => executeHandler(trx as TDatabase),\n { prefix: endpoint.rlsConfig!.prefix },\n );\n }\n\n return executeHandler(baseDb as TDatabase | undefined);\n },\n // Process declarative audits after handler (inside transaction)\n async (result, auditor) => {\n if (!audits?.length) return;\n\n for (const audit of audits) {\n if (audit.when && !audit.when(result.output as any)) {\n continue;\n }\n const payload = audit.payload(result.output as any);\n const entityId = audit.entityId?.(result.output as any);\n auditor.audit(audit.type as any, payload as any, {\n table: audit.table,\n entityId,\n });\n }\n },\n // Pass rawDb so storage can reuse existing transactions\n { db: rawDb },\n );\n\n const { output, metadata } = result;\n\n try {\n let status = endpoint.status as ContentfulStatusCode;\n\n // Apply response metadata\n if (metadata.status) {\n status = metadata.status as ContentfulStatusCode;\n }\n\n if (metadata.headers) {\n for (const [key, value] of Object.entries(metadata.headers)) {\n c.header(key, value);\n }\n }\n\n if (metadata.cookies) {\n for (const [name, { value, options }] of metadata.cookies) {\n setCookie(c, name, value, options);\n }\n }\n\n // @ts-ignore\n c.set('__response', output);\n // @ts-ignore\n c.set('__endpoint', endpoint);\n // @ts-ignore\n c.set('__logger', logger);\n // @ts-ignore\n c.set('__session', session);\n // @ts-ignore\n c.set('__services', services);\n\n if (HonoEndpoint.isDev) {\n logger.info({ status, body: output }, 'Outgoing response');\n }\n // @ts-ignore\n return c.json(output, status);\n } catch (validationError: any) {\n logger.error(validationError, 'Output validation failed');\n const error = wrapError(\n validationError,\n 422,\n 'Response validation failed',\n );\n if (HonoEndpoint.isDev) {\n logger.info(\n { status: error.statusCode, body: error },\n 'Outgoing response',\n );\n }\n return c.json(error, error.statusCode as ContentfulStatusCode);\n }\n } catch (e: any) {\n logger.error(e, 'Error processing endpoint request');\n const error = wrapError(e, 500, 'Internal Server Error');\n if (HonoEndpoint.isDev) {\n logger.info(\n { status: error.statusCode, body: error },\n 'Outgoing response',\n );\n }\n return c.json(error, error.statusCode as ContentfulStatusCode);\n }\n },\n );\n }\n\n static addDocsRoute<\n TServices extends Service[] = [],\n TLogger extends Logger = Logger,\n >(\n endpoints: Endpoint<string, HttpMethod, any, any, TServices, TLogger>[],\n app: Hono,\n docsPath: string,\n openApiOptions?: HonoEndpointOptions['openApiOptions'],\n ): void {\n app.get(docsPath, async (c) => {\n try {\n const openApiSchema = await Endpoint.buildOpenApiSchema(\n endpoints,\n openApiOptions,\n );\n\n return c.json(openApiSchema);\n } catch (error) {\n console.error('Error generating OpenAPI schema:', error);\n return c.json(\n { error: 'Failed to generate OpenAPI documentation' },\n 500,\n );\n }\n });\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;AAqDA,IAAa,eAAb,MAAa,aAkBX;CACA,YACmBA,UAgBjB;EAhBiB;CAgBf;CAEJ,OAAO,QAAQ,QAAQ,IAAI,aAAa;CAExC,aAAa,SACXC,GACAC,MACAC,QACA;AACA,OAAK,OACH;EAGF,MAAM,SAAS,MAAM,0BAAS,SAAS,QAAQ,KAAK;AAEpD,MAAI,OAAO,OACT,QAAO,EAAE,KAAK,OAAO,QAAQ,IAAI;AAGnC,SAAO,OAAO;CACf;CACD,SACEC,kBACAC,KACM;AACN,eAAa,SAAS,KAAK,UAAU,kBAAkB,IAAI;CAC5D;CAED,OAAO,qBACLA,KACAC,kBACA;AACA,MAAI,IAAI,OAAO,GAAG,SAAS;AACzB,SAAM,MAAM;GAEZ,MAAM,WAAW,EAAE,IAAI,aAAa;GAWpC,MAAM,WAAW,EAAE,IAAI,aAAa;GAEpC,MAAM,SAAS,EAAE,IAAI,WAAW;AAEhC,OAAI,0BAAS,gBAAgB,EAAE,IAAI,OAAO,IAAI,SAE5C,OAAM,yCACJ,UACA,UACA,kBACA,OACD;EAEJ,EAAC;CACH;CAED,aAAa,WACXC,QACAC,WACA,MAAM,IAAIC,aACVC,QACA,MAAM,QAAQ,KAAK,EACnBC,SACe;EACf,MAAM,YAAY,MAAM,uCAAkC,QAAQ,IAAI;EACtE,MAAM,mBAAmB,sCAAiB,YAGxC,QAAQ,UAAU;AAEpB,eAAa,UAAU,WAAW,kBAAkB,KAAK,QAAQ;AAEjE,SAAO;CACR;CAED,OAAO,UAILC,WACAR,kBACAC,KACAM,SACM;AAEN,MAAI,IAAI,KAAK,yBAAQ,CAAC;AAItB,MAAI,aAAa,MACf,KAAI,IAAI,KAAK,yBAAY,CAAC;EAI5B,MAAM,WACJ,SAAS,aAAa,QAAQ,SAAS,YAAY,UAAU;AAC/D,MAAI,SACF,cAAa,aACX,WACA,KACA,UACA,SAAS,eACV;EAIH,MAAM,kBAAkB,UAAU,KAAK,CAAC,GAAG,MAAM;GAC/C,MAAM,YAAY,EAAE,MAAM,MAAM,IAAI;GACpC,MAAM,YAAY,EAAE,MAAM,MAAM,IAAI;AAGpC,QAAK,IAAI,IAAI,GAAG,IAAI,KAAK,IAAI,UAAU,QAAQ,UAAU,OAAO,EAAE,KAAK;IACrE,MAAM,WAAW,UAAU,MAAM;IACjC,MAAM,WAAW,UAAU,MAAM;IAGjC,MAAM,aAAa,SAAS,WAAW,IAAI;IAC3C,MAAM,aAAa,SAAS,WAAW,IAAI;AAE3C,SAAK,cAAc,WAAY,QAAO;AACtC,QAAI,eAAe,WAAY,QAAO;AAGtC,QAAI,aAAa,SACf,QAAO,SAAS,cAAc,SAAS;GAE1C;AAED,UAAO;EACR,EAAC;AACF,eAAa,qBAAqB,KAAK,iBAAiB;AACxD,OAAK,MAAM,YAAY,gBACrB,cAAa,SAAS,UAAU,kBAAkB,IAAI;CAEzD;CAED,OAAO,SAmBLX,UAgBAI,kBACAC,KACM;EACN,MAAM,EAAE,OAAO,GAAG;EAClB,MAAM,SAAS,SAAS,OAAO,aAAa;AAE5C,MAAI,QACF,OACA,8BAAU,QAAQ,CAAC,OAAO,MACxB,aAAa,SAAS,GAAG,OAAO,SAAS,OAAO,KAAK,CACtD,EACD,8BAAU,SAAS,CAAC,GAAG,MAAM;GAC3B,MAAM,cAAc,sCAAe,EAAE;AACrC,UAAO,aAAa,SAAS,GAAG,aAAa,SAAS,OAAO,MAAM;EACpE,EAAC,EACF,8BAAU,SAAS,CAAC,QAAQ,MAC1B,aAAa,SAAS,GAAG,QAAQ,SAAS,OAAO,OAAO,CACzD,EACD,OAAO,MAAM;GACX,MAAM,SAAS,SAAS,OAAO,MAAM;IACnC,UAAU,SAAS;IACnB,OAAO,SAAS;IAChB,MAAM,EAAE,OAAO,OAAO;IACtB,QAAQ,SAAS;IACjB,MAAM,EAAE,IAAI;GACb,EAAC;AAEF,OAAI;IACF,MAAM,eAAe,EAAE,IAAI,QAAQ;IAEnC,MAAM,SAAS,0BAAS,cAAc,aAAa;IACnD,MAAM,SAAS,0BAAS,cAAc,aAAa,OAAO;IAE1D,MAAM,WAAW,MAAM,iBAAiB,SAAS,SAAS,SAAS;IAGnE,MAAM,QAAQ,SAAS,kBACnB,MAAM,iBACH,SAAS,CAAC,SAAS,eAAgB,EAAC,CACpC,KACC,CAAC,MACC,EAAE,SAAS,gBAAiB,aAC/B;IAGP,MAAM,UAAU,MAAM,SAAS,WAAW;KACxC;KACA;KACA;KACA;KACA,GAAI,oBAAuB,EAAE,IAAI,MAAO;IACzC,EAAQ;IAET,MAAM,eAAe,MAAM,SAAS,UAAU;KAC5C;KACA;KACA;KACA;KACA;IACD,EAAC;AAEF,SAAK,cAAc;AACjB,YAAO,KAAK,8BAA8B;AAC1C,YAAO,EAAE,KAAK,EAAE,OAAO,eAAgB,GAAE,IAAI;IAC9C;AAGD,QAAI,SAAS,WAAW;KACtB,MAAM,gBAAgB,MAAM,2CAAe,SAAS,WAAW;MAC7D;MACA;MACA;MACA;MACA,MAAM,EAAE,IAAI;MACZ,QAAQ,SAAS;KAClB,EAAC;KAGF,MAAM,mBAAmB,gDACvB,eACA,SAAS,UACV;AACD,UAAK,MAAM,CAAC,KAAK,MAAM,IAAI,OAAO,QAAQ,iBAAiB,CACzD,KAAI,MACF,GAAE,OAAO,KAAK,MAAM;IAGzB;IAGD,MAAM,eAAe,MAAM,yCACzB,UACA,kBACA,QACA;KACE;KACA;KACA;KACU;IACX,EACF;IAGD,MAAM,SAAS,SAAS;AAIxB,SAAK,gBAAgB,QAAQ,OAC3B,QAAO,KAAK,uCAAuC;IAIrD,MAAM,YACJ,SAAS,cAAc,SAAS,aAAa;IAC/C,MAAM,aAAa,YACf,MAAM,SAAS,UAAW,UAAU;KAClC;KACA;KACA;KACA;KACA;IACD,EAAC;IAIN,MAAM,SAAS,MAAM,kDACnB,cACA,OAAO,YAAY;KAEjB,MAAM,eACJ,cAAc,SAAS,uBACvB,aAAa,QAAQ,wBACnB,SAAS,iBAAiB;KAC9B,MAAM,SAAS,eACV,SAAS,kBAAkB,IAAI,QAChC;KAGJ,MAAM,iBAAiB,OAAOQ,OAA8B;MAC1D,MAAM,kBAAkB,IAAIC;MAC5B,MAAM,WAAW,MAAM,SAAS,QAC9B;OACE;OACA;OACA,MAAM,EAAE,IAAI,MAAM,OAAO;OACzB,OAAO,EAAE,IAAI,MAAM,QAAQ;OAC3B,QAAQ,EAAE,IAAI,MAAM,QAAQ;OAC5B;OACA;OACA;OACA;OACA;MACD,GASD,gBACD;MAGD,IAAI,OAAO;MACX,IAAIC,aAAW,gBAAgB,aAAa;AAE5C,UAAI,0BAAS,YAAY,SAAS,EAAE;AAClC,cAAO,
SAAS;AAChB,oBAAW,SAAS;MACrB;MAED,MAAMC,WAAS,SAAS,eACpB,MAAM,SAAS,YAAY,KAAK;AAGpC,aAAO;OAAE;OAAQ;OAAU;MAAiB;KAC7C;AAGD,SAAI,aAAa,cAAc,OAC7B,QAAO,uCACL,QACA,YACA,OAAO,QAAQ,eAAe,IAAiB,EAC/C,EAAE,QAAQ,SAAS,UAAW,OAAQ,EACvC;AAGH,YAAO,eAAe,OAAgC;IACvD,GAED,OAAOC,UAAQ,YAAY;AACzB,UAAK,QAAQ,OAAQ;AAErB,UAAK,MAAM,SAAS,QAAQ;AAC1B,UAAI,MAAM,SAAS,MAAM,KAAKA,SAAO,OAAc,CACjD;MAEF,MAAM,UAAU,MAAM,QAAQA,SAAO,OAAc;MACnD,MAAM,WAAW,MAAM,WAAWA,SAAO,OAAc;AACvD,cAAQ,MAAM,MAAM,MAAa,SAAgB;OAC/C,OAAO,MAAM;OACb;MACD,EAAC;KACH;IACF,GAED,EAAE,IAAI,MAAO,EACd;IAED,MAAM,EAAE,QAAQ,UAAU,GAAG;AAE7B,QAAI;KACF,IAAI,SAAS,SAAS;AAGtB,SAAI,SAAS,OACX,UAAS,SAAS;AAGpB,SAAI,SAAS,QACX,MAAK,MAAM,CAAC,KAAK,MAAM,IAAI,OAAO,QAAQ,SAAS,QAAQ,CACzD,GAAE,OAAO,KAAK,MAAM;AAIxB,SAAI,SAAS,QACX,MAAK,MAAM,CAAC,MAAM,EAAE,OAAO,SAAS,CAAC,IAAI,SAAS,QAChD,4BAAU,GAAG,MAAM,OAAO,QAAQ;AAKtC,OAAE,IAAI,cAAc,OAAO;AAE3B,OAAE,IAAI,cAAc,SAAS;AAE7B,OAAE,IAAI,YAAY,OAAO;AAEzB,OAAE,IAAI,aAAa,QAAQ;AAE3B,OAAE,IAAI,cAAc,SAAS;AAE7B,SAAI,aAAa,MACf,QAAO,KAAK;MAAE;MAAQ,MAAM;KAAQ,GAAE,oBAAoB;AAG5D,YAAO,EAAE,KAAK,QAAQ,OAAO;IAC9B,SAAQC,iBAAsB;AAC7B,YAAO,MAAM,iBAAiB,2BAA2B;KACzD,MAAM,QAAQ,kCACZ,iBACA,KACA,6BACD;AACD,SAAI,aAAa,MACf,QAAO,KACL;MAAE,QAAQ,MAAM;MAAY,MAAM;KAAO,GACzC,oBACD;AAEH,YAAO,EAAE,KAAK,OAAO,MAAM,WAAmC;IAC/D;GACF,SAAQC,GAAQ;AACf,WAAO,MAAM,GAAG,oCAAoC;IACpD,MAAM,QAAQ,kCAAU,GAAG,KAAK,wBAAwB;AACxD,QAAI,aAAa,MACf,QAAO,KACL;KAAE,QAAQ,MAAM;KAAY,MAAM;IAAO,GACzC,oBACD;AAEH,WAAO,EAAE,KAAK,OAAO,MAAM,WAAmC;GAC/D;EACF,EACF;CACF;CAED,OAAO,aAILP,WACAP,KACAe,UACAC,gBACM;AACN,MAAI,IAAI,UAAU,OAAO,MAAM;AAC7B,OAAI;IACF,MAAM,gBAAgB,MAAM,0BAAS,mBACnC,WACA,eACD;AAED,WAAO,EAAE,KAAK,cAAc;GAC7B,SAAQ,OAAO;AACd,YAAQ,MAAM,oCAAoC,MAAM;AACxD,WAAO,EAAE,KACP,EAAE,OAAO,2CAA4C,GACrD,IACD;GACF;EACF,EAAC;CACH;AACF"}
+
{"version":3,"file":"HonoEndpointAdaptor-e6l9eVDU.cjs","names":["endpoint: Endpoint<\n TRoute,\n TMethod,\n TInput,\n TOutSchema,\n TServices,\n TLogger,\n TSession,\n TEventPublisher,\n TEventPublisherServiceName,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction,\n TDatabase,\n TDatabaseServiceName\n >","c: Context<any, string, {}>","data: unknown","schema?: T","serviceDiscovery: ServiceDiscovery<ServiceRecord<TServices>, TLogger>","app: Hono","serviceDiscovery: ServiceDiscovery<any, any>","routes: string[]","envParser: EnvironmentParser<{}>","Hono","logger: TLogger","options?: HonoEndpointOptions","endpoints: Endpoint<string, HttpMethod, any, any, TServices, TLogger>[]","db: TDatabase | undefined","ResponseBuilder","metadata","output","result","validationError: any","e: any","docsPath: string","openApiOptions?: HonoEndpointOptions['openApiOptions']"],"sources":["../src/endpoints/HonoEndpointAdaptor.ts"],"sourcesContent":["import type { AuditStorage, AuditableAction } from '@geekmidas/audit';\nimport type { EnvironmentParser } from '@geekmidas/envkit';\nimport type { EventPublisher } from '@geekmidas/events';\nimport type { Logger } from '@geekmidas/logger';\nimport { checkRateLimit, getRateLimitHeaders } from '@geekmidas/rate-limit';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\nimport { type Context, Hono } from 'hono';\nimport { setCookie } from 'hono/cookie';\nimport { logger as honoLogger } from 'hono/logger';\nimport { timing } from 'hono/timing';\nimport { validator } from 'hono/validator';\nimport type { HttpMethod, LowerHttpMethod } from '../types';\nimport {\n Endpoint,\n type EndpointContext,\n type EndpointSchemas,\n ResponseBuilder,\n} from './Endpoint';\nimport { getEndpointsFromRoutes } from './helpers';\nimport { parseHonoQuery } from './parseHonoQuery';\n\nimport { withRlsContext } from '@geekmidas/db/rls';\nimport { wrapError } from '@geekmidas/errors';\nimport {\n type Service,\n ServiceDiscovery,\n type ServiceRecord,\n} from '@geekmidas/services';\nimport type { ContentfulStatusCode } from 'hono/utils/http-status';\nimport { publishConstructEvents } from '../publisher';\nimport type { MappedAudit } from './audit';\nimport {\n createAuditContext,\n executeWithAuditTransaction,\n} from './processAudits';\n\nexport interface HonoEndpointOptions {\n /**\n * Path where OpenAPI documentation will be served.\n * Set to false to disable docs route.\n * @default '/docs'\n */\n docsPath?: string | false;\n /**\n * OpenAPI schema options\n */\n openApiOptions?: {\n title?: string;\n version?: string;\n description?: string;\n };\n}\n\nexport class HonoEndpoint<\n TRoute extends string,\n TMethod extends HttpMethod,\n TInput extends EndpointSchemas = {},\n TOutSchema extends StandardSchemaV1 | undefined = undefined,\n TServices extends Service[] = [],\n TLogger extends Logger = Logger,\n TSession = unknown,\n TEventPublisher extends EventPublisher<any> | undefined = undefined,\n TEventPublisherServiceName extends string = string,\n TAuditStorage extends AuditStorage | undefined = undefined,\n TAuditStorageServiceName extends string = string,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n TDatabase = undefined,\n TDatabaseServiceName extends string = string,\n> {\n constructor(\n private readonly endpoint: Endpoint<\n TRoute,\n TMethod,\n TInput,\n TOutSchema,\n TServices,\n TLogger,\n TSession,\n TEventPublisher,\n TEventPublisherServiceName,\n TAuditStorage,\n TAuditStorageServiceName,\n 
TAuditAction,\n TDatabase,\n TDatabaseServiceName\n >,\n ) {}\n\n static isDev = process.env.NODE_ENV === 'development';\n\n static async validate<T extends StandardSchemaV1>(\n c: Context<any, string, {}>,\n data: unknown,\n schema?: T,\n ) {\n if (!schema) {\n return undefined;\n }\n\n const parsed = await Endpoint.validate(schema, data);\n\n if (parsed.issues) {\n return c.json(parsed.issues, 422);\n }\n\n return parsed.value;\n }\n addRoute(\n serviceDiscovery: ServiceDiscovery<ServiceRecord<TServices>, TLogger>,\n app: Hono,\n ): void {\n HonoEndpoint.addRoute(this.endpoint, serviceDiscovery, app);\n }\n\n static applyEventMiddleware(\n app: Hono,\n serviceDiscovery: ServiceDiscovery<any, any>,\n ) {\n app.use(async (c, next) => {\n await next();\n // @ts-ignore\n const endpoint = c.get('__endpoint') as Endpoint<\n any,\n any,\n any,\n any,\n any,\n any,\n any,\n any\n >;\n // @ts-ignore\n const response = c.get('__response');\n // @ts-ignore\n const logger = c.get('__logger') as Logger;\n\n if (Endpoint.isSuccessStatus(c.res.status) && endpoint) {\n // Process events (audits are handled in the handler with transaction support)\n await publishConstructEvents<any, any>(\n endpoint,\n response,\n serviceDiscovery,\n logger,\n );\n }\n });\n }\n\n static async fromRoutes<TLogger extends Logger, TServices extends Service[]>(\n routes: string[],\n envParser: EnvironmentParser<{}>,\n app = new Hono(),\n logger: TLogger,\n cwd = process.cwd(),\n options?: HonoEndpointOptions,\n ): Promise<Hono> {\n const endpoints = await getEndpointsFromRoutes<TServices>(routes, cwd);\n const serviceDiscovery = ServiceDiscovery.getInstance<\n ServiceRecord<TServices>,\n TLogger\n >(logger, envParser);\n\n HonoEndpoint.addRoutes(endpoints, serviceDiscovery, app, options);\n\n return app;\n }\n\n static addRoutes<\n TServices extends Service[] = [],\n TLogger extends Logger = Logger,\n >(\n endpoints: Endpoint<string, HttpMethod, any, any, TServices, TLogger>[],\n serviceDiscovery: ServiceDiscovery<ServiceRecord<TServices>, TLogger>,\n app: Hono,\n options?: HonoEndpointOptions,\n ): void {\n // Add timing middleware (always enabled)\n app.use('*', timing());\n\n // Add logger middleware in development mode\n\n if (HonoEndpoint.isDev) {\n app.use('*', honoLogger());\n }\n\n // Add docs route if not disabled\n const docsPath =\n options?.docsPath !== false ? 
options?.docsPath || '/docs' : null;\n if (docsPath) {\n HonoEndpoint.addDocsRoute(\n endpoints,\n app,\n docsPath,\n options?.openApiOptions,\n );\n }\n\n // Sort endpoints to ensure static routes come before dynamic ones\n const sortedEndpoints = endpoints.sort((a, b) => {\n const aSegments = a.route.split('/');\n const bSegments = b.route.split('/');\n\n // Compare each segment\n for (let i = 0; i < Math.max(aSegments.length, bSegments.length); i++) {\n const aSegment = aSegments[i] || '';\n const bSegment = bSegments[i] || '';\n\n // If one is dynamic and the other is not, static comes first\n const aIsDynamic = aSegment.startsWith(':');\n const bIsDynamic = bSegment.startsWith(':');\n\n if (!aIsDynamic && bIsDynamic) return -1;\n if (aIsDynamic && !bIsDynamic) return 1;\n\n // If both are the same type, compare alphabetically\n if (aSegment !== bSegment) {\n return aSegment.localeCompare(bSegment);\n }\n }\n\n return 0;\n });\n HonoEndpoint.applyEventMiddleware(app, serviceDiscovery);\n for (const endpoint of sortedEndpoints) {\n HonoEndpoint.addRoute(endpoint, serviceDiscovery, app);\n }\n }\n\n static addRoute<\n TRoute extends string,\n TMethod extends HttpMethod,\n TInput extends EndpointSchemas = {},\n TOutSchema extends StandardSchemaV1 | undefined = undefined,\n TServices extends Service[] = [],\n TLogger extends Logger = Logger,\n TSession = unknown,\n TEventPublisher extends EventPublisher<any> | undefined = undefined,\n TEventPublisherServiceName extends string = string,\n TAuditStorage extends AuditStorage | undefined = undefined,\n TAuditStorageServiceName extends string = string,\n TAuditAction extends AuditableAction<string, unknown> = AuditableAction<\n string,\n unknown\n >,\n TDatabase = undefined,\n TDatabaseServiceName extends string = string,\n >(\n endpoint: Endpoint<\n TRoute,\n TMethod,\n TInput,\n TOutSchema,\n TServices,\n TLogger,\n TSession,\n TEventPublisher,\n TEventPublisherServiceName,\n TAuditStorage,\n TAuditStorageServiceName,\n TAuditAction,\n TDatabase,\n TDatabaseServiceName\n >,\n serviceDiscovery: ServiceDiscovery<ServiceRecord<TServices>, TLogger>,\n app: Hono,\n ): void {\n const { route } = endpoint;\n const method = endpoint.method.toLowerCase() as LowerHttpMethod<TMethod>;\n\n app[method](\n route,\n validator('json', (value, c) =>\n HonoEndpoint.validate(c, value, endpoint.input?.body),\n ),\n validator('query', (_, c) => {\n const parsedQuery = parseHonoQuery(c);\n return HonoEndpoint.validate(c, parsedQuery, endpoint.input?.query);\n }),\n validator('param', (params, c) =>\n HonoEndpoint.validate(c, params, endpoint.input?.params),\n ),\n async (c) => {\n const logger = endpoint.logger.child({\n endpoint: endpoint.fullPath,\n route: endpoint.route,\n host: c.header('host'),\n method: endpoint.method,\n path: c.req.path,\n }) as TLogger;\n\n try {\n const headerValues = c.req.header();\n\n const header = Endpoint.createHeaders(headerValues);\n const cookie = Endpoint.createCookies(headerValues.cookie);\n\n const services = await serviceDiscovery.register(endpoint.services);\n\n // Resolve database service early so it's available for session extraction\n const rawDb = endpoint.databaseService\n ? 
await serviceDiscovery\n .register([endpoint.databaseService])\n .then(\n (s) =>\n s[endpoint.databaseService!.serviceName as keyof typeof s],\n )\n : undefined;\n\n const session = await endpoint.getSession({\n services,\n logger,\n header,\n cookie,\n ...(rawDb !== undefined && { db: rawDb }),\n } as any);\n\n const isAuthorized = await endpoint.authorize({\n header,\n cookie,\n services,\n logger,\n session,\n });\n\n if (!isAuthorized) {\n logger.warn('Unauthorized access attempt');\n return c.json({ error: 'Unauthorized' }, 401);\n }\n\n // Check rate limit if configured\n if (endpoint.rateLimit) {\n const rateLimitInfo = await checkRateLimit(endpoint.rateLimit, {\n header,\n services,\n logger,\n session,\n path: c.req.path,\n method: endpoint.method,\n });\n\n // Set rate limit headers\n const rateLimitHeaders = getRateLimitHeaders(\n rateLimitInfo,\n endpoint.rateLimit,\n );\n for (const [key, value] of Object.entries(rateLimitHeaders)) {\n if (value) {\n c.header(key, value);\n }\n }\n }\n\n // Create audit context if audit storage is configured\n const auditContext = await createAuditContext(\n endpoint,\n serviceDiscovery,\n logger,\n {\n session,\n header,\n cookie,\n services: services as Record<string, unknown>,\n },\n );\n\n // Warn if declarative audits are configured but no audit storage\n const audits = endpoint.audits as MappedAudit<\n TAuditAction,\n TOutSchema\n >[];\n if (!auditContext && audits?.length) {\n logger.warn('No auditor storage service available');\n }\n\n // Extract RLS context if configured and not bypassed\n const rlsActive =\n endpoint.rlsConfig && !endpoint.rlsBypass && rawDb !== undefined;\n const rlsContext = rlsActive\n ? await endpoint.rlsConfig!.extractor({\n services,\n session,\n header,\n cookie,\n logger,\n })\n : undefined;\n\n // Execute handler with automatic audit transaction support\n const result = await executeWithAuditTransaction(\n auditContext,\n async (auditor) => {\n // Use audit transaction as db only if the storage uses the same database service\n const sameDatabase =\n auditContext?.storage?.databaseServiceName &&\n auditContext.storage.databaseServiceName ===\n endpoint.databaseService?.serviceName;\n const baseDb = sameDatabase\n ? (auditor?.getTransaction?.() ?? rawDb)\n : rawDb;\n\n // Helper to execute handler with given db\n const executeHandler = async (db: TDatabase | undefined) => {\n const responseBuilder = new ResponseBuilder();\n const response = await endpoint.handler(\n {\n services,\n logger,\n body: c.req.valid('json'),\n query: c.req.valid('query'),\n params: c.req.valid('param'),\n session,\n header,\n cookie,\n auditor,\n db,\n } as unknown as EndpointContext<\n TInput,\n TServices,\n TLogger,\n TSession,\n TAuditAction,\n TDatabase,\n TAuditStorage\n >,\n responseBuilder,\n );\n\n // Check if response has metadata\n let data = response;\n let metadata = responseBuilder.getMetadata();\n\n if (Endpoint.hasMetadata(response)) {\n data = response.data;\n metadata = response.metadata;\n }\n\n const output = endpoint.outputSchema\n ? 
await endpoint.parseOutput(data)\n : undefined;\n\n return { output, metadata, responseBuilder };\n };\n\n // If RLS is active, wrap handler with RLS context\n if (rlsActive && rlsContext && baseDb) {\n return withRlsContext(\n baseDb as any,\n rlsContext,\n async (trx) => executeHandler(trx as TDatabase),\n { prefix: endpoint.rlsConfig!.prefix },\n );\n }\n\n return executeHandler(baseDb as TDatabase | undefined);\n },\n // Process declarative audits after handler (inside transaction)\n async (result, auditor) => {\n if (!audits?.length) return;\n\n for (const audit of audits) {\n if (audit.when && !audit.when(result.output as any)) {\n continue;\n }\n const payload = audit.payload(result.output as any);\n const entityId = audit.entityId?.(result.output as any);\n auditor.audit(audit.type as any, payload as any, {\n table: audit.table,\n entityId,\n });\n }\n },\n // Pass rawDb so storage can reuse existing transactions\n { db: rawDb },\n );\n\n const { output, metadata } = result;\n\n try {\n let status = endpoint.status as ContentfulStatusCode;\n\n // Apply response metadata\n if (metadata.status) {\n status = metadata.status as ContentfulStatusCode;\n }\n\n if (metadata.headers) {\n for (const [key, value] of Object.entries(metadata.headers)) {\n c.header(key, value);\n }\n }\n\n if (metadata.cookies) {\n for (const [name, { value, options }] of metadata.cookies) {\n setCookie(c, name, value, options);\n }\n }\n\n // @ts-ignore\n c.set('__response', output);\n // @ts-ignore\n c.set('__endpoint', endpoint);\n // @ts-ignore\n c.set('__logger', logger);\n // @ts-ignore\n c.set('__session', session);\n // @ts-ignore\n c.set('__services', services);\n\n if (HonoEndpoint.isDev) {\n logger.info({ status, body: output }, 'Outgoing response');\n }\n // @ts-ignore\n return c.json(output, status);\n } catch (validationError: any) {\n logger.error(validationError, 'Output validation failed');\n const error = wrapError(\n validationError,\n 422,\n 'Response validation failed',\n );\n if (HonoEndpoint.isDev) {\n logger.info(\n { status: error.statusCode, body: error },\n 'Outgoing response',\n );\n }\n return c.json(error, error.statusCode as ContentfulStatusCode);\n }\n } catch (e: any) {\n logger.error(e, 'Error processing endpoint request');\n const error = wrapError(e, 500, 'Internal Server Error');\n if (HonoEndpoint.isDev) {\n logger.info(\n { status: error.statusCode, body: error },\n 'Outgoing response',\n );\n }\n return c.json(error, error.statusCode as ContentfulStatusCode);\n }\n },\n );\n }\n\n static addDocsRoute<\n TServices extends Service[] = [],\n TLogger extends Logger = Logger,\n >(\n endpoints: Endpoint<string, HttpMethod, any, any, TServices, TLogger>[],\n app: Hono,\n docsPath: string,\n openApiOptions?: HonoEndpointOptions['openApiOptions'],\n ): void {\n app.get(docsPath, async (c) => {\n try {\n const openApiSchema = await Endpoint.buildOpenApiSchema(\n endpoints,\n openApiOptions,\n );\n\n return c.json(openApiSchema);\n } catch {\n return c.json(\n { error: 'Failed to generate OpenAPI documentation' },\n 500,\n );\n }\n });\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;AAqDA,IAAa,eAAb,MAAa,aAkBX;CACA,YACmBA,UAgBjB;EAhBiB;CAgBf;CAEJ,OAAO,QAAQ,QAAQ,IAAI,aAAa;CAExC,aAAa,SACXC,GACAC,MACAC,QACA;AACA,OAAK,OACH;EAGF,MAAM,SAAS,MAAM,0BAAS,SAAS,QAAQ,KAAK;AAEpD,MAAI,OAAO,OACT,QAAO,EAAE,KAAK,OAAO,QAAQ,IAAI;AAGnC,SAAO,OAAO;CACf;CACD,SACEC,kBACAC,KACM;AACN,eAAa,SAAS,KAAK,UAAU,kBAAkB,IAAI;CAC5D;CAED,OAAO,qBACLA,KACAC,kBACA;AACA,MAAI,IAAI,OAAO,GAAG,SAAS;AACzB,SAAM,MAAM;GAEZ,MAAM,WAAW,EAAE,IAAI,aAAa;GAWpC,MAAM,WAAW,EAAE,IAAI,aAAa;GAEpC,MAAM,SAAS,EAAE,IAAI,WAAW;AAEhC,OAAI,0BAAS,gBAAgB,EAAE,IAAI,OAAO,IAAI,SAE5C,OAAM,yCACJ,UACA,UACA,kBACA,OACD;EAEJ,EAAC;CACH;CAED,aAAa,WACXC,QACAC,WACA,MAAM,IAAIC,aACVC,QACA,MAAM,QAAQ,KAAK,EACnBC,SACe;EACf,MAAM,YAAY,MAAM,uCAAkC,QAAQ,IAAI;EACtE,MAAM,mBAAmB,sCAAiB,YAGxC,QAAQ,UAAU;AAEpB,eAAa,UAAU,WAAW,kBAAkB,KAAK,QAAQ;AAEjE,SAAO;CACR;CAED,OAAO,UAILC,WACAR,kBACAC,KACAM,SACM;AAEN,MAAI,IAAI,KAAK,yBAAQ,CAAC;AAItB,MAAI,aAAa,MACf,KAAI,IAAI,KAAK,yBAAY,CAAC;EAI5B,MAAM,WACJ,SAAS,aAAa,QAAQ,SAAS,YAAY,UAAU;AAC/D,MAAI,SACF,cAAa,aACX,WACA,KACA,UACA,SAAS,eACV;EAIH,MAAM,kBAAkB,UAAU,KAAK,CAAC,GAAG,MAAM;GAC/C,MAAM,YAAY,EAAE,MAAM,MAAM,IAAI;GACpC,MAAM,YAAY,EAAE,MAAM,MAAM,IAAI;AAGpC,QAAK,IAAI,IAAI,GAAG,IAAI,KAAK,IAAI,UAAU,QAAQ,UAAU,OAAO,EAAE,KAAK;IACrE,MAAM,WAAW,UAAU,MAAM;IACjC,MAAM,WAAW,UAAU,MAAM;IAGjC,MAAM,aAAa,SAAS,WAAW,IAAI;IAC3C,MAAM,aAAa,SAAS,WAAW,IAAI;AAE3C,SAAK,cAAc,WAAY,QAAO;AACtC,QAAI,eAAe,WAAY,QAAO;AAGtC,QAAI,aAAa,SACf,QAAO,SAAS,cAAc,SAAS;GAE1C;AAED,UAAO;EACR,EAAC;AACF,eAAa,qBAAqB,KAAK,iBAAiB;AACxD,OAAK,MAAM,YAAY,gBACrB,cAAa,SAAS,UAAU,kBAAkB,IAAI;CAEzD;CAED,OAAO,SAmBLX,UAgBAI,kBACAC,KACM;EACN,MAAM,EAAE,OAAO,GAAG;EAClB,MAAM,SAAS,SAAS,OAAO,aAAa;AAE5C,MAAI,QACF,OACA,8BAAU,QAAQ,CAAC,OAAO,MACxB,aAAa,SAAS,GAAG,OAAO,SAAS,OAAO,KAAK,CACtD,EACD,8BAAU,SAAS,CAAC,GAAG,MAAM;GAC3B,MAAM,cAAc,sCAAe,EAAE;AACrC,UAAO,aAAa,SAAS,GAAG,aAAa,SAAS,OAAO,MAAM;EACpE,EAAC,EACF,8BAAU,SAAS,CAAC,QAAQ,MAC1B,aAAa,SAAS,GAAG,QAAQ,SAAS,OAAO,OAAO,CACzD,EACD,OAAO,MAAM;GACX,MAAM,SAAS,SAAS,OAAO,MAAM;IACnC,UAAU,SAAS;IACnB,OAAO,SAAS;IAChB,MAAM,EAAE,OAAO,OAAO;IACtB,QAAQ,SAAS;IACjB,MAAM,EAAE,IAAI;GACb,EAAC;AAEF,OAAI;IACF,MAAM,eAAe,EAAE,IAAI,QAAQ;IAEnC,MAAM,SAAS,0BAAS,cAAc,aAAa;IACnD,MAAM,SAAS,0BAAS,cAAc,aAAa,OAAO;IAE1D,MAAM,WAAW,MAAM,iBAAiB,SAAS,SAAS,SAAS;IAGnE,MAAM,QAAQ,SAAS,kBACnB,MAAM,iBACH,SAAS,CAAC,SAAS,eAAgB,EAAC,CACpC,KACC,CAAC,MACC,EAAE,SAAS,gBAAiB,aAC/B;IAGP,MAAM,UAAU,MAAM,SAAS,WAAW;KACxC;KACA;KACA;KACA;KACA,GAAI,oBAAuB,EAAE,IAAI,MAAO;IACzC,EAAQ;IAET,MAAM,eAAe,MAAM,SAAS,UAAU;KAC5C;KACA;KACA;KACA;KACA;IACD,EAAC;AAEF,SAAK,cAAc;AACjB,YAAO,KAAK,8BAA8B;AAC1C,YAAO,EAAE,KAAK,EAAE,OAAO,eAAgB,GAAE,IAAI;IAC9C;AAGD,QAAI,SAAS,WAAW;KACtB,MAAM,gBAAgB,MAAM,2CAAe,SAAS,WAAW;MAC7D;MACA;MACA;MACA;MACA,MAAM,EAAE,IAAI;MACZ,QAAQ,SAAS;KAClB,EAAC;KAGF,MAAM,mBAAmB,gDACvB,eACA,SAAS,UACV;AACD,UAAK,MAAM,CAAC,KAAK,MAAM,IAAI,OAAO,QAAQ,iBAAiB,CACzD,KAAI,MACF,GAAE,OAAO,KAAK,MAAM;IAGzB;IAGD,MAAM,eAAe,MAAM,yCACzB,UACA,kBACA,QACA;KACE;KACA;KACA;KACU;IACX,EACF;IAGD,MAAM,SAAS,SAAS;AAIxB,SAAK,gBAAgB,QAAQ,OAC3B,QAAO,KAAK,uCAAuC;IAIrD,MAAM,YACJ,SAAS,cAAc,SAAS,aAAa;IAC/C,MAAM,aAAa,YACf,MAAM,SAAS,UAAW,UAAU;KAClC;KACA;KACA;KACA;KACA;IACD,EAAC;IAIN,MAAM,SAAS,MAAM,kDACnB,cACA,OAAO,YAAY;KAEjB,MAAM,eACJ,cAAc,SAAS,uBACvB,aAAa,QAAQ,wBACnB,SAAS,iBAAiB;KAC9B,MAAM,SAAS,eACV,SAAS,kBAAkB,IAAI,QAChC;KAGJ,MAAM,iBAAiB,OAAOQ,OAA8B;MAC1D,MAAM,kBAAkB,IAAIC;MAC5B,MAAM,WAAW,MAAM,SAAS,QAC9B;OACE;OACA;OACA,MAAM,EAAE,IAAI,MAAM,OAAO;OACzB,OAAO,EAAE,IAAI,MAAM,QAAQ;OAC3B,QAAQ,EAAE,IAAI,MAAM,QAAQ;OAC5B;OACA;OACA;OACA;OACA;MACD,GASD,gBACD;MAGD,IAAI,OAAO;MACX,IAAIC,aAAW,gBAAgB,aAAa;AAE5C,UAAI,0BAAS,YAAY,SAAS,EAAE;AAClC,cAAO,
SAAS;AAChB,oBAAW,SAAS;MACrB;MAED,MAAMC,WAAS,SAAS,eACpB,MAAM,SAAS,YAAY,KAAK;AAGpC,aAAO;OAAE;OAAQ;OAAU;MAAiB;KAC7C;AAGD,SAAI,aAAa,cAAc,OAC7B,QAAO,uCACL,QACA,YACA,OAAO,QAAQ,eAAe,IAAiB,EAC/C,EAAE,QAAQ,SAAS,UAAW,OAAQ,EACvC;AAGH,YAAO,eAAe,OAAgC;IACvD,GAED,OAAOC,UAAQ,YAAY;AACzB,UAAK,QAAQ,OAAQ;AAErB,UAAK,MAAM,SAAS,QAAQ;AAC1B,UAAI,MAAM,SAAS,MAAM,KAAKA,SAAO,OAAc,CACjD;MAEF,MAAM,UAAU,MAAM,QAAQA,SAAO,OAAc;MACnD,MAAM,WAAW,MAAM,WAAWA,SAAO,OAAc;AACvD,cAAQ,MAAM,MAAM,MAAa,SAAgB;OAC/C,OAAO,MAAM;OACb;MACD,EAAC;KACH;IACF,GAED,EAAE,IAAI,MAAO,EACd;IAED,MAAM,EAAE,QAAQ,UAAU,GAAG;AAE7B,QAAI;KACF,IAAI,SAAS,SAAS;AAGtB,SAAI,SAAS,OACX,UAAS,SAAS;AAGpB,SAAI,SAAS,QACX,MAAK,MAAM,CAAC,KAAK,MAAM,IAAI,OAAO,QAAQ,SAAS,QAAQ,CACzD,GAAE,OAAO,KAAK,MAAM;AAIxB,SAAI,SAAS,QACX,MAAK,MAAM,CAAC,MAAM,EAAE,OAAO,SAAS,CAAC,IAAI,SAAS,QAChD,4BAAU,GAAG,MAAM,OAAO,QAAQ;AAKtC,OAAE,IAAI,cAAc,OAAO;AAE3B,OAAE,IAAI,cAAc,SAAS;AAE7B,OAAE,IAAI,YAAY,OAAO;AAEzB,OAAE,IAAI,aAAa,QAAQ;AAE3B,OAAE,IAAI,cAAc,SAAS;AAE7B,SAAI,aAAa,MACf,QAAO,KAAK;MAAE;MAAQ,MAAM;KAAQ,GAAE,oBAAoB;AAG5D,YAAO,EAAE,KAAK,QAAQ,OAAO;IAC9B,SAAQC,iBAAsB;AAC7B,YAAO,MAAM,iBAAiB,2BAA2B;KACzD,MAAM,QAAQ,kCACZ,iBACA,KACA,6BACD;AACD,SAAI,aAAa,MACf,QAAO,KACL;MAAE,QAAQ,MAAM;MAAY,MAAM;KAAO,GACzC,oBACD;AAEH,YAAO,EAAE,KAAK,OAAO,MAAM,WAAmC;IAC/D;GACF,SAAQC,GAAQ;AACf,WAAO,MAAM,GAAG,oCAAoC;IACpD,MAAM,QAAQ,kCAAU,GAAG,KAAK,wBAAwB;AACxD,QAAI,aAAa,MACf,QAAO,KACL;KAAE,QAAQ,MAAM;KAAY,MAAM;IAAO,GACzC,oBACD;AAEH,WAAO,EAAE,KAAK,OAAO,MAAM,WAAmC;GAC/D;EACF,EACF;CACF;CAED,OAAO,aAILP,WACAP,KACAe,UACAC,gBACM;AACN,MAAI,IAAI,UAAU,OAAO,MAAM;AAC7B,OAAI;IACF,MAAM,gBAAgB,MAAM,0BAAS,mBACnC,WACA,eACD;AAED,WAAO,EAAE,KAAK,cAAc;GAC7B,QAAO;AACN,WAAO,EAAE,KACP,EAAE,OAAO,2CAA4C,GACrD,IACD;GACF;EACF,EAAC;CACH;AACF"}
@@ -7,7 +7,7 @@ import { EnvironmentParser } from "@geekmidas/envkit";
 import { EventPublisher } from "@geekmidas/events";
 import { Logger } from "@geekmidas/logger";
 import { StandardSchemaV1 } from "@standard-schema/spec";
-import * as
+import * as hono_types3 from "hono/types";
 
 //#region src/endpoints/HonoEndpointAdaptor.d.ts
 interface HonoEndpointOptions {
@@ -33,11 +33,11 @@ declare class HonoEndpoint<TRoute extends string, TMethod extends HttpMethod, TI
 static validate<T extends StandardSchemaV1>(c: Context<any, string, {}>, data: unknown, schema?: T): Promise<any>;
 addRoute(serviceDiscovery: ServiceDiscovery<ServiceRecord<TServices>, TLogger>, app: Hono): void;
 static applyEventMiddleware(app: Hono, serviceDiscovery: ServiceDiscovery<any, any>): void;
-static fromRoutes<TLogger extends Logger, TServices extends Service[]>(routes: string[], envParser: EnvironmentParser<{}>, app: Hono<
+static fromRoutes<TLogger extends Logger, TServices extends Service[]>(routes: string[], envParser: EnvironmentParser<{}>, app: Hono<hono_types3.BlankEnv, hono_types3.BlankSchema, "/">, logger: TLogger, cwd?: string, options?: HonoEndpointOptions): Promise<Hono>;
 static addRoutes<TServices extends Service[] = [], TLogger extends Logger = Logger>(endpoints: Endpoint<string, HttpMethod, any, any, TServices, TLogger>[], serviceDiscovery: ServiceDiscovery<ServiceRecord<TServices>, TLogger>, app: Hono, options?: HonoEndpointOptions): void;
 static addRoute<TRoute extends string, TMethod extends HttpMethod, TInput extends EndpointSchemas = {}, TOutSchema extends StandardSchemaV1 | undefined = undefined, TServices extends Service[] = [], TLogger extends Logger = Logger, TSession = unknown, TEventPublisher extends EventPublisher<any> | undefined = undefined, TEventPublisherServiceName extends string = string, TAuditStorage extends AuditStorage | undefined = undefined, TAuditStorageServiceName extends string = string, TAuditAction extends AuditableAction<string, unknown> = AuditableAction<string, unknown>, TDatabase = undefined, TDatabaseServiceName extends string = string>(endpoint: Endpoint<TRoute, TMethod, TInput, TOutSchema, TServices, TLogger, TSession, TEventPublisher, TEventPublisherServiceName, TAuditStorage, TAuditStorageServiceName, TAuditAction, TDatabase, TDatabaseServiceName>, serviceDiscovery: ServiceDiscovery<ServiceRecord<TServices>, TLogger>, app: Hono): void;
 static addDocsRoute<TServices extends Service[] = [], TLogger extends Logger = Logger>(endpoints: Endpoint<string, HttpMethod, any, any, TServices, TLogger>[], app: Hono, docsPath: string, openApiOptions?: HonoEndpointOptions['openApiOptions']): void;
 }
 //#endregion
 export { HonoEndpoint, HonoEndpointOptions };
-//# sourceMappingURL=HonoEndpointAdaptor-
+//# sourceMappingURL=HonoEndpointAdaptor-kb1ByjUL.d.mts.map
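
For context on the declaration change above: the new `import * as hono_types3 from "hono/types"` lets `fromRoutes` type its `app` parameter as `Hono<BlankEnv, BlankSchema, "/">`. A hedged usage sketch follows; the `@geekmidas/constructs` import specifier is an assumption (the exact subpath export is not shown in this diff), and the `EnvironmentParser` instance is supplied by the caller rather than constructed here:

```ts
import { Hono } from 'hono';
import { ConsoleLogger } from '@geekmidas/logger/console';
import type { EnvironmentParser } from '@geekmidas/envkit';
// Import specifier assumed; adjust to the package's actual export map.
import { HonoEndpoint } from '@geekmidas/constructs';

export async function createApp(envParser: EnvironmentParser<{}>): Promise<Hono> {
  // Hypothetical route modules; fromRoutes resolves endpoints from these files.
  const routes = ['./src/endpoints/users.ts', './src/endpoints/health.ts'];
  // Matches the Hono<BlankEnv, BlankSchema, "/"> shape in the updated signature;
  // the implementation defaults this to `new Hono()` when omitted at runtime.
  const app = new Hono();
  const logger = new ConsoleLogger();

  return HonoEndpoint.fromRoutes(routes, envParser, app, logger, process.cwd(), {
    docsPath: '/docs',
    openApiOptions: { title: 'Example API', version: '1.0.0' },
  });
}
```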
@@ -1,5 +1,5 @@
 const require_chunk = require('./chunk-CUT6urMc.cjs');
-const require_Construct = require('./Construct-
+const require_Construct = require('./Construct-Ba5cMxib.cjs');
 const __geekmidas_logger_console = require_chunk.__toESM(require("@geekmidas/logger/console"));
 
 //#region src/subscribers/Subscriber.ts
@@ -31,4 +31,4 @@ Object.defineProperty(exports, 'Subscriber', {
 return Subscriber;
 }
 });
-//# sourceMappingURL=Subscriber-
+//# sourceMappingURL=Subscriber-BiHjVXtM.cjs.map
@@ -1 +1 @@
-{"version":3,"file":"Subscriber-
+
{"version":3,"file":"Subscriber-BiHjVXtM.cjs","names":["ConsoleLogger","Construct","obj: any","ConstructType","handler: SubscriberHandler<\n TEventPublisher,\n TSubscribedEvents,\n TServices,\n TLogger,\n OutSchema\n >","timeout: number","_subscribedEvents?: TSubscribedEvents","outputSchema?: OutSchema","services: TServices","logger: TLogger","publisherService?: Service<\n TEventPublisherServiceName,\n TEventPublisher\n >"],"sources":["../src/subscribers/Subscriber.ts"],"sourcesContent":["import type { Logger } from '@geekmidas/logger';\nimport { ConsoleLogger } from '@geekmidas/logger/console';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\n\nimport { Construct, ConstructType } from '../Construct';\n\nimport type {\n EventPublisher,\n ExtractPublisherMessage,\n} from '@geekmidas/events';\nimport type { InferStandardSchema } from '@geekmidas/schema';\nimport type { Service, ServiceRecord } from '@geekmidas/services';\n\nconst DEFAULT_LOGGER = new ConsoleLogger() as any;\n\n// Helper type to extract payload types for subscribed events\ntype ExtractEventPayloads<\n TPublisher extends EventPublisher<any> | undefined,\n TEventTypes extends any[],\n> = TPublisher extends EventPublisher<any>\n ? Extract<ExtractPublisherMessage<TPublisher>, { type: TEventTypes[number] }>\n : never;\n\nexport class Subscriber<\n TServices extends Service[] = [],\n TLogger extends Logger = Logger,\n OutSchema extends StandardSchemaV1 | undefined = undefined,\n TEventPublisher extends EventPublisher<any> | undefined = undefined,\n TEventPublisherServiceName extends string = string,\n TSubscribedEvents extends\n ExtractPublisherMessage<TEventPublisher>['type'][] = ExtractPublisherMessage<TEventPublisher>['type'][],\n> extends Construct<\n TLogger,\n TEventPublisherServiceName,\n TEventPublisher,\n OutSchema,\n TServices\n> {\n __IS_SUBSCRIBER__ = true;\n\n static isSubscriber(\n obj: any,\n ): obj is Subscriber<any, any, any, any, any, any> {\n return Boolean(\n obj &&\n obj.__IS_SUBSCRIBER__ === true &&\n obj.type === ConstructType.Subscriber,\n );\n }\n\n constructor(\n public readonly handler: SubscriberHandler<\n TEventPublisher,\n TSubscribedEvents,\n TServices,\n TLogger,\n OutSchema\n >,\n public readonly timeout: number = 30000,\n protected _subscribedEvents?: TSubscribedEvents,\n public readonly outputSchema?: OutSchema,\n public readonly services: TServices = [] as unknown as TServices,\n public readonly logger: TLogger = DEFAULT_LOGGER as TLogger,\n public readonly publisherService?: Service<\n TEventPublisherServiceName,\n TEventPublisher\n >,\n ) {\n super(\n ConstructType.Subscriber,\n logger,\n services,\n [],\n publisherService,\n outputSchema,\n );\n }\n\n get subscribedEvents(): TSubscribedEvents | undefined {\n return this._subscribedEvents;\n }\n}\n\n// Handler type for subscribers that receives an array of events\nexport type SubscriberHandler<\n TEventPublisher extends EventPublisher<any> | undefined,\n TSubscribedEvents extends ExtractPublisherMessage<TEventPublisher>['type'][],\n TServices extends Service[] = [],\n TLogger extends Logger = Logger,\n OutSchema extends StandardSchemaV1 | undefined = undefined,\n> = (\n ctx: SubscriberContext<\n TEventPublisher,\n TSubscribedEvents,\n TServices,\n TLogger\n >,\n) => OutSchema extends StandardSchemaV1\n ? 
InferStandardSchema<OutSchema> | Promise<InferStandardSchema<OutSchema>>\n : any | Promise<any>;\n\n// Context type for subscriber handlers\nexport type SubscriberContext<\n TEventPublisher extends EventPublisher<any> | undefined,\n TSubscribedEvents extends ExtractPublisherMessage<TEventPublisher>['type'][],\n TServices extends Service[] = [],\n TLogger extends Logger = Logger,\n> = {\n events: ExtractEventPayloads<TEventPublisher, TSubscribedEvents>[];\n services: ServiceRecord<TServices>;\n logger: TLogger;\n};\n"],"mappings":";;;;;AAaA,MAAM,iBAAiB,IAAIA;AAU3B,IAAa,aAAb,cAQUC,4BAMR;CACA,oBAAoB;CAEpB,OAAO,aACLC,KACiD;AACjD,SAAO,QACL,OACE,IAAI,sBAAsB,QAC1B,IAAI,SAASC,gCAAc,WAC9B;CACF;CAED,YACkBC,SAOAC,UAAkB,KACxBC,mBACMC,cACAC,WAAsB,CAAE,GACxBC,SAAkB,gBAClBC,kBAIhB;AACA,QACEP,gCAAc,YACd,QACA,UACA,CAAE,GACF,kBACA,aACD;EAxBe;EAOA;EACN;EACM;EACA;EACA;EACA;CAajB;CAED,IAAI,mBAAkD;AACpD,SAAO,KAAK;CACb;AACF"}
@@ -1,4 +1,4 @@
-import { Construct, ConstructType } from "./Construct-
+import { Construct, ConstructType } from "./Construct-DdyGHuag.mjs";
 import { ConsoleLogger } from "@geekmidas/logger/console";
 
 //#region src/subscribers/Subscriber.ts
@@ -25,4 +25,4 @@ var Subscriber = class extends Construct {
 
 //#endregion
 export { Subscriber };
-//# sourceMappingURL=Subscriber-
+//# sourceMappingURL=Subscriber-BmPf9GFb.mjs.map
@@ -1 +1 @@
-{"version":3,"file":"Subscriber-
+
{"version":3,"file":"Subscriber-BmPf9GFb.mjs","names":["DEFAULT_LOGGER","obj: any","handler: SubscriberHandler<\n TEventPublisher,\n TSubscribedEvents,\n TServices,\n TLogger,\n OutSchema\n >","timeout: number","_subscribedEvents?: TSubscribedEvents","outputSchema?: OutSchema","services: TServices","logger: TLogger","publisherService?: Service<\n TEventPublisherServiceName,\n TEventPublisher\n >"],"sources":["../src/subscribers/Subscriber.ts"],"sourcesContent":["import type { Logger } from '@geekmidas/logger';\nimport { ConsoleLogger } from '@geekmidas/logger/console';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\n\nimport { Construct, ConstructType } from '../Construct';\n\nimport type {\n EventPublisher,\n ExtractPublisherMessage,\n} from '@geekmidas/events';\nimport type { InferStandardSchema } from '@geekmidas/schema';\nimport type { Service, ServiceRecord } from '@geekmidas/services';\n\nconst DEFAULT_LOGGER = new ConsoleLogger() as any;\n\n// Helper type to extract payload types for subscribed events\ntype ExtractEventPayloads<\n TPublisher extends EventPublisher<any> | undefined,\n TEventTypes extends any[],\n> = TPublisher extends EventPublisher<any>\n ? Extract<ExtractPublisherMessage<TPublisher>, { type: TEventTypes[number] }>\n : never;\n\nexport class Subscriber<\n TServices extends Service[] = [],\n TLogger extends Logger = Logger,\n OutSchema extends StandardSchemaV1 | undefined = undefined,\n TEventPublisher extends EventPublisher<any> | undefined = undefined,\n TEventPublisherServiceName extends string = string,\n TSubscribedEvents extends\n ExtractPublisherMessage<TEventPublisher>['type'][] = ExtractPublisherMessage<TEventPublisher>['type'][],\n> extends Construct<\n TLogger,\n TEventPublisherServiceName,\n TEventPublisher,\n OutSchema,\n TServices\n> {\n __IS_SUBSCRIBER__ = true;\n\n static isSubscriber(\n obj: any,\n ): obj is Subscriber<any, any, any, any, any, any> {\n return Boolean(\n obj &&\n obj.__IS_SUBSCRIBER__ === true &&\n obj.type === ConstructType.Subscriber,\n );\n }\n\n constructor(\n public readonly handler: SubscriberHandler<\n TEventPublisher,\n TSubscribedEvents,\n TServices,\n TLogger,\n OutSchema\n >,\n public readonly timeout: number = 30000,\n protected _subscribedEvents?: TSubscribedEvents,\n public readonly outputSchema?: OutSchema,\n public readonly services: TServices = [] as unknown as TServices,\n public readonly logger: TLogger = DEFAULT_LOGGER as TLogger,\n public readonly publisherService?: Service<\n TEventPublisherServiceName,\n TEventPublisher\n >,\n ) {\n super(\n ConstructType.Subscriber,\n logger,\n services,\n [],\n publisherService,\n outputSchema,\n );\n }\n\n get subscribedEvents(): TSubscribedEvents | undefined {\n return this._subscribedEvents;\n }\n}\n\n// Handler type for subscribers that receives an array of events\nexport type SubscriberHandler<\n TEventPublisher extends EventPublisher<any> | undefined,\n TSubscribedEvents extends ExtractPublisherMessage<TEventPublisher>['type'][],\n TServices extends Service[] = [],\n TLogger extends Logger = Logger,\n OutSchema extends StandardSchemaV1 | undefined = undefined,\n> = (\n ctx: SubscriberContext<\n TEventPublisher,\n TSubscribedEvents,\n TServices,\n TLogger\n >,\n) => OutSchema extends StandardSchemaV1\n ? 
InferStandardSchema<OutSchema> | Promise<InferStandardSchema<OutSchema>>\n : any | Promise<any>;\n\n// Context type for subscriber handlers\nexport type SubscriberContext<\n TEventPublisher extends EventPublisher<any> | undefined,\n TSubscribedEvents extends ExtractPublisherMessage<TEventPublisher>['type'][],\n TServices extends Service[] = [],\n TLogger extends Logger = Logger,\n> = {\n events: ExtractEventPayloads<TEventPublisher, TSubscribedEvents>[];\n services: ServiceRecord<TServices>;\n logger: TLogger;\n};\n"],"mappings":";;;;AAaA,MAAMA,mBAAiB,IAAI;AAU3B,IAAa,aAAb,cAQU,UAMR;CACA,oBAAoB;CAEpB,OAAO,aACLC,KACiD;AACjD,SAAO,QACL,OACE,IAAI,sBAAsB,QAC1B,IAAI,SAAS,cAAc,WAC9B;CACF;CAED,YACkBC,SAOAC,UAAkB,KACxBC,mBACMC,cACAC,WAAsB,CAAE,GACxBC,SAAkBP,kBAClBQ,kBAIhB;AACA,QACE,cAAc,YACd,QACA,UACA,CAAE,GACF,kBACA,aACD;EAxBe;EAOA;EACN;EACM;EACA;EACA;EACA;CAajB;CAED,IAAI,mBAAkD;AACpD,SAAO,KAAK;CACb;AACF"}
@@ -1,5 +1,5 @@
 const require_chunk = require('./chunk-CUT6urMc.cjs');
-const require_Subscriber = require('./Subscriber-
+const require_Subscriber = require('./Subscriber-BiHjVXtM.cjs');
 const __geekmidas_logger_console = require_chunk.__toESM(require("@geekmidas/logger/console"));
 
 //#region src/subscribers/SubscriberBuilder.ts
@@ -57,4 +57,4 @@ Object.defineProperty(exports, 'SubscriberBuilder', {
 return SubscriberBuilder;
 }
 });
-//# sourceMappingURL=SubscriberBuilder-
+//# sourceMappingURL=SubscriberBuilder-Cp1C-xtT.cjs.map
@@ -1 +1 @@
-{"version":3,"file":"SubscriberBuilder-
+
{"version":3,"file":"SubscriberBuilder-Cp1C-xtT.cjs","names":["DEFAULT_LOGGER","timeout: number","schema: T","services: T","logger: T","publisher: Service<TName, T>","event: TEvent","fn: SubscriberHandler<\n TEventPublisher,\n TSubscribedEvents,\n TServices,\n TLogger,\n OutSchema\n >","Subscriber"],"sources":["../src/subscribers/SubscriberBuilder.ts"],"sourcesContent":["import type {\n EventPublisher,\n ExtractPublisherMessage,\n} from '@geekmidas/events';\nimport type { Logger } from '@geekmidas/logger';\nimport { DEFAULT_LOGGER } from '@geekmidas/logger/console';\nimport type { Service } from '@geekmidas/services';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\nimport { Subscriber, type SubscriberHandler } from './Subscriber';\n\nexport class SubscriberBuilder<\n TServices extends Service[] = [],\n TLogger extends Logger = Logger,\n OutSchema extends StandardSchemaV1 | undefined = undefined,\n TEventPublisher extends EventPublisher<any> | undefined = undefined,\n TEventPublisherServiceName extends string = string,\n TSubscribedEvents extends any[] = [],\n> {\n private _subscribedEvents: TSubscribedEvents = [] as any;\n private _timeout?: number;\n private outputSchema?: OutSchema;\n private _services: TServices = [] as Service[] as TServices;\n private _logger: TLogger = DEFAULT_LOGGER;\n private _publisher?: Service<TEventPublisherServiceName, TEventPublisher>;\n\n constructor() {\n this._timeout = 30000; // Default timeout\n }\n\n timeout(timeout: number): this {\n this._timeout = timeout;\n return this;\n }\n\n output<T extends StandardSchemaV1>(\n schema: T,\n ): SubscriberBuilder<\n TServices,\n TLogger,\n T,\n TEventPublisher,\n TEventPublisherServiceName,\n TSubscribedEvents\n > {\n this.outputSchema = schema as unknown as OutSchema;\n return this as any;\n }\n\n services<T extends Service[]>(\n services: T,\n ): SubscriberBuilder<\n [...TServices, ...T],\n TLogger,\n OutSchema,\n TEventPublisher,\n TEventPublisherServiceName,\n TSubscribedEvents\n > {\n this._services = [...this._services, ...services] as any;\n return this as any;\n }\n\n logger<T extends Logger>(\n logger: T,\n ): SubscriberBuilder<\n TServices,\n T,\n OutSchema,\n TEventPublisher,\n TEventPublisherServiceName,\n TSubscribedEvents\n > {\n this._logger = logger as unknown as TLogger;\n return this as any;\n }\n\n publisher<T extends EventPublisher<any>, TName extends string>(\n publisher: Service<TName, T>,\n ): SubscriberBuilder<\n TServices,\n TLogger,\n OutSchema,\n T,\n TName,\n TSubscribedEvents\n > {\n this._publisher = publisher as any;\n return this as any;\n }\n\n subscribe<\n TEvent extends TEventPublisher extends EventPublisher<any>\n ?\n | ExtractPublisherMessage<TEventPublisher>['type']\n | ExtractPublisherMessage<TEventPublisher>['type'][]\n : never,\n >(\n event: TEvent,\n ): SubscriberBuilder<\n TServices,\n TLogger,\n OutSchema,\n TEventPublisher,\n TEventPublisherServiceName,\n TEvent extends any[]\n ? [...TSubscribedEvents, ...TEvent]\n : [...TSubscribedEvents, TEvent]\n > {\n const eventsToAdd = Array.isArray(event) ? 
event : [event];\n this._subscribedEvents = [...this._subscribedEvents, ...eventsToAdd] as any;\n return this as any;\n }\n\n handle(\n fn: SubscriberHandler<\n TEventPublisher,\n TSubscribedEvents,\n TServices,\n TLogger,\n OutSchema\n >,\n ): Subscriber<\n TServices,\n TLogger,\n OutSchema,\n TEventPublisher,\n TEventPublisherServiceName,\n TSubscribedEvents\n > {\n const subscriber = new Subscriber(\n fn,\n this._timeout,\n this._subscribedEvents,\n this.outputSchema,\n this._services,\n this._logger,\n this._publisher,\n );\n\n // Reset builder state after creating the subscriber to prevent pollution\n this._services = [] as Service[] as TServices;\n this._logger = DEFAULT_LOGGER;\n this._publisher = undefined;\n this._subscribedEvents = [] as any;\n this._timeout = 30000; // Reset to default\n this.outputSchema = undefined;\n\n return subscriber;\n }\n}\n"],"mappings":";;;;;AAUA,IAAa,oBAAb,MAOE;CACA,AAAQ,oBAAuC,CAAE;CACjD,AAAQ;CACR,AAAQ;CACR,AAAQ,YAAuB,CAAE;CACjC,AAAQ,UAAmBA;CAC3B,AAAQ;CAER,cAAc;AACZ,OAAK,WAAW;CACjB;CAED,QAAQC,SAAuB;AAC7B,OAAK,WAAW;AAChB,SAAO;CACR;CAED,OACEC,QAQA;AACA,OAAK,eAAe;AACpB,SAAO;CACR;CAED,SACEC,UAQA;AACA,OAAK,YAAY,CAAC,GAAG,KAAK,WAAW,GAAG,QAAS;AACjD,SAAO;CACR;CAED,OACEC,QAQA;AACA,OAAK,UAAU;AACf,SAAO;CACR;CAED,UACEC,WAQA;AACA,OAAK,aAAa;AAClB,SAAO;CACR;CAED,UAOEC,OAUA;EACA,MAAM,cAAc,MAAM,QAAQ,MAAM,GAAG,QAAQ,CAAC,KAAM;AAC1D,OAAK,oBAAoB,CAAC,GAAG,KAAK,mBAAmB,GAAG,WAAY;AACpE,SAAO;CACR;CAED,OACEC,IAcA;EACA,MAAM,aAAa,IAAIC,8BACrB,IACA,KAAK,UACL,KAAK,mBACL,KAAK,cACL,KAAK,WACL,KAAK,SACL,KAAK;AAIP,OAAK,YAAY,CAAE;AACnB,OAAK,UAAUR;AACf,OAAK;AACL,OAAK,oBAAoB,CAAE;AAC3B,OAAK,WAAW;AAChB,OAAK;AAEL,SAAO;CACR;AACF"}
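
For readers who don't want to parse the map by hand: the `sourcesContent` above carries the full `SubscriberBuilder` source, a fluent builder whose `handle()` call produces a `Subscriber`. The sketch below is illustrative only; the import specifier for `SubscriberBuilder`, the `ordersPublisher` service, and the `'order.created'` event name are assumptions, not documented exports of this package.

```ts
// Illustrative sketch only. The entry point and the publisher/event names are
// assumptions; only the types named in the embedded source are referenced.
import type { EventPublisher } from '@geekmidas/events';
import type { Service } from '@geekmidas/services';
import { SubscriberBuilder } from '@geekmidas/constructs'; // assumed export path

// Assume a publisher service is defined elsewhere in the application.
declare const ordersPublisher: Service<'orders', EventPublisher<any>>;

const subscriber = new SubscriberBuilder()
  .timeout(10_000)            // overrides the 30000 ms default set in the constructor
  .publisher(ordersPublisher) // determines which event types .subscribe() accepts
  .subscribe('order.created') // a single event type or an array of types
  .handle(async () => {
    // handler body is illustrative; its real shape is SubscriberHandler
  });
```

Each chained call narrows the builder's type parameters and returns the builder, so the final `handle()` yields a fully typed `Subscriber`.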

@@ -1,4 +1,4 @@
-import { Subscriber } from "./Subscriber-
+import { Subscriber } from "./Subscriber-BmPf9GFb.mjs";
 import { DEFAULT_LOGGER } from "@geekmidas/logger/console";

 //#region src/subscribers/SubscriberBuilder.ts
@@ -51,4 +51,4 @@ var SubscriberBuilder = class {

 //#endregion
 export { SubscriberBuilder };
-//# sourceMappingURL=SubscriberBuilder-
+//# sourceMappingURL=SubscriberBuilder-DJPEeYDJ.mjs.map

@@ -1 +1 @@
-{"version":3,"file":"SubscriberBuilder-
+
{"version":3,"file":"SubscriberBuilder-DJPEeYDJ.mjs","names":["timeout: number","schema: T","services: T","logger: T","publisher: Service<TName, T>","event: TEvent","fn: SubscriberHandler<\n TEventPublisher,\n TSubscribedEvents,\n TServices,\n TLogger,\n OutSchema\n >"],"sources":["../src/subscribers/SubscriberBuilder.ts"],"sourcesContent":["import type {\n EventPublisher,\n ExtractPublisherMessage,\n} from '@geekmidas/events';\nimport type { Logger } from '@geekmidas/logger';\nimport { DEFAULT_LOGGER } from '@geekmidas/logger/console';\nimport type { Service } from '@geekmidas/services';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\nimport { Subscriber, type SubscriberHandler } from './Subscriber';\n\nexport class SubscriberBuilder<\n TServices extends Service[] = [],\n TLogger extends Logger = Logger,\n OutSchema extends StandardSchemaV1 | undefined = undefined,\n TEventPublisher extends EventPublisher<any> | undefined = undefined,\n TEventPublisherServiceName extends string = string,\n TSubscribedEvents extends any[] = [],\n> {\n private _subscribedEvents: TSubscribedEvents = [] as any;\n private _timeout?: number;\n private outputSchema?: OutSchema;\n private _services: TServices = [] as Service[] as TServices;\n private _logger: TLogger = DEFAULT_LOGGER;\n private _publisher?: Service<TEventPublisherServiceName, TEventPublisher>;\n\n constructor() {\n this._timeout = 30000; // Default timeout\n }\n\n timeout(timeout: number): this {\n this._timeout = timeout;\n return this;\n }\n\n output<T extends StandardSchemaV1>(\n schema: T,\n ): SubscriberBuilder<\n TServices,\n TLogger,\n T,\n TEventPublisher,\n TEventPublisherServiceName,\n TSubscribedEvents\n > {\n this.outputSchema = schema as unknown as OutSchema;\n return this as any;\n }\n\n services<T extends Service[]>(\n services: T,\n ): SubscriberBuilder<\n [...TServices, ...T],\n TLogger,\n OutSchema,\n TEventPublisher,\n TEventPublisherServiceName,\n TSubscribedEvents\n > {\n this._services = [...this._services, ...services] as any;\n return this as any;\n }\n\n logger<T extends Logger>(\n logger: T,\n ): SubscriberBuilder<\n TServices,\n T,\n OutSchema,\n TEventPublisher,\n TEventPublisherServiceName,\n TSubscribedEvents\n > {\n this._logger = logger as unknown as TLogger;\n return this as any;\n }\n\n publisher<T extends EventPublisher<any>, TName extends string>(\n publisher: Service<TName, T>,\n ): SubscriberBuilder<\n TServices,\n TLogger,\n OutSchema,\n T,\n TName,\n TSubscribedEvents\n > {\n this._publisher = publisher as any;\n return this as any;\n }\n\n subscribe<\n TEvent extends TEventPublisher extends EventPublisher<any>\n ?\n | ExtractPublisherMessage<TEventPublisher>['type']\n | ExtractPublisherMessage<TEventPublisher>['type'][]\n : never,\n >(\n event: TEvent,\n ): SubscriberBuilder<\n TServices,\n TLogger,\n OutSchema,\n TEventPublisher,\n TEventPublisherServiceName,\n TEvent extends any[]\n ? [...TSubscribedEvents, ...TEvent]\n : [...TSubscribedEvents, TEvent]\n > {\n const eventsToAdd = Array.isArray(event) ? 
event : [event];\n this._subscribedEvents = [...this._subscribedEvents, ...eventsToAdd] as any;\n return this as any;\n }\n\n handle(\n fn: SubscriberHandler<\n TEventPublisher,\n TSubscribedEvents,\n TServices,\n TLogger,\n OutSchema\n >,\n ): Subscriber<\n TServices,\n TLogger,\n OutSchema,\n TEventPublisher,\n TEventPublisherServiceName,\n TSubscribedEvents\n > {\n const subscriber = new Subscriber(\n fn,\n this._timeout,\n this._subscribedEvents,\n this.outputSchema,\n this._services,\n this._logger,\n this._publisher,\n );\n\n // Reset builder state after creating the subscriber to prevent pollution\n this._services = [] as Service[] as TServices;\n this._logger = DEFAULT_LOGGER;\n this._publisher = undefined;\n this._subscribedEvents = [] as any;\n this._timeout = 30000; // Reset to default\n this.outputSchema = undefined;\n\n return subscriber;\n }\n}\n"],"mappings":";;;;AAUA,IAAa,oBAAb,MAOE;CACA,AAAQ,oBAAuC,CAAE;CACjD,AAAQ;CACR,AAAQ;CACR,AAAQ,YAAuB,CAAE;CACjC,AAAQ,UAAmB;CAC3B,AAAQ;CAER,cAAc;AACZ,OAAK,WAAW;CACjB;CAED,QAAQA,SAAuB;AAC7B,OAAK,WAAW;AAChB,SAAO;CACR;CAED,OACEC,QAQA;AACA,OAAK,eAAe;AACpB,SAAO;CACR;CAED,SACEC,UAQA;AACA,OAAK,YAAY,CAAC,GAAG,KAAK,WAAW,GAAG,QAAS;AACjD,SAAO;CACR;CAED,OACEC,QAQA;AACA,OAAK,UAAU;AACf,SAAO;CACR;CAED,UACEC,WAQA;AACA,OAAK,aAAa;AAClB,SAAO;CACR;CAED,UAOEC,OAUA;EACA,MAAM,cAAc,MAAM,QAAQ,MAAM,GAAG,QAAQ,CAAC,KAAM;AAC1D,OAAK,oBAAoB,CAAC,GAAG,KAAK,mBAAmB,GAAG,WAAY;AACpE,SAAO;CACR;CAED,OACEC,IAcA;EACA,MAAM,aAAa,IAAI,WACrB,IACA,KAAK,UACL,KAAK,mBACL,KAAK,cACL,KAAK,WACL,KAAK,SACL,KAAK;AAIP,OAAK,YAAY,CAAE;AACnB,OAAK,UAAU;AACf,OAAK;AACL,OAAK,oBAAoB,CAAE;AAC3B,OAAK,WAAW;AAChB,OAAK;AAEL,SAAO;CACR;AACF"}
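
One behavioural detail visible in both copies of the embedded source: `handle()` resets every builder field (timeout, output schema, services, logger, publisher, subscribed events) back to its defaults after constructing the `Subscriber`, so a builder instance can be reused without leaking configuration between chains. A sketch, continuing the assumptions from the previous example:

```ts
// Reuses the SubscriberBuilder import and ordersPublisher declaration from the
// sketch above; the 'order.deleted' event name is likewise an assumption.
const builder = new SubscriberBuilder();

const onCreated = builder
  .publisher(ordersPublisher)
  .subscribe('order.created')
  .handle(async () => {});

// After handle(), the builder is back at its defaults (30000 ms timeout, no
// publisher, no subscribed events), so this chain starts from a clean state.
const onDeleted = builder
  .publisher(ordersPublisher)
  .subscribe('order.deleted')
  .handle(async () => {});
```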

@@ -1,6 +1,6 @@
 const require_chunk = require('./chunk-CUT6urMc.cjs');
 const require_publisher = require('./publisher-lFQleddL.cjs');
-const require_Endpoint = require('./Endpoint-
+const require_Endpoint = require('./Endpoint-BVGZXFyV.cjs');
 const require_processAudits = require('./processAudits-CzHkPokQ.cjs');
 const __geekmidas_services = require_chunk.__toESM(require("@geekmidas/services"));
 const __geekmidas_audit = require_chunk.__toESM(require("@geekmidas/audit"));

@@ -151,4 +151,4 @@ Object.defineProperty(exports, 'TestEndpointAdaptor', {
 return TestEndpointAdaptor;
 }
 });
-//# sourceMappingURL=TestEndpointAdaptor-
+//# sourceMappingURL=TestEndpointAdaptor-opEisC30.cjs.map