@geekmidas/cli 0.0.26 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/FUNCTION_CRON_SUPPORT.md +266 -0
- package/README.md +84 -17
- package/dist/CronGenerator-1PflEYe2.cjs +60 -0
- package/dist/CronGenerator-1PflEYe2.cjs.map +1 -0
- package/dist/CronGenerator-DXRfHQcV.mjs +54 -0
- package/dist/CronGenerator-DXRfHQcV.mjs.map +1 -0
- package/dist/EndpointGenerator-BbGrDiCP.cjs +264 -0
- package/dist/EndpointGenerator-BbGrDiCP.cjs.map +1 -0
- package/dist/EndpointGenerator-BmZ9BxbO.mjs +258 -0
- package/dist/EndpointGenerator-BmZ9BxbO.mjs.map +1 -0
- package/dist/FunctionGenerator-Clw64SwQ.cjs +59 -0
- package/dist/FunctionGenerator-Clw64SwQ.cjs.map +1 -0
- package/dist/FunctionGenerator-DOEB_yPh.mjs +53 -0
- package/dist/FunctionGenerator-DOEB_yPh.mjs.map +1 -0
- package/dist/Generator-CDoEXCDg.cjs +47 -0
- package/dist/Generator-CDoEXCDg.cjs.map +1 -0
- package/dist/Generator-UanJW0_V.mjs +41 -0
- package/dist/Generator-UanJW0_V.mjs.map +1 -0
- package/dist/SubscriberGenerator-BfMZCVNy.cjs +204 -0
- package/dist/SubscriberGenerator-BfMZCVNy.cjs.map +1 -0
- package/dist/SubscriberGenerator-D2u00NI3.mjs +198 -0
- package/dist/SubscriberGenerator-D2u00NI3.mjs.map +1 -0
- package/dist/build/index.cjs +12 -0
- package/dist/build/index.mjs +12 -0
- package/dist/build/manifests.cjs +3 -0
- package/dist/build/manifests.mjs +3 -0
- package/dist/build/providerResolver.cjs +5 -0
- package/dist/build/providerResolver.mjs +3 -0
- package/dist/build/types.cjs +0 -0
- package/dist/build/types.mjs +0 -0
- package/dist/build-BBhlEjf5.cjs +89 -0
- package/dist/build-BBhlEjf5.cjs.map +1 -0
- package/dist/build-kY-lG30Q.mjs +83 -0
- package/dist/build-kY-lG30Q.mjs.map +1 -0
- package/dist/config-D1EpSGk6.cjs +36 -0
- package/dist/config-D1EpSGk6.cjs.map +1 -0
- package/dist/config-U-mdW-7Y.mjs +30 -0
- package/dist/config-U-mdW-7Y.mjs.map +1 -0
- package/dist/config.cjs +1 -1
- package/dist/config.mjs +1 -1
- package/dist/generators/CronGenerator.cjs +4 -0
- package/dist/generators/CronGenerator.mjs +4 -0
- package/dist/generators/EndpointGenerator.cjs +4 -0
- package/dist/generators/EndpointGenerator.mjs +4 -0
- package/dist/generators/FunctionGenerator.cjs +4 -0
- package/dist/generators/FunctionGenerator.mjs +4 -0
- package/dist/generators/Generator.cjs +3 -0
- package/dist/generators/Generator.mjs +3 -0
- package/dist/generators/SubscriberGenerator.cjs +4 -0
- package/dist/generators/SubscriberGenerator.mjs +4 -0
- package/dist/generators/index.cjs +12 -0
- package/dist/generators/index.mjs +8 -0
- package/dist/generators-CEKtVh81.cjs +0 -0
- package/dist/generators-CsLujGXs.mjs +0 -0
- package/dist/index.cjs +71 -25
- package/dist/index.cjs.map +1 -0
- package/dist/index.mjs +71 -25
- package/dist/index.mjs.map +1 -0
- package/dist/manifests-BrJXpHrf.mjs +21 -0
- package/dist/manifests-BrJXpHrf.mjs.map +1 -0
- package/dist/manifests-D0saShvH.cjs +27 -0
- package/dist/manifests-D0saShvH.cjs.map +1 -0
- package/dist/{openapi-CksVdkh2.mjs → openapi-BQx3_JbM.mjs} +8 -6
- package/dist/openapi-BQx3_JbM.mjs.map +1 -0
- package/dist/{openapi-D4QQJUPY.cjs → openapi-CMLr04cz.cjs} +9 -7
- package/dist/openapi-CMLr04cz.cjs.map +1 -0
- package/dist/{openapi-react-query-DpT3XHFC.mjs → openapi-react-query-DbrWwQzb.mjs} +5 -3
- package/dist/openapi-react-query-DbrWwQzb.mjs.map +1 -0
- package/dist/{openapi-react-query-C1JLYUOs.cjs → openapi-react-query-Dvjqx_Eo.cjs} +5 -3
- package/dist/openapi-react-query-Dvjqx_Eo.cjs.map +1 -0
- package/dist/openapi-react-query.cjs +1 -1
- package/dist/openapi-react-query.mjs +1 -1
- package/dist/openapi.cjs +4 -3
- package/dist/openapi.mjs +4 -3
- package/dist/providerResolver-B_TjNF0_.mjs +96 -0
- package/dist/providerResolver-B_TjNF0_.mjs.map +1 -0
- package/dist/providerResolver-DgvzNfP4.cjs +114 -0
- package/dist/providerResolver-DgvzNfP4.cjs.map +1 -0
- package/examples/cron-example.ts +45 -0
- package/examples/function-example.ts +40 -0
- package/examples/gkm.config.json +22 -0
- package/examples/gkm.minimal.config.json +7 -0
- package/examples/gkm.production.config.json +27 -0
- package/examples/logger.ts +1 -1
- package/package.json +38 -14
- package/src/__tests__/config.spec.ts +110 -0
- package/src/__tests__/openapi-react-query.spec.ts +506 -0
- package/src/__tests__/openapi.spec.ts +362 -0
- package/src/__tests__/test-helpers.ts +180 -0
- package/src/build/__tests__/index-new.spec.ts +577 -0
- package/src/build/index.ts +197 -0
- package/src/build/manifests.ts +35 -0
- package/src/build/providerResolver.ts +184 -0
- package/src/build/types.ts +37 -0
- package/src/config.ts +14 -6
- package/src/generators/CronGenerator.ts +98 -0
- package/src/generators/EndpointGenerator.ts +389 -0
- package/src/generators/FunctionGenerator.ts +97 -0
- package/src/generators/Generator.ts +95 -0
- package/src/generators/SubscriberGenerator.ts +271 -0
- package/src/generators/__tests__/CronGenerator.spec.ts +445 -0
- package/src/generators/__tests__/EndpointGenerator.spec.ts +394 -0
- package/src/generators/__tests__/FunctionGenerator.spec.ts +256 -0
- package/src/generators/__tests__/SubscriberGenerator.spec.ts +341 -0
- package/src/generators/index.ts +9 -0
- package/src/index.ts +57 -22
- package/src/openapi-react-query.ts +2 -1
- package/src/openapi.ts +5 -4
- package/src/types.ts +91 -2
- package/dist/build-BTggTCYL.cjs +0 -176
- package/dist/build-Ca4P6_lY.mjs +0 -170
- package/dist/build.cjs +0 -5
- package/dist/build.mjs +0 -5
- package/dist/config-BNqUMsvc.cjs +0 -24
- package/dist/config-BciAdY6_.mjs +0 -18
- package/dist/loadEndpoints-BBIavB9h.cjs +0 -37
- package/dist/loadEndpoints-DAZ53Og2.mjs +0 -31
- package/dist/loadEndpoints.cjs +0 -3
- package/dist/loadEndpoints.mjs +0 -3
- package/src/build.ts +0 -305
- package/src/loadEndpoints.ts +0 -48
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
import { ConstructGenerator } from "./Generator-UanJW0_V.mjs";
|
|
2
|
+
import { mkdir, writeFile } from "node:fs/promises";
|
|
3
|
+
import { dirname, join, relative } from "node:path";
|
|
4
|
+
import { Function } from "@geekmidas/constructs/functions";
|
|
5
|
+
|
|
6
|
+
//#region src/generators/FunctionGenerator.ts
|
|
7
|
+
var FunctionGenerator = class extends ConstructGenerator {
|
|
8
|
+
isConstruct(value) {
|
|
9
|
+
return Function.isFunction(value);
|
|
10
|
+
}
|
|
11
|
+
async build(context, constructs, outputDir, options) {
|
|
12
|
+
const provider = options?.provider || "aws-lambda";
|
|
13
|
+
const logger = console;
|
|
14
|
+
const functionInfos = [];
|
|
15
|
+
if (constructs.length === 0 || provider !== "aws-lambda") return functionInfos;
|
|
16
|
+
const functionsDir = join(outputDir, "functions");
|
|
17
|
+
await mkdir(functionsDir, { recursive: true });
|
|
18
|
+
for (const { key, construct, path } of constructs) {
|
|
19
|
+
const handlerFile = await this.generateFunctionHandler(functionsDir, path.relative, key, context);
|
|
20
|
+
functionInfos.push({
|
|
21
|
+
name: key,
|
|
22
|
+
handler: relative(process.cwd(), handlerFile).replace(/\.ts$/, ".handler"),
|
|
23
|
+
timeout: construct.timeout,
|
|
24
|
+
environment: await construct.getEnvironment()
|
|
25
|
+
});
|
|
26
|
+
logger.log(`Generated function handler: ${key}`);
|
|
27
|
+
}
|
|
28
|
+
return functionInfos;
|
|
29
|
+
}
|
|
30
|
+
async generateFunctionHandler(outputDir, sourceFile, exportName, context) {
|
|
31
|
+
const handlerFileName = `${exportName}.ts`;
|
|
32
|
+
const handlerPath = join(outputDir, handlerFileName);
|
|
33
|
+
const relativePath = relative(dirname(handlerPath), sourceFile);
|
|
34
|
+
const importPath = relativePath.replace(/\.ts$/, ".js");
|
|
35
|
+
const relativeEnvParserPath = relative(dirname(handlerPath), context.envParserPath);
|
|
36
|
+
const relativeLoggerPath = relative(dirname(handlerPath), context.loggerPath);
|
|
37
|
+
const content = `import { AWSLambdaFunction } from '@geekmidas/constructs/functions';
|
|
38
|
+
import { ${exportName} } from '${importPath}';
|
|
39
|
+
import ${context.envParserImportPattern} from '${relativeEnvParserPath}';
|
|
40
|
+
import ${context.loggerImportPattern} from '${relativeLoggerPath}';
|
|
41
|
+
|
|
42
|
+
const adapter = new AWSLambdaFunction(envParser, ${exportName});
|
|
43
|
+
|
|
44
|
+
export const handler = adapter.handler;
|
|
45
|
+
`;
|
|
46
|
+
await writeFile(handlerPath, content);
|
|
47
|
+
return handlerPath;
|
|
48
|
+
}
|
|
49
|
+
};
|
|
50
|
+
|
|
51
|
+
//#endregion
|
|
52
|
+
export { FunctionGenerator };
|
|
53
|
+
//# sourceMappingURL=FunctionGenerator-DOEB_yPh.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"FunctionGenerator-DOEB_yPh.mjs","names":["value: any","context: BuildContext","constructs: GeneratedConstruct<Function<any, any, any, any>>[]","outputDir: string","options?: GeneratorOptions","functionInfos: FunctionInfo[]","sourceFile: string","exportName: string"],"sources":["../src/generators/FunctionGenerator.ts"],"sourcesContent":["import { mkdir, writeFile } from 'node:fs/promises';\nimport { dirname, join, relative } from 'node:path';\nimport { Function } from '@geekmidas/constructs/functions';\nimport type { BuildContext } from '../build/types';\nimport type { FunctionInfo } from '../types';\nimport {\n ConstructGenerator,\n type GeneratedConstruct,\n type GeneratorOptions,\n} from './Generator';\n\nexport class FunctionGenerator extends ConstructGenerator<\n Function<any, any, any, any>,\n FunctionInfo[]\n> {\n isConstruct(value: any): value is Function<any, any, any, any> {\n return Function.isFunction(value);\n }\n\n async build(\n context: BuildContext,\n constructs: GeneratedConstruct<Function<any, any, any, any>>[],\n outputDir: string,\n options?: GeneratorOptions,\n ): Promise<FunctionInfo[]> {\n const provider = options?.provider || 'aws-lambda';\n const logger = console;\n const functionInfos: FunctionInfo[] = [];\n\n if (constructs.length === 0 || provider !== 'aws-lambda') {\n return functionInfos;\n }\n\n // Create functions subdirectory\n const functionsDir = join(outputDir, 'functions');\n await mkdir(functionsDir, { recursive: true });\n\n // Generate function handlers\n for (const { key, construct, path } of constructs) {\n const handlerFile = await this.generateFunctionHandler(\n functionsDir,\n path.relative,\n key,\n context,\n );\n\n functionInfos.push({\n name: key,\n handler: relative(process.cwd(), handlerFile).replace(\n /\\.ts$/,\n '.handler',\n ),\n timeout: construct.timeout,\n environment: await construct.getEnvironment(),\n });\n\n logger.log(`Generated function handler: ${key}`);\n }\n\n return 
functionInfos;\n }\n\n private async generateFunctionHandler(\n outputDir: string,\n sourceFile: string,\n exportName: string,\n context: BuildContext,\n ): Promise<string> {\n const handlerFileName = `${exportName}.ts`;\n const handlerPath = join(outputDir, handlerFileName);\n\n const relativePath = relative(dirname(handlerPath), sourceFile);\n const importPath = relativePath.replace(/\\.ts$/, '.js');\n\n const relativeEnvParserPath = relative(\n dirname(handlerPath),\n context.envParserPath,\n );\n const relativeLoggerPath = relative(\n dirname(handlerPath),\n context.loggerPath,\n );\n\n const content = `import { AWSLambdaFunction } from '@geekmidas/constructs/functions';\nimport { ${exportName} } from '${importPath}';\nimport ${context.envParserImportPattern} from '${relativeEnvParserPath}';\nimport ${context.loggerImportPattern} from '${relativeLoggerPath}';\n\nconst adapter = new AWSLambdaFunction(envParser, ${exportName});\n\nexport const handler = adapter.handler;\n`;\n\n await writeFile(handlerPath, content);\n return handlerPath;\n 
}\n}\n"],"mappings":";;;;;;AAWA,IAAa,oBAAb,cAAuC,mBAGrC;CACA,YAAYA,OAAmD;AAC7D,SAAO,SAAS,WAAW,MAAM;CAClC;CAED,MAAM,MACJC,SACAC,YACAC,WACAC,SACyB;EACzB,MAAM,WAAW,SAAS,YAAY;EACtC,MAAM,SAAS;EACf,MAAMC,gBAAgC,CAAE;AAExC,MAAI,WAAW,WAAW,KAAK,aAAa,aAC1C,QAAO;EAIT,MAAM,eAAe,KAAK,WAAW,YAAY;AACjD,QAAM,MAAM,cAAc,EAAE,WAAW,KAAM,EAAC;AAG9C,OAAK,MAAM,EAAE,KAAK,WAAW,MAAM,IAAI,YAAY;GACjD,MAAM,cAAc,MAAM,KAAK,wBAC7B,cACA,KAAK,UACL,KACA,QACD;AAED,iBAAc,KAAK;IACjB,MAAM;IACN,SAAS,SAAS,QAAQ,KAAK,EAAE,YAAY,CAAC,QAC5C,SACA,WACD;IACD,SAAS,UAAU;IACnB,aAAa,MAAM,UAAU,gBAAgB;GAC9C,EAAC;AAEF,UAAO,KAAK,8BAA8B,IAAI,EAAE;EACjD;AAED,SAAO;CACR;CAED,MAAc,wBACZF,WACAG,YACAC,YACAN,SACiB;EACjB,MAAM,mBAAmB,EAAE,WAAW;EACtC,MAAM,cAAc,KAAK,WAAW,gBAAgB;EAEpD,MAAM,eAAe,SAAS,QAAQ,YAAY,EAAE,WAAW;EAC/D,MAAM,aAAa,aAAa,QAAQ,SAAS,MAAM;EAEvD,MAAM,wBAAwB,SAC5B,QAAQ,YAAY,EACpB,QAAQ,cACT;EACD,MAAM,qBAAqB,SACzB,QAAQ,YAAY,EACpB,QAAQ,WACT;EAED,MAAM,WAAW;WACV,WAAW,WAAW,WAAW;SACnC,QAAQ,uBAAuB,SAAS,sBAAsB;SAC9D,QAAQ,oBAAoB,SAAS,mBAAmB;;mDAEd,WAAW;;;;AAK1D,QAAM,UAAU,aAAa,QAAQ;AACrC,SAAO;CACR;AACF"}
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
const require_chunk = require('./chunk-CUT6urMc.cjs');
const path = require_chunk.__toESM(require("path"));
const fast_glob = require_chunk.__toESM(require("fast-glob"));
const lodash_kebabcase = require_chunk.__toESM(require("lodash.kebabcase"));

//#region src/generators/Generator.ts
/**
 * Base class for construct generators. Discovers construct exports by
 * globbing source files, dynamically importing each match, and keeping
 * the exports accepted by the subclass-provided `isConstruct` guard.
 */
var ConstructGenerator = class {
	/** Convenience: load constructs matching `patterns`, then build them with `generator`. */
	static async build(context, outputDir, generator, patterns, options) {
		const constructs = await generator.load(patterns);
		return generator.build(context, constructs, outputDir, options);
	}
	/**
	 * Globs `patterns` (string, array, or undefined) under `cwd`, imports each
	 * matched file, and collects every export passing `isConstruct`, recording
	 * its export key, kebab-cased name, and absolute/relative paths.
	 * Throws on the first file that fails to load, preserving the underlying
	 * failure on the thrown error's `cause`.
	 */
	async load(patterns, cwd = process.cwd()) {
		const logger = console;
		const globPatterns = Array.isArray(patterns) ? patterns : patterns ? [patterns] : [];
		const files = fast_glob.default.stream(globPatterns, {
			cwd,
			absolute: true
		});
		const constructs = [];
		for await (const f of files) try {
			const file = f.toString();
			const module$1 = await import(file);
			for (const [key, construct] of Object.entries(module$1)) if (this.isConstruct(construct)) constructs.push({
				key,
				name: (0, lodash_kebabcase.default)(key),
				construct,
				path: {
					absolute: file,
					relative: (0, path.relative)(process.cwd(), file)
				}
			});
		} catch (error) {
			logger.warn(`Failed to load ${f}:`, error.message);
			// Preserve the original failure for callers/debuggers (ES2022 `cause`);
			// previously the stack and message of `error` were discarded.
			throw new Error("Failed to load constructs. Please check the logs for details.", { cause: error });
		}
		return constructs;
	}
};

//#endregion
Object.defineProperty(exports, 'ConstructGenerator', {
	enumerable: true,
	get: function () {
		return ConstructGenerator;
	}
});
//# sourceMappingURL=Generator-CDoEXCDg.cjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"Generator-CDoEXCDg.cjs","names":["context: BuildContext","outputDir: string","generator: ConstructGenerator<T, R>","patterns?: Routes","options?: GeneratorOptions","constructs: GeneratedConstruct<T>[]","module"],"sources":["../src/generators/Generator.ts"],"sourcesContent":["import { relative } from 'path';\nimport type { Construct } from '@geekmidas/constructs';\nimport fg from 'fast-glob';\nimport kebabCase from 'lodash.kebabcase';\nimport type { BuildContext } from '../build/types';\nimport type { LegacyProvider, Routes } from '../types';\n\nexport interface GeneratorOptions {\n provider?: LegacyProvider;\n [key: string]: any;\n}\n\nexport abstract class ConstructGenerator<T extends Construct, R = void> {\n abstract isConstruct(value: any): value is T;\n\n static async build<T extends Construct, R = void>(\n context: BuildContext,\n outputDir: string,\n generator: ConstructGenerator<T, R>,\n patterns?: Routes,\n options?: GeneratorOptions,\n ): Promise<R> {\n const constructs = await generator.load(patterns);\n return generator.build(context, constructs, outputDir, options);\n }\n\n abstract build(\n context: BuildContext,\n constructs: GeneratedConstruct<T>[],\n outputDir: string,\n options?: GeneratorOptions,\n ): Promise<R>;\n\n async load(\n patterns?: Routes,\n cwd = process.cwd(),\n ): Promise<GeneratedConstruct<T>[]> {\n const logger = console;\n\n // Normalize patterns to array\n const globPatterns = Array.isArray(patterns)\n ? patterns\n : patterns\n ? 
[patterns]\n : [];\n\n // Find all files\n const files = fg.stream(globPatterns, {\n cwd,\n absolute: true,\n });\n\n // Load constructs\n const constructs: GeneratedConstruct<T>[] = [];\n\n for await (const f of files) {\n try {\n const file = f.toString();\n const module = await import(file);\n\n // Check all exports for constructs\n for (const [key, construct] of Object.entries(module)) {\n if (this.isConstruct(construct)) {\n constructs.push({\n key,\n name: kebabCase(key),\n construct,\n path: {\n absolute: file,\n relative: relative(process.cwd(), file),\n },\n });\n }\n }\n } catch (error) {\n logger.warn(`Failed to load ${f}:`, (error as Error).message);\n throw new Error(\n 'Failed to load constructs. Please check the logs for details.',\n );\n }\n }\n\n return constructs;\n }\n}\n\nexport interface GeneratedConstruct<T extends Construct> {\n key: string;\n name: string;\n construct: T;\n path: {\n absolute: string;\n relative: string;\n };\n}\n"],"mappings":";;;;;;AAYA,IAAsB,qBAAtB,MAAwE;CAGtE,aAAa,MACXA,SACAC,WACAC,WACAC,UACAC,SACY;EACZ,MAAM,aAAa,MAAM,UAAU,KAAK,SAAS;AACjD,SAAO,UAAU,MAAM,SAAS,YAAY,WAAW,QAAQ;CAChE;CASD,MAAM,KACJD,UACA,MAAM,QAAQ,KAAK,EACe;EAClC,MAAM,SAAS;EAGf,MAAM,eAAe,MAAM,QAAQ,SAAS,GACxC,WACA,WACE,CAAC,QAAS,IACV,CAAE;EAGR,MAAM,QAAQ,kBAAG,OAAO,cAAc;GACpC;GACA,UAAU;EACX,EAAC;EAGF,MAAME,aAAsC,CAAE;AAE9C,aAAW,MAAM,KAAK,MACpB,KAAI;GACF,MAAM,OAAO,EAAE,UAAU;GACzB,MAAMC,WAAS,MAAM,OAAO;AAG5B,QAAK,MAAM,CAAC,KAAK,UAAU,IAAI,OAAO,QAAQA,SAAO,CACnD,KAAI,KAAK,YAAY,UAAU,CAC7B,YAAW,KAAK;IACd;IACA,MAAM,8BAAU,IAAI;IACpB;IACA,MAAM;KACJ,UAAU;KACV,UAAU,mBAAS,QAAQ,KAAK,EAAE,KAAK;IACxC;GACF,EAAC;EAGP,SAAQ,OAAO;AACd,UAAO,MAAM,iBAAiB,EAAE,IAAK,MAAgB,QAAQ;AAC7D,SAAM,IAAI,MACR;EAEH;AAGH,SAAO;CACR;AACF"}
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
import { relative } from "path";
|
|
2
|
+
import fg from "fast-glob";
|
|
3
|
+
import kebabCase from "lodash.kebabcase";
|
|
4
|
+
|
|
5
|
+
//#region src/generators/Generator.ts
|
|
6
|
+
var ConstructGenerator = class {
|
|
7
|
+
static async build(context, outputDir, generator, patterns, options) {
|
|
8
|
+
const constructs = await generator.load(patterns);
|
|
9
|
+
return generator.build(context, constructs, outputDir, options);
|
|
10
|
+
}
|
|
11
|
+
async load(patterns, cwd = process.cwd()) {
|
|
12
|
+
const logger = console;
|
|
13
|
+
const globPatterns = Array.isArray(patterns) ? patterns : patterns ? [patterns] : [];
|
|
14
|
+
const files = fg.stream(globPatterns, {
|
|
15
|
+
cwd,
|
|
16
|
+
absolute: true
|
|
17
|
+
});
|
|
18
|
+
const constructs = [];
|
|
19
|
+
for await (const f of files) try {
|
|
20
|
+
const file = f.toString();
|
|
21
|
+
const module = await import(file);
|
|
22
|
+
for (const [key, construct] of Object.entries(module)) if (this.isConstruct(construct)) constructs.push({
|
|
23
|
+
key,
|
|
24
|
+
name: kebabCase(key),
|
|
25
|
+
construct,
|
|
26
|
+
path: {
|
|
27
|
+
absolute: file,
|
|
28
|
+
relative: relative(process.cwd(), file)
|
|
29
|
+
}
|
|
30
|
+
});
|
|
31
|
+
} catch (error) {
|
|
32
|
+
logger.warn(`Failed to load ${f}:`, error.message);
|
|
33
|
+
throw new Error("Failed to load constructs. Please check the logs for details.");
|
|
34
|
+
}
|
|
35
|
+
return constructs;
|
|
36
|
+
}
|
|
37
|
+
};
|
|
38
|
+
|
|
39
|
+
//#endregion
|
|
40
|
+
export { ConstructGenerator };
|
|
41
|
+
//# sourceMappingURL=Generator-UanJW0_V.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"Generator-UanJW0_V.mjs","names":["context: BuildContext","outputDir: string","generator: ConstructGenerator<T, R>","patterns?: Routes","options?: GeneratorOptions","constructs: GeneratedConstruct<T>[]"],"sources":["../src/generators/Generator.ts"],"sourcesContent":["import { relative } from 'path';\nimport type { Construct } from '@geekmidas/constructs';\nimport fg from 'fast-glob';\nimport kebabCase from 'lodash.kebabcase';\nimport type { BuildContext } from '../build/types';\nimport type { LegacyProvider, Routes } from '../types';\n\nexport interface GeneratorOptions {\n provider?: LegacyProvider;\n [key: string]: any;\n}\n\nexport abstract class ConstructGenerator<T extends Construct, R = void> {\n abstract isConstruct(value: any): value is T;\n\n static async build<T extends Construct, R = void>(\n context: BuildContext,\n outputDir: string,\n generator: ConstructGenerator<T, R>,\n patterns?: Routes,\n options?: GeneratorOptions,\n ): Promise<R> {\n const constructs = await generator.load(patterns);\n return generator.build(context, constructs, outputDir, options);\n }\n\n abstract build(\n context: BuildContext,\n constructs: GeneratedConstruct<T>[],\n outputDir: string,\n options?: GeneratorOptions,\n ): Promise<R>;\n\n async load(\n patterns?: Routes,\n cwd = process.cwd(),\n ): Promise<GeneratedConstruct<T>[]> {\n const logger = console;\n\n // Normalize patterns to array\n const globPatterns = Array.isArray(patterns)\n ? patterns\n : patterns\n ? 
[patterns]\n : [];\n\n // Find all files\n const files = fg.stream(globPatterns, {\n cwd,\n absolute: true,\n });\n\n // Load constructs\n const constructs: GeneratedConstruct<T>[] = [];\n\n for await (const f of files) {\n try {\n const file = f.toString();\n const module = await import(file);\n\n // Check all exports for constructs\n for (const [key, construct] of Object.entries(module)) {\n if (this.isConstruct(construct)) {\n constructs.push({\n key,\n name: kebabCase(key),\n construct,\n path: {\n absolute: file,\n relative: relative(process.cwd(), file),\n },\n });\n }\n }\n } catch (error) {\n logger.warn(`Failed to load ${f}:`, (error as Error).message);\n throw new Error(\n 'Failed to load constructs. Please check the logs for details.',\n );\n }\n }\n\n return constructs;\n }\n}\n\nexport interface GeneratedConstruct<T extends Construct> {\n key: string;\n name: string;\n construct: T;\n path: {\n absolute: string;\n relative: string;\n };\n}\n"],"mappings":";;;;;AAYA,IAAsB,qBAAtB,MAAwE;CAGtE,aAAa,MACXA,SACAC,WACAC,WACAC,UACAC,SACY;EACZ,MAAM,aAAa,MAAM,UAAU,KAAK,SAAS;AACjD,SAAO,UAAU,MAAM,SAAS,YAAY,WAAW,QAAQ;CAChE;CASD,MAAM,KACJD,UACA,MAAM,QAAQ,KAAK,EACe;EAClC,MAAM,SAAS;EAGf,MAAM,eAAe,MAAM,QAAQ,SAAS,GACxC,WACA,WACE,CAAC,QAAS,IACV,CAAE;EAGR,MAAM,QAAQ,GAAG,OAAO,cAAc;GACpC;GACA,UAAU;EACX,EAAC;EAGF,MAAME,aAAsC,CAAE;AAE9C,aAAW,MAAM,KAAK,MACpB,KAAI;GACF,MAAM,OAAO,EAAE,UAAU;GACzB,MAAM,SAAS,MAAM,OAAO;AAG5B,QAAK,MAAM,CAAC,KAAK,UAAU,IAAI,OAAO,QAAQ,OAAO,CACnD,KAAI,KAAK,YAAY,UAAU,CAC7B,YAAW,KAAK;IACd;IACA,MAAM,UAAU,IAAI;IACpB;IACA,MAAM;KACJ,UAAU;KACV,UAAU,SAAS,QAAQ,KAAK,EAAE,KAAK;IACxC;GACF,EAAC;EAGP,SAAQ,OAAO;AACd,UAAO,MAAM,iBAAiB,EAAE,IAAK,MAAgB,QAAQ;AAC7D,SAAM,IAAI,MACR;EAEH;AAGH,SAAO;CACR;AACF"}
|
|
@@ -0,0 +1,204 @@
|
|
|
1
|
+
const require_chunk = require('./chunk-CUT6urMc.cjs');
const require_Generator = require('./Generator-CDoEXCDg.cjs');
const node_fs_promises = require_chunk.__toESM(require("node:fs/promises"));
const node_path = require_chunk.__toESM(require("node:path"));
const __geekmidas_constructs_subscribers = require_chunk.__toESM(require("@geekmidas/constructs/subscribers"));

//#region src/generators/SubscriberGenerator.ts
/**
 * Generates subscriber wiring. For the "aws-lambda" provider it emits one
 * handler file per Subscriber construct; for the "server" provider it emits
 * a single polling-mode subscribers.ts (local development only).
 *
 * Fix: the generated server shutdown handler previously called
 * `connection.stop()` once per active subscriber (and not at all when none
 * had started), ignoring its loop variable. It now stops the shared
 * connection exactly once, unconditionally.
 */
var SubscriberGenerator = class extends require_Generator.ConstructGenerator {
	/** True when `value` is a Subscriber construct (delegates to the static guard). */
	isConstruct(value) {
		return __geekmidas_constructs_subscribers.Subscriber.isSubscriber(value);
	}
	/**
	 * Builds subscriber output for the chosen provider. Returns SubscriberInfo
	 * records for "aws-lambda"; for "server" it writes a single polling file
	 * and returns an empty list (no per-subscriber handlers exist).
	 */
	async build(context, constructs, outputDir, options) {
		const provider = options?.provider || "aws-lambda";
		const logger = console;
		const subscriberInfos = [];
		if (constructs.length === 0) return subscriberInfos;
		if (provider === "server") {
			await this.generateServerSubscribersFile(outputDir, constructs);
			logger.log(`Generated server subscribers file with ${constructs.length} subscribers (polling mode)`);
			return subscriberInfos;
		}
		if (provider !== "aws-lambda") return subscriberInfos;
		const subscribersDir = (0, node_path.join)(outputDir, "subscribers");
		await (0, node_fs_promises.mkdir)(subscribersDir, { recursive: true });
		for (const { key, construct, path } of constructs) {
			const handlerFile = await this.generateSubscriberHandler(subscribersDir, path.relative, key, construct, context);
			subscriberInfos.push({
				name: key,
				// "<path>.ts" becomes "<path>.handler" — the Lambda handler spec string.
				handler: (0, node_path.relative)(process.cwd(), handlerFile).replace(/\.ts$/, ".handler"),
				subscribedEvents: construct.subscribedEvents || [],
				timeout: construct.timeout,
				environment: await construct.getEnvironment()
			});
			logger.log(`Generated subscriber handler: ${key}`);
		}
		return subscriberInfos;
	}
	/**
	 * Writes one handler file wrapping the construct in AWSLambdaSubscriber,
	 * importing the construct and env parser via paths relative to the
	 * handler's directory. Returns the written path.
	 */
	async generateSubscriberHandler(outputDir, sourceFile, exportName, _subscriber, context) {
		const handlerFileName = `${exportName}.ts`;
		const handlerPath = (0, node_path.join)(outputDir, handlerFileName);
		const relativePath = (0, node_path.relative)((0, node_path.dirname)(handlerPath), sourceFile);
		const importPath = relativePath.replace(/\.ts$/, ".js");
		const relativeEnvParserPath = (0, node_path.relative)((0, node_path.dirname)(handlerPath), context.envParserPath);
		const content = `import { AWSLambdaSubscriber } from '@geekmidas/constructs/aws';
import { ${exportName} } from '${importPath}';
import ${context.envParserImportPattern} from '${relativeEnvParserPath}';

const adapter = new AWSLambdaSubscriber(envParser, ${exportName});

export const handler = adapter.handler;
`;
		await (0, node_fs_promises.writeFile)(handlerPath, content);
		return handlerPath;
	}
	/**
	 * Writes subscribers.ts: a polling-mode setup file for local development
	 * that imports every discovered subscriber and subscribes each one on a
	 * shared event connection. Returns the written path.
	 */
	async generateServerSubscribersFile(outputDir, subscribers) {
		const subscribersFileName = "subscribers.ts";
		const subscribersPath = (0, node_path.join)(outputDir, subscribersFileName);
		// Group export names by source file so each file yields one import line.
		const importsByFile = /* @__PURE__ */ new Map();
		for (const { path, key } of subscribers) {
			const relativePath = (0, node_path.relative)((0, node_path.dirname)(subscribersPath), path.relative);
			const importPath = relativePath.replace(/\.ts$/, ".js");
			if (!importsByFile.has(importPath)) importsByFile.set(importPath, []);
			importsByFile.get(importPath).push(key);
		}
		const imports = Array.from(importsByFile.entries()).map(([importPath, exports$1]) => `import { ${exports$1.join(", ")} } from '${importPath}';`).join("\n");
		const allExportNames = subscribers.map(({ key }) => key);
		const content = `/**
 * Generated subscribers setup
 *
 * ⚠️ WARNING: This is for LOCAL DEVELOPMENT ONLY
 * This uses event polling which is not suitable for production.
 *
 * For production, use AWS Lambda with SQS/SNS event source mappings.
 * Lambda automatically:
 * - Scales based on queue depth
 * - Handles batch processing and retries
 * - Manages dead letter queues
 * - Provides better cost optimization
 *
 * This polling implementation is useful for:
 * - Local development and testing
 * - Understanding event flow without Lambda deployment
 *
 * Supported connection strings:
 * - sqs://region/account-id/queue-name (SQS queue)
 * - sns://region/account-id/topic-name (SNS topic)
 * - rabbitmq://host:port/queue-name (RabbitMQ)
 * - basic://in-memory (In-memory for testing)
 */
import type { EnvironmentParser } from '@geekmidas/envkit';
import type { Logger } from '@geekmidas/logger';
import { EventConnectionFactory, Subscriber } from '@geekmidas/events';
import type { EventConnection, EventSubscriber } from '@geekmidas/events';
import { ServiceDiscovery } from '@geekmidas/services';
${imports}

const subscribers = [
  ${allExportNames.join(",\n  ")}
];

const activeSubscribers: EventSubscriber<any>[] = [];

export async function setupSubscribers(
  envParser: EnvironmentParser<any>,
  logger: Logger,
): Promise<void> {
  logger.info('Setting up subscribers in polling mode (local development)');

  const config = envParser.create((get) => ({
    connectionString: get('EVENT_SUBSCRIBER_CONNECTION_STRING').string().optional(),
  })).parse();

  if (!config.connectionString) {
    logger.warn('EVENT_SUBSCRIBER_CONNECTION_STRING not configured, skipping subscriber setup');
    return;
  }

  const serviceDiscovery = ServiceDiscovery.getInstance(logger, envParser);

  // Create connection once, outside the loop (more efficient)
  // EventConnectionFactory automatically determines the right connection type
  let connection: EventConnection;
  try {
    connection = await EventConnectionFactory.fromConnectionString(config.connectionString);

    const connectionType = new URL(config.connectionString).protocol.replace(':', '');
    logger.info({ connectionType }, 'Created shared event connection');
  } catch (error) {
    logger.error({ error }, 'Failed to create event connection');
    return;
  }

  for (const subscriber of subscribers) {
    try {
      // Create subscriber from shared connection
      const eventSubscriber = await Subscriber.fromConnection(connection);

      // Register services
      const services = subscriber.services.length > 0
        ? await serviceDiscovery.register(subscriber.services)
        : {};

      // Subscribe to events
      const subscribedEvents = subscriber.subscribedEvents || [];

      if (subscribedEvents.length === 0) {
        logger.warn({ subscriber: subscriber.constructor.name }, 'Subscriber has no subscribed events, skipping');
        continue;
      }

      await eventSubscriber.subscribe(subscribedEvents, async (event) => {
        try {
          // Process single event (batch of 1)
          await subscriber.handler({
            events: [event],
            services: services as any,
            logger: subscriber.logger,
          });

          logger.debug({ eventType: event.type }, 'Successfully processed event');
        } catch (error) {
          logger.error({ error, event }, 'Failed to process event');
          // Event will become visible again for retry
        }
      });

      activeSubscribers.push(eventSubscriber);

      logger.info(
        {
          events: subscribedEvents,
        },
        'Subscriber started polling'
      );
    } catch (error) {
      logger.error({ error, subscriber: subscriber.constructor.name }, 'Failed to setup subscriber');
    }
  }

  // Setup graceful shutdown
  const shutdown = () => {
    logger.info('Stopping all subscribers');
    connection.stop();
  };

  process.on('SIGTERM', shutdown);
  process.on('SIGINT', shutdown);
}
`;
		await (0, node_fs_promises.writeFile)(subscribersPath, content);
		return subscribersPath;
	}
};

//#endregion
Object.defineProperty(exports, 'SubscriberGenerator', {
	enumerable: true,
	get: function () {
		return SubscriberGenerator;
	}
});
//# sourceMappingURL=SubscriberGenerator-BfMZCVNy.cjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"SubscriberGenerator-BfMZCVNy.cjs","names":["ConstructGenerator","value: any","context: BuildContext","constructs: GeneratedConstruct<Subscriber<any, any, any, any, any, any>>[]","outputDir: string","options?: GeneratorOptions","subscriberInfos: SubscriberInfo[]","sourceFile: string","exportName: string","_subscriber: Subscriber<any, any, any, any, any, any>","subscribers: GeneratedConstruct<Subscriber<any, any, any, any, any, any>>[]","exports"],"sources":["../src/generators/SubscriberGenerator.ts"],"sourcesContent":["import { mkdir, writeFile } from 'node:fs/promises';\nimport { dirname, join, relative } from 'node:path';\nimport { Subscriber } from '@geekmidas/constructs/subscribers';\nimport type { BuildContext } from '../build/types';\nimport type { SubscriberInfo } from '../types';\nimport {\n ConstructGenerator,\n type GeneratedConstruct,\n type GeneratorOptions,\n} from './Generator';\n\nexport class SubscriberGenerator extends ConstructGenerator<\n Subscriber<any, any, any, any, any, any>,\n SubscriberInfo[]\n> {\n isConstruct(value: any): value is Subscriber<any, any, any, any, any, any> {\n return Subscriber.isSubscriber(value);\n }\n\n async build(\n context: BuildContext,\n constructs: GeneratedConstruct<Subscriber<any, any, any, any, any, any>>[],\n outputDir: string,\n options?: GeneratorOptions,\n ): Promise<SubscriberInfo[]> {\n const provider = options?.provider || 'aws-lambda';\n const logger = console;\n const subscriberInfos: SubscriberInfo[] = [];\n\n if (constructs.length === 0) {\n return subscriberInfos;\n }\n\n if (provider === 'server') {\n // Generate subscribers.ts for server-based polling\n await this.generateServerSubscribersFile(outputDir, constructs);\n\n logger.log(\n `Generated server subscribers file with ${constructs.length} subscribers (polling mode)`,\n );\n\n // Return empty array as server subscribers don't have individual handlers\n return subscriberInfos;\n }\n\n if (provider !== 'aws-lambda') {\n 
return subscriberInfos;\n }\n\n // Create subscribers subdirectory\n const subscribersDir = join(outputDir, 'subscribers');\n await mkdir(subscribersDir, { recursive: true });\n\n // Generate subscriber handlers\n for (const { key, construct, path } of constructs) {\n const handlerFile = await this.generateSubscriberHandler(\n subscribersDir,\n path.relative,\n key,\n construct,\n context,\n );\n\n subscriberInfos.push({\n name: key,\n handler: relative(process.cwd(), handlerFile).replace(\n /\\.ts$/,\n '.handler',\n ),\n subscribedEvents: construct.subscribedEvents || [],\n timeout: construct.timeout,\n environment: await construct.getEnvironment(),\n });\n\n logger.log(`Generated subscriber handler: ${key}`);\n }\n\n return subscriberInfos;\n }\n\n private async generateSubscriberHandler(\n outputDir: string,\n sourceFile: string,\n exportName: string,\n _subscriber: Subscriber<any, any, any, any, any, any>,\n context: BuildContext,\n ): Promise<string> {\n const handlerFileName = `${exportName}.ts`;\n const handlerPath = join(outputDir, handlerFileName);\n\n const relativePath = relative(dirname(handlerPath), sourceFile);\n const importPath = relativePath.replace(/\\.ts$/, '.js');\n\n const relativeEnvParserPath = relative(\n dirname(handlerPath),\n context.envParserPath,\n );\n\n const content = `import { AWSLambdaSubscriber } from '@geekmidas/constructs/aws';\nimport { ${exportName} } from '${importPath}';\nimport ${context.envParserImportPattern} from '${relativeEnvParserPath}';\n\nconst adapter = new AWSLambdaSubscriber(envParser, ${exportName});\n\nexport const handler = adapter.handler;\n`;\n\n await writeFile(handlerPath, content);\n return handlerPath;\n }\n\n private async generateServerSubscribersFile(\n outputDir: string,\n subscribers: GeneratedConstruct<Subscriber<any, any, any, any, any, any>>[],\n ): Promise<string> {\n const subscribersFileName = 'subscribers.ts';\n const subscribersPath = join(outputDir, subscribersFileName);\n\n // Group 
imports by file\n const importsByFile = new Map<string, string[]>();\n\n for (const { path, key } of subscribers) {\n const relativePath = relative(dirname(subscribersPath), path.relative);\n const importPath = relativePath.replace(/\\.ts$/, '.js');\n\n if (!importsByFile.has(importPath)) {\n importsByFile.set(importPath, []);\n }\n importsByFile.get(importPath)!.push(key);\n }\n\n // Generate import statements\n const imports = Array.from(importsByFile.entries())\n .map(\n ([importPath, exports]) =>\n `import { ${exports.join(', ')} } from '${importPath}';`,\n )\n .join('\\n');\n\n const allExportNames = subscribers.map(({ key }) => key);\n\n const content = `/**\n * Generated subscribers setup\n *\n * ⚠️ WARNING: This is for LOCAL DEVELOPMENT ONLY\n * This uses event polling which is not suitable for production.\n *\n * For production, use AWS Lambda with SQS/SNS event source mappings.\n * Lambda automatically:\n * - Scales based on queue depth\n * - Handles batch processing and retries\n * - Manages dead letter queues\n * - Provides better cost optimization\n *\n * This polling implementation is useful for:\n * - Local development and testing\n * - Understanding event flow without Lambda deployment\n *\n * Supported connection strings:\n * - sqs://region/account-id/queue-name (SQS queue)\n * - sns://region/account-id/topic-name (SNS topic)\n * - rabbitmq://host:port/queue-name (RabbitMQ)\n * - basic://in-memory (In-memory for testing)\n */\nimport type { EnvironmentParser } from '@geekmidas/envkit';\nimport type { Logger } from '@geekmidas/logger';\nimport { EventConnectionFactory, Subscriber } from '@geekmidas/events';\nimport type { EventConnection, EventSubscriber } from '@geekmidas/events';\nimport { ServiceDiscovery } from '@geekmidas/services';\n${imports}\n\nconst subscribers = [\n ${allExportNames.join(',\\n ')}\n];\n\nconst activeSubscribers: EventSubscriber<any>[] = [];\n\nexport async function setupSubscribers(\n envParser: EnvironmentParser<any>,\n 
logger: Logger,\n): Promise<void> {\n logger.info('Setting up subscribers in polling mode (local development)');\n\n const config = envParser.create((get) => ({\n connectionString: get('EVENT_SUBSCRIBER_CONNECTION_STRING').string().optional(),\n })).parse();\n\n if (!config.connectionString) {\n logger.warn('EVENT_SUBSCRIBER_CONNECTION_STRING not configured, skipping subscriber setup');\n return;\n }\n\n const serviceDiscovery = ServiceDiscovery.getInstance(logger, envParser);\n\n // Create connection once, outside the loop (more efficient)\n // EventConnectionFactory automatically determines the right connection type\n let connection: EventConnection;\n try {\n connection = await EventConnectionFactory.fromConnectionString(config.connectionString);\n\n const connectionType = new URL(config.connectionString).protocol.replace(':', '');\n logger.info({ connectionType }, 'Created shared event connection');\n } catch (error) {\n logger.error({ error }, 'Failed to create event connection');\n return;\n }\n\n for (const subscriber of subscribers) {\n try {\n // Create subscriber from shared connection\n const eventSubscriber = await Subscriber.fromConnection(connection);\n\n // Register services\n const services = subscriber.services.length > 0\n ? 
await serviceDiscovery.register(subscriber.services)\n : {};\n\n // Subscribe to events\n const subscribedEvents = subscriber.subscribedEvents || [];\n\n if (subscribedEvents.length === 0) {\n logger.warn({ subscriber: subscriber.constructor.name }, 'Subscriber has no subscribed events, skipping');\n continue;\n }\n\n await eventSubscriber.subscribe(subscribedEvents, async (event) => {\n try {\n // Process single event (batch of 1)\n await subscriber.handler({\n events: [event],\n services: services as any,\n logger: subscriber.logger,\n });\n\n logger.debug({ eventType: event.type }, 'Successfully processed event');\n } catch (error) {\n logger.error({ error, event }, 'Failed to process event');\n // Event will become visible again for retry\n }\n });\n\n activeSubscribers.push(eventSubscriber);\n\n logger.info(\n {\n events: subscribedEvents,\n },\n 'Subscriber started polling'\n );\n } catch (error) {\n logger.error({ error, subscriber: subscriber.constructor.name }, 'Failed to setup subscriber');\n }\n }\n\n // Setup graceful shutdown\n const shutdown = () => {\n logger.info('Stopping all subscribers');\n for (const eventSubscriber of activeSubscribers) {\n connection.stop();\n }\n };\n\n process.on('SIGTERM', shutdown);\n process.on('SIGINT', shutdown);\n}\n`;\n\n await writeFile(subscribersPath, content);\n return subscribersPath;\n 
}\n}\n"],"mappings":";;;;;;;AAWA,IAAa,sBAAb,cAAyCA,qCAGvC;CACA,YAAYC,OAA+D;AACzE,SAAO,8CAAW,aAAa,MAAM;CACtC;CAED,MAAM,MACJC,SACAC,YACAC,WACAC,SAC2B;EAC3B,MAAM,WAAW,SAAS,YAAY;EACtC,MAAM,SAAS;EACf,MAAMC,kBAAoC,CAAE;AAE5C,MAAI,WAAW,WAAW,EACxB,QAAO;AAGT,MAAI,aAAa,UAAU;AAEzB,SAAM,KAAK,8BAA8B,WAAW,WAAW;AAE/D,UAAO,KACJ,yCAAyC,WAAW,OAAO,6BAC7D;AAGD,UAAO;EACR;AAED,MAAI,aAAa,aACf,QAAO;EAIT,MAAM,iBAAiB,oBAAK,WAAW,cAAc;AACrD,QAAM,4BAAM,gBAAgB,EAAE,WAAW,KAAM,EAAC;AAGhD,OAAK,MAAM,EAAE,KAAK,WAAW,MAAM,IAAI,YAAY;GACjD,MAAM,cAAc,MAAM,KAAK,0BAC7B,gBACA,KAAK,UACL,KACA,WACA,QACD;AAED,mBAAgB,KAAK;IACnB,MAAM;IACN,SAAS,wBAAS,QAAQ,KAAK,EAAE,YAAY,CAAC,QAC5C,SACA,WACD;IACD,kBAAkB,UAAU,oBAAoB,CAAE;IAClD,SAAS,UAAU;IACnB,aAAa,MAAM,UAAU,gBAAgB;GAC9C,EAAC;AAEF,UAAO,KAAK,gCAAgC,IAAI,EAAE;EACnD;AAED,SAAO;CACR;CAED,MAAc,0BACZF,WACAG,YACAC,YACAC,aACAP,SACiB;EACjB,MAAM,mBAAmB,EAAE,WAAW;EACtC,MAAM,cAAc,oBAAK,WAAW,gBAAgB;EAEpD,MAAM,eAAe,wBAAS,uBAAQ,YAAY,EAAE,WAAW;EAC/D,MAAM,aAAa,aAAa,QAAQ,SAAS,MAAM;EAEvD,MAAM,wBAAwB,wBAC5B,uBAAQ,YAAY,EACpB,QAAQ,cACT;EAED,MAAM,WAAW;WACV,WAAW,WAAW,WAAW;SACnC,QAAQ,uBAAuB,SAAS,sBAAsB;;qDAElB,WAAW;;;;AAK5D,QAAM,gCAAU,aAAa,QAAQ;AACrC,SAAO;CACR;CAED,MAAc,8BACZE,WACAM,aACiB;EACjB,MAAM,sBAAsB;EAC5B,MAAM,kBAAkB,oBAAK,WAAW,oBAAoB;EAG5D,MAAM,gCAAgB,IAAI;AAE1B,OAAK,MAAM,EAAE,MAAM,KAAK,IAAI,aAAa;GACvC,MAAM,eAAe,wBAAS,uBAAQ,gBAAgB,EAAE,KAAK,SAAS;GACtE,MAAM,aAAa,aAAa,QAAQ,SAAS,MAAM;AAEvD,QAAK,cAAc,IAAI,WAAW,CAChC,eAAc,IAAI,YAAY,CAAE,EAAC;AAEnC,iBAAc,IAAI,WAAW,CAAE,KAAK,IAAI;EACzC;EAGD,MAAM,UAAU,MAAM,KAAK,cAAc,SAAS,CAAC,CAChD,IACC,CAAC,CAAC,YAAYC,UAAQ,MACnB,WAAW,UAAQ,KAAK,KAAK,CAAC,WAAW,WAAW,IACxD,CACA,KAAK,KAAK;EAEb,MAAM,iBAAiB,YAAY,IAAI,CAAC,EAAE,KAAK,KAAK,IAAI;EAExD,MAAM,WAAW;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA4BnB,QAAQ;;;IAGN,eAAe,KAAK,QAAQ,CAAC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+F7B,QAAM,gCAAU,iBAAiB,QAAQ;AACzC,SAAO;CACR;AACF"}
|
|
@@ -0,0 +1,198 @@
|
|
|
1
|
+
import { ConstructGenerator } from "./Generator-UanJW0_V.mjs";
|
|
2
|
+
import { mkdir, writeFile } from "node:fs/promises";
|
|
3
|
+
import { dirname, join, relative } from "node:path";
|
|
4
|
+
import { Subscriber } from "@geekmidas/constructs/subscribers";
|
|
5
|
+
|
|
6
|
+
//#region src/generators/SubscriberGenerator.ts
|
|
7
|
+
var SubscriberGenerator = class extends ConstructGenerator {
|
|
8
|
+
isConstruct(value) {
|
|
9
|
+
return Subscriber.isSubscriber(value);
|
|
10
|
+
}
|
|
11
|
+
async build(context, constructs, outputDir, options) {
|
|
12
|
+
const provider = options?.provider || "aws-lambda";
|
|
13
|
+
const logger = console;
|
|
14
|
+
const subscriberInfos = [];
|
|
15
|
+
if (constructs.length === 0) return subscriberInfos;
|
|
16
|
+
if (provider === "server") {
|
|
17
|
+
await this.generateServerSubscribersFile(outputDir, constructs);
|
|
18
|
+
logger.log(`Generated server subscribers file with ${constructs.length} subscribers (polling mode)`);
|
|
19
|
+
return subscriberInfos;
|
|
20
|
+
}
|
|
21
|
+
if (provider !== "aws-lambda") return subscriberInfos;
|
|
22
|
+
const subscribersDir = join(outputDir, "subscribers");
|
|
23
|
+
await mkdir(subscribersDir, { recursive: true });
|
|
24
|
+
for (const { key, construct, path } of constructs) {
|
|
25
|
+
const handlerFile = await this.generateSubscriberHandler(subscribersDir, path.relative, key, construct, context);
|
|
26
|
+
subscriberInfos.push({
|
|
27
|
+
name: key,
|
|
28
|
+
handler: relative(process.cwd(), handlerFile).replace(/\.ts$/, ".handler"),
|
|
29
|
+
subscribedEvents: construct.subscribedEvents || [],
|
|
30
|
+
timeout: construct.timeout,
|
|
31
|
+
environment: await construct.getEnvironment()
|
|
32
|
+
});
|
|
33
|
+
logger.log(`Generated subscriber handler: ${key}`);
|
|
34
|
+
}
|
|
35
|
+
return subscriberInfos;
|
|
36
|
+
}
|
|
37
|
+
async generateSubscriberHandler(outputDir, sourceFile, exportName, _subscriber, context) {
|
|
38
|
+
const handlerFileName = `${exportName}.ts`;
|
|
39
|
+
const handlerPath = join(outputDir, handlerFileName);
|
|
40
|
+
const relativePath = relative(dirname(handlerPath), sourceFile);
|
|
41
|
+
const importPath = relativePath.replace(/\.ts$/, ".js");
|
|
42
|
+
const relativeEnvParserPath = relative(dirname(handlerPath), context.envParserPath);
|
|
43
|
+
const content = `import { AWSLambdaSubscriber } from '@geekmidas/constructs/aws';
|
|
44
|
+
import { ${exportName} } from '${importPath}';
|
|
45
|
+
import ${context.envParserImportPattern} from '${relativeEnvParserPath}';
|
|
46
|
+
|
|
47
|
+
const adapter = new AWSLambdaSubscriber(envParser, ${exportName});
|
|
48
|
+
|
|
49
|
+
export const handler = adapter.handler;
|
|
50
|
+
`;
|
|
51
|
+
await writeFile(handlerPath, content);
|
|
52
|
+
return handlerPath;
|
|
53
|
+
}
|
|
54
|
+
async generateServerSubscribersFile(outputDir, subscribers) {
|
|
55
|
+
const subscribersFileName = "subscribers.ts";
|
|
56
|
+
const subscribersPath = join(outputDir, subscribersFileName);
|
|
57
|
+
const importsByFile = /* @__PURE__ */ new Map();
|
|
58
|
+
for (const { path, key } of subscribers) {
|
|
59
|
+
const relativePath = relative(dirname(subscribersPath), path.relative);
|
|
60
|
+
const importPath = relativePath.replace(/\.ts$/, ".js");
|
|
61
|
+
if (!importsByFile.has(importPath)) importsByFile.set(importPath, []);
|
|
62
|
+
importsByFile.get(importPath).push(key);
|
|
63
|
+
}
|
|
64
|
+
const imports = Array.from(importsByFile.entries()).map(([importPath, exports]) => `import { ${exports.join(", ")} } from '${importPath}';`).join("\n");
|
|
65
|
+
const allExportNames = subscribers.map(({ key }) => key);
|
|
66
|
+
const content = `/**
|
|
67
|
+
* Generated subscribers setup
|
|
68
|
+
*
|
|
69
|
+
* ⚠️ WARNING: This is for LOCAL DEVELOPMENT ONLY
|
|
70
|
+
* This uses event polling which is not suitable for production.
|
|
71
|
+
*
|
|
72
|
+
* For production, use AWS Lambda with SQS/SNS event source mappings.
|
|
73
|
+
* Lambda automatically:
|
|
74
|
+
* - Scales based on queue depth
|
|
75
|
+
* - Handles batch processing and retries
|
|
76
|
+
* - Manages dead letter queues
|
|
77
|
+
* - Provides better cost optimization
|
|
78
|
+
*
|
|
79
|
+
* This polling implementation is useful for:
|
|
80
|
+
* - Local development and testing
|
|
81
|
+
* - Understanding event flow without Lambda deployment
|
|
82
|
+
*
|
|
83
|
+
* Supported connection strings:
|
|
84
|
+
* - sqs://region/account-id/queue-name (SQS queue)
|
|
85
|
+
* - sns://region/account-id/topic-name (SNS topic)
|
|
86
|
+
* - rabbitmq://host:port/queue-name (RabbitMQ)
|
|
87
|
+
* - basic://in-memory (In-memory for testing)
|
|
88
|
+
*/
|
|
89
|
+
import type { EnvironmentParser } from '@geekmidas/envkit';
|
|
90
|
+
import type { Logger } from '@geekmidas/logger';
|
|
91
|
+
import { EventConnectionFactory, Subscriber } from '@geekmidas/events';
|
|
92
|
+
import type { EventConnection, EventSubscriber } from '@geekmidas/events';
|
|
93
|
+
import { ServiceDiscovery } from '@geekmidas/services';
|
|
94
|
+
${imports}
|
|
95
|
+
|
|
96
|
+
const subscribers = [
|
|
97
|
+
${allExportNames.join(",\n ")}
|
|
98
|
+
];
|
|
99
|
+
|
|
100
|
+
const activeSubscribers: EventSubscriber<any>[] = [];
|
|
101
|
+
|
|
102
|
+
export async function setupSubscribers(
|
|
103
|
+
envParser: EnvironmentParser<any>,
|
|
104
|
+
logger: Logger,
|
|
105
|
+
): Promise<void> {
|
|
106
|
+
logger.info('Setting up subscribers in polling mode (local development)');
|
|
107
|
+
|
|
108
|
+
const config = envParser.create((get) => ({
|
|
109
|
+
connectionString: get('EVENT_SUBSCRIBER_CONNECTION_STRING').string().optional(),
|
|
110
|
+
})).parse();
|
|
111
|
+
|
|
112
|
+
if (!config.connectionString) {
|
|
113
|
+
logger.warn('EVENT_SUBSCRIBER_CONNECTION_STRING not configured, skipping subscriber setup');
|
|
114
|
+
return;
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
const serviceDiscovery = ServiceDiscovery.getInstance(logger, envParser);
|
|
118
|
+
|
|
119
|
+
// Create connection once, outside the loop (more efficient)
|
|
120
|
+
// EventConnectionFactory automatically determines the right connection type
|
|
121
|
+
let connection: EventConnection;
|
|
122
|
+
try {
|
|
123
|
+
connection = await EventConnectionFactory.fromConnectionString(config.connectionString);
|
|
124
|
+
|
|
125
|
+
const connectionType = new URL(config.connectionString).protocol.replace(':', '');
|
|
126
|
+
logger.info({ connectionType }, 'Created shared event connection');
|
|
127
|
+
} catch (error) {
|
|
128
|
+
logger.error({ error }, 'Failed to create event connection');
|
|
129
|
+
return;
|
|
130
|
+
}
|
|
131
|
+
|
|
132
|
+
for (const subscriber of subscribers) {
|
|
133
|
+
try {
|
|
134
|
+
// Create subscriber from shared connection
|
|
135
|
+
const eventSubscriber = await Subscriber.fromConnection(connection);
|
|
136
|
+
|
|
137
|
+
// Register services
|
|
138
|
+
const services = subscriber.services.length > 0
|
|
139
|
+
? await serviceDiscovery.register(subscriber.services)
|
|
140
|
+
: {};
|
|
141
|
+
|
|
142
|
+
// Subscribe to events
|
|
143
|
+
const subscribedEvents = subscriber.subscribedEvents || [];
|
|
144
|
+
|
|
145
|
+
if (subscribedEvents.length === 0) {
|
|
146
|
+
logger.warn({ subscriber: subscriber.constructor.name }, 'Subscriber has no subscribed events, skipping');
|
|
147
|
+
continue;
|
|
148
|
+
}
|
|
149
|
+
|
|
150
|
+
await eventSubscriber.subscribe(subscribedEvents, async (event) => {
|
|
151
|
+
try {
|
|
152
|
+
// Process single event (batch of 1)
|
|
153
|
+
await subscriber.handler({
|
|
154
|
+
events: [event],
|
|
155
|
+
services: services as any,
|
|
156
|
+
logger: subscriber.logger,
|
|
157
|
+
});
|
|
158
|
+
|
|
159
|
+
logger.debug({ eventType: event.type }, 'Successfully processed event');
|
|
160
|
+
} catch (error) {
|
|
161
|
+
logger.error({ error, event }, 'Failed to process event');
|
|
162
|
+
// Event will become visible again for retry
|
|
163
|
+
}
|
|
164
|
+
});
|
|
165
|
+
|
|
166
|
+
activeSubscribers.push(eventSubscriber);
|
|
167
|
+
|
|
168
|
+
logger.info(
|
|
169
|
+
{
|
|
170
|
+
events: subscribedEvents,
|
|
171
|
+
},
|
|
172
|
+
'Subscriber started polling'
|
|
173
|
+
);
|
|
174
|
+
} catch (error) {
|
|
175
|
+
logger.error({ error, subscriber: subscriber.constructor.name }, 'Failed to setup subscriber');
|
|
176
|
+
}
|
|
177
|
+
}
|
|
178
|
+
|
|
179
|
+
// Setup graceful shutdown
|
|
180
|
+
const shutdown = () => {
|
|
181
|
+
logger.info('Stopping all subscribers');
|
|
182
|
+
for (const eventSubscriber of activeSubscribers) {
|
|
183
|
+
connection.stop();
|
|
184
|
+
}
|
|
185
|
+
};
|
|
186
|
+
|
|
187
|
+
process.on('SIGTERM', shutdown);
|
|
188
|
+
process.on('SIGINT', shutdown);
|
|
189
|
+
}
|
|
190
|
+
`;
|
|
191
|
+
await writeFile(subscribersPath, content);
|
|
192
|
+
return subscribersPath;
|
|
193
|
+
}
|
|
194
|
+
};
|
|
195
|
+
|
|
196
|
+
//#endregion
|
|
197
|
+
export { SubscriberGenerator };
|
|
198
|
+
//# sourceMappingURL=SubscriberGenerator-D2u00NI3.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"SubscriberGenerator-D2u00NI3.mjs","names":["value: any","context: BuildContext","constructs: GeneratedConstruct<Subscriber<any, any, any, any, any, any>>[]","outputDir: string","options?: GeneratorOptions","subscriberInfos: SubscriberInfo[]","sourceFile: string","exportName: string","_subscriber: Subscriber<any, any, any, any, any, any>","subscribers: GeneratedConstruct<Subscriber<any, any, any, any, any, any>>[]"],"sources":["../src/generators/SubscriberGenerator.ts"],"sourcesContent":["import { mkdir, writeFile } from 'node:fs/promises';\nimport { dirname, join, relative } from 'node:path';\nimport { Subscriber } from '@geekmidas/constructs/subscribers';\nimport type { BuildContext } from '../build/types';\nimport type { SubscriberInfo } from '../types';\nimport {\n ConstructGenerator,\n type GeneratedConstruct,\n type GeneratorOptions,\n} from './Generator';\n\nexport class SubscriberGenerator extends ConstructGenerator<\n Subscriber<any, any, any, any, any, any>,\n SubscriberInfo[]\n> {\n isConstruct(value: any): value is Subscriber<any, any, any, any, any, any> {\n return Subscriber.isSubscriber(value);\n }\n\n async build(\n context: BuildContext,\n constructs: GeneratedConstruct<Subscriber<any, any, any, any, any, any>>[],\n outputDir: string,\n options?: GeneratorOptions,\n ): Promise<SubscriberInfo[]> {\n const provider = options?.provider || 'aws-lambda';\n const logger = console;\n const subscriberInfos: SubscriberInfo[] = [];\n\n if (constructs.length === 0) {\n return subscriberInfos;\n }\n\n if (provider === 'server') {\n // Generate subscribers.ts for server-based polling\n await this.generateServerSubscribersFile(outputDir, constructs);\n\n logger.log(\n `Generated server subscribers file with ${constructs.length} subscribers (polling mode)`,\n );\n\n // Return empty array as server subscribers don't have individual handlers\n return subscriberInfos;\n }\n\n if (provider !== 'aws-lambda') {\n return subscriberInfos;\n }\n\n // 
Create subscribers subdirectory\n const subscribersDir = join(outputDir, 'subscribers');\n await mkdir(subscribersDir, { recursive: true });\n\n // Generate subscriber handlers\n for (const { key, construct, path } of constructs) {\n const handlerFile = await this.generateSubscriberHandler(\n subscribersDir,\n path.relative,\n key,\n construct,\n context,\n );\n\n subscriberInfos.push({\n name: key,\n handler: relative(process.cwd(), handlerFile).replace(\n /\\.ts$/,\n '.handler',\n ),\n subscribedEvents: construct.subscribedEvents || [],\n timeout: construct.timeout,\n environment: await construct.getEnvironment(),\n });\n\n logger.log(`Generated subscriber handler: ${key}`);\n }\n\n return subscriberInfos;\n }\n\n private async generateSubscriberHandler(\n outputDir: string,\n sourceFile: string,\n exportName: string,\n _subscriber: Subscriber<any, any, any, any, any, any>,\n context: BuildContext,\n ): Promise<string> {\n const handlerFileName = `${exportName}.ts`;\n const handlerPath = join(outputDir, handlerFileName);\n\n const relativePath = relative(dirname(handlerPath), sourceFile);\n const importPath = relativePath.replace(/\\.ts$/, '.js');\n\n const relativeEnvParserPath = relative(\n dirname(handlerPath),\n context.envParserPath,\n );\n\n const content = `import { AWSLambdaSubscriber } from '@geekmidas/constructs/aws';\nimport { ${exportName} } from '${importPath}';\nimport ${context.envParserImportPattern} from '${relativeEnvParserPath}';\n\nconst adapter = new AWSLambdaSubscriber(envParser, ${exportName});\n\nexport const handler = adapter.handler;\n`;\n\n await writeFile(handlerPath, content);\n return handlerPath;\n }\n\n private async generateServerSubscribersFile(\n outputDir: string,\n subscribers: GeneratedConstruct<Subscriber<any, any, any, any, any, any>>[],\n ): Promise<string> {\n const subscribersFileName = 'subscribers.ts';\n const subscribersPath = join(outputDir, subscribersFileName);\n\n // Group imports by file\n const importsByFile = 
new Map<string, string[]>();\n\n for (const { path, key } of subscribers) {\n const relativePath = relative(dirname(subscribersPath), path.relative);\n const importPath = relativePath.replace(/\\.ts$/, '.js');\n\n if (!importsByFile.has(importPath)) {\n importsByFile.set(importPath, []);\n }\n importsByFile.get(importPath)!.push(key);\n }\n\n // Generate import statements\n const imports = Array.from(importsByFile.entries())\n .map(\n ([importPath, exports]) =>\n `import { ${exports.join(', ')} } from '${importPath}';`,\n )\n .join('\\n');\n\n const allExportNames = subscribers.map(({ key }) => key);\n\n const content = `/**\n * Generated subscribers setup\n *\n * ⚠️ WARNING: This is for LOCAL DEVELOPMENT ONLY\n * This uses event polling which is not suitable for production.\n *\n * For production, use AWS Lambda with SQS/SNS event source mappings.\n * Lambda automatically:\n * - Scales based on queue depth\n * - Handles batch processing and retries\n * - Manages dead letter queues\n * - Provides better cost optimization\n *\n * This polling implementation is useful for:\n * - Local development and testing\n * - Understanding event flow without Lambda deployment\n *\n * Supported connection strings:\n * - sqs://region/account-id/queue-name (SQS queue)\n * - sns://region/account-id/topic-name (SNS topic)\n * - rabbitmq://host:port/queue-name (RabbitMQ)\n * - basic://in-memory (In-memory for testing)\n */\nimport type { EnvironmentParser } from '@geekmidas/envkit';\nimport type { Logger } from '@geekmidas/logger';\nimport { EventConnectionFactory, Subscriber } from '@geekmidas/events';\nimport type { EventConnection, EventSubscriber } from '@geekmidas/events';\nimport { ServiceDiscovery } from '@geekmidas/services';\n${imports}\n\nconst subscribers = [\n ${allExportNames.join(',\\n ')}\n];\n\nconst activeSubscribers: EventSubscriber<any>[] = [];\n\nexport async function setupSubscribers(\n envParser: EnvironmentParser<any>,\n logger: Logger,\n): Promise<void> {\n 
logger.info('Setting up subscribers in polling mode (local development)');\n\n const config = envParser.create((get) => ({\n connectionString: get('EVENT_SUBSCRIBER_CONNECTION_STRING').string().optional(),\n })).parse();\n\n if (!config.connectionString) {\n logger.warn('EVENT_SUBSCRIBER_CONNECTION_STRING not configured, skipping subscriber setup');\n return;\n }\n\n const serviceDiscovery = ServiceDiscovery.getInstance(logger, envParser);\n\n // Create connection once, outside the loop (more efficient)\n // EventConnectionFactory automatically determines the right connection type\n let connection: EventConnection;\n try {\n connection = await EventConnectionFactory.fromConnectionString(config.connectionString);\n\n const connectionType = new URL(config.connectionString).protocol.replace(':', '');\n logger.info({ connectionType }, 'Created shared event connection');\n } catch (error) {\n logger.error({ error }, 'Failed to create event connection');\n return;\n }\n\n for (const subscriber of subscribers) {\n try {\n // Create subscriber from shared connection\n const eventSubscriber = await Subscriber.fromConnection(connection);\n\n // Register services\n const services = subscriber.services.length > 0\n ? 
await serviceDiscovery.register(subscriber.services)\n : {};\n\n // Subscribe to events\n const subscribedEvents = subscriber.subscribedEvents || [];\n\n if (subscribedEvents.length === 0) {\n logger.warn({ subscriber: subscriber.constructor.name }, 'Subscriber has no subscribed events, skipping');\n continue;\n }\n\n await eventSubscriber.subscribe(subscribedEvents, async (event) => {\n try {\n // Process single event (batch of 1)\n await subscriber.handler({\n events: [event],\n services: services as any,\n logger: subscriber.logger,\n });\n\n logger.debug({ eventType: event.type }, 'Successfully processed event');\n } catch (error) {\n logger.error({ error, event }, 'Failed to process event');\n // Event will become visible again for retry\n }\n });\n\n activeSubscribers.push(eventSubscriber);\n\n logger.info(\n {\n events: subscribedEvents,\n },\n 'Subscriber started polling'\n );\n } catch (error) {\n logger.error({ error, subscriber: subscriber.constructor.name }, 'Failed to setup subscriber');\n }\n }\n\n // Setup graceful shutdown\n const shutdown = () => {\n logger.info('Stopping all subscribers');\n for (const eventSubscriber of activeSubscribers) {\n connection.stop();\n }\n };\n\n process.on('SIGTERM', shutdown);\n process.on('SIGINT', shutdown);\n}\n`;\n\n await writeFile(subscribersPath, content);\n return subscribersPath;\n 
}\n}\n"],"mappings":";;;;;;AAWA,IAAa,sBAAb,cAAyC,mBAGvC;CACA,YAAYA,OAA+D;AACzE,SAAO,WAAW,aAAa,MAAM;CACtC;CAED,MAAM,MACJC,SACAC,YACAC,WACAC,SAC2B;EAC3B,MAAM,WAAW,SAAS,YAAY;EACtC,MAAM,SAAS;EACf,MAAMC,kBAAoC,CAAE;AAE5C,MAAI,WAAW,WAAW,EACxB,QAAO;AAGT,MAAI,aAAa,UAAU;AAEzB,SAAM,KAAK,8BAA8B,WAAW,WAAW;AAE/D,UAAO,KACJ,yCAAyC,WAAW,OAAO,6BAC7D;AAGD,UAAO;EACR;AAED,MAAI,aAAa,aACf,QAAO;EAIT,MAAM,iBAAiB,KAAK,WAAW,cAAc;AACrD,QAAM,MAAM,gBAAgB,EAAE,WAAW,KAAM,EAAC;AAGhD,OAAK,MAAM,EAAE,KAAK,WAAW,MAAM,IAAI,YAAY;GACjD,MAAM,cAAc,MAAM,KAAK,0BAC7B,gBACA,KAAK,UACL,KACA,WACA,QACD;AAED,mBAAgB,KAAK;IACnB,MAAM;IACN,SAAS,SAAS,QAAQ,KAAK,EAAE,YAAY,CAAC,QAC5C,SACA,WACD;IACD,kBAAkB,UAAU,oBAAoB,CAAE;IAClD,SAAS,UAAU;IACnB,aAAa,MAAM,UAAU,gBAAgB;GAC9C,EAAC;AAEF,UAAO,KAAK,gCAAgC,IAAI,EAAE;EACnD;AAED,SAAO;CACR;CAED,MAAc,0BACZF,WACAG,YACAC,YACAC,aACAP,SACiB;EACjB,MAAM,mBAAmB,EAAE,WAAW;EACtC,MAAM,cAAc,KAAK,WAAW,gBAAgB;EAEpD,MAAM,eAAe,SAAS,QAAQ,YAAY,EAAE,WAAW;EAC/D,MAAM,aAAa,aAAa,QAAQ,SAAS,MAAM;EAEvD,MAAM,wBAAwB,SAC5B,QAAQ,YAAY,EACpB,QAAQ,cACT;EAED,MAAM,WAAW;WACV,WAAW,WAAW,WAAW;SACnC,QAAQ,uBAAuB,SAAS,sBAAsB;;qDAElB,WAAW;;;;AAK5D,QAAM,UAAU,aAAa,QAAQ;AACrC,SAAO;CACR;CAED,MAAc,8BACZE,WACAM,aACiB;EACjB,MAAM,sBAAsB;EAC5B,MAAM,kBAAkB,KAAK,WAAW,oBAAoB;EAG5D,MAAM,gCAAgB,IAAI;AAE1B,OAAK,MAAM,EAAE,MAAM,KAAK,IAAI,aAAa;GACvC,MAAM,eAAe,SAAS,QAAQ,gBAAgB,EAAE,KAAK,SAAS;GACtE,MAAM,aAAa,aAAa,QAAQ,SAAS,MAAM;AAEvD,QAAK,cAAc,IAAI,WAAW,CAChC,eAAc,IAAI,YAAY,CAAE,EAAC;AAEnC,iBAAc,IAAI,WAAW,CAAE,KAAK,IAAI;EACzC;EAGD,MAAM,UAAU,MAAM,KAAK,cAAc,SAAS,CAAC,CAChD,IACC,CAAC,CAAC,YAAY,QAAQ,MACnB,WAAW,QAAQ,KAAK,KAAK,CAAC,WAAW,WAAW,IACxD,CACA,KAAK,KAAK;EAEb,MAAM,iBAAiB,YAAY,IAAI,CAAC,EAAE,KAAK,KAAK,IAAI;EAExD,MAAM,WAAW;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA4BnB,QAAQ;;;IAGN,eAAe,KAAK,QAAQ,CAAC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+F7B,QAAM,UAAU,iBAAiB,QAAQ;AACzC,SAAO;CACR;AACF"}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
require('../config-D1EpSGk6.cjs');
|
|
2
|
+
require('../Generator-CDoEXCDg.cjs');
|
|
3
|
+
require('../CronGenerator-1PflEYe2.cjs');
|
|
4
|
+
require('../EndpointGenerator-BbGrDiCP.cjs');
|
|
5
|
+
require('../FunctionGenerator-Clw64SwQ.cjs');
|
|
6
|
+
require('../SubscriberGenerator-BfMZCVNy.cjs');
|
|
7
|
+
require('../generators-CEKtVh81.cjs');
|
|
8
|
+
require('../manifests-D0saShvH.cjs');
|
|
9
|
+
require('../providerResolver-DgvzNfP4.cjs');
|
|
10
|
+
const require_build = require('../build-BBhlEjf5.cjs');
|
|
11
|
+
|
|
12
|
+
exports.buildCommand = require_build.buildCommand;
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import "../config-U-mdW-7Y.mjs";
|
|
2
|
+
import "../Generator-UanJW0_V.mjs";
|
|
3
|
+
import "../CronGenerator-DXRfHQcV.mjs";
|
|
4
|
+
import "../EndpointGenerator-BmZ9BxbO.mjs";
|
|
5
|
+
import "../FunctionGenerator-DOEB_yPh.mjs";
|
|
6
|
+
import "../SubscriberGenerator-D2u00NI3.mjs";
|
|
7
|
+
import "../generators-CsLujGXs.mjs";
|
|
8
|
+
import "../manifests-BrJXpHrf.mjs";
|
|
9
|
+
import "../providerResolver-B_TjNF0_.mjs";
|
|
10
|
+
import { buildCommand } from "../build-kY-lG30Q.mjs";
|
|
11
|
+
|
|
12
|
+
export { buildCommand };
|