@powerhousedao/codegen 0.16.0 → 0.17.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. package/dist/cli.js +20 -2
  2. package/dist/cli.js.map +1 -1
  3. package/dist/codegen/.hygen/templates/powerhouse/generate-processor-analytics/index.esm.t +12 -0
  4. package/dist/codegen/.hygen/templates/powerhouse/generate-processor-analytics/index.js +17 -0
  5. package/dist/codegen/.hygen/templates/powerhouse/generate-processor-analytics/index.js.map +1 -0
  6. package/dist/codegen/.hygen/templates/powerhouse/generate-processor-analytics/lib.esm.t +9 -0
  7. package/dist/codegen/.hygen/templates/powerhouse/generate-processor-analytics/lib.inject_export.esm.t +7 -0
  8. package/dist/codegen/.hygen/templates/powerhouse/generate-processor-analytics/options.esm.t +21 -0
  9. package/dist/codegen/.hygen/templates/powerhouse/generate-processor-analytics/service.esm.t +23 -0
  10. package/dist/codegen/.hygen/templates/powerhouse/generate-processor-analytics/test.esm.t +35 -0
  11. package/dist/codegen/.hygen/templates/powerhouse/generate-processor-analytics/transmit.esm.t +56 -0
  12. package/dist/codegen/.hygen/templates/powerhouse/generate-processor-operational/db-schema.esm.t +19 -0
  13. package/dist/codegen/.hygen/templates/powerhouse/generate-processor-operational/graphql-definitions.esm.t +17 -0
  14. package/dist/codegen/.hygen/templates/powerhouse/generate-processor-operational/index.esm.t +11 -0
  15. package/dist/codegen/.hygen/templates/powerhouse/generate-processor-operational/index.js +17 -0
  16. package/dist/codegen/.hygen/templates/powerhouse/generate-processor-operational/index.js.map +1 -0
  17. package/dist/codegen/.hygen/templates/powerhouse/generate-processor-operational/lib.esm.t +9 -0
  18. package/dist/codegen/.hygen/templates/powerhouse/generate-processor-operational/lib.inject_export.esm.t +8 -0
  19. package/dist/codegen/.hygen/templates/powerhouse/generate-processor-operational/options.esm.t +21 -0
  20. package/dist/codegen/.hygen/templates/powerhouse/generate-processor-operational/resolvers.esm.t +17 -0
  21. package/dist/codegen/.hygen/templates/powerhouse/generate-processor-operational/test.esm.t +46 -0
  22. package/dist/codegen/.hygen/templates/powerhouse/generate-processor-operational/transmit.esm.t +69 -0
  23. package/dist/codegen/hygen.js +21 -1
  24. package/dist/codegen/hygen.js.map +1 -1
  25. package/dist/codegen/index.js +21 -2
  26. package/dist/codegen/index.js.map +1 -1
  27. package/dist/hygen.d.ts +4 -1
  28. package/dist/index.d.ts +2 -1
  29. package/dist/utils/cli.js +2 -0
  30. package/dist/utils/cli.js.map +1 -1
  31. package/dist/utils.d.ts +1 -0
  32. package/package.json +7 -7
package/dist/cli.js CHANGED
@@ -1,15 +1,22 @@
  #! /usr/bin/env node
- import { generateEditor, generateFromFile, generate } from './codegen/index.js';
+ import { generateEditor, generateProcessor, generateFromFile, generate } from './codegen/index.js';
  import { getConfig, parseConfig, promptDirectories, parseArgs } from './utils/index.js';
 
  function parseCommand(argv) {
    const args = parseArgs(argv, {
      "--editor": String,
      "-e": "--editor",
-     "--document-types": String
+     "--processor": String,
+     "--document-types": String,
+     "--processor-type": String
    });
    const editorName = args["--editor"];
+   const processorName = args["--processor"];
+   const processorType = args["--processor-type"];
    return {
+     processor: !!processorName,
+     processorName,
+     processorType,
      editor: !!editorName,
      editorName,
      documentTypes: args["--document-types"],
@@ -35,6 +42,17 @@ async function main() {
        command.documentTypes?.split(/[|,;]/g) ?? [],
        config
      );
+   } else if (command.processor) {
+     if (!command.processorName) {
+       throw new Error("processor name is required (--processor)");
+     }
+     const type = command.processorType === "analytics" ? "analytics" : "operational";
+     await generateProcessor(
+       command.processorName,
+       type,
+       command.documentTypes?.split(/[|,;]/g) ?? [],
+       config
+     );
    } else if (command.arg.length === 2) {
      await generateFromFile(command.arg[1], config);
    } else {
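The new branch above dispatches to the generateProcessor export of ./codegen/index.js with the parsed flag values. A minimal programmatic sketch of the same call, assuming the package's main entry re-exports dist/index and that PowerhouseConfig now includes the processorsDir field added in this release (processor name and document type are illustrative):

import { generateProcessor } from "@powerhousedao/codegen";

// Equivalent of: --processor my-processor --processor-type analytics --document-types powerhouse/budget-statement
await generateProcessor(
  "my-processor",                       // --processor (required for this branch)
  "analytics",                          // --processor-type; any other value falls back to "operational"
  ["powerhouse/budget-statement"],      // --document-types, split on | , ;
  {
    documentModelsDir: "./document-models", // values mirror the defaults in dist/utils/cli.js
    editorsDir: "./editors",
    processorsDir: "./processors",
    skipFormat: false,
  },
);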
package/dist/cli.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/cli.ts"],"names":[],"mappings":";;;;AASA,SAAS,aAAa,IAAgB,EAAA;AACpC,EAAM,MAAA,IAAA,GAAO,UAAU,IAAM,EAAA;AAAA,IAC3B,UAAY,EAAA,MAAA;AAAA,IACZ,IAAM,EAAA,UAAA;AAAA,IACN,kBAAoB,EAAA;AAAA,GACrB,CAAA;AACD,EAAM,MAAA,UAAA,GAAa,KAAK,UAAU,CAAA;AAClC,EAAO,OAAA;AAAA,IACL,MAAA,EAAQ,CAAC,CAAC,UAAA;AAAA,IACV,UAAA;AAAA,IACA,aAAA,EAAe,KAAK,kBAAkB,CAAA;AAAA,IACtC,KAAK,IAAK,CAAA;AAAA,GACZ;AACF;AAEA,eAAe,IAAO,GAAA;AACpB,EAAA,MAAM,IAAO,GAAA,OAAA,CAAQ,IAAK,CAAA,KAAA,CAAM,CAAC,CAAA;AACjC,EAAA,MAAM,aAAa,SAAU,EAAA;AAC7B,EAAM,MAAA,UAAA,GAAa,YAAY,IAAI,CAAA;AACnC,EAAA,MAAM,MAAS,GAAA,EAAE,GAAG,UAAA,EAAY,GAAG,UAAW,EAAA;AAC9C,EAAA,IAAI,OAAO,WAAa,EAAA;AACtB,IAAM,MAAA,MAAA,GAAS,MAAM,iBAAA,CAAkB,MAAM,CAAA;AAC7C,IAAO,MAAA,CAAA,MAAA,CAAO,QAAQ,MAAM,CAAA;AAAA;AAG9B,EAAM,MAAA,OAAA,GAAU,aAAa,IAAI,CAAA;AACjC,EAAA,IAAI,QAAQ,MAAQ,EAAA;AAClB,IAAI,IAAA,CAAC,QAAQ,UAAY,EAAA;AACvB,MAAM,MAAA,IAAI,MAAM,0CAA0C,CAAA;AAAA;AAE5D,IAAM,MAAA,cAAA;AAAA,MACJ,OAAQ,CAAA,UAAA;AAAA,MACR,OAAQ,CAAA,aAAA,EAAe,KAAM,CAAA,QAAQ,KAAK,EAAC;AAAA,MAC3C;AAAA,KACF;AAAA,GACS,MAAA,IAAA,OAAA,CAAQ,GAAI,CAAA,MAAA,KAAW,CAAG,EAAA;AACnC,IAAA,MAAM,gBAAiB,CAAA,OAAA,CAAQ,GAAI,CAAA,CAAC,GAAG,MAAM,CAAA;AAAA,GACxC,MAAA;AACL,IAAA,MAAM,SAAS,MAAM,CAAA;AAAA;AAEzB;AAEA,IAAK,EAAA,CAAE,KAAM,CAAA,CAAC,CAAe,KAAA;AAC3B,EAAM,MAAA,CAAA;AACR,CAAC,CAAA","file":"cli.js","sourcesContent":["#! /usr/bin/env node\nimport { generate, generateEditor, generateFromFile } from \"./codegen/index\";\nimport {\n parseArgs,\n getConfig,\n promptDirectories,\n parseConfig,\n} from \"./utils/index\";\n\nfunction parseCommand(argv: string[]) {\n const args = parseArgs(argv, {\n \"--editor\": String,\n \"-e\": \"--editor\",\n \"--document-types\": String,\n });\n const editorName = args[\"--editor\"];\n return {\n editor: !!editorName,\n editorName,\n documentTypes: args[\"--document-types\"],\n arg: args._,\n };\n}\n\nasync function main() {\n const argv = process.argv.slice(2);\n const baseConfig = getConfig();\n const argsConfig = parseConfig(argv);\n const config = { ...baseConfig, ...argsConfig };\n if (config.interactive) {\n const result = await promptDirectories(config);\n Object.assign(config, result);\n }\n\n const command = parseCommand(argv);\n if (command.editor) {\n if (!command.editorName) {\n throw new Error(\"Editor name is required (--editor or -e)\");\n }\n await generateEditor(\n command.editorName,\n command.documentTypes?.split(/[|,;]/g) ?? [],\n config,\n );\n } else if (command.arg.length === 2) {\n await generateFromFile(command.arg[1], config);\n } else {\n await generate(config);\n }\n}\n\nmain().catch((e: unknown) => {\n throw e;\n});\n"]}
+ {"version":3,"sources":["../src/cli.ts"],"names":[],"mappings":";;;;AAcA,SAAS,aAAa,IAAgB,EAAA;AACpC,EAAM,MAAA,IAAA,GAAO,UAAU,IAAM,EAAA;AAAA,IAC3B,UAAY,EAAA,MAAA;AAAA,IACZ,IAAM,EAAA,UAAA;AAAA,IACN,aAAe,EAAA,MAAA;AAAA,IACf,kBAAoB,EAAA,MAAA;AAAA,IACpB,kBAAoB,EAAA;AAAA,GACrB,CAAA;AACD,EAAM,MAAA,UAAA,GAAa,KAAK,UAAU,CAAA;AAClC,EAAM,MAAA,aAAA,GAAgB,KAAK,aAAa,CAAA;AACxC,EAAM,MAAA,aAAA,GAAgB,KAAK,kBAAkB,CAAA;AAC7C,EAAO,OAAA;AAAA,IACL,SAAA,EAAW,CAAC,CAAC,aAAA;AAAA,IACb,aAAA;AAAA,IACA,aAAA;AAAA,IACA,MAAA,EAAQ,CAAC,CAAC,UAAA;AAAA,IACV,UAAA;AAAA,IACA,aAAA,EAAe,KAAK,kBAAkB,CAAA;AAAA,IACtC,KAAK,IAAK,CAAA;AAAA,GACZ;AACF;AAEA,eAAe,IAAO,GAAA;AACpB,EAAA,MAAM,IAAO,GAAA,OAAA,CAAQ,IAAK,CAAA,KAAA,CAAM,CAAC,CAAA;AACjC,EAAA,MAAM,aAAa,SAAU,EAAA;AAC7B,EAAM,MAAA,UAAA,GAAa,YAAY,IAAI,CAAA;AACnC,EAAA,MAAM,MAAS,GAAA,EAAE,GAAG,UAAA,EAAY,GAAG,UAAW,EAAA;AAC9C,EAAA,IAAI,OAAO,WAAa,EAAA;AACtB,IAAM,MAAA,MAAA,GAAS,MAAM,iBAAA,CAAkB,MAAM,CAAA;AAC7C,IAAO,MAAA,CAAA,MAAA,CAAO,QAAQ,MAAM,CAAA;AAAA;AAG9B,EAAM,MAAA,OAAA,GAAU,aAAa,IAAI,CAAA;AAEjC,EAAA,IAAI,QAAQ,MAAQ,EAAA;AAClB,IAAI,IAAA,CAAC,QAAQ,UAAY,EAAA;AACvB,MAAM,MAAA,IAAI,MAAM,0CAA0C,CAAA;AAAA;AAE5D,IAAM,MAAA,cAAA;AAAA,MACJ,OAAQ,CAAA,UAAA;AAAA,MACR,OAAQ,CAAA,aAAA,EAAe,KAAM,CAAA,QAAQ,KAAK,EAAC;AAAA,MAC3C;AAAA,KACF;AAAA,GACF,MAAA,IAAW,QAAQ,SAAW,EAAA;AAC5B,IAAI,IAAA,CAAC,QAAQ,aAAe,EAAA;AAC1B,MAAM,MAAA,IAAI,MAAM,0CAA0C,CAAA;AAAA;AAG5D,IAAA,MAAM,IACJ,GAAA,OAAA,CAAQ,aAAkB,KAAA,WAAA,GAAc,WAAc,GAAA,aAAA;AAExD,IAAM,MAAA,iBAAA;AAAA,MACJ,OAAQ,CAAA,aAAA;AAAA,MACR,IAAA;AAAA,MACA,OAAQ,CAAA,aAAA,EAAe,KAAM,CAAA,QAAQ,KAAK,EAAC;AAAA,MAC3C;AAAA,KACF;AAAA,GACS,MAAA,IAAA,OAAA,CAAQ,GAAI,CAAA,MAAA,KAAW,CAAG,EAAA;AACnC,IAAA,MAAM,gBAAiB,CAAA,OAAA,CAAQ,GAAI,CAAA,CAAC,GAAG,MAAM,CAAA;AAAA,GACxC,MAAA;AACL,IAAA,MAAM,SAAS,MAAM,CAAA;AAAA;AAEzB;AAEA,IAAK,EAAA,CAAE,KAAM,CAAA,CAAC,CAAe,KAAA;AAC3B,EAAM,MAAA,CAAA;AACR,CAAC,CAAA","file":"cli.js","sourcesContent":["#! /usr/bin/env node\nimport {\n generate,\n generateEditor,\n generateFromFile,\n generateProcessor,\n} from \"./codegen/index\";\nimport {\n parseArgs,\n getConfig,\n promptDirectories,\n parseConfig,\n} from \"./utils/index\";\n\nfunction parseCommand(argv: string[]) {\n const args = parseArgs(argv, {\n \"--editor\": String,\n \"-e\": \"--editor\",\n \"--processor\": String,\n \"--document-types\": String,\n \"--processor-type\": String,\n });\n const editorName = args[\"--editor\"];\n const processorName = args[\"--processor\"];\n const processorType = args[\"--processor-type\"];\n return {\n processor: !!processorName,\n processorName,\n processorType,\n editor: !!editorName,\n editorName,\n documentTypes: args[\"--document-types\"],\n arg: args._,\n };\n}\n\nasync function main() {\n const argv = process.argv.slice(2);\n const baseConfig = getConfig();\n const argsConfig = parseConfig(argv);\n const config = { ...baseConfig, ...argsConfig };\n if (config.interactive) {\n const result = await promptDirectories(config);\n Object.assign(config, result);\n }\n\n const command = parseCommand(argv);\n\n if (command.editor) {\n if (!command.editorName) {\n throw new Error(\"Editor name is required (--editor or -e)\");\n }\n await generateEditor(\n command.editorName,\n command.documentTypes?.split(/[|,;]/g) ?? [],\n config,\n );\n } else if (command.processor) {\n if (!command.processorName) {\n throw new Error(\"processor name is required (--processor)\");\n }\n\n const type =\n command.processorType === \"analytics\" ? \"analytics\" : \"operational\";\n\n await generateProcessor(\n command.processorName,\n type,\n command.documentTypes?.split(/[|,;]/g) ?? 
[],\n config,\n );\n } else if (command.arg.length === 2) {\n await generateFromFile(command.arg[1], config);\n } else {\n await generate(config);\n }\n}\n\nmain().catch((e: unknown) => {\n throw e;\n});\n"]}
package/dist/codegen/.hygen/templates/powerhouse/generate-processor-analytics/index.esm.t ADDED
@@ -0,0 +1,12 @@
+ ---
+ to: "<%= rootDir %>/<%= h.changeCase.param(name) %>/src/index.ts"
+ force: true
+ ---
+ import {
+   AnalyticsResolvers as resolvers,
+   typedefs as typeDefs,
+ } from "@powerhousedao/analytics-engine-graphql";
+ import { transmit } from "./transmit";
+ import { options } from "./options";
+
+ export { options, resolvers, transmit, typeDefs };
package/dist/codegen/.hygen/templates/powerhouse/generate-processor-analytics/index.js ADDED
@@ -0,0 +1,17 @@
+ 'use strict';
+
+ var generate_processor_analytics_default = {
+   params: ({ args }) => {
+     return {
+       rootDir: args.rootDir,
+       documentModelsDir: args.documentModelsDir,
+       name: args.name,
+       documentTypes: args.documentTypes.split(",").filter((type) => type !== ""),
+       documentTypesMap: JSON.parse(args.documentTypesMap)
+     };
+   }
+ };
+
+ module.exports = generate_processor_analytics_default;
+ //# sourceMappingURL=index.js.map
+ //# sourceMappingURL=index.js.map
package/dist/codegen/.hygen/templates/powerhouse/generate-processor-analytics/index.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../../../../src/codegen/.hygen/templates/powerhouse/generate-processor-analytics/index.ts"],"names":[],"mappings":";;AAQA,IAAO,oCAAQ,GAAA;AAAA,EACb,MAAQ,EAAA,CAAC,EAAE,IAAA,EAA2B,KAAA;AACpC,IAAO,OAAA;AAAA,MACL,SAAS,IAAK,CAAA,OAAA;AAAA,MACd,mBAAmB,IAAK,CAAA,iBAAA;AAAA,MACxB,MAAM,IAAK,CAAA,IAAA;AAAA,MACX,aAAA,EAAe,IAAK,CAAA,aAAA,CACjB,KAAM,CAAA,GAAG,EACT,MAAO,CAAA,CAAC,IAAS,KAAA,IAAA,KAAS,EAAE,CAAA;AAAA,MAC/B,gBAAkB,EAAA,IAAA,CAAK,KAAM,CAAA,IAAA,CAAK,gBAAgB;AAAA,KAIpD;AAAA;AAEJ","file":"index.js","sourcesContent":["export type Args = {\n name: string;\n rootDir: string;\n documentModelsDir: string;\n documentTypes: string;\n documentTypesMap: string;\n};\n\nexport default {\n params: ({ args }: { args: Args }) => {\n return {\n rootDir: args.rootDir,\n documentModelsDir: args.documentModelsDir,\n name: args.name,\n documentTypes: args.documentTypes\n .split(\",\")\n .filter((type) => type !== \"\"),\n documentTypesMap: JSON.parse(args.documentTypesMap) as Record<\n string,\n string\n >,\n };\n },\n};\n"]}
package/dist/codegen/.hygen/templates/powerhouse/generate-processor-analytics/lib.esm.t ADDED
@@ -0,0 +1,9 @@
+ ---
+ to: "<%= rootDir %>/index.ts"
+ unless_exists: true
+ ---
+ /**
+  * This is a scaffold file meant for customization.
+  * Delete the file and run the code generator again to have it reset
+  */
+
package/dist/codegen/.hygen/templates/powerhouse/generate-processor-analytics/lib.inject_export.esm.t ADDED
@@ -0,0 +1,7 @@
+ ---
+ inject: true
+ append: true
+ to: "<%= rootDir %>/index.ts"
+ skip_if: "<%= h.changeCase.pascal(name) %>"
+ ---
+ export * as <%= h.changeCase.pascal(name) %>AnalyticsProcessor from "./<%= h.changeCase.param(name) %>/src";
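For orientation, the injected line is rendered with hygen's change-case helpers; for a hypothetical processor named "my-processor" the template above would append roughly the following to <rootDir>/index.ts:

export * as MyProcessorAnalyticsProcessor from "./my-processor/src";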
package/dist/codegen/.hygen/templates/powerhouse/generate-processor-analytics/options.esm.t ADDED
@@ -0,0 +1,21 @@
+ ---
+ to: "<%= rootDir %>/<%= h.changeCase.param(name) %>/src/options.ts"
+ force: true
+ ---
+ import { Listener } from "document-drive";
+
+ /**
+  * The options for the processor.
+  */
+ export const options: Omit<Listener, "driveId"> = {
+   listenerId: "test",
+   filter: {
+     branch: ["main"],
+     documentId: ["*"],
+     documentType: ["frank/test"],
+     scope: ["global"],
+   },
+   block: false,
+   label: "test",
+   system: true,
+ };
package/dist/codegen/.hygen/templates/powerhouse/generate-processor-analytics/service.esm.t ADDED
@@ -0,0 +1,23 @@
+ ---
+ to: "<%= rootDir %>/<%= h.changeCase.param(name) %>/src/service.ts"
+ force: true
+ ---
+ import { IAnalyticsStore } from "@powerhousedao/analytics-engine-core";
+ import { PostgresAnalyticsStore } from "@powerhousedao/analytics-engine-pg";
+
+ let store: IAnalyticsStore | null = null;
+
+ export default function get(): IAnalyticsStore {
+   if (!store) {
+     const connectionString = process.env.PG_CONNECTION_STRING;
+     if (!connectionString) {
+       throw new Error("PG_CONNECTION_STRING not set");
+     }
+
+     store = new PostgresAnalyticsStore(connectionString, (i, q) =>
+       console.log(`[PG] ${i}: ${q}`)
+     );
+   }
+
+   return store;
+ }
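The generated service is a lazy singleton around PostgresAnalyticsStore and throws if PG_CONNECTION_STRING is unset. A rough usage sketch, with an illustrative connection string and the same addSeriesValues shape that the generated transmit.ts writes:

import { AnalyticsPath } from "@powerhousedao/analytics-engine-core";
import { DateTime } from "luxon";
import getAnalyticsStore from "./service";

// Assumes e.g. PG_CONNECTION_STRING=postgres://user:pass@localhost:5432/analytics
// is exported before the first call; otherwise the generated service throws.
const store = getAnalyticsStore();

await store.addSeriesValues([
  {
    source: AnalyticsPath.fromString("switchboard/default/example-drive"), // example drive id
    start: DateTime.now(),
    value: 1,
    metric: "Count",
    dimensions: {
      drive: AnalyticsPath.fromString("drive/example-drive"),
    },
  },
]);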
package/dist/codegen/.hygen/templates/powerhouse/generate-processor-analytics/test.esm.t ADDED
@@ -0,0 +1,35 @@
+ ---
+ to: "<%= rootDir %>/<%= h.changeCase.param(name) %>/test/example.test.ts"
+ force: true
+ ---
+ import { transmit } from "../src/transmit";
+ import { actions } from "document-model-libs/document-drive";
+ import { describe, it } from "vitest";
+ import get from "../src/service";
+
+ describe("processor example test", () => {
+   it("should count amount of operations", async () => {
+     const exampleEntry = actions.addFile({
+       documentType: "powerhouse/budget-statement",
+       id: "1",
+       name: "test",
+       synchronizationUnits: [],
+     });
+
+     await transmit([
+       {
+         branch: "main",
+         documentId: "1",
+         driveId: "1",
+         operations: [
+           { ...exampleEntry, timestamp: "1", index: 1, skip: 0, hash: "1" },
+         ],
+         scope: "global",
+         state: {},
+       },
+     ]);
+
+     const analyticsStore = get();
+     // @todo: add test
+   });
+ });
package/dist/codegen/.hygen/templates/powerhouse/generate-processor-analytics/transmit.esm.t ADDED
@@ -0,0 +1,56 @@
+ ---
+ to: "<%= rootDir %>/<%= h.changeCase.param(name) %>/src/transmit.ts"
+ force: true
+ ---
+ import { InternalTransmitterUpdate } from "document-drive";
+ import get from "./service";
+ import { AnalyticsPath } from "@powerhousedao/analytics-engine-core";
+ import { DateTime } from "luxon";
+
+ export async function transmit(strands: InternalTransmitterUpdate[]) {
+   for (const strand of strands) {
+     handle(strand).catch((err) => {
+       console.error("Error handling strand", err);
+     });
+   }
+
+   return Promise.resolve();
+ }
+
+ async function handle(strand: InternalTransmitterUpdate) {
+   const analytics = get();
+
+   await Promise.all(
+     strand.operations.map((operation) => {
+       const source = AnalyticsPath.fromString(
+         `switchboard/default/${strand.driveId}`,
+       );
+
+       const start = DateTime.fromISO(operation.timestamp);
+       const dimensions: any = {
+         type: AnalyticsPath.fromString(`operation/type/${operation.type}`),
+         drive: AnalyticsPath.fromString(`drive/${strand.driveId}`),
+       };
+
+       if (strand.documentId) {
+         dimensions["document"] = AnalyticsPath.fromString(
+           `document/${strand.documentId}/${strand.scope}`,
+         );
+       }
+
+       // todo: +1, -1 for doc created and destroyed
+
+       // todo: count documents of each type (ADD_FILE, input.documentType)
+
+       return analytics.addSeriesValues([
+         {
+           source,
+           start,
+           value: 1,
+           metric: "Count",
+           dimensions,
+         },
+       ]);
+     }),
+   );
+ }
package/dist/codegen/.hygen/templates/powerhouse/generate-processor-operational/db-schema.esm.t ADDED
@@ -0,0 +1,19 @@
+ ---
+ to: "<%= rootDir %>/<%= h.changeCase.param(name) %>/src/db-schema.ts"
+ force: true
+ ---
+ import { integer, pgTable, primaryKey, uuid } from "drizzle-orm/pg-core";
+
+ /**
+  * The database schema for the processor.
+  */
+ export const exampleTable = pgTable(
+   "example_table",
+   {
+     id: uuid("id").notNull(),
+     value: integer("value").notNull(),
+   },
+   (table) => ({
+     pk: primaryKey({ columns: [table.id] }),
+   })
+ );
package/dist/codegen/.hygen/templates/powerhouse/generate-processor-operational/graphql-definitions.esm.t ADDED
@@ -0,0 +1,17 @@
+ ---
+ to: "<%= rootDir %>/<%= h.changeCase.param(name) %>/src/graphql-definitions.ts"
+ force: true
+ ---
+ /**
+  * The GraphQL type definitions for the processor.
+  */
+ export const typeDefs = `
+   type Query {
+     example: [Example!]!
+   }
+
+   type Example {
+     id: String!
+     value: String!
+   }
+ `;
package/dist/codegen/.hygen/templates/powerhouse/generate-processor-operational/index.esm.t ADDED
@@ -0,0 +1,11 @@
+ ---
+ to: "<%= rootDir %>/<%= h.changeCase.param(name) %>/src/index.ts"
+ force: true
+ ---
+ import { transmit } from "./transmit";
+ import { resolvers } from "./resolvers";
+ import { typeDefs } from "./graphql-definitions";
+ import { options } from "./options";
+ import * as dbSchema from "./db-schema";
+
+ export { options, resolvers, transmit, typeDefs, dbSchema };
package/dist/codegen/.hygen/templates/powerhouse/generate-processor-operational/index.js ADDED
@@ -0,0 +1,17 @@
+ 'use strict';
+
+ var generate_processor_operational_default = {
+   params: ({ args }) => {
+     return {
+       rootDir: args.rootDir,
+       documentModelsDir: args.documentModelsDir,
+       name: args.name,
+       documentTypes: args.documentTypes.split(",").filter((type) => type !== ""),
+       documentTypesMap: JSON.parse(args.documentTypesMap)
+     };
+   }
+ };
+
+ module.exports = generate_processor_operational_default;
+ //# sourceMappingURL=index.js.map
+ //# sourceMappingURL=index.js.map
package/dist/codegen/.hygen/templates/powerhouse/generate-processor-operational/index.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../../../../src/codegen/.hygen/templates/powerhouse/generate-processor-operational/index.ts"],"names":[],"mappings":";;AAQA,IAAO,sCAAQ,GAAA;AAAA,EACb,MAAQ,EAAA,CAAC,EAAE,IAAA,EAA2B,KAAA;AACpC,IAAO,OAAA;AAAA,MACL,SAAS,IAAK,CAAA,OAAA;AAAA,MACd,mBAAmB,IAAK,CAAA,iBAAA;AAAA,MACxB,MAAM,IAAK,CAAA,IAAA;AAAA,MACX,aAAA,EAAe,IAAK,CAAA,aAAA,CACjB,KAAM,CAAA,GAAG,EACT,MAAO,CAAA,CAAC,IAAS,KAAA,IAAA,KAAS,EAAE,CAAA;AAAA,MAC/B,gBAAkB,EAAA,IAAA,CAAK,KAAM,CAAA,IAAA,CAAK,gBAAgB;AAAA,KAIpD;AAAA;AAEJ","file":"index.js","sourcesContent":["export type Args = {\n name: string;\n rootDir: string;\n documentModelsDir: string;\n documentTypes: string;\n documentTypesMap: string;\n};\n\nexport default {\n params: ({ args }: { args: Args }) => {\n return {\n rootDir: args.rootDir,\n documentModelsDir: args.documentModelsDir,\n name: args.name,\n documentTypes: args.documentTypes\n .split(\",\")\n .filter((type) => type !== \"\"),\n documentTypesMap: JSON.parse(args.documentTypesMap) as Record<\n string,\n string\n >,\n };\n },\n};\n"]}
package/dist/codegen/.hygen/templates/powerhouse/generate-processor-operational/lib.esm.t ADDED
@@ -0,0 +1,9 @@
+ ---
+ to: "<%= rootDir %>/index.ts"
+ unless_exists: true
+ ---
+ /**
+  * This is a scaffold file meant for customization.
+  * Delete the file and run the code generator again to have it reset
+  */
+
package/dist/codegen/.hygen/templates/powerhouse/generate-processor-operational/lib.inject_export.esm.t ADDED
@@ -0,0 +1,8 @@
+ ---
+ inject: true
+ append: true
+ to: "<%= rootDir %>/index.ts"
+ skip_if: "<%= h.changeCase.pascal(name) %>"
+ force: true
+ ---
+ export * as <%= h.changeCase.pascal(name) %>OperationalProcessor from "./<%= h.changeCase.param(name) %>/src";
package/dist/codegen/.hygen/templates/powerhouse/generate-processor-operational/options.esm.t ADDED
@@ -0,0 +1,21 @@
+ ---
+ to: "<%= rootDir %>/<%= h.changeCase.param(name) %>/src/options.ts"
+ force: true
+ ---
+ import { Listener } from "document-drive";
+
+ /**
+  * The options for the processor.
+  */
+ export const options: Omit<Listener, "driveId"> = {
+   listenerId: "test",
+   filter: {
+     branch: ["main"],
+     documentId: ["*"],
+     documentType: ["frank/test"],
+     scope: ["global"],
+   },
+   block: false,
+   label: "test",
+   system: true,
+ };
package/dist/codegen/.hygen/templates/powerhouse/generate-processor-operational/resolvers.esm.t ADDED
@@ -0,0 +1,17 @@
+ ---
+ to: "<%= rootDir %>/<%= h.changeCase.param(name) %>/src/resolvers.ts"
+ force: true
+ ---
+ import { exampleTable } from "./db-schema";
+
+ /**
+  * The resolvers for the processor.
+  */
+ export const resolvers = {
+   Query: {
+     example: async (root, args, ctx, info) => {
+       const results = await ctx.db.select().from(exampleTable);
+       return results;
+     },
+   },
+ };
package/dist/codegen/.hygen/templates/powerhouse/generate-processor-operational/test.esm.t ADDED
@@ -0,0 +1,46 @@
+ ---
+ to: "<%= rootDir %>/<%= h.changeCase.param(name) %>/test/example.test.ts"
+ force: true
+ ---
+ import { actions } from "document-model-libs/document-drive";
+ import { drizzle } from "drizzle-orm/connect";
+ import { PgDatabase } from "drizzle-orm/pg-core";
+ import { beforeAll, describe, expect, it } from "vitest";
+ import { exampleTable } from "../src/db-schema";
+ import { transmit } from "../src/transmit";
+
+ describe("processor example test", () => {
+   let db: PgDatabase<any, any, any>;
+   beforeAll(async () => {
+     db = await drizzle("pglite", "./dev.db");
+     await db.delete(exampleTable).execute();
+   });
+
+   it("should count amount of operations", async () => {
+     const exampleEntry = actions.addFile({
+       documentType: "example",
+       id: "1",
+       name: "example",
+       synchronizationUnits: [],
+     });
+
+     await transmit(
+       [
+         {
+           branch: "main",
+           documentId: "1",
+           driveId: "1",
+           operations: [
+             { ...exampleEntry, timestamp: "1", index: 1, skip: 0, hash: "1" },
+           ],
+           scope: "global",
+           state: {},
+         },
+       ],
+       db
+     );
+
+     const [result] = await db.select().from(exampleTable);
+     expect(result.value).toBe(1);
+   });
+ });
package/dist/codegen/.hygen/templates/powerhouse/generate-processor-operational/transmit.esm.t ADDED
@@ -0,0 +1,69 @@
+ ---
+ to: "<%= rootDir %>/<%= h.changeCase.param(name) %>/src/transmit.ts"
+ force: true
+ ---
+ import { InternalTransmitterUpdate, Listener } from "document-drive";
+ import { eq } from "drizzle-orm";
+ import { PgDatabase } from "drizzle-orm/pg-core";
+ import { exampleTable } from "./db-schema";
+
+ /**
+  * Handles an array of strands of operations.
+  * Gets called if the options filter matches the strand.
+  * @param strands - The strands of operations to handle.
+  * @param db - The database to use.
+  */
+ export async function transmit(
+   strands: InternalTransmitterUpdate[],
+   db: PgDatabase<any, any, any>
+ ) {
+   for (const strand of strands) {
+     await handleStrand(strand, db);
+   }
+ }
+
+ /**
+  * Handles a single strand of operations.
+  * @param strand - The strand of operations to handle.
+  * @param db - The database to use.
+  */
+ async function handleStrand(
+   strand: InternalTransmitterUpdate,
+   db: PgDatabase<any, any, any>
+ ) {
+   // reset db if first operation
+   if (isFirstOperation(strand)) {
+     await resetDb(db);
+   }
+
+   for (const op of strand.operations) {
+     // get first entry
+     const [entry] = await db
+       .select()
+       .from(exampleTable)
+       .where(eq(exampleTable.id, strand.documentId));
+
+     if (!entry) {
+       // insert new entry if not exists
+       await db.insert(exampleTable).values({ id: strand.documentId, value: 1 });
+     } else {
+       // update existing entry
+       await db
+         .update(exampleTable)
+         .set({ value: entry.value + 1 })
+         .where(eq(exampleTable.id, strand.documentId));
+     }
+   }
+ }
+
+ function isFirstOperation(strand: InternalTransmitterUpdate) {
+   const [firstOperation] = strand.operations;
+   if (firstOperation.index === 0) {
+     return true;
+   }
+   return false;
+ }
+
+ async function resetDb(db: PgDatabase<any, any, any>) {
+   await db.delete(exampleTable);
+ }
package/dist/codegen/hygen.js CHANGED
@@ -104,7 +104,27 @@ async function generateEditor(name, documentTypes, documentTypesMap, dir, docume
      { skipFormat }
    );
  }
+ async function generateProcessor(name, documentTypes, documentTypesMap, dir, documentModelsDir, type = "analytics", { skipFormat = false } = {}) {
+   const processorType = type === "analytics" ? "analytics" : "operational";
+   await run(
+     [
+       "powerhouse",
+       `generate-processor-${processorType}`,
+       "--name",
+       name,
+       "--root-dir",
+       dir,
+       "--document-types",
+       documentTypes.join(","),
+       "--document-types-map",
+       JSON.stringify(documentTypesMap),
+       "--document-models-dir",
+       documentModelsDir
+     ],
+     { skipFormat }
+   );
+ }
 
- export { generateAll, generateDocumentModel, generateEditor };
+ export { generateAll, generateDocumentModel, generateEditor, generateProcessor };
  //# sourceMappingURL=hygen.js.map
  //# sourceMappingURL=hygen.js.map
package/dist/codegen/hygen.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../src/codegen/hygen.ts"],"names":["require"],"mappings":";;;;;;;AAQA,MAAMA,QAAAA,GAAU,aAAc,CAAA,MAAA,CAAA,IAAA,CAAY,GAAG,CAAA;AAE7C,MAAM,SAAA,GACJ,YAAY,OAAW,IAAA,IAAA,CAAK,QAAQ,aAAc,CAAA,MAAA,CAAA,IAAA,CAAY,GAAG,CAAC,CAAA;AACpE,MAAM,SAAS,IAAI,MAAA,CAAO,QAAQ,GAAI,CAAA,IAAA,CAAK,OAAO,CAAC,CAAA;AACnD,MAAM,gBAAmB,GAAA,IAAA,CAAK,IAAK,CAAA,SAAA,EAAW,UAAU,WAAW,CAAA;AAGnE,eAAe,GAAA,CAAI,MAAgB,EAAE,KAAA,GAAQ,OAAO,UAAa,GAAA,KAAA,EAAU,GAAA,EAAI,EAAA;AAC7E,EAAM,MAAA,MAAA,GAAS,MAAM,MAAA,CAAO,IAAM,EAAA;AAAA,IAChC,SAAW,EAAA,gBAAA;AAAA,IACX,GAAA,EAAK,QAAQ,GAAI,EAAA;AAAA,IACjB,MAAA;AAAA,IACA,gBAAgB,MAAM;AAEpB,MAAA,OAAOA,SAAQ,UAAU,CAAA;AAAA,KAC3B;AAAA,IACA,IAAA,EAAM,CAAC,MAAA,EAAQ,IAAS,KAAA;AACtB,MAAM,MAAA,IAAA,GAAO,QAAQ,IAAK,CAAA,MAAA,GAAS,IAAI,EAAE,KAAA,EAAO,IAAK,EAAA,GAAI,EAAC;AAE1D,MAAA,OAAOA,QAAQ,CAAA,OAAO,CAAE,CAAA,KAAA,CAAM,QAAQ,IAAI,CAAA;AAAA,KAC5C;AAAA,IACA,KAAO,EAAA,CAAC,CAAC,OAAA,CAAQ,GAAI,CAAA;AAAA,GACtB,CAAA;AACD,EAAA,IAAI,CAAC,UAAY,EAAA;AACf,IAAM,MAAA,KAAA,GAAQ,MAAM,OAAO,OAAO,CAAA;AAClC,IAAA,MAAM,UAAU,MAAO,CAAA,OAAA;AACvB,IAAA,OAAA,CACG,MAAO,CAAA,CAAC,MAAW,KAAA,CAAC,SAAS,QAAQ,CAAA,CAAE,QAAS,CAAA,MAAA,CAAO,MAAM,CAAC,CAC9D,CAAA,OAAA,CAAQ,CAAC,MAAW,KAAA;AACnB,MAAM,KAAA,CAAA,CAAA,CAAA,+BAAA,EAAmC,OAAO,OAAQ,CAAA,OAAA;AAAA,QACtD,GAAA;AAAA,QACA,QAAQ,GAAI;AAAA,OACb,CAAA,CAAA,CAAG,KAAM,CAAA,CAAC,GAAiB,KAAA;AAC1B,QAAA,OAAA,CAAQ,IAAI,GAAG,CAAA;AAAA,OAChB,CAAA;AAAA,KACF,CAAA;AAAA;AAGL,EAAO,OAAA,MAAA;AACT;AAEA,eAAsB,WAAA,CACpB,KACA,EAAE,KAAA,GAAQ,OAAO,UAAa,GAAA,KAAA,EAAU,GAAA,EACxC,EAAA;AACA,EAAA,MAAM,QAAQ,EAAG,CAAA,WAAA,CAAY,KAAK,EAAE,aAAA,EAAe,MAAM,CAAA;AACzD,EAAW,KAAA,MAAA,SAAA,IAAa,MAAM,MAAO,CAAA,CAAC,MAAM,CAAE,CAAA,WAAA,EAAa,CAAG,EAAA;AAC5D,IAAA,MAAM,oBAAoB,IAAK,CAAA,IAAA;AAAA,MAC7B,GAAA;AAAA,MACA,SAAU,CAAA,IAAA;AAAA,MACV,CAAA,EAAG,UAAU,IAAI,CAAA,KAAA;AAAA,KACnB;AACA,IAAA,IAAI,CAAC,EAAA,CAAG,UAAW,CAAA,iBAAiB,CAAG,EAAA;AACrC,MAAA;AAAA;AAGF,IAAI,IAAA;AACF,MAAM,MAAA,aAAA,GAAgB,MAAM,iBAAA,CAAkB,iBAAiB,CAAA;AAC/D,MAAA,MAAM,sBAAsB,aAAe,EAAA,GAAA,EAAK,EAAE,KAAA,EAAO,YAAY,CAAA;AAAA,aAC9D,KAAO,EAAA;AACd,MAAQ,OAAA,CAAA,KAAA,CAAM,SAAU,CAAA,IAAA,EAAM,KAAK,CAAA;AAAA;AACrC;AAEJ;AAEA,eAAsB,qBAAA,CACpB,aACA,EAAA,GAAA,EACA,EAAE,KAAA,GAAQ,OAAO,UAAa,GAAA,KAAA,EAAU,GAAA,EACxC,EAAA;AAEA,EAAM,MAAA,GAAA;AAAA,IACJ;AAAA,MACE,YAAA;AAAA,MACA,yBAAA;AAAA,MACA,kBAAA;AAAA,MACA,IAAA,CAAK,UAAU,aAAa,CAAA;AAAA,MAC5B,YAAA;AAAA,MACA;AAAA,KACF;AAAA,IACA,EAAE,OAAO,UAAW;AAAA,GACtB;AAGA,EAAA,MAAM,aACJ,aAAc,CAAA,cAAA,CAAe,aAAc,CAAA,cAAA,CAAe,SAAS,CAAC,CAAA;AACtE,EAAW,KAAA,MAAA,MAAA,IAAU,WAAW,OAAS,EAAA;AACvC,IAAM,MAAA,GAAA;AAAA,MACJ;AAAA,QACE,YAAA;AAAA,QACA,gCAAA;AAAA,QACA,kBAAA;AAAA,QACA,IAAA,CAAK,UAAU,aAAa,CAAA;AAAA,QAC5B,YAAA;AAAA,QACA,GAAA;AAAA,QACA,UAAA;AAAA,QACA,MAAO,CAAA;AAAA,OACT;AAAA,MACA,EAAE,OAAO,UAAW;AAAA,KACtB;AAAA;AAEJ;AAEA,eAAsB,cAAA,CACpB,IACA,EAAA,aAAA,EACA,gBACA,EAAA,GAAA,EACA,iBACA,EAAA,EAAE,UAAa,GAAA,KAAA,EAAU,GAAA,EACzB,EAAA;AAEA,EAAM,MAAA,GAAA;AAAA,IACJ;AAAA,MACE,YAAA;AAAA,MACA,iBAAA;AAAA,MACA,QAAA;AAAA,MACA,IAAA;AAAA,MACA,YAAA;AAAA,MACA,GAAA;AAAA,MACA,kBAAA;AAAA,MACA,aAAA,CAAc,KAAK,GAAG,CAAA;AAAA,MACtB,sBAAA;AAAA,MACA,IAAA,CAAK,UAAU,gBAAgB,CAAA;AAAA,MAC/B,uBAAA;AAAA,MACA;AAAA,KACF;AAAA,IACA,EAAE,UAAW;AAAA,GACf;AACF","file":"hygen.js","sourcesContent":["import fs from \"node:fs\";\nimport { createRequire } from \"node:module\";\nimport path from \"node:path\";\nimport { fileURLToPath } from \"node:url\";\nimport { Logger, runner } from \"hygen\";\nimport { DocumentModel } from \"document-model\";\nimport { loadDocumentModel } from \"./utils\";\n\nconst require = 
createRequire(import.meta.url);\n\nconst __dirname =\n import.meta.dirname || path.dirname(fileURLToPath(import.meta.url));\nconst logger = new Logger(console.log.bind(console));\nconst defaultTemplates = path.join(__dirname, \".hygen\", \"templates\");\n\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nasync function run(args: string[], { watch = false, skipFormat = false } = {}) {\n const result = await runner(args, {\n templates: defaultTemplates,\n cwd: process.cwd(),\n logger,\n createPrompter: () => {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-return\n return require(\"enquirer\");\n },\n exec: (action, body) => {\n const opts = body && body.length > 0 ? { input: body } : {};\n // eslint-disable-next-line @typescript-eslint/no-unsafe-return, @typescript-eslint/no-unsafe-call, @typescript-eslint/no-unsafe-member-access\n return require(\"execa\").shell(action, opts);\n },\n debug: !!process.env.DEBUG,\n });\n if (!skipFormat) {\n const execa = await import(\"execa\");\n const actions = result.actions as { status: string; subject: string }[];\n actions\n .filter((action) => [\"added\", \"inject\"].includes(action.status))\n .forEach((action) => {\n execa.$`prettier --ignore-path --write ${action.subject.replace(\n \".\",\n process.cwd(),\n )}`.catch((err: unknown) => {\n console.log(err);\n });\n });\n }\n\n return result;\n}\n\nexport async function generateAll(\n dir: string,\n { watch = false, skipFormat = false } = {},\n) {\n const files = fs.readdirSync(dir, { withFileTypes: true });\n for (const directory of files.filter((f) => f.isDirectory())) {\n const documentModelPath = path.join(\n dir,\n directory.name,\n `${directory.name}.json`,\n );\n if (!fs.existsSync(documentModelPath)) {\n continue;\n }\n\n try {\n const documentModel = await loadDocumentModel(documentModelPath);\n await generateDocumentModel(documentModel, dir, { watch, skipFormat });\n } catch (error) {\n console.error(directory.name, error);\n }\n }\n}\n\nexport async function generateDocumentModel(\n documentModel: DocumentModel.DocumentModelState,\n dir: string,\n { watch = false, skipFormat = false } = {},\n) {\n // Generate the singular files for the document model logic\n await run(\n [\n \"powerhouse\",\n \"generate-document-model\",\n \"--document-model\",\n JSON.stringify(documentModel),\n \"--root-dir\",\n dir,\n ],\n { watch, skipFormat },\n );\n\n // Generate the module-specific files for the document model logic\n const latestSpec =\n documentModel.specifications[documentModel.specifications.length - 1];\n for (const module of latestSpec.modules) {\n await run(\n [\n \"powerhouse\",\n \"generate-document-model-module\",\n \"--document-model\",\n JSON.stringify(documentModel),\n \"--root-dir\",\n dir,\n \"--module\",\n module.name,\n ],\n { watch, skipFormat },\n );\n }\n}\n\nexport async function generateEditor(\n name: string,\n documentTypes: string[],\n documentTypesMap: Record<string, string>,\n dir: string,\n documentModelsDir: string,\n { skipFormat = false } = {},\n) {\n // Generate the singular files for the document model logic\n await run(\n [\n \"powerhouse\",\n \"generate-editor\",\n \"--name\",\n name,\n \"--root-dir\",\n dir,\n \"--document-types\",\n documentTypes.join(\",\"),\n \"--document-types-map\",\n JSON.stringify(documentTypesMap),\n \"--document-models-dir\",\n documentModelsDir,\n ],\n { skipFormat },\n );\n}\n"]}
+ {"version":3,"sources":["../../src/codegen/hygen.ts"],"names":["require"],"mappings":";;;;;;;AAQA,MAAMA,QAAAA,GAAU,aAAc,CAAA,MAAA,CAAA,IAAA,CAAY,GAAG,CAAA;AAE7C,MAAM,SAAA,GACJ,YAAY,OAAW,IAAA,IAAA,CAAK,QAAQ,aAAc,CAAA,MAAA,CAAA,IAAA,CAAY,GAAG,CAAC,CAAA;AACpE,MAAM,SAAS,IAAI,MAAA,CAAO,QAAQ,GAAI,CAAA,IAAA,CAAK,OAAO,CAAC,CAAA;AACnD,MAAM,gBAAmB,GAAA,IAAA,CAAK,IAAK,CAAA,SAAA,EAAW,UAAU,WAAW,CAAA;AAGnE,eAAe,GAAA,CAAI,MAAgB,EAAE,KAAA,GAAQ,OAAO,UAAa,GAAA,KAAA,EAAU,GAAA,EAAI,EAAA;AAC7E,EAAM,MAAA,MAAA,GAAS,MAAM,MAAA,CAAO,IAAM,EAAA;AAAA,IAChC,SAAW,EAAA,gBAAA;AAAA,IACX,GAAA,EAAK,QAAQ,GAAI,EAAA;AAAA,IACjB,MAAA;AAAA,IACA,gBAAgB,MAAM;AAEpB,MAAA,OAAOA,SAAQ,UAAU,CAAA;AAAA,KAC3B;AAAA,IACA,IAAA,EAAM,CAAC,MAAA,EAAQ,IAAS,KAAA;AACtB,MAAM,MAAA,IAAA,GAAO,QAAQ,IAAK,CAAA,MAAA,GAAS,IAAI,EAAE,KAAA,EAAO,IAAK,EAAA,GAAI,EAAC;AAE1D,MAAA,OAAOA,QAAQ,CAAA,OAAO,CAAE,CAAA,KAAA,CAAM,QAAQ,IAAI,CAAA;AAAA,KAC5C;AAAA,IACA,KAAO,EAAA,CAAC,CAAC,OAAA,CAAQ,GAAI,CAAA;AAAA,GACtB,CAAA;AACD,EAAA,IAAI,CAAC,UAAY,EAAA;AACf,IAAM,MAAA,KAAA,GAAQ,MAAM,OAAO,OAAO,CAAA;AAClC,IAAA,MAAM,UAAU,MAAO,CAAA,OAAA;AACvB,IAAA,OAAA,CACG,MAAO,CAAA,CAAC,MAAW,KAAA,CAAC,SAAS,QAAQ,CAAA,CAAE,QAAS,CAAA,MAAA,CAAO,MAAM,CAAC,CAC9D,CAAA,OAAA,CAAQ,CAAC,MAAW,KAAA;AACnB,MAAM,KAAA,CAAA,CAAA,CAAA,+BAAA,EAAmC,OAAO,OAAQ,CAAA,OAAA;AAAA,QACtD,GAAA;AAAA,QACA,QAAQ,GAAI;AAAA,OACb,CAAA,CAAA,CAAG,KAAM,CAAA,CAAC,GAAiB,KAAA;AAC1B,QAAA,OAAA,CAAQ,IAAI,GAAG,CAAA;AAAA,OAChB,CAAA;AAAA,KACF,CAAA;AAAA;AAGL,EAAO,OAAA,MAAA;AACT;AAEA,eAAsB,WAAA,CACpB,KACA,EAAE,KAAA,GAAQ,OAAO,UAAa,GAAA,KAAA,EAAU,GAAA,EACxC,EAAA;AACA,EAAA,MAAM,QAAQ,EAAG,CAAA,WAAA,CAAY,KAAK,EAAE,aAAA,EAAe,MAAM,CAAA;AACzD,EAAW,KAAA,MAAA,SAAA,IAAa,MAAM,MAAO,CAAA,CAAC,MAAM,CAAE,CAAA,WAAA,EAAa,CAAG,EAAA;AAC5D,IAAA,MAAM,oBAAoB,IAAK,CAAA,IAAA;AAAA,MAC7B,GAAA;AAAA,MACA,SAAU,CAAA,IAAA;AAAA,MACV,CAAA,EAAG,UAAU,IAAI,CAAA,KAAA;AAAA,KACnB;AACA,IAAA,IAAI,CAAC,EAAA,CAAG,UAAW,CAAA,iBAAiB,CAAG,EAAA;AACrC,MAAA;AAAA;AAGF,IAAI,IAAA;AACF,MAAM,MAAA,aAAA,GAAgB,MAAM,iBAAA,CAAkB,iBAAiB,CAAA;AAC/D,MAAA,MAAM,sBAAsB,aAAe,EAAA,GAAA,EAAK,EAAE,KAAA,EAAO,YAAY,CAAA;AAAA,aAC9D,KAAO,EAAA;AACd,MAAQ,OAAA,CAAA,KAAA,CAAM,SAAU,CAAA,IAAA,EAAM,KAAK,CAAA;AAAA;AACrC;AAEJ;AAEA,eAAsB,qBAAA,CACpB,aACA,EAAA,GAAA,EACA,EAAE,KAAA,GAAQ,OAAO,UAAa,GAAA,KAAA,EAAU,GAAA,EACxC,EAAA;AAEA,EAAM,MAAA,GAAA;AAAA,IACJ;AAAA,MACE,YAAA;AAAA,MACA,yBAAA;AAAA,MACA,kBAAA;AAAA,MACA,IAAA,CAAK,UAAU,aAAa,CAAA;AAAA,MAC5B,YAAA;AAAA,MACA;AAAA,KACF;AAAA,IACA,EAAE,OAAO,UAAW;AAAA,GACtB;AAGA,EAAA,MAAM,aACJ,aAAc,CAAA,cAAA,CAAe,aAAc,CAAA,cAAA,CAAe,SAAS,CAAC,CAAA;AACtE,EAAW,KAAA,MAAA,MAAA,IAAU,WAAW,OAAS,EAAA;AACvC,IAAM,MAAA,GAAA;AAAA,MACJ;AAAA,QACE,YAAA;AAAA,QACA,gCAAA;AAAA,QACA,kBAAA;AAAA,QACA,IAAA,CAAK,UAAU,aAAa,CAAA;AAAA,QAC5B,YAAA;AAAA,QACA,GAAA;AAAA,QACA,UAAA;AAAA,QACA,MAAO,CAAA;AAAA,OACT;AAAA,MACA,EAAE,OAAO,UAAW;AAAA,KACtB;AAAA;AAEJ;AAEA,eAAsB,cAAA,CACpB,IACA,EAAA,aAAA,EACA,gBACA,EAAA,GAAA,EACA,iBACA,EAAA,EAAE,UAAa,GAAA,KAAA,EAAU,GAAA,EACzB,EAAA;AAEA,EAAM,MAAA,GAAA;AAAA,IACJ;AAAA,MACE,YAAA;AAAA,MACA,iBAAA;AAAA,MACA,QAAA;AAAA,MACA,IAAA;AAAA,MACA,YAAA;AAAA,MACA,GAAA;AAAA,MACA,kBAAA;AAAA,MACA,aAAA,CAAc,KAAK,GAAG,CAAA;AAAA,MACtB,sBAAA;AAAA,MACA,IAAA,CAAK,UAAU,gBAAgB,CAAA;AAAA,MAC/B,uBAAA;AAAA,MACA;AAAA,KACF;AAAA,IACA,EAAE,UAAW;AAAA,GACf;AACF;AAEA,eAAsB,iBACpB,CAAA,IAAA,EACA,aACA,EAAA,gBAAA,EACA,GACA,EAAA,iBAAA,EACA,IAAO,GAAA,WAAA,EACP,EAAE,UAAA,GAAa,KAAM,EAAA,GAAI,EACzB,EAAA;AAEA,EAAM,MAAA,aAAA,GAAgB,IAAS,KAAA,WAAA,GAAc,WAAc,GAAA,aAAA;AAE3D,EAAM,MAAA,GAAA;AAAA,IACJ;AAAA,MACE,YAAA;AAAA,MACA,sBAAsB,aAAa,CAAA,CAAA;AAAA,MACnC,QAAA;AAAA,MACA,IAAA;AAAA,MACA,YAAA;AAAA,MACA,GAAA;AAAA,MACA,kBAAA;AAAA,MACA,aAAA,CAAc,KAAK,GAAG,C
AAA;AAAA,MACtB,sBAAA;AAAA,MACA,IAAA,CAAK,UAAU,gBAAgB,CAAA;AAAA,MAC/B,uBAAA;AAAA,MACA;AAAA,KACF;AAAA,IACA,EAAE,UAAW;AAAA,GACf;AACF","file":"hygen.js","sourcesContent":["import fs from \"node:fs\";\nimport { createRequire } from \"node:module\";\nimport path from \"node:path\";\nimport { fileURLToPath } from \"node:url\";\nimport { Logger, runner } from \"hygen\";\nimport { DocumentModel } from \"document-model\";\nimport { loadDocumentModel } from \"./utils\";\n\nconst require = createRequire(import.meta.url);\n\nconst __dirname =\n import.meta.dirname || path.dirname(fileURLToPath(import.meta.url));\nconst logger = new Logger(console.log.bind(console));\nconst defaultTemplates = path.join(__dirname, \".hygen\", \"templates\");\n\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nasync function run(args: string[], { watch = false, skipFormat = false } = {}) {\n const result = await runner(args, {\n templates: defaultTemplates,\n cwd: process.cwd(),\n logger,\n createPrompter: () => {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-return\n return require(\"enquirer\");\n },\n exec: (action, body) => {\n const opts = body && body.length > 0 ? { input: body } : {};\n // eslint-disable-next-line @typescript-eslint/no-unsafe-return, @typescript-eslint/no-unsafe-call, @typescript-eslint/no-unsafe-member-access\n return require(\"execa\").shell(action, opts);\n },\n debug: !!process.env.DEBUG,\n });\n if (!skipFormat) {\n const execa = await import(\"execa\");\n const actions = result.actions as { status: string; subject: string }[];\n actions\n .filter((action) => [\"added\", \"inject\"].includes(action.status))\n .forEach((action) => {\n execa.$`prettier --ignore-path --write ${action.subject.replace(\n \".\",\n process.cwd(),\n )}`.catch((err: unknown) => {\n console.log(err);\n });\n });\n }\n\n return result;\n}\n\nexport async function generateAll(\n dir: string,\n { watch = false, skipFormat = false } = {},\n) {\n const files = fs.readdirSync(dir, { withFileTypes: true });\n for (const directory of files.filter((f) => f.isDirectory())) {\n const documentModelPath = path.join(\n dir,\n directory.name,\n `${directory.name}.json`,\n );\n if (!fs.existsSync(documentModelPath)) {\n continue;\n }\n\n try {\n const documentModel = await loadDocumentModel(documentModelPath);\n await generateDocumentModel(documentModel, dir, { watch, skipFormat });\n } catch (error) {\n console.error(directory.name, error);\n }\n }\n}\n\nexport async function generateDocumentModel(\n documentModel: DocumentModel.DocumentModelState,\n dir: string,\n { watch = false, skipFormat = false } = {},\n) {\n // Generate the singular files for the document model logic\n await run(\n [\n \"powerhouse\",\n \"generate-document-model\",\n \"--document-model\",\n JSON.stringify(documentModel),\n \"--root-dir\",\n dir,\n ],\n { watch, skipFormat },\n );\n\n // Generate the module-specific files for the document model logic\n const latestSpec =\n documentModel.specifications[documentModel.specifications.length - 1];\n for (const module of latestSpec.modules) {\n await run(\n [\n \"powerhouse\",\n \"generate-document-model-module\",\n \"--document-model\",\n JSON.stringify(documentModel),\n \"--root-dir\",\n dir,\n \"--module\",\n module.name,\n ],\n { watch, skipFormat },\n );\n }\n}\n\nexport async function generateEditor(\n name: string,\n documentTypes: string[],\n documentTypesMap: Record<string, string>,\n dir: string,\n documentModelsDir: string,\n { skipFormat = false } = {},\n) {\n // Generate the 
singular files for the document model logic\n await run(\n [\n \"powerhouse\",\n \"generate-editor\",\n \"--name\",\n name,\n \"--root-dir\",\n dir,\n \"--document-types\",\n documentTypes.join(\",\"),\n \"--document-types-map\",\n JSON.stringify(documentTypesMap),\n \"--document-models-dir\",\n documentModelsDir,\n ],\n { skipFormat },\n );\n}\n\nexport async function generateProcessor(\n name: string,\n documentTypes: string[],\n documentTypesMap: Record<string, string>,\n dir: string,\n documentModelsDir: string,\n type = \"analytics\",\n { skipFormat = false } = {},\n) {\n // Generate the singular files for the document model logic\n const processorType = type === \"analytics\" ? \"analytics\" : \"operational\";\n\n await run(\n [\n \"powerhouse\",\n `generate-processor-${processorType}`,\n \"--name\",\n name,\n \"--root-dir\",\n dir,\n \"--document-types\",\n documentTypes.join(\",\"),\n \"--document-types-map\",\n JSON.stringify(documentTypesMap),\n \"--document-models-dir\",\n documentModelsDir,\n ],\n { skipFormat },\n );\n}\n"]}
package/dist/codegen/index.js CHANGED
@@ -1,6 +1,6 @@
  #! /usr/bin/env node
  import { typeDefs } from '@powerhousedao/scalars';
- import { generateAll, generateDocumentModel, generateEditor as generateEditor$1 } from './hygen.js';
+ import { generateAll, generateDocumentModel, generateEditor as generateEditor$1, generateProcessor as generateProcessor$1 } from './hygen.js';
  import { generateSchemas, generateSchema } from './graphql.js';
  import fs from 'node:fs';
  import { join, resolve } from 'path';
@@ -90,7 +90,26 @@ async function generateEditor(name, documentTypes, config) {
      { skipFormat }
    );
  }
+ async function generateProcessor(name, type, documentTypes, config) {
+   const { documentModelsDir, skipFormat } = config;
+   const docummentTypesMap = getDocumentTypesMap(documentModelsDir);
+   const invalidType = documentTypes.find(
+     (type2) => !Object.keys(docummentTypesMap).includes(type2)
+   );
+   if (invalidType) {
+     throw new Error(`Document model for ${invalidType} not found`);
+   }
+   return generateProcessor$1(
+     name,
+     documentTypes,
+     docummentTypesMap,
+     config.processorsDir,
+     config.documentModelsDir,
+     type,
+     { skipFormat }
+   );
+ }
 
- export { generate, generateEditor, generateFromFile };
+ export { generate, generateEditor, generateFromFile, generateProcessor };
  //# sourceMappingURL=index.js.map
  //# sourceMappingURL=index.js.map
package/dist/codegen/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../src/codegen/index.ts"],"names":["_generateEditor"],"mappings":";;;;;;;;;AAeA,SAAS,sBAAsB,aAAmC,EAAA;AAChE,EAAA,MAAM,OACJ,aAAc,CAAA,cAAA,CAAe,aAAc,CAAA,cAAA,CAAe,SAAS,CAAC,CAAA;AAGtE,EAAA,IAAI,CAAC,IAAM,EAAA;AACT,IAAA,MAAM,IAAI,KAAA,CAAM,CAAqB,kBAAA,EAAA,aAAA,CAAc,EAAE,CAAE,CAAA,CAAA;AAAA;AAGzD,EAAM,MAAA;AAAA,IACJ,OAAA;AAAA,IACA,KAAA,EAAO,EAAE,MAAA,EAAQ,KAAM;AAAA,GACrB,GAAA,IAAA;AACJ,EAAA,MAAM,OAAU,GAAA;AAAA,IACd,MAAO,CAAA,MAAA;AAAA,IACP,KAAM,CAAA,MAAA;AAAA,IACN,GAAG,OAAA,CACA,GAAI,CAAA,CAAC,MAAW,KAAA;AAAA,MACf,CAAA,EAAA,EAAK,OAAO,IAAI,CAAA,CAAA;AAAA,MAChB,GAAG,MAAO,CAAA,UAAA,CAAW,IAAI,CAAC,EAAA,KAAO,GAAG,MAAM;AAAA,KAC3C,CACA,CAAA,IAAA,EACA,CAAA,MAAA,CAAO,CAAC,MAAW,KAAA,MAAA,IAAU,MAAO,CAAA,MAAA,GAAS,CAAC;AAAA,GACnD;AACA,EAAO,OAAA,OAAA,CAAQ,KAAK,MAAM,CAAA;AAC5B;AAEA,SAAS,oBAAoB,GAAa,EAAA;AACxC,EAAA,MAAM,mBAA2C,EAAC;AAClD,EAAG,EAAA,CAAA,WAAA,CAAY,KAAK,EAAE,aAAA,EAAe,MAAM,CAAA,CACxC,MAAO,CAAA,CAAC,MAAW,KAAA,MAAA,CAAO,aAAa,CAAA,CACvC,IAAI,CAAC,MAAA,KAAW,OAAO,IAAI,CAAA,CAC3B,OAAQ,CAAA,CAAC,IAAS,KAAA;AACjB,IAAA,MAAM,WAAW,OAAQ,CAAA,GAAA,EAAK,IAAM,EAAA,CAAA,EAAG,IAAI,CAAO,KAAA,CAAA,CAAA;AAClD,IAAA,IAAI,CAAC,EAAA,CAAG,UAAW,CAAA,QAAQ,CAAG,EAAA;AAC5B,MAAA;AAAA;AAGF,IAAA,MAAM,OAAU,GAAA,EAAA,CAAG,YAAa,CAAA,QAAA,EAAU,OAAO,CAAA;AACjD,IAAI,IAAA;AACF,MAAM,MAAA,IAAA,GAAO,IAAK,CAAA,KAAA,CAAM,OAAO,CAAA;AAC/B,MAAA,IAAI,KAAK,EAAI,EAAA;AACX,QAAA,gBAAA,CAAiB,IAAK,CAAA,EAAE,CAAI,GAAA,UAAA,CAAW,IAAI,CAAA;AAAA;AAC7C,KACM,CAAA,MAAA;AACN,MAAQ,OAAA,CAAA,KAAA,CAAM,CAAmB,gBAAA,EAAA,QAAQ,CAAE,CAAA,CAAA;AAAA;AAC7C,GACD,CAAA;AACH,EAAO,OAAA,gBAAA;AACT;AAEA,eAAsB,SAAS,MAA0B,EAAA;AACvD,EAAM,MAAA,EAAE,UAAY,EAAA,KAAA,EAAU,GAAA,MAAA;AAC9B,EAAA,MAAM,gBAAgB,MAAO,CAAA,iBAAA,EAAmB,EAAE,UAAA,EAAY,OAAO,CAAA;AACrE,EAAA,MAAM,YAAY,MAAO,CAAA,iBAAA,EAAmB,EAAE,UAAA,EAAY,OAAO,CAAA;AACnE;AAEA,eAAsB,gBAAA,CAAiB,MAAc,MAA0B,EAAA;AAE7E,EAAM,MAAA,aAAA,GAAgB,MAAM,iBAAA,CAAkB,IAAI,CAAA;AAElD,EAAM,MAAA,IAAA,GAAO,SAAU,CAAA,aAAA,CAAc,IAAI,CAAA;AAGzC,EAAG,EAAA,CAAA,SAAA,CAAU,KAAK,MAAO,CAAA,iBAAA,EAAmB,IAAI,CAAG,EAAA,EAAE,SAAW,EAAA,IAAA,EAAM,CAAA;AACtE,EAAG,EAAA,CAAA,aAAA;AAAA,IACD,KAAK,MAAO,CAAA,iBAAA,EAAmB,IAAM,EAAA,CAAA,EAAG,IAAI,CAAO,KAAA,CAAA,CAAA;AAAA,IACnD,IAAK,CAAA,SAAA,CAAU,aAAe,EAAA,IAAA,EAAM,CAAC;AAAA,GACvC;AAGA,EAAA,MAAM,SAAY,GAAA;AAAA,IAChB,QAAA,CAAS,KAAK,IAAI,CAAA;AAAA;AAAA,IAClB,sBAAsB,aAAa;AAAA,GACrC,CAAE,KAAK,IAAI,CAAA;AAEX,EAAA,IAAI,SAAW,EAAA;AACb,IAAG,EAAA,CAAA,aAAA;AAAA,MACD,IAAK,CAAA,MAAA,CAAO,iBAAmB,EAAA,IAAA,EAAM,CAAgB,cAAA,CAAA,CAAA;AAAA,MACrD;AAAA,KACF;AAAA;AAGF,EAAA,MAAM,cAAe,CAAA,IAAA,EAAM,MAAO,CAAA,iBAAA,EAAmB,MAAM,CAAA;AAC3D,EAAA,MAAM,qBAAsB,CAAA,aAAA,EAAe,MAAO,CAAA,iBAAA,EAAmB,MAAM,CAAA;AAC7E;AAEA,eAAsB,cAAA,CACpB,IACA,EAAA,aAAA,EACA,MACA,EAAA;AACA,EAAM,MAAA,EAAE,iBAAmB,EAAA,UAAA,EAAe,GAAA,MAAA;AAC1C,EAAM,MAAA,iBAAA,GAAoB,oBAAoB,iBAAiB,CAAA;AAE/D,EAAA,MAAM,cAAc,aAAc,CAAA,IAAA;AAAA,IAChC,CAAC,SAAS,CAAC,MAAA,CAAO,KAAK,iBAAiB,CAAA,CAAE,SAAS,IAAI;AAAA,GACzD;AACA,EAAA,IAAI,WAAa,EAAA;AACf,IAAA,MAAM,IAAI,KAAA,CAAM,CAAsB,mBAAA,EAAA,WAAW,CAAY,UAAA,CAAA,CAAA;AAAA;AAE/D,EAAO,OAAAA,gBAAA;AAAA,IACL,IAAA;AAAA,IACA,aAAA;AAAA,IACA,iBAAA;AAAA,IACA,MAAO,CAAA,UAAA;AAAA,IACP,MAAO,CAAA,iBAAA;AAAA,IACP,EAAE,UAAW;AAAA,GACf;AACF","file":"index.js","sourcesContent":["#! 
/usr/bin/env node\nimport { DocumentModelState } from \"document-model/document-model\";\nimport { typeDefs } from \"@powerhousedao/scalars\";\nimport {\n generateAll,\n generateEditor as _generateEditor,\n generateDocumentModel,\n} from \"./hygen\";\nimport { generateSchemas, generateSchema } from \"./graphql\";\nimport type { PowerhouseConfig } from \"../utils/index\";\nimport fs from \"node:fs\";\nimport { join, resolve } from \"path\";\nimport { paramCase, pascalCase } from \"change-case\";\nimport { loadDocumentModel } from \"./utils\";\n\nfunction generateGraphqlSchema(documentModel: DocumentModelState) {\n const spec =\n documentModel.specifications[documentModel.specifications.length - 1];\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!spec) {\n throw new Error(`No spec found for ${documentModel.id}`);\n }\n\n const {\n modules,\n state: { global, local },\n } = spec;\n const schemas = [\n global.schema,\n local.schema,\n ...modules\n .map((module) => [\n `# ${module.name}`,\n ...module.operations.map((op) => op.schema),\n ])\n .flat()\n .filter((schema) => schema && schema.length > 0),\n ];\n return schemas.join(\"\\n\\n\");\n}\n\nfunction getDocumentTypesMap(dir: string) {\n const documentTypesMap: Record<string, string> = {};\n fs.readdirSync(dir, { withFileTypes: true })\n .filter((dirent) => dirent.isDirectory())\n .map((dirent) => dirent.name)\n .forEach((name) => {\n const specPath = resolve(dir, name, `${name}.json`);\n if (!fs.existsSync(specPath)) {\n return;\n }\n\n const specRaw = fs.readFileSync(specPath, \"utf-8\");\n try {\n const spec = JSON.parse(specRaw) as DocumentModelState;\n if (spec.id) {\n documentTypesMap[spec.id] = pascalCase(name);\n }\n } catch {\n console.error(`Failed to parse ${specPath}`);\n }\n });\n return documentTypesMap;\n}\n\nexport async function generate(config: PowerhouseConfig) {\n const { skipFormat, watch } = config;\n await generateSchemas(config.documentModelsDir, { skipFormat, watch });\n await generateAll(config.documentModelsDir, { skipFormat, watch });\n}\n\nexport async function generateFromFile(path: string, config: PowerhouseConfig) {\n // load document model spec from file\n const documentModel = await loadDocumentModel(path);\n\n const name = paramCase(documentModel.name);\n\n // create document model folder and spec as json\n fs.mkdirSync(join(config.documentModelsDir, name), { recursive: true });\n fs.writeFileSync(\n join(config.documentModelsDir, name, `${name}.json`),\n JSON.stringify(documentModel, null, 4),\n );\n\n // bundle graphql schemas together\n const schemaStr = [\n typeDefs.join(\"\\n\"), // inject ph scalars\n generateGraphqlSchema(documentModel),\n ].join(\"\\n\");\n\n if (schemaStr) {\n fs.writeFileSync(\n join(config.documentModelsDir, name, `schema.graphql`),\n schemaStr,\n );\n }\n\n await generateSchema(name, config.documentModelsDir, config);\n await generateDocumentModel(documentModel, config.documentModelsDir, config);\n}\n\nexport async function generateEditor(\n name: string,\n documentTypes: string[],\n config: PowerhouseConfig,\n) {\n const { documentModelsDir, skipFormat } = config;\n const docummentTypesMap = getDocumentTypesMap(documentModelsDir);\n\n const invalidType = documentTypes.find(\n (type) => !Object.keys(docummentTypesMap).includes(type),\n );\n if (invalidType) {\n throw new Error(`Document model for ${invalidType} not found`);\n }\n return _generateEditor(\n name,\n documentTypes,\n docummentTypesMap,\n config.editorsDir,\n 
config.documentModelsDir,\n { skipFormat },\n );\n}\n"]}
+ {"version":3,"sources":["../../src/codegen/index.ts"],"names":["_generateEditor","type","_generateProcessor"],"mappings":";;;;;;;;;AAgBA,SAAS,sBAAsB,aAAmC,EAAA;AAChE,EAAA,MAAM,OACJ,aAAc,CAAA,cAAA,CAAe,aAAc,CAAA,cAAA,CAAe,SAAS,CAAC,CAAA;AAGtE,EAAA,IAAI,CAAC,IAAM,EAAA;AACT,IAAA,MAAM,IAAI,KAAA,CAAM,CAAqB,kBAAA,EAAA,aAAA,CAAc,EAAE,CAAE,CAAA,CAAA;AAAA;AAGzD,EAAM,MAAA;AAAA,IACJ,OAAA;AAAA,IACA,KAAA,EAAO,EAAE,MAAA,EAAQ,KAAM;AAAA,GACrB,GAAA,IAAA;AACJ,EAAA,MAAM,OAAU,GAAA;AAAA,IACd,MAAO,CAAA,MAAA;AAAA,IACP,KAAM,CAAA,MAAA;AAAA,IACN,GAAG,OAAA,CACA,GAAI,CAAA,CAAC,MAAW,KAAA;AAAA,MACf,CAAA,EAAA,EAAK,OAAO,IAAI,CAAA,CAAA;AAAA,MAChB,GAAG,MAAO,CAAA,UAAA,CAAW,IAAI,CAAC,EAAA,KAAO,GAAG,MAAM;AAAA,KAC3C,CACA,CAAA,IAAA,EACA,CAAA,MAAA,CAAO,CAAC,MAAW,KAAA,MAAA,IAAU,MAAO,CAAA,MAAA,GAAS,CAAC;AAAA,GACnD;AACA,EAAO,OAAA,OAAA,CAAQ,KAAK,MAAM,CAAA;AAC5B;AAEA,SAAS,oBAAoB,GAAa,EAAA;AACxC,EAAA,MAAM,mBAA2C,EAAC;AAClD,EAAG,EAAA,CAAA,WAAA,CAAY,KAAK,EAAE,aAAA,EAAe,MAAM,CAAA,CACxC,MAAO,CAAA,CAAC,MAAW,KAAA,MAAA,CAAO,aAAa,CAAA,CACvC,IAAI,CAAC,MAAA,KAAW,OAAO,IAAI,CAAA,CAC3B,OAAQ,CAAA,CAAC,IAAS,KAAA;AACjB,IAAA,MAAM,WAAW,OAAQ,CAAA,GAAA,EAAK,IAAM,EAAA,CAAA,EAAG,IAAI,CAAO,KAAA,CAAA,CAAA;AAClD,IAAA,IAAI,CAAC,EAAA,CAAG,UAAW,CAAA,QAAQ,CAAG,EAAA;AAC5B,MAAA;AAAA;AAGF,IAAA,MAAM,OAAU,GAAA,EAAA,CAAG,YAAa,CAAA,QAAA,EAAU,OAAO,CAAA;AACjD,IAAI,IAAA;AACF,MAAM,MAAA,IAAA,GAAO,IAAK,CAAA,KAAA,CAAM,OAAO,CAAA;AAC/B,MAAA,IAAI,KAAK,EAAI,EAAA;AACX,QAAA,gBAAA,CAAiB,IAAK,CAAA,EAAE,CAAI,GAAA,UAAA,CAAW,IAAI,CAAA;AAAA;AAC7C,KACM,CAAA,MAAA;AACN,MAAQ,OAAA,CAAA,KAAA,CAAM,CAAmB,gBAAA,EAAA,QAAQ,CAAE,CAAA,CAAA;AAAA;AAC7C,GACD,CAAA;AACH,EAAO,OAAA,gBAAA;AACT;AAEA,eAAsB,SAAS,MAA0B,EAAA;AACvD,EAAM,MAAA,EAAE,UAAY,EAAA,KAAA,EAAU,GAAA,MAAA;AAC9B,EAAA,MAAM,gBAAgB,MAAO,CAAA,iBAAA,EAAmB,EAAE,UAAA,EAAY,OAAO,CAAA;AACrE,EAAA,MAAM,YAAY,MAAO,CAAA,iBAAA,EAAmB,EAAE,UAAA,EAAY,OAAO,CAAA;AACnE;AAEA,eAAsB,gBAAA,CAAiB,MAAc,MAA0B,EAAA;AAE7E,EAAM,MAAA,aAAA,GAAgB,MAAM,iBAAA,CAAkB,IAAI,CAAA;AAElD,EAAM,MAAA,IAAA,GAAO,SAAU,CAAA,aAAA,CAAc,IAAI,CAAA;AAGzC,EAAG,EAAA,CAAA,SAAA,CAAU,KAAK,MAAO,CAAA,iBAAA,EAAmB,IAAI,CAAG,EAAA,EAAE,SAAW,EAAA,IAAA,EAAM,CAAA;AACtE,EAAG,EAAA,CAAA,aAAA;AAAA,IACD,KAAK,MAAO,CAAA,iBAAA,EAAmB,IAAM,EAAA,CAAA,EAAG,IAAI,CAAO,KAAA,CAAA,CAAA;AAAA,IACnD,IAAK,CAAA,SAAA,CAAU,aAAe,EAAA,IAAA,EAAM,CAAC;AAAA,GACvC;AAGA,EAAA,MAAM,SAAY,GAAA;AAAA,IAChB,QAAA,CAAS,KAAK,IAAI,CAAA;AAAA;AAAA,IAClB,sBAAsB,aAAa;AAAA,GACrC,CAAE,KAAK,IAAI,CAAA;AAEX,EAAA,IAAI,SAAW,EAAA;AACb,IAAG,EAAA,CAAA,aAAA;AAAA,MACD,IAAK,CAAA,MAAA,CAAO,iBAAmB,EAAA,IAAA,EAAM,CAAgB,cAAA,CAAA,CAAA;AAAA,MACrD;AAAA,KACF;AAAA;AAGF,EAAA,MAAM,cAAe,CAAA,IAAA,EAAM,MAAO,CAAA,iBAAA,EAAmB,MAAM,CAAA;AAC3D,EAAA,MAAM,qBAAsB,CAAA,aAAA,EAAe,MAAO,CAAA,iBAAA,EAAmB,MAAM,CAAA;AAC7E;AAEA,eAAsB,cAAA,CACpB,IACA,EAAA,aAAA,EACA,MACA,EAAA;AACA,EAAM,MAAA,EAAE,iBAAmB,EAAA,UAAA,EAAe,GAAA,MAAA;AAC1C,EAAM,MAAA,iBAAA,GAAoB,oBAAoB,iBAAiB,CAAA;AAE/D,EAAA,MAAM,cAAc,aAAc,CAAA,IAAA;AAAA,IAChC,CAAC,SAAS,CAAC,MAAA,CAAO,KAAK,iBAAiB,CAAA,CAAE,SAAS,IAAI;AAAA,GACzD;AACA,EAAA,IAAI,WAAa,EAAA;AACf,IAAA,MAAM,IAAI,KAAA,CAAM,CAAsB,mBAAA,EAAA,WAAW,CAAY,UAAA,CAAA,CAAA;AAAA;AAE/D,EAAO,OAAAA,gBAAA;AAAA,IACL,IAAA;AAAA,IACA,aAAA;AAAA,IACA,iBAAA;AAAA,IACA,MAAO,CAAA,UAAA;AAAA,IACP,MAAO,CAAA,iBAAA;AAAA,IACP,EAAE,UAAW;AAAA,GACf;AACF;AAEA,eAAsB,iBACpB,CAAA,IAAA,EACA,IACA,EAAA,aAAA,EACA,MACA,EAAA;AACA,EAAM,MAAA,EAAE,iBAAmB,EAAA,UAAA,EAAe,GAAA,MAAA;AAC1C,EAAM,MAAA,iBAAA,GAAoB,oBAAoB,iBAAiB,CAAA;AAE/D,EAAA,MAAM,cAAc,aAAc,CAAA,IAAA;AAAA,IAChC,CAACC,UAAS,CAAC,MAAA,CAAO,KAAK,iBAAiB,CAAA,CAAE,SAASA,KAAI;AAAA,GACzD;AACA,EAAA,IAAI,WAAa,EAAA;AACf,IAAA,MAAM,IAAI,KAAA,CAAM,CAAsB,mBAAA,EAAA,WAAW,CAAY,UAAA,CA
AA,CAAA;AAAA;AAE/D,EAAO,OAAAC,mBAAA;AAAA,IACL,IAAA;AAAA,IACA,aAAA;AAAA,IACA,iBAAA;AAAA,IACA,MAAO,CAAA,aAAA;AAAA,IACP,MAAO,CAAA,iBAAA;AAAA,IACP,IAAA;AAAA,IACA,EAAE,UAAW;AAAA,GACf;AACF","file":"index.js","sourcesContent":["#! /usr/bin/env node\nimport { DocumentModelState } from \"document-model/document-model\";\nimport { typeDefs } from \"@powerhousedao/scalars\";\nimport {\n generateAll,\n generateEditor as _generateEditor,\n generateProcessor as _generateProcessor,\n generateDocumentModel,\n} from \"./hygen\";\nimport { generateSchemas, generateSchema } from \"./graphql\";\nimport type { PowerhouseConfig } from \"../utils/index\";\nimport fs from \"node:fs\";\nimport { join, resolve } from \"path\";\nimport { paramCase, pascalCase } from \"change-case\";\nimport { loadDocumentModel } from \"./utils\";\n\nfunction generateGraphqlSchema(documentModel: DocumentModelState) {\n const spec =\n documentModel.specifications[documentModel.specifications.length - 1];\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!spec) {\n throw new Error(`No spec found for ${documentModel.id}`);\n }\n\n const {\n modules,\n state: { global, local },\n } = spec;\n const schemas = [\n global.schema,\n local.schema,\n ...modules\n .map((module) => [\n `# ${module.name}`,\n ...module.operations.map((op) => op.schema),\n ])\n .flat()\n .filter((schema) => schema && schema.length > 0),\n ];\n return schemas.join(\"\\n\\n\");\n}\n\nfunction getDocumentTypesMap(dir: string) {\n const documentTypesMap: Record<string, string> = {};\n fs.readdirSync(dir, { withFileTypes: true })\n .filter((dirent) => dirent.isDirectory())\n .map((dirent) => dirent.name)\n .forEach((name) => {\n const specPath = resolve(dir, name, `${name}.json`);\n if (!fs.existsSync(specPath)) {\n return;\n }\n\n const specRaw = fs.readFileSync(specPath, \"utf-8\");\n try {\n const spec = JSON.parse(specRaw) as DocumentModelState;\n if (spec.id) {\n documentTypesMap[spec.id] = pascalCase(name);\n }\n } catch {\n console.error(`Failed to parse ${specPath}`);\n }\n });\n return documentTypesMap;\n}\n\nexport async function generate(config: PowerhouseConfig) {\n const { skipFormat, watch } = config;\n await generateSchemas(config.documentModelsDir, { skipFormat, watch });\n await generateAll(config.documentModelsDir, { skipFormat, watch });\n}\n\nexport async function generateFromFile(path: string, config: PowerhouseConfig) {\n // load document model spec from file\n const documentModel = await loadDocumentModel(path);\n\n const name = paramCase(documentModel.name);\n\n // create document model folder and spec as json\n fs.mkdirSync(join(config.documentModelsDir, name), { recursive: true });\n fs.writeFileSync(\n join(config.documentModelsDir, name, `${name}.json`),\n JSON.stringify(documentModel, null, 4),\n );\n\n // bundle graphql schemas together\n const schemaStr = [\n typeDefs.join(\"\\n\"), // inject ph scalars\n generateGraphqlSchema(documentModel),\n ].join(\"\\n\");\n\n if (schemaStr) {\n fs.writeFileSync(\n join(config.documentModelsDir, name, `schema.graphql`),\n schemaStr,\n );\n }\n\n await generateSchema(name, config.documentModelsDir, config);\n await generateDocumentModel(documentModel, config.documentModelsDir, config);\n}\n\nexport async function generateEditor(\n name: string,\n documentTypes: string[],\n config: PowerhouseConfig,\n) {\n const { documentModelsDir, skipFormat } = config;\n const docummentTypesMap = getDocumentTypesMap(documentModelsDir);\n\n const invalidType = documentTypes.find(\n 
(type) => !Object.keys(docummentTypesMap).includes(type),\n );\n if (invalidType) {\n throw new Error(`Document model for ${invalidType} not found`);\n }\n return _generateEditor(\n name,\n documentTypes,\n docummentTypesMap,\n config.editorsDir,\n config.documentModelsDir,\n { skipFormat },\n );\n}\n\nexport async function generateProcessor(\n name: string,\n type: \"analytics\" | \"operational\",\n documentTypes: string[],\n config: PowerhouseConfig,\n) {\n const { documentModelsDir, skipFormat } = config;\n const docummentTypesMap = getDocumentTypesMap(documentModelsDir);\n\n const invalidType = documentTypes.find(\n (type) => !Object.keys(docummentTypesMap).includes(type),\n );\n if (invalidType) {\n throw new Error(`Document model for ${invalidType} not found`);\n }\n return _generateProcessor(\n name,\n documentTypes,\n docummentTypesMap,\n config.processorsDir,\n config.documentModelsDir,\n type,\n { skipFormat },\n );\n}\n"]}
package/dist/hygen.d.ts CHANGED
@@ -11,5 +11,8 @@ declare function generateDocumentModel(documentModel: DocumentModel.DocumentMode
11
11
  declare function generateEditor(name: string, documentTypes: string[], documentTypesMap: Record<string, string>, dir: string, documentModelsDir: string, { skipFormat }?: {
12
12
  skipFormat?: boolean | undefined;
13
13
  }): Promise<void>;
14
+ declare function generateProcessor(name: string, documentTypes: string[], documentTypesMap: Record<string, string>, dir: string, documentModelsDir: string, type?: string, { skipFormat }?: {
15
+ skipFormat?: boolean | undefined;
16
+ }): Promise<void>;
14
17
 
15
- export { generateAll, generateDocumentModel, generateEditor };
18
+ export { generateAll, generateDocumentModel, generateEditor, generateProcessor };
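Note: the low-level generateProcessor declared above mirrors generateEditor, but it additionally takes the processors output directory and an optional type string that selects the analytics or operational template. A minimal sketch of a call that matches this signature; the import specifier and every argument value below are hypothetical placeholders, not values taken from this package:

    // Hypothetical import subpath; the real one depends on the package's exports map.
    import { generateProcessor } from "@powerhousedao/codegen/dist/hygen.js";

    // Sketch only: names, ids and paths are placeholders.
    await generateProcessor(
      "my-processor",                          // processor name
      ["powerhouse/todo-list"],                // document type ids to support (hypothetical)
      { "powerhouse/todo-list": "TodoList" },  // documentTypesMap: id -> pascal-cased model name
      "./processors",                          // dir: where the processor code is generated
      "./document-models",                     // documentModelsDir
      "analytics",                             // template type ("analytics" | "operational")
      { skipFormat: false },
    );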
package/dist/index.d.ts CHANGED
@@ -6,5 +6,6 @@ import '@anatine/zod-mock';
6
6
  declare function generate(config: PowerhouseConfig): Promise<void>;
7
7
  declare function generateFromFile(path: string, config: PowerhouseConfig): Promise<void>;
8
8
  declare function generateEditor(name: string, documentTypes: string[], config: PowerhouseConfig): Promise<void>;
9
+ declare function generateProcessor(name: string, type: "analytics" | "operational", documentTypes: string[], config: PowerhouseConfig): Promise<void>;
9
10
 
10
- export { PowerhouseConfig, generate, generateEditor, generateFromFile };
11
+ export { PowerhouseConfig, generate, generateEditor, generateFromFile, generateProcessor };
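Note: at the package root, the new generateProcessor takes the processor name, the template type ("analytics" or "operational"), the supported document type ids, and a PowerhouseConfig; the document types are resolved against the model specs found under documentModelsDir. A hedged usage sketch, assuming the package root exposes exactly the declarations above; the document type id is a placeholder:

    import { generateProcessor, type PowerhouseConfig } from "@powerhousedao/codegen";

    const config: PowerhouseConfig = {
      documentModelsDir: "./document-models",
      editorsDir: "./editors",
      processorsDir: "./processors",
      skipFormat: false,
    };

    // "powerhouse/todo-list" is a placeholder id; it must match the id of a document
    // model spec (<name>/<name>.json) under documentModelsDir, otherwise the call
    // throws "Document model for <id> not found".
    await generateProcessor("my-processor", "analytics", ["powerhouse/todo-list"], config);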
package/dist/utils/cli.js CHANGED
@@ -5,9 +5,11 @@ import { readFileSync, writeFileSync } from 'node:fs';
5
5
  const { prompt } = enquirer;
6
6
  const DEFAULT_DOCUMENT_MODELS_DIR = "./document-models";
7
7
  const DEFAULT_EDITORS_DIR = "./editors";
8
+ const DEFAULT_PROCESSORS_DIR = "./processors";
8
9
  const DEFAULT_CONFIG = {
9
10
  documentModelsDir: DEFAULT_DOCUMENT_MODELS_DIR,
10
11
  editorsDir: DEFAULT_EDITORS_DIR,
12
+ processorsDir: DEFAULT_PROCESSORS_DIR,
11
13
  skipFormat: false
12
14
  };
13
15
  const configSpec = {
package/dist/utils/cli.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/utils/cli.ts"],"names":[],"mappings":";;;;AAIA,MAAM,EAAE,QAAW,GAAA,QAAA;AAUnB,MAAM,2BAA8B,GAAA,mBAAA;AACpC,MAAM,mBAAsB,GAAA,WAAA;AAErB,MAAM,cAAmC,GAAA;AAAA,EAC9C,iBAAmB,EAAA,2BAAA;AAAA,EACnB,UAAY,EAAA,mBAAA;AAAA,EACZ,UAAY,EAAA;AACd;AAEO,MAAM,UAAa,GAAA;AAAA,EACxB,mBAAqB,EAAA,MAAA;AAAA,EACrB,WAAa,EAAA,MAAA;AAAA,EACb,eAAiB,EAAA,OAAA;AAAA,EACjB,eAAiB,EAAA,OAAA;AAAA,EACjB,SAAW,EAAA,OAAA;AAAA,EACX,IAAM,EAAA,eAAA;AAAA,EACN,KAAO,EAAA,eAAA;AAAA,EACP,IAAM,EAAA;AACR;AAEO,SAAS,SAAY,GAAA;AAC1B,EAAI,IAAA,MAAA,GAA2B,EAAE,GAAG,cAAe,EAAA;AACnD,EAAI,IAAA;AACF,IAAM,MAAA,SAAA,GAAY,YAAa,CAAA,0BAAA,EAA4B,OAAO,CAAA;AAClE,IAAM,MAAA,UAAA,GAAa,IAAK,CAAA,KAAA,CAAM,SAAS,CAAA;AACvC,IAAA,MAAA,GAAS,EAAE,GAAG,MAAQ,EAAA,GAAG,UAAW,EAAA;AAAA,GAC9B,CAAA,MAAA;AACN,IAAA,OAAA,CAAQ,KAAK,iDAAiD,CAAA;AAAA;AAEhE,EAAO,OAAA,MAAA;AACT;AAEO,SAAS,YAAY,MAA0B,EAAA;AACpD,EAAA,aAAA,CAAc,4BAA4B,IAAK,CAAA,SAAA,CAAU,MAAQ,EAAA,IAAA,EAAM,CAAC,CAAC,CAAA;AAC3E;AAEO,SAAS,SAAA,CAA8B,MAAgB,IAAS,EAAA;AACrE,EAAM,MAAA,IAAA,GAAO,IAAI,IAAM,EAAA;AAAA,IACrB,UAAY,EAAA,IAAA;AAAA,IACZ;AAAA,GACD,CAAA;AAED,EAAO,OAAA,IAAA;AACT;AAEO,SAAS,YAAY,IAAgB,EAAA;AAC1C,EAAA,MAAM,SAAoC,EAAC;AAC3C,EAAM,MAAA,IAAA,GAAO,SAAU,CAAA,IAAA,EAAM,UAAU,CAAA;AAEvC,EAAA,IAAI,uBAAuB,IAAM,EAAA;AAC/B,IAAO,MAAA,CAAA,iBAAA,GAAoB,KAAK,mBAAmB,CAAA;AAAA;AAGrD,EAAA,IAAI,eAAe,IAAM,EAAA;AACvB,IAAO,MAAA,CAAA,UAAA,GAAa,KAAK,WAAW,CAAA;AAAA;AAGtC,EAAA,IAAI,mBAAmB,IAAM,EAAA;AAC3B,IAAA,MAAA,CAAO,UAAa,GAAA,IAAA;AAAA;AAEtB,EAAA,IAAI,mBAAmB,IAAM,EAAA;AAC3B,IAAA,MAAA,CAAO,WAAc,GAAA,IAAA;AAAA;AAEvB,EAAA,IAAI,aAAa,IAAM,EAAA;AACrB,IAAA,MAAA,CAAO,KAAQ,GAAA,IAAA;AAAA;AAGjB,EAAO,OAAA,MAAA;AACT;AAEA,eAAsB,iBAAA,CACpB,SAA2B,cAC3B,EAAA;AACA,EAAA,OAAO,MAAmE,CAAA;AAAA,IACxE;AAAA,MACE,IAAM,EAAA,OAAA;AAAA,MACN,IAAM,EAAA,mBAAA;AAAA,MACN,OAAS,EAAA,qCAAA;AAAA,MACT,SAAS,MAAO,CAAA;AAAA,KAClB;AAAA,IACA;AAAA,MACE,IAAM,EAAA,OAAA;AAAA,MACN,IAAM,EAAA,YAAA;AAAA,MACN,OAAS,EAAA,6BAAA;AAAA,MACT,SAAS,MAAO,CAAA;AAAA;AAClB,GACD,CAAA;AACH","file":"cli.js","sourcesContent":["import arg from \"arg\";\nimport enquirer from \"enquirer\";\nimport { readFileSync, writeFileSync } from \"node:fs\";\n\nconst { prompt } = enquirer;\n\nexport type PowerhouseConfig = {\n documentModelsDir: string;\n editorsDir: string;\n interactive?: boolean;\n skipFormat?: boolean;\n watch?: boolean;\n};\n\nconst DEFAULT_DOCUMENT_MODELS_DIR = \"./document-models\";\nconst DEFAULT_EDITORS_DIR = \"./editors\";\n\nexport const DEFAULT_CONFIG: PowerhouseConfig = {\n documentModelsDir: DEFAULT_DOCUMENT_MODELS_DIR,\n editorsDir: DEFAULT_EDITORS_DIR,\n skipFormat: false,\n};\n\nexport const configSpec = {\n \"--document-models\": String,\n \"--editors\": String,\n \"--interactive\": Boolean,\n \"--skip-format\": Boolean,\n \"--watch\": Boolean,\n \"-i\": \"--interactive\",\n \"-sf\": \"--skip-format\",\n \"-w\": \"--watch\",\n} as const;\n\nexport function getConfig() {\n let config: PowerhouseConfig = { ...DEFAULT_CONFIG };\n try {\n const configStr = readFileSync(\"./powerhouse.config.json\", \"utf-8\");\n const userConfig = JSON.parse(configStr) as PowerhouseConfig;\n config = { ...config, ...userConfig };\n } catch {\n console.warn(\"No powerhouse.config.json found, using defaults\");\n }\n return config;\n}\n\nexport function writeConfig(config: PowerhouseConfig) {\n writeFileSync(\"./powerhouse.config.json\", JSON.stringify(config, null, 4));\n}\n\nexport function parseArgs<T extends arg.Spec>(argv: string[], spec: T) {\n const args = arg(spec, {\n permissive: true,\n argv,\n });\n\n return args;\n}\n\nexport function 
parseConfig(argv: string[]) {\n const config: Partial<PowerhouseConfig> = {};\n const args = parseArgs(argv, configSpec);\n\n if (\"--document-models\" in args) {\n config.documentModelsDir = args[\"--document-models\"];\n }\n\n if (\"--editors\" in args) {\n config.editorsDir = args[\"--editors\"];\n }\n\n if (\"--skip-format\" in args) {\n config.skipFormat = true;\n }\n if (\"--interactive\" in args) {\n config.interactive = true;\n }\n if (\"--watch\" in args) {\n config.watch = true;\n }\n\n return config;\n}\n\nexport async function promptDirectories(\n config: PowerhouseConfig = DEFAULT_CONFIG,\n) {\n return prompt<Pick<PowerhouseConfig, \"documentModelsDir\" | \"editorsDir\">>([\n {\n type: \"input\",\n name: \"documentModelsDir\",\n message: \"Where to place the Document Models?\",\n initial: config.documentModelsDir,\n },\n {\n type: \"input\",\n name: \"editorsDir\",\n message: \"Where to place the Editors?\",\n initial: config.editorsDir,\n },\n ]);\n}\n"]}
1
+ {"version":3,"sources":["../../src/utils/cli.ts"],"names":[],"mappings":";;;;AAIA,MAAM,EAAE,QAAW,GAAA,QAAA;AAWnB,MAAM,2BAA8B,GAAA,mBAAA;AACpC,MAAM,mBAAsB,GAAA,WAAA;AAC5B,MAAM,sBAAyB,GAAA,cAAA;AAExB,MAAM,cAAmC,GAAA;AAAA,EAC9C,iBAAmB,EAAA,2BAAA;AAAA,EACnB,UAAY,EAAA,mBAAA;AAAA,EACZ,aAAe,EAAA,sBAAA;AAAA,EACf,UAAY,EAAA;AACd;AAEO,MAAM,UAAa,GAAA;AAAA,EACxB,mBAAqB,EAAA,MAAA;AAAA,EACrB,WAAa,EAAA,MAAA;AAAA,EACb,eAAiB,EAAA,OAAA;AAAA,EACjB,eAAiB,EAAA,OAAA;AAAA,EACjB,SAAW,EAAA,OAAA;AAAA,EACX,IAAM,EAAA,eAAA;AAAA,EACN,KAAO,EAAA,eAAA;AAAA,EACP,IAAM,EAAA;AACR;AAEO,SAAS,SAAY,GAAA;AAC1B,EAAI,IAAA,MAAA,GAA2B,EAAE,GAAG,cAAe,EAAA;AACnD,EAAI,IAAA;AACF,IAAM,MAAA,SAAA,GAAY,YAAa,CAAA,0BAAA,EAA4B,OAAO,CAAA;AAClE,IAAM,MAAA,UAAA,GAAa,IAAK,CAAA,KAAA,CAAM,SAAS,CAAA;AACvC,IAAA,MAAA,GAAS,EAAE,GAAG,MAAQ,EAAA,GAAG,UAAW,EAAA;AAAA,GAC9B,CAAA,MAAA;AACN,IAAA,OAAA,CAAQ,KAAK,iDAAiD,CAAA;AAAA;AAEhE,EAAO,OAAA,MAAA;AACT;AAEO,SAAS,YAAY,MAA0B,EAAA;AACpD,EAAA,aAAA,CAAc,4BAA4B,IAAK,CAAA,SAAA,CAAU,MAAQ,EAAA,IAAA,EAAM,CAAC,CAAC,CAAA;AAC3E;AAEO,SAAS,SAAA,CAA8B,MAAgB,IAAS,EAAA;AACrE,EAAM,MAAA,IAAA,GAAO,IAAI,IAAM,EAAA;AAAA,IACrB,UAAY,EAAA,IAAA;AAAA,IACZ;AAAA,GACD,CAAA;AAED,EAAO,OAAA,IAAA;AACT;AAEO,SAAS,YAAY,IAAgB,EAAA;AAC1C,EAAA,MAAM,SAAoC,EAAC;AAC3C,EAAM,MAAA,IAAA,GAAO,SAAU,CAAA,IAAA,EAAM,UAAU,CAAA;AAEvC,EAAA,IAAI,uBAAuB,IAAM,EAAA;AAC/B,IAAO,MAAA,CAAA,iBAAA,GAAoB,KAAK,mBAAmB,CAAA;AAAA;AAGrD,EAAA,IAAI,eAAe,IAAM,EAAA;AACvB,IAAO,MAAA,CAAA,UAAA,GAAa,KAAK,WAAW,CAAA;AAAA;AAGtC,EAAA,IAAI,mBAAmB,IAAM,EAAA;AAC3B,IAAA,MAAA,CAAO,UAAa,GAAA,IAAA;AAAA;AAEtB,EAAA,IAAI,mBAAmB,IAAM,EAAA;AAC3B,IAAA,MAAA,CAAO,WAAc,GAAA,IAAA;AAAA;AAEvB,EAAA,IAAI,aAAa,IAAM,EAAA;AACrB,IAAA,MAAA,CAAO,KAAQ,GAAA,IAAA;AAAA;AAGjB,EAAO,OAAA,MAAA;AACT;AAEA,eAAsB,iBAAA,CACpB,SAA2B,cAC3B,EAAA;AACA,EAAA,OAAO,MAAmE,CAAA;AAAA,IACxE;AAAA,MACE,IAAM,EAAA,OAAA;AAAA,MACN,IAAM,EAAA,mBAAA;AAAA,MACN,OAAS,EAAA,qCAAA;AAAA,MACT,SAAS,MAAO,CAAA;AAAA,KAClB;AAAA,IACA;AAAA,MACE,IAAM,EAAA,OAAA;AAAA,MACN,IAAM,EAAA,YAAA;AAAA,MACN,OAAS,EAAA,6BAAA;AAAA,MACT,SAAS,MAAO,CAAA;AAAA;AAClB,GACD,CAAA;AACH","file":"cli.js","sourcesContent":["import arg from \"arg\";\nimport enquirer from \"enquirer\";\nimport { readFileSync, writeFileSync } from \"node:fs\";\n\nconst { prompt } = enquirer;\n\nexport type PowerhouseConfig = {\n documentModelsDir: string;\n editorsDir: string;\n processorsDir: string;\n interactive?: boolean;\n skipFormat?: boolean;\n watch?: boolean;\n};\n\nconst DEFAULT_DOCUMENT_MODELS_DIR = \"./document-models\";\nconst DEFAULT_EDITORS_DIR = \"./editors\";\nconst DEFAULT_PROCESSORS_DIR = \"./processors\";\n\nexport const DEFAULT_CONFIG: PowerhouseConfig = {\n documentModelsDir: DEFAULT_DOCUMENT_MODELS_DIR,\n editorsDir: DEFAULT_EDITORS_DIR,\n processorsDir: DEFAULT_PROCESSORS_DIR,\n skipFormat: false,\n};\n\nexport const configSpec = {\n \"--document-models\": String,\n \"--editors\": String,\n \"--interactive\": Boolean,\n \"--skip-format\": Boolean,\n \"--watch\": Boolean,\n \"-i\": \"--interactive\",\n \"-sf\": \"--skip-format\",\n \"-w\": \"--watch\",\n} as const;\n\nexport function getConfig() {\n let config: PowerhouseConfig = { ...DEFAULT_CONFIG };\n try {\n const configStr = readFileSync(\"./powerhouse.config.json\", \"utf-8\");\n const userConfig = JSON.parse(configStr) as PowerhouseConfig;\n config = { ...config, ...userConfig };\n } catch {\n console.warn(\"No powerhouse.config.json found, using defaults\");\n }\n return config;\n}\n\nexport function writeConfig(config: PowerhouseConfig) {\n writeFileSync(\"./powerhouse.config.json\", JSON.stringify(config, null, 4));\n}\n\nexport function 
parseArgs<T extends arg.Spec>(argv: string[], spec: T) {\n const args = arg(spec, {\n permissive: true,\n argv,\n });\n\n return args;\n}\n\nexport function parseConfig(argv: string[]) {\n const config: Partial<PowerhouseConfig> = {};\n const args = parseArgs(argv, configSpec);\n\n if (\"--document-models\" in args) {\n config.documentModelsDir = args[\"--document-models\"];\n }\n\n if (\"--editors\" in args) {\n config.editorsDir = args[\"--editors\"];\n }\n\n if (\"--skip-format\" in args) {\n config.skipFormat = true;\n }\n if (\"--interactive\" in args) {\n config.interactive = true;\n }\n if (\"--watch\" in args) {\n config.watch = true;\n }\n\n return config;\n}\n\nexport async function promptDirectories(\n config: PowerhouseConfig = DEFAULT_CONFIG,\n) {\n return prompt<Pick<PowerhouseConfig, \"documentModelsDir\" | \"editorsDir\">>([\n {\n type: \"input\",\n name: \"documentModelsDir\",\n message: \"Where to place the Document Models?\",\n initial: config.documentModelsDir,\n },\n {\n type: \"input\",\n name: \"editorsDir\",\n message: \"Where to place the Editors?\",\n initial: config.editorsDir,\n },\n ]);\n}\n"]}
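Note: getConfig still merges ./powerhouse.config.json over DEFAULT_CONFIG, so existing projects pick up the new ./processors default unless they set processorsDir themselves. A hedged example of a config file that overrides it; only the keys come from this diff, the directory values are placeholders:

    {
        "documentModelsDir": "./document-models",
        "editorsDir": "./editors",
        "processorsDir": "./src/processors",
        "skipFormat": false
    }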
package/dist/utils.d.ts CHANGED
@@ -4,6 +4,7 @@ import { generateMock as generateMock$1 } from '@anatine/zod-mock';
4
4
  type PowerhouseConfig = {
5
5
  documentModelsDir: string;
6
6
  editorsDir: string;
7
+ processorsDir: string;
7
8
  interactive?: boolean;
8
9
  skipFormat?: boolean;
9
10
  watch?: boolean;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@powerhousedao/codegen",
3
- "version": "0.16.0",
3
+ "version": "0.17.0",
4
4
  "license": "AGPL-3.0-only",
5
5
  "private": false,
6
6
  "main": "dist/index.js",
@@ -29,24 +29,24 @@
29
29
  "@types/node": "^20.11.2",
30
30
  "arg": "^5.0.2",
31
31
  "change-case": "^4.1.2",
32
- "document-model": "^2.7.1",
33
32
  "enquirer": "^2.4.1",
34
33
  "execa": "^8.0.1",
35
- "hygen": "^6.2.11"
34
+ "hygen": "^6.2.11",
35
+ "document-model": "2.7.1"
36
36
  },
37
37
  "devDependencies": {
38
- "@powerhousedao/scalars": "1.9.0",
39
38
  "copyfiles": "^2.4.1",
40
39
  "esbuild-fix-imports-plugin": "^1.0.6",
41
40
  "graphql": "^16.8.1",
42
41
  "husky": "^8.0.3",
43
42
  "rimraf": "^5.0.5",
44
43
  "tsup": "^8.3.0",
45
- "tsx": "^4.19.1"
44
+ "tsx": "^4.19.1",
45
+ "@powerhousedao/scalars": "1.9.0"
46
46
  },
47
47
  "peerDependencies": {
48
- "@powerhousedao/scalars": "latest",
49
- "graphql": "^16.8.1"
48
+ "graphql": "^16.8.1",
49
+ "@powerhousedao/scalars": "1.9.0"
50
50
  },
51
51
  "scripts": {
52
52
  "check-types": "tsc --noEmit",