@forwardimpact/libcodegen 0.1.27

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,201 @@
1
+ Apache License
2
+ Version 2.0, January 2004
3
+ http://www.apache.org/licenses/
4
+
5
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6
+
7
+ 1. Definitions.
8
+
9
+ "License" shall mean the terms and conditions for use, reproduction,
10
+ and distribution as defined by Sections 1 through 9 of this document.
11
+
12
+ "Licensor" shall mean the copyright owner or entity authorized by
13
+ the copyright owner that is granting the License.
14
+
15
+ "Legal Entity" shall mean the union of the acting entity and all
16
+ other entities that control, are controlled by, or are under common
17
+ control with that entity. For the purposes of this definition,
18
+ "control" means (i) the power, direct or indirect, to cause the
19
+ direction or management of such entity, whether by contract or
20
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
21
+ outstanding shares, or (iii) beneficial ownership of such entity.
22
+
23
+ "You" (or "Your") shall mean an individual or Legal Entity
24
+ exercising permissions granted by this License.
25
+
26
+ "Source" form shall mean the preferred form for making modifications,
27
+ including but not limited to software source code, documentation
28
+ source, and configuration files.
29
+
30
+ "Object" form shall mean any form resulting from mechanical
31
+ transformation or translation of a Source form, including but
32
+ not limited to compiled object code, generated documentation,
33
+ and conversions to other media types.
34
+
35
+ "Work" shall mean the work of authorship, whether in Source or
36
+ Object form, made available under the License, as indicated by a
37
+ copyright notice that is included in or attached to the work
38
+ (an example is provided in the Appendix below).
39
+
40
+ "Derivative Works" shall mean any work, whether in Source or Object
41
+ form, that is based on (or derived from) the Work and for which the
42
+ editorial revisions, annotations, elaborations, or other modifications
43
+ represent, as a whole, an original work of authorship. For the purposes
44
+ of this License, Derivative Works shall not include works that remain
45
+ separable from, or merely link (or bind by name) to the interfaces of,
46
+ the Work and Derivative Works thereof.
47
+
48
+ "Contribution" shall mean any work of authorship, including
49
+ the original version of the Work and any modifications or additions
50
+ to that Work or Derivative Works thereof, that is intentionally
51
+ submitted to the Licensor for inclusion in the Work by the copyright owner
52
+ or by an individual or Legal Entity authorized to submit on behalf of
53
+ the copyright owner. For the purposes of this definition, "submitted"
54
+ means any form of electronic, verbal, or written communication sent
55
+ to the Licensor or its representatives, including but not limited to
56
+ communication on electronic mailing lists, source code control systems,
57
+ and issue tracking systems that are managed by, or on behalf of, the
58
+ Licensor for the purpose of discussing and improving the Work, but
59
+ excluding communication that is conspicuously marked or otherwise
60
+ designated in writing by the copyright owner as "Not a Contribution."
61
+
62
+ "Contributor" shall mean Licensor and any individual or Legal Entity
63
+ on behalf of whom a Contribution has been received by Licensor and
64
+ subsequently incorporated within the Work.
65
+
66
+ 2. Grant of Copyright License. Subject to the terms and conditions of
67
+ this License, each Contributor hereby grants to You a perpetual,
68
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69
+ copyright license to reproduce, prepare Derivative Works of,
70
+ publicly display, publicly perform, sublicense, and distribute the
71
+ Work and such Derivative Works in Source or Object form.
72
+
73
+ 3. Grant of Patent License. Subject to the terms and conditions of
74
+ this License, each Contributor hereby grants to You a perpetual,
75
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76
+ (except as stated in this section) patent license to make, have made,
77
+ use, offer to sell, sell, import, and otherwise transfer the Work,
78
+ where such license applies only to those patent claims licensable
79
+ by such Contributor that are necessarily infringed by their
80
+ Contribution(s) alone or by combination of their Contribution(s)
81
+ with the Work to which such Contribution(s) was submitted. If You
82
+ institute patent litigation against any entity (including a
83
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
84
+ or a Contribution incorporated within the Work constitutes direct
85
+ or contributory patent infringement, then any patent licenses
86
+ granted to You under this License for that Work shall terminate
87
+ as of the date such litigation is filed.
88
+
89
+ 4. Redistribution. You may reproduce and distribute copies of the
90
+ Work or Derivative Works thereof in any medium, with or without
91
+ modifications, and in Source or Object form, provided that You
92
+ meet the following conditions:
93
+
94
+ (a) You must give any other recipients of the Work or
95
+ Derivative Works a copy of this License; and
96
+
97
+ (b) You must cause any modified files to carry prominent notices
98
+ stating that You changed the files; and
99
+
100
+ (c) You must retain, in the Source form of any Derivative Works
101
+ that You distribute, all copyright, patent, trademark, and
102
+ attribution notices from the Source form of the Work,
103
+ excluding those notices that do not pertain to any part of
104
+ the Derivative Works; and
105
+
106
+ (d) If the Work includes a "NOTICE" text file as part of its
107
+ distribution, then any Derivative Works that You distribute must
108
+ include a readable copy of the attribution notices contained
109
+ within such NOTICE file, excluding those notices that do not
110
+ pertain to any part of the Derivative Works, in at least one
111
+ of the following places: within a NOTICE text file distributed
112
+ as part of the Derivative Works; within the Source form or
113
+ documentation, if provided along with the Derivative Works; or,
114
+ within a display generated by the Derivative Works, if and
115
+ wherever such third-party notices normally appear. The contents
116
+ of the NOTICE file are for informational purposes only and
117
+ do not modify the License. You may add Your own attribution
118
+ notices within Derivative Works that You distribute, alongside
119
+ or as an addendum to the NOTICE text from the Work, provided
120
+ that such additional attribution notices cannot be construed
121
+ as modifying the License.
122
+
123
+ You may add Your own copyright statement to Your modifications and
124
+ may provide additional or different license terms and conditions
125
+ for use, reproduction, or distribution of Your modifications, or
126
+ for any such Derivative Works as a whole, provided Your use,
127
+ reproduction, and distribution of the Work otherwise complies with
128
+ the conditions stated in this License.
129
+
130
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
131
+ any Contribution intentionally submitted for inclusion in the Work
132
+ by You to the Licensor shall be under the terms and conditions of
133
+ this License, without any additional terms or conditions.
134
+ Notwithstanding the above, nothing herein shall supersede or modify
135
+ the terms of any separate license agreement you may have executed
136
+ with Licensor regarding such Contributions.
137
+
138
+ 6. Trademarks. This License does not grant permission to use the trade
139
+ names, trademarks, service marks, or product names of the Licensor,
140
+ except as required for reasonable and customary use in describing the
141
+ origin of the Work and reproducing the content of the NOTICE file.
142
+
143
+ 7. Disclaimer of Warranty. Unless required by applicable law or
144
+ agreed to in writing, Licensor provides the Work (and each
145
+ Contributor provides its Contributions) on an "AS IS" BASIS,
146
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147
+ implied, including, without limitation, any warranties or conditions
148
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149
+ PARTICULAR PURPOSE. You are solely responsible for determining the
150
+ appropriateness of using or redistributing the Work and assume any
151
+ risks associated with Your exercise of permissions under this License.
152
+
153
+ 8. Limitation of Liability. In no event and under no legal theory,
154
+ whether in tort (including negligence), contract, or otherwise,
155
+ unless required by applicable law (such as deliberate and grossly
156
+ negligent acts) or agreed to in writing, shall any Contributor be
157
+ liable to You for damages, including any direct, indirect, special,
158
+ incidental, or consequential damages of any character arising as a
159
+ result of this License or out of the use or inability to use the
160
+ Work (including but not limited to damages for loss of goodwill,
161
+ work stoppage, computer failure or malfunction, or any and all
162
+ other commercial damages or losses), even if such Contributor
163
+ has been advised of the possibility of such damages.
164
+
165
+ 9. Accepting Warranty or Additional Liability. While redistributing
166
+ the Work or Derivative Works thereof, You may choose to offer,
167
+ and charge a fee for, acceptance of support, warranty, indemnity,
168
+ or other liability obligations and/or rights consistent with this
169
+ License. However, in accepting such obligations, You may act only
170
+ on Your own behalf and on Your sole responsibility, not on behalf
171
+ of any other Contributor, and only if You agree to indemnify,
172
+ defend, and hold each Contributor harmless for any liability
173
+ incurred by, or claims asserted against, such Contributor by reason
174
+ of your accepting any such warranty or additional liability.
175
+
176
+ END OF TERMS AND CONDITIONS
177
+
178
+ APPENDIX: How to apply the Apache License to your work.
179
+
180
+ To apply the Apache License to your work, attach the following
181
+ boilerplate notice, with the fields enclosed by brackets "[]"
182
+ replaced with your own identifying information. (Don't include
183
+ the brackets!) The text should be enclosed in the appropriate
184
+ comment syntax for the file format. We also recommend that a
185
+ file or class name and description of purpose be included on the
186
+ same "printed page" as the copyright notice for easier
187
+ identification within third-party archives.
188
+
189
+ Copyright 2026 Dick Olsson
190
+
191
+ Licensed under the Apache License, Version 2.0 (the "License");
192
+ you may not use this file except in compliance with the License.
193
+ You may obtain a copy of the License at
194
+
195
+ http://www.apache.org/licenses/LICENSE-2.0
196
+
197
+ Unless required by applicable law or agreed to in writing, software
198
+ distributed under the License is distributed on an "AS IS" BASIS,
199
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200
+ See the License for the specific language governing permissions and
201
+ limitations under the License.
package/base.js ADDED
@@ -0,0 +1,310 @@
1
+ import { execFile } from "node:child_process";
2
+ import { fileURLToPath } from "node:url";
3
+
4
+ /**
5
+ * Base class for code generation utilities providing shared functionality
6
+ * Implements dependency injection pattern with explicit validation
7
+ */
8
+ export class CodegenBase {
9
+ #projectRoot;
10
+ #path;
11
+ #mustache;
12
+ #protoLoader;
13
+ #fs;
14
+
15
+ /**
16
+ * Creates a new codegen base instance with dependency injection
17
+ * @param {string} projectRoot - Project root directory path
18
+ * @param {object} path - Path module for file operations
19
+ * @param {object} mustache - Mustache template rendering module
20
+ * @param {object} protoLoader - Protocol buffer loader module
21
+ * @param {object} fs - File system module (sync operations only)
22
+ */
23
+ constructor(projectRoot, path, mustache, protoLoader, fs) {
24
+ if (!projectRoot) throw new Error("projectRoot is required");
25
+ if (!path) throw new Error("path module is required");
26
+ if (!mustache) throw new Error("mustache module is required");
27
+ if (!protoLoader) throw new Error("protoLoader module is required");
28
+ if (!fs) throw new Error("fs module is required");
29
+
30
+ this.#projectRoot = projectRoot;
31
+ this.#path = path;
32
+ this.#mustache = mustache;
33
+ this.#protoLoader = protoLoader;
34
+ this.#fs = fs;
35
+ }
36
+
37
+ /**
38
+ * Collect all proto files from project proto directory and tools directory
39
+ * @param {object} opts - Collection options
40
+ * @param {boolean} opts.includeTools - Whether to include tool proto files
41
+ * @returns {string[]} Array of absolute proto file paths
42
+ */
43
+ collectProtoFiles(opts = {}) {
44
+ const { includeTools = true } = opts;
45
+ const protoDir = this.#path.join(this.#projectRoot, "proto");
46
+
47
+ const discovered = this.#fs
48
+ .readdirSync(protoDir)
49
+ .filter((f) => f.endsWith(".proto"))
50
+ .sort();
51
+
52
+ const ordered = discovered.includes("common.proto")
53
+ ? [
54
+ this.#path.join(protoDir, "common.proto"),
55
+ ...discovered
56
+ .filter((f) => f !== "common.proto")
57
+ .map((f) => this.#path.join(protoDir, f)),
58
+ ]
59
+ : discovered.map((f) => this.#path.join(protoDir, f));
60
+
61
+ if (includeTools) {
62
+ try {
63
+ ordered.push(
64
+ ...this.#fs
65
+ .readdirSync(this.#path.join(this.#projectRoot, "tools"))
66
+ .filter((f) => f.endsWith(".proto"))
67
+ .map((f) => this.#path.join(this.#projectRoot, "tools", f)),
68
+ );
69
+ } catch {
70
+ // tools directory may not exist; ignore
71
+ }
72
+ }
73
+
74
+ return ordered;
75
+ }
76
+
77
+ /**
78
+ * Load mustache template for given kind
79
+ * @param {"service"|"client"|"exports"|"definition"|"definitions-exports"|"services-exports"} kind - Template kind
80
+ * @returns {string} Template content
81
+ */
82
+ loadTemplate(kind) {
83
+ const __filename = fileURLToPath(import.meta.url);
84
+ const __dirname = this.#path.dirname(__filename);
85
+ const templatePath = this.#path.join(
86
+ __dirname,
87
+ "templates",
88
+ `${kind}.js.mustache`,
89
+ );
90
+
91
+ if (!this.#fs.existsSync(templatePath)) {
92
+ throw new Error(`Missing ${kind}.js.mustache template`);
93
+ }
94
+ return this.#fs.readFileSync(templatePath, "utf8");
95
+ }
96
+
97
+ /**
98
+ * Render mustache template with given data
99
+ * @param {string} template - Template content
100
+ * @param {object} data - Template data
101
+ * @returns {string} Rendered content
102
+ */
103
+ renderTemplate(template, data) {
104
+ return this.#mustache.render(template, data);
105
+ }
106
+
107
+ /**
108
+ * Run a command with arguments and options
109
+ * @param {string} cmd - Command to execute
110
+ * @param {string[]} args - Command-line arguments
111
+ * @param {object} [opts] - Child process options
112
+ * @returns {Promise<void>} Resolves when the command completes successfully
113
+ */
114
+ run(cmd, args, opts = {}) {
115
+ return new Promise((resolvePromise, reject) => {
116
+ const child = execFile(
117
+ cmd,
118
+ args,
119
+ { stdio: "inherit", ...opts },
120
+ (err) => {
121
+ if (err) reject(err);
122
+ else resolvePromise();
123
+ },
124
+ );
125
+ child.on("error", reject);
126
+ });
127
+ }
128
+
129
+ /**
130
+ * Convert string to PascalCase
131
+ * @param {string} str - String to convert
132
+ * @returns {string} PascalCase string
133
+ */
134
+ pascalCase(str) {
135
+ return str
136
+ .split(/[-_\s]+/)
137
+ .map((word) => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase())
138
+ .join("");
139
+ }
140
+
141
+ /**
142
+ * Parse a .proto file to extract a single service definition and method shapes
143
+ * @param {string} protoPath - Absolute path to .proto file
144
+ * @returns {{packageName:string, serviceName:string, methods:Array, namespaceName:string}|null} Parsed service info
145
+ */
146
+ parseProtoFile(protoPath) {
147
+ const def = this.#protoLoader.loadSync(protoPath, {
148
+ includeDirs: [this.#path.dirname(protoPath)],
149
+ keepCase: true,
150
+ });
151
+
152
+ const serviceKey = Object.keys(def).find((key) => {
153
+ const val = def[key];
154
+ if (!val || typeof val !== "object") return false;
155
+ const methods = Object.values(val);
156
+ return (
157
+ methods.length > 0 &&
158
+ methods.every(
159
+ (m) =>
160
+ m &&
161
+ typeof m === "object" &&
162
+ "requestType" in m &&
163
+ "responseType" in m,
164
+ )
165
+ );
166
+ });
167
+
168
+ if (!serviceKey) return null; // Indicate no service for this proto (pure message proto)
169
+
170
+ const serviceDef = def[serviceKey];
171
+ const parts = serviceKey.split(".");
172
+ const serviceName = parts[parts.length - 1];
173
+ const packageName = parts.slice(0, -1).join(".");
174
+
175
+ const methods = Object.entries(serviceDef).map(
176
+ ([name, method], index, array) => {
177
+ const req = method.requestType.type;
178
+ const res = method.responseType.type;
179
+
180
+ return {
181
+ name,
182
+ requestType: req.name,
183
+ responseType: res.name,
184
+ requestTypeNamespace: this.findTypeNamespace(req, def, packageName),
185
+ responseTypeNamespace: this.findTypeNamespace(res, def, packageName),
186
+ paramName: "req",
187
+ isLast: index === array.length - 1,
188
+ responseStream: !!method.responseStream,
189
+ requestStream: !!method.requestStream,
190
+ };
191
+ },
192
+ );
193
+
194
+ // Collect all unique namespaces needed for imports
195
+ const namespaces = new Set([
196
+ packageName,
197
+ ...methods.flatMap((m) => [
198
+ m.requestTypeNamespace,
199
+ m.responseTypeNamespace,
200
+ ]),
201
+ ]);
202
+
203
+ // Filter out well-known google namespaces as they are not in libtype
204
+ const filteredNamespaces = Array.from(namespaces).filter(
205
+ (ns) => !ns.startsWith("google"),
206
+ );
207
+
208
+ return {
209
+ packageName,
210
+ serviceName,
211
+ methods,
212
+ namespaceName: packageName,
213
+ importNamespaces: filteredNamespaces.map((ns, index, array) => ({
214
+ name: ns,
215
+ isLast: index === array.length - 1,
216
+ })),
217
+ };
218
+ }
219
+
220
+ /**
221
+ * Find the namespace for a given type by comparing structure
222
+ * @param {object} typeToFind - The type definition to find namespace for
223
+ * @param {object} allTypes - All available type definitions
224
+ * @param {string} fallbackPackage - Fallback package name
225
+ * @returns {string} The namespace string for the type
226
+ */
227
+ findTypeNamespace(typeToFind, allTypes, fallbackPackage) {
228
+ // Find matching type definition by structure comparison
229
+ for (const [key, typeDef] of Object.entries(allTypes)) {
230
+ if (typeDef.type && typeDef.type.name === typeToFind.name) {
231
+ const typeFields = typeDef.type.field || [];
232
+ const targetFields = typeToFind.field || [];
233
+
234
+ // Compare fields to see if structures match
235
+ const fieldsMatch =
236
+ typeFields.length === targetFields.length &&
237
+ typeFields.every(
238
+ (field, i) =>
239
+ field.name === targetFields[i].name &&
240
+ field.type === targetFields[i].type &&
241
+ field.typeName === targetFields[i].typeName,
242
+ );
243
+
244
+ if (fieldsMatch) {
245
+ const parts = key.split(".");
246
+ return parts.length > 1
247
+ ? parts.slice(0, -1).join(".")
248
+ : fallbackPackage;
249
+ }
250
+ }
251
+ }
252
+ return fallbackPackage;
253
+ }
254
+
255
+ /**
256
+ * Render and write a service/client artifact for a given proto into a service dir
257
+ * @param {"service"|"client"|"definition"} kind - Artifact kind to generate
258
+ * @param {string} protoPath - Absolute path to .proto file
259
+ * @param {string} outputDir - Absolute directory path for output
260
+ * @param {string} [filename] - Optional custom filename (defaults to kind.js)
261
+ * @returns {Promise<void>}
262
+ */
263
+ async generateArtifact(kind, protoPath, outputDir, filename) {
264
+ const parsed = this.parseProtoFile(protoPath);
265
+ if (!parsed) return; // Skip non-service proto
266
+
267
+ const {
268
+ packageName,
269
+ serviceName,
270
+ methods,
271
+ namespaceName,
272
+ importNamespaces,
273
+ } = parsed;
274
+ const rendered = this.#mustache.render(this.loadTemplate(kind), {
275
+ packageName,
276
+ serviceName,
277
+ methods,
278
+ namespaceName,
279
+ importNamespaces,
280
+ className: `${serviceName}${kind === "service" ? "Base" : kind === "client" ? "Client" : "ServiceDefinition"}`,
281
+ });
282
+
283
+ const jsFile = this.#path.join(outputDir, filename || `${kind}.js`);
284
+ this.#fs.writeFileSync(jsFile, rendered);
285
+ }
286
+
287
+ /**
288
+ * Get path module instance
289
+ * @returns {object} Path module
290
+ */
291
+ get path() {
292
+ return this.#path;
293
+ }
294
+
295
+ /**
296
+ * Get fs module instance
297
+ * @returns {object} File system module
298
+ */
299
+ get fs() {
300
+ return this.#fs;
301
+ }
302
+
303
+ /**
304
+ * Get project root path
305
+ * @returns {string} Project root directory path
306
+ */
307
+ get projectRoot() {
308
+ return this.#projectRoot;
309
+ }
310
+ }
package/bin/fit-codegen.js ADDED
@@ -0,0 +1,240 @@
1
+ #!/usr/bin/env node
2
+
3
import { execFileSync, execSync } from "node:child_process";
import fs from "node:fs";
import fsAsync from "node:fs/promises";
import path from "node:path";
import { fileURLToPath } from "node:url";
import { parseArgs } from "node:util";

import protoLoader from "@grpc/proto-loader";
import mustache from "mustache";

import {
  CodegenBase,
  CodegenDefinitions,
  CodegenServices,
  CodegenTypes,
} from "@forwardimpact/libcodegen";
import { createStorage } from "@forwardimpact/libstorage";
import { Logger } from "@forwardimpact/libtelemetry";
import { Finder } from "@forwardimpact/libutil";
22
+
23
+ const __filename = fileURLToPath(import.meta.url);
24
+ const __dirname = path.dirname(__filename);
25
+
26
/**
 * Create tar.gz bundle of all directories inside sourcePath
 * @param {string} sourcePath - Path containing directories to bundle
 * @returns {Promise<void>} Resolves when the bundle exists (or there was nothing to bundle)
 * @throws {Error} When the tar archive cannot be created
 */
async function createBundle(sourcePath) {
  const bundlePath = path.join(sourcePath, "bundle.tar.gz");

  // Only directory entries are bundled; loose files in sourcePath are ignored
  const entries = fs.readdirSync(sourcePath, { withFileTypes: true });
  const directories = entries
    .filter((entry) => entry.isDirectory())
    .map((entry) => entry.name);

  if (directories.length === 0) {
    return; // No directories to bundle
  }

  try {
    // Pass names as an argv array (no shell) so directory names containing
    // spaces or shell metacharacters cannot break or inject into the command
    execFileSync(
      "tar",
      ["-czf", bundlePath, "-C", sourcePath, ...directories],
      { stdio: "pipe" },
    );
  } catch (error) {
    throw new Error(`Failed to create bundle: ${error.message}`, {
      cause: error,
    });
  }
}
53
+
54
/**
 * Print CLI usage help
 */
function printUsage() {
  const usageLines = [
    "Usage:",
    `  npx fit-codegen --all # Generate all code`,
    `  npx fit-codegen --type # Generate protobuf types only`,
    `  npx fit-codegen --service # Generate service bases only`,
    `  npx fit-codegen --client # Generate clients only`,
    `  npx fit-codegen --definition # Generate service definitions only`,
  ];
  process.stdout.write(`${usageLines.join("\n")}\n`);
}
69
+
70
/**
 * Parse command line flags
 * @returns {object} Parsed flags with convenience methods
 */
function parseFlags() {
  // All flags share the same shape, so build the options table from a list
  const flagNames = ["all", "type", "service", "client", "definition"];
  const options = Object.fromEntries(
    flagNames.map((flag) => [flag, { type: "boolean", default: false }]),
  );

  const { values } = parseArgs({ options });

  // --all implies every individual generation flag
  const everything = values.all;
  return {
    doTypes: everything || values.type,
    doServices: everything || values.service,
    doClients: everything || values.client,
    doDefinitions: everything || values.definition,
    hasGenerationFlags() {
      return (
        this.doTypes || this.doServices || this.doClients || this.doDefinitions
      );
    },
  };
}
113
+
114
/**
 * Create codegen instances
 * @param {string} projectRoot - Project root directory path
 * @param {object} path - Path module
 * @param {object} mustache - Mustache module
 * @param {object} protoLoader - Proto loader module
 * @param {object} fs - File system module
 * @returns {object} Codegen instances
 */
function createCodegen(projectRoot, path, mustache, protoLoader, fs) {
  // One shared base is injected into every specialized generator
  const sharedBase = new CodegenBase(projectRoot, path, mustache, protoLoader, fs);
  const types = new CodegenTypes(sharedBase);
  const services = new CodegenServices(sharedBase);
  const definitions = new CodegenDefinitions(sharedBase);
  return { types, services, definitions };
}
131
+
132
/**
 * Execute code generation tasks
 * @param {object} codegens - Codegen instances
 * @param {string} sourcePath - Generated source path
 * @param {object} flags - Parsed flags
 * @returns {Promise<void>}
 */
async function executeGeneration(codegens, sourcePath, flags) {
  // Independent generation tasks run concurrently
  const generationTasks = [];
  if (flags.doTypes) {
    generationTasks.push(codegens.types.run(sourcePath));
  }
  if (flags.doServices) {
    generationTasks.push(codegens.services.runForKind("service", sourcePath));
  }
  if (flags.doClients) {
    generationTasks.push(codegens.services.runForKind("client", sourcePath));
  }
  if (flags.doDefinitions) {
    generationTasks.push(codegens.definitions.run(sourcePath));
  }
  await Promise.all(generationTasks);

  // Export files index the artifacts above, so they are generated afterwards
  const exportTasks = [];
  if (flags.doServices || flags.doClients) {
    exportTasks.push(codegens.services.runExports(sourcePath));
  }
  if (flags.doDefinitions) {
    exportTasks.push(codegens.definitions.runExports(sourcePath));
  }
  await Promise.all(exportTasks);
}
171
+
172
/**
 * Simplified main function
 * @param {string} projectRoot - Project root directory path
 * @param {object} finder - Finder instance for path management
 * @returns {Promise<void>}
 */
async function runCodegen(projectRoot, finder) {
  const flags = parseFlags();

  // Without at least one generation flag there is nothing to do: show help
  if (!flags.hasGenerationFlags()) {
    printUsage();
    process.exitCode = 1;
    return;
  }

  // All generated output goes into the local "generated" storage bucket
  const generatedStorage = createStorage("generated", "local");
  const sourcePath = generatedStorage.path();
  await generatedStorage.ensureBucket();

  const codegens = createCodegen(projectRoot, path, mustache, protoLoader, fs);
  await executeGeneration(codegens, sourcePath, flags);

  // Post-processing: wire up workspace symlinks, then archive the output
  await finder.createPackageSymlinks(sourcePath);
  await createBundle(sourcePath);
}
197
+
198
/**
 * Find the monorepo root directory (the one with workspaces)
 * @param {string} startPath - Starting directory path
 * @returns {string} Project root directory path
 * @throws {Error} When no workspace-bearing package.json is found within 10 levels
 */
function findMonorepoRoot(startPath) {
  let candidate = startPath;
  let remaining = 10; // bound the upward walk so a stray path cannot loop forever
  while (remaining > 0) {
    const manifestPath = path.join(candidate, "package.json");
    if (fs.existsSync(manifestPath)) {
      const manifest = JSON.parse(fs.readFileSync(manifestPath, "utf8"));
      // A "workspaces" field marks the monorepo root
      if (manifest.workspaces) {
        return candidate;
      }
    }
    const parent = path.dirname(candidate);
    if (parent === candidate) break; // reached the filesystem root
    candidate = parent;
    remaining -= 1;
  }
  throw new Error("Could not find monorepo root");
}
220
+
221
/**
 * CLI entry point
 * @returns {Promise<void>} Never rejects; failures exit the process with code 1
 */
async function main() {
  try {
    // Wire up the runtime collaborators, then hand off to runCodegen
    const logger = new Logger("codegen");
    const finder = new Finder(fsAsync, logger, process);
    const projectRoot = findMonorepoRoot(__dirname);
    await runCodegen(projectRoot, finder);
  } catch (error) {
    process.stderr.write(`Error: ${error.message}\n`);
    process.exit(1);
  }
}

// Safety net for rejections that escape main's own try/catch
main().catch((error) => {
  process.stderr.write(`Unexpected error: ${error.message}\n`);
  process.exit(1);
});
package/definitions.js ADDED
@@ -0,0 +1,82 @@
1
/**
 * Handles service definition generation from protobuf files
 * Specializes in gRPC service definition creation for runtime registration
 */
export class CodegenDefinitions {
  #base;

  /**
   * Creates a new definitions generator with base functionality
   * @param {object} base - CodegenBase instance providing shared utilities
   */
  constructor(base) {
    if (!base) throw new Error("CodegenBase instance is required");
    this.#base = base;
  }

  /**
   * Generate service definitions for all proto files
   * @param {string} generatedPath - Path to generated code directory
   * @returns {Promise<void>}
   */
  async run(generatedPath) {
    if (!generatedPath) throw new Error("generatedPath is required");
    const { path, fs } = this.#base;

    // common.proto carries shared messages only; it never defines a service
    const commonSuffix = path.sep + "common.proto";
    const protoFiles = this.#base
      .collectProtoFiles({ includeTools: true })
      .filter((protoFile) => !protoFile.endsWith(commonSuffix));

    const definitionsDir = path.join(generatedPath, "definitions");
    fs.mkdirSync(definitionsDir, { recursive: true });

    for (const protoFile of protoFiles) {
      const protoName = path.basename(protoFile, ".proto");
      await this.#base.generateArtifact(
        "definition",
        protoFile,
        definitionsDir,
        `${protoName}.js`,
      );
    }

    // Keep the exports index in sync with the freshly generated definitions
    await this.runExports(generatedPath);
  }

  /**
   * Generate definitions exports file with all service definitions
   * @param {string} generatedPath - Path to generated code directory
   * @returns {Promise<void>}
   */
  async runExports(generatedPath) {
    if (!generatedPath) throw new Error("generatedPath is required");
    const { path, fs } = this.#base;

    const definitionsDir = path.join(generatedPath, "definitions");
    const outputFile = path.join(definitionsDir, "exports.js");

    fs.mkdirSync(path.dirname(outputFile), { recursive: true });

    const definitions = [];
    if (fs.existsSync(definitionsDir)) {
      // Every generated .js file except the index itself becomes an export entry
      const generatedFiles = fs
        .readdirSync(definitionsDir)
        .filter((file) => file.endsWith(".js") && file !== "exports.js");
      for (const file of generatedFiles) {
        const serviceName = path.basename(file, ".js");
        definitions.push({
          name: `${this.#base.pascalCase(serviceName)}ServiceDefinition`,
          serviceName,
        });
      }
    }

    const content = this.#base.renderTemplate(
      this.#base.loadTemplate("definitions-exports"),
      {
        definitions,
        hasDefinitions: definitions.length > 0,
      },
    );

    fs.writeFileSync(outputFile, content);
  }
}
package/index.js ADDED
@@ -0,0 +1,5 @@
1
// Public package entry point: re-export the modular codegen classes so
// consumers can `import { ... } from "@forwardimpact/libcodegen"`.
export { CodegenBase } from "./base.js";
export { CodegenTypes } from "./types.js";
export { CodegenServices } from "./services.js";
export { CodegenDefinitions } from "./definitions.js";
package/package.json ADDED
@@ -0,0 +1,27 @@
1
+ {
2
+ "name": "@forwardimpact/libcodegen",
3
+ "version": "0.1.27",
4
+ "description": "Protocol Buffer code generation utilities for Guide",
5
+ "license": "Apache-2.0",
6
+ "author": "D. Olsson <hi@senzilla.io>",
7
+ "type": "module",
8
+ "main": "index.js",
9
+ "bin": {
10
+ "fit-codegen": "./bin/fit-codegen.js"
11
+ },
12
+ "engines": {
13
+ "node": ">=22.0.0"
14
+ },
15
+ "scripts": {
16
+ "test": "node --test test/*.test.js"
17
+ },
18
+ "dependencies": {
19
+ "@grpc/proto-loader": "^0.8.0",
20
+ "mustache": "^4.2.0",
21
+ "protobufjs": "^7.5.4",
22
+ "protobufjs-cli": "^1.2.0"
23
+ },
24
+ "devDependencies": {
25
+ "@forwardimpact/libharness": "^0.1.5"
26
+ }
27
+ }
package/services.js ADDED
@@ -0,0 +1,94 @@
1
+ /**
2
+ * Handles service and client generation from protobuf files
3
+ * Specializes in gRPC service base classes and client generation
4
+ */
5
+ export class CodegenServices {
6
+ #base;
7
+
8
+ /**
9
+ * Creates a new services generator with base functionality
10
+ * @param {object} base - CodegenBase instance providing shared utilities
11
+ */
12
+ constructor(base) {
13
+ if (!base) throw new Error("CodegenBase instance is required");
14
+ this.#base = base;
15
+ }
16
+
17
+ /**
18
+ * Generate service or client artifacts for all proto files
19
+ * @param {"service"|"client"} kind - Type of artifacts to generate
20
+ * @param {string} generatedPath - Path to generated code directory
21
+ * @returns {Promise<void>}
22
+ */
23
+ async runForKind(kind, generatedPath) {
24
+ if (!generatedPath) throw new Error("generatedPath is required");
25
+ const protoFiles = this.#base
26
+ .collectProtoFiles({ includeTools: true })
27
+ .filter((file) => !file.endsWith(this.#base.path.sep + "common.proto"));
28
+
29
+ for (const protoFile of protoFiles) {
30
+ const basename = this.#base.path.basename(protoFile, ".proto");
31
+ const outDir = this.#base.path.join(generatedPath, "services", basename);
32
+ this.#base.fs.mkdirSync(outDir, { recursive: true });
33
+ await this.#base.generateArtifact(kind, protoFile, outDir);
34
+ }
35
+ }
36
+
37
+ /**
38
+ * Generate services exports file with all service bases and clients
39
+ * @param {string} generatedPath - Path to generated code directory
40
+ * @returns {Promise<void>}
41
+ */
42
+ async runExports(generatedPath) {
43
+ if (!generatedPath) throw new Error("generatedPath is required");
44
+ const serviceDir = this.#base.path.join(generatedPath, "services");
45
+ const outputFile = this.#base.path.join(serviceDir, "exports.js");
46
+
47
+ this.#base.fs.mkdirSync(this.#base.path.dirname(outputFile), {
48
+ recursive: true,
49
+ });
50
+
51
+ const services = [];
52
+ const clients = [];
53
+
54
+ if (this.#base.fs.existsSync(serviceDir)) {
55
+ for (const dir of this.#base.fs.readdirSync(serviceDir)) {
56
+ const servicePath = this.#base.path.join(serviceDir, dir);
57
+ if (!this.#base.fs.statSync(servicePath).isDirectory()) continue;
58
+
59
+ const serviceName = this.#base.pascalCase(dir);
60
+ if (
61
+ this.#base.fs.existsSync(
62
+ this.#base.path.join(servicePath, "service.js"),
63
+ )
64
+ ) {
65
+ services.push({
66
+ name: `${serviceName}Base`,
67
+ path: `./${dir}/service.js`,
68
+ });
69
+ }
70
+ if (
71
+ this.#base.fs.existsSync(
72
+ this.#base.path.join(servicePath, "client.js"),
73
+ )
74
+ ) {
75
+ clients.push({
76
+ name: `${serviceName}Client`,
77
+ path: `./${dir}/client.js`,
78
+ });
79
+ }
80
+ }
81
+ }
82
+
83
+ const template = this.#base.loadTemplate("services-exports");
84
+
85
+ const content = this.#base.renderTemplate(template, {
86
+ services,
87
+ clients,
88
+ hasServices: services.length > 0,
89
+ hasClients: clients.length > 0,
90
+ });
91
+
92
+ this.#base.fs.writeFileSync(outputFile, content);
93
+ }
94
+ }
@@ -0,0 +1,70 @@
1
+ /* eslint no-unused-vars: "off" */
2
+
3
+ import { Client } from "@forwardimpact/librpc/client.js";
4
+ import { createAuth, createGrpc } from "@forwardimpact/librpc/base.js";
5
+ import { createObserver } from "@forwardimpact/libtelemetry";
6
+ import { {{#importNamespaces}}{{name}}{{^isLast}}, {{/isLast}}{{/importNamespaces}} } from "@forwardimpact/libtype";
7
+
8
+ /**
9
+ * Typed client for the {{serviceName}} gRPC service.
10
+ * Extends the `Client` class for shared gRPC client functionality.
11
+ */
12
+ export class {{className}} extends Client {
13
+ /**
14
+ * Creates a new {{serviceName}} client instance
15
+ * @param {object} config - Service configuration
16
+ * @param {import("@forwardimpact/libtelemetry").Logger} [logger] - Optional logger instance
17
+ * @param {import("@forwardimpact/libtelemetry").Tracer} [tracer] - Optional tracer for distributed tracing
18
+ * @param {Function} [authFn] - Optional authentication function
19
+ */
20
+ constructor(config, logger = null, tracer = null, authFn = createAuth) {
21
+ super(config, logger, tracer, createObserver, createGrpc, authFn);
22
+ }
23
+
24
+ {{#methods}}
25
+ {{#responseStream}}
26
+ /**
27
+ * Call the `{{name}}` RPC with request/response type conversion.
28
+ * @param { {{requestTypeNamespace}}.{{requestType}} } {{paramName}} - Typed request message.
29
+ * @returns { import("@grpc/grpc-js").ClientReadableStream<{{responseTypeNamespace}}.{{responseType}}> } Response stream emitting typed messages.
30
+ */
31
+ {{name}}({{paramName}}) {
32
+ // Type validation
33
+ if (!({{paramName}} instanceof {{requestTypeNamespace}}.{{requestType}})) {
34
+ throw new TypeError(
35
+ `{{name}}: Expected parameter to be instanceof {{requestTypeNamespace}}.{{requestType}}`,
36
+ );
37
+ }
38
+
39
+ // Convert to plain object
40
+ const request = {{requestTypeNamespace}}.{{requestType}}.toObject({{paramName}});
41
+
42
+ // Make gRPC call
43
+ return this.callStream("{{name}}", request, (res) => {{responseTypeNamespace}}.{{responseType}}.fromObject(res));
44
+ }
45
+ {{/responseStream}}
46
+ {{^responseStream}}
47
+ /**
48
+ * Call the `{{name}}` RPC with request/response type conversion.
49
+ * @param { {{requestTypeNamespace}}.{{requestType}} } {{paramName}} - Typed request message.
50
+ * @returns { Promise<{{responseTypeNamespace}}.{{responseType}}> } Typed response message.
51
+ */
52
+ async {{name}}({{paramName}}) {
53
+ // Type validation
54
+ if (!({{paramName}} instanceof {{requestTypeNamespace}}.{{requestType}})) {
55
+ throw new TypeError(
56
+ `{{name}}: Expected parameter to be instanceof {{requestTypeNamespace}}.{{requestType}}`,
57
+ );
58
+ }
59
+
60
+ // Convert to plain object
61
+ const request = {{requestTypeNamespace}}.{{requestType}}.toObject({{paramName}});
62
+
63
+ // Make gRPC call (tracing handled by base Client class)
64
+ return this.callUnary("{{name}}", request, (res) => {{responseTypeNamespace}}.{{responseType}}.fromObject(res));
65
+ }
66
+ {{/responseStream}}
67
+ {{^isLast}}
68
+
69
+ {{/isLast}}{{/methods}}
70
+ }
@@ -0,0 +1,31 @@
1
+ import { {{#importNamespaces}}{{name}}{{^isLast}}, {{/isLast}}{{/importNamespaces}} } from "@forwardimpact/libtype";
2
+
3
+ /**
4
+ * Pre-compiled gRPC service definition for {{serviceName}}
5
+ * Generated at build time
6
+ */
7
+ export const {{serviceName}}ServiceDefinition = {
8
+ {{#methods}}
9
+ {{name}}: {
10
+ path: '/{{packageName}}.{{serviceName}}/{{name}}',
11
+ requestStream: {{requestStream}},
12
+ responseStream: {{responseStream}},
13
+ requestSerialize: (value) => {
14
+ return Buffer.from({{requestTypeNamespace}}.{{requestType}}.encode(value).finish());
15
+ },
16
+ requestDeserialize: (value) => {
17
+ return {{requestTypeNamespace}}.{{requestType}}.toObject(
18
+ {{requestTypeNamespace}}.{{requestType}}.decode(value)
19
+ );
20
+ },
21
+ responseSerialize: (value) => {
22
+ return Buffer.from({{responseTypeNamespace}}.{{responseType}}.encode(value).finish());
23
+ },
24
+ responseDeserialize: (value) => {
25
+ return {{responseTypeNamespace}}.{{responseType}}.toObject(
26
+ {{responseTypeNamespace}}.{{responseType}}.decode(value)
27
+ );
28
+ },
29
+ },
30
+ {{/methods}}
31
+ };
@@ -0,0 +1,31 @@
1
+ // THIS FILE IS AUTO-GENERATED - DO NOT EDIT
2
+ // Generated by scripts/codegen.js
3
+
4
+ {{#hasDefinitions}}
5
+ // Service Definitions
6
+ {{#definitions}}
7
+ export { {{name}} } from "./{{{serviceName}}}.js";
8
+ {{/definitions}}
9
+ {{/hasDefinitions}}
10
+
11
+ {{#hasDefinitions}}
12
+ // Import service definitions to make them available for aggregation
13
+ {{#definitions}}
14
+ import { {{name}} } from "./{{{serviceName}}}.js";
15
+ {{/definitions}}
16
+ {{/hasDefinitions}}
17
+
18
+ // Aggregate exports for convenience
19
+ {{#hasDefinitions}}
20
+ export const definitions = {
21
+ {{#definitions}}
22
+ {{serviceName}}: {{name}},
23
+ {{/definitions}}
24
+ };
25
+ {{/hasDefinitions}}
26
+ {{^hasDefinitions}}
27
+ export const definitions = {};
28
+ {{/hasDefinitions}}
29
+
30
+ // Re-export as default for dynamic importing
31
+ export default definitions;
@@ -0,0 +1,82 @@
1
+ /* eslint no-unused-vars: "off" */
2
+
3
+ import { {{#importNamespaces}}{{name}}{{^isLast}}, {{/isLast}}{{/importNamespaces}} } from "@forwardimpact/libtype";
4
+
5
+ /**
6
+ * Base class for {{serviceName}} service implementation
7
+ */
8
+ export class {{className}} {
9
+ config;
10
+
11
+ /**
12
+ * Creates a new {{serviceName}} service instance
13
+ * @param {object} config - Service configuration
14
+ */
15
+ constructor(config) {
16
+ if (!config) throw new Error("config is required");
17
+ this.config = config;
18
+ }
19
+
20
+ {{#methods}}
21
+ {{#responseStream}}
22
+ /**
23
+ * Must be implemented by subclass (server streaming)
24
+ * @param { {{requestTypeNamespace}}.{{requestType}} } {{paramName}} - Request parameters
25
+ * @param { (response: {{responseTypeNamespace}}.{{responseType}}) => void } write - Callback to write response messages
26
+ * @returns { Promise<void> } Resolves when streaming is complete
27
+ */
28
+ async {{name}}({{paramName}}, write) {
29
+ throw new Error("{{name}} not implemented");
30
+ }
31
+ {{/responseStream}}
32
+ {{^responseStream}}
33
+ /**
34
+ * Must be implemented by subclass
35
+ * @param { {{requestTypeNamespace}}.{{requestType}} } {{paramName}} - Request parameters
36
+ * @returns { Promise<{{responseTypeNamespace}}.{{responseType}}> } Response object
37
+ */
38
+ async {{name}}({{paramName}}) {
39
+ throw new Error("{{name}} not implemented");
40
+ }
41
+ {{/responseStream}}
42
+
43
+ {{/methods}}
44
+ /**
45
+ * Creates gRPC handlers for this service instance
46
+ * @returns { object } Map of method names to handler functions
47
+ */
48
+ getHandlers() {
49
+ return {
50
+ {{#methods}}
51
+ {{#responseStream}}
52
+ {{name}}: async (call) => {
53
+ // Validate and convert request
54
+ const error = {{requestTypeNamespace}}.{{requestType}}.verify(call.request);
55
+ if (error) throw new Error(`{{name}}: ${error}`);
56
+ const req = {{requestTypeNamespace}}.{{requestType}}.fromObject(call.request);
57
+
58
+ // Stream wrapper that writes typed responses
59
+ const write = (response) => {
60
+ call.write({{responseTypeNamespace}}.{{responseType}}.toObject(response));
61
+ };
62
+
63
+ // Call implementation and end stream
64
+ await this.{{name}}(req, write);
65
+ call.end();
66
+ },
67
+ {{/responseStream}}
68
+ {{^responseStream}}
69
+ {{name}}: async (call) => {
70
+ // Validate and convert request
71
+ const error = {{requestTypeNamespace}}.{{requestType}}.verify(call.request);
72
+ if (error) throw new Error(`{{name}}: ${error}`);
73
+ const req = {{requestTypeNamespace}}.{{requestType}}.fromObject(call.request);
74
+
75
+ // Call implementation
76
+ return await this.{{name}}(req);
77
+ },
78
+ {{/responseStream}}
79
+ {{/methods}}
80
+ };
81
+ }
82
+ }
@@ -0,0 +1,65 @@
1
+ // THIS FILE IS AUTO-GENERATED - DO NOT EDIT
2
+ // Generated by scripts/codegen.js
3
+
4
+ {{#hasServices}}
5
+ // Service Base Classes
6
+ {{#services}}
7
+ export { {{name}} } from "{{{path}}}";
8
+ {{/services}}
9
+ {{/hasServices}}
10
+
11
+ {{#hasClients}}
12
+ // Client Classes
13
+ {{#clients}}
14
+ export { {{name}} } from "{{{path}}}";
15
+ {{/clients}}
16
+ {{/hasClients}}
17
+
18
+ {{#hasServices}}
19
+ // Import service base classes to make them available for aggregation
20
+ {{#services}}
21
+ import { {{name}} } from "{{{path}}}";
22
+ {{/services}}
23
+ {{/hasServices}}
24
+
25
+ {{#hasClients}}
26
+ // Import client classes to make them available for aggregation
27
+ {{#clients}}
28
+ import { {{name}} } from "{{{path}}}";
29
+ {{/clients}}
30
+ {{/hasClients}}
31
+
32
+ // Aggregate exports for convenience
33
+ {{#hasServices}}
34
+ export const services = {
35
+ {{#services}}
36
+ {{name}},
37
+ {{/services}}
38
+ };
39
+ {{/hasServices}}
40
+ {{^hasServices}}
41
+ export const services = {};
42
+ {{/hasServices}}
43
+
44
+ {{#hasClients}}
45
+ export const clients = {
46
+ {{#clients}}
47
+ {{name}},
48
+ {{/clients}}
49
+ };
50
+ {{/hasClients}}
51
+ {{^hasClients}}
52
+ export const clients = {};
53
+ {{/hasClients}}
54
+
55
+ // Re-export everything as default for dynamic importing
56
+ export default {
57
+ services,
58
+ clients,
59
+ {{#services}}
60
+ {{name}},
61
+ {{/services}}
62
+ {{#clients}}
63
+ {{name}},
64
+ {{/clients}}
65
+ };
package/types.js ADDED
@@ -0,0 +1,92 @@
1
+ /**
2
+ * Handles JavaScript type generation from protobuf files
3
+ * Specializes in Protocol Buffer to JavaScript type conversion
4
+ */
5
+ export class CodegenTypes {
6
+ #base;
7
+
8
+ /**
9
+ * Creates a new types generator with base functionality
10
+ * @param {object} base - CodegenBase instance providing shared utilities
11
+ */
12
+ constructor(base) {
13
+ if (!base) throw new Error("CodegenBase instance is required");
14
+ this.#base = base;
15
+ }
16
+
17
+ /**
18
+ * Generate JavaScript types from protobuf files
19
+ * @param {string} generatedPath - Path to generated code directory
20
+ * @returns {Promise<void>}
21
+ */
22
+ async run(generatedPath) {
23
+ if (!generatedPath) throw new Error("generatedPath is required");
24
+ const typesDir = this.#base.path.resolve(generatedPath, "types");
25
+ const protoOutDir = this.#base.path.resolve(generatedPath, "proto");
26
+ const jsOutFile = this.#base.path.resolve(typesDir, "types.js");
27
+
28
+ // Create directories and clean up existing files
29
+ [typesDir, protoOutDir].forEach((dir) => {
30
+ this.#base.fs.mkdirSync(dir, { recursive: true });
31
+ });
32
+
33
+ if (this.#base.fs.existsSync(jsOutFile)) {
34
+ this.#base.fs.unlinkSync(jsOutFile);
35
+ }
36
+
37
+ const protoFiles = this.#base.collectProtoFiles({ includeTools: true });
38
+
39
+ // Copy all proto source files into generated/proto for runtime loading
40
+ protoFiles.forEach((protoFile) => {
41
+ this.#base.fs.copyFileSync(
42
+ protoFile,
43
+ this.#base.path.resolve(
44
+ protoOutDir,
45
+ this.#base.path.basename(protoFile),
46
+ ),
47
+ );
48
+ });
49
+
50
+ await this.generateJavaScriptTypes(protoFiles, jsOutFile);
51
+
52
+ // ESM resolution fix: ensure explicit extension for Node ESM and default import
53
+ const content = this.#base.fs.readFileSync(jsOutFile, "utf8");
54
+ const fixed = content
55
+ .replace(/from\s+"protobufjs\/minimal";/, 'from "protobufjs/minimal.js";')
56
+ .replace(
57
+ /import\s+\*\s+as\s+\$protobuf\s+from\s+"protobufjs\/minimal\.js";/,
58
+ 'import $protobuf from "protobufjs/minimal.js";',
59
+ );
60
+
61
+ if (fixed !== content) {
62
+ this.#base.fs.writeFileSync(jsOutFile, fixed, "utf8");
63
+ }
64
+ }
65
+
66
+ /**
67
+ * Generate JavaScript types using protobufjs compiler
68
+ * @param {string[]} protoFiles - Array of proto file paths to compile
69
+ * @param {string} outFile - Output JavaScript file path
70
+ * @returns {Promise<void>}
71
+ */
72
+ async generateJavaScriptTypes(protoFiles, outFile) {
73
+ const args = [
74
+ "-t",
75
+ "static-module",
76
+ "-w",
77
+ "es6",
78
+ "--no-delimited",
79
+ "--no-create",
80
+ "--no-service",
81
+ "--force-message",
82
+ "--keep-case",
83
+ "-o",
84
+ outFile,
85
+ ...protoFiles,
86
+ ];
87
+
88
+ await this.#base.run("npx", ["pbjs", ...args], {
89
+ cwd: this.#base.projectRoot,
90
+ });
91
+ }
92
+ }