deukpack 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (122)
  1. package/LICENSE +201 -0
  2. package/NOTICE +6 -0
  3. package/README.ko.md +138 -0
  4. package/README.md +182 -0
  5. package/RELEASING.md +71 -0
  6. package/bin/deukpack.js +9 -0
  7. package/dist/ast/DeukPackASTBuilder.d.ts +153 -0
  8. package/dist/ast/DeukPackASTBuilder.d.ts.map +1 -0
  9. package/dist/ast/DeukPackASTBuilder.js +931 -0
  10. package/dist/ast/DeukPackASTBuilder.js.map +1 -0
  11. package/dist/codegen/CSharpGenerator.d.ts +136 -0
  12. package/dist/codegen/CSharpGenerator.d.ts.map +1 -0
  13. package/dist/codegen/CSharpGenerator.js +2303 -0
  14. package/dist/codegen/CSharpGenerator.js.map +1 -0
  15. package/dist/codegen/CodeGenerator.d.ts +11 -0
  16. package/dist/codegen/CodeGenerator.d.ts.map +1 -0
  17. package/dist/codegen/CodeGenerator.js +11 -0
  18. package/dist/codegen/CodeGenerator.js.map +1 -0
  19. package/dist/codegen/CppGenerator.d.ts +23 -0
  20. package/dist/codegen/CppGenerator.d.ts.map +1 -0
  21. package/dist/codegen/CppGenerator.js +215 -0
  22. package/dist/codegen/CppGenerator.js.map +1 -0
  23. package/dist/codegen/HighPerformanceCSharpGenerator.d.ts +29 -0
  24. package/dist/codegen/HighPerformanceCSharpGenerator.d.ts.map +1 -0
  25. package/dist/codegen/HighPerformanceCSharpGenerator.js +486 -0
  26. package/dist/codegen/HighPerformanceCSharpGenerator.js.map +1 -0
  27. package/dist/core/DeukPackEngine.d.ts +69 -0
  28. package/dist/core/DeukPackEngine.d.ts.map +1 -0
  29. package/dist/core/DeukPackEngine.js +379 -0
  30. package/dist/core/DeukPackEngine.js.map +1 -0
  31. package/dist/core/DeukPackGenerator.d.ts +9 -0
  32. package/dist/core/DeukPackGenerator.d.ts.map +1 -0
  33. package/dist/core/DeukPackGenerator.js +15 -0
  34. package/dist/core/DeukPackGenerator.js.map +1 -0
  35. package/dist/core/DeukParser.d.ts +12 -0
  36. package/dist/core/DeukParser.d.ts.map +1 -0
  37. package/dist/core/DeukParser.js +27 -0
  38. package/dist/core/DeukParser.js.map +1 -0
  39. package/dist/core/IdlParser.d.ts +27 -0
  40. package/dist/core/IdlParser.d.ts.map +1 -0
  41. package/dist/core/IdlParser.js +157 -0
  42. package/dist/core/IdlParser.js.map +1 -0
  43. package/dist/core/ProtoParser.d.ts +12 -0
  44. package/dist/core/ProtoParser.d.ts.map +1 -0
  45. package/dist/core/ProtoParser.js +27 -0
  46. package/dist/core/ProtoParser.js.map +1 -0
  47. package/dist/csharp/DpExcelProtocol.cs +3005 -0
  48. package/dist/csharp/DpProtocolLibrary.cs +13 -0
  49. package/dist/index.d.ts +22 -0
  50. package/dist/index.d.ts.map +1 -0
  51. package/dist/index.js +43 -0
  52. package/dist/index.js.map +1 -0
  53. package/dist/lexer/DeukLexer.d.ts +31 -0
  54. package/dist/lexer/DeukLexer.d.ts.map +1 -0
  55. package/dist/lexer/DeukLexer.js +292 -0
  56. package/dist/lexer/DeukLexer.js.map +1 -0
  57. package/dist/lexer/IdlLexer.d.ts +33 -0
  58. package/dist/lexer/IdlLexer.d.ts.map +1 -0
  59. package/dist/lexer/IdlLexer.js +286 -0
  60. package/dist/lexer/IdlLexer.js.map +1 -0
  61. package/dist/native/NativeDeukPackEngine.d.ts +30 -0
  62. package/dist/native/NativeDeukPackEngine.d.ts.map +1 -0
  63. package/dist/native/NativeDeukPackEngine.js +99 -0
  64. package/dist/native/NativeDeukPackEngine.js.map +1 -0
  65. package/dist/proto/ProtoASTBuilder.d.ts +29 -0
  66. package/dist/proto/ProtoASTBuilder.d.ts.map +1 -0
  67. package/dist/proto/ProtoASTBuilder.js +239 -0
  68. package/dist/proto/ProtoASTBuilder.js.map +1 -0
  69. package/dist/proto/ProtoLexer.d.ts +29 -0
  70. package/dist/proto/ProtoLexer.d.ts.map +1 -0
  71. package/dist/proto/ProtoLexer.js +264 -0
  72. package/dist/proto/ProtoLexer.js.map +1 -0
  73. package/dist/proto/ProtoTypes.d.ts +40 -0
  74. package/dist/proto/ProtoTypes.d.ts.map +1 -0
  75. package/dist/proto/ProtoTypes.js +37 -0
  76. package/dist/proto/ProtoTypes.js.map +1 -0
  77. package/dist/protocols/BinaryProtocol.d.ts +7 -0
  78. package/dist/protocols/BinaryProtocol.d.ts.map +1 -0
  79. package/dist/protocols/BinaryProtocol.js +11 -0
  80. package/dist/protocols/BinaryProtocol.js.map +1 -0
  81. package/dist/protocols/BinaryWriter.d.ts +22 -0
  82. package/dist/protocols/BinaryWriter.d.ts.map +1 -0
  83. package/dist/protocols/BinaryWriter.js +104 -0
  84. package/dist/protocols/BinaryWriter.js.map +1 -0
  85. package/dist/protocols/CompactProtocol.d.ts +7 -0
  86. package/dist/protocols/CompactProtocol.d.ts.map +1 -0
  87. package/dist/protocols/CompactProtocol.js +11 -0
  88. package/dist/protocols/CompactProtocol.js.map +1 -0
  89. package/dist/protocols/ExcelProtocol.d.ts +98 -0
  90. package/dist/protocols/ExcelProtocol.d.ts.map +1 -0
  91. package/dist/protocols/ExcelProtocol.js +639 -0
  92. package/dist/protocols/ExcelProtocol.js.map +1 -0
  93. package/dist/protocols/JsonProtocol.d.ts +68 -0
  94. package/dist/protocols/JsonProtocol.d.ts.map +1 -0
  95. package/dist/protocols/JsonProtocol.js +422 -0
  96. package/dist/protocols/JsonProtocol.js.map +1 -0
  97. package/dist/protocols/WireProtocol.d.ts +348 -0
  98. package/dist/protocols/WireProtocol.d.ts.map +1 -0
  99. package/dist/protocols/WireProtocol.js +912 -0
  100. package/dist/protocols/WireProtocol.js.map +1 -0
  101. package/dist/serialization/WireDeserializer.d.ts +8 -0
  102. package/dist/serialization/WireDeserializer.d.ts.map +1 -0
  103. package/dist/serialization/WireDeserializer.js +13 -0
  104. package/dist/serialization/WireDeserializer.js.map +1 -0
  105. package/dist/serialization/WireSerializer.d.ts +20 -0
  106. package/dist/serialization/WireSerializer.d.ts.map +1 -0
  107. package/dist/serialization/WireSerializer.js +100 -0
  108. package/dist/serialization/WireSerializer.js.map +1 -0
  109. package/dist/types/DeukPackTypes.d.ts +291 -0
  110. package/dist/types/DeukPackTypes.d.ts.map +1 -0
  111. package/dist/types/DeukPackTypes.js +76 -0
  112. package/dist/types/DeukPackTypes.js.map +1 -0
  113. package/dist/utils/EndianUtils.d.ts +11 -0
  114. package/dist/utils/EndianUtils.d.ts.map +1 -0
  115. package/dist/utils/EndianUtils.js +32 -0
  116. package/dist/utils/EndianUtils.js.map +1 -0
  117. package/dist/utils/PerformanceMonitor.d.ts +26 -0
  118. package/dist/utils/PerformanceMonitor.d.ts.map +1 -0
  119. package/dist/utils/PerformanceMonitor.js +57 -0
  120. package/dist/utils/PerformanceMonitor.js.map +1 -0
  121. package/package.json +77 -0
  122. package/scripts/build_deukpack.js +669 -0
@@ -0,0 +1,669 @@
1
+ #!/usr/bin/env node
2
+
3
+ /**
4
+ * DeukPack Builder
5
+ * 100x faster than Apache Thrift with multi-language support
6
+ * Supports single-file mode and --pipeline <config.json> for multi-job + copy steps.
7
+ */
8
+
9
+ const { DeukPackEngine } = require('../dist/index');
10
+ const path = require('path');
11
+ const fs = require('fs').promises;
12
+
13
/**
 * CLI entry point.
 * Two modes:
 *   - Pipeline: `--pipeline <config.json>` delegates every job to runPipeline().
 *   - Single file: `<thrift_file> <output_dir> [options]` runs one build.
 * Prints usage and exits with code 1 on bad arguments or a failed build.
 */
async function main() {
  const args = process.argv.slice(2);

  // Pipeline mode short-circuits everything else.
  if (args[0] === '--pipeline' && args[1]) {
    try {
      await runPipeline(path.resolve(args[1]));
    } catch (error) {
      console.error('❌ Pipeline failed:', error.message);
      process.exit(1);
    }
    return;
  }

  if (args.length < 2) {
    console.error('Usage: node build_deukpack.js <thrift_file> <output_dir> [options]');
    console.error(' node build_deukpack.js --pipeline <pipeline_config.json>');
    console.error('Options:');
    console.error(' -I, -i <path> Include path (individual)');
    console.error(' -r <path> Include path + direct subdirs (recursive)');
    console.error(' --define-root <name> IDL root folder (default: _deuk_define, legacy: _thrift)');
    console.error(' --csharp Generate C# code');
    console.error(' --cpp Generate C++ code');
    console.error(' --js Generate JavaScript code (for meta editor: Thrift JS <-> Webix/Thrift JSON/Excel)');
    console.error(' --protocol <protocol> Serialization protocol (binary|compact|json)');
    console.error(' --endianness <endian> Endianness (little|big)');
    console.error(' --convert-to-deuk [subdir] Emit .deuk from parsed Thrift (subdir default: deuk). Legacy→table migration.');
    console.error(' --ef Enable Entity Framework support ( [Table]/[Key]/[Column] + DeukPackDbContext.g.cs ).');
    process.exit(1);
  }

  const [thriftFile, outputDir] = args;
  const { options, includePaths: extraIncludePaths, includePathsRecursive } = parseOptions(args.slice(2));

  // The root thrift file's directory is always the first include path.
  const baseDir = path.dirname(path.resolve(thriftFile));
  const recursiveIncludes = await expandRecursiveIncludePaths(includePathsRecursive.map((p) => path.resolve(p)));
  const parseOpts = {
    includePaths: [baseDir, ...extraIncludePaths, ...recursiveIncludes],
    defineRoot: options.defineRoot
  };

  console.log(`🚀 DeukPack Builder v1.0.0`);
  console.log(`📁 Input: ${thriftFile}`);
  console.log(`📁 Output: ${outputDir}`);
  console.log(`⚙️ Options:`, options);

  try {
    await runOneBuild(thriftFile, outputDir, options, parseOpts);
    console.log('\n🎉 Build completed successfully!');
  } catch (error) {
    console.error('❌ Build failed:', error.message);
    process.exit(1);
  }
}
69
+
70
/**
 * Run a single thrift build: parse + generate. Used by both single-file and pipeline mode.
 * @param {string} thriftFile - Path to root thrift file
 * @param {string} outputDir - Directory for generated output (csharp/, cpp/, etc.)
 * @param {object} options - { csharp, cpp, js, json, defineRoot, convertToDeuk, convertToDeukOutputDir, emitPerFile, ef }
 * @param {object} parseOpts - { includePaths, defineRoot } for parseFileWithIncludes
 */
async function runOneBuild(thriftFile, outputDir, options, parseOpts) {
  await fs.mkdir(outputDir, { recursive: true });
  const engine = new DeukPackEngine();
  // define_version.txt sits next to the root thrift file; forwarded to the C# generator.
  const defineVersionFile = path.join(path.dirname(path.resolve(thriftFile)), 'define_version.txt');

  console.log('📖 Parsing Thrift files...');
  const startTime = Date.now();
  const ast = await engine.parseFileWithIncludes(thriftFile, parseOpts);
  const parseTime = Date.now() - startTime;
  // Fix: the parse result was previously logged twice (with and without the ✅ prefix);
  // merged into a single line carrying both the file count and the elapsed time.
  console.log(`✅ Parsed ${ast.filesProcessed || 1} files in ${parseTime}ms`);

  // Generators are independent of each other, so they run in parallel.
  const generationPromises = [];
  if (options.csharp) generationPromises.push(generateCSharp(engine, ast, outputDir, { ...options, defineVersionFile }));
  if (options.cpp) generationPromises.push(generateCpp(engine, ast, outputDir));
  if (options.js) generationPromises.push(generateJavaScript(engine, ast, outputDir));
  await Promise.all(generationPromises);

  if (options.convertToDeuk) {
    // The Thrift→.deuk migration rules are project-specific and only ship in the full tree.
    const legacyMigrator = path.join(__dirname, 'internal', 'legacy-migration', 'convert_thrift_to_deuk.js');
    let run;
    try {
      ({ run } = require(legacyMigrator));
    } catch (e) {
      console.error('❌ --convert-to-deuk: internal legacy migration scripts are not available.');
      console.error(' (OSS / npm package excludes project-specific Thrift→.deuk rules.)');
      console.error(' Use the full DeukPack tree with scripts/internal/legacy-migration/, or migrate by other means.');
      process.exit(1);
    }
    await run(ast, thriftFile, outputDir, options.convertToDeukOutputDir, { emitPerFile: options.emitPerFile });
  }

  const metrics = engine.getPerformanceMetrics();
  console.log('\n📊 Performance Metrics:');
  console.log(` Parse Time: ${metrics.parseTime}ms`);
  console.log(` Generate Time: ${metrics.generateTime}ms`);
  console.log(` Memory Usage: ${(metrics.memoryUsage / 1024 / 1024).toFixed(2)} MB`);
  console.log(` Files Processed: ${metrics.fileCount}`);
}
116
+
117
/**
 * Expand each absolute directory path into [dir, ...its direct subdirectories].
 * Directories that are missing or unreadable are skipped with a warning.
 * @param {string[]} dirPaths - Absolute directory paths to expand.
 * @returns {Promise<string[]>} Flat list of roots followed by their first-level subdirs.
 */
async function expandRecursiveIncludePaths(dirPaths) {
  const expanded = [];
  for (const root of dirPaths) {
    let entries;
    try {
      entries = await fs.readdir(root, { withFileTypes: true });
    } catch (e) {
      console.warn(` ⚠️ Skip recursive include (missing or not readable): ${root}`);
      continue;
    }
    expanded.push(root);
    const subDirs = entries
      .filter((entry) => entry.isDirectory())
      .map((entry) => path.join(root, entry.name));
    expanded.push(...subDirs);
  }
  return expanded;
}
136
+
137
/**
 * Normalize the `includePaths` entries of a pipeline config.
 * Entries may be plain strings (individual paths) or objects of the shape
 * `{ path, recursive: true }` (expanded to the dir plus its direct subdirs).
 * Every path is resolved relative to configDir.
 * @param {Array<string|{path: string, recursive: boolean}>} includePaths
 * @param {string} configDir - Directory of the pipeline config file.
 * @returns {Promise<string[]>} Flat array of absolute paths.
 */
async function resolveIncludePathsFromConfig(includePaths, configDir) {
  if (!Array.isArray(includePaths) || includePaths.length === 0) return [];

  const individual = [];
  const recursiveRoots = [];
  for (const entry of includePaths) {
    if (typeof entry === 'string') {
      individual.push(path.resolve(configDir, entry));
      continue;
    }
    const wantsRecursive = entry && typeof entry === 'object' && entry.path != null && entry.recursive;
    if (wantsRecursive) {
      recursiveRoots.push(path.resolve(configDir, entry.path));
    }
    // Any other shape is silently ignored, same as before.
  }

  const expanded = await expandRecursiveIncludePaths(recursiveRoots);
  return [...individual, ...expanded];
}
155
+
156
/**
 * Run a multi-job pipeline described by a JSON config file.
 * Config shape: { defineRoot?, includePaths?, jobs: [ { name?, thriftFile, outputDir,
 * includePaths?, csharp?, cpp?, js?, json?, ef?, convertToDeuk?, convertToDeukOutputDir?,
 * copy?: [ { from, to } ] } ] }.
 * includePaths entries: string (individual) or { path, recursive: true } (path + direct subdirs).
 * All paths in the config are resolved relative to the config file's directory.
 * @param {string} configPath - Path to the pipeline JSON config.
 * @throws {Error} When the config cannot be loaded or has no jobs.
 */
async function runPipeline(configPath) {
  const configDir = path.dirname(path.resolve(configPath));

  let config;
  try {
    config = JSON.parse(await fs.readFile(configPath, 'utf8'));
  } catch (e) {
    throw new Error(`Failed to load pipeline config: ${e.message}`);
  }

  const { jobs, defineRoot } = config;
  if (!Array.isArray(jobs) || jobs.length === 0) {
    throw new Error('Pipeline config must have a non-empty "jobs" array');
  }

  // Globals apply to every job; job-local paths are appended after them.
  const globalIncludePaths = await resolveIncludePathsFromConfig(config.includePaths || [], configDir);

  console.log(`🚀 DeukPack Pipeline v1.0.0`);
  console.log(`📄 Config: ${configPath}`);
  console.log(` Jobs: ${jobs.length}`);

  for (const [index, job] of jobs.entries()) {
    const name = job.name || job.thriftFile || `job${index + 1}`;
    const thriftFile = path.resolve(configDir, job.thriftFile);
    const outputDir = path.resolve(configDir, job.outputDir);
    const jobIncludePaths = await resolveIncludePathsFromConfig(job.includePaths || [], configDir);

    // The thrift file's own directory always leads the include path list.
    const parseOpts = {
      includePaths: [path.dirname(thriftFile), ...globalIncludePaths, ...jobIncludePaths],
      defineRoot
    };
    const options = {
      csharp: Boolean(job.csharp),
      cpp: Boolean(job.cpp),
      js: Boolean(job.js),
      json: Boolean(job.json),
      ef: Boolean(job.ef),
      defineRoot,
      convertToDeuk: Boolean(job.convertToDeuk),
      convertToDeukOutputDir: job.convertToDeukOutputDir || 'deuk'
    };

    console.log(`\n--- Job: ${name} ---`);
    await runOneBuild(thriftFile, outputDir, options, parseOpts);

    // Post-build copy steps, e.g. publish generated C# into a consumer project.
    for (const rule of job.copy || []) {
      const from = path.resolve(configDir, rule.from);
      const to = path.resolve(configDir, rule.to);
      console.log(` 📋 Copy: ${rule.from} → ${rule.to}`);
      await copyDir(from, to);
    }
  }

  console.log('\n🎉 Pipeline completed successfully!');
}
216
+
217
// Windows reserved device names (matched after uppercasing); files or directories
// with these names cannot be created on Windows, so copy steps skip them.
const WINDOWS_RESERVED = ['CON', 'PRN', 'AUX', 'NUL', 'COM1', 'COM2', 'COM3', 'COM4', 'COM5', 'COM6', 'COM7', 'COM8', 'COM9', 'LPT1', 'LPT2', 'LPT3', 'LPT4', 'LPT5', 'LPT6', 'LPT7', 'LPT8', 'LPT9'];
218
+
219
/**
 * Recursively copy fromDir into toDir (destination is created if missing).
 * Skipped with a warning: missing source dirs, Windows reserved device names,
 * generated `*.Thrift.cs` duplicates, and paths containing a literal `\nul` segment.
 * @param {string} fromDir - Source directory.
 * @param {string} toDir - Destination directory.
 */
async function copyDir(fromDir, toDir) {
  try {
    await fs.access(fromDir);
  } catch {
    console.warn(` ⚠️ Skip copy (source missing): ${fromDir}`);
    return;
  }
  await fs.mkdir(toDir, { recursive: true });

  // Windows reserved device names (CON, PRN, AUX, NUL, COM1-9, LPT1-9) can never
  // exist as real files. Fix: the original checked this three redundant ways
  // (`name === 'nul'`, an uppercase list lookup, and a duplicated .Thrift.cs test);
  // consolidated into one case-insensitive regex each with identical semantics.
  const reservedRe = /^(?:CON|PRN|AUX|NUL|COM[1-9]|LPT[1-9])$/i;

  const entries = await fs.readdir(fromDir, { withFileTypes: true });
  for (const entry of entries) {
    const name = entry.name;
    if (!name || reservedRe.test(name)) {
      console.warn(` ⚠️ Skipping reserved filename: ${name}`);
      continue;
    }
    // Generated C# Thrift shims are produced elsewhere; copying them would duplicate types.
    if (/\.Thrift\.cs$/i.test(name)) {
      console.warn(` ⚠️ Skipping duplicate: ${name}`);
      continue;
    }
    const src = path.join(fromDir, name);
    const dest = path.join(toDir, name);
    if (entry.isDirectory()) {
      await copyDir(src, dest);
      continue;
    }
    // Defensive: refuse paths that picked up a literal "\nul" segment (Windows-style).
    if (src.includes('\\nul\\') || src.endsWith('\\nul') || dest.includes('\\nul\\') || dest.endsWith('\\nul')) {
      console.warn(` ⚠️ Skipping invalid path: ${name}`);
      continue;
    }
    await fs.copyFile(src, dest);
  }
}
251
+
252
/**
 * Parse CLI flags (everything after <thrift_file> <output_dir>).
 * Flags that take a value only consume the next argument when one is present;
 * a trailing valueless flag leaves its option unchanged.
 * @param {string[]} args - Raw option arguments.
 * @returns {{ options: object, includePaths: string[], includePathsRecursive: string[] }}
 */
function parseOptions(args) {
  const options = {
    csharp: false,
    cpp: false,
    js: false,
    json: false,
    protocol: 'binary',
    endianness: 'little',
    defineRoot: undefined, // --define-root _deuk_define | _thrift
    convertToDeuk: false,
    convertToDeukOutputDir: 'deuk', // --convert-to-deuk [subdir]
    emitPerFile: false, // --emit-per-file: also emit one .deuk per sourceFile in the AST (e.g. server_msg_db)
    ef: false // --ef Entity Framework support (meta table entities + DbContext)
  };

  const includePaths = [];
  const includePathsRecursive = [];

  for (let i = 0; i < args.length; i++) {
    switch (args[i]) {
      case '--convert-to-deuk':
        options.convertToDeuk = true;
        // Optional positional subdir: consume only if the next arg is not another flag.
        if (i + 1 < args.length && !args[i + 1].startsWith('-')) {
          options.convertToDeukOutputDir = args[++i];
        }
        break;
      case '--emit-per-file':
        options.emitPerFile = true;
        break;
      case '-I':
      case '-i':
        if (i + 1 < args.length) {
          includePaths.push(args[++i]);
        }
        break;
      case '-r':
      case '--include-recursive':
        if (i + 1 < args.length) {
          includePathsRecursive.push(args[++i]);
        }
        break;
      case '--define-root':
        if (i + 1 < args.length) {
          options.defineRoot = args[++i];
        }
        break;
      case '--csharp':
        options.csharp = true;
        break;
      case '--cpp':
        options.cpp = true;
        break;
      case '--js':
        options.js = true;
        break;
      // Fix: options.json existed but no CLI flag ever set it (it was only
      // reachable from pipeline configs via job.json). Expose it as --json.
      case '--json':
        options.json = true;
        break;
      case '--ef':
        options.ef = true;
        break;
      case '--protocol':
        if (i + 1 < args.length) {
          options.protocol = args[++i];
        }
        break;
      case '--endianness':
        if (i + 1 < args.length) {
          options.endianness = args[++i];
        }
        break;
    }
  }

  return { options, includePaths, includePathsRecursive };
}
326
+
327
/**
 * Generate C# sources from the parsed AST into <outputDir>/csharp.
 * @param {object} engine - DeukPackEngine instance (unused here; kept for signature parity).
 * @param {object} ast - Parsed Thrift AST.
 * @param {string} outputDir - Build output root.
 * @param {object} [options] - { ef, defineVersionFile } forwarded to the generator.
 */
async function generateCSharp(engine, ast, outputDir, options = {}) {
  console.log('🔧 Generating C# code...');
  const startTime = Date.now();

  const csharpDir = path.join(outputDir, 'csharp');
  await fs.mkdir(csharpDir, { recursive: true });

  // Use the actual C# generator
  const { CSharpGenerator } = require('../dist/codegen/CSharpGenerator');
  const generator = new CSharpGenerator();

  const genOptions = { efSupport: options.ef === true, defineVersionFile: options.defineVersionFile };
  const csharpFiles = await generator.generate(ast, genOptions);

  // Write each file separately
  for (const [filename, content] of Object.entries(csharpFiles)) {
    // Filter Windows reserved names (guards against "nul" etc.)
    if (!filename || filename === 'nul.cs' || filename.startsWith('nul')) {
      // Fix: the log printed the literal text "$(unknown)" instead of the filename.
      console.warn(` ⚠️ Skipping invalid filename: ${filename}`);
      continue;
    }

    const filePath = path.join(csharpDir, filename);
    // Skip any path containing a "nul" segment (Windows)
    if (filePath.includes('\\nul\\') || filePath.endsWith('\\nul')) {
      console.warn(` ⚠️ Skipping invalid path: ${filePath}`);
      continue;
    }

    await fs.writeFile(filePath, content, 'utf8');
    // Fix: same "$(unknown)" literal replaced with the actual generated filename.
    console.log(` 📄 Generated: ${filename}`);
  }

  const generateTime = Date.now() - startTime;
  console.log(`✅ C# generated ${Object.keys(csharpFiles).length} files in ${generateTime}ms`);
}
363
+
364
/**
 * Generate C++ sources from the parsed AST into <outputDir>/cpp.
 * @param {object} engine - DeukPackEngine instance (unused here; kept for signature parity).
 * @param {object} ast - Parsed Thrift AST.
 * @param {string} outputDir - Build output root.
 */
async function generateCpp(engine, ast, outputDir) {
  console.log('🔧 Generating C++ code...');
  const startTime = Date.now();

  const cppDir = path.join(outputDir, 'cpp');
  await fs.mkdir(cppDir, { recursive: true });

  const { CppGenerator } = require('../dist/codegen/CppGenerator');
  const generator = new CppGenerator();
  const cppFiles = await generator.generate(ast, {});

  for (const [filename, content] of Object.entries(cppFiles)) {
    if (!filename || filename === 'nul.h' || filename === 'nul.cpp' || filename.startsWith('nul')) {
      // Fix: the log printed the literal text "$(unknown)" instead of the filename.
      console.warn(` ⚠️ Skipping invalid filename: ${filename}`);
      continue;
    }
    const filePath = path.join(cppDir, filename);
    if (filePath.includes('\\nul\\') || filePath.endsWith('\\nul')) {
      console.warn(` ⚠️ Skipping invalid path: ${filePath}`);
      continue;
    }
    await fs.writeFile(filePath, content, 'utf8');
    // Fix: same "$(unknown)" literal replaced with the actual generated filename.
    console.log(` 📄 Generated: ${filename}`);
  }

  const generateTime = Date.now() - startTime;
  console.log(`✅ C++ generated ${Object.keys(cppFiles).length} files in ${generateTime}ms`);
}
392
+
393
/**
 * Map a legacy Thrift type name to the DeukPack standard typeName so that the
 * Excel schema view/compare renders both forms identically.
 * Non-string inputs and unknown names are returned unchanged.
 * @param {any} t - Legacy type name (e.g. "i32", "struct"); whitespace/case-insensitive.
 * @returns {any} Standard name (e.g. "int32", "record") or the input as-is.
 */
function toDeukPackStandardTypeName(t) {
  if (typeof t !== 'string' || !t) return t;
  const legacyToStandard = {
    i8: 'int8',
    i16: 'int16',
    i32: 'int32',
    i64: 'int64',
    list: 'list',
    lst: 'list',
    set: 'set',
    map: 'map',
    struct: 'record',
    rec: 'record'
  };
  const key = t.trim().toLowerCase();
  return Object.prototype.hasOwnProperty.call(legacyToStandard, key) ? legacyToStandard[key] : t;
}
402
+
403
/**
 * Schema type info for a Thrift field type. typeName is reported in DeukPack
 * standard form (used by the Excel schema view/compare).
 * @param {any} fieldType - string e.g. 'i32' or object e.g. { type: 'list', elementType: 'i32' }
 * @returns {{ type: string, typeName: string }}
 */
function getSchemaTypeInfo(fieldType) {
  const primitiveMap = {
    bool: 'Bool', byte: 'Byte', i8: 'Byte', i16: 'I16', i32: 'I32', i64: 'I64',
    double: 'Double', string: 'String', binary: 'Binary'
  };

  if (typeof fieldType === 'string') {
    return {
      type: primitiveMap[fieldType] || 'Struct',
      typeName: toDeukPackStandardTypeName(fieldType)
    };
  }

  if (fieldType && typeof fieldType === 'object') {
    switch (fieldType.type) {
      case 'list':
        return { type: 'List', typeName: getSchemaTypeInfo(fieldType.elementType).typeName };
      case 'set':
        return { type: 'Set', typeName: getSchemaTypeInfo(fieldType.elementType).typeName };
      case 'map': {
        const keyInfo = getSchemaTypeInfo(fieldType.keyType);
        const valInfo = getSchemaTypeInfo(fieldType.valueType);
        return { type: 'Map', typeName: `map<${keyInfo.typeName},${valInfo.typeName}>` };
      }
    }
  }

  // Anything else (null, unknown object shape) is treated as a struct reference;
  // strings were already handled above, so the fallback name is always 'struct'.
  return { type: 'Struct', typeName: toDeukPackStandardTypeName('struct') };
}
435
+
436
+ async function generateJavaScript(engine, ast, outputDir) {
437
+ // Meta editor loads this JS and uses it for Thrift JS <-> Webix JSON, Thrift JSON, Excel JSON.
438
+ // Generated Thrift objects include getSchema() (same idea as C# GetSchema()) so schema is self-contained.
439
+ console.log('🔧 Generating JavaScript code (meta editor runtime)...');
440
+ const startTime = Date.now();
441
+
442
+ const jsDir = path.join(outputDir, 'javascript');
443
+ await fs.mkdir(jsDir, { recursive: true });
444
+
445
+ const lines = [];
446
+ lines.push('// Generated by DeukPack v1.0.0');
447
+ lines.push('// ' + new Date().toISOString());
448
+ lines.push('// Thrift JS objects with embedded schema (getSchema()) for meta editor.');
449
+ lines.push('// Protocol helpers: toJson/fromJson (Thrift JSON), toBinary/fromBinary (binary).');
450
+ lines.push('');
451
+
452
+ lines.push('// --- Protocol runtime (Thrift JSON shape: field-id keys, { i32|str|lst|map|rec|... } wrappers) ---');
453
+ lines.push('function _wrapThriftJson(type, typeName, val, schemas) {');
454
+ lines.push(' if (val === null || val === undefined) return null;');
455
+ lines.push(' switch (type) {');
456
+ lines.push(' case "Bool": return { tf: !!val };');
457
+ lines.push(' case "Byte": case "I16": case "I32": return { i32: Number(val) };');
458
+ lines.push(' case "I64": return { i64: Number(val) };');
459
+ lines.push(' case "Double": return { dbl: Number(val) };');
460
+ lines.push(' case "String": return { str: String(val) };');
461
+ lines.push(' case "Binary":');
462
+ lines.push(' if (typeof Buffer !== "undefined") return { str: Buffer.from(val).toString("base64") };');
463
+ lines.push(' var arr = val && val.length != null ? val : []; var s = ""; for (var i = 0; i < arr.length; i++) s += String.fromCharCode(arr[i] & 255); return { str: (typeof btoa !== "undefined" ? btoa(s) : "") };');
464
+ lines.push(' case "List": case "Set":');
465
+ lines.push(' var elem = (typeName.match(/^(?:list|set)<(.+)>$/) || [])[1];');
466
+ lines.push(' return { lst: (val || []).map(function(e) { return _wrapThriftJson(_elemType(elem), elem, e, schemas); }) };');
467
+ lines.push(' case "Map":');
468
+ lines.push(' var m = (typeName.match(/^map<([^,]+),(.+)>$/) || []);');
469
+ lines.push(' var out = {};');
470
+ lines.push(' for (var k in val) if (Object.prototype.hasOwnProperty.call(val, k)) out[String(k)] = _wrapThriftJson(_elemType(m[2]), m[2], val[k], schemas);');
471
+ lines.push(' return { map: out };');
472
+ lines.push(' default:');
473
+ lines.push(' var s = schemas && schemas[typeName];');
474
+ lines.push(' return s ? { rec: _toThriftJson(s, val, schemas) } : { str: String(val) };');
475
+ lines.push(' }');
476
+ lines.push('}');
477
+ lines.push('function _elemType(tn) { if (!tn) return "String"; var m = tn.match(/^(?:list|set)<(.+)>$/); return m ? _elemType(m[1]) : (tn === "i32" || tn === "i64" ? "I32" : (tn === "double" ? "Double" : (tn === "string" ? "String" : "Struct"))); }');
478
+ lines.push('function _toThriftJson(schema, obj, schemas) {');
479
+ lines.push(' if (!schema || schema.type !== "Struct" || !schema.fields) return obj;');
480
+ lines.push(' var out = {};');
481
+ lines.push(' for (var id in schema.fields) { var f = schema.fields[id]; var v = obj && obj[f.name]; if (v === undefined && f.defaultValue !== undefined && f.defaultValue !== null) v = f.defaultValue; if (v !== undefined) out[String(id)] = _wrapThriftJson(f.type, f.typeName, v, schemas); }');
482
+ lines.push(' return out;');
483
+ lines.push('}');
484
+ lines.push('function _unwrapThriftJson(type, typeName, jsonVal, schemas) {');
485
+ lines.push(' if (jsonVal === null || jsonVal === undefined) return null;');
486
+ lines.push(' if (type !== "Struct" && typeof jsonVal === "object" && !Array.isArray(jsonVal)) {');
487
+ lines.push(' if (type === "Binary" && jsonVal.str) { var b64 = jsonVal.str; if (typeof Buffer !== "undefined") return new Uint8Array(Buffer.from(b64, "base64")); var bin = typeof atob !== "undefined" ? atob(b64) : ""; var arr = new Uint8Array(bin.length); for (var i = 0; i < bin.length; i++) arr[i] = bin.charCodeAt(i); return arr; }');
488
+ lines.push(' if (jsonVal.str !== undefined) return jsonVal.str;');
489
+ lines.push(' if (jsonVal.i32 !== undefined) return jsonVal.i32;');
490
+ lines.push(' if (jsonVal.i64 !== undefined) return Number(jsonVal.i64);');
491
+ lines.push(' if (jsonVal.dbl !== undefined) return jsonVal.dbl;');
492
+ lines.push(' if (jsonVal.tf !== undefined) return jsonVal.tf;');
493
+ lines.push(' if (jsonVal.lst !== undefined) { var elem = (typeName.match(/^(?:list|set)<(.+)>$/) || [])[1]; return jsonVal.lst.map(function(e) { return _unwrapThriftJson(_elemType(elem), elem, e, schemas); }); }');
494
+ lines.push(' if (jsonVal.map !== undefined) { var m = (typeName.match(/^map<([^,]+),(.+)>$/) || []); var o = {}; for (var k in jsonVal.map) o[k] = _unwrapThriftJson(_elemType(m[2]), m[2], jsonVal.map[k], schemas); return o; }');
495
+ lines.push(' if (jsonVal.rec !== undefined) { var s = schemas && schemas[typeName]; return s ? _fromThriftJson(s, jsonVal.rec, schemas) : jsonVal.rec; }');
496
+ lines.push(' }');
497
+ lines.push(' return jsonVal;');
498
+ lines.push('}');
499
+ lines.push('function _fromThriftJson(schema, jsonObj, schemas) {');
500
+ lines.push(' if (!schema || schema.type !== "Struct" || !schema.fields) return jsonObj || {};');
501
+ lines.push(' var out = {};');
502
+ lines.push(' for (var id in schema.fields) { var f = schema.fields[id]; var w = jsonObj && jsonObj[String(id)]; if (w !== undefined) out[f.name] = _unwrapThriftJson(f.type, f.typeName, w, schemas); }');
503
+ lines.push(' return out;');
504
+ lines.push('}');
505
+ lines.push('');
506
+
507
+ // --- Excel protocol runtime (IExcelSheet → JS object, schema-driven) ---
508
+ lines.push('// Excel row constants: Row 1=HIERARCHY_ID, Row 2=DATATYPE, Row 5+=DATA');
509
+ lines.push('var _XL_HIER_ROW = 1, _XL_DT_ROW = 2, _XL_FIRST_DATA = 5;');
510
+ lines.push('function _xlBuildColMap(sheet) {');
511
+ lines.push(' var m = {}; for (var c = 1; c <= sheet.lastColumn; c++) { var h = (sheet.cellValue(_XL_HIER_ROW, c) || "").trim(); if (!h) break; m[h] = { col: c, dt: (sheet.cellValue(_XL_DT_ROW, c) || "").trim() }; } return m;');
512
+ lines.push('}');
513
+ lines.push('function _xlParsePrimitive(type, raw, dt, enums) {');
514
+ lines.push(' if (raw === null || raw === undefined || raw === "") return type === "String" ? "" : undefined;');
515
+ lines.push(' var tok = String(raw).split(":")[0].trim();');
516
+ lines.push(' switch (type) {');
517
+ lines.push(' case "Bool": return tok === "1" || tok.toLowerCase() === "true";');
518
+ lines.push(' case "Byte": case "I16": case "I32":');
519
+ lines.push(' var n = parseInt(tok, 10); if (!isNaN(n)) return n;');
520
+ lines.push(' if (enums && dt && dt.indexOf("enum") === 0) { var en = dt.replace(/^enum[<(]|[>)]$/g,""); var ed = enums[en]; if (ed && ed.values && ed.values[tok] !== undefined) return ed.values[tok]; }');
521
+ lines.push(' return 0;');
522
+ lines.push(' case "I64": return parseInt(tok, 10) || 0;');
523
+ lines.push(' case "Double": return parseFloat(raw) || 0;');
524
+ lines.push(' case "String": return String(raw).trim();');
525
+ lines.push(' default: return String(raw).trim();');
526
+ lines.push(' }');
527
+ lines.push('}');
528
+ lines.push('function _xlMapType(tn) { var m = {i32:"I32",i64:"I64",i16:"I16",i8:"Byte",byte:"Byte",double:"Double",dbl:"Double",string:"String",str:"String",bool:"Bool",tf:"Bool"}; return m[tn] || "Struct"; }');
529
+ lines.push('function _xlCountListRows(sheet, row, listPath, colMap) {');
530
+ lines.push(' var tuid = sheet.cellValue(row, 1) || "", pfx = listPath + ".", cnt = 0;');
531
+ lines.push(' for (var r = row; r <= sheet.lastRow; r++) {');
532
+ lines.push(' if (r > row) { var rid = sheet.cellValue(r, 1) || ""; if (rid && rid !== tuid) break; }');
533
+ lines.push(' var has = false; for (var p in colMap) { if ((p === listPath || p.indexOf(pfx) === 0) && !sheet.isCellEmpty(r, colMap[p].col)) { has = true; break; } }');
534
+ lines.push(' if (has) cnt++; else if (cnt > 0) break;');
535
+ lines.push(' } return cnt;');
536
+ lines.push('}');
537
+ lines.push('function _xlReadField(field, fid, colMap, sheet, row, schemas, enums, parentPath) {');
538
+ lines.push(' var fp = parentPath ? parentPath + "." + fid : String(fid);');
539
+ lines.push(' var type = field.type, tn = field.typeName;');
540
+ lines.push(' if (type === "Struct") {');
541
+ lines.push(' var cs = schemas[tn]; if (!cs || !cs.fields) return undefined;');
542
+ lines.push(' var obj = {}, has = false;');
543
+ lines.push(' for (var cid in cs.fields) { var cf = cs.fields[cid]; var v = _xlReadField(cf, cid, colMap, sheet, row, schemas, enums, fp); if (v !== undefined && v !== null) { obj[cf.name] = v; has = true; } }');
544
+ lines.push(' return has ? obj : undefined;');
545
+ lines.push(' }');
546
+ lines.push(' if (type === "List" || type === "Set") {');
547
+ lines.push(' var cnt = _xlCountListRows(sheet, row, fp, colMap); if (cnt === 0) return undefined;');
548
+ lines.push(' var em = (tn.match(/^(?:list|set)<(.+)>$/) || [])[1] || "struct";');
549
+ lines.push(' var isPrim = ["i32","i64","i16","i8","double","string","bool","byte","dbl","str","tf"].indexOf(em) >= 0;');
550
+ lines.push(' var arr = [], pfx = fp + ".";');
551
+ lines.push(' for (var i = 0; i < cnt; i++) { var er = row + i;');
552
+ lines.push(' if (isPrim) { for (var p in colMap) { if (p.indexOf(pfx) === 0) { arr.push(_xlParsePrimitive(_xlMapType(em), sheet.cellValue(er, colMap[p].col), colMap[p].dt, enums)); break; } } }');
553
+ lines.push(' else { var es = schemas[em], eo = {};');
554
+ lines.push(' if (es && es.fields) { for (var cid in es.fields) { var cf = es.fields[cid]; var v = _xlReadField(cf, cid, colMap, sheet, er, schemas, enums, fp); if (v !== undefined && v !== null) eo[cf.name] = v; } }');
555
+ lines.push(' else { for (var p in colMap) { if (p.indexOf(pfx) === 0) { var rem = p.substring(pfx.length); if (rem.indexOf(".") < 0 && !sheet.isCellEmpty(er, colMap[p].col)) eo[rem] = sheet.cellValue(er, colMap[p].col); } } }');
556
+ lines.push(' arr.push(eo);');
557
+ lines.push(' }');
558
+ lines.push(' } return arr.length > 0 ? arr : undefined;');
559
+ lines.push(' }');
560
+ lines.push(' var e = colMap[fp]; if (!e) return undefined;');
561
+ lines.push(' if (sheet.isCellEmpty(row, e.col)) return undefined;');
562
+ lines.push(' return _xlParsePrimitive(type, sheet.cellValue(row, e.col), e.dt, enums);');
563
+ lines.push('}');
564
+ lines.push('function _fromExcelRow(schema, sheet, row, schemas, enums) {');
565
+ lines.push(' if (!schema || schema.type !== "Struct" || !schema.fields) return {};');
566
+ lines.push(' var colMap = _xlBuildColMap(sheet), out = {};');
567
+ lines.push(' for (var fid in schema.fields) { var f = schema.fields[fid]; var v = _xlReadField(f, fid, colMap, sheet, row, schemas, enums); if (v !== undefined && v !== null) out[f.name] = v; }');
568
+ lines.push(' return out;');
569
+ lines.push('}');
570
+ lines.push('function _fromExcelSheet(schema, sheet, schemas, enums, firstRow) {');
571
+ lines.push(' firstRow = firstRow || _XL_FIRST_DATA; var result = {};');
572
+ lines.push(' for (var r = firstRow; r <= sheet.lastRow; r++) { var tuid = sheet.cellValue(r, 1); if (!tuid) continue; var id = parseInt(String(tuid).split(":")[0], 10); if (isNaN(id)) continue; result[id] = _fromExcelRow(schema, sheet, r, schemas, enums); }');
573
+ lines.push(' return result;');
574
+ lines.push('}');
575
+ lines.push('');
576
+
577
+ // Enums: value map + getSchema() with docComment, annotations, valueComments (full recoverable)
578
+ for (const enumDef of ast.enums || []) {
579
+ const safeName = enumDef.name.replace(/\./g, '_');
580
+ const schemaObj = {
581
+ name: enumDef.name,
582
+ type: 'Enum',
583
+ values: enumDef.values,
584
+ docComment: enumDef.docComment != null ? enumDef.docComment : undefined,
585
+ valueComments: enumDef.valueComments && Object.keys(enumDef.valueComments || {}).length ? enumDef.valueComments : undefined,
586
+ annotations: enumDef.annotations && Object.keys(enumDef.annotations || {}).length ? enumDef.annotations : undefined
587
+ };
588
+ lines.push('const _schema_' + safeName + ' = ' + JSON.stringify(schemaObj) + ';');
589
+ lines.push(`const ${safeName} = {`);
590
+ lines.push(' values: _schema_' + safeName + '.values,');
591
+ lines.push(' getSchema() { return _schema_' + safeName + '; }');
592
+ lines.push('};');
593
+ lines.push('');
594
+ }
595
+
596
+ // Structs: schema constant + getSchema() with docComment, annotations, full defaultValue (mirrors C# GetSchema())
597
+ for (const struct of ast.structs || []) {
598
+ const safeName = struct.name.replace(/\./g, '_');
599
+ const fieldsObj = {};
600
+ for (const field of struct.fields || []) {
601
+ const ti = getSchemaTypeInfo(field.type);
602
+ const f = {
603
+ id: field.id,
604
+ name: field.name,
605
+ type: ti.type,
606
+ typeName: ti.typeName,
607
+ required: !!field.required,
608
+ defaultValue: field.defaultValue !== undefined ? field.defaultValue : null,
609
+ docComment: field.docComment != null ? field.docComment : undefined,
610
+ annotations: field.annotations && Object.keys(field.annotations).length ? field.annotations : undefined
611
+ };
612
+ fieldsObj[field.id] = f;
613
+ }
614
+ const schemaObj = {
615
+ name: struct.name,
616
+ type: 'Struct',
617
+ fields: fieldsObj,
618
+ docComment: struct.docComment != null ? struct.docComment : undefined,
619
+ annotations: struct.annotations && Object.keys(struct.annotations || {}).length ? struct.annotations : undefined
620
+ };
621
+ const schemaJson = JSON.stringify(schemaObj);
622
+ lines.push(`const _schema_${safeName} = ${schemaJson};`);
623
+ lines.push(`const ${safeName} = {`);
624
+ lines.push(' getSchema() { return _schema_' + safeName + '; },');
625
+ lines.push(' create() { return {}; },');
626
+ lines.push(' toJson(obj) { return JSON.stringify(_toThriftJson(_schema_' + safeName + ', obj, _schemas)); },');
627
+ lines.push(' fromJson(str) { return _fromThriftJson(_schema_' + safeName + ', JSON.parse(str || "{}"), _schemas); },');
628
+ lines.push(' toBinary(obj) { throw new Error("toBinary: use C#/TS runtime or DeukPack binary protocol"); },');
629
+ lines.push(' fromBinary(buf) { throw new Error("fromBinary: use C#/TS runtime or DeukPack binary protocol"); },');
630
+ lines.push(' fromExcel(sheet, row) { return _fromExcelRow(_schema_' + safeName + ', sheet, row, _schemas, _enums); },');
631
+ lines.push(' fromExcelSheet(sheet, firstRow) { return _fromExcelSheet(_schema_' + safeName + ', sheet, _schemas, _enums, firstRow); }');
632
+ lines.push('};');
633
+ lines.push('');
634
+ }
635
+
636
+ lines.push('var _schemas = {};');
637
+ for (const struct of ast.structs || []) {
638
+ const safeName = struct.name.replace(/\./g, '_');
639
+ lines.push('_schemas["' + struct.name.replace(/"/g, '\\"') + '"] = _schema_' + safeName + ';');
640
+ }
641
+ lines.push('var _enums = {};');
642
+ for (const enumDef of ast.enums || []) {
643
+ const safeName = enumDef.name.replace(/\./g, '_');
644
+ lines.push('_enums["' + enumDef.name.replace(/"/g, '\\"') + '"] = _schema_' + safeName + ';');
645
+ }
646
+ lines.push('');
647
+
648
+ // Export for Node/UMD (meta editor can assign to window or use module)
649
+ lines.push('if (typeof module !== "undefined" && module.exports) {');
650
+ const allNames = []
651
+ .concat((ast.enums || []).map(e => e.name.replace(/\./g, '_')))
652
+ .concat((ast.structs || []).map(s => s.name.replace(/\./g, '_')));
653
+ allNames.forEach(n => { lines.push(' module.exports.' + n + ' = ' + n + ';'); });
654
+ lines.push('}');
655
+ lines.push('');
656
+
657
+ const jsContent = lines.join('\n');
658
+ await fs.writeFile(path.join(jsDir, 'generated.js'), jsContent);
659
+
660
+ const generateTime = Date.now() - startTime;
661
+ console.log(`✅ JavaScript generated (${ast.structs.length} structs, ${ast.enums.length} enums) in ${generateTime}ms`);
662
+ }
663
+
664
// Run the main function when invoked as a CLI.
// FIX: `main().catch(console.error)` logged the failure but let the process
// exit 0; set a non-zero exit code so shell scripts / CI detect the failure.
if (require.main === module) {
  main().catch((err) => {
    console.error(err);
    process.exitCode = 1;
  });
}
668
// Public API: programmatic entry points mirroring the CLI behavior
// (defined earlier in this file — not visible in this chunk).
module.exports = { main, runOneBuild, runPipeline, copyDir };