@lark-apaas/fullstack-cli 1.1.16-alpha.3 → 1.1.16-alpha.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,5 +1,5 @@
1
1
  // src/index.ts
2
- import fs21 from "fs";
2
+ import fs20 from "fs";
3
3
  import path17 from "path";
4
4
  import { fileURLToPath as fileURLToPath4 } from "url";
5
5
  import { config as dotenvConfig } from "dotenv";
@@ -63,18 +63,9 @@ var FullstackCLI = class {
63
63
  constructor(version) {
64
64
  this.program = new Command();
65
65
  this.hooks = new HooksManager();
66
- this.program.name("fullstack-cli").description("CLI tool for fullstack template management").version(version).option("--canary <env>", "Set canary environment (e.g., boe_canary)");
67
- this.setupGlobalOptions();
66
+ this.program.name("fullstack-cli").description("CLI tool for fullstack template management").version(version);
68
67
  this.setupDefaultHooks();
69
68
  }
70
- setupGlobalOptions() {
71
- this.program.hook("preAction", (thisCommand) => {
72
- const opts = thisCommand.opts();
73
- if (opts.canary) {
74
- process.env.FORCE_FRAMEWORK_CLI_CANARY_ENV = opts.canary;
75
- }
76
- });
77
- }
78
69
  setupDefaultHooks() {
79
70
  this.hooks.afterRun((ctx, error) => {
80
71
  if (error) {
@@ -126,94 +117,104 @@ Command "${ctx.commandName}" completed in ${elapsed}ms`);
126
117
 
127
118
  // src/commands/db/schema.handler.ts
128
119
  import path2 from "path";
129
- import fs4 from "fs";
120
+ import fs3 from "fs";
130
121
  import { fileURLToPath as fileURLToPath2 } from "url";
131
122
  import { spawnSync } from "child_process";
132
123
  import { createRequire } from "module";
133
124
  import { config as loadEnv } from "dotenv";
134
125
 
135
- // src/commands/db/gen-dbschema/index.ts
136
- import fs3 from "fs";
137
- import path from "path";
138
-
139
126
  // src/commands/db/gen-dbschema/postprocess.ts
140
127
  import fs2 from "fs";
128
+ import path from "path";
141
129
 
142
- // src/commands/db/gen-dbschema/transforms/core.ts
143
- import { IndentationText, Project, QuoteKind } from "ts-morph";
144
-
145
- // src/commands/db/gen-dbschema/transforms/types.ts
146
- function createEmptyStats() {
147
- return {
148
- patchedDefects: 0,
149
- removedPgSchemas: 0,
150
- convertedSchemaCalls: 0,
151
- renamedIdentifiers: [],
152
- replacedUnknown: 0,
153
- fallbackToText: 0,
154
- unmatchedUnknown: [],
155
- replacedTimestamp: 0,
156
- replacedDefaultNow: 0,
157
- removedSystemFields: 0,
158
- addedImports: [],
159
- removedImports: []
160
- };
161
- }
162
-
163
- // src/commands/db/gen-dbschema/transforms/core.ts
164
- var PROJECT_OPTIONS = {
165
- skipAddingFilesFromTsConfig: true,
166
- skipFileDependencyResolution: true,
167
- manipulationSettings: {
168
- indentationText: IndentationText.TwoSpaces,
169
- quoteKind: QuoteKind.Single
130
+ // src/commands/db/gen-dbschema/helper/header-format.ts
131
+ var HEADER_COMMENT = "/** auto generated, do not edit */";
132
+ function ensureHeaderComment(source) {
133
+ let text = source.startsWith("\uFEFF") ? source.slice(1) : source;
134
+ while (text.startsWith(HEADER_COMMENT)) {
135
+ text = text.slice(HEADER_COMMENT.length);
136
+ text = stripLeadingNewlines(text);
137
+ }
138
+ const trimmed = stripLeadingNewlines(text);
139
+ if (trimmed.length === 0) {
140
+ return `${HEADER_COMMENT}
141
+ `;
170
142
  }
171
- };
172
- function parseSource(source, fileName = "schema.ts") {
173
- const project = new Project({
174
- ...PROJECT_OPTIONS,
175
- useInMemoryFileSystem: true
176
- });
177
- const sourceFile = project.createSourceFile(fileName, source);
178
- return { project, sourceFile };
143
+ return `${HEADER_COMMENT}
144
+ ${trimmed}`;
179
145
  }
180
- function applyTransforms(sourceFile, transforms) {
181
- const stats = createEmptyStats();
182
- for (const transform of transforms) {
183
- try {
184
- transform.transform({ sourceFile, stats });
185
- } catch (error) {
186
- console.error(`[ast] Transform "${transform.name}" failed:`, error);
187
- throw error;
188
- }
146
+ function stripLeadingNewlines(value) {
147
+ let current = value;
148
+ while (current.startsWith("\r\n") || current.startsWith("\n")) {
149
+ current = current.startsWith("\r\n") ? current.slice(2) : current.slice(1);
189
150
  }
190
- return stats;
191
- }
192
- function formatSourceFile(sourceFile) {
193
- sourceFile.formatText({
194
- indentSize: 2,
195
- convertTabsToSpaces: true
196
- });
151
+ return current;
197
152
  }
198
- function printSourceFile(sourceFile) {
199
- return sourceFile.getFullText();
153
+ function collapseExtraBlankLines(text) {
154
+ return text.replace(/\n{3,}/g, "\n\n");
200
155
  }
201
156
 
202
- // src/commands/db/gen-dbschema/transforms/ast/rename-identifiers.ts
203
- import { Node } from "ts-morph";
157
+ // src/commands/db/gen-dbschema/helper/schema-conversion.ts
158
+ function removePgSchemaDeclarations(source) {
159
+ return source.replace(/export const \w+ = pgSchema\([\s\S]*?\);\n*/g, "");
160
+ }
161
+ function convertSchemaTableInvocations(source) {
162
+ let converted = 0;
163
+ let text = source.replace(/([A-Za-z0-9_]+)\.table\(/g, () => {
164
+ converted += 1;
165
+ return "pgTable(";
166
+ });
167
+ text = text.replace(/([A-Za-z0-9_]+)\.view\(/g, () => {
168
+ converted += 1;
169
+ return "pgView(";
170
+ });
171
+ text = text.replace(/([A-Za-z0-9_]+)\.materializedView\(/g, () => {
172
+ converted += 1;
173
+ return "pgMaterializedView(";
174
+ });
175
+ text = text.replace(/([A-Za-z0-9_]+)\.enum\(/g, () => {
176
+ converted += 1;
177
+ return "pgEnum(";
178
+ });
179
+ text = text.replace(/([A-Za-z0-9_]+)\.sequence\(/g, () => {
180
+ converted += 1;
181
+ return "pgSequence(";
182
+ });
183
+ return { text, converted };
184
+ }
204
185
 
205
- // src/commands/db/gen-dbschema/utils/identifier.ts
186
+ // src/commands/db/gen-dbschema/helper/table-rename.ts
206
187
  import { pinyin } from "pinyin-pro";
207
- function toAsciiName(name) {
208
- if (!/[^\x00-\x7F]/.test(name)) {
209
- return name;
210
- }
211
- try {
212
- const transliterated = pinyin(name, { toneType: "none", type: "array" }).join("_");
213
- return transliterated || name;
214
- } catch {
215
- return name;
188
+ function renamePgTableConstants(source) {
189
+ const pgTableRegex = /export const\s+([^\s=]+)\s*=\s*(pgTable|pgView|pgMaterializedView)\(\s*["'`]([^"'`]+)["'`]/gu;
190
+ const renames = [];
191
+ const updated = source.replace(pgTableRegex, (match, currentName, factory, tableName) => {
192
+ const sanitized = sanitizeIdentifier(tableName);
193
+ if (sanitized === currentName) {
194
+ return match;
195
+ }
196
+ renames.push({ from: currentName, to: sanitized });
197
+ const equalsIndex = match.indexOf("=");
198
+ const suffix = equalsIndex >= 0 ? match.slice(equalsIndex) : ` = ${factory}("${tableName}"`;
199
+ const normalizedSuffix = suffix.trimStart();
200
+ return `export const ${sanitized} ${normalizedSuffix}`;
201
+ });
202
+ return { text: updated, renames };
203
+ }
204
+ function updateTableReferenceIdentifiers(source, renames) {
205
+ if (renames.length === 0) {
206
+ return source;
216
207
  }
208
+ return renames.reduce((acc, rename) => {
209
+ if (!rename.from || rename.from === rename.to) {
210
+ return acc;
211
+ }
212
+ const pattern = new RegExp(`\\b${escapeRegExp(rename.from)}(\\s*\\.)`, "g");
213
+ return acc.replace(pattern, `${rename.to}$1`);
214
+ }, source);
215
+ }
216
+ function escapeRegExp(value) {
217
+ return value.replace(/[.*+?^${}()|[\]\\]/g, "\\$&").replace(/\//g, "\\/");
217
218
  }
218
219
  function toCamelCase(str) {
219
220
  const words = str.split(/[_\-\s]+/).filter(Boolean);
@@ -241,684 +242,120 @@ function sanitizeIdentifier(name) {
241
242
  }
242
243
  return sanitized;
243
244
  }
244
- function getUniqueIdentifier(name, usedIdentifiers) {
245
- const base = sanitizeIdentifier(name);
246
- if (!usedIdentifiers.has(base)) {
247
- usedIdentifiers.add(base);
248
- return base;
249
- }
250
- let suffix = 2;
251
- while (usedIdentifiers.has(`${base}${suffix}`)) {
252
- suffix++;
253
- }
254
- const unique = `${base}${suffix}`;
255
- usedIdentifiers.add(unique);
256
- return unique;
257
- }
258
-
259
- // src/commands/db/gen-dbschema/transforms/ast/rename-identifiers.ts
260
- var PG_FACTORIES = [
261
- "pgTable",
262
- "pgView",
263
- "pgMaterializedView",
264
- "pgEnum",
265
- "pgSequence",
266
- // Also match schema.xxx() format (before conversion)
267
- "table",
268
- "view",
269
- "materializedView",
270
- "enum",
271
- "sequence"
272
- ];
273
- function extractFactoryName(initializer) {
274
- if (!Node.isCallExpression(initializer)) {
275
- return void 0;
276
- }
277
- const args = initializer.getArguments();
278
- if (args.length === 0) {
279
- return void 0;
280
- }
281
- const firstArg = args[0];
282
- if (Node.isStringLiteral(firstArg)) {
283
- return firstArg.getLiteralText();
284
- }
285
- return void 0;
286
- }
287
- function isPgFactoryCall(initializer) {
288
- if (!Node.isCallExpression(initializer)) {
289
- return false;
290
- }
291
- const expression = initializer.getExpression();
292
- const exprText = expression.getText();
293
- if (PG_FACTORIES.includes(exprText)) {
294
- return true;
295
- }
296
- if (Node.isPropertyAccessExpression(expression)) {
297
- const propName = expression.getName();
298
- if (PG_FACTORIES.includes(propName)) {
299
- return true;
300
- }
245
+ function toAsciiName(name) {
246
+ if (!/[^\x00-\x7F]/.test(name)) {
247
+ return name;
301
248
  }
302
- return false;
303
- }
304
- function getCurrentName(decl) {
305
- const name = decl.getName();
306
- return name.replace(/^"|"$/g, "");
307
- }
308
- function collectExistingIdentifiers(ctx) {
309
- const { sourceFile } = ctx;
310
- const identifiers = /* @__PURE__ */ new Set();
311
- for (const statement of sourceFile.getStatements()) {
312
- if (!Node.isVariableStatement(statement)) {
313
- continue;
314
- }
315
- for (const decl of statement.getDeclarations()) {
316
- const name = getCurrentName(decl);
317
- if (name) {
318
- identifiers.add(name);
319
- }
320
- }
249
+ try {
250
+ const transliterated = pinyin(name, { toneType: "none", type: "array" }).join("_");
251
+ return transliterated || name;
252
+ } catch (error) {
253
+ return name;
321
254
  }
322
- return identifiers;
323
255
  }
324
- var renameIdentifiersTransform = {
325
- name: "rename-identifiers",
326
- transform(ctx) {
327
- const { sourceFile, stats } = ctx;
328
- const usedIdentifiers = collectExistingIdentifiers(ctx);
329
- const renames = [];
330
- for (const statement of sourceFile.getStatements()) {
331
- if (!Node.isVariableStatement(statement)) {
332
- continue;
333
- }
334
- if (!statement.hasExportKeyword()) {
335
- continue;
336
- }
337
- for (const decl of statement.getDeclarations()) {
338
- const initializer = decl.getInitializer();
339
- if (!initializer) {
340
- continue;
341
- }
342
- if (!isPgFactoryCall(initializer)) {
343
- continue;
344
- }
345
- const tableName = extractFactoryName(initializer);
346
- if (!tableName) {
347
- continue;
348
- }
349
- const currentName = getCurrentName(decl);
350
- const baseSanitized = sanitizeIdentifier(tableName);
351
- if (baseSanitized === currentName) {
352
- continue;
353
- }
354
- usedIdentifiers.delete(currentName);
355
- const sanitized = getUniqueIdentifier(tableName, usedIdentifiers);
356
- renames.push({ decl, from: currentName, to: sanitized });
357
- }
358
- }
359
- for (const { decl, from, to } of renames.reverse()) {
360
- try {
361
- decl.rename(to);
362
- stats.renamedIdentifiers.push({ from, to });
363
- } catch (error) {
364
- console.warn(`[rename-identifiers] Failed to rename "${from}" to "${to}":`, error);
365
- }
366
- }
367
- stats.renamedIdentifiers.reverse();
368
- }
369
- };
370
-
371
- // src/commands/db/gen-dbschema/transforms/ast/remove-pg-schema.ts
372
- import { Node as Node2 } from "ts-morph";
373
- var removePgSchemaTransform = {
374
- name: "remove-pg-schema",
375
- transform(ctx) {
376
- const { sourceFile, stats } = ctx;
377
- const statementsToRemove = [];
378
- for (const statement of sourceFile.getStatements()) {
379
- if (!Node2.isVariableStatement(statement)) {
380
- continue;
381
- }
382
- for (const decl of statement.getDeclarations()) {
383
- const initializer = decl.getInitializer();
384
- if (!initializer || !Node2.isCallExpression(initializer)) {
385
- continue;
386
- }
387
- const calleeText = initializer.getExpression().getText();
388
- if (calleeText === "pgSchema") {
389
- statementsToRemove.push(statement);
390
- stats.removedPgSchemas++;
391
- break;
392
- }
393
- }
394
- }
395
- for (const statement of statementsToRemove.reverse()) {
396
- statement.remove();
397
- }
398
- }
399
- };
400
-
401
- // src/commands/db/gen-dbschema/transforms/ast/convert-schema-calls.ts
402
- import { Node as Node3 } from "ts-morph";
403
- var SCHEMA_METHOD_TO_PG = {
404
- table: "pgTable",
405
- view: "pgView",
406
- materializedView: "pgMaterializedView",
407
- enum: "pgEnum",
408
- sequence: "pgSequence"
409
- };
410
- var convertSchemaCallsTransform = {
411
- name: "convert-schema-calls",
412
- transform(ctx) {
413
- const { sourceFile, stats } = ctx;
414
- sourceFile.forEachDescendant((node) => {
415
- if (!Node3.isCallExpression(node)) {
416
- return;
417
- }
418
- const expression = node.getExpression();
419
- if (!Node3.isPropertyAccessExpression(expression)) {
420
- return;
421
- }
422
- const objectExpr = expression.getExpression();
423
- const methodName = expression.getName();
424
- const pgFactoryName = SCHEMA_METHOD_TO_PG[methodName];
425
- if (!pgFactoryName) {
426
- return;
427
- }
428
- if (!Node3.isIdentifier(objectExpr)) {
429
- return;
430
- }
431
- expression.replaceWithText(pgFactoryName);
432
- stats.convertedSchemaCalls++;
433
- });
434
- }
435
- };
436
256
 
437
- // src/commands/db/gen-dbschema/transforms/ast/patch-defects.ts
438
- import { Node as Node4 } from "ts-morph";
439
- var patchDefectsTransform = {
440
- name: "patch-defects",
441
- transform(ctx) {
442
- const { sourceFile, stats } = ctx;
443
- sourceFile.forEachDescendant((node) => {
444
- if (!Node4.isCallExpression(node)) {
445
- return;
446
- }
447
- const expr = node.getExpression();
448
- if (!Node4.isPropertyAccessExpression(expr)) {
449
- return;
450
- }
451
- if (expr.getName() !== "default") {
452
- return;
453
- }
454
- const args = node.getArguments();
455
- if (args.length !== 1) {
456
- return;
457
- }
458
- const arg = args[0];
459
- if (Node4.isStringLiteral(arg)) {
460
- const text = arg.getLiteralText();
461
- if (text === "") {
462
- stats.patchedDefects++;
463
- }
464
- }
465
- });
466
- }
467
- };
468
-
469
- // src/commands/db/gen-dbschema/transforms/ast/replace-unknown.ts
470
- import { Node as Node5 } from "ts-morph";
471
- var KNOWN_TYPE_FACTORIES = {
472
- user_profile: "userProfile",
473
- file_attachment: "fileAttachment"
474
- };
475
- var replaceUnknownTransform = {
476
- name: "replace-unknown",
477
- transform(ctx) {
478
- const { sourceFile, stats } = ctx;
479
- const fullText = sourceFile.getFullText();
480
- sourceFile.forEachDescendant((node) => {
481
- if (!Node5.isCallExpression(node)) {
482
- return;
483
- }
484
- const expression = node.getExpression();
485
- if (!Node5.isIdentifier(expression) || expression.getText() !== "unknown") {
486
- return;
487
- }
488
- const nodeStart = node.getStart();
489
- const textBefore = fullText.slice(Math.max(0, nodeStart - 500), nodeStart);
490
- const lines = textBefore.split("\n");
491
- let factoryName = "text";
492
- let foundKnownType = false;
493
- for (let i = lines.length - 1; i >= Math.max(0, lines.length - 5); i--) {
494
- const line = lines[i];
495
- const todoMatch = line.match(/\/\/ TODO: failed to parse database type '(?:\w+\.)?([\w_]+)(\[\])?'/);
496
- if (todoMatch) {
497
- const typeName = todoMatch[1];
498
- if (KNOWN_TYPE_FACTORIES[typeName]) {
499
- factoryName = KNOWN_TYPE_FACTORIES[typeName];
500
- foundKnownType = true;
501
- }
502
- break;
503
- }
504
- }
505
- expression.replaceWithText(factoryName);
506
- if (foundKnownType) {
507
- stats.replacedUnknown++;
257
+ // src/commands/db/gen-dbschema/helper/custom-types.ts
258
+ var CUSTOM_TYPE_PATTERN = /\/\/ TODO: failed to parse database type '(?:\w+\.)?(user_profile|file_attachment)(\[\])?'/;
259
+ function replaceUnknownColumns(source) {
260
+ const lines = source.split("\n");
261
+ const result = [];
262
+ let replaced = 0;
263
+ const unmatched = [];
264
+ for (let i = 0; i < lines.length; i += 1) {
265
+ const line = lines[i];
266
+ const match = line.match(CUSTOM_TYPE_PATTERN);
267
+ if (match) {
268
+ const typeName = match[1];
269
+ const factory = typeName === "user_profile" ? "userProfile" : "fileAttachment";
270
+ const replacedLine = replaceFollowingUnknown(lines[i + 1], factory);
271
+ if (replacedLine) {
272
+ result.push(replacedLine);
273
+ replaced += 1;
274
+ i += 1;
508
275
  } else {
509
- stats.fallbackToText++;
276
+ unmatched.push(line.trim());
277
+ result.push(line);
510
278
  }
511
- });
512
- const todoCommentRegex = /\/\/ TODO: failed to parse database type '[^']+'\s*\n/g;
513
- const currentText = sourceFile.getFullText();
514
- const cleanedText = currentText.replace(todoCommentRegex, "");
515
- if (cleanedText !== currentText) {
516
- sourceFile.replaceWithText(cleanedText);
517
- }
518
- }
519
- };
520
-
521
- // src/commands/db/gen-dbschema/transforms/ast/replace-timestamp.ts
522
- import { Node as Node6 } from "ts-morph";
523
- function checkTimestampOptions(optionsArg) {
524
- let hasWithTimezone = false;
525
- let hasModeString = false;
526
- for (const prop of optionsArg.getProperties()) {
527
- if (!Node6.isPropertyAssignment(prop)) {
528
279
  continue;
529
280
  }
530
- const propName = prop.getName();
531
- const initializer = prop.getInitializer();
532
- if (propName === "withTimezone") {
533
- if (Node6.isTrueLiteral(initializer)) {
534
- hasWithTimezone = true;
535
- }
536
- }
537
- if (propName === "mode") {
538
- if (Node6.isStringLiteral(initializer) && initializer.getLiteralText() === "string") {
539
- hasModeString = true;
540
- }
281
+ if (line.includes("unknown(")) {
282
+ unmatched.push(line.trim());
541
283
  }
284
+ result.push(line);
542
285
  }
543
- return hasWithTimezone && hasModeString;
286
+ return {
287
+ text: result.join("\n"),
288
+ replaced,
289
+ unmatched
290
+ };
544
291
  }
545
- var replaceTimestampTransform = {
546
- name: "replace-timestamp",
547
- transform(ctx) {
548
- const { sourceFile, stats } = ctx;
549
- const replacements = [];
550
- sourceFile.forEachDescendant((node) => {
551
- if (!Node6.isCallExpression(node)) {
552
- return;
553
- }
554
- const expression = node.getExpression();
555
- if (!Node6.isIdentifier(expression) || expression.getText() !== "timestamp") {
556
- return;
557
- }
558
- const args = node.getArguments();
559
- if (args.length === 2) {
560
- const [fieldArg, optionsArg] = args;
561
- if (!Node6.isStringLiteral(fieldArg)) {
562
- return;
563
- }
564
- if (!Node6.isObjectLiteralExpression(optionsArg)) {
565
- return;
566
- }
567
- if (checkTimestampOptions(optionsArg)) {
568
- const quote = fieldArg.getQuoteKind() === 1 ? '"' : "'";
569
- const fieldName = fieldArg.getLiteralText();
570
- replacements.push({
571
- node,
572
- replacement: `customTimestamptz(${quote}${fieldName}${quote})`
573
- });
574
- }
575
- return;
576
- }
577
- if (args.length === 1) {
578
- const [optionsArg] = args;
579
- if (!Node6.isObjectLiteralExpression(optionsArg)) {
580
- return;
581
- }
582
- if (checkTimestampOptions(optionsArg)) {
583
- replacements.push({
584
- node,
585
- replacement: "customTimestamptz()"
586
- });
587
- }
588
- return;
589
- }
590
- });
591
- for (const { node, replacement } of replacements.reverse()) {
592
- node.replaceWithText(replacement);
593
- stats.replacedTimestamp++;
594
- }
595
- }
596
- };
597
-
598
- // src/commands/db/gen-dbschema/transforms/ast/replace-default-now.ts
599
- import { Node as Node7 } from "ts-morph";
600
- var replaceDefaultNowTransform = {
601
- name: "replace-default-now",
602
- transform(ctx) {
603
- const { sourceFile, stats } = ctx;
604
- sourceFile.forEachDescendant((node) => {
605
- if (!Node7.isCallExpression(node)) {
606
- return;
607
- }
608
- const expression = node.getExpression();
609
- if (!Node7.isPropertyAccessExpression(expression)) {
610
- return;
611
- }
612
- if (expression.getName() !== "defaultNow") {
613
- return;
614
- }
615
- if (node.getArguments().length !== 0) {
616
- return;
617
- }
618
- const objectExpr = expression.getExpression();
619
- const objectText = objectExpr.getText();
620
- node.replaceWithText(`${objectText}.default(sql\`CURRENT_TIMESTAMP\`)`);
621
- stats.replacedDefaultNow++;
622
- });
292
+ function replaceFollowingUnknown(nextLine, factory) {
293
+ if (!nextLine || !nextLine.includes("unknown(")) {
294
+ return void 0;
623
295
  }
624
- };
296
+ return nextLine.replace("unknown(", `${factory}(`);
297
+ }
625
298
 
626
- // src/commands/db/gen-dbschema/transforms/ast/remove-system-fields.ts
627
- import { Node as Node8 } from "ts-morph";
628
- var SYSTEM_TO_BUSINESS = {
629
- _created_at: "created_at",
630
- _created_by: "created_by",
631
- _updated_at: "updated_at",
632
- _updated_by: "updated_by"
633
- };
634
- var TABLE_FACTORIES = ["pgTable", "pgView", "pgMaterializedView", "table", "view", "materializedView"];
635
- var removeSystemFieldsTransform = {
636
- name: "remove-system-fields",
637
- transform(ctx) {
638
- const { sourceFile, stats } = ctx;
639
- sourceFile.forEachDescendant((node) => {
640
- if (!Node8.isCallExpression(node)) {
641
- return;
642
- }
643
- const expression = node.getExpression();
644
- let factoryName = "";
645
- if (Node8.isIdentifier(expression)) {
646
- factoryName = expression.getText();
647
- } else if (Node8.isPropertyAccessExpression(expression)) {
648
- factoryName = expression.getName();
649
- }
650
- if (!TABLE_FACTORIES.includes(factoryName)) {
651
- return;
652
- }
653
- const args = node.getArguments();
654
- if (args.length < 2) {
655
- return;
656
- }
657
- const columnsArg = args[1];
658
- if (!Node8.isObjectLiteralExpression(columnsArg)) {
659
- return;
660
- }
661
- const fieldNames = /* @__PURE__ */ new Set();
662
- const properties = columnsArg.getProperties();
663
- for (const prop of properties) {
664
- if (!Node8.isPropertyAssignment(prop)) {
665
- continue;
666
- }
667
- const nameNode = prop.getNameNode();
668
- let fieldName = "";
669
- if (Node8.isStringLiteral(nameNode)) {
670
- fieldName = nameNode.getLiteralText();
671
- } else if (Node8.isIdentifier(nameNode)) {
672
- fieldName = nameNode.getText();
673
- }
674
- if (fieldName) {
675
- fieldNames.add(fieldName);
676
- }
677
- }
678
- const propsToRemove = [];
679
- for (const prop of properties) {
680
- if (!Node8.isPropertyAssignment(prop)) {
681
- continue;
682
- }
683
- const nameNode = prop.getNameNode();
684
- let fieldName = "";
685
- if (Node8.isStringLiteral(nameNode)) {
686
- fieldName = nameNode.getLiteralText();
687
- } else if (Node8.isIdentifier(nameNode)) {
688
- fieldName = nameNode.getText();
689
- }
690
- const businessField = SYSTEM_TO_BUSINESS[fieldName];
691
- if (businessField && fieldNames.has(businessField)) {
692
- propsToRemove.push(prop);
693
- }
694
- }
695
- for (const prop of propsToRemove) {
696
- const leadingCommentRanges = prop.getLeadingCommentRanges();
697
- for (const comment of leadingCommentRanges) {
698
- const commentText = comment.getText();
699
- if (commentText.includes("System field:")) {
700
- }
701
- }
702
- prop.remove();
703
- stats.removedSystemFields++;
704
- }
705
- });
299
+ // src/commands/db/gen-dbschema/helper/imports.ts
300
+ import fs from "fs";
301
+ import { fileURLToPath } from "url";
302
+ function tweakImports(source) {
303
+ const importRegex = /import \{([^}]*)\} from "drizzle-orm\/pg-core";?/;
304
+ const match = source.match(importRegex);
305
+ if (!match) {
306
+ return source;
706
307
  }
707
- };
708
-
709
- // src/commands/db/gen-dbschema/transforms/ast/tweak-imports.ts
710
- var REMOVE_IMPORTS = ["pgSchema", "customType"];
711
- var PG_FACTORIES2 = ["pgTable", "pgView", "pgMaterializedView", "pgEnum", "pgSequence"];
712
- var tweakImportsTransform = {
713
- name: "tweak-imports",
714
- transform(ctx) {
715
- const { sourceFile, stats } = ctx;
716
- const fullText = sourceFile.getFullText();
717
- const imports = sourceFile.getImportDeclarations();
718
- const pgCoreImport = imports.find((imp) => {
719
- const moduleSpec = imp.getModuleSpecifierValue();
720
- return moduleSpec === "drizzle-orm/pg-core";
721
- });
722
- if (!pgCoreImport) {
723
- return;
724
- }
725
- const namedImports = pgCoreImport.getNamedImports();
726
- const currentImports = namedImports.map((ni) => ni.getName());
727
- const toRemove = [];
728
- const toAdd = [];
729
- for (const identifier of REMOVE_IMPORTS) {
730
- if (currentImports.includes(identifier)) {
731
- toRemove.push(identifier);
732
- stats.removedImports.push(identifier);
733
- }
734
- }
735
- if (currentImports.includes("timestamp")) {
736
- const timestampUsed = /timestamp\s*\(/.test(fullText);
737
- if (!timestampUsed) {
738
- toRemove.push("timestamp");
739
- stats.removedImports.push("timestamp");
740
- }
741
- }
742
- for (const factory of PG_FACTORIES2) {
743
- if (!currentImports.includes(factory)) {
744
- const pattern = new RegExp(`${factory}\\s*\\(`);
745
- if (pattern.test(fullText)) {
746
- toAdd.push(factory);
747
- stats.addedImports.push(factory);
748
- }
749
- }
750
- }
751
- for (const identifier of toRemove) {
752
- const freshNamedImports = pgCoreImport.getNamedImports();
753
- const namedImport = freshNamedImports.find((ni) => ni.getName() === identifier);
754
- if (namedImport) {
755
- namedImport.remove();
756
- }
757
- }
758
- for (const identifier of toAdd) {
759
- pgCoreImport.addNamedImport(identifier);
760
- }
761
- if (fullText.includes("sql`CURRENT_TIMESTAMP`")) {
762
- const drizzleOrmImport = imports.find((imp) => {
763
- const moduleSpec = imp.getModuleSpecifierValue();
764
- return moduleSpec === "drizzle-orm";
765
- });
766
- if (!drizzleOrmImport) {
767
- sourceFile.addImportDeclaration({
768
- moduleSpecifier: "drizzle-orm",
769
- namedImports: ["sql"]
770
- });
771
- stats.addedImports.push("sql");
772
- } else {
773
- const hasSql = drizzleOrmImport.getNamedImports().some((ni) => ni.getName() === "sql");
774
- if (!hasSql) {
775
- drizzleOrmImport.addNamedImport("sql");
776
- stats.addedImports.push("sql");
777
- }
778
- }
308
+ const identifiers = match[1].split(",").map((id) => id.trim()).filter(Boolean).filter((id) => id !== "pgSchema" && id !== "customType");
309
+ const filteredIdentifiers = identifiers.filter((id) => {
310
+ if (id === "timestamp") {
311
+ const timestampUsageRegex = /timestamp\s*\(/;
312
+ return timestampUsageRegex.test(source);
779
313
  }
314
+ return true;
315
+ });
316
+ if (source.includes("pgTable(") && !filteredIdentifiers.includes("pgTable")) {
317
+ filteredIdentifiers.push("pgTable");
780
318
  }
781
- };
782
-
783
- // src/commands/db/gen-dbschema/transforms/ast/index.ts
784
- var defaultTransforms = [
785
- patchDefectsTransform,
786
- // #2 Fix syntax errors first
787
- removePgSchemaTransform,
788
- // #3 Remove pgSchema declarations
789
- convertSchemaCallsTransform,
790
- // #4 Convert schema.xxx() to pgXxx()
791
- renameIdentifiersTransform,
792
- // #5+#6 Rename identifiers (auto-updates refs)
793
- replaceUnknownTransform,
794
- // #7 Replace unknown types
795
- replaceTimestampTransform,
796
- // #8 Replace timestamp
797
- replaceDefaultNowTransform,
798
- // #9 Replace .defaultNow()
799
- removeSystemFieldsTransform,
800
- // #10 Remove conflicting system fields
801
- tweakImportsTransform
802
- // #12 Adjust imports
803
- ];
804
-
805
- // src/commands/db/gen-dbschema/transforms/text/patch-defects.ts
806
- function collectExistingIdentifiers2(source) {
807
- const identifiers = /* @__PURE__ */ new Set();
808
- const exportPattern = /export\s+const\s+([a-zA-Z_$][a-zA-Z0-9_$]*)\s*=/g;
809
- let match;
810
- while ((match = exportPattern.exec(source)) !== null) {
811
- identifiers.add(match[1]);
319
+ if (source.includes("pgView(") && !filteredIdentifiers.includes("pgView")) {
320
+ filteredIdentifiers.push("pgView");
812
321
  }
813
- return identifiers;
814
- }
815
- function patchDefects(source) {
816
- let fixed = 0;
817
- const renamedQuotedExports = [];
818
- let text = source;
819
- const usedIdentifiers = collectExistingIdentifiers2(source);
820
- text = text.replace(/\.default\('\)/g, () => {
821
- fixed += 1;
822
- return `.default('')`;
823
- });
824
- const quotedExportPattern = /export const\s+"([^"]+)"\s*=/g;
825
- text = text.replace(quotedExportPattern, (_match, quotedName) => {
826
- const sanitized = getUniqueIdentifier(quotedName, usedIdentifiers);
827
- renamedQuotedExports.push({ from: quotedName, to: sanitized });
828
- fixed += 1;
829
- return `export const ${sanitized} =`;
830
- });
831
- for (const { from, to } of renamedQuotedExports) {
832
- const escaped = from.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
833
- const callPattern = new RegExp(`"${escaped}"\\s*\\(`, "g");
834
- text = text.replace(callPattern, `${to}(`);
835
- const dotPattern = new RegExp(`"${escaped}"\\s*\\.`, "g");
836
- text = text.replace(dotPattern, `${to}.`);
322
+ if (source.includes("pgMaterializedView(") && !filteredIdentifiers.includes("pgMaterializedView")) {
323
+ filteredIdentifiers.push("pgMaterializedView");
837
324
  }
838
- return { text, fixed, renamedQuotedExports };
839
- }
840
-
841
- // src/commands/db/gen-dbschema/transforms/text/header.ts
842
- var ESLINT_DISABLE = "/* eslint-disable */";
843
- var HEADER_COMMENT = "/** auto generated, do not edit */";
844
- var FULL_HEADER = `${ESLINT_DISABLE}
845
- ${HEADER_COMMENT}`;
846
- function ensureHeader(source) {
847
- let trimmed = source;
848
- const headerPatterns = [
849
- /^\/\*\s*eslint-disable\s*\*\/\s*\n?/,
850
- /^\/\*\*\s*auto generated[^*]*\*\/\s*\n?/
851
- ];
852
- for (const pattern of headerPatterns) {
853
- while (pattern.test(trimmed)) {
854
- trimmed = trimmed.replace(pattern, "");
855
- }
325
+ if (source.includes("pgEnum(") && !filteredIdentifiers.includes("pgEnum")) {
326
+ filteredIdentifiers.push("pgEnum");
856
327
  }
857
- trimmed = trimmed.trimStart();
858
- return `${FULL_HEADER}
859
- ${trimmed}`;
860
- }
861
-
862
- // src/commands/db/gen-dbschema/transforms/text/system-comments.ts
863
- var SYSTEM_FIELD_COMMENTS = {
864
- _created_at: "Creation time",
865
- _created_by: "Creator",
866
- _updated_at: "Update time",
867
- _updated_by: "Updater"
868
- };
869
- function addSystemFieldComments(source) {
870
- const lines = source.split("\n");
871
- for (let i = 0; i < lines.length; i += 1) {
872
- const line = lines[i];
873
- const entry = Object.entries(SYSTEM_FIELD_COMMENTS).find(
874
- ([key]) => line.includes(`"${key}"`) || line.includes(`'${key}'`)
875
- );
876
- if (!entry) {
877
- continue;
878
- }
879
- const [, description] = entry;
880
- const previousLine = lines[i - 1]?.trim() ?? "";
881
- if (previousLine.startsWith("//") && previousLine.includes("System field")) {
882
- continue;
883
- }
884
- const indentMatch = line.match(/^\s*/);
885
- const indent = indentMatch ? indentMatch[0] : "";
886
- const comment = `${indent}// System field: ${description} (auto-filled, do not modify)`;
887
- lines.splice(i, 0, comment);
888
- i += 1;
328
+ if (source.includes("pgSequence(") && !filteredIdentifiers.includes("pgSequence")) {
329
+ filteredIdentifiers.push("pgSequence");
889
330
  }
890
- return lines.join("\n");
331
+ const unique = Array.from(new Set(filteredIdentifiers));
332
+ const replacement = `import { ${unique.join(", ")} } from "drizzle-orm/pg-core"`;
333
+ return source.replace(importRegex, replacement);
891
334
  }
892
-
893
- // src/commands/db/gen-dbschema/transforms/text/inline-types.ts
894
- import fs from "fs";
895
- import { fileURLToPath } from "url";
896
335
  function inlineCustomTypes(source) {
897
- let text = source.replace(/import \{[^}]*\} from ["']\.\/types["'];?\n*/g, "");
336
+ const text = source.replace(/import \{[^}]*\} from ["']\.\/types["'];?\n*/g, "");
898
337
  const templatePath = resolveTemplateTypesPath();
899
338
  if (!templatePath) {
900
- console.warn("[text/inline-types] Template types file not found.");
339
+ console.warn("[postprocess-drizzle-schema] Template types file not found.");
901
340
  return text;
902
341
  }
903
- const templateContent = fs.readFileSync(templatePath, "utf8");
904
- return inlineFromTemplate(text, templateContent);
342
+ return inlineFromTemplateContent(text, fs.readFileSync(templatePath, "utf8"));
905
343
  }
906
- function inlineFromTemplate(source, templateContent) {
344
+ function inlineFromTemplateContent(text, templateContent) {
907
345
  const typeDefinitions = templateContent.replace(/^import\s+.*;\r?\n*/gm, "").trim();
908
- let text = source;
909
346
  const needsSql = typeDefinitions.includes("sql`") && !text.includes("from 'drizzle-orm'") && !text.includes('from "drizzle-orm"');
910
347
  const needsCustomType = typeDefinitions.includes("customType<") && !text.includes("customType");
911
348
  if (needsCustomType) {
912
349
  text = ensureImportIdentifier(text, "drizzle-orm/pg-core", "customType");
913
350
  }
914
- if (needsSql) {
351
+ if (needsSql && !text.includes("from 'drizzle-orm'") && !text.includes('from "drizzle-orm"')) {
915
352
  const importMatch = text.match(/^import [\s\S]*?from ["']drizzle-orm\/pg-core["'];?\n/m);
916
353
  if (importMatch) {
917
354
  const insertPoint = text.indexOf(importMatch[0]) + importMatch[0].length;
918
355
  text = text.slice(0, insertPoint) + "import { sql } from 'drizzle-orm';\n" + text.slice(insertPoint);
919
356
  }
920
357
  }
921
- const headerPrefix = `${FULL_HEADER}
358
+ const headerPrefix = `${HEADER_COMMENT}
922
359
  `;
923
360
  let insertionPoint = 0;
924
361
  if (text.startsWith(headerPrefix)) {
@@ -952,10 +389,7 @@ function ensureImportIdentifier(source, packageName, identifier) {
952
389
  }
953
390
  function resolveTemplateTypesPath() {
954
391
  const candidates = [
955
- // Source code paths (relative to transforms/text/)
956
- new URL("../../template/types.ts", import.meta.url),
957
392
  new URL("../template/types.ts", import.meta.url),
958
- // Bundled path (relative to dist/index.js)
959
393
  new URL("./gen-dbschema-template/types.ts", import.meta.url)
960
394
  ];
961
395
  for (const url of candidates) {
@@ -967,9 +401,131 @@ function resolveTemplateTypesPath() {
967
401
  return void 0;
968
402
  }
969
403
 
970
- // src/commands/db/gen-dbschema/transforms/text/table-aliases.ts
404
+ // src/commands/db/gen-dbschema/helper/system-fields.ts
405
+ function addSystemFieldComments(source) {
406
+ const commentMap = {
407
+ "_created_at": "Creation time",
408
+ "_created_by": "Creator",
409
+ "_updated_at": "Update time",
410
+ "_updated_by": "Updater"
411
+ };
412
+ const lines = source.split("\n");
413
+ for (let i = 0; i < lines.length; i += 1) {
414
+ const line = lines[i];
415
+ const entry = Object.entries(commentMap).find(([key]) => line.includes(`"${key}"`));
416
+ if (!entry) {
417
+ continue;
418
+ }
419
+ const [, description] = entry;
420
+ const previousLine = lines[i - 1]?.trim() ?? "";
421
+ if (previousLine.startsWith("//") && previousLine.includes("System field")) {
422
+ continue;
423
+ }
424
+ const indentMatch = line.match(/^\s*/);
425
+ const indent = indentMatch ? indentMatch[0] : "";
426
+ const comment = `${indent}// System field: ${description} (auto-filled, do not modify)`;
427
+ lines.splice(i, 0, comment);
428
+ i += 1;
429
+ }
430
+ return lines.join("\n");
431
+ }
432
+ function removeConflictingSystemFields(source) {
433
+ const systemFieldMap = {
434
+ "_created_at": "created_at",
435
+ "_created_by": "created_by",
436
+ "_updated_at": "updated_at",
437
+ "_updated_by": "updated_by"
438
+ };
439
+ const lines = source.split("\n");
440
+ const result = [];
441
+ let inTable = false;
442
+ let tableStartLine = -1;
443
+ const tableBusinessFields = /* @__PURE__ */ new Set();
444
+ let bracketDepth = 0;
445
+ for (let i = 0; i < lines.length; i += 1) {
446
+ const line = lines[i];
447
+ if (!inTable && /=\s*(pgTable|pgView|pgMaterializedView)\s*\(/.test(line)) {
448
+ inTable = true;
449
+ tableStartLine = result.length;
450
+ tableBusinessFields.clear();
451
+ bracketDepth = 0;
452
+ }
453
+ if (inTable) {
454
+ for (const char of line) {
455
+ if (char === "{") bracketDepth++;
456
+ if (char === "}") bracketDepth--;
457
+ }
458
+ for (const businessField of Object.values(systemFieldMap)) {
459
+ if (line.includes(`"${businessField}"`) || line.includes(`'${businessField}'`)) {
460
+ tableBusinessFields.add(businessField);
461
+ }
462
+ }
463
+ if (bracketDepth === 0 && line.includes(");")) {
464
+ inTable = false;
465
+ const tableEndLine = result.length;
466
+ for (let j = tableStartLine; j <= tableEndLine; j++) {
467
+ const tableLine = result[j] || "";
468
+ let shouldRemove = false;
469
+ for (const [systemField, businessField] of Object.entries(systemFieldMap)) {
470
+ if (tableBusinessFields.has(businessField)) {
471
+ if (tableLine.includes(`"${systemField}"`) || tableLine.includes(`'${systemField}'`)) {
472
+ shouldRemove = true;
473
+ if (j > 0 && result[j - 1]?.includes("// System field:")) {
474
+ result[j - 1] = null;
475
+ }
476
+ break;
477
+ }
478
+ }
479
+ }
480
+ if (shouldRemove) {
481
+ result[j] = null;
482
+ }
483
+ }
484
+ }
485
+ }
486
+ result.push(line);
487
+ }
488
+ return result.filter((line) => line !== null).join("\n");
489
+ }
490
+
491
+ // src/commands/db/gen-dbschema/helper/patch-helper.ts
492
+ function patchDrizzleKitDefects(source) {
493
+ let fixed = 0;
494
+ const text = source.replace(/\.default\('\)/g, () => {
495
+ fixed += 1;
496
+ return `.default('')`;
497
+ });
498
+ return { text, fixed };
499
+ }
500
+
501
+ // src/commands/db/gen-dbschema/helper/timestamp-replacement.ts
502
+ function replaceTimestampWithCustomTypes(source) {
503
+ let replaced = 0;
504
+ const pattern = /timestamp\((['"])(.*?)\1,\s*(\{[^}]*\})\)/g;
505
+ const text = source.replace(pattern, (match, quote, fieldName, options) => {
506
+ const hasWithTimezone = /withTimezone:\s*true/.test(options);
507
+ const hasModeString = /mode:\s*['"]string['"]/.test(options);
508
+ if (hasWithTimezone && hasModeString) {
509
+ replaced += 1;
510
+ return `customTimestamptz(${quote}${fieldName}${quote})`;
511
+ }
512
+ return match;
513
+ });
514
+ return { text, replaced };
515
+ }
516
+ function replaceDefaultNowWithSql(source) {
517
+ let replaced = 0;
518
+ const pattern = /\.defaultNow\(\)/g;
519
+ const text = source.replace(pattern, () => {
520
+ replaced += 1;
521
+ return ".default(sql`CURRENT_TIMESTAMP`)";
522
+ });
523
+ return { text, replaced };
524
+ }
525
+
526
+ // src/commands/db/gen-dbschema/helper/appendTableAliases.ts
971
527
  var TABLE_ALIAS_MARKER = "// table aliases";
972
- function generateTableAliases(source) {
528
+ function appendTableAliases(source) {
973
529
  const markerIndex = source.indexOf(`
974
530
  ${TABLE_ALIAS_MARKER}`);
975
531
  const base = markerIndex === -1 ? source : source.slice(0, markerIndex);
@@ -991,100 +547,62 @@ ${aliasLines}
991
547
  `;
992
548
  }
993
549
 
994
- // src/commands/db/gen-dbschema/transforms/text/format.ts
995
- function formatSource(source) {
996
- let text = source;
997
- text = text.replace(/\r\n/g, "\n");
998
- text = text.replace(/\n{3,}/g, "\n\n");
999
- if (!text.endsWith("\n")) {
1000
- text += "\n";
1001
- }
1002
- return text;
1003
- }
1004
-
1005
550
  // src/commands/db/gen-dbschema/postprocess.ts
1006
- function postprocessSchema(rawSource) {
1007
- const patchResult = patchDefects(rawSource);
1008
- let source = patchResult.text;
1009
- const { sourceFile } = parseSource(source);
1010
- const astStats = applyTransforms(sourceFile, defaultTransforms);
1011
- formatSourceFile(sourceFile);
1012
- source = printSourceFile(sourceFile);
1013
- source = ensureHeader(source);
1014
- source = addSystemFieldComments(source);
1015
- source = inlineCustomTypes(source);
1016
- source = generateTableAliases(source);
1017
- source = formatSource(source);
1018
- return {
1019
- source,
1020
- astStats,
1021
- patchedDefects: patchResult.fixed
1022
- };
1023
- }
1024
- function logStats(result, prefix = "[postprocess]") {
1025
- const { astStats, patchedDefects } = result;
1026
- if (patchedDefects > 0) {
1027
- console.info(`${prefix} Patched ${patchedDefects} syntax defects`);
1028
- }
1029
- if (astStats.removedPgSchemas > 0) {
1030
- console.info(`${prefix} Removed ${astStats.removedPgSchemas} pgSchema declarations`);
1031
- }
1032
- if (astStats.convertedSchemaCalls > 0) {
1033
- console.info(`${prefix} Converted ${astStats.convertedSchemaCalls} schema.xxx() calls`);
1034
- }
1035
- if (astStats.renamedIdentifiers.length > 0) {
1036
- console.info(`${prefix} Renamed ${astStats.renamedIdentifiers.length} identifiers:`);
1037
- for (const { from, to } of astStats.renamedIdentifiers) {
1038
- console.info(`${prefix} ${from} -> ${to}`);
1039
- }
1040
- }
1041
- if (astStats.replacedUnknown > 0) {
1042
- console.info(`${prefix} Replaced ${astStats.replacedUnknown} unknown types with custom types`);
1043
- }
1044
- if (astStats.fallbackToText > 0) {
1045
- console.info(`${prefix} Replaced ${astStats.fallbackToText} unknown types with text (fallback)`);
1046
- }
1047
- if (astStats.unmatchedUnknown.length > 0) {
1048
- console.warn(`${prefix} Unmatched unknown types:`);
1049
- for (const line of astStats.unmatchedUnknown) {
1050
- console.warn(`${prefix} ${line}`);
1051
- }
1052
- }
1053
- if (astStats.replacedTimestamp > 0) {
1054
- console.info(`${prefix} Replaced ${astStats.replacedTimestamp} timestamp with customTimestamptz`);
1055
- }
1056
- if (astStats.replacedDefaultNow > 0) {
1057
- console.info(`${prefix} Replaced ${astStats.replacedDefaultNow} .defaultNow() calls`);
1058
- }
1059
- if (astStats.removedSystemFields > 0) {
1060
- console.info(`${prefix} Removed ${astStats.removedSystemFields} conflicting system fields`);
1061
- }
1062
- if (astStats.addedImports.length > 0) {
1063
- console.info(`${prefix} Added imports: ${astStats.addedImports.join(", ")}`);
1064
- }
1065
- if (astStats.removedImports.length > 0) {
1066
- console.info(`${prefix} Removed imports: ${astStats.removedImports.join(", ")}`);
1067
- }
1068
- }
1069
-
1070
- // src/commands/db/gen-dbschema/index.ts
1071
551
  function postprocessDrizzleSchema(targetPath) {
1072
552
  const resolvedPath = path.resolve(targetPath);
1073
- if (!fs3.existsSync(resolvedPath)) {
553
+ if (!fs2.existsSync(resolvedPath)) {
1074
554
  console.warn(`[postprocess-drizzle-schema] File not found: ${resolvedPath}`);
1075
555
  return void 0;
1076
556
  }
1077
- const rawSource = fs3.readFileSync(resolvedPath, "utf8");
1078
- const result = postprocessSchema(rawSource);
1079
- fs3.writeFileSync(resolvedPath, result.source, "utf8");
1080
- logStats(result, "[postprocess-drizzle-schema]");
557
+ let text = fs2.readFileSync(resolvedPath, "utf8");
558
+ text = ensureHeaderComment(text);
559
+ const patchResult = patchDrizzleKitDefects(text);
560
+ text = patchResult.text;
561
+ text = removePgSchemaDeclarations(text);
562
+ const tableConversion = convertSchemaTableInvocations(text);
563
+ text = tableConversion.text;
564
+ const renameResult = renamePgTableConstants(text);
565
+ text = renameResult.text;
566
+ text = updateTableReferenceIdentifiers(text, renameResult.renames);
567
+ const replacement = replaceUnknownColumns(text);
568
+ text = replacement.text;
569
+ const timestampReplacement = replaceTimestampWithCustomTypes(text);
570
+ text = timestampReplacement.text;
571
+ const defaultNowReplacement = replaceDefaultNowWithSql(text);
572
+ text = defaultNowReplacement.text;
573
+ text = removeConflictingSystemFields(text);
574
+ text = addSystemFieldComments(text);
575
+ text = tweakImports(text);
576
+ text = inlineCustomTypes(text);
577
+ text = appendTableAliases(text);
578
+ text = text.replace(/\r?\n/g, "\n");
579
+ text = collapseExtraBlankLines(text);
580
+ fs2.writeFileSync(resolvedPath, text, "utf8");
581
+ if (patchResult.fixed > 0) {
582
+ console.info(`[postprocess-drizzle-schema] Patched ${patchResult.fixed} drizzle-kit defects (.default(') -> .default(''))`);
583
+ }
584
+ if (replacement.replaced > 0) {
585
+ console.info(`[postprocess-drizzle-schema] Replaced ${replacement.replaced} unknown columns`);
586
+ }
587
+ if (replacement.unmatched.length > 0) {
588
+ console.warn("[postprocess-drizzle-schema] Unmatched custom types:", replacement.unmatched.length);
589
+ replacement.unmatched.forEach((line) => console.warn(` ${line}`));
590
+ }
591
+ if (tableConversion.converted > 0) {
592
+ console.info(`[postprocess-drizzle-schema] Converted ${tableConversion.converted} schema.table invocations to pgTable`);
593
+ }
594
+ if (timestampReplacement.replaced > 0) {
595
+ console.info(`[postprocess-drizzle-schema] Replaced ${timestampReplacement.replaced} timestamp fields with customTimestamptz`);
596
+ }
597
+ if (defaultNowReplacement.replaced > 0) {
598
+ console.info(`[postprocess-drizzle-schema] Replaced ${defaultNowReplacement.replaced} .defaultNow() with .default(sql\`CURRENT_TIMESTAMP\`)`);
599
+ }
1081
600
  return {
1082
- replacedUnknown: result.astStats.replacedUnknown,
1083
- fallbackToText: result.astStats.fallbackToText,
1084
- unmatchedUnknown: result.astStats.unmatchedUnknown,
1085
- patchedDefects: result.patchedDefects,
1086
- replacedTimestamps: result.astStats.replacedTimestamp,
1087
- replacedDefaultNow: result.astStats.replacedDefaultNow
601
+ replacedUnknown: replacement.replaced,
602
+ unmatchedUnknown: replacement.unmatched,
603
+ patchedDefects: patchResult.fixed,
604
+ replacedTimestamps: timestampReplacement.replaced,
605
+ replacedDefaultNow: defaultNowReplacement.replaced
1088
606
  };
1089
607
  }
1090
608
 
@@ -1506,10 +1024,10 @@ export class ${className}Module {}
1506
1024
  }
1507
1025
 
1508
1026
  // src/commands/db/gen-nest-resource/schema-parser.ts
1509
- import { Project as Project2, Node as Node9 } from "ts-morph";
1027
+ import { Project, Node } from "ts-morph";
1510
1028
  var DrizzleSchemaParser = class {
1511
1029
  constructor(projectOptions) {
1512
- this.project = new Project2(projectOptions);
1030
+ this.project = new Project(projectOptions);
1513
1031
  }
1514
1032
  parseSchemaFile(filePath) {
1515
1033
  const sourceFile = this.project.addSourceFileAtPath(filePath);
@@ -1519,7 +1037,7 @@ var DrizzleSchemaParser = class {
1519
1037
  const declarations = statement.getDeclarations();
1520
1038
  for (const declaration of declarations) {
1521
1039
  const initializer = declaration.getInitializer();
1522
- if (initializer && Node9.isCallExpression(initializer)) {
1040
+ if (initializer && Node.isCallExpression(initializer)) {
1523
1041
  const expression = initializer.getExpression();
1524
1042
  if (expression.getText() === "pgTable") {
1525
1043
  const tableInfo = this.parsePgTable(
@@ -1542,13 +1060,13 @@ var DrizzleSchemaParser = class {
1542
1060
  }
1543
1061
  const tableName = args[0].getText().replace(/['"]/g, "");
1544
1062
  const fieldsArg = args[1];
1545
- if (!Node9.isObjectLiteralExpression(fieldsArg)) {
1063
+ if (!Node.isObjectLiteralExpression(fieldsArg)) {
1546
1064
  return null;
1547
1065
  }
1548
1066
  const fields = [];
1549
1067
  const properties = fieldsArg.getProperties();
1550
1068
  for (const prop of properties) {
1551
- if (Node9.isPropertyAssignment(prop)) {
1069
+ if (Node.isPropertyAssignment(prop)) {
1552
1070
  const fieldName = prop.getName();
1553
1071
  const initializer = prop.getInitializer();
1554
1072
  const leadingComments = prop.getLeadingCommentRanges();
@@ -1556,7 +1074,7 @@ var DrizzleSchemaParser = class {
1556
1074
  if (leadingComments.length > 0) {
1557
1075
  comment = leadingComments.map((c) => c.getText()).join("\n").replace(/\/\//g, "").trim();
1558
1076
  }
1559
- if (initializer && Node9.isCallExpression(initializer)) {
1077
+ if (initializer && Node.isCallExpression(initializer)) {
1560
1078
  const fieldInfo = this.parseField(fieldName, initializer, comment);
1561
1079
  fields.push(fieldInfo);
1562
1080
  }
@@ -1588,10 +1106,10 @@ var DrizzleSchemaParser = class {
1588
1106
  parseBaseType(callExpr, fieldInfo) {
1589
1107
  let current = callExpr;
1590
1108
  let baseCall = null;
1591
- while (Node9.isCallExpression(current)) {
1109
+ while (Node.isCallExpression(current)) {
1592
1110
  baseCall = current;
1593
1111
  const expression2 = current.getExpression();
1594
- if (Node9.isPropertyAccessExpression(expression2)) {
1112
+ if (Node.isPropertyAccessExpression(expression2)) {
1595
1113
  current = expression2.getExpression();
1596
1114
  } else {
1597
1115
  break;
@@ -1602,7 +1120,7 @@ var DrizzleSchemaParser = class {
1602
1120
  }
1603
1121
  const expression = baseCall.getExpression();
1604
1122
  let typeName = "";
1605
- if (Node9.isPropertyAccessExpression(expression)) {
1123
+ if (Node.isPropertyAccessExpression(expression)) {
1606
1124
  typeName = expression.getName();
1607
1125
  } else {
1608
1126
  typeName = expression.getText();
@@ -1611,25 +1129,25 @@ var DrizzleSchemaParser = class {
1611
1129
  const args = baseCall.getArguments();
1612
1130
  if (args.length > 0) {
1613
1131
  const firstArg = args[0];
1614
- if (Node9.isStringLiteral(firstArg)) {
1132
+ if (Node.isStringLiteral(firstArg)) {
1615
1133
  fieldInfo.columnName = firstArg.getLiteralText();
1616
- } else if (Node9.isObjectLiteralExpression(firstArg)) {
1134
+ } else if (Node.isObjectLiteralExpression(firstArg)) {
1617
1135
  this.parseTypeConfig(firstArg, fieldInfo);
1618
- } else if (Node9.isArrayLiteralExpression(firstArg)) {
1136
+ } else if (Node.isArrayLiteralExpression(firstArg)) {
1619
1137
  fieldInfo.enumValues = firstArg.getElements().map((el) => el.getText().replace(/['"]/g, ""));
1620
1138
  }
1621
1139
  }
1622
- if (args.length > 1 && Node9.isObjectLiteralExpression(args[1])) {
1140
+ if (args.length > 1 && Node.isObjectLiteralExpression(args[1])) {
1623
1141
  this.parseTypeConfig(args[1], fieldInfo);
1624
1142
  }
1625
1143
  }
1626
1144
  parseTypeConfig(objLiteral, fieldInfo) {
1627
- if (!Node9.isObjectLiteralExpression(objLiteral)) {
1145
+ if (!Node.isObjectLiteralExpression(objLiteral)) {
1628
1146
  return;
1629
1147
  }
1630
1148
  const properties = objLiteral.getProperties();
1631
1149
  for (const prop of properties) {
1632
- if (Node9.isPropertyAssignment(prop)) {
1150
+ if (Node.isPropertyAssignment(prop)) {
1633
1151
  const propName = prop.getName();
1634
1152
  const value = prop.getInitializer()?.getText();
1635
1153
  switch (propName) {
@@ -1661,9 +1179,9 @@ var DrizzleSchemaParser = class {
1661
1179
  }
1662
1180
  parseCallChain(callExpr, fieldInfo) {
1663
1181
  let current = callExpr;
1664
- while (Node9.isCallExpression(current)) {
1182
+ while (Node.isCallExpression(current)) {
1665
1183
  const expression = current.getExpression();
1666
- if (Node9.isPropertyAccessExpression(expression)) {
1184
+ if (Node.isPropertyAccessExpression(expression)) {
1667
1185
  const methodName = expression.getName();
1668
1186
  const args = current.getArguments();
1669
1187
  switch (methodName) {
@@ -1760,7 +1278,7 @@ var require2 = createRequire(import.meta.url);
1760
1278
  async function run(options = {}) {
1761
1279
  let exitCode = 0;
1762
1280
  const envPath2 = path2.resolve(process.cwd(), ".env");
1763
- if (fs4.existsSync(envPath2)) {
1281
+ if (fs3.existsSync(envPath2)) {
1764
1282
  loadEnv({ path: envPath2 });
1765
1283
  console.log("[gen-db-schema] \u2713 Loaded .env file");
1766
1284
  }
@@ -1780,7 +1298,7 @@ async function run(options = {}) {
1780
1298
  path2.resolve(__dirname2, "../../config/drizzle.config.js"),
1781
1299
  path2.resolve(__dirname2, "../../../dist/config/drizzle.config.js")
1782
1300
  ];
1783
- const configPath = configPathCandidates.find((p) => fs4.existsSync(p));
1301
+ const configPath = configPathCandidates.find((p) => fs3.existsSync(p));
1784
1302
  console.log("[gen-db-schema] Using drizzle config from:", configPath ?? "(not found)");
1785
1303
  if (!configPath) {
1786
1304
  console.error("[gen-db-schema] Error: drizzle config not found in CLI package");
@@ -1792,8 +1310,8 @@ async function run(options = {}) {
1792
1310
  let lastDir = null;
1793
1311
  while (currentDir !== lastDir) {
1794
1312
  const pkgJsonPath = path2.join(currentDir, "package.json");
1795
- if (fs4.existsSync(pkgJsonPath)) {
1796
- const pkgJsonRaw = fs4.readFileSync(pkgJsonPath, "utf8");
1313
+ if (fs3.existsSync(pkgJsonPath)) {
1314
+ const pkgJsonRaw = fs3.readFileSync(pkgJsonPath, "utf8");
1797
1315
  const pkgJson = JSON.parse(pkgJsonRaw);
1798
1316
  if (pkgJson.name === "drizzle-kit") {
1799
1317
  const binField = pkgJson.bin;
@@ -1828,7 +1346,7 @@ async function run(options = {}) {
1828
1346
  throw new Error(`drizzle-kit introspect failed with status ${result.status}`);
1829
1347
  }
1830
1348
  const generatedSchema = path2.join(OUT_DIR, "schema.ts");
1831
- if (!fs4.existsSync(generatedSchema)) {
1349
+ if (!fs3.existsSync(generatedSchema)) {
1832
1350
  console.error("[gen-db-schema] schema.ts not generated");
1833
1351
  throw new Error("drizzle-kit introspect failed to generate schema.ts");
1834
1352
  }
@@ -1837,8 +1355,8 @@ async function run(options = {}) {
1837
1355
  console.warn("[gen-db-schema] Unmatched custom types detected:", stats.unmatchedUnknown);
1838
1356
  }
1839
1357
  console.log("[gen-db-schema] \u2713 Postprocessed schema");
1840
- fs4.mkdirSync(path2.dirname(SCHEMA_FILE), { recursive: true });
1841
- fs4.copyFileSync(generatedSchema, SCHEMA_FILE);
1358
+ fs3.mkdirSync(path2.dirname(SCHEMA_FILE), { recursive: true });
1359
+ fs3.copyFileSync(generatedSchema, SCHEMA_FILE);
1842
1360
  console.log(`[gen-db-schema] \u2713 Copied to ${outputPath}`);
1843
1361
  try {
1844
1362
  if (options.enableNestModuleGenerate) {
@@ -1859,8 +1377,8 @@ async function run(options = {}) {
1859
1377
  console.error("[gen-db-schema] Failed:", err instanceof Error ? err.message : String(err));
1860
1378
  exitCode = 1;
1861
1379
  } finally {
1862
- if (fs4.existsSync(OUT_DIR)) {
1863
- fs4.rmSync(OUT_DIR, { recursive: true, force: true });
1380
+ if (fs3.existsSync(OUT_DIR)) {
1381
+ fs3.rmSync(OUT_DIR, { recursive: true, force: true });
1864
1382
  }
1865
1383
  process.exit(exitCode);
1866
1384
  }
@@ -1879,7 +1397,7 @@ var genDbSchemaCommand = {
1879
1397
 
1880
1398
  // src/commands/sync/run.handler.ts
1881
1399
  import path4 from "path";
1882
- import fs6 from "fs";
1400
+ import fs5 from "fs";
1883
1401
  import { fileURLToPath as fileURLToPath3 } from "url";
1884
1402
 
1885
1403
  // src/config/sync.ts
@@ -1943,14 +1461,14 @@ function genSyncConfig(perms = {}) {
1943
1461
  }
1944
1462
 
1945
1463
  // src/utils/file-ops.ts
1946
- import fs5 from "fs";
1464
+ import fs4 from "fs";
1947
1465
  import path3 from "path";
1948
1466
  function removeLineFromFile(filePath, pattern) {
1949
- if (!fs5.existsSync(filePath)) {
1467
+ if (!fs4.existsSync(filePath)) {
1950
1468
  console.log(`[fullstack-cli] \u25CB ${path3.basename(filePath)} (not found)`);
1951
1469
  return false;
1952
1470
  }
1953
- const content = fs5.readFileSync(filePath, "utf-8");
1471
+ const content = fs4.readFileSync(filePath, "utf-8");
1954
1472
  const lines = content.split("\n");
1955
1473
  const trimmedPattern = pattern.trim();
1956
1474
  const filteredLines = lines.filter((line) => line.trim() !== trimmedPattern);
@@ -1958,7 +1476,7 @@ function removeLineFromFile(filePath, pattern) {
1958
1476
  console.log(`[fullstack-cli] \u25CB ${path3.basename(filePath)} (pattern not found: ${pattern})`);
1959
1477
  return false;
1960
1478
  }
1961
- fs5.writeFileSync(filePath, filteredLines.join("\n"));
1479
+ fs4.writeFileSync(filePath, filteredLines.join("\n"));
1962
1480
  console.log(`[fullstack-cli] \u2713 ${path3.basename(filePath)} (removed: ${pattern})`);
1963
1481
  return true;
1964
1482
  }
@@ -1974,7 +1492,7 @@ async function run2(options) {
1974
1492
  process.exit(0);
1975
1493
  }
1976
1494
  const userPackageJson = path4.join(userProjectRoot, "package.json");
1977
- if (!fs6.existsSync(userPackageJson)) {
1495
+ if (!fs5.existsSync(userPackageJson)) {
1978
1496
  console.log("[fullstack-cli] Skip syncing (not a valid npm project)");
1979
1497
  process.exit(0);
1980
1498
  }
@@ -2020,7 +1538,7 @@ async function syncRule(rule, pluginRoot, userProjectRoot) {
2020
1538
  }
2021
1539
  const srcPath = path4.join(pluginRoot, rule.from);
2022
1540
  const destPath = path4.join(userProjectRoot, rule.to);
2023
- if (!fs6.existsSync(srcPath)) {
1541
+ if (!fs5.existsSync(srcPath)) {
2024
1542
  console.warn(`[fullstack-cli] Source not found: ${rule.from}`);
2025
1543
  return;
2026
1544
  }
@@ -2038,31 +1556,31 @@ async function syncRule(rule, pluginRoot, userProjectRoot) {
2038
1556
  }
2039
1557
  function syncFile(src, dest, overwrite = true) {
2040
1558
  const destDir = path4.dirname(dest);
2041
- if (!fs6.existsSync(destDir)) {
2042
- fs6.mkdirSync(destDir, { recursive: true });
1559
+ if (!fs5.existsSync(destDir)) {
1560
+ fs5.mkdirSync(destDir, { recursive: true });
2043
1561
  }
2044
- if (fs6.existsSync(dest) && !overwrite) {
1562
+ if (fs5.existsSync(dest) && !overwrite) {
2045
1563
  console.log(`[fullstack-cli] \u25CB ${path4.basename(dest)} (skipped, already exists)`);
2046
1564
  return;
2047
1565
  }
2048
- fs6.copyFileSync(src, dest);
1566
+ fs5.copyFileSync(src, dest);
2049
1567
  console.log(`[fullstack-cli] \u2713 ${path4.basename(dest)}`);
2050
1568
  }
2051
1569
  function syncDirectory(src, dest, overwrite = true) {
2052
- if (!fs6.existsSync(dest)) {
2053
- fs6.mkdirSync(dest, { recursive: true });
1570
+ if (!fs5.existsSync(dest)) {
1571
+ fs5.mkdirSync(dest, { recursive: true });
2054
1572
  }
2055
- const files = fs6.readdirSync(src);
1573
+ const files = fs5.readdirSync(src);
2056
1574
  let count = 0;
2057
1575
  files.forEach((file) => {
2058
1576
  const srcFile = path4.join(src, file);
2059
1577
  const destFile = path4.join(dest, file);
2060
- const stats = fs6.statSync(srcFile);
1578
+ const stats = fs5.statSync(srcFile);
2061
1579
  if (stats.isDirectory()) {
2062
1580
  syncDirectory(srcFile, destFile, overwrite);
2063
1581
  } else {
2064
- if (overwrite || !fs6.existsSync(destFile)) {
2065
- fs6.copyFileSync(srcFile, destFile);
1582
+ if (overwrite || !fs5.existsSync(destFile)) {
1583
+ fs5.copyFileSync(srcFile, destFile);
2066
1584
  console.log(`[fullstack-cli] \u2713 ${path4.relative(dest, destFile)}`);
2067
1585
  count++;
2068
1586
  }
@@ -2073,28 +1591,28 @@ function syncDirectory(src, dest, overwrite = true) {
2073
1591
  }
2074
1592
  }
2075
1593
  function appendToFile(src, dest) {
2076
- const content = fs6.readFileSync(src, "utf-8");
1594
+ const content = fs5.readFileSync(src, "utf-8");
2077
1595
  let existingContent = "";
2078
- if (fs6.existsSync(dest)) {
2079
- existingContent = fs6.readFileSync(dest, "utf-8");
1596
+ if (fs5.existsSync(dest)) {
1597
+ existingContent = fs5.readFileSync(dest, "utf-8");
2080
1598
  }
2081
1599
  if (existingContent.includes(content.trim())) {
2082
1600
  console.log(`[fullstack-cli] \u25CB ${path4.basename(dest)} (already contains content)`);
2083
1601
  return;
2084
1602
  }
2085
- fs6.appendFileSync(dest, content);
1603
+ fs5.appendFileSync(dest, content);
2086
1604
  console.log(`[fullstack-cli] \u2713 ${path4.basename(dest)} (appended)`);
2087
1605
  }
2088
1606
  function setPermissions(permissions, projectRoot) {
2089
1607
  for (const [pattern, mode] of Object.entries(permissions)) {
2090
1608
  if (pattern === "**/*.sh") {
2091
1609
  const scriptsDir = path4.join(projectRoot, "scripts");
2092
- if (fs6.existsSync(scriptsDir)) {
2093
- const files = fs6.readdirSync(scriptsDir);
1610
+ if (fs5.existsSync(scriptsDir)) {
1611
+ const files = fs5.readdirSync(scriptsDir);
2094
1612
  files.forEach((file) => {
2095
1613
  if (file.endsWith(".sh")) {
2096
1614
  const filePath = path4.join(scriptsDir, file);
2097
- fs6.chmodSync(filePath, mode);
1615
+ fs5.chmodSync(filePath, mode);
2098
1616
  }
2099
1617
  });
2100
1618
  }
@@ -2102,16 +1620,16 @@ function setPermissions(permissions, projectRoot) {
2102
1620
  }
2103
1621
  }
2104
1622
  function deleteFile(filePath) {
2105
- if (fs6.existsSync(filePath)) {
2106
- fs6.unlinkSync(filePath);
1623
+ if (fs5.existsSync(filePath)) {
1624
+ fs5.unlinkSync(filePath);
2107
1625
  console.log(`[fullstack-cli] \u2713 ${path4.basename(filePath)} (deleted)`);
2108
1626
  } else {
2109
1627
  console.log(`[fullstack-cli] \u25CB ${path4.basename(filePath)} (not found)`);
2110
1628
  }
2111
1629
  }
2112
1630
  function deleteDirectory(dirPath) {
2113
- if (fs6.existsSync(dirPath)) {
2114
- fs6.rmSync(dirPath, { recursive: true });
1631
+ if (fs5.existsSync(dirPath)) {
1632
+ fs5.rmSync(dirPath, { recursive: true });
2115
1633
  console.log(`[fullstack-cli] \u2713 ${path4.basename(dirPath)} (deleted)`);
2116
1634
  } else {
2117
1635
  console.log(`[fullstack-cli] \u25CB ${path4.basename(dirPath)} (not found)`);
@@ -2130,7 +1648,7 @@ var syncCommand = {
2130
1648
  };
2131
1649
 
2132
1650
  // src/commands/action-plugin/utils.ts
2133
- import fs7 from "fs";
1651
+ import fs6 from "fs";
2134
1652
  import path5 from "path";
2135
1653
  import { spawnSync as spawnSync2, execSync } from "child_process";
2136
1654
  function parsePluginName(input) {
@@ -2156,11 +1674,11 @@ function getPluginPath(pluginName) {
2156
1674
  }
2157
1675
  function readPackageJson() {
2158
1676
  const pkgPath = getPackageJsonPath();
2159
- if (!fs7.existsSync(pkgPath)) {
1677
+ if (!fs6.existsSync(pkgPath)) {
2160
1678
  throw new Error("package.json not found in current directory");
2161
1679
  }
2162
1680
  try {
2163
- const content = fs7.readFileSync(pkgPath, "utf-8");
1681
+ const content = fs6.readFileSync(pkgPath, "utf-8");
2164
1682
  return JSON.parse(content);
2165
1683
  } catch {
2166
1684
  throw new Error("Failed to parse package.json");
@@ -2168,7 +1686,7 @@ function readPackageJson() {
2168
1686
  }
2169
1687
  function writePackageJson(pkg2) {
2170
1688
  const pkgPath = getPackageJsonPath();
2171
- fs7.writeFileSync(pkgPath, JSON.stringify(pkg2, null, 2) + "\n", "utf-8");
1689
+ fs6.writeFileSync(pkgPath, JSON.stringify(pkg2, null, 2) + "\n", "utf-8");
2172
1690
  }
2173
1691
  function readActionPlugins() {
2174
1692
  const pkg2 = readPackageJson();
@@ -2202,11 +1720,11 @@ function npmInstall(tgzPath) {
2202
1720
  }
2203
1721
  function getPackageVersion(pluginName) {
2204
1722
  const pkgJsonPath = path5.join(getPluginPath(pluginName), "package.json");
2205
- if (!fs7.existsSync(pkgJsonPath)) {
1723
+ if (!fs6.existsSync(pkgJsonPath)) {
2206
1724
  return null;
2207
1725
  }
2208
1726
  try {
2209
- const content = fs7.readFileSync(pkgJsonPath, "utf-8");
1727
+ const content = fs6.readFileSync(pkgJsonPath, "utf-8");
2210
1728
  const pkg2 = JSON.parse(content);
2211
1729
  return pkg2.version || null;
2212
1730
  } catch {
@@ -2215,11 +1733,11 @@ function getPackageVersion(pluginName) {
2215
1733
  }
2216
1734
  function readPluginPackageJson(pluginPath) {
2217
1735
  const pkgJsonPath = path5.join(pluginPath, "package.json");
2218
- if (!fs7.existsSync(pkgJsonPath)) {
1736
+ if (!fs6.existsSync(pkgJsonPath)) {
2219
1737
  return null;
2220
1738
  }
2221
1739
  try {
2222
- const content = fs7.readFileSync(pkgJsonPath, "utf-8");
1740
+ const content = fs6.readFileSync(pkgJsonPath, "utf-8");
2223
1741
  return JSON.parse(content);
2224
1742
  } catch {
2225
1743
  return null;
@@ -2229,34 +1747,34 @@ function extractTgzToNodeModules(tgzPath, pluginName) {
2229
1747
  const nodeModulesPath = path5.join(getProjectRoot(), "node_modules");
2230
1748
  const targetDir = path5.join(nodeModulesPath, pluginName);
2231
1749
  const scopeDir = path5.dirname(targetDir);
2232
- if (!fs7.existsSync(scopeDir)) {
2233
- fs7.mkdirSync(scopeDir, { recursive: true });
1750
+ if (!fs6.existsSync(scopeDir)) {
1751
+ fs6.mkdirSync(scopeDir, { recursive: true });
2234
1752
  }
2235
- if (fs7.existsSync(targetDir)) {
2236
- fs7.rmSync(targetDir, { recursive: true });
1753
+ if (fs6.existsSync(targetDir)) {
1754
+ fs6.rmSync(targetDir, { recursive: true });
2237
1755
  }
2238
1756
  const tempDir = path5.join(nodeModulesPath, ".cache", "fullstack-cli", "extract-temp");
2239
- if (fs7.existsSync(tempDir)) {
2240
- fs7.rmSync(tempDir, { recursive: true });
1757
+ if (fs6.existsSync(tempDir)) {
1758
+ fs6.rmSync(tempDir, { recursive: true });
2241
1759
  }
2242
- fs7.mkdirSync(tempDir, { recursive: true });
1760
+ fs6.mkdirSync(tempDir, { recursive: true });
2243
1761
  try {
2244
1762
  execSync(`tar -xzf "${tgzPath}" -C "${tempDir}"`, { stdio: "pipe" });
2245
1763
  const extractedDir = path5.join(tempDir, "package");
2246
- if (fs7.existsSync(extractedDir)) {
2247
- fs7.renameSync(extractedDir, targetDir);
1764
+ if (fs6.existsSync(extractedDir)) {
1765
+ fs6.renameSync(extractedDir, targetDir);
2248
1766
  } else {
2249
- const files = fs7.readdirSync(tempDir);
1767
+ const files = fs6.readdirSync(tempDir);
2250
1768
  if (files.length === 1) {
2251
- fs7.renameSync(path5.join(tempDir, files[0]), targetDir);
1769
+ fs6.renameSync(path5.join(tempDir, files[0]), targetDir);
2252
1770
  } else {
2253
1771
  throw new Error("Unexpected tgz structure");
2254
1772
  }
2255
1773
  }
2256
1774
  return targetDir;
2257
1775
  } finally {
2258
- if (fs7.existsSync(tempDir)) {
2259
- fs7.rmSync(tempDir, { recursive: true });
1776
+ if (fs6.existsSync(tempDir)) {
1777
+ fs6.rmSync(tempDir, { recursive: true });
2260
1778
  }
2261
1779
  }
2262
1780
  }
@@ -2268,7 +1786,7 @@ function checkMissingPeerDeps(peerDeps) {
2268
1786
  const nodeModulesPath = path5.join(getProjectRoot(), "node_modules");
2269
1787
  for (const [depName, _version] of Object.entries(peerDeps)) {
2270
1788
  const depPath = path5.join(nodeModulesPath, depName);
2271
- if (!fs7.existsSync(depPath)) {
1789
+ if (!fs6.existsSync(depPath)) {
2272
1790
  missing.push(depName);
2273
1791
  }
2274
1792
  }
@@ -2292,15 +1810,15 @@ function installMissingDeps(deps) {
2292
1810
  }
2293
1811
  function removePluginDirectory(pluginName) {
2294
1812
  const pluginPath = getPluginPath(pluginName);
2295
- if (fs7.existsSync(pluginPath)) {
2296
- fs7.rmSync(pluginPath, { recursive: true });
1813
+ if (fs6.existsSync(pluginPath)) {
1814
+ fs6.rmSync(pluginPath, { recursive: true });
2297
1815
  console.log(`[action-plugin] Removed ${pluginName}`);
2298
1816
  }
2299
1817
  }
2300
1818
 
2301
1819
  // src/commands/action-plugin/api-client.ts
2302
1820
  import { HttpClient as HttpClient2 } from "@lark-apaas/http-client";
2303
- import fs8 from "fs";
1821
+ import fs7 from "fs";
2304
1822
  import path6 from "path";
2305
1823
 
2306
1824
  // src/utils/http-client.ts
@@ -2314,13 +1832,10 @@ function getHttpClient() {
2314
1832
  enabled: true
2315
1833
  }
2316
1834
  });
2317
- const canaryEnv = process.env.FORCE_FRAMEWORK_CLI_CANARY_ENV;
2318
- if (canaryEnv) {
2319
- clientInstance.interceptors.request.use((req) => {
2320
- req.headers["x-tt-env"] = canaryEnv;
2321
- return req;
2322
- });
2323
- }
1835
+ clientInstance.interceptors.request.use((req) => {
1836
+ req.headers["x-tt-env"] = "boe_miaoda_plugin";
1837
+ return req;
1838
+ });
2324
1839
  }
2325
1840
  return clientInstance;
2326
1841
  }
@@ -2393,8 +1908,8 @@ function getPluginCacheDir() {
2393
1908
  }
2394
1909
  function ensureCacheDir() {
2395
1910
  const cacheDir = getPluginCacheDir();
2396
- if (!fs8.existsSync(cacheDir)) {
2397
- fs8.mkdirSync(cacheDir, { recursive: true });
1911
+ if (!fs7.existsSync(cacheDir)) {
1912
+ fs7.mkdirSync(cacheDir, { recursive: true });
2398
1913
  }
2399
1914
  }
2400
1915
  function getTempFilePath(pluginKey, version) {
@@ -2417,7 +1932,7 @@ async function downloadPlugin(pluginKey, requestedVersion) {
2417
1932
  tgzBuffer = await downloadFromPublic(pluginInfo.downloadURL);
2418
1933
  }
2419
1934
  const tgzPath = getTempFilePath(pluginKey, pluginInfo.version);
2420
- fs8.writeFileSync(tgzPath, tgzBuffer);
1935
+ fs7.writeFileSync(tgzPath, tgzBuffer);
2421
1936
  console.log(`[action-plugin] Downloaded to ${tgzPath} (${(tgzBuffer.length / 1024).toFixed(2)} KB)`);
2422
1937
  return {
2423
1938
  tgzPath,
@@ -2427,8 +1942,8 @@ async function downloadPlugin(pluginKey, requestedVersion) {
2427
1942
  }
2428
1943
  function cleanupTempFile(tgzPath) {
2429
1944
  try {
2430
- if (fs8.existsSync(tgzPath)) {
2431
- fs8.unlinkSync(tgzPath);
1945
+ if (fs7.existsSync(tgzPath)) {
1946
+ fs7.unlinkSync(tgzPath);
2432
1947
  }
2433
1948
  } catch {
2434
1949
  }
@@ -2764,7 +2279,7 @@ var actionPluginCommandGroup = {
2764
2279
  };
2765
2280
 
2766
2281
  // src/commands/capability/utils.ts
2767
- import fs9 from "fs";
2282
+ import fs8 from "fs";
2768
2283
  import path7 from "path";
2769
2284
  var CAPABILITIES_DIR = "server/capabilities";
2770
2285
  function getProjectRoot2() {
@@ -2780,23 +2295,23 @@ function getPluginManifestPath(pluginKey) {
2780
2295
  return path7.join(getProjectRoot2(), "node_modules", pluginKey, "manifest.json");
2781
2296
  }
2782
2297
  function capabilitiesDirExists() {
2783
- return fs9.existsSync(getCapabilitiesDir());
2298
+ return fs8.existsSync(getCapabilitiesDir());
2784
2299
  }
2785
2300
  function listCapabilityIds() {
2786
2301
  const dir = getCapabilitiesDir();
2787
- if (!fs9.existsSync(dir)) {
2302
+ if (!fs8.existsSync(dir)) {
2788
2303
  return [];
2789
2304
  }
2790
- const files = fs9.readdirSync(dir);
2791
- return files.filter((f) => f.endsWith(".json") && f !== "capabilities.json").map((f) => f.replace(/\.json$/, ""));
2305
+ const files = fs8.readdirSync(dir);
2306
+ return files.filter((f) => f.endsWith(".json")).map((f) => f.replace(/\.json$/, ""));
2792
2307
  }
2793
2308
  function readCapability(id) {
2794
2309
  const filePath = getCapabilityPath(id);
2795
- if (!fs9.existsSync(filePath)) {
2310
+ if (!fs8.existsSync(filePath)) {
2796
2311
  throw new Error(`Capability not found: ${id}`);
2797
2312
  }
2798
2313
  try {
2799
- const content = fs9.readFileSync(filePath, "utf-8");
2314
+ const content = fs8.readFileSync(filePath, "utf-8");
2800
2315
  return JSON.parse(content);
2801
2316
  } catch (error) {
2802
2317
  if (error instanceof SyntaxError) {
@@ -2807,27 +2322,15 @@ function readCapability(id) {
2807
2322
  }
2808
2323
  function readAllCapabilities() {
2809
2324
  const ids = listCapabilityIds();
2810
- const capabilities = [];
2811
- for (const id of ids) {
2812
- try {
2813
- const capability = readCapability(id);
2814
- if (!capability.pluginKey) {
2815
- continue;
2816
- }
2817
- capabilities.push(capability);
2818
- } catch {
2819
- continue;
2820
- }
2821
- }
2822
- return capabilities;
2325
+ return ids.map((id) => readCapability(id));
2823
2326
  }
2824
2327
  function readPluginManifest(pluginKey) {
2825
2328
  const manifestPath = getPluginManifestPath(pluginKey);
2826
- if (!fs9.existsSync(manifestPath)) {
2329
+ if (!fs8.existsSync(manifestPath)) {
2827
2330
  throw new Error(`Plugin not installed: ${pluginKey} (manifest.json not found)`);
2828
2331
  }
2829
2332
  try {
2830
- const content = fs9.readFileSync(manifestPath, "utf-8");
2333
+ const content = fs8.readFileSync(manifestPath, "utf-8");
2831
2334
  return JSON.parse(content);
2832
2335
  } catch (error) {
2833
2336
  if (error instanceof SyntaxError) {
@@ -3001,7 +2504,7 @@ var capabilityCommandGroup = {
3001
2504
  };
3002
2505
 
3003
2506
  // src/commands/migration/version-manager.ts
3004
- import fs10 from "fs";
2507
+ import fs9 from "fs";
3005
2508
  import path8 from "path";
3006
2509
  var PACKAGE_JSON = "package.json";
3007
2510
  var VERSION_FIELD = "migrationVersion";
@@ -3010,25 +2513,25 @@ function getPackageJsonPath2() {
3010
2513
  }
3011
2514
  function getCurrentVersion() {
3012
2515
  const pkgPath = getPackageJsonPath2();
3013
- if (!fs10.existsSync(pkgPath)) {
2516
+ if (!fs9.existsSync(pkgPath)) {
3014
2517
  throw new Error("package.json not found");
3015
2518
  }
3016
- const pkg2 = JSON.parse(fs10.readFileSync(pkgPath, "utf-8"));
2519
+ const pkg2 = JSON.parse(fs9.readFileSync(pkgPath, "utf-8"));
3017
2520
  return pkg2[VERSION_FIELD] ?? 0;
3018
2521
  }
3019
2522
  function setCurrentVersion(version) {
3020
2523
  const pkgPath = getPackageJsonPath2();
3021
- const pkg2 = JSON.parse(fs10.readFileSync(pkgPath, "utf-8"));
2524
+ const pkg2 = JSON.parse(fs9.readFileSync(pkgPath, "utf-8"));
3022
2525
  pkg2[VERSION_FIELD] = version;
3023
- fs10.writeFileSync(pkgPath, JSON.stringify(pkg2, null, 2) + "\n", "utf-8");
2526
+ fs9.writeFileSync(pkgPath, JSON.stringify(pkg2, null, 2) + "\n", "utf-8");
3024
2527
  }
3025
2528
 
3026
2529
  // src/commands/migration/versions/v001_capability/json-migrator/detector.ts
3027
- import fs12 from "fs";
2530
+ import fs11 from "fs";
3028
2531
  import path10 from "path";
3029
2532
 
3030
2533
  // src/commands/migration/versions/v001_capability/utils.ts
3031
- import fs11 from "fs";
2534
+ import fs10 from "fs";
3032
2535
  import path9 from "path";
3033
2536
  var CAPABILITIES_DIR2 = "server/capabilities";
3034
2537
  function getProjectRoot3() {
@@ -3045,30 +2548,19 @@ function getPluginManifestPath2(pluginKey) {
3045
2548
  function detectJsonMigration() {
3046
2549
  const capabilitiesDir = getCapabilitiesDir2();
3047
2550
  const oldFilePath = path10.join(capabilitiesDir, "capabilities.json");
3048
- if (!fs12.existsSync(oldFilePath)) {
2551
+ if (!fs11.existsSync(oldFilePath)) {
3049
2552
  return {
3050
2553
  needsMigration: false,
3051
2554
  reason: "capabilities.json not found"
3052
2555
  };
3053
2556
  }
3054
2557
  try {
3055
- const content = fs12.readFileSync(oldFilePath, "utf-8");
2558
+ const content = fs11.readFileSync(oldFilePath, "utf-8");
3056
2559
  const parsed = JSON.parse(content);
3057
- if (!Array.isArray(parsed)) {
3058
- return {
3059
- needsMigration: false,
3060
- reason: "capabilities.json is not a valid array"
3061
- };
3062
- }
3063
- if (parsed.length === 0) {
3064
- return {
3065
- needsMigration: false,
3066
- reason: "capabilities.json is an empty array"
3067
- };
3068
- }
2560
+ const capabilities = Array.isArray(parsed) ? parsed : [];
3069
2561
  return {
3070
2562
  needsMigration: true,
3071
- oldCapabilities: parsed,
2563
+ oldCapabilities: capabilities,
3072
2564
  oldFilePath
3073
2565
  };
3074
2566
  } catch (error) {
@@ -3103,7 +2595,7 @@ async function check(options) {
3103
2595
  }
3104
2596
 
3105
2597
  // src/commands/migration/versions/v001_capability/json-migrator/index.ts
3106
- import fs13 from "fs";
2598
+ import fs12 from "fs";
3107
2599
  import path11 from "path";
3108
2600
 
3109
2601
  // src/commands/migration/versions/v001_capability/mapping.ts
@@ -3334,10 +2826,10 @@ function transformCapabilities(oldCapabilities) {
3334
2826
  // src/commands/migration/versions/v001_capability/json-migrator/index.ts
3335
2827
  function loadExistingCapabilities() {
3336
2828
  const capabilitiesDir = getCapabilitiesDir2();
3337
- if (!fs13.existsSync(capabilitiesDir)) {
2829
+ if (!fs12.existsSync(capabilitiesDir)) {
3338
2830
  return [];
3339
2831
  }
3340
- const files = fs13.readdirSync(capabilitiesDir);
2832
+ const files = fs12.readdirSync(capabilitiesDir);
3341
2833
  const capabilities = [];
3342
2834
  for (const file of files) {
3343
2835
  if (file === "capabilities.json" || !file.endsWith(".json")) {
@@ -3345,7 +2837,7 @@ function loadExistingCapabilities() {
3345
2837
  }
3346
2838
  try {
3347
2839
  const filePath = path11.join(capabilitiesDir, file);
3348
- const content = fs13.readFileSync(filePath, "utf-8");
2840
+ const content = fs12.readFileSync(filePath, "utf-8");
3349
2841
  const capability = JSON.parse(content);
3350
2842
  if (capability.id && capability.pluginKey) {
3351
2843
  capabilities.push(capability);
@@ -3405,7 +2897,7 @@ async function migrateJsonFiles(options) {
3405
2897
  for (const cap of newCapabilities) {
3406
2898
  const filePath = path11.join(capabilitiesDir, `${cap.id}.json`);
3407
2899
  const content = JSON.stringify(cap, null, 2);
3408
- fs13.writeFileSync(filePath, content, "utf-8");
2900
+ fs12.writeFileSync(filePath, content, "utf-8");
3409
2901
  console.log(` \u2713 Created: ${cap.id}.json`);
3410
2902
  }
3411
2903
  return {
@@ -3417,11 +2909,11 @@ async function migrateJsonFiles(options) {
3417
2909
  }
3418
2910
 
3419
2911
  // src/commands/migration/versions/v001_capability/plugin-installer/detector.ts
3420
- import fs14 from "fs";
2912
+ import fs13 from "fs";
3421
2913
  function isPluginInstalled2(pluginKey) {
3422
2914
  const actionPlugins = readActionPlugins();
3423
2915
  const manifestPath = getPluginManifestPath2(pluginKey);
3424
- return fs14.existsSync(manifestPath) && !!actionPlugins[pluginKey];
2916
+ return fs13.existsSync(manifestPath) && !!actionPlugins[pluginKey];
3425
2917
  }
3426
2918
  function detectPluginsToInstall(capabilities) {
3427
2919
  const pluginKeys = /* @__PURE__ */ new Set();
@@ -3498,10 +2990,10 @@ async function installPlugins(capabilities, options) {
3498
2990
 
3499
2991
  // src/commands/migration/versions/v001_capability/code-migrator/index.ts
3500
2992
  import path13 from "path";
3501
- import { Project as Project3 } from "ts-morph";
2993
+ import { Project as Project2 } from "ts-morph";
3502
2994
 
3503
2995
  // src/commands/migration/versions/v001_capability/code-migrator/scanner.ts
3504
- import fs15 from "fs";
2996
+ import fs14 from "fs";
3505
2997
  import path12 from "path";
3506
2998
  var EXCLUDED_DIRS = [
3507
2999
  "node_modules",
@@ -3517,7 +3009,7 @@ var EXCLUDED_PATTERNS = [
3517
3009
  /\.d\.ts$/
3518
3010
  ];
3519
3011
  function scanDirectory(dir, files = []) {
3520
- const entries = fs15.readdirSync(dir, { withFileTypes: true });
3012
+ const entries = fs14.readdirSync(dir, { withFileTypes: true });
3521
3013
  for (const entry of entries) {
3522
3014
  const fullPath = path12.join(dir, entry.name);
3523
3015
  if (entry.isDirectory()) {
@@ -3536,13 +3028,13 @@ function scanDirectory(dir, files = []) {
3536
3028
  }
3537
3029
  function scanServerFiles() {
3538
3030
  const serverDir = path12.join(getProjectRoot3(), "server");
3539
- if (!fs15.existsSync(serverDir)) {
3031
+ if (!fs14.existsSync(serverDir)) {
3540
3032
  return [];
3541
3033
  }
3542
3034
  return scanDirectory(serverDir);
3543
3035
  }
3544
3036
  function hasCapabilityImport(filePath) {
3545
- const content = fs15.readFileSync(filePath, "utf-8");
3037
+ const content = fs14.readFileSync(filePath, "utf-8");
3546
3038
  return /import\s+.*from\s+['"][^'"]*capabilities[^'"]*['"]/.test(content);
3547
3039
  }
3548
3040
  function scanFilesToMigrate() {
@@ -3606,17 +3098,17 @@ function analyzeImports(sourceFile) {
3606
3098
  }
3607
3099
 
3608
3100
  // src/commands/migration/versions/v001_capability/code-migrator/analyzers/call-site-analyzer.ts
3609
- import { SyntaxKind as SyntaxKind5 } from "ts-morph";
3101
+ import { SyntaxKind } from "ts-morph";
3610
3102
  function analyzeCallSites(sourceFile, imports) {
3611
3103
  const callSites = [];
3612
3104
  const importMap = /* @__PURE__ */ new Map();
3613
3105
  for (const imp of imports) {
3614
3106
  importMap.set(imp.importName, imp.capabilityId);
3615
3107
  }
3616
- const callExpressions = sourceFile.getDescendantsOfKind(SyntaxKind5.CallExpression);
3108
+ const callExpressions = sourceFile.getDescendantsOfKind(SyntaxKind.CallExpression);
3617
3109
  for (const callExpr of callExpressions) {
3618
3110
  const expression = callExpr.getExpression();
3619
- if (expression.getKind() === SyntaxKind5.Identifier) {
3111
+ if (expression.getKind() === SyntaxKind.Identifier) {
3620
3112
  const functionName = expression.getText();
3621
3113
  const capabilityId = importMap.get(functionName);
3622
3114
  if (capabilityId) {
@@ -3629,11 +3121,11 @@ function analyzeCallSites(sourceFile, imports) {
3629
3121
  text: callExpr.getText()
3630
3122
  });
3631
3123
  }
3632
- } else if (expression.getKind() === SyntaxKind5.PropertyAccessExpression) {
3633
- const propAccess = expression.asKind(SyntaxKind5.PropertyAccessExpression);
3124
+ } else if (expression.getKind() === SyntaxKind.PropertyAccessExpression) {
3125
+ const propAccess = expression.asKind(SyntaxKind.PropertyAccessExpression);
3634
3126
  if (propAccess) {
3635
3127
  const objectExpr = propAccess.getExpression();
3636
- if (objectExpr.getKind() === SyntaxKind5.Identifier) {
3128
+ if (objectExpr.getKind() === SyntaxKind.Identifier) {
3637
3129
  const objectName = objectExpr.getText();
3638
3130
  const capabilityId = importMap.get(objectName);
3639
3131
  if (capabilityId) {
@@ -3669,15 +3161,13 @@ function analyzeClass(sourceFile) {
3669
3161
  if (!name) continue;
3670
3162
  const isInjectable = hasDecorator(classDecl, "Injectable");
3671
3163
  const isController = hasDecorator(classDecl, "Controller");
3672
- const isAutomation = hasDecorator(classDecl, "Automation");
3673
- if (classInfo && classInfo.isInjectable && !isInjectable && !isController && !isAutomation) {
3164
+ if (classInfo && classInfo.isInjectable && !isInjectable && !isController) {
3674
3165
  continue;
3675
3166
  }
3676
3167
  const info = {
3677
3168
  name,
3678
3169
  isInjectable,
3679
3170
  isController,
3680
- isAutomation,
3681
3171
  constructorParamCount: 0
3682
3172
  };
3683
3173
  const classBody = classDecl.getChildSyntaxListOrThrow();
@@ -3696,7 +3186,7 @@ function analyzeClass(sourceFile) {
3696
3186
  info.constructorParamsEnd = ctor.getStart();
3697
3187
  }
3698
3188
  }
3699
- if (isInjectable || isController || isAutomation || !classInfo) {
3189
+ if (isInjectable || isController || !classInfo) {
3700
3190
  classInfo = info;
3701
3191
  }
3702
3192
  }
@@ -3709,10 +3199,10 @@ function canAutoMigrate(classInfo) {
3709
3199
  reason: "No class found in file"
3710
3200
  };
3711
3201
  }
3712
- if (!classInfo.isInjectable && !classInfo.isController && !classInfo.isAutomation) {
3202
+ if (!classInfo.isInjectable && !classInfo.isController) {
3713
3203
  return {
3714
3204
  canMigrate: false,
3715
- reason: `Class "${classInfo.name}" is not @Injectable, @Controller or @Automation`
3205
+ reason: `Class "${classInfo.name}" is not @Injectable or @Controller`
3716
3206
  };
3717
3207
  }
3718
3208
  return { canMigrate: true };
@@ -3862,7 +3352,7 @@ function addInjection(sourceFile) {
3862
3352
  }
3863
3353
 
3864
3354
  // src/commands/migration/versions/v001_capability/code-migrator/transformers/call-site-transformer.ts
3865
- import { SyntaxKind as SyntaxKind6 } from "ts-morph";
3355
+ import { SyntaxKind as SyntaxKind2 } from "ts-morph";
3866
3356
  var DEFAULT_ACTION_NAME = "run";
3867
3357
  function generateNewCallText(capabilityId, actionName, args) {
3868
3358
  const argsText = args.trim() || "{}";
@@ -3877,19 +3367,19 @@ function transformCallSites(sourceFile, imports) {
3877
3367
  });
3878
3368
  }
3879
3369
  let replacedCount = 0;
3880
- const callExpressions = sourceFile.getDescendantsOfKind(SyntaxKind6.CallExpression);
3370
+ const callExpressions = sourceFile.getDescendantsOfKind(SyntaxKind2.CallExpression);
3881
3371
  const sortedCalls = [...callExpressions].sort((a, b) => b.getStart() - a.getStart());
3882
3372
  for (const callExpr of sortedCalls) {
3883
3373
  const expression = callExpr.getExpression();
3884
3374
  let importInfo;
3885
- if (expression.getKind() === SyntaxKind6.Identifier) {
3375
+ if (expression.getKind() === SyntaxKind2.Identifier) {
3886
3376
  const functionName = expression.getText();
3887
3377
  importInfo = importMap.get(functionName);
3888
- } else if (expression.getKind() === SyntaxKind6.PropertyAccessExpression) {
3889
- const propAccess = expression.asKind(SyntaxKind6.PropertyAccessExpression);
3378
+ } else if (expression.getKind() === SyntaxKind2.PropertyAccessExpression) {
3379
+ const propAccess = expression.asKind(SyntaxKind2.PropertyAccessExpression);
3890
3380
  if (propAccess) {
3891
3381
  const objectExpr = propAccess.getExpression();
3892
- if (objectExpr.getKind() === SyntaxKind6.Identifier) {
3382
+ if (objectExpr.getKind() === SyntaxKind2.Identifier) {
3893
3383
  const objectName = objectExpr.getText();
3894
3384
  importInfo = importMap.get(objectName);
3895
3385
  }
@@ -3990,7 +3480,7 @@ async function migrateCode(options, capabilities) {
3990
3480
  console.log(" No files need code migration.\n");
3991
3481
  return result;
3992
3482
  }
3993
- const project = new Project3({
3483
+ const project = new Project2({
3994
3484
  skipAddingFilesFromTsConfig: true,
3995
3485
  compilerOptions: {
3996
3486
  allowJs: true
@@ -4033,17 +3523,17 @@ function getSuggestion(analysis) {
4033
3523
  }
4034
3524
 
4035
3525
  // src/commands/migration/versions/v001_capability/cleanup.ts
4036
- import fs16 from "fs";
3526
+ import fs15 from "fs";
4037
3527
  import path14 from "path";
4038
3528
  function cleanupOldFiles(capabilities, dryRun) {
4039
3529
  const deletedFiles = [];
4040
3530
  const errors = [];
4041
3531
  const capabilitiesDir = getCapabilitiesDir2();
4042
3532
  const oldJsonPath = path14.join(capabilitiesDir, "capabilities.json");
4043
- if (fs16.existsSync(oldJsonPath)) {
3533
+ if (fs15.existsSync(oldJsonPath)) {
4044
3534
  try {
4045
3535
  if (!dryRun) {
4046
- fs16.unlinkSync(oldJsonPath);
3536
+ fs15.unlinkSync(oldJsonPath);
4047
3537
  }
4048
3538
  deletedFiles.push("capabilities.json");
4049
3539
  } catch (error) {
@@ -4052,10 +3542,10 @@ function cleanupOldFiles(capabilities, dryRun) {
4052
3542
  }
4053
3543
  for (const cap of capabilities) {
4054
3544
  const tsFilePath = path14.join(capabilitiesDir, `${cap.id}.ts`);
4055
- if (fs16.existsSync(tsFilePath)) {
3545
+ if (fs15.existsSync(tsFilePath)) {
4056
3546
  try {
4057
3547
  if (!dryRun) {
4058
- fs16.unlinkSync(tsFilePath);
3548
+ fs15.unlinkSync(tsFilePath);
4059
3549
  }
4060
3550
  deletedFiles.push(`${cap.id}.ts`);
4061
3551
  } catch (error) {
@@ -4071,7 +3561,7 @@ function cleanupOldFiles(capabilities, dryRun) {
4071
3561
  }
4072
3562
 
4073
3563
  // src/commands/migration/versions/v001_capability/report-generator.ts
4074
- import fs17 from "fs";
3564
+ import fs16 from "fs";
4075
3565
  import path15 from "path";
4076
3566
  var REPORT_FILE = "capability-migration-report.md";
4077
3567
  function printSummary(result) {
@@ -4235,15 +3725,15 @@ async function generateReport(result) {
4235
3725
  }
4236
3726
  lines.push("");
4237
3727
  const logDir = process.env.LOG_DIR || "logs";
4238
- if (!fs17.existsSync(logDir)) {
3728
+ if (!fs16.existsSync(logDir)) {
4239
3729
  return;
4240
3730
  }
4241
3731
  const reportDir = path15.join(logDir, "migration");
4242
- if (!fs17.existsSync(reportDir)) {
4243
- fs17.mkdirSync(reportDir, { recursive: true });
3732
+ if (!fs16.existsSync(reportDir)) {
3733
+ fs16.mkdirSync(reportDir, { recursive: true });
4244
3734
  }
4245
3735
  const reportPath = path15.join(reportDir, REPORT_FILE);
4246
- fs17.writeFileSync(reportPath, lines.join("\n"), "utf-8");
3736
+ fs16.writeFileSync(reportPath, lines.join("\n"), "utf-8");
4247
3737
  console.log(`\u{1F4C4} Report generated: ${reportPath}`);
4248
3738
  }
4249
3739
 
@@ -4778,7 +4268,7 @@ var migrationCommand = {
4778
4268
  import path16 from "path";
4779
4269
 
4780
4270
  // src/commands/read-logs/std-utils.ts
4781
- import fs18 from "fs";
4271
+ import fs17 from "fs";
4782
4272
  function formatStdPrefixTime(localTime) {
4783
4273
  const match = localTime.match(/^(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})$/);
4784
4274
  if (!match) return localTime;
@@ -4808,11 +4298,11 @@ function stripPrefixFromStdLine(line) {
4808
4298
  return `[${time}] ${content}`;
4809
4299
  }
4810
4300
  function readStdLinesTailFromLastMarkerPaged(filePath, maxLines, offset, isMarker) {
4811
- const stat = fs18.statSync(filePath);
4301
+ const stat = fs17.statSync(filePath);
4812
4302
  if (stat.size === 0) {
4813
4303
  return { lines: [], markerFound: false, totalLinesCount: 0 };
4814
4304
  }
4815
- const fd = fs18.openSync(filePath, "r");
4305
+ const fd = fs17.openSync(filePath, "r");
4816
4306
  const chunkSize = 64 * 1024;
4817
4307
  let position = stat.size;
4818
4308
  let remainder = "";
@@ -4826,7 +4316,7 @@ function readStdLinesTailFromLastMarkerPaged(filePath, maxLines, offset, isMarke
4826
4316
  const length = Math.min(chunkSize, position);
4827
4317
  position -= length;
4828
4318
  const buffer = Buffer.alloc(length);
4829
- fs18.readSync(fd, buffer, 0, length, position);
4319
+ fs17.readSync(fd, buffer, 0, length, position);
4830
4320
  let chunk = buffer.toString("utf8");
4831
4321
  if (remainder) {
4832
4322
  chunk += remainder;
@@ -4868,7 +4358,7 @@ function readStdLinesTailFromLastMarkerPaged(filePath, maxLines, offset, isMarke
4868
4358
  }
4869
4359
  }
4870
4360
  } finally {
4871
- fs18.closeSync(fd);
4361
+ fs17.closeSync(fd);
4872
4362
  }
4873
4363
  return { lines: collected.reverse(), markerFound, totalLinesCount };
4874
4364
  }
@@ -4889,21 +4379,21 @@ function readServerStdSegment(filePath, maxLines, offset) {
4889
4379
  }
4890
4380
 
4891
4381
  // src/commands/read-logs/tail.ts
4892
- import fs19 from "fs";
4382
+ import fs18 from "fs";
4893
4383
  function fileExists(filePath) {
4894
4384
  try {
4895
- fs19.accessSync(filePath, fs19.constants.F_OK | fs19.constants.R_OK);
4385
+ fs18.accessSync(filePath, fs18.constants.F_OK | fs18.constants.R_OK);
4896
4386
  return true;
4897
4387
  } catch {
4898
4388
  return false;
4899
4389
  }
4900
4390
  }
4901
4391
  function readFileTailLines(filePath, maxLines) {
4902
- const stat = fs19.statSync(filePath);
4392
+ const stat = fs18.statSync(filePath);
4903
4393
  if (stat.size === 0) {
4904
4394
  return [];
4905
4395
  }
4906
- const fd = fs19.openSync(filePath, "r");
4396
+ const fd = fs18.openSync(filePath, "r");
4907
4397
  const chunkSize = 64 * 1024;
4908
4398
  const chunks = [];
4909
4399
  let position = stat.size;
@@ -4913,13 +4403,13 @@ function readFileTailLines(filePath, maxLines) {
4913
4403
  const length = Math.min(chunkSize, position);
4914
4404
  position -= length;
4915
4405
  const buffer = Buffer.alloc(length);
4916
- fs19.readSync(fd, buffer, 0, length, position);
4406
+ fs18.readSync(fd, buffer, 0, length, position);
4917
4407
  chunks.unshift(buffer.toString("utf8"));
4918
4408
  const chunkLines = buffer.toString("utf8").split("\n").length - 1;
4919
4409
  collectedLines += chunkLines;
4920
4410
  }
4921
4411
  } finally {
4922
- fs19.closeSync(fd);
4412
+ fs18.closeSync(fd);
4923
4413
  }
4924
4414
  const content = chunks.join("");
4925
4415
  const allLines = content.split("\n");
@@ -4935,11 +4425,11 @@ function readFileTailLines(filePath, maxLines) {
4935
4425
  return allLines.slice(allLines.length - maxLines);
4936
4426
  }
4937
4427
  function readFileTailNonEmptyLinesWithOffset(filePath, maxLines, offset) {
4938
- const stat = fs19.statSync(filePath);
4428
+ const stat = fs18.statSync(filePath);
4939
4429
  if (stat.size === 0) {
4940
4430
  return { lines: [], totalLinesCount: 0 };
4941
4431
  }
4942
- const fd = fs19.openSync(filePath, "r");
4432
+ const fd = fs18.openSync(filePath, "r");
4943
4433
  const chunkSize = 64 * 1024;
4944
4434
  let position = stat.size;
4945
4435
  let remainder = "";
@@ -4951,7 +4441,7 @@ function readFileTailNonEmptyLinesWithOffset(filePath, maxLines, offset) {
4951
4441
  const length = Math.min(chunkSize, position);
4952
4442
  position -= length;
4953
4443
  const buffer = Buffer.alloc(length);
4954
- fs19.readSync(fd, buffer, 0, length, position);
4444
+ fs18.readSync(fd, buffer, 0, length, position);
4955
4445
  let chunk = buffer.toString("utf8");
4956
4446
  if (remainder) {
4957
4447
  chunk += remainder;
@@ -4982,7 +4472,7 @@ function readFileTailNonEmptyLinesWithOffset(filePath, maxLines, offset) {
4982
4472
  }
4983
4473
  }
4984
4474
  } finally {
4985
- fs19.closeSync(fd);
4475
+ fs18.closeSync(fd);
4986
4476
  }
4987
4477
  return { lines: collected.reverse(), totalLinesCount };
4988
4478
  }
@@ -5084,7 +4574,7 @@ function extractClientStdSegment(lines, maxLines, offset) {
5084
4574
  }
5085
4575
 
5086
4576
  // src/commands/read-logs/json-lines.ts
5087
- import fs20 from "fs";
4577
+ import fs19 from "fs";
5088
4578
  function normalizePid(value) {
5089
4579
  if (typeof value === "number") {
5090
4580
  return String(value);
@@ -5135,11 +4625,11 @@ function buildWantedLevelSet(levels) {
5135
4625
  return set.size > 0 ? set : null;
5136
4626
  }
5137
4627
  function readJsonLinesLastPid(filePath, maxLines, offset, levels) {
5138
- const stat = fs20.statSync(filePath);
4628
+ const stat = fs19.statSync(filePath);
5139
4629
  if (stat.size === 0) {
5140
4630
  return { lines: [], totalLinesCount: 0 };
5141
4631
  }
5142
- const fd = fs20.openSync(filePath, "r");
4632
+ const fd = fs19.openSync(filePath, "r");
5143
4633
  const chunkSize = 64 * 1024;
5144
4634
  let position = stat.size;
5145
4635
  let remainder = "";
@@ -5154,7 +4644,7 @@ function readJsonLinesLastPid(filePath, maxLines, offset, levels) {
5154
4644
  const length = Math.min(chunkSize, position);
5155
4645
  position -= length;
5156
4646
  const buffer = Buffer.alloc(length);
5157
- fs20.readSync(fd, buffer, 0, length, position);
4647
+ fs19.readSync(fd, buffer, 0, length, position);
5158
4648
  let chunk = buffer.toString("utf8");
5159
4649
  if (remainder) {
5160
4650
  chunk += remainder;
@@ -5216,7 +4706,7 @@ function readJsonLinesLastPid(filePath, maxLines, offset, levels) {
5216
4706
  }
5217
4707
  }
5218
4708
  } finally {
5219
- fs20.closeSync(fd);
4709
+ fs19.closeSync(fd);
5220
4710
  }
5221
4711
  return { lines: collected.reverse(), totalLinesCount };
5222
4712
  }
@@ -5259,11 +4749,11 @@ function extractTraceId(obj) {
5259
4749
  function readJsonLinesByTraceId(filePath, traceId, maxLines, offset, levels) {
5260
4750
  const wanted = traceId.trim();
5261
4751
  if (!wanted) return { lines: [], totalLinesCount: 0 };
5262
- const stat = fs20.statSync(filePath);
4752
+ const stat = fs19.statSync(filePath);
5263
4753
  if (stat.size === 0) {
5264
4754
  return { lines: [], totalLinesCount: 0 };
5265
4755
  }
5266
- const fd = fs20.openSync(filePath, "r");
4756
+ const fd = fs19.openSync(filePath, "r");
5267
4757
  const chunkSize = 64 * 1024;
5268
4758
  let position = stat.size;
5269
4759
  let remainder = "";
@@ -5276,7 +4766,7 @@ function readJsonLinesByTraceId(filePath, traceId, maxLines, offset, levels) {
5276
4766
  const length = Math.min(chunkSize, position);
5277
4767
  position -= length;
5278
4768
  const buffer = Buffer.alloc(length);
5279
- fs20.readSync(fd, buffer, 0, length, position);
4769
+ fs19.readSync(fd, buffer, 0, length, position);
5280
4770
  let chunk = buffer.toString("utf8");
5281
4771
  if (remainder) {
5282
4772
  chunk += remainder;
@@ -5329,7 +4819,7 @@ function readJsonLinesByTraceId(filePath, traceId, maxLines, offset, levels) {
5329
4819
  }
5330
4820
  }
5331
4821
  } finally {
5332
- fs20.closeSync(fd);
4822
+ fs19.closeSync(fd);
5333
4823
  }
5334
4824
  return { lines: collected.reverse(), totalLinesCount };
5335
4825
  }
@@ -5338,11 +4828,11 @@ function readJsonLinesTailByLevel(filePath, maxLines, offset, levels) {
5338
4828
  if (!wantedLevelSet) {
5339
4829
  return { lines: [], totalLinesCount: 0 };
5340
4830
  }
5341
- const stat = fs20.statSync(filePath);
4831
+ const stat = fs19.statSync(filePath);
5342
4832
  if (stat.size === 0) {
5343
4833
  return { lines: [], totalLinesCount: 0 };
5344
4834
  }
5345
- const fd = fs20.openSync(filePath, "r");
4835
+ const fd = fs19.openSync(filePath, "r");
5346
4836
  const chunkSize = 64 * 1024;
5347
4837
  let position = stat.size;
5348
4838
  let remainder = "";
@@ -5354,7 +4844,7 @@ function readJsonLinesTailByLevel(filePath, maxLines, offset, levels) {
5354
4844
  const length = Math.min(chunkSize, position);
5355
4845
  position -= length;
5356
4846
  const buffer = Buffer.alloc(length);
5357
- fs20.readSync(fd, buffer, 0, length, position);
4847
+ fs19.readSync(fd, buffer, 0, length, position);
5358
4848
  let chunk = buffer.toString("utf8");
5359
4849
  if (remainder) {
5360
4850
  chunk += remainder;
@@ -5401,13 +4891,97 @@ function readJsonLinesTailByLevel(filePath, maxLines, offset, levels) {
5401
4891
  }
5402
4892
  }
5403
4893
  } finally {
5404
- fs20.closeSync(fd);
4894
+ fs19.closeSync(fd);
5405
4895
  }
5406
4896
  return { lines: collected.reverse(), totalLinesCount };
5407
4897
  }
5408
4898
 
5409
4899
  // src/commands/read-logs/index.ts
5410
4900
  var LOG_TYPES = ["server", "trace", "server-std", "client-std", "browser"];
4901
// Canonicalizes an object key for comparison: lowercased with underscores
// stripped, so "Server_Time" and "serverTime" both become "servertime".
function normalizeObjectKey(key) {
  return key.replace(/_/g, "").toLowerCase();
}
4904
/**
 * Renders a Date as an ISO-8601-like string using *local* time components,
 * suffixed with the host's numeric UTC offset, e.g. "2023-01-02T03:04:05.006+08:00".
 *
 * @param date                Date to format (local getters are used).
 * @param includeMilliseconds when true, a ".SSS" fragment is appended.
 * @returns formatted timestamp string with an explicit "+HH:MM"/"-HH:MM" offset.
 */
function formatIsoWithLocalOffset(date, includeMilliseconds) {
  const zeroFill = (value, width) => String(value).padStart(width, "0");
  const datePart = [
    date.getFullYear(),
    zeroFill(date.getMonth() + 1, 2),
    zeroFill(date.getDate(), 2)
  ].join("-");
  const timePart = [
    zeroFill(date.getHours(), 2),
    zeroFill(date.getMinutes(), 2),
    zeroFill(date.getSeconds(), 2)
  ].join(":");
  const fraction = includeMilliseconds ? `.${zeroFill(date.getMilliseconds(), 3)}` : "";
  // getTimezoneOffset() is minutes *behind* UTC, so negate to get the
  // conventional "+east / -west" sign.
  const offsetTotal = -date.getTimezoneOffset();
  const offsetSign = offsetTotal < 0 ? "-" : "+";
  const offsetAbs = Math.abs(offsetTotal);
  const offsetPart = `${zeroFill(Math.floor(offsetAbs / 60), 2)}:${zeroFill(offsetAbs % 60, 2)}`;
  return `${datePart}T${timePart}${fraction}${offsetSign}${offsetPart}`;
}
4922
// Converts a bare local "YYYY-MM-DD HH:MM:SS" string into the same instant
// rendered with an explicit UTC offset (no milliseconds). Input that does not
// match the pattern, or that produces an invalid Date, is returned untouched.
function formatLocalDateTimeStringWithOffset(input) {
  const parts = /^(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})$/.exec(input);
  if (!parts) return input;
  // parts[0] is the whole match; skip it in the destructuring.
  const [, year, month, day, hour, minute, second] = parts.map(Number);
  const parsed = new Date(year, month - 1, day, hour, minute, second, 0);
  return Number.isNaN(parsed.getTime()) ? input : formatIsoWithLocalOffset(parsed, false);
}
4935
// Attaches the host's UTC offset to a timezone-less ISO string
// ("YYYY-MM-DDTHH:MM:SS[.SSS]"), interpreting the components as local time.
// Millisecond precision in the output mirrors whether the input carried a
// fraction. Non-matching or unparseable input is returned as-is.
function formatIsoNoTzStringWithOffset(input) {
  const parts = /^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(?:\.(\d{1,3}))?$/.exec(input);
  if (!parts) return input;
  const hasFraction = typeof parts[7] === "string";
  // A fraction like ".5" means 500ms and ".05" means 50ms, so right-pad
  // to three digits before converting.
  const millis = hasFraction ? Number(parts[7].padEnd(3, "0")) : 0;
  const parsed = new Date(
    Number(parts[1]),
    Number(parts[2]) - 1,
    Number(parts[3]),
    Number(parts[4]),
    Number(parts[5]),
    Number(parts[6]),
    millis
  );
  return Number.isNaN(parsed.getTime()) ? input : formatIsoWithLocalOffset(parsed, hasFraction);
}
4952
// Normalizes a timestamp string into local time with an explicit UTC offset.
// Three shapes are recognized, checked in order:
//   1. local "YYYY-MM-DD HH:MM:SS" (no zone),
//   2. full ISO with a Z or +/-HH:MM zone (re-rendered in local time),
//   3. ISO without any zone marker.
// Anything else is passed through unchanged.
function normalizeTimeString(input) {
  if (/^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$/.test(input)) {
    return formatLocalDateTimeStringWithOffset(input);
  }
  if (/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d{1,3})?(?:Z|[+-]\d{2}:\d{2})$/i.test(input)) {
    const parsed = new Date(input);
    if (Number.isNaN(parsed.getTime())) return input;
    // Keep millisecond precision only if the input had a fractional part.
    const hadFraction = /\.\d{1,3}/.test(input);
    return formatIsoWithLocalOffset(parsed, hadFraction);
  }
  if (/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d{1,3})?$/.test(input)) {
    return formatIsoNoTzStringWithOffset(input);
  }
  return input;
}
4970
// Recursively walks a parsed log value and rewrites timestamp strings found
// under time-like keys ("time" / "server_time" after key normalization) into
// local time with an explicit offset. Arrays and plain objects are rebuilt;
// every other value is returned unchanged.
// NOTE(review): array elements are recursed without a parent key, so strings
// *inside* arrays are never rewritten — presumably intentional, but confirm.
function normalizeTimezonesInValue(value, keyNormalized) {
  const isTimeKey = keyNormalized === "time" || keyNormalized === "servertime";
  if (isTimeKey && typeof value === "string") {
    return normalizeTimeString(value);
  }
  if (value === null || typeof value !== "object") {
    return value;
  }
  if (Array.isArray(value)) {
    return value.map((element) => normalizeTimezonesInValue(element));
  }
  const rebuilt = {};
  for (const entryKey of Object.keys(value)) {
    rebuilt[entryKey] = normalizeTimezonesInValue(value[entryKey], normalizeObjectKey(entryKey));
  }
  return rebuilt;
}
5411
4985
  function sanitizeStructuredLog(value) {
5412
4986
  if (!value || typeof value !== "object") return value;
5413
4987
  if (Array.isArray(value)) return value;
@@ -5512,16 +5086,17 @@ async function readLogsJsonResult(options) {
5512
5086
  const logs = [];
5513
5087
  let hasError = false;
5514
5088
  for (const line of lines) {
5515
- if (!hasError && hasErrorInStdLines([line])) {
5516
- hasError = true;
5517
- }
5518
5089
  try {
5519
5090
  const parsed = JSON.parse(line);
5520
- logs.push(sanitizeStructuredLog(parsed));
5091
+ const normalized = normalizeTimezonesInValue(parsed);
5092
+ logs.push(sanitizeStructuredLog(normalized));
5521
5093
  if (!hasError && hasErrorInLogObject(parsed)) {
5522
5094
  hasError = true;
5523
5095
  }
5524
5096
  } catch {
5097
+ if (!hasError && hasErrorInStdLines([line])) {
5098
+ hasError = true;
5099
+ }
5525
5100
  continue;
5526
5101
  }
5527
5102
  }
@@ -5551,15 +5126,8 @@ function resolveLogFilePath(logDir, type) {
5551
5126
  throw new Error(`Unsupported log type: ${type}`);
5552
5127
  }
5553
5128
  async function run4(options) {
5554
- try {
5555
- const result = await readLogsJsonResult(options);
5556
- process.stdout.write(JSON.stringify(result) + "\n");
5557
- } catch (error) {
5558
- const message = error instanceof Error ? error.message : String(error);
5559
- const result = { hasError: true, totalLinesCount: 0, logs: [message] };
5560
- process.stdout.write(JSON.stringify(result) + "\n");
5561
- process.exitCode = 1;
5562
- }
5129
+ const result = await readLogsJsonResult(options);
5130
+ process.stdout.write(JSON.stringify(result) + "\n");
5563
5131
  }
5564
5132
  function parseLogType(input) {
5565
5133
  const value = typeof input === "string" ? input.trim() : "";
@@ -5591,9 +5159,9 @@ var readLogsCommand = {
5591
5159
  name: "read-logs",
5592
5160
  description: "Read latest logs from log files",
5593
5161
  register(program) {
5594
- program.command(this.name).description(this.description).option("--dir <path>", "Logs directory", "logs").option("--type <type>", `Log type: ${LOG_TYPES.join("|")}`, "server-std").option("--max-lines <lines>", "Max lines to return", "30").option("--offset <lines>", "Skip latest N lines for pagination", "0").option("--trace-id <id>", "Filter structured logs by trace id").option("--level <levels>", "Filter structured logs by level (comma-separated)").action(async (rawOptions) => {
5162
+ program.command(this.name).description(this.description).option("--dir <path>", "Logs directory", "/tmp").option("--type <type>", `Log type: ${LOG_TYPES.join("|")}`, "server-std").option("--max-lines <lines>", "Max lines to return", "30").option("--offset <lines>", "Skip latest N lines for pagination", "0").option("--trace-id <id>", "Filter structured logs by trace id").option("--level <levels>", "Filter structured logs by level (comma-separated)").action(async (rawOptions) => {
5595
5163
  try {
5596
- const logDir = typeof rawOptions.dir === "string" ? rawOptions.dir : "logs";
5164
+ const logDir = typeof rawOptions.dir === "string" ? rawOptions.dir : "/tmp";
5597
5165
  const type = parseLogType(rawOptions.type);
5598
5166
  const maxLines = parsePositiveInt(rawOptions.maxLines, "--max-lines");
5599
5167
  const offset = parseNonNegativeInt(rawOptions.offset, "--offset");
@@ -5602,8 +5170,7 @@ var readLogsCommand = {
5602
5170
  await run4({ logDir, type, maxLines, offset, traceId, levels });
5603
5171
  } catch (error) {
5604
5172
  const message = error instanceof Error ? error.message : String(error);
5605
- const result = { hasError: true, totalLinesCount: 0, logs: [message] };
5606
- process.stdout.write(JSON.stringify(result) + "\n");
5173
+ process.stderr.write(message + "\n");
5607
5174
  process.exitCode = 1;
5608
5175
  }
5609
5176
  });
@@ -5622,11 +5189,11 @@ var commands = [
5622
5189
 
5623
5190
  // src/index.ts
5624
5191
  var envPath = path17.join(process.cwd(), ".env");
5625
- if (fs21.existsSync(envPath)) {
5192
+ if (fs20.existsSync(envPath)) {
5626
5193
  dotenvConfig({ path: envPath });
5627
5194
  }
5628
5195
  var __dirname = path17.dirname(fileURLToPath4(import.meta.url));
5629
- var pkg = JSON.parse(fs21.readFileSync(path17.join(__dirname, "../package.json"), "utf-8"));
5196
+ var pkg = JSON.parse(fs20.readFileSync(path17.join(__dirname, "../package.json"), "utf-8"));
5630
5197
  var cli = new FullstackCLI(pkg.version);
5631
5198
  cli.useAll(commands);
5632
5199
  cli.run();