@3lineas/d1-orm 1.0.9 → 1.0.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/index.cjs +27 -9
- package/dist/cli/index.js +27 -9
- package/package.json +1 -1
package/dist/cli/index.cjs
CHANGED
@@ -61,10 +61,10 @@ export class User extends Model {
 61  61 |   declare updated_at: string;
 62  62 | }
 63  63 | `;
 64     | - const userModelPath = path.join(process.cwd(), modelsPath, "User.
     64 | + const userModelPath = path.join(process.cwd(), modelsPath, "User.mts");
 65  65 | if (!fs.existsSync(userModelPath)) {
 66  66 |   fs.writeFileSync(userModelPath, userModelContent);
 67     | -   p.log.step(`Created model: ${modelsPath}/User.
     67 | +   p.log.step(`Created model: ${modelsPath}/User.mts`);
 68  68 | }
 69  69 | const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[-:]/g, "").split(".")[0].replace("T", "_");
 70  70 | const migrationName = `${timestamp}_create_users_table.mts`;
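For reference, the migration filename in the context lines above is derived from a chained string transform on an ISO timestamp. A minimal TypeScript sketch of that same chain, using a fixed date for illustration only:

    // Replays the timestamp transform used for generated migration names above.
    const now = new Date("2024-05-01T09:30:15.123Z");   // fixed date, illustration only
    const timestamp = now.toISOString()                  // "2024-05-01T09:30:15.123Z"
      .replace(/[-:]/g, "")                              // "20240501T093015.123Z"
      .split(".")[0]                                     // "20240501T093015"
      .replace("T", "_");                                // "20240501_093015"
    const migrationName = `${timestamp}_create_users_table.mts`;
    // -> "20240501_093015_create_users_table.mts"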
@@ -93,7 +93,7 @@ export const down = async () => {
 93  93 | fs.writeFileSync(fullMigrationsPath, migrationContent);
 94  94 | p.log.step(`Created migration: ${migrationsPath}/${migrationName}`);
 95  95 | }
 96     | - const seederContent = `import { User } from '../models/User.
     96 | + const seederContent = `import { User } from '../models/User.mts';
 97  97 | 
 98  98 | export const seed = async () => {
 99  99 |   await User.create({
@@ -227,7 +227,7 @@ async function makeModel(name) {
 227 227 |   }
 228 228 | }
 229 229 | const modelPath = await findModelsPath() || "src/database/models";
 230     | - const filename = `${modelName}.
     230 | + const filename = `${modelName}.mts`;
 231 231 | const targetPath = path3.join(process.cwd(), modelPath, filename);
 232 232 | const template = `import { Model } from '@3lineas/d1-orm';
 233 233 | 
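The makeModel change above writes generated models with an .mts extension. Based on the template fragments visible in this diff (the import of Model from '@3lineas/d1-orm' and the declare fields in the init template), a generated model file would look roughly like the sketch below; the class name and column declarations are placeholders, not the package's exact template:

    // Hypothetical shape of a generated src/database/models/Post.mts
    import { Model } from '@3lineas/d1-orm';

    export class Post extends Model {
      declare id: number;          // placeholder columns; the real template
      declare created_at: string;  // may declare different fields
      declare updated_at: string;
    }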
@@ -293,7 +293,7 @@ async function makeSeeder(modelName, modelPath) {
 293 293 |   fs3.mkdirSync(seederDir, { recursive: true });
 294 294 | }
 295 295 | const relativeModelPath = path3.relative(seederDir, path3.join(process.cwd(), modelPath, modelName)).replace(/\\/g, "/");
 296     | - const template = `import { ${modelName} } from '${relativeModelPath}.
     296 | + const template = `import { ${modelName} } from '${relativeModelPath}.mts';
 297 297 | 
 298 298 | export const seed = async () => {
 299 299 |   // await ${modelName}.create({ ... });
@@ -340,7 +340,7 @@ var Config = class {
 340 340 | const content = fs4.readFileSync(fullPath, "utf-8");
 341 341 | if (configName.endsWith(".json") || configName.endsWith(".jsonc")) {
 342 342 |   try {
 343     | -     const jsonStr = content.replace(
     343 | +     const jsonStr = content.replace(/\/\/.*$/gm, "").replace(/\/\*[\s\S]*?\*\//g, "");
 344 344 |     const config = JSON.parse(jsonStr);
 345 345 |     const d1Databases = config.d1_databases;
 346 346 |     if (Array.isArray(d1Databases) && d1Databases.length > 0) {
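The rewritten line 343 strips JSONC comments before handing the config to JSON.parse, so a commented wrangler.jsonc can still be read. A small sketch of that two-step replace (note the approach is intentionally simple: a // sequence inside a string value, such as a URL, would also be stripped):

    // Strip // line comments and /* */ block comments, then parse, as the new code does.
    const raw = `{
      // D1 bindings used by the CLI
      "d1_databases": [
        { "binding": "DB", "database_name": "my-db" } /* local dev */
      ]
    }`;
    const jsonStr = raw.replace(/\/\/.*$/gm, "").replace(/\/\*[\s\S]*?\*\//g, "");
    const config = JSON.parse(jsonStr);
    console.log(config.d1_databases[0].binding); // "DB"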
@@ -357,6 +357,20 @@ var Config = class {
 357 357 |   }
 358 358 |   return "DB";
 359 359 | }
     360 | + /**
     361 | +  * Get a filtered environment object to avoid system warnings.
     362 | +  * Filters out npm_config_* variables and sets NODE_NO_WARNINGS.
     363 | +  */
     364 | + static getCleanEnv() {
     365 | +   const cleanEnv = { ...process.env };
     366 | +   Object.keys(cleanEnv).forEach((key) => {
     367 | +     if (key.startsWith("npm_config_")) {
     368 | +       delete cleanEnv[key];
     369 | +     }
     370 | +   });
     371 | +   cleanEnv.NODE_NO_WARNINGS = "1";
     372 | +   return cleanEnv;
     373 | + }
 360 374 | /**
 361 375 |  * Detect if the project is ESM.
 362 376 |  */
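The new static getCleanEnv() above is what the execSync calls in the following hunks now pass as env. Shown standalone below (a sketch, not the package's exported API): npm_config_* variables, which npm injects into the environment when the CLI runs through npx or an npm script, are removed, and NODE_NO_WARNINGS=1 silences Node warnings in the spawned wrangler process.

    import { execSync } from "node:child_process";

    // Standalone version of the env filtering added in this hunk.
    function getCleanEnv(): NodeJS.ProcessEnv {
      const cleanEnv = { ...process.env };
      for (const key of Object.keys(cleanEnv)) {
        if (key.startsWith("npm_config_")) delete cleanEnv[key]; // drop npm-injected vars
      }
      cleanEnv.NODE_NO_WARNINGS = "1"; // no Node runtime warnings in the child process
      return cleanEnv;
    }

    // e.g. run wrangler without the inherited npm_config_* noise
    execSync("npx wrangler --version", { stdio: "pipe", env: getCleanEnv() });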
@@ -699,7 +713,7 @@ async function migrate(args2) {
 699 713 | const execCmd = `npx wrangler d1 execute ${dbName} --command "${sql.replace(/"/g, '\\"')}" ${command2}`;
 700 714 | (0, import_child_process2.execSync)(execCmd, {
 701 715 |   stdio: "pipe",
 702     | -   env:
     716 | +   env: Config.getCleanEnv()
 703 717 | });
 704 718 | s.stop(`Migrated: ${file}`);
 705 719 | } catch (e) {
@@ -738,7 +752,8 @@ async function migrateFresh(args2) {
 738 752 | const listTablesCmd = `npx wrangler d1 execute ${dbName} --command "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%' AND name NOT LIKE '_cf_%'" ${flag} --json`;
 739 753 | const output = (0, import_child_process3.execSync)(listTablesCmd, {
 740 754 |   encoding: "utf-8",
 741     | -   stdio: ["ignore", "pipe", "
     755 | +   stdio: ["ignore", "pipe", "pipe"],
     756 | +   env: Config.getCleanEnv()
 742 757 | });
 743 758 | const jsonStart = output.indexOf("[");
 744 759 | if (jsonStart !== -1) {
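In the hunk above, the table-listing command now runs with fully piped stdio and the filtered env. The --json output is located by searching for the first "[", presumably because wrangler prints banner or progress text before the JSON payload. A rough sketch of that parsing step; the sample output string and the result shape are assumptions, not captured wrangler output:

    // Locate and parse the JSON array inside mixed wrangler output (payload shape assumed).
    const output = "wrangler banner text\n" +
      '[{"results":[{"name":"users"},{"name":"posts"}],"success":true}]';
    const jsonStart = output.indexOf("[");
    if (jsonStart !== -1) {
      const parsed = JSON.parse(output.slice(jsonStart));
      const tables: { name: string }[] = parsed[0].results;
      console.log(tables.map((t) => t.name)); // ["users", "posts"]
    }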
@@ -748,7 +763,10 @@ async function migrateFresh(args2) {
 748 763 | s.message(`Dropping ${tables.length} tables...`);
 749 764 | const dropCommands = tables.map((t) => `DROP TABLE IF EXISTS ${t.name};`).join(" ");
 750 765 | const dropCmd = `npx wrangler d1 execute ${dbName} --command "${dropCommands}" ${flag}`;
 751     | - (0, import_child_process3.execSync)(dropCmd, {
     766 | + (0, import_child_process3.execSync)(dropCmd, {
     767 | +   stdio: "pipe",
     768 | +   env: Config.getCleanEnv()
     769 | + });
 752 770 | s.stop("All tables dropped successfully.");
 753 771 | } else {
 754 772 |   s.stop("No tables found to drop.");
package/dist/cli/index.js
CHANGED
@@ -41,10 +41,10 @@ export class User extends Model {
 41  41 |   declare updated_at: string;
 42  42 | }
 43  43 | `;
 44     | - const userModelPath = path.join(process.cwd(), modelsPath, "User.
     44 | + const userModelPath = path.join(process.cwd(), modelsPath, "User.mts");
 45  45 | if (!fs.existsSync(userModelPath)) {
 46  46 |   fs.writeFileSync(userModelPath, userModelContent);
 47     | -   p.log.step(`Created model: ${modelsPath}/User.
     47 | +   p.log.step(`Created model: ${modelsPath}/User.mts`);
 48  48 | }
 49  49 | const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[-:]/g, "").split(".")[0].replace("T", "_");
 50  50 | const migrationName = `${timestamp}_create_users_table.mts`;
@@ -73,7 +73,7 @@ export const down = async () => {
 73  73 | fs.writeFileSync(fullMigrationsPath, migrationContent);
 74  74 | p.log.step(`Created migration: ${migrationsPath}/${migrationName}`);
 75  75 | }
 76     | - const seederContent = `import { User } from '../models/User.
     76 | + const seederContent = `import { User } from '../models/User.mts';
 77  77 | 
 78  78 | export const seed = async () => {
 79  79 |   await User.create({
@@ -207,7 +207,7 @@ async function makeModel(name) {
 207 207 |   }
 208 208 | }
 209 209 | const modelPath = await findModelsPath() || "src/database/models";
 210     | - const filename = `${modelName}.
     210 | + const filename = `${modelName}.mts`;
 211 211 | const targetPath = path3.join(process.cwd(), modelPath, filename);
 212 212 | const template = `import { Model } from '@3lineas/d1-orm';
 213 213 | 
@@ -273,7 +273,7 @@ async function makeSeeder(modelName, modelPath) {
 273 273 |   fs3.mkdirSync(seederDir, { recursive: true });
 274 274 | }
 275 275 | const relativeModelPath = path3.relative(seederDir, path3.join(process.cwd(), modelPath, modelName)).replace(/\\/g, "/");
 276     | - const template = `import { ${modelName} } from '${relativeModelPath}.
     276 | + const template = `import { ${modelName} } from '${relativeModelPath}.mts';
 277 277 | 
 278 278 | export const seed = async () => {
 279 279 |   // await ${modelName}.create({ ... });
@@ -320,7 +320,7 @@ var Config = class {
 320 320 | const content = fs4.readFileSync(fullPath, "utf-8");
 321 321 | if (configName.endsWith(".json") || configName.endsWith(".jsonc")) {
 322 322 |   try {
 323     | -     const jsonStr = content.replace(
     323 | +     const jsonStr = content.replace(/\/\/.*$/gm, "").replace(/\/\*[\s\S]*?\*\//g, "");
 324 324 |     const config = JSON.parse(jsonStr);
 325 325 |     const d1Databases = config.d1_databases;
 326 326 |     if (Array.isArray(d1Databases) && d1Databases.length > 0) {
@@ -337,6 +337,20 @@ var Config = class {
 337 337 |   }
 338 338 |   return "DB";
 339 339 | }
     340 | + /**
     341 | +  * Get a filtered environment object to avoid system warnings.
     342 | +  * Filters out npm_config_* variables and sets NODE_NO_WARNINGS.
     343 | +  */
     344 | + static getCleanEnv() {
     345 | +   const cleanEnv = { ...process.env };
     346 | +   Object.keys(cleanEnv).forEach((key) => {
     347 | +     if (key.startsWith("npm_config_")) {
     348 | +       delete cleanEnv[key];
     349 | +     }
     350 | +   });
     351 | +   cleanEnv.NODE_NO_WARNINGS = "1";
     352 | +   return cleanEnv;
     353 | + }
 340 354 | /**
 341 355 |  * Detect if the project is ESM.
 342 356 |  */
@@ -556,7 +570,7 @@ async function migrate(args2) {
 556 570 | const execCmd = `npx wrangler d1 execute ${dbName} --command "${sql.replace(/"/g, '\\"')}" ${command2}`;
 557 571 | execSync2(execCmd, {
 558 572 |   stdio: "pipe",
 559     | -   env:
     573 | +   env: Config.getCleanEnv()
 560 574 | });
 561 575 | s.stop(`Migrated: ${file}`);
 562 576 | } catch (e) {
@@ -595,7 +609,8 @@ async function migrateFresh(args2) {
 595 609 | const listTablesCmd = `npx wrangler d1 execute ${dbName} --command "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%' AND name NOT LIKE '_cf_%'" ${flag} --json`;
 596 610 | const output = execSync3(listTablesCmd, {
 597 611 |   encoding: "utf-8",
 598     | -   stdio: ["ignore", "pipe", "
     612 | +   stdio: ["ignore", "pipe", "pipe"],
     613 | +   env: Config.getCleanEnv()
 599 614 | });
 600 615 | const jsonStart = output.indexOf("[");
 601 616 | if (jsonStart !== -1) {
@@ -605,7 +620,10 @@ async function migrateFresh(args2) {
 605 620 | s.message(`Dropping ${tables.length} tables...`);
 606 621 | const dropCommands = tables.map((t) => `DROP TABLE IF EXISTS ${t.name};`).join(" ");
 607 622 | const dropCmd = `npx wrangler d1 execute ${dbName} --command "${dropCommands}" ${flag}`;
 608     | - execSync3(dropCmd, {
     623 | + execSync3(dropCmd, {
     624 | +   stdio: "pipe",
     625 | +   env: Config.getCleanEnv()
     626 | + });
 609 627 | s.stop("All tables dropped successfully.");
 610 628 | } else {
 611 629 |   s.stop("No tables found to drop.");