@sebspark/spanner-migrate 0.1.0 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,3 +1,124 @@
1
- # `@sebspark/spanner-migrate`
1
+ # @sebspark/spanner-migrate
2
2
 
3
- Migration tool for Spanner DB
3
+ `spanner-migrate` is a CLI tool for managing schema migrations for Google Cloud Spanner. It simplifies schema evolution by allowing you to create, apply, rollback, and track migrations.
4
+
5
+ ---
6
+
7
+ ## Installation
8
+
9
+ Install `@sebspark/spanner-migrate` as a dev dependency:
10
+
11
+ ```zsh
12
+ yarn add -D @sebspark/spanner-migrate
13
+ ```
14
+
15
+ ---
16
+
17
+ ## CLI Usage
18
+
19
+ Run `spanner-migrate` from your project root. If no command is provided, the help message is displayed.
20
+
21
+ ```zsh
22
+ spanner-migrate [command] [options]
23
+ ```
24
+
25
+ ### Commands
26
+
27
+ #### `init`
28
+ Initialize a Spanner migration configuration file (`.spanner-migrate.config.json`).
29
+
30
+ **Usage:**
31
+
32
+ ```zsh
33
+ spanner-migrate init
34
+ ```
35
+
36
+ **Prompts:**
37
+ - `Enter the path for your migrations`: Directory for migration files (default: `./migrations`).
38
+ - `Enter Spanner instance name`: The name of the Spanner instance.
39
+ - `Enter Spanner database name`: The name of the Spanner database.
40
+ - `Enter Google Cloud project name`: (Optional) The Google Cloud project name.
41
+
42
+ ---
43
+
44
+ #### `create <description>`
45
+ Create a new migration file.
46
+
47
+ **Usage:**
48
+
49
+ ```zsh
50
+ spanner-migrate create add users table
51
+ ```
52
+
53
+ **Result:**
54
+
55
+ Example:
56
+
57
+ `./migrations/20250120145638000_create_table_users.sql`
58
+
59
+ ```sql
60
+ -- Created: 2025-01-20T14:56:38.000Z
61
+ -- Description: create table users
62
+
63
+ ---- UP ----
64
+
65
+
66
+
67
+ ---- DOWN ----
68
+
69
+
70
+
71
+ ```
72
+
73
+ #### `up`
74
+ Apply pending migrations
75
+
76
+ **Usage:**
77
+
78
+ ```zsh
79
+ spanner-migrate up
80
+ ```
81
+
82
+ If you don't want to apply all pending migrations, use the `--max` or `-m` flag
83
+
84
+ ```zsh
85
+ spanner-migrate up --max 1
86
+ ```
87
+
88
+ #### `down`
89
+ Rollback one migration
90
+
91
+ **Usage:**
92
+
93
+ ```zsh
94
+ spanner-migrate down
95
+ ```
96
+
97
+ #### `status`
98
+ Check migration status
99
+
100
+ **Usage:**
101
+
102
+ ```zsh
103
+ spanner-migrate status
104
+ ```
105
+ Displays an overview of applied and pending migrations.
106
+
107
+ ```text
108
+ Migrations
109
+
110
+ Applied
111
+ --------------------------------------------------------------------------------
112
+ 20250122080434866_add_users_table
113
+ 20250122080444982_add_index_on_users
114
+
115
+ New
116
+ --------------------------------------------------------------------------------
117
+ 20250122091502134_add_orders_table
118
+ ```
119
+
120
+ ---
121
+
122
+ ## License
123
+
124
+ [Apache-2.0](LICENSE)
@@ -70,12 +70,11 @@ var runScript = async (db, script) => {
70
70
  }
71
71
  for (const statement of statements) {
72
72
  console.log(`Executing statement: ${statement}`);
73
- const sql = statement.replace(/--.*$/gm, "");
74
- if (isSchemaChange(sql)) {
75
- await db.updateSchema(sql);
73
+ if (isSchemaChange(statement)) {
74
+ await db.updateSchema(statement);
76
75
  } else {
77
76
  await db.runTransactionAsync(async (transaction) => {
78
- await transaction.runUpdate(sql);
77
+ await transaction.runUpdate(statement);
79
78
  await transaction.commit();
80
79
  });
81
80
  }
@@ -132,12 +131,12 @@ var getAppliedMigrations = async (db) => {
132
131
  };
133
132
 
134
133
  // src/files.ts
135
- import { access, mkdir, readdir, writeFile } from "node:fs/promises";
134
+ import { access, mkdir, readFile, readdir, writeFile } from "node:fs/promises";
136
135
  import { join, resolve } from "node:path";
137
136
  var getMigrationFiles = async (path) => {
138
137
  try {
139
138
  const files = await readdir(path);
140
- const migrationFileIds = files.filter((file) => file.endsWith(".ts")).map((file) => file.replace(/\.ts$/, ""));
139
+ const migrationFileIds = files.filter((file) => file.endsWith(".sql")).map((file) => file.replace(/\.sql$/, ""));
141
140
  return migrationFileIds;
142
141
  } catch (error) {
143
142
  throw new Error(
@@ -147,31 +146,40 @@ var getMigrationFiles = async (path) => {
147
146
  };
148
147
  var getMigration = async (path, id) => {
149
148
  try {
150
- const filePath = resolve(process.cwd(), join(path, `${id}.ts`));
149
+ const filePath = resolve(process.cwd(), join(path, `${id}.sql`));
151
150
  try {
152
151
  await access(filePath);
153
152
  } catch (err) {
154
153
  throw new Error(`Migration file not found: ${filePath}`);
155
154
  }
156
- const migrationModule = await import(filePath);
157
- if (!migrationModule.up || !migrationModule.down) {
155
+ const migrationText = await readFile(filePath, "utf8");
156
+ const up2 = getSql(migrationText, "up");
157
+ const down2 = getSql(migrationText, "down");
158
+ const description = getDescription(migrationText);
159
+ if (!up2 || !down2) {
158
160
  throw new Error(
159
161
  `Migration file ${filePath} does not export required scripts (up, down).`
160
162
  );
161
163
  }
162
- return {
163
- id,
164
- description: id.split("_").slice(1).map((word) => word.charAt(0).toUpperCase() + word.slice(1)).join(" "),
165
- // Generate a human-readable description
166
- up: migrationModule.up,
167
- down: migrationModule.down
168
- };
164
+ return { id, description, up: up2, down: down2 };
169
165
  } catch (error) {
170
166
  throw new Error(
171
167
  `Failed to get migration ${id}: ${error.message}`
172
168
  );
173
169
  }
174
170
  };
171
+ var getDescription = (text) => {
172
+ var _a, _b;
173
+ return ((_b = (_a = text == null ? void 0 : text.match(/^--\s*Description:\s*(.+)$/m)) == null ? void 0 : _a[1]) == null ? void 0 : _b.trim()) || "";
174
+ };
175
+ var getSql = (text, direction) => {
176
+ var _a, _b;
177
+ const rx = {
178
+ up: /---- UP ----\n([\s\S]*?)\n---- DOWN ----/,
179
+ down: /---- DOWN ----\n([\s\S]*)$/
180
+ };
181
+ return (_b = (_a = text == null ? void 0 : text.match(rx[direction])) == null ? void 0 : _a[1]) == null ? void 0 : _b.replace(/--.*$/gm, "").trim();
182
+ };
175
183
  var getNewMigrations = (applied, files) => {
176
184
  const sortedFiles = files.sort();
177
185
  for (let ix = 0; ix < applied.length; ix++) {
@@ -190,18 +198,18 @@ var createMigration = async (path, description) => {
190
198
  const timestamp = (/* @__PURE__ */ new Date()).toISOString();
191
199
  const compactTimestamp = timestamp.replace(/[-:.TZ]/g, "");
192
200
  const parsedDescription = description.replace(/\s+/g, "_").toLowerCase();
193
- const filename = `${compactTimestamp}_${parsedDescription}.ts`;
201
+ const filename = `${compactTimestamp}_${parsedDescription}.sql`;
194
202
  const filePath = join(path, filename);
195
- const template = `// ${timestamp}
196
- // ${description}
203
+ const template = `-- Created: ${timestamp}
204
+ -- Description: ${description}
205
+
206
+ ---- UP ----
207
+
208
+
209
+
210
+ ---- DOWN ----
197
211
 
198
- export const up = \`
199
- -- SQL for migrate up
200
- \`
201
212
 
202
- export const down = \`
203
- -- SQL for migrate down
204
- \`
205
213
  `;
206
214
  try {
207
215
  await mkdir(path, { recursive: true });
package/dist/cli.js CHANGED
@@ -26,6 +26,7 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
26
26
  // src/cli.ts
27
27
  var import_promises2 = __toESM(require("fs/promises"));
28
28
  var import_node_path2 = require("path");
29
+ var import_input = __toESM(require("@inquirer/input"));
29
30
  var import_yargs = __toESM(require("yargs"));
30
31
  var import_helpers = require("yargs/helpers");
31
32
 
@@ -101,12 +102,11 @@ var runScript = async (db, script) => {
101
102
  }
102
103
  for (const statement of statements) {
103
104
  console.log(`Executing statement: ${statement}`);
104
- const sql = statement.replace(/--.*$/gm, "");
105
- if (isSchemaChange(sql)) {
106
- await db.updateSchema(sql);
105
+ if (isSchemaChange(statement)) {
106
+ await db.updateSchema(statement);
107
107
  } else {
108
108
  await db.runTransactionAsync(async (transaction) => {
109
- await transaction.runUpdate(sql);
109
+ await transaction.runUpdate(statement);
110
110
  await transaction.commit();
111
111
  });
112
112
  }
@@ -168,7 +168,7 @@ var import_node_path = require("path");
168
168
  var getMigrationFiles = async (path) => {
169
169
  try {
170
170
  const files = await (0, import_promises.readdir)(path);
171
- const migrationFileIds = files.filter((file) => file.endsWith(".ts")).map((file) => file.replace(/\.ts$/, ""));
171
+ const migrationFileIds = files.filter((file) => file.endsWith(".sql")).map((file) => file.replace(/\.sql$/, ""));
172
172
  return migrationFileIds;
173
173
  } catch (error) {
174
174
  throw new Error(
@@ -178,31 +178,40 @@ var getMigrationFiles = async (path) => {
178
178
  };
179
179
  var getMigration = async (path, id) => {
180
180
  try {
181
- const filePath = (0, import_node_path.resolve)(process.cwd(), (0, import_node_path.join)(path, `${id}.ts`));
181
+ const filePath = (0, import_node_path.resolve)(process.cwd(), (0, import_node_path.join)(path, `${id}.sql`));
182
182
  try {
183
183
  await (0, import_promises.access)(filePath);
184
184
  } catch (err) {
185
185
  throw new Error(`Migration file not found: ${filePath}`);
186
186
  }
187
- const migrationModule = await import(filePath);
188
- if (!migrationModule.up || !migrationModule.down) {
187
+ const migrationText = await (0, import_promises.readFile)(filePath, "utf8");
188
+ const up2 = getSql(migrationText, "up");
189
+ const down2 = getSql(migrationText, "down");
190
+ const description = getDescription(migrationText);
191
+ if (!up2 || !down2) {
189
192
  throw new Error(
190
193
  `Migration file ${filePath} does not export required scripts (up, down).`
191
194
  );
192
195
  }
193
- return {
194
- id,
195
- description: id.split("_").slice(1).map((word) => word.charAt(0).toUpperCase() + word.slice(1)).join(" "),
196
- // Generate a human-readable description
197
- up: migrationModule.up,
198
- down: migrationModule.down
199
- };
196
+ return { id, description, up: up2, down: down2 };
200
197
  } catch (error) {
201
198
  throw new Error(
202
199
  `Failed to get migration ${id}: ${error.message}`
203
200
  );
204
201
  }
205
202
  };
203
+ var getDescription = (text) => {
204
+ var _a, _b;
205
+ return ((_b = (_a = text == null ? void 0 : text.match(/^--\s*Description:\s*(.+)$/m)) == null ? void 0 : _a[1]) == null ? void 0 : _b.trim()) || "";
206
+ };
207
+ var getSql = (text, direction) => {
208
+ var _a, _b;
209
+ const rx = {
210
+ up: /---- UP ----\n([\s\S]*?)\n---- DOWN ----/,
211
+ down: /---- DOWN ----\n([\s\S]*)$/
212
+ };
213
+ return (_b = (_a = text == null ? void 0 : text.match(rx[direction])) == null ? void 0 : _a[1]) == null ? void 0 : _b.replace(/--.*$/gm, "").trim();
214
+ };
206
215
  var getNewMigrations = (applied, files) => {
207
216
  const sortedFiles = files.sort();
208
217
  for (let ix = 0; ix < applied.length; ix++) {
@@ -221,18 +230,18 @@ var createMigration = async (path, description) => {
221
230
  const timestamp = (/* @__PURE__ */ new Date()).toISOString();
222
231
  const compactTimestamp = timestamp.replace(/[-:.TZ]/g, "");
223
232
  const parsedDescription = description.replace(/\s+/g, "_").toLowerCase();
224
- const filename = `${compactTimestamp}_${parsedDescription}.ts`;
233
+ const filename = `${compactTimestamp}_${parsedDescription}.sql`;
225
234
  const filePath = (0, import_node_path.join)(path, filename);
226
- const template = `// ${timestamp}
227
- // ${description}
235
+ const template = `-- Created: ${timestamp}
236
+ -- Description: ${description}
237
+
238
+ ---- UP ----
239
+
240
+
241
+
242
+ ---- DOWN ----
228
243
 
229
- export const up = \`
230
- -- SQL for migrate up
231
- \`
232
244
 
233
- export const down = \`
234
- -- SQL for migrate down
235
- \`
236
245
  `;
237
246
  try {
238
247
  await (0, import_promises.mkdir)(path, { recursive: true });
@@ -314,36 +323,34 @@ async function loadConfig() {
314
323
  process.exit(1);
315
324
  }
316
325
  }
317
- (0, import_yargs.default)((0, import_helpers.hideBin)(process.argv)).scriptName("spanner-migrate").usage("$0 <command> [options]").command(
326
+ (0, import_yargs.default)((0, import_helpers.hideBin)(process.argv)).scriptName("spanner-migrate").usage("$0 <command>").command(
318
327
  "init",
319
328
  "Initialize a .spanner-migrate.config.json file",
320
- (yargs2) => {
321
- yargs2.option("migrationsPath", {
322
- type: "string",
323
- describe: "Path to the migrations folder",
324
- default: "./spanner-migrations"
329
+ async () => {
330
+ const migrationsPath = await (0, import_input.default)({
331
+ message: "Enter the path for your migrations",
332
+ required: true,
333
+ default: "./migrations"
325
334
  });
326
- yargs2.option("instanceName", {
327
- type: "string",
328
- describe: "Spanner instance name",
329
- demandOption: true
335
+ const instanceName = await (0, import_input.default)({
336
+ message: "Enter Spanner instance name",
337
+ required: true
330
338
  });
331
- yargs2.option("databaseName", {
332
- type: "string",
333
- describe: "Spanner database name",
334
- demandOption: true
339
+ const databaseName = await (0, import_input.default)({
340
+ message: "Enter Spanner database name",
341
+ required: true
335
342
  });
336
- yargs2.option("projectName", {
337
- type: "string",
338
- describe: "Google Cloud project name (optional)"
343
+ const projectId = await (0, import_input.default)({
344
+ message: "Enter Google Cloud project name",
345
+ required: false
339
346
  });
340
- },
341
- async (args) => {
342
- await init(args, CONFIG_FILE);
347
+ const config = { instanceName, databaseName, migrationsPath };
348
+ if (projectId) config.projectId = projectId;
349
+ await init(config, CONFIG_FILE);
343
350
  console.log(`Configuration written to ${CONFIG_FILE}`);
344
351
  }
345
352
  ).command(
346
- "create <description>",
353
+ "create <description ...>",
347
354
  "Create a new migration file",
348
355
  (yargs2) => {
349
356
  yargs2.positional("description", {
@@ -354,9 +361,10 @@ async function loadConfig() {
354
361
  },
355
362
  async (args) => {
356
363
  const config = await loadConfig();
357
- await create(config, args.description);
364
+ const fullDescription = args.description.join(" ");
365
+ await create(config, fullDescription);
358
366
  console.log(
359
- `Migration file created: '${(0, import_node_path2.join)(config.migrationsPath, args.description)}'`
367
+ `Migration file created: '${(0, import_node_path2.join)(config.migrationsPath, args.description.join("_"))}.sql'`
360
368
  );
361
369
  }
362
370
  ).command(
@@ -383,4 +391,4 @@ async function loadConfig() {
383
391
  const config = await loadConfig();
384
392
  const migrationStatus = await status(config);
385
393
  console.log(migrationStatus);
386
- }).help().strict().parse();
394
+ }).demandCommand().help().parse();
package/dist/cli.mjs CHANGED
@@ -5,11 +5,12 @@ import {
5
5
  init,
6
6
  status,
7
7
  up
8
- } from "./chunk-K5WX6ESL.mjs";
8
+ } from "./chunk-Z3ZLNDVO.mjs";
9
9
 
10
10
  // src/cli.ts
11
11
  import fs from "node:fs/promises";
12
12
  import { join } from "node:path";
13
+ import input from "@inquirer/input";
13
14
  import yargs from "yargs";
14
15
  import { hideBin } from "yargs/helpers";
15
16
  var CONFIG_FILE = "./.spanner-migrate.config.json";
@@ -22,36 +23,34 @@ async function loadConfig() {
22
23
  process.exit(1);
23
24
  }
24
25
  }
25
- yargs(hideBin(process.argv)).scriptName("spanner-migrate").usage("$0 <command> [options]").command(
26
+ yargs(hideBin(process.argv)).scriptName("spanner-migrate").usage("$0 <command>").command(
26
27
  "init",
27
28
  "Initialize a .spanner-migrate.config.json file",
28
- (yargs2) => {
29
- yargs2.option("migrationsPath", {
30
- type: "string",
31
- describe: "Path to the migrations folder",
32
- default: "./spanner-migrations"
29
+ async () => {
30
+ const migrationsPath = await input({
31
+ message: "Enter the path for your migrations",
32
+ required: true,
33
+ default: "./migrations"
33
34
  });
34
- yargs2.option("instanceName", {
35
- type: "string",
36
- describe: "Spanner instance name",
37
- demandOption: true
35
+ const instanceName = await input({
36
+ message: "Enter Spanner instance name",
37
+ required: true
38
38
  });
39
- yargs2.option("databaseName", {
40
- type: "string",
41
- describe: "Spanner database name",
42
- demandOption: true
39
+ const databaseName = await input({
40
+ message: "Enter Spanner database name",
41
+ required: true
43
42
  });
44
- yargs2.option("projectName", {
45
- type: "string",
46
- describe: "Google Cloud project name (optional)"
43
+ const projectId = await input({
44
+ message: "Enter Google Cloud project name",
45
+ required: false
47
46
  });
48
- },
49
- async (args) => {
50
- await init(args, CONFIG_FILE);
47
+ const config = { instanceName, databaseName, migrationsPath };
48
+ if (projectId) config.projectId = projectId;
49
+ await init(config, CONFIG_FILE);
51
50
  console.log(`Configuration written to ${CONFIG_FILE}`);
52
51
  }
53
52
  ).command(
54
- "create <description>",
53
+ "create <description ...>",
55
54
  "Create a new migration file",
56
55
  (yargs2) => {
57
56
  yargs2.positional("description", {
@@ -62,9 +61,10 @@ yargs(hideBin(process.argv)).scriptName("spanner-migrate").usage("$0 <command> [
62
61
  },
63
62
  async (args) => {
64
63
  const config = await loadConfig();
65
- await create(config, args.description);
64
+ const fullDescription = args.description.join(" ");
65
+ await create(config, fullDescription);
66
66
  console.log(
67
- `Migration file created: '${join(config.migrationsPath, args.description)}'`
67
+ `Migration file created: '${join(config.migrationsPath, args.description.join("_"))}.sql'`
68
68
  );
69
69
  }
70
70
  ).command(
@@ -91,4 +91,4 @@ yargs(hideBin(process.argv)).scriptName("spanner-migrate").usage("$0 <command> [
91
91
  const config = await loadConfig();
92
92
  const migrationStatus = await status(config);
93
93
  console.log(migrationStatus);
94
- }).help().strict().parse();
94
+ }).demandCommand().help().parse();
package/dist/index.js CHANGED
@@ -98,12 +98,11 @@ var runScript = async (db, script) => {
98
98
  }
99
99
  for (const statement of statements) {
100
100
  console.log(`Executing statement: ${statement}`);
101
- const sql = statement.replace(/--.*$/gm, "");
102
- if (isSchemaChange(sql)) {
103
- await db.updateSchema(sql);
101
+ if (isSchemaChange(statement)) {
102
+ await db.updateSchema(statement);
104
103
  } else {
105
104
  await db.runTransactionAsync(async (transaction) => {
106
- await transaction.runUpdate(sql);
105
+ await transaction.runUpdate(statement);
107
106
  await transaction.commit();
108
107
  });
109
108
  }
@@ -165,7 +164,7 @@ var import_node_path = require("path");
165
164
  var getMigrationFiles = async (path) => {
166
165
  try {
167
166
  const files = await (0, import_promises.readdir)(path);
168
- const migrationFileIds = files.filter((file) => file.endsWith(".ts")).map((file) => file.replace(/\.ts$/, ""));
167
+ const migrationFileIds = files.filter((file) => file.endsWith(".sql")).map((file) => file.replace(/\.sql$/, ""));
169
168
  return migrationFileIds;
170
169
  } catch (error) {
171
170
  throw new Error(
@@ -175,31 +174,40 @@ var getMigrationFiles = async (path) => {
175
174
  };
176
175
  var getMigration = async (path, id) => {
177
176
  try {
178
- const filePath = (0, import_node_path.resolve)(process.cwd(), (0, import_node_path.join)(path, `${id}.ts`));
177
+ const filePath = (0, import_node_path.resolve)(process.cwd(), (0, import_node_path.join)(path, `${id}.sql`));
179
178
  try {
180
179
  await (0, import_promises.access)(filePath);
181
180
  } catch (err) {
182
181
  throw new Error(`Migration file not found: ${filePath}`);
183
182
  }
184
- const migrationModule = await import(filePath);
185
- if (!migrationModule.up || !migrationModule.down) {
183
+ const migrationText = await (0, import_promises.readFile)(filePath, "utf8");
184
+ const up2 = getSql(migrationText, "up");
185
+ const down2 = getSql(migrationText, "down");
186
+ const description = getDescription(migrationText);
187
+ if (!up2 || !down2) {
186
188
  throw new Error(
187
189
  `Migration file ${filePath} does not export required scripts (up, down).`
188
190
  );
189
191
  }
190
- return {
191
- id,
192
- description: id.split("_").slice(1).map((word) => word.charAt(0).toUpperCase() + word.slice(1)).join(" "),
193
- // Generate a human-readable description
194
- up: migrationModule.up,
195
- down: migrationModule.down
196
- };
192
+ return { id, description, up: up2, down: down2 };
197
193
  } catch (error) {
198
194
  throw new Error(
199
195
  `Failed to get migration ${id}: ${error.message}`
200
196
  );
201
197
  }
202
198
  };
199
+ var getDescription = (text) => {
200
+ var _a, _b;
201
+ return ((_b = (_a = text == null ? void 0 : text.match(/^--\s*Description:\s*(.+)$/m)) == null ? void 0 : _a[1]) == null ? void 0 : _b.trim()) || "";
202
+ };
203
+ var getSql = (text, direction) => {
204
+ var _a, _b;
205
+ const rx = {
206
+ up: /---- UP ----\n([\s\S]*?)\n---- DOWN ----/,
207
+ down: /---- DOWN ----\n([\s\S]*)$/
208
+ };
209
+ return (_b = (_a = text == null ? void 0 : text.match(rx[direction])) == null ? void 0 : _a[1]) == null ? void 0 : _b.replace(/--.*$/gm, "").trim();
210
+ };
203
211
  var getNewMigrations = (applied, files) => {
204
212
  const sortedFiles = files.sort();
205
213
  for (let ix = 0; ix < applied.length; ix++) {
@@ -218,18 +226,18 @@ var createMigration = async (path, description) => {
218
226
  const timestamp = (/* @__PURE__ */ new Date()).toISOString();
219
227
  const compactTimestamp = timestamp.replace(/[-:.TZ]/g, "");
220
228
  const parsedDescription = description.replace(/\s+/g, "_").toLowerCase();
221
- const filename = `${compactTimestamp}_${parsedDescription}.ts`;
229
+ const filename = `${compactTimestamp}_${parsedDescription}.sql`;
222
230
  const filePath = (0, import_node_path.join)(path, filename);
223
- const template = `// ${timestamp}
224
- // ${description}
231
+ const template = `-- Created: ${timestamp}
232
+ -- Description: ${description}
233
+
234
+ ---- UP ----
235
+
236
+
237
+
238
+ ---- DOWN ----
225
239
 
226
- export const up = \`
227
- -- SQL for migrate up
228
- \`
229
240
 
230
- export const down = \`
231
- -- SQL for migrate down
232
- \`
233
241
  `;
234
242
  try {
235
243
  await (0, import_promises.mkdir)(path, { recursive: true });
package/dist/index.mjs CHANGED
@@ -4,7 +4,7 @@ import {
4
4
  init,
5
5
  status,
6
6
  up
7
- } from "./chunk-K5WX6ESL.mjs";
7
+ } from "./chunk-Z3ZLNDVO.mjs";
8
8
  export {
9
9
  create,
10
10
  down,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@sebspark/spanner-migrate",
3
- "version": "0.1.0",
3
+ "version": "0.2.0",
4
4
  "license": "Apache-2.0",
5
5
  "main": "dist/index.js",
6
6
  "module": "dist/index.mjs",
@@ -31,6 +31,7 @@
31
31
  "@google-cloud/spanner": "*"
32
32
  },
33
33
  "dependencies": {
34
+ "@inquirer/input": "4.1.3",
34
35
  "yargs": "17.7.2"
35
36
  }
36
37
  }