@astrojs/db 0.0.0-edge-nested-20240223135627
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +59 -0
- package/config-augment.d.ts +4 -0
- package/dist/core/cli/commands/gen/index.d.ts +6 -0
- package/dist/core/cli/commands/gen/index.js +39 -0
- package/dist/core/cli/commands/link/index.d.ts +8 -0
- package/dist/core/cli/commands/link/index.js +78 -0
- package/dist/core/cli/commands/login/index.d.ts +6 -0
- package/dist/core/cli/commands/login/index.js +46 -0
- package/dist/core/cli/commands/logout/index.d.ts +6 -0
- package/dist/core/cli/commands/logout/index.js +9 -0
- package/dist/core/cli/commands/push/index.d.ts +6 -0
- package/dist/core/cli/commands/push/index.js +209 -0
- package/dist/core/cli/commands/shell/index.d.ts +6 -0
- package/dist/core/cli/commands/shell/index.js +15 -0
- package/dist/core/cli/commands/verify/index.d.ts +6 -0
- package/dist/core/cli/commands/verify/index.js +43 -0
- package/dist/core/cli/index.d.ts +6 -0
- package/dist/core/cli/index.js +68 -0
- package/dist/core/cli/migration-queries.d.ts +26 -0
- package/dist/core/cli/migration-queries.js +418 -0
- package/dist/core/cli/migrations.d.ts +34 -0
- package/dist/core/cli/migrations.js +129 -0
- package/dist/core/consts.d.ts +6 -0
- package/dist/core/consts.js +17 -0
- package/dist/core/errors.d.ts +7 -0
- package/dist/core/errors.js +54 -0
- package/dist/core/integration/error-map.d.ts +6 -0
- package/dist/core/integration/error-map.js +79 -0
- package/dist/core/integration/file-url.d.ts +2 -0
- package/dist/core/integration/file-url.js +84 -0
- package/dist/core/integration/index.d.ts +2 -0
- package/dist/core/integration/index.js +173 -0
- package/dist/core/integration/load-astro-config.d.ts +6 -0
- package/dist/core/integration/load-astro-config.js +79 -0
- package/dist/core/integration/typegen.d.ts +5 -0
- package/dist/core/integration/typegen.js +41 -0
- package/dist/core/integration/vite-plugin-db.d.ts +25 -0
- package/dist/core/integration/vite-plugin-db.js +73 -0
- package/dist/core/integration/vite-plugin-inject-env-ts.d.ts +11 -0
- package/dist/core/integration/vite-plugin-inject-env-ts.js +53 -0
- package/dist/core/queries.d.ts +78 -0
- package/dist/core/queries.js +218 -0
- package/dist/core/tokens.d.ts +11 -0
- package/dist/core/tokens.js +131 -0
- package/dist/core/types.d.ts +8059 -0
- package/dist/core/types.js +209 -0
- package/dist/core/utils.d.ts +5 -0
- package/dist/core/utils.js +18 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.js +16 -0
- package/dist/runtime/db-client.d.ts +12 -0
- package/dist/runtime/db-client.js +96 -0
- package/dist/runtime/drizzle.d.ts +1 -0
- package/dist/runtime/drizzle.js +48 -0
- package/dist/runtime/index.d.ts +30 -0
- package/dist/runtime/index.js +124 -0
- package/dist/runtime/types.d.ts +69 -0
- package/dist/runtime/types.js +8 -0
- package/index.d.ts +3 -0
- package/package.json +81 -0
|
@@ -0,0 +1,418 @@
|
|
|
1
|
+
import * as color from "kleur/colors";
|
|
2
|
+
import deepDiff from "deep-diff";
|
|
3
|
+
import {
|
|
4
|
+
columnSchema
|
|
5
|
+
} from "../types.js";
|
|
6
|
+
import { SQLiteAsyncDialect } from "drizzle-orm/sqlite-core";
|
|
7
|
+
import { customAlphabet } from "nanoid";
|
|
8
|
+
import prompts from "prompts";
|
|
9
|
+
import {
|
|
10
|
+
getCreateIndexQueries,
|
|
11
|
+
getCreateTableQuery,
|
|
12
|
+
getModifiers,
|
|
13
|
+
getReferencesConfig,
|
|
14
|
+
hasDefault,
|
|
15
|
+
schemaTypeToSqlType
|
|
16
|
+
} from "../queries.js";
|
|
17
|
+
import { hasPrimaryKey } from "../../runtime/index.js";
|
|
18
|
+
import { isSerializedSQL } from "../../runtime/types.js";
|
|
19
|
+
// Shared SQLite dialect instance, used for identifier escaping in generated DDL.
const sqlite = new SQLiteAsyncDialect();
// Random lowercase suffix generator for temporary table names used during
// table recreation (see getRecreateTableQueries).
const genTempTableName = customAlphabet("abcdefghijklmnopqrstuvwxyz", 10);
|
21
|
+
/**
 * Compute the SQL statements that migrate `oldSnapshot` to `newSnapshot`.
 *
 * @param {object} opts
 * @param {DBSnapshot} opts.oldSnapshot - schema as of the last migration.
 * @param {DBSnapshot} opts.newSnapshot - schema from the current config.
 * @param {object} [opts.ambiguityResponses] - pre-recorded answers for
 *   rename-vs-add/drop ambiguities; when omitted, the user is prompted.
 * @returns {Promise<{queries: string[], confirmations: string[]}>} queries to
 *   run plus human-readable confirmations for any data-loss operations.
 */
async function getMigrationQueries({
  oldSnapshot,
  newSnapshot,
  ambiguityResponses
}) {
  const queries = [];
  const confirmations = [];
  let added = getAddedCollections(oldSnapshot, newSnapshot);
  let dropped = getDroppedCollections(oldSnapshot, newSnapshot);
  // A simultaneous add + drop may actually be a rename; resolve before
  // emitting CREATE/DROP statements.
  if (!isEmpty(added) && !isEmpty(dropped)) {
    const resolved = await resolveCollectionRenames(added, dropped, ambiguityResponses);
    added = resolved.added;
    dropped = resolved.dropped;
    for (const { from, to } of resolved.renamed) {
      const renameQuery = `ALTER TABLE ${sqlite.escapeName(from)} RENAME TO ${sqlite.escapeName(
        to
      )}`;
      queries.push(renameQuery);
    }
  }
  for (const [collectionName, collection] of Object.entries(added)) {
    queries.push(getCreateTableQuery(collectionName, collection));
    queries.push(...getCreateIndexQueries(collectionName, collection));
  }
  for (const [collectionName] of Object.entries(dropped)) {
    const dropQuery = `DROP TABLE ${sqlite.escapeName(collectionName)}`;
    queries.push(dropQuery);
  }
  // Diff column/index/foreign-key changes for collections present in both
  // snapshots.
  for (const [collectionName, newCollection] of Object.entries(newSnapshot.schema)) {
    const oldCollection = oldSnapshot.schema[collectionName];
    if (!oldCollection)
      continue;
    const result = await getCollectionChangeQueries({
      collectionName,
      oldCollection,
      newCollection,
      // Fix: previously ambiguityResponses was not forwarded, so column
      // renames could never be resolved non-interactively.
      ambiguityResponses
    });
    queries.push(...result.queries);
    confirmations.push(...result.confirmations);
  }
  return { queries, confirmations };
}
|
|
63
|
+
/**
 * Compute migration queries for a single collection (table) that exists in
 * both the old and new snapshots.
 *
 * Strategy, in order of preference:
 *  1. nothing but index changes   -> DROP/CREATE INDEX only
 *  2. simple add/drop of columns  -> ALTER TABLE ADD/DROP COLUMN
 *  3. anything else               -> recreate the table (possibly with data loss)
 *
 * @returns {Promise<{queries: string[], confirmations: string[]}>}
 *   confirmations describe data-loss scenarios the user must approve.
 */
async function getCollectionChangeQueries({
  collectionName,
  oldCollection,
  newCollection,
  ambiguityResponses
}) {
  const queries = [];
  const confirmations = [];
  const updated = getUpdatedColumns(oldCollection.columns, newCollection.columns);
  let added = getAdded(oldCollection.columns, newCollection.columns);
  let dropped = getDropped(oldCollection.columns, newCollection.columns);
  // deepDiff returns undefined when equal; coerce to a boolean flag.
  const hasForeignKeyChanges = Boolean(
    deepDiff(oldCollection.foreignKeys, newCollection.foreignKeys)
  );
  // Case 1: no column/FK changes at all -- only indexes may differ.
  if (!hasForeignKeyChanges && isEmpty(updated) && isEmpty(added) && isEmpty(dropped)) {
    return {
      queries: getChangeIndexQueries({
        collectionName,
        oldIndexes: oldCollection.indexes,
        newIndexes: newCollection.indexes
      }),
      confirmations
    };
  }
  // A simultaneous column add + drop may actually be a rename.
  if (!hasForeignKeyChanges && !isEmpty(added) && !isEmpty(dropped)) {
    const resolved = await resolveColumnRenames(collectionName, added, dropped, ambiguityResponses);
    added = resolved.added;
    dropped = resolved.dropped;
    queries.push(...getColumnRenameQueries(collectionName, resolved.renamed));
  }
  // Case 2: every remaining change is expressible with ALTER TABLE
  // (SQLite restricts which columns can be added/dropped that way).
  if (!hasForeignKeyChanges && isEmpty(updated) && Object.values(dropped).every(canAlterTableDropColumn) && Object.values(added).every(canAlterTableAddColumn)) {
    queries.push(
      ...getAlterTableQueries(collectionName, added, dropped),
      ...getChangeIndexQueries({
        collectionName,
        oldIndexes: oldCollection.indexes,
        newIndexes: newCollection.indexes
      })
    );
    return { queries, confirmations };
  }
  // Case 3: the table must be recreated. Check whether the existing rows can
  // survive the recreation, and if not, queue a confirmation message.
  const dataLossCheck = canRecreateTableWithoutDataLoss(added, updated);
  if (dataLossCheck.dataLoss) {
    const { reason, columnName } = dataLossCheck;
    const reasonMsgs = {
      "added-required": `New column ${color.bold(
        collectionName + "." + columnName
      )} is required with no default value.
This requires deleting existing data in the ${color.bold(
        collectionName
      )} collection.`,
      "added-unique": `New column ${color.bold(
        collectionName + "." + columnName
      )} is marked as unique.
This requires deleting existing data in the ${color.bold(
        collectionName
      )} collection.`,
      "updated-type": `Updated column ${color.bold(
        collectionName + "." + columnName
      )} cannot convert data to new column data type.
This requires deleting existing data in the ${color.bold(
        collectionName
      )} collection.`
    };
    confirmations.push(reasonMsgs[reason]);
  }
  // If no column declares a primary key (and none was just dropped), the
  // table relies on SQLite's hidden rowid; it must be carried across the
  // recreation as "_id".
  const primaryKeyExists = Object.entries(newCollection.columns).find(
    ([, column]) => hasPrimaryKey(column)
  );
  const droppedPrimaryKey = Object.entries(dropped).find(([, column]) => hasPrimaryKey(column));
  const recreateTableQueries = getRecreateTableQueries({
    collectionName,
    newCollection,
    added,
    hasDataLoss: dataLossCheck.dataLoss,
    migrateHiddenPrimaryKey: !primaryKeyExists && !droppedPrimaryKey
  });
  queries.push(...recreateTableQueries, ...getCreateIndexQueries(collectionName, newCollection));
  return { queries, confirmations };
}
|
|
143
|
+
/**
 * Build DROP INDEX / CREATE INDEX statements to move `oldIndexes` to
 * `newIndexes`. A modified index is recreated: dropped, then re-added.
 */
function getChangeIndexQueries({
  collectionName,
  oldIndexes = {},
  newIndexes = {}
}) {
  const added = getAdded(oldIndexes, newIndexes);
  const dropped = getDropped(oldIndexes, newIndexes);
  const updated = getUpdated(oldIndexes, newIndexes);
  // Changed indexes appear in both sets so they get dropped and recreated.
  Object.assign(dropped, updated);
  Object.assign(added, updated);
  const queries = Object.keys(dropped).map(
    (indexName) => `DROP INDEX ${sqlite.escapeName(indexName)}`
  );
  queries.push(...getCreateIndexQueries(collectionName, { indexes: added }));
  return queries;
}
|
|
161
|
+
/**
 * Decide whether each "might be added" column is genuinely new or a rename of
 * a "might be dropped" column.
 *
 * @param {string} collectionName - collection the columns belong to.
 * @param {object} mightAdd - columns present only in the new snapshot.
 * @param {object} mightDrop - columns present only in the old snapshot.
 * @param {object} [ambiguityResponses] - pre-recorded answers keyed by
 *   collection and column; when absent, the user is prompted interactively.
 * @returns {Promise<{added: object, dropped: object, renamed: Array<{from: string, to: string}>}>}
 */
async function resolveColumnRenames(collectionName, mightAdd, mightDrop, ambiguityResponses) {
  const added = {};
  const dropped = {};
  const renamed = [];
  for (const [columnName, column] of Object.entries(mightAdd)) {
    let oldColumnName = ambiguityResponses ? ambiguityResponses.columnRenames[collectionName]?.[columnName] ?? "__NEW__" : void 0;
    if (!oldColumnName) {
      const res = await prompts(
        {
          type: "select",
          name: "columnName",
          message: "New column " + color.blue(color.bold(`${collectionName}.${columnName}`)) + " detected. Was this renamed from an existing column?",
          choices: [
            { title: "New column (not renamed from existing)", value: "__NEW__" },
            // Fix: `key in renamed` checked array *indices*, not rename
            // sources, so columns already claimed by a rename stayed in the
            // choice list. Match against each rename's `from` instead.
            ...Object.keys(mightDrop).filter((key) => !renamed.some((r) => r.from === key)).map((key) => ({ title: key, value: key }))
          ]
        },
        {
          onCancel: () => {
            process.exit(1);
          }
        }
      );
      oldColumnName = res.columnName;
    }
    if (oldColumnName === "__NEW__") {
      added[columnName] = column;
    } else {
      renamed.push({ from: oldColumnName, to: columnName });
    }
  }
  // Anything not claimed as a rename source really is dropped.
  for (const [droppedColumnName, droppedColumn] of Object.entries(mightDrop)) {
    if (!renamed.find((r) => r.from === droppedColumnName)) {
      dropped[droppedColumnName] = droppedColumn;
    }
  }
  return { added, dropped, renamed };
}
|
|
199
|
+
/**
 * Decide whether each "might be added" collection is genuinely new or a
 * rename of a "might be dropped" collection.
 *
 * @param {object} mightAdd - collections present only in the new snapshot.
 * @param {object} mightDrop - collections present only in the old snapshot.
 * @param {object} [ambiguityResponses] - pre-recorded answers keyed by
 *   collection name; when absent, the user is prompted interactively.
 * @returns {Promise<{added: object, dropped: object, renamed: Array<{from: string, to: string}>}>}
 */
async function resolveCollectionRenames(mightAdd, mightDrop, ambiguityResponses) {
  const added = {};
  const dropped = {};
  const renamed = [];
  for (const [collectionName, collection] of Object.entries(mightAdd)) {
    let oldCollectionName = ambiguityResponses ? ambiguityResponses.collectionRenames[collectionName] ?? "__NEW__" : void 0;
    if (!oldCollectionName) {
      const res = await prompts(
        {
          type: "select",
          name: "collectionName",
          message: "New collection " + color.blue(color.bold(collectionName)) + " detected. Was this renamed from an existing collection?",
          choices: [
            { title: "New collection (not renamed from existing)", value: "__NEW__" },
            // Fix: `key in renamed` checked array *indices*, not rename
            // sources, so collections already claimed by a rename stayed in
            // the choice list. Match against each rename's `from` instead.
            ...Object.keys(mightDrop).filter((key) => !renamed.some((r) => r.from === key)).map((key) => ({ title: key, value: key }))
          ]
        },
        {
          onCancel: () => {
            process.exit(1);
          }
        }
      );
      oldCollectionName = res.collectionName;
    }
    if (oldCollectionName === "__NEW__") {
      added[collectionName] = collection;
    } else {
      renamed.push({ from: oldCollectionName, to: collectionName });
    }
  }
  // Anything not claimed as a rename source really is dropped.
  for (const [droppedCollectionName, droppedCollection] of Object.entries(mightDrop)) {
    if (!renamed.find((r) => r.from === droppedCollectionName)) {
      dropped[droppedCollectionName] = droppedCollection;
    }
  }
  return { added, dropped, renamed };
}
|
|
237
|
+
/**
 * Collections present in the new snapshot but absent from the old one.
 * Both arguments are snapshots with a `schema` map of collection configs.
 */
function getAddedCollections(oldCollections, newCollections) {
  const previousSchema = oldCollections.schema;
  return Object.fromEntries(
    Object.entries(newCollections.schema).filter(([name]) => !(name in previousSchema))
  );
}
|
|
245
|
+
/**
 * Collections present in the old snapshot but absent from the new one.
 * Both arguments are snapshots with a `schema` map of collection configs.
 */
function getDroppedCollections(oldCollections, newCollections) {
  const currentSchema = newCollections.schema;
  return Object.fromEntries(
    Object.entries(oldCollections.schema).filter(([name]) => !(name in currentSchema))
  );
}
|
|
253
|
+
/**
 * Build one ALTER TABLE ... RENAME COLUMN statement per resolved rename.
 * Identifiers are escaped via the SQLite dialect.
 */
function getColumnRenameQueries(unescapedCollectionName, renamed) {
  const collectionName = sqlite.escapeName(unescapedCollectionName);
  return renamed.map(
    ({ from, to }) => `ALTER TABLE ${collectionName} RENAME COLUMN ${sqlite.escapeName(from)} TO ${sqlite.escapeName(to)}`
  );
}
|
|
264
|
+
/**
 * Build ALTER TABLE ADD COLUMN / DROP COLUMN statements for columns that
 * passed the canAlterTableAddColumn / canAlterTableDropColumn checks.
 */
function getAlterTableQueries(unescapedCollectionName, added, dropped) {
  const collectionName = sqlite.escapeName(unescapedCollectionName);
  const addStatements = Object.entries(added).map(([unescColumnName, column]) => {
    const columnName = sqlite.escapeName(unescColumnName);
    const type = schemaTypeToSqlType(column.type);
    const modifiers = getModifiers(columnName, column);
    return `ALTER TABLE ${collectionName} ADD COLUMN ${columnName} ${type}${modifiers}`;
  });
  const dropStatements = Object.keys(dropped).map(
    (unescColumnName) => `ALTER TABLE ${collectionName} DROP COLUMN ${sqlite.escapeName(unescColumnName)}`
  );
  return [...addStatements, ...dropStatements];
}
|
|
283
|
+
/**
 * Build the statements that recreate a table whose changes cannot be
 * expressed with ALTER TABLE.
 *
 * With data loss: drop and recreate in place. Without data loss: create a
 * temporary table, copy the surviving columns over, drop the original, and
 * rename the temp table back.
 *
 * @param {object} opts
 * @param {string} opts.collectionName - unescaped table name.
 * @param {object} opts.newCollection - target collection config.
 * @param {object} opts.added - columns new to this snapshot (excluded from the copy).
 * @param {boolean} opts.hasDataLoss - when true, existing rows are discarded.
 * @param {boolean} opts.migrateHiddenPrimaryKey - carry SQLite's hidden rowid
 *   across as "_id" when no explicit primary key exists.
 * @returns {string[]} SQL statements, in execution order.
 */
function getRecreateTableQueries({
  collectionName: unescCollectionName,
  newCollection,
  added,
  hasDataLoss,
  migrateHiddenPrimaryKey
}) {
  // Random suffix avoids clashing with any user table during the swap.
  const unescTempName = `${unescCollectionName}_${genTempTableName()}`;
  const tempName = sqlite.escapeName(unescTempName);
  const collectionName = sqlite.escapeName(unescCollectionName);
  if (hasDataLoss) {
    return [
      `DROP TABLE ${collectionName}`,
      getCreateTableQuery(unescCollectionName, newCollection)
    ];
  }
  const newColumns = [...Object.keys(newCollection.columns)];
  if (migrateHiddenPrimaryKey) {
    newColumns.unshift("_id");
  }
  // Copy only columns that already existed; added columns get their defaults.
  const escapedColumns = newColumns.filter((i) => !(i in added)).map((c) => sqlite.escapeName(c)).join(", ");
  return [
    getCreateTableQuery(unescTempName, newCollection),
    `INSERT INTO ${tempName} (${escapedColumns}) SELECT ${escapedColumns} FROM ${collectionName}`,
    `DROP TABLE ${collectionName}`,
    `ALTER TABLE ${tempName} RENAME TO ${collectionName}`
  ];
}
|
|
311
|
+
/**
 * True when the object has no own enumerable keys.
 */
function isEmpty(obj) {
  return Object.entries(obj).length === 0;
}
|
|
314
|
+
/**
 * Whether a column can be introduced with ALTER TABLE ADD COLUMN.
 * SQLite forbids adding columns that are unique, primary-key, referencing,
 * computed at runtime, or required without a default.
 */
function canAlterTableAddColumn(column) {
  if (column.schema.unique) {
    return false;
  }
  if (hasRuntimeDefault(column)) {
    return false;
  }
  const requiredWithoutDefault = !column.schema.optional && !hasDefault(column);
  if (requiredWithoutDefault) {
    return false;
  }
  if (hasPrimaryKey(column)) {
    return false;
  }
  if (getReferencesConfig(column)) {
    return false;
  }
  return true;
}
|
|
327
|
+
/**
 * Whether a column can be removed with ALTER TABLE DROP COLUMN.
 * Unique and primary-key columns cannot be dropped that way in SQLite.
 */
function canAlterTableDropColumn(column) {
  const blocked = column.schema.unique || hasPrimaryKey(column);
  return !blocked;
}
|
|
334
|
+
/**
 * Check whether recreating a table with these added/updated columns would
 * destroy existing rows.
 *
 * @returns {{dataLoss: false} | {dataLoss: true, columnName: string, reason: string}}
 *   reason is one of "added-required", "added-unique", "updated-type".
 */
function canRecreateTableWithoutDataLoss(added, updated) {
  for (const [columnName, column] of Object.entries(added)) {
    // A non-number primary key with no default cannot be backfilled.
    if (hasPrimaryKey(column) && column.type !== "number" && !hasDefault(column)) {
      return { dataLoss: true, columnName, reason: "added-required" };
    }
    // A required column with no default leaves existing rows invalid.
    if (!column.schema.optional && !hasDefault(column)) {
      return { dataLoss: true, columnName, reason: "added-required" };
    }
    // A required unique column would collide across existing rows.
    if (!column.schema.optional && column.schema.unique) {
      return { dataLoss: true, columnName, reason: "added-unique" };
    }
  }
  for (const [columnName, change] of Object.entries(updated)) {
    const typeChanged = change.old.type !== change.new.type;
    if (typeChanged && !canChangeTypeWithoutQuery(change.old, change.new)) {
      return { dataLoss: true, columnName, reason: "updated-type" };
    }
  }
  return { dataLoss: false };
}
|
|
353
|
+
/**
 * Entries whose keys appear in `newObj` but not in `oldObj`.
 */
function getAdded(oldObj, newObj) {
  return Object.fromEntries(
    Object.entries(newObj).filter(([key]) => !(key in oldObj))
  );
}
|
|
361
|
+
/**
 * Entries whose keys appear in `oldObj` but not in `newObj`.
 */
function getDropped(oldObj, newObj) {
  return Object.fromEntries(
    Object.entries(oldObj).filter(([key]) => !(key in newObj))
  );
}
|
|
369
|
+
/**
 * Entries present in both objects whose values differ (per deep-diff).
 * Keys missing or falsy in `oldObj` are skipped, not treated as updates.
 */
function getUpdated(oldObj, newObj) {
  const changed = Object.entries(newObj).filter(([key, value]) => {
    const previous = oldObj[key];
    if (!previous) {
      return false;
    }
    return Boolean(deepDiff(previous, value));
  });
  return Object.fromEntries(changed);
}
|
|
380
|
+
/**
 * Columns present in both snapshots whose definitions differ.
 *
 * @returns {Record<string, {old: object, new: object}>} map of column name to
 *   old/new definition pair.
 */
function getUpdatedColumns(oldColumns, newColumns) {
  const updated = {};
  for (const [key, newColumn] of Object.entries(newColumns)) {
    let oldColumn = oldColumns[key];
    if (!oldColumn)
      continue;
    if (oldColumn.type !== newColumn.type && canChangeTypeWithoutQuery(oldColumn, newColumn)) {
      // The type change is losslessly representable (e.g. boolean -> number).
      // Re-parse the old column under the new type so the subsequent deep
      // diff does not report the type change itself as a difference.
      const asNewColumn = columnSchema.safeParse({
        type: newColumn.type,
        schema: oldColumn.schema
      });
      if (asNewColumn.success) {
        oldColumn = asNewColumn.data;
      }
      // NOTE(review): on parse failure the original old column is kept and
      // the diff below will flag the type change -- presumably intended.
    }
    const diff = deepDiff(oldColumn, newColumn);
    if (diff) {
      updated[key] = { old: oldColumn, new: newColumn };
    }
  }
  return updated;
}
|
|
402
|
+
// Column type conversions SQLite can perform losslessly in place,
// without rewriting the stored data.
const typeChangesWithoutQuery = [
  { from: "boolean", to: "number" },
  { from: "date", to: "text" },
  { from: "json", to: "text" }
];
/**
 * True when changing `oldColumn.type` to `newColumn.type` needs no data
 * migration query.
 */
function canChangeTypeWithoutQuery(oldColumn, newColumn) {
  for (const { from, to } of typeChangesWithoutQuery) {
    if (oldColumn.type === from && newColumn.type === to) {
      return true;
    }
  }
  return false;
}
|
|
412
|
+
/**
 * True when the column's default is a serialized SQL expression that must be
 * evaluated by the database at runtime (rather than a literal value).
 */
function hasRuntimeDefault(column) {
  const defaultValue = column.schema.default;
  return Boolean(defaultValue && isSerializedSQL(defaultValue));
}
|
|
415
|
+
export {
|
|
416
|
+
getCollectionChangeQueries,
|
|
417
|
+
getMigrationQueries
|
|
418
|
+
};
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import deepDiff from 'deep-diff';
import { type DBSnapshot } from '../types.js';
import type { AstroConfig } from 'astro';
/**
 * Discriminated union describing how the project's migrations directory
 * relates to the current config: not initialized, ahead of the last
 * migration, or fully up to date.
 */
export type MigrationStatus = {
    state: 'no-migrations-found';
    currentSnapshot: DBSnapshot;
} | {
    state: 'ahead';
    oldSnapshot: DBSnapshot;
    newSnapshot: DBSnapshot;
    diff: deepDiff.Diff<DBSnapshot, DBSnapshot>[];
    newFilename: string;
    summary: string;
    newFileContent?: string;
} | {
    state: 'up-to-date';
    currentSnapshot: DBSnapshot;
};
/** Compare the config-derived snapshot against the migrations on disk. */
export declare function getMigrationStatus(config: AstroConfig): Promise<MigrationStatus>;
/** User-facing CLI messages for the migration workflow. */
export declare const MIGRATIONS_CREATED: string;
export declare const MIGRATIONS_UP_TO_DATE: string;
export declare const MIGRATIONS_NOT_INITIALIZED: string;
export declare const MIGRATION_NEEDED: string;
/** List migration filenames from ./migrations ([] when missing). */
export declare function getMigrations(): Promise<string[]>;
/** Read and parse a single migration file by name. */
export declare function loadMigration(migration: string): Promise<{
    diff: any[];
    db: string[];
    confirm?: string[];
}>;
/** Read ./migrations/0000_snapshot.json, upgrading legacy formats. */
export declare function loadInitialSnapshot(): Promise<DBSnapshot>;
/** Create ./migrations and write the initial snapshot file. */
export declare function initializeMigrationsDirectory(currentSnapshot: DBSnapshot): Promise<void>;
/** Replay every migration diff onto the initial snapshot. */
export declare function initializeFromMigrations(allMigrationFiles: string[]): Promise<DBSnapshot>;
/** Build a snapshot from the current Astro config's table definitions. */
export declare function createCurrentSnapshot(config: AstroConfig): DBSnapshot;
/** A snapshot with no tables, at the current experimental version. */
export declare function createEmptySnapshot(): DBSnapshot;
|
@@ -0,0 +1,129 @@
|
|
|
1
|
+
import deepDiff from "deep-diff";
|
|
2
|
+
import { mkdir, readFile, readdir, writeFile } from "fs/promises";
|
|
3
|
+
import { tablesSchema } from "../types.js";
|
|
4
|
+
import { cyan, green, yellow } from "kleur/colors";
|
|
5
|
+
const { applyChange, diff: generateDiff } = deepDiff;
|
|
6
|
+
/**
 * Compare the snapshot derived from the Astro config against the snapshot
 * reconstructed from the migrations directory.
 *
 * @param {AstroConfig} config
 * @returns {Promise<MigrationStatus>} one of "no-migrations-found",
 *   "ahead" (config has unmigrated changes), or "up-to-date".
 */
async function getMigrationStatus(config) {
  const currentSnapshot = createCurrentSnapshot(config);
  const allMigrationFiles = await getMigrations();
  if (allMigrationFiles.length === 0) {
    return {
      state: "no-migrations-found",
      currentSnapshot
    };
  }
  // Replay existing migrations to learn what the last migrated schema was.
  const previousSnapshot = await initializeFromMigrations(allMigrationFiles);
  // generateDiff returns undefined when the snapshots are identical.
  const diff = generateDiff(previousSnapshot, currentSnapshot);
  if (diff) {
    const n = getNewMigrationNumber(allMigrationFiles);
    // Zero-padded so lexicographic filename order matches numeric order.
    const newFilename = `${String(n + 1).padStart(4, "0")}_migration.json`;
    return {
      state: "ahead",
      oldSnapshot: previousSnapshot,
      newSnapshot: currentSnapshot,
      diff,
      newFilename,
      summary: generateDiffSummary(diff)
    };
  }
  return {
    state: "up-to-date",
    currentSnapshot
  };
}
|
|
34
|
+
// User-facing CLI messages for the migration workflow. These are runtime
// strings; their exact content (including blank lines) is displayed as-is.
const MIGRATIONS_CREATED = `${green(
  "\u25A0 Migrations initialized!"
)}

To execute your migrations, run
${cyan("astro db push")}`;
const MIGRATIONS_UP_TO_DATE = `${green(
  "\u25A0 No migrations needed!"
)}

Your database is up to date.
`;
const MIGRATIONS_NOT_INITIALIZED = `${yellow(
  "\u25B6 No migrations found!"
)}

To scaffold your migrations folder, run
${cyan("astro db sync")}
`;
const MIGRATION_NEEDED = `${yellow(
  "\u25B6 Changes detected!"
)}

To create the necessary migration file, run
${cyan("astro db sync")}
`;
|
|
60
|
+
/**
 * Render a deep-diff result as pretty-printed JSON for display.
 */
function generateDiffSummary(diff) {
  const indentWidth = 2;
  return JSON.stringify(diff, null, indentWidth);
}
|
|
63
|
+
/**
 * Highest numeric filename prefix among existing migration files
 * (e.g. "0002_migration.json" -> 2), or 0 when none parse or the list is
 * empty. The caller uses this + 1 for the next migration's filename.
 *
 * Fix: the previous `curr.split("_")[0] ?? len` fallback was dead code --
 * split() always yields a first element -- and `len` (list length - 1) was a
 * meaningless number fed to parseInt. parseInt already stops at the first
 * non-digit, so parse the filename directly and skip NaN explicitly.
 */
function getNewMigrationNumber(allMigrationFiles) {
  return allMigrationFiles.reduce((highest, filename) => {
    const num = Number.parseInt(filename, 10);
    return Number.isNaN(num) ? highest : Math.max(highest, num);
  }, 0);
}
|
|
70
|
+
/**
 * List the files in ./migrations, or [] when the directory does not exist.
 * Other filesystem errors are rethrown.
 *
 * Fix: readdir's ordering is platform-dependent, but callers (e.g.
 * initializeFromMigrations) replay these files in array order, so the
 * listing is sorted -- the zero-padded numeric prefixes make lexicographic
 * order equal numeric order.
 */
async function getMigrations() {
  const migrationFiles = await readdir("./migrations").catch((err) => {
    if (err.code === "ENOENT") {
      return [];
    }
    throw err;
  });
  return migrationFiles.sort();
}
|
|
79
|
+
/**
 * Read and parse a single migration file from ./migrations by filename.
 */
async function loadMigration(migration) {
  const contents = await readFile(`./migrations/${migration}`, "utf-8");
  return JSON.parse(contents);
}
|
|
82
|
+
/**
 * Read ./migrations/0000_snapshot.json. Current files carry
 * `experimentalVersion: 1`; legacy files held the bare schema object and are
 * wrapped on the fly. Anything else is rejected.
 *
 * @throws {Error} when the file matches neither format.
 */
async function loadInitialSnapshot() {
  const raw = await readFile("./migrations/0000_snapshot.json", "utf-8");
  const snapshot = JSON.parse(raw);
  if (snapshot.experimentalVersion === 1) {
    return snapshot;
  }
  if (!snapshot.schema) {
    // Legacy format: the file itself was the schema map.
    return { experimentalVersion: 1, schema: snapshot };
  }
  throw new Error("Invalid snapshot format");
}
|
|
92
|
+
/**
 * Create ./migrations (idempotently) and write the initial snapshot file
 * that later migrations will be diffed against.
 */
async function initializeMigrationsDirectory(currentSnapshot) {
  await mkdir("./migrations", { recursive: true });
  const serialized = JSON.stringify(currentSnapshot, void 0, 2);
  await writeFile("./migrations/0000_snapshot.json", serialized);
}
|
|
96
|
+
/**
 * Reconstruct the schema snapshot as of the last migration by loading the
 * initial snapshot and replaying every migration's diff onto it, in array
 * order.
 *
 * @param {string[]} allMigrationFiles - filenames from the migrations dir.
 * @returns {Promise<DBSnapshot>} the accumulated snapshot.
 */
async function initializeFromMigrations(allMigrationFiles) {
  const prevSnapshot = await loadInitialSnapshot();
  for (const migration of allMigrationFiles) {
    // The initial snapshot is the starting point, not a migration.
    if (migration === "0000_snapshot.json")
      continue;
    const migrationContent = await loadMigration(migration);
    migrationContent.diff.forEach((change) => {
      // deep-diff's applyChange mutates prevSnapshot in place; the second
      // argument is unused by the library for this call shape.
      applyChange(prevSnapshot, {}, change);
    });
  }
  return prevSnapshot;
}
|
|
108
|
+
/**
 * Build a snapshot from the current Astro config's table definitions.
 * The parsed config is round-tripped through JSON so the snapshot is plain
 * serializable data (functions/undefined stripped), matching what migration
 * files store on disk.
 */
function createCurrentSnapshot(config) {
  const tables = config.db?.tables ?? {};
  const parsed = tablesSchema.parse(tables);
  const schema = JSON.parse(JSON.stringify(parsed));
  return { experimentalVersion: 1, schema };
}
|
|
113
|
+
/**
 * A snapshot with no tables, at the current experimental version.
 */
function createEmptySnapshot() {
  const schema = {};
  return { experimentalVersion: 1, schema };
}
|
|
116
|
+
export {
|
|
117
|
+
MIGRATIONS_CREATED,
|
|
118
|
+
MIGRATIONS_NOT_INITIALIZED,
|
|
119
|
+
MIGRATIONS_UP_TO_DATE,
|
|
120
|
+
MIGRATION_NEEDED,
|
|
121
|
+
createCurrentSnapshot,
|
|
122
|
+
createEmptySnapshot,
|
|
123
|
+
getMigrationStatus,
|
|
124
|
+
getMigrations,
|
|
125
|
+
initializeFromMigrations,
|
|
126
|
+
initializeMigrationsDirectory,
|
|
127
|
+
loadInitialSnapshot,
|
|
128
|
+
loadMigration
|
|
129
|
+
};
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
/** Package name read from package.json at module load. */
export declare const PACKAGE_NAME: any;
/** JSON-stringified specifier for the runtime entry point. */
export declare const RUNTIME_IMPORT: string;
/** JSON-stringified specifier for the drizzle runtime entry point. */
export declare const RUNTIME_DRIZZLE_IMPORT: string;
/** Filename for generated type declarations. */
export declare const DB_TYPES_FILE = "db-types.d.ts";
/** Virtual module id exposed to user code. */
export declare const VIRTUAL_MODULE_ID = "astro:db";
/** Location of the local SQLite database file. */
export declare const DB_PATH = ".astro/content.db";
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import { readFileSync } from "node:fs";
|
|
2
|
+
// Read this package's name from its own package.json (two levels up from
// dist/core/) so import specifiers stay correct if the package is renamed.
const PACKAGE_NAME = JSON.parse(
  readFileSync(new URL("../../package.json", import.meta.url), "utf8")
).name;
// JSON.stringify produces quoted specifiers ready to embed in generated code.
const RUNTIME_IMPORT = JSON.stringify(`${PACKAGE_NAME}/runtime`);
const RUNTIME_DRIZZLE_IMPORT = JSON.stringify(`${PACKAGE_NAME}/runtime/drizzle`);
// Filename for generated type declarations.
const DB_TYPES_FILE = "db-types.d.ts";
// Virtual module id exposed to user code.
const VIRTUAL_MODULE_ID = "astro:db";
// Location of the local SQLite database file, relative to the project root.
const DB_PATH = ".astro/content.db";
|
|
10
|
+
export {
|
|
11
|
+
DB_PATH,
|
|
12
|
+
DB_TYPES_FILE,
|
|
13
|
+
PACKAGE_NAME,
|
|
14
|
+
RUNTIME_DRIZZLE_IMPORT,
|
|
15
|
+
RUNTIME_IMPORT,
|
|
16
|
+
VIRTUAL_MODULE_ID
|
|
17
|
+
};
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
/** Shown when a Studio command runs without an authenticated session. */
export declare const MISSING_SESSION_ID_ERROR: string;
/** Shown when the directory is not linked to a Studio project. */
export declare const MISSING_PROJECT_ID_ERROR: string;
/** Error for writable collections used without Studio or unsafeWritable. */
export declare const STUDIO_CONFIG_MISSING_WRITABLE_COLLECTIONS_ERROR: (collectionName: string) => string;
/** Warning shown when unsafeWritable is enabled with writable tables. */
export declare const UNSAFE_WRITABLE_WARNING: string;
/** Shown when a CLI command requires Studio but it is not configured. */
export declare const STUDIO_CONFIG_MISSING_CLI_ERROR: string;
/** Shown when no migrations directory exists yet. */
export declare const MIGRATIONS_NOT_INITIALIZED: string;
/** Error for seeding a writable collection with data() in production. */
export declare const SEED_WRITABLE_IN_PROD_ERROR: (collectionName: string) => string;
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
import { cyan, bold, red, green, yellow } from "kleur/colors";
|
|
2
|
+
const MISSING_SESSION_ID_ERROR = `${red("\u25B6 Login required!")}
|
|
3
|
+
|
|
4
|
+
To authenticate with Astro Studio, run
|
|
5
|
+
${cyan("astro db login")}
|
|
6
|
+
`;
|
|
7
|
+
const MISSING_PROJECT_ID_ERROR = `${red("\u25B6 Directory not linked.")}
|
|
8
|
+
|
|
9
|
+
To link this directory to an Astro Studio project, run
|
|
10
|
+
${cyan("astro db link")}
|
|
11
|
+
`;
|
|
12
|
+
const STUDIO_CONFIG_MISSING_WRITABLE_COLLECTIONS_ERROR = (collectionName) => `${red(
|
|
13
|
+
`\u25B6 Writable collection ${bold(collectionName)} requires Astro Studio or the ${yellow(
|
|
14
|
+
"unsafeWritable"
|
|
15
|
+
)} option.`
|
|
16
|
+
)}
|
|
17
|
+
|
|
18
|
+
Visit ${cyan("https://astro.build/studio")} to create your account
|
|
19
|
+
and set ${green("studio: true")} in your astro.config.mjs file to enable Studio.
|
|
20
|
+
`;
|
|
21
|
+
const UNSAFE_WRITABLE_WARNING = `${yellow(
|
|
22
|
+
"unsafeWritable"
|
|
23
|
+
)} option is enabled and you are using writable tables.
|
|
24
|
+
Redeploying your app may result in wiping away your database.
|
|
25
|
+
I hope you know what you are doing.
|
|
26
|
+
`;
|
|
27
|
+
const STUDIO_CONFIG_MISSING_CLI_ERROR = `${red("\u25B6 This command requires Astro Studio.")}
|
|
28
|
+
|
|
29
|
+
Visit ${cyan("https://astro.build/studio")} to create your account
|
|
30
|
+
and set ${green("studio: true")} in your astro.config.mjs file to enable Studio.
|
|
31
|
+
`;
|
|
32
|
+
const MIGRATIONS_NOT_INITIALIZED = `${yellow(
|
|
33
|
+
"\u25B6 No migrations found!"
|
|
34
|
+
)}
|
|
35
|
+
|
|
36
|
+
To scaffold your migrations folder, run
|
|
37
|
+
${cyan("astro db sync")}
|
|
38
|
+
`;
|
|
39
|
+
const SEED_WRITABLE_IN_PROD_ERROR = (collectionName) => {
|
|
40
|
+
return `${red(
|
|
41
|
+
`Writable tables should not be seeded in production with data().`
|
|
42
|
+
)} You can seed ${bold(
|
|
43
|
+
collectionName
|
|
44
|
+
)} in development mode only using the "mode" flag. See the docs for more: https://www.notion.so/astroinc/astrojs-db-README-dcf6fa10de9a4f528be56cee96e8c054?pvs=4#278aed3fc37e4cec80240d1552ff6ac5`;
|
|
45
|
+
};
|
|
46
|
+
export {
|
|
47
|
+
MIGRATIONS_NOT_INITIALIZED,
|
|
48
|
+
MISSING_PROJECT_ID_ERROR,
|
|
49
|
+
MISSING_SESSION_ID_ERROR,
|
|
50
|
+
SEED_WRITABLE_IN_PROD_ERROR,
|
|
51
|
+
STUDIO_CONFIG_MISSING_CLI_ERROR,
|
|
52
|
+
STUDIO_CONFIG_MISSING_WRITABLE_COLLECTIONS_ERROR,
|
|
53
|
+
UNSAFE_WRITABLE_WARNING
|
|
54
|
+
};
|