edinburgh 0.1.3 → 0.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +450 -218
- package/build/src/datapack.d.ts +138 -0
- package/build/src/datapack.js +684 -0
- package/build/src/datapack.js.map +1 -0
- package/build/src/edinburgh.d.ts +41 -11
- package/build/src/edinburgh.js +163 -43
- package/build/src/edinburgh.js.map +1 -1
- package/build/src/indexes.d.ts +100 -111
- package/build/src/indexes.js +679 -369
- package/build/src/indexes.js.map +1 -1
- package/build/src/migrate-cli.d.ts +20 -0
- package/build/src/migrate-cli.js +122 -0
- package/build/src/migrate-cli.js.map +1 -0
- package/build/src/migrate.d.ts +33 -0
- package/build/src/migrate.js +225 -0
- package/build/src/migrate.js.map +1 -0
- package/build/src/models.d.ts +147 -46
- package/build/src/models.js +322 -268
- package/build/src/models.js.map +1 -1
- package/build/src/types.d.ts +209 -260
- package/build/src/types.js +423 -324
- package/build/src/types.js.map +1 -1
- package/build/src/utils.d.ts +9 -9
- package/build/src/utils.js +32 -9
- package/build/src/utils.js.map +1 -1
- package/package.json +14 -11
- package/src/datapack.ts +726 -0
- package/src/edinburgh.ts +174 -43
- package/src/indexes.ts +722 -380
- package/src/migrate-cli.ts +138 -0
- package/src/migrate.ts +267 -0
- package/src/models.ts +415 -285
- package/src/types.ts +510 -391
- package/src/utils.ts +40 -12
- package/build/src/bytes.d.ts +0 -155
- package/build/src/bytes.js +0 -455
- package/build/src/bytes.js.map +0 -1
- package/src/bytes.ts +0 -500
|
@@ -0,0 +1,138 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* migrate-edinburgh CLI tool
|
|
5
|
+
*
|
|
6
|
+
* Runs database migrations: upgrades all rows to the latest schema version,
|
|
7
|
+
* converts old primary indices, and cleans up orphaned secondary indices.
|
|
8
|
+
*
|
|
9
|
+
* Usage:
|
|
10
|
+
* npx migrate-edinburgh --import ./src/models.ts [options]
|
|
11
|
+
*
|
|
12
|
+
* Options:
|
|
13
|
+
* --import <path> Path to the module that registers all models (required)
|
|
14
|
+
* --db <path> Database directory (default: .edinburgh)
|
|
15
|
+
* --tables <names> Comma-separated list of table names to migrate
|
|
16
|
+
* --batch-size <n>  Reserved; not currently implemented (rows are batched automatically)
|
|
17
|
+
* --no-convert Skip converting old primary indices
|
|
18
|
+
* --no-cleanup Skip deleting orphaned secondary indices
|
|
19
|
+
* --no-upgrade Skip upgrading rows to latest version
|
|
20
|
+
*/
|
|
21
|
+
|
|
22
|
+
import { runMigration, type MigrationOptions } from './migrate.js';
|
|
23
|
+
|
|
24
|
+
function parseArgs(args: string[]): { importPath: string, options: MigrationOptions & { dbDir?: string } } {
|
|
25
|
+
let importPath = '';
|
|
26
|
+
const options: MigrationOptions & { dbDir?: string } = {};
|
|
27
|
+
|
|
28
|
+
for (let i = 0; i < args.length; i++) {
|
|
29
|
+
switch (args[i]) {
|
|
30
|
+
case '--import':
|
|
31
|
+
importPath = args[++i];
|
|
32
|
+
break;
|
|
33
|
+
case '--db':
|
|
34
|
+
options.dbDir = args[++i];
|
|
35
|
+
break;
|
|
36
|
+
case '--tables':
|
|
37
|
+
options.tables = args[++i].split(',').map(s => s.trim());
|
|
38
|
+
break;
|
|
39
|
+
case '--no-convert':
|
|
40
|
+
options.convertOldPrimaries = false;
|
|
41
|
+
break;
|
|
42
|
+
case '--no-cleanup':
|
|
43
|
+
options.deleteOrphanedIndexes = false;
|
|
44
|
+
break;
|
|
45
|
+
case '--no-upgrade':
|
|
46
|
+
options.upgradeVersions = false;
|
|
47
|
+
break;
|
|
48
|
+
default:
|
|
49
|
+
if (args[i].startsWith('-')) {
|
|
50
|
+
console.error(`Unknown option: ${args[i]}`);
|
|
51
|
+
process.exit(1);
|
|
52
|
+
}
|
|
53
|
+
}
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
if (!importPath) {
|
|
57
|
+
console.error('Usage: npx migrate-edinburgh --import <path> [options]');
|
|
58
|
+
console.error(' --import <path> Module that registers all models (required)');
|
|
59
|
+
console.error(' --db <path> Database directory (default: .edinburgh)');
|
|
60
|
+
console.error(' --tables <names> Comma-separated table names');
|
|
61
|
+
console.error(' --no-convert Skip old primary conversion');
|
|
62
|
+
console.error(' --no-cleanup Skip orphaned index cleanup');
|
|
63
|
+
console.error(' --no-upgrade Skip version upgrades');
|
|
64
|
+
process.exit(1);
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
return { importPath, options };
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
async function main() {
|
|
71
|
+
const nodeArgs = process.argv.slice(2);
|
|
72
|
+
const { importPath, options } = parseArgs(nodeArgs);
|
|
73
|
+
|
|
74
|
+
// Initialize DB if specified
|
|
75
|
+
if (options.dbDir) {
|
|
76
|
+
const E = await import('./edinburgh.js');
|
|
77
|
+
E.init(options.dbDir);
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
// Import user's models module (this registers all models)
|
|
81
|
+
const resolvedPath = importPath.startsWith('.') || importPath.startsWith('/')
|
|
82
|
+
? (await import('node:path')).resolve(process.cwd(), importPath)
|
|
83
|
+
: importPath;
|
|
84
|
+
await import(resolvedPath);
|
|
85
|
+
|
|
86
|
+
let lastPhase = '';
|
|
87
|
+
options.onProgress = (info) => {
|
|
88
|
+
if (info.phase !== lastPhase) {
|
|
89
|
+
if (lastPhase) console.log();
|
|
90
|
+
lastPhase = info.phase;
|
|
91
|
+
}
|
|
92
|
+
const suffix = info.table ? ` [${info.table}]` : '';
|
|
93
|
+
process.stdout.write(`\r ${info.phase}: ${info.processed} upgraded${suffix} `);
|
|
94
|
+
};
|
|
95
|
+
|
|
96
|
+
console.log('Starting migration...');
|
|
97
|
+
const result = await runMigration(options);
|
|
98
|
+
console.log('\n');
|
|
99
|
+
|
|
100
|
+
// Report results
|
|
101
|
+
if (Object.keys(result.secondaries).length > 0) {
|
|
102
|
+
console.log('Upgraded rows:');
|
|
103
|
+
for (const [table, count] of Object.entries(result.secondaries)) {
|
|
104
|
+
console.log(` ${table}: ${count}`);
|
|
105
|
+
}
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
if (Object.keys(result.primaries).length > 0) {
|
|
109
|
+
console.log('Converted old primary rows:');
|
|
110
|
+
for (const [table, count] of Object.entries(result.primaries)) {
|
|
111
|
+
console.log(` ${table}: ${count}`);
|
|
112
|
+
}
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
if (Object.keys(result.conversionFailures).length > 0) {
|
|
116
|
+
console.log('Conversion failures:');
|
|
117
|
+
for (const [table, failures] of Object.entries(result.conversionFailures)) {
|
|
118
|
+
for (const [reason, count] of Object.entries(failures)) {
|
|
119
|
+
console.log(` ${table}: ${count} (${reason})`);
|
|
120
|
+
}
|
|
121
|
+
}
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
if (result.orphaned > 0) {
|
|
125
|
+
console.log(`Deleted ${result.orphaned} orphaned index entries`);
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
if (Object.keys(result.secondaries).length === 0 && Object.keys(result.primaries).length === 0 && result.orphaned === 0) {
|
|
129
|
+
console.log('No migration needed - database is up to date.');
|
|
130
|
+
}
|
|
131
|
+
|
|
132
|
+
console.log('Done.');
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
main().catch(e => {
|
|
136
|
+
console.error('Migration failed:', e);
|
|
137
|
+
process.exit(1);
|
|
138
|
+
});
|
package/src/migrate.ts
ADDED
|
@@ -0,0 +1,267 @@
|
|
|
1
|
+
import * as lowlevel from "olmdb/lowlevel";
|
|
2
|
+
import DataPack from "./datapack.js";
|
|
3
|
+
import { modelRegistry, currentTxn, Transaction } from "./models.js";
|
|
4
|
+
import { dbDel, toBuffer } from "./utils.js";
|
|
5
|
+
import { PrimaryIndex } from "./indexes.js";
|
|
6
|
+
import { deserializeType, TypeWrapper } from "./types.js";
|
|
7
|
+
import { transact } from "./edinburgh.js";
|
|
8
|
+
|
|
9
|
+
// Key prefix under which index-definition rows are stored in the DB.
// NOTE(review): assumed to match the prefix used when indexes.ts writes
// definitions — confirm against that module.
const INDEX_ID_PREFIX = -2;
|
|
10
|
+
|
|
11
|
+
/**
 * Options controlling which migration phases {@link runMigration} executes
 * and over which tables. Every phase defaults to enabled.
 */
export interface MigrationOptions {
  /** Limit migration to specific table names; omit to migrate all tables. */
  tables?: string[];
  /** Whether to convert old primary indices for known tables (default: true). */
  convertOldPrimaries?: boolean;
  /** Whether to delete orphaned secondary/unique indices (default: true). */
  deleteOrphanedIndexes?: boolean;
  /** Whether to upgrade rows to the latest version (default: true). */
  upgradeVersions?: boolean;
  /** Progress callback; invoked after each table (or index) completes a phase. */
  onProgress?: (info: ProgressInfo) => void;
}
|
|
23
|
+
|
|
24
|
+
/** Progress snapshot passed to {@link MigrationOptions.onProgress}. */
export interface ProgressInfo {
  /** Current phase: 'secondaries', 'primaries', or 'orphaned'. */
  phase: string;
  /** Rows processed in this phase (cumulative across indices for 'orphaned'). */
  processed: number;
  /** Total rows, when known. NOTE(review): never populated by runMigration as written. */
  total?: number;
  /** Table being migrated, present for the table-scoped phases. */
  table?: string;
}
|
|
30
|
+
|
|
31
|
+
/** Aggregate statistics returned by {@link runMigration}. */
export interface MigrationResult {
  /** Per-table count of secondary-index upgrades applied to out-of-date rows. */
  secondaries: Record<string, number>;
  /** Per-table count of rows converted from old primary indices. */
  primaries: Record<string, number>;
  /** Per-table conversion failure counts keyed by reason ('duplicate_key' or 'error'). */
  conversionFailures: Record<string, Record<string, number>>;
  /** Number of orphaned index entries deleted. */
  orphaned: number;
}
|
|
41
|
+
|
|
42
|
+
/**
 * An index definition decoded from its on-disk row (stored under
 * INDEX_ID_PREFIX), as scanned by runMigration.
 */
interface IndexDef {
  /** Numeric index id; also the key prefix under which the index's rows live. */
  id: number;
  /** Name of the table this index belongs to. */
  tableName: string;
  /** Index kind; 'primary' marks primary indices (see runMigration's filtering). */
  typeName: string;
  /** Names of the indexed fields, in key order. */
  fieldNames: string[];
  /** Deserialized field types, parallel to fieldNames. */
  fieldTypes: TypeWrapper<any>[];
}
|
|
49
|
+
|
|
50
|
+
/**
|
|
51
|
+
* Iterate over all rows for a given index ID prefix in batches,
|
|
52
|
+
* calling processBatch for each row within a transaction.
|
|
53
|
+
*/
|
|
54
|
+
async function forEachRow(
|
|
55
|
+
indexId: number,
|
|
56
|
+
processBatch: (txn: Transaction, key: Uint8Array, value: Uint8Array) => void
|
|
57
|
+
): Promise<void> {
|
|
58
|
+
let done = false;
|
|
59
|
+
let lastKey: Uint8Array | undefined;
|
|
60
|
+
const prefixPack = new DataPack().write(indexId);
|
|
61
|
+
const endBuf = toBuffer(prefixPack.clone(true).increment()!.toUint8Array());
|
|
62
|
+
|
|
63
|
+
while (!done) {
|
|
64
|
+
await transact(() => {
|
|
65
|
+
const txn = currentTxn();
|
|
66
|
+
let startBuf: ArrayBufferLike;
|
|
67
|
+
if (lastKey) {
|
|
68
|
+
const resumePack = new DataPack(lastKey).increment();
|
|
69
|
+
if (!resumePack) { done = true; return; }
|
|
70
|
+
startBuf = toBuffer(resumePack.toUint8Array());
|
|
71
|
+
} else {
|
|
72
|
+
startBuf = toBuffer(prefixPack.toUint8Array());
|
|
73
|
+
}
|
|
74
|
+
const iteratorId = lowlevel.createIterator(txn.id, startBuf, endBuf, false);
|
|
75
|
+
const batchStart = Date.now();
|
|
76
|
+
let batchCount = 0;
|
|
77
|
+
try {
|
|
78
|
+
while (true) {
|
|
79
|
+
const raw = lowlevel.readIterator(iteratorId);
|
|
80
|
+
if (!raw) { done = true; break; }
|
|
81
|
+
const keyBuf = new Uint8Array(raw.key);
|
|
82
|
+
lastKey = keyBuf;
|
|
83
|
+
processBatch(txn, keyBuf, new Uint8Array(raw.value));
|
|
84
|
+
if (++batchCount >= 4096 || Date.now() - batchStart >= 2000) break;
|
|
85
|
+
}
|
|
86
|
+
} finally {
|
|
87
|
+
lowlevel.closeIterator(iteratorId);
|
|
88
|
+
}
|
|
89
|
+
});
|
|
90
|
+
}
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
/**
 * Run database migration: upgrade all rows to the latest schema version,
 * convert old primary indices, and clean up orphaned secondary indices.
 *
 * Phases (each individually switchable via {@link MigrationOptions}):
 *  1. 'secondaries' — bring secondary-index entries of out-of-date rows up to
 *     date (primary rows themselves are left as-is for lazy migration on read).
 *  2. 'primaries'   — rewrite rows stored under old, no-longer-registered
 *     primary indices into the current primary index.
 *  3. 'orphaned'    — delete entries of secondary/unique indices that no
 *     registered model declares anymore.
 *
 * @param options Controls phases, table filtering, and progress reporting.
 * @returns Per-table statistics for each phase.
 */
export async function runMigration(options: MigrationOptions = {}): Promise<MigrationResult> {
  // Ensure any pending model/index inits are completed before building index maps
  await transact(() => {});

  const convertOldPrimaries = options.convertOldPrimaries ?? true;
  const deleteOrphanedIndexes = options.deleteOrphanedIndexes ?? true;
  const upgradeVersions = options.upgradeVersions ?? true;
  const onProgress = options.onProgress;

  const result: MigrationResult = {
    secondaries: {},
    primaries: {},
    conversionFailures: {},
    orphaned: 0,
  };

  // Build maps of known index IDs from the registered models, honoring the
  // optional table filter.
  const knownIndexIds = new Set<number>();
  const primaryByIndexId = new Map<number, { model: typeof modelRegistry[string], primary: PrimaryIndex<any, any> }>();

  for (const model of Object.values(modelRegistry)) {
    if (options.tables && !options.tables.includes(model.tableName)) continue;
    const primary = model._primary;
    knownIndexIds.add(primary._indexId!);
    primaryByIndexId.set(primary._indexId!, { model, primary });
    for (const sec of model._secondaries || []) {
      knownIndexIds.add(sec._indexId!);
    }
  }

  // Scan all index definitions in the DB to find old/orphaned ones.
  // Each definition row's key encodes: prefix, table name, type name, then the
  // indexed field names/types.
  const allIndexDefs: IndexDef[] = [];
  await forEachRow(INDEX_ID_PREFIX, (_txn, keyBuf, valueBuf) => {
    const kb = new DataPack(keyBuf);
    kb.readNumber(); // skip INDEX_ID_PREFIX
    const tableName = kb.readString();
    const typeName = kb.readString();
    const fieldNames: string[] = [];
    const fieldTypes: TypeWrapper<any>[] = [];
    // Read field names and types (may be followed by separator + pk fields for non-primary indexes)
    while (kb.readAvailable()) {
      const name = kb.read();
      if (typeof name !== 'string') break; // 'undefined' separator before pk fields
      fieldNames.push(name);
      fieldTypes.push(deserializeType(kb, 0));
    }
    // The index's numeric id is stored in the row value.
    const id = new DataPack(valueBuf).readNumber();
    allIndexDefs.push({ id, tableName, typeName, fieldNames, fieldTypes });
  });

  // Phase 1: Upgrade existing rows to latest version
  if (upgradeVersions) {
    for (const [indexId, { model, primary }] of primaryByIndexId) {
      // NOTE(review): `upgraded` is mutated inside the transaction callback;
      // if transact() retries a conflicted batch it may over-count — confirm
      // retry semantics of the transaction layer.
      let upgraded = 0;
      const migrateFn = (model as any).migrate as ((record: Record<string, any>) => void) | undefined;
      const secondaries = model._secondaries || [];

      await forEachRow(indexId, (txn, keyBuf, valueBuf) => {
        // Row values start with a schema version number.
        const valuePack = new DataPack(valueBuf);
        const version = valuePack.readNumber();
        if (version === primary._currentVersion) return; // Already current

        const versionInfo = primary._loadVersionInfo(txn.id, version);

        // Deserialize pre-migrate values from key + old-format value
        const record: Record<string, any> = {};
        const keyPack = new DataPack(keyBuf);
        keyPack.readNumber(); // skip indexId
        for (const [name, type] of primary._fieldTypes.entries()) {
          record[name] = type.deserialize(keyPack);
        }
        for (const [name, type] of versionInfo.nonKeyFields.entries()) {
          record[name] = type.deserialize(valuePack);
        }

        // Deep-copy pre-migrate values (if migrate exists), then run migrate
        const preMigrate = migrateFn ? structuredClone(record) : undefined;
        if (migrateFn) migrateFn(record);

        // Handle secondaries (primary is left as-is for lazy migration on read)
        for (const sec of secondaries) {
          if (!versionInfo.secondaryKeys.has(sec._signature!)) {
            // New secondary, write entry
            sec._write(txn, keyBuf, record as any);
            upgraded++;
          } else if (preMigrate) {
            // Existing secondary, update if migrate changed any of its fields
            for (const [field, type] of sec._fieldTypes.entries()) {
              if (!type.equals(preMigrate[field], record[field])) {
                sec._delete(txn, keyBuf, preMigrate as any);
                sec._write(txn, keyBuf, record as any);
                upgraded++;
                break;
              }
            }
          }
        }

      });

      onProgress?.({ phase: 'secondaries', processed: upgraded, table: model.tableName });
      if (upgraded > 0) result.secondaries[model.tableName] = upgraded;
    }
  }

  // Phase 2: Convert old primary indices with known table names
  if (convertOldPrimaries) {
    for (const oldDef of allIndexDefs) {
      if (oldDef.typeName !== 'primary') continue;
      if (knownIndexIds.has(oldDef.id)) continue; // Known index, skip

      const model = modelRegistry[oldDef.tableName];
      if (!model) continue; // Unknown table
      if (options.tables && !options.tables.includes(oldDef.tableName)) continue;

      let converted = 0;
      const failures: Record<string, number> = {};

      await forEachRow(oldDef.id, (txn, keyBuf) => {
        let instance;
        try {
          // Deserialize old key
          const keyPack = new DataPack(keyBuf);
          keyPack.readNumber(); // skip old index id
          const record: Record<string, any> = {};
          for (let i = 0; i < oldDef.fieldNames.length; i++) {
            record[oldDef.fieldNames[i]] = oldDef.fieldTypes[i].deserialize(keyPack);
          }

          // Run migrate
          const migrateFn = (model as any).migrate;
          if (migrateFn) migrateFn(record);

          // _write validates, checks duplicates, writes primary + secondaries
          instance = new (model as any)(record, txn);
          instance._write(txn);
          // Delete the old-format row only after the new one is written.
          dbDel(txn.id, keyBuf);
          converted++;
        } catch (e: any) {
          // Count, but do not abort on, per-row conversion failures.
          if (e.code === 'UNIQUE_CONSTRAINT') {
            failures['duplicate_key'] = (failures['duplicate_key'] || 0) + 1;
          } else {
            failures['error'] = (failures['error'] || 0) + 1;
          }
        } finally {
          // Detach the instance from the transaction's tracked set so it is
          // not written again on commit.
          if (instance) txn.instances.delete(instance);
        }
      });

      onProgress?.({ phase: 'primaries', processed: converted, table: oldDef.tableName });
      if (converted > 0) result.primaries[oldDef.tableName] = converted;
      if (Object.keys(failures).length > 0) {
        result.conversionFailures[oldDef.tableName] = failures;
      }
    }
  }

  // Phase 3: Delete orphaned secondary/unique index entries
  if (deleteOrphanedIndexes) {
    for (const def of allIndexDefs) {
      if (knownIndexIds.has(def.id) || def.typeName === 'primary') continue;
      // NOTE(review): result.orphaned is incremented inside the transaction
      // callback; a retried batch could over-count — confirm retry semantics.
      await forEachRow(def.id, (txn, keyBuf) => {
        dbDel(txn.id, keyBuf);
        result.orphaned++;
      });
      onProgress?.({ phase: 'orphaned', processed: result.orphaned });
    }
  }

  return result;
}
|