better-convex 0.7.2 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/aggregate/index.d.ts +1 -1
- package/dist/aggregate/index.js +1 -1
- package/dist/auth/http/index.d.ts +1 -1
- package/dist/auth/index.d.ts +10 -10
- package/dist/auth/index.js +5 -4
- package/dist/auth/nextjs/index.d.ts +2 -2
- package/dist/auth/nextjs/index.js +2 -2
- package/dist/{caller-factory-D3OuR1eI.js → caller-factory-CCsm4Dut.js} +2 -2
- package/dist/cli.mjs +414 -5
- package/dist/{codegen-Cz1idI3-.mjs → codegen-BS36cYTH.mjs} +88 -5
- package/dist/{create-schema-orm-69VF4CFV.js → create-schema-orm-OcyA0apQ.js} +10 -13
- package/dist/crpc/index.d.ts +2 -2
- package/dist/crpc/index.js +3 -3
- package/dist/customFunctions-RnzME_cJ.js +167 -0
- package/dist/{http-types-BCf2wCgp.d.ts → http-types-BK7FuIcR.d.ts} +1 -1
- package/dist/id-BcBb900m.js +121 -0
- package/dist/orm/index.d.ts +4 -3
- package/dist/orm/index.js +706 -165
- package/dist/plugins/index.d.ts +9 -0
- package/dist/plugins/index.js +3 -0
- package/dist/plugins/ratelimit/index.d.ts +222 -0
- package/dist/plugins/ratelimit/index.js +846 -0
- package/dist/plugins/ratelimit/react/index.d.ts +76 -0
- package/dist/plugins/ratelimit/react/index.js +294 -0
- package/dist/{procedure-caller-CcjtUFvL.d.ts → procedure-caller-DYjpq7rG.d.ts} +4 -19
- package/dist/rsc/index.d.ts +3 -3
- package/dist/rsc/index.js +4 -4
- package/dist/runtime-C0WcYGY0.js +1028 -0
- package/dist/schema-Bx6j2doh.js +204 -0
- package/dist/server/index.d.ts +2 -2
- package/dist/server/index.js +4 -3
- package/dist/{runtime-B9xQFY8W.js → table-B7yzBihE.js} +3 -1088
- package/dist/text-enum-CFdcLUuw.js +30 -0
- package/dist/{types-CIBGEYXq.d.ts → types-f53SgpBL.d.ts} +1 -1
- package/dist/validators-BcQFm1oY.d.ts +88 -0
- package/dist/{customFunctions-CZnCwoR3.js → validators-D_i3BK7v.js} +67 -165
- package/dist/watcher.mjs +1 -1
- package/dist/{where-clause-compiler-CRP-i1Qa.d.ts → where-clause-compiler-BIjTkVVJ.d.ts} +138 -2
- package/package.json +4 -1
- /package/dist/{create-schema-BdZOL6ns.js → create-schema-BsN0jL5S.js} +0 -0
- /package/dist/{error-Be4OcwwD.js → error-CAGGSN5H.js} +0 -0
- /package/dist/{meta-utils-DDVYp9Xf.js → meta-utils-NRyocOSc.js} +0 -0
- /package/dist/{query-context-BDSis9rT.js → query-context-DEUFBhXS.js} +0 -0
- /package/dist/{query-context-DGExXZIV.d.ts → query-context-ji7By8u0.d.ts} +0 -0
- /package/dist/{query-options-B0c1b6pZ.js → query-options-CSCmKYdJ.js} +0 -0
- /package/dist/{transformer-Dh0w2py0.js → transformer-ogg-4d78.js} +0 -0
- /package/dist/{types-DwGkkq2s.d.ts → types-BTb_4BaU.d.ts} +0 -0
- /package/dist/{types-DgwvxKbT.d.ts → types-CM67ko7K.d.ts} +0 -0
package/dist/orm/index.js
CHANGED
|
@@ -1,5 +1,10 @@
|
|
|
1
|
-
import {
|
|
2
|
-
import {
|
|
1
|
+
import { t as DirectAggregate } from "../runtime-C0WcYGY0.js";
|
|
2
|
+
import { C as integer, S as createSystemFields, T as entityKind, _ as rankIndex, a as EnableRLS, b as vectorIndex, c as OrmSchemaOptions, d as TableDeleteConfig, f as TableName, g as index, h as aggregateIndex, i as Columns, l as OrmSchemaPluginTables, m as rlsPolicy, n as deletion, o as OrmContext, p as RlsPolicy, r as Brand, s as OrmSchemaDefinition, t as convexTable, u as RlsPolicies, v as searchIndex, w as ConvexColumnBuilder, x as text, y as uniqueIndex } from "../table-B7yzBihE.js";
|
|
3
|
+
import { a as AGGREGATE_EXTREMA_TABLE, c as aggregatePlugin, i as AGGREGATE_BUCKET_TABLE, l as boolean, n as MIGRATION_STATE_TABLE, o as AGGREGATE_MEMBER_TABLE, r as migrationPlugin, s as AGGREGATE_STATE_TABLE, t as MIGRATION_RUN_TABLE } from "../schema-Bx6j2doh.js";
|
|
4
|
+
import { a as pretendRequired, i as pretend, n as deprecated } from "../validators-D_i3BK7v.js";
|
|
5
|
+
import { a as objectOf, i as json, n as arrayOf, r as custom, t as id } from "../id-BcBb900m.js";
|
|
6
|
+
import { t as textEnum } from "../text-enum-CFdcLUuw.js";
|
|
7
|
+
import { A as ne, C as inArray, D as like, E as isNull, F as notLike, I as or, L as startsWith, M as notBetween, N as notIlike, O as lt, P as notInArray, S as ilike, T as isNotNull, _ as endsWith, a as mergedStream, b as gt, c as isUnsetToken, d as arrayContained, f as arrayContains, g as contains, h as column, i as getIndexFields, j as not, k as lte, l as unsetToken, m as between, n as EmptyStream, o as stream, p as arrayOverlaps, r as QueryStream, s as streamIndexRange, t as getByIdWithOrmQueryFallback, u as and, v as eq, w as isFieldReference, x as gte, y as fieldRef } from "../query-context-DEUFBhXS.js";
|
|
3
8
|
import { v } from "convex/values";
|
|
4
9
|
import { defineSchema as defineSchema$1, internalActionGeneric, internalMutationGeneric } from "convex/server";
|
|
5
10
|
|
|
@@ -33,37 +38,6 @@ function bigint(name) {
|
|
|
33
38
|
return new ConvexBigIntBuilder(name ?? "");
|
|
34
39
|
}
|
|
35
40
|
|
|
36
|
-
//#endregion
|
|
37
|
-
//#region src/orm/builders/boolean.ts
|
|
38
|
-
/**
|
|
39
|
-
* Boolean column builder class
|
|
40
|
-
* Compiles to v.boolean() or v.optional(v.boolean())
|
|
41
|
-
*/
|
|
42
|
-
var ConvexBooleanBuilder = class extends ConvexColumnBuilder {
|
|
43
|
-
static [entityKind] = "ConvexBooleanBuilder";
|
|
44
|
-
constructor(name) {
|
|
45
|
-
super(name, "boolean", "ConvexBoolean");
|
|
46
|
-
}
|
|
47
|
-
/**
|
|
48
|
-
* Expose Convex validator for schema integration
|
|
49
|
-
*/
|
|
50
|
-
get convexValidator() {
|
|
51
|
-
if (this.config.notNull) return v.boolean();
|
|
52
|
-
return v.optional(v.union(v.null(), v.boolean()));
|
|
53
|
-
}
|
|
54
|
-
/**
|
|
55
|
-
* Compile to Convex validator
|
|
56
|
-
* .notNull() → v.boolean()
|
|
57
|
-
* nullable → v.optional(v.boolean())
|
|
58
|
-
*/
|
|
59
|
-
build() {
|
|
60
|
-
return this.convexValidator;
|
|
61
|
-
}
|
|
62
|
-
};
|
|
63
|
-
function boolean(name) {
|
|
64
|
-
return new ConvexBooleanBuilder(name ?? "");
|
|
65
|
-
}
|
|
66
|
-
|
|
67
41
|
//#endregion
|
|
68
42
|
//#region src/orm/builders/bytes.ts
|
|
69
43
|
var ConvexBytesBuilder = class extends ConvexColumnBuilder {
|
|
@@ -115,32 +89,6 @@ function date(nameOrConfig, maybeConfig) {
|
|
|
115
89
|
return new ConvexDateBuilder(name, mode);
|
|
116
90
|
}
|
|
117
91
|
|
|
118
|
-
//#endregion
|
|
119
|
-
//#region src/orm/builders/text-enum.ts
|
|
120
|
-
var ConvexTextEnumBuilder = class extends ConvexColumnBuilder {
|
|
121
|
-
static [entityKind] = "ConvexTextEnumBuilder";
|
|
122
|
-
constructor(name, values) {
|
|
123
|
-
super(name, "string", "ConvexText");
|
|
124
|
-
this.config.values = [...values];
|
|
125
|
-
}
|
|
126
|
-
_enumValidator() {
|
|
127
|
-
const literals = this.config.values.map((value) => v.literal(value));
|
|
128
|
-
if (literals.length === 1) return literals[0];
|
|
129
|
-
return v.union(...literals);
|
|
130
|
-
}
|
|
131
|
-
get convexValidator() {
|
|
132
|
-
const base = this._enumValidator();
|
|
133
|
-
if (this.config.notNull) return base;
|
|
134
|
-
return v.optional(v.union(v.null(), base));
|
|
135
|
-
}
|
|
136
|
-
build() {
|
|
137
|
-
return this.convexValidator;
|
|
138
|
-
}
|
|
139
|
-
};
|
|
140
|
-
function textEnum(values) {
|
|
141
|
-
return new ConvexTextEnumBuilder("", values);
|
|
142
|
-
}
|
|
143
|
-
|
|
144
92
|
//#endregion
|
|
145
93
|
//#region src/orm/builders/timestamp.ts
|
|
146
94
|
var ConvexTimestampBuilder = class extends ConvexColumnBuilder {
|
|
@@ -289,105 +237,6 @@ function check(name, expression) {
|
|
|
289
237
|
return new ConvexCheckBuilder(name, expression);
|
|
290
238
|
}
|
|
291
239
|
|
|
292
|
-
//#endregion
|
|
293
|
-
//#region src/orm/aggregate-index/schema.ts
|
|
294
|
-
const AGGREGATE_BUCKET_TABLE = "aggregate_bucket";
|
|
295
|
-
const AGGREGATE_MEMBER_TABLE = "aggregate_member";
|
|
296
|
-
const AGGREGATE_EXTREMA_TABLE = "aggregate_extrema";
|
|
297
|
-
const AGGREGATE_RANK_TREE_TABLE = "aggregate_rank_tree";
|
|
298
|
-
const AGGREGATE_RANK_NODE_TABLE = "aggregate_rank_node";
|
|
299
|
-
const AGGREGATE_STATE_TABLE = "aggregate_state";
|
|
300
|
-
const countBucketTable = convexTable(AGGREGATE_BUCKET_TABLE, {
|
|
301
|
-
tableKey: text().notNull(),
|
|
302
|
-
indexName: text().notNull(),
|
|
303
|
-
keyHash: text().notNull(),
|
|
304
|
-
keyParts: custom(v.array(v.any())).notNull(),
|
|
305
|
-
count: integer().notNull(),
|
|
306
|
-
sumValues: custom(v.record(v.string(), v.number())).notNull(),
|
|
307
|
-
nonNullCountValues: custom(v.record(v.string(), v.number())).notNull(),
|
|
308
|
-
updatedAt: integer().notNull()
|
|
309
|
-
}, (t) => [index("by_table_index_hash").on(t.tableKey, t.indexName, t.keyHash), index("by_table_index").on(t.tableKey, t.indexName)]);
|
|
310
|
-
const countMemberTable = convexTable(AGGREGATE_MEMBER_TABLE, {
|
|
311
|
-
kind: text().notNull(),
|
|
312
|
-
tableKey: text().notNull(),
|
|
313
|
-
indexName: text().notNull(),
|
|
314
|
-
docId: text().notNull(),
|
|
315
|
-
keyHash: text().notNull(),
|
|
316
|
-
keyParts: custom(v.array(v.any())).notNull(),
|
|
317
|
-
sumValues: custom(v.record(v.string(), v.number())).notNull(),
|
|
318
|
-
nonNullCountValues: custom(v.record(v.string(), v.number())).notNull(),
|
|
319
|
-
extremaValues: custom(v.record(v.string(), v.any())).notNull(),
|
|
320
|
-
rankNamespace: custom(v.any()),
|
|
321
|
-
rankKey: custom(v.any()),
|
|
322
|
-
rankSumValue: integer(),
|
|
323
|
-
updatedAt: integer().notNull()
|
|
324
|
-
}, (t) => [index("by_kind_table_index_doc").on(t.kind, t.tableKey, t.indexName, t.docId), index("by_kind_table_index").on(t.kind, t.tableKey, t.indexName)]);
|
|
325
|
-
const countExtremaTable = convexTable(AGGREGATE_EXTREMA_TABLE, {
|
|
326
|
-
tableKey: text().notNull(),
|
|
327
|
-
indexName: text().notNull(),
|
|
328
|
-
keyHash: text().notNull(),
|
|
329
|
-
fieldName: text().notNull(),
|
|
330
|
-
valueHash: text().notNull(),
|
|
331
|
-
value: custom(v.any()).notNull(),
|
|
332
|
-
sortKey: text().notNull(),
|
|
333
|
-
count: integer().notNull(),
|
|
334
|
-
updatedAt: integer().notNull()
|
|
335
|
-
}, (t) => [
|
|
336
|
-
index("by_table_index").on(t.tableKey, t.indexName),
|
|
337
|
-
index("by_table_index_hash_field_value").on(t.tableKey, t.indexName, t.keyHash, t.fieldName, t.valueHash),
|
|
338
|
-
index("by_table_index_hash_field_sort").on(t.tableKey, t.indexName, t.keyHash, t.fieldName, t.sortKey)
|
|
339
|
-
]);
|
|
340
|
-
const countStateTable = convexTable(AGGREGATE_STATE_TABLE, {
|
|
341
|
-
kind: text().notNull(),
|
|
342
|
-
tableKey: text().notNull(),
|
|
343
|
-
indexName: text().notNull(),
|
|
344
|
-
keyDefinitionHash: text().notNull(),
|
|
345
|
-
metricDefinitionHash: text().notNull(),
|
|
346
|
-
status: text().notNull(),
|
|
347
|
-
cursor: text(),
|
|
348
|
-
processed: integer().notNull(),
|
|
349
|
-
startedAt: integer().notNull(),
|
|
350
|
-
updatedAt: integer().notNull(),
|
|
351
|
-
completedAt: integer(),
|
|
352
|
-
lastError: text()
|
|
353
|
-
}, (t) => [index("by_kind_table_index").on(t.kind, t.tableKey, t.indexName), index("by_kind_status").on(t.kind, t.status)]);
|
|
354
|
-
const aggregateCounterValidator = v.object({
|
|
355
|
-
count: v.number(),
|
|
356
|
-
sum: v.number()
|
|
357
|
-
});
|
|
358
|
-
const aggregateItemValidator = v.object({
|
|
359
|
-
k: v.any(),
|
|
360
|
-
v: v.any(),
|
|
361
|
-
s: v.number()
|
|
362
|
-
});
|
|
363
|
-
const rankTreeTable = convexTable(AGGREGATE_RANK_TREE_TABLE, {
|
|
364
|
-
aggregateName: text().notNull(),
|
|
365
|
-
maxNodeSize: integer().notNull(),
|
|
366
|
-
namespace: custom(v.any()),
|
|
367
|
-
root: id(AGGREGATE_RANK_NODE_TABLE).notNull()
|
|
368
|
-
}, (tree) => [index("by_namespace").on(tree.namespace), index("by_aggregate_name").on(tree.aggregateName)]);
|
|
369
|
-
const rankNodeTable = convexTable(AGGREGATE_RANK_NODE_TABLE, {
|
|
370
|
-
aggregate: custom(aggregateCounterValidator),
|
|
371
|
-
items: custom(v.array(aggregateItemValidator)).notNull(),
|
|
372
|
-
subtrees: custom(v.array(v.string())).notNull()
|
|
373
|
-
});
|
|
374
|
-
const aggregateStorageTables = {
|
|
375
|
-
[AGGREGATE_BUCKET_TABLE]: countBucketTable,
|
|
376
|
-
[AGGREGATE_MEMBER_TABLE]: countMemberTable,
|
|
377
|
-
[AGGREGATE_EXTREMA_TABLE]: countExtremaTable,
|
|
378
|
-
[AGGREGATE_RANK_TREE_TABLE]: rankTreeTable,
|
|
379
|
-
[AGGREGATE_RANK_NODE_TABLE]: rankNodeTable,
|
|
380
|
-
[AGGREGATE_STATE_TABLE]: countStateTable
|
|
381
|
-
};
|
|
382
|
-
function injectAggregateStorageTables(schema) {
|
|
383
|
-
const merged = { ...schema };
|
|
384
|
-
for (const [tableName, tableDef] of Object.entries(aggregateStorageTables)) {
|
|
385
|
-
if (tableName in schema && schema[tableName] !== tableDef) throw new Error(`defineSchema cannot inject internal table '${tableName}' because the name is already in use.`);
|
|
386
|
-
merged[tableName] = tableDef;
|
|
387
|
-
}
|
|
388
|
-
return merged;
|
|
389
|
-
}
|
|
390
|
-
|
|
391
240
|
//#endregion
|
|
392
241
|
//#region src/orm/aggregate-index/runtime.ts
|
|
393
242
|
const UNDEFINED_SENTINEL = "__betterConvexUndefined";
|
|
@@ -8924,6 +8773,624 @@ function isOne(relation) {
|
|
|
8924
8773
|
return relation.relationType === "one";
|
|
8925
8774
|
}
|
|
8926
8775
|
|
|
8776
|
+
//#endregion
|
|
8777
|
+
//#region src/orm/migrations/definitions.ts
|
|
8778
|
+
const MIGRATION_ID_RE = /^[a-zA-Z0-9_:-]+$/;
|
|
8779
|
+
const FUNCTION_SOURCE_WHITESPACE_RE = /\s+/g;
|
|
8780
|
+
const FUNCTION_SOURCE_PUNCTUATION_SPACE_RE = /\s*([{}();,:])\s*/g;
|
|
8781
|
+
function defineMigration(migration) {
|
|
8782
|
+
validateMigrationId(migration.id);
|
|
8783
|
+
validateMigrationStep("up", migration.up);
|
|
8784
|
+
if (migration.down) validateMigrationStep("down", migration.down);
|
|
8785
|
+
return migration;
|
|
8786
|
+
}
|
|
8787
|
+
function defineMigrationSet(migrations) {
|
|
8788
|
+
const normalized = [...migrations].map((migration) => {
|
|
8789
|
+
const defined = defineMigration(migration);
|
|
8790
|
+
return {
|
|
8791
|
+
...defined,
|
|
8792
|
+
checksum: defined.checksum ?? computeMigrationChecksum(defined)
|
|
8793
|
+
};
|
|
8794
|
+
});
|
|
8795
|
+
normalized.sort((a, b) => a.id.localeCompare(b.id));
|
|
8796
|
+
const byId = {};
|
|
8797
|
+
for (const migration of normalized) {
|
|
8798
|
+
if (byId[migration.id]) throw new Error(`defineMigrationSet received duplicate migration id '${migration.id}'.`);
|
|
8799
|
+
byId[migration.id] = migration;
|
|
8800
|
+
}
|
|
8801
|
+
return {
|
|
8802
|
+
migrations: normalized,
|
|
8803
|
+
ids: normalized.map((migration) => migration.id),
|
|
8804
|
+
byId
|
|
8805
|
+
};
|
|
8806
|
+
}
|
|
8807
|
+
function detectMigrationDrift(params) {
|
|
8808
|
+
const { migrationSet, appliedState } = params;
|
|
8809
|
+
const issues = [];
|
|
8810
|
+
for (const [migrationId, state] of Object.entries(appliedState)) {
|
|
8811
|
+
if (!state.applied) continue;
|
|
8812
|
+
const migration = migrationSet.byId[migrationId];
|
|
8813
|
+
if (!migration) {
|
|
8814
|
+
issues.push({
|
|
8815
|
+
kind: "missing_from_manifest",
|
|
8816
|
+
migrationId,
|
|
8817
|
+
message: `Applied migration '${migrationId}' is missing from the current migration manifest.`
|
|
8818
|
+
});
|
|
8819
|
+
continue;
|
|
8820
|
+
}
|
|
8821
|
+
if (state.checksum && state.checksum !== migration.checksum) issues.push({
|
|
8822
|
+
kind: "checksum_mismatch",
|
|
8823
|
+
migrationId,
|
|
8824
|
+
expectedChecksum: migration.checksum,
|
|
8825
|
+
actualChecksum: state.checksum,
|
|
8826
|
+
message: `Applied migration '${migrationId}' checksum drift detected.`
|
|
8827
|
+
});
|
|
8828
|
+
}
|
|
8829
|
+
return issues;
|
|
8830
|
+
}
|
|
8831
|
+
function buildMigrationPlan(params) {
|
|
8832
|
+
const { direction, migrationSet, appliedState, steps, to } = params;
|
|
8833
|
+
if (direction === "up") return {
|
|
8834
|
+
direction,
|
|
8835
|
+
migrations: migrationSet.migrations.filter((migration) => !appliedState[migration.id]?.applied)
|
|
8836
|
+
};
|
|
8837
|
+
if (steps !== void 0 && to !== void 0) throw new Error("Use either down steps or down to, not both.");
|
|
8838
|
+
const appliedInOrder = migrationSet.migrations.filter((migration) => appliedState[migration.id]?.applied);
|
|
8839
|
+
const ensureDownSteps = (selected) => {
|
|
8840
|
+
const missingDown = selected.find((migration) => !migration.down);
|
|
8841
|
+
if (missingDown) throw new Error(`Cannot execute down migration for '${missingDown.id}': missing down migration handler.`);
|
|
8842
|
+
return selected;
|
|
8843
|
+
};
|
|
8844
|
+
if (to) {
|
|
8845
|
+
const targetIndex = migrationSet.ids.indexOf(to);
|
|
8846
|
+
if (targetIndex === -1) throw new Error(`Unknown migration id '${to}' for down --to.`);
|
|
8847
|
+
return {
|
|
8848
|
+
direction,
|
|
8849
|
+
migrations: ensureDownSteps(appliedInOrder.filter((migration) => {
|
|
8850
|
+
return migrationSet.ids.indexOf(migration.id) > targetIndex;
|
|
8851
|
+
})).reverse()
|
|
8852
|
+
};
|
|
8853
|
+
}
|
|
8854
|
+
const resolvedSteps = steps ?? 1;
|
|
8855
|
+
if (!Number.isInteger(resolvedSteps) || resolvedSteps < 1) throw new Error("Down steps must be a positive integer.");
|
|
8856
|
+
return {
|
|
8857
|
+
direction,
|
|
8858
|
+
migrations: ensureDownSteps(appliedInOrder.slice(-resolvedSteps).reverse())
|
|
8859
|
+
};
|
|
8860
|
+
}
|
|
8861
|
+
function validateMigrationId(id) {
|
|
8862
|
+
if (!id || typeof id !== "string") throw new Error("Migration id must be a non-empty string.");
|
|
8863
|
+
if (!MIGRATION_ID_RE.test(id)) throw new Error(`Migration id '${id}' is invalid. Use alphanumeric characters, '_' ':' or '-'.`);
|
|
8864
|
+
}
|
|
8865
|
+
function validateMigrationStep(direction, step) {
|
|
8866
|
+
if (!step || typeof step !== "object") throw new Error(`Migration ${direction} step must be an object.`);
|
|
8867
|
+
if (!step.table || typeof step.table !== "string") throw new Error(`Migration ${direction} step.table must be a string.`);
|
|
8868
|
+
if (typeof step.migrateOne !== "function") throw new Error(`Migration ${direction} step.migrateOne must be a function.`);
|
|
8869
|
+
if (step.batchSize !== void 0 && (!Number.isInteger(step.batchSize) || step.batchSize < 1)) throw new Error(`Migration ${direction} step.batchSize must be a positive integer.`);
|
|
8870
|
+
if (step.writeMode !== void 0 && step.writeMode !== "safe_bypass" && step.writeMode !== "normal") throw new Error(`Migration ${direction} step.writeMode must be 'safe_bypass' or 'normal'.`);
|
|
8871
|
+
}
|
|
8872
|
+
function computeMigrationChecksum(migration) {
|
|
8873
|
+
return simpleStableHash(JSON.stringify({
|
|
8874
|
+
id: migration.id,
|
|
8875
|
+
name: migration.name ?? null,
|
|
8876
|
+
description: migration.description ?? null,
|
|
8877
|
+
up: serializeStep(migration.up),
|
|
8878
|
+
down: migration.down ? serializeStep(migration.down) : null
|
|
8879
|
+
}));
|
|
8880
|
+
}
|
|
8881
|
+
function serializeStep(step) {
|
|
8882
|
+
return {
|
|
8883
|
+
table: step.table,
|
|
8884
|
+
batchSize: step.batchSize ?? null,
|
|
8885
|
+
writeMode: step.writeMode ?? "safe_bypass",
|
|
8886
|
+
source: normalizeFunctionSource(step.migrateOne)
|
|
8887
|
+
};
|
|
8888
|
+
}
|
|
8889
|
+
function normalizeFunctionSource(fn) {
|
|
8890
|
+
return fn.toString().replace(FUNCTION_SOURCE_WHITESPACE_RE, " ").replace(FUNCTION_SOURCE_PUNCTUATION_SPACE_RE, "$1").trim();
|
|
8891
|
+
}
|
|
8892
|
+
function simpleStableHash(value) {
|
|
8893
|
+
let hashA = 2166136261;
|
|
8894
|
+
let hashB = 16777619;
|
|
8895
|
+
for (let i = 0; i < value.length; i += 1) {
|
|
8896
|
+
const code = value.charCodeAt(i);
|
|
8897
|
+
hashA ^= code;
|
|
8898
|
+
hashA = Math.imul(hashA, 16777619);
|
|
8899
|
+
hashB ^= code + i;
|
|
8900
|
+
hashB = Math.imul(hashB, 2246822507);
|
|
8901
|
+
}
|
|
8902
|
+
return `m_${(hashA >>> 0).toString(16).padStart(8, "0")}${(hashB >>> 0).toString(16).padStart(8, "0")}`;
|
|
8903
|
+
}
|
|
8904
|
+
|
|
8905
|
+
//#endregion
|
|
8906
|
+
//#region src/orm/migrations/runtime.ts
|
|
8907
|
+
const DEFAULT_BATCH_SIZE = 128;
|
|
8908
|
+
function createMigrationHandlers(params) {
|
|
8909
|
+
const { schema, migrations, getOrm, getChunkRef } = params;
|
|
8910
|
+
const knownTables = new Set(Object.values(schema).map((tableConfig) => tableConfig.name));
|
|
8911
|
+
const run = async (ctx, args = {}) => {
|
|
8912
|
+
if (!migrations || migrations.migrations.length === 0) return {
|
|
8913
|
+
status: "noop",
|
|
8914
|
+
reason: "no_migrations_registered"
|
|
8915
|
+
};
|
|
8916
|
+
const direction = parseDirection(args.direction);
|
|
8917
|
+
const dryRun = args.dryRun === true;
|
|
8918
|
+
const allowDrift = args.allowDrift === true;
|
|
8919
|
+
const restart = args.restart === true;
|
|
8920
|
+
const batchSize = parseOptionalPositiveInteger(args.batchSize, "batchSize");
|
|
8921
|
+
const steps = parseOptionalPositiveInteger(args.steps, "steps");
|
|
8922
|
+
const to = parseOptionalString(args.to, "to");
|
|
8923
|
+
const stateRows = await getAllStateRows(ctx.db);
|
|
8924
|
+
const appliedState = toAppliedStateMap(stateRows);
|
|
8925
|
+
const drift = detectMigrationDrift({
|
|
8926
|
+
migrationSet: migrations,
|
|
8927
|
+
appliedState
|
|
8928
|
+
});
|
|
8929
|
+
if (drift.length > 0 && !allowDrift) return {
|
|
8930
|
+
status: "drift_blocked",
|
|
8931
|
+
direction,
|
|
8932
|
+
drift
|
|
8933
|
+
};
|
|
8934
|
+
const plan = buildMigrationPlan({
|
|
8935
|
+
direction,
|
|
8936
|
+
migrationSet: migrations,
|
|
8937
|
+
appliedState,
|
|
8938
|
+
steps,
|
|
8939
|
+
to
|
|
8940
|
+
});
|
|
8941
|
+
if (plan.migrations.length === 0) return {
|
|
8942
|
+
status: "noop",
|
|
8943
|
+
direction,
|
|
8944
|
+
drift,
|
|
8945
|
+
plan: []
|
|
8946
|
+
};
|
|
8947
|
+
for (const migration of plan.migrations) {
|
|
8948
|
+
const step = getStepForDirection(migration, direction);
|
|
8949
|
+
if (!knownTables.has(step.table)) throw new Error(`Migration '${migration.id}' references unknown table '${step.table}'.`);
|
|
8950
|
+
}
|
|
8951
|
+
if (dryRun) return {
|
|
8952
|
+
status: "dry_run",
|
|
8953
|
+
direction,
|
|
8954
|
+
drift,
|
|
8955
|
+
plan: plan.migrations.map((migration) => migration.id)
|
|
8956
|
+
};
|
|
8957
|
+
const activeRun = await getActiveRun(ctx.db);
|
|
8958
|
+
if (activeRun) return {
|
|
8959
|
+
status: "running",
|
|
8960
|
+
runId: activeRun.runId
|
|
8961
|
+
};
|
|
8962
|
+
const now = Date.now();
|
|
8963
|
+
const runId = createRunId(now);
|
|
8964
|
+
await ctx.db.insert(MIGRATION_RUN_TABLE, {
|
|
8965
|
+
runId,
|
|
8966
|
+
direction,
|
|
8967
|
+
status: "running",
|
|
8968
|
+
dryRun: false,
|
|
8969
|
+
allowDrift,
|
|
8970
|
+
migrationIds: plan.migrations.map((migration) => migration.id),
|
|
8971
|
+
currentIndex: 0,
|
|
8972
|
+
startedAt: now,
|
|
8973
|
+
updatedAt: now,
|
|
8974
|
+
cancelRequested: false
|
|
8975
|
+
});
|
|
8976
|
+
const stateById = new Map(stateRows.map((row) => [row.migrationId, row]));
|
|
8977
|
+
for (const migration of plan.migrations) {
|
|
8978
|
+
const writeMode = getStepForDirection(migration, direction).writeMode ?? "safe_bypass";
|
|
8979
|
+
const existing = stateById.get(migration.id);
|
|
8980
|
+
const resetProgress = restart || existing?.direction !== direction;
|
|
8981
|
+
if (existing) await ctx.db.patch(existing._id, cleanUndefined({
|
|
8982
|
+
checksum: migration.checksum,
|
|
8983
|
+
status: "pending",
|
|
8984
|
+
direction,
|
|
8985
|
+
runId,
|
|
8986
|
+
cursor: resetProgress ? null : existing.cursor ?? null,
|
|
8987
|
+
processed: resetProgress ? 0 : existing.processed ?? 0,
|
|
8988
|
+
startedAt: now,
|
|
8989
|
+
updatedAt: now,
|
|
8990
|
+
completedAt: null,
|
|
8991
|
+
lastError: null,
|
|
8992
|
+
writeMode
|
|
8993
|
+
}));
|
|
8994
|
+
else await ctx.db.insert(MIGRATION_STATE_TABLE, {
|
|
8995
|
+
migrationId: migration.id,
|
|
8996
|
+
checksum: migration.checksum,
|
|
8997
|
+
applied: direction === "down",
|
|
8998
|
+
status: "pending",
|
|
8999
|
+
direction,
|
|
9000
|
+
runId,
|
|
9001
|
+
cursor: null,
|
|
9002
|
+
processed: 0,
|
|
9003
|
+
startedAt: now,
|
|
9004
|
+
updatedAt: now,
|
|
9005
|
+
completedAt: null,
|
|
9006
|
+
lastError: null,
|
|
9007
|
+
writeMode
|
|
9008
|
+
});
|
|
9009
|
+
}
|
|
9010
|
+
if (ctx.scheduler) {
|
|
9011
|
+
const chunkRef = getChunkRef();
|
|
9012
|
+
if (chunkRef) {
|
|
9013
|
+
const chunkArgs = { runId };
|
|
9014
|
+
if (batchSize !== void 0) chunkArgs.batchSize = batchSize;
|
|
9015
|
+
await ctx.scheduler.runAfter(0, chunkRef, chunkArgs);
|
|
9016
|
+
return {
|
|
9017
|
+
status: "running",
|
|
9018
|
+
direction,
|
|
9019
|
+
runId,
|
|
9020
|
+
plan: plan.migrations.map((migration) => migration.id)
|
|
9021
|
+
};
|
|
9022
|
+
}
|
|
9023
|
+
}
|
|
9024
|
+
const inlineResult = await chunk(ctx, {
|
|
9025
|
+
runId,
|
|
9026
|
+
batchSize
|
|
9027
|
+
});
|
|
9028
|
+
return {
|
|
9029
|
+
status: "running",
|
|
9030
|
+
direction,
|
|
9031
|
+
runId,
|
|
9032
|
+
plan: plan.migrations.map((migration) => migration.id),
|
|
9033
|
+
inlineResult
|
|
9034
|
+
};
|
|
9035
|
+
};
|
|
9036
|
+
const chunk = async (ctx, args) => {
|
|
9037
|
+
const runId = parseRequiredString(args.runId, "runId");
|
|
9038
|
+
const batchSize = parseOptionalPositiveInteger(args.batchSize, "batchSize");
|
|
9039
|
+
const runRow = await getRunById(ctx.db, runId);
|
|
9040
|
+
if (!runRow) return {
|
|
9041
|
+
status: "missing",
|
|
9042
|
+
runId
|
|
9043
|
+
};
|
|
9044
|
+
if (runRow.status !== "running") return {
|
|
9045
|
+
status: runRow.status,
|
|
9046
|
+
runId
|
|
9047
|
+
};
|
|
9048
|
+
if (runRow.cancelRequested) {
|
|
9049
|
+
await markRunCanceled(ctx.db, runRow);
|
|
9050
|
+
return {
|
|
9051
|
+
status: "canceled",
|
|
9052
|
+
runId
|
|
9053
|
+
};
|
|
9054
|
+
}
|
|
9055
|
+
const migrationId = runRow.migrationIds[runRow.currentIndex];
|
|
9056
|
+
if (!migrationId) {
|
|
9057
|
+
await markRunCompleted(ctx.db, runRow);
|
|
9058
|
+
return {
|
|
9059
|
+
status: "completed",
|
|
9060
|
+
runId
|
|
9061
|
+
};
|
|
9062
|
+
}
|
|
9063
|
+
const migration = migrations?.byId[migrationId];
|
|
9064
|
+
if (!migration) {
|
|
9065
|
+
await markRunFailed(ctx.db, runRow, `Migration '${migrationId}' is missing from registry.`);
|
|
9066
|
+
return {
|
|
9067
|
+
status: "failed",
|
|
9068
|
+
runId
|
|
9069
|
+
};
|
|
9070
|
+
}
|
|
9071
|
+
const direction = parseDirection(runRow.direction);
|
|
9072
|
+
const step = getStepForDirection(migration, direction);
|
|
9073
|
+
const resolvedBatchSize = batchSize ?? step.batchSize ?? DEFAULT_BATCH_SIZE;
|
|
9074
|
+
const stateRow = await getOrCreateStateRow(ctx.db, migration, direction, step);
|
|
9075
|
+
const cursor = stateRow.cursor ?? null;
|
|
9076
|
+
try {
|
|
9077
|
+
const page = await ctx.db.query(step.table).paginate({
|
|
9078
|
+
cursor,
|
|
9079
|
+
numItems: resolvedBatchSize
|
|
9080
|
+
});
|
|
9081
|
+
const docs = Array.isArray(page?.page) ? page.page : [];
|
|
9082
|
+
const orm = getOrm(ctx);
|
|
9083
|
+
const writeMode = step.writeMode ?? "safe_bypass";
|
|
9084
|
+
let processedInBatch = 0;
|
|
9085
|
+
for (const doc of docs) {
|
|
9086
|
+
const migrationCtxBase = {
|
|
9087
|
+
db: ctx.db,
|
|
9088
|
+
migrationId,
|
|
9089
|
+
runId,
|
|
9090
|
+
direction,
|
|
9091
|
+
dryRun: false,
|
|
9092
|
+
writeMode
|
|
9093
|
+
};
|
|
9094
|
+
const result = await runWithWriteMode(orm, writeMode, (resolvedOrm) => step.migrateOne({
|
|
9095
|
+
...migrationCtxBase,
|
|
9096
|
+
orm: resolvedOrm
|
|
9097
|
+
}, doc));
|
|
9098
|
+
if (isPatchPayload(result) && hasDocId(doc)) await ctx.db.patch(doc._id, result);
|
|
9099
|
+
processedInBatch += 1;
|
|
9100
|
+
}
|
|
9101
|
+
const now = Date.now();
|
|
9102
|
+
const isDone = Boolean(page?.isDone);
|
|
9103
|
+
const nextCursor = isDone ? null : page?.continueCursor ?? null;
|
|
9104
|
+
const nextProcessed = (stateRow.processed ?? 0) + processedInBatch;
|
|
9105
|
+
if (isDone) {
|
|
9106
|
+
await ctx.db.patch(stateRow._id, cleanUndefined({
|
|
9107
|
+
status: "completed",
|
|
9108
|
+
applied: direction === "up",
|
|
9109
|
+
cursor: null,
|
|
9110
|
+
processed: nextProcessed,
|
|
9111
|
+
completedAt: now,
|
|
9112
|
+
updatedAt: now,
|
|
9113
|
+
lastError: null,
|
|
9114
|
+
runId,
|
|
9115
|
+
direction,
|
|
9116
|
+
writeMode
|
|
9117
|
+
}));
|
|
9118
|
+
const nextIndex = runRow.currentIndex + 1;
|
|
9119
|
+
const done = nextIndex >= runRow.migrationIds.length;
|
|
9120
|
+
await ctx.db.patch(runRow._id, cleanUndefined({
|
|
9121
|
+
currentIndex: nextIndex,
|
|
9122
|
+
status: done ? "completed" : "running",
|
|
9123
|
+
updatedAt: now,
|
|
9124
|
+
completedAt: done ? now : null,
|
|
9125
|
+
lastError: null
|
|
9126
|
+
}));
|
|
9127
|
+
if (!done && ctx.scheduler) {
|
|
9128
|
+
const chunkRef = getChunkRef();
|
|
9129
|
+
if (chunkRef) await ctx.scheduler.runAfter(0, chunkRef, {
|
|
9130
|
+
runId,
|
|
9131
|
+
batchSize: resolvedBatchSize
|
|
9132
|
+
});
|
|
9133
|
+
}
|
|
9134
|
+
return {
|
|
9135
|
+
status: done ? "completed" : "running",
|
|
9136
|
+
runId,
|
|
9137
|
+
migrationId,
|
|
9138
|
+
processedInBatch,
|
|
9139
|
+
processed: nextProcessed,
|
|
9140
|
+
currentIndex: nextIndex,
|
|
9141
|
+
total: runRow.migrationIds.length
|
|
9142
|
+
};
|
|
9143
|
+
}
|
|
9144
|
+
await ctx.db.patch(stateRow._id, cleanUndefined({
|
|
9145
|
+
status: "running",
|
|
9146
|
+
cursor: nextCursor,
|
|
9147
|
+
processed: nextProcessed,
|
|
9148
|
+
updatedAt: now,
|
|
9149
|
+
runId,
|
|
9150
|
+
direction,
|
|
9151
|
+
writeMode
|
|
9152
|
+
}));
|
|
9153
|
+
await ctx.db.patch(runRow._id, cleanUndefined({
|
|
9154
|
+
status: "running",
|
|
9155
|
+
updatedAt: now
|
|
9156
|
+
}));
|
|
9157
|
+
if (ctx.scheduler) {
|
|
9158
|
+
const chunkRef = getChunkRef();
|
|
9159
|
+
if (chunkRef) await ctx.scheduler.runAfter(0, chunkRef, {
|
|
9160
|
+
runId,
|
|
9161
|
+
batchSize: resolvedBatchSize
|
|
9162
|
+
});
|
|
9163
|
+
}
|
|
9164
|
+
return {
|
|
9165
|
+
status: "running",
|
|
9166
|
+
runId,
|
|
9167
|
+
migrationId,
|
|
9168
|
+
processedInBatch,
|
|
9169
|
+
processed: nextProcessed,
|
|
9170
|
+
cursor: nextCursor
|
|
9171
|
+
};
|
|
9172
|
+
} catch (error) {
|
|
9173
|
+
const message = error.message || String(error);
|
|
9174
|
+
await ctx.db.patch(stateRow._id, cleanUndefined({
|
|
9175
|
+
status: "failed",
|
|
9176
|
+
lastError: message,
|
|
9177
|
+
updatedAt: Date.now()
|
|
9178
|
+
}));
|
|
9179
|
+
await markRunFailed(ctx.db, runRow, message);
|
|
9180
|
+
return {
|
|
9181
|
+
status: "failed",
|
|
9182
|
+
runId,
|
|
9183
|
+
migrationId,
|
|
9184
|
+
error: message
|
|
9185
|
+
};
|
|
9186
|
+
}
|
|
9187
|
+
};
|
|
9188
|
+
// Report overall migration status: whether a run is currently active,
// recent run history, per-migration state rows, the pending "up" plan,
// and any drift detected between registered migrations and applied state.
const status = async (ctx, args = {}) => {
  // No migration set registered on this ORM instance: nothing to report.
  if (!migrations) return {
    status: "noop",
    reason: "no_migrations_registered"
  };
  // Optional caller controls: cap on run history (default 25) or a single
  // runId to narrow the report to.
  const limit = parseOptionalPositiveInteger(args.limit, "limit") ?? 25;
  const runId = parseOptionalString(args.runId, "runId");
  const stateRows = await getAllStateRows(ctx.db);
  // Copy before sorting (sort mutates); most recently started runs first.
  const sortedRuns = [...await getAllRunRows(ctx.db)].sort((left, right) => right.startedAt - left.startedAt);
  const selectedRuns = runId ? sortedRuns.filter((entry) => entry.runId === runId).slice(0, 1) : sortedRuns.slice(0, limit);
  const activeRun = sortedRuns.find((entry) => entry.status === "running") ?? null;
  const appliedState = toAppliedStateMap(stateRows);
  // NOTE(review): presumably compares registered migration checksums against
  // the recorded applied state — confirm in detectMigrationDrift.
  const drift = detectMigrationDrift({
    migrationSet: migrations,
    appliedState
  });
  const pendingUp = buildMigrationPlan({
    direction: "up",
    migrationSet: migrations,
    appliedState
  });
  return {
    status: activeRun ? "running" : "idle",
    activeRun,
    runs: selectedRuns,
    // Project state rows to a stable public shape, normalizing missing
    // optional fields to null.
    migrations: stateRows.map((row) => ({
      migrationId: row.migrationId,
      checksum: row.checksum,
      applied: row.applied,
      status: row.status,
      direction: row.direction ?? null,
      runId: row.runId ?? null,
      cursor: row.cursor ?? null,
      processed: row.processed,
      updatedAt: row.updatedAt,
      startedAt: row.startedAt ?? null,
      completedAt: row.completedAt ?? null,
      lastError: row.lastError ?? null,
      writeMode: row.writeMode
    })),
    pending: pendingUp.migrations.map((migration) => migration.id),
    drift
  };
};
|
|
9232
|
+
// Request cancellation of a migration run (the active run by default, or a
// specific run when args.runId is provided). Returns a status descriptor;
// never throws for "nothing to cancel" situations.
const cancel = async (ctx, args = {}) => {
  if (!migrations) {
    return { status: "noop", reason: "no_migrations_registered" };
  }
  const requestedRunId = parseOptionalString(args.runId, "runId");
  const runRow = requestedRunId
    ? await getRunById(ctx.db, requestedRunId)
    : await getActiveRun(ctx.db);
  if (!runRow) {
    return { status: "noop", reason: "no_active_run" };
  }
  if (runRow.status !== "running") {
    return {
      status: "noop",
      reason: "run_not_running",
      runId: runRow.runId,
      runStatus: runRow.status
    };
  }
  // Persist the cancel request on the run row first so a concurrently
  // executing chunk can observe it.
  const now = Date.now();
  await ctx.db.patch(runRow._id, cleanUndefined({
    cancelRequested: true,
    updatedAt: now
  }));
  // Without both a scheduler and a chunk function reference there is no
  // background worker to honor the flag — finalize cancellation right away.
  const chunkRef = ctx.scheduler ? getChunkRef() : void 0;
  if (!chunkRef) {
    await markRunCanceled(ctx.db, runRow);
    return { status: "canceled", runId: runRow.runId };
  }
  return { status: "cancel_requested", runId: runRow.runId };
};
|
|
9266
|
+
return {
|
|
9267
|
+
run,
|
|
9268
|
+
chunk,
|
|
9269
|
+
status,
|
|
9270
|
+
cancel
|
|
9271
|
+
};
|
|
9272
|
+
}
|
|
9273
|
+
// Resolve the step implementation matching the requested direction.
// "up" steps are mandatory; "down" steps are optional and missing ones
// are surfaced as an error naming the offending migration.
function getStepForDirection(migration, direction) {
  if (direction === "up") return migration.up;
  const downStep = migration.down;
  if (!downStep) throw new Error(`Migration '${migration.id}' is missing down migration.`);
  return downStep;
}
|
|
9278
|
+
// Build a run identifier from the start timestamp plus a random base-36
// suffix so runs started in the same millisecond remain distinct.
// Note: Math.random is fine here — the id is a correlation key, not a secret.
function createRunId(now) {
  const suffix = Math.random().toString(36).slice(2, 10);
  return `mr_${now}_${suffix}`;
}
|
|
9281
|
+
// Normalize a user-supplied direction argument. Absent values default to
// "up"; anything other than "up"/"down" is rejected.
function parseDirection(value) {
  if (value === void 0 || value === null) return "up";
  switch (value) {
    case "up":
    case "down":
      return value;
    default:
      throw new Error("Migration direction must be either 'up' or 'down'.");
  }
}
|
|
9286
|
+
// Validate an optional positive-integer argument. Absent (undefined/null)
// values yield undefined; any other non-integer or sub-1 value throws with
// the field name in the message.
function parseOptionalPositiveInteger(value, fieldName) {
  if (value === void 0 || value === null) return void 0;
  const isValid = typeof value === "number" && Number.isInteger(value) && value >= 1;
  if (!isValid) throw new Error(`Migration ${fieldName} must be a positive integer.`);
  return value;
}
|
|
9291
|
+
// Validate an optional string argument. Absent (undefined/null) values
// yield undefined; empty strings and non-strings are rejected.
function parseOptionalString(value, fieldName) {
  if (value === void 0 || value === null) return void 0;
  if (typeof value === "string" && value.length > 0) return value;
  throw new Error(`Migration ${fieldName} must be a non-empty string.`);
}
|
|
9296
|
+
// Like parseOptionalString, but absence is an error too: delegates type
// and emptiness checks, then rejects undefined results.
function parseRequiredString(value, fieldName) {
  const normalized = parseOptionalString(value, fieldName);
  if (normalized === void 0) throw new Error(`Migration ${fieldName} is required.`);
  return normalized;
}
|
|
9301
|
+
// Index migration state rows by migration id, keeping only the fields the
// planner/drift detector consume. Missing cursors normalize to null.
function toAppliedStateMap(stateRows) {
  const byMigrationId = {};
  for (const row of stateRows) {
    byMigrationId[row.migrationId] = {
      applied: row.applied,
      checksum: row.checksum,
      cursor: row.cursor ?? null,
      processed: row.processed
    };
  }
  return byMigrationId;
}
|
|
9311
|
+
// Full scan of the migration-state table (expected small: one row per
// registered migration).
async function getAllStateRows(db) {
  const rows = await db.query(MIGRATION_STATE_TABLE).collect();
  return rows;
}
|
|
9314
|
+
// Full scan of the migration-run history table.
async function getAllRunRows(db) {
  const rows = await db.query(MIGRATION_RUN_TABLE).collect();
  return rows;
}
|
|
9317
|
+
// Look up a run row by its public runId via the by_run_id index,
// normalizing "not found" to null.
async function getRunById(db, runId) {
  const match = await db
    .query(MIGRATION_RUN_TABLE)
    .withIndex("by_run_id", (query) => query.eq("runId", runId))
    .first();
  return match ?? null;
}
|
|
9320
|
+
// Find the run currently in "running" state (via the by_status index),
// or null when no run is active.
async function getActiveRun(db) {
  const running = await db
    .query(MIGRATION_RUN_TABLE)
    .withIndex("by_status", (query) => query.eq("status", "running"))
    .first();
  return running ?? null;
}
|
|
9323
|
+
// Fetch the per-migration state row, creating a fresh row on first contact
// with this migration. The created row is re-read via db.get so callers
// always receive a full document (including system fields).
async function getOrCreateStateRow(db, migration, direction, step) {
  const existing = await db.query(MIGRATION_STATE_TABLE).withIndex("by_migration_id", (query) => query.eq("migrationId", migration.id)).first();
  if (existing) return existing;
  const now = Date.now();
  const stateId = await db.insert(MIGRATION_STATE_TABLE, {
    migrationId: migration.id,
    checksum: migration.checksum,
    // A down migration is un-applying, so its initial "applied" is true;
    // an up migration starts unapplied.
    applied: direction === "down",
    status: "pending",
    direction,
    runId: null,
    cursor: null,
    processed: 0,
    startedAt: now,
    updatedAt: now,
    completedAt: null,
    lastError: null,
    // Steps default to "safe_bypass" (runWithWriteMode skips rules and
    // triggers for anything other than "normal").
    writeMode: step.writeMode ?? "safe_bypass"
  });
  const created = await db.get(stateId);
  if (!created) throw new Error(`Failed to create migration state row for '${migration.id}'.`);
  return created;
}
|
|
9346
|
+
// Move a run row to its successful terminal state, clearing any pending
// cancel request and previous error.
async function markRunCompleted(db, runRow) {
  const finishedAt = Date.now();
  const payload = cleanUndefined({
    status: "completed",
    updatedAt: finishedAt,
    completedAt: finishedAt,
    cancelRequested: false,
    lastError: null
  });
  await db.patch(runRow._id, payload);
}
|
|
9356
|
+
// Move a run row to the canceled terminal state, then cancel every state
// row still in-flight (running/pending) for that run so it is not mistaken
// for resumable work later.
async function markRunCanceled(db, runRow) {
  const timestamp = Date.now();
  await db.patch(runRow._id, cleanUndefined({
    status: "canceled",
    updatedAt: timestamp,
    completedAt: timestamp,
    cancelRequested: true
  }));
  const stateRows = await getAllStateRows(db);
  for (const stateRow of stateRows) {
    const belongsToRun = stateRow.runId === runRow.runId;
    const inFlight = stateRow.status === "running" || stateRow.status === "pending";
    if (!belongsToRun || !inFlight) continue;
    await db.patch(stateRow._id, cleanUndefined({
      status: "canceled",
      updatedAt: timestamp
    }));
  }
}
|
|
9369
|
+
// Move a run row to the failed terminal state, persisting the error
// message so status reporting can surface it.
async function markRunFailed(db, runRow, message) {
  const failedAt = Date.now();
  const payload = cleanUndefined({
    status: "failed",
    updatedAt: failedAt,
    completedAt: failedAt,
    lastError: message
  });
  await db.patch(runRow._id, payload);
}
|
|
9378
|
+
// Execute a migration step callback under the requested write mode.
// "normal" keeps rules and triggers active; every other mode runs the
// callback against an ORM with rules skipped and triggers disabled.
async function runWithWriteMode(orm, writeMode, callback) {
  if (writeMode === "normal") return await callback(orm);
  return await orm.skipRules.withoutTriggers((noTriggersOrm) => callback(noTriggersOrm));
}
|
|
9384
|
+
// A patch payload is any truthy, non-array object value.
function isPatchPayload(value) {
  if (!value) return false;
  if (Array.isArray(value)) return false;
  return typeof value === "object";
}
|
|
9387
|
+
// True when the document carries a usable _id (present and not
// null/undefined).
function hasDocId(doc) {
  if (!("_id" in doc)) return false;
  return doc._id !== void 0 && doc._id !== null;
}
|
|
9390
|
+
// Strip keys whose value is `undefined` (Convex patch payloads must not
// contain explicit undefined). null and other falsy values are kept.
function cleanUndefined(value) {
  const cleaned = {};
  for (const [key, entry] of Object.entries(value)) {
    if (entry !== void 0) cleaned[key] = entry;
  }
  return cleaned;
}
|
|
9393
|
+
|
|
8927
9394
|
//#endregion
|
|
8928
9395
|
//#region src/orm/scheduled-delete.ts
|
|
8929
9396
|
function scheduledDeleteFactory(schema, edgeMetadata, scheduledMutationBatch) {
|
|
@@ -9093,6 +9560,10 @@ function scheduledMutationBatchFactory(schema, edgeMetadata, scheduledMutationBa
|
|
|
9093
9560
|
|
|
9094
9561
|
//#endregion
|
|
9095
9562
|
//#region src/orm/create-orm.ts
|
|
9563
|
+
// Collect every table name the reset action must wipe: all user-defined
// tables plus any tables injected by schema plugins, deduplicated.
function getResetTableNames(schema) {
  const userTables = Object.values(schema).map((tableConfig) => tableConfig.name);
  const pluginTables = schema[OrmSchemaPluginTables] ?? [];
  return [...new Set([...userTables, ...pluginTables])];
}
|
|
9096
9567
|
function isOrmCtx(source) {
|
|
9097
9568
|
return !!source && typeof source === "object" && "db" in source;
|
|
9098
9569
|
}
|
|
@@ -9143,13 +9614,25 @@ function createOrm(config) {
|
|
|
9143
9614
|
with: withContext,
|
|
9144
9615
|
api: () => {
|
|
9145
9616
|
let aggregateBackfillChunkRef = config.ormFunctions.aggregateBackfillChunk;
|
|
9617
|
+
let migrationRunChunkRef = config.ormFunctions.migrationRunChunk;
|
|
9146
9618
|
let resetChunkRef = config.ormFunctions.resetChunk;
|
|
9147
9619
|
const countBackfillHandlers = createCountBackfillHandlers(config.schema, () => aggregateBackfillChunkRef);
|
|
9620
|
+
const migrationHandlers = createMigrationHandlers({
|
|
9621
|
+
schema: config.schema,
|
|
9622
|
+
migrations: config.migrations,
|
|
9623
|
+
getOrm: (ctx) => db(ctx),
|
|
9624
|
+
getChunkRef: () => migrationRunChunkRef
|
|
9625
|
+
});
|
|
9148
9626
|
const aggregateBackfillChunk = mutationBuilder({
|
|
9149
9627
|
args: v.any(),
|
|
9150
9628
|
handler: countBackfillHandlers.chunk
|
|
9151
9629
|
});
|
|
9152
9630
|
if (!aggregateBackfillChunkRef) aggregateBackfillChunkRef = aggregateBackfillChunk;
|
|
9631
|
+
const migrationRunChunk = mutationBuilder({
|
|
9632
|
+
args: v.any(),
|
|
9633
|
+
handler: migrationHandlers.chunk
|
|
9634
|
+
});
|
|
9635
|
+
if (!migrationRunChunkRef) migrationRunChunkRef = migrationRunChunk;
|
|
9153
9636
|
const resetChunk = mutationBuilder({
|
|
9154
9637
|
args: v.object({
|
|
9155
9638
|
tableName: v.string(),
|
|
@@ -9193,11 +9676,24 @@ function createOrm(config) {
|
|
|
9193
9676
|
args: v.any(),
|
|
9194
9677
|
handler: countBackfillHandlers.status
|
|
9195
9678
|
}),
|
|
9679
|
+
migrationRun: mutationBuilder({
|
|
9680
|
+
args: v.any(),
|
|
9681
|
+
handler: migrationHandlers.run
|
|
9682
|
+
}),
|
|
9683
|
+
migrationRunChunk,
|
|
9684
|
+
migrationStatus: mutationBuilder({
|
|
9685
|
+
args: v.any(),
|
|
9686
|
+
handler: migrationHandlers.status
|
|
9687
|
+
}),
|
|
9688
|
+
migrationCancel: mutationBuilder({
|
|
9689
|
+
args: v.any(),
|
|
9690
|
+
handler: migrationHandlers.cancel
|
|
9691
|
+
}),
|
|
9196
9692
|
resetChunk,
|
|
9197
9693
|
reset: internalActionGeneric({
|
|
9198
9694
|
args: v.any(),
|
|
9199
9695
|
handler: async (ctx) => {
|
|
9200
|
-
const tableNames =
|
|
9696
|
+
const tableNames = getResetTableNames(config.schema);
|
|
9201
9697
|
let deleted = 0;
|
|
9202
9698
|
for (const tableName of tableNames) {
|
|
9203
9699
|
let cursor = null;
|
|
@@ -9440,6 +9936,7 @@ function defineRelations(schema, relations) {
|
|
|
9440
9936
|
const strict = schemaOptions?.strict ?? true;
|
|
9441
9937
|
const defaults = schemaOptions?.defaults;
|
|
9442
9938
|
const schemaDefinition = schema[OrmSchemaDefinition];
|
|
9939
|
+
const pluginTableNames = schema[OrmSchemaPluginTables];
|
|
9443
9940
|
const tablesConfig = buildRelations(tables, relations ? relations(createRelationsHelper(tables)) : {}, strict, defaults);
|
|
9444
9941
|
Object.defineProperty(tablesConfig, OrmSchemaOptions, {
|
|
9445
9942
|
value: {
|
|
@@ -9452,6 +9949,10 @@ function defineRelations(schema, relations) {
|
|
|
9452
9949
|
value: schemaDefinition,
|
|
9453
9950
|
enumerable: false
|
|
9454
9951
|
});
|
|
9952
|
+
if (pluginTableNames) Object.defineProperty(tablesConfig, OrmSchemaPluginTables, {
|
|
9953
|
+
value: pluginTableNames,
|
|
9954
|
+
enumerable: false
|
|
9955
|
+
});
|
|
9455
9956
|
return tablesConfig;
|
|
9456
9957
|
}
|
|
9457
9958
|
function defineRelationsPart(schema, relations) {
|
|
@@ -9460,6 +9961,7 @@ function defineRelationsPart(schema, relations) {
|
|
|
9460
9961
|
const strict = schemaOptions?.strict ?? true;
|
|
9461
9962
|
const defaults = schemaOptions?.defaults;
|
|
9462
9963
|
const schemaDefinition = schema[OrmSchemaDefinition];
|
|
9964
|
+
const pluginTableNames = schema[OrmSchemaPluginTables];
|
|
9463
9965
|
const tablesConfig = buildRelationsParts(tables, relations ? relations(createRelationsHelper(tables)) : Object.fromEntries(Object.keys(tables).map((k) => [k, {}])), strict, defaults);
|
|
9464
9966
|
Object.defineProperty(tablesConfig, OrmSchemaOptions, {
|
|
9465
9967
|
value: {
|
|
@@ -9472,6 +9974,10 @@ function defineRelationsPart(schema, relations) {
|
|
|
9472
9974
|
value: schemaDefinition,
|
|
9473
9975
|
enumerable: false
|
|
9474
9976
|
});
|
|
9977
|
+
if (pluginTableNames) Object.defineProperty(tablesConfig, OrmSchemaPluginTables, {
|
|
9978
|
+
value: pluginTableNames,
|
|
9979
|
+
enumerable: false
|
|
9980
|
+
});
|
|
9475
9981
|
return tablesConfig;
|
|
9476
9982
|
}
|
|
9477
9983
|
function processRelations(tablesConfig, tables) {
|
|
@@ -9559,6 +10065,7 @@ const DEFAULTS_NUMERIC_FIELDS = [
|
|
|
9559
10065
|
"mutationScheduleCallCap"
|
|
9560
10066
|
];
|
|
9561
10067
|
const MUTATION_EXECUTION_MODES = ["sync", "async"];
|
|
10068
|
+
const BUILTIN_SCHEMA_PLUGINS = [aggregatePlugin(), migrationPlugin()];
|
|
9562
10069
|
const normalizeDefaults = (defaults) => {
|
|
9563
10070
|
if (!defaults) return;
|
|
9564
10071
|
const normalized = {};
|
|
@@ -9579,6 +10086,27 @@ const normalizeDefaults = (defaults) => {
|
|
|
9579
10086
|
}
|
|
9580
10087
|
return normalized;
|
|
9581
10088
|
};
|
|
10089
|
+
// Combine built-in schema plugins with user-supplied ones (built-ins
// first) and reject duplicate plugin keys up front with an actionable
// error message.
function resolveSchemaPlugins(plugins) {
  const resolved = [...BUILTIN_SCHEMA_PLUGINS, ...(plugins ?? [])];
  const seenKeys = new Set();
  for (const plugin of resolved) {
    if (seenKeys.has(plugin.key)) {
      throw new Error(`defineSchema received duplicate plugin '${plugin.key}'. Remove duplicate plugin registrations.`);
    }
    seenKeys.add(plugin.key);
  }
  return resolved;
}
|
|
10098
|
+
// Thread the schema through every plugin's inject() in order and collect
// the distinct table names the plugins contribute, preserving first-seen
// order. Returns the augmented schema plus the plugin table-name list.
function applySchemaPlugins(schema, plugins) {
  let augmented = schema;
  const pluginTableNames = [];
  const seenNames = new Set();
  for (const plugin of plugins) {
    augmented = plugin.inject(augmented);
    for (const tableName of plugin.tableNames) {
      if (seenNames.has(tableName)) continue;
      seenNames.add(tableName);
      pluginTableNames.push(tableName);
    }
  }
  return {
    schema: augmented,
    pluginTableNames
  };
}
|
|
9582
10110
|
/**
|
|
9583
10111
|
* Better Convex schema definition
|
|
9584
10112
|
*
|
|
@@ -9589,7 +10117,8 @@ const normalizeDefaults = (defaults) => {
|
|
|
9589
10117
|
function defineSchema(schema, options) {
|
|
9590
10118
|
const strict = options?.strict ?? true;
|
|
9591
10119
|
const defaults = normalizeDefaults(options?.defaults);
|
|
9592
|
-
const
|
|
10120
|
+
const { schema: schemaWithPlugins, pluginTableNames } = applySchemaPlugins(schema, resolveSchemaPlugins(options?.plugins));
|
|
10121
|
+
const frozenPluginTableNames = Object.freeze([...pluginTableNames]);
|
|
9593
10122
|
Object.defineProperty(schema, OrmSchemaOptions, {
|
|
9594
10123
|
value: {
|
|
9595
10124
|
strict,
|
|
@@ -9597,15 +10126,23 @@ function defineSchema(schema, options) {
|
|
|
9597
10126
|
},
|
|
9598
10127
|
enumerable: false
|
|
9599
10128
|
});
|
|
9600
|
-
Object.defineProperty(
|
|
10129
|
+
Object.defineProperty(schemaWithPlugins, OrmSchemaOptions, {
|
|
9601
10130
|
value: {
|
|
9602
10131
|
strict,
|
|
9603
10132
|
defaults
|
|
9604
10133
|
},
|
|
9605
10134
|
enumerable: false
|
|
9606
10135
|
});
|
|
9607
|
-
|
|
9608
|
-
|
|
10136
|
+
Object.defineProperty(schema, OrmSchemaPluginTables, {
|
|
10137
|
+
value: frozenPluginTableNames,
|
|
10138
|
+
enumerable: false
|
|
10139
|
+
});
|
|
10140
|
+
Object.defineProperty(schemaWithPlugins, OrmSchemaPluginTables, {
|
|
10141
|
+
value: frozenPluginTableNames,
|
|
10142
|
+
enumerable: false
|
|
10143
|
+
});
|
|
10144
|
+
const { strict: _strict, defaults: _defaults, plugins: _plugins, ...convexOptions } = options ?? {};
|
|
10145
|
+
const convexSchema = defineSchema$1(schemaWithPlugins, convexOptions);
|
|
9609
10146
|
Object.defineProperty(convexSchema, OrmSchemaOptions, {
|
|
9610
10147
|
value: {
|
|
9611
10148
|
strict,
|
|
@@ -9613,11 +10150,15 @@ function defineSchema(schema, options) {
|
|
|
9613
10150
|
},
|
|
9614
10151
|
enumerable: false
|
|
9615
10152
|
});
|
|
10153
|
+
Object.defineProperty(convexSchema, OrmSchemaPluginTables, {
|
|
10154
|
+
value: frozenPluginTableNames,
|
|
10155
|
+
enumerable: false
|
|
10156
|
+
});
|
|
9616
10157
|
Object.defineProperty(schema, OrmSchemaDefinition, {
|
|
9617
10158
|
value: convexSchema,
|
|
9618
10159
|
enumerable: false
|
|
9619
10160
|
});
|
|
9620
|
-
Object.defineProperty(
|
|
10161
|
+
Object.defineProperty(schemaWithPlugins, OrmSchemaDefinition, {
|
|
9621
10162
|
value: convexSchema,
|
|
9622
10163
|
enumerable: false
|
|
9623
10164
|
});
|
|
@@ -9629,4 +10170,4 @@ function defineSchema(schema, options) {
|
|
|
9629
10170
|
}
|
|
9630
10171
|
|
|
9631
10172
|
//#endregion
|
|
9632
|
-
export { Brand, Columns, OrmNotFoundError, RlsPolicy, RlsRole, TableName, aggregateIndex, and, asc, between, bigint, boolean, bytes, check, contains, convexTable, createOrm, custom, date, defineRelations, defineRelationsPart, defineSchema, defineTriggers, deletion, desc, endsWith, eq, extractRelationsConfig, fieldRef, foreignKey, getByIdWithOrmQueryFallback, getTableColumns, getTableConfig, gt, gte, id, ilike, inArray, index, integer, isFieldReference, isNotNull, isNull, json, like, lt, lte, ne, not, notBetween, notInArray, or, rankIndex, rlsPolicy, rlsRole, scheduledDeleteFactory, scheduledMutationBatchFactory, searchIndex, startsWith, text, textEnum, timestamp, unique, uniqueIndex, unsetToken, vector, vectorIndex };
|
|
10173
|
+
export { Brand, Columns, OrmNotFoundError, OrmSchemaPluginTables, RlsPolicy, RlsRole, TableName, aggregateIndex, and, arrayOf, asc, between, bigint, boolean, buildMigrationPlan, bytes, check, contains, convexTable, createOrm, custom, date, defineMigration, defineMigrationSet, defineRelations, defineRelationsPart, defineSchema, defineTriggers, deletion, deprecated, desc, detectMigrationDrift, endsWith, eq, extractRelationsConfig, fieldRef, foreignKey, getByIdWithOrmQueryFallback, getTableColumns, getTableConfig, gt, gte, id, ilike, inArray, index, integer, isFieldReference, isNotNull, isNull, json, like, lt, lte, ne, not, notBetween, notInArray, objectOf, or, pretend, pretendRequired, rankIndex, rlsPolicy, rlsRole, scheduledDeleteFactory, scheduledMutationBatchFactory, searchIndex, startsWith, text, textEnum, timestamp, unique, uniqueIndex, unsetToken, vector, vectorIndex };
|