better-convex 0.7.2 → 0.7.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/aggregate/index.d.ts +1 -1
- package/dist/auth/http/index.d.ts +1 -1
- package/dist/auth/index.d.ts +4 -4
- package/dist/auth/index.js +5 -4
- package/dist/auth/nextjs/index.d.ts +2 -2
- package/dist/auth/nextjs/index.js +2 -2
- package/dist/{caller-factory-D3OuR1eI.js → caller-factory-4uND4vnj.js} +2 -2
- package/dist/cli.mjs +413 -5
- package/dist/{codegen-Cz1idI3-.mjs → codegen-BS36cYTH.mjs} +88 -5
- package/dist/{create-schema-orm-69VF4CFV.js → create-schema-orm-DtuyK2RB.js} +1 -1
- package/dist/crpc/index.d.ts +2 -2
- package/dist/crpc/index.js +3 -3
- package/dist/customFunctions-C_i_0joT.js +167 -0
- package/dist/{http-types-BCf2wCgp.d.ts → http-types-BsnDV7Je.d.ts} +1 -1
- package/dist/orm/index.d.ts +4 -3
- package/dist/orm/index.js +707 -4
- package/dist/{procedure-caller-CcjtUFvL.d.ts → procedure-caller-Cj_lgUev.d.ts} +4 -19
- package/dist/rsc/index.d.ts +3 -3
- package/dist/rsc/index.js +4 -4
- package/dist/server/index.d.ts +2 -2
- package/dist/server/index.js +4 -3
- package/dist/{types-CIBGEYXq.d.ts → types-DZFvhoPJ.d.ts} +1 -1
- package/dist/{customFunctions-CZnCwoR3.js → validators-B7oIJCAp.js} +67 -165
- package/dist/validators-BDrWGp4M.d.ts +88 -0
- package/dist/watcher.mjs +1 -1
- package/dist/{where-clause-compiler-CRP-i1Qa.d.ts → where-clause-compiler-HUa2223D.d.ts} +106 -2
- package/package.json +1 -1
- /package/dist/{create-schema-BdZOL6ns.js → create-schema-DE9ZtH8n.js} +0 -0
- /package/dist/{error-Be4OcwwD.js → error-C7AOPlv2.js} +0 -0
- /package/dist/{meta-utils-DDVYp9Xf.js → meta-utils-C9_6WIzj.js} +0 -0
- /package/dist/{query-context-DGExXZIV.d.ts → query-context-BMXt2TKe.d.ts} +0 -0
- /package/dist/{query-context-BDSis9rT.js → query-context-yQVARct0.js} +0 -0
- /package/dist/{query-options-B0c1b6pZ.js → query-options-Bjo6j5cC.js} +0 -0
- /package/dist/{transformer-Dh0w2py0.js → transformer-BsX4RWes.js} +0 -0
- /package/dist/{types-DgwvxKbT.d.ts → types-DarApWtO.d.ts} +0 -0
- /package/dist/{types-DwGkkq2s.d.ts → types-kgwiK-xe.d.ts} +0 -0
package/dist/orm/index.js
CHANGED
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
import { A as entityKind, C as text, D as custom, E as id, O as json, S as vectorIndex, T as integer, _ as aggregateIndex, a as deletion, b as searchIndex, c as EnableRLS, d as OrmSchemaOptions, f as RlsPolicies, g as rlsPolicy, h as RlsPolicy, i as convexTable, k as ConvexColumnBuilder, l as OrmContext, m as TableName, o as Brand, p as TableDeleteConfig, s as Columns, t as DirectAggregate, u as OrmSchemaDefinition, v as index, w as createSystemFields, x as uniqueIndex, y as rankIndex } from "../runtime-B9xQFY8W.js";
|
|
2
|
-
import {
|
|
2
|
+
import { a as pretendRequired, i as pretend, n as deprecated } from "../validators-B7oIJCAp.js";
|
|
3
|
+
import { A as ne, C as inArray, D as like, E as isNull, F as notLike, I as or, L as startsWith, M as notBetween, N as notIlike, O as lt, P as notInArray, S as ilike, T as isNotNull, _ as endsWith, a as mergedStream, b as gt, c as isUnsetToken, d as arrayContained, f as arrayContains, g as contains, h as column, i as getIndexFields, j as not, k as lte, l as unsetToken, m as between, n as EmptyStream, o as stream, p as arrayOverlaps, r as QueryStream, s as streamIndexRange, t as getByIdWithOrmQueryFallback, u as and, v as eq, w as isFieldReference, x as gte, y as fieldRef } from "../query-context-yQVARct0.js";
|
|
3
4
|
import { v } from "convex/values";
|
|
4
5
|
import { defineSchema as defineSchema$1, internalActionGeneric, internalMutationGeneric } from "convex/server";
|
|
5
6
|
|
|
@@ -379,6 +380,14 @@ const aggregateStorageTables = {
|
|
|
379
380
|
[AGGREGATE_RANK_NODE_TABLE]: rankNodeTable,
|
|
380
381
|
[AGGREGATE_STATE_TABLE]: countStateTable
|
|
381
382
|
};
|
|
383
|
+
// Names of the internal aggregate bookkeeping tables, as a Set for O(1)
// membership checks. Presumably used to recognize/reserve these internal
// table names elsewhere — the consuming code is not visible in this chunk.
const AGGREGATE_STORAGE_TABLE_NAMES = new Set([
  AGGREGATE_BUCKET_TABLE,
  AGGREGATE_MEMBER_TABLE,
  AGGREGATE_EXTREMA_TABLE,
  AGGREGATE_RANK_TREE_TABLE,
  AGGREGATE_RANK_NODE_TABLE,
  AGGREGATE_STATE_TABLE
]);
|
|
382
391
|
function injectAggregateStorageTables(schema) {
|
|
383
392
|
const merged = { ...schema };
|
|
384
393
|
for (const [tableName, tableDef] of Object.entries(aggregateStorageTables)) {
|
|
@@ -8924,6 +8933,671 @@ function isOne(relation) {
|
|
|
8924
8933
|
return relation.relationType === "one";
|
|
8925
8934
|
}
|
|
8926
8935
|
|
|
8936
|
+
//#endregion
|
|
8937
|
+
//#region src/orm/migrations/definitions.ts
|
|
8938
|
+
// Characters permitted in a user-supplied migration id (enforced by validateMigrationId).
const MIGRATION_ID_RE = /^[a-zA-Z0-9_:-]+$/;
// Collapses any whitespace run to a single space when normalizing function source.
const FUNCTION_SOURCE_WHITESPACE_RE = /\s+/g;
// Strips spaces around structural punctuation so formatting-only edits to a
// migrateOne function do not change its checksum (see normalizeFunctionSource).
const FUNCTION_SOURCE_PUNCTUATION_SPACE_RE = /\s*([{}();,:])\s*/g;
|
|
8941
|
+
/**
 * Validates a single migration definition and returns it unchanged.
 *
 * Checks the id format, the mandatory `up` step, and — when present — the
 * optional `down` step. Throws on the first invalid field.
 */
function defineMigration(migration) {
  const { id, up, down } = migration;
  validateMigrationId(id);
  validateMigrationStep("up", up);
  if (down) validateMigrationStep("down", down);
  return migration;
}
|
|
8947
|
+
/**
 * Normalizes a list of migrations into a validated, deterministically ordered set.
 *
 * Each migration is validated via defineMigration, given a checksum (computed
 * when not supplied), and the whole list is sorted by id. Returns the sorted
 * migrations, their ids in order, and an id → migration lookup table.
 * Throws when two migrations share an id.
 */
function defineMigrationSet(migrations) {
  const normalized = [...migrations].map((migration) => {
    const defined = defineMigration(migration);
    return {
      ...defined,
      checksum: defined.checksum ?? computeMigrationChecksum(defined)
    };
  });
  normalized.sort((a, b) => a.id.localeCompare(b.id));
  // Null-prototype dictionary: with a plain `{}`, ids such as "constructor"
  // or "toString" would resolve to inherited Object.prototype members and be
  // falsely reported as duplicates by the truthiness check below.
  const byId = Object.create(null);
  for (const migration of normalized) {
    if (byId[migration.id]) throw new Error(`defineMigrationSet received duplicate migration id '${migration.id}'.`);
    byId[migration.id] = migration;
  }
  return {
    migrations: normalized,
    ids: normalized.map((migration) => migration.id),
    byId
  };
}
|
|
8967
|
+
/**
 * Compares recorded (applied) migration state against the current manifest.
 *
 * Produces one issue per applied migration that is either absent from the
 * manifest ("missing_from_manifest") or whose recorded checksum no longer
 * matches the manifest checksum ("checksum_mismatch"). Rows that were never
 * applied are ignored, as are rows with no recorded checksum.
 *
 * @returns {Array<object>} list of drift issues; empty when state is clean.
 */
function detectMigrationDrift(params) {
  const { migrationSet, appliedState } = params;
  const issues = [];
  for (const [migrationId, state] of Object.entries(appliedState)) {
    if (!state.applied) continue;
    const manifestEntry = migrationSet.byId[migrationId];
    if (!manifestEntry) {
      issues.push({
        kind: "missing_from_manifest",
        migrationId,
        message: `Applied migration '${migrationId}' is missing from the current migration manifest.`
      });
      continue;
    }
    const recordedChecksum = state.checksum;
    if (recordedChecksum && recordedChecksum !== manifestEntry.checksum) {
      issues.push({
        kind: "checksum_mismatch",
        migrationId,
        expectedChecksum: manifestEntry.checksum,
        actualChecksum: recordedChecksum,
        message: `Applied migration '${migrationId}' checksum drift detected.`
      });
    }
  }
  return issues;
}
|
|
8991
|
+
/**
 * Computes the ordered list of migrations a run should execute.
 *
 * - "up": every manifest migration not yet marked applied, in manifest order.
 * - "down" with `to`: all applied migrations strictly after the target id,
 *   newest first; `to` must exist in the manifest.
 * - "down" with `steps` (default 1): the most recent `steps` applied
 *   migrations, newest first.
 *
 * `steps` and `to` are mutually exclusive. Every selected down migration must
 * define a `down` handler, otherwise this throws.
 */
function buildMigrationPlan(params) {
  const { direction, migrationSet, appliedState, steps, to } = params;
  const isApplied = (migration) => Boolean(appliedState[migration.id]?.applied);
  if (direction === "up") {
    return {
      direction,
      migrations: migrationSet.migrations.filter((migration) => !isApplied(migration))
    };
  }
  if (steps !== void 0 && to !== void 0) throw new Error("Use either down steps or down to, not both.");
  const appliedInOrder = migrationSet.migrations.filter(isApplied);
  // Guard: every migration we intend to roll back must have a down handler.
  const ensureDownSteps = (selected) => {
    const missingDown = selected.find((migration) => !migration.down);
    if (missingDown) throw new Error(`Cannot execute down migration for '${missingDown.id}': missing down migration handler.`);
    return selected;
  };
  if (to) {
    const targetIndex = migrationSet.ids.indexOf(to);
    if (targetIndex === -1) throw new Error(`Unknown migration id '${to}' for down --to.`);
    const afterTarget = appliedInOrder.filter((migration) => migrationSet.ids.indexOf(migration.id) > targetIndex);
    return {
      direction,
      migrations: ensureDownSteps(afterTarget).reverse()
    };
  }
  const resolvedSteps = steps ?? 1;
  if (!Number.isInteger(resolvedSteps) || resolvedSteps < 1) throw new Error("Down steps must be a positive integer.");
  const newestFirst = appliedInOrder.slice(-resolvedSteps).reverse();
  return {
    direction,
    migrations: ensureDownSteps(newestFirst)
  };
}
|
|
9021
|
+
/**
 * Asserts that a migration id is a non-empty string made only of the
 * characters allowed by MIGRATION_ID_RE (alphanumerics, '_', ':', '-').
 * Throws otherwise; returns nothing on success.
 */
function validateMigrationId(id) {
  const isNonEmptyString = typeof id === "string" && id.length > 0;
  if (!isNonEmptyString) throw new Error("Migration id must be a non-empty string.");
  if (!MIGRATION_ID_RE.test(id)) throw new Error(`Migration id '${id}' is invalid. Use alphanumeric characters, '_' ':' or '-'.`);
}
|
|
9025
|
+
/**
 * Structurally validates a single migration step ("up" or "down").
 *
 * Requires an object with a string `table` and a function `migrateOne`;
 * optional `batchSize` must be a positive integer and optional `writeMode`
 * must be 'safe_bypass' or 'normal'. Throws on the first violation.
 *
 * @param {string} direction - "up" or "down", used only in error messages.
 */
function validateMigrationStep(direction, step) {
  if (!step || typeof step !== "object") throw new Error(`Migration ${direction} step must be an object.`);
  const { table, migrateOne, batchSize, writeMode } = step;
  if (!table || typeof table !== "string") throw new Error(`Migration ${direction} step.table must be a string.`);
  if (typeof migrateOne !== "function") throw new Error(`Migration ${direction} step.migrateOne must be a function.`);
  const badBatchSize = batchSize !== void 0 && (!Number.isInteger(batchSize) || batchSize < 1);
  if (badBatchSize) throw new Error(`Migration ${direction} step.batchSize must be a positive integer.`);
  const badWriteMode = writeMode !== void 0 && writeMode !== "safe_bypass" && writeMode !== "normal";
  if (badWriteMode) throw new Error(`Migration ${direction} step.writeMode must be 'safe_bypass' or 'normal'.`);
}
|
|
9032
|
+
/**
 * Derives a stable checksum for a migration from its identity fields and the
 * serialized form of its up/down steps (see serializeStep). Used to detect
 * drift between a recorded (applied) migration and the current manifest.
 * Missing optional fields are normalized to null so the JSON is deterministic.
 */
function computeMigrationChecksum(migration) {
  return simpleStableHash(JSON.stringify({
    id: migration.id,
    name: migration.name ?? null,
    description: migration.description ?? null,
    up: serializeStep(migration.up),
    down: migration.down ? serializeStep(migration.down) : null
  }));
}
|
|
9041
|
+
/**
 * Produces the checksum-relevant shape of a migration step: target table,
 * normalized batchSize/writeMode (defaults applied so absent and default
 * values hash identically), and the whitespace-normalized source text of
 * the migrateOne handler.
 */
function serializeStep(step) {
  return {
    table: step.table,
    batchSize: step.batchSize ?? null,
    writeMode: step.writeMode ?? "safe_bypass",
    source: normalizeFunctionSource(step.migrateOne)
  };
}
|
|
9049
|
+
/**
 * Normalizes a function's source text for checksumming: collapses whitespace
 * runs to single spaces, removes spaces around structural punctuation, and
 * trims. This makes checksums insensitive to formatting-only edits.
 * NOTE(review): different build/minification pipelines can still rewrite the
 * source text itself and change the checksum — confirm this is acceptable.
 */
function normalizeFunctionSource(fn) {
  return fn.toString().replace(FUNCTION_SOURCE_WHITESPACE_RE, " ").replace(FUNCTION_SOURCE_PUNCTUATION_SPACE_RE, "$1").trim();
}
|
|
9052
|
+
/**
 * Deterministic, non-cryptographic string hash used for migration checksums.
 *
 * Runs two FNV-style 32-bit accumulators over the string's UTF-16 code units
 * (the second also mixes in the character index) and concatenates both as
 * zero-padded hex with an "m_" prefix, yielding a 64-bit-wide digest.
 */
function simpleStableHash(value) {
  let primary = 2166136261;
  let secondary = 16777619;
  let position = 0;
  while (position < value.length) {
    const unit = value.charCodeAt(position);
    primary = Math.imul(primary ^ unit, 16777619);
    secondary = Math.imul(secondary ^ (unit + position), 2246822507);
    position += 1;
  }
  const left = (primary >>> 0).toString(16).padStart(8, "0");
  const right = (secondary >>> 0).toString(16).padStart(8, "0");
  return `m_${left}${right}`;
}
|
|
9064
|
+
|
|
9065
|
+
//#endregion
|
|
9066
|
+
//#region src/orm/migrations/schema.ts
|
|
9067
|
+
// Internal bookkeeping tables used by the migration runner (runtime.ts below).
const MIGRATION_STATE_TABLE = "migration_state";
const MIGRATION_RUN_TABLE = "migration_run";
// Per-migration progress row: one row per migration id, tracking whether it is
// applied, its manifest checksum (for drift detection), and resumable cursor
// progress for the batch runner.
// NOTE(review): boolean() is not among this file's visible imports (unlike
// text/integer/custom/index) — confirm it is in scope in the full bundle.
const migrationStateTable = convexTable(MIGRATION_STATE_TABLE, {
  migrationId: text().notNull(),
  // Checksum recorded at apply time; compared against the manifest by detectMigrationDrift.
  checksum: text().notNull(),
  applied: boolean().notNull(),
  // Lifecycle: "pending" | "running" | "completed" | "failed" | "canceled" (set by runtime.ts).
  status: text().notNull(),
  direction: text(),
  runId: text(),
  // Pagination cursor for resuming a partially processed table scan.
  cursor: text(),
  processed: integer().notNull(),
  startedAt: integer(),
  updatedAt: integer().notNull(),
  completedAt: integer(),
  lastError: text(),
  // "safe_bypass" (skip rules/triggers) or "normal" — see runWithWriteMode.
  writeMode: text().notNull()
}, (t) => [index("by_migration_id").on(t.migrationId), index("by_status").on(t.status)]);
// One row per runner invocation: the ordered migration ids it will execute,
// how far it has gotten, and cooperative-cancellation state.
const migrationRunTable = convexTable(MIGRATION_RUN_TABLE, {
  runId: text().notNull(),
  direction: text().notNull(),
  status: text().notNull(),
  dryRun: boolean().notNull(),
  allowDrift: boolean().notNull(),
  migrationIds: custom(v.array(v.string())).notNull(),
  // Index into migrationIds of the migration currently being processed.
  currentIndex: integer().notNull(),
  startedAt: integer().notNull(),
  updatedAt: integer().notNull(),
  completedAt: integer(),
  // Set by cancel(); observed by the chunk worker before each batch.
  cancelRequested: boolean().notNull(),
  lastError: text()
}, (t) => [index("by_run_id").on(t.runId), index("by_status").on(t.status)]);
// Name → table definition map consumed by injectMigrationStorageTables.
const migrationStorageTables = {
  [MIGRATION_STATE_TABLE]: migrationStateTable,
  [MIGRATION_RUN_TABLE]: migrationRunTable
};
// Fast membership lookup for the reserved internal table names.
const MIGRATION_STORAGE_TABLE_NAMES = new Set([MIGRATION_STATE_TABLE, MIGRATION_RUN_TABLE]);
|
9103
|
+
/**
 * Returns a copy of the user schema with the internal migration bookkeeping
 * tables merged in. Throws if the user already defined a different table
 * under one of the reserved names; re-injecting the identical definition is
 * allowed. The input schema object is never mutated.
 */
function injectMigrationStorageTables(schema) {
  const combined = { ...schema };
  for (const [reservedName, definition] of Object.entries(migrationStorageTables)) {
    const conflicts = reservedName in schema && schema[reservedName] !== definition;
    if (conflicts) throw new Error(`defineSchema cannot inject internal table '${reservedName}' because the name is already in use.`);
    combined[reservedName] = definition;
  }
  return combined;
}
|
9111
|
+
|
|
9112
|
+
//#endregion
|
|
9113
|
+
//#region src/orm/migrations/runtime.ts
|
|
9114
|
+
// Documents scanned per chunk when neither the caller nor the step specifies a batch size.
const DEFAULT_BATCH_SIZE = 128;
/**
 * Builds the four Convex function handlers that drive the migration runner:
 * `run` (plan + start a run), `chunk` (process one batch and reschedule),
 * `status` (inspect state/drift), and `cancel` (cooperative cancellation).
 *
 * @param params.schema      table-name → table-config map; used to reject steps
 *                           that reference unknown tables.
 * @param params.migrations  a defineMigrationSet result (or undefined/empty).
 * @param params.getOrm      (ctx) => orm instance handed to migrateOne callbacks.
 * @param params.getChunkRef () => function reference for scheduling `chunk`
 *                           via ctx.scheduler; may return undefined, in which
 *                           case `run` processes the first chunk inline.
 */
function createMigrationHandlers(params) {
  const { schema, migrations, getOrm, getChunkRef } = params;
  const knownTables = new Set(Object.values(schema).map((tableConfig) => tableConfig.name));
  // Plans and starts a migration run. Returns early (without writing) for:
  // empty registry, drift without allowDrift, empty plan, dryRun, or an
  // already-active run.
  const run = async (ctx, args = {}) => {
    if (!migrations || migrations.migrations.length === 0) return {
      status: "noop",
      reason: "no_migrations_registered"
    };
    const direction = parseDirection(args.direction);
    const dryRun = args.dryRun === true;
    const allowDrift = args.allowDrift === true;
    const restart = args.restart === true;
    const batchSize = parseOptionalPositiveInteger(args.batchSize, "batchSize");
    const steps = parseOptionalPositiveInteger(args.steps, "steps");
    const to = parseOptionalString(args.to, "to");
    const stateRows = await getAllStateRows(ctx.db);
    const appliedState = toAppliedStateMap(stateRows);
    const drift = detectMigrationDrift({
      migrationSet: migrations,
      appliedState
    });
    // Refuse to run on drifted state unless the caller explicitly opts in.
    if (drift.length > 0 && !allowDrift) return {
      status: "drift_blocked",
      direction,
      drift
    };
    const plan = buildMigrationPlan({
      direction,
      migrationSet: migrations,
      appliedState,
      steps,
      to
    });
    if (plan.migrations.length === 0) return {
      status: "noop",
      direction,
      drift,
      plan: []
    };
    // Validate every planned step's target table before any state is written.
    for (const migration of plan.migrations) {
      const step = getStepForDirection(migration, direction);
      if (!knownTables.has(step.table)) throw new Error(`Migration '${migration.id}' references unknown table '${step.table}'.`);
    }
    if (dryRun) return {
      status: "dry_run",
      direction,
      drift,
      plan: plan.migrations.map((migration) => migration.id)
    };
    // Only one run may be active at a time.
    const activeRun = await getActiveRun(ctx.db);
    if (activeRun) return {
      status: "running",
      runId: activeRun.runId
    };
    const now = Date.now();
    const runId = createRunId(now);
    await ctx.db.insert(MIGRATION_RUN_TABLE, {
      runId,
      direction,
      status: "running",
      dryRun: false,
      allowDrift,
      migrationIds: plan.migrations.map((migration) => migration.id),
      currentIndex: 0,
      startedAt: now,
      updatedAt: now,
      cancelRequested: false
    });
    // Reset or create a "pending" state row for every planned migration.
    // Progress (cursor/processed) is kept only when resuming the same
    // direction without an explicit restart.
    const stateById = new Map(stateRows.map((row) => [row.migrationId, row]));
    for (const migration of plan.migrations) {
      const writeMode = getStepForDirection(migration, direction).writeMode ?? "safe_bypass";
      const existing = stateById.get(migration.id);
      const resetProgress = restart || existing?.direction !== direction;
      if (existing) await ctx.db.patch(existing._id, cleanUndefined({
        checksum: migration.checksum,
        status: "pending",
        direction,
        runId,
        cursor: resetProgress ? null : existing.cursor ?? null,
        processed: resetProgress ? 0 : existing.processed ?? 0,
        startedAt: now,
        updatedAt: now,
        completedAt: null,
        lastError: null,
        writeMode
      }));
      else await ctx.db.insert(MIGRATION_STATE_TABLE, {
        migrationId: migration.id,
        checksum: migration.checksum,
        // A row created for a "down" run describes a migration that was applied.
        applied: direction === "down",
        status: "pending",
        direction,
        runId,
        cursor: null,
        processed: 0,
        startedAt: now,
        updatedAt: now,
        completedAt: null,
        lastError: null,
        writeMode
      });
    }
    // Prefer background execution via the scheduler; the caller's explicit
    // batchSize is forwarded only when supplied.
    if (ctx.scheduler) {
      const chunkRef = getChunkRef();
      if (chunkRef) {
        const chunkArgs = { runId };
        if (batchSize !== void 0) chunkArgs.batchSize = batchSize;
        await ctx.scheduler.runAfter(0, chunkRef, chunkArgs);
        return {
          status: "running",
          direction,
          runId,
          plan: plan.migrations.map((migration) => migration.id)
        };
      }
    }
    // No scheduler/ref available: process the first chunk inline.
    // (`chunk` is a const declared below; safe because `run` only executes
    // after this factory has finished initializing.)
    const inlineResult = await chunk(ctx, {
      runId,
      batchSize
    });
    return {
      status: "running",
      direction,
      runId,
      plan: plan.migrations.map((migration) => migration.id),
      inlineResult
    };
  };
  // Processes one batch of one migration for a run, persists progress, and
  // reschedules itself (when a scheduler + chunk ref exist) until done.
  const chunk = async (ctx, args) => {
    const runId = parseRequiredString(args.runId, "runId");
    const batchSize = parseOptionalPositiveInteger(args.batchSize, "batchSize");
    const runRow = await getRunById(ctx.db, runId);
    if (!runRow) return {
      status: "missing",
      runId
    };
    if (runRow.status !== "running") return {
      status: runRow.status,
      runId
    };
    // Cooperative cancellation: checked once per chunk, before any work.
    if (runRow.cancelRequested) {
      await markRunCanceled(ctx.db, runRow);
      return {
        status: "canceled",
        runId
      };
    }
    const migrationId = runRow.migrationIds[runRow.currentIndex];
    if (!migrationId) {
      await markRunCompleted(ctx.db, runRow);
      return {
        status: "completed",
        runId
      };
    }
    const migration = migrations?.byId[migrationId];
    if (!migration) {
      await markRunFailed(ctx.db, runRow, `Migration '${migrationId}' is missing from registry.`);
      return {
        status: "failed",
        runId
      };
    }
    const direction = parseDirection(runRow.direction);
    const step = getStepForDirection(migration, direction);
    // Precedence: explicit arg > step default > global default.
    const resolvedBatchSize = batchSize ?? step.batchSize ?? DEFAULT_BATCH_SIZE;
    const stateRow = await getOrCreateStateRow(ctx.db, migration, direction, step);
    const cursor = stateRow.cursor ?? null;
    try {
      const page = await ctx.db.query(step.table).paginate({
        cursor,
        numItems: resolvedBatchSize
      });
      const docs = Array.isArray(page?.page) ? page.page : [];
      const orm = getOrm(ctx);
      const writeMode = step.writeMode ?? "safe_bypass";
      let processedInBatch = 0;
      for (const doc of docs) {
        const migrationCtxBase = {
          db: ctx.db,
          migrationId,
          runId,
          direction,
          dryRun: false,
          writeMode
        };
        const result = await runWithWriteMode(orm, writeMode, (resolvedOrm) => step.migrateOne({
          ...migrationCtxBase,
          orm: resolvedOrm
        }, doc));
        // Convention: a plain-object return value from migrateOne is applied
        // as a patch to the current document.
        if (isPatchPayload(result) && hasDocId(doc)) await ctx.db.patch(doc._id, result);
        processedInBatch += 1;
      }
      const now = Date.now();
      const isDone = Boolean(page?.isDone);
      const nextCursor = isDone ? null : page?.continueCursor ?? null;
      const nextProcessed = (stateRow.processed ?? 0) + processedInBatch;
      if (isDone) {
        // Current migration finished: mark its state row, advance the run,
        // and schedule the next migration (if any).
        await ctx.db.patch(stateRow._id, cleanUndefined({
          status: "completed",
          applied: direction === "up",
          cursor: null,
          processed: nextProcessed,
          completedAt: now,
          updatedAt: now,
          lastError: null,
          runId,
          direction,
          writeMode
        }));
        const nextIndex = runRow.currentIndex + 1;
        const done = nextIndex >= runRow.migrationIds.length;
        await ctx.db.patch(runRow._id, cleanUndefined({
          currentIndex: nextIndex,
          status: done ? "completed" : "running",
          updatedAt: now,
          completedAt: done ? now : null,
          lastError: null
        }));
        if (!done && ctx.scheduler) {
          const chunkRef = getChunkRef();
          if (chunkRef) await ctx.scheduler.runAfter(0, chunkRef, {
            runId,
            batchSize: resolvedBatchSize
          });
        }
        return {
          status: done ? "completed" : "running",
          runId,
          migrationId,
          processedInBatch,
          processed: nextProcessed,
          currentIndex: nextIndex,
          total: runRow.migrationIds.length
        };
      }
      // More pages remain for this migration: persist cursor and reschedule.
      await ctx.db.patch(stateRow._id, cleanUndefined({
        status: "running",
        cursor: nextCursor,
        processed: nextProcessed,
        updatedAt: now,
        runId,
        direction,
        writeMode
      }));
      await ctx.db.patch(runRow._id, cleanUndefined({
        status: "running",
        updatedAt: now
      }));
      if (ctx.scheduler) {
        const chunkRef = getChunkRef();
        if (chunkRef) await ctx.scheduler.runAfter(0, chunkRef, {
          runId,
          batchSize: resolvedBatchSize
        });
      }
      return {
        status: "running",
        runId,
        migrationId,
        processedInBatch,
        processed: nextProcessed,
        cursor: nextCursor
      };
    } catch (error) {
      // Any failure in a batch fails both the migration state and the run.
      const message = error.message || String(error);
      await ctx.db.patch(stateRow._id, cleanUndefined({
        status: "failed",
        lastError: message,
        updatedAt: Date.now()
      }));
      await markRunFailed(ctx.db, runRow, message);
      return {
        status: "failed",
        runId,
        migrationId,
        error: message
      };
    }
  };
  // Read-only snapshot: active run, recent runs (filtered by runId or capped
  // by limit), per-migration state, pending "up" plan, and drift issues.
  const status = async (ctx, args = {}) => {
    if (!migrations) return {
      status: "noop",
      reason: "no_migrations_registered"
    };
    const limit = parseOptionalPositiveInteger(args.limit, "limit") ?? 25;
    const runId = parseOptionalString(args.runId, "runId");
    const stateRows = await getAllStateRows(ctx.db);
    const sortedRuns = [...await getAllRunRows(ctx.db)].sort((left, right) => right.startedAt - left.startedAt);
    const selectedRuns = runId ? sortedRuns.filter((entry) => entry.runId === runId).slice(0, 1) : sortedRuns.slice(0, limit);
    const activeRun = sortedRuns.find((entry) => entry.status === "running") ?? null;
    const appliedState = toAppliedStateMap(stateRows);
    const drift = detectMigrationDrift({
      migrationSet: migrations,
      appliedState
    });
    const pendingUp = buildMigrationPlan({
      direction: "up",
      migrationSet: migrations,
      appliedState
    });
    return {
      status: activeRun ? "running" : "idle",
      activeRun,
      runs: selectedRuns,
      migrations: stateRows.map((row) => ({
        migrationId: row.migrationId,
        checksum: row.checksum,
        applied: row.applied,
        status: row.status,
        direction: row.direction ?? null,
        runId: row.runId ?? null,
        cursor: row.cursor ?? null,
        processed: row.processed,
        updatedAt: row.updatedAt,
        startedAt: row.startedAt ?? null,
        completedAt: row.completedAt ?? null,
        lastError: row.lastError ?? null,
        writeMode: row.writeMode
      })),
      pending: pendingUp.migrations.map((migration) => migration.id),
      drift
    };
  };
  // Requests cancellation of a run (by runId, or the active run). When no
  // background worker exists to observe the flag, cancels synchronously.
  const cancel = async (ctx, args = {}) => {
    if (!migrations) return {
      status: "noop",
      reason: "no_migrations_registered"
    };
    const runId = parseOptionalString(args.runId, "runId");
    const runRow = runId ? await getRunById(ctx.db, runId) : await getActiveRun(ctx.db);
    if (!runRow) return {
      status: "noop",
      reason: "no_active_run"
    };
    if (runRow.status !== "running") return {
      status: "noop",
      reason: "run_not_running",
      runId: runRow.runId,
      runStatus: runRow.status
    };
    const now = Date.now();
    await ctx.db.patch(runRow._id, cleanUndefined({
      cancelRequested: true,
      updatedAt: now
    }));
    // No scheduler or no chunk ref means no worker will ever see the flag —
    // finalize the cancellation here instead.
    if (!(ctx.scheduler ? getChunkRef() : void 0)) {
      await markRunCanceled(ctx.db, runRow);
      return {
        status: "canceled",
        runId: runRow.runId
      };
    }
    return {
      status: "cancel_requested",
      runId: runRow.runId
    };
  };
  return {
    run,
    chunk,
    status,
    cancel
  };
}
|
|
9480
|
+
/**
 * Selects the step definition for the requested direction.
 * "up" always exists; "down" throws when the migration defines no down step.
 */
function getStepForDirection(migration, direction) {
  if (direction === "up") return migration.up;
  const downStep = migration.down;
  if (!downStep) throw new Error(`Migration '${migration.id}' is missing down migration.`);
  return downStep;
}
|
|
9485
|
+
/**
 * Builds a run identifier of the form `mr_<timestamp>_<random base36>`.
 * Not cryptographically secure — used only as an internal correlation id.
 */
function createRunId(now) {
  const randomSuffix = Math.random().toString(36).slice(2, 10);
  return `mr_${now}_${randomSuffix}`;
}
|
|
9488
|
+
/**
 * Normalizes a direction argument: null/undefined default to "up";
 * anything other than "up"/"down" is rejected.
 */
function parseDirection(value) {
  switch (value) {
    case void 0:
    case null:
      return "up";
    case "up":
    case "down":
      return value;
    default:
      throw new Error("Migration direction must be either 'up' or 'down'.");
  }
}
|
|
9493
|
+
/**
 * Returns undefined for null/undefined; otherwise requires a positive
 * integer number and returns it, throwing with the field name on violation.
 */
function parseOptionalPositiveInteger(value, fieldName) {
  if (value == null) return undefined;
  const isPositiveInteger = typeof value === "number" && Number.isInteger(value) && value >= 1;
  if (!isPositiveInteger) throw new Error(`Migration ${fieldName} must be a positive integer.`);
  return value;
}
|
|
9498
|
+
/**
 * Returns undefined for null/undefined; otherwise requires a non-empty
 * string and returns it, throwing with the field name on violation.
 */
function parseOptionalString(value, fieldName) {
  if (value == null) return undefined;
  if (typeof value === "string" && value.length > 0) return value;
  throw new Error(`Migration ${fieldName} must be a non-empty string.`);
}
|
|
9503
|
+
/**
 * Like parseOptionalString, but null/undefined are also errors: the value
 * must be a non-empty string.
 */
function parseRequiredString(value, fieldName) {
  const parsed = parseOptionalString(value, fieldName);
  if (parsed === undefined) throw new Error(`Migration ${fieldName} is required.`);
  return parsed;
}
|
|
9508
|
+
/**
 * Indexes migration state rows by migration id, keeping only the fields the
 * planner and drift detector read (applied, checksum, cursor, processed).
 * Absent cursors are normalized to null.
 *
 * Uses a null-prototype object: keys are externally supplied migration ids,
 * so a plain `{}` dictionary could collide with inherited Object.prototype
 * members (e.g. an id of "toString").
 */
function toAppliedStateMap(stateRows) {
  const entries = Object.create(null);
  for (const row of stateRows) {
    entries[row.migrationId] = {
      applied: row.applied,
      checksum: row.checksum,
      cursor: row.cursor ?? null,
      processed: row.processed
    };
  }
  return entries;
}
|
|
9518
|
+
/** Loads every per-migration progress row from the migration_state table. */
async function getAllStateRows(db) {
  return await db.query(MIGRATION_STATE_TABLE).collect();
}
|
|
9521
|
+
/** Loads every run row from the migration_run table. */
async function getAllRunRows(db) {
  return await db.query(MIGRATION_RUN_TABLE).collect();
}
|
|
9524
|
+
/** Looks up a run row by its public runId via the by_run_id index; null when absent. */
async function getRunById(db, runId) {
  return await db.query(MIGRATION_RUN_TABLE).withIndex("by_run_id", (query) => query.eq("runId", runId)).first() ?? null;
}
|
|
9527
|
+
/** Returns the first run row with status "running" (at most one is expected), or null. */
async function getActiveRun(db) {
  return await db.query(MIGRATION_RUN_TABLE).withIndex("by_status", (query) => query.eq("status", "running")).first() ?? null;
}
|
|
9530
|
+
/**
 * Fetches the progress row for a migration, creating a fresh "pending" row
 * when none exists yet, and returns the stored document.
 *
 * @param direction - "up" or "down"; a row first created during a "down" run
 *                    starts with applied=true, since rolling back implies the
 *                    migration was previously applied.
 * @param step      - the active step; its writeMode (default "safe_bypass")
 *                    is recorded on the new row.
 * @throws when the freshly inserted row cannot be read back.
 */
async function getOrCreateStateRow(db, migration, direction, step) {
  const existing = await db.query(MIGRATION_STATE_TABLE).withIndex("by_migration_id", (query) => query.eq("migrationId", migration.id)).first();
  if (existing) return existing;
  const now = Date.now();
  const stateId = await db.insert(MIGRATION_STATE_TABLE, {
    migrationId: migration.id,
    checksum: migration.checksum,
    applied: direction === "down",
    status: "pending",
    direction,
    runId: null,
    cursor: null,
    processed: 0,
    startedAt: now,
    updatedAt: now,
    completedAt: null,
    lastError: null,
    writeMode: step.writeMode ?? "safe_bypass"
  });
  const created = await db.get(stateId);
  if (!created) throw new Error(`Failed to create migration state row for '${migration.id}'.`);
  return created;
}
|
|
9553
|
+
/** Finalizes a run as "completed", clearing any cancel request and last error. */
async function markRunCompleted(db, runRow) {
  const now = Date.now();
  await db.patch(runRow._id, cleanUndefined({
    status: "completed",
    updatedAt: now,
    completedAt: now,
    cancelRequested: false,
    lastError: null
  }));
}
|
|
9563
|
+
/**
 * Finalizes a run as "canceled" and marks every not-yet-finished state row
 * belonging to this run ("running" or "pending") as canceled too.
 * Note: scans all state rows — there is no index on state.runId.
 */
async function markRunCanceled(db, runRow) {
  const now = Date.now();
  await db.patch(runRow._id, cleanUndefined({
    status: "canceled",
    updatedAt: now,
    completedAt: now,
    cancelRequested: true
  }));
  for (const stateRow of await getAllStateRows(db)) if (stateRow.runId === runRow.runId && (stateRow.status === "running" || stateRow.status === "pending")) await db.patch(stateRow._id, cleanUndefined({
    status: "canceled",
    updatedAt: now
  }));
}
|
|
9576
|
+
/**
 * Flag a migration run row as failed, recording the failure message in
 * `lastError` and stamping `updatedAt`/`completedAt` with one timestamp.
 * @param db - Convex database handle.
 * @param runRow - Run row document (must carry `_id`).
 * @param message - Human-readable failure description.
 */
async function markRunFailed(db, runRow, message) {
	const failedAt = Date.now();
	const patch = {
		status: "failed",
		updatedAt: failedAt,
		completedAt: failedAt,
		lastError: message
	};
	await db.patch(runRow._id, cleanUndefined(patch));
}
|
|
9585
|
+
/**
 * Execute `callback` against the ORM using the requested write mode.
 * `"normal"` runs the callback directly; any other mode (e.g.
 * `"safe_bypass"`) runs it inside `skipRules.withoutTriggers` so triggers
 * are suppressed.
 * @param orm - ORM instance exposing `skipRules.withoutTriggers`.
 * @param writeMode - `"normal"` or a trigger-bypassing mode.
 * @param callback - Async work receiving the (possibly wrapped) ORM.
 * @returns Whatever `callback` resolves to.
 */
async function runWithWriteMode(orm, writeMode, callback) {
	if (writeMode === "normal") {
		return callback(orm);
	}
	return orm.skipRules.withoutTriggers((noTriggersOrm) => callback(noTriggersOrm));
}
|
|
9591
|
+
/**
 * Predicate: is `value` a plain-object patch payload?
 * True for non-null objects that are not arrays; false for primitives,
 * `null`, `undefined`, functions, and arrays.
 */
function isPatchPayload(value) {
	if (value === null || typeof value !== "object") return false;
	return !Array.isArray(value);
}
|
|
9594
|
+
/**
 * Predicate: does `doc` carry a usable `_id`?
 * The key must be present and its value must be neither `undefined` nor
 * `null`; falsy-but-valid ids (e.g. `0`, `""`) still count.
 */
function hasDocId(doc) {
	if (!("_id" in doc)) return false;
	const docId = doc._id;
	return docId !== void 0 && docId !== null;
}
|
|
9597
|
+
/**
 * Return a shallow copy of `value` with every `undefined`-valued entry
 * dropped; `null` and other falsy values are preserved.
 */
function cleanUndefined(value) {
	const kept = {};
	for (const [key, entry] of Object.entries(value)) {
		if (entry !== void 0) kept[key] = entry;
	}
	return kept;
}
|
|
9600
|
+
|
|
8927
9601
|
//#endregion
|
|
8928
9602
|
//#region src/orm/scheduled-delete.ts
|
|
8929
9603
|
function scheduledDeleteFactory(schema, edgeMetadata, scheduledMutationBatch) {
|
|
@@ -9093,6 +9767,10 @@ function scheduledMutationBatchFactory(schema, edgeMetadata, scheduledMutationBa
|
|
|
9093
9767
|
|
|
9094
9768
|
//#endregion
|
|
9095
9769
|
//#region src/orm/create-orm.ts
|
|
9770
|
+
/** Internal bookkeeping tables that a full reset must also wipe. */
const RESET_INTERNAL_TABLE_NAMES = [...AGGREGATE_STORAGE_TABLE_NAMES, ...MIGRATION_STORAGE_TABLE_NAMES];
/**
 * Collect the distinct table names a reset must clear: every table declared
 * in the user schema plus the ORM's internal storage tables.
 * @param schema - Schema map whose values expose a `name` property.
 * @returns De-duplicated array of table names.
 */
function getResetTableNames(schema) {
	const userTables = Object.values(schema).map((tableConfig) => tableConfig.name);
	const uniqueNames = new Set([...userTables, ...RESET_INTERNAL_TABLE_NAMES]);
	return [...uniqueNames];
}
|
|
9096
9774
|
/**
 * Predicate: does `source` look like an ORM context, i.e. a non-null
 * object exposing a `db` key (own or inherited)?
 */
function isOrmCtx(source) {
	if (!source || typeof source !== "object") return false;
	return "db" in source;
}
|
|
@@ -9143,13 +9821,25 @@ function createOrm(config) {
|
|
|
9143
9821
|
with: withContext,
|
|
9144
9822
|
api: () => {
|
|
9145
9823
|
let aggregateBackfillChunkRef = config.ormFunctions.aggregateBackfillChunk;
|
|
9824
|
+
let migrationRunChunkRef = config.ormFunctions.migrationRunChunk;
|
|
9146
9825
|
let resetChunkRef = config.ormFunctions.resetChunk;
|
|
9147
9826
|
const countBackfillHandlers = createCountBackfillHandlers(config.schema, () => aggregateBackfillChunkRef);
|
|
9827
|
+
const migrationHandlers = createMigrationHandlers({
|
|
9828
|
+
schema: config.schema,
|
|
9829
|
+
migrations: config.migrations,
|
|
9830
|
+
getOrm: (ctx) => db(ctx),
|
|
9831
|
+
getChunkRef: () => migrationRunChunkRef
|
|
9832
|
+
});
|
|
9148
9833
|
const aggregateBackfillChunk = mutationBuilder({
|
|
9149
9834
|
args: v.any(),
|
|
9150
9835
|
handler: countBackfillHandlers.chunk
|
|
9151
9836
|
});
|
|
9152
9837
|
if (!aggregateBackfillChunkRef) aggregateBackfillChunkRef = aggregateBackfillChunk;
|
|
9838
|
+
const migrationRunChunk = mutationBuilder({
|
|
9839
|
+
args: v.any(),
|
|
9840
|
+
handler: migrationHandlers.chunk
|
|
9841
|
+
});
|
|
9842
|
+
if (!migrationRunChunkRef) migrationRunChunkRef = migrationRunChunk;
|
|
9153
9843
|
const resetChunk = mutationBuilder({
|
|
9154
9844
|
args: v.object({
|
|
9155
9845
|
tableName: v.string(),
|
|
@@ -9193,11 +9883,24 @@ function createOrm(config) {
|
|
|
9193
9883
|
args: v.any(),
|
|
9194
9884
|
handler: countBackfillHandlers.status
|
|
9195
9885
|
}),
|
|
9886
|
+
migrationRun: mutationBuilder({
|
|
9887
|
+
args: v.any(),
|
|
9888
|
+
handler: migrationHandlers.run
|
|
9889
|
+
}),
|
|
9890
|
+
migrationRunChunk,
|
|
9891
|
+
migrationStatus: mutationBuilder({
|
|
9892
|
+
args: v.any(),
|
|
9893
|
+
handler: migrationHandlers.status
|
|
9894
|
+
}),
|
|
9895
|
+
migrationCancel: mutationBuilder({
|
|
9896
|
+
args: v.any(),
|
|
9897
|
+
handler: migrationHandlers.cancel
|
|
9898
|
+
}),
|
|
9196
9899
|
resetChunk,
|
|
9197
9900
|
reset: internalActionGeneric({
|
|
9198
9901
|
args: v.any(),
|
|
9199
9902
|
handler: async (ctx) => {
|
|
9200
|
-
const tableNames =
|
|
9903
|
+
const tableNames = getResetTableNames(config.schema);
|
|
9201
9904
|
let deleted = 0;
|
|
9202
9905
|
for (const tableName of tableNames) {
|
|
9203
9906
|
let cursor = null;
|
|
@@ -9589,7 +10292,7 @@ const normalizeDefaults = (defaults) => {
|
|
|
9589
10292
|
function defineSchema(schema, options) {
|
|
9590
10293
|
const strict = options?.strict ?? true;
|
|
9591
10294
|
const defaults = normalizeDefaults(options?.defaults);
|
|
9592
|
-
const schemaWithInternals = injectAggregateStorageTables(schema);
|
|
10295
|
+
const schemaWithInternals = injectMigrationStorageTables(injectAggregateStorageTables(schema));
|
|
9593
10296
|
Object.defineProperty(schema, OrmSchemaOptions, {
|
|
9594
10297
|
value: {
|
|
9595
10298
|
strict,
|
|
@@ -9629,4 +10332,4 @@ function defineSchema(schema, options) {
|
|
|
9629
10332
|
}
|
|
9630
10333
|
|
|
9631
10334
|
//#endregion
|
|
9632
|
-
export { Brand, Columns, OrmNotFoundError, RlsPolicy, RlsRole, TableName, aggregateIndex, and, asc, between, bigint, boolean, bytes, check, contains, convexTable, createOrm, custom, date, defineRelations, defineRelationsPart, defineSchema, defineTriggers, deletion, desc, endsWith, eq, extractRelationsConfig, fieldRef, foreignKey, getByIdWithOrmQueryFallback, getTableColumns, getTableConfig, gt, gte, id, ilike, inArray, index, integer, isFieldReference, isNotNull, isNull, json, like, lt, lte, ne, not, notBetween, notInArray, or, rankIndex, rlsPolicy, rlsRole, scheduledDeleteFactory, scheduledMutationBatchFactory, searchIndex, startsWith, text, textEnum, timestamp, unique, uniqueIndex, unsetToken, vector, vectorIndex };
|
|
10335
|
+
export { Brand, Columns, OrmNotFoundError, RlsPolicy, RlsRole, TableName, aggregateIndex, and, asc, between, bigint, boolean, buildMigrationPlan, bytes, check, contains, convexTable, createOrm, custom, date, defineMigration, defineMigrationSet, defineRelations, defineRelationsPart, defineSchema, defineTriggers, deletion, deprecated, desc, detectMigrationDrift, endsWith, eq, extractRelationsConfig, fieldRef, foreignKey, getByIdWithOrmQueryFallback, getTableColumns, getTableConfig, gt, gte, id, ilike, inArray, index, integer, isFieldReference, isNotNull, isNull, json, like, lt, lte, ne, not, notBetween, notInArray, or, pretend, pretendRequired, rankIndex, rlsPolicy, rlsRole, scheduledDeleteFactory, scheduledMutationBatchFactory, searchIndex, startsWith, text, textEnum, timestamp, unique, uniqueIndex, unsetToken, vector, vectorIndex };
|