convex-verify 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +403 -0
- package/dist/configs/index.d.mts +51 -0
- package/dist/configs/index.d.ts +51 -0
- package/dist/configs/index.js +38 -0
- package/dist/configs/index.js.map +1 -0
- package/dist/configs/index.mjs +11 -0
- package/dist/configs/index.mjs.map +1 -0
- package/dist/core/index.d.mts +58 -0
- package/dist/core/index.d.ts +58 -0
- package/dist/core/index.js +144 -0
- package/dist/core/index.js.map +1 -0
- package/dist/core/index.mjs +113 -0
- package/dist/core/index.mjs.map +1 -0
- package/dist/index.d.mts +9 -0
- package/dist/index.d.ts +9 -0
- package/dist/index.js +442 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +404 -0
- package/dist/index.mjs.map +1 -0
- package/dist/plugin-BjJ7yjrc.d.ts +141 -0
- package/dist/plugin-mHMV2-SG.d.mts +141 -0
- package/dist/plugins/index.d.mts +85 -0
- package/dist/plugins/index.d.ts +85 -0
- package/dist/plugins/index.js +317 -0
- package/dist/plugins/index.js.map +1 -0
- package/dist/plugins/index.mjs +289 -0
- package/dist/plugins/index.mjs.map +1 -0
- package/dist/transforms/index.d.mts +38 -0
- package/dist/transforms/index.d.ts +38 -0
- package/dist/transforms/index.js +46 -0
- package/dist/transforms/index.js.map +1 -0
- package/dist/transforms/index.mjs +19 -0
- package/dist/transforms/index.mjs.map +1 -0
- package/dist/types-_64SXyva.d.mts +151 -0
- package/dist/types-_64SXyva.d.ts +151 -0
- package/dist/utils/index.d.mts +36 -0
- package/dist/utils/index.d.ts +36 -0
- package/dist/utils/index.js +113 -0
- package/dist/utils/index.js.map +1 -0
- package/dist/utils/index.mjs +83 -0
- package/dist/utils/index.mjs.map +1 -0
- package/package.json +75 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,442 @@
|
|
|
1
|
+
"use strict";
// esbuild-generated CommonJS interop helpers (bundler preamble).
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines each entry of `all` on `target` as a lazy, enumerable getter so the
// export object can be built before the exported definitions run.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as getters, skipping `except` and
// any key already present on `to`; preserves each property's enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wraps the module namespace for CommonJS consumption, tagging it with
// `__esModule` for ESM/CJS interop.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
|
+
|
|
20
|
+
// src/index.ts
// Public export surface of the bundle. Each entry is a lazy getter bound to a
// definition that appears later in this file.
var src_exports = {};
__export(src_exports, {
  constructColumnData: () => constructColumnData,
  constructIndexData: () => constructIndexData,
  createValidatePlugin: () => createValidatePlugin,
  defaultValuesConfig: () => defaultValuesConfig,
  getTableIndexes: () => getTableIndexes,
  isValidatePlugin: () => isValidatePlugin,
  normalizeIndexConfigEntry: () => normalizeIndexConfigEntry,
  protectedColumnsConfig: () => protectedColumnsConfig,
  runValidatePlugins: () => runValidatePlugins,
  uniqueColumnConfig: () => uniqueColumnConfig,
  uniqueRowConfig: () => uniqueRowConfig,
  verifyConfig: () => verifyConfig
});
module.exports = __toCommonJS(src_exports);
|
|
37
|
+
|
|
38
|
+
// src/core/plugin.ts
/**
 * Runtime type guard for validate plugins.
 *
 * A validate plugin is a non-null object with a string `_type` tag and a
 * non-null `verify` record of operation handlers. The explicit
 * `obj.verify !== null` check fixes a hole in the previous guard:
 * `typeof null === "object"`, so `{ _type: "x", verify: null }` used to pass
 * and would then crash callers that read `plugin.verify.insert`.
 *
 * @param {unknown} obj - Candidate value to test.
 * @returns {boolean} True when `obj` has the validate-plugin shape.
 */
function isValidatePlugin(obj) {
  return typeof obj === "object" && obj !== null && "_type" in obj && typeof obj._type === "string" && "verify" in obj && typeof obj.verify === "object" && obj.verify !== null;
}
|
|
42
|
+
/**
 * Threads `data` through every plugin's handler for the current operation.
 *
 * For each plugin, picks `verify.insert` or `verify.patch` based on
 * `context.operation`; plugins without a handler for the operation are
 * skipped. Handlers run sequentially, each receiving the previous handler's
 * output, and the final value is returned.
 */
async function runValidatePlugins(plugins, context, data) {
  const isInsert = context.operation === "insert";
  let current = data;
  for (const plugin of plugins) {
    const handler = isInsert ? plugin.verify.insert : plugin.verify.patch;
    if (!handler) {
      continue;
    }
    current = await handler(context, current);
  }
  return current;
}
|
|
52
|
+
/**
 * Assembles a validate-plugin object from its three parts.
 *
 * @param type - Tag identifying the plugin kind (e.g. "uniqueRow").
 * @param config - The user-supplied configuration, stored as-is.
 * @param verify - Record of operation handlers ({ insert?, patch? }).
 * @returns The plugin record `{ _type, config, verify }`.
 */
function createValidatePlugin(type, config, verify) {
  const plugin = { _type: type, config, verify };
  return plugin;
}
|
|
59
|
+
|
|
60
|
+
// src/core/verifyConfig.ts
/**
 * Builds the verified-write API for a schema.
 *
 * `_schema` is passed through to plugins via their context; `configs` may
 * carry a `defaultValues` transform and a `plugins` array of validate
 * plugins. Returns `{ insert, patch, dangerouslyPatch, configs }`.
 */
var verifyConfig = (_schema, configs) => {
  const validatePlugins = configs.plugins ?? [];
  // Insert: apply default values first, then each plugin's insert verifier,
  // then write. Returns the result of ctx.db.insert (the new document id).
  const insert = async (ctx, tableName, data, options) => {
    let verifiedData = data;
    if (configs.defaultValues) {
      verifiedData = await configs.defaultValues.verify(tableName, verifiedData);
    }
    if (validatePlugins.length > 0) {
      verifiedData = await runValidatePlugins(
        validatePlugins,
        {
          ctx,
          tableName,
          operation: "insert",
          onFail: options?.onFail,
          schema: _schema
        },
        verifiedData
      );
    }
    return await ctx.db.insert(tableName, verifiedData);
  };
  // Patch: runs plugin patch verifiers (no defaultValues step), then writes.
  // NOTE: unlike insert, this returns undefined -- ctx.db.patch's result is
  // awaited but not returned.
  const patch = async (ctx, tableName, id, data, options) => {
    let verifiedData = data;
    if (validatePlugins.length > 0) {
      verifiedData = await runValidatePlugins(
        validatePlugins,
        {
          ctx,
          tableName,
          operation: "patch",
          patchId: id,
          onFail: options?.onFail,
          schema: _schema
        },
        verifiedData
      );
    }
    await ctx.db.patch(id, verifiedData);
  };
  // NOTE(review): dangerouslyPatch is currently byte-identical to patch --
  // it still runs every validate plugin. The name suggests it was meant to
  // bypass some verification step; confirm intent against the source.
  const dangerouslyPatch = async (ctx, tableName, id, data, options) => {
    let verifiedData = data;
    if (validatePlugins.length > 0) {
      verifiedData = await runValidatePlugins(
        validatePlugins,
        {
          ctx,
          tableName,
          operation: "patch",
          patchId: id,
          onFail: options?.onFail,
          schema: _schema
        },
        verifiedData
      );
    }
    await ctx.db.patch(id, verifiedData);
  };
  return {
    insert,
    patch,
    dangerouslyPatch,
    // Expose configs for debugging/advanced usage
    configs
  };
};
|
|
127
|
+
|
|
128
|
+
// src/core/types.ts
/**
 * Normalizes an index-config entry into `{ index, identifiers, ...options }`.
 *
 * Entries come in two shapes: a bare index name (string), which inherits the
 * default identifiers, or an object whose `index`/`identifiers` are coerced
 * to strings and whose remaining keys are carried through untouched.
 */
function normalizeIndexConfigEntry(entry, defaultIdentifiers = ["_id"]) {
  // Shorthand string form.
  if (typeof entry === "string") {
    return { index: entry, identifiers: defaultIdentifiers };
  }
  // Object form: coerce names to strings, keep extra options.
  const { index, identifiers, ...options } = entry;
  const normalizedIdentifiers = identifiers == null ? defaultIdentifiers : identifiers.map(String);
  return {
    index: String(index),
    identifiers: normalizedIdentifiers,
    ...options
  };
}
|
|
143
|
+
|
|
144
|
+
// src/transforms/defaultValuesConfig.ts
/**
 * Default-values transform: fills in per-table defaults for missing fields.
 *
 * `config` is either a record of `{ tableName: defaults }` or an (async)
 * function producing one; it is resolved on every verify call. Explicit
 * fields in `data` always win over defaults.
 */
var defaultValuesConfig = (_schema, config) => {
  // Resolve the config lazily so function configs see fresh values each call.
  const resolve = async () => typeof config === "function" ? await config() : config;
  const verify = async (tableName, data) => {
    const defaults = (await resolve())[tableName];
    return { ...defaults, ...data };
  };
  return {
    _type: "defaultValues",
    verify,
    config
  };
};
|
|
159
|
+
|
|
160
|
+
// src/configs/protectedColumnsConfig.ts
/**
 * Protected-columns config: wraps the user config in a tagged record.
 * The config object is stored by reference, unmodified.
 */
var protectedColumnsConfig = (_schema, config) => ({
  _type: "protectedColumns",
  config
});
|
|
167
|
+
|
|
168
|
+
// src/plugins/uniqueRowConfig.ts
|
|
169
|
+
var import_values = require("convex/values");
|
|
170
|
+
|
|
171
|
+
// src/utils/helpers.ts
// Returns a table's index definitions from the schema. The `" indexes"` key
// (note the deliberate leading space) appears to be Convex's internal
// accessor on table definitions -- TODO confirm against convex/server
// internals, since it is not part of the public API surface.
var getTableIndexes = (schema, tableName) => {
  return schema.tables[tableName][" indexes"]();
};
|
|
175
|
+
/**
 * Pairs each index field with its value from `data`.
 *
 * Options:
 * - `allowNullishValue` (default false): when false, fields whose value is
 *   missing are dropped. NOTE(review): the check uses `!value`, so every
 *   falsy value (0, "", false) is treated as missing, not just
 *   null/undefined, despite the option's name -- behavior kept as-is.
 * - `allOrNothing` (default true): when true, returns null (with a warning)
 *   unless every field produced a pair.
 *
 * Returns an array of `{ column, value }`, or null when empty / incomplete.
 */
var constructColumnData = (fields, data, {
  allowNullishValue = false,
  allOrNothing = true
}) => {
  const pairs = [];
  for (const column of fields) {
    const value = data?.[column];
    const missing = !column || !allowNullishValue && !value;
    if (!missing) {
      pairs.push({ column, value });
    }
  }
  if (allOrNothing && pairs.length !== fields.length) {
    console.warn(
      "The index was NOT supplied with the same amount data as there was fields. This warning only appears when setting `allOrNothing` to `true`.",
      "`fields: `",
      fields,
      "`columnData: `",
      pairs
    );
    return null;
  }
  return pairs.length > 0 ? pairs : null;
};
|
|
203
|
+
/**
 * Resolves a table's index config into concrete index metadata.
 *
 * For each configured entry, looks up the matching schema index by its
 * descriptor, attaches its fields, and dedupes the identifier list (always
 * ending with "_id"). Returns undefined when no config exists for the table;
 * throws when a configured index is absent from the schema.
 */
var constructIndexData = (schema, tableName, indexConfig) => {
  const tableConfig = indexConfig?.[tableName];
  if (!tableConfig) {
    return;
  }
  return tableConfig.map((entry) => {
    const { index, identifiers, ...rest } = normalizeIndexConfigEntry(entry);
    // Loose equality on purpose: descriptors may not be strictly typed.
    const matched = getTableIndexes(schema, tableName).find(
      (i) => i.indexDescriptor == index
    );
    if (!matched?.fields) {
      throw new Error(`Error in 'constructIndexData()'. No fields found for index: [${index}]`);
    }
    // Dedupe while preserving first-seen order; "_id" is always included.
    const uniqueIdentifiers = [...new Set([...identifiers, "_id"].map(String))];
    return {
      name: index,
      fields: matched.fields,
      identifiers: uniqueIdentifiers,
      ...rest
    };
  });
};
|
|
231
|
+
|
|
232
|
+
// src/plugins/uniqueRowConfig.ts
/**
 * Validate plugin enforcing multi-column (row) uniqueness via schema indexes.
 *
 * For every configured index on the table, queries for an existing document
 * with the same column combination as `data`. Inserts fail on any match;
 * patches are allowed to match their own document (identified via the
 * configured `identifiers` or the patch id).
 */
var uniqueRowConfig = (schema, config) => {
  // Always throws; never returns.
  const uniqueRowError = (message) => {
    throw new import_values.ConvexError({
      message,
      code: "UNIQUE_ROW_VERIFICATION_ERROR"
    });
  };
  const verifyUniqueness = async (context, data, tableName) => {
    const { ctx, operation, patchId, onFail } = context;
    const indexesData = constructIndexData(schema, tableName, config);
    // Config exists for this table but index resolution produced nothing.
    if (!indexesData && !!config[tableName]) {
      uniqueRowError(`Index data was not found where there should have been.`);
    }
    if (!indexesData) {
      return data;
    }
    for (const indexInfo of indexesData) {
      const { name, fields, identifiers, ...rest } = indexInfo;
      // NOTE(review): `_options` is captured but never used below.
      const _options = rest;
      // NOTE(review): this only fires when BOTH of the first two fields are
      // missing; the message says "must be two columns", which suggests `||`
      // was intended -- a single-column index currently slips through. TODO
      // confirm against the source package.
      if (!fields[0] && !fields[1]) {
        uniqueRowError(
          `Error in 'verifyRowUniqueness()'. There must be two columns to test against. If you are attempting to enforce a unique column, use the 'uniqueColumns' config option.`
        );
      }
      const columnData = constructColumnData(fields, data, {});
      // Queries the index with every (column, value) pair chained as
      // equality clauses; warns if more than one row already matches.
      const getExisting = async (cd) => {
        let existingByIndex = [];
        if (!cd) {
          existingByIndex = [];
        } else {
          existingByIndex = await ctx.db.query(tableName).withIndex(
            name,
            (q) => cd.reduce((query, { column, value }) => query.eq(column, value), q)
          ).collect();
        }
        if (existingByIndex.length > 1) {
          console.warn(
            `There was more than one existing result found for index ${name}. Check the following IDs:`,
            existingByIndex.map((r) => r._id)
          );
          console.warn(
            `It is recommended that you triage the rows listed above since they have data that go against a rule of row uniqueness.`
          );
        }
        return existingByIndex.length > 0 ? existingByIndex[0] : null;
      };
      const existing = await getExisting(columnData);
      if (operation === "insert") {
        if (!existing) {
          continue;
        }
        // Report the conflict to the caller before throwing.
        onFail?.({
          uniqueRow: {
            existingData: existing
          }
        });
        uniqueRowError(
          `Unable to [${operation}] document. In table [${tableName}], there is an existing row that has the same data combination in the columns: [${fields.join(`, `)}].`
        );
      }
      if (operation === "patch") {
        if (!patchId) {
          uniqueRowError(`Unable to patch document without an id.`);
        }
        // Returns the identifier name under which `_existing` matches the
        // document being patched (i.e. it is "our own" row), else null.
        // NOTE(review): truthy guards mean identifier values of 0/"" never
        // match by value; "_id" matches via patchId regardless.
        const matchedToExisting = (_existing, _data) => {
          let idMatchedToExisting = null;
          if (_existing) {
            for (const identifier of identifiers) {
              if (_existing[identifier] && _data[identifier] && _existing[identifier] === _data[identifier] || identifier === "_id" && _existing[identifier] === patchId) {
                idMatchedToExisting = String(identifier);
                break;
              }
            }
          }
          return idMatchedToExisting;
        };
        // Throws (after onFail) when `_existing` is a different document.
        const checkExisting = (_existing, _data) => {
          const matchedId = matchedToExisting(_existing, _data);
          if (!_existing) {
            return;
          }
          if (matchedId) {
            return;
          } else {
            onFail?.({
              uniqueRow: {
                existingData: _existing
              }
            });
            uniqueRowError(
              `In '${tableName}' table, there already exists a value match of the columns: [${fields.join(`,`)}].`
            );
          }
        };
        // Partial patch: the patch payload alone could not fill the index,
        // so merge it with the stored document and re-check.
        if (!existing && !columnData && patchId) {
          const match = await ctx.db.get(patchId);
          if (!match) {
            uniqueRowError(`No document found for id ${patchId}`);
            // NOTE(review): unreachable -- uniqueRowError always throws.
            return data;
          }
          const extensiveColumnData = constructColumnData(
            fields,
            {
              ...match,
              ...data
            },
            {}
          );
          if (extensiveColumnData) {
            const extensiveExisting = await getExisting(extensiveColumnData);
            checkExisting(extensiveExisting, data);
          } else {
            uniqueRowError(`Incomplete data when there should have been enough.`);
          }
        } else {
          checkExisting(existing, data);
        }
      }
    }
    return data;
  };
  return createValidatePlugin("uniqueRow", config, {
    insert: async (context, data) => {
      return verifyUniqueness(context, data, context.tableName);
    },
    patch: async (context, data) => {
      return verifyUniqueness(context, data, context.tableName);
    }
  });
};
|
|
363
|
+
|
|
364
|
+
// src/plugins/uniqueColumnConfig.ts
var import_values2 = require("convex/values");
/**
 * Validate plugin enforcing single-column uniqueness via schema indexes.
 *
 * The column name is derived from the index name by stripping "by_".
 * NOTE(review): `String.replace("by_", "")` removes the FIRST occurrence
 * anywhere in the name, not just a prefix (e.g. "sorted_by_email" ->
 * "sorted_email") -- confirm all configured indexes use a "by_<column>"
 * naming scheme.
 */
var uniqueColumnConfig = (_schema, config) => {
  // Always throws; never returns.
  const uniqueColumnError = (message) => {
    throw new import_values2.ConvexError({
      message,
      code: "UNIQUE_COLUMN_VERIFICATION_ERROR"
    });
  };
  const verifyUniqueness = async (context, data) => {
    const { ctx, tableName, patchId, onFail } = context;
    const tableConfig = config[tableName];
    if (!tableConfig) {
      return data;
    }
    for (const entry of tableConfig) {
      const { index, identifiers } = normalizeIndexConfigEntry(
        entry
      );
      const columnName = index.replace("by_", "");
      const value = data[columnName];
      // Absent / nullish values are not checked for uniqueness.
      if (value === void 0 || value === null) {
        continue;
      }
      // `.unique()` throws if more than one row already holds this value.
      const existing = await ctx.db.query(tableName).withIndex(index, (q) => q.eq(columnName, value)).unique();
      if (!existing) {
        continue;
      }
      // A patch may legitimately collide with its own document; detect that
      // via patchId or any configured identifier. NOTE(review): the truthy
      // guards mean identifier values of 0/"" never match by value.
      let isOwnDocument = false;
      for (const identifier of identifiers) {
        if (identifier === "_id" && patchId && existing._id === patchId) {
          isOwnDocument = true;
          break;
        }
        if (existing[identifier] && data[identifier] && existing[identifier] === data[identifier]) {
          isOwnDocument = true;
          break;
        }
      }
      if (isOwnDocument) {
        continue;
      }
      // Report the conflict to the caller before throwing.
      onFail?.({
        uniqueColumn: {
          conflictingColumn: columnName,
          existingData: existing
        }
      });
      uniqueColumnError(
        `In [${tableName}] table, there already exists value "${value}" in column [${columnName}].`
      );
    }
    return data;
  };
  return createValidatePlugin("uniqueColumn", config, {
    insert: async (context, data) => {
      return verifyUniqueness(context, data);
    },
    patch: async (context, data) => {
      return verifyUniqueness(context, data);
    }
  });
};
|
|
427
|
+
// Annotate the CommonJS export names for ESM import in node:
// (dead code by design: the `0 &&` guard means this never executes; Node's
// static CJS named-export detection parses it to expose these names to ESM
// importers)
0 && (module.exports = {
  constructColumnData,
  constructIndexData,
  createValidatePlugin,
  defaultValuesConfig,
  getTableIndexes,
  isValidatePlugin,
  normalizeIndexConfigEntry,
  protectedColumnsConfig,
  runValidatePlugins,
  uniqueColumnConfig,
  uniqueRowConfig,
  verifyConfig
});
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/index.ts","../src/core/plugin.ts","../src/core/verifyConfig.ts","../src/core/types.ts","../src/transforms/defaultValuesConfig.ts","../src/configs/protectedColumnsConfig.ts","../src/plugins/uniqueRowConfig.ts","../src/utils/helpers.ts","../src/plugins/uniqueColumnConfig.ts"],"sourcesContent":["// =============================================================================\n// Core\n// =============================================================================\n\nexport { verifyConfig } from './core';\nexport { createValidatePlugin, isValidatePlugin, runValidatePlugins } from './core';\nexport type { ValidateContext, ValidatePlugin, ValidatePluginRecord } from './core';\nexport type {\n\t// Utility types\n\tPrettify,\n\tMakeOptional,\n\t// OnFail types\n\tOnFailArgs,\n\tOnFailCallback,\n\t// VerifyConfig types\n\tVerifyConfigInput,\n\t// Type extraction helpers\n\tExtractDefaultValuesConfig,\n\tOptionalKeysForTable,\n\tHasKey,\n\tExtractProtectedColumnsConfig,\n\tProtectedKeysForTable,\n} from './core';\n\n// =============================================================================\n// Transforms\n// =============================================================================\n\nexport { defaultValuesConfig } from './transforms';\nexport type { DefaultValuesConfigData } from './transforms';\n\n// =============================================================================\n// Configs\n// =============================================================================\n\nexport { protectedColumnsConfig } from './configs';\nexport type { ProtectedColumnsConfigData } from './configs';\n\n// =============================================================================\n// Plugins\n// =============================================================================\n\nexport { uniqueRowConfig, uniqueColumnConfig } from './plugins';\nexport type 
{\n\tUniqueRowConfigData,\n\tUniqueRowConfigEntry,\n\tUniqueRowConfigOptions,\n\tUniqueColumnConfigData,\n\tUniqueColumnConfigEntry,\n\tUniqueColumnConfigOptions,\n} from './plugins';\n\n// =============================================================================\n// Utils\n// =============================================================================\n\nexport { getTableIndexes, constructColumnData, constructIndexData } from './utils';\nexport { normalizeIndexConfigEntry } from './utils';\nexport type { NormalizedIndexConfig, IndexConfigBaseOptions, IndexConfigEntry } from './utils';\n","import { GenericMutationCtx, GenericSchema, SchemaDefinition } from 'convex/server';\nimport { GenericId } from 'convex/values';\n\nimport { OnFailCallback } from './types';\n\n// =============================================================================\n// Plugin Types\n// =============================================================================\n\n/**\n * Context passed to validate plugin functions.\n *\n * Provides access to:\n * - `ctx` - Full Convex mutation context (includes `ctx.db` for queries)\n * - `tableName` - The table being operated on\n * - `operation` - 'insert' or 'patch'\n * - `patchId` - Document ID (only for patch operations)\n * - `onFail` - Callback to report validation failures before throwing\n * - `schema` - Optional schema reference (if provided by verifyConfig)\n */\nexport type ValidateContext<TN extends string = string> = {\n\t/** Full Convex mutation context - use ctx.db for database queries */\n\tctx: Omit<GenericMutationCtx<any>, never>;\n\t/** Table name being operated on */\n\ttableName: TN;\n\t/** Operation type: 'insert' or 'patch' */\n\toperation: 'insert' | 'patch';\n\t/** Document ID (only available for patch operations) */\n\tpatchId?: GenericId<any>;\n\t/** Callback for validation failures - call before throwing to provide details */\n\tonFail?: OnFailCallback<any>;\n\t/** Schema reference (if provided to verifyConfig) 
*/\n\tschema?: SchemaDefinition<GenericSchema, boolean>;\n};\n\n/**\n * A validate plugin that can check data during insert/patch operations.\n *\n * Validate plugins:\n * - Run AFTER transform plugins (like defaultValues)\n * - Can be async (use await for API calls, db queries, etc.)\n * - Can throw errors to prevent the operation\n * - Should return the data unchanged (validation only, no transformation)\n * - Do NOT affect the TypeScript types of the input data\n *\n * @example\n * ```ts\n * // Simple sync plugin\n * const requiredFields = createValidatePlugin(\n * 'requiredFields',\n * { fields: ['title', 'content'] },\n * {\n * insert: (context, data) => {\n * for (const field of config.fields) {\n * if (!data[field]) {\n * throw new ConvexError({ message: `Missing required field: ${field}` });\n * }\n * }\n * return data;\n * },\n * }\n * );\n *\n * // Async plugin with database query\n * const checkOwnership = createValidatePlugin(\n * 'checkOwnership',\n * {},\n * {\n * patch: async (context, data) => {\n * const existing = await context.ctx.db.get(context.patchId);\n * if (existing?.ownerId !== getCurrentUserId()) {\n * throw new ConvexError({ message: 'Not authorized' });\n * }\n * return data;\n * },\n * }\n * );\n * ```\n */\nexport interface ValidatePlugin<Type extends string = string, Config = unknown> {\n\t/** Unique identifier for this plugin */\n\treadonly _type: Type;\n\n\t/** Plugin configuration */\n\treadonly config: Config;\n\n\t/** Verify functions for insert and/or patch operations */\n\tverify: {\n\t\t/**\n\t\t * Validate data for insert operations.\n\t\t * Can be sync or async.\n\t\t *\n\t\t * @param context - Plugin context with ctx, tableName, schema, etc.\n\t\t * @param data - The data to validate (after transforms applied)\n\t\t * @returns The data unchanged (or Promise resolving to data)\n\t\t * @throws ConvexError if validation fails\n\t\t */\n\t\tinsert?: (context: ValidateContext, data: any) => Promise<any> | any;\n\n\t\t/**\n\t\t 
* Validate data for patch operations.\n\t\t * Can be sync or async.\n\t\t *\n\t\t * @param context - Plugin context with ctx, tableName, patchId, schema, etc.\n\t\t * @param data - The partial data to validate\n\t\t * @returns The data unchanged (or Promise resolving to data)\n\t\t * @throws ConvexError if validation fails\n\t\t */\n\t\tpatch?: (context: ValidateContext, data: any) => Promise<any> | any;\n\t};\n}\n\n/**\n * Type guard to check if something is a ValidatePlugin\n */\nexport function isValidatePlugin(obj: unknown): obj is ValidatePlugin {\n\treturn (\n\t\ttypeof obj === 'object' &&\n\t\tobj !== null &&\n\t\t'_type' in obj &&\n\t\ttypeof (obj as any)._type === 'string' &&\n\t\t'verify' in obj &&\n\t\ttypeof (obj as any).verify === 'object'\n\t);\n}\n\n// =============================================================================\n// Plugin Collection Types\n// =============================================================================\n\n/**\n * A collection of validate plugins\n */\nexport type ValidatePluginRecord = Record<string, ValidatePlugin>;\n\n// =============================================================================\n// Plugin Helpers\n// =============================================================================\n\n/**\n * Run all validate plugins for an operation.\n * Plugins are run in order and each receives the output of the previous.\n * All plugin verify functions are awaited (supports async plugins).\n */\nexport async function runValidatePlugins(\n\tplugins: ValidatePlugin[],\n\tcontext: ValidateContext,\n\tdata: any\n): Promise<any> {\n\tlet result = data;\n\n\tfor (const plugin of plugins) {\n\t\tconst verifyFn = context.operation === 'insert' ? 
plugin.verify.insert : plugin.verify.patch;\n\n\t\tif (verifyFn) {\n\t\t\t// Always await - works for both sync and async functions\n\t\t\tresult = await verifyFn(context, result);\n\t\t}\n\t}\n\n\treturn result;\n}\n\n/**\n * Helper to create a validate plugin with proper typing.\n *\n * @param type - Unique identifier for this plugin type\n * @param config - Plugin configuration data\n * @param verify - Object with insert and/or patch verify functions\n * @returns A ValidatePlugin instance\n *\n * @example\n * ```ts\n * const myPlugin = createValidatePlugin(\n * 'myPlugin',\n * { maxLength: 100 },\n * {\n * insert: async (context, data) => {\n * // Validation logic here\n * return data;\n * },\n * }\n * );\n * ```\n */\nexport function createValidatePlugin<Type extends string, Config>(\n\ttype: Type,\n\tconfig: Config,\n\tverify: ValidatePlugin<Type, Config>['verify']\n): ValidatePlugin<Type, Config> {\n\treturn {\n\t\t_type: type,\n\t\tconfig,\n\t\tverify,\n\t};\n}\n","import {\n\tDataModelFromSchemaDefinition,\n\tDocumentByName,\n\tGenericMutationCtx,\n\tGenericSchema,\n\tSchemaDefinition,\n\tTableNamesInDataModel,\n\tWithoutSystemFields,\n} from 'convex/server';\nimport { GenericId } from 'convex/values';\n\nimport { runValidatePlugins, ValidatePlugin } from './plugin';\nimport {\n\tHasKey,\n\tMakeOptional,\n\tOnFailCallback,\n\tOptionalKeysForTable,\n\tProtectedKeysForTable,\n\tVerifyConfigInput,\n} from './types';\n\n/**\n * Extended config input that includes optional validate plugins\n */\ntype VerifyConfigInputWithPlugins = VerifyConfigInput & {\n\t/**\n\t * Validate plugins to run after transforms.\n\t * These plugins can validate data but don't affect input types.\n\t */\n\tplugins?: ValidatePlugin[];\n};\n\n/**\n * Configure type-safe insert and patch functions with validation and transforms.\n *\n * @param schema - Your Convex schema definition\n * @param configs - Configuration object with transforms, configs, and plugins\n * @returns Object with 
`insert`, `patch`, and `dangerouslyPatch` functions\n *\n * @example\n * ```ts\n * import { verifyConfig, defaultValuesConfig, protectedColumnsConfig, uniqueRowConfig } from 'convex-verify';\n * import schema from './schema';\n *\n * export const { insert, patch, dangerouslyPatch } = verifyConfig(schema, {\n * defaultValues: defaultValuesConfig(schema, () => ({\n * posts: { status: 'draft', views: 0 },\n * })),\n * protectedColumns: protectedColumnsConfig(schema, {\n * posts: ['authorId'],\n * }),\n * plugins: [\n * uniqueRowConfig(schema, {\n * posts: ['by_slug'],\n * }),\n * ],\n * });\n * ```\n */\nexport const verifyConfig = <\n\tS extends SchemaDefinition<GenericSchema, boolean>,\n\tDataModel extends DataModelFromSchemaDefinition<S>,\n\tconst VC extends VerifyConfigInputWithPlugins,\n>(\n\t_schema: S,\n\tconfigs: VC\n) => {\n\t// Get all validate plugins\n\tconst validatePlugins = configs.plugins ?? [];\n\n\t/**\n\t * Insert a document with all configured verifications applied.\n\t *\n\t * Execution order:\n\t * 1. Transform: defaultValues (makes fields optional, applies defaults)\n\t * 2. Validate: plugins (in order provided)\n\t * 3. Insert into database\n\t */\n\tconst insert = async <\n\t\tconst TN extends TableNamesInDataModel<DataModel>,\n\t\tconst D extends DocumentByName<DataModel, TN>,\n\t>(\n\t\tctx: Omit<GenericMutationCtx<DataModel>, never>,\n\t\ttableName: TN,\n\t\tdata: HasKey<VC, 'defaultValues'> extends true\n\t\t\t? 
MakeOptional<\n\t\t\t\t\tWithoutSystemFields<D>,\n\t\t\t\t\tOptionalKeysForTable<VC, TN> & keyof WithoutSystemFields<D>\n\t\t\t\t>\n\t\t\t: WithoutSystemFields<D>,\n\t\toptions?: {\n\t\t\tonFail?: OnFailCallback<D>;\n\t\t}\n\t): Promise<GenericId<TN>> => {\n\t\tlet verifiedData = data as WithoutSystemFields<DocumentByName<DataModel, TN>>;\n\n\t\t// === TRANSFORM PHASE ===\n\n\t\t// Apply default values (transforms data)\n\t\tif (configs.defaultValues) {\n\t\t\tverifiedData = await configs.defaultValues.verify(tableName, verifiedData);\n\t\t}\n\n\t\t// === VALIDATE PHASE ===\n\n\t\t// Run all validate plugins\n\t\tif (validatePlugins.length > 0) {\n\t\t\tverifiedData = await runValidatePlugins(\n\t\t\t\tvalidatePlugins,\n\t\t\t\t{\n\t\t\t\t\tctx,\n\t\t\t\t\ttableName: tableName as string,\n\t\t\t\t\toperation: 'insert',\n\t\t\t\t\tonFail: options?.onFail,\n\t\t\t\t\tschema: _schema,\n\t\t\t\t},\n\t\t\t\tverifiedData\n\t\t\t);\n\t\t}\n\n\t\t// Final insert\n\t\treturn await ctx.db.insert(tableName, verifiedData);\n\t};\n\n\t/**\n\t * Patch a document with all configured verifications applied.\n\t *\n\t * Protected columns (if configured) are removed from the input type.\n\t * Use dangerouslyPatch() to bypass protected column restrictions.\n\t *\n\t * Execution order:\n\t * 1. Validate: plugins (in order provided)\n\t * 2. Patch in database\n\t *\n\t * Note: defaultValues is skipped for patch operations\n\t */\n\tconst patch = async <\n\t\tconst TN extends TableNamesInDataModel<DataModel>,\n\t\tconst D extends DocumentByName<DataModel, TN>,\n\t>(\n\t\tctx: Omit<GenericMutationCtx<DataModel>, never>,\n\t\ttableName: TN,\n\t\tid: GenericId<TN>,\n\t\tdata: HasKey<VC, 'protectedColumns'> extends true\n\t\t\t? 
Omit<\n\t\t\t\t\tPartial<WithoutSystemFields<D>>,\n\t\t\t\t\tProtectedKeysForTable<VC, TN> & keyof WithoutSystemFields<D>\n\t\t\t\t>\n\t\t\t: Partial<WithoutSystemFields<D>>,\n\t\toptions?: {\n\t\t\tonFail?: OnFailCallback<D>;\n\t\t}\n\t): Promise<void> => {\n\t\tlet verifiedData = data as Partial<WithoutSystemFields<DocumentByName<DataModel, TN>>>;\n\n\t\t// === VALIDATE PHASE ===\n\n\t\t// Run all validate plugins\n\t\tif (validatePlugins.length > 0) {\n\t\t\tverifiedData = await runValidatePlugins(\n\t\t\t\tvalidatePlugins,\n\t\t\t\t{\n\t\t\t\t\tctx,\n\t\t\t\t\ttableName: tableName as string,\n\t\t\t\t\toperation: 'patch',\n\t\t\t\t\tpatchId: id,\n\t\t\t\t\tonFail: options?.onFail,\n\t\t\t\t\tschema: _schema,\n\t\t\t\t},\n\t\t\t\tverifiedData\n\t\t\t);\n\t\t}\n\n\t\tawait ctx.db.patch(id, verifiedData);\n\t};\n\n\t/**\n\t * Patch a document bypassing protected column restrictions.\n\t *\n\t * WARNING: This allows patching ANY column, including protected ones.\n\t * Only use this when you explicitly need to update a protected column.\n\t *\n\t * Validation plugins still run - only type restrictions are bypassed.\n\t */\n\tconst dangerouslyPatch = async <\n\t\tconst TN extends TableNamesInDataModel<DataModel>,\n\t\tconst D extends DocumentByName<DataModel, TN>,\n\t>(\n\t\tctx: Omit<GenericMutationCtx<DataModel>, never>,\n\t\ttableName: TN,\n\t\tid: GenericId<TN>,\n\t\tdata: Partial<WithoutSystemFields<D>>,\n\t\toptions?: {\n\t\t\tonFail?: OnFailCallback<D>;\n\t\t}\n\t): Promise<void> => {\n\t\tlet verifiedData = data;\n\n\t\t// === VALIDATE PHASE ===\n\n\t\t// Run all validate plugins (protection is bypassed, but validation still runs)\n\t\tif (validatePlugins.length > 0) {\n\t\t\tverifiedData = await runValidatePlugins(\n\t\t\t\tvalidatePlugins,\n\t\t\t\t{\n\t\t\t\t\tctx,\n\t\t\t\t\ttableName: tableName as string,\n\t\t\t\t\toperation: 'patch',\n\t\t\t\t\tpatchId: id,\n\t\t\t\t\tonFail: options?.onFail,\n\t\t\t\t\tschema: 
_schema,\n\t\t\t\t},\n\t\t\t\tverifiedData\n\t\t\t);\n\t\t}\n\n\t\tawait ctx.db.patch(id, verifiedData);\n\t};\n\n\treturn {\n\t\tinsert,\n\t\tpatch,\n\t\tdangerouslyPatch,\n\t\t// Expose configs for debugging/advanced usage\n\t\tconfigs,\n\t};\n};\n","import {\n\tDataModelFromSchemaDefinition,\n\tIndexes,\n\tNamedTableInfo,\n\tSchemaDefinition,\n\tWithoutSystemFields,\n} from 'convex/server';\n\n// =============================================================================\n// Utility Types\n// =============================================================================\n\nexport type Prettify<T> = { [K in keyof T]: T[K] } & {};\n\nexport type MakeOptional<T, K extends PropertyKey> = Prettify<\n\tOmit<T, K & keyof T> & Partial<Pick<T, K & keyof T>>\n>;\n\n// =============================================================================\n// Base Types for Config Functions\n// =============================================================================\n\n/**\n * Base interface that all config functions should return.\n * Each config type can have its own `verify` signature and additional properties.\n */\nexport type BaseConfigReturn = {\n\tconfig: Record<string, any>;\n};\n\n// =============================================================================\n// OnFail Types\n// =============================================================================\n\nexport type OnFailArgs<D> = {\n\tuniqueColumn?: {\n\t\tconflictingColumn: keyof D;\n\t\texistingData: D;\n\t};\n\tuniqueRow?: {\n\t\texistingData: D | null;\n\t};\n\teditableColumn?: {\n\t\tremovedColumns: string[];\n\t\tfilteredData: D;\n\t};\n\trequiredColumn?: {\n\t\tmissingColumn: keyof D;\n\t};\n};\n\nexport type OnFailCallback<D> = (args: OnFailArgs<D>) => void;\n\n// =============================================================================\n// Config Data Types (what the user provides)\n// =============================================================================\n\nexport type DMGeneric = 
DataModelFromSchemaDefinition<SchemaDefinition<any, boolean>>;\n\nexport type DefaultValuesConfigData<DM extends DMGeneric> = {\n\t[K in keyof DM]?: {\n\t\t[column in keyof WithoutSystemFields<DM[K]['document']>]?: DM[K]['document'][column];\n\t};\n};\n\n// =============================================================================\n// Index-Based Config Types (shared between uniqueRow, uniqueColumn, etc.)\n// =============================================================================\n\n/**\n * Base options shared by all index-based config entries.\n * Individual plugins can extend this with their own options.\n */\nexport type IndexConfigBaseOptions = {\n\t/** Additional identifiers to check if the existing row is the same document being updated */\n\tidentifiers?: string[];\n};\n\n/**\n * A config entry that can be either:\n * - A string (index name) for shorthand\n * - An object with `index` and additional options\n *\n * @example\n * ```ts\n * // These are equivalent:\n * 'by_username'\n * { index: 'by_username' }\n *\n * // With options:\n * { index: 'by_username', identifiers: ['_id', 'userId'] }\n * ```\n */\nexport type IndexConfigEntry<\n\tDM extends DMGeneric,\n\tK extends keyof DM,\n\tOptions extends IndexConfigBaseOptions = IndexConfigBaseOptions,\n> =\n\t| keyof Indexes<NamedTableInfo<DM, K>>\n\t| ({\n\t\t\tindex: keyof Indexes<NamedTableInfo<DM, K>>;\n\t\t\tidentifiers?: (keyof NamedTableInfo<DM, K>['document'])[];\n\t } & Omit<Options, 'identifiers'>);\n\n/**\n * Normalized form of an index config entry (always an object)\n */\nexport type NormalizedIndexConfig<Options extends IndexConfigBaseOptions = IndexConfigBaseOptions> =\n\t{\n\t\tindex: string;\n\t\tidentifiers: string[];\n\t} & Omit<Options, 'identifiers'>;\n\n/**\n * Normalize a config entry to always have index and identifiers.\n * Works for both string shorthand and full object configs.\n */\nexport function normalizeIndexConfigEntry<\n\tOptions extends IndexConfigBaseOptions = 
IndexConfigBaseOptions,\n>(\n\tentry: string | ({ index: string; identifiers?: string[] } & Omit<Options, 'identifiers'>),\n\tdefaultIdentifiers: string[] = ['_id']\n): NormalizedIndexConfig<Options> {\n\tif (typeof entry === 'string') {\n\t\treturn {\n\t\t\tindex: entry,\n\t\t\tidentifiers: defaultIdentifiers,\n\t\t} as NormalizedIndexConfig<Options>;\n\t}\n\n\tconst { index, identifiers, ...rest } = entry;\n\treturn {\n\t\tindex: String(index),\n\t\tidentifiers: identifiers?.map(String) ?? defaultIdentifiers,\n\t\t...rest,\n\t} as NormalizedIndexConfig<Options>;\n}\n\n// =============================================================================\n// UniqueRow Config Types\n// =============================================================================\n\nexport type UniqueRowConfigOptions = IndexConfigBaseOptions & {\n\tqueryExistingWithNullish?: boolean;\n};\n\nexport type UniqueRowConfigEntry<DM extends DMGeneric, K extends keyof DM> = IndexConfigEntry<\n\tDM,\n\tK,\n\tUniqueRowConfigOptions\n>;\n\nexport type UniqueRowConfigData<DM extends DMGeneric> = {\n\t[K in keyof DM]?: UniqueRowConfigEntry<DM, K>[];\n};\n\n// =============================================================================\n// UniqueColumn Config Types\n// =============================================================================\n\nexport type UniqueColumnConfigOptions = IndexConfigBaseOptions;\n\nexport type UniqueColumnConfigEntry<DM extends DMGeneric, K extends keyof DM> = IndexConfigEntry<\n\tDM,\n\tK,\n\tUniqueColumnConfigOptions\n>;\n\nexport type UniqueColumnConfigData<DM extends DMGeneric> = {\n\t[K in keyof DM]?: UniqueColumnConfigEntry<DM, K>[];\n};\n\n// =============================================================================\n// Input Types (loose types for verifyConfig to accept)\n// =============================================================================\n\n/**\n * Loose input types that accept any return from config functions.\n * We use loose types here to 
avoid complex generic matching,\n * then extract the specific config types using conditional types.\n */\nexport type DefaultValuesInput = {\n\t_type: 'defaultValues';\n\tverify: (tableName: any, data: any) => Promise<any>;\n\tconfig:\n\t\t| Record<string, Record<string, any>>\n\t\t| (() => Record<string, Record<string, any>> | Promise<Record<string, Record<string, any>>>);\n};\n\n/**\n * Loose input type for protectedColumnsConfig return value.\n */\nexport type ProtectedColumnsInput = {\n\t_type: 'protectedColumns';\n\tconfig: Record<string, string[]>;\n};\n\n// =============================================================================\n// Object-Based Types (for verifyConfig)\n// =============================================================================\n\n/**\n * Config input for verifyConfig.\n *\n * - `defaultValues`: Transform plugin that makes fields optional (affects types)\n * - `protectedColumns`: Columns that cannot be patched (affects patch() types)\n * - `plugins`: Array of validate plugins (use for uniqueRow, uniqueColumn, custom plugins, etc.)\n */\nexport type VerifyConfigInput = {\n\tdefaultValues?: DefaultValuesInput;\n\tprotectedColumns?: ProtectedColumnsInput;\n};\n\n// =============================================================================\n// Type Extraction Helpers\n// =============================================================================\n\n/**\n * Extract the config type from defaultValues.config.\n * Handles both direct object and function forms.\n */\nexport type ExtractDefaultValuesConfig<VC> = VC extends {\n\tdefaultValues: { config: infer C };\n}\n\t? C extends () => infer R\n\t\t? Awaited<R>\n\t\t: C\n\t: Record<string, never>;\n\n/**\n * Compute which keys should be optional for a given table based on all configs.\n * Currently only defaultValues affects optionality.\n */\nexport type OptionalKeysForTable<VC, TN> = TN extends keyof ExtractDefaultValuesConfig<VC>\n\t? 
keyof ExtractDefaultValuesConfig<VC>[TN]\n\t: never;\n\n/**\n * Helper to check if a key exists in a type\n */\nexport type HasKey<T, K extends PropertyKey> = K extends keyof T ? true : false;\n\n// =============================================================================\n// Protected Columns Type Extraction\n// =============================================================================\n\n/**\n * Extract the config type from protectedColumns.config\n */\nexport type ExtractProtectedColumnsConfig<VC> = VC extends {\n\tprotectedColumns: { config: infer C };\n}\n\t? C\n\t: Record<string, never>;\n\n/**\n * Get protected column keys for a specific table.\n * Returns the column names that should be omitted from patch() input.\n */\nexport type ProtectedKeysForTable<VC, TN> = TN extends keyof ExtractProtectedColumnsConfig<VC>\n\t? ExtractProtectedColumnsConfig<VC>[TN] extends readonly (infer K)[]\n\t\t? K\n\t\t: never\n\t: never;\n","import {\n\tDataModelFromSchemaDefinition,\n\tDocumentByName,\n\tGenericSchema,\n\tSchemaDefinition,\n\tTableNamesInDataModel,\n\tWithoutSystemFields,\n} from 'convex/server';\n\nimport { DefaultValuesConfigData, MakeOptional } from '../core/types';\n\n/**\n * Creates a default values transform.\n *\n * Makes specified fields optional in insert() by providing default values.\n * Supports both static config objects and dynamic functions (sync or async).\n *\n * @param schema - Your Convex schema definition\n * @param config - Default values config (object or function returning object)\n * @returns Config object for use with verifyConfig\n *\n * @example\n * ```ts\n * // Static config (same values reused)\n * const defaults = defaultValuesConfig(schema, {\n * posts: { status: 'draft', views: 0 },\n * });\n *\n * // Dynamic config (fresh values on each insert)\n * const defaults = defaultValuesConfig(schema, () => ({\n * posts: { status: 'draft', slug: generateRandomSlug() },\n * }));\n *\n * // Async config\n * const defaults = 
defaultValuesConfig(schema, async () => ({\n * posts: { category: await fetchDefaultCategory() },\n * }));\n * ```\n */\nexport const defaultValuesConfig = <\n\tS extends SchemaDefinition<GenericSchema, boolean>,\n\tDataModel extends DataModelFromSchemaDefinition<S>,\n\tconst C extends DefaultValuesConfigData<DataModel>,\n>(\n\t_schema: S,\n\tconfig: C | (() => C | Promise<C>)\n) => {\n\t/**\n\t * Apply default values to the data for a given table.\n\t * Async to support dynamic config functions.\n\t */\n\tconst verify = async <TN extends TableNamesInDataModel<DataModel>>(\n\t\ttableName: TN,\n\t\tdata: MakeOptional<WithoutSystemFields<DocumentByName<DataModel, TN>>, keyof C[TN]>\n\t): Promise<WithoutSystemFields<DocumentByName<DataModel, TN>>> => {\n\t\t// Resolve config - handle both direct object and function forms\n\t\tconst resolvedConfig = typeof config === 'function' ? await config() : config;\n\n\t\treturn {\n\t\t\t...(resolvedConfig[tableName] as Partial<WithoutSystemFields<DocumentByName<DataModel, TN>>>),\n\t\t\t...(data as WithoutSystemFields<DocumentByName<DataModel, TN>>),\n\t\t};\n\t};\n\n\treturn {\n\t\t_type: 'defaultValues' as const,\n\t\tverify,\n\t\tconfig,\n\t};\n};\n","import {\n\tDataModelFromSchemaDefinition,\n\tGenericSchema,\n\tSchemaDefinition,\n\tWithoutSystemFields,\n} from 'convex/server';\n\nimport { DMGeneric } from '../core/types';\n\n/**\n * Config data type for protected columns.\n * Maps table names to arrays of column names that should be protected from patching.\n */\nexport type ProtectedColumnsConfigData<DM extends DMGeneric> = {\n\t[K in keyof DM]?: (keyof WithoutSystemFields<DM[K]['document']>)[];\n};\n\n/**\n * Creates a protected columns config.\n *\n * Protected columns are removed from the patch() input type,\n * preventing accidental updates to critical fields like foreign keys.\n * Use dangerouslyPatch() to bypass this protection when needed.\n *\n * @param schema - Your Convex schema definition\n * @param config - 
Object mapping table names to arrays of protected column names\n * @returns Config object for use with verifyConfig\n *\n * @example\n * ```ts\n * const protectedColumns = protectedColumnsConfig(schema, {\n * posts: ['authorId', 'createdAt'],\n * comments: ['postId', 'authorId'],\n * });\n *\n * // In verifyConfig:\n * const { patch, dangerouslyPatch } = verifyConfig(schema, {\n * protectedColumns,\n * });\n *\n * // patch() won't allow authorId\n * await patch(ctx, 'posts', id, {\n * authorId: '...', // TS Error - property doesn't exist\n * title: 'new', // OK\n * });\n *\n * // dangerouslyPatch() allows all columns\n * await dangerouslyPatch(ctx, 'posts', id, {\n * authorId: '...', // OK - bypasses protection\n * });\n * ```\n */\nexport const protectedColumnsConfig = <\n\tS extends SchemaDefinition<GenericSchema, boolean>,\n\tDataModel extends DataModelFromSchemaDefinition<S>,\n\tconst C extends ProtectedColumnsConfigData<DataModel>,\n>(\n\t_schema: S,\n\tconfig: C\n) => {\n\treturn {\n\t\t_type: 'protectedColumns' as const,\n\t\tconfig,\n\t};\n};\n","import {\n\tDataModelFromSchemaDefinition,\n\tDocumentByName,\n\tGenericSchema,\n\tSchemaDefinition,\n\tTableNamesInDataModel,\n} from 'convex/server';\nimport { ConvexError } from 'convex/values';\n\nimport { createValidatePlugin, ValidateContext, ValidatePlugin } from '../core/plugin';\nimport { UniqueRowConfigData, UniqueRowConfigOptions } from '../core/types';\nimport { constructColumnData, constructIndexData } from '../utils/helpers';\n\n/**\n * Creates a validate plugin that enforces row uniqueness based on database indexes.\n *\n * This plugin checks that the combination of column values defined in your indexes\n * doesn't already exist in the database before allowing insert/patch operations.\n *\n * @param schema - Your Convex schema definition\n * @param config - Object mapping table names to arrays of index configs\n * @returns A ValidatePlugin for use with verifyConfig\n *\n * @example\n * ```ts\n * // 
Simple shorthand - just index names\n * const uniqueRow = uniqueRowConfig(schema, {\n * posts: ['by_slug'],\n * users: ['by_email', 'by_username'],\n * });\n *\n * // With options\n * const uniqueRow = uniqueRowConfig(schema, {\n * posts: [\n * { index: 'by_author_slug', identifiers: ['_id', 'authorId'] },\n * ],\n * });\n *\n * // Use with verifyConfig\n * const { insert, patch } = verifyConfig(schema, {\n * plugins: [uniqueRow],\n * });\n * ```\n */\nexport const uniqueRowConfig = <\n\tS extends SchemaDefinition<GenericSchema, boolean>,\n\tDataModel extends DataModelFromSchemaDefinition<S>,\n\tconst C extends UniqueRowConfigData<DataModel>,\n>(\n\tschema: S,\n\tconfig: C\n): ValidatePlugin<'uniqueRow', C> => {\n\tconst uniqueRowError = (message: string): never => {\n\t\tthrow new ConvexError({\n\t\t\tmessage,\n\t\t\tcode: 'UNIQUE_ROW_VERIFICATION_ERROR',\n\t\t});\n\t};\n\n\t/**\n\t * Core verification logic shared between insert and patch\n\t */\n\tconst verifyUniqueness = async <TN extends TableNamesInDataModel<DataModel>>(\n\t\tcontext: ValidateContext<string>,\n\t\tdata: Record<string, any>,\n\t\ttableName: TN\n\t): Promise<Record<string, any>> => {\n\t\tconst { ctx, operation, patchId, onFail } = context;\n\n\t\tconst indexesData = constructIndexData(schema, tableName, config);\n\n\t\tif (!indexesData && !!config[tableName]) {\n\t\t\tuniqueRowError(`Index data was not found where there should have been.`);\n\t\t}\n\n\t\t// No indexes provided for this table\n\t\tif (!indexesData) {\n\t\t\treturn data;\n\t\t}\n\n\t\tfor (const indexInfo of indexesData) {\n\t\t\tconst { name, fields, identifiers, ...rest } = indexInfo;\n\t\t\tconst _options = rest as UniqueRowConfigOptions;\n\n\t\t\tif (!fields[0] && !fields[1]) {\n\t\t\t\tuniqueRowError(\n\t\t\t\t\t`Error in 'verifyRowUniqueness()'. There must be two columns to test against. 
If you are attempting to enforce a unique column, use the 'uniqueColumns' config option.`\n\t\t\t\t);\n\t\t\t}\n\n\t\t\tconst columnData = constructColumnData(fields, data, {});\n\n\t\t\tconst getExisting = async (cd: ReturnType<typeof constructColumnData>) => {\n\t\t\t\ttype D = DocumentByName<DataModel, TN>;\n\t\t\t\tlet existingByIndex: D[] = [];\n\n\t\t\t\tif (!cd) {\n\t\t\t\t\texistingByIndex = [];\n\t\t\t\t} else {\n\t\t\t\t\texistingByIndex = await ctx.db\n\t\t\t\t\t\t.query(tableName)\n\t\t\t\t\t\t.withIndex(name, (q: any) =>\n\t\t\t\t\t\t\tcd.reduce((query: any, { column, value }) => query.eq(column, value), q)\n\t\t\t\t\t\t)\n\t\t\t\t\t\t.collect();\n\t\t\t\t}\n\n\t\t\t\tif (existingByIndex.length > 1) {\n\t\t\t\t\tconsole.warn(\n\t\t\t\t\t\t`There was more than one existing result found for index ${name}. Check the following IDs:`,\n\t\t\t\t\t\texistingByIndex.map((r) => r._id)\n\t\t\t\t\t);\n\t\t\t\t\tconsole.warn(\n\t\t\t\t\t\t`It is recommended that you triage the rows listed above since they have data that go against a rule of row uniqueness.`\n\t\t\t\t\t);\n\t\t\t\t}\n\n\t\t\t\treturn existingByIndex.length > 0 ? existingByIndex[0] : null;\n\t\t\t};\n\n\t\t\tconst existing = await getExisting(columnData);\n\n\t\t\t/**\n\t\t\t * Insert check\n\t\t\t */\n\t\t\tif (operation === 'insert') {\n\t\t\t\tif (!existing) {\n\t\t\t\t\t// All good, verify passes for this index, continue to next\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\n\t\t\t\t// Found existing - fail\n\t\t\t\tonFail?.({\n\t\t\t\t\tuniqueRow: {\n\t\t\t\t\t\texistingData: existing,\n\t\t\t\t\t},\n\t\t\t\t});\n\t\t\t\tuniqueRowError(\n\t\t\t\t\t`Unable to [${operation}] document. 
In table [${tableName}], there is an existing row that has the same data combination in the columns: [${fields.join(`, `)}].`\n\t\t\t\t);\n\t\t\t}\n\n\t\t\t/**\n\t\t\t * Patch check\n\t\t\t */\n\t\t\tif (operation === 'patch') {\n\t\t\t\tif (!patchId) {\n\t\t\t\t\tuniqueRowError(`Unable to patch document without an id.`);\n\t\t\t\t}\n\n\t\t\t\ttype D = DocumentByName<DataModel, TN>;\n\n\t\t\t\t/**\n\t\t\t\t * Check if the existing document matches one of the identifiers\n\t\t\t\t * (meaning we're updating the same document, not creating a conflict)\n\t\t\t\t */\n\t\t\t\tconst matchedToExisting = (_existing: D | null, _data: Partial<D>) => {\n\t\t\t\t\tlet idMatchedToExisting: string | null = null;\n\n\t\t\t\t\tif (_existing) {\n\t\t\t\t\t\tfor (const identifier of identifiers) {\n\t\t\t\t\t\t\tif (\n\t\t\t\t\t\t\t\t(_existing[identifier as keyof D] &&\n\t\t\t\t\t\t\t\t\t_data[identifier as keyof D] &&\n\t\t\t\t\t\t\t\t\t_existing[identifier as keyof D] === _data[identifier as keyof D]) ||\n\t\t\t\t\t\t\t\t(identifier === '_id' && _existing[identifier as keyof D] === patchId)\n\t\t\t\t\t\t\t) {\n\t\t\t\t\t\t\t\tidMatchedToExisting = String(identifier);\n\t\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\treturn idMatchedToExisting;\n\t\t\t\t};\n\n\t\t\t\tconst checkExisting = (_existing: D | null, _data: Partial<D>) => {\n\t\t\t\t\tconst matchedId = matchedToExisting(_existing, _data);\n\n\t\t\t\t\tif (!_existing) {\n\t\t\t\t\t\t// No existing found, no conflict\n\t\t\t\t\t\treturn;\n\t\t\t\t\t}\n\n\t\t\t\t\tif (matchedId) {\n\t\t\t\t\t\t// The existing document is the same one we're patching - OK\n\t\t\t\t\t\treturn;\n\t\t\t\t\t} else {\n\t\t\t\t\t\t// Found a different document with the same unique values - fail\n\t\t\t\t\t\tonFail?.({\n\t\t\t\t\t\t\tuniqueRow: {\n\t\t\t\t\t\t\t\texistingData: _existing,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t});\n\t\t\t\t\t\tuniqueRowError(\n\t\t\t\t\t\t\t`In '${tableName}' table, there already exists a value 
match of the columns: [${fields.join(`,`)}].`\n\t\t\t\t\t\t);\n\t\t\t\t\t}\n\t\t\t\t};\n\n\t\t\t\tif (!existing && !columnData && patchId) {\n\t\t\t\t\t// No existing results found because there wasn't complete data provided\n\t\t\t\t\t// to match the provided index. We need to merge with existing document\n\t\t\t\t\t// to check what WOULD be the potential data conflict.\n\t\t\t\t\tconst match = await ctx.db.get(patchId);\n\n\t\t\t\t\tif (!match) {\n\t\t\t\t\t\tuniqueRowError(`No document found for id ${patchId}`);\n\t\t\t\t\t\treturn data; // TypeScript needs this even though we throw\n\t\t\t\t\t}\n\n\t\t\t\t\tconst extensiveColumnData = constructColumnData(\n\t\t\t\t\t\tfields,\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\t...match,\n\t\t\t\t\t\t\t...data,\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{}\n\t\t\t\t\t);\n\n\t\t\t\t\tif (extensiveColumnData) {\n\t\t\t\t\t\tconst extensiveExisting = await getExisting(extensiveColumnData);\n\t\t\t\t\t\tcheckExisting(extensiveExisting as D | null, data as Partial<D>);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tuniqueRowError(`Incomplete data when there should have been enough.`);\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\tcheckExisting(existing as D | null, data as Partial<D>);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\treturn data;\n\t};\n\n\treturn createValidatePlugin('uniqueRow', config, {\n\t\tinsert: async (context, data) => {\n\t\t\treturn verifyUniqueness(context, data, context.tableName as TableNamesInDataModel<DataModel>);\n\t\t},\n\t\tpatch: async (context, data) => {\n\t\t\treturn verifyUniqueness(context, data, context.tableName as TableNamesInDataModel<DataModel>);\n\t\t},\n\t});\n};\n","import {\n\tDataModelFromSchemaDefinition,\n\tDocumentByName,\n\tGenericSchema,\n\tSchemaDefinition,\n\tTableNamesInDataModel,\n} from 'convex/server';\n\nimport {\n\tIndexConfigBaseOptions,\n\tNormalizedIndexConfig,\n\tnormalizeIndexConfigEntry,\n\tUniqueRowConfigData,\n} from '../core/types';\n\n/**\n * Get Table indexes helper\n *\n * Note: this is using an experimental 
API in convex-js\n * https://github.com/get-convex/convex-js/commit/04c3b44cab54c4d2230cce9312bdff074d54ab04\n */\nexport const getTableIndexes = <\n\tS extends SchemaDefinition<GenericSchema, boolean>,\n\tDataModel extends DataModelFromSchemaDefinition<S>,\n\tTN extends TableNamesInDataModel<DataModel>,\n>(\n\tschema: S,\n\ttableName: TN\n) => {\n\treturn schema.tables[tableName][' indexes']();\n};\n\n/**\n * Generate column data from fields and data object\n */\nexport const constructColumnData = <\n\tS extends SchemaDefinition<GenericSchema, boolean>,\n\tDataModel extends DataModelFromSchemaDefinition<S>,\n\tTN extends TableNamesInDataModel<DataModel>,\n\tD extends Partial<DocumentByName<DataModel, TN>>,\n>(\n\tfields: string[],\n\tdata: D,\n\t{\n\t\tallowNullishValue = false,\n\t\tallOrNothing = true,\n\t}: {\n\t\tallowNullishValue?: boolean;\n\t\tallOrNothing?: boolean;\n\t}\n) => {\n\tconst lengthOfFields = fields.length;\n\n\tconst columnData = fields\n\t\t.map((_, index) => {\n\t\t\tconst column = fields?.[index];\n\t\t\tconst value = data?.[column];\n\n\t\t\tif (!column || (!allowNullishValue && !value)) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\treturn {\n\t\t\t\tcolumn,\n\t\t\t\tvalue,\n\t\t\t};\n\t\t})\n\t\t.filter((e) => !!e);\n\n\tif (allOrNothing && columnData.length !== lengthOfFields) {\n\t\tconsole.warn(\n\t\t\t'The index was NOT supplied with the same amount data as there was fields. This warning only appears when setting `allOrNothing` to `true`.',\n\t\t\t'`fields: `',\n\t\t\tfields,\n\t\t\t'`columnData: `',\n\t\t\tcolumnData\n\t\t);\n\t\treturn null;\n\t}\n\n\treturn columnData.length > 0 ? 
columnData : null;\n};\n\n/**\n * Construct index data from schema and config.\n * Handles both string shorthand and full object config entries.\n *\n * @returns Array of normalized index configs with resolved field names from schema\n */\nexport const constructIndexData = <\n\tS extends SchemaDefinition<GenericSchema, boolean>,\n\tDataModel extends DataModelFromSchemaDefinition<S>,\n\tTN extends TableNamesInDataModel<DataModel>,\n\tOptions extends IndexConfigBaseOptions = IndexConfigBaseOptions,\n>(\n\tschema: S,\n\ttableName: TN,\n\tindexConfig?: UniqueRowConfigData<DataModel>\n): (NormalizedIndexConfig<Options> & { name: string; fields: string[] })[] | undefined => {\n\tif (!indexConfig) {\n\t\treturn;\n\t}\n\n\tconst tableConfig = indexConfig?.[tableName];\n\tif (!tableConfig) {\n\t\treturn;\n\t}\n\n\treturn tableConfig.map((entry) => {\n\t\t// Normalize the entry (handles both string and object forms)\n\t\tconst normalized = normalizeIndexConfigEntry<Options>(entry as any);\n\t\tconst { index, identifiers, ...rest } = normalized;\n\n\t\tconst fields = getTableIndexes(schema, tableName).find(\n\t\t\t(i) => i.indexDescriptor == index\n\t\t)?.fields;\n\n\t\tif (!fields) {\n\t\t\tthrow new Error(`Error in 'constructIndexData()'. 
No fields found for index: [${index}]`);\n\t\t}\n\n\t\t// Create a unique map in case there is any overlap in identifiers\n\t\t// Always include '_id' as a fallback identifier\n\t\tconst identifierMap = new Map<string, string>(\n\t\t\t[...identifiers, '_id'].map((i) => [String(i), String(i)])\n\t\t);\n\n\t\treturn {\n\t\t\tname: index,\n\t\t\tfields,\n\t\t\tidentifiers: Array.from(identifierMap.values()),\n\t\t\t...rest,\n\t\t} as NormalizedIndexConfig<Options> & { name: string; fields: string[] };\n\t});\n};\n","import { DataModelFromSchemaDefinition, GenericSchema, SchemaDefinition } from 'convex/server';\nimport { ConvexError } from 'convex/values';\n\nimport { createValidatePlugin, ValidateContext, ValidatePlugin } from '../core/plugin';\nimport {\n\tnormalizeIndexConfigEntry,\n\tUniqueColumnConfigData,\n\tUniqueColumnConfigOptions,\n} from '../core/types';\n\n/**\n * Creates a validate plugin that enforces column uniqueness using single-column indexes.\n *\n * This is useful when you have a column that must be unique across all rows,\n * like usernames or email addresses.\n *\n * The column name is derived from the index name by removing the 'by_' prefix.\n * For example, 'by_username' checks the 'username' column.\n *\n * @param schema - Your Convex schema definition\n * @param config - Object mapping table names to arrays of index configs\n * @returns A ValidatePlugin for use with verifyConfig\n *\n * @example\n * ```ts\n * // Shorthand: just pass index names as strings\n * const uniqueColumn = uniqueColumnConfig(schema, {\n * users: ['by_username', 'by_email'],\n * organizations: ['by_slug'],\n * });\n *\n * // Full config: pass objects with options\n * const uniqueColumn = uniqueColumnConfig(schema, {\n * users: [\n * { index: 'by_username', identifiers: ['_id', 'userId'] },\n * { index: 'by_email', identifiers: ['_id'] },\n * ],\n * });\n *\n * // Mix and match\n * const uniqueColumn = uniqueColumnConfig(schema, {\n * users: [\n * 'by_username', // 
shorthand\n * { index: 'by_email', identifiers: ['_id', 'clerkId'] }, // full config\n * ],\n * });\n *\n * // Use with verifyConfig\n * const { insert, patch } = verifyConfig(schema, {\n * plugins: [uniqueColumn],\n * });\n * ```\n */\nexport const uniqueColumnConfig = <\n\tS extends SchemaDefinition<GenericSchema, boolean>,\n\tDataModel extends DataModelFromSchemaDefinition<S>,\n\tconst C extends UniqueColumnConfigData<DataModel>,\n>(\n\t_schema: S,\n\tconfig: C\n): ValidatePlugin<'uniqueColumn', C> => {\n\tconst uniqueColumnError = (message: string): never => {\n\t\tthrow new ConvexError({\n\t\t\tmessage,\n\t\t\tcode: 'UNIQUE_COLUMN_VERIFICATION_ERROR',\n\t\t});\n\t};\n\n\t/**\n\t * Core verification logic shared between insert and patch\n\t */\n\tconst verifyUniqueness = async (\n\t\tcontext: ValidateContext<string>,\n\t\tdata: Record<string, any>\n\t): Promise<Record<string, any>> => {\n\t\tconst { ctx, tableName, patchId, onFail } = context;\n\n\t\tconst tableConfig = config[tableName as keyof typeof config] as\n\t\t\t| (string | { index: string; identifiers?: string[] })[]\n\t\t\t| undefined;\n\n\t\t// No config for this table\n\t\tif (!tableConfig) {\n\t\t\treturn data;\n\t\t}\n\n\t\tfor (const entry of tableConfig) {\n\t\t\tconst { index, identifiers } = normalizeIndexConfigEntry<UniqueColumnConfigOptions>(\n\t\t\t\tentry as any\n\t\t\t);\n\n\t\t\t// Extract column name from index name (e.g., 'by_username' -> 'username')\n\t\t\tconst columnName = index.replace('by_', '');\n\t\t\tconst value = data[columnName];\n\n\t\t\t// Skip if the column isn't in the data being inserted/patched\n\t\t\tif (value === undefined || value === null) {\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\t// Query for existing row with this value\n\t\t\tconst existing = await ctx.db\n\t\t\t\t.query(tableName)\n\t\t\t\t.withIndex(index, (q: any) => q.eq(columnName, value))\n\t\t\t\t.unique();\n\n\t\t\tif (!existing) {\n\t\t\t\t// No conflict, continue to next 
index\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\t// Check if the existing row matches one of the identifiers\n\t\t\t// (meaning we're updating the same document, not creating a conflict)\n\t\t\tlet isOwnDocument = false;\n\n\t\t\tfor (const identifier of identifiers) {\n\t\t\t\t// For patch operations, also check against patchId when identifier is '_id'\n\t\t\t\tif (identifier === '_id' && patchId && existing._id === patchId) {\n\t\t\t\t\tisOwnDocument = true;\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\n\t\t\t\t// Check if both existing and data have the same identifier value\n\t\t\t\tif (existing[identifier] && data[identifier] && existing[identifier] === data[identifier]) {\n\t\t\t\t\tisOwnDocument = true;\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (isOwnDocument) {\n\t\t\t\t// Same document, no conflict\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\t// Different document has this value - fail\n\t\t\tonFail?.({\n\t\t\t\tuniqueColumn: {\n\t\t\t\t\tconflictingColumn: columnName,\n\t\t\t\t\texistingData: existing,\n\t\t\t\t},\n\t\t\t});\n\n\t\t\tuniqueColumnError(\n\t\t\t\t`In [${tableName}] table, there already exists value \"${value}\" in column [${columnName}].`\n\t\t\t);\n\t\t}\n\n\t\treturn data;\n\t};\n\n\treturn createValidatePlugin('uniqueColumn', config, {\n\t\tinsert: async (context, data) => {\n\t\t\treturn verifyUniqueness(context, data);\n\t\t},\n\t\tpatch: async (context, data) => {\n\t\t\treturn verifyUniqueness(context, 
data);\n\t\t},\n\t});\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACmHO,SAAS,iBAAiB,KAAqC;AACrE,SACC,OAAO,QAAQ,YACf,QAAQ,QACR,WAAW,OACX,OAAQ,IAAY,UAAU,YAC9B,YAAY,OACZ,OAAQ,IAAY,WAAW;AAEjC;AAoBA,eAAsB,mBACrB,SACA,SACA,MACe;AACf,MAAI,SAAS;AAEb,aAAW,UAAU,SAAS;AAC7B,UAAM,WAAW,QAAQ,cAAc,WAAW,OAAO,OAAO,SAAS,OAAO,OAAO;AAEvF,QAAI,UAAU;AAEb,eAAS,MAAM,SAAS,SAAS,MAAM;AAAA,IACxC;AAAA,EACD;AAEA,SAAO;AACR;AAwBO,SAAS,qBACf,MACA,QACA,QAC+B;AAC/B,SAAO;AAAA,IACN,OAAO;AAAA,IACP;AAAA,IACA;AAAA,EACD;AACD;;;ACxIO,IAAM,eAAe,CAK3B,SACA,YACI;AAEJ,QAAM,kBAAkB,QAAQ,WAAW,CAAC;AAU5C,QAAM,SAAS,OAId,KACA,WACA,MAMA,YAG4B;AAC5B,QAAI,eAAe;AAKnB,QAAI,QAAQ,eAAe;AAC1B,qBAAe,MAAM,QAAQ,cAAc,OAAO,WAAW,YAAY;AAAA,IAC1E;AAKA,QAAI,gBAAgB,SAAS,GAAG;AAC/B,qBAAe,MAAM;AAAA,QACpB;AAAA,QACA;AAAA,UACC;AAAA,UACA;AAAA,UACA,WAAW;AAAA,UACX,QAAQ,SAAS;AAAA,UACjB,QAAQ;AAAA,QACT;AAAA,QACA;AAAA,MACD;AAAA,IACD;AAGA,WAAO,MAAM,IAAI,GAAG,OAAO,WAAW,YAAY;AAAA,EACnD;AAcA,QAAM,QAAQ,OAIb,KACA,WACA,IACA,MAMA,YAGmB;AACnB,QAAI,eAAe;AAKnB,QAAI,gBAAgB,SAAS,GAAG;AAC/B,qBAAe,MAAM;AAAA,QACpB;AAAA,QACA;AAAA,UACC;AAAA,UACA;AAAA,UACA,WAAW;AAAA,UACX,SAAS;AAAA,UACT,QAAQ,SAAS;AAAA,UACjB,QAAQ;AAAA,QACT;AAAA,QACA;AAAA,MACD;AAAA,IACD;AAEA,UAAM,IAAI,GAAG,MAAM,IAAI,YAAY;AAAA,EACpC;AAUA,QAAM,mBAAmB,OAIxB,KACA,WACA,IACA,MACA,YAGmB;AACnB,QAAI,eAAe;AAKnB,QAAI,gBAAgB,SAAS,GAAG;AAC/B,qBAAe,MAAM;AAAA,QACpB;AAAA,QACA;AAAA,UACC;AAAA,UACA;AAAA,UACA,WAAW;AAAA,UACX,SAAS;AAAA,UACT,QAAQ,SAAS;AAAA,UACjB,QAAQ;AAAA,QACT;AAAA,QACA;AAAA,MACD;AAAA,IACD;AAEA,UAAM,IAAI,GAAG,MAAM,IAAI,YAAY;AAAA,EACpC;AAEA,SAAO;AAAA,IACN;AAAA,IACA;AAAA,IACA;AAAA;AAAA,IAEA;AAAA,EACD;AACD;;;AC7GO,SAAS,0BAGf,OACA,qBAA+B,CAAC,KAAK,GACJ;AACjC,MAAI,OAAO,UAAU,UAAU;AAC9B,WAAO;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACd;AAAA,EACD;AAEA,QAAM,EAAE,OAAO,aAAa,GAAG,KAAK,IAAI;AACxC,SAAO;AAAA,IACN,OAAO,OAAO,KAAK;AAAA,IACnB,aAAa,aAAa,IAAI,MAAM,KAAK;AAAA,IACzC,GAAG;AAAA,EACJ;AACD;;;ACjGO,IAAM,sBAAsB,CAKlC,SACA,WACI;AAKJ,QAAM,SAAS,OACd,WAC
A,SACiE;AAEjE,UAAM,iBAAiB,OAAO,WAAW,aAAa,MAAM,OAAO,IAAI;AAEvE,WAAO;AAAA,MACN,GAAI,eAAe,SAAS;AAAA,MAC5B,GAAI;AAAA,IACL;AAAA,EACD;AAEA,SAAO;AAAA,IACN,OAAO;AAAA,IACP;AAAA,IACA;AAAA,EACD;AACD;;;ACjBO,IAAM,yBAAyB,CAKrC,SACA,WACI;AACJ,SAAO;AAAA,IACN,OAAO;AAAA,IACP;AAAA,EACD;AACD;;;ACzDA,oBAA4B;;;ACcrB,IAAM,kBAAkB,CAK9B,QACA,cACI;AACJ,SAAO,OAAO,OAAO,SAAS,EAAE,UAAU,EAAE;AAC7C;AAKO,IAAM,sBAAsB,CAMlC,QACA,MACA;AAAA,EACC,oBAAoB;AAAA,EACpB,eAAe;AAChB,MAII;AACJ,QAAM,iBAAiB,OAAO;AAE9B,QAAM,aAAa,OACjB,IAAI,CAAC,GAAG,UAAU;AAClB,UAAM,SAAS,SAAS,KAAK;AAC7B,UAAM,QAAQ,OAAO,MAAM;AAE3B,QAAI,CAAC,UAAW,CAAC,qBAAqB,CAAC,OAAQ;AAC9C;AAAA,IACD;AAEA,WAAO;AAAA,MACN;AAAA,MACA;AAAA,IACD;AAAA,EACD,CAAC,EACA,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC;AAEnB,MAAI,gBAAgB,WAAW,WAAW,gBAAgB;AACzD,YAAQ;AAAA,MACP;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACD;AACA,WAAO;AAAA,EACR;AAEA,SAAO,WAAW,SAAS,IAAI,aAAa;AAC7C;AAQO,IAAM,qBAAqB,CAMjC,QACA,WACA,gBACyF;AACzF,MAAI,CAAC,aAAa;AACjB;AAAA,EACD;AAEA,QAAM,cAAc,cAAc,SAAS;AAC3C,MAAI,CAAC,aAAa;AACjB;AAAA,EACD;AAEA,SAAO,YAAY,IAAI,CAAC,UAAU;AAEjC,UAAM,aAAa,0BAAmC,KAAY;AAClE,UAAM,EAAE,OAAO,aAAa,GAAG,KAAK,IAAI;AAExC,UAAM,SAAS,gBAAgB,QAAQ,SAAS,EAAE;AAAA,MACjD,CAAC,MAAM,EAAE,mBAAmB;AAAA,IAC7B,GAAG;AAEH,QAAI,CAAC,QAAQ;AACZ,YAAM,IAAI,MAAM,gEAAgE,KAAK,GAAG;AAAA,IACzF;AAIA,UAAM,gBAAgB,IAAI;AAAA,MACzB,CAAC,GAAG,aAAa,KAAK,EAAE,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC;AAAA,IAC1D;AAEA,WAAO;AAAA,MACN,MAAM;AAAA,MACN;AAAA,MACA,aAAa,MAAM,KAAK,cAAc,OAAO,CAAC;AAAA,MAC9C,GAAG;AAAA,IACJ;AAAA,EACD,CAAC;AACF;;;AD1FO,IAAM,kBAAkB,CAK9B,QACA,WACoC;AACpC,QAAM,iBAAiB,CAAC,YAA2B;AAClD,UAAM,IAAI,0BAAY;AAAA,MACrB;AAAA,MACA,MAAM;AAAA,IACP,CAAC;AAAA,EACF;AAKA,QAAM,mBAAmB,OACxB,SACA,MACA,cACkC;AAClC,UAAM,EAAE,KAAK,WAAW,SAAS,OAAO,IAAI;AAE5C,UAAM,cAAc,mBAAmB,QAAQ,WAAW,MAAM;AAEhE,QAAI,CAAC,eAAe,CAAC,CAAC,OAAO,SAAS,GAAG;AACxC,qBAAe,wDAAwD;AAAA,IACxE;AAGA,QAAI,CAAC,aAAa;AACjB,aAAO;AAAA,IACR;AAEA,eAAW,aAAa,aAAa;AACpC,YAAM,EAAE,MAAM,QAAQ,aAAa,GAAG,KAAK,IAAI;AAC/C,YAAM,WAAW;AAEjB,UAAI,CAAC,OAAO,CAAC,KAAK,
CAAC,OAAO,CAAC,GAAG;AAC7B;AAAA,UACC;AAAA,QACD;AAAA,MACD;AAEA,YAAM,aAAa,oBAAoB,QAAQ,MAAM,CAAC,CAAC;AAEvD,YAAM,cAAc,OAAO,OAA+C;AAEzE,YAAI,kBAAuB,CAAC;AAE5B,YAAI,CAAC,IAAI;AACR,4BAAkB,CAAC;AAAA,QACpB,OAAO;AACN,4BAAkB,MAAM,IAAI,GAC1B,MAAM,SAAS,EACf;AAAA,YAAU;AAAA,YAAM,CAAC,MACjB,GAAG,OAAO,CAAC,OAAY,EAAE,QAAQ,MAAM,MAAM,MAAM,GAAG,QAAQ,KAAK,GAAG,CAAC;AAAA,UACxE,EACC,QAAQ;AAAA,QACX;AAEA,YAAI,gBAAgB,SAAS,GAAG;AAC/B,kBAAQ;AAAA,YACP,2DAA2D,IAAI;AAAA,YAC/D,gBAAgB,IAAI,CAAC,MAAM,EAAE,GAAG;AAAA,UACjC;AACA,kBAAQ;AAAA,YACP;AAAA,UACD;AAAA,QACD;AAEA,eAAO,gBAAgB,SAAS,IAAI,gBAAgB,CAAC,IAAI;AAAA,MAC1D;AAEA,YAAM,WAAW,MAAM,YAAY,UAAU;AAK7C,UAAI,cAAc,UAAU;AAC3B,YAAI,CAAC,UAAU;AAEd;AAAA,QACD;AAGA,iBAAS;AAAA,UACR,WAAW;AAAA,YACV,cAAc;AAAA,UACf;AAAA,QACD,CAAC;AACD;AAAA,UACC,cAAc,SAAS,yBAAyB,SAAS,mFAAmF,OAAO,KAAK,IAAI,CAAC;AAAA,QAC9J;AAAA,MACD;AAKA,UAAI,cAAc,SAAS;AAC1B,YAAI,CAAC,SAAS;AACb,yBAAe,yCAAyC;AAAA,QACzD;AAQA,cAAM,oBAAoB,CAAC,WAAqB,UAAsB;AACrE,cAAI,sBAAqC;AAEzC,cAAI,WAAW;AACd,uBAAW,cAAc,aAAa;AACrC,kBACE,UAAU,UAAqB,KAC/B,MAAM,UAAqB,KAC3B,UAAU,UAAqB,MAAM,MAAM,UAAqB,KAChE,eAAe,SAAS,UAAU,UAAqB,MAAM,SAC7D;AACD,sCAAsB,OAAO,UAAU;AACvC;AAAA,cACD;AAAA,YACD;AAAA,UACD;AACA,iBAAO;AAAA,QACR;AAEA,cAAM,gBAAgB,CAAC,WAAqB,UAAsB;AACjE,gBAAM,YAAY,kBAAkB,WAAW,KAAK;AAEpD,cAAI,CAAC,WAAW;AAEf;AAAA,UACD;AAEA,cAAI,WAAW;AAEd;AAAA,UACD,OAAO;AAEN,qBAAS;AAAA,cACR,WAAW;AAAA,gBACV,cAAc;AAAA,cACf;AAAA,YACD,CAAC;AACD;AAAA,cACC,OAAO,SAAS,gEAAgE,OAAO,KAAK,GAAG,CAAC;AAAA,YACjG;AAAA,UACD;AAAA,QACD;AAEA,YAAI,CAAC,YAAY,CAAC,cAAc,SAAS;AAIxC,gBAAM,QAAQ,MAAM,IAAI,GAAG,IAAI,OAAO;AAEtC,cAAI,CAAC,OAAO;AACX,2BAAe,4BAA4B,OAAO,EAAE;AACpD,mBAAO;AAAA,UACR;AAEA,gBAAM,sBAAsB;AAAA,YAC3B;AAAA,YACA;AAAA,cACC,GAAG;AAAA,cACH,GAAG;AAAA,YACJ;AAAA,YACA,CAAC;AAAA,UACF;AAEA,cAAI,qBAAqB;AACxB,kBAAM,oBAAoB,MAAM,YAAY,mBAAmB;AAC/D,0BAAc,mBAA+B,IAAkB;AAAA,UAChE,OAAO;AACN,2BAAe,qDAAqD;AAAA,UACrE;AAAA,QACD,OAAO;AACN,wBAAc,UAAsB,IAAkB;AAAA,QACvD;AAAA,MACD;AAAA,IACD;AAEA,WAAO;AAAA,EACR;AAEA,SAAO,qBAAqB,aAAa,QAAQ;AAAA,IAChD,QAAQ,OAAO,SAAS,SAAS;AAChC,aAAO,iBA
AiB,SAAS,MAAM,QAAQ,SAA6C;AAAA,IAC7F;AAAA,IACA,OAAO,OAAO,SAAS,SAAS;AAC/B,aAAO,iBAAiB,SAAS,MAAM,QAAQ,SAA6C;AAAA,IAC7F;AAAA,EACD,CAAC;AACF;;;AEjPA,IAAAA,iBAA4B;AAoDrB,IAAM,qBAAqB,CAKjC,SACA,WACuC;AACvC,QAAM,oBAAoB,CAAC,YAA2B;AACrD,UAAM,IAAI,2BAAY;AAAA,MACrB;AAAA,MACA,MAAM;AAAA,IACP,CAAC;AAAA,EACF;AAKA,QAAM,mBAAmB,OACxB,SACA,SACkC;AAClC,UAAM,EAAE,KAAK,WAAW,SAAS,OAAO,IAAI;AAE5C,UAAM,cAAc,OAAO,SAAgC;AAK3D,QAAI,CAAC,aAAa;AACjB,aAAO;AAAA,IACR;AAEA,eAAW,SAAS,aAAa;AAChC,YAAM,EAAE,OAAO,YAAY,IAAI;AAAA,QAC9B;AAAA,MACD;AAGA,YAAM,aAAa,MAAM,QAAQ,OAAO,EAAE;AAC1C,YAAM,QAAQ,KAAK,UAAU;AAG7B,UAAI,UAAU,UAAa,UAAU,MAAM;AAC1C;AAAA,MACD;AAGA,YAAM,WAAW,MAAM,IAAI,GACzB,MAAM,SAAS,EACf,UAAU,OAAO,CAAC,MAAW,EAAE,GAAG,YAAY,KAAK,CAAC,EACpD,OAAO;AAET,UAAI,CAAC,UAAU;AAEd;AAAA,MACD;AAIA,UAAI,gBAAgB;AAEpB,iBAAW,cAAc,aAAa;AAErC,YAAI,eAAe,SAAS,WAAW,SAAS,QAAQ,SAAS;AAChE,0BAAgB;AAChB;AAAA,QACD;AAGA,YAAI,SAAS,UAAU,KAAK,KAAK,UAAU,KAAK,SAAS,UAAU,MAAM,KAAK,UAAU,GAAG;AAC1F,0BAAgB;AAChB;AAAA,QACD;AAAA,MACD;AAEA,UAAI,eAAe;AAElB;AAAA,MACD;AAGA,eAAS;AAAA,QACR,cAAc;AAAA,UACb,mBAAmB;AAAA,UACnB,cAAc;AAAA,QACf;AAAA,MACD,CAAC;AAED;AAAA,QACC,OAAO,SAAS,wCAAwC,KAAK,gBAAgB,UAAU;AAAA,MACxF;AAAA,IACD;AAEA,WAAO;AAAA,EACR;AAEA,SAAO,qBAAqB,gBAAgB,QAAQ;AAAA,IACnD,QAAQ,OAAO,SAAS,SAAS;AAChC,aAAO,iBAAiB,SAAS,IAAI;AAAA,IACtC;AAAA,IACA,OAAO,OAAO,SAAS,SAAS;AAC/B,aAAO,iBAAiB,SAAS,IAAI;AAAA,IACtC;AAAA,EACD,CAAC;AACF;","names":["import_values"]}
|