convex-ents 0.18.1 → 0.19.0
- package/dist/deletion.js +2 -6
- package/dist/deletion.js.map +1 -1
- package/dist/functions.js +50 -16
- package/dist/functions.js.map +1 -1
- package/dist/index.js +52 -22
- package/dist/index.js.map +1 -1
- package/dist/shared.d.ts +9 -5
- package/dist/shared.js +17 -2
- package/dist/shared.js.map +1 -1
- package/dist/writer.d.ts +4 -5
- package/dist/writer.js +26 -14
- package/dist/writer.js.map +1 -1
- package/package.json +6 -4
package/dist/deletion.js
CHANGED
@@ -69,11 +69,7 @@ function scheduledDeleteFactory(entDefinitions, options) {
       inProgress: import_values.v.boolean()
     },
     handler: async (ctx, { origin, stack, inProgress }) => {
-      const originId = ctx.db.normalizeId(origin.table, origin.id);
-      if (originId === null) {
-        throw new Error(`Invalid ID "${origin.id}" for table ${origin.table}`);
-      }
-      const doc = await ctx.db.get(originId);
+      const doc = await ctx.db.get(origin.table, origin.id);
       if (doc.deletionTime !== origin.deletionTime) {
         if (inProgress) {
           console.error(
@@ -91,7 +87,7 @@ function scheduledDeleteFactory(entDefinitions, options) {
         newCounter(),
         inProgress ? stack : [
           {
-            id: originId,
+            id: origin.id,
             table: origin.table,
             edges: getEdgeArgs(entDefinitions, origin.table)
          }
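The handler change tracks Convex's table-scoped reads: instead of normalizing the string ID into a GenericId and fetching by ID alone, 0.19.0 passes the table name and string ID straight to the two-argument ctx.db.get(table, id) overload, dropping the explicit normalizeId guard. The same pattern recurs throughout the functions.js changes below. A minimal before/after sketch inside a mutation handler, assuming a hypothetical "tasks" table (the cast mirrors the "as GenericId<any>" in the library's own source):

    import { GenericId } from "convex/values";

    // 0.18.x pattern: normalize the string ID, bail out on mismatch, then fetch.
    const taskId = ctx.db.normalizeId("tasks", idString);
    if (taskId === null) {
      throw new Error(`Invalid ID "${idString}" for table tasks`);
    }
    const before = await ctx.db.get(taskId);

    // 0.19.0 pattern, as compiled above: a single table-scoped read.
    const after = await ctx.db.get("tasks", idString as GenericId<"tasks">);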
package/dist/deletion.js.map
CHANGED
@@ -1 +1 @@
- (regenerated single-line source map; its embedded sourcesContent carries the previous src/deletion.ts and src/shared.ts)
+ (regenerated single-line source map; its embedded sourcesContent now shows the handler reading via ctx.db.get(origin.table, origin.id), src/shared.ts gaining the systemAwareGet/systemAwareQuery/isSystemTable helpers and a UniqueIndexFieldName type, and the Pop tuple helper generalized to PopIfSeveral)
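Beyond mirroring the compiled changes, the map's embedded src/shared.ts shows the tuple helper behind IndexFieldTypesForEq generalized from Pop to PopIfSeveral: declared indexes still drop their trailing _creationTime field, but single-field built-in indexes now keep their one field instead of collapsing to an empty tuple. Reconstructed from the embedded source:

    // User-declared indexes always end in _creationTime, which must be popped
    // before computing equality-argument types; built-in single-field indexes
    // (e.g. by_creation_time) must survive intact.
    type Pop<T extends any[]> = T extends [...infer Rest, infer _Last] ? Rest : never;

    type PopIfSeveral<T extends any[]> = T extends [infer Only]
      ? [Only]
      : T extends [...infer Rest, infer _Last]
        ? Rest
        : never;

    type Declared = PopIfSeveral<["email", "_creationTime"]>; // ["email"], same as Pop
    type BuiltIn = PopIfSeveral<["_creationTime"]>; // ["_creationTime"]; Pop yielded []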
package/dist/functions.js
CHANGED
@@ -43,6 +43,15 @@ function edgeCompoundIndexNameRaw(idA, idB) {
 function getEdgeDefinitions(entDefinitions, table) {
   return entDefinitions[table].edges;
 }
+function systemAwareGet(db, table, id) {
+  return isSystemTable(table) ? db.system.get(table, id) : db.get(table, id);
+}
+function systemAwareQuery(db, table) {
+  return isSystemTable(table) ? db.system.query(table) : db.query(table);
+}
+function isSystemTable(table) {
+  return table.startsWith("_");
+}
 
 // src/writer.ts
 var import_server2 = require("convex/server");
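These helpers centralize a dispatch that 0.18.x inlined at each call site (note the matching removal of isSystemTable from the writer section further down): Convex exposes system tables only through db.system, so any read keyed by a runtime table name has to branch on the "_" prefix. A usage sketch with hypothetical IDs, annotating which Convex reader each call reaches:

    const file = await systemAwareGet(ctx.db, "_storage", fileId);
    // -> ctx.db.system.get("_storage", fileId)
    const user = await systemAwareGet(ctx.db, "users", userId);
    // -> ctx.db.get("users", userId)
    const jobs = await systemAwareQuery(ctx.db, "_scheduled_functions").collect();
    // -> ctx.db.system.query("_scheduled_functions").collect()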
@@ -76,7 +85,7 @@ var WriterImplBase = class _WriterImplBase {
         }
       } else if (edgeDefinition.cardinality === "single") {
         if (edgeDefinition.deletion !== void 0 && (!isDeletingSoftly || edgeDefinition.deletion === "soft")) {
-          const doc = await this.ctx.db.get(id);
+          const doc = await this.ctx.db.get(this.table, id);
           if (doc !== null) {
             const otherId = doc[edgeDefinition.field];
             edges[key] = {
@@ -282,7 +291,7 @@ var WriterImplBase = class _WriterImplBase {
     if (id !== void 0) {
       const readPolicy = getReadRule(this.entDefinitions, this.table);
       if (readPolicy !== void 0) {
-        const doc = await this.ctx.db.get(id);
+        const doc = await this.ctx.db.get(this.table, id);
         if (doc === null) {
           throw new Error(
             `Cannot update document with ID "${id}" in table "${this.table} because it does not exist"`
@@ -301,7 +310,7 @@ var WriterImplBase = class _WriterImplBase {
       return;
     }
     const ent = id === void 0 ? void 0 : entWrapper(
-      await this.ctx.db.get(id),
+      await this.ctx.db.get(this.table, id),
       this.ctx,
       this.entDefinitions,
       this.table
@@ -331,9 +340,6 @@ var WriterImplBase = class _WriterImplBase {
     }
   }
 };
-function isSystemTable(table) {
-  return table.startsWith("_");
-}
 
 // src/functions.ts
 var PromiseQueryOrNullImpl = class _PromiseQueryOrNullImpl extends Promise {
@@ -573,7 +579,7 @@ var PromiseTableImpl = class extends PromiseQueryOrNullImpl {
       ctx,
       entDefinitions,
       table,
-      async () => …
+      async () => systemAwareQuery(ctx.db, table)
     );
   }
   get(...args) {
@@ -601,7 +607,7 @@ var PromiseTableImpl = class extends PromiseQueryOrNullImpl {
     return {
       id,
       doc: async () => {
-        const doc = await (…
+        const doc = await systemAwareGet(this.ctx.db, this.table, id);
         if (throwIfNull && doc === null) {
           throw new Error(
             `Document not found with id \`${id}\` in table "${this.table}"`
@@ -617,7 +623,7 @@ var PromiseTableImpl = class extends PromiseQueryOrNullImpl {
       this.table,
       indexName
     );
-    const doc = await this.ctx.db…
+    const doc = await systemAwareQuery(this.ctx.db, this.table).withIndex(
       indexName,
       (q) => values.reduce((q2, value, i) => q2.eq(fieldNames[i], value), q)
     ).unique();
@@ -650,7 +656,7 @@ var PromiseTableImpl = class extends PromiseQueryOrNullImpl {
       });
       return await Promise.all(
         ids.map(async (id) => {
-          const doc = await (…
+          const doc = await systemAwareGet(this.ctx.db, this.table, id);
           if (throwIfNull && doc === null) {
             throw new Error(
               `Document not found with id \`${id}\` in table "${this.table}"`
@@ -661,12 +667,23 @@ var PromiseTableImpl = class extends PromiseQueryOrNullImpl {
         );
       } : async () => {
         const [indexName, values] = args;
+        const fieldNames = getIndexFields(
+          this.entDefinitions,
+          this.table,
+          indexName
+        );
+        if (fieldNames.length > 1) {
+          throw new Error(
+            `Index "${indexName}" has ${fieldNames.length} fields, but getMany() supports only single field indexes`
+          );
+        }
+        const fieldName = fieldNames[0];
         return await Promise.all(
           values.map(async (value) => {
-            const doc = await this.ctx.db…
+            const doc = await systemAwareQuery(this.ctx.db, this.table).withIndex(indexName, (q) => q.eq(fieldName, value)).unique();
             if (throwIfNull && doc === null) {
               throw new Error(
-                `Table "${this.table}" does not contain document with field "${…
+                `Table "${this.table}" does not contain document with field "${fieldName}" = \`${value}\``
               );
             }
             return doc;
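The index-based getMany() overload gains an explicit guard: the index must resolve to exactly one equality field, and the not-found message now names the field and value (the removed template is truncated in this diff rendering). A sketch against a hypothetical "users" ent with a single-field "email" index and a compound "org_role" index on ["org", "role"]:

    const pair = await ctx.table("users").getMany("email", [
      "a@example.com",
      "b@example.com",
    ]); // ok: the index has one equality field, one lookup per value

    await ctx.table("users").getMany("org_role", [someOrgId]);
    // throws: Index "org_role" has 2 fields, but getMany() supports only
    // single field indexes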
@@ -827,7 +844,11 @@ var PromiseEdgeOrNullImpl = class _PromiseEdgeOrNullImpl extends PromiseEntsOrNullImpl {
   constructor(ctx, entDefinitions, table, edgeDefinition, retrieveSourceId, retrieveQuery, retrieveDoc = async (edgeDoc) => {
     const sourceId = edgeDoc[edgeDefinition.field];
     const targetId = edgeDoc[edgeDefinition.ref];
-    const doc = await …
+    const doc = await systemAwareGet(
+      this.ctx.db,
+      edgeDefinition.to,
+      targetId
+    );
     if (doc === null) {
       throw new Error(
         `Dangling reference for edge "${edgeDefinition.name}" in table "${this.table}" for document with ID "${sourceId}": Could not find a document with ID "${targetId}" in table "${edgeDefinition.to}" (edge document ID is "${edgeDoc._id}").`
@@ -1123,7 +1144,11 @@ var PromiseEntOrNullImpl = class extends Promise {
           `Unexpected null reference for edge "${edgeDefinition.name}" in table "${this.table}" on document with ID "${id}": Expected an ID for a document in table "${edgeDefinition.to}".`
         );
       }
-      const otherDoc = await …
+      const otherDoc = await systemAwareGet(
+        this.ctx.db,
+        edgeDefinition.to,
+        otherId
+      );
       if (otherDoc === null && edgeDefinition.to !== "_scheduled_functions") {
         throw new Error(
           `Dangling reference for edge "${edgeDefinition.name}" in table "${this.table}" on document with ID "${id}": Could not find a document with ID "${otherId}" in table "${edgeDefinition.to}".`
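Both edge-traversal sites now resolve their targets through systemAwareGet, which is what lets an edge point into a system table at all. The "_scheduled_functions" carve-out on the null check remains meaningful: Convex cleans up finished scheduled-function documents after a retention window, so a dangling edge into that table yields null rather than the corruption error. A sketch for a hypothetical "tasks" ent with an edge named "job" into "_scheduled_functions":

    const job = await ctx.table("tasks").getX(taskId).edge("job");
    // job: the _scheduled_functions doc, or null once Convex has cleaned it up;
    // the same dangling state on an edge into a user table throws
    // "Dangling reference for edge ..." instead.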
@@ -1396,7 +1421,7 @@ var PromiseEntWriterImpl = class extends PromiseEntOrNullImpl {
         }
         if (edgeDefinition.cardinality === "single") {
           if (edgeDefinition.type === "ref") {
-            const oldDoc = await this.ctx.db.get(docId);
+            const oldDoc = await this.ctx.db.get(this.table, docId);
             if (oldDoc[key] !== void 0 && oldDoc[key] !== idOrIds) {
               throw new Error("Cannot set 1:1 edge from ref end.");
             }
@@ -1458,7 +1483,10 @@ var PromiseEntIdImpl = class extends Promise {
       this.table,
       async () => {
         const id = await this.retrieve();
-        return { …
+        return {
+          id,
+          doc: async () => systemAwareGet(this.ctx.db, this.table, id)
+        };
       },
       true
     );
@@ -1543,6 +1571,12 @@ async function filterByReadRule(ctx, entDefinitions, table, docs, throwIfNull) {
   return docs.filter((_, i) => decisions[i]);
 }
 function getIndexFields(entDefinitions, table, index) {
+  if (index === "by_id") {
+    return ["_id"];
+  }
+  if (index === "by_creation_time") {
+    return ["_creationTime"];
+  }
   return entDefinitions[table].indexes[index];
 }
 function getReadRule(entDefinitions, table) {
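getIndexFields previously consulted only the indexes declared in the ents schema, so the built-in indexes that every Convex table carries were invisible to index-based reads. With the two special cases they resolve like any declared index; a sketch with a hypothetical table name:

    getIndexFields(entDefinitions, "tasks", "by_id"); // -> ["_id"]
    getIndexFields(entDefinitions, "tasks", "by_creation_time"); // -> ["_creationTime"]
    // which in turn lets index-based helpers target them, e.g. (assumed usage):
    // const doc = await ctx.table("tasks").get("by_id", taskId);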