convex-ents 0.18.1 → 0.20.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/deletion.js CHANGED
@@ -69,11 +69,7 @@ function scheduledDeleteFactory(entDefinitions, options) {
  inProgress: import_values.v.boolean()
  },
  handler: async (ctx, { origin, stack, inProgress }) => {
- const originId = ctx.db.normalizeId(origin.table, origin.id);
- if (originId === null) {
- throw new Error(`Invalid ID "${origin.id}" for table ${origin.table}`);
- }
- const doc = await ctx.db.get(originId);
+ const doc = await ctx.db.get(origin.table, origin.id);
  if (doc.deletionTime !== origin.deletionTime) {
  if (inProgress) {
  console.error(
@@ -91,7 +87,7 @@ function scheduledDeleteFactory(entDefinitions, options) {
  newCounter(),
  inProgress ? stack : [
  {
- id: originId,
+ id: origin.id,
  table: origin.table,
  edges: getEdgeArgs(entDefinitions, origin.table)
  }
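Per the updated source map below, the corresponding src/deletion.ts handler now reads the origin document by table name and string ID instead of normalizing the ID first. An excerpt of the updated handler, reproduced from the source map for readability (the elided logging is unchanged):

```ts
handler: async (ctx, { origin, stack, inProgress }) => {
  // Check that we still want to delete
  // Note: Doesn't support scheduled deletion starting with system table
  const doc = await ctx.db.get(origin.table, origin.id as GenericId<any>);
  if (doc.deletionTime !== origin.deletionTime) {
    // ...cancellation logging unchanged...
    return;
  }
  await progressScheduledDeletion(
    { ctx, entDefinitions, selfRef, origin },
    newCounter(),
    inProgress
      ? stack
      : [
          {
            id: origin.id,
            table: origin.table,
            edges: getEdgeArgs(entDefinitions, origin.table),
          },
        ],
  );
},
```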
@@ -1 +1 @@
- {"version":3,"sources":["../src/deletion.ts","../src/shared.ts"],"sourcesContent":["import {\n FunctionReference,\n GenericMutationCtx,\n IndexRangeBuilder,\n RegisteredMutation,\n internalMutationGeneric as internalMutation,\n makeFunctionReference,\n} from \"convex/server\";\nimport { GenericId, Infer, convexToJson, v } from \"convex/values\";\nimport { GenericEntsDataModel } from \"./schema\";\nimport { getEdgeDefinitions } from \"./shared\";\n\nexport type ScheduledDeleteFuncRef = FunctionReference<\n \"mutation\",\n \"internal\",\n {\n origin: Origin;\n stack: Stack;\n inProgress: boolean;\n },\n void\n>;\n\ntype Origin = {\n id: string;\n table: string;\n deletionTime: number;\n};\n\nconst vApproach = v.union(v.literal(\"cascade\"), v.literal(\"paginate\"));\n\ntype Approach = Infer<typeof vApproach>;\n\nexport function scheduledDeleteFactory<\n EntsDataModel extends GenericEntsDataModel,\n>(\n entDefinitions: EntsDataModel,\n options?: {\n scheduledDelete: ScheduledDeleteFuncRef;\n },\n): RegisteredMutation<\n \"internal\",\n { origin: Origin; stack: Stack; inProgress: boolean },\n Promise<void>\n> {\n const selfRef =\n options?.scheduledDelete ??\n (makeFunctionReference(\n \"functions:scheduledDelete\",\n ) as unknown as ScheduledDeleteFuncRef);\n return internalMutation({\n args: {\n origin: v.object({\n id: v.string(),\n table: v.string(),\n deletionTime: v.number(),\n }),\n stack: v.array(\n v.union(\n v.object({\n id: v.string(),\n table: v.string(),\n edges: v.array(\n v.object({\n approach: vApproach,\n table: v.string(),\n indexName: v.string(),\n }),\n ),\n }),\n v.object({\n approach: vApproach,\n cursor: v.union(v.string(), v.null()),\n table: v.string(),\n indexName: v.string(),\n fieldValue: v.any(),\n }),\n ),\n ),\n inProgress: v.boolean(),\n },\n handler: async (ctx, { origin, stack, inProgress }) => {\n const originId = ctx.db.normalizeId(origin.table, origin.id);\n if (originId === null) {\n throw new Error(`Invalid ID \"${origin.id}\" for table ${origin.table}`);\n }\n // Check that we still want to delete\n // Note: Doesn't support scheduled deletion starting with system table\n const doc = await ctx.db.get(originId);\n if (doc.deletionTime !== origin.deletionTime) {\n if (inProgress) {\n console.error(\n `[Ents] Already in-progress scheduled deletion for \"${origin.id}\" was canceled!`,\n );\n } else {\n console.log(\n `[Ents] Scheduled deletion for \"${origin.id}\" was canceled`,\n );\n }\n return;\n }\n await progressScheduledDeletion(\n { ctx, entDefinitions, selfRef, origin },\n newCounter(),\n inProgress\n ? 
stack\n : [\n {\n id: originId,\n table: origin.table,\n edges: getEdgeArgs(entDefinitions, origin.table),\n },\n ],\n );\n },\n });\n}\n\n// Heuristic:\n// Ent at the end of an edge\n// has soft or scheduled deletion behavior && has cascading edges: schedule individually\n// has cascading edges: paginate by 1\n// else: paginate by decent number\nfunction getEdgeArgs(entDefinitions: GenericEntsDataModel, table: string) {\n const edges = getEdgeDefinitions(entDefinitions, table);\n return Object.values(edges).flatMap((edgeDefinition) => {\n if (\n (edgeDefinition.cardinality === \"single\" &&\n edgeDefinition.type === \"ref\") ||\n (edgeDefinition.cardinality === \"multiple\" &&\n edgeDefinition.type === \"field\")\n ) {\n const table = edgeDefinition.to;\n const targetEdges = getEdgeDefinitions(entDefinitions, table);\n const hasCascadingEdges = Object.values(targetEdges).some(\n (edgeDefinition) =>\n (edgeDefinition.cardinality === \"single\" &&\n edgeDefinition.type === \"ref\") ||\n edgeDefinition.cardinality === \"multiple\",\n );\n const approach = hasCascadingEdges ? \"cascade\" : \"paginate\";\n\n const indexName = edgeDefinition.ref;\n return [{ table, indexName, approach } as const];\n } else if (edgeDefinition.cardinality === \"multiple\") {\n const table = edgeDefinition.table;\n return [\n {\n table,\n indexName: edgeDefinition.field,\n approach: \"paginate\",\n } as const,\n ...(edgeDefinition.symmetric\n ? [\n {\n table,\n indexName: edgeDefinition.ref,\n approach: \"paginate\",\n } as const,\n ]\n : []),\n ];\n } else {\n return [];\n }\n });\n}\n\ntype PaginationArgs = {\n approach: Approach;\n table: string;\n cursor: string | null;\n indexName: string;\n fieldValue: any;\n};\n\ntype EdgeArgs = {\n approach: Approach;\n table: string;\n indexName: string;\n};\n\ntype Stack = (\n | { id: string; table: string; edges: EdgeArgs[] }\n | PaginationArgs\n)[];\n\ntype CascadeCtx = {\n ctx: GenericMutationCtx<any>;\n entDefinitions: GenericEntsDataModel;\n selfRef: ScheduledDeleteFuncRef;\n origin: Origin;\n};\n\nasync function progressScheduledDeletion(\n cascade: CascadeCtx,\n counter: Counter,\n stack: Stack,\n) {\n const { ctx } = cascade;\n const last = stack[stack.length - 1];\n\n if (\"id\" in last) {\n const edgeArgs = last.edges[0];\n if (edgeArgs === undefined) {\n await ctx.db.delete(last.table, last.id as GenericId<any>);\n if (stack.length > 1) {\n await continueOrSchedule(cascade, counter, stack.slice(0, -1));\n }\n } else {\n const updated = { ...last, edges: last.edges.slice(1) };\n await paginateOrCascade(\n cascade,\n counter,\n stack.slice(0, -1).concat(updated),\n {\n cursor: null,\n fieldValue: last.id,\n ...edgeArgs,\n },\n );\n }\n } else {\n await paginateOrCascade(cascade, counter, stack, last);\n }\n}\n\nconst MAXIMUM_DOCUMENTS_READ = 8192 / 4;\nconst MAXIMUM_BYTES_READ = 2 ** 18;\n\nasync function paginateOrCascade(\n cascade: CascadeCtx,\n counter: Counter,\n stack: Stack,\n { table, approach, indexName, fieldValue, cursor }: PaginationArgs,\n) {\n const { ctx, entDefinitions } = cascade;\n const { page, continueCursor, isDone, bytesRead } = await paginate(\n ctx,\n { table, indexName, fieldValue },\n {\n cursor,\n ...limitsBasedOnCounter(\n counter,\n approach === \"paginate\"\n ? 
{ numItems: MAXIMUM_DOCUMENTS_READ }\n : { numItems: 1 },\n ),\n },\n );\n\n const updatedCounter = incrementCounter(counter, page.length, bytesRead);\n const updated = {\n approach,\n table,\n cursor: continueCursor,\n indexName,\n fieldValue,\n };\n const relevantStack = cursor === null ? stack : stack.slice(0, -1);\n const updatedStack =\n isDone && (approach === \"paginate\" || page.length === 0)\n ? relevantStack\n : relevantStack.concat(\n approach === \"cascade\"\n ? [\n updated,\n {\n id: page[0]._id,\n table,\n edges: getEdgeArgs(entDefinitions, table),\n },\n ]\n : [updated],\n );\n if (approach === \"paginate\") {\n await Promise.all(page.map((doc) => ctx.db.delete(table, doc._id)));\n }\n await continueOrSchedule(cascade, updatedCounter, updatedStack);\n}\n\nasync function continueOrSchedule(\n cascade: CascadeCtx,\n counter: Counter,\n stack: Stack,\n) {\n if (shouldSchedule(counter)) {\n const { ctx, selfRef, origin } = cascade;\n await ctx.scheduler.runAfter(0, selfRef, {\n origin,\n stack,\n inProgress: true,\n });\n } else {\n await progressScheduledDeletion(cascade, counter, stack);\n }\n}\n\ntype Counter = {\n numDocuments: number;\n numBytesRead: number;\n};\n\nfunction newCounter() {\n return {\n numDocuments: 0,\n numBytesRead: 0,\n };\n}\n\nfunction incrementCounter(\n counter: Counter,\n numDocuments: number,\n numBytesRead: number,\n) {\n return {\n numDocuments: counter.numDocuments + numDocuments,\n numBytesRead: counter.numBytesRead + numBytesRead,\n };\n}\n\nfunction limitsBasedOnCounter(\n counter: Counter,\n { numItems }: { numItems: number },\n) {\n return {\n numItems: Math.max(1, numItems - counter.numDocuments),\n maximumBytesRead: Math.max(1, MAXIMUM_BYTES_READ - counter.numBytesRead),\n };\n}\n\nfunction shouldSchedule(counter: Counter) {\n return (\n counter.numDocuments >= MAXIMUM_DOCUMENTS_READ ||\n counter.numBytesRead >= MAXIMUM_BYTES_READ\n );\n}\n\nasync function paginate(\n ctx: GenericMutationCtx<any>,\n {\n table,\n indexName,\n fieldValue,\n }: { table: string; indexName: string; fieldValue: any },\n {\n cursor,\n numItems,\n maximumBytesRead,\n }: {\n cursor: string | null;\n numItems: number;\n maximumBytesRead: number;\n },\n) {\n const query = ctx.db\n .query(table)\n .withIndex(indexName, (q) =>\n (q.eq(indexName, fieldValue) as IndexRangeBuilder<any, any, any>).gt(\n \"_creationTime\",\n cursor === null ? cursor : +cursor,\n ),\n );\n\n let bytesRead = 0;\n const results = [];\n let isDone = true;\n\n for await (const doc of query) {\n if (results.length >= numItems) {\n isDone = false;\n break;\n }\n const size = JSON.stringify(convexToJson(doc)).length * 8;\n\n results.push(doc);\n bytesRead += size;\n\n // Check this after we read the doc, since reading it already\n // happened anyway, and to make sure we return at least one\n // result.\n if (bytesRead > maximumBytesRead) {\n isDone = false;\n break;\n }\n }\n return {\n page: results,\n continueCursor:\n results.length === 0\n ? 
cursor\n : \"\" + results[results.length - 1]._creationTime,\n isDone,\n bytesRead,\n };\n}\n","import {\n DocumentByName,\n FieldTypeFromFieldPath,\n SystemDataModel,\n TableNamesInDataModel,\n} from \"convex/server\";\nimport { EdgeConfig, GenericEdgeConfig, GenericEntsDataModel } from \"./schema\";\n\nexport type EntsSystemDataModel = {\n [key in keyof SystemDataModel]: SystemDataModel[key] & {\n edges: Record<string, never>;\n };\n};\n\nexport type PromiseEdgeResult<\n EdgeConfig extends GenericEdgeConfig,\n MultipleRef,\n MultipleField,\n SingleOptional,\n Single,\n> = EdgeConfig[\"cardinality\"] extends \"multiple\"\n ? EdgeConfig[\"type\"] extends \"ref\"\n ? MultipleRef\n : MultipleField\n : EdgeConfig[\"type\"] extends \"ref\"\n ? SingleOptional\n : EdgeConfig[\"optional\"] extends true\n ? SingleOptional\n : Single;\n\nexport type IndexFieldTypesForEq<\n EntsDataModel extends GenericEntsDataModel,\n Table extends TableNamesInDataModel<EntsDataModel>,\n T extends string[],\n> = Pop<{\n [K in keyof T]: FieldTypeFromFieldPath<\n DocumentByName<EntsDataModel, Table>,\n T[K]\n >;\n}>;\n\ntype Pop<T extends any[]> = T extends [...infer Rest, infer _Last]\n ? Rest\n : never;\n\nexport function getEdgeDefinitions<\n EntsDataModel extends GenericEntsDataModel,\n Table extends TableNamesInDataModel<EntsDataModel>,\n>(entDefinitions: EntsDataModel, table: Table) {\n return entDefinitions[table].edges as Record<\n keyof EntsDataModel[Table][\"edges\"],\n EdgeConfig\n >;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAOO;AACP,oBAAkD;;;ACqC3C,SAAS,mBAGd,gBAA+B,OAAc;AAC7C,SAAO,eAAe,KAAK,EAAE;AAI/B;;;ADxBA,IAAM,YAAY,gBAAE,MAAM,gBAAE,QAAQ,SAAS,GAAG,gBAAE,QAAQ,UAAU,CAAC;AAI9D,SAAS,uBAGd,gBACA,SAOA;AACA,QAAM,UACJ,SAAS,uBACR;AAAA,IACC;AAAA,EACF;AACF,aAAO,cAAAA,yBAAiB;AAAA,IACtB,MAAM;AAAA,MACJ,QAAQ,gBAAE,OAAO;AAAA,QACf,IAAI,gBAAE,OAAO;AAAA,QACb,OAAO,gBAAE,OAAO;AAAA,QAChB,cAAc,gBAAE,OAAO;AAAA,MACzB,CAAC;AAAA,MACD,OAAO,gBAAE;AAAA,QACP,gBAAE;AAAA,UACA,gBAAE,OAAO;AAAA,YACP,IAAI,gBAAE,OAAO;AAAA,YACb,OAAO,gBAAE,OAAO;AAAA,YAChB,OAAO,gBAAE;AAAA,cACP,gBAAE,OAAO;AAAA,gBACP,UAAU;AAAA,gBACV,OAAO,gBAAE,OAAO;AAAA,gBAChB,WAAW,gBAAE,OAAO;AAAA,cACtB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,UACD,gBAAE,OAAO;AAAA,YACP,UAAU;AAAA,YACV,QAAQ,gBAAE,MAAM,gBAAE,OAAO,GAAG,gBAAE,KAAK,CAAC;AAAA,YACpC,OAAO,gBAAE,OAAO;AAAA,YAChB,WAAW,gBAAE,OAAO;AAAA,YACpB,YAAY,gBAAE,IAAI;AAAA,UACpB,CAAC;AAAA,QACH;AAAA,MACF;AAAA,MACA,YAAY,gBAAE,QAAQ;AAAA,IACxB;AAAA,IACA,SAAS,OAAO,KAAK,EAAE,QAAQ,OAAO,WAAW,MAAM;AACrD,YAAM,WAAW,IAAI,GAAG,YAAY,OAAO,OAAO,OAAO,EAAE;AAC3D,UAAI,aAAa,MAAM;AACrB,cAAM,IAAI,MAAM,eAAe,OAAO,EAAE,eAAe,OAAO,KAAK,EAAE;AAAA,MACvE;AAGA,YAAM,MAAM,MAAM,IAAI,GAAG,IAAI,QAAQ;AACrC,UAAI,IAAI,iBAAiB,OAAO,cAAc;AAC5C,YAAI,YAAY;AACd,kBAAQ;AAAA,YACN,sDAAsD,OAAO,EAAE;AAAA,UACjE;AAAA,QACF,OAAO;AACL,kBAAQ;AAAA,YACN,kCAAkC,OAAO,EAAE;AAAA,UAC7C;AAAA,QACF;AACA;AAAA,MACF;AACA,YAAM;AAAA,QACJ,EAAE,KAAK,gBAAgB,SAAS,OAAO;AAAA,QACvC,WAAW;AAAA,QACX,aACI,QACA;AAAA,UACE;AAAA,YACE,IAAI;AAAA,YACJ,OAAO,OAAO;AAAA,YACd,OAAO,YAAY,gBAAgB,OAAO,KAAK;AAAA,UACjD;AAAA,QACF;AAAA,MACN;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAOA,SAAS,YAAY,gBAAsC,OAAe;AACxE,QAAM,QAAQ,mBAAmB,gBAAgB,KAAK;AACtD,SAAO,OAAO,OAAO,KAAK,EAAE,QAAQ,CAAC,mBAAmB;AACtD,QACG,eAAe,gBAAgB,YAC9B,eAAe,SAAS,SACzB,eAAe,gBAAgB,cAC9B,eAAe,SAAS,SAC1B;AACA,YAAMC,SAAQ,eAAe;AAC7B,YAAM,cAAc,mBAAmB,gBAAgBA,MAAK;AAC5D,YAAM,oBAAoB,OAAO,OAAO,WAAW,EAAE;AAAA,QACnD,CAACC,oBACEA,gBAAe,gBAAgB,YAC9BA,gBAAe,SAAS,SAC1BA,gBAAe,gBAAgB;AAAA,MACnC;AACA,YAAM,WAAW,oBAAoB,YAAY;AAEjD,YAAM,YAAY,eAAe;AACjC,aAAO,CAAC,EAAE,OAAAD,QAAO,W
AAW,SAAS,CAAU;AAAA,IACjD,WAAW,eAAe,gBAAgB,YAAY;AACpD,YAAMA,SAAQ,eAAe;AAC7B,aAAO;AAAA,QACL;AAAA,UACE,OAAAA;AAAA,UACA,WAAW,eAAe;AAAA,UAC1B,UAAU;AAAA,QACZ;AAAA,QACA,GAAI,eAAe,YACf;AAAA,UACE;AAAA,YACE,OAAAA;AAAA,YACA,WAAW,eAAe;AAAA,YAC1B,UAAU;AAAA,UACZ;AAAA,QACF,IACA,CAAC;AAAA,MACP;AAAA,IACF,OAAO;AACL,aAAO,CAAC;AAAA,IACV;AAAA,EACF,CAAC;AACH;AA4BA,eAAe,0BACb,SACA,SACA,OACA;AACA,QAAM,EAAE,IAAI,IAAI;AAChB,QAAM,OAAO,MAAM,MAAM,SAAS,CAAC;AAEnC,MAAI,QAAQ,MAAM;AAChB,UAAM,WAAW,KAAK,MAAM,CAAC;AAC7B,QAAI,aAAa,QAAW;AAC1B,YAAM,IAAI,GAAG,OAAO,KAAK,OAAO,KAAK,EAAoB;AACzD,UAAI,MAAM,SAAS,GAAG;AACpB,cAAM,mBAAmB,SAAS,SAAS,MAAM,MAAM,GAAG,EAAE,CAAC;AAAA,MAC/D;AAAA,IACF,OAAO;AACL,YAAM,UAAU,EAAE,GAAG,MAAM,OAAO,KAAK,MAAM,MAAM,CAAC,EAAE;AACtD,YAAM;AAAA,QACJ;AAAA,QACA;AAAA,QACA,MAAM,MAAM,GAAG,EAAE,EAAE,OAAO,OAAO;AAAA,QACjC;AAAA,UACE,QAAQ;AAAA,UACR,YAAY,KAAK;AAAA,UACjB,GAAG;AAAA,QACL;AAAA,MACF;AAAA,IACF;AAAA,EACF,OAAO;AACL,UAAM,kBAAkB,SAAS,SAAS,OAAO,IAAI;AAAA,EACvD;AACF;AAEA,IAAM,yBAAyB,OAAO;AACtC,IAAM,qBAAqB,KAAK;AAEhC,eAAe,kBACb,SACA,SACA,OACA,EAAE,OAAO,UAAU,WAAW,YAAY,OAAO,GACjD;AACA,QAAM,EAAE,KAAK,eAAe,IAAI;AAChC,QAAM,EAAE,MAAM,gBAAgB,QAAQ,UAAU,IAAI,MAAM;AAAA,IACxD;AAAA,IACA,EAAE,OAAO,WAAW,WAAW;AAAA,IAC/B;AAAA,MACE;AAAA,MACA,GAAG;AAAA,QACD;AAAA,QACA,aAAa,aACT,EAAE,UAAU,uBAAuB,IACnC,EAAE,UAAU,EAAE;AAAA,MACpB;AAAA,IACF;AAAA,EACF;AAEA,QAAM,iBAAiB,iBAAiB,SAAS,KAAK,QAAQ,SAAS;AACvE,QAAM,UAAU;AAAA,IACd;AAAA,IACA;AAAA,IACA,QAAQ;AAAA,IACR;AAAA,IACA;AAAA,EACF;AACA,QAAM,gBAAgB,WAAW,OAAO,QAAQ,MAAM,MAAM,GAAG,EAAE;AACjE,QAAM,eACJ,WAAW,aAAa,cAAc,KAAK,WAAW,KAClD,gBACA,cAAc;AAAA,IACZ,aAAa,YACT;AAAA,MACE;AAAA,MACA;AAAA,QACE,IAAI,KAAK,CAAC,EAAE;AAAA,QACZ;AAAA,QACA,OAAO,YAAY,gBAAgB,KAAK;AAAA,MAC1C;AAAA,IACF,IACA,CAAC,OAAO;AAAA,EACd;AACN,MAAI,aAAa,YAAY;AAC3B,UAAM,QAAQ,IAAI,KAAK,IAAI,CAAC,QAAQ,IAAI,GAAG,OAAO,OAAO,IAAI,GAAG,CAAC,CAAC;AAAA,EACpE;AACA,QAAM,mBAAmB,SAAS,gBAAgB,YAAY;AAChE;AAEA,eAAe,mBACb,SACA,SACA,OACA;AACA,MAAI,eAAe,OAAO,GAAG;AAC3B,UAAM,EAAE,KAAK,SAAS,OAAO,IAAI;AACjC,UAAM,IAAI,UAAU,SAAS,GAAG,SAAS;AAAA,MACvC;AAAA,MACA;AAAA,MACA,YAAY;AAAA,IACd,CAAC;AAAA,EACH,OAAO;AACL,UAAM,0BAA0B,SAAS,SAAS,KAAK;AAAA,EACzD;AACF;AAOA,SAAS,aAAa;AACpB,SAAO;AAAA,IACL,cAAc;AAAA,IACd,cAAc;AAAA,EAChB;AACF;AAEA,SAAS,iBACP,SACA,cACA,cACA;AACA,SAAO;AAAA,IACL,cAAc,QAAQ,eAAe;AAAA,IACrC,cAAc,QAAQ,eAAe;AAAA,EACvC;AACF;AAEA,SAAS,qBACP,SACA,EAAE,SAAS,GACX;AACA,SAAO;AAAA,IACL,UAAU,KAAK,IAAI,GAAG,WAAW,QAAQ,YAAY;AAAA,IACrD,kBAAkB,KAAK,IAAI,GAAG,qBAAqB,QAAQ,YAAY;AAAA,EACzE;AACF;AAEA,SAAS,eAAe,SAAkB;AACxC,SACE,QAAQ,gBAAgB,0BACxB,QAAQ,gBAAgB;AAE5B;AAEA,eAAe,SACb,KACA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AACF,GACA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AACF,GAKA;AACA,QAAM,QAAQ,IAAI,GACf,MAAM,KAAK,EACX;AAAA,IAAU;AAAA,IAAW,CAAC,MACpB,EAAE,GAAG,WAAW,UAAU,EAAuC;AAAA,MAChE;AAAA,MACA,WAAW,OAAO,SAAS,CAAC;AAAA,IAC9B;AAAA,EACF;AAEF,MAAI,YAAY;AAChB,QAAM,UAAU,CAAC;AACjB,MAAI,SAAS;AAEb,mBAAiB,OAAO,OAAO;AAC7B,QAAI,QAAQ,UAAU,UAAU;AAC9B,eAAS;AACT;AAAA,IACF;AACA,UAAM,OAAO,KAAK,cAAU,4BAAa,GAAG,CAAC,EAAE,SAAS;AAExD,YAAQ,KAAK,GAAG;AAChB,iBAAa;AAKb,QAAI,YAAY,kBAAkB;AAChC,eAAS;AACT;AAAA,IACF;AAAA,EACF;AACA,SAAO;AAAA,IACL,MAAM;AAAA,IACN,gBACE,QAAQ,WAAW,IACf,SACA,KAAK,QAAQ,QAAQ,SAAS,CAAC,EAAE;AAAA,IACvC;AAAA,IACA;AAAA,EACF;AACF;","names":["internalMutation","table","edgeDefinition"]}
+ {"version":3,"sources":["../src/deletion.ts","../src/shared.ts"],"sourcesContent":["import {\n FunctionReference,\n GenericMutationCtx,\n IndexRangeBuilder,\n RegisteredMutation,\n internalMutationGeneric as internalMutation,\n makeFunctionReference,\n} from \"convex/server\";\nimport { GenericId, Infer, convexToJson, v } from \"convex/values\";\nimport { GenericEntsDataModel } from \"./schema\";\nimport { getEdgeDefinitions } from \"./shared\";\n\nexport type ScheduledDeleteFuncRef = FunctionReference<\n \"mutation\",\n \"internal\",\n {\n origin: Origin;\n stack: Stack;\n inProgress: boolean;\n },\n void\n>;\n\ntype Origin = {\n id: string;\n table: string;\n deletionTime: number;\n};\n\nconst vApproach = v.union(v.literal(\"cascade\"), v.literal(\"paginate\"));\n\ntype Approach = Infer<typeof vApproach>;\n\nexport function scheduledDeleteFactory<\n EntsDataModel extends GenericEntsDataModel,\n>(\n entDefinitions: EntsDataModel,\n options?: {\n scheduledDelete: ScheduledDeleteFuncRef;\n },\n): RegisteredMutation<\n \"internal\",\n { origin: Origin; stack: Stack; inProgress: boolean },\n Promise<void>\n> {\n const selfRef =\n options?.scheduledDelete ??\n (makeFunctionReference(\n \"functions:scheduledDelete\",\n ) as unknown as ScheduledDeleteFuncRef);\n return internalMutation({\n args: {\n origin: v.object({\n id: v.string(),\n table: v.string(),\n deletionTime: v.number(),\n }),\n stack: v.array(\n v.union(\n v.object({\n id: v.string(),\n table: v.string(),\n edges: v.array(\n v.object({\n approach: vApproach,\n table: v.string(),\n indexName: v.string(),\n }),\n ),\n }),\n v.object({\n approach: vApproach,\n cursor: v.union(v.string(), v.null()),\n table: v.string(),\n indexName: v.string(),\n fieldValue: v.any(),\n }),\n ),\n ),\n inProgress: v.boolean(),\n },\n handler: async (ctx, { origin, stack, inProgress }) => {\n // Check that we still want to delete\n // Note: Doesn't support scheduled deletion starting with system table\n const doc = await ctx.db.get(origin.table, origin.id as GenericId<any>);\n if (doc.deletionTime !== origin.deletionTime) {\n if (inProgress) {\n console.error(\n `[Ents] Already in-progress scheduled deletion for \"${origin.id}\" was canceled!`,\n );\n } else {\n console.log(\n `[Ents] Scheduled deletion for \"${origin.id}\" was canceled`,\n );\n }\n return;\n }\n await progressScheduledDeletion(\n { ctx, entDefinitions, selfRef, origin },\n newCounter(),\n inProgress\n ? stack\n : [\n {\n id: origin.id,\n table: origin.table,\n edges: getEdgeArgs(entDefinitions, origin.table),\n },\n ],\n );\n },\n });\n}\n\n// Heuristic:\n// Ent at the end of an edge\n// has soft or scheduled deletion behavior && has cascading edges: schedule individually\n// has cascading edges: paginate by 1\n// else: paginate by decent number\nfunction getEdgeArgs(entDefinitions: GenericEntsDataModel, table: string) {\n const edges = getEdgeDefinitions(entDefinitions, table);\n return Object.values(edges).flatMap((edgeDefinition) => {\n if (\n (edgeDefinition.cardinality === \"single\" &&\n edgeDefinition.type === \"ref\") ||\n (edgeDefinition.cardinality === \"multiple\" &&\n edgeDefinition.type === \"field\")\n ) {\n const table = edgeDefinition.to;\n const targetEdges = getEdgeDefinitions(entDefinitions, table);\n const hasCascadingEdges = Object.values(targetEdges).some(\n (edgeDefinition) =>\n (edgeDefinition.cardinality === \"single\" &&\n edgeDefinition.type === \"ref\") ||\n edgeDefinition.cardinality === \"multiple\",\n );\n const approach = hasCascadingEdges ? 
\"cascade\" : \"paginate\";\n\n const indexName = edgeDefinition.ref;\n return [{ table, indexName, approach } as const];\n } else if (edgeDefinition.cardinality === \"multiple\") {\n const table = edgeDefinition.table;\n return [\n {\n table,\n indexName: edgeDefinition.field,\n approach: \"paginate\",\n } as const,\n ...(edgeDefinition.symmetric\n ? [\n {\n table,\n indexName: edgeDefinition.ref,\n approach: \"paginate\",\n } as const,\n ]\n : []),\n ];\n } else {\n return [];\n }\n });\n}\n\ntype PaginationArgs = {\n approach: Approach;\n table: string;\n cursor: string | null;\n indexName: string;\n fieldValue: any;\n};\n\ntype EdgeArgs = {\n approach: Approach;\n table: string;\n indexName: string;\n};\n\ntype Stack = (\n | { id: string; table: string; edges: EdgeArgs[] }\n | PaginationArgs\n)[];\n\ntype CascadeCtx = {\n ctx: GenericMutationCtx<any>;\n entDefinitions: GenericEntsDataModel;\n selfRef: ScheduledDeleteFuncRef;\n origin: Origin;\n};\n\nasync function progressScheduledDeletion(\n cascade: CascadeCtx,\n counter: Counter,\n stack: Stack,\n) {\n const { ctx } = cascade;\n const last = stack[stack.length - 1];\n\n if (\"id\" in last) {\n const edgeArgs = last.edges[0];\n if (edgeArgs === undefined) {\n await ctx.db.delete(last.table, last.id as GenericId<any>);\n if (stack.length > 1) {\n await continueOrSchedule(cascade, counter, stack.slice(0, -1));\n }\n } else {\n const updated = { ...last, edges: last.edges.slice(1) };\n await paginateOrCascade(\n cascade,\n counter,\n stack.slice(0, -1).concat(updated),\n {\n cursor: null,\n fieldValue: last.id,\n ...edgeArgs,\n },\n );\n }\n } else {\n await paginateOrCascade(cascade, counter, stack, last);\n }\n}\n\nconst MAXIMUM_DOCUMENTS_READ = 8192 / 4;\nconst MAXIMUM_BYTES_READ = 2 ** 18;\n\nasync function paginateOrCascade(\n cascade: CascadeCtx,\n counter: Counter,\n stack: Stack,\n { table, approach, indexName, fieldValue, cursor }: PaginationArgs,\n) {\n const { ctx, entDefinitions } = cascade;\n const { page, continueCursor, isDone, bytesRead } = await paginate(\n ctx,\n { table, indexName, fieldValue },\n {\n cursor,\n ...limitsBasedOnCounter(\n counter,\n approach === \"paginate\"\n ? { numItems: MAXIMUM_DOCUMENTS_READ }\n : { numItems: 1 },\n ),\n },\n );\n\n const updatedCounter = incrementCounter(counter, page.length, bytesRead);\n const updated = {\n approach,\n table,\n cursor: continueCursor,\n indexName,\n fieldValue,\n };\n const relevantStack = cursor === null ? stack : stack.slice(0, -1);\n const updatedStack =\n isDone && (approach === \"paginate\" || page.length === 0)\n ? relevantStack\n : relevantStack.concat(\n approach === \"cascade\"\n ? 
[\n updated,\n {\n id: page[0]._id,\n table,\n edges: getEdgeArgs(entDefinitions, table),\n },\n ]\n : [updated],\n );\n if (approach === \"paginate\") {\n await Promise.all(page.map((doc) => ctx.db.delete(table, doc._id)));\n }\n await continueOrSchedule(cascade, updatedCounter, updatedStack);\n}\n\nasync function continueOrSchedule(\n cascade: CascadeCtx,\n counter: Counter,\n stack: Stack,\n) {\n if (shouldSchedule(counter)) {\n const { ctx, selfRef, origin } = cascade;\n await ctx.scheduler.runAfter(0, selfRef, {\n origin,\n stack,\n inProgress: true,\n });\n } else {\n await progressScheduledDeletion(cascade, counter, stack);\n }\n}\n\ntype Counter = {\n numDocuments: number;\n numBytesRead: number;\n};\n\nfunction newCounter() {\n return {\n numDocuments: 0,\n numBytesRead: 0,\n };\n}\n\nfunction incrementCounter(\n counter: Counter,\n numDocuments: number,\n numBytesRead: number,\n) {\n return {\n numDocuments: counter.numDocuments + numDocuments,\n numBytesRead: counter.numBytesRead + numBytesRead,\n };\n}\n\nfunction limitsBasedOnCounter(\n counter: Counter,\n { numItems }: { numItems: number },\n) {\n return {\n numItems: Math.max(1, numItems - counter.numDocuments),\n maximumBytesRead: Math.max(1, MAXIMUM_BYTES_READ - counter.numBytesRead),\n };\n}\n\nfunction shouldSchedule(counter: Counter) {\n return (\n counter.numDocuments >= MAXIMUM_DOCUMENTS_READ ||\n counter.numBytesRead >= MAXIMUM_BYTES_READ\n );\n}\n\nasync function paginate(\n ctx: GenericMutationCtx<any>,\n {\n table,\n indexName,\n fieldValue,\n }: { table: string; indexName: string; fieldValue: any },\n {\n cursor,\n numItems,\n maximumBytesRead,\n }: {\n cursor: string | null;\n numItems: number;\n maximumBytesRead: number;\n },\n) {\n const query = ctx.db\n .query(table)\n .withIndex(indexName, (q) =>\n (q.eq(indexName, fieldValue) as IndexRangeBuilder<any, any, any>).gt(\n \"_creationTime\",\n cursor === null ? cursor : +cursor,\n ),\n );\n\n let bytesRead = 0;\n const results = [];\n let isDone = true;\n\n for await (const doc of query) {\n if (results.length >= numItems) {\n isDone = false;\n break;\n }\n const size = JSON.stringify(convexToJson(doc)).length * 8;\n\n results.push(doc);\n bytesRead += size;\n\n // Check this after we read the doc, since reading it already\n // happened anyway, and to make sure we return at least one\n // result.\n if (bytesRead > maximumBytesRead) {\n isDone = false;\n break;\n }\n }\n return {\n page: results,\n continueCursor:\n results.length === 0\n ? cursor\n : \"\" + results[results.length - 1]._creationTime,\n isDone,\n bytesRead,\n };\n}\n","import {\n DocumentByName,\n FieldTypeFromFieldPath,\n GenericDatabaseReader,\n GenericDataModel,\n NamedTableInfo,\n QueryInitializer,\n SystemDataModel,\n SystemTableNames,\n TableNamesInDataModel,\n} from \"convex/server\";\nimport { EdgeConfig, GenericEdgeConfig, GenericEntsDataModel } from \"./schema\";\nimport { GenericId } from \"convex/values\";\n\nexport type EntsSystemDataModel = {\n [key in keyof SystemDataModel]: SystemDataModel[key] & {\n edges: Record<string, never>;\n };\n};\n\nexport type PromiseEdgeResult<\n EdgeConfig extends GenericEdgeConfig,\n MultipleRef,\n MultipleField,\n SingleOptional,\n Single,\n> = EdgeConfig[\"cardinality\"] extends \"multiple\"\n ? EdgeConfig[\"type\"] extends \"ref\"\n ? MultipleRef\n : MultipleField\n : EdgeConfig[\"type\"] extends \"ref\"\n ? SingleOptional\n : EdgeConfig[\"optional\"] extends true\n ? 
SingleOptional\n : Single;\n\nexport type IndexFieldTypesForEq<\n EntsDataModel extends GenericEntsDataModel,\n Table extends TableNamesInDataModel<EntsDataModel>,\n T extends string[],\n> = PopIfSeveral<{\n [K in keyof T]: FieldTypeFromFieldPath<\n DocumentByName<EntsDataModel, Table>,\n T[K]\n >;\n}>;\n\n// System indexes have only a single field, so we won't to perform\n// equality check on that field. Normal indexes always have _creationTime as the last field.\ntype PopIfSeveral<T extends any[]> = T extends [infer Only]\n ? [Only]\n : T extends [...infer Rest, infer _Last]\n ? Rest\n : never;\n\nexport function getEdgeDefinitions<\n EntsDataModel extends GenericEntsDataModel,\n Table extends TableNamesInDataModel<EntsDataModel>,\n>(entDefinitions: EntsDataModel, table: Table) {\n return entDefinitions[table].edges as Record<\n keyof EntsDataModel[Table][\"edges\"],\n EdgeConfig\n >;\n}\n\nexport type UniqueIndexFieldName<T extends string[]> = T extends [infer Only]\n ? Only\n : T extends [infer Single, \"_creationTime\"]\n ? Single\n : never;\n\nexport function systemAwareGet<\n DataModel extends GenericDataModel,\n Table extends TableNamesInDataModel<DataModel>,\n>(db: GenericDatabaseReader<DataModel>, table: Table, id: GenericId<Table>) {\n return isSystemTable(table)\n ? db.system.get(table, id as any)\n : db.get(table, id);\n}\n\nexport function systemAwareQuery<\n DataModel extends GenericDataModel,\n Table extends TableNamesInDataModel<DataModel>,\n>(\n db: GenericDatabaseReader<DataModel>,\n table: Table,\n): QueryInitializer<NamedTableInfo<DataModel, Table>> {\n return isSystemTable(table)\n ? (db.system.query(table) as any)\n : db.query(table);\n}\n\nexport function isSystemTable(table: string): table is SystemTableNames {\n return table.startsWith(\"_\");\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAOO;AACP,oBAAkD;;;AC+C3C,SAAS,mBAGd,gBAA+B,OAAc;AAC7C,SAAO,eAAe,KAAK,EAAE;AAI/B;;;ADlCA,IAAM,YAAY,gBAAE,MAAM,gBAAE,QAAQ,SAAS,GAAG,gBAAE,QAAQ,UAAU,CAAC;AAI9D,SAAS,uBAGd,gBACA,SAOA;AACA,QAAM,UACJ,SAAS,uBACR;AAAA,IACC;AAAA,EACF;AACF,aAAO,cAAAA,yBAAiB;AAAA,IACtB,MAAM;AAAA,MACJ,QAAQ,gBAAE,OAAO;AAAA,QACf,IAAI,gBAAE,OAAO;AAAA,QACb,OAAO,gBAAE,OAAO;AAAA,QAChB,cAAc,gBAAE,OAAO;AAAA,MACzB,CAAC;AAAA,MACD,OAAO,gBAAE;AAAA,QACP,gBAAE;AAAA,UACA,gBAAE,OAAO;AAAA,YACP,IAAI,gBAAE,OAAO;AAAA,YACb,OAAO,gBAAE,OAAO;AAAA,YAChB,OAAO,gBAAE;AAAA,cACP,gBAAE,OAAO;AAAA,gBACP,UAAU;AAAA,gBACV,OAAO,gBAAE,OAAO;AAAA,gBAChB,WAAW,gBAAE,OAAO;AAAA,cACtB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,UACD,gBAAE,OAAO;AAAA,YACP,UAAU;AAAA,YACV,QAAQ,gBAAE,MAAM,gBAAE,OAAO,GAAG,gBAAE,KAAK,CAAC;AAAA,YACpC,OAAO,gBAAE,OAAO;AAAA,YAChB,WAAW,gBAAE,OAAO;AAAA,YACpB,YAAY,gBAAE,IAAI;AAAA,UACpB,CAAC;AAAA,QACH;AAAA,MACF;AAAA,MACA,YAAY,gBAAE,QAAQ;AAAA,IACxB;AAAA,IACA,SAAS,OAAO,KAAK,EAAE,QAAQ,OAAO,WAAW,MAAM;AAGrD,YAAM,MAAM,MAAM,IAAI,GAAG,IAAI,OAAO,OAAO,OAAO,EAAoB;AACtE,UAAI,IAAI,iBAAiB,OAAO,cAAc;AAC5C,YAAI,YAAY;AACd,kBAAQ;AAAA,YACN,sDAAsD,OAAO,EAAE;AAAA,UACjE;AAAA,QACF,OAAO;AACL,kBAAQ;AAAA,YACN,kCAAkC,OAAO,EAAE;AAAA,UAC7C;AAAA,QACF;AACA;AAAA,MACF;AACA,YAAM;AAAA,QACJ,EAAE,KAAK,gBAAgB,SAAS,OAAO;AAAA,QACvC,WAAW;AAAA,QACX,aACI,QACA;AAAA,UACE;AAAA,YACE,IAAI,OAAO;AAAA,YACX,OAAO,OAAO;AAAA,YACd,OAAO,YAAY,gBAAgB,OAAO,KAAK;AAAA,UACjD;AAAA,QACF;AAAA,MACN;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAOA,SAAS,YAAY,gBAAsC,OAAe;AACxE,QAAM,QAAQ,mBAAmB,gBAAgB,KAAK;AACtD,SAAO,OAAO,OAAO,KAAK,EAAE,QAAQ,CAAC,mBAAmB;AACtD,QACG,eAAe,gBAAgB,YAC9B,eAAe,SAAS,SACzB,eAAe,gBAAgB,cAC9B,eAAe,SAAS,SAC1B;AACA,YAAMC,SAAQ,eAAe;AAC7B,YAAM,cAAc,mBAAmB,gBAAgBA,MAAK;AAC5D,YAAM,oBAA
oB,OAAO,OAAO,WAAW,EAAE;AAAA,QACnD,CAACC,oBACEA,gBAAe,gBAAgB,YAC9BA,gBAAe,SAAS,SAC1BA,gBAAe,gBAAgB;AAAA,MACnC;AACA,YAAM,WAAW,oBAAoB,YAAY;AAEjD,YAAM,YAAY,eAAe;AACjC,aAAO,CAAC,EAAE,OAAAD,QAAO,WAAW,SAAS,CAAU;AAAA,IACjD,WAAW,eAAe,gBAAgB,YAAY;AACpD,YAAMA,SAAQ,eAAe;AAC7B,aAAO;AAAA,QACL;AAAA,UACE,OAAAA;AAAA,UACA,WAAW,eAAe;AAAA,UAC1B,UAAU;AAAA,QACZ;AAAA,QACA,GAAI,eAAe,YACf;AAAA,UACE;AAAA,YACE,OAAAA;AAAA,YACA,WAAW,eAAe;AAAA,YAC1B,UAAU;AAAA,UACZ;AAAA,QACF,IACA,CAAC;AAAA,MACP;AAAA,IACF,OAAO;AACL,aAAO,CAAC;AAAA,IACV;AAAA,EACF,CAAC;AACH;AA4BA,eAAe,0BACb,SACA,SACA,OACA;AACA,QAAM,EAAE,IAAI,IAAI;AAChB,QAAM,OAAO,MAAM,MAAM,SAAS,CAAC;AAEnC,MAAI,QAAQ,MAAM;AAChB,UAAM,WAAW,KAAK,MAAM,CAAC;AAC7B,QAAI,aAAa,QAAW;AAC1B,YAAM,IAAI,GAAG,OAAO,KAAK,OAAO,KAAK,EAAoB;AACzD,UAAI,MAAM,SAAS,GAAG;AACpB,cAAM,mBAAmB,SAAS,SAAS,MAAM,MAAM,GAAG,EAAE,CAAC;AAAA,MAC/D;AAAA,IACF,OAAO;AACL,YAAM,UAAU,EAAE,GAAG,MAAM,OAAO,KAAK,MAAM,MAAM,CAAC,EAAE;AACtD,YAAM;AAAA,QACJ;AAAA,QACA;AAAA,QACA,MAAM,MAAM,GAAG,EAAE,EAAE,OAAO,OAAO;AAAA,QACjC;AAAA,UACE,QAAQ;AAAA,UACR,YAAY,KAAK;AAAA,UACjB,GAAG;AAAA,QACL;AAAA,MACF;AAAA,IACF;AAAA,EACF,OAAO;AACL,UAAM,kBAAkB,SAAS,SAAS,OAAO,IAAI;AAAA,EACvD;AACF;AAEA,IAAM,yBAAyB,OAAO;AACtC,IAAM,qBAAqB,KAAK;AAEhC,eAAe,kBACb,SACA,SACA,OACA,EAAE,OAAO,UAAU,WAAW,YAAY,OAAO,GACjD;AACA,QAAM,EAAE,KAAK,eAAe,IAAI;AAChC,QAAM,EAAE,MAAM,gBAAgB,QAAQ,UAAU,IAAI,MAAM;AAAA,IACxD;AAAA,IACA,EAAE,OAAO,WAAW,WAAW;AAAA,IAC/B;AAAA,MACE;AAAA,MACA,GAAG;AAAA,QACD;AAAA,QACA,aAAa,aACT,EAAE,UAAU,uBAAuB,IACnC,EAAE,UAAU,EAAE;AAAA,MACpB;AAAA,IACF;AAAA,EACF;AAEA,QAAM,iBAAiB,iBAAiB,SAAS,KAAK,QAAQ,SAAS;AACvE,QAAM,UAAU;AAAA,IACd;AAAA,IACA;AAAA,IACA,QAAQ;AAAA,IACR;AAAA,IACA;AAAA,EACF;AACA,QAAM,gBAAgB,WAAW,OAAO,QAAQ,MAAM,MAAM,GAAG,EAAE;AACjE,QAAM,eACJ,WAAW,aAAa,cAAc,KAAK,WAAW,KAClD,gBACA,cAAc;AAAA,IACZ,aAAa,YACT;AAAA,MACE;AAAA,MACA;AAAA,QACE,IAAI,KAAK,CAAC,EAAE;AAAA,QACZ;AAAA,QACA,OAAO,YAAY,gBAAgB,KAAK;AAAA,MAC1C;AAAA,IACF,IACA,CAAC,OAAO;AAAA,EACd;AACN,MAAI,aAAa,YAAY;AAC3B,UAAM,QAAQ,IAAI,KAAK,IAAI,CAAC,QAAQ,IAAI,GAAG,OAAO,OAAO,IAAI,GAAG,CAAC,CAAC;AAAA,EACpE;AACA,QAAM,mBAAmB,SAAS,gBAAgB,YAAY;AAChE;AAEA,eAAe,mBACb,SACA,SACA,OACA;AACA,MAAI,eAAe,OAAO,GAAG;AAC3B,UAAM,EAAE,KAAK,SAAS,OAAO,IAAI;AACjC,UAAM,IAAI,UAAU,SAAS,GAAG,SAAS;AAAA,MACvC;AAAA,MACA;AAAA,MACA,YAAY;AAAA,IACd,CAAC;AAAA,EACH,OAAO;AACL,UAAM,0BAA0B,SAAS,SAAS,KAAK;AAAA,EACzD;AACF;AAOA,SAAS,aAAa;AACpB,SAAO;AAAA,IACL,cAAc;AAAA,IACd,cAAc;AAAA,EAChB;AACF;AAEA,SAAS,iBACP,SACA,cACA,cACA;AACA,SAAO;AAAA,IACL,cAAc,QAAQ,eAAe;AAAA,IACrC,cAAc,QAAQ,eAAe;AAAA,EACvC;AACF;AAEA,SAAS,qBACP,SACA,EAAE,SAAS,GACX;AACA,SAAO;AAAA,IACL,UAAU,KAAK,IAAI,GAAG,WAAW,QAAQ,YAAY;AAAA,IACrD,kBAAkB,KAAK,IAAI,GAAG,qBAAqB,QAAQ,YAAY;AAAA,EACzE;AACF;AAEA,SAAS,eAAe,SAAkB;AACxC,SACE,QAAQ,gBAAgB,0BACxB,QAAQ,gBAAgB;AAE5B;AAEA,eAAe,SACb,KACA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AACF,GACA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AACF,GAKA;AACA,QAAM,QAAQ,IAAI,GACf,MAAM,KAAK,EACX;AAAA,IAAU;AAAA,IAAW,CAAC,MACpB,EAAE,GAAG,WAAW,UAAU,EAAuC;AAAA,MAChE;AAAA,MACA,WAAW,OAAO,SAAS,CAAC;AAAA,IAC9B;AAAA,EACF;AAEF,MAAI,YAAY;AAChB,QAAM,UAAU,CAAC;AACjB,MAAI,SAAS;AAEb,mBAAiB,OAAO,OAAO;AAC7B,QAAI,QAAQ,UAAU,UAAU;AAC9B,eAAS;AACT;AAAA,IACF;AACA,UAAM,OAAO,KAAK,cAAU,4BAAa,GAAG,CAAC,EAAE,SAAS;AAExD,YAAQ,KAAK,GAAG;AAChB,iBAAa;AAKb,QAAI,YAAY,kBAAkB;AAChC,eAAS;AACT;AAAA,IACF;AAAA,EACF;AACA,SAAO;AAAA,IACL,MAAM;AAAA,IACN,gBACE,QAAQ,WAAW,IACf,SACA,KAAK,QAAQ,QAAQ,SAAS,CAAC,EAAE;AAAA,IACvC;AAAA,IACA;AAAA,EACF;AACF;","names":["internalMutation","table","edgeDefinition"]}
@@ -3,4 +3,4 @@ import 'convex/values';
  import './deletion.js';
  import './schema.js';
  import './shared.js';
- export { Y as DocRetriever, D as Ent, X as EntMutationCtx, W as EntQueryCtx, E as EntsTable, C as EntsTableWriter, G as GenericEnt, U as GenericEntWriter, z as PromiseArray, y as PromiseArrayOrNull, F as PromiseEdge, t as PromiseEdgeEnts, p as PromiseEdgeEntsOrNull, v as PromiseEdgeEntsWriter, r as PromiseEdgeEntsWriterOrNull, H as PromiseEdgeOrThrow, s as PromiseEdgeOrderedEnts, o as PromiseEdgeOrderedEntsOrNull, u as PromiseEdgeOrderedEntsWriter, q as PromiseEdgeOrderedEntsWriterOrNull, I as PromiseEdgeWriter, K as PromiseEdgeWriterOrNull, J as PromiseEdgeWriterOrThrow, x as PromiseEnt, V as PromiseEntId, w as PromiseEntOrNull, T as PromiseEntWriter, S as PromiseEntWriterOrNull, m as PromiseEnts, k as PromiseEntsOrNull, n as PromiseEntsOrNulls, N as PromiseEntsWriter, l as PromiseEntsWriterOrNull, g as PromiseOrderedQuery, f as PromiseOrderedQueryBase, P as PromiseOrderedQueryOrNull, L as PromiseOrderedQueryWriter, a as PromiseOrderedQueryWriterOrNull, j as PromisePaginationResult, i as PromisePaginationResultOrNull, Q as PromisePaginationResultWriter, O as PromisePaginationResultWriterOrNull, h as PromiseQuery, b as PromiseQueryOrNull, M as PromiseQueryWriter, c as PromiseQueryWriterOrNull, e as PromiseTable, d as PromiseTableBase, R as PromiseTableWriter, Z as addEntRules, A as entWrapper, B as entsTableFactory, a0 as getDeletionConfig, _ as getReadRule, $ as getWriteRule } from './writer.js';
+ export { a2 as DocRetriever, K as Ent, a1 as EntMutationCtx, a0 as EntQueryCtx, I as EntsTable, J as EntsTableWriter, L as GenericEnt, _ as GenericEntWriter, F as PromiseArray, E as PromiseArrayOrNull, M as PromiseEdge, z as PromiseEdgeEnts, v as PromiseEdgeEntsOrNull, B as PromiseEdgeEntsWriter, x as PromiseEdgeEntsWriterOrNull, N as PromiseEdgeOrThrow, y as PromiseEdgeOrderedEnts, u as PromiseEdgeOrderedEntsOrNull, A as PromiseEdgeOrderedEntsWriter, w as PromiseEdgeOrderedEntsWriterOrNull, O as PromiseEdgeWriter, R as PromiseEdgeWriterOrNull, Q as PromiseEdgeWriterOrThrow, D as PromiseEnt, $ as PromiseEntId, C as PromiseEntOrNull, Z as PromiseEntWriter, Y as PromiseEntWriterOrNull, q as PromiseEnts, o as PromiseEntsOrNull, r as PromiseEntsOrNulls, U as PromiseEntsWriter, p as PromiseEntsWriterOrNull, t as PromiseIds, s as PromiseIdsOrNull, n as PromiseIdsPaginationResult, m as PromiseIdsPaginationResultOrNull, i as PromiseOrderedIdsQuery, a as PromiseOrderedIdsQueryOrNull, h as PromiseOrderedQuery, g as PromiseOrderedQueryBase, P as PromiseOrderedQueryOrNull, S as PromiseOrderedQueryWriter, b as PromiseOrderedQueryWriterOrNull, l as PromisePaginationResult, k as PromisePaginationResultOrNull, W as PromisePaginationResultWriter, V as PromisePaginationResultWriterOrNull, j as PromiseQuery, c as PromiseQueryOrNull, T as PromiseQueryWriter, d as PromiseQueryWriterOrNull, f as PromiseTable, e as PromiseTableBase, X as PromiseTableWriter, a3 as addEntRules, G as entWrapper, H as entsTableFactory, a6 as getDeletionConfig, a4 as getReadRule, a5 as getWriteRule } from './writer.js';
package/dist/functions.js CHANGED
@@ -43,6 +43,15 @@ function edgeCompoundIndexNameRaw(idA, idB) {
  function getEdgeDefinitions(entDefinitions, table) {
  return entDefinitions[table].edges;
  }
+ function systemAwareGet(db, table, id) {
+ return isSystemTable(table) ? db.system.get(table, id) : db.get(table, id);
+ }
+ function systemAwareQuery(db, table) {
+ return isSystemTable(table) ? db.system.query(table) : db.query(table);
+ }
+ function isSystemTable(table) {
+ return table.startsWith("_");
+ }

  // src/writer.ts
  var import_server2 = require("convex/server");
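The compiled helpers added above correspond to this typed source in src/shared.ts, as recorded in the updated source map (reproduced here for readability; it is not part of the dist output itself):

```ts
export function systemAwareGet<
  DataModel extends GenericDataModel,
  Table extends TableNamesInDataModel<DataModel>,
>(db: GenericDatabaseReader<DataModel>, table: Table, id: GenericId<Table>) {
  return isSystemTable(table)
    ? db.system.get(table, id as any)
    : db.get(table, id);
}

export function systemAwareQuery<
  DataModel extends GenericDataModel,
  Table extends TableNamesInDataModel<DataModel>,
>(
  db: GenericDatabaseReader<DataModel>,
  table: Table,
): QueryInitializer<NamedTableInfo<DataModel, Table>> {
  return isSystemTable(table)
    ? (db.system.query(table) as any)
    : db.query(table);
}

export function isSystemTable(table: string): table is SystemTableNames {
  return table.startsWith("_");
}
```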
@@ -76,7 +85,7 @@ var WriterImplBase = class _WriterImplBase {
  }
  } else if (edgeDefinition.cardinality === "single") {
  if (edgeDefinition.deletion !== void 0 && (!isDeletingSoftly || edgeDefinition.deletion === "soft")) {
- const doc = await this.ctx.db.get(id);
+ const doc = await this.ctx.db.get(this.table, id);
  if (doc !== null) {
  const otherId = doc[edgeDefinition.field];
  edges[key] = {
@@ -282,7 +291,7 @@ var WriterImplBase = class _WriterImplBase {
  if (id !== void 0) {
  const readPolicy = getReadRule(this.entDefinitions, this.table);
  if (readPolicy !== void 0) {
- const doc = await this.ctx.db.get(id);
+ const doc = await this.ctx.db.get(this.table, id);
  if (doc === null) {
  throw new Error(
  `Cannot update document with ID "${id}" in table "${this.table} because it does not exist"`
@@ -301,7 +310,7 @@ var WriterImplBase = class _WriterImplBase {
  return;
  }
  const ent = id === void 0 ? void 0 : entWrapper(
- await this.ctx.db.get(id),
+ await this.ctx.db.get(this.table, id),
  this.ctx,
  this.entDefinitions,
  this.table
@@ -331,9 +340,6 @@ var WriterImplBase = class _WriterImplBase {
  }
  }
  };
- function isSystemTable(table) {
- return table.startsWith("_");
- }

  // src/functions.ts
  var PromiseQueryOrNullImpl = class _PromiseQueryOrNullImpl extends Promise {
@@ -360,7 +366,7 @@ var PromiseQueryOrNullImpl = class _PromiseQueryOrNullImpl extends Promise {
  );
  }
  map(callbackFn) {
- return new PromiseArrayImpl(async () => {
+ return new PromiseArrayOrNullImpl(async () => {
  const array = await this;
  if (array === null) {
  return null;
@@ -521,6 +527,90 @@ var PromiseQueryOrNullImpl = class _PromiseQueryOrNullImpl extends Promise {
  );
  }
  };
+ var PromiseIdsQueryOrNullImpl = class extends Promise {
+ constructor(retrieve, field) {
+ super(() => {
+ });
+ this.retrieve = retrieve;
+ this.field = field;
+ }
+ map(callbackFn) {
+ return new PromiseArrayOrNullImpl(async () => {
+ const array = await this;
+ if (array === null) {
+ return null;
+ }
+ return await Promise.all(array.map(callbackFn));
+ });
+ }
+ paginate(paginationOpts) {
+ return new PromiseIdsPaginationResultOrNullImpl(async () => {
+ const query = await this.retrieve();
+ if (query === null) {
+ return null;
+ }
+ const result = await query.paginate(paginationOpts);
+ return {
+ ...result,
+ page: result.page.map((id) => this._getId(id))
+ };
+ });
+ }
+ take(n) {
+ return new PromiseIdsOrNullImpl(async () => {
+ const query = await this.retrieve();
+ if (query === null) {
+ return null;
+ }
+ const result = await query.take(n);
+ return result.map((id) => this._getId(id));
+ });
+ }
+ async first() {
+ const query = await this.retrieve();
+ if (query === null) {
+ return null;
+ }
+ const doc = await query.first();
+ if (doc === null) {
+ return null;
+ }
+ return this._getId(doc);
+ }
+ async firstX() {
+ const id = await this.first();
+ if (id === null) {
+ throw new Error("Expected at least one ID, but got none");
+ }
+ return id;
+ }
+ async unique() {
+ const query = await this.retrieve();
+ if (query === null) {
+ return null;
+ }
+ const result = await query.unique();
+ if (result === null) {
+ return null;
+ }
+ return this._getId(result);
+ }
+ async uniqueX() {
+ const id = await this.unique();
+ if (id === null) {
+ throw new Error("Expected one unique ID, but got none");
+ }
+ return id;
+ }
+ then(onfulfilled, onrejected) {
+ return this.retrieve().then((query) => query === null ? null : query.collect()).then(
+ (docs) => docs === null ? null : docs.map((doc) => this._getId(doc))
+ ).then(onfulfilled, onrejected);
+ }
+ _getId(doc) {
+ return doc[this.field];
+ }
+ };
  var PromisePaginationResultOrNullImpl = class extends Promise {
  constructor(ctx, entDefinitions, table, retrieve) {
  super(() => {
@@ -567,13 +657,33 @@ var PromisePaginationResultOrNullImpl = class extends Promise {
  ).then(onfulfilled, onrejected);
  }
  };
+ var PromiseIdsPaginationResultOrNullImpl = class extends Promise {
+ constructor(retrieve) {
+ super(() => {
+ });
+ this.retrieve = retrieve;
+ }
+ async map(callbackFn) {
+ const result = await this;
+ if (result === null) {
+ return null;
+ }
+ return {
+ ...result,
+ page: await Promise.all(result.page.map(callbackFn))
+ };
+ }
+ then(onfulfilled, onrejected) {
+ return this.retrieve().then(onfulfilled, onrejected);
+ }
+ };
  var PromiseTableImpl = class extends PromiseQueryOrNullImpl {
  constructor(ctx, entDefinitions, table) {
  super(
  ctx,
  entDefinitions,
  table,
- async () => isSystemTable(table) ? ctx.db.system.query(table) : ctx.db.query(table)
+ async () => systemAwareQuery(ctx.db, table)
  );
  }
  get(...args) {
@@ -601,7 +711,7 @@ var PromiseTableImpl = class extends PromiseQueryOrNullImpl {
  return {
  id,
  doc: async () => {
- const doc = await (isSystemTable(this.table) ? this.ctx.db.system.get(id) : this.ctx.db.get(id));
+ const doc = await systemAwareGet(this.ctx.db, this.table, id);
  if (throwIfNull && doc === null) {
  throw new Error(
  `Document not found with id \`${id}\` in table "${this.table}"`
@@ -617,7 +727,7 @@ var PromiseTableImpl = class extends PromiseQueryOrNullImpl {
  this.table,
  indexName
  );
- const doc = await this.ctx.db.query(this.table).withIndex(
+ const doc = await systemAwareQuery(this.ctx.db, this.table).withIndex(
  indexName,
  (q) => values.reduce((q2, value, i) => q2.eq(fieldNames[i], value), q)
  ).unique();
@@ -650,7 +760,7 @@ var PromiseTableImpl = class extends PromiseQueryOrNullImpl {
  });
  return await Promise.all(
  ids.map(async (id) => {
- const doc = await (isSystemTable(this.table) ? this.ctx.db.system.get(id) : this.ctx.db.get(id));
+ const doc = await systemAwareGet(this.ctx.db, this.table, id);
  if (throwIfNull && doc === null) {
  throw new Error(
  `Document not found with id \`${id}\` in table "${this.table}"`
@@ -661,12 +771,23 @@ var PromiseTableImpl = class extends PromiseQueryOrNullImpl {
  );
  } : async () => {
  const [indexName, values] = args;
+ const fieldNames = getIndexFields(
+ this.entDefinitions,
+ this.table,
+ indexName
+ );
+ if (fieldNames.length > 1) {
+ throw new Error(
+ `Index "${indexName}" has ${fieldNames.length} fields, but getMany() supports only single field indexes`
+ );
+ }
+ const fieldName = fieldNames[0];
  return await Promise.all(
  values.map(async (value) => {
- const doc = await this.ctx.db.query(this.table).withIndex(indexName, (q) => q.eq(indexName, value)).unique();
+ const doc = await systemAwareQuery(this.ctx.db, this.table).withIndex(indexName, (q) => q.eq(fieldName, value)).unique();
  if (throwIfNull && doc === null) {
  throw new Error(
- `Table "${this.table}" does not contain document with field "${indexName}" = \`${value}\``
+ `Table "${this.table}" does not contain document with field "${fieldName}" = \`${value}\``
  );
  }
  return doc;
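A hypothetical usage sketch of the getMany() change (the table and index names below are assumptions for illustration, not part of this diff): when called with an index name, getMany() now resolves the index's single field via getIndexFields and performs the equality check on that field, rather than treating the index name itself as the field name, and it rejects multi-field indexes with an error.

```ts
// Hypothetical schema: a "users" ents table with a single-field index "email".
// Each value is looked up with .eq() on the index's resolved field.
const users = await ctx.table("users").getMany("email", [
  "ada@example.com",
  "grace@example.com",
]);
```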
@@ -721,7 +842,7 @@ var PromiseEntsOrNullImpl = class extends Promise {
  this.throwIfNull = throwIfNull;
  }
  map(callbackFn) {
- return new PromiseArrayImpl(async () => {
+ return new PromiseArrayOrNullImpl(async () => {
  const array = await this;
  if (array === null) {
  return null;
@@ -823,11 +944,34 @@ var PromiseEntsOrNullImpl = class extends Promise {
  ).then(onfulfilled, onrejected);
  }
  };
+ var PromiseIdsOrNullImpl = class extends Promise {
+ constructor(retrieve) {
+ super(() => {
+ });
+ this.retrieve = retrieve;
+ }
+ map(callbackFn) {
+ return new PromiseArrayOrNullImpl(async () => {
+ const array = await this;
+ if (array === null) {
+ return null;
+ }
+ return await Promise.all(array.map(callbackFn));
+ });
+ }
+ then(onfulfilled, onrejected) {
+ return this.retrieve().then(onfulfilled, onrejected);
+ }
+ };
  var PromiseEdgeOrNullImpl = class _PromiseEdgeOrNullImpl extends PromiseEntsOrNullImpl {
  constructor(ctx, entDefinitions, table, edgeDefinition, retrieveSourceId, retrieveQuery, retrieveDoc = async (edgeDoc) => {
  const sourceId = edgeDoc[edgeDefinition.field];
  const targetId = edgeDoc[edgeDefinition.ref];
- const doc = await this.ctx.db.get(targetId);
+ const doc = await systemAwareGet(
+ this.ctx.db,
+ edgeDefinition.to,
+ targetId
+ );
  if (doc === null) {
  throw new Error(
  `Dangling reference for edge "${edgeDefinition.name}" in table "${this.table}" for document with ID "${sourceId}": Could not find a document with ID "${targetId}" in table "${edgeDefinition.to}" (edge document ID is "${edgeDoc._id}").`
@@ -854,6 +998,12 @@ var PromiseEdgeOrNullImpl = class _PromiseEdgeOrNullImpl extends PromiseEntsOrNullImpl {
  this.retrieveQuery = retrieveQuery;
  this.retrieveDoc = retrieveDoc;
  }
+ ids() {
+ return new PromiseIdsQueryOrNullImpl(
+ () => this.retrieveQuery(),
+ this.edgeDefinition.ref
+ );
+ }
  async has(targetId) {
  const sourceId = await this.retrieveSourceId();
  if (sourceId === null) {
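A hypothetical usage sketch of the new ids() edge method (the table, edge, and variable names below are assumptions, not part of this diff): for edges backed by a join table, ids() returns the IDs on the other end of the edge without loading the target documents, and the resulting ID query supports map, take, first/firstX, unique/uniqueX, and paginate, mirroring PromiseIdsQueryOrNullImpl above.

```ts
// Hypothetical schema: a "posts" table with a many-to-many edge "tags".
// ids() reads only the join-table documents and projects the target IDs.
const tagIds = await ctx.table("posts").getX(postId).edge("tags").ids();

// Pagination over IDs, without fetching the tag documents themselves:
const page = await ctx
  .table("posts")
  .getX(postId)
  .edge("tags")
  .ids()
  .paginate({ cursor: null, numItems: 50 });
```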
@@ -1123,7 +1273,11 @@ var PromiseEntOrNullImpl = class extends Promise {
  `Unexpected null reference for edge "${edgeDefinition.name}" in table "${this.table}" on document with ID "${id}": Expected an ID for a document in table "${edgeDefinition.to}".`
  );
  }
- const otherDoc = await this.ctx.db.get(otherId);
+ const otherDoc = await systemAwareGet(
+ this.ctx.db,
+ edgeDefinition.to,
+ otherId
+ );
  if (otherDoc === null && edgeDefinition.to !== "_scheduled_functions") {
  throw new Error(
  `Dangling reference for edge "${edgeDefinition.name}" in table "${this.table}" on document with ID "${id}": Could not find a document with ID "${otherId}" in table "${edgeDefinition.to}".`
@@ -1137,7 +1291,7 @@ var PromiseEntOrNullImpl = class extends Promise {
  );
  }
  };
- var PromiseArrayImpl = class extends Promise {
+ var PromiseArrayOrNullImpl = class extends Promise {
  constructor(retrieve) {
  super(() => {
  });
@@ -1396,7 +1550,7 @@ var PromiseEntWriterImpl = class extends PromiseEntOrNullImpl {
  }
  if (edgeDefinition.cardinality === "single") {
  if (edgeDefinition.type === "ref") {
- const oldDoc = await this.ctx.db.get(docId);
+ const oldDoc = await this.ctx.db.get(this.table, docId);
  if (oldDoc[key] !== void 0 && oldDoc[key] !== idOrIds) {
  throw new Error("Cannot set 1:1 edge from ref end.");
  }
@@ -1458,7 +1612,10 @@ var PromiseEntIdImpl = class extends Promise {
  this.table,
  async () => {
  const id = await this.retrieve();
- return { id, doc: async () => this.ctx.db.get(id) };
+ return {
+ id,
+ doc: async () => systemAwareGet(this.ctx.db, this.table, id)
+ };
  },
  true
  );
@@ -1543,6 +1700,12 @@ async function filterByReadRule(ctx, entDefinitions, table, docs, throwIfNull) {
  return docs.filter((_, i) => decisions[i]);
  }
  function getIndexFields(entDefinitions, table, index) {
+ if (index === "by_id") {
+ return ["_id"];
+ }
+ if (index === "by_creation_time") {
+ return ["_creationTime"];
+ }
  return entDefinitions[table].indexes[index];
  }
  function getReadRule(entDefinitions, table) {