@fragno-dev/db 0.1.11 → 0.1.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. package/.turbo/turbo-build.log +41 -39
  2. package/CHANGELOG.md +19 -0
  3. package/dist/adapters/drizzle/drizzle-adapter.d.ts.map +1 -1
  4. package/dist/adapters/drizzle/drizzle-adapter.js +1 -1
  5. package/dist/adapters/drizzle/drizzle-query.d.ts.map +1 -1
  6. package/dist/adapters/drizzle/drizzle-query.js +42 -34
  7. package/dist/adapters/drizzle/drizzle-query.js.map +1 -1
  8. package/dist/adapters/drizzle/drizzle-uow-compiler.js +2 -1
  9. package/dist/adapters/drizzle/drizzle-uow-compiler.js.map +1 -1
  10. package/dist/adapters/drizzle/drizzle-uow-decoder.js +25 -1
  11. package/dist/adapters/drizzle/drizzle-uow-decoder.js.map +1 -1
  12. package/dist/adapters/drizzle/generate.js +1 -1
  13. package/dist/adapters/kysely/kysely-adapter.d.ts +4 -3
  14. package/dist/adapters/kysely/kysely-adapter.d.ts.map +1 -1
  15. package/dist/adapters/kysely/kysely-adapter.js.map +1 -1
  16. package/dist/adapters/kysely/kysely-query.d.ts +22 -0
  17. package/dist/adapters/kysely/kysely-query.d.ts.map +1 -0
  18. package/dist/adapters/kysely/kysely-query.js +101 -51
  19. package/dist/adapters/kysely/kysely-query.js.map +1 -1
  20. package/dist/adapters/kysely/kysely-uow-compiler.js +2 -1
  21. package/dist/adapters/kysely/kysely-uow-compiler.js.map +1 -1
  22. package/dist/adapters/kysely/kysely-uow-executor.js +2 -2
  23. package/dist/adapters/kysely/kysely-uow-executor.js.map +1 -1
  24. package/dist/adapters/kysely/migration/execute-base.js +1 -1
  25. package/dist/migration-engine/generation-engine.d.ts +1 -1
  26. package/dist/migration-engine/generation-engine.d.ts.map +1 -1
  27. package/dist/migration-engine/generation-engine.js.map +1 -1
  28. package/dist/mod.d.ts +7 -6
  29. package/dist/mod.d.ts.map +1 -1
  30. package/dist/mod.js +2 -1
  31. package/dist/mod.js.map +1 -1
  32. package/dist/query/cursor.d.ts +67 -32
  33. package/dist/query/cursor.d.ts.map +1 -1
  34. package/dist/query/cursor.js +84 -31
  35. package/dist/query/cursor.js.map +1 -1
  36. package/dist/query/query.d.ts +29 -8
  37. package/dist/query/query.d.ts.map +1 -1
  38. package/dist/query/result-transform.js +17 -5
  39. package/dist/query/result-transform.js.map +1 -1
  40. package/dist/query/unit-of-work.d.ts +19 -8
  41. package/dist/query/unit-of-work.d.ts.map +1 -1
  42. package/dist/query/unit-of-work.js +54 -12
  43. package/dist/query/unit-of-work.js.map +1 -1
  44. package/dist/schema/serialize.js +2 -0
  45. package/dist/schema/serialize.js.map +1 -1
  46. package/package.json +3 -3
  47. package/src/adapters/drizzle/drizzle-adapter-pglite.test.ts +242 -55
  48. package/src/adapters/drizzle/drizzle-adapter-sqlite.test.ts +95 -39
  49. package/src/adapters/drizzle/drizzle-query.test.ts +54 -4
  50. package/src/adapters/drizzle/drizzle-query.ts +74 -60
  51. package/src/adapters/drizzle/drizzle-uow-compiler.test.ts +82 -6
  52. package/src/adapters/drizzle/drizzle-uow-compiler.ts +3 -2
  53. package/src/adapters/drizzle/drizzle-uow-decoder.ts +40 -1
  54. package/src/adapters/kysely/kysely-adapter-pglite.test.ts +190 -4
  55. package/src/adapters/kysely/kysely-adapter.ts +6 -3
  56. package/src/adapters/kysely/kysely-query.test.ts +498 -0
  57. package/src/adapters/kysely/kysely-query.ts +187 -83
  58. package/src/adapters/kysely/kysely-uow-compiler.test.ts +85 -3
  59. package/src/adapters/kysely/kysely-uow-compiler.ts +3 -2
  60. package/src/adapters/kysely/kysely-uow-executor.ts +5 -9
  61. package/src/migration-engine/generation-engine.ts +2 -1
  62. package/src/mod.ts +12 -7
  63. package/src/query/cursor.test.ts +113 -68
  64. package/src/query/cursor.ts +127 -36
  65. package/src/query/query-type.test.ts +34 -14
  66. package/src/query/query.ts +94 -34
  67. package/src/query/result-transform.test.ts +5 -5
  68. package/src/query/result-transform.ts +29 -11
  69. package/src/query/unit-of-work.ts +141 -26
  70. package/src/schema/serialize.test.ts +223 -0
  71. package/src/schema/serialize.ts +16 -0
@@ -25,8 +25,8 @@ async function executeKyselyRetrievalPhase(kysely, retrievalBatch) {
25
25
  if (retrievalBatch.length === 0) return [];
26
26
  const retrievalResults = [];
27
27
  await kysely.transaction().execute(async (tx) => {
28
- for (const query of retrievalBatch) {
29
- const result = await tx.executeQuery(query);
28
+ for (const compiledQuery of retrievalBatch) {
29
+ const result = await tx.executeQuery(compiledQuery);
30
30
  retrievalResults.push(result.rows);
31
31
  }
32
32
  });
@@ -1 +1 @@
1
- {"version":3,"file":"kysely-uow-executor.js","names":["retrievalResults: unknown[]","createdInternalIds: (bigint | null)[]"],"sources":["../../../src/adapters/kysely/kysely-uow-executor.ts"],"sourcesContent":["import type { Kysely, QueryResult } from \"kysely\";\nimport type { CompiledMutation, MutationResult } from \"../../query/unit-of-work\";\n\nfunction getAffectedRows(result: QueryResult<unknown>): number {\n const affectedRows =\n result.numAffectedRows ??\n result.numChangedRows ??\n // PGLite returns `affectedRows` instead of `numAffectedRows` or `numChangedRows`\n (\"affectedRows\" in result &&\n (typeof result[\"affectedRows\"] === \"number\" || typeof result[\"affectedRows\"] === \"bigint\")\n ? result[\"affectedRows\"]\n : undefined);\n\n if (affectedRows === undefined) {\n throw new Error(\"No affected rows found\");\n }\n\n if (affectedRows > Number.MAX_SAFE_INTEGER) {\n throw new Error(\n `affectedRows BigInt value ${affectedRows.toString()} exceeds JS safe integer range`,\n );\n }\n\n return Number(affectedRows);\n}\n\n/**\n * Execute the retrieval phase of a Unit of Work using Kysely\n *\n * All retrieval queries are executed inside a single transaction to ensure\n * snapshot isolation - all reads see a consistent view of the database.\n *\n * @param kysely - The Kysely database instance\n * @param retrievalBatch - Array of compiled retrieval queries\n * @returns Array of query results matching the retrieval operations order\n *\n * @example\n * ```ts\n * const retrievalResults = await executeKyselyRetrievalPhase(kysely, compiled.retrievalBatch);\n * const [users, posts] = retrievalResults;\n * ```\n */\nexport async function executeKyselyRetrievalPhase(\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n kysely: Kysely<any>,\n retrievalBatch: (Kysely<unknown>[\"executeQuery\"] extends (query: infer Q) => unknown\n ? 
Q\n : never)[],\n): Promise<unknown[]> {\n // If no retrieval operations, return empty array immediately\n if (retrievalBatch.length === 0) {\n return [];\n }\n\n const retrievalResults: unknown[] = [];\n\n // Execute all retrieval queries inside a transaction for snapshot isolation\n await kysely.transaction().execute(async (tx) => {\n for (const query of retrievalBatch) {\n const result = await tx.executeQuery(query);\n retrievalResults.push(result.rows);\n }\n });\n\n return retrievalResults;\n}\n\n/**\n * Execute the mutation phase of a Unit of Work using Kysely\n *\n * All mutation queries are executed in a transaction with optimistic locking.\n * If any version check fails, the entire transaction is rolled back and\n * success=false is returned.\n *\n * @param kysely - The Kysely database instance\n * @param mutationBatch - Array of compiled mutation queries with expected affected rows\n * @returns Object with success flag and internal IDs from create operations\n *\n * @example\n * ```ts\n * const { success, createdInternalIds } = await executeKyselyMutationPhase(kysely, compiled.mutationBatch);\n * if (!success) {\n * console.log(\"Version conflict detected, retrying...\");\n * }\n * ```\n */\nexport async function executeKyselyMutationPhase(\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n kysely: Kysely<any>,\n mutationBatch: CompiledMutation<\n Kysely<unknown>[\"executeQuery\"] extends (query: infer Q) => unknown ? 
Q : never\n >[],\n): Promise<MutationResult> {\n // If there are no mutations, return success immediately\n if (mutationBatch.length === 0) {\n return { success: true, createdInternalIds: [] };\n }\n\n const createdInternalIds: (bigint | null)[] = [];\n\n // Execute mutation batch in a transaction\n try {\n await kysely.transaction().execute(async (tx) => {\n for (const compiledMutation of mutationBatch) {\n const result = await tx.executeQuery(compiledMutation.query);\n\n // For creates (expectedAffectedRows === null), try to extract internal ID\n if (compiledMutation.expectedAffectedRows === null) {\n // Check if result has rows (RETURNING clause supported)\n if (Array.isArray(result.rows) && result.rows.length > 0) {\n const row = result.rows[0] as Record<string, unknown>;\n if (\"_internalId\" in row || \"_internal_id\" in row) {\n const internalId = (row[\"_internalId\"] ?? row[\"_internal_id\"]) as bigint;\n createdInternalIds.push(internalId);\n } else {\n // RETURNING supported but _internalId not found\n createdInternalIds.push(null);\n }\n } else {\n // No RETURNING support (e.g., MySQL)\n createdInternalIds.push(null);\n }\n } else {\n // Check affected rows for updates/deletes\n const affectedRows = getAffectedRows(result);\n\n if (affectedRows !== compiledMutation.expectedAffectedRows) {\n // Version conflict detected - the UPDATE/DELETE didn't affect the expected number of rows\n // This means either the row doesn't exist or the version has changed\n throw new Error(\n `Version conflict: expected ${compiledMutation.expectedAffectedRows} rows affected, but got ${affectedRows}`,\n );\n }\n }\n }\n });\n\n return { success: true, createdInternalIds };\n } catch (error) {\n // Transaction failed - could be version conflict or other constraint violation\n // Return success=false to indicate the UOW should be retried\n if (error instanceof Error && error.message.includes(\"Version conflict\")) {\n return { success: false };\n }\n\n // Other database errors 
should be thrown\n throw error;\n }\n}\n"],"mappings":";AAGA,SAAS,gBAAgB,QAAsC;CAC7D,MAAM,eACJ,OAAO,mBACP,OAAO,mBAEN,kBAAkB,WAClB,OAAO,OAAO,oBAAoB,YAAY,OAAO,OAAO,oBAAoB,YAC7E,OAAO,kBACP;AAEN,KAAI,iBAAiB,OACnB,OAAM,IAAI,MAAM,yBAAyB;AAG3C,KAAI,eAAe,OAAO,iBACxB,OAAM,IAAI,MACR,6BAA6B,aAAa,UAAU,CAAC,gCACtD;AAGH,QAAO,OAAO,aAAa;;;;;;;;;;;;;;;;;;AAmB7B,eAAsB,4BAEpB,QACA,gBAGoB;AAEpB,KAAI,eAAe,WAAW,EAC5B,QAAO,EAAE;CAGX,MAAMA,mBAA8B,EAAE;AAGtC,OAAM,OAAO,aAAa,CAAC,QAAQ,OAAO,OAAO;AAC/C,OAAK,MAAM,SAAS,gBAAgB;GAClC,MAAM,SAAS,MAAM,GAAG,aAAa,MAAM;AAC3C,oBAAiB,KAAK,OAAO,KAAK;;GAEpC;AAEF,QAAO;;;;;;;;;;;;;;;;;;;;;AAsBT,eAAsB,2BAEpB,QACA,eAGyB;AAEzB,KAAI,cAAc,WAAW,EAC3B,QAAO;EAAE,SAAS;EAAM,oBAAoB,EAAE;EAAE;CAGlD,MAAMC,qBAAwC,EAAE;AAGhD,KAAI;AACF,QAAM,OAAO,aAAa,CAAC,QAAQ,OAAO,OAAO;AAC/C,QAAK,MAAM,oBAAoB,eAAe;IAC5C,MAAM,SAAS,MAAM,GAAG,aAAa,iBAAiB,MAAM;AAG5D,QAAI,iBAAiB,yBAAyB,KAE5C,KAAI,MAAM,QAAQ,OAAO,KAAK,IAAI,OAAO,KAAK,SAAS,GAAG;KACxD,MAAM,MAAM,OAAO,KAAK;AACxB,SAAI,iBAAiB,OAAO,kBAAkB,KAAK;MACjD,MAAM,aAAc,IAAI,kBAAkB,IAAI;AAC9C,yBAAmB,KAAK,WAAW;WAGnC,oBAAmB,KAAK,KAAK;UAI/B,oBAAmB,KAAK,KAAK;SAE1B;KAEL,MAAM,eAAe,gBAAgB,OAAO;AAE5C,SAAI,iBAAiB,iBAAiB,qBAGpC,OAAM,IAAI,MACR,8BAA8B,iBAAiB,qBAAqB,0BAA0B,eAC/F;;;IAIP;AAEF,SAAO;GAAE,SAAS;GAAM;GAAoB;UACrC,OAAO;AAGd,MAAI,iBAAiB,SAAS,MAAM,QAAQ,SAAS,mBAAmB,CACtE,QAAO,EAAE,SAAS,OAAO;AAI3B,QAAM"}
1
+ {"version":3,"file":"kysely-uow-executor.js","names":["retrievalResults: unknown[]","createdInternalIds: (bigint | null)[]"],"sources":["../../../src/adapters/kysely/kysely-uow-executor.ts"],"sourcesContent":["import type { CompiledQuery, Kysely, QueryResult } from \"kysely\";\nimport type { CompiledMutation, MutationResult } from \"../../query/unit-of-work\";\n\nfunction getAffectedRows(result: QueryResult<unknown>): number {\n const affectedRows =\n result.numAffectedRows ??\n result.numChangedRows ??\n // PGLite returns `affectedRows` instead of `numAffectedRows` or `numChangedRows`\n (\"affectedRows\" in result &&\n (typeof result[\"affectedRows\"] === \"number\" || typeof result[\"affectedRows\"] === \"bigint\")\n ? result[\"affectedRows\"]\n : undefined);\n\n if (affectedRows === undefined) {\n throw new Error(\"No affected rows found\");\n }\n\n if (affectedRows > Number.MAX_SAFE_INTEGER) {\n throw new Error(\n `affectedRows BigInt value ${affectedRows.toString()} exceeds JS safe integer range`,\n );\n }\n\n return Number(affectedRows);\n}\n\n/**\n * Execute the retrieval phase of a Unit of Work using Kysely\n *\n * All retrieval queries are executed inside a single transaction to ensure\n * snapshot isolation - all reads see a consistent view of the database.\n *\n * @param kysely - The Kysely database instance\n * @param retrievalBatch - Array of compiled retrieval queries\n * @returns Array of query results matching the retrieval operations order\n *\n * @example\n * ```ts\n * const retrievalResults = await executeKyselyRetrievalPhase(kysely, compiled.retrievalBatch);\n * const [users, posts] = retrievalResults;\n * ```\n */\nexport async function executeKyselyRetrievalPhase(\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n kysely: Kysely<any>,\n retrievalBatch: CompiledQuery[],\n): Promise<unknown[]> {\n // If no retrieval operations, return empty array immediately\n if (retrievalBatch.length === 0) {\n return [];\n }\n\n const 
retrievalResults: unknown[] = [];\n\n // Execute all retrieval queries inside a transaction for snapshot isolation\n await kysely.transaction().execute(async (tx) => {\n for (const compiledQuery of retrievalBatch) {\n const result = await tx.executeQuery(compiledQuery);\n retrievalResults.push(result.rows);\n }\n });\n\n return retrievalResults;\n}\n\n/**\n * Execute the mutation phase of a Unit of Work using Kysely\n *\n * All mutation queries are executed in a transaction with optimistic locking.\n * If any version check fails, the entire transaction is rolled back and\n * success=false is returned.\n *\n * @param kysely - The Kysely database instance\n * @param mutationBatch - Array of compiled mutation queries with expected affected rows\n * @returns Object with success flag and internal IDs from create operations\n *\n * @example\n * ```ts\n * const { success, createdInternalIds } = await executeKyselyMutationPhase(kysely, compiled.mutationBatch);\n * if (!success) {\n * console.log(\"Version conflict detected, retrying...\");\n * }\n * ```\n */\nexport async function executeKyselyMutationPhase(\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n kysely: Kysely<any>,\n mutationBatch: CompiledMutation<CompiledQuery>[],\n): Promise<MutationResult> {\n // If there are no mutations, return success immediately\n if (mutationBatch.length === 0) {\n return { success: true, createdInternalIds: [] };\n }\n\n const createdInternalIds: (bigint | null)[] = [];\n\n // Execute mutation batch in a transaction\n try {\n await kysely.transaction().execute(async (tx) => {\n for (const compiledMutation of mutationBatch) {\n const result = await tx.executeQuery(compiledMutation.query);\n\n // For creates (expectedAffectedRows === null), try to extract internal ID\n if (compiledMutation.expectedAffectedRows === null) {\n // Check if result has rows (RETURNING clause supported)\n if (Array.isArray(result.rows) && result.rows.length > 0) {\n const row = result.rows[0] 
as Record<string, unknown>;\n if (\"_internalId\" in row || \"_internal_id\" in row) {\n const internalId = (row[\"_internalId\"] ?? row[\"_internal_id\"]) as bigint;\n createdInternalIds.push(internalId);\n } else {\n // RETURNING supported but _internalId not found\n createdInternalIds.push(null);\n }\n } else {\n // No RETURNING support (e.g., MySQL)\n createdInternalIds.push(null);\n }\n } else {\n // Check affected rows for updates/deletes\n const affectedRows = getAffectedRows(result);\n\n if (affectedRows !== compiledMutation.expectedAffectedRows) {\n // Version conflict detected - the UPDATE/DELETE didn't affect the expected number of rows\n // This means either the row doesn't exist or the version has changed\n throw new Error(\n `Version conflict: expected ${compiledMutation.expectedAffectedRows} rows affected, but got ${affectedRows}`,\n );\n }\n }\n }\n });\n\n return { success: true, createdInternalIds };\n } catch (error) {\n // Transaction failed - could be version conflict or other constraint violation\n // Return success=false to indicate the UOW should be retried\n if (error instanceof Error && error.message.includes(\"Version conflict\")) {\n return { success: false };\n }\n\n // Other database errors should be thrown\n throw error;\n 
}\n}\n"],"mappings":";AAGA,SAAS,gBAAgB,QAAsC;CAC7D,MAAM,eACJ,OAAO,mBACP,OAAO,mBAEN,kBAAkB,WAClB,OAAO,OAAO,oBAAoB,YAAY,OAAO,OAAO,oBAAoB,YAC7E,OAAO,kBACP;AAEN,KAAI,iBAAiB,OACnB,OAAM,IAAI,MAAM,yBAAyB;AAG3C,KAAI,eAAe,OAAO,iBACxB,OAAM,IAAI,MACR,6BAA6B,aAAa,UAAU,CAAC,gCACtD;AAGH,QAAO,OAAO,aAAa;;;;;;;;;;;;;;;;;;AAmB7B,eAAsB,4BAEpB,QACA,gBACoB;AAEpB,KAAI,eAAe,WAAW,EAC5B,QAAO,EAAE;CAGX,MAAMA,mBAA8B,EAAE;AAGtC,OAAM,OAAO,aAAa,CAAC,QAAQ,OAAO,OAAO;AAC/C,OAAK,MAAM,iBAAiB,gBAAgB;GAC1C,MAAM,SAAS,MAAM,GAAG,aAAa,cAAc;AACnD,oBAAiB,KAAK,OAAO,KAAK;;GAEpC;AAEF,QAAO;;;;;;;;;;;;;;;;;;;;;AAsBT,eAAsB,2BAEpB,QACA,eACyB;AAEzB,KAAI,cAAc,WAAW,EAC3B,QAAO;EAAE,SAAS;EAAM,oBAAoB,EAAE;EAAE;CAGlD,MAAMC,qBAAwC,EAAE;AAGhD,KAAI;AACF,QAAM,OAAO,aAAa,CAAC,QAAQ,OAAO,OAAO;AAC/C,QAAK,MAAM,oBAAoB,eAAe;IAC5C,MAAM,SAAS,MAAM,GAAG,aAAa,iBAAiB,MAAM;AAG5D,QAAI,iBAAiB,yBAAyB,KAE5C,KAAI,MAAM,QAAQ,OAAO,KAAK,IAAI,OAAO,KAAK,SAAS,GAAG;KACxD,MAAM,MAAM,OAAO,KAAK;AACxB,SAAI,iBAAiB,OAAO,kBAAkB,KAAK;MACjD,MAAM,aAAc,IAAI,kBAAkB,IAAI;AAC9C,yBAAmB,KAAK,WAAW;WAGnC,oBAAmB,KAAK,KAAK;UAI/B,oBAAmB,KAAK,KAAK;SAE1B;KAEL,MAAM,eAAe,gBAAgB,OAAO;AAE5C,SAAI,iBAAiB,iBAAiB,qBAGpC,OAAM,IAAI,MACR,8BAA8B,iBAAiB,qBAAqB,0BAA0B,eAC/F;;;IAIP;AAEF,SAAO;GAAE,SAAS;GAAM;GAAoB;UACrC,OAAO;AAGd,MAAI,iBAAiB,SAAS,MAAM,QAAQ,SAAS,mBAAmB,CACtE,QAAO,EAAE,SAAS,OAAO;AAI3B,QAAM"}
@@ -1,5 +1,5 @@
1
- import { SETTINGS_TABLE_NAME } from "../../../shared/settings-schema.js";
2
1
  import { schemaToDBType } from "../../../schema/serialize.js";
2
+ import { SETTINGS_TABLE_NAME } from "../../../shared/settings-schema.js";
3
3
  import { sql } from "kysely";
4
4
 
5
5
  //#region src/adapters/kysely/migration/execute-base.ts
@@ -22,7 +22,7 @@ interface ExecuteMigrationResult {
22
22
  fromVersion: number;
23
23
  toVersion: number;
24
24
  }
25
- declare function generateMigrationsOrSchema<const TDatabases extends FragnoDatabase<AnySchema>[]>(databases: TDatabases, options?: {
25
+ declare function generateMigrationsOrSchema<const TDatabases extends FragnoDatabase<AnySchema, any>[]>(databases: TDatabases, options?: {
26
26
  path?: string;
27
27
  toVersion?: number;
28
28
  fromVersion?: number;
@@ -1 +1 @@
1
- {"version":3,"file":"generation-engine.d.ts","names":[],"sources":["../../src/migration-engine/generation-engine.ts"],"sourcesContent":[],"mappings":";;;;;UAaiB,sBAAA;;EAAA,IAAA,EAAA,MAAA;EAMA,SAAA,EAAA,MAAA;AASjB;AAOsB,UAhBL,wBAAA,CAgB+B;EACN,MAAA,EAAA,MAAA;EAAf,IAAA,EAAA,MAAA;EAEd,SAAA,EAAA,MAAA;EAMF,WAAA,EAAA,MAAA;EAAR,SAAA,EAAA,MAAA;EAAO,iBAAA,CAAA,EAnBY,iBAmBZ;AAiJV;AAAgF,UAjK/D,sBAAA,CAiK+D;EAAf,SAAA,EAAA,MAAA;EACpD,UAAA,EAAA,OAAA;EACF,WAAA,EAAA,MAAA;EAAR,SAAA,EAAA,MAAA;;AAyIa,iBArSM,0BAsSb,CAAA,yBArSkB,cAsSxB,CAtSuC,SAsSjB,CAAA,EAAA,CAAA,CAAA,SAAA,EApSZ,UAoSY,EAAA,QAAA,EAAA;;;;IA9RtB,QAAQ;;;;;;;;iBAiJW,2CAA2C,eAAe,yBACnE,aACV,QAAQ;;;;;;;;;;;iBAyIK,6BAAA,QACP,6BACN"}
1
+ {"version":3,"file":"generation-engine.d.ts","names":[],"sources":["../../src/migration-engine/generation-engine.ts"],"sourcesContent":[],"mappings":";;;;;UAaiB,sBAAA;;EAAA,IAAA,EAAA,MAAA;EAMA,SAAA,EAAA,MAAA;AASjB;AAOsB,UAhBL,wBAAA,CAgB+B;EAEN,MAAA,EAAA,MAAA;EAAf,IAAA,EAAA,MAAA;EAEd,SAAA,EAAA,MAAA;EAMF,WAAA,EAAA,MAAA;EAAR,SAAA,EAAA,MAAA;EAAO,iBAAA,CAAA,EApBY,iBAoBZ;AAiJV;AAAgF,UAlK/D,sBAAA,CAkK+D;EAAf,SAAA,EAAA,MAAA;EACpD,UAAA,EAAA,OAAA;EACF,WAAA,EAAA,MAAA;EAAR,SAAA,EAAA,MAAA;;AAyIa,iBAtSM,0BAuSb,CAAA,yBArSkB,cAsSxB,CAtSuC,SAsSjB,EAAA,GAAA,CAAA,EAAA,CAAA,CAAA,SAAA,EApSZ,UAoSY,EAAA,QAAA,EAAA;;;;IA9RtB,QAAQ;;;;;;;;iBAiJW,2CAA2C,eAAe,yBACnE,aACV,QAAQ;;;;;;;;;;;iBAyIK,6BAAA,QACP,6BACN"}
@@ -1 +1 @@
1
- {"version":3,"file":"generation-engine.js","names":["settingsSourceVersion: number","generatedFiles: GenerationInternalResult[]","results: ExecuteMigrationResult[]","migrationsToExecute: Array<{\n namespace: string;\n fromVersion: number;\n toVersion: number;\n preparedMigration: PreparedMigration;\n }>"],"sources":["../../src/migration-engine/generation-engine.ts"],"sourcesContent":["import type { FragnoDatabase } from \"../mod\";\nimport type { AnySchema } from \"../schema/create\";\nimport type { PreparedMigration } from \"./create\";\nimport {\n settingsSchema,\n SETTINGS_NAMESPACE,\n createSettingsManager,\n} from \"../shared/settings-schema\";\nimport {\n fragnoDatabaseAdapterNameFakeSymbol,\n fragnoDatabaseAdapterVersionFakeSymbol,\n} from \"../adapters/adapters\";\n\nexport interface GenerationEngineResult {\n schema: string;\n path: string;\n namespace: string;\n}\n\nexport interface GenerationInternalResult {\n schema: string;\n path: string;\n namespace: string;\n fromVersion: number;\n toVersion: number;\n preparedMigration?: PreparedMigration;\n}\n\nexport interface ExecuteMigrationResult {\n namespace: string;\n didMigrate: boolean;\n fromVersion: number;\n toVersion: number;\n}\n\nexport async function generateMigrationsOrSchema<\n const TDatabases extends FragnoDatabase<AnySchema>[],\n>(\n databases: TDatabases,\n options?: {\n path?: string;\n toVersion?: number;\n fromVersion?: number;\n },\n): Promise<GenerationEngineResult[]> {\n if (databases.length === 0) {\n throw new Error(\"No databases provided for schema generation\");\n }\n\n const firstDb = databases[0];\n const adapter = firstDb.adapter;\n\n // If adapter has createSchemaGenerator, use it for combined generation (e.g., Drizzle)\n if (adapter.createSchemaGenerator) {\n if (options?.toVersion !== undefined || options?.fromVersion !== undefined) {\n console.warn(\n \"⚠️ Warning: --from and --to version options are not supported when generating schemas for multiple fragments and will be 
ignored.\",\n );\n }\n\n const fragments = databases.map((db) => ({\n schema: db.schema,\n namespace: db.namespace,\n }));\n\n const generator = adapter.createSchemaGenerator(fragments, {\n path: options?.path,\n });\n\n return [\n {\n ...generator.generateSchema(),\n namespace: firstDb.namespace,\n },\n ];\n }\n\n // Otherwise, use migration engine for individual generation (e.g., Kysely)\n if (!adapter.createMigrationEngine) {\n throw new Error(\n \"Adapter does not support migration-based schema generation. Ensure your adapter implements createMigrationEngine.\",\n );\n }\n\n if (!(await adapter.isConnectionHealthy())) {\n throw new Error(\n \"Database connection is not healthy. Please check your database connection and try again.\",\n );\n }\n\n const settingsQueryEngine = adapter.createQueryEngine(settingsSchema, \"\");\n const settingsManager = createSettingsManager(settingsQueryEngine, SETTINGS_NAMESPACE);\n\n let settingsSourceVersion: number;\n try {\n const result = await settingsManager.get(\"version\");\n\n if (!result) {\n settingsSourceVersion = 0;\n } else {\n settingsSourceVersion = parseInt(result.value);\n }\n } catch {\n // We don't really have a way to verify this error happens because the key doesn't exist in the database\n settingsSourceVersion = 0;\n }\n\n const generatedFiles: GenerationInternalResult[] = [];\n\n const settingsMigrator = adapter.createMigrationEngine(settingsSchema, SETTINGS_NAMESPACE);\n const settingsTargetVersion = settingsSchema.version;\n\n // Generate settings table migration\n const settingsMigration = await settingsMigrator.prepareMigrationTo(settingsTargetVersion, {\n fromVersion: settingsSourceVersion,\n });\n\n if (!settingsMigration.getSQL) {\n throw new Error(\n \"Migration engine does not support SQL generation. 
Ensure your adapter's migration engine provides getSQL().\",\n );\n }\n\n const settingsSql = settingsMigration.getSQL();\n\n if (settingsSql.trim()) {\n generatedFiles.push({\n schema: settingsSql,\n path: \"settings-migration.sql\", // Placeholder, will be renamed in post-processing\n namespace: SETTINGS_NAMESPACE,\n fromVersion: settingsSourceVersion,\n toVersion: settingsTargetVersion,\n preparedMigration: settingsMigration,\n });\n }\n\n // Generate migration for each fragment\n for (const db of databases) {\n const dbAdapter = db.adapter;\n\n // Use migration engine\n if (!dbAdapter.createMigrationEngine) {\n throw new Error(\n `Adapter for ${db.namespace} does not support schema generation. ` +\n `Ensure your adapter implements either createSchemaGenerator or createMigrationEngine.`,\n );\n }\n\n const migrator = dbAdapter.createMigrationEngine(db.schema, db.namespace);\n const targetVersion = options?.toVersion ?? db.schema.version;\n const sourceVersion = options?.fromVersion ?? 0;\n\n // Generate migration from source to target version\n const preparedMigration = await migrator.prepareMigrationTo(targetVersion, {\n fromVersion: sourceVersion,\n });\n\n if (!preparedMigration.getSQL) {\n throw new Error(\n \"Migration engine does not support SQL generation. 
Ensure your adapter's migration engine provides getSQL().\",\n );\n }\n\n const sql = preparedMigration.getSQL();\n\n // If no migrations needed, skip this fragment\n if (sql.trim()) {\n generatedFiles.push({\n schema: sql,\n path: \"schema.sql\", // Placeholder, will be renamed in post-processing\n namespace: db.namespace,\n fromVersion: sourceVersion,\n toVersion: targetVersion,\n preparedMigration: preparedMigration,\n });\n }\n }\n\n // Post-process filenames with ordering\n return postProcessMigrationFilenames(generatedFiles);\n}\n\n/**\n * Execute migrations for all fragments in the correct order.\n * Migrates settings table first, then fragments alphabetically.\n *\n * @param databases - Array of FragnoDatabase instances to migrate\n * @returns Array of execution results for each migration\n */\nexport async function executeMigrations<const TDatabases extends FragnoDatabase<AnySchema>[]>(\n databases: TDatabases,\n): Promise<ExecuteMigrationResult[]> {\n if (databases.length === 0) {\n throw new Error(\"No databases provided for migration\");\n }\n\n const firstDb = databases[0];\n const adapter = firstDb.adapter;\n\n // Validate adapter supports migrations\n if (!adapter.createMigrationEngine) {\n throw new Error(\n \"Adapter does not support running migrations. The adapter only supports schema generation.\\n\" +\n \"Try using 'generateMigrationsOrSchema' instead to generate schema files.\",\n );\n }\n\n // Validate all use same adapter name and version\n const firstAdapterName = adapter[fragnoDatabaseAdapterNameFakeSymbol];\n const firstAdapterVersion = adapter[fragnoDatabaseAdapterVersionFakeSymbol];\n\n for (const db of databases) {\n const dbAdapterName = db.adapter[fragnoDatabaseAdapterNameFakeSymbol];\n const dbAdapterVersion = db.adapter[fragnoDatabaseAdapterVersionFakeSymbol];\n\n if (dbAdapterName !== firstAdapterName || dbAdapterVersion !== firstAdapterVersion) {\n throw new Error(\n `All fragments must use the same database adapter. 
` +\n `Found: ${firstAdapterName}@${firstAdapterVersion} and ${dbAdapterName}@${dbAdapterVersion}`,\n );\n }\n }\n\n if (!(await adapter.isConnectionHealthy())) {\n throw new Error(\n \"Database connection is not healthy. Please check your database connection and try again.\",\n );\n }\n\n const results: ExecuteMigrationResult[] = [];\n const migrationsToExecute: Array<{\n namespace: string;\n fromVersion: number;\n toVersion: number;\n preparedMigration: PreparedMigration;\n }> = [];\n\n // 1. Prepare settings table migration\n const settingsQueryEngine = adapter.createQueryEngine(settingsSchema, \"\");\n const settingsManager = createSettingsManager(settingsQueryEngine, SETTINGS_NAMESPACE);\n\n let settingsSourceVersion: number;\n try {\n const result = await settingsManager.get(\"version\");\n settingsSourceVersion = result ? parseInt(result.value) : 0;\n } catch {\n settingsSourceVersion = 0;\n }\n\n const settingsMigrator = adapter.createMigrationEngine(settingsSchema, SETTINGS_NAMESPACE);\n const settingsTargetVersion = settingsSchema.version;\n\n if (settingsSourceVersion < settingsTargetVersion) {\n const settingsMigration = await settingsMigrator.prepareMigrationTo(settingsTargetVersion, {\n fromVersion: settingsSourceVersion,\n updateSettings: true,\n });\n\n if (settingsMigration.operations.length > 0) {\n migrationsToExecute.push({\n namespace: SETTINGS_NAMESPACE,\n fromVersion: settingsSourceVersion,\n toVersion: settingsTargetVersion,\n preparedMigration: settingsMigration,\n });\n }\n }\n\n // 2. 
Prepare fragment migrations (sorted alphabetically)\n const sortedDatabases = [...databases].sort((a, b) => a.namespace.localeCompare(b.namespace));\n\n for (const fragnoDb of sortedDatabases) {\n const migrator = adapter.createMigrationEngine(fragnoDb.schema, fragnoDb.namespace);\n const currentVersion = await migrator.getVersion();\n const targetVersion = fragnoDb.schema.version;\n\n if (currentVersion < targetVersion) {\n const preparedMigration = await migrator.prepareMigrationTo(targetVersion, {\n updateSettings: true,\n });\n\n if (preparedMigration.operations.length > 0) {\n migrationsToExecute.push({\n namespace: fragnoDb.namespace,\n fromVersion: currentVersion,\n toVersion: targetVersion,\n preparedMigration: preparedMigration,\n });\n }\n }\n }\n\n // 3. Execute all migrations in order\n for (const migration of migrationsToExecute) {\n await migration.preparedMigration.execute();\n results.push({\n namespace: migration.namespace,\n didMigrate: true,\n fromVersion: migration.fromVersion,\n toVersion: migration.toVersion,\n });\n }\n\n // 4. Add skipped migrations (already up-to-date)\n for (const fragnoDb of databases) {\n if (!results.find((r) => r.namespace === fragnoDb.namespace)) {\n results.push({\n namespace: fragnoDb.namespace,\n didMigrate: false,\n fromVersion: fragnoDb.schema.version,\n toVersion: fragnoDb.schema.version,\n });\n }\n }\n\n return results;\n}\n\n/**\n * Post-processes migration files to add ordering and standardize naming.\n *\n * Sorts files with settings namespace first, then alphabetically by namespace,\n * and assigns ordering numbers. 
Transforms filenames to format:\n * `<date>_<n>_f<from>_t<to>_<namespace>.sql`\n *\n * @param files - Array of generated migration files with version information\n * @returns Array of files with standardized paths and ordering\n */\nexport function postProcessMigrationFilenames(\n files: GenerationInternalResult[],\n): GenerationEngineResult[] {\n if (files.length === 0) {\n return [];\n }\n\n // Sort files: settings namespace first, then alphabetically by namespace\n const sortedFiles = [...files].sort((a, b) => {\n if (a.namespace === SETTINGS_NAMESPACE) {\n return -1;\n }\n if (b.namespace === SETTINGS_NAMESPACE) {\n return 1;\n }\n return a.namespace.localeCompare(b.namespace);\n });\n\n // Generate date prefix for filenames\n const date = new Date().toISOString().split(\"T\")[0].replace(/-/g, \"\");\n\n // Rename files with ordering\n return sortedFiles.map((file, index) => {\n const fromVersion = file.fromVersion ?? 0;\n const toVersion = file.toVersion ?? 0;\n\n // Create new filename with ordering\n const orderNum = (index + 1).toString().padStart(3, \"0\");\n const fromPadded = fromVersion.toString().padStart(3, \"0\");\n const toPadded = toVersion.toString().padStart(3, \"0\");\n const safeName = file.namespace.replace(/[^a-z0-9-]/gi, \"_\");\n const newPath = `${date}_${orderNum}_f${fromPadded}_t${toPadded}_${safeName}.sql`;\n\n return {\n schema: file.schema,\n path: newPath,\n namespace: file.namespace,\n };\n 
});\n}\n"],"mappings":";;;;AAmCA,eAAsB,2BAGpB,WACA,SAKmC;AACnC,KAAI,UAAU,WAAW,EACvB,OAAM,IAAI,MAAM,8CAA8C;CAGhE,MAAM,UAAU,UAAU;CAC1B,MAAM,UAAU,QAAQ;AAGxB,KAAI,QAAQ,uBAAuB;AACjC,MAAI,SAAS,cAAc,UAAa,SAAS,gBAAgB,OAC/D,SAAQ,KACN,oIACD;EAGH,MAAM,YAAY,UAAU,KAAK,QAAQ;GACvC,QAAQ,GAAG;GACX,WAAW,GAAG;GACf,EAAE;AAMH,SAAO,CACL;GACE,GANc,QAAQ,sBAAsB,WAAW,EACzD,MAAM,SAAS,MAChB,CAAC,CAIe,gBAAgB;GAC7B,WAAW,QAAQ;GACpB,CACF;;AAIH,KAAI,CAAC,QAAQ,sBACX,OAAM,IAAI,MACR,oHACD;AAGH,KAAI,CAAE,MAAM,QAAQ,qBAAqB,CACvC,OAAM,IAAI,MACR,2FACD;CAIH,MAAM,kBAAkB,sBADI,QAAQ,kBAAkB,gBAAgB,GAAG,EACN,mBAAmB;CAEtF,IAAIA;AACJ,KAAI;EACF,MAAM,SAAS,MAAM,gBAAgB,IAAI,UAAU;AAEnD,MAAI,CAAC,OACH,yBAAwB;MAExB,yBAAwB,SAAS,OAAO,MAAM;SAE1C;AAEN,0BAAwB;;CAG1B,MAAMC,iBAA6C,EAAE;CAErD,MAAM,mBAAmB,QAAQ,sBAAsB,gBAAgB,mBAAmB;CAC1F,MAAM,wBAAwB,eAAe;CAG7C,MAAM,oBAAoB,MAAM,iBAAiB,mBAAmB,uBAAuB,EACzF,aAAa,uBACd,CAAC;AAEF,KAAI,CAAC,kBAAkB,OACrB,OAAM,IAAI,MACR,8GACD;CAGH,MAAM,cAAc,kBAAkB,QAAQ;AAE9C,KAAI,YAAY,MAAM,CACpB,gBAAe,KAAK;EAClB,QAAQ;EACR,MAAM;EACN,WAAW;EACX,aAAa;EACb,WAAW;EACX,mBAAmB;EACpB,CAAC;AAIJ,MAAK,MAAM,MAAM,WAAW;EAC1B,MAAM,YAAY,GAAG;AAGrB,MAAI,CAAC,UAAU,sBACb,OAAM,IAAI,MACR,eAAe,GAAG,UAAU,4HAE7B;EAGH,MAAM,WAAW,UAAU,sBAAsB,GAAG,QAAQ,GAAG,UAAU;EACzE,MAAM,gBAAgB,SAAS,aAAa,GAAG,OAAO;EACtD,MAAM,gBAAgB,SAAS,eAAe;EAG9C,MAAM,oBAAoB,MAAM,SAAS,mBAAmB,eAAe,EACzE,aAAa,eACd,CAAC;AAEF,MAAI,CAAC,kBAAkB,OACrB,OAAM,IAAI,MACR,8GACD;EAGH,MAAM,MAAM,kBAAkB,QAAQ;AAGtC,MAAI,IAAI,MAAM,CACZ,gBAAe,KAAK;GAClB,QAAQ;GACR,MAAM;GACN,WAAW,GAAG;GACd,aAAa;GACb,WAAW;GACQ;GACpB,CAAC;;AAKN,QAAO,8BAA8B,eAAe;;;;;;;;;AAUtD,eAAsB,kBACpB,WACmC;AACnC,KAAI,UAAU,WAAW,EACvB,OAAM,IAAI,MAAM,sCAAsC;CAIxD,MAAM,UADU,UAAU,GACF;AAGxB,KAAI,CAAC,QAAQ,sBACX,OAAM,IAAI,MACR,sKAED;CAIH,MAAM,mBAAmB,QAAQ;CACjC,MAAM,sBAAsB,QAAQ;AAEpC,MAAK,MAAM,MAAM,WAAW;EAC1B,MAAM,gBAAgB,GAAG,QAAQ;EACjC,MAAM,mBAAmB,GAAG,QAAQ;AAEpC,MAAI,kBAAkB,oBAAoB,qBAAqB,oBAC7D,OAAM,IAAI,MACR,4DACY,iBAAiB,GAAG,oBAAoB,OAAO,cAAc,GAAG,mBAC7E;;AAIL,KAAI,CAAE,MAAM,QAAQ,qBAAqB,CACvC,OAAM,IAAI,MACR,2FACD;CAGH,MAAM
C,UAAoC,EAAE;CAC5C,MAAMC,sBAKD,EAAE;CAIP,MAAM,kBAAkB,sBADI,QAAQ,kBAAkB,gBAAgB,GAAG,EACN,mBAAmB;CAEtF,IAAIH;AACJ,KAAI;EACF,MAAM,SAAS,MAAM,gBAAgB,IAAI,UAAU;AACnD,0BAAwB,SAAS,SAAS,OAAO,MAAM,GAAG;SACpD;AACN,0BAAwB;;CAG1B,MAAM,mBAAmB,QAAQ,sBAAsB,gBAAgB,mBAAmB;CAC1F,MAAM,wBAAwB,eAAe;AAE7C,KAAI,wBAAwB,uBAAuB;EACjD,MAAM,oBAAoB,MAAM,iBAAiB,mBAAmB,uBAAuB;GACzF,aAAa;GACb,gBAAgB;GACjB,CAAC;AAEF,MAAI,kBAAkB,WAAW,SAAS,EACxC,qBAAoB,KAAK;GACvB,WAAW;GACX,aAAa;GACb,WAAW;GACX,mBAAmB;GACpB,CAAC;;CAKN,MAAM,kBAAkB,CAAC,GAAG,UAAU,CAAC,MAAM,GAAG,MAAM,EAAE,UAAU,cAAc,EAAE,UAAU,CAAC;AAE7F,MAAK,MAAM,YAAY,iBAAiB;EACtC,MAAM,WAAW,QAAQ,sBAAsB,SAAS,QAAQ,SAAS,UAAU;EACnF,MAAM,iBAAiB,MAAM,SAAS,YAAY;EAClD,MAAM,gBAAgB,SAAS,OAAO;AAEtC,MAAI,iBAAiB,eAAe;GAClC,MAAM,oBAAoB,MAAM,SAAS,mBAAmB,eAAe,EACzE,gBAAgB,MACjB,CAAC;AAEF,OAAI,kBAAkB,WAAW,SAAS,EACxC,qBAAoB,KAAK;IACvB,WAAW,SAAS;IACpB,aAAa;IACb,WAAW;IACQ;IACpB,CAAC;;;AAMR,MAAK,MAAM,aAAa,qBAAqB;AAC3C,QAAM,UAAU,kBAAkB,SAAS;AAC3C,UAAQ,KAAK;GACX,WAAW,UAAU;GACrB,YAAY;GACZ,aAAa,UAAU;GACvB,WAAW,UAAU;GACtB,CAAC;;AAIJ,MAAK,MAAM,YAAY,UACrB,KAAI,CAAC,QAAQ,MAAM,MAAM,EAAE,cAAc,SAAS,UAAU,CAC1D,SAAQ,KAAK;EACX,WAAW,SAAS;EACpB,YAAY;EACZ,aAAa,SAAS,OAAO;EAC7B,WAAW,SAAS,OAAO;EAC5B,CAAC;AAIN,QAAO;;;;;;;;;;;;AAaT,SAAgB,8BACd,OAC0B;AAC1B,KAAI,MAAM,WAAW,EACnB,QAAO,EAAE;CAIX,MAAM,cAAc,CAAC,GAAG,MAAM,CAAC,MAAM,GAAG,MAAM;AAC5C,MAAI,EAAE,cAAc,mBAClB,QAAO;AAET,MAAI,EAAE,cAAc,mBAClB,QAAO;AAET,SAAO,EAAE,UAAU,cAAc,EAAE,UAAU;GAC7C;CAGF,MAAM,wBAAO,IAAI,MAAM,EAAC,aAAa,CAAC,MAAM,IAAI,CAAC,GAAG,QAAQ,MAAM,GAAG;AAGrE,QAAO,YAAY,KAAK,MAAM,UAAU;EACtC,MAAM,cAAc,KAAK,eAAe;EACxC,MAAM,YAAY,KAAK,aAAa;EAOpC,MAAM,UAAU,GAAG,KAAK,IAJN,QAAQ,GAAG,UAAU,CAAC,SAAS,GAAG,IAAI,CAIpB,IAHjB,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAGP,IAFlC,UAAU,UAAU,CAAC,SAAS,GAAG,IAAI,CAEU,GAD/C,KAAK,UAAU,QAAQ,gBAAgB,IAAI,CACgB;AAE5E,SAAO;GACL,QAAQ,KAAK;GACb,MAAM;GACN,WAAW,KAAK;GACjB;GACD"}
1
+ {"version":3,"file":"generation-engine.js","names":["settingsSourceVersion: number","generatedFiles: GenerationInternalResult[]","results: ExecuteMigrationResult[]","migrationsToExecute: Array<{\n namespace: string;\n fromVersion: number;\n toVersion: number;\n preparedMigration: PreparedMigration;\n }>"],"sources":["../../src/migration-engine/generation-engine.ts"],"sourcesContent":["import type { FragnoDatabase } from \"../mod\";\nimport type { AnySchema } from \"../schema/create\";\nimport type { PreparedMigration } from \"./create\";\nimport {\n settingsSchema,\n SETTINGS_NAMESPACE,\n createSettingsManager,\n} from \"../shared/settings-schema\";\nimport {\n fragnoDatabaseAdapterNameFakeSymbol,\n fragnoDatabaseAdapterVersionFakeSymbol,\n} from \"../adapters/adapters\";\n\nexport interface GenerationEngineResult {\n schema: string;\n path: string;\n namespace: string;\n}\n\nexport interface GenerationInternalResult {\n schema: string;\n path: string;\n namespace: string;\n fromVersion: number;\n toVersion: number;\n preparedMigration?: PreparedMigration;\n}\n\nexport interface ExecuteMigrationResult {\n namespace: string;\n didMigrate: boolean;\n fromVersion: number;\n toVersion: number;\n}\n\nexport async function generateMigrationsOrSchema<\n // oxlint-disable-next-line no-explicit-any\n const TDatabases extends FragnoDatabase<AnySchema, any>[],\n>(\n databases: TDatabases,\n options?: {\n path?: string;\n toVersion?: number;\n fromVersion?: number;\n },\n): Promise<GenerationEngineResult[]> {\n if (databases.length === 0) {\n throw new Error(\"No databases provided for schema generation\");\n }\n\n const firstDb = databases[0];\n const adapter = firstDb.adapter;\n\n // If adapter has createSchemaGenerator, use it for combined generation (e.g., Drizzle)\n if (adapter.createSchemaGenerator) {\n if (options?.toVersion !== undefined || options?.fromVersion !== undefined) {\n console.warn(\n \"⚠️ Warning: --from and --to version options are not supported when 
generating schemas for multiple fragments and will be ignored.\",\n );\n }\n\n const fragments = databases.map((db) => ({\n schema: db.schema,\n namespace: db.namespace,\n }));\n\n const generator = adapter.createSchemaGenerator(fragments, {\n path: options?.path,\n });\n\n return [\n {\n ...generator.generateSchema(),\n namespace: firstDb.namespace,\n },\n ];\n }\n\n // Otherwise, use migration engine for individual generation (e.g., Kysely)\n if (!adapter.createMigrationEngine) {\n throw new Error(\n \"Adapter does not support migration-based schema generation. Ensure your adapter implements createMigrationEngine.\",\n );\n }\n\n if (!(await adapter.isConnectionHealthy())) {\n throw new Error(\n \"Database connection is not healthy. Please check your database connection and try again.\",\n );\n }\n\n const settingsQueryEngine = adapter.createQueryEngine(settingsSchema, \"\");\n const settingsManager = createSettingsManager(settingsQueryEngine, SETTINGS_NAMESPACE);\n\n let settingsSourceVersion: number;\n try {\n const result = await settingsManager.get(\"version\");\n\n if (!result) {\n settingsSourceVersion = 0;\n } else {\n settingsSourceVersion = parseInt(result.value);\n }\n } catch {\n // We don't really have a way to verify this error happens because the key doesn't exist in the database\n settingsSourceVersion = 0;\n }\n\n const generatedFiles: GenerationInternalResult[] = [];\n\n const settingsMigrator = adapter.createMigrationEngine(settingsSchema, SETTINGS_NAMESPACE);\n const settingsTargetVersion = settingsSchema.version;\n\n // Generate settings table migration\n const settingsMigration = await settingsMigrator.prepareMigrationTo(settingsTargetVersion, {\n fromVersion: settingsSourceVersion,\n });\n\n if (!settingsMigration.getSQL) {\n throw new Error(\n \"Migration engine does not support SQL generation. 
Ensure your adapter's migration engine provides getSQL().\",\n );\n }\n\n const settingsSql = settingsMigration.getSQL();\n\n if (settingsSql.trim()) {\n generatedFiles.push({\n schema: settingsSql,\n path: \"settings-migration.sql\", // Placeholder, will be renamed in post-processing\n namespace: SETTINGS_NAMESPACE,\n fromVersion: settingsSourceVersion,\n toVersion: settingsTargetVersion,\n preparedMigration: settingsMigration,\n });\n }\n\n // Generate migration for each fragment\n for (const db of databases) {\n const dbAdapter = db.adapter;\n\n // Use migration engine\n if (!dbAdapter.createMigrationEngine) {\n throw new Error(\n `Adapter for ${db.namespace} does not support schema generation. ` +\n `Ensure your adapter implements either createSchemaGenerator or createMigrationEngine.`,\n );\n }\n\n const migrator = dbAdapter.createMigrationEngine(db.schema, db.namespace);\n const targetVersion = options?.toVersion ?? db.schema.version;\n const sourceVersion = options?.fromVersion ?? 0;\n\n // Generate migration from source to target version\n const preparedMigration = await migrator.prepareMigrationTo(targetVersion, {\n fromVersion: sourceVersion,\n });\n\n if (!preparedMigration.getSQL) {\n throw new Error(\n \"Migration engine does not support SQL generation. 
Ensure your adapter's migration engine provides getSQL().\",\n );\n }\n\n const sql = preparedMigration.getSQL();\n\n // If no migrations needed, skip this fragment\n if (sql.trim()) {\n generatedFiles.push({\n schema: sql,\n path: \"schema.sql\", // Placeholder, will be renamed in post-processing\n namespace: db.namespace,\n fromVersion: sourceVersion,\n toVersion: targetVersion,\n preparedMigration: preparedMigration,\n });\n }\n }\n\n // Post-process filenames with ordering\n return postProcessMigrationFilenames(generatedFiles);\n}\n\n/**\n * Execute migrations for all fragments in the correct order.\n * Migrates settings table first, then fragments alphabetically.\n *\n * @param databases - Array of FragnoDatabase instances to migrate\n * @returns Array of execution results for each migration\n */\nexport async function executeMigrations<const TDatabases extends FragnoDatabase<AnySchema>[]>(\n databases: TDatabases,\n): Promise<ExecuteMigrationResult[]> {\n if (databases.length === 0) {\n throw new Error(\"No databases provided for migration\");\n }\n\n const firstDb = databases[0];\n const adapter = firstDb.adapter;\n\n // Validate adapter supports migrations\n if (!adapter.createMigrationEngine) {\n throw new Error(\n \"Adapter does not support running migrations. The adapter only supports schema generation.\\n\" +\n \"Try using 'generateMigrationsOrSchema' instead to generate schema files.\",\n );\n }\n\n // Validate all use same adapter name and version\n const firstAdapterName = adapter[fragnoDatabaseAdapterNameFakeSymbol];\n const firstAdapterVersion = adapter[fragnoDatabaseAdapterVersionFakeSymbol];\n\n for (const db of databases) {\n const dbAdapterName = db.adapter[fragnoDatabaseAdapterNameFakeSymbol];\n const dbAdapterVersion = db.adapter[fragnoDatabaseAdapterVersionFakeSymbol];\n\n if (dbAdapterName !== firstAdapterName || dbAdapterVersion !== firstAdapterVersion) {\n throw new Error(\n `All fragments must use the same database adapter. 
` +\n `Found: ${firstAdapterName}@${firstAdapterVersion} and ${dbAdapterName}@${dbAdapterVersion}`,\n );\n }\n }\n\n if (!(await adapter.isConnectionHealthy())) {\n throw new Error(\n \"Database connection is not healthy. Please check your database connection and try again.\",\n );\n }\n\n const results: ExecuteMigrationResult[] = [];\n const migrationsToExecute: Array<{\n namespace: string;\n fromVersion: number;\n toVersion: number;\n preparedMigration: PreparedMigration;\n }> = [];\n\n // 1. Prepare settings table migration\n const settingsQueryEngine = adapter.createQueryEngine(settingsSchema, \"\");\n const settingsManager = createSettingsManager(settingsQueryEngine, SETTINGS_NAMESPACE);\n\n let settingsSourceVersion: number;\n try {\n const result = await settingsManager.get(\"version\");\n settingsSourceVersion = result ? parseInt(result.value) : 0;\n } catch {\n settingsSourceVersion = 0;\n }\n\n const settingsMigrator = adapter.createMigrationEngine(settingsSchema, SETTINGS_NAMESPACE);\n const settingsTargetVersion = settingsSchema.version;\n\n if (settingsSourceVersion < settingsTargetVersion) {\n const settingsMigration = await settingsMigrator.prepareMigrationTo(settingsTargetVersion, {\n fromVersion: settingsSourceVersion,\n updateSettings: true,\n });\n\n if (settingsMigration.operations.length > 0) {\n migrationsToExecute.push({\n namespace: SETTINGS_NAMESPACE,\n fromVersion: settingsSourceVersion,\n toVersion: settingsTargetVersion,\n preparedMigration: settingsMigration,\n });\n }\n }\n\n // 2. 
Prepare fragment migrations (sorted alphabetically)\n const sortedDatabases = [...databases].sort((a, b) => a.namespace.localeCompare(b.namespace));\n\n for (const fragnoDb of sortedDatabases) {\n const migrator = adapter.createMigrationEngine(fragnoDb.schema, fragnoDb.namespace);\n const currentVersion = await migrator.getVersion();\n const targetVersion = fragnoDb.schema.version;\n\n if (currentVersion < targetVersion) {\n const preparedMigration = await migrator.prepareMigrationTo(targetVersion, {\n updateSettings: true,\n });\n\n if (preparedMigration.operations.length > 0) {\n migrationsToExecute.push({\n namespace: fragnoDb.namespace,\n fromVersion: currentVersion,\n toVersion: targetVersion,\n preparedMigration: preparedMigration,\n });\n }\n }\n }\n\n // 3. Execute all migrations in order\n for (const migration of migrationsToExecute) {\n await migration.preparedMigration.execute();\n results.push({\n namespace: migration.namespace,\n didMigrate: true,\n fromVersion: migration.fromVersion,\n toVersion: migration.toVersion,\n });\n }\n\n // 4. Add skipped migrations (already up-to-date)\n for (const fragnoDb of databases) {\n if (!results.find((r) => r.namespace === fragnoDb.namespace)) {\n results.push({\n namespace: fragnoDb.namespace,\n didMigrate: false,\n fromVersion: fragnoDb.schema.version,\n toVersion: fragnoDb.schema.version,\n });\n }\n }\n\n return results;\n}\n\n/**\n * Post-processes migration files to add ordering and standardize naming.\n *\n * Sorts files with settings namespace first, then alphabetically by namespace,\n * and assigns ordering numbers. 
Transforms filenames to format:\n * `<date>_<n>_f<from>_t<to>_<namespace>.sql`\n *\n * @param files - Array of generated migration files with version information\n * @returns Array of files with standardized paths and ordering\n */\nexport function postProcessMigrationFilenames(\n files: GenerationInternalResult[],\n): GenerationEngineResult[] {\n if (files.length === 0) {\n return [];\n }\n\n // Sort files: settings namespace first, then alphabetically by namespace\n const sortedFiles = [...files].sort((a, b) => {\n if (a.namespace === SETTINGS_NAMESPACE) {\n return -1;\n }\n if (b.namespace === SETTINGS_NAMESPACE) {\n return 1;\n }\n return a.namespace.localeCompare(b.namespace);\n });\n\n // Generate date prefix for filenames\n const date = new Date().toISOString().split(\"T\")[0].replace(/-/g, \"\");\n\n // Rename files with ordering\n return sortedFiles.map((file, index) => {\n const fromVersion = file.fromVersion ?? 0;\n const toVersion = file.toVersion ?? 0;\n\n // Create new filename with ordering\n const orderNum = (index + 1).toString().padStart(3, \"0\");\n const fromPadded = fromVersion.toString().padStart(3, \"0\");\n const toPadded = toVersion.toString().padStart(3, \"0\");\n const safeName = file.namespace.replace(/[^a-z0-9-]/gi, \"_\");\n const newPath = `${date}_${orderNum}_f${fromPadded}_t${toPadded}_${safeName}.sql`;\n\n return {\n schema: file.schema,\n path: newPath,\n namespace: file.namespace,\n };\n 
});\n}\n"],"mappings":";;;;AAmCA,eAAsB,2BAIpB,WACA,SAKmC;AACnC,KAAI,UAAU,WAAW,EACvB,OAAM,IAAI,MAAM,8CAA8C;CAGhE,MAAM,UAAU,UAAU;CAC1B,MAAM,UAAU,QAAQ;AAGxB,KAAI,QAAQ,uBAAuB;AACjC,MAAI,SAAS,cAAc,UAAa,SAAS,gBAAgB,OAC/D,SAAQ,KACN,oIACD;EAGH,MAAM,YAAY,UAAU,KAAK,QAAQ;GACvC,QAAQ,GAAG;GACX,WAAW,GAAG;GACf,EAAE;AAMH,SAAO,CACL;GACE,GANc,QAAQ,sBAAsB,WAAW,EACzD,MAAM,SAAS,MAChB,CAAC,CAIe,gBAAgB;GAC7B,WAAW,QAAQ;GACpB,CACF;;AAIH,KAAI,CAAC,QAAQ,sBACX,OAAM,IAAI,MACR,oHACD;AAGH,KAAI,CAAE,MAAM,QAAQ,qBAAqB,CACvC,OAAM,IAAI,MACR,2FACD;CAIH,MAAM,kBAAkB,sBADI,QAAQ,kBAAkB,gBAAgB,GAAG,EACN,mBAAmB;CAEtF,IAAIA;AACJ,KAAI;EACF,MAAM,SAAS,MAAM,gBAAgB,IAAI,UAAU;AAEnD,MAAI,CAAC,OACH,yBAAwB;MAExB,yBAAwB,SAAS,OAAO,MAAM;SAE1C;AAEN,0BAAwB;;CAG1B,MAAMC,iBAA6C,EAAE;CAErD,MAAM,mBAAmB,QAAQ,sBAAsB,gBAAgB,mBAAmB;CAC1F,MAAM,wBAAwB,eAAe;CAG7C,MAAM,oBAAoB,MAAM,iBAAiB,mBAAmB,uBAAuB,EACzF,aAAa,uBACd,CAAC;AAEF,KAAI,CAAC,kBAAkB,OACrB,OAAM,IAAI,MACR,8GACD;CAGH,MAAM,cAAc,kBAAkB,QAAQ;AAE9C,KAAI,YAAY,MAAM,CACpB,gBAAe,KAAK;EAClB,QAAQ;EACR,MAAM;EACN,WAAW;EACX,aAAa;EACb,WAAW;EACX,mBAAmB;EACpB,CAAC;AAIJ,MAAK,MAAM,MAAM,WAAW;EAC1B,MAAM,YAAY,GAAG;AAGrB,MAAI,CAAC,UAAU,sBACb,OAAM,IAAI,MACR,eAAe,GAAG,UAAU,4HAE7B;EAGH,MAAM,WAAW,UAAU,sBAAsB,GAAG,QAAQ,GAAG,UAAU;EACzE,MAAM,gBAAgB,SAAS,aAAa,GAAG,OAAO;EACtD,MAAM,gBAAgB,SAAS,eAAe;EAG9C,MAAM,oBAAoB,MAAM,SAAS,mBAAmB,eAAe,EACzE,aAAa,eACd,CAAC;AAEF,MAAI,CAAC,kBAAkB,OACrB,OAAM,IAAI,MACR,8GACD;EAGH,MAAM,MAAM,kBAAkB,QAAQ;AAGtC,MAAI,IAAI,MAAM,CACZ,gBAAe,KAAK;GAClB,QAAQ;GACR,MAAM;GACN,WAAW,GAAG;GACd,aAAa;GACb,WAAW;GACQ;GACpB,CAAC;;AAKN,QAAO,8BAA8B,eAAe;;;;;;;;;AAUtD,eAAsB,kBACpB,WACmC;AACnC,KAAI,UAAU,WAAW,EACvB,OAAM,IAAI,MAAM,sCAAsC;CAIxD,MAAM,UADU,UAAU,GACF;AAGxB,KAAI,CAAC,QAAQ,sBACX,OAAM,IAAI,MACR,sKAED;CAIH,MAAM,mBAAmB,QAAQ;CACjC,MAAM,sBAAsB,QAAQ;AAEpC,MAAK,MAAM,MAAM,WAAW;EAC1B,MAAM,gBAAgB,GAAG,QAAQ;EACjC,MAAM,mBAAmB,GAAG,QAAQ;AAEpC,MAAI,kBAAkB,oBAAoB,qBAAqB,oBAC7D,OAAM,IAAI,MACR,4DACY,iBAAiB,GAAG,oBAAoB,OAAO,cAAc,GAAG,mBAC7E;;AAIL,KAAI,CAAE,MAAM,QAAQ,qBAAqB,CACvC,OAAM,IAAI,MACR,2FACD;CAGH,MAAM
C,UAAoC,EAAE;CAC5C,MAAMC,sBAKD,EAAE;CAIP,MAAM,kBAAkB,sBADI,QAAQ,kBAAkB,gBAAgB,GAAG,EACN,mBAAmB;CAEtF,IAAIH;AACJ,KAAI;EACF,MAAM,SAAS,MAAM,gBAAgB,IAAI,UAAU;AACnD,0BAAwB,SAAS,SAAS,OAAO,MAAM,GAAG;SACpD;AACN,0BAAwB;;CAG1B,MAAM,mBAAmB,QAAQ,sBAAsB,gBAAgB,mBAAmB;CAC1F,MAAM,wBAAwB,eAAe;AAE7C,KAAI,wBAAwB,uBAAuB;EACjD,MAAM,oBAAoB,MAAM,iBAAiB,mBAAmB,uBAAuB;GACzF,aAAa;GACb,gBAAgB;GACjB,CAAC;AAEF,MAAI,kBAAkB,WAAW,SAAS,EACxC,qBAAoB,KAAK;GACvB,WAAW;GACX,aAAa;GACb,WAAW;GACX,mBAAmB;GACpB,CAAC;;CAKN,MAAM,kBAAkB,CAAC,GAAG,UAAU,CAAC,MAAM,GAAG,MAAM,EAAE,UAAU,cAAc,EAAE,UAAU,CAAC;AAE7F,MAAK,MAAM,YAAY,iBAAiB;EACtC,MAAM,WAAW,QAAQ,sBAAsB,SAAS,QAAQ,SAAS,UAAU;EACnF,MAAM,iBAAiB,MAAM,SAAS,YAAY;EAClD,MAAM,gBAAgB,SAAS,OAAO;AAEtC,MAAI,iBAAiB,eAAe;GAClC,MAAM,oBAAoB,MAAM,SAAS,mBAAmB,eAAe,EACzE,gBAAgB,MACjB,CAAC;AAEF,OAAI,kBAAkB,WAAW,SAAS,EACxC,qBAAoB,KAAK;IACvB,WAAW,SAAS;IACpB,aAAa;IACb,WAAW;IACQ;IACpB,CAAC;;;AAMR,MAAK,MAAM,aAAa,qBAAqB;AAC3C,QAAM,UAAU,kBAAkB,SAAS;AAC3C,UAAQ,KAAK;GACX,WAAW,UAAU;GACrB,YAAY;GACZ,aAAa,UAAU;GACvB,WAAW,UAAU;GACtB,CAAC;;AAIJ,MAAK,MAAM,YAAY,UACrB,KAAI,CAAC,QAAQ,MAAM,MAAM,EAAE,cAAc,SAAS,UAAU,CAC1D,SAAQ,KAAK;EACX,WAAW,SAAS;EACpB,YAAY;EACZ,aAAa,SAAS,OAAO;EAC7B,WAAW,SAAS,OAAO;EAC5B,CAAC;AAIN,QAAO;;;;;;;;;;;;AAaT,SAAgB,8BACd,OAC0B;AAC1B,KAAI,MAAM,WAAW,EACnB,QAAO,EAAE;CAIX,MAAM,cAAc,CAAC,GAAG,MAAM,CAAC,MAAM,GAAG,MAAM;AAC5C,MAAI,EAAE,cAAc,mBAClB,QAAO;AAET,MAAI,EAAE,cAAc,mBAClB,QAAO;AAET,SAAO,EAAE,UAAU,cAAc,EAAE,UAAU;GAC7C;CAGF,MAAM,wBAAO,IAAI,MAAM,EAAC,aAAa,CAAC,MAAM,IAAI,CAAC,GAAG,QAAQ,MAAM,GAAG;AAGrE,QAAO,YAAY,KAAK,MAAM,UAAU;EACtC,MAAM,cAAc,KAAK,eAAe;EACxC,MAAM,YAAY,KAAK,aAAa;EAOpC,MAAM,UAAU,GAAG,KAAK,IAJN,QAAQ,GAAG,UAAU,CAAC,SAAS,GAAG,IAAI,CAIpB,IAHjB,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAGP,IAFlC,UAAU,UAAU,CAAC,SAAS,GAAG,IAAI,CAEU,GAD/C,KAAK,UAAU,QAAQ,gBAAgB,IAAI,CACgB;AAE5E,SAAO;GACL,QAAQ,KAAK;GACb,MAAM;GACN,WAAW,KAAK;GACjB;GACD"}
package/dist/mod.d.ts CHANGED
@@ -1,4 +1,5 @@
1
1
  import { AnySchema } from "./schema/create.js";
2
+ import { Cursor, CursorData, CursorResult, decodeCursor } from "./query/cursor.js";
2
3
  import { AbstractQuery } from "./query/query.js";
3
4
  import { DatabaseAdapter } from "./adapters/adapters.js";
4
5
  import { DatabaseFragmentBuilder, DatabaseFragmentContext, FragnoPublicConfigWithDatabase, defineFragmentWithDatabase } from "./fragment.js";
@@ -24,27 +25,27 @@ declare class FragnoDatabaseDefinition<const T extends AnySchema> {
24
25
  /**
25
26
  * Creates a FragnoDatabase instance by binding an adapter to this definition.
26
27
  */
27
- create(adapter: DatabaseAdapter): FragnoDatabase<T>;
28
+ create<TUOWConfig = void>(adapter: DatabaseAdapter<TUOWConfig>): FragnoDatabase<T, TUOWConfig>;
28
29
  }
29
30
  /**
30
31
  * A Fragno database instance with a bound adapter.
31
32
  * Created from a FragnoDatabaseDefinition by calling .create(adapter).
32
33
  */
33
- declare class FragnoDatabase<const T extends AnySchema> {
34
+ declare class FragnoDatabase<const T extends AnySchema, TUOWConfig = void> {
34
35
  #private;
35
36
  constructor(options: {
36
37
  namespace: string;
37
38
  schema: T;
38
- adapter: DatabaseAdapter;
39
+ adapter: DatabaseAdapter<TUOWConfig>;
39
40
  });
40
41
  get [fragnoDatabaseFakeSymbol](): typeof fragnoDatabaseFakeSymbol;
41
- createClient(): Promise<AbstractQuery<T>>;
42
+ createClient(): Promise<AbstractQuery<T, TUOWConfig>>;
42
43
  runMigrations(): Promise<boolean>;
43
44
  get namespace(): string;
44
45
  get schema(): T;
45
- get adapter(): DatabaseAdapter;
46
+ get adapter(): DatabaseAdapter<TUOWConfig>;
46
47
  }
47
48
  declare function defineFragnoDatabase<const TSchema extends AnySchema>(options: CreateFragnoDatabaseDefinitionOptions<TSchema>): FragnoDatabaseDefinition<TSchema>;
48
49
  //#endregion
49
- export { CreateFragnoDatabaseDefinitionOptions, type DatabaseAdapter, DatabaseFragmentBuilder, type DatabaseFragmentContext, FragnoDatabase, FragnoDatabaseDefinition, type FragnoPublicConfigWithDatabase, defineFragmentWithDatabase, defineFragnoDatabase, fragnoDatabaseFakeSymbol, fragnoDatabaseLibraryVersion, isFragnoDatabase };
50
+ export { CreateFragnoDatabaseDefinitionOptions, Cursor, type CursorData, type CursorResult, type DatabaseAdapter, DatabaseFragmentBuilder, type DatabaseFragmentContext, FragnoDatabase, FragnoDatabaseDefinition, type FragnoPublicConfigWithDatabase, decodeCursor, defineFragmentWithDatabase, defineFragnoDatabase, fragnoDatabaseFakeSymbol, fragnoDatabaseLibraryVersion, isFragnoDatabase };
50
51
  //# sourceMappingURL=mod.d.ts.map
package/dist/mod.d.ts.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"mod.d.ts","names":[],"sources":["../src/mod.ts"],"sourcesContent":[],"mappings":";;;;;;cAMa;cACA;AADA,UAGI,qCAHkD,CAAA,UAGF,SAHE,CAAA,CAAA;EACtD,SAAA,EAAA,MAAA;EAEI,MAAA,EAEP,CAFO;AAKjB;AAoBa,iBApBG,gBAAA,CAoBqB,KAAA,EAAA,OAAA,CAAA,EAAA,KAAA,IApBsB,cAoBtB,CApBqC,SAoBrC,CAAA;;;;;;AAoBc,cApBtC,wBAoBsC,CAAA,gBApBG,SAoBH,CAAA,CAAA;EAAf,CAAA,OAAA;EAAc,WAAA,CAAA,OAAA,EAhB3B,qCAgB2B,CAhBW,CAgBX,CAAA;EAarC,IAAA,SAAA,CAAA,CAAA,EAAc,MAAA;EAAiB,IAAA,MAAA,CAAA,CAAA,EApBhC,CAoBgC;EAKQ;;;EAM7C,MAAA,CAAA,OAAA,EAxBW,eAwBX,CAAA,EAxB6B,cAwB7B,CAxB4C,CAwB5C,CAAA;;;;;;AAoCU,cA/CJ,cA+CI,CAAA,gBA/C2B,SA+C3B,CAAA,CAAA;EAAe,CAAA,OAAA;EAKhB,WAAA,CAAA,OAAA,EAAA;IAA2C,SAAA,EAAA,MAAA;IACV,MAAA,EAhDG,CAgDH;IAAtC,OAAA,EAhDqD,eAgDrD;EACiB,CAAA;EAAzB,KA3CI,wBAAA,GA2CJ,EAAA,OA3CwC,wBA2CxC;EAAwB,YAAA,CAAA,CAAA,EAvCH,OAuCG,CAvCK,aAuCL,CAvCmB,CAuCnB,CAAA,CAAA;mBA3BF;;gBAgBb;iBAIK;;iBAKD,2CAA2C,oBAChD,sCAAsC,WAC9C,yBAAyB"}
1
+ {"version":3,"file":"mod.d.ts","names":[],"sources":["../src/mod.ts"],"sourcesContent":[],"mappings":";;;;;;;AASa,cAAA,wBAAsD,EAAA,kBAAA;AACtD,cAAA,4BAA6C,EAAA,KAAA;AAEzC,UAAA,qCAAqC,CAAA,UAAW,SAEtD,CAAA,CAAA;EAGK,SAAA,EAAA,MAAA;EAoBH,MAAA,EAvBH,CAuBG;;AAIgD,iBAxB7C,gBAAA,CAwB6C,KAAA,EAAA,OAAA,CAAA,EAAA,KAAA,IAxBF,cAwBE,CAxBa,SAwBb,CAAA;;;;;;AAgBwB,cApBxE,wBAoBwE,CAAA,gBApB/B,SAoB+B,CAAA,CAAA;EAAlB,CAAA,OAAA;EAAc,WAAA,CAAA,OAAA,EAhB1D,qCAgB0D,CAhBpB,CAgBoB,CAAA;EAapE,IAAA,SAAA,CAAA,CAAA,EAAc,MAAA;EAAiB,IAAA,MAAA,CAAA,CAAA,EApBhC,CAoBgC;EAKQ;;;EAMT,MAAA,CAAA,aAAA,IAAA,CAAA,CAAA,OAAA,EAxBN,eAwBM,CAxBU,UAwBV,CAAA,CAAA,EAxBwB,cAwBxB,CAxBuC,CAwBvC,EAxB0C,UAwB1C,CAAA;;;;;;AAgBlB,cA3BZ,cA2BY,CAAA,gBA3BmB,SA2BnB,EAAA,aAAA,IAAA,CAAA,CAAA;EAgBb,CAAA,OAAA;EAIqB,WAAA,CAAA,OAAA,EAAA;IAAhB,SAAA,EAAA,MAAA;IAAe,MAAA,EA1CoB,CA0CpB;IAKhB,OAAA,EA/CgD,eA+C5B,CA/C4C,UA+C5C,CAAA;EAAuB,CAAA;EACV,KA1C1C,wBAAA,GA0C0C,EAAA,OA1CN,wBA0CM;EAAtC,YAAA,CAAA,CAAA,EAtCa,OAsCb,CAtCqB,aAsCrB,CAtCmC,CAsCnC,EAtCsC,UAsCtC,CAAA,CAAA;EACiB,aAAA,CAAA,CAAA,EA3BH,OA2BG,CAAA,OAAA,CAAA;EAAzB,IAAA,SAAA,CAAA,CAAA,EAAA,MAAA;EAAwB,IAAA,MAAA,CAAA,CAAA,EAXf,CAWe;iBAPV,gBAAgB;;iBAKjB,2CAA2C,oBAChD,sCAAsC,WAC9C,yBAAyB"}
package/dist/mod.js CHANGED
@@ -1,4 +1,5 @@
1
1
  import { DatabaseFragmentBuilder, defineFragmentWithDatabase } from "./fragment.js";
2
+ import { Cursor, decodeCursor } from "./query/cursor.js";
2
3
 
3
4
  //#region src/mod.ts
4
5
  const fragnoDatabaseFakeSymbol = "$fragno-database";
@@ -79,5 +80,5 @@ function defineFragnoDatabase(options) {
79
80
  }
80
81
 
81
82
  //#endregion
82
- export { DatabaseFragmentBuilder, FragnoDatabase, FragnoDatabaseDefinition, defineFragmentWithDatabase, defineFragnoDatabase, fragnoDatabaseFakeSymbol, fragnoDatabaseLibraryVersion, isFragnoDatabase };
83
+ export { Cursor, DatabaseFragmentBuilder, FragnoDatabase, FragnoDatabaseDefinition, decodeCursor, defineFragmentWithDatabase, defineFragnoDatabase, fragnoDatabaseFakeSymbol, fragnoDatabaseLibraryVersion, isFragnoDatabase };
83
84
  //# sourceMappingURL=mod.js.map
package/dist/mod.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"mod.js","names":["#namespace","#schema","#adapter"],"sources":["../src/mod.ts"],"sourcesContent":["import type { DatabaseAdapter } from \"./adapters/adapters\";\nimport type { AnySchema } from \"./schema/create\";\nimport type { AbstractQuery } from \"./query/query\";\n\nexport type { DatabaseAdapter };\n\nexport const fragnoDatabaseFakeSymbol = \"$fragno-database\" as const;\nexport const fragnoDatabaseLibraryVersion = \"0.1\" as const;\n\nexport interface CreateFragnoDatabaseDefinitionOptions<T extends AnySchema> {\n namespace: string;\n schema: T;\n}\n\nexport function isFragnoDatabase(value: unknown): value is FragnoDatabase<AnySchema> {\n if (value instanceof FragnoDatabase) {\n return true;\n }\n\n if (typeof value !== \"object\" || value === null) {\n return false;\n }\n\n return (\n fragnoDatabaseFakeSymbol in value &&\n value[fragnoDatabaseFakeSymbol] === fragnoDatabaseFakeSymbol\n );\n}\n\n/**\n * Definition of a Fragno database schema and namespace.\n * Created by library authors using defineFragnoDatabase().\n * Apps instantiate it by calling .create(adapter).\n */\nexport class FragnoDatabaseDefinition<const T extends AnySchema> {\n #namespace: string;\n #schema: T;\n\n constructor(options: CreateFragnoDatabaseDefinitionOptions<T>) {\n this.#namespace = options.namespace;\n this.#schema = options.schema;\n }\n\n get namespace() {\n return this.#namespace;\n }\n\n get schema() {\n return this.#schema;\n }\n\n /**\n * Creates a FragnoDatabase instance by binding an adapter to this definition.\n */\n create(adapter: DatabaseAdapter): FragnoDatabase<T> {\n return new FragnoDatabase({\n namespace: this.#namespace,\n schema: this.#schema,\n adapter,\n });\n }\n}\n\n/**\n * A Fragno database instance with a bound adapter.\n * Created from a FragnoDatabaseDefinition by calling .create(adapter).\n */\nexport class FragnoDatabase<const T extends AnySchema> {\n #namespace: string;\n #schema: T;\n #adapter: DatabaseAdapter;\n\n 
constructor(options: { namespace: string; schema: T; adapter: DatabaseAdapter }) {\n this.#namespace = options.namespace;\n this.#schema = options.schema;\n this.#adapter = options.adapter;\n }\n\n get [fragnoDatabaseFakeSymbol](): typeof fragnoDatabaseFakeSymbol {\n return fragnoDatabaseFakeSymbol;\n }\n\n async createClient(): Promise<AbstractQuery<T>> {\n const dbVersion = await this.#adapter.getSchemaVersion(this.#namespace);\n if (dbVersion !== this.#schema.version.toString()) {\n throw new Error(\n `Database is not at expected version. Did you forget to run migrations?` +\n ` Current version: ${dbVersion}, Expected version: ${this.#schema.version}`,\n );\n }\n\n return this.#adapter.createQueryEngine(this.#schema, this.#namespace);\n }\n\n async runMigrations(): Promise<boolean> {\n if (!this.#adapter.createMigrationEngine) {\n throw new Error(\"Migration engine not supported for this adapter.\");\n }\n\n const migrator = this.#adapter.createMigrationEngine(this.#schema, this.#namespace);\n const preparedMigration = await migrator.prepareMigration();\n await preparedMigration.execute();\n\n return preparedMigration.operations.length > 0;\n }\n\n get namespace() {\n return this.#namespace;\n }\n\n get schema() {\n return this.#schema;\n }\n\n get adapter(): DatabaseAdapter {\n return this.#adapter;\n }\n}\n\nexport function defineFragnoDatabase<const TSchema extends AnySchema>(\n options: CreateFragnoDatabaseDefinitionOptions<TSchema>,\n): FragnoDatabaseDefinition<TSchema> {\n return new FragnoDatabaseDefinition(options);\n}\n\nexport {\n defineFragmentWithDatabase,\n DatabaseFragmentBuilder,\n type FragnoPublicConfigWithDatabase,\n type DatabaseFragmentContext,\n} from 
\"./fragment\";\n"],"mappings":";;;AAMA,MAAa,2BAA2B;AACxC,MAAa,+BAA+B;AAO5C,SAAgB,iBAAiB,OAAoD;AACnF,KAAI,iBAAiB,eACnB,QAAO;AAGT,KAAI,OAAO,UAAU,YAAY,UAAU,KACzC,QAAO;AAGT,QACE,4BAA4B,SAC5B,MAAM,8BAA8B;;;;;;;AASxC,IAAa,2BAAb,MAAiE;CAC/D;CACA;CAEA,YAAY,SAAmD;AAC7D,QAAKA,YAAa,QAAQ;AAC1B,QAAKC,SAAU,QAAQ;;CAGzB,IAAI,YAAY;AACd,SAAO,MAAKD;;CAGd,IAAI,SAAS;AACX,SAAO,MAAKC;;;;;CAMd,OAAO,SAA6C;AAClD,SAAO,IAAI,eAAe;GACxB,WAAW,MAAKD;GAChB,QAAQ,MAAKC;GACb;GACD,CAAC;;;;;;;AAQN,IAAa,iBAAb,MAAuD;CACrD;CACA;CACA;CAEA,YAAY,SAAqE;AAC/E,QAAKD,YAAa,QAAQ;AAC1B,QAAKC,SAAU,QAAQ;AACvB,QAAKC,UAAW,QAAQ;;CAG1B,KAAK,4BAA6D;AAChE,SAAO;;CAGT,MAAM,eAA0C;EAC9C,MAAM,YAAY,MAAM,MAAKA,QAAS,iBAAiB,MAAKF,UAAW;AACvE,MAAI,cAAc,MAAKC,OAAQ,QAAQ,UAAU,CAC/C,OAAM,IAAI,MACR,2FACuB,UAAU,sBAAsB,MAAKA,OAAQ,UACrE;AAGH,SAAO,MAAKC,QAAS,kBAAkB,MAAKD,QAAS,MAAKD,UAAW;;CAGvE,MAAM,gBAAkC;AACtC,MAAI,CAAC,MAAKE,QAAS,sBACjB,OAAM,IAAI,MAAM,mDAAmD;EAIrE,MAAM,oBAAoB,MADT,MAAKA,QAAS,sBAAsB,MAAKD,QAAS,MAAKD,UAAW,CAC1C,kBAAkB;AAC3D,QAAM,kBAAkB,SAAS;AAEjC,SAAO,kBAAkB,WAAW,SAAS;;CAG/C,IAAI,YAAY;AACd,SAAO,MAAKA;;CAGd,IAAI,SAAS;AACX,SAAO,MAAKC;;CAGd,IAAI,UAA2B;AAC7B,SAAO,MAAKC;;;AAIhB,SAAgB,qBACd,SACmC;AACnC,QAAO,IAAI,yBAAyB,QAAQ"}
1
+ {"version":3,"file":"mod.js","names":["#namespace","#schema","#adapter"],"sources":["../src/mod.ts"],"sourcesContent":["import type { DatabaseAdapter } from \"./adapters/adapters\";\nimport type { AnySchema } from \"./schema/create\";\nimport type { AbstractQuery } from \"./query/query\";\nimport type { CursorResult } from \"./query/cursor\";\nimport { Cursor } from \"./query/cursor\";\n\nexport type { DatabaseAdapter, CursorResult };\nexport { Cursor };\n\nexport const fragnoDatabaseFakeSymbol = \"$fragno-database\" as const;\nexport const fragnoDatabaseLibraryVersion = \"0.1\" as const;\n\nexport interface CreateFragnoDatabaseDefinitionOptions<T extends AnySchema> {\n namespace: string;\n schema: T;\n}\n\nexport function isFragnoDatabase(value: unknown): value is FragnoDatabase<AnySchema> {\n if (value instanceof FragnoDatabase) {\n return true;\n }\n\n if (typeof value !== \"object\" || value === null) {\n return false;\n }\n\n return (\n fragnoDatabaseFakeSymbol in value &&\n value[fragnoDatabaseFakeSymbol] === fragnoDatabaseFakeSymbol\n );\n}\n\n/**\n * Definition of a Fragno database schema and namespace.\n * Created by library authors using defineFragnoDatabase().\n * Apps instantiate it by calling .create(adapter).\n */\nexport class FragnoDatabaseDefinition<const T extends AnySchema> {\n #namespace: string;\n #schema: T;\n\n constructor(options: CreateFragnoDatabaseDefinitionOptions<T>) {\n this.#namespace = options.namespace;\n this.#schema = options.schema;\n }\n\n get namespace() {\n return this.#namespace;\n }\n\n get schema() {\n return this.#schema;\n }\n\n /**\n * Creates a FragnoDatabase instance by binding an adapter to this definition.\n */\n create<TUOWConfig = void>(adapter: DatabaseAdapter<TUOWConfig>): FragnoDatabase<T, TUOWConfig> {\n return new FragnoDatabase({\n namespace: this.#namespace,\n schema: this.#schema,\n adapter,\n });\n }\n}\n\n/**\n * A Fragno database instance with a bound adapter.\n * Created from a 
FragnoDatabaseDefinition by calling .create(adapter).\n */\nexport class FragnoDatabase<const T extends AnySchema, TUOWConfig = void> {\n #namespace: string;\n #schema: T;\n #adapter: DatabaseAdapter<TUOWConfig>;\n\n constructor(options: { namespace: string; schema: T; adapter: DatabaseAdapter<TUOWConfig> }) {\n this.#namespace = options.namespace;\n this.#schema = options.schema;\n this.#adapter = options.adapter;\n }\n\n get [fragnoDatabaseFakeSymbol](): typeof fragnoDatabaseFakeSymbol {\n return fragnoDatabaseFakeSymbol;\n }\n\n async createClient(): Promise<AbstractQuery<T, TUOWConfig>> {\n const dbVersion = await this.#adapter.getSchemaVersion(this.#namespace);\n if (dbVersion !== this.#schema.version.toString()) {\n throw new Error(\n `Database is not at expected version. Did you forget to run migrations?` +\n ` Current version: ${dbVersion}, Expected version: ${this.#schema.version}`,\n );\n }\n\n return this.#adapter.createQueryEngine(this.#schema, this.#namespace);\n }\n\n async runMigrations(): Promise<boolean> {\n if (!this.#adapter.createMigrationEngine) {\n throw new Error(\"Migration engine not supported for this adapter.\");\n }\n\n const migrator = this.#adapter.createMigrationEngine(this.#schema, this.#namespace);\n const preparedMigration = await migrator.prepareMigration();\n await preparedMigration.execute();\n\n return preparedMigration.operations.length > 0;\n }\n\n get namespace() {\n return this.#namespace;\n }\n\n get schema() {\n return this.#schema;\n }\n\n get adapter(): DatabaseAdapter<TUOWConfig> {\n return this.#adapter;\n }\n}\n\nexport function defineFragnoDatabase<const TSchema extends AnySchema>(\n options: CreateFragnoDatabaseDefinitionOptions<TSchema>,\n): FragnoDatabaseDefinition<TSchema> {\n return new FragnoDatabaseDefinition(options);\n}\n\nexport {\n defineFragmentWithDatabase,\n DatabaseFragmentBuilder,\n type FragnoPublicConfigWithDatabase,\n type DatabaseFragmentContext,\n} from \"./fragment\";\n\nexport { decodeCursor, 
type CursorData } from \"./query/cursor\";\n"],"mappings":";;;;AASA,MAAa,2BAA2B;AACxC,MAAa,+BAA+B;AAO5C,SAAgB,iBAAiB,OAAoD;AACnF,KAAI,iBAAiB,eACnB,QAAO;AAGT,KAAI,OAAO,UAAU,YAAY,UAAU,KACzC,QAAO;AAGT,QACE,4BAA4B,SAC5B,MAAM,8BAA8B;;;;;;;AASxC,IAAa,2BAAb,MAAiE;CAC/D;CACA;CAEA,YAAY,SAAmD;AAC7D,QAAKA,YAAa,QAAQ;AAC1B,QAAKC,SAAU,QAAQ;;CAGzB,IAAI,YAAY;AACd,SAAO,MAAKD;;CAGd,IAAI,SAAS;AACX,SAAO,MAAKC;;;;;CAMd,OAA0B,SAAqE;AAC7F,SAAO,IAAI,eAAe;GACxB,WAAW,MAAKD;GAChB,QAAQ,MAAKC;GACb;GACD,CAAC;;;;;;;AAQN,IAAa,iBAAb,MAA0E;CACxE;CACA;CACA;CAEA,YAAY,SAAiF;AAC3F,QAAKD,YAAa,QAAQ;AAC1B,QAAKC,SAAU,QAAQ;AACvB,QAAKC,UAAW,QAAQ;;CAG1B,KAAK,4BAA6D;AAChE,SAAO;;CAGT,MAAM,eAAsD;EAC1D,MAAM,YAAY,MAAM,MAAKA,QAAS,iBAAiB,MAAKF,UAAW;AACvE,MAAI,cAAc,MAAKC,OAAQ,QAAQ,UAAU,CAC/C,OAAM,IAAI,MACR,2FACuB,UAAU,sBAAsB,MAAKA,OAAQ,UACrE;AAGH,SAAO,MAAKC,QAAS,kBAAkB,MAAKD,QAAS,MAAKD,UAAW;;CAGvE,MAAM,gBAAkC;AACtC,MAAI,CAAC,MAAKE,QAAS,sBACjB,OAAM,IAAI,MAAM,mDAAmD;EAIrE,MAAM,oBAAoB,MADT,MAAKA,QAAS,sBAAsB,MAAKD,QAAS,MAAKD,UAAW,CAC1C,kBAAkB;AAC3D,QAAM,kBAAkB,SAAS;AAEjC,SAAO,kBAAkB,WAAW,SAAS;;CAG/C,IAAI,YAAY;AACd,SAAO,MAAKA;;CAGd,IAAI,SAAS;AACX,SAAO,MAAKC;;CAGd,IAAI,UAAuC;AACzC,SAAO,MAAKC;;;AAIhB,SAAgB,qBACd,SACmC;AACnC,QAAO,IAAI,yBAAyB,QAAQ"}
@@ -4,71 +4,106 @@ import { SQLProvider } from "../shared/providers.js";
4
4
  //#region src/query/cursor.d.ts
5
5
 
6
6
  /**
7
- * Cursor data structure containing index values and pagination direction
7
+ * Cursor object containing all information needed for pagination
8
8
  */
9
- interface CursorData {
9
+ declare class Cursor {
10
+ #private;
11
+ constructor(data: {
12
+ indexName: string;
13
+ orderDirection: "asc" | "desc";
14
+ pageSize: number;
15
+ indexValues: Record<string, unknown>;
16
+ });
10
17
  /**
11
- * Values for each column in the index, keyed by column ORM name
18
+ * Get the index name being used for pagination
12
19
  */
13
- indexValues: Record<string, unknown>;
20
+ get indexName(): string;
21
+ /**
22
+ * Get the ordering direction
23
+ */
24
+ get orderDirection(): "asc" | "desc";
25
+ /**
26
+ * Get the page size
27
+ */
28
+ get pageSize(): number;
14
29
  /**
15
- * Direction of pagination
30
+ * Get the cursor position values
16
31
  */
17
- direction: "forward" | "backward";
32
+ get indexValues(): Record<string, unknown>;
33
+ /**
34
+ * Encode cursor to an opaque base64 string (safe to send to client)
35
+ */
36
+ encode(): string;
18
37
  }
19
38
  /**
20
- * Encode cursor data to a base64 string
21
- *
22
- * @param data - The cursor data to encode
23
- * @returns Base64-encoded cursor string
24
- *
25
- * @example
26
- * ```ts
27
- * const cursor = encodeCursor({
28
- * indexValues: { id: "abc123", createdAt: 1234567890 },
29
- * direction: "forward"
30
- * });
31
- * ```
39
+ * Result of a cursor-based query containing items and pagination cursor
32
40
  */
33
- declare function encodeCursor(data: CursorData): string;
41
+ interface CursorResult<T> {
42
+ /**
43
+ * The query results
44
+ */
45
+ items: T[];
46
+ /**
47
+ * Cursor to fetch the next page (undefined if no more results)
48
+ */
49
+ cursor?: Cursor;
50
+ }
51
+ /**
52
+ * Cursor data structure for serialization
53
+ */
54
+ interface CursorData {
55
+ v: number;
56
+ indexName: string;
57
+ orderDirection: "asc" | "desc";
58
+ pageSize: number;
59
+ indexValues: Record<string, unknown>;
60
+ }
34
61
  /**
35
- * Decode a base64 cursor string back to cursor data
62
+ * Decode a base64 cursor string back to a Cursor object
36
63
  *
37
64
  * @param cursor - The base64-encoded cursor string
38
- * @returns Decoded cursor data
65
+ * @returns Decoded Cursor object
39
66
  * @throws Error if cursor is invalid or malformed
40
67
  *
41
68
  * @example
42
69
  * ```ts
43
- * const data = decodeCursor("eyJpbmRleFZhbHVlcyI6e30sImRpcmVjdGlvbiI6ImZvcndhcmQifQ==");
70
+ * const cursor = decodeCursor("eyJpbmRleFZhbHVlcyI6e30sImRpcmVjdGlvbiI6ImZvcndhcmQifQ==");
44
71
  * ```
45
72
  */
46
- declare function decodeCursor(cursor: string): CursorData;
73
+ declare function decodeCursor(cursor: string): Cursor;
47
74
  /**
48
- * Create a cursor from a record and index columns
75
+ * Create a cursor from a record and pagination metadata
49
76
  *
50
77
  * @param record - The database record
51
78
  * @param indexColumns - The columns that make up the index
52
- * @param direction - The pagination direction
53
- * @returns Encoded cursor string
79
+ * @param metadata - Pagination metadata (index name, order direction, page size)
80
+ * @returns Cursor object
54
81
  *
55
82
  * @example
56
83
  * ```ts
57
84
  * const cursor = createCursorFromRecord(
58
85
  * { id: "abc", name: "Alice", createdAt: 123 },
59
86
  * [table.columns.createdAt, table.columns.id],
60
- * "forward"
87
+ * {
88
+ * indexName: "idx_created",
89
+ * orderDirection: "asc",
90
+ * pageSize: 10
91
+ * }
61
92
  * );
62
93
  * ```
63
94
  */
64
- declare function createCursorFromRecord(record: Record<string, unknown>, indexColumns: AnyColumn[], direction: "forward" | "backward"): string;
95
+ declare function createCursorFromRecord(record: Record<string, unknown>, indexColumns: AnyColumn[], metadata: {
96
+ indexName: string;
97
+ orderDirection: "asc" | "desc";
98
+ pageSize: number;
99
+ }): Cursor;
65
100
  /**
66
101
  * Serialize cursor values for database queries
67
102
  *
68
103
  * Converts cursor values (which are in application format) to database format
69
104
  * using the column serialization rules.
70
105
  *
71
- * @param cursorData - The decoded cursor data
106
+ * @param cursor - The cursor object
72
107
  * @param indexColumns - The columns that make up the index
73
108
  * @param provider - The SQL provider
74
109
  * @returns Serialized values ready for database queries
@@ -76,13 +111,13 @@ declare function createCursorFromRecord(record: Record<string, unknown>, indexCo
76
111
  * @example
77
112
  * ```ts
78
113
  * const serialized = serializeCursorValues(
79
- * cursorData,
114
+ * cursor,
80
115
  * [table.columns.createdAt],
81
116
  * "postgresql"
82
117
  * );
83
118
  * ```
84
119
  */
85
- declare function serializeCursorValues(cursorData: CursorData, indexColumns: AnyColumn[], provider: SQLProvider): Record<string, unknown>;
120
+ declare function serializeCursorValues(cursor: Cursor, indexColumns: AnyColumn[], provider: SQLProvider): Record<string, unknown>;
86
121
  //#endregion
87
- export { CursorData, createCursorFromRecord, decodeCursor, encodeCursor, serializeCursorValues };
122
+ export { Cursor, CursorData, CursorResult, createCursorFromRecord, decodeCursor, serializeCursorValues };
88
123
  //# sourceMappingURL=cursor.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"cursor.d.ts","names":[],"sources":["../../src/query/cursor.ts"],"sourcesContent":[],"mappings":";;;;;;;AAOA;AAyBgB,UAzBC,UAAA,CAyBW;EAqBZ;AA4ChB;AAkCA;EACc,WAAA,EAzHC,MAyHD,CAAA,MAAA,EAAA,OAAA,CAAA;EACE;;;EAEP,SAAA,EAAA,SAAA,GAAA,UAAA;;;;;;;;;;;;;;;;iBAvGO,YAAA,OAAmB;;;;;;;;;;;;;iBAqBnB,YAAA,kBAA8B;;;;;;;;;;;;;;;;;;iBA4C9B,sBAAA,SACN,uCACM;;;;;;;;;;;;;;;;;;;;;iBAgCA,qBAAA,aACF,0BACE,uBACJ,cACT"}
1
+ {"version":3,"file":"cursor.d.ts","names":[],"sources":["../../src/query/cursor.ts"],"sourcesContent":[],"mappings":";;;;;;;AAOA;AAgEiB,cAhEJ,MAAA,CAgEgB;EAcZ,CAAA,OAAA;EAgCD,WAAA,CAAA,IAAY,EAAA;IA8DZ,SAAA,EAAA,MAAA;IACN,cAAA,EAAA,KAAA,GAAA,MAAA;IACM,QAAA,EAAA,MAAA;IAMb,WAAA,EA1Kc,MA0Kd,CAAA,MAAA,EAAA,OAAA,CAAA;EAAM,CAAA;EAmCO;;;EAGJ,IAAA,SAAA,CAAA,CAAA,EAAA,MAAA;EACT;;;;;;;;;;;qBAjLkB;;;;;;;;;UAsBJ;;;;SAIR;;;;WAIE;;;;;UAMM,UAAA;;;;;eAKF;;;;;;;;;;;;;;iBA2BC,YAAA,kBAA8B;;;;;;;;;;;;;;;;;;;;;;iBA8D9B,sBAAA,SACN,uCACM;;;;IAMb;;;;;;;;;;;;;;;;;;;;;iBAmCa,qBAAA,SACN,sBACM,uBACJ,cACT"}
@@ -2,34 +2,74 @@ import { serialize } from "../schema/serialize.js";
2
2
 
3
3
  //#region src/query/cursor.ts
4
4
  /**
5
- * Encode cursor data to a base64 string
6
- *
7
- * @param data - The cursor data to encode
8
- * @returns Base64-encoded cursor string
9
- *
10
- * @example
11
- * ```ts
12
- * const cursor = encodeCursor({
13
- * indexValues: { id: "abc123", createdAt: 1234567890 },
14
- * direction: "forward"
15
- * });
16
- * ```
5
+ * Cursor object containing all information needed for pagination
6
+ */
7
+ var Cursor = class {
8
+ #indexName;
9
+ #orderDirection;
10
+ #pageSize;
11
+ #indexValues;
12
+ constructor(data) {
13
+ this.#indexName = data.indexName;
14
+ this.#orderDirection = data.orderDirection;
15
+ this.#pageSize = data.pageSize;
16
+ this.#indexValues = data.indexValues;
17
+ }
18
+ /**
19
+ * Get the index name being used for pagination
20
+ */
21
+ get indexName() {
22
+ return this.#indexName;
23
+ }
24
+ /**
25
+ * Get the ordering direction
26
+ */
27
+ get orderDirection() {
28
+ return this.#orderDirection;
29
+ }
30
+ /**
31
+ * Get the page size
32
+ */
33
+ get pageSize() {
34
+ return this.#pageSize;
35
+ }
36
+ /**
37
+ * Get the cursor position values
38
+ */
39
+ get indexValues() {
40
+ return this.#indexValues;
41
+ }
42
+ /**
43
+ * Encode cursor to an opaque base64 string (safe to send to client)
44
+ */
45
+ encode() {
46
+ return encodeCursorData({
47
+ v: 1,
48
+ indexName: this.#indexName,
49
+ orderDirection: this.#orderDirection,
50
+ pageSize: this.#pageSize,
51
+ indexValues: this.#indexValues
52
+ });
53
+ }
54
+ };
55
+ /**
56
+ * Encode cursor data to a base64 string (internal)
17
57
  */
18
- function encodeCursor(data) {
58
+ function encodeCursorData(data) {
19
59
  const json = JSON.stringify(data);
20
60
  if (typeof Buffer !== "undefined") return Buffer.from(json, "utf-8").toString("base64");
21
61
  return btoa(json);
22
62
  }
23
63
  /**
24
- * Decode a base64 cursor string back to cursor data
64
+ * Decode a base64 cursor string back to a Cursor object
25
65
  *
26
66
  * @param cursor - The base64-encoded cursor string
27
- * @returns Decoded cursor data
67
+ * @returns Decoded Cursor object
28
68
  * @throws Error if cursor is invalid or malformed
29
69
  *
30
70
  * @example
31
71
  * ```ts
32
- * const data = decodeCursor("eyJpbmRleFZhbHVlcyI6e30sImRpcmVjdGlvbiI6ImZvcndhcmQifQ==");
72
+ * const cursor = decodeCursor("eyJpbmRleFZhbHVlcyI6e30sImRpcmVjdGlvbiI6ImZvcndhcmQifQ==");
33
73
  * ```
34
74
  */
35
75
  function decodeCursor(cursor) {
@@ -38,35 +78,48 @@ function decodeCursor(cursor) {
38
78
  if (typeof Buffer !== "undefined") json = Buffer.from(cursor, "base64").toString("utf-8");
39
79
  else json = atob(cursor);
40
80
  const data = JSON.parse(json);
41
- if (!data || typeof data !== "object" || !data.indexValues || typeof data.indexValues !== "object" || data.direction !== "forward" && data.direction !== "backward") throw new Error("Invalid cursor structure");
42
- return data;
81
+ if (!data || typeof data !== "object" || !data.indexValues || typeof data.indexValues !== "object" || typeof data.pageSize !== "number" || !data.indexName || !data.orderDirection || data.orderDirection !== "asc" && data.orderDirection !== "desc") throw new Error("Invalid cursor structure");
82
+ const version = typeof data.v === "number" ? data.v : 0;
83
+ if (version !== 1) throw new Error(`Unsupported cursor version: ${version}. Only v1 is supported.`);
84
+ return new Cursor({
85
+ indexName: data.indexName,
86
+ orderDirection: data.orderDirection,
87
+ pageSize: data.pageSize,
88
+ indexValues: data.indexValues
89
+ });
43
90
  } catch (error) {
44
91
  throw new Error(`Invalid cursor: ${error instanceof Error ? error.message : "malformed data"}`);
45
92
  }
46
93
  }
47
94
  /**
48
- * Create a cursor from a record and index columns
95
+ * Create a cursor from a record and pagination metadata
49
96
  *
50
97
  * @param record - The database record
51
98
  * @param indexColumns - The columns that make up the index
52
- * @param direction - The pagination direction
53
- * @returns Encoded cursor string
99
+ * @param metadata - Pagination metadata (index name, order direction, page size)
100
+ * @returns Cursor object
54
101
  *
55
102
  * @example
56
103
  * ```ts
57
104
  * const cursor = createCursorFromRecord(
58
105
  * { id: "abc", name: "Alice", createdAt: 123 },
59
106
  * [table.columns.createdAt, table.columns.id],
60
- * "forward"
107
+ * {
108
+ * indexName: "idx_created",
109
+ * orderDirection: "asc",
110
+ * pageSize: 10
111
+ * }
61
112
  * );
62
113
  * ```
63
114
  */
64
- function createCursorFromRecord(record, indexColumns, direction) {
115
+ function createCursorFromRecord(record, indexColumns, metadata) {
65
116
  const indexValues = {};
66
117
  for (const col of indexColumns) indexValues[col.ormName] = record[col.ormName];
67
- return encodeCursor({
68
- indexValues,
69
- direction
118
+ return new Cursor({
119
+ indexName: metadata.indexName,
120
+ orderDirection: metadata.orderDirection,
121
+ pageSize: metadata.pageSize,
122
+ indexValues
70
123
  });
71
124
  }
72
125
  /**
@@ -75,7 +128,7 @@ function createCursorFromRecord(record, indexColumns, direction) {
75
128
  * Converts cursor values (which are in application format) to database format
76
129
  * using the column serialization rules.
77
130
  *
78
- * @param cursorData - The decoded cursor data
131
+ * @param cursor - The cursor object
79
132
  * @param indexColumns - The columns that make up the index
80
133
  * @param provider - The SQL provider
81
134
  * @returns Serialized values ready for database queries
@@ -83,21 +136,21 @@ function createCursorFromRecord(record, indexColumns, direction) {
83
136
  * @example
84
137
  * ```ts
85
138
  * const serialized = serializeCursorValues(
86
- * cursorData,
139
+ * cursor,
87
140
  * [table.columns.createdAt],
88
141
  * "postgresql"
89
142
  * );
90
143
  * ```
91
144
  */
92
- function serializeCursorValues(cursorData, indexColumns, provider) {
145
+ function serializeCursorValues(cursor, indexColumns, provider) {
93
146
  const serialized = {};
94
147
  for (const col of indexColumns) {
95
- const value = cursorData.indexValues[col.ormName];
148
+ const value = cursor.indexValues[col.ormName];
96
149
  if (value !== void 0) serialized[col.ormName] = serialize(value, col, provider);
97
150
  }
98
151
  return serialized;
99
152
  }
100
153
 
101
154
  //#endregion
102
- export { createCursorFromRecord, decodeCursor, encodeCursor, serializeCursorValues };
155
+ export { Cursor, createCursorFromRecord, decodeCursor, serializeCursorValues };
103
156
  //# sourceMappingURL=cursor.js.map