@fragno-dev/db 0.1.1 → 0.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (108)
  1. package/.turbo/turbo-build.log +61 -53
  2. package/CHANGELOG.md +12 -0
  3. package/dist/adapters/adapters.d.ts +11 -1
  4. package/dist/adapters/adapters.d.ts.map +1 -1
  5. package/dist/adapters/drizzle/drizzle-adapter.d.ts +9 -2
  6. package/dist/adapters/drizzle/drizzle-adapter.d.ts.map +1 -1
  7. package/dist/adapters/drizzle/drizzle-adapter.js +21 -39
  8. package/dist/adapters/drizzle/drizzle-adapter.js.map +1 -1
  9. package/dist/adapters/drizzle/drizzle-query.d.ts.map +1 -1
  10. package/dist/adapters/drizzle/drizzle-query.js +3 -2
  11. package/dist/adapters/drizzle/drizzle-query.js.map +1 -1
  12. package/dist/adapters/drizzle/drizzle-uow-compiler.js +8 -6
  13. package/dist/adapters/drizzle/drizzle-uow-compiler.js.map +1 -1
  14. package/dist/adapters/drizzle/drizzle-uow-decoder.js.map +1 -1
  15. package/dist/adapters/drizzle/drizzle-uow-executor.js.map +1 -1
  16. package/dist/adapters/drizzle/generate.js +107 -34
  17. package/dist/adapters/drizzle/generate.js.map +1 -1
  18. package/dist/adapters/drizzle/shared.js +14 -1
  19. package/dist/adapters/drizzle/shared.js.map +1 -1
  20. package/dist/adapters/kysely/kysely-adapter.d.ts +2 -1
  21. package/dist/adapters/kysely/kysely-adapter.d.ts.map +1 -1
  22. package/dist/adapters/kysely/kysely-adapter.js +25 -30
  23. package/dist/adapters/kysely/kysely-adapter.js.map +1 -1
  24. package/dist/adapters/kysely/kysely-query-builder.js +48 -44
  25. package/dist/adapters/kysely/kysely-query-builder.js.map +1 -1
  26. package/dist/adapters/kysely/kysely-query-compiler.js +2 -2
  27. package/dist/adapters/kysely/kysely-query-compiler.js.map +1 -1
  28. package/dist/adapters/kysely/kysely-query.js +3 -2
  29. package/dist/adapters/kysely/kysely-query.js.map +1 -1
  30. package/dist/adapters/kysely/kysely-shared.js +18 -0
  31. package/dist/adapters/kysely/kysely-shared.js.map +1 -0
  32. package/dist/adapters/kysely/kysely-uow-compiler.js +4 -3
  33. package/dist/adapters/kysely/kysely-uow-compiler.js.map +1 -1
  34. package/dist/adapters/kysely/migration/execute.js +15 -12
  35. package/dist/adapters/kysely/migration/execute.js.map +1 -1
  36. package/dist/migration-engine/auto-from-schema.js +2 -8
  37. package/dist/migration-engine/auto-from-schema.js.map +1 -1
  38. package/dist/migration-engine/create.d.ts +1 -5
  39. package/dist/migration-engine/create.js +1 -1
  40. package/dist/migration-engine/create.js.map +1 -1
  41. package/dist/migration-engine/generation-engine.d.ts +51 -0
  42. package/dist/migration-engine/generation-engine.d.ts.map +1 -0
  43. package/dist/migration-engine/generation-engine.js +165 -0
  44. package/dist/migration-engine/generation-engine.js.map +1 -0
  45. package/dist/migration-engine/shared.d.ts +5 -2
  46. package/dist/migration-engine/shared.d.ts.map +1 -1
  47. package/dist/migration-engine/shared.js.map +1 -1
  48. package/dist/mod.d.ts +0 -8
  49. package/dist/mod.d.ts.map +1 -1
  50. package/dist/mod.js +0 -32
  51. package/dist/mod.js.map +1 -1
  52. package/dist/query/condition-builder.js.map +1 -1
  53. package/dist/query/result-transform.js +2 -1
  54. package/dist/query/result-transform.js.map +1 -1
  55. package/dist/schema/create.d.ts +74 -16
  56. package/dist/schema/create.d.ts.map +1 -1
  57. package/dist/schema/create.js +76 -11
  58. package/dist/schema/create.js.map +1 -1
  59. package/dist/schema/serialize.js.map +1 -1
  60. package/dist/shared/settings-schema.js +36 -0
  61. package/dist/shared/settings-schema.js.map +1 -0
  62. package/dist/util/import-generator.js.map +1 -1
  63. package/dist/util/parse.js.map +1 -1
  64. package/package.json +8 -2
  65. package/src/adapters/adapters.ts +10 -3
  66. package/src/adapters/drizzle/drizzle-adapter-pglite.test.ts +11 -7
  67. package/src/adapters/drizzle/drizzle-adapter.test.ts +77 -29
  68. package/src/adapters/drizzle/drizzle-adapter.ts +31 -78
  69. package/src/adapters/drizzle/drizzle-query.ts +4 -7
  70. package/src/adapters/drizzle/drizzle-uow-compiler.test.ts +9 -3
  71. package/src/adapters/drizzle/drizzle-uow-compiler.ts +12 -6
  72. package/src/adapters/drizzle/drizzle-uow-decoder.ts +1 -1
  73. package/src/adapters/drizzle/drizzle-uow-executor.ts +1 -1
  74. package/src/adapters/drizzle/generate.test.ts +573 -150
  75. package/src/adapters/drizzle/generate.ts +187 -36
  76. package/src/adapters/drizzle/migrate-drizzle.test.ts +30 -6
  77. package/src/adapters/drizzle/shared.ts +31 -1
  78. package/src/adapters/drizzle/test-utils.ts +3 -1
  79. package/src/adapters/kysely/kysely-adapter-pglite.test.ts +25 -27
  80. package/src/adapters/kysely/kysely-adapter.ts +35 -58
  81. package/src/adapters/kysely/kysely-query-builder.ts +75 -44
  82. package/src/adapters/kysely/kysely-query-compiler.ts +3 -1
  83. package/src/adapters/kysely/kysely-query.ts +8 -2
  84. package/src/adapters/kysely/kysely-shared.ts +23 -0
  85. package/src/adapters/kysely/kysely-uow-compiler.ts +5 -2
  86. package/src/adapters/kysely/migration/execute-mysql.test.ts +2 -2
  87. package/src/adapters/kysely/migration/execute-postgres.test.ts +19 -19
  88. package/src/adapters/kysely/migration/execute.ts +48 -17
  89. package/src/adapters/kysely/migration/kysely-migrator.test.ts +19 -37
  90. package/src/fragment.test.ts +1 -0
  91. package/src/migration-engine/auto-from-schema.ts +14 -18
  92. package/src/migration-engine/create.ts +1 -6
  93. package/src/migration-engine/generation-engine.test.ts +597 -0
  94. package/src/migration-engine/generation-engine.ts +356 -0
  95. package/src/migration-engine/shared.ts +1 -4
  96. package/src/mod.ts +0 -66
  97. package/src/query/condition-builder.ts +24 -8
  98. package/src/query/result-transform.ts +7 -1
  99. package/src/schema/create.test.ts +4 -1
  100. package/src/schema/create.ts +132 -24
  101. package/src/schema/serialize.ts +21 -7
  102. package/src/shared/settings-schema.ts +61 -0
  103. package/src/util/deep-equal.ts +21 -7
  104. package/src/util/import-generator.ts +3 -1
  105. package/src/util/parse.ts +3 -1
  106. package/tsdown.config.ts +1 -0
  107. package/.turbo/turbo-test.log +0 -37
  108. package/.turbo/turbo-types$colon$check.log +0 -1
@@ -0,0 +1,165 @@
1
+ import { SETTINGS_NAMESPACE, createSettingsManager, settingsSchema } from "../shared/settings-schema.js";
2
+
3
+ //#region src/migration-engine/generation-engine.ts
4
+ async function generateMigrationsOrSchema(databases, options) {
5
+ if (databases.length === 0) throw new Error("No databases provided for schema generation");
6
+ const firstDb = databases[0];
7
+ const adapter = firstDb.adapter;
8
+ if (adapter.createSchemaGenerator) {
9
+ if (options?.toVersion !== void 0 || options?.fromVersion !== void 0) console.warn("⚠️ Warning: --from and --to version options are not supported when generating schemas for multiple fragments and will be ignored.");
10
+ const fragments = databases.map((db) => ({
11
+ schema: db.schema,
12
+ namespace: db.namespace
13
+ }));
14
+ return [{
15
+ ...adapter.createSchemaGenerator(fragments, { path: options?.path }).generateSchema(),
16
+ namespace: firstDb.namespace
17
+ }];
18
+ }
19
+ if (!adapter.createMigrationEngine) throw new Error("Adapter does not support migration-based schema generation. Ensure your adapter implements createMigrationEngine.");
20
+ if (!await adapter.isConnectionHealthy()) throw new Error("Database connection is not healthy. Please check your database connection and try again.");
21
+ const settingsManager = createSettingsManager(adapter.createQueryEngine(settingsSchema, ""), SETTINGS_NAMESPACE);
22
+ let settingsSourceVersion;
23
+ try {
24
+ const result = await settingsManager.get("version");
25
+ if (!result) settingsSourceVersion = 0;
26
+ else settingsSourceVersion = parseInt(result.value);
27
+ } catch {
28
+ settingsSourceVersion = 0;
29
+ }
30
+ const generatedFiles = [];
31
+ const settingsMigrator = adapter.createMigrationEngine(settingsSchema, SETTINGS_NAMESPACE);
32
+ const settingsTargetVersion = settingsSchema.version;
33
+ const settingsMigration = await settingsMigrator.prepareMigrationTo(settingsTargetVersion, { fromVersion: settingsSourceVersion });
34
+ if (!settingsMigration.getSQL) throw new Error("Migration engine does not support SQL generation. Ensure your adapter's migration engine provides getSQL().");
35
+ const settingsSql = settingsMigration.getSQL();
36
+ if (settingsSql.trim()) generatedFiles.push({
37
+ schema: settingsSql,
38
+ path: "settings-migration.sql",
39
+ namespace: SETTINGS_NAMESPACE,
40
+ fromVersion: settingsSourceVersion,
41
+ toVersion: settingsTargetVersion,
42
+ preparedMigration: settingsMigration
43
+ });
44
+ for (const db of databases) {
45
+ const dbAdapter = db.adapter;
46
+ if (!dbAdapter.createMigrationEngine) throw new Error(`Adapter for ${db.namespace} does not support schema generation. Ensure your adapter implements either createSchemaGenerator or createMigrationEngine.`);
47
+ const migrator = dbAdapter.createMigrationEngine(db.schema, db.namespace);
48
+ const targetVersion = options?.toVersion ?? db.schema.version;
49
+ const sourceVersion = options?.fromVersion ?? 0;
50
+ const preparedMigration = await migrator.prepareMigrationTo(targetVersion, { fromVersion: sourceVersion });
51
+ if (!preparedMigration.getSQL) throw new Error("Migration engine does not support SQL generation. Ensure your adapter's migration engine provides getSQL().");
52
+ const sql = preparedMigration.getSQL();
53
+ if (sql.trim()) generatedFiles.push({
54
+ schema: sql,
55
+ path: "schema.sql",
56
+ namespace: db.namespace,
57
+ fromVersion: sourceVersion,
58
+ toVersion: targetVersion,
59
+ preparedMigration
60
+ });
61
+ }
62
+ return postProcessMigrationFilenames(generatedFiles);
63
+ }
64
+ /**
65
+ * Execute migrations for all fragments in the correct order.
66
+ * Migrates settings table first, then fragments alphabetically.
67
+ *
68
+ * @param databases - Array of FragnoDatabase instances to migrate
69
+ * @returns Array of execution results for each migration
70
+ */
71
+ async function executeMigrations(databases) {
72
+ if (databases.length === 0) throw new Error("No databases provided for migration");
73
+ const adapter = databases[0].adapter;
74
+ if (!adapter.createMigrationEngine) throw new Error("Adapter does not support running migrations. The adapter only supports schema generation.\nTry using 'generateMigrationsOrSchema' instead to generate schema files.");
75
+ if (!databases.every((db) => db.adapter === adapter)) throw new Error("All fragments must use the same database adapter instance. Mixed adapters are not supported.");
76
+ if (!await adapter.isConnectionHealthy()) throw new Error("Database connection is not healthy. Please check your database connection and try again.");
77
+ const results = [];
78
+ const migrationsToExecute = [];
79
+ const settingsManager = createSettingsManager(adapter.createQueryEngine(settingsSchema, ""), SETTINGS_NAMESPACE);
80
+ let settingsSourceVersion;
81
+ try {
82
+ const result = await settingsManager.get("version");
83
+ settingsSourceVersion = result ? parseInt(result.value) : 0;
84
+ } catch {
85
+ settingsSourceVersion = 0;
86
+ }
87
+ const settingsMigrator = adapter.createMigrationEngine(settingsSchema, SETTINGS_NAMESPACE);
88
+ const settingsTargetVersion = settingsSchema.version;
89
+ if (settingsSourceVersion < settingsTargetVersion) {
90
+ const settingsMigration = await settingsMigrator.prepareMigrationTo(settingsTargetVersion, {
91
+ fromVersion: settingsSourceVersion,
92
+ updateSettings: true
93
+ });
94
+ if (settingsMigration.operations.length > 0) migrationsToExecute.push({
95
+ namespace: SETTINGS_NAMESPACE,
96
+ fromVersion: settingsSourceVersion,
97
+ toVersion: settingsTargetVersion,
98
+ preparedMigration: settingsMigration
99
+ });
100
+ }
101
+ const sortedDatabases = [...databases].sort((a, b) => a.namespace.localeCompare(b.namespace));
102
+ for (const fragnoDb of sortedDatabases) {
103
+ const migrator = adapter.createMigrationEngine(fragnoDb.schema, fragnoDb.namespace);
104
+ const currentVersion = await migrator.getVersion();
105
+ const targetVersion = fragnoDb.schema.version;
106
+ if (currentVersion < targetVersion) {
107
+ const preparedMigration = await migrator.prepareMigrationTo(targetVersion, { updateSettings: true });
108
+ if (preparedMigration.operations.length > 0) migrationsToExecute.push({
109
+ namespace: fragnoDb.namespace,
110
+ fromVersion: currentVersion,
111
+ toVersion: targetVersion,
112
+ preparedMigration
113
+ });
114
+ }
115
+ }
116
+ for (const migration of migrationsToExecute) {
117
+ await migration.preparedMigration.execute();
118
+ results.push({
119
+ namespace: migration.namespace,
120
+ didMigrate: true,
121
+ fromVersion: migration.fromVersion,
122
+ toVersion: migration.toVersion
123
+ });
124
+ }
125
+ for (const fragnoDb of databases) if (!results.find((r) => r.namespace === fragnoDb.namespace)) results.push({
126
+ namespace: fragnoDb.namespace,
127
+ didMigrate: false,
128
+ fromVersion: fragnoDb.schema.version,
129
+ toVersion: fragnoDb.schema.version
130
+ });
131
+ return results;
132
+ }
133
+ /**
134
+ * Post-processes migration files to add ordering and standardize naming.
135
+ *
136
+ * Sorts files with settings namespace first, then alphabetically by namespace,
137
+ * and assigns ordering numbers. Transforms filenames to format:
138
+ * `<date>_<n>_f<from>_t<to>_<namespace>.sql`
139
+ *
140
+ * @param files - Array of generated migration files with version information
141
+ * @returns Array of files with standardized paths and ordering
142
+ */
143
+ function postProcessMigrationFilenames(files) {
144
+ if (files.length === 0) return [];
145
+ const sortedFiles = [...files].sort((a, b) => {
146
+ if (a.namespace === SETTINGS_NAMESPACE) return -1;
147
+ if (b.namespace === SETTINGS_NAMESPACE) return 1;
148
+ return a.namespace.localeCompare(b.namespace);
149
+ });
150
+ const date = (/* @__PURE__ */ new Date()).toISOString().split("T")[0].replace(/-/g, "");
151
+ return sortedFiles.map((file, index) => {
152
+ const fromVersion = file.fromVersion ?? 0;
153
+ const toVersion = file.toVersion ?? 0;
154
+ const newPath = `${date}_${(index + 1).toString().padStart(3, "0")}_f${fromVersion.toString().padStart(3, "0")}_t${toVersion.toString().padStart(3, "0")}_${file.namespace.replace(/[^a-z0-9-]/gi, "_")}.sql`;
155
+ return {
156
+ schema: file.schema,
157
+ path: newPath,
158
+ namespace: file.namespace
159
+ };
160
+ });
161
+ }
162
+
163
+ //#endregion
164
+ export { executeMigrations, generateMigrationsOrSchema, postProcessMigrationFilenames };
165
+ //# sourceMappingURL=generation-engine.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"generation-engine.js","names":["settingsSourceVersion: number","generatedFiles: GenerationInternalResult[]","results: ExecuteMigrationResult[]","migrationsToExecute: Array<{\n namespace: string;\n fromVersion: number;\n toVersion: number;\n preparedMigration: PreparedMigration;\n }>"],"sources":["../../src/migration-engine/generation-engine.ts"],"sourcesContent":["import type { FragnoDatabase } from \"../mod\";\nimport type { AnySchema } from \"../schema/create\";\nimport type { PreparedMigration } from \"./create\";\nimport {\n settingsSchema,\n SETTINGS_NAMESPACE,\n createSettingsManager,\n} from \"../shared/settings-schema\";\n\nexport interface GenerationEngineResult {\n schema: string;\n path: string;\n namespace: string;\n}\n\nexport interface GenerationInternalResult {\n schema: string;\n path: string;\n namespace: string;\n fromVersion: number;\n toVersion: number;\n preparedMigration?: PreparedMigration;\n}\n\nexport interface ExecuteMigrationResult {\n namespace: string;\n didMigrate: boolean;\n fromVersion: number;\n toVersion: number;\n}\n\nexport async function generateMigrationsOrSchema<\n const TDatabases extends FragnoDatabase<AnySchema>[],\n>(\n databases: TDatabases,\n options?: {\n path?: string;\n toVersion?: number;\n fromVersion?: number;\n },\n): Promise<GenerationEngineResult[]> {\n if (databases.length === 0) {\n throw new Error(\"No databases provided for schema generation\");\n }\n\n const firstDb = databases[0];\n const adapter = firstDb.adapter;\n\n // If adapter has createSchemaGenerator, use it for combined generation (e.g., Drizzle)\n if (adapter.createSchemaGenerator) {\n if (options?.toVersion !== undefined || options?.fromVersion !== undefined) {\n console.warn(\n \"⚠️ Warning: --from and --to version options are not supported when generating schemas for multiple fragments and will be ignored.\",\n );\n }\n\n const fragments = databases.map((db) => ({\n schema: db.schema,\n namespace: db.namespace,\n 
}));\n\n const generator = adapter.createSchemaGenerator(fragments, {\n path: options?.path,\n });\n\n return [\n {\n ...generator.generateSchema(),\n namespace: firstDb.namespace,\n },\n ];\n }\n\n // Otherwise, use migration engine for individual generation (e.g., Kysely)\n if (!adapter.createMigrationEngine) {\n throw new Error(\n \"Adapter does not support migration-based schema generation. Ensure your adapter implements createMigrationEngine.\",\n );\n }\n\n if (!(await adapter.isConnectionHealthy())) {\n throw new Error(\n \"Database connection is not healthy. Please check your database connection and try again.\",\n );\n }\n\n const settingsQueryEngine = adapter.createQueryEngine(settingsSchema, \"\");\n const settingsManager = createSettingsManager(settingsQueryEngine, SETTINGS_NAMESPACE);\n\n let settingsSourceVersion: number;\n try {\n const result = await settingsManager.get(\"version\");\n\n if (!result) {\n settingsSourceVersion = 0;\n } else {\n settingsSourceVersion = parseInt(result.value);\n }\n } catch {\n // We don't really have a way to verify this error happens because the key doesn't exist in the database\n settingsSourceVersion = 0;\n }\n\n const generatedFiles: GenerationInternalResult[] = [];\n\n const settingsMigrator = adapter.createMigrationEngine(settingsSchema, SETTINGS_NAMESPACE);\n const settingsTargetVersion = settingsSchema.version;\n\n // Generate settings table migration\n const settingsMigration = await settingsMigrator.prepareMigrationTo(settingsTargetVersion, {\n fromVersion: settingsSourceVersion,\n });\n\n if (!settingsMigration.getSQL) {\n throw new Error(\n \"Migration engine does not support SQL generation. 
Ensure your adapter's migration engine provides getSQL().\",\n );\n }\n\n const settingsSql = settingsMigration.getSQL();\n\n if (settingsSql.trim()) {\n generatedFiles.push({\n schema: settingsSql,\n path: \"settings-migration.sql\", // Placeholder, will be renamed in post-processing\n namespace: SETTINGS_NAMESPACE,\n fromVersion: settingsSourceVersion,\n toVersion: settingsTargetVersion,\n preparedMigration: settingsMigration,\n });\n }\n\n // Generate migration for each fragment\n for (const db of databases) {\n const dbAdapter = db.adapter;\n\n // Use migration engine\n if (!dbAdapter.createMigrationEngine) {\n throw new Error(\n `Adapter for ${db.namespace} does not support schema generation. ` +\n `Ensure your adapter implements either createSchemaGenerator or createMigrationEngine.`,\n );\n }\n\n const migrator = dbAdapter.createMigrationEngine(db.schema, db.namespace);\n const targetVersion = options?.toVersion ?? db.schema.version;\n const sourceVersion = options?.fromVersion ?? 0;\n\n // Generate migration from source to target version\n const preparedMigration = await migrator.prepareMigrationTo(targetVersion, {\n fromVersion: sourceVersion,\n });\n\n if (!preparedMigration.getSQL) {\n throw new Error(\n \"Migration engine does not support SQL generation. 
Ensure your adapter's migration engine provides getSQL().\",\n );\n }\n\n const sql = preparedMigration.getSQL();\n\n // If no migrations needed, skip this fragment\n if (sql.trim()) {\n generatedFiles.push({\n schema: sql,\n path: \"schema.sql\", // Placeholder, will be renamed in post-processing\n namespace: db.namespace,\n fromVersion: sourceVersion,\n toVersion: targetVersion,\n preparedMigration: preparedMigration,\n });\n }\n }\n\n // Post-process filenames with ordering\n return postProcessMigrationFilenames(generatedFiles);\n}\n\n/**\n * Execute migrations for all fragments in the correct order.\n * Migrates settings table first, then fragments alphabetically.\n *\n * @param databases - Array of FragnoDatabase instances to migrate\n * @returns Array of execution results for each migration\n */\nexport async function executeMigrations<const TDatabases extends FragnoDatabase<AnySchema>[]>(\n databases: TDatabases,\n): Promise<ExecuteMigrationResult[]> {\n if (databases.length === 0) {\n throw new Error(\"No databases provided for migration\");\n }\n\n const firstDb = databases[0];\n const adapter = firstDb.adapter;\n\n // Validate adapter supports migrations\n if (!adapter.createMigrationEngine) {\n throw new Error(\n \"Adapter does not support running migrations. The adapter only supports schema generation.\\n\" +\n \"Try using 'generateMigrationsOrSchema' instead to generate schema files.\",\n );\n }\n\n // Validate all use same adapter\n const allSameAdapter = databases.every((db) => db.adapter === adapter);\n if (!allSameAdapter) {\n throw new Error(\n \"All fragments must use the same database adapter instance. Mixed adapters are not supported.\",\n );\n }\n\n if (!(await adapter.isConnectionHealthy())) {\n throw new Error(\n \"Database connection is not healthy. 
Please check your database connection and try again.\",\n );\n }\n\n const results: ExecuteMigrationResult[] = [];\n const migrationsToExecute: Array<{\n namespace: string;\n fromVersion: number;\n toVersion: number;\n preparedMigration: PreparedMigration;\n }> = [];\n\n // 1. Prepare settings table migration\n const settingsQueryEngine = adapter.createQueryEngine(settingsSchema, \"\");\n const settingsManager = createSettingsManager(settingsQueryEngine, SETTINGS_NAMESPACE);\n\n let settingsSourceVersion: number;\n try {\n const result = await settingsManager.get(\"version\");\n settingsSourceVersion = result ? parseInt(result.value) : 0;\n } catch {\n settingsSourceVersion = 0;\n }\n\n const settingsMigrator = adapter.createMigrationEngine(settingsSchema, SETTINGS_NAMESPACE);\n const settingsTargetVersion = settingsSchema.version;\n\n if (settingsSourceVersion < settingsTargetVersion) {\n const settingsMigration = await settingsMigrator.prepareMigrationTo(settingsTargetVersion, {\n fromVersion: settingsSourceVersion,\n updateSettings: true,\n });\n\n if (settingsMigration.operations.length > 0) {\n migrationsToExecute.push({\n namespace: SETTINGS_NAMESPACE,\n fromVersion: settingsSourceVersion,\n toVersion: settingsTargetVersion,\n preparedMigration: settingsMigration,\n });\n }\n }\n\n // 2. 
Prepare fragment migrations (sorted alphabetically)\n const sortedDatabases = [...databases].sort((a, b) => a.namespace.localeCompare(b.namespace));\n\n for (const fragnoDb of sortedDatabases) {\n const migrator = adapter.createMigrationEngine(fragnoDb.schema, fragnoDb.namespace);\n const currentVersion = await migrator.getVersion();\n const targetVersion = fragnoDb.schema.version;\n\n if (currentVersion < targetVersion) {\n const preparedMigration = await migrator.prepareMigrationTo(targetVersion, {\n updateSettings: true,\n });\n\n if (preparedMigration.operations.length > 0) {\n migrationsToExecute.push({\n namespace: fragnoDb.namespace,\n fromVersion: currentVersion,\n toVersion: targetVersion,\n preparedMigration: preparedMigration,\n });\n }\n }\n }\n\n // 3. Execute all migrations in order\n for (const migration of migrationsToExecute) {\n await migration.preparedMigration.execute();\n results.push({\n namespace: migration.namespace,\n didMigrate: true,\n fromVersion: migration.fromVersion,\n toVersion: migration.toVersion,\n });\n }\n\n // 4. Add skipped migrations (already up-to-date)\n for (const fragnoDb of databases) {\n if (!results.find((r) => r.namespace === fragnoDb.namespace)) {\n results.push({\n namespace: fragnoDb.namespace,\n didMigrate: false,\n fromVersion: fragnoDb.schema.version,\n toVersion: fragnoDb.schema.version,\n });\n }\n }\n\n return results;\n}\n\n/**\n * Post-processes migration files to add ordering and standardize naming.\n *\n * Sorts files with settings namespace first, then alphabetically by namespace,\n * and assigns ordering numbers. 
Transforms filenames to format:\n * `<date>_<n>_f<from>_t<to>_<namespace>.sql`\n *\n * @param files - Array of generated migration files with version information\n * @returns Array of files with standardized paths and ordering\n */\nexport function postProcessMigrationFilenames(\n files: GenerationInternalResult[],\n): GenerationEngineResult[] {\n if (files.length === 0) {\n return [];\n }\n\n // Sort files: settings namespace first, then alphabetically by namespace\n const sortedFiles = [...files].sort((a, b) => {\n if (a.namespace === SETTINGS_NAMESPACE) {\n return -1;\n }\n if (b.namespace === SETTINGS_NAMESPACE) {\n return 1;\n }\n return a.namespace.localeCompare(b.namespace);\n });\n\n // Generate date prefix for filenames\n const date = new Date().toISOString().split(\"T\")[0].replace(/-/g, \"\");\n\n // Rename files with ordering\n return sortedFiles.map((file, index) => {\n const fromVersion = file.fromVersion ?? 0;\n const toVersion = file.toVersion ?? 0;\n\n // Create new filename with ordering\n const orderNum = (index + 1).toString().padStart(3, \"0\");\n const fromPadded = fromVersion.toString().padStart(3, \"0\");\n const toPadded = toVersion.toString().padStart(3, \"0\");\n const safeName = file.namespace.replace(/[^a-z0-9-]/gi, \"_\");\n const newPath = `${date}_${orderNum}_f${fromPadded}_t${toPadded}_${safeName}.sql`;\n\n return {\n schema: file.schema,\n path: newPath,\n namespace: file.namespace,\n };\n 
});\n}\n"],"mappings":";;;AA+BA,eAAsB,2BAGpB,WACA,SAKmC;AACnC,KAAI,UAAU,WAAW,EACvB,OAAM,IAAI,MAAM,8CAA8C;CAGhE,MAAM,UAAU,UAAU;CAC1B,MAAM,UAAU,QAAQ;AAGxB,KAAI,QAAQ,uBAAuB;AACjC,MAAI,SAAS,cAAc,UAAa,SAAS,gBAAgB,OAC/D,SAAQ,KACN,oIACD;EAGH,MAAM,YAAY,UAAU,KAAK,QAAQ;GACvC,QAAQ,GAAG;GACX,WAAW,GAAG;GACf,EAAE;AAMH,SAAO,CACL;GACE,GANc,QAAQ,sBAAsB,WAAW,EACzD,MAAM,SAAS,MAChB,CAAC,CAIe,gBAAgB;GAC7B,WAAW,QAAQ;GACpB,CACF;;AAIH,KAAI,CAAC,QAAQ,sBACX,OAAM,IAAI,MACR,oHACD;AAGH,KAAI,CAAE,MAAM,QAAQ,qBAAqB,CACvC,OAAM,IAAI,MACR,2FACD;CAIH,MAAM,kBAAkB,sBADI,QAAQ,kBAAkB,gBAAgB,GAAG,EACN,mBAAmB;CAEtF,IAAIA;AACJ,KAAI;EACF,MAAM,SAAS,MAAM,gBAAgB,IAAI,UAAU;AAEnD,MAAI,CAAC,OACH,yBAAwB;MAExB,yBAAwB,SAAS,OAAO,MAAM;SAE1C;AAEN,0BAAwB;;CAG1B,MAAMC,iBAA6C,EAAE;CAErD,MAAM,mBAAmB,QAAQ,sBAAsB,gBAAgB,mBAAmB;CAC1F,MAAM,wBAAwB,eAAe;CAG7C,MAAM,oBAAoB,MAAM,iBAAiB,mBAAmB,uBAAuB,EACzF,aAAa,uBACd,CAAC;AAEF,KAAI,CAAC,kBAAkB,OACrB,OAAM,IAAI,MACR,8GACD;CAGH,MAAM,cAAc,kBAAkB,QAAQ;AAE9C,KAAI,YAAY,MAAM,CACpB,gBAAe,KAAK;EAClB,QAAQ;EACR,MAAM;EACN,WAAW;EACX,aAAa;EACb,WAAW;EACX,mBAAmB;EACpB,CAAC;AAIJ,MAAK,MAAM,MAAM,WAAW;EAC1B,MAAM,YAAY,GAAG;AAGrB,MAAI,CAAC,UAAU,sBACb,OAAM,IAAI,MACR,eAAe,GAAG,UAAU,4HAE7B;EAGH,MAAM,WAAW,UAAU,sBAAsB,GAAG,QAAQ,GAAG,UAAU;EACzE,MAAM,gBAAgB,SAAS,aAAa,GAAG,OAAO;EACtD,MAAM,gBAAgB,SAAS,eAAe;EAG9C,MAAM,oBAAoB,MAAM,SAAS,mBAAmB,eAAe,EACzE,aAAa,eACd,CAAC;AAEF,MAAI,CAAC,kBAAkB,OACrB,OAAM,IAAI,MACR,8GACD;EAGH,MAAM,MAAM,kBAAkB,QAAQ;AAGtC,MAAI,IAAI,MAAM,CACZ,gBAAe,KAAK;GAClB,QAAQ;GACR,MAAM;GACN,WAAW,GAAG;GACd,aAAa;GACb,WAAW;GACQ;GACpB,CAAC;;AAKN,QAAO,8BAA8B,eAAe;;;;;;;;;AAUtD,eAAsB,kBACpB,WACmC;AACnC,KAAI,UAAU,WAAW,EACvB,OAAM,IAAI,MAAM,sCAAsC;CAIxD,MAAM,UADU,UAAU,GACF;AAGxB,KAAI,CAAC,QAAQ,sBACX,OAAM,IAAI,MACR,sKAED;AAKH,KAAI,CADmB,UAAU,OAAO,OAAO,GAAG,YAAY,QAAQ,CAEpE,OAAM,IAAI,MACR,+FACD;AAGH,KAAI,CAAE,MAAM,QAAQ,qBAAqB,CACvC,OAAM,IAAI,MACR,2FACD;CAGH,MAAMC,UAAoC,EAAE;CAC5C,MAAMC,sBAKD,EAAE;CAIP,MAAM,kBAAkB,sBADI,QAAQ,kBAAkB,gBAAgB,GAAG,EACN,mBAAmB;CAEtF,IAAIH;AACJ,KAAI;EACF,MAAM,SAAS,MAAM,gBAAgB,IAAI,UAAU;AAC
nD,0BAAwB,SAAS,SAAS,OAAO,MAAM,GAAG;SACpD;AACN,0BAAwB;;CAG1B,MAAM,mBAAmB,QAAQ,sBAAsB,gBAAgB,mBAAmB;CAC1F,MAAM,wBAAwB,eAAe;AAE7C,KAAI,wBAAwB,uBAAuB;EACjD,MAAM,oBAAoB,MAAM,iBAAiB,mBAAmB,uBAAuB;GACzF,aAAa;GACb,gBAAgB;GACjB,CAAC;AAEF,MAAI,kBAAkB,WAAW,SAAS,EACxC,qBAAoB,KAAK;GACvB,WAAW;GACX,aAAa;GACb,WAAW;GACX,mBAAmB;GACpB,CAAC;;CAKN,MAAM,kBAAkB,CAAC,GAAG,UAAU,CAAC,MAAM,GAAG,MAAM,EAAE,UAAU,cAAc,EAAE,UAAU,CAAC;AAE7F,MAAK,MAAM,YAAY,iBAAiB;EACtC,MAAM,WAAW,QAAQ,sBAAsB,SAAS,QAAQ,SAAS,UAAU;EACnF,MAAM,iBAAiB,MAAM,SAAS,YAAY;EAClD,MAAM,gBAAgB,SAAS,OAAO;AAEtC,MAAI,iBAAiB,eAAe;GAClC,MAAM,oBAAoB,MAAM,SAAS,mBAAmB,eAAe,EACzE,gBAAgB,MACjB,CAAC;AAEF,OAAI,kBAAkB,WAAW,SAAS,EACxC,qBAAoB,KAAK;IACvB,WAAW,SAAS;IACpB,aAAa;IACb,WAAW;IACQ;IACpB,CAAC;;;AAMR,MAAK,MAAM,aAAa,qBAAqB;AAC3C,QAAM,UAAU,kBAAkB,SAAS;AAC3C,UAAQ,KAAK;GACX,WAAW,UAAU;GACrB,YAAY;GACZ,aAAa,UAAU;GACvB,WAAW,UAAU;GACtB,CAAC;;AAIJ,MAAK,MAAM,YAAY,UACrB,KAAI,CAAC,QAAQ,MAAM,MAAM,EAAE,cAAc,SAAS,UAAU,CAC1D,SAAQ,KAAK;EACX,WAAW,SAAS;EACpB,YAAY;EACZ,aAAa,SAAS,OAAO;EAC7B,WAAW,SAAS,OAAO;EAC5B,CAAC;AAIN,QAAO;;;;;;;;;;;;AAaT,SAAgB,8BACd,OAC0B;AAC1B,KAAI,MAAM,WAAW,EACnB,QAAO,EAAE;CAIX,MAAM,cAAc,CAAC,GAAG,MAAM,CAAC,MAAM,GAAG,MAAM;AAC5C,MAAI,EAAE,cAAc,mBAClB,QAAO;AAET,MAAI,EAAE,cAAc,mBAClB,QAAO;AAET,SAAO,EAAE,UAAU,cAAc,EAAE,UAAU;GAC7C;CAGF,MAAM,wBAAO,IAAI,MAAM,EAAC,aAAa,CAAC,MAAM,IAAI,CAAC,GAAG,QAAQ,MAAM,GAAG;AAGrE,QAAO,YAAY,KAAK,MAAM,UAAU;EACtC,MAAM,cAAc,KAAK,eAAe;EACxC,MAAM,YAAY,KAAK,aAAa;EAOpC,MAAM,UAAU,GAAG,KAAK,IAJN,QAAQ,GAAG,UAAU,CAAC,SAAS,GAAG,IAAI,CAIpB,IAHjB,YAAY,UAAU,CAAC,SAAS,GAAG,IAAI,CAGP,IAFlC,UAAU,UAAU,CAAC,SAAS,GAAG,IAAI,CAEU,GAD/C,KAAK,UAAU,QAAQ,gBAAgB,IAAI,CACgB;AAE5E,SAAO;GACL,QAAQ,KAAK;GACb,MAAM;GACN,WAAW,KAAK;GACjB;GACD"}
@@ -11,8 +11,11 @@ interface ColumnInfo {
11
11
  isNullable: boolean;
12
12
  role: "external-id" | "internal-id" | "version" | "reference" | "regular";
13
13
  default?: {
14
- value?: unknown;
15
- runtime?: "now" | "auto";
14
+ value: unknown;
15
+ } | {
16
+ dbSpecial: "now";
17
+ } | {
18
+ runtime: "cuid" | "now";
16
19
  };
17
20
  }
18
21
  type MigrationOperation = TableOperation | {
@@ -1 +1 @@
1
- {"version":3,"file":"shared.d.ts","names":[],"sources":["../../src/migration-engine/shared.ts"],"sourcesContent":[],"mappings":";UAAiB,cAAA;EAAA,IAAA,EAAA,MAAA;EAOA,OAAA,EAAA,MAAU,EAAA;EAqBf,eAAA,EAAA,MAAkB;EAC1B,iBAAA,EAAA,MAAA,EAAA;;AAyBA,UA/Ca,UAAA,CA+Cb;EAAe,IAAA,EAAA,MAAA;EAEP,IAAA,EAAA,QAAA,GAAA,SAAe,GAAA,QAEvB,GAAM,SAAA,GAAA,MAAA,GAAA,MAAA,GAAA,WAAA,GAAA,MAAA,GAAA,QAAA,GAAA,WAAA,MAAA,GAAA;EAEE,UAAA,EAAA,OAAc;EAqBd,IAAA,EAAA,aAAe,GAAA,aAed,GAAA,SAgBA,GAAA,WAAU,GAAA,SAAA;;;;;;KApFX,kBAAA,GACR;;;SAKS;;;;;;;;;;;;;;;IAoBT;KAEQ,eAAA;;IAER;KAEQ,cAAA;;;WAIG;;;;;;;SASF;;;;;;KAQD,eAAA;;;;;;;;;;;;SAeC;;;;;;;;;;;;;;;SAgBA"}
1
+ {"version":3,"file":"shared.d.ts","names":[],"sources":["../../src/migration-engine/shared.ts"],"sourcesContent":[],"mappings":";UAAiB,cAAA;EAAA,IAAA,EAAA,MAAA;EAOA,OAAA,EAAA,MAAU,EAAA;EAkBf,eAAA,EAAA,MAAkB;EAC1B,iBAAA,EAAA,MAAA,EAAA;;AAyBA,UA5Ca,UAAA,CA4Cb;EAAe,IAAA,EAAA,MAAA;EAEP,IAAA,EAAA,QAAA,GAAA,SAAe,GAAA,QAEvB,GAAM,SAAA,GAAA,MAAA,GAAA,MAAA,GAAA,WAAA,GAAA,MAAA,GAAA,QAAA,GAAA,WAAA,MAAA,GAAA;EAEE,UAAA,EAAA,OAAc;EAqBd,IAAA,EAAA,aAAe,GAAA,aAed,GAAA,SAgBA,GAAA,WAAU,GAAA,SAAA;;;;;;;;;KApFX,kBAAA,GACR;;;SAKS;;;;;;;;;;;;;;;IAoBT;KAEQ,eAAA;;IAER;KAEQ,cAAA;;;WAIG;;;;;;;SASF;;;;;;KAQD,eAAA;;;;;;;;;;;;SAeC;;;;;;;;;;;;;;;SAgBA"}
@@ -1 +1 @@
1
- {"version":3,"file":"shared.js","names":[],"sources":["../../src/migration-engine/shared.ts"],"sourcesContent":["export interface ForeignKeyInfo {\n name: string;\n columns: string[];\n referencedTable: string;\n referencedColumns: string[];\n}\n\nexport interface ColumnInfo {\n name: string;\n type:\n | \"string\"\n | \"integer\"\n | \"bigint\"\n | \"decimal\"\n | \"bool\"\n | \"date\"\n | \"timestamp\"\n | \"json\"\n | \"binary\"\n | `varchar(${number})`;\n isNullable: boolean;\n role: \"external-id\" | \"internal-id\" | \"version\" | \"reference\" | \"regular\";\n default?: {\n value?: unknown;\n runtime?: \"now\" | \"auto\";\n };\n}\n\nexport type MigrationOperation =\n | TableOperation\n | {\n // warning: not supported by SQLite\n type: \"add-foreign-key\";\n table: string;\n value: ForeignKeyInfo;\n }\n | {\n // warning: not supported by SQLite\n type: \"drop-foreign-key\";\n table: string;\n name: string;\n }\n | {\n type: \"drop-index\";\n table: string;\n name: string;\n }\n | {\n type: \"add-index\";\n table: string;\n columns: string[];\n name: string;\n unique: boolean;\n }\n | CustomOperation;\n\nexport type CustomOperation = {\n type: \"custom\";\n} & Record<string, unknown>;\n\nexport type TableOperation =\n | {\n type: \"create-table\";\n name: string;\n columns: ColumnInfo[];\n }\n | {\n type: \"drop-table\";\n name: string;\n }\n | {\n type: \"alter-table\";\n name: string;\n value: ColumnOperation[];\n }\n | {\n type: \"rename-table\";\n from: string;\n to: string;\n };\n\nexport type ColumnOperation =\n | {\n type: \"rename-column\";\n from: string;\n to: string;\n }\n | {\n type: \"drop-column\";\n name: string;\n }\n | {\n /**\n * Note: unique constraints are not created, please use dedicated operations like `add-index` instead\n */\n type: \"create-column\";\n value: ColumnInfo;\n }\n | {\n /**\n * warning: Not supported by SQLite\n */\n type: \"update-column\";\n name: string;\n /**\n * For databases like MySQL, it requires the full 
definition for any modify column statement.\n * Hence, you need to specify the full information of your column here.\n *\n * Then, opt-in for in-detail modification for other databases that supports changing data type/nullable/default separately, such as PostgreSQL.\n *\n * Note: unique constraints are not updated, please use dedicated operations like `add-index` instead\n */\n value: ColumnInfo;\n\n updateNullable: boolean;\n updateDefault: boolean;\n updateDataType: boolean;\n };\n\nexport function isUpdated(op: Extract<ColumnOperation, { type: \"update-column\" }>): boolean {\n return op.updateDataType || op.updateDefault || op.updateNullable;\n}\n"],"mappings":";AAuHA,SAAgB,UAAU,IAAkE;AAC1F,QAAO,GAAG,kBAAkB,GAAG,iBAAiB,GAAG"}
1
+ {"version":3,"file":"shared.js","names":[],"sources":["../../src/migration-engine/shared.ts"],"sourcesContent":["export interface ForeignKeyInfo {\n name: string;\n columns: string[];\n referencedTable: string;\n referencedColumns: string[];\n}\n\nexport interface ColumnInfo {\n name: string;\n type:\n | \"string\"\n | \"integer\"\n | \"bigint\"\n | \"decimal\"\n | \"bool\"\n | \"date\"\n | \"timestamp\"\n | \"json\"\n | \"binary\"\n | `varchar(${number})`;\n isNullable: boolean;\n role: \"external-id\" | \"internal-id\" | \"version\" | \"reference\" | \"regular\";\n default?: { value: unknown } | { dbSpecial: \"now\" } | { runtime: \"cuid\" | \"now\" };\n}\n\nexport type MigrationOperation =\n | TableOperation\n | {\n // warning: not supported by SQLite\n type: \"add-foreign-key\";\n table: string;\n value: ForeignKeyInfo;\n }\n | {\n // warning: not supported by SQLite\n type: \"drop-foreign-key\";\n table: string;\n name: string;\n }\n | {\n type: \"drop-index\";\n table: string;\n name: string;\n }\n | {\n type: \"add-index\";\n table: string;\n columns: string[];\n name: string;\n unique: boolean;\n }\n | CustomOperation;\n\nexport type CustomOperation = {\n type: \"custom\";\n} & Record<string, unknown>;\n\nexport type TableOperation =\n | {\n type: \"create-table\";\n name: string;\n columns: ColumnInfo[];\n }\n | {\n type: \"drop-table\";\n name: string;\n }\n | {\n type: \"alter-table\";\n name: string;\n value: ColumnOperation[];\n }\n | {\n type: \"rename-table\";\n from: string;\n to: string;\n };\n\nexport type ColumnOperation =\n | {\n type: \"rename-column\";\n from: string;\n to: string;\n }\n | {\n type: \"drop-column\";\n name: string;\n }\n | {\n /**\n * Note: unique constraints are not created, please use dedicated operations like `add-index` instead\n */\n type: \"create-column\";\n value: ColumnInfo;\n }\n | {\n /**\n * warning: Not supported by SQLite\n */\n type: \"update-column\";\n name: string;\n /**\n * For databases like MySQL, it 
requires the full definition for any modify column statement.\n * Hence, you need to specify the full information of your column here.\n *\n * Then, opt-in for in-detail modification for other databases that supports changing data type/nullable/default separately, such as PostgreSQL.\n *\n * Note: unique constraints are not updated, please use dedicated operations like `add-index` instead\n */\n value: ColumnInfo;\n\n updateNullable: boolean;\n updateDefault: boolean;\n updateDataType: boolean;\n };\n\nexport function isUpdated(op: Extract<ColumnOperation, { type: \"update-column\" }>): boolean {\n return op.updateDataType || op.updateDefault || op.updateNullable;\n}\n"],"mappings":";AAoHA,SAAgB,UAAU,IAAkE;AAC1F,QAAO,GAAG,kBAAkB,GAAG,iBAAiB,GAAG"}
package/dist/mod.d.ts CHANGED
@@ -43,14 +43,6 @@ declare class FragnoDatabase<const T extends AnySchema> {
43
43
  get namespace(): string;
44
44
  get schema(): T;
45
45
  get adapter(): DatabaseAdapter<void>;
46
- generateSchema(options?: {
47
- path?: string;
48
- toVersion?: number;
49
- fromVersion?: number;
50
- }): Promise<{
51
- schema: string;
52
- path: string;
53
- }>;
54
46
  }
55
47
  declare function defineFragnoDatabase<const TSchema extends AnySchema>(options: CreateFragnoDatabaseDefinitionOptions<TSchema>): FragnoDatabaseDefinition<TSchema>;
56
48
  //#endregion
package/dist/mod.d.ts.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"mod.d.ts","names":[],"sources":["../src/mod.ts"],"sourcesContent":[],"mappings":";;;;;;cAMa;cACA;AADA,UAGI,qCAHkD,CAAA,UAGF,SAHE,CAAA,CAAA;EACtD,SAAA,EAAA,MAAA;EAEI,MAAA,EAEP,CAFO;AAKjB;AAoBa,iBApBG,gBAAA,CAoBqB,KAAA,EAAA,OAAA,CAAA,EAAA,KAAA,IApBsB,cAoBtB,CApBqC,SAoBrC,CAAA;;;;;;AAoBc,cApBtC,wBAoBsC,CAAA,gBApBG,SAoBH,CAAA,CAAA;EAAf,CAAA,OAAA;EAAc,WAAA,CAAA,OAAA,EAhB3B,qCAgB2B,CAhBW,CAgBX,CAAA;EAarC,IAAA,SAAA,CAAA,CAAA,EAAc,MAAA;EAAiB,IAAA,MAAA,CAAA,CAAA,EApBhC,CAoBgC;EAKQ;;;EAM7C,MAAA,CAAA,OAAA,EAxBW,eAwBX,CAAA,EAxB6B,cAwB7B,CAxB4C,CAwB5C,CAAA;;;;;;AAoCM,cA/CA,cA+CA,CAAA,gBA/C+B,SA+C/B,CAAA,CAAA;EAQP,CAAA,OAAA;EAAO,WAAA,CAAA,OAAA,EAAA;IA+DG,SAAA,EAAA,MAAA;IAA2C,MAAA,EAjHP,CAiHO;IACV,OAAA,EAlHe,eAkHf;EAAtC,CAAA;EACiB,KA7GrB,wBAAA,GA6GqB,EAAA,OA7Ge,wBA6Gf;EAAzB,YAAA,CAAA,CAAA,EAzGqB,OAyGrB,CAzG6B,aAyG7B,CAzG2C,CAyG3C,CAAA,CAAA;EAAwB,aAAA,CAAA,CAAA,EA7FF,OA6FE,CAAA,OAAA,CAAA;;gBA7Ef;iBAIC;;;;;MAQP;;;;;iBA+DU,2CAA2C,oBAChD,sCAAsC,WAC9C,yBAAyB"}
1
+ {"version":3,"file":"mod.d.ts","names":[],"sources":["../src/mod.ts"],"sourcesContent":[],"mappings":";;;;;;cAMa;cACA;AADA,UAGI,qCAHkD,CAAA,UAGF,SAHE,CAAA,CAAA;EACtD,SAAA,EAAA,MAAA;EAEI,MAAA,EAEP,CAFO;AAKjB;AAoBa,iBApBG,gBAAA,CAoBqB,KAAA,EAAA,OAAA,CAAA,EAAA,KAAA,IApBsB,cAoBtB,CApBqC,SAoBrC,CAAA;;;;;;AAoBc,cApBtC,wBAoBsC,CAAA,gBApBG,SAoBH,CAAA,CAAA;EAAf,CAAA,OAAA;EAAc,WAAA,CAAA,OAAA,EAhB3B,qCAgB2B,CAhBW,CAgBX,CAAA;EAarC,IAAA,SAAA,CAAA,CAAA,EAAc,MAAA;EAAiB,IAAA,MAAA,CAAA,CAAA,EApBhC,CAoBgC;EAKQ;;;EAM7C,MAAA,CAAA,OAAA,EAxBW,eAwBX,CAAA,EAxB6B,cAwB7B,CAxB4C,CAwB5C,CAAA;;;;;;AAoCM,cA/CA,cA+CA,CAAA,gBA/C+B,SA+C/B,CAAA,CAAA;EAAA,CAAA,OAAA;EAKG,WAAA,CAAA,OAAA,EAAA;IAA2C,SAAA,EAAA,MAAA;IACV,MAAA,EAhDG,CAgDH;IAAtC,OAAA,EAhDqD,eAgDrD;EACiB,CAAA;EAAzB,KA3CI,wBAAA,GA2CJ,EAAA,OA3CwC,wBA2CxC;EAAwB,YAAA,CAAA,CAAA,EAvCH,OAuCG,CAvCK,aAuCL,CAvCmB,CAuCnB,CAAA,CAAA;mBA3BF;;gBAgBb;iBAIC;;iBAKG,2CAA2C,oBAChD,sCAAsC,WAC9C,yBAAyB"}
package/dist/mod.js CHANGED
@@ -73,38 +73,6 @@ var FragnoDatabase = class {
73
73
  get adapter() {
74
74
  return this.#adapter;
75
75
  }
76
- async generateSchema(options) {
77
- const adapter = this.#adapter;
78
- if (adapter.createSchemaGenerator) {
79
- if (options?.toVersion !== void 0 || options?.fromVersion !== void 0) console.warn("⚠️ toVersion and fromVersion are not supported for schema generation.");
80
- const generator = adapter.createSchemaGenerator(this.#schema, this.#namespace);
81
- const defaultPath = options?.path ?? "schema.ts";
82
- return generator.generateSchema({
83
- path: defaultPath,
84
- toVersion: options?.toVersion,
85
- fromVersion: options?.fromVersion
86
- });
87
- }
88
- if (adapter.createMigrationEngine) {
89
- const migrator = adapter.createMigrationEngine(this.#schema, this.#namespace);
90
- const targetVersion = options?.toVersion ?? this.#schema.version;
91
- const sourceVersion = options?.fromVersion;
92
- const currentVersion = sourceVersion ?? await migrator.getVersion();
93
- const defaultPath = options?.path ?? (migrator.getDefaultFileName ? migrator.getDefaultFileName(this.#namespace, currentVersion, targetVersion) : "schema.sql");
94
- const preparedMigration = await migrator.prepareMigrationTo(targetVersion, {
95
- updateSettings: true,
96
- fromVersion: sourceVersion
97
- });
98
- if (!preparedMigration.getSQL) throw new Error("Migration engine does not support SQL generation. Ensure your adapter's migration engine provides getSQL().");
99
- const sql = preparedMigration.getSQL();
100
- if (!sql.trim()) throw new Error("No migrations needed. Database is already at the target version.");
101
- return {
102
- schema: sql,
103
- path: defaultPath
104
- };
105
- }
106
- throw new Error("Adapter does not support schema generation. Ensure your adapter implements either createSchemaGenerator or createMigrationEngine.");
107
- }
108
76
  };
109
77
  function defineFragnoDatabase(options) {
110
78
  return new FragnoDatabaseDefinition(options);
package/dist/mod.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"mod.js","names":["#namespace","#schema","#adapter"],"sources":["../src/mod.ts"],"sourcesContent":["import type { DatabaseAdapter } from \"./adapters/adapters\";\nimport type { AnySchema } from \"./schema/create\";\nimport type { AbstractQuery } from \"./query/query\";\n\nexport type { DatabaseAdapter };\n\nexport const fragnoDatabaseFakeSymbol = \"$fragno-database\" as const;\nexport const fragnoDatabaseLibraryVersion = \"0.1\" as const;\n\nexport interface CreateFragnoDatabaseDefinitionOptions<T extends AnySchema> {\n namespace: string;\n schema: T;\n}\n\nexport function isFragnoDatabase(value: unknown): value is FragnoDatabase<AnySchema> {\n if (value instanceof FragnoDatabase) {\n return true;\n }\n\n if (typeof value !== \"object\" || value === null) {\n return false;\n }\n\n return (\n fragnoDatabaseFakeSymbol in value &&\n value[fragnoDatabaseFakeSymbol] === fragnoDatabaseFakeSymbol\n );\n}\n\n/**\n * Definition of a Fragno database schema and namespace.\n * Created by library authors using defineFragnoDatabase().\n * Apps instantiate it by calling .create(adapter).\n */\nexport class FragnoDatabaseDefinition<const T extends AnySchema> {\n #namespace: string;\n #schema: T;\n\n constructor(options: CreateFragnoDatabaseDefinitionOptions<T>) {\n this.#namespace = options.namespace;\n this.#schema = options.schema;\n }\n\n get namespace() {\n return this.#namespace;\n }\n\n get schema() {\n return this.#schema;\n }\n\n /**\n * Creates a FragnoDatabase instance by binding an adapter to this definition.\n */\n create(adapter: DatabaseAdapter): FragnoDatabase<T> {\n return new FragnoDatabase({\n namespace: this.#namespace,\n schema: this.#schema,\n adapter,\n });\n }\n}\n\n/**\n * A Fragno database instance with a bound adapter.\n * Created from a FragnoDatabaseDefinition by calling .create(adapter).\n */\nexport class FragnoDatabase<const T extends AnySchema> {\n #namespace: string;\n #schema: T;\n #adapter: DatabaseAdapter;\n\n 
constructor(options: { namespace: string; schema: T; adapter: DatabaseAdapter }) {\n this.#namespace = options.namespace;\n this.#schema = options.schema;\n this.#adapter = options.adapter;\n }\n\n get [fragnoDatabaseFakeSymbol](): typeof fragnoDatabaseFakeSymbol {\n return fragnoDatabaseFakeSymbol;\n }\n\n async createClient(): Promise<AbstractQuery<T>> {\n const dbVersion = await this.#adapter.getSchemaVersion(this.#namespace);\n if (dbVersion !== this.#schema.version.toString()) {\n throw new Error(\n `Database is not at expected version. Did you forget to run migrations?` +\n ` Current version: ${dbVersion}, Expected version: ${this.#schema.version}`,\n );\n }\n\n return this.#adapter.createQueryEngine(this.#schema, this.#namespace);\n }\n\n async runMigrations(): Promise<boolean> {\n if (!this.#adapter.createMigrationEngine) {\n throw new Error(\"Migration engine not supported for this adapter.\");\n }\n\n const migrator = this.#adapter.createMigrationEngine(this.#schema, this.#namespace);\n const preparedMigration = await migrator.prepareMigration();\n await preparedMigration.execute();\n\n return preparedMigration.operations.length > 0;\n }\n\n get namespace() {\n return this.#namespace;\n }\n\n get schema() {\n return this.#schema;\n }\n\n get adapter() {\n return this.#adapter;\n }\n\n async generateSchema(options?: {\n path?: string;\n toVersion?: number;\n fromVersion?: number;\n }): Promise<{ schema: string; path: string }> {\n const adapter = this.#adapter;\n\n if (adapter.createSchemaGenerator) {\n if (options?.toVersion !== undefined || options?.fromVersion !== undefined) {\n console.warn(\"⚠️ toVersion and fromVersion are not supported for schema generation.\");\n }\n\n const generator = adapter.createSchemaGenerator(this.#schema, this.#namespace);\n const defaultPath = options?.path ?? 
\"schema.ts\";\n return generator.generateSchema({\n path: defaultPath,\n toVersion: options?.toVersion,\n fromVersion: options?.fromVersion,\n });\n }\n\n if (adapter.createMigrationEngine) {\n const migrator = adapter.createMigrationEngine(this.#schema, this.#namespace);\n const targetVersion = options?.toVersion ?? this.#schema.version;\n const sourceVersion = options?.fromVersion;\n\n // Get current version for file naming if not provided\n const currentVersion = sourceVersion ?? (await migrator.getVersion());\n\n // Determine the default path using the migrator's getDefaultFileName if available\n const defaultPath =\n options?.path ??\n (migrator.getDefaultFileName\n ? migrator.getDefaultFileName(this.#namespace, currentVersion, targetVersion)\n : \"schema.sql\");\n\n // Generate migration from source to target version\n const preparedMigration = await migrator.prepareMigrationTo(targetVersion, {\n updateSettings: true,\n fromVersion: sourceVersion,\n });\n\n if (!preparedMigration.getSQL) {\n throw new Error(\n \"Migration engine does not support SQL generation. Ensure your adapter's migration engine provides getSQL().\",\n );\n }\n\n const sql = preparedMigration.getSQL();\n\n // If no migrations needed, return informative message\n if (!sql.trim()) {\n throw new Error(\"No migrations needed. Database is already at the target version.\");\n }\n\n return {\n schema: sql,\n path: defaultPath,\n };\n }\n\n throw new Error(\n \"Adapter does not support schema generation. 
Ensure your adapter implements either createSchemaGenerator or createMigrationEngine.\",\n );\n }\n}\n\nexport function defineFragnoDatabase<const TSchema extends AnySchema>(\n options: CreateFragnoDatabaseDefinitionOptions<TSchema>,\n): FragnoDatabaseDefinition<TSchema> {\n return new FragnoDatabaseDefinition(options);\n}\n\nexport {\n defineFragmentWithDatabase,\n DatabaseFragmentBuilder,\n type FragnoPublicConfigWithDatabase,\n type DatabaseFragmentContext,\n} from \"./fragment\";\n"],"mappings":";;;AAMA,MAAa,2BAA2B;AACxC,MAAa,+BAA+B;AAO5C,SAAgB,iBAAiB,OAAoD;AACnF,KAAI,iBAAiB,eACnB,QAAO;AAGT,KAAI,OAAO,UAAU,YAAY,UAAU,KACzC,QAAO;AAGT,QACE,4BAA4B,SAC5B,MAAM,8BAA8B;;;;;;;AASxC,IAAa,2BAAb,MAAiE;CAC/D;CACA;CAEA,YAAY,SAAmD;AAC7D,QAAKA,YAAa,QAAQ;AAC1B,QAAKC,SAAU,QAAQ;;CAGzB,IAAI,YAAY;AACd,SAAO,MAAKD;;CAGd,IAAI,SAAS;AACX,SAAO,MAAKC;;;;;CAMd,OAAO,SAA6C;AAClD,SAAO,IAAI,eAAe;GACxB,WAAW,MAAKD;GAChB,QAAQ,MAAKC;GACb;GACD,CAAC;;;;;;;AAQN,IAAa,iBAAb,MAAuD;CACrD;CACA;CACA;CAEA,YAAY,SAAqE;AAC/E,QAAKD,YAAa,QAAQ;AAC1B,QAAKC,SAAU,QAAQ;AACvB,QAAKC,UAAW,QAAQ;;CAG1B,KAAK,4BAA6D;AAChE,SAAO;;CAGT,MAAM,eAA0C;EAC9C,MAAM,YAAY,MAAM,MAAKA,QAAS,iBAAiB,MAAKF,UAAW;AACvE,MAAI,cAAc,MAAKC,OAAQ,QAAQ,UAAU,CAC/C,OAAM,IAAI,MACR,2FACuB,UAAU,sBAAsB,MAAKA,OAAQ,UACrE;AAGH,SAAO,MAAKC,QAAS,kBAAkB,MAAKD,QAAS,MAAKD,UAAW;;CAGvE,MAAM,gBAAkC;AACtC,MAAI,CAAC,MAAKE,QAAS,sBACjB,OAAM,IAAI,MAAM,mDAAmD;EAIrE,MAAM,oBAAoB,MADT,MAAKA,QAAS,sBAAsB,MAAKD,QAAS,MAAKD,UAAW,CAC1C,kBAAkB;AAC3D,QAAM,kBAAkB,SAAS;AAEjC,SAAO,kBAAkB,WAAW,SAAS;;CAG/C,IAAI,YAAY;AACd,SAAO,MAAKA;;CAGd,IAAI,SAAS;AACX,SAAO,MAAKC;;CAGd,IAAI,UAAU;AACZ,SAAO,MAAKC;;CAGd,MAAM,eAAe,SAIyB;EAC5C,MAAM,UAAU,MAAKA;AAErB,MAAI,QAAQ,uBAAuB;AACjC,OAAI,SAAS,cAAc,UAAa,SAAS,gBAAgB,OAC/D,SAAQ,KAAK,wEAAwE;GAGvF,MAAM,YAAY,QAAQ,sBAAsB,MAAKD,QAAS,MAAKD,UAAW;GAC9E,MAAM,cAAc,SAAS,QAAQ;AACrC,UAAO,UAAU,eAAe;IAC9B,MAAM;IACN,WAAW,SAAS;IACpB,aAAa,SAAS;IACvB,CAAC;;AAGJ,MAAI,QAAQ,uBAAuB;GACjC,MAAM,WAAW,QAAQ,sBAAsB,MAAKC,QAAS,MAAKD,UAAW;GAC7E,MAAM,gBAAgB,SAAS,aAAa,MAAKC,OAAQ;GACzD,MAAM,gBAAg
B,SAAS;GAG/B,MAAM,iBAAiB,iBAAkB,MAAM,SAAS,YAAY;GAGpE,MAAM,cACJ,SAAS,SACR,SAAS,qBACN,SAAS,mBAAmB,MAAKD,WAAY,gBAAgB,cAAc,GAC3E;GAGN,MAAM,oBAAoB,MAAM,SAAS,mBAAmB,eAAe;IACzE,gBAAgB;IAChB,aAAa;IACd,CAAC;AAEF,OAAI,CAAC,kBAAkB,OACrB,OAAM,IAAI,MACR,8GACD;GAGH,MAAM,MAAM,kBAAkB,QAAQ;AAGtC,OAAI,CAAC,IAAI,MAAM,CACb,OAAM,IAAI,MAAM,mEAAmE;AAGrF,UAAO;IACL,QAAQ;IACR,MAAM;IACP;;AAGH,QAAM,IAAI,MACR,oIACD;;;AAIL,SAAgB,qBACd,SACmC;AACnC,QAAO,IAAI,yBAAyB,QAAQ"}
1
+ {"version":3,"file":"mod.js","names":["#namespace","#schema","#adapter"],"sources":["../src/mod.ts"],"sourcesContent":["import type { DatabaseAdapter } from \"./adapters/adapters\";\nimport type { AnySchema } from \"./schema/create\";\nimport type { AbstractQuery } from \"./query/query\";\n\nexport type { DatabaseAdapter };\n\nexport const fragnoDatabaseFakeSymbol = \"$fragno-database\" as const;\nexport const fragnoDatabaseLibraryVersion = \"0.1\" as const;\n\nexport interface CreateFragnoDatabaseDefinitionOptions<T extends AnySchema> {\n namespace: string;\n schema: T;\n}\n\nexport function isFragnoDatabase(value: unknown): value is FragnoDatabase<AnySchema> {\n if (value instanceof FragnoDatabase) {\n return true;\n }\n\n if (typeof value !== \"object\" || value === null) {\n return false;\n }\n\n return (\n fragnoDatabaseFakeSymbol in value &&\n value[fragnoDatabaseFakeSymbol] === fragnoDatabaseFakeSymbol\n );\n}\n\n/**\n * Definition of a Fragno database schema and namespace.\n * Created by library authors using defineFragnoDatabase().\n * Apps instantiate it by calling .create(adapter).\n */\nexport class FragnoDatabaseDefinition<const T extends AnySchema> {\n #namespace: string;\n #schema: T;\n\n constructor(options: CreateFragnoDatabaseDefinitionOptions<T>) {\n this.#namespace = options.namespace;\n this.#schema = options.schema;\n }\n\n get namespace() {\n return this.#namespace;\n }\n\n get schema() {\n return this.#schema;\n }\n\n /**\n * Creates a FragnoDatabase instance by binding an adapter to this definition.\n */\n create(adapter: DatabaseAdapter): FragnoDatabase<T> {\n return new FragnoDatabase({\n namespace: this.#namespace,\n schema: this.#schema,\n adapter,\n });\n }\n}\n\n/**\n * A Fragno database instance with a bound adapter.\n * Created from a FragnoDatabaseDefinition by calling .create(adapter).\n */\nexport class FragnoDatabase<const T extends AnySchema> {\n #namespace: string;\n #schema: T;\n #adapter: DatabaseAdapter;\n\n 
constructor(options: { namespace: string; schema: T; adapter: DatabaseAdapter }) {\n this.#namespace = options.namespace;\n this.#schema = options.schema;\n this.#adapter = options.adapter;\n }\n\n get [fragnoDatabaseFakeSymbol](): typeof fragnoDatabaseFakeSymbol {\n return fragnoDatabaseFakeSymbol;\n }\n\n async createClient(): Promise<AbstractQuery<T>> {\n const dbVersion = await this.#adapter.getSchemaVersion(this.#namespace);\n if (dbVersion !== this.#schema.version.toString()) {\n throw new Error(\n `Database is not at expected version. Did you forget to run migrations?` +\n ` Current version: ${dbVersion}, Expected version: ${this.#schema.version}`,\n );\n }\n\n return this.#adapter.createQueryEngine(this.#schema, this.#namespace);\n }\n\n async runMigrations(): Promise<boolean> {\n if (!this.#adapter.createMigrationEngine) {\n throw new Error(\"Migration engine not supported for this adapter.\");\n }\n\n const migrator = this.#adapter.createMigrationEngine(this.#schema, this.#namespace);\n const preparedMigration = await migrator.prepareMigration();\n await preparedMigration.execute();\n\n return preparedMigration.operations.length > 0;\n }\n\n get namespace() {\n return this.#namespace;\n }\n\n get schema() {\n return this.#schema;\n }\n\n get adapter() {\n return this.#adapter;\n }\n}\n\nexport function defineFragnoDatabase<const TSchema extends AnySchema>(\n options: CreateFragnoDatabaseDefinitionOptions<TSchema>,\n): FragnoDatabaseDefinition<TSchema> {\n return new FragnoDatabaseDefinition(options);\n}\n\nexport {\n defineFragmentWithDatabase,\n DatabaseFragmentBuilder,\n type FragnoPublicConfigWithDatabase,\n type DatabaseFragmentContext,\n} from 
\"./fragment\";\n"],"mappings":";;;AAMA,MAAa,2BAA2B;AACxC,MAAa,+BAA+B;AAO5C,SAAgB,iBAAiB,OAAoD;AACnF,KAAI,iBAAiB,eACnB,QAAO;AAGT,KAAI,OAAO,UAAU,YAAY,UAAU,KACzC,QAAO;AAGT,QACE,4BAA4B,SAC5B,MAAM,8BAA8B;;;;;;;AASxC,IAAa,2BAAb,MAAiE;CAC/D;CACA;CAEA,YAAY,SAAmD;AAC7D,QAAKA,YAAa,QAAQ;AAC1B,QAAKC,SAAU,QAAQ;;CAGzB,IAAI,YAAY;AACd,SAAO,MAAKD;;CAGd,IAAI,SAAS;AACX,SAAO,MAAKC;;;;;CAMd,OAAO,SAA6C;AAClD,SAAO,IAAI,eAAe;GACxB,WAAW,MAAKD;GAChB,QAAQ,MAAKC;GACb;GACD,CAAC;;;;;;;AAQN,IAAa,iBAAb,MAAuD;CACrD;CACA;CACA;CAEA,YAAY,SAAqE;AAC/E,QAAKD,YAAa,QAAQ;AAC1B,QAAKC,SAAU,QAAQ;AACvB,QAAKC,UAAW,QAAQ;;CAG1B,KAAK,4BAA6D;AAChE,SAAO;;CAGT,MAAM,eAA0C;EAC9C,MAAM,YAAY,MAAM,MAAKA,QAAS,iBAAiB,MAAKF,UAAW;AACvE,MAAI,cAAc,MAAKC,OAAQ,QAAQ,UAAU,CAC/C,OAAM,IAAI,MACR,2FACuB,UAAU,sBAAsB,MAAKA,OAAQ,UACrE;AAGH,SAAO,MAAKC,QAAS,kBAAkB,MAAKD,QAAS,MAAKD,UAAW;;CAGvE,MAAM,gBAAkC;AACtC,MAAI,CAAC,MAAKE,QAAS,sBACjB,OAAM,IAAI,MAAM,mDAAmD;EAIrE,MAAM,oBAAoB,MADT,MAAKA,QAAS,sBAAsB,MAAKD,QAAS,MAAKD,UAAW,CAC1C,kBAAkB;AAC3D,QAAM,kBAAkB,SAAS;AAEjC,SAAO,kBAAkB,WAAW,SAAS;;CAG/C,IAAI,YAAY;AACd,SAAO,MAAKA;;CAGd,IAAI,SAAS;AACX,SAAO,MAAKC;;CAGd,IAAI,UAAU;AACZ,SAAO,MAAKC;;;AAIhB,SAAgB,qBACd,SACmC;AACnC,QAAO,IAAI,yBAAyB,QAAQ"}
@@ -1 +1 @@
1
- {"version":3,"file":"condition-builder.js","names":["builder: ConditionBuilder<Columns>"],"sources":["../../src/query/condition-builder.ts"],"sourcesContent":["import type { AnyColumn, FragnoId, IdColumn } from \"../schema/create\";\n\nexport type ConditionType = \"compare\" | \"and\" | \"or\" | \"not\";\n\nexport type Condition =\n | {\n type: \"compare\";\n a: AnyColumn;\n operator: Operator;\n b: AnyColumn | unknown | null;\n }\n | {\n type: \"or\" | \"and\";\n items: Condition[];\n }\n | {\n type: \"not\";\n item: Condition;\n };\n\n// TODO: we temporarily dropped support for comparing against another column, because Prisma ORM still have problems with it.\n\n/**\n * Helper type that allows FragnoId for ID columns and reference columns (bigint).\n * Used in ConditionBuilder to accept FragnoId values in where conditions.\n */\ntype AcceptsFragnoId<T extends AnyColumn> = T extends IdColumn\n ? T[\"$in\"] | FragnoId\n : T[\"$in\"] extends bigint\n ? T[\"$in\"] | FragnoId\n : T[\"$in\"];\n\nexport type ConditionBuilder<Columns extends Record<string, AnyColumn>> = {\n <ColName extends keyof Columns>(\n a: ColName,\n operator: (typeof valueOperators)[number] | (typeof stringOperators)[number],\n b: AcceptsFragnoId<Columns[ColName]> | null,\n ): Condition;\n\n <ColName extends keyof Columns>(\n a: ColName,\n operator: (typeof arrayOperators)[number],\n b: AcceptsFragnoId<Columns[ColName]>[],\n ): Condition;\n\n /**\n * Boolean values\n */\n <ColName extends keyof Columns>(a: ColName): Condition;\n\n and: (...v: (Condition | boolean)[]) => Condition | boolean;\n or: (...v: (Condition | boolean)[]) => Condition | boolean;\n not: (v: Condition | boolean) => Condition | boolean;\n\n isNull: (a: keyof Columns) => Condition;\n isNotNull: (a: keyof Columns) => Condition;\n};\n\n// replacement for `like` (Prisma doesn't support `like`)\nconst stringOperators = [\n \"contains\",\n \"starts with\",\n \"ends with\",\n\n \"not contains\",\n \"not starts with\",\n \"not ends 
with\",\n // excluded `regexp` since MSSQL doesn't support it, may re-consider\n] as const;\n\nconst arrayOperators = [\"in\", \"not in\"] as const;\n\nconst valueOperators = [\"=\", \"!=\", \">\", \">=\", \"<\", \"<=\", \"is\", \"is not\"] as const;\n\n// JSON specific operators are not included, some databases don't support them\n// `match` requires additional extensions & configurations on SQLite and PostgreSQL\n// MySQL & SQLite requires workarounds to support `ilike`\nexport const operators = [...valueOperators, ...arrayOperators, ...stringOperators] as const;\n\nexport type Operator = (typeof operators)[number];\n\nexport function createBuilder<Columns extends Record<string, AnyColumn>>(\n columns: Columns,\n): ConditionBuilder<Columns> {\n function col(name: keyof Columns) {\n const out = columns[name];\n if (!out) {\n throw new Error(`Invalid column name ${String(name)}`);\n }\n\n return out;\n }\n\n const builder: ConditionBuilder<Columns> = (...args: [string, Operator, unknown] | [string]) => {\n if (args.length === 3) {\n const [a, operator, b] = args;\n\n if (!operators.includes(operator)) throw new Error(`Unsupported operator: ${operator}`);\n\n return {\n type: \"compare\",\n a: col(a),\n b,\n operator,\n };\n }\n\n return {\n type: \"compare\",\n a: col(args[0]),\n operator: \"=\",\n b: true,\n };\n };\n\n builder.isNull = (a) => builder(a, \"is\", null);\n builder.isNotNull = (a) => builder(a, \"is not\", null);\n builder.not = (condition) => {\n if (typeof condition === \"boolean\") return !condition;\n\n return {\n type: \"not\",\n item: condition,\n };\n };\n\n builder.or = (...conditions) => {\n const out = {\n type: \"or\",\n items: [] as Condition[],\n } as const;\n\n for (const item of conditions) {\n if (item === true) return true;\n if (item === false) continue;\n\n out.items.push(item);\n }\n\n if (out.items.length === 0) return false;\n return out;\n };\n\n builder.and = (...conditions) => {\n const out = {\n type: \"and\",\n items: [] as 
Condition[],\n } as const;\n\n for (const item of conditions) {\n if (item === true) continue;\n if (item === false) return false;\n\n out.items.push(item);\n }\n\n if (out.items.length === 0) return true;\n return out;\n };\n\n return builder;\n}\n\nexport function buildCondition<T, Columns extends Record<string, AnyColumn>>(\n columns: Columns,\n input: (builder: ConditionBuilder<Columns>) => T,\n): T {\n return input(createBuilder(columns));\n}\n\n/**\n * Create a ConditionBuilder that only allows comparisons on indexed columns.\n * Used in Unit of Work to ensure queries can leverage indexes for optimal performance.\n *\n * @param columns - The full set of columns from the table\n * @param indexedColumnNames - Set of column names that are part of indexes\n * @returns A ConditionBuilder restricted to indexed columns only\n *\n * @example\n * ```ts\n * const builder = createIndexedBuilder(\n * table.columns,\n * new Set([\"id\", \"userId\", \"createdAt\"])\n * );\n * const condition = builder(\"userId\", \"=\", \"123\");\n * ```\n */\nexport function createIndexedBuilder<Columns extends Record<string, AnyColumn>>(\n columns: Columns,\n indexedColumnNames: Set<string>,\n): ConditionBuilder<Columns> {\n function col(name: keyof Columns) {\n const columnName = String(name);\n\n if (!indexedColumnNames.has(columnName)) {\n throw new Error(\n `Column \"${columnName}\" is not indexed. Only indexed columns can be used in Unit of Work queries. 
` +\n `Available indexed columns: ${Array.from(indexedColumnNames).join(\", \")}`,\n );\n }\n\n const out = columns[name];\n if (!out) {\n throw new Error(`Invalid column name ${columnName}`);\n }\n\n return out;\n }\n\n const builder: ConditionBuilder<Columns> = (...args: [string, Operator, unknown] | [string]) => {\n if (args.length === 3) {\n const [a, operator, b] = args;\n\n if (!operators.includes(operator)) {\n throw new Error(`Unsupported operator: ${operator}`);\n }\n\n return {\n type: \"compare\",\n a: col(a),\n b,\n operator,\n };\n }\n\n return {\n type: \"compare\",\n a: col(args[0]),\n operator: \"=\",\n b: true,\n };\n };\n\n builder.isNull = (a) => builder(a, \"is\", null);\n builder.isNotNull = (a) => builder(a, \"is not\", null);\n builder.not = (condition) => {\n if (typeof condition === \"boolean\") {\n return !condition;\n }\n\n return {\n type: \"not\",\n item: condition,\n };\n };\n\n builder.or = (...conditions) => {\n const out = {\n type: \"or\",\n items: [] as Condition[],\n } as const;\n\n for (const item of conditions) {\n if (item === true) {\n return true;\n }\n if (item === false) {\n continue;\n }\n\n out.items.push(item);\n }\n\n if (out.items.length === 0) {\n return false;\n }\n return out;\n };\n\n builder.and = (...conditions) => {\n const out = {\n type: \"and\",\n items: [] as Condition[],\n } as const;\n\n for (const item of conditions) {\n if (item === true) {\n continue;\n }\n if (item === false) {\n return false;\n }\n\n out.items.push(item);\n }\n\n if (out.items.length === 0) {\n return true;\n }\n return out;\n };\n\n return 
builder;\n}\n"],"mappings":";AA2DA,MAAM,kBAAkB;CACtB;CACA;CACA;CAEA;CACA;CACA;CAED;AAED,MAAM,iBAAiB,CAAC,MAAM,SAAS;AAEvC,MAAM,iBAAiB;CAAC;CAAK;CAAM;CAAK;CAAM;CAAK;CAAM;CAAM;CAAS;AAKxE,MAAa,YAAY;CAAC,GAAG;CAAgB,GAAG;CAAgB,GAAG;CAAgB;AAInF,SAAgB,cACd,SAC2B;CAC3B,SAAS,IAAI,MAAqB;EAChC,MAAM,MAAM,QAAQ;AACpB,MAAI,CAAC,IACH,OAAM,IAAI,MAAM,uBAAuB,OAAO,KAAK,GAAG;AAGxD,SAAO;;CAGT,MAAMA,WAAsC,GAAG,SAAiD;AAC9F,MAAI,KAAK,WAAW,GAAG;GACrB,MAAM,CAAC,GAAG,UAAU,KAAK;AAEzB,OAAI,CAAC,UAAU,SAAS,SAAS,CAAE,OAAM,IAAI,MAAM,yBAAyB,WAAW;AAEvF,UAAO;IACL,MAAM;IACN,GAAG,IAAI,EAAE;IACT;IACA;IACD;;AAGH,SAAO;GACL,MAAM;GACN,GAAG,IAAI,KAAK,GAAG;GACf,UAAU;GACV,GAAG;GACJ;;AAGH,SAAQ,UAAU,MAAM,QAAQ,GAAG,MAAM,KAAK;AAC9C,SAAQ,aAAa,MAAM,QAAQ,GAAG,UAAU,KAAK;AACrD,SAAQ,OAAO,cAAc;AAC3B,MAAI,OAAO,cAAc,UAAW,QAAO,CAAC;AAE5C,SAAO;GACL,MAAM;GACN,MAAM;GACP;;AAGH,SAAQ,MAAM,GAAG,eAAe;EAC9B,MAAM,MAAM;GACV,MAAM;GACN,OAAO,EAAE;GACV;AAED,OAAK,MAAM,QAAQ,YAAY;AAC7B,OAAI,SAAS,KAAM,QAAO;AAC1B,OAAI,SAAS,MAAO;AAEpB,OAAI,MAAM,KAAK,KAAK;;AAGtB,MAAI,IAAI,MAAM,WAAW,EAAG,QAAO;AACnC,SAAO;;AAGT,SAAQ,OAAO,GAAG,eAAe;EAC/B,MAAM,MAAM;GACV,MAAM;GACN,OAAO,EAAE;GACV;AAED,OAAK,MAAM,QAAQ,YAAY;AAC7B,OAAI,SAAS,KAAM;AACnB,OAAI,SAAS,MAAO,QAAO;AAE3B,OAAI,MAAM,KAAK,KAAK;;AAGtB,MAAI,IAAI,MAAM,WAAW,EAAG,QAAO;AACnC,SAAO;;AAGT,QAAO;;AAGT,SAAgB,eACd,SACA,OACG;AACH,QAAO,MAAM,cAAc,QAAQ,CAAC"}
1
+ {"version":3,"file":"condition-builder.js","names":["builder: ConditionBuilder<Columns>"],"sources":["../../src/query/condition-builder.ts"],"sourcesContent":["import type { AnyColumn, FragnoId, IdColumn } from \"../schema/create\";\n\nexport type ConditionType = \"compare\" | \"and\" | \"or\" | \"not\";\n\nexport type Condition =\n | {\n type: \"compare\";\n a: AnyColumn;\n operator: Operator;\n b: AnyColumn | unknown | null;\n }\n | {\n type: \"or\" | \"and\";\n items: Condition[];\n }\n | {\n type: \"not\";\n item: Condition;\n };\n\n// TODO: we temporarily dropped support for comparing against another column, because Prisma ORM still have problems with it.\n\n/**\n * Helper type that allows FragnoId for ID columns and reference columns (bigint).\n * Used in ConditionBuilder to accept FragnoId values in where conditions.\n */\ntype AcceptsFragnoId<T extends AnyColumn> = T extends IdColumn\n ? T[\"$in\"] | FragnoId\n : T[\"$in\"] extends bigint\n ? T[\"$in\"] | FragnoId\n : T[\"$in\"];\n\nexport type ConditionBuilder<Columns extends Record<string, AnyColumn>> = {\n <ColName extends keyof Columns>(\n a: ColName,\n operator: (typeof valueOperators)[number] | (typeof stringOperators)[number],\n b: AcceptsFragnoId<Columns[ColName]> | null,\n ): Condition;\n\n <ColName extends keyof Columns>(\n a: ColName,\n operator: (typeof arrayOperators)[number],\n b: AcceptsFragnoId<Columns[ColName]>[],\n ): Condition;\n\n /**\n * Boolean values\n */\n <ColName extends keyof Columns>(a: ColName): Condition;\n\n and: (...v: (Condition | boolean)[]) => Condition | boolean;\n or: (...v: (Condition | boolean)[]) => Condition | boolean;\n not: (v: Condition | boolean) => Condition | boolean;\n\n isNull: (a: keyof Columns) => Condition;\n isNotNull: (a: keyof Columns) => Condition;\n};\n\n// replacement for `like` (Prisma doesn't support `like`)\nconst stringOperators = [\n \"contains\",\n \"starts with\",\n \"ends with\",\n\n \"not contains\",\n \"not starts with\",\n \"not ends 
with\",\n // excluded `regexp` since MSSQL doesn't support it, may re-consider\n] as const;\n\nconst arrayOperators = [\"in\", \"not in\"] as const;\n\nconst valueOperators = [\"=\", \"!=\", \">\", \">=\", \"<\", \"<=\", \"is\", \"is not\"] as const;\n\n// JSON specific operators are not included, some databases don't support them\n// `match` requires additional extensions & configurations on SQLite and PostgreSQL\n// MySQL & SQLite requires workarounds to support `ilike`\nexport const operators = [...valueOperators, ...arrayOperators, ...stringOperators] as const;\n\nexport type Operator = (typeof operators)[number];\n\nexport function createBuilder<Columns extends Record<string, AnyColumn>>(\n columns: Columns,\n): ConditionBuilder<Columns> {\n function col(name: keyof Columns) {\n const out = columns[name];\n if (!out) {\n throw new Error(`Invalid column name ${String(name)}`);\n }\n\n return out;\n }\n\n const builder: ConditionBuilder<Columns> = (...args: [string, Operator, unknown] | [string]) => {\n if (args.length === 3) {\n const [a, operator, b] = args;\n\n if (!operators.includes(operator)) {\n throw new Error(`Unsupported operator: ${operator}`);\n }\n\n return {\n type: \"compare\",\n a: col(a),\n b,\n operator,\n };\n }\n\n return {\n type: \"compare\",\n a: col(args[0]),\n operator: \"=\",\n b: true,\n };\n };\n\n builder.isNull = (a) => builder(a, \"is\", null);\n builder.isNotNull = (a) => builder(a, \"is not\", null);\n builder.not = (condition) => {\n if (typeof condition === \"boolean\") {\n return !condition;\n }\n\n return {\n type: \"not\",\n item: condition,\n };\n };\n\n builder.or = (...conditions) => {\n const out = {\n type: \"or\",\n items: [] as Condition[],\n } as const;\n\n for (const item of conditions) {\n if (item === true) {\n return true;\n }\n if (item === false) {\n continue;\n }\n\n out.items.push(item);\n }\n\n if (out.items.length === 0) {\n return false;\n }\n return out;\n };\n\n builder.and = (...conditions) => {\n const 
out = {\n type: \"and\",\n items: [] as Condition[],\n } as const;\n\n for (const item of conditions) {\n if (item === true) {\n continue;\n }\n if (item === false) {\n return false;\n }\n\n out.items.push(item);\n }\n\n if (out.items.length === 0) {\n return true;\n }\n return out;\n };\n\n return builder;\n}\n\nexport function buildCondition<T, Columns extends Record<string, AnyColumn>>(\n columns: Columns,\n input: (builder: ConditionBuilder<Columns>) => T,\n): T {\n return input(createBuilder(columns));\n}\n\n/**\n * Create a ConditionBuilder that only allows comparisons on indexed columns.\n * Used in Unit of Work to ensure queries can leverage indexes for optimal performance.\n *\n * @param columns - The full set of columns from the table\n * @param indexedColumnNames - Set of column names that are part of indexes\n * @returns A ConditionBuilder restricted to indexed columns only\n *\n * @example\n * ```ts\n * const builder = createIndexedBuilder(\n * table.columns,\n * new Set([\"id\", \"userId\", \"createdAt\"])\n * );\n * const condition = builder(\"userId\", \"=\", \"123\");\n * ```\n */\nexport function createIndexedBuilder<Columns extends Record<string, AnyColumn>>(\n columns: Columns,\n indexedColumnNames: Set<string>,\n): ConditionBuilder<Columns> {\n function col(name: keyof Columns) {\n const columnName = String(name);\n\n if (!indexedColumnNames.has(columnName)) {\n throw new Error(\n `Column \"${columnName}\" is not indexed. Only indexed columns can be used in Unit of Work queries. 
` +\n `Available indexed columns: ${Array.from(indexedColumnNames).join(\", \")}`,\n );\n }\n\n const out = columns[name];\n if (!out) {\n throw new Error(`Invalid column name ${columnName}`);\n }\n\n return out;\n }\n\n const builder: ConditionBuilder<Columns> = (...args: [string, Operator, unknown] | [string]) => {\n if (args.length === 3) {\n const [a, operator, b] = args;\n\n if (!operators.includes(operator)) {\n throw new Error(`Unsupported operator: ${operator}`);\n }\n\n return {\n type: \"compare\",\n a: col(a),\n b,\n operator,\n };\n }\n\n return {\n type: \"compare\",\n a: col(args[0]),\n operator: \"=\",\n b: true,\n };\n };\n\n builder.isNull = (a) => builder(a, \"is\", null);\n builder.isNotNull = (a) => builder(a, \"is not\", null);\n builder.not = (condition) => {\n if (typeof condition === \"boolean\") {\n return !condition;\n }\n\n return {\n type: \"not\",\n item: condition,\n };\n };\n\n builder.or = (...conditions) => {\n const out = {\n type: \"or\",\n items: [] as Condition[],\n } as const;\n\n for (const item of conditions) {\n if (item === true) {\n return true;\n }\n if (item === false) {\n continue;\n }\n\n out.items.push(item);\n }\n\n if (out.items.length === 0) {\n return false;\n }\n return out;\n };\n\n builder.and = (...conditions) => {\n const out = {\n type: \"and\",\n items: [] as Condition[],\n } as const;\n\n for (const item of conditions) {\n if (item === true) {\n continue;\n }\n if (item === false) {\n return false;\n }\n\n out.items.push(item);\n }\n\n if (out.items.length === 0) {\n return true;\n }\n return out;\n };\n\n return 
builder;\n}\n"],"mappings":";AA2DA,MAAM,kBAAkB;CACtB;CACA;CACA;CAEA;CACA;CACA;CAED;AAED,MAAM,iBAAiB,CAAC,MAAM,SAAS;AAEvC,MAAM,iBAAiB;CAAC;CAAK;CAAM;CAAK;CAAM;CAAK;CAAM;CAAM;CAAS;AAKxE,MAAa,YAAY;CAAC,GAAG;CAAgB,GAAG;CAAgB,GAAG;CAAgB;AAInF,SAAgB,cACd,SAC2B;CAC3B,SAAS,IAAI,MAAqB;EAChC,MAAM,MAAM,QAAQ;AACpB,MAAI,CAAC,IACH,OAAM,IAAI,MAAM,uBAAuB,OAAO,KAAK,GAAG;AAGxD,SAAO;;CAGT,MAAMA,WAAsC,GAAG,SAAiD;AAC9F,MAAI,KAAK,WAAW,GAAG;GACrB,MAAM,CAAC,GAAG,UAAU,KAAK;AAEzB,OAAI,CAAC,UAAU,SAAS,SAAS,CAC/B,OAAM,IAAI,MAAM,yBAAyB,WAAW;AAGtD,UAAO;IACL,MAAM;IACN,GAAG,IAAI,EAAE;IACT;IACA;IACD;;AAGH,SAAO;GACL,MAAM;GACN,GAAG,IAAI,KAAK,GAAG;GACf,UAAU;GACV,GAAG;GACJ;;AAGH,SAAQ,UAAU,MAAM,QAAQ,GAAG,MAAM,KAAK;AAC9C,SAAQ,aAAa,MAAM,QAAQ,GAAG,UAAU,KAAK;AACrD,SAAQ,OAAO,cAAc;AAC3B,MAAI,OAAO,cAAc,UACvB,QAAO,CAAC;AAGV,SAAO;GACL,MAAM;GACN,MAAM;GACP;;AAGH,SAAQ,MAAM,GAAG,eAAe;EAC9B,MAAM,MAAM;GACV,MAAM;GACN,OAAO,EAAE;GACV;AAED,OAAK,MAAM,QAAQ,YAAY;AAC7B,OAAI,SAAS,KACX,QAAO;AAET,OAAI,SAAS,MACX;AAGF,OAAI,MAAM,KAAK,KAAK;;AAGtB,MAAI,IAAI,MAAM,WAAW,EACvB,QAAO;AAET,SAAO;;AAGT,SAAQ,OAAO,GAAG,eAAe;EAC/B,MAAM,MAAM;GACV,MAAM;GACN,OAAO,EAAE;GACV;AAED,OAAK,MAAM,QAAQ,YAAY;AAC7B,OAAI,SAAS,KACX;AAEF,OAAI,SAAS,MACX,QAAO;AAGT,OAAI,MAAM,KAAK,KAAK;;AAGtB,MAAI,IAAI,MAAM,WAAW,EACvB,QAAO;AAET,SAAO;;AAGT,QAAO;;AAGT,SAAgB,eACd,SACA,OACG;AACH,QAAO,MAAM,cAAc,QAAQ,CAAC"}
@@ -37,8 +37,9 @@ var ReferenceSubquery = class {
37
37
  function generateRuntimeDefault(column) {
38
38
  if (!column.default) return;
39
39
  if ("value" in column.default) return;
40
+ if ("dbSpecial" in column.default) return;
40
41
  const runtime = column.default.runtime;
41
- if (runtime === "auto") return createId();
42
+ if (runtime === "cuid") return createId();
42
43
  if (runtime === "now") return /* @__PURE__ */ new Date();
43
44
  if (typeof runtime === "function") return runtime();
44
45
  }
@@ -1 +1 @@
1
- {"version":3,"file":"result-transform.js","names":["#referencedTable","#externalIdValue","result: Record<string, unknown>","output: Record<string, unknown>","columnValues: Record<string, unknown>","relationData: Record<string, Record<string, unknown>>"],"sources":["../../src/query/result-transform.ts"],"sourcesContent":["import type { AnyColumn, AnyTable } from \"../schema/create\";\nimport type { SQLProvider } from \"../shared/providers\";\nimport { deserialize, serialize } from \"../schema/serialize\";\nimport { FragnoId, FragnoReference } from \"../schema/create\";\nimport { createId } from \"../id\";\n\n/**\n * Marker class for reference column values that need subquery resolution.\n * When a reference column receives a string (external ID), this marker tells\n * the query builder to generate a subquery to look up the internal ID.\n * @internal\n */\nexport class ReferenceSubquery {\n #referencedTable: AnyTable;\n #externalIdValue: string;\n\n constructor(referencedTable: AnyTable, externalIdValue: string) {\n this.#referencedTable = referencedTable;\n this.#externalIdValue = externalIdValue;\n }\n\n get referencedTable() {\n return this.#referencedTable;\n }\n\n get externalIdValue() {\n return this.#externalIdValue;\n }\n}\n\n/**\n * Generate a runtime default value for a column that has defaultTo$()\n *\n * Only generates values for runtime defaults (defaultTo$), NOT static defaults (defaultTo).\n * Static defaults should be handled by the database via DEFAULT constraints.\n *\n * @param column - The column with a default value configuration\n * @returns The generated default value, or undefined if the column has no runtime default\n *\n * @internal\n */\nexport function generateRuntimeDefault(column: AnyColumn): unknown {\n // Check if column has a default value configuration\n if (!column.default) {\n return undefined;\n }\n\n // If it's a static default value (defaultTo), return undefined\n // as the database should handle this via DEFAULT constraint\n 
if (\"value\" in column.default) {\n return undefined;\n }\n\n // Handle runtime defaults (defaultTo$)\n const runtime = column.default.runtime;\n\n if (runtime === \"auto\") {\n return createId();\n }\n\n if (runtime === \"now\") {\n return new Date();\n }\n\n if (typeof runtime === \"function\") {\n return runtime();\n }\n\n return undefined;\n}\n\n/**\n * Encodes a record of values from the application format to database format.\n *\n * This function transforms object keys to match SQL column names and serializes\n * values according to the database provider's requirements (e.g., converting\n * JavaScript Date objects to numbers for SQLite).\n *\n * @param values - The record of values to encode in application format\n * @param table - The table schema definition containing column information\n * @param generateDefault - Whether to generate default values for undefined columns\n * @param provider - The SQL provider (sqlite, postgresql, mysql, etc.)\n * @returns A record with database-compatible column names and serialized values\n *\n * @example\n * ```ts\n * const encoded = encodeValues(\n * { userId: 123, createdAt: new Date() },\n * userTable,\n * true,\n * 'sqlite'\n * );\n * // Returns: { user_id: 123, created_at: 1234567890 }\n * ```\n */\nexport function encodeValues(\n values: Record<string, unknown>,\n table: AnyTable,\n generateDefault: boolean,\n provider: SQLProvider,\n): Record<string, unknown> {\n const result: Record<string, unknown> = {};\n\n for (const k in table.columns) {\n const col = table.columns[k];\n\n // Skip internal ID - never provided by user, auto-generated by database\n if (col.role === \"internal-id\") {\n continue;\n }\n let value = values[k];\n\n if (generateDefault && value === undefined) {\n // Only generate runtime defaults (defaultTo$), not static defaults (defaultTo).\n // Static defaults should be handled by the database via DEFAULT constraints.\n value = generateRuntimeDefault(col);\n }\n\n if (value !== undefined) {\n // 
Handle string references - convert external ID to internal ID via subquery\n if (col.role === \"reference\" && typeof value === \"string\") {\n // Find relation that uses this column\n const relation = Object.values(table.relations).find((rel) =>\n rel.on.some(([localCol]) => localCol === k),\n );\n if (relation) {\n result[col.name] = new ReferenceSubquery(relation.table, value);\n continue;\n }\n\n throw new Error(`Reference column ${k} not found in table ${table.name}`);\n }\n\n result[col.name] = serialize(value, col, provider);\n }\n }\n\n return result;\n}\n\n/**\n * Decodes a database result record to application format.\n *\n * This function transforms database column names back to application property names\n * and deserializes values according to the database provider's format (e.g., converting\n * SQLite integers back to JavaScript Date objects).\n *\n * Supports relation data encoded with the pattern `relationName:columnName`.\n *\n * @param result - The raw database result record\n * @param table - The table schema definition containing column and relation information\n * @param provider - The SQL provider (sqlite, postgresql, mysql, etc.)\n * @returns A record in application format with deserialized values\n *\n * @example\n * ```ts\n * const decoded = decodeResult(\n * { user_id: 123, created_at: 1234567890, 'posts:title': 'Hello' },\n * userTable,\n * 'sqlite'\n * );\n * // Returns: { userId: 123, createdAt: Date, posts: { title: 'Hello' } }\n * ```\n */\nexport function decodeResult(\n result: Record<string, unknown>,\n table: AnyTable,\n provider: SQLProvider,\n): Record<string, unknown> {\n const output: Record<string, unknown> = {};\n // First pass: collect all column values\n const columnValues: Record<string, unknown> = {};\n\n // Collect all relation data (including nested) keyed by relation name\n const relationData: Record<string, Record<string, unknown>> = {};\n\n for (const k in result) {\n const colonIndex = k.indexOf(\":\");\n const 
value = result[k];\n\n // Direct column (no colon)\n if (colonIndex === -1) {\n const col = table.columns[k];\n if (!col) {\n continue;\n }\n\n // Store all column values (including hidden ones for FragnoId creation)\n columnValues[k] = deserialize(value, col, provider);\n continue;\n }\n\n // Relation column (has colon)\n const relationName = k.slice(0, colonIndex);\n const remainder = k.slice(colonIndex + 1);\n\n const relation = table.relations[relationName];\n if (relation === undefined) {\n continue;\n }\n\n // Collect relation data with the remaining key path\n relationData[relationName] ??= {};\n relationData[relationName][remainder] = value;\n }\n\n // Process each relation's data recursively\n for (const relationName in relationData) {\n const relation = table.relations[relationName];\n if (!relation) {\n continue;\n }\n\n // Recursively decode the relation data\n output[relationName] = decodeResult(relationData[relationName], relation.table, provider);\n }\n\n // Second pass: create output with FragnoId objects where appropriate\n for (const k in columnValues) {\n const col = table.columns[k];\n if (!col) {\n continue;\n }\n\n // Filter out hidden columns (like _internalId, _version) from results\n if (col.isHidden) {\n continue;\n }\n\n // For external ID columns, create FragnoId if we have both external and internal IDs\n if (col.role === \"external-id\" && columnValues[\"_internalId\"] !== undefined) {\n output[k] = new FragnoId({\n externalId: columnValues[k] as string,\n internalId: columnValues[\"_internalId\"] as bigint,\n // _version is always selected as a hidden column, so it should always be present\n version: columnValues[\"_version\"] as number,\n });\n } else if (col.role === \"reference\") {\n // For reference columns, create FragnoReference with internal ID\n output[k] = FragnoReference.fromInternal(columnValues[k] as bigint);\n } else {\n output[k] = columnValues[k];\n }\n }\n\n return 
output;\n}\n"],"mappings":";;;;;;;;;;;AAYA,IAAa,oBAAb,MAA+B;CAC7B;CACA;CAEA,YAAY,iBAA2B,iBAAyB;AAC9D,QAAKA,kBAAmB;AACxB,QAAKC,kBAAmB;;CAG1B,IAAI,kBAAkB;AACpB,SAAO,MAAKD;;CAGd,IAAI,kBAAkB;AACpB,SAAO,MAAKC;;;;;;;;;;;;;;AAehB,SAAgB,uBAAuB,QAA4B;AAEjE,KAAI,CAAC,OAAO,QACV;AAKF,KAAI,WAAW,OAAO,QACpB;CAIF,MAAM,UAAU,OAAO,QAAQ;AAE/B,KAAI,YAAY,OACd,QAAO,UAAU;AAGnB,KAAI,YAAY,MACd,wBAAO,IAAI,MAAM;AAGnB,KAAI,OAAO,YAAY,WACrB,QAAO,SAAS;;;;;;;;;;;;;;;;;;;;;;;;;;AA8BpB,SAAgB,aACd,QACA,OACA,iBACA,UACyB;CACzB,MAAMC,SAAkC,EAAE;AAE1C,MAAK,MAAM,KAAK,MAAM,SAAS;EAC7B,MAAM,MAAM,MAAM,QAAQ;AAG1B,MAAI,IAAI,SAAS,cACf;EAEF,IAAI,QAAQ,OAAO;AAEnB,MAAI,mBAAmB,UAAU,OAG/B,SAAQ,uBAAuB,IAAI;AAGrC,MAAI,UAAU,QAAW;AAEvB,OAAI,IAAI,SAAS,eAAe,OAAO,UAAU,UAAU;IAEzD,MAAM,WAAW,OAAO,OAAO,MAAM,UAAU,CAAC,MAAM,QACpD,IAAI,GAAG,MAAM,CAAC,cAAc,aAAa,EAAE,CAC5C;AACD,QAAI,UAAU;AACZ,YAAO,IAAI,QAAQ,IAAI,kBAAkB,SAAS,OAAO,MAAM;AAC/D;;AAGF,UAAM,IAAI,MAAM,oBAAoB,EAAE,sBAAsB,MAAM,OAAO;;AAG3E,UAAO,IAAI,QAAQ,UAAU,OAAO,KAAK,SAAS;;;AAItD,QAAO;;;;;;;;;;;;;;;;;;;;;;;;;;AA2BT,SAAgB,aACd,QACA,OACA,UACyB;CACzB,MAAMC,SAAkC,EAAE;CAE1C,MAAMC,eAAwC,EAAE;CAGhD,MAAMC,eAAwD,EAAE;AAEhE,MAAK,MAAM,KAAK,QAAQ;EACtB,MAAM,aAAa,EAAE,QAAQ,IAAI;EACjC,MAAM,QAAQ,OAAO;AAGrB,MAAI,eAAe,IAAI;GACrB,MAAM,MAAM,MAAM,QAAQ;AAC1B,OAAI,CAAC,IACH;AAIF,gBAAa,KAAK,YAAY,OAAO,KAAK,SAAS;AACnD;;EAIF,MAAM,eAAe,EAAE,MAAM,GAAG,WAAW;EAC3C,MAAM,YAAY,EAAE,MAAM,aAAa,EAAE;AAGzC,MADiB,MAAM,UAAU,kBAChB,OACf;AAIF,eAAa,kBAAkB,EAAE;AACjC,eAAa,cAAc,aAAa;;AAI1C,MAAK,MAAM,gBAAgB,cAAc;EACvC,MAAM,WAAW,MAAM,UAAU;AACjC,MAAI,CAAC,SACH;AAIF,SAAO,gBAAgB,aAAa,aAAa,eAAe,SAAS,OAAO,SAAS;;AAI3F,MAAK,MAAM,KAAK,cAAc;EAC5B,MAAM,MAAM,MAAM,QAAQ;AAC1B,MAAI,CAAC,IACH;AAIF,MAAI,IAAI,SACN;AAIF,MAAI,IAAI,SAAS,iBAAiB,aAAa,mBAAmB,OAChE,QAAO,KAAK,IAAI,SAAS;GACvB,YAAY,aAAa;GACzB,YAAY,aAAa;GAEzB,SAAS,aAAa;GACvB,CAAC;WACO,IAAI,SAAS,YAEtB,QAAO,KAAK,gBAAgB,aAAa,aAAa,GAAa;MAEnE,QAAO,KAAK,aAAa;;AAI7B,QAAO"}
1
+ {"version":3,"file":"result-transform.js","names":["#referencedTable","#externalIdValue","result: Record<string, unknown>","output: Record<string, unknown>","columnValues: Record<string, unknown>","relationData: Record<string, Record<string, unknown>>"],"sources":["../../src/query/result-transform.ts"],"sourcesContent":["import type { AnyColumn, AnyTable } from \"../schema/create\";\nimport type { SQLProvider } from \"../shared/providers\";\nimport { deserialize, serialize } from \"../schema/serialize\";\nimport { FragnoId, FragnoReference } from \"../schema/create\";\nimport { createId } from \"../id\";\n\n/**\n * Marker class for reference column values that need subquery resolution.\n * When a reference column receives a string (external ID), this marker tells\n * the query builder to generate a subquery to look up the internal ID.\n * @internal\n */\nexport class ReferenceSubquery {\n #referencedTable: AnyTable;\n #externalIdValue: string;\n\n constructor(referencedTable: AnyTable, externalIdValue: string) {\n this.#referencedTable = referencedTable;\n this.#externalIdValue = externalIdValue;\n }\n\n get referencedTable() {\n return this.#referencedTable;\n }\n\n get externalIdValue() {\n return this.#externalIdValue;\n }\n}\n\n/**\n * Generate a runtime default value for a column that has defaultTo$()\n *\n * Only generates values for runtime defaults (defaultTo$), NOT static defaults (defaultTo).\n * Static defaults should be handled by the database via DEFAULT constraints.\n *\n * @param column - The column with a default value configuration\n * @returns The generated default value, or undefined if the column has no runtime default\n *\n * @internal\n */\nexport function generateRuntimeDefault(column: AnyColumn): unknown {\n // Check if column has a default value configuration\n if (!column.default) {\n return undefined;\n }\n\n // If it's a static default value (defaultTo), return undefined\n // as the database should handle this via DEFAULT constraint\n 
if (\"value\" in column.default) {\n return undefined;\n }\n\n // If it's a database-level special function (defaultTo(b => b.now())), return undefined\n // as the database should handle this via DEFAULT NOW() or equivalent\n if (\"dbSpecial\" in column.default) {\n return undefined;\n }\n\n // Handle runtime defaults (defaultTo$)\n const runtime = column.default.runtime;\n\n if (runtime === \"cuid\") {\n return createId();\n }\n\n if (runtime === \"now\") {\n return new Date();\n }\n\n if (typeof runtime === \"function\") {\n return runtime();\n }\n\n return undefined;\n}\n\n/**\n * Encodes a record of values from the application format to database format.\n *\n * This function transforms object keys to match SQL column names and serializes\n * values according to the database provider's requirements (e.g., converting\n * JavaScript Date objects to numbers for SQLite).\n *\n * @param values - The record of values to encode in application format\n * @param table - The table schema definition containing column information\n * @param generateDefault - Whether to generate default values for undefined columns\n * @param provider - The SQL provider (sqlite, postgresql, mysql, etc.)\n * @returns A record with database-compatible column names and serialized values\n *\n * @example\n * ```ts\n * const encoded = encodeValues(\n * { userId: 123, createdAt: new Date() },\n * userTable,\n * true,\n * 'sqlite'\n * );\n * // Returns: { user_id: 123, created_at: 1234567890 }\n * ```\n */\nexport function encodeValues(\n values: Record<string, unknown>,\n table: AnyTable,\n generateDefault: boolean,\n provider: SQLProvider,\n): Record<string, unknown> {\n const result: Record<string, unknown> = {};\n\n for (const k in table.columns) {\n const col = table.columns[k];\n\n // Skip internal ID - never provided by user, auto-generated by database\n if (col.role === \"internal-id\") {\n continue;\n }\n let value = values[k];\n\n if (generateDefault && value === undefined) {\n // Only 
generate runtime defaults (defaultTo$), not static defaults (defaultTo).\n // Static defaults should be handled by the database via DEFAULT constraints.\n value = generateRuntimeDefault(col);\n }\n\n if (value !== undefined) {\n // Handle string references - convert external ID to internal ID via subquery\n if (col.role === \"reference\" && typeof value === \"string\") {\n // Find relation that uses this column\n const relation = Object.values(table.relations).find((rel) =>\n rel.on.some(([localCol]) => localCol === k),\n );\n if (relation) {\n result[col.name] = new ReferenceSubquery(relation.table, value);\n continue;\n }\n\n throw new Error(`Reference column ${k} not found in table ${table.name}`);\n }\n\n result[col.name] = serialize(value, col, provider);\n }\n }\n\n return result;\n}\n\n/**\n * Decodes a database result record to application format.\n *\n * This function transforms database column names back to application property names\n * and deserializes values according to the database provider's format (e.g., converting\n * SQLite integers back to JavaScript Date objects).\n *\n * Supports relation data encoded with the pattern `relationName:columnName`.\n *\n * @param result - The raw database result record\n * @param table - The table schema definition containing column and relation information\n * @param provider - The SQL provider (sqlite, postgresql, mysql, etc.)\n * @returns A record in application format with deserialized values\n *\n * @example\n * ```ts\n * const decoded = decodeResult(\n * { user_id: 123, created_at: 1234567890, 'posts:title': 'Hello' },\n * userTable,\n * 'sqlite'\n * );\n * // Returns: { userId: 123, createdAt: Date, posts: { title: 'Hello' } }\n * ```\n */\nexport function decodeResult(\n result: Record<string, unknown>,\n table: AnyTable,\n provider: SQLProvider,\n): Record<string, unknown> {\n const output: Record<string, unknown> = {};\n // First pass: collect all column values\n const columnValues: Record<string, 
unknown> = {};\n\n // Collect all relation data (including nested) keyed by relation name\n const relationData: Record<string, Record<string, unknown>> = {};\n\n for (const k in result) {\n const colonIndex = k.indexOf(\":\");\n const value = result[k];\n\n // Direct column (no colon)\n if (colonIndex === -1) {\n const col = table.columns[k];\n if (!col) {\n continue;\n }\n\n // Store all column values (including hidden ones for FragnoId creation)\n columnValues[k] = deserialize(value, col, provider);\n continue;\n }\n\n // Relation column (has colon)\n const relationName = k.slice(0, colonIndex);\n const remainder = k.slice(colonIndex + 1);\n\n const relation = table.relations[relationName];\n if (relation === undefined) {\n continue;\n }\n\n // Collect relation data with the remaining key path\n relationData[relationName] ??= {};\n relationData[relationName][remainder] = value;\n }\n\n // Process each relation's data recursively\n for (const relationName in relationData) {\n const relation = table.relations[relationName];\n if (!relation) {\n continue;\n }\n\n // Recursively decode the relation data\n output[relationName] = decodeResult(relationData[relationName], relation.table, provider);\n }\n\n // Second pass: create output with FragnoId objects where appropriate\n for (const k in columnValues) {\n const col = table.columns[k];\n if (!col) {\n continue;\n }\n\n // Filter out hidden columns (like _internalId, _version) from results\n if (col.isHidden) {\n continue;\n }\n\n // For external ID columns, create FragnoId if we have both external and internal IDs\n if (col.role === \"external-id\" && columnValues[\"_internalId\"] !== undefined) {\n output[k] = new FragnoId({\n externalId: columnValues[k] as string,\n internalId: columnValues[\"_internalId\"] as bigint,\n // _version is always selected as a hidden column, so it should always be present\n version: columnValues[\"_version\"] as number,\n });\n } else if (col.role === \"reference\") {\n // For reference 
columns, create FragnoReference with internal ID\n output[k] = FragnoReference.fromInternal(columnValues[k] as bigint);\n } else {\n output[k] = columnValues[k];\n }\n }\n\n return output;\n}\n"],"mappings":";;;;;;;;;;;AAYA,IAAa,oBAAb,MAA+B;CAC7B;CACA;CAEA,YAAY,iBAA2B,iBAAyB;AAC9D,QAAKA,kBAAmB;AACxB,QAAKC,kBAAmB;;CAG1B,IAAI,kBAAkB;AACpB,SAAO,MAAKD;;CAGd,IAAI,kBAAkB;AACpB,SAAO,MAAKC;;;;;;;;;;;;;;AAehB,SAAgB,uBAAuB,QAA4B;AAEjE,KAAI,CAAC,OAAO,QACV;AAKF,KAAI,WAAW,OAAO,QACpB;AAKF,KAAI,eAAe,OAAO,QACxB;CAIF,MAAM,UAAU,OAAO,QAAQ;AAE/B,KAAI,YAAY,OACd,QAAO,UAAU;AAGnB,KAAI,YAAY,MACd,wBAAO,IAAI,MAAM;AAGnB,KAAI,OAAO,YAAY,WACrB,QAAO,SAAS;;;;;;;;;;;;;;;;;;;;;;;;;;AA8BpB,SAAgB,aACd,QACA,OACA,iBACA,UACyB;CACzB,MAAMC,SAAkC,EAAE;AAE1C,MAAK,MAAM,KAAK,MAAM,SAAS;EAC7B,MAAM,MAAM,MAAM,QAAQ;AAG1B,MAAI,IAAI,SAAS,cACf;EAEF,IAAI,QAAQ,OAAO;AAEnB,MAAI,mBAAmB,UAAU,OAG/B,SAAQ,uBAAuB,IAAI;AAGrC,MAAI,UAAU,QAAW;AAEvB,OAAI,IAAI,SAAS,eAAe,OAAO,UAAU,UAAU;IAEzD,MAAM,WAAW,OAAO,OAAO,MAAM,UAAU,CAAC,MAAM,QACpD,IAAI,GAAG,MAAM,CAAC,cAAc,aAAa,EAAE,CAC5C;AACD,QAAI,UAAU;AACZ,YAAO,IAAI,QAAQ,IAAI,kBAAkB,SAAS,OAAO,MAAM;AAC/D;;AAGF,UAAM,IAAI,MAAM,oBAAoB,EAAE,sBAAsB,MAAM,OAAO;;AAG3E,UAAO,IAAI,QAAQ,UAAU,OAAO,KAAK,SAAS;;;AAItD,QAAO;;;;;;;;;;;;;;;;;;;;;;;;;;AA2BT,SAAgB,aACd,QACA,OACA,UACyB;CACzB,MAAMC,SAAkC,EAAE;CAE1C,MAAMC,eAAwC,EAAE;CAGhD,MAAMC,eAAwD,EAAE;AAEhE,MAAK,MAAM,KAAK,QAAQ;EACtB,MAAM,aAAa,EAAE,QAAQ,IAAI;EACjC,MAAM,QAAQ,OAAO;AAGrB,MAAI,eAAe,IAAI;GACrB,MAAM,MAAM,MAAM,QAAQ;AAC1B,OAAI,CAAC,IACH;AAIF,gBAAa,KAAK,YAAY,OAAO,KAAK,SAAS;AACnD;;EAIF,MAAM,eAAe,EAAE,MAAM,GAAG,WAAW;EAC3C,MAAM,YAAY,EAAE,MAAM,aAAa,EAAE;AAGzC,MADiB,MAAM,UAAU,kBAChB,OACf;AAIF,eAAa,kBAAkB,EAAE;AACjC,eAAa,cAAc,aAAa;;AAI1C,MAAK,MAAM,gBAAgB,cAAc;EACvC,MAAM,WAAW,MAAM,UAAU;AACjC,MAAI,CAAC,SACH;AAIF,SAAO,gBAAgB,aAAa,aAAa,eAAe,SAAS,OAAO,SAAS;;AAI3F,MAAK,MAAM,KAAK,cAAc;EAC5B,MAAM,MAAM,MAAM,QAAQ;AAC1B,MAAI,CAAC,IACH;AAIF,MAAI,IAAI,SACN;AAIF,MAAI,IAAI,SAAS,iBAAiB,aAAa,mBAAmB,OAChE,QAAO,KAAK,IAAI,SAAS;GACvB,YAAY,aAAa;GACzB,YAAY,aAAa;GAEzB,SAAS,aAAa;GACvB,CAAC
;WACO,IAAI,SAAS,YAEtB,QAAO,KAAK,gBAAgB,aAAa,aAAa,GAAa;MAEnE,QAAO,KAAK,aAAa;;AAI7B,QAAO"}
@@ -105,12 +105,30 @@ interface Table<TColumns$1 extends Record<string, AnyColumn> = Record<string, An
105
105
  */
106
106
  getVersionColumn: () => AnyColumn;
107
107
  }
108
- type DefaultFunctionMap = {
109
- date: "now";
110
- timestamp: "now";
111
- string: "auto";
112
- } & Record<`varchar(${number})`, "auto">;
113
- type DefaultFunction<TType$1 extends keyof TypeMap> = (TType$1 extends keyof DefaultFunctionMap ? DefaultFunctionMap[TType$1] : never) | (() => TypeMap[TType$1]);
108
+ type DBSpecial = {
109
+ tag: "special";
110
+ value: "now";
111
+ };
112
+ type RuntimeSpecial = {
113
+ tag: "special";
114
+ value: "cuid" | "now";
115
+ };
116
+ /**
117
+ * Builder for database-level default values.
118
+ */
119
+ interface DefaultBuilder {
120
+ /** Database-generated timestamp (DEFAULT NOW()) */
121
+ now(): DBSpecial;
122
+ }
123
+ /**
124
+ * Builder for runtime-generated default values.
125
+ */
126
+ interface RuntimeDefaultBuilder {
127
+ /** Generate CUID identifier */
128
+ cuid(): RuntimeSpecial;
129
+ /** Generate current timestamp */
130
+ now(): RuntimeSpecial;
131
+ }
114
132
  type IdColumnType = `varchar(${number})`;
115
133
  type TypeMap = {
116
134
  string: string;
@@ -136,24 +154,64 @@ declare class Column<TType$1 extends keyof TypeMap, TIn = unknown, TOut = unknow
136
154
  default?: {
137
155
  value: TypeMap[TType$1];
138
156
  } | {
139
- runtime: DefaultFunction<TType$1>;
157
+ dbSpecial: "now";
158
+ } | {
159
+ runtime: "cuid" | "now" | (() => TypeMap[TType$1]);
140
160
  };
141
161
  tableName: string;
142
162
  constructor(type: TType$1);
143
163
  nullable<TNullable extends boolean = true>(nullable?: TNullable): Column<TType$1, TNullable extends true ? TIn | null : Exclude<TIn, null>, TNullable extends true ? TOut | null : Exclude<TOut, null>>;
144
164
  hidden<THidden extends boolean = true>(hidden?: THidden): Column<TType$1, null, null>;
145
165
  /**
146
- * Generate default value on runtime
166
+ * Generate default value at runtime in application code (not in the database).
167
+ *
168
+ * Use this when you need values generated in your application code, either because:
169
+ * - Your database doesn't support the operation (e.g., generating CUIDs)
170
+ * - You want consistent behavior across all databases
171
+ * - You need custom generation logic
172
+ *
173
+ * @param value - Either a literal value or builder callback:
174
+ * - Literal: Any static value of the column type
175
+ * - `(b) => b.cuid()` - Generate a CUID identifier
176
+ * - `(b) => b.now()` - Generate current timestamp
177
+ * - `(b) => ...` - Custom function that returns the default value
178
+ *
179
+ * @example
180
+ * ```ts
181
+ * column("string").defaultTo$((b) => b.cuid()) // Generate CUID at runtime
182
+ * column("timestamp").defaultTo$((b) => b.now()) // Generate timestamp at runtime
183
+ * column("integer").defaultTo$(42) // Static literal
184
+ * column("integer").defaultTo$((b) => Math.floor(Math.random() * 100)) // Custom function
185
+ * ```
147
186
  */
148
- defaultTo$(fn: DefaultFunction<TType$1>): Column<TType$1, TIn | null, TOut>;
187
+ defaultTo$(value: TypeMap[TType$1] | ((builder: RuntimeDefaultBuilder) => RuntimeSpecial | TypeMap[TType$1])): Column<TType$1, TIn | null, TOut>;
149
188
  /**
150
- * Set a database-level default value
189
+ * Set a database-level default value (generated by the database, not application code).
190
+ *
191
+ * The database will generate the default value when inserting rows. If the database
192
+ * doesn't support the operation, Fragno will fall back to generating the value in
193
+ * application code.
151
194
  *
152
- * For schemaless database, it's still generated on runtime
195
+ * @param value - Either a literal value or builder callback:
196
+ * - Literal: Any static value of the column type
197
+ * - `(b) => b.now()` - Database-generated timestamp
198
+ *
199
+ * @example
200
+ * ```ts
201
+ * // Static defaults
202
+ * column("string").defaultTo("active")
203
+ * column("integer").defaultTo(0)
204
+ * column("boolean").defaultTo(true)
205
+ *
206
+ * // Database-generated timestamp (with fallback)
207
+ * column("timestamp").defaultTo((b) => b.now())
208
+ * ```
153
209
  */
154
- defaultTo(value: TypeMap[TType$1]): Column<TType$1, TIn | null, TOut>;
210
+ defaultTo(value: TypeMap[TType$1] | ((builder: DefaultBuilder) => DBSpecial | TypeMap[TType$1])): Column<TType$1, TIn | null, TOut>;
155
211
  /**
156
- * Generate default value for the column on runtime.
212
+ * Generate default value for the column at runtime.
213
+ * Used for both runtime defaults (defaultTo$) and fallback generation for
214
+ * database defaults (defaultTo) when the database doesn't support them.
157
215
  */
158
216
  generateDefaultValue(): TypeMap[TType$1] | undefined;
159
217
  /**
@@ -169,8 +227,8 @@ declare class Column<TType$1 extends keyof TypeMap, TIn = unknown, TOut = unknow
169
227
  }
170
228
  declare class IdColumn<TType$1 extends IdColumnType = IdColumnType, TIn = unknown, TOut = unknown> extends Column<TType$1, TIn, TOut> {
171
229
  id: boolean;
172
- defaultTo$(fn: DefaultFunction<TType$1>): IdColumn<TType$1, TIn | null, TOut>;
173
- defaultTo(value: TypeMap[TType$1]): IdColumn<TType$1, TIn | null, TOut>;
230
+ defaultTo$(value: TypeMap[TType$1] | ((builder: RuntimeDefaultBuilder) => RuntimeSpecial | TypeMap[TType$1])): IdColumn<TType$1, TIn | null, TOut>;
231
+ defaultTo(value: TypeMap[TType$1] | ((builder: DefaultBuilder) => DBSpecial | TypeMap[TType$1])): IdColumn<TType$1, TIn | null, TOut>;
174
232
  }
175
233
  /**
176
234
  * Internal ID column - used for database-native joins and foreign keys.
@@ -435,5 +493,5 @@ declare function compileForeignKey(key: ForeignKey, nameType?: "sql" | "orm"): {
435
493
  columns: string[];
436
494
  };
437
495
  //#endregion
438
- export { AnyColumn, AnyRelation, AnySchema, AnyTable, Column, ExplicitRelationInit, ForeignKey, FragnoId, FragnoReference, IdColumn, Index, InternalIdColumn, Relation, Schema, SchemaBuilder, SchemaOperation, Table, TableBuilder, TableSubOperation, TypeMap, VersionColumn, column, compileForeignKey, idColumn, internalIdColumn, referenceColumn, schema, versionColumn };
496
+ export { AnyColumn, AnyRelation, AnySchema, AnyTable, Column, DefaultBuilder, ExplicitRelationInit, ForeignKey, FragnoId, FragnoReference, IdColumn, Index, InternalIdColumn, Relation, RuntimeDefaultBuilder, Schema, SchemaBuilder, SchemaOperation, Table, TableBuilder, TableSubOperation, TypeMap, VersionColumn, column, compileForeignKey, idColumn, internalIdColumn, referenceColumn, schema, versionColumn };
439
497
  //# sourceMappingURL=create.d.ts.map