@inflector/optima 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +21 -0
- package/src/Qfluent.ts +179 -0
- package/src/database.ts +284 -0
- package/src/fluent.ts +55 -0
- package/src/index.ts +14 -0
- package/src/schema.ts +446 -0
- package/src/table.ts +926 -0
- package/test/Schema.ts +39 -0
- package/test/index.ts +16 -0
- package/tsconfig.json +15 -0
package/package.json
ADDED
@@ -0,0 +1,21 @@
+{
+  "name": "@inflector/optima",
+  "version": "1.0.0",
+  "module": "src/index.ts",
+  "type": "module",
+  "devDependencies": {
+    "@types/bun": "latest",
+    "typescript": "^5.9.3"
+  },
+  "scripts": {
+    "dev": "bun index.ts",
+    "build": "tsc"
+  },
+  "peerDependencies": {
+    "typescript": "^5.9.3"
+  },
+  "dependencies": {
+    "@duckdb/node-api": "^1.4.1-r.5",
+    "zod": "^4.2.1"
+  }
+}
package/src/Qfluent.ts
ADDED
@@ -0,0 +1,179 @@
+import type { DuckDBValue } from "@duckdb/node-api";
+import type { Infer } from "./schema";
+import type { Extension, OptimaTable, Where } from "./table";
+
+// --- Type Utilities ---
+export type Prettify<T> = {
+  [K in keyof T]: T[K];
+} & {};
+
+export type SetFlag<State, Key extends keyof State> = {
+  [P in keyof State]: P extends Key ? true : State[P];
+};
+export type MapToFalse<T> = {
+  [K in keyof T]: false;
+};
+
+// Auto-generate initial "false" state for methods + the 'extended' flag
+export type InitialState<T> = {
+  [K in keyof T]: false;
+} & { extended: false };
+
+// --- Schema Definitions ---
+
+export type QueryMethods<TDef extends Record<string, any>> = {
+  limit: [n: number];
+  offset: [n: number];
+  orderBy: [col: keyof TDef | Array<keyof TDef>, dir?: "ASC" | "DESC"];
+  where: [w: Where<Infer<TDef>>];
+};
+
+export type QueryOneMethods<TDef extends Record<string, any>> = {
+  where: [w: Where<Infer<TDef>>];
+};
+
+// --- Fluent Interface Definitions ---
+
+// 1. Fluent Builder for Multiple Results (Array)
+export type FluentQueryBuilder<
+  TDef extends Record<string, any>,
+  Result,
+  // State includes standard methods + 'extended' flag
+  State extends Record<keyof QueryMethods<TDef> | "extended", boolean>
+> = {
+  // 1. Standard Methods (limit, where, etc.)
+  [K in keyof QueryMethods<TDef> as State[K] extends true ? never : K]: (
+    ...args: QueryMethods<TDef>[K]
+  ) => FluentQueryBuilder<TDef, Result, SetFlag<State, K>>;
+} & {
+  // 2. Extend Method (Conditional: Hide if State['extended'] is true)
+  [K in "extend" as State["extended"] extends true
+    ? never
+    : K]: <ExtTable extends OptimaTable<any>>(
+    table: ExtTable
+  ) => FluentQueryBuilder<
+    TDef,
+    Prettify<Result & Extension<TDef, ExtTable>>,
+    SetFlag<State, "extended"> // Mark extended as true
+  >;
+} & PromiseLike<Result[]>;
+
+// 2. Fluent Builder for Single Result (One)
+export type FluentQueryBuilderOne<
+  TDef extends Record<string, any>,
+  Result,
+  State extends Record<keyof QueryOneMethods<TDef> | "extended", boolean>
+> = {
+  [K in keyof QueryOneMethods<TDef> as State[K] extends true ? never : K]: (
+    ...args: QueryOneMethods<TDef>[K]
+  ) => FluentQueryBuilderOne<TDef, Result, SetFlag<State, K>>;
+} & {
+  [K in "extend" as State["extended"] extends true
+    ? never
+    : K]: <ExtTable extends OptimaTable<any>>(
+    table: ExtTable
+  ) => FluentQueryBuilderOne<
+    TDef,
+    Prettify<Result & Extension<TDef, ExtTable>>,
+    SetFlag<State, "extended">
+  >;
+} & PromiseLike<Result | undefined>;
+
+// --- Implementation ---
+
+export const createProxyHandler = (
+  table: OptimaTable<any>,
+  config: any,
+  isSingleResult: boolean
+) => {
+  return {
+    get(target: any, prop: string | symbol) {
+      // 1. Handle Promise Execution (.then)
+      if (prop === "then") {
+        return async (
+          resolve: (value: any) => void,
+          reject: (reason: any) => void
+        ) => {
+          const tableAny = table as any;
+          try {
+            const { sql } = tableAny.BuildSelect(tableAny["Name"], config);
+            const rawResult: Record<string, DuckDBValue>[] = await (
+              await tableAny.Connection.run(sql)
+            ).getRowObjects();
+            const formatted = tableAny.FormatOut(rawResult);
+
+            if (isSingleResult) {
+              const val = formatted.length > 0 ? formatted[0] : undefined;
+              resolve(val);
+            } else {
+              resolve(formatted);
+            }
+          } catch (e) {
+            reject(e);
+          }
+        };
+      }
+
+      // 2. Handle 'extend'
+      if (prop === "extend") {
+        return (extTable: OptimaTable<any>) => {
+          config.extend = config.extend || [];
+
+          // Optional Runtime Safety: Prevent adding the exact same table instance twice
+          // (Even though types now prevent calling extend() twice on the same chain)
+          if (!config.extend.includes(extTable)) {
+            config.extend.push(extTable);
+          }
+
+          return new Proxy(
+            target,
+            createProxyHandler(table, config, isSingleResult)
+          );
+        };
+      }
+
+      // 3. Handle Standard Schema Methods
+      return (...args: any[]) => {
+        if (args.length === 1) {
+          config[prop as string] = args[0];
+        } else if (args.length > 1) {
+          config[prop as string] = args;
+        } else {
+          config[prop as string] = true;
+        }
+
+        return new Proxy(
+          target,
+          createProxyHandler(table, config, isSingleResult)
+        );
+      };
+    },
+  };
+};
+
+// --- Factory Functions ---
+
+export function createQueryBuilder<TDef extends Record<string, any>, Result>(
+  table: OptimaTable<TDef>,
+  initialConfig: any = {}
+): FluentQueryBuilder<TDef, Result, InitialState<QueryMethods<TDef>>> {
+  const dummyState = {};
+  // The InitialState type ensures 'extended' starts as false
+  return new Proxy(
+    dummyState,
+    createProxyHandler(table, initialConfig, false)
+  ) as any;
+}
+
+export function createQueryBuilderOne<TDef extends Record<string, any>, Result>(
+  table: OptimaTable<TDef>,
+  initialConfig: any = {}
+): FluentQueryBuilderOne<TDef, Result, InitialState<QueryOneMethods<TDef>>> {
+  const config = { ...initialConfig, limit: 1 };
+  const dummyState = {};
+
+  return new Proxy(
+    dummyState,
+    createProxyHandler(table, config, true)
+  ) as any;
+}
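
The two builder types above encode which chain methods have already been used: each call flips its key in State to true, which removes that method (and extend, once used) from the next builder type, while the PromiseLike member makes the chain awaitable. A minimal type-level sketch of that behaviour (not part of the package; UserDef and UserRow are invented for illustration):

import type {
  FluentQueryBuilder,
  InitialState,
  QueryMethods,
  SetFlag,
} from "./Qfluent";

// Hypothetical column definition and row shape, purely for illustration.
type UserDef = { id: unknown; name: unknown };
type UserRow = { id: number; name: string };

// Fresh builder: limit/offset/orderBy/where/extend are all available.
type Fresh = FluentQueryBuilder<UserDef, UserRow, InitialState<QueryMethods<UserDef>>>;

// After a .limit(...) call the 'limit' flag is true, so the method disappears from the type.
type AfterLimit = FluentQueryBuilder<
  UserDef,
  UserRow,
  SetFlag<InitialState<QueryMethods<UserDef>>, "limit">
>;

declare const q: AfterLimit;
// q.limit(10);         // would not type-check: 'limit' was removed by SetFlag
const q2 = q.offset(5); // still available; flips the 'offset' flag in turn
// `await q2` would resolve to UserRow[] through the PromiseLike<Result[]> part of the type.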
package/src/database.ts
ADDED
@@ -0,0 +1,284 @@
+import { DuckDBConnection, DuckDBInstance } from "@duckdb/node-api";
+import { OptimaTable } from "./table";
+
+// -- Types --
+type DBRow = Record<string, unknown>;
+type SchemaConfig = Record<string, any>;
+
+export class OptimaDB<T extends Record<string, OptimaTable>> {
+  private instance: DuckDBInstance;
+  private connection: DuckDBConnection;
+  public Tables: T;
+
+  constructor(
+    instance: DuckDBInstance,
+    connection: DuckDBConnection,
+    tables: T
+  ) {
+    this.instance = instance;
+    this.connection = connection;
+    this.Tables = tables;
+  }
+
+  /**
+   * Initializes the database, connects, detects schema changes,
+   * runs migrations, and returns the DB instance.
+   */
+  static async Open<TSchema extends SchemaConfig>(
+    name: string = ":memory:",
+    schema: TSchema,
+    options?: { MemoryLimit?: string; ThreadCount?: number }
+  ): Promise<OptimaDB<{ [K in keyof TSchema]: OptimaTable<TSchema[K]> }>> {
+    const instance = await DuckDBInstance.create(name);
+    const connection = await instance.connect();
+
+    // 1. Apply Options
+    if (options?.MemoryLimit) {
+      await connection.run(`SET memory_limit = '${options.MemoryLimit}';`);
+    }
+    if (options?.ThreadCount) {
+      await connection.run(`SET threads = ${options.ThreadCount};`);
+    }
+
+    // 2. Auto-Migration
+    const existingTables = await (await connection.run(`SELECT * from duckdb_tables`)).getRowObjects();
+    const cleanSchema = SchemaMigrator.transformSchema(schema);
+    const migrationSQL = SchemaMigrator.generateSQL(existingTables, cleanSchema);
+
+    if (migrationSQL.length > 0) {
+      console.log(`[OptimaDB] Applying ${migrationSQL.length} migrations...`);
+      // Run migrations in a transaction to ensure safety
+      await connection.run("BEGIN TRANSACTION;");
+      try {
+        for (const sql of migrationSQL) {
+          await connection.run(sql);
+        }
+        await connection.run("COMMIT;");
+      } catch (err) {
+        await connection.run("ROLLBACK;");
+        console.error("[OptimaDB] Migration failed, rolling back.", err);
+        throw err;
+      }
+    }
+
+    // 3. Initialize Table Wrappers
+    const tables = {} as { [K in keyof TSchema]: OptimaTable<TSchema[K]> };
+    for (const key of Object.keys(schema) as Array<keyof TSchema>) {
+      tables[key] = await OptimaTable.create<TSchema[typeof key]>(
+        key as string,
+        schema[key],
+        connection
+      );
+    }
+
+    return new OptimaDB(instance, connection, tables);
+  }
+
+  /**
+   * Executes a raw SQL query and returns rows as objects.
+   */
+  async execute(sql: string): Promise<DBRow[]> {
+    return (await this.connection.run(sql)).getRowObjects();
+  }
+
+  async getTables(): Promise<string[]> {
+    const result = await this.execute("PRAGMA show_tables;");
+    return result.map((t: any) => t.name);
+  }
+
+  async getDiskSize(): Promise<unknown> {
+    const result = await this.execute("PRAGMA database_size;");
+    return result[0];
+  }
+
+  /**
+   * Runs a function within a transaction.
+   * Re-throws error after rollback so caller handles logic failure.
+   */
+  async transaction<R>(fn: () => Promise<R>): Promise<R> {
+    await this.connection.run("BEGIN TRANSACTION;");
+    try {
+      const result = await fn();
+      await this.connection.run("COMMIT;");
+      return result;
+    } catch (e) {
+      await this.connection.run("ROLLBACK;");
+      throw e;
+    }
+  }
+}
+
+/**
+ * ------------------------------------------------------------------
+ * INTERNAL MIGRATION LOGIC
+ * Encapsulated to keep the main class clean.
+ * ------------------------------------------------------------------
+ */
+class SchemaMigrator {
+  /**
+   * Transforms the complex Module/ColumnImpl schema into a simple config object.
+   */
+  static transformSchema(schemaObject: SchemaConfig) {
+    const cleanSchema: Record<string, Record<string, any>> = {};
+
+    for (const [tableName, tableDef] of Object.entries(schemaObject)) {
+      if (typeof tableDef !== "object" || tableDef === null) continue;
+
+      const realTableName = tableDef.__tableName || tableName;
+      cleanSchema[realTableName] = {};
+
+      for (const [colName, colImpl] of Object.entries(tableDef)) {
+        if (colName.startsWith("__")) continue; // Skip metadata
+
+        // @ts-ignore - Assuming colImpl structure based on input
+        if (colImpl?.config) {
+          // @ts-ignore
+          const config = { ...colImpl.config };
+
+          // Handle STRUCT merging
+          if (config.SQlType === "STRUCT" && config.STRUCTType) {
+            config.SQlType = config.STRUCTType;
+          }
+          cleanSchema[realTableName][colName] = config;
+        }
+      }
+    }
+    return cleanSchema;
+  }
+
+  /**
+   * Generates SQL statements to migrate DB from Current -> Desired state.
+   */
+  static generateSQL(existingTablesRaw: any[], desiredSchema: Record<string, any>) {
+    const currentSchema = this.parseDuckDBTableSQL(existingTablesRaw);
+    const steps: string[] = [];
+
+    for (const [tableName, fields] of Object.entries(desiredSchema)) {
+
+      // 1. CREATE TABLE
+      if (!currentSchema[tableName]) {
+        const fieldDefs = Object.entries(fields).map(([colName, config]: [string, any]) => {
+          let def = `"${colName}" ${config.SQlType}`;
+          if (config.primaryKey) def += " PRIMARY KEY";
+          if (config.notnull) def += " NOT NULL";
+          return def;
+        });
+        steps.push(`CREATE TABLE ${tableName} (${fieldDefs.join(", ")});`);
+        continue;
+      }
+
+      const currentTableCols = currentSchema[tableName];
+
+      for (const [colName, config] of Object.entries(fields) as [string, any][]) {
+
+        // 2. ADD COLUMN
+        if (!currentTableCols[colName]) {
+          let def = `${config.SQlType}`;
+          // If adding NOT NULL to existing table, we need a default value
+          if (config.notnull) {
+            def += ` DEFAULT ${this.getDefaultValueForType(config.SQlType)}`;
+          }
+          steps.push(`ALTER TABLE ${tableName} ADD COLUMN "${colName}" ${def};`);
+
+          // Enforce strict constraint after adding (if supported by version, else implied by definition)
+          if (config.notnull) {
+            steps.push(`ALTER TABLE ${tableName} ALTER "${colName}" SET NOT NULL;`);
+          }
+          continue;
+        }
+
+        // 3. ALTER COLUMN (Type or Constraints)
+        const currentCol = currentTableCols[colName];
+        const normCurrentType = currentCol.type.replace(/\s+/g, "").toUpperCase();
+        const normNewType = config.SQlType.replace(/\s+/g, "").toUpperCase();
+
+        // 3a. Type Mismatch
+        // We skip complex struct comparison here to avoid false positives on spacing
+        if (normCurrentType !== normNewType && !normNewType.startsWith("STRUCT")) {
+          steps.push(`ALTER TABLE ${tableName} ALTER "${colName}" TYPE ${config.SQlType};`);
+        }
+
+        // 3b. Not Null Mismatch
+        const desiredNotNull = !!config.notnull;
+        if (desiredNotNull !== currentCol.isNotNull) {
+          if (desiredNotNull) {
+            steps.push(`ALTER TABLE ${tableName} ALTER "${colName}" SET NOT NULL;`);
+          } else {
+            steps.push(`ALTER TABLE ${tableName} ALTER "${colName}" DROP NOT NULL;`);
+          }
+        }
+      }
+    }
+    return steps;
+  }
+
+  /**
+   * Parses raw "CREATE TABLE" SQL from DuckDB into a usable map.
+   */
+  private static parseDuckDBTableSQL(existingTables: any[]) {
+    const schemaMap: Record<string, Record<string, { type: string; isNotNull: boolean; isPrimaryKey: boolean }>> = {};
+
+    existingTables.forEach((table) => {
+      const match = table.sql.match(/CREATE TABLE "?\w+"?\s*\((.*)\);/s);
+      if (!match) return;
+
+      const columnBody = match[1];
+      const columns: Record<string, any> = {};
+
+      let depth = 0;
+      let currentChunk = "";
+
+      for (let i = 0; i < columnBody.length; i++) {
+        const char = columnBody[i];
+        if (char === "(") depth++;
+        if (char === ")") depth--;
+
+        if (char === "," && depth === 0) {
+          this.processColumnLine(currentChunk, columns);
+          currentChunk = "";
+        } else {
+          currentChunk += char;
+        }
+      }
+      if (currentChunk.trim()) this.processColumnLine(currentChunk, columns);
+
+      schemaMap[table.table_name] = columns;
+    });
+
+    return schemaMap;
+  }
+
+  private static processColumnLine(line: string, columnMap: Record<string, any>) {
+    const parts = line.trim().match(/^("?[\w]+"?)\s+(.*)$/);
+    if (!parts) return;
+
+    const name = parts[1]?.replace(/"/g, "");
+    const definition = parts[2];
+    const upDef = definition?.toUpperCase();
+
+    let type = definition?.split(" ")[0];
+    if (upDef?.startsWith("STRUCT")) {
+      type = definition; // Simplification: capture full struct definition
+    }
+
+    if (name !== undefined) {
+      columnMap[name] = {
+        type: type,
+        isNotNull: upDef?.includes("NOT NULL") ?? false,
+        isPrimaryKey: upDef?.includes("PRIMARY KEY") ?? false,
+      };
+    }
+  }
+
+  /**
+   * Returns a safe default value for a given SQL type.
+   * Required when adding NOT NULL columns to existing tables.
+   */
+  private static getDefaultValueForType(type: string): string {
+    const t = type.toUpperCase();
+    if (t.includes("INT") || t.includes("FLOAT") || t.includes("DECIMAL") || t.includes("DOUBLE")) return "0";
+    if (t.includes("BOOL")) return "FALSE";
+    if (t.includes("STRUCT") || t.includes("MAP") || t.includes("LIST")) return "NULL"; // Complex types usually default to NULL
+    return "''"; // VARCHAR, TEXT, etc.
+  }
+}
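
OptimaDB.Open covers the whole lifecycle in one call: create the DuckDB instance, connect, apply the optional memory/thread settings, diff the declared schema against duckdb_tables, run any migration SQL inside a transaction, and wrap each table in an OptimaTable. A usage sketch under assumptions (the shape of the schema object comes from src/schema.ts, which is not shown in this diff, so a placeholder stands in for it):

import { OptimaDB } from "@inflector/optima";

// Placeholder: in real use this would be built with the helpers in src/schema.ts.
declare const appSchema: Record<string, any>;

const db = await OptimaDB.Open("app.duckdb", appSchema, {
  MemoryLimit: "512MB", // forwarded to `SET memory_limit = ...`
  ThreadCount: 4,       // forwarded to `SET threads = ...`
});

// Raw-SQL helpers defined on the class:
const tableNames = await db.getTables();          // PRAGMA show_tables
const rows = await db.execute("SELECT 42 AS x;"); // rows as plain objects

// transaction() wraps the callback in BEGIN/COMMIT and rolls back (then re-throws) on error.
await db.transaction(async () => {
  await db.execute("CREATE TABLE IF NOT EXISTS scratch (i INTEGER);");
  await db.execute("INSERT INTO scratch VALUES (1);");
});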
package/src/fluent.ts
ADDED
@@ -0,0 +1,55 @@
+export type SetFlag<State, Key extends keyof State> = {
+  [P in keyof State]: P extends Key ? true : State[P];
+};
+type MapToFalse<T> = {
+  [K in keyof T]: false;
+};
+type FluentStep<
+  Schema,
+  State extends { [K in keyof Schema]: boolean | unknown },
+  Result
+> = {
+  [K in keyof Schema as State[K] extends true
+    ? never
+    : K]: Schema[K] extends boolean
+    ? () => FluentStep<Schema, SetFlag<State, K>, Result>
+    : Schema[K] extends any[]
+    ? (...args: Schema[K]) => FluentStep<Schema, SetFlag<State, K>, Result>
+    : (arg: Schema[K]) => FluentStep<Schema, SetFlag<State, K>, Result>;
+} & PromiseLike<Result>;

+export function createFluentBuilder<Schema, R = Schema>(
+  onBuild?: (data: Schema) => R | Promise<R>
+): FluentStep<Schema, MapToFalse<Schema>, R> {
+  const data: any = {};
+
+  const handler: ProxyHandler<any> = {
+    get(target, prop) {
+      if (prop === "then") {
+        return (resolve: any, reject: any) => {
+          const result = onBuild ? onBuild(target) : target;
+          Promise.resolve(result).then(resolve, reject);
+        };
+      }
+
+      return (...args: any[]) => {
+        // 1. No Args -> True (Flag)
+        if (args.length === 0) {
+          target[prop as string] = true;
+        }
+        // 2. One Arg -> Store value
+        else if (args.length === 1) {
+          target[prop as string] = args[0];
+        }
+        // 3. Multiple Args -> Store as Array/Tuple
+        else {
+          target[prop as string] = args;
+        }
+
+        return new Proxy(target, handler);
+      };
+    },
+  };
+
+  return new Proxy(data, handler);
+}
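
createFluentBuilder is the generic counterpart of the query proxy: the Schema type parameter decides the method surface (boolean keys become zero-argument flags, tuple types become multi-argument methods, everything else a single-argument setter), and awaiting the chain hands the collected data to onBuild. A small self-contained sketch (the RequestSchema shape is invented for illustration):

import { createFluentBuilder } from "./fluent";

// Hypothetical schema, purely for illustration.
type RequestSchema = {
  url: string;      // single-argument setter
  retries: number;  // single-argument setter
  verbose: boolean; // zero-argument flag
};

const request = createFluentBuilder<RequestSchema, string>(
  (opts) => `GET ${opts.url} retries=${opts.retries} verbose=${opts.verbose === true}`
);

// Each call removes its own method from the resulting type; awaiting the chain
// runs the onBuild callback with whatever has been collected so far.
const summary = await request.url("https://example.com").retries(3).verbose();
console.log(summary); // "GET https://example.com retries=3 verbose=true"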
package/src/index.ts
ADDED
@@ -0,0 +1,14 @@
+export * from "./database";
+export * from "./schema";
+export {
+  eq, ne, gt, gte, lt, lte,
+  inOp, notIn,
+  between, notBetween,
+  is, isNot,
+  like, notLike,
+  startsWith, endsWith,
+  contains,
+  regexp, notRegexp,
+  cond,
+  type ConditionBuilder
+} from "./table";
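
The entry point therefore aggregates everything a consumer needs behind one import: the database and schema modules plus the condition helpers re-exported from src/table.ts (whose implementation is not included in this listing). Illustrative only, since the helper signatures live in the unshown table.ts:

// Everything resolves through the package entry point ("module": "src/index.ts").
import {
  OptimaDB,          // from ./database
  eq, between, like, // condition helpers re-exported from ./table
  cond,
  type ConditionBuilder,
} from "@inflector/optima";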