pecunia-root 0.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/adapters/drizzle/index.d.mts +47 -0
- package/dist/adapters/drizzle/index.mjs +218 -0
- package/dist/adapters/drizzle/index.mjs.map +1 -0
- package/dist/adapters/get-adapter.d.mts +7 -0
- package/dist/adapters/get-adapter.mjs +31 -0
- package/dist/adapters/get-adapter.mjs.map +1 -0
- package/dist/adapters/internal/index.mjs +11 -0
- package/dist/adapters/internal/index.mjs.map +1 -0
- package/dist/adapters/kysely/bun-sqlite-dialect.mjs +156 -0
- package/dist/adapters/kysely/bun-sqlite-dialect.mjs.map +1 -0
- package/dist/adapters/kysely/dialect.mjs +83 -0
- package/dist/adapters/kysely/dialect.mjs.map +1 -0
- package/dist/adapters/kysely/index.d.mts +34 -0
- package/dist/adapters/kysely/index.mjs +183 -0
- package/dist/adapters/kysely/index.mjs.map +1 -0
- package/dist/adapters/kysely/node-sqlite-dialect.mjs +156 -0
- package/dist/adapters/kysely/node-sqlite-dialect.mjs.map +1 -0
- package/dist/adapters/mongodb/index.d.mts +35 -0
- package/dist/adapters/mongodb/index.mjs +313 -0
- package/dist/adapters/mongodb/index.mjs.map +1 -0
- package/dist/adapters/prisma/index.d.mts +34 -0
- package/dist/adapters/prisma/index.mjs +213 -0
- package/dist/adapters/prisma/index.mjs.map +1 -0
- package/dist/api/index.d.mts +23 -0
- package/dist/api/index.mjs +126 -0
- package/dist/api/index.mjs.map +1 -0
- package/dist/context/index.mjs +77 -0
- package/dist/context/index.mjs.map +1 -0
- package/dist/db/index.d.mts +3 -0
- package/dist/db/index.mjs +4 -0
- package/dist/db/migrations/index.d.mts +21 -0
- package/dist/db/migrations/index.mjs +327 -0
- package/dist/db/migrations/index.mjs.map +1 -0
- package/dist/db/schema/get-schema.d.mts +10 -0
- package/dist/db/schema/get-schema.mjs +39 -0
- package/dist/db/schema/get-schema.mjs.map +1 -0
- package/dist/index.d.mts +9 -0
- package/dist/index.mjs +7 -0
- package/dist/payment/base.mjs +38 -0
- package/dist/payment/base.mjs.map +1 -0
- package/dist/payment/index.d.mts +21 -0
- package/dist/payment/index.mjs +23 -0
- package/dist/payment/index.mjs.map +1 -0
- package/dist/types/payment.d.mts +11 -0
- package/dist/types/payment.mjs +1 -0
- package/dist/utils/is-promise.mjs +8 -0
- package/dist/utils/is-promise.mjs.map +1 -0
- package/dist/utils/url.mjs +77 -0
- package/dist/utils/url.mjs.map +1 -0
- package/package.json +183 -0

package/dist/adapters/drizzle/index.d.mts
@@ -0,0 +1,47 @@
+import { DBAdapter, DBAdapterDebugLogOption, PecuniaOptions } from "pecunia-core";
+
+//#region src/adapters/drizzle/index.d.ts
+interface DB {
+  [key: string]: any;
+}
+interface DrizzleAdapterConfig {
+  /**
+   * The schema object that defines the tables and fields
+   */
+  schema?: Record<string, any> | undefined;
+  /**
+   * The database provider
+   */
+  provider: "pg" | "mysql" | "sqlite";
+  /**
+   * If the table names in the schema are plural
+   * set this to true. For example, if the schema
+   * has an object with a key "users" instead of "user"
+   */
+  usePlural?: boolean | undefined;
+  /**
+   * Enable debug logs for the adapter
+   *
+   * @default false
+   */
+  debugLogs?: DBAdapterDebugLogOption | undefined;
+  /**
+   * By default snake case is used for table and field names
+   * when the CLI is used to generate the schema. If you want
+   * to use camel case, set this to true.
+   * @default false
+   */
+  camelCase?: boolean | undefined;
+  /**
+   * Whether to execute multiple operations in a transaction.
+   *
+   * If the database doesn't support transactions,
+   * set this to `false` and operations will be executed sequentially.
+   * @default false
+   */
+  transaction?: boolean | undefined;
+}
+declare const drizzleAdapter: (db: DB, config: DrizzleAdapterConfig) => (options: PecuniaOptions) => DBAdapter<PecuniaOptions>;
+//#endregion
+export { DB, DrizzleAdapterConfig, drizzleAdapter };
+//# sourceMappingURL=index.d.mts.map
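
For orientation, a minimal sketch of how the factory declared above is meant to be wired up. The Drizzle client, the generated `schema` module, and the `pecunia-root` import subpath are illustrative assumptions; the actual subpath exports live in package.json, which this excerpt does not expand.

```ts
import { Pool } from "pg";
import { drizzle } from "drizzle-orm/node-postgres";
// Illustrative import path; the real subpath export is defined in package.json (not shown above).
import { drizzleAdapter } from "pecunia-root/adapters/drizzle";
import * as schema from "./schema"; // hypothetical schema module generated for the Pecunia tables

const pool = new Pool({ connectionString: process.env.DATABASE_URL });
const db = drizzle(pool, { schema });

// drizzleAdapter(db, config) returns a factory; Pecunia later invokes it with its
// resolved options to obtain a DBAdapter<PecuniaOptions> (see index.mjs below).
const databaseAdapter = drizzleAdapter(db, {
  provider: "pg",     // "pg" | "mysql" | "sqlite"
  schema,             // optional; falls back to db._.fullSchema
  usePlural: false,   // table keys are singular ("user" rather than "users")
  transaction: true,  // wrap multi-step operations in db.transaction()
});
```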

package/dist/adapters/drizzle/index.mjs
@@ -0,0 +1,218 @@
+import { PecuniaError, createAdapterFactory, logger } from "pecunia-core";
+import { and, asc, count, desc, eq, gt, gte, inArray, like, lt, lte, ne, notInArray, or, sql } from "drizzle-orm";
+
+//#region src/adapters/drizzle/index.ts
+const drizzleAdapter = (db, config) => {
+  let lazyOptions = null;
+  const createCustomAdapter = (db$1) => ({ getFieldName, options }) => {
+    function getSchema(model) {
+      const schema = config.schema || db$1._.fullSchema;
+      if (!schema) throw new PecuniaError("Drizzle adapter failed to initialize. Schema not found. Please provide a schema object in the adapter options object.");
+      const schemaModel = schema[model];
+      if (!schemaModel) throw new PecuniaError(`[# Drizzle Adapter]: The model "${model}" was not found in the schema object. Please pass the schema directly to the adapter options.`);
+      return schemaModel;
+    }
+    const withReturning = async (model, builder, data, where) => {
+      if (config.provider !== "mysql") return (await builder.returning())[0];
+      await builder.execute();
+      const schemaModel = getSchema(model);
+      const builderVal = builder.config?.values;
+      if (where?.length) {
+        const clause = convertWhereClause(where.map((w) => {
+          if (data[w.field] !== void 0) return {
+            ...w,
+            value: data[w.field]
+          };
+          return w;
+        }), model);
+        return (await db$1.select().from(schemaModel).where(...clause))[0];
+      } else if (builderVal && builderVal[0]?.id?.value) {
+        let tId = builderVal[0]?.id?.value;
+        if (!tId) tId = (await db$1.select({ id: sql`LAST_INSERT_ID()` }).from(schemaModel).orderBy(desc(schemaModel.id)).limit(1))[0].id;
+        return (await db$1.select().from(schemaModel).where(eq(schemaModel.id, tId)).limit(1).execute())[0];
+      } else if (data.id) return (await db$1.select().from(schemaModel).where(eq(schemaModel.id, data.id)).limit(1).execute())[0];
+      else {
+        if (!("id" in schemaModel)) throw new PecuniaError(`The model "${model}" does not have an "id" field. Please use the "id" field as your primary key.`);
+        return (await db$1.select().from(schemaModel).orderBy(desc(schemaModel.id)).limit(1).execute())[0];
+      }
+    };
+    function convertWhereClause(where, model) {
+      const schemaModel = getSchema(model);
+      if (!where) return [];
+      if (where.length === 1) {
+        const w = where[0];
+        if (!w) return [];
+        const field = getFieldName({
+          model,
+          field: w.field
+        });
+        if (!schemaModel[field]) throw new PecuniaError(`The field "${w.field}" does not exist in the schema for the model "${model}". Please update your schema.`);
+        if (w.operator === "in") {
+          if (!Array.isArray(w.value)) throw new PecuniaError(`The value for the field "${w.field}" must be an array when using the "in" operator.`);
+          return [inArray(schemaModel[field], w.value)];
+        }
+        if (w.operator === "not_in") {
+          if (!Array.isArray(w.value)) throw new PecuniaError(`The value for the field "${w.field}" must be an array when using the "not_in" operator.`);
+          return [notInArray(schemaModel[field], w.value)];
+        }
+        if (w.operator === "contains") return [like(schemaModel[field], `%${w.value}%`)];
+        if (w.operator === "starts_with") return [like(schemaModel[field], `${w.value}%`)];
+        if (w.operator === "ends_with") return [like(schemaModel[field], `%${w.value}`)];
+        if (w.operator === "lt") return [lt(schemaModel[field], w.value)];
+        if (w.operator === "lte") return [lte(schemaModel[field], w.value)];
+        if (w.operator === "ne") return [ne(schemaModel[field], w.value)];
+        if (w.operator === "gt") return [gt(schemaModel[field], w.value)];
+        if (w.operator === "gte") return [gte(schemaModel[field], w.value)];
+        return [eq(schemaModel[field], w.value)];
+      }
+      const andGroup = where.filter((w) => w.connector === "AND" || !w.connector);
+      const orGroup = where.filter((w) => w.connector === "OR");
+      const andClause = and(...andGroup.map((w) => {
+        const field = getFieldName({
+          model,
+          field: w.field
+        });
+        if (w.operator === "in") {
+          if (!Array.isArray(w.value)) throw new PecuniaError(`The value for the field "${w.field}" must be an array when using the "in" operator.`);
+          return inArray(schemaModel[field], w.value);
+        }
+        if (w.operator === "not_in") {
+          if (!Array.isArray(w.value)) throw new PecuniaError(`The value for the field "${w.field}" must be an array when using the "not_in" operator.`);
+          return notInArray(schemaModel[field], w.value);
+        }
+        if (w.operator === "contains") return like(schemaModel[field], `%${w.value}%`);
+        if (w.operator === "starts_with") return like(schemaModel[field], `${w.value}%`);
+        if (w.operator === "ends_with") return like(schemaModel[field], `%${w.value}`);
+        if (w.operator === "lt") return lt(schemaModel[field], w.value);
+        if (w.operator === "lte") return lte(schemaModel[field], w.value);
+        if (w.operator === "gt") return gt(schemaModel[field], w.value);
+        if (w.operator === "gte") return gte(schemaModel[field], w.value);
+        if (w.operator === "ne") return ne(schemaModel[field], w.value);
+        return eq(schemaModel[field], w.value);
+      }));
+      const orClause = or(...orGroup.map((w) => {
+        const field = getFieldName({
+          model,
+          field: w.field
+        });
+        if (w.operator === "in") {
+          if (!Array.isArray(w.value)) throw new PecuniaError(`The value for the field "${w.field}" must be an array when using the "in" operator.`);
+          return inArray(schemaModel[field], w.value);
+        }
+        if (w.operator === "not_in") {
+          if (!Array.isArray(w.value)) throw new PecuniaError(`The value for the field "${w.field}" must be an array when using the "not_in" operator.`);
+          return notInArray(schemaModel[field], w.value);
+        }
+        if (w.operator === "contains") return like(schemaModel[field], `%${w.value}%`);
+        if (w.operator === "starts_with") return like(schemaModel[field], `${w.value}%`);
+        if (w.operator === "ends_with") return like(schemaModel[field], `%${w.value}`);
+        if (w.operator === "lt") return lt(schemaModel[field], w.value);
+        if (w.operator === "lte") return lte(schemaModel[field], w.value);
+        if (w.operator === "gt") return gt(schemaModel[field], w.value);
+        if (w.operator === "gte") return gte(schemaModel[field], w.value);
+        if (w.operator === "ne") return ne(schemaModel[field], w.value);
+        return eq(schemaModel[field], w.value);
+      }));
+      const clause = [];
+      if (andGroup.length) clause.push(andClause);
+      if (orGroup.length) clause.push(orClause);
+      return clause;
+    }
+    function checkMissingFields(schema, model, values) {
+      if (!schema) throw new PecuniaError("Drizzle adapter failed to initialize. Drizzle Schema not found. Please provide a schema object in the adapter options object.");
+      for (const key in values) if (!schema[key]) throw new PecuniaError(`The field "${key}" does not exist in the "${model}" Drizzle schema. Please update your drizzle schema or re-generate using "npx @pecunia/cli@latest generate".`);
+    }
+    return {
+      async create({ model, data: values }) {
+        const schemaModel = getSchema(model);
+        checkMissingFields(schemaModel, model, values);
+        return await withReturning(model, db$1.insert(schemaModel).values(values), values);
+      },
+      async findOne({ model, where }) {
+        const schemaModel = getSchema(model);
+        const clause = convertWhereClause(where, model);
+        const res = await db$1.select().from(schemaModel).where(...clause);
+        if (!res.length) return null;
+        return res[0];
+      },
+      async findMany({ model, where, sortBy, limit, offset }) {
+        const schemaModel = getSchema(model);
+        const clause = where ? convertWhereClause(where, model) : [];
+        const sortFn = sortBy?.direction === "desc" ? desc : asc;
+        let builder = db$1.select().from(schemaModel);
+        const effectiveLimit = limit;
+        const effectiveOffset = offset;
+        if (typeof effectiveLimit !== "undefined") builder = builder.limit(effectiveLimit);
+        if (typeof effectiveOffset !== "undefined") builder = builder.offset(effectiveOffset);
+        if (sortBy?.field) builder = builder.orderBy(sortFn(schemaModel[getFieldName({
+          model,
+          field: sortBy?.field
+        })]));
+        return await builder.where(...clause);
+      },
+      async count({ model, where }) {
+        const schemaModel = getSchema(model);
+        const clause = where ? convertWhereClause(where, model) : [];
+        return (await db$1.select({ count: count() }).from(schemaModel).where(...clause))[0].count;
+      },
+      async update({ model, where, update: values }) {
+        const schemaModel = getSchema(model);
+        const clause = convertWhereClause(where, model);
+        return await withReturning(model, db$1.update(schemaModel).set(values).where(...clause), values, where);
+      },
+      async updateMany({ model, where, update: values }) {
+        const schemaModel = getSchema(model);
+        const clause = convertWhereClause(where, model);
+        return await db$1.update(schemaModel).set(values).where(...clause);
+      },
+      async delete({ model, where }) {
+        const schemaModel = getSchema(model);
+        const clause = convertWhereClause(where, model);
+        return await db$1.delete(schemaModel).where(...clause);
+      },
+      async deleteMany({ model, where }) {
+        const schemaModel = getSchema(model);
+        const clause = convertWhereClause(where, model);
+        const res = await db$1.delete(schemaModel).where(...clause);
+        let count$1 = 0;
+        if (res && "rowCount" in res) count$1 = res.rowCount;
+        else if (Array.isArray(res)) count$1 = res.length;
+        else if (res && ("affectedRows" in res || "rowsAffected" in res || "changes" in res)) count$1 = res.affectedRows ?? res.rowsAffected ?? res.changes;
+        if (typeof count$1 !== "number") logger.error("[Drizzle Adapter] The result of the deleteMany operation is not a number. This is likely a bug in the adapter.", {
+          res,
+          model,
+          where
+        });
+        return count$1;
+      },
+      options: config
+    };
+  };
+  let adapterOptions = null;
+  adapterOptions = {
+    config: {
+      adapterId: "drizzle",
+      adapterName: "Drizzle Adapter",
+      usePlural: config.usePlural ?? false,
+      supportsUUIDs: config.provider === "pg" ? true : false,
+      supportsJSON: config.provider === "pg" ? true : false,
+      supportsArrays: config.provider === "pg" ? true : false,
+      transaction: config.transaction ?? false ? (cb) => db.transaction((tx) => {
+        return cb(createAdapterFactory({
+          config: adapterOptions.config,
+          adapter: createCustomAdapter(tx)
+        })(lazyOptions));
+      }) : false
+    },
+    adapter: createCustomAdapter(db)
+  };
+  const adapter = createAdapterFactory(adapterOptions);
+  return (options) => {
+    lazyOptions = options;
+    return adapter(options);
+  };
+};
+
+//#endregion
+export { drizzleAdapter };
+//# sourceMappingURL=index.mjs.map
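
To make the `convertWhereClause` mapping above easier to follow, here is a hedged sketch of the `where` array the CRUD methods consume, assuming the public DBAdapter surface mirrors the method names defined above. Only the operator and connector strings come from the code; the model and field names are invented.

```ts
// `adapter` is what drizzleAdapter(db, config)(options) returns.
// Entries with no connector (or connector: "AND") are combined with and();
// entries with connector: "OR" form a second, or()-combined clause.
const rows = await adapter.findMany({
  model: "payment",                                                   // illustrative model name
  where: [
    { field: "status", operator: "in", value: ["open", "pending"] },  // -> inArray(...)
    { field: "amount", operator: "gte", value: 1000 },                // -> gte(...)
    { field: "reference", operator: "contains", value: "INV-" },      // -> like('%INV-%')
  ],
  sortBy: { field: "createdAt", direction: "desc" },                  // -> orderBy(desc(...)) via getFieldName
  limit: 20,
  offset: 0,
});
```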

package/dist/adapters/drizzle/index.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"index.mjs","names":["lazyOptions: PecuniaOptions | null","db","clause: SQL<unknown>[]","count","adapterOptions: AdapterFactoryOptions | null"],"sources":["../../../src/adapters/drizzle/index.ts"],"sourcesContent":["import type { PecuniaOptions } from \"pecunia-core\"\nimport type {\n\tAdapterFactoryCreator,\n\tAdapterFactoryOptions,\n\tDBAdapter,\n\tDBAdapterDebugLogOption,\n\tWhere,\n} from \"pecunia-core\";\nimport { createAdapterFactory } from \"pecunia-core\"\nimport { logger } from \"pecunia-core\";\nimport { PecuniaError } from \"pecunia-core\";\nimport type { SQL } from \"drizzle-orm\";\nimport {\n\tand,\n\tasc,\n\tcount,\n\tdesc,\n\teq,\n\tgt,\n\tgte,\n\tinArray,\n\tlike,\n\tlt,\n\tlte,\n\tne,\n\tnotInArray,\n\tor,\n\tsql,\n} from \"drizzle-orm\";\n\nexport interface DB {\n\t[key: string]: any;\n}\n\nexport interface DrizzleAdapterConfig {\n\t/**\n\t * The schema object that defines the tables and fields\n\t */\n\tschema?: Record<string, any> | undefined;\n\t/**\n\t * The database provider\n\t */\n\tprovider: \"pg\" | \"mysql\" | \"sqlite\";\n\t/**\n\t * If the table names in the schema are plural\n\t * set this to true. For example, if the schema\n\t * has an object with a key \"users\" instead of \"user\"\n\t */\n\tusePlural?: boolean | undefined;\n\t/**\n\t * Enable debug logs for the adapter\n\t *\n\t * @default false\n\t */\n\tdebugLogs?: DBAdapterDebugLogOption | undefined;\n\t/**\n\t * By default snake case is used for table and field names\n\t * when the CLI is used to generate the schema. If you want\n\t * to use camel case, set this to true.\n\t * @default false\n\t */\n\tcamelCase?: boolean | undefined;\n\t/**\n\t * Whether to execute multiple operations in a transaction.\n\t *\n\t * If the database doesn't support transactions,\n\t * set this to `false` and operations will be executed sequentially.\n\t * @default false\n\t */\n\ttransaction?: boolean | undefined;\n}\n\nexport const drizzleAdapter = (db: DB, config: DrizzleAdapterConfig) => {\n\tlet lazyOptions: PecuniaOptions | null = null;\n\tconst createCustomAdapter =\n\t\t(db: DB): AdapterFactoryCreator =>\n\t\t({ getFieldName, options }) => {\n\t\t\tfunction getSchema(model: string) {\n\t\t\t\tconst schema = config.schema || db._.fullSchema;\n\t\t\t\tif (!schema) {\n\t\t\t\t\tthrow new PecuniaError(\n\t\t\t\t\t\t\"Drizzle adapter failed to initialize. Schema not found. Please provide a schema object in the adapter options object.\",\n\t\t\t\t\t);\n\t\t\t\t}\n\t\t\t\tconst schemaModel = schema[model];\n\t\t\t\tif (!schemaModel) {\n\t\t\t\t\tthrow new PecuniaError(\n\t\t\t\t\t\t`[# Drizzle Adapter]: The model \"${model}\" was not found in the schema object. 
Please pass the schema directly to the adapter options.`,\n\t\t\t\t\t);\n\t\t\t\t}\n\t\t\t\treturn schemaModel;\n\t\t\t}\n\t\t\tconst withReturning = async (\n\t\t\t\tmodel: string,\n\t\t\t\tbuilder: any,\n\t\t\t\tdata: Record<string, any>,\n\t\t\t\twhere?: Where[] | undefined,\n\t\t\t) => {\n\t\t\t\tif (config.provider !== \"mysql\") {\n\t\t\t\t\tconst c = await builder.returning();\n\t\t\t\t\treturn c[0];\n\t\t\t\t}\n\t\t\t\tawait builder.execute();\n\t\t\t\tconst schemaModel = getSchema(model);\n\t\t\t\tconst builderVal = builder.config?.values;\n\t\t\t\tif (where?.length) {\n\t\t\t\t\t// If we're updating a field that's in the where clause, use the new value\n\t\t\t\t\tconst updatedWhere = where.map((w) => {\n\t\t\t\t\t\t// If this field was updated, use the new value for lookup\n\t\t\t\t\t\tif (data[w.field] !== undefined) {\n\t\t\t\t\t\t\treturn { ...w, value: data[w.field] };\n\t\t\t\t\t\t}\n\t\t\t\t\t\treturn w;\n\t\t\t\t\t});\n\n\t\t\t\t\tconst clause = convertWhereClause(updatedWhere, model);\n\t\t\t\t\tconst res = await db\n\t\t\t\t\t\t.select()\n\t\t\t\t\t\t.from(schemaModel)\n\t\t\t\t\t\t.where(...clause);\n\t\t\t\t\treturn res[0];\n\t\t\t\t} else if (builderVal && builderVal[0]?.id?.value) {\n\t\t\t\t\tlet tId = builderVal[0]?.id?.value;\n\t\t\t\t\tif (!tId) {\n\t\t\t\t\t\t//get last inserted id\n\t\t\t\t\t\tconst lastInsertId = await db\n\t\t\t\t\t\t\t.select({ id: sql`LAST_INSERT_ID()` })\n\t\t\t\t\t\t\t.from(schemaModel)\n\t\t\t\t\t\t\t.orderBy(desc(schemaModel.id))\n\t\t\t\t\t\t\t.limit(1);\n\t\t\t\t\t\ttId = lastInsertId[0].id;\n\t\t\t\t\t}\n\t\t\t\t\tconst res = await db\n\t\t\t\t\t\t.select()\n\t\t\t\t\t\t.from(schemaModel)\n\t\t\t\t\t\t.where(eq(schemaModel.id, tId))\n\t\t\t\t\t\t.limit(1)\n\t\t\t\t\t\t.execute();\n\t\t\t\t\treturn res[0];\n\t\t\t\t} else if (data.id) {\n\t\t\t\t\tconst res = await db\n\t\t\t\t\t\t.select()\n\t\t\t\t\t\t.from(schemaModel)\n\t\t\t\t\t\t.where(eq(schemaModel.id, data.id))\n\t\t\t\t\t\t.limit(1)\n\t\t\t\t\t\t.execute();\n\t\t\t\t\treturn res[0];\n\t\t\t\t} else {\n\t\t\t\t\t// If the user doesn't have `id` as a field, then this will fail.\n\t\t\t\t\t// We expect that they defined `id` in all of their models.\n\t\t\t\t\tif (!(\"id\" in schemaModel)) {\n\t\t\t\t\t\tthrow new PecuniaError(\n\t\t\t\t\t\t\t`The model \"${model}\" does not have an \"id\" field. Please use the \"id\" field as your primary key.`,\n\t\t\t\t\t\t);\n\t\t\t\t\t}\n\t\t\t\t\tconst res = await db\n\t\t\t\t\t\t.select()\n\t\t\t\t\t\t.from(schemaModel)\n\t\t\t\t\t\t.orderBy(desc(schemaModel.id))\n\t\t\t\t\t\t.limit(1)\n\t\t\t\t\t\t.execute();\n\t\t\t\t\treturn res[0];\n\t\t\t\t}\n\t\t\t};\n\t\t\tfunction convertWhereClause(where: Where[], model: string) {\n\t\t\t\tconst schemaModel = getSchema(model);\n\t\t\t\tif (!where) return [];\n\t\t\t\tif (where.length === 1) {\n\t\t\t\t\tconst w = where[0];\n\t\t\t\t\tif (!w) {\n\t\t\t\t\t\treturn [];\n\t\t\t\t\t}\n\t\t\t\t\tconst field = getFieldName({ model, field: w.field });\n\t\t\t\t\tif (!schemaModel[field]) {\n\t\t\t\t\t\tthrow new PecuniaError(\n\t\t\t\t\t\t\t`The field \"${w.field}\" does not exist in the schema for the model \"${model}\". 
Please update your schema.`,\n\t\t\t\t\t\t);\n\t\t\t\t\t}\n\t\t\t\t\tif (w.operator === \"in\") {\n\t\t\t\t\t\tif (!Array.isArray(w.value)) {\n\t\t\t\t\t\t\tthrow new PecuniaError(\n\t\t\t\t\t\t\t\t`The value for the field \"${w.field}\" must be an array when using the \"in\" operator.`,\n\t\t\t\t\t\t\t);\n\t\t\t\t\t\t}\n\t\t\t\t\t\treturn [inArray(schemaModel[field], w.value)];\n\t\t\t\t\t}\n\n\t\t\t\t\tif (w.operator === \"not_in\") {\n\t\t\t\t\t\tif (!Array.isArray(w.value)) {\n\t\t\t\t\t\t\tthrow new PecuniaError(\n\t\t\t\t\t\t\t\t`The value for the field \"${w.field}\" must be an array when using the \"not_in\" operator.`,\n\t\t\t\t\t\t\t);\n\t\t\t\t\t\t}\n\t\t\t\t\t\treturn [notInArray(schemaModel[field], w.value)];\n\t\t\t\t\t}\n\n\t\t\t\t\tif (w.operator === \"contains\") {\n\t\t\t\t\t\treturn [like(schemaModel[field], `%${w.value}%`)];\n\t\t\t\t\t}\n\n\t\t\t\t\tif (w.operator === \"starts_with\") {\n\t\t\t\t\t\treturn [like(schemaModel[field], `${w.value}%`)];\n\t\t\t\t\t}\n\n\t\t\t\t\tif (w.operator === \"ends_with\") {\n\t\t\t\t\t\treturn [like(schemaModel[field], `%${w.value}`)];\n\t\t\t\t\t}\n\n\t\t\t\t\tif (w.operator === \"lt\") {\n\t\t\t\t\t\treturn [lt(schemaModel[field], w.value)];\n\t\t\t\t\t}\n\n\t\t\t\t\tif (w.operator === \"lte\") {\n\t\t\t\t\t\treturn [lte(schemaModel[field], w.value)];\n\t\t\t\t\t}\n\n\t\t\t\t\tif (w.operator === \"ne\") {\n\t\t\t\t\t\treturn [ne(schemaModel[field], w.value)];\n\t\t\t\t\t}\n\n\t\t\t\t\tif (w.operator === \"gt\") {\n\t\t\t\t\t\treturn [gt(schemaModel[field], w.value)];\n\t\t\t\t\t}\n\n\t\t\t\t\tif (w.operator === \"gte\") {\n\t\t\t\t\t\treturn [gte(schemaModel[field], w.value)];\n\t\t\t\t\t}\n\n\t\t\t\t\treturn [eq(schemaModel[field], w.value)];\n\t\t\t\t}\n\t\t\t\tconst andGroup = where.filter(\n\t\t\t\t\t(w) => w.connector === \"AND\" || !w.connector,\n\t\t\t\t);\n\t\t\t\tconst orGroup = where.filter((w) => w.connector === \"OR\");\n\n\t\t\t\tconst andClause = and(\n\t\t\t\t\t...andGroup.map((w) => {\n\t\t\t\t\t\tconst field = getFieldName({ model, field: w.field });\n\t\t\t\t\t\tif (w.operator === \"in\") {\n\t\t\t\t\t\t\tif (!Array.isArray(w.value)) {\n\t\t\t\t\t\t\t\tthrow new PecuniaError(\n\t\t\t\t\t\t\t\t\t`The value for the field \"${w.field}\" must be an array when using the \"in\" operator.`,\n\t\t\t\t\t\t\t\t);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\treturn inArray(schemaModel[field], w.value);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (w.operator === \"not_in\") {\n\t\t\t\t\t\t\tif (!Array.isArray(w.value)) {\n\t\t\t\t\t\t\t\tthrow new PecuniaError(\n\t\t\t\t\t\t\t\t\t`The value for the field \"${w.field}\" must be an array when using the \"not_in\" operator.`,\n\t\t\t\t\t\t\t\t);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\treturn notInArray(schemaModel[field], w.value);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (w.operator === \"contains\") {\n\t\t\t\t\t\t\treturn like(schemaModel[field], `%${w.value}%`);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (w.operator === \"starts_with\") {\n\t\t\t\t\t\t\treturn like(schemaModel[field], `${w.value}%`);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (w.operator === \"ends_with\") {\n\t\t\t\t\t\t\treturn like(schemaModel[field], `%${w.value}`);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (w.operator === \"lt\") {\n\t\t\t\t\t\t\treturn lt(schemaModel[field], w.value);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (w.operator === \"lte\") {\n\t\t\t\t\t\t\treturn lte(schemaModel[field], w.value);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (w.operator === \"gt\") {\n\t\t\t\t\t\t\treturn gt(schemaModel[field], w.value);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (w.operator === \"gte\") 
{\n\t\t\t\t\t\t\treturn gte(schemaModel[field], w.value);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (w.operator === \"ne\") {\n\t\t\t\t\t\t\treturn ne(schemaModel[field], w.value);\n\t\t\t\t\t\t}\n\t\t\t\t\t\treturn eq(schemaModel[field], w.value);\n\t\t\t\t\t}),\n\t\t\t\t);\n\t\t\t\tconst orClause = or(\n\t\t\t\t\t...orGroup.map((w) => {\n\t\t\t\t\t\tconst field = getFieldName({ model, field: w.field });\n\t\t\t\t\t\tif (w.operator === \"in\") {\n\t\t\t\t\t\t\tif (!Array.isArray(w.value)) {\n\t\t\t\t\t\t\t\tthrow new PecuniaError(\n\t\t\t\t\t\t\t\t\t`The value for the field \"${w.field}\" must be an array when using the \"in\" operator.`,\n\t\t\t\t\t\t\t\t);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\treturn inArray(schemaModel[field], w.value);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (w.operator === \"not_in\") {\n\t\t\t\t\t\t\tif (!Array.isArray(w.value)) {\n\t\t\t\t\t\t\t\tthrow new PecuniaError(\n\t\t\t\t\t\t\t\t\t`The value for the field \"${w.field}\" must be an array when using the \"not_in\" operator.`,\n\t\t\t\t\t\t\t\t);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\treturn notInArray(schemaModel[field], w.value);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (w.operator === \"contains\") {\n\t\t\t\t\t\t\treturn like(schemaModel[field], `%${w.value}%`);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (w.operator === \"starts_with\") {\n\t\t\t\t\t\t\treturn like(schemaModel[field], `${w.value}%`);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (w.operator === \"ends_with\") {\n\t\t\t\t\t\t\treturn like(schemaModel[field], `%${w.value}`);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (w.operator === \"lt\") {\n\t\t\t\t\t\t\treturn lt(schemaModel[field], w.value);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (w.operator === \"lte\") {\n\t\t\t\t\t\t\treturn lte(schemaModel[field], w.value);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (w.operator === \"gt\") {\n\t\t\t\t\t\t\treturn gt(schemaModel[field], w.value);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (w.operator === \"gte\") {\n\t\t\t\t\t\t\treturn gte(schemaModel[field], w.value);\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (w.operator === \"ne\") {\n\t\t\t\t\t\t\treturn ne(schemaModel[field], w.value);\n\t\t\t\t\t\t}\n\t\t\t\t\t\treturn eq(schemaModel[field], w.value);\n\t\t\t\t\t}),\n\t\t\t\t);\n\n\t\t\t\tconst clause: SQL<unknown>[] = [];\n\n\t\t\t\tif (andGroup.length) clause.push(andClause!);\n\t\t\t\tif (orGroup.length) clause.push(orClause!);\n\t\t\t\treturn clause;\n\t\t\t}\n\t\t\tfunction checkMissingFields(\n\t\t\t\tschema: Record<string, any>,\n\t\t\t\tmodel: string,\n\t\t\t\tvalues: Record<string, any>,\n\t\t\t) {\n\t\t\t\tif (!schema) {\n\t\t\t\t\tthrow new PecuniaError(\n\t\t\t\t\t\t\"Drizzle adapter failed to initialize. Drizzle Schema not found. Please provide a schema object in the adapter options object.\",\n\t\t\t\t\t);\n\t\t\t\t}\n\t\t\t\tfor (const key in values) {\n\t\t\t\t\tif (!schema[key]) {\n\t\t\t\t\t\tthrow new PecuniaError(\n\t\t\t\t\t\t\t`The field \"${key}\" does not exist in the \"${model}\" Drizzle schema. 
Please update your drizzle schema or re-generate using \"npx @pecunia/cli@latest generate\".`,\n\t\t\t\t\t\t);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn {\n\t\t\t\tasync create({ model, data: values }) {\n\t\t\t\t\tconst schemaModel = getSchema(model);\n\t\t\t\t\tcheckMissingFields(schemaModel, model, values);\n\t\t\t\t\tconst builder = db.insert(schemaModel).values(values);\n\t\t\t\t\tconst returned = await withReturning(model, builder, values);\n\t\t\t\t\treturn returned;\n\t\t\t\t},\n\t\t\t\tasync findOne({ model, where }) {\n\t\t\t\t\tconst schemaModel = getSchema(model);\n\t\t\t\t\tconst clause = convertWhereClause(where, model);\n\n\t\t\t\t\tlet query = db\n\t\t\t\t\t\t.select()\n\t\t\t\t\t\t.from(schemaModel)\n\t\t\t\t\t\t.where(...clause);\n\n\t\t\t\t\tconst res = await query;\n\n\t\t\t\t\tif (!res.length) return null;\n\t\t\t\t\treturn res[0];\n\t\t\t\t},\n\t\t\t\tasync findMany({ model, where, sortBy, limit, offset }) {\n\t\t\t\t\tconst schemaModel = getSchema(model);\n\t\t\t\t\tconst clause = where ? convertWhereClause(where, model) : [];\n\t\t\t\t\tconst sortFn = sortBy?.direction === \"desc\" ? desc : asc;\n\n\t\t\t\t\tlet builder = db.select().from(schemaModel);\n\n\t\t\t\t\tconst effectiveLimit = limit;\n\t\t\t\t\tconst effectiveOffset = offset;\n\n\t\t\t\t\tif (typeof effectiveLimit !== \"undefined\") {\n\t\t\t\t\t\tbuilder = builder.limit(effectiveLimit);\n\t\t\t\t\t}\n\n\t\t\t\t\tif (typeof effectiveOffset !== \"undefined\") {\n\t\t\t\t\t\tbuilder = builder.offset(effectiveOffset);\n\t\t\t\t\t}\n\n\t\t\t\t\tif (sortBy?.field) {\n\t\t\t\t\t\tbuilder = builder.orderBy(\n\t\t\t\t\t\t\tsortFn(\n\t\t\t\t\t\t\t\tschemaModel[getFieldName({ model, field: sortBy?.field })],\n\t\t\t\t\t\t\t),\n\t\t\t\t\t\t);\n\t\t\t\t\t}\n\n\t\t\t\t\tconst res = await builder.where(...clause);\n\t\t\t\t\treturn res;\n\t\t\t\t},\n\t\t\t\tasync count({ model, where }) {\n\t\t\t\t\tconst schemaModel = getSchema(model);\n\t\t\t\t\tconst clause = where ? 
convertWhereClause(where, model) : [];\n\t\t\t\t\tconst res = await db\n\t\t\t\t\t\t.select({ count: count() })\n\t\t\t\t\t\t.from(schemaModel)\n\t\t\t\t\t\t.where(...clause);\n\t\t\t\t\treturn res[0].count;\n\t\t\t\t},\n\t\t\t\tasync update({ model, where, update: values }) {\n\t\t\t\t\tconst schemaModel = getSchema(model);\n\t\t\t\t\tconst clause = convertWhereClause(where, model);\n\t\t\t\t\tconst builder = db\n\t\t\t\t\t\t.update(schemaModel)\n\t\t\t\t\t\t.set(values)\n\t\t\t\t\t\t.where(...clause);\n\t\t\t\t\treturn await withReturning(model, builder, values as any, where);\n\t\t\t\t},\n\t\t\t\tasync updateMany({ model, where, update: values }) {\n\t\t\t\t\tconst schemaModel = getSchema(model);\n\t\t\t\t\tconst clause = convertWhereClause(where, model);\n\t\t\t\t\tconst builder = db\n\t\t\t\t\t\t.update(schemaModel)\n\t\t\t\t\t\t.set(values)\n\t\t\t\t\t\t.where(...clause);\n\t\t\t\t\treturn await builder;\n\t\t\t\t},\n\t\t\t\tasync delete({ model, where }) {\n\t\t\t\t\tconst schemaModel = getSchema(model);\n\t\t\t\t\tconst clause = convertWhereClause(where, model);\n\t\t\t\t\tconst builder = db.delete(schemaModel).where(...clause);\n\t\t\t\t\treturn await builder;\n\t\t\t\t},\n\t\t\t\tasync deleteMany({ model, where }) {\n\t\t\t\t\tconst schemaModel = getSchema(model);\n\t\t\t\t\tconst clause = convertWhereClause(where, model);\n\t\t\t\t\tconst builder = db.delete(schemaModel).where(...clause);\n\t\t\t\t\tconst res = await builder;\n\t\t\t\t\tlet count = 0;\n\t\t\t\t\tif (res && \"rowCount\" in res) count = res.rowCount;\n\t\t\t\t\telse if (Array.isArray(res)) count = res.length;\n\t\t\t\t\telse if (\n\t\t\t\t\t\tres &&\n\t\t\t\t\t\t(\"affectedRows\" in res || \"rowsAffected\" in res || \"changes\" in res)\n\t\t\t\t\t)\n\t\t\t\t\t\tcount = res.affectedRows ?? res.rowsAffected ?? res.changes;\n\t\t\t\t\tif (typeof count !== \"number\") {\n\t\t\t\t\t\tlogger.error(\n\t\t\t\t\t\t\t\"[Drizzle Adapter] The result of the deleteMany operation is not a number. This is likely a bug in the adapter.\",\n\t\t\t\t\t\t\t{ res, model, where },\n\t\t\t\t\t\t);\n\t\t\t\t\t}\n\t\t\t\t\treturn count;\n\t\t\t\t},\n\t\t\t\toptions: config,\n\t\t\t};\n\t\t};\n\tlet adapterOptions: AdapterFactoryOptions | null = null;\n\tadapterOptions = {\n\t\tconfig: {\n\t\t\tadapterId: \"drizzle\",\n\t\t\tadapterName: \"Drizzle Adapter\",\n\t\t\tusePlural: config.usePlural ?? false,\n\t\t\tsupportsUUIDs: config.provider === \"pg\" ? true : false,\n\t\t\tsupportsJSON:\n\t\t\t\tconfig.provider === \"pg\" // even though mysql also supports it, mysql requires to pass stringified json anyway.\n\t\t\t\t\t? true\n\t\t\t\t\t: false,\n\t\t\tsupportsArrays: config.provider === \"pg\" ? true : false,\n\t\t\ttransaction:\n\t\t\t\t(config.transaction ?? false)\n\t\t\t\t\t? 
(cb) =>\n\t\t\t\t\t\t\tdb.transaction((tx: DB) => {\n\t\t\t\t\t\t\t\tconst adapter = createAdapterFactory({\n\t\t\t\t\t\t\t\t\tconfig: adapterOptions!.config,\n\t\t\t\t\t\t\t\t\tadapter: createCustomAdapter(tx),\n\t\t\t\t\t\t\t\t})(lazyOptions!);\n\t\t\t\t\t\t\t\treturn cb(adapter);\n\t\t\t\t\t\t\t})\n\t\t\t\t\t: false,\n\t\t},\n\t\tadapter: createCustomAdapter(db),\n\t};\n\tconst adapter = createAdapterFactory(adapterOptions);\n\treturn (options: PecuniaOptions): DBAdapter<PecuniaOptions> => {\n\t\tlazyOptions = options;\n\t\treturn adapter(options);\n\t};\n};\n"],"mappings":";;;;AAwEA,MAAa,kBAAkB,IAAQ,WAAiC;CACvE,IAAIA,cAAqC;CACzC,MAAM,uBACJ,UACA,EAAE,cAAc,cAAc;EAC9B,SAAS,UAAU,OAAe;GACjC,MAAM,SAAS,OAAO,UAAUC,KAAG,EAAE;AACrC,OAAI,CAAC,OACJ,OAAM,IAAI,aACT,wHACA;GAEF,MAAM,cAAc,OAAO;AAC3B,OAAI,CAAC,YACJ,OAAM,IAAI,aACT,mCAAmC,MAAM,+FACzC;AAEF,UAAO;;EAER,MAAM,gBAAgB,OACrB,OACA,SACA,MACA,UACI;AACJ,OAAI,OAAO,aAAa,QAEvB,SADU,MAAM,QAAQ,WAAW,EAC1B;AAEV,SAAM,QAAQ,SAAS;GACvB,MAAM,cAAc,UAAU,MAAM;GACpC,MAAM,aAAa,QAAQ,QAAQ;AACnC,OAAI,OAAO,QAAQ;IAUlB,MAAM,SAAS,mBARM,MAAM,KAAK,MAAM;AAErC,SAAI,KAAK,EAAE,WAAW,OACrB,QAAO;MAAE,GAAG;MAAG,OAAO,KAAK,EAAE;MAAQ;AAEtC,YAAO;MACN,EAE8C,MAAM;AAKtD,YAJY,MAAMA,KAChB,QAAQ,CACR,KAAK,YAAY,CACjB,MAAM,GAAG,OAAO,EACP;cACD,cAAc,WAAW,IAAI,IAAI,OAAO;IAClD,IAAI,MAAM,WAAW,IAAI,IAAI;AAC7B,QAAI,CAAC,IAOJ,QALqB,MAAMA,KACzB,OAAO,EAAE,IAAI,GAAG,oBAAoB,CAAC,CACrC,KAAK,YAAY,CACjB,QAAQ,KAAK,YAAY,GAAG,CAAC,CAC7B,MAAM,EAAE,EACS,GAAG;AAQvB,YANY,MAAMA,KAChB,QAAQ,CACR,KAAK,YAAY,CACjB,MAAM,GAAG,YAAY,IAAI,IAAI,CAAC,CAC9B,MAAM,EAAE,CACR,SAAS,EACA;cACD,KAAK,GAOf,SANY,MAAMA,KAChB,QAAQ,CACR,KAAK,YAAY,CACjB,MAAM,GAAG,YAAY,IAAI,KAAK,GAAG,CAAC,CAClC,MAAM,EAAE,CACR,SAAS,EACA;QACL;AAGN,QAAI,EAAE,QAAQ,aACb,OAAM,IAAI,aACT,cAAc,MAAM,+EACpB;AAQF,YANY,MAAMA,KAChB,QAAQ,CACR,KAAK,YAAY,CACjB,QAAQ,KAAK,YAAY,GAAG,CAAC,CAC7B,MAAM,EAAE,CACR,SAAS,EACA;;;EAGb,SAAS,mBAAmB,OAAgB,OAAe;GAC1D,MAAM,cAAc,UAAU,MAAM;AACpC,OAAI,CAAC,MAAO,QAAO,EAAE;AACrB,OAAI,MAAM,WAAW,GAAG;IACvB,MAAM,IAAI,MAAM;AAChB,QAAI,CAAC,EACJ,QAAO,EAAE;IAEV,MAAM,QAAQ,aAAa;KAAE;KAAO,OAAO,EAAE;KAAO,CAAC;AACrD,QAAI,CAAC,YAAY,OAChB,OAAM,IAAI,aACT,cAAc,EAAE,MAAM,gDAAgD,MAAM,+BAC5E;AAEF,QAAI,EAAE,aAAa,MAAM;AACxB,SAAI,CAAC,MAAM,QAAQ,EAAE,MAAM,CAC1B,OAAM,IAAI,aACT,4BAA4B,EAAE,MAAM,kDACpC;AAEF,YAAO,CAAC,QAAQ,YAAY,QAAQ,EAAE,MAAM,CAAC;;AAG9C,QAAI,EAAE,aAAa,UAAU;AAC5B,SAAI,CAAC,MAAM,QAAQ,EAAE,MAAM,CAC1B,OAAM,IAAI,aACT,4BAA4B,EAAE,MAAM,sDACpC;AAEF,YAAO,CAAC,WAAW,YAAY,QAAQ,EAAE,MAAM,CAAC;;AAGjD,QAAI,EAAE,aAAa,WAClB,QAAO,CAAC,KAAK,YAAY,QAAQ,IAAI,EAAE,MAAM,GAAG,CAAC;AAGlD,QAAI,EAAE,aAAa,cAClB,QAAO,CAAC,KAAK,YAAY,QAAQ,GAAG,EAAE,MAAM,GAAG,CAAC;AAGjD,QAAI,EAAE,aAAa,YAClB,QAAO,CAAC,KAAK,YAAY,QAAQ,IAAI,EAAE,QAAQ,CAAC;AAGjD,QAAI,EAAE,aAAa,KAClB,QAAO,CAAC,GAAG,YAAY,QAAQ,EAAE,MAAM,CAAC;AAGzC,QAAI,EAAE,aAAa,MAClB,QAAO,CAAC,IAAI,YAAY,QAAQ,EAAE,MAAM,CAAC;AAG1C,QAAI,EAAE,aAAa,KAClB,QAAO,CAAC,GAAG,YAAY,QAAQ,EAAE,MAAM,CAAC;AAGzC,QAAI,EAAE,aAAa,KAClB,QAAO,CAAC,GAAG,YAAY,QAAQ,EAAE,MAAM,CAAC;AAGzC,QAAI,EAAE,aAAa,MAClB,QAAO,CAAC,IAAI,YAAY,QAAQ,EAAE,MAAM,CAAC;AAG1C,WAAO,CAAC,GAAG,YAAY,QAAQ,EAAE,MAAM,CAAC;;GAEzC,MAAM,WAAW,MAAM,QACrB,MAAM,EAAE,cAAc,SAAS,CAAC,EAAE,UACnC;GACD,MAAM,UAAU,MAAM,QAAQ,MAAM,EAAE,cAAc,KAAK;GAEzD,MAAM,YAAY,IACjB,GAAG,SAAS,KAAK,MAAM;IACtB,MAAM,QAAQ,aAAa;KAAE;KAAO,OAAO,EAAE;KAAO,CAAC;AACrD,QAAI,EAAE,aAAa,MAAM;AACxB,SAAI,CAAC,MAAM,QAAQ,EAAE,MAAM,CAC1B,OAAM,IAAI,aACT,4BAA4B,EAAE,MAAM,kDACpC;AAEF,YAAO,QAAQ,YAAY,QAAQ,EAAE,MAAM;;AAE5C,QAAI,EAAE,aAAa,UAAU;AAC5B,SAAI,CAAC,MAAM,QAAQ,EAAE,MAAM,CAC1B,OAAM,IAAI,aACT,4BAA4B,EAAE,MAAM,sDACpC;AAEF,YAAO,WAAW,YAAY,QAAQ,EAAE,MAAM;;AAE/C,QA
AI,EAAE,aAAa,WAClB,QAAO,KAAK,YAAY,QAAQ,IAAI,EAAE,MAAM,GAAG;AAEhD,QAAI,EAAE,aAAa,cAClB,QAAO,KAAK,YAAY,QAAQ,GAAG,EAAE,MAAM,GAAG;AAE/C,QAAI,EAAE,aAAa,YAClB,QAAO,KAAK,YAAY,QAAQ,IAAI,EAAE,QAAQ;AAE/C,QAAI,EAAE,aAAa,KAClB,QAAO,GAAG,YAAY,QAAQ,EAAE,MAAM;AAEvC,QAAI,EAAE,aAAa,MAClB,QAAO,IAAI,YAAY,QAAQ,EAAE,MAAM;AAExC,QAAI,EAAE,aAAa,KAClB,QAAO,GAAG,YAAY,QAAQ,EAAE,MAAM;AAEvC,QAAI,EAAE,aAAa,MAClB,QAAO,IAAI,YAAY,QAAQ,EAAE,MAAM;AAExC,QAAI,EAAE,aAAa,KAClB,QAAO,GAAG,YAAY,QAAQ,EAAE,MAAM;AAEvC,WAAO,GAAG,YAAY,QAAQ,EAAE,MAAM;KACrC,CACF;GACD,MAAM,WAAW,GAChB,GAAG,QAAQ,KAAK,MAAM;IACrB,MAAM,QAAQ,aAAa;KAAE;KAAO,OAAO,EAAE;KAAO,CAAC;AACrD,QAAI,EAAE,aAAa,MAAM;AACxB,SAAI,CAAC,MAAM,QAAQ,EAAE,MAAM,CAC1B,OAAM,IAAI,aACT,4BAA4B,EAAE,MAAM,kDACpC;AAEF,YAAO,QAAQ,YAAY,QAAQ,EAAE,MAAM;;AAE5C,QAAI,EAAE,aAAa,UAAU;AAC5B,SAAI,CAAC,MAAM,QAAQ,EAAE,MAAM,CAC1B,OAAM,IAAI,aACT,4BAA4B,EAAE,MAAM,sDACpC;AAEF,YAAO,WAAW,YAAY,QAAQ,EAAE,MAAM;;AAE/C,QAAI,EAAE,aAAa,WAClB,QAAO,KAAK,YAAY,QAAQ,IAAI,EAAE,MAAM,GAAG;AAEhD,QAAI,EAAE,aAAa,cAClB,QAAO,KAAK,YAAY,QAAQ,GAAG,EAAE,MAAM,GAAG;AAE/C,QAAI,EAAE,aAAa,YAClB,QAAO,KAAK,YAAY,QAAQ,IAAI,EAAE,QAAQ;AAE/C,QAAI,EAAE,aAAa,KAClB,QAAO,GAAG,YAAY,QAAQ,EAAE,MAAM;AAEvC,QAAI,EAAE,aAAa,MAClB,QAAO,IAAI,YAAY,QAAQ,EAAE,MAAM;AAExC,QAAI,EAAE,aAAa,KAClB,QAAO,GAAG,YAAY,QAAQ,EAAE,MAAM;AAEvC,QAAI,EAAE,aAAa,MAClB,QAAO,IAAI,YAAY,QAAQ,EAAE,MAAM;AAExC,QAAI,EAAE,aAAa,KAClB,QAAO,GAAG,YAAY,QAAQ,EAAE,MAAM;AAEvC,WAAO,GAAG,YAAY,QAAQ,EAAE,MAAM;KACrC,CACF;GAED,MAAMC,SAAyB,EAAE;AAEjC,OAAI,SAAS,OAAQ,QAAO,KAAK,UAAW;AAC5C,OAAI,QAAQ,OAAQ,QAAO,KAAK,SAAU;AAC1C,UAAO;;EAER,SAAS,mBACR,QACA,OACA,QACC;AACD,OAAI,CAAC,OACJ,OAAM,IAAI,aACT,gIACA;AAEF,QAAK,MAAM,OAAO,OACjB,KAAI,CAAC,OAAO,KACX,OAAM,IAAI,aACT,cAAc,IAAI,2BAA2B,MAAM,8GACnD;;AAKJ,SAAO;GACN,MAAM,OAAO,EAAE,OAAO,MAAM,UAAU;IACrC,MAAM,cAAc,UAAU,MAAM;AACpC,uBAAmB,aAAa,OAAO,OAAO;AAG9C,WADiB,MAAM,cAAc,OADrBD,KAAG,OAAO,YAAY,CAAC,OAAO,OAAO,EACA,OAAO;;GAG7D,MAAM,QAAQ,EAAE,OAAO,SAAS;IAC/B,MAAM,cAAc,UAAU,MAAM;IACpC,MAAM,SAAS,mBAAmB,OAAO,MAAM;IAO/C,MAAM,MAAM,MALAA,KACV,QAAQ,CACR,KAAK,YAAY,CACjB,MAAM,GAAG,OAAO;AAIlB,QAAI,CAAC,IAAI,OAAQ,QAAO;AACxB,WAAO,IAAI;;GAEZ,MAAM,SAAS,EAAE,OAAO,OAAO,QAAQ,OAAO,UAAU;IACvD,MAAM,cAAc,UAAU,MAAM;IACpC,MAAM,SAAS,QAAQ,mBAAmB,OAAO,MAAM,GAAG,EAAE;IAC5D,MAAM,SAAS,QAAQ,cAAc,SAAS,OAAO;IAErD,IAAI,UAAUA,KAAG,QAAQ,CAAC,KAAK,YAAY;IAE3C,MAAM,iBAAiB;IACvB,MAAM,kBAAkB;AAExB,QAAI,OAAO,mBAAmB,YAC7B,WAAU,QAAQ,MAAM,eAAe;AAGxC,QAAI,OAAO,oBAAoB,YAC9B,WAAU,QAAQ,OAAO,gBAAgB;AAG1C,QAAI,QAAQ,MACX,WAAU,QAAQ,QACjB,OACC,YAAY,aAAa;KAAE;KAAO,OAAO,QAAQ;KAAO,CAAC,EACzD,CACD;AAIF,WADY,MAAM,QAAQ,MAAM,GAAG,OAAO;;GAG3C,MAAM,MAAM,EAAE,OAAO,SAAS;IAC7B,MAAM,cAAc,UAAU,MAAM;IACpC,MAAM,SAAS,QAAQ,mBAAmB,OAAO,MAAM,GAAG,EAAE;AAK5D,YAJY,MAAMA,KAChB,OAAO,EAAE,OAAO,OAAO,EAAE,CAAC,CAC1B,KAAK,YAAY,CACjB,MAAM,GAAG,OAAO,EACP,GAAG;;GAEf,MAAM,OAAO,EAAE,OAAO,OAAO,QAAQ,UAAU;IAC9C,MAAM,cAAc,UAAU,MAAM;IACpC,MAAM,SAAS,mBAAmB,OAAO,MAAM;AAK/C,WAAO,MAAM,cAAc,OAJXA,KACd,OAAO,YAAY,CACnB,IAAI,OAAO,CACX,MAAM,GAAG,OAAO,EACyB,QAAe,MAAM;;GAEjE,MAAM,WAAW,EAAE,OAAO,OAAO,QAAQ,UAAU;IAClD,MAAM,cAAc,UAAU,MAAM;IACpC,MAAM,SAAS,mBAAmB,OAAO,MAAM;AAK/C,WAAO,MAJSA,KACd,OAAO,YAAY,CACnB,IAAI,OAAO,CACX,MAAM,GAAG,OAAO;;GAGnB,MAAM,OAAO,EAAE,OAAO,SAAS;IAC9B,MAAM,cAAc,UAAU,MAAM;IACpC,MAAM,SAAS,mBAAmB,OAAO,MAAM;AAE/C,WAAO,MADSA,KAAG,OAAO,YAAY,CAAC,MAAM,GAAG,OAAO;;GAGxD,MAAM,WAAW,EAAE,OAAO,SAAS;IAClC,MAAM,cAAc,UAAU,MAAM;IACpC,MAAM,SAAS,mBAAmB,OAAO,MAAM;IAE/C,MAAM,MAAM,MADIA,KAAG,OAAO,YAAY,CAAC,MAAM,GAAG,OAAO;IAEvD,IAAIE,UAAQ;AACZ,QAAI,OAAO,cAAc,IAAK,WAAQ,IAAI;aACjC,MAAM,QAAQ,IAAI,CAAE,WAAQ,IAAI;aAExC,QACC,kBAAkB,OAAO,kBAAkB,OAAO,aAAa,KAEhE,WAAQ,IAAI,gBAAgB,IAAI,gBAAgB,I
AAI;AACrD,QAAI,OAAOA,YAAU,SACpB,QAAO,MACN,kHACA;KAAE;KAAK;KAAO;KAAO,CACrB;AAEF,WAAOA;;GAER,SAAS;GACT;;CAEH,IAAIC,iBAA+C;AACnD,kBAAiB;EAChB,QAAQ;GACP,WAAW;GACX,aAAa;GACb,WAAW,OAAO,aAAa;GAC/B,eAAe,OAAO,aAAa,OAAO,OAAO;GACjD,cACC,OAAO,aAAa,OACjB,OACA;GACJ,gBAAgB,OAAO,aAAa,OAAO,OAAO;GAClD,aACE,OAAO,eAAe,SACnB,OACD,GAAG,aAAa,OAAW;AAK1B,WAAO,GAJS,qBAAqB;KACpC,QAAQ,eAAgB;KACxB,SAAS,oBAAoB,GAAG;KAChC,CAAC,CAAC,YAAa,CACE;KACjB,GACF;GACJ;EACD,SAAS,oBAAoB,GAAG;EAChC;CACD,MAAM,UAAU,qBAAqB,eAAe;AACpD,SAAQ,YAAuD;AAC9D,gBAAc;AACd,SAAO,QAAQ,QAAQ"}

package/dist/adapters/get-adapter.d.mts
@@ -0,0 +1,7 @@
+import { DBAdapter, PecuniaOptions } from "pecunia-core";
+
+//#region src/adapters/get-adapter.d.ts
+declare function getAdapter(options: PecuniaOptions): Promise<DBAdapter<PecuniaOptions>>;
+//#endregion
+export { getAdapter };
+//# sourceMappingURL=get-adapter.d.mts.map

package/dist/adapters/get-adapter.mjs
@@ -0,0 +1,31 @@
+import { PecuniaError } from "pecunia-core";
+
+//#region src/adapters/get-adapter.ts
+async function getBaseAdapter(options, handleDirectDatabase) {
+  let adapter;
+  if (typeof options.database === "function") adapter = options.database(options);
+  else adapter = await handleDirectDatabase(options);
+  if (!adapter.transaction) {
+    console.warn("Adapter does not correctly implement transaction function, patching it automatically. Please update your adapter implementation.");
+    adapter.transaction = async (cb) => {
+      return cb(adapter);
+    };
+  }
+  return adapter;
+}
+async function getAdapter(options) {
+  return getBaseAdapter(options, async (opts) => {
+    const { createKyselyAdapter } = await import("./kysely/dialect.mjs");
+    const { kysely, databaseType, transaction } = await createKyselyAdapter(opts);
+    if (!kysely) throw new PecuniaError("Failed to initialize database adapter");
+    const { kyselyAdapter } = await import("./kysely/index.mjs");
+    return kyselyAdapter(kysely, {
+      type: databaseType || "sqlite",
+      transaction
+    })(opts);
+  });
+}
+
+//#endregion
+export { getAdapter };
+//# sourceMappingURL=get-adapter.mjs.map
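
A short sketch of the two `database` shapes that `getBaseAdapter` above distinguishes. `baseOptions`, `db`, and `schema` are placeholders for application-side values that this diff does not define.

```ts
// 1. `database` is a function: it is treated as an adapter factory and called
//    directly with the resolved options (e.g. the drizzleAdapter factory above).
const adapter = await getAdapter({
  ...baseOptions,
  database: drizzleAdapter(db, { provider: "pg", schema }),
});

// 2. `database` is anything else: createKyselyAdapter(opts) from ./kysely/dialect.mjs
//    (not shown in this excerpt) resolves a Kysely dialect, the type defaults to
//    "sqlite", and kyselyAdapter(kysely, { type, transaction }) wraps it.
// In both cases, an adapter without a transaction() method is patched with a
// pass-through implementation and a console warning.
```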

package/dist/adapters/get-adapter.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"get-adapter.mjs","names":["adapter: DBAdapter<PecuniaOptions>"],"sources":["../../src/adapters/get-adapter.ts"],"sourcesContent":["import type { PecuniaOptions } from \"pecunia-core\";\nimport { PecuniaError } from \"pecunia-core\";\nimport type { DBAdapter } from \"pecunia-core\";\n\nexport async function getBaseAdapter(\n options: PecuniaOptions,\n handleDirectDatabase: (\n options: PecuniaOptions,\n ) => Promise<DBAdapter<PecuniaOptions>>,\n): Promise<DBAdapter<PecuniaOptions>> {\n let adapter: DBAdapter<PecuniaOptions>;\n if (typeof options.database === \"function\") {\n adapter = options.database(options);\n } else {\n adapter = await handleDirectDatabase(options);\n }\n\n if (!adapter.transaction) {\n console.warn(\n \"Adapter does not correctly implement transaction function, patching it automatically. Please update your adapter implementation.\",\n );\n adapter.transaction = async (cb) => {\n return cb(adapter);\n };\n }\n\n return adapter;\n}\n\nexport async function getAdapter(\n options: PecuniaOptions,\n): Promise<DBAdapter<PecuniaOptions>> {\n return getBaseAdapter(options, async (opts) => {\n const { createKyselyAdapter } = await import(\"./kysely/dialect\");\n const { kysely, databaseType, transaction } =\n await createKyselyAdapter(opts);\n if (!kysely) {\n throw new PecuniaError(\"Failed to initialize database adapter\");\n }\n const { kyselyAdapter } = await import(\"./kysely\");\n return kyselyAdapter(kysely, {\n type: databaseType || \"sqlite\",\n transaction: transaction,\n })(opts);\n });\n}\n"],"mappings":";;;AAIA,eAAsB,eACpB,SACA,sBAGoC;CACpC,IAAIA;AACJ,KAAI,OAAO,QAAQ,aAAa,WAC9B,WAAU,QAAQ,SAAS,QAAQ;KAEnC,WAAU,MAAM,qBAAqB,QAAQ;AAG/C,KAAI,CAAC,QAAQ,aAAa;AACxB,UAAQ,KACN,mIACD;AACD,UAAQ,cAAc,OAAO,OAAO;AAClC,UAAO,GAAG,QAAQ;;;AAItB,QAAO;;AAGT,eAAsB,WACpB,SACoC;AACpC,QAAO,eAAe,SAAS,OAAO,SAAS;EAC7C,MAAM,EAAE,wBAAwB,MAAM,OAAO;EAC7C,MAAM,EAAE,QAAQ,cAAc,gBAC5B,MAAM,oBAAoB,KAAK;AACjC,MAAI,CAAC,OACH,OAAM,IAAI,aAAa,wCAAwC;EAEjE,MAAM,EAAE,kBAAkB,MAAM,OAAO;AACvC,SAAO,cAAc,QAAQ;GAC3B,MAAM,gBAAgB;GACT;GACd,CAAC,CAAC,KAAK;GACR"}

package/dist/adapters/internal/index.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"index.mjs","names":[],"sources":["../../../src/adapters/internal/index.ts"],"sourcesContent":["import type { PecuniaOptions, PecuniaContext } from \"pecunia-core\";\nimport type { InternalAdapter } from \"pecunia-core\";\nimport { getCurrentAdapter, runWithTransaction } from \"pecunia-core\";\nimport type { DBAdapter, Where } from \"pecunia-core\";\n// import type { InternalLogger } from \"@better-auth/core/env\";\nimport { safeJSONParse } from \"pecunia-core\";\n// import type { Account, Session, User, Verification } from \"../types\";\n// import { getDate } from \"../utils/date\";\n// import { getIp } from \"../utils/get-request-ip\";\n// import {\n// \tparseSessionInput,\n// \tparseSessionOutput,\n// \tparseUserOutput,\n// } from \"./schema\";\n\nexport const createInternalAdapter = (\n\tadapter: DBAdapter<PecuniaOptions>,\n\tctx: {\n\t\toptions: Omit<PecuniaOptions, \"logger\">;\n\t},\n): InternalAdapter => {\n\tconst options = ctx.options;\n return {\n \n }\n};\n"],"mappings":";;;AAeA,MAAa,yBACZ,SACA,QAGqB;AACL,KAAI;AACnB,QAAO,EAEP"}

package/dist/adapters/kysely/bun-sqlite-dialect.mjs
@@ -0,0 +1,156 @@
+import { CompiledQuery, DEFAULT_MIGRATION_LOCK_TABLE, DEFAULT_MIGRATION_TABLE, DefaultQueryCompiler, sql } from "kysely";
+
+//#region src/adapters/kysely/bun-sqlite-dialect.ts
+var BunSqliteAdapter = class {
+  get supportsCreateIfNotExists() {
+    return true;
+  }
+  get supportsTransactionalDdl() {
+    return false;
+  }
+  get supportsReturning() {
+    return true;
+  }
+  async acquireMigrationLock() {}
+  async releaseMigrationLock() {}
+  get supportsOutput() {
+    return true;
+  }
+};
+var BunSqliteDriver = class {
+  #config;
+  #connectionMutex = new ConnectionMutex();
+  #db;
+  #connection;
+  constructor(config) {
+    this.#config = { ...config };
+  }
+  async init() {
+    this.#db = this.#config.database;
+    this.#connection = new BunSqliteConnection(this.#db);
+    if (this.#config.onCreateConnection) await this.#config.onCreateConnection(this.#connection);
+  }
+  async acquireConnection() {
+    await this.#connectionMutex.lock();
+    return this.#connection;
+  }
+  async beginTransaction(connection) {
+    await connection.executeQuery(CompiledQuery.raw("begin"));
+  }
+  async commitTransaction(connection) {
+    await connection.executeQuery(CompiledQuery.raw("commit"));
+  }
+  async rollbackTransaction(connection) {
+    await connection.executeQuery(CompiledQuery.raw("rollback"));
+  }
+  async releaseConnection() {
+    this.#connectionMutex.unlock();
+  }
+  async destroy() {
+    this.#db?.close();
+  }
+};
+var BunSqliteConnection = class {
+  #db;
+  constructor(db) {
+    this.#db = db;
+  }
+  executeQuery(compiledQuery) {
+    const { sql: sql$1, parameters } = compiledQuery;
+    const stmt = this.#db.prepare(sql$1);
+    return Promise.resolve({ rows: stmt.all(parameters) });
+  }
+  async *streamQuery() {
+    throw new Error("Streaming query is not supported by SQLite driver.");
+  }
+};
+var ConnectionMutex = class {
+  #promise;
+  #resolve;
+  async lock() {
+    while (this.#promise) await this.#promise;
+    this.#promise = new Promise((resolve) => {
+      this.#resolve = resolve;
+    });
+  }
+  unlock() {
+    const resolve = this.#resolve;
+    this.#promise = void 0;
+    this.#resolve = void 0;
+    resolve?.();
+  }
+};
+var BunSqliteIntrospector = class {
+  #db;
+  constructor(db) {
+    this.#db = db;
+  }
+  async getSchemas() {
+    return [];
+  }
+  async getTables(options = { withInternalKyselyTables: false }) {
+    let query = this.#db.selectFrom("sqlite_schema").where("type", "=", "table").where("name", "not like", "sqlite_%").select("name").$castTo();
+    if (!options.withInternalKyselyTables) query = query.where("name", "!=", DEFAULT_MIGRATION_TABLE).where("name", "!=", DEFAULT_MIGRATION_LOCK_TABLE);
+    const tables = await query.execute();
+    return Promise.all(tables.map(({ name }) => this.#getTableMetadata(name)));
+  }
+  async getMetadata(options) {
+    return { tables: await this.getTables(options) };
+  }
+  async #getTableMetadata(table) {
+    const db = this.#db;
+    const autoIncrementCol = (await db.selectFrom("sqlite_master").where("name", "=", table).select("sql").$castTo().execute())[0]?.sql?.split(/[\(\),]/)?.find((it) => it.toLowerCase().includes("autoincrement"))?.split(/\s+/)?.[0]?.replace(/["`]/g, "");
+    return {
+      name: table,
+      columns: (await db.selectFrom(sql`pragma_table_info(${table})`.as("table_info")).select([
+        "name",
+        "type",
+        "notnull",
+        "dflt_value"
+      ]).execute()).map((col) => ({
+        name: col.name,
+        dataType: col.type,
+        isNullable: !col.notnull,
+        isAutoIncrementing: col.name === autoIncrementCol,
+        hasDefaultValue: col.dflt_value != null
+      })),
+      isView: true
+    };
+  }
+};
+var BunSqliteQueryCompiler = class extends DefaultQueryCompiler {
+  getCurrentParameterPlaceholder() {
+    return "?";
+  }
+  getLeftIdentifierWrapper() {
+    return "\"";
+  }
+  getRightIdentifierWrapper() {
+    return "\"";
+  }
+  getAutoIncrement() {
+    return "autoincrement";
+  }
+};
+var BunSqliteDialect = class {
+  #config;
+  constructor(config) {
+    this.#config = { ...config };
+  }
+  createDriver() {
+    return new BunSqliteDriver(this.#config);
+  }
+  createQueryCompiler() {
+    return new BunSqliteQueryCompiler();
+  }
+  createAdapter() {
+    return new BunSqliteAdapter();
+  }
+  createIntrospector(db) {
+    return new BunSqliteIntrospector(db);
+  }
+};
+
+//#endregion
+export { BunSqliteDialect };
+//# sourceMappingURL=bun-sqlite-dialect.mjs.map
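
BunSqliteDialect implements Kysely's Dialect interface (driver, query compiler, adapter, introspector), so it can be plugged straight into a Kysely instance under Bun. A minimal sketch; the dialect's import path and the table shape are assumptions, since the package's export map is not expanded in this excerpt.

```ts
import { Database } from "bun:sqlite";
import { Kysely } from "kysely";
// Illustrative path; the real subpath export is defined in package.json (not shown).
import { BunSqliteDialect } from "pecunia-root/dist/adapters/kysely/bun-sqlite-dialect.mjs";

interface Tables {
  payment: { id: string; amount: number; status: string }; // hypothetical table
}

const db = new Kysely<Tables>({
  dialect: new BunSqliteDialect({
    database: new Database(":memory:"),
    // Invoked once during driver init, before the first query runs.
    onCreateConnection: async () => { /* e.g. PRAGMA setup */ },
  }),
});

const open = await db
  .selectFrom("payment")
  .selectAll()
  .where("status", "=", "open")
  .execute();
```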

package/dist/adapters/kysely/bun-sqlite-dialect.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"bun-sqlite-dialect.mjs","names":["#config","#connectionMutex","#db","#connection","sql","#promise","#resolve","#getTableMetadata"],"sources":["../../../src/adapters/kysely/bun-sqlite-dialect.ts"],"sourcesContent":["/**\n * @see {@link https://github.com/dylanblokhuis/kysely-bun-sqlite} - Fork of the original kysely-bun-sqlite package by @dylanblokhuis\n */\n\nimport type { Database } from \"bun:sqlite\";\nimport type {\n DatabaseConnection,\n DatabaseIntrospector,\n DatabaseMetadata,\n DatabaseMetadataOptions,\n Dialect,\n DialectAdapter,\n DialectAdapterBase,\n Driver,\n Kysely,\n QueryCompiler,\n QueryResult,\n SchemaMetadata,\n TableMetadata,\n} from \"kysely\";\nimport {\n CompiledQuery,\n DEFAULT_MIGRATION_LOCK_TABLE,\n DEFAULT_MIGRATION_TABLE,\n DefaultQueryCompiler,\n sql,\n} from \"kysely\";\n\nclass BunSqliteAdapter implements DialectAdapterBase {\n get supportsCreateIfNotExists(): boolean {\n return true;\n }\n\n get supportsTransactionalDdl(): boolean {\n return false;\n }\n\n get supportsReturning(): boolean {\n return true;\n }\n\n async acquireMigrationLock(): Promise<void> {\n // SQLite only has one connection that's reserved by the migration system\n // for the whole time between acquireMigrationLock and releaseMigrationLock.\n // We don't need to do anything here.\n }\n\n async releaseMigrationLock(): Promise<void> {\n // SQLite only has one connection that's reserved by the migration system\n // for the whole time between acquireMigrationLock and releaseMigrationLock.\n // We don't need to do anything here.\n }\n get supportsOutput(): boolean {\n return true;\n }\n}\n\n/**\n * Config for the SQLite dialect.\n */\nexport interface BunSqliteDialectConfig {\n /**\n * An sqlite Database instance or a function that returns one.\n */\n database: Database;\n\n /**\n * Called once when the first query is executed.\n */\n onCreateConnection?:\n | ((connection: DatabaseConnection) => Promise<void>)\n | undefined;\n}\n\nclass BunSqliteDriver implements Driver {\n readonly #config: BunSqliteDialectConfig;\n readonly #connectionMutex = new ConnectionMutex();\n\n #db?: Database;\n #connection?: DatabaseConnection;\n\n constructor(config: BunSqliteDialectConfig) {\n this.#config = { ...config };\n }\n\n async init(): Promise<void> {\n this.#db = this.#config.database;\n\n this.#connection = new BunSqliteConnection(this.#db);\n\n if (this.#config.onCreateConnection) {\n await this.#config.onCreateConnection(this.#connection);\n }\n }\n\n async acquireConnection(): Promise<DatabaseConnection> {\n // SQLite only has one single connection. 
We use a mutex here to wait\n // until the single connection has been released.\n await this.#connectionMutex.lock();\n return this.#connection!;\n }\n\n async beginTransaction(connection: DatabaseConnection): Promise<void> {\n await connection.executeQuery(CompiledQuery.raw(\"begin\"));\n }\n\n async commitTransaction(connection: DatabaseConnection): Promise<void> {\n await connection.executeQuery(CompiledQuery.raw(\"commit\"));\n }\n\n async rollbackTransaction(connection: DatabaseConnection): Promise<void> {\n await connection.executeQuery(CompiledQuery.raw(\"rollback\"));\n }\n\n async releaseConnection(): Promise<void> {\n this.#connectionMutex.unlock();\n }\n\n async destroy(): Promise<void> {\n this.#db?.close();\n }\n}\n\nclass BunSqliteConnection implements DatabaseConnection {\n readonly #db: Database;\n\n constructor(db: Database) {\n this.#db = db;\n }\n\n executeQuery<O>(compiledQuery: CompiledQuery): Promise<QueryResult<O>> {\n const { sql, parameters } = compiledQuery;\n const stmt = this.#db.prepare(sql);\n\n return Promise.resolve({\n rows: stmt.all(parameters as any) as O[],\n });\n }\n\n async *streamQuery() {\n throw new Error(\"Streaming query is not supported by SQLite driver.\");\n }\n}\n\nclass ConnectionMutex {\n #promise?: Promise<void>;\n #resolve?: () => void;\n\n async lock(): Promise<void> {\n while (this.#promise) {\n await this.#promise;\n }\n\n this.#promise = new Promise((resolve) => {\n this.#resolve = resolve;\n });\n }\n\n unlock(): void {\n const resolve = this.#resolve;\n\n this.#promise = undefined;\n this.#resolve = undefined;\n\n resolve?.();\n }\n}\n\nclass BunSqliteIntrospector implements DatabaseIntrospector {\n readonly #db: Kysely<unknown>;\n\n constructor(db: Kysely<unknown>) {\n this.#db = db;\n }\n\n async getSchemas(): Promise<SchemaMetadata[]> {\n // Sqlite doesn't support schemas.\n return [];\n }\n\n async getTables(\n options: DatabaseMetadataOptions = { withInternalKyselyTables: false },\n ): Promise<TableMetadata[]> {\n let query = this.#db\n // @ts-expect-error\n .selectFrom(\"sqlite_schema\")\n // @ts-expect-error\n .where(\"type\", \"=\", \"table\")\n // @ts-expect-error\n .where(\"name\", \"not like\", \"sqlite_%\")\n .select(\"name\")\n .$castTo<{ name: string }>();\n\n if (!options.withInternalKyselyTables) {\n query = query\n // @ts-expect-error\n .where(\"name\", \"!=\", DEFAULT_MIGRATION_TABLE)\n // @ts-expect-error\n .where(\"name\", \"!=\", DEFAULT_MIGRATION_LOCK_TABLE);\n }\n\n const tables = await query.execute();\n return Promise.all(tables.map(({ name }) => this.#getTableMetadata(name)));\n }\n\n async getMetadata(\n options?: DatabaseMetadataOptions | undefined,\n ): Promise<DatabaseMetadata> {\n return {\n tables: await this.getTables(options),\n };\n }\n\n async #getTableMetadata(table: string): Promise<TableMetadata> {\n const db = this.#db;\n\n // Get the SQL that was used to create the table.\n const createSql = await db\n // @ts-expect-error\n .selectFrom(\"sqlite_master\")\n // @ts-expect-error\n .where(\"name\", \"=\", table)\n .select(\"sql\")\n .$castTo<{ sql: string | undefined }>()\n .execute();\n\n // Try to find the name of the column that has `autoincrement` 🤦\n const autoIncrementCol = createSql[0]?.sql\n ?.split(/[\\(\\),]/)\n ?.find((it) => it.toLowerCase().includes(\"autoincrement\"))\n ?.split(/\\s+/)?.[0]\n ?.replace(/[\"`]/g, \"\");\n\n const columns = await db\n .selectFrom(\n sql<{\n name: string;\n type: string;\n notnull: 0 | 1;\n dflt_value: any;\n 
}>`pragma_table_info(${table})`.as(\"table_info\"),\n )\n .select([\"name\", \"type\", \"notnull\", \"dflt_value\"])\n .execute();\n\n return {\n name: table,\n columns: columns.map((col) => ({\n name: col.name,\n dataType: col.type,\n isNullable: !col.notnull,\n isAutoIncrementing: col.name === autoIncrementCol,\n hasDefaultValue: col.dflt_value != null,\n })),\n isView: true,\n };\n }\n}\n\nclass BunSqliteQueryCompiler extends DefaultQueryCompiler {\n protected override getCurrentParameterPlaceholder() {\n return \"?\";\n }\n\n protected override getLeftIdentifierWrapper(): string {\n return '\"';\n }\n\n protected override getRightIdentifierWrapper(): string {\n return '\"';\n }\n\n protected override getAutoIncrement() {\n return \"autoincrement\";\n }\n}\n\nexport class BunSqliteDialect implements Dialect {\n readonly #config: BunSqliteDialectConfig;\n\n constructor(config: BunSqliteDialectConfig) {\n this.#config = { ...config };\n }\n\n createDriver(): Driver {\n return new BunSqliteDriver(this.#config);\n }\n\n createQueryCompiler(): QueryCompiler {\n return new BunSqliteQueryCompiler();\n }\n\n createAdapter(): DialectAdapter {\n return new BunSqliteAdapter();\n }\n\n createIntrospector(db: Kysely<any>): DatabaseIntrospector {\n return new BunSqliteIntrospector(db);\n }\n}\n"],"mappings":";;;AA4BA,IAAM,mBAAN,MAAqD;CACnD,IAAI,4BAAqC;AACvC,SAAO;;CAGT,IAAI,2BAAoC;AACtC,SAAO;;CAGT,IAAI,oBAA6B;AAC/B,SAAO;;CAGT,MAAM,uBAAsC;CAM5C,MAAM,uBAAsC;CAK5C,IAAI,iBAA0B;AAC5B,SAAO;;;AAqBX,IAAM,kBAAN,MAAwC;CACtC,CAASA;CACT,CAASC,kBAAmB,IAAI,iBAAiB;CAEjD;CACA;CAEA,YAAY,QAAgC;AAC1C,QAAKD,SAAU,EAAE,GAAG,QAAQ;;CAG9B,MAAM,OAAsB;AAC1B,QAAKE,KAAM,MAAKF,OAAQ;AAExB,QAAKG,aAAc,IAAI,oBAAoB,MAAKD,GAAI;AAEpD,MAAI,MAAKF,OAAQ,mBACf,OAAM,MAAKA,OAAQ,mBAAmB,MAAKG,WAAY;;CAI3D,MAAM,oBAAiD;AAGrD,QAAM,MAAKF,gBAAiB,MAAM;AAClC,SAAO,MAAKE;;CAGd,MAAM,iBAAiB,YAA+C;AACpE,QAAM,WAAW,aAAa,cAAc,IAAI,QAAQ,CAAC;;CAG3D,MAAM,kBAAkB,YAA+C;AACrE,QAAM,WAAW,aAAa,cAAc,IAAI,SAAS,CAAC;;CAG5D,MAAM,oBAAoB,YAA+C;AACvE,QAAM,WAAW,aAAa,cAAc,IAAI,WAAW,CAAC;;CAG9D,MAAM,oBAAmC;AACvC,QAAKF,gBAAiB,QAAQ;;CAGhC,MAAM,UAAyB;AAC7B,QAAKC,IAAK,OAAO;;;AAIrB,IAAM,sBAAN,MAAwD;CACtD,CAASA;CAET,YAAY,IAAc;AACxB,QAAKA,KAAM;;CAGb,aAAgB,eAAuD;EACrE,MAAM,EAAE,YAAK,eAAe;EAC5B,MAAM,OAAO,MAAKA,GAAI,QAAQE,MAAI;AAElC,SAAO,QAAQ,QAAQ,EACrB,MAAM,KAAK,IAAI,WAAkB,EAClC,CAAC;;CAGJ,OAAO,cAAc;AACnB,QAAM,IAAI,MAAM,qDAAqD;;;AAIzE,IAAM,kBAAN,MAAsB;CACpB;CACA;CAEA,MAAM,OAAsB;AAC1B,SAAO,MAAKC,QACV,OAAM,MAAKA;AAGb,QAAKA,UAAW,IAAI,SAAS,YAAY;AACvC,SAAKC,UAAW;IAChB;;CAGJ,SAAe;EACb,MAAM,UAAU,MAAKA;AAErB,QAAKD,UAAW;AAChB,QAAKC,UAAW;AAEhB,aAAW;;;AAIf,IAAM,wBAAN,MAA4D;CAC1D,CAASJ;CAET,YAAY,IAAqB;AAC/B,QAAKA,KAAM;;CAGb,MAAM,aAAwC;AAE5C,SAAO,EAAE;;CAGX,MAAM,UACJ,UAAmC,EAAE,0BAA0B,OAAO,EAC5C;EAC1B,IAAI,QAAQ,MAAKA,GAEd,WAAW,gBAAgB,CAE3B,MAAM,QAAQ,KAAK,QAAQ,CAE3B,MAAM,QAAQ,YAAY,WAAW,CACrC,OAAO,OAAO,CACd,SAA2B;AAE9B,MAAI,CAAC,QAAQ,yBACX,SAAQ,MAEL,MAAM,QAAQ,MAAM,wBAAwB,CAE5C,MAAM,QAAQ,MAAM,6BAA6B;EAGtD,MAAM,SAAS,MAAM,MAAM,SAAS;AACpC,SAAO,QAAQ,IAAI,OAAO,KAAK,EAAE,WAAW,MAAKK,iBAAkB,KAAK,CAAC,CAAC;;CAG5E,MAAM,YACJ,SAC2B;AAC3B,SAAO,EACL,QAAQ,MAAM,KAAK,UAAU,QAAQ,EACtC;;CAGH,OAAMA,iBAAkB,OAAuC;EAC7D,MAAM,KAAK,MAAKL;EAahB,MAAM,oBAVY,MAAM,GAErB,WAAW,gBAAgB,CAE3B,MAAM,QAAQ,KAAK,MAAM,CACzB,OAAO,MAAM,CACb,SAAsC,CACtC,SAAS,EAGuB,IAAI,KACnC,MAAM,UAAU,EAChB,MAAM,OAAO,GAAG,aAAa,CAAC,SAAS,gBAAgB,CAAC,EACxD,MAAM,MAAM,GAAG,IACf,QAAQ,SAAS,GAAG;AAcxB,SAAO;GACL,MAAM;GACN,UAdc,MAAM,GACnB,WACC,GAKE,qBAAqB,MAAM,GAAG,GAAG,aAAa,CACjD,CACA,OAAO;IAAC;IAAQ;IAAQ;IAAW;IAAa,CAAC,CACjD,SAAS,EAIO,KAAK,SAAS;IAC7B,MAAM,IAAI;IACV,UAAU,IAA
I;IACd,YAAY,CAAC,IAAI;IACjB,oBAAoB,IAAI,SAAS;IACjC,iBAAiB,IAAI,cAAc;IACpC,EAAE;GACH,QAAQ;GACT;;;AAIL,IAAM,yBAAN,cAAqC,qBAAqB;CACxD,AAAmB,iCAAiC;AAClD,SAAO;;CAGT,AAAmB,2BAAmC;AACpD,SAAO;;CAGT,AAAmB,4BAAoC;AACrD,SAAO;;CAGT,AAAmB,mBAAmB;AACpC,SAAO;;;AAIX,IAAa,mBAAb,MAAiD;CAC/C,CAASF;CAET,YAAY,QAAgC;AAC1C,QAAKA,SAAU,EAAE,GAAG,QAAQ;;CAG9B,eAAuB;AACrB,SAAO,IAAI,gBAAgB,MAAKA,OAAQ;;CAG1C,sBAAqC;AACnC,SAAO,IAAI,wBAAwB;;CAGrC,gBAAgC;AAC9B,SAAO,IAAI,kBAAkB;;CAG/B,mBAAmB,IAAuC;AACxD,SAAO,IAAI,sBAAsB,GAAG"}
@@ -0,0 +1,83 @@
+import { Kysely, MssqlDialect, MysqlDialect, PostgresDialect, SqliteDialect } from "kysely";
+
+//#region src/adapters/kysely/dialect.ts
+function isSqliteDatabase(x) {
+  return typeof x === "object" && x !== null && "prepare" in x;
+}
+function isSqliteDatabaseFactory(x) {
+  return typeof x === "function";
+}
+function isBunSqliteDatabase(x) {
+  return typeof x === "object" && x !== null && "fileControl" in x;
+}
+function getKyselyDatabaseType(db) {
+  if (!db) return null;
+  if ("dialect" in db) return getKyselyDatabaseType(db.dialect);
+  if ("createDriver" in db) {
+    if (db instanceof SqliteDialect) return "sqlite";
+    if (db instanceof MysqlDialect) return "mysql";
+    if (db instanceof PostgresDialect) return "postgres";
+    if (db instanceof MssqlDialect) return "mssql";
+  }
+  if ("aggregate" in db) return "sqlite";
+  if ("getConnection" in db) return "mysql";
+  if ("connect" in db) return "postgres";
+  if ("fileControl" in db) return "sqlite";
+  if ("open" in db && "close" in db && "prepare" in db) return "sqlite";
+  return null;
+}
+const createKyselyAdapter = async (config) => {
+  const db = config.database;
+  if (!db) return {
+    kysely: null,
+    databaseType: null,
+    transaction: void 0
+  };
+  if ("db" in db) return {
+    kysely: db.db,
+    databaseType: db.type,
+    transaction: db.transaction
+  };
+  if ("dialect" in db) return {
+    kysely: new Kysely({ dialect: db.dialect }),
+    databaseType: db.type,
+    transaction: db.transaction
+  };
+  let dialect = void 0;
+  const databaseType = getKyselyDatabaseType(db);
+  if ("createDriver" in db) dialect = db;
+  if ("aggregate" in db) if (isSqliteDatabase(db) || isSqliteDatabaseFactory(db)) dialect = new SqliteDialect({ database: db });
+  else throw new Error("db matched 'aggregate' check, but is not a Kysely SqliteDatabase");
+  if ("getConnection" in db) dialect = new MysqlDialect(db);
+  if ("connect" in db) dialect = new PostgresDialect({ pool: db });
+  if (isBunSqliteDatabase(db)) {
+    const { BunSqliteDialect } = await import("./bun-sqlite-dialect.mjs");
+    dialect = new BunSqliteDialect({ database: db });
+  }
+  if ("createSession" in db && typeof window === "undefined") {
+    let DatabaseSync = void 0;
+    try {
+      let nodeSqlite = "node:sqlite";
+      ({DatabaseSync} = await import(
+        /* @vite-ignore */
+        /* webpackIgnore: true */
+        nodeSqlite
+      ));
+    } catch (error) {
+      if (error !== null && typeof error === "object" && "code" in error && error.code !== "ERR_UNKNOWN_BUILTIN_MODULE") throw error;
+    }
+    if (DatabaseSync && db instanceof DatabaseSync) {
+      const { NodeSqliteDialect } = await import("./node-sqlite-dialect.mjs");
+      dialect = new NodeSqliteDialect({ database: db });
+    }
+  }
+  return {
+    kysely: dialect ? new Kysely({ dialect }) : null,
+    databaseType,
+    transaction: void 0
+  };
+};
+
+//#endregion
+export { createKyselyAdapter, getKyselyDatabaseType };
+//# sourceMappingURL=dialect.mjs.map