pecunia-root 0.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/adapters/drizzle/index.d.mts +47 -0
- package/dist/adapters/drizzle/index.mjs +218 -0
- package/dist/adapters/drizzle/index.mjs.map +1 -0
- package/dist/adapters/get-adapter.d.mts +7 -0
- package/dist/adapters/get-adapter.mjs +31 -0
- package/dist/adapters/get-adapter.mjs.map +1 -0
- package/dist/adapters/internal/index.mjs +11 -0
- package/dist/adapters/internal/index.mjs.map +1 -0
- package/dist/adapters/kysely/bun-sqlite-dialect.mjs +156 -0
- package/dist/adapters/kysely/bun-sqlite-dialect.mjs.map +1 -0
- package/dist/adapters/kysely/dialect.mjs +83 -0
- package/dist/adapters/kysely/dialect.mjs.map +1 -0
- package/dist/adapters/kysely/index.d.mts +34 -0
- package/dist/adapters/kysely/index.mjs +183 -0
- package/dist/adapters/kysely/index.mjs.map +1 -0
- package/dist/adapters/kysely/node-sqlite-dialect.mjs +156 -0
- package/dist/adapters/kysely/node-sqlite-dialect.mjs.map +1 -0
- package/dist/adapters/mongodb/index.d.mts +35 -0
- package/dist/adapters/mongodb/index.mjs +313 -0
- package/dist/adapters/mongodb/index.mjs.map +1 -0
- package/dist/adapters/prisma/index.d.mts +34 -0
- package/dist/adapters/prisma/index.mjs +213 -0
- package/dist/adapters/prisma/index.mjs.map +1 -0
- package/dist/api/index.d.mts +23 -0
- package/dist/api/index.mjs +126 -0
- package/dist/api/index.mjs.map +1 -0
- package/dist/context/index.mjs +77 -0
- package/dist/context/index.mjs.map +1 -0
- package/dist/db/index.d.mts +3 -0
- package/dist/db/index.mjs +4 -0
- package/dist/db/migrations/index.d.mts +21 -0
- package/dist/db/migrations/index.mjs +327 -0
- package/dist/db/migrations/index.mjs.map +1 -0
- package/dist/db/schema/get-schema.d.mts +10 -0
- package/dist/db/schema/get-schema.mjs +39 -0
- package/dist/db/schema/get-schema.mjs.map +1 -0
- package/dist/index.d.mts +9 -0
- package/dist/index.mjs +7 -0
- package/dist/payment/base.mjs +38 -0
- package/dist/payment/base.mjs.map +1 -0
- package/dist/payment/index.d.mts +21 -0
- package/dist/payment/index.mjs +23 -0
- package/dist/payment/index.mjs.map +1 -0
- package/dist/types/payment.d.mts +11 -0
- package/dist/types/payment.mjs +1 -0
- package/dist/utils/is-promise.mjs +8 -0
- package/dist/utils/is-promise.mjs.map +1 -0
- package/dist/utils/url.mjs +77 -0
- package/dist/utils/url.mjs.map +1 -0
- package/package.json +183 -0
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
import { hasRequestState, runWithEndpointContext, runWithRequestState } from "pecunia-core";
|
|
2
|
+
import { createDefu } from "defu";
|
|
3
|
+
import { createRouter, toResponse } from "better-call";
|
|
4
|
+
|
|
5
|
+
//#region src/api/index.ts
|
|
6
|
+
// Dead code left behind by bundling/tree-shaking: the original source bound
// this merger as `const defuReplaceArrays = createDefu(...)`, but the binding
// is unused in this module, so the created merge function is simply discarded.
// NOTE(review): `createDefu` only constructs a function (no side effects are
// visible here), so deleting this statement looks safe — confirm before
// cleaning up.
createDefu((obj, key, value) => {
	// Custom merger: replace arrays wholesale instead of defu's default
	// array concatenation. Returning true tells defu the key was handled.
	if (Array.isArray(obj[key]) && Array.isArray(value)) {
		obj[key] = value;
		return true;
	}
});
|
|
12
|
+
/**
 * Wraps raw endpoint definitions so that every invocation runs inside the
 * shared endpoint/request context with the (possibly async) auth context
 * resolved and attached.
 *
 * Each wrapped endpoint honors the caller's `asResponse`, `returnHeaders`
 * and `returnStatus` flags when shaping its return value, while the inner
 * endpoint is always executed with full result info requested.
 *
 * @param {Record<string, Function>} endpoints - endpoint name -> endpoint fn;
 *   each fn also carries `path` and `options` metadata that is copied onto
 *   the wrapper.
 * @param {object|Promise<object>} ctx - the Pecunia context or a promise of it.
 * @returns {Record<string, Function>} map of wrapped endpoints, same keys.
 */
function toAuthEndpoints(endpoints, ctx) {
	const api = {};
	for (const [key, endpoint] of Object.entries(endpoints)) {
		api[key] = async (context) => {
			const run = async () => {
				// ctx may be a promise; resolve it once per invocation.
				const authContext = await ctx;
				const internalContext = {
					...context,
					context: {
						...authContext,
						returned: void 0,
						responseHeaders: void 0,
						session: null
					},
					path: endpoint.path,
					headers: context?.headers ? new Headers(context?.headers) : void 0
				};
				return runWithEndpointContext(internalContext, async () => {
					// Force the raw endpoint to report headers/status; the
					// caller-facing shape is reconstructed below from the
					// caller's own flags.
					internalContext.asResponse = false;
					internalContext.returnHeaders = true;
					internalContext.returnStatus = true;
					// (Was followed by a no-op `.catch((e) => { throw e; })`
					// rethrow — removed; rejections propagate identically.)
					const result = await runWithEndpointContext(internalContext, () => endpoint(internalContext));
					// A raw Response skips all post-processing.
					// (`instanceof` is already false for null/undefined, so the
					// former `result &&` guard was redundant.)
					if (result instanceof Response) return result;
					internalContext.context.returned = result.response;
					internalContext.context.responseHeaders = result.headers;
					// Shape the return value per the caller's flags
					// (previously a 4-level nested ternary).
					if (context?.asResponse) return toResponse(result.response, {
						headers: result.headers,
						status: result.status
					});
					if (context?.returnHeaders && context?.returnStatus) return {
						headers: result.headers,
						response: result.response,
						status: result.status
					};
					if (context?.returnHeaders) return {
						headers: result.headers,
						response: result.response
					};
					if (context?.returnStatus) return {
						response: result.response,
						status: result.status
					};
					return result.response;
				});
			};
			// Reuse the ambient request state when present; otherwise run
			// inside a fresh one.
			if (await hasRequestState()) return run();
			else return runWithRequestState(new WeakMap(), run);
		};
		api[key].path = endpoint.path;
		api[key].options = endpoint.options;
	}
	return api;
}
|
|
63
|
+
/**
 * Collects endpoints and middlewares contributed by plugins.
 *
 * Endpoints from later plugins override earlier ones with the same key;
 * every middleware is wrapped so it receives the resolved auth context
 * merged into its call context.
 *
 * @param {object|Promise<object>} ctx - the Pecunia context (or promise).
 * @param {object} options - Pecunia options; only `options.plugins` is read.
 * @returns {{api: object, middlewares: Array<{path: string, middleware: Function}>}}
 */
function getEndpoints(ctx, options) {
	const plugins = options.plugins ?? [];

	// Later plugins win on key collisions, matching spread-merge order.
	const pluginEndpoints = {};
	for (const plugin of plugins) {
		Object.assign(pluginEndpoints, plugin.endpoints);
	}

	// Flatten each plugin's middleware list, skipping plugins without one.
	const middlewares = [];
	for (const plugin of plugins) {
		if (!plugin.middlewares) continue;
		for (const m of plugin.middlewares) {
			const middleware = async (context) => {
				const authContext = await ctx;
				return m.middleware({
					...context,
					context: {
						...authContext,
						...context.context
					}
				});
			};
			middleware.options = m.middleware.options;
			middlewares.push({
				path: m.path,
				middleware
			});
		}
	}

	return {
		api: toAuthEndpoints({ ...pluginEndpoints }, ctx),
		middlewares
	};
}
|
|
92
|
+
/**
 * Builds the better-call router for a Pecunia context: plugin endpoints,
 * plugin middlewares, and onRequest/onResponse hook chains.
 *
 * @param {object} ctx - resolved Pecunia context (must have `baseURL`).
 * @param {object} options - Pecunia options, forwarded to getEndpoints.
 */
const router = (ctx, options) => {
	const { api, middlewares } = getEndpoints(ctx, options);
	const basePath = new URL(ctx.baseURL).pathname;
	return createRouter(api, {
		routerContext: ctx,
		openapi: { disabled: true },
		basePath,
		routerMiddleware: [{
			path: "/**",
			middleware: []
		}, ...middlewares],
		allowedMediaTypes: ["application/json"],
		// Plugins may short-circuit with a response or swap the request.
		async onRequest(req) {
			let currentRequest = req;
			for (const plugin of ctx.options.plugins || []) {
				if (!plugin.onRequest) continue;
				const outcome = await plugin.onRequest(currentRequest, ctx);
				if (outcome && "response" in outcome) return outcome.response;
				if (outcome && "request" in outcome) currentRequest = outcome.request;
			}
			return currentRequest;
		},
		// First plugin to return an outcome replaces the response.
		async onResponse(res) {
			for (const plugin of ctx.options.plugins || []) {
				if (!plugin.onResponse) continue;
				const outcome = await plugin.onResponse(res, ctx);
				if (outcome) return outcome.response;
			}
			return res;
		},
		// Intentionally a no-op in this build; the error-logging body was
		// compiled out.
		onError(e) {}
	});
};
|
|
123
|
+
|
|
124
|
+
//#endregion
|
|
125
|
+
export { getEndpoints, router, toAuthEndpoints };
|
|
126
|
+
//# sourceMappingURL=index.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.mjs","names":["api: Record<\n\t\tstring,\n\t\t((\n\t\t\tcontext: EndpointContext<string, any> & InputContext<string, any>,\n\t\t) => Promise<any>) & {\n\t\t\tpath?: string | undefined;\n\t\t\toptions?: EndpointOptions | undefined;\n\t\t}\n\t>","internalContext: InternalContext"],"sources":["../../src/api/index.ts"],"sourcesContent":["import { type PecuniaEndpoint, type Awaitable, type PecuniaContext, type PecuniaOptions, type PecuniaMiddleware, runWithEndpointContext, type PecuniaPlugin, hasRequestState, runWithRequestState } from \"pecunia-core\";\nimport type {\n Endpoint,\n\tEndpointContext,\n\tEndpointOptions,\n\tInputContext,\n Middleware,\n UnionToIntersection,\n} from \"better-call\";\nimport { createRouter, toResponse } from \"better-call\";\nimport { createDefu } from \"defu\";\n\ntype InternalContext = Partial<\n\tInputContext<string, any> & EndpointContext<string, any>\n> & {\n\tpath: string;\n\tasResponse?: boolean | undefined;\n\tcontext: PecuniaContext & {\n\t\t// logger: PecuniaContext[\"logger\"];\n\t\treturned?: unknown | undefined;\n\t\tresponseHeaders?: Headers | undefined;\n\t};\n};\n\nconst defuReplaceArrays = createDefu((obj, key, value) => {\n\tif (Array.isArray(obj[key]) && Array.isArray(value)) {\n\t\tobj[key] = value;\n\t\treturn true;\n\t}\n});\n\nconst hooksSourceWeakMap = new WeakMap<\n\tPecuniaMiddleware,\n\t`user` | `plugin:${string}`\n>();\n\ntype UserInputContext = Partial<\n\tInputContext<string, any> & EndpointContext<string, any>\n >;\n\nexport function toAuthEndpoints<\n\tconst E extends Record<\n\t\tstring,\n\t\tOmit<PecuniaEndpoint<string, EndpointOptions, any>, \"wrap\">\n\t>,\n>(endpoints: E, ctx: PecuniaContext | Promise<PecuniaContext>): E {\n\tconst api: Record<\n\t\tstring,\n\t\t((\n\t\t\tcontext: EndpointContext<string, any> & InputContext<string, any>,\n\t\t) => Promise<any>) & {\n\t\t\tpath?: string | undefined;\n\t\t\toptions?: EndpointOptions | undefined;\n\t\t}\n\t> = {};\n\n\tfor (const 
[key, endpoint] of Object.entries(endpoints)) {\n\t\tapi[key] = async (context?: UserInputContext) => {\n\t\t\tconst run = async () => {\n\t\t\t\tconst authContext = await ctx;\n\t\t\t\tlet internalContext: InternalContext = {\n\t\t\t\t\t...context,\n\t\t\t\t\tcontext: {\n\t\t\t\t\t\t...authContext,\n\t\t\t\t\t\treturned: undefined,\n\t\t\t\t\t\tresponseHeaders: undefined,\n\t\t\t\t\t\tsession: null,\n\t\t\t\t\t},\n\t\t\t\t\tpath: endpoint.path,\n\t\t\t\t\theaders: context?.headers ? new Headers(context?.headers) : undefined,\n\t\t\t\t};\n\t\t\t\treturn runWithEndpointContext(internalContext, async () => {\n\t\t\t\t\tinternalContext.asResponse = false;\n\t\t\t\t\tinternalContext.returnHeaders = true;\n\t\t\t\t\tinternalContext.returnStatus = true;\n\t\t\t\t\tconst result = (await runWithEndpointContext(internalContext, () =>\n\t\t\t\t\t\t(endpoint as any)(internalContext as any),\n\t\t\t\t\t).catch((e: any) => {\n\t\t\t\t\t\t// if (isAPIError(e)) {\n\t\t\t\t\t\t// \t/**\n\t\t\t\t\t\t// \t * API Errors from response are caught\n\t\t\t\t\t\t// \t * and returned to hooks\n\t\t\t\t\t\t// \t */\n\t\t\t\t\t\t// \treturn {\n\t\t\t\t\t\t// \t\tresponse: e,\n\t\t\t\t\t\t// \t\tstatus: e.statusCode,\n\t\t\t\t\t\t// \t\theaders: e.headers ? 
new Headers(e.headers) : null,\n\t\t\t\t\t\t// \t};\n\t\t\t\t\t\t// }\n\t\t\t\t\t\tthrow e;\n\t\t\t\t\t})) as {\n\t\t\t\t\t\theaders: Headers;\n\t\t\t\t\t\tresponse: any;\n\t\t\t\t\t\tstatus: number;\n\t\t\t\t\t};\n\n\t\t\t\t\t//if response object is returned we skip after hooks and post processing\n\t\t\t\t\tif (result && result instanceof Response) {\n\t\t\t\t\t\treturn result;\n\t\t\t\t\t}\n\n\t\t\t\t\tinternalContext.context.returned = result.response;\n\t\t\t\t\tinternalContext.context.responseHeaders = result.headers;\n\n\t\t\t\t\t// const after = await runAfterHooks(internalContext, afterHooks);\n\n\t\t\t\t\t// if (after.response) {\n\t\t\t\t\t// \tresult.response = after.response;\n\t\t\t\t\t// }\n\n\t\t\t\t\t// if (\n\t\t\t\t\t// \tisAPIError(result.response) &&\n\t\t\t\t\t// \tshouldPublishLog(authContext.logger.level, \"debug\")\n\t\t\t\t\t// ) {\n\t\t\t\t\t// \t// inherit stack from errorStack if debug mode is enabled\n\t\t\t\t\t// \tresult.response.stack = result.response.errorStack;\n\t\t\t\t\t// }\n\n\t\t\t\t\t// if (isAPIError(result.response) && !context?.asResponse) {\n\t\t\t\t\t// \tthrow result.response;\n\t\t\t\t\t// }\n\n\t\t\t\t\tconst response = context?.asResponse\n\t\t\t\t\t\t? toResponse(result.response, {\n\t\t\t\t\t\t\t\theaders: result.headers,\n\t\t\t\t\t\t\t\tstatus: result.status,\n\t\t\t\t\t\t\t})\n\t\t\t\t\t\t: context?.returnHeaders\n\t\t\t\t\t\t\t? context?.returnStatus\n\t\t\t\t\t\t\t\t? {\n\t\t\t\t\t\t\t\t\t\theaders: result.headers,\n\t\t\t\t\t\t\t\t\t\tresponse: result.response,\n\t\t\t\t\t\t\t\t\t\tstatus: result.status,\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t: {\n\t\t\t\t\t\t\t\t\t\theaders: result.headers,\n\t\t\t\t\t\t\t\t\t\tresponse: result.response,\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t: context?.returnStatus\n\t\t\t\t\t\t\t\t? 
{ response: result.response, status: result.status }\n\t\t\t\t\t\t\t\t: result.response;\n\t\t\t\t\treturn response;\n\t\t\t\t});\n\t\t\t};\n\t\t\tif (await hasRequestState()) {\n\t\t\t\treturn run();\n\t\t\t} else {\n\t\t\t\tconst store = new WeakMap();\n\t\t\t\treturn runWithRequestState(store, run);\n\t\t\t}\n\t\t};\n\t\tapi[key].path = endpoint.path;\n\t\tapi[key].options = endpoint.options;\n\t}\n\treturn api as unknown as E;\n}\nexport function getEndpoints<Option extends PecuniaOptions>(\n\tctx: Awaitable<PecuniaContext>,\n\toptions: Option,\n) {\n\tconst pluginEndpoints =\n\t\toptions.plugins?.reduce<Record<string, Endpoint>>((acc, plugin) => {\n\t\t\treturn {\n\t\t\t\t...acc,\n\t\t\t\t...plugin.endpoints,\n\t\t\t};\n\t\t}, {}) ?? {};\n\n\ttype PluginEndpoint = UnionToIntersection<\n\t\tOption[\"plugins\"] extends Array<infer T>\n\t\t\t? T extends PecuniaPlugin\n\t\t\t\t? T extends {\n\t\t\t\t\t\tendpoints: infer E;\n\t\t\t\t\t}\n\t\t\t\t\t? E\n\t\t\t\t\t: {}\n\t\t\t\t: {}\n\t\t\t: {}\n\t>;\n\n\tconst middlewares =\n\t\toptions.plugins\n\t\t\t?.map((plugin) =>\n\t\t\t\tplugin.middlewares?.map((m) => {\n\t\t\t\t\tconst middleware = (async (context: any) => {\n\t\t\t\t\t\tconst authContext = await ctx;\n\t\t\t\t\t\treturn m.middleware({\n\t\t\t\t\t\t\t...context,\n\t\t\t\t\t\t\tcontext: {\n\t\t\t\t\t\t\t\t...authContext,\n\t\t\t\t\t\t\t\t...context.context,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t});\n\t\t\t\t\t}) as Middleware;\n\t\t\t\t\tmiddleware.options = m.middleware.options;\n\t\t\t\t\treturn {\n\t\t\t\t\t\tpath: m.path,\n\t\t\t\t\t\tmiddleware,\n\t\t\t\t\t};\n\t\t\t\t}),\n\t\t\t)\n\t\t\t.filter((plugin) => plugin !== undefined)\n\t\t\t.flat() || [];\n\n\tconst baseEndpoints = {};\n\tconst endpoints = {\n\t\t...baseEndpoints,\n\t\t...pluginEndpoints,\n\t} as const;\n\tconst api = toAuthEndpoints(endpoints, ctx);\n\treturn {\n\t\tapi: api as typeof endpoints & PluginEndpoint,\n\t\tmiddlewares,\n\t};\n}\n\nexport const router = <Option extends 
PecuniaOptions>(\n\tctx: PecuniaContext,\n\toptions: Option,\n) => {\n\tconst { api, middlewares } = getEndpoints(ctx, options);\n\tconst basePath = new URL(ctx.baseURL).pathname;\n\n\treturn createRouter(api, {\n\t\trouterContext: ctx,\n\t\topenapi: {\n\t\t\tdisabled: true,\n\t\t},\n\t\tbasePath,\n\t\trouterMiddleware: [\n\t\t\t{\n\t\t\t\tpath: \"/**\",\n\t\t\t\tmiddleware: [],\n\t\t\t},\n\t\t\t...middlewares,\n\t\t],\n\t\tallowedMediaTypes: [\"application/json\"],\n\t\tasync onRequest(req) {\n\t\t\tlet currentRequest = req;\n\t\t\tfor (const plugin of ctx.options.plugins || []) {\n\t\t\t\tif (plugin.onRequest) {\n\t\t\t\t\tconst response = await plugin.onRequest(currentRequest, ctx);\n\t\t\t\t\tif (response && \"response\" in response) {\n\t\t\t\t\t\treturn response.response;\n\t\t\t\t\t}\n\t\t\t\t\tif (response && \"request\" in response) {\n\t\t\t\t\t\tcurrentRequest = response.request;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// const rateLimitResponse = await onRequestRateLimit(currentRequest, ctx);\n\t\t\t// if (rateLimitResponse) {\n\t\t\t// \treturn rateLimitResponse;\n\t\t\t// }\n\n\t\t\treturn currentRequest;\n\t\t},\n\t\tasync onResponse(res) {\n\t\t\tfor (const plugin of ctx.options.plugins || []) {\n\t\t\t\tif (plugin.onResponse) {\n\t\t\t\t\tconst response = await plugin.onResponse(res, ctx);\n\t\t\t\t\tif (response) {\n\t\t\t\t\t\treturn response.response;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn res;\n\t\t},\n\t\tonError(e) {\n\t\t\t// if (isAPIError(e) && e.status === \"FOUND\") {\n\t\t\t// \treturn;\n\t\t\t// }\n\n\t\t\t// const optLogLevel = options.logger?.level;\n\t\t\t// const log =\n\t\t\t// \toptLogLevel === \"error\" ||\n\t\t\t// \toptLogLevel === \"warn\" ||\n\t\t\t// \toptLogLevel === \"debug\"\n\t\t\t// \t\t? 
logger\n\t\t\t// \t\t: undefined;\n\t\t\t// if (options.logger?.disabled !== true) {\n\t\t\t// \tif (\n\t\t\t// \t\te &&\n\t\t\t// \t\ttypeof e === \"object\" &&\n\t\t\t// \t\t\"message\" in e &&\n\t\t\t// \t\ttypeof e.message === \"string\"\n\t\t\t// \t) {\n\t\t\t// \t\tif (\n\t\t\t// \t\t\te.message.includes(\"no column\") ||\n\t\t\t// \t\t\te.message.includes(\"column\") ||\n\t\t\t// \t\t\te.message.includes(\"relation\") ||\n\t\t\t// \t\t\te.message.includes(\"table\") ||\n\t\t\t// \t\t\te.message.includes(\"does not exist\")\n\t\t\t// \t\t) {\n\t\t\t// \t\t\tctx.logger?.error(e.message);\n\t\t\t// \t\t\treturn;\n\t\t\t// \t\t}\n\t\t\t// \t}\n\n\t\t\t// \tif (isAPIError(e)) {\n\t\t\t// \t\tif (e.status === \"INTERNAL_SERVER_ERROR\") {\n\t\t\t// \t\t\tctx.logger.error(e.status, e);\n\t\t\t// \t\t}\n\t\t\t// \t\tlog?.error(e.message);\n\t\t\t// \t} else {\n\t\t\t// \t\tctx.logger?.error(\n\t\t\t// \t\t\te && typeof e === \"object\" && \"name\" in e ? (e.name as string) : \"\",\n\t\t\t// \t\t\te,\n\t\t\t// \t\t);\n\t\t\t// \t}\n\t\t\t// 
}\n\t\t},\n\t});\n};"],"mappings":";;;;;AAwB0B,YAAY,KAAK,KAAK,UAAU;AACzD,KAAI,MAAM,QAAQ,IAAI,KAAK,IAAI,MAAM,QAAQ,MAAM,EAAE;AACpD,MAAI,OAAO;AACX,SAAO;;EAEP;AAWF,SAAgB,gBAKd,WAAc,KAAkD;CACjE,MAAMA,MAQF,EAAE;AAEN,MAAK,MAAM,CAAC,KAAK,aAAa,OAAO,QAAQ,UAAU,EAAE;AACxD,MAAI,OAAO,OAAO,YAA+B;GAChD,MAAM,MAAM,YAAY;IACvB,MAAM,cAAc,MAAM;IAC1B,IAAIC,kBAAmC;KACtC,GAAG;KACH,SAAS;MACR,GAAG;MACH,UAAU;MACV,iBAAiB;MACjB,SAAS;MACT;KACD,MAAM,SAAS;KACf,SAAS,SAAS,UAAU,IAAI,QAAQ,SAAS,QAAQ,GAAG;KAC5D;AACD,WAAO,uBAAuB,iBAAiB,YAAY;AAC1D,qBAAgB,aAAa;AAC7B,qBAAgB,gBAAgB;AAChC,qBAAgB,eAAe;KAC/B,MAAM,SAAU,MAAM,uBAAuB,uBAC3C,SAAiB,gBAAuB,CACzC,CAAC,OAAO,MAAW;AAYnB,YAAM;OACL;AAOF,SAAI,UAAU,kBAAkB,SAC/B,QAAO;AAGR,qBAAgB,QAAQ,WAAW,OAAO;AAC1C,qBAAgB,QAAQ,kBAAkB,OAAO;AAuCjD,YAnBiB,SAAS,aACvB,WAAW,OAAO,UAAU;MAC5B,SAAS,OAAO;MAChB,QAAQ,OAAO;MACf,CAAC,GACD,SAAS,gBACR,SAAS,eACR;MACA,SAAS,OAAO;MAChB,UAAU,OAAO;MACjB,QAAQ,OAAO;MACf,GACA;MACA,SAAS,OAAO;MAChB,UAAU,OAAO;MACjB,GACD,SAAS,eACR;MAAE,UAAU,OAAO;MAAU,QAAQ,OAAO;MAAQ,GACpD,OAAO;MAEX;;AAEH,OAAI,MAAM,iBAAiB,CAC1B,QAAO,KAAK;OAGZ,QAAO,oCADO,IAAI,SAAS,EACO,IAAI;;AAGxC,MAAI,KAAK,OAAO,SAAS;AACzB,MAAI,KAAK,UAAU,SAAS;;AAE7B,QAAO;;AAER,SAAgB,aACf,KACA,SACC;CACD,MAAM,kBACL,QAAQ,SAAS,QAAkC,KAAK,WAAW;AAClE,SAAO;GACN,GAAG;GACH,GAAG,OAAO;GACV;IACC,EAAE,CAAC,IAAI,EAAE;CAcb,MAAM,cACL,QAAQ,SACL,KAAK,WACN,OAAO,aAAa,KAAK,MAAM;EAC9B,MAAM,cAAc,OAAO,YAAiB;GAC3C,MAAM,cAAc,MAAM;AAC1B,UAAO,EAAE,WAAW;IACnB,GAAG;IACH,SAAS;KACR,GAAG;KACH,GAAG,QAAQ;KACX;IACD,CAAC;;AAEH,aAAW,UAAU,EAAE,WAAW;AAClC,SAAO;GACN,MAAM,EAAE;GACR;GACA;GACA,CACF,CACA,QAAQ,WAAW,WAAW,OAAU,CACxC,MAAM,IAAI,EAAE;AAQf,QAAO;EACN,KAFW,gBAJM,EAEjB,GAAG,iBACH,EACsC,IAAI;EAG1C;EACA;;AAGF,MAAa,UACZ,KACA,YACI;CACJ,MAAM,EAAE,KAAK,gBAAgB,aAAa,KAAK,QAAQ;CACvD,MAAM,WAAW,IAAI,IAAI,IAAI,QAAQ,CAAC;AAEtC,QAAO,aAAa,KAAK;EACxB,eAAe;EACf,SAAS,EACR,UAAU,MACV;EACD;EACA,kBAAkB,CACjB;GACC,MAAM;GACN,YAAY,EAAE;GACd,EACD,GAAG,YACH;EACD,mBAAmB,CAAC,mBAAmB;EACvC,MAAM,UAAU,KAAK;GACpB,IAAI,iBAAiB;AACrB,QAAK,MAAM,UAAU,IAAI,QAAQ,WAAW,EAAE,CAC7C,KAAI,OA
AO,WAAW;IACrB,MAAM,WAAW,MAAM,OAAO,UAAU,gBAAgB,IAAI;AAC5D,QAAI,YAAY,cAAc,SAC7B,QAAO,SAAS;AAEjB,QAAI,YAAY,aAAa,SAC5B,kBAAiB,SAAS;;AAU7B,UAAO;;EAER,MAAM,WAAW,KAAK;AACrB,QAAK,MAAM,UAAU,IAAI,QAAQ,WAAW,EAAE,CAC7C,KAAI,OAAO,YAAY;IACtB,MAAM,WAAW,MAAM,OAAO,WAAW,KAAK,IAAI;AAClD,QAAI,SACH,QAAO,SAAS;;AAInB,UAAO;;EAER,QAAQ,GAAG;EA4CX,CAAC"}
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
import { getKyselyDatabaseType } from "../adapters/kysely/dialect.mjs";
|
|
2
|
+
import { getMigrations } from "../db/migrations/index.mjs";
|
|
3
|
+
import { createInternalAdapter } from "../adapters/internal/index.mjs";
|
|
4
|
+
import { isPromise } from "../utils/is-promise.mjs";
|
|
5
|
+
import { getBaseURL } from "../utils/url.mjs";
|
|
6
|
+
import { getAdapter } from "../adapters/get-adapter.mjs";
|
|
7
|
+
import { PecuniaError, env, getPaymentTables } from "pecunia-core";
|
|
8
|
+
import defu from "defu";
|
|
9
|
+
|
|
10
|
+
//#region src/context/index.ts
|
|
11
|
+
/**
 * Initializes the Pecunia context: resolves the database adapter, builds
 * the base context, then wires a Kysely-backed runMigrations onto it.
 *
 * @param {object} options - Pecunia options (database, plugins, baseURL, ...).
 * @returns {Promise<object>} the fully initialized context.
 */
const init = async (options) => {
	const adapter = await getAdapter(options);
	// Dialect detection via Kysely; "unknown" when it cannot be determined.
	const getDatabaseType = (database) => getKyselyDatabaseType(database) || "unknown";
	const ctx = await createPecuniaContext(adapter, options, getDatabaseType);
	ctx.runMigrations = async function() {
		// Migrations require a raw database instance; an adapter is detected
		// by the presence of its `updateMany` method.
		const database = options.database;
		if (!database || "updateMany" in database) {
			throw new PecuniaError("Database is not provided or it's an adapter. Migrations are only supported with a database instance.");
		}
		const { runMigrations } = await getMigrations(options);
		await runMigrations();
	};
	return ctx;
};
|
|
22
|
+
/**
 * Runs every plugin's `init` hook and folds any returned option/context
 * overrides into the context being built.
 *
 * @param {object} ctx - the Pecunia context under construction.
 * @returns {Promise<{context: object}>} the (possibly extended) context with
 *   `internalAdapter` and the merged `options` attached.
 */
async function runPluginInit(ctx) {
	let options = ctx.options;
	const plugins = options.plugins || [];
	let context = ctx;
	for (const plugin of plugins) {
		if (!plugin.init) continue;
		const initPromise = plugin.init(context);
		// init may be synchronous or return a promise.
		const result = isPromise(initPromise) ? await initPromise : initPromise;
		// Fix: guard against null — `typeof null === "object"`, so a plugin
		// init returning null would previously crash on `result.options`.
		if (result && typeof result === "object") {
			if (result.options) {
				// Shallow-copy before merging so the plugin's object is
				// never shared with the merged options.
				const { ...restOpts } = result.options;
				options = defu(options, restOpts);
			}
			if (result.context) context = {
				...context,
				...result.context
			};
		}
	}
	context.internalAdapter = createInternalAdapter(context.adapter, { options });
	context.options = options;
	return { context };
}
|
|
46
|
+
// Builds the base Pecunia context from the resolved adapter and options,
// then runs plugin init hooks (which may replace/extend the context).
// NOTE(review): the `getDatabaseType` parameter is accepted but never used
// in this body — confirm whether callers rely on it elsewhere.
async function createPecuniaContext(adapter, options, getDatabaseType) {
	const plugins = options.plugins || [];
	const baseURL = getBaseURL(options.baseURL, options.basePath);
	// NOTE(review): the warning prefix says "[senly]" while the package is
	// "pecunia" — looks like a leftover brand name; confirm and align.
	if (!baseURL) console.warn(`[senly] Base URL could not be determined. Please set a valid base URL using the baseURL config option or the BETTER_AUTH_BASE_URL environment variable. Without this, callbacks and redirects may not work correctly.`);
	// Dead expression: the source computed `const secret = ...` here but its
	// use was compiled out, leaving this value-discarding statement behind.
	env.BETTER_AUTH_SECRET || env.AUTH_SECRET;
	// Normalize options: baseURL reduced to its origin, default basePath.
	options = {
		...options,
		baseURL: baseURL ? new URL(baseURL).origin : "",
		basePath: options.basePath || "/api/payment",
		plugins
	};
	const tables = getPaymentTables(options);
	// Seed context; plugins may extend it via runPluginInit.
	const initOrPromise = runPluginInit({
		appName: "Pecunia",
		options,
		tables,
		baseURL: baseURL || "",
		adapter,
		internalAdapter: createInternalAdapter(adapter, { options }),
		// Placeholder: replaced by the concrete init (see init in this file).
		async runMigrations() {
			throw new PecuniaError("runMigrations will be set by the specific init implementation");
		}
	});
	let context;
	if (isPromise(initOrPromise)) ({context} = await initOrPromise);
	else ({context} = initOrPromise);
	return context;
}
|
|
74
|
+
|
|
75
|
+
//#endregion
|
|
76
|
+
export { init };
|
|
77
|
+
//# sourceMappingURL=index.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.mjs","names":["context: PecuniaContext","result: ReturnType<Required<PecuniaPlugin>[\"init\"]>"],"sources":["../../src/context/index.ts"],"sourcesContent":["import type { PecuniaContext, PecuniaOptions, PecuniaPlugin } from \"pecunia-core\";\nimport { getPaymentTables } from \"pecunia-core\";\nimport type { DBAdapter } from \"pecunia-core\";\nimport { createLogger, env, isProduction, isTest } from \"pecunia-core\";\nimport { PecuniaError } from \"pecunia-core\";\nimport defu from \"defu\";\n// import type { Entries } from \"type-fest\";\n// import { checkEndpointConflicts } from \"../api\";\n// import { matchesOriginPattern } from \"../payment/trusted-origins\";\nimport { createInternalAdapter } from \"../adapters/internal\";\nimport { isPromise } from \"../utils/is-promise\";\nimport { getBaseURL } from \"../utils/url\";\nimport { getKyselyDatabaseType } from \"../adapters/kysely/dialect\";\nimport { getMigrations } from \"../db/migrations\";\nimport { getAdapter } from \"../adapters/get-adapter\";\n\nexport const init = async (options: PecuniaOptions) => {\n\tconst adapter = await getAdapter(options);\n\n\t// Get database type using Kysely's dialect detection\n\tconst getDatabaseType = (database: PecuniaOptions[\"database\"]) =>\n\t\tgetKyselyDatabaseType(database) || \"unknown\";\n\n\t// Use base context creation\n\tconst ctx = await createPecuniaContext(adapter, options, getDatabaseType);\n\n\t// Add runMigrations with Kysely support\n\tctx.runMigrations = async function () {\n\t\t// only run migrations if database is provided and it's not an adapter\n\t\tif (!options.database || \"updateMany\" in options.database) {\n\t\t\tthrow new PecuniaError(\n\t\t\t\t\"Database is not provided or it's an adapter. 
Migrations are only supported with a database instance.\",\n\t\t\t);\n\t\t}\n\t\tconst { runMigrations } = await getMigrations(options);\n\t\tawait runMigrations();\n\t};\n\n\treturn ctx;\n};\n\n\nexport async function runPluginInit(ctx: PecuniaContext) {\n\tlet options = ctx.options;\n\tconst plugins = options.plugins || [];\n\tlet context: PecuniaContext = ctx;\n\tfor (const plugin of plugins) {\n\t\tif (plugin.init) {\n\t\t\tlet initPromise = plugin.init(context);\n\t\t\tlet result: ReturnType<Required<PecuniaPlugin>[\"init\"]>;\n\t\t\tif (isPromise(initPromise)) {\n\t\t\t\tresult = await initPromise;\n\t\t\t} else {\n\t\t\t\tresult = initPromise;\n\t\t\t}\n\t\t\tif (typeof result === \"object\") {\n\t\t\t\tif (result.options) {\n\t\t\t\t\tconst { ...restOpts } = result.options;\n\t\t\t\t\toptions = defu(options, restOpts);\n\t\t\t\t}\n\t\t\t\tif (result.context) {\n\t\t\t\t\tcontext = {\n\t\t\t\t\t\t...context,\n\t\t\t\t\t\t...(result.context as Partial<PecuniaContext>),\n\t\t\t\t\t};\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\tcontext.internalAdapter = createInternalAdapter(context.adapter, {\n\t\toptions\n\t});\n\tcontext.options = options;\n\treturn { context };\n}\n\nexport async function createPecuniaContext(\n\tadapter: DBAdapter<PecuniaOptions>,\n\toptions: PecuniaOptions,\n\tgetDatabaseType: (database: PecuniaOptions[\"database\"]) => string,\n): Promise<PecuniaContext> {\n\tconst plugins = options.plugins || [];\n\tconst baseURL = getBaseURL(options.baseURL, options.basePath);\n\n\tif (!baseURL) {\n\t\tconsole.warn(\n\t\t\t`[senly] Base URL could not be determined. Please set a valid base URL using the baseURL config option or the BETTER_AUTH_BASE_URL environment variable. Without this, callbacks and redirects may not work correctly.`,\n\t\t);\n\t}\n\n\tconst secret =\n\t\tenv.BETTER_AUTH_SECRET ||\n\t\tenv.AUTH_SECRET\n\n\toptions = {\n\t\t...options,\n\t\t// secret,\n\t\tbaseURL: baseURL ? 
new URL(baseURL).origin : \"\",\n\t\tbasePath: options.basePath || \"/api/payment\",\n\t\tplugins\n\t};\n\n\t// checkEndpointConflicts(options);\n\tconst tables = getPaymentTables(options);\n\n\n\tlet ctx: PecuniaContext = {\n\t\tappName: 'Pecunia',\n\t\toptions,\n\t\ttables,\n\t\t// trustedOrigins: await getTrustedOrigins(options),\n\t\t// isTrustedOrigin(\n\t\t// \turl: string,\n\t\t// \tsettings?: {\n\t\t// \t\tallowRelativePaths: boolean;\n\t\t// \t},\n\t\t// ) {\n\t\t// \treturn this.trustedOrigins.some((origin) =>\n\t\t// \t\tmatchesOriginPattern(url, origin, settings),\n\t\t// \t);\n\t\t// },\n\t\tbaseURL: baseURL || \"\",\n\t\t// generateId: generateIdFunc,\n\t\tadapter: adapter,\n\t\tinternalAdapter: createInternalAdapter(adapter, {\n\t\t\toptions\n\t\t}),\n\t\tasync runMigrations() {\n\t\t\tthrow new PecuniaError(\n\t\t\t\t\"runMigrations will be set by the specific init implementation\",\n\t\t\t);\n\t\t},\n\t};\n\n\tconst initOrPromise = runPluginInit(ctx);\n\tlet context: PecuniaContext;\n\tif (isPromise(initOrPromise)) {\n\t\t({ context } = await initOrPromise);\n\t} else {\n\t\t({ context } = initOrPromise);\n\t}\n\n\treturn 
context;\n}\n"],"mappings":";;;;;;;;;;AAgBA,MAAa,OAAO,OAAO,YAA4B;CACtD,MAAM,UAAU,MAAM,WAAW,QAAQ;CAGzC,MAAM,mBAAmB,aACxB,sBAAsB,SAAS,IAAI;CAGpC,MAAM,MAAM,MAAM,qBAAqB,SAAS,SAAS,gBAAgB;AAGzE,KAAI,gBAAgB,iBAAkB;AAErC,MAAI,CAAC,QAAQ,YAAY,gBAAgB,QAAQ,SAChD,OAAM,IAAI,aACT,uGACA;EAEF,MAAM,EAAE,kBAAkB,MAAM,cAAc,QAAQ;AACtD,QAAM,eAAe;;AAGtB,QAAO;;AAIR,eAAsB,cAAc,KAAqB;CACxD,IAAI,UAAU,IAAI;CAClB,MAAM,UAAU,QAAQ,WAAW,EAAE;CACrC,IAAIA,UAA0B;AAC9B,MAAK,MAAM,UAAU,QACpB,KAAI,OAAO,MAAM;EAChB,IAAI,cAAc,OAAO,KAAK,QAAQ;EACtC,IAAIC;AACJ,MAAI,UAAU,YAAY,CACzB,UAAS,MAAM;MAEf,UAAS;AAEV,MAAI,OAAO,WAAW,UAAU;AAC/B,OAAI,OAAO,SAAS;IACnB,MAAM,EAAE,GAAG,aAAa,OAAO;AAC/B,cAAU,KAAK,SAAS,SAAS;;AAElC,OAAI,OAAO,QACV,WAAU;IACT,GAAG;IACH,GAAI,OAAO;IACX;;;AAKL,SAAQ,kBAAkB,sBAAsB,QAAQ,SAAS,EAChE,SACA,CAAC;AACF,SAAQ,UAAU;AAClB,QAAO,EAAE,SAAS;;AAGnB,eAAsB,qBACrB,SACA,SACA,iBAC0B;CAC1B,MAAM,UAAU,QAAQ,WAAW,EAAE;CACrC,MAAM,UAAU,WAAW,QAAQ,SAAS,QAAQ,SAAS;AAE7D,KAAI,CAAC,QACJ,SAAQ,KACP,wNACA;AAID,KAAI,sBACJ,IAAI;AAEL,WAAU;EACT,GAAG;EAEH,SAAS,UAAU,IAAI,IAAI,QAAQ,CAAC,SAAS;EAC7C,UAAU,QAAQ,YAAY;EAC9B;EACA;CAGD,MAAM,SAAS,iBAAiB,QAAQ;CA+BxC,MAAM,gBAAgB,cA5BI;EACzB,SAAS;EACT;EACA;EAYA,SAAS,WAAW;EAEX;EACT,iBAAiB,sBAAsB,SAAS,EAC/C,SACA,CAAC;EACF,MAAM,gBAAgB;AACrB,SAAM,IAAI,aACT,gEACA;;EAEF,CAEuC;CACxC,IAAID;AACJ,KAAI,UAAU,cAAc,CAC3B,EAAC,CAAE,WAAY,MAAM;KAErB,EAAC,CAAE,WAAY;AAGhB,QAAO"}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { DBFieldAttribute, DBFieldType, KyselyDatabaseDialectType, PecuniaOptions } from "pecunia-core";
|
|
2
|
+
|
|
3
|
+
//#region src/db/migrations/index.d.ts
|
|
4
|
+
/**
 * Checks whether a database column's reported data type is compatible with a
 * schema field type under the given Kysely dialect.
 */
declare function matchType(columnDataType: string, fieldType: DBFieldType, dbType: KyselyDatabaseDialectType): boolean | undefined;
/**
 * Computes the migration plan for the configured database: tables to create,
 * fields to add to existing tables, plus executors for running or compiling
 * the migrations.
 */
declare function getMigrations(config: PecuniaOptions): Promise<{
	// Tables missing entirely from the database, in creation order.
	toBeCreated: {
		table: string;
		fields: Record<string, DBFieldAttribute>;
		order: number;
	}[];
	// Columns to add to tables that already exist, in creation order.
	toBeAdded: {
		table: string;
		fields: Record<string, DBFieldAttribute>;
		order: number;
	}[];
	// Executes the migration plan against the database.
	runMigrations: () => Promise<void>;
	// Renders the migration plan as SQL text without executing it.
	compileMigrations: () => Promise<string>;
}>;
|
|
19
|
+
//#endregion
|
|
20
|
+
export { getMigrations, matchType };
|
|
21
|
+
//# sourceMappingURL=index.d.mts.map
|
|
@@ -0,0 +1,327 @@
|
|
|
1
|
+
import { createKyselyAdapter } from "../../adapters/kysely/dialect.mjs";
|
|
2
|
+
import { getSchema } from "../schema/get-schema.mjs";
|
|
3
|
+
import { getPaymentTables, initGetFieldName, initGetModelName } from "pecunia-core";
|
|
4
|
+
import { sql } from "kysely";
|
|
5
|
+
|
|
6
|
+
//#region src/db/migrations/index.ts
|
|
7
|
+
// Per-dialect table of database column types accepted for each schema field
// type (consumed by matchType below). Keys are Kysely dialect names; values
// map field type -> list of acceptable column type names as reported by the
// database.
const map = {
	// PostgreSQL: lowercase type names as returned by introspection.
	postgres: {
		string: [
			"character varying",
			"varchar",
			"text",
			"uuid"
		],
		number: [
			"int4",
			"integer",
			"bigint",
			"smallint",
			"numeric",
			"real",
			"double precision"
		],
		boolean: ["bool", "boolean"],
		date: [
			"timestamptz",
			"timestamp",
			"date"
		],
		json: ["json", "jsonb"]
	},
	// MySQL/MariaDB. Note: booleans are commonly stored as tinyint.
	mysql: {
		string: [
			"varchar",
			"text",
			"uuid"
		],
		number: [
			"integer",
			"int",
			"bigint",
			"smallint",
			"decimal",
			"float",
			"double"
		],
		boolean: ["boolean", "tinyint"],
		date: [
			"timestamp",
			"datetime",
			"date"
		],
		json: ["json"]
	},
	// SQLite: uppercase storage-class names; JSON and dates fall back to
	// TEXT/INTEGER storage.
	sqlite: {
		string: ["TEXT"],
		number: ["INTEGER", "REAL"],
		boolean: ["INTEGER", "BOOLEAN"],
		date: ["DATE", "INTEGER"],
		json: ["TEXT"]
	},
	// SQL Server. Note: no native JSON type — stored in (n)varchar.
	mssql: {
		string: [
			"varchar",
			"nvarchar",
			"uniqueidentifier"
		],
		number: [
			"int",
			"bigint",
			"smallint",
			"decimal",
			"float",
			"double"
		],
		boolean: ["bit", "smallint"],
		date: [
			"datetime2",
			"date",
			"datetime"
		],
		json: ["varchar", "nvarchar"]
	}
};
|
|
85
|
+
function matchType(columnDataType, fieldType, dbType) {
|
|
86
|
+
function normalize(type) {
|
|
87
|
+
return type.toLowerCase().split("(")[0].trim();
|
|
88
|
+
}
|
|
89
|
+
if (fieldType === "string[]" || fieldType === "number[]") return columnDataType.toLowerCase().includes("json");
|
|
90
|
+
const types = map[dbType];
|
|
91
|
+
return (Array.isArray(fieldType) ? types.string?.map((t) => t.toLowerCase()) : types[fieldType].map((t) => t.toLowerCase()))?.includes(normalize(columnDataType));
|
|
92
|
+
}
|
|
93
|
+
/**
 * Resolve the active PostgreSQL schema for the given connection by inspecting
 * `search_path`. Returns the first usable entry, or "public" when the setting
 * is unavailable, empty, or contains only unresolved placeholders (e.g. `$user`).
 */
async function getPostgresSchema(db) {
	try {
		const result = await sql`SHOW search_path`.execute(db);
		const searchPath = result.rows[0]?.search_path;
		if (searchPath) {
			const candidates = searchPath
				.split(",")
				.map((entry) => entry.trim())
				.map((entry) => entry.replace(/^["']|["']$/g, ""))
				.filter((entry) => !entry.startsWith("$"));
			return candidates[0] || "public";
		}
	} catch {
		// Lookup failed (e.g. insufficient privileges); fall back to the default.
	}
	return "public";
}
|
|
104
|
+
/**
 * Diff the payment schema derived from `config` against the live database and
 * build the kysely migration statements needed to reconcile them.
 *
 * Returns an object with:
 *  - `toBeCreated`: tables missing from the database, with their fields, ordered
 *    so foreign-key targets come first.
 *  - `toBeAdded`: existing tables that are missing one or more columns.
 *  - `runMigrations()`: executes the built statements sequentially.
 *  - `compileMigrations()`: compiles the statements into one SQL string.
 *
 * Side effects: logs warnings/debug output and calls `process.exit(1)` when no
 * kysely adapter can be created from `config`.
 */
async function getMigrations(config) {
	const billingEngineSchema = getSchema(config);
	// Migrations run through kysely only; other adapters must use `generate`.
	let { kysely: db, databaseType: dbType } = await createKyselyAdapter(config);
	if (!dbType) {
		console.warn("Could not determine database type, defaulting to sqlite. Please provide a type in the database options to avoid this.");
		dbType = "sqlite";
	}
	if (!db) {
		console.error("Only kysely adapter is supported for migrations. You can use `generate` command to generate the schema, if you're using a different adapter.");
		process.exit(1);
	}
	let currentSchema = "public";
	if (dbType === "postgres") {
		// Resolve the active schema from search_path so introspection and
		// generated DDL target the right namespace.
		currentSchema = await getPostgresSchema(db);
		console.debug(`PostgreSQL migration: Using schema '${currentSchema}' (from search_path)`);
		try {
			// Best-effort existence check; a missing schema only produces a warning.
			if (!(await sql`
			SELECT schema_name
			FROM information_schema.schemata
			WHERE schema_name = ${currentSchema}
			`.execute(db)).rows[0]) console.warn(`Schema '${currentSchema}' does not exist. Tables will be inspected from available schemas. Consider creating the schema first or checking your database configuration.`);
		} catch (error) {
			console.debug(`Could not verify schema existence: ${error instanceof Error ? error.message : String(error)}`);
		}
	}
	const allTableMetadata = await db.introspection.getTables();
	let tableMetadata = allTableMetadata;
	// On postgres, limit introspection results to the active schema so a table
	// of the same name in another schema is not mistaken for the target table.
	if (dbType === "postgres") try {
		const tablesInSchema = await sql`
		SELECT table_name
		FROM information_schema.tables
		WHERE table_schema = ${currentSchema}
		AND table_type = 'BASE TABLE'
		`.execute(db);
		const tableNamesInSchema = new Set(tablesInSchema.rows.map((row) => row.table_name));
		tableMetadata = allTableMetadata.filter((table) => table.schema === currentSchema && tableNamesInSchema.has(table.name));
		console.debug(`Found ${tableMetadata.length} table(s) in schema '${currentSchema}': ${tableMetadata.map((t) => t.name).join(", ") || "(none)"}`);
	} catch (error) {
		console.warn(`Could not filter tables by schema. Using all discovered tables. Error: ${error instanceof Error ? error.message : String(error)}`);
	}
	// Tables that do not exist at all vs. existing tables missing columns.
	const toBeCreated = [];
	const toBeAdded = [];
	for (const [key, value] of Object.entries(billingEngineSchema)) {
		const table = tableMetadata.find((t) => t.name === key);
		if (!table) {
			const tIndex = toBeCreated.findIndex((t) => t.table === key);
			const tableData = {
				table: key,
				fields: value.fields,
				order: value.order || Infinity
			};
			// `toBeCreated` is kept sorted ascending by `order` so tables referenced
			// by foreign keys are created before their referrers.
			const insertIndex = toBeCreated.findIndex((t) => (t.order || Infinity) > tableData.order);
			// No later-ordered entry: append, or merge fields when this table key was
			// already queued. Otherwise splice in at the sorted position.
			// NOTE(review): when insertIndex !== -1 and tIndex !== -1 this splices a
			// duplicate entry for an already-queued table — confirm whether the
			// schema can emit the same table key twice with different orders.
			if (insertIndex === -1) if (tIndex === -1) toBeCreated.push(tableData);
			else toBeCreated[tIndex].fields = {
				...toBeCreated[tIndex].fields,
				...value.fields
			};
			else toBeCreated.splice(insertIndex, 0, tableData);
			continue;
		}
		const toBeAddedFields = {};
		for (const [fieldName, field] of Object.entries(value.fields)) {
			const column = table.columns.find((c) => c.name === fieldName);
			if (!column) {
				toBeAddedFields[fieldName] = field;
				continue;
			}
			// Compatible column already exists: nothing to do. Type drift is only
			// warned about, never auto-migrated.
			if (matchType(column.dataType, field.type, dbType)) continue;
			console.warn(`Field ${fieldName} in table ${key} has a different type in the database. Expected ${field.type} but got ${column.dataType}.`);
		}
		if (Object.keys(toBeAddedFields).length > 0) toBeAdded.push({
			table: key,
			fields: toBeAddedFields,
			order: value.order || Infinity
		});
	}
	// Accumulates kysely builders (createTable / alterTable / index) in
	// execution order; `runMigrations`/`compileMigrations` close over it.
	const migrations = [];
	// Map a schema field to the concrete column type for the active dialect.
	// Variable-length string columns are narrowed to varchar(255)/varchar(36)
	// where the dialect needs an indexable or key-sized type.
	function getType(field, fieldName) {
		const type = field.type;
		const provider = dbType || "sqlite";
		const typeMap = {
			uuid: {
				postgres: "uuid",
				mysql: "varchar(36)",
				mssql: "varchar(36)",
				sqlite: "text"
			},
			string: {
				sqlite: "text",
				postgres: "text",
				mysql: field.unique ? "varchar(255)" : field.references ? "varchar(36)" : field.sortable ? "varchar(255)" : field.index ? "varchar(255)" : "text",
				mssql: field.unique || field.sortable ? "varchar(255)" : field.references ? "varchar(36)" : "varchar(8000)"
			},
			boolean: {
				sqlite: "integer",
				postgres: "boolean",
				mysql: "boolean",
				mssql: "smallint"
			},
			number: {
				sqlite: field.bigint ? "bigint" : "integer",
				postgres: field.bigint ? "bigint" : "integer",
				mysql: field.bigint ? "bigint" : "integer",
				mssql: field.bigint ? "bigint" : "integer"
			},
			date: {
				sqlite: "date",
				postgres: "timestamptz",
				mysql: "timestamp(3)",
				mssql: sql`datetime2(3)`
			},
			json: {
				sqlite: "text",
				postgres: "jsonb",
				mysql: "json",
				mssql: "varchar(8000)"
			},
			id: {
				postgres: "uuid",
				mysql: "varchar(36)",
				mssql: "varchar(36)",
				sqlite: "text"
			},
			foreignKeyId: {
				postgres: "uuid",
				mysql: "varchar(36)",
				mssql: "varchar(36)",
				sqlite: "text"
			},
			"string[]": {
				sqlite: "text",
				postgres: "jsonb",
				mysql: "json",
				mssql: "varchar(8000)"
			},
			"number[]": {
				sqlite: "text",
				postgres: "jsonb",
				mysql: "json",
				mssql: "varchar(8000)"
			}
		};
		// Primary keys and foreign keys referencing an "id" column get the
		// dialect's id type regardless of the declared field type.
		if (fieldName === "id" || field.references?.field === "id") {
			if (fieldName === "id") return typeMap.id[provider];
			return typeMap.foreignKeyId[provider];
		}
		// Enum-like array of allowed literal values: stored as plain text.
		if (Array.isArray(type)) return "text";
		// NOTE(review): this error message links to better-auth docs; this package
		// is pecunia — confirm the intended documentation URL.
		if (!(type in typeMap)) throw new Error(`Unsupported field type '${String(type)}' for field '${fieldName}'. Allowed types are: string, number, boolean, date, string[], number[]. If you need to store structured data, store it as a JSON string (type: "string") or split it into primitive fields. See https://better-auth.com/docs/advanced/schema#additional-fields`);
		return typeMap[type][provider];
	}
	const getModelName = initGetModelName({
		schema: getPaymentTables(config),
		usePlural: false
	});
	const getFieldName = initGetFieldName({
		schema: getPaymentTables(config),
		usePlural: false
	});
	// Build a "table.column" reference path, honoring model/field renames
	// configured in the payment tables; falls back to the raw names when the
	// model or field is unknown to the resolver.
	function getReferencePath(model, field) {
		try {
			return `${getModelName(model)}.${getFieldName({
				model,
				field
			})}`;
		} catch {
			return `${model}.${field}`;
		}
	}
	// Missing columns on existing tables: emit ALTER TABLE ... ADD COLUMN
	// (plus an index when requested).
	if (toBeAdded.length) for (const table of toBeAdded) for (const [fieldName, field] of Object.entries(table.fields)) {
		const type = getType(field, fieldName);
		if (field.index) {
			const index = db.schema.alterTable(table.table).addIndex(`${table.table}_${fieldName}_idx`);
			migrations.push(index);
		}
		const built = db.schema.alterTable(table.table).addColumn(fieldName, type, (col) => {
			// Fields are NOT NULL unless explicitly marked `required: false`.
			col = field.required !== false ? col.notNull() : col;
			if (field.references) col = col.references(getReferencePath(field.references.model, field.references.field)).onDelete(field.references.onDelete || "cascade");
			if (field.unique) col = col.unique();
			// Function defaults for dates become a DB-side CURRENT_TIMESTAMP default
			// (mysql needs the (3) precision to match timestamp(3) columns).
			if (field.type === "date" && typeof field.defaultValue === "function" && (dbType === "postgres" || dbType === "mysql" || dbType === "mssql")) col = dbType === "mysql" ? col.defaultTo(sql`CURRENT_TIMESTAMP(3)`) : col.defaultTo(sql`CURRENT_TIMESTAMP`);
			return col;
		});
		migrations.push(built);
	}
	// Index builders are deferred so they run after all CREATE TABLE statements.
	const toBeIndexed = [];
	if (toBeCreated.length) for (const table of toBeCreated) {
		const idType = getType({ type: "string" }, "id");
		// Every new table gets an "id" primary key; postgres also gets a
		// server-side uuid default.
		let dbT = db.schema.createTable(table.table).addColumn("id", idType, (col) => {
			if (dbType === "postgres") return col.primaryKey().defaultTo(sql`pg_catalog.gen_random_uuid()`).notNull();
			return col.primaryKey().notNull();
		});
		for (const [fieldName, field] of Object.entries(table.fields)) {
			const type = getType(field, fieldName);
			dbT = dbT.addColumn(fieldName, type, (col) => {
				// Same column-modifier rules as the ALTER TABLE path above.
				col = field.required !== false ? col.notNull() : col;
				if (field.references) col = col.references(getReferencePath(field.references.model, field.references.field)).onDelete(field.references.onDelete || "cascade");
				if (field.unique) col = col.unique();
				if (field.type === "date" && typeof field.defaultValue === "function" && (dbType === "postgres" || dbType === "mysql" || dbType === "mssql")) col = dbType === "mysql" ? col.defaultTo(sql`CURRENT_TIMESTAMP(3)`) : col.defaultTo(sql`CURRENT_TIMESTAMP`);
				return col;
			});
			if (field.index) {
				// "uidx" suffix for unique indexes, "idx" otherwise.
				const idx = db.schema.createIndex(`${table.table}_${fieldName}_${field.unique ? "uidx" : "idx"}`).on(table.table).columns([fieldName]);
				toBeIndexed.push(field.unique ? idx.unique() : idx);
			}
		}
		migrations.push(dbT);
	}
	for (const index of toBeIndexed) migrations.push(index);
	// Execute all accumulated statements in order against the live connection.
	async function runMigrations() {
		for (const migration of migrations) await migration.execute();
	}
	// Compile the accumulated statements into a single semicolon-separated SQL string.
	async function compileMigrations() {
		return migrations.map((m) => m.compile().sql).join(";\n\n") + ";";
	}
	return {
		toBeCreated,
		toBeAdded,
		runMigrations,
		compileMigrations
	};
}
|
|
324
|
+
|
|
325
|
+
//#endregion
|
|
326
|
+
export { getMigrations, matchType };
|
|
327
|
+
//# sourceMappingURL=index.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.mjs","names":["types: Partial<DbTypeBuckets>","toBeCreated: {\n table: string;\n fields: Record<string, DBFieldAttribute>;\n order: number;\n }[]","toBeAdded: {\n table: string;\n fields: Record<string, DBFieldAttribute>;\n order: number;\n }[]","toBeAddedFields: Record<string, DBFieldAttribute>","migrations: (\n | AlterTableColumnAlteringBuilder\n | ReturnType<AlterTableBuilder[\"addIndex\"]>\n | CreateTableBuilder<string, string>\n | CreateIndexBuilder\n )[]","typeMap: Record<\n StringOnlyUnion<DBFieldType> | \"id\" | \"foreignKeyId\",\n Record<KyselyDatabaseDialectType, ColumnDataType | RawBuilder<unknown>>\n >","toBeIndexed: CreateIndexBuilder[]"],"sources":["../../../src/db/migrations/index.ts"],"sourcesContent":["import type { PecuniaOptions } from \"pecunia-core\";\nimport type { DBFieldAttribute, DBFieldType } from \"pecunia-core\";\nimport { initGetFieldName, initGetModelName } from \"pecunia-core\";\nimport type {\n AlterTableBuilder,\n AlterTableColumnAlteringBuilder,\n ColumnDataType,\n CreateIndexBuilder,\n CreateTableBuilder,\n Kysely,\n RawBuilder,\n} from \"kysely\";\nimport { sql } from \"kysely\";\nimport { createKyselyAdapter } from \"../../adapters/kysely/dialect\";\nimport type { KyselyDatabaseDialectType } from \"pecunia-core\";\nimport { getSchema } from \"../schema/get-schema\";\nimport { getPaymentTables } from \"pecunia-core\";\n\ntype DbTypeBuckets = Record<\n \"string\" | \"uuid\" | \"number\" | \"boolean\" | \"date\" | \"json\",\n string[]\n>;\n\nconst postgresMap = {\n string: [\"character varying\", \"varchar\", \"text\", \"uuid\"],\n number: [\n \"int4\",\n \"integer\",\n \"bigint\",\n \"smallint\",\n \"numeric\",\n \"real\",\n \"double precision\",\n ],\n boolean: [\"bool\", \"boolean\"],\n date: [\"timestamptz\", \"timestamp\", \"date\"],\n json: [\"json\", \"jsonb\"],\n};\n\nconst mysqlMap = {\n string: [\"varchar\", \"text\", \"uuid\"],\n number: [\n \"integer\",\n \"int\",\n \"bigint\",\n 
\"smallint\",\n \"decimal\",\n \"float\",\n \"double\",\n ],\n boolean: [\"boolean\", \"tinyint\"],\n date: [\"timestamp\", \"datetime\", \"date\"],\n json: [\"json\"],\n};\n\nconst sqliteMap = {\n string: [\"TEXT\"],\n number: [\"INTEGER\", \"REAL\"],\n boolean: [\"INTEGER\", \"BOOLEAN\"], // 0 or 1\n date: [\"DATE\", \"INTEGER\"],\n json: [\"TEXT\"],\n};\n\nconst mssqlMap = {\n string: [\"varchar\", \"nvarchar\", \"uniqueidentifier\"],\n number: [\"int\", \"bigint\", \"smallint\", \"decimal\", \"float\", \"double\"],\n boolean: [\"bit\", \"smallint\"],\n date: [\"datetime2\", \"date\", \"datetime\"],\n json: [\"varchar\", \"nvarchar\"],\n};\n\nconst map = {\n postgres: postgresMap,\n mysql: mysqlMap,\n sqlite: sqliteMap,\n mssql: mssqlMap,\n};\n\nexport function matchType(\n columnDataType: string,\n fieldType: DBFieldType,\n dbType: KyselyDatabaseDialectType,\n) {\n function normalize(type: string) {\n return type.toLowerCase().split(\"(\")[0]!.trim();\n }\n\n if (fieldType === \"string[]\" || fieldType === \"number[]\") {\n return columnDataType.toLowerCase().includes(\"json\");\n }\n\n const types: Partial<DbTypeBuckets> = map[dbType]!;\n const expected = Array.isArray(fieldType)\n ? 
types.string?.map((t) => t.toLowerCase())\n : types[fieldType]!.map((t: string) => t.toLowerCase());\n\n return expected?.includes(normalize(columnDataType));\n}\n\n/**\n * Get the current PostgreSQL schema (search_path) for the database connection\n * Returns the first schema in the search_path, defaulting to 'public' if not found\n */\nasync function getPostgresSchema(db: Kysely<unknown>): Promise<string> {\n try {\n const result = await sql<{ search_path: string }>`SHOW search_path`.execute(\n db,\n );\n\n if (result.rows[0]?.search_path) {\n const schemas = result.rows[0].search_path\n .split(\",\")\n .map((s) => s.trim())\n .map((s) => s.replace(/^[\"']|[\"']$/g, \"\"))\n .filter((s) => !s.startsWith(\"$\"));\n\n return schemas[0] || \"public\";\n }\n } catch {\n // fall back to public schema\n }\n\n return \"public\";\n}\n\nexport async function getMigrations(config: PecuniaOptions) {\n const billingEngineSchema = getSchema(config);\n\n let { kysely: db, databaseType: dbType } = await createKyselyAdapter(config);\n\n if (!dbType) {\n console.warn(\n \"Could not determine database type, defaulting to sqlite. Please provide a type in the database options to avoid this.\",\n );\n dbType = \"sqlite\";\n }\n\n if (!db) {\n console.error(\n \"Only kysely adapter is supported for migrations. 
You can use `generate` command to generate the schema, if you're using a different adapter.\",\n );\n process.exit(1);\n }\n\n // For PostgreSQL, detect and log the current schema being used\n let currentSchema = \"public\";\n if (dbType === \"postgres\") {\n currentSchema = await getPostgresSchema(db);\n console.debug(\n `PostgreSQL migration: Using schema '${currentSchema}' (from search_path)`,\n );\n\n // Verify the schema exists\n try {\n const schemaCheck = await sql<{ schema_name: string }>`\n SELECT schema_name\n FROM information_schema.schemata\n WHERE schema_name = ${currentSchema}\n `.execute(db);\n\n if (!schemaCheck.rows[0]) {\n console.warn(\n `Schema '${currentSchema}' does not exist. Tables will be inspected from available schemas. Consider creating the schema first or checking your database configuration.`,\n );\n }\n } catch (error) {\n console.debug(\n `Could not verify schema existence: ${\n error instanceof Error ? error.message : String(error)\n }`,\n );\n }\n }\n\n const allTableMetadata = await db.introspection.getTables();\n\n // For PostgreSQL, filter tables to only those in the target schema\n let tableMetadata = allTableMetadata;\n if (dbType === \"postgres\") {\n try {\n const tablesInSchema = await sql<{\n table_name: string;\n }>`\n SELECT table_name\n FROM information_schema.tables\n WHERE table_schema = ${currentSchema}\n AND table_type = 'BASE TABLE'\n `.execute(db);\n\n const tableNamesInSchema = new Set(\n tablesInSchema.rows.map((row) => row.table_name),\n );\n\n tableMetadata = allTableMetadata.filter(\n (table) =>\n table.schema === currentSchema && tableNamesInSchema.has(table.name),\n );\n\n console.debug(\n `Found ${tableMetadata.length} table(s) in schema '${currentSchema}': ${\n tableMetadata.map((t) => t.name).join(\", \") || \"(none)\"\n }`,\n );\n } catch (error) {\n console.warn(\n `Could not filter tables by schema. Using all discovered tables. Error: ${\n error instanceof Error ? 
error.message : String(error)\n }`,\n );\n }\n }\n\n const toBeCreated: {\n table: string;\n fields: Record<string, DBFieldAttribute>;\n order: number;\n }[] = [];\n\n const toBeAdded: {\n table: string;\n fields: Record<string, DBFieldAttribute>;\n order: number;\n }[] = [];\n\n for (const [key, value] of Object.entries(billingEngineSchema)) {\n const table = tableMetadata.find((t) => t.name === key);\n\n if (!table) {\n const tIndex = toBeCreated.findIndex((t) => t.table === key);\n const tableData = {\n table: key,\n fields: value.fields,\n order: value.order || Infinity,\n };\n\n const insertIndex = toBeCreated.findIndex(\n (t) => (t.order || Infinity) > tableData.order,\n );\n\n if (insertIndex === -1) {\n if (tIndex === -1) {\n toBeCreated.push(tableData);\n } else {\n toBeCreated[tIndex]!.fields = {\n ...toBeCreated[tIndex]!.fields,\n ...value.fields,\n };\n }\n } else {\n toBeCreated.splice(insertIndex, 0, tableData);\n }\n\n continue;\n }\n\n const toBeAddedFields: Record<string, DBFieldAttribute> = {};\n\n for (const [fieldName, field] of Object.entries(value.fields)) {\n const column = table.columns.find((c) => c.name === fieldName);\n\n if (!column) {\n toBeAddedFields[fieldName] = field;\n continue;\n }\n\n if (matchType(column.dataType, field.type, dbType)) {\n continue;\n }\n\n console.warn(\n `Field ${fieldName} in table ${key} has a different type in the database. 
Expected ${field.type} but got ${column.dataType}.`,\n );\n }\n\n if (Object.keys(toBeAddedFields).length > 0) {\n toBeAdded.push({\n table: key,\n fields: toBeAddedFields,\n order: value.order || Infinity,\n });\n }\n }\n\n const migrations: (\n | AlterTableColumnAlteringBuilder\n | ReturnType<AlterTableBuilder[\"addIndex\"]>\n | CreateTableBuilder<string, string>\n | CreateIndexBuilder\n )[] = [];\n\n // Adapter-enforced strategy: UUID ids at the DB level\n const useUUIDs = true;\n\n function getType(field: DBFieldAttribute, fieldName: string) {\n const type = field.type;\n const provider = dbType || \"sqlite\";\n\n type StringOnlyUnion<T> = T extends string ? T : never;\n\n const typeMap: Record<\n StringOnlyUnion<DBFieldType> | \"id\" | \"foreignKeyId\",\n Record<KyselyDatabaseDialectType, ColumnDataType | RawBuilder<unknown>>\n > = {\n uuid: {\n postgres: \"uuid\",\n mysql: \"varchar(36)\",\n mssql: \"varchar(36)\",\n sqlite: \"text\",\n },\n string: {\n sqlite: \"text\",\n postgres: \"text\",\n mysql: field.unique\n ? \"varchar(255)\"\n : field.references\n ? \"varchar(36)\"\n : field.sortable\n ? \"varchar(255)\"\n : field.index\n ? \"varchar(255)\"\n : \"text\",\n mssql:\n field.unique || field.sortable\n ? \"varchar(255)\"\n : field.references\n ? \"varchar(36)\"\n : \"varchar(8000)\",\n },\n boolean: {\n sqlite: \"integer\",\n postgres: \"boolean\",\n mysql: \"boolean\",\n mssql: \"smallint\",\n },\n number: {\n sqlite: field.bigint ? \"bigint\" : \"integer\",\n postgres: field.bigint ? \"bigint\" : \"integer\",\n mysql: field.bigint ? \"bigint\" : \"integer\",\n mssql: field.bigint ? \"bigint\" : \"integer\",\n },\n date: {\n sqlite: \"date\",\n postgres: \"timestamptz\",\n mysql: \"timestamp(3)\",\n mssql: sql`datetime2(3)`,\n },\n json: {\n sqlite: \"text\",\n postgres: \"jsonb\",\n mysql: \"json\",\n mssql: \"varchar(8000)\",\n },\n id: {\n postgres: useUUIDs ? 
\"uuid\" : \"text\",\n mysql: \"varchar(36)\",\n mssql: \"varchar(36)\", // ideally UNIQUEIDENTIFIER, but not in Kysely's type interface\n sqlite: \"text\",\n },\n foreignKeyId: {\n postgres: useUUIDs ? \"uuid\" : \"text\",\n mysql: \"varchar(36)\",\n mssql: \"varchar(36)\",\n sqlite: \"text\",\n },\n \"string[]\": {\n sqlite: \"text\",\n postgres: \"jsonb\",\n mysql: \"json\",\n mssql: \"varchar(8000)\",\n },\n \"number[]\": {\n sqlite: \"text\",\n postgres: \"jsonb\",\n mysql: \"json\",\n mssql: \"varchar(8000)\",\n },\n } as const;\n\n if (fieldName === \"id\" || field.references?.field === \"id\") {\n if (fieldName === \"id\") return typeMap.id[provider];\n return typeMap.foreignKeyId[provider];\n }\n\n if (Array.isArray(type)) return \"text\";\n\n if (!(type in typeMap)) {\n throw new Error(\n `Unsupported field type '${String(type)}' for field '${fieldName}'. Allowed types are: string, number, boolean, date, string[], number[]. If you need to store structured data, store it as a JSON string (type: \"string\") or split it into primitive fields. 
See https://better-auth.com/docs/advanced/schema#additional-fields`,\n );\n }\n\n return typeMap[type][provider];\n }\n\n const getModelName = initGetModelName({\n schema: getPaymentTables(config),\n usePlural: false,\n });\n\n const getFieldName = initGetFieldName({\n schema: getPaymentTables(config),\n usePlural: false,\n });\n\n function getReferencePath(model: string, field: string): string {\n try {\n const modelName = getModelName(model);\n const fieldName = getFieldName({ model, field });\n return `${modelName}.${fieldName}`;\n } catch {\n return `${model}.${field}`;\n }\n }\n\n if (toBeAdded.length) {\n for (const table of toBeAdded) {\n for (const [fieldName, field] of Object.entries(table.fields)) {\n const type = getType(field, fieldName);\n\n if (field.index) {\n const index = db.schema\n .alterTable(table.table)\n .addIndex(`${table.table}_${fieldName}_idx`);\n migrations.push(index);\n }\n\n const built = db.schema\n .alterTable(table.table)\n .addColumn(fieldName, type, (col) => {\n col = field.required !== false ? col.notNull() : col;\n\n if (field.references) {\n col = col\n .references(\n getReferencePath(\n field.references.model,\n field.references.field,\n ),\n )\n .onDelete(field.references.onDelete || \"cascade\");\n }\n\n if (field.unique) col = col.unique();\n\n if (\n field.type === \"date\" &&\n typeof field.defaultValue === \"function\" &&\n (dbType === \"postgres\" ||\n dbType === \"mysql\" ||\n dbType === \"mssql\")\n ) {\n col =\n dbType === \"mysql\"\n ? 
col.defaultTo(sql`CURRENT_TIMESTAMP(3)`)\n : col.defaultTo(sql`CURRENT_TIMESTAMP`);\n }\n\n return col;\n });\n\n migrations.push(built);\n }\n }\n }\n\n const toBeIndexed: CreateIndexBuilder[] = [];\n\n if (toBeCreated.length) {\n for (const table of toBeCreated) {\n const idType = getType({ type: \"string\" }, \"id\");\n\n let dbT = db.schema\n .createTable(table.table)\n .addColumn(\"id\", idType, (col) => {\n // UUID IDs at DB level\n if (dbType === \"postgres\") {\n return col\n .primaryKey()\n .defaultTo(sql`pg_catalog.gen_random_uuid()`)\n .notNull();\n }\n // For non-postgres, don't assume a DB-level UUID generator exists.\n // UUIDs can still be generated in application code.\n return col.primaryKey().notNull();\n });\n\n for (const [fieldName, field] of Object.entries(table.fields)) {\n const type = getType(field, fieldName);\n\n dbT = dbT.addColumn(fieldName, type, (col) => {\n col = field.required !== false ? col.notNull() : col;\n\n if (field.references) {\n col = col\n .references(\n getReferencePath(\n field.references.model,\n field.references.field,\n ),\n )\n .onDelete(field.references.onDelete || \"cascade\");\n }\n\n if (field.unique) col = col.unique();\n\n if (\n field.type === \"date\" &&\n typeof field.defaultValue === \"function\" &&\n (dbType === \"postgres\" || dbType === \"mysql\" || dbType === \"mssql\")\n ) {\n col =\n dbType === \"mysql\"\n ? col.defaultTo(sql`CURRENT_TIMESTAMP(3)`)\n : col.defaultTo(sql`CURRENT_TIMESTAMP`);\n }\n\n return col;\n });\n\n if (field.index) {\n const idx = db.schema\n .createIndex(\n `${table.table}_${fieldName}_${field.unique ? \"uidx\" : \"idx\"}`,\n )\n .on(table.table)\n .columns([fieldName]);\n\n toBeIndexed.push(field.unique ? 
idx.unique() : idx);\n }\n }\n\n migrations.push(dbT);\n }\n }\n\n for (const index of toBeIndexed) {\n migrations.push(index);\n }\n\n async function runMigrations() {\n for (const migration of migrations) {\n await migration.execute();\n }\n }\n\n async function compileMigrations() {\n const compiled = migrations.map((m) => m.compile().sql);\n return compiled.join(\";\\n\\n\") + \";\";\n }\n\n return { toBeCreated, toBeAdded, runMigrations, compileMigrations };\n}\n"],"mappings":";;;;;;AAuEA,MAAM,MAAM;CACV,UAjDkB;EAClB,QAAQ;GAAC;GAAqB;GAAW;GAAQ;GAAO;EACxD,QAAQ;GACN;GACA;GACA;GACA;GACA;GACA;GACA;GACD;EACD,SAAS,CAAC,QAAQ,UAAU;EAC5B,MAAM;GAAC;GAAe;GAAa;GAAO;EAC1C,MAAM,CAAC,QAAQ,QAAQ;EACxB;CAoCC,OAlCe;EACf,QAAQ;GAAC;GAAW;GAAQ;GAAO;EACnC,QAAQ;GACN;GACA;GACA;GACA;GACA;GACA;GACA;GACD;EACD,SAAS,CAAC,WAAW,UAAU;EAC/B,MAAM;GAAC;GAAa;GAAY;GAAO;EACvC,MAAM,CAAC,OAAO;EACf;CAqBC,QAnBgB;EAChB,QAAQ,CAAC,OAAO;EAChB,QAAQ,CAAC,WAAW,OAAO;EAC3B,SAAS,CAAC,WAAW,UAAU;EAC/B,MAAM,CAAC,QAAQ,UAAU;EACzB,MAAM,CAAC,OAAO;EACf;CAcC,OAZe;EACf,QAAQ;GAAC;GAAW;GAAY;GAAmB;EACnD,QAAQ;GAAC;GAAO;GAAU;GAAY;GAAW;GAAS;GAAS;EACnE,SAAS,CAAC,OAAO,WAAW;EAC5B,MAAM;GAAC;GAAa;GAAQ;GAAW;EACvC,MAAM,CAAC,WAAW,WAAW;EAC9B;CAOA;AAED,SAAgB,UACd,gBACA,WACA,QACA;CACA,SAAS,UAAU,MAAc;AAC/B,SAAO,KAAK,aAAa,CAAC,MAAM,IAAI,CAAC,GAAI,MAAM;;AAGjD,KAAI,cAAc,cAAc,cAAc,WAC5C,QAAO,eAAe,aAAa,CAAC,SAAS,OAAO;CAGtD,MAAMA,QAAgC,IAAI;AAK1C,SAJiB,MAAM,QAAQ,UAAU,GACrC,MAAM,QAAQ,KAAK,MAAM,EAAE,aAAa,CAAC,GACzC,MAAM,WAAY,KAAK,MAAc,EAAE,aAAa,CAAC,GAExC,SAAS,UAAU,eAAe,CAAC;;;;;;AAOtD,eAAe,kBAAkB,IAAsC;AACrE,KAAI;EACF,MAAM,SAAS,MAAM,GAA4B,mBAAmB,QAClE,GACD;AAED,MAAI,OAAO,KAAK,IAAI,YAOlB,QANgB,OAAO,KAAK,GAAG,YAC5B,MAAM,IAAI,CACV,KAAK,MAAM,EAAE,MAAM,CAAC,CACpB,KAAK,MAAM,EAAE,QAAQ,gBAAgB,GAAG,CAAC,CACzC,QAAQ,MAAM,CAAC,EAAE,WAAW,IAAI,CAAC,CAErB,MAAM;SAEjB;AAIR,QAAO;;AAGT,eAAsB,cAAc,QAAwB;CAC1D,MAAM,sBAAsB,UAAU,OAAO;CAE7C,IAAI,EAAE,QAAQ,IAAI,cAAc,WAAW,MAAM,oBAAoB,OAAO;AAE5E,KAAI,CAAC,QAAQ;AACX,UAAQ,KACN,wHACD;AACD,WAAS;;AAGX,KAAI,CAAC,IAAI;AACP,UAAQ,MACN,
+IACD;AACD,UAAQ,KAAK,EAAE;;CAIjB,IAAI,gBAAgB;AACpB,KAAI,WAAW,YAAY;AACzB,kBAAgB,MAAM,kBAAkB,GAAG;AAC3C,UAAQ,MACN,uCAAuC,cAAc,sBACtD;AAGD,MAAI;AAOF,OAAI,EANgB,MAAM,GAA4B;;;8BAG9B,cAAc;QACpC,QAAQ,GAAG,EAEI,KAAK,GACpB,SAAQ,KACN,WAAW,cAAc,gJAC1B;WAEI,OAAO;AACd,WAAQ,MACN,sCACE,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GAEzD;;;CAIL,MAAM,mBAAmB,MAAM,GAAG,cAAc,WAAW;CAG3D,IAAI,gBAAgB;AACpB,KAAI,WAAW,WACb,KAAI;EACF,MAAM,iBAAiB,MAAM,GAE3B;;;+BAGuB,cAAc;;QAErC,QAAQ,GAAG;EAEb,MAAM,qBAAqB,IAAI,IAC7B,eAAe,KAAK,KAAK,QAAQ,IAAI,WAAW,CACjD;AAED,kBAAgB,iBAAiB,QAC9B,UACC,MAAM,WAAW,iBAAiB,mBAAmB,IAAI,MAAM,KAAK,CACvE;AAED,UAAQ,MACN,SAAS,cAAc,OAAO,uBAAuB,cAAc,KACjE,cAAc,KAAK,MAAM,EAAE,KAAK,CAAC,KAAK,KAAK,IAAI,WAElD;UACM,OAAO;AACd,UAAQ,KACN,0EACE,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GAEzD;;CAIL,MAAMC,cAIA,EAAE;CAER,MAAMC,YAIA,EAAE;AAER,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,oBAAoB,EAAE;EAC9D,MAAM,QAAQ,cAAc,MAAM,MAAM,EAAE,SAAS,IAAI;AAEvD,MAAI,CAAC,OAAO;GACV,MAAM,SAAS,YAAY,WAAW,MAAM,EAAE,UAAU,IAAI;GAC5D,MAAM,YAAY;IAChB,OAAO;IACP,QAAQ,MAAM;IACd,OAAO,MAAM,SAAS;IACvB;GAED,MAAM,cAAc,YAAY,WAC7B,OAAO,EAAE,SAAS,YAAY,UAAU,MAC1C;AAED,OAAI,gBAAgB,GAClB,KAAI,WAAW,GACb,aAAY,KAAK,UAAU;OAE3B,aAAY,QAAS,SAAS;IAC5B,GAAG,YAAY,QAAS;IACxB,GAAG,MAAM;IACV;OAGH,aAAY,OAAO,aAAa,GAAG,UAAU;AAG/C;;EAGF,MAAMC,kBAAoD,EAAE;AAE5D,OAAK,MAAM,CAAC,WAAW,UAAU,OAAO,QAAQ,MAAM,OAAO,EAAE;GAC7D,MAAM,SAAS,MAAM,QAAQ,MAAM,MAAM,EAAE,SAAS,UAAU;AAE9D,OAAI,CAAC,QAAQ;AACX,oBAAgB,aAAa;AAC7B;;AAGF,OAAI,UAAU,OAAO,UAAU,MAAM,MAAM,OAAO,CAChD;AAGF,WAAQ,KACN,SAAS,UAAU,YAAY,IAAI,kDAAkD,MAAM,KAAK,WAAW,OAAO,SAAS,GAC5H;;AAGH,MAAI,OAAO,KAAK,gBAAgB,CAAC,SAAS,EACxC,WAAU,KAAK;GACb,OAAO;GACP,QAAQ;GACR,OAAO,MAAM,SAAS;GACvB,CAAC;;CAIN,MAAMC,aAKA,EAAE;CAKR,SAAS,QAAQ,OAAyB,WAAmB;EAC3D,MAAM,OAAO,MAAM;EACnB,MAAM,WAAW,UAAU;EAI3B,MAAMC,UAGF;GACF,MAAM;IACJ,UAAU;IACV,OAAO;IACP,OAAO;IACP,QAAQ;IACT;GACD,QAAQ;IACN,QAAQ;IACR,UAAU;IACV,OAAO,MAAM,SACT,iBACA,MAAM,aACJ,gBACA,MAAM,WACJ,iBACA,MAAM,QACJ,iBACA;IACV,OACE,MAAM,UAAU,MAAM,WAClB,iBACA,MAAM,aACJ,gBACA;IACT;GACD,SAAS;IACP,QAAQ;IACR,UAAU;IA
CV,OAAO;IACP,OAAO;IACR;GACD,QAAQ;IACN,QAAQ,MAAM,SAAS,WAAW;IAClC,UAAU,MAAM,SAAS,WAAW;IACpC,OAAO,MAAM,SAAS,WAAW;IACjC,OAAO,MAAM,SAAS,WAAW;IAClC;GACD,MAAM;IACJ,QAAQ;IACR,UAAU;IACV,OAAO;IACP,OAAO,GAAG;IACX;GACD,MAAM;IACJ,QAAQ;IACR,UAAU;IACV,OAAO;IACP,OAAO;IACR;GACD,IAAI;IACF,UAAqB;IACrB,OAAO;IACP,OAAO;IACP,QAAQ;IACT;GACD,cAAc;IACZ,UAAqB;IACrB,OAAO;IACP,OAAO;IACP,QAAQ;IACT;GACD,YAAY;IACV,QAAQ;IACR,UAAU;IACV,OAAO;IACP,OAAO;IACR;GACD,YAAY;IACV,QAAQ;IACR,UAAU;IACV,OAAO;IACP,OAAO;IACR;GACF;AAED,MAAI,cAAc,QAAQ,MAAM,YAAY,UAAU,MAAM;AAC1D,OAAI,cAAc,KAAM,QAAO,QAAQ,GAAG;AAC1C,UAAO,QAAQ,aAAa;;AAG9B,MAAI,MAAM,QAAQ,KAAK,CAAE,QAAO;AAEhC,MAAI,EAAE,QAAQ,SACZ,OAAM,IAAI,MACR,2BAA2B,OAAO,KAAK,CAAC,eAAe,UAAU,iQAClE;AAGH,SAAO,QAAQ,MAAM;;CAGvB,MAAM,eAAe,iBAAiB;EACpC,QAAQ,iBAAiB,OAAO;EAChC,WAAW;EACZ,CAAC;CAEF,MAAM,eAAe,iBAAiB;EACpC,QAAQ,iBAAiB,OAAO;EAChC,WAAW;EACZ,CAAC;CAEF,SAAS,iBAAiB,OAAe,OAAuB;AAC9D,MAAI;AAGF,UAAO,GAFW,aAAa,MAAM,CAEjB,GADF,aAAa;IAAE;IAAO;IAAO,CAAC;UAE1C;AACN,UAAO,GAAG,MAAM,GAAG;;;AAIvB,KAAI,UAAU,OACZ,MAAK,MAAM,SAAS,UAClB,MAAK,MAAM,CAAC,WAAW,UAAU,OAAO,QAAQ,MAAM,OAAO,EAAE;EAC7D,MAAM,OAAO,QAAQ,OAAO,UAAU;AAEtC,MAAI,MAAM,OAAO;GACf,MAAM,QAAQ,GAAG,OACd,WAAW,MAAM,MAAM,CACvB,SAAS,GAAG,MAAM,MAAM,GAAG,UAAU,MAAM;AAC9C,cAAW,KAAK,MAAM;;EAGxB,MAAM,QAAQ,GAAG,OACd,WAAW,MAAM,MAAM,CACvB,UAAU,WAAW,OAAO,QAAQ;AACnC,SAAM,MAAM,aAAa,QAAQ,IAAI,SAAS,GAAG;AAEjD,OAAI,MAAM,WACR,OAAM,IACH,WACC,iBACE,MAAM,WAAW,OACjB,MAAM,WAAW,MAClB,CACF,CACA,SAAS,MAAM,WAAW,YAAY,UAAU;AAGrD,OAAI,MAAM,OAAQ,OAAM,IAAI,QAAQ;AAEpC,OACE,MAAM,SAAS,UACf,OAAO,MAAM,iBAAiB,eAC7B,WAAW,cACV,WAAW,WACX,WAAW,SAEb,OACE,WAAW,UACP,IAAI,UAAU,GAAG,uBAAuB,GACxC,IAAI,UAAU,GAAG,oBAAoB;AAG7C,UAAO;IACP;AAEJ,aAAW,KAAK,MAAM;;CAK5B,MAAMC,cAAoC,EAAE;AAE5C,KAAI,YAAY,OACd,MAAK,MAAM,SAAS,aAAa;EAC/B,MAAM,SAAS,QAAQ,EAAE,MAAM,UAAU,EAAE,KAAK;EAEhD,IAAI,MAAM,GAAG,OACV,YAAY,MAAM,MAAM,CACxB,UAAU,MAAM,SAAS,QAAQ;AAEhC,OAAI,WAAW,WACb,QAAO,IACJ,YAAY,CACZ,UAAU,GAAG,+BAA+B,CAC5C,SAAS;AAId,UAAO,IAAI,YAAY,CAAC,SAAS;IACjC;AAEJ,OAAK,MAAM,CAAC,WAAW,UAAU,OAAO,QAAQ,MAAM,OAAO,
EAAE;GAC7D,MAAM,OAAO,QAAQ,OAAO,UAAU;AAEtC,SAAM,IAAI,UAAU,WAAW,OAAO,QAAQ;AAC5C,UAAM,MAAM,aAAa,QAAQ,IAAI,SAAS,GAAG;AAEjD,QAAI,MAAM,WACR,OAAM,IACH,WACC,iBACE,MAAM,WAAW,OACjB,MAAM,WAAW,MAClB,CACF,CACA,SAAS,MAAM,WAAW,YAAY,UAAU;AAGrD,QAAI,MAAM,OAAQ,OAAM,IAAI,QAAQ;AAEpC,QACE,MAAM,SAAS,UACf,OAAO,MAAM,iBAAiB,eAC7B,WAAW,cAAc,WAAW,WAAW,WAAW,SAE3D,OACE,WAAW,UACP,IAAI,UAAU,GAAG,uBAAuB,GACxC,IAAI,UAAU,GAAG,oBAAoB;AAG7C,WAAO;KACP;AAEF,OAAI,MAAM,OAAO;IACf,MAAM,MAAM,GAAG,OACZ,YACC,GAAG,MAAM,MAAM,GAAG,UAAU,GAAG,MAAM,SAAS,SAAS,QACxD,CACA,GAAG,MAAM,MAAM,CACf,QAAQ,CAAC,UAAU,CAAC;AAEvB,gBAAY,KAAK,MAAM,SAAS,IAAI,QAAQ,GAAG,IAAI;;;AAIvD,aAAW,KAAK,IAAI;;AAIxB,MAAK,MAAM,SAAS,YAClB,YAAW,KAAK,MAAM;CAGxB,eAAe,gBAAgB;AAC7B,OAAK,MAAM,aAAa,WACtB,OAAM,UAAU,SAAS;;CAI7B,eAAe,oBAAoB;AAEjC,SADiB,WAAW,KAAK,MAAM,EAAE,SAAS,CAAC,IAAI,CACvC,KAAK,QAAQ,GAAG;;AAGlC,QAAO;EAAE;EAAa;EAAW;EAAe;EAAmB"}
|