@conorroberts/utils 0.0.16 → 0.0.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/env.cjs.map +1 -1
- package/dist/env.d.cts +2 -2
- package/dist/env.d.ts +2 -2
- package/dist/env.js.map +1 -1
- package/dist/images.cjs +8 -5
- package/dist/images.cjs.map +1 -1
- package/dist/images.d.cts +2 -1
- package/dist/images.d.ts +2 -1
- package/dist/images.js +8 -5
- package/dist/images.js.map +1 -1
- package/package.json +1 -1
- package/dist/db.cjs +0 -45
- package/dist/db.cjs.map +0 -1
- package/dist/db.d.cts +0 -15
- package/dist/db.d.ts +0 -15
- package/dist/db.js +0 -19
- package/dist/db.js.map +0 -1
- package/dist/migrate.cjs +0 -55
- package/dist/migrate.cjs.map +0 -1
- package/dist/migrate.d.cts +0 -7
- package/dist/migrate.d.ts +0 -7
- package/dist/migrate.js +0 -30
- package/dist/migrate.js.map +0 -1
- package/dist/schema.cjs +0 -44
- package/dist/schema.cjs.map +0 -1
- package/dist/schema.d.cts +0 -16
- package/dist/schema.d.ts +0 -16
- package/dist/schema.js +0 -19
- package/dist/schema.js.map +0 -1
package/dist/env.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/env.ts"],"sourcesContent":["import { pipe } from \"remeda\";\r\nimport * as v from \"valibot\";\r\n\r\nconst PUBLIC_ENV_PREFIX = \"PUBLIC_\" as const;\r\n\r\n/**\r\n * Validates your environment variables against the given Valibot schema;\r\n * @param args\r\n * @returns An object containing client environment variables and another containing server environment variables\r\n */\r\nexport const createEnv = <\r\n Schema extends Record<string, v.GenericSchema>,\r\n Env = {\r\n [K in keyof Schema]: v.InferOutput<Schema[K]>;\r\n }\r\n>(args: {\r\n schema: (valibot: typeof v) => Schema;\r\n env:
+
{"version":3,"sources":["../src/env.ts"],"sourcesContent":["import { pipe } from \"remeda\";\r\nimport * as v from \"valibot\";\r\n\r\nconst PUBLIC_ENV_PREFIX = \"PUBLIC_\" as const;\r\n\r\n/**\r\n * Validates your environment variables against the given Valibot schema;\r\n * @param args\r\n * @returns An object containing client environment variables and another containing server environment variables\r\n */\r\nexport const createEnv = <\r\n EnvInput extends Record<string, unknown>,\r\n Schema extends Record<string, v.GenericSchema>,\r\n Env = {\r\n [K in keyof Schema]: v.InferOutput<Schema[K]>;\r\n }\r\n>(args: {\r\n schema: (valibot: typeof v) => Schema;\r\n env: EnvInput;\r\n}) => {\r\n const pairs = Object.entries(args.schema(v));\r\n const serverEnv = new Map();\r\n\r\n for (const [key, value] of pairs) {\r\n const result = v.safeParse(value, args.env[key]);\r\n\r\n if (!result.success) {\r\n console.error(`Environment variable \"${key}\" is invalid`);\r\n process.exit(1);\r\n }\r\n\r\n serverEnv.set(key, result.output);\r\n }\r\n\r\n type ClientEnvKeys = Exclude<\r\n {\r\n [K in keyof Env]: K extends `${typeof PUBLIC_ENV_PREFIX}${string}`\r\n ? K\r\n : never;\r\n }[keyof Env],\r\n undefined\r\n >;\r\n\r\n type ClientEnv = {\r\n [B in ClientEnvKeys]: Env[B];\r\n };\r\n\r\n const clientEnv = pipe(\r\n serverEnv,\r\n (obj) => Array.from(obj.entries()),\r\n (pairs) => pairs.filter(([k]) => k.startsWith(PUBLIC_ENV_PREFIX)),\r\n (pairs) => Object.fromEntries(pairs)\r\n ) as ClientEnv;\r\n\r\n return {\r\n client: clientEnv,\r\n server: Object.fromEntries(serverEnv.entries()) as Env,\r\n };\r\n};\r\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAqB;AACrB,QAAmB;AAEnB,IAAM,oBAAoB;AAOnB,IAAM,YAAY,CAMvB,SAGI;AACJ,QAAM,QAAQ,OAAO,QAAQ,KAAK,OAAO,CAAC,CAAC;AAC3C,QAAM,YAAY,oBAAI,IAAI;AAE1B,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO;AAChC,UAAM,SAAW,YAAU,OAAO,KAAK,IAAI,GAAG,CAAC;AAE/C,QAAI,CAAC,OAAO,SAAS;AACnB,cAAQ,MAAM,yBAAyB,GAAG,cAAc;AACxD,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,cAAU,IAAI,KAAK,OAAO,MAAM;AAAA,EAClC;AAeA,QAAM,gBAAY;AAAA,IAChB;AAAA,IACA,CAAC,QAAQ,MAAM,KAAK,IAAI,QAAQ,CAAC;AAAA,IACjC,CAACA,WAAUA,OAAM,OAAO,CAAC,CAAC,CAAC,MAAM,EAAE,WAAW,iBAAiB,CAAC;AAAA,IAChE,CAACA,WAAU,OAAO,YAAYA,MAAK;AAAA,EACrC;AAEA,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,QAAQ,OAAO,YAAY,UAAU,QAAQ,CAAC;AAAA,EAChD;AACF;","names":["pairs"]}
package/dist/env.d.cts
CHANGED
@@ -5,9 +5,9 @@ import * as v from 'valibot';
  * @param args
  * @returns An object containing client environment variables and another containing server environment variables
  */
-declare const createEnv: <Schema extends Record<string, v.GenericSchema>, Env = { [K in keyof Schema]: v.InferOutput<Schema[K]>; }>(args: {
+declare const createEnv: <EnvInput extends Record<string, unknown>, Schema extends Record<string, v.GenericSchema>, Env = { [K in keyof Schema]: v.InferOutput<Schema[K]>; }>(args: {
     schema: (valibot: typeof v) => Schema;
-    env:
+    env: EnvInput;
 }) => {
     client: { [B in Exclude<{ [K_1 in keyof Env]: K_1 extends `PUBLIC_${string}` ? K_1 : never; }[keyof Env], undefined>]: Env[B]; };
     server: Env;
package/dist/env.d.ts
CHANGED
@@ -5,9 +5,9 @@ import * as v from 'valibot';
  * @param args
  * @returns An object containing client environment variables and another containing server environment variables
  */
-declare const createEnv: <Schema extends Record<string, v.GenericSchema>, Env = { [K in keyof Schema]: v.InferOutput<Schema[K]>; }>(args: {
+declare const createEnv: <EnvInput extends Record<string, unknown>, Schema extends Record<string, v.GenericSchema>, Env = { [K in keyof Schema]: v.InferOutput<Schema[K]>; }>(args: {
     schema: (valibot: typeof v) => Schema;
-    env:
+    env: EnvInput;
 }) => {
     client: { [B in Exclude<{ [K_1 in keyof Env]: K_1 extends `PUBLIC_${string}` ? K_1 : never; }[keyof Env], undefined>]: Env[B]; };
     server: Env;
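
The declaration change above means createEnv now takes an explicit EnvInput type parameter for the raw environment object rather than an untyped env field. A minimal usage sketch, assuming the package exposes an ./env subpath export; the variable names DATABASE_URL and PUBLIC_APP_URL are hypothetical:

import { createEnv } from "@conorroberts/utils/env"; // assumed subpath export

// Hypothetical schema: PUBLIC_-prefixed keys are the only ones exposed on the client object.
const env = createEnv({
  schema: (v) => ({
    DATABASE_URL: v.string(),
    PUBLIC_APP_URL: v.string(),
  }),
  // New in 0.0.19: env is typed as EnvInput extends Record<string, unknown>.
  env: process.env,
});

env.server.DATABASE_URL;   // every validated variable
env.client.PUBLIC_APP_URL; // only PUBLIC_-prefixed variables

Validation failures still log the offending key and call process.exit(1), so this is intended for startup-time use.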
package/dist/env.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/env.ts"],"sourcesContent":["import { pipe } from \"remeda\";\r\nimport * as v from \"valibot\";\r\n\r\nconst PUBLIC_ENV_PREFIX = \"PUBLIC_\" as const;\r\n\r\n/**\r\n * Validates your environment variables against the given Valibot schema;\r\n * @param args\r\n * @returns An object containing client environment variables and another containing server environment variables\r\n */\r\nexport const createEnv = <\r\n Schema extends Record<string, v.GenericSchema>,\r\n Env = {\r\n [K in keyof Schema]: v.InferOutput<Schema[K]>;\r\n }\r\n>(args: {\r\n schema: (valibot: typeof v) => Schema;\r\n env:
+
{"version":3,"sources":["../src/env.ts"],"sourcesContent":["import { pipe } from \"remeda\";\r\nimport * as v from \"valibot\";\r\n\r\nconst PUBLIC_ENV_PREFIX = \"PUBLIC_\" as const;\r\n\r\n/**\r\n * Validates your environment variables against the given Valibot schema;\r\n * @param args\r\n * @returns An object containing client environment variables and another containing server environment variables\r\n */\r\nexport const createEnv = <\r\n EnvInput extends Record<string, unknown>,\r\n Schema extends Record<string, v.GenericSchema>,\r\n Env = {\r\n [K in keyof Schema]: v.InferOutput<Schema[K]>;\r\n }\r\n>(args: {\r\n schema: (valibot: typeof v) => Schema;\r\n env: EnvInput;\r\n}) => {\r\n const pairs = Object.entries(args.schema(v));\r\n const serverEnv = new Map();\r\n\r\n for (const [key, value] of pairs) {\r\n const result = v.safeParse(value, args.env[key]);\r\n\r\n if (!result.success) {\r\n console.error(`Environment variable \"${key}\" is invalid`);\r\n process.exit(1);\r\n }\r\n\r\n serverEnv.set(key, result.output);\r\n }\r\n\r\n type ClientEnvKeys = Exclude<\r\n {\r\n [K in keyof Env]: K extends `${typeof PUBLIC_ENV_PREFIX}${string}`\r\n ? K\r\n : never;\r\n }[keyof Env],\r\n undefined\r\n >;\r\n\r\n type ClientEnv = {\r\n [B in ClientEnvKeys]: Env[B];\r\n };\r\n\r\n const clientEnv = pipe(\r\n serverEnv,\r\n (obj) => Array.from(obj.entries()),\r\n (pairs) => pairs.filter(([k]) => k.startsWith(PUBLIC_ENV_PREFIX)),\r\n (pairs) => Object.fromEntries(pairs)\r\n ) as ClientEnv;\r\n\r\n return {\r\n client: clientEnv,\r\n server: Object.fromEntries(serverEnv.entries()) as Env,\r\n };\r\n};\r\n"],"mappings":";AAAA,SAAS,YAAY;AACrB,YAAY,OAAO;AAEnB,IAAM,oBAAoB;AAOnB,IAAM,YAAY,CAMvB,SAGI;AACJ,QAAM,QAAQ,OAAO,QAAQ,KAAK,OAAO,CAAC,CAAC;AAC3C,QAAM,YAAY,oBAAI,IAAI;AAE1B,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO;AAChC,UAAM,SAAW,YAAU,OAAO,KAAK,IAAI,GAAG,CAAC;AAE/C,QAAI,CAAC,OAAO,SAAS;AACnB,cAAQ,MAAM,yBAAyB,GAAG,cAAc;AACxD,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,cAAU,IAAI,KAAK,OAAO,MAAM;AAAA,EAClC;AAeA,QAAM,YAAY;AAAA,IAChB;AAAA,IACA,CAAC,QAAQ,MAAM,KAAK,IAAI,QAAQ,CAAC;AAAA,IACjC,CAACA,WAAUA,OAAM,OAAO,CAAC,CAAC,CAAC,MAAM,EAAE,WAAW,iBAAiB,CAAC;AAAA,IAChE,CAACA,WAAU,OAAO,YAAYA,MAAK;AAAA,EACrC;AAEA,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,QAAQ,OAAO,YAAY,UAAU,QAAQ,CAAC;AAAA,EAChD;AACF;","names":["pairs"]}
package/dist/images.cjs
CHANGED
@@ -38,10 +38,10 @@ var import_dayjs = __toESM(require("dayjs"), 1);
 var import_ofetch = require("ofetch");
 var ImageUtils = class {
   blacklist = ["img.clerk.com"];
-
+  _accountId;
   _imageIds;
   constructor(args) {
-    this.
+    this._accountId = args.accountId;
     this._imageIds = args.imageIds;
     if (args.blacklist) {
       this.blacklist.push(...args.blacklist);
@@ -53,8 +53,11 @@ var ImageUtils = class {
     }
     return this._imageIds;
   }
+  get accountId() {
+    return this._accountId;
+  }
   url(id) {
-    return `https://imagedelivery.net/${this.
+    return `https://imagedelivery.net/${this.accountId}/${id}/public`;
   }
   isBlacklisted(url) {
     return this.blacklist.some((u) => url.includes(u));
@@ -106,7 +109,7 @@ var ImageUtils = class {
          form.append("id", id);
          form.append("expiry", (0, import_dayjs.default)().add(5, "minute").toISOString());
          const img = await (0, import_ofetch.ofetch)(
-            `https://api.cloudflare.com/client/v4/accounts/${this.
+            `https://api.cloudflare.com/client/v4/accounts/${this.accountId}/images/v2/direct_upload`,
            { method: "POST", headers, body: form }
          );
          if (!img.success) {
@@ -143,7 +146,7 @@ var ImageUtils = class {
      const headers = new Headers();
      headers.set("Authorization", `Bearer ${args.apiKey}`);
      await (0, import_ofetch.ofetch)(
-        `https://api.cloudflare.com/client/v4/accounts/${this.
+        `https://api.cloudflare.com/client/v4/accounts/${this.accountId}/images/v1/${id}`,
        {
          method: "POST",
          headers
package/dist/images.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/images.ts"],"sourcesContent":["import { createId } from \"@paralleldrive/cuid2\";\r\nimport dayjs from \"dayjs\";\r\nimport { ofetch } from \"ofetch\";\r\n\r\nexport interface OptimizedImageOptions {\r\n anim?: boolean;\r\n background?: string;\r\n blur?: number;\r\n brightness?: number;\r\n compression?: \"fast\"; // faster compression = larger file size\r\n contrast?: number;\r\n dpr?: number;\r\n fit?: \"scale-down\" | \"contain\" | \"cover\" | \"crop\" | \"pad\";\r\n format?: \"webp\" | \"avif\" | \"json\";\r\n gamma?: number;\r\n width?: number;\r\n height?: number;\r\n metadata?: \"keep\" | \"copyright\" | \"none\";\r\n quality?: number;\r\n rotate?: number;\r\n sharpen?: number;\r\n}\r\n\r\nexport interface CreateImageUrlResponse {\r\n result: {\r\n id: string;\r\n uploadURL: string;\r\n };\r\n success: boolean;\r\n errors: unknown[];\r\n messages: unknown[];\r\n}\r\n\r\ninterface UploadImageResponse {\r\n result: {\r\n id: string;\r\n filename: string;\r\n uploaded: string;\r\n requireSignedURLs: boolean;\r\n variants: string[];\r\n };\r\n success: boolean;\r\n errors: unknown[];\r\n messages: unknown[];\r\n}\r\n\r\nexport class ImageUtils<ImageIds extends Record<string, any>> {\r\n private blacklist: string[] = [\"img.clerk.com\"];\r\n private
+
{"version":3,"sources":["../src/images.ts"],"sourcesContent":["import { createId } from \"@paralleldrive/cuid2\";\r\nimport dayjs from \"dayjs\";\r\nimport { ofetch } from \"ofetch\";\r\n\r\nexport interface OptimizedImageOptions {\r\n anim?: boolean;\r\n background?: string;\r\n blur?: number;\r\n brightness?: number;\r\n compression?: \"fast\"; // faster compression = larger file size\r\n contrast?: number;\r\n dpr?: number;\r\n fit?: \"scale-down\" | \"contain\" | \"cover\" | \"crop\" | \"pad\";\r\n format?: \"webp\" | \"avif\" | \"json\";\r\n gamma?: number;\r\n width?: number;\r\n height?: number;\r\n metadata?: \"keep\" | \"copyright\" | \"none\";\r\n quality?: number;\r\n rotate?: number;\r\n sharpen?: number;\r\n}\r\n\r\nexport interface CreateImageUrlResponse {\r\n result: {\r\n id: string;\r\n uploadURL: string;\r\n };\r\n success: boolean;\r\n errors: unknown[];\r\n messages: unknown[];\r\n}\r\n\r\ninterface UploadImageResponse {\r\n result: {\r\n id: string;\r\n filename: string;\r\n uploaded: string;\r\n requireSignedURLs: boolean;\r\n variants: string[];\r\n };\r\n success: boolean;\r\n errors: unknown[];\r\n messages: unknown[];\r\n}\r\n\r\nexport class ImageUtils<ImageIds extends Record<string, any>> {\r\n private blacklist: string[] = [\"img.clerk.com\"];\r\n private _accountId: string;\r\n private _imageIds: ImageIds | undefined;\r\n\r\n constructor(args: {\r\n accountId: string;\r\n blacklist?: string[];\r\n imageIds?: ImageIds;\r\n }) {\r\n this._accountId = args.accountId;\r\n\r\n this._imageIds = args.imageIds;\r\n\r\n if (args.blacklist) {\r\n this.blacklist.push(...args.blacklist);\r\n }\r\n }\r\n\r\n get imageIds() {\r\n if (!this._imageIds) {\r\n throw new Error(\"imageIds was not supplied in constructor\");\r\n }\r\n\r\n return this._imageIds;\r\n }\r\n\r\n get accountId() {\r\n return this._accountId;\r\n }\r\n\r\n public url(id: string) {\r\n return `https://imagedelivery.net/${this.accountId}/${id}/public`;\r\n }\r\n\r\n private isBlacklisted(url: string) {\r\n return this.blacklist.some((u) => url.includes(u));\r\n }\r\n\r\n private isProtected(id: string) {\r\n if (!this._imageIds) {\r\n return false;\r\n }\r\n\r\n return Object.values(this._imageIds).some((e) => e === id);\r\n }\r\n\r\n /**\r\n * Will only operate on images that have been uploaded via cloudflare images\r\n */\r\n public optimizeUrl(url: string, options: OptimizedImageOptions) {\r\n if (this.isBlacklisted(url)) {\r\n return url;\r\n }\r\n\r\n // Final format should look similar to: https://imagedelivery.net/<ACCOUNT_HASH>/<IMAGE_ID>/w=400,sharpen=3\r\n return url.replace(\"public\", this.createImageOptionsString(options));\r\n }\r\n\r\n public optimizeId(id: string, options: OptimizedImageOptions) {\r\n return this.optimizeUrl(this.url(id), options);\r\n }\r\n\r\n public createOptionsSearchParams(options: OptimizedImageOptions) {\r\n const params = new URLSearchParams();\r\n\r\n const pairs = Object.entries(options);\r\n\r\n for (const [key, val] of pairs) {\r\n if (val === undefined) {\r\n continue;\r\n }\r\n\r\n params.set(key, val.toString());\r\n }\r\n\r\n return params;\r\n }\r\n\r\n public createImageOptionsString(options: OptimizedImageOptions) {\r\n const params = this.createOptionsSearchParams(options);\r\n\r\n return Array.from(params.entries())\r\n .map(([key, val]) => `${key}=${val}`)\r\n .join(\",\");\r\n }\r\n\r\n public async createUploadUrls(count: number, args: { apiKey: string }) {\r\n if (count === 0) {\r\n return [];\r\n }\r\n\r\n const headers = new Headers();\r\n 
headers.set(\"Authorization\", `Bearer ${args.apiKey}`);\r\n\r\n const urls = await Promise.all(\r\n Array.from({ length: count }).map(async () => {\r\n try {\r\n const form = new FormData();\r\n const id = createId();\r\n form.append(\"id\", id);\r\n form.append(\"expiry\", dayjs().add(5, \"minute\").toISOString());\r\n\r\n const img = await ofetch<CreateImageUrlResponse>(\r\n `https://api.cloudflare.com/client/v4/accounts/${this.accountId}/images/v2/direct_upload`,\r\n { method: \"POST\", headers, body: form }\r\n );\r\n\r\n if (!img.success) {\r\n throw new Error(\"Error uploading image\");\r\n }\r\n\r\n return { url: img.result.uploadURL, id };\r\n } catch (e) {\r\n console.error(\"Error uploading image\");\r\n throw e;\r\n }\r\n })\r\n );\r\n\r\n return urls;\r\n }\r\n\r\n public async upload(url: string, body: FormData) {\r\n const fetchResponse = await ofetch<UploadImageResponse>(url, {\r\n method: \"POST\",\r\n body,\r\n });\r\n\r\n if (!fetchResponse.success) {\r\n throw new Error(\"Failed to upload image\");\r\n }\r\n\r\n const downloadUrl = fetchResponse.result.variants[0];\r\n\r\n if (!downloadUrl) {\r\n throw new Error(\"Could not find download URL\");\r\n }\r\n\r\n return downloadUrl;\r\n }\r\n\r\n public async delete(id: string, args: { apiKey: string }) {\r\n if (this.isProtected(id)) {\r\n return { success: true };\r\n }\r\n\r\n try {\r\n const headers = new Headers();\r\n headers.set(\"Authorization\", `Bearer ${args.apiKey}`);\r\n\r\n await ofetch(\r\n `https://api.cloudflare.com/client/v4/accounts/${this.accountId}/images/v1/${id}`,\r\n {\r\n method: \"POST\",\r\n headers,\r\n }\r\n );\r\n return { success: true };\r\n } catch (_e) {\r\n return { success: false };\r\n }\r\n }\r\n\r\n public async batchUpload(\r\n files: { file: File; url: { id: string; value: string } }[]\r\n ) {\r\n return await Promise.all(\r\n files.map(async (e) => {\r\n const formData = new FormData();\r\n formData.append(\"file\", e.file);\r\n\r\n const downloadUrl = await this.upload(e.url.value, formData);\r\n\r\n return {\r\n url: downloadUrl,\r\n id: e.url.id,\r\n };\r\n })\r\n );\r\n 
}\r\n}\r\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAyB;AACzB,mBAAkB;AAClB,oBAAuB;AA4ChB,IAAM,aAAN,MAAuD;AAAA,EACpD,YAAsB,CAAC,eAAe;AAAA,EACtC;AAAA,EACA;AAAA,EAER,YAAY,MAIT;AACD,SAAK,aAAa,KAAK;AAEvB,SAAK,YAAY,KAAK;AAEtB,QAAI,KAAK,WAAW;AAClB,WAAK,UAAU,KAAK,GAAG,KAAK,SAAS;AAAA,IACvC;AAAA,EACF;AAAA,EAEA,IAAI,WAAW;AACb,QAAI,CAAC,KAAK,WAAW;AACnB,YAAM,IAAI,MAAM,0CAA0C;AAAA,IAC5D;AAEA,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,IAAI,YAAY;AACd,WAAO,KAAK;AAAA,EACd;AAAA,EAEO,IAAI,IAAY;AACrB,WAAO,6BAA6B,KAAK,SAAS,IAAI,EAAE;AAAA,EAC1D;AAAA,EAEQ,cAAc,KAAa;AACjC,WAAO,KAAK,UAAU,KAAK,CAAC,MAAM,IAAI,SAAS,CAAC,CAAC;AAAA,EACnD;AAAA,EAEQ,YAAY,IAAY;AAC9B,QAAI,CAAC,KAAK,WAAW;AACnB,aAAO;AAAA,IACT;AAEA,WAAO,OAAO,OAAO,KAAK,SAAS,EAAE,KAAK,CAAC,MAAM,MAAM,EAAE;AAAA,EAC3D;AAAA;AAAA;AAAA;AAAA,EAKO,YAAY,KAAa,SAAgC;AAC9D,QAAI,KAAK,cAAc,GAAG,GAAG;AAC3B,aAAO;AAAA,IACT;AAGA,WAAO,IAAI,QAAQ,UAAU,KAAK,yBAAyB,OAAO,CAAC;AAAA,EACrE;AAAA,EAEO,WAAW,IAAY,SAAgC;AAC5D,WAAO,KAAK,YAAY,KAAK,IAAI,EAAE,GAAG,OAAO;AAAA,EAC/C;AAAA,EAEO,0BAA0B,SAAgC;AAC/D,UAAM,SAAS,IAAI,gBAAgB;AAEnC,UAAM,QAAQ,OAAO,QAAQ,OAAO;AAEpC,eAAW,CAAC,KAAK,GAAG,KAAK,OAAO;AAC9B,UAAI,QAAQ,QAAW;AACrB;AAAA,MACF;AAEA,aAAO,IAAI,KAAK,IAAI,SAAS,CAAC;AAAA,IAChC;AAEA,WAAO;AAAA,EACT;AAAA,EAEO,yBAAyB,SAAgC;AAC9D,UAAM,SAAS,KAAK,0BAA0B,OAAO;AAErD,WAAO,MAAM,KAAK,OAAO,QAAQ,CAAC,EAC/B,IAAI,CAAC,CAAC,KAAK,GAAG,MAAM,GAAG,GAAG,IAAI,GAAG,EAAE,EACnC,KAAK,GAAG;AAAA,EACb;AAAA,EAEA,MAAa,iBAAiB,OAAe,MAA0B;AACrE,QAAI,UAAU,GAAG;AACf,aAAO,CAAC;AAAA,IACV;AAEA,UAAM,UAAU,IAAI,QAAQ;AAC5B,YAAQ,IAAI,iBAAiB,UAAU,KAAK,MAAM,EAAE;AAEpD,UAAM,OAAO,MAAM,QAAQ;AAAA,MACzB,MAAM,KAAK,EAAE,QAAQ,MAAM,CAAC,EAAE,IAAI,YAAY;AAC5C,YAAI;AACF,gBAAM,OAAO,IAAI,SAAS;AAC1B,gBAAM,SAAK,uBAAS;AACpB,eAAK,OAAO,MAAM,EAAE;AACpB,eAAK,OAAO,cAAU,aAAAA,SAAM,EAAE,IAAI,GAAG,QAAQ,EAAE,YAAY,CAAC;AAE5D,gBAAM,MAAM,UAAM;AAAA,YAChB,iDAAiD,KAAK,SAAS;AAAA,YAC/D,EAAE,QAAQ,QAAQ,SAAS,MAAM,KAAK;AAAA,UACxC;AAEA,cAAI,CAAC,IAAI,SAAS;AAChB,kBAAM,IAAI,MAAM,uBAAuB;AAAA,UACzC;AAEA,iBAAO,EAAE,KAAK,IAAI,OAAO,WAAW,GAAG;AAAA,QACzC,SAAS,GAAG;AACV,kBAAQ,MAAM,uBAAuB;AACrC,gBAAM;AAAA,QACR;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAa,OAAO,KAAa,MAAgB;AAC/C,UAAM,gBAAgB,UAAM,sBAA4B,KAAK;AAAA,MAC3D,QAAQ;AAAA,MACR;AAAA,IACF,CAAC;AAED,QAAI,CAAC,cAAc,SAAS;AAC1B,YAAM,IAAI,MAAM,wBAAwB;AAAA,IAC1C;AAEA,UAAM,cAAc,cAAc,OAAO,SAAS,CAAC;AAEnD,QAAI,CAAC,aAAa;AAChB,YAAM,IAAI,MAAM,6BAA6B;AAAA,IAC/C;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAa,OAAO,IAAY,MAA0B;AACxD,QAAI,KAAK,YAAY,EAAE,GAAG;AACxB,aAAO,EAAE,SAAS,KAAK;AAAA,IACzB;AAEA,QAAI;AACF,YAAM,UAAU,IAAI,QAAQ;AAC5B,cAAQ,IAAI,iBAAiB,UAAU,KAAK,MAAM,EAAE;AAEpD,gBAAM;AAAA,QACJ,iDAAiD,KAAK,SAAS,cAAc,EAAE;AAAA,QAC/E;AAAA,UACE,QAAQ;AAAA,UACR;AAAA,QACF;AAAA,MACF;AACA,aAAO,EAAE,SAAS,KAAK;AAAA,IACzB,SAAS,IAAI;AACX,aAAO,EAAE,SAAS,MAAM;AAAA,IAC1B;AAAA,EACF;AAAA,EAEA,MAAa,YACX,OACA;AACA,WAAO,MAAM,QAAQ;AAAA,MACnB,MAAM,IAAI,OAAO,MAAM;AACrB,cAAM,WAAW,IAAI,SAAS;AAC9B,iBAAS,OAAO,QAAQ,EAAE,IAAI;AAE9B,cAAM,cAAc,MAAM,KAAK,OAAO,EAAE,IAAI,OAAO,QAAQ;AAE3D,eAAO;AAAA,UACL,KAAK;AAAA,UACL,IAAI,EAAE,IAAI;AAAA,QACZ;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AACF;","names":["dayjs"]}
package/dist/images.d.cts
CHANGED
@@ -27,7 +27,7 @@ interface CreateImageUrlResponse {
 }
 declare class ImageUtils<ImageIds extends Record<string, any>> {
     private blacklist;
-    private
+    private _accountId;
     private _imageIds;
     constructor(args: {
         accountId: string;
@@ -35,6 +35,7 @@ declare class ImageUtils<ImageIds extends Record<string, any>> {
         imageIds?: ImageIds;
     });
     get imageIds(): ImageIds;
+    get accountId(): string;
     url(id: string): string;
     private isBlacklisted;
     private isProtected;
package/dist/images.d.ts
CHANGED
@@ -27,7 +27,7 @@ interface CreateImageUrlResponse {
 }
 declare class ImageUtils<ImageIds extends Record<string, any>> {
     private blacklist;
-    private
+    private _accountId;
     private _imageIds;
     constructor(args: {
         accountId: string;
@@ -35,6 +35,7 @@ declare class ImageUtils<ImageIds extends Record<string, any>> {
         imageIds?: ImageIds;
     });
     get imageIds(): ImageIds;
+    get accountId(): string;
     url(id: string): string;
     private isBlacklisted;
     private isProtected;
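
The images change adds a private _accountId field and a public accountId getter, and the delivery/API URLs are now built from that getter. A small sketch of the resulting surface, assuming an ./images subpath export; the account hash and image ids below are placeholders:

import { ImageUtils } from "@conorroberts/utils/images"; // assumed subpath export

const images = new ImageUtils({
  accountId: "<ACCOUNT_HASH>",          // placeholder Cloudflare Images account hash
  imageIds: { logo: "logo-image-id" },  // placeholder protected image ids
});

images.accountId;              // new in 0.0.19: the account hash is readable
images.url("some-image-id");   // https://imagedelivery.net/<ACCOUNT_HASH>/some-image-id/public
images.optimizeId("some-image-id", { width: 400, format: "webp" });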
package/dist/images.js
CHANGED
@@ -4,10 +4,10 @@ import dayjs from "dayjs";
 import { ofetch } from "ofetch";
 var ImageUtils = class {
   blacklist = ["img.clerk.com"];
-
+  _accountId;
   _imageIds;
   constructor(args) {
-    this.
+    this._accountId = args.accountId;
     this._imageIds = args.imageIds;
     if (args.blacklist) {
       this.blacklist.push(...args.blacklist);
@@ -19,8 +19,11 @@ var ImageUtils = class {
     }
     return this._imageIds;
   }
+  get accountId() {
+    return this._accountId;
+  }
   url(id) {
-    return `https://imagedelivery.net/${this.
+    return `https://imagedelivery.net/${this.accountId}/${id}/public`;
   }
   isBlacklisted(url) {
     return this.blacklist.some((u) => url.includes(u));
@@ -72,7 +75,7 @@ var ImageUtils = class {
          form.append("id", id);
          form.append("expiry", dayjs().add(5, "minute").toISOString());
          const img = await ofetch(
-            `https://api.cloudflare.com/client/v4/accounts/${this.
+            `https://api.cloudflare.com/client/v4/accounts/${this.accountId}/images/v2/direct_upload`,
            { method: "POST", headers, body: form }
          );
          if (!img.success) {
@@ -109,7 +112,7 @@ var ImageUtils = class {
      const headers = new Headers();
      headers.set("Authorization", `Bearer ${args.apiKey}`);
      await ofetch(
-        `https://api.cloudflare.com/client/v4/accounts/${this.
+        `https://api.cloudflare.com/client/v4/accounts/${this.accountId}/images/v1/${id}`,
        {
          method: "POST",
          headers
package/dist/images.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/images.ts"],"sourcesContent":["import { createId } from \"@paralleldrive/cuid2\";\r\nimport dayjs from \"dayjs\";\r\nimport { ofetch } from \"ofetch\";\r\n\r\nexport interface OptimizedImageOptions {\r\n anim?: boolean;\r\n background?: string;\r\n blur?: number;\r\n brightness?: number;\r\n compression?: \"fast\"; // faster compression = larger file size\r\n contrast?: number;\r\n dpr?: number;\r\n fit?: \"scale-down\" | \"contain\" | \"cover\" | \"crop\" | \"pad\";\r\n format?: \"webp\" | \"avif\" | \"json\";\r\n gamma?: number;\r\n width?: number;\r\n height?: number;\r\n metadata?: \"keep\" | \"copyright\" | \"none\";\r\n quality?: number;\r\n rotate?: number;\r\n sharpen?: number;\r\n}\r\n\r\nexport interface CreateImageUrlResponse {\r\n result: {\r\n id: string;\r\n uploadURL: string;\r\n };\r\n success: boolean;\r\n errors: unknown[];\r\n messages: unknown[];\r\n}\r\n\r\ninterface UploadImageResponse {\r\n result: {\r\n id: string;\r\n filename: string;\r\n uploaded: string;\r\n requireSignedURLs: boolean;\r\n variants: string[];\r\n };\r\n success: boolean;\r\n errors: unknown[];\r\n messages: unknown[];\r\n}\r\n\r\nexport class ImageUtils<ImageIds extends Record<string, any>> {\r\n private blacklist: string[] = [\"img.clerk.com\"];\r\n private
+
{"version":3,"sources":["../src/images.ts"],"sourcesContent":["import { createId } from \"@paralleldrive/cuid2\";\r\nimport dayjs from \"dayjs\";\r\nimport { ofetch } from \"ofetch\";\r\n\r\nexport interface OptimizedImageOptions {\r\n anim?: boolean;\r\n background?: string;\r\n blur?: number;\r\n brightness?: number;\r\n compression?: \"fast\"; // faster compression = larger file size\r\n contrast?: number;\r\n dpr?: number;\r\n fit?: \"scale-down\" | \"contain\" | \"cover\" | \"crop\" | \"pad\";\r\n format?: \"webp\" | \"avif\" | \"json\";\r\n gamma?: number;\r\n width?: number;\r\n height?: number;\r\n metadata?: \"keep\" | \"copyright\" | \"none\";\r\n quality?: number;\r\n rotate?: number;\r\n sharpen?: number;\r\n}\r\n\r\nexport interface CreateImageUrlResponse {\r\n result: {\r\n id: string;\r\n uploadURL: string;\r\n };\r\n success: boolean;\r\n errors: unknown[];\r\n messages: unknown[];\r\n}\r\n\r\ninterface UploadImageResponse {\r\n result: {\r\n id: string;\r\n filename: string;\r\n uploaded: string;\r\n requireSignedURLs: boolean;\r\n variants: string[];\r\n };\r\n success: boolean;\r\n errors: unknown[];\r\n messages: unknown[];\r\n}\r\n\r\nexport class ImageUtils<ImageIds extends Record<string, any>> {\r\n private blacklist: string[] = [\"img.clerk.com\"];\r\n private _accountId: string;\r\n private _imageIds: ImageIds | undefined;\r\n\r\n constructor(args: {\r\n accountId: string;\r\n blacklist?: string[];\r\n imageIds?: ImageIds;\r\n }) {\r\n this._accountId = args.accountId;\r\n\r\n this._imageIds = args.imageIds;\r\n\r\n if (args.blacklist) {\r\n this.blacklist.push(...args.blacklist);\r\n }\r\n }\r\n\r\n get imageIds() {\r\n if (!this._imageIds) {\r\n throw new Error(\"imageIds was not supplied in constructor\");\r\n }\r\n\r\n return this._imageIds;\r\n }\r\n\r\n get accountId() {\r\n return this._accountId;\r\n }\r\n\r\n public url(id: string) {\r\n return `https://imagedelivery.net/${this.accountId}/${id}/public`;\r\n }\r\n\r\n private isBlacklisted(url: string) {\r\n return this.blacklist.some((u) => url.includes(u));\r\n }\r\n\r\n private isProtected(id: string) {\r\n if (!this._imageIds) {\r\n return false;\r\n }\r\n\r\n return Object.values(this._imageIds).some((e) => e === id);\r\n }\r\n\r\n /**\r\n * Will only operate on images that have been uploaded via cloudflare images\r\n */\r\n public optimizeUrl(url: string, options: OptimizedImageOptions) {\r\n if (this.isBlacklisted(url)) {\r\n return url;\r\n }\r\n\r\n // Final format should look similar to: https://imagedelivery.net/<ACCOUNT_HASH>/<IMAGE_ID>/w=400,sharpen=3\r\n return url.replace(\"public\", this.createImageOptionsString(options));\r\n }\r\n\r\n public optimizeId(id: string, options: OptimizedImageOptions) {\r\n return this.optimizeUrl(this.url(id), options);\r\n }\r\n\r\n public createOptionsSearchParams(options: OptimizedImageOptions) {\r\n const params = new URLSearchParams();\r\n\r\n const pairs = Object.entries(options);\r\n\r\n for (const [key, val] of pairs) {\r\n if (val === undefined) {\r\n continue;\r\n }\r\n\r\n params.set(key, val.toString());\r\n }\r\n\r\n return params;\r\n }\r\n\r\n public createImageOptionsString(options: OptimizedImageOptions) {\r\n const params = this.createOptionsSearchParams(options);\r\n\r\n return Array.from(params.entries())\r\n .map(([key, val]) => `${key}=${val}`)\r\n .join(\",\");\r\n }\r\n\r\n public async createUploadUrls(count: number, args: { apiKey: string }) {\r\n if (count === 0) {\r\n return [];\r\n }\r\n\r\n const headers = new Headers();\r\n 
headers.set(\"Authorization\", `Bearer ${args.apiKey}`);\r\n\r\n const urls = await Promise.all(\r\n Array.from({ length: count }).map(async () => {\r\n try {\r\n const form = new FormData();\r\n const id = createId();\r\n form.append(\"id\", id);\r\n form.append(\"expiry\", dayjs().add(5, \"minute\").toISOString());\r\n\r\n const img = await ofetch<CreateImageUrlResponse>(\r\n `https://api.cloudflare.com/client/v4/accounts/${this.accountId}/images/v2/direct_upload`,\r\n { method: \"POST\", headers, body: form }\r\n );\r\n\r\n if (!img.success) {\r\n throw new Error(\"Error uploading image\");\r\n }\r\n\r\n return { url: img.result.uploadURL, id };\r\n } catch (e) {\r\n console.error(\"Error uploading image\");\r\n throw e;\r\n }\r\n })\r\n );\r\n\r\n return urls;\r\n }\r\n\r\n public async upload(url: string, body: FormData) {\r\n const fetchResponse = await ofetch<UploadImageResponse>(url, {\r\n method: \"POST\",\r\n body,\r\n });\r\n\r\n if (!fetchResponse.success) {\r\n throw new Error(\"Failed to upload image\");\r\n }\r\n\r\n const downloadUrl = fetchResponse.result.variants[0];\r\n\r\n if (!downloadUrl) {\r\n throw new Error(\"Could not find download URL\");\r\n }\r\n\r\n return downloadUrl;\r\n }\r\n\r\n public async delete(id: string, args: { apiKey: string }) {\r\n if (this.isProtected(id)) {\r\n return { success: true };\r\n }\r\n\r\n try {\r\n const headers = new Headers();\r\n headers.set(\"Authorization\", `Bearer ${args.apiKey}`);\r\n\r\n await ofetch(\r\n `https://api.cloudflare.com/client/v4/accounts/${this.accountId}/images/v1/${id}`,\r\n {\r\n method: \"POST\",\r\n headers,\r\n }\r\n );\r\n return { success: true };\r\n } catch (_e) {\r\n return { success: false };\r\n }\r\n }\r\n\r\n public async batchUpload(\r\n files: { file: File; url: { id: string; value: string } }[]\r\n ) {\r\n return await Promise.all(\r\n files.map(async (e) => {\r\n const formData = new FormData();\r\n formData.append(\"file\", e.file);\r\n\r\n const downloadUrl = await this.upload(e.url.value, formData);\r\n\r\n return {\r\n url: downloadUrl,\r\n id: e.url.id,\r\n };\r\n })\r\n );\r\n 
}\r\n}\r\n"],"mappings":";AAAA,SAAS,gBAAgB;AACzB,OAAO,WAAW;AAClB,SAAS,cAAc;AA4ChB,IAAM,aAAN,MAAuD;AAAA,EACpD,YAAsB,CAAC,eAAe;AAAA,EACtC;AAAA,EACA;AAAA,EAER,YAAY,MAIT;AACD,SAAK,aAAa,KAAK;AAEvB,SAAK,YAAY,KAAK;AAEtB,QAAI,KAAK,WAAW;AAClB,WAAK,UAAU,KAAK,GAAG,KAAK,SAAS;AAAA,IACvC;AAAA,EACF;AAAA,EAEA,IAAI,WAAW;AACb,QAAI,CAAC,KAAK,WAAW;AACnB,YAAM,IAAI,MAAM,0CAA0C;AAAA,IAC5D;AAEA,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,IAAI,YAAY;AACd,WAAO,KAAK;AAAA,EACd;AAAA,EAEO,IAAI,IAAY;AACrB,WAAO,6BAA6B,KAAK,SAAS,IAAI,EAAE;AAAA,EAC1D;AAAA,EAEQ,cAAc,KAAa;AACjC,WAAO,KAAK,UAAU,KAAK,CAAC,MAAM,IAAI,SAAS,CAAC,CAAC;AAAA,EACnD;AAAA,EAEQ,YAAY,IAAY;AAC9B,QAAI,CAAC,KAAK,WAAW;AACnB,aAAO;AAAA,IACT;AAEA,WAAO,OAAO,OAAO,KAAK,SAAS,EAAE,KAAK,CAAC,MAAM,MAAM,EAAE;AAAA,EAC3D;AAAA;AAAA;AAAA;AAAA,EAKO,YAAY,KAAa,SAAgC;AAC9D,QAAI,KAAK,cAAc,GAAG,GAAG;AAC3B,aAAO;AAAA,IACT;AAGA,WAAO,IAAI,QAAQ,UAAU,KAAK,yBAAyB,OAAO,CAAC;AAAA,EACrE;AAAA,EAEO,WAAW,IAAY,SAAgC;AAC5D,WAAO,KAAK,YAAY,KAAK,IAAI,EAAE,GAAG,OAAO;AAAA,EAC/C;AAAA,EAEO,0BAA0B,SAAgC;AAC/D,UAAM,SAAS,IAAI,gBAAgB;AAEnC,UAAM,QAAQ,OAAO,QAAQ,OAAO;AAEpC,eAAW,CAAC,KAAK,GAAG,KAAK,OAAO;AAC9B,UAAI,QAAQ,QAAW;AACrB;AAAA,MACF;AAEA,aAAO,IAAI,KAAK,IAAI,SAAS,CAAC;AAAA,IAChC;AAEA,WAAO;AAAA,EACT;AAAA,EAEO,yBAAyB,SAAgC;AAC9D,UAAM,SAAS,KAAK,0BAA0B,OAAO;AAErD,WAAO,MAAM,KAAK,OAAO,QAAQ,CAAC,EAC/B,IAAI,CAAC,CAAC,KAAK,GAAG,MAAM,GAAG,GAAG,IAAI,GAAG,EAAE,EACnC,KAAK,GAAG;AAAA,EACb;AAAA,EAEA,MAAa,iBAAiB,OAAe,MAA0B;AACrE,QAAI,UAAU,GAAG;AACf,aAAO,CAAC;AAAA,IACV;AAEA,UAAM,UAAU,IAAI,QAAQ;AAC5B,YAAQ,IAAI,iBAAiB,UAAU,KAAK,MAAM,EAAE;AAEpD,UAAM,OAAO,MAAM,QAAQ;AAAA,MACzB,MAAM,KAAK,EAAE,QAAQ,MAAM,CAAC,EAAE,IAAI,YAAY;AAC5C,YAAI;AACF,gBAAM,OAAO,IAAI,SAAS;AAC1B,gBAAM,KAAK,SAAS;AACpB,eAAK,OAAO,MAAM,EAAE;AACpB,eAAK,OAAO,UAAU,MAAM,EAAE,IAAI,GAAG,QAAQ,EAAE,YAAY,CAAC;AAE5D,gBAAM,MAAM,MAAM;AAAA,YAChB,iDAAiD,KAAK,SAAS;AAAA,YAC/D,EAAE,QAAQ,QAAQ,SAAS,MAAM,KAAK;AAAA,UACxC;AAEA,cAAI,CAAC,IAAI,SAAS;AAChB,kBAAM,IAAI,MAAM,uBAAuB;AAAA,UACzC;AAEA,iBAAO,EAAE,KAAK,IAAI,OAAO,WAAW,GAAG;AAAA,QACzC,SAAS,GAAG;AACV,kBAAQ,MAAM,uBAAuB;AACrC,gBAAM;AAAA,QACR;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAa,OAAO,KAAa,MAAgB;AAC/C,UAAM,gBAAgB,MAAM,OAA4B,KAAK;AAAA,MAC3D,QAAQ;AAAA,MACR;AAAA,IACF,CAAC;AAED,QAAI,CAAC,cAAc,SAAS;AAC1B,YAAM,IAAI,MAAM,wBAAwB;AAAA,IAC1C;AAEA,UAAM,cAAc,cAAc,OAAO,SAAS,CAAC;AAEnD,QAAI,CAAC,aAAa;AAChB,YAAM,IAAI,MAAM,6BAA6B;AAAA,IAC/C;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAa,OAAO,IAAY,MAA0B;AACxD,QAAI,KAAK,YAAY,EAAE,GAAG;AACxB,aAAO,EAAE,SAAS,KAAK;AAAA,IACzB;AAEA,QAAI;AACF,YAAM,UAAU,IAAI,QAAQ;AAC5B,cAAQ,IAAI,iBAAiB,UAAU,KAAK,MAAM,EAAE;AAEpD,YAAM;AAAA,QACJ,iDAAiD,KAAK,SAAS,cAAc,EAAE;AAAA,QAC/E;AAAA,UACE,QAAQ;AAAA,UACR;AAAA,QACF;AAAA,MACF;AACA,aAAO,EAAE,SAAS,KAAK;AAAA,IACzB,SAAS,IAAI;AACX,aAAO,EAAE,SAAS,MAAM;AAAA,IAC1B;AAAA,EACF;AAAA,EAEA,MAAa,YACX,OACA;AACA,WAAO,MAAM,QAAQ;AAAA,MACnB,MAAM,IAAI,OAAO,MAAM;AACrB,cAAM,WAAW,IAAI,SAAS;AAC9B,iBAAS,OAAO,QAAQ,EAAE,IAAI;AAE9B,cAAM,cAAc,MAAM,KAAK,OAAO,EAAE,IAAI,OAAO,QAAQ;AAE3D,eAAO;AAAA,UACL,KAAK;AAAA,UACL,IAAI,EAAE,IAAI;AAAA,QACZ;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AACF;","names":[]}
package/package.json
CHANGED
package/dist/db.cjs
DELETED
@@ -1,45 +0,0 @@
-"use strict";
-var __defProp = Object.defineProperty;
-var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
-var __getOwnPropNames = Object.getOwnPropertyNames;
-var __hasOwnProp = Object.prototype.hasOwnProperty;
-var __export = (target, all) => {
-  for (var name in all)
-    __defProp(target, name, { get: all[name], enumerable: true });
-};
-var __copyProps = (to, from, except, desc) => {
-  if (from && typeof from === "object" || typeof from === "function") {
-    for (let key of __getOwnPropNames(from))
-      if (!__hasOwnProp.call(to, key) && key !== except)
-        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
-  }
-  return to;
-};
-var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
-
-// src/db.ts
-var db_exports = {};
-__export(db_exports, {
-  createDbClient: () => createDbClient,
-  createLibsqlClient: () => createLibsqlClient
-});
-module.exports = __toCommonJS(db_exports);
-var import_client = require("@libsql/client");
-var import_libsql = require("drizzle-orm/libsql");
-var createLibsqlClient = (args) => {
-  return (0, import_client.createClient)(args);
-};
-var createDbClient = (schema, args) => {
-  const client = createLibsqlClient(args);
-  const db = (0, import_libsql.drizzle)(client, {
-    schema,
-    logger: false
-  });
-  return db;
-};
-// Annotate the CommonJS export names for ESM import in node:
-0 && (module.exports = {
-  createDbClient,
-  createLibsqlClient
-});
-//# sourceMappingURL=db.cjs.map
package/dist/db.cjs.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/db.ts"],"sourcesContent":["import { createClient } from \"@libsql/client\";\r\nimport { LibSQLDatabase, drizzle } from \"drizzle-orm/libsql\";\r\n\r\nexport const createLibsqlClient = (args: {\r\n url: string;\r\n authToken?: string;\r\n}) => {\r\n return createClient(args);\r\n};\r\n\r\nexport const createDbClient = <TSchema extends Record<string, unknown>>(\r\n schema: TSchema,\r\n args: { url: string; authToken?: string }\r\n) => {\r\n const client = createLibsqlClient(args);\r\n const db = drizzle(client, {\r\n schema,\r\n logger: false,\r\n });\r\n\r\n return db;\r\n};\r\n\r\nexport type DatabaseClient<TSchema extends Record<string, unknown>> =\r\n LibSQLDatabase<TSchema>;\r\nexport type DatabaseClientTransactionContext<\r\n TSchema extends Record<string, unknown>\r\n> = Parameters<Parameters<DatabaseClient<TSchema>[\"transaction\"]>[0]>[0];\r\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAA6B;AAC7B,oBAAwC;AAEjC,IAAM,qBAAqB,CAAC,SAG7B;AACJ,aAAO,4BAAa,IAAI;AAC1B;AAEO,IAAM,iBAAiB,CAC5B,QACA,SACG;AACH,QAAM,SAAS,mBAAmB,IAAI;AACtC,QAAM,SAAK,uBAAQ,QAAQ;AAAA,IACzB;AAAA,IACA,QAAQ;AAAA,EACV,CAAC;AAED,SAAO;AACT;","names":[]}
package/dist/db.d.cts
DELETED
@@ -1,15 +0,0 @@
-import * as _libsql_client from '@libsql/client';
-import { LibSQLDatabase } from 'drizzle-orm/libsql';
-
-declare const createLibsqlClient: (args: {
-    url: string;
-    authToken?: string;
-}) => _libsql_client.Client;
-declare const createDbClient: <TSchema extends Record<string, unknown>>(schema: TSchema, args: {
-    url: string;
-    authToken?: string;
-}) => LibSQLDatabase<TSchema>;
-type DatabaseClient<TSchema extends Record<string, unknown>> = LibSQLDatabase<TSchema>;
-type DatabaseClientTransactionContext<TSchema extends Record<string, unknown>> = Parameters<Parameters<DatabaseClient<TSchema>["transaction"]>[0]>[0];
-
-export { type DatabaseClient, type DatabaseClientTransactionContext, createDbClient, createLibsqlClient };
package/dist/db.d.ts
DELETED
@@ -1,15 +0,0 @@
-import * as _libsql_client from '@libsql/client';
-import { LibSQLDatabase } from 'drizzle-orm/libsql';
-
-declare const createLibsqlClient: (args: {
-    url: string;
-    authToken?: string;
-}) => _libsql_client.Client;
-declare const createDbClient: <TSchema extends Record<string, unknown>>(schema: TSchema, args: {
-    url: string;
-    authToken?: string;
-}) => LibSQLDatabase<TSchema>;
-type DatabaseClient<TSchema extends Record<string, unknown>> = LibSQLDatabase<TSchema>;
-type DatabaseClientTransactionContext<TSchema extends Record<string, unknown>> = Parameters<Parameters<DatabaseClient<TSchema>["transaction"]>[0]>[0];
-
-export { type DatabaseClient, type DatabaseClientTransactionContext, createDbClient, createLibsqlClient };
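
For reference, the deleted declarations show roughly how the db entry point was consumed. A sketch under the assumption of a ./db subpath export, with a placeholder schema module and URL; consumers upgrading to 0.0.19 need to create their drizzle/libsql client directly instead:

import { createDbClient, type DatabaseClient } from "@conorroberts/utils/db"; // removed in 0.0.19
import * as schema from "./schema"; // placeholder: your drizzle schema module

const db = createDbClient(schema, {
  url: "libsql://example-db.turso.io",         // placeholder URL
  authToken: process.env.DATABASE_AUTH_TOKEN,  // optional
});

type Client = DatabaseClient<typeof schema>;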
package/dist/db.js
DELETED
@@ -1,19 +0,0 @@
-// src/db.ts
-import { createClient } from "@libsql/client";
-import { drizzle } from "drizzle-orm/libsql";
-var createLibsqlClient = (args) => {
-  return createClient(args);
-};
-var createDbClient = (schema, args) => {
-  const client = createLibsqlClient(args);
-  const db = drizzle(client, {
-    schema,
-    logger: false
-  });
-  return db;
-};
-export {
-  createDbClient,
-  createLibsqlClient
-};
-//# sourceMappingURL=db.js.map
package/dist/db.js.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/db.ts"],"sourcesContent":["import { createClient } from \"@libsql/client\";\r\nimport { LibSQLDatabase, drizzle } from \"drizzle-orm/libsql\";\r\n\r\nexport const createLibsqlClient = (args: {\r\n url: string;\r\n authToken?: string;\r\n}) => {\r\n return createClient(args);\r\n};\r\n\r\nexport const createDbClient = <TSchema extends Record<string, unknown>>(\r\n schema: TSchema,\r\n args: { url: string; authToken?: string }\r\n) => {\r\n const client = createLibsqlClient(args);\r\n const db = drizzle(client, {\r\n schema,\r\n logger: false,\r\n });\r\n\r\n return db;\r\n};\r\n\r\nexport type DatabaseClient<TSchema extends Record<string, unknown>> =\r\n LibSQLDatabase<TSchema>;\r\nexport type DatabaseClientTransactionContext<\r\n TSchema extends Record<string, unknown>\r\n> = Parameters<Parameters<DatabaseClient<TSchema>[\"transaction\"]>[0]>[0];\r\n"],"mappings":";AAAA,SAAS,oBAAoB;AAC7B,SAAyB,eAAe;AAEjC,IAAM,qBAAqB,CAAC,SAG7B;AACJ,SAAO,aAAa,IAAI;AAC1B;AAEO,IAAM,iBAAiB,CAC5B,QACA,SACG;AACH,QAAM,SAAS,mBAAmB,IAAI;AACtC,QAAM,KAAK,QAAQ,QAAQ;AAAA,IACzB;AAAA,IACA,QAAQ;AAAA,EACV,CAAC;AAED,SAAO;AACT;","names":[]}
package/dist/migrate.cjs
DELETED
@@ -1,55 +0,0 @@
-"use strict";
-var __defProp = Object.defineProperty;
-var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
-var __getOwnPropNames = Object.getOwnPropertyNames;
-var __hasOwnProp = Object.prototype.hasOwnProperty;
-var __export = (target, all) => {
-  for (var name in all)
-    __defProp(target, name, { get: all[name], enumerable: true });
-};
-var __copyProps = (to, from, except, desc) => {
-  if (from && typeof from === "object" || typeof from === "function") {
-    for (let key of __getOwnPropNames(from))
-      if (!__hasOwnProp.call(to, key) && key !== except)
-        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
-  }
-  return to;
-};
-var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
-
-// src/migrate.ts
-var migrate_exports = {};
-__export(migrate_exports, {
-  migrate: () => migrate
-});
-module.exports = __toCommonJS(migrate_exports);
-var import_client = require("@libsql/client");
-var import_libsql = require("drizzle-orm/libsql");
-var import_migrator = require("drizzle-orm/libsql/migrator");
-var migrate = async (schema, args) => {
-  let url = args.url;
-  if (url.startsWith("http")) {
-    url = url.replace(/http(s)?/, "libsql");
-  }
-  const db = (0, import_libsql.drizzle)(
-    (0, import_client.createClient)(
-      // Auth token must be either 1) present and not undefined or 2) not present
-      args.token ? {
-        url,
-        authToken: args.token
-      } : { url }
-    ),
-    { schema }
-  );
-  console.info("Running migrations");
-  await (0, import_migrator.migrate)(db, {
-    migrationsFolder: args.migrationsFolder
-  });
-  console.info("Migrations applied");
-  process.exit(0);
-};
-// Annotate the CommonJS export names for ESM import in node:
-0 && (module.exports = {
-  migrate
-});
-//# sourceMappingURL=migrate.cjs.map
package/dist/migrate.cjs.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/migrate.ts"],"sourcesContent":["import { createClient } from \"@libsql/client\";\r\nimport { drizzle } from \"drizzle-orm/libsql\";\r\nimport { migrate as runDrizzleMigrate } from \"drizzle-orm/libsql/migrator\";\r\n\r\nexport const migrate = async <TSchema extends Record<string, any>>(\r\n schema: TSchema,\r\n args: {\r\n url: string;\r\n token?: string;\r\n migrationsFolder: string;\r\n }\r\n) => {\r\n let url = args.url;\r\n\r\n // Migrations are only supported via the libsql protocol\r\n if (url.startsWith(\"http\")) {\r\n url = url.replace(/http(s)?/, \"libsql\");\r\n }\r\n\r\n const db = drizzle(\r\n createClient(\r\n // Auth token must be either 1) present and not undefined or 2) not present\r\n args.token\r\n ? {\r\n url,\r\n authToken: args.token,\r\n }\r\n : { url }\r\n ),\r\n { schema }\r\n );\r\n\r\n console.info(\"Running migrations\");\r\n\r\n await runDrizzleMigrate(db, {\r\n migrationsFolder: args.migrationsFolder,\r\n });\r\n\r\n console.info(\"Migrations applied\");\r\n process.exit(0);\r\n};\r\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAA6B;AAC7B,oBAAwB;AACxB,sBAA6C;AAEtC,IAAM,UAAU,OACrB,QACA,SAKG;AACH,MAAI,MAAM,KAAK;AAGf,MAAI,IAAI,WAAW,MAAM,GAAG;AAC1B,UAAM,IAAI,QAAQ,YAAY,QAAQ;AAAA,EACxC;AAEA,QAAM,SAAK;AAAA,QACT;AAAA;AAAA,MAEE,KAAK,QACD;AAAA,QACE;AAAA,QACA,WAAW,KAAK;AAAA,MAClB,IACA,EAAE,IAAI;AAAA,IACZ;AAAA,IACA,EAAE,OAAO;AAAA,EACX;AAEA,UAAQ,KAAK,oBAAoB;AAEjC,YAAM,gBAAAA,SAAkB,IAAI;AAAA,IAC1B,kBAAkB,KAAK;AAAA,EACzB,CAAC;AAED,UAAQ,KAAK,oBAAoB;AACjC,UAAQ,KAAK,CAAC;AAChB;","names":["runDrizzleMigrate"]}
package/dist/migrate.d.cts
DELETED
package/dist/migrate.d.ts
DELETED
package/dist/migrate.js
DELETED
@@ -1,30 +0,0 @@
-// src/migrate.ts
-import { createClient } from "@libsql/client";
-import { drizzle } from "drizzle-orm/libsql";
-import { migrate as runDrizzleMigrate } from "drizzle-orm/libsql/migrator";
-var migrate = async (schema, args) => {
-  let url = args.url;
-  if (url.startsWith("http")) {
-    url = url.replace(/http(s)?/, "libsql");
-  }
-  const db = drizzle(
-    createClient(
-      // Auth token must be either 1) present and not undefined or 2) not present
-      args.token ? {
-        url,
-        authToken: args.token
-      } : { url }
-    ),
-    { schema }
-  );
-  console.info("Running migrations");
-  await runDrizzleMigrate(db, {
-    migrationsFolder: args.migrationsFolder
-  });
-  console.info("Migrations applied");
-  process.exit(0);
-};
-export {
-  migrate
-};
-//# sourceMappingURL=migrate.js.map
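
The removed migrate helper wrapped drizzle's libsql migrator: it rewrote http(s) URLs to the libsql protocol, applied the migrations folder, then called process.exit(0). A sketch of the call shape it supported, assuming a ./migrate subpath export and placeholder values:

import { migrate } from "@conorroberts/utils/migrate"; // removed in 0.0.19
import * as schema from "./schema"; // placeholder schema module

await migrate(schema, {
  url: "https://example-db.turso.io",       // rewritten internally to libsql://
  token: process.env.DATABASE_AUTH_TOKEN,   // optional auth token
  migrationsFolder: "./drizzle",            // placeholder migrations folder
});
// The helper exits the process itself after logging "Migrations applied".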
package/dist/migrate.js.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/migrate.ts"],"sourcesContent":["import { createClient } from \"@libsql/client\";\r\nimport { drizzle } from \"drizzle-orm/libsql\";\r\nimport { migrate as runDrizzleMigrate } from \"drizzle-orm/libsql/migrator\";\r\n\r\nexport const migrate = async <TSchema extends Record<string, any>>(\r\n schema: TSchema,\r\n args: {\r\n url: string;\r\n token?: string;\r\n migrationsFolder: string;\r\n }\r\n) => {\r\n let url = args.url;\r\n\r\n // Migrations are only supported via the libsql protocol\r\n if (url.startsWith(\"http\")) {\r\n url = url.replace(/http(s)?/, \"libsql\");\r\n }\r\n\r\n const db = drizzle(\r\n createClient(\r\n // Auth token must be either 1) present and not undefined or 2) not present\r\n args.token\r\n ? {\r\n url,\r\n authToken: args.token,\r\n }\r\n : { url }\r\n ),\r\n { schema }\r\n );\r\n\r\n console.info(\"Running migrations\");\r\n\r\n await runDrizzleMigrate(db, {\r\n migrationsFolder: args.migrationsFolder,\r\n });\r\n\r\n console.info(\"Migrations applied\");\r\n process.exit(0);\r\n};\r\n"],"mappings":";AAAA,SAAS,oBAAoB;AAC7B,SAAS,eAAe;AACxB,SAAS,WAAW,yBAAyB;AAEtC,IAAM,UAAU,OACrB,QACA,SAKG;AACH,MAAI,MAAM,KAAK;AAGf,MAAI,IAAI,WAAW,MAAM,GAAG;AAC1B,UAAM,IAAI,QAAQ,YAAY,QAAQ;AAAA,EACxC;AAEA,QAAM,KAAK;AAAA,IACT;AAAA;AAAA,MAEE,KAAK,QACD;AAAA,QACE;AAAA,QACA,WAAW,KAAK;AAAA,MAClB,IACA,EAAE,IAAI;AAAA,IACZ;AAAA,IACA,EAAE,OAAO;AAAA,EACX;AAEA,UAAQ,KAAK,oBAAoB;AAEjC,QAAM,kBAAkB,IAAI;AAAA,IAC1B,kBAAkB,KAAK;AAAA,EACzB,CAAC;AAED,UAAQ,KAAK,oBAAoB;AACjC,UAAQ,KAAK,CAAC;AAChB;","names":[]}
package/dist/schema.cjs
DELETED
@@ -1,44 +0,0 @@
-"use strict";
-var __defProp = Object.defineProperty;
-var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
-var __getOwnPropNames = Object.getOwnPropertyNames;
-var __hasOwnProp = Object.prototype.hasOwnProperty;
-var __export = (target, all) => {
-  for (var name in all)
-    __defProp(target, name, { get: all[name], enumerable: true });
-};
-var __copyProps = (to, from, except, desc) => {
-  if (from && typeof from === "object" || typeof from === "function") {
-    for (let key of __getOwnPropNames(from))
-      if (!__hasOwnProp.call(to, key) && key !== except)
-        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
-  }
-  return to;
-};
-var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
-
-// src/schema.ts
-var schema_exports = {};
-__export(schema_exports, {
-  columns: () => columns
-});
-module.exports = __toCommonJS(schema_exports);
-var import_cuid2 = require("@paralleldrive/cuid2");
-var import_sqlite_core = require("drizzle-orm/sqlite-core");
-var timeColumns = {
-  createdAt: (0, import_sqlite_core.int)("created_at", { mode: "timestamp_ms" }).notNull().$default(() => /* @__PURE__ */ new Date()),
-  updatedAt: (0, import_sqlite_core.int)("updated_at", { mode: "timestamp_ms" }).notNull().$default(() => /* @__PURE__ */ new Date()).$onUpdate(() => /* @__PURE__ */ new Date())
-};
-var commonColumns = {
-  id: (0, import_sqlite_core.text)("id").primaryKey().$defaultFn(() => (0, import_cuid2.createId)()),
-  ...timeColumns
-};
-var columns = {
-  time: timeColumns,
-  common: commonColumns
-};
-// Annotate the CommonJS export names for ESM import in node:
-0 && (module.exports = {
-  columns
-});
-//# sourceMappingURL=schema.cjs.map
package/dist/schema.cjs.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/schema.ts"],"sourcesContent":["import { createId } from \"@paralleldrive/cuid2\";\r\nimport { int, text } from \"drizzle-orm/sqlite-core\";\r\n\r\nconst timeColumns = {\r\n createdAt: int(\"created_at\", { mode: \"timestamp_ms\" })\r\n .notNull()\r\n .$default(() => new Date()),\r\n updatedAt: int(\"updated_at\", { mode: \"timestamp_ms\" })\r\n .notNull()\r\n .$default(() => new Date())\r\n .$onUpdate(() => new Date()),\r\n};\r\n\r\nconst commonColumns = {\r\n id: text(\"id\")\r\n .primaryKey()\r\n .$defaultFn(() => createId()),\r\n ...timeColumns,\r\n};\r\n\r\nexport const columns = {\r\n time: timeColumns,\r\n common: commonColumns,\r\n};\r\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAyB;AACzB,yBAA0B;AAE1B,IAAM,cAAc;AAAA,EAClB,eAAW,wBAAI,cAAc,EAAE,MAAM,eAAe,CAAC,EAClD,QAAQ,EACR,SAAS,MAAM,oBAAI,KAAK,CAAC;AAAA,EAC5B,eAAW,wBAAI,cAAc,EAAE,MAAM,eAAe,CAAC,EAClD,QAAQ,EACR,SAAS,MAAM,oBAAI,KAAK,CAAC,EACzB,UAAU,MAAM,oBAAI,KAAK,CAAC;AAC/B;AAEA,IAAM,gBAAgB;AAAA,EACpB,QAAI,yBAAK,IAAI,EACV,WAAW,EACX,WAAW,UAAM,uBAAS,CAAC;AAAA,EAC9B,GAAG;AACL;AAEO,IAAM,UAAU;AAAA,EACrB,MAAM;AAAA,EACN,QAAQ;AACV;","names":[]}
package/dist/schema.d.cts
DELETED
@@ -1,16 +0,0 @@
-import * as drizzle_orm from 'drizzle-orm';
-import * as drizzle_orm_sqlite_core from 'drizzle-orm/sqlite-core';
-
-declare const columns: {
-    time: {
-        createdAt: drizzle_orm.HasDefault<drizzle_orm.NotNull<drizzle_orm_sqlite_core.SQLiteTimestampBuilderInitial<"created_at">>>;
-        updatedAt: drizzle_orm.HasDefault<drizzle_orm.HasDefault<drizzle_orm.NotNull<drizzle_orm_sqlite_core.SQLiteTimestampBuilderInitial<"updated_at">>>>;
-    };
-    common: {
-        createdAt: drizzle_orm.HasDefault<drizzle_orm.NotNull<drizzle_orm_sqlite_core.SQLiteTimestampBuilderInitial<"created_at">>>;
-        updatedAt: drizzle_orm.HasDefault<drizzle_orm.HasDefault<drizzle_orm.NotNull<drizzle_orm_sqlite_core.SQLiteTimestampBuilderInitial<"updated_at">>>>;
-        id: drizzle_orm.HasDefault<drizzle_orm.NotNull<drizzle_orm_sqlite_core.SQLiteTextBuilderInitial<"id", [string, ...string[]]>>>;
-    };
-};
-
-export { columns };
package/dist/schema.d.ts
DELETED
@@ -1,16 +0,0 @@
-import * as drizzle_orm from 'drizzle-orm';
-import * as drizzle_orm_sqlite_core from 'drizzle-orm/sqlite-core';
-
-declare const columns: {
-    time: {
-        createdAt: drizzle_orm.HasDefault<drizzle_orm.NotNull<drizzle_orm_sqlite_core.SQLiteTimestampBuilderInitial<"created_at">>>;
-        updatedAt: drizzle_orm.HasDefault<drizzle_orm.HasDefault<drizzle_orm.NotNull<drizzle_orm_sqlite_core.SQLiteTimestampBuilderInitial<"updated_at">>>>;
-    };
-    common: {
-        createdAt: drizzle_orm.HasDefault<drizzle_orm.NotNull<drizzle_orm_sqlite_core.SQLiteTimestampBuilderInitial<"created_at">>>;
-        updatedAt: drizzle_orm.HasDefault<drizzle_orm.HasDefault<drizzle_orm.NotNull<drizzle_orm_sqlite_core.SQLiteTimestampBuilderInitial<"updated_at">>>>;
-        id: drizzle_orm.HasDefault<drizzle_orm.NotNull<drizzle_orm_sqlite_core.SQLiteTextBuilderInitial<"id", [string, ...string[]]>>>;
-    };
-};
-
-export { columns };
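
The removed schema entry point exported reusable drizzle column builders: a cuid2 text primary key plus created_at/updated_at timestamps. A sketch of how columns.common was typically spread into a table definition, assuming a ./schema subpath export; the table and extra column are placeholders:

import { columns } from "@conorroberts/utils/schema"; // removed in 0.0.19
import { sqliteTable, text } from "drizzle-orm/sqlite-core";

const users = sqliteTable("users", {
  ...columns.common,             // id (cuid2 primary key), created_at, updated_at
  name: text("name").notNull(),  // placeholder column
});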
package/dist/schema.js
DELETED
@@ -1,19 +0,0 @@
-// src/schema.ts
-import { createId } from "@paralleldrive/cuid2";
-import { int, text } from "drizzle-orm/sqlite-core";
-var timeColumns = {
-  createdAt: int("created_at", { mode: "timestamp_ms" }).notNull().$default(() => /* @__PURE__ */ new Date()),
-  updatedAt: int("updated_at", { mode: "timestamp_ms" }).notNull().$default(() => /* @__PURE__ */ new Date()).$onUpdate(() => /* @__PURE__ */ new Date())
-};
-var commonColumns = {
-  id: text("id").primaryKey().$defaultFn(() => createId()),
-  ...timeColumns
-};
-var columns = {
-  time: timeColumns,
-  common: commonColumns
-};
-export {
-  columns
-};
-//# sourceMappingURL=schema.js.map
package/dist/schema.js.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/schema.ts"],"sourcesContent":["import { createId } from \"@paralleldrive/cuid2\";\r\nimport { int, text } from \"drizzle-orm/sqlite-core\";\r\n\r\nconst timeColumns = {\r\n createdAt: int(\"created_at\", { mode: \"timestamp_ms\" })\r\n .notNull()\r\n .$default(() => new Date()),\r\n updatedAt: int(\"updated_at\", { mode: \"timestamp_ms\" })\r\n .notNull()\r\n .$default(() => new Date())\r\n .$onUpdate(() => new Date()),\r\n};\r\n\r\nconst commonColumns = {\r\n id: text(\"id\")\r\n .primaryKey()\r\n .$defaultFn(() => createId()),\r\n ...timeColumns,\r\n};\r\n\r\nexport const columns = {\r\n time: timeColumns,\r\n common: commonColumns,\r\n};\r\n"],"mappings":";AAAA,SAAS,gBAAgB;AACzB,SAAS,KAAK,YAAY;AAE1B,IAAM,cAAc;AAAA,EAClB,WAAW,IAAI,cAAc,EAAE,MAAM,eAAe,CAAC,EAClD,QAAQ,EACR,SAAS,MAAM,oBAAI,KAAK,CAAC;AAAA,EAC5B,WAAW,IAAI,cAAc,EAAE,MAAM,eAAe,CAAC,EAClD,QAAQ,EACR,SAAS,MAAM,oBAAI,KAAK,CAAC,EACzB,UAAU,MAAM,oBAAI,KAAK,CAAC;AAC/B;AAEA,IAAM,gBAAgB;AAAA,EACpB,IAAI,KAAK,IAAI,EACV,WAAW,EACX,WAAW,MAAM,SAAS,CAAC;AAAA,EAC9B,GAAG;AACL;AAEO,IAAM,UAAU;AAAA,EACrB,MAAM;AAAA,EACN,QAAQ;AACV;","names":[]}