@conorroberts/utils 0.0.10 → 0.0.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cache.js.map +1 -1
- package/dist/db.js.map +1 -1
- package/dist/env.js.map +1 -1
- package/dist/images.js.map +1 -1
- package/dist/logger.js.map +1 -1
- package/dist/migrate.js.map +1 -1
- package/dist/schema.d.ts +2 -2
- package/dist/schema.js +2 -2
- package/dist/schema.js.map +1 -1
- package/package.json +1 -1
package/dist/cache.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/cache.ts"],"sourcesContent":["import {\n createStorage as createUnstorage,\n type Storage,\n type StorageValue,\n} from \"unstorage\";\nimport redisDriver, { type RedisOptions } from \"unstorage/drivers/redis\";\n\nexport class Cache {\n private _cache: Storage<StorageValue>;\n\n constructor(args: RedisOptions) {\n this._cache = createUnstorage({\n driver: redisDriver(args),\n });\n }\n\n get cache() {\n return this._cache;\n }\n\n public ttl(date: Date) {\n return Math.floor((date.getTime() - Date.now()) / 1000);\n }\n}\n"],"mappings":";AAAA;AAAA,EACE,iBAAiB;AAAA,OAGZ;AACP,OAAO,iBAAwC;AAExC,IAAM,QAAN,MAAY;AAAA,EACT;AAAA,EAER,YAAY,MAAoB;AAC9B,SAAK,SAAS,gBAAgB;AAAA,MAC5B,QAAQ,YAAY,IAAI;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA,EAEA,IAAI,QAAQ;AACV,WAAO,KAAK;AAAA,EACd;AAAA,EAEO,IAAI,MAAY;AACrB,WAAO,KAAK,OAAO,KAAK,QAAQ,IAAI,KAAK,IAAI,KAAK,GAAI;AAAA,EACxD;AACF;","names":[]}
+
{"version":3,"sources":["../src/cache.ts"],"sourcesContent":["import {\r\n createStorage as createUnstorage,\r\n type Storage,\r\n type StorageValue,\r\n} from \"unstorage\";\r\nimport redisDriver, { type RedisOptions } from \"unstorage/drivers/redis\";\r\n\r\nexport class Cache {\r\n private _cache: Storage<StorageValue>;\r\n\r\n constructor(args: RedisOptions) {\r\n this._cache = createUnstorage({\r\n driver: redisDriver(args),\r\n });\r\n }\r\n\r\n get cache() {\r\n return this._cache;\r\n }\r\n\r\n public ttl(date: Date) {\r\n return Math.floor((date.getTime() - Date.now()) / 1000);\r\n }\r\n}\r\n"],"mappings":";AAAA;AAAA,EACE,iBAAiB;AAAA,OAGZ;AACP,OAAO,iBAAwC;AAExC,IAAM,QAAN,MAAY;AAAA,EACT;AAAA,EAER,YAAY,MAAoB;AAC9B,SAAK,SAAS,gBAAgB;AAAA,MAC5B,QAAQ,YAAY,IAAI;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA,EAEA,IAAI,QAAQ;AACV,WAAO,KAAK;AAAA,EACd;AAAA,EAEO,IAAI,MAAY;AACrB,WAAO,KAAK,OAAO,KAAK,QAAQ,IAAI,KAAK,IAAI,KAAK,GAAI;AAAA,EACxD;AACF;","names":[]}
package/dist/db.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/db.ts"],"sourcesContent":["import { createClient } from \"@libsql/client\";\nimport { LibSQLDatabase, drizzle } from \"drizzle-orm/libsql\";\n\nexport const createLibsqlClient = (args: {\n url: string;\n authToken?: string;\n}) => {\n return createClient(args);\n};\n\nexport const createDbClient = <TSchema extends Record<string, unknown>>(\n schema: TSchema,\n args: { url: string; authToken?: string }\n) => {\n const client = createLibsqlClient(args);\n const db = drizzle(client, {\n schema,\n logger: false,\n });\n\n return db;\n};\n\nexport type DatabaseClient<TSchema extends Record<string, unknown>> =\n LibSQLDatabase<TSchema>;\nexport type DatabaseClientTransactionContext<\n TSchema extends Record<string, unknown>\n> = Parameters<Parameters<DatabaseClient<TSchema>[\"transaction\"]>[0]>[0];\n"],"mappings":";AAAA,SAAS,oBAAoB;AAC7B,SAAyB,eAAe;AAEjC,IAAM,qBAAqB,CAAC,SAG7B;AACJ,SAAO,aAAa,IAAI;AAC1B;AAEO,IAAM,iBAAiB,CAC5B,QACA,SACG;AACH,QAAM,SAAS,mBAAmB,IAAI;AACtC,QAAM,KAAK,QAAQ,QAAQ;AAAA,IACzB;AAAA,IACA,QAAQ;AAAA,EACV,CAAC;AAED,SAAO;AACT;","names":[]}
+
{"version":3,"sources":["../src/db.ts"],"sourcesContent":["import { createClient } from \"@libsql/client\";\r\nimport { LibSQLDatabase, drizzle } from \"drizzle-orm/libsql\";\r\n\r\nexport const createLibsqlClient = (args: {\r\n url: string;\r\n authToken?: string;\r\n}) => {\r\n return createClient(args);\r\n};\r\n\r\nexport const createDbClient = <TSchema extends Record<string, unknown>>(\r\n schema: TSchema,\r\n args: { url: string; authToken?: string }\r\n) => {\r\n const client = createLibsqlClient(args);\r\n const db = drizzle(client, {\r\n schema,\r\n logger: false,\r\n });\r\n\r\n return db;\r\n};\r\n\r\nexport type DatabaseClient<TSchema extends Record<string, unknown>> =\r\n LibSQLDatabase<TSchema>;\r\nexport type DatabaseClientTransactionContext<\r\n TSchema extends Record<string, unknown>\r\n> = Parameters<Parameters<DatabaseClient<TSchema>[\"transaction\"]>[0]>[0];\r\n"],"mappings":";AAAA,SAAS,oBAAoB;AAC7B,SAAyB,eAAe;AAEjC,IAAM,qBAAqB,CAAC,SAG7B;AACJ,SAAO,aAAa,IAAI;AAC1B;AAEO,IAAM,iBAAiB,CAC5B,QACA,SACG;AACH,QAAM,SAAS,mBAAmB,IAAI;AACtC,QAAM,KAAK,QAAQ,QAAQ;AAAA,IACzB;AAAA,IACA,QAAQ;AAAA,EACV,CAAC;AAED,SAAO;AACT;","names":[]}
package/dist/env.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/env.ts"],"sourcesContent":["import { pipe } from \"remeda\";\nimport * as v from \"valibot\";\n\nconst PUBLIC_ENV_PREFIX = \"PUBLIC_\" as const;\n\n/**\n * Validates your environment variables against the given Valibot schema;\n * @param args\n * @returns An object containing client environment variables and another containing server environment variables\n */\nexport const createEnv = <\n Schema extends Record<string, v.GenericSchema>,\n Env = {\n [K in keyof Schema]: v.InferOutput<Schema[K]>;\n }\n>(args: {\n schema: Schema;\n env: any;\n}) => {\n const pairs = Object.entries(args.schema);\n const serverEnv = new Map();\n\n for (const [key, value] of pairs) {\n const result = v.safeParse(value, args.env[key] ?? null);\n\n if (!result.success) {\n console.error(`Environment variable \"${key}\" is invalid`);\n process.exit(1);\n }\n\n serverEnv.set(key, result.output);\n }\n\n type ClientEnvKeys = Exclude<\n {\n [K in keyof Env]: K extends `${typeof PUBLIC_ENV_PREFIX}${string}`\n ? K\n : never;\n }[keyof Env],\n undefined\n >;\n\n type ClientEnv = {\n [B in ClientEnvKeys]: Env[B];\n };\n\n const clientEnv = pipe(\n serverEnv,\n (obj) => Array.from(obj.entries()),\n (pairs) => pairs.filter(([k]) => k.startsWith(PUBLIC_ENV_PREFIX)),\n (pairs) => Object.fromEntries(pairs)\n ) as ClientEnv;\n\n return {\n client: clientEnv,\n server: Object.fromEntries(serverEnv.entries()) as Env,\n };\n};\n"],"mappings":";AAAA,SAAS,YAAY;AACrB,YAAY,OAAO;AAEnB,IAAM,oBAAoB;AAOnB,IAAM,YAAY,CAKvB,SAGI;AACJ,QAAM,QAAQ,OAAO,QAAQ,KAAK,MAAM;AACxC,QAAM,YAAY,oBAAI,IAAI;AAE1B,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO;AAChC,UAAM,SAAW,YAAU,OAAO,KAAK,IAAI,GAAG,KAAK,IAAI;AAEvD,QAAI,CAAC,OAAO,SAAS;AACnB,cAAQ,MAAM,yBAAyB,GAAG,cAAc;AACxD,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,cAAU,IAAI,KAAK,OAAO,MAAM;AAAA,EAClC;AAeA,QAAM,YAAY;AAAA,IAChB;AAAA,IACA,CAAC,QAAQ,MAAM,KAAK,IAAI,QAAQ,CAAC;AAAA,IACjC,CAACA,WAAUA,OAAM,OAAO,CAAC,CAAC,CAAC,MAAM,EAAE,WAAW,iBAAiB,CAAC;AAAA,IAChE,CAACA,WAAU,OAAO,YAAYA,MAAK;AAAA,EACrC;AAEA,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,QAAQ,OAAO,YAAY,UAAU,QAAQ,CAAC;AAAA,EAChD;AACF;","names":["pairs"]}
+
{"version":3,"sources":["../src/env.ts"],"sourcesContent":["import { pipe } from \"remeda\";\r\nimport * as v from \"valibot\";\r\n\r\nconst PUBLIC_ENV_PREFIX = \"PUBLIC_\" as const;\r\n\r\n/**\r\n * Validates your environment variables against the given Valibot schema;\r\n * @param args\r\n * @returns An object containing client environment variables and another containing server environment variables\r\n */\r\nexport const createEnv = <\r\n Schema extends Record<string, v.GenericSchema>,\r\n Env = {\r\n [K in keyof Schema]: v.InferOutput<Schema[K]>;\r\n }\r\n>(args: {\r\n schema: Schema;\r\n env: any;\r\n}) => {\r\n const pairs = Object.entries(args.schema);\r\n const serverEnv = new Map();\r\n\r\n for (const [key, value] of pairs) {\r\n const result = v.safeParse(value, args.env[key] ?? null);\r\n\r\n if (!result.success) {\r\n console.error(`Environment variable \"${key}\" is invalid`);\r\n process.exit(1);\r\n }\r\n\r\n serverEnv.set(key, result.output);\r\n }\r\n\r\n type ClientEnvKeys = Exclude<\r\n {\r\n [K in keyof Env]: K extends `${typeof PUBLIC_ENV_PREFIX}${string}`\r\n ? K\r\n : never;\r\n }[keyof Env],\r\n undefined\r\n >;\r\n\r\n type ClientEnv = {\r\n [B in ClientEnvKeys]: Env[B];\r\n };\r\n\r\n const clientEnv = pipe(\r\n serverEnv,\r\n (obj) => Array.from(obj.entries()),\r\n (pairs) => pairs.filter(([k]) => k.startsWith(PUBLIC_ENV_PREFIX)),\r\n (pairs) => Object.fromEntries(pairs)\r\n ) as ClientEnv;\r\n\r\n return {\r\n client: clientEnv,\r\n server: Object.fromEntries(serverEnv.entries()) as Env,\r\n };\r\n};\r\n"],"mappings":";AAAA,SAAS,YAAY;AACrB,YAAY,OAAO;AAEnB,IAAM,oBAAoB;AAOnB,IAAM,YAAY,CAKvB,SAGI;AACJ,QAAM,QAAQ,OAAO,QAAQ,KAAK,MAAM;AACxC,QAAM,YAAY,oBAAI,IAAI;AAE1B,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO;AAChC,UAAM,SAAW,YAAU,OAAO,KAAK,IAAI,GAAG,KAAK,IAAI;AAEvD,QAAI,CAAC,OAAO,SAAS;AACnB,cAAQ,MAAM,yBAAyB,GAAG,cAAc;AACxD,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,cAAU,IAAI,KAAK,OAAO,MAAM;AAAA,EAClC;AAeA,QAAM,YAAY;AAAA,IAChB;AAAA,IACA,CAAC,QAAQ,MAAM,KAAK,IAAI,QAAQ,CAAC;AAAA,IACjC,CAACA,WAAUA,OAAM,OAAO,CAAC,CAAC,CAAC,MAAM,EAAE,WAAW,iBAAiB,CAAC;AAAA,IAChE,CAACA,WAAU,OAAO,YAAYA,MAAK;AAAA,EACrC;AAEA,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,QAAQ,OAAO,YAAY,UAAU,QAAQ,CAAC;AAAA,EAChD;AACF;","names":["pairs"]}
package/dist/images.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/images.ts"],"sourcesContent":["import { createId } from \"@paralleldrive/cuid2\";\nimport dayjs from \"dayjs\";\nimport { ofetch } from \"ofetch\";\n\nexport interface OptimizedImageOptions {\n anim?: boolean;\n background?: string;\n blur?: number;\n brightness?: number;\n compression?: \"fast\"; // faster compression = larger file size\n contrast?: number;\n dpr?: number;\n fit?: \"scale-down\" | \"contain\" | \"cover\" | \"crop\" | \"pad\";\n format?: \"webp\" | \"avif\" | \"json\";\n gamma?: number;\n width?: number;\n height?: number;\n metadata?: \"keep\" | \"copyright\" | \"none\";\n quality?: number;\n rotate?: number;\n sharpen?: number;\n}\n\nexport interface CreateImageUrlResponse {\n result: {\n id: string;\n uploadURL: string;\n };\n success: boolean;\n errors: unknown[];\n messages: unknown[];\n}\n\ninterface UploadImageResponse {\n result: {\n id: string;\n filename: string;\n uploaded: string;\n requireSignedURLs: boolean;\n variants: string[];\n };\n success: boolean;\n errors: unknown[];\n messages: unknown[];\n}\n\nexport class ImageUtils<ImageIds extends Record<string, any>> {\n private blacklist: string[] = [\"img.clerk.com\"];\n private account: string;\n private _imageIds: ImageIds | undefined;\n\n constructor(args: {\n accountId: string;\n blacklist?: string[];\n imageIds?: ImageIds;\n }) {\n this.account = args.accountId;\n\n this._imageIds = args.imageIds;\n\n if (args.blacklist) {\n this.blacklist.push(...args.blacklist);\n }\n }\n\n get imageIds() {\n if (!this._imageIds) {\n throw new Error(\"imageIds was not supplied in constructor\");\n }\n\n return this._imageIds;\n }\n\n public url(id: string) {\n return `https://imagedelivery.net/${this.account}/${id}/public`;\n }\n\n private isBlacklisted(url: string) {\n return this.blacklist.some((u) => url.includes(u));\n }\n\n private isProtected(id: string) {\n if (!this._imageIds) {\n return false;\n }\n\n return Object.values(this._imageIds).some((e) => e === id);\n }\n\n /**\n * Will only operate on images that have been uploaded via cloudflare images\n */\n public optimizeUrl(url: string, options: OptimizedImageOptions) {\n if (this.isBlacklisted(url)) {\n return url;\n }\n\n // Final format should look similar to: https://imagedelivery.net/<ACCOUNT_HASH>/<IMAGE_ID>/w=400,sharpen=3\n return url.replace(\"public\", this.createImageOptionsString(options));\n }\n\n public optimizeId(id: string, options: OptimizedImageOptions) {\n return this.optimizeUrl(this.url(id), options);\n }\n\n public createOptionsSearchParams(options: OptimizedImageOptions) {\n const params = new URLSearchParams();\n\n const pairs = Object.entries(options);\n\n for (const [key, val] of pairs) {\n if (val === undefined) {\n continue;\n }\n\n params.set(key, val.toString());\n }\n\n return params;\n }\n\n public createImageOptionsString(options: OptimizedImageOptions) {\n const params = this.createOptionsSearchParams(options);\n\n return Array.from(params.entries())\n .map(([key, val]) => `${key}=${val}`)\n .join(\",\");\n }\n\n public async createUploadUrls(count: number, args: { apiKey: string }) {\n if (count === 0) {\n return [];\n }\n\n const headers = new Headers();\n headers.set(\"Authorization\", `Bearer ${args.apiKey}`);\n\n const urls = await Promise.all(\n Array.from({ length: count }).map(async () => {\n try {\n const form = new FormData();\n const id = createId();\n form.append(\"id\", id);\n form.append(\"expiry\", dayjs().add(5, \"minute\").toISOString());\n\n const img = await 
ofetch<CreateImageUrlResponse>(\n `https://api.cloudflare.com/client/v4/accounts/${this.account}/images/v2/direct_upload`,\n { method: \"POST\", headers, body: form }\n );\n\n if (!img.success) {\n throw new Error(\"Error uploading image\");\n }\n\n return { url: img.result.uploadURL, id };\n } catch (e) {\n console.error(\"Error uploading image\");\n throw e;\n }\n })\n );\n\n return urls;\n }\n\n public async upload(url: string, body: FormData) {\n const fetchResponse = await ofetch<UploadImageResponse>(url, {\n method: \"POST\",\n body,\n });\n\n if (!fetchResponse.success) {\n throw new Error(\"Failed to upload image\");\n }\n\n const downloadUrl = fetchResponse.result.variants[0];\n\n if (!downloadUrl) {\n throw new Error(\"Could not find download URL\");\n }\n\n return downloadUrl;\n }\n\n public async delete(id: string, args: { apiKey: string }) {\n if (this.isProtected(id)) {\n return { success: true };\n }\n\n try {\n const headers = new Headers();\n headers.set(\"Authorization\", `Bearer ${args.apiKey}`);\n\n await ofetch(\n `https://api.cloudflare.com/client/v4/accounts/${this.account}/images/v1/${id}`,\n {\n method: \"POST\",\n headers,\n }\n );\n return { success: true };\n } catch (_e) {\n return { success: false };\n }\n }\n\n public async batchUpload(\n files: { file: File; url: { id: string; value: string } }[]\n ) {\n return await Promise.all(\n files.map(async (e) => {\n const formData = new FormData();\n formData.append(\"file\", e.file);\n\n const downloadUrl = await this.upload(e.url.value, formData);\n\n return {\n url: downloadUrl,\n id: e.url.id,\n };\n })\n );\n }\n}\n"],"mappings":";AAAA,SAAS,gBAAgB;AACzB,OAAO,WAAW;AAClB,SAAS,cAAc;AA4ChB,IAAM,aAAN,MAAuD;AAAA,EACpD,YAAsB,CAAC,eAAe;AAAA,EACtC;AAAA,EACA;AAAA,EAER,YAAY,MAIT;AACD,SAAK,UAAU,KAAK;AAEpB,SAAK,YAAY,KAAK;AAEtB,QAAI,KAAK,WAAW;AAClB,WAAK,UAAU,KAAK,GAAG,KAAK,SAAS;AAAA,IACvC;AAAA,EACF;AAAA,EAEA,IAAI,WAAW;AACb,QAAI,CAAC,KAAK,WAAW;AACnB,YAAM,IAAI,MAAM,0CAA0C;AAAA,IAC5D;AAEA,WAAO,KAAK;AAAA,EACd;AAAA,EAEO,IAAI,IAAY;AACrB,WAAO,6BAA6B,KAAK,OAAO,IAAI,EAAE;AAAA,EACxD;AAAA,EAEQ,cAAc,KAAa;AACjC,WAAO,KAAK,UAAU,KAAK,CAAC,MAAM,IAAI,SAAS,CAAC,CAAC;AAAA,EACnD;AAAA,EAEQ,YAAY,IAAY;AAC9B,QAAI,CAAC,KAAK,WAAW;AACnB,aAAO;AAAA,IACT;AAEA,WAAO,OAAO,OAAO,KAAK,SAAS,EAAE,KAAK,CAAC,MAAM,MAAM,EAAE;AAAA,EAC3D;AAAA;AAAA;AAAA;AAAA,EAKO,YAAY,KAAa,SAAgC;AAC9D,QAAI,KAAK,cAAc,GAAG,GAAG;AAC3B,aAAO;AAAA,IACT;AAGA,WAAO,IAAI,QAAQ,UAAU,KAAK,yBAAyB,OAAO,CAAC;AAAA,EACrE;AAAA,EAEO,WAAW,IAAY,SAAgC;AAC5D,WAAO,KAAK,YAAY,KAAK,IAAI,EAAE,GAAG,OAAO;AAAA,EAC/C;AAAA,EAEO,0BAA0B,SAAgC;AAC/D,UAAM,SAAS,IAAI,gBAAgB;AAEnC,UAAM,QAAQ,OAAO,QAAQ,OAAO;AAEpC,eAAW,CAAC,KAAK,GAAG,KAAK,OAAO;AAC9B,UAAI,QAAQ,QAAW;AACrB;AAAA,MACF;AAEA,aAAO,IAAI,KAAK,IAAI,SAAS,CAAC;AAAA,IAChC;AAEA,WAAO;AAAA,EACT;AAAA,EAEO,yBAAyB,SAAgC;AAC9D,UAAM,SAAS,KAAK,0BAA0B,OAAO;AAErD,WAAO,MAAM,KAAK,OAAO,QAAQ,CAAC,EAC/B,IAAI,CAAC,CAAC,KAAK,GAAG,MAAM,GAAG,GAAG,IAAI,GAAG,EAAE,EACnC,KAAK,GAAG;AAAA,EACb;AAAA,EAEA,MAAa,iBAAiB,OAAe,MAA0B;AACrE,QAAI,UAAU,GAAG;AACf,aAAO,CAAC;AAAA,IACV;AAEA,UAAM,UAAU,IAAI,QAAQ;AAC5B,YAAQ,IAAI,iBAAiB,UAAU,KAAK,MAAM,EAAE;AAEpD,UAAM,OAAO,MAAM,QAAQ;AAAA,MACzB,MAAM,KAAK,EAAE,QAAQ,MAAM,CAAC,EAAE,IAAI,YAAY;AAC5C,YAAI;AACF,gBAAM,OAAO,IAAI,SAAS;AAC1B,gBAAM,KAAK,SAAS;AACpB,eAAK,OAAO,MAAM,EAAE;AACpB,eAAK,OAAO,UAAU,MAAM,EAAE,IAAI,GAAG,QAAQ,EAAE,YAAY,CAAC;AAE5D,gBAAM,MAAM,MAAM;AAAA,YAChB,iDAAiD,KAAK,OAAO;AAAA,YAC7D,EAAE,QAAQ,QAAQ,SAAS,MAAM,KAAK;AAAA,UACxC;AAEA,cAAI,CAAC,IAAI,SAAS;AAChB,kBAAM,IAAI,MAAM,uBAAuB;AAAA,UACzC;AAEA,iBAAO,EAAE,KAAK,IAAI,OAAO,WAAW,GAAG;AAAA,QACzC,SAAS,GAAG;AACV,kBAAQ,MAAM,uB
AAuB;AACrC,gBAAM;AAAA,QACR;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAa,OAAO,KAAa,MAAgB;AAC/C,UAAM,gBAAgB,MAAM,OAA4B,KAAK;AAAA,MAC3D,QAAQ;AAAA,MACR;AAAA,IACF,CAAC;AAED,QAAI,CAAC,cAAc,SAAS;AAC1B,YAAM,IAAI,MAAM,wBAAwB;AAAA,IAC1C;AAEA,UAAM,cAAc,cAAc,OAAO,SAAS,CAAC;AAEnD,QAAI,CAAC,aAAa;AAChB,YAAM,IAAI,MAAM,6BAA6B;AAAA,IAC/C;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAa,OAAO,IAAY,MAA0B;AACxD,QAAI,KAAK,YAAY,EAAE,GAAG;AACxB,aAAO,EAAE,SAAS,KAAK;AAAA,IACzB;AAEA,QAAI;AACF,YAAM,UAAU,IAAI,QAAQ;AAC5B,cAAQ,IAAI,iBAAiB,UAAU,KAAK,MAAM,EAAE;AAEpD,YAAM;AAAA,QACJ,iDAAiD,KAAK,OAAO,cAAc,EAAE;AAAA,QAC7E;AAAA,UACE,QAAQ;AAAA,UACR;AAAA,QACF;AAAA,MACF;AACA,aAAO,EAAE,SAAS,KAAK;AAAA,IACzB,SAAS,IAAI;AACX,aAAO,EAAE,SAAS,MAAM;AAAA,IAC1B;AAAA,EACF;AAAA,EAEA,MAAa,YACX,OACA;AACA,WAAO,MAAM,QAAQ;AAAA,MACnB,MAAM,IAAI,OAAO,MAAM;AACrB,cAAM,WAAW,IAAI,SAAS;AAC9B,iBAAS,OAAO,QAAQ,EAAE,IAAI;AAE9B,cAAM,cAAc,MAAM,KAAK,OAAO,EAAE,IAAI,OAAO,QAAQ;AAE3D,eAAO;AAAA,UACL,KAAK;AAAA,UACL,IAAI,EAAE,IAAI;AAAA,QACZ;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AACF;","names":[]}
+
{"version":3,"sources":["../src/images.ts"],"sourcesContent":["import { createId } from \"@paralleldrive/cuid2\";\r\nimport dayjs from \"dayjs\";\r\nimport { ofetch } from \"ofetch\";\r\n\r\nexport interface OptimizedImageOptions {\r\n anim?: boolean;\r\n background?: string;\r\n blur?: number;\r\n brightness?: number;\r\n compression?: \"fast\"; // faster compression = larger file size\r\n contrast?: number;\r\n dpr?: number;\r\n fit?: \"scale-down\" | \"contain\" | \"cover\" | \"crop\" | \"pad\";\r\n format?: \"webp\" | \"avif\" | \"json\";\r\n gamma?: number;\r\n width?: number;\r\n height?: number;\r\n metadata?: \"keep\" | \"copyright\" | \"none\";\r\n quality?: number;\r\n rotate?: number;\r\n sharpen?: number;\r\n}\r\n\r\nexport interface CreateImageUrlResponse {\r\n result: {\r\n id: string;\r\n uploadURL: string;\r\n };\r\n success: boolean;\r\n errors: unknown[];\r\n messages: unknown[];\r\n}\r\n\r\ninterface UploadImageResponse {\r\n result: {\r\n id: string;\r\n filename: string;\r\n uploaded: string;\r\n requireSignedURLs: boolean;\r\n variants: string[];\r\n };\r\n success: boolean;\r\n errors: unknown[];\r\n messages: unknown[];\r\n}\r\n\r\nexport class ImageUtils<ImageIds extends Record<string, any>> {\r\n private blacklist: string[] = [\"img.clerk.com\"];\r\n private account: string;\r\n private _imageIds: ImageIds | undefined;\r\n\r\n constructor(args: {\r\n accountId: string;\r\n blacklist?: string[];\r\n imageIds?: ImageIds;\r\n }) {\r\n this.account = args.accountId;\r\n\r\n this._imageIds = args.imageIds;\r\n\r\n if (args.blacklist) {\r\n this.blacklist.push(...args.blacklist);\r\n }\r\n }\r\n\r\n get imageIds() {\r\n if (!this._imageIds) {\r\n throw new Error(\"imageIds was not supplied in constructor\");\r\n }\r\n\r\n return this._imageIds;\r\n }\r\n\r\n public url(id: string) {\r\n return `https://imagedelivery.net/${this.account}/${id}/public`;\r\n }\r\n\r\n private isBlacklisted(url: string) {\r\n return this.blacklist.some((u) => url.includes(u));\r\n }\r\n\r\n private isProtected(id: string) {\r\n if (!this._imageIds) {\r\n return false;\r\n }\r\n\r\n return Object.values(this._imageIds).some((e) => e === id);\r\n }\r\n\r\n /**\r\n * Will only operate on images that have been uploaded via cloudflare images\r\n */\r\n public optimizeUrl(url: string, options: OptimizedImageOptions) {\r\n if (this.isBlacklisted(url)) {\r\n return url;\r\n }\r\n\r\n // Final format should look similar to: https://imagedelivery.net/<ACCOUNT_HASH>/<IMAGE_ID>/w=400,sharpen=3\r\n return url.replace(\"public\", this.createImageOptionsString(options));\r\n }\r\n\r\n public optimizeId(id: string, options: OptimizedImageOptions) {\r\n return this.optimizeUrl(this.url(id), options);\r\n }\r\n\r\n public createOptionsSearchParams(options: OptimizedImageOptions) {\r\n const params = new URLSearchParams();\r\n\r\n const pairs = Object.entries(options);\r\n\r\n for (const [key, val] of pairs) {\r\n if (val === undefined) {\r\n continue;\r\n }\r\n\r\n params.set(key, val.toString());\r\n }\r\n\r\n return params;\r\n }\r\n\r\n public createImageOptionsString(options: OptimizedImageOptions) {\r\n const params = this.createOptionsSearchParams(options);\r\n\r\n return Array.from(params.entries())\r\n .map(([key, val]) => `${key}=${val}`)\r\n .join(\",\");\r\n }\r\n\r\n public async createUploadUrls(count: number, args: { apiKey: string }) {\r\n if (count === 0) {\r\n return [];\r\n }\r\n\r\n const headers = new Headers();\r\n headers.set(\"Authorization\", `Bearer ${args.apiKey}`);\r\n\r\n const 
urls = await Promise.all(\r\n Array.from({ length: count }).map(async () => {\r\n try {\r\n const form = new FormData();\r\n const id = createId();\r\n form.append(\"id\", id);\r\n form.append(\"expiry\", dayjs().add(5, \"minute\").toISOString());\r\n\r\n const img = await ofetch<CreateImageUrlResponse>(\r\n `https://api.cloudflare.com/client/v4/accounts/${this.account}/images/v2/direct_upload`,\r\n { method: \"POST\", headers, body: form }\r\n );\r\n\r\n if (!img.success) {\r\n throw new Error(\"Error uploading image\");\r\n }\r\n\r\n return { url: img.result.uploadURL, id };\r\n } catch (e) {\r\n console.error(\"Error uploading image\");\r\n throw e;\r\n }\r\n })\r\n );\r\n\r\n return urls;\r\n }\r\n\r\n public async upload(url: string, body: FormData) {\r\n const fetchResponse = await ofetch<UploadImageResponse>(url, {\r\n method: \"POST\",\r\n body,\r\n });\r\n\r\n if (!fetchResponse.success) {\r\n throw new Error(\"Failed to upload image\");\r\n }\r\n\r\n const downloadUrl = fetchResponse.result.variants[0];\r\n\r\n if (!downloadUrl) {\r\n throw new Error(\"Could not find download URL\");\r\n }\r\n\r\n return downloadUrl;\r\n }\r\n\r\n public async delete(id: string, args: { apiKey: string }) {\r\n if (this.isProtected(id)) {\r\n return { success: true };\r\n }\r\n\r\n try {\r\n const headers = new Headers();\r\n headers.set(\"Authorization\", `Bearer ${args.apiKey}`);\r\n\r\n await ofetch(\r\n `https://api.cloudflare.com/client/v4/accounts/${this.account}/images/v1/${id}`,\r\n {\r\n method: \"POST\",\r\n headers,\r\n }\r\n );\r\n return { success: true };\r\n } catch (_e) {\r\n return { success: false };\r\n }\r\n }\r\n\r\n public async batchUpload(\r\n files: { file: File; url: { id: string; value: string } }[]\r\n ) {\r\n return await Promise.all(\r\n files.map(async (e) => {\r\n const formData = new FormData();\r\n formData.append(\"file\", e.file);\r\n\r\n const downloadUrl = await this.upload(e.url.value, formData);\r\n\r\n return {\r\n url: downloadUrl,\r\n id: e.url.id,\r\n };\r\n })\r\n );\r\n 
}\r\n}\r\n"],"mappings":";AAAA,SAAS,gBAAgB;AACzB,OAAO,WAAW;AAClB,SAAS,cAAc;AA4ChB,IAAM,aAAN,MAAuD;AAAA,EACpD,YAAsB,CAAC,eAAe;AAAA,EACtC;AAAA,EACA;AAAA,EAER,YAAY,MAIT;AACD,SAAK,UAAU,KAAK;AAEpB,SAAK,YAAY,KAAK;AAEtB,QAAI,KAAK,WAAW;AAClB,WAAK,UAAU,KAAK,GAAG,KAAK,SAAS;AAAA,IACvC;AAAA,EACF;AAAA,EAEA,IAAI,WAAW;AACb,QAAI,CAAC,KAAK,WAAW;AACnB,YAAM,IAAI,MAAM,0CAA0C;AAAA,IAC5D;AAEA,WAAO,KAAK;AAAA,EACd;AAAA,EAEO,IAAI,IAAY;AACrB,WAAO,6BAA6B,KAAK,OAAO,IAAI,EAAE;AAAA,EACxD;AAAA,EAEQ,cAAc,KAAa;AACjC,WAAO,KAAK,UAAU,KAAK,CAAC,MAAM,IAAI,SAAS,CAAC,CAAC;AAAA,EACnD;AAAA,EAEQ,YAAY,IAAY;AAC9B,QAAI,CAAC,KAAK,WAAW;AACnB,aAAO;AAAA,IACT;AAEA,WAAO,OAAO,OAAO,KAAK,SAAS,EAAE,KAAK,CAAC,MAAM,MAAM,EAAE;AAAA,EAC3D;AAAA;AAAA;AAAA;AAAA,EAKO,YAAY,KAAa,SAAgC;AAC9D,QAAI,KAAK,cAAc,GAAG,GAAG;AAC3B,aAAO;AAAA,IACT;AAGA,WAAO,IAAI,QAAQ,UAAU,KAAK,yBAAyB,OAAO,CAAC;AAAA,EACrE;AAAA,EAEO,WAAW,IAAY,SAAgC;AAC5D,WAAO,KAAK,YAAY,KAAK,IAAI,EAAE,GAAG,OAAO;AAAA,EAC/C;AAAA,EAEO,0BAA0B,SAAgC;AAC/D,UAAM,SAAS,IAAI,gBAAgB;AAEnC,UAAM,QAAQ,OAAO,QAAQ,OAAO;AAEpC,eAAW,CAAC,KAAK,GAAG,KAAK,OAAO;AAC9B,UAAI,QAAQ,QAAW;AACrB;AAAA,MACF;AAEA,aAAO,IAAI,KAAK,IAAI,SAAS,CAAC;AAAA,IAChC;AAEA,WAAO;AAAA,EACT;AAAA,EAEO,yBAAyB,SAAgC;AAC9D,UAAM,SAAS,KAAK,0BAA0B,OAAO;AAErD,WAAO,MAAM,KAAK,OAAO,QAAQ,CAAC,EAC/B,IAAI,CAAC,CAAC,KAAK,GAAG,MAAM,GAAG,GAAG,IAAI,GAAG,EAAE,EACnC,KAAK,GAAG;AAAA,EACb;AAAA,EAEA,MAAa,iBAAiB,OAAe,MAA0B;AACrE,QAAI,UAAU,GAAG;AACf,aAAO,CAAC;AAAA,IACV;AAEA,UAAM,UAAU,IAAI,QAAQ;AAC5B,YAAQ,IAAI,iBAAiB,UAAU,KAAK,MAAM,EAAE;AAEpD,UAAM,OAAO,MAAM,QAAQ;AAAA,MACzB,MAAM,KAAK,EAAE,QAAQ,MAAM,CAAC,EAAE,IAAI,YAAY;AAC5C,YAAI;AACF,gBAAM,OAAO,IAAI,SAAS;AAC1B,gBAAM,KAAK,SAAS;AACpB,eAAK,OAAO,MAAM,EAAE;AACpB,eAAK,OAAO,UAAU,MAAM,EAAE,IAAI,GAAG,QAAQ,EAAE,YAAY,CAAC;AAE5D,gBAAM,MAAM,MAAM;AAAA,YAChB,iDAAiD,KAAK,OAAO;AAAA,YAC7D,EAAE,QAAQ,QAAQ,SAAS,MAAM,KAAK;AAAA,UACxC;AAEA,cAAI,CAAC,IAAI,SAAS;AAChB,kBAAM,IAAI,MAAM,uBAAuB;AAAA,UACzC;AAEA,iBAAO,EAAE,KAAK,IAAI,OAAO,WAAW,GAAG;AAAA,QACzC,SAAS,GAAG;AACV,kBAAQ,MAAM,uBAAuB;AACrC,gBAAM;AAAA,QACR;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAa,OAAO,KAAa,MAAgB;AAC/C,UAAM,gBAAgB,MAAM,OAA4B,KAAK;AAAA,MAC3D,QAAQ;AAAA,MACR;AAAA,IACF,CAAC;AAED,QAAI,CAAC,cAAc,SAAS;AAC1B,YAAM,IAAI,MAAM,wBAAwB;AAAA,IAC1C;AAEA,UAAM,cAAc,cAAc,OAAO,SAAS,CAAC;AAEnD,QAAI,CAAC,aAAa;AAChB,YAAM,IAAI,MAAM,6BAA6B;AAAA,IAC/C;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAa,OAAO,IAAY,MAA0B;AACxD,QAAI,KAAK,YAAY,EAAE,GAAG;AACxB,aAAO,EAAE,SAAS,KAAK;AAAA,IACzB;AAEA,QAAI;AACF,YAAM,UAAU,IAAI,QAAQ;AAC5B,cAAQ,IAAI,iBAAiB,UAAU,KAAK,MAAM,EAAE;AAEpD,YAAM;AAAA,QACJ,iDAAiD,KAAK,OAAO,cAAc,EAAE;AAAA,QAC7E;AAAA,UACE,QAAQ;AAAA,UACR;AAAA,QACF;AAAA,MACF;AACA,aAAO,EAAE,SAAS,KAAK;AAAA,IACzB,SAAS,IAAI;AACX,aAAO,EAAE,SAAS,MAAM;AAAA,IAC1B;AAAA,EACF;AAAA,EAEA,MAAa,YACX,OACA;AACA,WAAO,MAAM,QAAQ;AAAA,MACnB,MAAM,IAAI,OAAO,MAAM;AACrB,cAAM,WAAW,IAAI,SAAS;AAC9B,iBAAS,OAAO,QAAQ,EAAE,IAAI;AAE9B,cAAM,cAAc,MAAM,KAAK,OAAO,EAAE,IAAI,OAAO,QAAQ;AAE3D,eAAO;AAAA,UACL,KAAK;AAAA,UACL,IAAI,EAAE,IAAI;AAAA,QACZ;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AACF;","names":[]}
package/dist/logger.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/logger.ts"],"sourcesContent":["import { pino } from \"pino\";\n\nexport const createLogger = (args: {\n token?: string | undefined | null;\n pretty?: boolean;\n service: string;\n}) => {\n const l = pino(\n {\n level: \"info\",\n redact: [],\n transport: args.pretty\n ? {\n target: \"pino-pretty\",\n }\n : undefined,\n },\n\n args.token\n ? pino.transport({\n target: \"@logtail/pino\",\n options: { sourceToken: args.token },\n })\n : undefined,\n );\n\n l.child({ service: args.service });\n\n return l;\n};\n"],"mappings":";AAAA,SAAS,YAAY;AAEd,IAAM,eAAe,CAAC,SAIvB;AACJ,QAAM,IAAI;AAAA,IACR;AAAA,MACE,OAAO;AAAA,MACP,QAAQ,CAAC;AAAA,MACT,WAAW,KAAK,SACZ;AAAA,QACE,QAAQ;AAAA,MACV,IACA;AAAA,IACN;AAAA,IAEA,KAAK,QACD,KAAK,UAAU;AAAA,MACb,QAAQ;AAAA,MACR,SAAS,EAAE,aAAa,KAAK,MAAM;AAAA,IACrC,CAAC,IACD;AAAA,EACN;AAEA,IAAE,MAAM,EAAE,SAAS,KAAK,QAAQ,CAAC;AAEjC,SAAO;AACT;","names":[]}
+
{"version":3,"sources":["../src/logger.ts"],"sourcesContent":["import { pino } from \"pino\";\r\n\r\nexport const createLogger = (args: {\r\n token?: string | undefined | null;\r\n pretty?: boolean;\r\n service: string;\r\n}) => {\r\n const l = pino(\r\n {\r\n level: \"info\",\r\n redact: [],\r\n transport: args.pretty\r\n ? {\r\n target: \"pino-pretty\",\r\n }\r\n : undefined,\r\n },\r\n\r\n args.token\r\n ? pino.transport({\r\n target: \"@logtail/pino\",\r\n options: { sourceToken: args.token },\r\n })\r\n : undefined,\r\n );\r\n\r\n l.child({ service: args.service });\r\n\r\n return l;\r\n};\r\n"],"mappings":";AAAA,SAAS,YAAY;AAEd,IAAM,eAAe,CAAC,SAIvB;AACJ,QAAM,IAAI;AAAA,IACR;AAAA,MACE,OAAO;AAAA,MACP,QAAQ,CAAC;AAAA,MACT,WAAW,KAAK,SACZ;AAAA,QACE,QAAQ;AAAA,MACV,IACA;AAAA,IACN;AAAA,IAEA,KAAK,QACD,KAAK,UAAU;AAAA,MACb,QAAQ;AAAA,MACR,SAAS,EAAE,aAAa,KAAK,MAAM;AAAA,IACrC,CAAC,IACD;AAAA,EACN;AAEA,IAAE,MAAM,EAAE,SAAS,KAAK,QAAQ,CAAC;AAEjC,SAAO;AACT;","names":[]}
package/dist/migrate.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/migrate.ts"],"sourcesContent":["import { createClient } from \"@libsql/client\";\nimport { drizzle } from \"drizzle-orm/libsql\";\nimport { migrate as runDrizzleMigrate } from \"drizzle-orm/libsql/migrator\";\n\nexport const migrate = async <TSchema extends Record<string, any>>(\n schema: TSchema,\n args: {\n url: string;\n token?: string;\n migrationsFolder: string;\n }\n) => {\n let url = args.url;\n\n // Migrations are only supported via the libsql protocol\n if (url.startsWith(\"http\")) {\n url = url.replace(/http(s)?/, \"libsql\");\n }\n\n const db = drizzle(\n createClient(\n // Auth token must be either 1) present and not undefined or 2) not present\n args.token\n ? {\n url,\n authToken: args.token,\n }\n : { url }\n ),\n { schema }\n );\n\n console.info(\"Running migrations\");\n\n await runDrizzleMigrate(db, {\n migrationsFolder: args.migrationsFolder,\n });\n\n console.info(\"Migrations applied\");\n process.exit(0);\n};\n"],"mappings":";AAAA,SAAS,oBAAoB;AAC7B,SAAS,eAAe;AACxB,SAAS,WAAW,yBAAyB;AAEtC,IAAM,UAAU,OACrB,QACA,SAKG;AACH,MAAI,MAAM,KAAK;AAGf,MAAI,IAAI,WAAW,MAAM,GAAG;AAC1B,UAAM,IAAI,QAAQ,YAAY,QAAQ;AAAA,EACxC;AAEA,QAAM,KAAK;AAAA,IACT;AAAA;AAAA,MAEE,KAAK,QACD;AAAA,QACE;AAAA,QACA,WAAW,KAAK;AAAA,MAClB,IACA,EAAE,IAAI;AAAA,IACZ;AAAA,IACA,EAAE,OAAO;AAAA,EACX;AAEA,UAAQ,KAAK,oBAAoB;AAEjC,QAAM,kBAAkB,IAAI;AAAA,IAC1B,kBAAkB,KAAK;AAAA,EACzB,CAAC;AAED,UAAQ,KAAK,oBAAoB;AACjC,UAAQ,KAAK,CAAC;AAChB;","names":[]}
+
{"version":3,"sources":["../src/migrate.ts"],"sourcesContent":["import { createClient } from \"@libsql/client\";\r\nimport { drizzle } from \"drizzle-orm/libsql\";\r\nimport { migrate as runDrizzleMigrate } from \"drizzle-orm/libsql/migrator\";\r\n\r\nexport const migrate = async <TSchema extends Record<string, any>>(\r\n schema: TSchema,\r\n args: {\r\n url: string;\r\n token?: string;\r\n migrationsFolder: string;\r\n }\r\n) => {\r\n let url = args.url;\r\n\r\n // Migrations are only supported via the libsql protocol\r\n if (url.startsWith(\"http\")) {\r\n url = url.replace(/http(s)?/, \"libsql\");\r\n }\r\n\r\n const db = drizzle(\r\n createClient(\r\n // Auth token must be either 1) present and not undefined or 2) not present\r\n args.token\r\n ? {\r\n url,\r\n authToken: args.token,\r\n }\r\n : { url }\r\n ),\r\n { schema }\r\n );\r\n\r\n console.info(\"Running migrations\");\r\n\r\n await runDrizzleMigrate(db, {\r\n migrationsFolder: args.migrationsFolder,\r\n });\r\n\r\n console.info(\"Migrations applied\");\r\n process.exit(0);\r\n};\r\n"],"mappings":";AAAA,SAAS,oBAAoB;AAC7B,SAAS,eAAe;AACxB,SAAS,WAAW,yBAAyB;AAEtC,IAAM,UAAU,OACrB,QACA,SAKG;AACH,MAAI,MAAM,KAAK;AAGf,MAAI,IAAI,WAAW,MAAM,GAAG;AAC1B,UAAM,IAAI,QAAQ,YAAY,QAAQ;AAAA,EACxC;AAEA,QAAM,KAAK;AAAA,IACT;AAAA;AAAA,MAEE,KAAK,QACD;AAAA,QACE;AAAA,QACA,WAAW,KAAK;AAAA,MAClB,IACA,EAAE,IAAI;AAAA,IACZ;AAAA,IACA,EAAE,OAAO;AAAA,EACX;AAEA,UAAQ,KAAK,oBAAoB;AAEjC,QAAM,kBAAkB,IAAI;AAAA,IAC1B,kBAAkB,KAAK;AAAA,EACzB,CAAC;AAED,UAAQ,KAAK,oBAAoB;AACjC,UAAQ,KAAK,CAAC;AAChB;","names":[]}
package/dist/schema.d.ts
CHANGED
@@ -4,11 +4,11 @@ import * as drizzle_orm_sqlite_core from 'drizzle-orm/sqlite-core';
 declare const columns: {
 time: {
 createdAt: drizzle_orm.HasDefault<drizzle_orm.NotNull<drizzle_orm_sqlite_core.SQLiteTimestampBuilderInitial<"created_at">>>;
- updatedAt: drizzle_orm.HasDefault<drizzle_orm.NotNull<drizzle_orm_sqlite_core.SQLiteTimestampBuilderInitial<"updated_at"
+ updatedAt: drizzle_orm.HasDefault<drizzle_orm.HasDefault<drizzle_orm.NotNull<drizzle_orm_sqlite_core.SQLiteTimestampBuilderInitial<"updated_at">>>>;
 };
 common: {
 createdAt: drizzle_orm.HasDefault<drizzle_orm.NotNull<drizzle_orm_sqlite_core.SQLiteTimestampBuilderInitial<"created_at">>>;
- updatedAt: drizzle_orm.HasDefault<drizzle_orm.NotNull<drizzle_orm_sqlite_core.SQLiteTimestampBuilderInitial<"updated_at"
+ updatedAt: drizzle_orm.HasDefault<drizzle_orm.HasDefault<drizzle_orm.NotNull<drizzle_orm_sqlite_core.SQLiteTimestampBuilderInitial<"updated_at">>>>;
 id: drizzle_orm.HasDefault<drizzle_orm.NotNull<drizzle_orm_sqlite_core.SQLiteTextBuilderInitial<"id", [string, ...string[]]>>>;
 };
 };
package/dist/schema.js
CHANGED
@@ -2,8 +2,8 @@
 import { createId } from "@paralleldrive/cuid2";
 import { int, text } from "drizzle-orm/sqlite-core";
 var timeColumns = {
- createdAt: int("created_at", { mode: "timestamp_ms" }).notNull().$
- updatedAt: int("updated_at", { mode: "timestamp_ms" }).notNull().$
+ createdAt: int("created_at", { mode: "timestamp_ms" }).notNull().$default(() => /* @__PURE__ */ new Date()),
+ updatedAt: int("updated_at", { mode: "timestamp_ms" }).notNull().$default(() => /* @__PURE__ */ new Date()).$onUpdate(() => /* @__PURE__ */ new Date())
 };
 var commonColumns = {
 id: text("id").primaryKey().$defaultFn(() => createId()),
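In practice, the change above means that tables built from these shared column helpers now refresh updated_at automatically on update statements, in addition to the insert-time default. A minimal usage sketch, assuming the compiled schema module is consumed via a subpath import and using an illustrative posts table (neither the import path nor the table appears in this diff):

import { sqliteTable, text } from "drizzle-orm/sqlite-core";
import { columns } from "@conorroberts/utils/schema"; // assumed import path, not confirmed by this diff

// columns.common supplies id, created_at and updated_at; as of 0.0.11,
// updated_at gets a default on insert and is refreshed via $onUpdate on updates.
export const posts = sqliteTable("posts", {
  ...columns.common,
  title: text("title").notNull(),
});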
package/dist/schema.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/schema.ts"],"sourcesContent":["import { createId } from \"@paralleldrive/cuid2\";\nimport { int, text } from \"drizzle-orm/sqlite-core\";\n\nconst timeColumns = {\n createdAt: int(\"created_at\", { mode: \"timestamp_ms\" })\n .notNull()\n .$
+
{"version":3,"sources":["../src/schema.ts"],"sourcesContent":["import { createId } from \"@paralleldrive/cuid2\";\r\nimport { int, text } from \"drizzle-orm/sqlite-core\";\r\n\r\nconst timeColumns = {\r\n createdAt: int(\"created_at\", { mode: \"timestamp_ms\" })\r\n .notNull()\r\n .$default(() => new Date()),\r\n updatedAt: int(\"updated_at\", { mode: \"timestamp_ms\" })\r\n .notNull()\r\n .$default(() => new Date())\r\n .$onUpdate(() => new Date()),\r\n};\r\n\r\nconst commonColumns = {\r\n id: text(\"id\")\r\n .primaryKey()\r\n .$defaultFn(() => createId()),\r\n ...timeColumns,\r\n};\r\n\r\nexport const columns = {\r\n time: timeColumns,\r\n common: commonColumns,\r\n};\r\n"],"mappings":";AAAA,SAAS,gBAAgB;AACzB,SAAS,KAAK,YAAY;AAE1B,IAAM,cAAc;AAAA,EAClB,WAAW,IAAI,cAAc,EAAE,MAAM,eAAe,CAAC,EAClD,QAAQ,EACR,SAAS,MAAM,oBAAI,KAAK,CAAC;AAAA,EAC5B,WAAW,IAAI,cAAc,EAAE,MAAM,eAAe,CAAC,EAClD,QAAQ,EACR,SAAS,MAAM,oBAAI,KAAK,CAAC,EACzB,UAAU,MAAM,oBAAI,KAAK,CAAC;AAC/B;AAEA,IAAM,gBAAgB;AAAA,EACpB,IAAI,KAAK,IAAI,EACV,WAAW,EACX,WAAW,MAAM,SAAS,CAAC;AAAA,EAC9B,GAAG;AACL;AAEO,IAAM,UAAU;AAAA,EACrB,MAAM;AAAA,EACN,QAAQ;AACV;","names":[]}
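For readability, the updated src/schema.ts can be read directly out of the sourcesContent field of the new source map above. Decoded and re-indented, it is:

import { createId } from "@paralleldrive/cuid2";
import { int, text } from "drizzle-orm/sqlite-core";

const timeColumns = {
  createdAt: int("created_at", { mode: "timestamp_ms" })
    .notNull()
    .$default(() => new Date()),
  updatedAt: int("updated_at", { mode: "timestamp_ms" })
    .notNull()
    .$default(() => new Date())
    .$onUpdate(() => new Date()),
};

const commonColumns = {
  id: text("id")
    .primaryKey()
    .$defaultFn(() => createId()),
  ...timeColumns,
};

export const columns = {
  time: timeColumns,
  common: commonColumns,
};

The extra HasDefault wrapper on updatedAt in schema.d.ts reflects that the column now carries both an insert-time default and an on-update default; the remaining .js.map changes in this release appear to be line-ending (LF to CRLF) differences in the embedded sources only.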