appos 0.2.3-0 → 0.2.4-0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bin/main.mjs +1 -1
- package/dist/exports/api/index.d.mts +1 -2
- package/dist/exports/api/index.mjs +1 -1
- package/dist/exports/api/storage.d.mts +1 -1
- package/dist/exports/api/storage.mjs +1 -1
- package/dist/exports/api/workflows/extract-blob-metadata.mjs +1 -1
- package/dist/exports/api/workflows/generate-image-variant.d.mts +3 -3
- package/dist/exports/api/workflows/generate-image-variant.mjs +1 -1
- package/dist/exports/api/workflows/generate-preview.mjs +1 -1
- package/dist/exports/api/workflows/purge-attachment.mjs +1 -1
- package/dist/exports/api/workflows/purge-audit-logs.mjs +1 -1
- package/dist/exports/api/workflows/purge-unattached-blobs.mjs +1 -1
- package/dist/exports/api/workflows/storage.d.mts +1 -1
- package/dist/exports/api/workflows/track-db-changes.mjs +1 -1
- package/dist/exports/cli/api/storage.d.mts +1 -1
- package/dist/exports/cli/api/workflows/generate-image-variant.d.mts +1 -1
- package/dist/exports/tests/packages/appos/src/api/index.d.mts +1 -2
- package/dist/exports/tests/packages/appos/src/api/storage.d.mts +5 -5
- package/dist/exports/tests/packages/appos/src/api/workflows/generate-image-variant.d.mts +2 -38
- package/package.json +1 -1
package/dist/bin/main.mjs
CHANGED
|
@@ -335,7 +335,7 @@ export default async function seed(container: AppOS.Container) {}
|
|
|
335
335
|
`;async function exists(filePath){try{return await fs$1.access(filePath),!0}catch{return!1}}async function findLatestSnapshot(migrationsFolder){if(!await exists(migrationsFolder))return null;let migrationDirs=(await fs$1.readdir(migrationsFolder,{withFileTypes:!0})).filter(e=>e.isDirectory()&&/^\d{14}_/.test(e.name)).map(e=>e.name).sort().reverse();if(migrationDirs.length===0)return null;let snapshotPath=path.join(migrationsFolder,migrationDirs[0],`snapshot.json`);if(!await exists(snapshotPath))return null;let content=await fs$1.readFile(snapshotPath,`utf-8`);return JSON.parse(content)}async function ensureDatabaseScaffolding(dbName){let dbFolder=path.join(process.cwd(),APPOS_DIR,DATABASES_DIR,dbName);await exists(dbFolder)||await fs$1.mkdir(dbFolder,{recursive:!0});let schemaPath=path.join(dbFolder,`schema.ts`);await exists(schemaPath)||await fs$1.writeFile(schemaPath,SCHEMA_TEMPLATE(dbName));let relationsPath=path.join(dbFolder,`relations.ts`);await exists(relationsPath)||await fs$1.writeFile(relationsPath,RELATIONS_TEMPLATE(dbName));let seedPath=path.join(dbFolder,`seed.ts`);await exists(seedPath)||await fs$1.writeFile(seedPath,SEED_TEMPLATE(dbName));let schemaMigrationsFolder=path.join(dbFolder,`schema-migrations`);await exists(schemaMigrationsFolder)||await fs$1.mkdir(schemaMigrationsFolder,{recursive:!0});let dataMigrationsFolder=path.join(dbFolder,`data-migrations`);await exists(dataMigrationsFolder)||await fs$1.mkdir(dataMigrationsFolder,{recursive:!0})}async function generateDataMigrations(opts){let{ctx}=opts,{container}=ctx,databaseNames=Object.keys(container.db);if(ctx.info(`Generating data migration`),databaseNames.length===0){ctx.info(`No databases available`);return}let selectedDb=await ctx.choice({message:`Select database`,options:databaseNames.map(name=>({value:name,label:name}))});ctx.isCancel(selectedDb)&&ctx.cancel();let dbName=selectedDb;await 
ensureDatabaseScaffolding(dbName);let{migrationsFolder}=defineMigrationOpts(dbName,`data`),migrationFile=await ctx.task(dbName,async()=>{let migrationTag=`${new Date().toISOString().replace(/[-:T]/g,``).slice(0,14)}_${generateMigrationName()}`,migrationFolder=path.join(migrationsFolder,migrationTag);await fs$1.mkdir(migrationFolder,{recursive:!0});let file=path.join(migrationFolder,`migration.sql`);return await fs$1.writeFile(file,`-- Write your data migration SQL here
|
|
336
336
|
`),file});ctx.success(migrationFile)}async function generateMigrations(opts){let{ctx,migrationType}=opts,{container}=ctx;if(migrationType===`data`){await generateDataMigrations(opts);return}let databaseNames=Object.keys(container.db);if(ctx.info(`Generating ${migrationType} migrations`),databaseNames.length===0){ctx.info(`Nothing to generate`);return}for(let name of databaseNames)await ensureDatabaseScaffolding(name);let createdAny=!1;for(let name of databaseNames){let schemaPath=path.join(process.cwd(),APPOS_DIR,DATABASES_DIR,name,`schema.ts`),{migrationsFolder}=defineMigrationOpts(name,migrationType);try{let schemaModule=await import(schemaPath),schema=schemaModule.default||schemaModule,prevJson=await findLatestSnapshot(migrationsFolder)??await generateDrizzleJson({}),currentJson=await generateDrizzleJson(schema),migrationStatements=await generateMigration(prevJson,currentJson);if(!migrationStatements||migrationStatements.length===0){ctx.taskStatus(name,`no changes`);continue}let migrationFile=await ctx.task(name,async()=>{let migrationTag=`${new Date().toISOString().replace(/[-:T]/g,``).slice(0,14)}_${generateMigrationName()}`,migrationFolder=path.join(migrationsFolder,migrationTag);await fs$1.mkdir(migrationFolder,{recursive:!0});let sqlContent=`${migrationStatements.join(`;
|
|
337
337
|
|
|
338
|
-
`)};`,file=path.join(migrationFolder,`migration.sql`),snapshotFile=path.join(migrationFolder,`snapshot.json`);return await fs$1.writeFile(file,sqlContent),await fs$1.writeFile(snapshotFile,JSON.stringify(currentJson,null,2)),file});ctx.success(migrationFile),createdAny=!0}catch(error){ctx.fail(error instanceof Error?error.message:String(error))}}createdAny||ctx.info(`Nothing to generate`)}async function runMigrations(opts){let{ctx,migrationType}=opts,{container}=ctx,databaseNames=Object.keys(container.db),originalLogLevel=container.logger.level;container.logger.level=`silent`,migrationType===`schema`&&await(0,import_system_database.ensureSystemDatabase)(container.worker.dbUrl,container.logger);let hasAnyMigrations=(await Promise.all(databaseNames.map(async name=>{let{migrationsFolder}=defineMigrationOpts(name,migrationType);return exists(migrationsFolder)}))).some(Boolean);if(databaseNames.length===0||!hasAnyMigrations){ctx.info(`Nothing to migrate`),container.logger.level=originalLogLevel;return}for(let name of databaseNames){let db=container.db[name],migrationOpts=defineMigrationOpts(name,migrationType);if(await exists(migrationOpts.migrationsFolder))try{await ctx.task(name,async()=>{await migrate(db,migrationOpts)})}catch(error){ctx.fail(error instanceof Error?error.message:String(error))}}container.logger.level=originalLogLevel}async function runSeeds(opts){let{ctx}=opts,{container}=ctx,databaseNames=Object.keys(container.db);if(databaseNames.length===0){ctx.info(`Nothing to seed`);return}for(let name of databaseNames)await ensureDatabaseScaffolding(name);for(let name of databaseNames){let seedPath=path.join(process.cwd(),APPOS_DIR,DATABASES_DIR,name,`seed.ts`);try{await ctx.task(name,async()=>{let seedModule=await import(seedPath),seedFn=seedModule.default||seedModule.seed;if(typeof seedFn!=`function`)throw Error(`seed file does not export a function`);await seedFn(container)})}catch(error){ctx.fail(error instanceof Error?error.message:String(error))}}}var 
gen_default$1=defineCommand({description:`Generate database data migrations`,args:z.tuple([]),opts:z.object({}),async run(ctx){ctx.intro(`db:data:gen`),await generateMigrations({ctx,migrationType:`data`}),await ctx.cleanup(),ctx.outro()}}),migrate_default$1=defineCommand({description:`Run pending database data migrations`,args:z.tuple([]),opts:z.object({}),async run(ctx){ctx.intro(`db:data:migrate`),await runMigrations({ctx,migrationType:`data`}),await ctx.cleanup(),ctx.outro()}}),gen_default=defineCommand({description:`Generate database schema migrations from schema changes`,args:z.tuple([]),opts:z.object({}),async run(ctx){ctx.intro(`db:schema:gen`),await generateMigrations({ctx,migrationType:`schema`}),await ctx.cleanup(),ctx.outro()}}),migrate_default=defineCommand({description:`Run pending database schema migrations`,args:z.tuple([]),opts:z.object({}),async run(ctx){ctx.intro(`db:schema:migrate`),await runMigrations({ctx,migrationType:`schema`}),await ctx.cleanup(),ctx.outro()}}),seed_default=defineCommand({description:`Seed databases with initial data`,args:z.tuple([]),opts:z.object({}),async run(ctx){await runSeeds({ctx}),await ctx.cleanup()}}),down_default=defineCommand({description:`Teardown the infrastructure`,args:z.tuple([]),opts:z.object({}),async run(ctx){ctx.intro(`down`);try{await ctx.spinner(`Stopping containers`,()=>ctx.exec(`docker`,[`compose`,`--profile=infra`,`down`,`--remove-orphans`,`--timeout=0`]))}catch(error){ctx.fail(error instanceof Error?error.message:String(error))}ctx.outro()}}),preview_default=defineCommand({description:`Preview the application`,args:z.tuple([]),opts:z.object({}),async run(ctx){ctx.intro(`preview`),ctx.step(`Starting containers (Ctrl+C to stop)`);let proc=spawn(`docker`,[`compose`,`--profile=app`,`up`,`--build`],{stdio:`inherit`,cwd:process.cwd()}),isShuttingDown=!1,cleanup=async()=>{if(!isShuttingDown){isShuttingDown=!0,proc.kill(`SIGTERM`),ctx.step(`Stopping containers`);try{await 
ctx.exec(`docker`,[`compose`,`--profile=app`,`down`,`--remove-orphans`,`--timeout=0`])}catch{}process.exit(0)}};process.on(`SIGINT`,cleanup),process.on(`SIGTERM`,cleanup),proc.on(`close`,()=>{isShuttingDown||cleanup()}),await new Promise(()=>{})}}),version=`0.2.
|
|
338
|
+
`)};`,file=path.join(migrationFolder,`migration.sql`),snapshotFile=path.join(migrationFolder,`snapshot.json`);return await fs$1.writeFile(file,sqlContent),await fs$1.writeFile(snapshotFile,JSON.stringify(currentJson,null,2)),file});ctx.success(migrationFile),createdAny=!0}catch(error){ctx.fail(error instanceof Error?error.message:String(error))}}createdAny||ctx.info(`Nothing to generate`)}async function runMigrations(opts){let{ctx,migrationType}=opts,{container}=ctx,databaseNames=Object.keys(container.db),originalLogLevel=container.logger.level;container.logger.level=`silent`,migrationType===`schema`&&await(0,import_system_database.ensureSystemDatabase)(container.worker.dbUrl,container.logger);let hasAnyMigrations=(await Promise.all(databaseNames.map(async name=>{let{migrationsFolder}=defineMigrationOpts(name,migrationType);return exists(migrationsFolder)}))).some(Boolean);if(databaseNames.length===0||!hasAnyMigrations){ctx.info(`Nothing to migrate`),container.logger.level=originalLogLevel;return}for(let name of databaseNames){let db=container.db[name],migrationOpts=defineMigrationOpts(name,migrationType);if(await exists(migrationOpts.migrationsFolder))try{await ctx.task(name,async()=>{await migrate(db,migrationOpts)})}catch(error){ctx.fail(error instanceof Error?error.message:String(error))}}container.logger.level=originalLogLevel}async function runSeeds(opts){let{ctx}=opts,{container}=ctx,databaseNames=Object.keys(container.db);if(databaseNames.length===0){ctx.info(`Nothing to seed`);return}for(let name of databaseNames)await ensureDatabaseScaffolding(name);for(let name of databaseNames){let seedPath=path.join(process.cwd(),APPOS_DIR,DATABASES_DIR,name,`seed.ts`);try{await ctx.task(name,async()=>{let seedModule=await import(seedPath),seedFn=seedModule.default||seedModule.seed;if(typeof seedFn!=`function`)throw Error(`seed file does not export a function`);await seedFn(container)})}catch(error){ctx.fail(error instanceof Error?error.message:String(error))}}}var 
gen_default$1=defineCommand({description:`Generate database data migrations`,args:z.tuple([]),opts:z.object({}),async run(ctx){ctx.intro(`db:data:gen`),await generateMigrations({ctx,migrationType:`data`}),await ctx.cleanup(),ctx.outro()}}),migrate_default$1=defineCommand({description:`Run pending database data migrations`,args:z.tuple([]),opts:z.object({}),async run(ctx){ctx.intro(`db:data:migrate`),await runMigrations({ctx,migrationType:`data`}),await ctx.cleanup(),ctx.outro()}}),gen_default=defineCommand({description:`Generate database schema migrations from schema changes`,args:z.tuple([]),opts:z.object({}),async run(ctx){ctx.intro(`db:schema:gen`),await generateMigrations({ctx,migrationType:`schema`}),await ctx.cleanup(),ctx.outro()}}),migrate_default=defineCommand({description:`Run pending database schema migrations`,args:z.tuple([]),opts:z.object({}),async run(ctx){ctx.intro(`db:schema:migrate`),await runMigrations({ctx,migrationType:`schema`}),await ctx.cleanup(),ctx.outro()}}),seed_default=defineCommand({description:`Seed databases with initial data`,args:z.tuple([]),opts:z.object({}),async run(ctx){await runSeeds({ctx}),await ctx.cleanup()}}),down_default=defineCommand({description:`Teardown the infrastructure`,args:z.tuple([]),opts:z.object({}),async run(ctx){ctx.intro(`down`);try{await ctx.spinner(`Stopping containers`,()=>ctx.exec(`docker`,[`compose`,`--profile=infra`,`down`,`--remove-orphans`,`--timeout=0`]))}catch(error){ctx.fail(error instanceof Error?error.message:String(error))}ctx.outro()}}),preview_default=defineCommand({description:`Preview the application`,args:z.tuple([]),opts:z.object({}),async run(ctx){ctx.intro(`preview`),ctx.step(`Starting containers (Ctrl+C to stop)`);let proc=spawn(`docker`,[`compose`,`--profile=app`,`up`,`--build`],{stdio:`inherit`,cwd:process.cwd()}),isShuttingDown=!1,cleanup=async()=>{if(!isShuttingDown){isShuttingDown=!0,proc.kill(`SIGTERM`),ctx.step(`Stopping containers`);try{await 
ctx.exec(`docker`,[`compose`,`--profile=app`,`down`,`--remove-orphans`,`--timeout=0`])}catch{}process.exit(0)}};process.on(`SIGINT`,cleanup),process.on(`SIGTERM`,cleanup),proc.on(`close`,()=>{isShuttingDown||cleanup()}),await new Promise(()=>{})}}),version=`0.2.4-0`;const runtimeInfo$1=`(AppOS v${version} | Node ${process.version})`;var repl_default=defineCommand({description:`Start an interactive REPL with container access`,aliases:[`r`],args:z.tuple([]),opts:z.object({}),async run(ctx){ctx.intro(`repl`);let{container}=ctx,available=[],dbNames=Object.keys(container.db);dbNames.length>0&&available.push(`container.db.{${dbNames.join(`, `)}}`);let cacheNames=Object.keys(container.cache);cacheNames.length>0&&available.push(`container.cache.{${cacheNames.join(`, `)}}`);let otherKeys=Object.keys(container).filter(k=>k!==`db`&&k!==`cache`).map(k=>`container.${k}`);available.push(...otherKeys),ctx.info(`Starting REPL ${runtimeInfo$1}`),ctx.info(`Available: ${available.join(`, `)}`),ctx.line(``);let replServer=repl.start({prompt:`appos(${container.config.APP_NAME})> `,useColors:!0,useGlobal:!0,breakEvalOnSigint:!0});replServer.context.container=container,replServer.on(`exit`,async()=>{await ctx.cleanup(),ctx.info(`Bye`),process.exit(0)}),await new Promise(()=>{})}}),reset_default=defineCommand({description:`Reset the infrastructure`,args:z.tuple([]),opts:z.object({}),async run(ctx){ctx.intro(`reset`);try{await ctx.spinner(`Stopping containers`,()=>ctx.exec(`docker`,[`compose`,`--profile=infra`,`down`,`--remove-orphans`,`--timeout=0`]))}catch(error){ctx.fail(error instanceof Error?error.message:String(error))}try{await ctx.spinner(`Starting containers`,()=>ctx.exec(`docker`,[`compose`,`--profile=infra`,`up`,`--wait`]))}catch(error){ctx.fail(error instanceof Error?error.message:String(error))}ctx.step(`Running schema migrations`),await runMigrations({ctx,migrationType:`schema`}),ctx.step(`Seeding databases`),await runSeeds({ctx}),await ctx.cleanup(),ctx.outro()}});const 
scripts={increment:`
|
|
339
339
|
local windowMs = tonumber(ARGV[2])
|
|
340
340
|
local resetOnChange = ARGV[1] == "1"
|
|
341
341
|
|
|
@@ -7,7 +7,6 @@ import { DbChangeInput, DefineEventBusOptions, Event, EventBus, EventContext, db
|
|
|
7
7
|
import { DefineMailerOptions, Mailer, MailerPayload, MailerPayloadHtml, MailerPayloadReact, defineMailer } from "./mailer.mjs";
|
|
8
8
|
import { NewStorageAttachment, NewStorageBlob, NewStorageVariantRecord, StorageAttachment, StorageBlob, StorageRelations, StorageRelationsConfig, StorageTables, StorageVariantRecord, defineStorageSchema } from "./storage-schema.mjs";
|
|
9
9
|
import { ScheduledWorkflowContext, WorkflowContext, WorkflowHandle, defineScheduledWorkflow, defineWorkflow, loadWorkflows } from "./workflow.mjs";
|
|
10
|
-
import { ImageTransformations, ResizeOptions, resizeSchema, transformationsSchema } from "./workflows/generate-image-variant.mjs";
|
|
11
10
|
import { DatabaseWithStorage, DefineS3DiskOptions, DefineStorageOptions, Storage, StorageService, defineS3Disk, defineStorage } from "./storage.mjs";
|
|
12
11
|
import { AppContainer, Container, ServerConfig, WorkerConfig, defineAppContainer } from "./container.mjs";
|
|
13
12
|
import { AppContext, DefineAppContextOpts, SessionData, defineAppContext } from "./app-context.mjs";
|
|
@@ -18,4 +17,4 @@ import { DefineOpenAPIConfig, DefineOpenAPIConfigInput, DefineOpenAPIReturn, Han
|
|
|
18
17
|
import { withOtelSpan } from "./otel.mjs";
|
|
19
18
|
import { DefineRedisClientOptions, RedisClient, defineRedisClient } from "./redis.mjs";
|
|
20
19
|
import { CustomTypeOptions } from "i18next";
|
|
21
|
-
export { AccessControlRoles, AccessController, AppContainer, AppContext, AuditAction, Auth, AuthConfig, AuthPasskeyConfig, AuthSessionConfig, Cache, Config, Container, type CustomTypeOptions, Database, DatabaseWithStorage, DbChangeInput, DefineAppContextOpts, DefineAuthOptions, DefineCacheOptions, DefineDatabaseOptions, DefineEventBusOptions, DefineLoggerOptions, DefineMailerOptions, DefineOpenAPIConfig, DefineOpenAPIConfigInput, DefineOpenAPIReturn, DefineRedisClientOptions, DefineS3DiskOptions, DefineStorageOptions, DefineTestDatabaseOptions, Event, EventBus, EventContext, HandlerParams, I18nInitOptions,
|
|
20
|
+
export { AccessControlRoles, AccessController, AppContainer, AppContext, AuditAction, Auth, AuthConfig, AuthPasskeyConfig, AuthSessionConfig, Cache, Config, Container, type CustomTypeOptions, Database, DatabaseWithStorage, DbChangeInput, DefineAppContextOpts, DefineAuthOptions, DefineCacheOptions, DefineDatabaseOptions, DefineEventBusOptions, DefineLoggerOptions, DefineMailerOptions, DefineOpenAPIConfig, DefineOpenAPIConfigInput, DefineOpenAPIReturn, DefineRedisClientOptions, DefineS3DiskOptions, DefineStorageOptions, DefineTestDatabaseOptions, Event, EventBus, EventContext, HandlerParams, I18nInitOptions, Logger, Mailer, MailerPayload, MailerPayloadHtml, MailerPayloadReact, Middleware, MigrationType, NewStorageAttachment, NewStorageBlob, NewStorageVariantRecord, OpenAPIMethodSpec, OpenAPIObjectConfigV31, OpenAPIRegistration, QualifiedTableNames, RedisClient, Role, RouteModule, ScheduledWorkflowContext, ServerConfig, SessionData, Storage, StorageAttachment, StorageBlob, StorageRelations, StorageRelationsConfig, StorageService, StorageTables, StorageVariantRecord, ValidationErrorResponse, WorkerConfig, WorkflowContext, WorkflowHandle, appDir, auditActionSchema, baseSchema, createAccessControl, dbChangeInputSchema, dbChanges, dbChangesEvent, defaultI18nOptions, defineAppContainer, defineAppContext, defineAuth, defineAuthSchema, defineCache, defineConfig, defineDatabase, defineEvent, defineEventBus, defineI18n, defineLogger, defineMailer, defineMiddleware, defineMigrationOpts, defineOpenAPI, defineOpenAPIConfig, defineOpenAPIEndpoint, defineRedisClient, defineS3Disk, defineScheduledWorkflow, defineStorage, defineStorageSchema, defineTestDatabase, defineTypedResponses, defineWorkflow, generateOpenAPIDocument, i18n, loadAndRegisterAPIRoutes, loadEvents, loadMiddleware, loadWorkflows, migrationsSchema, registerRoutes, scanAPIRoutes, withOtelSpan, writeOpenAPISpecs };
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{defineAppContext as e}from"./app-context.mjs";import{auditActionSchema as t,createAccessControl as n,defineAuth as r}from"./auth.mjs";import{defineAuthSchema as i}from"./auth-schema.mjs";import{defineRedisClient as a}from"./redis.mjs";import{defineCache as o}from"./cache.mjs";import{baseSchema as s,defineConfig as c}from"./config.mjs";import{defineAppContainer as l}from"./container.mjs";import{dbChanges as u,defineDatabase as d,defineMigrationOpts as f,defineTestDatabase as p,migrationsSchema as m}from"./database.mjs";import{dbChangeInputSchema as h,dbChangesEvent as g,defineEvent as _,defineEventBus as v,loadEvents as y}from"./event.mjs";import{defaultI18nOptions as b,defineI18n as x}from"./i18n.mjs";import{defineLogger as S}from"./logger.mjs";import{defineMailer as C}from"./mailer.mjs";import{defineMiddleware as w,loadMiddleware as T}from"./middleware.mjs";import{appDir as E,defineOpenAPI as D,defineOpenAPIConfig as O,defineOpenAPIEndpoint as k,defineTypedResponses as A,generateOpenAPIDocument as j,loadAndRegisterAPIRoutes as M,registerRoutes as N,scanAPIRoutes as P,writeOpenAPISpecs as F}from"./openapi.mjs";import{withOtelSpan as I}from"./otel.mjs";import{defineScheduledWorkflow as L,defineWorkflow as R,loadWorkflows as z}from"./workflow.mjs";import{
|
|
1
|
+
import{defineAppContext as e}from"./app-context.mjs";import{auditActionSchema as t,createAccessControl as n,defineAuth as r}from"./auth.mjs";import{defineAuthSchema as i}from"./auth-schema.mjs";import{defineRedisClient as a}from"./redis.mjs";import{defineCache as o}from"./cache.mjs";import{baseSchema as s,defineConfig as c}from"./config.mjs";import{defineAppContainer as l}from"./container.mjs";import{dbChanges as u,defineDatabase as d,defineMigrationOpts as f,defineTestDatabase as p,migrationsSchema as m}from"./database.mjs";import{dbChangeInputSchema as h,dbChangesEvent as g,defineEvent as _,defineEventBus as v,loadEvents as y}from"./event.mjs";import{defaultI18nOptions as b,defineI18n as x}from"./i18n.mjs";import{defineLogger as S}from"./logger.mjs";import{defineMailer as C}from"./mailer.mjs";import{defineMiddleware as w,loadMiddleware as T}from"./middleware.mjs";import{appDir as E,defineOpenAPI as D,defineOpenAPIConfig as O,defineOpenAPIEndpoint as k,defineTypedResponses as A,generateOpenAPIDocument as j,loadAndRegisterAPIRoutes as M,registerRoutes as N,scanAPIRoutes as P,writeOpenAPISpecs as F}from"./openapi.mjs";import{withOtelSpan as I}from"./otel.mjs";import{defineScheduledWorkflow as L,defineWorkflow as R,loadWorkflows as z}from"./workflow.mjs";import{StorageService as B,defineS3Disk as V,defineStorage as H}from"./storage.mjs";import{defineStorageSchema as U}from"./storage-schema.mjs";export{B as StorageService,E as appDir,t as auditActionSchema,s as baseSchema,n as createAccessControl,h as dbChangeInputSchema,u as dbChanges,g as dbChangesEvent,b as defaultI18nOptions,l as defineAppContainer,e as defineAppContext,r as defineAuth,i as defineAuthSchema,o as defineCache,c as defineConfig,d as defineDatabase,_ as defineEvent,v as defineEventBus,x as defineI18n,S as defineLogger,C as defineMailer,w as defineMiddleware,f as defineMigrationOpts,D as defineOpenAPI,O as defineOpenAPIConfig,k as defineOpenAPIEndpoint,a as defineRedisClient,V as defineS3Disk,L as 
defineScheduledWorkflow,H as defineStorage,U as defineStorageSchema,p as defineTestDatabase,A as defineTypedResponses,R as defineWorkflow,j as generateOpenAPIDocument,M as loadAndRegisterAPIRoutes,y as loadEvents,T as loadMiddleware,z as loadWorkflows,m as migrationsSchema,N as registerRoutes,P as scanAPIRoutes,I as withOtelSpan,F as writeOpenAPISpecs};
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import { StorageBlob, StorageRelationsConfig, StorageTables } from "./storage-schema.mjs";
|
|
2
|
-
import { ImageTransformations
|
|
2
|
+
import { ImageTransformations } from "./workflows/generate-image-variant.mjs";
|
|
3
3
|
import { NodePgDatabase } from "drizzle-orm/node-postgres";
|
|
4
4
|
import { Pool } from "pg";
|
|
5
5
|
import { DriveManager } from "flydrive";
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{orm_exports as e}from"./orm.mjs";import{extractBlobMetadata as t}from"./workflows/extract-blob-metadata.mjs";import{generateImageVariant as n,resizeSchema as r,transformationsSchema as i}from"./workflows/generate-image-variant.mjs";import{generatePreview as a}from"./workflows/generate-preview.mjs";import{purgeAttachment as o}from"./workflows/purge-attachment.mjs";import{join as s}from"node:path";import{mkdtemp as c,rm as l,writeFile as u}from"node:fs/promises";import{createHash as d,createHmac as f,randomBytes as p,timingSafeEqual as m}from"node:crypto";import{tmpdir as h}from"node:os";import{DriveManager as g}from"flydrive";import{FSDriver as _}from"flydrive/drivers/fs";import{S3Driver as v}from"flydrive/drivers/s3";function y(e){return{bucket:e.bucket,region:e.region,visibility:e.visibility??`private`,...e.endpoint&&{endpoint:e.endpoint,forcePathStyle:!0},...e.credentials&&{credentials:e.credentials}}}function b(e){return d(`md5`).update(e).digest(`hex`)}function x(e,t){return`variants/${e}/${d(`sha256`).update(JSON.stringify({blobId:e,transformations:t})).digest(`hex`)}`}function S(e){let t=`${Date.now()}-${p(16).toString(`hex`)}`;return e?`${e}/${t}`:t}function C(e){if(!e.default)throw Error(`Storage: 'default' disk must be specified`);if(!e.disks||Object.keys(e.disks).length===0)throw Error(`Storage: At least one disk must be configured`);let t=e.default,n={},r={};for(let[t,i]of Object.entries(e.disks)){let a=i;if(`driver`in a)if(a.driver===`fs`){let e=()=>new _(a);n[t]=e,r[t]=e}else a.driver===`s3`&&(n[t]=()=>new v(a),e.publicEndpoint?r[t]=()=>new v({...a,endpoint:e.publicEndpoint}):r[t]=()=>new v(a));else if(`location`in a){let e=()=>new _(a);n[t]=e,r[t]=e}else if(`bucket`in a)if(n[t]=()=>new v(a),e.publicEndpoint){let n={...a,endpoint:e.publicEndpoint};r[t]=()=>new v(n)}else r[t]=()=>new v(a)}let i=new g({default:t,services:n}),a=new g({default:t,services:r});return new w(i,e.database,t,a,e.purgeCron,e.secret)}var 
w=class{drive;db;defaultDisk;purgeCron;secret;signedUrlDrive;constructor(e,t,n,r,i,a){this.drive=e,this.db=t,this.defaultDisk=n,this.signedUrlDrive=r,this.purgeCron=i,this.secret=a}async createBlob(e,t){let n=S(t.prefix),r=Buffer.from(e),i=b(r),a=t.serviceName||this.defaultDisk;await(t.serviceName?this.drive.use(t.serviceName):this.drive.use()).put(n,r,{contentType:t.contentType});let[o]=await this.db.insert(this.db._.fullSchema.storageBlobs).values({key:n,filename:t.filename,contentType:t.contentType,metadata:t.metadata,serviceName:a,byteSize:r.byteLength,checksum:i}).returning();return o}async getBlob(t){let[n]=await this.db.select().from(this.db._.fullSchema.storageBlobs).where((0,e.eq)(this.db._.fullSchema.storageBlobs.id,t));return n||null}async downloadBlob(e){let t=await this.getBlob(e);if(!t)return null;let n=await this.drive.use(t.serviceName).getBytes(t.key);return Buffer.from(n)}async deleteBlob(t){let n=await this.getBlob(t);return n?(await this.drive.use(n.serviceName).delete(n.key),await this.db.delete(this.db._.fullSchema.storageBlobs).where((0,e.eq)(this.db._.fullSchema.storageBlobs.id,t)),!0):!1}async getSignedUrl(e,t={}){let n=await this.getBlob(e);if(!n)return null;if(n.serviceName===`public`)return this.getPublicUrl(e);let r=this.signedUrlDrive.use(n.serviceName);try{let e={expiresIn:t.expiresIn||3600};if(t.disposition){let r=t.filename||n.filename;e.contentDisposition=t.disposition===`attachment`?`attachment; filename="${r}"`:`inline; filename="${r}"`}return await r.getSignedUrl(n.key,e)}catch{let e=`/storage/${n.id}`;return t.disposition?`${e}?${new URLSearchParams({disposition:t.disposition,...t.filename&&{filename:t.filename}}).toString()}`:e}}async getPublicUrl(e){let t=await this.getBlob(e);if(!t)return null;let n=this.drive.use(t.serviceName);if(t.serviceName===`public`)try{if(`getUrl`in n&&typeof n.getUrl==`function`)return await n.getUrl(t.key)}catch{}return`/storage/${t.id}`}async createAttachment(t,n,r,i,a=!1){if(a){let 
a=`${t}:${n}:${i}`,o=Array.from(a).reduce((e,t)=>(e<<5)-e+t.charCodeAt(0)|0,0);return await this.db.transaction(async a=>{await a.execute(e.sql`SELECT pg_advisory_xact_lock(${o})`),await a.delete(this.db._.fullSchema.storageAttachments).where((0,e.and)((0,e.eq)(this.db._.fullSchema.storageAttachments.recordType,t),(0,e.eq)(this.db._.fullSchema.storageAttachments.recordId,n),(0,e.eq)(this.db._.fullSchema.storageAttachments.name,i)));let[s]=await a.insert(this.db._.fullSchema.storageAttachments).values({recordType:t,recordId:n,blobId:r,name:i}).returning();return s})}let[o]=await this.db.insert(this.db._.fullSchema.storageAttachments).values({recordType:t,recordId:n,blobId:r,name:i}).returning();return o}async getAttachments(e,t,n){return this.db.query.storageAttachments.findMany({where:{recordType:e,recordId:t,...n&&{name:n}},with:{blob:!0}})}async getAttachmentsByIds(e){return e.length===0?[]:this.db.query.storageAttachments.findMany({where:{id:{in:e}},with:{blob:!0}})}async deleteAttachment(t){return((await this.db.delete(this.db._.fullSchema.storageAttachments).where((0,e.eq)(this.db._.fullSchema.storageAttachments.id,t))).rowCount??0)>0}async deleteAttachments(t,n,r){let i=r?(0,e.and)((0,e.eq)(this.db._.fullSchema.storageAttachments.recordType,t),(0,e.eq)(this.db._.fullSchema.storageAttachments.recordId,n),(0,e.eq)(this.db._.fullSchema.storageAttachments.name,r)):(0,e.and)((0,e.eq)(this.db._.fullSchema.storageAttachments.recordType,t),(0,e.eq)(this.db._.fullSchema.storageAttachments.recordId,n));return(await this.db.delete(this.db._.fullSchema.storageAttachments).where(i)).rowCount??0}async getDirectUploadUrl(e){let t=e.serviceName||this.defaultDisk,n=this.drive.use(t);try{let r=S();if(`putSignedUrl`in n){let i=await n.putSignedUrl(r,{expiresIn:e.expiresIn||3600,contentType:e.contentType}),[a]=await 
this.db.insert(this.db._.fullSchema.storageBlobs).values({key:r,filename:e.filename,contentType:e.contentType,metadata:{...e.metadata,pending:!0},serviceName:t,byteSize:0,checksum:``}).returning();return{url:i,key:r,blobId:a.id,headers:{"Content-Type":e.contentType||`application/octet-stream`}}}}catch{}return null}async finalizeDirectUpload(t,n){let r=await this.getBlob(t);if(!r)throw Error(`Blob ${t} not found`);let i=r.metadata&&typeof r.metadata==`object`?{...r.metadata}:{};delete i.pending,await this.db.update(this.db._.fullSchema.storageBlobs).set({byteSize:n,metadata:i}).where((0,e.eq)(this.db._.fullSchema.storageBlobs.id,t))}async updateBlobMetadata(t,n){let r=await this.getBlob(t);if(!r)throw Error(`Blob ${t} not found`);let i={...r.metadata&&typeof r.metadata==`object`?r.metadata:{},...n};await this.db.update(this.db._.fullSchema.storageBlobs).set({metadata:i}).where((0,e.eq)(this.db._.fullSchema.storageBlobs.id,t))}async getVariant(e,t){let n=d(`sha256`).update(JSON.stringify(t)).digest(`hex`),[r]=await this.db.query.storageVariantRecords.findMany({where:{blobId:e,variationDigest:n},with:{blob:!0},limit:1});return r?.blob||null}async createVariant(e,t,n){let r=await this.getBlob(e);if(!r)throw Error(`Blob ${e} not found`);let i=x(e,t),a=b(n);await this.drive.use(r.serviceName).put(i,n,{contentType:r.contentType||`application/octet-stream`});let[o]=await this.db.insert(this.db._.fullSchema.storageBlobs).values({key:i,filename:r.filename,contentType:r.contentType,metadata:{sourceBlob:e,transformations:t},serviceName:r.serviceName,byteSize:n.byteLength,checksum:a}).returning(),s=d(`sha256`).update(JSON.stringify(t)).digest(`hex`);return await this.db.insert(this.db._.fullSchema.storageVariantRecords).values({blobId:e,variationDigest:s,id:o.id}),o}async getUnattachedBlobs(t={}){let n=t.olderThan||new Date(Date.now()-2880*60*1e3).toISOString(),r=t.limit||1e3;return(await 
this.db.select({blob:this.db._.fullSchema.storageBlobs}).from(this.db._.fullSchema.storageBlobs).leftJoin(this.db._.fullSchema.storageAttachments,(0,e.eq)(this.db._.fullSchema.storageBlobs.id,this.db._.fullSchema.storageAttachments.blobId)).where((0,e.and)((0,e.isNull)(this.db._.fullSchema.storageAttachments.id),(0,e.lt)(this.db._.fullSchema.storageBlobs.createdAt,n))).limit(r)).map(e=>e.blob)}async getPendingBlobs(t=new Date(Date.now()-1440*60*1e3).toISOString()){return await this.db.select().from(this.db._.fullSchema.storageBlobs).where((0,e.and)((0,e.lt)(this.db._.fullSchema.storageBlobs.createdAt,t),e.sql`${this.db._.fullSchema.storageBlobs.metadata}->>'pending' = 'true'`))}async purgeUnattached(e){let t=await this.getUnattachedBlobs({olderThan:e}),n=0;for(let e of t)try{await this.deleteBlob(e.id),n++}catch(t){console.error(`Failed to purge blob ${e.id}:`,t)}return n}signedId(e,t=3600){if(!this.secret)throw Error(`Storage secret not configured`);let n={blobId:e,exp:Math.floor(Date.now()/1e3)+t},r=JSON.stringify(n),i=f(`sha256`,this.secret).update(r).digest(`base64url`);return`${Buffer.from(r).toString(`base64url`)}.${i}`}async findSigned(e){if(!this.secret)throw Error(`Storage secret not configured`);try{let[t,n]=e.split(`.`);if(!t||!n)return null;let r=Buffer.from(t,`base64url`).toString(),i=f(`sha256`,this.secret).update(r).digest(`base64url`),a=Buffer.from(n,`base64url`),o=Buffer.from(i,`base64url`);if(a.length!==o.length||!m(a,o))return null;let s=JSON.parse(r);return s.exp&&s.exp<Math.floor(Date.now()/1e3)?null:this.getBlob(s.blobId)}catch{return null}}one(e,r,i){let d=this,f=i;return{async attach(n){if(!await d.getBlob(n))throw Error(`Blob ${n} not found`);let i=await d.createAttachment(e,r,n,f,!0);return await t.start({blobId:n}),i},async get(){return(await d.getAttachments(e,r,f))[0]?.blob??null},async attached(){return await this.get()!==null},async url(e){let t=await this.get();return t?d.getSignedUrl(t.id,e):null},async publicUrl(){let e=await 
this.get();return e?d.getPublicUrl(e.id):null},async metadata(){return(await this.get())?.metadata??null},async analyzed(){return(await this.metadata())?.analyzed===!0},async representable(){let e=await this.get();return e?.contentType?[`image/`,`video/`,`application/pdf`].some(t=>e.contentType?.startsWith(t)):!1},async variant(e,t=3600){let r=await this.get();if(!r)return null;let i=await d.getVariant(r.id,e);return i?d.getSignedUrl(i.id,{expiresIn:t}):(await n.start({blobId:r.id,transformations:e}),null)},async preview(e=3600,t=1){let n=await this.get();if(!n)return null;let r=await d.getVariant(n.id,{preview:!0});return r?d.getSignedUrl(r.id,{expiresIn:e}):(await a.start({blobId:n.id,timeInSeconds:t}),null)},async detach(){return await d.deleteAttachments(e,r,f)>0},async purge(){let t=await d.getAttachments(e,r,f);if(t.length===0)return!1;let n=t[0].blob;return n?(await d.deleteAttachments(e,r,f),await d.deleteBlob(n.id),!0):!1},async purgeLater(){let t=await d.getAttachments(e,r,f);return t.length===0?!1:(await o.start({attachmentIds:[t[0].id]}),!0)},async download(){let e=await this.get();return e?d.downloadBlob(e.id):null},async open(e){let t=await this.get();if(!t)return null;let n=await d.downloadBlob(t.id);if(!n)return null;let r=await c(s(h(),`attachment-`)),i=s(r,t.filename);try{return await u(i,n),await e(i)}finally{await l(r,{recursive:!0,force:!0})}},async representation(e,t=3600){let n=await this.get();return n?n.contentType?.startsWith(`image/`)?this.variant(e,t):this.preview(t):null},async byteSize(){return(await this.get())?.byteSize??null},async contentType(){return(await this.get())?.contentType??null},async filename(){return(await this.get())?.filename??null},async signedId(e=3600){let t=await this.get();return t?d.signedId(t.id,e):null}}}many(e,r,i){let d=this,f=i;return{async attach(n){let i=Array.isArray(n)?n:[n],a=await Promise.all(i.map(e=>d.getBlob(e))),o=i.filter((e,t)=>!a[t]);if(o.length>0)throw Error(`Blobs not found: ${o.join(`, 
`)}`);let s=await Promise.all(i.map(t=>d.createAttachment(e,r,t,f)));return await Promise.all(i.map(e=>t.start({blobId:e}))),s},async list(){return(await d.getAttachments(e,r,f)).map(e=>e.blob).filter(e=>e!==null)},async count(){return(await this.list()).length},async urls(e){let t=await this.list();return(await Promise.all(t.map(t=>d.getSignedUrl(t.id,e)))).filter(e=>e!==null)},async publicUrls(){let e=await this.list();return Promise.all(e.map(e=>d.getPublicUrl(e.id)))},async metadata(){return(await this.list()).map(e=>e.metadata??null)},async analyzed(){return(await this.metadata()).map(e=>e?.analyzed===!0)},async representable(){return(await this.list()).map(e=>e.contentType?[`image/`,`video/`,`application/pdf`].some(t=>e.contentType?.startsWith(t)):!1)},async variants(e,t=3600){let r=await this.list();return Promise.all(r.map(async r=>{let i=await d.getVariant(r.id,e);return i?d.getSignedUrl(i.id,{expiresIn:t}):(await n.start({blobId:r.id,transformations:e}),null)}))},async previews(e=3600,t=1){let n=await this.list();return Promise.all(n.map(async n=>{let r=await d.getVariant(n.id,{preview:!0});return r?d.getSignedUrl(r.id,{expiresIn:e}):(await a.start({blobId:n.id,timeInSeconds:t}),null)}))},async detach(t){if(t){let n=(await d.getAttachments(e,r,f)).find(e=>e.blob?.id===t);return n?(await d.deleteAttachment(n.id),1):0}return d.deleteAttachments(e,r,f)},async purge(t){let n=await d.getAttachments(e,r,f);if(t){let e=n.find(e=>e.blob?.id===t);return e?(await d.deleteAttachment(e.id),await d.deleteBlob(t),1):0}let i=0;for(let e of n)await d.deleteAttachment(e.id),e.blob&&(await d.deleteBlob(e.blob.id),i++);return i},async purgeLater(t){let n=await d.getAttachments(e,r,f);if(t){let e=n.find(e=>e.blob?.id===t);return e?(await o.start({attachmentIds:[e.id]}),1):0}return n.length>0&&await o.start({attachmentIds:n.map(e=>e.id)}),n.length},async download(){let e=await this.list(),t=[];for(let n of e){let e=await d.downloadBlob(n.id);e&&t.push(e)}return t},async 
open(e){let t=await this.list(),n=[];for(let r of t){let t=await d.downloadBlob(r.id);if(!t)continue;let i=await c(s(h(),`attachment-`)),a=s(i,r.filename);try{await u(a,t),n.push(await e(a,r))}finally{await l(i,{recursive:!0,force:!0})}}return n},async representations(e,t=3600){let r=await this.list();return Promise.all(r.map(async r=>{if(r.contentType?.startsWith(`image/`)){let i=await d.getVariant(r.id,e);return i?d.getSignedUrl(i.id,{expiresIn:t}):(await n.start({blobId:r.id,transformations:e}),null)}let i=await d.getVariant(r.id,{preview:!0});return i?d.getSignedUrl(i.id,{expiresIn:t}):(await a.start({blobId:r.id}),null)}))},async byteSizes(){return(await this.list()).map(e=>e.byteSize)},async contentTypes(){return(await this.list()).map(e=>e.contentType)},async filenames(){return(await this.list()).map(e=>e.filename)},async signedIds(e=3600){return(await this.list()).map(t=>d.signedId(t.id,e))}}}};export{w as StorageService,y as defineS3Disk,C as defineStorage};
|
|
1
|
+
import{orm_exports as e}from"./orm.mjs";import{extractBlobMetadata as t}from"./workflows/extract-blob-metadata.mjs";import{generateImageVariant as n}from"./workflows/generate-image-variant.mjs";import{generatePreview as r}from"./workflows/generate-preview.mjs";import{purgeAttachment as i}from"./workflows/purge-attachment.mjs";import{join as a}from"node:path";import{mkdtemp as o,rm as s,writeFile as c}from"node:fs/promises";import{createHash as l,createHmac as u,randomBytes as d,timingSafeEqual as f}from"node:crypto";import{tmpdir as p}from"node:os";import{DriveManager as m}from"flydrive";import{FSDriver as h}from"flydrive/drivers/fs";import{S3Driver as g}from"flydrive/drivers/s3";function _(e){return{bucket:e.bucket,region:e.region,visibility:e.visibility??`private`,...e.endpoint&&{endpoint:e.endpoint,forcePathStyle:!0},...e.credentials&&{credentials:e.credentials}}}function v(e){return l(`md5`).update(e).digest(`hex`)}function y(e,t){return`variants/${e}/${l(`sha256`).update(JSON.stringify({blobId:e,transformations:t})).digest(`hex`)}`}function b(e){let t=`${Date.now()}-${d(16).toString(`hex`)}`;return e?`${e}/${t}`:t}function x(e){if(!e.default)throw Error(`Storage: 'default' disk must be specified`);if(!e.disks||Object.keys(e.disks).length===0)throw Error(`Storage: At least one disk must be configured`);let t=e.default,n={},r={};for(let[t,i]of Object.entries(e.disks)){let a=i;if(`driver`in a)if(a.driver===`fs`){let e=()=>new h(a);n[t]=e,r[t]=e}else a.driver===`s3`&&(n[t]=()=>new g(a),e.publicEndpoint?r[t]=()=>new g({...a,endpoint:e.publicEndpoint}):r[t]=()=>new g(a));else if(`location`in a){let e=()=>new h(a);n[t]=e,r[t]=e}else if(`bucket`in a)if(n[t]=()=>new g(a),e.publicEndpoint){let n={...a,endpoint:e.publicEndpoint};r[t]=()=>new g(n)}else r[t]=()=>new g(a)}let i=new m({default:t,services:n}),a=new m({default:t,services:r});return new S(i,e.database,t,a,e.purgeCron,e.secret)}var 
S=class{drive;db;defaultDisk;purgeCron;secret;signedUrlDrive;constructor(e,t,n,r,i,a){this.drive=e,this.db=t,this.defaultDisk=n,this.signedUrlDrive=r,this.purgeCron=i,this.secret=a}async createBlob(e,t){let n=b(t.prefix),r=Buffer.from(e),i=v(r),a=t.serviceName||this.defaultDisk;await(t.serviceName?this.drive.use(t.serviceName):this.drive.use()).put(n,r,{contentType:t.contentType});let[o]=await this.db.insert(this.db._.fullSchema.storageBlobs).values({key:n,filename:t.filename,contentType:t.contentType,metadata:t.metadata,serviceName:a,byteSize:r.byteLength,checksum:i}).returning();return o}async getBlob(t){let[n]=await this.db.select().from(this.db._.fullSchema.storageBlobs).where((0,e.eq)(this.db._.fullSchema.storageBlobs.id,t));return n||null}async downloadBlob(e){let t=await this.getBlob(e);if(!t)return null;let n=await this.drive.use(t.serviceName).getBytes(t.key);return Buffer.from(n)}async deleteBlob(t){let n=await this.getBlob(t);return n?(await this.drive.use(n.serviceName).delete(n.key),await this.db.delete(this.db._.fullSchema.storageBlobs).where((0,e.eq)(this.db._.fullSchema.storageBlobs.id,t)),!0):!1}async getSignedUrl(e,t={}){let n=await this.getBlob(e);if(!n)return null;if(n.serviceName===`public`)return this.getPublicUrl(e);let r=this.signedUrlDrive.use(n.serviceName);try{let e={expiresIn:t.expiresIn||3600};if(t.disposition){let r=t.filename||n.filename;e.contentDisposition=t.disposition===`attachment`?`attachment; filename="${r}"`:`inline; filename="${r}"`}return await r.getSignedUrl(n.key,e)}catch{let e=`/storage/${n.id}`;return t.disposition?`${e}?${new URLSearchParams({disposition:t.disposition,...t.filename&&{filename:t.filename}}).toString()}`:e}}async getPublicUrl(e){let t=await this.getBlob(e);if(!t)return null;let n=this.drive.use(t.serviceName);if(t.serviceName===`public`)try{if(`getUrl`in n&&typeof n.getUrl==`function`)return await n.getUrl(t.key)}catch{}return`/storage/${t.id}`}async createAttachment(t,n,r,i,a=!1){if(a){let 
a=`${t}:${n}:${i}`,o=Array.from(a).reduce((e,t)=>(e<<5)-e+t.charCodeAt(0)|0,0);return await this.db.transaction(async a=>{await a.execute(e.sql`SELECT pg_advisory_xact_lock(${o})`),await a.delete(this.db._.fullSchema.storageAttachments).where((0,e.and)((0,e.eq)(this.db._.fullSchema.storageAttachments.recordType,t),(0,e.eq)(this.db._.fullSchema.storageAttachments.recordId,n),(0,e.eq)(this.db._.fullSchema.storageAttachments.name,i)));let[s]=await a.insert(this.db._.fullSchema.storageAttachments).values({recordType:t,recordId:n,blobId:r,name:i}).returning();return s})}let[o]=await this.db.insert(this.db._.fullSchema.storageAttachments).values({recordType:t,recordId:n,blobId:r,name:i}).returning();return o}async getAttachments(e,t,n){return this.db.query.storageAttachments.findMany({where:{recordType:e,recordId:t,...n&&{name:n}},with:{blob:!0}})}async getAttachmentsByIds(e){return e.length===0?[]:this.db.query.storageAttachments.findMany({where:{id:{in:e}},with:{blob:!0}})}async deleteAttachment(t){return((await this.db.delete(this.db._.fullSchema.storageAttachments).where((0,e.eq)(this.db._.fullSchema.storageAttachments.id,t))).rowCount??0)>0}async deleteAttachments(t,n,r){let i=r?(0,e.and)((0,e.eq)(this.db._.fullSchema.storageAttachments.recordType,t),(0,e.eq)(this.db._.fullSchema.storageAttachments.recordId,n),(0,e.eq)(this.db._.fullSchema.storageAttachments.name,r)):(0,e.and)((0,e.eq)(this.db._.fullSchema.storageAttachments.recordType,t),(0,e.eq)(this.db._.fullSchema.storageAttachments.recordId,n));return(await this.db.delete(this.db._.fullSchema.storageAttachments).where(i)).rowCount??0}async getDirectUploadUrl(e){let t=e.serviceName||this.defaultDisk,n=this.drive.use(t);try{let r=b();if(`putSignedUrl`in n){let i=await n.putSignedUrl(r,{expiresIn:e.expiresIn||3600,contentType:e.contentType}),[a]=await 
this.db.insert(this.db._.fullSchema.storageBlobs).values({key:r,filename:e.filename,contentType:e.contentType,metadata:{...e.metadata,pending:!0},serviceName:t,byteSize:0,checksum:``}).returning();return{url:i,key:r,blobId:a.id,headers:{"Content-Type":e.contentType||`application/octet-stream`}}}}catch{}return null}async finalizeDirectUpload(t,n){let r=await this.getBlob(t);if(!r)throw Error(`Blob ${t} not found`);let i=r.metadata&&typeof r.metadata==`object`?{...r.metadata}:{};delete i.pending,await this.db.update(this.db._.fullSchema.storageBlobs).set({byteSize:n,metadata:i}).where((0,e.eq)(this.db._.fullSchema.storageBlobs.id,t))}async updateBlobMetadata(t,n){let r=await this.getBlob(t);if(!r)throw Error(`Blob ${t} not found`);let i={...r.metadata&&typeof r.metadata==`object`?r.metadata:{},...n};await this.db.update(this.db._.fullSchema.storageBlobs).set({metadata:i}).where((0,e.eq)(this.db._.fullSchema.storageBlobs.id,t))}async getVariant(e,t){let n=l(`sha256`).update(JSON.stringify(t)).digest(`hex`),[r]=await this.db.query.storageVariantRecords.findMany({where:{blobId:e,variationDigest:n},with:{blob:!0},limit:1});return r?.blob||null}async createVariant(e,t,n){let r=await this.getBlob(e);if(!r)throw Error(`Blob ${e} not found`);let i=y(e,t),a=v(n);await this.drive.use(r.serviceName).put(i,n,{contentType:r.contentType||`application/octet-stream`});let[o]=await this.db.insert(this.db._.fullSchema.storageBlobs).values({key:i,filename:r.filename,contentType:r.contentType,metadata:{sourceBlob:e,transformations:t},serviceName:r.serviceName,byteSize:n.byteLength,checksum:a}).returning(),s=l(`sha256`).update(JSON.stringify(t)).digest(`hex`);return await this.db.insert(this.db._.fullSchema.storageVariantRecords).values({blobId:e,variationDigest:s,id:o.id}),o}async getUnattachedBlobs(t={}){let n=t.olderThan||new Date(Date.now()-2880*60*1e3).toISOString(),r=t.limit||1e3;return(await 
this.db.select({blob:this.db._.fullSchema.storageBlobs}).from(this.db._.fullSchema.storageBlobs).leftJoin(this.db._.fullSchema.storageAttachments,(0,e.eq)(this.db._.fullSchema.storageBlobs.id,this.db._.fullSchema.storageAttachments.blobId)).where((0,e.and)((0,e.isNull)(this.db._.fullSchema.storageAttachments.id),(0,e.lt)(this.db._.fullSchema.storageBlobs.createdAt,n))).limit(r)).map(e=>e.blob)}async getPendingBlobs(t=new Date(Date.now()-1440*60*1e3).toISOString()){return await this.db.select().from(this.db._.fullSchema.storageBlobs).where((0,e.and)((0,e.lt)(this.db._.fullSchema.storageBlobs.createdAt,t),e.sql`${this.db._.fullSchema.storageBlobs.metadata}->>'pending' = 'true'`))}async purgeUnattached(e){let t=await this.getUnattachedBlobs({olderThan:e}),n=0;for(let e of t)try{await this.deleteBlob(e.id),n++}catch(t){console.error(`Failed to purge blob ${e.id}:`,t)}return n}signedId(e,t=3600){if(!this.secret)throw Error(`Storage secret not configured`);let n={blobId:e,exp:Math.floor(Date.now()/1e3)+t},r=JSON.stringify(n),i=u(`sha256`,this.secret).update(r).digest(`base64url`);return`${Buffer.from(r).toString(`base64url`)}.${i}`}async findSigned(e){if(!this.secret)throw Error(`Storage secret not configured`);try{let[t,n]=e.split(`.`);if(!t||!n)return null;let r=Buffer.from(t,`base64url`).toString(),i=u(`sha256`,this.secret).update(r).digest(`base64url`),a=Buffer.from(n,`base64url`),o=Buffer.from(i,`base64url`);if(a.length!==o.length||!f(a,o))return null;let s=JSON.parse(r);return s.exp&&s.exp<Math.floor(Date.now()/1e3)?null:this.getBlob(s.blobId)}catch{return null}}one(e,l,u){let d=this,f=u;return{async attach(n){if(!await d.getBlob(n))throw Error(`Blob ${n} not found`);let r=await d.createAttachment(e,l,n,f,!0);return await t.start({blobId:n}),r},async get(){return(await d.getAttachments(e,l,f))[0]?.blob??null},async attached(){return await this.get()!==null},async url(e){let t=await this.get();return t?d.getSignedUrl(t.id,e):null},async publicUrl(){let e=await 
this.get();return e?d.getPublicUrl(e.id):null},async metadata(){return(await this.get())?.metadata??null},async analyzed(){return(await this.metadata())?.analyzed===!0},async representable(){let e=await this.get();return e?.contentType?[`image/`,`video/`,`application/pdf`].some(t=>e.contentType?.startsWith(t)):!1},async variant(e,t=3600){let r=await this.get();if(!r)return null;let i=await d.getVariant(r.id,e);return i?d.getSignedUrl(i.id,{expiresIn:t}):(await n.start({blobId:r.id,transformations:e}),null)},async preview(e=3600,t=1){let n=await this.get();if(!n)return null;let i=await d.getVariant(n.id,{preview:!0});return i?d.getSignedUrl(i.id,{expiresIn:e}):(await r.start({blobId:n.id,timeInSeconds:t}),null)},async detach(){return await d.deleteAttachments(e,l,f)>0},async purge(){let t=await d.getAttachments(e,l,f);if(t.length===0)return!1;let n=t[0].blob;return n?(await d.deleteAttachments(e,l,f),await d.deleteBlob(n.id),!0):!1},async purgeLater(){let t=await d.getAttachments(e,l,f);return t.length===0?!1:(await i.start({attachmentIds:[t[0].id]}),!0)},async download(){let e=await this.get();return e?d.downloadBlob(e.id):null},async open(e){let t=await this.get();if(!t)return null;let n=await d.downloadBlob(t.id);if(!n)return null;let r=await o(a(p(),`attachment-`)),i=a(r,t.filename);try{return await c(i,n),await e(i)}finally{await s(r,{recursive:!0,force:!0})}},async representation(e,t=3600){let n=await this.get();return n?n.contentType?.startsWith(`image/`)?this.variant(e,t):this.preview(t):null},async byteSize(){return(await this.get())?.byteSize??null},async contentType(){return(await this.get())?.contentType??null},async filename(){return(await this.get())?.filename??null},async signedId(e=3600){let t=await this.get();return t?d.signedId(t.id,e):null}}}many(e,l,u){let d=this,f=u;return{async attach(n){let r=Array.isArray(n)?n:[n],i=await Promise.all(r.map(e=>d.getBlob(e))),a=r.filter((e,t)=>!i[t]);if(a.length>0)throw Error(`Blobs not found: ${a.join(`, 
`)}`);let o=await Promise.all(r.map(t=>d.createAttachment(e,l,t,f)));return await Promise.all(r.map(e=>t.start({blobId:e}))),o},async list(){return(await d.getAttachments(e,l,f)).map(e=>e.blob).filter(e=>e!==null)},async count(){return(await this.list()).length},async urls(e){let t=await this.list();return(await Promise.all(t.map(t=>d.getSignedUrl(t.id,e)))).filter(e=>e!==null)},async publicUrls(){let e=await this.list();return Promise.all(e.map(e=>d.getPublicUrl(e.id)))},async metadata(){return(await this.list()).map(e=>e.metadata??null)},async analyzed(){return(await this.metadata()).map(e=>e?.analyzed===!0)},async representable(){return(await this.list()).map(e=>e.contentType?[`image/`,`video/`,`application/pdf`].some(t=>e.contentType?.startsWith(t)):!1)},async variants(e,t=3600){let r=await this.list();return Promise.all(r.map(async r=>{let i=await d.getVariant(r.id,e);return i?d.getSignedUrl(i.id,{expiresIn:t}):(await n.start({blobId:r.id,transformations:e}),null)}))},async previews(e=3600,t=1){let n=await this.list();return Promise.all(n.map(async n=>{let i=await d.getVariant(n.id,{preview:!0});return i?d.getSignedUrl(i.id,{expiresIn:e}):(await r.start({blobId:n.id,timeInSeconds:t}),null)}))},async detach(t){if(t){let n=(await d.getAttachments(e,l,f)).find(e=>e.blob?.id===t);return n?(await d.deleteAttachment(n.id),1):0}return d.deleteAttachments(e,l,f)},async purge(t){let n=await d.getAttachments(e,l,f);if(t){let e=n.find(e=>e.blob?.id===t);return e?(await d.deleteAttachment(e.id),await d.deleteBlob(t),1):0}let r=0;for(let e of n)await d.deleteAttachment(e.id),e.blob&&(await d.deleteBlob(e.blob.id),r++);return r},async purgeLater(t){let n=await d.getAttachments(e,l,f);if(t){let e=n.find(e=>e.blob?.id===t);return e?(await i.start({attachmentIds:[e.id]}),1):0}return n.length>0&&await i.start({attachmentIds:n.map(e=>e.id)}),n.length},async download(){let e=await this.list(),t=[];for(let n of e){let e=await d.downloadBlob(n.id);e&&t.push(e)}return t},async 
open(e){let t=await this.list(),n=[];for(let r of t){let t=await d.downloadBlob(r.id);if(!t)continue;let i=await o(a(p(),`attachment-`)),l=a(i,r.filename);try{await c(l,t),n.push(await e(l,r))}finally{await s(i,{recursive:!0,force:!0})}}return n},async representations(e,t=3600){let i=await this.list();return Promise.all(i.map(async i=>{if(i.contentType?.startsWith(`image/`)){let r=await d.getVariant(i.id,e);return r?d.getSignedUrl(r.id,{expiresIn:t}):(await n.start({blobId:i.id,transformations:e}),null)}let a=await d.getVariant(i.id,{preview:!0});return a?d.getSignedUrl(a.id,{expiresIn:t}):(await r.start({blobId:i.id}),null)}))},async byteSizes(){return(await this.list()).map(e=>e.byteSize)},async contentTypes(){return(await this.list()).map(e=>e.contentType)},async filenames(){return(await this.list()).map(e=>e.filename)},async signedIds(e=3600){return(await this.list()).map(t=>d.signedId(t.id,e))}}}};export{S as StorageService,_ as defineS3Disk,x as defineStorage};
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{defineWorkflow as e}from"
|
|
1
|
+
import{defineWorkflow as e}from"../workflow.mjs";import t from"zod";import{join as n}from"node:path";import{ALL_FORMATS as r,BlobSource as i,Input as a}from"mediabunny";import o from"sharp";const s=e({input:t.object({blobId:t.string()}),async run({container:e,input:{blobId:t},step:s}){let c=await s(`fetch-blob`,async()=>e.storage.primary.getBlob(t));if(!c)throw Error(`Blob ${t} not found`);let l=await s(`download-blob`,async()=>e.storage.primary.downloadBlob(t));if(!l)throw Error(`Failed to download blob ${t}`);let u={};return c.contentType?.startsWith(`image/`)?u=await s(`extract-image-metadata`,async()=>{let e=await o(l).metadata();return{width:e.width,height:e.height,format:e.format,hasAlpha:e.hasAlpha,space:e.space}}):c.contentType?.startsWith(`video/`)||c.contentType?.startsWith(`audio/`)?u=await s(`extract-media-metadata`,async()=>{let e=new Uint8Array(l),t=new a({source:new i(new Blob([e],{type:c.contentType||`video/mp4`})),formats:r}),n=await t.computeDuration(),o=await t.getMetadataTags(),s={},u={},d=!1,f=!1;try{let e=await t.getPrimaryVideoTrack();if(e){d=!0;let t=e.displayWidth&&e.displayHeight?e.displayWidth/e.displayHeight:null;s={width:e.displayWidth,height:e.displayHeight,rotation:e.rotation,angle:e.rotation,displayAspectRatio:t}}}catch{}try{let e=await t.getPrimaryAudioTrack();e&&(f=!0,u={sampleRate:e.sampleRate,channels:e.numberOfChannels})}catch{}return{duration:n,video:d,audio:f,...s,...u,tags:o}}):c.contentType===`application/pdf`&&(u=await s(`extract-pdf-metadata`,async()=>{try{let e=await import(`pdfjs-dist/legacy/build/pdf.mjs`),t=`${n(process.cwd(),`node_modules/pdfjs-dist/standard_fonts`)}/`,r=await e.getDocument({data:new Uint8Array(l),standardFontDataUrl:t}).promise,i=await r.getMetadata(),a=(await 
r.getPage(1)).getViewport({scale:1}),o=i.info;return{pageCount:r.numPages,width:a.width,height:a.height,title:o?.Title||null,author:o?.Author||null,subject:o?.Subject||null,keywords:o?.Keywords||null,creator:o?.Creator||null,producer:o?.Producer||null,creationDate:o?.CreationDate||null,modificationDate:o?.ModDate||null,pdfVersion:o?.PDFFormatVersion||null}}catch(n){return e.logger.error({error:n,errorMessage:n instanceof Error?n.message:String(n),errorStack:n instanceof Error?n.stack:void 0,errorCode:n?.code,blobId:t},`Failed to extract PDF metadata`),{error:`Failed to extract PDF metadata`,errorMessage:n instanceof Error?n.message:String(n)}}})),await s(`save-metadata`,async()=>{await e.storage.primary.updateBlobMetadata(t,{...u,analyzed:!0})}),e.logger.info({blobId:t,metadata:u},`Metadata extracted`),{...u,analyzed:!0}}});export{s as extractBlobMetadata};
|
|
@@ -1,5 +1,5 @@
|
|
|
1
|
-
import "
|
|
2
|
-
import "
|
|
1
|
+
import "../workflow.mjs";
|
|
2
|
+
import "../index.mjs";
|
|
3
3
|
import { z } from "zod";
|
|
4
4
|
|
|
5
5
|
//#region src/api/workflows/generate-image-variant.d.ts
|
|
@@ -60,4 +60,4 @@ declare const transformationsSchema: z.ZodObject<{
|
|
|
60
60
|
*/
|
|
61
61
|
type ImageTransformations = z.infer<typeof transformationsSchema>;
|
|
62
62
|
//#endregion
|
|
63
|
-
export { ImageTransformations
|
|
63
|
+
export { ImageTransformations };
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{defineWorkflow as e}from"
|
|
1
|
+
import { defineWorkflow } from "../workflow.mjs";
import { z } from "zod";
import sharp from "sharp";

/** Options forwarded verbatim to sharp's resize step. */
const resizeSchema = z.object({
  width: z.number().optional(),
  height: z.number().optional(),
  fit: z.enum([`cover`, `contain`, `fill`, `inside`, `outside`]).optional(),
  position: z.enum([`top`, `right top`, `right`, `right bottom`, `bottom`, `left bottom`, `left`, `left top`, `centre`]).optional(),
  kernel: z.enum([`nearest`, `linear`, `cubic`, `mitchell`, `lanczos2`, `lanczos3`]).optional(),
});

/** Full set of transformations a variant request may carry. */
const transformationsSchema = z.object({
  resize: resizeSchema.optional(),
  rotate: z.number().optional(),
  flip: z.boolean().optional(),
  flop: z.boolean().optional(),
  sharpen: z.boolean().optional(),
  blur: z.number().optional(),
  grayscale: z.boolean().optional(),
  format: z.enum([`jpeg`, `png`, `webp`, `avif`, `gif`]).optional(),
  quality: z.number().min(1).max(100).optional(),
  preview: z.literal(true).optional(),
});

/**
 * Workflow: downloads a blob, applies the requested sharp transformations,
 * and stores the result as a variant of that blob. Throws if the blob record
 * is missing or its bytes cannot be downloaded.
 */
const generateImageVariantWorkflow = defineWorkflow({
  input: z.object({ blobId: z.string(), transformations: transformationsSchema }),
  async run({ container, input: { blobId, transformations }, step }) {
    // Verify the blob record exists before touching its bytes.
    const blobRecord = await step(`fetch-blob`, async () => container.storage.primary.getBlob(blobId));
    if (!blobRecord) throw Error(`Blob ${blobId} not found`);

    const sourceBytes = await step(`download-blob`, async () => container.storage.primary.downloadBlob(blobId));
    if (!sourceBytes) throw Error(`Failed to download blob ${blobId}`);

    // Build a sharp pipeline, applying each requested transformation in order.
    const outputBuffer = await step(`apply-transformations`, async () => {
      let pipeline = sharp(sourceBytes);
      const opts = transformations;
      if (opts.resize) {
        pipeline = pipeline.resize({
          width: opts.resize.width,
          height: opts.resize.height,
          fit: opts.resize.fit,
          position: opts.resize.position,
          kernel: opts.resize.kernel,
        });
      }
      if (opts.rotate !== undefined) pipeline = pipeline.rotate(opts.rotate);
      if (opts.flip) pipeline = pipeline.flip();
      if (opts.flop) pipeline = pipeline.flop();
      if (opts.sharpen) pipeline = pipeline.sharpen();
      if (opts.blur !== undefined) pipeline = pipeline.blur(opts.blur);
      if (opts.grayscale) pipeline = pipeline.grayscale();
      // quality is only meaningful together with an explicit output format.
      if (opts.format) pipeline = pipeline.toFormat(opts.format, { quality: opts.quality });
      return pipeline.toBuffer();
    });

    const variant = await step(`store-variant`, async () => container.storage.primary.createVariant(blobId, transformations, outputBuffer));
    container.logger.info({ blobId, variantId: variant.id }, `Image variant generated`);
    return variant;
  },
});

export { generateImageVariantWorkflow as generateImageVariant };
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{defineWorkflow as e}from"
|
|
1
|
+
import{defineWorkflow as e}from"../workflow.mjs";import t from"zod";import{join as n}from"node:path";import r from"sharp";import{spawn as i}from"node:child_process";import{createCanvas as a}from"canvas";const o=e({input:t.object({blobId:t.string(),timeInSeconds:t.number().optional()}),async run({container:e,input:{blobId:t,timeInSeconds:o=1},step:s}){let c=await s(`fetch-blob`,async()=>e.storage.primary.getBlob(t));if(!c)throw Error(`Blob ${t} not found`);let l=await s(`download-blob`,async()=>e.storage.primary.downloadBlob(t));if(!l)throw Error(`Failed to download blob ${t}`);let u=null;if(c.contentType?.startsWith(`video/`))u=await s(`generate-video-preview`,async()=>new Promise((n,a)=>{try{let s=i(`ffmpeg`,[`-i`,`pipe:0`,`-ss`,o.toString(),`-frames:v`,`1`,`-f`,`image2pipe`,`-c:v`,`png`,`pipe:1`]),c=[],u=[];s.stdout.on(`data`,e=>{c.push(e)}),s.stderr.on(`data`,e=>{u.push(e)}),s.on(`close`,async i=>{if(i===0)try{n(await r(Buffer.concat(c)).jpeg({quality:80}).toBuffer())}catch(n){e.logger.error({error:n,blobId:t},`Failed to convert video frame to JPEG`),a(n)}else{let n=Buffer.concat(u).toString(),r=Error(`FFmpeg exited with code ${i}: ${n}`);e.logger.error({error:r,blobId:t,code:i,stderr:n},`Failed to generate video preview`),a(r)}}),s.on(`error`,n=>{e.logger.error({error:n,blobId:t},`Failed to spawn FFmpeg process`),a(n)}),s.stdin.on(`error`,n=>{n.code!==`EPIPE`&&e.logger.error({error:n,blobId:t},`Failed to write to FFmpeg stdin`)}),s.stdin.write(l),s.stdin.end()}catch(n){e.logger.error({error:n,blobId:t},`Failed to generate video preview`),a(n)}}));else if(c.contentType===`application/pdf`)u=await s(`generate-pdf-preview`,async()=>{try{let e=await import(`pdfjs-dist/legacy/build/pdf.mjs`),t=`${n(process.cwd(),`node_modules/pdfjs-dist/standard_fonts`)}/`,i=await(await e.getDocument({data:new Uint8Array(l),standardFontDataUrl:t}).promise).getPage(1),o=i.getViewport({scale:2}),s=a(o.width,o.height),c=s.getContext(`2d`);return await 
i.render({canvasContext:c,viewport:o,canvas:s}).promise,await r(s.toBuffer(`image/png`)).resize(800,800,{fit:`inside`,withoutEnlargement:!0}).jpeg({quality:85}).toBuffer()}catch(n){throw e.logger.error({error:n,errorMessage:n instanceof Error?n.message:String(n),errorStack:n instanceof Error?n.stack:void 0,errorCode:n?.code,blobId:t},`Failed to generate PDF preview`),n}});else if(c.contentType?.startsWith(`image/`))u=await s(`generate-image-preview`,async()=>await r(l).resize(800,800,{fit:`inside`,withoutEnlargement:!0}).jpeg({quality:85}).toBuffer());else throw Error(`Preview generation not supported for content type: ${c.contentType}`);let d=await s(`store-preview`,async()=>await e.storage.primary.createVariant(t,{preview:!0},u));return e.logger.info({blobId:t,previewId:d.id,contentType:c.contentType},`Preview generated`),d}});export{o as generatePreview};
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{defineWorkflow as e}from"
|
|
1
|
+
import { defineWorkflow } from "../workflow.mjs";
import z from "zod";

/**
 * Workflow: deletes one or more attachments together with their backing blobs.
 * Attachments whose blob reference is null are skipped entirely.
 * Returns { purgedCount } — the number of attachment/blob pairs removed.
 */
const purgeAttachmentWorkflow = defineWorkflow({
  input: z.object({ attachmentIds: z.array(z.string()).min(1) }),
  async run({ container, input: { attachmentIds }, step }) {
    // Resolve each attachment to its blob id, ignoring ones with no blob.
    const pairs = await step(`fetch-attachments`, async () => {
      const attachments = await container.storage.primary.getAttachmentsByIds(attachmentIds);
      return attachments
        .filter((attachment) => attachment.blob !== null)
        .map((attachment) => ({ attachmentId: attachment.id, blobId: attachment.blob.id }));
    });

    // Remove the attachment rows first, then the blobs they pointed at.
    await step(`delete-attachments`, async () => {
      for (const { attachmentId } of pairs) {
        await container.storage.primary.deleteAttachment(attachmentId);
      }
    });
    await step(`delete-blobs`, async () => {
      for (const { blobId } of pairs) {
        await container.storage.primary.deleteBlob(blobId);
      }
    });

    container.logger.info({ attachmentIds, blobCount: pairs.length }, `Attachments and blobs purged`);
    return { purgedCount: pairs.length };
  },
});

export { purgeAttachmentWorkflow as purgeAttachment };
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{defineAuthSchema as e}from"
|
|
1
|
+
import { defineAuthSchema } from "../auth-schema.mjs";
import { defineScheduledWorkflow } from "../workflow.mjs";
import { lt } from "drizzle-orm";

const authSchema = defineAuthSchema();

/**
 * Builds a scheduled workflow that deletes audit-log rows older than the
 * configured retention window (container.auth.auditLog.retentionDays,
 * defaulting to 90 days), measured back from the run's scheduledTime.
 * @param crontab cron schedule; defaults to daily at midnight.
 */
function buildPurgeAuditLogs(crontab = `0 0 * * *`) {
  return defineScheduledWorkflow({
    crontab,
    async run({ container, step, scheduledTime }) {
      const retentionDays = container.auth.auditLog?.retentionDays ?? 90;

      // Cutoff = scheduledTime minus the retention window, as an ISO string.
      const cutoff = new Date(scheduledTime);
      cutoff.setDate(cutoff.getDate() - retentionDays);
      const cutoffDate = cutoff.toISOString();

      const deletedCount = await step(`delete-old-logs`, async () => {
        const { auditLogs } = authSchema.tables;
        const deletedRows = await container.db.primary
          .delete(auditLogs)
          .where(lt(auditLogs.createdAt, cutoffDate))
          .returning({ id: auditLogs.id });
        return deletedRows.length;
      });

      container.logger.info({ deletedCount, retentionDays, cutoffDate }, `Audit log purge completed`);
    },
  });
}

export { buildPurgeAuditLogs as definePurgeAuditLogs };
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{defineScheduledWorkflow as e}from"
|
|
1
|
+
import { defineScheduledWorkflow } from "../workflow.mjs";

/**
 * Builds a scheduled workflow that removes orphaned blobs: blobs with no
 * attachment, plus pending blobs, both older than 2880 minutes (48 hours)
 * at the moment the run starts.
 * @param crontab cron schedule; defaults to daily at midnight.
 */
function buildPurgeUnattachedBlobs(crontab = `0 0 * * *`) {
  return defineScheduledWorkflow({
    crontab,
    async run({ container, step }) {
      const cutoff = new Date(Date.now() - 2880 * 60 * 1e3).toISOString();

      const unattached = await step(`fetch-unattached-blobs`, async () => container.storage.primary.getUnattachedBlobs({ olderThan: cutoff }));
      const pending = await step(`fetch-pending-blobs`, async () => container.storage.primary.getPendingBlobs(cutoff));
      const candidates = [...unattached, ...pending];

      let purgedCount = 0;
      for (const blob of candidates) {
        // NOTE(review): the step name `delete-blob` repeats for every blob in
        // the batch — presumably the step runner tolerates duplicate names;
        // confirm against the workflow engine's step semantics.
        await step(`delete-blob`, async () => {
          await container.storage.primary.deleteBlob(blob.id);
          purgedCount++;
        });
      }

      container.logger.info({ purgedCount, unattachedCount: unattached.length, pendingCount: pending.length }, `Orphaned blobs purged`);
    },
  });
}

export { buildPurgeUnattachedBlobs as definePurgeUnattachedBlobs };
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import { StorageBlob, StorageRelationsConfig, StorageTables } from "./storage-schema.mjs";
|
|
2
|
-
import { ImageTransformations
|
|
2
|
+
import { ImageTransformations } from "./generate-image-variant.mjs";
|
|
3
3
|
import { NodePgDatabase } from "drizzle-orm/node-postgres";
|
|
4
4
|
import { Pool } from "pg";
|
|
5
5
|
import { DriveManager } from "flydrive";
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{defineAuthSchema as e}from"
|
|
1
|
+
import { defineAuthSchema } from "../auth-schema.mjs";
import { dbChangesEvent } from "../event.mjs";
import { defineWorkflow } from "../workflow.mjs";
import { z } from "zod";

const authSchema = defineAuthSchema();

// One batch of row-level changes plus the request context they happened in.
const trackDbChangesInput = z.object({
  changes: z.array(z.object({
    _table: z.string(),
    old: z.record(z.string(), z.unknown()).nullable(),
    new: z.record(z.string(), z.unknown()).nullable(),
  })),
  dbName: z.string(),
  organizationId: z.string().nullable(),
  requestId: z.string(),
  sessionId: z.string().nullable(),
  userId: z.string().nullable(),
});

// No old row means INSERT; no new row means DELETE; otherwise UPDATE.
function classifyChange(change) {
  if (change.old === null) return `INSERT`;
  if (change.new === null) return `DELETE`;
  return `UPDATE`;
}

/**
 * Workflow: fans a batch of database changes out to (a) the audit log, for
 * tables container.auth.shouldAudit() approves, and (b) the dbChangesEvent
 * bus, for every change. Returns counts of processed/audited/published rows.
 */
const trackDbChangesWorkflow = defineWorkflow({
  input: trackDbChangesInput,
  async run({ container, step, input }) {
    const { dbName, changes, organizationId, userId, sessionId, requestId } = input;
    if (changes.length === 0) return { processed: 0, audited: 0, published: 0 };

    // Single timestamp shared by every audit row and event in the batch.
    const timestamp = new Date().toISOString();
    let audited = 0;
    let published = 0;

    for (const change of changes) {
      const action = classifyChange(change);
      const qualifiedTable = `${dbName}.${change._table}`;

      // Persist an audit row only for tables the auth config marks as audited.
      if (container.auth.shouldAudit(qualifiedTable)) {
        await step(`audit:${qualifiedTable}`, async () => {
          await container.db.primary.insert(authSchema.tables.auditLogs).values({
            tableName: qualifiedTable,
            action,
            oldData: change.old,
            newData: change.new,
            organizationId,
            userId,
            sessionId,
            requestId,
            createdAt: timestamp,
          });
        });
        audited++;
      }

      // Every change is published on the event bus regardless of auditing.
      await step(`event:${qualifiedTable}`, async () => {
        await dbChangesEvent.emit({
          action,
          oldData: change.old,
          newData: change.new,
          organizationId,
          tableName: qualifiedTable,
          timestamp,
          userId,
        });
      });
      published++;
    }

    return { processed: changes.length, audited, published };
  },
});

export { trackDbChangesWorkflow as trackDbChanges };
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import { StorageBlob, StorageRelationsConfig, StorageTables } from "./storage-schema.mjs";
|
|
2
|
-
import { ImageTransformations
|
|
2
|
+
import { ImageTransformations } from "./workflows/generate-image-variant.mjs";
|
|
3
3
|
import { NodePgDatabase } from "drizzle-orm/node-postgres";
|
|
4
4
|
import { Pool } from "pg";
|
|
5
5
|
import { DriveManager } from "flydrive";
|
|
@@ -7,7 +7,6 @@ import { DbChangeInput, DefineEventBusOptions, Event, EventBus, EventContext, db
|
|
|
7
7
|
import { DefineMailerOptions, Mailer, MailerPayload, MailerPayloadHtml, MailerPayloadReact, defineMailer } from "./mailer.mjs";
|
|
8
8
|
import { NewStorageAttachment, NewStorageBlob, NewStorageVariantRecord, StorageAttachment, StorageBlob, StorageRelations, StorageRelationsConfig, StorageTables, StorageVariantRecord, defineStorageSchema } from "./storage-schema.mjs";
|
|
9
9
|
import { ScheduledWorkflowContext, WorkflowContext, WorkflowHandle, defineScheduledWorkflow, defineWorkflow, loadWorkflows } from "./workflow.mjs";
|
|
10
|
-
import { ImageTransformations, ResizeOptions, resizeSchema, transformationsSchema } from "./workflows/generate-image-variant.mjs";
|
|
11
10
|
import { DatabaseWithStorage, DefineS3DiskOptions, DefineStorageOptions, Storage, StorageService, defineS3Disk, defineStorage } from "./storage.mjs";
|
|
12
11
|
import { AppContainer, Container, ServerConfig, WorkerConfig, defineAppContainer } from "./container.mjs";
|
|
13
12
|
import { AppContext, DefineAppContextOpts, SessionData, defineAppContext } from "./app-context.mjs";
|
|
@@ -21,7 +20,7 @@ import { CustomTypeOptions } from "i18next";
|
|
|
21
20
|
|
|
22
21
|
//#region src/api/index.d.ts
|
|
23
22
|
declare namespace index_d_exports {
|
|
24
|
-
export { AccessControlRoles, AccessController, AppContainer, AppContext, AuditAction, Auth, AuthConfig, AuthPasskeyConfig, AuthSessionConfig, Cache, Config, Container, CustomTypeOptions, Database, DatabaseWithStorage, DbChangeInput, DefineAppContextOpts, DefineAuthOptions, DefineCacheOptions, DefineDatabaseOptions, DefineEventBusOptions, DefineLoggerOptions, DefineMailerOptions, DefineOpenAPIConfig, DefineOpenAPIConfigInput, DefineOpenAPIReturn, DefineRedisClientOptions, DefineS3DiskOptions, DefineStorageOptions, DefineTestDatabaseOptions, Event, EventBus, EventContext, HandlerParams, I18nInitOptions,
|
|
23
|
+
export { AccessControlRoles, AccessController, AppContainer, AppContext, AuditAction, Auth, AuthConfig, AuthPasskeyConfig, AuthSessionConfig, Cache, Config, Container, CustomTypeOptions, Database, DatabaseWithStorage, DbChangeInput, DefineAppContextOpts, DefineAuthOptions, DefineCacheOptions, DefineDatabaseOptions, DefineEventBusOptions, DefineLoggerOptions, DefineMailerOptions, DefineOpenAPIConfig, DefineOpenAPIConfigInput, DefineOpenAPIReturn, DefineRedisClientOptions, DefineS3DiskOptions, DefineStorageOptions, DefineTestDatabaseOptions, Event, EventBus, EventContext, HandlerParams, I18nInitOptions, Logger, Mailer, MailerPayload, MailerPayloadHtml, MailerPayloadReact, Middleware, MigrationType, NewStorageAttachment, NewStorageBlob, NewStorageVariantRecord, OpenAPIMethodSpec, OpenAPIObjectConfigV31, OpenAPIRegistration, QualifiedTableNames, RedisClient, Role, RouteModule, ScheduledWorkflowContext, ServerConfig, SessionData, Storage, StorageAttachment, StorageBlob, StorageRelations, StorageRelationsConfig, StorageService, StorageTables, StorageVariantRecord, ValidationErrorResponse, WorkerConfig, WorkflowContext, WorkflowHandle, appDir, auditActionSchema, baseSchema, createAccessControl, dbChangeInputSchema, dbChanges, dbChangesEvent, defaultI18nOptions, defineAppContainer, defineAppContext, defineAuth, defineAuthSchema, defineCache, defineConfig, defineDatabase, defineEvent, defineEventBus, defineI18n, defineLogger, defineMailer, defineMiddleware, defineMigrationOpts, defineOpenAPI, defineOpenAPIConfig, defineOpenAPIEndpoint, defineRedisClient, defineS3Disk, defineScheduledWorkflow, defineStorage, defineStorageSchema, defineTestDatabase, defineTypedResponses, defineWorkflow, generateOpenAPIDocument, i18n$1 as i18n, loadAndRegisterAPIRoutes, loadEvents, loadMiddleware, loadWorkflows, migrationsSchema, registerRoutes, scanAPIRoutes, withOtelSpan, writeOpenAPISpecs };
|
|
25
24
|
}
|
|
26
25
|
//#endregion
|
|
27
26
|
export { index_d_exports };
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import { StorageBlob, StorageRelationsConfig, StorageTables } from "./storage-schema.mjs";
|
|
2
|
-
import { ImageTransformations
|
|
2
|
+
import { ImageTransformations } from "./workflows/generate-image-variant.mjs";
|
|
3
3
|
import { NodePgDatabase } from "drizzle-orm/node-postgres";
|
|
4
4
|
import { Pool } from "pg";
|
|
5
5
|
import { DriveManager } from "flydrive";
|
|
@@ -239,14 +239,14 @@ declare class StorageService<TDiskNames extends string = string, TTableNames ext
|
|
|
239
239
|
blobId: string;
|
|
240
240
|
blob: {
|
|
241
241
|
id: string;
|
|
242
|
+
createdAt: string;
|
|
242
243
|
key: string;
|
|
244
|
+
metadata: unknown;
|
|
243
245
|
filename: string;
|
|
244
246
|
contentType: string | null;
|
|
245
|
-
metadata: unknown;
|
|
246
247
|
serviceName: string;
|
|
247
248
|
byteSize: number;
|
|
248
249
|
checksum: string | null;
|
|
249
|
-
createdAt: string;
|
|
250
250
|
} | null;
|
|
251
251
|
}[]>;
|
|
252
252
|
/**
|
|
@@ -263,14 +263,14 @@ declare class StorageService<TDiskNames extends string = string, TTableNames ext
|
|
|
263
263
|
blobId: string;
|
|
264
264
|
blob: {
|
|
265
265
|
id: string;
|
|
266
|
+
createdAt: string;
|
|
266
267
|
key: string;
|
|
268
|
+
metadata: unknown;
|
|
267
269
|
filename: string;
|
|
268
270
|
contentType: string | null;
|
|
269
|
-
metadata: unknown;
|
|
270
271
|
serviceName: string;
|
|
271
272
|
byteSize: number;
|
|
272
273
|
checksum: string | null;
|
|
273
|
-
createdAt: string;
|
|
274
274
|
} | null;
|
|
275
275
|
}[]>;
|
|
276
276
|
/**
|
|
@@ -3,39 +3,7 @@ import "../index.mjs";
|
|
|
3
3
|
import { z } from "zod";
|
|
4
4
|
|
|
5
5
|
//#region src/api/workflows/generate-image-variant.d.ts
|
|
6
|
-
|
|
7
|
-
* Resize options schema for image transformations.
|
|
8
|
-
*/
|
|
9
|
-
declare const resizeSchema: z.ZodObject<{
|
|
10
|
-
width: z.ZodOptional<z.ZodNumber>;
|
|
11
|
-
height: z.ZodOptional<z.ZodNumber>;
|
|
12
|
-
fit: z.ZodOptional<z.ZodEnum<{
|
|
13
|
-
fill: "fill";
|
|
14
|
-
cover: "cover";
|
|
15
|
-
contain: "contain";
|
|
16
|
-
inside: "inside";
|
|
17
|
-
outside: "outside";
|
|
18
|
-
}>>;
|
|
19
|
-
position: z.ZodOptional<z.ZodEnum<{
|
|
20
|
-
left: "left";
|
|
21
|
-
right: "right";
|
|
22
|
-
top: "top";
|
|
23
|
-
"right top": "right top";
|
|
24
|
-
"right bottom": "right bottom";
|
|
25
|
-
bottom: "bottom";
|
|
26
|
-
"left bottom": "left bottom";
|
|
27
|
-
"left top": "left top";
|
|
28
|
-
centre: "centre";
|
|
29
|
-
}>>;
|
|
30
|
-
kernel: z.ZodOptional<z.ZodEnum<{
|
|
31
|
-
nearest: "nearest";
|
|
32
|
-
linear: "linear";
|
|
33
|
-
cubic: "cubic";
|
|
34
|
-
mitchell: "mitchell";
|
|
35
|
-
lanczos2: "lanczos2";
|
|
36
|
-
lanczos3: "lanczos3";
|
|
37
|
-
}>>;
|
|
38
|
-
}, z.core.$strip>;
|
|
6
|
+
|
|
39
7
|
/**
|
|
40
8
|
* Image transformations schema.
|
|
41
9
|
* Supports resize, rotate, flip, flop, sharpen, blur, grayscale, format conversion.
|
|
@@ -91,9 +59,5 @@ declare const transformationsSchema: z.ZodObject<{
|
|
|
91
59
|
* Types for image transformations.
|
|
92
60
|
*/
|
|
93
61
|
type ImageTransformations = z.infer<typeof transformationsSchema>;
|
|
94
|
-
/**
|
|
95
|
-
* Types for resize options.
|
|
96
|
-
*/
|
|
97
|
-
type ResizeOptions = z.infer<typeof resizeSchema>;
|
|
98
62
|
//#endregion
|
|
99
|
-
export { ImageTransformations
|
|
63
|
+
export { ImageTransformations };
|