appos 0.2.3-0 → 0.3.0-0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bin/event-Su-wo96Y.mjs +2 -0
- package/dist/bin/extract-blob-metadata-CPnQ5z-I.mjs +2 -0
- package/dist/bin/generate-image-variant-0jPXp8Gu.mjs +2 -0
- package/dist/bin/generate-preview-QDAk8NkM.mjs +2 -0
- package/dist/bin/main.mjs +13 -37
- package/dist/bin/{purge-attachment-CMlJMNOk.mjs → purge-attachment-BNMcVoEh.mjs} +1 -1
- package/dist/bin/{purge-audit-logs-hd6q6vnR.mjs → purge-audit-logs-C6bh2l3g.mjs} +1 -1
- package/dist/bin/{purge-unattached-blobs-BYv5b9R9.mjs → purge-unattached-blobs-CmLk3lNK.mjs} +1 -1
- package/dist/bin/{track-db-changes-q0Vl7Htm.mjs → track-db-changes-BUCo-YXD.mjs} +1 -1
- package/dist/bin/workflow-CsbetKWL.mjs +2 -0
- package/dist/exports/api/auth.mjs +1 -1
- package/dist/exports/api/constants.mjs +1 -0
- package/dist/exports/api/database.mjs +1 -1
- package/dist/exports/api/event.mjs +1 -1
- package/dist/exports/api/i18n.mjs +1 -1
- package/dist/exports/api/index.d.mts +2 -3
- package/dist/exports/api/index.mjs +1 -1
- package/dist/exports/api/instrumentation.mjs +1 -0
- package/dist/exports/api/middleware.mjs +1 -1
- package/dist/exports/api/openapi.d.mts +2 -2
- package/dist/exports/api/openapi.mjs +1 -1
- package/dist/exports/api/otel.d.mts +1 -1
- package/dist/exports/api/otel.mjs +1 -1
- package/dist/exports/api/storage.d.mts +1 -1
- package/dist/exports/api/storage.mjs +1 -1
- package/dist/exports/api/workflow.mjs +1 -1
- package/dist/exports/api/workflows/api/event.mjs +1 -0
- package/dist/exports/api/workflows/{orm.d.mts → api/orm.d.mts} +1 -1
- package/dist/exports/api/workflows/{storage.d.mts → api/storage.d.mts} +1 -1
- package/dist/exports/api/workflows/api/workflow.mjs +1 -0
- package/dist/exports/api/workflows/constants.mjs +1 -0
- package/dist/exports/api/workflows/extract-blob-metadata.mjs +1 -1
- package/dist/exports/api/workflows/generate-image-variant.d.mts +3 -3
- package/dist/exports/api/workflows/generate-image-variant.mjs +1 -1
- package/dist/exports/api/workflows/generate-preview.mjs +1 -1
- package/dist/exports/api/workflows/purge-attachment.mjs +1 -1
- package/dist/exports/api/workflows/purge-audit-logs.mjs +1 -1
- package/dist/exports/api/workflows/purge-unattached-blobs.mjs +1 -1
- package/dist/exports/api/workflows/track-db-changes.d.mts +2 -2
- package/dist/exports/api/workflows/track-db-changes.mjs +1 -1
- package/dist/exports/cli/api/event.mjs +1 -1
- package/dist/exports/cli/api/storage.d.mts +1 -1
- package/dist/exports/cli/api/workflow.mjs +1 -1
- package/dist/exports/cli/api/workflows/extract-blob-metadata.mjs +1 -1
- package/dist/exports/cli/api/workflows/generate-image-variant.d.mts +1 -1
- package/dist/exports/cli/api/workflows/generate-image-variant.mjs +1 -1
- package/dist/exports/cli/api/workflows/generate-preview.mjs +1 -1
- package/dist/exports/cli/constants.mjs +1 -0
- package/dist/exports/tests/api/app-context.mjs +1 -0
- package/dist/exports/tests/api/event.mjs +1 -0
- package/dist/exports/tests/{packages/appos/src/api → api}/index.d.mts +2 -3
- package/dist/exports/tests/api/middleware.mjs +1 -0
- package/dist/exports/tests/{packages/appos/src/api → api}/openapi.d.mts +2 -2
- package/dist/exports/tests/api/openapi.mjs +1 -0
- package/dist/exports/tests/{packages/appos/src/api → api}/orm.d.mts +1 -1
- package/dist/exports/tests/api/server.mjs +1 -0
- package/dist/exports/tests/{packages/appos/src/api → api}/storage.d.mts +5 -5
- package/dist/exports/tests/api/workflow.mjs +1 -0
- package/dist/exports/tests/api/workflows/extract-blob-metadata.mjs +1 -0
- package/dist/exports/tests/{packages/appos/src/api → api}/workflows/generate-image-variant.d.mts +2 -38
- package/dist/exports/tests/api/workflows/generate-image-variant.mjs +1 -0
- package/dist/exports/tests/api/workflows/generate-preview.mjs +1 -0
- package/dist/exports/tests/api.d.mts +3 -3
- package/dist/exports/tests/api.mjs +1 -1
- package/dist/exports/tests/constants.mjs +1 -0
- package/dist/exports/tests/instrumentation.mjs +1 -0
- package/package.json +8 -10
- package/dist/bin/event-v2sCJkNd.mjs +0 -2
- package/dist/bin/extract-blob-metadata-TqNd9w-6.mjs +0 -2
- package/dist/bin/generate-image-variant-D8H9FxgD.mjs +0 -2
- package/dist/bin/generate-preview-5jLZLX6I.mjs +0 -2
- package/dist/bin/workflow-BagSlsMp.mjs +0 -2
- package/dist/exports/api/node_modules/.bun/change-case@5.4.4/node_modules/change-case/dist/index.mjs +0 -1
- package/dist/exports/api/packages/appos/src/constants.mjs +0 -1
- package/dist/exports/api/packages/appos/src/instrumentation.mjs +0 -1
- package/dist/exports/api/workflows/event.mjs +0 -1
- package/dist/exports/api/workflows/workflow.mjs +0 -1
- package/dist/exports/tests/node_modules/.bun/change-case@5.4.4/node_modules/change-case/dist/index.mjs +0 -1
- package/dist/exports/tests/node_modules/.bun/rate-limit-redis@4.3.1_f1fa5524233c9c60/node_modules/rate-limit-redis/dist/index.mjs +0 -25
- package/dist/exports/tests/packages/appos/src/api/event.mjs +0 -1
- package/dist/exports/tests/packages/appos/src/api/middleware.mjs +0 -1
- package/dist/exports/tests/packages/appos/src/api/server.mjs +0 -1
- package/dist/exports/tests/packages/appos/src/api/workflow.mjs +0 -1
- package/dist/exports/tests/packages/appos/src/api/workflows/extract-blob-metadata.mjs +0 -1
- package/dist/exports/tests/packages/appos/src/api/workflows/generate-image-variant.mjs +0 -1
- package/dist/exports/tests/packages/appos/src/api/workflows/generate-preview.mjs +0 -1
- package/dist/exports/tests/packages/appos/src/constants.mjs +0 -1
- package/dist/exports/tests/packages/appos/src/instrumentation.mjs +0 -1
- /package/dist/exports/api/{packages/appos/src/instrumentation.d.mts → instrumentation.d.mts} +0 -0
- /package/dist/exports/api/{packages/appos/src/web → web}/auth.mjs +0 -0
- /package/dist/exports/api/workflows/{auth-schema.mjs → api/auth-schema.mjs} +0 -0
- /package/dist/exports/api/workflows/{auth.d.mts → api/auth.d.mts} +0 -0
- /package/dist/exports/api/workflows/{cache.d.mts → api/cache.d.mts} +0 -0
- /package/dist/exports/api/workflows/{config.d.mts → api/config.d.mts} +0 -0
- /package/dist/exports/api/workflows/{container.d.mts → api/container.d.mts} +0 -0
- /package/dist/exports/api/workflows/{database.d.mts → api/database.d.mts} +0 -0
- /package/dist/exports/api/workflows/{event.d.mts → api/event.d.mts} +0 -0
- /package/dist/exports/api/workflows/{logger.d.mts → api/logger.d.mts} +0 -0
- /package/dist/exports/api/workflows/{mailer.d.mts → api/mailer.d.mts} +0 -0
- /package/dist/exports/api/workflows/{redis.mjs → api/redis.mjs} +0 -0
- /package/dist/exports/api/workflows/{storage-schema.d.mts → api/storage-schema.d.mts} +0 -0
- /package/dist/exports/api/workflows/{workflow.d.mts → api/workflow.d.mts} +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/app-context.d.mts +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/auth-schema.d.mts +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/auth-schema.mjs +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/auth.d.mts +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/cache.d.mts +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/config.d.mts +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/container.d.mts +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/database.d.mts +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/database.mjs +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/event.d.mts +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/i18n.d.mts +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/logger.d.mts +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/mailer.d.mts +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/middleware/error-handler.mjs +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/middleware/health.mjs +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/middleware/i18n.mjs +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/middleware/request-logger.d.mts +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/middleware/request-logger.mjs +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/middleware/shutdown.mjs +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/middleware/timeout.mjs +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/middleware/youch-handler.mjs +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/middleware.d.mts +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/otel.d.mts +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/redis.d.mts +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/redis.mjs +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/storage-schema.d.mts +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/workflow.d.mts +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/workflows/purge-attachment.mjs +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/workflows/purge-audit-logs.mjs +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/workflows/purge-unattached-blobs.mjs +0 -0
- /package/dist/exports/tests/{packages/appos/src/api → api}/workflows/track-db-changes.mjs +0 -0
- /package/dist/exports/tests/{packages/appos/src/instrumentation.d.mts → instrumentation.d.mts} +0 -0
@@ -1 +1 @@
-import{defineWorkflow as e}from"../workflow.mjs";import{join as t}from"node:path";import n from"
+
import{defineWorkflow as e}from"../workflow.mjs";import{join as t}from"node:path";import n from"zod";import{spawn as r}from"node:child_process";const i=e({input:n.object({blobId:n.string(),timeInSeconds:n.number().optional()}),async run({container:e,input:{blobId:n,timeInSeconds:i=1},step:a}){let o=await a(`fetch-blob`,async()=>e.storage.primary.getBlob(n));if(!o)throw Error(`Blob ${n} not found`);let s=await a(`download-blob`,async()=>e.storage.primary.downloadBlob(n));if(!s)throw Error(`Failed to download blob ${n}`);let c=null;if(o.contentType?.startsWith(`video/`))c=await a(`generate-video-preview`,async()=>new Promise((t,a)=>{try{let o=r(`ffmpeg`,[`-i`,`pipe:0`,`-ss`,i.toString(),`-frames:v`,`1`,`-f`,`image2pipe`,`-c:v`,`png`,`pipe:1`]),c=[],l=[];o.stdout.on(`data`,e=>{c.push(e)}),o.stderr.on(`data`,e=>{l.push(e)}),o.on(`close`,async r=>{if(r===0)try{let e=Buffer.concat(c),n=(await import(`sharp`)).default;t(await n(e).jpeg({quality:80}).toBuffer())}catch(t){e.logger.error({error:t,blobId:n},`Failed to convert video frame to JPEG`),a(t)}else{let t=Buffer.concat(l).toString(),i=Error(`FFmpeg exited with code ${r}: ${t}`);e.logger.error({error:i,blobId:n,code:r,stderr:t},`Failed to generate video preview`),a(i)}}),o.on(`error`,t=>{e.logger.error({error:t,blobId:n},`Failed to spawn FFmpeg process`),a(t)}),o.stdin.on(`error`,t=>{t.code!==`EPIPE`&&e.logger.error({error:t,blobId:n},`Failed to write to FFmpeg stdin`)}),o.stdin.write(s),o.stdin.end()}catch(t){e.logger.error({error:t,blobId:n},`Failed to generate video preview`),a(t)}}));else if(o.contentType===`application/pdf`)c=await a(`generate-pdf-preview`,async()=>{try{let e=await import(`pdfjs-dist/legacy/build/pdf.mjs`),{createCanvas:n}=await import(`canvas`),r=(await import(`sharp`)).default,i=`${t(process.cwd(),`node_modules/pdfjs-dist/standard_fonts`)}/`,a=await(await e.getDocument({data:new Uint8Array(s),standardFontDataUrl:i}).promise).getPage(1),o=a.getViewport({scale:2}),c=n(o.width,o.height),l=c.getContext(`2d`);return await a.render({canvasContext:l,viewport:o,canvas:c}).promise,await r(c.toBuffer(`image/png`)).resize(800,800,{fit:`inside`,withoutEnlargement:!0}).jpeg({quality:85}).toBuffer()}catch(t){throw e.logger.error({error:t,errorMessage:t instanceof Error?t.message:String(t),errorStack:t instanceof Error?t.stack:void 0,errorCode:t?.code,blobId:n},`Failed to generate PDF preview`),t}});else if(o.contentType?.startsWith(`image/`))c=await a(`generate-image-preview`,async()=>{let e=(await import(`sharp`)).default;return await e(s).resize(800,800,{fit:`inside`,withoutEnlargement:!0}).jpeg({quality:85}).toBuffer()});else throw Error(`Preview generation not supported for content type: ${o.contentType}`);let l=await a(`store-preview`,async()=>await e.storage.primary.createVariant(n,{preview:!0},c));return e.logger.info({blobId:n,previewId:l.id,contentType:o.contentType},`Preview generated`),l}});export{i as generatePreview};
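
Note: the added generate-preview bundle above inlines the whole workflow on one line. As a readability aid, this is roughly what its video branch does (a de-minified sketch; identifier names are inferred from the minified output and may differ from the source):

  // Sketch of the FFmpeg frame-grab used for video previews in the bundle above.
  // A single frame is read at `timeInSeconds`, piped out as PNG, then re-encoded
  // to JPEG with sharp (quality 80), mirroring the minified logic.
  import { spawn } from "node:child_process";

  function extractVideoFrame(video: Buffer, timeInSeconds = 1): Promise<Buffer> {
    return new Promise((resolve, reject) => {
      const ffmpeg = spawn("ffmpeg", [
        "-i", "pipe:0",                  // read the downloaded blob from stdin
        "-ss", timeInSeconds.toString(), // seek to the requested timestamp
        "-frames:v", "1",                // grab exactly one frame
        "-f", "image2pipe", "-c:v", "png",
        "pipe:1",                        // write the PNG to stdout
      ]);
      const out: Buffer[] = [];
      const err: Buffer[] = [];
      ffmpeg.stdout.on("data", (chunk) => out.push(chunk));
      ffmpeg.stderr.on("data", (chunk) => err.push(chunk));
      ffmpeg.on("close", async (code) => {
        if (code !== 0) {
          return reject(new Error(`FFmpeg exited with code ${code}: ${Buffer.concat(err)}`));
        }
        const sharp = (await import("sharp")).default;
        resolve(await sharp(Buffer.concat(out)).jpeg({ quality: 80 }).toBuffer());
      });
      ffmpeg.on("error", reject);
      ffmpeg.stdin.on("error", () => {}); // the bundle only logs non-EPIPE stdin errors
      ffmpeg.stdin.write(video);
      ffmpeg.stdin.end();
    });
  }
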
@@ -0,0 +1 @@
+process.env.NODE_ENV;export{};
@@ -0,0 +1 @@
+import"better-auth/node";export{};
@@ -0,0 +1 @@
+
import"../constants.mjs";import"./redis.mjs";import{glob as e}from"node:fs/promises";import{basename as t,join as n}from"node:path";import{camelCase as r}from"es-toolkit";import{z as i}from"zod";function a(e){let t=null,n=null;return{inputSchema:e.input,get name(){return n},register(e,r){t=e,n=r},async emit(r){if(!t||!n)throw Error(`Event not registered. Ensure the worker is started before emitting events.`);let i=e.input.parse(r),a={container:t,input:i};await e.run(a),t.eventBus.publish(n,i).catch(e=>{t.logger.error({err:e,event:n},`Redis publish failed`)})},async subscribe(r){if(!t||!n)throw Error(`Event not registered. Ensure the worker is started before subscribing.`);return t.eventBus.subscribe(n,async i=>{let a=e.input.parse(i),o={container:t,input:a};try{await r(o)}catch(e){t.logger.error({err:e,event:n},`Event subscription handler error`)}})}}}const o=a({input:i.object({action:i.enum([`INSERT`,`UPDATE`,`DELETE`]),newData:i.record(i.string(),i.unknown()).nullable(),oldData:i.record(i.string(),i.unknown()).nullable(),organizationId:i.string().nullable(),tableName:i.string(),timestamp:i.string(),userId:i.string().nullable()}),async run(){}});export{o as dbChangesEvent};
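
The event module added above bundles the event helper on a single line. De-minified, the pattern it implements looks roughly like this (the name defineEvent is taken from the package's export list; the exact types are inferred, not confirmed):

  // Sketch: an event runs its local handler first, then fans out over the
  // container's Redis-backed eventBus; subscribe() re-parses payloads and guards handlers.
  import { z } from "zod";

  function defineEvent<S extends z.ZodTypeAny>(def: {
    input: S;
    run(ctx: { container: any; input: z.infer<S> }): Promise<void> | void;
  }) {
    let container: any = null;
    let name: string | null = null;
    return {
      inputSchema: def.input,
      get name() { return name; },
      register(c: any, n: string) { container = c; name = n; },
      async emit(payload: unknown) {
        if (!container || !name) throw new Error("Event not registered.");
        const input = def.input.parse(payload);
        await def.run({ container, input });
        container.eventBus.publish(name, input).catch((err: unknown) =>
          container.logger.error({ err, event: name }, "Redis publish failed"));
      },
      async subscribe(handler: (ctx: { container: any; input: z.infer<S> }) => Promise<void> | void) {
        if (!container || !name) throw new Error("Event not registered.");
        return container.eventBus.subscribe(name, async (payload: unknown) => {
          try { await handler({ container, input: def.input.parse(payload) }); }
          catch (err) { container.logger.error({ err, event: name }, "Event subscription handler error"); }
        });
      },
    };
  }
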
@@ -7,21 +7,20 @@ import { DbChangeInput, DefineEventBusOptions, Event, EventBus, EventContext, db
 import { DefineMailerOptions, Mailer, MailerPayload, MailerPayloadHtml, MailerPayloadReact, defineMailer } from "./mailer.mjs";
 import { NewStorageAttachment, NewStorageBlob, NewStorageVariantRecord, StorageAttachment, StorageBlob, StorageRelations, StorageRelationsConfig, StorageTables, StorageVariantRecord, defineStorageSchema } from "./storage-schema.mjs";
 import { ScheduledWorkflowContext, WorkflowContext, WorkflowHandle, defineScheduledWorkflow, defineWorkflow, loadWorkflows } from "./workflow.mjs";
-import { ImageTransformations, ResizeOptions, resizeSchema, transformationsSchema } from "./workflows/generate-image-variant.mjs";
 import { DatabaseWithStorage, DefineS3DiskOptions, DefineStorageOptions, Storage, StorageService, defineS3Disk, defineStorage } from "./storage.mjs";
 import { AppContainer, Container, ServerConfig, WorkerConfig, defineAppContainer } from "./container.mjs";
 import { AppContext, DefineAppContextOpts, SessionData, defineAppContext } from "./app-context.mjs";
 import { defineAuthSchema } from "./auth-schema.mjs";
 import { I18nInitOptions, defaultI18nOptions, defineI18n, i18n as i18n$1 } from "./i18n.mjs";
 import { Middleware, defineMiddleware, loadMiddleware } from "./middleware.mjs";
-import { DefineOpenAPIConfig, DefineOpenAPIConfigInput, DefineOpenAPIReturn, HandlerParams, OpenAPIMethodSpec, OpenAPIObjectConfigV31, OpenAPIRegistration, RouteModule, ValidationErrorResponse,
+import { DefineOpenAPIConfig, DefineOpenAPIConfigInput, DefineOpenAPIReturn, HandlerParams, OpenAPIMethodSpec, OpenAPIObjectConfigV31, OpenAPIRegistration, RouteModule, ValidationErrorResponse, defineOpenAPI, defineOpenAPIConfig, defineOpenAPIEndpoint, defineTypedResponses, generateOpenAPIDocument, loadAndRegisterAPIRoutes, registerRoutes, scanAPIRoutes, writeOpenAPISpecs } from "./openapi.mjs";
 import { withOtelSpan } from "./otel.mjs";
 import { DefineRedisClientOptions, RedisClient, defineRedisClient } from "./redis.mjs";
 import { CustomTypeOptions } from "i18next";
 
 //#region src/api/index.d.ts
 declare namespace index_d_exports {
-
export { AccessControlRoles, AccessController, AppContainer, AppContext, AuditAction, Auth, AuthConfig, AuthPasskeyConfig, AuthSessionConfig, Cache, Config, Container, CustomTypeOptions, Database, DatabaseWithStorage, DbChangeInput, DefineAppContextOpts, DefineAuthOptions, DefineCacheOptions, DefineDatabaseOptions, DefineEventBusOptions, DefineLoggerOptions, DefineMailerOptions, DefineOpenAPIConfig, DefineOpenAPIConfigInput, DefineOpenAPIReturn, DefineRedisClientOptions, DefineS3DiskOptions, DefineStorageOptions, DefineTestDatabaseOptions, Event, EventBus, EventContext, HandlerParams, I18nInitOptions,
+
export { AccessControlRoles, AccessController, AppContainer, AppContext, AuditAction, Auth, AuthConfig, AuthPasskeyConfig, AuthSessionConfig, Cache, Config, Container, CustomTypeOptions, Database, DatabaseWithStorage, DbChangeInput, DefineAppContextOpts, DefineAuthOptions, DefineCacheOptions, DefineDatabaseOptions, DefineEventBusOptions, DefineLoggerOptions, DefineMailerOptions, DefineOpenAPIConfig, DefineOpenAPIConfigInput, DefineOpenAPIReturn, DefineRedisClientOptions, DefineS3DiskOptions, DefineStorageOptions, DefineTestDatabaseOptions, Event, EventBus, EventContext, HandlerParams, I18nInitOptions, Logger, Mailer, MailerPayload, MailerPayloadHtml, MailerPayloadReact, Middleware, MigrationType, NewStorageAttachment, NewStorageBlob, NewStorageVariantRecord, OpenAPIMethodSpec, OpenAPIObjectConfigV31, OpenAPIRegistration, QualifiedTableNames, RedisClient, Role, RouteModule, ScheduledWorkflowContext, ServerConfig, SessionData, Storage, StorageAttachment, StorageBlob, StorageRelations, StorageRelationsConfig, StorageService, StorageTables, StorageVariantRecord, ValidationErrorResponse, WorkerConfig, WorkflowContext, WorkflowHandle, auditActionSchema, baseSchema, createAccessControl, dbChangeInputSchema, dbChanges, dbChangesEvent, defaultI18nOptions, defineAppContainer, defineAppContext, defineAuth, defineAuthSchema, defineCache, defineConfig, defineDatabase, defineEvent, defineEventBus, defineI18n, defineLogger, defineMailer, defineMiddleware, defineMigrationOpts, defineOpenAPI, defineOpenAPIConfig, defineOpenAPIEndpoint, defineRedisClient, defineS3Disk, defineScheduledWorkflow, defineStorage, defineStorageSchema, defineTestDatabase, defineTypedResponses, defineWorkflow, generateOpenAPIDocument, i18n$1 as i18n, loadAndRegisterAPIRoutes, loadEvents, loadMiddleware, loadWorkflows, migrationsSchema, registerRoutes, scanAPIRoutes, withOtelSpan, writeOpenAPISpecs };
 }
 //#endregion
 export { index_d_exports };
@@ -0,0 +1 @@
+
import{FILE_EXT as e}from"../constants.mjs";import t,{glob as n}from"node:fs/promises";import{basename as r}from"node:path";import{camelCase as i}from"es-toolkit";function a(e){return i(r(e,`.ts`).replace(/^\d+_/,``))}async function o(i,o,s){try{await t.access(i)}catch{return}let c=[];for await(let t of n(`${i}/**/*.${e}`))!t.endsWith(`.test.ts`)&&!t.endsWith(`.spec.ts`)&&!t.endsWith(`.test.js`)&&!t.endsWith(`.spec.js`)&&c.push(t);c.sort((e,t)=>r(e).localeCompare(r(t)));for(let e of c)try{let t=(await import(e)).default;if(t&&typeof t.handler==`function`){let n=t.name??a(e);o.logger.debug({name:n,file:e},`Loading user middleware`),s.use(t.handler(o))}else o.logger.warn({file:e},`Middleware file missing default export with handler function`)}catch(t){throw o.logger.error({file:e,error:t instanceof Error?t.message:t},`Failed to load middleware`),t}}export{o as loadMiddleware};
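
The loadMiddleware bundle above globs api/middleware/**, sorts files by basename, and expects each file's default export to provide a handler(container) factory plus an optional name. A user middleware file would therefore look roughly like this (the file name and handler body are hypothetical):

  // api/middleware/10_request-id.ts (hypothetical) — shape expected by loadMiddleware:
  // a default export whose handler(container) returns an Express-style middleware.
  import { randomUUID } from "node:crypto";

  export default {
    name: "requestId", // optional; otherwise derived from the file name (numeric prefix stripped, camelCased)
    handler(container: unknown) {
      return (req: any, res: any, next: () => void) => {
        res.setHeader("x-request-id", randomUUID()); // illustrative behaviour only
        next();
      };
    },
  };
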
@@ -5,7 +5,7 @@ import { z } from "zod";
 import * as _node_modules_zod_openapi_dist_components_B1DX_zYv_mjs0 from "#node_modules/zod-openapi/dist/components-B1DX_zYv.mjs";
 
 //#region src/api/openapi.d.ts
-
+
 /**
  * Utility type to enforce exact type matching (no extra properties).
  * This uses a tuple check to prevent objects with extra keys from being assigned.
@@ -268,4 +268,4 @@ declare function writeOpenAPISpecs<TContainer extends Container>(container: TCon
 */
 declare function loadAndRegisterAPIRoutes<TContainer extends Container>(app: Express): Promise<void>;
 //#endregion
-export { DefineOpenAPIConfig, DefineOpenAPIConfigInput, DefineOpenAPIReturn, HandlerParams, OpenAPIMethodSpec, OpenAPIObjectConfigV31, OpenAPIRegistration, RouteModule, ValidationErrorResponse,
+export { DefineOpenAPIConfig, DefineOpenAPIConfigInput, DefineOpenAPIReturn, HandlerParams, OpenAPIMethodSpec, OpenAPIObjectConfigV31, OpenAPIRegistration, RouteModule, ValidationErrorResponse, defineOpenAPI, defineOpenAPIConfig, defineOpenAPIEndpoint, defineTypedResponses, generateOpenAPIDocument, loadAndRegisterAPIRoutes, registerRoutes, scanAPIRoutes, writeOpenAPISpecs };
@@ -0,0 +1 @@
+
import{APPOS_DIR as e,FILE_EXT as t,PUBLIC_DIR as n,ROUTES_DIR as r}from"../constants.mjs";import"./app-context.mjs";import{access as i,mkdir as a,writeFile as o}from"node:fs/promises";import{join as s,resolve as c}from"node:path";import{remixRoutesOptionAdapter as l}from"@react-router/remix-routes-option-adapter";import{isEmpty as u}from"es-toolkit/compat";import{flatRoutes as d}from"remix-flat-routes";import{z as f}from"zod";import{createDocument as p}from"zod-openapi";const m=c(n,`openapi`);function h(e){return e.match(/^\/(v\d+)/)?.[1]}function g(e){return e.replace(/:(\w+)/g,`{$1}`)}function _(e,t){let n=g(e),r={summary:t.summary,description:t.description};(t.request?.params||t.request?.query||t.request?.headers)&&(r.requestParams={},t.request.params&&(r.requestParams.path=t.request.params),t.request.query&&(r.requestParams.query=t.request.query),t.request.headers&&(r.requestParams.header=t.request.headers)),t.request?.body&&(r.requestBody={content:{"application/json":{schema:t.request.body}}}),r.responses={};for(let[e,n]of Object.entries(t.responses))r.responses[e]={description:n.description,content:{"application/json":{schema:n.content[`application/json`].schema}}};return{path:n,config:r}}function v(e,t,n){let r={};for(let n of e){if(n.version!==t)continue;let e=n.path.replace(`/${t}`,``)||`/`;for(let t of n.openAPISpec){let{path:n,config:i}=_(e,t);r[n]||(r[n]={}),r[n][t.method]=i}}return p({openapi:`3.1.0`,info:n?.info||{title:`API ${t.toUpperCase()}`,version:t.replace(`v`,``),description:`OpenAPI specification for ${t} API`},servers:n?.servers||[{url:`http://localhost:8000/${t}`,description:process.env.NODE_ENV===`production`?`Production server`:`Development server`}],paths:r})}async function y(t){let n=c(e,r);try{await i(n)}catch(e){return t.logger.error({error:e},`OpenAPI routes directory not found`),[]}let a=await l(t=>d(r,t,{appDir:e,ignoredRouteFiles:[`**/.*`,`**/*.{spec,test}.{ts,tsx}`,`**/*-????????.{js,ts}`]})),o=[];for(let[n,r]of Object.entries(a)){let n=c(e,r.file).replace(/.ts$/,process.env.NODE_ENV===`production`?`.js`:`.ts`);try{let i=(await import(n)).default;if(!i||u(i)||/\/openapi.(j|t)s$/.test(n))continue;if(!i?.handlers&&!i?.openAPISpec){t.logger.warn(`Missing default export with 'defineOpenAPI' for '${e}/${r.file}'`);continue}let a=`/${r.path}`;o.push({path:a,filePath:n,handlers:i.handlers,openAPISpec:i.openAPISpec,version:h(a)})}catch(e){t.logger.error(e,`Error loading route ${r.file}:`)}}return o}function b(e,t){for(let n of t)for(let[t,r]of Object.entries(n.handlers))e[t.toLowerCase()](n.path,r)}async function x(n,i){let a=c(e,r,`${i}+/openapi.${t}`);try{let e=(await import(a)).default;return e?e(n):void 0}catch{return}}async function S(e,t){let n=[...new Set(t.map(e=>e.version).filter(Boolean))];n.length>0&&await a(m,{recursive:!0});for(let r of n){let n=v(t,r,await x(e,r));await o(s(m,`${r}.json`),JSON.stringify(n,null,2),`utf-8`)}}async function C(e){let t=await y(e.locals.container);b(e,t),process.env.NODE_ENV!==`production`&&await S(e.locals.container,t)}export{C as loadAndRegisterAPIRoutes};
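
The rewritten route loader above scans APPOS_DIR/ROUTES_DIR (api/routes) via remix-flat-routes, registers each module's handlers on the Express app, and, outside production, writes one OpenAPI 3.1 document per /v{n} path prefix to public/openapi. What it reads from a route file's default export is sketched below (the route, schema, and handler are hypothetical; the package's defineOpenAPI/defineOpenAPIEndpoint helpers are presumably the intended way to produce this shape):

  // Sketch of the default export consumed by loadAndRegisterAPIRoutes:
  // `handlers` is keyed by HTTP method and mounted as app[method](path, handler);
  // each `openAPISpec` entry feeds the generated public/openapi/v1.json document.
  import { z } from "zod";

  export default {
    handlers: {
      get: (_req: any, res: any) => res.json({ ok: true }),
    },
    openAPISpec: [
      {
        method: "get",
        summary: "Ping",
        description: "Hypothetical ping endpoint.",
        responses: {
          200: {
            description: "OK",
            content: { "application/json": { schema: z.object({ ok: z.boolean() }) } },
          },
        },
      },
    ],
  };
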
@@ -1,4 +1,4 @@
-import { __export, __reExport } from "
+import { __export, __reExport } from "../_virtual/rolldown_runtime.mjs";
 import { index, isPgEnum, isPgMaterializedView, isPgSchema, isPgSequence, isPgView, numeric, parsePgArray, parsePgNestedArray, pgEnum, pgMaterializedView, pgPolicy, pgRole, pgSchema, pgSequence, pgTable as pgTable$1, pgTableCreator, pgView, serial, smallint, smallserial, sparsevec, unique, uniqueIndex, uniqueKeyName, withReplicas } from "drizzle-orm/pg-core";
 export * from "drizzle-orm";
 export * from "drizzle-seed";
@@ -0,0 +1 @@
+
import{PUBLIC_DIR as e}from"../constants.mjs";import{defineErrorHandlerMiddleware as t}from"./middleware/error-handler.mjs";import{defineHealthMiddleware as n}from"./middleware/health.mjs";import{defineI18nMiddleware as r}from"./middleware/i18n.mjs";import{defineRequestLoggerMiddleware as i}from"./middleware/request-logger.mjs";import{defineShutdownMiddleware as a}from"./middleware/shutdown.mjs";import{defineTimeoutMiddleware as o}from"./middleware/timeout.mjs";import{loadMiddleware as s}from"./middleware.mjs";import{loadAndRegisterAPIRoutes as c}from"./openapi.mjs";import{join as l,resolve as u}from"node:path";import{toNodeHandler as d}from"better-auth/node";import f from"cors";import{rateLimit as p}from"express-rate-limit";import m from"helmet";import{RedisStore as h}from"rate-limit-redis";import{createClient as g}from"redis";import _ from"ultimate-express";async function v({container:v}){let y=!1,{host:b=`0.0.0.0`,port:x,timeout:S=3e4,bodyLimit:C=`1mb`,healthPath:w=`/health`,cors:T,helmet:E,rateLimit:D,redisUrl:O}=v.server,k=_();k.locals.container=v;let A=null;if(O&&D)try{A=g({url:O}),A.on(`error`,e=>{v.logger.error({error:e.message},`Redis client error`)}),await A.connect(),v.logger.info(`Connected to Redis for rate limiting`)}catch(e){v.logger.error({error:e instanceof Error?e.message:e},`Failed to connect to Redis, falling back to in-memory rate limiting`),A=null}if(k.use(a(v.logger,()=>y),o(v.logger,S),i(v.logger),n(w)),E!==void 0&&k.use(m(E)),T!==void 0&&k.use(f(T)),D)for(let[e,t]of D.entries()){let n=p({windowMs:t.windowMs??60*1e3,limit:t.limit??100,standardHeaders:t.standardHeaders??`draft-8`,legacyHeaders:t.legacyHeaders??!1,skip:t.skip,keyGenerator:t.keyGenerator,handler:t.handler,message:t.message,statusCode:t.statusCode,requestPropertyName:t.requestPropertyName,skipFailedRequests:t.skipFailedRequests,skipSuccessfulRequests:t.skipSuccessfulRequests,requestWasSuccessful:t.requestWasSuccessful,validate:t.validate,store:A?new h({sendCommand:(...e)=>A.sendCommand(e),prefix:`rl:${e}:`}):t.store});k.use(n)}if(k.use(r(v.i18n),_.json({limit:C}),_.urlencoded({extended:!0,limit:C})),await s(l(process.cwd(),`api`,`middleware`),v,k),k.get(`/`,(e,t)=>{t.json({message:`${v.config.APP_NAME} server is running.`})}).all(`${v.auth.options.basePath}/*`,d(v.auth)),await c(k),k.use(_.static(u(e))),process.env.NODE_ENV!==`production`){let{defineYouchErrorHandler:e}=await import(`./middleware/youch-handler.mjs`);k.use(e(v.logger))}else k.use(t(v.logger));return{app:k,host:b,port:x,async start(){await new Promise((e,t)=>{try{k.listen(x,b,()=>{e()})}catch(e){t(e)}})},async close(){if(y=!0,k.uwsApp.close(),A)try{await A.destroy()}catch(e){v.logger.warn({error:e instanceof Error?e.message:e},`Error disconnecting from Redis`)}}}}export{v as defineServer};
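
The new server bundle above reads its options from container.server and now wires express-rate-limit to a RedisStore (rate-limit-redis) whenever both redisUrl and a rateLimit config are provided, falling back to in-memory limiting if the Redis connection fails. The field names below are read from the minified destructuring; the values are illustrative only:

  // Sketch of the server options consumed by the bundled defineServer.
  const server = {
    host: "0.0.0.0",        // default
    port: 8000,
    timeout: 30_000,        // per-request timeout, default 30s
    bodyLimit: "1mb",       // JSON / urlencoded body limit
    healthPath: "/health",
    cors: {},               // passed through to cors() when defined
    helmet: {},             // passed through to helmet() when defined
    redisUrl: "redis://localhost:6379", // enables the shared rate-limit-redis store
    rateLimit: new Map([
      // iterated via .entries(); one express-rate-limit instance per entry,
      // with the key used as the Redis prefix "rl:<key>:"
      ["api", { windowMs: 60_000, limit: 100, standardHeaders: "draft-8" }],
    ]),
  };
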
@@ -1,5 +1,5 @@
 import { StorageBlob, StorageRelationsConfig, StorageTables } from "./storage-schema.mjs";
-import { ImageTransformations
+import { ImageTransformations } from "./workflows/generate-image-variant.mjs";
 import { NodePgDatabase } from "drizzle-orm/node-postgres";
 import { Pool } from "pg";
 import { DriveManager } from "flydrive";
@@ -239,14 +239,14 @@ declare class StorageService<TDiskNames extends string = string, TTableNames ext
 blobId: string;
 blob: {
 id: string;
+createdAt: string;
 key: string;
+metadata: unknown;
 filename: string;
 contentType: string | null;
-metadata: unknown;
 serviceName: string;
 byteSize: number;
 checksum: string | null;
-createdAt: string;
 } | null;
 }[]>;
 /**
@@ -263,14 +263,14 @@ declare class StorageService<TDiskNames extends string = string, TTableNames ext
 blobId: string;
 blob: {
 id: string;
+createdAt: string;
 key: string;
+metadata: unknown;
 filename: string;
 contentType: string | null;
-metadata: unknown;
 serviceName: string;
 byteSize: number;
 checksum: string | null;
-createdAt: string;
 } | null;
 }[]>;
 /**
@@ -0,0 +1 @@
+
import"../constants.mjs";import{glob as e}from"node:fs/promises";import{basename as t,join as n}from"node:path";import{camelCase as r}from"es-toolkit";function i(e){let t=null,n=null,r=null,i=null,a=async i=>{if(!t||!r)throw Error(`Workflow "${n}" not registered`);let a=r,o=a.workflowID;if(!o)throw Error(`DBOS.workflowID is not available in this context`);let s={container:t,workflowId:o,input:i,step:(e,t)=>a.runStep(t,{name:e})};return e.run(s)};return{inputSchema:e.input,get name(){return n},register(o,s,c){t=o,n=s,r=c,i=c.registerWorkflow(a,{name:s,...e.config})},async start(t){if(!i||!n||!r)throw Error(`Workflow not registered. Ensure the worker is started before triggering workflows.`);let a=e.input.parse(t),o=await r.startWorkflow(i)(a);return{workflowId:o.workflowID,getStatus:()=>o.getStatus(),getResult:()=>o.getResult()}}}}function a(e){let t=null,n=null,r=null,i=async(i,a)=>{if(!t||!r)throw Error(`Workflow "${n}" not registered`);let o=r,s=o.workflowID;if(!s)throw Error(`DBOS.workflowID is not available in this context`);let c={container:t,workflowId:s,scheduledTime:i,step:(e,t)=>o.runStep(t,{name:e})};return e.run(c)};return{crontab:e.crontab,get name(){return n},register(a,o,s){t=a,n=o,r=s,s.registerScheduled(s.registerWorkflow(i,{name:o}),{crontab:e.crontab})}}}export{a as defineScheduledWorkflow,i as defineWorkflow};
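
The workflow runtime added above exposes defineWorkflow and defineScheduledWorkflow; each bundled workflow in this release follows the same pattern: a zod input schema plus a run handler that receives the container and a step() wrapper around named DBOS steps. A de-minified usage sketch (the relative import mirrors what the bundled workflows use; an application would import from the package's workflow export instead, and the body here is illustrative):

  // Sketch of the defineWorkflow pattern used by the bundled workflows.
  import { defineWorkflow } from "../workflow.mjs"; // path as used inside the bundle
  import { z } from "zod";

  export default defineWorkflow({
    input: z.object({ blobId: z.string() }),
    async run({ container, input: { blobId }, step }) {
      // Each step() call is recorded as a named DBOS step.
      const blob = await step("fetch-blob", async () =>
        container.storage.primary.getBlob(blobId));
      if (!blob) throw new Error(`Blob ${blobId} not found`);
      container.logger.info({ blobId }, "Blob fetched"); // illustrative
      return blob;
    },
  });
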
@@ -0,0 +1 @@
+
import{defineWorkflow as e}from"../workflow.mjs";import{join as t}from"node:path";import n from"zod";import{ALL_FORMATS as r,BlobSource as i,Input as a}from"mediabunny";const o=e({input:n.object({blobId:n.string()}),async run({container:e,input:{blobId:n},step:o}){let s=await o(`fetch-blob`,async()=>e.storage.primary.getBlob(n));if(!s)throw Error(`Blob ${n} not found`);let c=await o(`download-blob`,async()=>e.storage.primary.downloadBlob(n));if(!c)throw Error(`Failed to download blob ${n}`);let l={};return s.contentType?.startsWith(`image/`)?l=await o(`extract-image-metadata`,async()=>{let e=(await import(`sharp`)).default,t=await e(c).metadata();return{width:t.width,height:t.height,format:t.format,hasAlpha:t.hasAlpha,space:t.space}}):s.contentType?.startsWith(`video/`)||s.contentType?.startsWith(`audio/`)?l=await o(`extract-media-metadata`,async()=>{let e=new Uint8Array(c),t=new a({source:new i(new Blob([e],{type:s.contentType||`video/mp4`})),formats:r}),n=await t.computeDuration(),o=await t.getMetadataTags(),l={},u={},d=!1,f=!1;try{let e=await t.getPrimaryVideoTrack();if(e){d=!0;let t=e.displayWidth&&e.displayHeight?e.displayWidth/e.displayHeight:null;l={width:e.displayWidth,height:e.displayHeight,rotation:e.rotation,angle:e.rotation,displayAspectRatio:t}}}catch{}try{let e=await t.getPrimaryAudioTrack();e&&(f=!0,u={sampleRate:e.sampleRate,channels:e.numberOfChannels})}catch{}return{duration:n,video:d,audio:f,...l,...u,tags:o}}):s.contentType===`application/pdf`&&(l=await o(`extract-pdf-metadata`,async()=>{try{let e=await import(`pdfjs-dist/legacy/build/pdf.mjs`),n=`${t(process.cwd(),`node_modules/pdfjs-dist/standard_fonts`)}/`,r=await e.getDocument({data:new Uint8Array(c),standardFontDataUrl:n}).promise,i=await r.getMetadata(),a=(await r.getPage(1)).getViewport({scale:1}),o=i.info;return{pageCount:r.numPages,width:a.width,height:a.height,title:o?.Title||null,author:o?.Author||null,subject:o?.Subject||null,keywords:o?.Keywords||null,creator:o?.Creator||null,producer:o?.Producer||null,creationDate:o?.CreationDate||null,modificationDate:o?.ModDate||null,pdfVersion:o?.PDFFormatVersion||null}}catch(t){return e.logger.error({error:t,errorMessage:t instanceof Error?t.message:String(t),errorStack:t instanceof Error?t.stack:void 0,errorCode:t?.code,blobId:n},`Failed to extract PDF metadata`),{error:`Failed to extract PDF metadata`,errorMessage:t instanceof Error?t.message:String(t)}}})),await o(`save-metadata`,async()=>{await e.storage.primary.updateBlobMetadata(n,{...l,analyzed:!0})}),e.logger.info({blobId:n,metadata:l},`Metadata extracted`),{...l,analyzed:!0}}});export{o as extractBlobMetadata};
package/dist/exports/tests/{packages/appos/src/api → api}/workflows/generate-image-variant.d.mts
RENAMED
@@ -3,39 +3,7 @@ import "../index.mjs";
 import { z } from "zod";
 
 //#region src/api/workflows/generate-image-variant.d.ts
-
- * Resize options schema for image transformations.
- */
-declare const resizeSchema: z.ZodObject<{
-width: z.ZodOptional<z.ZodNumber>;
-height: z.ZodOptional<z.ZodNumber>;
-fit: z.ZodOptional<z.ZodEnum<{
-fill: "fill";
-cover: "cover";
-contain: "contain";
-inside: "inside";
-outside: "outside";
-}>>;
-position: z.ZodOptional<z.ZodEnum<{
-left: "left";
-right: "right";
-top: "top";
-"right top": "right top";
-"right bottom": "right bottom";
-bottom: "bottom";
-"left bottom": "left bottom";
-"left top": "left top";
-centre: "centre";
-}>>;
-kernel: z.ZodOptional<z.ZodEnum<{
-nearest: "nearest";
-linear: "linear";
-cubic: "cubic";
-mitchell: "mitchell";
-lanczos2: "lanczos2";
-lanczos3: "lanczos3";
-}>>;
-}, z.core.$strip>;
+
 /**
  * Image transformations schema.
  * Supports resize, rotate, flip, flop, sharpen, blur, grayscale, format conversion.
@@ -91,9 +59,5 @@ declare const transformationsSchema: z.ZodObject<{
 * Types for image transformations.
 */
 type ImageTransformations = z.infer<typeof transformationsSchema>;
-/**
- * Types for resize options.
- */
-type ResizeOptions = z.infer<typeof resizeSchema>;
 //#endregion
-export { ImageTransformations
+export { ImageTransformations };
@@ -0,0 +1 @@
+
import{defineWorkflow as e}from"../workflow.mjs";import{z as t}from"zod";const n=t.object({width:t.number().optional(),height:t.number().optional(),fit:t.enum([`cover`,`contain`,`fill`,`inside`,`outside`]).optional(),position:t.enum([`top`,`right top`,`right`,`right bottom`,`bottom`,`left bottom`,`left`,`left top`,`centre`]).optional(),kernel:t.enum([`nearest`,`linear`,`cubic`,`mitchell`,`lanczos2`,`lanczos3`]).optional()}),r=t.object({resize:n.optional(),rotate:t.number().optional(),flip:t.boolean().optional(),flop:t.boolean().optional(),sharpen:t.boolean().optional(),blur:t.number().optional(),grayscale:t.boolean().optional(),format:t.enum([`jpeg`,`png`,`webp`,`avif`,`gif`]).optional(),quality:t.number().min(1).max(100).optional(),preview:t.literal(!0).optional()}),i=e({input:t.object({blobId:t.string(),transformations:r}),async run({container:e,input:{blobId:t,transformations:n},step:r}){if(!await r(`fetch-blob`,async()=>e.storage.primary.getBlob(t)))throw Error(`Blob ${t} not found`);let i=await r(`download-blob`,async()=>e.storage.primary.downloadBlob(t));if(!i)throw Error(`Failed to download blob ${t}`);let a=await r(`apply-transformations`,async()=>{let e=(await import(`sharp`)).default,t=e(i);return n.resize&&(t=t.resize({width:n.resize.width,height:n.resize.height,fit:n.resize.fit,position:n.resize.position,kernel:n.resize.kernel})),n.rotate!==void 0&&(t=t.rotate(n.rotate)),n.flip&&(t=t.flip()),n.flop&&(t=t.flop()),n.sharpen&&(t=t.sharpen()),n.blur!==void 0&&(t=t.blur(n.blur)),n.grayscale&&(t=t.grayscale()),n.format&&(t=t.toFormat(n.format,{quality:n.quality})),t.toBuffer()}),o=await r(`store-variant`,async()=>e.storage.primary.createVariant(t,n,a));return e.logger.info({blobId:t,variantId:o.id},`Image variant generated`),o}});export{i as generateImageVariant};
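
The transformations schema inlined above (and re-exported as the ImageTransformations type) accepts input of the following kind; the values here are illustrative, and the workflow is started with a blobId plus this object:

  // An input object accepted by the generate-image-variant transformations schema.
  const transformations = {
    resize: { width: 320, height: 320, fit: "cover", position: "centre", kernel: "lanczos3" },
    rotate: 90,          // degrees
    grayscale: true,
    format: "webp",      // jpeg | png | webp | avif | gif
    quality: 80,         // 1-100
  } as const;

  // e.g. generateImageVariant.start({ blobId, transformations }) // blobId assumed to be in scope
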
@@ -0,0 +1 @@
+
import{defineWorkflow as e}from"../workflow.mjs";import{join as t}from"node:path";import n from"zod";import{spawn as r}from"node:child_process";const i=e({input:n.object({blobId:n.string(),timeInSeconds:n.number().optional()}),async run({container:e,input:{blobId:n,timeInSeconds:i=1},step:a}){let o=await a(`fetch-blob`,async()=>e.storage.primary.getBlob(n));if(!o)throw Error(`Blob ${n} not found`);let s=await a(`download-blob`,async()=>e.storage.primary.downloadBlob(n));if(!s)throw Error(`Failed to download blob ${n}`);let c=null;if(o.contentType?.startsWith(`video/`))c=await a(`generate-video-preview`,async()=>new Promise((t,a)=>{try{let o=r(`ffmpeg`,[`-i`,`pipe:0`,`-ss`,i.toString(),`-frames:v`,`1`,`-f`,`image2pipe`,`-c:v`,`png`,`pipe:1`]),c=[],l=[];o.stdout.on(`data`,e=>{c.push(e)}),o.stderr.on(`data`,e=>{l.push(e)}),o.on(`close`,async r=>{if(r===0)try{let e=Buffer.concat(c),n=(await import(`sharp`)).default;t(await n(e).jpeg({quality:80}).toBuffer())}catch(t){e.logger.error({error:t,blobId:n},`Failed to convert video frame to JPEG`),a(t)}else{let t=Buffer.concat(l).toString(),i=Error(`FFmpeg exited with code ${r}: ${t}`);e.logger.error({error:i,blobId:n,code:r,stderr:t},`Failed to generate video preview`),a(i)}}),o.on(`error`,t=>{e.logger.error({error:t,blobId:n},`Failed to spawn FFmpeg process`),a(t)}),o.stdin.on(`error`,t=>{t.code!==`EPIPE`&&e.logger.error({error:t,blobId:n},`Failed to write to FFmpeg stdin`)}),o.stdin.write(s),o.stdin.end()}catch(t){e.logger.error({error:t,blobId:n},`Failed to generate video preview`),a(t)}}));else if(o.contentType===`application/pdf`)c=await a(`generate-pdf-preview`,async()=>{try{let e=await import(`pdfjs-dist/legacy/build/pdf.mjs`),{createCanvas:n}=await import(`canvas`),r=(await import(`sharp`)).default,i=`${t(process.cwd(),`node_modules/pdfjs-dist/standard_fonts`)}/`,a=await(await e.getDocument({data:new Uint8Array(s),standardFontDataUrl:i}).promise).getPage(1),o=a.getViewport({scale:2}),c=n(o.width,o.height),l=c.getContext(`2d`);return await a.render({canvasContext:l,viewport:o,canvas:c}).promise,await r(c.toBuffer(`image/png`)).resize(800,800,{fit:`inside`,withoutEnlargement:!0}).jpeg({quality:85}).toBuffer()}catch(t){throw e.logger.error({error:t,errorMessage:t instanceof Error?t.message:String(t),errorStack:t instanceof Error?t.stack:void 0,errorCode:t?.code,blobId:n},`Failed to generate PDF preview`),t}});else if(o.contentType?.startsWith(`image/`))c=await a(`generate-image-preview`,async()=>{let e=(await import(`sharp`)).default;return await e(s).resize(800,800,{fit:`inside`,withoutEnlargement:!0}).jpeg({quality:85}).toBuffer()});else throw Error(`Preview generation not supported for content type: ${o.contentType}`);let l=await a(`store-preview`,async()=>await e.storage.primary.createVariant(n,{preview:!0},c));return e.logger.info({blobId:n,previewId:l.id,contentType:o.contentType},`Preview generated`),l}});export{i as generatePreview};
@@ -1,6 +1,6 @@
-import { DefineTestDatabaseOptions, defineTestDatabase } from "./
-import { AppContainer } from "./
-import { index_d_exports } from "./
+import { DefineTestDatabaseOptions, defineTestDatabase } from "./api/database.mjs";
+import { AppContainer } from "./api/container.mjs";
+import { index_d_exports } from "./api/index.mjs";
 import supertestOriginal from "supertest";
 import { TestAPI } from "vitest";
 import { Express } from "ultimate-express";
@@ -1 +1 @@
-import{defineTestDatabase as e}from"./
+
import{defineTestDatabase as e}from"./api/database.mjs";import{defineServer as t}from"./api/server.mjs";import{mock_exports as n}from"./mock.mjs";import{mkdirSync as r,mkdtempSync as i}from"node:fs";import{rm as a}from"node:fs/promises";import{tmpdir as o}from"node:os";import{join as s}from"node:path";import c from"supertest";import{test as l}from"vitest";let u=[],d=(0,n.mock)();function f(){u=[],d=(0,n.mock)()}function p(t){return async c=>{let l=await c(),f=async t=>{let n=new URL(t.poolConfig.connectionString||`postgres://postgres:postgres@localhost:5432/test`),r=await e({connectionString:n.toString(),logger:d,name:n.pathname.slice(1)||`test`,schema:t.schema??{},relations:t.relations??{}});return u.push(r.cleanUp),r.db},p=e=>{let t=i(s(o(),`test-storage-`)),n={};for(let i of Object.keys(e.disks||{})){let e=s(t,i);r(e,{recursive:!0}),n[i]={location:e}}return u.push(()=>a(t,{recursive:!0,force:!0})),l.defineStorage({database:e.database,default:e.default,disks:n})};return{...l,defineDatabase:t?.defineDatabase??f,defineStorage:t?.defineStorage??p,defineCache:t?.defineCache??(()=>(0,n.mock)()),defineLogger:t?.defineLogger??(async()=>d),defineMailer:t?.defineMailer??(async()=>(0,n.mock)()),defineEventBus:t?.defineEventBus??(()=>(0,n.mockDeep)()),defineAuth:t?.defineAuth??(()=>(0,n.mockDeep)()),defineI18n:t?.defineI18n??(async()=>(0,n.mock)())}}}function m(e){let t=e.address?.bind(e);return e.address=()=>{let e=t?.();return e&&e.port===void 0?null:e},e.close||=t=>{e.uwsApp&&e.uwsApp.close(),t?.()},e}function h(e){return c(m(e))}function g(e,t){let n={...e};for(let r of Object.keys(t)){let i=t[r],a=e[r];i!==void 0&&typeof i==`object`&&i&&!Array.isArray(i)&&typeof a==`object`&&a&&!Array.isArray(a)?n[r]=g(a,i):i!==void 0&&(n[r]=i)}return n}function _(e,n){return l.extend({container:async({},t)=>{f();let r=await e();await t(n?g(r,n):r);for(let e of u.reverse())await e()},app:async({container:e},n)=>{await n((await t({container:e})).app)}})}export{p as defineApiMocks,_ as defineServerTest,e as defineTestDatabase,h as request};
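
The rewritten tests entry point above exports defineApiMocks, defineServerTest, defineTestDatabase and request: defineApiMocks swaps the database/storage/logger/mailer/event-bus definitions for test doubles, defineServerTest extends vitest's test with container and app fixtures, and request wraps supertest around the ultimate-express app. A hedged usage sketch (the import specifier and the container factory are assumptions):

  // Sketch only: the specifier and buildTestContainer are hypothetical.
  import { expect } from "vitest";
  import { defineServerTest, request } from "appos/tests"; // specifier assumed
  import { buildTestContainer } from "./helpers";          // must resolve to a built app container

  // `container` and `app` become vitest fixtures; the app fixture boots the
  // bundled defineServer around the container for each test.
  const test = defineServerTest(buildTestContainer);

  test("GET / responds", async ({ app }) => {
    const res = await request(app).get("/"); // supertest over the ultimate-express app
    expect(res.status).toBe(200);
  });
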
@@ -0,0 +1 @@
+
const e=`api`,t=`databases`,n=`routes`,r=`public`,i=process.env.NODE_ENV===`production`?`js`:`ts`;export{e as APPOS_DIR,t as DATABASES_DIR,i as FILE_EXT,r as PUBLIC_DIR,n as ROUTES_DIR};
@@ -0,0 +1 @@
+
import{__reExport as e}from"./_virtual/rolldown_runtime.mjs";import{register as t}from"node:module";import{getNodeAutoInstrumentations as n}from"@opentelemetry/auto-instrumentations-node";import{OTLPTraceExporter as r}from"@opentelemetry/exporter-trace-otlp-http";import{PinoInstrumentation as i}from"@opentelemetry/instrumentation-pino";import{resourceFromAttributes as a}from"@opentelemetry/resources";import{NodeSDK as o}from"@opentelemetry/sdk-node";import{BatchSpanProcessor as s}from"@opentelemetry/sdk-trace-node";import{ATTR_SERVICE_NAME as c,ATTR_SERVICE_VERSION as l}from"@opentelemetry/semantic-conventions";export*from"@opentelemetry/api";var u={};import*as d from"@opentelemetry/api";e(u,d),t(`@opentelemetry/instrumentation/hook.mjs`,import.meta.url);const f={name:process.env.APP_NAME||`appos`,version:process.env.APP_VERSION||`development`};new o({resource:a({[c]:f.name,[l]:f.version}),spanProcessors:[new s(new r({url:process.env.OTEL_EXPORTER_OTLP_ENDPOINT||`http://localhost:4318/v1/traces`,headers:process.env.OTEL_EXPORTER_OTLP_HEADERS?JSON.parse(process.env.OTEL_EXPORTER_OTLP_HEADERS):void 0}))],instrumentations:[n({"@opentelemetry/instrumentation-fs":{enabled:!1},"@opentelemetry/instrumentation-dns":{enabled:!1}}),new i({logHook:(e,t)=>{t[`service.name`]=f.name}})]}).start();export{u as instrumentation_exports};
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "appos",
-"version": "0.
+"version": "0.3.0-0",
 "type": "module",
 "scripts": {
 "release": "release-it",
@@ -127,8 +127,8 @@
 "#*": "./*"
 },
 "dependencies": {
-"@aws-sdk/client-s3": "^3.
-"@aws-sdk/s3-request-presigner": "^3.
+"@aws-sdk/client-s3": "^3.955.0",
+"@aws-sdk/s3-request-presigner": "^3.955.0",
 "@better-auth/passkey": "^1.4.7",
 "@better-auth/sso": "^1.4.7",
 "@clack/prompts": "^0.11.0",
@@ -193,6 +193,7 @@
 "drizzle-zod": "^1.0.0-beta.2-f9236e3",
 "embla-carousel-react": "^8.6.0",
 "es-toolkit": "^1.43.0",
+"esbuild": "^0.27.2",
 "express-rate-limit": "^8.2.1",
 "flydrive": "^1.3.0",
 "helmet": "^8.1.0",
@@ -217,12 +218,13 @@
 "pino": "^10.1.0",
 "pino-pretty": "^13.1.3",
 "radix-ui": "^1.4.3",
+"rate-limit-redis": "^4.3.1",
 "react": "^19.2.3",
 "react-day-picker": "^9.13.0",
 "react-dom": "^19.2.3",
 "react-hook-form": "^7.68.0",
 "react-i18next": "^16.5.0",
-"react-resizable-panels": "^4.0.
+"react-resizable-panels": "^4.0.8",
 "recharts": "^3.6.0",
 "redis": "^5.10.0",
 "release-it": "^19.1.0",
@@ -237,7 +239,7 @@
 "tw-animate-css": "^1.4.0",
 "ultimate-express": "^2.0.13",
 "vaul": "^1.1.2",
-"vite": "
+"vite": "^8.0.0-beta.3",
 "vite-plugin-babel": "^1.3.2",
 "vite-tsconfig-paths": "^6.0.3",
 "vitest": "^4.0.16",
@@ -249,9 +251,5 @@
 "trustedDependencies": [
 "canvas",
 "sharp"
-]
-"devDependencies": {
-"change-case": "^5.4.4",
-"rate-limit-redis": "^4.3.1"
-}
+]
 }
@@ -1,2 +0,0 @@
-#!/usr/bin/env node --import=appos/instrumentation --no-warnings
-
import{i as e,s as t}from"./workflow-BagSlsMp.mjs";import{basename as n,join as r}from"node:path";import{glob as i}from"node:fs/promises";import{camelCase as a}from"es-toolkit";import{z as o}from"zod";import{createClient as s}from"redis";function c(e){let t=null,n=null;return{inputSchema:e.input,get name(){return n},register(e,r){t=e,n=r},async emit(r){if(!t||!n)throw Error(`Event not registered. Ensure the worker is started before emitting events.`);let i=e.input.parse(r),a={container:t,input:i};await e.run(a),t.eventBus.publish(n,i).catch(e=>{t.logger.error({err:e,event:n},`Redis publish failed`)})},async subscribe(r){if(!t||!n)throw Error(`Event not registered. Ensure the worker is started before subscribing.`);return t.eventBus.subscribe(n,async i=>{let a=e.input.parse(i),o={container:t,input:a};try{await r(o)}catch(e){t.logger.error({err:e,event:n},`Event subscription handler error`)}})}}}const l=c({input:o.object({action:o.enum([`INSERT`,`UPDATE`,`DELETE`]),newData:o.record(o.string(),o.unknown()).nullable(),oldData:o.record(o.string(),o.unknown()).nullable(),organizationId:o.string().nullable(),tableName:o.string(),timestamp:o.string(),userId:o.string().nullable()}),async run(){}});function u(e){return a(n(e,`.ts`))}async function d(n){let{container:a}=n,o=n.eventsDir??r(process.cwd(),e,t);l.register(a,`dbChanges`);let s=await Array.fromAsync(i(`${o}/**/*.ts`,{exclude:[`**/*.test.ts`,`**/*.spec.ts`]}));for(let e of s){let t=await import(e);if(t.default&&typeof t.default==`object`&&`emit`in t.default&&`subscribe`in t.default){let n=u(e);t.default.register(a,n)}}}export{d as n,l as t};
@@ -1,2 +0,0 @@
-#!/usr/bin/env node --import=appos/instrumentation --no-warnings
-
import{n as e}from"./workflow-BagSlsMp.mjs";import{join as t}from"node:path";import n from"zod";import{ALL_FORMATS as r,BlobSource as i,Input as a}from"mediabunny";import o from"sharp";const s=e({input:n.object({blobId:n.string()}),async run({container:e,input:{blobId:n},step:s}){let c=await s(`fetch-blob`,async()=>e.storage.primary.getBlob(n));if(!c)throw Error(`Blob ${n} not found`);let l=await s(`download-blob`,async()=>e.storage.primary.downloadBlob(n));if(!l)throw Error(`Failed to download blob ${n}`);let u={};return c.contentType?.startsWith(`image/`)?u=await s(`extract-image-metadata`,async()=>{let e=await o(l).metadata();return{width:e.width,height:e.height,format:e.format,hasAlpha:e.hasAlpha,space:e.space}}):c.contentType?.startsWith(`video/`)||c.contentType?.startsWith(`audio/`)?u=await s(`extract-media-metadata`,async()=>{let e=new Uint8Array(l),t=new a({source:new i(new Blob([e],{type:c.contentType||`video/mp4`})),formats:r}),n=await t.computeDuration(),o=await t.getMetadataTags(),s={},u={},d=!1,f=!1;try{let e=await t.getPrimaryVideoTrack();if(e){d=!0;let t=e.displayWidth&&e.displayHeight?e.displayWidth/e.displayHeight:null;s={width:e.displayWidth,height:e.displayHeight,rotation:e.rotation,angle:e.rotation,displayAspectRatio:t}}}catch{}try{let e=await t.getPrimaryAudioTrack();e&&(f=!0,u={sampleRate:e.sampleRate,channels:e.numberOfChannels})}catch{}return{duration:n,video:d,audio:f,...s,...u,tags:o}}):c.contentType===`application/pdf`&&(u=await s(`extract-pdf-metadata`,async()=>{try{let e=await import(`pdfjs-dist/legacy/build/pdf.mjs`),n=`${t(process.cwd(),`node_modules/pdfjs-dist/standard_fonts`)}/`,r=await e.getDocument({data:new Uint8Array(l),standardFontDataUrl:n}).promise,i=await r.getMetadata(),a=(await r.getPage(1)).getViewport({scale:1}),o=i.info;return{pageCount:r.numPages,width:a.width,height:a.height,title:o?.Title||null,author:o?.Author||null,subject:o?.Subject||null,keywords:o?.Keywords||null,creator:o?.Creator||null,producer:o?.Producer||null,creationDate:o?.CreationDate||null,modificationDate:o?.ModDate||null,pdfVersion:o?.PDFFormatVersion||null}}catch(t){return e.logger.error({error:t,errorMessage:t instanceof Error?t.message:String(t),errorStack:t instanceof Error?t.stack:void 0,errorCode:t?.code,blobId:n},`Failed to extract PDF metadata`),{error:`Failed to extract PDF metadata`,errorMessage:t instanceof Error?t.message:String(t)}}})),await s(`save-metadata`,async()=>{await e.storage.primary.updateBlobMetadata(n,{...u,analyzed:!0})}),e.logger.info({blobId:n,metadata:u},`Metadata extracted`),{...u,analyzed:!0}}});export{s as extractBlobMetadata};
@@ -1,2 +0,0 @@
-#!/usr/bin/env node --import=appos/instrumentation --no-warnings
-
import{n as e}from"./workflow-BagSlsMp.mjs";import{z as t}from"zod";import n from"sharp";const r=t.object({width:t.number().optional(),height:t.number().optional(),fit:t.enum([`cover`,`contain`,`fill`,`inside`,`outside`]).optional(),position:t.enum([`top`,`right top`,`right`,`right bottom`,`bottom`,`left bottom`,`left`,`left top`,`centre`]).optional(),kernel:t.enum([`nearest`,`linear`,`cubic`,`mitchell`,`lanczos2`,`lanczos3`]).optional()}),i=t.object({resize:r.optional(),rotate:t.number().optional(),flip:t.boolean().optional(),flop:t.boolean().optional(),sharpen:t.boolean().optional(),blur:t.number().optional(),grayscale:t.boolean().optional(),format:t.enum([`jpeg`,`png`,`webp`,`avif`,`gif`]).optional(),quality:t.number().min(1).max(100).optional(),preview:t.literal(!0).optional()}),a=e({input:t.object({blobId:t.string(),transformations:i}),async run({container:e,input:{blobId:t,transformations:r},step:i}){if(!await i(`fetch-blob`,async()=>e.storage.primary.getBlob(t)))throw Error(`Blob ${t} not found`);let a=await i(`download-blob`,async()=>e.storage.primary.downloadBlob(t));if(!a)throw Error(`Failed to download blob ${t}`);let o=await i(`apply-transformations`,async()=>{let e=n(a);return r.resize&&(e=e.resize({width:r.resize.width,height:r.resize.height,fit:r.resize.fit,position:r.resize.position,kernel:r.resize.kernel})),r.rotate!==void 0&&(e=e.rotate(r.rotate)),r.flip&&(e=e.flip()),r.flop&&(e=e.flop()),r.sharpen&&(e=e.sharpen()),r.blur!==void 0&&(e=e.blur(r.blur)),r.grayscale&&(e=e.grayscale()),r.format&&(e=e.toFormat(r.format,{quality:r.quality})),e.toBuffer()}),s=await i(`store-variant`,async()=>e.storage.primary.createVariant(t,r,o));return e.logger.info({blobId:t,variantId:s.id},`Image variant generated`),s}});export{a as generateImageVariant};
@@ -1,2 +0,0 @@
-#!/usr/bin/env node --import=appos/instrumentation --no-warnings
-
import{n as e}from"./workflow-BagSlsMp.mjs";import{spawn as t}from"node:child_process";import{join as n}from"node:path";import r from"zod";import i from"sharp";import{createCanvas as a}from"canvas";const o=e({input:r.object({blobId:r.string(),timeInSeconds:r.number().optional()}),async run({container:e,input:{blobId:r,timeInSeconds:o=1},step:s}){let c=await s(`fetch-blob`,async()=>e.storage.primary.getBlob(r));if(!c)throw Error(`Blob ${r} not found`);let l=await s(`download-blob`,async()=>e.storage.primary.downloadBlob(r));if(!l)throw Error(`Failed to download blob ${r}`);let u=null;if(c.contentType?.startsWith(`video/`))u=await s(`generate-video-preview`,async()=>new Promise((n,a)=>{try{let s=t(`ffmpeg`,[`-i`,`pipe:0`,`-ss`,o.toString(),`-frames:v`,`1`,`-f`,`image2pipe`,`-c:v`,`png`,`pipe:1`]),c=[],u=[];s.stdout.on(`data`,e=>{c.push(e)}),s.stderr.on(`data`,e=>{u.push(e)}),s.on(`close`,async t=>{if(t===0)try{n(await i(Buffer.concat(c)).jpeg({quality:80}).toBuffer())}catch(t){e.logger.error({error:t,blobId:r},`Failed to convert video frame to JPEG`),a(t)}else{let n=Buffer.concat(u).toString(),i=Error(`FFmpeg exited with code ${t}: ${n}`);e.logger.error({error:i,blobId:r,code:t,stderr:n},`Failed to generate video preview`),a(i)}}),s.on(`error`,t=>{e.logger.error({error:t,blobId:r},`Failed to spawn FFmpeg process`),a(t)}),s.stdin.on(`error`,t=>{t.code!==`EPIPE`&&e.logger.error({error:t,blobId:r},`Failed to write to FFmpeg stdin`)}),s.stdin.write(l),s.stdin.end()}catch(t){e.logger.error({error:t,blobId:r},`Failed to generate video preview`),a(t)}}));else if(c.contentType===`application/pdf`)u=await s(`generate-pdf-preview`,async()=>{try{let e=await import(`pdfjs-dist/legacy/build/pdf.mjs`),t=`${n(process.cwd(),`node_modules/pdfjs-dist/standard_fonts`)}/`,r=await(await e.getDocument({data:new Uint8Array(l),standardFontDataUrl:t}).promise).getPage(1),o=r.getViewport({scale:2}),s=a(o.width,o.height),c=s.getContext(`2d`);return await r.render({canvasContext:c,viewport:o,canvas:s}).promise,await i(s.toBuffer(`image/png`)).resize(800,800,{fit:`inside`,withoutEnlargement:!0}).jpeg({quality:85}).toBuffer()}catch(t){throw e.logger.error({error:t,errorMessage:t instanceof Error?t.message:String(t),errorStack:t instanceof Error?t.stack:void 0,errorCode:t?.code,blobId:r},`Failed to generate PDF preview`),t}});else if(c.contentType?.startsWith(`image/`))u=await s(`generate-image-preview`,async()=>await i(l).resize(800,800,{fit:`inside`,withoutEnlargement:!0}).jpeg({quality:85}).toBuffer());else throw Error(`Preview generation not supported for content type: ${c.contentType}`);let d=await s(`store-preview`,async()=>await e.storage.primary.createVariant(r,{preview:!0},u));return e.logger.info({blobId:r,previewId:d.id,contentType:c.contentType},`Preview generated`),d}});export{o as generatePreview};
@@ -1,2 +0,0 @@
-#!/usr/bin/env node --import=appos/instrumentation --no-warnings
-
import{basename as e,join as t}from"node:path";import{glob as n}from"node:fs/promises";import{camelCase as r}from"es-toolkit";const i=`api`,a=`databases`,o=`events`,s=`commands`;function c(e){let t=null,n=null,r=null,i=null,a=async i=>{if(!t||!r)throw Error(`Workflow "${n}" not registered`);let a=r,o=a.workflowID;if(!o)throw Error(`DBOS.workflowID is not available in this context`);let s={container:t,workflowId:o,input:i,step:(e,t)=>a.runStep(t,{name:e})};return e.run(s)};return{inputSchema:e.input,get name(){return n},register(o,s,c){t=o,n=s,r=c,i=c.registerWorkflow(a,{name:s,...e.config})},async start(t){if(!i||!n||!r)throw Error(`Workflow not registered. Ensure the worker is started before triggering workflows.`);let a=e.input.parse(t),o=await r.startWorkflow(i)(a);return{workflowId:o.workflowID,getStatus:()=>o.getStatus(),getResult:()=>o.getResult()}}}}function l(e){let t=null,n=null,r=null,i=async(i,a)=>{if(!t||!r)throw Error(`Workflow "${n}" not registered`);let o=r,s=o.workflowID;if(!s)throw Error(`DBOS.workflowID is not available in this context`);let c={container:t,workflowId:s,scheduledTime:i,step:(e,t)=>o.runStep(t,{name:e})};return e.run(c)};return{crontab:e.crontab,get name(){return n},register(a,o,s){t=a,n=o,r=s,s.registerScheduled(s.registerWorkflow(i,{name:o}),{crontab:e.crontab})}}}function u(t){return r(e(t,`.ts`))}async function d(e){let{container:r,dbos:i}=e,a=e.workflowsDir??t(process.cwd(),`api`,`workflows`),o=(e,t)=>{try{e.register(r,t,i)}catch(e){if(!(e instanceof Error)||!e.message.includes(`already registered`))throw e}},{extractBlobMetadata:s}=await import(`./extract-blob-metadata-TqNd9w-6.mjs`),{generateImageVariant:c}=await import(`./generate-image-variant-D8H9FxgD.mjs`),{generatePreview:l}=await import(`./generate-preview-5jLZLX6I.mjs`),{purgeAttachment:d}=await import(`./purge-attachment-CMlJMNOk.mjs`),{trackDbChanges:f}=await import(`./track-db-changes-q0Vl7Htm.mjs`),{definePurgeAuditLogs:p}=await import(`./purge-audit-logs-hd6q6vnR.mjs`),{definePurgeUnattachedBlobs:m}=await import(`./purge-unattached-blobs-BYv5b9R9.mjs`);o(s,`extractBlobMetadata`),o(c,`generateImageVariant`),o(l,`generatePreview`),o(d,`purgeAttachment`),o(f,`trackDbChanges`),o(p(r.auth.auditLog?.purgeCron),`purgeAuditLogs`),o(m(r.storage.primary.purgeCron),`purgeUnattachedBlobs`);let h=await Array.fromAsync(n(`${a}/**/*.ts`,{exclude:[`**/*.test.ts`,`**/*.spec.ts`]}));for(let e of h){let t=await import(e);if(t.default&&typeof t.default==`object`&&`register`in t.default){let n=u(e);o(t.default,n)}}}export{s as a,i,c as n,a as o,d as r,o as s,l as t};

package/dist/exports/api/node_modules/.bun/change-case@5.4.4/node_modules/change-case/dist/index.mjs
DELETED
@@ -1 +0,0 @@
const e=/([\p{Ll}\d])(\p{Lu})/gu,t=/(\p{Lu})([\p{Lu}][\p{Ll}])/gu,n=/(\d)\p{Ll}|(\p{L})\d/u,r=/[^\p{L}\d]+/giu,i=`$1\0$2`;function a(n){let a=n.trim();a=a.replace(e,i).replace(t,i),a=a.replace(r,`\0`);let o=0,s=a.length;for(;a.charAt(o)===`\0`;)o++;if(o===s)return[];for(;a.charAt(s-1)===`\0`;)s--;return a.slice(o,s).split(/\0/g)}function o(e){let t=a(e);for(let e=0;e<t.length;e++){let r=t[e],i=n.exec(r);if(i){let n=i.index+(i[1]??i[2]).length;t.splice(e,1,r.slice(0,n),r.slice(n))}}return t}function s(e,t){let[n,r,i]=f(e,t),a=c(t?.locale),o=l(t?.locale),s=t?.mergeAmbiguousCharacters?u(a,o):d(a,o);return n+r.map((e,t)=>t===0?a(e):s(e,t)).join(t?.delimiter??``)+i}function c(e){return e===!1?e=>e.toLowerCase():t=>t.toLocaleLowerCase(e)}function l(e){return e===!1?e=>e.toUpperCase():t=>t.toLocaleUpperCase(e)}function u(e,t){return n=>`${t(n[0])}${e(n.slice(1))}`}function d(e,t){return(n,r)=>{let i=n[0];return(r>0&&i>=`0`&&i<=`9`?`_`+i:t(i))+e(n.slice(1))}}function f(e,t={}){let n=t.split??(t.separateNumbers?o:a),r=t.prefixCharacters??``,i=t.suffixCharacters??``,s=0,c=e.length;for(;s<e.length;){let t=e.charAt(s);if(!r.includes(t))break;s++}for(;c>s;){let t=c-1,n=e.charAt(t);if(!i.includes(n))break;c=t}return[e.slice(0,s),n(e.slice(s,c)),e.slice(c)]}export{s as camelCase};
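The deleted vendored module is change-case's camelCase: it inserts split points at lower-to-upper and acronym boundaries plus runs of non-alphanumeric characters, lowercases the first word, capitalizes the rest, and prefixes a leading digit in later words with an underscore. Expected behaviour under that reading (outputs are inferred from the splitting rules above, not taken from the package's tests):

import { camelCase } from "change-case";

camelCase("track_db-changes"); // "trackDbChanges" — non-alphanumeric runs become word breaks
camelCase("XMLHttpRequest");   // "xmlHttpRequest" — the acronym boundary before "Http" is detected
camelCase("purge audit logs"); // "purgeAuditLogs"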

@@ -1 +0,0 @@
const e=`api`,t=`databases`,n=`workflows`,r=`events`,i=`public`,a=`locales`;export{e as APPOS_DIR,t as DATABASES_DIR,r as EVENTS_DIR,a as LOCALES_DIR,i as PUBLIC_DIR,n as WORKFLOWS_DIR};

@@ -1 +0,0 @@
import{__reExport as e}from"../../../_virtual/rolldown_runtime.mjs";import{register as t}from"node:module";import{getNodeAutoInstrumentations as n}from"@opentelemetry/auto-instrumentations-node";import{OTLPTraceExporter as r}from"@opentelemetry/exporter-trace-otlp-http";import{PinoInstrumentation as i}from"@opentelemetry/instrumentation-pino";import{resourceFromAttributes as a}from"@opentelemetry/resources";import{NodeSDK as o}from"@opentelemetry/sdk-node";import{BatchSpanProcessor as s}from"@opentelemetry/sdk-trace-node";import{ATTR_SERVICE_NAME as c,ATTR_SERVICE_VERSION as l}from"@opentelemetry/semantic-conventions";export*from"@opentelemetry/api";var u={};import*as d from"@opentelemetry/api";e(u,d),t(`@opentelemetry/instrumentation/hook.mjs`,import.meta.url);const f={name:process.env.APP_NAME||`appos`,version:process.env.APP_VERSION||`development`};new o({resource:a({[c]:f.name,[l]:f.version}),spanProcessors:[new s(new r({url:process.env.OTEL_EXPORTER_OTLP_ENDPOINT||`http://localhost:4318/v1/traces`,headers:process.env.OTEL_EXPORTER_OTLP_HEADERS?JSON.parse(process.env.OTEL_EXPORTER_OTLP_HEADERS):void 0}))],instrumentations:[n({"@opentelemetry/instrumentation-fs":{enabled:!1},"@opentelemetry/instrumentation-dns":{enabled:!1}}),new i({logHook:(e,t)=>{t[`service.name`]=f.name}})]}).start();export{u as instrumentation_exports};
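The deleted instrumentation bundle registers the OpenTelemetry ESM loader hook and starts a NodeSDK with an OTLP/HTTP trace exporter, Node auto-instrumentations (fs and dns disabled), and Pino log correlation that stamps service.name onto every log line. Service identity and exporter settings come from the environment; note that it JSON.parses OTEL_EXPORTER_OTLP_HEADERS rather than accepting the usual comma-separated key=value form. Illustrative values (variable names are taken from the code above, values are invented):

// Environment consumed by the instrumentation entrypoint
// (loaded via node --import=appos/instrumentation); all values below are examples.
process.env.APP_NAME = "my-service";            // service.name, defaults to "appos"
process.env.APP_VERSION = "1.4.2";              // service.version, defaults to "development"
process.env.OTEL_EXPORTER_OTLP_ENDPOINT = "http://collector:4318/v1/traces"; // default: http://localhost:4318/v1/traces
process.env.OTEL_EXPORTER_OTLP_HEADERS = JSON.stringify({ "x-api-key": "example" }); // parsed with JSON.parse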

@@ -1 +0,0 @@
import"./redis.mjs";import{z as e}from"zod";import{join as t}from"node:path";import"es-toolkit";function n(e){let t=null,n=null;return{inputSchema:e.input,get name(){return n},register(e,r){t=e,n=r},async emit(r){if(!t||!n)throw Error(`Event not registered. Ensure the worker is started before emitting events.`);let i=e.input.parse(r),a={container:t,input:i};await e.run(a),t.eventBus.publish(n,i).catch(e=>{t.logger.error({err:e,event:n},`Redis publish failed`)})},async subscribe(r){if(!t||!n)throw Error(`Event not registered. Ensure the worker is started before subscribing.`);return t.eventBus.subscribe(n,async i=>{let a=e.input.parse(i),o={container:t,input:a};try{await r(o)}catch(e){t.logger.error({err:e,event:n},`Event subscription handler error`)}})}}}const r=n({input:e.object({action:e.enum([`INSERT`,`UPDATE`,`DELETE`]),newData:e.record(e.string(),e.unknown()).nullable(),oldData:e.record(e.string(),e.unknown()).nullable(),organizationId:e.string().nullable(),tableName:e.string(),timestamp:e.string(),userId:e.string().nullable()}),async run(){}});export{r as dbChangesEvent};

@@ -1 +0,0 @@
import{join as e}from"node:path";import"es-toolkit";function t(e){let t=null,n=null,r=null,i=null,a=async i=>{if(!t||!r)throw Error(`Workflow "${n}" not registered`);let a=r,o=a.workflowID;if(!o)throw Error(`DBOS.workflowID is not available in this context`);let s={container:t,workflowId:o,input:i,step:(e,t)=>a.runStep(t,{name:e})};return e.run(s)};return{inputSchema:e.input,get name(){return n},register(o,s,c){t=o,n=s,r=c,i=c.registerWorkflow(a,{name:s,...e.config})},async start(t){if(!i||!n||!r)throw Error(`Workflow not registered. Ensure the worker is started before triggering workflows.`);let a=e.input.parse(t),o=await r.startWorkflow(i)(a);return{workflowId:o.workflowID,getStatus:()=>o.getStatus(),getResult:()=>o.getResult()}}}}function n(e){let t=null,n=null,r=null,i=async(i,a)=>{if(!t||!r)throw Error(`Workflow "${n}" not registered`);let o=r,s=o.workflowID;if(!s)throw Error(`DBOS.workflowID is not available in this context`);let c={container:t,workflowId:s,scheduledTime:i,step:(e,t)=>o.runStep(t,{name:e})};return e.run(c)};return{crontab:e.crontab,get name(){return n},register(a,o,s){t=a,n=o,r=s,s.registerScheduled(s.registerWorkflow(i,{name:o}),{crontab:e.crontab})}}}export{n as defineScheduledWorkflow,t as defineWorkflow};

@@ -1 +0,0 @@
const e=/([\p{Ll}\d])(\p{Lu})/gu,t=/(\p{Lu})([\p{Lu}][\p{Ll}])/gu,n=/(\d)\p{Ll}|(\p{L})\d/u,r=/[^\p{L}\d]+/giu,i=`$1\0$2`;function a(n){let a=n.trim();a=a.replace(e,i).replace(t,i),a=a.replace(r,`\0`);let o=0,s=a.length;for(;a.charAt(o)===`\0`;)o++;if(o===s)return[];for(;a.charAt(s-1)===`\0`;)s--;return a.slice(o,s).split(/\0/g)}function o(e){let t=a(e);for(let e=0;e<t.length;e++){let r=t[e],i=n.exec(r);if(i){let n=i.index+(i[1]??i[2]).length;t.splice(e,1,r.slice(0,n),r.slice(n))}}return t}function s(e,t){let[n,r,i]=f(e,t),a=c(t?.locale),o=l(t?.locale),s=t?.mergeAmbiguousCharacters?u(a,o):d(a,o);return n+r.map((e,t)=>t===0?a(e):s(e,t)).join(t?.delimiter??``)+i}function c(e){return e===!1?e=>e.toLowerCase():t=>t.toLocaleLowerCase(e)}function l(e){return e===!1?e=>e.toUpperCase():t=>t.toLocaleUpperCase(e)}function u(e,t){return n=>`${t(n[0])}${e(n.slice(1))}`}function d(e,t){return(n,r)=>{let i=n[0];return(r>0&&i>=`0`&&i<=`9`?`_`+i:t(i))+e(n.slice(1))}}function f(e,t={}){let n=t.split??(t.separateNumbers?o:a),r=t.prefixCharacters??``,i=t.suffixCharacters??``,s=0,c=e.length;for(;s<e.length;){let t=e.charAt(s);if(!r.includes(t))break;s++}for(;c>s;){let t=c-1,n=e.charAt(t);if(!i.includes(n))break;c=t}return[e.slice(0,s),n(e.slice(s,c)),e.slice(c)]}export{s as camelCase};

@@ -1,25 +0,0 @@
const e={increment:`
local windowMs = tonumber(ARGV[2])
local resetOnChange = ARGV[1] == "1"

local timeToExpire = redis.call("PTTL", KEYS[1])

if timeToExpire <= 0 then
redis.call("SET", KEYS[1], 1, "PX", windowMs)
return { 1, windowMs }
end

local totalHits = redis.call("INCR", KEYS[1])

if resetOnChange then
redis.call("PEXPIRE", KEYS[1], windowMs)
timeToExpire = windowMs
end

return { totalHits, timeToExpire }
`.replaceAll(/^\s+/gm,``).trim(),get:`
local totalHits = redis.call("GET", KEYS[1])
local timeToExpire = redis.call("PTTL", KEYS[1])

return { totalHits, timeToExpire }
`.replaceAll(/^\s+/gm,``).trim()},t=e=>typeof e==`number`?e:Number.parseInt((e??``).toString(),10),n=e=>{if(!Array.isArray(e))throw TypeError(`Expected result to be array of values`);if(e.length!==2)throw Error(`Expected 2 replies, got ${e.length}`);let n=e[0]===!1?0:t(e[0]),r=t(e[1]);return{totalHits:n,resetTime:new Date(Date.now()+r)}};var r=class{sendCommand;prefix;resetExpiryOnChange;incrementScriptSha;getScriptSha;windowMs;constructor(e){if(typeof e!=`object`)throw TypeError(`rate-limit-redis: Error: options object is required`);if(`sendCommand`in e&&!(`sendCommandCluster`in e)){let t=e.sendCommand.bind(this);this.sendCommand=async({command:e})=>t(...e)}else if(!(`sendCommand`in e)&&`sendCommandCluster`in e)this.sendCommand=e.sendCommandCluster.bind(this);else throw Error(`rate-limit-redis: Error: options must include either sendCommand or sendCommandCluster (but not both)`);this.prefix=e.prefix??`rl:`,this.resetExpiryOnChange=e.resetExpiryOnChange??!1,this.incrementScriptSha=this.loadIncrementScript(),this.getScriptSha=this.loadGetScript()}async loadIncrementScript(t){let n=await this.sendCommand({key:t,isReadOnly:!1,command:[`SCRIPT`,`LOAD`,e.increment]});if(typeof n!=`string`)throw TypeError(`unexpected reply from redis client`);return n}async loadGetScript(t){let n=await this.sendCommand({key:t,isReadOnly:!1,command:[`SCRIPT`,`LOAD`,e.get]});if(typeof n!=`string`)throw TypeError(`unexpected reply from redis client`);return n}async retryableIncrement(e){let t=this.prefixKey(e),n=async()=>this.sendCommand({key:t,isReadOnly:!1,command:[`EVALSHA`,await this.incrementScriptSha,`1`,t,this.resetExpiryOnChange?`1`:`0`,this.windowMs.toString()]});try{return await n()}catch{return this.incrementScriptSha=this.loadIncrementScript(t),n()}}prefixKey(e){return`${this.prefix}${e}`}init(e){this.windowMs=e.windowMs}async get(e){let t=this.prefixKey(e),r,i=async()=>this.sendCommand({key:t,isReadOnly:!0,command:[`EVALSHA`,await this.getScriptSha,`1`,t]});try{r=await i()}catch{this.getScriptSha=this.loadGetScript(t),r=await i()}return n(r)}async increment(e){return n(await this.retryableIncrement(e))}async decrement(e){let t=this.prefixKey(e);await this.sendCommand({key:t,isReadOnly:!1,command:[`DECR`,t]})}async resetKey(e){let t=this.prefixKey(e);await this.sendCommand({key:t,isReadOnly:!1,command:[`DEL`,t]})}};export{r as RedisStore};
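The 25-line deletion above is a vendored copy of rate-limit-redis. The increment Lua script creates the key with a PX expiry when PTTL reports none, otherwise INCRs it, optionally re-arms the expiry when resetExpiryOnChange is set, and returns [totalHits, msUntilReset]; the surrounding RedisStore loads both scripts with SCRIPT LOAD and retries the EVALSHA once after reloading if the script has been evicted. A usage sketch with node-redis and express-rate-limit, mirroring how the server bundle later in this diff wires it up:

import { createClient } from "redis";
import { rateLimit } from "express-rate-limit";
import { RedisStore } from "rate-limit-redis";

const redis = createClient({ url: process.env.REDIS_URL });
await redis.connect();

const limiter = rateLimit({
  windowMs: 60_000,
  limit: 100,
  standardHeaders: "draft-8",
  legacyHeaders: false,
  // sendCommand forwards ["EVALSHA", sha, "1", key, ...] style commands to Redis.
  store: new RedisStore({
    sendCommand: (...args: string[]) => redis.sendCommand(args),
    prefix: "rl:api:",
  }),
});

Keeping the counter and its TTL inside one script call makes the read-increment-expire sequence atomic, which is the point of going through EVALSHA rather than separate INCR and PEXPIRE round trips.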

@@ -1 +0,0 @@
import"./redis.mjs";import{glob as e}from"node:fs/promises";import{basename as t,join as n}from"node:path";import{z as r}from"zod";import"es-toolkit";function i(e){let t=null,n=null;return{inputSchema:e.input,get name(){return n},register(e,r){t=e,n=r},async emit(r){if(!t||!n)throw Error(`Event not registered. Ensure the worker is started before emitting events.`);let i=e.input.parse(r),a={container:t,input:i};await e.run(a),t.eventBus.publish(n,i).catch(e=>{t.logger.error({err:e,event:n},`Redis publish failed`)})},async subscribe(r){if(!t||!n)throw Error(`Event not registered. Ensure the worker is started before subscribing.`);return t.eventBus.subscribe(n,async i=>{let a=e.input.parse(i),o={container:t,input:a};try{await r(o)}catch(e){t.logger.error({err:e,event:n},`Event subscription handler error`)}})}}}const a=i({input:r.object({action:r.enum([`INSERT`,`UPDATE`,`DELETE`]),newData:r.record(r.string(),r.unknown()).nullable(),oldData:r.record(r.string(),r.unknown()).nullable(),organizationId:r.string().nullable(),tableName:r.string(),timestamp:r.string(),userId:r.string().nullable()}),async run(){}});export{a as dbChangesEvent};

@@ -1 +0,0 @@
import{camelCase as e}from"../../../../node_modules/.bun/change-case@5.4.4/node_modules/change-case/dist/index.mjs";import t,{glob as n}from"node:fs/promises";import{basename as r}from"node:path";function i(t){return e(r(t,`.ts`).replace(/^\d+_/,``))}async function a(e,a,o){try{await t.access(e)}catch{return}let s=[];for await(let t of n(`${e}/**/*.ts`))!t.endsWith(`.test.ts`)&&!t.endsWith(`.spec.ts`)&&s.push(t);s.sort((e,t)=>r(e).localeCompare(r(t)));for(let e of s)try{let t=(await import(e)).default;if(t&&typeof t.handler==`function`){let n=t.name??i(e);a.logger.debug({name:n,file:e},`Loading user middleware`),o.use(t.handler(a))}else a.logger.warn({file:e},`Middleware file missing default export with handler function`)}catch(t){throw a.logger.error({file:e,error:t instanceof Error?t.message:t},`Failed to load middleware`),t}}export{a as loadMiddleware};
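The deleted middleware loader globs api/middleware/**/*.ts (skipping *.test.ts and *.spec.ts), sorts files by basename, and expects each module's default export to carry a handler(container) factory that returns an Express-style middleware; modules without that shape are logged and skipped, and the registered name falls back to the camelCased file name with any leading numeric prefix such as 10_ stripped. A conforming file might look like this (the file name, container typing, and header choice are illustrative):

// api/middleware/10_request-id.ts — hypothetical file; the "10_" prefix only affects
// load order and is stripped when deriving the middleware name ("requestId").
import { randomUUID } from "node:crypto";

export default {
  name: "requestId", // optional; otherwise derived from the camelCased file name
  handler: (container: { logger: { debug: (obj: object, msg: string) => void } }) =>
    (req: any, res: any, next: () => void) => {
      container.logger.debug({ url: req.url }, "request-id middleware hit");
      res.setHeader("x-request-id", req.headers["x-request-id"] ?? randomUUID());
      next();
    },
};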

@@ -1 +0,0 @@
import{RedisStore as e}from"../../../../node_modules/.bun/rate-limit-redis@4.3.1_f1fa5524233c9c60/node_modules/rate-limit-redis/dist/index.mjs";import{defineErrorHandlerMiddleware as t}from"./middleware/error-handler.mjs";import{defineHealthMiddleware as n}from"./middleware/health.mjs";import{defineI18nMiddleware as r}from"./middleware/i18n.mjs";import{defineRequestLoggerMiddleware as i}from"./middleware/request-logger.mjs";import{defineShutdownMiddleware as a}from"./middleware/shutdown.mjs";import{defineTimeoutMiddleware as o}from"./middleware/timeout.mjs";import{loadMiddleware as s}from"./middleware.mjs";import{join as c}from"node:path";import l from"cors";import{rateLimit as u}from"express-rate-limit";import d from"helmet";import{createClient as f}from"redis";import p from"ultimate-express";async function m({container:m}){let h=!1,{host:g=`0.0.0.0`,port:_,timeout:v=3e4,bodyLimit:y=`1mb`,healthPath:b=`/health`,cors:x,helmet:S,rateLimit:C,redisUrl:w}=m.server,T=p();T.locals.container=m;let E=null;if(w&&C)try{E=f({url:w}),E.on(`error`,e=>{m.logger.error({error:e.message},`Redis client error`)}),await E.connect(),m.logger.info(`Connected to Redis for rate limiting`)}catch(e){m.logger.error({error:e instanceof Error?e.message:e},`Failed to connect to Redis, falling back to in-memory rate limiting`),E=null}if(T.use(a(m.logger,()=>h),o(m.logger,v),i(m.logger),n(b)),S!==void 0&&T.use(d(S)),x!==void 0&&T.use(l(x)),C)for(let[t,n]of C.entries()){let r=u({windowMs:n.windowMs??60*1e3,limit:n.limit??100,standardHeaders:n.standardHeaders??`draft-8`,legacyHeaders:n.legacyHeaders??!1,skip:n.skip,keyGenerator:n.keyGenerator,handler:n.handler,message:n.message,statusCode:n.statusCode,requestPropertyName:n.requestPropertyName,skipFailedRequests:n.skipFailedRequests,skipSuccessfulRequests:n.skipSuccessfulRequests,requestWasSuccessful:n.requestWasSuccessful,validate:n.validate,store:E?new e({sendCommand:(...e)=>E.sendCommand(e),prefix:`rl:${t}:`}):n.store});T.use(r)}if(T.use(r(m.i18n),p.json({limit:y}),p.urlencoded({extended:!0,limit:y})),await s(c(process.cwd(),`api`,`middleware`),m,T),T.get(`/`,(e,t)=>{t.json({message:`${m.config.APP_NAME} server is running.`})}),process.env.NODE_ENV!==`production`){let{defineYouchErrorHandler:e}=await import(`./middleware/youch-handler.mjs`);T.use(e(m.logger))}else T.use(t(m.logger));return{app:T,host:g,port:_,async start(){await new Promise((e,t)=>{try{T.listen(_,g,()=>{e()})}catch(e){t(e)}})},async close(){if(h=!0,T.uwsApp.close(),E)try{await E.destroy()}catch(e){m.logger.warn({error:e instanceof Error?e.message:e},`Error disconnecting from Redis`)}}}}export{m as defineServer};
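The deleted server bundle builds the HTTP layer on ultimate-express: shutdown, timeout, request-logging, and health middleware first, then optional helmet and cors, one express-rate-limit instance per configured entry (backed by the Redis store above when redisUrl is set, falling back to in-memory counters if the connection fails), i18n and body parsing, user middleware from api/middleware, and finally either the youch error page (outside production) or the plain error handler. The config it destructures can be summarised as below; field names come from the code, but the Map reading of rateLimit is an inference from the .entries() call, so treat the shape as a sketch:

// Assumed shape of the server config consumed by the bundled defineServer;
// option objects are left loose on purpose.
interface ServerConfig {
  host?: string;                    // default "0.0.0.0"
  port: number;
  timeout?: number;                 // per-request timeout in ms, default 30_000
  bodyLimit?: string;               // JSON/urlencoded limit, default "1mb"
  healthPath?: string;              // default "/health"
  cors?: Record<string, unknown>;   // passed straight to cors()
  helmet?: Record<string, unknown>; // passed straight to helmet()
  redisUrl?: string;                // when set together with rateLimit, counters live in Redis
  rateLimit?: Map<string, Record<string, unknown>>; // each key becomes the "rl:<key>:" prefix
}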

@@ -1 +0,0 @@
import{glob as e}from"node:fs/promises";import{basename as t,join as n}from"node:path";import"es-toolkit";function r(e){let t=null,n=null,r=null,i=null,a=async i=>{if(!t||!r)throw Error(`Workflow "${n}" not registered`);let a=r,o=a.workflowID;if(!o)throw Error(`DBOS.workflowID is not available in this context`);let s={container:t,workflowId:o,input:i,step:(e,t)=>a.runStep(t,{name:e})};return e.run(s)};return{inputSchema:e.input,get name(){return n},register(o,s,c){t=o,n=s,r=c,i=c.registerWorkflow(a,{name:s,...e.config})},async start(t){if(!i||!n||!r)throw Error(`Workflow not registered. Ensure the worker is started before triggering workflows.`);let a=e.input.parse(t),o=await r.startWorkflow(i)(a);return{workflowId:o.workflowID,getStatus:()=>o.getStatus(),getResult:()=>o.getResult()}}}}function i(e){let t=null,n=null,r=null,i=async(i,a)=>{if(!t||!r)throw Error(`Workflow "${n}" not registered`);let o=r,s=o.workflowID;if(!s)throw Error(`DBOS.workflowID is not available in this context`);let c={container:t,workflowId:s,scheduledTime:i,step:(e,t)=>o.runStep(t,{name:e})};return e.run(c)};return{crontab:e.crontab,get name(){return n},register(a,o,s){t=a,n=o,r=s,s.registerScheduled(s.registerWorkflow(i,{name:o}),{crontab:e.crontab})}}}export{i as defineScheduledWorkflow,r as defineWorkflow};

@@ -1 +0,0 @@
import{defineWorkflow as e}from"../workflow.mjs";import{join as t}from"node:path";import{ALL_FORMATS as n,BlobSource as r,Input as i}from"mediabunny";import a from"sharp";import o from"zod";const s=e({input:o.object({blobId:o.string()}),async run({container:e,input:{blobId:o},step:s}){let c=await s(`fetch-blob`,async()=>e.storage.primary.getBlob(o));if(!c)throw Error(`Blob ${o} not found`);let l=await s(`download-blob`,async()=>e.storage.primary.downloadBlob(o));if(!l)throw Error(`Failed to download blob ${o}`);let u={};return c.contentType?.startsWith(`image/`)?u=await s(`extract-image-metadata`,async()=>{let e=await a(l).metadata();return{width:e.width,height:e.height,format:e.format,hasAlpha:e.hasAlpha,space:e.space}}):c.contentType?.startsWith(`video/`)||c.contentType?.startsWith(`audio/`)?u=await s(`extract-media-metadata`,async()=>{let e=new Uint8Array(l),t=new i({source:new r(new Blob([e],{type:c.contentType||`video/mp4`})),formats:n}),a=await t.computeDuration(),o=await t.getMetadataTags(),s={},u={},d=!1,f=!1;try{let e=await t.getPrimaryVideoTrack();if(e){d=!0;let t=e.displayWidth&&e.displayHeight?e.displayWidth/e.displayHeight:null;s={width:e.displayWidth,height:e.displayHeight,rotation:e.rotation,angle:e.rotation,displayAspectRatio:t}}}catch{}try{let e=await t.getPrimaryAudioTrack();e&&(f=!0,u={sampleRate:e.sampleRate,channels:e.numberOfChannels})}catch{}return{duration:a,video:d,audio:f,...s,...u,tags:o}}):c.contentType===`application/pdf`&&(u=await s(`extract-pdf-metadata`,async()=>{try{let e=await import(`pdfjs-dist/legacy/build/pdf.mjs`),n=`${t(process.cwd(),`node_modules/pdfjs-dist/standard_fonts`)}/`,r=await e.getDocument({data:new Uint8Array(l),standardFontDataUrl:n}).promise,i=await r.getMetadata(),a=(await r.getPage(1)).getViewport({scale:1}),o=i.info;return{pageCount:r.numPages,width:a.width,height:a.height,title:o?.Title||null,author:o?.Author||null,subject:o?.Subject||null,keywords:o?.Keywords||null,creator:o?.Creator||null,producer:o?.Producer||null,creationDate:o?.CreationDate||null,modificationDate:o?.ModDate||null,pdfVersion:o?.PDFFormatVersion||null}}catch(t){return e.logger.error({error:t,errorMessage:t instanceof Error?t.message:String(t),errorStack:t instanceof Error?t.stack:void 0,errorCode:t?.code,blobId:o},`Failed to extract PDF metadata`),{error:`Failed to extract PDF metadata`,errorMessage:t instanceof Error?t.message:String(t)}}})),await s(`save-metadata`,async()=>{await e.storage.primary.updateBlobMetadata(o,{...u,analyzed:!0})}),e.logger.info({blobId:o,metadata:u},`Metadata extracted`),{...u,analyzed:!0}}});export{s as extractBlobMetadata};
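The deleted extractBlobMetadata workflow picks an extractor by content type: sharp metadata for images; mediabunny's Input over a BlobSource for video and audio, collecting duration, metadata tags, primary video track geometry and rotation, and audio sample rate and channel count; and pdfjs-dist for PDF page count, page-1 dimensions, and document info. Whatever it finds is merged with analyzed: true and written back via updateBlobMetadata. A sketch of the image branch, using only calls that appear in the bundle:

import sharp from "sharp";

// Image branch: pull the handful of fields the workflow persists onto the blob.
async function imageMetadata(buffer: Buffer) {
  const meta = await sharp(buffer).metadata();
  return {
    width: meta.width,
    height: meta.height,
    format: meta.format,
    hasAlpha: meta.hasAlpha,
    space: meta.space,
    analyzed: true, // the workflow always stamps analyzed: true when it saves
  };
}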

@@ -1 +0,0 @@
import{defineWorkflow as e}from"../workflow.mjs";import t from"sharp";import{z as n}from"zod";const r=n.object({width:n.number().optional(),height:n.number().optional(),fit:n.enum([`cover`,`contain`,`fill`,`inside`,`outside`]).optional(),position:n.enum([`top`,`right top`,`right`,`right bottom`,`bottom`,`left bottom`,`left`,`left top`,`centre`]).optional(),kernel:n.enum([`nearest`,`linear`,`cubic`,`mitchell`,`lanczos2`,`lanczos3`]).optional()}),i=n.object({resize:r.optional(),rotate:n.number().optional(),flip:n.boolean().optional(),flop:n.boolean().optional(),sharpen:n.boolean().optional(),blur:n.number().optional(),grayscale:n.boolean().optional(),format:n.enum([`jpeg`,`png`,`webp`,`avif`,`gif`]).optional(),quality:n.number().min(1).max(100).optional(),preview:n.literal(!0).optional()}),a=e({input:n.object({blobId:n.string(),transformations:i}),async run({container:e,input:{blobId:n,transformations:r},step:i}){if(!await i(`fetch-blob`,async()=>e.storage.primary.getBlob(n)))throw Error(`Blob ${n} not found`);let a=await i(`download-blob`,async()=>e.storage.primary.downloadBlob(n));if(!a)throw Error(`Failed to download blob ${n}`);let o=await i(`apply-transformations`,async()=>{let e=t(a);return r.resize&&(e=e.resize({width:r.resize.width,height:r.resize.height,fit:r.resize.fit,position:r.resize.position,kernel:r.resize.kernel})),r.rotate!==void 0&&(e=e.rotate(r.rotate)),r.flip&&(e=e.flip()),r.flop&&(e=e.flop()),r.sharpen&&(e=e.sharpen()),r.blur!==void 0&&(e=e.blur(r.blur)),r.grayscale&&(e=e.grayscale()),r.format&&(e=e.toFormat(r.format,{quality:r.quality})),e.toBuffer()}),s=await i(`store-variant`,async()=>e.storage.primary.createVariant(n,r,o));return e.logger.info({blobId:n,variantId:s.id},`Image variant generated`),s}});export{a as generateImageVariant};
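The deleted generateImageVariant workflow validates a transformations object against a zod schema (resize with fit, position, and kernel; rotate, flip, flop, sharpen, blur, grayscale; output format and quality; and a preview flag), applies it as a single sharp pipeline, and stores the result through storage.primary.createVariant. An input that satisfies that schema, assuming the workflow is started through its exported handle (the blob id and import path are placeholders):

import { generateImageVariant } from "appos/api/workflows/generate-image-variant"; // assumed export path

// Produces an 800x450 cover-cropped WebP at quality 80 from the source blob.
await generateImageVariant.start({
  blobId: "blob_123",
  transformations: {
    resize: { width: 800, height: 450, fit: "cover", position: "centre" },
    sharpen: true,
    format: "webp",
    quality: 80,
  },
});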

@@ -1 +0,0 @@
import{defineWorkflow as e}from"../workflow.mjs";import{join as t}from"node:path";import n from"sharp";import r from"zod";import{spawn as i}from"node:child_process";import{createCanvas as a}from"canvas";const o=e({input:r.object({blobId:r.string(),timeInSeconds:r.number().optional()}),async run({container:e,input:{blobId:r,timeInSeconds:o=1},step:s}){let c=await s(`fetch-blob`,async()=>e.storage.primary.getBlob(r));if(!c)throw Error(`Blob ${r} not found`);let l=await s(`download-blob`,async()=>e.storage.primary.downloadBlob(r));if(!l)throw Error(`Failed to download blob ${r}`);let u=null;if(c.contentType?.startsWith(`video/`))u=await s(`generate-video-preview`,async()=>new Promise((t,a)=>{try{let s=i(`ffmpeg`,[`-i`,`pipe:0`,`-ss`,o.toString(),`-frames:v`,`1`,`-f`,`image2pipe`,`-c:v`,`png`,`pipe:1`]),c=[],u=[];s.stdout.on(`data`,e=>{c.push(e)}),s.stderr.on(`data`,e=>{u.push(e)}),s.on(`close`,async i=>{if(i===0)try{t(await n(Buffer.concat(c)).jpeg({quality:80}).toBuffer())}catch(t){e.logger.error({error:t,blobId:r},`Failed to convert video frame to JPEG`),a(t)}else{let t=Buffer.concat(u).toString(),n=Error(`FFmpeg exited with code ${i}: ${t}`);e.logger.error({error:n,blobId:r,code:i,stderr:t},`Failed to generate video preview`),a(n)}}),s.on(`error`,t=>{e.logger.error({error:t,blobId:r},`Failed to spawn FFmpeg process`),a(t)}),s.stdin.on(`error`,t=>{t.code!==`EPIPE`&&e.logger.error({error:t,blobId:r},`Failed to write to FFmpeg stdin`)}),s.stdin.write(l),s.stdin.end()}catch(t){e.logger.error({error:t,blobId:r},`Failed to generate video preview`),a(t)}}));else if(c.contentType===`application/pdf`)u=await s(`generate-pdf-preview`,async()=>{try{let e=await import(`pdfjs-dist/legacy/build/pdf.mjs`),r=`${t(process.cwd(),`node_modules/pdfjs-dist/standard_fonts`)}/`,i=await(await e.getDocument({data:new Uint8Array(l),standardFontDataUrl:r}).promise).getPage(1),o=i.getViewport({scale:2}),s=a(o.width,o.height),c=s.getContext(`2d`);return await i.render({canvasContext:c,viewport:o,canvas:s}).promise,await n(s.toBuffer(`image/png`)).resize(800,800,{fit:`inside`,withoutEnlargement:!0}).jpeg({quality:85}).toBuffer()}catch(t){throw e.logger.error({error:t,errorMessage:t instanceof Error?t.message:String(t),errorStack:t instanceof Error?t.stack:void 0,errorCode:t?.code,blobId:r},`Failed to generate PDF preview`),t}});else if(c.contentType?.startsWith(`image/`))u=await s(`generate-image-preview`,async()=>await n(l).resize(800,800,{fit:`inside`,withoutEnlargement:!0}).jpeg({quality:85}).toBuffer());else throw Error(`Preview generation not supported for content type: ${c.contentType}`);let d=await s(`store-preview`,async()=>await e.storage.primary.createVariant(r,{preview:!0},u));return e.logger.info({blobId:r,previewId:d.id,contentType:c.contentType},`Preview generated`),d}});export{o as generatePreview};

@@ -1 +0,0 @@
const e=`api`,t=`databases`;export{e as APPOS_DIR,t as DATABASES_DIR};

@@ -1 +0,0 @@
import{__reExport as e}from"../../../_virtual/rolldown_runtime.mjs";import{register as t}from"node:module";import{getNodeAutoInstrumentations as n}from"@opentelemetry/auto-instrumentations-node";import{OTLPTraceExporter as r}from"@opentelemetry/exporter-trace-otlp-http";import{PinoInstrumentation as i}from"@opentelemetry/instrumentation-pino";import{resourceFromAttributes as a}from"@opentelemetry/resources";import{NodeSDK as o}from"@opentelemetry/sdk-node";import{BatchSpanProcessor as s}from"@opentelemetry/sdk-trace-node";import{ATTR_SERVICE_NAME as c,ATTR_SERVICE_VERSION as l}from"@opentelemetry/semantic-conventions";export*from"@opentelemetry/api";var u={};import*as d from"@opentelemetry/api";e(u,d),t(`@opentelemetry/instrumentation/hook.mjs`,import.meta.url);const f={name:process.env.APP_NAME||`appos`,version:process.env.APP_VERSION||`development`};new o({resource:a({[c]:f.name,[l]:f.version}),spanProcessors:[new s(new r({url:process.env.OTEL_EXPORTER_OTLP_ENDPOINT||`http://localhost:4318/v1/traces`,headers:process.env.OTEL_EXPORTER_OTLP_HEADERS?JSON.parse(process.env.OTEL_EXPORTER_OTLP_HEADERS):void 0}))],instrumentations:[n({"@opentelemetry/instrumentation-fs":{enabled:!1},"@opentelemetry/instrumentation-dns":{enabled:!1}}),new i({logHook:(e,t)=>{t[`service.name`]=f.name}})]}).start();export{u as instrumentation_exports};

package/dist/exports/api/{packages/appos/src/instrumentation.d.mts → instrumentation.d.mts}
RENAMED
File without changes