appos 0.3.2-0 → 0.3.4-0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bin/auth-schema-CcqAJY9P.mjs +2 -0
- package/dist/bin/better-sqlite3-CuQ3hsWl.mjs +2 -0
- package/dist/bin/bun-sql-DGeo-s_M.mjs +2 -0
- package/dist/bin/cache-3oO07miM.mjs +2 -0
- package/dist/bin/chunk-l9p7A9gZ.mjs +2 -0
- package/dist/bin/cockroach-BaICwY7N.mjs +2 -0
- package/dist/bin/database-CaysWPpa.mjs +2 -0
- package/dist/bin/esm-BvsccvmM.mjs +2 -0
- package/dist/bin/esm-CGKzJ7Am.mjs +3 -0
- package/dist/bin/event-DnSe3eh0.mjs +8 -0
- package/dist/bin/extract-blob-metadata-iqwTl2ft.mjs +170 -0
- package/dist/bin/generate-image-variant-Lyx0vhM6.mjs +2 -0
- package/dist/bin/generate-preview-0MrKxslA.mjs +2 -0
- package/dist/bin/libsql-DQJrZsU9.mjs +2 -0
- package/dist/bin/logger-BAGZLUzj.mjs +2 -0
- package/dist/bin/main.mjs +1201 -190
- package/dist/bin/migrator-B7iNKM8N.mjs +2 -0
- package/dist/bin/migrator-BKE1cSQQ.mjs +2 -0
- package/dist/bin/migrator-BXcbc9zs.mjs +2 -0
- package/dist/bin/migrator-B_XhRWZC.mjs +8 -0
- package/dist/bin/migrator-Bz52Gtr8.mjs +2 -0
- package/dist/bin/migrator-C7W-cZHB.mjs +2 -0
- package/dist/bin/migrator-CEnKyGSW.mjs +2 -0
- package/dist/bin/migrator-CHzIIl5X.mjs +2 -0
- package/dist/bin/migrator-CR-rjZdM.mjs +2 -0
- package/dist/bin/migrator-CjIr1ZCx.mjs +8 -0
- package/dist/bin/migrator-Cuubh2dg.mjs +2 -0
- package/dist/bin/migrator-D8m-ORbr.mjs +8 -0
- package/dist/bin/migrator-DBFwrhZH.mjs +2 -0
- package/dist/bin/migrator-DLmhW9u_.mjs +2 -0
- package/dist/bin/migrator-DLoHx807.mjs +4 -0
- package/dist/bin/migrator-DtN_iS87.mjs +2 -0
- package/dist/bin/migrator-Yc57lb3w.mjs +2 -0
- package/dist/bin/migrator-cEVXH3xC.mjs +2 -0
- package/dist/bin/migrator-hWi-sYIq.mjs +2 -0
- package/dist/bin/mysql2-DufFWkj4.mjs +2 -0
- package/dist/bin/neon-serverless-5a4h2VFz.mjs +2 -0
- package/dist/bin/node-CiOp4xrR.mjs +22 -0
- package/dist/bin/node-mssql-DvZGaUkB.mjs +322 -0
- package/dist/bin/node-postgres-BqbJVBQY.mjs +2 -0
- package/dist/bin/node-postgres-DnhRTTO8.mjs +2 -0
- package/dist/bin/open-0ksnL0S8.mjs +2 -0
- package/dist/bin/pdf-sUYeFPr4.mjs +14 -0
- package/dist/bin/pg-CaH8ptj-.mjs +2 -0
- package/dist/bin/pg-core-BLTZt9AH.mjs +8 -0
- package/dist/bin/pg-core-CGzidKaA.mjs +2 -0
- package/dist/bin/pglite-BJB9z7Ju.mjs +2 -0
- package/dist/bin/planetscale-serverless-H3RfLlMK.mjs +13 -0
- package/dist/bin/postgres-js-DuOf1eWm.mjs +2 -0
- package/dist/bin/purge-attachment-DQXpTtTx.mjs +2 -0
- package/dist/bin/purge-audit-logs-BEt2J2gD.mjs +2 -0
- package/dist/bin/{purge-unattached-blobs-Duvv8Izd.mjs → purge-unattached-blobs-DOmk4ddJ.mjs} +1 -1
- package/dist/bin/query-builder-DSRrR6X_.mjs +8 -0
- package/dist/bin/query-builder-V8-LDhvA.mjs +3 -0
- package/dist/bin/session-CdB1A-LB.mjs +14 -0
- package/dist/bin/session-Cl2e-_i8.mjs +8 -0
- package/dist/bin/singlestore-COft6TlR.mjs +8 -0
- package/dist/bin/sql-D-eKV1Dn.mjs +2 -0
- package/dist/bin/sqlite-cloud-Co9jOn5G.mjs +2 -0
- package/dist/bin/sqlite-proxy-Cpu78gJF.mjs +2 -0
- package/dist/bin/src-C-oXmCzx.mjs +6 -0
- package/dist/bin/table-3zUpWkMg.mjs +2 -0
- package/dist/bin/track-db-changes-DWyY5jXm.mjs +2 -0
- package/dist/bin/utils-CyoeCJlf.mjs +2 -0
- package/dist/bin/utils-EoqYQKy1.mjs +2 -0
- package/dist/bin/utils-bsypyqPl.mjs +2 -0
- package/dist/bin/vercel-postgres-HWL6xtqi.mjs +2 -0
- package/dist/bin/workflow-zxHDyfLq.mjs +2 -0
- package/dist/bin/youch-handler-DrYdbUhe.mjs +2 -0
- package/dist/bin/zod-MJjkEkRY.mjs +24 -0
- package/dist/exports/api/_virtual/rolldown_runtime.mjs +36 -1
- package/dist/exports/api/app-context.mjs +24 -1
- package/dist/exports/api/auth-schema.mjs +373 -1
- package/dist/exports/api/auth.d.mts +4 -0
- package/dist/exports/api/auth.mjs +188 -1
- package/dist/exports/api/cache.d.mts +2 -2
- package/dist/exports/api/cache.mjs +28 -1
- package/dist/exports/api/config.mjs +72 -1
- package/dist/exports/api/constants.mjs +92 -1
- package/dist/exports/api/container.mjs +49 -1
- package/dist/exports/api/database.mjs +218 -1
- package/dist/exports/api/event.mjs +236 -1
- package/dist/exports/api/i18n.mjs +45 -1
- package/dist/exports/api/index.mjs +20 -1
- package/dist/exports/api/instrumentation.mjs +40 -1
- package/dist/exports/api/logger.mjs +26 -1
- package/dist/exports/api/mailer.mjs +37 -1
- package/dist/exports/api/middleware.mjs +73 -1
- package/dist/exports/api/openapi.mjs +507 -1
- package/dist/exports/api/orm.mjs +43 -1
- package/dist/exports/api/otel.mjs +56 -1
- package/dist/exports/api/redis.mjs +41 -1
- package/dist/exports/api/storage-schema.mjs +72 -1
- package/dist/exports/api/storage.mjs +833 -1
- package/dist/exports/api/web/auth.mjs +17 -1
- package/dist/exports/api/workflow.mjs +196 -1
- package/dist/exports/api/workflows/_virtual/rolldown_runtime.mjs +36 -1
- package/dist/exports/api/workflows/api/auth-schema.mjs +373 -1
- package/dist/exports/api/workflows/api/auth.d.mts +4 -0
- package/dist/exports/api/workflows/api/cache.d.mts +2 -2
- package/dist/exports/api/workflows/api/event.mjs +126 -1
- package/dist/exports/api/workflows/api/redis.mjs +3 -1
- package/dist/exports/api/workflows/api/workflow.mjs +135 -1
- package/dist/exports/api/workflows/constants.mjs +23 -1
- package/dist/exports/api/workflows/extract-blob-metadata.mjs +132 -1
- package/dist/exports/api/workflows/generate-image-variant.d.mts +2 -2
- package/dist/exports/api/workflows/generate-image-variant.mjs +118 -1
- package/dist/exports/api/workflows/generate-preview.mjs +160 -1
- package/dist/exports/api/workflows/index.mjs +3 -1
- package/dist/exports/api/workflows/purge-attachment.mjs +34 -1
- package/dist/exports/api/workflows/purge-audit-logs.mjs +47 -1
- package/dist/exports/api/workflows/purge-unattached-blobs.mjs +46 -1
- package/dist/exports/api/workflows/track-db-changes.mjs +110 -1
- package/dist/exports/cli/_virtual/rolldown_runtime.mjs +36 -1
- package/dist/exports/cli/api/auth-schema.mjs +373 -1
- package/dist/exports/cli/api/auth.d.mts +4 -0
- package/dist/exports/cli/api/cache.d.mts +2 -2
- package/dist/exports/cli/api/event.mjs +126 -1
- package/dist/exports/cli/api/redis.mjs +3 -1
- package/dist/exports/cli/api/workflow.mjs +135 -1
- package/dist/exports/cli/api/workflows/extract-blob-metadata.mjs +132 -1
- package/dist/exports/cli/api/workflows/generate-image-variant.mjs +118 -1
- package/dist/exports/cli/api/workflows/generate-preview.mjs +160 -1
- package/dist/exports/cli/api/workflows/purge-attachment.mjs +34 -1
- package/dist/exports/cli/api/workflows/purge-audit-logs.mjs +47 -1
- package/dist/exports/cli/api/workflows/purge-unattached-blobs.mjs +46 -1
- package/dist/exports/cli/api/workflows/track-db-changes.mjs +110 -1
- package/dist/exports/cli/command.d.mts +2 -0
- package/dist/exports/cli/command.mjs +43 -1
- package/dist/exports/cli/constants.mjs +23 -1
- package/dist/exports/cli/index.mjs +3 -1
- package/dist/exports/devtools/index.js +4 -1
- package/dist/exports/tests/api/auth.d.mts +4 -0
- package/dist/exports/tests/api/cache.d.mts +2 -2
- package/dist/exports/tests/api/middleware/i18n.mjs +1 -1
- package/dist/exports/tests/api/middleware/youch-handler.mjs +1 -1
- package/dist/exports/tests/api/openapi.mjs +1 -1
- package/dist/exports/tests/api/server.mjs +1 -1
- package/dist/exports/tests/api/storage.d.mts +4 -4
- package/dist/exports/tests/constants.mjs +1 -1
- package/dist/exports/vendors/date.js +1 -1
- package/dist/exports/vendors/toolkit.js +1 -1
- package/dist/exports/vendors/zod.js +1 -1
- package/dist/exports/vitest/globals.mjs +1 -1
- package/dist/exports/web/auth.js +75 -1
- package/dist/exports/web/i18n.js +45 -1
- package/dist/exports/web/index.js +8 -1
- package/package.json +19 -18
- package/dist/bin/auth-schema-Va0CYicu.mjs +0 -2
- package/dist/bin/event-8JibGFH_.mjs +0 -2
- package/dist/bin/extract-blob-metadata-DjPfHtQ2.mjs +0 -2
- package/dist/bin/generate-image-variant-D5VDFyWj.mjs +0 -2
- package/dist/bin/generate-preview-Dssw7w5U.mjs +0 -2
- package/dist/bin/purge-attachment-BBPzIxwt.mjs +0 -2
- package/dist/bin/purge-audit-logs-BeZy3IFM.mjs +0 -2
- package/dist/bin/track-db-changes-CFykw_YO.mjs +0 -2
- package/dist/bin/workflow-BNUZrj4F.mjs +0 -2
- package/dist/bin/youch-handler-BadUgHb0.mjs +0 -2
@@ -1 +1,132 @@
-import{defineWorkflow
+import { defineWorkflow } from "../workflow.mjs";
+import { join } from "node:path";
+import { ALL_FORMATS, BlobSource, Input } from "mediabunny";
+import z$1 from "zod";
+
+//#region src/api/workflows/extract-blob-metadata.ts
+const extractBlobMetadata = defineWorkflow({
+  input: z$1.object({ blobId: z$1.string() }),
+  async run({ container, input: { blobId }, step }) {
+    const blob = await step("fetch-blob", async () => {
+      return container.storage.primary.getBlob(blobId);
+    });
+    if (!blob) throw new Error(`Blob ${blobId} not found`);
+    const buffer = await step("download-blob", async () => {
+      return container.storage.primary.downloadBlob(blobId);
+    });
+    if (!buffer) throw new Error(`Failed to download blob ${blobId}`);
+    let metadata = {};
+    if (blob.contentType?.startsWith("image/")) metadata = await step("extract-image-metadata", async () => {
+      const sharp = (await import("sharp")).default;
+      const info = await sharp(buffer).metadata();
+      return {
+        width: info.width,
+        height: info.height,
+        format: info.format,
+        hasAlpha: info.hasAlpha,
+        space: info.space
+      };
+    });
+    else if (blob.contentType?.startsWith("video/") || blob.contentType?.startsWith("audio/")) metadata = await step("extract-media-metadata", async () => {
+      const uint8Array = new Uint8Array(buffer);
+      const input = new Input({
+        source: new BlobSource(new Blob([uint8Array], { type: blob.contentType || "video/mp4" })),
+        formats: ALL_FORMATS
+      });
+      const duration = await input.computeDuration();
+      const tags = await input.getMetadataTags();
+      let videoData = {};
+      let audioData = {};
+      let hasVideo = false;
+      let hasAudio = false;
+      try {
+        const videoTrack = await input.getPrimaryVideoTrack();
+        if (videoTrack) {
+          hasVideo = true;
+          const displayAspectRatio = videoTrack.displayWidth && videoTrack.displayHeight ? videoTrack.displayWidth / videoTrack.displayHeight : null;
+          videoData = {
+            width: videoTrack.displayWidth,
+            height: videoTrack.displayHeight,
+            rotation: videoTrack.rotation,
+            angle: videoTrack.rotation,
+            displayAspectRatio
+          };
+        }
+      } catch {}
+      try {
+        const audioTrack = await input.getPrimaryAudioTrack();
+        if (audioTrack) {
+          hasAudio = true;
+          audioData = {
+            sampleRate: audioTrack.sampleRate,
+            channels: audioTrack.numberOfChannels
+          };
+        }
+      } catch {}
+      return {
+        duration,
+        video: hasVideo,
+        audio: hasAudio,
+        ...videoData,
+        ...audioData,
+        tags
+      };
+    });
+    else if (blob.contentType === "application/pdf") metadata = await step("extract-pdf-metadata", async () => {
+      try {
+        const pdfjsLib = await import("pdfjs-dist/legacy/build/pdf.mjs");
+        const standardFontDataUrl = `${join(process.cwd(), "node_modules/pdfjs-dist/standard_fonts")}/`;
+        const pdf = await pdfjsLib.getDocument({
+          data: new Uint8Array(buffer),
+          standardFontDataUrl
+        }).promise;
+        const pdfMetadata = await pdf.getMetadata();
+        const viewport = (await pdf.getPage(1)).getViewport({ scale: 1 });
+        const info = pdfMetadata.info;
+        return {
+          pageCount: pdf.numPages,
+          width: viewport.width,
+          height: viewport.height,
+          title: info?.Title || null,
+          author: info?.Author || null,
+          subject: info?.Subject || null,
+          keywords: info?.Keywords || null,
+          creator: info?.Creator || null,
+          producer: info?.Producer || null,
+          creationDate: info?.CreationDate || null,
+          modificationDate: info?.ModDate || null,
+          pdfVersion: info?.PDFFormatVersion || null
+        };
+      } catch (error) {
+        container.logger.error({
+          error,
+          errorMessage: error instanceof Error ? error.message : String(error),
+          errorStack: error instanceof Error ? error.stack : void 0,
+          errorCode: error?.code,
+          blobId
+        }, "Failed to extract PDF metadata");
+        return {
+          error: "Failed to extract PDF metadata",
+          errorMessage: error instanceof Error ? error.message : String(error)
+        };
+      }
+    });
+    await step("save-metadata", async () => {
+      await container.storage.primary.updateBlobMetadata(blobId, {
+        ...metadata,
+        analyzed: true
+      });
+    });
+    container.logger.info({
+      blobId,
+      metadata
+    }, "Metadata extracted");
+    return {
+      ...metadata,
+      analyzed: true
+    };
+  }
+});
+
+//#endregion
+export { extractBlobMetadata };
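
Note: the media branch above leans on mediabunny. A minimal standalone sketch of that probing pattern, using only calls that appear in the diff (the file path and MIME type are illustrative, not from the package):

    import { readFile } from "node:fs/promises";
    import { ALL_FORMATS, BlobSource, Input } from "mediabunny";

    // Probe a local file the same way the workflow probes a downloaded blob.
    const buffer = await readFile("./sample.mp4"); // illustrative path
    const input = new Input({
      source: new BlobSource(new Blob([new Uint8Array(buffer)], { type: "video/mp4" })),
      formats: ALL_FORMATS
    });
    console.log(await input.computeDuration()); // duration in seconds
    const videoTrack = await input.getPrimaryVideoTrack(); // null for audio-only files
    if (videoTrack) console.log(videoTrack.displayWidth, videoTrack.displayHeight);
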
@@ -1 +1,118 @@
-import{defineWorkflow
+import { defineWorkflow } from "../workflow.mjs";
+import { z } from "zod";
+
+//#region src/api/workflows/generate-image-variant.ts
+/**
+ * Resize options schema for image transformations.
+ */
+const resizeSchema = z.object({
+  width: z.number().optional(),
+  height: z.number().optional(),
+  fit: z.enum([
+    "cover",
+    "contain",
+    "fill",
+    "inside",
+    "outside"
+  ]).optional(),
+  position: z.enum([
+    "top",
+    "right top",
+    "right",
+    "right bottom",
+    "bottom",
+    "left bottom",
+    "left",
+    "left top",
+    "centre"
+  ]).optional(),
+  kernel: z.enum([
+    "nearest",
+    "linear",
+    "cubic",
+    "mitchell",
+    "lanczos2",
+    "lanczos3"
+  ]).optional()
+});
+/**
+ * Image transformations schema.
+ * Supports resize, rotate, flip, flop, sharpen, blur, grayscale, format conversion.
+ */
+const transformationsSchema = z.object({
+  resize: resizeSchema.optional(),
+  rotate: z.number().optional(),
+  flip: z.boolean().optional(),
+  flop: z.boolean().optional(),
+  sharpen: z.boolean().optional(),
+  blur: z.number().optional(),
+  grayscale: z.boolean().optional(),
+  format: z.enum([
+    "jpeg",
+    "png",
+    "webp",
+    "avif",
+    "gif"
+  ]).optional(),
+  quality: z.number().min(1).max(100).optional(),
+  preview: z.literal(true).optional()
+});
+/**
+ * Generate image variant workflow. Applies transformations to create variants.
+ *
+ * Algorithm:
+ * 1. Fetch blob by ID
+ * 2. Download blob content
+ * 3. Apply transformations using Sharp:
+ *    - Resize with various fit options
+ *    - Rotate by degrees
+ *    - Flip/flop (vertical/horizontal mirror)
+ *    - Sharpen, blur, grayscale filters
+ *    - Format conversion with quality settings
+ * 4. Store variant with transformation metadata
+ */
+const generateImageVariant = defineWorkflow({
+  input: z.object({
+    blobId: z.string(),
+    transformations: transformationsSchema
+  }),
+  async run({ container, input: { blobId, transformations }, step }) {
+    if (!await step("fetch-blob", async () => {
+      return container.storage.primary.getBlob(blobId);
+    })) throw new Error(`Blob ${blobId} not found`);
+    const buffer = await step("download-blob", async () => {
+      return container.storage.primary.downloadBlob(blobId);
+    });
+    if (!buffer) throw new Error(`Failed to download blob ${blobId}`);
+    const variantBuffer = await step("apply-transformations", async () => {
+      const sharp = (await import("sharp")).default;
+      let pipeline = sharp(buffer);
+      if (transformations.resize) pipeline = pipeline.resize({
+        width: transformations.resize.width,
+        height: transformations.resize.height,
+        fit: transformations.resize.fit,
+        position: transformations.resize.position,
+        kernel: transformations.resize.kernel
+      });
+      if (transformations.rotate !== void 0) pipeline = pipeline.rotate(transformations.rotate);
+      if (transformations.flip) pipeline = pipeline.flip();
+      if (transformations.flop) pipeline = pipeline.flop();
+      if (transformations.sharpen) pipeline = pipeline.sharpen();
+      if (transformations.blur !== void 0) pipeline = pipeline.blur(transformations.blur);
+      if (transformations.grayscale) pipeline = pipeline.grayscale();
+      if (transformations.format) pipeline = pipeline.toFormat(transformations.format, { quality: transformations.quality });
+      return pipeline.toBuffer();
+    });
+    const variant = await step("store-variant", async () => {
+      return container.storage.primary.createVariant(blobId, transformations, variantBuffer);
+    });
+    container.logger.info({
+      blobId,
+      variantId: variant.id
+    }, "Image variant generated");
+    return variant;
+  }
+});
+
+//#endregion
+export { generateImageVariant };
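
Note: the apply-transformations step is a plain sharp pipeline. A self-contained sketch of the same call chain, with hard-coded values where the workflow reads from `transformations` (paths and values are illustrative):

    import { readFile, writeFile } from "node:fs/promises";
    import sharp from "sharp";

    // Same operation order as apply-transformations: resize, rotate, filters, format.
    const variant = await sharp(await readFile("./photo.png")) // illustrative input
      .resize({ width: 320, height: 320, fit: "cover" })
      .rotate(90)
      .grayscale()
      .toFormat("webp", { quality: 80 })
      .toBuffer();
    await writeFile("./photo-variant.webp", variant);
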
@@ -1 +1,160 @@
-import{defineWorkflow
+import { defineWorkflow } from "../workflow.mjs";
+import { join } from "node:path";
+import z$1 from "zod";
+import { spawn } from "node:child_process";
+
+//#region src/api/workflows/generate-preview.ts
+/**
+ * Generate preview workflow. Creates preview images/thumbnails for PDFs and videos.
+ *
+ * Algorithm:
+ * 1. Fetch blob by ID
+ * 2. Download blob content
+ * 3. Generate preview based on content type:
+ *    - Video: Extract frame at specified time using FFmpeg
+ *    - PDF: Render first page using pdfjs-dist + canvas
+ *    - Image: Resize to max 800x800 maintaining aspect ratio
+ * 4. Store preview as a special variant with "preview" transformation key
+ */
+const generatePreview = defineWorkflow({
+  input: z$1.object({
+    blobId: z$1.string(),
+    timeInSeconds: z$1.number().optional()
+  }),
+  async run({ container, input: { blobId, timeInSeconds = 1 }, step }) {
+    const blob = await step("fetch-blob", async () => {
+      return container.storage.primary.getBlob(blobId);
+    });
+    if (!blob) throw new Error(`Blob ${blobId} not found`);
+    const buffer = await step("download-blob", async () => {
+      return container.storage.primary.downloadBlob(blobId);
+    });
+    if (!buffer) throw new Error(`Failed to download blob ${blobId}`);
+    let previewBuffer = null;
+    if (blob.contentType?.startsWith("video/")) previewBuffer = await step("generate-video-preview", async () => {
+      return new Promise((resolve, reject) => {
+        try {
+          const ffmpeg = spawn("ffmpeg", [
+            "-i",
+            "pipe:0",
+            "-ss",
+            timeInSeconds.toString(),
+            "-frames:v",
+            "1",
+            "-f",
+            "image2pipe",
+            "-c:v",
+            "png",
+            "pipe:1"
+          ]);
+          const chunks = [];
+          const errorChunks = [];
+          ffmpeg.stdout.on("data", (chunk) => {
+            chunks.push(chunk);
+          });
+          ffmpeg.stderr.on("data", (chunk) => {
+            errorChunks.push(chunk);
+          });
+          ffmpeg.on("close", async (code) => {
+            if (code === 0) try {
+              const pngBuffer = Buffer.concat(chunks);
+              const sharp = (await import("sharp")).default;
+              resolve(await sharp(pngBuffer).jpeg({ quality: 80 }).toBuffer());
+            } catch (error) {
+              container.logger.error({
+                error,
+                blobId
+              }, "Failed to convert video frame to JPEG");
+              reject(error);
+            }
+            else {
+              const errorMessage = Buffer.concat(errorChunks).toString();
+              const error = /* @__PURE__ */ new Error(`FFmpeg exited with code ${code}: ${errorMessage}`);
+              container.logger.error({
+                error,
+                blobId,
+                code,
+                stderr: errorMessage
+              }, "Failed to generate video preview");
+              reject(error);
+            }
+          });
+          ffmpeg.on("error", (error) => {
+            container.logger.error({
+              error,
+              blobId
+            }, "Failed to spawn FFmpeg process");
+            reject(error);
+          });
+          ffmpeg.stdin.on("error", (error) => {
+            if (error.code !== "EPIPE") container.logger.error({
+              error,
+              blobId
+            }, "Failed to write to FFmpeg stdin");
+          });
+          ffmpeg.stdin.write(buffer);
+          ffmpeg.stdin.end();
+        } catch (error) {
+          container.logger.error({
+            error,
+            blobId
+          }, "Failed to generate video preview");
+          reject(error);
+        }
+      });
+    });
+    else if (blob.contentType === "application/pdf") previewBuffer = await step("generate-pdf-preview", async () => {
+      try {
+        const pdfjsLib = await import("pdfjs-dist/legacy/build/pdf.mjs");
+        const { createCanvas } = await import("canvas");
+        const sharp = (await import("sharp")).default;
+        const standardFontDataUrl = `${join(process.cwd(), "node_modules/pdfjs-dist/standard_fonts")}/`;
+        const page = await (await pdfjsLib.getDocument({
+          data: new Uint8Array(buffer),
+          standardFontDataUrl
+        }).promise).getPage(1);
+        const viewport = page.getViewport({ scale: 2 });
+        const canvas = createCanvas(viewport.width, viewport.height);
+        const context = canvas.getContext("2d");
+        await page.render({
+          canvasContext: context,
+          viewport,
+          canvas
+        }).promise;
+        return await sharp(canvas.toBuffer("image/png")).resize(800, 800, {
+          fit: "inside",
+          withoutEnlargement: true
+        }).jpeg({ quality: 85 }).toBuffer();
+      } catch (error) {
+        container.logger.error({
+          error,
+          errorMessage: error instanceof Error ? error.message : String(error),
+          errorStack: error instanceof Error ? error.stack : void 0,
+          errorCode: error?.code,
+          blobId
+        }, "Failed to generate PDF preview");
+        throw error;
+      }
+    });
+    else if (blob.contentType?.startsWith("image/")) previewBuffer = await step("generate-image-preview", async () => {
+      const sharp = (await import("sharp")).default;
+      return await sharp(buffer).resize(800, 800, {
+        fit: "inside",
+        withoutEnlargement: true
+      }).jpeg({ quality: 85 }).toBuffer();
+    });
+    else throw new Error(`Preview generation not supported for content type: ${blob.contentType}`);
+    const preview = await step("store-preview", async () => {
+      return await container.storage.primary.createVariant(blobId, { preview: true }, previewBuffer);
+    });
+    container.logger.info({
+      blobId,
+      previewId: preview.id,
+      contentType: blob.contentType
+    }, "Preview generated");
+    return preview;
+  }
+});
+
+//#endregion
+export { generatePreview };
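
Note: the video branch pipes the blob through an external FFmpeg process. A trimmed sketch of the same frame-grab invocation without the workflow plumbing (the input path is illustrative; error handling is reduced to a log):

    import { spawn } from "node:child_process";
    import { readFile } from "node:fs/promises";

    // Extract one PNG frame at t=1s: video in via stdin, image out via stdout.
    const videoBuffer = await readFile("./clip.mp4"); // illustrative path
    const ffmpeg = spawn("ffmpeg", ["-i", "pipe:0", "-ss", "1", "-frames:v", "1", "-f", "image2pipe", "-c:v", "png", "pipe:1"]);
    const chunks = [];
    ffmpeg.stdout.on("data", (chunk) => chunks.push(chunk));
    ffmpeg.on("close", (code) => {
      if (code === 0) console.log("frame bytes:", Buffer.concat(chunks).length);
      else console.error("ffmpeg exited with", code);
    });
    ffmpeg.stdin.write(videoBuffer);
    ffmpeg.stdin.end();
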
@@ -1 +1,34 @@
-import{defineWorkflow
+import { defineWorkflow } from "../workflow.mjs";
+import z$1 from "zod";
+
+//#region src/api/workflows/purge-attachment.ts
+/**
+ * Purge attachment workflow. Deletes attachments and their associated blobs
+ * in the background. This workflow is enqueued by `.purge_later()` calls on
+ * attachment objects.
+ */
+const purgeAttachment = defineWorkflow({
+  input: z$1.object({ attachmentIds: z$1.array(z$1.string()).min(1) }),
+  async run({ container, input: { attachmentIds }, step }) {
+    const attachments = await step("fetch-attachments", async () => {
+      return (await container.storage.primary.getAttachmentsByIds(attachmentIds)).filter((r) => r.blob !== null).map((r) => ({
+        attachmentId: r.id,
+        blobId: r.blob.id
+      }));
+    });
+    await step("delete-attachments", async () => {
+      for (const { attachmentId } of attachments) await container.storage.primary.deleteAttachment(attachmentId);
+    });
+    await step("delete-blobs", async () => {
+      for (const { blobId } of attachments) await container.storage.primary.deleteBlob(blobId);
+    });
+    container.logger.info({
+      attachmentIds,
+      blobCount: attachments.length
+    }, "Attachments and blobs purged");
+    return { purgedCount: attachments.length };
+  }
+});
+
+//#endregion
+export { purgeAttachment };
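
Note: attachments are deleted before their blobs, presumably so that a failure between the two steps leaves orphaned blobs (swept up by the purge-unattached-blobs job below) rather than attachments pointing at deleted blobs. The `.min(1)` on the input schema rejects empty batches up front; a quick check of that contract (the ID is illustrative):

    import { z } from "zod";

    // The workflow's input contract as defined above: at least one attachment ID.
    const input = z.object({ attachmentIds: z.array(z.string()).min(1) });
    console.log(input.safeParse({ attachmentIds: [] }).success);          // false: empty batch rejected
    console.log(input.safeParse({ attachmentIds: ["att_123"] }).success); // true
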
@@ -1 +1,47 @@
-import{defineScheduledWorkflow
+import { defineScheduledWorkflow } from "../workflow.mjs";
+import { defineAuthSchema } from "../auth-schema.mjs";
+import { lt } from "drizzle-orm";
+
+//#region src/api/workflows/purge-audit-logs.ts
+const authSchema = defineAuthSchema();
+/**
+ * Default cron: midnight daily
+ */
+const DEFAULT_CRON = "0 0 * * *";
+/**
+ * Default retention period: 90 days
+ */
+const DEFAULT_RETENTION_DAYS = 90;
+/**
+ * Defines purge audit logs workflow with configurable cron schedule.
+ *
+ * Algorithm:
+ * 1. Calculate cutoff date from scheduledTime minus retentionDays
+ * 2. Delete all audit logs with createdAt before cutoff
+ * 3. Log the number of deleted records
+ *
+ * @param crontab - Cron expression from auth.auditLog.purgeCron.
+ */
+function definePurgeAuditLogs(crontab = DEFAULT_CRON) {
+  return defineScheduledWorkflow({
+    crontab,
+    async run({ container, step, scheduledTime }) {
+      const retentionDays = container.auth.auditLog?.retentionDays ?? DEFAULT_RETENTION_DAYS;
+      const cutoffDate = new Date(scheduledTime);
+      cutoffDate.setDate(cutoffDate.getDate() - retentionDays);
+      const cutoffISO = cutoffDate.toISOString();
+      const deletedCount = await step("delete-old-logs", async () => {
+        const { auditLogs } = authSchema.tables;
+        return (await container.db.primary.delete(auditLogs).where(lt(auditLogs.createdAt, cutoffISO)).returning({ id: auditLogs.id })).length;
+      });
+      container.logger.info({
+        deletedCount,
+        retentionDays,
+        cutoffDate: cutoffISO
+      }, "Audit log purge completed");
+    }
+  });
+}
+
+//#endregion
+export { definePurgeAuditLogs };
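
Note: the retention cutoff is plain Date arithmetic on the scheduled run time. Worked through with the 90-day default (the scheduled time is illustrative; the exact ISO result assumes TZ=UTC):

    // Cutoff computation as in the workflow: scheduledTime minus retentionDays.
    const scheduledTime = "2025-01-01T00:00:00.000Z"; // illustrative
    const retentionDays = 90; // DEFAULT_RETENTION_DAYS
    const cutoffDate = new Date(scheduledTime);
    cutoffDate.setDate(cutoffDate.getDate() - retentionDays);
    console.log(cutoffDate.toISOString()); // "2024-10-03T00:00:00.000Z" with TZ=UTC
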
@@ -1 +1,46 @@
-import{defineScheduledWorkflow
+import { defineScheduledWorkflow } from "../workflow.mjs";
+
+//#region src/api/workflows/purge-unattached-blobs.ts
+/**
+ * Default cron: midnight daily
+ */
+const DEFAULT_CRON = "0 0 * * *";
+/**
+ * Defines purge unattached blobs workflow with configurable cron schedule.
+ *
+ * Algorithm:
+ * 1. Fetch unattached blobs older than 48 hours
+ * 2. Fetch pending blobs (stuck direct uploads) older than 48 hours
+ * 3. Delete each orphaned blob from storage and database
+ * 4. Log the purge summary
+ *
+ * @param crontab - Cron expression for when to run (default: daily at midnight)
+ */
+function definePurgeUnattachedBlobs(crontab = DEFAULT_CRON) {
+  return defineScheduledWorkflow({
+    crontab,
+    async run({ container, step }) {
+      const olderThan = (/* @__PURE__ */ new Date(Date.now() - 2880 * 60 * 1e3)).toISOString();
+      const unattachedBlobs = await step("fetch-unattached-blobs", async () => {
+        return container.storage.primary.getUnattachedBlobs({ olderThan });
+      });
+      const pendingBlobs = await step("fetch-pending-blobs", async () => {
+        return container.storage.primary.getPendingBlobs(olderThan);
+      });
+      const allOrphans = [...unattachedBlobs, ...pendingBlobs];
+      let purgedCount = 0;
+      for (const blob of allOrphans) await step("delete-blob", async () => {
+        await container.storage.primary.deleteBlob(blob.id);
+        purgedCount++;
+      });
+      container.logger.info({
+        purgedCount,
+        unattachedCount: unattachedBlobs.length,
+        pendingCount: pendingBlobs.length
+      }, "Orphaned blobs purged");
+    }
+  });
+}
+
+//#endregion
+export { definePurgeUnattachedBlobs };
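
Note: `2880 * 60 * 1e3` is the 48-hour grace window from the docstring, expressed in milliseconds (2880 minutes x 60 s x 1000 ms):

    // The 48-hour threshold as computed in the workflow; blobs younger than this are left alone.
    const olderThan = new Date(Date.now() - 2880 * 60 * 1e3).toISOString();
    console.log(olderThan); // an ISO timestamp exactly two days in the past
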
@@ -1 +1,110 @@
-import{defineWorkflow
+import { defineWorkflow } from "../workflow.mjs";
+import { defineAuthSchema } from "../auth-schema.mjs";
+import { dbChangesEvent } from "../event.mjs";
+import { z } from "zod";
+
+//#region src/api/workflows/track-db-changes.ts
+const authSchema = defineAuthSchema();
+/**
+ * Input schema for trackDbChanges workflow.
+ * Accepts dbChanges() output directly from .returning() clause.
+ */
+const trackDbChangesInputSchema = z.object({
+  changes: z.array(z.object({
+    _table: z.string(),
+    old: z.record(z.string(), z.unknown()).nullable(),
+    new: z.record(z.string(), z.unknown()).nullable()
+  })),
+  dbName: z.string(),
+  organizationId: z.string().nullable(),
+  requestId: z.string(),
+  sessionId: z.string().nullable(),
+  userId: z.string().nullable()
+});
+/**
+ * Infers action type from change record based on old/new nullness.
+ * Uses uppercase SQL-style actions to match OCSF/CADF standards.
+ *
+ * Algorithm:
+ * - old is null → INSERT (new row created)
+ * - new is null → DELETE (row removed)
+ * - both present → UPDATE (row modified)
+ *
+ * @param change - Change record with old and new values
+ * @returns Action type: "INSERT" if old is null, "DELETE" if new is null, otherwise "UPDATE"
+ */
+function inferAction(change) {
+  if (change.old === null) return "INSERT";
+  if (change.new === null) return "DELETE";
+  return "UPDATE";
+}
+/**
+ * Built-in workflow for processing database changes.
+ * Handles audit logging and pub/sub notifications with type-safe table filtering.
+ *
+ * Algorithm:
+ * 1. For each change in input, infer the action (INSERT/UPDATE/DELETE)
+ * 2. If table is not excluded from audit logging:
+ *    - Insert into audit_logs table with full context
+ * 3. Emit to dbChangesEvent (publishes via Redis if subscribed handlers exist)
+ *
+ * Filtering is configured via:
+ * - `container.auth.shouldAudit()` - Check if table should be audited
+ *
+ * @returns Object with counts: processed (total changes), audited (logged to audit_logs), published (emitted to event)
+ */
+const trackDbChanges = defineWorkflow({
+  input: trackDbChangesInputSchema,
+  async run({ container, step, input }) {
+    const { dbName, changes, organizationId, userId, sessionId, requestId } = input;
+    if (changes.length === 0) return {
+      processed: 0,
+      audited: 0,
+      published: 0
+    };
+    const timestamp = (/* @__PURE__ */ new Date()).toISOString();
+    let audited = 0;
+    let published = 0;
+    for (const change of changes) {
+      const tableName = change._table;
+      const action = inferAction(change);
+      const qualifiedTable = `${dbName}.${tableName}`;
+      if (container.auth.shouldAudit(qualifiedTable)) {
+        await step(`audit:${qualifiedTable}`, async () => {
+          await container.db.primary.insert(authSchema.tables.auditLogs).values({
+            tableName: qualifiedTable,
+            action,
+            oldData: change.old,
+            newData: change.new,
+            organizationId,
+            userId,
+            sessionId,
+            requestId,
+            createdAt: timestamp
+          });
+        });
+        audited++;
+      }
+      await step(`event:${qualifiedTable}`, async () => {
+        await dbChangesEvent.emit({
+          action,
+          oldData: change.old,
+          newData: change.new,
+          organizationId,
+          tableName: qualifiedTable,
+          timestamp,
+          userId
+        });
+      });
+      published++;
+    }
+    return {
+      processed: changes.length,
+      audited,
+      published
+    };
+  }
+});
+
+//#endregion
+export { trackDbChanges };
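
Note: the audit verb is derived purely from the nullness of `old` and `new`. The three cases, checked with minimal records (logic copied from inferAction above; the row values are illustrative):

    // Same logic as inferAction in the diff: old/new nullness decides the verb.
    function inferAction(change) {
      if (change.old === null) return "INSERT";
      if (change.new === null) return "DELETE";
      return "UPDATE";
    }
    console.log(inferAction({ old: null, new: { id: 1 } }));      // "INSERT"
    console.log(inferAction({ old: { id: 1 }, new: null }));      // "DELETE"
    console.log(inferAction({ old: { id: 1 }, new: { id: 2 } })); // "UPDATE"
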