appos 0.3.2-0 → 0.3.4-0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (158)
  1. package/dist/bin/auth-schema-CcqAJY9P.mjs +2 -0
  2. package/dist/bin/better-sqlite3-CuQ3hsWl.mjs +2 -0
  3. package/dist/bin/bun-sql-DGeo-s_M.mjs +2 -0
  4. package/dist/bin/cache-3oO07miM.mjs +2 -0
  5. package/dist/bin/chunk-l9p7A9gZ.mjs +2 -0
  6. package/dist/bin/cockroach-BaICwY7N.mjs +2 -0
  7. package/dist/bin/database-CaysWPpa.mjs +2 -0
  8. package/dist/bin/esm-BvsccvmM.mjs +2 -0
  9. package/dist/bin/esm-CGKzJ7Am.mjs +3 -0
  10. package/dist/bin/event-DnSe3eh0.mjs +8 -0
  11. package/dist/bin/extract-blob-metadata-iqwTl2ft.mjs +170 -0
  12. package/dist/bin/generate-image-variant-Lyx0vhM6.mjs +2 -0
  13. package/dist/bin/generate-preview-0MrKxslA.mjs +2 -0
  14. package/dist/bin/libsql-DQJrZsU9.mjs +2 -0
  15. package/dist/bin/logger-BAGZLUzj.mjs +2 -0
  16. package/dist/bin/main.mjs +1201 -190
  17. package/dist/bin/migrator-B7iNKM8N.mjs +2 -0
  18. package/dist/bin/migrator-BKE1cSQQ.mjs +2 -0
  19. package/dist/bin/migrator-BXcbc9zs.mjs +2 -0
  20. package/dist/bin/migrator-B_XhRWZC.mjs +8 -0
  21. package/dist/bin/migrator-Bz52Gtr8.mjs +2 -0
  22. package/dist/bin/migrator-C7W-cZHB.mjs +2 -0
  23. package/dist/bin/migrator-CEnKyGSW.mjs +2 -0
  24. package/dist/bin/migrator-CHzIIl5X.mjs +2 -0
  25. package/dist/bin/migrator-CR-rjZdM.mjs +2 -0
  26. package/dist/bin/migrator-CjIr1ZCx.mjs +8 -0
  27. package/dist/bin/migrator-Cuubh2dg.mjs +2 -0
  28. package/dist/bin/migrator-D8m-ORbr.mjs +8 -0
  29. package/dist/bin/migrator-DBFwrhZH.mjs +2 -0
  30. package/dist/bin/migrator-DLmhW9u_.mjs +2 -0
  31. package/dist/bin/migrator-DLoHx807.mjs +4 -0
  32. package/dist/bin/migrator-DtN_iS87.mjs +2 -0
  33. package/dist/bin/migrator-Yc57lb3w.mjs +2 -0
  34. package/dist/bin/migrator-cEVXH3xC.mjs +2 -0
  35. package/dist/bin/migrator-hWi-sYIq.mjs +2 -0
  36. package/dist/bin/mysql2-DufFWkj4.mjs +2 -0
  37. package/dist/bin/neon-serverless-5a4h2VFz.mjs +2 -0
  38. package/dist/bin/node-CiOp4xrR.mjs +22 -0
  39. package/dist/bin/node-mssql-DvZGaUkB.mjs +322 -0
  40. package/dist/bin/node-postgres-BqbJVBQY.mjs +2 -0
  41. package/dist/bin/node-postgres-DnhRTTO8.mjs +2 -0
  42. package/dist/bin/open-0ksnL0S8.mjs +2 -0
  43. package/dist/bin/pdf-sUYeFPr4.mjs +14 -0
  44. package/dist/bin/pg-CaH8ptj-.mjs +2 -0
  45. package/dist/bin/pg-core-BLTZt9AH.mjs +8 -0
  46. package/dist/bin/pg-core-CGzidKaA.mjs +2 -0
  47. package/dist/bin/pglite-BJB9z7Ju.mjs +2 -0
  48. package/dist/bin/planetscale-serverless-H3RfLlMK.mjs +13 -0
  49. package/dist/bin/postgres-js-DuOf1eWm.mjs +2 -0
  50. package/dist/bin/purge-attachment-DQXpTtTx.mjs +2 -0
  51. package/dist/bin/purge-audit-logs-BEt2J2gD.mjs +2 -0
  52. package/dist/bin/{purge-unattached-blobs-Duvv8Izd.mjs → purge-unattached-blobs-DOmk4ddJ.mjs} +1 -1
  53. package/dist/bin/query-builder-DSRrR6X_.mjs +8 -0
  54. package/dist/bin/query-builder-V8-LDhvA.mjs +3 -0
  55. package/dist/bin/session-CdB1A-LB.mjs +14 -0
  56. package/dist/bin/session-Cl2e-_i8.mjs +8 -0
  57. package/dist/bin/singlestore-COft6TlR.mjs +8 -0
  58. package/dist/bin/sql-D-eKV1Dn.mjs +2 -0
  59. package/dist/bin/sqlite-cloud-Co9jOn5G.mjs +2 -0
  60. package/dist/bin/sqlite-proxy-Cpu78gJF.mjs +2 -0
  61. package/dist/bin/src-C-oXmCzx.mjs +6 -0
  62. package/dist/bin/table-3zUpWkMg.mjs +2 -0
  63. package/dist/bin/track-db-changes-DWyY5jXm.mjs +2 -0
  64. package/dist/bin/utils-CyoeCJlf.mjs +2 -0
  65. package/dist/bin/utils-EoqYQKy1.mjs +2 -0
  66. package/dist/bin/utils-bsypyqPl.mjs +2 -0
  67. package/dist/bin/vercel-postgres-HWL6xtqi.mjs +2 -0
  68. package/dist/bin/workflow-zxHDyfLq.mjs +2 -0
  69. package/dist/bin/youch-handler-DrYdbUhe.mjs +2 -0
  70. package/dist/bin/zod-MJjkEkRY.mjs +24 -0
  71. package/dist/exports/api/_virtual/rolldown_runtime.mjs +36 -1
  72. package/dist/exports/api/app-context.mjs +24 -1
  73. package/dist/exports/api/auth-schema.mjs +373 -1
  74. package/dist/exports/api/auth.d.mts +4 -0
  75. package/dist/exports/api/auth.mjs +188 -1
  76. package/dist/exports/api/cache.d.mts +2 -2
  77. package/dist/exports/api/cache.mjs +28 -1
  78. package/dist/exports/api/config.mjs +72 -1
  79. package/dist/exports/api/constants.mjs +92 -1
  80. package/dist/exports/api/container.mjs +49 -1
  81. package/dist/exports/api/database.mjs +218 -1
  82. package/dist/exports/api/event.mjs +236 -1
  83. package/dist/exports/api/i18n.mjs +45 -1
  84. package/dist/exports/api/index.mjs +20 -1
  85. package/dist/exports/api/instrumentation.mjs +40 -1
  86. package/dist/exports/api/logger.mjs +26 -1
  87. package/dist/exports/api/mailer.mjs +37 -1
  88. package/dist/exports/api/middleware.mjs +73 -1
  89. package/dist/exports/api/openapi.mjs +507 -1
  90. package/dist/exports/api/orm.mjs +43 -1
  91. package/dist/exports/api/otel.mjs +56 -1
  92. package/dist/exports/api/redis.mjs +41 -1
  93. package/dist/exports/api/storage-schema.mjs +72 -1
  94. package/dist/exports/api/storage.mjs +833 -1
  95. package/dist/exports/api/web/auth.mjs +17 -1
  96. package/dist/exports/api/workflow.mjs +196 -1
  97. package/dist/exports/api/workflows/_virtual/rolldown_runtime.mjs +36 -1
  98. package/dist/exports/api/workflows/api/auth-schema.mjs +373 -1
  99. package/dist/exports/api/workflows/api/auth.d.mts +4 -0
  100. package/dist/exports/api/workflows/api/cache.d.mts +2 -2
  101. package/dist/exports/api/workflows/api/event.mjs +126 -1
  102. package/dist/exports/api/workflows/api/redis.mjs +3 -1
  103. package/dist/exports/api/workflows/api/workflow.mjs +135 -1
  104. package/dist/exports/api/workflows/constants.mjs +23 -1
  105. package/dist/exports/api/workflows/extract-blob-metadata.mjs +132 -1
  106. package/dist/exports/api/workflows/generate-image-variant.d.mts +2 -2
  107. package/dist/exports/api/workflows/generate-image-variant.mjs +118 -1
  108. package/dist/exports/api/workflows/generate-preview.mjs +160 -1
  109. package/dist/exports/api/workflows/index.mjs +3 -1
  110. package/dist/exports/api/workflows/purge-attachment.mjs +34 -1
  111. package/dist/exports/api/workflows/purge-audit-logs.mjs +47 -1
  112. package/dist/exports/api/workflows/purge-unattached-blobs.mjs +46 -1
  113. package/dist/exports/api/workflows/track-db-changes.mjs +110 -1
  114. package/dist/exports/cli/_virtual/rolldown_runtime.mjs +36 -1
  115. package/dist/exports/cli/api/auth-schema.mjs +373 -1
  116. package/dist/exports/cli/api/auth.d.mts +4 -0
  117. package/dist/exports/cli/api/cache.d.mts +2 -2
  118. package/dist/exports/cli/api/event.mjs +126 -1
  119. package/dist/exports/cli/api/redis.mjs +3 -1
  120. package/dist/exports/cli/api/workflow.mjs +135 -1
  121. package/dist/exports/cli/api/workflows/extract-blob-metadata.mjs +132 -1
  122. package/dist/exports/cli/api/workflows/generate-image-variant.mjs +118 -1
  123. package/dist/exports/cli/api/workflows/generate-preview.mjs +160 -1
  124. package/dist/exports/cli/api/workflows/purge-attachment.mjs +34 -1
  125. package/dist/exports/cli/api/workflows/purge-audit-logs.mjs +47 -1
  126. package/dist/exports/cli/api/workflows/purge-unattached-blobs.mjs +46 -1
  127. package/dist/exports/cli/api/workflows/track-db-changes.mjs +110 -1
  128. package/dist/exports/cli/command.d.mts +2 -0
  129. package/dist/exports/cli/command.mjs +43 -1
  130. package/dist/exports/cli/constants.mjs +23 -1
  131. package/dist/exports/cli/index.mjs +3 -1
  132. package/dist/exports/devtools/index.js +4 -1
  133. package/dist/exports/tests/api/auth.d.mts +4 -0
  134. package/dist/exports/tests/api/cache.d.mts +2 -2
  135. package/dist/exports/tests/api/middleware/i18n.mjs +1 -1
  136. package/dist/exports/tests/api/middleware/youch-handler.mjs +1 -1
  137. package/dist/exports/tests/api/openapi.mjs +1 -1
  138. package/dist/exports/tests/api/server.mjs +1 -1
  139. package/dist/exports/tests/api/storage.d.mts +4 -4
  140. package/dist/exports/tests/constants.mjs +1 -1
  141. package/dist/exports/vendors/date.js +1 -1
  142. package/dist/exports/vendors/toolkit.js +1 -1
  143. package/dist/exports/vendors/zod.js +1 -1
  144. package/dist/exports/vitest/globals.mjs +1 -1
  145. package/dist/exports/web/auth.js +75 -1
  146. package/dist/exports/web/i18n.js +45 -1
  147. package/dist/exports/web/index.js +8 -1
  148. package/package.json +19 -18
  149. package/dist/bin/auth-schema-Va0CYicu.mjs +0 -2
  150. package/dist/bin/event-8JibGFH_.mjs +0 -2
  151. package/dist/bin/extract-blob-metadata-DjPfHtQ2.mjs +0 -2
  152. package/dist/bin/generate-image-variant-D5VDFyWj.mjs +0 -2
  153. package/dist/bin/generate-preview-Dssw7w5U.mjs +0 -2
  154. package/dist/bin/purge-attachment-BBPzIxwt.mjs +0 -2
  155. package/dist/bin/purge-audit-logs-BeZy3IFM.mjs +0 -2
  156. package/dist/bin/track-db-changes-CFykw_YO.mjs +0 -2
  157. package/dist/bin/workflow-BNUZrj4F.mjs +0 -2
  158. package/dist/bin/youch-handler-BadUgHb0.mjs +0 -2
@@ -1 +1,132 @@
1
- import{defineWorkflow as e}from"../workflow.mjs";import{join as t}from"node:path";import{ALL_FORMATS as n,BlobSource as r,Input as i}from"mediabunny";import a from"zod";const o=e({input:a.object({blobId:a.string()}),async run({container:e,input:{blobId:a},step:o}){let s=await o(`fetch-blob`,async()=>e.storage.primary.getBlob(a));if(!s)throw Error(`Blob ${a} not found`);let c=await o(`download-blob`,async()=>e.storage.primary.downloadBlob(a));if(!c)throw Error(`Failed to download blob ${a}`);let l={};return s.contentType?.startsWith(`image/`)?l=await o(`extract-image-metadata`,async()=>{let e=(await import(`sharp`)).default,t=await e(c).metadata();return{width:t.width,height:t.height,format:t.format,hasAlpha:t.hasAlpha,space:t.space}}):s.contentType?.startsWith(`video/`)||s.contentType?.startsWith(`audio/`)?l=await o(`extract-media-metadata`,async()=>{let e=new Uint8Array(c),t=new i({source:new r(new Blob([e],{type:s.contentType||`video/mp4`})),formats:n}),a=await t.computeDuration(),o=await t.getMetadataTags(),l={},u={},d=!1,f=!1;try{let e=await t.getPrimaryVideoTrack();if(e){d=!0;let t=e.displayWidth&&e.displayHeight?e.displayWidth/e.displayHeight:null;l={width:e.displayWidth,height:e.displayHeight,rotation:e.rotation,angle:e.rotation,displayAspectRatio:t}}}catch{}try{let e=await t.getPrimaryAudioTrack();e&&(f=!0,u={sampleRate:e.sampleRate,channels:e.numberOfChannels})}catch{}return{duration:a,video:d,audio:f,...l,...u,tags:o}}):s.contentType===`application/pdf`&&(l=await o(`extract-pdf-metadata`,async()=>{try{let e=await import(`pdfjs-dist/legacy/build/pdf.mjs`),n=`${t(process.cwd(),`node_modules/pdfjs-dist/standard_fonts`)}/`,r=await e.getDocument({data:new Uint8Array(c),standardFontDataUrl:n}).promise,i=await r.getMetadata(),a=(await 
r.getPage(1)).getViewport({scale:1}),o=i.info;return{pageCount:r.numPages,width:a.width,height:a.height,title:o?.Title||null,author:o?.Author||null,subject:o?.Subject||null,keywords:o?.Keywords||null,creator:o?.Creator||null,producer:o?.Producer||null,creationDate:o?.CreationDate||null,modificationDate:o?.ModDate||null,pdfVersion:o?.PDFFormatVersion||null}}catch(t){return e.logger.error({error:t,errorMessage:t instanceof Error?t.message:String(t),errorStack:t instanceof Error?t.stack:void 0,errorCode:t?.code,blobId:a},`Failed to extract PDF metadata`),{error:`Failed to extract PDF metadata`,errorMessage:t instanceof Error?t.message:String(t)}}})),await o(`save-metadata`,async()=>{await e.storage.primary.updateBlobMetadata(a,{...l,analyzed:!0})}),e.logger.info({blobId:a,metadata:l},`Metadata extracted`),{...l,analyzed:!0}}});export{o as extractBlobMetadata};
1
+ import { defineWorkflow } from "../workflow.mjs";
2
+ import { join } from "node:path";
3
+ import { ALL_FORMATS, BlobSource, Input } from "mediabunny";
4
+ import z$1 from "zod";
5
+
6
+ //#region src/api/workflows/extract-blob-metadata.ts
7
+ const extractBlobMetadata = defineWorkflow({
8
+ input: z$1.object({ blobId: z$1.string() }),
9
+ async run({ container, input: { blobId }, step }) {
10
+ const blob = await step("fetch-blob", async () => {
11
+ return container.storage.primary.getBlob(blobId);
12
+ });
13
+ if (!blob) throw new Error(`Blob ${blobId} not found`);
14
+ const buffer = await step("download-blob", async () => {
15
+ return container.storage.primary.downloadBlob(blobId);
16
+ });
17
+ if (!buffer) throw new Error(`Failed to download blob ${blobId}`);
18
+ let metadata = {};
19
+ if (blob.contentType?.startsWith("image/")) metadata = await step("extract-image-metadata", async () => {
20
+ const sharp = (await import("sharp")).default;
21
+ const info = await sharp(buffer).metadata();
22
+ return {
23
+ width: info.width,
24
+ height: info.height,
25
+ format: info.format,
26
+ hasAlpha: info.hasAlpha,
27
+ space: info.space
28
+ };
29
+ });
30
+ else if (blob.contentType?.startsWith("video/") || blob.contentType?.startsWith("audio/")) metadata = await step("extract-media-metadata", async () => {
31
+ const uint8Array = new Uint8Array(buffer);
32
+ const input = new Input({
33
+ source: new BlobSource(new Blob([uint8Array], { type: blob.contentType || "video/mp4" })),
34
+ formats: ALL_FORMATS
35
+ });
36
+ const duration = await input.computeDuration();
37
+ const tags = await input.getMetadataTags();
38
+ let videoData = {};
39
+ let audioData = {};
40
+ let hasVideo = false;
41
+ let hasAudio = false;
42
+ try {
43
+ const videoTrack = await input.getPrimaryVideoTrack();
44
+ if (videoTrack) {
45
+ hasVideo = true;
46
+ const displayAspectRatio = videoTrack.displayWidth && videoTrack.displayHeight ? videoTrack.displayWidth / videoTrack.displayHeight : null;
47
+ videoData = {
48
+ width: videoTrack.displayWidth,
49
+ height: videoTrack.displayHeight,
50
+ rotation: videoTrack.rotation,
51
+ angle: videoTrack.rotation,
52
+ displayAspectRatio
53
+ };
54
+ }
55
+ } catch {}
56
+ try {
57
+ const audioTrack = await input.getPrimaryAudioTrack();
58
+ if (audioTrack) {
59
+ hasAudio = true;
60
+ audioData = {
61
+ sampleRate: audioTrack.sampleRate,
62
+ channels: audioTrack.numberOfChannels
63
+ };
64
+ }
65
+ } catch {}
66
+ return {
67
+ duration,
68
+ video: hasVideo,
69
+ audio: hasAudio,
70
+ ...videoData,
71
+ ...audioData,
72
+ tags
73
+ };
74
+ });
75
+ else if (blob.contentType === "application/pdf") metadata = await step("extract-pdf-metadata", async () => {
76
+ try {
77
+ const pdfjsLib = await import("pdfjs-dist/legacy/build/pdf.mjs");
78
+ const standardFontDataUrl = `${join(process.cwd(), "node_modules/pdfjs-dist/standard_fonts")}/`;
79
+ const pdf = await pdfjsLib.getDocument({
80
+ data: new Uint8Array(buffer),
81
+ standardFontDataUrl
82
+ }).promise;
83
+ const pdfMetadata = await pdf.getMetadata();
84
+ const viewport = (await pdf.getPage(1)).getViewport({ scale: 1 });
85
+ const info = pdfMetadata.info;
86
+ return {
87
+ pageCount: pdf.numPages,
88
+ width: viewport.width,
89
+ height: viewport.height,
90
+ title: info?.Title || null,
91
+ author: info?.Author || null,
92
+ subject: info?.Subject || null,
93
+ keywords: info?.Keywords || null,
94
+ creator: info?.Creator || null,
95
+ producer: info?.Producer || null,
96
+ creationDate: info?.CreationDate || null,
97
+ modificationDate: info?.ModDate || null,
98
+ pdfVersion: info?.PDFFormatVersion || null
99
+ };
100
+ } catch (error) {
101
+ container.logger.error({
102
+ error,
103
+ errorMessage: error instanceof Error ? error.message : String(error),
104
+ errorStack: error instanceof Error ? error.stack : void 0,
105
+ errorCode: error?.code,
106
+ blobId
107
+ }, "Failed to extract PDF metadata");
108
+ return {
109
+ error: "Failed to extract PDF metadata",
110
+ errorMessage: error instanceof Error ? error.message : String(error)
111
+ };
112
+ }
113
+ });
114
+ await step("save-metadata", async () => {
115
+ await container.storage.primary.updateBlobMetadata(blobId, {
116
+ ...metadata,
117
+ analyzed: true
118
+ });
119
+ });
120
+ container.logger.info({
121
+ blobId,
122
+ metadata
123
+ }, "Metadata extracted");
124
+ return {
125
+ ...metadata,
126
+ analyzed: true
127
+ };
128
+ }
129
+ });
130
+
131
+ //#endregion
132
+ export { extractBlobMetadata };
@@ -1 +1,118 @@
1
- import{defineWorkflow as e}from"../workflow.mjs";import{z as t}from"zod";const n=t.object({width:t.number().optional(),height:t.number().optional(),fit:t.enum([`cover`,`contain`,`fill`,`inside`,`outside`]).optional(),position:t.enum([`top`,`right top`,`right`,`right bottom`,`bottom`,`left bottom`,`left`,`left top`,`centre`]).optional(),kernel:t.enum([`nearest`,`linear`,`cubic`,`mitchell`,`lanczos2`,`lanczos3`]).optional()}),r=t.object({resize:n.optional(),rotate:t.number().optional(),flip:t.boolean().optional(),flop:t.boolean().optional(),sharpen:t.boolean().optional(),blur:t.number().optional(),grayscale:t.boolean().optional(),format:t.enum([`jpeg`,`png`,`webp`,`avif`,`gif`]).optional(),quality:t.number().min(1).max(100).optional(),preview:t.literal(!0).optional()}),i=e({input:t.object({blobId:t.string(),transformations:r}),async run({container:e,input:{blobId:t,transformations:n},step:r}){if(!await r(`fetch-blob`,async()=>e.storage.primary.getBlob(t)))throw Error(`Blob ${t} not found`);let i=await r(`download-blob`,async()=>e.storage.primary.downloadBlob(t));if(!i)throw Error(`Failed to download blob ${t}`);let a=await r(`apply-transformations`,async()=>{let e=(await import(`sharp`)).default,t=e(i);return n.resize&&(t=t.resize({width:n.resize.width,height:n.resize.height,fit:n.resize.fit,position:n.resize.position,kernel:n.resize.kernel})),n.rotate!==void 0&&(t=t.rotate(n.rotate)),n.flip&&(t=t.flip()),n.flop&&(t=t.flop()),n.sharpen&&(t=t.sharpen()),n.blur!==void 0&&(t=t.blur(n.blur)),n.grayscale&&(t=t.grayscale()),n.format&&(t=t.toFormat(n.format,{quality:n.quality})),t.toBuffer()}),o=await r(`store-variant`,async()=>e.storage.primary.createVariant(t,n,a));return e.logger.info({blobId:t,variantId:o.id},`Image variant generated`),o}});export{i as generateImageVariant};
1
+ import { defineWorkflow } from "../workflow.mjs";
2
+ import { z } from "zod";
3
+
4
+ //#region src/api/workflows/generate-image-variant.ts
5
+ /**
6
+ * Resize options schema for image transformations.
7
+ */
8
+ const resizeSchema = z.object({
9
+ width: z.number().optional(),
10
+ height: z.number().optional(),
11
+ fit: z.enum([
12
+ "cover",
13
+ "contain",
14
+ "fill",
15
+ "inside",
16
+ "outside"
17
+ ]).optional(),
18
+ position: z.enum([
19
+ "top",
20
+ "right top",
21
+ "right",
22
+ "right bottom",
23
+ "bottom",
24
+ "left bottom",
25
+ "left",
26
+ "left top",
27
+ "centre"
28
+ ]).optional(),
29
+ kernel: z.enum([
30
+ "nearest",
31
+ "linear",
32
+ "cubic",
33
+ "mitchell",
34
+ "lanczos2",
35
+ "lanczos3"
36
+ ]).optional()
37
+ });
38
+ /**
39
+ * Image transformations schema.
40
+ * Supports resize, rotate, flip, flop, sharpen, blur, grayscale, format conversion.
41
+ */
42
+ const transformationsSchema = z.object({
43
+ resize: resizeSchema.optional(),
44
+ rotate: z.number().optional(),
45
+ flip: z.boolean().optional(),
46
+ flop: z.boolean().optional(),
47
+ sharpen: z.boolean().optional(),
48
+ blur: z.number().optional(),
49
+ grayscale: z.boolean().optional(),
50
+ format: z.enum([
51
+ "jpeg",
52
+ "png",
53
+ "webp",
54
+ "avif",
55
+ "gif"
56
+ ]).optional(),
57
+ quality: z.number().min(1).max(100).optional(),
58
+ preview: z.literal(true).optional()
59
+ });
60
+ /**
61
+ * Generate image variant workflow. Applies transformations to create variants.
62
+ *
63
+ * Algorithm:
64
+ * 1. Fetch blob by ID
65
+ * 2. Download blob content
66
+ * 3. Apply transformations using Sharp:
67
+ * - Resize with various fit options
68
+ * - Rotate by degrees
69
+ * - Flip/flop (vertical/horizontal mirror)
70
+ * - Sharpen, blur, grayscale filters
71
+ * - Format conversion with quality settings
72
+ * 4. Store variant with transformation metadata
73
+ */
74
+ const generateImageVariant = defineWorkflow({
75
+ input: z.object({
76
+ blobId: z.string(),
77
+ transformations: transformationsSchema
78
+ }),
79
+ async run({ container, input: { blobId, transformations }, step }) {
80
+ if (!await step("fetch-blob", async () => {
81
+ return container.storage.primary.getBlob(blobId);
82
+ })) throw new Error(`Blob ${blobId} not found`);
83
+ const buffer = await step("download-blob", async () => {
84
+ return container.storage.primary.downloadBlob(blobId);
85
+ });
86
+ if (!buffer) throw new Error(`Failed to download blob ${blobId}`);
87
+ const variantBuffer = await step("apply-transformations", async () => {
88
+ const sharp = (await import("sharp")).default;
89
+ let pipeline = sharp(buffer);
90
+ if (transformations.resize) pipeline = pipeline.resize({
91
+ width: transformations.resize.width,
92
+ height: transformations.resize.height,
93
+ fit: transformations.resize.fit,
94
+ position: transformations.resize.position,
95
+ kernel: transformations.resize.kernel
96
+ });
97
+ if (transformations.rotate !== void 0) pipeline = pipeline.rotate(transformations.rotate);
98
+ if (transformations.flip) pipeline = pipeline.flip();
99
+ if (transformations.flop) pipeline = pipeline.flop();
100
+ if (transformations.sharpen) pipeline = pipeline.sharpen();
101
+ if (transformations.blur !== void 0) pipeline = pipeline.blur(transformations.blur);
102
+ if (transformations.grayscale) pipeline = pipeline.grayscale();
103
+ if (transformations.format) pipeline = pipeline.toFormat(transformations.format, { quality: transformations.quality });
104
+ return pipeline.toBuffer();
105
+ });
106
+ const variant = await step("store-variant", async () => {
107
+ return container.storage.primary.createVariant(blobId, transformations, variantBuffer);
108
+ });
109
+ container.logger.info({
110
+ blobId,
111
+ variantId: variant.id
112
+ }, "Image variant generated");
113
+ return variant;
114
+ }
115
+ });
116
+
117
+ //#endregion
118
+ export { generateImageVariant };
@@ -1 +1,160 @@
1
- import{defineWorkflow as e}from"../workflow.mjs";import{join as t}from"node:path";import n from"zod";import{spawn as r}from"node:child_process";const i=e({input:n.object({blobId:n.string(),timeInSeconds:n.number().optional()}),async run({container:e,input:{blobId:n,timeInSeconds:i=1},step:a}){let o=await a(`fetch-blob`,async()=>e.storage.primary.getBlob(n));if(!o)throw Error(`Blob ${n} not found`);let s=await a(`download-blob`,async()=>e.storage.primary.downloadBlob(n));if(!s)throw Error(`Failed to download blob ${n}`);let c=null;if(o.contentType?.startsWith(`video/`))c=await a(`generate-video-preview`,async()=>new Promise((t,a)=>{try{let o=r(`ffmpeg`,[`-i`,`pipe:0`,`-ss`,i.toString(),`-frames:v`,`1`,`-f`,`image2pipe`,`-c:v`,`png`,`pipe:1`]),c=[],l=[];o.stdout.on(`data`,e=>{c.push(e)}),o.stderr.on(`data`,e=>{l.push(e)}),o.on(`close`,async r=>{if(r===0)try{let e=Buffer.concat(c),n=(await import(`sharp`)).default;t(await n(e).jpeg({quality:80}).toBuffer())}catch(t){e.logger.error({error:t,blobId:n},`Failed to convert video frame to JPEG`),a(t)}else{let t=Buffer.concat(l).toString(),i=Error(`FFmpeg exited with code ${r}: ${t}`);e.logger.error({error:i,blobId:n,code:r,stderr:t},`Failed to generate video preview`),a(i)}}),o.on(`error`,t=>{e.logger.error({error:t,blobId:n},`Failed to spawn FFmpeg process`),a(t)}),o.stdin.on(`error`,t=>{t.code!==`EPIPE`&&e.logger.error({error:t,blobId:n},`Failed to write to FFmpeg stdin`)}),o.stdin.write(s),o.stdin.end()}catch(t){e.logger.error({error:t,blobId:n},`Failed to generate video preview`),a(t)}}));else if(o.contentType===`application/pdf`)c=await a(`generate-pdf-preview`,async()=>{try{let e=await import(`pdfjs-dist/legacy/build/pdf.mjs`),{createCanvas:n}=await import(`canvas`),r=(await import(`sharp`)).default,i=`${t(process.cwd(),`node_modules/pdfjs-dist/standard_fonts`)}/`,a=await(await e.getDocument({data:new 
Uint8Array(s),standardFontDataUrl:i}).promise).getPage(1),o=a.getViewport({scale:2}),c=n(o.width,o.height),l=c.getContext(`2d`);return await a.render({canvasContext:l,viewport:o,canvas:c}).promise,await r(c.toBuffer(`image/png`)).resize(800,800,{fit:`inside`,withoutEnlargement:!0}).jpeg({quality:85}).toBuffer()}catch(t){throw e.logger.error({error:t,errorMessage:t instanceof Error?t.message:String(t),errorStack:t instanceof Error?t.stack:void 0,errorCode:t?.code,blobId:n},`Failed to generate PDF preview`),t}});else if(o.contentType?.startsWith(`image/`))c=await a(`generate-image-preview`,async()=>{let e=(await import(`sharp`)).default;return await e(s).resize(800,800,{fit:`inside`,withoutEnlargement:!0}).jpeg({quality:85}).toBuffer()});else throw Error(`Preview generation not supported for content type: ${o.contentType}`);let l=await a(`store-preview`,async()=>await e.storage.primary.createVariant(n,{preview:!0},c));return e.logger.info({blobId:n,previewId:l.id,contentType:o.contentType},`Preview generated`),l}});export{i as generatePreview};
1
+ import { defineWorkflow } from "../workflow.mjs";
2
+ import { join } from "node:path";
3
+ import z$1 from "zod";
4
+ import { spawn } from "node:child_process";
5
+
6
+ //#region src/api/workflows/generate-preview.ts
7
+ /**
8
+ * Generate preview workflow. Creates preview images/thumbnails for PDFs and videos.
9
+ *
10
+ * Algorithm:
11
+ * 1. Fetch blob by ID
12
+ * 2. Download blob content
13
+ * 3. Generate preview based on content type:
14
+ * - Video: Extract frame at specified time using FFmpeg
15
+ * - PDF: Render first page using pdfjs-dist + canvas
16
+ * - Image: Resize to max 800x800 maintaining aspect ratio
17
+ * 4. Store preview as a special variant with "preview" transformation key
18
+ */
19
+ const generatePreview = defineWorkflow({
20
+ input: z$1.object({
21
+ blobId: z$1.string(),
22
+ timeInSeconds: z$1.number().optional()
23
+ }),
24
+ async run({ container, input: { blobId, timeInSeconds = 1 }, step }) {
25
+ const blob = await step("fetch-blob", async () => {
26
+ return container.storage.primary.getBlob(blobId);
27
+ });
28
+ if (!blob) throw new Error(`Blob ${blobId} not found`);
29
+ const buffer = await step("download-blob", async () => {
30
+ return container.storage.primary.downloadBlob(blobId);
31
+ });
32
+ if (!buffer) throw new Error(`Failed to download blob ${blobId}`);
33
+ let previewBuffer = null;
34
+ if (blob.contentType?.startsWith("video/")) previewBuffer = await step("generate-video-preview", async () => {
35
+ return new Promise((resolve, reject) => {
36
+ try {
37
+ const ffmpeg = spawn("ffmpeg", [
38
+ "-i",
39
+ "pipe:0",
40
+ "-ss",
41
+ timeInSeconds.toString(),
42
+ "-frames:v",
43
+ "1",
44
+ "-f",
45
+ "image2pipe",
46
+ "-c:v",
47
+ "png",
48
+ "pipe:1"
49
+ ]);
50
+ const chunks = [];
51
+ const errorChunks = [];
52
+ ffmpeg.stdout.on("data", (chunk) => {
53
+ chunks.push(chunk);
54
+ });
55
+ ffmpeg.stderr.on("data", (chunk) => {
56
+ errorChunks.push(chunk);
57
+ });
58
+ ffmpeg.on("close", async (code) => {
59
+ if (code === 0) try {
60
+ const pngBuffer = Buffer.concat(chunks);
61
+ const sharp = (await import("sharp")).default;
62
+ resolve(await sharp(pngBuffer).jpeg({ quality: 80 }).toBuffer());
63
+ } catch (error) {
64
+ container.logger.error({
65
+ error,
66
+ blobId
67
+ }, "Failed to convert video frame to JPEG");
68
+ reject(error);
69
+ }
70
+ else {
71
+ const errorMessage = Buffer.concat(errorChunks).toString();
72
+ const error = /* @__PURE__ */ new Error(`FFmpeg exited with code ${code}: ${errorMessage}`);
73
+ container.logger.error({
74
+ error,
75
+ blobId,
76
+ code,
77
+ stderr: errorMessage
78
+ }, "Failed to generate video preview");
79
+ reject(error);
80
+ }
81
+ });
82
+ ffmpeg.on("error", (error) => {
83
+ container.logger.error({
84
+ error,
85
+ blobId
86
+ }, "Failed to spawn FFmpeg process");
87
+ reject(error);
88
+ });
89
+ ffmpeg.stdin.on("error", (error) => {
90
+ if (error.code !== "EPIPE") container.logger.error({
91
+ error,
92
+ blobId
93
+ }, "Failed to write to FFmpeg stdin");
94
+ });
95
+ ffmpeg.stdin.write(buffer);
96
+ ffmpeg.stdin.end();
97
+ } catch (error) {
98
+ container.logger.error({
99
+ error,
100
+ blobId
101
+ }, "Failed to generate video preview");
102
+ reject(error);
103
+ }
104
+ });
105
+ });
106
+ else if (blob.contentType === "application/pdf") previewBuffer = await step("generate-pdf-preview", async () => {
107
+ try {
108
+ const pdfjsLib = await import("pdfjs-dist/legacy/build/pdf.mjs");
109
+ const { createCanvas } = await import("canvas");
110
+ const sharp = (await import("sharp")).default;
111
+ const standardFontDataUrl = `${join(process.cwd(), "node_modules/pdfjs-dist/standard_fonts")}/`;
112
+ const page = await (await pdfjsLib.getDocument({
113
+ data: new Uint8Array(buffer),
114
+ standardFontDataUrl
115
+ }).promise).getPage(1);
116
+ const viewport = page.getViewport({ scale: 2 });
117
+ const canvas = createCanvas(viewport.width, viewport.height);
118
+ const context = canvas.getContext("2d");
119
+ await page.render({
120
+ canvasContext: context,
121
+ viewport,
122
+ canvas
123
+ }).promise;
124
+ return await sharp(canvas.toBuffer("image/png")).resize(800, 800, {
125
+ fit: "inside",
126
+ withoutEnlargement: true
127
+ }).jpeg({ quality: 85 }).toBuffer();
128
+ } catch (error) {
129
+ container.logger.error({
130
+ error,
131
+ errorMessage: error instanceof Error ? error.message : String(error),
132
+ errorStack: error instanceof Error ? error.stack : void 0,
133
+ errorCode: error?.code,
134
+ blobId
135
+ }, "Failed to generate PDF preview");
136
+ throw error;
137
+ }
138
+ });
139
+ else if (blob.contentType?.startsWith("image/")) previewBuffer = await step("generate-image-preview", async () => {
140
+ const sharp = (await import("sharp")).default;
141
+ return await sharp(buffer).resize(800, 800, {
142
+ fit: "inside",
143
+ withoutEnlargement: true
144
+ }).jpeg({ quality: 85 }).toBuffer();
145
+ });
146
+ else throw new Error(`Preview generation not supported for content type: ${blob.contentType}`);
147
+ const preview = await step("store-preview", async () => {
148
+ return await container.storage.primary.createVariant(blobId, { preview: true }, previewBuffer);
149
+ });
150
+ container.logger.info({
151
+ blobId,
152
+ previewId: preview.id,
153
+ contentType: blob.contentType
154
+ }, "Preview generated");
155
+ return preview;
156
+ }
157
+ });
158
+
159
+ //#endregion
160
+ export { generatePreview };
@@ -1 +1,34 @@
1
- import{defineWorkflow as e}from"../workflow.mjs";import t from"zod";const n=e({input:t.object({attachmentIds:t.array(t.string()).min(1)}),async run({container:e,input:{attachmentIds:t},step:n}){let r=await n(`fetch-attachments`,async()=>(await e.storage.primary.getAttachmentsByIds(t)).filter(e=>e.blob!==null).map(e=>({attachmentId:e.id,blobId:e.blob.id})));return await n(`delete-attachments`,async()=>{for(let{attachmentId:t}of r)await e.storage.primary.deleteAttachment(t)}),await n(`delete-blobs`,async()=>{for(let{blobId:t}of r)await e.storage.primary.deleteBlob(t)}),e.logger.info({attachmentIds:t,blobCount:r.length},`Attachments and blobs purged`),{purgedCount:r.length}}});export{n as purgeAttachment};
1
import { defineWorkflow } from "../workflow.mjs";
import z$1 from "zod";

//#region src/api/workflows/purge-attachment.ts
/**
 * Purge attachment workflow. Deletes attachments and their associated blobs
 * in the background. This workflow is enqueued by `.purge_later()` calls on
 * attachment objects.
 *
 * Steps:
 * 1. "fetch-attachments" — resolve each attachment id to its blob id,
 *    silently skipping attachments whose blob is already gone.
 * 2. "delete-attachments" — remove attachment rows first so nothing
 *    references a blob that is about to disappear.
 * 3. "delete-blobs" — remove the now-unreferenced blobs.
 */
const purgeAttachment = defineWorkflow({
	input: z$1.object({ attachmentIds: z$1.array(z$1.string()).min(1) }),
	async run({ container, input: { attachmentIds }, step }) {
		const targets = await step("fetch-attachments", async () => {
			const rows = await container.storage.primary.getAttachmentsByIds(attachmentIds);
			const pairs = [];
			for (const row of rows) {
				if (row.blob === null) continue;
				pairs.push({
					attachmentId: row.id,
					blobId: row.blob.id
				});
			}
			return pairs;
		});
		await step("delete-attachments", async () => {
			for (const target of targets) await container.storage.primary.deleteAttachment(target.attachmentId);
		});
		await step("delete-blobs", async () => {
			for (const target of targets) await container.storage.primary.deleteBlob(target.blobId);
		});
		container.logger.info({
			attachmentIds,
			blobCount: targets.length
		}, "Attachments and blobs purged");
		return { purgedCount: targets.length };
	}
});

//#endregion
export { purgeAttachment };
@@ -1 +1,47 @@
1
- import{defineScheduledWorkflow as e}from"../workflow.mjs";import{defineAuthSchema as t}from"../auth-schema.mjs";import{lt as n}from"drizzle-orm";const r=t();function i(t=`0 0 * * *`){return e({crontab:t,async run({container:e,step:t,scheduledTime:i}){let a=e.auth.auditLog?.retentionDays??90,o=new Date(i);o.setDate(o.getDate()-a);let s=o.toISOString(),c=await t(`delete-old-logs`,async()=>{let{auditLogs:t}=r.tables;return(await e.db.primary.delete(t).where(n(t.createdAt,s)).returning({id:t.id})).length});e.logger.info({deletedCount:c,retentionDays:a,cutoffDate:s},`Audit log purge completed`)}})}export{i as definePurgeAuditLogs};
1
import { defineScheduledWorkflow } from "../workflow.mjs";
import { defineAuthSchema } from "../auth-schema.mjs";
import { lt } from "drizzle-orm";

//#region src/api/workflows/purge-audit-logs.ts
const authSchema = defineAuthSchema();
/**
 * Default cron: midnight daily
 */
const DEFAULT_CRON = "0 0 * * *";
/**
 * Default retention period: 90 days
 */
const DEFAULT_RETENTION_DAYS = 90;
/**
 * Defines the purge-audit-logs workflow with a configurable cron schedule.
 *
 * On each tick the workflow:
 * 1. Reads the retention window from `container.auth.auditLog.retentionDays`
 *    (falls back to 90 days when unset).
 * 2. Walks `retentionDays` calendar days back from `scheduledTime` via
 *    `setDate` — calendar-aware, deliberately not fixed-millisecond math.
 * 3. Deletes every audit-log row whose `createdAt` precedes the cutoff and
 *    logs how many rows were removed.
 *
 * @param crontab - Cron expression from auth.auditLog.purgeCron.
 */
function definePurgeAuditLogs(crontab = DEFAULT_CRON) {
	return defineScheduledWorkflow({
		crontab,
		async run({ container, step, scheduledTime }) {
			const retentionDays = container.auth.auditLog?.retentionDays ?? DEFAULT_RETENTION_DAYS;
			const cutoff = new Date(scheduledTime);
			cutoff.setDate(cutoff.getDate() - retentionDays);
			const cutoffISO = cutoff.toISOString();
			const deletedCount = await step("delete-old-logs", async () => {
				const { auditLogs } = authSchema.tables;
				const removed = await container.db.primary
					.delete(auditLogs)
					.where(lt(auditLogs.createdAt, cutoffISO))
					.returning({ id: auditLogs.id });
				return removed.length;
			});
			container.logger.info({
				deletedCount,
				retentionDays,
				cutoffDate: cutoffISO
			}, "Audit log purge completed");
		}
	});
}

//#endregion
export { definePurgeAuditLogs };
@@ -1 +1,46 @@
1
- import{defineScheduledWorkflow as e}from"../workflow.mjs";function t(t=`0 0 * * *`){return e({crontab:t,async run({container:e,step:t}){let n=new Date(Date.now()-2880*60*1e3).toISOString(),r=await t(`fetch-unattached-blobs`,async()=>e.storage.primary.getUnattachedBlobs({olderThan:n})),i=await t(`fetch-pending-blobs`,async()=>e.storage.primary.getPendingBlobs(n)),a=[...r,...i],o=0;for(let n of a)await t(`delete-blob`,async()=>{await e.storage.primary.deleteBlob(n.id),o++});e.logger.info({purgedCount:o,unattachedCount:r.length,pendingCount:i.length},`Orphaned blobs purged`)}})}export{t as definePurgeUnattachedBlobs};
1
import { defineScheduledWorkflow } from "../workflow.mjs";

//#region src/api/workflows/purge-unattached-blobs.ts
/**
 * Default cron: midnight daily
 */
const DEFAULT_CRON = "0 0 * * *";
/**
 * Grace period before a blob counts as orphaned: 48 hours in milliseconds.
 */
const ORPHAN_AGE_MS = 48 * 60 * 60 * 1e3;
/**
 * Defines purge unattached blobs workflow with configurable cron schedule.
 *
 * Algorithm:
 * 1. Fetch unattached blobs older than 48 hours
 * 2. Fetch pending blobs (stuck direct uploads) older than 48 hours
 * 3. Delete each orphaned blob from storage and database
 * 4. Log the purge summary
 *
 * @param crontab - Cron expression for when to run (default: daily at midnight)
 */
function definePurgeUnattachedBlobs(crontab = DEFAULT_CRON) {
	return defineScheduledWorkflow({
		crontab,
		async run({ container, step }) {
			const olderThan = new Date(Date.now() - ORPHAN_AGE_MS).toISOString();
			const unattached = await step("fetch-unattached-blobs", async () => container.storage.primary.getUnattachedBlobs({ olderThan }));
			const pending = await step("fetch-pending-blobs", async () => container.storage.primary.getPendingBlobs(olderThan));
			let purgedCount = 0;
			// NOTE(review): every iteration reuses the literal step name
			// "delete-blob"; if the workflow engine memoizes steps by name,
			// a replay may skip all but the first delete — confirm the
			// engine's step-name uniqueness semantics.
			for (const orphan of [...unattached, ...pending]) {
				await step("delete-blob", async () => {
					await container.storage.primary.deleteBlob(orphan.id);
					purgedCount++;
				});
			}
			container.logger.info({
				purgedCount,
				unattachedCount: unattached.length,
				pendingCount: pending.length
			}, "Orphaned blobs purged");
		}
	});
}

//#endregion
export { definePurgeUnattachedBlobs };
@@ -1 +1,110 @@
1
- import{defineWorkflow as e}from"../workflow.mjs";import{defineAuthSchema as t}from"../auth-schema.mjs";import{dbChangesEvent as n}from"../event.mjs";import{z as r}from"zod";const i=t(),a=r.object({changes:r.array(r.object({_table:r.string(),old:r.record(r.string(),r.unknown()).nullable(),new:r.record(r.string(),r.unknown()).nullable()})),dbName:r.string(),organizationId:r.string().nullable(),requestId:r.string(),sessionId:r.string().nullable(),userId:r.string().nullable()});function o(e){return e.old===null?`INSERT`:e.new===null?`DELETE`:`UPDATE`}const s=e({input:a,async run({container:e,step:t,input:r}){let{dbName:a,changes:s,organizationId:c,userId:l,sessionId:u,requestId:d}=r;if(s.length===0)return{processed:0,audited:0,published:0};let f=new Date().toISOString(),p=0,m=0;for(let r of s){let s=r._table,h=o(r),g=`${a}.${s}`;e.auth.shouldAudit(g)&&(await t(`audit:${g}`,async()=>{await e.db.primary.insert(i.tables.auditLogs).values({tableName:g,action:h,oldData:r.old,newData:r.new,organizationId:c,userId:l,sessionId:u,requestId:d,createdAt:f})}),p++),await t(`event:${g}`,async()=>{await n.emit({action:h,oldData:r.old,newData:r.new,organizationId:c,tableName:g,timestamp:f,userId:l})}),m++}return{processed:s.length,audited:p,published:m}}});export{s as trackDbChanges};
1
import { defineWorkflow } from "../workflow.mjs";
import { defineAuthSchema } from "../auth-schema.mjs";
import { dbChangesEvent } from "../event.mjs";
import { z } from "zod";

//#region src/api/workflows/track-db-changes.ts
const authSchema = defineAuthSchema();
/**
 * Input schema for trackDbChanges workflow.
 * Accepts dbChanges() output directly from .returning() clause.
 */
const trackDbChangesInputSchema = z.object({
	changes: z.array(z.object({
		_table: z.string(),
		old: z.record(z.string(), z.unknown()).nullable(),
		new: z.record(z.string(), z.unknown()).nullable()
	})),
	dbName: z.string(),
	organizationId: z.string().nullable(),
	requestId: z.string(),
	sessionId: z.string().nullable(),
	userId: z.string().nullable()
});
/**
 * Classifies a change row by the nullness of its before/after images.
 * Uppercase SQL-style actions match OCSF/CADF standards.
 *
 * @param change - Change record carrying `old` and `new` row images
 * @returns "INSERT" when old is null, "DELETE" when new is null, else "UPDATE"
 */
const inferAction = (change) => {
	if (change.old === null) return "INSERT";
	return change.new === null ? "DELETE" : "UPDATE";
};
/**
 * Built-in workflow for processing database changes.
 * Handles audit logging and pub/sub notifications with type-safe table filtering.
 *
 * For each change: infer the action, write an audit row when
 * `container.auth.shouldAudit()` allows the qualified table, then emit the
 * change on `dbChangesEvent`. A single timestamp is shared by every audit
 * row and event in the batch.
 *
 * @returns Counts: processed (total changes), audited (rows written to
 *   audit_logs), published (events emitted)
 */
const trackDbChanges = defineWorkflow({
	input: trackDbChangesInputSchema,
	async run({ container, step, input }) {
		const { dbName, changes, organizationId, userId, sessionId, requestId } = input;
		// Empty batch: short-circuit before allocating a timestamp.
		if (changes.length === 0) return {
			processed: 0,
			audited: 0,
			published: 0
		};
		const timestamp = new Date().toISOString();
		let auditedTotal = 0;
		let publishedTotal = 0;
		// NOTE(review): step names repeat when one batch touches the same table
		// more than once (`audit:<table>` / `event:<table>`); confirm the
		// workflow engine tolerates duplicate step names within a single run.
		for (const change of changes) {
			const action = inferAction(change);
			const qualifiedTable = `${dbName}.${change._table}`;
			if (container.auth.shouldAudit(qualifiedTable)) {
				await step(`audit:${qualifiedTable}`, async () => {
					await container.db.primary.insert(authSchema.tables.auditLogs).values({
						tableName: qualifiedTable,
						action,
						oldData: change.old,
						newData: change.new,
						organizationId,
						userId,
						sessionId,
						requestId,
						createdAt: timestamp
					});
				});
				auditedTotal++;
			}
			await step(`event:${qualifiedTable}`, async () => {
				await dbChangesEvent.emit({
					action,
					oldData: change.old,
					newData: change.new,
					organizationId,
					tableName: qualifiedTable,
					timestamp,
					userId
				});
			});
			publishedTotal++;
		}
		return {
			processed: changes.length,
			audited: auditedTotal,
			published: publishedTotal
		};
	}
});

//#endregion
export { trackDbChanges };