appos 0.3.2-0 → 0.3.3-0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (157)
  1. package/dist/bin/auth-schema-CcqAJY9P.mjs +2 -0
  2. package/dist/bin/better-sqlite3-CuQ3hsWl.mjs +2 -0
  3. package/dist/bin/bun-sql-DGeo-s_M.mjs +2 -0
  4. package/dist/bin/cache-3oO07miM.mjs +2 -0
  5. package/dist/bin/chunk-l9p7A9gZ.mjs +2 -0
  6. package/dist/bin/cockroach-BaICwY7N.mjs +2 -0
  7. package/dist/bin/database-CaysWPpa.mjs +2 -0
  8. package/dist/bin/esm-BvsccvmM.mjs +2 -0
  9. package/dist/bin/esm-CGKzJ7Am.mjs +3 -0
  10. package/dist/bin/event-DnSe3eh0.mjs +8 -0
  11. package/dist/bin/extract-blob-metadata-iqwTl2ft.mjs +170 -0
  12. package/dist/bin/generate-image-variant-Lyx0vhM6.mjs +2 -0
  13. package/dist/bin/generate-preview-0MrKxslA.mjs +2 -0
  14. package/dist/bin/libsql-DQJrZsU9.mjs +2 -0
  15. package/dist/bin/logger-BAGZLUzj.mjs +2 -0
  16. package/dist/bin/main.mjs +1201 -190
  17. package/dist/bin/migrator-B7iNKM8N.mjs +2 -0
  18. package/dist/bin/migrator-BKE1cSQQ.mjs +2 -0
  19. package/dist/bin/migrator-BXcbc9zs.mjs +2 -0
  20. package/dist/bin/migrator-B_XhRWZC.mjs +8 -0
  21. package/dist/bin/migrator-Bz52Gtr8.mjs +2 -0
  22. package/dist/bin/migrator-C7W-cZHB.mjs +2 -0
  23. package/dist/bin/migrator-CEnKyGSW.mjs +2 -0
  24. package/dist/bin/migrator-CHzIIl5X.mjs +2 -0
  25. package/dist/bin/migrator-CR-rjZdM.mjs +2 -0
  26. package/dist/bin/migrator-CjIr1ZCx.mjs +8 -0
  27. package/dist/bin/migrator-Cuubh2dg.mjs +2 -0
  28. package/dist/bin/migrator-D8m-ORbr.mjs +8 -0
  29. package/dist/bin/migrator-DBFwrhZH.mjs +2 -0
  30. package/dist/bin/migrator-DLmhW9u_.mjs +2 -0
  31. package/dist/bin/migrator-DLoHx807.mjs +4 -0
  32. package/dist/bin/migrator-DtN_iS87.mjs +2 -0
  33. package/dist/bin/migrator-Yc57lb3w.mjs +2 -0
  34. package/dist/bin/migrator-cEVXH3xC.mjs +2 -0
  35. package/dist/bin/migrator-hWi-sYIq.mjs +2 -0
  36. package/dist/bin/mysql2-DufFWkj4.mjs +2 -0
  37. package/dist/bin/neon-serverless-5a4h2VFz.mjs +2 -0
  38. package/dist/bin/node-CiOp4xrR.mjs +22 -0
  39. package/dist/bin/node-mssql-DvZGaUkB.mjs +322 -0
  40. package/dist/bin/node-postgres-BqbJVBQY.mjs +2 -0
  41. package/dist/bin/node-postgres-DnhRTTO8.mjs +2 -0
  42. package/dist/bin/open-0ksnL0S8.mjs +2 -0
  43. package/dist/bin/pdf-sUYeFPr4.mjs +14 -0
  44. package/dist/bin/pg-CaH8ptj-.mjs +2 -0
  45. package/dist/bin/pg-core-BLTZt9AH.mjs +8 -0
  46. package/dist/bin/pg-core-CGzidKaA.mjs +2 -0
  47. package/dist/bin/pglite-BJB9z7Ju.mjs +2 -0
  48. package/dist/bin/planetscale-serverless-H3RfLlMK.mjs +13 -0
  49. package/dist/bin/postgres-js-DuOf1eWm.mjs +2 -0
  50. package/dist/bin/purge-attachment-DQXpTtTx.mjs +2 -0
  51. package/dist/bin/purge-audit-logs-BEt2J2gD.mjs +2 -0
  52. package/dist/bin/{purge-unattached-blobs-Duvv8Izd.mjs → purge-unattached-blobs-DOmk4ddJ.mjs} +1 -1
  53. package/dist/bin/query-builder-DSRrR6X_.mjs +8 -0
  54. package/dist/bin/query-builder-V8-LDhvA.mjs +3 -0
  55. package/dist/bin/session-CdB1A-LB.mjs +14 -0
  56. package/dist/bin/session-Cl2e-_i8.mjs +8 -0
  57. package/dist/bin/singlestore-COft6TlR.mjs +8 -0
  58. package/dist/bin/sql-D-eKV1Dn.mjs +2 -0
  59. package/dist/bin/sqlite-cloud-Co9jOn5G.mjs +2 -0
  60. package/dist/bin/sqlite-proxy-Cpu78gJF.mjs +2 -0
  61. package/dist/bin/src-C-oXmCzx.mjs +6 -0
  62. package/dist/bin/table-3zUpWkMg.mjs +2 -0
  63. package/dist/bin/track-db-changes-DWyY5jXm.mjs +2 -0
  64. package/dist/bin/utils-CyoeCJlf.mjs +2 -0
  65. package/dist/bin/utils-EoqYQKy1.mjs +2 -0
  66. package/dist/bin/utils-bsypyqPl.mjs +2 -0
  67. package/dist/bin/vercel-postgres-HWL6xtqi.mjs +2 -0
  68. package/dist/bin/workflow-zxHDyfLq.mjs +2 -0
  69. package/dist/bin/youch-handler-DrYdbUhe.mjs +2 -0
  70. package/dist/bin/zod-MJjkEkRY.mjs +24 -0
  71. package/dist/exports/api/_virtual/rolldown_runtime.mjs +36 -1
  72. package/dist/exports/api/app-context.mjs +24 -1
  73. package/dist/exports/api/auth-schema.mjs +373 -1
  74. package/dist/exports/api/auth.d.mts +4 -0
  75. package/dist/exports/api/auth.mjs +188 -1
  76. package/dist/exports/api/cache.d.mts +2 -2
  77. package/dist/exports/api/cache.mjs +28 -1
  78. package/dist/exports/api/config.mjs +72 -1
  79. package/dist/exports/api/constants.mjs +92 -1
  80. package/dist/exports/api/container.mjs +49 -1
  81. package/dist/exports/api/database.mjs +218 -1
  82. package/dist/exports/api/event.mjs +236 -1
  83. package/dist/exports/api/i18n.mjs +45 -1
  84. package/dist/exports/api/index.mjs +20 -1
  85. package/dist/exports/api/instrumentation.mjs +40 -1
  86. package/dist/exports/api/logger.mjs +26 -1
  87. package/dist/exports/api/mailer.mjs +37 -1
  88. package/dist/exports/api/middleware.mjs +73 -1
  89. package/dist/exports/api/openapi.mjs +507 -1
  90. package/dist/exports/api/orm.mjs +43 -1
  91. package/dist/exports/api/otel.mjs +56 -1
  92. package/dist/exports/api/redis.mjs +41 -1
  93. package/dist/exports/api/storage-schema.mjs +72 -1
  94. package/dist/exports/api/storage.mjs +833 -1
  95. package/dist/exports/api/web/auth.mjs +17 -1
  96. package/dist/exports/api/workflow.mjs +196 -1
  97. package/dist/exports/api/workflows/_virtual/rolldown_runtime.mjs +36 -1
  98. package/dist/exports/api/workflows/api/auth-schema.mjs +373 -1
  99. package/dist/exports/api/workflows/api/auth.d.mts +4 -0
  100. package/dist/exports/api/workflows/api/cache.d.mts +2 -2
  101. package/dist/exports/api/workflows/api/event.mjs +126 -1
  102. package/dist/exports/api/workflows/api/redis.mjs +3 -1
  103. package/dist/exports/api/workflows/api/workflow.mjs +135 -1
  104. package/dist/exports/api/workflows/constants.mjs +23 -1
  105. package/dist/exports/api/workflows/extract-blob-metadata.mjs +132 -1
  106. package/dist/exports/api/workflows/generate-image-variant.d.mts +2 -2
  107. package/dist/exports/api/workflows/generate-image-variant.mjs +118 -1
  108. package/dist/exports/api/workflows/generate-preview.mjs +160 -1
  109. package/dist/exports/api/workflows/index.mjs +3 -1
  110. package/dist/exports/api/workflows/purge-attachment.mjs +34 -1
  111. package/dist/exports/api/workflows/purge-audit-logs.mjs +47 -1
  112. package/dist/exports/api/workflows/purge-unattached-blobs.mjs +46 -1
  113. package/dist/exports/api/workflows/track-db-changes.mjs +110 -1
  114. package/dist/exports/cli/_virtual/rolldown_runtime.mjs +36 -1
  115. package/dist/exports/cli/api/auth-schema.mjs +373 -1
  116. package/dist/exports/cli/api/auth.d.mts +4 -0
  117. package/dist/exports/cli/api/cache.d.mts +2 -2
  118. package/dist/exports/cli/api/event.mjs +126 -1
  119. package/dist/exports/cli/api/redis.mjs +3 -1
  120. package/dist/exports/cli/api/workflow.mjs +135 -1
  121. package/dist/exports/cli/api/workflows/extract-blob-metadata.mjs +132 -1
  122. package/dist/exports/cli/api/workflows/generate-image-variant.mjs +118 -1
  123. package/dist/exports/cli/api/workflows/generate-preview.mjs +160 -1
  124. package/dist/exports/cli/api/workflows/purge-attachment.mjs +34 -1
  125. package/dist/exports/cli/api/workflows/purge-audit-logs.mjs +47 -1
  126. package/dist/exports/cli/api/workflows/purge-unattached-blobs.mjs +46 -1
  127. package/dist/exports/cli/api/workflows/track-db-changes.mjs +110 -1
  128. package/dist/exports/cli/command.d.mts +2 -0
  129. package/dist/exports/cli/command.mjs +43 -1
  130. package/dist/exports/cli/constants.mjs +23 -1
  131. package/dist/exports/cli/index.mjs +3 -1
  132. package/dist/exports/devtools/index.js +4 -1
  133. package/dist/exports/tests/api/auth.d.mts +4 -0
  134. package/dist/exports/tests/api/cache.d.mts +2 -2
  135. package/dist/exports/tests/api/middleware/i18n.mjs +1 -1
  136. package/dist/exports/tests/api/middleware/youch-handler.mjs +1 -1
  137. package/dist/exports/tests/api/openapi.mjs +1 -1
  138. package/dist/exports/tests/api/server.mjs +1 -1
  139. package/dist/exports/tests/constants.mjs +1 -1
  140. package/dist/exports/vendors/date.js +1 -1
  141. package/dist/exports/vendors/toolkit.js +1 -1
  142. package/dist/exports/vendors/zod.js +1 -1
  143. package/dist/exports/vitest/globals.mjs +1 -1
  144. package/dist/exports/web/auth.js +75 -1
  145. package/dist/exports/web/i18n.js +45 -1
  146. package/dist/exports/web/index.js +8 -1
  147. package/package.json +19 -17
  148. package/dist/bin/auth-schema-Va0CYicu.mjs +0 -2
  149. package/dist/bin/event-8JibGFH_.mjs +0 -2
  150. package/dist/bin/extract-blob-metadata-DjPfHtQ2.mjs +0 -2
  151. package/dist/bin/generate-image-variant-D5VDFyWj.mjs +0 -2
  152. package/dist/bin/generate-preview-Dssw7w5U.mjs +0 -2
  153. package/dist/bin/purge-attachment-BBPzIxwt.mjs +0 -2
  154. package/dist/bin/purge-audit-logs-BeZy3IFM.mjs +0 -2
  155. package/dist/bin/track-db-changes-CFykw_YO.mjs +0 -2
  156. package/dist/bin/workflow-BNUZrj4F.mjs +0 -2
  157. package/dist/bin/youch-handler-BadUgHb0.mjs +0 -2
@@ -1 +1,160 @@
- import{defineWorkflow as e}from"../workflow.mjs";import t from"zod";import{join as n}from"node:path";import{spawn as r}from"node:child_process";const i=e({input:t.object({blobId:t.string(),timeInSeconds:t.number().optional()}),async run({container:e,input:{blobId:t,timeInSeconds:i=1},step:a}){let o=await a(`fetch-blob`,async()=>e.storage.primary.getBlob(t));if(!o)throw Error(`Blob ${t} not found`);let s=await a(`download-blob`,async()=>e.storage.primary.downloadBlob(t));if(!s)throw Error(`Failed to download blob ${t}`);let c=null;if(o.contentType?.startsWith(`video/`))c=await a(`generate-video-preview`,async()=>new Promise((n,a)=>{try{let o=r(`ffmpeg`,[`-i`,`pipe:0`,`-ss`,i.toString(),`-frames:v`,`1`,`-f`,`image2pipe`,`-c:v`,`png`,`pipe:1`]),c=[],l=[];o.stdout.on(`data`,e=>{c.push(e)}),o.stderr.on(`data`,e=>{l.push(e)}),o.on(`close`,async r=>{if(r===0)try{let e=Buffer.concat(c),t=(await import(`sharp`)).default;n(await t(e).jpeg({quality:80}).toBuffer())}catch(n){e.logger.error({error:n,blobId:t},`Failed to convert video frame to JPEG`),a(n)}else{let n=Buffer.concat(l).toString(),i=Error(`FFmpeg exited with code ${r}: ${n}`);e.logger.error({error:i,blobId:t,code:r,stderr:n},`Failed to generate video preview`),a(i)}}),o.on(`error`,n=>{e.logger.error({error:n,blobId:t},`Failed to spawn FFmpeg process`),a(n)}),o.stdin.on(`error`,n=>{n.code!==`EPIPE`&&e.logger.error({error:n,blobId:t},`Failed to write to FFmpeg stdin`)}),o.stdin.write(s),o.stdin.end()}catch(n){e.logger.error({error:n,blobId:t},`Failed to generate video preview`),a(n)}}));else if(o.contentType===`application/pdf`)c=await a(`generate-pdf-preview`,async()=>{try{let e=await import(`pdfjs-dist/legacy/build/pdf.mjs`),{createCanvas:t}=await import(`canvas`),r=(await import(`sharp`)).default,i=`${n(process.cwd(),`node_modules/pdfjs-dist/standard_fonts`)}/`,a=await(await e.getDocument({data:new Uint8Array(s),standardFontDataUrl:i}).promise).getPage(1),o=a.getViewport({scale:2}),c=t(o.width,o.height),l=c.getContext(`2d`);return await a.render({canvasContext:l,viewport:o,canvas:c}).promise,await r(c.toBuffer(`image/png`)).resize(800,800,{fit:`inside`,withoutEnlargement:!0}).jpeg({quality:85}).toBuffer()}catch(n){throw e.logger.error({error:n,errorMessage:n instanceof Error?n.message:String(n),errorStack:n instanceof Error?n.stack:void 0,errorCode:n?.code,blobId:t},`Failed to generate PDF preview`),n}});else if(o.contentType?.startsWith(`image/`))c=await a(`generate-image-preview`,async()=>{let e=(await import(`sharp`)).default;return await e(s).resize(800,800,{fit:`inside`,withoutEnlargement:!0}).jpeg({quality:85}).toBuffer()});else throw Error(`Preview generation not supported for content type: ${o.contentType}`);let l=await a(`store-preview`,async()=>await e.storage.primary.createVariant(t,{preview:!0},c));return e.logger.info({blobId:t,previewId:l.id,contentType:o.contentType},`Preview generated`),l}});export{i as generatePreview};
+ import { defineWorkflow } from "./api/workflow.mjs";
+ import z$1 from "zod";
+ import { join } from "node:path";
+ import { spawn } from "node:child_process";
+
+ //#region src/api/workflows/generate-preview.ts
+ /**
+ * Generate preview workflow. Creates preview images/thumbnails for PDFs and videos.
+ *
+ * Algorithm:
+ * 1. Fetch blob by ID
+ * 2. Download blob content
+ * 3. Generate preview based on content type:
+ * - Video: Extract frame at specified time using FFmpeg
+ * - PDF: Render first page using pdfjs-dist + canvas
+ * - Image: Resize to max 800x800 maintaining aspect ratio
+ * 4. Store preview as a special variant with "preview" transformation key
+ */
+ const generatePreview = defineWorkflow({
+ input: z$1.object({
+ blobId: z$1.string(),
+ timeInSeconds: z$1.number().optional()
+ }),
+ async run({ container, input: { blobId, timeInSeconds = 1 }, step }) {
+ const blob = await step("fetch-blob", async () => {
+ return container.storage.primary.getBlob(blobId);
+ });
+ if (!blob) throw new Error(`Blob ${blobId} not found`);
+ const buffer = await step("download-blob", async () => {
+ return container.storage.primary.downloadBlob(blobId);
+ });
+ if (!buffer) throw new Error(`Failed to download blob ${blobId}`);
+ let previewBuffer = null;
+ if (blob.contentType?.startsWith("video/")) previewBuffer = await step("generate-video-preview", async () => {
+ return new Promise((resolve, reject) => {
+ try {
+ const ffmpeg = spawn("ffmpeg", [
+ "-i",
+ "pipe:0",
+ "-ss",
+ timeInSeconds.toString(),
+ "-frames:v",
+ "1",
+ "-f",
+ "image2pipe",
+ "-c:v",
+ "png",
+ "pipe:1"
+ ]);
+ const chunks = [];
+ const errorChunks = [];
+ ffmpeg.stdout.on("data", (chunk) => {
+ chunks.push(chunk);
+ });
+ ffmpeg.stderr.on("data", (chunk) => {
+ errorChunks.push(chunk);
+ });
+ ffmpeg.on("close", async (code) => {
+ if (code === 0) try {
+ const pngBuffer = Buffer.concat(chunks);
+ const sharp = (await import("sharp")).default;
+ resolve(await sharp(pngBuffer).jpeg({ quality: 80 }).toBuffer());
+ } catch (error) {
+ container.logger.error({
+ error,
+ blobId
+ }, "Failed to convert video frame to JPEG");
+ reject(error);
+ }
+ else {
+ const errorMessage = Buffer.concat(errorChunks).toString();
+ const error = /* @__PURE__ */ new Error(`FFmpeg exited with code ${code}: ${errorMessage}`);
+ container.logger.error({
+ error,
+ blobId,
+ code,
+ stderr: errorMessage
+ }, "Failed to generate video preview");
+ reject(error);
+ }
+ });
+ ffmpeg.on("error", (error) => {
+ container.logger.error({
+ error,
+ blobId
+ }, "Failed to spawn FFmpeg process");
+ reject(error);
+ });
+ ffmpeg.stdin.on("error", (error) => {
+ if (error.code !== "EPIPE") container.logger.error({
+ error,
+ blobId
+ }, "Failed to write to FFmpeg stdin");
+ });
+ ffmpeg.stdin.write(buffer);
+ ffmpeg.stdin.end();
+ } catch (error) {
+ container.logger.error({
+ error,
+ blobId
+ }, "Failed to generate video preview");
+ reject(error);
+ }
+ });
+ });
+ else if (blob.contentType === "application/pdf") previewBuffer = await step("generate-pdf-preview", async () => {
+ try {
+ const pdfjsLib = await import("pdfjs-dist/legacy/build/pdf.mjs");
+ const { createCanvas } = await import("canvas");
+ const sharp = (await import("sharp")).default;
+ const standardFontDataUrl = `${join(process.cwd(), "node_modules/pdfjs-dist/standard_fonts")}/`;
+ const page = await (await pdfjsLib.getDocument({
+ data: new Uint8Array(buffer),
+ standardFontDataUrl
+ }).promise).getPage(1);
+ const viewport = page.getViewport({ scale: 2 });
+ const canvas = createCanvas(viewport.width, viewport.height);
+ const context = canvas.getContext("2d");
+ await page.render({
+ canvasContext: context,
+ viewport,
+ canvas
+ }).promise;
+ return await sharp(canvas.toBuffer("image/png")).resize(800, 800, {
+ fit: "inside",
+ withoutEnlargement: true
+ }).jpeg({ quality: 85 }).toBuffer();
+ } catch (error) {
+ container.logger.error({
+ error,
+ errorMessage: error instanceof Error ? error.message : String(error),
+ errorStack: error instanceof Error ? error.stack : void 0,
+ errorCode: error?.code,
+ blobId
+ }, "Failed to generate PDF preview");
+ throw error;
+ }
+ });
+ else if (blob.contentType?.startsWith("image/")) previewBuffer = await step("generate-image-preview", async () => {
+ const sharp = (await import("sharp")).default;
+ return await sharp(buffer).resize(800, 800, {
+ fit: "inside",
+ withoutEnlargement: true
+ }).jpeg({ quality: 85 }).toBuffer();
+ });
+ else throw new Error(`Preview generation not supported for content type: ${blob.contentType}`);
+ const preview = await step("store-preview", async () => {
+ return await container.storage.primary.createVariant(blobId, { preview: true }, previewBuffer);
+ });
+ container.logger.info({
+ blobId,
+ previewId: preview.id,
+ contentType: blob.contentType
+ }, "Preview generated");
+ return preview;
+ }
+ });
+
+ //#endregion
+ export { generatePreview };
@@ -1 +1,3 @@
- import{trackDbChanges as e}from"./track-db-changes.mjs";export{e as trackDbChanges};
+ import { trackDbChanges } from "./track-db-changes.mjs";
+
+ export { trackDbChanges };
@@ -1 +1,34 @@
- import{defineWorkflow as e}from"../workflow.mjs";import t from"zod";const n=e({input:t.object({attachmentIds:t.array(t.string()).min(1)}),async run({container:e,input:{attachmentIds:t},step:n}){let r=await n(`fetch-attachments`,async()=>(await e.storage.primary.getAttachmentsByIds(t)).filter(e=>e.blob!==null).map(e=>({attachmentId:e.id,blobId:e.blob.id})));return await n(`delete-attachments`,async()=>{for(let{attachmentId:t}of r)await e.storage.primary.deleteAttachment(t)}),await n(`delete-blobs`,async()=>{for(let{blobId:t}of r)await e.storage.primary.deleteBlob(t)}),e.logger.info({attachmentIds:t,blobCount:r.length},`Attachments and blobs purged`),{purgedCount:r.length}}});export{n as purgeAttachment};
+ import { defineWorkflow } from "./api/workflow.mjs";
+ import z$1 from "zod";
+
+ //#region src/api/workflows/purge-attachment.ts
+ /**
+ * Purge attachment workflow. Deletes attachments and their associated blobs
+ * in the background. This workflow is enqueued by `.purge_later()` calls on
+ * attachment objects.
+ */
+ const purgeAttachment = defineWorkflow({
+ input: z$1.object({ attachmentIds: z$1.array(z$1.string()).min(1) }),
+ async run({ container, input: { attachmentIds }, step }) {
+ const attachments = await step("fetch-attachments", async () => {
+ return (await container.storage.primary.getAttachmentsByIds(attachmentIds)).filter((r) => r.blob !== null).map((r) => ({
+ attachmentId: r.id,
+ blobId: r.blob.id
+ }));
+ });
+ await step("delete-attachments", async () => {
+ for (const { attachmentId } of attachments) await container.storage.primary.deleteAttachment(attachmentId);
+ });
+ await step("delete-blobs", async () => {
+ for (const { blobId } of attachments) await container.storage.primary.deleteBlob(blobId);
+ });
+ container.logger.info({
+ attachmentIds,
+ blobCount: attachments.length
+ }, "Attachments and blobs purged");
+ return { purgedCount: attachments.length };
+ }
+ });
+
+ //#endregion
+ export { purgeAttachment };
@@ -1 +1,47 @@
- import{defineAuthSchema as e}from"../auth-schema.mjs";import{defineScheduledWorkflow as t}from"../workflow.mjs";import{lt as n}from"drizzle-orm";const r=e();function i(e=`0 0 * * *`){return t({crontab:e,async run({container:e,step:t,scheduledTime:i}){let a=e.auth.auditLog?.retentionDays??90,o=new Date(i);o.setDate(o.getDate()-a);let s=o.toISOString(),c=await t(`delete-old-logs`,async()=>{let{auditLogs:t}=r.tables;return(await e.db.primary.delete(t).where(n(t.createdAt,s)).returning({id:t.id})).length});e.logger.info({deletedCount:c,retentionDays:a,cutoffDate:s},`Audit log purge completed`)}})}export{i as definePurgeAuditLogs};
+ import { defineAuthSchema } from "./api/auth-schema.mjs";
+ import { defineScheduledWorkflow } from "./api/workflow.mjs";
+ import { lt } from "drizzle-orm";
+
+ //#region src/api/workflows/purge-audit-logs.ts
+ const authSchema = defineAuthSchema();
+ /**
+ * Default cron: midnight daily
+ */
+ const DEFAULT_CRON = "0 0 * * *";
+ /**
+ * Default retention period: 90 days
+ */
+ const DEFAULT_RETENTION_DAYS = 90;
+ /**
+ * Defines purge audit logs workflow with configurable cron schedule.
+ *
+ * Algorithm:
+ * 1. Calculate cutoff date from scheduledTime minus retentionDays
+ * 2. Delete all audit logs with createdAt before cutoff
+ * 3. Log the number of deleted records
+ *
+ * @param crontab - Cron expression from auth.auditLog.purgeCron.
+ */
+ function definePurgeAuditLogs(crontab = DEFAULT_CRON) {
+ return defineScheduledWorkflow({
+ crontab,
+ async run({ container, step, scheduledTime }) {
+ const retentionDays = container.auth.auditLog?.retentionDays ?? DEFAULT_RETENTION_DAYS;
+ const cutoffDate = new Date(scheduledTime);
+ cutoffDate.setDate(cutoffDate.getDate() - retentionDays);
+ const cutoffISO = cutoffDate.toISOString();
+ const deletedCount = await step("delete-old-logs", async () => {
+ const { auditLogs } = authSchema.tables;
+ return (await container.db.primary.delete(auditLogs).where(lt(auditLogs.createdAt, cutoffISO)).returning({ id: auditLogs.id })).length;
+ });
+ container.logger.info({
+ deletedCount,
+ retentionDays,
+ cutoffDate: cutoffISO
+ }, "Audit log purge completed");
+ }
+ });
+ }
+
+ //#endregion
+ export { definePurgeAuditLogs };
@@ -1 +1,46 @@
- import{defineScheduledWorkflow as e}from"../workflow.mjs";function t(t=`0 0 * * *`){return e({crontab:t,async run({container:e,step:t}){let n=new Date(Date.now()-2880*60*1e3).toISOString(),r=await t(`fetch-unattached-blobs`,async()=>e.storage.primary.getUnattachedBlobs({olderThan:n})),i=await t(`fetch-pending-blobs`,async()=>e.storage.primary.getPendingBlobs(n)),a=[...r,...i],o=0;for(let n of a)await t(`delete-blob`,async()=>{await e.storage.primary.deleteBlob(n.id),o++});e.logger.info({purgedCount:o,unattachedCount:r.length,pendingCount:i.length},`Orphaned blobs purged`)}})}export{t as definePurgeUnattachedBlobs};
+ import { defineScheduledWorkflow } from "./api/workflow.mjs";
+
+ //#region src/api/workflows/purge-unattached-blobs.ts
+ /**
+ * Default cron: midnight daily
+ */
+ const DEFAULT_CRON = "0 0 * * *";
+ /**
+ * Defines purge unattached blobs workflow with configurable cron schedule.
+ *
+ * Algorithm:
+ * 1. Fetch unattached blobs older than 48 hours
+ * 2. Fetch pending blobs (stuck direct uploads) older than 48 hours
+ * 3. Delete each orphaned blob from storage and database
+ * 4. Log the purge summary
+ *
+ * @param crontab - Cron expression for when to run (default: daily at midnight)
+ */
+ function definePurgeUnattachedBlobs(crontab = DEFAULT_CRON) {
+ return defineScheduledWorkflow({
+ crontab,
+ async run({ container, step }) {
+ const olderThan = (/* @__PURE__ */ new Date(Date.now() - 2880 * 60 * 1e3)).toISOString();
+ const unattachedBlobs = await step("fetch-unattached-blobs", async () => {
+ return container.storage.primary.getUnattachedBlobs({ olderThan });
+ });
+ const pendingBlobs = await step("fetch-pending-blobs", async () => {
+ return container.storage.primary.getPendingBlobs(olderThan);
+ });
+ const allOrphans = [...unattachedBlobs, ...pendingBlobs];
+ let purgedCount = 0;
+ for (const blob of allOrphans) await step("delete-blob", async () => {
+ await container.storage.primary.deleteBlob(blob.id);
+ purgedCount++;
+ });
+ container.logger.info({
+ purgedCount,
+ unattachedCount: unattachedBlobs.length,
+ pendingCount: pendingBlobs.length
+ }, "Orphaned blobs purged");
+ }
+ });
+ }
+
+ //#endregion
+ export { definePurgeUnattachedBlobs };
@@ -1 +1,110 @@
- import{defineAuthSchema as e}from"../auth-schema.mjs";import{dbChangesEvent as t}from"../event.mjs";import{defineWorkflow as n}from"../workflow.mjs";import{z as r}from"zod";const i=e(),a=r.object({changes:r.array(r.object({_table:r.string(),old:r.record(r.string(),r.unknown()).nullable(),new:r.record(r.string(),r.unknown()).nullable()})),dbName:r.string(),organizationId:r.string().nullable(),requestId:r.string(),sessionId:r.string().nullable(),userId:r.string().nullable()});function o(e){return e.old===null?`INSERT`:e.new===null?`DELETE`:`UPDATE`}const s=n({input:a,async run({container:e,step:n,input:r}){let{dbName:a,changes:s,organizationId:c,userId:l,sessionId:u,requestId:d}=r;if(s.length===0)return{processed:0,audited:0,published:0};let f=new Date().toISOString(),p=0,m=0;for(let r of s){let s=r._table,h=o(r),g=`${a}.${s}`;e.auth.shouldAudit(g)&&(await n(`audit:${g}`,async()=>{await e.db.primary.insert(i.tables.auditLogs).values({tableName:g,action:h,oldData:r.old,newData:r.new,organizationId:c,userId:l,sessionId:u,requestId:d,createdAt:f})}),p++),await n(`event:${g}`,async()=>{await t.emit({action:h,oldData:r.old,newData:r.new,organizationId:c,tableName:g,timestamp:f,userId:l})}),m++}return{processed:s.length,audited:p,published:m}}});export{s as trackDbChanges};
+ import { defineAuthSchema } from "./api/auth-schema.mjs";
+ import { dbChangesEvent } from "./api/event.mjs";
+ import { defineWorkflow } from "./api/workflow.mjs";
+ import { z } from "zod";
+
+ //#region src/api/workflows/track-db-changes.ts
+ const authSchema = defineAuthSchema();
+ /**
+ * Input schema for trackDbChanges workflow.
+ * Accepts dbChanges() output directly from .returning() clause.
+ */
+ const trackDbChangesInputSchema = z.object({
+ changes: z.array(z.object({
+ _table: z.string(),
+ old: z.record(z.string(), z.unknown()).nullable(),
+ new: z.record(z.string(), z.unknown()).nullable()
+ })),
+ dbName: z.string(),
+ organizationId: z.string().nullable(),
+ requestId: z.string(),
+ sessionId: z.string().nullable(),
+ userId: z.string().nullable()
+ });
+ /**
+ * Infers action type from change record based on old/new nullness.
+ * Uses uppercase SQL-style actions to match OCSF/CADF standards.
+ *
+ * Algorithm:
+ * - old is null → INSERT (new row created)
+ * - new is null → DELETE (row removed)
+ * - both present → UPDATE (row modified)
+ *
+ * @param change - Change record with old and new values
+ * @returns Action type: "INSERT" if old is null, "DELETE" if new is null, otherwise "UPDATE"
+ */
+ function inferAction(change) {
+ if (change.old === null) return "INSERT";
+ if (change.new === null) return "DELETE";
+ return "UPDATE";
+ }
+ /**
+ * Built-in workflow for processing database changes.
+ * Handles audit logging and pub/sub notifications with type-safe table filtering.
+ *
+ * Algorithm:
+ * 1. For each change in input, infer the action (INSERT/UPDATE/DELETE)
+ * 2. If table is not excluded from audit logging:
+ * - Insert into audit_logs table with full context
+ * 3. Emit to dbChangesEvent (publishes via Redis if subscribed handlers exist)
+ *
+ * Filtering is configured via:
+ * - `container.auth.shouldAudit()` - Check if table should be audited
+ *
+ * @returns Object with counts: processed (total changes), audited (logged to audit_logs), published (emitted to event)
+ */
+ const trackDbChanges = defineWorkflow({
+ input: trackDbChangesInputSchema,
+ async run({ container, step, input }) {
+ const { dbName, changes, organizationId, userId, sessionId, requestId } = input;
+ if (changes.length === 0) return {
+ processed: 0,
+ audited: 0,
+ published: 0
+ };
+ const timestamp = (/* @__PURE__ */ new Date()).toISOString();
+ let audited = 0;
+ let published = 0;
+ for (const change of changes) {
+ const tableName = change._table;
+ const action = inferAction(change);
+ const qualifiedTable = `${dbName}.${tableName}`;
+ if (container.auth.shouldAudit(qualifiedTable)) {
+ await step(`audit:${qualifiedTable}`, async () => {
+ await container.db.primary.insert(authSchema.tables.auditLogs).values({
+ tableName: qualifiedTable,
+ action,
+ oldData: change.old,
+ newData: change.new,
+ organizationId,
+ userId,
+ sessionId,
+ requestId,
+ createdAt: timestamp
+ });
+ });
+ audited++;
+ }
+ await step(`event:${qualifiedTable}`, async () => {
+ await dbChangesEvent.emit({
+ action,
+ oldData: change.old,
+ newData: change.new,
+ organizationId,
+ tableName: qualifiedTable,
+ timestamp,
+ userId
+ });
+ });
+ published++;
+ }
+ return {
+ processed: changes.length,
+ audited,
+ published
+ };
+ }
+ });
+
+ //#endregion
+ export { trackDbChanges };
@@ -1 +1,36 @@
- var e=Object.defineProperty,t=Object.getOwnPropertyDescriptor,n=Object.getOwnPropertyNames,r=Object.prototype.hasOwnProperty,i=(t,n)=>{let r={};for(var i in t)e(r,i,{get:t[i],enumerable:!0});return n&&e(r,Symbol.toStringTag,{value:`Module`}),r},a=(i,a,o,s)=>{if(a&&typeof a==`object`||typeof a==`function`)for(var c=n(a),l=0,u=c.length,d;l<u;l++)d=c[l],!r.call(i,d)&&d!==o&&e(i,d,{get:(e=>a[e]).bind(null,d),enumerable:!(s=t(a,d))||s.enumerable});return i},o=(e,t,n)=>(a(e,t,`default`),n&&a(n,t,`default`));export{i as __export,o as __reExport};
+ //#region rolldown:runtime
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (all, symbols) => {
+ let target = {};
+ for (var name in all) {
+ __defProp(target, name, {
+ get: all[name],
+ enumerable: true
+ });
+ }
+ if (symbols) {
+ __defProp(target, Symbol.toStringTag, { value: "Module" });
+ }
+ return target;
+ };
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (var keys = __getOwnPropNames(from), i = 0, n = keys.length, key; i < n; i++) {
+ key = keys[i];
+ if (!__hasOwnProp.call(to, key) && key !== except) {
+ __defProp(to, key, {
+ get: ((k) => from[k]).bind(null, key),
+ enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
+ });
+ }
+ }
+ }
+ return to;
+ };
+ var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default"));
+
+ //#endregion
+ export { __export, __reExport };