appos 0.3.2-0 → 0.3.4-0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (158)
  1. package/dist/bin/auth-schema-CcqAJY9P.mjs +2 -0
  2. package/dist/bin/better-sqlite3-CuQ3hsWl.mjs +2 -0
  3. package/dist/bin/bun-sql-DGeo-s_M.mjs +2 -0
  4. package/dist/bin/cache-3oO07miM.mjs +2 -0
  5. package/dist/bin/chunk-l9p7A9gZ.mjs +2 -0
  6. package/dist/bin/cockroach-BaICwY7N.mjs +2 -0
  7. package/dist/bin/database-CaysWPpa.mjs +2 -0
  8. package/dist/bin/esm-BvsccvmM.mjs +2 -0
  9. package/dist/bin/esm-CGKzJ7Am.mjs +3 -0
  10. package/dist/bin/event-DnSe3eh0.mjs +8 -0
  11. package/dist/bin/extract-blob-metadata-iqwTl2ft.mjs +170 -0
  12. package/dist/bin/generate-image-variant-Lyx0vhM6.mjs +2 -0
  13. package/dist/bin/generate-preview-0MrKxslA.mjs +2 -0
  14. package/dist/bin/libsql-DQJrZsU9.mjs +2 -0
  15. package/dist/bin/logger-BAGZLUzj.mjs +2 -0
  16. package/dist/bin/main.mjs +1201 -190
  17. package/dist/bin/migrator-B7iNKM8N.mjs +2 -0
  18. package/dist/bin/migrator-BKE1cSQQ.mjs +2 -0
  19. package/dist/bin/migrator-BXcbc9zs.mjs +2 -0
  20. package/dist/bin/migrator-B_XhRWZC.mjs +8 -0
  21. package/dist/bin/migrator-Bz52Gtr8.mjs +2 -0
  22. package/dist/bin/migrator-C7W-cZHB.mjs +2 -0
  23. package/dist/bin/migrator-CEnKyGSW.mjs +2 -0
  24. package/dist/bin/migrator-CHzIIl5X.mjs +2 -0
  25. package/dist/bin/migrator-CR-rjZdM.mjs +2 -0
  26. package/dist/bin/migrator-CjIr1ZCx.mjs +8 -0
  27. package/dist/bin/migrator-Cuubh2dg.mjs +2 -0
  28. package/dist/bin/migrator-D8m-ORbr.mjs +8 -0
  29. package/dist/bin/migrator-DBFwrhZH.mjs +2 -0
  30. package/dist/bin/migrator-DLmhW9u_.mjs +2 -0
  31. package/dist/bin/migrator-DLoHx807.mjs +4 -0
  32. package/dist/bin/migrator-DtN_iS87.mjs +2 -0
  33. package/dist/bin/migrator-Yc57lb3w.mjs +2 -0
  34. package/dist/bin/migrator-cEVXH3xC.mjs +2 -0
  35. package/dist/bin/migrator-hWi-sYIq.mjs +2 -0
  36. package/dist/bin/mysql2-DufFWkj4.mjs +2 -0
  37. package/dist/bin/neon-serverless-5a4h2VFz.mjs +2 -0
  38. package/dist/bin/node-CiOp4xrR.mjs +22 -0
  39. package/dist/bin/node-mssql-DvZGaUkB.mjs +322 -0
  40. package/dist/bin/node-postgres-BqbJVBQY.mjs +2 -0
  41. package/dist/bin/node-postgres-DnhRTTO8.mjs +2 -0
  42. package/dist/bin/open-0ksnL0S8.mjs +2 -0
  43. package/dist/bin/pdf-sUYeFPr4.mjs +14 -0
  44. package/dist/bin/pg-CaH8ptj-.mjs +2 -0
  45. package/dist/bin/pg-core-BLTZt9AH.mjs +8 -0
  46. package/dist/bin/pg-core-CGzidKaA.mjs +2 -0
  47. package/dist/bin/pglite-BJB9z7Ju.mjs +2 -0
  48. package/dist/bin/planetscale-serverless-H3RfLlMK.mjs +13 -0
  49. package/dist/bin/postgres-js-DuOf1eWm.mjs +2 -0
  50. package/dist/bin/purge-attachment-DQXpTtTx.mjs +2 -0
  51. package/dist/bin/purge-audit-logs-BEt2J2gD.mjs +2 -0
  52. package/dist/bin/{purge-unattached-blobs-Duvv8Izd.mjs → purge-unattached-blobs-DOmk4ddJ.mjs} +1 -1
  53. package/dist/bin/query-builder-DSRrR6X_.mjs +8 -0
  54. package/dist/bin/query-builder-V8-LDhvA.mjs +3 -0
  55. package/dist/bin/session-CdB1A-LB.mjs +14 -0
  56. package/dist/bin/session-Cl2e-_i8.mjs +8 -0
  57. package/dist/bin/singlestore-COft6TlR.mjs +8 -0
  58. package/dist/bin/sql-D-eKV1Dn.mjs +2 -0
  59. package/dist/bin/sqlite-cloud-Co9jOn5G.mjs +2 -0
  60. package/dist/bin/sqlite-proxy-Cpu78gJF.mjs +2 -0
  61. package/dist/bin/src-C-oXmCzx.mjs +6 -0
  62. package/dist/bin/table-3zUpWkMg.mjs +2 -0
  63. package/dist/bin/track-db-changes-DWyY5jXm.mjs +2 -0
  64. package/dist/bin/utils-CyoeCJlf.mjs +2 -0
  65. package/dist/bin/utils-EoqYQKy1.mjs +2 -0
  66. package/dist/bin/utils-bsypyqPl.mjs +2 -0
  67. package/dist/bin/vercel-postgres-HWL6xtqi.mjs +2 -0
  68. package/dist/bin/workflow-zxHDyfLq.mjs +2 -0
  69. package/dist/bin/youch-handler-DrYdbUhe.mjs +2 -0
  70. package/dist/bin/zod-MJjkEkRY.mjs +24 -0
  71. package/dist/exports/api/_virtual/rolldown_runtime.mjs +36 -1
  72. package/dist/exports/api/app-context.mjs +24 -1
  73. package/dist/exports/api/auth-schema.mjs +373 -1
  74. package/dist/exports/api/auth.d.mts +4 -0
  75. package/dist/exports/api/auth.mjs +188 -1
  76. package/dist/exports/api/cache.d.mts +2 -2
  77. package/dist/exports/api/cache.mjs +28 -1
  78. package/dist/exports/api/config.mjs +72 -1
  79. package/dist/exports/api/constants.mjs +92 -1
  80. package/dist/exports/api/container.mjs +49 -1
  81. package/dist/exports/api/database.mjs +218 -1
  82. package/dist/exports/api/event.mjs +236 -1
  83. package/dist/exports/api/i18n.mjs +45 -1
  84. package/dist/exports/api/index.mjs +20 -1
  85. package/dist/exports/api/instrumentation.mjs +40 -1
  86. package/dist/exports/api/logger.mjs +26 -1
  87. package/dist/exports/api/mailer.mjs +37 -1
  88. package/dist/exports/api/middleware.mjs +73 -1
  89. package/dist/exports/api/openapi.mjs +507 -1
  90. package/dist/exports/api/orm.mjs +43 -1
  91. package/dist/exports/api/otel.mjs +56 -1
  92. package/dist/exports/api/redis.mjs +41 -1
  93. package/dist/exports/api/storage-schema.mjs +72 -1
  94. package/dist/exports/api/storage.mjs +833 -1
  95. package/dist/exports/api/web/auth.mjs +17 -1
  96. package/dist/exports/api/workflow.mjs +196 -1
  97. package/dist/exports/api/workflows/_virtual/rolldown_runtime.mjs +36 -1
  98. package/dist/exports/api/workflows/api/auth-schema.mjs +373 -1
  99. package/dist/exports/api/workflows/api/auth.d.mts +4 -0
  100. package/dist/exports/api/workflows/api/cache.d.mts +2 -2
  101. package/dist/exports/api/workflows/api/event.mjs +126 -1
  102. package/dist/exports/api/workflows/api/redis.mjs +3 -1
  103. package/dist/exports/api/workflows/api/workflow.mjs +135 -1
  104. package/dist/exports/api/workflows/constants.mjs +23 -1
  105. package/dist/exports/api/workflows/extract-blob-metadata.mjs +132 -1
  106. package/dist/exports/api/workflows/generate-image-variant.d.mts +2 -2
  107. package/dist/exports/api/workflows/generate-image-variant.mjs +118 -1
  108. package/dist/exports/api/workflows/generate-preview.mjs +160 -1
  109. package/dist/exports/api/workflows/index.mjs +3 -1
  110. package/dist/exports/api/workflows/purge-attachment.mjs +34 -1
  111. package/dist/exports/api/workflows/purge-audit-logs.mjs +47 -1
  112. package/dist/exports/api/workflows/purge-unattached-blobs.mjs +46 -1
  113. package/dist/exports/api/workflows/track-db-changes.mjs +110 -1
  114. package/dist/exports/cli/_virtual/rolldown_runtime.mjs +36 -1
  115. package/dist/exports/cli/api/auth-schema.mjs +373 -1
  116. package/dist/exports/cli/api/auth.d.mts +4 -0
  117. package/dist/exports/cli/api/cache.d.mts +2 -2
  118. package/dist/exports/cli/api/event.mjs +126 -1
  119. package/dist/exports/cli/api/redis.mjs +3 -1
  120. package/dist/exports/cli/api/workflow.mjs +135 -1
  121. package/dist/exports/cli/api/workflows/extract-blob-metadata.mjs +132 -1
  122. package/dist/exports/cli/api/workflows/generate-image-variant.mjs +118 -1
  123. package/dist/exports/cli/api/workflows/generate-preview.mjs +160 -1
  124. package/dist/exports/cli/api/workflows/purge-attachment.mjs +34 -1
  125. package/dist/exports/cli/api/workflows/purge-audit-logs.mjs +47 -1
  126. package/dist/exports/cli/api/workflows/purge-unattached-blobs.mjs +46 -1
  127. package/dist/exports/cli/api/workflows/track-db-changes.mjs +110 -1
  128. package/dist/exports/cli/command.d.mts +2 -0
  129. package/dist/exports/cli/command.mjs +43 -1
  130. package/dist/exports/cli/constants.mjs +23 -1
  131. package/dist/exports/cli/index.mjs +3 -1
  132. package/dist/exports/devtools/index.js +4 -1
  133. package/dist/exports/tests/api/auth.d.mts +4 -0
  134. package/dist/exports/tests/api/cache.d.mts +2 -2
  135. package/dist/exports/tests/api/middleware/i18n.mjs +1 -1
  136. package/dist/exports/tests/api/middleware/youch-handler.mjs +1 -1
  137. package/dist/exports/tests/api/openapi.mjs +1 -1
  138. package/dist/exports/tests/api/server.mjs +1 -1
  139. package/dist/exports/tests/api/storage.d.mts +4 -4
  140. package/dist/exports/tests/constants.mjs +1 -1
  141. package/dist/exports/vendors/date.js +1 -1
  142. package/dist/exports/vendors/toolkit.js +1 -1
  143. package/dist/exports/vendors/zod.js +1 -1
  144. package/dist/exports/vitest/globals.mjs +1 -1
  145. package/dist/exports/web/auth.js +75 -1
  146. package/dist/exports/web/i18n.js +45 -1
  147. package/dist/exports/web/index.js +8 -1
  148. package/package.json +19 -18
  149. package/dist/bin/auth-schema-Va0CYicu.mjs +0 -2
  150. package/dist/bin/event-8JibGFH_.mjs +0 -2
  151. package/dist/bin/extract-blob-metadata-DjPfHtQ2.mjs +0 -2
  152. package/dist/bin/generate-image-variant-D5VDFyWj.mjs +0 -2
  153. package/dist/bin/generate-preview-Dssw7w5U.mjs +0 -2
  154. package/dist/bin/purge-attachment-BBPzIxwt.mjs +0 -2
  155. package/dist/bin/purge-audit-logs-BeZy3IFM.mjs +0 -2
  156. package/dist/bin/track-db-changes-CFykw_YO.mjs +0 -2
  157. package/dist/bin/workflow-BNUZrj4F.mjs +0 -2
  158. package/dist/bin/youch-handler-BadUgHb0.mjs +0 -2
@@ -1 +1,126 @@
- import"./redis.mjs";import"../constants.mjs";import{z as e}from"zod";import{join as t}from"node:path";import"es-toolkit";function n(e){let t=null,n=null;return{inputSchema:e.input,get name(){return n},register(e,r){t=e,n=r},async emit(r){if(!t||!n)throw Error(`Event not registered. Ensure the worker is started before emitting events.`);let i=e.input.parse(r),a={container:t,input:i};await e.run(a),t.eventBus.publish(n,i).catch(e=>{t.logger.error({err:e,event:n},`Redis publish failed`)})},async subscribe(r){if(!t||!n)throw Error(`Event not registered. Ensure the worker is started before subscribing.`);return t.eventBus.subscribe(n,async i=>{let a=e.input.parse(i),o={container:t,input:a};try{await r(o)}catch(e){t.logger.error({err:e,event:n},`Event subscription handler error`)}})}}}const r=n({input:e.object({action:e.enum([`INSERT`,`UPDATE`,`DELETE`]),newData:e.record(e.string(),e.unknown()).nullable(),oldData:e.record(e.string(),e.unknown()).nullable(),organizationId:e.string().nullable(),tableName:e.string(),timestamp:e.string(),userId:e.string().nullable()}),async run(){}});export{r as dbChangesEvent};
+ import "./redis.mjs";
+ import "../constants.mjs";
+ import { z } from "zod";
+ import { join } from "node:path";
+ import "es-toolkit";
+
+ //#region src/api/event.ts
+ /**
+  * Defines a type-safe event with in-memory and Redis pub/sub support.
+  *
+  * Algorithm:
+  * 1. Define event with input schema and in-memory run handler
+  * 2. On emit(): validate input, run in-memory handler, publish to Redis (fire-and-forget)
+  * 3. .subscribe() creates Redis subscription for tRPC/SSE/WebSocket handlers
+  *
+  * @example
+  * ```typescript
+  * // api/events/order-status.ts
+  * export default defineEvent({
+  *   input: z.object({
+  *     orderId: z.string(),
+  *     status: z.enum(["pending", "shipped", "delivered"]),
+  *   }),
+  *   async run(ctx) {
+  *     ctx.container.logger.info(`Order ${ctx.input.orderId} is ${ctx.input.status}`);
+  *   },
+  * });
+  *
+  * // Emit from anywhere
+  * await orderStatus.emit({ orderId: "123", status: "shipped" });
+  *
+  * // Subscribe (e.g., in tRPC router)
+  * const unsubscribe = await orderStatus.subscribe(async (ctx) => {
+  *   // Push to client via SSE/WebSocket
+  * });
+  * // Cleanup when client disconnects
+  * unsubscribe();
+  * ```
+  */
+ function defineEvent(options) {
+   let container = null;
+   let eventName = null;
+   return {
+     inputSchema: options.input,
+     get name() {
+       return eventName;
+     },
+     register(c, name) {
+       container = c;
+       eventName = name;
+     },
+     async emit(input) {
+       if (!container || !eventName) throw new Error("Event not registered. Ensure the worker is started before emitting events.");
+       const validated = options.input.parse(input);
+       const ctx = {
+         container,
+         input: validated
+       };
+       await options.run(ctx);
+       container.eventBus.publish(eventName, validated).catch((err) => {
+         container.logger.error({
+           err,
+           event: eventName
+         }, "Redis publish failed");
+       });
+     },
+     async subscribe(handler) {
+       if (!container || !eventName) throw new Error("Event not registered. Ensure the worker is started before subscribing.");
+       return container.eventBus.subscribe(eventName, async (message) => {
+         const validated = options.input.parse(message);
+         const ctx = {
+           container,
+           input: validated
+         };
+         try {
+           await handler(ctx);
+         } catch (err) {
+           container.logger.error({
+             err,
+             event: eventName
+           }, "Event subscription handler error");
+         }
+       });
+     }
+   };
+ }
+ /**
+  * Input schema for dbChangesEvent.
+  */
+ const dbChangeInputSchema = z.object({
+   action: z.enum([
+     "INSERT",
+     "UPDATE",
+     "DELETE"
+   ]),
+   newData: z.record(z.string(), z.unknown()).nullable(),
+   oldData: z.record(z.string(), z.unknown()).nullable(),
+   organizationId: z.string().nullable(),
+   tableName: z.string(),
+   timestamp: z.string(),
+   userId: z.string().nullable()
+ });
+ /**
+  * Built-in event for database changes.
+  * Emitted by trackDbChanges workflow.
+  *
+  * @example
+  * ```typescript
+  * // Subscribe to DB changes (e.g., in tRPC subscription)
+  * import { dbChangesEvent } from "appos/api";
+  *
+  * const unsubscribe = await dbChangesEvent.subscribe(async (ctx) => {
+  *   wsServer.publish("db-changes", JSON.stringify(ctx.input));
+  * });
+  *
+  * // Cleanup when done
+  * unsubscribe();
+  * ```
+  */
+ const dbChangesEvent = defineEvent({
+   input: dbChangeInputSchema,
+   async run() {}
+ });
+
+ //#endregion
+ export { dbChangesEvent };
@@ -1 +1,3 @@
- import"redis";export{};
+ import "redis";
+
+ export { };
@@ -1 +1,135 @@
- import"../constants.mjs";import{join as e}from"node:path";import"es-toolkit";function t(e){let t=null,n=null,r=null,i=null,a=async i=>{if(!t||!r)throw Error(`Workflow "${n}" not registered`);let a=r,o=a.workflowID;if(!o)throw Error(`DBOS.workflowID is not available in this context`);let s={container:t,workflowId:o,input:i,step:(e,t)=>a.runStep(t,{name:e})};return e.run(s)};return{inputSchema:e.input,get name(){return n},register(o,s,c){t=o,n=s,r=c,i=c.registerWorkflow(a,{name:s,...e.config})},async start(t){if(!i||!n||!r)throw Error(`Workflow not registered. Ensure the worker is started before triggering workflows.`);let a=e.input.parse(t),o=await r.startWorkflow(i)(a);return{workflowId:o.workflowID,getStatus:()=>o.getStatus(),getResult:()=>o.getResult()}}}}function n(e){let t=null,n=null,r=null,i=async(i,a)=>{if(!t||!r)throw Error(`Workflow "${n}" not registered`);let o=r,s=o.workflowID;if(!s)throw Error(`DBOS.workflowID is not available in this context`);let c={container:t,workflowId:s,scheduledTime:i,step:(e,t)=>o.runStep(t,{name:e})};return e.run(c)};return{crontab:e.crontab,get name(){return n},register(a,o,s){t=a,n=o,r=s,s.registerScheduled(s.registerWorkflow(i,{name:o}),{crontab:e.crontab})}}}export{n as defineScheduledWorkflow,t as defineWorkflow};
+ import "../constants.mjs";
+ import { join } from "node:path";
+ import "es-toolkit";
+
+ //#region src/api/workflow.ts
+ /**
+  * Defines a durable workflow that can be triggered with type-safe input.
+  *
+  * Workflows are:
+  * - Durable: Automatically resume after crashes or restarts
+  * - Type-safe: Input and output types are inferred from the schema
+  * - Container-aware: Access to db, mailer, logger via ctx.container
+  *
+  * @example
+  * ```typescript
+  * // api/workflows/send-welcome-email.ts
+  * export default defineWorkflow({
+  *   input: z.object({
+  *     userId: z.string(),
+  *     email: z.string().email(),
+  *   }),
+  *   async run(ctx) {
+  *     const user = await ctx.container.db.primary.query.users.findFirst({
+  *       where: eq(users.id, ctx.input.userId),
+  *     });
+  *
+  *     await ctx.container.mailer.send({
+  *       to: ctx.input.email,
+  *       subject: "Welcome!",
+  *     });
+  *
+  *     return { sent: true };
+  *   },
+  * });
+  *
+  * // Triggering from a route:
+  * import sendWelcomeEmail from "#api/workflows/send-welcome-email.ts";
+  * const handle = await sendWelcomeEmail.start({ userId: "123", email: "..." });
+  * ```
+  */
+ function defineWorkflow(options) {
+   let container = null;
+   let workflowName = null;
+   let dbosInstance = null;
+   let registeredFn = null;
+   const workflowFn = async (input) => {
+     if (!container || !dbosInstance) throw new Error(`Workflow "${workflowName}" not registered`);
+     const dbos = dbosInstance;
+     const workflowId = dbos.workflowID;
+     if (!workflowId) throw new Error("DBOS.workflowID is not available in this context");
+     const ctx = {
+       container,
+       workflowId,
+       input,
+       step: (name, fn) => dbos.runStep(fn, { name })
+     };
+     return options.run(ctx);
+   };
+   return {
+     inputSchema: options.input,
+     get name() {
+       return workflowName;
+     },
+     register(c, name, dbos) {
+       container = c;
+       workflowName = name;
+       dbosInstance = dbos;
+       registeredFn = dbos.registerWorkflow(workflowFn, {
+         name,
+         ...options.config
+       });
+     },
+     async start(input) {
+       if (!registeredFn || !workflowName || !dbosInstance) throw new Error("Workflow not registered. Ensure the worker is started before triggering workflows.");
+       const validated = options.input.parse(input);
+       const handle = await dbosInstance.startWorkflow(registeredFn)(validated);
+       return {
+         workflowId: handle.workflowID,
+         getStatus: () => handle.getStatus(),
+         getResult: () => handle.getResult()
+       };
+     }
+   };
+ }
+ /**
+  * Defines a scheduled workflow that runs on a cron schedule.
+  *
+  * @example
+  * ```typescript
+  * // api/workflows/daily-report.ts
+  * export default defineScheduledWorkflow({
+  *   crontab: "0 9 * * *", // 9am daily
+  *   async run(ctx) {
+  *     const stats = await ctx.container.db.primary.query.stats.findMany();
+  *     await ctx.container.mailer.send({
+  *       to: "team@company.com",
+  *       subject: `Daily Report - ${ctx.scheduledTime.toDateString()}`,
+  *     });
+  *   },
+  * });
+  * ```
+  */
+ function defineScheduledWorkflow(options) {
+   let container = null;
+   let workflowName = null;
+   let dbosInstance = null;
+   const workflowFn = async (scheduledTime, _startTime) => {
+     if (!container || !dbosInstance) throw new Error(`Workflow "${workflowName}" not registered`);
+     const dbos = dbosInstance;
+     const workflowId = dbos.workflowID;
+     if (!workflowId) throw new Error("DBOS.workflowID is not available in this context");
+     const ctx = {
+       container,
+       workflowId,
+       scheduledTime,
+       step: (name, fn) => dbos.runStep(fn, { name })
+     };
+     return options.run(ctx);
+   };
+   return {
+     crontab: options.crontab,
+     get name() {
+       return workflowName;
+     },
+     register(c, name, dbos) {
+       container = c;
+       workflowName = name;
+       dbosInstance = dbos;
+       dbos.registerScheduled(dbos.registerWorkflow(workflowFn, { name }), { crontab: options.crontab });
+     }
+   };
+ }
+
+ //#endregion
+ export { defineScheduledWorkflow, defineWorkflow };
@@ -1 +1,23 @@
- process.env.NODE_ENV;export{};
+ //#region src/constants.ts
+ /**
+  * Directory for public static assets.
+  *
+  * Expected structure:
+  * - `<project-root>/public/` - Public directory root
+  * - `<project-root>/public/locales/` - i18n translation files
+  *
+  * @default "public"
+  */
+ const PUBLIC_DIR = process.env.NODE_ENV === "production" ? "client" : "public";
+ /**
+  * File extension for code files based on environment.
+  *
+  * In development: `.ts` (TypeScript source files)
+  * In production: `.js` (bundled JavaScript files)
+  *
+  * @default "ts" in development, "js" in production
+  */
+ const FILE_EXT = process.env.NODE_ENV === "production" ? "js" : "ts";
+
+ //#endregion
+ export { };
@@ -1 +1,132 @@
- import{defineWorkflow as e}from"../workflow.mjs";import t from"zod";import{join as n}from"node:path";import{ALL_FORMATS as r,BlobSource as i,Input as a}from"mediabunny";const o=e({input:t.object({blobId:t.string()}),async run({container:e,input:{blobId:t},step:o}){let s=await o(`fetch-blob`,async()=>e.storage.primary.getBlob(t));if(!s)throw Error(`Blob ${t} not found`);let c=await o(`download-blob`,async()=>e.storage.primary.downloadBlob(t));if(!c)throw Error(`Failed to download blob ${t}`);let l={};return s.contentType?.startsWith(`image/`)?l=await o(`extract-image-metadata`,async()=>{let e=(await import(`sharp`)).default,t=await e(c).metadata();return{width:t.width,height:t.height,format:t.format,hasAlpha:t.hasAlpha,space:t.space}}):s.contentType?.startsWith(`video/`)||s.contentType?.startsWith(`audio/`)?l=await o(`extract-media-metadata`,async()=>{let e=new Uint8Array(c),t=new a({source:new i(new Blob([e],{type:s.contentType||`video/mp4`})),formats:r}),n=await t.computeDuration(),o=await t.getMetadataTags(),l={},u={},d=!1,f=!1;try{let e=await t.getPrimaryVideoTrack();if(e){d=!0;let t=e.displayWidth&&e.displayHeight?e.displayWidth/e.displayHeight:null;l={width:e.displayWidth,height:e.displayHeight,rotation:e.rotation,angle:e.rotation,displayAspectRatio:t}}}catch{}try{let e=await t.getPrimaryAudioTrack();e&&(f=!0,u={sampleRate:e.sampleRate,channels:e.numberOfChannels})}catch{}return{duration:n,video:d,audio:f,...l,...u,tags:o}}):s.contentType===`application/pdf`&&(l=await o(`extract-pdf-metadata`,async()=>{try{let e=await import(`pdfjs-dist/legacy/build/pdf.mjs`),t=`${n(process.cwd(),`node_modules/pdfjs-dist/standard_fonts`)}/`,r=await e.getDocument({data:new Uint8Array(c),standardFontDataUrl:t}).promise,i=await r.getMetadata(),a=(await r.getPage(1)).getViewport({scale:1}),o=i.info;return{pageCount:r.numPages,width:a.width,height:a.height,title:o?.Title||null,author:o?.Author||null,subject:o?.Subject||null,keywords:o?.Keywords||null,creator:o?.Creator||null,producer:o?.Producer||null,creationDate:o?.CreationDate||null,modificationDate:o?.ModDate||null,pdfVersion:o?.PDFFormatVersion||null}}catch(n){return e.logger.error({error:n,errorMessage:n instanceof Error?n.message:String(n),errorStack:n instanceof Error?n.stack:void 0,errorCode:n?.code,blobId:t},`Failed to extract PDF metadata`),{error:`Failed to extract PDF metadata`,errorMessage:n instanceof Error?n.message:String(n)}}})),await o(`save-metadata`,async()=>{await e.storage.primary.updateBlobMetadata(t,{...l,analyzed:!0})}),e.logger.info({blobId:t,metadata:l},`Metadata extracted`),{...l,analyzed:!0}}});export{o as extractBlobMetadata};
+ import { defineWorkflow } from "./api/workflow.mjs";
+ import z$1 from "zod";
+ import { join } from "node:path";
+ import { ALL_FORMATS, BlobSource, Input } from "mediabunny";
+
+ //#region src/api/workflows/extract-blob-metadata.ts
+ const extractBlobMetadata = defineWorkflow({
+   input: z$1.object({ blobId: z$1.string() }),
+   async run({ container, input: { blobId }, step }) {
+     const blob = await step("fetch-blob", async () => {
+       return container.storage.primary.getBlob(blobId);
+     });
+     if (!blob) throw new Error(`Blob ${blobId} not found`);
+     const buffer = await step("download-blob", async () => {
+       return container.storage.primary.downloadBlob(blobId);
+     });
+     if (!buffer) throw new Error(`Failed to download blob ${blobId}`);
+     let metadata = {};
+     if (blob.contentType?.startsWith("image/")) metadata = await step("extract-image-metadata", async () => {
+       const sharp = (await import("sharp")).default;
+       const info = await sharp(buffer).metadata();
+       return {
+         width: info.width,
+         height: info.height,
+         format: info.format,
+         hasAlpha: info.hasAlpha,
+         space: info.space
+       };
+     });
+     else if (blob.contentType?.startsWith("video/") || blob.contentType?.startsWith("audio/")) metadata = await step("extract-media-metadata", async () => {
+       const uint8Array = new Uint8Array(buffer);
+       const input = new Input({
+         source: new BlobSource(new Blob([uint8Array], { type: blob.contentType || "video/mp4" })),
+         formats: ALL_FORMATS
+       });
+       const duration = await input.computeDuration();
+       const tags = await input.getMetadataTags();
+       let videoData = {};
+       let audioData = {};
+       let hasVideo = false;
+       let hasAudio = false;
+       try {
+         const videoTrack = await input.getPrimaryVideoTrack();
+         if (videoTrack) {
+           hasVideo = true;
+           const displayAspectRatio = videoTrack.displayWidth && videoTrack.displayHeight ? videoTrack.displayWidth / videoTrack.displayHeight : null;
+           videoData = {
+             width: videoTrack.displayWidth,
+             height: videoTrack.displayHeight,
+             rotation: videoTrack.rotation,
+             angle: videoTrack.rotation,
+             displayAspectRatio
+           };
+         }
+       } catch {}
+       try {
+         const audioTrack = await input.getPrimaryAudioTrack();
+         if (audioTrack) {
+           hasAudio = true;
+           audioData = {
+             sampleRate: audioTrack.sampleRate,
+             channels: audioTrack.numberOfChannels
+           };
+         }
+       } catch {}
+       return {
+         duration,
+         video: hasVideo,
+         audio: hasAudio,
+         ...videoData,
+         ...audioData,
+         tags
+       };
+     });
+     else if (blob.contentType === "application/pdf") metadata = await step("extract-pdf-metadata", async () => {
+       try {
+         const pdfjsLib = await import("pdfjs-dist/legacy/build/pdf.mjs");
+         const standardFontDataUrl = `${join(process.cwd(), "node_modules/pdfjs-dist/standard_fonts")}/`;
+         const pdf = await pdfjsLib.getDocument({
+           data: new Uint8Array(buffer),
+           standardFontDataUrl
+         }).promise;
+         const pdfMetadata = await pdf.getMetadata();
+         const viewport = (await pdf.getPage(1)).getViewport({ scale: 1 });
+         const info = pdfMetadata.info;
+         return {
+           pageCount: pdf.numPages,
+           width: viewport.width,
+           height: viewport.height,
+           title: info?.Title || null,
+           author: info?.Author || null,
+           subject: info?.Subject || null,
+           keywords: info?.Keywords || null,
+           creator: info?.Creator || null,
+           producer: info?.Producer || null,
+           creationDate: info?.CreationDate || null,
+           modificationDate: info?.ModDate || null,
+           pdfVersion: info?.PDFFormatVersion || null
+         };
+       } catch (error) {
+         container.logger.error({
+           error,
+           errorMessage: error instanceof Error ? error.message : String(error),
+           errorStack: error instanceof Error ? error.stack : void 0,
+           errorCode: error?.code,
+           blobId
+         }, "Failed to extract PDF metadata");
+         return {
+           error: "Failed to extract PDF metadata",
+           errorMessage: error instanceof Error ? error.message : String(error)
+         };
+       }
+     });
+     await step("save-metadata", async () => {
+       await container.storage.primary.updateBlobMetadata(blobId, {
+         ...metadata,
+         analyzed: true
+       });
+     });
+     container.logger.info({
+       blobId,
+       metadata
+     }, "Metadata extracted");
+     return {
+       ...metadata,
+       analyzed: true
+     };
+   }
+ });
+
+ //#endregion
+ export { extractBlobMetadata };
@@ -1,5 +1,5 @@
- import "../workflow.mjs";
- import "../index.mjs";
+ import "./api/workflow.mjs";
+ import "./api/container.mjs";
  import { z } from "zod";
 
  //#region src/api/workflows/generate-image-variant.d.ts
@@ -1 +1,118 @@
- import{defineWorkflow as e}from"../workflow.mjs";import{z as t}from"zod";const n=t.object({width:t.number().optional(),height:t.number().optional(),fit:t.enum([`cover`,`contain`,`fill`,`inside`,`outside`]).optional(),position:t.enum([`top`,`right top`,`right`,`right bottom`,`bottom`,`left bottom`,`left`,`left top`,`centre`]).optional(),kernel:t.enum([`nearest`,`linear`,`cubic`,`mitchell`,`lanczos2`,`lanczos3`]).optional()}),r=t.object({resize:n.optional(),rotate:t.number().optional(),flip:t.boolean().optional(),flop:t.boolean().optional(),sharpen:t.boolean().optional(),blur:t.number().optional(),grayscale:t.boolean().optional(),format:t.enum([`jpeg`,`png`,`webp`,`avif`,`gif`]).optional(),quality:t.number().min(1).max(100).optional(),preview:t.literal(!0).optional()}),i=e({input:t.object({blobId:t.string(),transformations:r}),async run({container:e,input:{blobId:t,transformations:n},step:r}){if(!await r(`fetch-blob`,async()=>e.storage.primary.getBlob(t)))throw Error(`Blob ${t} not found`);let i=await r(`download-blob`,async()=>e.storage.primary.downloadBlob(t));if(!i)throw Error(`Failed to download blob ${t}`);let a=await r(`apply-transformations`,async()=>{let e=(await import(`sharp`)).default,t=e(i);return n.resize&&(t=t.resize({width:n.resize.width,height:n.resize.height,fit:n.resize.fit,position:n.resize.position,kernel:n.resize.kernel})),n.rotate!==void 0&&(t=t.rotate(n.rotate)),n.flip&&(t=t.flip()),n.flop&&(t=t.flop()),n.sharpen&&(t=t.sharpen()),n.blur!==void 0&&(t=t.blur(n.blur)),n.grayscale&&(t=t.grayscale()),n.format&&(t=t.toFormat(n.format,{quality:n.quality})),t.toBuffer()}),o=await r(`store-variant`,async()=>e.storage.primary.createVariant(t,n,a));return e.logger.info({blobId:t,variantId:o.id},`Image variant generated`),o}});export{i as generateImageVariant};
+ import { defineWorkflow } from "./api/workflow.mjs";
+ import { z } from "zod";
+
+ //#region src/api/workflows/generate-image-variant.ts
+ /**
+  * Resize options schema for image transformations.
+  */
+ const resizeSchema = z.object({
+   width: z.number().optional(),
+   height: z.number().optional(),
+   fit: z.enum([
+     "cover",
+     "contain",
+     "fill",
+     "inside",
+     "outside"
+   ]).optional(),
+   position: z.enum([
+     "top",
+     "right top",
+     "right",
+     "right bottom",
+     "bottom",
+     "left bottom",
+     "left",
+     "left top",
+     "centre"
+   ]).optional(),
+   kernel: z.enum([
+     "nearest",
+     "linear",
+     "cubic",
+     "mitchell",
+     "lanczos2",
+     "lanczos3"
+   ]).optional()
+ });
+ /**
+  * Image transformations schema.
+  * Supports resize, rotate, flip, flop, sharpen, blur, grayscale, format conversion.
+  */
+ const transformationsSchema = z.object({
+   resize: resizeSchema.optional(),
+   rotate: z.number().optional(),
+   flip: z.boolean().optional(),
+   flop: z.boolean().optional(),
+   sharpen: z.boolean().optional(),
+   blur: z.number().optional(),
+   grayscale: z.boolean().optional(),
+   format: z.enum([
+     "jpeg",
+     "png",
+     "webp",
+     "avif",
+     "gif"
+   ]).optional(),
+   quality: z.number().min(1).max(100).optional(),
+   preview: z.literal(true).optional()
+ });
+ /**
+  * Generate image variant workflow. Applies transformations to create variants.
+  *
+  * Algorithm:
+  * 1. Fetch blob by ID
+  * 2. Download blob content
+  * 3. Apply transformations using Sharp:
+  *    - Resize with various fit options
+  *    - Rotate by degrees
+  *    - Flip/flop (vertical/horizontal mirror)
+  *    - Sharpen, blur, grayscale filters
+  *    - Format conversion with quality settings
+  * 4. Store variant with transformation metadata
+  */
+ const generateImageVariant = defineWorkflow({
+   input: z.object({
+     blobId: z.string(),
+     transformations: transformationsSchema
+   }),
+   async run({ container, input: { blobId, transformations }, step }) {
+     if (!await step("fetch-blob", async () => {
+       return container.storage.primary.getBlob(blobId);
+     })) throw new Error(`Blob ${blobId} not found`);
+     const buffer = await step("download-blob", async () => {
+       return container.storage.primary.downloadBlob(blobId);
+     });
+     if (!buffer) throw new Error(`Failed to download blob ${blobId}`);
+     const variantBuffer = await step("apply-transformations", async () => {
+       const sharp = (await import("sharp")).default;
+       let pipeline = sharp(buffer);
+       if (transformations.resize) pipeline = pipeline.resize({
+         width: transformations.resize.width,
+         height: transformations.resize.height,
+         fit: transformations.resize.fit,
+         position: transformations.resize.position,
+         kernel: transformations.resize.kernel
+       });
+       if (transformations.rotate !== void 0) pipeline = pipeline.rotate(transformations.rotate);
+       if (transformations.flip) pipeline = pipeline.flip();
+       if (transformations.flop) pipeline = pipeline.flop();
+       if (transformations.sharpen) pipeline = pipeline.sharpen();
+       if (transformations.blur !== void 0) pipeline = pipeline.blur(transformations.blur);
+       if (transformations.grayscale) pipeline = pipeline.grayscale();
+       if (transformations.format) pipeline = pipeline.toFormat(transformations.format, { quality: transformations.quality });
+       return pipeline.toBuffer();
+     });
+     const variant = await step("store-variant", async () => {
+       return container.storage.primary.createVariant(blobId, transformations, variantBuffer);
+     });
+     container.logger.info({
+       blobId,
+       variantId: variant.id
+     }, "Image variant generated");
+     return variant;
+   }
+ });
+
+ //#endregion
+ export { generateImageVariant };