@better-media/framework 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs ADDED
@@ -0,0 +1,1044 @@
1
+ import { isPgPoolLike, TrustedMetadataSchema, HOOK_NAMES, schema, serializeData, toDbFieldName, deserializeData, toCamelCase, getColumnType, markFileContentVerified, resolveHookMode } from '@better-media/core';
2
+ export { HOOK_NAMES, MigrationPlanner, applyOperationsToMetadata, compileMigrationOperationsSql, deserializeData, getAdapter, getColumnType, getMigrations, isPgPoolLike, runHooks, runMigrations, schema, serializeData, toCamelCase, toDbFieldName } from '@better-media/core';
3
+ import fs2 from 'fs/promises';
4
+ import { randomUUID } from 'crypto';
5
+ import { memoryJobAdapter } from '@better-media/adapter-jobs';
6
+ import path from 'path';
7
+ import { createWriteStream } from 'fs';
8
+ import os from 'os';
9
+ import { Readable } from 'stream';
10
+
11
// Bundler (esbuild) interop helpers.
var __defProp = Object.defineProperty;
var __getOwnPropNames = Object.getOwnPropertyNames;
// Wrap a lazily-initialized module body so it runs at most once; later calls
// return the cached result. `fn` is an object with a single method whose name
// is the module path; it is zeroed after the first run.
var __esm = (fn, res) => function __init() {
  if (fn) {
    const initializer = fn[__getOwnPropNames(fn)[0]];
    res = initializer(fn = 0);
  }
  return res;
};
// Define lazy, enumerable getters on `target` for every export in `all`.
var __export = (target, all) => {
  for (const name of __getOwnPropNames(all)) {
    __defProp(target, name, { get: all[name], enumerable: true });
  }
};
20
+
21
// src/core/lifecycle-engine.ts
// Bundler-generated lazy export map for the lifecycle-engine module; the
// getters defer to the bindings assigned by init_lifecycle_engine(), so this
// object can be created before the module body has run.
var lifecycle_engine_exports = {};
__export(lifecycle_engine_exports, {
  LifecycleEngine: () => LifecycleEngine,
  createSecureContext: () => createSecureContext
});
27
/**
 * Build the sandboxed view of a lifecycle context handed to a single plugin.
 *
 * Returns `{ proxy, api }`:
 *  - `proxy` is a guarded view of `context`: direct writes/deletes on its
 *    top level throw, and the sensitive sub-objects (`metadata`,
 *    `processing`, `trusted`, `file`) are wrapped in write-blocking proxies,
 *    forcing plugins through the capability-checked `api`.
 *  - `api` exposes the only sanctioned mutation paths, each gated on the
 *    capabilities declared in the plugin's manifest.
 *
 * @param context      mutable lifecycle context owned by the engine
 * @param pluginName   plugin name, used in audit logs and error messages
 * @param namespace    manifest namespace the plugin may write under
 * @param trustLevel   "trusted" | "untrusted" from the manifest
 * @param capabilities capability strings granted by the manifest
 */
function createSecureContext(context, pluginName, namespace, trustLevel, capabilities) {
  const api = {
    // Merge a patch into the plugin's own namespace of (untrusted) metadata.
    // Requires the "metadata.write.own" capability; denials are audit-logged.
    emitMetadata(patch) {
      if (!capabilities.includes("metadata.write.own")) {
        console.warn(`[AUDIT] Denied metadata.write.own for plugin "${pluginName}"`);
        throw new Error(`Plugin "${pluginName}" lacks "metadata.write.own" capability`);
      }
      context.metadata[namespace] = {
        ...context.metadata[namespace] ?? {},
        ...patch
      };
    },
    // Merge a patch into the plugin's own namespace of processing results.
    // Requires the "processing.write.own" capability.
    emitProcessing(patch) {
      if (!capabilities.includes("processing.write.own")) {
        console.warn(`[AUDIT] Denied processing.write.own for plugin "${pluginName}"`);
        throw new Error(`Plugin "${pluginName}" lacks "processing.write.own" capability`);
      }
      context.processing[namespace] = {
        ...context.processing[namespace] ?? {},
        ...patch
      };
    },
    // Propose trusted metadata (file/checksums/media). Requires trusted
    // status AND the "trusted.propose" capability AND prior provenance
    // verification of the file content; accepted patches are schema-validated
    // and audit-logged before being merged.
    proposeTrusted(patch) {
      if (trustLevel !== "trusted" || !capabilities.includes("trusted.propose")) {
        console.warn(
          `[AUDIT] Security Violation: Unauthorized trusted.propose from plugin "${pluginName}"`
        );
        throw new Error(`Plugin "${pluginName}" is not authorized to propose trusted metadata`);
      }
      const secureContext = context;
      // "file:content" is recorded via markFileContentVerified() once the raw
      // bytes were actually loaded (see loadFileIntoContext); without it,
      // trusted claims about the file are rejected.
      const verified = secureContext._verifiedSources ?? /* @__PURE__ */ new Set();
      if (!verified.has("file:content") && (patch.file || patch.checksums || patch.media)) {
        console.warn(
          `[AUDIT] Provenance Failure: Plugin "${pluginName}" attempted to propose trusted metadata without independent verification (no file content read).`
        );
        throw new Error(
          `Plugin "${pluginName}" must verify file content before proposing trusted metadata`
        );
      }
      // Throws if the patch does not conform to the trusted-metadata schema.
      TrustedMetadataSchema.parse(patch);
      const auditLog = secureContext._auditLog ??= [];
      auditLog.push({
        timestamp: (/* @__PURE__ */ new Date()).toISOString(),
        plugin: pluginName,
        action: "proposeTrusted",
        patch: JSON.parse(JSON.stringify(patch))
        // Snapshot the patch
      });
      console.info(
        `[AUDIT] Accepted trusted proposal from plugin "${pluginName}" for file "${context.file.key}"`
      );
      // Merge the accepted patch into the trusted section, group by group so
      // partial patches do not clobber sibling fields.
      context.trusted = {
        ...context.trusted,
        ...patch,
        file: { ...context.trusted.file, ...patch.file },
        checksums: { ...context.trusted.checksums, ...patch.checksums },
        media: { ...context.trusted.media, ...patch.media }
      };
      // Mirror accepted trusted values onto context.file so downstream
      // consumers see verified values instead of client-supplied ones.
      if (patch.file) {
        if (patch.file.mimeType != null) context.file.mimeType = patch.file.mimeType;
        if (patch.file.size != null) context.file.size = patch.file.size;
        if (patch.file.originalName != null) context.file.originalName = patch.file.originalName;
        if (patch.file.extension != null) context.file.extension = patch.file.extension;
      }
      if (patch.checksums) {
        context.file.checksums = { ...context.file.checksums, ...patch.checksums };
      }
    }
  };
  // Guarded view: reads pass through, but assignment/deletion anywhere on the
  // top level — or inside the sensitive sub-objects — throws.
  const proxy = new Proxy(context, {
    get(target, prop, receiver) {
      const value = Reflect.get(target, prop, receiver);
      if (prop === "metadata" || prop === "processing" || prop === "trusted" || prop === "file") {
        return new Proxy(value, {
          set() {
            throw new Error(
              `Direct mutation of "context.${String(prop)}" is blocked. Use PluginApi instead. (Plugin: ${pluginName})`
            );
          },
          deleteProperty() {
            throw new Error(
              `Direct deletion of "context.${String(prop)}" properties is blocked. (Plugin: ${pluginName})`
            );
          }
        });
      }
      return value;
    },
    set(target, prop) {
      throw new Error(
        `Direct mutation of "context.${String(prop)}" is blocked. (Plugin: ${pluginName})`
      );
    }
  });
  return { proxy, api };
}
123
// Module state for src/core/lifecycle-engine.ts; the __esm wrapper runs the
// body at most once, on the first init_lifecycle_engine() call.
var JOB_QUEUE_NAME, LifecycleEngine;
var init_lifecycle_engine = __esm({
  "src/core/lifecycle-engine.ts"() {
    // Queue that background hook handlers are dispatched on.
    JOB_QUEUE_NAME = "better-media:background";
    /**
     * Runs hook handlers: "sync" handlers execute in series inside the
     * current call, "background" handlers are serialized (JSON deep copy)
     * and enqueued on the job adapter.
     */
    LifecycleEngine = class {
      constructor(registry, jobAdapter) {
        this.registry = registry;
        this.jobAdapter = jobAdapter;
      }
      /**
       * Trigger `hookName` with `context`.
       * @returns the first sync-handler result carrying `valid: false`
       *          (abort signal for validation hooks), otherwise undefined.
       */
      async trigger(hookName, context) {
        const handlers = this.registry.get(hookName) ?? [];
        const syncHandlers = handlers.filter((h) => h.mode === "sync");
        const backgroundHandlers = handlers.filter((h) => h.mode === "background");
        for (const { name, fn, manifest } of syncHandlers) {
          // Each handler gets its own capability-scoped view of the context.
          const { proxy, api } = createSecureContext(
            context,
            name,
            manifest.namespace,
            manifest.trustLevel,
            manifest.capabilities
          );
          const result = await fn(proxy, api);
          if (result !== void 0 && typeof result === "object" && "valid" in result) {
            if (result.valid === false) return result;
          }
        }
        for (const { name, manifest } of backgroundHandlers) {
          // Deep-copy via JSON so the queued payload cannot alias live state.
          const payload = {
            recordId: context.recordId,
            metadata: JSON.parse(JSON.stringify(context.metadata)),
            file: JSON.parse(JSON.stringify(context.file)),
            storageLocation: JSON.parse(JSON.stringify(context.storageLocation)),
            processing: JSON.parse(JSON.stringify(context.processing)),
            hookName,
            pluginName: name,
            manifest: JSON.parse(JSON.stringify(manifest))
          };
          await this.jobAdapter.enqueue(JOB_QUEUE_NAME, payload);
        }
      }
    };
  }
});
166
+
167
// src/plugins/plugin-registry.ts
init_lifecycle_engine();
/** Build a hook-name → handler-list map with an empty list for every known hook. */
function createEmptyRegistry() {
  const registry = new Map();
  for (const hookName of HOOK_NAMES) {
    registry.set(hookName, []);
  }
  return registry;
}
176
/** Empty every handler list in place, preserving the Map and array identities. */
function clearRegistry(registry) {
  for (const list of registry.values()) {
    list.length = 0;
  }
}
181
/**
 * Create the `tap` facade for one hook name; registration enforces the hook's
 * supported execution modes (warning when a requested mode is overridden) and
 * records the plugin manifest alongside the handler.
 */
function createHook(registry, hookName, manifest) {
  const tap = (name, fn, options) => {
    const requestedMode = options?.mode ?? "sync";
    const { effective, overridden } = resolveHookMode(hookName, requestedMode);
    if (overridden) {
      console.warn(
        `[better-media] Hook '${hookName}' does not support mode '${requestedMode}'. Overriding to '${effective}'. Plugin: ${name}`
      );
    }
    registry.get(hookName).push({ name, fn, mode: effective, manifest });
  };
  return { tap };
}
196
/**
 * Expose every known hook to a plugin. When the caller does not specify a
 * mode, plugins flagged `intensive` default to "background", others to "sync".
 */
function createPluginRuntime(registry, plugin) {
  const hooks = {};
  for (const hookName of HOOK_NAMES) {
    const { tap: baseTap } = createHook(registry, hookName, plugin.runtimeManifest);
    hooks[hookName] = {
      tap(handlerName, fn, options) {
        const mode = options?.mode ?? (plugin.intensive ? "background" : "sync");
        baseTap(handlerName, fn, { ...options, mode });
      }
    };
  }
  return { hooks };
}
209
// Manifest used for hooks registered by the framework itself: fully trusted,
// granted every capability, under the reserved "system" namespace.
var SYSTEM_MANIFEST = {
  id: "better-media-system",
  version: "1.0.0",
  trustLevel: "trusted",
  capabilities: ["file.read", "metadata.write.own", "processing.write.own", "trusted.propose"],
  namespace: "system"
};
216
/**
 * Register every plugin's handlers into `registry` and return the system-level
 * runtime. A plugin either wires itself via `apply(runtime)` or exposes a
 * single `execute` function, which is tapped onto the "process:run" hook with
 * its declared (or intensity-derived) execution mode.
 */
function createMediaRuntime(plugins, registry) {
  const systemRuntime = createPluginRuntime(registry, {
    runtimeManifest: SYSTEM_MANIFEST
  });
  for (const plugin of plugins) {
    const pluginRuntime = createPluginRuntime(registry, plugin);
    if (plugin.apply) {
      plugin.apply(pluginRuntime);
      continue;
    }
    if (plugin.execute) {
      const mode = plugin.executionMode ?? (plugin.intensive ? "background" : "sync");
      pluginRuntime.hooks["process:run"].tap(
        plugin.name,
        (ctx, _api) => plugin.execute(ctx),
        { mode }
      );
    }
  }
  return systemRuntime;
}
232
// Default allow-list policy for "trusted" plugins: only the built-in
// validation plugin qualifies.
var DEFAULT_TRUSTED_POLICY = {
  isAuthorized(id) {
    return ["better-media-validation"].includes(id);
  }
};
238
/**
 * Validate every plugin, enforce the trusted-plugin policy and uniqueness of
 * manifest IDs and namespaces, then build a fully populated hook registry.
 * @returns { registry, runtime } — the hook map plus the system runtime.
 */
function buildPluginRegistry(plugins, policy = DEFAULT_TRUSTED_POLICY) {
  const seenIds = new Set();
  const seenNamespaces = new Set();
  const registerUnique = (seen, value, label, pluginName) => {
    if (seen.has(value)) {
      throw new Error(`Duplicate ${label} detected: ${value} (Plugin: ${pluginName})`);
    }
    seen.add(value);
  };
  for (const plugin of plugins) {
    validatePlugin(plugin);
    const { id, namespace, trustLevel } = plugin.runtimeManifest;
    // Trusted status must be explicitly granted by the policy.
    if (trustLevel === "trusted" && !policy.isAuthorized(id, namespace)) {
      throw new Error(
        `Security Violation: Plugin "${plugin.name}" (${id}) is not authorized for "trusted" status.`
      );
    }
    registerUnique(seenIds, id, "plugin ID", plugin.name);
    registerUnique(seenNamespaces, namespace, "namespace", plugin.name);
  }
  const registry = createEmptyRegistry();
  const runtime = createMediaRuntime(plugins, registry);
  return { registry, runtime };
}
262
/**
 * Structural validation of a plugin and its runtime manifest.
 * Throws on: missing name, neither apply() nor execute(), missing manifest,
 * missing manifest fields, unknown trustLevel, non-array capabilities, or an
 * untrusted plugin requesting the "trusted.propose" capability.
 */
function validatePlugin(plugin) {
  const fail = (message) => {
    throw new Error(message);
  };
  if (!plugin.name || typeof plugin.name !== "string") {
    fail("Plugin must have a non-empty string name");
  }
  if (!plugin.apply && !plugin.execute) {
    fail(`Plugin "${plugin.name}" must define apply() or execute()`);
  }
  if (!plugin.runtimeManifest) {
    fail(
      `Plugin "${plugin.name}" is missing runtimeManifest. V1 Secure Model requires manifests.`
    );
  }
  const { id, version, trustLevel, capabilities, namespace } = plugin.runtimeManifest;
  if (!id || !version || !namespace) {
    fail(
      `Plugin "${plugin.name}" manifest is missing required fields (id, version, namespace)`
    );
  }
  if (trustLevel !== "untrusted" && trustLevel !== "trusted") {
    fail(`Plugin "${plugin.name}" has invalid trustLevel: ${trustLevel}`);
  }
  if (!Array.isArray(capabilities)) {
    fail(`Plugin "${plugin.name}" capabilities must be an array`);
  }
  if (trustLevel === "untrusted" && capabilities.includes("trusted.propose")) {
    fail(
      `Untrusted plugin "${plugin.name}" cannot request "trusted.propose" capability`
    );
  }
}
292
/**
 * Mutable plugin container wired to a LifecycleEngine. Registering a plugin
 * rebuilds the entire hook registry so handler order always reflects
 * registration order.
 */
var PluginRegistry = class {
  plugins = [];
  registry = createEmptyRegistry();
  engine;
  constructor(jobAdapter, initialPlugins = []) {
    this.engine = new LifecycleEngine(this.registry, jobAdapter);
    for (const plugin of initialPlugins) {
      this.register(plugin);
    }
  }
  /** Register a plugin and rebuild the hook map */
  register(plugin) {
    validatePlugin(plugin);
    this.plugins.push(plugin);
    // Rebuild from scratch rather than appending, so every plugin re-taps
    // its handlers in registration order against the same Map instance.
    clearRegistry(this.registry);
    createMediaRuntime(this.plugins, this.registry);
  }
  /** Get all registered plugins (shallow copy) */
  getPlugins() {
    return [...this.plugins];
  }
  /**
   * Execute all handlers for a hook. Runs sync handlers in series.
   * Enqueues background handlers via JobAdapter.
   * @returns ValidationResult if validation phase aborts (valid: false)
   */
  async executeHook(hookName, context) {
    return this.engine.trigger(hookName, context);
  }
  /** Access the internal hook registry (for framework wiring) */
  getRegistry() {
    return this.registry;
  }
};
326
/** True when any hook has at least one handler registered in background mode. */
function hasBackgroundHandlers(registry) {
  for (const handlers of registry.values()) {
    for (const handler of handlers) {
      if (handler.mode === "background") return true;
    }
  }
  return false;
}
332
+
333
// src/index.ts
init_lifecycle_engine();
/**
 * Load previously persisted trusted metadata for a media record.
 * Maps DB columns onto the TrustedMetadata shape and schema-validates it;
 * invalid rows are quarantined (logged) and treated as absent.
 * @returns validated trusted metadata, or null when the record is missing,
 *          fails validation, or contains no trusted sections.
 */
async function loadTrustedFromDb(database, recordId) {
  const record = await database.findOne({
    model: "media",
    where: [{ field: "id", value: recordId }]
  });
  if (!record || typeof record !== "object") return null;
  // Null columns are normalized to undefined so the schema treats them as absent.
  const rawTrusted = {
    file: {
      mimeType: record.mimeType ?? void 0,
      size: record.size ?? void 0,
      originalName: record.filename ?? void 0
    },
    checksums: {
      sha256: record.checksum ?? void 0
    },
    media: {
      width: record.width ?? void 0,
      height: record.height ?? void 0,
      duration: record.duration ?? void 0
    }
  };
  const result = TrustedMetadataSchema.safeParse(rawTrusted);
  if (!result.success) {
    console.error(`[QUARANTINE] Invalid TrustedMetadata mapped from media record "${recordId}"!`);
    console.error(`[QUARANTINE] Reason: ${JSON.stringify(result.error.format())}`);
    console.error(`[QUARANTINE] Data: ${JSON.stringify(rawTrusted)}`);
    return null;
  }
  const validated = result.data;
  // Report a value only when at least one section survived validation.
  return validated.file || validated.checksums || validated.media ? validated : null;
}
366
/**
 * Upsert the media row for `recordId`. Trusted values win over the initial
 * upload arguments; absent (undefined) values never overwrite existing
 * columns. Creates the row in PROCESSING status when it does not exist yet.
 */
async function saveTrustedToDb(database, recordId, fileKey, trusted, initialArgs) {
  const payload = {};
  // Copy the first defined candidate into the payload; leave the key absent
  // when every candidate is undefined so existing columns are untouched.
  const setFirstDefined = (column, ...candidates) => {
    for (const candidate of candidates) {
      if (candidate !== void 0) {
        payload[column] = candidate;
        return;
      }
    }
  };
  setFirstDefined("mimeType", trusted.file?.mimeType, initialArgs?.mimeType);
  setFirstDefined("size", trusted.file?.size, initialArgs?.size);
  setFirstDefined("filename", trusted.file?.originalName, initialArgs?.filename);
  setFirstDefined("checksum", trusted.checksums?.sha256);
  setFirstDefined("width", trusted.media?.width);
  setFirstDefined("height", trusted.media?.height);
  setFirstDefined("duration", trusted.media?.duration);
  setFirstDefined("context", initialArgs?.context);
  const existing = await database.findOne({
    model: "media",
    where: [{ field: "id", value: recordId }]
  });
  payload.storageKey = fileKey;
  if (!existing) {
    await database.create({
      model: "media",
      data: { ...payload, id: recordId, status: "PROCESSING", createdAt: new Date() }
    });
    return;
  }
  if (Object.keys(payload).length > 0) {
    await database.update({
      model: "media",
      where: [{ field: "id", value: recordId }],
      update: payload
    });
  }
}
402
/**
 * Drain a (web or Node) readable stream into a uniquely named temp file and
 * return its path. The extension is taken from the storage key, defaulting
 * to ".bin".
 *
 * Bug fix: on a stream or write error the write stream is now destroyed and
 * the partially written temp file is unlinked before the error is rethrown;
 * previously failed downloads leaked an open handle and a file in tmpdir.
 *
 * @param stream  Node Readable or web ReadableStream with the file bytes
 * @param fileKey storage key, used only to pick the temp file extension
 * @returns absolute path of the temp file (caller owns cleanup on success)
 */
async function streamToTempFile(stream, fileKey) {
  const ext = path.extname(fileKey) || ".bin";
  const tmpPath = path.join(os.tmpdir(), `better-media-${randomUUID()}${ext}`);
  const nodeStream = stream instanceof Readable ? stream : Readable.fromWeb(stream);
  const writeStream = createWriteStream(tmpPath);
  try {
    await new Promise((resolve, reject) => {
      nodeStream.pipe(writeStream);
      nodeStream.on("error", reject);
      writeStream.on("error", reject);
      writeStream.on("finish", resolve);
    });
  } catch (error) {
    writeStream.destroy();
    // Best-effort removal of the partial file; the original error wins.
    await fs2.unlink(tmpPath).catch(() => {
    });
    throw error;
  }
  return tmpPath;
}
415
/**
 * Populate context.utilities.fileContent with the file's bytes: either an
 * in-memory `buffer`, or — when the storage adapter can stream and the object
 * exceeds fileHandling.maxBufferBytes — a `tempPath` pointing at a spooled
 * temp file. Marks the context's file content as verified once bytes were
 * actually obtained (provenance gate for proposeTrusted).
 */
async function loadFileIntoContext(context, fileHandling) {
  const { file, storage } = context;
  const fileKey = file.key;
  const maxBufferBytes = fileHandling.maxBufferBytes;
  // Optional adapter extensions (getSize/getStream) are feature-detected.
  const storageWithExtras = storage;
  if (!context.utilities) context.utilities = {};
  const fileContent = {};
  let useStream = false;
  // Stream only when a size limit is configured and the adapter supports
  // both size probing and streaming.
  if (maxBufferBytes != null && typeof storageWithExtras.getSize === "function" && typeof storageWithExtras.getStream === "function") {
    const size = await storageWithExtras.getSize(fileKey);
    if (size != null && size > maxBufferBytes) {
      useStream = true;
    }
  }
  if (useStream && typeof storageWithExtras.getStream === "function") {
    const stream = await storageWithExtras.getStream(fileKey);
    if (stream != null) {
      const tmpPath = await streamToTempFile(stream, fileKey);
      fileContent.tempPath = tmpPath;
    } else {
      // Stream unavailable after all — fall back to buffering in memory.
      const buffer = await storage.get(fileKey);
      if (buffer != null) fileContent.buffer = buffer;
    }
  } else {
    const buffer = await storage.get(fileKey);
    if (buffer != null) fileContent.buffer = buffer;
  }
  context.utilities.fileContent = fileContent;
  if (fileContent.buffer != null || fileContent.tempPath != null) {
    markFileContentVerified(context);
  }
}
447
/**
 * Best-effort removal of the temp file created by streamToTempFile, clearing
 * tempPath on the context so later phases do not reuse a deleted file.
 */
async function cleanupTempFile(context) {
  const fileContent = context.utilities?.fileContent;
  if (!fileContent?.tempPath) return;
  // Deletion is deliberately best-effort: a missing file is not an error here.
  await fs2.unlink(fileContent.tempPath).catch(() => {
  });
  fileContent.tempPath = void 0;
}
457
+
458
// src/core/pipeline-executor.ts
/**
 * Derive the initial (untrusted) FileInfo from the storage key and the
 * client-supplied metadata. Mime type and original name accept several
 * legacy key spellings; the extension prefers the original name and falls
 * back to the storage key; checksums start out unknown.
 */
function buildFileInfo(fileKey, metadata) {
  const rawMime = metadata.contentType ?? metadata.mimeType ?? metadata["content-type"];
  const rawName = metadata.originalName ?? metadata.originalname;
  const ext = rawName ? path.extname(rawName).toLowerCase() : path.extname(fileKey).toLowerCase();
  return {
    key: fileKey,
    size: typeof metadata.size === "number" ? metadata.size : void 0,
    mimeType: typeof rawMime === "string" ? rawMime : void 0,
    originalName: typeof rawName === "string" ? rawName : void 0,
    extension: ext || void 0,
    checksums: void 0
  };
}
473
/**
 * Initial storage location: only the key is known at pipeline start; bucket,
 * region and URL are left undefined for storage-aware plugins to fill in.
 */
function buildStorageLocation(fileKey) {
  const location = { key: fileKey };
  location.bucket = void 0;
  location.region = void 0;
  location.url = void 0;
  return location;
}
481
/**
 * Copy verified (trusted) values over the untrusted file info so downstream
 * code sees server-verified mime/size/name, with checksums merged in.
 */
function syncTrustedToFile(context) {
  const { trusted, file } = context;
  const trustedFile = trusted.file ?? {};
  if (trustedFile.mimeType != null) file.mimeType = trustedFile.mimeType;
  if (trustedFile.size != null) file.size = trustedFile.size;
  if (trustedFile.originalName != null) file.originalName = trustedFile.originalName;
  if (trusted.checksums) file.checksums = { ...file.checksums, ...trusted.checksums };
}
488
/**
 * Raised when a hook phase aborts the pipeline with { valid: false }.
 * Carries the record ID, the storage key and the full validation result.
 */
var ValidationError = class extends Error {
  constructor(recordId, fileKey, result) {
    super(result.message ?? "Validation failed");
    this.name = "ValidationError";
    this.recordId = recordId;
    this.fileKey = fileKey;
    this.result = result;
  }
};
497
/**
 * Orchestrates the full lifecycle for one media record: loads prior trusted
 * metadata, loads file content, runs every hook phase in order, and persists
 * trusted metadata both before and after the hook run. Temp files are always
 * cleaned up, even on failure.
 */
var PipelineExecutor = class {
  constructor(engine, storage, database, jobs, fileHandling = {}) {
    this.engine = engine;
    this.storage = storage;
    this.database = database;
    this.jobs = jobs;
    this.fileHandling = fileHandling;
  }
  /**
   * Run the pipeline for `recordId`/`fileKey`.
   * @param metadata   client-supplied file metadata (copied, not mutated)
   * @param appContext extra application context merged into plugin metadata
   * @throws ValidationError when any hook phase returns { valid: false }
   */
  async run(recordId, fileKey, metadata = {}, appContext = {}) {
    const meta = { ...metadata };
    const trustedFromDb = await loadTrustedFromDb(this.database, recordId);
    const context = {
      recordId,
      file: buildFileInfo(fileKey, meta),
      storageLocation: buildStorageLocation(fileKey),
      processing: {},
      metadata: { ...meta, ...appContext },
      // Merge for plugins to read backwards-compatibly
      trusted: trustedFromDb ?? {},
      utilities: {},
      storage: this.storage,
      database: this.database,
      jobs: this.jobs
    };
    if (trustedFromDb) {
      // Prefer persisted, verified values over client-supplied metadata.
      syncTrustedToFile(context);
    }
    try {
      await loadFileIntoContext(context, this.fileHandling);
      // Persist an initial row (status PROCESSING on create) before hooks run.
      await saveTrustedToDb(this.database, recordId, fileKey, context.trusted, {
        filename: context.file.originalName,
        mimeType: context.file.mimeType,
        size: context.file.size,
        context: appContext
      });
      for (const phase of HOOK_NAMES) {
        const result = await this.engine.trigger(phase, context);
        if (result !== void 0 && typeof result === "object" && result.valid === false) {
          throw new ValidationError(recordId, fileKey, result);
        }
      }
      // Persist whatever trusted metadata the hooks accepted.
      await saveTrustedToDb(this.database, recordId, fileKey, context.trusted, {
        filename: context.file.originalName,
        mimeType: context.file.mimeType,
        size: context.file.size,
        context: appContext
      });
    } finally {
      await cleanupTempFile(context);
    }
  }
};
549
/**
 * Copy verified (trusted) values over the untrusted file info (worker-side
 * duplicate of syncTrustedToFile, kept separate by the bundler).
 */
function syncTrustedToFile2(context) {
  const { trusted, file } = context;
  const trustedFile = trusted.file ?? {};
  if (trustedFile.mimeType != null) file.mimeType = trustedFile.mimeType;
  if (trustedFile.size != null) file.size = trustedFile.size;
  if (trustedFile.originalName != null) file.originalName = trustedFile.originalName;
  if (trusted.checksums) file.checksums = { ...file.checksums, ...trusted.checksums };
}
556
/**
 * Worker-side entry point: re-hydrate a lifecycle context from a queued
 * payload and run exactly one background handler (hookName/pluginName).
 * Supports a legacy payload shape that only carried `fileKey`. Persists any
 * trusted metadata the handler proposed and always cleans up temp files.
 */
async function runBackgroundJob(payload, registry, storage, database, jobs, fileHandling = {}) {
  const {
    recordId: payloadRecordId,
    metadata = {},
    file: payloadFile,
    storageLocation: payloadStorage,
    processing: payloadProcessing,
    hookName,
    pluginName
  } = payload;
  const meta = { ...metadata };
  const legacyKey = payload.fileKey;
  if (!payloadFile && !legacyKey) {
    throw new Error("Background job payload must include file or fileKey");
  }
  // Reconstruct FileInfo from legacy metadata when only fileKey was queued.
  const file = payloadFile ?? (legacyKey ? {
    key: legacyKey,
    size: typeof meta.size === "number" ? meta.size : void 0,
    mimeType: typeof (meta.contentType ?? meta.mimeType ?? meta["content-type"]) === "string" ? meta.contentType ?? meta.mimeType ?? meta["content-type"] : void 0,
    originalName: typeof (meta.originalName ?? meta.originalname) === "string" ? meta.originalName ?? meta.originalname : void 0,
    extension: path.extname(legacyKey).toLowerCase() || void 0
  } : { key: "" });
  const recordId = payloadRecordId ?? file.key ?? "unknown";
  const storageLocation = payloadStorage ?? { key: file.key };
  const processing = payloadProcessing ?? {};
  const trustedFromDb = await loadTrustedFromDb(database, recordId);
  const context = {
    recordId,
    file,
    storageLocation,
    processing,
    metadata: meta,
    trusted: trustedFromDb ?? {},
    utilities: {},
    storage,
    database,
    jobs
  };
  if (trustedFromDb) {
    syncTrustedToFile2(context);
  }
  try {
    await loadFileIntoContext(context, fileHandling);
    const handlers = registry.get(hookName) ?? [];
    const handler = handlers.find((h) => h.name === pluginName);
    if (!handler) {
      throw new Error(`Handler not found: ${hookName}/${pluginName}`);
    }
    const manifest = handler.manifest;
    // Bundler-emitted dynamic import: run the lazy module init, then pull
    // createSecureContext from the lifecycle-engine export map.
    const { createSecureContext: createSecureContext2 } = await Promise.resolve().then(() => (init_lifecycle_engine(), lifecycle_engine_exports));
    const { proxy, api } = createSecureContext2(
      context,
      pluginName,
      manifest.namespace,
      manifest.trustLevel,
      manifest.capabilities
    );
    await handler.fn(proxy, api);
    // Persist only when the handler actually proposed trusted file or
    // checksum data.
    if (context.trusted.file ?? context.trusted.checksums) {
      await saveTrustedToDb(database, recordId, file.key, context.trusted);
    }
  } finally {
    await cleanupTempFile(context);
  }
}
621
/** Quote a SQL identifier, doubling any embedded double quotes. */
function quote(name) {
  const escaped = name.replace(/"/g, '""');
  return `"${escaped}"`;
}
624
/** Re-key a DB row from column names to app-facing (camelCase) names. */
function rowToAppKeys(row) {
  return Object.fromEntries(
    Object.entries(row).map(([column, value]) => [toCamelCase(column), value])
  );
}
631
/**
 * Compile a `where` condition list into a parameterized SQL fragment.
 *
 * Supported operators: comparison (default "="), LIKE helpers ("contains",
 * "starts_with", "ends_with"), list membership ("in"/"not_in", with an empty
 * list compiling to FALSE/TRUE respectively) and NULL-aware "="/"!=".
 * Conditions after the first are joined with the PREVIOUS condition's
 * `connector` ("AND" when absent).
 * NOTE(review): joining on the previous entry's connector (rather than the
 * current one's) is preserved as-is — confirm against the adapter contract.
 *
 * Bug fix: the connector is now emitted only when a previous condition
 * actually produced SQL; previously a skipped (falsy) leading entry yielded
 * invalid SQL like `WHERE AND col = $1`.
 *
 * @param where   optional array of condition objects (falsy entries skipped)
 * @param startAt first $n placeholder index to use (default 1)
 * @returns { sql, values } — sql is "" or a leading " WHERE ..." fragment
 */
function buildWhere(where, startAt = 1) {
  if (!where?.length) return { sql: "", values: [] };
  const parts = [];
  const values = [];
  let idx = startAt;
  for (let i = 0; i < where.length; i++) {
    const condition = where[i];
    if (!condition) continue;
    const connector = i > 0 ? where[i - 1]?.connector ?? "AND" : "AND";
    const field = quote(toDbFieldName(condition.field));
    const op = condition.operator ?? "=";
    if (parts.length > 0) parts.push(connector);
    if (op === "contains" || op === "starts_with" || op === "ends_with") {
      const raw = String(condition.value ?? "");
      const value = op === "contains" ? `%${raw}%` : op === "starts_with" ? `${raw}%` : `%${raw}`;
      parts.push(`${field} LIKE $${idx}`);
      values.push(value);
      idx += 1;
      continue;
    }
    if (op === "in" || op === "not_in") {
      const list = Array.isArray(condition.value) ? condition.value : [condition.value];
      if (!list.length) {
        // Empty IN list can never match; empty NOT IN matches everything.
        parts.push(op === "in" ? "FALSE" : "TRUE");
        continue;
      }
      const placeholders = list.map(() => `$${idx++}`).join(", ");
      parts.push(`${field} ${op === "in" ? "IN" : "NOT IN"} (${placeholders})`);
      values.push(...list);
      continue;
    }
    if (condition.value === null && (op === "=" || op === "!=")) {
      // NULL comparisons must use IS / IS NOT, not = / <>.
      parts.push(`${field} ${op === "=" ? "IS" : "IS NOT"} NULL`);
      continue;
    }
    const sqlOp = op === "!=" ? "<>" : op;
    parts.push(`${field} ${sqlOp} $${idx}`);
    values.push(condition.value);
    idx += 1;
  }
  if (!parts.length) return { sql: "", values: [] };
  return { sql: ` WHERE ${parts.join(" ")}`, values };
}
674
// Postgres implementation of the database adapter contract, backed by any
// pg-compatible client exposing `query` (and `connect` for transactions).
var PostgresDatabaseAdapter = class _PostgresDatabaseAdapter {
  // `db` is a pg Pool/Client-like object with query(sql, values).
  constructor(db) {
    this.db = db;
  }
  // Adapter identifier.
  id = "postgres";
679
+ modelFields(model) {
680
+ return schema[model]?.fields ?? {};
681
+ }
682
+ async create(options) {
683
+ const fields = this.modelFields(options.model);
684
+ const serialized = serializeData(fields, options.data);
685
+ const keys = Object.keys(serialized);
686
+ const columns = keys.map((k) => quote(toDbFieldName(k))).join(", ");
687
+ const placeholders = keys.map((_, i) => `$${i + 1}`).join(", ");
688
+ const values = keys.map((k) => serialized[k]);
689
+ const result = await this.db.query(
690
+ `INSERT INTO ${quote(options.model)} (${columns}) VALUES (${placeholders}) RETURNING *`,
691
+ values
692
+ );
693
+ return deserializeData(fields, rowToAppKeys(result.rows[0] ?? serialized));
694
+ }
695
+ async findOne(options) {
696
+ const rows = await this.findMany({ ...options, limit: 1 });
697
+ return rows[0] ?? null;
698
+ }
699
  /**
   * SELECT rows for `options.model`, honoring optional `select`, `where`,
   * `sortBy`, `limit` and `offset`. Column names are mapped app⇄db in both
   * directions; rows are deserialized per the model's field definitions.
   */
  async findMany(options) {
    const fields = this.modelFields(options.model);
    // Selected columns are aliased back to their app-facing names.
    const select = options.select && options.select.length > 0 ? options.select.map((f) => `${quote(toDbFieldName(f))} AS ${quote(f)}`).join(", ") : "*";
    let query = `SELECT ${select} FROM ${quote(options.model)}`;
    const where = buildWhere(options.where);
    query += where.sql;
    const values = [...where.values];
    if (options.sortBy) {
      query += ` ORDER BY ${quote(toDbFieldName(options.sortBy.field))} ${options.sortBy.direction.toUpperCase()}`;
    }
    // LIMIT/OFFSET placeholders continue numbering after the WHERE values.
    if (typeof options.limit === "number") {
      query += ` LIMIT $${values.length + 1}`;
      values.push(options.limit);
    }
    if (typeof options.offset === "number") {
      query += ` OFFSET $${values.length + 1}`;
      values.push(options.offset);
    }
    const result = await this.db.query(query, values);
    return result.rows.map((r) => deserializeData(fields, rowToAppKeys(r)));
  }
720
+ async update(options) {
721
+ const updated = await this.updateMany(options);
722
+ if (!updated) return null;
723
+ return this.findOne({ model: options.model, where: options.where });
724
+ }
725
  /**
   * UPDATE matching rows with `options.update`.
   * @returns number of affected rows (0 when the serialized payload is empty).
   */
  async updateMany(options) {
    const fields = this.modelFields(options.model);
    const serialized = serializeData(fields, options.update);
    const entries = Object.entries(serialized);
    if (!entries.length) return 0;
    const setSql = entries.map(([key], i) => `${quote(toDbFieldName(key))} = $${i + 1}`).join(", ");
    const setValues = entries.map(([, value]) => value);
    // WHERE placeholders continue numbering after the SET placeholders.
    const where = buildWhere(options.where, setValues.length + 1);
    const query = `UPDATE ${quote(options.model)} SET ${setSql}${where.sql}`;
    const result = await this.db.query(query, [...setValues, ...where.values]);
    return Number(result.rowCount ?? 0);
  }
737
  /** DELETE matching rows; the affected-row count is intentionally discarded. */
  async delete(options) {
    await this.deleteMany(options);
  }
740
+ async deleteMany(options) {
741
+ const where = buildWhere(options.where);
742
+ const query = `DELETE FROM ${quote(options.model)}${where.sql}`;
743
+ const result = await this.db.query(query, where.values);
744
+ return Number(result.rowCount ?? 0);
745
+ }
746
+ async count(options) {
747
+ const where = buildWhere(options.where);
748
+ const result = await this.db.query(
749
+ `SELECT COUNT(*)::int AS c FROM ${quote(options.model)}${where.sql}`,
750
+ where.values
751
+ );
752
+ return Number(result.rows[0]?.c ?? 0);
753
+ }
754
+ async raw(query, params) {
755
+ const result = await this.db.query(query, params);
756
+ return result.rows;
757
+ }
758
  /**
   * Run `callback` inside a BEGIN/COMMIT transaction on a dedicated pooled
   * connection, rolling back on any error. Requires a pg Pool-like
   * `connect()`; plain clients are rejected.
   * @returns whatever `callback` resolves to.
   */
  async transaction(callback) {
    const pool = this.db;
    if (typeof pool.connect !== "function") {
      throw new Error("[better-media] The provided Postgres client does not support transactions.");
    }
    const client = await pool.connect();
    try {
      await client.query("BEGIN");
      // The callback gets an adapter bound to this single connection so all
      // of its queries participate in the transaction.
      const trxAdapter = new _PostgresDatabaseAdapter(client);
      const result = await callback(trxAdapter);
      await client.query("COMMIT");
      return result;
    } catch (error) {
      await client.query("ROLLBACK");
      throw error;
    } finally {
      // Return the connection to the pool (optional on bare clients).
      client.release?.();
    }
  }
777
  /** Dialect tag consumed by the migration planner. */
  __getDialect() {
    return "postgres";
  }
780
  /**
   * Introspect the current schema: one entry per table with its columns
   * (names camelCased, raw data type, nullability), for migration planning.
   */
  async __getMetadata() {
    const result = await this.db.query(
      `SELECT table_name AS "tableName", column_name AS "columnName", data_type AS "dataType", is_nullable AS "isNullable"
FROM information_schema.columns
WHERE table_schema = current_schema()
ORDER BY table_name, ordinal_position`
    );
    // Group the flat column rows by table, preserving column order.
    const grouped = /* @__PURE__ */ new Map();
    for (const row of result.rows) {
      const tableName = String(row.tableName);
      if (!grouped.has(tableName)) grouped.set(tableName, { name: tableName, columns: [] });
      grouped.get(tableName).columns.push({
        name: toCamelCase(String(row.columnName)),
        dataType: String(row.dataType),
        isNullable: String(row.isNullable).toUpperCase() === "YES"
      });
    }
    return [...grouped.values()];
  }
799
  /**
   * Apply one migration operation (createTable, addColumn or createIndex).
   * All DDL is idempotent via IF NOT EXISTS; unknown operation types are
   * silently ignored.
   */
  async __executeMigration(operation) {
    if (operation.type === "createTable") {
      // Assemble each column definition from the field descriptor.
      const columns = Object.entries(operation.definition.fields).map(([name, field]) => {
        const parts = [quote(toDbFieldName(name)), getColumnType(field, "postgres")];
        if (field.primaryKey) parts.push("PRIMARY KEY");
        if (field.required) parts.push("NOT NULL");
        if (field.unique) parts.push("UNIQUE");
        if (field.references) {
          // Foreign keys default to ON DELETE CASCADE.
          parts.push(
            `REFERENCES ${quote(field.references.model)}(${quote(toDbFieldName(field.references.field))}) ON DELETE ${String(
              field.references.onDelete ?? "CASCADE"
            ).toUpperCase()}`
          );
        }
        return parts.join(" ");
      }).join(", ");
      await this.db.query(`CREATE TABLE IF NOT EXISTS ${quote(operation.table)} (${columns})`);
      // Secondary indexes declared on the table definition.
      for (const index of operation.definition.indexes ?? []) {
        const indexName = `idx_${operation.table}_${index.fields.map((f) => toDbFieldName(f)).join("_")}`;
        await this.db.query(
          `CREATE ${index.unique ? "UNIQUE " : ""}INDEX IF NOT EXISTS ${quote(indexName)} ON ${quote(
            operation.table
          )} (${index.fields.map((f) => quote(toDbFieldName(f))).join(", ")})`
        );
      }
      return;
    }
    if (operation.type === "addColumn") {
      const field = operation.definition;
      const parts = [
        quote(toDbFieldName(operation.field)),
        getColumnType(operation.definition, "postgres")
      ];
      if (field.required) parts.push("NOT NULL");
      if (field.unique) parts.push("UNIQUE");
      if (field.references) {
        parts.push(
          `REFERENCES ${quote(field.references.model)}(${quote(toDbFieldName(field.references.field))}) ON DELETE ${String(
            field.references.onDelete ?? "CASCADE"
          ).toUpperCase()}`
        );
      }
      await this.db.query(
        `ALTER TABLE ${quote(operation.table)} ADD COLUMN IF NOT EXISTS ${parts.join(" ")}`
      );
      return;
    }
    if (operation.type === "createIndex") {
      await this.db.query(
        `CREATE ${operation.unique ? "UNIQUE " : ""}INDEX IF NOT EXISTS ${quote(
          operation.name
        )} ON ${quote(operation.table)} (${operation.fields.map((f) => quote(toDbFieldName(f))).join(", ")})`
      );
    }
  }
854
+ };
855
// Factory: wraps a pg Pool/Client in the framework's database adapter.
function postgresDatabase(pool) {
  const adapter = new PostgresDatabaseAdapter(pool);
  return adapter;
}
858
// Normalizes the user-supplied `database` option: a raw pg pool (detected by
// shape, and lacking the adapter's own `create` method) is auto-wrapped in
// the Postgres adapter; anything else is assumed to already be an adapter.
function toDatabaseAdapter(database) {
  const isRawPgPool = isPgPoolLike(database) && typeof database.create !== "function";
  return isRawPgPool ? postgresDatabase(database) : database;
}
864
// Fallback job adapter used when no background handlers are registered and
// no explicit adapter is configured: enqueue resolves immediately and
// discards the job.
function createNoopJobAdapter() {
  const enqueue = async (_name, _payload) => {
  };
  return { enqueue };
}
870
// Normalizes any supported MediaFileInput shape (buffer | path | stream |
// url) into { data: Buffer, metadata, shouldDeleteSource, sourcePath }.
// `fileHandling.maxBufferBytes`, when set, caps how much may be buffered
// in memory for path and stream inputs; exceeding it throws.
async function normalizeInput(input, fileHandling) {
  const { file, metadata = {}, deleteAfterUpload = true } = input;
  const maxBufferBytes = fileHandling.maxBufferBytes;

  // In-memory buffer: used as-is, nothing to clean up.
  if ("buffer" in file && file.buffer) {
    return { data: file.buffer, metadata, shouldDeleteSource: false, sourcePath: undefined };
  }

  // Filesystem path: size-check before reading, optionally flag the source
  // for deletion after a successful upload.
  if ("path" in file && file.path) {
    if (maxBufferBytes != null) {
      const stat = await fs2.stat(file.path);
      if (stat.size > maxBufferBytes) {
        throw new Error(
          `File at "${file.path}" is ${stat.size} bytes, which exceeds the configured maxBufferBytes limit of ${maxBufferBytes}. Use a storage adapter that supports streaming uploads, or increase maxBufferBytes.`
        );
      }
    }
    const data = await fs2.readFile(file.path);
    return deleteAfterUpload
      ? { data, metadata, shouldDeleteSource: true, sourcePath: file.path }
      : { data, metadata, shouldDeleteSource: false, sourcePath: undefined };
  }

  // Async-iterable stream: accumulate chunks, enforcing the byte limit as
  // data arrives rather than after full buffering.
  if ("stream" in file && file.stream) {
    const chunks = [];
    let totalBytes = 0;
    for await (const chunk of file.stream) {
      const buf = Buffer.from(chunk);
      totalBytes += buf.length;
      if (maxBufferBytes != null && totalBytes > maxBufferBytes) {
        throw new Error(
          `Stream exceeded the configured maxBufferBytes limit of ${maxBufferBytes}. Use a storage adapter that supports streaming uploads, or increase maxBufferBytes.`
        );
      }
      chunks.push(buf);
    }
    return { data: Buffer.concat(chunks), metadata, shouldDeleteSource: false, sourcePath: undefined };
  }

  // Remote URL: "import" mode downloads the body; "reference" mode is a
  // declared-but-unimplemented feature and throws explicitly.
  if ("url" in file && file.url) {
    if (file.mode === "reference") {
      throw new Error("URL reference mode is not fully implemented yet.");
    }
    const response = await fetch(file.url);
    if (!response.ok) throw new Error(`Failed to fetch URL: ${response.statusText}`);
    return { data: Buffer.from(await response.arrayBuffer()), metadata, shouldDeleteSource: false, sourcePath: undefined };
  }

  throw new Error("Invalid MediaFileInput. Must provide buffer, stream, path, or url.");
}
918
// Framework entry point: wires storage, database, plugins and job handling
// together and returns the public better-media API ({ upload, files,
// runBackgroundJob }).
function createBetterMedia(config) {
  const { storage, plugins, trustedPolicy } = config;
  // Accepts either a ready-made adapter or a raw pg pool (auto-wrapped).
  const database = toDatabaseAdapter(config.database);
  const { registry } = buildPluginRegistry(plugins, trustedPolicy);
  const fileHandling = config.fileHandling ?? {};
  // Job adapter resolution: an explicitly configured adapter wins; otherwise
  // an in-memory adapter is created only when some plugin registered
  // background handlers; otherwise a no-op adapter discards enqueued jobs.
  const jobAdapter = config.jobs ?? (hasBackgroundHandlers(registry) ? (() => {
    const adapter = memoryJobAdapter({
      // `adapter` is deliberately referenced inside its own processor
      // closure so background jobs can enqueue follow-up jobs on the same
      // adapter (safe: the closure runs only after assignment completes).
      processor: (p) => runBackgroundJob(
        p,
        registry,
        storage,
        database,
        adapter,
        fileHandling
      )
    });
    return adapter;
  })() : createNoopJobAdapter());
  const engine = new LifecycleEngine(registry, jobAdapter);
  const executor = new PipelineExecutor(engine, storage, database, jobAdapter, fileHandling);
  // Runs the full processing pipeline for an object already in storage.
  const runPipeline = (recordId, fileKey, metadata = {}, context = {}) => executor.run(recordId, fileKey, metadata, context);
  return {
    upload: {
      // Normalizes any supported input shape, writes the bytes to storage,
      // then runs the processing pipeline for the new record.
      async ingest(input) {
        const normalized = await normalizeInput(input, fileHandling);
        const recordId = randomUUID();
        // Key precedence: explicit key > metadata filename > generated id.
        const finalKey = input.key ?? normalized.metadata.filename ?? recordId;
        try {
          await storage.put(finalKey, normalized.data);
          await runPipeline(
            recordId,
            finalKey,
            normalized.metadata,
            normalized.metadata.context ?? {}
          );
          // NOTE(review): status is reported as "processed" without writing
          // a DB record here — presumably PipelineExecutor.run persists the
          // media row; confirm against its implementation.
          return {
            id: recordId,
            key: finalKey,
            status: "processed",
            metadata: normalized.metadata
          };
        } finally {
          // Best-effort cleanup of a temp source file (path inputs with
          // deleteAfterUpload); failures are logged, never rethrown.
          if (normalized.shouldDeleteSource && normalized.sourcePath) {
            await fs2.unlink(normalized.sourcePath).catch((err) => console.warn("Cleanup failed:", err));
          }
        }
      },
      // Convenience wrappers delegating to ingest with the right file shape.
      fromBuffer(buffer, input) {
        return this.ingest({ file: { buffer }, ...input });
      },
      fromStream(stream, input) {
        return this.ingest({ file: { stream }, ...input });
      },
      fromPath(path4, input) {
        return this.ingest({ file: { path: path4 }, ...input });
      },
      fromUrl(url, input) {
        // mode defaults to "import" (download); "reference" is rejected
        // downstream by normalizeInput as not implemented.
        return this.ingest({ file: { url, mode: input?.mode ?? "import" }, ...input });
      },
      // Delegates presigned-upload URL creation to the storage adapter,
      // failing clearly when the adapter does not support it.
      async requestPresignedUpload(key, options) {
        const fn = storage.createPresignedUpload;
        if (typeof fn !== "function") {
          throw new Error(
            "Presigned upload not supported by this storage adapter. Use an S3/GCS adapter."
          );
        }
        return fn.call(storage, key, options);
      },
      // Finalizes a client-side (presigned) upload: reuses an existing media
      // record for this storage key if present, then runs the pipeline.
      async complete(key, metadata = {}) {
        const existing = await database.findOne({
          model: "media",
          where: [{ field: "storageKey", value: key }]
        });
        const recordId = existing?.id ?? randomUUID();
        await runPipeline(
          recordId,
          key,
          metadata,
          metadata.context ?? {}
        );
        return {
          id: recordId,
          key,
          status: "processed",
          metadata
        };
      }
    },
    files: {
      // Looks up a media record by id (null/undefined when absent —
      // depends on the database adapter's findOne contract).
      get(id) {
        return database.findOne({ model: "media", where: [{ field: "id", value: id }] });
      },
      // Removes both the stored object and the DB row concurrently.
      // Falls back to using the id as the storage key when no record exists.
      async delete(id) {
        const record = await this.get(id);
        const storageKey = record?.storageKey ?? id;
        await Promise.all([
          storage.delete(storageKey),
          database.delete({ model: "media", where: [{ field: "id", value: id }] })
        ]);
      },
      // Generates an access URL via the storage adapter; adapter-dependent.
      async getUrl(id, options) {
        const record = await this.get(id);
        const storageKey = record?.storageKey ?? id;
        const fn = storage.getUrl;
        if (typeof fn !== "function") {
          throw new Error(
            "URL generation not supported by this storage adapter. Use an S3/GCS adapter."
          );
        }
        return fn.call(storage, storageKey, options);
      },
      // Re-runs the processing pipeline for an existing record, e.g. after
      // plugin configuration changes. Throws when the record is missing.
      async reprocess(id, metadata = {}) {
        const record = await this.get(id);
        if (!record) throw new Error(`Media record not found: ${id}`);
        const storageKey = record.storageKey ?? id;
        return runPipeline(id, storageKey, metadata);
      }
    },
    // Manual entry point for executing a background-job payload, for use by
    // external job runners instead of the built-in adapter.
    async runBackgroundJob(payload) {
      await runBackgroundJob(payload, registry, storage, database, jobAdapter, fileHandling);
    }
  };
}
1041
+
1042
+ export { PluginRegistry, ValidationError, buildPluginRegistry, createBetterMedia, hasBackgroundHandlers, postgresDatabase, toDatabaseAdapter, validatePlugin };
1043
+ //# sourceMappingURL=index.mjs.map
1044
+ //# sourceMappingURL=index.mjs.map